@fjell/cache 4.7.54 → 4.7.56

This diff shows the contents of publicly released package versions as they appear in their respective public registries; it is provided for informational purposes only.
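Summary of the changes below: the bundle registers two new loggers for src/cache/layers/ItemCache.ts and src/cache/layers/QueryCache.ts, which renumbers every later logger variable (logger becomes logger3, logger2 becomes logger4, and so on); TwoLayerCacheMap drops its debug option and now always logs its initialization at debug level; setQueryResult() builds query metadata before writing to the underlying cache and forwards that metadata when the underlying cache accepts a third argument; getQueryResult() can restore metadata from an underlying cache that exposes getQueryResultWithMetadata(); and determineQueryCompleteness() now treats a hash as complete only when it starts with "all:".

The metadata forwarding relies on a parameter-count check. A minimal sketch of that pattern, assuming a hypothetical underlyingCache backend rather than a specific @fjell/cache export:

```js
// Illustrative only: forward metadata when the backend's setQueryResult declares a third parameter.
async function storeQueryResult(underlyingCache, queryHash, itemKeys, metadata) {
  if ("setQueryResult" in underlyingCache) {
    // Function.length is the number of parameters declared before any default or rest
    // parameter, so a backend declaring (queryHash, itemKeys, metadata) signals support.
    if (underlyingCache.setQueryResult.length >= 3) {
      await underlyingCache.setQueryResult(queryHash, itemKeys, metadata);
    } else {
      await underlyingCache.setQueryResult(queryHash, itemKeys);
    }
  }
}
```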
package/dist/index.js CHANGED
@@ -16,6 +16,12 @@ import Logging from "@fjell/logging";
16
16
  var LibLogger = Logging.getLogger("@fjell/cache");
17
17
  var logger_default = LibLogger;
18
18
 
19
+ // src/cache/layers/ItemCache.ts
20
+ var logger = logger_default.get("ItemCache");
21
+
22
+ // src/cache/layers/QueryCache.ts
23
+ var logger2 = logger_default.get("QueryCache");
24
+
19
25
  // src/CacheMap.ts
20
26
  var CacheMap = class {
21
27
  types;
@@ -25,7 +31,7 @@ var CacheMap = class {
25
31
  };
26
32
 
27
33
  // src/cache/layers/TwoLayerCacheMap.ts
28
- var logger = logger_default.get("TwoLayerCacheMap");
34
+ var logger3 = logger_default.get("TwoLayerCacheMap");
29
35
  var TwoLayerCacheMap = class _TwoLayerCacheMap extends CacheMap {
30
36
  constructor(underlyingCache, options = {}) {
31
37
  super(underlyingCache.types);
@@ -35,18 +41,15 @@ var TwoLayerCacheMap = class _TwoLayerCacheMap extends CacheMap {
35
41
  // 1 hour for items
36
42
  queryTTL: options.queryTTL || 300,
37
43
  // 5 minutes for complete queries
38
- facetTTL: options.facetTTL || 60,
44
+ facetTTL: options.facetTTL || 60
39
45
  // 1 minute for partial queries
40
- debug: options.debug || false
41
46
  };
42
- if (this.options.debug) {
43
- logger.info("TwoLayerCacheMap initialized", {
44
- underlyingType: this.underlyingCache.implementationType,
45
- itemTTL: this.options.itemTTL,
46
- queryTTL: this.options.queryTTL,
47
- facetTTL: this.options.facetTTL
48
- });
49
- }
47
+ logger3.debug("TwoLayerCacheMap initialized", {
48
+ underlyingType: this.underlyingCache.implementationType,
49
+ itemTTL: this.options.itemTTL,
50
+ queryTTL: this.options.queryTTL,
51
+ facetTTL: this.options.facetTTL
52
+ });
50
53
  }
51
54
  options;
52
55
  // Query metadata tracking for enhanced TTL and completeness
@@ -94,13 +97,11 @@ var TwoLayerCacheMap = class _TwoLayerCacheMap extends CacheMap {
94
97
  * Set a query result with rich metadata for two-layer architecture
95
98
  */
96
99
  async setQueryResult(queryHash, itemKeys) {
97
- logger.debug("QUERY_CACHE: TwoLayerCacheMap.setQueryResult() called", {
100
+ logger3.debug("QUERY_CACHE: TwoLayerCacheMap.setQueryResult() called", {
98
101
  queryHash,
99
102
  itemKeyCount: itemKeys.length,
100
103
  itemKeys: itemKeys.map((k) => JSON.stringify(k))
101
104
  });
102
- await this.underlyingCache.setQueryResult(queryHash, itemKeys);
103
- logger.debug("QUERY_CACHE: Stored query result in underlying cache", { queryHash });
104
105
  const now = /* @__PURE__ */ new Date();
105
106
  const isComplete = this.determineQueryCompleteness(queryHash, itemKeys);
106
107
  const ttlSeconds = isComplete ? this.options.queryTTL : this.options.facetTTL;
@@ -114,7 +115,17 @@ var TwoLayerCacheMap = class _TwoLayerCacheMap extends CacheMap {
114
115
  params: this.extractParams(queryHash)
115
116
  };
116
117
  this.queryMetadataMap.set(queryHash, metadata);
117
- logger.debug("QUERY_CACHE: Set query result with metadata", {
118
+ if ("setQueryResult" in this.underlyingCache) {
119
+ const setQueryResultFn = this.underlyingCache.setQueryResult;
120
+ if (setQueryResultFn.length >= 3) {
121
+ await this.underlyingCache.setQueryResult(queryHash, itemKeys, metadata);
122
+ logger3.debug("QUERY_CACHE: Stored query result with metadata in underlying cache", { queryHash });
123
+ } else {
124
+ await this.underlyingCache.setQueryResult(queryHash, itemKeys);
125
+ logger3.debug("QUERY_CACHE: Stored query result without metadata in underlying cache (not supported)", { queryHash });
126
+ }
127
+ }
128
+ logger3.debug("QUERY_CACHE: Set query result with metadata", {
118
129
  queryHash,
119
130
  itemCount: itemKeys.length,
120
131
  isComplete,
@@ -130,12 +141,27 @@ var TwoLayerCacheMap = class _TwoLayerCacheMap extends CacheMap {
130
141
  * Get a query result with expiration checking
131
142
  */
132
143
  async getQueryResult(queryHash) {
133
- logger.debug("QUERY_CACHE: TwoLayerCacheMap.getQueryResult() called", { queryHash });
134
- const metadata = this.queryMetadataMap.get(queryHash);
144
+ logger3.debug("QUERY_CACHE: TwoLayerCacheMap.getQueryResult() called", { queryHash });
145
+ let metadata = this.queryMetadataMap.get(queryHash);
146
+ if (!metadata && "getQueryResultWithMetadata" in this.underlyingCache) {
147
+ logger3.debug("QUERY_CACHE: Metadata not in memory, loading from underlying cache", { queryHash });
148
+ const resultWithMetadata = await this.underlyingCache.getQueryResultWithMetadata(queryHash);
149
+ if (resultWithMetadata?.metadata) {
150
+ const restoredMetadata = resultWithMetadata.metadata;
151
+ metadata = restoredMetadata;
152
+ this.queryMetadataMap.set(queryHash, restoredMetadata);
153
+ logger3.debug("QUERY_CACHE: Loaded metadata from underlying cache", {
154
+ queryHash,
155
+ expiresAt: restoredMetadata.expiresAt.toISOString(),
156
+ isComplete: restoredMetadata.isComplete,
157
+ queryType: restoredMetadata.queryType
158
+ });
159
+ }
160
+ }
135
161
  if (metadata) {
136
162
  const now = /* @__PURE__ */ new Date();
137
163
  const isExpired = metadata.expiresAt < now;
138
- logger.debug("QUERY_CACHE: Query metadata found", {
164
+ logger3.debug("QUERY_CACHE: Query metadata found", {
139
165
  queryHash,
140
166
  isExpired,
141
167
  expiresAt: metadata.expiresAt.toISOString(),
@@ -144,7 +170,7 @@ var TwoLayerCacheMap = class _TwoLayerCacheMap extends CacheMap {
144
170
  queryType: metadata.queryType
145
171
  });
146
172
  if (isExpired) {
147
- logger.debug("QUERY_CACHE: Query result EXPIRED, removing", {
173
+ logger3.debug("QUERY_CACHE: Query result EXPIRED, removing", {
148
174
  queryHash,
149
175
  expiresAt: metadata.expiresAt.toISOString(),
150
176
  now: now.toISOString()
@@ -153,19 +179,19 @@ var TwoLayerCacheMap = class _TwoLayerCacheMap extends CacheMap {
153
179
  return null;
154
180
  }
155
181
  } else {
156
- logger.debug("QUERY_CACHE: No metadata found for query hash", { queryHash });
182
+ logger3.debug("QUERY_CACHE: No metadata found for query hash (neither in memory nor persistent)", { queryHash });
157
183
  }
158
- logger.debug("QUERY_CACHE: Fetching query result from underlying cache", { queryHash });
184
+ logger3.debug("QUERY_CACHE: Fetching query result from underlying cache", { queryHash });
159
185
  const result = await this.underlyingCache.getQueryResult(queryHash);
160
186
  if (result) {
161
- logger.debug("QUERY_CACHE: Query result retrieved from underlying cache", {
187
+ logger3.debug("QUERY_CACHE: Query result retrieved from underlying cache", {
162
188
  queryHash,
163
189
  itemCount: result.length,
164
190
  isComplete: metadata?.isComplete,
165
191
  itemKeys: result.map((k) => JSON.stringify(k))
166
192
  });
167
193
  } else {
168
- logger.debug("QUERY_CACHE: No query result found in underlying cache", { queryHash });
194
+ logger3.debug("QUERY_CACHE: No query result found in underlying cache", { queryHash });
169
195
  }
170
196
  return result;
171
197
  }
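The hunk above adds a read-through path for query metadata: when the in-memory metadata map has no entry, the underlying cache is queried via getQueryResultWithMetadata() and any metadata found is copied back into memory before the expiration check runs. A minimal sketch of that fallback, assuming a hypothetical persistent backend (only getQueryResultWithMetadata comes from the diff; the other names are placeholders):

```js
// Illustrative only: check the in-memory map first, then restore metadata from the persistent layer.
async function loadQueryMetadata(memoryMap, underlyingCache, queryHash) {
  let metadata = memoryMap.get(queryHash);
  if (!metadata && "getQueryResultWithMetadata" in underlyingCache) {
    const result = await underlyingCache.getQueryResultWithMetadata(queryHash);
    if (result?.metadata) {
      metadata = result.metadata;
      memoryMap.set(queryHash, metadata); // repopulate the in-memory layer
    }
  }
  return metadata;
}
```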
@@ -180,12 +206,12 @@ var TwoLayerCacheMap = class _TwoLayerCacheMap extends CacheMap {
180
206
  * Delete a query result and its metadata
181
207
  */
182
208
  async deleteQueryResult(queryHash) {
183
- logger.debug("QUERY_CACHE: TwoLayerCacheMap.deleteQueryResult() called", { queryHash });
209
+ logger3.debug("QUERY_CACHE: TwoLayerCacheMap.deleteQueryResult() called", { queryHash });
184
210
  const hadMetadata = this.queryMetadataMap.has(queryHash);
185
211
  const metadata = this.queryMetadataMap.get(queryHash);
186
212
  await this.underlyingCache.deleteQueryResult(queryHash);
187
213
  this.queryMetadataMap.delete(queryHash);
188
- logger.debug("QUERY_CACHE: Deleted query result", {
214
+ logger3.debug("QUERY_CACHE: Deleted query result", {
189
215
  queryHash,
190
216
  hadMetadata,
191
217
  wasComplete: metadata?.isComplete,
@@ -197,11 +223,11 @@ var TwoLayerCacheMap = class _TwoLayerCacheMap extends CacheMap {
197
223
  * Invalidate queries that are affected by item changes
198
224
  */
199
225
  async invalidateQueriesForItem(itemKey) {
200
- logger.debug("QUERY_CACHE: Invalidating queries for item change", {
226
+ logger3.debug("QUERY_CACHE: Invalidating queries for item change", {
201
227
  itemKey: JSON.stringify(itemKey)
202
228
  });
203
229
  const affectedQueries = await this.findQueriesContainingItem(itemKey);
204
- logger.debug("QUERY_CACHE: Found queries containing item", {
230
+ logger3.debug("QUERY_CACHE: Found queries containing item", {
205
231
  itemKey: JSON.stringify(itemKey),
206
232
  affectedQueryCount: affectedQueries.length,
207
233
  affectedQueries
@@ -210,13 +236,13 @@ var TwoLayerCacheMap = class _TwoLayerCacheMap extends CacheMap {
210
236
  await this.deleteQueryResult(queryHash);
211
237
  }
212
238
  if (affectedQueries.length > 0) {
213
- logger.debug("QUERY_CACHE: Invalidated queries for item change", {
239
+ logger3.debug("QUERY_CACHE: Invalidated queries for item change", {
214
240
  itemKey: JSON.stringify(itemKey),
215
241
  queriesInvalidated: affectedQueries.length,
216
242
  queryHashes: affectedQueries
217
243
  });
218
244
  } else {
219
- logger.debug("QUERY_CACHE: No queries found containing item", {
245
+ logger3.debug("QUERY_CACHE: No queries found containing item", {
220
246
  itemKey: JSON.stringify(itemKey)
221
247
  });
222
248
  }
@@ -240,13 +266,13 @@ var TwoLayerCacheMap = class _TwoLayerCacheMap extends CacheMap {
240
266
  * Determine if a query result is complete or partial based on query hash
241
267
  */
242
268
  determineQueryCompleteness(queryHash, itemKeys) {
243
- if (queryHash.includes('"query":{}') || queryHash.includes('"query": {}')) {
244
- return true;
245
- }
246
269
  if (queryHash.includes("facet:") || queryHash.includes("filter:")) {
247
270
  return false;
248
271
  }
249
- if (queryHash.includes("all:") && !queryHash.includes("query:")) {
272
+ if (queryHash.startsWith("all:") && (queryHash.includes("query:{}") || queryHash.includes('"query":{}') || queryHash.includes('"query": {}'))) {
273
+ return true;
274
+ }
275
+ if (queryHash.startsWith("all:") && !queryHash.includes("query:")) {
250
276
  return true;
251
277
  }
252
278
  return false;
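The revised completeness heuristic above scopes the empty-query check to hashes that start with "all:". A restatement of that logic for readability (not an export of @fjell/cache), with hypothetical hash strings; real hashes come from createQueryHash() and may differ in format:

```js
// Illustrative only: mirrors the determineQueryCompleteness() logic shown in the diff.
function isCompleteQuery(queryHash) {
  if (queryHash.includes("facet:") || queryHash.includes("filter:")) return false;
  if (queryHash.startsWith("all:") &&
      (queryHash.includes("query:{}") ||
       queryHash.includes('"query":{}') ||
       queryHash.includes('"query": {}'))) {
    return true;
  }
  if (queryHash.startsWith("all:") && !queryHash.includes("query:")) return true;
  return false;
}

isCompleteQuery('all:user:{"query":{}}');  // true  (unfiltered "all" query)
isCompleteQuery('all:user');               // true  ("all" with no query clause)
isCompleteQuery('all:user:facet:active');  // false (facet/filter results stay partial)
isCompleteQuery('one:user:{"query":{}}');  // false (everything else defaults to partial)
```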
@@ -359,14 +385,14 @@ var TwoLayerCacheMap = class _TwoLayerCacheMap extends CacheMap {
359
385
  const startTime = Date.now();
360
386
  const now = /* @__PURE__ */ new Date();
361
387
  const expiredQueries = [];
362
- logger.debug("TWO_LAYER: Starting query cleanup", {
388
+ logger3.debug("TWO_LAYER: Starting query cleanup", {
363
389
  totalQueries: this.queryMetadataMap.size,
364
390
  now: now.toISOString()
365
391
  });
366
392
  for (const [queryHash, metadata] of this.queryMetadataMap.entries()) {
367
393
  if (metadata.expiresAt < now) {
368
394
  expiredQueries.push(queryHash);
369
- logger.debug("TWO_LAYER: Found expired query", {
395
+ logger3.debug("TWO_LAYER: Found expired query", {
370
396
  queryHash,
371
397
  queryType: metadata.queryType,
372
398
  isComplete: metadata.isComplete,
@@ -380,13 +406,13 @@ var TwoLayerCacheMap = class _TwoLayerCacheMap extends CacheMap {
380
406
  }
381
407
  const duration = Date.now() - startTime;
382
408
  if (expiredQueries.length > 0) {
383
- logger.debug("TWO_LAYER: Query cleanup completed", {
409
+ logger3.debug("TWO_LAYER: Query cleanup completed", {
384
410
  expiredCount: expiredQueries.length,
385
411
  totalQueries: this.queryMetadataMap.size,
386
412
  duration
387
413
  });
388
414
  } else {
389
- logger.debug("TWO_LAYER: Query cleanup - no expired queries", {
415
+ logger3.debug("TWO_LAYER: Query cleanup - no expired queries", {
390
416
  totalQueries: this.queryMetadataMap.size,
391
417
  duration
392
418
  });
@@ -402,7 +428,7 @@ var TwoLayerCacheMap = class _TwoLayerCacheMap extends CacheMap {
402
428
  // ===== MISSING ABSTRACT METHODS FROM CacheMap =====
403
429
  async invalidateItemKeys(keys) {
404
430
  const startTime = Date.now();
405
- logger.debug("TWO_LAYER: Invalidating item keys", {
431
+ logger3.debug("TWO_LAYER: Invalidating item keys", {
406
432
  keyCount: keys.length,
407
433
  keys: keys.map((k) => JSON.stringify(k))
408
434
  });
@@ -417,14 +443,14 @@ var TwoLayerCacheMap = class _TwoLayerCacheMap extends CacheMap {
417
443
  const invalidated = beforeCount - afterCount;
418
444
  totalInvalidatedQueries += invalidated;
419
445
  if (invalidated > 0) {
420
- logger.debug("TWO_LAYER: Invalidated queries for item", {
446
+ logger3.debug("TWO_LAYER: Invalidated queries for item", {
421
447
  key: JSON.stringify(key),
422
448
  queriesInvalidated: invalidated
423
449
  });
424
450
  }
425
451
  }
426
452
  const duration = Date.now() - startTime;
427
- logger.debug("TWO_LAYER: Item key invalidation completed", {
453
+ logger3.debug("TWO_LAYER: Item key invalidation completed", {
428
454
  keyCount: keys.length,
429
455
  totalQueriesInvalidated: totalInvalidatedQueries,
430
456
  duration
@@ -433,7 +459,7 @@ var TwoLayerCacheMap = class _TwoLayerCacheMap extends CacheMap {
433
459
  async invalidateLocation(locations) {
434
460
  const startTime = Date.now();
435
461
  const queryCountBefore = this.queryMetadataMap.size;
436
- logger.debug("TWO_LAYER: Invalidating location", {
462
+ logger3.debug("TWO_LAYER: Invalidating location", {
437
463
  locations: JSON.stringify(locations),
438
464
  queryCountBefore
439
465
  });
@@ -442,7 +468,7 @@ var TwoLayerCacheMap = class _TwoLayerCacheMap extends CacheMap {
442
468
  }
443
469
  this.queryMetadataMap.clear();
444
470
  const duration = Date.now() - startTime;
445
- logger.debug("TWO_LAYER: Location invalidation completed", {
471
+ logger3.debug("TWO_LAYER: Location invalidation completed", {
446
472
  locations: JSON.stringify(locations),
447
473
  queriesCleared: queryCountBefore,
448
474
  duration
@@ -451,7 +477,7 @@ var TwoLayerCacheMap = class _TwoLayerCacheMap extends CacheMap {
451
477
  async clearQueryResults() {
452
478
  const startTime = Date.now();
453
479
  const queryCountBefore = this.queryMetadataMap.size;
454
- logger.debug("TWO_LAYER: Clearing all query results", {
480
+ logger3.debug("TWO_LAYER: Clearing all query results", {
455
481
  queryCountBefore
456
482
  });
457
483
  if ("clearQueryResults" in this.underlyingCache && typeof this.underlyingCache.clearQueryResults === "function") {
@@ -459,7 +485,7 @@ var TwoLayerCacheMap = class _TwoLayerCacheMap extends CacheMap {
459
485
  }
460
486
  this.queryMetadataMap.clear();
461
487
  const duration = Date.now() - startTime;
462
- logger.debug("TWO_LAYER: Cleared all query results", {
488
+ logger3.debug("TWO_LAYER: Cleared all query results", {
463
489
  queriesCleared: queryCountBefore,
464
490
  duration
465
491
  });
@@ -790,7 +816,7 @@ var CacheEventFactory = class {
790
816
  };
791
817
 
792
818
  // src/ops/all.ts
793
- var logger2 = logger_default.get("all");
819
+ var logger4 = logger_default.get("all");
794
820
  var inFlightRequests = /* @__PURE__ */ new Map();
795
821
  var CLEANUP_INTERVAL = 3e4;
796
822
  var REQUEST_TIMEOUT = 25e3;
@@ -798,14 +824,14 @@ setInterval(() => {
798
824
  const now = Date.now();
799
825
  inFlightRequests.forEach((request, key) => {
800
826
  if (now - request.timestamp > REQUEST_TIMEOUT) {
801
- logger2.debug("Cleaning up stale in-flight all() request", { key });
827
+ logger4.debug("Cleaning up stale in-flight all() request", { key });
802
828
  inFlightRequests.delete(key);
803
829
  }
804
830
  });
805
831
  }, CLEANUP_INTERVAL);
806
832
  var all = async (query = {}, locations = [], context, allOptions) => {
807
833
  const { coordinate } = context;
808
- logger2.default("all", { query, locations, allOptions });
834
+ logger4.default("all", { query, locations, allOptions });
809
835
  const wrappedAll = createAllWrapper(
810
836
  coordinate,
811
837
  async (q, locs, opts) => {
@@ -829,27 +855,27 @@ async function executeAllLogic(query, locations, context, allOptions) {
829
855
  }
830
856
  });
831
857
  if (context.options?.bypassCache) {
832
- logger2.debug("Cache bypass enabled, fetching directly from API", { query, locations });
858
+ logger4.debug("Cache bypass enabled, fetching directly from API", { query, locations });
833
859
  try {
834
860
  const ret = await api.all(query, locations, allOptions);
835
- logger2.debug("API response received (not cached due to bypass)", { query, locations, itemCount: ret.items.length });
861
+ logger4.debug("API response received (not cached due to bypass)", { query, locations, itemCount: ret.items.length });
836
862
  return ret;
837
863
  } catch (error) {
838
- logger2.error("API request failed", { query, locations, error });
864
+ logger4.error("API request failed", { query, locations, error });
839
865
  throw error;
840
866
  }
841
867
  }
842
868
  const queryHash = createQueryHash(pkType, query, locations);
843
- logger2.debug("QUERY_CACHE: Generated query hash for all()", {
869
+ logger4.debug("QUERY_CACHE: Generated query hash for all()", {
844
870
  queryHash,
845
871
  query: JSON.stringify(query),
846
872
  locations: JSON.stringify(locations),
847
873
  pkType
848
874
  });
849
- logger2.debug("QUERY_CACHE: Checking query cache for hash", { queryHash });
875
+ logger4.debug("QUERY_CACHE: Checking query cache for hash", { queryHash });
850
876
  const cachedItemKeys = await cacheMap.getQueryResult(queryHash);
851
877
  if (cachedItemKeys) {
852
- logger2.debug("QUERY_CACHE: Cache HIT - Found cached query result", {
878
+ logger4.debug("QUERY_CACHE: Cache HIT - Found cached query result", {
853
879
  queryHash,
854
880
  cachedKeyCount: cachedItemKeys.length,
855
881
  itemKeys: cachedItemKeys.map((k) => JSON.stringify(k))
@@ -861,14 +887,14 @@ async function executeAllLogic(query, locations, context, allOptions) {
861
887
  const item = await cacheMap.get(itemKey);
862
888
  if (item) {
863
889
  cachedItems.push(item);
864
- logger2.debug("QUERY_CACHE: Retrieved cached item", {
890
+ logger4.debug("QUERY_CACHE: Retrieved cached item", {
865
891
  itemKey: JSON.stringify(itemKey),
866
892
  itemKeyStr: JSON.stringify(item.key)
867
893
  });
868
894
  } else {
869
895
  allItemsAvailable = false;
870
896
  missingKeys.push(itemKey);
871
- logger2.debug("QUERY_CACHE: Cached item MISSING from item cache", {
897
+ logger4.debug("QUERY_CACHE: Cached item MISSING from item cache", {
872
898
  itemKey: JSON.stringify(itemKey),
873
899
  queryHash
874
900
  });
@@ -876,13 +902,13 @@ async function executeAllLogic(query, locations, context, allOptions) {
876
902
  }
877
903
  }
878
904
  if (allItemsAvailable) {
879
- logger2.debug("QUERY_CACHE: All cached items available, returning from cache", {
905
+ logger4.debug("QUERY_CACHE: All cached items available, returning from cache", {
880
906
  queryHash,
881
907
  itemCount: cachedItems.length
882
908
  });
883
909
  return createCachedResult(cachedItems);
884
910
  } else {
885
- logger2.debug("QUERY_CACHE: Some cached items missing, invalidating query cache", {
911
+ logger4.debug("QUERY_CACHE: Some cached items missing, invalidating query cache", {
886
912
  queryHash,
887
913
  missingKeys: missingKeys.map((k) => JSON.stringify(k)),
888
914
  foundCount: cachedItems.length,
@@ -891,11 +917,11 @@ async function executeAllLogic(query, locations, context, allOptions) {
891
917
  cacheMap.deleteQueryResult(queryHash);
892
918
  }
893
919
  } else {
894
- logger2.debug("QUERY_CACHE: Cache MISS - No cached query result found", { queryHash });
920
+ logger4.debug("QUERY_CACHE: Cache MISS - No cached query result found", { queryHash });
895
921
  }
896
922
  const isEmptyQuery = Object.keys(query).length === 0 || (Object.keys(query).length === 1 && "limit" in query || "offset" in query);
897
923
  if (!isEmptyQuery) {
898
- logger2.debug("QUERY_CACHE: Attempting direct cache query using queryIn() for filtered query", {
924
+ logger4.debug("QUERY_CACHE: Attempting direct cache query using queryIn() for filtered query", {
899
925
  queryHash,
900
926
  query: JSON.stringify(query),
901
927
  locations: JSON.stringify(locations)
@@ -903,35 +929,35 @@ async function executeAllLogic(query, locations, context, allOptions) {
903
929
  try {
904
930
  const directCachedItems = await cacheMap.queryIn(query, locations);
905
931
  if (directCachedItems && directCachedItems.length > 0) {
906
- logger2.debug("QUERY_CACHE: Direct cache query SUCCESS - Found items in item cache", {
932
+ logger4.debug("QUERY_CACHE: Direct cache query SUCCESS - Found items in item cache", {
907
933
  queryHash,
908
934
  itemCount: directCachedItems.length,
909
935
  itemKeys: directCachedItems.map((item) => JSON.stringify(item.key))
910
936
  });
911
937
  const itemKeys = directCachedItems.map((item) => item.key);
912
938
  await cacheMap.setQueryResult(queryHash, itemKeys);
913
- logger2.debug("QUERY_CACHE: Stored query result from direct cache hit", {
939
+ logger4.debug("QUERY_CACHE: Stored query result from direct cache hit", {
914
940
  queryHash,
915
941
  itemKeyCount: itemKeys.length,
916
942
  itemKeys: itemKeys.map((k) => JSON.stringify(k))
917
943
  });
918
944
  return createCachedResult(directCachedItems);
919
945
  } else {
920
- logger2.debug("QUERY_CACHE: Direct cache query returned no items", { queryHash });
946
+ logger4.debug("QUERY_CACHE: Direct cache query returned no items", { queryHash });
921
947
  }
922
948
  } catch (error) {
923
- logger2.debug("QUERY_CACHE: Error querying cache directly, proceeding to API", {
949
+ logger4.debug("QUERY_CACHE: Error querying cache directly, proceeding to API", {
924
950
  queryHash,
925
951
  error: error instanceof Error ? error.message : String(error)
926
952
  });
927
953
  }
928
954
  } else {
929
- logger2.debug("QUERY_CACHE: Skipping direct cache query for empty/all query - cannot trust completeness", {
955
+ logger4.debug("QUERY_CACHE: Skipping direct cache query for empty/all query - cannot trust completeness", {
930
956
  queryHash,
931
957
  query: JSON.stringify(query)
932
958
  });
933
959
  }
934
- logger2.debug("QUERY_CACHE: Fetching from API (cache miss or invalid)", {
960
+ logger4.debug("QUERY_CACHE: Fetching from API (cache miss or invalid)", {
935
961
  queryHash,
936
962
  query: JSON.stringify(query),
937
963
  locations: JSON.stringify(locations)
@@ -939,7 +965,7 @@ async function executeAllLogic(query, locations, context, allOptions) {
939
965
  const timestamp = Date.now();
940
966
  const existingRequest = inFlightRequests.get(queryHash);
941
967
  if (existingRequest && timestamp - existingRequest.timestamp < REQUEST_TIMEOUT) {
942
- logger2.debug("QUERY_CACHE: Using existing in-flight all() request", {
968
+ logger4.debug("QUERY_CACHE: Using existing in-flight all() request", {
943
969
  queryHash,
944
970
  age: timestamp - existingRequest.timestamp
945
971
  });
@@ -952,19 +978,19 @@ async function executeAllLogic(query, locations, context, allOptions) {
952
978
  const cleanup = () => inFlightRequests.delete(queryHash);
953
979
  apiRequest.then(cleanup, cleanup);
954
980
  apiResult = await apiRequest;
955
- logger2.debug("QUERY_CACHE: API response received", {
981
+ logger4.debug("QUERY_CACHE: API response received", {
956
982
  queryHash,
957
983
  itemCount: apiResult.items.length,
958
984
  total: apiResult.metadata?.total,
959
985
  itemKeys: apiResult.items.map((item) => JSON.stringify(item.key))
960
986
  });
961
- logger2.debug("QUERY_CACHE: Storing items in item cache", {
987
+ logger4.debug("QUERY_CACHE: Storing items in item cache", {
962
988
  queryHash,
963
989
  itemCount: apiResult.items.length
964
990
  });
965
991
  for (const v of apiResult.items) {
966
992
  await cacheMap.set(v.key, v);
967
- logger2.debug("QUERY_CACHE: Stored item in cache", {
993
+ logger4.debug("QUERY_CACHE: Stored item in cache", {
968
994
  itemKey: JSON.stringify(v.key),
969
995
  queryHash
970
996
  });
@@ -974,7 +1000,7 @@ async function executeAllLogic(query, locations, context, allOptions) {
974
1000
  for (const evictedKey of evictedKeys) {
975
1001
  const parsedKey = JSON.parse(evictedKey);
976
1002
  await cacheMap.delete(parsedKey);
977
- logger2.debug("QUERY_CACHE: Evicted item due to cache limits", {
1003
+ logger4.debug("QUERY_CACHE: Evicted item due to cache limits", {
978
1004
  evictedKey,
979
1005
  queryHash
980
1006
  });
@@ -982,29 +1008,29 @@ async function executeAllLogic(query, locations, context, allOptions) {
982
1008
  }
983
1009
  const itemKeys = apiResult.items.map((item) => item.key);
984
1010
  await cacheMap.setQueryResult(queryHash, itemKeys);
985
- logger2.debug("QUERY_CACHE: Stored query result in query cache", {
1011
+ logger4.debug("QUERY_CACHE: Stored query result in query cache", {
986
1012
  queryHash,
987
1013
  itemKeyCount: itemKeys.length,
988
1014
  itemKeys: itemKeys.map((k) => JSON.stringify(k))
989
1015
  });
990
1016
  const event = CacheEventFactory.createQueryEvent(query, locations, apiResult.items);
991
1017
  context.eventEmitter.emit(event);
992
- logger2.debug("QUERY_CACHE: Emitted query event", { queryHash });
1018
+ logger4.debug("QUERY_CACHE: Emitted query event", { queryHash });
993
1019
  } catch (e) {
994
1020
  inFlightRequests.delete(queryHash);
995
1021
  if (e instanceof NotFoundError) {
996
- logger2.debug("QUERY_CACHE: API returned NotFoundError, caching empty result", { queryHash });
1022
+ logger4.debug("QUERY_CACHE: API returned NotFoundError, caching empty result", { queryHash });
997
1023
  await cacheMap.setQueryResult(queryHash, []);
998
- logger2.debug("QUERY_CACHE: Cached empty query result for not found", { queryHash });
1024
+ logger4.debug("QUERY_CACHE: Cached empty query result for not found", { queryHash });
999
1025
  } else {
1000
- logger2.debug("QUERY_CACHE: API error occurred", {
1026
+ logger4.debug("QUERY_CACHE: API error occurred", {
1001
1027
  queryHash,
1002
1028
  error: e instanceof Error ? e.message : String(e)
1003
1029
  });
1004
1030
  throw e;
1005
1031
  }
1006
1032
  }
1007
- logger2.debug("QUERY_CACHE: all() operation completed", {
1033
+ logger4.debug("QUERY_CACHE: all() operation completed", {
1008
1034
  queryHash,
1009
1035
  resultCount: apiResult.items.length,
1010
1036
  total: apiResult.metadata?.total
@@ -1179,10 +1205,10 @@ function validateSizeConfig(config) {
1179
1205
  }
1180
1206
 
1181
1207
  // src/ops/one.ts
1182
- var logger3 = logger_default.get("one");
1208
+ var logger5 = logger_default.get("one");
1183
1209
  var one = async (query = {}, locations = [], context) => {
1184
1210
  const { api, cacheMap, pkType, ttlManager, coordinate } = context;
1185
- logger3.default("one", { query, locations });
1211
+ logger5.default("one", { query, locations });
1186
1212
  const wrappedOne = createOneWrapper(
1187
1213
  coordinate,
1188
1214
  async (q, locs) => {
@@ -1195,64 +1221,64 @@ var one = async (query = {}, locations = [], context) => {
1195
1221
  async function executeOneLogic(query, locations, context) {
1196
1222
  const { api, cacheMap, pkType, ttlManager } = context;
1197
1223
  if (context.options?.bypassCache) {
1198
- logger3.debug("Cache bypass enabled, fetching directly from API", { query, locations });
1224
+ logger5.debug("Cache bypass enabled, fetching directly from API", { query, locations });
1199
1225
  try {
1200
1226
  const retItem2 = await api.one(query, locations);
1201
1227
  if (retItem2) {
1202
- logger3.debug("API response received (not cached due to bypass)", { query, locations });
1228
+ logger5.debug("API response received (not cached due to bypass)", { query, locations });
1203
1229
  return retItem2;
1204
1230
  } else {
1205
- logger3.debug("API returned null", { query, locations });
1231
+ logger5.debug("API returned null", { query, locations });
1206
1232
  return null;
1207
1233
  }
1208
1234
  } catch (error) {
1209
- logger3.error("API request failed", { query, locations, error });
1235
+ logger5.error("API request failed", { query, locations, error });
1210
1236
  throw error;
1211
1237
  }
1212
1238
  }
1213
1239
  const queryHash = createQueryHash(pkType, query, locations);
1214
- logger3.debug("QUERY_CACHE: Generated query hash for one()", {
1240
+ logger5.debug("QUERY_CACHE: Generated query hash for one()", {
1215
1241
  queryHash,
1216
1242
  query: JSON.stringify(query),
1217
1243
  locations: JSON.stringify(locations),
1218
1244
  pkType
1219
1245
  });
1220
- logger3.debug("QUERY_CACHE: Checking query cache for hash", { queryHash });
1246
+ logger5.debug("QUERY_CACHE: Checking query cache for hash", { queryHash });
1221
1247
  const cachedItemKeys = await cacheMap.getQueryResult(queryHash);
1222
1248
  if (cachedItemKeys) {
1223
- logger3.debug("QUERY_CACHE: Cache HIT - Found cached query result", {
1249
+ logger5.debug("QUERY_CACHE: Cache HIT - Found cached query result", {
1224
1250
  queryHash,
1225
1251
  cachedKeyCount: cachedItemKeys.length,
1226
1252
  itemKeys: cachedItemKeys.map((k) => JSON.stringify(k))
1227
1253
  });
1228
1254
  if (cachedItemKeys.length === 0) {
1229
- logger3.debug("QUERY_CACHE: Cached empty result (not found)", { queryHash });
1255
+ logger5.debug("QUERY_CACHE: Cached empty result (not found)", { queryHash });
1230
1256
  return null;
1231
1257
  }
1232
1258
  const itemKey = cachedItemKeys[0];
1233
- logger3.debug("QUERY_CACHE: Retrieving first cached item", {
1259
+ logger5.debug("QUERY_CACHE: Retrieving first cached item", {
1234
1260
  queryHash,
1235
1261
  itemKey: JSON.stringify(itemKey)
1236
1262
  });
1237
1263
  const item = await cacheMap.get(itemKey);
1238
1264
  if (item) {
1239
- logger3.debug("QUERY_CACHE: Retrieved cached item successfully", {
1265
+ logger5.debug("QUERY_CACHE: Retrieved cached item successfully", {
1240
1266
  queryHash,
1241
1267
  itemKey: JSON.stringify(itemKey),
1242
1268
  itemKeyStr: JSON.stringify(item.key)
1243
1269
  });
1244
1270
  return item;
1245
1271
  } else {
1246
- logger3.debug("QUERY_CACHE: Cached item MISSING from item cache, invalidating query cache", {
1272
+ logger5.debug("QUERY_CACHE: Cached item MISSING from item cache, invalidating query cache", {
1247
1273
  queryHash,
1248
1274
  itemKey: JSON.stringify(itemKey)
1249
1275
  });
1250
1276
  cacheMap.deleteQueryResult(queryHash);
1251
1277
  }
1252
1278
  } else {
1253
- logger3.debug("QUERY_CACHE: Cache MISS - No cached query result found", { queryHash });
1279
+ logger5.debug("QUERY_CACHE: Cache MISS - No cached query result found", { queryHash });
1254
1280
  }
1255
- logger3.debug("QUERY_CACHE: Attempting direct cache query using queryIn()", {
1281
+ logger5.debug("QUERY_CACHE: Attempting direct cache query using queryIn()", {
1256
1282
  queryHash,
1257
1283
  query: JSON.stringify(query),
1258
1284
  locations: JSON.stringify(locations)
@@ -1260,28 +1286,28 @@ async function executeOneLogic(query, locations, context) {
1260
1286
  try {
1261
1287
  const directCachedItems = await cacheMap.queryIn(query, locations);
1262
1288
  if (directCachedItems && directCachedItems.length > 0) {
1263
- logger3.debug("QUERY_CACHE: Direct cache query SUCCESS - Found item in item cache", {
1289
+ logger5.debug("QUERY_CACHE: Direct cache query SUCCESS - Found item in item cache", {
1264
1290
  queryHash,
1265
1291
  itemCount: directCachedItems.length,
1266
1292
  itemKeys: directCachedItems.map((item) => JSON.stringify(item.key))
1267
1293
  });
1268
1294
  const foundItem = directCachedItems[0];
1269
1295
  await cacheMap.setQueryResult(queryHash, [foundItem.key]);
1270
- logger3.debug("QUERY_CACHE: Stored query result from direct cache hit", {
1296
+ logger5.debug("QUERY_CACHE: Stored query result from direct cache hit", {
1271
1297
  queryHash,
1272
1298
  itemKey: JSON.stringify(foundItem.key)
1273
1299
  });
1274
1300
  return foundItem;
1275
1301
  } else {
1276
- logger3.debug("QUERY_CACHE: Direct cache query returned no items", { queryHash });
1302
+ logger5.debug("QUERY_CACHE: Direct cache query returned no items", { queryHash });
1277
1303
  }
1278
1304
  } catch (error) {
1279
- logger3.debug("QUERY_CACHE: Error querying cache directly, proceeding to API", {
1305
+ logger5.debug("QUERY_CACHE: Error querying cache directly, proceeding to API", {
1280
1306
  queryHash,
1281
1307
  error: error instanceof Error ? error.message : String(error)
1282
1308
  });
1283
1309
  }
1284
- logger3.debug("QUERY_CACHE: Fetching from API (cache miss or invalid)", {
1310
+ logger5.debug("QUERY_CACHE: Fetching from API (cache miss or invalid)", {
1285
1311
  queryHash,
1286
1312
  query: JSON.stringify(query),
1287
1313
  locations: JSON.stringify(locations)
@@ -1290,11 +1316,11 @@ async function executeOneLogic(query, locations, context) {
1290
1316
  try {
1291
1317
  retItem = await api.one(query, locations);
1292
1318
  if (retItem) {
1293
- logger3.debug("QUERY_CACHE: API response received", {
1319
+ logger5.debug("QUERY_CACHE: API response received", {
1294
1320
  queryHash,
1295
1321
  itemKey: JSON.stringify(retItem.key)
1296
1322
  });
1297
- logger3.debug("QUERY_CACHE: Storing item in item cache", {
1323
+ logger5.debug("QUERY_CACHE: Storing item in item cache", {
1298
1324
  queryHash,
1299
1325
  itemKey: JSON.stringify(retItem.key)
1300
1326
  });
@@ -1317,35 +1343,35 @@ async function executeOneLogic(query, locations, context) {
1317
1343
  for (const evictedKey of evictedKeys) {
1318
1344
  const parsedKey = JSON.parse(evictedKey);
1319
1345
  await cacheMap.delete(parsedKey);
1320
- logger3.debug("QUERY_CACHE: Evicted item due to cache limits", {
1346
+ logger5.debug("QUERY_CACHE: Evicted item due to cache limits", {
1321
1347
  evictedKey,
1322
1348
  queryHash
1323
1349
  });
1324
1350
  }
1325
1351
  await cacheMap.setQueryResult(queryHash, [retItem.key]);
1326
- logger3.debug("QUERY_CACHE: Stored query result in query cache", {
1352
+ logger5.debug("QUERY_CACHE: Stored query result in query cache", {
1327
1353
  queryHash,
1328
1354
  itemKey: JSON.stringify(retItem.key)
1329
1355
  });
1330
1356
  } else {
1331
- logger3.debug("QUERY_CACHE: API returned null, caching empty result", { queryHash });
1357
+ logger5.debug("QUERY_CACHE: API returned null, caching empty result", { queryHash });
1332
1358
  await cacheMap.setQueryResult(queryHash, []);
1333
- logger3.debug("QUERY_CACHE: Cached empty query result", { queryHash });
1359
+ logger5.debug("QUERY_CACHE: Cached empty query result", { queryHash });
1334
1360
  }
1335
1361
  } catch (e) {
1336
1362
  if (e instanceof NotFoundError2) {
1337
- logger3.debug("QUERY_CACHE: API returned NotFoundError, caching empty result", { queryHash });
1363
+ logger5.debug("QUERY_CACHE: API returned NotFoundError, caching empty result", { queryHash });
1338
1364
  await cacheMap.setQueryResult(queryHash, []);
1339
- logger3.debug("QUERY_CACHE: Cached empty query result for not found", { queryHash });
1365
+ logger5.debug("QUERY_CACHE: Cached empty query result for not found", { queryHash });
1340
1366
  } else {
1341
- logger3.debug("QUERY_CACHE: API error occurred", {
1367
+ logger5.debug("QUERY_CACHE: API error occurred", {
1342
1368
  queryHash,
1343
1369
  error: e instanceof Error ? e.message : String(e)
1344
1370
  });
1345
1371
  throw e;
1346
1372
  }
1347
1373
  }
1348
- logger3.debug("QUERY_CACHE: one() operation completed", {
1374
+ logger5.debug("QUERY_CACHE: one() operation completed", {
1349
1375
  queryHash,
1350
1376
  result: retItem ? JSON.stringify(retItem.key) : null
1351
1377
  });
@@ -1356,10 +1382,10 @@ async function executeOneLogic(query, locations, context) {
1356
1382
  import {
1357
1383
  createCreateWrapper
1358
1384
  } from "@fjell/core";
1359
- var logger4 = logger_default.get("create");
1385
+ var logger6 = logger_default.get("create");
1360
1386
  var create = async (v, locations = [], context) => {
1361
1387
  const { coordinate } = context;
1362
- logger4.default("create", { v, locations });
1388
+ logger6.default("create", { v, locations });
1363
1389
  const wrappedCreate = createCreateWrapper(
1364
1390
  coordinate,
1365
1391
  async (item, createOptions2) => {
@@ -1399,7 +1425,7 @@ import {
1399
1425
  createGetWrapper,
1400
1426
  isValidItemKey
1401
1427
  } from "@fjell/core";
1402
- var logger5 = logger_default.get("get");
1428
+ var logger7 = logger_default.get("get");
1403
1429
  var inFlightRequests2 = /* @__PURE__ */ new Map();
1404
1430
  var CLEANUP_TIMEOUT = 5 * 60 * 1e3;
1405
1431
  var cleanupStaleRequests = () => {
@@ -1411,7 +1437,7 @@ var cleanupStaleRequests = () => {
1411
1437
  }
1412
1438
  });
1413
1439
  keysToDelete.forEach((key) => {
1414
- logger5.debug("Cleaning up stale in-flight request", { key });
1440
+ logger7.debug("Cleaning up stale in-flight request", { key });
1415
1441
  inFlightRequests2.delete(key);
1416
1442
  });
1417
1443
  };
@@ -1419,7 +1445,7 @@ var cleanupInterval = setInterval(cleanupStaleRequests, 60 * 1e3);
1419
1445
  var keyToString = createNormalizedHashFunction();
1420
1446
  var get = async (key, context) => {
1421
1447
  const { api, cacheMap, pkType, ttlManager, statsManager, coordinate } = context;
1422
- logger5.default("get", { key, defaultTTL: ttlManager.getDefaultTTL() });
1448
+ logger7.default("get", { key, defaultTTL: ttlManager.getDefaultTTL() });
1423
1449
  const wrappedGet = createGetWrapper(
1424
1450
  coordinate,
1425
1451
  async (k) => {
@@ -1433,7 +1459,7 @@ async function executeGetLogic(key, context) {
1433
1459
  const startTime = Date.now();
1434
1460
  const { api, cacheMap, pkType, ttlManager, statsManager } = context;
1435
1461
  const keyStr = JSON.stringify(key);
1436
- logger5.debug("CACHE_OP: get() started", {
1462
+ logger7.debug("CACHE_OP: get() started", {
1437
1463
  key: keyStr,
1438
1464
  ttlEnabled: ttlManager.isTTLEnabled(),
1439
1465
  defaultTTL: ttlManager.getDefaultTTL(),
@@ -1441,32 +1467,32 @@ async function executeGetLogic(key, context) {
1441
1467
  });
1442
1468
  statsManager.incrementRequests();
1443
1469
  if (!isValidItemKey(key)) {
1444
- logger5.error("CACHE_OP: Invalid key for get", { key: keyStr });
1470
+ logger7.error("CACHE_OP: Invalid key for get", { key: keyStr });
1445
1471
  throw new Error("Key for Get is not a valid ItemKey");
1446
1472
  }
1447
1473
  if (context.options?.bypassCache) {
1448
- logger5.debug("CACHE_OP: Cache bypass enabled, fetching directly from API", { key: keyStr });
1474
+ logger7.debug("CACHE_OP: Cache bypass enabled, fetching directly from API", { key: keyStr });
1449
1475
  statsManager.incrementMisses();
1450
1476
  try {
1451
1477
  const apiStartTime = Date.now();
1452
1478
  const ret2 = await api.get(key);
1453
1479
  const apiDuration = Date.now() - apiStartTime;
1454
1480
  if (ret2) {
1455
- logger5.debug("CACHE_OP: API response received (bypass mode, not cached)", {
1481
+ logger7.debug("CACHE_OP: API response received (bypass mode, not cached)", {
1456
1482
  key: keyStr,
1457
1483
  apiDuration,
1458
1484
  totalDuration: Date.now() - startTime
1459
1485
  });
1460
1486
  return ret2;
1461
1487
  } else {
1462
- logger5.debug("CACHE_OP: API returned null (bypass mode)", {
1488
+ logger7.debug("CACHE_OP: API returned null (bypass mode)", {
1463
1489
  key: keyStr,
1464
1490
  apiDuration
1465
1491
  });
1466
1492
  return null;
1467
1493
  }
1468
1494
  } catch (error) {
1469
- logger5.error("CACHE_OP: API request failed in bypass mode", {
1495
+ logger7.error("CACHE_OP: API request failed in bypass mode", {
1470
1496
  key: keyStr,
1471
1497
  duration: Date.now() - startTime,
1472
1498
  error
@@ -1479,7 +1505,7 @@ async function executeGetLogic(key, context) {
1479
1505
  const cachedItem = await cacheMap.get(key);
1480
1506
  const cacheCheckDuration = Date.now() - cacheCheckStart;
1481
1507
  if (cachedItem) {
1482
- logger5.debug("CACHE_OP: Item found in cache, checking TTL validity", {
1508
+ logger7.debug("CACHE_OP: Item found in cache, checking TTL validity", {
1483
1509
  key: keyStr,
1484
1510
  cacheCheckDuration,
1485
1511
  defaultTTL: ttlManager.getDefaultTTL()
@@ -1489,7 +1515,7 @@ async function executeGetLogic(key, context) {
1489
1515
  const ttlCheckDuration = Date.now() - ttlCheckStart;
1490
1516
  if (isValid) {
1491
1517
  const totalDuration = Date.now() - startTime;
1492
- logger5.debug("CACHE_OP: Cache HIT with valid TTL", {
1518
+ logger7.debug("CACHE_OP: Cache HIT with valid TTL", {
1493
1519
  key: keyStr,
1494
1520
  cacheCheckDuration,
1495
1521
  ttlCheckDuration,
@@ -1499,7 +1525,7 @@ async function executeGetLogic(key, context) {
1499
1525
  statsManager.incrementHits();
1500
1526
  return cachedItem;
1501
1527
  } else {
1502
- logger5.debug("CACHE_OP: Cache item EXPIRED, removing from cache", {
1528
+ logger7.debug("CACHE_OP: Cache item EXPIRED, removing from cache", {
1503
1529
  key: keyStr,
1504
1530
  cacheCheckDuration,
1505
1531
  ttlCheckDuration
@@ -1508,13 +1534,13 @@ async function executeGetLogic(key, context) {
1508
1534
  statsManager.incrementMisses();
1509
1535
  }
1510
1536
  } else {
1511
- logger5.debug("CACHE_OP: Cache MISS (no item found)", {
1537
+ logger7.debug("CACHE_OP: Cache MISS (no item found)", {
1512
1538
  key: keyStr,
1513
1539
  cacheCheckDuration
1514
1540
  });
1515
1541
  statsManager.incrementMisses();
1516
1542
  }
1517
- logger5.debug("CACHE_OP: Proceeding to API fetch (TTL-enabled cache miss or expired)", {
1543
+ logger7.debug("CACHE_OP: Proceeding to API fetch (TTL-enabled cache miss or expired)", {
1518
1544
  key: keyStr,
1519
1545
  defaultTTL: ttlManager.getDefaultTTL()
1520
1546
  });
@@ -1524,7 +1550,7 @@ async function executeGetLogic(key, context) {
1524
1550
  const cacheCheckDuration = Date.now() - cacheCheckStart;
1525
1551
  if (cachedItem) {
1526
1552
  const totalDuration = Date.now() - startTime;
1527
- logger5.debug("CACHE_OP: Cache HIT (TTL disabled)", {
1553
+ logger7.debug("CACHE_OP: Cache HIT (TTL disabled)", {
1528
1554
  key: keyStr,
1529
1555
  cacheCheckDuration,
1530
1556
  totalDuration
@@ -1532,7 +1558,7 @@ async function executeGetLogic(key, context) {
1532
1558
  statsManager.incrementHits();
1533
1559
  return cachedItem;
1534
1560
  } else {
1535
- logger5.debug("CACHE_OP: Cache MISS (TTL disabled)", {
1561
+ logger7.debug("CACHE_OP: Cache MISS (TTL disabled)", {
1536
1562
  key: keyStr,
1537
1563
  cacheCheckDuration
1538
1564
  });
@@ -1546,7 +1572,7 @@ async function executeGetLogic(key, context) {
1546
1572
  let apiRequest;
1547
1573
  const apiStartTime = Date.now();
1548
1574
  if (!requestEntry) {
1549
- logger5.debug("CACHE_OP: Creating new API request", { key: keyStr });
1575
+ logger7.debug("CACHE_OP: Creating new API request", { key: keyStr });
1550
1576
  apiRequest = api.get(key);
1551
1577
  if (apiRequest && typeof apiRequest.then === "function") {
1552
1578
  const timestamp = Date.now();
@@ -1559,7 +1585,7 @@ async function executeGetLogic(key, context) {
1559
1585
  }
1560
1586
  }
1561
1587
  } else {
1562
- logger5.debug("CACHE_OP: Using existing in-flight request", {
1588
+ logger7.debug("CACHE_OP: Using existing in-flight request", {
1563
1589
  key: keyStr,
1564
1590
  requestAge: Date.now() - requestEntry.timestamp
1565
1591
  });
@@ -1568,7 +1594,7 @@ async function executeGetLogic(key, context) {
1568
1594
  ret = await apiRequest;
1569
1595
  const apiDuration = Date.now() - apiStartTime;
1570
1596
  if (ret) {
1571
- logger5.debug("CACHE_OP: API request successful, caching result", {
1597
+ logger7.debug("CACHE_OP: API request successful, caching result", {
1572
1598
  key: keyStr,
1573
1599
  apiDuration,
1574
1600
  itemKeyMatches: JSON.stringify(ret.key) === keyStr
@@ -1590,7 +1616,7 @@ async function executeGetLogic(key, context) {
1590
1616
  estimatedSize
1591
1617
  };
1592
1618
  await cacheMap.setMetadata(itemKeyStr, baseMetadata);
1593
- logger5.debug("CACHE_OP: Created base metadata for cached item", {
1619
+ logger7.debug("CACHE_OP: Created base metadata for cached item", {
1594
1620
  key: itemKeyStr,
1595
1621
  estimatedSize
1596
1622
  });
@@ -1600,7 +1626,7 @@ async function executeGetLogic(key, context) {
1600
1626
  const evictedKeys = await context.evictionManager.onItemAdded(itemKeyStr, ret, cacheMap);
1601
1627
  const evictionDuration = Date.now() - evictionStart;
1602
1628
  if (evictedKeys.length > 0) {
1603
- logger5.debug("CACHE_OP: Eviction triggered by new item", {
1629
+ logger7.debug("CACHE_OP: Eviction triggered by new item", {
1604
1630
  key: itemKeyStr,
1605
1631
  evictedCount: evictedKeys.length,
1606
1632
  evictedKeys
@@ -1612,12 +1638,12 @@ async function executeGetLogic(key, context) {
1612
1638
  for (const evictedKey of evictedKeys) {
1613
1639
  const parsedKey = JSON.parse(evictedKey);
1614
1640
  await cacheMap.delete(parsedKey);
1615
- logger5.debug("CACHE_OP: Removed evicted item", { evictedKey });
1641
+ logger7.debug("CACHE_OP: Removed evicted item", { evictedKey });
1616
1642
  }
1617
1643
  const event = CacheEventFactory.itemRetrieved(ret.key, ret, "api");
1618
1644
  context.eventEmitter.emit(event);
1619
1645
  const totalDuration = Date.now() - startTime;
1620
- logger5.debug("CACHE_OP: get() completed successfully (cache miss)", {
1646
+ logger7.debug("CACHE_OP: get() completed successfully (cache miss)", {
1621
1647
  key: keyStr,
1622
1648
  apiDuration,
1623
1649
  cacheSetDuration,
@@ -1628,7 +1654,7 @@ async function executeGetLogic(key, context) {
1628
1654
  evictedCount: evictedKeys.length
1629
1655
  });
1630
1656
  } else {
1631
- logger5.debug("CACHE_OP: API returned null", {
1657
+ logger7.debug("CACHE_OP: API returned null", {
1632
1658
  key: keyStr,
1633
1659
  apiDuration,
1634
1660
  totalDuration: Date.now() - startTime
@@ -1637,7 +1663,7 @@ async function executeGetLogic(key, context) {
1637
1663
  } catch (e) {
1638
1664
  inFlightRequests2.delete(requestKeyStr);
1639
1665
  const duration = Date.now() - startTime;
1640
- logger5.error("CACHE_OP: Error in get() operation", {
1666
+ logger7.error("CACHE_OP: Error in get() operation", {
1641
1667
  key: keyStr,
1642
1668
  duration,
1643
1669
  message: e.message,
@@ -1652,24 +1678,24 @@ async function executeGetLogic(key, context) {
1652
1678
  import {
1653
1679
  isValidItemKey as isValidItemKey2
1654
1680
  } from "@fjell/core";
1655
- var logger6 = logger_default.get("retrieve");
1681
+ var logger8 = logger_default.get("retrieve");
1656
1682
  var retrieve = async (key, context) => {
1657
1683
  const startTime = Date.now();
1658
1684
  const { cacheMap, pkType, statsManager } = context;
1659
1685
  const keyStr = JSON.stringify(key);
1660
- logger6.default("retrieve", { key });
1661
- logger6.debug("CACHE_OP: retrieve() started", {
1686
+ logger8.default("retrieve", { key });
1687
+ logger8.debug("CACHE_OP: retrieve() started", {
1662
1688
  key: keyStr,
1663
1689
  cacheType: cacheMap.implementationType,
1664
1690
  bypassEnabled: !!context.options?.bypassCache
1665
1691
  });
1666
1692
  statsManager.incrementRequests();
1667
1693
  if (!isValidItemKey2(key)) {
1668
- logger6.error("CACHE_OP: Invalid key for retrieve", { key: keyStr });
1694
+ logger8.error("CACHE_OP: Invalid key for retrieve", { key: keyStr });
1669
1695
  throw new Error("Key for Retrieve is not a valid ItemKey");
1670
1696
  }
1671
1697
  if (context.options?.bypassCache) {
1672
- logger6.debug("CACHE_OP: Cache bypass enabled, fetching directly from API", { key: keyStr });
1698
+ logger8.debug("CACHE_OP: Cache bypass enabled, fetching directly from API", { key: keyStr });
1673
1699
  statsManager.incrementMisses();
1674
1700
  try {
1675
1701
  const apiStartTime = Date.now();
@@ -1677,14 +1703,14 @@ var retrieve = async (key, context) => {
1677
1703
  const retrieved2 = await api.get(key);
1678
1704
  const apiDuration = Date.now() - apiStartTime;
1679
1705
  if (retrieved2) {
1680
- logger6.debug("CACHE_OP: API response received (bypass mode)", {
1706
+ logger8.debug("CACHE_OP: API response received (bypass mode)", {
1681
1707
  key: keyStr,
1682
1708
  apiDuration,
1683
1709
  hasValue: true
1684
1710
  });
1685
1711
  return [null, retrieved2];
1686
1712
  } else {
1687
- logger6.debug("CACHE_OP: API returned null (bypass mode)", {
1713
+ logger8.debug("CACHE_OP: API returned null (bypass mode)", {
1688
1714
  key: keyStr,
1689
1715
  apiDuration
1690
1716
  });
@@ -1692,7 +1718,7 @@ var retrieve = async (key, context) => {
1692
1718
  }
1693
1719
  } catch (error) {
1694
1720
  const duration = Date.now() - startTime;
1695
- logger6.error("CACHE_OP: API request failed in bypass mode", {
1721
+ logger8.error("CACHE_OP: API request failed in bypass mode", {
1696
1722
  key: keyStr,
1697
1723
  duration,
1698
1724
  error
@@ -1701,36 +1727,36 @@ var retrieve = async (key, context) => {
1701
1727
  }
1702
1728
  }
1703
1729
  const containsItemKey = await cacheMap.includesKey(key);
1704
- logger6.debug("CACHE_OP: Cache key check completed", {
1730
+ logger8.debug("CACHE_OP: Cache key check completed", {
1705
1731
  key: keyStr,
1706
1732
  exists: containsItemKey
1707
1733
  });
1708
1734
  let retrieved;
1709
1735
  let contextToReturn;
1710
1736
  if (containsItemKey) {
1711
- logger6.default("Looking for Object in Cache", key);
1712
- logger6.debug("CACHE_OP: Cache HIT - retrieving from cache", { key: keyStr });
1737
+ logger8.default("Looking for Object in Cache", key);
1738
+ logger8.debug("CACHE_OP: Cache HIT - retrieving from cache", { key: keyStr });
1713
1739
  const getStartTime = Date.now();
1714
1740
  retrieved = await cacheMap.get(key);
1715
1741
  const getDuration = Date.now() - getStartTime;
1716
1742
  contextToReturn = null;
1717
1743
  statsManager.incrementHits();
1718
1744
  const totalDuration = Date.now() - startTime;
1719
- logger6.debug("CACHE_OP: retrieve() completed (cache hit)", {
1745
+ logger8.debug("CACHE_OP: retrieve() completed (cache hit)", {
1720
1746
  key: keyStr,
1721
1747
  getDuration,
1722
1748
  totalDuration,
1723
1749
  hasValue: !!retrieved
1724
1750
  });
1725
1751
  } else {
1726
- logger6.default("Object Not Found in Cache, Retrieving from Server API", { key });
1727
- logger6.debug("CACHE_OP: Cache MISS - fetching from API", { key: keyStr });
1752
+ logger8.default("Object Not Found in Cache, Retrieving from Server API", { key });
1753
+ logger8.debug("CACHE_OP: Cache MISS - fetching from API", { key: keyStr });
1728
1754
  statsManager.incrementMisses();
1729
1755
  const apiStartTime = Date.now();
1730
1756
  [contextToReturn, retrieved] = await get(key, context);
1731
1757
  const apiDuration = Date.now() - apiStartTime;
1732
1758
  const totalDuration = Date.now() - startTime;
1733
- logger6.debug("CACHE_OP: retrieve() completed (cache miss)", {
1759
+ logger8.debug("CACHE_OP: retrieve() completed (cache miss)", {
1734
1760
  key: keyStr,
1735
1761
  apiDuration,
1736
1762
  totalDuration,
@@ -1749,10 +1775,10 @@ import {
1749
1775
  createRemoveWrapper,
1750
1776
  isValidItemKey as isValidItemKey3
1751
1777
  } from "@fjell/core";
1752
- var logger7 = logger_default.get("remove");
1778
+ var logger9 = logger_default.get("remove");
1753
1779
  var remove = async (key, context) => {
1754
1780
  const { coordinate } = context;
1755
- logger7.default("remove", { key });
1781
+ logger9.default("remove", { key });
1756
1782
  const wrappedRemove = createRemoveWrapper(
1757
1783
  coordinate,
1758
1784
  async (k) => {
@@ -1765,7 +1791,7 @@ var remove = async (key, context) => {
1765
1791
  async function executeRemoveLogic(key, context) {
1766
1792
  const { api, cacheMap } = context;
1767
1793
  if (!isValidItemKey3(key)) {
1768
- logger7.error("Key for Remove is not a valid ItemKey: %j", key);
1794
+ logger9.error("Key for Remove is not a valid ItemKey: %j", key);
1769
1795
  throw new Error("Key for Remove is not a valid ItemKey");
1770
1796
  }
1771
1797
  try {
@@ -1783,9 +1809,9 @@ async function executeRemoveLogic(key, context) {
1783
1809
  { source: "operation", context: { operation: "remove" } }
1784
1810
  );
1785
1811
  context.eventEmitter.emit(queryInvalidatedEvent);
1786
- logger7.debug("Successfully removed item from API and cache", { key });
1812
+ logger9.debug("Successfully removed item from API and cache", { key });
1787
1813
  } catch (e) {
1788
- logger7.error("Error deleting item", { error: e });
1814
+ logger9.error("Error deleting item", { error: e });
1789
1815
  throw e;
1790
1816
  }
1791
1817
  }
@@ -1795,10 +1821,10 @@ import {
1795
1821
  createUpdateWrapper,
1796
1822
  isValidItemKey as isValidItemKey4
1797
1823
  } from "@fjell/core";
1798
- var logger8 = logger_default.get("update");
1824
+ var logger10 = logger_default.get("update");
1799
1825
  var update = async (key, v, context) => {
1800
1826
  const { coordinate } = context;
1801
- logger8.default("update", { key, v });
1827
+ logger10.default("update", { key, v });
1802
1828
  const wrappedUpdate = createUpdateWrapper(
1803
1829
  coordinate,
1804
1830
  async (k, item) => {
@@ -1811,16 +1837,16 @@ var update = async (key, v, context) => {
1811
1837
  async function executeUpdateLogic(key, v, context) {
1812
1838
  const { api, cacheMap, pkType } = context;
1813
1839
  if (!isValidItemKey4(key)) {
1814
- logger8.error("Key for Update is not a valid ItemKey: %j", key);
1840
+ logger10.error("Key for Update is not a valid ItemKey: %j", key);
1815
1841
  throw new Error("Key for Update is not a valid ItemKey");
1816
1842
  }
1817
- logger8.debug("Invalidating item key before update", { key });
1843
+ logger10.debug("Invalidating item key before update", { key });
1818
1844
  cacheMap.invalidateItemKeys([key]);
1819
1845
  await cacheMap.clearQueryResults();
1820
1846
  try {
1821
1847
  const previousItem = await cacheMap.get(key);
1822
1848
  const updated = await api.update(key, v);
1823
- logger8.debug("Caching update result", { updatedKey: updated.key });
1849
+ logger10.debug("Caching update result", { updatedKey: updated.key });
1824
1850
  await cacheMap.set(updated.key, updated);
1825
1851
  const cachedItem = await cacheMap.get(updated.key);
1826
1852
  const keyStr = JSON.stringify(updated.key);
@@ -1853,7 +1879,7 @@ async function executeUpdateLogic(key, v, context) {
1853
1879
  context.eventEmitter.emit(queryInvalidatedEvent);
1854
1880
  return updated;
1855
1881
  } catch (e) {
1856
- logger8.error("Error updating item", { error: e });
1882
+ logger10.error("Error updating item", { error: e });
1857
1883
  throw e;
1858
1884
  }
1859
1885
  }
@@ -1866,7 +1892,7 @@ import {
1866
1892
 
1867
1893
  // src/utils/cacheInvalidation.ts
1868
1894
  import { toKeyTypeArray } from "@fjell/core";
1869
- var logger9 = logger_default.get("cache", "utils", "cacheInvalidation");
1895
+ var logger11 = logger_default.get("cache", "utils", "cacheInvalidation");
1870
1896
  var extractKeysAndKeyTypesFromActionResult = (affectedItems) => {
1871
1897
  const keys = [];
1872
1898
  const keyTypeArrays = [];
@@ -1883,7 +1909,7 @@ var extractKeysAndKeyTypesFromActionResult = (affectedItems) => {
1883
1909
  return { keys, keyTypeArrays };
1884
1910
  };
1885
1911
  var invalidateCachesByKeysAndKeyTypes = async (registry, keys, keyTypeArrays) => {
1886
- logger9.debug("Invalidating caches by keys and key types", {
1912
+ logger11.debug("Invalidating caches by keys and key types", {
1887
1913
  keysCount: keys.length,
1888
1914
  keyTypeArrays
1889
1915
  });
@@ -1901,22 +1927,22 @@ var invalidateCachesByKeysAndKeyTypes = async (registry, keys, keyTypeArrays) =>
1901
1927
  try {
1902
1928
  const cacheInstance = registry.get(keyTypes);
1903
1929
  if (cacheInstance && isCache(cacheInstance)) {
1904
- logger9.debug("Found cache instance for targeted invalidation", {
1930
+ logger11.debug("Found cache instance for targeted invalidation", {
1905
1931
  keyTypes,
1906
1932
  cacheType: cacheInstance.coordinate.kta,
1907
1933
  keysToInvalidate: cacheKeys.length
1908
1934
  });
1909
1935
  await cacheInstance.cacheMap.invalidateItemKeys(cacheKeys);
1910
1936
  await cacheInstance.cacheMap.clearQueryResults();
1911
- logger9.debug("Successfully invalidated specific items in cache", {
1937
+ logger11.debug("Successfully invalidated specific items in cache", {
1912
1938
  keyTypes,
1913
1939
  invalidatedCount: cacheKeys.length
1914
1940
  });
1915
1941
  } else {
1916
- logger9.debug("No cache instance found for key types", { keyTypes });
1942
+ logger11.debug("No cache instance found for key types", { keyTypes });
1917
1943
  }
1918
1944
  } catch (error) {
1919
- logger9.warning("Failed to invalidate cache for key types", {
1945
+ logger11.warning("Failed to invalidate cache for key types", {
1920
1946
  keyTypes,
1921
1947
  error: error instanceof Error ? error.message : String(error)
1922
1948
  });
@@ -1926,12 +1952,12 @@ var invalidateCachesByKeysAndKeyTypes = async (registry, keys, keyTypeArrays) =>
1926
1952
  try {
1927
1953
  const cacheInstance = registry.get(keyTypes);
1928
1954
  if (cacheInstance && isCache(cacheInstance)) {
1929
- logger9.debug("Handling location-based invalidation", { keyTypes });
1955
+ logger11.debug("Handling location-based invalidation", { keyTypes });
1930
1956
  await cacheInstance.cacheMap.clearQueryResults();
1931
- logger9.debug("Successfully cleared query results for location", { keyTypes });
1957
+ logger11.debug("Successfully cleared query results for location", { keyTypes });
1932
1958
  }
1933
1959
  } catch (error) {
1934
- logger9.warning("Failed to handle location-based invalidation", {
1960
+ logger11.warning("Failed to handle location-based invalidation", {
1935
1961
  keyTypes,
1936
1962
  error: error instanceof Error ? error.message : String(error)
1937
1963
  });
@@ -1942,7 +1968,7 @@ function isCache(instance) {
1942
1968
  return instance !== null && typeof instance === "object" && "operations" in instance && "cacheMap" in instance && typeof instance.cacheMap.invalidateItemKeys === "function";
1943
1969
  }
1944
1970
  var handleActionCacheInvalidation = async (registry, affectedItems) => {
1945
- logger9.debug("Handling action cache invalidation", {
1971
+ logger11.debug("Handling action cache invalidation", {
1946
1972
  affectedItemsCount: affectedItems.length
1947
1973
  });
1948
1974
  const { keys, keyTypeArrays } = extractKeysAndKeyTypesFromActionResult(affectedItems);
@@ -1950,10 +1976,10 @@ var handleActionCacheInvalidation = async (registry, affectedItems) => {
1950
1976
  };
1951
1977
 
1952
1978
  // src/ops/action.ts
1953
- var logger10 = logger_default.get("action");
1979
+ var logger12 = logger_default.get("action");
1954
1980
  var action = async (key, action2, body = {}, context) => {
1955
1981
  const { coordinate } = context;
1956
- logger10.default("action", { key, action: action2, body });
1982
+ logger12.default("action", { key, action: action2, body });
1957
1983
  const wrappedAction = createActionWrapper(
1958
1984
  coordinate,
1959
1985
  async (k, a, b) => {
@@ -1966,28 +1992,28 @@ var action = async (key, action2, body = {}, context) => {
1966
1992
  async function executeActionLogic(key, action2, body, context) {
1967
1993
  const { api, cacheMap, pkType, registry } = context;
1968
1994
  if (!isValidItemKey5(key)) {
1969
- logger10.error("Key for Action is not a valid ItemKey: %j", key);
1995
+ logger12.error("Key for Action is not a valid ItemKey: %j", key);
1970
1996
  throw new Error("Key for Action is not a valid ItemKey");
1971
1997
  }
1972
- logger10.debug("Invalidating item key before action", { key });
1998
+ logger12.debug("Invalidating item key before action", { key });
1973
1999
  cacheMap.invalidateItemKeys([key]);
1974
2000
  const result = await api.action(key, action2, body);
1975
2001
  const updated = result[0];
1976
2002
  const affectedItems = result[1];
1977
2003
  if (affectedItems && affectedItems.length > 0) {
1978
- logger10.debug("Handling cache invalidation for affected items", {
2004
+ logger12.debug("Handling cache invalidation for affected items", {
1979
2005
  affectedItemsCount: affectedItems.length
1980
2006
  });
1981
2007
  try {
1982
2008
  await handleActionCacheInvalidation(registry, affectedItems);
1983
2009
  } catch (error) {
1984
- logger10.warning("Failed to handle cache invalidation for affected items", {
2010
+ logger12.warning("Failed to handle cache invalidation for affected items", {
1985
2011
  error: error instanceof Error ? error.message : String(error),
1986
2012
  affectedItems
1987
2013
  });
1988
2014
  }
1989
2015
  }
1990
- logger10.debug("Caching action result", { updatedKey: updated.key });
2016
+ logger12.debug("Caching action result", { updatedKey: updated.key });
1991
2017
  cacheMap.set(updated.key, updated);
1992
2018
  const keyStr = JSON.stringify(updated.key);
1993
2019
  context.ttlManager.onItemAdded(keyStr, cacheMap);
@@ -1997,19 +2023,19 @@ async function executeActionLogic(key, action2, body, context) {
1997
2023
  const parsedKey = JSON.parse(evictedKey);
1998
2024
  await cacheMap.delete(parsedKey);
1999
2025
  } catch (error) {
2000
- logger10.error("Failed to parse evicted key during deletion", {
2026
+ logger12.error("Failed to parse evicted key during deletion", {
2001
2027
  evictedKey,
2002
2028
  error: error instanceof Error ? error.message : String(error)
2003
2029
  });
2004
2030
  }
2005
2031
  }
2006
- logger10.debug("Emitting itemUpdated event after action", {
2032
+ logger12.debug("Emitting itemUpdated event after action", {
2007
2033
  key: updated.key,
2008
2034
  action: action2
2009
2035
  });
2010
2036
  const itemEvent = CacheEventFactory.itemUpdated(updated.key, updated, null, "api");
2011
2037
  context.eventEmitter.emit(itemEvent);
2012
- logger10.debug("Emitting queryInvalidatedEvent after action", {
2038
+ logger12.debug("Emitting queryInvalidatedEvent after action", {
2013
2039
  eventType: "query_invalidated",
2014
2040
  reason: "item_changed",
2015
2041
  action: action2
@@ -2029,10 +2055,10 @@ import {
2029
2055
  createAllActionWrapper
2030
2056
  } from "@fjell/core";
2031
2057
  import { NotFoundError as NotFoundError3 } from "@fjell/http-api";
2032
- var logger11 = logger_default.get("allAction");
2058
+ var logger13 = logger_default.get("allAction");
2033
2059
  var allAction = async (action2, body = {}, locations = [], context) => {
2034
2060
  const { coordinate } = context;
2035
- logger11.default("allAction", { action: action2, body, locations });
2061
+ logger13.default("allAction", { action: action2, body, locations });
2036
2062
  const wrappedAllAction = createAllActionWrapper(
2037
2063
  coordinate,
2038
2064
  async (a, b, locs) => {
@@ -2052,10 +2078,10 @@ async function executeAllActionLogic(action2, body, locations, context) {
2052
2078
  existingItems.push(...cachedItems);
2053
2079
  }
2054
2080
  } catch (error) {
2055
- logger11.debug("Could not retrieve existing items for comparison", { error });
2081
+ logger13.debug("Could not retrieve existing items for comparison", { error });
2056
2082
  }
2057
2083
  }
2058
- logger11.debug("Invalidating location before allAction", { locations });
2084
+ logger13.debug("Invalidating location before allAction", { locations });
2059
2085
  await cacheMap.invalidateLocation(locations);
2060
2086
  let ret = [];
2061
2087
  let affectedItems = [];
@@ -2065,7 +2091,7 @@ async function executeAllActionLogic(action2, body, locations, context) {
2065
2091
  ret = result[0];
2066
2092
  affectedItems = result[1];
2067
2093
  } else {
2068
- logger11.warning("Unexpected result format from allAction", {
2094
+ logger13.warning("Unexpected result format from allAction", {
2069
2095
  resultType: typeof result,
2070
2096
  isArray: Array.isArray(result),
2071
2097
  resultLength: Array.isArray(result) ? result.length : "not array"
@@ -2074,19 +2100,19 @@ async function executeAllActionLogic(action2, body, locations, context) {
2074
2100
  affectedItems = [];
2075
2101
  }
2076
2102
  if (affectedItems && affectedItems.length > 0) {
2077
- logger11.debug("Handling cache invalidation for affected items", {
2103
+ logger13.debug("Handling cache invalidation for affected items", {
2078
2104
  affectedItemsCount: affectedItems.length
2079
2105
  });
2080
2106
  try {
2081
2107
  await handleActionCacheInvalidation(registry, affectedItems);
2082
2108
  } catch (error) {
2083
- logger11.warning("Failed to handle cache invalidation for affected items", {
2109
+ logger13.warning("Failed to handle cache invalidation for affected items", {
2084
2110
  error: error instanceof Error ? error.message : String(error),
2085
2111
  affectedItems
2086
2112
  });
2087
2113
  }
2088
2114
  }
2089
- logger11.debug("Caching allAction results", { resultCount: ret.length });
2115
+ logger13.debug("Caching allAction results", { resultCount: ret.length });
2090
2116
  const modifiedItems = [];
2091
2117
  const newItems = [];
2092
2118
  for (const v of ret) {
@@ -2108,7 +2134,7 @@ async function executeAllActionLogic(action2, body, locations, context) {
2108
2134
  }
2109
2135
  }
2110
2136
  for (const item of modifiedItems) {
2111
- logger11.debug("Emitting item_updated event for modified item", { key: item.key });
2137
+ logger13.debug("Emitting item_updated event for modified item", { key: item.key });
2112
2138
  const itemEvent = CacheEventFactory.itemUpdated(
2113
2139
  item.key,
2114
2140
  item,
@@ -2119,7 +2145,7 @@ async function executeAllActionLogic(action2, body, locations, context) {
2119
2145
  eventEmitter.emit(itemEvent);
2120
2146
  }
2121
2147
  for (const item of newItems) {
2122
- logger11.debug("Emitting item_created event for new item", { key: item.key });
2148
+ logger13.debug("Emitting item_created event for new item", { key: item.key });
2123
2149
  const itemEvent = CacheEventFactory.itemCreated(
2124
2150
  item.key,
2125
2151
  item,
@@ -2129,14 +2155,14 @@ async function executeAllActionLogic(action2, body, locations, context) {
2129
2155
  }
2130
2156
  if (modifiedItems.length > 0) {
2131
2157
  const modifiedKeys = modifiedItems.map((item) => item.key);
2132
- logger11.debug("Invalidating individual item keys for modified items", {
2158
+ logger13.debug("Invalidating individual item keys for modified items", {
2133
2159
  keyCount: modifiedKeys.length,
2134
2160
  keys: modifiedKeys
2135
2161
  });
2136
2162
  await cacheMap.invalidateItemKeys(modifiedKeys);
2137
2163
  }
2138
2164
  await cacheMap.clearQueryResults();
2139
- logger11.debug("Emitting query_invalidated event after allAction", {
2165
+ logger13.debug("Emitting query_invalidated event after allAction", {
2140
2166
  eventType: "query_invalidated",
2141
2167
  reason: "item_changed",
2142
2168
  action: action2,
@@ -2169,10 +2195,10 @@ async function executeAllActionLogic(action2, body, locations, context) {
2169
2195
  import {
2170
2196
  createFacetWrapper
2171
2197
  } from "@fjell/core";
2172
- var logger12 = logger_default.get("facet");
2198
+ var logger14 = logger_default.get("facet");
2173
2199
  var facet = async (key, facet2, params = {}, context) => {
2174
2200
  const { coordinate, api } = context;
2175
- logger12.default("facet", { key, facet: facet2 });
2201
+ logger14.default("facet", { key, facet: facet2 });
2176
2202
  const wrappedFacet = createFacetWrapper(
2177
2203
  coordinate,
2178
2204
  async (k, f, p) => {
@@ -2186,10 +2212,10 @@ var facet = async (key, facet2, params = {}, context) => {
2186
2212
  import {
2187
2213
  createAllFacetWrapper
2188
2214
  } from "@fjell/core";
2189
- var logger13 = logger_default.get("allFacet");
2215
+ var logger15 = logger_default.get("allFacet");
2190
2216
  var allFacet = async (facet2, params = {}, locations = [], context) => {
2191
2217
  const { api, coordinate } = context;
2192
- logger13.default("allFacet", { facet: facet2, params, locations });
2218
+ logger15.default("allFacet", { facet: facet2, params, locations });
2193
2219
  const wrappedAllFacet = createAllFacetWrapper(
2194
2220
  coordinate,
2195
2221
  async (f, p, locs) => {
@@ -2203,10 +2229,10 @@ var allFacet = async (facet2, params = {}, locations = [], context) => {
2203
2229
  import {
2204
2230
  createFindWrapper
2205
2231
  } from "@fjell/core";
2206
- var logger14 = logger_default.get("find");
2232
+ var logger16 = logger_default.get("find");
2207
2233
  var find = async (finder, params = {}, locations = [], context, findOptions) => {
2208
2234
  const { coordinate } = context;
2209
- logger14.default("find", { finder, params, locations, findOptions });
2235
+ logger16.default("find", { finder, params, locations, findOptions });
2210
2236
  const wrappedFind = createFindWrapper(
2211
2237
  coordinate,
2212
2238
  async (f, p, locs, opts) => {
@@ -2236,27 +2262,27 @@ async function executeFindLogic(finder, params, locations, context, findOptions)
2236
2262
  }
2237
2263
  });
2238
2264
  if (context.options?.bypassCache) {
2239
- logger14.debug("Cache bypass enabled, fetching directly from API", { finder, params, locations, findOptions });
2265
+ logger16.debug("Cache bypass enabled, fetching directly from API", { finder, params, locations, findOptions });
2240
2266
  try {
2241
2267
  const ret2 = await api.find(finder, params, locations, findOptions);
2242
- logger14.debug("API response received (not cached due to bypass)", { finder, params, locations, itemCount: ret2.items.length, total: ret2.metadata.total });
2268
+ logger16.debug("API response received (not cached due to bypass)", { finder, params, locations, itemCount: ret2.items.length, total: ret2.metadata.total });
2243
2269
  return ret2;
2244
2270
  } catch (error) {
2245
- logger14.error("API request failed", { finder, params, locations, findOptions, error });
2271
+ logger16.error("API request failed", { finder, params, locations, findOptions, error });
2246
2272
  throw error;
2247
2273
  }
2248
2274
  }
2249
2275
  const queryHash = createFinderHash(finder, params, locations);
2250
- logger14.debug("QUERY_CACHE: Generated query hash for find()", {
2276
+ logger16.debug("QUERY_CACHE: Generated query hash for find()", {
2251
2277
  queryHash,
2252
2278
  finder,
2253
2279
  params: JSON.stringify(params),
2254
2280
  locations: JSON.stringify(locations)
2255
2281
  });
2256
- logger14.debug("QUERY_CACHE: Checking query cache for hash", { queryHash });
2282
+ logger16.debug("QUERY_CACHE: Checking query cache for hash", { queryHash });
2257
2283
  const cachedItemKeys = await cacheMap.getQueryResult(queryHash);
2258
2284
  if (cachedItemKeys) {
2259
- logger14.debug("QUERY_CACHE: Cache HIT - Found cached query result", {
2285
+ logger16.debug("QUERY_CACHE: Cache HIT - Found cached query result", {
2260
2286
  queryHash,
2261
2287
  cachedKeyCount: cachedItemKeys.length,
2262
2288
  itemKeys: cachedItemKeys.map((k) => JSON.stringify(k))
@@ -2268,14 +2294,14 @@ async function executeFindLogic(finder, params, locations, context, findOptions)
2268
2294
  const item = await cacheMap.get(itemKey);
2269
2295
  if (item) {
2270
2296
  cachedItems.push(item);
2271
- logger14.debug("QUERY_CACHE: Retrieved cached item", {
2297
+ logger16.debug("QUERY_CACHE: Retrieved cached item", {
2272
2298
  itemKey: JSON.stringify(itemKey),
2273
2299
  itemKeyStr: JSON.stringify(item.key)
2274
2300
  });
2275
2301
  } else {
2276
2302
  allItemsAvailable = false;
2277
2303
  missingKeys.push(itemKey);
2278
- logger14.debug("QUERY_CACHE: Cached item MISSING from item cache", {
2304
+ logger16.debug("QUERY_CACHE: Cached item MISSING from item cache", {
2279
2305
  itemKey: JSON.stringify(itemKey),
2280
2306
  queryHash
2281
2307
  });
@@ -2283,7 +2309,7 @@ async function executeFindLogic(finder, params, locations, context, findOptions)
2283
2309
  }
2284
2310
  }
2285
2311
  if (allItemsAvailable) {
2286
- logger14.debug("QUERY_CACHE: All cached items available, returning from cache", {
2312
+ logger16.debug("QUERY_CACHE: All cached items available, returning from cache", {
2287
2313
  queryHash,
2288
2314
  itemCount: cachedItems.length
2289
2315
  });
@@ -2308,7 +2334,7 @@ async function executeFindLogic(finder, params, locations, context, findOptions)
2308
2334
  }
2309
2335
  };
2310
2336
  } else {
2311
- logger14.debug("QUERY_CACHE: Some cached items missing, invalidating query cache", {
2337
+ logger16.debug("QUERY_CACHE: Some cached items missing, invalidating query cache", {
2312
2338
  queryHash,
2313
2339
  missingKeys: missingKeys.map((k) => JSON.stringify(k)),
2314
2340
  foundCount: cachedItems.length,
@@ -2317,9 +2343,9 @@ async function executeFindLogic(finder, params, locations, context, findOptions)
2317
2343
  cacheMap.deleteQueryResult(queryHash);
2318
2344
  }
2319
2345
  } else {
2320
- logger14.debug("QUERY_CACHE: Cache MISS - No cached query result found", { queryHash });
2346
+ logger16.debug("QUERY_CACHE: Cache MISS - No cached query result found", { queryHash });
2321
2347
  }
2322
- logger14.debug("QUERY_CACHE: Fetching from API (cache miss or invalid)", {
2348
+ logger16.debug("QUERY_CACHE: Fetching from API (cache miss or invalid)", {
2323
2349
  queryHash,
2324
2350
  finder,
2325
2351
  params: JSON.stringify(params),
@@ -2327,19 +2353,19 @@ async function executeFindLogic(finder, params, locations, context, findOptions)
2327
2353
  findOptions
2328
2354
  });
2329
2355
  const ret = await api.find(finder, params, locations, findOptions);
2330
- logger14.debug("QUERY_CACHE: API response received", {
2356
+ logger16.debug("QUERY_CACHE: API response received", {
2331
2357
  queryHash,
2332
2358
  itemCount: ret.items.length,
2333
2359
  total: ret.metadata.total,
2334
2360
  itemKeys: ret.items.map((item) => JSON.stringify(item.key))
2335
2361
  });
2336
- logger14.debug("QUERY_CACHE: Storing items in item cache", {
2362
+ logger16.debug("QUERY_CACHE: Storing items in item cache", {
2337
2363
  queryHash,
2338
2364
  itemCount: ret.items.length
2339
2365
  });
2340
2366
  for (const v of ret.items) {
2341
2367
  await cacheMap.set(v.key, v);
2342
- logger14.debug("QUERY_CACHE: Stored item in cache", {
2368
+ logger16.debug("QUERY_CACHE: Stored item in cache", {
2343
2369
  itemKey: JSON.stringify(v.key),
2344
2370
  queryHash
2345
2371
  });
@@ -2349,7 +2375,7 @@ async function executeFindLogic(finder, params, locations, context, findOptions)
2349
2375
  for (const evictedKey of evictedKeys) {
2350
2376
  const parsedKey = JSON.parse(evictedKey);
2351
2377
  await cacheMap.delete(parsedKey);
2352
- logger14.debug("QUERY_CACHE: Evicted item due to cache limits", {
2378
+ logger16.debug("QUERY_CACHE: Evicted item due to cache limits", {
2353
2379
  evictedKey,
2354
2380
  queryHash
2355
2381
  });
@@ -2357,15 +2383,15 @@ async function executeFindLogic(finder, params, locations, context, findOptions)
2357
2383
  }
2358
2384
  const itemKeys = ret.items.map((item) => item.key);
2359
2385
  await cacheMap.setQueryResult(queryHash, itemKeys);
2360
- logger14.debug("QUERY_CACHE: Stored query result in query cache", {
2386
+ logger16.debug("QUERY_CACHE: Stored query result in query cache", {
2361
2387
  queryHash,
2362
2388
  itemKeyCount: itemKeys.length,
2363
2389
  itemKeys: itemKeys.map((k) => JSON.stringify(k))
2364
2390
  });
2365
2391
  const event = CacheEventFactory.createQueryEvent(params, locations, ret.items);
2366
2392
  eventEmitter.emit(event);
2367
- logger14.debug("QUERY_CACHE: Emitted query event", { queryHash });
2368
- logger14.debug("QUERY_CACHE: find() operation completed", {
2393
+ logger16.debug("QUERY_CACHE: Emitted query event", { queryHash });
2394
+ logger16.debug("QUERY_CACHE: find() operation completed", {
2369
2395
  queryHash,
2370
2396
  resultCount: ret.items.length,
2371
2397
  total: ret.metadata.total
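
Editor's note: the executeFindLogic body above spells out the lookup order that the QUERY_CACHE log lines trace — hash the finder call, look the hash up in the query-result cache, rebuild the result from the item cache, and only fall back to the API (re-priming both layers) when the query entry is missing or any referenced item has been evicted. A minimal, self-contained sketch of that flow follows; the names and types are illustrative, not the package's public API.

    type ItemKey = string;
    interface Item { key: ItemKey; }

    interface QueryLayer {
      getQueryResult(hash: string): Promise<ItemKey[] | null>;
      setQueryResult(hash: string, keys: ItemKey[]): Promise<void>;
      deleteQueryResult(hash: string): Promise<void>;
    }

    interface ItemLayer {
      get(key: ItemKey): Promise<Item | null>;
      set(key: ItemKey, item: Item): Promise<void>;
    }

    // Illustrative two-layer lookup, mirroring the logic in the hunks above.
    async function findWithQueryCache(
      queryHash: string,
      fetchFromApi: () => Promise<Item[]>,
      queries: QueryLayer,
      items: ItemLayer
    ): Promise<Item[]> {
      const cachedKeys = await queries.getQueryResult(queryHash);
      if (cachedKeys) {
        const hits: Item[] = [];
        let allPresent = true;
        for (const key of cachedKeys) {
          const item = await items.get(key);
          if (item) hits.push(item);
          else allPresent = false;                  // item evicted since the query was cached
        }
        if (allPresent) return hits;                // cache hit: both layers agree
        await queries.deleteQueryResult(queryHash); // stale query entry, drop it
      }
      const fetched = await fetchFromApi();         // miss (or stale): go to the API
      for (const item of fetched) await items.set(item.key, item);
      await queries.setQueryResult(queryHash, fetched.map((i) => i.key));
      return fetched;
    }

The real implementation additionally tracks per-item metadata and TTLs, honours eviction limits, and emits cache events, as the surrounding hunks show.
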
@@ -2377,10 +2403,10 @@ async function executeFindLogic(finder, params, locations, context, findOptions)
2377
2403
  import {
2378
2404
  createFindOneWrapper
2379
2405
  } from "@fjell/core";
2380
- var logger15 = logger_default.get("findOne");
2406
+ var logger17 = logger_default.get("findOne");
2381
2407
  var findOne = async (finder, finderParams = {}, locations = [], context) => {
2382
2408
  const { coordinate } = context;
2383
- logger15.default("findOne", { finder, finderParams, locations });
2409
+ logger17.default("findOne", { finder, finderParams, locations });
2384
2410
  const wrappedFindOne = createFindOneWrapper(
2385
2411
  coordinate,
2386
2412
  async (f, p, locs) => {
@@ -2393,58 +2419,58 @@ var findOne = async (finder, finderParams = {}, locations = [], context) => {
2393
2419
  async function executeFindOneLogic(finder, finderParams, locations, context) {
2394
2420
  const { api, cacheMap, pkType, ttlManager, eventEmitter } = context;
2395
2421
  if (context.options?.bypassCache) {
2396
- logger15.debug("Cache bypass enabled, fetching directly from API", { finder, finderParams, locations });
2422
+ logger17.debug("Cache bypass enabled, fetching directly from API", { finder, finderParams, locations });
2397
2423
  try {
2398
2424
  const ret2 = await api.findOne(finder, finderParams, locations);
2399
2425
  if (ret2 === null) {
2400
2426
  throw new Error(`findOne returned null for finder: ${finder}`);
2401
2427
  }
2402
- logger15.debug("API response received (not cached due to bypass)", { finder, finderParams, locations });
2428
+ logger17.debug("API response received (not cached due to bypass)", { finder, finderParams, locations });
2403
2429
  return ret2;
2404
2430
  } catch (error) {
2405
- logger15.error("API request failed", { finder, finderParams, locations, error });
2431
+ logger17.error("API request failed", { finder, finderParams, locations, error });
2406
2432
  throw error;
2407
2433
  }
2408
2434
  }
2409
2435
  const queryHash = createFinderHash(finder, finderParams, locations);
2410
- logger15.debug("QUERY_CACHE: Generated query hash for findOne()", {
2436
+ logger17.debug("QUERY_CACHE: Generated query hash for findOne()", {
2411
2437
  queryHash,
2412
2438
  finder,
2413
2439
  finderParams: JSON.stringify(finderParams),
2414
2440
  locations: JSON.stringify(locations)
2415
2441
  });
2416
- logger15.debug("QUERY_CACHE: Checking query cache for hash", { queryHash });
2442
+ logger17.debug("QUERY_CACHE: Checking query cache for hash", { queryHash });
2417
2443
  const cachedItemKeys = await cacheMap.getQueryResult(queryHash);
2418
2444
  if (cachedItemKeys && cachedItemKeys.length > 0) {
2419
- logger15.debug("QUERY_CACHE: Cache HIT - Found cached query result", {
2445
+ logger17.debug("QUERY_CACHE: Cache HIT - Found cached query result", {
2420
2446
  queryHash,
2421
2447
  cachedKeyCount: cachedItemKeys.length,
2422
2448
  itemKeys: cachedItemKeys.map((k) => JSON.stringify(k))
2423
2449
  });
2424
2450
  const itemKey = cachedItemKeys[0];
2425
- logger15.debug("QUERY_CACHE: Retrieving first cached item", {
2451
+ logger17.debug("QUERY_CACHE: Retrieving first cached item", {
2426
2452
  queryHash,
2427
2453
  itemKey: JSON.stringify(itemKey)
2428
2454
  });
2429
2455
  const item = await cacheMap.get(itemKey);
2430
2456
  if (item) {
2431
- logger15.debug("QUERY_CACHE: Retrieved cached item successfully", {
2457
+ logger17.debug("QUERY_CACHE: Retrieved cached item successfully", {
2432
2458
  queryHash,
2433
2459
  itemKey: JSON.stringify(itemKey),
2434
2460
  itemKeyStr: JSON.stringify(item.key)
2435
2461
  });
2436
2462
  return item;
2437
2463
  } else {
2438
- logger15.debug("QUERY_CACHE: Cached item MISSING from item cache, invalidating query cache", {
2464
+ logger17.debug("QUERY_CACHE: Cached item MISSING from item cache, invalidating query cache", {
2439
2465
  queryHash,
2440
2466
  itemKey: JSON.stringify(itemKey)
2441
2467
  });
2442
2468
  cacheMap.deleteQueryResult(queryHash);
2443
2469
  }
2444
2470
  } else {
2445
- logger15.debug("QUERY_CACHE: Cache MISS - No cached query result found", { queryHash });
2471
+ logger17.debug("QUERY_CACHE: Cache MISS - No cached query result found", { queryHash });
2446
2472
  }
2447
- logger15.debug("QUERY_CACHE: Fetching from API (cache miss or invalid)", {
2473
+ logger17.debug("QUERY_CACHE: Fetching from API (cache miss or invalid)", {
2448
2474
  queryHash,
2449
2475
  finder,
2450
2476
  finderParams: JSON.stringify(finderParams),
@@ -2452,14 +2478,14 @@ async function executeFindOneLogic(finder, finderParams, locations, context) {
2452
2478
  });
2453
2479
  const ret = await api.findOne(finder, finderParams, locations);
2454
2480
  if (ret === null) {
2455
- logger15.debug("QUERY_CACHE: API returned null, throwing error", { queryHash, finder });
2481
+ logger17.debug("QUERY_CACHE: API returned null, throwing error", { queryHash, finder });
2456
2482
  throw new Error(`findOne returned null for finder: ${finder}`);
2457
2483
  }
2458
- logger15.debug("QUERY_CACHE: API response received", {
2484
+ logger17.debug("QUERY_CACHE: API response received", {
2459
2485
  queryHash,
2460
2486
  itemKey: JSON.stringify(ret.key)
2461
2487
  });
2462
- logger15.debug("QUERY_CACHE: Storing item in item cache", {
2488
+ logger17.debug("QUERY_CACHE: Storing item in item cache", {
2463
2489
  queryHash,
2464
2490
  itemKey: JSON.stringify(ret.key)
2465
2491
  });
@@ -2470,20 +2496,20 @@ async function executeFindOneLogic(finder, finderParams, locations, context) {
2470
2496
  for (const evictedKey of evictedKeys) {
2471
2497
  const parsedKey = JSON.parse(evictedKey);
2472
2498
  await cacheMap.delete(parsedKey);
2473
- logger15.debug("QUERY_CACHE: Evicted item due to cache limits", {
2499
+ logger17.debug("QUERY_CACHE: Evicted item due to cache limits", {
2474
2500
  evictedKey,
2475
2501
  queryHash
2476
2502
  });
2477
2503
  }
2478
2504
  await cacheMap.setQueryResult(queryHash, [ret.key]);
2479
- logger15.debug("QUERY_CACHE: Stored query result in query cache", {
2505
+ logger17.debug("QUERY_CACHE: Stored query result in query cache", {
2480
2506
  queryHash,
2481
2507
  itemKey: JSON.stringify(ret.key)
2482
2508
  });
2483
2509
  const event = CacheEventFactory.createQueryEvent(finderParams, locations, [ret]);
2484
2510
  eventEmitter.emit(event);
2485
- logger15.debug("QUERY_CACHE: Emitted query event", { queryHash });
2486
- logger15.debug("QUERY_CACHE: findOne() operation completed", {
2511
+ logger17.debug("QUERY_CACHE: Emitted query event", { queryHash });
2512
+ logger17.debug("QUERY_CACHE: findOne() operation completed", {
2487
2513
  queryHash,
2488
2514
  itemKey: JSON.stringify(ret.key)
2489
2515
  });
@@ -2495,7 +2521,7 @@ import {
2495
2521
  isItemKeyEqual,
2496
2522
  isValidItemKey as isValidItemKey6
2497
2523
  } from "@fjell/core";
2498
- var logger16 = logger_default.get("set");
2524
+ var logger18 = logger_default.get("set");
2499
2525
  var normalizeKeyValue2 = (value) => {
2500
2526
  return String(value);
2501
2527
  };
@@ -2547,17 +2573,17 @@ var set = async (key, v, context) => {
2547
2573
  const startTime = Date.now();
2548
2574
  const { cacheMap, pkType, ttlManager, evictionManager, eventEmitter } = context;
2549
2575
  const keyStr = JSON.stringify(key);
2550
- logger16.default("set", { key, v });
2551
- logger16.debug("CACHE_OP: set() started", {
2576
+ logger18.default("set", { key, v });
2577
+ logger18.debug("CACHE_OP: set() started", {
2552
2578
  key: keyStr,
2553
2579
  cacheType: cacheMap.implementationType
2554
2580
  });
2555
2581
  if (!isValidItemKey6(key)) {
2556
- logger16.error("CACHE_OP: Invalid key for set", { key: keyStr });
2582
+ logger18.error("CACHE_OP: Invalid key for set", { key: keyStr });
2557
2583
  throw new Error("Key for Set is not a valid ItemKey");
2558
2584
  }
2559
2585
  if (!isItemKeyEqualNormalized(key, v.key)) {
2560
- logger16.error("CACHE_OP: Key mismatch in set", {
2586
+ logger18.error("CACHE_OP: Key mismatch in set", {
2561
2587
  providedKey: keyStr,
2562
2588
  itemKey: JSON.stringify(v.key)
2563
2589
  });
@@ -2566,7 +2592,7 @@ var set = async (key, v, context) => {
2566
2592
  const checkStartTime = Date.now();
2567
2593
  const previousItem = await cacheMap.get(key);
2568
2594
  const checkDuration = Date.now() - checkStartTime;
2569
- logger16.debug("CACHE_OP: Previous item check", {
2595
+ logger18.debug("CACHE_OP: Previous item check", {
2570
2596
  key: keyStr,
2571
2597
  hadPreviousItem: !!previousItem,
2572
2598
  checkDuration
@@ -2587,12 +2613,12 @@ var set = async (key, v, context) => {
2587
2613
  estimatedSize
2588
2614
  };
2589
2615
  await cacheMap.setMetadata(keyStr, baseMetadata);
2590
- logger16.debug("CACHE_OP: Created base metadata", {
2616
+ logger18.debug("CACHE_OP: Created base metadata", {
2591
2617
  key: keyStr,
2592
2618
  estimatedSize
2593
2619
  });
2594
2620
  } else {
2595
- logger16.debug("CACHE_OP: Metadata already exists", {
2621
+ logger18.debug("CACHE_OP: Metadata already exists", {
2596
2622
  key: keyStr,
2597
2623
  addedAt: new Date(metadata.addedAt).toISOString(),
2598
2624
  accessCount: metadata.accessCount
@@ -2606,7 +2632,7 @@ var set = async (key, v, context) => {
2606
2632
  const evictedKeys = await evictionManager.onItemAdded(keyStr, v, cacheMap);
2607
2633
  const evictionDuration = Date.now() - evictionStartTime;
2608
2634
  if (evictedKeys.length > 0) {
2609
- logger16.debug("CACHE_OP: Eviction triggered by set", {
2635
+ logger18.debug("CACHE_OP: Eviction triggered by set", {
2610
2636
  key: keyStr,
2611
2637
  evictedCount: evictedKeys.length,
2612
2638
  evictedKeys
@@ -2615,12 +2641,12 @@ var set = async (key, v, context) => {
2615
2641
  for (const evictedKey of evictedKeys) {
2616
2642
  const parsedKey = JSON.parse(evictedKey);
2617
2643
  await cacheMap.delete(parsedKey);
2618
- logger16.debug("CACHE_OP: Removed evicted item", { evictedKey });
2644
+ logger18.debug("CACHE_OP: Removed evicted item", { evictedKey });
2619
2645
  }
2620
2646
  const event = CacheEventFactory.itemSet(key, v, previousItem);
2621
2647
  eventEmitter.emit(event);
2622
2648
  const totalDuration = Date.now() - startTime;
2623
- logger16.debug("CACHE_OP: set() completed", {
2649
+ logger18.debug("CACHE_OP: set() completed", {
2624
2650
  key: keyStr,
2625
2651
  checkDuration,
2626
2652
  setDuration,
@@ -2639,7 +2665,7 @@ import {
2639
2665
  isComKey,
2640
2666
  isQueryMatch
2641
2667
  } from "@fjell/core";
2642
- var logger17 = logger_default.get("MemoryCacheMap");
2668
+ var logger19 = logger_default.get("MemoryCacheMap");
2643
2669
  var MemoryCacheMap = class _MemoryCacheMap extends CacheMap {
2644
2670
  implementationType = "memory/memory";
2645
2671
  map = {};
@@ -2657,13 +2683,13 @@ var MemoryCacheMap = class _MemoryCacheMap extends CacheMap {
2657
2683
  const key = JSON.parse(keyStr);
2658
2684
  this.set(key, value);
2659
2685
  } catch (error) {
2660
- logger17.error("Failed to parse initial data key", { keyStr, error });
2686
+ logger19.error("Failed to parse initial data key", { keyStr, error });
2661
2687
  }
2662
2688
  }
2663
2689
  }
2664
2690
  }
2665
2691
  async get(key) {
2666
- logger17.trace("get", { key });
2692
+ logger19.trace("get", { key });
2667
2693
  const hashedKey = this.normalizedHashFunction(key);
2668
2694
  const entry = this.map[hashedKey];
2669
2695
  if (entry && this.normalizedHashFunction(entry.originalKey) === hashedKey) {
@@ -2678,7 +2704,7 @@ var MemoryCacheMap = class _MemoryCacheMap extends CacheMap {
2678
2704
  return null;
2679
2705
  }
2680
2706
  async set(key, value) {
2681
- logger17.trace("set", { key, value });
2707
+ logger19.trace("set", { key, value });
2682
2708
  const hashedKey = this.normalizedHashFunction(key);
2683
2709
  const keyStr = JSON.stringify(key);
2684
2710
  this.map[hashedKey] = { originalKey: key, value };
@@ -2705,7 +2731,7 @@ var MemoryCacheMap = class _MemoryCacheMap extends CacheMap {
2705
2731
  return !!entry && this.normalizedHashFunction(entry.originalKey) === hashedKey;
2706
2732
  }
2707
2733
  async delete(key) {
2708
- logger17.trace("delete", { key });
2734
+ logger19.trace("delete", { key });
2709
2735
  const hashedKey = this.normalizedHashFunction(key);
2710
2736
  const entry = this.map[hashedKey];
2711
2737
  if (entry && this.normalizedHashFunction(entry.originalKey) === hashedKey) {
@@ -2734,10 +2760,10 @@ var MemoryCacheMap = class _MemoryCacheMap extends CacheMap {
2734
2760
  async allIn(locations) {
2735
2761
  const allValues = await this.values();
2736
2762
  if (locations.length === 0) {
2737
- logger17.debug("Returning all items, LocKeys is empty");
2763
+ logger19.debug("Returning all items, LocKeys is empty");
2738
2764
  return allValues;
2739
2765
  } else {
2740
- logger17.debug("allIn", { locations, count: allValues.length });
2766
+ logger19.debug("allIn", { locations, count: allValues.length });
2741
2767
  return allValues.filter((item) => {
2742
2768
  const key = item.key;
2743
2769
  if (key && isComKey(key)) {
@@ -2749,12 +2775,12 @@ var MemoryCacheMap = class _MemoryCacheMap extends CacheMap {
2749
2775
  }
2750
2776
  }
2751
2777
  async contains(query, locations) {
2752
- logger17.debug("contains", { query, locations });
2778
+ logger19.debug("contains", { query, locations });
2753
2779
  const items = await this.allIn(locations);
2754
2780
  return items.some((item) => isQueryMatch(item, query));
2755
2781
  }
2756
2782
  async queryIn(query, locations = []) {
2757
- logger17.debug("queryIn", { query, locations });
2783
+ logger19.debug("queryIn", { query, locations });
2758
2784
  const items = await this.allIn(locations);
2759
2785
  return items.filter((item) => isQueryMatch(item, query));
2760
2786
  }
@@ -2776,32 +2802,45 @@ var MemoryCacheMap = class _MemoryCacheMap extends CacheMap {
2776
2802
  return clone;
2777
2803
  }
2778
2804
  // Query result caching methods implementation
2779
- async setQueryResult(queryHash, itemKeys) {
2780
- logger17.trace("setQueryResult", { queryHash, itemKeys });
2805
+ async setQueryResult(queryHash, itemKeys, metadata) {
2806
+ logger19.trace("setQueryResult", { queryHash, itemKeys, hasMetadata: !!metadata });
2781
2807
  const entry = {
2782
- itemKeys: [...itemKeys]
2808
+ itemKeys: [...itemKeys],
2783
2809
  // Create a copy to avoid external mutations
2810
+ metadata
2784
2811
  };
2785
2812
  this.queryResultCache[queryHash] = entry;
2786
2813
  }
2787
2814
  async getQueryResult(queryHash) {
2788
- logger17.trace("getQueryResult", { queryHash });
2815
+ logger19.trace("getQueryResult", { queryHash });
2789
2816
  const entry = this.queryResultCache[queryHash];
2790
2817
  if (!entry) {
2791
2818
  return null;
2792
2819
  }
2793
2820
  return [...entry.itemKeys];
2794
2821
  }
2822
+ async getQueryResultWithMetadata(queryHash) {
2823
+ logger19.trace("getQueryResultWithMetadata", { queryHash });
2824
+ const entry = this.queryResultCache[queryHash];
2825
+ if (!entry) {
2826
+ return null;
2827
+ }
2828
+ return {
2829
+ itemKeys: [...entry.itemKeys],
2830
+ // Return a copy to avoid external mutations
2831
+ metadata: entry.metadata
2832
+ };
2833
+ }
2795
2834
  async hasQueryResult(queryHash) {
2796
2835
  const entry = this.queryResultCache[queryHash];
2797
2836
  return !!entry;
2798
2837
  }
2799
2838
  async deleteQueryResult(queryHash) {
2800
- logger17.trace("deleteQueryResult", { queryHash });
2839
+ logger19.trace("deleteQueryResult", { queryHash });
2801
2840
  delete this.queryResultCache[queryHash];
2802
2841
  }
2803
2842
  async invalidateItemKeys(keys) {
2804
- logger17.debug("invalidateItemKeys", { keys });
2843
+ logger19.debug("invalidateItemKeys", { keys });
2805
2844
  if (keys.length === 0) {
2806
2845
  return;
2807
2846
  }
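
Editor's note: in this hunk, setQueryResult on the in-memory map gains an optional third metadata argument, and the new getQueryResultWithMetadata returns both a defensive copy of the key list and whatever metadata was stored with it. A hedged usage sketch follows; the key literals and metadata fields are assumptions (createdAt/expiresAt are chosen only because they are the fields the localStorage variant later revives), and cacheMap stands for any implementation exposing these methods.

    // Illustrative only: key shapes and metadata fields are assumptions.
    declare const cacheMap: {
      setQueryResult(hash: string, keys: unknown[], metadata?: unknown): Promise<void>;
      getQueryResultWithMetadata(hash: string): Promise<{ itemKeys: unknown[]; metadata?: unknown } | null>;
    };

    const queryHash = "finder:byStatus:active";
    const itemKeys = [{ kt: "task", pk: "1" }, { kt: "task", pk: "2" }];

    await cacheMap.setQueryResult(queryHash, itemKeys, {
      createdAt: new Date(),
      expiresAt: new Date(Date.now() + 300_000)   // e.g. a five-minute lifetime
    });

    // Existing two-argument callers keep working; metadata is simply undefined.
    await cacheMap.setQueryResult("finder:legacy", itemKeys);

    const stored = await cacheMap.getQueryResultWithMetadata(queryHash);
    // stored.itemKeys is a copy of the stored keys; stored.metadata carries the extra data.
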
@@ -2832,14 +2871,14 @@ var MemoryCacheMap = class _MemoryCacheMap extends CacheMap {
2832
2871
  queriesToRemove.forEach((queryHash) => {
2833
2872
  this.deleteQueryResult(queryHash);
2834
2873
  });
2835
- logger17.debug("Selectively invalidated queries referencing affected keys", {
2874
+ logger19.debug("Selectively invalidated queries referencing affected keys", {
2836
2875
  affectedKeys: keys.length,
2837
2876
  queriesRemoved: queriesToRemove.length,
2838
2877
  totalQueries: Object.keys(this.queryResultCache).length
2839
2878
  });
2840
2879
  }
2841
2880
  async invalidateLocation(locations) {
2842
- logger17.debug("invalidateLocation", { locations });
2881
+ logger19.debug("invalidateLocation", { locations });
2843
2882
  let keysToInvalidate = [];
2844
2883
  if (locations.length === 0) {
2845
2884
  const allKeys = await this.keys();
@@ -2852,7 +2891,7 @@ var MemoryCacheMap = class _MemoryCacheMap extends CacheMap {
2852
2891
  await this.invalidateItemKeys(keysToInvalidate);
2853
2892
  }
2854
2893
  async clearQueryResults() {
2855
- logger17.trace("clearQueryResults");
2894
+ logger19.trace("clearQueryResults");
2856
2895
  this.queryResultCache = {};
2857
2896
  }
2858
2897
  // CacheMapMetadataProvider implementation
@@ -2894,7 +2933,7 @@ import {
2894
2933
  isComKey as isComKey2,
2895
2934
  isQueryMatch as isQueryMatch2
2896
2935
  } from "@fjell/core";
2897
- var logger18 = logger_default.get("EnhancedMemoryCacheMap");
2936
+ var logger20 = logger_default.get("EnhancedMemoryCacheMap");
2898
2937
  var EnhancedMemoryCacheMap = class _EnhancedMemoryCacheMap extends CacheMap {
2899
2938
  implementationType = "memory/enhanced";
2900
2939
  map = {};
@@ -2913,11 +2952,11 @@ var EnhancedMemoryCacheMap = class _EnhancedMemoryCacheMap extends CacheMap {
2913
2952
  this.normalizedHashFunction = createNormalizedHashFunction();
2914
2953
  if (sizeConfig?.maxSizeBytes) {
2915
2954
  this.maxSizeBytes = parseSizeString(sizeConfig.maxSizeBytes);
2916
- logger18.debug("Cache size limit set", { maxSizeBytes: this.maxSizeBytes });
2955
+ logger20.debug("Cache size limit set", { maxSizeBytes: this.maxSizeBytes });
2917
2956
  }
2918
2957
  if (sizeConfig?.maxItems) {
2919
2958
  this.maxItems = sizeConfig.maxItems;
2920
- logger18.debug("Cache item limit set", { maxItems: this.maxItems });
2959
+ logger20.debug("Cache item limit set", { maxItems: this.maxItems });
2921
2960
  }
2922
2961
  if (initialData) {
2923
2962
  for (const [keyStr, value] of Object.entries(initialData)) {
@@ -2925,13 +2964,13 @@ var EnhancedMemoryCacheMap = class _EnhancedMemoryCacheMap extends CacheMap {
2925
2964
  const key = JSON.parse(keyStr);
2926
2965
  this.set(key, value);
2927
2966
  } catch (error) {
2928
- logger18.error("Failed to parse initial data key", { keyStr, error });
2967
+ logger20.error("Failed to parse initial data key", { keyStr, error });
2929
2968
  }
2930
2969
  }
2931
2970
  }
2932
2971
  }
2933
2972
  async get(key) {
2934
- logger18.trace("get", { key });
2973
+ logger20.trace("get", { key });
2935
2974
  const hashedKey = this.normalizedHashFunction(key);
2936
2975
  const entry = this.map[hashedKey];
2937
2976
  if (entry && this.normalizedHashFunction(entry.originalKey) === hashedKey && entry.value !== null) {
@@ -2940,7 +2979,7 @@ var EnhancedMemoryCacheMap = class _EnhancedMemoryCacheMap extends CacheMap {
2940
2979
  return null;
2941
2980
  }
2942
2981
  async set(key, value) {
2943
- logger18.trace("set", { key, value });
2982
+ logger20.trace("set", { key, value });
2944
2983
  const hashedKey = this.normalizedHashFunction(key);
2945
2984
  const estimatedSize = estimateValueSize(value);
2946
2985
  const existingEntry = this.map[hashedKey];
@@ -2951,7 +2990,7 @@ var EnhancedMemoryCacheMap = class _EnhancedMemoryCacheMap extends CacheMap {
2951
2990
  const oldValue = existingEntry.value;
2952
2991
  existingEntry.value = value;
2953
2992
  existingEntry.metadata.estimatedSize = estimatedSize;
2954
- logger18.trace("Updated existing cache entry", {
2993
+ logger20.trace("Updated existing cache entry", {
2955
2994
  key: hashedKey,
2956
2995
  sizeDiff,
2957
2996
  currentSize: this.currentSizeBytes,
@@ -2972,7 +3011,7 @@ var EnhancedMemoryCacheMap = class _EnhancedMemoryCacheMap extends CacheMap {
2972
3011
  };
2973
3012
  this.currentSizeBytes += estimatedSize;
2974
3013
  this.currentItemCount++;
2975
- logger18.trace("Added new cache entry", {
3014
+ logger20.trace("Added new cache entry", {
2976
3015
  key: hashedKey,
2977
3016
  size: estimatedSize,
2978
3017
  currentSize: this.currentSizeBytes,
@@ -2989,14 +3028,14 @@ var EnhancedMemoryCacheMap = class _EnhancedMemoryCacheMap extends CacheMap {
2989
3028
  this.deleteInternal(key, true, "filter");
2990
3029
  }
2991
3030
  deleteInternal(key, invalidateQueries = false, invalidationMode = "remove") {
2992
- logger18.trace("delete", { key });
3031
+ logger20.trace("delete", { key });
2993
3032
  const hashedKey = this.normalizedHashFunction(key);
2994
3033
  const entry = this.map[hashedKey];
2995
3034
  if (entry && this.normalizedHashFunction(entry.originalKey) === hashedKey) {
2996
3035
  this.currentSizeBytes -= entry.metadata.estimatedSize;
2997
3036
  this.currentItemCount--;
2998
3037
  delete this.map[hashedKey];
2999
- logger18.trace("Deleted cache entry", {
3038
+ logger20.trace("Deleted cache entry", {
3000
3039
  key: hashedKey,
3001
3040
  freedSize: entry.metadata.estimatedSize,
3002
3041
  currentSize: this.currentSizeBytes,
@@ -3018,7 +3057,7 @@ var EnhancedMemoryCacheMap = class _EnhancedMemoryCacheMap extends CacheMap {
3018
3057
  return Object.values(this.map).filter((entry) => entry.value !== null).map((entry) => entry.value);
3019
3058
  }
3020
3059
  async clear() {
3021
- logger18.debug("Clearing cache", {
3060
+ logger20.debug("Clearing cache", {
3022
3061
  itemsCleared: this.currentItemCount,
3023
3062
  bytesFreed: this.currentSizeBytes
3024
3063
  });
@@ -3029,10 +3068,10 @@ var EnhancedMemoryCacheMap = class _EnhancedMemoryCacheMap extends CacheMap {
3029
3068
  async allIn(locations) {
3030
3069
  const allValues = await this.values();
3031
3070
  if (locations.length === 0) {
3032
- logger18.debug("Returning all items, LocKeys is empty");
3071
+ logger20.debug("Returning all items, LocKeys is empty");
3033
3072
  return allValues;
3034
3073
  } else {
3035
- logger18.debug("allIn", { locations, count: allValues.length });
3074
+ logger20.debug("allIn", { locations, count: allValues.length });
3036
3075
  return allValues.filter((item) => {
3037
3076
  const key = item.key;
3038
3077
  if (key && isComKey2(key)) {
@@ -3043,12 +3082,12 @@ var EnhancedMemoryCacheMap = class _EnhancedMemoryCacheMap extends CacheMap {
3043
3082
  }
3044
3083
  }
3045
3084
  async contains(query, locations) {
3046
- logger18.debug("contains", { query, locations });
3085
+ logger20.debug("contains", { query, locations });
3047
3086
  const items = await this.allIn(locations);
3048
3087
  return items.some((item) => isQueryMatch2(item, query));
3049
3088
  }
3050
3089
  async queryIn(query, locations = []) {
3051
- logger18.debug("queryIn", { query, locations });
3090
+ logger20.debug("queryIn", { query, locations });
3052
3091
  const items = await this.allIn(locations);
3053
3092
  return items.filter((item) => isQueryMatch2(item, query));
3054
3093
  }
@@ -3093,26 +3132,39 @@ var EnhancedMemoryCacheMap = class _EnhancedMemoryCacheMap extends CacheMap {
3093
3132
  return stats;
3094
3133
  }
3095
3134
  // Query result caching methods
3096
- async setQueryResult(queryHash, itemKeys) {
3097
- logger18.trace("setQueryResult", { queryHash, itemKeys });
3135
+ async setQueryResult(queryHash, itemKeys, metadata) {
3136
+ logger20.trace("setQueryResult", { queryHash, itemKeys, hasMetadata: !!metadata });
3098
3137
  if (queryHash in this.queryResultCache) {
3099
3138
  this.removeQueryResultFromSizeTracking(queryHash);
3100
3139
  }
3101
3140
  const entry = {
3102
- itemKeys: [...itemKeys]
3141
+ itemKeys: [...itemKeys],
3103
3142
  // Create a copy to avoid external mutations
3143
+ metadata
3104
3144
  };
3105
3145
  this.queryResultCache[queryHash] = entry;
3106
3146
  this.addQueryResultToSizeTracking(queryHash, entry);
3107
3147
  }
3108
3148
  async getQueryResult(queryHash) {
3109
- logger18.trace("getQueryResult", { queryHash });
3149
+ logger20.trace("getQueryResult", { queryHash });
3110
3150
  const entry = this.queryResultCache[queryHash];
3111
3151
  if (!entry) {
3112
3152
  return null;
3113
3153
  }
3114
3154
  return [...entry.itemKeys];
3115
3155
  }
3156
+ async getQueryResultWithMetadata(queryHash) {
3157
+ logger20.trace("getQueryResultWithMetadata", { queryHash });
3158
+ const entry = this.queryResultCache[queryHash];
3159
+ if (!entry) {
3160
+ return null;
3161
+ }
3162
+ return {
3163
+ itemKeys: [...entry.itemKeys],
3164
+ // Return a copy to avoid external mutations
3165
+ metadata: entry.metadata
3166
+ };
3167
+ }
3116
3168
  async hasQueryResult(queryHash) {
3117
3169
  const entry = this.queryResultCache[queryHash];
3118
3170
  return !!entry;
@@ -3128,7 +3180,7 @@ var EnhancedMemoryCacheMap = class _EnhancedMemoryCacheMap extends CacheMap {
3128
3180
  this.queryResultsCacheSize = 0;
3129
3181
  }
3130
3182
  async invalidateItemKeys(keys) {
3131
- logger18.debug("invalidateItemKeys", { keys });
3183
+ logger20.debug("invalidateItemKeys", { keys });
3132
3184
  if (keys.length === 0) {
3133
3185
  return;
3134
3186
  }
@@ -3178,7 +3230,7 @@ var EnhancedMemoryCacheMap = class _EnhancedMemoryCacheMap extends CacheMap {
3178
3230
  });
3179
3231
  }
3180
3232
  async invalidateLocation(locations) {
3181
- logger18.debug("invalidateLocation", { locations });
3233
+ logger20.debug("invalidateLocation", { locations });
3182
3234
  let keysToInvalidate = [];
3183
3235
  if (locations.length === 0) {
3184
3236
  const allKeys = await this.keys();
@@ -3198,7 +3250,7 @@ var EnhancedMemoryCacheMap = class _EnhancedMemoryCacheMap extends CacheMap {
3198
3250
  const itemKeysSize = estimateValueSize(entry.itemKeys);
3199
3251
  const totalSize = hashSize + itemKeysSize;
3200
3252
  this.queryResultsCacheSize += totalSize;
3201
- logger18.trace("Added query result to size tracking", {
3253
+ logger20.trace("Added query result to size tracking", {
3202
3254
  queryHash,
3203
3255
  estimatedSize: totalSize,
3204
3256
  totalQueryCacheSize: this.queryResultsCacheSize
@@ -3214,7 +3266,7 @@ var EnhancedMemoryCacheMap = class _EnhancedMemoryCacheMap extends CacheMap {
3214
3266
  const itemKeysSize = estimateValueSize(entry.itemKeys);
3215
3267
  const totalSize = hashSize + itemKeysSize;
3216
3268
  this.queryResultsCacheSize = Math.max(0, this.queryResultsCacheSize - totalSize);
3217
- logger18.trace("Removed query result from size tracking", {
3269
+ logger20.trace("Removed query result from size tracking", {
3218
3270
  queryHash,
3219
3271
  estimatedSize: totalSize,
3220
3272
  totalQueryCacheSize: this.queryResultsCacheSize
@@ -3299,7 +3351,7 @@ import {
3299
3351
  isComKey as isComKey3,
3300
3352
  isQueryMatch as isQueryMatch3
3301
3353
  } from "@fjell/core";
3302
- var logger19 = logger_default.get("LocalStorageCacheMap");
3354
+ var logger21 = logger_default.get("LocalStorageCacheMap");
3303
3355
  var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
3304
3356
  implementationType = "browser/localStorage";
3305
3357
  keyPrefix;
@@ -3330,7 +3382,7 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
3330
3382
  }
3331
3383
  return keys;
3332
3384
  } catch (error) {
3333
- logger19.error("Error getting keys by prefix from localStorage", { prefix, error });
3385
+ logger21.error("Error getting keys by prefix from localStorage", { prefix, error });
3334
3386
  throw error;
3335
3387
  }
3336
3388
  }
@@ -3338,12 +3390,12 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
3338
3390
  try {
3339
3391
  const allEntries = this.collectCacheEntries();
3340
3392
  if (allEntries.length === 0) {
3341
- logger19.debug("No entries to clean up");
3393
+ logger21.debug("No entries to clean up");
3342
3394
  return false;
3343
3395
  }
3344
3396
  return this.removeOldestEntries(allEntries, aggressive);
3345
3397
  } catch (error) {
3346
- logger19.error("Failed to cleanup old localStorage entries", { error });
3398
+ logger21.error("Failed to cleanup old localStorage entries", { error });
3347
3399
  return false;
3348
3400
  }
3349
3401
  }
@@ -3369,7 +3421,7 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
3369
3421
  }
3370
3422
  }
3371
3423
  } catch (error) {
3372
- logger19.debug("Found corrupted entry during cleanup", { key, error });
3424
+ logger21.debug("Found corrupted entry during cleanup", { key, error });
3373
3425
  allEntries.push({ key, timestamp: 0, size: 0 });
3374
3426
  }
3375
3427
  }
@@ -3388,12 +3440,12 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
3388
3440
  removedCount++;
3389
3441
  removedSize += allEntries[i].size;
3390
3442
  } catch (error) {
3391
- logger19.error("Failed to remove entry during cleanup", { key: allEntries[i].key, error });
3443
+ logger21.error("Failed to remove entry during cleanup", { key: allEntries[i].key, error });
3392
3444
  }
3393
3445
  }
3394
3446
  if (removedCount > 0) {
3395
3447
  const cleanupType = aggressive ? "aggressive" : "normal";
3396
- logger19.info(`Cleaned up ${removedCount} old localStorage entries (${removedSize} bytes) using ${cleanupType} cleanup to free space`);
3448
+ logger21.info(`Cleaned up ${removedCount} old localStorage entries (${removedSize} bytes) using ${cleanupType} cleanup to free space`);
3397
3449
  }
3398
3450
  return removedCount > 0;
3399
3451
  }
@@ -3401,7 +3453,7 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
3401
3453
  return this.getAllKeysStartingWith(`${this.keyPrefix}:`);
3402
3454
  }
3403
3455
  async get(key) {
3404
- logger19.trace("get", { key });
3456
+ logger21.trace("get", { key });
3405
3457
  try {
3406
3458
  const storageKey = this.getStorageKey(key);
3407
3459
  let stored = localStorage.getItem(storageKey);
@@ -3416,18 +3468,18 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
3416
3468
  return parsed.value;
3417
3469
  }
3418
3470
  } catch (parseError) {
3419
- logger19.debug("Failed to parse stored value", { key, error: parseError });
3471
+ logger21.debug("Failed to parse stored value", { key, error: parseError });
3420
3472
  return null;
3421
3473
  }
3422
3474
  }
3423
3475
  return null;
3424
3476
  } catch (error) {
3425
- logger19.error("Error retrieving from localStorage", { key, error });
3477
+ logger21.error("Error retrieving from localStorage", { key, error });
3426
3478
  return null;
3427
3479
  }
3428
3480
  }
3429
3481
  async set(key, value) {
3430
- logger19.trace("set", { key, value });
3482
+ logger21.trace("set", { key, value });
3431
3483
  for (let attempt = 0; attempt < this.MAX_RETRY_ATTEMPTS; attempt++) {
3432
3484
  try {
3433
3485
  const storageKey = this.getStorageKey(key);
@@ -3438,12 +3490,12 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
3438
3490
  };
3439
3491
  localStorage.setItem(storageKey, JSON.stringify(toStore));
3440
3492
  if (attempt > 0) {
3441
- logger19.info(`Successfully stored item after ${attempt} retries`);
3493
+ logger21.info(`Successfully stored item after ${attempt} retries`);
3442
3494
  }
3443
3495
  return;
3444
3496
  } catch (error) {
3445
3497
  const isLastAttempt = attempt === this.MAX_RETRY_ATTEMPTS - 1;
3446
- logger19.error(`Error storing to localStorage (attempt ${attempt + 1}/${this.MAX_RETRY_ATTEMPTS})`, {
3498
+ logger21.error(`Error storing to localStorage (attempt ${attempt + 1}/${this.MAX_RETRY_ATTEMPTS})`, {
3447
3499
  key,
3448
3500
  value,
3449
3501
  error,
@@ -3470,30 +3522,30 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
3470
3522
  const parsed = JSON.parse(stored);
3471
3523
  return this.normalizedHashFunction(parsed.originalKey) === this.normalizedHashFunction(key);
3472
3524
  } catch (parseError) {
3473
- logger19.debug("Failed to parse stored value in includesKey", { key, error: parseError });
3525
+ logger21.debug("Failed to parse stored value in includesKey", { key, error: parseError });
3474
3526
  return false;
3475
3527
  }
3476
3528
  }
3477
3529
  return false;
3478
3530
  } catch (error) {
3479
- logger19.error("Error checking key in localStorage", { key, error });
3531
+ logger21.error("Error checking key in localStorage", { key, error });
3480
3532
  return false;
3481
3533
  }
3482
3534
  }
3483
3535
  async delete(key) {
3484
- logger19.trace("delete", { key });
3536
+ logger21.trace("delete", { key });
3485
3537
  try {
3486
3538
  const storageKey = this.getStorageKey(key);
3487
3539
  localStorage.removeItem(storageKey);
3488
3540
  } catch (error) {
3489
- logger19.error("Error deleting from localStorage", { key, error });
3541
+ logger21.error("Error deleting from localStorage", { key, error });
3490
3542
  throw error;
3491
3543
  }
3492
3544
  }
3493
3545
  async allIn(locations) {
3494
3546
  const allKeys = this.keys();
3495
3547
  if (locations.length === 0) {
3496
- logger19.debug("Returning all items, LocKeys is empty");
3548
+ logger21.debug("Returning all items, LocKeys is empty");
3497
3549
  const items = [];
3498
3550
  for (const key of await allKeys) {
3499
3551
  const item = await this.get(key);
@@ -3505,10 +3557,10 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
3505
3557
  } else {
3506
3558
  const locKeys = locations;
3507
3559
  const resolvedKeys = await allKeys;
3508
- logger19.debug("allIn", { locKeys, keys: resolvedKeys.length });
3560
+ logger21.debug("allIn", { locKeys, keys: resolvedKeys.length });
3509
3561
  const filteredKeys = resolvedKeys.filter((key) => key && isComKey3(key)).filter((key) => {
3510
3562
  const ComKey16 = key;
3511
- logger19.debug("Comparing Location Keys", {
3563
+ logger21.debug("Comparing Location Keys", {
3512
3564
  locKeys,
3513
3565
  ComKey: ComKey16
3514
3566
  });
@@ -3525,12 +3577,12 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
3525
3577
  }
3526
3578
  }
3527
3579
  async contains(query, locations) {
3528
- logger19.debug("contains", { query, locations });
3580
+ logger21.debug("contains", { query, locations });
3529
3581
  const items = await this.allIn(locations);
3530
3582
  return items.some((item) => isQueryMatch3(item, query));
3531
3583
  }
3532
3584
  async queryIn(query, locations = []) {
3533
- logger19.debug("queryIn", { query, locations });
3585
+ logger21.debug("queryIn", { query, locations });
3534
3586
  const items = await this.allIn(locations);
3535
3587
  return items.filter((item) => isQueryMatch3(item, query));
3536
3588
  }
@@ -3544,7 +3596,7 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
3544
3596
  return JSON.parse(stored);
3545
3597
  }
3546
3598
  } catch (parseError) {
3547
- logger19.debug("Skipping corrupted localStorage entry", { storageKey, error: parseError });
3599
+ logger21.debug("Skipping corrupted localStorage entry", { storageKey, error: parseError });
3548
3600
  }
3549
3601
  return null;
3550
3602
  }
@@ -3559,7 +3611,7 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
3559
3611
  }
3560
3612
  }
3561
3613
  } catch (error) {
3562
- logger19.error("Error getting keys from localStorage", { error });
3614
+ logger21.error("Error getting keys from localStorage", { error });
3563
3615
  }
3564
3616
  return keys;
3565
3617
  }
@@ -3574,37 +3626,38 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
3574
3626
  }
3575
3627
  }
3576
3628
  } catch (error) {
3577
- logger19.error("Error getting values from localStorage", { error });
3629
+ logger21.error("Error getting values from localStorage", { error });
3578
3630
  }
3579
3631
  return values;
3580
3632
  }
3581
3633
  async clear() {
3582
- logger19.debug("Clearing localStorage cache");
3634
+ logger21.debug("Clearing localStorage cache");
3583
3635
  try {
3584
3636
  const storageKeys = this.getAllStorageKeys();
3585
3637
  for (const storageKey of storageKeys) {
3586
3638
  localStorage.removeItem(storageKey);
3587
3639
  }
3588
3640
  } catch (error) {
3589
- logger19.error("Error clearing localStorage cache", { error });
3641
+ logger21.error("Error clearing localStorage cache", { error });
3590
3642
  throw error;
3591
3643
  }
3592
3644
  }
3593
3645
  // Query result caching methods implementation
3594
- async setQueryResult(queryHash, itemKeys) {
3595
- logger19.trace("setQueryResult", { queryHash, itemKeys });
3646
+ async setQueryResult(queryHash, itemKeys, metadata) {
3647
+ logger21.trace("setQueryResult", { queryHash, itemKeys, hasMetadata: !!metadata });
3596
3648
  const queryKey = `${this.keyPrefix}:query:${queryHash}`;
3597
3649
  const entry = {
3598
- itemKeys
3650
+ itemKeys,
3651
+ metadata
3599
3652
  };
3600
3653
  try {
3601
3654
  localStorage.setItem(queryKey, JSON.stringify(entry));
3602
3655
  } catch (error) {
3603
- logger19.error("Failed to store query result in localStorage", { queryHash, error });
3656
+ logger21.error("Failed to store query result in localStorage", { queryHash, error });
3604
3657
  }
3605
3658
  }
3606
3659
  async getQueryResult(queryHash) {
3607
- logger19.trace("getQueryResult", { queryHash });
3660
+ logger21.trace("getQueryResult", { queryHash });
3608
3661
  const queryKey = `${this.keyPrefix}:query:${queryHash}`;
3609
3662
  try {
3610
3663
  const data = localStorage.getItem(queryKey);
@@ -3617,7 +3670,36 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
3617
3670
  }
3618
3671
  return entry.itemKeys || null;
3619
3672
  } catch (error) {
3620
- logger19.error("Failed to retrieve query result from localStorage", { queryHash, error });
3673
+ logger21.error("Failed to retrieve query result from localStorage", { queryHash, error });
3674
+ return null;
3675
+ }
3676
+ }
3677
+ async getQueryResultWithMetadata(queryHash) {
3678
+ logger21.trace("getQueryResultWithMetadata", { queryHash });
3679
+ const queryKey = `${this.keyPrefix}:query:${queryHash}`;
3680
+ try {
3681
+ const data = localStorage.getItem(queryKey);
3682
+ if (!data) {
3683
+ return null;
3684
+ }
3685
+ const entry = JSON.parse(data);
3686
+ if (Array.isArray(entry)) {
3687
+ return { itemKeys: entry, metadata: void 0 };
3688
+ }
3689
+ if (entry.metadata) {
3690
+ if (entry.metadata.createdAt) {
3691
+ entry.metadata.createdAt = new Date(entry.metadata.createdAt);
3692
+ }
3693
+ if (entry.metadata.expiresAt) {
3694
+ entry.metadata.expiresAt = new Date(entry.metadata.expiresAt);
3695
+ }
3696
+ }
3697
+ return {
3698
+ itemKeys: entry.itemKeys || [],
3699
+ metadata: entry.metadata
3700
+ };
3701
+ } catch (error) {
3702
+ logger21.error("Failed to retrieve query result with metadata from localStorage", { queryHash, error });
3621
3703
  return null;
3622
3704
  }
3623
3705
  }
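
This hunk changes how LocalStorageCacheMap persists query results: each entry is now written as an object of the form { itemKeys, metadata } instead of a bare array of item keys, and the new getQueryResultWithMetadata reader still accepts the older array-only entries while reviving the metadata's createdAt/expiresAt strings back into Date objects after JSON.parse. A minimal sketch of that read path, with the type and function names invented here for illustration (the bundle itself works on untyped parsed objects):

    // Hypothetical shapes; the stored value is whatever JSON.stringify produced above.
    interface QueryEntryMetadata { createdAt?: Date; expiresAt?: Date; [k: string]: unknown }
    interface StoredQueryEntry { itemKeys: unknown[]; metadata?: QueryEntryMetadata }

    // Parse a raw localStorage value, tolerate the older array-only format, revive dates.
    function readQueryEntry(raw: string): StoredQueryEntry {
      const parsed = JSON.parse(raw);
      if (Array.isArray(parsed)) {
        // Entries written by earlier versions were plain itemKey arrays with no metadata.
        return { itemKeys: parsed, metadata: undefined };
      }
      if (parsed.metadata) {
        // Dates do not survive a JSON round-trip, so they come back as ISO strings.
        if (parsed.metadata.createdAt) parsed.metadata.createdAt = new Date(parsed.metadata.createdAt);
        if (parsed.metadata.expiresAt) parsed.metadata.expiresAt = new Date(parsed.metadata.expiresAt);
      }
      return { itemKeys: parsed.itemKeys || [], metadata: parsed.metadata };
    }

The same object-or-array handling appears below in the sessionStorage and IndexedDB implementations, so entries cached by the previous version remain readable after upgrading.
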
@@ -3626,21 +3708,21 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
3626
3708
  try {
3627
3709
  return localStorage.getItem(queryKey) !== null;
3628
3710
  } catch (error) {
3629
- logger19.error("Failed to check query result in localStorage", { queryHash, error });
3711
+ logger21.error("Failed to check query result in localStorage", { queryHash, error });
3630
3712
  return false;
3631
3713
  }
3632
3714
  }
3633
3715
  async deleteQueryResult(queryHash) {
3634
- logger19.trace("deleteQueryResult", { queryHash });
3716
+ logger21.trace("deleteQueryResult", { queryHash });
3635
3717
  const queryKey = `${this.keyPrefix}:query:${queryHash}`;
3636
3718
  try {
3637
3719
  localStorage.removeItem(queryKey);
3638
3720
  } catch (error) {
3639
- logger19.error("Failed to delete query result from localStorage", { queryHash, error });
3721
+ logger21.error("Failed to delete query result from localStorage", { queryHash, error });
3640
3722
  }
3641
3723
  }
3642
3724
  async invalidateItemKeys(keys) {
3643
- logger19.debug("invalidateItemKeys", { keys });
3725
+ logger21.debug("invalidateItemKeys", { keys });
3644
3726
  if (keys.length === 0) {
3645
3727
  return;
3646
3728
  }
@@ -3678,24 +3760,24 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
3678
3760
  }
3679
3761
  }
3680
3762
  } catch (error) {
3681
- logger19.debug("Failed to parse query result", { queryKey, error });
3763
+ logger21.debug("Failed to parse query result", { queryKey, error });
3682
3764
  }
3683
3765
  }
3684
3766
  queriesToRemove.forEach((queryKey) => {
3685
3767
  localStorage.removeItem(queryKey);
3686
3768
  });
3687
- logger19.debug("Selectively invalidated queries referencing affected keys", {
3769
+ logger21.debug("Selectively invalidated queries referencing affected keys", {
3688
3770
  affectedKeys: keys.length,
3689
3771
  queriesRemoved: queriesToRemove.length,
3690
3772
  totalQueries: queryKeys.length
3691
3773
  });
3692
3774
  } catch (error) {
3693
- logger19.error("Error during selective query invalidation, falling back to clearing all queries", { error });
3775
+ logger21.error("Error during selective query invalidation, falling back to clearing all queries", { error });
3694
3776
  await this.clearQueryResults();
3695
3777
  }
3696
3778
  }
3697
3779
  async invalidateLocation(locations) {
3698
- logger19.debug("invalidateLocation", { locations });
3780
+ logger21.debug("invalidateLocation", { locations });
3699
3781
  let keysToInvalidate = [];
3700
3782
  if (locations.length === 0) {
3701
3783
  const allKeys = await this.keys();
@@ -3710,7 +3792,7 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
3710
3792
  }
3711
3793
  }
3712
3794
  async clearQueryResults() {
3713
- logger19.trace("clearQueryResults");
3795
+ logger21.trace("clearQueryResults");
3714
3796
  const queryPrefix = `${this.keyPrefix}:query:`;
3715
3797
  try {
3716
3798
  const keysToRemove = this.getAllKeysStartingWith(queryPrefix);
@@ -3718,11 +3800,11 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
3718
3800
  try {
3719
3801
  localStorage.removeItem(key);
3720
3802
  } catch (error) {
3721
- logger19.error("Failed to remove query result from localStorage", { key, error });
3803
+ logger21.error("Failed to remove query result from localStorage", { key, error });
3722
3804
  }
3723
3805
  }
3724
3806
  } catch (error) {
3725
- logger19.error("Failed to clear query results from localStorage", { error });
3807
+ logger21.error("Failed to clear query results from localStorage", { error });
3726
3808
  }
3727
3809
  }
3728
3810
  // CacheMapMetadataProvider implementation
@@ -3734,13 +3816,13 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
3734
3816
  try {
3735
3817
  return JSON.parse(stored);
3736
3818
  } catch (e) {
3737
- logger19.debug("Invalid metadata JSON, treating as null", { key, error: e });
3819
+ logger21.debug("Invalid metadata JSON, treating as null", { key, error: e });
3738
3820
  return null;
3739
3821
  }
3740
3822
  }
3741
3823
  return null;
3742
3824
  } catch (error) {
3743
- logger19.error("Error getting metadata from localStorage", { key, error });
3825
+ logger21.error("Error getting metadata from localStorage", { key, error });
3744
3826
  throw error;
3745
3827
  }
3746
3828
  }
@@ -3750,12 +3832,12 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
3750
3832
  const metadataKey = `${this.keyPrefix}:metadata:${key}`;
3751
3833
  localStorage.setItem(metadataKey, JSON.stringify(metadata));
3752
3834
  if (attempt > 0) {
3753
- logger19.info(`Successfully stored metadata after ${attempt} retries`);
3835
+ logger21.info(`Successfully stored metadata after ${attempt} retries`);
3754
3836
  }
3755
3837
  return;
3756
3838
  } catch (error) {
3757
3839
  const isLastAttempt = attempt === this.MAX_RETRY_ATTEMPTS - 1;
3758
- logger19.error(`Error storing metadata to localStorage (attempt ${attempt + 1}/${this.MAX_RETRY_ATTEMPTS})`, {
3840
+ logger21.error(`Error storing metadata to localStorage (attempt ${attempt + 1}/${this.MAX_RETRY_ATTEMPTS})`, {
3759
3841
  key,
3760
3842
  error,
3761
3843
  isLastAttempt
@@ -3777,7 +3859,7 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
3777
3859
  const metadataKey = `${this.keyPrefix}:metadata:${key}`;
3778
3860
  localStorage.removeItem(metadataKey);
3779
3861
  } catch (error) {
3780
- logger19.error("Error deleting metadata from localStorage", { key, error });
3862
+ logger21.error("Error deleting metadata from localStorage", { key, error });
3781
3863
  throw error;
3782
3864
  }
3783
3865
  }
@@ -3796,11 +3878,11 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
3796
3878
  metadata.set(metadataKey, parsed);
3797
3879
  }
3798
3880
  } catch (error) {
3799
- logger19.debug("Skipping invalid metadata entry", { key, error });
3881
+ logger21.debug("Skipping invalid metadata entry", { key, error });
3800
3882
  }
3801
3883
  }
3802
3884
  } catch (error) {
3803
- logger19.error("Error getting metadata from localStorage", { error });
3885
+ logger21.error("Error getting metadata from localStorage", { error });
3804
3886
  throw error;
3805
3887
  }
3806
3888
  return metadata;
@@ -3811,7 +3893,7 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
3811
3893
  const keysToDelete = this.getAllKeysStartingWith(metadataPrefix);
3812
3894
  keysToDelete.forEach((key) => localStorage.removeItem(key));
3813
3895
  } catch (error) {
3814
- logger19.error("Error clearing metadata from localStorage", { error });
3896
+ logger21.error("Error clearing metadata from localStorage", { error });
3815
3897
  throw error;
3816
3898
  }
3817
3899
  }
@@ -3838,16 +3920,16 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
3838
3920
  itemCount++;
3839
3921
  }
3840
3922
  } catch (error) {
3841
- logger19.debug("Invalid entry in getCurrentSize", { key, error });
3923
+ logger21.debug("Invalid entry in getCurrentSize", { key, error });
3842
3924
  }
3843
3925
  }
3844
3926
  } catch (error) {
3845
- logger19.debug("Size calculation failed, using string length", { key, error });
3927
+ logger21.debug("Size calculation failed, using string length", { key, error });
3846
3928
  sizeBytes += value.length;
3847
3929
  }
3848
3930
  }
3849
3931
  } catch (error) {
3850
- logger19.error("Error calculating size from localStorage", { error });
3932
+ logger21.error("Error calculating size from localStorage", { error });
3851
3933
  throw error;
3852
3934
  }
3853
3935
  return { itemCount, sizeBytes };
@@ -3868,7 +3950,7 @@ import {
3868
3950
  isQueryMatch as isQueryMatch4
3869
3951
  } from "@fjell/core";
3870
3952
  import safeStringify2 from "fast-safe-stringify";
3871
- var logger20 = logger_default.get("SessionStorageCacheMap");
3953
+ var logger22 = logger_default.get("SessionStorageCacheMap");
3872
3954
  var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
3873
3955
  implementationType = "browser/sessionStorage";
3874
3956
  keyPrefix;
@@ -3896,7 +3978,7 @@ var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
3896
3978
  }
3897
3979
  }
3898
3980
  } catch (error) {
3899
- logger20.error("Error getting keys from sessionStorage", { error });
3981
+ logger22.error("Error getting keys from sessionStorage", { error });
3900
3982
  }
3901
3983
  return keys;
3902
3984
  }
@@ -3919,7 +4001,7 @@ var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
3919
4001
  }
3920
4002
  }
3921
4003
  async get(key) {
3922
- logger20.trace("get", { key });
4004
+ logger22.trace("get", { key });
3923
4005
  try {
3924
4006
  const currentHash = this.normalizedHashFunction(key);
3925
4007
  if (this.hasCollisionForHash(currentHash)) {
@@ -3939,14 +4021,14 @@ var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
3939
4021
  }
3940
4022
  return null;
3941
4023
  } catch (error) {
3942
- logger20.error("Error retrieving from sessionStorage", { key, error });
4024
+ logger22.error("Error retrieving from sessionStorage", { key, error });
3943
4025
  return null;
3944
4026
  }
3945
4027
  }
3946
4028
  async set(key, value) {
3947
4029
  try {
3948
4030
  const storageKey = this.getStorageKey(key);
3949
- logger20.trace("set", { storageKey });
4031
+ logger22.trace("set", { storageKey });
3950
4032
  const toStore = {
3951
4033
  originalKey: key,
3952
4034
  value,
@@ -3956,7 +4038,7 @@ var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
3956
4038
  const jsonString = safeStringify2(toStore);
3957
4039
  sessionStorage.setItem(storageKey, jsonString);
3958
4040
  } catch (error) {
3959
- logger20.error("Error storing to sessionStorage", { errorMessage: error?.message });
4041
+ logger22.error("Error storing to sessionStorage", { errorMessage: error?.message });
3960
4042
  throw new Error(`Failed to store item in sessionStorage: ${error}`);
3961
4043
  }
3962
4044
  }
@@ -3977,23 +4059,23 @@ var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
3977
4059
  }
3978
4060
  return false;
3979
4061
  } catch (error) {
3980
- logger20.error("Error checking key in sessionStorage", { key, error });
4062
+ logger22.error("Error checking key in sessionStorage", { key, error });
3981
4063
  return false;
3982
4064
  }
3983
4065
  }
3984
4066
  async delete(key) {
3985
- logger20.trace("delete", { key });
4067
+ logger22.trace("delete", { key });
3986
4068
  try {
3987
4069
  const storageKey = this.getStorageKey(key);
3988
4070
  sessionStorage.removeItem(storageKey);
3989
4071
  } catch (error) {
3990
- logger20.error("Error deleting from sessionStorage", { key, error });
4072
+ logger22.error("Error deleting from sessionStorage", { key, error });
3991
4073
  }
3992
4074
  }
3993
4075
  async allIn(locations) {
3994
4076
  const allKeys = this.keys();
3995
4077
  if (locations.length === 0) {
3996
- logger20.debug("Returning all items, LocKeys is empty");
4078
+ logger22.debug("Returning all items, LocKeys is empty");
3997
4079
  const items = [];
3998
4080
  for (const key of await allKeys) {
3999
4081
  const item = await this.get(key);
@@ -4005,10 +4087,10 @@ var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
4005
4087
  } else {
4006
4088
  const locKeys = locations;
4007
4089
  const resolvedKeys = await allKeys;
4008
- logger20.debug("allIn", { locKeys, keys: resolvedKeys.length });
4090
+ logger22.debug("allIn", { locKeys, keys: resolvedKeys.length });
4009
4091
  const filteredKeys = resolvedKeys.filter((key) => key && isComKey4(key)).filter((key) => {
4010
4092
  const ComKey16 = key;
4011
- logger20.debug("Comparing Location Keys", {
4093
+ logger22.debug("Comparing Location Keys", {
4012
4094
  locKeys,
4013
4095
  ComKey: ComKey16
4014
4096
  });
@@ -4025,12 +4107,12 @@ var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
4025
4107
  }
4026
4108
  }
4027
4109
  async contains(query, locations) {
4028
- logger20.debug("contains", { query, locations });
4110
+ logger22.debug("contains", { query, locations });
4029
4111
  const items = await this.allIn(locations);
4030
4112
  return items.some((item) => isQueryMatch4(item, query));
4031
4113
  }
4032
4114
  async queryIn(query, locations = []) {
4033
- logger20.debug("queryIn", { query, locations });
4115
+ logger22.debug("queryIn", { query, locations });
4034
4116
  const items = await this.allIn(locations);
4035
4117
  return items.filter((item) => isQueryMatch4(item, query));
4036
4118
  }
@@ -4050,11 +4132,11 @@ var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
4050
4132
  keys.push(parsed.originalKey);
4051
4133
  }
4052
4134
  } catch (itemError) {
4053
- logger20.trace("Skipping invalid storage item", { storageKey, error: itemError });
4135
+ logger22.trace("Skipping invalid storage item", { storageKey, error: itemError });
4054
4136
  }
4055
4137
  }
4056
4138
  } catch (error) {
4057
- logger20.error("Error getting keys from sessionStorage", { error });
4139
+ logger22.error("Error getting keys from sessionStorage", { error });
4058
4140
  }
4059
4141
  return keys;
4060
4142
  }
@@ -4071,41 +4153,42 @@ var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
4071
4153
  values.push(parsed.value);
4072
4154
  }
4073
4155
  } catch (itemError) {
4074
- logger20.trace("Skipping invalid storage item for values", { storageKey, error: itemError });
4156
+ logger22.trace("Skipping invalid storage item for values", { storageKey, error: itemError });
4075
4157
  }
4076
4158
  }
4077
4159
  } catch (error) {
4078
- logger20.error("Error getting values from sessionStorage", { error });
4160
+ logger22.error("Error getting values from sessionStorage", { error });
4079
4161
  }
4080
4162
  return values;
4081
4163
  }
4082
4164
  async clear() {
4083
- logger20.debug("Clearing sessionStorage cache");
4165
+ logger22.debug("Clearing sessionStorage cache");
4084
4166
  try {
4085
4167
  const storageKeys = this.getAllStorageKeys();
4086
4168
  for (const storageKey of storageKeys) {
4087
4169
  sessionStorage.removeItem(storageKey);
4088
4170
  }
4089
4171
  } catch (error) {
4090
- logger20.error("Error clearing sessionStorage cache", { error });
4172
+ logger22.error("Error clearing sessionStorage cache", { error });
4091
4173
  }
4092
4174
  }
4093
4175
  // Query result caching methods implementation
4094
- async setQueryResult(queryHash, itemKeys) {
4095
- logger20.trace("setQueryResult", { queryHash, itemKeys });
4176
+ async setQueryResult(queryHash, itemKeys, metadata) {
4177
+ logger22.trace("setQueryResult", { queryHash, itemKeys, hasMetadata: !!metadata });
4096
4178
  const queryKey = `${this.keyPrefix}:query:${queryHash}`;
4097
4179
  const entry = {
4098
- itemKeys
4180
+ itemKeys,
4181
+ metadata
4099
4182
  };
4100
4183
  try {
4101
4184
  const jsonString = safeStringify2(entry);
4102
4185
  sessionStorage.setItem(queryKey, jsonString);
4103
4186
  } catch (error) {
4104
- logger20.error("Failed to store query result in sessionStorage", { queryHash, error });
4187
+ logger22.error("Failed to store query result in sessionStorage", { queryHash, error });
4105
4188
  }
4106
4189
  }
4107
4190
  async getQueryResult(queryHash) {
4108
- logger20.trace("getQueryResult", { queryHash });
4191
+ logger22.trace("getQueryResult", { queryHash });
4109
4192
  const queryKey = `${this.keyPrefix}:query:${queryHash}`;
4110
4193
  try {
4111
4194
  const data = sessionStorage.getItem(queryKey);
@@ -4118,7 +4201,36 @@ var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
4118
4201
  }
4119
4202
  return entry.itemKeys || null;
4120
4203
  } catch (error) {
4121
- logger20.error("Failed to retrieve query result from sessionStorage", { queryHash, error });
4204
+ logger22.error("Failed to retrieve query result from sessionStorage", { queryHash, error });
4205
+ return null;
4206
+ }
4207
+ }
4208
+ async getQueryResultWithMetadata(queryHash) {
4209
+ logger22.trace("getQueryResultWithMetadata", { queryHash });
4210
+ const queryKey = `${this.keyPrefix}:query:${queryHash}`;
4211
+ try {
4212
+ const data = sessionStorage.getItem(queryKey);
4213
+ if (!data) {
4214
+ return null;
4215
+ }
4216
+ const entry = JSON.parse(data);
4217
+ if (Array.isArray(entry)) {
4218
+ return { itemKeys: entry, metadata: void 0 };
4219
+ }
4220
+ if (entry.metadata) {
4221
+ if (entry.metadata.createdAt) {
4222
+ entry.metadata.createdAt = new Date(entry.metadata.createdAt);
4223
+ }
4224
+ if (entry.metadata.expiresAt) {
4225
+ entry.metadata.expiresAt = new Date(entry.metadata.expiresAt);
4226
+ }
4227
+ }
4228
+ return {
4229
+ itemKeys: entry.itemKeys || [],
4230
+ metadata: entry.metadata
4231
+ };
4232
+ } catch (error) {
4233
+ logger22.error("Failed to retrieve query result with metadata from sessionStorage", { queryHash, error });
4122
4234
  return null;
4123
4235
  }
4124
4236
  }
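
The sessionStorage implementation mirrors the localStorage change above: setQueryResult gains an optional third metadata argument and getQueryResultWithMetadata returns the cached item keys together with that metadata. A usage sketch, assuming an already-constructed cache map instance; the query hash, item keys, and TTL choice are illustrative values, not anything the library dictates, and only the two date fields visible in this diff are modeled on the metadata:

    // "cache" stands in for a SessionStorageCacheMap (or LocalStorageCacheMap) instance.
    declare const cache: {
      setQueryResult(hash: string, itemKeys: unknown[], metadata?: { createdAt: Date; expiresAt: Date }): Promise<void>;
      getQueryResultWithMetadata(hash: string): Promise<{ itemKeys: unknown[]; metadata?: { createdAt: Date; expiresAt: Date } } | null>;
      deleteQueryResult(hash: string): Promise<void>;
    };

    async function example() {
      const metadata = {
        createdAt: new Date(),
        expiresAt: new Date(Date.now() + 5 * 60 * 1000) // e.g. keep this query result for five minutes
      };
      await cache.setQueryResult("query-hash-abc123", ["item-key-1", "item-key-2"], metadata);

      const result = await cache.getQueryResultWithMetadata("query-hash-abc123");
      if (result?.metadata?.expiresAt && result.metadata.expiresAt < new Date()) {
        await cache.deleteQueryResult("query-hash-abc123"); // callers can drop expired entries themselves
      }
    }
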
@@ -4127,21 +4239,21 @@ var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
4127
4239
  try {
4128
4240
  return sessionStorage.getItem(queryKey) !== null;
4129
4241
  } catch (error) {
4130
- logger20.error("Failed to check query result in sessionStorage", { queryHash, error });
4242
+ logger22.error("Failed to check query result in sessionStorage", { queryHash, error });
4131
4243
  return false;
4132
4244
  }
4133
4245
  }
4134
4246
  async deleteQueryResult(queryHash) {
4135
- logger20.trace("deleteQueryResult", { queryHash });
4247
+ logger22.trace("deleteQueryResult", { queryHash });
4136
4248
  const queryKey = `${this.keyPrefix}:query:${queryHash}`;
4137
4249
  try {
4138
4250
  sessionStorage.removeItem(queryKey);
4139
4251
  } catch (error) {
4140
- logger20.error("Failed to delete query result from sessionStorage", { queryHash, error });
4252
+ logger22.error("Failed to delete query result from sessionStorage", { queryHash, error });
4141
4253
  }
4142
4254
  }
4143
4255
  async clearQueryResults() {
4144
- logger20.trace("clearQueryResults");
4256
+ logger22.trace("clearQueryResults");
4145
4257
  const queryPrefix = `${this.keyPrefix}:query:`;
4146
4258
  try {
4147
4259
  const keysToRemove = [];
@@ -4153,7 +4265,7 @@ var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
4153
4265
  }
4154
4266
  keysToRemove.forEach((key) => sessionStorage.removeItem(key));
4155
4267
  } catch (error) {
4156
- logger20.error("Failed to clear query results from sessionStorage", { error });
4268
+ logger22.error("Failed to clear query results from sessionStorage", { error });
4157
4269
  }
4158
4270
  }
4159
4271
  // CacheMapMetadataProvider implementation
@@ -4200,7 +4312,7 @@ var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
4200
4312
  }
4201
4313
  return metadata;
4202
4314
  } catch (error) {
4203
- logger20.error("Error getting all metadata from sessionStorage", { error });
4315
+ logger22.error("Error getting all metadata from sessionStorage", { error });
4204
4316
  return metadata;
4205
4317
  }
4206
4318
  }
@@ -4220,7 +4332,7 @@ var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
4220
4332
  }
4221
4333
  // Invalidation methods
4222
4334
  async invalidateItemKeys(keys) {
4223
- logger20.debug("invalidateItemKeys", { keys });
4335
+ logger22.debug("invalidateItemKeys", { keys });
4224
4336
  if (keys.length === 0) {
4225
4337
  return;
4226
4338
  }
@@ -4264,24 +4376,24 @@ var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
4264
4376
  }
4265
4377
  }
4266
4378
  } catch (error) {
4267
- logger20.debug("Failed to parse query result", { queryKey, error });
4379
+ logger22.debug("Failed to parse query result", { queryKey, error });
4268
4380
  }
4269
4381
  }
4270
4382
  queriesToRemove.forEach((queryKey) => {
4271
4383
  sessionStorage.removeItem(queryKey);
4272
4384
  });
4273
- logger20.debug("Selectively invalidated queries referencing affected keys", {
4385
+ logger22.debug("Selectively invalidated queries referencing affected keys", {
4274
4386
  affectedKeys: keys.length,
4275
4387
  queriesRemoved: queriesToRemove.length,
4276
4388
  totalQueries: queryKeys.length
4277
4389
  });
4278
4390
  } catch (error) {
4279
- logger20.error("Error during selective query invalidation, falling back to clearing all queries", { error });
4391
+ logger22.error("Error during selective query invalidation, falling back to clearing all queries", { error });
4280
4392
  await this.clearQueryResults();
4281
4393
  }
4282
4394
  }
4283
4395
  async invalidateLocation(locations) {
4284
- logger20.debug("invalidateLocation", { locations });
4396
+ logger22.debug("invalidateLocation", { locations });
4285
4397
  let keysToInvalidate = [];
4286
4398
  if (locations.length === 0) {
4287
4399
  const allKeys = await this.keys();
@@ -4347,7 +4459,7 @@ import {
4347
4459
  isQueryMatch as isQueryMatch5
4348
4460
  } from "@fjell/core";
4349
4461
  import safeStringify3 from "fast-safe-stringify";
4350
- var logger21 = logger_default.get("AsyncIndexDBCacheMap");
4462
+ var logger23 = logger_default.get("AsyncIndexDBCacheMap");
4351
4463
  var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4352
4464
  types;
4353
4465
  dbName;
@@ -4373,19 +4485,19 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4373
4485
  }
4374
4486
  const request = indexedDB.open(this.dbName, this.version);
4375
4487
  request.onerror = () => {
4376
- logger21.error("Error opening IndexedDB", { error: request.error });
4488
+ logger23.error("Error opening IndexedDB", { error: request.error });
4377
4489
  reject(request.error);
4378
4490
  };
4379
4491
  request.onsuccess = () => {
4380
- logger21.debug("IndexedDB opened successfully");
4492
+ logger23.debug("IndexedDB opened successfully");
4381
4493
  resolve(request.result);
4382
4494
  };
4383
4495
  request.onupgradeneeded = (event) => {
4384
- logger21.debug("IndexedDB upgrade needed");
4496
+ logger23.debug("IndexedDB upgrade needed");
4385
4497
  const db = event.target.result;
4386
4498
  if (!db.objectStoreNames.contains(this.storeName)) {
4387
4499
  db.createObjectStore(this.storeName);
4388
- logger21.debug("Created object store", { storeName: this.storeName });
4500
+ logger23.debug("Created object store", { storeName: this.storeName });
4389
4501
  }
4390
4502
  };
4391
4503
  });
@@ -4396,7 +4508,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4396
4508
  return this.normalizedHashFunction(key);
4397
4509
  }
4398
4510
  async get(key) {
4399
- logger21.trace("get", { key });
4511
+ logger23.trace("get", { key });
4400
4512
  try {
4401
4513
  const db = await this.getDB();
4402
4514
  const transaction = db.transaction([this.storeName], "readonly");
@@ -4405,7 +4517,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4405
4517
  return new Promise((resolve, reject) => {
4406
4518
  const request = store.get(storageKey);
4407
4519
  request.onerror = () => {
4408
- logger21.error("Error getting from IndexedDB", { key, error: request.error });
4520
+ logger23.error("Error getting from IndexedDB", { key, error: request.error });
4409
4521
  reject(request.error);
4410
4522
  };
4411
4523
  request.onsuccess = () => {
@@ -4418,7 +4530,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4418
4530
  };
4419
4531
  });
4420
4532
  } catch (error) {
4421
- logger21.error("Error in IndexedDB get operation", { key, error });
4533
+ logger23.error("Error in IndexedDB get operation", { key, error });
4422
4534
  return null;
4423
4535
  }
4424
4536
  }
@@ -4426,7 +4538,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4426
4538
  * Get both the value and metadata for an item
4427
4539
  */
4428
4540
  async getWithMetadata(key) {
4429
- logger21.trace("getWithMetadata", { key });
4541
+ logger23.trace("getWithMetadata", { key });
4430
4542
  try {
4431
4543
  const db = await this.getDB();
4432
4544
  const transaction = db.transaction([this.storeName], "readonly");
@@ -4435,7 +4547,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4435
4547
  return new Promise((resolve, reject) => {
4436
4548
  const request = store.get(storageKey);
4437
4549
  request.onerror = () => {
4438
- logger21.error("Error getting from IndexedDB", { key, error: request.error });
4550
+ logger23.error("Error getting from IndexedDB", { key, error: request.error });
4439
4551
  reject(request.error);
4440
4552
  };
4441
4553
  request.onsuccess = () => {
@@ -4451,12 +4563,12 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4451
4563
  };
4452
4564
  });
4453
4565
  } catch (error) {
4454
- logger21.error("Error in IndexedDB getWithMetadata operation", { key, error });
4566
+ logger23.error("Error in IndexedDB getWithMetadata operation", { key, error });
4455
4567
  return null;
4456
4568
  }
4457
4569
  }
4458
4570
  async set(key, value, metadata) {
4459
- logger21.trace("set", { key, value, hasMetadata: !!metadata });
4571
+ logger23.trace("set", { key, value, hasMetadata: !!metadata });
4460
4572
  try {
4461
4573
  const db = await this.getDB();
4462
4574
  const transaction = db.transaction([this.storeName], "readwrite");
@@ -4471,7 +4583,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4471
4583
  return new Promise((resolve, reject) => {
4472
4584
  const request = store.put(storedItem, storageKey);
4473
4585
  request.onerror = () => {
4474
- logger21.error("Error setting in IndexedDB", { key, value, error: request.error });
4586
+ logger23.error("Error setting in IndexedDB", { key, value, error: request.error });
4475
4587
  reject(request.error);
4476
4588
  };
4477
4589
  request.onsuccess = () => {
@@ -4479,7 +4591,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4479
4591
  };
4480
4592
  });
4481
4593
  } catch (error) {
4482
- logger21.error("Error in IndexedDB set operation", { key, value, error });
4594
+ logger23.error("Error in IndexedDB set operation", { key, value, error });
4483
4595
  throw new Error(`Failed to store item in IndexedDB: ${error}`);
4484
4596
  }
4485
4597
  }
@@ -4487,16 +4599,16 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4487
4599
  * Update only the metadata for an existing item
4488
4600
  */
4489
4601
  async setMetadata(key, metadata) {
4490
- logger21.trace("setMetadata", { key, metadata });
4602
+ logger23.trace("setMetadata", { key, metadata });
4491
4603
  try {
4492
4604
  const existing = await this.getWithMetadata(key);
4493
4605
  if (existing) {
4494
4606
  await this.set(key, existing.value, metadata);
4495
4607
  } else {
4496
- logger21.warning("Attempted to set metadata for non-existent item", { key });
4608
+ logger23.warning("Attempted to set metadata for non-existent item", { key });
4497
4609
  }
4498
4610
  } catch (error) {
4499
- logger21.error("Error in IndexedDB setMetadata operation", { key, error });
4611
+ logger23.error("Error in IndexedDB setMetadata operation", { key, error });
4500
4612
  throw new Error(`Failed to update metadata in IndexedDB: ${error}`);
4501
4613
  }
4502
4614
  }
@@ -4509,7 +4621,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4509
4621
  return new Promise((resolve, reject) => {
4510
4622
  const request = store.get(storageKey);
4511
4623
  request.onerror = () => {
4512
- logger21.error("Error checking key in IndexedDB", { key, error: request.error });
4624
+ logger23.error("Error checking key in IndexedDB", { key, error: request.error });
4513
4625
  reject(request.error);
4514
4626
  };
4515
4627
  request.onsuccess = () => {
@@ -4523,12 +4635,12 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4523
4635
  };
4524
4636
  });
4525
4637
  } catch (error) {
4526
- logger21.error("Error in IndexedDB includesKey operation", { key, error });
4638
+ logger23.error("Error in IndexedDB includesKey operation", { key, error });
4527
4639
  return false;
4528
4640
  }
4529
4641
  }
4530
4642
  async delete(key) {
4531
- logger21.trace("delete", { key });
4643
+ logger23.trace("delete", { key });
4532
4644
  try {
4533
4645
  const db = await this.getDB();
4534
4646
  const transaction = db.transaction([this.storeName], "readwrite");
@@ -4537,7 +4649,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4537
4649
  return new Promise((resolve, reject) => {
4538
4650
  const request = store.delete(storageKey);
4539
4651
  request.onerror = () => {
4540
- logger21.error("Error deleting from IndexedDB", { key, error: request.error });
4652
+ logger23.error("Error deleting from IndexedDB", { key, error: request.error });
4541
4653
  reject(request.error);
4542
4654
  };
4543
4655
  request.onsuccess = () => {
@@ -4545,22 +4657,22 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4545
4657
  };
4546
4658
  });
4547
4659
  } catch (error) {
4548
- logger21.error("Error in IndexedDB delete operation", { key, error });
4660
+ logger23.error("Error in IndexedDB delete operation", { key, error });
4549
4661
  }
4550
4662
  }
4551
4663
  async allIn(locations) {
4552
4664
  const allKeys = await this.keys();
4553
4665
  if (locations.length === 0) {
4554
- logger21.debug("Returning all items, LocKeys is empty");
4666
+ logger23.debug("Returning all items, LocKeys is empty");
4555
4667
  const promises = allKeys.map((key) => this.get(key));
4556
4668
  const results = await Promise.all(promises);
4557
4669
  return results.filter((item) => item !== null);
4558
4670
  } else {
4559
4671
  const locKeys = locations;
4560
- logger21.debug("allIn", { locKeys, keys: allKeys.length });
4672
+ logger23.debug("allIn", { locKeys, keys: allKeys.length });
4561
4673
  const filteredKeys = allKeys.filter((key) => key && isComKey5(key)).filter((key) => {
4562
4674
  const ComKey16 = key;
4563
- logger21.debug("Comparing Location Keys", {
4675
+ logger23.debug("Comparing Location Keys", {
4564
4676
  locKeys,
4565
4677
  ComKey: ComKey16
4566
4678
  });
@@ -4572,12 +4684,12 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4572
4684
  }
4573
4685
  }
4574
4686
  async contains(query, locations) {
4575
- logger21.debug("contains", { query, locations });
4687
+ logger23.debug("contains", { query, locations });
4576
4688
  const items = await this.allIn(locations);
4577
4689
  return items.some((item) => isQueryMatch5(item, query));
4578
4690
  }
4579
4691
  async queryIn(query, locations = []) {
4580
- logger21.debug("queryIn", { query, locations });
4692
+ logger23.debug("queryIn", { query, locations });
4581
4693
  const items = await this.allIn(locations);
4582
4694
  return items.filter((item) => isQueryMatch5(item, query));
4583
4695
  }
@@ -4593,7 +4705,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4593
4705
  return new Promise((resolve, reject) => {
4594
4706
  const request = store.openCursor();
4595
4707
  request.onerror = () => {
4596
- logger21.error("Error getting keys from IndexedDB", { error: request.error });
4708
+ logger23.error("Error getting keys from IndexedDB", { error: request.error });
4597
4709
  reject(request.error);
4598
4710
  };
4599
4711
  request.onsuccess = (event) => {
@@ -4608,7 +4720,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4608
4720
  };
4609
4721
  });
4610
4722
  } catch (error) {
4611
- logger21.error("Error in IndexedDB keys operation", { error });
4723
+ logger23.error("Error in IndexedDB keys operation", { error });
4612
4724
  return [];
4613
4725
  }
4614
4726
  }
@@ -4624,7 +4736,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4624
4736
  return new Promise((resolve, reject) => {
4625
4737
  const request = store.openCursor();
4626
4738
  request.onerror = () => {
4627
- logger21.error("Error getting metadata from IndexedDB", { error: request.error });
4739
+ logger23.error("Error getting metadata from IndexedDB", { error: request.error });
4628
4740
  reject(request.error);
4629
4741
  };
4630
4742
  request.onsuccess = (event) => {
@@ -4642,7 +4754,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4642
4754
  };
4643
4755
  });
4644
4756
  } catch (error) {
4645
- logger21.error("Error in IndexedDB getAllMetadata operation", { error });
4757
+ logger23.error("Error in IndexedDB getAllMetadata operation", { error });
4646
4758
  return metadataMap;
4647
4759
  }
4648
4760
  }
@@ -4655,7 +4767,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4655
4767
  return new Promise((resolve, reject) => {
4656
4768
  const request = store.openCursor();
4657
4769
  request.onerror = () => {
4658
- logger21.error("Error getting values from IndexedDB", { error: request.error });
4770
+ logger23.error("Error getting values from IndexedDB", { error: request.error });
4659
4771
  reject(request.error);
4660
4772
  };
4661
4773
  request.onsuccess = (event) => {
@@ -4670,12 +4782,12 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4670
4782
  };
4671
4783
  });
4672
4784
  } catch (error) {
4673
- logger21.error("Error in IndexedDB values operation", { error });
4785
+ logger23.error("Error in IndexedDB values operation", { error });
4674
4786
  return [];
4675
4787
  }
4676
4788
  }
4677
4789
  async clear() {
4678
- logger21.debug("Clearing IndexedDB cache");
4790
+ logger23.debug("Clearing IndexedDB cache");
4679
4791
  try {
4680
4792
  const db = await this.getDB();
4681
4793
  const transaction = db.transaction([this.storeName], "readwrite");
@@ -4683,7 +4795,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4683
4795
  return new Promise((resolve, reject) => {
4684
4796
  const request = store.clear();
4685
4797
  request.onerror = () => {
4686
- logger21.error("Error clearing IndexedDB cache", { error: request.error });
4798
+ logger23.error("Error clearing IndexedDB cache", { error: request.error });
4687
4799
  reject(request.error);
4688
4800
  };
4689
4801
  request.onsuccess = () => {
@@ -4691,17 +4803,17 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4691
4803
  };
4692
4804
  });
4693
4805
  } catch (error) {
4694
- logger21.error("Error in IndexedDB clear operation", { error });
4806
+ logger23.error("Error in IndexedDB clear operation", { error });
4695
4807
  }
4696
4808
  }
4697
4809
  // Async Query result caching methods
4698
- async setQueryResult(queryHash, itemKeys) {
4699
- logger21.trace("setQueryResult", { queryHash, itemKeys });
4810
+ async setQueryResult(queryHash, itemKeys, metadata) {
4811
+ logger23.trace("setQueryResult", { queryHash, itemKeys, hasMetadata: !!metadata });
4700
4812
  try {
4701
4813
  return new Promise((resolve, reject) => {
4702
4814
  const request = indexedDB.open(this.dbName, this.version);
4703
4815
  request.onerror = () => {
4704
- logger21.error("Failed to open database for setQueryResult", { error: request.error });
4816
+ logger23.error("Failed to open database for setQueryResult", { error: request.error });
4705
4817
  reject(request.error);
4706
4818
  };
4707
4819
  request.onsuccess = () => {
@@ -4709,12 +4821,13 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4709
4821
  const transaction = db.transaction([this.storeName], "readwrite");
4710
4822
  const store = transaction.objectStore(this.storeName);
4711
4823
  const entry = {
4712
- itemKeys
4824
+ itemKeys,
4825
+ metadata: metadata || void 0
4713
4826
  };
4714
4827
  const queryKey = `query:${queryHash}`;
4715
4828
  const putRequest = store.put(safeStringify3(entry), queryKey);
4716
4829
  putRequest.onerror = () => {
4717
- logger21.error("Failed to store query result", { queryHash, error: putRequest.error });
4830
+ logger23.error("Failed to store query result", { queryHash, error: putRequest.error });
4718
4831
  reject(putRequest.error);
4719
4832
  };
4720
4833
  putRequest.onsuccess = () => {
@@ -4723,17 +4836,17 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4723
4836
  };
4724
4837
  });
4725
4838
  } catch (error) {
4726
- logger21.error("Error in setQueryResult", { queryHash, error });
4839
+ logger23.error("Error in setQueryResult", { queryHash, error });
4727
4840
  throw error;
4728
4841
  }
4729
4842
  }
4730
4843
  async getQueryResult(queryHash) {
4731
- logger21.trace("getQueryResult", { queryHash });
4844
+ logger23.trace("getQueryResult", { queryHash });
4732
4845
  try {
4733
4846
  return new Promise((resolve, reject) => {
4734
4847
  const request = indexedDB.open(this.dbName, this.version);
4735
4848
  request.onerror = () => {
4736
- logger21.error("Failed to open database for getQueryResult", { error: request.error });
4849
+ logger23.error("Failed to open database for getQueryResult", { error: request.error });
4737
4850
  reject(request.error);
4738
4851
  };
4739
4852
  request.onsuccess = () => {
@@ -4743,7 +4856,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4743
4856
  const queryKey = `query:${queryHash}`;
4744
4857
  const getRequest = store.get(queryKey);
4745
4858
  getRequest.onerror = () => {
4746
- logger21.error("Failed to retrieve query result", { queryHash, error: getRequest.error });
4859
+ logger23.error("Failed to retrieve query result", { queryHash, error: getRequest.error });
4747
4860
  reject(getRequest.error);
4748
4861
  };
4749
4862
  getRequest.onsuccess = () => {
@@ -4760,34 +4873,89 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4760
4873
  }
4761
4874
  resolve(entry.itemKeys || null);
4762
4875
  } catch (parseError) {
4763
- logger21.error("Failed to parse query result", { queryHash, error: parseError });
4876
+ logger23.error("Failed to parse query result", { queryHash, error: parseError });
4764
4877
  resolve(null);
4765
4878
  }
4766
4879
  };
4767
4880
  };
4768
4881
  });
4769
4882
  } catch (error) {
4770
- logger21.error("Error in getQueryResult", { queryHash, error });
4883
+ logger23.error("Error in getQueryResult", { queryHash, error });
4884
+ return null;
4885
+ }
4886
+ }
4887
+ async getQueryResultWithMetadata(queryHash) {
4888
+ logger23.trace("getQueryResultWithMetadata", { queryHash });
4889
+ try {
4890
+ return new Promise((resolve, reject) => {
4891
+ const request = indexedDB.open(this.dbName, this.version);
4892
+ request.onerror = () => {
4893
+ logger23.error("Failed to open database for getQueryResultWithMetadata", { error: request.error });
4894
+ reject(request.error);
4895
+ };
4896
+ request.onsuccess = () => {
4897
+ const db = request.result;
4898
+ const transaction = db.transaction([this.storeName], "readonly");
4899
+ const store = transaction.objectStore(this.storeName);
4900
+ const queryKey = `query:${queryHash}`;
4901
+ const getRequest = store.get(queryKey);
4902
+ getRequest.onerror = () => {
4903
+ logger23.error("Failed to retrieve query result with metadata", { queryHash, error: getRequest.error });
4904
+ reject(getRequest.error);
4905
+ };
4906
+ getRequest.onsuccess = () => {
4907
+ try {
4908
+ const result = getRequest.result;
4909
+ if (!result) {
4910
+ resolve(null);
4911
+ return;
4912
+ }
4913
+ const entry = JSON.parse(result);
4914
+ if (Array.isArray(entry)) {
4915
+ resolve({ itemKeys: entry, metadata: void 0 });
4916
+ return;
4917
+ }
4918
+ if (entry.metadata) {
4919
+ if (entry.metadata.createdAt) {
4920
+ entry.metadata.createdAt = new Date(entry.metadata.createdAt);
4921
+ }
4922
+ if (entry.metadata.expiresAt) {
4923
+ entry.metadata.expiresAt = new Date(entry.metadata.expiresAt);
4924
+ }
4925
+ }
4926
+ resolve({
4927
+ itemKeys: entry.itemKeys || [],
4928
+ metadata: entry.metadata
4929
+ });
4930
+ } catch (parseError) {
4931
+ logger23.error("Failed to parse query result with metadata", { queryHash, error: parseError });
4932
+ resolve(null);
4933
+ }
4934
+ };
4935
+ };
4936
+ });
4937
+ } catch (error) {
4938
+ logger23.error("Error in getQueryResultWithMetadata", { queryHash, error });
4771
4939
  return null;
4772
4940
  }
4773
4941
  }
4774
4942
  async hasQueryResult(queryHash) {
4775
- logger21.trace("hasQueryResult", { queryHash });
4943
+ logger23.trace("hasQueryResult", { queryHash });
4776
4944
  try {
4777
4945
  const result = await this.getQueryResult(queryHash);
4778
4946
  return result !== null;
4779
4947
  } catch (error) {
4780
- logger21.error("Error in hasQueryResult", { queryHash, error });
4948
+ logger23.error("Error in hasQueryResult", { queryHash, error });
4781
4949
  return false;
4782
4950
  }
4783
4951
  }
4784
4952
  async deleteQueryResult(queryHash) {
4785
- logger21.trace("deleteQueryResult", { queryHash });
4953
+ logger23.trace("deleteQueryResult", { queryHash });
4786
4954
  try {
4787
4955
  return new Promise((resolve, reject) => {
4788
4956
  const request = indexedDB.open(this.dbName, this.version);
4789
4957
  request.onerror = () => {
4790
- logger21.error("Failed to open database for deleteQueryResult", { error: request.error });
4958
+ logger23.error("Failed to open database for deleteQueryResult", { error: request.error });
4791
4959
  reject(request.error);
4792
4960
  };
4793
4961
  request.onsuccess = () => {
@@ -4797,7 +4965,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4797
4965
  const queryKey = `query:${queryHash}`;
4798
4966
  const deleteRequest = store.delete(queryKey);
4799
4967
  deleteRequest.onerror = () => {
4800
- logger21.error("Failed to delete query result", { queryHash, error: deleteRequest.error });
4968
+ logger23.error("Failed to delete query result", { queryHash, error: deleteRequest.error });
4801
4969
  reject(deleteRequest.error);
4802
4970
  };
4803
4971
  deleteRequest.onsuccess = () => {
@@ -4806,12 +4974,12 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4806
4974
  };
4807
4975
  });
4808
4976
  } catch (error) {
4809
- logger21.error("Error in deleteQueryResult", { queryHash, error });
4977
+ logger23.error("Error in deleteQueryResult", { queryHash, error });
4810
4978
  throw error;
4811
4979
  }
4812
4980
  }
4813
4981
  async invalidateItemKeys(keys) {
4814
- logger21.debug("invalidateItemKeys", { keys });
4982
+ logger23.debug("invalidateItemKeys", { keys });
4815
4983
  if (keys.length === 0) {
4816
4984
  return;
4817
4985
  }
@@ -4848,7 +5016,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4848
5016
  queryResults2[queryHash] = itemKeys;
4849
5017
  }
4850
5018
  } catch (error) {
4851
- logger21.debug("Failed to parse query result", { key: item.key, error });
5019
+ logger23.debug("Failed to parse query result", { key: item.key, error });
4852
5020
  }
4853
5021
  }
4854
5022
  }
@@ -4876,18 +5044,18 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4876
5044
  });
4877
5045
  }
4878
5046
  }
4879
- logger21.debug("Selectively invalidated queries referencing affected keys", {
5047
+ logger23.debug("Selectively invalidated queries referencing affected keys", {
4880
5048
  affectedKeys: keys.length,
4881
5049
  queriesRemoved: queriesToRemove.length,
4882
5050
  totalQueries: Object.keys(queryResults).length
4883
5051
  });
4884
5052
  } catch (error) {
4885
- logger21.error("Error during selective query invalidation, falling back to clearing all queries", { error });
5053
+ logger23.error("Error during selective query invalidation, falling back to clearing all queries", { error });
4886
5054
  await this.clearQueryResults();
4887
5055
  }
4888
5056
  }
4889
5057
  async invalidateLocation(locations) {
4890
- logger21.debug("invalidateLocation", { locations });
5058
+ logger23.debug("invalidateLocation", { locations });
4891
5059
  let keysToInvalidate = [];
4892
5060
  if (locations.length === 0) {
4893
5061
  await this.clearQueryResults();
@@ -4900,12 +5068,12 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4900
5068
  }
4901
5069
  }
4902
5070
  async clearQueryResults() {
4903
- logger21.trace("clearQueryResults");
5071
+ logger23.trace("clearQueryResults");
4904
5072
  try {
4905
5073
  return new Promise((resolve, reject) => {
4906
5074
  const request = indexedDB.open(this.dbName, this.version);
4907
5075
  request.onerror = () => {
4908
- logger21.error("Failed to open database for clearQueryResults", { error: request.error });
5076
+ logger23.error("Failed to open database for clearQueryResults", { error: request.error });
4909
5077
  reject(request.error);
4910
5078
  };
4911
5079
  request.onsuccess = () => {
@@ -4915,7 +5083,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4915
5083
  const cursorRequest = store.openCursor();
4916
5084
  const keysToDelete = [];
4917
5085
  cursorRequest.onerror = () => {
4918
- logger21.error("Failed to open cursor for clearQueryResults", { error: cursorRequest.error });
5086
+ logger23.error("Failed to open cursor for clearQueryResults", { error: cursorRequest.error });
4919
5087
  reject(cursorRequest.error);
4920
5088
  };
4921
5089
  cursorRequest.onsuccess = () => {
@@ -4936,7 +5104,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4936
5104
  keysToDelete.forEach((queryKey) => {
4937
5105
  const deleteRequest = store.delete(queryKey);
4938
5106
  deleteRequest.onerror = () => {
4939
- logger21.error("Failed to delete query key", { queryKey, error: deleteRequest.error });
5107
+ logger23.error("Failed to delete query key", { queryKey, error: deleteRequest.error });
4940
5108
  deletedCount++;
4941
5109
  if (deletedCount === totalToDelete) {
4942
5110
  resolve();
@@ -4954,14 +5122,14 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
4954
5122
  };
4955
5123
  });
4956
5124
  } catch (error) {
4957
- logger21.error("Error in clearQueryResults", { error });
5125
+ logger23.error("Error in clearQueryResults", { error });
4958
5126
  throw error;
4959
5127
  }
4960
5128
  }
4961
5129
  };
4962
5130
 
4963
5131
  // src/browser/IndexDBCacheMap.ts
4964
- var logger22 = logger_default.get("IndexDBCacheMap");
5132
+ var logger24 = logger_default.get("IndexDBCacheMap");
4965
5133
  var IndexDBCacheMap = class _IndexDBCacheMap extends CacheMap {
4966
5134
  implementationType = "browser/indexedDB";
4967
5135
  // Memory storage
@@ -5125,15 +5293,37 @@ var IndexDBCacheMap = class _IndexDBCacheMap extends CacheMap {
5125
5293
  return candidates.filter((item) => isQueryMatch6(item, query));
5126
5294
  }
5127
5295
  // Query result caching methods
5128
- async setQueryResult(queryHash, itemKeys) {
5296
+ async setQueryResult(queryHash, itemKeys, metadata) {
5129
5297
  this.queryResultCache[queryHash] = {
5130
- itemKeys
5298
+ itemKeys,
5299
+ metadata
5131
5300
  };
5301
+ await this.asyncCache.setQueryResult(queryHash, itemKeys, metadata);
5132
5302
  }
5133
5303
  async getQueryResult(queryHash) {
5134
5304
  const entry = this.queryResultCache[queryHash];
5135
5305
  return entry ? entry.itemKeys : null;
5136
5306
  }
5307
+ async getQueryResultWithMetadata(queryHash) {
5308
+ let entry = this.queryResultCache[queryHash];
5309
+ if (!entry) {
5310
+ const persistedResult = await this.asyncCache.getQueryResultWithMetadata(queryHash);
5311
+ if (persistedResult) {
5312
+ this.queryResultCache[queryHash] = {
5313
+ itemKeys: persistedResult.itemKeys,
5314
+ metadata: persistedResult.metadata
5315
+ };
5316
+ entry = this.queryResultCache[queryHash];
5317
+ }
5318
+ }
5319
+ if (!entry) {
5320
+ return null;
5321
+ }
5322
+ return {
5323
+ itemKeys: entry.itemKeys,
5324
+ metadata: entry.metadata
5325
+ };
5326
+ }
5137
5327
  async hasQueryResult(queryHash) {
5138
5328
  return queryHash in this.queryResultCache;
5139
5329
  }
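
For IndexDBCacheMap, setQueryResult now writes each entry through to the underlying asyncCache as well as the in-memory queryResultCache, and the new getQueryResultWithMetadata checks memory first, then falls back to the persisted IndexedDB entry and copies it back into memory on a hit. A generic sketch of that read-through pattern, with the layer names invented here rather than taken from the bundle:

    // "memory" and "persistent" stand in for the in-memory map and the async IndexedDB store used above.
    async function readThrough<T>(
      key: string,
      memory: Map<string, T>,
      persistent: { get(key: string): Promise<T | null> }
    ): Promise<T | null> {
      const cached = memory.get(key);
      if (cached !== undefined) {
        return cached; // fast path: no IndexedDB round-trip
      }
      const persisted = await persistent.get(key);
      if (persisted !== null) {
        memory.set(key, persisted); // warm the in-memory layer so the next lookup hits the fast path
      }
      return persisted;
    }

The write-through plus read-repair combination keeps the two layers from drifting apart across page reloads: writes land in both, and reads restore the memory copy from the persisted one when it is missing.
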
@@ -5145,7 +5335,7 @@ var IndexDBCacheMap = class _IndexDBCacheMap extends CacheMap {
5145
5335
  }
5146
5336
  // Invalidation methods
5147
5337
  async invalidateItemKeys(keys) {
5148
- logger22.debug("invalidateItemKeys", { keys });
5338
+ logger24.debug("invalidateItemKeys", { keys });
5149
5339
  if (keys.length === 0) {
5150
5340
  return;
5151
5341
  }
@@ -5175,7 +5365,7 @@ var IndexDBCacheMap = class _IndexDBCacheMap extends CacheMap {
5175
5365
  queriesToRemove.forEach((queryHash) => {
5176
5366
  delete this.queryResultCache[queryHash];
5177
5367
  });
5178
- logger22.debug("Selectively invalidated queries referencing affected keys", {
5368
+ logger24.debug("Selectively invalidated queries referencing affected keys", {
5179
5369
  affectedKeys: keys.length,
5180
5370
  queriesRemoved: queriesToRemove.length,
5181
5371
  totalQueries: Object.keys(this.queryResultCache).length
@@ -5667,8 +5857,11 @@ var highTrafficTTLConfig = {
5667
5857
  }
5668
5858
  };
5669
5859
 
5860
+ // src/cache/warming/CacheWarmer.ts
5861
+ var logger25 = logger_default.get("CacheWarmer");
5862
+
5670
5863
  // src/Operations.ts
5671
- var logger23 = logger_default.get("Operations");
5864
+ var logger26 = logger_default.get("Operations");
5672
5865
  var CacheMapOperations = class {
5673
5866
  constructor(api, coordinate, cacheMap, pkType, options, eventEmitter, ttlManager, evictionManager, statsManager, registry) {
5674
5867
  this.api = api;
@@ -5682,7 +5875,7 @@ var CacheMapOperations = class {
5682
5875
  this.statsManager = statsManager;
5683
5876
  this.registry = registry;
5684
5877
  if (this.options.enableDebugLogging) {
5685
- logger23.debug("CacheMapOperations initialized", {
5878
+ logger26.debug("CacheMapOperations initialized", {
5686
5879
  cacheType: this.cacheMap.implementationType,
5687
5880
  isTwoLayer: this.cacheMap instanceof TwoLayerCacheMap
5688
5881
  });
@@ -5775,7 +5968,7 @@ var createOperations = (api, coordinate, cacheMap, pkType, options, eventEmitter
5775
5968
  };
5776
5969
 
5777
5970
  // src/eviction/EvictionManager.ts
5778
- var logger24 = logger_default.get("EvictionManager");
5971
+ var logger27 = logger_default.get("EvictionManager");
5779
5972
  var EvictionManager = class {
5780
5973
  evictionStrategy;
5781
5974
  constructor(evictionStrategy) {
@@ -5787,7 +5980,7 @@ var EvictionManager = class {
5787
5980
  */
5788
5981
  setEvictionStrategy(strategy) {
5789
5982
  this.evictionStrategy = strategy;
5790
- logger24.debug("Eviction strategy updated", {
5983
+ logger27.debug("Eviction strategy updated", {
5791
5984
  strategy: strategy?.getStrategyName() || "none"
5792
5985
  });
5793
5986
  }
@@ -5808,13 +6001,13 @@ var EvictionManager = class {
5808
6001
  return;
5809
6002
  }
5810
6003
  try {
5811
- logger24.debug("EVICTION: Item accessed, updating metadata", {
6004
+ logger27.debug("EVICTION: Item accessed, updating metadata", {
5812
6005
  key,
5813
6006
  strategy: this.evictionStrategy.getStrategyName()
5814
6007
  });
5815
6008
  await this.evictionStrategy.onItemAccessed(key, metadataProvider);
5816
6009
  } catch (error) {
5817
- logger24.error("EVICTION: Error in eviction strategy onItemAccessed", {
6010
+ logger27.error("EVICTION: Error in eviction strategy onItemAccessed", {
5818
6011
  key,
5819
6012
  error,
5820
6013
  strategy: this.evictionStrategy?.getStrategyName()
@@ -5832,12 +6025,12 @@ var EvictionManager = class {
5832
6025
  const startTime = Date.now();
5833
6026
  const evictedKeys = [];
5834
6027
  if (!this.evictionStrategy) {
5835
- logger24.debug("EVICTION: No eviction strategy configured", { key });
6028
+ logger27.debug("EVICTION: No eviction strategy configured", { key });
5836
6029
  return evictedKeys;
5837
6030
  }
5838
6031
  try {
5839
6032
  const estimatedSize = estimateValueSize(value);
5840
- logger24.debug("EVICTION: Item addition started", {
6033
+ logger27.debug("EVICTION: Item addition started", {
5841
6034
  key,
5842
6035
  estimatedSize,
5843
6036
  strategy: this.evictionStrategy.getStrategyName()
@@ -5845,7 +6038,7 @@ var EvictionManager = class {
5845
6038
  const contextStartTime = Date.now();
5846
6039
  const context = await this.createEvictionContext(metadataProvider, estimatedSize);
5847
6040
  const contextDuration = Date.now() - contextStartTime;
5848
- logger24.debug("EVICTION: Current cache state", {
6041
+ logger27.debug("EVICTION: Current cache state", {
5849
6042
  key,
5850
6043
  currentItemCount: context.currentSize.itemCount,
5851
6044
  currentSizeBytes: context.currentSize.sizeBytes,
@@ -5858,7 +6051,7 @@ var EvictionManager = class {
  const keysToEvict = await this.evictionStrategy.selectForEviction(metadataProvider, context);
  const selectionDuration = Date.now() - selectionStartTime;
  if (keysToEvict.length > 0) {
- logger24.debug("EVICTION: Items selected for eviction", {
+ logger27.debug("EVICTION: Items selected for eviction", {
  key,
  evictCount: keysToEvict.length,
  keysToEvict,
@@ -5870,7 +6063,7 @@ var EvictionManager = class {
  for (const evictKey of keysToEvict) {
  await this.evictionStrategy.onItemRemoved(evictKey, metadataProvider);
  evictedKeys.push(evictKey);
- logger24.debug("EVICTION: Marked item for eviction", {
+ logger27.debug("EVICTION: Marked item for eviction", {
  evictedKey: evictKey,
  newKey: key
  });
@@ -5881,7 +6074,7 @@ var EvictionManager = class {
  const addMetadataDuration = Date.now() - addMetadataStart;
  const totalDuration = Date.now() - startTime;
  if (evictedKeys.length > 0) {
- logger24.debug("EVICTION: Eviction completed", {
+ logger27.debug("EVICTION: Eviction completed", {
  newKey: key,
  evictedCount: evictedKeys.length,
  evictedKeys,
@@ -5892,14 +6085,14 @@ var EvictionManager = class {
  totalDuration
  });
  } else {
- logger24.debug("EVICTION: No eviction needed", {
+ logger27.debug("EVICTION: No eviction needed", {
  newKey: key,
  estimatedSize,
  totalDuration
  });
  }
  } catch (error) {
- logger24.error("EVICTION: Error in eviction strategy onItemAdded", {
+ logger27.error("EVICTION: Error in eviction strategy onItemAdded", {
  key,
  error,
  strategy: this.evictionStrategy?.getStrategyName()
@@ -5919,7 +6112,7 @@ var EvictionManager = class {
  try {
  this.evictionStrategy.onItemRemoved(key, metadataProvider);
  } catch (error) {
- logger24.error("Error in eviction strategy onItemRemoved", { key, error });
+ logger27.error("Error in eviction strategy onItemRemoved", { key, error });
  }
  }
  /**
@@ -5931,15 +6124,15 @@ var EvictionManager = class {
  const startTime = Date.now();
  const evictedKeys = [];
  if (!this.evictionStrategy) {
- logger24.debug("EVICTION: No eviction strategy configured for manual eviction");
+ logger27.debug("EVICTION: No eviction strategy configured for manual eviction");
  return evictedKeys;
  }
  try {
- logger24.debug("EVICTION: Manual eviction started", {
+ logger27.debug("EVICTION: Manual eviction started", {
  strategy: this.evictionStrategy.getStrategyName()
  });
  const context = await this.createEvictionContext(metadataProvider);
- logger24.debug("EVICTION: Manual eviction - current cache state", {
+ logger27.debug("EVICTION: Manual eviction - current cache state", {
  currentItemCount: context.currentSize.itemCount,
  currentSizeBytes: context.currentSize.sizeBytes,
  maxItems: context.limits.maxItems,
@@ -5952,20 +6145,20 @@ var EvictionManager = class {
  }
  const duration = Date.now() - startTime;
  if (evictedKeys.length > 0) {
- logger24.debug("EVICTION: Manual eviction completed", {
+ logger27.debug("EVICTION: Manual eviction completed", {
  evictedCount: evictedKeys.length,
  evictedKeys,
  strategy: this.evictionStrategy.getStrategyName(),
  duration
  });
  } else {
- logger24.debug("EVICTION: Manual eviction - no items to evict", {
+ logger27.debug("EVICTION: Manual eviction - no items to evict", {
  strategy: this.evictionStrategy.getStrategyName(),
  duration
  });
  }
  } catch (error) {
- logger24.error("EVICTION: Error in manual eviction", {
+ logger27.error("EVICTION: Error in manual eviction", {
  error,
  strategy: this.evictionStrategy?.getStrategyName()
  });
@@ -5988,7 +6181,7 @@ var EvictionManager = class {
  this.evictionStrategy.reset();
  }
  }
- logger24.debug("Eviction manager cleared");
+ logger27.debug("Eviction manager cleared");
  }
  /**
  * Create eviction context from current cache state
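
The EvictionManager hunks above only rebind the logger (logger24 becomes logger27); the eviction flow itself is unchanged: the strategy selects victim keys for the current context, each victim is reported to onItemRemoved, and the evicted keys are collected. A minimal sketch of that loop, assuming hypothetical EvictionStrategy, MetadataProvider and EvictionContext shapes inferred from the calls visible here (this is not the @fjell/cache API):

// Illustrative sketch only -- the interfaces below are stand-ins mirroring
// the calls shown in the diff, not types exported by the package.
interface EvictionContext { currentSize: { itemCount: number; sizeBytes: number }; limits: { maxItems: number | null } }
interface MetadataProvider { getAllMetadata(): Promise<Map<string, { addedAt: number }>> }
interface EvictionStrategy {
  getStrategyName(): string;
  selectForEviction(meta: MetadataProvider, ctx: EvictionContext): Promise<string[]>;
  onItemRemoved(key: string, meta: MetadataProvider): Promise<void> | void;
}

// Mirrors the loop in the first hunk: select victims, notify the strategy,
// collect the evicted keys for the caller.
async function evict(strategy: EvictionStrategy, meta: MetadataProvider, ctx: EvictionContext): Promise<string[]> {
  const evictedKeys: string[] = [];
  const keysToEvict = await strategy.selectForEviction(meta, ctx);
  for (const evictKey of keysToEvict) {
    await strategy.onItemRemoved(evictKey, meta);
    evictedKeys.push(evictKey);
  }
  return evictedKeys;
}
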
@@ -7326,7 +7519,7 @@ function createEvictionStrategy(policy, maxCacheSize, config) {
  }
 
  // src/ttl/TTLManager.ts
- var logger25 = logger_default.get("TTLManager");
+ var logger28 = logger_default.get("TTLManager");
  var TTLManager = class {
  config;
  cleanupTimer;
@@ -7338,7 +7531,7 @@ var TTLManager = class {
  validateOnAccess: true,
  ...config
  };
- logger25.debug("TTL_DEBUG: TTLManager created", {
+ logger28.debug("TTL_DEBUG: TTLManager created", {
  config: this.config,
  isTTLEnabled: this.isTTLEnabled(),
  defaultTTL: this.config.defaultTTL
@@ -7371,13 +7564,13 @@ var TTLManager = class {
  this.startAutoCleanup();
  }
  }
- logger25.debug("TTL configuration updated", { config: this.config });
+ logger28.debug("TTL configuration updated", { config: this.config });
  }
  /**
  * Set TTL metadata for an item when it's added
  */
  async onItemAdded(key, metadataProvider, itemTTL) {
- logger25.debug("TTL_DEBUG: onItemAdded called", {
+ logger28.debug("TTL_DEBUG: onItemAdded called", {
  key,
  itemTTL,
  isTTLEnabled: this.isTTLEnabled(),
@@ -7385,19 +7578,19 @@ var TTLManager = class {
  metadataProviderType: metadataProvider?.constructor?.name
  });
  if (!this.isTTLEnabled() && !itemTTL) {
- logger25.debug("TTL_DEBUG: No TTL configured for item - returning early", { key });
+ logger28.debug("TTL_DEBUG: No TTL configured for item - returning early", { key });
  return;
  }
- logger25.debug("TTL_DEBUG: Getting metadata for key", { key });
+ logger28.debug("TTL_DEBUG: Getting metadata for key", { key });
  const metadata = await metadataProvider.getMetadata(key);
- logger25.debug("TTL_DEBUG: Retrieved metadata", {
+ logger28.debug("TTL_DEBUG: Retrieved metadata", {
  key,
  hasMetadata: !!metadata,
  metadataKeys: metadata ? Object.keys(metadata) : null,
  metadata
  });
  if (!metadata) {
- logger25.debug("TTL_DEBUG: No metadata found for item when setting TTL", {
+ logger28.debug("TTL_DEBUG: No metadata found for item when setting TTL", {
  key,
  metadataProviderType: metadataProvider?.constructor?.name,
  metadataProviderMethods: metadataProvider ? Object.getOwnPropertyNames(Object.getPrototypeOf(metadataProvider)) : null
@@ -7405,7 +7598,7 @@ var TTLManager = class {
  return;
  }
  const ttl = itemTTL || this.config.defaultTTL;
- logger25.debug("TTL_DEBUG: Calculated TTL value", {
+ logger28.debug("TTL_DEBUG: Calculated TTL value", {
  key,
  itemTTL,
  defaultTTL: this.config.defaultTTL,
@@ -7418,7 +7611,7 @@ var TTLManager = class {
  expiresAt: metadata.addedAt + ttl,
  ttl
  };
- logger25.debug("TTL_DEBUG: Setting TTL metadata", {
+ logger28.debug("TTL_DEBUG: Setting TTL metadata", {
  key,
  ttl,
  addedAt: metadata.addedAt,
@@ -7426,9 +7619,9 @@ var TTLManager = class {
  ttlMetadata
  });
  await metadataProvider.setMetadata(key, ttlMetadata);
- logger25.trace("TTL_DEBUG: TTL set for item", { key, ttl, expiresAt: ttlMetadata.expiresAt });
+ logger28.trace("TTL_DEBUG: TTL set for item", { key, ttl, expiresAt: ttlMetadata.expiresAt });
  } else {
- logger25.debug("TTL_DEBUG: No TTL set - invalid TTL value", { key, ttl });
+ logger28.debug("TTL_DEBUG: No TTL set - invalid TTL value", { key, ttl });
  }
  }
  /**
@@ -7437,14 +7630,14 @@ var TTLManager = class {
  async isExpired(key, metadataProvider) {
  const metadata = await metadataProvider.getMetadata(key);
  if (!metadata || !metadata.expiresAt) {
- logger25.debug("TTL_CHECK: No TTL set for item", { key, hasMetadata: !!metadata });
+ logger28.debug("TTL_CHECK: No TTL set for item", { key, hasMetadata: !!metadata });
  return false;
  }
  const now = Date.now();
  const expired = now >= metadata.expiresAt;
  const remainingMs = metadata.expiresAt - now;
  if (expired) {
- logger25.debug("TTL_CHECK: Item EXPIRED", {
+ logger28.debug("TTL_CHECK: Item EXPIRED", {
  key,
  expiresAt: new Date(metadata.expiresAt).toISOString(),
  now: new Date(now).toISOString(),
@@ -7452,7 +7645,7 @@ var TTLManager = class {
  ttl: metadata.ttl
  });
  } else {
- logger25.debug("TTL_CHECK: Item still valid", {
+ logger28.debug("TTL_CHECK: Item still valid", {
  key,
  expiresAt: new Date(metadata.expiresAt).toISOString(),
  remainingMs,
@@ -7468,17 +7661,17 @@ var TTLManager = class {
  */
  async validateItem(key, metadataProvider) {
  if (!this.config.validateOnAccess) {
- logger25.debug("TTL_VALIDATE: Validation disabled, skipping check", { key });
+ logger28.debug("TTL_VALIDATE: Validation disabled, skipping check", { key });
  return true;
  }
- logger25.debug("TTL_VALIDATE: Validating item", {
+ logger28.debug("TTL_VALIDATE: Validating item", {
  key,
  ttlEnabled: this.isTTLEnabled(),
  defaultTTL: this.config.defaultTTL
  });
  const isExpired = await this.isExpired(key, metadataProvider);
  const isValid = !isExpired;
- logger25.debug("TTL_VALIDATE: Validation result", {
+ logger28.debug("TTL_VALIDATE: Validation result", {
  key,
  isValid,
  isExpired
@@ -7512,7 +7705,7 @@ var TTLManager = class {
  const expiredKeys = [];
  const allMetadata = await metadataProvider.getAllMetadata();
  const now = Date.now();
- logger25.debug("TTL_CLEANUP: Scanning for expired items", {
+ logger28.debug("TTL_CLEANUP: Scanning for expired items", {
  totalItems: allMetadata.size,
  now: new Date(now).toISOString()
  });
@@ -7523,7 +7716,7 @@ var TTLManager = class {
  itemsWithTTL++;
  if (now >= ttlMetadata.expiresAt) {
  expiredKeys.push(key);
- logger25.debug("TTL_CLEANUP: Found expired item", {
+ logger28.debug("TTL_CLEANUP: Found expired item", {
  key,
  expiresAt: new Date(ttlMetadata.expiresAt).toISOString(),
  expiredByMs: now - ttlMetadata.expiresAt
@@ -7533,7 +7726,7 @@ var TTLManager = class {
  }
  const duration = Date.now() - startTime;
  if (expiredKeys.length > 0) {
- logger25.debug("TTL_CLEANUP: Expired items found", {
+ logger28.debug("TTL_CLEANUP: Expired items found", {
  expiredCount: expiredKeys.length,
  totalItems: allMetadata.size,
  itemsWithTTL,
@@ -7541,7 +7734,7 @@ var TTLManager = class {
  duration
  });
  } else {
- logger25.debug("TTL_CLEANUP: No expired items found", {
+ logger28.debug("TTL_CLEANUP: No expired items found", {
  totalItems: allMetadata.size,
  itemsWithTTL,
  duration
@@ -7573,7 +7766,7 @@ var TTLManager = class {
  }
  metadata.expiresAt += additionalTTL;
  await metadataProvider.setMetadata(key, metadata);
- logger25.trace("TTL extended for item", { key, additionalTTL, newExpiresAt: metadata.expiresAt });
+ logger28.trace("TTL extended for item", { key, additionalTTL, newExpiresAt: metadata.expiresAt });
  return true;
  }
  /**
@@ -7595,7 +7788,7 @@ var TTLManager = class {
  ttl
  };
  await metadataProvider.setMetadata(key, ttlMetadata);
- logger25.trace("TTL refreshed for item", { key, ttl, expiresAt: ttlMetadata.expiresAt });
+ logger28.trace("TTL refreshed for item", { key, ttl, expiresAt: ttlMetadata.expiresAt });
  return true;
  }
  /**
@@ -7607,9 +7800,9 @@ var TTLManager = class {
  }
  if (this.config.cleanupInterval) {
  this.cleanupTimer = setInterval(() => {
- logger25.trace("Auto cleanup timer triggered");
+ logger28.trace("Auto cleanup timer triggered");
  }, this.config.cleanupInterval);
- logger25.debug("Auto cleanup started", { interval: this.config.cleanupInterval });
+ logger28.debug("Auto cleanup started", { interval: this.config.cleanupInterval });
  }
  }
  /**
@@ -7619,7 +7812,7 @@ var TTLManager = class {
  if (this.cleanupTimer) {
  clearInterval(this.cleanupTimer);
  this.cleanupTimer = null;
- logger25.debug("Auto cleanup stopped");
+ logger28.debug("Auto cleanup stopped");
  }
  }
  /**
@@ -7627,14 +7820,14 @@ var TTLManager = class {
  */
  clear() {
  this.stopAutoCleanup();
- logger25.debug("TTL manager cleared");
+ logger28.debug("TTL manager cleared");
  }
  /**
  * Cleanup resources
  */
  destroy() {
  this.stopAutoCleanup();
- logger25.debug("TTL manager destroyed");
+ logger28.debug("TTL manager destroyed");
  }
  };
 
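As with the eviction hunks, the TTLManager changes are confined to the logger binding (logger25 becomes logger28). The TTL arithmetic stays as shown: expiresAt = addedAt + ttl, an item is expired once Date.now() >= expiresAt, and extendTTL pushes expiresAt forward by additionalTTL milliseconds. A small sketch of that arithmetic, with TTLMetadata as a shape inferred from the diff rather than an exported type:

// Sketch of the TTL math visible above (all values in milliseconds).
interface TTLMetadata { addedAt: number; expiresAt: number; ttl: number }

function withTTL(addedAt: number, ttl: number): TTLMetadata {
  return { addedAt, expiresAt: addedAt + ttl, ttl };
}

function isExpired(meta: TTLMetadata, now: number = Date.now()): boolean {
  return now >= meta.expiresAt;
}

// extendTTL in the diff pushes expiresAt forward by additionalTTL ms.
function extendTTL(meta: TTLMetadata, additionalTTL: number): TTLMetadata {
  return { ...meta, expiresAt: meta.expiresAt + additionalTTL };
}
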
@@ -8034,9 +8227,9 @@ var CacheStatsManager = class {
  };
 
  // src/Cache.ts
- var logger26 = logger_default.get("Cache");
+ var logger29 = logger_default.get("Cache");
  var createCache = (api, coordinate, registry, options) => {
- logger26.debug("createCache", { coordinate, registry, options });
+ logger29.debug("createCache", { coordinate, registry, options });
  const completeOptions = createOptions(options);
  const cacheMap = createCacheMap(coordinate.kta, completeOptions);
  const pkType = coordinate.kta[0];
@@ -8116,13 +8309,13 @@ var isCache2 = (cache) => {
  };
 
  // src/InstanceFactory.ts
- var logger27 = logger_default.get("InstanceFactory");
+ var logger30 = logger_default.get("InstanceFactory");
  var createInstanceFactory = (api, options) => {
  const templateOptions = createOptions(options);
  validateOptions(templateOptions);
  return (coordinate, context) => {
  const instanceOptions = createOptions(options);
- logger27.debug("Creating cache instance", {
+ logger30.debug("Creating cache instance", {
  coordinate,
  registry: context.registry,
  api,
@@ -8189,9 +8382,9 @@ var createInstanceFactory = (api, options) => {
  };
 
  // src/Instance.ts
- var logger28 = logger_default.get("Instance");
+ var logger31 = logger_default.get("Instance");
  var createInstance = (registry, coordinate, api, options) => {
- logger28.debug("createInstance", { coordinate, api, registry, options });
+ logger31.debug("createInstance", { coordinate, api, registry, options });
  return createCache(api, coordinate, registry, options);
  };
  var isInstance = (instance) => {
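
The Cache.ts, InstanceFactory.ts and Instance.ts hunks again change only the logger numbers. The factory shape visible here — validate a template options object once, then return a closure that builds fresh per-instance options for each (coordinate, context) pair — can be sketched generically; Options, Coordinate, Context and the build callback below are placeholders, not @fjell/cache exports:

// Generic sketch of the factory pattern above, under assumed placeholder types.
type Options = Record<string, unknown>;

function createFactory<Coordinate, Context, Instance>(
  api: unknown,
  templateOptions: Options,
  build: (api: unknown, coordinate: Coordinate, context: Context, options: Options) => Instance
) {
  // In the real code the template options are validated once, up front.
  return (coordinate: Coordinate, context: Context): Instance => {
    const instanceOptions = { ...templateOptions }; // fresh options per instance
    return build(api, coordinate, context, instanceOptions);
  };
}
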
@@ -8199,7 +8392,7 @@ var isInstance = (instance) => {
  };
 
  // src/Aggregator.ts
- var logger29 = logger_default.get("ItemAggregator");
+ var logger32 = logger_default.get("ItemAggregator");
  var toCacheConfig = (config) => {
  let cacheConfig;
  if (config.optional === void 0) {
@@ -8211,22 +8404,22 @@ var toCacheConfig = (config) => {
  };
  var createAggregator = async (cache, { aggregates = {}, events = {} }) => {
  const populate = async (item) => {
- logger29.default("populate", { item });
+ logger32.default("populate", { item });
  for (const key in aggregates) {
  await populateAggregate(key, item);
  }
  for (const key in events) {
  await populateEvent(key, item);
  }
- logger29.default("populate done", { item });
+ logger32.default("populate done", { item });
  return item;
  };
  const populateAggregate = async (key, item) => {
- logger29.default("populate aggregate key", { key });
+ logger32.default("populate aggregate key", { key });
  const cacheConfig = toCacheConfig(aggregates[key]);
  if (item.refs === void 0) {
  if (cacheConfig.optional === false) {
- logger29.error("Item does not have refs an is not optional ", { item });
+ logger32.error("Item does not have refs an is not optional ", { item });
  throw new Error("Item does not have refs an is not optional " + JSON.stringify(item));
  } else {
  if (item.events && Object.prototype.hasOwnProperty.call(item.events, key)) {
@@ -8235,7 +8428,7 @@ var createAggregator = async (cache, { aggregates = {}, events = {} }) => {
  }
  } else if (item.refs[key] === void 0) {
  if (cacheConfig.optional === false) {
- logger29.error("Item does not have mandatory ref with key, not optional ", { key, item });
+ logger32.error("Item does not have mandatory ref with key, not optional ", { key, item });
  throw new Error("Item does not have mandatory ref with key, not optional " + key + " " + JSON.stringify(item));
  } else {
  if (item.events && Object.prototype.hasOwnProperty.call(item.events, key)) {
@@ -8244,7 +8437,7 @@ var createAggregator = async (cache, { aggregates = {}, events = {} }) => {
  }
  } else {
  const ref = item.refs[key];
- logger29.default("AGG Retrieving Item in Populate", { key: ref });
+ logger32.default("AGG Retrieving Item in Populate", { key: ref });
  const newItem = await cacheConfig.cache.operations.retrieve(ref);
  if (newItem) {
  if (item.aggs === void 0) {
@@ -8261,25 +8454,25 @@ var createAggregator = async (cache, { aggregates = {}, events = {} }) => {
  }
  };
  const populateEvent = async (key, item) => {
- logger29.default("populate event key", { key });
+ logger32.default("populate event key", { key });
  const cacheConfig = toCacheConfig(events[key]);
  if (item.events === void 0) {
  throw new Error("Item does not have events " + JSON.stringify(item));
  } else if (item.events[key] === void 0) {
  if (cacheConfig.optional === false) {
- logger29.error("Item does not have mandatory event with key", { key, item });
+ logger32.error("Item does not have mandatory event with key", { key, item });
  throw new Error("Item does not have mandatory event with key " + key + " " + JSON.stringify(item));
  }
  } else {
  const event = item.events[key];
  if (event.by === void 0) {
- logger29.error(
+ logger32.error(
  "populateEvent with an Event that does not have by",
  { event, ik: item.key, eventKey: key }
  );
  throw new Error("populateEvent with an Event that does not have by: " + JSON.stringify({ key }));
  }
- logger29.default("EVENT Retrieving Item in Populate", { key: event.by });
+ logger32.default("EVENT Retrieving Item in Populate", { key: event.by });
  const newItem = await cacheConfig.cache.operations.retrieve(event.by);
  if (newItem) {
  event.agg = newItem;
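
Within the Aggregator, populateAggregate resolves each configured ref through the companion cache's operations.retrieve and attaches the result to the item, throwing when a non-optional ref is missing; only the logger binding (logger29 to logger32) changes in these hunks. A sketch of that resolution step, with AggregatedItem and RefCache as illustrative shapes and the value stored under item.aggs[key] assumed, since the hunk ends before it:

// Illustrative only; the stored aggs shape is an assumption.
interface RefCache { operations: { retrieve(ref: unknown): Promise<unknown | null> } }
interface AggregatedItem {
  refs?: Record<string, unknown>;
  aggs?: Record<string, unknown>;
}

async function resolveRef(item: AggregatedItem, key: string, cache: RefCache, optional: boolean): Promise<void> {
  const ref = item.refs?.[key];
  if (ref === undefined) {
    if (!optional) throw new Error("Item does not have mandatory ref with key " + key);
    return;
  }
  const resolved = await cache.operations.retrieve(ref);
  if (resolved) {
    item.aggs = item.aggs ?? {};
    item.aggs[key] = resolved; // assumed shape of the attached aggregate
  }
}
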
@@ -8287,13 +8480,13 @@ var createAggregator = async (cache, { aggregates = {}, events = {} }) => {
  }
  };
  const all2 = async (query = {}, locations = []) => {
- logger29.default("all", { query, locations });
+ logger32.default("all", { query, locations });
  const result = await cache.operations.all(query, locations);
  const populatedItems = await Promise.all(result.items.map(async (item) => populate(item)));
  return populatedItems;
  };
  const one2 = async (query = {}, locations = []) => {
- logger29.default("one", { query, locations });
+ logger32.default("one", { query, locations });
  const item = await cache.operations.one(query, locations);
  let populatedItem = null;
  if (item) {
@@ -8302,30 +8495,30 @@ var createAggregator = async (cache, { aggregates = {}, events = {} }) => {
  return populatedItem;
  };
  const action2 = async (key, action3, body = {}) => {
- logger29.default("action", { key, action: action3, body });
+ logger32.default("action", { key, action: action3, body });
  const [item, affectedItems] = await cache.operations.action(key, action3, body);
  const populatedItem = await populate(item);
  return [populatedItem, affectedItems];
  };
  const allAction2 = async (action3, body = {}, locations = []) => {
- logger29.default("action", { action: action3, body, locations });
+ logger32.default("action", { action: action3, body, locations });
  const [items, affectedItems] = await cache.operations.allAction(action3, body, locations);
  const populatedItems = await Promise.all(items.map(async (item) => populate(item)));
  return [populatedItems, affectedItems];
  };
  const allFacet2 = async (facet3, params = {}, locations = []) => {
- logger29.default("allFacet", { facet: facet3, params, locations });
+ logger32.default("allFacet", { facet: facet3, params, locations });
  const response = await cache.operations.allFacet(facet3, params, locations);
  return response;
  };
  const create2 = async (v, locations = []) => {
- logger29.default("create", { v, locations });
+ logger32.default("create", { v, locations });
  const item = locations.length === 0 ? await cache.operations.create(v) : await cache.operations.create(v, { locations });
  const populatedItem = await populate(item);
  return populatedItem;
  };
  const get2 = async (key) => {
- logger29.default("get", { key });
+ logger32.default("get", { key });
  const item = await cache.operations.get(key);
  let populatedItem = null;
  if (item) {
@@ -8334,7 +8527,7 @@ var createAggregator = async (cache, { aggregates = {}, events = {} }) => {
  return populatedItem;
  };
  const retrieve2 = async (key) => {
- logger29.default("retrieve", { key });
+ logger32.default("retrieve", { key });
  const item = await cache.operations.retrieve(key);
  let populatedItem = null;
  if (item) {
@@ -8343,22 +8536,22 @@ var createAggregator = async (cache, { aggregates = {}, events = {} }) => {
  return populatedItem;
  };
  const remove2 = async (key) => {
- logger29.default("remove", { key });
+ logger32.default("remove", { key });
  await cache.operations.remove(key);
  };
  const update2 = async (key, v) => {
- logger29.default("update", { key, v });
+ logger32.default("update", { key, v });
  const item = await cache.operations.update(key, v);
  const populatedItem = await populate(item);
  return populatedItem;
  };
  const facet2 = async (key, facet3) => {
- logger29.default("facet", { key, facet: facet3 });
+ logger32.default("facet", { key, facet: facet3 });
  const response = await cache.operations.facet(key, facet3);
  return response;
  };
  const find2 = async (finder, finderParams = {}, locations = [], findOptions) => {
- logger29.default("find", { finder, finderParams, locations, findOptions });
+ logger32.default("find", { finder, finderParams, locations, findOptions });
  const result = await cache.operations.find(finder, finderParams, locations, findOptions);
  const populatedItems = await Promise.all(result.items.map(async (item) => populate(item)));
  return {
@@ -8367,7 +8560,7 @@ var createAggregator = async (cache, { aggregates = {}, events = {} }) => {
  };
  };
  const findOne2 = async (finder, finderParams = {}, locations = []) => {
- logger29.default("find", { finder, finderParams, locations });
+ logger32.default("find", { finder, finderParams, locations });
  const item = await cache.operations.findOne(finder, finderParams, locations);
  if (!item) {
  return null;
@@ -8376,7 +8569,7 @@ var createAggregator = async (cache, { aggregates = {}, events = {} }) => {
  return populatedItem;
  };
  const set2 = async (key, v) => {
- logger29.default("set", { key, v });
+ logger32.default("set", { key, v });
  const item = await cache.operations.set(key, v);
  const populatedItem = await populate(item);
  return populatedItem;
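
Every wrapped operation above follows the same shape once the logger rename is set aside: delegate to the corresponding cache.operations call, then run populate over whatever comes back. A sketch of that wrapping for the list-returning case; Item, CacheOps and the populate parameter are placeholder types, not the package's:

// Sketch of the wrapping pattern used by all2/find2 and friends.
interface Item { [k: string]: unknown }
interface CacheOps { all(query: object, locations: unknown[]): Promise<{ items: Item[] }> }

function wrapAll(ops: CacheOps, populate: (item: Item) => Promise<Item>) {
  return async (query: object = {}, locations: unknown[] = []): Promise<Item[]> => {
    const result = await ops.all(query, locations);
    // Enrich every returned item before handing it back to the caller.
    return Promise.all(result.items.map((item) => populate(item)));
  };
}
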
@@ -8428,13 +8621,13 @@ var createAggregator = async (cache, { aggregates = {}, events = {} }) => {
  import {
  createRegistry as createBaseRegistry
  } from "@fjell/registry";
- var logger30 = logger_default.get("Registry");
+ var logger33 = logger_default.get("Registry");
  var createRegistryFactory = () => {
  return (type, registryHub) => {
  if (type !== "cache") {
  throw new Error(`Cache registry factory can only create 'cache' type registries, got: ${type}`);
  }
- logger30.debug("Creating cache registry", { type, registryHub });
+ logger33.debug("Creating cache registry", { type, registryHub });
  const baseRegistry = createBaseRegistry(type, registryHub);
  return baseRegistry;
  };
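
The final hunk renames logger30 to logger33 in the Registry module; the factory still refuses any registry type other than "cache" before delegating to the createRegistry import from @fjell/registry. A sketch of that guard, with createBaseRegistry passed in as a parameter rather than imported:

// Sketch of the type guard in createRegistryFactory.
type RegistryHub = unknown;

function makeCacheRegistryFactory(createBaseRegistry: (type: string, hub?: RegistryHub) => unknown) {
  return (type: string, registryHub?: RegistryHub) => {
    if (type !== "cache") {
      throw new Error(`Cache registry factory can only create 'cache' type registries, got: ${type}`);
    }
    // Delegate to the base registry implementation for the supported type.
    return createBaseRegistry(type, registryHub);
  };
}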