@fjell/cache 4.5.2 → 4.6.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.kodrdriv/config.yaml +10 -0
- package/.kodrdriv/context/content.md +1 -0
- package/dist/Aggregator.cjs.js +276 -0
- package/dist/{src/Aggregator.d.ts → Aggregator.d.ts} +3 -3
- package/dist/Aggregator.es.js +271 -0
- package/dist/Cache.cjs.js +279 -0
- package/dist/{src/Cache.d.ts → Cache.d.ts} +4 -4
- package/dist/{src/Cache.js → Cache.es.js} +129 -77
- package/dist/CacheMap.cjs.js +108 -0
- package/dist/{src/CacheMap.d.ts → CacheMap.d.ts} +1 -1
- package/dist/{src/CacheMap.js → CacheMap.es.js} +42 -23
- package/dist/CacheRegistry.cjs.js +66 -0
- package/dist/{src/CacheRegistry.d.ts → CacheRegistry.d.ts} +4 -7
- package/dist/CacheRegistry.es.js +62 -0
- package/dist/index.cjs +708 -0
- package/dist/index.cjs.js +17 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.ts +4 -0
- package/dist/index.es.js +5 -0
- package/dist/logger.cjs.js +10 -0
- package/dist/logger.d.ts +2 -0
- package/dist/logger.es.js +6 -0
- package/package.json +31 -18
- package/src/Aggregator.ts +12 -4
- package/src/Cache.ts +5 -5
- package/src/CacheRegistry.ts +37 -25
- package/src/index.ts +4 -0
- package/src/logger.ts +1 -1
- package/vitest.config.ts +34 -0
- package/dist/src/Aggregator.js +0 -182
- package/dist/src/Aggregator.js.map +0 -1
- package/dist/src/Cache.js.map +0 -1
- package/dist/src/CacheMap.js.map +0 -1
- package/dist/src/CacheRegistry.js +0 -34
- package/dist/src/CacheRegistry.js.map +0 -1
- package/dist/src/logger.d.ts +0 -2
- package/dist/src/logger.js +0 -4
- package/dist/src/logger.js.map +0 -1
- package/dist/tsconfig.tsbuildinfo +0 -1
- package/eslint.config.mjs +0 -70
package/dist/index.cjs
ADDED
|
@@ -0,0 +1,708 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
|
|
4
|
+
|
|
5
|
+
const Logging = require('@fjell/logging');
|
|
6
|
+
const core = require('@fjell/core');
|
|
7
|
+
const httpApi = require('@fjell/http-api');
|
|
8
|
+
|
|
9
|
+
const LibLogger = Logging.getLogger('@fjell/cache');
|
|
10
|
+
|
|
11
|
+
const logger$3 = LibLogger.get('ItemAggregator');
|
|
12
|
+
/**
 * Normalizes an aggregate/event configuration entry to a CacheConfig.
 * A bare cache (no `optional` flag present) is wrapped as a required
 * dependency; an entry that already carries `optional` is returned
 * unchanged (same reference).
 */
const toCacheConfig = (config) => {
    if (config.optional === undefined) {
        return {
            cache: config,
            optional: false
        };
    }
    return config;
};
|
|
24
|
+
/**
 * Wraps an existing cache with "aggregation": every item returned by a
 * read/write operation is run through `populate`, which resolves the
 * configured refs (`aggregates`) and event `by` keys (`events`) into full
 * items fetched from their own caches.
 *
 * The returned object mirrors the wrapped cache's operation surface
 * (all/one/action/allAction/create/get/retrieve/remove/update/find/
 * reset/set) and additionally exposes the populate helpers.
 *
 * Fixes: corrected the "refs an is not optional" typo in the error/log
 * messages of populateAggregate, and allAction now logs under the label
 * 'allAction' instead of 'action' (consistent with createCache).
 */
const createAggregator = async (cache, { aggregates = {}, events = {} }) => {
    // Resolve every configured aggregate and event on a single item.
    // Mutates `item` in place (adds `aggs`, may delete optional `events`
    // entries) and returns it for convenience.
    const populate = async (item) => {
        logger$3.default('populate', {
            item
        });
        for (const key in aggregates) {
            await populateAggregate(key, item);
        }
        for (const key in events) {
            await populateEvent(key, item);
        }
        logger$3.default('populate done', {
            item
        });
        return item;
    };
    // Resolve one `aggregates` entry: look up item.refs[key] in the
    // configured cache and attach the result under item.aggs[key].
    // Throws when the ref is missing and the aggregate is not optional.
    const populateAggregate = async (key, item) => {
        logger$3.default('populate aggregate key', {
            key
        });
        const cacheConfig = toCacheConfig(aggregates[key]);
        if (item.refs === undefined) {
            if (cacheConfig.optional === false) {
                logger$3.error('Item does not have refs and is not optional ' + JSON.stringify(item));
                throw new Error('Item does not have refs and is not optional ' + JSON.stringify(item));
            } else {
                // Optional aggregate with no refs: drop any same-named
                // event entry so consumers don't see a stale value.
                if (item.events && Object.prototype.hasOwnProperty.call(item.events, key)) {
                    delete item.events[key];
                }
            }
        } else if (item.refs[key] === undefined) {
            if (cacheConfig.optional === false) {
                logger$3.error('Item does not have mandatory ref with key, not optional ' + key + ' ' + JSON.stringify(item));
                throw new Error('Item does not have mandatory ref with key, not optional ' + key + ' ' + JSON.stringify(item));
            } else {
                if (item.events && Object.prototype.hasOwnProperty.call(item.events, key)) {
                    delete item.events[key];
                }
            }
        } else {
            const ref = item.refs[key];
            logger$3.default('AGG Retrieving Item in Populate', {
                key: ref
            });
            const [, newItem] = await cacheConfig.cache.retrieve(ref);
            if (newItem) {
                if (item.aggs === undefined) {
                    item.aggs = {};
                }
                item.aggs[key] = {
                    key: ref,
                    item: newItem
                };
            }
        }
    };
    // TODO: I'm not a big fan that this just "automatically" assumes that the "by" key in event is a ref.
    // Resolve one `events` entry: fetch the item referenced by event.by and
    // attach it as event.agg. Throws when the item has no events at all,
    // when a mandatory event is missing, or when the event lacks `by`.
    const populateEvent = async (key, item) => {
        logger$3.default('populate event key', {
            key
        });
        const cacheConfig = toCacheConfig(events[key]);
        if (item.events === undefined) {
            throw new Error('Item does not have events ' + JSON.stringify(item));
        } else if (item.events[key] === undefined) {
            if (cacheConfig.optional === false) {
                logger$3.error('Item does not have mandatory event with key ' + key + ' ' + JSON.stringify(item));
                throw new Error('Item does not have mandatory event with key ' + key + ' ' + JSON.stringify(item));
            }
        } else {
            const event = item.events[key];
            if (event.by === undefined) {
                logger$3.error('populateEvent with an Event that does not have by', {
                    event,
                    ik: item.key,
                    eventKey: key
                });
                throw new Error('populateEvent with an Event that does not have by: ' + JSON.stringify({
                    key,
                    event
                }));
            }
            logger$3.default('EVENT Retrieving Item in Populate', {
                key: event.by
            });
            const [, newItem] = await cacheConfig.cache.retrieve(event.by);
            if (newItem) {
                event.agg = newItem;
            }
        }
    };
    // Delegate to cache.all, then populate each returned item.
    const all = async (query = {}, locations = []) => {
        logger$3.default('all', {
            query,
            locations
        });
        const [cacheMap, items] = await cache.all(query, locations);
        const populatedItems = await Promise.all(items.map(async (item) => populate(item)));
        return [
            cacheMap,
            populatedItems
        ];
    };
    // Delegate to cache.one; populate the item when one was found.
    const one = async (query = {}, locations = []) => {
        logger$3.default('one', {
            query,
            locations
        });
        const [cacheMap, item] = await cache.one(query, locations);
        let populatedItem = null;
        if (item) {
            populatedItem = await populate(item);
        }
        return [
            cacheMap,
            populatedItem
        ];
    };
    // Run a named action on one item, then populate the result.
    const action = async (key, action, body = {}) => {
        logger$3.default('action', {
            key,
            action,
            body
        });
        const [cacheMap, item] = await cache.action(key, action, body);
        const populatedItem = await populate(item);
        return [
            cacheMap,
            populatedItem
        ];
    };
    // Run a named action across items, then populate each result.
    const allAction = async (action, body = {}, locations = []) => {
        logger$3.default('allAction', {
            action,
            body,
            locations
        });
        const [cacheMap, items] = await cache.allAction(action, body, locations);
        const populatedItems = await Promise.all(items.map(async (item) => populate(item)));
        return [
            cacheMap,
            populatedItems
        ];
    };
    // Create an item, then populate it.
    const create = async (v, locations = []) => {
        logger$3.default('create', {
            v,
            locations
        });
        const [cacheMap, item] = await cache.create(v, locations);
        const populatedItem = await populate(item);
        return [
            cacheMap,
            populatedItem
        ];
    };
    // Fetch an item by key (always via the API), populating when found.
    const get = async (key) => {
        logger$3.default('get', {
            key
        });
        const [cacheMap, item] = await cache.get(key);
        let populatedItem = null;
        if (item) {
            populatedItem = await populate(item);
        }
        return [
            cacheMap,
            populatedItem
        ];
    };
    // Cache-first fetch by key, populating when found.
    const retrieve = async (key) => {
        logger$3.default('retrieve', {
            key
        });
        const [cacheMap, item] = await cache.retrieve(key);
        let populatedItem = null;
        if (item) {
            populatedItem = await populate(item);
        }
        return [
            cacheMap,
            populatedItem
        ];
    };
    // Remove an item by key; no populate step (nothing is returned).
    const remove = async (key) => {
        logger$3.default('remove', {
            key
        });
        const cacheMap = await cache.remove(key);
        return cacheMap;
    };
    // Update an item, then populate the result.
    const update = async (key, v) => {
        logger$3.default('update', {
            key,
            v
        });
        const [cacheMap, item] = await cache.update(key, v);
        const populatedItem = await populate(item);
        return [
            cacheMap,
            populatedItem
        ];
    };
    // Run a named finder, then populate each result.
    const find = async (finder, finderParams, locations = []) => {
        logger$3.default('find', {
            finder,
            finderParams,
            locations
        });
        const [cacheMap, items] = await cache.find(finder, finderParams, locations);
        const populatedItems = await Promise.all(items.map(async (item) => populate(item)));
        return [
            cacheMap,
            populatedItems
        ];
    };
    // Place an item in the cache directly, then populate it.
    const set = async (key, v) => {
        logger$3.default('set', {
            key,
            v
        });
        // TODO: There should be some input validation here to ensure a valid item.
        const [cacheMap, item] = await cache.set(key, v);
        const populatedItem = await populate(item);
        return [
            cacheMap,
            populatedItem
        ];
    };
    // Clear the underlying cache.
    const reset = async () => {
        const cacheMap = await cache.reset();
        return cacheMap;
    };
    return {
        all,
        one,
        action,
        allAction,
        create,
        get,
        retrieve,
        remove,
        update,
        find,
        reset,
        set,
        pkTypes: cache.pkTypes,
        cacheMap: cache.cacheMap,
        populate,
        populateAggregate,
        populateEvent
    };
};
|
|
277
|
+
|
|
278
|
+
// Transpiler helper (Babel/SWC style): define `key` on `obj` as a normal
// enumerable/configurable/writable data property. An existing key goes
// through Object.defineProperty; a brand-new key is plain-assigned.
// Returns `obj` for chaining.
function _define_property$1(obj, key, value) {
    if (key in obj) {
        Object.defineProperty(obj, key, {
            value,
            enumerable: true,
            configurable: true,
            writable: true
        });
        return obj;
    }
    obj[key] = value;
    return obj;
}
|
|
291
|
+
const logger$2 = LibLogger.get("CacheMap");
|
|
292
|
+
// const isObj = (x: any) => typeof x === "object" && x !== null;
|
|
293
|
+
// const intersection = (a: object, b: object): object => {
|
|
294
|
+
// const result: { [key: string]: any } = {}
|
|
295
|
+
// if (([a, b]).every(isObj)) {
|
|
296
|
+
// Object.keys(a).forEach((key) => {
|
|
297
|
+
// // @ts-ignore
|
|
298
|
+
// const value = a[key]
|
|
299
|
+
// // @ts-ignore
|
|
300
|
+
// const other = b[key]
|
|
301
|
+
// if (isObj(value)) {
|
|
302
|
+
// result[key] = intersection(value, other)
|
|
303
|
+
// } else if (value === other) {
|
|
304
|
+
// result[key] = value
|
|
305
|
+
// }
|
|
306
|
+
// })
|
|
307
|
+
// }
|
|
308
|
+
// return result
|
|
309
|
+
// }
|
|
310
|
+
// const removeEmptyObjects = (obj: object): object => {
|
|
311
|
+
// const result: { [key: string]: any } = {}
|
|
312
|
+
// Object.keys(obj).forEach((key) => {
|
|
313
|
+
// // @ts-ignore
|
|
314
|
+
// const value = obj[key];
|
|
315
|
+
// if (isObj(value)) {
|
|
316
|
+
// const nested = removeEmptyObjects(value);
|
|
317
|
+
// if (Object.keys(nested).length > 0) {
|
|
318
|
+
// result[key] = nested
|
|
319
|
+
// }
|
|
320
|
+
// } else if (value !== null) {
|
|
321
|
+
// result[key] = value
|
|
322
|
+
// }
|
|
323
|
+
// });
|
|
324
|
+
// return result;
|
|
325
|
+
// }
|
|
326
|
+
// In-memory map of item key -> item, keyed by the cache's pkType chain.
// Extends core.Dictionary from @fjell/core; lookup/storage semantics
// (key normalization, values()/keys() behavior) come from that superclass,
// which is not visible in this file.
class CacheMap extends core.Dictionary {
    // Typed pass-through to the Dictionary lookup.
    get(key) {
        return super.get(key);
    }
    // Return all cached items whose composite key's location chain matches
    // `locations` exactly. An empty `locations` array returns every value.
    allIn(locations) {
        if (locations.length === 0) {
            logger$2.debug('Returning all items, LocKeys is empty');
            return this.values();
        } else {
            const locKeys = locations;
            logger$2.debug('allIn', {
                locKeys,
                keys: this.keys().length
            });
            // Only composite keys (ComKey) carry a `loc` chain; primary keys
            // are filtered out before comparison.
            // NOTE(review): equality is JSON.stringify-based, so it is
            // order- and formatting-sensitive — confirm key serialization is
            // canonical in core before relying on this.
            return this.keys().filter((key)=>key && core.isComKey(key)).filter((key)=>{
                const ComKey = key;
                logger$2.debug('Comparing Location Keys', {
                    locKeys,
                    ComKey
                });
                return JSON.stringify(locKeys) === JSON.stringify(ComKey.loc);
            }).map((key)=>this.get(key));
        }
    }
    // TODO: Can we do case insensitive matching?
    // True when at least one item in `locations` matches `query`.
    contains(query, locations) {
        logger$2.debug('contains', {
            query,
            locations
        });
        const items = this.allIn(locations);
        return items.some((item)=>core.isQueryMatch(item, query));
    }
    // All items in `locations` that match `query`.
    queryIn(query, locations = []) {
        logger$2.debug('queryIn', {
            query,
            locations
        });
        const items = this.allIn(locations);
        return items.filter((item)=>core.isQueryMatch(item, query));
    }
    // Create a new CacheMap over the same types and backing map.
    // NOTE(review): `this.map` is passed by reference — whether the clone
    // shares storage with the original depends on Dictionary's constructor
    // copying behavior; confirm against @fjell/core.
    clone() {
        const clone = new CacheMap(this.types, this.map);
        return clone;
    }
    // `types`: the pkType chain this map serves; `map`: optional initial
    // backing storage handed to the Dictionary superclass.
    constructor(types, map){
        super(map), _define_property$1(this, "types", void 0);
        this.types = types;
    }
}
|
|
376
|
+
|
|
377
|
+
const logger$1 = LibLogger.get('Cache');
|
|
378
|
+
// Build a client-side cache for one item type backed by a remote API
// (`api` from @fjell/client-api shape: all/one/action/allAction/create/
// get/remove/update/find). `pkType` is this item's primary-key type;
// `parentCache`, when provided, contributes its pkTypes so contained items
// are addressed through the full key hierarchy.
// NOTE: `cacheMap` is shared mutable closure state — every operation
// writes into the same CacheMap instance until reset() replaces it.
const createCache = async (api, pkType, parentCache)=>{
    let pkTypes = [
        pkType
    ];
    if (parentCache) {
        pkTypes = pkTypes.concat(parentCache.pkTypes);
    }
    let cacheMap = new CacheMap(pkTypes);
    // Query all matching items from the API and cache each one.
    // A NotFoundError from the API is treated as an empty result set.
    const all = async (query = {}, locations = [])=>{
        logger$1.default('all', {
            query,
            locations
        });
        let ret = [];
        try {
            ret = await api.all(query, {}, locations);
            ret.forEach((v)=>{
                cacheMap.set(v.key, v);
            });
        } catch (e) {
            // Intentional swallow: 404 means "no items"; anything else rethrows.
            if (e instanceof httpApi.NotFoundError) ; else {
                throw e;
            }
        }
        return [
            cacheMap,
            core.validatePK(ret, pkType)
        ];
    };
    // Query a single item; resolves to null when nothing matches (404 swallowed).
    const one = async (query = {}, locations = [])=>{
        logger$1.default('one', {
            query,
            locations
        });
        let retItem = null;
        try {
            retItem = await api.one(query, {}, locations);
            if (retItem) {
                cacheMap.set(retItem.key, retItem);
            }
        } catch (e) {
            if (e instanceof httpApi.NotFoundError) ; else {
                throw e;
            }
        }
        return [
            cacheMap,
            retItem ? core.validatePK(retItem, pkType) : null
        ];
    };
    // Invoke a named server-side action on one item and cache the result.
    const action = async (key, action, body = {})=>{
        logger$1.default('action', {
            key,
            action,
            body
        });
        // TODO: This is validating the key, but it doesn't have knowledge of the pkType
        // This should be looking at the parentCaches and calculating an array of pkTypes
        if (!core.isValidItemKey(key)) {
            logger$1.error('Key for Action is not a valid ItemKey: %j', key);
            throw new Error('Key for Action is not a valid ItemKey');
        }
        const updated = await api.action(key, action, body, {});
        cacheMap.set(updated.key, updated);
        return [
            cacheMap,
            core.validatePK(updated, pkType)
        ];
    };
    // Invoke a named server-side action across a set of items (404 swallowed).
    const allAction = async (action, body = {}, locations = [])=>{
        logger$1.default('allAction', {
            action,
            body,
            locations
        });
        let ret = [];
        try {
            ret = await api.allAction(action, body, {}, locations);
            ret.forEach((v)=>{
                cacheMap.set(v.key, v);
            });
        } catch (e) {
            // istanbul ignore next
            if (e instanceof httpApi.NotFoundError) ; else {
                throw e;
            }
        }
        return [
            cacheMap,
            core.validatePK(ret, pkType)
        ];
    };
    // Create an item through the API and cache it.
    const create = async (v, locations = [])=>{
        logger$1.default('create', {
            v,
            locations
        });
        const created = await api.create(v, {}, locations);
        cacheMap.set(created.key, created);
        return [
            cacheMap,
            core.validatePK(created, pkType)
        ];
    };
    // Always fetch from the API (cache-through). Errors are logged and rethrown.
    const get = async (key)=>{
        logger$1.default('get', {
            key
        });
        // TODO: This is validating the key, but it doesn't have knowledge of the pkType
        // This should be looking at the parentCaches and calculating an array of pkTypes
        if (!core.isValidItemKey(key)) {
            logger$1.error('Key for Get is not a valid ItemKey: %j', key);
            throw new Error('Key for Get is not a valid ItemKey');
        }
        let ret;
        try {
            ret = await api.get(key, {});
            if (ret) {
                cacheMap.set(ret.key, ret);
            }
        } catch (e) {
            logger$1.error("Error getting item for key", {
                key,
                message: e.message,
                stack: e.stack
            });
            throw e;
        }
        return [
            cacheMap,
            ret ? core.validatePK(ret, pkType) : null
        ];
    };
    // Cache-first read: serve from cacheMap when the key is present,
    // otherwise fall through to get(). On a cache hit the first tuple slot
    // is null (signals "cacheMap unchanged" to callers).
    const retrieve = async (key)=>{
        logger$1.default('retrieve', {
            key
        });
        if (!core.isValidItemKey(key)) {
            logger$1.error('Key for Retrieve is not a valid ItemKey: %j', key);
            throw new Error('Key for Retrieve is not a valid ItemKey');
        }
        const containsItemKey = cacheMap.includesKey(key);
        let retrieved;
        if (containsItemKey) {
            logger$1.default('Looking for Object in Cache', key);
            retrieved = cacheMap.get(key);
        } else {
            logger$1.default('Object Not Found in Cache, Retrieving from Server API', {
                key
            });
            [, retrieved] = await get(key);
        }
        const retValue = [
            containsItemKey ? null : cacheMap,
            retrieved ? core.validatePK(retrieved, pkType) : null
        ];
        // logger.debug('Returning from retrieve', { retValue });
        return retValue;
    };
    // Delete an item through the API, then evict it from the cache.
    const remove = async (key)=>{
        logger$1.default('remove', {
            key
        });
        // TODO: This is validating the key, but it doesn't have knowledge of the pkType
        // This should be looking at the parentCaches and calculating an array of pkTypes
        if (!core.isValidItemKey(key)) {
            logger$1.error('Key for Remove is not a valid ItemKey: %j', key);
            throw new Error('Key for Remove is not a valid ItemKey');
        }
        try {
            await api.remove(key, {});
            cacheMap.delete(key);
        } catch (e) {
            logger$1.error("Error deleting item", {
                error: e
            });
            throw e;
        }
        return cacheMap;
    };
    // Update an item through the API and cache the result.
    const update = async (key, v)=>{
        logger$1.default('update', {
            key,
            v
        });
        // TODO: This is validating the key, but it doesn't have knowledge of the pkType
        // This should be looking at the parentCaches and calculating an array of pkTypes
        if (!core.isValidItemKey(key)) {
            logger$1.error('Key for Update is not a valid ItemKey: %j', key);
            throw new Error('Key for Update is not a valid ItemKey');
        }
        try {
            const updated = await api.update(key, v, {});
            cacheMap.set(updated.key, updated);
            return [
                cacheMap,
                core.validatePK(updated, pkType)
            ];
        } catch (e) {
            // NOTE(review): message says 'chat' — looks copy-pasted from
            // another project; confirm intended wording upstream in src.
            logger$1.error("Error updating chat", {
                error: e
            });
            throw e;
        }
    };
    // Run a named finder on the API and cache every returned item.
    // Unlike all/one, a NotFoundError here is NOT swallowed.
    const find = async (finder, finderParams, locations = [])=>{
        logger$1.default('find', {
            finder,
            finderParams,
            locations
        });
        const ret = await api.find(finder, finderParams, {}, locations);
        ret.forEach((v)=>{
            cacheMap.set(v.key, v);
        });
        return [
            cacheMap,
            core.validatePK(ret, pkType)
        ];
    };
    // Drop all cached items by replacing the closure's CacheMap.
    // NOTE(review): returns a one-element array, whereas remove() returns
    // the bare cacheMap — confirm this asymmetry is intended by callers.
    const reset = async ()=>{
        cacheMap = new CacheMap(pkTypes);
        return [
            cacheMap
        ];
    };
    // Place an item into the cache directly (no API call). The key must be
    // valid and must equal the item's own key.
    const set = async (key, v)=>{
        logger$1.default('set', {
            key,
            v
        });
        // TODO: This is validating the key, but it doesn't have knowledge of the pkType
        // This should be looking at the parentCaches and calculating an array of pkTypes
        // NOTE(review): the two messages below say 'Update' but this is the
        // set() operation — likely a copy-paste; confirm before changing.
        if (!core.isValidItemKey(key)) {
            logger$1.error('Key for Update is not a valid ItemKey: %j', key);
            throw new Error('Key for Update is not a valid ItemKey');
        }
        // TODO: This could be merged with the isValidItemKey check, later.
        core.validatePK(v, pkType);
        if (!core.isItemKeyEqual(key, v.key)) {
            logger$1.error('Key does not match item key: %j != %j', key, v.key);
            throw new Error('Key does not match item key');
        }
        cacheMap.set(key, v);
        return [
            cacheMap,
            core.validatePK(v, pkType)
        ];
    };
    return {
        all,
        one,
        action,
        allAction,
        create,
        get,
        retrieve,
        remove,
        update,
        find,
        reset,
        set,
        pkTypes,
        cacheMap
    };
};
|
|
644
|
+
|
|
645
|
+
// Duplicate of the transpiler's define-property helper emitted for this
// chunk of the bundle: installs `key` on `obj` as an enumerable,
// configurable, writable data property (defineProperty when the key
// already exists, plain assignment otherwise) and returns `obj`.
function _define_property(obj, key, value) {
    if (key in obj) {
        Object.defineProperty(obj, key, {
            value,
            enumerable: true,
            configurable: true,
            writable: true
        });
        return obj;
    }
    obj[key] = value;
    return obj;
}
|
|
658
|
+
const logger = LibLogger.get('CacheRegistry');
|
|
659
|
+
// Registry mapping JSON-serialized pkType arrays to their cache instances.
// All members are own properties assigned in the constructor (identical in
// effect to the transpiled _define_property fallback, which plain-assigns
// keys that do not already exist on the instance).
class CacheRegistry {
    // TODO: My use of Generics has Boxed me into a corner where I can't reference AbstractCache without the types
    constructor() {
        // key (JSON.stringify of pkTypes) -> registered cache
        this.cacheMap = {};

        // Register a cache under its pkTypes; an existing entry for the
        // same key is silently overwritten (logged at debug level).
        this.registerCache = async (cache) => {
            try {
                logger.debug('Attempting to register cache with pkTypes:', cache.pkTypes);
                const key = JSON.stringify(cache.pkTypes);
                if (this.cacheMap[key]) {
                    logger.debug(`Cache with pkTypes ${key} already exists, will be overwritten`);
                }
                this.cacheMap[key] = cache;
                logger.debug('Cache registered successfully with key:', key);
            } catch (error) {
                logger.error('Failed to register cache:', error);
                throw error;
            }
        };

        // Look up a cache by key-type array; undefined (with a warning)
        // when nothing is registered for it.
        this.getCache = (kts) => {
            logger.debug('Attempting to get cache for key types:', kts);
            const key = JSON.stringify(kts);
            logger.debug('Looking up cache with key:', key);
            const cache = this.cacheMap[key];
            if (!cache) {
                logger.warning(`No cache found for key types: ${key}`);
            }
            return cache;
        };

        // Debug aid: dump every registered cache key at debug level.
        this.printRegisteredCaches = () => {
            logger.debug('Printing all registered caches:');
            const cacheCount = Object.keys(this.cacheMap).length;
            logger.debug(`Total number of registered caches: ${cacheCount}`);
            if (cacheCount === 0) {
                logger.debug('No caches are currently registered');
            }
            Object.entries(this.cacheMap).forEach(([keyTypes]) => {
                logger.debug(`Cache with key types: ${keyTypes}`);
            });
        };

        logger.debug('CacheRegistry instance created');
    }
}
// Singleton slot, initialized empty (assigned elsewhere).
CacheRegistry.instance = void 0;
|
|
702
|
+
|
|
703
|
+
// Public surface of the @fjell/cache CommonJS bundle.
exports.CacheMap = CacheMap;
exports.CacheRegistry = CacheRegistry;
exports.createAggregator = createAggregator;
exports.createCache = createCache;
exports.toCacheConfig = toCacheConfig;
//# sourceMappingURL=index.cjs.map
|