@fjell/cache 4.6.10 → 4.6.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +96 -0
- package/dist/Aggregator.cjs.js +26 -20
- package/dist/Aggregator.d.ts +17 -1
- package/dist/Aggregator.es.js +26 -20
- package/dist/Cache.cjs.js +22 -345
- package/dist/Cache.d.ts +25 -20
- package/dist/Cache.es.js +22 -346
- package/dist/Instance.cjs.js +7 -11
- package/dist/Instance.d.ts +5 -8
- package/dist/Instance.es.js +6 -10
- package/dist/InstanceFactory.cjs.js +17 -5
- package/dist/InstanceFactory.d.ts +3 -3
- package/dist/InstanceFactory.es.js +17 -5
- package/dist/Operations.cjs.js +43 -0
- package/dist/Operations.d.ts +70 -0
- package/dist/Operations.es.js +39 -0
- package/dist/index.cjs +416 -369
- package/dist/index.cjs.js +4 -1
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +1 -0
- package/dist/index.es.js +3 -2
- package/dist/ops/action.cjs.js +28 -0
- package/dist/ops/action.d.ts +4 -0
- package/dist/ops/action.es.js +24 -0
- package/dist/ops/all.cjs.js +33 -0
- package/dist/ops/all.d.ts +4 -0
- package/dist/ops/all.es.js +29 -0
- package/dist/ops/allAction.cjs.js +35 -0
- package/dist/ops/allAction.d.ts +4 -0
- package/dist/ops/allAction.es.js +31 -0
- package/dist/ops/allFacet.cjs.js +22 -0
- package/dist/ops/allFacet.d.ts +4 -0
- package/dist/ops/allFacet.es.js +18 -0
- package/dist/ops/create.cjs.js +23 -0
- package/dist/ops/create.d.ts +4 -0
- package/dist/ops/create.es.js +19 -0
- package/dist/ops/facet.cjs.js +21 -0
- package/dist/ops/facet.d.ts +4 -0
- package/dist/ops/facet.es.js +17 -0
- package/dist/ops/find.cjs.js +26 -0
- package/dist/ops/find.d.ts +4 -0
- package/dist/ops/find.es.js +22 -0
- package/dist/ops/findOne.cjs.js +24 -0
- package/dist/ops/findOne.d.ts +4 -0
- package/dist/ops/findOne.es.js +20 -0
- package/dist/ops/get.cjs.js +38 -0
- package/dist/ops/get.d.ts +4 -0
- package/dist/ops/get.es.js +34 -0
- package/dist/ops/one.cjs.js +33 -0
- package/dist/ops/one.d.ts +4 -0
- package/dist/ops/one.es.js +29 -0
- package/dist/ops/remove.cjs.js +30 -0
- package/dist/ops/remove.d.ts +4 -0
- package/dist/ops/remove.es.js +26 -0
- package/dist/ops/reset.cjs.js +15 -0
- package/dist/ops/reset.d.ts +4 -0
- package/dist/ops/reset.es.js +11 -0
- package/dist/ops/retrieve.cjs.js +37 -0
- package/dist/ops/retrieve.d.ts +4 -0
- package/dist/ops/retrieve.es.js +33 -0
- package/dist/ops/set.cjs.js +71 -0
- package/dist/ops/set.d.ts +3 -0
- package/dist/ops/set.es.js +67 -0
- package/dist/ops/update.cjs.js +34 -0
- package/dist/ops/update.d.ts +4 -0
- package/dist/ops/update.es.js +30 -0
- package/docs/docs.config.ts +75 -0
- package/docs/index.html +18 -0
- package/docs/package.json +34 -0
- package/docs/public/README.md +96 -0
- package/docs/public/examples-README.md +302 -0
- package/docs/public/test.txt +0 -0
- package/docs/src/index.css +3 -0
- package/docs/src/main.tsx +12 -0
- package/docs/src/test/setup.ts +1 -0
- package/docs/tsconfig.node.json +15 -0
- package/examples/README.md +34 -39
- package/examples/aggregator-example.ts +9 -14
- package/examples/basic-cache-example.ts +18 -21
- package/package.json +16 -11
- package/vitest.config.ts +1 -1
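The headline change in this range is visible in the `package/dist/index.cjs` diff below: cache operations that used to hang directly off the cache object (`cache.all`, `cache.get`, `cache.retrieve`, ...) are now grouped under a `cache.operations` namespace, and the cache object additionally exposes `coordinate`, `registry`, `api`, and `cacheMap`. A minimal call-site sketch of that migration follows; the `api`, `coordinate`, and `registry` arguments are placeholders, and only the move to `cache.operations.*` and the `createCache(api, coordinate, registry)` signature are taken from the diff.

```ts
// Hedged migration sketch; argument types are loosened to `any` because the
// corresponding .d.ts changes are not reproduced in this diff view.
import { createCache } from '@fjell/cache';

async function listItems(api: any, coordinate: any, registry: any) {
  const cache = await createCache(api, coordinate, registry);

  // 4.6.10 style (removed in this diff):
  //   const [cacheMap, items] = await cache.all({}, []);

  // 4.6.13 style: operations are namespaced under `cache.operations`.
  const [cacheMap, items] = await cache.operations.all({}, []);
  return { cacheMap, items };
}
```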
package/dist/index.cjs
CHANGED
@@ -7,9 +7,9 @@ const core = require('@fjell/core');
 const httpApi = require('@fjell/http-api');
 const registry = require('@fjell/registry');

-const
+const logger$1.default = Logging.getLogger('@fjell/cache');

-const logger$
+const logger$j = logger$1.default.get('ItemAggregator');
 const toCacheConfig = (config)=>{
 let cacheConfig;
 if (config.optional === undefined) {
@@ -24,7 +24,7 @@ const toCacheConfig = (config)=>{
 };
 const createAggregator = async (cache, { aggregates = {}, events = {} })=>{
 const populate = async (item)=>{
-logger$
+logger$j.default('populate', {
 item
 });
 for(const key in aggregates){
@@ -33,19 +33,19 @@ const createAggregator = async (cache, { aggregates = {}, events = {} })=>{
 for(const key in events){
 await populateEvent(key, item);
 }
-logger$
+logger$j.default('populate done', {
 item
 });
 return item;
 };
 const populateAggregate = async (key, item)=>{
-logger$
+logger$j.default('populate aggregate key', {
 key
 });
 const cacheConfig = toCacheConfig(aggregates[key]);
 if (item.refs === undefined) {
 if (cacheConfig.optional === false) {
-logger$
+logger$j.error('Item does not have refs an is not optional ' + JSON.stringify(item));
 throw new Error('Item does not have refs an is not optional ' + JSON.stringify(item));
 } else {
 if (item.events && Object.prototype.hasOwnProperty.call(item.events, key)) {
@@ -54,7 +54,7 @@ const createAggregator = async (cache, { aggregates = {}, events = {} })=>{
 }
 } else if (item.refs[key] === undefined) {
 if (cacheConfig.optional === false) {
-logger$
+logger$j.error('Item does not have mandatory ref with key, not optional ' + key + ' ' + JSON.stringify(item));
 throw new Error('Item does not have mandatory ref with key, not optional ' + key + ' ' + JSON.stringify(item));
 } else {
 if (item.events && Object.prototype.hasOwnProperty.call(item.events, key)) {
@@ -63,10 +63,10 @@ const createAggregator = async (cache, { aggregates = {}, events = {} })=>{
 }
 } else {
 const ref = item.refs[key];
-logger$
+logger$j.default('AGG Retrieving Item in Populate', {
 key: ref
 });
-const [, newItem] = await cacheConfig.cache.retrieve(ref);
+const [, newItem] = await cacheConfig.cache.operations.retrieve(ref);
 if (newItem) {
 if (item.aggs === undefined) {
 item.aggs = {};
@@ -80,7 +80,7 @@ const createAggregator = async (cache, { aggregates = {}, events = {} })=>{
 };
 // TODO: I'm not a big fan that this just "automatically" assumes that the "by" key in event is a ref.
 const populateEvent = async (key, item)=>{
-logger$
+logger$j.default('populate event key', {
 key
 });
 const cacheConfig = toCacheConfig(events[key]);
@@ -88,13 +88,13 @@ const createAggregator = async (cache, { aggregates = {}, events = {} })=>{
 throw new Error('Item does not have events ' + JSON.stringify(item));
 } else if (item.events[key] === undefined) {
 if (cacheConfig.optional === false) {
-logger$
+logger$j.error('Item does not have mandatory event with key ' + key + ' ' + JSON.stringify(item));
 throw new Error('Item does not have mandatory event with key ' + key + ' ' + JSON.stringify(item));
 }
 } else {
 const event = item.events[key];
 if (event.by === undefined) {
-logger$
+logger$j.error('populateEvent with an Event that does not have by', {
 event,
 ik: item.key,
 eventKey: key
@@ -104,21 +104,21 @@ const createAggregator = async (cache, { aggregates = {}, events = {} })=>{
 event
 }));
 }
-logger$
+logger$j.default('EVENT Retrieving Item in Populate', {
 key: event.by
 });
-const [, newItem] = await cacheConfig.cache.retrieve(event.by);
+const [, newItem] = await cacheConfig.cache.operations.retrieve(event.by);
 if (newItem) {
 event.agg = newItem;
 }
 }
 };
 const all = async (query = {}, locations = [])=>{
-logger$
+logger$j.default('all', {
 query,
 locations
 });
-const [cacheMap, items] = await cache.all(query, locations);
+const [cacheMap, items] = await cache.operations.all(query, locations);
 const populatedItems = await Promise.all(items.map(async (item)=>populate(item)));
 return [
 cacheMap,
@@ -126,11 +126,11 @@ const createAggregator = async (cache, { aggregates = {}, events = {} })=>{
 ];
 };
 const one = async (query = {}, locations = [])=>{
-logger$
+logger$j.default('one', {
 query,
 locations
 });
-const [cacheMap, item] = await cache.one(query, locations);
+const [cacheMap, item] = await cache.operations.one(query, locations);
 let populatedItem = null;
 if (item) {
 populatedItem = await populate(item);
@@ -141,12 +141,12 @@ const createAggregator = async (cache, { aggregates = {}, events = {} })=>{
 ];
 };
 const action = async (key, action, body = {})=>{
-logger$
+logger$j.default('action', {
 key,
 action,
 body
 });
-const [cacheMap, item] = await cache.action(key, action, body);
+const [cacheMap, item] = await cache.operations.action(key, action, body);
 const populatedItem = await populate(item);
 return [
 cacheMap,
@@ -154,12 +154,12 @@ const createAggregator = async (cache, { aggregates = {}, events = {} })=>{
 ];
 };
 const allAction = async (action, body = {}, locations = [])=>{
-logger$
+logger$j.default('action', {
 action,
 body,
 locations
 });
-const [cacheMap, items] = await cache.allAction(action, body, locations);
+const [cacheMap, items] = await cache.operations.allAction(action, body, locations);
 const populatedItems = await Promise.all(items.map(async (item)=>populate(item)));
 return [
 cacheMap,
@@ -167,23 +167,23 @@ const createAggregator = async (cache, { aggregates = {}, events = {} })=>{
 ];
 };
 const allFacet = async (facet, params = {}, locations = [])=>{
-logger$
+logger$j.default('allFacet', {
 facet,
 params,
 locations
 });
-const [cacheMap, response] = await cache.allFacet(facet, params, locations);
+const [cacheMap, response] = await cache.operations.allFacet(facet, params, locations);
 return [
 cacheMap,
 response
 ];
 };
 const create = async (v, locations = [])=>{
-logger$
+logger$j.default('create', {
 v,
 locations
 });
-const [cacheMap, item] = await cache.create(v, locations);
+const [cacheMap, item] = await cache.operations.create(v, locations);
 const populatedItem = await populate(item);
 return [
 cacheMap,
@@ -191,10 +191,10 @@ const createAggregator = async (cache, { aggregates = {}, events = {} })=>{
 ];
 };
 const get = async (key)=>{
-logger$
+logger$j.default('get', {
 key
 });
-const [cacheMap, item] = await cache.get(key);
+const [cacheMap, item] = await cache.operations.get(key);
 let populatedItem = null;
 if (item) {
 populatedItem = await populate(item);
@@ -205,10 +205,10 @@ const createAggregator = async (cache, { aggregates = {}, events = {} })=>{
 ];
 };
 const retrieve = async (key)=>{
-logger$
+logger$j.default('retrieve', {
 key
 });
-const [cacheMap, item] = await cache.retrieve(key);
+const [cacheMap, item] = await cache.operations.retrieve(key);
 let populatedItem = null;
 if (item) {
 populatedItem = await populate(item);
@@ -219,18 +219,18 @@ const createAggregator = async (cache, { aggregates = {}, events = {} })=>{
 ];
 };
 const remove = async (key)=>{
-logger$
+logger$j.default('remove', {
 key
 });
-const cacheMap = await cache.remove(key);
+const cacheMap = await cache.operations.remove(key);
 return cacheMap;
 };
 const update = async (key, v)=>{
-logger$
+logger$j.default('update', {
 key,
 v
 });
-const [cacheMap, item] = await cache.update(key, v);
+const [cacheMap, item] = await cache.operations.update(key, v);
 const populatedItem = await populate(item);
 return [
 cacheMap,
@@ -239,23 +239,23 @@ const createAggregator = async (cache, { aggregates = {}, events = {} })=>{
 };
 // Facets are a pass-thru for aggregators
 const facet = async (key, facet)=>{
-logger$
+logger$j.default('facet', {
 key,
 facet
 });
-const [cacheMap, response] = await cache.facet(key, facet);
+const [cacheMap, response] = await cache.operations.facet(key, facet);
 return [
 cacheMap,
 response
 ];
 };
 const find = async (finder, finderParams = {}, locations = [])=>{
-logger$
+logger$j.default('find', {
 finder,
 finderParams,
 locations
 });
-const [cacheMap, items] = await cache.find(finder, finderParams, locations);
+const [cacheMap, items] = await cache.operations.find(finder, finderParams, locations);
 const populatedItems = await Promise.all(items.map(async (item)=>populate(item)));
 return [
 cacheMap,
@@ -263,12 +263,12 @@ const createAggregator = async (cache, { aggregates = {}, events = {} })=>{
 ];
 };
 const findOne = async (finder, finderParams = {}, locations = [])=>{
-logger$
+logger$j.default('find', {
 finder,
 finderParams,
 locations
 });
-const [cacheMap, item] = await cache.findOne(finder, finderParams, locations);
+const [cacheMap, item] = await cache.operations.findOne(finder, finderParams, locations);
 const populatedItem = await populate(item);
 return [
 cacheMap,
@@ -276,12 +276,12 @@ const createAggregator = async (cache, { aggregates = {}, events = {} })=>{
 ];
 };
 const set = async (key, v)=>{
-logger$
+logger$j.default('set', {
 key,
 v
 });
 // TODO: There should be some input validation here to ensure a valid item.
-const [cacheMap, item] = await cache.set(key, v);
+const [cacheMap, item] = await cache.operations.set(key, v);
 const populatedItem = await populate(item);
 return [
 cacheMap,
@@ -289,10 +289,17 @@ const createAggregator = async (cache, { aggregates = {}, events = {} })=>{
 ];
 };
 const reset = async ()=>{
-const cacheMap = await cache.reset();
+const cacheMap = await cache.operations.reset();
 return cacheMap;
 };
 return {
+// Cache properties
+coordinate: cache.coordinate,
+registry: cache.registry,
+api: cache.api,
+cacheMap: cache.cacheMap,
+operations: cache.operations,
+// Cache operations exposed directly
 all,
 one,
 action,
@@ -308,8 +315,7 @@ const createAggregator = async (cache, { aggregates = {}, events = {} })=>{
 findOne,
 reset,
 set,
-
-cacheMap: cache.cacheMap,
+// Aggregator-specific operations
 populate,
 populateAggregate,
 populateEvent
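The aggregator hunks above make two related changes: every delegated call now goes through `cache.operations.*`, and the object returned by `createAggregator` now mirrors the underlying cache's `coordinate`, `registry`, `api`, `cacheMap`, and `operations` alongside the wrapped operations and populate helpers. A small sketch of what that return shape implies for callers; the `itemCache` argument and its construction are assumed, not shown in this diff.

```ts
// Hedged sketch of consuming the new aggregator shape; types are loosened to `any`.
import { createAggregator } from '@fjell/cache';

async function demo(itemCache: any) {
  const aggregator = await createAggregator(itemCache, { aggregates: {}, events: {} });

  // New in this range: the aggregator passes the cache's structure through...
  console.log(aggregator.coordinate, aggregator.cacheMap);

  // ...while the wrapped, populating operations remain callable directly.
  const [, items] = await aggregator.all({}, []);
  return items;
}
```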
@@ -329,7 +335,7 @@ function _define_property(obj, key, value) {
 }
 return obj;
 }
-const logger$
+const logger$i = logger$1.default.get("CacheMap");
 // Normalize a key value to string for consistent comparison and hashing
 const normalizeKeyValue$1 = (value)=>{
 return String(value);
@@ -432,17 +438,17 @@ class CacheMap extends core.Dictionary {
 }
 allIn(locations) {
 if (locations.length === 0) {
-logger$
+logger$i.debug('Returning all items, LocKeys is empty');
 return this.values();
 } else {
 const locKeys = locations;
-logger$
+logger$i.debug('allIn', {
 locKeys,
 keys: this.keys().length
 });
 return this.keys().filter((key)=>key && core.isComKey(key)).filter((key)=>{
 const ComKey = key;
-logger$
+logger$i.debug('Comparing Location Keys', {
 locKeys,
 ComKey
 });
@@ -452,7 +458,7 @@ class CacheMap extends core.Dictionary {
 }
 // TODO: Can we do case insensitive matching?
 contains(query, locations) {
-logger$
+logger$i.debug('contains', {
 query,
 locations
 });
@@ -460,7 +466,7 @@ class CacheMap extends core.Dictionary {
 return items.some((item)=>core.isQueryMatch(item, query));
 }
 queryIn(query, locations = []) {
-logger$
+logger$i.debug('queryIn', {
 query,
 locations
 });
@@ -477,7 +483,272 @@ class CacheMap extends core.Dictionary {
 }
 }

-const logger$
+const logger$h = logger$1.default.get('all');
+const all = async (api, cacheMap, pkType, query = {}, locations = [])=>{
+logger$h.default('all', {
+query,
+locations
+});
+let ret = [];
+try {
+ret = await api.all(query, locations);
+ret.forEach((v)=>{
+cacheMap.set(v.key, v);
+});
+} catch (e) {
+if (e instanceof httpApi.NotFoundError) ; else {
+throw e;
+}
+}
+return [
+cacheMap,
+core.validatePK(ret, pkType)
+];
+};
+
+const logger$g = logger$1.default.get('one');
+const one = async (api, cacheMap, pkType, query = {}, locations = [])=>{
+logger$g.default('one', {
+query,
+locations
+});
+let retItem = null;
+try {
+retItem = await api.one(query, locations);
+if (retItem) {
+cacheMap.set(retItem.key, retItem);
+}
+} catch (e) {
+if (e instanceof httpApi.NotFoundError) ; else {
+throw e;
+}
+}
+return [
+cacheMap,
+retItem ? core.validatePK(retItem, pkType) : null
+];
+};
+
+const logger$f = logger$1.default.get('create');
+const create = async (api, cacheMap, pkType, v, locations = [])=>{
+logger$f.default('create', {
+v,
+locations
+});
+const created = await api.create(v, locations);
+cacheMap.set(created.key, created);
+return [
+cacheMap,
+core.validatePK(created, pkType)
+];
+};
+
+const logger$e = logger$1.default.get('get');
+const get.get = async (api, cacheMap, pkType, key)=>{
+logger$e.default('get', {
+key
+});
+if (!core.isValidItemKey(key)) {
+logger$e.error('Key for Get is not a valid ItemKey: %j', key);
+throw new Error('Key for Get is not a valid ItemKey');
+}
+let ret;
+try {
+ret = await api.get(key);
+if (ret) {
+cacheMap.set(ret.key, ret);
+}
+} catch (e) {
+logger$e.error("Error getting item for key", {
+key,
+message: e.message,
+stack: e.stack
+});
+throw e;
+}
+return [
+cacheMap,
+ret ? core.validatePK(ret, pkType) : null
+];
+};
+
+const logger$d = logger$1.default.get('retrieve');
+const retrieve = async (api, cacheMap, pkType, key)=>{
+logger$d.default('retrieve', {
+key
+});
+if (!core.isValidItemKey(key)) {
+logger$d.error('Key for Retrieve is not a valid ItemKey: %j', key);
+throw new Error('Key for Retrieve is not a valid ItemKey');
+}
+const containsItemKey = cacheMap.includesKey(key);
+let retrieved;
+if (containsItemKey) {
+logger$d.default('Looking for Object in Cache', key);
+retrieved = cacheMap.get(key);
+} else {
+logger$d.default('Object Not Found in Cache, Retrieving from Server API', {
+key
+});
+[, retrieved] = await get.get(api, cacheMap, pkType, key);
+}
+const retValue = [
+containsItemKey ? null : cacheMap,
+retrieved ? core.validatePK(retrieved, pkType) : null
+];
+return retValue;
+};
+
+const logger$c = logger$1.default.get('remove');
+const remove = async (api, cacheMap, key)=>{
+logger$c.default('remove', {
+key
+});
+if (!core.isValidItemKey(key)) {
+logger$c.error('Key for Remove is not a valid ItemKey: %j', key);
+throw new Error('Key for Remove is not a valid ItemKey');
+}
+try {
+await api.remove(key);
+cacheMap.delete(key);
+} catch (e) {
+logger$c.error("Error deleting item", {
+error: e
+});
+throw e;
+}
+return cacheMap;
+};
+
+const logger$b = logger$1.default.get('update');
+const update = async (api, cacheMap, pkType, key, v)=>{
+logger$b.default('update', {
+key,
+v
+});
+if (!core.isValidItemKey(key)) {
+logger$b.error('Key for Update is not a valid ItemKey: %j', key);
+throw new Error('Key for Update is not a valid ItemKey');
+}
+try {
+const updated = await api.update(key, v);
+cacheMap.set(updated.key, updated);
+return [
+cacheMap,
+core.validatePK(updated, pkType)
+];
+} catch (e) {
+logger$b.error("Error updating item", {
+error: e
+});
+throw e;
+}
+};
+
+const logger$a = logger$1.default.get('action');
+const action = async (api, cacheMap, pkType, key, action, body = {})=>{
+logger$a.default('action', {
+key,
+action,
+body
+});
+if (!core.isValidItemKey(key)) {
+logger$a.error('Key for Action is not a valid ItemKey: %j', key);
+throw new Error('Key for Action is not a valid ItemKey');
+}
+const updated = await api.action(key, action, body);
+cacheMap.set(updated.key, updated);
+return [
+cacheMap,
+core.validatePK(updated, pkType)
+];
+};
+
+const logger$9 = logger$1.default.get('allAction');
+const allAction = async (api, cacheMap, pkType, action, body = {}, locations = [])=>{
+logger$9.default('allAction', {
+action,
+body,
+locations
+});
+let ret = [];
+try {
+ret = await api.allAction(action, body, locations);
+ret.forEach((v)=>{
+cacheMap.set(v.key, v);
+});
+} catch (e) {
+// istanbul ignore next
+if (e instanceof httpApi.NotFoundError) ; else {
+throw e;
+}
+}
+return [
+cacheMap,
+core.validatePK(ret, pkType)
+];
+};
+
+const logger$8 = logger$1.default.get('facet');
+const facet = async (api, cacheMap, key, facet, params = {})=>{
+logger$8.default('facet', {
+key,
+facet
+});
+const ret = await api.facet(key, facet, params);
+return [
+cacheMap,
+ret
+];
+};
+
+const logger$7 = logger$1.default.get('allFacet');
+const allFacet = async (api, cacheMap, facet, params = {}, locations = [])=>{
+logger$7.default('allFacet', {
+facet,
+params,
+locations
+});
+const ret = await api.allFacet(facet, params, locations);
+return [
+cacheMap,
+ret
+];
+};
+
+const logger$6 = logger$1.default.get('find');
+const find = async (api, cacheMap, pkType, finder, params = {}, locations = [])=>{
+logger$6.default('find', {
+finder,
+params,
+locations
+});
+const ret = await api.find(finder, params, locations);
+ret.forEach((v)=>{
+cacheMap.set(v.key, v);
+});
+return [
+cacheMap,
+core.validatePK(ret, pkType)
+];
+};
+
+const logger$5 = logger$1.default.get('findOne');
+const findOne = async (api, cacheMap, pkType, finder, finderParams = {}, locations = [])=>{
+logger$5.default('findOne', {
+finder,
+finderParams,
+locations
+});
+const ret = await api.findOne(finder, finderParams, locations);
+cacheMap.set(ret.key, ret);
+return [
+cacheMap,
+core.validatePK(ret, pkType)
+];
+};
+
+const logger$4 = logger$1.default.get('set');
 // Normalize a key value to string for consistent comparison
 const normalizeKeyValue = (value)=>{
 return String(value);
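One behavioral detail worth noting in the extracted operations above: `retrieve` returns `[null, item]` when the key was already present in the cache map and `[cacheMap, item]` only when it had to fall through to `api.get`, so a caller can tell whether the cache map actually changed. A small illustration of that contract; the cache construction is assumed, and only the `null`-vs-`cacheMap` first element comes from the code above.

```ts
// Hedged illustration of the retrieve() cache-hit contract; types loosened to `any`.
async function retrieveTwice(cache: any, key: any) {
  // First call misses the cache: the first tuple element is the updated CacheMap.
  const [firstMap, first] = await cache.operations.retrieve(key);
  // Second call hits the cache: the first element is null, signalling no cache-map change.
  const [secondMap, second] = await cache.operations.retrieve(key);
  return { firstChanged: firstMap !== null, secondChanged: secondMap !== null, first, second };
}
```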
@@ -517,345 +788,119 @@ const normalizeKey = (key)=>{
 }
 return key;
 };
-const
-
-
-
-
-
+const set = async (cacheMap, pkType, key, v)=>{
+logger$4.default('set', {
+key,
+v
+});
+if (!core.isValidItemKey(key)) {
+logger$4.error('Key for Set is not a valid ItemKey: %j', key);
+throw new Error('Key for Set is not a valid ItemKey');
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-}
-return [
-cacheMap,
-core.validatePK(ret, pkType)
-];
-};
-const one = async (query = {}, locations = [])=>{
-logger$3.default('one', {
-query,
-locations
-});
-let retItem = null;
-try {
-retItem = await api.one(query, locations);
-if (retItem) {
-cacheMap.set(retItem.key, retItem);
-}
-} catch (e) {
-if (e instanceof httpApi.NotFoundError) ; else {
-throw e;
-}
-}
-return [
-cacheMap,
-retItem ? core.validatePK(retItem, pkType) : null
-];
-};
-const action = async (key, action, body = {})=>{
-logger$3.default('action', {
-key,
-action,
-body
-});
-// TODO: This is validating the key, but it doesn't have knowledge of the pkType
-// This should be looking at the parentCaches and calculating an array of pkTypes
-if (!core.isValidItemKey(key)) {
-logger$3.error('Key for Action is not a valid ItemKey: %j', key);
-throw new Error('Key for Action is not a valid ItemKey');
-}
-const updated = await api.action(key, action, body);
-cacheMap.set(updated.key, updated);
-return [
-cacheMap,
-core.validatePK(updated, pkType)
-];
-};
-const allAction = async (action, body = {}, locations = [])=>{
-logger$3.default('allAction', {
-action,
-body,
-locations
-});
-let ret = [];
-try {
-ret = await api.allAction(action, body, locations);
-ret.forEach((v)=>{
-cacheMap.set(v.key, v);
-});
-} catch (e) {
-// istanbul ignore next
-if (e instanceof httpApi.NotFoundError) ; else {
-throw e;
-}
-}
-return [
-cacheMap,
-core.validatePK(ret, pkType)
-];
-};
-const allFacet = async (facet, params = {}, locations = [])=>{
-logger$3.default('allFacet', {
-facet,
-params,
-locations
-});
-const ret = await api.allFacet(facet, params, locations);
-return [
-cacheMap,
-ret
-];
-};
-const create = async (v, locations = [])=>{
-logger$3.default('create', {
-v,
-locations
-});
-const created = await api.create(v, locations);
-cacheMap.set(created.key, created);
-return [
-cacheMap,
-core.validatePK(created, pkType)
-];
-};
-const get = async (key)=>{
-logger$3.default('get', {
-key
-});
-// TODO: This is validating the key, but it doesn't have knowledge of the pkType
-// This should be looking at the parentCaches and calculating an array of pkTypes
-if (!core.isValidItemKey(key)) {
-logger$3.error('Key for Get is not a valid ItemKey: %j', key);
-throw new Error('Key for Get is not a valid ItemKey');
-}
-let ret;
-try {
-ret = await api.get(key);
-if (ret) {
-cacheMap.set(ret.key, ret);
-}
-} catch (e) {
-logger$3.error("Error getting item for key", {
-key,
-message: e.message,
-stack: e.stack
-});
-throw e;
-}
-return [
-cacheMap,
-ret ? core.validatePK(ret, pkType) : null
-];
-};
-const retrieve = async (key)=>{
-logger$3.default('retrieve', {
-key
-});
-if (!core.isValidItemKey(key)) {
-logger$3.error('Key for Retrieve is not a valid ItemKey: %j', key);
-throw new Error('Key for Retrieve is not a valid ItemKey');
-}
-const containsItemKey = cacheMap.includesKey(key);
-let retrieved;
-if (containsItemKey) {
-logger$3.default('Looking for Object in Cache', key);
-retrieved = cacheMap.get(key);
-} else {
-logger$3.default('Object Not Found in Cache, Retrieving from Server API', {
-key
-});
-[, retrieved] = await get(key);
-}
-const retValue = [
-containsItemKey ? null : cacheMap,
-retrieved ? core.validatePK(retrieved, pkType) : null
-];
-// logger.debug('Returning from retrieve', { retValue });
-return retValue;
-};
-const remove = async (key)=>{
-logger$3.default('remove', {
-key
-});
-// TODO: This is validating the key, but it doesn't have knowledge of the pkType
-// This should be looking at the parentCaches and calculating an array of pkTypes
-if (!core.isValidItemKey(key)) {
-logger$3.error('Key for Remove is not a valid ItemKey: %j', key);
-throw new Error('Key for Remove is not a valid ItemKey');
-}
-try {
-await api.remove(key);
-cacheMap.delete(key);
-} catch (e) {
-logger$3.error("Error deleting item", {
-error: e
-});
-throw e;
-}
-return cacheMap;
-};
-const update = async (key, v)=>{
-logger$3.default('update', {
-key,
-v
-});
-// TODO: This is validating the key, but it doesn't have knowledge of the pkType
-// This should be looking at the parentCaches and calculating an array of pkTypes
-if (!core.isValidItemKey(key)) {
-logger$3.error('Key for Update is not a valid ItemKey: %j', key);
-throw new Error('Key for Update is not a valid ItemKey');
-}
-try {
-const updated = await api.update(key, v);
-cacheMap.set(updated.key, updated);
-return [
-cacheMap,
-core.validatePK(updated, pkType)
-];
-} catch (e) {
-logger$3.error("Error updating chat", {
-error: e
-});
-throw e;
-}
-};
-// Facets are a pass-thru for caches
-const facet = async (key, facet, params = {})=>{
-logger$3.default('facet', {
-key,
-facet
-});
-const ret = await api.facet(key, facet, params);
-return [
-cacheMap,
-ret
-];
-};
-const find = async (finder, params = {}, locations = [])=>{
-logger$3.default('find', {
-finder,
-params,
-locations
-});
-const ret = await api.find(finder, params, locations);
-ret.forEach((v)=>{
-cacheMap.set(v.key, v);
-});
-return [
-cacheMap,
-core.validatePK(ret, pkType)
-];
-};
-const findOne = async (finder, finderParams = {}, locations = [])=>{
-logger$3.default('findOne', {
-finder,
-finderParams,
-locations
-});
-const ret = await api.findOne(finder, finderParams, locations);
-cacheMap.set(ret.key, ret);
-return [
-cacheMap,
-core.validatePK(ret, pkType)
-];
-};
-const reset = async ()=>{
-cacheMap = new CacheMap(pkTypes);
-return [
-cacheMap
-];
-};
-const set = async (key, v)=>{
-logger$3.default('set', {
-key,
-v
-});
-// TODO: This is validating the key, but it doesn't have knowledge of the pkType
-// This should be looking at the parentCaches and calculating an array of pkTypes
-if (!core.isValidItemKey(key)) {
-logger$3.error('Key for Update is not a valid ItemKey: %j', key);
-throw new Error('Key for Update is not a valid ItemKey');
-}
-// TODO: This could be merged with the isValidItemKey check, later.
-core.validatePK(v, pkType);
-if (!isItemKeyEqualNormalized(key, v.key)) {
-logger$3.error('Key does not match item key: %j != %j', key, v.key);
-throw new Error('Key does not match item key');
-}
-cacheMap.set(key, v);
-return [
-cacheMap,
-core.validatePK(v, pkType)
-];
-};
-return {
-all,
-one,
-action,
-allAction,
-allFacet,
-create,
-get,
-retrieve,
-remove,
-update,
-facet,
-find,
-findOne,
-reset,
-set,
-pkTypes,
+// Validate the item's primary key
+core.validatePK(v, pkType);
+if (!isItemKeyEqualNormalized(key, v.key)) {
+logger$4.error('Key does not match item key: %j != %j', key, v.key);
+throw new Error('Key does not match item key');
+}
+cacheMap.set(key, v);
+return [
+cacheMap,
+core.validatePK(v, pkType)
+];
+};
+
+const reset = async (coordinate)=>{
+const cacheMap = new CacheMap(coordinate.kta);
+return [
 cacheMap
+];
+};
+
+// Import all operation functions
+const createOperations = (api, coordinate, cacheMap, pkType)=>{
+return {
+all: (query, locations)=>all(api, cacheMap, pkType, query, locations),
+one: (query, locations)=>one(api, cacheMap, pkType, query, locations),
+create: (item, locations)=>create(api, cacheMap, pkType, item, locations),
+get: (key)=>get.get(api, cacheMap, pkType, key),
+retrieve: (key)=>retrieve(api, cacheMap, pkType, key),
+remove: (key)=>remove(api, cacheMap, key),
+update: (key, item)=>update(api, cacheMap, pkType, key, item),
+action: (key, actionName, body)=>action(api, cacheMap, pkType, key, actionName, body),
+allAction: (actionName, body, locations)=>allAction(api, cacheMap, pkType, actionName, body, locations),
+facet: (key, facetName, params)=>facet(api, cacheMap, key, facetName, params),
+allFacet: (facetName, params, locations)=>allFacet(api, cacheMap, facetName, params, locations),
+find: (finder, params, locations)=>find(api, cacheMap, pkType, finder, params, locations),
+findOne: (finder, params, locations)=>findOne(api, cacheMap, pkType, finder, params, locations),
+set: (key, item)=>set(cacheMap, pkType, key, item),
+reset: ()=>reset(coordinate)
 };
 };

-const logger$
-const
-logger$
+const logger$3 = logger$1.default.get('Cache');
+const createCache = async (api, coordinate, registry)=>{
+logger$3.debug('createCache', {
 coordinate,
-
-registry: registry$1
+registry
 });
-
+// Create the cache map using the coordinate's key type array
+const cacheMap = new CacheMap(coordinate.kta);
+// Get the primary key type from the coordinate
+const pkType = coordinate.kta[0];
+// Create operations
+const operations = createOperations(api, coordinate, cacheMap, pkType);
 return {
-
-
+coordinate,
+registry,
+api,
+cacheMap,
+operations
 };
 };
+const isCache = (cache)=>{
+return cache !== null && typeof cache === 'object' && 'coordinate' in cache && 'registry' in cache && 'api' in cache && 'cacheMap' in cache && 'operations' in cache;
+};
+
+const logger$2 = logger$1.default.get("Instance");
+const createInstance = async (registry, coordinate, api)=>{
+logger$2.debug("createInstance", {
+coordinate,
+api,
+registry
+});
+return await createCache(api, coordinate, registry);
+};
 const isInstance = (instance)=>{
-return instance !== null && instance
+return instance !== null && typeof instance === 'object' && 'coordinate' in instance && 'registry' in instance && 'api' in instance && 'cacheMap' in instance && 'operations' in instance;
 };

-const logger$1 =
+const logger$1 = logger$1.default.get("InstanceFactory");
 /**
 * Factory function for creating cache instances
-*/ const createInstanceFactory = (
+*/ const createInstanceFactory = (api)=>{
 return (coordinate, context)=>{
 logger$1.debug("Creating cache instance", {
 coordinate,
 registry: context.registry,
-
-});
-
+api
+});
+// Since InstanceFactory must be synchronous but our createInstance is async,
+// we need to create a special cache instance synchronously and defer the async initialization
+const cacheMap = new CacheMap(coordinate.kta);
+const pkType = coordinate.kta[0];
+const operations = createOperations(api, coordinate, cacheMap, pkType);
+return {
+coordinate,
+registry: context.registry,
+api,
+cacheMap,
+operations
+};
 };
 };

-const logger =
+const logger = logger$1.default.get("Registry");
 /**
 * Factory function for creating cache registries
 */ const createRegistryFactory = ()=>{
@@ -886,8 +931,10 @@ exports.createAggregator = createAggregator;
 exports.createCache = createCache;
 exports.createInstance = createInstance;
 exports.createInstanceFactory = createInstanceFactory;
+exports.createOperations = createOperations;
 exports.createRegistry = createRegistry;
 exports.createRegistryFactory = createRegistryFactory;
+exports.isCache = isCache;
 exports.isInstance = isInstance;
 exports.toCacheConfig = toCacheConfig;
 //# sourceMappingURL=index.cjs.map