ueberdb2 1.4.16
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/npmpublish.yml +103 -0
- package/.travis.yml +46 -0
- package/CHANGELOG.md +167 -0
- package/CONTRIBUTING.md +103 -0
- package/LICENSE +202 -0
- package/README.md +356 -0
- package/SECURITY.md +5 -0
- package/databases/cassandra_db.js +250 -0
- package/databases/couch_db.js +201 -0
- package/databases/dirty_db.js +80 -0
- package/databases/dirty_git_db.js +78 -0
- package/databases/elasticsearch_db.js +288 -0
- package/databases/mock_db.js +42 -0
- package/databases/mongodb_db.js +136 -0
- package/databases/mssql_db.js +218 -0
- package/databases/mysql_db.js +178 -0
- package/databases/postgres_db.js +198 -0
- package/databases/postgrespool_db.js +11 -0
- package/databases/redis_db.js +128 -0
- package/databases/rethink_db.js +98 -0
- package/databases/sqlite_db.js +158 -0
- package/index.js +191 -0
- package/lib/AbstractDatabase.js +32 -0
- package/lib/CacheAndBufferLayer.js +610 -0
- package/package.json +122 -0
- package/test/lib/databases.js +62 -0
- package/test/lib/mysql.sql +84 -0
- package/test/test.js +312 -0
- package/test/test_bulk.js +71 -0
- package/test/test_lru.js +145 -0
- package/test/test_metrics.js +733 -0
- package/test/test_mysql.js +68 -0
- package/test/test_postgres.js +17 -0
package/test/test.js
ADDED
|
@@ -0,0 +1,312 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const wtfnode = require('wtfnode'); // This should be first so that it can instrument everything.
|
|
4
|
+
|
|
5
|
+
const Clitable = require('cli-table');
|
|
6
|
+
const Randexp = require('randexp');
|
|
7
|
+
const assert = require('assert').strict;
|
|
8
|
+
const databases = require('./lib/databases').databases;
|
|
9
|
+
const fs = require('fs').promises;
|
|
10
|
+
const ueberdb = require('../index');
|
|
11
|
+
const util = require('util');
|
|
12
|
+
|
|
13
|
+
// Longest key exercised by these tests; also the default random key length.
const maxKeyLength = 100;

// Generates a random string of exactly `length` characters via Randexp.
const randomString = (length = maxKeyLength) => {
  const pattern = new RegExp(`.{${length}}`);
  return new Randexp(pattern).gen();
};
|
|
15
|
+
|
|
16
|
+
// eslint-disable-next-line mocha/no-top-level-hooks
after(async function () {
  // Give node a grace period to exit on its own after all tests finish. If
  // something (e.g. an open connection or an active timer) keeps the event
  // loop alive, dump the culprits via wtfnode and exit with a failure code.
  const watchdog = setTimeout(() => {
    console.error('node should have exited by now but something is keeping it open ' +
        'such as an open connection or active timer');
    wtfnode.dump();
    process.exit(1); // eslint-disable-line no-process-exit
  }, 5000);
  // unref() so this watchdog timer itself does not prevent a clean exit.
  watchdog.unref();
});
|
|
27
|
+
|
|
28
|
+
// Returns an object with promisified equivalents of ueberdb.Database methods.
// Each wrapper is bound to `db` so `this` inside the method still refers to
// the database instance.
const promisifyDb = (db) => {
  const fns = ['init', 'close', 'get', 'getSub', 'findKeys', 'set', 'setSub', 'remove', 'flush'];
  return Object.fromEntries(fns.map((fn) => [fn, util.promisify(db[fn].bind(db))]));
};
|
|
35
|
+
|
|
36
|
+
describe(__filename, function () {
  // Shared benchmark table; one row is appended per (database, readCache,
  // writeBuffer) combination and the whole table is printed at the end.
  let speedTable;
  let db;
  let pdb;

  before(async function () {
    speedTable = new Clitable({
      head: [
        'Database',
        'read cache',
        'write buffer',
        '#',
        'ms/set',
        'ms/get',
        'ms/findKeys',
        'ms/remove',
        'total ms',
        'total ms/#',
      ],
      colWidths: [15, 15, 15, 8, 13, 13, 13, 13, 13, 13],
    });
  });

  after(async function () {
    console.log(speedTable.toString());
  });

  // Run the full suite against every configured database, with and without
  // the read cache and write buffer layers.
  for (const database of Object.keys(databases)) {
    const dbSettings = databases[database];
    describe(database, function () {
      for (const readCache of [false, true]) {
        describe(`${readCache ? '' : 'no '}read cache`, function () {
          for (const writeBuffer of [false, true]) {
            describe(`${writeBuffer ? '' : 'no '}write buffer`, function () {
              this.timeout(5000);

              before(async function () {
                // File-backed databases (e.g. dirty, sqlite) start from a
                // clean slate; ignore errors if the file doesn't exist yet.
                if (dbSettings.filename) await fs.unlink(dbSettings.filename).catch(() => {});
                db = new ueberdb.Database(database, dbSettings, {
                  // cache: 0 disables the read cache; writeInterval: 0
                  // disables write buffering.
                  ...(readCache ? {} : {cache: 0}),
                  ...(writeBuffer ? {} : {writeInterval: 0}),
                });
                pdb = promisifyDb(db);
                await pdb.init();
              });

              after(async function () {
                await pdb.close();
                if (dbSettings.filename) await fs.unlink(dbSettings.filename).catch(() => {});
              });

              describe('white space in key is not ignored', function () {
                for (const space of [false, true]) {
                  describe(`key ${space ? 'has' : 'does not have'} a trailing space`, function () {
                    let input;
                    let key;

                    before(async function () {
                      input = {a: 1, b: new Randexp(/.+/).gen()};
                      key = randomString(maxKeyLength - 1) + (space ? ' ' : '');
                      await pdb.set(key, input);
                    });

                    it('get(key) -> record', async function () {
                      const output = await pdb.get(key);
                      assert.equal(JSON.stringify(output), JSON.stringify(input));
                    });

                    it('get(`${key} `) -> nullish', async function () {
                      const output = await pdb.get(`${key} `);
                      assert(output == null);
                    });

                    if (space) {
                      it('get(key.slice(0, -1)) -> nullish', async function () {
                        const output = await pdb.get(key.slice(0, -1));
                        assert(output == null);
                      });
                    }
                  });
                }
              });

              it('get of unknown key -> nullish', async function () {
                const key = randomString();
                assert(await pdb.get(key) == null);
              });

              it('set+get works', async function () {
                const input = {a: 1, b: new Randexp(/.+/).gen()};
                const key = randomString();
                await pdb.set(key, input);
                const output = await pdb.get(key);
                assert.equal(JSON.stringify(output), JSON.stringify(input));
              });

              it('set+get with random key/value works', async function () {
                const input = {testLongString: new Randexp(/[a-f0-9]{50000}/).gen()};
                const key = randomString();
                await pdb.set(key, input);
                const output = await pdb.get(key);
                assert.equal(JSON.stringify(output), JSON.stringify(input));
              });

              it('findKeys works', async function () {
                const input = {a: 1, b: new Randexp(/.+/).gen()};
                // TODO setting a key with non ascii chars
                const key = new Randexp(/([a-z]\w{0,20})foo\1/).gen();
                await Promise.all([
                  pdb.set(`${key}:test2`, input),
                  pdb.set(`${key}:test`, input),
                ]);
                const output = await pdb.findKeys(`${key}:*`, null);
                for (const keyVal of output) {
                  const output = await pdb.get(keyVal);
                  assert.equal(JSON.stringify(output), JSON.stringify(input));
                }
              });

              it('remove works', async function () {
                const input = {a: 1, b: new Randexp(/.+/).gen()};
                const key = randomString();
                await pdb.set(key, input);
                assert.equal(JSON.stringify(await pdb.get(key)), JSON.stringify(input));
                await pdb.remove(key);
                assert(await pdb.get(key) == null);
              });

              it('getSub of existing property works', async function () {
                await pdb.set('k', {sub1: {sub2: 'v'}});
                assert.equal(await pdb.getSub('k', ['sub1', 'sub2']), 'v');
                assert.deepEqual(await pdb.getSub('k', ['sub1']), {sub2: 'v'});
                assert.deepEqual(await pdb.getSub('k', []), {sub1: {sub2: 'v'}});
              });

              it('getSub of missing property returns nullish', async function () {
                await pdb.set('k', {sub1: {}});
                assert(await pdb.getSub('k', ['sub1', 'sub2']) == null);

                await pdb.set('k', {});
                assert(await pdb.getSub('k', ['sub1', 'sub2']) == null);
                assert(await pdb.getSub('k', ['sub1']) == null);

                await pdb.remove('k');
                assert(await pdb.getSub('k', ['sub1', 'sub2']) == null);
                assert(await pdb.getSub('k', ['sub1']) == null);
                assert(await pdb.getSub('k', []) == null);
              });

              it('setSub can modify an existing property', async function () {
                await pdb.set('k', {sub1: {sub2: 'v'}});
                await pdb.setSub('k', ['sub1', 'sub2'], 'v2');
                assert.deepEqual(await pdb.get('k'), {sub1: {sub2: 'v2'}});

                await pdb.setSub('k', ['sub1'], 'v2');
                assert.deepEqual(await pdb.get('k'), {sub1: 'v2'});

                await pdb.setSub('k', [], 'v3');
                assert.equal(await pdb.get('k'), 'v3');
              });

              it('setSub can add a new property', async function () {
                await pdb.remove('k');
                await pdb.setSub('k', [], {});
                assert.deepEqual(await pdb.get('k'), {});
                await pdb.setSub('k', ['sub1'], {});
                assert.deepEqual(await pdb.get('k'), {sub1: {}});
                await pdb.setSub('k', ['sub1', 'sub2'], 'v');
                assert.deepEqual(await pdb.get('k'), {sub1: {sub2: 'v'}});

                await pdb.remove('k');
                await pdb.setSub('k', ['sub1', 'sub2'], 'v');
                assert.deepEqual(await pdb.get('k'), {sub1: {sub2: 'v'}});
              });

              it('setSub rejects attempts to set properties on primitives', async function () {
                for (const v of ['hello world', 42, true]) {
                  await pdb.set('k', v);
                  // BUGFIX: assert.rejects() returns a Promise; it must be
                  // awaited or the rejection check floats past the test and
                  // a failure would be reported as an unhandled rejection
                  // instead of failing this test.
                  await assert.rejects(pdb.setSub('k', ['sub'], 'x'), {
                    name: 'TypeError',
                    message: /property "sub" on non-object/,
                  });
                  assert.deepEqual(await pdb.get('k'), v);
                }
              });

              it('speed is acceptable', async function () {
                this.timeout(60000);

                // Per-database speed limits may be configured in the test
                // settings; these defaults apply otherwise.
                const {speeds: {
                  count = 1000,
                  setMax = 3,
                  getMax = 0.1,
                  findKeysMax = 3,
                  removeMax = 1,
                } = {}} = dbSettings || {};

                const input = {a: 1, b: new Randexp(/.+/).gen()};
                // TODO setting a key with non ascii chars
                const key = new Randexp(/([a-z]\w{0,20})foo\1/).gen();
                // Pre-allocate an array before starting the timer so that time spent growing the
                // array doesn't throw off the benchmarks.
                const promises = [...Array(count + 1)].map(() => null);

                const timers = {start: Date.now()};

                for (let i = 0; i < count; ++i) promises[i] = pdb.set(key + i, input);
                promises[count] = pdb.flush();
                await Promise.all(promises);
                timers.set = Date.now();

                for (let i = 0; i < count; ++i) promises[i] = pdb.get(key + i);
                await Promise.all(promises);
                timers.get = Date.now();

                for (let i = 0; i < count; ++i) promises[i] = pdb.findKeys(key + i, null);
                await Promise.all(promises);
                timers.findKeys = Date.now();

                for (let i = 0; i < count; ++i) promises[i] = pdb.remove(key + i);
                promises[count] = pdb.flush();
                await Promise.all(promises);
                timers.remove = Date.now();

                const timePerOp = {
                  set: (timers.set - timers.start) / count,
                  get: (timers.get - timers.set) / count,
                  findKeys: (timers.findKeys - timers.get) / count,
                  remove: (timers.remove - timers.findKeys) / count,
                };
                speedTable.push([
                  database,
                  readCache ? 'yes' : 'no',
                  writeBuffer ? 'yes' : 'no',
                  count,
                  timePerOp.set,
                  timePerOp.get,
                  timePerOp.findKeys,
                  timePerOp.remove,
                  timers.remove - timers.start,
                  (timers.remove - timers.start) / count,
                ]);

                // Removes the "Acceptable ms/op" column if there is no enforced limit.
                const filterColumn = (row) => {
                  if (readCache && writeBuffer) return row;
                  row.splice(1, 1);
                  return row;
                };
                const acceptableTable = new Clitable({
                  head: filterColumn(['op', 'Acceptable ms/op', 'Actual ms/op']),
                  colWidths: filterColumn([10, 18, 18]),
                });
                acceptableTable.push(...[
                  ['set', setMax, timePerOp.set],
                  ['get', getMax, timePerOp.get],
                  ['findKeys', findKeysMax, timePerOp.findKeys],
                  ['remove', removeMax, timePerOp.remove],
                ].map(filterColumn));
                console.log(acceptableTable.toString());

                // Only enforce the limits in the fully-cached/buffered
                // configuration; other configurations are informational.
                if (readCache && writeBuffer) {
                  assert(setMax >= timePerOp.set);
                  assert(getMax >= timePerOp.get);
                  assert(findKeysMax >= timePerOp.findKeys);
                  assert(removeMax >= timePerOp.remove);
                }
              });
            });
          }
        });
      }
    });
  }
});
|
|
311
|
+
|
|
312
|
+
// TODO: Need test which prefills with 1e7 of data then does a get.
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const assert = require('assert').strict;
|
|
4
|
+
const ueberdb = require('../index');
|
|
5
|
+
const util = require('util');
|
|
6
|
+
|
|
7
|
+
// Returns [0, 1, ..., N-1].
const range = (N) => Array.from({length: N}, (_, i) => i);
|
|
8
|
+
|
|
9
|
+
// Tests for write buffering/bulk-write behavior, driven by the mock database
// backend ('mock'), which emits events ('init', 'set', 'doBulk', 'close')
// instead of talking to a real database.
describe(__filename, function () {
  let db = null;
  let mock = null;
  // Creates a mock-backed ueberdb instance with the given wrapper settings.
  // The mock backend exposes itself on settings.mock so the test can attach
  // event listeners. Only init/close/set are promisified — that is all these
  // tests use.
  const createDb = async (wrapperSettings) => {
    const settings = {};
    const udb = new ueberdb.Database('mock', settings, wrapperSettings);
    mock = settings.mock;
    db = {};
    for (const fn of ['init', 'close', 'set']) db[fn] = util.promisify(udb[fn].bind(udb));
    // The mock requires a listener to complete each operation's callback.
    mock.once('init', (cb) => cb());
    await db.init();
  };

  afterEach(async function () {
    // Detach mock listeners before closing so stale handlers from one test
    // cannot interfere with the next, then answer the pending 'close' event.
    if (mock != null) {
      mock.removeAllListeners();
      mock.once('close', (cb) => cb());
      mock = null;
    }
    if (db != null) {
      await db.close();
      db = null;
    }
  });

  describe('bulkLimit', function () {
    // Falsy values should mean "no limit"; 1 and 2 exercise real limits.
    const bulkLimits = [0, false, null, undefined, '', 1, 2];
    for (const bulkLimit of bulkLimits) {
      it(bulkLimit === undefined ? 'undefined' : JSON.stringify(bulkLimit), async function () {
        await createDb({bulkLimit});
        // Records the size of each write that reaches the backend: 1 for a
        // single 'set', ops.length for a 'doBulk' batch.
        const gotWrites = [];
        mock.on('set', util.callbackify(async (k, v) => gotWrites.push(1)));
        mock.on('doBulk', util.callbackify(async (ops) => gotWrites.push(ops.length)));
        const N = 10;
        await Promise.all(range(N).map((i) => db.set(`key${i}`, `val${i}`)));
        // With no limit all N writes coalesce into one batch; otherwise the
        // N writes are split into N/limit batches of `limit` each.
        const wantLimit = bulkLimit || N;
        const wantWrites = range(N / wantLimit).map((i) => wantLimit);
        assert.deepEqual(gotWrites, wantWrites);
      });
    }
  });

  it('bulk failures are retried individually', async function () {
    await createDb({});
    const gotDoBulkCalls = [];
    // Make every bulk write fail so the wrapper must fall back to
    // individual 'set' operations.
    mock.on('doBulk', util.callbackify(async (ops) => {
      gotDoBulkCalls.push(ops.length);
      throw new Error('test');
    }));
    const gotWrites = new Map();
    const wantWrites = new Map();
    mock.on('set', util.callbackify(async (k, v) => gotWrites.set(k, v)));
    const N = 10;
    await Promise.all(range(N).map(async (i) => {
      const k = `key${i}`;
      const v = `val${i}`;
      // NOTE(review): values appear to reach the backend JSON-stringified —
      // confirmed only indirectly by this expectation.
      wantWrites.set(k, JSON.stringify(v));
      await db.set(k, v);
    }));
    // One (failed) bulk attempt containing all N ops, then each write
    // retried as an individual set.
    assert.deepEqual(gotDoBulkCalls, [N]);
    assert.deepEqual(gotWrites, wantWrites);
  });
});
|
package/test/test_lru.js
ADDED
|
@@ -0,0 +1,145 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const LRU = require('../lib/CacheAndBufferLayer').exportedForTesting.LRU;
|
|
4
|
+
const assert = require('assert').strict;
|
|
5
|
+
|
|
6
|
+
// Unit tests for the internal LRU cache used by CacheAndBufferLayer.
// LRU(capacity, isEvictable?) — the second argument is a predicate deciding
// whether a given entry may be evicted.
describe(__filename, function () {
  describe('capacity = 0', function () {
    it('constructor does not throw', async function () {
      new LRU(0);
    });

    describe('behavior when empty', function () {
      it('get() returns nullish', async function () {
        assert((new LRU(0)).get('k') == null);
      });

      it('empty iteration', async function () {
        // The LRU is iterable; an empty cache yields no entries.
        assert.equal([...(new LRU(0))].length, 0);
      });

      it('evictOld() does not throw', async function () {
        (new LRU(0)).evictOld();
      });
    });

    describe('single entry with evictable = false', function () {
      let evictable, lru, key, val;

      beforeEach(async function () {
        // The predicate reads the mutable `evictable` flag so individual
        // tests can flip evictability after insertion.
        evictable = false;
        lru = new LRU(0, () => evictable);
        key = 'k';
        val = 'v';
        lru.set(key, val);
      });

      it('get() works', async function () {
        assert.equal(lru.get(key), val);
      });

      it('iterate works', async function () {
        assert.deepEqual([...lru], [[key, val]]);
      });

      it('re-set() works', async function () {
        const val2 = 'v2';
        lru.set(key, val2);
        assert.equal(lru.get(key), val2);
        assert.deepEqual([...lru], [[key, val2]]);
      });

      it('evictOld() does not evict', async function () {
        // Non-evictable entries survive eviction even over capacity.
        lru.evictOld();
        assert.deepEqual([...lru], [[key, val]]);
      });

      it('evictOld() evicts after setting evictable = true', async function () {
        evictable = true;
        lru.evictOld();
        assert.deepEqual([...lru], []);
      });
    });

    describe('set immediately evicts if evictable', function () {
      it('explicitly evictable', async function () {
        // Capacity 0 + evictable entry: set() discards it right away.
        const lru = new LRU(0, () => true);
        lru.set('k', 'v');
        assert(lru.get('k') == null);
        assert.deepEqual([...lru], []);
      });

      it('is evictable by default', async function () {
        // With no predicate supplied, entries default to evictable.
        const lru = new LRU(0);
        lru.set('k', 'v');
        assert(lru.get('k') == null);
        assert.deepEqual([...lru], []);
      });
    });
  });

  describe('capacity = 2', function () {
    let evictable, lru;

    beforeEach(async function () {
      // Indirection through `evictable` lets each test swap the predicate
      // after the LRU has been constructed.
      evictable = () => false;
      lru = new LRU(2, (k, v) => evictable(k, v));
    });

    it('iterates oldest first', async function () {
      lru.set(0, '0');
      lru.set(1, '1');
      let i = 0;
      for (const [k, v] of lru) {
        assert.equal(k, i);
        assert.equal(v, `${i}`);
        i++;
      }
      assert.equal(i, 2);
    });

    it('get(k) updates recently used', async function () {
      lru.set(0, '0');
      lru.set(1, '1');
      assert.equal(lru.get(0), '0');
      // After get(0), key 0 moves to the most-recently-used (last) slot.
      assert.deepEqual([...lru], [[1, '1'], [0, '0']]);
    });

    it('get(k, false) does not update recently used', async function () {
      lru.set(0, '0');
      lru.set(1, '1');
      // Second argument false = peek without refreshing recency.
      assert.equal(lru.get(0, false), '0');
      assert.deepEqual([...lru], [[0, '0'], [1, '1']]);
    });

    it('re-set() updates recently used', async function () {
      lru.set(0, '0');
      lru.set(1, '1');
      lru.set(0, '00');
      assert.deepEqual([...lru], [[1, '1'], [0, '00']]);
    });

    it('evictOld() only evicts evictable entries', async function () {
      evictable = () => false;
      // Over-fill the cache (capacity 2) while nothing is evictable.
      lru.set(0, '0');
      lru.set(1, '1');
      lru.set(2, '2');
      lru.set(3, '3');
      assert.deepEqual([...lru], [[0, '0'], [1, '1'], [2, '2'], [3, '3']]);
      evictable = (k) => k >= 2;
      lru.evictOld();
      // The newer entries should be evicted because the older are dirty/writingInProgress.
      assert.deepEqual([...lru], [[0, '0'], [1, '1']]);
    });

    it('evictOld() does nothing if at or below capacity', async function () {
      evictable = () => true;
      lru.set(0, '0');
      lru.evictOld();
      assert.deepEqual([...lru], [[0, '0']]);
      lru.set(1, '1');
      lru.evictOld();
      assert.deepEqual([...lru], [[0, '0'], [1, '1']]);
    });
  });
});
|