@boxyhq/saml-jackson 0.2.3-beta.177 → 0.2.3-beta.210
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/nodemon.json +12 -0
- package/.nyc_output/36a3e9e1-42eb-468d-a9ec-8d206fedcd3e.json +1 -0
- package/.nyc_output/8c0af85a-b807-45bb-8331-20c3aabe15df.json +1 -0
- package/.nyc_output/ad148b90-7401-4df2-959f-3fdcf81a06ec.json +1 -0
- package/.nyc_output/processinfo/36a3e9e1-42eb-468d-a9ec-8d206fedcd3e.json +1 -0
- package/.nyc_output/processinfo/8c0af85a-b807-45bb-8331-20c3aabe15df.json +1 -0
- package/.nyc_output/processinfo/ad148b90-7401-4df2-959f-3fdcf81a06ec.json +1 -0
- package/.nyc_output/processinfo/index.json +1 -0
- package/package.json +23 -15
- package/.eslintrc.js +0 -13
- package/prettier.config.js +0 -4
- package/src/controller/api.js +0 -167
- package/src/controller/error.js +0 -12
- package/src/controller/oauth/allowed.js +0 -19
- package/src/controller/oauth/code-verifier.js +0 -16
- package/src/controller/oauth/redirect.js +0 -18
- package/src/controller/oauth.js +0 -321
- package/src/controller/utils.js +0 -19
- package/src/db/db.js +0 -81
- package/src/db/db.test.js +0 -302
- package/src/db/encrypter.js +0 -36
- package/src/db/mem.js +0 -111
- package/src/db/mongo.js +0 -89
- package/src/db/redis.js +0 -88
- package/src/db/sql/entity/JacksonIndex.js +0 -42
- package/src/db/sql/entity/JacksonStore.js +0 -42
- package/src/db/sql/entity/JacksonTTL.js +0 -23
- package/src/db/sql/model/JacksonIndex.js +0 -9
- package/src/db/sql/model/JacksonStore.js +0 -10
- package/src/db/sql/model/JacksonTTL.js +0 -8
- package/src/db/sql/sql.js +0 -153
- package/src/db/store.js +0 -42
- package/src/db/utils.js +0 -30
- package/src/env.js +0 -39
- package/src/index.js +0 -67
- package/src/jackson.js +0 -161
- package/src/read-config.js +0 -24
- package/src/saml/claims.js +0 -40
- package/src/saml/saml.js +0 -223
- package/src/saml/x509.js +0 -48
- package/src/test/api.test.js +0 -186
- package/src/test/data/metadata/boxyhq.js +0 -6
- package/src/test/data/metadata/boxyhq.xml +0 -30
- package/src/test/data/saml_response +0 -1
- package/src/test/oauth.test.js +0 -342
package/src/controller/utils.js
DELETED
@@ -1,19 +0,0 @@
-const indexNames = {
-  entityID: 'entityID',
-  tenantProduct: 'tenantProduct',
-};
-
-const extractAuthToken = (req) => {
-  const authHeader = req.get('authorization');
-  const parts = (authHeader || '').split(' ');
-  if (parts.length > 1) {
-    return parts[1];
-  }
-
-  return null;
-};
-
-module.exports = {
-  indexNames,
-  extractAuthToken,
-};
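For orientation, a minimal usage sketch (not part of the published package) of how the deleted helper was typically consumed, assuming an Express-style request object that exposes headers via req.get(); the route below is hypothetical:

const express = require('express');
const { extractAuthToken, indexNames } = require('./utils.js');

const app = express();

app.get('/whoami', (req, res) => {
  // 'Authorization: Bearer abc123' -> 'abc123'; null when no bearer token is sent
  const token = extractAuthToken(req);
  res.json({ token, knownIndexes: Object.keys(indexNames) });
});

app.listen(3000);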
package/src/db/db.js
DELETED
@@ -1,81 +0,0 @@
-const mem = require('./mem.js');
-const mongo = require('./mongo.js');
-const redis = require('./redis.js');
-const sql = require('./sql/sql.js');
-const store = require('./store.js');
-const encrypter = require('./encrypter.js');
-
-const decrypt = (res, encryptionKey) => {
-  if (res.iv && res.tag) {
-    return JSON.parse(
-      encrypter.decrypt(res.value, res.iv, res.tag, encryptionKey)
-    );
-  }
-
-  return JSON.parse(res.value);
-};
-
-class DB {
-  constructor(db, encryptionKey) {
-    this.db = db;
-    this.encryptionKey = encryptionKey;
-  }
-
-  async get(namespace, key) {
-    const res = await this.db.get(namespace, key);
-    if (!res) {
-      return null;
-    }
-
-    return decrypt(res, this.encryptionKey);
-  }
-
-  async getByIndex(namespace, idx) {
-    const res = await this.db.getByIndex(namespace, idx);
-    const encryptionKey = this.encryptionKey;
-    return res.map((r) => {
-      return decrypt(r, encryptionKey);
-    });
-  }
-
-  // ttl is in seconds
-  async put(namespace, key, val, ttl = 0, ...indexes) {
-    if (ttl > 0 && indexes && indexes.length > 0) {
-      throw new Error('secondary indexes not allow on a store with ttl');
-    }
-
-    const dbVal = this.encryptionKey
-      ? encrypter.encrypt(JSON.stringify(val), this.encryptionKey)
-      : { value: JSON.stringify(val) };
-
-    return await this.db.put(namespace, key, dbVal, ttl, ...indexes);
-  }
-
-  async delete(namespace, key) {
-    return await this.db.delete(namespace, key);
-  }
-
-  store(namespace, ttl = 0) {
-    return store.new(namespace, this, ttl);
-  }
-}
-
-module.exports = {
-  new: async (options) => {
-    const encryptionKey = options.encryptionKey
-      ? Buffer.from(options.encryptionKey, 'latin1')
-      : null;
-    switch (options.engine) {
-      case 'redis':
-        return new DB(await redis.new(options), encryptionKey);
-      case 'sql':
-        return new DB(await sql.new(options), encryptionKey);
-      case 'mongo':
-        return new DB(await mongo.new(options), encryptionKey);
-      case 'mem':
-        return new DB(await mem.new(options), encryptionKey);
-      default:
-        throw new Error('unsupported db engine: ' + options.engine);
-    }
-  },
-};
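Roughly how the deleted DB facade was driven (the same pattern appears in db.test.js below); this is a sketch assuming the in-memory engine, with illustrative namespace and record values:

const DB = require('./db.js');

(async () => {
  // a 32-character key, so Buffer.from(key, 'latin1') yields the 32 bytes aes-256-gcm expects
  const db = await DB.new({
    engine: 'mem',
    encryptionKey: '3yGrTcnKPBqqHoH3zZMAU6nt4bmIYb2q',
  });

  const configStore = db.store('saml:config');

  // values are JSON-stringified (and encrypted when encryptionKey is set) before hitting the engine
  await configStore.put('1', { id: '1', city: 'London' }, { name: 'city', value: 'London' });

  console.log(await configStore.get('1'));
  console.log(await configStore.getByIndex({ name: 'city', value: 'London' }));
})();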
package/src/db/db.test.js
DELETED
@@ -1,302 +0,0 @@
-const t = require('tap');
-
-const DB = require('./db.js');
-
-const encryptionKey = '3yGrTcnKPBqqHoH3zZMAU6nt4bmIYb2q';
-
-let configStores = [];
-let ttlStores = [];
-const ttl = 3;
-
-const record1 = {
-  id: '1',
-  name: 'Deepak',
-  city: 'London',
-};
-const record2 = {
-  id: '2',
-  name: 'Sama',
-  city: 'London',
-};
-
-const memDbConfig = {
-  engine: 'mem',
-  ttl: 1,
-};
-
-const redisDbConfig = {
-  engine: 'redis',
-  url: 'redis://localhost:6379',
-};
-
-const postgresDbConfig = {
-  engine: 'sql',
-  url: 'postgresql://postgres:postgres@localhost:5432/postgres',
-  type: 'postgres',
-  ttl: 1,
-  cleanupLimit: 1,
-};
-
-const mongoDbConfig = {
-  engine: 'mongo',
-  url: 'mongodb://localhost:27017/jackson',
-};
-
-const mysqlDbConfig = {
-  engine: 'sql',
-  url: 'mysql://root:mysql@localhost:3307/mysql',
-  type: 'mysql',
-  ttl: 1,
-  cleanupLimit: 1,
-};
-
-const mariadbDbConfig = {
-  engine: 'sql',
-  url: 'mariadb://root@localhost:3306/mysql',
-  type: 'mariadb',
-  ttl: 1,
-  cleanupLimit: 1,
-};
-
-const dbs = [
-  {
-    ...memDbConfig,
-  },
-  {
-    ...memDbConfig,
-    encryptionKey,
-  },
-  {
-    ...redisDbConfig,
-  },
-  {
-    ...redisDbConfig,
-    encryptionKey,
-  },
-  {
-    ...postgresDbConfig,
-  },
-  {
-    ...postgresDbConfig,
-    encryptionKey,
-  },
-  {
-    ...mongoDbConfig,
-  },
-  {
-    ...mongoDbConfig,
-    encryptionKey,
-  },
-  {
-    ...mysqlDbConfig,
-  },
-  {
-    ...mysqlDbConfig,
-    encryptionKey,
-  },
-  {
-    ...mariadbDbConfig,
-  },
-  {
-    ...mariadbDbConfig,
-    encryptionKey,
-  },
-];
-
-t.before(async () => {
-  for (const idx in dbs) {
-    const opts = dbs[idx];
-    const db = await DB.new(opts);
-
-    configStores.push(db.store('saml:config'));
-    ttlStores.push(db.store('oauth:session', ttl));
-  }
-});
-
-t.teardown(async () => {
-  process.exit(0);
-});
-
-t.test('dbs', ({ end }) => {
-  for (const idx in configStores) {
-    const configStore = configStores[idx];
-    const ttlStore = ttlStores[idx];
-    let dbEngine = dbs[idx].engine;
-    if (dbs[idx].type) {
-      dbEngine += ': ' + dbs[idx].type;
-    }
-    t.test('put(): ' + dbEngine, async (t) => {
-      await configStore.put(
-        record1.id,
-        record1,
-        {
-          // secondary index on city
-          name: 'city',
-          value: record1.city,
-        },
-        {
-          // secondary index on name
-          name: 'name',
-          value: record1.name,
-        }
-      );
-
-      await configStore.put(
-        record2.id,
-        record2,
-        {
-          // secondary index on city
-          name: 'city',
-          value: record2.city,
-        },
-        {
-          // secondary index on name
-          name: 'name',
-          value: record2.name,
-        }
-      );
-
-      t.end();
-    });
-
-    t.test('get(): ' + dbEngine, async (t) => {
-      const ret1 = await configStore.get(record1.id);
-      const ret2 = await configStore.get(record2.id);
-
-      t.same(ret1, record1, 'unable to get record1');
-      t.same(ret2, record2, 'unable to get record2');
-
-      t.end();
-    });
-
-    t.test('getByIndex(): ' + dbEngine, async (t) => {
-      const ret1 = await configStore.getByIndex({
-        name: 'name',
-        value: record1.name,
-      });
-
-      const ret2 = await configStore.getByIndex({
-        name: 'city',
-        value: record1.city,
-      });
-
-      t.same(ret1, [record1], 'unable to get index "name"');
-      t.same(
-        ret2.sort((a, b) => a.id.localeCompare(b.id)),
-        [record1, record2].sort((a, b) => a.id.localeCompare(b.id)),
-        'unable to get index "city"'
-      );
-
-      t.end();
-    });
-
-    t.test('delete(): ' + dbEngine, async (t) => {
-      await configStore.delete(record1.id);
-
-      const ret0 = await configStore.getByIndex({
-        name: 'city',
-        value: record1.city,
-      });
-
-      t.same(ret0, [record2], 'unable to get index "city" after delete');
-
-      await configStore.delete(record2.id);
-
-      const ret1 = await configStore.get(record1.id);
-      const ret2 = await configStore.get(record2.id);
-
-      const ret3 = await configStore.getByIndex({
-        name: 'name',
-        value: record1.name,
-      });
-      const ret4 = await configStore.getByIndex({
-        name: 'city',
-        value: record1.city,
-      });
-
-      t.same(ret1, null, 'delete for record1 failed');
-      t.same(ret2, null, 'delete for record2 failed');
-
-      t.same(ret3, [], 'delete for record1 failed');
-      t.same(ret4, [], 'delete for record2 failed');
-
-      t.end();
-    });
-
-    t.test('ttl indexes: ' + dbEngine, async (t) => {
-      try {
-        await ttlStore.put(
-          record1.id,
-          record1,
-          {
-            // secondary index on city
-            name: 'city',
-            value: record1.city,
-          },
-          {
-            // secondary index on name
-            name: 'name',
-            value: record1.name,
-          }
-        );
-
-        t.fail('expecting a secondary indexes not allow on a store with ttl');
-      } catch (err) {
-        t.ok(err, 'got expected error');
-      }
-
-      t.end();
-    });
-
-    t.test('ttl put(): ' + dbEngine, async (t) => {
-      await ttlStore.put(record1.id, record1);
-
-      await ttlStore.put(record2.id, record2);
-
-      t.end();
-    });
-
-    t.test('ttl get(): ' + dbEngine, async (t) => {
-      const ret1 = await ttlStore.get(record1.id);
-      const ret2 = await ttlStore.get(record2.id);
-
-      t.same(ret1, record1, 'unable to get record1');
-      t.same(ret2, record2, 'unable to get record2');
-
-      t.end();
-    });
-
-    t.test('ttl expiry: ' + dbEngine, async (t) => {
-      // mongo runs ttl task every 60 seconds
-      if (dbEngine.startsWith('mongo')) {
-        t.end();
-        return;
-      }
-
-      await new Promise((resolve) =>
-        setTimeout(resolve, (2 * ttl + 0.5) * 1000)
-      );
-
-      const ret1 = await ttlStore.get(record1.id);
-      const ret2 = await ttlStore.get(record2.id);
-
-      t.same(ret1, null, 'ttl for record1 failed');
-      t.same(ret2, null, 'ttl for record2 failed');
-
-      t.end();
-    });
-  }
-
-  t.test('db.new() error', async (t) => {
-    try {
-      await DB.new('somedb');
-      t.fail('expecting an unsupported db error');
-    } catch (err) {
-      t.ok(err, 'got expected error');
-    }
-
-    t.end();
-  });
-
-  end();
-});
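Running this suite as written assumes the backing services are reachable locally at the hard-coded URLs above (Redis on 6379, Postgres on 5432, MongoDB on 27017, MySQL on 3307, MariaDB on 3306); each engine is exercised twice, once plain and once with encryption at rest enabled via encryptionKey.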
package/src/db/encrypter.js
DELETED
@@ -1,36 +0,0 @@
-const crypto = require('crypto');
-
-const ALGO = 'aes-256-gcm';
-const BLOCK_SIZE = 16; // 128 bit
-
-const encrypt = (text, key) => {
-  const iv = crypto.randomBytes(BLOCK_SIZE);
-  const cipher = crypto.createCipheriv(ALGO, key, iv);
-
-  let ciphertext = cipher.update(text, 'utf8', 'base64');
-  ciphertext += cipher.final('base64');
-  return {
-    iv: iv.toString('base64'),
-    tag: cipher.getAuthTag().toString('base64'),
-    value: ciphertext,
-  };
-};
-
-const decrypt = (ciphertext, iv, tag, key) => {
-  const decipher = crypto.createDecipheriv(
-    ALGO,
-    key,
-    Buffer.from(iv, 'base64')
-  );
-  decipher.setAuthTag(Buffer.from(tag, 'base64'));
-
-  let cleartext = decipher.update(ciphertext, 'base64', 'utf8');
-  cleartext += decipher.final('utf8');
-
-  return cleartext;
-};
-
-module.exports = {
-  encrypt,
-  decrypt,
-};
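A small round-trip sketch (not from the diff) of the deleted encrypter, reusing the 32-character test key from db.test.js since aes-256-gcm needs exactly 32 key bytes:

const encrypter = require('./encrypter.js');

// same key shape db.js builds: Buffer.from(<32-char string>, 'latin1')
const key = Buffer.from('3yGrTcnKPBqqHoH3zZMAU6nt4bmIYb2q', 'latin1');

const sealed = encrypter.encrypt(JSON.stringify({ hello: 'world' }), key);
// sealed is { iv, tag, value }, all base64-encoded strings
const plain = encrypter.decrypt(sealed.value, sealed.iv, sealed.tag, key);

console.log(JSON.parse(plain)); // { hello: 'world' }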
package/src/db/mem.js
DELETED
@@ -1,111 +0,0 @@
-// This is an in-memory implementation to be used with testing and prototyping only
-const dbutils = require('./utils.js');
-
-class Mem {
-  constructor(options) {
-    return (async () => {
-      this.store = {}; // map of key, value
-      this.indexes = {}; // map of key, Set
-      this.cleanup = {}; // map of indexes for cleanup when store key is deleted
-      this.ttlStore = {}; // map of key to ttl
-
-      if (options.ttl) {
-        this.ttlCleanup = async () => {
-          const now = Date.now();
-          for (const k in this.ttlStore) {
-            if (this.ttlStore[k].expiresAt < now) {
-              await this.delete(
-                this.ttlStore[k].namespace,
-                this.ttlStore[k].key
-              );
-            }
-          }
-
-          this.timerId = setTimeout(this.ttlCleanup, options.ttl * 1000);
-        };
-
-        this.timerId = setTimeout(this.ttlCleanup, options.ttl * 1000);
-      }
-
-      return this;
-    })();
-  }
-
-  async get(namespace, key) {
-    let res = this.store[dbutils.key(namespace, key)];
-    if (res) {
-      return res;
-    }
-
-    return null;
-  }
-
-  async getByIndex(namespace, idx) {
-    const dbKeys = await this.indexes[dbutils.keyForIndex(namespace, idx)];
-
-    const ret = [];
-    for (const dbKey of dbKeys || []) {
-      ret.push(await this.get(namespace, dbKey));
-    }
-
-    return ret;
-  }
-
-  async put(namespace, key, val, ttl = 0, ...indexes) {
-    const k = dbutils.key(namespace, key);
-
-    this.store[k] = val;
-
-    if (ttl) {
-      this.ttlStore[k] = {
-        namespace,
-        key,
-        expiresAt: Date.now() + ttl * 1000,
-      };
-    }
-
-    // no ttl support for secondary indexes
-    for (const idx of indexes || []) {
-      const idxKey = dbutils.keyForIndex(namespace, idx);
-      let set = this.indexes[idxKey];
-      if (!set) {
-        set = new Set();
-        this.indexes[idxKey] = set;
-      }
-
-      set.add(key);
-
-      const cleanupKey = dbutils.keyFromParts(dbutils.indexPrefix, k);
-      let cleanup = this.cleanup[cleanupKey];
-      if (!cleanup) {
-        cleanup = new Set();
-        this.cleanup[cleanupKey] = cleanup;
-      }
-
-      cleanup.add(idxKey);
-    }
-  }
-
-  async delete(namespace, key) {
-    const k = dbutils.key(namespace, key);
-
-    delete this.store[k];
-
-    const idxKey = dbutils.keyFromParts(dbutils.indexPrefix, k);
-    // delete secondary indexes and then the mapping of the seconary indexes
-    const dbKeys = this.cleanup[idxKey];
-
-    for (const dbKey of dbKeys || []) {
-      this.indexes[dbKey] && this.indexes[dbKey].delete(key);
-    }
-
-    delete this.cleanup[idxKey];
-    delete this.ttlStore[k];
-  }
-}
-
-module.exports = {
-  new: async (options) => {
-    return new Mem(options);
-  },
-};
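All of the engines expose the same low-level contract (get/getByIndex/put/delete keyed by namespace); a quick sketch against the in-memory engine directly, with illustrative values (serialization and encryption normally happen one layer up, in db.js):

const mem = require('./mem.js');

(async () => {
  const db = await mem.new({}); // no ttl option, so no cleanup timer is armed

  await db.put('saml:config', '1', { value: '{"id":"1","city":"London"}' }, 0, {
    name: 'city',
    value: 'London',
  });

  console.log(await db.get('saml:config', '1'));
  console.log(await db.getByIndex('saml:config', { name: 'city', value: 'London' }));

  await db.delete('saml:config', '1');
})();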
package/src/db/mongo.js
DELETED
@@ -1,89 +0,0 @@
-const { MongoClient } = require('mongodb');
-const dbutils = require('./utils.js');
-
-class Mongo {
-  constructor(options) {
-    return (async () => {
-      this.client = new MongoClient(options.url);
-
-      await this.client.connect();
-      this.db = this.client.db();
-      this.collection = this.db.collection('jacksonStore');
-
-      await this.collection.createIndex({ indexes: 1 });
-      await this.collection.createIndex(
-        { expiresAt: 1 },
-        { expireAfterSeconds: 1 }
-      );
-
-      return this;
-    })();
-  }
-
-  async get(namespace, key) {
-    let res = await this.collection.findOne({
-      _id: dbutils.key(namespace, key),
-    });
-    if (res && res.value) {
-      return res.value;
-    }
-
-    return null;
-  }
-
-  async getByIndex(namespace, idx) {
-    const docs = await this.collection
-      .find({
-        indexes: dbutils.keyForIndex(namespace, idx),
-      })
-      .toArray();
-
-    const ret = [];
-    for (const doc of docs || []) {
-      ret.push(doc.value);
-    }
-
-    return ret;
-  }
-
-  async put(namespace, key, val, ttl = 0, ...indexes) {
-    const doc = {
-      value: val,
-    };
-
-    if (ttl) {
-      doc.expiresAt = new Date(Date.now() + ttl * 1000);
-    }
-
-    // no ttl support for secondary indexes
-    for (const idx of indexes || []) {
-      const idxKey = dbutils.keyForIndex(namespace, idx);
-
-      if (!doc.indexes) {
-        doc.indexes = [];
-      }
-
-      doc.indexes.push(idxKey);
-    }
-
-    await this.collection.updateOne(
-      { _id: dbutils.key(namespace, key) },
-      {
-        $set: doc,
-      },
-      { upsert: true }
-    );
-  }
-
-  async delete(namespace, key) {
-    return await this.collection.deleteOne({
-      _id: dbutils.key(namespace, key),
-    });
-  }
-}
-
-module.exports = {
-  new: async (options) => {
-    return new Mongo(options);
-  },
-};
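One design point worth noting: unlike the in-memory and SQL engines, this engine never sweeps expired rows itself; put() only stamps an expiresAt date, and the expireAfterSeconds index created in the constructor leaves deletion to MongoDB's background TTL monitor. That monitor only runs about once a minute, which is why db.test.js skips its ttl-expiry assertion for the mongo engine.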