@didcid/gatekeeper 0.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +65 -0
- package/dist/cjs/abstract-json-BJMq-iEa.cjs +216 -0
- package/dist/cjs/db/json-cache.cjs +75 -0
- package/dist/cjs/db/json-memory.cjs +29 -0
- package/dist/cjs/db/json.cjs +36 -0
- package/dist/cjs/db/mongo.cjs +242 -0
- package/dist/cjs/db/redis.cjs +291 -0
- package/dist/cjs/db/sqlite.cjs +330 -0
- package/dist/cjs/errors-DQaog-FG.cjs +34 -0
- package/dist/cjs/gatekeeper-client.cjs +369 -0
- package/dist/cjs/gatekeeper.cjs +42212 -0
- package/dist/cjs/index.cjs +12 -0
- package/dist/cjs/node.cjs +40 -0
- package/dist/esm/db/abstract-json.js +212 -0
- package/dist/esm/db/abstract-json.js.map +1 -0
- package/dist/esm/db/json-cache.js +68 -0
- package/dist/esm/db/json-cache.js.map +1 -0
- package/dist/esm/db/json-memory.js +22 -0
- package/dist/esm/db/json-memory.js.map +1 -0
- package/dist/esm/db/json.js +29 -0
- package/dist/esm/db/json.js.map +1 -0
- package/dist/esm/db/mongo.js +236 -0
- package/dist/esm/db/mongo.js.map +1 -0
- package/dist/esm/db/redis.js +285 -0
- package/dist/esm/db/redis.js.map +1 -0
- package/dist/esm/db/sqlite.js +305 -0
- package/dist/esm/db/sqlite.js.map +1 -0
- package/dist/esm/gatekeeper-client.js +363 -0
- package/dist/esm/gatekeeper-client.js.map +1 -0
- package/dist/esm/gatekeeper.js +1090 -0
- package/dist/esm/gatekeeper.js.map +1 -0
- package/dist/esm/index.js +4 -0
- package/dist/esm/index.js.map +1 -0
- package/dist/esm/node.js +8 -0
- package/dist/esm/node.js.map +1 -0
- package/dist/esm/types.js +2 -0
- package/dist/esm/types.js.map +1 -0
- package/dist/types/db/abstract-json.d.ts +26 -0
- package/dist/types/db/json-cache.d.ts +14 -0
- package/dist/types/db/json-memory.d.ts +9 -0
- package/dist/types/db/json.d.ts +8 -0
- package/dist/types/db/mongo.d.ts +23 -0
- package/dist/types/db/redis.d.ts +29 -0
- package/dist/types/db/sqlite.d.ts +30 -0
- package/dist/types/gatekeeper-client.d.ts +40 -0
- package/dist/types/gatekeeper.d.ts +67 -0
- package/dist/types/index.d.ts +3 -0
- package/dist/types/node.d.ts +7 -0
- package/dist/types/types.d.ts +226 -0
- package/package.json +128 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2024 Archetech
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
# Archon Gatekeeper
|
|
2
|
+
|
|
3
|
+
Gatekeeper is a node library for Archon.
|
|
4
|
+
It manages a local database of DIDs on the Archon network.
|
|
5
|
+
Gatekeeper functions are used to Create, Read, Update, and Delete DIDs (CRUD).
|
|
6
|
+
|
|
7
|
+
## Installation
|
|
8
|
+
|
|
9
|
+
```bash
|
|
10
|
+
npm install @didcid/gatekeeper
|
|
11
|
+
```
|
|
12
|
+
## Usage
|
|
13
|
+
|
|
14
|
+
### Library
|
|
15
|
+
|
|
16
|
+
The library must be configured by calling the start function with one of the supported databases:
|
|
17
|
+
- JSON - @didcid/gatekeeper/db/json
|
|
18
|
+
- JSON with memory cache - @didcid/gatekeeper/db/json-cache
|
|
19
|
+
- sqlite - @didcid/gatekeeper/db/sqlite
|
|
20
|
+
- mongodb - @didcid/gatekeeper/db/mongo
|
|
21
|
+
- redis - @didcid/gatekeeper/db/redis
|
|
22
|
+
|
|
23
|
+
```js
|
|
24
|
+
// Import using subpaths
|
|
25
|
+
import Gatekeeper from '@didcid/gatekeeper';
|
|
26
|
+
import DbRedis from '@didcid/gatekeeper/db/redis';
|
|
27
|
+
|
|
28
|
+
// Non-subpath imports
|
|
29
|
+
import Gatekeeper, { DbRedis } from '@didcid/gatekeeper';
|
|
30
|
+
|
|
31
|
+
const db_redis = new DbRedis('archon-test');
|
|
32
|
+
await db_redis.start();
|
|
33
|
+
|
|
34
|
+
const gatekeeper = new Gatekeeper({ db: db_redis });
|
|
35
|
+
const did = 'did:cid:z3v8AuaTV5VKcT9MJoSHkSTRLpXDoqcgqiKkwGBNSV4nVzb6kLk';
|
|
36
|
+
const docs = await gatekeeper.resolveDID(did);
|
|
37
|
+
console.log(JSON.stringify(docs, null, 4));
|
|
38
|
+
```
|
|
39
|
+
|
|
40
|
+
### Client
|
|
41
|
+
|
|
42
|
+
The GatekeeperClient is used to communicate with a Gatekeeper REST API service.
|
|
43
|
+
|
|
44
|
+
```js
|
|
45
|
+
// Import using subpaths
|
|
46
|
+
import GatekeeperClient from '@didcid/gatekeeper/client';
|
|
47
|
+
|
|
48
|
+
// Non-subpath imports
|
|
49
|
+
import { GatekeeperClient } from '@didcid/gatekeeper';
|
|
50
|
+
|
|
51
|
+
// Try connecting to the gatekeeper service every second,
|
|
52
|
+
// and start reporting (chatty) if not connected after 5 attempts
|
|
53
|
+
const gatekeeper = new GatekeeperClient();
|
|
54
|
+
await gatekeeper.connect({
|
|
55
|
+
url: 'http://gatekeeper-host:4224',
|
|
56
|
+
waitUntilReady: true,
|
|
57
|
+
intervalSeconds: 1,
|
|
58
|
+
chatty: false,
|
|
59
|
+
becomeChattyAfter: 5
|
|
60
|
+
});
|
|
61
|
+
|
|
62
|
+
const did = 'did:cid:z3v8AuaTV5VKcT9MJoSHkSTRLpXDoqcgqiKkwGBNSV4nVzb6kLk';
|
|
63
|
+
const docs = await gatekeeper.resolveDID(did);
|
|
64
|
+
console.log(JSON.stringify(docs, null, 4));
|
|
65
|
+
```
|
|
@@ -0,0 +1,216 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
var errors = require('./errors-DQaog-FG.cjs');
|
|
4
|
+
|
|
5
|
+
/**
 * Base class for the JSON-backed DID stores (file, cached-file, in-memory).
 *
 * Subclasses must implement `loadDb()` (returns the whole DB object) and
 * `writeDb(db)` (persists it). All mutating methods funnel through
 * `runExclusive` so read-modify-write cycles on the single DB object are
 * serialized within this process.
 *
 * DB object layout (all maps are plain objects):
 *   dids:   suffix -> array of events (operation stripped, opid kept)
 *   ops:    opid -> operation (deduplicated operation storage)
 *   queue:  registry -> array of pending operations
 *   blocks: registry -> { hash -> blockInfo }
 */
class AbstractJson {
    // Folder that holds the JSON file (default 'data').
    dataFolder;
    // Full path of the backing JSON file: `${dataFolder}/${name}.json`.
    dbName;
    // Promise chain acting as an async mutex; always settles to undefined.
    _lock = Promise.resolve();
    /**
     * Run `fn` after all previously scheduled exclusive tasks finish.
     * Returns `fn`'s own promise (so callers see its result/rejection),
     * while the internal chain swallows outcomes to keep the lock alive.
     * NOTE: the exact chaining order here is load-bearing — do not reorder.
     */
    runExclusive(fn) {
        const run = async () => await fn();
        // Run whether the previous task fulfilled or rejected.
        const chained = this._lock.then(run, run);
        // Keep the lock chain un-rejected regardless of fn's outcome.
        this._lock = chained.then(() => undefined, () => undefined);
        return chained;
    }
    /**
     * @param {string} name - logical DB name (file stem).
     * @param {string} [folder='data'] - directory for the JSON file.
     */
    constructor(name, folder = 'data') {
        this.dataFolder = folder;
        this.dbName = `${this.dataFolder}/${name}.json`;
    }
    // No startup work needed for the base class; subclasses may override.
    async start() {
        return;
    }
    // No shutdown work needed for the base class; subclasses may override.
    async stop() {
        return;
    }
    /**
     * Extract the suffix (last `:`-separated segment) from a DID string.
     * @throws {errors.InvalidDIDError} when the DID is falsy or has no suffix.
     */
    splitSuffix(did) {
        if (!did) {
            throw new errors.InvalidDIDError();
        }
        const suffix = did.split(':').pop();
        if (!suffix) {
            throw new errors.InvalidDIDError();
        }
        return suffix;
    }
    /**
     * Append one event to a DID's event list. The event's `operation`
     * payload (if any) is stored once in `db.ops` under its `opid`, and
     * only the stripped event (with the opid reference) is kept per-DID.
     */
    async addEvent(did, event) {
        const suffix = this.splitSuffix(did);
        return this.runExclusive(async () => {
            const db = this.loadDb();
            if (!db.ops) {
                db.ops = {};
            }
            // Store operation separately if present
            if (event.opid && event.operation) {
                db.ops[event.opid] = event.operation;
            }
            // Strip operation and store only opid reference
            const { operation, ...strippedEvent } = event;
            if (db.dids[suffix]) {
                db.dids[suffix].push(strippedEvent);
            }
            else {
                db.dids[suffix] = [strippedEvent];
            }
            this.writeDb(db);
        });
    }
    /**
     * Return the event list for a DID, re-attaching each event's operation
     * from the `ops` table. Returns [] for unknown DIDs or on any error
     * (including an invalid DID — errors are deliberately swallowed here).
     */
    async getEvents(did) {
        try {
            const db = this.loadDb();
            const suffix = this.splitSuffix(did);
            const events = db.dids[suffix] || [];
            // Hydrate operations from ops table
            return events.map(event => {
                if (event.operation) {
                    return event;
                }
                if (event.opid && db.ops?.[event.opid]) {
                    return { ...event, operation: db.ops[event.opid] };
                }
                return event;
            });
        }
        catch {
            return [];
        }
    }
    /**
     * Replace a DID's entire event list. Operations carried on incoming
     * events are (re)stored in `db.ops`, then stripped before persisting.
     */
    async setEvents(did, events) {
        const suffix = this.splitSuffix(did);
        return this.runExclusive(async () => {
            const db = this.loadDb();
            if (!db.ops) {
                db.ops = {};
            }
            // Update operations in ops table if modified, then strip from events
            const strippedEvents = events.map(event => {
                if (event.opid && event.operation) {
                    db.ops[event.opid] = event.operation;
                }
                const { operation, ...stripped } = event;
                return stripped;
            });
            db.dids[suffix] = strippedEvents;
            this.writeDb(db);
        });
    }
    /**
     * Remove a DID's event list. Persists only if the DID existed.
     * NOTE: referenced operations stay in `db.ops` (not garbage-collected).
     */
    async deleteEvents(did) {
        const suffix = this.splitSuffix(did);
        return this.runExclusive(async () => {
            const db = this.loadDb();
            if (db.dids[suffix]) {
                delete db.dids[suffix];
                this.writeDb(db);
            }
        });
    }
    /**
     * Append an operation to a registry's pending queue.
     * @returns {Promise<number>} queue length after the append.
     */
    async queueOperation(registry, op) {
        return this.runExclusive(async () => {
            const db = this.loadDb();
            if (!db.queue) {
                db.queue = {};
            }
            if (registry in db.queue) {
                db.queue[registry].push(op);
            }
            else
                db.queue[registry] = [op];
            this.writeDb(db);
            return db.queue[registry].length;
        });
    }
    /**
     * Return a registry's pending queue, or [] when missing or on error.
     */
    async getQueue(registry) {
        try {
            const db = this.loadDb();
            if (!db.queue || !db.queue[registry]) {
                return [];
            }
            return db.queue[registry];
        }
        catch {
            return [];
        }
    }
    /**
     * Remove from a registry's queue every item whose signature value matches
     * one in `batch`. Returns true on success (including no-op), false on error.
     * NOTE(review): matching uses `signature?.value` here, while the Mongo
     * backend matches on `signature?.hash` — confirm which field is canonical.
     */
    async clearQueue(registry, batch) {
        return this.runExclusive(async () => {
            try {
                const db = this.loadDb();
                if (!db.queue) {
                    return true;
                }
                const oldQueue = db.queue[registry];
                if (!oldQueue) {
                    return true;
                }
                db.queue[registry] = oldQueue.filter(item => !batch.some(op => op.signature?.value === item.signature?.value));
                this.writeDb(db);
                return true;
            }
            catch {
                return false;
            }
        });
    }
    /** Return all known DID suffixes (keys of the `dids` map). */
    async getAllKeys() {
        const db = this.loadDb();
        return Object.keys(db.dids);
    }
    /**
     * Upsert a block record for a registry, keyed by `blockInfo.hash`.
     * @returns {Promise<boolean>} always true (write errors propagate).
     */
    async addBlock(registry, blockInfo) {
        return this.runExclusive(async () => {
            const db = this.loadDb();
            if (!db.blocks) {
                db.blocks = {};
            }
            if (!(registry in db.blocks)) {
                db.blocks[registry] = {};
            }
            db.blocks[registry][blockInfo.hash] = blockInfo;
            this.writeDb(db);
            return true;
        });
    }
    /**
     * Look up a block for a registry.
     *   blockId undefined -> block with the greatest `height` (linear scan)
     *   blockId number    -> block with that exact `height` (linear scan)
     *   blockId string    -> block with that hash (direct key lookup)
     * Returns null when the registry or block is unknown.
     */
    async getBlock(registry, blockId) {
        const db = this.loadDb();
        const registryBlocks = db.blocks?.[registry];
        if (!registryBlocks)
            return null;
        const blockEntries = Object.entries(registryBlocks);
        if (blockEntries.length === 0)
            return null;
        if (blockId === undefined) {
            // Get block with max height
            let maxBlock = null;
            for (const [, block] of blockEntries) {
                if (!maxBlock || block.height > maxBlock.height) {
                    maxBlock = block;
                }
            }
            return maxBlock;
        }
        if (typeof blockId === 'number') {
            // Search for block with matching height
            for (const [, block] of blockEntries) {
                if (block.height === blockId)
                    return block;
            }
            return null;
        }
        // Lookup by hash (O(1))
        return registryBlocks[blockId] || null;
    }
    /** Upsert a single operation into the `ops` table under `opid`. */
    async addOperation(opid, op) {
        return this.runExclusive(async () => {
            const db = this.loadDb();
            if (!db.ops) {
                db.ops = {};
            }
            db.ops[opid] = op;
            this.writeDb(db);
        });
    }
    /** Fetch one operation by opid, or null when absent. */
    async getOperation(opid) {
        const db = this.loadDb();
        return db.ops?.[opid] ?? null;
    }
}

exports.AbstractJson = AbstractJson;
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
Object.defineProperty(exports, '__esModule', { value: true });
|
|
4
|
+
|
|
5
|
+
var fs = require('fs');
|
|
6
|
+
var abstractJson = require('../abstract-json-BJMq-iEa.cjs');
|
|
7
|
+
require('../errors-DQaog-FG.cjs');
|
|
8
|
+
|
|
9
|
+
/**
 * JSON file store with an in-memory write-through cache.
 *
 * All reads/writes go through `dbCache`; the file on disk is refreshed by a
 * periodic save loop (every 20s) and one final save on `stop()`. `writeDb`
 * only updates the cache — durability comes from the save loop.
 */
class DbJsonCache extends abstractJson.AbstractJson {
    // In-memory DB object; null until first loadDb()/saveDb().
    dbCache = null;
    // Timer id for the pending saveLoop tick; null when the loop is stopped.
    saveLoopTimeoutId = null;
    /**
     * @param {string} name - logical DB name (file stem).
     * @param {string} [folder='data'] - directory for the JSON file.
     */
    constructor(name, folder = 'data') {
        super(name, folder);
        // Warm the cache immediately (creates the file if missing/corrupt).
        this.loadDb();
    }
    /** Start the periodic save loop (performs an immediate save first). */
    async start() {
        await this.saveLoop();
    }
    /** Flush the cache to disk and cancel the save loop. */
    async stop() {
        this.saveDb(); // Save the current state one last time
        if (this.saveLoopTimeoutId !== null) {
            clearTimeout(this.saveLoopTimeoutId); // Cancel the next scheduled saveLoop
            this.saveLoopTimeoutId = null; // Reset the timeout ID
        }
    }
    /**
     * Save the cache, then reschedule itself in 20 seconds.
     * Errors are logged and do not break the loop.
     */
    async saveLoop() {
        try {
            this.saveDb();
            console.log(`DID db saved to ${this.dbName}`);
        }
        catch (error) {
            console.error(`Error in saveLoop: ${error}`);
        }
        this.saveLoopTimeoutId = setTimeout(() => this.saveLoop(), 20 * 1000);
    }
    /**
     * Return the cached DB, loading it from disk on first use.
     * Fix: previously the file was read and JSON-parsed TWICE per cold load,
     * and the unvalidated parse was assigned to `dbCache` before the `dids`
     * check ran. Now the file is read once and the cache is only assigned
     * after validation succeeds.
     * Falls back to a fresh `{ dids: {} }` (persisted immediately) when the
     * file is missing, unreadable, or fails validation.
     */
    loadDb() {
        if (!this.dbCache) {
            try {
                const parsed = JSON.parse(fs.readFileSync(this.dbName, 'utf-8'));
                if (!parsed.dids) {
                    throw new Error();
                }
                this.dbCache = parsed;
            }
            catch {
                this.dbCache = { dids: {} };
                this.saveDb();
            }
        }
        return this.dbCache;
    }
    /** Update the cache only; the save loop handles persistence. */
    writeDb(db) {
        this.dbCache = db;
    }
    /** Persist the cache to disk, creating the data folder if needed. */
    saveDb() {
        if (!fs.existsSync(this.dataFolder)) {
            fs.mkdirSync(this.dataFolder, { recursive: true });
        }
        if (!this.dbCache) {
            this.dbCache = { dids: {} };
        }
        fs.writeFileSync(this.dbName, JSON.stringify(this.dbCache, null, 4));
    }
    /** Delete the backing file, drop the cache, and reload a fresh DB. */
    async resetDb() {
        if (fs.existsSync(this.dbName)) {
            fs.rmSync(this.dbName);
        }
        this.dbCache = null;
        return this.loadDb();
    }
}

exports.default = DbJsonCache;
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
Object.defineProperty(exports, '__esModule', { value: true });
|
|
4
|
+
|
|
5
|
+
var abstractJson = require('../abstract-json-BJMq-iEa.cjs');
|
|
6
|
+
require('../errors-DQaog-FG.cjs');
|
|
7
|
+
|
|
8
|
+
/**
 * Purely in-memory DID store. The DB is kept as a JSON string snapshot:
 * `loadDb` hands out a fresh deep copy on every call and `writeDb` replaces
 * the snapshot, so callers can never mutate stored state by reference.
 * Nothing ever touches the filesystem.
 */
class DbJsonMemory extends abstractJson.AbstractJson {
    // Serialized snapshot of the DB; null until first load.
    dbCache = null;
    /**
     * @param {string} name - logical DB name (unused for storage here).
     * @param {string} [folder='data'] - kept for interface parity.
     */
    constructor(name, folder = 'data') {
        super(name, folder);
        this.loadDb();
    }
    /** Return a deep copy of the DB, seeding an empty one on first use. */
    loadDb() {
        this.dbCache ??= JSON.stringify({ dids: {} });
        return JSON.parse(this.dbCache);
    }
    /** Replace the snapshot with a serialized copy of `db`. */
    writeDb(db) {
        this.dbCache = JSON.stringify(db);
    }
    /** Discard all state and return a fresh empty DB. */
    async resetDb() {
        this.dbCache = null;
        return this.loadDb();
    }
}

exports.default = DbJsonMemory;
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
Object.defineProperty(exports, '__esModule', { value: true });
|
|
4
|
+
|
|
5
|
+
var fs = require('fs');
|
|
6
|
+
var abstractJson = require('../abstract-json-BJMq-iEa.cjs');
|
|
7
|
+
require('../errors-DQaog-FG.cjs');
|
|
8
|
+
|
|
9
|
+
/**
 * Uncached JSON file store: every `loadDb` re-reads the file from disk and
 * every `writeDb` rewrites it in full. Simple and durable, at the cost of
 * I/O on each operation.
 */
class DbJson extends abstractJson.AbstractJson {
    /**
     * @param {string} name - logical DB name (file stem).
     * @param {string} [folder='data'] - directory for the JSON file.
     */
    constructor(name, folder = 'data') {
        super(name, folder);
    }
    /**
     * Read and parse the DB file. When it is missing or unparsable, a fresh
     * empty DB is written to disk and returned.
     */
    loadDb() {
        let db;
        try {
            db = JSON.parse(fs.readFileSync(this.dbName, 'utf-8'));
        }
        catch {
            db = { dids: {} };
            this.writeDb(db);
        }
        return db;
    }
    /** Serialize `db` to the backing file, creating the folder if needed. */
    writeDb(db) {
        const folderExists = fs.existsSync(this.dataFolder);
        if (!folderExists) {
            fs.mkdirSync(this.dataFolder, { recursive: true });
        }
        const serialized = JSON.stringify(db, null, 4);
        fs.writeFileSync(this.dbName, serialized);
    }
    /** Remove the backing file if it exists; next load recreates it. */
    async resetDb() {
        if (fs.existsSync(this.dbName)) {
            fs.rmSync(this.dbName);
        }
    }
}

exports.default = DbJson;
|
|
@@ -0,0 +1,242 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
Object.defineProperty(exports, '__esModule', { value: true });
|
|
4
|
+
|
|
5
|
+
var mongodb = require('mongodb');
|
|
6
|
+
var errors = require('../errors-DQaog-FG.cjs');
|
|
7
|
+
|
|
8
|
+
const MONGO_NOT_STARTED_ERROR = 'Mongo not started. Call start() first.';
|
|
9
|
+
/**
 * MongoDB-backed DID store.
 *
 * Collections used:
 *   dids       - { id: didSuffix, events: [...] } (operations stripped)
 *   operations - { opid, ...operation } (deduplicated operation storage)
 *   queue      - { id: registry, ops: [...] } (pending operations)
 *   blocks     - { registry, hash, height, ... } (one doc per block)
 *
 * Every method other than start/stop requires start() to have been called.
 */
class DbMongo {
    // Name of the Mongo database to use.
    dbName;
    // MongoClient instance; null until start() / after stop().
    client;
    // Db handle; null until start() / after stop().
    db;
    /** @param {string} dbName - Mongo database name. */
    constructor(dbName) {
        this.dbName = dbName;
        this.client = null;
        this.db = null;
    }
    /**
     * Extract the suffix (last `:`-separated segment) from a DID string.
     * @throws {errors.InvalidDIDError} when the DID is falsy or has no suffix.
     */
    splitSuffix(did) {
        if (!did) {
            throw new errors.InvalidDIDError();
        }
        const suffix = did.split(':').pop();
        if (!suffix) {
            throw new errors.InvalidDIDError();
        }
        return suffix;
    }
    /**
     * Connect (URL from ARCHON_MONGODB_URL, defaulting to localhost:27017)
     * and ensure the indexes the query methods rely on.
     */
    async start() {
        this.client = new mongodb.MongoClient(process.env.ARCHON_MONGODB_URL || 'mongodb://localhost:27017');
        await this.client.connect();
        this.db = this.client.db(this.dbName);
        await this.db.collection('dids').createIndex({ id: 1 });
        await this.db.collection('blocks').createIndex({ registry: 1, height: -1 }); // for latest and height lookups
        await this.db.collection('blocks').createIndex({ registry: 1, hash: 1 }, { unique: true }); // for hash lookup
        await this.db.collection('operations').createIndex({ opid: 1 }, { unique: true });
    }
    /** Close the client connection and clear the handles. */
    async stop() {
        if (this.client) {
            await this.client.close();
            this.client = null;
            this.db = null;
        }
    }
    /**
     * Empty the dids, queue, and operations collections.
     * NOTE(review): the 'blocks' collection is NOT cleared here — confirm
     * whether block-scan state is meant to survive a reset.
     */
    async resetDb() {
        if (!this.db) {
            throw new Error(MONGO_NOT_STARTED_ERROR);
        }
        await this.db.collection('dids').deleteMany({});
        await this.db.collection('queue').deleteMany({});
        await this.db.collection('operations').deleteMany({});
    }
    /**
     * Append one event to a DID's event array (upserting the DID doc).
     * The event's operation payload is stored once in 'operations' and
     * stripped from the per-DID event, which keeps only the opid reference.
     * @returns {Promise<number>} modified + upserted doc count.
     */
    async addEvent(did, event) {
        if (!this.db) {
            throw new Error(MONGO_NOT_STARTED_ERROR);
        }
        const id = this.splitSuffix(did);
        // Store operation separately if present
        if (event.opid && event.operation) {
            await this.addOperation(event.opid, event.operation);
        }
        // Strip operation and store only opid reference
        const { operation, ...strippedEvent } = event;
        const result = await this.db.collection('dids').updateOne({ id }, {
            $push: {
                events: { $each: [strippedEvent] }
            }
        }, { upsert: true });
        // Return how many docs were modified
        return result.modifiedCount + (result.upsertedCount ?? 0);
    }
    /**
     * Replace a DID's entire event array (upserting the DID doc).
     * Operations carried on incoming events are stored to 'operations'
     * sequentially, then stripped before the $set.
     */
    async setEvents(did, events) {
        if (!this.db) {
            throw new Error(MONGO_NOT_STARTED_ERROR);
        }
        const id = this.splitSuffix(did);
        // Update operations in ops collection if modified, then strip from events
        const strippedEvents = [];
        for (const event of events) {
            if (event.opid && event.operation) {
                await this.addOperation(event.opid, event.operation);
            }
            const { operation, ...stripped } = event;
            strippedEvents.push(stripped);
        }
        await this.db
            .collection('dids')
            .updateOne({ id }, { $set: { events: strippedEvents } }, { upsert: true });
    }
    /**
     * Return a DID's events with operations re-attached from 'operations'
     * (one lookup per opid-only event). Returns [] on any error; an invalid
     * DID still throws, since splitSuffix runs before the try block.
     */
    async getEvents(did) {
        if (!this.db) {
            throw new Error(MONGO_NOT_STARTED_ERROR);
        }
        const id = this.splitSuffix(did);
        try {
            const row = await this.db.collection('dids').findOne({ id });
            const events = row?.events ?? [];
            // Hydrate operations from ops collection
            const hydrated = [];
            for (const event of events) {
                if (event.operation) {
                    hydrated.push(event);
                }
                else if (event.opid) {
                    const operation = await this.getOperation(event.opid);
                    if (operation) {
                        hydrated.push({ ...event, operation });
                    }
                    else {
                        hydrated.push(event);
                    }
                }
                else {
                    hydrated.push(event);
                }
            }
            return hydrated;
        }
        catch {
            return [];
        }
    }
    /**
     * Delete a DID's document entirely.
     * @returns {Promise<number>} number of docs deleted (0 or 1).
     */
    async deleteEvents(did) {
        if (!this.db) {
            throw new Error(MONGO_NOT_STARTED_ERROR);
        }
        const id = this.splitSuffix(did);
        const result = await this.db.collection('dids').deleteOne({ id });
        return result.deletedCount ?? 0;
    }
    /**
     * Return all known DID suffixes.
     * NOTE(review): loads full documents; a projection on `id` would avoid
     * pulling every event array into memory — confirm scale before changing.
     */
    async getAllKeys() {
        if (!this.db) {
            throw new Error(MONGO_NOT_STARTED_ERROR);
        }
        const rows = await this.db.collection('dids').find().toArray();
        return rows.map(row => row.id);
    }
    /**
     * Append an operation to a registry's queue doc (upserting it).
     * @returns {Promise<number>} queue length after the append, per the
     * post-update document returned by findOneAndUpdate.
     */
    async queueOperation(registry, op) {
        if (!this.db) {
            throw new Error(MONGO_NOT_STARTED_ERROR);
        }
        const result = await this.db.collection('queue').findOneAndUpdate({ id: registry }, {
            $push: {
                ops: {
                    $each: [op]
                }
            }
        }, {
            upsert: true,
            returnDocument: 'after'
        });
        return result?.ops.length ?? 0;
    }
    /** Return a registry's pending ops, or [] when missing or on error. */
    async getQueue(registry) {
        if (!this.db) {
            throw new Error(MONGO_NOT_STARTED_ERROR);
        }
        try {
            const row = await this.db.collection('queue').findOne({ id: registry });
            return row?.ops ?? [];
        }
        catch {
            return [];
        }
    }
    /**
     * Remove from a registry's queue every op whose signature.hash appears
     * in `batch`. Batch items without a hash are ignored; an all-hashless
     * batch is treated as a successful no-op.
     * NOTE(review): matching uses `signature?.hash`, while the JSON backend
     * matches `signature?.value` — confirm which field is canonical.
     * @returns {Promise<boolean>} true on success/no-op, false on error.
     */
    async clearQueue(registry, batch) {
        if (!this.db) {
            throw new Error(MONGO_NOT_STARTED_ERROR);
        }
        try {
            const hashes = batch
                .map(op => op.signature?.hash)
                .filter((h) => !!h);
            if (hashes.length === 0) {
                return true;
            }
            await this.db
                .collection('queue')
                .updateOne({ id: registry }, { $pull: { ops: { 'signature.hash': { $in: hashes } } } });
            return true;
        }
        catch (error) {
            console.error(error);
            return false;
        }
    }
    /**
     * Upsert a block record keyed by (registry, hash).
     * @returns {Promise<boolean>} true on success, false on any error
     * (errors are swallowed — best-effort write).
     */
    async addBlock(registry, blockInfo) {
        if (!this.db) {
            throw new Error(MONGO_NOT_STARTED_ERROR);
        }
        try {
            // Store block info in the "blocks" collection
            await this.db.collection('blocks').updateOne({ registry, hash: blockInfo.hash }, { $set: blockInfo }, { upsert: true });
            return true;
        }
        catch (error) {
            return false;
        }
    }
    /**
     * Look up a block for a registry.
     *   blockId undefined -> highest-height block (sorted query, limit 1)
     *   blockId number    -> block with that exact height
     *   otherwise         -> block with that hash
     * Returns null when not found or on any error.
     */
    async getBlock(registry, blockId) {
        if (!this.db) {
            throw new Error(MONGO_NOT_STARTED_ERROR);
        }
        try {
            const blocks = this.db.collection('blocks');
            let query;
            if (blockId === undefined) {
                // Get block with max height
                query = { registry };
                return await blocks
                    .find(query)
                    .sort({ height: -1 })
                    .limit(1)
                    .next(); // more efficient than toArray()[0]
            }
            if (typeof blockId === 'number') {
                query = { registry, height: blockId };
            }
            else {
                query = { registry, hash: blockId };
            }
            return await blocks.findOne(query);
        }
        catch (error) {
            return null;
        }
    }
    /** Upsert one operation document, flattened as { opid, ...op }. */
    async addOperation(opid, op) {
        if (!this.db) {
            throw new Error(MONGO_NOT_STARTED_ERROR);
        }
        await this.db.collection('operations').updateOne({ opid }, { $set: { opid, ...op } }, { upsert: true });
    }
    /**
     * Fetch one operation by opid with the _id/opid fields projected away,
     * returning the bare operation object (or null when absent).
     */
    async getOperation(opid) {
        if (!this.db) {
            throw new Error(MONGO_NOT_STARTED_ERROR);
        }
        const doc = await this.db.collection('operations').findOne({ opid }, { projection: { _id: 0, opid: 0 } });
        return doc;
    }
}

exports.default = DbMongo;
|