core-3nweb-client-lib 0.28.4 → 0.29.1
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry, and is provided for informational purposes only.
- package/build/core/asmail/inbox/{msg-indexing.d.ts → msg-indexing/index.d.ts} +3 -4
- package/build/core/asmail/inbox/msg-indexing/index.js +96 -0
- package/build/core/asmail/inbox/msg-indexing/logs-n-entries.d.ts +52 -0
- package/build/core/asmail/inbox/msg-indexing/logs-n-entries.js +333 -0
- package/build/core/asmail/inbox/msg-indexing/sql-indexing.d.ts +4 -0
- package/build/core/asmail/inbox/msg-indexing/sql-indexing.js +320 -0
- package/build/lib-client/3nstorage/xsp-fs/attrs.js +1 -1
- package/build/lib-client/cryptor/cryptor-wasm.js +1 -1
- package/build/lib-client/cryptor/cryptor.wasm +0 -0
- package/build/lib-sqlite-on-3nstorage/deferred.d.ts +6 -0
- package/build/lib-sqlite-on-3nstorage/deferred.js +29 -0
- package/build/lib-sqlite-on-3nstorage/index.d.ts +34 -17
- package/build/lib-sqlite-on-3nstorage/index.js +158 -55
- package/build/lib-sqlite-on-3nstorage/sqljs.js +13 -8
- package/build/lib-sqlite-on-3nstorage/synced.d.ts +73 -0
- package/build/lib-sqlite-on-3nstorage/synced.js +167 -0
- package/package.json +1 -1
- package/build/core/asmail/inbox/msg-indexing.js +0 -518
@@ -1,518 +0,0 @@
-"use strict";
-/*
- Copyright (C) 2022 3NSoft Inc.
-
- This program is free software: you can redistribute it and/or modify it under
- the terms of the GNU General Public License as published by the Free Software
- Foundation, either version 3 of the License, or (at your option) any later
- version.
-
- This program is distributed in the hope that it will be useful, but
- WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
- See the GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License along with
- this program. If not, see <http://www.gnu.org/licenses/>.
-*/
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.MsgIndex = void 0;
-const synced_1 = require("../../../lib-common/processes/synced");
-const timed_cache_1 = require("../../../lib-common/timed-cache");
-const lib_sqlite_on_3nstorage_1 = require("../../../lib-sqlite-on-3nstorage");
-const file_1 = require("../../../lib-common/exceptions/file");
-const buffer_utils_1 = require("../../../lib-common/buffer-utils");
-const fs_sync_utils_1 = require("../../../lib-client/fs-sync-utils");
-const rxjs_1 = require("rxjs");
-const operators_1 = require("rxjs/operators");
-// XXX Use TableColumnsAndParams from lib-with-sql.
-// And should we update here sqlite from that project?
-const indexTab = 'inbox_index';
-const column = {
-    msgId: 'msg_id',
-    msgType: 'msg_type',
-    deliveryTS: 'delivery_ts',
-    key: 'msg_key',
-    keyStatus: 'key_status',
-    mainObjHeaderOfs: 'main_obj_header_ofs',
-    removeAfter: 'remove_after'
-};
-Object.freeze(column);
-const createIndexTab = `CREATE TABLE ${indexTab} (
-    ${column.msgId} TEXT PRIMARY KEY,
-    ${column.msgType} TEXT,
-    ${column.deliveryTS} INTEGER,
-    ${column.key} BLOB,
-    ${column.keyStatus} TEXT,
-    ${column.mainObjHeaderOfs} INTEGER,
-    ${column.removeAfter} INTEGER DEFAULT 0
-) STRICT`;
-const insertRec = `INSERT INTO ${indexTab} (
-    ${column.msgId}, ${column.msgType}, ${column.deliveryTS},
-    ${column.key}, ${column.keyStatus}, ${column.mainObjHeaderOfs},
-    ${column.removeAfter}
-) VALUES (
-    $${column.msgId}, $${column.msgType}, $${column.deliveryTS},
-    $${column.key}, $${column.keyStatus}, $${column.mainObjHeaderOfs},
-    $${column.removeAfter}
-)`;
-const deleteRec = `DELETE FROM ${indexTab}
-    WHERE ${column.msgId}=$${column.msgId}`;
-function listMsgInfos(db, fromTS) {
-    let result;
-    if (fromTS) {
-        result = db.exec(`SELECT ${column.msgId}, ${column.msgType}, ${column.deliveryTS} FROM ${indexTab} WHERE ${column.deliveryTS}>$fromTS`, {
-            '$fromTS': fromTS
-        });
-    }
-    else {
-        result = db.exec(`SELECT ${column.msgId}, ${column.msgType}, ${column.deliveryTS} FROM ${indexTab}`);
-    }
-    if (result.length === 0) {
-        return [];
-    }
-    const { columns, values: rows } = result[0];
-    const indecies = columnIndecies(columns, column.msgId, column.msgType, column.deliveryTS);
-    const msgs = [];
-    for (const row of rows) {
-        msgs.push({
-            msgId: row[indecies.get(column.msgId)],
-            msgType: row[indecies.get(column.msgType)],
-            deliveryTS: row[indecies.get(column.deliveryTS)]
-        });
-    }
-    return msgs;
-}
-function columnIndecies(columns, ...columnNames) {
-    const indecies = new Map();
-    for (const colName of columnNames) {
-        indecies.set(colName, columns.indexOf(colName));
-    }
-    return indecies;
-}
-function deleteMsgFrom(db, msgId) {
-    db.exec(deleteRec, { [`$${column.msgId}`]: msgId });
-    return (db.getRowsModified() > 0);
-}
-const LIMIT_RECORDS_PER_FILE = 200;
-class RecordsInSQL {
-    constructor(files, latest, fileTSs) {
-        this.files = files;
-        this.latest = latest;
-        this.fileTSs = fileTSs;
-        this.older = (0, timed_cache_1.makeTimedCache)(10 * 60 * 1000);
-        Object.seal(this);
-    }
-    async add(msgInfo, decrInfo, removeAfter) {
-        const { msgId, msgType, deliveryTS } = msgInfo;
-        const { key, keyStatus, msgKeyPackLen: mainObjHeaderOfs } = decrInfo;
-        const params = {
-            [`$${column.msgId}`]: msgId,
-            [`$${column.msgType}`]: msgType,
-            [`$${column.deliveryTS}`]: deliveryTS,
-            [`$${column.key}`]: key,
-            [`$${column.keyStatus}`]: keyStatus,
-            [`$${column.mainObjHeaderOfs}`]: mainObjHeaderOfs,
-            [`$${column.removeAfter}`]: removeAfter,
-        };
-        const { db, fileTS } = await this.getDbFor(msgInfo.deliveryTS);
-        db.db.exec(insertRec, params);
-        if (fileTS) {
-            await db.saveToFile();
-            return;
-        }
-        else {
-            return {
-                type: 'addition',
-                record: {
-                    msgId, msgType, deliveryTS,
-                    key: buffer_utils_1.base64.pack(key),
-                    keyStatus, mainObjHeaderOfs, removeAfter
-                }
-            };
-        }
-    }
-    async saveLatestWithAttr(logTail) {
-        // XXX
-    }
-    async getDbFor(deliveryTS) {
-        if ((this.fileTSs.length === 0)
-            || (this.fileTSs[this.fileTSs.length - 1] < deliveryTS)) {
-            return { db: this.latest };
-        }
-        let fileTS = this.fileTSs[this.fileTSs.length - 1];
-        for (let i = (this.fileTSs.length - 2); i >= 0; i -= 1) {
-            if (this.fileTSs[i] >= deliveryTS) {
-                fileTS = this.fileTSs[i];
-            }
-            else {
-                break;
-            }
-        }
-        const db = await this.dbFromCacheOrInit(fileTS);
-        return { db, fileTS };
-    }
-    async dbFromCacheOrInit(fileTS) {
-        let db = this.older.get(fileTS);
-        if (db) {
-            return db;
-        }
-        const dbFile = await this.files.getDBFile(fileTS);
-        db = await lib_sqlite_on_3nstorage_1.SQLiteOnSyncedFS.makeAndStart(dbFile);
-        this.older.set(fileTS, db);
-        return db;
-    }
-    async remove(msgId) {
-        for await (const { db, fileTS } of this.iterateDBs()) {
-            if (deleteMsgFrom(db.db, msgId)) {
-                if (fileTS) {
-                    await db.saveToFile();
-                    return;
-                }
-                else {
-                    return {
-                        type: 'removal',
-                        msgId
-                    };
-                }
-            }
-        }
-    }
-    async *iterateDBs() {
-        yield { db: this.latest };
-        for (let i = (this.fileTSs.length - 1); i >= 0; i = -1) {
-            const fileTS = this.fileTSs[i];
-            if (!fileTS) {
-                continue;
-            }
-            const db = await this.dbFromCacheOrInit(fileTS);
-            if (db) {
-                yield { db, fileTS };
-            }
-        }
-    }
-    async listMsgs(fromTS) {
-        let lst = listMsgInfos(this.latest.db, fromTS);
-        for (let i = (this.fileTSs.length - 1); i >= 0; i -= 1) {
-            const fileTS = this.fileTSs[i];
-            if (fromTS && (fileTS <= fromTS)) {
-                break;
-            }
-            const older = await this.dbFromCacheOrInit(fileTS);
-            lst = listMsgInfos(older.db, fromTS).concat(lst);
-        }
-        lst.sort((a, b) => (a.deliveryTS - b.deliveryTS));
-        return lst;
-    }
-    async getIndexWith(deliveryTS) {
-        let fileTS = undefined;
-        for (let i = (this.fileTSs.length - 1); i >= 0; i -= 1) {
-            const fTS = this.fileTSs[i];
-            if (fTS < deliveryTS) {
-                break;
-            }
-            fileTS = fTS;
-        }
-        if (fileTS) {
-            return await this.dbFromCacheOrInit(fileTS);
-        }
-        else {
-            return this.latest;
-        }
-    }
-    async getKeyFor(msgId, deliveryTS) {
-        const db = await this.getIndexWith(deliveryTS);
-        const result = db.db.exec(`SELECT ${column.key}, ${column.keyStatus}, ${column.mainObjHeaderOfs}
-            FROM ${indexTab}
-            WHERE ${column.msgId}=$${column.msgId}`, { [`$${column.msgId}`]: msgId });
-        if (result.length === 0) {
-            return;
-        }
-        const { columns, values: [row] } = result[0];
-        const indecies = columnIndecies(columns, column.key, column.keyStatus, column.mainObjHeaderOfs);
-        return {
-            msgKey: row[indecies.get(column.key)],
-            msgKeyRole: row[indecies.get(column.keyStatus)],
-            mainObjHeaderOfs: row[indecies.get(column.mainObjHeaderOfs)]
-        };
-    }
-}
-Object.freeze(RecordsInSQL.prototype);
-Object.freeze(RecordsInSQL);
-class LogOfChanges {
-    constructor(files, logNums, latestLogVersion) {
-        this.files = files;
-        this.logNums = logNums;
-        this.latestLogVersion = latestLogVersion;
-        this.latestLogNum = this.logNums[this.logNums.length - 1];
-        Object.seal(this);
-    }
-    async push(change) {
-        const bytes = buffer_utils_1.utf8.pack(JSON.stringify(change));
-        const { uploadedVersion, writeOfs } = await this.files.appendLogFile(this.latestLogNum, bytes);
-        this.latestLogVersion = uploadedVersion;
-        return {
-            num: this.latestLogNum,
-            version: this.latestLogVersion,
-            writeOfs
-        };
-    }
-}
-Object.freeze(LogOfChanges.prototype);
-Object.freeze(LogOfChanges);
-const DBS_FOLDER = 'dbs';
-const CHANGES_FOLDER = 'changes';
-const LOG_EXT = '.log.json';
-const DB_EXT = '.sqlite';
-const LATEST_DB = `latest${DB_EXT}`;
-const COMMA_BYTE = buffer_utils_1.utf8.pack(',');
-const SQ_BRACKET_BYTE = buffer_utils_1.utf8.pack(']');
-class LogAndStructFiles {
-    constructor(logsFS, dbsFS) {
-        this.logsFS = logsFS;
-        this.dbsFS = dbsFS;
-        this.syncing = undefined;
-        // XXX synchronize file saving
-        this.logsFSaccessProc = new synced_1.SingleProc();
-        (0, file_1.ensureCorrectFS)(this.logsFS, 'synced', true);
-        Object.seal(this);
-    }
-    static async makeAndStart(syncedFS) {
-        (0, file_1.ensureCorrectFS)(syncedFS, 'synced', true);
-        await (0, fs_sync_utils_1.getRemoteFolderChanges)(syncedFS);
-        const logsFS = await (0, fs_sync_utils_1.getOrMakeAndUploadFolderIn)(syncedFS, CHANGES_FOLDER);
-        const dbsFS = await (0, fs_sync_utils_1.getOrMakeAndUploadFolderIn)(syncedFS, DBS_FOLDER);
-        await (0, fs_sync_utils_1.uploadFolderChangesIfAny)(syncedFS);
-        const files = new LogAndStructFiles(logsFS, dbsFS);
-        const logs = await files.makeLogOfChanges();
-        const records = await files.makeRecords();
-        files.startSyncing();
-        return { files, logs, records };
-    }
-    async makeLogOfChanges() {
-        const logNums = await this.logsInFolder();
-        let logsTail;
-        if (logNums.length === 0) {
-            logsTail = await this.createNewLogFile(1);
-            logNums.push(logsTail.num);
-        }
-        else {
-            const lastLog = logNums[logNums.length - 1];
-            ({ tail: logsTail } = await this.statLogFile(lastLog));
-        }
-        return new LogOfChanges(this, logNums, logsTail.version);
-    }
-    logFileName(logNum) {
-        return `${logNum}${LOG_EXT}`;
-    }
-    async logsInFolder() {
-        const lst = await this.logsFS.listFolder(``);
-        const logNums = [];
-        for (const { isFile, name } of lst) {
-            if (!isFile || !name.endsWith(LOG_EXT)) {
-                continue;
-            }
-            const numStr = name.substring(0, LOG_EXT.length);
-            const logNum = parseInt(numStr);
-            if (isNaN(logNum)) {
-                continue;
-            }
-            logNums.push(logNum);
-        }
-        logNums.sort();
-        return logNums;
-    }
-    createNewLogFile(logNum, jsonStr = '[]') {
-        return this.logsFSaccessProc.startOrChain(async () => {
-            const logFile = this.logFileName(logNum);
-            const version = await this.logsFS.v.writeTxtFile(logFile, jsonStr, { create: true, exclusive: true });
-            // XXX sync disabled for now, and may be need another structure
-            // await this.logsFS.v!.sync!.upload(logFile);
-            // await this.logsFS.v!.sync!.upload('');
-            return {
-                num: logNum,
-                version,
-                writeOfs: jsonStr.length - 1
-            };
-        });
-    }
-    async statLogFile(logNum) {
-        const logFile = this.logFileName(logNum);
-        // XXX sync disabled for now, and may be need another structure
-        // const { state: syncState } = await this.logsFS.v!.sync!.status(logFile);
-        // if (syncState === 'behind') {
-        //     await this.logsFS.v!.sync!.adoptRemote(logFile);
-        // } else if (syncState === 'unsynced') {
-        //     await this.logsFS.v!.sync!.upload(logFile);
-        // } else if (syncState === 'conflicting') {
-        //     // XXX
-        //     throw new Error(`conflict resolution needs implementation`);
-        // }
-        const { size, version } = await this.logsFS.stat(logFile);
-        return {
-            syncState: 'unsynced',
-            tail: {
-                num: logNum,
-                version: version,
-                writeOfs: size - 1
-            }
-        };
-    }
-    async appendLogFile(logNum, bytes) {
-        return this.logsFSaccessProc.startOrChain(async () => {
-            const logFile = this.logFileName(logNum);
-            // XXX sync disabled for now, and may be need another structure
-            // const { state } = await this.logsFS.v!.sync!.status(logFile);
-            // if (state === 'behind') {
-            //     await this.logsFS.v!.sync!.adoptRemote(logFile);
-            // } else if (state === 'conflicting') {
-            //     // XXX
-            //     throw new Error(`conflict resolution needs implementation`);
-            // }
-            const sink = await this.logsFS.getByteSink(logFile, { truncate: false });
-            const len = await sink.getSize();
-            let writeOfs;
-            if (len === 2) {
-                await sink.splice(len - 1, 1);
-                writeOfs = len - 1;
-            }
-            else {
-                await sink.splice(len - 1, 1, COMMA_BYTE);
-                writeOfs = len;
-            }
-            await sink.splice(writeOfs, 0, bytes);
-            writeOfs += bytes.length;
-            await sink.splice(writeOfs, 0, SQ_BRACKET_BYTE);
-            await sink.done();
-            // XXX sync disabled for now, and may be need another structure
-            // const uploadedVersion = (await this.logsFS.v!.sync!.upload(logFile))!;
-            const uploadedVersion = logNum;
-            return { uploadedVersion, writeOfs };
-        });
-    }
-    dbFileName(fileTS) {
-        return `${fileTS}${DB_EXT}`;
-    }
-    async makeRecords() {
-        const latest = await this.readOrInitializeLatestDB();
-        const fileTSs = await this.fileTSsOfDBShards();
-        return new RecordsInSQL(this, latest, fileTSs);
-    }
-    async getDBFile(fileTS) {
-        return await this.dbsFS.writableFile(this.dbFileName(fileTS), { create: false });
-    }
-    async readOrInitializeLatestDB() {
-        if (await this.dbsFS.checkFilePresence(LATEST_DB)) {
-            const dbFile = await this.dbsFS.writableFile(LATEST_DB, { create: false });
-            return await lib_sqlite_on_3nstorage_1.SQLiteOnVersionedFS.makeAndStart(dbFile);
-        }
-        else {
-            const dbFile = await this.dbsFS.writableFile(LATEST_DB, { create: true, exclusive: true });
-            const latest = await lib_sqlite_on_3nstorage_1.SQLiteOnVersionedFS.makeAndStart(dbFile);
-            latest.db.run(createIndexTab);
-            await latest.saveToFile();
-            return latest;
-        }
-    }
-    async fileTSsOfDBShards() {
-        const lst = await this.dbsFS.listFolder('');
-        const fileTSs = [];
-        for (const { isFile, name } of lst) {
-            if (!isFile || !name.endsWith(DB_EXT)) {
-                continue;
-            }
-            const numStr = name.substring(0, DB_EXT.length);
-            const fileTS = parseInt(numStr);
-            if (isNaN(fileTS)) {
-                continue;
-            }
-            fileTSs.push(fileTS);
-        }
-        fileTSs.sort();
-        return fileTSs;
-    }
-    startSyncing() {
-        const db$ = (0, fs_sync_utils_1.observableFromTreeEvents)(this.dbsFS, '');
-        const change$ = (0, fs_sync_utils_1.observableFromTreeEvents)(this.logsFS, '');
-        // XXX
-        // - start from data in fs, attempt to get fresh, etc. Both log and data
-        //   (in parallel ?).
-        // - start sync process
-        // - unblock processing, as init is done
-        // Write theses in functions that use RecordsInSQL and LogOfChanges
-        // structures.
-        // Somehow aforementioned processes ain't exclusive to either point.
-        // XXX
-        // should start from reading folder and placing logs into yet unsynced db
-        this.syncing = (0, rxjs_1.merge)(change$, db$)
-            .pipe((0, operators_1.filter)(ev => ev.type.startsWith('remote-')))
-            // .subscribe({
-            //     next: ev => console.log(`------ fs event:`, ev),
-            //     complete: () => console.log(` +++ MsgIndex's sync process completed`),
-            //     error: err => console.log(` *** error in MsgIndex's sync process`, err)
-            // });
-            .subscribe();
-    }
-    stopSyncing() {
-        if (this.syncing) {
-            this.syncing.unsubscribe();
-            this.syncing = undefined;
-        }
-    }
-}
-Object.freeze(LogAndStructFiles.prototype);
-Object.freeze(LogAndStructFiles);
-/**
- * This message index stores info for messages present on the server, in the
- * inbox. Records contain message key info, time of delivery, and time of
- * desired removal.
- *
- * Message info with keys is stored in SQLite dbs sharded/partitioned by
- * delivery timestamp. The latest shard, shard without upper time limit
- * is stored in local storage, while all other shards with limits are stored in
- * synced storage. Information in synced storage is a sum of all limited shards
- * and action logs. Action logs
- *
- */
-class MsgIndex {
-    constructor(files, records, changes) {
-        this.files = files;
-        this.records = records;
-        this.changes = changes;
-        Object.seal(this);
-    }
-    static async make(syncedFS) {
-        const { files, logs, records } = await LogAndStructFiles.makeAndStart(syncedFS);
-        const index = new MsgIndex(files, records, logs);
-        return index;
-    }
-    stopSyncing() {
-        this.files.stopSyncing();
-    }
-    async add(msgInfo, decrInfo, removeAfter = 0) {
-        const logChange = await this.records.add(msgInfo, decrInfo, removeAfter);
-        if (!logChange) {
-            return;
-        }
-        const logTail = await this.changes.push(logChange);
-        await this.records.saveLatestWithAttr(logTail);
-    }
-    async remove(msgId) {
-        const logChange = await this.records.remove(msgId);
-        if (!logChange) {
-            return;
-        }
-        const logTail = await this.changes.push(logChange);
-        await this.records.saveLatestWithAttr(logTail);
-    }
-    listMsgs(fromTS) {
-        return this.records.listMsgs(fromTS);
-    }
-    getKeyFor(msgId, deliveryTS) {
-        return this.records.getKeyFor(msgId, deliveryTS);
-    }
-}
-exports.MsgIndex = MsgIndex;
-Object.freeze(MsgIndex.prototype);
-Object.freeze(MsgIndex);
-Object.freeze(exports);
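
The shard selection in the removed getDbFor above is the heart of the partitioning described in the MsgIndex doc comment: each synced shard file is named by its upper delivery-timestamp bound, and anything newer than the largest bound goes into the unbounded latest db kept in local storage. A minimal standalone sketch of that lookup in TypeScript (the name pickShard is illustrative, not part of the package API):

    // Pick the shard that should hold a message with the given delivery timestamp.
    // fileTSs are the sorted upper-bound timestamps of the bounded, synced shards;
    // 'latest' stands for the unbounded shard kept in local storage.
    function pickShard(fileTSs: number[], deliveryTS: number): number | 'latest' {
      // no bounded shards, or message newer than every bound => latest db
      if ((fileTSs.length === 0) || (fileTSs[fileTSs.length - 1] < deliveryTS)) {
        return 'latest';
      }
      // walk bounds from newest to oldest, keeping the smallest bound that
      // still covers deliveryTS (bounds act as inclusive upper limits)
      let fileTS = fileTSs[fileTSs.length - 1];
      for (let i = fileTSs.length - 2; i >= 0; i -= 1) {
        if (fileTSs[i] >= deliveryTS) {
          fileTS = fileTSs[i];
        } else {
          break;
        }
      }
      return fileTS;
    }

    // For example, with shard bounds [1000, 2000, 3000]:
    //   pickShard([1000, 2000, 3000], 1500) === 2000
    //   pickShard([1000, 2000, 3000], 3500) === 'latest'

The removed appendLogFile shows the companion design choice for the change log: each log file is a JSON array on disk, and an append replaces the trailing ] with a comma, writes the serialized change, and closes the array again, so a change is recorded without rewriting the whole file.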