core-3nweb-client-lib 0.27.4 → 0.27.6
This diff covers the publicly available contents of the two package versions as released to one of the supported registries; it is provided for informational purposes only.
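The same file-by-file comparison can be reproduced locally with npm 7 or later, e.g. `npm diff --diff=core-3nweb-client-lib@0.27.4 --diff=core-3nweb-client-lib@0.27.6` (assuming registry access to both versions).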
- package/build/api-defs/files.d.ts +40 -10
- package/build/core/asmail/delivery/per-recipient-wip.js +2 -2
- package/build/core/asmail/inbox/inbox-events.js +10 -5
- package/build/core/asmail/inbox/index.d.ts +3 -2
- package/build/core/asmail/inbox/index.js +14 -10
- package/build/core/asmail/inbox/msg-indexing.d.ts +17 -64
- package/build/core/asmail/inbox/msg-indexing.js +440 -311
- package/build/core/asmail/inbox/msg-on-disk.js +2 -1
- package/build/core/asmail/index.js +3 -2
- package/build/core/asmail/keyring/correspondent-keys.js +3 -1
- package/build/core/asmail/keyring/index.d.ts +1 -4
- package/build/core/asmail/keyring/index.js +6 -6
- package/build/core/asmail/msg/opener.js +4 -3
- package/build/core/asmail/msg/packer.d.ts +1 -0
- package/build/core/asmail/msg/packer.js +8 -4
- package/build/core/index.js +2 -3
- package/build/core/storage/local/obj-files.js +2 -1
- package/build/core/storage/synced/storage.js +3 -2
- package/build/core/storage/synced/upload-header-file.js +4 -2
- package/build/core/storage/synced/upsyncer.js +3 -5
- package/build/ipc-via-protobuf/asmail-cap.js +14 -15
- package/build/ipc-via-protobuf/bytes.js +42 -18
- package/build/ipc-via-protobuf/file.js +43 -39
- package/build/ipc-via-protobuf/fs.js +72 -67
- package/build/ipc-via-protobuf/mailerid.js +2 -2
- package/build/ipc-via-protobuf/protobuf-msg.d.ts +1 -0
- package/build/ipc-via-protobuf/protobuf-msg.js +5 -1
- package/build/ipc-via-protobuf/startup-cap.js +8 -8
- package/build/ipc-via-protobuf/storage-cap.js +4 -4
- package/build/ipc.d.ts +1 -0
- package/build/ipc.js +3 -1
- package/build/lib-client/3nstorage/xsp-fs/common.js +2 -0
- package/build/lib-client/3nstorage/xsp-fs/file-node.js +0 -17
- package/build/lib-client/3nstorage/xsp-fs/file.d.ts +1 -1
- package/build/lib-client/3nstorage/xsp-fs/folder-node.d.ts +1 -1
- package/build/lib-client/3nstorage/xsp-fs/folder-node.js +13 -7
- package/build/lib-client/3nstorage/xsp-fs/fs.d.ts +1 -1
- package/build/lib-client/3nstorage/xsp-fs/fs.js +1 -1
- package/build/lib-client/3nstorage/xsp-fs/node-in-fs.d.ts +2 -2
- package/build/lib-client/3nstorage/xsp-fs/node-in-fs.js +7 -3
- package/build/lib-client/3nstorage/xsp-fs/node-persistence.d.ts +1 -0
- package/build/lib-client/3nstorage/xsp-fs/node-persistence.js +7 -5
- package/build/lib-client/3nstorage/xsp-fs/xsp-payload-v1.js +3 -4
- package/build/lib-client/3nstorage/xsp-fs/xsp-payload-v2.js +20 -15
- package/build/lib-client/cryptor/cryptor-in-worker.js +37 -47
- package/build/lib-client/cryptor/cryptor-wasm.js +1 -1
- package/build/lib-client/cryptor/cryptor.wasm +0 -0
- package/build/lib-client/cryptor/in-proc-js.js +15 -12
- package/build/lib-client/cryptor/in-proc-wasm.js +11 -8
- package/build/lib-client/cryptor/serialization-for-wasm.d.ts +36 -0
- package/build/lib-client/cryptor/serialization-for-wasm.js +58 -0
- package/build/lib-client/cryptor-work-labels.d.ts +26 -0
- package/build/lib-client/cryptor-work-labels.js +152 -0
- package/build/lib-client/fs-sync-utils.d.ts +7 -1
- package/build/lib-client/fs-sync-utils.js +18 -7
- package/build/lib-client/local-files/dev-file-src.d.ts +2 -1
- package/build/lib-client/local-files/dev-file-src.js +5 -1
- package/build/lib-client/local-files/device-fs.js +2 -1
- package/build/lib-client/objs-on-disk/obj-on-disk.js +5 -1
- package/build/lib-client/request-utils.js +14 -14
- package/build/lib-common/async-cryptor-wrap.d.ts +9 -9
- package/build/lib-common/async-cryptor-wrap.js +13 -13
- package/build/lib-common/byte-streaming/pipe.d.ts +1 -1
- package/build/lib-common/byte-streaming/pipe.js +3 -3
- package/build/lib-common/byte-streaming/wrapping.js +4 -2
- package/build/lib-common/json-utils.js +0 -3
- package/build/lib-common/processes/synced.js +0 -184
- package/build/lib-sqlite-on-3nstorage/index.d.ts +32 -0
- package/build/lib-sqlite-on-3nstorage/index.js +117 -0
- package/build/lib-sqlite-on-3nstorage/sqljs.d.ts +279 -0
- package/build/lib-sqlite-on-3nstorage/sqljs.js +223 -0
- package/build/protos/asmail.proto.js +3748 -1121
- package/build/protos/bytes.proto.js +731 -204
- package/build/protos/common.proto.js +192 -44
- package/build/protos/cryptor.proto.js +184 -61
- package/build/protos/file.proto.js +1336 -324
- package/build/protos/fs.proto.js +3099 -846
- package/build/protos/ipc.proto.js +244 -61
- package/build/protos/logger.proto.js +219 -53
- package/build/protos/mailerid.proto.js +230 -50
- package/build/protos/startup.proto.js +341 -77
- package/build/protos/storage.proto.js +276 -62
- package/package.json +8 -7
- package/protos/bytes.proto +13 -4
- package/protos/file.proto +4 -1
- package/protos/fs.proto +4 -1
Expanded below: package/build/core/asmail/inbox/msg-indexing.js (+440 -311). Old-file lines whose text is not preserved in this extract are elided as `…`.

```diff
@@ -1,6 +1,6 @@
 "use strict";
 /*
-Copyright (C)
+Copyright (C) 2022 3NSoft Inc.
 
 This program is free software: you can redistribute it and/or modify it under
 the terms of the GNU General Public License as published by the Free Software
@@ -17,361 +17,490 @@
 */
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.MsgIndex = void 0;
-const synced_1 = require("../../../lib-common/processes/synced");
-const buffer_utils_1 = require("../../../lib-common/buffer-utils");
 const timed_cache_1 = require("../../../lib-common/timed-cache");
-const
-const
-const
-const
- …
+const lib_sqlite_on_3nstorage_1 = require("../../../lib-sqlite-on-3nstorage");
+const file_1 = require("../../../lib-common/exceptions/file");
+const buffer_utils_1 = require("../../../lib-common/buffer-utils");
+const fs_sync_utils_1 = require("../../../lib-client/fs-sync-utils");
+const rxjs_1 = require("rxjs");
+const operators_1 = require("rxjs/operators");
+const indexTab = 'inbox_index';
+const column = {
+    msgId: 'msg_id',
+    msgType: 'msg_type',
+    deliveryTS: 'delivery_ts',
+    key: 'msg_key',
+    keyStatus: 'key_status',
+    mainObjHeaderOfs: 'main_obj_header_ofs',
+    removeAfter: 'remove_after'
+};
+Object.freeze(column);
+const createIndexTab = `CREATE TABLE ${indexTab} (
+    ${column.msgId} TEXT PRIMARY KEY,
+    ${column.msgType} TEXT,
+    ${column.deliveryTS} INTEGER,
+    ${column.key} BLOB,
+    ${column.keyStatus} TEXT,
+    ${column.mainObjHeaderOfs} INTEGER,
+    ${column.removeAfter} INTEGER DEFAULT 0
+) STRICT`;
+const insertRec = `INSERT INTO ${indexTab} (
+    ${column.msgId}, ${column.msgType}, ${column.deliveryTS},
+    ${column.key}, ${column.keyStatus}, ${column.mainObjHeaderOfs},
+    ${column.removeAfter}
+) VALUES (
+    $${column.msgId}, $${column.msgType}, $${column.deliveryTS},
+    $${column.key}, $${column.keyStatus}, $${column.mainObjHeaderOfs},
+    $${column.removeAfter}
+)`;
+const deleteRec = `DELETE FROM ${indexTab}
+    WHERE ${column.msgId}=$${column.msgId}`;
+function listMsgInfos(db, fromTS) {
+    let result;
+    if (fromTS) {
+        result = db.exec(`SELECT ${column.msgId}, ${column.msgType}, ${column.deliveryTS} FROM ${indexTab} WHERE ${column.deliveryTS}>$fromTS`, { '$fromTS': fromTS });
     }
     else {
- …
-        for (let i = (records.ordered.length - 1); i >= 0; i -= 1) {
-            if (records.ordered[i].deliveryTS <= ts) {
-                records.ordered.splice(i + 1, 0, rec);
-                return;
-            }
-        }
-        records.ordered.splice(0, 0, rec);
+        result = db.exec(`SELECT ${column.msgId}, ${column.msgType}, ${column.deliveryTS} FROM ${indexTab}`);
     }
- …
-    records.byId.delete(msgId);
-    for (let i = 0; i < records.ordered.length; i += 1) {
-        if (records.ordered[i].msgId === msgId) {
-            records.ordered.splice(i, 1);
-            break;
-        }
+    if (result.length === 0) {
+        return [];
     }
+    const { columns, values: rows } = result[0];
+    const indecies = columnIndecies(columns, column.msgId, column.msgType, column.deliveryTS);
+    const msgs = [];
+    for (const row of rows) {
+        msgs.push({
+            msgId: row[indecies.get(column.msgId)],
+            msgType: row[indecies.get(column.msgType)],
+            deliveryTS: row[indecies.get(column.deliveryTS)]
+        });
+    }
+    return msgs;
 }
-function
- …
-    const byId = new Map();
-    const ordered = records.ordered.splice(0, recToExtract);
-    const fileTS = ordered[ordered.length - 1].deliveryTS;
-    for (const rec of ordered) {
-        byId.set(rec.msgId, rec);
-        records.byId.delete(rec.msgId);
-    }
-    return { byId, ordered, fileTS };
-}
-function reduceToMsgInfosInPlace(records) {
-    for (let i = 0; i < records.length; i += 1) {
-        const orig = records[i];
-        records[i] = {
-            msgType: orig.msgType,
-            msgId: orig.msgId,
-            deliveryTS: orig.deliveryTS
-        };
+function columnIndecies(columns, ...columnNames) {
+    const indecies = new Map();
+    for (const colName of columnNames) {
+        indecies.set(colName, columns.indexOf(colName));
     }
+    return indecies;
 }
- …
- *
- * This implementation is not using database for records, but a log-like files.
- * Latest file is appended to certain length, and then is left to a mostly-read
- * existence, carrying messages' info till message removal.
- * When lookup is done only with message id, all files potentially need to be
- * read. When both message id and delivery timestamp are given, lookup will at
- * most touch two-three files.
- *
- */
-class MsgIndex {
-    constructor(files) {
+function deleteMsgFrom(db, msgId) {
+    db.exec(deleteRec, { [`$${column.msgId}`]: msgId });
+    return (db.getRowsModified() > 0);
+}
+const LIMIT_RECORDS_PER_FILE = 200;
+class RecordsInSQL {
+    constructor(files, latest, fileTSs) {
         this.files = files;
-        this.latest =
-        this.
-        this.
-        this.fileProc = new synced_1.SingleProc();
+        this.latest = latest;
+        this.fileTSs = fileTSs;
+        this.older = (0, timed_cache_1.makeTimedCache)(10 * 60 * 1000);
         Object.seal(this);
     }
- …
-        const
- …
-        const
- …
+    async add(msgInfo, decrInfo, removeAfter) {
+        const { msgId, msgType, deliveryTS } = msgInfo;
+        const { key, keyStatus, msgKeyPackLen: mainObjHeaderOfs } = decrInfo;
+        const params = {
+            [`$${column.msgId}`]: msgId,
+            [`$${column.msgType}`]: msgType,
+            [`$${column.deliveryTS}`]: deliveryTS,
+            [`$${column.key}`]: key,
+            [`$${column.keyStatus}`]: keyStatus,
+            [`$${column.mainObjHeaderOfs}`]: mainObjHeaderOfs,
+            [`$${column.removeAfter}`]: removeAfter,
+        };
+        const { db, fileTS } = await this.getDbFor(msgInfo.deliveryTS);
+        db.db.exec(insertRec, params);
+        if (fileTS) {
+            await db.saveToFile();
+            return;
         }
         else {
- …
+            return {
+                type: 'addition',
+                record: {
+                    msgId, msgType, deliveryTS,
+                    key: buffer_utils_1.base64.pack(key),
+                    keyStatus, mainObjHeaderOfs, removeAfter
+                }
             };
-        await index.saveRecords(index.latest);
         }
-        // 2) initialize list of file timestamps, that act as index file names
-        index.fileTSs = (await index.files.listFolder('.'))
-            .map(f => f.name)
-            .filter(fName => fName.match(INDEX_FNAME_REGEXP))
-            .map(fName => parseInt(fName.substring(0, fName.length - INDEX_EXT.length)))
-            .sort(fileTSOrderComparator);
-        return index;
     }
- …
-     * @return a promise, resolvable when given records are saved. Latest
-     * records can also be chunked and saved as needed.
-     */
-    async saveRecords(recs) {
-        if (typeof recs.fileTS !== 'number') {
-            if (this.latest.ordered.length > 1.25 * LIMIT_RECORDS_PER_FILE) {
-                const recs = extractEarlyRecords(this.latest, LIMIT_RECORDS_PER_FILE);
-                const recsFileTS = recs.ordered[recs.ordered.length - 1].deliveryTS;
-                const fName = recsFileTS + INDEX_EXT;
-                await this.files.writeJSONFile(fName, recs.ordered);
-                this.cached.set(recsFileTS, recs);
-                this.fileTSs.push(recsFileTS);
-            }
-            await this.files.writeJSONFile(LATEST_INDEX, this.latest.ordered);
-        }
-        else {
-            const fName = recs.fileTS + INDEX_EXT;
-            await this.files.writeJSONFile(fName, recs.ordered);
-            this.cached.set(recs.fileTS, recs);
-        }
+    async saveLatestWithAttr(logTail) {
+        // XXX
    }
- …
-     * cache, or from a file.
-     */
-    async getRecords(fileTS) {
-        if (typeof fileTS === 'number') {
-            const recs = this.cached.get(fileTS);
-            if (recs) {
-                return recs;
-            }
+    async getDbFor(deliveryTS) {
+        if ((this.fileTSs.length === 0)
+            || (this.fileTSs[this.fileTSs.length - 1] < deliveryTS)) {
+            return { db: this.latest };
        }
- …
+        let fileTS = this.fileTSs[this.fileTSs.length - 1];
+        for (let i = (this.fileTSs.length - 2); i >= 0; i -= 1) {
+            if (this.fileTSs[i] >= deliveryTS) {
+                fileTS = this.fileTSs[i];
+            }
+            else {
+                break;
            }
        }
-        const
- …
-        ordered.sort(sortMsgByDeliveryTime);
-        const byId = new Map();
-        for (const rec of ordered) {
-            byId.set(rec.msgId, rec);
-        }
-        const records = { byId, ordered, fileTS: fileTS };
-        if (typeof fileTS === 'number') {
-            this.cached.set(fileTS, records);
-        }
-        return records;
-    }
-    /**
-     * @param msgInfo is a minimal message info object
-     * @param decrInfo
-     * @return a promise, resolvable when given message info bits are recorded.
-     */
-    add(msgInfo, decrInfo) {
-        if (!decrInfo.key) {
-            throw new Error(`Given message decryption info doesn't have a key for message ${msgInfo.msgId}`);
+        const db = await this.dbFromCacheOrInit(fileTS);
+        return { db, fileTS };
+    }
+    async dbFromCacheOrInit(fileTS) {
+        let db = this.older.get(fileTS);
+        if (db) {
+            return db;
        }
-        const
- …
-            (msg.deliveryTS >= this.fileTSs[this.fileTSs.length - 1])) {
-            insertInto(this.latest, msg);
-            await this.saveRecords(this.latest);
-            return;
-        }
-        // 2) find non-latest file for insertion
-        let fileTS = this.fileTSs[this.fileTSs.length - 1];
-        for (let i = (this.fileTSs.length - 2); i <= 0; i -= 1) {
-            if (msg.deliveryTS >= this.fileTSs[i]) {
-                break;
+        const dbFile = await this.files.getDBFile(fileTS);
+        db = await lib_sqlite_on_3nstorage_1.SQLiteOnSyncedFS.makeAndStart(dbFile);
+        this.older.set(fileTS, db);
+        return db;
+    }
+    async remove(msgId) {
+        for await (const { db, fileTS } of this.iterateDBs()) {
+            if (deleteMsgFrom(db.db, msgId)) {
+                if (fileTS) {
+                    await db.saveToFile();
+                    return;
                }
                else {
- …
+                    return {
+                        type: 'removal',
+                        msgId
+                    };
                }
            }
- …
-            if (!records) {
-                throw new Error(`Expectation fail: there should be some message records.`);
-            }
-            insertInto(records, msg);
-            await this.saveRecords(records);
-        });
+        }
    }
-    async
- …
-        if (!
- …
+    async *iterateDBs() {
+        yield { db: this.latest };
+        for (let i = (this.fileTSs.length - 1); i >= 0; i = -1) {
+            const fileTS = this.fileTSs[i];
+            if (!fileTS) {
+                continue;
+            }
+            const db = await this.dbFromCacheOrInit(fileTS);
+            if (db) {
+                yield { db, fileTS };
            }
-            return this.latest;
        }
- …
-                return;
-            }
-            fileTS = this.fileTSs[i];
-            records = await this.getRecords(fileTS);
-            if (!records || !records.byId.has(msg.msgId)) {
-                return;
-            }
-        }
-        return records;
+    }
+    async listMsgs(fromTS) {
+        let lst = listMsgInfos(this.latest.db, fromTS);
+        for (let i = (this.fileTSs.length - 1); i >= 0; i -= 1) {
+            const fileTS = this.fileTSs[i];
+            if (fromTS && (fileTS <= fromTS)) {
+                break;
            }
- …
+            const older = await this.dbFromCacheOrInit(fileTS);
+            lst = listMsgInfos(older.db, fromTS).concat(lst);
+        }
+        lst.sort((a, b) => (a.deliveryTS - b.deliveryTS));
+        return lst;
+    }
+    async getIndexWith(deliveryTS) {
+        let fileTS = undefined;
+        for (let i = (this.fileTSs.length - 1); i >= 0; i -= 1) {
+            const fTS = this.fileTSs[i];
+            if (fTS < deliveryTS) {
+                break;
            }
+            fileTS = fTS;
+        }
+        if (fileTS) {
+            return await this.dbFromCacheOrInit(fileTS);
        }
- …
+        else {
+            return this.latest;
+        }
+    }
+    async getKeyFor(msgId, deliveryTS) {
+        const db = await this.getIndexWith(deliveryTS);
+        const result = db.db.exec(`SELECT
+            ${column.key}, ${column.keyStatus}, ${column.mainObjHeaderOfs}
+            FROM ${indexTab}
+            WHERE ${column.msgId}=$${column.msgId}`, { [`$${column.msgId}`]: msgId });
+        if (result.length === 0) {
            return;
        }
- …
-        }
-    });
+        const { columns, values: [row] } = result[0];
+        const indecies = columnIndecies(columns, column.key, column.keyStatus, column.mainObjHeaderOfs);
+        return {
+            msgKey: row[indecies.get(column.key)],
+            msgKeyRole: row[indecies.get(column.keyStatus)],
+            mainObjHeaderOfs: row[indecies.get(column.mainObjHeaderOfs)]
+        };
+    }
+}
+Object.freeze(RecordsInSQL.prototype);
+Object.freeze(RecordsInSQL);
+class LogOfChanges {
+    constructor(files, logNums, latestLogVersion) {
+        this.files = files;
+        this.logNums = logNums;
+        this.latestLogVersion = latestLogVersion;
+        this.latestLogNum = this.logNums[this.logNums.length - 1];
+        Object.seal(this);
    }
- …
+    async push(change) {
+        const bytes = buffer_utils_1.utf8.pack(JSON.stringify(change));
+        const { uploadedVersion, writeOfs } = await this.files.appendLogFile(this.latestLogNum, bytes);
+        this.latestLogVersion = uploadedVersion;
+        return {
+            num: this.latestLogNum,
+            version: this.latestLogVersion,
+            writeOfs
+        };
+    }
+}
+Object.freeze(LogOfChanges.prototype);
+Object.freeze(LogOfChanges);
+const DBS_FOLDER = 'dbs';
+const CHANGES_FOLDER = 'changes';
+const LOG_EXT = '.log.json';
+const DB_EXT = '.sqlite';
+const LATEST_DB = `latest${DB_EXT}`;
+const COMMA_BYTE = buffer_utils_1.utf8.pack(',');
+const SQ_BRACKET_BYTE = buffer_utils_1.utf8.pack(']');
+class LogAndStructFiles {
+    constructor(logsFS, dbsFS) {
+        this.logsFS = logsFS;
+        this.dbsFS = dbsFS;
+        this.syncing = undefined;
+        (0, file_1.ensureCorrectFS)(this.logsFS, 'synced', true);
+        Object.seal(this);
+    }
+    static async makeAndStart(syncedFS) {
+        (0, file_1.ensureCorrectFS)(syncedFS, 'synced', true);
+        await (0, fs_sync_utils_1.getRemoteFolderChanges)(syncedFS);
+        const logsFS = await (0, fs_sync_utils_1.getOrMakeAndUploadFolderIn)(syncedFS, CHANGES_FOLDER);
+        const dbsFS = await (0, fs_sync_utils_1.getOrMakeAndUploadFolderIn)(syncedFS, DBS_FOLDER);
+        await (0, fs_sync_utils_1.uploadFolderChangesIfAny)(syncedFS);
+        const files = new LogAndStructFiles(logsFS, dbsFS);
+        const logs = await files.makeLogOfChanges();
+        const records = await files.makeRecords();
+        files.startSyncing();
+        return { files, logs, records };
+    }
+    async makeLogOfChanges() {
+        const logNums = await this.logsInFolder();
+        let logsTail;
+        if (logNums.length === 0) {
+            logsTail = await this.createNewLogFile(1);
+            logNums.push(logsTail.num);
+        }
+        else {
+            const lastLog = logNums[logNums.length - 1];
+            ({ tail: logsTail } = await this.statLogFile(lastLog));
+        }
+        return new LogOfChanges(this, logNums, logsTail.version);
+    }
+    logFileName(logNum) {
+        return `${logNum}${LOG_EXT}`;
+    }
+    async logsInFolder() {
+        const lst = await this.logsFS.listFolder(``);
+        const logNums = [];
+        for (const { isFile, name } of lst) {
+            if (!isFile || !name.endsWith(LOG_EXT)) {
+                continue;
            }
- …
-        else {
-            await this.saveRecords(records);
-        }
+            const numStr = name.substring(0, LOG_EXT.length);
+            const logNum = parseInt(numStr);
+            if (isNaN(logNum)) {
+                continue;
            }
- …
+            logNums.push(logNum);
+        }
+        logNums.sort();
+        return logNums;
    }
- …
+    async createNewLogFile(logNum, jsonStr = '[]') {
+        const logFile = this.logFileName(logNum);
+        const version = await this.logsFS.v.writeTxtFile(logFile, jsonStr, { create: true, exclusive: true });
+        await this.logsFS.v.sync.upload(logFile);
+        await this.logsFS.v.sync.upload('');
+        return {
+            num: logNum,
+            version,
+            writeOfs: jsonStr.length - 1
+        };
+    }
+    async statLogFile(logNum) {
+        const logFile = this.logFileName(logNum);
+        const { state: syncState } = await this.logsFS.v.sync.status(logFile);
+        if (syncState === 'behind') {
+            await this.logsFS.v.sync.adoptRemote(logFile);
+        }
+        else if (syncState === 'unsynced') {
+            await this.logsFS.v.sync.upload(logFile);
+        }
+        else if (syncState === 'conflicting') {
+            // XXX
+            throw new Error(`conflict resolution needs implementation`);
+        }
+        const { size, version } = await this.logsFS.stat(logFile);
+        return {
+            syncState,
+            tail: {
+                num: logNum,
+                version: version,
+                writeOfs: size - 1
            }
- …
+        };
+    }
+    async appendLogFile(logNum, bytes) {
+        const logFile = this.logFileName(logNum);
+        const { state } = await this.logsFS.v.sync.status(logFile);
+        if (state === 'behind') {
+            await this.logsFS.v.sync.adoptRemote(logFile);
+        }
+        else if (state === 'conflicting') {
+            // XXX
+            throw new Error(`conflict resolution needs implementation`);
+        }
+        const sink = await this.logsFS.getByteSink(logFile, { truncate: false });
+        const len = await sink.getSize();
+        let writeOfs;
+        if (len === 2) {
+            await sink.splice(len - 1, 1);
+            writeOfs = len - 1;
+        }
+        else {
+            await sink.splice(len - 1, 1, COMMA_BYTE);
+            writeOfs = len;
+        }
+        await sink.splice(writeOfs, 0, bytes);
+        writeOfs += bytes.length;
+        await sink.splice(writeOfs, 0, SQ_BRACKET_BYTE);
+        await sink.done();
+        const uploadedVersion = (await this.logsFS.v.sync.upload(logFile));
+        return { uploadedVersion, writeOfs };
+    }
+    dbFileName(fileTS) {
+        return `${fileTS}${DB_EXT}`;
+    }
+    async makeRecords() {
+        const latest = await this.readOrInitializeLatestDB();
+        const fileTSs = await this.fileTSsOfDBShards();
+        return new RecordsInSQL(this, latest, fileTSs);
+    }
+    async getDBFile(fileTS) {
+        return await this.dbsFS.writableFile(this.dbFileName(fileTS), { create: false });
    }
- …
+    async readOrInitializeLatestDB() {
+        if (await this.dbsFS.checkFilePresence(LATEST_DB)) {
+            const dbFile = await this.dbsFS.writableFile(LATEST_DB, { create: false });
+            return await lib_sqlite_on_3nstorage_1.SQLiteOnVersionedFS.makeAndStart(dbFile);
+        }
+        else {
+            const dbFile = await this.dbsFS.writableFile(LATEST_DB, { create: true, exclusive: true });
+            const latest = await lib_sqlite_on_3nstorage_1.SQLiteOnVersionedFS.makeAndStart(dbFile);
+            latest.db.run(createIndexTab);
+            await latest.saveToFile();
+            return latest;
+        }
+    }
+    async fileTSsOfDBShards() {
+        const lst = await this.dbsFS.listFolder('');
+        const fileTSs = [];
+        for (const { isFile, name } of lst) {
+            if (!isFile || !name.endsWith(DB_EXT)) {
+                continue;
            }
-            const
- …
+            const numStr = name.substring(0, DB_EXT.length);
+            const fileTS = parseInt(numStr);
+            if (isNaN(fileTS)) {
+                continue;
            }
- …
+            fileTSs.push(fileTS);
+        }
+        fileTSs.sort();
+        return fileTSs;
+    }
+    startSyncing() {
+        const db$ = (0, fs_sync_utils_1.observableFromTreeEvents)(this.dbsFS, '');
+        const change$ = (0, fs_sync_utils_1.observableFromTreeEvents)(this.logsFS, '');
+        // XXX
+        // - start from data in fs, attempt to get fresh, etc. Both log and data
+        // (in parallel ?).
+        // - start sync process
+        // - unblock processing, as init is done
+        // Write theses in functions that use RecordsInSQL and LogOfChanges
+        // structures.
+        // Somehow aforementioned processes ain't exclusive to either point.
+        // XXX
+        // should start from reading folder and placing logs into yet unsynced db
+        this.syncing = (0, rxjs_1.merge)(change$, db$)
+            .pipe((0, operators_1.filter)(ev => ev.type.startsWith('remote-')))
+            // .subscribe({
+            // next: ev => console.log(`------ fs event:`, ev),
+            // complete: () => console.log(` +++ MsgIndex's sync process completed`),
+            // error: err => console.log(` *** error in MsgIndex's sync process`, err)
+            // });
+            .subscribe();
+    }
+    stopSyncing() {
+        if (this.syncing) {
+            this.syncing.unsubscribe();
+            this.syncing = undefined;
+        }
    }
 }
- …
-Object.freeze(
-Object.freeze(MsgIndex);
+Object.freeze(LogAndStructFiles.prototype);
+Object.freeze(LogAndStructFiles);
 /**
- * This
- *
+ * This message index stores info for messages present on the server, in the
+ * inbox. Records contain message key info, time of delivery, and time of
+ * desired removal.
+ *
+ * Message info with keys is stored in SQLite dbs sharded/partitioned by
+ * delivery timestamp. The latest shard, shard without upper time limit
+ * is stored in local storage, while all other shards with limits are stored in
+ * synced storage. Information in synced storage is a sum of all limited shards
+ * and action logs. Action logs
+ *
 */
- …
+class MsgIndex {
+    constructor(files, records, changes) {
+        this.files = files;
+        this.records = records;
+        this.changes = changes;
+        Object.seal(this);
+    }
+    static async make(syncedFS) {
+        const { files, logs, records } = await LogAndStructFiles.makeAndStart(syncedFS);
+        const index = new MsgIndex(files, records, logs);
+        return index;
+    }
+    stopSyncing() {
+        this.files.stopSyncing();
+    }
+    async add(msgInfo, decrInfo, removeAfter = 0) {
+        const logChange = await this.records.add(msgInfo, decrInfo, removeAfter);
+        if (!logChange) {
+            return;
+        }
+        const logTail = await this.changes.push(logChange);
+        await this.records.saveLatestWithAttr(logTail);
+    }
+    async remove(msgId) {
+        const logChange = await this.records.remove(msgId);
+        if (!logChange) {
+            return;
+        }
+        const logTail = await this.changes.push(logChange);
+        await this.records.saveLatestWithAttr(logTail);
+    }
+    listMsgs(fromTS) {
+        return this.records.listMsgs(fromTS);
+    }
+    getKeyFor(msgId, deliveryTS) {
+        return this.records.getKeyFor(msgId, deliveryTS);
    }
-        return;
-    }
-function sortMsgByDeliveryTime(a, b) {
-    return (a.deliveryTS - b.deliveryTS);
 }
+exports.MsgIndex = MsgIndex;
+Object.freeze(MsgIndex.prototype);
+Object.freeze(MsgIndex);
 Object.freeze(exports);
```
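The headline change above replaces the old log-file message index with SQLite shards keyed by delivery timestamp (via the new lib-sqlite-on-3nstorage module) plus a JSON change log in synced storage. For orientation, here is a minimal sketch of driving the reworked index, using only the methods visible in the diff (`MsgIndex.make`, `add`, `listMsgs`, `getKeyFor`, `remove`, `stopSyncing`); the deep require path and the `syncedFS`, `msgInfo` and `decrInfo` values are illustrative assumptions, not part of the published API surface:

```js
// Hypothetical driver for the reworked MsgIndex; the require path and the
// shapes of msgInfo/decrInfo are assumptions taken from the diff above.
const { MsgIndex } = require(
  'core-3nweb-client-lib/build/core/asmail/inbox/msg-indexing');

async function recordAndLookup(syncedFS, msgInfo, decrInfo) {
  // sets up the 'dbs' and 'changes' folders in synced storage and
  // starts the background sync subscription
  const index = await MsgIndex.make(syncedFS);

  // inserts a row into the shard chosen by msgInfo.deliveryTS and, when the
  // record lands in the latest shard, pushes an 'addition' entry to the log
  await index.add(msgInfo, decrInfo);

  // merges listings from the latest shard and any older shards newer than
  // the given timestamp, sorted by delivery time
  const msgs = await index.listMsgs(msgInfo.deliveryTS - 1);

  // deliveryTS picks the shard, msgId picks the row
  const keyInfo = await index.getKeyFor(msgInfo.msgId, msgInfo.deliveryTS);

  await index.remove(msgInfo.msgId); // logs a 'removal' change when the
                                     // record sits in the latest shard
  index.stopSyncing();               // tears down the rxjs subscription
  return { msgs, keyInfo };
}
```

Because shards are bounded by delivery timestamp, a lookup that supplies both the message id and its delivery time opens at most one older shard besides the latest one, preserving the locality that the removed log-file implementation documented for id-plus-timestamp lookups.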