prostgles-server 4.2.159 → 4.2.161
This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/dist/Auth/setEmailProvider.js +2 -2
- package/dist/Auth/setEmailProvider.js.map +1 -1
- package/lib/Auth/AuthHandler.ts +436 -0
- package/lib/Auth/AuthTypes.ts +280 -0
- package/lib/Auth/getSafeReturnURL.ts +35 -0
- package/lib/Auth/sendEmail.ts +83 -0
- package/lib/Auth/setAuthProviders.ts +128 -0
- package/lib/Auth/setEmailProvider.ts +85 -0
- package/lib/Auth/setupAuthRoutes.ts +161 -0
- package/lib/DBEventsManager.ts +178 -0
- package/lib/DBSchemaBuilder.ts +225 -0
- package/lib/DboBuilder/DboBuilder.ts +319 -0
- package/lib/DboBuilder/DboBuilderTypes.ts +361 -0
- package/lib/DboBuilder/QueryBuilder/Functions.ts +1153 -0
- package/lib/DboBuilder/QueryBuilder/QueryBuilder.ts +288 -0
- package/lib/DboBuilder/QueryBuilder/getJoinQuery.ts +263 -0
- package/lib/DboBuilder/QueryBuilder/getNewQuery.ts +271 -0
- package/lib/DboBuilder/QueryBuilder/getSelectQuery.ts +136 -0
- package/lib/DboBuilder/QueryBuilder/prepareHaving.ts +22 -0
- package/lib/DboBuilder/QueryStreamer.ts +250 -0
- package/lib/DboBuilder/TableHandler/DataValidator.ts +428 -0
- package/lib/DboBuilder/TableHandler/TableHandler.ts +205 -0
- package/lib/DboBuilder/TableHandler/delete.ts +115 -0
- package/lib/DboBuilder/TableHandler/insert.ts +183 -0
- package/lib/DboBuilder/TableHandler/insertTest.ts +78 -0
- package/lib/DboBuilder/TableHandler/onDeleteFromFileTable.ts +62 -0
- package/lib/DboBuilder/TableHandler/runInsertUpdateQuery.ts +134 -0
- package/lib/DboBuilder/TableHandler/update.ts +126 -0
- package/lib/DboBuilder/TableHandler/updateBatch.ts +49 -0
- package/lib/DboBuilder/TableHandler/updateFile.ts +48 -0
- package/lib/DboBuilder/TableHandler/upsert.ts +34 -0
- package/lib/DboBuilder/ViewHandler/ViewHandler.ts +393 -0
- package/lib/DboBuilder/ViewHandler/count.ts +38 -0
- package/lib/DboBuilder/ViewHandler/find.ts +153 -0
- package/lib/DboBuilder/ViewHandler/getExistsCondition.ts +73 -0
- package/lib/DboBuilder/ViewHandler/getExistsFilters.ts +74 -0
- package/lib/DboBuilder/ViewHandler/getInfo.ts +32 -0
- package/lib/DboBuilder/ViewHandler/getTableJoinQuery.ts +84 -0
- package/lib/DboBuilder/ViewHandler/parseComplexFilter.ts +96 -0
- package/lib/DboBuilder/ViewHandler/parseFieldFilter.ts +105 -0
- package/lib/DboBuilder/ViewHandler/parseJoinPath.ts +208 -0
- package/lib/DboBuilder/ViewHandler/prepareSortItems.ts +163 -0
- package/lib/DboBuilder/ViewHandler/prepareWhere.ts +90 -0
- package/lib/DboBuilder/ViewHandler/size.ts +37 -0
- package/lib/DboBuilder/ViewHandler/subscribe.ts +118 -0
- package/lib/DboBuilder/ViewHandler/validateViewRules.ts +70 -0
- package/lib/DboBuilder/dboBuilderUtils.ts +222 -0
- package/lib/DboBuilder/getColumns.ts +114 -0
- package/lib/DboBuilder/getCondition.ts +201 -0
- package/lib/DboBuilder/getSubscribeRelatedTables.ts +190 -0
- package/lib/DboBuilder/getTablesForSchemaPostgresSQL.ts +426 -0
- package/lib/DboBuilder/insertNestedRecords.ts +355 -0
- package/lib/DboBuilder/parseUpdateRules.ts +187 -0
- package/lib/DboBuilder/prepareShortestJoinPaths.ts +186 -0
- package/lib/DboBuilder/runSQL.ts +182 -0
- package/lib/DboBuilder/runTransaction.ts +50 -0
- package/lib/DboBuilder/sqlErrCodeToMsg.ts +254 -0
- package/lib/DboBuilder/uploadFile.ts +69 -0
- package/lib/Event_Trigger_Tags.ts +118 -0
- package/lib/FileManager/FileManager.ts +358 -0
- package/lib/FileManager/getValidatedFileType.ts +69 -0
- package/lib/FileManager/initFileManager.ts +187 -0
- package/lib/FileManager/upload.ts +62 -0
- package/lib/FileManager/uploadStream.ts +79 -0
- package/lib/Filtering.ts +463 -0
- package/lib/JSONBValidation/validate_jsonb_schema_sql.ts +502 -0
- package/lib/JSONBValidation/validation.ts +143 -0
- package/lib/Logging.ts +127 -0
- package/lib/PostgresNotifListenManager.ts +143 -0
- package/lib/Prostgles.ts +485 -0
- package/lib/ProstglesTypes.ts +196 -0
- package/lib/PubSubManager/PubSubManager.ts +609 -0
- package/lib/PubSubManager/addSub.ts +138 -0
- package/lib/PubSubManager/addSync.ts +141 -0
- package/lib/PubSubManager/getCreatePubSubManagerError.ts +72 -0
- package/lib/PubSubManager/getPubSubManagerInitQuery.ts +662 -0
- package/lib/PubSubManager/initPubSubManager.ts +79 -0
- package/lib/PubSubManager/notifListener.ts +173 -0
- package/lib/PubSubManager/orphanTriggerCheck.ts +70 -0
- package/lib/PubSubManager/pushSubData.ts +55 -0
- package/lib/PublishParser/PublishParser.ts +162 -0
- package/lib/PublishParser/getFileTableRules.ts +124 -0
- package/lib/PublishParser/getSchemaFromPublish.ts +141 -0
- package/lib/PublishParser/getTableRulesWithoutFileTable.ts +177 -0
- package/lib/PublishParser/publishTypesAndUtils.ts +399 -0
- package/lib/RestApi.ts +127 -0
- package/lib/SchemaWatch/SchemaWatch.ts +90 -0
- package/lib/SchemaWatch/createSchemaWatchEventTrigger.ts +3 -0
- package/lib/SchemaWatch/getValidatedWatchSchemaType.ts +45 -0
- package/lib/SchemaWatch/getWatchSchemaTagList.ts +27 -0
- package/lib/SyncReplication.ts +557 -0
- package/lib/TableConfig/TableConfig.ts +468 -0
- package/lib/TableConfig/getColumnDefinitionQuery.ts +111 -0
- package/lib/TableConfig/getConstraintDefinitionQueries.ts +95 -0
- package/lib/TableConfig/getFutureTableSchema.ts +64 -0
- package/lib/TableConfig/getPGIndexes.ts +53 -0
- package/lib/TableConfig/getTableColumnQueries.ts +129 -0
- package/lib/TableConfig/initTableConfig.ts +326 -0
- package/lib/index.ts +13 -0
- package/lib/initProstgles.ts +319 -0
- package/lib/onSocketConnected.ts +102 -0
- package/lib/runClientRequest.ts +129 -0
- package/lib/shortestPath.ts +122 -0
- package/lib/typeTests/DBoGenerated.d.ts +320 -0
- package/lib/typeTests/dboTypeCheck.ts +81 -0
- package/lib/utils.ts +15 -0
- package/package.json +1 -1

package/lib/SyncReplication.ts
@@ -0,0 +1,557 @@

```ts
import { PubSubManager, SyncParams, pickKeys, omitKeys, log } from "./PubSubManager/PubSubManager";
import { OrderBy, WAL, AnyObject, SyncBatchParams } from "prostgles-types";
import { TableHandler } from "./DboBuilder/TableHandler/TableHandler";

export type ClientSyncInfo = Partial<{
  c_fr: AnyObject;
  c_lr: AnyObject;
  /**
   * PG count is ussually string due to bigint
   */
  c_count: number | string;
}>;

export type ServerSyncInfo = Partial<{
  s_fr: AnyObject;
  s_lr: AnyObject;
  /**
   * PG count is ussually string due to bigint
   */
  s_count: number | string;
}>

export type SyncBatchInfo = Partial<{
  from_synced: number | null;
  to_synced: number | null;
  end_offset: number | null;
}>;

export type onSyncRequestResponse = {
  onSyncRequest?: ClientSyncInfo
} | {
  err: AnyObject | string;
};

export type ClientExpressData = ClientSyncInfo & {
  data?: AnyObject[];
  deleted?: AnyObject[];
}

function getNumbers(numberArr: (null | undefined | string | number)[]): number[] {
  return numberArr.filter(v => v !== null && v !== undefined && Number.isFinite(+v)) as any;
}

/**
 * Server or client requested data sync
 */
export async function syncData(this: PubSubManager, sync: SyncParams, clientData: ClientExpressData | undefined, source: "trigger" | "client") {
  await this._log({
    type: "sync",
    command: "syncData",
    tableName: sync.table_name,
    sid: sync.sid,
    source,
    ...pickKeys(sync, ["socket_id", "condition", "last_synced", "is_syncing"]),
    lr: JSON.stringify(sync.lr),
    connectedSocketIds: this.dboBuilder.prostgles.connectedSockets.map(s => s.id),
    localParams: undefined,
    duration: -1,
    socketId: sync.socket_id
  });

  const {
    socket_id, channel_name, table_name, filter,
    table_rules, allow_delete = false, params,
    synced_field, id_fields = [], batch_size,
    wal, throttle = 0
  } = sync;

  const socket = this.sockets[socket_id];
  if (!socket) {
    return;
  }

  const sync_fields = [synced_field, ...id_fields.sort()],
    orderByAsc: OrderBy = sync_fields.reduce((a, v) => ({ ...a, [v]: true }), {}),
    rowsIdsMatch = (a?: AnyObject, b?: AnyObject) => {
      return a && b && !id_fields.find(key => (a[key]).toString() !== (b[key]).toString())
    },
    rowsFullyMatch = (a?: AnyObject, b?: AnyObject) => {
      return rowsIdsMatch(a, b) && a?.[synced_field].toString() === b?.[synced_field].toString();
    },
    getServerRowInfo = async (args: SyncBatchParams = {}): Promise<ServerSyncInfo> => {
      const { from_synced = null, to_synced = null, offset = 0, limit } = args;
      const _filter: AnyObject = { ...filter };

      if (from_synced || to_synced) {
        _filter[synced_field] = {
          ...(from_synced ? { $gte: from_synced } : {}),
          ...(to_synced ? { $lte: to_synced } : {})
        }
      }
      if (this.dbo?.[table_name]?.find === undefined || this?.dbo?.[table_name]?.count === undefined) {
        throw `dbo.${table_name}.find or .count are missing or not allowed`;
      }

      const first_rows = await this.dbo?.[table_name]?.find?.(_filter, { orderBy: orderByAsc, select: sync_fields, limit, offset }, undefined, table_rules);
      const last_rows = first_rows?.slice(-1); // Why not logic below?
      // const last_rows = await _this?.dbo[table_name]?.find?.(_filter, { orderBy: (orderByDesc as OrderBy), select: sync_fields, limit: 1, offset: -offset || 0 }, null, table_rules);
      const count = await this.dbo?.[table_name]?.count?.(_filter, undefined, undefined, table_rules);

      return { s_fr: first_rows?.[0] || null, s_lr: last_rows?.[0] || null, s_count: count }
    },
    getClientRowInfo = (args: SyncBatchInfo = {}) => {
      const { from_synced = null, to_synced = null, end_offset = null } = args;
      const res = new Promise<any>((resolve, reject) => {
        const onSyncRequest = { from_synced, to_synced, end_offset };//, forReal: true };
        socket.emit(channel_name, { onSyncRequest }, (resp?: onSyncRequestResponse) => {
          if (resp && "onSyncRequest" in resp && resp?.onSyncRequest) {
            const c_fr = resp.onSyncRequest.c_fr,
              c_lr = resp.onSyncRequest.c_lr,
              c_count = resp.onSyncRequest.c_count;

            // console.log(onSyncRequest, { c_fr, c_lr, c_count }, socket._user);
            return resolve({ c_fr, c_lr, c_count });
          } else if (resp && "err" in resp && resp?.err) {
            reject(resp.err);
          }
        });
      });

      return res;
    },
    getClientData = (from_synced = 0, offset = 0): Promise<AnyObject[]> => {
      return new Promise((resolve, reject) => {
        const onPullRequest = { from_synced: from_synced || 0, offset: offset || 0, limit: batch_size };
        socket.emit(channel_name, { onPullRequest }, async (resp?: { data?: AnyObject[] }) => {
          if (resp && resp.data && Array.isArray(resp.data)) {
            // console.log({ onPullRequest, resp }, socket._user)
            resolve(sortClientData(resp.data));
          } else {
            reject("unexpected onPullRequest response: " + JSON.stringify(resp));
          }
        });
      });

      function sortClientData(data: AnyObject[]) {
        return data.sort((a, b) => {
          /* Order by increasing synced and ids (sorted alphabetically) */
          return (+a[synced_field] - +b[synced_field]) || id_fields.sort().map(idKey => a[idKey] < b[idKey] ? -1 : a[idKey] > b[idKey] ? 1 : 0).find(v => v) || 0;
        });
      }
    },
    getServerData = async (from_synced = 0, offset = 0): Promise<AnyObject[]> => {
      const _filter = {
        ...filter,
        [synced_field]: { $gte: from_synced || 0 }
      };

      if (!this?.dbo?.[table_name]?.find) throw "_this?.dbo?.[table_name]?.find is missing";

      try {
        const res = this?.dbo?.[table_name]?.find?.(
          _filter,
          {
            select: params.select,
            orderBy: (orderByAsc as OrderBy),
            offset: offset || 0,
            limit: batch_size
          },
          undefined,
          table_rules
        );

        if (!res) throw "_this?.dbo?.[table_name]?.find is missing";

        return res;
      } catch (e) {
        console.error("Sync getServerData failed: ", e);
        throw "INTERNAL ERROR"
      }
    },
    deleteData = async (deleted: AnyObject[]) => {
      // console.log("deleteData deleteData deleteData " + deleted.length);
      if (allow_delete) {
        return Promise.all(deleted.map(async d => {
          const id_filter = pickKeys(d, id_fields);
          try {
            await (this.dbo[table_name] as TableHandler).delete(id_filter, undefined, undefined, table_rules);
            return 1;
          } catch (e) {
            console.error(e)
          }
          return 0;
        }))
      } else {
        console.warn("client tried to delete data without permission (allow_delete is false)")
      }
      return false;
    },

    /**
     * Upserts the given client data where synced_field is higher than on server
     */
    upsertData = async (data: AnyObject[]) => {

      const start = Date.now();
      const result = await this.dboBuilder.getTX(async (dbTX) => {
        const tbl = dbTX[table_name] as TableHandler;
        const existingData = await tbl.find(
          { $or: data.map(d => pickKeys(d, id_fields)) },
          {
            select: [synced_field, ...id_fields],
            orderBy: (orderByAsc as OrderBy),
          },
          undefined,
          table_rules
        );
        let inserts = data.filter(d => !existingData.find(ed => rowsIdsMatch(ed, d)));
        let updates = data.filter(d => existingData.find(ed => rowsIdsMatch(ed, d) && +ed[synced_field] < +d[synced_field]));
        try {
          if (!table_rules) throw "table_rules missing";

          if (table_rules.update && updates.length) {
            const updateData: [any, any][] = [];
            await Promise.all(updates.map(upd => {
              const id_filter = pickKeys(upd, id_fields);
              const syncSafeFilter = { $and: [id_filter, { [synced_field]: { "<": upd[synced_field] } }] }

              updateData.push([syncSafeFilter, omitKeys(upd, id_fields)])
            }));
            await tbl.updateBatch(updateData, { fixIssues: true }, table_rules);
          } else {
            updates = [];
          }

          if (table_rules.insert && inserts.length) {
            await tbl.insert(inserts, { fixIssues: true }, undefined, table_rules);
          } else {
            inserts = [];
          }

          return { inserts, updates };
        } catch (e) {
          console.trace(e);
          throw e;
        }

      }).then(({ inserts, updates }) => {
        log(`upsertData: inserted( ${inserts.length} ) updated( ${updates.length} ) total( ${data.length} ) \n last insert ${JSON.stringify(inserts.at(-1))} \n last update ${JSON.stringify(updates.at(-1))}`);
        return { inserted: inserts.length, updated: updates.length, total: data.length };
      })
        .catch(err => {
          console.trace("Something went wrong with syncing to server: \n ->", err, data.length, id_fields);
          return Promise.reject("Something went wrong with syncing to server: ")
        });

      await this._log({
        type: "sync",
        command: "upsertData",
        tableName: sync.table_name,
        rows: data.length,
        socketId: socket_id,
        sid: sync.sid,
        duration: Date.now() - start,
        connectedSocketIds: this.dboBuilder.prostgles.connectedSockets.map(s => s.id)
      });

      return result;
    },

    /**
     * Pushes the given data to client
     * @param isSynced = true if
     */
    pushData = async (data?: AnyObject[], isSynced = false, err: any = null) => {
      const start = Date.now();
      const result = await new Promise((resolve, reject) => {
        socket.emit(channel_name, { data, isSynced }, (resp?: { ok: boolean }) => {

          if (resp && resp.ok) {
            // console.log("PUSHED to client: fr/lr", data[0], data[data.length - 1]);
            resolve({ pushed: data?.length, resp })
          } else {
            reject(resp);
            console.error("Unexpected response");
          }
        });
      });

      await this._log({
        type: "sync",
        command: "pushData",
        tableName: sync.table_name,
        rows: data?.length ?? 0,
        socketId: socket_id,
        duration: Date.now() - start,
        sid: sync.sid,
        connectedSocketIds: this.dboBuilder.prostgles.connectedSockets.map(s => s.id)
      });

      return result;
    },

    /**
     * Returns the lowest synced_field between server and client by checking client and server sync data.
     * If last rows don't match it will find an earlier matching last row and use that last matching from_synced
     * If no rows or fully synced (c_lr and s_lr match) then returns null
     */
    getLastSynced = async (clientSyncInfo?: ClientSyncInfo): Promise<number | null> => {

      // Get latest row info
      const { c_fr, c_lr, c_count } = clientSyncInfo || await getClientRowInfo();
      const { s_fr, s_lr, s_count } = await getServerRowInfo();

      // console.log("getLastSynced", clientData, socket._user )

      let result = null;

      /* Nothing to sync */
      if (!c_fr && !s_fr || rowsFullyMatch(c_lr, s_lr)) { // c_count === s_count &&
        // sync.last_synced = null;
        result = null;

        /* Sync Everything */
      } else if (!rowsFullyMatch(c_fr, s_fr)) {
        if (c_fr && s_fr) {
          result = Math.min(c_fr[synced_field], s_fr[synced_field]);

        } else if (c_fr || s_fr) {
          result = (c_fr || s_fr)[synced_field];
        }

        /* Sync from last matching synced value */
      } else if (rowsFullyMatch(c_fr, s_fr)) {

        if (s_lr && c_lr) {
          result = Math.min(...getNumbers([c_lr[synced_field], s_lr[synced_field]]));
        } else {
          result = Math.min(...getNumbers([c_fr[synced_field], s_fr?.[synced_field]]));
        }

        const min_count = Math.min(...getNumbers([c_count, s_count]));
        let end_offset = 1;// Math.min(s_count, c_count) - 1;
        let step = 0;

        while (min_count > 5 && end_offset < min_count) {
          const { c_lr = null } = await getClientRowInfo({ from_synced: 0, to_synced: result, end_offset });
          // console.log("getLastSynced... end_offset > " + end_offset);
          let server_row;

          if (c_lr) {
            const _filter: AnyObject = {};
            sync_fields.map(key => {
              _filter[key] = c_lr[key];
            });
            server_row = await this?.dbo?.[table_name]?.find?.(_filter, { select: sync_fields, limit: 1 }, undefined, table_rules);
          }

          // if(rowsFullyMatch(c_lr, s_lr)){ //c_count === s_count &&
          if (server_row && server_row.length) {
            server_row = server_row[0];

            result = +server_row[synced_field];
            end_offset = min_count;
            // console.log(`getLastSynced found for ${table_name} -> ${result}`);
          } else {
            end_offset += 1 + step * (step > 4 ? 2 : 1);
            // console.log(`getLastSynced NOT found for ${table_name} -> ${result}`);
          }

          step++;
        }
      }

      return result;
    },

    updateSyncLR = (data: AnyObject) => {
      if (data.length) {
        const lastRow = data[data.length - 1];
        if (sync.lr?.[synced_field] && +sync.lr?.[synced_field] > +lastRow[synced_field]) {
          console.error({ syncIssue: "sync.lr[synced_field] is greater than lastRow[synced_field]" }, sync.table_name)
        }
        sync.lr = lastRow;
        sync.last_synced = +sync.lr?.[synced_field];
      }
    },

    /**
     * Will push pull sync between client and server from a given from_synced value
     */
    syncBatch = async (from_synced: SyncBatchInfo["from_synced"]) => {
      let offset = 0,
        canContinue = true;
      const limit = batch_size,
        min_synced = from_synced || 0,
        max_synced = from_synced;

      let inserted = 0, updated = 0, pushed = 0, deleted = 0, total = 0;

      // console.log("syncBatch", from_synced)

      while (canContinue) {
        const cData = await getClientData(min_synced, offset);

        if (cData.length) {
          const res = await upsertData(cData);
          inserted += res.inserted;
          updated += res.updated;
        }
        let sData: AnyObject[] | undefined;

        try {
          sData = await getServerData(min_synced, offset);
        } catch (e) {
          console.trace("sync getServerData err", e);
          await pushData(undefined, undefined, "Internal error. Check server logs");
          throw " d"
        }

        // console.log("allow_delete", table_rules.delete);
        if (allow_delete && table_rules?.delete) {
          const to_delete = sData.filter(d => {
            !cData.find(c => rowsIdsMatch(c, d))
          });
          await Promise.all(to_delete.map(d => {
            deleted++;
            return (this.dbo[table_name] as TableHandler).delete(pickKeys(d, id_fields), {}, undefined, table_rules);
          }));
          sData = await getServerData(min_synced, offset);
        }

        const forClient = sData.filter(s => {
          return !cData.find(c =>
            rowsIdsMatch(c, s) &&
            +c[synced_field] >= +s[synced_field]
          );
        });
        if (forClient.length) {
          const res: any = await pushData(forClient.filter(d => !sync.wal || !sync.wal.isInHistory(d)));
          pushed += res.pushed;
        }

        if (sData.length) {
          updateSyncLR(sData);
          total += sData.length;
        }
        offset += sData.length;

        // canContinue = offset >= limit;
        canContinue = sData.length >= limit;
        // console.log(`sData ${sData.length} limit ${limit}`);
      }
      log(`server.syncBatch ${table_name}: inserted( ${inserted} ) updated( ${updated} ) deleted( ${deleted} ) pushed to client( ${pushed} ) total( ${total} )`, socket._user);

      return true;
    };

  if (!wal) {
    /* Used to throttle and merge incomming updates */
    sync.wal = new WAL({
      id_fields, synced_field, throttle, batch_size,
      DEBUG_MODE: this.dboBuilder.prostgles.opts.DEBUG_MODE,
      onSendStart: () => {
        sync.is_syncing = true;
      },
      onSend: async (data) => {
        // console.log("WAL upsertData START", data)
        const res = await upsertData(data);
        // const max_incoming_synced = Math.max(...data.map(d => +d[synced_field]));
        // if(Number.isFinite(max_incoming_synced) && max_incoming_synced > +sync.last_synced){
        //   sync.last_synced = max_incoming_synced;
        // }
        // console.log("WAL upsertData END")

        /******** */
        /* TO DO -> Store and push patch updates instead of full data if and where possible */
        /******** */
        // 1. Store successfully upserted wal items for a couple of seconds
        // 2. When pushing data to clients check if any matching wal items exist
        // 3. Replace text fields with matching patched data

        return res;
      },
      onSendEnd: (batch) => {
        updateSyncLR(batch);
        sync.is_syncing = false;
        // console.log("syncData from WAL.onSendEnd")

        /**
         * After all data was inserted request SyncInfo from client and sync again if necessary
         */
        this.syncData(sync, undefined, source);
      },
    })
  }

  /* Debounce sync requests */
  if (!sync.wal) throw "sync.wal missing";
  if (!sync.wal.isSending() && sync.is_syncing) {
    if (!this.syncTimeout) {
      this.syncTimeout = setTimeout(() => {
        this.syncTimeout = undefined;
        // console.log("SYNC FROM TIMEOUT")
        this.syncData(sync, undefined, source);
      }, throttle)
    }
    // console.log("SYNC THROTTLE")
    return;
  }

  // console.log("syncData", clientData)

  /**
   * Express data sent from a client that has already been synced
   * Add to WAL manager which will sync at the end
   */
  if (clientData) {
    if (clientData.data && Array.isArray(clientData.data) && clientData.data.length) {
      if (!sync.wal) throw "sync.wal missing";

      sync.wal.addData(clientData.data.map(d => ({ current: d })));
      return;
      // await upsertData(clientData.data, true);

      /* Not expecting this anymore. use normal db.table.delete channel */
    } else if (clientData.deleted && Array.isArray(clientData.deleted) && clientData.deleted.length) {
      await deleteData(clientData.deleted);
    }
  } else {
    // do nothing
  }
  if (sync.wal.isSending()) return;

  sync.is_syncing = true;

  // from synced does not make sense. It should be sync.lr only!!!
  let from_synced = null;

  /** Sync was already synced */
  if (sync.lr) {
    const { s_lr } = await getServerRowInfo();

    /* Make sure trigger is not firing on freshly synced data */
    if (!rowsFullyMatch(sync.lr, s_lr)) {
      from_synced = sync.last_synced;
    } else {
      // console.log("rowsFullyMatch")
    }
    // console.log(table_name, sync.lr[synced_field])
  } else {
    from_synced = await getLastSynced(clientData);
  }

  if (from_synced !== null) {
    await syncBatch(from_synced);
  } else {
    // console.log("from_synced is null")
  }

  await pushData([], true);

  sync.is_syncing = false;
  // console.log(`Finished sync for ${table_name}`, socket._user);
}
```
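
For context, `syncData` drives the whole replication exchange over a single per-table socket channel: it emits `{ onSyncRequest }` and expects `{ onSyncRequest: { c_fr, c_lr, c_count } }` (or `{ err }`) back, emits `{ onPullRequest }` and expects `{ data }`, and pushes `{ data, isSynced }` expecting `{ ok: true }`. The sketch below shows the client half of that handshake under stated assumptions: the server URL, channel name, and in-memory `localRows` store are hypothetical stand-ins, and the `end_offset` probing used by `getLastSynced` is ignored; in practice the prostgles client library is expected to implement this handshake for you.

```ts
// Minimal client-side sketch of the sync channel protocol used by syncData (assumptions noted above).
import { io } from "socket.io-client";
import type { AnyObject } from "prostgles-types";

const socket = io("http://localhost:3001"); // hypothetical server URL
const channelName = "sync.my_table";        // hypothetical per-table sync channel
const synced_field = "last_modified";
const id_fields = ["id"];
const localRows: AnyObject[] = [];          // stand-in for the client's local store

// Rows ordered by synced_field then id_fields, mirroring orderByAsc on the server
const sorted = () =>
  [...localRows].sort((a, b) =>
    (+a[synced_field] - +b[synced_field]) ||
    id_fields.map(k => (a[k] < b[k] ? -1 : a[k] > b[k] ? 1 : 0)).find(v => v) || 0
  );

socket.on(channelName, (payload: AnyObject, ack: (resp: AnyObject) => void) => {
  if (payload.onSyncRequest) {
    // Server asks for ClientSyncInfo: first row, last row and count (c_fr, c_lr, c_count)
    const rows = sorted();
    ack({ onSyncRequest: { c_fr: rows[0], c_lr: rows.at(-1), c_count: rows.length } });
  } else if (payload.onPullRequest) {
    // Server pulls a batch of local rows with synced_field >= from_synced
    const { from_synced = 0, offset = 0, limit = 100 } = payload.onPullRequest;
    const data = sorted()
      .filter(r => +r[synced_field] >= +from_synced)
      .slice(offset, offset + limit);
    ack({ data });
  } else if (payload.data) {
    // Server pushes rows; isSynced === true marks the end of a sync pass
    for (const row of payload.data) {
      const idx = localRows.findIndex(r => id_fields.every(k => `${r[k]}` === `${row[k]}`));
      if (idx >= 0) localRows[idx] = row;
      else localRows.push(row);
    }
    ack({ ok: true });
  }
});
```

Because every server-to-client emit carries an acknowledgement callback, each message doubles as a request/response pair, which is what lets `syncBatch` alternate pulls (`onPullRequest`) and pushes (`pushData`) over one channel without any extra routing.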