prostgles-server 4.2.158 → 4.2.160
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/Auth/AuthTypes.d.ts +4 -8
- package/dist/Auth/AuthTypes.d.ts.map +1 -1
- package/dist/Auth/setAuthProviders.d.ts.map +1 -1
- package/dist/Auth/setAuthProviders.js +4 -5
- package/dist/Auth/setAuthProviders.js.map +1 -1
- package/dist/Auth/setEmailProvider.js +3 -3
- package/dist/Auth/setEmailProvider.js.map +1 -1
- package/package.json +1 -1
- package/lib/Auth/AuthHandler.ts +0 -436
- package/lib/Auth/AuthTypes.ts +0 -285
- package/lib/Auth/getSafeReturnURL.ts +0 -35
- package/lib/Auth/sendEmail.ts +0 -83
- package/lib/Auth/setAuthProviders.ts +0 -129
- package/lib/Auth/setEmailProvider.ts +0 -85
- package/lib/Auth/setupAuthRoutes.ts +0 -161
- package/lib/DBEventsManager.ts +0 -178
- package/lib/DBSchemaBuilder.ts +0 -225
- package/lib/DboBuilder/DboBuilder.ts +0 -319
- package/lib/DboBuilder/DboBuilderTypes.ts +0 -361
- package/lib/DboBuilder/QueryBuilder/Functions.ts +0 -1153
- package/lib/DboBuilder/QueryBuilder/QueryBuilder.ts +0 -288
- package/lib/DboBuilder/QueryBuilder/getJoinQuery.ts +0 -263
- package/lib/DboBuilder/QueryBuilder/getNewQuery.ts +0 -271
- package/lib/DboBuilder/QueryBuilder/getSelectQuery.ts +0 -136
- package/lib/DboBuilder/QueryBuilder/prepareHaving.ts +0 -22
- package/lib/DboBuilder/QueryStreamer.ts +0 -250
- package/lib/DboBuilder/TableHandler/DataValidator.ts +0 -428
- package/lib/DboBuilder/TableHandler/TableHandler.ts +0 -205
- package/lib/DboBuilder/TableHandler/delete.ts +0 -115
- package/lib/DboBuilder/TableHandler/insert.ts +0 -183
- package/lib/DboBuilder/TableHandler/insertTest.ts +0 -78
- package/lib/DboBuilder/TableHandler/onDeleteFromFileTable.ts +0 -62
- package/lib/DboBuilder/TableHandler/runInsertUpdateQuery.ts +0 -134
- package/lib/DboBuilder/TableHandler/update.ts +0 -126
- package/lib/DboBuilder/TableHandler/updateBatch.ts +0 -49
- package/lib/DboBuilder/TableHandler/updateFile.ts +0 -48
- package/lib/DboBuilder/TableHandler/upsert.ts +0 -34
- package/lib/DboBuilder/ViewHandler/ViewHandler.ts +0 -393
- package/lib/DboBuilder/ViewHandler/count.ts +0 -38
- package/lib/DboBuilder/ViewHandler/find.ts +0 -153
- package/lib/DboBuilder/ViewHandler/getExistsCondition.ts +0 -73
- package/lib/DboBuilder/ViewHandler/getExistsFilters.ts +0 -74
- package/lib/DboBuilder/ViewHandler/getInfo.ts +0 -32
- package/lib/DboBuilder/ViewHandler/getTableJoinQuery.ts +0 -84
- package/lib/DboBuilder/ViewHandler/parseComplexFilter.ts +0 -96
- package/lib/DboBuilder/ViewHandler/parseFieldFilter.ts +0 -105
- package/lib/DboBuilder/ViewHandler/parseJoinPath.ts +0 -208
- package/lib/DboBuilder/ViewHandler/prepareSortItems.ts +0 -163
- package/lib/DboBuilder/ViewHandler/prepareWhere.ts +0 -90
- package/lib/DboBuilder/ViewHandler/size.ts +0 -37
- package/lib/DboBuilder/ViewHandler/subscribe.ts +0 -118
- package/lib/DboBuilder/ViewHandler/validateViewRules.ts +0 -70
- package/lib/DboBuilder/dboBuilderUtils.ts +0 -222
- package/lib/DboBuilder/getColumns.ts +0 -114
- package/lib/DboBuilder/getCondition.ts +0 -201
- package/lib/DboBuilder/getSubscribeRelatedTables.ts +0 -190
- package/lib/DboBuilder/getTablesForSchemaPostgresSQL.ts +0 -426
- package/lib/DboBuilder/insertNestedRecords.ts +0 -355
- package/lib/DboBuilder/parseUpdateRules.ts +0 -187
- package/lib/DboBuilder/prepareShortestJoinPaths.ts +0 -186
- package/lib/DboBuilder/runSQL.ts +0 -182
- package/lib/DboBuilder/runTransaction.ts +0 -50
- package/lib/DboBuilder/sqlErrCodeToMsg.ts +0 -254
- package/lib/DboBuilder/uploadFile.ts +0 -69
- package/lib/Event_Trigger_Tags.ts +0 -118
- package/lib/FileManager/FileManager.ts +0 -358
- package/lib/FileManager/getValidatedFileType.ts +0 -69
- package/lib/FileManager/initFileManager.ts +0 -187
- package/lib/FileManager/upload.ts +0 -62
- package/lib/FileManager/uploadStream.ts +0 -79
- package/lib/Filtering.ts +0 -463
- package/lib/JSONBValidation/validate_jsonb_schema_sql.ts +0 -502
- package/lib/JSONBValidation/validation.ts +0 -143
- package/lib/Logging.ts +0 -127
- package/lib/PostgresNotifListenManager.ts +0 -143
- package/lib/Prostgles.ts +0 -485
- package/lib/ProstglesTypes.ts +0 -196
- package/lib/PubSubManager/PubSubManager.ts +0 -609
- package/lib/PubSubManager/addSub.ts +0 -138
- package/lib/PubSubManager/addSync.ts +0 -141
- package/lib/PubSubManager/getCreatePubSubManagerError.ts +0 -72
- package/lib/PubSubManager/getPubSubManagerInitQuery.ts +0 -662
- package/lib/PubSubManager/initPubSubManager.ts +0 -79
- package/lib/PubSubManager/notifListener.ts +0 -173
- package/lib/PubSubManager/orphanTriggerCheck.ts +0 -70
- package/lib/PubSubManager/pushSubData.ts +0 -55
- package/lib/PublishParser/PublishParser.ts +0 -162
- package/lib/PublishParser/getFileTableRules.ts +0 -124
- package/lib/PublishParser/getSchemaFromPublish.ts +0 -141
- package/lib/PublishParser/getTableRulesWithoutFileTable.ts +0 -177
- package/lib/PublishParser/publishTypesAndUtils.ts +0 -399
- package/lib/RestApi.ts +0 -127
- package/lib/SchemaWatch/SchemaWatch.ts +0 -90
- package/lib/SchemaWatch/createSchemaWatchEventTrigger.ts +0 -3
- package/lib/SchemaWatch/getValidatedWatchSchemaType.ts +0 -45
- package/lib/SchemaWatch/getWatchSchemaTagList.ts +0 -27
- package/lib/SyncReplication.ts +0 -557
- package/lib/TableConfig/TableConfig.ts +0 -468
- package/lib/TableConfig/getColumnDefinitionQuery.ts +0 -111
- package/lib/TableConfig/getConstraintDefinitionQueries.ts +0 -95
- package/lib/TableConfig/getFutureTableSchema.ts +0 -64
- package/lib/TableConfig/getPGIndexes.ts +0 -53
- package/lib/TableConfig/getTableColumnQueries.ts +0 -129
- package/lib/TableConfig/initTableConfig.ts +0 -326
- package/lib/index.ts +0 -13
- package/lib/initProstgles.ts +0 -319
- package/lib/onSocketConnected.ts +0 -102
- package/lib/runClientRequest.ts +0 -129
- package/lib/shortestPath.ts +0 -122
- package/lib/typeTests/DBoGenerated.d.ts +0 -320
- package/lib/typeTests/dboTypeCheck.ts +0 -81
- package/lib/utils.ts +0 -15
- package/tests/client/hooks.spec.ts +0 -205
- package/tests/client/index.ts +0 -139
- package/tests/client/package-lock.json +0 -637
- package/tests/client/package.json +0 -26
- package/tests/client/renderReactHook.ts +0 -177
- package/tests/client/tsconfig.json +0 -15
- package/tests/client/useProstgles.spec.ts +0 -120
- package/tests/clientFileTests.spec.ts +0 -102
- package/tests/clientOnlyQueries.spec.ts +0 -667
- package/tests/clientRestApi.spec.ts +0 -82
- package/tests/config_test/DBoGenerated.d.ts +0 -407
- package/tests/config_test/index.html +0 -109
- package/tests/config_test/index.js +0 -86
- package/tests/config_test/index.js.map +0 -1
- package/tests/config_test/index.ts +0 -91
- package/tests/config_test/init.sql +0 -48
- package/tests/config_test/package.json +0 -29
- package/tests/config_test/tsconfig.json +0 -23
- package/tests/config_testDBoGenerated.d.ts +0 -407
- package/tests/isomorphicQueries.spec.ts +0 -1493
- package/tests/server/DBoGenerated.d.ts +0 -537
- package/tests/server/index.html +0 -73
- package/tests/server/index.ts +0 -289
- package/tests/server/init.sql +0 -224
- package/tests/server/package-lock.json +0 -2164
- package/tests/server/package.json +0 -25
- package/tests/server/publishTypeCheck.ts +0 -136
- package/tests/server/server.ts +0 -35
- package/tests/server/testPublish.ts +0 -147
- package/tests/server/testTableConfig.ts +0 -156
- package/tests/server/tsconfig.json +0 -22
- package/tests/serverOnlyQueries.spec.ts +0 -32
- package/tests/test.sh +0 -20
package/tests/clientOnlyQueries.spec.ts
@@ -1,667 +0,0 @@
-import { strict as assert } from 'assert';
-import type { DBHandlerClient, AuthHandler } from "./client";
-import { AnyObject, DBSchemaTable, SocketSQLStreamPacket, isDefined } from "prostgles-types";
-import { tryRun, tryRunP } from './isomorphicQueries.spec';
-import { describe, test } from "node:test";
-
-export const clientOnlyQueries = async (db: DBHandlerClient, auth: AuthHandler, log: (...args: any[]) => any, methods, tableSchema: DBSchemaTable[], token: string) => {
-
-  await describe("Client only queries", async (t) => {
-
-    // await test("Social auth redirect routes work", async ( ) => {
-    // assert.equal(!!auth.login.withProvider.github, true);
-    // const response = await fetch("http://localhost:3001/auth/github");
-    // assert.equal(response.status, 302);
-    // });
-
-    await test("SQL Stream more than 1k records", async ( ) => {
-      const expectedRowCount = 2e3;
-      await tryRunP("", async (resolve, reject) => {
-        let rows: any[] = [];
-        const res = await db.sql!(`SELECT * FROM generate_series(1, ${expectedRowCount})`, {}, { returnType: "stream" });
-        const listener = async (packet: SocketSQLStreamPacket) => {
-          if(packet.type === "error"){
-            reject(packet.error);
-          } else {
-            if(packet.rows){
-              rows = [
-                ...rows,
-                ...packet.rows
-              ]
-            }
-            if(packet.ended){
-              assert.equal(packet.ended, true);
-              assert.equal(rows.length, expectedRowCount);
-              resolve("ok");
-            }
-          }
-        };
-        await res.start(listener);
-      });
-    });
-
-    await test("SQL Stream persistedConnection with streamLimit works for subsequent queries", async () => {
-      await tryRunP("", async (resolve, reject) => {
-        const query = "SELECT * FROM generate_series(1, 100)";
-        let results: any[] = [];
-        const streamLimit = 10;
-        const res = await db.sql!(query, {}, { returnType: "stream", persistStreamConnection: true, streamLimit });
-        const listener = async (packet: SocketSQLStreamPacket) => {
-          try {
-
-            if(packet.type === "error"){
-              reject(packet.error);
-            } else {
-              results = results.concat(packet.rows);
-              if(results.length === streamLimit){
-                assert.equal(packet.type, "data");
-                assert.equal(packet.ended, true);
-                assert.equal(packet.rows.length, 10);
-                startHandler.run(`SELECT '${query}' as query`).catch(reject);
-              } else {
-                assert.equal(packet.type, "data");
-                assert.equal(packet.ended, true);
-                assert.equal(packet.rows.length, 1);
-                assert.equal(packet.rows[0][0], query);
-                resolve("ok");
-              }
-            }
-          } catch(err){
-            reject(err);
-          }
-        };
-        const startHandler = await res.start(listener);
-      });
-    });
-
-    await test("SQL Stream ensure the connection is never released (same pg_backend_pid is the same for subsequent) when using persistConnectionId", async () => {
-      await tryRunP("", async (resolve, reject) => {
-        const query = "SELECT pg_backend_pid()";
-        const res = await db.sql!(query, {}, { returnType: "stream", persistStreamConnection: true });
-        const pids: number[] = [];
-        const listener = async (packet: SocketSQLStreamPacket) => {
-          if(packet.type === "error"){
-            reject(packet.error);
-          } else {
-            assert.equal(packet.type, "data");
-            assert.equal(packet.ended, true);
-            assert.equal(packet.rows.length, 1);
-            const pid = packet.rows[0][0];
-            pids.push(pid);
-            if(pids.length === 1){
-              startHandler.run(query).catch(reject);
-            }
-            if(pids.length === 2){
-              assert.equal(pids[0], pids[1]);
-              resolve("ok");
-            }
-          }
-        };
-        const startHandler = await res.start(listener);
-      });
-    })
-
-    await test("SQL Stream stop kills the query", async ( ) => {
-      await tryRunP("", async (resolve, reject) => {
-        const query = "SELECT * FROM pg_sleep(5)";
-        const res = await db.sql!(query, {}, { returnType: "stream" });
-        const listener = async (packet: SocketSQLStreamPacket) => {
-          if(packet.type === "error"){
-            const queryState = await db.sql!("SELECT * FROM pg_stat_activity WHERE query = $1", [query], { returnType: "rows" });
-            assert.equal(queryState.length, 1);
-            assert.equal(queryState[0].state, "idle");
-            assert.equal(packet.error.message, "canceling statement due to user request");
-            resolve("ok");
-          } else {
-            assert.equal(packet.type, "data");
-            assert.equal(packet.ended, true);
-            assert.deepStrictEqual(packet.rows, [['']]);
-            reject("SQL Stream stop kills the query");
-          }
-        };
-        const startHandler = await res.start(listener);
-        setTimeout(() => {
-          startHandler.stop().catch(reject);
-        }, 1000);
-      });
-    })
-
-
-    await test("SQL Stream limit works", async ( ) => {
-
-      await tryRunP("", async (resolve, reject) => {
-        const res = await db.sql!("SELECT * FROM generate_series(1, 1e5)", {}, { returnType: "stream", streamLimit: 10 });
-        const listener = async (packet: SocketSQLStreamPacket) => {
-          if(packet.type === "error"){
-            reject(packet.error);
-          } else {
-            assert.equal(packet.type, "data");
-            assert.equal(packet.ended, true);
-            assert.equal(packet.rows.length, 10);
-            resolve("ok");
-          }
-        };
-        await res.start(listener);
-      });
-    });
-
-    await test("SQL Stream stop with terminate kills the query", async ( ) => {
-      await tryRunP("", async (resolve, reject) => {
-        const totalRows = 5e6;
-        const query = `SELECT * FROM generate_series(1, ${totalRows})`;
-        const res = await db.sql!(query, {}, { returnType: "stream" });
-        const rowsReceived: any[] = [];
-        const listener = async (packet: SocketSQLStreamPacket) => {
-          if(packet.type === "error"){
-            const queryState = await db.sql!("SELECT * FROM pg_stat_activity WHERE query = $1", [query], { returnType: "rows" });
-            assert.equal(queryState.length, 0);
-            resolve("ok");
-          } else {
-            try {
-              rowsReceived.push(...packet.rows);
-              console.log(rowsReceived.length)
-              assert.equal(packet.ended, false);
-              assert.equal(rowsReceived.length < totalRows, true);
-            } catch(error){
-              reject(error);
-            }
-          }
-        };
-        const startHandler = await res.start(listener);
-        setTimeout(() => {
-          startHandler.stop(true).catch(reject);
-        }, 22);
-      });
-    });
-
-    await test("SQL Stream", async () => {
-      await Promise.all([1e3, 1e2].map(async (numberOfRows) => {
-        await tryRunP("", async (resolve, reject) => {
-          const res = await db.sql!(`SELECT v.* FROM generate_series(1, ${numberOfRows}) v`, {}, { returnType: "stream" });
-          let rows: any[] = [];
-          const listener = async (packet: SocketSQLStreamPacket) => {
-            if(packet.type === "error"){
-              reject(packet.error);
-            } else {
-              rows = rows.concat(packet.rows);
-              if(packet.ended){
-                assert.equal(rows.length, numberOfRows);
-                resolve("ok");
-              }
-            }
-          };
-          await res.start(listener);
-        });
-      }));
-    });
-
-    await test("SQL Stream parallel execution + parameters", async ( ) => {
-      await tryRunP("", async (resolve, reject) => {
-        const getExpected = (val: string) => new Promise(async (resolve, reject) => {
-          const res = await db.sql!("SELECT ${val} as val", { val }, { returnType: "stream" });
-          const listener = async (packet: SocketSQLStreamPacket) => {
-            try {
-              assert.equal(packet.type, "data");
-              assert.equal(packet.ended, true);
-              assert.deepStrictEqual(packet.rows, [[val]]);
-              resolve(1);
-            } catch(err){
-              reject(err);
-            }
-          };
-          await res.start(listener);
-        });
-        let resolved = 0;
-        const expected = ["a", "b", "c"];
-        expected.forEach((val) => {
-          getExpected(val).then(() => {
-            resolved++;
-            if(resolved === expected.length){
-              resolve("ok");
-            }
-          }).catch(reject);
-        })
-      });
-    });
-    await test("SQL Stream query error structure matches default sql run error", async ( ) => {
-      await tryRunP("", async (resolve, reject) => {
-        const badQuery = "SELECT * FROM not_existing_table"
-        const res = await db.sql!(badQuery, {}, { returnType: "stream" });
-        const listener = async (packet: SocketSQLStreamPacket) => {
-          try {
-            const normalSqlError = await db.sql!(badQuery, {}).catch(err => err);
-            assert.equal(packet.type, "error");
-            assert.equal(packet.error.message, 'relation "not_existing_table" does not exist');
-            assert.deepEqual(packet.error, normalSqlError);
-            resolve("ok");
-          } catch(err){
-            reject(err);
-          }
-        };
-        await res.start(listener);
-      });
-    });
-    await test("SQL Stream streamLimit", async ( ) => {
-      await tryRunP("", async (resolve, reject) => {
-        const generate_series = "SELECT * FROM generate_series(1, 100)";
-        const res = await db.sql!(generate_series, {}, { returnType: "stream", streamLimit: 10 });
-        const listener = async (packet: SocketSQLStreamPacket) => {
-          if(packet.type === "error"){
-            reject(packet.error);
-          } else {
-            assert.equal(packet.type, "data");
-            assert.equal(packet.ended, true);
-            assert.equal(packet.rows.length, 10);
-
-            const normalSql = await db.sql!(generate_series, {});
-
-            /** fields the same as on normal sql request */
-            assert.deepStrictEqual(packet.fields, normalSql.fields);
-
-            /** result is rowMode=array */
-            assert.equal(Array.isArray(packet.rows), true);
-            assert.equal(Array.isArray(packet.rows[0]), true);
-
-            assert.deepStrictEqual(packet.rows.flat(), Array.from({ length: 10 }, (_, i) => i + 1).flat());
-            resolve("ok");
-          }
-        };
-        await res.start(listener);
-      });
-    });
-
-    await test("SQL Stream table fields are the same as on default request", async ( ) => {
-      await tryRunP("", async (resolve, reject) => {
-        await db.sql!("TRUNCATE planes RESTART IDENTITY CASCADE;", {});
-        await db.sql!("INSERT INTO planes (last_updated) VALUES (56789);", {});
-        const res = await db.sql!("SELECT * FROM planes", {}, { returnType: "stream" });
-        const listener = async (packet: SocketSQLStreamPacket) => {
-          if(packet.type === "error"){
-            reject(packet.error);
-          } else {
-            assert.equal(packet.type, "data");
-            assert.equal(packet.ended, true);
-            assert.equal(packet.rows.length, 1);
-            const normalSql = await db.sql!("SELECT * FROM planes LIMIT 1", {});
-            await db.sql!("DELETE FROM planes", {});
-            assert.deepStrictEqual(packet.fields, normalSql.fields);
-            assert.equal(packet.fields.length > 0, true);
-            resolve("ok");
-          }
-        };
-        await res.start(listener);
-      });
-    });
-    await test("SQL Stream works for multiple statements", async ( ) => {
-      await tryRunP("", async (resolve, reject) => {
-        const res = await db.sql!("SELECT * FROM planes; SELECT 1 as a", {}, { returnType: "stream" });
-        const listener = async (packet: SocketSQLStreamPacket) => {
-          if(packet.type === "error"){
-            reject(packet.error);
-          } else {
-            assert.equal(packet.type, "data");
-            assert.equal(packet.ended, true);
-            assert.equal(packet.rows.length, 1);
-            const normalSql = await db.sql!("SELECT 1 as a", {});
-            await db.sql!("DELETE FROM planes", {});
-            assert.deepStrictEqual(packet.fields, normalSql.fields);
-            assert.equal(packet.fields.length > 0, true);
-            resolve("ok");
-          }
-        };
-        await res.start(listener);
-      });
-    });
-
-
-    /**
-     * tableSchema must contan an array of all tables and their columns that have getInfo and getColumns allowed
-     */
-    await test("Check tableSchema", async () => {
-      const dbTables = Object.keys(db).map(k => {
-        const h = db[k];
-        return !!(h.getColumns && h.getInfo)? k : undefined;
-      }).filter(isDefined);
-      const missingTbl = dbTables.find(t => !tableSchema.some(st => st.name === t));
-      if(missingTbl) throw `${missingTbl} is missing from tableSchema: ${JSON.stringify(tableSchema)}`
-      const missingscTbl = tableSchema.find(t => !dbTables.includes(t.name));
-      if(missingscTbl) throw `${missingscTbl} is missing from db`;
-
-      await Promise.all(tableSchema.map(async tbl => {
-        const cols = await db[tbl.name]?.getColumns?.();
-        const info = await db[tbl.name]?.getInfo?.();
-        assert.deepStrictEqual(tbl.columns, cols);
-        assert.deepStrictEqual(tbl.info, info);
-      }))
-    });
-
-    const testRealtime = () => {
-      return new Promise(async (resolveTest, rejectTest) => {
-        try {
-
-          /* METHODS */
-          const t222 = await methods.get();
-          assert.equal(t222, 222, "methods.get() failed");
-
-          /* RAWSQL */
-          await tryRun("SQL Full result", async () => {
-            if(!db.sql) throw "db.sql missing";
-            const sqlStatement = await db.sql("SELECT $1", [1], { returnType: "statement" });
-            assert.equal(sqlStatement, "SELECT 1", "db.sql statement query failed");
-
-
-            await db.sql("SELECT 1 -- ${param}", {}, { hasParams: false });
-
-            const arrayMode = await db.sql("SELECT 1 as a, 2 as a", undefined, { returnType: "arrayMode" });
-            assert.equal(arrayMode.rows?.[0].join("."), "1.2", "db.sql statement arrayMode failed");
-            assert.equal(arrayMode.fields?.map(f => f.name).join("."), "a.a", "db.sql statement arrayMode failed");
-
-            const select1 = await db.sql("SELECT $1 as col1", [1], { returnType: "rows" });
-            assert.deepStrictEqual(select1[0], { col1: 1 }, "db.sql justRows query failed");
-
-            const fullResult = await db.sql("SELECT $1 as col1", [1]);
-            // console.log(fullResult)
-            assert.deepStrictEqual(fullResult.rows[0], { col1: 1 }, "db.sql query failed");
-            assert.deepStrictEqual(fullResult.fields, [ {
-              name: 'col1',
-              tableID: 0,
-              columnID: 0,
-              dataTypeID: 23,
-              dataTypeSize: 4,
-              dataTypeModifier: -1,
-              format: 'text',
-              dataType: 'int4',
-              udt_name: 'int4',
-              tsDataType: "number"
-            }] , "db.sql query failed");
-          });
-
-          await tryRunP("sql LISTEN NOTIFY events", async (resolve, reject) => {
-            if(!db.sql) throw "db.sql missing";
-
-            try {
-
-              const sub = await db.sql("LISTEN chnl ", {}, { allowListen: true, returnType: "arrayMode" });
-              if(!("addListener" in sub)) {
-                reject("addListener missing");
-                return
-              }
-
-              sub.addListener(notif => {
-                const expected = "hello"
-                if(notif === expected) resolve(true);
-                else reject(`Notif value is not what we expect: ${JSON.stringify(notif)} is not ${JSON.stringify(expected)} (expected) `)
-              });
-              db.sql("NOTIFY chnl , 'hello'; ");
-            } catch(e){
-              reject(e);
-            }
-          });
-
-          await tryRunP("sql NOTICE events", async (resolve, reject) => {
-            if(!db.sql) throw "db.sql missing";
-
-            const sub = await db.sql("", {}, { returnType: "noticeSubscription" });
-
-            sub.addListener(notice => {
-              const expected = "hello2"
-              if(notice.message === expected) resolve(true);
-              else reject(`Notice value is not what we expect: ${JSON.stringify(notice)} is not ${JSON.stringify(expected)} (expected) `)
-            });
-            db.sql(`
-              DO $$
-              BEGIN
-
-                RAISE NOTICE 'hello2';
-
-              END $$;
-            `);
-          }, { log });
-
-
-          /* REPLICATION */
-          log("Started testRealtime")
-          const start = Date.now();
-
-          await db.planes.delete!();
-          await db.sql!("TRUNCATE planes RESTART IDENTITY CASCADE;", {});
-          let inserts = new Array(100).fill(null).map((d, i) => ({ id: i, flight_number: `FN${i}`, x: Math.random(), y: i }));
-          await db.planes.insert!(inserts);
-
-          const CLOCK_DRIFT = 2000;
-
-          if((await db.planes.count!()) !== 100) throw "Not 100 planes";
-
-          /**
-           * Two listeners are added at the same time to dbo.planes (which has 100 records):
-           * subscribe({ x: 10 }
-           * sync({}
-           *
-           * Then sync starts updating x to 10 for each record
-           * subscribe waits for 100 records of x=10 and then updates everything to x=20
-           * sync waits for 100 records of x=20 and finishes the test
-           */
-
-          /* After all sync records are updated to x10 here we'll update them to x20 */
-          const sP = await db.planes.subscribe!({ x: 10 }, { }, async planes => {
-
-            const p10 = planes.filter(p => p.x == 10);
-            log(Date.now() + ": sub stats: x10 -> " + p10.length + " x20 ->" + planes.filter(p => p.x == 20).length);
-
-            if(p10.length === 100){
-
-              /** 2 second delay to account for client-server clock drift */
-              setTimeout(async () => {
-
-                // db.planes.findOne({}, { select: { last_updated: "$max"}}).then(log);
-
-                await sP.unsubscribe();
-                log(Date.now() + ": sub: db.planes.update({}, { x: 20, last_updated });");
-                const dLastUpdated = Math.max(...p10.map(v => +v.last_updated))
-                const last_updated = Date.now();
-                if(dLastUpdated >= last_updated) throw "dLastUpdated >= last_updated should not happen"
-                await db.planes.update!({}, { x: 20, last_updated });
-                log(Date.now() + ": sub: Updated to x20" , await db.planes.count!({ x: 20 }))
-
-                // db.planes.findOne({}, { select: { last_updated: "$max"}}).then(log)
-              }, CLOCK_DRIFT)
-            }
-          });
-
-          let updt = 0;
-          const sync = await db.planes.sync!({}, { handlesOnData: true, patchText: true, }, (planes, deltas) => {
-            const x20 = planes.filter(p => p.x == 20).length;
-            const x10 = planes.filter(p => p.x == 10);
-            log(Date.now() + `: sync stats: x10 -> ${x10.length} x20 -> ${x20}`);
-
-            let update = false;
-            planes.map(p => {
-              // if(p.y === 1) window.up = p;
-              if(typeof p.x !== "number") log(typeof p.x)
-              if(+p.x < 10){
-                updt++;
-                update = true;
-                p.$update!({ x: 10 });
-                log(Date.now() + `: sync: p.$update({ x: 10 }); (id: ${p.id})`);
-              }
-            });
-            // if(update) log("$update({ x: 10 })", updt)
-
-            if(x20 === 100){
-              // log(22)
-              // console.timeEnd("test")
-              log(Date.now() + ": sync end: Finished replication test. Inserting 100 rows then updating two times took: " + (Date.now() - start - CLOCK_DRIFT) + "ms")
-              resolveTest(true)
-            }
-          });
-
-
-          const msLimit = 20000;
-          setTimeout(async () => {
-            const dbCounts = {
-              x10: await db.planes.count!({ x: 10 }),
-              x20: await db.planes.count!({ x: 20 }),
-              latest: await db.planes.findOne!({}, { orderBy: { last_updated: -1 } }),
-            }
-            const syncCounts = {
-              x10: sync?.getItems().filter(d => d.x == 10).length,
-              x20: sync?.getItems().filter(d => d.x == 20).length,
-              latest: sync?.getItems()?.sort((a, b) => +b.last_updated - +a.last_updated )[0],
-            }
-            const msg = "Replication test failed due to taking longer than " + msLimit + "ms \n " + JSON.stringify({ dbCounts, syncCounts }, null, 2);
-            log(msg)
-            rejectTest(msg)
-          }, msLimit);
-
-
-        } catch(err){
-          log(JSON.stringify(err));
-          await tout(1000);
-          throw err;
-        }
-      });
-
-    }
-
-
-
-    /* TODO: SECURITY */
-    log("auth.user:", auth.user);
-    const isUser = !!auth.user;
-
-    // Public data
-    await test("Security rules example", { skip: isUser }, async () => {
-      log("Checking public data");
-      const vQ = await db.items4.find!({}, { select: { added: 0 }});
-      assert.deepStrictEqual(vQ, [
-        { id: 1, public: 'public data' },
-        { id: 2, public: 'public data' }
-      ]);
-
-      const cols = await db.insert_rules.getColumns!();
-      assert.equal(cols.filter(({ insert, update: u, select: s, delete: d }) => insert && !u && s && !d).length, 2, "Validated getColumns failed")
-
-      /* Validated insert */
-      const expectB = await db.insert_rules.insert!({ name: "a" }, { returning: "*" });
-      assert.deepStrictEqual(expectB, { name: "b" }, "Validated insert failed");
-
-      /* forced UUID insert */
-      const row: any = await db.uuid_text.insert!({}, {returning: "*"});
-      assert.equal(row.id, 'c81089e1-c4c1-45d7-a73d-e2d613cb7c3e');
-
-
-      try {
-        await db.insert_rules.insert!({ name: "notfail" }, { returning: "*" });
-        await db.insert_rules.insert!({ name: "fail" }, { returning: "*" });
-        await db.insert_rules.insert!({ name: "fail-check" }, { returning: "*" });
-        throw "post insert checks should have failed";
-      } catch(err){
-
-      }
-      assert.equal(0, +(await db.insert_rules.count!({ name: "fail" })), "postValidation failed");
-      assert.equal(0, +(await db.insert_rules.count!({ name: "fail-check" })), "checkFilter failed");
-      assert.equal(1, +(await db.insert_rules.count!({ name: "notfail" })), "postValidation failed");
-    });
-
-    // await tryRun("Duplicate subscription", async () => {
-
-    // return new Promise(async (resolve, reject) => {
-    // let data1 = [], data2 = [], cntr = 0;
-    // function check(){
-    // cntr++;
-    // if(cntr === 2){
-    // assert.equal(data1.length, data2.length);
-    // console.error(data1, data2)
-    // reject( data1);
-    // resolve(data1)
-    // }
-    // }
-
-    // const sub1 = await db.planes.subscribe({}, {}, data => {
-    // data1 = data;
-    // check()
-    // });
-    // const sub2 = await db.planes.subscribe({}, {}, data => {
-    // data2 = data;
-    // check()
-    // });
-    // })
-    // })
-
-
-    await test("Realtime", { skip: isUser }, async () => {
-      await testRealtime();
-    });
-
-    /* Bug:
-      doing a
-      some_table.sync({}, { handlesOnData: true }, console.log);
-      will make all subsequent
-      some_table.sync({}, { handlesOnData: false }, console.log);
-      return no data items
-    */
-    await test("sync handlesOnData true -> false no data bug", { skip: isUser }, async () => {
-
-      let sync1Planes: AnyObject[] = [];
-      let sync2Planes: AnyObject[] = [];
-      const sync1 = await db.planes.sync!({}, { handlesOnData: true }, async (planes1, deltas) => {
-        sync1Planes = planes1;
-        log("sync handlesOnData true", planes1.length);
-      });
-      await tout(1000);
-      const sync2 = await db.planes.sync!({}, { handlesOnData: false }, (planes2, deltas) => {
-        sync2Planes = planes2;
-      });
-      await tout(1000);
-      if(sync1Planes.length !== sync2Planes.length || sync1Planes.length === 0) {
-        throw `sync2Planes.length !== 100: ${sync1Planes.length} vs ${sync2Planes.length}`;
-      }
-      await sync1.$unsync();
-      await sync2.$unsync();
-    });
-
-
-    // User data
-    await test("Security rules example", { skip: !isUser }, async () => {
-      log("Checking User data");
-      const vQ = await db.items4.find!();
-      assert.deepStrictEqual(vQ, [
-        { id: 1, public: 'public data' },
-        { id: 2, public: 'public data' }
-      ]);
-
-      await db.items4.find!({}, { select: { id: 1 }, orderBy: { added: 1 } });
-
-      const dynamicCols = await db.uuid_text.getColumns!(undefined, {
-        rule: "update",
-        filter: {
-          id: 'c81089e1-c4c1-45d7-a73d-e2d613cb7c3e'
-        },
-        data: {
-          id: "dwadwa"
-        }
-      });
-      assert.equal(dynamicCols.length, 1);
-      assert.equal(dynamicCols[0].name, "id");
-      const defaultCols = await db.uuid_text.getColumns!(undefined, {
-        rule: "update",
-        filter: {
-          id: 'not matching'
-        },
-        data: {
-          id: "dwadwa"
-        }
-      });
-      throw defaultCols.map(c => c.name);
-    });
-
-  });
-}
-
-const tout = (t = 3000) => {
-  return new Promise(async (resolve, reject) => {
-    setTimeout(() => {
-      resolve(true)
-    },t)
-  });
-}
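
For context, the bulk of the hunk above is the removed client test suite for the SQL streaming API. A minimal sketch of the usage pattern those tests exercised follows; the `db.sql(..., { returnType: "stream" })` handle, `res.start(listener)` and the packet shape (`type`, `rows`, `ended`, `error`) are as shown in the deleted code, while the `streamAll` helper and the example query are illustrative only:

```ts
import type { SocketSQLStreamPacket } from "prostgles-types";

// Collect all rows from a streamed query, resolving once the server marks the stream as ended.
// Mirrors the listener pattern used throughout the removed "SQL Stream" tests.
const streamAll = async (db: any, query: string): Promise<any[][]> => {
  const res = await db.sql(query, {}, { returnType: "stream" });
  return new Promise((resolve, reject) => {
    let rows: any[][] = [];
    res.start((packet: SocketSQLStreamPacket) => {
      if (packet.type === "error") {
        reject(packet.error);
      } else {
        if (packet.rows) rows = rows.concat(packet.rows); // rows arrive in rowMode=array form
        if (packet.ended) resolve(rows);
      }
    }).catch(reject);
  });
};

// e.g. const rows = await streamAll(db, "SELECT * FROM generate_series(1, 2000)");
```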
|