@saltcorn/cli 0.8.8-beta.1 → 0.8.8-beta.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +79 -41
- package/npm-shrinkwrap.json +420 -402
- package/oclif.manifest.json +1 -1
- package/package.json +6 -6
- package/src/commands/build-app.js +8 -1
- package/src/commands/list-tenants.js +29 -36
- package/src/commands/list-triggers.js +7 -22
- package/src/commands/run-js.js +79 -0
- package/src/commands/run-sql.js +6 -40
- package/src/commands/sync-upload-data.js +246 -0
- package/src/common.js +20 -0
|
@@ -0,0 +1,246 @@
|
|
|
1
|
+
const { Command, flags } = require("@oclif/command");
|
|
2
|
+
const path = require("path");
|
|
3
|
+
const { init_multi_tenant } = require("@saltcorn/data/db/state");
|
|
4
|
+
const User = require("@saltcorn/data/models/user");
|
|
5
|
+
const Table = require("@saltcorn/data/models/table");
|
|
6
|
+
const fs = require("fs").promises;
|
|
7
|
+
const { getState } = require("@saltcorn/data/db/state");
|
|
8
|
+
const db = require("@saltcorn/data/db");
|
|
9
|
+
const { loadAllPlugins } = require("@saltcorn/server/load_plugins");
|
|
10
|
+
|
|
11
|
+
/**
 * Copy the syncable fields of 'row' into a new object.
 * Skips the primary key (unless 'keepId' is true), calculated fields,
 * and fields that are null/undefined in 'row'. Fields of type Date are
 * revived from their serialized form into Date objects.
 * @param {object} table - table whose getFields() describes the row
 * @param {string} pkName - name of the primary key field
 * @param {object} row - raw row data uploaded by the app
 * @param {boolean} [keepId] - when true, keep the primary key in the result
 * @returns {object} the filtered row, ready for insertRow/updateRow
 */
const pickFields = (table, pkName, row, keepId) => {
  const result = {};
  for (const { name, type, calculated } of table.getFields()) {
    // Use a null/undefined check (== null matches both) rather than
    // truthiness, so legitimate falsy values (0, false, "") still sync.
    if ((!keepId && name === pkName) || calculated || row[name] == null)
      continue;
    if (type?.name === "Date") {
      // guard against empty-string timestamps producing Invalid Date
      result[name] = row[name] ? new Date(row[name]) : undefined;
    } else {
      result[name] = row[name];
    }
  }
  return result;
};
|
|
23
|
+
|
|
24
|
+
/**
 * After applyInserts() may have re-keyed inserted rows, rewrite foreign
 * key columns of the freshly inserted rows so they point at the new
 * (translated) primary keys of their target tables.
 * Only rows that were part of this sync's inserts are touched: the
 * update is restricted by primary key to the inserted ids.
 * @param {object} allChanges - per-table changes ({ tblName: { inserts, ... } })
 * @param {object} allTranslations - per-table map of old pk -> new pk
 * @throws {Error} when a named table does not exist
 */
const translateInsertFks = async (allChanges, allTranslations) => {
  const schema = db.getTenantSchemaPrefix();
  // For one foreign key: collect the (possibly translated) primary keys
  // of the inserted rows whose fk value points at a re-keyed target row.
  // Returns null when the target table had no translations at all.
  const rowIds = (fk, targetTrans, tblName, pkName, changes) => {
    if (Object.keys(targetTrans || {}).length > 0) {
      const srcTrans = allTranslations[tblName] || {};
      // ids with a fk where the target was translated
      const insertIds = (changes.inserts || [])
        .filter((row) => targetTrans[row[fk.name]] !== undefined)
        .map((row) => srcTrans[row[pkName]] || row[pkName]);
      return insertIds;
    }
    return null;
  };

  for (const [tblName, changes] of Object.entries(allChanges)) {
    const table = Table.findOne({ name: tblName });
    if (!table) throw new Error(`The table '${tblName}' does not exists`);
    const pkName = table.pk_name;
    for (const fk of table.getForeignKeys()) {
      const targetTrans = allTranslations[fk.reftable_name];
      const ids = rowIds(fk, targetTrans, table.name, pkName, changes);
      if (ids?.length > 0) {
        // One UPDATE per (old id -> new id) pair in the target table.
        // NOTE(review): `to`, `from` and `ids` come from insertRow()
        // results / uploaded pk values and are interpolated into the SQL
        // unescaped — presumably always integer keys; confirm, or switch
        // to parameterized queries.
        for (const [from, to] of Object.entries(targetTrans)) {
          await db.query(
            `update ${schema}"${db.sqlsanitize(tblName)}" set "${db.sqlsanitize(
              fk.name
            )}" = ${to}
            where "${db.sqlsanitize(
              fk.name
            )}" = ${from} and "${db.sqlsanitize(pkName)}" in (${ids.join(
              ","
            )})`
          );
        }
      }
    }
  }
};
|
|
62
|
+
|
|
63
|
+
/**
 * Insert the uploaded rows, table by table. Triggers on each table are
 * disabled for the duration of its inserts and re-enabled afterwards.
 * When the database assigns a primary key different from the one the
 * app sent, the mapping old pk -> new pk is recorded.
 * @param {object} changes - per-table changes ({ tblName: { inserts, ... } })
 * @param {number} syncTimestamp - timestamp for the new sync_info rows
 * @param {object|undefined} user - user the inserts run as
 * @returns {Promise<object>} per-table maps of old pk -> new pk
 * @throws {Error} when a table does not exist or an insert fails
 */
const applyInserts = async (changes, syncTimestamp, user) => {
  const schema = db.getTenantSchemaPrefix();
  const allTranslations = {};
  for (const [tblName, vals] of Object.entries(changes)) {
    const table = Table.findOne({ name: tblName });
    if (!table) throw new Error(`The table '${tblName}' does not exist`);
    if (vals.inserts?.length > 0) {
      const pkName = table.pk_name;
      await db.query(
        `alter table ${schema}"${db.sqlsanitize(tblName)}" disable trigger all`
      );
      try {
        const translations = {};
        for (const insert of vals.inserts || []) {
          // drop the app-side pk so the server assigns its own
          const row = pickFields(table, pkName, insert);
          const newId = await table.insertRow(
            row,
            user,
            undefined,
            true,
            syncTimestamp
          );
          if (!newId) throw new Error(`Unable to insert into ${tblName}`);
          else if (newId !== insert[pkName])
            translations[insert[pkName]] = newId;
        }
        allTranslations[tblName] = translations;
      } finally {
        // re-enable triggers even when an insert throws, so the table is
        // never left in a trigger-disabled state
        await db.query(
          `alter table ${schema}"${db.sqlsanitize(tblName)}" enable trigger all`
        );
      }
    }
  }
  return allTranslations;
};
|
|
95
|
+
|
|
96
|
+
/**
 * Apply the uploaded row updates, table by table. Primary keys and
 * foreign keys are rewritten through 'allTranslations' when the rows
 * they reference were re-keyed by applyInserts() earlier in this sync.
 * @param {object} changes - per-table changes ({ tblName: { updates, ... } })
 * @param {object} allTranslations - per-table map of old pk -> new pk
 * @param {number} syncTimestamp - timestamp for the sync_info rows
 * @param {object|undefined} user - user the updates run as
 * @throws {Error} when a table does not exist or an update fails
 */
const applyUpdates = async (changes, allTranslations, syncTimestamp, user) => {
  for (const [tblName, vals] of Object.entries(changes)) {
    if (vals.updates?.length > 0) {
      const table = Table.findOne({ name: tblName });
      if (!table) throw new Error(`The table '${tblName}' does not exist`);
      const pkName = table.pk_name;
      const insertTranslations = allTranslations[tblName];
      for (const update of vals.updates) {
        // keepId=true: the pk identifies which row to update
        const row = pickFields(table, pkName, update, true);
        // the row itself may have been re-keyed on insert in this sync
        if (insertTranslations?.[row[pkName]])
          row[pkName] = insertTranslations[row[pkName]];
        // rewrite fk values that point at re-keyed rows
        for (const fk of table.getForeignKeys()) {
          const oldVal = row[fk.name];
          if (oldVal) {
            const newVal = allTranslations[fk.reftable_name]?.[oldVal];
            if (newVal) row[fk.name] = newVal;
          }
        }
        const result = await table.updateRow(
          row,
          row[pkName],
          user,
          true,
          undefined,
          undefined,
          syncTimestamp
        );
        // updateRow yields a truthy error description on failure
        if (result) throw new Error(`Unable to update ${tblName}: ${result}`);
      }
    }
  }
};
|
|
128
|
+
|
|
129
|
+
/**
 * Apply the uploaded row deletions with conflict resolution: a delete
 * is only honoured when the app's last_modified timestamp is at least
 * as recent as the server's latest sync info for that row (or when the
 * server has no sync info for it).
 * @param {object} changes - per-table changes ({ tblName: { deletes, ... } })
 * @param {object|undefined} user - user the deletes run as
 * @throws {Error} when a table does not exist or rows remain undeleted
 */
const applyDeletes = async (changes, user) => {
  for (const [tblName, vals] of Object.entries(changes)) {
    const table = Table.findOne({ name: tblName });
    if (!table) throw new Error(`The table '${tblName}' does not exist`);
    const pkName = table.pk_name;
    if (vals.deletes?.length > 0) {
      const delIds = [];
      const latestInfos = await table.latestSyncInfos(
        vals.deletes.map((del) => del[pkName])
      );
      // index the server-side sync infos by row reference
      const refToInfo = {};
      for (const info of latestInfos) {
        refToInfo[info.ref] = info;
      }
      for (const del of vals.deletes) {
        const appTimestamp = new Date(del.last_modified);
        const info = refToInfo[del[pkName]];
        // no server info, or the app change is not older: delete wins
        if (!info || appTimestamp >= info.last_modified)
          delIds.push(del[pkName]);
      }
      if (delIds.length > 0) {
        await table.deleteRows({ [pkName]: { in: delIds } }, user, true);
        // verify the rows are actually gone before committing the sync
        if ((await table.countRows({ [pkName]: { in: delIds } })) !== 0)
          throw new Error(
            `Unable to delete in '${tblName}': Some rows were not deleted`
          );
      }
    }
  }
};
|
|
159
|
+
|
|
160
|
+
/**
 * Persist the pk translation maps for the mobile app to pick up.
 * The JSON is written to a temporary '.out' file first and then
 * renamed, so a reader polling for 'translated-ids.json' never
 * observes a partially written file.
 * @param {object} translatedIds - per-table map of old pk -> new pk
 * @param {string} directory - output directory for the sync run
 */
const writeTranslatedIds = async (translatedIds, directory) => {
  const tmpFile = path.join(directory, "translated-ids.out");
  const finalFile = path.join(directory, "translated-ids.json");
  await fs.writeFile(tmpFile, JSON.stringify(translatedIds));
  await fs.rename(tmpFile, finalFile);
};
|
|
165
|
+
|
|
166
|
+
/**
 * Report a sync failure to the mobile app. The message is wrapped in a
 * JSON object and written via a temporary '.out' file plus rename, so
 * 'error.json' only ever appears fully written.
 * @param {string} message - error message describing why the sync failed
 * @param {string} directory - output directory for the sync run
 */
const writeErrorFile = async (message, directory) => {
  const payload = JSON.stringify({ message });
  const tmpFile = path.join(directory, "error.out");
  await fs.writeFile(tmpFile, payload);
  await fs.rename(tmpFile, path.join(directory, "error.json"));
};
|
|
171
|
+
|
|
172
|
+
/**
 * CLI command that applies a batch of offline changes uploaded by the
 * Saltcorn mobile app. It reads 'changes.json' from --directory, applies
 * inserts, fk translations, updates and deletes inside one database
 * transaction, and writes either 'translated-ids.json' (success) or
 * 'error.json' (failure) back into the same directory.
 * NOTE: run() always terminates the process (exit 0 on success, 1 on
 * failure) — it never returns to the oclif runner.
 */
class SyncUploadData extends Command {
  async run() {
    let returnCode = 0,
      inTransaction = false;
    const { flags } = await this.parse(SyncUploadData);
    if (db.is_it_multi_tenant() && flags.tenantAppName) {
      await init_multi_tenant(loadAllPlugins, true, [flags.tenantAppName]);
    }
    const doSync = async () => {
      try {
        const changes = JSON.parse(
          await fs.readFile(path.join(flags.directory, "changes.json"))
        );
        const syncTimestamp = flags.syncTimestamp;
        // resolve the acting user (undefined = no user context)
        const user = flags.userEmail
          ? await User.findOne({ email: flags.userEmail })
          : undefined;
        await loadAllPlugins();
        await db.begin();
        inTransaction = true;
        // order matters: inserts first (collecting pk translations),
        // then fk rewrites, then updates, then deletes
        const translatedIds = await applyInserts(changes, syncTimestamp, user);
        await translateInsertFks(changes, translatedIds);
        await applyUpdates(changes, translatedIds, syncTimestamp, user);
        await applyDeletes(changes, user);
        await db.commit();
        await writeTranslatedIds(translatedIds, flags.directory);
      } catch (error) {
        returnCode = 1;
        getState().log(2, `Unable to sync: ${error.message}`);
        // tell the app why the sync failed before rolling back
        await writeErrorFile(error.message, flags.directory);
        if (inTransaction) await db.rollback();
      } finally {
        process.exit(returnCode);
      }
    };
    // run in the tenant's schema when a non-default tenant is given
    if (
      flags.tenantAppName &&
      flags.tenantAppName !== db.connectObj.default_schema
    ) {
      await db.runWithTenant(flags.tenantAppName, doSync);
    } else {
      await doSync();
    }
  }
}
|
|
220
|
+
|
|
221
|
+
SyncUploadData.description = "Runs a sync for data supplied by the mobile app";

// oclif (v1) flag definitions. NOTE(review): 'name'/'string' are not
// standard @oclif/command flag options — presumably leftovers; the flags
// are matched by their object keys (--tenantAppName etc.). Confirm before
// changing.
SyncUploadData.flags = {
  // optional tenant to run the sync in
  tenantAppName: flags.string({
    name: "tenant",
    string: "tenant",
    description: "Optional name of a tenant application",
  }),
  // email of the user the inserts/updates/deletes run as
  userEmail: flags.string({
    name: "user email",
    string: "userEmail",
    description: "email of the user running the sync",
  }),
  // directory holding changes.json; results are written back here
  directory: flags.string({
    name: "directory",
    string: "directory",
    description: "directory name for input output data",
  }),
  // timestamp recorded on the sync_info rows touched by this run
  syncTimestamp: flags.integer({
    name: "syncTimestamp",
    string: "syncTimestamp",
    description: "new timestamp for the sync_info rows",
  }),
};

module.exports = SyncUploadData;
|
package/src/common.js
CHANGED
|
@@ -50,9 +50,29 @@ function sleep(ms) {
|
|
|
50
50
|
return new Promise((resolve) => setTimeout(resolve, ms));
|
|
51
51
|
}
|
|
52
52
|
|
|
53
|
+
/**
 * Read a text file synchronously and echo its contents to the console
 * (used in SyncMode).
 * @param {string} filename - absolute path to file
 * @returns {null|string} the file contents, or null when the file
 *   could not be read (the error message is logged instead of thrown)
 */
function readFileSync(filename) {
  const fs = require("fs");
  try {
    const str = fs.readFileSync(filename, "utf8");
    console.log(str);
    return str;
  } catch (e) {
    // best-effort: report the failure and return null rather than throwing
    console.error(e.message);
    return null;
  }
}
|
|
71
|
+
|
|
53
72
|
// Public helpers shared by the CLI commands.
module.exports = {
  maybe_as_tenant,
  parseJSONorString,
  sleep,
  init_some_tenants,
  readFileSync,
};
|