nesoi 3.0.9 → 3.0.10
This diff compares the content of publicly available package versions as published to their public registry. It is provided for informational purposes only.
- package/lib/compiler/apps/monolyth/monolyth_compiler.d.ts +1 -1
- package/lib/compiler/apps/monolyth/monolyth_compiler.js +7 -7
- package/lib/elements/blocks/machine/machine.builder.js +12 -1
- package/lib/elements/blocks/machine/machine.schema.d.ts +2 -1
- package/lib/elements/blocks/machine/machine.schema.js +2 -1
- package/lib/elements/edge/controller/adapters/controller_adapter.d.ts +3 -1
- package/lib/elements/edge/controller/adapters/controller_adapter.js +2 -1
- package/lib/elements/edge/controller/controller.config.d.ts +3 -2
- package/lib/elements/edge/controller/controller.d.ts +3 -3
- package/lib/elements/edge/controller/controller.js +3 -3
- package/lib/elements/entities/bucket/adapters/bucket_adapter.d.ts +2 -1
- package/lib/elements/entities/bucket/bucket.config.d.ts +3 -3
- package/lib/elements/entities/bucket/bucket.d.ts +3 -2
- package/lib/elements/entities/bucket/bucket.js +4 -4
- package/lib/engine/apps/app.config.d.ts +12 -11
- package/lib/engine/apps/app.d.ts +11 -22
- package/lib/engine/apps/app.js +9 -9
- package/lib/engine/apps/inline.app.d.ts +9 -8
- package/lib/engine/apps/inline.app.js +24 -24
- package/lib/engine/apps/monolyth/monolyth.app.d.ts +5 -5
- package/lib/engine/apps/monolyth/monolyth.app.js +6 -6
- package/lib/engine/apps/service.d.ts +30 -0
- package/lib/engine/apps/service.js +15 -0
- package/lib/engine/auth/authn.d.ts +10 -1
- package/lib/engine/auth/zero.authn_provider.d.ts +7 -3
- package/lib/engine/auth/zero.authn_provider.js +9 -2
- package/lib/engine/cli/cli.d.ts +2 -2
- package/lib/engine/cli/cli.js +1 -1
- package/lib/engine/cli/ui.js +1 -1
- package/lib/engine/daemon.d.ts +11 -11
- package/lib/engine/daemon.js +18 -18
- package/lib/engine/dependency.d.ts +2 -1
- package/lib/engine/dependency.js +18 -0
- package/lib/engine/module.d.ts +6 -4
- package/lib/engine/module.js +25 -10
- package/lib/engine/transaction/nodes/bucket.trx_node.js +1 -2
- package/lib/engine/transaction/nodes/bucket_query.trx_node.js +5 -10
- package/lib/engine/transaction/nodes/job.trx_node.js +2 -4
- package/lib/engine/transaction/nodes/machine.trx_node.js +2 -4
- package/lib/engine/transaction/nodes/queue.trx_node.js +1 -2
- package/lib/engine/transaction/nodes/resource.trx_node.js +2 -4
- package/lib/engine/transaction/trx.d.ts +6 -4
- package/lib/engine/transaction/trx.js +2 -1
- package/lib/engine/transaction/trx_engine.config.d.ts +2 -2
- package/lib/engine/transaction/trx_engine.d.ts +4 -3
- package/lib/engine/transaction/trx_engine.js +20 -17
- package/lib/engine/transaction/trx_node.d.ts +4 -2
- package/lib/engine/transaction/trx_node.js +13 -1
- package/lib/engine/tree.js +6 -17
- package/lib/schema.d.ts +1 -1
- package/package.json +2 -3
- package/tools/joaquin/job.js +2 -2
- package/tools/joaquin/message.js +2 -2
- package/tools/joaquin/mock.d.ts +6 -6
- package/tsconfig.build.tsbuildinfo +1 -1
- package/lib/adapters/postgres/src/migrator/csv.d.ts +0 -7
- package/lib/adapters/postgres/src/migrator/csv.js +0 -72
- package/lib/adapters/postgres/src/migrator/database.d.ts +0 -34
- package/lib/adapters/postgres/src/migrator/database.js +0 -88
- package/lib/adapters/postgres/src/migrator/generator/generator.d.ts +0 -22
- package/lib/adapters/postgres/src/migrator/generator/generator.js +0 -326
- package/lib/adapters/postgres/src/migrator/generator/migration.d.ts +0 -66
- package/lib/adapters/postgres/src/migrator/generator/migration.js +0 -249
- package/lib/adapters/postgres/src/migrator/generator/provider.d.ts +0 -19
- package/lib/adapters/postgres/src/migrator/generator/provider.js +0 -74
- package/lib/adapters/postgres/src/migrator/index.d.ts +0 -47
- package/lib/adapters/postgres/src/migrator/index.js +0 -22
- package/lib/adapters/postgres/src/migrator/runner/runner.d.ts +0 -17
- package/lib/adapters/postgres/src/migrator/runner/runner.js +0 -249
- package/lib/adapters/postgres/src/migrator/runner/status.d.ts +0 -17
- package/lib/adapters/postgres/src/migrator/runner/status.js +0 -55
- package/lib/adapters/postgres/src/postgres.bucket_adapter.d.ts +0 -42
- package/lib/adapters/postgres/src/postgres.bucket_adapter.js +0 -294
- package/lib/adapters/postgres/src/postgres.cli.d.ts +0 -76
- package/lib/adapters/postgres/src/postgres.cli.js +0 -207
- package/lib/adapters/postgres/src/postgres.config.d.ts +0 -5
- package/lib/adapters/postgres/src/postgres.config.js +0 -2
- package/lib/adapters/postgres/src/postgres.nql.d.ts +0 -16
- package/lib/adapters/postgres/src/postgres.nql.js +0 -123
- package/lib/adapters/postgres/src/postgres.provider.d.ts +0 -18
- package/lib/adapters/postgres/src/postgres.provider.js +0 -77
- package/lib/adapters/postgres/test/postgres.bucket_adapter.test.d.ts +0 -1
- package/lib/adapters/postgres/test/postgres.bucket_adapter.test.js +0 -210
package/lib/adapters/postgres/src/postgres.bucket_adapter.js
@@ -1,294 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.PostgresBucketAdapter = void 0;
-const bucket_adapter_1 = require("../../../elements/entities/bucket/adapters/bucket_adapter");
-const log_1 = require("../../../engine/util/log");
-const trx_node_1 = require("../../../engine/transaction/trx_node");
-const trx_1 = require("../../../engine/transaction/trx");
-class PostgresBucketAdapter extends bucket_adapter_1.BucketAdapter {
-    constructor(schema, provider, tableName) {
-        super(schema, provider.nql, provider.config);
-        this.schema = schema;
-        this.provider = provider;
-        this.tableName = tableName;
-    }
-    guard(sql) {
-        return (template, ...params) => {
-            return sql.call(sql, template, ...params).catch(e => {
-                log_1.Log.error('bucket', 'postgres', e.toString(), e);
-                throw new Error('Database error.');
-            });
-        };
-    }
-    getQueryMeta() {
-        return {
-            scope: 'PG',
-            avgTime: 50
-        };
-    }
-    /* Dangerous, not implemented. */
-    deleteEverything(trx) {
-        throw new Error('Unsafe operation.');
-        return Promise.resolve();
-    }
-    /* Read operations */
-    async index(trx) {
-        const sql = trx_1.Trx.get(trx, 'sql');
-        const objs = await this.guard(sql) `
-            SELECT *
-            FROM ${sql(this.tableName)}
-            ORDER BY ${this.config.meta.updated_at} DESC
-        `;
-        return objs;
-    }
-    async get(trx, id) {
-        const sql = trx_1.Trx.get(trx, 'sql');
-        const objs = await this.guard(sql) `
-            SELECT *
-            FROM ${sql(this.tableName)}
-            WHERE id = ${id}
-        `;
-        return objs[0];
-    }
-    /* Write Operations */
-    precleanup(obj) {
-        var _a, _b;
-        obj[_a = this.config.meta.created_by] ?? (obj[_a] = null);
-        obj[_b = this.config.meta.updated_by] ?? (obj[_b] = null);
-        for (const key in obj) {
-            if (obj[key] === undefined) {
-                delete obj[key];
-            }
-        }
-    }
-    async create(trx, obj) {
-        const sql = trx_1.Trx.get(trx, 'sql');
-        // Use schema fields excluding id
-        const keys = Object.keys(this.schema.model.fields)
-            .filter(key => obj[key] !== undefined);
-        // Add meta (created_*/updated_*)
-        keys.push(...Object.values(this.config.meta));
-        this.precleanup(obj);
-        // Create
-        const objs = await this.guard(sql) `
-            INSERT INTO ${sql(this.tableName)}
-            ${sql(obj, keys)}
-            RETURNING *`;
-        return objs[0];
-    }
-    async createMany(trx, objs) {
-        const sql = trx_1.Trx.get(trx, 'sql');
-        // Use schema fields excluding id
-        const keys = Object.keys(this.schema.model.fields)
-            .filter(key => key !== 'id');
-        // Add meta (created_*/updated_*)
-        keys.push(...Object.values(this.config.meta));
-        // Pre-cleanup
-        for (const obj of objs) {
-            this.precleanup(obj);
-        }
-        // Create
-        const inserted = await this.guard(sql) `
-            INSERT INTO ${sql(this.tableName)}
-            ${sql(objs, keys)}
-            RETURNING *
-        `;
-        return inserted;
-    }
-    async patch(trx, obj) {
-        const sql = trx_1.Trx.get(trx, 'sql');
-        // Use schema keys that exist on object
-        const keys = Object.keys(this.schema.model.fields)
-            .filter(key => obj[key] !== undefined)
-            .filter(key => key in obj);
-        // Add meta
-        keys.push(this.config.meta.updated_by, this.config.meta.updated_at);
-        // Pre-cleanup
-        this.precleanup(obj);
-        // Update
-        const objs = await this.guard(sql) `
-            UPDATE ${sql(this.tableName)} SET
-            ${sql(obj, keys)}
-            WHERE id = ${obj.id}
-            RETURNING *
-        `;
-        return objs[0];
-    }
-    async patchMany(trx, objs) {
-        const _objs = [];
-        for (const obj of objs) {
-            _objs.push(await this.patch(trx, obj));
-        }
-        return _objs;
-    }
-    async replace(trx, obj) {
-        const sql = trx_1.Trx.get(trx, 'sql');
-        // Use all schema keys
-        const keys = Object.keys(this.schema.model.fields)
-            .filter(key => obj[key] !== undefined);
-        keys.push(this.config.meta.updated_by, this.config.meta.updated_at);
-        this.precleanup(obj);
-        const objs = await this.guard(sql) `
-            UPDATE ${sql(this.tableName)} SET
-            ${sql(obj, keys)}
-            WHERE id = ${obj.id}
-            RETURNING *
-        `;
-        return objs[0];
-    }
-    async replaceMany(trx, objs) {
-        const _objs = [];
-        for (const obj of objs) {
-            _objs.push(await this.replace(trx, obj));
-        }
-        return _objs;
-    }
-    async put(trx, obj) {
-        const sql = trx_1.Trx.get(trx, 'sql');
-        // Use all schema keys
-        const keys = Object.keys(this.schema.model.fields)
-            .filter(key => obj[key] !== undefined);
-        // Add meta (created_*/updated_*)
-        const ikeys = keys.concat(...Object.values(this.config.meta));
-        const ukeys = keys.concat(this.config.meta.updated_by, this.config.meta.updated_at);
-        this.precleanup(obj);
-        const objs = await this.guard(sql) `
-            INSERT INTO ${sql(this.tableName)}
-            ${sql(obj, ikeys)}
-            ON CONFLICT(id)
-            DO UPDATE SET
-            ${sql(obj, ukeys)}
-            RETURNING *
-        `;
-        return objs[0];
-    }
-    async putMany(trx, objs) {
-        const _objs = [];
-        for (const obj of objs) {
-            _objs.push(await this.put(trx, obj));
-        }
-        return _objs;
-    }
-    async delete(trx, id) {
-        const sql = trx_1.Trx.get(trx, 'sql');
-        await this.guard(sql) `
-            DELETE FROM ${sql(this.tableName)}
-            WHERE id = ${id}
-        `;
-    }
-    async deleteMany(trx, ids) {
-        const sql = trx_1.Trx.get(trx, 'sql');
-        await this.guard(sql) `
-            DELETE FROM ${sql(this.tableName)}
-            WHERE id IN ${ids}
-        `;
-    }
-    /* Cache Operations */
-    async syncOne(trx, id, lastObjUpdateEpoch) {
-        throw new Error('Not implemented yet.');
-        return {};
-        // // 1. Check if object was deleted
-        // const obj = await this.get(trx, id);
-        // if (!obj) {
-        // return 'deleted' as const;
-        // }
-        // // 2. Check if object was updated
-        // const updateEpoch = this.getUpdateEpoch(obj);
-        // const hasObjUpdated = updateEpoch > lastObjUpdateEpoch;
-        // if (!hasObjUpdated) {
-        // return null;
-        // }
-        // // 3. Return updated object and epoch
-        // return {
-        // obj,
-        // updateEpoch
-        // };
-    }
-    async syncOneAndPast(trx, id, lastUpdateEpoch) {
-        throw new Error('Not implemented yet.');
-        return {};
-        // // 1. Check if object was deleted
-        // const obj = await this.get(trx, id);
-        // if (!obj) {
-        // return 'deleted' as const;
-        // }
-        // // 2. Check if object was updated
-        // const objUpdateEpoch = this.getUpdateEpoch(obj);
-        // const hasObjUpdated = objUpdateEpoch > lastUpdateEpoch;
-        // if (!hasObjUpdated) {
-        // return null;
-        // }
-        // // 3. Return all objects updated and the max epoch
-        // let updateEpoch = 0;
-        // const changed = (Object.values(this.data) as Obj[])
-        // .map(obj => {
-        // const epoch = this.getUpdateEpoch(obj);
-        // if (epoch > updateEpoch) {
-        // updateEpoch = epoch;
-        // }
-        // return { obj, updateEpoch: epoch };
-        // })
-        // .filter(obj => obj.updateEpoch > lastUpdateEpoch);
-        // if (!changed.length) {
-        // return null;
-        // }
-        // return changed;
-    }
-    async syncAll(trx, lastHash, lastUpdateEpoch = 0) {
-        throw new Error('Not implemented yet.');
-        return {};
-        // // 1. Hash the current ids
-        // const idStr = Object.keys(this.data).sort().join('');
-        // const hash = createHash('md5').update(idStr).digest('hex');
-        // // 2. If hash changed, return a reset sync with all objects
-        // if (hash !== lastHash) {
-        // let updateEpoch = 0;
-        // const sync = (await this.index(trx) as Obj[])
-        // .map(obj => {
-        // const epoch = this.getUpdateEpoch(obj);
-        // if (epoch > updateEpoch) {
-        // updateEpoch = epoch;
-        // }
-        // return { obj, updateEpoch: epoch };
-        // });
-        // return {
-        // sync,
-        // hash,
-        // updateEpoch,
-        // reset: true
-        // };
-        // }
-        // // 3. Find the data that changed and return it
-        // let updateEpoch = 0;
-        // const sync = (Object.values(this.data) as Obj[])
-        // .map(obj => {
-        // const epoch = this.getUpdateEpoch(obj);
-        // if (epoch > updateEpoch) {
-        // updateEpoch = epoch;
-        // }
-        // return { obj, updateEpoch: epoch };
-        // })
-        // .filter(obj => obj.updateEpoch > lastUpdateEpoch);
-        // if (!sync.length) {
-        // return null;
-        // }
-        // return {
-        // sync,
-        // hash,
-        // updateEpoch,
-        // reset: false
-        // };
-    }
-    static getTableMeta(trx, meta) {
-        const trxModule = trx_node_1.TrxNode.getModule(trx);
-        const bucketName = meta.bucket.name;
-        const refName = (trxModule.name === meta.bucket.module ? '' : `${meta.bucket.module}::`) + bucketName;
-        const bucket = trxModule.buckets[refName];
-        const adapter = bucket.adapter;
-        return {
-            tableName: adapter.tableName,
-            meta: adapter.config.meta
-        };
-    }
-}
-exports.PostgresBucketAdapter = PostgresBucketAdapter;
package/lib/adapters/postgres/src/postgres.cli.d.ts
@@ -1,76 +0,0 @@
-import { CLIAdapter, CLICommand } from "../../../engine/cli/cli_adapter";
-import { PostgresProvider } from './postgres.provider';
-import { AnyDaemon } from "../../../engine/daemon";
-import { CLI } from "../../../engine/cli/cli";
-export declare class cmd_check extends CLICommand {
-    provider: PostgresProvider;
-    constructor(provider: PostgresProvider);
-    run(daemon: AnyDaemon): Promise<void>;
-}
-export declare class cmd_tables extends CLICommand {
-    provider: PostgresProvider;
-    constructor(provider: PostgresProvider);
-    run(): Promise<void>;
-}
-export declare class cmd_create_db extends CLICommand {
-    provider: PostgresProvider;
-    constructor(provider: PostgresProvider);
-    run(daemon: AnyDaemon, $: {
-        name: string;
-    }): Promise<void>;
-}
-export declare class cmd_status extends CLICommand {
-    provider: PostgresProvider;
-    constructor(provider: PostgresProvider);
-    run(daemon: AnyDaemon): Promise<void>;
-}
-export declare class cmd_make_empty_migration extends CLICommand {
-    cli: CLI;
-    provider: PostgresProvider;
-    constructor(cli: CLI, provider: PostgresProvider);
-    run(daemon: AnyDaemon, $: {
-        name?: string;
-    }): Promise<void>;
-}
-export declare class cmd_make_migrations extends CLICommand {
-    provider: PostgresProvider;
-    constructor(provider: PostgresProvider);
-    run(daemon: AnyDaemon, $: {
-        tag: string;
-    }): Promise<void>;
-}
-export declare class cmd_migrate_up extends CLICommand {
-    provider: PostgresProvider;
-    constructor(provider: PostgresProvider);
-    run(daemon: AnyDaemon): Promise<void>;
-}
-export declare class cmd_migrate_one_up extends CLICommand {
-    provider: PostgresProvider;
-    constructor(provider: PostgresProvider);
-    run(daemon: AnyDaemon): Promise<void>;
-}
-export declare class cmd_migrate_down extends CLICommand {
-    provider: PostgresProvider;
-    constructor(provider: PostgresProvider);
-    run(daemon: AnyDaemon): Promise<void>;
-}
-export declare class cmd_migrate_one_down extends CLICommand {
-    provider: PostgresProvider;
-    constructor(provider: PostgresProvider);
-    run(daemon: AnyDaemon): Promise<void>;
-}
-export declare class cmd_query extends CLICommand {
-    provider: PostgresProvider;
-    constructor(provider: PostgresProvider);
-    run(): Promise<void>;
-}
-export declare class cmd_import_csv extends CLICommand {
-    provider: PostgresProvider;
-    constructor(provider: PostgresProvider);
-    run(daemon: AnyDaemon, input: Record<string, any>): Promise<void>;
-}
-export declare class PostgresCLI extends CLIAdapter {
-    cli: CLI;
-    provider: PostgresProvider;
-    constructor(cli: CLI, provider: PostgresProvider);
-}
package/lib/adapters/postgres/src/postgres.cli.js
@@ -1,207 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.PostgresCLI = exports.cmd_import_csv = exports.cmd_query = exports.cmd_migrate_one_down = exports.cmd_migrate_down = exports.cmd_migrate_one_up = exports.cmd_migrate_up = exports.cmd_make_migrations = exports.cmd_make_empty_migration = exports.cmd_status = exports.cmd_create_db = exports.cmd_tables = exports.cmd_check = void 0;
-const cli_adapter_1 = require("../../../engine/cli/cli_adapter");
-const database_1 = require("./migrator/database");
-const provider_1 = require("./migrator/generator/provider");
-const ui_1 = __importDefault(require("../../../engine/cli/ui"));
-const daemon_1 = require("../../../engine/daemon");
-const postgres_bucket_adapter_1 = require("./postgres.bucket_adapter");
-const csv_1 = require("./migrator/csv");
-const runner_1 = require("./migrator/runner/runner");
-const migration_1 = require("./migrator/generator/migration");
-class cmd_check extends cli_adapter_1.CLICommand {
-    constructor(provider) {
-        super('any', 'check', 'check', 'Check if the connection to PostgreSQL is working properly');
-        this.provider = provider;
-    }
-    async run(daemon) {
-        const res = await database_1.Database.checkConnection(this.provider.sql);
-        if (res == true)
-            ui_1.default.result('ok', 'Connection to PostgreSQL working.');
-        else
-            ui_1.default.result('error', 'Connection to PostgreSQL not working.', res);
-        await provider_1.MigrationProvider.create(daemon, this.provider.sql);
-    }
-}
-exports.cmd_check = cmd_check;
-class cmd_tables extends cli_adapter_1.CLICommand {
-    constructor(provider) {
-        super('any', 'tables', 'tables', 'List the tables present on the database');
-        this.provider = provider;
-    }
-    async run() {
-        const res = await database_1.Database.listTables(this.provider.sql);
-        ui_1.default.list(res);
-    }
-}
-exports.cmd_tables = cmd_tables;
-class cmd_create_db extends cli_adapter_1.CLICommand {
-    constructor(provider) {
-        super('any', 'create db', 'create db( NAME)', 'Create the database used by the application', /(\w*)/, ['name']);
-        this.provider = provider;
-    }
-    async run(daemon, $) {
-        let name = $.name;
-        const config = this.provider.config?.connection;
-        if (!name) {
-            if (!config?.db) {
-                ui_1.default.result('error', 'Database name not configured on PostgresConfig used', config);
-                return;
-            }
-            name = config.db;
-        }
-        try {
-            await database_1.Database.createDatabase(name, config);
-            ui_1.default.result('ok', `Database ${name} created`);
-        }
-        catch (e) {
-            ui_1.default.result('error', `Failed to create database ${name}`, e);
-        }
-        await provider_1.MigrationProvider.create(daemon, this.provider.sql);
-    }
-}
-exports.cmd_create_db = cmd_create_db;
-class cmd_status extends cli_adapter_1.CLICommand {
-    constructor(provider) {
-        super('any', 'status', 'status', 'Show the status of migrations on the current database');
-        this.provider = provider;
-    }
-    async run(daemon) {
-        const migrator = await provider_1.MigrationProvider.create(daemon, this.provider.sql);
-        console.log(migrator.status.describe());
-    }
-}
-exports.cmd_status = cmd_status;
-class cmd_make_empty_migration extends cli_adapter_1.CLICommand {
-    constructor(cli, provider) {
-        super('any', 'make empty migration', 'make empty migration( NAME)', 'Generate an empty migration to be filled by the user', /(\w*)/, ['name']);
-        this.cli = cli;
-        this.provider = provider;
-    }
-    async run(daemon, $) {
-        const module = await ui_1.default.select('Pick a module to create the migration into:', daemon_1.Daemon.getModules(daemon).map(m => m.name));
-        const name = $.name || await ui_1.default.question('Migration name');
-        const migration = migration_1.$Migration.empty(module.value, name);
-        const filepath = migration.save();
-        this.cli.openEditor(filepath);
-    }
-}
-exports.cmd_make_empty_migration = cmd_make_empty_migration;
-class cmd_make_migrations extends cli_adapter_1.CLICommand {
-    constructor(provider) {
-        super('any', 'make migrations', 'make migrations( TAG)', 'Generate migrations for the bucket(s) using PostgresBucketAdapter', /(\w*)/, ['tag']);
-        this.provider = provider;
-    }
-    async run(daemon, $) {
-        console.clear();
-        // TODO: restrict by tag
-        const migrator = await provider_1.MigrationProvider.create(daemon, this.provider.sql);
-        const migrations = await migrator.generate();
-        for (const migration of migrations) {
-            migration.save();
-        }
-        await runner_1.MigrationRunner.up(daemon, this.provider.sql, 'batch');
-    }
-}
-exports.cmd_make_migrations = cmd_make_migrations;
-class cmd_migrate_up extends cli_adapter_1.CLICommand {
-    constructor(provider) {
-        super('any', 'migrate up', 'migrate up', 'Run ALL the pending migrations up (batch)');
-        this.provider = provider;
-    }
-    async run(daemon) {
-        console.clear();
-        await runner_1.MigrationRunner.up(daemon, this.provider.sql, 'batch');
-    }
-}
-exports.cmd_migrate_up = cmd_migrate_up;
-class cmd_migrate_one_up extends cli_adapter_1.CLICommand {
-    constructor(provider) {
-        super('any', 'migrate one up', 'migrate one up', 'Run ONE pending migration up');
-        this.provider = provider;
-    }
-    async run(daemon) {
-        console.clear();
-        await runner_1.MigrationRunner.up(daemon, this.provider.sql, 'one');
-    }
-}
-exports.cmd_migrate_one_up = cmd_migrate_one_up;
-class cmd_migrate_down extends cli_adapter_1.CLICommand {
-    constructor(provider) {
-        super('any', 'migrate down', 'migrate down', 'Rollback the last batch of migrations');
-        this.provider = provider;
-    }
-    async run(daemon) {
-        console.clear();
-        await runner_1.MigrationRunner.down(daemon, this.provider.sql, 'batch');
-    }
-}
-exports.cmd_migrate_down = cmd_migrate_down;
-class cmd_migrate_one_down extends cli_adapter_1.CLICommand {
-    constructor(provider) {
-        super('any', 'migrate one down', 'migrate one down', 'Rollback the last migration');
-        this.provider = provider;
-    }
-    async run(daemon) {
-        console.clear();
-        await runner_1.MigrationRunner.down(daemon, this.provider.sql, 'one');
-    }
-}
-exports.cmd_migrate_one_down = cmd_migrate_one_down;
-class cmd_query extends cli_adapter_1.CLICommand {
-    constructor(provider) {
-        super('any', 'query', 'query', 'Run a SQL query on the database server');
-        this.provider = provider;
-    }
-    async run() {
-        const query = await ui_1.default.question('SQL');
-        const res = await this.provider.sql.unsafe(query);
-        console.log(res);
-    }
-}
-exports.cmd_query = cmd_query;
-class cmd_import_csv extends cli_adapter_1.CLICommand {
-    constructor(provider) {
-        super('any', 'import csv', 'import csv PATH', 'Run a SQL query on the database server', /(.+)/, ['path']);
-        this.provider = provider;
-    }
-    async run(daemon, input) {
-        const buckets = daemon_1.Daemon.getModules(daemon)
-            .map(module => Object.values(module.buckets)
-                .filter(bucket => bucket.adapter instanceof postgres_bucket_adapter_1.PostgresBucketAdapter)
-                .map(bucket => ({
-                    name: `${module.name}::${bucket.schema.name}`,
-                    tableName: bucket.adapter.tableName
-                })))
-            .flat(1);
-        const bucket = await ui_1.default.select('Bucket', buckets, b => b.name);
-        await csv_1.CSV.import(this.provider.sql, bucket.value.tableName, input.path);
-    }
-}
-exports.cmd_import_csv = cmd_import_csv;
-class PostgresCLI extends cli_adapter_1.CLIAdapter {
-    constructor(cli, provider) {
-        super(cli);
-        this.cli = cli;
-        this.provider = provider;
-        this.commands = {
-            'check': new cmd_check(provider),
-            'tables': new cmd_tables(provider),
-            'create db': new cmd_create_db(provider),
-            'status': new cmd_status(provider),
-            'make migrations': new cmd_make_migrations(provider),
-            'make empty migration': new cmd_make_empty_migration(cli, provider),
-            'migrate up': new cmd_migrate_up(provider),
-            'migrate one up': new cmd_migrate_one_up(provider),
-            'migrate down': new cmd_migrate_down(provider),
-            'migrate one down': new cmd_migrate_one_down(provider),
-            'query': new cmd_query(provider),
-            'import csv': new cmd_import_csv(provider),
-        };
-    }
-}
-exports.PostgresCLI = PostgresCLI;
package/lib/adapters/postgres/src/postgres.nql.d.ts
@@ -1,16 +0,0 @@
-import { AnyTrxNode } from "../../../engine/transaction/trx_node";
-import { NQLRunner } from "../../../elements/entities/bucket/query/nql_engine";
-import { NQL_Pagination, NQL_Part } from "../../../elements/entities/bucket/query/nql.schema";
-import postgres from 'postgres';
-type Obj = Record<string, any>;
-export declare class PostgresNQLRunner extends NQLRunner {
-    protected sql?: postgres.Sql<any>;
-    constructor();
-    run(trx: AnyTrxNode, part: NQL_Part, params: Obj, pagination?: NQL_Pagination): Promise<{
-        data: Obj[];
-        count: number | undefined;
-        page: number | undefined;
-        perPage: number | undefined;
-    }>;
-}
-export {};