nesoi 3.0.0 → 3.0.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +10 -0
- package/lib/adapters/postgres/src/migrator/bucket.d.ts +20 -0
- package/lib/adapters/postgres/src/migrator/bucket.js +184 -0
- package/lib/adapters/postgres/src/migrator/csv.d.ts +7 -0
- package/lib/adapters/postgres/src/migrator/csv.js +72 -0
- package/lib/adapters/postgres/src/migrator/migration.d.ts +2 -18
- package/lib/adapters/postgres/src/migrator/migration.js +10 -158
- package/lib/adapters/postgres/src/migrator/migrator.js +8 -5
- package/lib/adapters/postgres/src/migrator/runner.d.ts +16 -6
- package/lib/adapters/postgres/src/migrator/runner.js +103 -34
- package/lib/adapters/postgres/src/postgres.bucket_adapter.d.ts +19 -22
- package/lib/adapters/postgres/src/postgres.bucket_adapter.js +116 -100
- package/lib/adapters/postgres/src/postgres.cli.d.ts +23 -3
- package/lib/adapters/postgres/src/postgres.cli.js +70 -10
- package/lib/adapters/postgres/src/postgres.config.d.ts +5 -0
- package/lib/adapters/postgres/src/postgres.config.js +2 -0
- package/lib/adapters/postgres/src/postgres.nql.d.ts +7 -3
- package/lib/adapters/postgres/src/postgres.nql.js +86 -32
- package/lib/adapters/postgres/src/postgres.provider.d.ts +18 -0
- package/lib/adapters/postgres/src/postgres.provider.js +77 -0
- package/lib/adapters/postgres/test/postgres.bucket_adapter.test.js +76 -39
- package/lib/compiler/apps/monolyth/monolyth_compiler.d.ts +3 -0
- package/lib/compiler/apps/monolyth/monolyth_compiler.js +24 -0
- package/lib/compiler/apps/monolyth/stages/2_build_typescript_stage.js +2 -1
- package/lib/compiler/apps/monolyth/stages/5_dump_cli_stage.js +1 -1
- package/lib/compiler/apps/monolyth/stages/6_dump_package_json_stage.js +1 -1
- package/lib/compiler/elements/bucket.element.js +26 -11
- package/lib/compiler/elements/constants.element.js +1 -1
- package/lib/compiler/elements/element.d.ts +2 -0
- package/lib/compiler/elements/message.element.js +4 -4
- package/lib/compiler/helpers/dump_helpers.js +5 -2
- package/lib/compiler/stages/7_dump_stage.js +2 -0
- package/lib/compiler/treeshake.js +9 -37
- package/lib/compiler/typescript/bridge/extract.js +12 -0
- package/lib/compiler/typescript/bridge/inject.js +3 -0
- package/lib/compiler/typescript/bridge/organize.js +3 -3
- package/lib/elements/blocks/block.builder.js +4 -2
- package/lib/elements/blocks/job/internal/resource_job.builder.d.ts +22 -20
- package/lib/elements/blocks/job/internal/resource_job.d.ts +2 -1
- package/lib/elements/blocks/job/internal/resource_job.js +17 -4
- package/lib/elements/blocks/job/job.js +3 -0
- package/lib/elements/blocks/job/job.types.d.ts +7 -0
- package/lib/elements/blocks/job/job.types.js +2 -0
- package/lib/elements/blocks/machine/machine.js +3 -2
- package/lib/elements/blocks/resource/resource.builder.js +2 -4
- package/lib/elements/blocks/resource/resource.d.ts +5 -3
- package/lib/elements/blocks/resource/resource.js +26 -17
- package/lib/elements/edge/controller/adapters/controller_adapter.d.ts +2 -1
- package/lib/elements/edge/controller/adapters/controller_adapter.js +11 -2
- package/lib/elements/edge/controller/controller.builder.d.ts +4 -5
- package/lib/elements/edge/controller/controller.builder.js +7 -7
- package/lib/elements/edge/controller/controller.d.ts +2 -1
- package/lib/elements/edge/controller/controller.js +8 -6
- package/lib/elements/entities/bucket/adapters/bucket_adapter.d.ts +61 -23
- package/lib/elements/entities/bucket/adapters/bucket_adapter.js +22 -13
- package/lib/elements/entities/bucket/adapters/memory.bucket_adapter.d.ts +21 -22
- package/lib/elements/entities/bucket/adapters/memory.bucket_adapter.js +68 -2
- package/lib/elements/entities/bucket/adapters/memory.nql.d.ts +10 -6
- package/lib/elements/entities/bucket/adapters/memory.nql.js +38 -3
- package/lib/elements/entities/bucket/adapters/slow_memory.bucket_adapter.d.ts +0 -20
- package/lib/elements/entities/bucket/adapters/slow_memory.bucket_adapter.js +46 -30
- package/lib/elements/entities/bucket/bucket.builder.d.ts +8 -2
- package/lib/elements/entities/bucket/bucket.builder.js +13 -19
- package/lib/elements/entities/bucket/bucket.config.d.ts +5 -1
- package/lib/elements/entities/bucket/bucket.d.ts +180 -19
- package/lib/elements/entities/bucket/bucket.js +662 -48
- package/lib/elements/entities/bucket/bucket.schema.d.ts +7 -1
- package/lib/elements/entities/bucket/bucket.schema.js +2 -1
- package/lib/elements/entities/bucket/bucket.types.d.ts +2 -7
- package/lib/elements/entities/bucket/cache/bucket_cache.d.ts +6 -2
- package/lib/elements/entities/bucket/cache/bucket_cache.js +12 -12
- package/lib/elements/entities/bucket/graph/bucket_graph.d.ts +32 -5
- package/lib/elements/entities/bucket/graph/bucket_graph.js +80 -111
- package/lib/elements/entities/bucket/graph/bucket_graph.schema.d.ts +3 -6
- package/lib/elements/entities/bucket/graph/bucket_graph.schema.js +1 -4
- package/lib/elements/entities/bucket/graph/bucket_graph_link.builder.d.ts +3 -7
- package/lib/elements/entities/bucket/graph/bucket_graph_link.builder.js +6 -2
- package/lib/elements/entities/bucket/model/bucket_model.builder.js +1 -1
- package/lib/elements/entities/bucket/model/bucket_model.convert.js +3 -3
- package/lib/elements/entities/bucket/model/bucket_model.schema.d.ts +37 -8
- package/lib/elements/entities/bucket/model/bucket_model.schema.js +25 -4
- package/lib/elements/entities/bucket/model/bucket_model_field.builder.d.ts +33 -14
- package/lib/elements/entities/bucket/model/bucket_model_field.builder.js +56 -13
- package/lib/elements/entities/bucket/query/nql.schema.d.ts +1 -0
- package/lib/elements/entities/bucket/query/nql_compiler.js +13 -2
- package/lib/elements/entities/bucket/query/nql_engine.d.ts +11 -4
- package/lib/elements/entities/bucket/query/nql_engine.js +20 -11
- package/lib/elements/entities/bucket/view/bucket_view.js +63 -35
- package/lib/elements/entities/bucket/view/bucket_view.schema.d.ts +5 -2
- package/lib/elements/entities/bucket/view/bucket_view_field.builder.d.ts +6 -2
- package/lib/elements/entities/bucket/view/bucket_view_field.builder.js +22 -16
- package/lib/elements/entities/constants/constants.schema.d.ts +1 -1
- package/lib/elements/entities/drive/drive_adapter.d.ts +44 -0
- package/lib/elements/entities/drive/drive_adapter.js +10 -0
- package/lib/elements/entities/drive/local.drive_adapter.d.ts +10 -0
- package/lib/elements/entities/drive/local.drive_adapter.js +34 -0
- package/lib/elements/entities/message/message.schema.d.ts +1 -0
- package/lib/elements/entities/message/message.schema.js +33 -0
- package/lib/elements/entities/message/message_parser.d.ts +5 -1
- package/lib/elements/entities/message/message_parser.js +56 -35
- package/lib/elements/entities/message/template/message_template.schema.d.ts +10 -8
- package/lib/elements/entities/message/template/message_template_field.builder.d.ts +16 -6
- package/lib/elements/entities/message/template/message_template_field.builder.js +25 -0
- package/lib/elements/entities/message/template/message_template_parser.js +2 -1
- package/lib/engine/apps/app.config.d.ts +32 -11
- package/lib/engine/apps/app.config.js +12 -0
- package/lib/engine/apps/app.d.ts +2 -0
- package/lib/engine/apps/app.js +3 -0
- package/lib/engine/apps/inline.app.d.ts +5 -3
- package/lib/engine/apps/inline.app.js +27 -12
- package/lib/engine/apps/monolyth/monolyth.app.d.ts +4 -2
- package/lib/engine/apps/monolyth/monolyth.app.js +22 -10
- package/lib/engine/auth/authn.d.ts +5 -1
- package/lib/engine/auth/zero.authn_provider.d.ts +4 -2
- package/lib/engine/auth/zero.authn_provider.js +2 -2
- package/lib/engine/cli/cli.d.ts +3 -1
- package/lib/engine/cli/cli.js +22 -3
- package/lib/engine/cli/cli_adapter.d.ts +2 -1
- package/lib/engine/cli/cli_adapter.js +2 -1
- package/lib/engine/cli/cli_input.d.ts +19 -0
- package/lib/engine/cli/cli_input.js +207 -0
- package/lib/engine/cli/ui.d.ts +1 -1
- package/lib/engine/cli/ui.js +2 -2
- package/lib/engine/daemon.d.ts +3 -2
- package/lib/engine/daemon.js +14 -2
- package/lib/engine/data/date.js +2 -2
- package/lib/engine/data/datetime.d.ts +40 -4
- package/lib/engine/data/datetime.js +70 -11
- package/lib/engine/data/decimal.d.ts +1 -1
- package/lib/engine/data/decimal.js +3 -3
- package/lib/engine/data/error.d.ts +21 -4
- package/lib/engine/data/error.js +23 -7
- package/lib/engine/data/file.d.ts +38 -0
- package/lib/engine/data/file.js +54 -0
- package/lib/engine/data/json.d.ts +6 -0
- package/lib/engine/data/json.js +26 -0
- package/lib/engine/data/obj.d.ts +1 -1
- package/lib/engine/data/trash.d.ts +14 -0
- package/lib/engine/data/trash.js +2 -0
- package/lib/engine/data/tree.d.ts +7 -12
- package/lib/engine/data/tree.js +101 -49
- package/lib/engine/module.d.ts +2 -1
- package/lib/engine/module.js +2 -5
- package/lib/engine/space.d.ts +1 -0
- package/lib/engine/space.js +6 -0
- package/lib/engine/transaction/nodes/bucket.trx_node.d.ts +184 -24
- package/lib/engine/transaction/nodes/bucket.trx_node.js +346 -451
- package/lib/engine/transaction/nodes/bucket_query.trx_node.d.ts +4 -2
- package/lib/engine/transaction/nodes/bucket_query.trx_node.js +27 -15
- package/lib/engine/transaction/nodes/job.trx_node.d.ts +2 -1
- package/lib/engine/transaction/nodes/job.trx_node.js +6 -0
- package/lib/engine/transaction/trx.d.ts +5 -2
- package/lib/engine/transaction/trx.js +2 -2
- package/lib/engine/transaction/trx_engine.config.d.ts +1 -3
- package/lib/engine/transaction/trx_engine.d.ts +2 -2
- package/lib/engine/transaction/trx_engine.js +14 -11
- package/lib/engine/transaction/trx_node.d.ts +14 -4
- package/lib/engine/transaction/trx_node.js +50 -8
- package/lib/engine/tree.d.ts +1 -1
- package/lib/engine/util/crypto.d.ts +50 -0
- package/lib/engine/util/crypto.js +89 -0
- package/lib/engine/util/deep.d.ts +5 -0
- package/lib/engine/util/deep.js +46 -0
- package/lib/engine/util/dotenv.d.ts +2 -8
- package/lib/engine/util/dotenv.js +14 -36
- package/lib/engine/util/hash.d.ts +3 -0
- package/lib/engine/util/hash.js +23 -0
- package/lib/engine/util/log.js +1 -1
- package/lib/engine/util/mime.d.ts +10 -0
- package/lib/engine/util/mime.js +389 -0
- package/lib/engine/util/parse.d.ts +6 -5
- package/lib/engine/util/parse.js +16 -15
- package/lib/engine/util/path.d.ts +3 -0
- package/lib/engine/util/path.js +92 -0
- package/lib/engine/util/rules.d.ts +4 -0
- package/lib/engine/util/rules.js +12 -0
- package/package.json +2 -2
- package/tools/compile.js +2 -2
- package/tools/dotenv.d.ts +1 -0
- package/tools/dotenv.js +4 -0
- package/tools/joaquin/job.d.ts +5 -5
- package/tools/joaquin/mock.d.ts +23 -2
- package/tools/joaquin/mock.js +127 -21
- package/tsconfig.build.tsbuildinfo +1 -1
- package/lib/adapters/postgres/test/postgres.bucket_query.test.d.ts +0 -0
- package/lib/adapters/postgres/test/postgres.bucket_query.test.js +0 -136
@@ -41,6 +41,8 @@ const crypto_1 = require("crypto");
 const string_1 = require("../../../../engine/util/string");
 const log_1 = require("../../../../engine/util/log");
 const datetime_1 = require("../../../../engine/data/datetime");
+const daemon_1 = require("../../../../engine/daemon");
+const trx_1 = require("../../../../engine/transaction/trx");
 class MigrationMethod {
     constructor($) {
         this.description = $.description;
@@ -62,11 +64,13 @@ class MigrationStatus {
         if (old) {
             if (old.filehash === filehash) {
                 old.state = 'done';
+                old.method = migration.method;
             }
         }
         else {
             this.items.push({
                 id: undefined,
+                module: migration.module,
                 name: migration.name,
                 description: migration.method.description,
                 batch: undefined,
@@ -78,7 +82,7 @@ class MigrationStatus {
             }
         });
         const lastBatch = Math.max(...this.items.map(item => item.batch || 0), 0);
-        this.
+        this.batch = lastBatch;
     }
     describe() {
         let str = '';
@@ -89,7 +93,8 @@ class MigrationStatus {
                 'pending': () => (0, string_1.colored)('pending', 'yellow'),
                 'lost': () => (0, string_1.colored)('lost', 'brown'),
             }[item.state]();
-
+            const module = (0, string_1.colored)(item.module, 'lightcyan');
+            str += `└ ${item.id || '*'}\t${state}\t${module} ${item.name} @ ${item.batch || '...'}\n`;
         });
         return str;
     }
@@ -100,19 +105,26 @@ class MigrationRunner {
         this.dirpath = dirpath;
         this.migrations = [];
     }
-    static async scanFiles(dirpath) {
+    static async scanFiles(daemon, dirpath) {
+        const modules = daemon_1.Daemon.getModules(daemon);
         const files = [];
-
-        .
-
-
-
-
-
-
-
+        for (const module of modules) {
+            const modulepath = path.join('modules', module.name, dirpath);
+            if (!fs.existsSync(modulepath))
+                continue;
+            fs.readdirSync(modulepath, { withFileTypes: true })
+                .forEach(node => {
+                const nodePath = path.resolve(modulepath, node.name);
+                if (nodePath.endsWith('.d.ts')) {
+                    return;
+                }
+                files.push({
+                    module: module.name,
+                    name: node.name,
+                    path: nodePath
+                });
             });
-        }
+        }
         const migrations = [];
         for (const file of files) {
             const contents = fs.readFileSync(file.path).toString();
@@ -123,7 +135,8 @@ class MigrationRunner {
             }
             const { default: method } = await Promise.resolve(`${file.path}`).then(s => __importStar(require(s)));
             if (method instanceof MigrationMethod) {
-
+                const name = file.name.replace('.ts', '').replace('.js', '');
+                migrations.push({ module: file.module, name, path: file.path, hash, method });
             }
         }
         return migrations;
@@ -131,36 +144,61 @@ class MigrationRunner {
     static async scanDb(sql) {
         const db = await sql `
             SELECT * FROM ${sql(migrator_1.Migrator.MIGRATION_TABLE_NAME)}
+            ORDER BY id
         `;
         return db;
     }
-    static async status(sql, dirpath) {
-        const fileMigrations = await MigrationRunner.scanFiles(dirpath);
+    static async status(daemon, sql, dirpath) {
+        const fileMigrations = await MigrationRunner.scanFiles(daemon, dirpath);
         const dbMigrations = await MigrationRunner.scanDb(sql);
         return new MigrationStatus(fileMigrations, dbMigrations);
     }
     static async up(daemon, sql, mode = 'one', dirpath = './migrations') {
-        let status = await MigrationRunner.status(sql, dirpath);
+        let status = await MigrationRunner.status(daemon, sql, dirpath);
         console.log(status.describe());
         const pending = status.items.filter(item => item.state === 'pending');
         if (!pending.length) {
             log_1.Log.info('migrator', 'up', 'No migrations to run.');
             return;
         }
-
-
-
-
-        else {
-            for (const migration of pending) {
-                await this.migrateUp(daemon, sql, migration, status.nextBatch);
+        await sql.begin(async (sql) => {
+            if (mode === 'one') {
+                const migration = pending[0];
+                await this.migrateUp(daemon, sql, migration, status.batch + 1);
             }
+            else {
+                for (const migration of pending) {
+                    await this.migrateUp(daemon, sql, migration, status.batch + 1);
+                }
+            }
+        });
+        status = await MigrationRunner.status(daemon, sql, dirpath);
+        console.log(status.describe());
+    }
+    static async down(daemon, sql, mode = 'one', dirpath = './migrations') {
+        let status = await MigrationRunner.status(daemon, sql, dirpath);
+        console.log(status.describe());
+        const lastBatch = status.items.filter(item => item.batch === status.batch);
+        if (!lastBatch.length) {
+            log_1.Log.info('migrator', 'down', 'No migrations to rollback.');
+            return;
         }
-
+        await sql.begin(async (sql) => {
+            if (mode === 'one') {
+                const migration = lastBatch.at(-1);
+                await this.migrateDown(daemon, sql, migration);
+            }
+            else {
+                for (const migration of lastBatch) {
+                    await this.migrateDown(daemon, sql, migration);
+                }
+            }
+        });
+        status = await MigrationRunner.status(daemon, sql, dirpath);
         console.log(status.describe());
     }
-    static async
-    let status = await MigrationRunner.status(sql, dirpath);
+    static async injectUp(daemon, sql, migration, dirpath = './migrations') {
+        let status = await MigrationRunner.status(daemon, sql, dirpath);
         console.log(status.describe());
         const mig = {
             ...migration,
@@ -168,22 +206,35 @@ class MigrationRunner {
             hash: migration.hash(),
             filehash: '',
             method: {
-                up: async (
-                    await sql.unsafe(migration.sqlUp());
+                up: async ($) => {
+                    await $.sql.unsafe(migration.sqlUp());
                 },
-                down: async (
-                    await sql.unsafe(migration.sqlDown());
+                down: async ($) => {
+                    await $.sql.unsafe(migration.sqlDown());
                 }
             }
         };
-        await
-
+        await sql.begin(async (sql) => {
+            await this.migrateUp(daemon, sql, mig, status.batch + 1);
+        });
+        status = await MigrationRunner.status(daemon, sql, dirpath);
         console.log(status.describe());
     }
     static async migrateUp(daemon, sql, migration, batch) {
         log_1.Log.info('migrator', 'up', `Running migration ${(0, string_1.colored)('▲ UP', 'lightgreen')} ${(0, string_1.colored)(migration.name, 'lightblue')}`);
-        await migration.
+        const status = await daemon.trx(migration.module)
+            .run(async (trx) => {
+            trx_1.Trx.set(trx, 'sql', sql);
+            await migration.method.up({
+                sql,
+                trx
+            });
+        });
+        if (status.state !== 'ok') {
+            throw new Error('Migration failed. Rolling back all batch changes.');
+        }
         const row = {
+            module: migration.module,
             name: migration.name,
             batch,
             timestamp: datetime_1.NesoiDatetime.now(),
@@ -198,5 +249,23 @@ class MigrationRunner {
             ${sql(row)}
         `;
     }
+    static async migrateDown(daemon, sql, migration) {
+        log_1.Log.info('migrator', 'up', `Running migration ${(0, string_1.colored)('▼ DOWN', 'yellow')} ${(0, string_1.colored)(migration.name, 'lightblue')}`);
+        const status = await daemon.trx(migration.module)
+            .run(async (trx) => {
+            trx_1.Trx.set(trx, 'sql', sql);
+            await migration.method.down({
+                sql,
+                trx
+            });
+        });
+        if (status.state !== 'ok') {
+            throw new Error('Migration failed. Rolling back all batch changes.');
+        }
+        await sql `
+            DELETE FROM ${sql(migrator_1.Migrator.MIGRATION_TABLE_NAME)}
+            WHERE id = ${migration.id}
+        `;
+    }
 }
 exports.MigrationRunner = MigrationRunner;
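With this release the migrator scans migration files per module (modules/<module name>/<dirpath>, default './migrations'), runs each batch inside sql.begin(...) and a daemon.trx(module) node, and passes the up/down methods a { sql, trx } context instead of a bare client. Below is a minimal sketch of a migration file under that contract; only the default-export requirement, the MigrationMethod instance check and the { sql, trx } call signature are visible in the hunks above, so the import path, the option names passed to the constructor, and the table itself are assumptions:

    // modules/my_module/migrations/0001_create_users.ts (hypothetical path and module name)
    import { MigrationMethod } from 'nesoi/lib/adapters/postgres/src/migrator/migration'; // assumed export location

    export default new MigrationMethod({
        description: 'create users table',
        // Called by MigrationRunner as method.up({ sql, trx }) inside a transaction
        up: async ($: { sql: any, trx: any }) => {
            await $.sql`CREATE TABLE users (id serial PRIMARY KEY, name text NOT NULL)`;
        },
        // Called by MigrationRunner as method.down({ sql, trx }) when rolling back
        down: async ($: { sql: any, trx: any }) => {
            await $.sql`DROP TABLE users`;
        }
    });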
@@ -1,26 +1,8 @@
 import { $Bucket } from "../../../elements";
-import { BucketAdapter
+import { BucketAdapter } from "../../../elements/entities/bucket/adapters/bucket_adapter";
 import { AnyTrxNode } from "../../../engine/transaction/trx_node";
-import postgres from 'postgres';
-import { PostgresNQLRunner } from './postgres.nql';
-import { AnyTrx } from "../../../engine/transaction/trx";
-import { TrxEngineWrapFn } from "../../../engine/transaction/trx_engine.config";
 import { NQL_QueryMeta } from "../../../elements/entities/bucket/query/nql.schema";
-
-    connection?: postgres.Options<any>;
-};
-export declare class PostgresProvider {
-    config?: PostgresConfig | undefined;
-    static make<Name extends string>(name: Name, config?: PostgresConfig): {
-        name: Name;
-        up: () => PostgresProvider;
-        down: () => void;
-    };
-    sql: postgres.Sql<any>;
-    nql: PostgresNQLRunner;
-    private constructor();
-    static wrap(provider: string): (trx: AnyTrx, fn: TrxEngineWrapFn<any, any>, providers: Record<string, any>) => Promise<import("../../../engine/transaction/trx_node").TrxNodeStatus>;
-}
+import { PostgresProvider } from './postgres.provider';
 export declare class PostgresBucketAdapter<$ extends $Bucket, Obj extends $['#data']> extends BucketAdapter<$['#data']> {
     schema: $;
     provider: PostgresProvider;
@@ -34,12 +16,27 @@ export declare class PostgresBucketAdapter<$ extends $Bucket, Obj extends $['#da
     protected deleteEverything(trx: AnyTrxNode): Promise<void>;
     index(trx: AnyTrxNode): Promise<Obj[]>;
     get(trx: AnyTrxNode, id: Obj['id']): Promise<Obj>;
+    private precleanup;
+    create(trx: AnyTrxNode, obj: Record<string, any>): Promise<Obj>;
+    createMany(trx: AnyTrxNode, objs: Record<string, any>[]): Promise<Obj[]>;
+    patch(trx: AnyTrxNode, obj: Record<string, any>): Promise<Obj>;
+    patchMany(trx: AnyTrxNode, objs: Record<string, any>[]): Promise<$["#data"][]>;
+    replace(trx: AnyTrxNode, obj: Record<string, any>): Promise<Obj>;
+    replaceMany(trx: AnyTrxNode, objs: Record<string, any>[]): Promise<$["#data"][]>;
     put(trx: AnyTrxNode, obj: Record<string, any>): Promise<Obj>;
-    putMany(trx: AnyTrxNode, objs: Record<string, any>[]): Promise
+    putMany(trx: AnyTrxNode, objs: Record<string, any>[]): Promise<$["#data"][]>;
     delete(trx: AnyTrxNode, id: Obj['id']): Promise<void>;
     deleteMany(trx: AnyTrxNode, ids: Obj['id'][]): Promise<void>;
     syncOne(trx: AnyTrxNode, id: Obj['id'], lastObjUpdateEpoch: number): Promise<any>;
     syncOneAndPast(trx: AnyTrxNode, id: Obj['id'], lastUpdateEpoch: number): Promise<any>;
     syncAll(trx: AnyTrxNode, lastHash?: string, lastUpdateEpoch?: number): Promise<any>;
-    static
+    static getTableMeta(trx: AnyTrxNode, meta: NQL_QueryMeta): {
+        tableName: string;
+        meta: {
+            created_at: string;
+            created_by: string;
+            updated_at: string;
+            updated_by: string;
+        };
+    };
 }
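The adapter's write surface is now explicit: create/createMany, patch/patchMany, replace/replaceMany and put/putMany, plus a static getTableMeta that exposes the table name and the created_*/updated_* column mapping. A rough usage sketch, assuming `adapter` is a constructed PostgresBucketAdapter whose bucket model has a `name` field and `trx` is a transaction node that already carries a 'sql' handle (object shapes here are illustrative, not from the diff):

    const created  = await adapter.create(trx, { name: 'Ada' });                           // INSERT ... RETURNING *
    const patched  = await adapter.patch(trx, { id: created.id, name: 'Ada L.' });         // UPDATE, only keys present on the object
    const replaced = await adapter.replace(trx, { id: created.id, name: 'Ada Lovelace' }); // UPDATE with all schema keys
    const upserted = await adapter.put(trx, { id: created.id, name: 'Ada' });              // INSERT ... ON CONFLICT(id) DO UPDATE

The implementation hunks that follow show how each variant builds its key list and cleans the payload before binding it.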
@@ -1,69 +1,10 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.PostgresBucketAdapter =
+exports.PostgresBucketAdapter = void 0;
 const bucket_adapter_1 = require("../../../elements/entities/bucket/adapters/bucket_adapter");
 const log_1 = require("../../../engine/util/log");
 const trx_node_1 = require("../../../engine/transaction/trx_node");
-const date_1 = require("../../../engine/data/date");
-const datetime_1 = require("../../../engine/data/datetime");
-const decimal_1 = require("../../../engine/data/decimal");
-const postgres_nql_1 = require("./postgres.nql");
 const trx_1 = require("../../../engine/transaction/trx");
-const database_1 = require("./migrator/database");
-class PostgresProvider {
-    static make(name, config) {
-        return {
-            name: name,
-            up: () => new PostgresProvider(config),
-            down: () => { }
-        };
-    }
-    constructor(config) {
-        this.config = config;
-        log_1.Log.info('postgres', 'provider', 'Connecting to Postgres database');
-        this.sql = database_1.Database.connect({
-            ...(config?.connection || {}),
-            debug: true,
-            types: {
-                char: {
-                    to: 1042,
-                    from: [1042],
-                    serialize: (val) => val?.trim?.(),
-                    parse: (val) => val?.trim?.()
-                },
-                date: {
-                    to: 1082,
-                    from: [1082],
-                    serialize: (val) => val?.toISO(),
-                    parse: (val) => val ? date_1.NesoiDate.fromISO(val) : undefined
-                },
-                datetime: {
-                    to: 1114,
-                    from: [1114],
-                    serialize: (val) => val?.toISO(),
-                    parse: (val) => datetime_1.NesoiDatetime.fromISO(val?.replace(' ', 'T') + 'Z')
-                },
-                decimal: {
-                    to: 1700,
-                    from: [1700],
-                    serialize: (val) => val?.toString(),
-                    parse: (val) => val ? new decimal_1.Decimal(val) : undefined
-                }
-            }
-        });
-        this.nql = new postgres_nql_1.PostgresNQLRunner();
-    }
-    static wrap(provider) {
-        return (trx, fn, providers) => {
-            const postgres = providers[provider].sql;
-            return postgres.begin(sql => {
-                trx_1.Trx.set(trx.root, 'sql', sql);
-                return fn(trx.root);
-            });
-        };
-    }
-}
-exports.PostgresProvider = PostgresProvider;
 class PostgresBucketAdapter extends bucket_adapter_1.BucketAdapter {
     constructor(schema, provider, tableName) {
         super(schema, provider.nql, provider.config);
@@ -96,6 +37,7 @@ class PostgresBucketAdapter extends bucket_adapter_1.BucketAdapter {
         const objs = await this.guard(sql) `
             SELECT *
             FROM ${sql(this.tableName)}
+            ORDER BY ${this.config.meta.updated_at} DESC
         `;
         return objs;
     }
@@ -109,54 +51,123 @@ class PostgresBucketAdapter extends bucket_adapter_1.BucketAdapter {
         return objs[0];
     }
     /* Write Operations */
-
+    precleanup(obj) {
+        var _a, _b;
+        obj[_a = this.config.meta.created_by] ?? (obj[_a] = null);
+        obj[_b = this.config.meta.updated_by] ?? (obj[_b] = null);
+        for (const key in obj) {
+            if (obj[key] === undefined) {
+                delete obj[key];
+            }
+        }
+    }
+    async create(trx, obj) {
         const sql = trx_1.Trx.get(trx, 'sql');
+        // Use schema fields excluding id
         const keys = Object.keys(this.schema.model.fields)
             .filter(key => obj[key] !== undefined);
+        // Add meta (created_*/updated_*)
+        keys.push(...Object.values(this.config.meta));
+        this.precleanup(obj);
         // Create
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        const objs = await this.guard(sql) `
+            INSERT INTO ${sql(this.tableName)}
+            ${sql(obj, keys)}
+            RETURNING *`;
+        return objs[0];
+    }
+    async createMany(trx, objs) {
+        const sql = trx_1.Trx.get(trx, 'sql');
+        // Use schema fields excluding id
+        const keys = Object.keys(this.schema.model.fields)
+            .filter(key => key !== 'id');
+        // Add meta (created_*/updated_*)
+        keys.push(...Object.values(this.config.meta));
+        // Pre-cleanup
+        for (const obj of objs) {
+            this.precleanup(obj);
         }
+        // Create
+        const inserted = await this.guard(sql) `
+            INSERT INTO ${sql(this.tableName)}
+            ${sql(objs, keys)}
+            RETURNING *
+        `;
+        return inserted;
     }
-    async
+    async patch(trx, obj) {
         const sql = trx_1.Trx.get(trx, 'sql');
-
+        // Use schema keys that exist on object
+        const keys = Object.keys(this.schema.model.fields)
+            .filter(key => obj[key] !== undefined)
+            .filter(key => key in obj);
+        // Add meta
+        keys.push(this.config.meta.updated_by, this.config.meta.updated_at);
+        // Pre-cleanup
+        this.precleanup(obj);
+        // Update
+        const objs = await this.guard(sql) `
+            UPDATE ${sql(this.tableName)} SET
+            ${sql(obj, keys)}
+            WHERE id = ${obj.id}
+            RETURNING *
+        `;
+        return objs[0];
+    }
+    async patchMany(trx, objs) {
+        const _objs = [];
         for (const obj of objs) {
-
-            if (obj[key] === undefined) {
-                delete obj[key];
-            }
-        }
+            _objs.push(await this.patch(trx, obj));
         }
-
-
-
-
-
-
-
-
-
+        return _objs;
+    }
+    async replace(trx, obj) {
+        const sql = trx_1.Trx.get(trx, 'sql');
+        // Use all schema keys
+        const keys = Object.keys(this.schema.model.fields)
+            .filter(key => obj[key] !== undefined);
+        keys.push(this.config.meta.updated_by, this.config.meta.updated_at);
+        this.precleanup(obj);
+        const objs = await this.guard(sql) `
+            UPDATE ${sql(this.tableName)} SET
+            ${sql(obj, keys)}
+            WHERE id = ${obj.id}
+            RETURNING *
+        `;
+        return objs[0];
+    }
+    async replaceMany(trx, objs) {
+        const _objs = [];
+        for (const obj of objs) {
+            _objs.push(await this.replace(trx, obj));
         }
-
-
-
+        return _objs;
+    }
+    async put(trx, obj) {
+        const sql = trx_1.Trx.get(trx, 'sql');
+        // Use all schema keys
+        const keys = Object.keys(this.schema.model.fields)
+            .filter(key => obj[key] !== undefined);
+        // Add meta (created_*/updated_*)
+        const ikeys = keys.concat(...Object.values(this.config.meta));
+        const ukeys = keys.concat(this.config.meta.updated_by, this.config.meta.updated_at);
+        this.precleanup(obj);
+        const objs = await this.guard(sql) `
+            INSERT INTO ${sql(this.tableName)}
+            ${sql(obj, ikeys)}
+            ON CONFLICT(id)
+            DO UPDATE SET
+            ${sql(obj, ukeys)}
+            RETURNING *
+        `;
+        return objs[0];
+    }
+    async putMany(trx, objs) {
+        const _objs = [];
+        for (const obj of objs) {
+            _objs.push(await this.put(trx, obj));
         }
-        return
+        return _objs;
     }
     async delete(trx, id) {
         const sql = trx_1.Trx.get(trx, 'sql');
@@ -268,11 +279,16 @@ class PostgresBucketAdapter extends bucket_adapter_1.BucketAdapter {
         // reset: false
         // };
     }
-    static
+    static getTableMeta(trx, meta) {
+        const trxModule = trx_node_1.TrxNode.getModule(trx);
         const bucketName = meta.bucket.name;
-        const
+        const refName = (trxModule.name === meta.bucket.module ? '' : `${meta.bucket.module}::`) + bucketName;
+        const bucket = trxModule.buckets[refName];
         const adapter = bucket.adapter;
-        return
+        return {
+            tableName: adapter.tableName,
+            meta: adapter.config.meta
+        };
     }
 }
 exports.PostgresBucketAdapter = PostgresBucketAdapter;
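Every write operation above resolves its client with Trx.get(trx, 'sql'), so something must attach a postgres.js transaction to the trx node before the adapter runs. The migration runner earlier in this diff does exactly that with sql.begin(...) plus Trx.set(trx, 'sql', sql); a condensed sketch of the same contract (variable names illustrative, not from the diff):

    await providerSql.begin(async (sql) => {       // postgres.js transaction scope
        Trx.set(trx, 'sql', sql);                  // adapters later read it via Trx.get(trx, 'sql')
        await adapter.create(trx, { name: 'Ada' });
    });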
@@ -1,10 +1,10 @@
 import { CLIAdapter, CLICommand } from "../../../engine/cli/cli_adapter";
-import { PostgresProvider } from './postgres.
+import { PostgresProvider } from './postgres.provider';
 import { AnyDaemon } from "../../../engine/daemon";
 export declare class cmd_check extends CLICommand {
     provider: PostgresProvider;
     constructor(provider: PostgresProvider);
-    run(): Promise<void>;
+    run(daemon: AnyDaemon): Promise<void>;
 }
 export declare class cmd_tables extends CLICommand {
     provider: PostgresProvider;
@@ -14,7 +14,7 @@ export declare class cmd_tables extends CLICommand {
 export declare class cmd_create_db extends CLICommand {
     provider: PostgresProvider;
     constructor(provider: PostgresProvider);
-    run(
+    run(daemon: AnyDaemon, $: {
         name: string;
     }): Promise<void>;
 }
@@ -40,6 +40,26 @@ export declare class cmd_migrate_one_up extends CLICommand {
     constructor(provider: PostgresProvider);
     run(daemon: AnyDaemon): Promise<void>;
 }
+export declare class cmd_migrate_down extends CLICommand {
+    provider: PostgresProvider;
+    constructor(provider: PostgresProvider);
+    run(daemon: AnyDaemon): Promise<void>;
+}
+export declare class cmd_migrate_one_down extends CLICommand {
+    provider: PostgresProvider;
+    constructor(provider: PostgresProvider);
+    run(daemon: AnyDaemon): Promise<void>;
+}
+export declare class cmd_query extends CLICommand {
+    provider: PostgresProvider;
+    constructor(provider: PostgresProvider);
+    run(): Promise<void>;
+}
+export declare class cmd_import_csv extends CLICommand {
+    provider: PostgresProvider;
+    constructor(provider: PostgresProvider);
+    run(daemon: AnyDaemon, input: Record<string, any>): Promise<void>;
+}
 export declare class PostgresCLI extends CLIAdapter {
     provider: PostgresProvider;
     constructor(provider: PostgresProvider);