project-devkit 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cli.mjs +173 -0
- package/core/loader.mjs +151 -0
- package/core/utils.mjs +259 -0
- package/package.json +27 -0
- package/plugins/backups/README.md +88 -0
- package/plugins/backups/backups.mjs +342 -0
- package/plugins/backups/config.yml +31 -0
- package/plugins/backups/index.mjs +53 -0
- package/plugins/directus/config.yml +18 -0
- package/plugins/directus/directus.mjs +179 -0
- package/plugins/directus/index.mjs +54 -0
- package/plugins/docker/config.yml +5 -0
- package/plugins/docker/index.mjs +42 -0
- package/plugins/env/config.yml +40 -0
- package/plugins/env/env.mjs +249 -0
- package/plugins/env/index.mjs +93 -0
- package/plugins/env/infisical.mjs +152 -0
|
@@ -0,0 +1,342 @@
|
|
|
1
|
+
import { resolve, basename } from 'path';
|
|
2
|
+
import { existsSync, mkdirSync, readdirSync, statSync } from 'fs';
|
|
3
|
+
import { execSync } from 'child_process';
|
|
4
|
+
import chalk from 'chalk';
|
|
5
|
+
|
|
6
|
+
// Matches locally stored dump files: dump-<origin>-<type>-<YYYY-MM-DD>_<HH_MM_SS>.sql.gz
// Capture groups: (1) origin test|stage|local, (2) type inner|outer, (3) date, (4) time.
const LOCAL_DUMP_PATTERN = /^dump-(test|stage|local)-(inner|outer)-(\d{4}-\d{2}-\d{2})_(\d{2}_\d{2}_\d{2})\.sql\.gz$/;
|
|
7
|
+
|
|
8
|
+
// --- YaDisk helpers (internal module) ---
|
|
9
|
+
|
|
10
|
+
/**
 * Build a RegExp matching remote dump filenames for the given environment.
 * Capture groups: (1) type inner|outer, (2) date YYYY-MM-DD, (3) time HH_MM_SS.
 */
function makeDumpPattern(env) {
  const datePart = '(\\d{4}-\\d{2}-\\d{2})';
  const timePart = '(\\d{2}_\\d{2}_\\d{2})';
  return new RegExp(`^dump-${env}-(inner|outer)-${datePart}_${timePart}\\.sql\\.gz$`);
}
|
|
13
|
+
|
|
14
|
+
/**
 * Look up the SSH host string for an environment.
 * @throws {Error} when the environment is not defined in config.servers.
 */
function getSSHHost(env, config) {
  const servers = config.servers || {};
  const entry = servers[env];
  if (!entry) {
    const known = Object.keys(servers).join(', ');
    throw new Error(`Unknown environment '${env}' (available: ${known})`);
  }
  return entry.ssh;
}
|
|
19
|
+
|
|
20
|
+
/**
 * True when the configured YaDisk token env var is set to a non-empty value.
 * The variable name defaults to YADISK_TOKEN unless overridden in config.
 */
function hasYaDiskToken(config) {
  const varName = config.yadisk?.token_env || 'YADISK_TOKEN';
  return Boolean(process.env[varName]);
}
|
|
24
|
+
|
|
25
|
+
/**
 * Read the YaDisk OAuth token from the configured environment variable
 * (default YADISK_TOKEN). Returns undefined when the variable is unset.
 */
function getYaDiskToken(config) {
  const configured = config.yadisk?.token_env;
  const varName = configured || 'YADISK_TOKEN';
  return process.env[varName];
}
|
|
29
|
+
|
|
30
|
+
/**
 * Fetch dump metadata by running the server-side info script over SSH.
 * The script prints JSON, which is parsed and returned as-is.
 */
function getDumpsViaSSH(mode, env, config, utils) {
  const sshHost = getSSHHost(env, config);
  const script = config.dump?.info_script || '/opt/get_dump_info.sh';
  const raw = utils.sshExec(sshHost, `${script} ${mode}`);
  return JSON.parse(raw);
}
|
|
36
|
+
|
|
37
|
+
/**
 * List dumps for an environment via the YaDisk REST API.
 * Returns descriptors shaped like the SSH listing, tagged source: 'yadisk'.
 * @throws {Error} when the token or the per-environment folder is missing.
 */
async function getDumpsViaYaDisk(env, config, utils) {
  const token = getYaDiskToken(config);
  if (!token) throw new Error('YADISK_TOKEN is not set');
  const pattern = makeDumpPattern(env);
  const folder = config.yadisk?.folders?.[env];
  if (!folder) throw new Error(`YaDisk folder for '${env}' not configured`);

  const entries = await utils.listYaDiskDumps(token, folder);
  const dumps = [];
  for (const entry of entries) {
    if (entry.type !== 'file') continue;
    const m = entry.name.match(pattern);
    if (!m) continue;
    dumps.push({
      name: entry.name,
      path: entry.path,
      type: m[1],
      date: m[2],
      time: m[3],
      size: entry.size || 0,
      source: 'yadisk',
    });
  }
  return dumps;
}
|
|
55
|
+
|
|
56
|
+
/**
 * Get dump metadata, preferring SSH and falling back to the YaDisk API
 * when SSH fails, a token is available, and the mode covers remote dumps.
 *
 * @param {string} mode - 'all' | 'local' | 'remote' (passed to the info script)
 * @param {string} env  - environment key from config.servers
 * @returns {Promise<Array<object>>} dump descriptors
 * @throws {Error} when SSH fails and no YaDisk fallback applies; the
 *                 underlying SSH error is attached as `cause`.
 */
async function getDumps(mode, env, config, utils) {
  try {
    return getDumpsViaSSH(mode, env, config, utils);
  } catch (sshError) {
    if (hasYaDiskToken(config) && (mode === 'all' || mode === 'remote')) {
      console.log(chalk.yellow(`SSH ${env} unavailable, using YaDisk API`));
      return getDumpsViaYaDisk(env, config, utils);
    }
    // Preserve the underlying SSH failure for diagnostics instead of
    // discarding it (the original catch dropped the error entirely).
    throw new Error(`SSH ${env} unavailable and YADISK_TOKEN not set`, { cause: sshError });
  }
}
|
|
67
|
+
|
|
68
|
+
/**
 * Resolve a direct download URL for a dump: try the server-side script over
 * SSH first, then fall back to the YaDisk API. Returns null when neither
 * source can supply a link.
 */
async function getDownloadLink(dump, env, config, utils) {
  try {
    const sshHost = getSSHHost(env, config);
    const script = config.dump?.info_script || '/opt/get_dump_info.sh';
    const link = utils.sshExec(sshHost, `${script} download-link '${dump.path}'`);
    if (link.startsWith('http')) return link;
  } catch {
    // SSH path is best-effort; fall through to the YaDisk fallback below.
  }
  return hasYaDiskToken(config)
    ? utils.getYaDiskDownloadUrl(getYaDiskToken(config), dump.path)
    : null;
}
|
|
80
|
+
|
|
81
|
+
/**
 * Scan the local backups/ folder for files matching LOCAL_DUMP_PATTERN.
 * Returns [] when the folder does not exist; unreadable files report size 0.
 */
function getLocalDumps() {
  const backupsDir = resolve('backups');
  if (!existsSync(backupsDir)) return [];

  const dumps = [];
  for (const fileName of readdirSync(backupsDir)) {
    const m = fileName.match(LOCAL_DUMP_PATTERN);
    if (!m) continue;
    const filePath = resolve(backupsDir, fileName);
    let fileSize = 0;
    try { fileSize = statSync(filePath).size; } catch {}
    dumps.push({
      name: fileName,
      path: filePath,
      origin: m[1],
      type: m[2],
      date: m[3],
      time: m[4],
      size: fileSize,
      source: 'local',
    });
  }
  return dumps;
}
|
|
98
|
+
|
|
99
|
+
/**
 * Group dump descriptors by their "<date>_<time>" timestamp key.
 * @returns {Object<string, Array>} timestamp key → dumps from that backup run
 */
function groupDumps(dumps) {
  return dumps.reduce((acc, dump) => {
    const stamp = `${dump.date}_${dump.time}`;
    if (!acc[stamp]) acc[stamp] = [];
    acc[stamp].push(dump);
    return acc;
  }, {});
}
|
|
107
|
+
|
|
108
|
+
/**
 * Select dump files to load.
 * With options.src: validate the file and infer its type from the name
 * (or take options.type). Otherwise scan backups/, apply the optional
 * type/date filters, and return every file from the newest timestamp group.
 * @throws {Error} on missing file/folder or when nothing matches.
 */
function findDumpFile(options) {
  if (options.src) {
    const srcPath = resolve(options.src);
    if (!existsSync(srcPath)) throw new Error(`File not found: ${srcPath}`);
    const fileName = basename(srcPath);
    const match = fileName.match(LOCAL_DUMP_PATTERN);
    const dumpType = options.type || (match ? match[2] : null);
    if (!dumpType) throw new Error(`Cannot determine type (inner/outer) from filename. Use --type`);
    return [{ path: srcPath, name: fileName, type: dumpType }];
  }

  const backupsDir = resolve('backups');
  if (!existsSync(backupsDir)) throw new Error('backups/ folder not found');

  const candidates = [];
  for (const fileName of readdirSync(backupsDir)) {
    const m = fileName.match(LOCAL_DUMP_PATTERN);
    if (!m) continue;
    candidates.push({ name: fileName, path: resolve(backupsDir, fileName), type: m[2], date: m[3], time: m[4] });
  }

  let matching = candidates;
  if (options.type) matching = matching.filter(f => f.type === options.type);
  if (options.date) matching = matching.filter(f => f.date === options.date);
  if (matching.length === 0) throw new Error('No matching dumps found');

  // Newest timestamp first; return the whole latest group (inner + outer).
  matching.sort((a, b) => `${b.date}_${b.time}`.localeCompare(`${a.date}_${a.time}`));
  const newest = `${matching[0].date}_${matching[0].time}`;
  return matching.filter(f => `${f.date}_${f.time}` === newest);
}
|
|
138
|
+
|
|
139
|
+
// --- Commands ---
|
|
140
|
+
|
|
141
|
+
// Per-origin accent color for `ls` output; unknown origins fall back to white.
const ORIGIN_COLORS = { test: chalk.blue, stage: chalk.magenta, local: chalk.green };
|
|
142
|
+
|
|
143
|
+
/**
 * Print local dumps grouped by origin (test/stage/local) and timestamp,
 * newest backup run first within each origin.
 */
export function ls({ utils }) {
  const dumps = getLocalDumps();
  if (dumps.length === 0) { console.log('No local dumps found (backups/ folder).'); return; }

  const byOrigin = {};
  for (const dump of dumps) {
    (byOrigin[dump.origin] ||= []).push(dump);
  }

  let totalFiles = 0;
  for (const origin of Object.keys(byOrigin).sort()) {
    const paint = ORIGIN_COLORS[origin] || chalk.white;
    const originDumps = byOrigin[origin];
    const grouped = groupDumps(originDumps);
    totalFiles += originDumps.length;

    console.log(paint(`\n  ${origin} (${originDumps.length} files):`));

    for (const stamp of Object.keys(grouped).sort().reverse()) {
      const entries = grouped[stamp];
      const [first] = entries;
      const time = first.time.replace(/_/g, ':');
      const types = entries.map(d => chalk.cyan(d.type)).join(', ');
      const sizes = entries.map(d => utils.formatSize(d.size)).join(' + ');
      const combined = utils.formatSize(entries.reduce((sum, d) => sum + d.size, 0));
      console.log(`   ${chalk.green(first.date)} ${time} [${types}] ${sizes} (${combined})`);
    }
  }

  console.log(`\nTotal: ${totalFiles} files in backups/`);
}
|
|
178
|
+
|
|
179
|
+
/**
 * Print dumps available remotely for an environment, grouped by backup run.
 *
 * @param {object} p
 * @param {string} [p.env='test']   - environment key from config.servers
 * @param {string} [p.source='all'] - 'all' (SSH with YaDisk fallback),
 *                                    'server' (SSH only), 'yadisk' (API only)
 * @throws {Error} for an unrecognized source value.
 */
export async function list({ config, utils, env = 'test', source = 'all' }) {
  console.log(chalk.bold(`Environment: ${env}\n`));

  let dumps;
  if (source === 'yadisk') {
    dumps = await getDumpsViaYaDisk(env, config, utils);
  } else if (source === 'server') {
    dumps = getDumpsViaSSH('local', env, config, utils);
  } else if (source === 'all') {
    dumps = await getDumps('all', env, config, utils);
  } else {
    // Previously an unknown source fell through a lookup table and sent
    // mode === undefined to the info script; fail loudly instead.
    throw new Error(`Unknown source '${source}' (expected all, server or yadisk)`);
  }

  if (dumps.length === 0) { console.log('No dumps found.'); return; }

  const groups = groupDumps(dumps);
  const sortedKeys = Object.keys(groups).sort().reverse();

  console.log(`Found ${dumps.length} files (${sortedKeys.length} backups):\n`);

  for (const key of sortedKeys) {
    const items = groups[key];
    const date = items[0].date;
    const time = items[0].time.replace(/_/g, ':');
    const types = items.map(d => chalk.cyan(d.type)).join(', ');
    const sizes = items.map(d => utils.formatSize(d.size)).join(' + ');
    const total = utils.formatSize(items.reduce((s, d) => s + d.size, 0));
    const sources = [...new Set(items.map(d => d.source))].map(s =>
      s === 'yadisk' ? chalk.yellow(s) : chalk.green(s)
    ).join('/');

    console.log(`  ${chalk.green(date)} ${time} [${types}] ${sizes} (total ${total}) \u2190 ${sources}`);
  }
}
|
|
214
|
+
|
|
215
|
+
/**
 * Download the most recent dump set for an environment into `dest`.
 *
 * @param {object} p
 * @param {string} [p.env='test']     - environment key from config.servers
 * @param {string} [p.type]           - restrict to 'inner' or 'outer'
 * @param {string} [p.date]           - restrict to a date (YYYY-MM-DD)
 * @param {string} [p.source='auto']  - 'auto' prefers server copies and fills
 *                                      missing types from YaDisk
 * @param {string} [p.dest='backups'] - destination folder (created if missing)
 * @throws {Error} when no dumps match the filters.
 */
export async function pull({ config, utils, env = 'test', type, date, source = 'auto', dest = 'backups' }) {
  let dumps = await getDumps('all', env, config, utils);
  if (dumps.length === 0) throw new Error('No dumps found');

  console.log(chalk.bold(`Environment: ${env}\n`));

  if (date) {
    dumps = dumps.filter(d => d.date === date);
    if (dumps.length === 0) throw new Error(`No dumps found for ${date}`);
  }

  if (type) {
    dumps = dumps.filter(d => d.type === type);
    if (dumps.length === 0) throw new Error(`No dumps found for type '${type}'`);
  }

  if (source === 'server') {
    dumps = dumps.filter(d => d.source === 'server');
    if (dumps.length === 0) throw new Error('None on server. Try --source yadisk');
  } else if (source === 'yadisk') {
    dumps = dumps.filter(d => d.source === 'yadisk');
    if (dumps.length === 0) throw new Error('None on YaDisk');
  }

  const groups = groupDumps(dumps);
  const latestKey = Object.keys(groups).sort().reverse()[0];
  let toDownload = groups[latestKey];

  if (source === 'auto') {
    // Prefer server copies; fill any type missing on the server from YaDisk.
    const serverDumps = toDownload.filter(d => d.source === 'server');
    const yadiskDumps = toDownload.filter(d => d.source === 'yadisk');
    const seenTypes = new Set();
    const chosen = [];
    for (const d of serverDumps) { chosen.push(d); seenTypes.add(d.type); }
    for (const d of yadiskDumps) { if (!seenTypes.has(d.type)) chosen.push(d); }
    toDownload = chosen;
  }

  const destDir = resolve(dest);
  if (!existsSync(destDir)) mkdirSync(destDir, { recursive: true });

  // Resolve the SSH host lazily: the original called getSSHHost up front,
  // so a YaDisk-only download failed whenever config.servers lacked this
  // environment even though no scp was needed.
  let host = null;
  const getHost = () => (host ??= getSSHHost(env, config));

  const dumpDate = toDownload[0].date;
  const dumpTime = toDownload[0].time.replace(/_/g, ':');
  console.log(`Downloading dumps for ${dumpDate} ${dumpTime}:\n`);

  for (const dump of toDownload) {
    const destFile = resolve(destDir, dump.name);

    if (dump.source === 'server') {
      console.log(`  ${chalk.green('scp')} ${dump.name} (${utils.formatSize(dump.size)})`);
      utils.scpDownload(getHost(), dump.path, destFile);
    } else {
      console.log(`  ${chalk.yellow('yadisk')} ${dump.name} (${utils.formatSize(dump.size)})`);
      const url = await getDownloadLink(dump, env, config, utils);
      if (!url) { console.log(chalk.red('    Failed to get download link')); continue; }
      await utils.downloadFromUrl(url, destFile, dump.size);
    }
  }

  console.log(chalk.green('\nDone!'));
}
|
|
277
|
+
|
|
278
|
+
/**
 * Create gzipped pg_dump files from local docker-compose containers.
 *
 * @param {object} p
 * @param {string} [p.type]           - dump only 'inner' or 'outer' (default: all configured)
 * @param {string} [p.dest='backups'] - destination folder (created if missing)
 * @throws {Error} when a requested type has no container configuration.
 */
export async function dump({ config, utils, projectRoot, type, dest = 'backups' }) {
  const composeFile = config.compose_file || 'docker-compose.dev.yml';
  const containers = utils.getComposeContainers(projectRoot, composeFile);
  const types = type ? [type] : Object.keys(containers);
  const destDir = resolve(dest);
  if (!existsSync(destDir)) mkdirSync(destDir, { recursive: true });

  // Use local time for BOTH parts of the filename. The original mixed a UTC
  // date (toISOString) with a local time (toTimeString), producing a file
  // dated the wrong day around midnight in non-UTC timezones.
  const now = new Date();
  const pad = (n) => String(n).padStart(2, '0');
  const date = `${now.getFullYear()}-${pad(now.getMonth() + 1)}-${pad(now.getDate())}`;
  const time = now.toTimeString().slice(0, 8).replace(/:/g, '_');

  for (const t of types) {
    const cfg = containers[t];
    if (!cfg) throw new Error(`No configuration for type '${t}'`);

    const fileName = `dump-local-${t}-${date}_${time}.sql.gz`;
    const destFile = resolve(destDir, fileName);

    console.log(`${chalk.cyan(t)}: dumping ${cfg.dbName} from ${cfg.pgContainer}...`);

    // Shell pipeline: pg_dump inside the container, gzip on the host.
    execSync(
      `docker exec ${cfg.pgContainer} pg_dump -U ${cfg.dbUser} ${cfg.dbName} | gzip > "${destFile}"`,
      { stdio: ['pipe', 'pipe', 'inherit'], timeout: 300000 },
    );

    console.log(chalk.green(`  \u2192 ${fileName}`));
  }

  console.log(chalk.green('\nDone!'));
}
|
|
308
|
+
|
|
309
|
+
/**
 * Restore one or more local dump files into their docker-compose containers.
 *
 * Selection is delegated to findDumpFile (explicit --src, or latest matching
 * files from backups/). For each dump: the app container is stopped, the DB
 * is dropped and recreated, the gzipped SQL is streamed into psql, and the
 * app container is restarted.
 *
 * @param {object} p
 * @param {string} [p.type] - restrict to 'inner' or 'outer'
 * @param {string} [p.date] - restrict to a date (YYYY-MM-DD)
 * @param {string} [p.src]  - explicit path to a .sql.gz file
 * @throws {Error} when no dump matches or a type has no container config.
 */
export async function load({ config, utils, projectRoot, type, date, src }) {
  const dumps = findDumpFile({ type, date, src });
  const composeFile = config.compose_file || 'docker-compose.dev.yml';
  const containers = utils.getComposeContainers(projectRoot, composeFile);

  for (const dump of dumps) {
    const cfg = containers[dump.type];
    if (!cfg) throw new Error(`No configuration for type '${dump.type}'`);

    console.log(`\n${chalk.cyan(dump.type)}: loading ${dump.name} \u2192 ${cfg.pgContainer}`);

    // Stop the app first so no connections block dropdb below.
    console.log('  Stopping Directus...');
    execSync(`docker stop ${cfg.appContainer}`, { stdio: 'pipe', timeout: 30000 });

    try {
      console.log('  Recreating DB...');
      utils.dockerExec(cfg.pgContainer, `dropdb -U ${cfg.dbUser} --force --if-exists ${cfg.dbName}`);
      utils.dockerExec(cfg.pgContainer, `createdb -U ${cfg.dbUser} ${cfg.dbName}`);

      console.log('  Restoring dump...');
      // Host-side gunzip streamed into psql inside the container.
      execSync(
        `gunzip -c "${dump.path}" | docker exec -i ${cfg.pgContainer} psql -U ${cfg.dbUser} -d ${cfg.dbName}`,
        { stdio: ['pipe', 'pipe', 'inherit'], timeout: 600000 },
      );

      console.log(chalk.green(`  ${dump.type}: loaded!`));
    } finally {
      // Always restart the app container, even when the restore failed.
      console.log('  Starting Directus...');
      execSync(`docker start ${cfg.appContainer}`, { stdio: 'pipe', timeout: 30000 });
    }
  }

  console.log(chalk.green('\nDone!'));
}
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
name: backups
|
|
2
|
+
description: Database backup management — list, download, dump, restore
|
|
3
|
+
|
|
4
|
+
# YaDisk configuration — override in .devkit.d/backups.yml
|
|
5
|
+
yadisk:
|
|
6
|
+
token_env: YADISK_TOKEN
|
|
7
|
+
# folders:
|
|
8
|
+
# test: /path/to/backups/test
|
|
9
|
+
# stage: /path/to/backups/stage
|
|
10
|
+
|
|
11
|
+
# Dump file naming
|
|
12
|
+
dump:
|
|
13
|
+
pattern: "dump-{env}-{type}-{date}_{time}.sql.gz"
|
|
14
|
+
local_pattern: "dump-{origin}-{type}-{date}_{time}.sql.gz"
|
|
15
|
+
info_script: /opt/get_dump_info.sh
|
|
16
|
+
|
|
17
|
+
# Docker compose file for local operations
|
|
18
|
+
compose_file: docker-compose.dev.yml
|
|
19
|
+
|
|
20
|
+
# Server definitions — override in .devkit.d/backups.yml
|
|
21
|
+
# Example:
|
|
22
|
+
# servers:
|
|
23
|
+
# test:
|
|
24
|
+
# ssh: user@host
|
|
25
|
+
# projects_path: /opt/projects
|
|
26
|
+
# confirm: false
|
|
27
|
+
# stage:
|
|
28
|
+
# ssh: user@host
|
|
29
|
+
# projects_path: /opt/projects
|
|
30
|
+
# confirm: true
|
|
31
|
+
servers: {}
|
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
import * as backups from './backups.mjs';
|
|
2
|
+
|
|
3
|
+
/** Print a user-facing error message and terminate with a non-zero status. */
function handleError(error) {
  console.error(`Error: ${error.message}`);
  process.exit(1);
}
|
|
7
|
+
|
|
8
|
+
/**
|
|
9
|
+
* Register backups plugin commands.
|
|
10
|
+
* @param {import('commander').Command} program
|
|
11
|
+
* @param {object} context
|
|
12
|
+
*/
|
|
13
|
+
/**
 * Register backups plugin commands on the devkit CLI.
 * @param {import('commander').Command} program
 * @param {object} context - devkit plugin context (config, utils, roots)
 */
export function register(program, { config, utils, projectRoot, devkitRoot }) {
  const context = { config, utils, projectRoot };
  // Wrap async command handlers so rejections route through handleError.
  const runAsync = (fn) => (opts) => fn({ ...context, ...opts }).catch(handleError);

  const backupsCmd = program
    .command('backups')
    .description(config.description || 'Database backups');

  backupsCmd
    .command('ls')
    .description('Show local dumps (downloaded and created)')
    .action(() => { try { backups.ls(context); } catch (e) { handleError(e); } });

  backupsCmd
    .command('list')
    .description('Show available dumps on server')
    .option('--env <env>', 'Environment: test or stage', 'test')
    .option('--source <source>', 'Source: all, server, yadisk', 'all')
    .action(runAsync(backups.list));

  backupsCmd
    .command('pull')
    .description('Download dumps')
    .option('--env <env>', 'Environment: test, stage or local', 'test')
    .option('--type <type>', 'Type: inner or outer')
    .option('--date <date>', 'Dump date (YYYY-MM-DD)')
    .option('--source <source>', 'Source: auto, server, yadisk', 'auto')
    .option('--dest <dest>', 'Destination folder', 'backups')
    .action(runAsync(backups.pull));

  backupsCmd
    .command('dump')
    .description('Create a local DB dump')
    .option('--type <type>', 'Type: inner or outer (default: both)')
    .option('--dest <dest>', 'Destination folder', 'backups')
    .action(runAsync(backups.dump));

  backupsCmd
    .command('load')
    .description('Restore a dump into a local container')
    .option('--type <type>', 'Type: inner or outer')
    .option('--date <date>', 'Dump date (YYYY-MM-DD)')
    .option('--src <path>', 'Path to .sql.gz file')
    .action(runAsync(backups.load));
}
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
name: directus
|
|
2
|
+
description: Directus CLI wrapper — run commands inside Directus containers
|
|
3
|
+
|
|
4
|
+
# Docker compose file to discover containers
|
|
5
|
+
compose_file: docker-compose.dev.yml
|
|
6
|
+
|
|
7
|
+
# Container aliases — override in .devkit.d/directus.yml
|
|
8
|
+
# Maps short alias to docker-compose service name
|
|
9
|
+
# Example:
|
|
10
|
+
# aliases:
|
|
11
|
+
# inner: directus-inner
|
|
12
|
+
# outer: directus-outer
|
|
13
|
+
aliases: {}
|
|
14
|
+
|
|
15
|
+
# Snapshot settings
|
|
16
|
+
snapshot:
|
|
17
|
+
dir: snapshots
|
|
18
|
+
pattern: "snapshot-{origin}-{type}-{date}.json"
|
|
@@ -0,0 +1,179 @@
|
|
|
1
|
+
import { resolve, basename } from 'path';
|
|
2
|
+
import { existsSync, mkdirSync, readdirSync, statSync } from 'fs';
|
|
3
|
+
import { execSync } from 'child_process';
|
|
4
|
+
import chalk from 'chalk';
|
|
5
|
+
|
|
6
|
+
// Matches snapshot files: snapshot-<origin>-<type>-<YYYYMMDD>.json
// Capture groups: (1) origin local|test|stage, (2) type inner|outer, (3) compact date.
const SNAPSHOT_PATTERN = /^snapshot-(local|test|stage)-(inner|outer)-(\d{8})\.json$/;
|
|
7
|
+
|
|
8
|
+
/**
|
|
9
|
+
* Resolve a container alias (e.g. "inner" → "directus-inner") or return as-is.
|
|
10
|
+
*/
|
|
11
|
+
/**
 * Resolve a container alias (e.g. "inner" → "directus-inner"); unknown
 * aliases are returned unchanged so raw container names keep working.
 */
function resolveAlias(alias, config) {
  const mapping = config.aliases || {};
  return mapping[alias] || alias;
}
|
|
15
|
+
|
|
16
|
+
/**
|
|
17
|
+
* Get all defined aliases as list of { alias, container } pairs.
|
|
18
|
+
*/
|
|
19
|
+
/**
 * Get all configured aliases as a list of { alias, container } pairs.
 */
function getAliases(config) {
  const pairs = [];
  for (const [alias, container] of Object.entries(config.aliases || {})) {
    pairs.push({ alias, container });
  }
  return pairs;
}
|
|
23
|
+
|
|
24
|
+
/**
|
|
25
|
+
* Run a Directus CLI command inside a container.
|
|
26
|
+
*/
|
|
27
|
+
/**
 * Run a Directus CLI command inside a container.
 *
 * @param {object} p
 * @param {string} p.alias       - container alias or raw container name
 * @param {string[]} [p.args=[]] - CLI arguments passed to directus/cli.js
 * @throws {Error} when the docker command exits non-zero; the execSync
 *                 failure is attached as `cause` so exit code/stderr survive.
 */
export function exec({ config, utils, projectRoot, alias, args = [] }) {
  const container = resolveAlias(alias, config);
  const cmd = ['node', 'directus/cli.js', ...args].join(' ');

  console.log(chalk.cyan(`${alias} (${container}): ${cmd}`));
  try {
    // NOTE(review): args are interpolated into a shell command unescaped;
    // acceptable for a local dev CLI, but never feed it untrusted input.
    execSync(`docker exec ${container} ${cmd}`, { stdio: 'inherit', timeout: 120000 });
  } catch (e) {
    // Preserve the original failure (the old code dropped it).
    throw new Error(`Command failed in ${container}: ${e.message}`, { cause: e });
  }
}
|
|
38
|
+
|
|
39
|
+
// --- Snapshot subcommand ---
|
|
40
|
+
|
|
41
|
+
/**
 * Current local date as a compact YYYYMMDD string (snapshot filename part).
 */
function formatDate() {
  const now = new Date();
  const pad = (value) => String(value).padStart(2, '0');
  return `${now.getFullYear()}${pad(now.getMonth() + 1)}${pad(now.getDate())}`;
}
|
|
48
|
+
|
|
49
|
+
/**
 * Scan `snapshotDir` for files matching SNAPSHOT_PATTERN.
 * Returns [] when the folder does not exist; unreadable files report size 0.
 */
function getLocalSnapshots(snapshotDir) {
  const dir = resolve(snapshotDir);
  if (!existsSync(dir)) return [];

  const snapshots = [];
  for (const fileName of readdirSync(dir)) {
    const m = fileName.match(SNAPSHOT_PATTERN);
    if (!m) continue;
    const filePath = resolve(dir, fileName);
    let fileSize = 0;
    try { fileSize = statSync(filePath).size; } catch {}
    snapshots.push({ name: fileName, path: filePath, origin: m[1], type: m[2], date: m[3], size: fileSize });
  }
  return snapshots;
}
|
|
62
|
+
|
|
63
|
+
/**
 * Select snapshot files to apply.
 * With options.src: validate the file and infer its type from the name
 * (or take options.type). Otherwise scan snapshotDir, apply the optional
 * type/date filters (--date accepts YYYYMMDD or YYYY-MM-DD), and return
 * every snapshot from the latest remaining date.
 * @throws {Error} on missing file/folder or when nothing matches.
 */
function findSnapshotFile(options, snapshotDir) {
  if (options.src) {
    const srcPath = resolve(options.src);
    if (!existsSync(srcPath)) throw new Error(`File not found: ${srcPath}`);
    const fileName = basename(srcPath);
    const match = fileName.match(SNAPSHOT_PATTERN);
    const snapType = options.type || (match ? match[2] : null);
    if (!snapType) throw new Error(`Cannot determine type (inner/outer) from filename. Use --type`);
    return [{ path: srcPath, name: fileName, type: snapType }];
  }

  const dir = resolve(snapshotDir);
  if (!existsSync(dir)) throw new Error(`${snapshotDir}/ folder not found`);

  let candidates = getLocalSnapshots(snapshotDir);
  if (options.type) candidates = candidates.filter(f => f.type === options.type);
  if (options.date) {
    const wanted = options.date.replace(/-/g, '');
    candidates = candidates.filter(f => f.date === wanted);
  }
  if (candidates.length === 0) throw new Error('No matching snapshots found');

  candidates.sort((a, b) => b.date.localeCompare(a.date));
  const latestDate = candidates[0].date;
  return candidates.filter(f => f.date === latestDate);
}
|
|
87
|
+
|
|
88
|
+
/**
 * Export a schema snapshot from each configured Directus container.
 * Writes snapshot-local-<alias>-<YYYYMMDD>.json files into the snapshot
 * folder (dest option > config.snapshot.dir > 'snapshots').
 *
 * @param {object} p
 * @param {string} [p.type] - snapshot only this alias (default: all aliases)
 * @param {string} [p.dest] - destination folder override
 */
export async function snapshotDump({ config, utils, projectRoot, type, dest }) {
  const snapshotDir = dest || config.snapshot?.dir || 'snapshots';
  const aliases = getAliases(config);

  if (aliases.length === 0) {
    console.log(chalk.yellow('No container aliases configured. Add them to .devkit.d/directus.yml'));
    return;
  }

  const types = type ? [type] : aliases.map(a => a.alias);
  const destDir = resolve(snapshotDir);
  if (!existsSync(destDir)) mkdirSync(destDir, { recursive: true });

  const date = formatDate();

  for (const t of types) {
    const container = resolveAlias(t, config);
    const fileName = `snapshot-local-${t}-${date}.json`;
    const destFile = resolve(destDir, fileName);
    const tmpFile = `/tmp/snapshot-${t}.json`;

    console.log(`${chalk.cyan(t)} (${container}): schema snapshot...`);

    // Snapshot to a temp file inside the container first (sh -c so the
    // redirection happens in the container, not on the host) ...
    execSync(
      `docker exec ${container} sh -c 'node directus/cli.js schema snapshot --format json > ${tmpFile}'`,
      { stdio: 'inherit', timeout: 60000 },
    );

    // ... then copy it out to the host destination.
    execSync(`docker cp ${container}:${tmpFile} "${destFile}"`, { timeout: 30000 });

    // Best-effort cleanup of the in-container temp file.
    try { execSync(`docker exec ${container} rm ${tmpFile}`, { stdio: 'pipe', timeout: 10000 }); } catch {}

    console.log(chalk.green(`  \u2192 ${fileName}`));
  }

  console.log(chalk.green('\nDone!'));
}
|
|
125
|
+
|
|
126
|
+
/**
 * Apply schema snapshots to their matching Directus containers.
 * Each snapshot is copied into the container, applied with `schema apply
 * --yes`, and the temp copy is removed afterwards (even on failure).
 *
 * @param {object} p
 * @param {string} [p.type] - restrict to 'inner' or 'outer'
 * @param {string} [p.date] - snapshot date (YYYYMMDD or YYYY-MM-DD)
 * @param {string} [p.src]  - explicit path to a snapshot .json file
 * @throws {Error} when no snapshot matches (via findSnapshotFile).
 */
export async function snapshotLoad({ config, utils, projectRoot, type, date, src }) {
  const snapshotDir = config.snapshot?.dir || 'snapshots';
  const snapshots = findSnapshotFile({ type, date, src }, snapshotDir);

  for (const snap of snapshots) {
    const container = resolveAlias(snap.type, config);
    const tmpPath = `/tmp/${snap.name}`;

    console.log(`\n${chalk.cyan(snap.type)} (${container}): applying ${snap.name}`);

    // Copy the snapshot into the container so the CLI can read it locally.
    execSync(`docker cp "${snap.path}" ${container}:${tmpPath}`, { timeout: 30000 });

    try {
      execSync(
        `docker exec ${container} node directus/cli.js schema apply --yes ${tmpPath}`,
        { stdio: 'inherit', timeout: 120000 },
      );
      console.log(chalk.green(`  ${snap.type}: applied!`));
    } finally {
      // Always remove the temp copy, even when `schema apply` fails.
      try { execSync(`docker exec ${container} rm ${tmpPath}`, { stdio: 'pipe', timeout: 10000 }); } catch {}
    }
  }

  console.log(chalk.green('\nDone!'));
}
|
|
151
|
+
|
|
152
|
+
// Per-origin accent color for snapshot `ls` output; unknown origins get white.
const ORIGIN_COLORS = { local: chalk.green, test: chalk.blue, stage: chalk.magenta };
|
|
153
|
+
|
|
154
|
+
/**
 * Print local schema snapshots grouped by origin, newest first.
 */
export function snapshotLs({ config, utils }) {
  const snapshotDir = config.snapshot?.dir || 'snapshots';
  const files = getLocalSnapshots(snapshotDir);

  if (files.length === 0) {
    console.log(`No snapshots found (${snapshotDir}/ folder).`);
    return;
  }

  const byOrigin = {};
  for (const file of files) {
    (byOrigin[file.origin] ||= []).push(file);
  }

  for (const origin of Object.keys(byOrigin).sort()) {
    const paint = ORIGIN_COLORS[origin] || chalk.white;
    const items = byOrigin[origin].sort((a, b) => b.date.localeCompare(a.date));
    console.log(paint(`\n  ${origin} (${items.length} files):`));
    for (const item of items) {
      // Re-insert dashes into the compact YYYYMMDD date for display.
      const dateStr = `${item.date.slice(0, 4)}-${item.date.slice(4, 6)}-${item.date.slice(6, 8)}`;
      console.log(`   ${chalk.green(dateStr)} ${chalk.cyan(item.type)} ${utils.formatSize(item.size)}`);
    }
  }

  console.log(`\nTotal: ${files.length} files in ${snapshotDir}/`);
}
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
import * as directus from './directus.mjs';
|
|
2
|
+
|
|
3
|
+
/** Print a user-facing error message and terminate with a non-zero status. */
function handleError(error) {
  console.error(`Error: ${error.message}`);
  process.exit(1);
}
|
|
7
|
+
|
|
8
|
+
/**
|
|
9
|
+
* Register directus plugin commands.
|
|
10
|
+
* @param {import('commander').Command} program
|
|
11
|
+
* @param {object} context
|
|
12
|
+
*/
|
|
13
|
+
/**
 * Register directus plugin commands on the devkit CLI.
 * @param {import('commander').Command} program
 * @param {object} context - devkit plugin context (config, utils, roots)
 */
export function register(program, { config, utils, projectRoot, devkitRoot }) {
  const context = { config, utils, projectRoot };
  // Wrap async command handlers so rejections route through handleError.
  const runAsync = (fn) => (opts) => fn({ ...context, ...opts }).catch(handleError);

  const directusCmd = program
    .command('directus')
    .description(config.description || 'Directus CLI wrapper');

  // --- exec: run arbitrary directus CLI command ---

  directusCmd
    .command('exec <alias> [args...]')
    .description('Run a Directus CLI command inside a container (e.g. devkit directus exec inner schema snapshot)')
    .action((alias, args) => {
      try { directus.exec({ ...context, alias, args }); } catch (e) { handleError(e); }
    });

  // --- snapshot subcommand ---

  const snapshotCmd = directusCmd
    .command('snapshot')
    .description('Directus schema snapshots');

  snapshotCmd
    .command('ls')
    .description('Show local schema snapshots')
    .action(() => { try { directus.snapshotLs(context); } catch (e) { handleError(e); } });

  snapshotCmd
    .command('dump')
    .description('Export schema snapshot from Directus containers')
    .option('--type <type>', 'Type alias: inner or outer (default: all)')
    .option('--dest <dest>', 'Destination folder')
    .action(runAsync(directus.snapshotDump));

  snapshotCmd
    .command('load')
    .description('Apply schema snapshot to Directus container')
    .option('--type <type>', 'Type alias: inner or outer')
    .option('--date <date>', 'Snapshot date (YYYYMMDD or YYYY-MM-DD)')
    .option('--src <path>', 'Path to .json file')
    .action(runAsync(directus.snapshotLoad));
}
|