project-devkit 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cli.mjs +173 -0
- package/core/loader.mjs +151 -0
- package/core/utils.mjs +259 -0
- package/package.json +27 -0
- package/plugins/backups/README.md +88 -0
- package/plugins/backups/backups.mjs +342 -0
- package/plugins/backups/config.yml +31 -0
- package/plugins/backups/index.mjs +53 -0
- package/plugins/directus/config.yml +18 -0
- package/plugins/directus/directus.mjs +179 -0
- package/plugins/directus/index.mjs +54 -0
- package/plugins/docker/config.yml +5 -0
- package/plugins/docker/index.mjs +42 -0
- package/plugins/env/config.yml +40 -0
- package/plugins/env/env.mjs +249 -0
- package/plugins/env/index.mjs +93 -0
- package/plugins/env/infisical.mjs +152 -0
package/cli.mjs
ADDED
|
@@ -0,0 +1,173 @@
|
|
|
1
|
+
#!/usr/bin/env node

import { Command } from 'commander';
import { fileURLToPath } from 'url';
import { dirname, resolve } from 'path';
import { existsSync, readdirSync, readFileSync, mkdirSync, writeFileSync } from 'fs';
import chalk from 'chalk';
import { parse as parseYaml } from 'yaml';
import { loadPlugins, findProject, listAvailablePlugins, deepMerge } from './core/loader.mjs';
import * as utils from './core/utils.mjs';

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const devkitRoot = __dirname;

const program = new Command();

program
  .name('devkit')
  .description('Plugin-based CLI toolkit for project infrastructure management')
  .version('2.0.0');

/**
 * Resolve a plugin's effective config: the plugin's shipped defaults
 * (plugins/{name}/config.yml) deep-merged with project overrides
 * (.devkit.d/{name}.yml). Mirrors the layering used by core/loader.mjs;
 * previously this logic was duplicated inline twice inside `setup`.
 *
 * @param {string} pluginName - Plugin directory name (e.g. 'env').
 * @param {string} projectRoot - Project root containing .devkit.d/.
 * @returns {object} Merged configuration object.
 */
function loadMergedConfig(pluginName, projectRoot) {
  let config = {};
  const defaultsPath = resolve(devkitRoot, 'plugins', pluginName, 'config.yml');
  if (existsSync(defaultsPath)) {
    config = parseYaml(readFileSync(defaultsPath, 'utf-8')) || {};
  }
  const overridePath = resolve(projectRoot, '.devkit.d', `${pluginName}.yml`);
  if (existsSync(overridePath)) {
    const overrides = parseYaml(readFileSync(overridePath, 'utf-8')) || {};
    config = deepMerge(config, overrides);
  }
  return config;
}

// --- Core commands (always available) ---

program
  .command('plugins')
  .description('List available plugins')
  .action(() => {
    const available = listAvailablePlugins(devkitRoot);

    console.log(chalk.bold('\nAvailable plugins:\n'));
    for (const p of available) {
      console.log(` ${chalk.cyan(p.name)} ${chalk.dim(p.description)}`);
    }

    // Cross-check the project's manifest against what is actually installed.
    try {
      const { plugins } = findProject();
      console.log(chalk.bold('\nEnabled in this project:\n'));
      for (const name of plugins) {
        const isAvailable = available.some(a => a.name === name);
        const status = isAvailable ? chalk.green('\u2713') : chalk.red('\u2717 not found');
        console.log(` ${status} ${name}`);
      }
    } catch {
      // findProject throws when no manifest exists anywhere up the tree —
      // that is fine here; just inform the user.
      console.log(chalk.dim('\n No .devkit manifest found in current directory tree.'));
    }
    console.log();
  });

program
  .command('init')
  .description('Create .devkit manifest and .devkit.d/ config directory')
  .action(async () => {
    const cwd = process.cwd();
    const manifestPath = resolve(cwd, '.devkit');
    const configDir = resolve(cwd, '.devkit.d');

    if (existsSync(manifestPath)) {
      console.log(chalk.yellow('.devkit already exists.'));
    } else {
      // Seed the manifest with every available plugin commented out so the
      // user only has to uncomment what they need.
      const available = listAvailablePlugins(devkitRoot);
      const pluginList = available.map(p => `# ${p.name}`).join('\n');
      writeFileSync(manifestPath, `# Enabled devkit plugins (one per line)\n${pluginList}\n`);
      console.log(chalk.green('Created .devkit'));
    }

    if (!existsSync(configDir)) {
      mkdirSync(configDir);
      console.log(chalk.green('Created .devkit.d/'));
    }

    console.log(chalk.dim('\nEdit .devkit to uncomment the plugins you need.'));
    console.log(chalk.dim('Add plugin overrides in .devkit.d/{plugin}.yml'));
  });

program
  .command('setup')
  .description('Full project setup: env files, Docker networks, required directories')
  .option('--env-name <env>', 'Infisical environment', 'dev')
  .option('--force', 'Overwrite existing .env files')
  .action(async (opts) => {
    try {
      const { projectRoot, plugins } = findProject();

      // 1. env init (if env plugin is enabled)
      if (plugins.includes('env')) {
        console.log(chalk.bold('\n=== Initializing .env files ==='));
        const config = loadMergedConfig('env', projectRoot);

        const envModule = await import('./plugins/env/env.mjs');
        await envModule.init({
          config, utils, projectRoot,
          envName: opts.envName, force: opts.force,
        });
      }

      // 2. Docker networks (if docker plugin is enabled)
      if (plugins.includes('docker')) {
        console.log(chalk.bold('\n=== Docker networks ==='));
        const config = loadMergedConfig('docker', projectRoot);

        const composeFile = config.compose_file || 'docker-compose.dev.yml';
        const networks = utils.getComposeExternalNetworks(projectRoot, composeFile);
        if (networks.length === 0) {
          console.log('  No external networks found.');
        } else {
          const created = utils.ensureDockerNetworks(projectRoot, composeFile);
          for (const net of networks) {
            if (created.includes(net)) {
              console.log(chalk.green(`  Created network: ${net}`));
            } else {
              console.log(`  Network exists: ${net}`);
            }
          }
        }
      }

      // 3. Upload directories
      // NOTE(review): directory list is hard-coded to this project's layout;
      // consider moving it into a plugin config.
      console.log(chalk.bold('\n=== Directories ==='));
      const dirs = ['apps/inner/uploads', 'apps/outer/uploads'];
      for (const dir of dirs) {
        const fullPath = resolve(projectRoot, dir);
        if (!existsSync(fullPath)) {
          mkdirSync(fullPath, { recursive: true });
          console.log(chalk.green(`  Created: ${dir}`));
        } else {
          console.log(`  Exists: ${dir}`);
        }
      }

      console.log(chalk.green('\n\u2713 Project ready'));
    } catch (e) {
      console.error(`Error: ${e.message}`);
      process.exit(1);
    }
  });

// --- Load plugins dynamically ---

try {
  await loadPlugins(program, utils, devkitRoot);
} catch (e) {
  // If no .devkit found, still allow core commands (plugins, init)
  if (!process.argv.slice(2).some(a => ['plugins', 'init', '--help', '-h', '--version', '-V'].includes(a))) {
    console.error(chalk.red(e.message));
    process.exit(1);
  }
}

program.parse();
|
package/core/loader.mjs
ADDED
|
@@ -0,0 +1,151 @@
|
|
|
1
|
+
import { readFileSync, existsSync, readdirSync, statSync } from 'fs';
|
|
2
|
+
import { resolve, dirname } from 'path';
|
|
3
|
+
import { parse as parseYaml } from 'yaml';
|
|
4
|
+
import chalk from 'chalk';
|
|
5
|
+
|
|
6
|
+
/**
 * Find the project root by walking up from `startDir` looking for a .devkit
 * manifest file.
 *
 * The loop terminates when `dirname(dir) === dir`, i.e. at the filesystem
 * root. The previous `dir !== resolve('/')` condition never matched Windows
 * drive roots (`C:\`), causing an infinite loop, and also skipped checking
 * the root directory itself.
 *
 * @param {string} [startDir=process.cwd()] - Directory to start the walk from.
 * @returns {{ projectRoot: string, plugins: string[] }}
 * @throws {Error} When no .devkit manifest exists anywhere up the tree.
 */
export function findProject(startDir = process.cwd()) {
  let dir = resolve(startDir);

  for (;;) {
    const manifestPath = resolve(dir, '.devkit');
    if (existsSync(manifestPath)) {
      const plugins = parseManifest(manifestPath);
      return { projectRoot: dir, plugins };
    }
    const parent = dirname(dir);
    if (parent === dir) break; // reached the filesystem root (any platform)
    dir = parent;
  }

  throw new Error(
    'No .devkit manifest found.\n' +
    'Create a .devkit file in your project root listing the plugins you need:\n\n' +
    '  # .devkit\n' +
    '  env\n' +
    '  backups\n' +
    '  docker\n'
  );
}
|
|
32
|
+
|
|
33
|
+
/**
 * Parse a .devkit manifest: plain text, one plugin name per line.
 * Blank lines and lines starting with '#' are ignored; entries are trimmed.
 *
 * @param {string} filePath - Path to the manifest file.
 * @returns {string[]} Enabled plugin names, in file order.
 */
function parseManifest(filePath) {
  const rawLines = readFileSync(filePath, 'utf-8').split('\n');
  const names = [];
  for (const raw of rawLines) {
    const entry = raw.trim();
    if (entry && !entry.startsWith('#')) {
      names.push(entry);
    }
  }
  return names;
}
|
|
43
|
+
|
|
44
|
+
/**
 * Deep merge two plain objects. Nested plain objects are merged recursively;
 * arrays and scalar values from `source` replace the `target` value. Neither
 * input is mutated; a new object is returned.
 *
 * Keys that would rewrite the prototype chain ('__proto__', 'constructor',
 * 'prototype') are skipped: this function merges YAML/JSON parsed from
 * project files, and assigning `result['__proto__'] = …` would silently
 * replace the result's prototype (prototype pollution).
 *
 * @param {object} target - Base object (lower precedence).
 * @param {object} source - Overrides (higher precedence).
 * @returns {object} New merged object.
 */
export function deepMerge(target, source) {
  const isPlainObject = (v) =>
    v !== null && typeof v === 'object' && !Array.isArray(v);

  const result = { ...target };
  for (const key of Object.keys(source)) {
    if (key === '__proto__' || key === 'constructor' || key === 'prototype') {
      continue; // never allow prototype-chain keys from merged config
    }
    if (isPlainObject(source[key]) && isPlainObject(result[key])) {
      result[key] = deepMerge(result[key], source[key]);
    } else {
      result[key] = source[key];
    }
  }
  return result;
}
|
|
65
|
+
|
|
66
|
+
/**
 * Build a plugin's effective config from two layers:
 *   1. the plugin's shipped defaults ({pluginDir}/config.yml),
 *   2. project-level overrides (.devkit.d/{pluginName}.yml), which win.
 * Missing files contribute an empty layer.
 *
 * @param {string} pluginDir - Plugin directory inside the devkit package.
 * @param {string} projectRoot - Project root containing .devkit.d/.
 * @param {string} pluginName - Plugin name used for the override file name.
 * @returns {object} Merged configuration.
 */
function loadPluginConfig(pluginDir, projectRoot, pluginName) {
  const readYaml = (p) => parseYaml(readFileSync(p, 'utf-8')) || {};

  const defaultsPath = resolve(pluginDir, 'config.yml');
  let config = existsSync(defaultsPath) ? readYaml(defaultsPath) : {};

  const overridePath = resolve(projectRoot, '.devkit.d', `${pluginName}.yml`);
  if (existsSync(overridePath)) {
    config = deepMerge(config, readYaml(overridePath));
  }

  return config;
}
|
|
86
|
+
|
|
87
|
+
/**
 * Load and register every plugin listed in the project's .devkit manifest.
 *
 * For each plugin: resolves plugins/{name}/index.mjs under the devkit
 * package, merges its config via loadPluginConfig(), dynamically imports it
 * and calls its exported register(). Missing plugins or plugins without a
 * register() export produce a warning; import/registration errors are
 * reported per-plugin and do not stop the remaining plugins.
 *
 * @param {import('commander').Command} program
 * @param {object} utils - Shared utilities from core/utils.mjs.
 * @param {string} devkitRoot - Path to the devkit package root.
 * @returns {Promise<{ projectRoot: string, plugins: string[] }>}
 * @throws When no .devkit manifest is found (propagated from findProject).
 */
export async function loadPlugins(program, utils, devkitRoot) {
  const { projectRoot, plugins } = findProject();
  const pluginsDir = resolve(devkitRoot, 'plugins');

  for (const pluginName of plugins) {
    const pluginDir = resolve(pluginsDir, pluginName);
    const entryPoint = resolve(pluginDir, 'index.mjs');

    if (!existsSync(entryPoint)) {
      console.warn(chalk.yellow(` Warning: plugin '${pluginName}' not found at ${pluginDir}/`));
      continue;
    }

    const config = loadPluginConfig(pluginDir, projectRoot, pluginName);

    try {
      const mod = await import(entryPoint);
      if (typeof mod.register !== 'function') {
        console.warn(chalk.yellow(` Warning: plugin '${pluginName}' has no register() function`));
        continue;
      }
      // register() may also throw; it is deliberately inside the try so a
      // faulty plugin cannot take down the whole CLI.
      mod.register(program, { config, utils, projectRoot, devkitRoot });
    } catch (e) {
      console.error(chalk.red(` Error loading plugin '${pluginName}': ${e.message}`));
    }
  }

  return { projectRoot, plugins };
}
|
|
124
|
+
|
|
125
|
+
/**
 * List all plugins shipped in the devkit package's plugins/ directory.
 *
 * A plugin is any subdirectory; its description is read from the optional
 * `description` field of its config.yml. Entries that cannot be stat'd
 * (broken symlinks, races) are skipped silently.
 *
 * @param {string} devkitRoot - Path to the devkit package root.
 * @returns {{ name: string, description: string }[]}
 */
export function listAvailablePlugins(devkitRoot) {
  const pluginsDir = resolve(devkitRoot, 'plugins');
  if (!existsSync(pluginsDir)) return [];

  const found = [];
  for (const name of readdirSync(pluginsDir)) {
    const dir = resolve(pluginsDir, name);

    let isDirectory = false;
    try {
      isDirectory = statSync(dir).isDirectory();
    } catch {
      // unreadable entry — treat as not a plugin
    }
    if (!isDirectory) continue;

    let description = '';
    const configPath = resolve(dir, 'config.yml');
    if (existsSync(configPath)) {
      const cfg = parseYaml(readFileSync(configPath, 'utf-8')) || {};
      description = cfg.description || '';
    }

    found.push({ name, description });
  }

  return found;
}
|
package/core/utils.mjs
ADDED
|
@@ -0,0 +1,259 @@
|
|
|
1
|
+
import { execSync } from 'child_process';
|
|
2
|
+
import { readFileSync, writeFileSync, existsSync } from 'fs';
|
|
3
|
+
import { resolve, basename } from 'path';
|
|
4
|
+
import chalk from 'chalk';
|
|
5
|
+
import { parse as parseYaml } from 'yaml';
|
|
6
|
+
import { InfisicalClient } from '@infisical/sdk';
|
|
7
|
+
|
|
8
|
+
// --- .env ---
|
|
9
|
+
|
|
10
|
+
/**
 * Load variables from a .env file into process.env. No-op if the file does
 * not exist.
 *
 * Lines are KEY=VALUE; blank lines and '#' comments are skipped; matching
 * single or double quotes around the value are stripped. Keys already
 * present in process.env are never overridden (first definition wins — this
 * also means the first occurrence of a duplicated key in the file wins).
 *
 * @param {string} filePath - Path to the .env file.
 */
export function loadEnv(filePath) {
  if (!existsSync(filePath)) return;

  for (const rawLine of readFileSync(filePath, 'utf-8').split('\n')) {
    const line = rawLine.trim();
    if (!line || line.startsWith('#')) continue;

    const eq = line.indexOf('=');
    if (eq === -1) continue;

    const key = line.slice(0, eq).trim();
    let value = line.slice(eq + 1).trim();

    const doubleQuoted = value.startsWith('"') && value.endsWith('"');
    const singleQuoted = value.startsWith("'") && value.endsWith("'");
    if (doubleQuoted || singleQuoted) {
      value = value.slice(1, -1);
    }

    if (key && !(key in process.env)) {
      process.env[key] = value;
    }
  }
}
|
|
29
|
+
|
|
30
|
+
/**
 * Parse .env-style content into a plain object.
 *
 * Blank lines, '#' comments and lines without '=' are skipped; keys and
 * values are trimmed; matching single or double quotes around values are
 * stripped. Later occurrences of a key overwrite earlier ones.
 *
 * @param {string} content - Raw .env file content.
 * @returns {Record<string, string>}
 */
export function parseEnv(content) {
  const vars = {};
  content.split('\n').forEach((rawLine) => {
    const line = rawLine.trim();
    if (!line || line.startsWith('#')) return;

    const eq = line.indexOf('=');
    if (eq === -1) return;

    const key = line.slice(0, eq).trim();
    let value = line.slice(eq + 1).trim();

    const doubleQuoted = value.startsWith('"') && value.endsWith('"');
    const singleQuoted = value.startsWith("'") && value.endsWith("'");
    if (doubleQuoted || singleQuoted) {
      value = value.slice(1, -1);
    }

    if (key) vars[key] = value;
  });
  return vars;
}
|
|
47
|
+
|
|
48
|
+
/**
 * Merge secret key/value pairs into .env file content.
 *
 * Keys that already have an assignment line are updated in place (first
 * matching line); new keys are appended at the end. Comments, blank lines
 * and unrelated lines are preserved verbatim.
 *
 * Present keys are tracked in a Set rather than via `key in parsedObject`:
 * the `in` operator walks the prototype chain, so secrets named 'toString',
 * 'constructor', etc. were falsely considered present and silently dropped.
 *
 * @param {string} envContent - Raw .env file content.
 * @param {Record<string, string>} secrets - Keys/values to merge in.
 * @returns {{ content: string, updated: number }} New content plus the
 *   number of keys written (replaced + appended).
 */
export function mergeSecretsIntoEnv(envContent, secrets) {
  const lines = envContent.split('\n');

  // Collect keys that already have a KEY=... line (same line rules as parseEnv).
  const presentKeys = new Set();
  for (const line of lines) {
    const trimmed = line.trim();
    if (!trimmed || trimmed.startsWith('#')) continue;
    const idx = trimmed.indexOf('=');
    if (idx === -1) continue;
    const key = trimmed.slice(0, idx).trim();
    if (key) presentKeys.add(key);
  }

  let updated = 0;
  for (const [key, value] of Object.entries(secrets)) {
    if (presentKeys.has(key)) {
      for (let i = 0; i < lines.length; i++) {
        const trimmed = lines[i].trim();
        if (trimmed.startsWith(`${key}=`) || trimmed.startsWith(`${key} =`)) {
          lines[i] = `${key}=${value}`;
          updated++;
          break;
        }
      }
    } else {
      lines.push(`${key}=${value}`);
      updated++;
    }
  }

  return { content: lines.join('\n'), updated };
}
|
|
71
|
+
|
|
72
|
+
// --- Docker / Compose ---
|
|
73
|
+
|
|
74
|
+
/**
 * Read and parse a docker-compose YAML file from the project root.
 *
 * @param {string} projectRoot - Project root directory.
 * @param {string} [composeFile='docker-compose.dev.yml'] - File name/path
 *   relative to the project root.
 * @returns {object} Parsed compose document.
 * @throws {Error} When the compose file does not exist.
 */
export function parseComposeFile(projectRoot, composeFile = 'docker-compose.dev.yml') {
  const composePath = resolve(projectRoot, composeFile);
  if (!existsSync(composePath)) {
    throw new Error(`Compose file not found: ${composePath}`);
  }
  const raw = readFileSync(composePath, 'utf-8');
  return parseYaml(raw);
}
|
|
81
|
+
|
|
82
|
+
/**
 * Return the names of networks declared `external` in a compose file.
 *
 * @param {string} projectRoot - Project root directory.
 * @param {string} composeFile - Compose file relative to the root.
 * @returns {string[]} Names of networks whose config has a truthy `external`.
 */
export function getComposeExternalNetworks(projectRoot, composeFile) {
  const doc = parseComposeFile(projectRoot, composeFile);
  const declared = doc.networks || {};
  return Object.entries(declared)
    .filter(([, cfg]) => cfg && cfg.external)
    .map(([name]) => name);
}
|
|
93
|
+
|
|
94
|
+
/**
 * Create any compose-declared external Docker networks that do not exist yet.
 *
 * @param {string} projectRoot - Project root directory.
 * @param {string} composeFile - Compose file relative to the root.
 * @returns {string[]} Names of networks that were created by this call.
 */
export function ensureDockerNetworks(projectRoot, composeFile) {
  const wanted = getComposeExternalNetworks(projectRoot, composeFile);
  if (wanted.length === 0) return [];

  const lsOutput = execSync('docker network ls --format "{{.Name}}"', { encoding: 'utf-8' });
  const known = new Set(lsOutput.trim().split('\n').filter(Boolean));

  const created = [];
  for (const net of wanted) {
    if (known.has(net)) continue;
    // NOTE(review): `net` comes from the compose file and is interpolated
    // into a shell command unquoted — assumes well-formed network names;
    // confirm compose files are trusted input.
    execSync(`docker network create ${net}`, { encoding: 'utf-8' });
    created.push(net);
  }
  return created;
}
|
|
110
|
+
|
|
111
|
+
/**
 * Extract per-variant Postgres/app container info from a compose file.
 *
 * Scans services named exactly `postgres-inner` / `postgres-outer`; for each
 * one, pairs it with the `directus-{variant}` service and resolves DB
 * credentials from the service's env_file entries.
 *
 * @param {string} projectRoot - Project root directory.
 * @param {string} composeFile - Compose file relative to the root.
 * @returns {Record<string, { pgContainer: string, appContainer: string,
 *   dbUser: string, dbName: string }>} Keyed by variant ('inner'/'outer').
 */
export function getComposeContainers(projectRoot, composeFile) {
  const doc = parseComposeFile(projectRoot, composeFile);
  const services = doc.services || {};
  const result = {};

  for (const [name, svc] of Object.entries(services)) {
    // Only services named postgres-inner / postgres-outer are of interest.
    const match = name.match(/^postgres-(inner|outer)$/);
    if (!match) continue;
    const variant = match[1];

    // Compose falls back to the service name when container_name is unset.
    const containerName = svc.container_name || name;

    // env_file may be a single string or a list; normalize to an array.
    const envFiles = Array.isArray(svc.env_file) ? svc.env_file : [svc.env_file].filter(Boolean);
    // Hard-coded fallbacks used when no env_file provides DB_USER/DB_DATABASE.
    let dbUser = 'postgres';
    let dbName = 'crosses';

    // Later env files override earlier ones — order matters here.
    for (const ef of envFiles) {
      const envPath = resolve(projectRoot, ef);
      if (!existsSync(envPath)) continue;
      const vars = parseEnv(readFileSync(envPath, 'utf-8'));
      if (vars.DB_USER) dbUser = vars.DB_USER;
      if (vars.DB_DATABASE) dbName = vars.DB_DATABASE;
    }

    // Pair with the application service of the same variant; if the service
    // is absent from the compose file, fall back to its conventional name.
    const appServiceName = `directus-${variant}`;
    const appSvc = services[appServiceName];
    const appContainer = appSvc?.container_name || appServiceName;

    result[variant] = { pgContainer: containerName, appContainer, dbUser, dbName };
  }

  return result;
}
|
|
144
|
+
|
|
145
|
+
/**
 * Run a command inside a Docker container via `docker exec`.
 *
 * @param {string} container - Container name or id.
 * @param {string} cmd - Command line to execute inside the container.
 * @param {object} [options]
 * @param {boolean} [options.interactive] - Pass -i to docker exec.
 * @param {string} [options.encoding='utf-8']
 * @param {string|Array} [options.stdio='pipe']
 * @param {number} [options.timeout=60000] - Milliseconds.
 * @param {object} [options.execOptions] - Extra execSync options (win last).
 * @returns {string|Buffer} The command output per `encoding`/`stdio`.
 */
export function dockerExec(container, cmd, options = {}) {
  const { interactive, encoding, stdio, timeout, execOptions } = options;
  const flags = interactive ? '-i' : '';
  return execSync(`docker exec ${flags} ${container} ${cmd}`, {
    encoding: encoding || 'utf-8',
    stdio: stdio || 'pipe',
    timeout: timeout || 60000,
    ...execOptions,
  });
}
|
|
154
|
+
|
|
155
|
+
// --- SSH ---
|
|
156
|
+
|
|
157
|
+
/**
 * Run a command on a remote host over SSH and return trimmed stdout.
 *
 * Embedded single quotes in `cmd` are escaped as '\'' so a command
 * containing quotes no longer breaks out of the single-quoted remote
 * invocation (previously this corrupted the command and allowed injection).
 *
 * @param {string} host - SSH destination (e.g. user@host).
 * @param {string} cmd - Remote command line.
 * @returns {string} Trimmed stdout.
 * @throws If ssh exits non-zero or the 30s timeout elapses.
 */
export function sshExec(host, cmd) {
  const quoted = cmd.replace(/'/g, "'\\''");
  return execSync(`ssh ${host} '${quoted}'`, { encoding: 'utf-8', timeout: 30000 }).trim();
}
|
|
160
|
+
|
|
161
|
+
/**
 * Best-effort read of a remote file over SSH.
 *
 * @param {string} host - SSH destination.
 * @param {string} path - Remote file path.
 * @returns {string} File contents, or '' when the read fails (missing file,
 *   connection error, etc.).
 */
export function sshReadFile(host, path) {
  try {
    return sshExec(host, `cat ${path}`);
  } catch {
    return '';
  }
}
|
|
164
|
+
|
|
165
|
+
/**
 * Write content to a remote file over SSH by piping it to `cat > path`.
 *
 * @param {string} host - SSH destination.
 * @param {string} path - Remote destination path.
 * @param {string} content - Data to write (sent via stdin).
 */
export function sshWriteFile(host, path, content) {
  const remote = `ssh ${host} 'cat > ${path}'`;
  execSync(remote, { input: content, encoding: 'utf-8', timeout: 10000 });
}
|
|
168
|
+
|
|
169
|
+
/**
 * Download a remote file with scp, streaming scp's own progress output
 * straight to the terminal (stdio: 'inherit').
 *
 * @param {string} host - SSH destination.
 * @param {string} remotePath - Source path on the remote host.
 * @param {string} localPath - Local destination path.
 */
export function scpDownload(host, remotePath, localPath) {
  const command = `scp ${host}:${remotePath} ${localPath}`;
  execSync(command, { stdio: 'inherit' });
}
|
|
172
|
+
|
|
173
|
+
// --- Infisical SDK ---
|
|
174
|
+
|
|
175
|
+
const infisicalClients = new Map();
|
|
176
|
+
|
|
177
|
+
export async function getInfisicalClient({ siteUrl, clientId, clientSecret }) {
|
|
178
|
+
const key = `${siteUrl}:${clientId}`;
|
|
179
|
+
if (infisicalClients.has(key)) return infisicalClients.get(key);
|
|
180
|
+
|
|
181
|
+
const client = new InfisicalClient({
|
|
182
|
+
siteUrl,
|
|
183
|
+
auth: {
|
|
184
|
+
universalAuth: { clientId, clientSecret },
|
|
185
|
+
},
|
|
186
|
+
});
|
|
187
|
+
infisicalClients.set(key, client);
|
|
188
|
+
return client;
|
|
189
|
+
}
|
|
190
|
+
|
|
191
|
+
export async function fetchInfisicalSecrets({ siteUrl, clientId, clientSecret, projectId, envSlug, secretPath }) {
|
|
192
|
+
const client = await getInfisicalClient({ siteUrl, clientId, clientSecret });
|
|
193
|
+
return client.listSecrets({
|
|
194
|
+
projectId,
|
|
195
|
+
environment: envSlug,
|
|
196
|
+
path: secretPath,
|
|
197
|
+
});
|
|
198
|
+
}
|
|
199
|
+
|
|
200
|
+
// --- YaDisk API ---
|
|
201
|
+
|
|
202
|
+
/**
 * Perform an authenticated GET against the Yandex.Disk resources API.
 *
 * @param {string} path - Suffix appended to /v1/disk/resources (e.g. '/download').
 * @param {string} token - OAuth token.
 * @param {Record<string, string>} [params] - Query string parameters.
 * @returns {Promise<object>} Parsed JSON response body.
 * @throws {Error} On any non-2xx response, with status and body text.
 */
async function yadiskRequest(path, token, params = {}) {
  const url = new URL(`https://cloud-api.yandex.net/v1/disk/resources${path}`);
  Object.entries(params).forEach(([name, value]) => url.searchParams.set(name, value));

  const response = await fetch(url, { headers: { Authorization: `OAuth ${token}` } });
  if (response.ok) {
    return response.json();
  }
  const body = await response.text();
  throw new Error(`YaDisk API (${response.status}): ${body}`);
}
|
|
212
|
+
|
|
213
|
+
/**
 * List up to 200 items in a Yandex.Disk folder, newest-modified first.
 *
 * @param {string} token - OAuth token.
 * @param {string} folder - Remote folder path on the disk.
 * @returns {Promise<object[]>} Resource items ([] when the folder is empty).
 */
export async function listYaDiskDumps(token, folder) {
  const listing = await yadiskRequest('', token, { path: folder, limit: '200', sort: '-modified' });
  return listing._embedded?.items || [];
}
|
|
217
|
+
|
|
218
|
+
/**
 * Resolve the direct download URL for a file stored on Yandex.Disk.
 *
 * @param {string} token - OAuth token.
 * @param {string} filePath - Remote file path on the disk.
 * @returns {Promise<string>} Temporary direct download href.
 */
export async function getYaDiskDownloadUrl(token, filePath) {
  const { href } = await yadiskRequest('/download', token, { path: filePath });
  return href;
}
|
|
222
|
+
|
|
223
|
+
/**
 * Download a URL to a local file, rendering a console progress bar when the
 * total size is known.
 *
 * The percentage is clamped to 100: when the caller-supplied `size`
 * understates the real content length, the old code produced pct > 100 and
 * `'░'.repeat(50 - filled)` threw a RangeError on the negative count.
 *
 * @param {string} url - Direct download URL.
 * @param {string} destPath - Local destination file path.
 * @param {number} [size=0] - Expected size in bytes; when 0, falls back to
 *   the Content-Length response header.
 * @throws {Error} On a non-2xx HTTP response.
 */
export async function downloadFromUrl(url, destPath, size = 0) {
  const resp = await fetch(url);
  if (!resp.ok) throw new Error(`Download failed: ${resp.status}`);

  const total = size || parseInt(resp.headers.get('content-length') || '0', 10);
  const reader = resp.body.getReader();
  const chunks = [];
  let downloaded = 0;

  const fileName = basename(destPath);

  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    chunks.push(value);
    downloaded += value.length;
    if (total) {
      // Clamp so an understated total can never push the bar past 100%.
      const pct = Math.min(100, Math.round((downloaded / total) * 100));
      const filled = Math.round(pct / 2);
      const bar = '\u2588'.repeat(filled) + '\u2591'.repeat(50 - filled);
      process.stdout.write(`\r ${fileName} ${bar} ${pct}%`);
    }
  }
  if (total) process.stdout.write('\n');

  // NOTE(review): the whole payload is buffered in memory before writing;
  // fine for DB dumps of moderate size, consider streaming for huge files.
  const buffer = Buffer.concat(chunks);
  writeFileSync(destPath, buffer);
}
|
|
250
|
+
|
|
251
|
+
// --- Formatting ---
|
|
252
|
+
|
|
253
|
+
/**
 * Format a byte count as a human-readable string with one decimal place.
 *
 * @param {number} bytes - Size in bytes.
 * @returns {string} e.g. "512.0 B", "1.5 MB", "2.0 TB".
 */
export function formatSize(bytes) {
  const units = ['B', 'KB', 'MB', 'GB'];
  let amount = bytes;
  for (const unit of units) {
    if (amount < 1024) {
      return `${amount.toFixed(1)} ${unit}`;
    }
    amount /= 1024;
  }
  return `${amount.toFixed(1)} TB`;
}
|
package/package.json
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "project-devkit",
|
|
3
|
+
"version": "2.0.0",
|
|
4
|
+
"description": "Plugin-based CLI toolkit for project infrastructure management",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"bin": {
|
|
7
|
+
"devkit": "./cli.mjs"
|
|
8
|
+
},
|
|
9
|
+
"files": [
|
|
10
|
+
"cli.mjs",
|
|
11
|
+
"core/",
|
|
12
|
+
"plugins/",
|
|
13
|
+
"README.md"
|
|
14
|
+
],
|
|
15
|
+
"dependencies": {
|
|
16
|
+
"@infisical/sdk": "^2.0.0",
|
|
17
|
+
"@inquirer/prompts": "^7.0.0",
|
|
18
|
+
"chalk": "^5.0.0",
|
|
19
|
+
"commander": "^13.0.0",
|
|
20
|
+
"yaml": "^2.8.3"
|
|
21
|
+
},
|
|
22
|
+
"repository": {
|
|
23
|
+
"type": "git",
|
|
24
|
+
"url": "https://github.com/nike4192/devkit.git"
|
|
25
|
+
},
|
|
26
|
+
"license": "MIT"
|
|
27
|
+
}
|
|
@@ -0,0 +1,88 @@
|
|
|
1
|
+
# Backups Plugin
|
|
2
|
+
|
|
3
|
+
Управление бэкапами базы данных — список, скачивание, создание и накатка дампов.
|
|
4
|
+
|
|
5
|
+
## Команды
|
|
6
|
+
|
|
7
|
+
| Команда | Описание |
|
|
8
|
+
|---------|----------|
|
|
9
|
+
| `devkit backups ls` | Показать локальные дампы (скачанные и созданные) |
|
|
10
|
+
| `devkit backups list --env test` | Доступные дампы на сервере |
|
|
11
|
+
| `devkit backups pull --env test` | Скачать дамп с сервера |
|
|
12
|
+
| `devkit backups dump` | Создать локальный дамп БД |
|
|
13
|
+
| `devkit backups load` | Накатить дамп в локальный контейнер |
|
|
14
|
+
|
|
15
|
+
## Конфигурация
|
|
16
|
+
|
|
17
|
+
Плагин настраивается через `config.yml` внутри плагина; настройки можно переопределить в файле `.devkit.d/backups.yml` в корне проекта:
|
|
18
|
+
|
|
19
|
+
```yaml
|
|
20
|
+
# .devkit.d/backups.yml
yadisk:
  folders:
    test: /Projects/crosses/backups/test
    stage: /Projects/crosses/backups/stage

servers:
  test:
    ssh: root@45.86.182.200
  stage:
    ssh: root@185.93.108.63
|
|
33
|
+
```
|
|
34
|
+
|
|
35
|
+
## Источники дампов
|
|
36
|
+
|
|
37
|
+
- **server** — скачивание через SCP с удалённого сервера
|
|
38
|
+
- **yadisk** — скачивание через YaDisk API (требуется `YADISK_TOKEN`)
|
|
39
|
+
- **local** — дампы, созданные через `devkit backups dump`
|
|
40
|
+
|
|
41
|
+
## Формат имён файлов
|
|
42
|
+
|
|
43
|
+
```
|
|
44
|
+
dump-{env}-{type}-{date}_{time}.sql.gz
|
|
45
|
+
|
|
46
|
+
Примеры:
|
|
47
|
+
dump-test-inner-2026-04-03_14_30_00.sql.gz
|
|
48
|
+
dump-stage-outer-2026-04-02_04_00_00.sql.gz
|
|
49
|
+
dump-local-inner-2026-04-03_15_00_00.sql.gz
|
|
50
|
+
```
|
|
51
|
+
|
|
52
|
+
## Примеры использования
|
|
53
|
+
|
|
54
|
+
```bash
|
|
55
|
+
# Посмотреть локальные дампы
|
|
56
|
+
devkit backups ls
|
|
57
|
+
|
|
58
|
+
# Список дампов на тестовом сервере
|
|
59
|
+
devkit backups list --env test
|
|
60
|
+
|
|
61
|
+
# Список на stage, только с YaDisk
|
|
62
|
+
devkit backups list --env stage --source yadisk
|
|
63
|
+
|
|
64
|
+
# Скачать свежий дамп со stage
|
|
65
|
+
devkit backups pull --env stage
|
|
66
|
+
|
|
67
|
+
# Создать дамп локальной БД
|
|
68
|
+
devkit backups dump
|
|
69
|
+
|
|
70
|
+
# Накатить свежий дамп
|
|
71
|
+
devkit backups load
|
|
72
|
+
|
|
73
|
+
# Накатить конкретный файл
|
|
74
|
+
devkit backups load --src backups/dump-test-inner-2026-04-03_14_30_00.sql.gz
|
|
75
|
+
```
|
|
76
|
+
|
|
77
|
+
## Переменные окружения
|
|
78
|
+
|
|
79
|
+
| Переменная | Описание |
|
|
80
|
+
|-----------|----------|
|
|
81
|
+
| `YADISK_TOKEN` | Токен Яндекс.Диска для скачивания бэкапов |
|
|
82
|
+
| `COMPOSE_FILE` | Путь к docker-compose файлу (по умолчанию `docker-compose.dev.yml`) |
|
|
83
|
+
|
|
84
|
+
## Зависимости
|
|
85
|
+
|
|
86
|
+
- Docker + docker-compose
|
|
87
|
+
- SSH доступ к серверам (test/stage)
|
|
88
|
+
- YaDisk API (опционально, как fallback)
|