@tolinax/ayoune-cli 2026.4.0 → 2026.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/api/apiClient.js +1 -0
- package/lib/commands/createAggregateCommand.js +656 -0
- package/lib/commands/createDbCommand.js +286 -0
- package/lib/commands/createProgram.js +12 -2
- package/lib/commands/createStatusCommand.js +33 -20
- package/lib/db/copyConfigStore.js +88 -0
- package/lib/db/copyEngine.js +123 -0
- package/lib/db/cronMatcher.js +42 -0
- package/lib/db/types.js +1 -0
- package/lib/helpers/readPipelineInput.js +50 -0
- package/lib/helpers/updateNotifier.js +2 -1
- package/package.json +3 -1
|
@@ -0,0 +1,286 @@
|
|
|
1
|
+
import chalk from "chalk";
|
|
2
|
+
import { spinner } from "../../index.js";
|
|
3
|
+
import { EXIT_GENERAL_ERROR, EXIT_MISUSE } from "../exitCodes.js";
|
|
4
|
+
import { cliError } from "../helpers/cliError.js";
|
|
5
|
+
import { handleResponseFormatOptions } from "../helpers/handleResponseFormatOptions.js";
|
|
6
|
+
import { executeCopy, getDbStats } from "../db/copyEngine.js";
|
|
7
|
+
import { addCopyConfig, loadCopyConfigs, removeCopyConfig, getDecryptedConfig, updateCopyConfigLastRun, maskUri, } from "../db/copyConfigStore.js";
|
|
8
|
+
import { isCronDue, getNextRun, validateCron } from "../db/cronMatcher.js";
|
|
9
|
+
function generateId() {
    // Random base-36 fragment plus a base-36 timestamp: short, URL-safe, and
    // unique enough for locally stored schedule IDs (not cryptographically secure).
    const randomPart = Math.random().toString(36).slice(2, 10);
    const timePart = Date.now().toString(36);
    return randomPart + timePart;
}
|
|
12
|
+
/**
 * Render a non-negative byte count as a human-readable string, e.g. 1536 -> "1.5 KB".
 * @param {number} bytes - byte count; assumed non-negative (negative input yields "NaN …").
 * @returns {string} value with one decimal place and a binary-unit suffix.
 */
function formatBytes(bytes) {
    if (bytes === 0)
        return "0 B";
    const units = ["B", "KB", "MB", "GB", "TB"];
    // Clamp the unit index into the table: values >= 1024^5 previously indexed past
    // the end and printed "undefined" as the unit; sub-1 values indexed at -1.
    const i = Math.min(Math.max(Math.floor(Math.log(bytes) / Math.log(1024)), 0), units.length - 1);
    return (bytes / Math.pow(1024, i)).toFixed(1) + " " + units[i];
}
|
|
19
|
+
/**
 * Register the `ay db` command group on the given Commander program:
 * `db copy` (ad-hoc or scheduled replication), `db schedules list|remove|run`
 * (management/execution of saved jobs), and `db stats <uri>`.
 * @param {import("commander").Command} program - root CLI program.
 */
export function createDbCommand(program) {
    // Parent command: groups copy / schedules / stats under `ay db`.
    const db = program
        .command("db")
        .description("MongoDB database operations (copy, stats, scheduled replication)")
        .addHelpText("after", `
Examples:
ay db copy --from "mongodb://..." --to "mongodb://..." --collections "users,orders"
ay db copy --from "mongodb://..." --to "mongodb://..." --all --schedule "0 */6 * * *"
ay db schedules list
ay db stats "mongodb+srv://..."`);
    // ─── ay db copy ─────────────────────────────────────────────────
    db.command("copy")
        .description("Copy data between MongoDB instances")
        .requiredOption("--from <uri>", "Source MongoDB connection URI")
        .requiredOption("--to <uri>", "Target MongoDB connection URI")
        .option("--collections <list>", "Comma-separated collection names")
        .option("--all", "Copy all collections")
        .option("--query <json>", "JSON filter query for source documents")
        .option("--drop", "Drop target collection before copying")
        .option("--upsert", "Upsert documents by _id (idempotent, safe to re-run)")
        // NOTE(review): parseInt is used without a radix; decimal input parses as
        // base 10, but a "0x"-prefixed value would parse as hex — confirm intended.
        .option("--batch-size <number>", "Documents per batch", parseInt, 1000)
        .option("--schedule <cron>", "Save as scheduled copy (cron expression) instead of executing")
        .addHelpText("after", `
Examples:
ay db copy --from "mongodb://..." --to "mongodb://..." --collections "users,orders"
ay db copy --from "mongodb://..." --to "mongodb://..." --all --drop
ay db copy --from "mongodb://..." --to "mongodb://..." --collections "logs" --query '{"status":"active"}'
ay db copy --from "mongodb://..." --to "mongodb://..." --all --upsert --schedule "0 */6 * * *"`)
        .action(async (options) => {
        try {
            // Merge global options (e.g. responseFormat) with this subcommand's.
            const opts = { ...program.opts(), ...options };
            // Validate input — cliError() is expected to print and terminate.
            if (!opts.collections && !opts.all) {
                cliError("Provide --collections or --all", EXIT_MISUSE);
            }
            if (opts.collections && opts.all) {
                cliError("Use either --collections or --all, not both", EXIT_MISUSE);
            }
            if (!opts.from.startsWith("mongodb")) {
                cliError("--from must be a MongoDB URI (mongodb:// or mongodb+srv://)", EXIT_MISUSE);
            }
            if (!opts.to.startsWith("mongodb")) {
                cliError("--to must be a MongoDB URI (mongodb:// or mongodb+srv://)", EXIT_MISUSE);
            }
            let query;
            if (opts.query) {
                try {
                    query = JSON.parse(opts.query);
                }
                catch (_a) {
                    cliError("--query must be valid JSON", EXIT_MISUSE);
                }
            }
            // "*" is the sentinel executeCopy expands to "all collections".
            const collections = opts.all ? ["*"] : opts.collections.split(",").map((c) => c.trim());
            // Schedule mode: persist the job instead of executing it now.
            if (opts.schedule) {
                const cronError = validateCron(opts.schedule);
                if (cronError)
                    cliError(`Invalid cron expression: ${cronError}`, EXIT_MISUSE);
                const config = {
                    id: generateId(),
                    createdAt: new Date().toISOString(),
                    // Masked copies are kept for display; plaintext URIs are stored
                    // separately and encrypted by the config store.
                    from: maskUri(opts.from),
                    to: maskUri(opts.to),
                    fromUri: opts.from, // Will be encrypted by addCopyConfig
                    toUri: opts.to,
                    collections,
                    query,
                    drop: opts.drop || false,
                    upsert: opts.upsert || false,
                    batchSize: opts.batchSize,
                    schedule: opts.schedule,
                };
                addCopyConfig(config);
                const nextRun = getNextRun(opts.schedule);
                console.log(chalk.green(`\n Scheduled copy saved (ID: ${config.id})`));
                console.log(chalk.dim(` Source: ${config.from}`));
                console.log(chalk.dim(` Target: ${config.to}`));
                console.log(chalk.dim(` Collections: ${collections.join(", ")}`));
                console.log(chalk.dim(` Schedule: ${opts.schedule}`));
                if (nextRun)
                    console.log(chalk.dim(` Next run: ${nextRun.toISOString()}`));
                console.log();
                // The CLI does not daemonize; the user wires an OS scheduler to
                // invoke `ay db schedules run` periodically.
                console.log(chalk.yellow(" Set up a cron job to run scheduled copies:"));
                console.log(chalk.dim(" Linux/macOS: crontab -e → */5 * * * * ay db schedules run"));
                console.log(chalk.dim(" Windows: schtasks /create /tn ayoune-db-sync /tr \"ay db schedules run\" /sc minute /mo 5"));
                console.log();
                return;
            }
            // Execute copy immediately, streaming per-batch progress to the spinner.
            spinner.start({ text: `Copying from ${maskUri(opts.from)} to ${maskUri(opts.to)}...`, color: "cyan" });
            const summary = await executeCopy(opts.from, opts.to, collections, {
                query,
                drop: opts.drop,
                upsert: opts.upsert,
                batchSize: opts.batchSize,
            }, (progress) => {
                spinner.update({
                    text: `${progress.collection}: ${progress.copied}/${progress.total} docs${progress.errors ? chalk.red(` (${progress.errors} errors)`) : ""}`,
                });
            });
            spinner.stop();
            // Display summary — machine-readable envelope or human-readable table.
            const wrapped = {
                payload: summary,
                meta: {
                    totalCopied: summary.totalCopied,
                    totalErrors: summary.totalErrors,
                    duration: summary.duration,
                },
            };
            if (opts.responseFormat === "json" || opts.responseFormat === "yaml") {
                handleResponseFormatOptions(opts, wrapped);
            }
            else {
                console.log(chalk.cyan.bold("\n Copy Summary\n"));
                for (const col of summary.collections) {
                    const icon = col.errors === 0 ? chalk.green("●") : chalk.yellow("●");
                    console.log(` ${icon} ${col.name}: ${col.copied} docs copied${col.errors ? chalk.red(` (${col.errors} errors)`) : ""} ${chalk.dim(`(${col.duration}ms)`)}`);
                }
                console.log();
                console.log(` ${chalk.green(summary.totalCopied + " total")}${summary.totalErrors ? ` ${chalk.red(summary.totalErrors + " errors")}` : ""} ${chalk.dim(`in ${summary.duration}ms`)}`);
                console.log();
            }
            // Partial failure still exits non-zero so cron/CI can detect it.
            if (summary.totalErrors > 0)
                process.exit(EXIT_GENERAL_ERROR);
        }
        catch (e) {
            cliError(e.message || "Copy failed", EXIT_GENERAL_ERROR);
        }
    });
    // ─── ay db schedules ────────────────────────────────────────────
    const schedules = db.command("schedules").alias("sched").description("Manage scheduled copy jobs");
    // ay db schedules list
    schedules
        .command("list")
        .alias("ls")
        .description("List all scheduled copy jobs")
        .action(async (options) => {
        try {
            const opts = { ...program.opts(), ...options };
            const configs = loadCopyConfigs();
            if (configs.length === 0) {
                console.log(chalk.dim("\n No scheduled copies configured.\n"));
                return;
            }
            const wrapped = { payload: configs.map(displayConfig), meta: { total: configs.length } };
            handleResponseFormatOptions(opts, wrapped);
        }
        catch (e) {
            cliError(e.message || "Failed to list schedules", EXIT_GENERAL_ERROR);
        }
    });
    // ay db schedules remove <id>
    schedules
        .command("remove <id>")
        .alias("rm")
        .description("Remove a scheduled copy job")
        .action(async (id) => {
        const removed = removeCopyConfig(id);
        if (removed) {
            spinner.success({ text: `Schedule ${id} removed` });
        }
        else {
            cliError(`No schedule found with ID "${id}"`, EXIT_GENERAL_ERROR);
        }
    });
    // ay db schedules run
    schedules
        .command("run")
        .description("Execute all copy jobs that are due now")
        .option("--id <id>", "Run a specific schedule by ID")
        .action(async (options) => {
        try {
            const configs = loadCopyConfigs();
            const now = new Date();
            // Either a single schedule by ID, or every job whose cron window
            // has elapsed since its last run.
            const toRun = options.id
                ? configs.filter((c) => c.id === options.id)
                : configs.filter((c) => isCronDue(c.schedule, c.lastRun, now));
            if (toRun.length === 0) {
                console.log(chalk.dim("\n No copies due.\n"));
                return;
            }
            console.log(chalk.cyan(`\n Running ${toRun.length} scheduled copies...\n`));
            // Jobs run sequentially: one spinner at a time, and no concurrent
            // load against the databases.
            for (const config of toRun) {
                const { fromUri, toUri } = getDecryptedConfig(config);
                console.log(chalk.dim(` ${config.id}: ${config.from} → ${config.to}`));
                try {
                    spinner.start({ text: `${config.id}: copying...`, color: "cyan" });
                    const summary = await executeCopy(fromUri, toUri, config.collections, {
                        query: config.query,
                        drop: config.drop,
                        upsert: config.upsert,
                        batchSize: config.batchSize,
                    }, (progress) => {
                        spinner.update({
                            text: `${config.id}: ${progress.collection} ${progress.copied}/${progress.total}`,
                        });
                    });
                    updateCopyConfigLastRun(config.id, "success");
                    spinner.success({ text: `${config.id}: ${summary.totalCopied} docs copied${summary.totalErrors ? `, ${summary.totalErrors} errors` : ""}` });
                }
                catch (e) {
                    // One failing job does not abort the rest; the failure is
                    // recorded on the config for `schedules list` to show.
                    updateCopyConfigLastRun(config.id, "failed", e.message);
                    spinner.error({ text: `${config.id}: ${e.message}` });
                }
            }
            console.log();
        }
        catch (e) {
            cliError(e.message || "Scheduled run failed", EXIT_GENERAL_ERROR);
        }
    });
    // ─── ay db stats <uri> ──────────────────────────────────────────
    db.command("stats <uri>")
        .description("Show database statistics (collections, doc counts, sizes)")
        .addHelpText("after", `
Examples:
ay db stats "mongodb+srv://user:pass@cluster.mongodb.net/mydb"
ay db stats "mongodb://localhost:27017/testdb" -r table`)
        .action(async (uri, options) => {
        try {
            const opts = { ...program.opts(), ...options };
            if (!uri.startsWith("mongodb")) {
                cliError("URI must start with mongodb:// or mongodb+srv://", EXIT_MISUSE);
            }
            spinner.start({ text: `Connecting to ${maskUri(uri)}...`, color: "cyan" });
            const stats = await getDbStats(uri);
            spinner.stop();
            const wrapped = {
                payload: stats,
                meta: {
                    database: stats.database,
                    collectionCount: stats.collections.length,
                    totalSize: formatBytes(stats.totalSize),
                },
            };
            if (opts.responseFormat === "json" || opts.responseFormat === "yaml") {
                handleResponseFormatOptions(opts, wrapped);
            }
            else {
                console.log(chalk.cyan.bold(`\n Database: ${stats.database}\n`));
                console.log(` ${chalk.dim("Total size:")} ${formatBytes(stats.totalSize)}`);
                console.log(` ${chalk.dim("Collections:")} ${stats.collections.length}\n`);
                for (const col of stats.collections) {
                    console.log(` ${chalk.white(col.name)}: ${col.documents.toLocaleString()} docs ${chalk.dim(`(${formatBytes(col.size)})`)}`);
                }
                console.log();
            }
        }
        catch (e) {
            cliError(e.message || "Failed to get stats", EXIT_GENERAL_ERROR);
        }
    });
}
|
|
274
|
+
// Flatten a stored copy-config into the row shape used by `schedules list` output.
function displayConfig(config) {
    const upcoming = getNextRun(config.schedule);
    const row = {
        id: config.id,
        from: config.from,
        to: config.to,
        collections: config.collections.join(", "),
        schedule: config.schedule,
        nextRun: upcoming ? upcoming.toISOString() : "—",
        lastRun: config.lastRun || "never",
        lastStatus: config.lastStatus || "—",
    };
    return row;
}
|
|
@@ -47,6 +47,8 @@ import { createStatusCommand } from "./createStatusCommand.js";
|
|
|
47
47
|
import { createSelfHostUpdateCommand } from "./createSelfHostUpdateCommand.js";
|
|
48
48
|
import { createContextCommand } from "./createContextCommand.js";
|
|
49
49
|
import { createAccessCommand } from "./createAccessCommand.js";
|
|
50
|
+
import { createAggregateCommand } from "./createAggregateCommand.js";
|
|
51
|
+
import { createDbCommand } from "./createDbCommand.js";
|
|
50
52
|
import { secureStorage } from "../helpers/secureStorage.js";
|
|
51
53
|
import { login } from "../api/login.js";
|
|
52
54
|
import { loadConfig } from "../helpers/configLoader.js";
|
|
@@ -54,7 +56,13 @@ import { getLogo, getDescription, BRAND_BLUE } from "../helpers/logo.js";
|
|
|
54
56
|
import { checkForUpdates } from "../helpers/updateNotifier.js";
|
|
55
57
|
import { createRequire } from "module";
|
|
56
58
|
const require = createRequire(import.meta.url);
|
|
57
|
-
|
|
59
|
+
// Resolve this package's own package.json regardless of layout: the compiled
// dist/ layout (and an npm install) keep it two levels up; the raw source tree
// used by tests keeps it three levels up.
let pkg;
try {
    pkg = require("../../package.json"); // dist/ and npm install
}
catch (_a) {
    pkg = require("../../../package.json"); // source tree (tests)
}
|
|
58
66
|
export function createProgram(program) {
|
|
59
67
|
program
|
|
60
68
|
.version(pkg.version || "0.0.0")
|
|
@@ -115,6 +123,8 @@ export function createProgram(program) {
|
|
|
115
123
|
createPermissionsCommand(program);
|
|
116
124
|
createTemplateCommand(program);
|
|
117
125
|
createAccessCommand(program);
|
|
126
|
+
createAggregateCommand(program);
|
|
127
|
+
createDbCommand(program);
|
|
118
128
|
createSetupCommand(program);
|
|
119
129
|
createStatusCommand(program);
|
|
120
130
|
createSelfHostUpdateCommand(program);
|
|
@@ -194,7 +204,7 @@ export function createProgram(program) {
|
|
|
194
204
|
}
|
|
195
205
|
// First-run onboarding: auto-login if no token stored
|
|
196
206
|
const cmdName = thisCommand.name();
|
|
197
|
-
const skipAuth = ["login", "logout", "whoami", "completions", "alias", "config", "help", "setup", "status", "self-host-update", "context"];
|
|
207
|
+
const skipAuth = ["login", "logout", "whoami", "completions", "alias", "config", "help", "setup", "status", "self-host-update", "context", "db"];
|
|
198
208
|
if (!skipAuth.includes(cmdName) && process.stdin.isTTY) {
|
|
199
209
|
const token = secureStorage.getItem("token");
|
|
200
210
|
if (!token) {
|
|
@@ -6,50 +6,63 @@ import { EXIT_GENERAL_ERROR } from "../exitCodes.js";
|
|
|
6
6
|
import { cliError } from "../helpers/cliError.js";
|
|
7
7
|
import { handleResponseFormatOptions } from "../helpers/handleResponseFormatOptions.js";
|
|
8
8
|
// Services present in every deployment, checked regardless of enabled modules.
const CORE_SERVICES = [
    { name: "config-api", module: "config" },
    { name: "auth", module: "auth" },
];
|
|
12
12
|
// Optional per-module API services, keyed by module name. Each value lists the
// services to health-check when that module is enabled in the deployment.
const MODULE_SERVICES = {
    crm: [{ name: "crm-api", module: "crm" }],
    marketing: [{ name: "marketing-api", module: "marketing" }],
    hr: [{ name: "hr-api", module: "hr" }],
    ecommerce: [{ name: "ecommerce-api", module: "ecommerce" }],
    pm: [{ name: "pm-api", module: "pm" }],
    devops: [{ name: "devops-api", module: "devops" }],
    accounting: [{ name: "accounting-api", module: "accounting" }],
    automation: [{ name: "automation-api", module: "automation" }],
    support: [{ name: "support-api", module: "support" }],
    reporting: [{ name: "reporting-api", module: "reporting" }],
    monitoring: [{ name: "monitoring-api", module: "monitoring" }],
};
|
|
25
|
-
|
|
26
|
-
var _a;
|
|
25
|
+
// Pull a version string out of the several response shapes services return:
// { meta: { version: { core | host } } }, a top-level { version }, or
// { payload: { version } }. Returns undefined when none is present.
function extractVersion(data) {
    const metaVersion = data && data.meta && data.meta.version;
    return (metaVersion && metaVersion.core)
        || (metaVersion && metaVersion.host)
        || (data && data.version)
        || (data && data.payload && data.payload.version)
        || undefined;
}
|
|
29
|
+
async function checkService(name, module) {
|
|
27
30
|
const baseUrl = getModuleBaseUrl(module);
|
|
28
31
|
const start = Date.now();
|
|
29
32
|
try {
|
|
30
|
-
// Use api client directly to avoid handleAPIError calling process.exit()
|
|
31
33
|
const response = await api({
|
|
32
34
|
baseURL: baseUrl,
|
|
33
35
|
method: "get",
|
|
34
|
-
url:
|
|
36
|
+
url: "/",
|
|
35
37
|
timeout: 10000,
|
|
36
38
|
headers: {
|
|
37
39
|
Authorization: `Bearer ${secureStorage.getItem("token") || ""}`,
|
|
38
40
|
},
|
|
39
41
|
});
|
|
40
42
|
const responseTime = Date.now() - start;
|
|
41
|
-
const res = response.data;
|
|
42
43
|
return {
|
|
43
44
|
service: name,
|
|
44
45
|
url: baseUrl,
|
|
45
46
|
status: "healthy",
|
|
46
47
|
responseTime,
|
|
47
|
-
version: (
|
|
48
|
+
version: extractVersion(response.data),
|
|
48
49
|
};
|
|
49
50
|
}
|
|
50
51
|
catch (e) {
|
|
51
52
|
const responseTime = Date.now() - start;
|
|
52
53
|
if (e.response) {
|
|
54
|
+
const data = e.response.data;
|
|
55
|
+
const version = extractVersion(data);
|
|
56
|
+
// A structured JSON response (even 404) with version info means the service is running
|
|
57
|
+
if (e.response.status < 500 && version) {
|
|
58
|
+
return {
|
|
59
|
+
service: name,
|
|
60
|
+
url: baseUrl,
|
|
61
|
+
status: "healthy",
|
|
62
|
+
responseTime,
|
|
63
|
+
version,
|
|
64
|
+
};
|
|
65
|
+
}
|
|
53
66
|
return {
|
|
54
67
|
service: name,
|
|
55
68
|
url: baseUrl,
|
|
@@ -101,7 +114,7 @@ Examples:
|
|
|
101
114
|
}
|
|
102
115
|
}
|
|
103
116
|
// Check services concurrently
|
|
104
|
-
const results = await Promise.all(servicesToCheck.map((svc) => checkService(svc.name, svc.module
|
|
117
|
+
const results = await Promise.all(servicesToCheck.map((svc) => checkService(svc.name, svc.module)));
|
|
105
118
|
spinner.stop();
|
|
106
119
|
const healthy = results.filter((r) => r.status === "healthy").length;
|
|
107
120
|
const unhealthy = results.filter((r) => r.status === "unhealthy").length;
|
|
@@ -0,0 +1,88 @@
|
|
|
1
|
+
import { readFileSync, writeFileSync, existsSync, mkdirSync } from "fs";
|
|
2
|
+
import path from "path";
|
|
3
|
+
import os from "os";
|
|
4
|
+
import crypto from "crypto";
|
|
5
|
+
// Per-user store for scheduled-copy configs, kept outside any repository.
const CONFIG_PATH = path.join(os.homedir(), ".config", "ayoune", "db-copies.json");
// AES-256-CBC with a machine-derived key: this is obfuscation against casual
// disk reads, not strong protection — see deriveKey().
const ALGORITHM = "aes-256-cbc";
// Derive a deterministic 32-byte key from machine identity. Anyone with access
// to the same host/user account can re-derive it.
function deriveKey() {
    const seed = `ayoune-cli:${os.hostname()}:${os.userInfo().username}`;
    const hash = crypto.createHash("sha256");
    hash.update(seed);
    return hash.digest();
}
// Encrypt text; output format is "<iv-hex>:<ciphertext-hex>".
function encrypt(text) {
    const iv = crypto.randomBytes(16);
    const cipher = crypto.createCipheriv(ALGORITHM, deriveKey(), iv);
    const chunks = [cipher.update(text, "utf8", "hex"), cipher.final("hex")];
    return `${iv.toString("hex")}:${chunks.join("")}`;
}
// Inverse of encrypt(). Anything that does not look like "<iv>:<data>" — or
// that fails to decrypt — is returned unchanged, so plaintext values survive.
function decrypt(data) {
    const colonAt = data.indexOf(":");
    if (colonAt <= 0 || colonAt === data.length - 1)
        return data;
    const ivHex = data.slice(0, colonAt);
    const encrypted = data.slice(colonAt + 1);
    try {
        const decipher = crypto.createDecipheriv(ALGORITHM, deriveKey(), Buffer.from(ivHex, "hex"));
        return decipher.update(encrypted, "hex", "utf8") + decipher.final("utf8");
    }
    catch (_err) {
        return data;
    }
}
|
|
35
|
+
// Hide credentials in "scheme://user:pass@host" URIs for display/log output;
// URIs without embedded credentials are returned untouched.
export function maskUri(uri) {
    const CREDENTIALS = /:\/\/([^:]+):([^@]+)@/;
    return uri.replace(CREDENTIALS, "://***:***@");
}
|
|
38
|
+
// Load all persisted copy configs. A missing or corrupt store degrades to
// "no schedules" rather than crashing the CLI.
export function loadCopyConfigs() {
    if (!existsSync(CONFIG_PATH))
        return [];
    try {
        const raw = readFileSync(CONFIG_PATH, "utf-8");
        return JSON.parse(raw);
    }
    catch (_err) {
        return [];
    }
}
|
|
48
|
+
// Persist the full schedule list as pretty-printed JSON, creating the
// ~/.config/ayoune directory on first use.
export function saveCopyConfigs(configs) {
    const dir = path.dirname(CONFIG_PATH);
    if (!existsSync(dir)) {
        mkdirSync(dir, { recursive: true });
    }
    const serialized = JSON.stringify(configs, null, 2);
    writeFileSync(CONFIG_PATH, serialized, "utf-8");
}
|
|
54
|
+
/**
 * Persist a new scheduled-copy config, encrypting the connection URIs at rest.
 * Previously this mutated the caller's object in place (its fromUri/toUri were
 * replaced by ciphertext after the call); encryption now happens on a stored
 * copy only, leaving the caller's object untouched.
 * @param {object} config - schedule config with plaintext fromUri/toUri.
 */
export function addCopyConfig(config) {
    const configs = loadCopyConfigs();
    configs.push({
        ...config,
        fromUri: encrypt(config.fromUri),
        toUri: encrypt(config.toUri),
    });
    saveCopyConfigs(configs);
}
|
|
62
|
+
// Delete a schedule by ID. Returns true when one existed and was removed,
// false when the ID is unknown (nothing is rewritten in that case).
export function removeCopyConfig(id) {
    const configs = loadCopyConfigs();
    const remaining = configs.filter((entry) => entry.id !== id);
    const found = remaining.length !== configs.length;
    if (found) {
        saveCopyConfigs(remaining);
    }
    return found;
}
|
|
70
|
+
// Recover the plaintext source/target URIs for execution; stored configs keep
// them encrypted at rest.
export function getDecryptedConfig(config) {
    const fromUri = decrypt(config.fromUri);
    const toUri = decrypt(config.toUri);
    return { fromUri, toUri };
}
|
|
76
|
+
// Record the outcome of a scheduled run on its config; unknown IDs are
// silently ignored (nothing is saved).
export function updateCopyConfigLastRun(id, status, error) {
    const configs = loadCopyConfigs();
    const target = configs.find((entry) => entry.id === id);
    if (!target)
        return;
    target.lastRun = new Date().toISOString();
    target.lastStatus = status;
    if (error)
        target.lastError = error;
    else
        delete target.lastError;
    saveCopyConfigs(configs);
}
|
|
@@ -0,0 +1,123 @@
|
|
|
1
|
+
import { MongoClient } from "mongodb";
|
|
2
|
+
/**
 * Copy documents from one MongoDB deployment to another, collection by
 * collection, streaming in fixed-size batches.
 * @param {string} fromUri - source connection URI (database taken from the URI).
 * @param {string} toUri - target connection URI.
 * @param {string[]} collections - collection names, or ["*"] to copy every
 *   non-system collection found on the source.
 * @param {{query?: object, drop?: boolean, upsert?: boolean, batchSize: number}} options
 * @param {(p: {collection: string, copied: number, total: number, errors: number}) => void} onProgress
 *   - invoked after each flushed batch (and once per collection at the start).
 * @returns {Promise<{collections: Array, totalCopied: number, totalErrors: number, duration: number}>}
 */
export async function executeCopy(fromUri, toUri, collections, options, onProgress) {
    const startTime = Date.now();
    const sourceClient = new MongoClient(fromUri);
    const targetClient = new MongoClient(toUri);
    const results = [];
    try {
        await sourceClient.connect();
        await targetClient.connect();
        // db() with no argument uses the database named in each connection string.
        const sourceDb = sourceClient.db();
        const targetDb = targetClient.db();
        // Resolve collections if --all was used ("*" sentinel from the CLI).
        let collectionNames = collections;
        if (collections.length === 1 && collections[0] === "*") {
            const colls = await sourceDb.listCollections().toArray();
            collectionNames = colls
                .map((c) => c.name)
                .filter((n) => !n.startsWith("system."));
        }
        for (const name of collectionNames) {
            const colStart = Date.now();
            let copied = 0;
            let errors = 0;
            const sourceCol = sourceDb.collection(name);
            const targetCol = targetDb.collection(name);
            if (options.drop) {
                try {
                    await targetCol.drop();
                }
                catch (_a) {
                    // Collection may not exist — ignore
                }
            }
            const query = options.query || {};
            // total is a point-in-time count; concurrent writes on the source can
            // make copied differ slightly from total by the end.
            const total = await sourceCol.countDocuments(query);
            onProgress({ collection: name, copied: 0, total, errors: 0 });
            // Stream source docs and flush to the target in batchSize chunks.
            const cursor = sourceCol.find(query).batchSize(options.batchSize);
            let batch = [];
            for await (const doc of cursor) {
                batch.push(doc);
                if (batch.length >= options.batchSize) {
                    const result = await writeBatch(targetCol, batch, options.upsert || false);
                    copied += result.written;
                    errors += result.errors;
                    batch = [];
                    onProgress({ collection: name, copied, total, errors });
                }
            }
            // Write remaining docs
            if (batch.length > 0) {
                const result = await writeBatch(targetCol, batch, options.upsert || false);
                copied += result.written;
                errors += result.errors;
                onProgress({ collection: name, copied, total, errors });
            }
            results.push({ name, copied, errors, duration: Date.now() - colStart });
        }
        const totalCopied = results.reduce((s, r) => s + r.copied, 0);
        const totalErrors = results.reduce((s, r) => s + r.errors, 0);
        return { collections: results, totalCopied, totalErrors, duration: Date.now() - startTime };
    }
    finally {
        // Always release both connections, even on failure part-way through.
        await sourceClient.close();
        await targetClient.close();
    }
}
|
|
67
|
+
/**
 * Write one batch of documents to the target collection.
 * @param {object} collection - target MongoDB collection.
 * @param {object[]} docs - documents to write (each expected to carry _id).
 * @param {boolean} upsert - true → updateOne-with-upsert per doc; false → insertMany.
 * @returns {Promise<{written: number, errors: number}>} documents processed and failed.
 */
async function writeBatch(collection, docs, upsert) {
    try {
        if (upsert) {
            // Idempotent path: replace-or-insert each document by _id.
            const ops = docs.map((doc) => ({
                updateOne: {
                    filter: { _id: doc._id },
                    update: { $set: doc },
                    upsert: true,
                },
            }));
            const result = await collection.bulkWrite(ops, { ordered: false });
            // Count every document the server processed: newly upserted plus matched.
            // The previous upsertedCount + modifiedCount under-reported re-runs,
            // because an identical document matches but is not "modified".
            const written = (result.upsertedCount || 0) + (result.matchedCount || 0);
            return { written, errors: 0 };
        }
        const result = await collection.insertMany(docs, { ordered: false });
        return { written: result.insertedCount, errors: 0 };
    }
    catch (e) {
        // Partial success on ordered:false — count what made it through. Drivers
        // report this as nInserted (legacy) or insertedCount on the error result.
        const res = e.result;
        let written = 0;
        if (res) {
            if (res.nInserted != null)
                written = res.nInserted;
            else if (res.insertedCount != null)
                written = res.insertedCount;
        }
        return { written, errors: docs.length - written };
    }
}
|
|
93
|
+
/**
 * Gather high-level statistics for the database named in the connection URI:
 * per-collection estimated document counts and sizes, plus total data size.
 * @param {string} uri - MongoDB connection URI (database taken from the URI).
 * @returns {Promise<{database: string, collections: Array<{name: string, documents: number, size: number}>, totalSize: number}>}
 */
export async function getDbStats(uri) {
    const client = new MongoClient(uri);
    try {
        await client.connect();
        const db = client.db();
        const stats = await db.stats();
        const collList = await db.listCollections().toArray();
        const collections = [];
        for (const col of collList) {
            if (col.name.startsWith("system."))
                continue;
            try {
                // estimatedDocumentCount is metadata-based: fast but approximate.
                const count = await db.collection(col.name).estimatedDocumentCount();
                // NOTE(review): the collStats command is deprecated on newer MongoDB
                // servers in favour of the $collStats aggregation stage — confirm
                // the minimum server version this CLI supports.
                const colStats = await db.command({ collStats: col.name });
                collections.push({ name: col.name, documents: count, size: colStats.size || 0 });
            }
            catch (_a) {
                // Views and restricted collections can reject collStats; report
                // zeros for them instead of failing the whole stats call.
                collections.push({ name: col.name, documents: 0, size: 0 });
            }
        }
        // Largest collections first.
        collections.sort((a, b) => b.documents - a.documents);
        return {
            database: stats.db,
            collections,
            totalSize: stats.dataSize || 0,
        };
    }
    finally {
        await client.close();
    }
}
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
import cronParser from "cron-parser";
|
|
2
|
+
/**
 * Determines whether a cron-scheduled job is due for execution.
 * Returns true if the current time has passed the next scheduled run after lastRun.
 */
export function isCronDue(expression, lastRun, now = new Date()) {
    // A job that has never run is always considered due.
    if (!lastRun)
        return true;
    try {
        const sinceLast = { currentDate: new Date(lastRun) };
        const due = cronParser.parseExpression(expression, sinceLast).next().toDate();
        return now >= due;
    }
    catch (_err) {
        // Invalid cron expression → never due.
        return false;
    }
}
|
|
19
|
+
/**
 * Returns the next scheduled run time for a cron expression after the given
 * instant, or null when the expression cannot be parsed.
 */
export function getNextRun(expression, after = new Date()) {
    try {
        const schedule = cronParser.parseExpression(expression, { currentDate: after });
        const next = schedule.next();
        return next.toDate();
    }
    catch (_err) {
        return null;
    }
}
|
|
31
|
+
/**
 * Validates a cron expression. Returns null if valid, error message if invalid.
 */
export function validateCron(expression) {
    try {
        cronParser.parseExpression(expression);
    }
    catch (e) {
        return e.message || "Invalid cron expression";
    }
    return null;
}
|
package/lib/db/types.js
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Runtime placeholder — presumably compiled from a types-only TypeScript module
// (no values to export). TODO confirm against the original source tree.
export {};
|