@tolinax/ayoune-cli 2026.5.0 → 2026.6.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/api/apiClient.js +1 -0
- package/lib/commands/createAggregateCommand.js +656 -0
- package/lib/commands/createDbCommand.js +307 -0
- package/lib/commands/createProgram.js +5 -1
- package/lib/db/copyConfigStore.js +88 -0
- package/lib/db/copyEngine.js +123 -0
- package/lib/db/cronMatcher.js +42 -0
- package/lib/db/types.js +1 -0
- package/lib/helpers/readPipelineInput.js +50 -0
- package/package.json +3 -1
package/lib/api/apiClient.js
CHANGED
|
@@ -28,6 +28,7 @@ const MODULE_HOST_OVERRIDES = {
|
|
|
28
28
|
general: "https://api-v1.ayoune.app/api/general",
|
|
29
29
|
usersettings: "https://api-v1.ayoune.app/api/usersettings",
|
|
30
30
|
auth: "https://auth.ayoune.app",
|
|
31
|
+
aggregation: "https://aggregation.ayoune.app",
|
|
31
32
|
};
|
|
32
33
|
export function getModuleBaseUrl(module) {
|
|
33
34
|
return MODULE_HOST_OVERRIDES[module] || `https://${module}-api.ayoune.app`;
|
|
@@ -0,0 +1,656 @@
|
|
|
1
|
+
import chalk from "chalk";
|
|
2
|
+
import inquirer from "inquirer";
|
|
3
|
+
import { apiCallHandler } from "../api/apiCallHandler.js";
|
|
4
|
+
import { handleResponseFormatOptions } from "../helpers/handleResponseFormatOptions.js";
|
|
5
|
+
import { spinner } from "../../index.js";
|
|
6
|
+
import { EXIT_GENERAL_ERROR, EXIT_MISUSE } from "../exitCodes.js";
|
|
7
|
+
import { cliError } from "../helpers/cliError.js";
|
|
8
|
+
import { readPipelineInput } from "../helpers/readPipelineInput.js";
|
|
9
|
+
/**
 * Normalize an aggregation API response into a `{ payload, meta }` envelope.
 * Arrays are wrapped with a `resultCount`; responses that already carry a
 * `payload` key pass through untouched; anything else becomes the payload
 * with empty meta.
 */
function wrapAggResult(res) {
    if (Array.isArray(res)) {
        const resultCount = res.length;
        return { payload: res, meta: { resultCount } };
    }
    const hasEnvelope = res !== null && res !== undefined && res.payload !== undefined;
    return hasEnvelope ? res : { payload: res, meta: {} };
}
|
|
17
|
+
/**
 * Register the `aggregate` (alias `agg`) command group on the commander
 * program: run/exec/list/save/validate/models subcommands plus an
 * interactive `wizard` pipeline builder.
 * @param {import("commander").Command} program - Root commander program.
 */
export function createAggregateCommand(program) {
    const agg = program
        .command("aggregate")
        .alias("agg")
        .description("Run MongoDB aggregation pipelines, manage saved queries, and explore models")
        .addHelpText("after", `
Examples:
  ay agg run consumers --pipeline '[{"$count":"total"}]'
  ay agg wizard
  ay agg exec monthly-revenue -r table
  ay agg list
  ay agg save --name "Active Users" --model consumers --pipeline '[{"$match":{"status":"active"}}]'
  ay agg validate --pipeline '[{"$group":{"_id":"$type","count":{"$sum":1}}}]'
  ay agg models consumers`);
    // ─── ay agg run <model> ─────────────────────────────────────────
    agg
        .command("run <model>")
        .description("Execute an aggregation pipeline on a model")
        .option("--pipeline <json>", "Pipeline as JSON array string")
        .option("--file <path>", "Read pipeline from a JSON file")
        .option("--global", "Skip tenant filter (superuser only)")
        .option("--replace", "Enable template variable replacement")
        .option("--resultAsObject", "Return result as single object")
        .option("--random", "Return one random result")
        .addHelpText("after", `
Examples:
  ay agg run consumers --pipeline '[{"$match":{"status":"active"}},{"$count":"total"}]'
  ay agg run orders --file pipeline.json -r table
  cat pipeline.json | ay agg run products`)
        .action(async (model, options) => {
            // _a/_b are compiler temporaries for optional chaining below.
            var _a, _b;
            try {
                // Global program options are overridden by subcommand options.
                const opts = { ...program.opts(), ...options };
                // Pipeline may come from --pipeline, --file, or stdin (see readPipelineInput).
                const pipeline = await readPipelineInput(opts);
                spinner.start({ text: `Executing pipeline on ${model}...`, color: "magenta" });
                // Only forward flags that were actually set.
                const params = {};
                if (opts.global)
                    params.global = true;
                if (opts.replace)
                    params.replace = true;
                if (opts.resultAsObject)
                    params.resultAsObject = true;
                if (opts.random)
                    params.random = true;
                const res = await apiCallHandler("aggregation", model, "post", pipeline, params);
                const wrapped = wrapAggResult(res);
                handleResponseFormatOptions(opts, wrapped);
                // Fall back to "?" when the API did not report a result count.
                spinner.success({ text: `Pipeline returned ${(_b = (_a = wrapped.meta) === null || _a === void 0 ? void 0 : _a.resultCount) !== null && _b !== void 0 ? _b : "?"} results` });
            }
            catch (e) {
                cliError(e.message || "Pipeline execution failed", EXIT_GENERAL_ERROR);
            }
        });
    // ─── ay agg exec <slugOrId> ─────────────────────────────────────
    agg
        .command("exec <slugOrId>")
        .description("Execute a saved query by slug name or ObjectId")
        .addHelpText("after", `
Examples:
  ay agg exec monthly-revenue -r table
  ay agg exec 507f1f77bcf86cd799439011 -r json --quiet`)
        .action(async (slugOrId, options) => {
            var _a, _b, _c, _d;
            try {
                const opts = { ...program.opts(), ...options };
                // 24 hex chars means the caller passed a raw Mongo ObjectId.
                const isObjectId = /^[a-f0-9]{24}$/i.test(slugOrId);
                spinner.start({ text: `Executing query "${slugOrId}"...`, color: "magenta" });
                let queryId = slugOrId;
                if (!isObjectId) {
                    // Resolve slug to ID via config-api
                    const lookup = await apiCallHandler("config", "queries", "get", null, {
                        slug: slugOrId,
                        limit: 1,
                        responseFormat: "json",
                    });
                    const entries = lookup === null || lookup === void 0 ? void 0 : lookup.payload;
                    // NOTE(review): assumes cliError terminates the process;
                    // otherwise the _id access below could throw — confirm.
                    if (!entries || (Array.isArray(entries) && entries.length === 0)) {
                        cliError(`No query found with slug "${slugOrId}"`, EXIT_GENERAL_ERROR);
                    }
                    queryId = Array.isArray(entries) ? entries[0]._id : entries._id;
                }
                const res = await apiCallHandler("aggregation", `queries/execute/${queryId}`, "post");
                const wrapped = wrapAggResult(res);
                handleResponseFormatOptions(opts, wrapped);
                // Prefer meta.resultCount, then payload.length, else "?".
                const count = (_d = (_b = (_a = wrapped.meta) === null || _a === void 0 ? void 0 : _a.resultCount) !== null && _b !== void 0 ? _b : (_c = wrapped.payload) === null || _c === void 0 ? void 0 : _c.length) !== null && _d !== void 0 ? _d : "?";
                spinner.success({ text: `Query returned ${count} results` });
            }
            catch (e) {
                cliError(e.message || "Query execution failed", EXIT_GENERAL_ERROR);
            }
        });
    // ─── ay agg list ────────────────────────────────────────────────
    agg
        .command("list")
        .alias("ls")
        .description("List saved aggregation queries")
        .option("-l, --limit <number>", "Limit results", parseInt, 50)
        .option("-p, --page <number>", "Page number", parseInt, 1)
        .option("--search <term>", "Search by name")
        .action(async (options) => {
            var _a, _b, _c, _d, _e;
            try {
                const opts = { ...program.opts(), ...options };
                spinner.start({ text: "Fetching queries...", color: "magenta" });
                const params = {
                    page: opts.page,
                    limit: opts.limit,
                    responseFormat: opts.responseFormat,
                    verbosity: opts.verbosity,
                    // Only queries backed by the aggregation data source.
                    "dataSource.source": "aggregation",
                };
                if (opts.search)
                    params.q = opts.search;
                const res = await apiCallHandler("config", "queries", "get", null, params);
                handleResponseFormatOptions(opts, res);
                // Prefer the paginated total, then payload length, else 0.
                const total = (_e = (_c = (_b = (_a = res.meta) === null || _a === void 0 ? void 0 : _a.pageInfo) === null || _b === void 0 ? void 0 : _b.totalEntries) !== null && _c !== void 0 ? _c : (_d = res.payload) === null || _d === void 0 ? void 0 : _d.length) !== null && _e !== void 0 ? _e : 0;
                spinner.success({ text: `Found ${total} queries` });
            }
            catch (e) {
                cliError(e.message || "Failed to list queries", EXIT_GENERAL_ERROR);
            }
        });
    // ─── ay agg save ────────────────────────────────────────────────
    agg
        .command("save")
        .description("Save a pipeline as a reusable query")
        .requiredOption("--name <name>", "Query name")
        .requiredOption("--model <model>", "Aggregation model name")
        .option("--pipeline <json>", "Pipeline as JSON array string")
        .option("--file <path>", "Read pipeline from a JSON file")
        .addHelpText("after", `
Examples:
  ay agg save --name "Monthly Revenue" --model orders --pipeline '[{"$group":{"_id":null,"total":{"$sum":"$amount"}}}]'
  ay agg save --name "Active Users" --model consumers --file pipeline.json`)
        .action(async (options) => {
            var _a, _b;
            try {
                const opts = { ...program.opts(), ...options };
                const pipeline = await readPipelineInput(opts);
                spinner.start({ text: "Saving query...", color: "magenta" });
                // Saved-query document shape expected by config-api.
                // Pipeline is stored as a JSON string, not a nested array.
                const body = {
                    name: opts.name,
                    dataSource: {
                        source: "aggregation",
                        aggregationModel: opts.model,
                        aggregationPipeline: JSON.stringify(pipeline),
                    },
                    cache: { enabled: false },
                };
                const res = await apiCallHandler("config", "queries", "post", body, {
                    responseFormat: opts.responseFormat,
                });
                handleResponseFormatOptions(opts, res);
                // Echo back whatever identifier the API assigned, for `agg exec`.
                const slug = ((_a = res.payload) === null || _a === void 0 ? void 0 : _a.slug) || ((_b = res.payload) === null || _b === void 0 ? void 0 : _b.name) || opts.name;
                spinner.success({ text: `Query saved — run with: ay agg exec ${slug}` });
            }
            catch (e) {
                cliError(e.message || "Failed to save query", EXIT_GENERAL_ERROR);
            }
        });
    // ─── ay agg validate ───────────────────────────────────────────
    agg
        .command("validate")
        .alias("check")
        .description("Validate a pipeline without executing")
        .option("--pipeline <json>", "Pipeline as JSON array string")
        .option("--file <path>", "Read pipeline from a JSON file")
        .action(async (options) => {
            var _a, _b, _c;
            try {
                const opts = { ...program.opts(), ...options };
                const pipeline = await readPipelineInput(opts);
                spinner.start({ text: "Validating pipeline...", color: "magenta" });
                const res = await apiCallHandler("aggregation", "models/validate", "post", { pipeline });
                spinner.stop();
                // Server may or may not wrap the validation result in a payload.
                const v = res.payload || res;
                const icon = v.valid ? chalk.green("✓") : chalk.red("✗");
                console.log(`\n ${icon} Pipeline is ${v.valid ? "valid" : "invalid"} (${(_a = v.stageCount) !== null && _a !== void 0 ? _a : pipeline.length} stages)`);
                if ((_b = v.errors) === null || _b === void 0 ? void 0 : _b.length) {
                    for (const err of v.errors)
                        console.log(chalk.red(` - ${err}`));
                }
                if ((_c = v.warnings) === null || _c === void 0 ? void 0 : _c.length) {
                    for (const w of v.warnings)
                        console.log(chalk.yellow(` ! ${w}`));
                }
                console.log();
                // Invalid pipeline is a non-zero exit for scripting use.
                if (!v.valid)
                    process.exit(EXIT_GENERAL_ERROR);
            }
            catch (e) {
                cliError(e.message || "Validation failed", EXIT_GENERAL_ERROR);
            }
        });
    // ─── ay agg models [model] ─────────────────────────────────────
    agg
        .command("models [model]")
        .description("List available models, or show fields for a specific model")
        .addHelpText("after", `
Examples:
  ay agg models              List all available models
  ay agg models consumers    Show fields for Consumers`)
        .action(async (model, options) => {
            var _a, _b, _c, _d, _e;
            try {
                const opts = { ...program.opts(), ...options };
                if (model) {
                    // Field listing for one model.
                    spinner.start({ text: `Loading fields for ${model}...`, color: "magenta" });
                    const res = await apiCallHandler("aggregation", `models/${model}/fields`, "get");
                    handleResponseFormatOptions(opts, res);
                    const fieldCount = (_c = (_b = (_a = res.payload) === null || _a === void 0 ? void 0 : _a.fields) === null || _b === void 0 ? void 0 : _b.length) !== null && _c !== void 0 ? _c : 0;
                    spinner.success({ text: `${model}: ${fieldCount} fields` });
                }
                else {
                    // No model argument: list all models.
                    spinner.start({ text: "Loading models...", color: "magenta" });
                    const res = await apiCallHandler("aggregation", "models", "get");
                    handleResponseFormatOptions(opts, res);
                    const count = (_e = (_d = res.payload) === null || _d === void 0 ? void 0 : _d.length) !== null && _e !== void 0 ? _e : 0;
                    spinner.success({ text: `${count} models available` });
                }
            }
            catch (e) {
                cliError(e.message || "Failed to load models", EXIT_GENERAL_ERROR);
            }
        });
    // ─── ay agg wizard ─────────────────────────────────────────────
    agg
        .command("wizard")
        .alias("wiz")
        .description("Interactive pipeline builder wizard")
        .action(async (options) => {
            var _a, _b, _c, _d, _e;
            try {
                const opts = { ...program.opts(), ...options };
                // inquirer prompts need a real terminal.
                if (!process.stdin.isTTY) {
                    cliError("Wizard requires an interactive terminal (TTY)", EXIT_MISUSE);
                }
                // Step 1: Select model
                spinner.start({ text: "Loading models...", color: "magenta" });
                const modelsRes = await apiCallHandler("aggregation", "models", "get");
                spinner.stop();
                const modelChoices = (modelsRes.payload || []).map((m) => ({
                    name: `${m.name} ${chalk.dim(`(${m.module})`)}`,
                    value: m.name,
                }));
                const { selectedModel } = await inquirer.prompt([
                    {
                        type: "search-list",
                        name: "selectedModel",
                        message: "Select a model:",
                        choices: modelChoices,
                    },
                ]);
                // Step 2: Fetch and display fields
                spinner.start({ text: `Loading fields for ${selectedModel}...`, color: "magenta" });
                const fieldsRes = await apiCallHandler("aggregation", `models/${selectedModel}/fields`, "get");
                spinner.stop();
                const fields = ((_a = fieldsRes.payload) === null || _a === void 0 ? void 0 : _a.fields) || [];
                const fieldNames = fields.map((f) => f.field);
                console.log(chalk.cyan(`\n Fields for ${selectedModel}:\n`));
                for (const f of fields) {
                    const ref = f.ref ? chalk.dim(` -> ${f.ref}`) : "";
                    const req = f.required ? chalk.yellow(" *") : "";
                    console.log(` ${chalk.white(f.field)} ${chalk.dim(`(${f.type})`)}${ref}${req}`);
                }
                console.log();
                // Step 3: Build stages iteratively
                const pipeline = [];
                let addMore = true;
                while (addMore) {
                    const { stageType } = await inquirer.prompt([
                        {
                            type: "list",
                            name: "stageType",
                            message: `Add stage ${pipeline.length + 1}:`,
                            choices: [
                                { name: "$match — Filter documents", value: "$match" },
                                { name: "$group — Group and aggregate", value: "$group" },
                                { name: "$sort — Sort results", value: "$sort" },
                                { name: "$project — Include/exclude fields", value: "$project" },
                                { name: "$unwind — Deconstruct array field", value: "$unwind" },
                                { name: "$lookup — Join with another collection", value: "$lookup" },
                                { name: "$limit — Limit results", value: "$limit" },
                                { name: "$skip — Skip results", value: "$skip" },
                                { name: "$count — Count documents", value: "$count" },
                                { name: "$addFields — Add computed fields", value: "$addFields" },
                                new inquirer.Separator(),
                                { name: "Raw JSON stage", value: "$raw" },
                                new inquirer.Separator(),
                                { name: chalk.green("Done — preview and execute"), value: "$done" },
                            ],
                        },
                    ]);
                    if (stageType === "$done") {
                        addMore = false;
                        break;
                    }
                    // buildStage returns null when the user enters invalid JSON.
                    const stage = await buildStage(stageType, fieldNames, fields);
                    if (stage) {
                        pipeline.push(stage);
                        console.log(chalk.dim(` Added: ${JSON.stringify(stage)}\n`));
                    }
                }
                if (pipeline.length === 0) {
                    cliError("Pipeline is empty — add at least one stage", EXIT_MISUSE);
                }
                // Step 4: Preview
                console.log(chalk.cyan("\n Pipeline:\n"));
                console.log(chalk.dim(" " + JSON.stringify(pipeline, null, 2).split("\n").join("\n ")));
                console.log();
                spinner.start({ text: "Running preview (first 5 results)...", color: "magenta" });
                // Preview caps results with a trailing $limit; the original
                // pipeline is left untouched for the real run.
                const previewPipeline = [...pipeline, { $limit: 5 }];
                const previewRes = await apiCallHandler("aggregation", selectedModel, "post", previewPipeline);
                spinner.stop();
                const previewWrapped = wrapAggResult(previewRes);
                handleResponseFormatOptions({ ...opts, responseFormat: "yaml" }, previewWrapped);
                // Step 5: Action choice
                const { action } = await inquirer.prompt([
                    {
                        type: "list",
                        name: "action",
                        message: "What would you like to do?",
                        choices: [
                            { name: "Execute full pipeline", value: "execute" },
                            { name: "Save as reusable query", value: "save" },
                            { name: "Execute and save", value: "both" },
                            { name: "Print pipeline JSON", value: "print" },
                            { name: "Cancel", value: "cancel" },
                        ],
                    },
                ]);
                if (action === "execute" || action === "both") {
                    spinner.start({ text: "Executing full pipeline...", color: "magenta" });
                    const res = await apiCallHandler("aggregation", selectedModel, "post", pipeline);
                    const wrapped = wrapAggResult(res);
                    handleResponseFormatOptions(opts, wrapped);
                    spinner.success({ text: `Pipeline returned ${(_c = (_b = wrapped.meta) === null || _b === void 0 ? void 0 : _b.resultCount) !== null && _c !== void 0 ? _c : "?"} results` });
                }
                if (action === "save" || action === "both") {
                    const { queryName } = await inquirer.prompt([
                        {
                            type: "input",
                            name: "queryName",
                            message: "Query name:",
                            validate: (v) => v.length > 0 || "Name is required",
                        },
                    ]);
                    spinner.start({ text: "Saving query...", color: "magenta" });
                    // Same saved-query document shape as `ay agg save`.
                    const body = {
                        name: queryName,
                        dataSource: {
                            source: "aggregation",
                            aggregationModel: selectedModel,
                            aggregationPipeline: JSON.stringify(pipeline),
                        },
                        cache: { enabled: false },
                    };
                    const saveRes = await apiCallHandler("config", "queries", "post", body, {
                        responseFormat: "json",
                    });
                    const slug = ((_d = saveRes.payload) === null || _d === void 0 ? void 0 : _d.slug) || ((_e = saveRes.payload) === null || _e === void 0 ? void 0 : _e.name) || queryName;
                    spinner.success({ text: `Query saved — run with: ay agg exec ${slug}` });
                }
                if (action === "print") {
                    console.log(JSON.stringify(pipeline, null, 2));
                }
                // "cancel" falls through with no action.
            }
            catch (e) {
                cliError(e.message || "Wizard failed", EXIT_GENERAL_ERROR);
            }
        });
}
|
|
389
|
+
// ─── Stage Builders ────────────────────────────────────────────────
|
|
390
|
+
/**
 * Dispatch to the interactive builder for a given pipeline stage type.
 * Returns the built stage object, or null for an unrecognized type.
 */
async function buildStage(stageType, fieldNames, fields) {
    // Thunks keep sibling builders lazily referenced — only the selected
    // one is invoked.
    const builders = {
        $match: () => buildMatchStage(fieldNames, fields),
        $group: () => buildGroupStage(fieldNames),
        $sort: () => buildSortStage(fieldNames),
        $project: () => buildProjectStage(fieldNames),
        $unwind: () => buildUnwindStage(fieldNames),
        $lookup: () => buildLookupStage(),
        $limit: () => buildNumberStage("$limit", "Maximum documents to return:"),
        $skip: () => buildNumberStage("$skip", "Documents to skip:"),
        $count: () => buildCountStage(),
        $addFields: () => buildRawStage("$addFields"),
        $raw: () => buildFullRawStage(),
    };
    if (!Object.hasOwn(builders, stageType)) {
        return null;
    }
    return builders[stageType]();
}
|
|
418
|
+
/**
 * Interactively build a $match stage: repeatedly prompt for a field,
 * an operator, and a value until the user declines to add more.
 * Values are type-coerced using the field metadata when available.
 */
async function buildMatchStage(fieldNames, fields) {
    const match = {};
    let addMore = true;
    while (addMore) {
        const { field } = await inquirer.prompt([
            { type: "search-list", name: "field", message: "Field to match:", choices: fieldNames },
        ]);
        // Field metadata drives value coercion; may be undefined for
        // fields not present in the model description.
        const fieldMeta = fields.find((f) => f.field === field);
        const isNumber = (fieldMeta === null || fieldMeta === void 0 ? void 0 : fieldMeta.type) === "Number";
        const isDate = (fieldMeta === null || fieldMeta === void 0 ? void 0 : fieldMeta.type) === "Date";
        const isBool = (fieldMeta === null || fieldMeta === void 0 ? void 0 : fieldMeta.type) === "Boolean";
        const { operator } = await inquirer.prompt([
            {
                type: "list",
                name: "operator",
                message: "Operator:",
                choices: [
                    // "eq" is a sentinel for plain equality (no Mongo operator
                    // wrapper); the $-prefixed values map directly to Mongo.
                    { name: "equals", value: "eq" },
                    { name: "not equals", value: "$ne" },
                    { name: "greater than", value: "$gt" },
                    { name: "less than", value: "$lt" },
                    { name: "greater or equal", value: "$gte" },
                    { name: "less or equal", value: "$lte" },
                    { name: "in (comma-separated)", value: "$in" },
                    { name: "not in", value: "$nin" },
                    { name: "regex", value: "$regex" },
                    { name: "exists", value: "$exists" },
                ],
            },
        ]);
        let value;
        if (operator === "$exists") {
            // $exists takes a boolean, prompted via confirm.
            const { exists } = await inquirer.prompt([
                { type: "confirm", name: "exists", message: "Field should exist?", default: true },
            ]);
            value = exists;
            match[field] = { $exists: value };
        }
        else if (isBool) {
            // Boolean fields are prompted via confirm rather than free text.
            const { boolVal } = await inquirer.prompt([
                { type: "confirm", name: "boolVal", message: `${field} =`, default: true },
            ]);
            match[field] = operator === "eq" ? boolVal : { [operator]: boolVal };
        }
        else if (operator === "$in" || operator === "$nin") {
            // Comma-separated list, each element individually coerced.
            const { vals } = await inquirer.prompt([
                { type: "input", name: "vals", message: "Values (comma-separated):" },
            ]);
            const arr = vals.split(",").map((v) => coerceValue(v.trim(), isNumber, isDate));
            match[field] = { [operator]: arr };
        }
        else {
            const { val } = await inquirer.prompt([
                { type: "input", name: "val", message: "Value:" },
            ]);
            const coerced = coerceValue(val, isNumber, isDate);
            // Plain equality stores the bare value; other operators wrap it.
            match[field] = operator === "eq" ? coerced : { [operator]: coerced };
        }
        const { more } = await inquirer.prompt([
            { type: "confirm", name: "more", message: "Add another match condition?", default: false },
        ]);
        addMore = more;
    }
    return { $match: match };
}
|
|
483
|
+
/**
 * Interactively build a $group stage: choose the grouping key
 * (single field, multiple fields, or null), then add one or more
 * accumulator output fields.
 */
async function buildGroupStage(fieldNames) {
    const { idType } = await inquirer.prompt([
        {
            type: "list",
            name: "idType",
            message: "Group by:",
            choices: [
                { name: "Single field", value: "single" },
                { name: "Multiple fields", value: "multi" },
                { name: "null (aggregate all)", value: "null" },
            ],
        },
    ]);
    // null groups the whole collection into one bucket.
    let _id = null;
    if (idType === "single") {
        const { field } = await inquirer.prompt([
            { type: "search-list", name: "field", message: "Group by field:", choices: fieldNames },
        ]);
        _id = `$${field}`;
    }
    else if (idType === "multi") {
        const { selected } = await inquirer.prompt([
            { type: "checkbox", name: "selected", message: "Select fields:", choices: fieldNames },
        ]);
        // Compound key: { fieldName: "$fieldName", ... }
        _id = {};
        for (const f of selected)
            _id[f] = `$${f}`;
    }
    const group = { _id };
    // Collect accumulators until the user stops.
    let addAcc = true;
    while (addAcc) {
        const { outputName } = await inquirer.prompt([
            { type: "input", name: "outputName", message: "Output field name:", validate: (v) => v.length > 0 },
        ]);
        const { accumulator } = await inquirer.prompt([
            {
                type: "list",
                name: "accumulator",
                message: "Accumulator:",
                choices: ["$sum", "$avg", "$min", "$max", "$count", "$push", "$first", "$last"],
            },
        ]);
        if (accumulator === "$count") {
            // $count inside $group takes an empty-object operand.
            group[outputName] = { $count: {} };
        }
        else {
            // "1 (constant)" lets e.g. $sum count documents instead of
            // summing a field.
            const { srcField } = await inquirer.prompt([
                { type: "search-list", name: "srcField", message: "Source field:", choices: ["1 (constant)", ...fieldNames] },
            ]);
            group[outputName] = { [accumulator]: srcField === "1 (constant)" ? 1 : `$${srcField}` };
        }
        const { more } = await inquirer.prompt([
            { type: "confirm", name: "more", message: "Add another accumulator?", default: false },
        ]);
        addAcc = more;
    }
    return { $group: group };
}
|
|
541
|
+
/**
 * Interactively build a $sort stage: collect { field: 1 | -1 } pairs
 * until the user declines to add another.
 */
async function buildSortStage(fieldNames) {
    const sortSpec = {};
    let keepAdding = true;
    do {
        const fieldAnswer = await inquirer.prompt([
            { type: "search-list", name: "field", message: "Sort by field:", choices: fieldNames },
        ]);
        const directionAnswer = await inquirer.prompt([
            {
                type: "list",
                name: "direction",
                message: "Direction:",
                choices: [
                    { name: "Ascending (1)", value: 1 },
                    { name: "Descending (-1)", value: -1 },
                ],
            },
        ]);
        sortSpec[fieldAnswer.field] = directionAnswer.direction;
        const continueAnswer = await inquirer.prompt([
            { type: "confirm", name: "more", message: "Add another sort field?", default: false },
        ]);
        keepAdding = continueAnswer.more;
    } while (keepAdding);
    return { $sort: sortSpec };
}
|
|
567
|
+
/**
 * Interactively build a $project stage: pick include/exclude mode,
 * then the fields, and emit { field: 1 } or { field: 0 } entries.
 */
async function buildProjectStage(fieldNames) {
    const { mode } = await inquirer.prompt([
        {
            type: "list",
            name: "mode",
            message: "Projection mode:",
            choices: [
                { name: "Include selected fields (1)", value: "include" },
                { name: "Exclude selected fields (0)", value: "exclude" },
            ],
        },
    ]);
    const { selected } = await inquirer.prompt([
        {
            type: "checkbox",
            name: "selected",
            message: `Fields to ${mode}:`,
            choices: fieldNames,
            validate: (v) => v.length > 0 || "Select at least one field",
        },
    ]);
    const flag = mode === "include" ? 1 : 0;
    const projection = Object.fromEntries(selected.map((fieldName) => [fieldName, flag]));
    return { $project: projection };
}
|
|
593
|
+
/**
 * Prompt for an array field and emit the shorthand $unwind stage form.
 */
async function buildUnwindStage(fieldNames) {
    const answer = await inquirer.prompt([
        { type: "search-list", name: "field", message: "Array field to unwind:", choices: fieldNames },
    ]);
    return { $unwind: `$${answer.field}` };
}
|
|
599
|
+
/**
 * Prompt for the four $lookup parameters. The answer keys deliberately
 * match the operator spec (from/localField/foreignField/as), so the
 * answers object is used as the stage body directly.
 */
async function buildLookupStage() {
    const questions = [
        { type: "input", name: "from", message: "Foreign collection (lowercase):" },
        { type: "input", name: "localField", message: "Local field:" },
        { type: "input", name: "foreignField", message: "Foreign field:", default: "_id" },
        { type: "input", name: "as", message: "Output array field name:" },
    ];
    const lookupSpec = await inquirer.prompt(questions);
    return { $lookup: lookupSpec };
}
|
|
608
|
+
/**
 * Prompt for an integer and build a single-number stage such as
 * { $limit: n } or { $skip: n }.
 * @param {string} op - Stage operator ("$limit" or "$skip").
 * @param {string} message - Prompt text shown to the user.
 * @returns {Promise<object>} Stage object keyed by `op`.
 */
async function buildNumberStage(op, message) {
    const { num } = await inquirer.prompt([
        // Explicit radix 10 and Number.isNaN avoid legacy hex parsing and
        // the coercing global isNaN on user input.
        { type: "input", name: "num", message, validate: (v) => !Number.isNaN(Number.parseInt(v, 10)) || "Must be a number" },
    ]);
    return { [op]: Number.parseInt(num, 10) };
}
|
|
614
|
+
/**
 * Prompt for the output field name (default "count") and emit a
 * top-level $count stage.
 */
async function buildCountStage() {
    const answer = await inquirer.prompt([
        { type: "input", name: "field", message: "Count output field name:", default: "count" },
    ]);
    return { $count: answer.field };
}
|
|
620
|
+
/**
 * Prompt for a raw JSON body for the given operator (e.g. $addFields).
 * Invalid JSON prints a warning and skips the stage by returning null.
 */
async function buildRawStage(op) {
    const { json } = await inquirer.prompt([
        { type: "input", name: "json", message: `${op} body (JSON):` },
    ]);
    let parsedBody;
    try {
        parsedBody = JSON.parse(json);
    }
    catch (_a) {
        console.log(chalk.red(" Invalid JSON — stage skipped"));
        return null;
    }
    return { [op]: parsedBody };
}
|
|
632
|
+
/**
 * Prompt for an entire pipeline stage as raw JSON. Invalid JSON prints
 * a warning and skips the stage by returning null.
 */
async function buildFullRawStage() {
    const { json } = await inquirer.prompt([
        { type: "input", name: "json", message: "Full stage (JSON object):" },
    ]);
    let stage = null;
    try {
        stage = JSON.parse(json);
    }
    catch (_a) {
        console.log(chalk.red(" Invalid JSON — stage skipped"));
    }
    return stage;
}
|
|
644
|
+
/**
 * Coerce a raw prompt string into a typed value. Literal "true"/"false"/
 * "null" take precedence over any type hint; then the Number/Date hints
 * apply; otherwise the string is returned unchanged.
 */
function coerceValue(val, isNumber, isDate) {
    switch (val) {
        case "true":
            return true;
        case "false":
            return false;
        case "null":
            return null;
        default:
            break;
    }
    if (isNumber) {
        const numeric = Number(val);
        if (!Number.isNaN(numeric)) {
            return numeric;
        }
    }
    if (isDate && !Number.isNaN(Date.parse(val))) {
        // Extended-JSON style date wrapper, resolved server-side.
        return { $date: val };
    }
    return val;
}
|
|
@@ -0,0 +1,307 @@
|
|
|
1
|
+
import chalk from "chalk";
|
|
2
|
+
import { spinner } from "../../index.js";
|
|
3
|
+
import { EXIT_GENERAL_ERROR, EXIT_MISUSE } from "../exitCodes.js";
|
|
4
|
+
import { cliError } from "../helpers/cliError.js";
|
|
5
|
+
import { handleResponseFormatOptions } from "../helpers/handleResponseFormatOptions.js";
|
|
6
|
+
import { executeCopy, getDbStats } from "../db/copyEngine.js";
|
|
7
|
+
import { addCopyConfig, loadCopyConfigs, removeCopyConfig, getDecryptedConfig, updateCopyConfigLastRun, maskUri, } from "../db/copyConfigStore.js";
|
|
8
|
+
import { isCronDue, getNextRun, validateCron } from "../db/cronMatcher.js";
|
|
9
|
+
/**
 * Generates a short, collision-unlikely identifier for a schedule entry.
 * Combines a random base-36 fragment with the current timestamp in base 36.
 * Not cryptographically secure — suitable only for local config IDs.
 * @returns {string} A lowercase alphanumeric identifier.
 */
function generateId() {
    const randomPart = Math.random().toString(36).substring(2, 10);
    const timePart = Date.now().toString(36);
    return randomPart + timePart;
}
|
|
12
|
+
/**
 * Resolves the source MongoDB URI for a copy operation.
 * Precedence: --from flag, then MONGO_CONNECTSTRING, then MONGODB_URI.
 * Exits via cliError (EXIT_MISUSE) when nothing is configured or the value
 * is not a mongodb:// / mongodb+srv:// URI.
 * @param {object} opts - Parsed command options (reads `opts.from`).
 * @returns {string} The resolved source URI.
 */
function getSourceUri(opts) {
    const candidate = opts.from || process.env.MONGO_CONNECTSTRING || process.env.MONGODB_URI;
    if (!candidate) {
        cliError("No source database configured. Set MONGO_CONNECTSTRING env var or use --from <uri>", EXIT_MISUSE);
    }
    if (!candidate.startsWith("mongodb")) {
        cliError("Source URI must be a MongoDB URI (mongodb:// or mongodb+srv://)", EXIT_MISUSE);
    }
    return candidate;
}
|
|
22
|
+
/**
 * Formats a byte count into a human-readable string with one decimal place.
 *
 * Fix: the computed unit index is clamped into the unit table. Previously,
 * values >= 1 PB indexed past the end of `units` ("… undefined") and
 * sub-byte fractions produced a negative index.
 *
 * @param {number} bytes - Non-negative byte count.
 * @returns {string} e.g. "0 B", "1.5 KB", "2.0 GB".
 */
function formatBytes(bytes) {
    if (bytes === 0)
        return "0 B";
    const units = ["B", "KB", "MB", "GB", "TB"];
    const rawIndex = Math.floor(Math.log(bytes) / Math.log(1024));
    // Clamp so very large and sub-byte values stay within the unit table.
    const i = Math.min(units.length - 1, Math.max(0, rawIndex));
    return (bytes / Math.pow(1024, i)).toFixed(1) + " " + units[i];
}
|
|
29
|
+
/**
 * Registers the `ay db` command group on the commander program:
 *   - `db copy`       copy collections between two MongoDB instances,
 *                     optionally saved as a cron-scheduled job
 *   - `db schedules`  list / remove / run saved scheduled copies
 *   - `db stats`      per-collection document counts and sizes
 *
 * The source database defaults to the MONGO_CONNECTSTRING env var
 * (falling back to MONGODB_URI via getSourceUri).
 *
 * @param {import("commander").Command} program - Root CLI program.
 */
export function createDbCommand(program) {
    const db = program
        .command("db")
        .description("MongoDB database operations (copy, stats, scheduled replication)")
        .addHelpText("after", `
Source is automatically resolved from MONGO_CONNECTSTRING env var.

Examples:
  ay db copy --to "mongodb://backup-host/ayoune" --collections "users,orders"
  ay db copy --to "mongodb://..." --all --schedule "0 */6 * * *"
  ay db copy --from "mongodb://other/db" --to "mongodb://..." --collections "logs"
  ay db schedules list
  ay db stats`);
    // ─── ay db copy ─────────────────────────────────────────────────
    db.command("copy")
        .description("Copy data from aYOUne database to another MongoDB instance")
        .requiredOption("--to <uri>", "Target MongoDB connection URI")
        .option("--from <uri>", "Source URI override (default: MONGO_CONNECTSTRING env var)")
        .option("--collections <list>", "Comma-separated collection names")
        .option("--all", "Copy all collections")
        .option("--query <json>", "JSON filter query for source documents")
        .option("--drop", "Drop target collection before copying")
        .option("--upsert", "Upsert documents by _id (idempotent, safe to re-run)")
        .option("--batch-size <number>", "Documents per batch", parseInt, 1000)
        .option("--schedule <cron>", "Save as scheduled copy (cron expression) instead of executing")
        .addHelpText("after", `
Source defaults to MONGO_CONNECTSTRING env var (the aYOUne database).

Examples:
  ay db copy --to "mongodb://backup-host/ayoune" --collections "users,orders"
  ay db copy --to "mongodb://..." --all --drop
  ay db copy --to "mongodb://..." --collections "logs" --query '{"status":"active"}'
  ay db copy --to "mongodb://..." --all --upsert --schedule "0 */6 * * *"
  ay db copy --from "mongodb://other/db" --to "mongodb://..." --collections "data"`)
        .action(async (options) => {
        try {
            // Merge global flags (e.g. --response-format) with subcommand flags.
            const opts = { ...program.opts(), ...options };
            // Validate input: exactly one of --collections / --all is required.
            if (!opts.collections && !opts.all) {
                cliError("Provide --collections or --all", EXIT_MISUSE);
            }
            if (opts.collections && opts.all) {
                cliError("Use either --collections or --all, not both", EXIT_MISUSE);
            }
            const fromUri = getSourceUri(opts);
            if (!opts.to.startsWith("mongodb")) {
                cliError("--to must be a MongoDB URI (mongodb:// or mongodb+srv://)", EXIT_MISUSE);
            }
            let query;
            if (opts.query) {
                try {
                    query = JSON.parse(opts.query);
                }
                catch (_a) {
                    cliError("--query must be valid JSON", EXIT_MISUSE);
                }
            }
            // "*" is the sentinel executeCopy expands to "all collections".
            const collections = opts.all ? ["*"] : opts.collections.split(",").map((c) => c.trim());
            // Schedule mode: persist the job config instead of copying now.
            if (opts.schedule) {
                const cronError = validateCron(opts.schedule);
                if (cronError)
                    cliError(`Invalid cron expression: ${cronError}`, EXIT_MISUSE);
                const config = {
                    id: generateId(),
                    createdAt: new Date().toISOString(),
                    // Masked display copies; real URIs stored separately below.
                    from: maskUri(fromUri),
                    to: maskUri(opts.to),
                    fromUri: fromUri, // Will be encrypted by addCopyConfig
                    toUri: opts.to,
                    collections,
                    query,
                    drop: opts.drop || false,
                    upsert: opts.upsert || false,
                    batchSize: opts.batchSize,
                    schedule: opts.schedule,
                };
                addCopyConfig(config);
                const nextRun = getNextRun(opts.schedule);
                console.log(chalk.green(`\n Scheduled copy saved (ID: ${config.id})`));
                console.log(chalk.dim(` Source: ${config.from}`));
                console.log(chalk.dim(` Target: ${config.to}`));
                console.log(chalk.dim(` Collections: ${collections.join(", ")}`));
                console.log(chalk.dim(` Schedule: ${opts.schedule}`));
                if (nextRun)
                    console.log(chalk.dim(` Next run: ${nextRun.toISOString()}`));
                console.log();
                // The CLI does not daemonize; an OS-level cron/task must
                // periodically invoke `ay db schedules run`.
                console.log(chalk.yellow(" Set up a cron job to run scheduled copies:"));
                console.log(chalk.dim(" Linux/macOS: crontab -e → */5 * * * * ay db schedules run"));
                console.log(chalk.dim(" Windows: schtasks /create /tn ayoune-db-sync /tr \"ay db schedules run\" /sc minute /mo 5"));
                console.log();
                return;
            }
            // Execute copy
            spinner.start({ text: `Copying from ${maskUri(fromUri)} to ${maskUri(opts.to)}...`, color: "cyan" });
            const summary = await executeCopy(fromUri, opts.to, collections, {
                query,
                drop: opts.drop,
                upsert: opts.upsert,
                batchSize: opts.batchSize,
            }, (progress) => {
                spinner.update({
                    text: `${progress.collection}: ${progress.copied}/${progress.total} docs${progress.errors ? chalk.red(` (${progress.errors} errors)`) : ""}`,
                });
            });
            spinner.stop();
            // Display summary
            const wrapped = {
                payload: summary,
                meta: {
                    totalCopied: summary.totalCopied,
                    totalErrors: summary.totalErrors,
                    duration: summary.duration,
                },
            };
            if (opts.responseFormat === "json" || opts.responseFormat === "yaml") {
                handleResponseFormatOptions(opts, wrapped);
            }
            else {
                console.log(chalk.cyan.bold("\n Copy Summary\n"));
                for (const col of summary.collections) {
                    const icon = col.errors === 0 ? chalk.green("●") : chalk.yellow("●");
                    console.log(` ${icon} ${col.name}: ${col.copied} docs copied${col.errors ? chalk.red(` (${col.errors} errors)`) : ""} ${chalk.dim(`(${col.duration}ms)`)}`);
                }
                console.log();
                console.log(` ${chalk.green(summary.totalCopied + " total")}${summary.totalErrors ? ` ${chalk.red(summary.totalErrors + " errors")}` : ""} ${chalk.dim(`in ${summary.duration}ms`)}`);
                console.log();
            }
            // Partial failure still exits non-zero so scripts can detect it.
            if (summary.totalErrors > 0)
                process.exit(EXIT_GENERAL_ERROR);
        }
        catch (e) {
            cliError(e.message || "Copy failed", EXIT_GENERAL_ERROR);
        }
    });
    // ─── ay db schedules ────────────────────────────────────────────
    const schedules = db.command("schedules").alias("sched").description("Manage scheduled copy jobs");
    // ay db schedules list
    schedules
        .command("list")
        .alias("ls")
        .description("List all scheduled copy jobs")
        .action(async (options) => {
        try {
            const opts = { ...program.opts(), ...options };
            const configs = loadCopyConfigs();
            if (configs.length === 0) {
                console.log(chalk.dim("\n No scheduled copies configured.\n"));
                return;
            }
            // displayConfig flattens each config into a table-friendly row.
            const wrapped = { payload: configs.map(displayConfig), meta: { total: configs.length } };
            handleResponseFormatOptions(opts, wrapped);
        }
        catch (e) {
            cliError(e.message || "Failed to list schedules", EXIT_GENERAL_ERROR);
        }
    });
    // ay db schedules remove <id>
    schedules
        .command("remove <id>")
        .alias("rm")
        .description("Remove a scheduled copy job")
        .action(async (id) => {
        const removed = removeCopyConfig(id);
        if (removed) {
            spinner.success({ text: `Schedule ${id} removed` });
        }
        else {
            cliError(`No schedule found with ID "${id}"`, EXIT_GENERAL_ERROR);
        }
    });
    // ay db schedules run
    schedules
        .command("run")
        .description("Execute all copy jobs that are due now")
        .option("--id <id>", "Run a specific schedule by ID")
        .action(async (options) => {
        try {
            const configs = loadCopyConfigs();
            const now = new Date();
            // --id forces a specific job regardless of its cron schedule.
            const toRun = options.id
                ? configs.filter((c) => c.id === options.id)
                : configs.filter((c) => isCronDue(c.schedule, c.lastRun, now));
            if (toRun.length === 0) {
                console.log(chalk.dim("\n No copies due.\n"));
                return;
            }
            console.log(chalk.cyan(`\n Running ${toRun.length} scheduled copies...\n`));
            for (const config of toRun) {
                const { fromUri, toUri } = getDecryptedConfig(config);
                console.log(chalk.dim(` ${config.id}: ${config.from} → ${config.to}`));
                try {
                    spinner.start({ text: `${config.id}: copying...`, color: "cyan" });
                    const summary = await executeCopy(fromUri, toUri, config.collections, {
                        query: config.query,
                        drop: config.drop,
                        upsert: config.upsert,
                        batchSize: config.batchSize,
                    }, (progress) => {
                        spinner.update({
                            text: `${config.id}: ${progress.collection} ${progress.copied}/${progress.total}`,
                        });
                    });
                    updateCopyConfigLastRun(config.id, "success");
                    spinner.success({ text: `${config.id}: ${summary.totalCopied} docs copied${summary.totalErrors ? `, ${summary.totalErrors} errors` : ""}` });
                }
                catch (e) {
                    // One failed job does not abort the remaining ones.
                    updateCopyConfigLastRun(config.id, "failed", e.message);
                    spinner.error({ text: `${config.id}: ${e.message}` });
                }
            }
            console.log();
        }
        catch (e) {
            cliError(e.message || "Scheduled run failed", EXIT_GENERAL_ERROR);
        }
    });
    // ─── ay db stats [uri] ──────────────────────────────────────────
    db.command("stats [uri]")
        .description("Show database statistics (defaults to aYOUne database)")
        .addHelpText("after", `
Defaults to MONGO_CONNECTSTRING (the aYOUne database) when no URI is given.

Examples:
  ay db stats                       Stats for aYOUne database
  ay db stats "mongodb://other/db"  Stats for a different database
  ay db stats -r table`)
        .action(async (uri, options) => {
        try {
            const opts = { ...program.opts(), ...options };
            const dbUri = uri || process.env.MONGO_CONNECTSTRING || process.env.MONGODB_URI;
            if (!dbUri) {
                cliError("No database URI. Set MONGO_CONNECTSTRING env var or pass a URI argument.", EXIT_MISUSE);
            }
            if (!dbUri.startsWith("mongodb")) {
                cliError("URI must start with mongodb:// or mongodb+srv://", EXIT_MISUSE);
            }
            spinner.start({ text: `Connecting to ${maskUri(dbUri)}...`, color: "cyan" });
            const stats = await getDbStats(dbUri);
            spinner.stop();
            const wrapped = {
                payload: stats,
                meta: {
                    database: stats.database,
                    collectionCount: stats.collections.length,
                    totalSize: formatBytes(stats.totalSize),
                },
            };
            if (opts.responseFormat === "json" || opts.responseFormat === "yaml") {
                handleResponseFormatOptions(opts, wrapped);
            }
            else {
                console.log(chalk.cyan.bold(`\n Database: ${stats.database}\n`));
                console.log(` ${chalk.dim("Total size:")} ${formatBytes(stats.totalSize)}`);
                console.log(` ${chalk.dim("Collections:")} ${stats.collections.length}\n`);
                for (const col of stats.collections) {
                    console.log(` ${chalk.white(col.name)}: ${col.documents.toLocaleString()} docs ${chalk.dim(`(${formatBytes(col.size)})`)}`);
                }
                console.log();
            }
        }
        catch (e) {
            cliError(e.message || "Failed to get stats", EXIT_GENERAL_ERROR);
        }
    });
}
|
|
295
|
+
/**
 * Flattens a stored copy-schedule config into a display-friendly row
 * (string fields only) for list output.
 * @param {object} config - A schedule entry from loadCopyConfigs().
 * @returns {object} Row with id/from/to/collections/schedule/nextRun/lastRun/lastStatus.
 */
function displayConfig(config) {
    const upcoming = getNextRun(config.schedule);
    const row = {
        id: config.id,
        from: config.from,
        to: config.to,
        collections: config.collections.join(", "),
        schedule: config.schedule,
        nextRun: upcoming ? upcoming.toISOString() : "—",
        lastRun: config.lastRun || "never",
        lastStatus: config.lastStatus || "—",
    };
    return row;
}
|
|
@@ -47,6 +47,8 @@ import { createStatusCommand } from "./createStatusCommand.js";
|
|
|
47
47
|
import { createSelfHostUpdateCommand } from "./createSelfHostUpdateCommand.js";
|
|
48
48
|
import { createContextCommand } from "./createContextCommand.js";
|
|
49
49
|
import { createAccessCommand } from "./createAccessCommand.js";
|
|
50
|
+
import { createAggregateCommand } from "./createAggregateCommand.js";
|
|
51
|
+
import { createDbCommand } from "./createDbCommand.js";
|
|
50
52
|
import { secureStorage } from "../helpers/secureStorage.js";
|
|
51
53
|
import { login } from "../api/login.js";
|
|
52
54
|
import { loadConfig } from "../helpers/configLoader.js";
|
|
@@ -121,6 +123,8 @@ export function createProgram(program) {
|
|
|
121
123
|
createPermissionsCommand(program);
|
|
122
124
|
createTemplateCommand(program);
|
|
123
125
|
createAccessCommand(program);
|
|
126
|
+
createAggregateCommand(program);
|
|
127
|
+
createDbCommand(program);
|
|
124
128
|
createSetupCommand(program);
|
|
125
129
|
createStatusCommand(program);
|
|
126
130
|
createSelfHostUpdateCommand(program);
|
|
@@ -200,7 +204,7 @@ export function createProgram(program) {
|
|
|
200
204
|
}
|
|
201
205
|
// First-run onboarding: auto-login if no token stored
|
|
202
206
|
const cmdName = thisCommand.name();
|
|
203
|
-
const skipAuth = ["login", "logout", "whoami", "completions", "alias", "config", "help", "setup", "status", "self-host-update", "context"];
|
|
207
|
+
const skipAuth = ["login", "logout", "whoami", "completions", "alias", "config", "help", "setup", "status", "self-host-update", "context", "db"];
|
|
204
208
|
if (!skipAuth.includes(cmdName) && process.stdin.isTTY) {
|
|
205
209
|
const token = secureStorage.getItem("token");
|
|
206
210
|
if (!token) {
|
|
@@ -0,0 +1,88 @@
|
|
|
1
|
+
import { readFileSync, writeFileSync, existsSync, mkdirSync } from "fs";
|
|
2
|
+
import path from "path";
|
|
3
|
+
import os from "os";
|
|
4
|
+
import crypto from "crypto";
|
|
5
|
+
const CONFIG_PATH = path.join(os.homedir(), ".config", "ayoune", "db-copies.json");
|
|
6
|
+
const ALGORITHM = "aes-256-cbc";
|
|
7
|
+
/**
 * Derives a 32-byte AES key from stable machine identity (hostname + username).
 * Deterministic per machine/user: the same host always derives the same key,
 * so configs encrypted here can only be decrypted on the same machine.
 * NOTE: this is obfuscation, not strong secret storage — the key material is
 * guessable by anyone with local access.
 * @returns {Buffer} 32-byte SHA-256 digest usable as an AES-256 key.
 */
function deriveKey() {
    const material = ["ayoune-cli", os.hostname(), os.userInfo().username].join(":");
    return crypto.createHash("sha256").update(material).digest();
}
|
|
11
|
+
/**
 * Encrypts a string with AES-256-CBC under the machine-derived key.
 * A fresh random IV is generated per call and prefixed to the ciphertext.
 * @param {string} text - Plaintext (e.g. a connection URI).
 * @returns {string} "<iv hex>:<ciphertext hex>".
 */
function encrypt(text) {
    const iv = crypto.randomBytes(16);
    const cipher = crypto.createCipheriv(ALGORITHM, deriveKey(), iv);
    const body = cipher.update(text, "utf8", "hex") + cipher.final("hex");
    return `${iv.toString("hex")}:${body}`;
}
|
|
19
|
+
/**
 * Decrypts a "<iv hex>:<ciphertext hex>" string produced by encrypt().
 * Falls back to returning the input unchanged when it does not look
 * encrypted or when decryption fails (e.g. a legacy plaintext URI, or a
 * config copied from a different machine whose derived key differs).
 * @param {string} data - Possibly-encrypted string.
 * @returns {string} The plaintext, or the original input on failure.
 */
function decrypt(data) {
    const [ivHex, encrypted] = data.split(":");
    if (!ivHex || !encrypted) {
        return data;
    }
    try {
        const decipher = crypto.createDecipheriv(ALGORITHM, deriveKey(), Buffer.from(ivHex, "hex"));
        return decipher.update(encrypted, "hex", "utf8") + decipher.final("utf8");
    }
    catch (_err) {
        return data;
    }
}
|
|
35
|
+
/**
 * Masks credentials in a connection URI for safe display/logging.
 *
 * Fix: the original only handled the "user:password@" form, leaving
 * "mongodb://user@host" (username-only credentials, e.g. X.509 auth)
 * unmasked. Output for the user:password form is unchanged.
 *
 * @param {string} uri - Connection URI, possibly containing credentials.
 * @returns {string} URI with credentials replaced by "***".
 */
export function maskUri(uri) {
    const masked = uri.replace(/:\/\/([^:@/]+):([^@]+)@/, "://***:***@");
    if (masked !== uri)
        return masked;
    // Username-only credentials (no password segment).
    return uri.replace(/:\/\/([^@/]+)@/, "://***@");
}
|
|
38
|
+
/**
 * Loads all saved copy-schedule configs from the user config file.
 * Missing or unreadable/corrupt files yield an empty list rather than
 * throwing, so a damaged config never blocks the CLI.
 * @returns {object[]} Array of stored config entries (possibly empty).
 */
export function loadCopyConfigs() {
    if (!existsSync(CONFIG_PATH)) {
        return [];
    }
    try {
        const raw = readFileSync(CONFIG_PATH, "utf-8");
        return JSON.parse(raw);
    }
    catch (_err) {
        return [];
    }
}
|
|
48
|
+
/**
 * Persists the full list of copy-schedule configs to the user config file,
 * creating the parent directory on first use.
 * @param {object[]} configs - Complete list to write (replaces the file).
 */
export function saveCopyConfigs(configs) {
    const dir = path.dirname(CONFIG_PATH);
    if (!existsSync(dir)) {
        mkdirSync(dir, { recursive: true });
    }
    const serialized = JSON.stringify(configs, null, 2);
    writeFileSync(CONFIG_PATH, serialized, "utf-8");
}
|
|
54
|
+
/**
 * Appends a new copy-schedule config to the store, encrypting both
 * connection URIs before they touch disk.
 *
 * Fix: the original mutated the caller's `config` object in place,
 * silently replacing its plaintext URIs with ciphertext. A shallow copy
 * is stored instead, leaving the caller's object untouched.
 *
 * @param {object} config - Schedule entry with plaintext `fromUri`/`toUri`.
 */
export function addCopyConfig(config) {
    const configs = loadCopyConfigs();
    const stored = {
        ...config,
        fromUri: encrypt(config.fromUri),
        toUri: encrypt(config.toUri),
    };
    configs.push(stored);
    saveCopyConfigs(configs);
}
|
|
62
|
+
/**
 * Removes a copy-schedule config by ID.
 * @param {string} id - Schedule identifier.
 * @returns {boolean} true when an entry was removed, false when not found.
 */
export function removeCopyConfig(id) {
    const configs = loadCopyConfigs();
    const remaining = configs.filter((entry) => entry.id !== id);
    const found = remaining.length !== configs.length;
    if (!found) {
        return false;
    }
    saveCopyConfigs(remaining);
    return true;
}
|
|
70
|
+
/**
 * Returns the plaintext connection URIs for a stored schedule config.
 * @param {object} config - Stored entry with encrypted `fromUri`/`toUri`.
 * @returns {{fromUri: string, toUri: string}} Decrypted URIs.
 */
export function getDecryptedConfig(config) {
    const fromUri = decrypt(config.fromUri);
    const toUri = decrypt(config.toUri);
    return { fromUri, toUri };
}
|
|
76
|
+
/**
 * Records the outcome of a scheduled copy run on its stored config:
 * sets lastRun (now, ISO) and lastStatus, and sets or clears lastError.
 * A no-op when the ID is unknown.
 * @param {string} id - Schedule identifier.
 * @param {string} status - Outcome label, e.g. "success" or "failed".
 * @param {string} [error] - Error message for failed runs.
 */
export function updateCopyConfigLastRun(id, status, error) {
    const configs = loadCopyConfigs();
    const target = configs.find((entry) => entry.id === id);
    if (!target) {
        return;
    }
    target.lastRun = new Date().toISOString();
    target.lastStatus = status;
    if (error) {
        target.lastError = error;
    }
    else {
        delete target.lastError;
    }
    saveCopyConfigs(configs);
}
|
|
@@ -0,0 +1,123 @@
|
|
|
1
|
+
import { MongoClient } from "mongodb";
|
|
2
|
+
export async function executeCopy(fromUri, toUri, collections, options, onProgress) {
|
|
3
|
+
const startTime = Date.now();
|
|
4
|
+
const sourceClient = new MongoClient(fromUri);
|
|
5
|
+
const targetClient = new MongoClient(toUri);
|
|
6
|
+
const results = [];
|
|
7
|
+
try {
|
|
8
|
+
await sourceClient.connect();
|
|
9
|
+
await targetClient.connect();
|
|
10
|
+
const sourceDb = sourceClient.db();
|
|
11
|
+
const targetDb = targetClient.db();
|
|
12
|
+
// Resolve collections if --all was used
|
|
13
|
+
let collectionNames = collections;
|
|
14
|
+
if (collections.length === 1 && collections[0] === "*") {
|
|
15
|
+
const colls = await sourceDb.listCollections().toArray();
|
|
16
|
+
collectionNames = colls
|
|
17
|
+
.map((c) => c.name)
|
|
18
|
+
.filter((n) => !n.startsWith("system."));
|
|
19
|
+
}
|
|
20
|
+
for (const name of collectionNames) {
|
|
21
|
+
const colStart = Date.now();
|
|
22
|
+
let copied = 0;
|
|
23
|
+
let errors = 0;
|
|
24
|
+
const sourceCol = sourceDb.collection(name);
|
|
25
|
+
const targetCol = targetDb.collection(name);
|
|
26
|
+
if (options.drop) {
|
|
27
|
+
try {
|
|
28
|
+
await targetCol.drop();
|
|
29
|
+
}
|
|
30
|
+
catch (_a) {
|
|
31
|
+
// Collection may not exist — ignore
|
|
32
|
+
}
|
|
33
|
+
}
|
|
34
|
+
const query = options.query || {};
|
|
35
|
+
const total = await sourceCol.countDocuments(query);
|
|
36
|
+
onProgress({ collection: name, copied: 0, total, errors: 0 });
|
|
37
|
+
const cursor = sourceCol.find(query).batchSize(options.batchSize);
|
|
38
|
+
let batch = [];
|
|
39
|
+
for await (const doc of cursor) {
|
|
40
|
+
batch.push(doc);
|
|
41
|
+
if (batch.length >= options.batchSize) {
|
|
42
|
+
const result = await writeBatch(targetCol, batch, options.upsert || false);
|
|
43
|
+
copied += result.written;
|
|
44
|
+
errors += result.errors;
|
|
45
|
+
batch = [];
|
|
46
|
+
onProgress({ collection: name, copied, total, errors });
|
|
47
|
+
}
|
|
48
|
+
}
|
|
49
|
+
// Write remaining docs
|
|
50
|
+
if (batch.length > 0) {
|
|
51
|
+
const result = await writeBatch(targetCol, batch, options.upsert || false);
|
|
52
|
+
copied += result.written;
|
|
53
|
+
errors += result.errors;
|
|
54
|
+
onProgress({ collection: name, copied, total, errors });
|
|
55
|
+
}
|
|
56
|
+
results.push({ name, copied, errors, duration: Date.now() - colStart });
|
|
57
|
+
}
|
|
58
|
+
const totalCopied = results.reduce((s, r) => s + r.copied, 0);
|
|
59
|
+
const totalErrors = results.reduce((s, r) => s + r.errors, 0);
|
|
60
|
+
return { collections: results, totalCopied, totalErrors, duration: Date.now() - startTime };
|
|
61
|
+
}
|
|
62
|
+
finally {
|
|
63
|
+
await sourceClient.close();
|
|
64
|
+
await targetClient.close();
|
|
65
|
+
}
|
|
66
|
+
}
|
|
67
|
+
/**
 * Writes a batch of documents to the target collection.
 *
 * In upsert mode each document is upserted by _id (idempotent). Fix: the
 * written count now uses `matchedCount` instead of `modifiedCount` — with
 * `modifiedCount`, re-copying documents that are already identical on the
 * target reported 0 written, making an idempotent re-run look like it
 * copied nothing.
 *
 * @param {object} collection - Target MongoDB collection.
 * @param {object[]} docs - Documents to write (each must carry `_id`).
 * @param {boolean} upsert - Upsert by _id instead of plain insert.
 * @returns {Promise<{written: number, errors: number}>} Counts for this batch.
 */
async function writeBatch(collection, docs, upsert) {
    try {
        if (upsert) {
            const ops = docs.map((doc) => ({
                updateOne: {
                    filter: { _id: doc._id },
                    update: { $set: doc },
                    upsert: true,
                },
            }));
            const result = await collection.bulkWrite(ops, { ordered: false });
            // matchedCount covers both modified and already-identical docs.
            return { written: (result.upsertedCount || 0) + (result.matchedCount || 0), errors: 0 };
        }
        const result = await collection.insertMany(docs, { ordered: false });
        return { written: result.insertedCount, errors: 0 };
    }
    catch (e) {
        // Partial success on ordered:false — count what made it through.
        const written = e.result?.nInserted ?? e.result?.insertedCount ?? 0;
        return { written, errors: docs.length - written };
    }
}
|
|
93
|
+
export async function getDbStats(uri) {
|
|
94
|
+
const client = new MongoClient(uri);
|
|
95
|
+
try {
|
|
96
|
+
await client.connect();
|
|
97
|
+
const db = client.db();
|
|
98
|
+
const stats = await db.stats();
|
|
99
|
+
const collList = await db.listCollections().toArray();
|
|
100
|
+
const collections = [];
|
|
101
|
+
for (const col of collList) {
|
|
102
|
+
if (col.name.startsWith("system."))
|
|
103
|
+
continue;
|
|
104
|
+
try {
|
|
105
|
+
const count = await db.collection(col.name).estimatedDocumentCount();
|
|
106
|
+
const colStats = await db.command({ collStats: col.name });
|
|
107
|
+
collections.push({ name: col.name, documents: count, size: colStats.size || 0 });
|
|
108
|
+
}
|
|
109
|
+
catch (_a) {
|
|
110
|
+
collections.push({ name: col.name, documents: 0, size: 0 });
|
|
111
|
+
}
|
|
112
|
+
}
|
|
113
|
+
collections.sort((a, b) => b.documents - a.documents);
|
|
114
|
+
return {
|
|
115
|
+
database: stats.db,
|
|
116
|
+
collections,
|
|
117
|
+
totalSize: stats.dataSize || 0,
|
|
118
|
+
};
|
|
119
|
+
}
|
|
120
|
+
finally {
|
|
121
|
+
await client.close();
|
|
122
|
+
}
|
|
123
|
+
}
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
import cronParser from "cron-parser";
|
|
2
|
+
/**
|
|
3
|
+
* Determines whether a cron-scheduled job is due for execution.
|
|
4
|
+
* Returns true if the current time has passed the next scheduled run after lastRun.
|
|
5
|
+
*/
|
|
6
|
+
/**
 * Determines whether a cron-scheduled job is due for execution.
 * A job that has never run is always due. Otherwise, it is due once `now`
 * has reached the first scheduled time after `lastRun`.
 * @param {string} expression - Cron expression.
 * @param {string|undefined} lastRun - ISO timestamp of the last run, if any.
 * @param {Date} [now] - Reference time (defaults to current time).
 * @returns {boolean} true when due; false when not due or the expression is invalid.
 */
export function isCronDue(expression, lastRun, now = new Date()) {
    if (!lastRun) {
        return true; // Never run before → always due
    }
    try {
        const schedule = cronParser.parseExpression(expression, { currentDate: new Date(lastRun) });
        return now >= schedule.next().toDate();
    }
    catch (_err) {
        return false; // Invalid cron expression
    }
}
|
|
19
|
+
/**
|
|
20
|
+
* Returns the next scheduled run time for a cron expression.
|
|
21
|
+
*/
|
|
22
|
+
/**
 * Returns the next scheduled run time for a cron expression.
 * @param {string} expression - Cron expression.
 * @param {Date} [after] - Find the first occurrence after this time (default: now).
 * @returns {Date|null} Next run time, or null when the expression is invalid.
 */
export function getNextRun(expression, after = new Date()) {
    try {
        const schedule = cronParser.parseExpression(expression, { currentDate: after });
        return schedule.next().toDate();
    }
    catch (_err) {
        return null;
    }
}
|
|
31
|
+
/**
|
|
32
|
+
* Validates a cron expression. Returns null if valid, error message if invalid.
|
|
33
|
+
*/
|
|
34
|
+
/**
 * Validates a cron expression.
 * @param {string} expression - Cron expression to check.
 * @returns {string|null} null when valid, otherwise an error message.
 */
export function validateCron(expression) {
    try {
        cronParser.parseExpression(expression);
    }
    catch (e) {
        return e.message || "Invalid cron expression";
    }
    return null;
}
|
package/lib/db/types.js
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
import { readFileSync } from "fs";
|
|
2
|
+
import { cliError } from "./cliError.js";
|
|
3
|
+
import { EXIT_MISUSE } from "../exitCodes.js";
|
|
4
|
+
/**
|
|
5
|
+
* Resolves a MongoDB aggregation pipeline from one of three sources:
|
|
6
|
+
* 1. --pipeline flag (JSON string)
|
|
7
|
+
* 2. --file flag (path to JSON file)
|
|
8
|
+
* 3. stdin (piped input)
|
|
9
|
+
*
|
|
10
|
+
* Returns the parsed pipeline array.
|
|
11
|
+
*/
|
|
12
|
+
export async function readPipelineInput(opts) {
|
|
13
|
+
let raw;
|
|
14
|
+
if (opts.pipeline) {
|
|
15
|
+
raw = opts.pipeline;
|
|
16
|
+
}
|
|
17
|
+
else if (opts.file) {
|
|
18
|
+
try {
|
|
19
|
+
raw = readFileSync(opts.file, "utf-8");
|
|
20
|
+
}
|
|
21
|
+
catch (e) {
|
|
22
|
+
cliError(`Failed to read file: ${e.message}`, EXIT_MISUSE);
|
|
23
|
+
}
|
|
24
|
+
}
|
|
25
|
+
else if (!process.stdin.isTTY) {
|
|
26
|
+
raw = await readStdin();
|
|
27
|
+
}
|
|
28
|
+
if (!raw) {
|
|
29
|
+
cliError("Provide a pipeline via --pipeline, --file, or stdin", EXIT_MISUSE);
|
|
30
|
+
}
|
|
31
|
+
let parsed;
|
|
32
|
+
try {
|
|
33
|
+
parsed = JSON.parse(raw);
|
|
34
|
+
}
|
|
35
|
+
catch (_a) {
|
|
36
|
+
cliError("Invalid JSON — pipeline must be a valid JSON array", EXIT_MISUSE);
|
|
37
|
+
}
|
|
38
|
+
if (!Array.isArray(parsed)) {
|
|
39
|
+
cliError("Pipeline must be a JSON array of stage objects", EXIT_MISUSE);
|
|
40
|
+
}
|
|
41
|
+
return parsed;
|
|
42
|
+
}
|
|
43
|
+
/**
 * Collects all of stdin as a single UTF-8 string.
 * Resolves when the stream ends; intended for piped (non-TTY) input.
 * @returns {Promise<string>} Everything read from stdin.
 */
function readStdin() {
    return new Promise((resolve) => {
        const chunks = [];
        process.stdin.setEncoding("utf-8");
        process.stdin.on("data", (chunk) => {
            chunks.push(chunk);
        });
        process.stdin.on("end", () => {
            resolve(chunks.join(""));
        });
    });
}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@tolinax/ayoune-cli",
|
|
3
|
-
"version": "2026.
|
|
3
|
+
"version": "2026.6.1",
|
|
4
4
|
"description": "CLI for the aYOUne Business-as-a-Service platform",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"main": "./index.js",
|
|
@@ -119,6 +119,7 @@
|
|
|
119
119
|
"chalk": "^5.3.0",
|
|
120
120
|
"chalk-animation": "^2.0.3",
|
|
121
121
|
"commander": "^12.0.0",
|
|
122
|
+
"cron-parser": "^4.9.0",
|
|
122
123
|
"figlet": "^1.7.0",
|
|
123
124
|
"gradient-string": "^2.0.2",
|
|
124
125
|
"inquirer": "^9.2.14",
|
|
@@ -134,6 +135,7 @@
|
|
|
134
135
|
"lodash": "^4.17.21",
|
|
135
136
|
"mkdirp": "^3.0.1",
|
|
136
137
|
"moment": "^2.30.1",
|
|
138
|
+
"mongodb": "^6.21.0",
|
|
137
139
|
"nanospinner": "^1.1.0",
|
|
138
140
|
"node-localstorage": "^3.0.5",
|
|
139
141
|
"os": "^0.1.2",
|