@mtakla/cronops 0.1.1-rc2.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +15 -0
- package/README.md +391 -0
- package/config/jobs/example-job1.yaml +17 -0
- package/config/jobs/example-job2.yaml +7 -0
- package/config/jobs/example-job3.yaml +14 -0
- package/config/runner-defaults.yaml +27 -0
- package/config/scripts/example-script.js +1 -0
- package/dist/api/openapi.js +229 -0
- package/dist/api/webapi.js +75 -0
- package/dist/api.js +44 -0
- package/dist/errors/JobError.js +10 -0
- package/dist/handlers/AbstractHandler.js +162 -0
- package/dist/handlers/ExecHandler.js +103 -0
- package/dist/handlers/FileArchiveHandler.js +15 -0
- package/dist/handlers/FileCopyHandler.js +15 -0
- package/dist/handlers/FileDeleteHandler.js +13 -0
- package/dist/handlers/FileMoveHandler.js +16 -0
- package/dist/index.js +17 -0
- package/dist/models/FileHistoryModel.js +45 -0
- package/dist/models/JobModel.js +35 -0
- package/dist/models/JobRunnerContext.js +49 -0
- package/dist/models/JobRunnerResult.js +13 -0
- package/dist/models/JobRunnerSetup.js +105 -0
- package/dist/models/PermissionModel.js +13 -0
- package/dist/server.js +91 -0
- package/dist/tasks/AbstractTask.js +71 -0
- package/dist/tasks/JobLoader.js +73 -0
- package/dist/tasks/JobRunner.js +100 -0
- package/dist/tasks/JobScheduler.js +108 -0
- package/dist/tests/loadtest.js +102 -0
- package/dist/types/Config.types.js +34 -0
- package/dist/types/Options.types.js +19 -0
- package/dist/types/Task.types.js +1 -0
- package/dist/webapi.js +52 -0
- package/package.json +81 -0
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
import { AbstractHandler } from "./AbstractHandler.js";
|
|
2
|
+
/**
 * Handler for "move" jobs: copies matching source files to the target tree
 * and removes them from the source via the inherited move pipeline.
 */
export class FileMoveHandler extends AbstractHandler {
    /** A move job needs an existing source dir and a target configuration. */
    validateJob(job) {
        super.assertSourceDirExist(job);
        super.assertTargetConfigExists(job);
    }

    /**
     * Move all matched entries, then always run the inherited cleanup.
     * When nothing matched, only cleanup runs.
     */
    async processFiles(ctx, entries, fileHistory) {
        const hasWork = entries.length > 0;
        if (hasWork) {
            // Order matters: target dirs must exist before files move into them.
            await super.createTargetDirs(ctx, entries);
            await super.processSources(ctx, entries, fileHistory, this.copyOrMoveFile);
            await super.setTargetDirPermissions(ctx);
            await super.deleteEmptySourceDirs(ctx);
        }
        await super.cleanup(ctx, fileHistory);
    }
}
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import { JobScheduler } from "./tasks/JobScheduler.js";
|
|
2
|
+
import { JobModel } from "./models/JobModel.js";
|
|
3
|
+
import { JobRunnerSetup } from "./models/JobRunnerSetup.js";
|
|
4
|
+
import { JobLoader } from "./tasks/JobLoader.js";
|
|
5
|
+
import { JobRunner } from "./tasks/JobRunner.js";
|
|
6
|
+
/** Create a JobLoader that watches the configured jobs directory. */
export function createJobLoader(options = {}) {
    return new JobLoader(options);
}

/** Create a JobScheduler that manages scheduled JobRunner instances. */
export function createJobScheduler(options = {}) {
    return new JobScheduler(options);
}

/**
 * Validate `job` against a fresh runner setup and build a runner for it.
 * Throws (via JobRunnerSetup.validateJob) when the definition is invalid.
 */
export function createJobRunner(job, options = {}) {
    const runnerSetup = new JobRunnerSetup(options);
    runnerSetup.validateJob(job);
    const model = new JobModel(job);
    return new JobRunner(model, runnerSetup);
}

export default { createJobLoader, createJobScheduler, createJobRunner };
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
/**
 * Tracks which source files were seen between runs and which target entries
 * are stale. `data` is the JSON-serializable persistent state
 * ({ source, target } maps of path -> entry tuple); `changed` records
 * whether the state needs to be written back.
 */
export class FileHistoryModel {
    data;                   // persisted state: { source: {...}, target: {...} }
    changed;                // true when `data` differs from what was loaded
    included = new Set();   // source paths seen during the current run
    outdated = new Set();   // target paths flagged for removal

    constructor(data = { source: {}, target: {} }) {
        this.data = data;
        this.changed = false;
    }

    /**
     * Record the current entry tuple for a source path.
     * An entry counts as changed when it is new or its first element
     * (e.g. an mtime) differs from the stored one.
     * @returns {{changed: boolean, added: boolean}}
     */
    updateSourceEntry(path, entry) {
        const previous = this.data.source[path];
        const added = previous === undefined;
        const changed = added || previous[0] !== entry[0];
        if (changed) {
            this.data.source[path] = entry;
            this.changed = true;
        }
        this.included.add(path);
        return { changed, added };
    }

    /** Store an entry for a target path and mark the history dirty. */
    addTargetEntry(path, entry) {
        this.data.target[path] = entry;
        this.changed = true;
    }

    /** Flag a known target path as stale; unknown paths are ignored. */
    markTargetOutdated(path) {
        if (path in this.data.target) {
            this.outdated.add(path);
        }
    }

    /**
     * Drop source entries not seen this run and target entries flagged as
     * outdated, then reset the per-run bookkeeping.
     * @returns {string[]} the removed (unseen) source paths
     */
    cleanup() {
        const unseen = Object.keys(this.data.source).filter((path) => !this.included.has(path));
        unseen.forEach((path) => this._removeEntry("source", path));
        this.outdated.forEach((path) => this._removeEntry("target", path));
        this.included.clear();
        this.outdated.clear();
        return unseen;
    }

    /** Delete one entry from data[type] if present and mark dirty. */
    _removeEntry(type, path) {
        if (path in this.data[type]) {
            delete this.data[type][path];
            this.changed = true;
        }
    }
}
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import { format } from "date-fns";
|
|
2
|
+
/**
 * Normalized job definition with defaults. All keys from `config` and `data`
 * are copied onto the instance as-is; job data wins over config defaults.
 */
export class JobModel {
    id;
    action;
    command = "";
    shell;
    args = [];
    env = {};
    cron = "* * * * *";
    source = {};
    target = {};
    dry_run = false;
    verbose = false;
    enabled = true;

    constructor(data, config = {}) {
        // Precedence: class defaults < config < job data (later assign wins).
        Object.assign(this, config);
        Object.assign(this, data);
    }

    /** Glob patterns selecting source files; everything by default. */
    get sourceIncludes() {
        return this.source?.includes ?? ["**/*"];
    }

    /** Glob patterns excluded from the source selection; none by default. */
    get sourceExcludes() {
        return this.source?.excludes ?? [];
    }

    /** Archive file name with {{date-fns pattern}} placeholders resolved. */
    get targetArchiveName() {
        const template = this.target?.archive_name ?? "{{yyyy-MM-dd_HH-mm-ss}}.tgz";
        return this.resolveDatePattern(template);
    }

    /** Permission spec "uid:gid:fileMode:dirMode" (consumed by PermissionModel). */
    get targetPermissions() {
        const perm = this.target?.permissions;
        const owner = perm?.owner ?? ":";
        return `${owner}:${perm?.file_mode ?? ""}:${perm?.dir_mode ?? ""}`;
    }

    /** Replace every {{pattern}} in `input` with date-fns format(date, pattern). */
    resolveDatePattern(input, date = new Date()) {
        return input.replace(/\{\{(.+?)\}\}/g, (_, pattern) => format(date, pattern));
    }
}
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
import { EventEmitter } from "node:events";
|
|
2
|
+
import { PermissionModel } from "./PermissionModel.js";
|
|
3
|
+
import { JobRunnerResult } from "./JobRunnerResult.js";
|
|
4
|
+
import { fsyncSync, writeSync } from "node:fs";
|
|
5
|
+
import { join } from "node:path";
|
|
6
|
+
/**
 * Per-run state shared by the action handlers: resolved directories, the
 * result accumulator, the event emitter and an optional log file descriptor.
 */
export class JobRunnerContext {
    job;
    result;             // JobRunnerResult accumulator for this run
    startTime;
    sourceDir;
    targetDir;
    sourceDirs;         // source dirs touched during the run
    targetDirs;         // target dirs touched during the run
    targetPermissions;
    events;
    logFd;              // 0 = logging disabled

    constructor(setup, job, events = new EventEmitter(), logFd = 0) {
        this.job = job;
        this.startTime = Date.now();
        this.sourceDir = setup.resolveSourceDir(job.source?.dir);
        // Dry runs are redirected into a job-specific temp directory.
        this.targetDir = job.dry_run
            ? join(setup.tempDir, job.id)
            : setup.resolveTargetDir(job.target?.dir);
        this.sourceDirs = new Set();
        this.targetDirs = new Set();
        this.targetPermissions = new PermissionModel(job.targetPermissions);
        this.result = new JobRunnerResult();
        this.events = events;
        this.logFd = logFd;
    }

    getLogFd() {
        return this.logFd;
    }

    /** Append a line to the job log when logging is enabled. */
    writeLog(msg) {
        if (this.logFd) {
            writeSync(this.logFd, `${msg}\n`);
        }
    }

    /** Count an error, log it and notify listeners. */
    processError(error) {
        this.result.errors++;
        this.writeLog(String(error));
        this.events.emit("error", error);
    }

    /** Log an activity line (flushed immediately); emit it in verbose mode. */
    processActivity(action, path, count) {
        if (this.logFd) {
            writeSync(this.logFd, `✔ ${action} ${path ? path : ""}\n`);
            fsyncSync(this.logFd);
        }
        if (this.job.verbose) {
            this.events.emit("activity", action, path, count);
        }
    }
}
|
|
@@ -0,0 +1,105 @@
|
|
|
1
|
+
import os from "node:os";
|
|
2
|
+
import cron from "node-cron";
|
|
3
|
+
import { join, resolve, sep } from "node:path";
|
|
4
|
+
import { ENV } from "../types/Options.types.js";
|
|
5
|
+
import { JobError } from "../errors/JobError.js";
|
|
6
|
+
import { JobSchema } from "../types/Config.types.js";
|
|
7
|
+
import { ExecHandler } from "../handlers/ExecHandler.js";
|
|
8
|
+
import { FileCopyHandler } from "../handlers/FileCopyHandler.js";
|
|
9
|
+
import { FileMoveHandler } from "../handlers/FileMoveHandler.js";
|
|
10
|
+
import { FileArchiveHandler } from "../handlers/FileArchiveHandler.js";
|
|
11
|
+
import { FileDeleteHandler } from "../handlers/FileDeleteHandler.js";
|
|
12
|
+
/**
 * Shared runner configuration: resolves the six configurable root
 * directories (three source, three target) plus config/temp/log dirs from
 * options or environment variables, and registers one handler per job
 * action ("exec", "copy", "move", "archive", "delete").
 */
export class JobRunnerSetup {
    sourceRoot;      // root behind the "$1" source prefix (and the default)
    targetRoot;      // root behind the "$1" target prefix (and the default)
    source2Root;     // "$2" source root
    target2Root;     // "$2" target root
    source3Root;     // "$3" source root
    target3Root;     // "$3" target root
    shell;           // false | true | shell string (see parseShellSettings)
    configDir;
    tempDir;         // used as the target base for dry runs (see JobRunnerContext)
    logDir;
    scriptDir;       // <configDir>/scripts
    sourceRootDirs;  // [$1, $2, $3] source roots; index = prefix digit - 1
    targetRootDirs;  // [$1, $2, $3] target roots; index = prefix digit - 1
    handlerMap = new Map();   // action name -> handler instance

    /**
     * @param {object} options overrides; each setting falls back to its ENV
     *        variable and finally to a default.
     */
    constructor(options = {}) {
        // Precedence for every dir: explicit option > environment > default.
        this.sourceRoot = resolve(options.sourceRoot ?? process.env[ENV.SOURCE_ROOT] ?? "./");
        this.targetRoot = resolve(options.targetRoot ?? process.env[ENV.TARGET_ROOT] ?? "./");
        this.source2Root = resolve(options.source2Root ?? process.env[ENV.SOURCE_2_ROOT] ?? "./");
        this.target2Root = resolve(options.target2Root ?? process.env[ENV.TARGET_2_ROOT] ?? "./");
        this.source3Root = resolve(options.source3Root ?? process.env[ENV.SOURCE_3_ROOT] ?? "./");
        this.target3Root = resolve(options.target3Root ?? process.env[ENV.TARGET_3_ROOT] ?? "./");
        this.configDir = resolve(options.configDir ?? process.env[ENV.CONFIG_DIR] ?? "./config");
        this.tempDir = resolve(options.tempDir ?? process.env[ENV.TEMP_DIR] ?? join(os.tmpdir(), "cronops"));
        this.logDir = resolve(options.logDir ?? process.env[ENV.LOG_DIR] ?? join(os.homedir(), ".cronops"));
        this.shell = options.shell ?? parseShellSettings(process.env[ENV.EXEC_SHELL]) ?? false;
        this.scriptDir = join(this.configDir, "scripts");
        this.sourceRootDirs = [this.sourceRoot, this.source2Root, this.source3Root];
        this.targetRootDirs = [this.targetRoot, this.target2Root, this.target3Root];
        // One handler instance per action, shared across all jobs.
        this.handlerMap.set("exec", new ExecHandler(this));
        this.handlerMap.set("copy", new FileCopyHandler(this));
        this.handlerMap.set("move", new FileMoveHandler(this));
        this.handlerMap.set("archive", new FileArchiveHandler(this));
        this.handlerMap.set("delete", new FileDeleteHandler(this));
    }

    /** Resolve a job source dir (optionally "$N"-prefixed) to an absolute path. */
    resolveSourceDir(relPath = "./") {
        return this._resolveDir(relPath, this.sourceRootDirs);
    }

    /** Resolve a job target dir (optionally "$N"-prefixed) to an absolute path. */
    resolveTargetDir(relPath = "./") {
        return this._resolveDir(relPath, this.targetRootDirs);
    }

    /**
     * Look up the handler registered for a job action.
     * @throws {Error} for unknown actions
     */
    getActionHandler(action) {
        const handler = this.handlerMap.get(action);
        if (!handler)
            throw new Error(`No handler registered for action '${action}'!`);
        return handler;
    }

    /**
     * Validate a raw job definition: schema, dir constraints, cron string
     * and the action handler's own checks.
     * @throws via JobError.throw on the first failed check
     */
    validateJob(job) {
        const res = JobSchema.safeParse(job);
        if (!res.success)
            JobError.throw(job.id, `Invalid job definition. ${res.error.issues[0]?.message}`, res.error);
        this._validateDir(job.source?.dir, job.id);
        this._validateDir(job.target?.dir, job.id);
        if (job.cron && !cron.validate(job.cron))
            JobError.throw(job.id, `invalid cron string '${job.cron}'!`);
        const handler = this.getActionHandler(job.action);
        handler.validateJob(job);
    }

    /**
     * Reject traversal ("..") and malformed "$N" prefixes: only $1..$3 are
     * allowed and the prefix must be followed by the platform path separator
     * (or end the string).
     */
    _validateDir(path = "./", jobId) {
        let issue;
        if (path.includes(".."))
            issue = "Directory traversal ('..') is not allowed!";
        else if (path.startsWith("$")) {
            const digit = path.charCodeAt(1) - 48; // '1' -> 1, '2' -> 2, ...
            const nextChar = path.charAt(2);
            if (!(digit > 0 && digit <= 3))
                issue = "Only $1, $2, and $3 are supported as root prefixes.";
            else if (nextChar !== "" && nextChar !== sep)
                issue = `Prefix $${digit} must be followed by a path separator ('${sep}').`;
        }
        if (issue)
            JobError.throw(jobId, issue);
    }

    /**
     * Map "$N<sep>rest" to rootDirArray[N-1]/rest; plain paths resolve
     * against the first root.
     * @throws {Error} on an invalid "$" prefix
     */
    _resolveDir(relPath, rootDirArray) {
        if (!relPath.startsWith("$")) {
            return resolve(join(rootDirArray[0], relPath));
        }
        const idx = relPath.charCodeAt(1) - 49; // '1' -> 0, '2' -> 1, '3' -> 2
        if (idx >= 0 && idx < 3 && (relPath.charAt(2) === "" || relPath.charAt(2) === sep)) {
            return resolve(join(rootDirArray[idx] || "./", relPath.slice(2)));
        }
        throw new Error(`Invalid dir prefix '${relPath}'! Allowed prefixes are $1, $2, or $3, followed by '${sep}'`);
    }
}
|
|
96
|
+
/**
 * Interpret the EXEC_SHELL environment value: "true"/"false" (any case,
 * padded) become booleans; any other non-empty string is returned verbatim
 * (used as the shell setting); empty/undefined yields false.
 */
function parseShellSettings(shellStr) {
    if (!shellStr) {
        return false;
    }
    switch (shellStr.trim().toLowerCase()) {
        case "true":
            return true;
        case "false":
            return false;
        default:
            return shellStr;
    }
}
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
/**
 * Parses a "uid:gid:fileMode:dirMode" permission spec string.
 * uid/gid are decimal and become NaN when omitted (presumably meaning
 * "leave ownership unchanged" — confirm against the handlers); modes are
 * octal with defaults 660 (files) and 770 (directories).
 */
export class PermissionModel {
    uid;
    gid;
    fileMode;
    dirMode;

    constructor(attr) {
        const [uidStr = "", gidStr = "", fileModeStr = "", dirModeStr = ""] = attr.split(":");
        this.uid = Number.parseInt(uidStr || "", 10);
        this.gid = Number.parseInt(gidStr || "", 10);
        this.fileMode = Number.parseInt(fileModeStr || "660", 8);
        this.dirMode = Number.parseInt(dirModeStr || "770", 8);
    }
}
|
package/dist/server.js
ADDED
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import chalk from "chalk";
|
|
3
|
+
import fsx from "fs-extra";
|
|
4
|
+
import figlet from "figlet";
|
|
5
|
+
import webapi from "./api/webapi.js";
|
|
6
|
+
import { join, dirname } from "node:path";
|
|
7
|
+
import { fileURLToPath } from "node:url";
|
|
8
|
+
import { JobLoader } from "./tasks/JobLoader.js";
|
|
9
|
+
import { JobScheduler } from "./tasks/JobScheduler.js";
|
|
10
|
+
// Human-readable count: "no files", "1 file", "2 files".
const plural = (n, noun) => {
    const count = n > 0 ? String(n) : "no";
    const suffix = n === 1 ? "" : "s";
    return `${count} ${noun}${suffix}`;
};
|
|
11
|
+
// Package root (dist/ sits one level below the package directory).
const appDir = join(dirname(fileURLToPath(import.meta.url)), "..");

/**
 * CLI entry point: prints the banner, empties the temp dir, starts the web
 * API, wires loader/scheduler events to console output, installs shutdown
 * handlers, then starts both cron tasks. Exits with 1 on init failure.
 */
export async function start() {
    const jobLoader = new JobLoader();
    const jobScheduler = new JobScheduler();
    try {
        // Banner and version read from the package manifest.
        const packageJSON = await fsx.readJSON(join(appDir, "package.json"));
        console.log(figlet.textSync("CronOps", { horizontalLayout: "fitted" }));
        console.log(chalk.cyan.bold(`\n☰ CronOps v${packageJSON.version}`) + chalk.cyan.italic(` »Omnia coniuncta sunt«`));
        console.log(`Monitoring job configs in ${join(jobLoader.configDir, "jobs")} ...`);
        // Fresh temp dir for this process (used by dry runs).
        await fsx.emptyDir(jobScheduler.tempDir);
        webapi(jobScheduler);
    }
    catch (err) {
        console.error(`CronOps initialization error. ${err instanceof Error ? err.stack : err}`);
        console.error(`Please check environment settings.`);
        process.exit(1);
    }
    // --- loader events -> scheduler / console ---
    jobLoader.onLoadingError((entry, message) => {
        console.log(`🔴 Error loading job '${entry}'. ${message}`);
    });
    jobLoader.onJobLoaded((job) => {
        if (job.enabled !== false)
            jobScheduler.scheduleJob(job);
    });
    jobLoader.onJobDeleted((jobId) => {
        jobScheduler.unscheduleJob(jobId);
    });
    // --- scheduler events -> console ---
    jobScheduler.onChanged((isReload) => {
        const jobs = jobScheduler.getScheduledJobs();
        console.log(`\nJob config ${isReload ? "changed" : "loaded"} (${plural(jobs.length, "active job")})`);
        for (const job of jobs) {
            console.log(` 🕔 [${job.id}] scheduled (${chalk.greenBright(job.cron)})${job.dry_run ? " 👋 DRY-RUN mode!" : ""}`);
        }
    });
    jobScheduler.onJobError((job, err) => {
        console.error(chalk.red(`[${job.id}] ERROR ${err.message}`));
    });
    // Per-file progress lines (emitted only for verbose jobs; see
    // JobRunnerContext.processActivity).
    jobScheduler.onJobActivity((job, action, path, count) => {
        if (action === "COPIED")
            console.log(`[${job.id}] ⛃ COPIED → '${path}'`);
        else if (action === "DELETED")
            console.log(`[${job.id}] ⛃ DELETED '${path}'`);
        else if (action === "ARCHIVED")
            console.log(`[${job.id}] ⛃ ARCHIVED ${plural(count, "file")} to '${path}'`);
        else if (action === "EXECUTED")
            console.log(`[${job.id}] ➤➤ EXECUTED '${path}'`);
        else if (action === "PRUNED")
            console.log(`[${job.id}] ⛃ PRUNED target file '${path}'`);
    });
    // One summary line for non-verbose jobs that actually did something.
    jobScheduler.onJobFinished((job, stat) => {
        if (!job.verbose && stat.copied + stat.deleted + stat.archived + stat.executed > 0) {
            if (stat.copied > 0 && stat.deleted > 0)
                console.log(`[${job.id}] ✔ MOVED ${stat.copied} in ${stat.durationMs}ms`);
            else if (stat.copied > 0)
                console.log(`[${job.id}] ✔ COPIED ${plural(stat.copied, "file")} in ${stat.durationMs}ms`);
            else if (stat.deleted > 0)
                console.log(`[${job.id}] ✔ DELETED ${plural(stat.deleted, "file")} in ${stat.durationMs}ms`);
            else if (stat.archived > 0)
                console.log(`[${job.id}] ✔ ARCHIVED ${plural(stat.archived, "file")} in ${stat.durationMs}ms`);
            else if (stat.executed === 1)
                console.log(`[${job.id}] ✔ Command EXECUTED in ${stat.durationMs}ms`);
            else if (stat.executed > 1)
                console.log(`[${job.id}] ✔ Command EXECUTED on ${plural(stat.executed, "file")} in ${stat.durationMs}ms`);
        }
    });
    // Graceful shutdown: give each task up to 2s to finish its current run.
    process.on("SIGTERM", async () => {
        console.log("SIGTERM received. Shutting down CronOps ...");
        if (jobLoader)
            await jobLoader.gracefulTerminate(2000);
        if (jobScheduler)
            await jobScheduler.gracefulTerminate(2000);
        process.exit(0);
    });
    process.on("uncaughtException", (err, origin) => {
        console.error(`Unexpected Termination. Origin: ${origin}. ${err instanceof Error ? err.stack : err}`);
        process.exit(-1);
    });
    // Start watching configs (with an immediate first load) and scheduling.
    jobLoader.schedule(true);
    jobScheduler.schedule();
}
await start();
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
import cron, {} from "node-cron";
|
|
2
|
+
import { setTimeout } from "node:timers/promises";
|
|
3
|
+
import { EventEmitter } from "node:events";
|
|
4
|
+
import { ENV } from "../types/Options.types.js";
|
|
5
|
+
/**
 * Base class for cron-driven tasks. Subclasses implement `run()`; this class
 * handles scheduling (node-cron), re-entrancy, lifecycle events ("started",
 * "finished", "error") and graceful termination.
 */
export class AbstractTask {
    cronTask;                     // underlying node-cron task
    events = new EventEmitter();  // lifecycle event bus
    errorCount = 0;               // errors observed via onError() listeners
    isRunning = false;            // true while run() is in flight

    /**
     * @param {string} cronStr cron expression driving the task
     * @throws {Error} when the cron string is invalid
     */
    constructor(cronStr = "* * * * *") {
        const asyncRunner = async () => {
            // Re-entrancy guard: ticks that fire during an active run are skipped.
            if (!this.isRunning) {
                this.isRunning = true;
                try {
                    this.events.emit("started");
                    this.events.emit("finished", await this.run());
                }
                catch (err) {
                    this.events.emit("error", err instanceof Error ? err : new Error(String(err)));
                }
                finally {
                    this.isRunning = false;
                }
            }
        };
        // NOTE(review): resolvedOptions().timeZone is effectively always
        // defined, so the ENV.TZ fallback rarely (if ever) applies — confirm
        // the intended precedence.
        const timezone = Intl.DateTimeFormat().resolvedOptions().timeZone ?? process.env[ENV.TZ] ?? "UTC";
        if (!cron.validate(cronStr))
            throw new Error(`Initialization error. Invalid cron string (${cronStr}).`);
        this.cronTask = cron.createTask(cronStr, asyncRunner, { timezone });
    }

    /** Start the cron schedule; optionally trigger one immediate run. */
    schedule(runImmediately = false) {
        if (runImmediately)
            this.cronTask.once("task:started", () => this.execute());
        this.cronTask.start();
    }

    /** Stop the schedule and drop all lifecycle listeners. */
    unschedule() {
        this.events.removeAllListeners();
        this.cronTask.destroy();
    }

    /**
     * Trigger a single run outside the schedule.
     * @param {Function} [cb] invoked once with the run result
     * @throws {Error} when the task is destroyed or already running
     */
    execute(cb) {
        const status = this.cronTask.getStatus();
        if (status === "destroyed")
            throw new Error("Invalid task state (destroyed)");
        if (status === "running" || this.isRunning)
            throw new Error("Invalid task state (running)");
        if (cb)
            this.events.once("finished", cb);
        this.cronTask.execute();
    }

    /** Subscribe to the cron tick event. */
    onScheduled(cb) {
        this.cronTask.on("task:started", cb);
    }

    onStarted(cb) {
        this.events.on("started", cb);
    }

    onFinished(cb) {
        this.events.on("finished", cb);
    }

    /**
     * Register an error listener.
     * BUGFIX: errorCount is now incremented per received error event, not
     * once at registration time — previously a single onError() call bumped
     * the counter and actual errors never did, so the "too many errors"
     * circuit breaker in JobRunner could never trip from real failures.
     */
    onError(cb) {
        this.events.on("error", (error) => {
            this.errorCount++;
            cb(error);
        });
    }

    /** Destroy the cron task, then wait up to `timeout` ms for an active run. */
    async gracefulTerminate(timeout = 500) {
        if (this.cronTask.getStatus() !== "destroyed")
            await this.cronTask.destroy();
        const startTime = Date.now();
        while (this.isRunning && Date.now() - startTime < timeout) {
            await setTimeout(20);
        }
    }
}
|
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
import fsx from "fs-extra";
|
|
2
|
+
import YAML from "yaml";
|
|
3
|
+
import glob from "fast-glob";
|
|
4
|
+
import { join, dirname, resolve, basename } from "node:path";
|
|
5
|
+
import { AbstractTask } from "./AbstractTask.js";
|
|
6
|
+
import { ZodError } from "zod";
|
|
7
|
+
import { fileURLToPath } from "node:url";
|
|
8
|
+
import { FileHistoryModel } from "../models/FileHistoryModel.js";
|
|
9
|
+
import { ENV } from "../types/Options.types.js";
|
|
10
|
+
import { JobSchema } from "../types/Config.types.js";
|
|
11
|
+
const appDir = join(dirname(fileURLToPath(import.meta.url)), "..", "..");
|
|
12
|
+
// Derive the job id from a YAML file path: "sub/job.yaml" -> "sub/job".
const entry2id = (str) => {
    const dir = dirname(str);
    const name = basename(str, ".yaml");
    return join(dir, name);
};
|
|
13
|
+
/**
 * Watches <configDir>/jobs (every 8 seconds) for YAML job definitions and
 * emits "job-loaded", "job-deleted", "job-loader-error" and "loaded" events.
 * File mtimes are tracked in a FileHistoryModel so only new or changed
 * files are re-parsed.
 */
export class JobLoader extends AbstractTask {
    configDir;
    firstRun = true;   // triggers one-time seeding of the default config
    jobHistory;        // per-file mtime bookkeeping between cycles

    constructor(options = {}) {
        super("*/8 * * * * *");
        this.configDir = resolve(options.configDir ?? process.env[ENV.CONFIG_DIR] ?? "./config");
        this.jobHistory = new FileHistoryModel();
    }

    /**
     * One load cycle: scan the job YAML files, parse new/changed ones, emit
     * per-job events plus a final "loaded", and report vanished files as
     * deleted. Returns the jobs (re)loaded this cycle.
     */
    async run() {
        const result = [];
        const ttime = Date.now();
        const jobsDir = join(this.configDir, "jobs");
        // First run with no jobs dir: best-effort seed from the bundled
        // example config; failures are deliberately ignored.
        if (this.firstRun && !fsx.pathExistsSync(jobsDir))
            try {
                await fsx.copy(join(appDir, "config"), this.configDir);
            }
            catch {
            }
        const entries = await glob(["**/*.yaml"], { cwd: jobsDir });
        this.firstRun = false;
        for (const entry of entries) {
            const jobFile = join(jobsDir, entry);
            try {
                const stats = await fsx.stat(jobFile);
                // "changed" compares mtimeMs against the stored history.
                const { changed, added } = this.jobHistory.updateSourceEntry(entry, [stats.mtimeMs, ttime]);
                if (changed) {
                    const jobConfig = JobSchema.parse(YAML.parse(await fsx.readFile(jobFile, "utf-8")));
                    const job = { id: entry2id(entry), ...jobConfig };
                    result.push(job);
                    // Second arg: true when this is a reload of a known job.
                    this.events.emit("job-loaded", job, !added);
                }
            }
            catch (err) {
                const msg = err instanceof ZodError ? `${err.issues[0]?.message}` : String(err);
                this.events.emit("job-loader-error", entry, msg);
            }
        }
        // Files that disappeared since the last cycle are reported as deleted.
        const removedJobs = this.jobHistory.cleanup();
        for (const entry of removedJobs) {
            this.events.emit("job-deleted", entry2id(entry));
        }
        this.events.emit("loaded", result);
        return result;
    }

    /** Run a single on-demand load cycle. */
    async loadJobs() {
        return await this.run();
    }

    /**
     * Invoke `cb` exactly once, after the next completed load cycle.
     * BUGFIX: previously registered with events.on(), which — despite the
     * "once" name and contrary to AbstractTask.execute()'s use of
     * events.once — fired the callback on every cycle.
     */
    onceLoaded(cb) {
        this.events.once("loaded", cb);
    }

    onLoadingError(cb) {
        this.events.on("job-loader-error", cb);
    }

    onJobLoaded(cb) {
        this.events.on("job-loaded", cb);
    }

    onJobDeleted(cb) {
        this.events.on("job-deleted", cb);
    }
}
|
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
import glob from "fast-glob";
|
|
2
|
+
import { join } from "node:path";
|
|
3
|
+
import { ensureDir, moveSync, readJSON, writeJSON } from "fs-extra/esm";
|
|
4
|
+
import { AbstractTask } from "./AbstractTask.js";
|
|
5
|
+
import { JobRunnerContext } from "../models/JobRunnerContext.js";
|
|
6
|
+
import { closeSync, fsyncSync, openSync, writeSync } from "node:fs";
|
|
7
|
+
import { JobRunnerResult } from "../models/JobRunnerResult.js";
|
|
8
|
+
import { FileHistoryModel } from "../models/FileHistoryModel.js";
|
|
9
|
+
/**
 * Executes one job on its cron schedule: globs the source tree, delegates
 * to the registered action handler, and maintains a per-job log file plus a
 * persisted file history (<logDir>/<id>.idx).
 */
export class JobRunner extends AbstractTask {
    job;     // the JobModel this runner executes
    setup;   // shared JobRunnerSetup (dirs, shell, handler registry)

    constructor(job, setup) {
        super(job.cron);
        this.job = job;
        this.setup = setup;
    }

    /** Subscribe to per-file activity events emitted during a run. */
    onActivity(cb) {
        this.events.on("activity", cb);
    }

    /** Run the job immediately, outside the cron schedule. */
    async runJob() {
        return await this.run();
    }

    /**
     * One job execution. Returns the JobRunnerResult; run errors are written
     * to the job log and recorded rather than rethrown.
     * @throws {Error} when the error circuit breaker trips
     */
    async run() {
        const { setup, job, events } = this;
        if (job.enabled === false)
            return new JobRunnerResult();
        // Circuit breaker: self-disable after repeated errors.
        // NOTE(review): errorCount is maintained by AbstractTask.onError —
        // verify it actually increments once per error event.
        if (this.errorCount >= 25) {
            job.enabled = false;
            throw new Error("Too many errors. Job execution disabled!");
        }
        await ensureDir(setup.logDir);
        const logFd = this.initLog(job);
        const ctx = new JobRunnerContext(setup, job, events, logFd);
        const handler = setup.getActionHandler(job.action);
        try {
            if (job.source) {
                // File-based action: match the source tree and hand the
                // entries plus the persisted history to the handler.
                const entries = await glob(job.sourceIncludes, { cwd: ctx.sourceDir, ignore: job.sourceExcludes, dot: true, extglob: false });
                const fileHistory = await this.loadFileHistory(job);
                ctx.writeLog(`Processing source entries ...`);
                await handler.processFiles(ctx, entries, fileHistory);
                await this.saveFileHistory(job, fileHistory);
            }
            else {
                // Source-less action (e.g. a plain exec job).
                await handler.process(ctx);
            }
            this.closeLog(logFd, ctx.startTime);
        }
        catch (err) {
            // The error ends up in the log trailer; the result is still returned.
            this.closeLog(logFd, ctx.startTime, err instanceof Error ? err : new Error(String(err)));
        }
        ctx.result.endTime = Date.now();
        return ctx.result;
    }

    /** Load the persisted file history; a fresh one on any read/parse error. */
    async loadFileHistory(job) {
        try {
            const filePath = join(this.setup.logDir, `${job.id}.idx`);
            return new FileHistoryModel(await readJSON(filePath));
        }
        catch {
            return new FileHistoryModel();
        }
    }

    /** Persist the file history if it changed; write errors are ignored. */
    async saveFileHistory(job, fileHistory) {
        if (fileHistory.changed) {
            try {
                const filePath = join(this.setup.logDir, `${job.id}.idx`);
                await writeJSON(filePath, fileHistory.data, { spaces: 4 });
            }
            catch { }
        }
    }

    /**
     * Open (truncating) the job log and write its header.
     * @returns {number} the file descriptor, or 0 when the log can't be created
     */
    initLog(job) {
        try {
            const fd = openSync(join(this.setup.logDir, `${job.id}.log`), "w");
            writeSync(fd, `====== CronOps log for job #${job.id}\n`);
            writeSync(fd, `started: ${new Date().toISOString()}\n`);
            writeSync(fd, `action: ${job.action}\n`);
            return fd;
        }
        catch (error) {
            this.events.emit("error", new Error(`Cannot create log file for job '${job.id}'.\n └─ ${String(error)}`));
            return 0;
        }
    }

    /** Write the log trailer (plus the error, if any), flush and close the fd. */
    closeLog(fd, startTime, err) {
        if (fd)
            try {
                if (err)
                    writeSync(fd, `${String(err)}\n`);
                writeSync(fd, `====== ${err ? "JOB FAILED" : "JOB FINISHED"}\n`);
                writeSync(fd, `(duration: ${Date.now() - startTime}ms)\n`);
                fsyncSync(fd);
                closeSync(fd);
            }
            catch { }
    }

    /** Rename the current job log by inserting `tag` before ".log". */
    renameLog(job, tag) {
        moveSync(join(this.setup.logDir, `${job.id}.log`), join(this.setup.logDir, `${job.id}.${tag}.log`));
    }
}
|