@mtakla/cronops 0.1.1-rc2.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,229 @@
1
// OpenAPI 3.0 specification for the CronOps Web API, served verbatim at
// /openapi.json and rendered by the /docs page.
//
// BUG FIX: the original imported `number` from "zod" and used the bare
// function as the value of two `type:` fields. A zod function is not a valid
// JSON-Schema type and is silently dropped by JSON.stringify, producing an
// invalid spec; the string "integer" is used instead (matching `active_jobs`).
// The leftover Swagger Petstore sample tag ("user" / swagger.io) was removed —
// no documented path references it.
export const openapi = {
    openapi: "3.0.3",
    info: { title: "CronOps Web API", version: "1.0.0" },
    components: {
        securitySchemes: {
            // /api routes expect: Authorization: Bearer <hex-encoded 256-bit key>
            ApiKeyBearer: {
                type: "http",
                scheme: "bearer",
                bearerFormat: "hex-256",
            },
        },
    },
    tags: [
        {
            name: "public",
            description: "Public api",
        },
        {
            name: "jobs",
            description: "CronOps job related tasks",
        },
        {
            name: "admin",
            description: "CronOps admin tasks",
        },
    ],
    paths: {
        "/health": {
            get: {
                summary: "Health check",
                tags: ["public"],
                security: [],
                responses: {
                    "200": {
                        description: "OK",
                        content: {
                            "application/json": {
                                schema: {
                                    type: "object",
                                    properties: {
                                        status: { type: "string", example: "ok" },
                                        active_jobs: { type: "integer", example: 3 },
                                    },
                                    required: ["status", "active_jobs"],
                                },
                            },
                        },
                    },
                },
            },
        },
        "/docs": {
            get: {
                summary: "OpenApi docs",
                tags: ["public"],
                security: [],
                responses: {
                    "200": {
                        description: "OK",
                    },
                },
            },
        },
        "/openapi.json": {
            get: {
                summary: "OpenApi specs",
                tags: ["public"],
                security: [],
                responses: {
                    "200": {
                        description: "OK",
                        content: {
                            "application/json": {},
                        },
                    },
                },
            },
        },
        "/api/jobs/trigger/{jobId}": {
            post: {
                summary: "Trigger a job",
                tags: ["jobs"],
                parameters: [
                    {
                        name: "jobId",
                        in: "path",
                        required: true,
                        schema: { type: "string" },
                    },
                ],
                responses: {
                    "200": {
                        description: "Triggered",
                        content: {
                            "application/json": {
                                schema: {
                                    type: "object",
                                    properties: {
                                        triggered: { type: "boolean", example: true },
                                        jobId: { type: "string", example: "job-123" },
                                    },
                                    required: ["triggered", "jobId"],
                                },
                            },
                        },
                    },
                    "404": {
                        description: "Job not found",
                    },
                },
            },
        },
        "/api/jobs/pause/{jobId}": {
            post: {
                summary: "Pause a job",
                tags: ["jobs"],
                parameters: [
                    {
                        name: "jobId",
                        in: "path",
                        required: true,
                        schema: { type: "string" },
                    },
                ],
                responses: {
                    "200": {
                        description: "Paused",
                        content: {
                            "application/json": {
                                schema: {
                                    type: "object",
                                    properties: {
                                        paused: { type: "boolean", example: true },
                                        jobId: { type: "string", example: "job-123" },
                                    },
                                },
                            },
                        },
                    },
                },
            },
        },
        "/api/jobs/resume/{jobId}": {
            post: {
                summary: "Resume a job",
                tags: ["jobs"],
                parameters: [
                    {
                        name: "jobId",
                        in: "path",
                        required: true,
                        schema: { type: "string" },
                    },
                ],
                responses: {
                    "200": {
                        description: "Resumed",
                        content: {
                            "application/json": {
                                schema: {
                                    type: "object",
                                    properties: {
                                        resumed: { type: "boolean", example: true },
                                        jobId: { type: "string", example: "job-123" },
                                    },
                                },
                            },
                        },
                    },
                },
            },
        },
        "/api/jobs/pause/": {
            post: {
                summary: "Pause all jobs",
                tags: ["jobs"],
                responses: {
                    "200": {
                        description: "Paused",
                        content: {
                            "application/json": {
                                schema: {
                                    type: "object",
                                    properties: {
                                        paused: { type: "boolean", example: true },
                                        // was `type: number` (the zod function) — see header note
                                        jobs: { type: "integer", example: 4 },
                                    },
                                },
                            },
                        },
                    },
                },
            },
        },
        "/api/jobs/resume/": {
            post: {
                summary: "Resume all jobs",
                tags: ["jobs"],
                responses: {
                    "200": {
                        description: "Resumed",
                        content: {
                            "application/json": {
                                schema: {
                                    type: "object",
                                    properties: {
                                        resumed: { type: "boolean", example: true },
                                        // was `type: number` (the zod function) — see header note
                                        jobs: { type: "integer", example: 4 },
                                    },
                                },
                            },
                        },
                    },
                },
            },
        },
    },
    servers: [{ url: "http://localhost:3000" }],
    security: [{ ApiKeyBearer: [] }],
};
@@ -0,0 +1,75 @@
1
+ import Fastify from "fastify";
2
+ import chalk from "chalk";
3
+ import { ENV } from "../types/Options.types.js";
4
+ import { openapi } from "./openapi.js";
5
+ const app = Fastify();
6
+ const port = Number(process.env[ENV.PORT] ?? 8118);
7
+ const host = process.env[ENV.HOST] ?? "127.0.0.1";
8
+ const baseUrl = process.env[ENV.BASE_URL] ?? "http://127.0.0.1:8118";
9
+ const apiKey = process.env[ENV.API_KEY];
10
+ app.addHook("preHandler", async (request, reply) => {
11
+ if (request.method === "OPTIONS" || !request.url.startsWith("/api"))
12
+ return;
13
+ const auth = request.headers.authorization;
14
+ const token = auth?.startsWith("Bearer ") ? auth.slice(7) : undefined;
15
+ if (!token || token !== apiKey) {
16
+ return reply.code(401).send("Unauthorized");
17
+ }
18
+ });
19
+ app.get("/docs", async (_, reply) => {
20
+ reply.type("text/html").send(`
21
+ <!doctype html>
22
+ <html>
23
+ <head>
24
+ <meta charset="utf-8" />
25
+ <title>CronOps API</title>
26
+ <script type="module" src="https://unpkg.com/rapidoc/dist/rapidoc-min.js"></script>
27
+ </head>
28
+ <body>
29
+ <rapi-doc spec-url="/openapi.json"></rapi-doc>
30
+ </body>
31
+ </html>`);
32
+ });
33
+ app.get("/openapi.json", async () => {
34
+ return { ...openapi, servers: [{ url: "" }, { url: `${baseUrl}` }] };
35
+ });
36
+ app.get("/health", async (request, reply) => {
37
+ const jobScheduler = request.server.scheduler;
38
+ const jobs = jobScheduler.getScheduledJobs();
39
+ reply.code(200).send({ status: "ok", active_jobs: jobs.length });
40
+ });
41
+ app.post("/api/jobs/trigger/:jobId", async (request, reply) => {
42
+ const { jobId } = request.params;
43
+ const jobScheduler = request.server.scheduler;
44
+ if (!jobScheduler.isJobScheduled(jobId)) {
45
+ return reply.code(404).send();
46
+ }
47
+ await jobScheduler.executeJob(jobId);
48
+ return { triggered: true, jobId };
49
+ });
50
+ app.post("/terminate", async () => {
51
+ return { terminating: true };
52
+ });
53
+ export default function (scheduler) {
54
+ if (!apiKey || !/^[0-9a-f]{64}$/i.test(apiKey)) {
55
+ console.log(chalk.red(`Web API disabled. No valid API key configured!`));
56
+ console.log(`\nTo use the CronOps admin Web API:`);
57
+ console.log(` - Generate a hex‑encoded 256‑bit secret (e.g. 'openssl rand -hex 32')`);
58
+ console.log(` - Configure api-key via environment variable CROPS_API_KEY`);
59
+ console.log(` - Add 'HTTP Bearer' header on each /api HTTP request`);
60
+ }
61
+ else {
62
+ app.decorate("scheduler", scheduler);
63
+ app.listen({ port, host }, (err) => {
64
+ if (err) {
65
+ console.log(chalk.red(`Web API disabled. ${err?.message}`));
66
+ }
67
+ else {
68
+ console.log(`\nWeb API enabled. HTTP Server is listening on port ${port} ...`);
69
+ console.log(` ⎆ API endpoint ${baseUrl}/api (secured)`);
70
+ console.log(` ⎆ OpenAPI docs ${baseUrl}/docs`);
71
+ console.log(` ⎆ Health check ${baseUrl}/health`);
72
+ }
73
+ });
74
+ }
75
+ }
package/dist/api.js ADDED
@@ -0,0 +1,44 @@
1
+ import Fastify from "fastify";
2
+ import { ENV } from "./types/Options.types.js";
3
+ import chalk from "chalk";
4
+ const app = Fastify();
5
+ const port = Number(process.env[ENV.PORT] ?? 8778);
6
+ const apiKey = process.env[ENV.API_KEY];
7
+ app.addHook("preHandler", async (request, reply) => {
8
+ const key = request.headers["x-api-key"];
9
+ if (!apiKey || key !== apiKey) {
10
+ reply.code(401).send("Unauthorized");
11
+ }
12
+ });
13
+ app.get("/status", async () => {
14
+ return { status: "ok" };
15
+ });
16
+ app.post("/trigger/:jobId", async (request) => {
17
+ const { jobId } = request.params;
18
+ return { triggered: true, jobId };
19
+ });
20
+ app.post("/terminate", async () => {
21
+ return { terminating: true };
22
+ });
23
+ export default function () {
24
+ if (!apiKey || !/^[0-9a-f]{64}$/i.test(apiKey)) {
25
+ console.log(chalk.red(`Web-API not enabled: No valid API key found!`));
26
+ console.log(`\nTo use the cronops admin web API:`);
27
+ console.log(` - Generate a hex‑encoded 256‑bit secret (e.g. 'openssl rand -hex 32')`);
28
+ console.log(` - Set it via the environment variable CROPS_API_KEY`);
29
+ }
30
+ else {
31
+ app.listen({ port }, (err, address) => {
32
+ if (err) {
33
+ console.log(chalk.red(`Web API not running. ${err?.message}`));
34
+ }
35
+ else {
36
+ console.log(`Web API listening on port ${port} ...`);
37
+ console.log(`⎆ server running on ${address}`);
38
+ console.log(`⎆ to get server status, type curl -X GET http://localhost:${port}/status`);
39
+ console.log(`⎆ to trigger a job manually, type curl -X POST http://localhost:${port}/trigger/{job-id}`);
40
+ console.log(`⎆ to gracefully terminate server,type curl -X POST http://localhost:${port}/terminate`);
41
+ }
42
+ });
43
+ }
44
+ }
@@ -0,0 +1,10 @@
1
/**
 * Error type for job-level failures; carries the id of the offending job and
 * optionally the underlying error as `cause`.
 */
export class JobError extends Error {
    // Id of the job that caused the error.
    jobId;
    /**
     * @param {string} message - human-readable description
     * @param {string} jobId - id of the failing job
     * @param {unknown} [cause] - underlying error, exposed as `this.cause`
     */
    constructor(message, jobId, cause) {
        // BUG FIX: Error's second argument is an options object; the original
        // `super(message, cause)` silently discarded the cause.
        super(message, { cause });
        this.name = "JobError";
        this.jobId = jobId;
    }
    /**
     * Convenience factory matching the call sites: `JobError.throw(job.id, msg)`.
     * BUG FIX: the original forwarded (jobId, message, cause) positionally into
     * the (message, jobId, cause) constructor, swapping message and jobId.
     * @throws {JobError} always
     */
    static throw(jobId, message, cause) {
        throw new JobError(message, jobId, cause);
    }
}
@@ -0,0 +1,162 @@
1
+ import fsx, { ensureDir } from "fs-extra";
2
+ import tar from "tar-fs";
3
+ import pLimit from "p-limit";
4
+ import parse from "parse-duration";
5
+ import { createGzip } from "node:zlib";
6
+ import { pipeline } from "node:stream/promises";
7
+ import { dirname, join, sep } from "node:path";
8
+ import { createWriteStream } from "node:fs";
9
+ import { JobError } from "../errors/JobError.js";
10
// Shared cap on concurrent filesystem operations across all handlers.
const limit = pLimit(64);
/**
 * Base class for CronOps job handlers (copy/move, delete, archive, exec).
 * Provides config-validation helpers and shared file-processing primitives;
 * subclasses override validateJob/process/processFiles.
 */
export class AbstractHandler {
    // Global setup object (directory resolution, shell, temp/log dirs, ...).
    setup;
    constructor(setup) {
        this.setup = setup;
    }
    /** Subclass hook: throw a JobError when the job config is invalid. */
    validateJob(_job) { }
    /** Subclass hook: run the job once, without per-file iteration. */
    async process(_ctx) { }
    /** Subclass hook: run the job against a list of source entries. */
    async processFiles(_ctx, _entries, _fileHistory) { }
    assertSourceConfigExists(job) {
        if (!job.source)
            JobError.throw(job.id, `Missing 'source' specs. Please check your config file.`);
    }
    assertTargetConfigExists(job) {
        // BUG FIX: added the missing space after "specs." in the message.
        if (!job.target)
            JobError.throw(job.id, `Missing 'target' specs. Please check your config file.`);
    }
    assertSourceDirExist(job) {
        this.assertSourceConfigExists(job);
        const sourceDir = this.setup.resolveSourceDir(job.source?.dir);
        if (!fsx.pathExistsSync(sourceDir))
            JobError.throw(job.id, `missing source dir '${sourceDir}'!`);
    }
    /**
     * Runs `processor` (bound to this handler) for every source entry whose
     * mtime changed since the last run — or for every entry when no
     * fileHistory is supplied. Work fans out through the shared concurrency
     * limiter; per-entry failures go to ctx.processError without aborting
     * the batch.
     */
    async processSources(ctx, entries, fileHistory, processor) {
        const { sourceDir } = ctx;
        await Promise.all(entries.map((sourceEntry) => {
            const sourcePath = join(sourceDir, sourceEntry);
            return limit(async () => {
                try {
                    const stats = await fsx.stat(sourcePath);
                    const { changed } = fileHistory ? fileHistory.updateSourceEntry(sourcePath, [stats.mtimeMs, ctx.startTime]) : { changed: true };
                    if (changed && processor)
                        await processor.bind(this)(ctx, { sourceEntry, sourcePath, stats }, fileHistory);
                }
                catch (error) {
                    ctx.processError(new Error(`Cannot process source entry '${sourceEntry}'.\n └─ ${String(error)}`));
                }
            });
        }));
    }
    /**
     * Copies one file into the target dir, preserving source timestamps and
     * applying target permissions; for "move" jobs the source is then deleted.
     * NOTE(review): unlike deleteFile, the copy itself is not guarded by
     * job.dry_run — confirm whether copy jobs are meant to honor dry_run.
     */
    async copyOrMoveFile(ctx, entry, fileHistory) {
        const { job, result, targetDir, targetPermissions } = ctx;
        const { sourcePath, sourceEntry, stats } = entry;
        const targetPath = join(targetDir, sourceEntry);
        await fsx.copyFile(sourcePath, targetPath);
        result.copied++;
        await fsx.utimes(targetPath, stats.atime, stats.mtime);
        await this.setTargetFilePermissions(targetPath, targetPermissions);
        if (fileHistory)
            fileHistory.addTargetEntry(targetPath, [stats.mtimeMs, ctx.startTime]);
        ctx.processActivity("COPIED", targetPath, ctx.result.copied);
        if (job.action === "move") {
            await this.deleteFile(ctx, entry);
        }
    }
    /**
     * Removes a source file and records its parent directories as candidates
     * for later pruning by deleteEmptySourceDirs. In dry_run mode nothing is
     * touched on disk, but the deleted counter still increments for reporting.
     */
    async deleteFile(ctx, { sourcePath }) {
        const { job, result, sourceDir, sourceDirs } = ctx;
        if (!job.dry_run) {
            await fsx.remove(sourcePath);
            // Walk up to (but not including) the source root, collecting dirs
            // that may now be empty.
            let current = dirname(sourcePath);
            while (current.length > sourceDir.length && !sourceDirs.has(current)) {
                sourceDirs.add(current);
                current = dirname(current);
            }
            ctx.processActivity("DELETED", sourcePath, ctx.result.deleted);
        }
        result.deleted++;
    }
    /**
     * Packs the given entries of the source dir into a gzipped tarball in the
     * target dir — only when the history changed or the archive is missing,
     * and never after earlier errors in this run.
     * NOTE(review): when the tar/gzip pipeline fails, the error is reported but
     * the chmod/history/ARCHIVED bookkeeping below still runs against the
     * partial file — confirm whether that is intended.
     */
    async createArchive(ctx, entries, fileHistory) {
        if (entries.length > 0 && !ctx.result.errors) {
            const { job, sourceDir, targetDir, targetPermissions, result } = ctx;
            const dest = join(targetDir, job.targetArchiveName);
            if (fileHistory.changed || !fsx.pathExistsSync(dest)) {
                await ensureDir(targetDir);
                try {
                    await pipeline(tar.pack(sourceDir, { entries }), createGzip(), createWriteStream(dest));
                }
                catch (err) {
                    ctx.processError(err instanceof Error ? err : new Error("Compression error!"));
                }
                await this.setTargetFilePermissions(dest, targetPermissions);
                ctx.targetDirs.add(targetDir);
                fileHistory.addTargetEntry(dest, [result.startTime, result.startTime]);
                result.archived = entries.length;
                ctx.processActivity("ARCHIVED", dest, result.archived);
            }
        }
    }
    /**
     * Attempts to rmdir every collected source directory, deepest first; a
     * non-empty directory simply fails and is ignored. No-op in dry_run mode.
     */
    async deleteEmptySourceDirs(ctx) {
        const { job, sourceDirs } = ctx;
        if (!job.dry_run) {
            const sortedDirs = Array.from(sourceDirs).sort((a, b) => b.split(sep).length - a.split(sep).length);
            for (const dir of sortedDirs) {
                try {
                    await fsx.rmdir(dir);
                }
                catch { } // expected for non-empty dirs — best-effort cleanup
            }
        }
    }
    /**
     * Ensures the target dir plus every per-entry parent dir exists,
     * creating them concurrently through the shared limiter.
     */
    async createTargetDirs(ctx, entries) {
        const { targetDir, targetDirs } = ctx;
        for (const entry of entries)
            targetDirs.add(dirname(join(targetDir, entry)));
        targetDirs.add(targetDir);
        await Promise.all(Array.from(targetDirs).map((dir) => limit(() => fsx.ensureDir(dir))));
    }
    /** Applies owner/mode to every created target dir (and their parents). */
    async setTargetDirPermissions(ctx) {
        const folderPromises = new Map();
        for (const dir of ctx.targetDirs)
            this.getFolderPermissionPromises(ctx, dir, folderPromises);
        await Promise.all(folderPromises.values());
    }
    /**
     * Prunes tracked target files whose retention window
     * (job.target.retention, parsed as a duration string) has elapsed, and
     * drops history entries for files that no longer exist on disk.
     */
    async cleanup(ctx, fileHistory) {
        const { job } = ctx;
        const retentionMs = parse(job.target?.retention) ?? 0;
        const targetScanPromises = Object.keys(fileHistory.data.target).map((path) => {
            return limit(async () => {
                // [1] is the time the target entry was recorded.
                const ttime = fileHistory.data.target[path]?.[1] ?? 0;
                if (await fsx.pathExists(path)) {
                    if (retentionMs > 0 && ctx.startTime - (ttime + retentionMs) >= 0) {
                        await fsx.remove(path);
                        fileHistory.markTargetOutdated(path);
                        ctx.result.pruned++;
                        ctx.processActivity("PRUNED", path, ctx.result.pruned);
                    }
                }
                else
                    fileHistory.markTargetOutdated(path);
            });
        });
        await Promise.all(targetScanPromises);
        fileHistory.cleanup();
    }
    /**
     * Applies ownership (only when uid/gid are both non-negative) and mode
     * to a target file or directory.
     */
    async setTargetFilePermissions(destPath, perms, isDir = false) {
        if (perms.uid >= 0 && perms.gid >= 0)
            await fsx.chown(destPath, perms.uid, perms.gid);
        await fsx.chmod(destPath, isDir ? perms.dirMode : perms.fileMode);
    }
    /**
     * Recursively collects one limiter-wrapped chmod/chown promise per folder
     * from dirPath up to the job's target root, deduplicated via the map.
     */
    getFolderPermissionPromises(ctx, dirPath, folderPromises) {
        if (dirPath.length < ctx.targetDir.length || folderPromises.has(dirPath))
            return;
        folderPromises.set(dirPath, limit(async () => {
            try {
                await this.setTargetFilePermissions(dirPath, ctx.targetPermissions, true);
            }
            catch (error) {
                ctx.processError(new Error(`Cannot chmod on folder '${dirPath}'. ${error}`));
            }
        }));
        this.getFolderPermissionPromises(ctx, dirname(dirPath), folderPromises);
    }
}
@@ -0,0 +1,103 @@
1
+ import { spawn } from "node:child_process";
2
+ import { parse } from "node:path";
3
+ import { JobError } from "../errors/JobError.js";
4
+ import { AbstractHandler } from "./AbstractHandler.js";
5
/**
 * Job handler that runs an external command — once per job, or once per
 * changed source file. Placeholders like {jobId}, {file}, {targetDir} are
 * substituted into the command, its args and env values, and a set of
 * CROPS_* environment variables is exported to the child process.
 */
export class ExecHandler extends AbstractHandler {
    // A command is mandatory for exec jobs.
    validateJob(job) {
        if (!job.command)
            JobError.throw(job.id, "Missing job command for execution");
    }
    // Run the command once, with no file context (file vars stay empty).
    async process(ctx) {
        await this.exec(ctx);
    }
    // Run the command for each changed source entry, then apply retention cleanup.
    async processFiles(ctx, entries, fileHistory) {
        if (entries.length > 0) {
            await this.processSources(ctx, entries, fileHistory, this.exec);
        }
        await super.cleanup(ctx, fileHistory);
    }
    /**
     * Spawns the variable-resolved command as a child process; resolves when
     * it exits with code 0, rejects on spawn errors or a non-zero exit.
     * stdout/stderr are written to the job log only in verbose mode.
     */
    async exec(ctx, entry) {
        const { targetDir } = ctx;
        const verbose = ctx.job.verbose === true;
        // new Promise adapts child_process's event-based API to async/await.
        await new Promise((resolve, reject) => {
            const { vars, env } = this.createVars(ctx, entry);
            // `done` guards finish() against both 'close' and 'error' firing.
            let done = false;
            let pid;
            const cmd = this.resolveVars(ctx.job.command, vars);
            const args = ctx.job.args.map((arg) => this.resolveVars(arg, vars));
            const finish = (err) => {
                if (!done) {
                    done = true;
                    if (err)
                        reject(err);
                    else {
                        ctx.result.executed++;
                        ctx.processActivity("EXECUTED", `Process successfully terminated (pid: ${pid})`, ctx.result.executed);
                        resolve();
                    }
                }
            };
            const logFd = ctx.getLogFd();
            const child = spawn(cmd, args, {
                // stdin is never wired up; output goes to the log fd only when verbose.
                stdio: ["ignore", verbose ? logFd : "ignore", verbose ? logFd : "ignore"],
                // Job-level shell setting overrides the global one.
                shell: ctx.job.shell ?? this.setup.shell,
                env: { ...process.env, ...env },
                cwd: targetDir,
            });
            pid = child.pid;
            ctx.writeLog(`◉ Subprocess started (pid:${pid}) ➜ ${cmd} [${args}]`);
            child.once("close", (code, signal) => {
                // code is null when the process was killed by a signal.
                if (code === 0)
                    finish();
                else
                    finish(new Error(`✖ Subprocess (pid:${pid}) failed (code=${code}, signal=${signal})`));
            });
            child.once("error", (err) => finish(err));
        });
    }
    /**
     * Builds the {placeholder} substitution map and the child-process env.
     * File-related vars/env are filled in only when a source entry is given.
     */
    createVars(ctx, entry) {
        const vars = {
            jobId: ctx.job.id,
            sourceDir: ctx.sourceDir,
            targetDir: ctx.targetDir,
            scriptDir: this.setup.scriptDir,
            tempDir: this.setup.tempDir,
            logDir: this.setup.logDir,
            file: "",
            fileDir: "",
            fileBase: "",
            fileName: "",
            fileExt: "",
        };
        if (entry) {
            const { dir: fileDir, base: fileBase, name: fileName, ext: fileExt } = parse(entry.sourcePath);
            Object.assign(vars, { file: entry.sourcePath, fileDir, fileBase, fileName, fileExt });
        }
        // User-configured env values may themselves contain {placeholders}.
        const env = {};
        for (const [key, value] of Object.entries(ctx.job.env))
            env[key] = this.resolveVars(value, vars);
        // Built-in CROPS_* vars are added last and thus win over user values.
        Object.assign(env, {
            CROPS_JOB_ID: vars.jobId,
            CROPS_SOURCE_DIR: ctx.sourceDir,
            CROPS_TARGET_DIR: ctx.targetDir,
            CROPS_SCRIPT_DIR: vars.scriptDir,
            CROPS_TEMP_DIR: vars.tempDir,
            CROPS_LOG_DIR: vars.logDir,
            CROPS_DRY_RUN: `${ctx.job.dry_run}`,
            CROPS_VERBOSE: `${ctx.job.verbose}`,
        });
        if (entry) {
            Object.assign(env, {
                CROPS_FILE: vars.file,
                CROPS_FILE_DIR: vars.fileDir,
                CROPS_FILE_BASE: vars.fileBase,
                CROPS_FILE_NAME: vars.fileName,
                CROPS_FILE_EXT: vars.fileExt,
            });
        }
        return { vars, env };
    }
    /** Replaces each {name} placeholder with vars[name] ("" when unknown). */
    resolveVars(str, vars) {
        return str.replace(/\{(\w+)\}/g, (_, key) => vars[key] ?? "");
    }
}
@@ -0,0 +1,15 @@
1
+ import { AbstractHandler } from "./AbstractHandler.js";
2
/**
 * Handler for archive jobs: packs changed source files into a gzipped
 * tarball in the target directory.
 */
export class FileArchiveHandler extends AbstractHandler {
    /** An archive job needs an existing source dir and a target config. */
    validateJob(job) {
        super.assertSourceDirExist(job);
        super.assertTargetConfigExists(job);
    }
    /**
     * Records changed sources, rebuilds the archive when needed, and applies
     * target-dir permissions. Retention cleanup runs on every invocation,
     * even when there is nothing new to archive.
     */
    async processFiles(ctx, entries, fileHistory) {
        if (entries.length === 0) {
            // Nothing to pack — still honor the retention policy.
            await super.cleanup(ctx, fileHistory);
            return;
        }
        await super.processSources(ctx, entries, fileHistory);
        await super.createArchive(ctx, entries, fileHistory);
        await super.setTargetDirPermissions(ctx);
        await super.cleanup(ctx, fileHistory);
    }
}
@@ -0,0 +1,15 @@
1
+ import { AbstractHandler } from "./AbstractHandler.js";
2
/**
 * Handler for copy/move jobs: mirrors changed source files into the target
 * directory tree.
 */
export class FileCopyHandler extends AbstractHandler {
    /** A copy job needs an existing source dir and a target config. */
    validateJob(job) {
        super.assertSourceDirExist(job);
        super.assertTargetConfigExists(job);
    }
    /**
     * Pre-creates all target directories, copies (or moves) each changed
     * source file, then applies directory permissions. Retention cleanup
     * runs on every invocation, even with no changed entries.
     */
    async processFiles(ctx, entries, fileHistory) {
        if (entries.length === 0) {
            // No changed files — still honor the retention policy.
            await super.cleanup(ctx, fileHistory);
            return;
        }
        await super.createTargetDirs(ctx, entries);
        await super.processSources(ctx, entries, fileHistory, this.copyOrMoveFile);
        await super.setTargetDirPermissions(ctx);
        await super.cleanup(ctx, fileHistory);
    }
}
@@ -0,0 +1,13 @@
1
+ import { AbstractHandler } from "./AbstractHandler.js";
2
/**
 * Handler for delete jobs: removes changed source files and prunes the
 * directories left empty behind them.
 */
export class FileDeleteHandler extends AbstractHandler {
    /** A delete job only needs an existing source dir. */
    validateJob(job) {
        super.assertSourceDirExist(job);
    }
    /**
     * Deletes each changed source entry, then removes now-empty source dirs.
     * Retention cleanup runs on every invocation, even with no entries.
     */
    async processFiles(ctx, entries, fileHistory) {
        if (entries.length === 0) {
            // Nothing deleted — still honor the retention policy.
            await super.cleanup(ctx, fileHistory);
            return;
        }
        await super.processSources(ctx, entries, fileHistory, this.deleteFile);
        await super.deleteEmptySourceDirs(ctx);
        await super.cleanup(ctx, fileHistory);
    }
}