petadep 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +78 -0
- package/bin/petadep.js +91 -0
- package/package.json +33 -0
- package/src/agent.js +113 -0
- package/src/cli.js +65 -0
- package/src/deployer.js +138 -0
- package/src/locks.js +32 -0
- package/src/logger.js +11 -0
- package/src/verifyGithub.js +18 -0
package/README.md
ADDED
@@ -0,0 +1,78 @@
# petadep

Deploy GitHub repos to a VPS on push via GitHub webhooks or a GitHub App.

## Install

```bash
npm install
```

Make the CLI executable:

```bash
chmod +x bin/petadep.js
```

## Initialize config

```bash
petadep init --config ./data/config.json
```

This creates `./data/config.json`, `./data/`, and `./logs/`, prints the webhook secret, and shows the next step.

## Add a deployment target

```bash
petadep add --config ./data/config.json \
  --repo owner/repo \
  --branch staging \
  --env staging \
  --workdir /var/www/repo-staging \
  --script ./deploy/staging.sh
```

## Run the agent

```bash
petadep agent --config ./data/config.json
```

The server exposes:

- `POST /webhook` (or `config.path`)
- `GET /health`

## GitHub webhook setup

- **Webhook URL**: `http://YOUR_VPS_IP:8787/webhook` (match `config.path`)
- **Secret**: the value printed by `init` (also stored in the config)
- **Events**: `push` (or update `security.allowedEvents`)

If using a GitHub App, set the same URL/secret in the App's webhook settings.

## SSH deploy key requirement

The agent clones via SSH using `git@github.com:owner/repo.git`. Your VPS must have a deploy key or SSH key with access to the repo. Add the VPS public key as a **Deploy key** in GitHub (read-only is fine).

## Config schema

```json
{
  "port": 8787,
  "path": "/webhook",
  "secret": "<random>",
  "deployments": [
    {
      "repo": "owner/repo",
      "branch": "staging",
      "env": "staging",
      "workdir": "/var/www/repo-staging",
      "script": "./deploy/staging.sh"
    }
  ],
  "security": { "allowedEvents": ["push"], "lockPerTarget": true, "timeoutSeconds": 900 },
  "logsDir": "./logs"
}
```
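Before pointing GitHub at the server, the webhook flow described in the README can be smoke-tested locally. The sketch below is not part of the package; it assumes an agent running with the generated `./data/config.json` (port 8787, path `/webhook`) and a deployment entry for `owner/repo` on `staging`, and it signs a minimal push payload with the same `sha256=<hex HMAC>` scheme GitHub uses for `X-Hub-Signature-256`. Run it with Node 18+ (for global `fetch`) as a throwaway ES module, e.g. `send-test-webhook.mjs`.

```js
// Local smoke test for the webhook (a sketch, not part of the package).
// Assumes the agent is running and a target exists for owner/repo on branch staging.
import crypto from "crypto";
import fs from "fs/promises";

const config = JSON.parse(await fs.readFile("./data/config.json", "utf8"));

// Minimal push payload: only the fields the agent reads (repository.full_name, ref).
const payload = JSON.stringify({
  repository: { full_name: "owner/repo" },
  ref: "refs/heads/staging",
});

// GitHub's signature scheme: sha256=<hex HMAC-SHA-256 of the raw request body>.
const signature =
  "sha256=" +
  crypto.createHmac("sha256", config.secret).update(payload).digest("hex");

const res = await fetch(`http://localhost:${config.port}${config.path}`, {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    "X-GitHub-Event": "push",
    "X-Hub-Signature-256": signature,
  },
  body: payload,
});

console.log(res.status, await res.json());
// Expect 202 { status: "accepted" } for a configured target, 202 "ignored" otherwise,
// and 401 if the signature does not match.
```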
package/bin/petadep.js
ADDED
@@ -0,0 +1,91 @@
#!/usr/bin/env node
import yargs from "yargs";
import { hideBin } from "yargs/helpers";
import { initConfig, addDeployment } from "../src/cli.js";
import { startAgent } from "../src/agent.js";

async function main() {
  yargs(hideBin(process.argv))
    .scriptName("petadep")
    .command(
      "init",
      "Initialize a config file",
      (y) =>
        y.option("config", {
          type: "string",
          demandOption: true,
          describe: "Path to config JSON",
        }),
      async (argv) => {
        await initConfig({ configPath: argv.config });
      }
    )
    .command(
      "add",
      "Add a deployment target",
      (y) =>
        y
          .option("config", {
            type: "string",
            demandOption: true,
            describe: "Path to config JSON",
          })
          .option("repo", {
            type: "string",
            demandOption: true,
            describe: "Repo in owner/repo format",
          })
          .option("branch", {
            type: "string",
            demandOption: true,
            describe: "Branch name",
          })
          .option("env", {
            type: "string",
            demandOption: true,
            describe: "Environment name",
          })
          .option("workdir", {
            type: "string",
            demandOption: true,
            describe: "Working directory on the VPS",
          })
          .option("script", {
            type: "string",
            demandOption: true,
            describe: "Deploy script path (relative to repo or absolute)",
          }),
      async (argv) => {
        await addDeployment({
          configPath: argv.config,
          repo: argv.repo,
          branch: argv.branch,
          env: argv.env,
          workdir: argv.workdir,
          script: argv.script,
        });
      }
    )
    .command(
      "agent",
      "Start the webhook server",
      (y) =>
        y.option("config", {
          type: "string",
          demandOption: true,
          describe: "Path to config JSON",
        }),
      async (argv) => {
        await startAgent({ configPath: argv.config });
      }
    )
    .demandCommand(1, "Choose a command")
    .strict()
    .help()
    .parse();
}

main().catch((err) => {
  console.error(err?.message || err);
  process.exit(1);
});
package/package.json
ADDED
@@ -0,0 +1,33 @@
{
  "name": "petadep",
  "version": "1.0.1",
  "description": "Deploy GitHub repos to a VPS on push via webhooks",
  "type": "module",
  "bin": {
    "petadep": "./bin/petadep.js"
  },
  "main": "./src/agent.js",
  "files": [
    "bin/",
    "src/",
    "README.md",
    "package.json"
  ],
  "scripts": {
    "start": "node bin/petadep.js agent --config ./data/config.json",
    "dev": "nodemon --exec node bin/petadep.js agent --config ./data/config.json"
  },
  "keywords": [],
  "author": "",
  "license": "ISC",
  "dependencies": {
    "dotenv": "^17.2.3",
    "execa": "^9.6.1",
    "express": "^5.2.1",
    "yargs": "^18.0.0",
    "zod": "^4.3.5"
  },
  "devDependencies": {
    "nodemon": "^3.1.11"
  }
}
package/src/agent.js
ADDED
@@ -0,0 +1,113 @@
import fs from "fs/promises";
import path from "path";
import express from "express";
import dotenv from "dotenv";
import { z } from "zod";
import { verifyGithubSignature } from "./verifyGithub.js";
import { deployTarget } from "./deployer.js";
import { appendLog, ensureDir } from "./logger.js";

dotenv.config();

const deploymentSchema = z.object({
  repo: z.string().min(1),
  branch: z.string().min(1),
  env: z.string().min(1),
  workdir: z.string().min(1),
  script: z.string().min(1),
});

const securitySchema = z
  .object({
    allowedEvents: z.array(z.string()).default(["push"]),
    lockPerTarget: z.boolean().default(true),
    timeoutSeconds: z.coerce.number().int().positive().default(900),
  })
  .default({});

const configSchema = z.object({
  port: z.coerce.number().int().positive().default(8787),
  path: z.string().default("/webhook"),
  secret: z.string().min(1),
  deployments: z.array(deploymentSchema).default([]),
  security: securitySchema,
  logsDir: z.string().default("./logs"),
});

async function loadConfig(configPath) {
  const resolvedPath = path.resolve(configPath);
  const raw = await fs.readFile(resolvedPath, "utf8");
  const parsed = JSON.parse(raw);
  return configSchema.parse(parsed);
}

export async function startAgent({ configPath }) {
  const config = await loadConfig(configPath);
  await ensureDir(config.logsDir);

  const app = express();
  app.use(
    express.json({
      verify: (req, _res, buf) => {
        req.rawBody = buf;
      },
    })
  );

  app.get("/health", (_req, res) => {
    res.json({ status: "ok" });
  });

  app.post(config.path, async (req, res) => {
    const event = req.headers["x-github-event"];
    const signature = req.headers["x-hub-signature-256"];
    const isValid = verifyGithubSignature({
      secret: config.secret,
      rawBody: req.rawBody,
      signature,
    });

    if (!isValid) {
      const securityLog = path.join(config.logsDir, "security.log");
      const line = `[${new Date().toISOString()}] Invalid signature from ${
        req.ip
      }\n`;
      await appendLog(securityLog, line);
      return res.status(401).json({ error: "invalid signature" });
    }

    if (!config.security.allowedEvents.includes(event)) {
      return res.status(202).json({ status: "ignored", reason: "event" });
    }

    const repo = req.body?.repository?.full_name;
    const ref = req.body?.ref;
    const branch = typeof ref === "string" ? ref.replace("refs/heads/", "") : "";

    if (!repo || !branch) {
      return res.status(400).json({ error: "missing repo or branch" });
    }

    const target = config.deployments.find(
      (d) => d.repo === repo && d.branch === branch
    );
    if (!target) {
      return res.status(202).json({ status: "ignored", reason: "no target" });
    }

    res.status(202).json({ status: "accepted" });

    setImmediate(() => {
      deployTarget(target, config).catch(() => {
        return;
      });
    });
  });

  app.listen(config.port, () => {
    console.log(
      `Webhook listening on http://localhost:${config.port}${config.path}`
    );
    console.log(`Health check on http://localhost:${config.port}/health`);
  });
}
package/src/cli.js
ADDED
@@ -0,0 +1,65 @@
import fs from "fs/promises";
import path from "path";
import crypto from "crypto";

function defaultConfig(secret) {
  return {
    port: 8787,
    path: "/webhook",
    secret,
    deployments: [],
    security: {
      allowedEvents: ["push"],
      lockPerTarget: true,
      timeoutSeconds: 900,
    },
    logsDir: "./logs",
  };
}

export async function initConfig({ configPath }) {
  const resolvedPath = path.resolve(configPath);
  const configDir = path.dirname(resolvedPath);
  await fs.mkdir(configDir, { recursive: true });
  await fs.mkdir(path.resolve("data"), { recursive: true });
  await fs.mkdir(path.resolve("logs"), { recursive: true });

  const secret = crypto.randomBytes(32).toString("hex");
  const config = defaultConfig(secret);
  await fs.writeFile(resolvedPath, JSON.stringify(config, null, 2));

  console.log("Config created:", resolvedPath);
  console.log("Webhook secret:", secret);
  console.log(
    "Next: add a deployment with `petadep add --config " +
      resolvedPath +
      " --repo owner/repo --branch main --env production --workdir /var/www/repo --script ./deploy.sh`"
  );
}

export async function addDeployment({
  configPath,
  repo,
  branch,
  env,
  workdir,
  script,
}) {
  const resolvedPath = path.resolve(configPath);
  const raw = await fs.readFile(resolvedPath, "utf8");
  const config = JSON.parse(raw);
  config.deployments = Array.isArray(config.deployments)
    ? config.deployments
    : [];

  const exists = config.deployments.find(
    (d) => d.repo === repo && d.branch === branch && d.env === env
  );
  if (exists) {
    throw new Error("Deployment already exists for this repo/branch/env.");
  }

  config.deployments.push({ repo, branch, env, workdir, script });
  await fs.writeFile(resolvedPath, JSON.stringify(config, null, 2));
  console.log("Deployment added:", repo, branch, env);
}
package/src/deployer.js
ADDED
@@ -0,0 +1,138 @@
import fs from "fs";
import fsp from "fs/promises";
import path from "path";
import { execa } from "execa";
import { appendLog, ensureDir } from "./logger.js";
import { lockPathForTarget, withFileLock } from "./locks.js";

function logPathForTarget(config, target) {
  const safeRepo = target.repo.replace(/\//g, "__");
  return path.join(config.logsDir, `${safeRepo}__${target.env}.log`);
}

function toIsoLine(message) {
  return `[${new Date().toISOString()}] ${message}\n`;
}

async function runCommand(command, args, options, logStream) {
  const child = execa(command, args, options);
  if (child.stdout) {
    child.stdout.pipe(logStream, { end: false });
  }
  if (child.stderr) {
    child.stderr.pipe(logStream, { end: false });
  }
  await child;
}

async function ensureRepo(target, config, logStream, timeoutMs) {
  await fsp.mkdir(target.workdir, { recursive: true });
  const gitDir = path.join(target.workdir, ".git");
  const hasGit = fs.existsSync(gitDir);

  if (!hasGit) {
    const contents = await fsp.readdir(target.workdir);
    if (contents.length > 0) {
      throw new Error("Workdir is not empty and is not a git repository.");
    }
    const sshUrl = `git@github.com:${target.repo}.git`;
    await appendLog(
      logPathForTarget(config, target),
      toIsoLine(`Cloning ${sshUrl} into ${target.workdir}`)
    );
    await runCommand(
      "git",
      ["clone", sshUrl, target.workdir],
      { timeout: timeoutMs },
      logStream
    );
  }
}

async function checkoutBranch(target, logStream, timeoutMs) {
  const cwd = target.workdir;
  await runCommand("git", ["fetch", "--all", "--prune"], { cwd, timeout: timeoutMs }, logStream);

  let hasBranch = true;
  try {
    await runCommand(
      "git",
      ["rev-parse", "--verify", target.branch],
      { cwd, timeout: timeoutMs },
      logStream
    );
  } catch {
    hasBranch = false;
  }

  if (hasBranch) {
    await runCommand(
      "git",
      ["checkout", target.branch],
      { cwd, timeout: timeoutMs },
      logStream
    );
  } else {
    await runCommand(
      "git",
      ["checkout", "-b", target.branch, `origin/${target.branch}`],
      { cwd, timeout: timeoutMs },
      logStream
    );
  }

  await runCommand(
    "git",
    ["reset", "--hard", `origin/${target.branch}`],
    { cwd, timeout: timeoutMs },
    logStream
  );
}

async function runScript(target, logStream, timeoutMs) {
  const scriptPath = path.isAbsolute(target.script)
    ? target.script
    : path.join(target.workdir, target.script);
  await runCommand("bash", [scriptPath], { cwd: target.workdir, timeout: timeoutMs }, logStream);
}

export async function deployTarget(target, config) {
  const logFile = logPathForTarget(config, target);
  await ensureDir(config.logsDir);
  await appendLog(logFile, toIsoLine("Deployment starting"));

  const targetId = `${target.repo}__${target.env}`;
  const lockFile = lockPathForTarget(targetId);
  const timeoutMs = (config.security?.timeoutSeconds || 900) * 1000;

  const run = async () => {
    const logStream = fs.createWriteStream(logFile, { flags: "a" });
    try {
      await ensureRepo(target, config, logStream, timeoutMs);
      await checkoutBranch(target, logStream, timeoutMs);
      await runScript(target, logStream, timeoutMs);
      await appendLog(logFile, toIsoLine("Deployment finished"));
    } catch (err) {
      await appendLog(
        logFile,
        toIsoLine(`Deployment failed: ${err?.message || err}`)
      );
      throw err;
    } finally {
      logStream.end();
    }
  };

  if (config.security?.lockPerTarget !== false) {
    try {
      return await withFileLock(lockFile, run);
    } catch (err) {
      await appendLog(
        logFile,
        toIsoLine(`Deployment skipped: ${err?.message || err}`)
      );
      throw err;
    }
  }
  return run();
}
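`deployTarget` is only ever invoked from the webhook handler in `agent.js`, but it takes nothing beyond a target entry and the parsed config, so a one-off deploy can be driven by hand. Below is a minimal sketch (not a packaged command), assuming it runs from the package root as an ES module with an existing `./data/config.json`; the file name `run-once.mjs` is made up.

```js
// One-off manual deploy (a sketch, not part of the package).
// Run from the package root, e.g. `node run-once.mjs`.
import fs from "fs/promises";
import { deployTarget } from "./src/deployer.js";

const config = JSON.parse(await fs.readFile("./data/config.json", "utf8"));
const target = config.deployments[0]; // pick whichever target you want to deploy

// Clones (or fetches) the repo into target.workdir, hard-resets to origin/<branch>,
// runs target.script, and appends all output to the per-target log under config.logsDir.
await deployTarget(target, config);
```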
package/src/locks.js
ADDED
@@ -0,0 +1,32 @@
import fs from "fs/promises";
import path from "path";

const LOCK_ROOT = "/tmp";

function sanitize(name) {
  return name.replace(/[^a-zA-Z0-9_.-]/g, "_");
}

export function lockPathForTarget(targetId) {
  return path.join(LOCK_ROOT, `${sanitize(targetId)}.lock`);
}

export async function withFileLock(lockFile, fn) {
  let handle;
  try {
    await fs.mkdir(path.dirname(lockFile), { recursive: true });
    handle = await fs.open(lockFile, "wx");
  } catch (err) {
    if (err && err.code === "EEXIST") {
      throw new Error("Deployment already running for this target.");
    }
    throw err;
  }

  try {
    return await fn();
  } finally {
    await handle.close();
    await fs.unlink(lockFile).catch(() => {});
  }
}
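The lock is nothing more than a file created with the exclusive `wx` flag, so a second deployment for the same target fails fast instead of queueing behind the first. A small sketch of that behaviour (not part of the package; run from the package root as an ES module, with a target id string mirroring the `repo__env` id that `deployer.js` builds):

```js
// Sketch of the per-target lock behaviour (not part of the package).
import { lockPathForTarget, withFileLock } from "./src/locks.js";

const lockFile = lockPathForTarget("owner/repo__staging"); // -> /tmp/owner_repo__staging.lock

const result = await withFileLock(lockFile, async () => {
  // While this callback runs the lock file exists, so a second attempt rejects immediately.
  const err = await withFileLock(lockFile, async () => "never runs").catch((e) => e);
  console.log(err.message); // "Deployment already running for this target."
  return "done";
});

console.log(result); // "done" -- the lock file has been removed again
```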
package/src/logger.js
ADDED
@@ -0,0 +1,11 @@
import fs from "fs/promises";
import path from "path";

export async function ensureDir(dirPath) {
  await fs.mkdir(dirPath, { recursive: true });
}

export async function appendLog(filePath, line) {
  await ensureDir(path.dirname(filePath));
  await fs.appendFile(filePath, line);
}
package/src/verifyGithub.js
ADDED
@@ -0,0 +1,18 @@
import crypto from "crypto";

export function verifyGithubSignature({ secret, rawBody, signature }) {
  if (!secret || !rawBody || !signature) {
    return false;
  }

  const hmac = crypto.createHmac("sha256", secret);
  hmac.update(rawBody);
  const digest = `sha256=${hmac.digest("hex")}`;

  const a = Buffer.from(digest, "utf8");
  const b = Buffer.from(signature, "utf8");
  if (a.length !== b.length) {
    return false;
  }
  return crypto.timingSafeEqual(a, b);
}