@shetty4l/core 0.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/ci-shared.yml +36 -0
- package/.github/workflows/ci.yml +14 -0
- package/.github/workflows/release-shared.yml +150 -0
- package/.github/workflows/release.yml +19 -0
- package/.husky/pre-commit +3 -0
- package/README.md +88 -0
- package/biome.json +12 -0
- package/bun.lock +65 -0
- package/package.json +37 -0
- package/scripts/install-lib.sh +149 -0
- package/src/cli.ts +109 -0
- package/src/config.ts +200 -0
- package/src/daemon.ts +204 -0
- package/src/http.ts +138 -0
- package/src/index.ts +21 -0
- package/src/result.ts +28 -0
- package/src/scripts/version-bump.ts +144 -0
- package/src/signals.ts +69 -0
- package/src/version.ts +27 -0
- package/test/cli.test.ts +61 -0
- package/test/config.test.ts +263 -0
- package/test/daemon.test.ts +89 -0
- package/test/http.test.ts +152 -0
- package/test/result.test.ts +58 -0
- package/test/signals.test.ts +25 -0
- package/test/version.test.ts +55 -0
- package/tsconfig.json +18 -0
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Creates a version bump marker commit for the release workflow.
|
|
3
|
+
*
|
|
4
|
+
* Usage:
|
|
5
|
+
* bun run version:bump minor # next release bumps minor (e.g. 0.1.18 -> 0.2.0)
|
|
6
|
+
* bun run version:bump major # next release bumps major (e.g. 0.1.18 -> 1.0.0)
|
|
7
|
+
*
|
|
8
|
+
* Patch bumps are the default in the release workflow and need no marker.
|
|
9
|
+
*
|
|
10
|
+
* The script creates an empty commit with a [minor] or [major] tag in the
|
|
11
|
+
* message. The release workflow scans all commits since the last tag for
|
|
12
|
+
* these markers to determine the bump level.
|
|
13
|
+
*/
|
|
14
|
+
|
|
15
|
+
type BumpLevel = "minor" | "major";
|
|
16
|
+
|
|
17
|
+
const VALID_BUMPS: ReadonlySet<string> = new Set(["minor", "major"]);
|
|
18
|
+
|
|
19
|
+
const run = (cmd: string[]): { stdout: string; exitCode: number } => {
|
|
20
|
+
const result = Bun.spawnSync({
|
|
21
|
+
cmd,
|
|
22
|
+
stdout: "pipe",
|
|
23
|
+
stderr: "pipe",
|
|
24
|
+
});
|
|
25
|
+
return {
|
|
26
|
+
stdout: new TextDecoder().decode(result.stdout).trim(),
|
|
27
|
+
exitCode: result.exitCode,
|
|
28
|
+
};
|
|
29
|
+
};
|
|
30
|
+
|
|
31
|
+
const fail = (message: string): never => {
|
|
32
|
+
console.error(`error: ${message}`);
|
|
33
|
+
return process.exit(1) as never;
|
|
34
|
+
};
|
|
35
|
+
|
|
36
|
+
const getLatestTag = (): string => {
|
|
37
|
+
const { stdout, exitCode } = run(["git", "describe", "--tags", "--abbrev=0"]);
|
|
38
|
+
if (exitCode !== 0 || !stdout.startsWith("v")) {
|
|
39
|
+
fail("no git tags found. Create an initial tag (e.g. v0.1.0) first.");
|
|
40
|
+
}
|
|
41
|
+
return stdout;
|
|
42
|
+
};
|
|
43
|
+
|
|
44
|
+
const parseVersion = (tag: string): [number, number, number] => {
|
|
45
|
+
const match = tag.match(/^v(\d+)\.(\d+)\.(\d+)$/);
|
|
46
|
+
if (!match) {
|
|
47
|
+
return fail(`cannot parse tag "${tag}" as semver.`);
|
|
48
|
+
}
|
|
49
|
+
return [Number(match[1]), Number(match[2]), Number(match[3])];
|
|
50
|
+
};
|
|
51
|
+
|
|
52
|
+
const computeNext = (
|
|
53
|
+
current: [number, number, number],
|
|
54
|
+
bump: BumpLevel,
|
|
55
|
+
): string => {
|
|
56
|
+
const [major, minor] = current;
|
|
57
|
+
if (bump === "major") {
|
|
58
|
+
return `${major + 1}.0.0`;
|
|
59
|
+
}
|
|
60
|
+
return `${major}.${minor + 1}.0`;
|
|
61
|
+
};
|
|
62
|
+
|
|
63
|
+
const assertCleanTree = (): void => {
|
|
64
|
+
const { stdout } = run(["git", "status", "--porcelain"]);
|
|
65
|
+
if (stdout.length > 0) {
|
|
66
|
+
fail(
|
|
67
|
+
"working tree is dirty. Commit or stash changes first.\n" +
|
|
68
|
+
" (version bump creates an empty commit and requires a clean tree)",
|
|
69
|
+
);
|
|
70
|
+
}
|
|
71
|
+
};
|
|
72
|
+
|
|
73
|
+
const assertHasCommits = (latestTag: string): void => {
|
|
74
|
+
const { stdout } = run(["git", "rev-list", "--count", `${latestTag}..HEAD`]);
|
|
75
|
+
if (stdout === "0") {
|
|
76
|
+
fail(`no commits since ${latestTag}. Make changes before bumping version.`);
|
|
77
|
+
}
|
|
78
|
+
};
|
|
79
|
+
|
|
80
|
+
const assertNoPendingBump = (latestTag: string, bump: BumpLevel): void => {
|
|
81
|
+
const { stdout } = run(["git", "log", `${latestTag}..HEAD`, "--format=%s"]);
|
|
82
|
+
if (stdout.includes(`[${bump}]`)) {
|
|
83
|
+
fail(
|
|
84
|
+
`a [${bump}] bump marker already exists in commits since ${latestTag}.\n` +
|
|
85
|
+
" There is no need to run this again.",
|
|
86
|
+
);
|
|
87
|
+
}
|
|
88
|
+
// Warn if there's a different bump already pending
|
|
89
|
+
const other: BumpLevel = bump === "minor" ? "major" : "minor";
|
|
90
|
+
if (stdout.includes(`[${other}]`)) {
|
|
91
|
+
console.warn(
|
|
92
|
+
`warning: a [${other}] bump marker already exists since ${latestTag}.` +
|
|
93
|
+
` Adding [${bump}] — the release workflow will use the higher of the two.`,
|
|
94
|
+
);
|
|
95
|
+
}
|
|
96
|
+
};
|
|
97
|
+
|
|
98
|
+
// --- main ---
|
|
99
|
+
|
|
100
|
+
const bumpArg = process.argv[2];
|
|
101
|
+
|
|
102
|
+
if (!bumpArg || !VALID_BUMPS.has(bumpArg)) {
|
|
103
|
+
console.error("usage: bun run version:bump <minor|major>");
|
|
104
|
+
console.error("");
|
|
105
|
+
console.error(
|
|
106
|
+
" minor bump minor version, reset patch (e.g. 0.1.18 -> 0.2.0)",
|
|
107
|
+
);
|
|
108
|
+
console.error(
|
|
109
|
+
" major bump major version, reset minor + patch (e.g. 0.1.18 -> 1.0.0)",
|
|
110
|
+
);
|
|
111
|
+
console.error("");
|
|
112
|
+
console.error(" Patch bumps are automatic and need no marker.");
|
|
113
|
+
process.exit(1);
|
|
114
|
+
}
|
|
115
|
+
|
|
116
|
+
const bump = bumpArg as BumpLevel;
|
|
117
|
+
const latestTag = getLatestTag();
|
|
118
|
+
const current = parseVersion(latestTag);
|
|
119
|
+
const next = computeNext(current, bump);
|
|
120
|
+
|
|
121
|
+
assertCleanTree();
|
|
122
|
+
assertHasCommits(latestTag);
|
|
123
|
+
assertNoPendingBump(latestTag, bump);
|
|
124
|
+
|
|
125
|
+
const commitMessage = `chore: version bump v${next} [${bump}]`;
|
|
126
|
+
|
|
127
|
+
const commitResult = run([
|
|
128
|
+
"git",
|
|
129
|
+
"commit",
|
|
130
|
+
"--allow-empty",
|
|
131
|
+
"-m",
|
|
132
|
+
commitMessage,
|
|
133
|
+
]);
|
|
134
|
+
|
|
135
|
+
if (commitResult.exitCode !== 0) {
|
|
136
|
+
fail("git commit failed.");
|
|
137
|
+
}
|
|
138
|
+
|
|
139
|
+
console.log("");
|
|
140
|
+
console.log(` current version: ${latestTag}`);
|
|
141
|
+
console.log(` next version: v${next} (${bump} bump)`);
|
|
142
|
+
console.log("");
|
|
143
|
+
console.log(` created commit: ${commitMessage}`);
|
|
144
|
+
console.log("");
|
package/src/signals.ts
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Graceful shutdown signal handling.
|
|
3
|
+
*
|
|
4
|
+
* Registers SIGINT/SIGTERM handlers that call a cleanup function
|
|
5
|
+
* before exiting. Supports timeout-based forced exit and
|
|
6
|
+
* double-signal emergency exit.
|
|
7
|
+
*/
|
|
8
|
+
|
|
9
|
+
/** Options controlling graceful-shutdown behavior for {@link onShutdown}. */
export interface ShutdownOpts {
  /** Signals to handle. Defaults to ["SIGINT", "SIGTERM"]. */
  signals?: string[];
  /** Force exit (code 1) after this many ms. No timeout if omitted. */
  timeoutMs?: number;
}
|
|
15
|
+
|
|
16
|
+
/**
|
|
17
|
+
* Register graceful shutdown handlers.
|
|
18
|
+
*
|
|
19
|
+
* On first signal: calls `cleanup()`, then `process.exit(0)`.
|
|
20
|
+
* On second signal (while cleanup is running): forces `process.exit(1)`.
|
|
21
|
+
* If `timeoutMs` is set: forces `process.exit(1)` after timeout.
|
|
22
|
+
*/
|
|
23
|
+
export function onShutdown(
|
|
24
|
+
cleanup: () => void | Promise<void>,
|
|
25
|
+
opts?: ShutdownOpts,
|
|
26
|
+
): void {
|
|
27
|
+
const signals = opts?.signals ?? ["SIGINT", "SIGTERM"];
|
|
28
|
+
const timeoutMs = opts?.timeoutMs;
|
|
29
|
+
let shutting = false;
|
|
30
|
+
|
|
31
|
+
const handler = (signal: string) => {
|
|
32
|
+
if (shutting) {
|
|
33
|
+
process.exit(1);
|
|
34
|
+
}
|
|
35
|
+
shutting = true;
|
|
36
|
+
console.log(`\n${signal} received, shutting down...`);
|
|
37
|
+
|
|
38
|
+
let timer: ReturnType<typeof setTimeout> | undefined;
|
|
39
|
+
if (timeoutMs !== undefined) {
|
|
40
|
+
timer = setTimeout(() => {
|
|
41
|
+
console.error(`Shutdown timed out after ${timeoutMs}ms, forcing exit`);
|
|
42
|
+
process.exit(1);
|
|
43
|
+
}, timeoutMs);
|
|
44
|
+
// Don't block the event loop from exiting
|
|
45
|
+
timer.unref();
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
const maybePromise = cleanup();
|
|
49
|
+
if (maybePromise && typeof maybePromise.then === "function") {
|
|
50
|
+
maybePromise
|
|
51
|
+
.then(() => {
|
|
52
|
+
if (timer) clearTimeout(timer);
|
|
53
|
+
process.exit(0);
|
|
54
|
+
})
|
|
55
|
+
.catch((err) => {
|
|
56
|
+
console.error("Shutdown cleanup error:", err);
|
|
57
|
+
if (timer) clearTimeout(timer);
|
|
58
|
+
process.exit(1);
|
|
59
|
+
});
|
|
60
|
+
} else {
|
|
61
|
+
if (timer) clearTimeout(timer);
|
|
62
|
+
process.exit(0);
|
|
63
|
+
}
|
|
64
|
+
};
|
|
65
|
+
|
|
66
|
+
for (const signal of signals) {
|
|
67
|
+
process.on(signal, () => handler(signal));
|
|
68
|
+
}
|
|
69
|
+
}
|
package/src/version.ts
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Read the VERSION file from a project root directory.
|
|
3
|
+
* Falls back to a default version string if the file doesn't exist.
|
|
4
|
+
*
|
|
5
|
+
* The VERSION file is written by CI during the release workflow.
|
|
6
|
+
* In local development, the fallback is used.
|
|
7
|
+
*/
|
|
8
|
+
|
|
9
|
+
import { existsSync, readFileSync } from "fs";
|
|
10
|
+
import { join } from "path";
|
|
11
|
+
|
|
12
|
+
/**
|
|
13
|
+
* Read a VERSION file from the given directory (project root).
|
|
14
|
+
*
|
|
15
|
+
* @param rootDir - Absolute path to the project root (where VERSION lives).
|
|
16
|
+
* @param fallback - Version string to use when VERSION file doesn't exist. Defaults to "0.0.0-dev".
|
|
17
|
+
*/
|
|
18
|
+
export function readVersion(
|
|
19
|
+
rootDir: string,
|
|
20
|
+
fallback: string = "0.0.0-dev",
|
|
21
|
+
): string {
|
|
22
|
+
const versionFile = join(rootDir, "VERSION");
|
|
23
|
+
if (existsSync(versionFile)) {
|
|
24
|
+
return readFileSync(versionFile, "utf-8").trim();
|
|
25
|
+
}
|
|
26
|
+
return fallback;
|
|
27
|
+
}
|
package/test/cli.test.ts
ADDED
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
// Unit tests for src/cli.ts: argument parsing and uptime formatting.
import { describe, expect, test } from "bun:test";
import { formatUptime, parseArgs } from "../src/cli";

// --- parseArgs ---

describe("parseArgs", () => {
  test("extracts command and args", () => {
    const result = parseArgs(["start", "--port", "8080"]);
    expect(result.command).toBe("start");
    expect(result.args).toEqual(["--port", "8080"]);
    expect(result.json).toBe(false);
  });

  test("strips --json flag", () => {
    const result = parseArgs(["status", "--json"]);
    expect(result.command).toBe("status");
    expect(result.args).toEqual([]);
    expect(result.json).toBe(true);
  });

  test("--json can appear anywhere", () => {
    const result = parseArgs(["--json", "health"]);
    expect(result.command).toBe("health");
    expect(result.json).toBe(true);
  });

  test("defaults to help when empty", () => {
    const result = parseArgs([]);
    expect(result.command).toBe("help");
    expect(result.args).toEqual([]);
    expect(result.json).toBe(false);
  });
});

// --- formatUptime ---

describe("formatUptime", () => {
  test("formats seconds", () => {
    expect(formatUptime(45)).toBe("45s");
  });

  test("formats minutes and seconds", () => {
    expect(formatUptime(192)).toBe("3m 12s");
  });

  test("formats hours and minutes", () => {
    expect(formatUptime(8100)).toBe("2h 15m");
  });

  test("edge case: exactly 60 seconds", () => {
    expect(formatUptime(60)).toBe("1m 0s");
  });

  test("edge case: exactly 1 hour", () => {
    expect(formatUptime(3600)).toBe("1h 0m");
  });

  test("zero seconds", () => {
    expect(formatUptime(0)).toBe("0s");
  });
});
|
|
@@ -0,0 +1,263 @@
|
|
|
1
|
+
// Unit tests for src/config.ts: XDG directory resolution, path expansion,
// env-var interpolation, port parsing, and JSON config loading.
import { afterEach, beforeEach, describe, expect, test } from "bun:test";
import { existsSync, mkdirSync, rmSync, writeFileSync } from "fs";
import { homedir } from "os";
import { join } from "path";
import {
  expandPath,
  getConfigDir,
  getDataDir,
  interpolateDeep,
  interpolateEnvVars,
  loadJsonConfig,
  parsePort,
} from "../src/config";

// Scratch directory for config files written by individual tests.
const TMP = join(import.meta.dir, ".tmp-config");

beforeEach(() => {
  if (existsSync(TMP)) rmSync(TMP, { recursive: true });
  mkdirSync(TMP, { recursive: true });
});

afterEach(() => {
  if (existsSync(TMP)) rmSync(TMP, { recursive: true });
  // Clean up env vars
  // NOTE(review): this deletes the vars outright, clobbering any values set
  // before the test run — consider save/restore if that ever matters.
  delete process.env.XDG_DATA_HOME;
  delete process.env.XDG_CONFIG_HOME;
  delete process.env.CORE_TEST_VAR;
});

// --- getDataDir ---

describe("getDataDir", () => {
  test("uses XDG_DATA_HOME when set", () => {
    process.env.XDG_DATA_HOME = "/custom/data";
    expect(getDataDir("engram")).toBe("/custom/data/engram");
  });

  test("falls back to ~/.local/share/{name}", () => {
    delete process.env.XDG_DATA_HOME;
    expect(getDataDir("engram")).toBe(
      join(homedir(), ".local", "share", "engram"),
    );
  });
});

// --- getConfigDir ---

describe("getConfigDir", () => {
  test("uses XDG_CONFIG_HOME when set", () => {
    process.env.XDG_CONFIG_HOME = "/custom/config";
    expect(getConfigDir("synapse")).toBe("/custom/config/synapse");
  });

  test("falls back to ~/.config/{name}", () => {
    delete process.env.XDG_CONFIG_HOME;
    expect(getConfigDir("synapse")).toBe(join(homedir(), ".config", "synapse"));
  });
});

// --- expandPath ---

describe("expandPath", () => {
  test("expands ~ to homedir", () => {
    const result = expandPath("~/foo/bar");
    expect(result).toBe(join(homedir(), "foo/bar"));
  });

  test("leaves absolute paths unchanged", () => {
    expect(expandPath("/usr/bin")).toBe("/usr/bin");
  });

  test("leaves relative paths unchanged", () => {
    expect(expandPath("foo/bar")).toBe("foo/bar");
  });
});

// --- interpolateEnvVars ---

describe("interpolateEnvVars", () => {
  test("replaces env var references", () => {
    process.env.CORE_TEST_VAR = "hello";
    const result = interpolateEnvVars("prefix-${CORE_TEST_VAR}-suffix");
    expect(result.ok).toBe(true);
    if (result.ok) expect(result.value).toBe("prefix-hello-suffix");
  });

  test("returns err for missing env var", () => {
    delete process.env.CORE_TEST_VAR;
    const result = interpolateEnvVars("${CORE_TEST_VAR}");
    expect(result.ok).toBe(false);
    if (!result.ok) expect(result.error).toContain("CORE_TEST_VAR");
  });

  test("returns string unchanged when no vars present", () => {
    const result = interpolateEnvVars("plain string");
    expect(result.ok).toBe(true);
    if (result.ok) expect(result.value).toBe("plain string");
  });

  test("ignores malformed env var references", () => {
    const result = interpolateEnvVars("${not valid}");
    expect(result.ok).toBe(true);
    if (result.ok) expect(result.value).toBe("${not valid}");
  });

  test("ignores empty braces", () => {
    const result = interpolateEnvVars("${}");
    expect(result.ok).toBe(true);
    if (result.ok) expect(result.value).toBe("${}");
  });
});

// --- interpolateDeep ---

describe("interpolateDeep", () => {
  test("interpolates strings in nested objects", () => {
    process.env.CORE_TEST_VAR = "world";
    const result = interpolateDeep({
      greeting: "hello ${CORE_TEST_VAR}",
      nested: { value: "${CORE_TEST_VAR}" },
    });
    expect(result.ok).toBe(true);
    if (result.ok) {
      expect(result.value).toEqual({
        greeting: "hello world",
        nested: { value: "world" },
      });
    }
  });

  test("interpolates strings in arrays", () => {
    process.env.CORE_TEST_VAR = "item";
    const result = interpolateDeep(["${CORE_TEST_VAR}", 42, true]);
    expect(result.ok).toBe(true);
    if (result.ok) {
      expect(result.value).toEqual(["item", 42, true]);
    }
  });

  test("passes through non-string primitives", () => {
    const result = interpolateDeep({ num: 42, bool: true, nil: null });
    expect(result.ok).toBe(true);
    if (result.ok) {
      expect(result.value).toEqual({ num: 42, bool: true, nil: null });
    }
  });

  test("returns err on missing env var in nested value", () => {
    delete process.env.CORE_TEST_VAR;
    const result = interpolateDeep({ deep: { value: "${CORE_TEST_VAR}" } });
    expect(result.ok).toBe(false);
  });
});

// --- parsePort ---

describe("parsePort", () => {
  test("valid port returns Ok with branded Port", () => {
    const result = parsePort("8080", "TEST");
    expect(result.ok).toBe(true);
    if (result.ok) expect(result.value as number).toBe(8080);
  });

  test("port 1 is valid", () => {
    const result = parsePort("1", "TEST");
    expect(result.ok).toBe(true);
  });

  test("port 65535 is valid", () => {
    const result = parsePort("65535", "TEST");
    expect(result.ok).toBe(true);
  });

  test("port 0 is invalid", () => {
    const result = parsePort("0", "TEST");
    expect(result.ok).toBe(false);
  });

  test("port 65536 is invalid", () => {
    const result = parsePort("65536", "TEST");
    expect(result.ok).toBe(false);
  });

  test("non-numeric returns error", () => {
    const result = parsePort("abc", "SOURCE");
    expect(result.ok).toBe(false);
    if (!result.ok) {
      expect(result.error).toContain("abc");
      expect(result.error).toContain("SOURCE");
    }
  });
});

// --- loadJsonConfig ---

describe("loadJsonConfig", () => {
  const defaults = { host: "localhost", port: 3000 };

  test("returns defaults when config file missing", () => {
    const result = loadJsonConfig({
      name: "test",
      defaults,
      configPath: join(TMP, "nonexistent.json"),
    });
    expect(result.ok).toBe(true);
    if (result.ok) {
      expect(result.value.config).toEqual(defaults);
      expect(result.value.source).toBe("defaults");
    }
  });

  test("merges file config with defaults", () => {
    const configPath = join(TMP, "config.json");
    writeFileSync(configPath, JSON.stringify({ port: 9999 }));

    const result = loadJsonConfig({ name: "test", defaults, configPath });
    expect(result.ok).toBe(true);
    if (result.ok) {
      expect(result.value.config).toEqual({ host: "localhost", port: 9999 });
      expect(result.value.source).toBe("file");
    }
  });

  test("interpolates env vars in config file", () => {
    process.env.CORE_TEST_VAR = "from-env";
    const configPath = join(TMP, "config.json");
    writeFileSync(configPath, JSON.stringify({ host: "${CORE_TEST_VAR}" }));

    const result = loadJsonConfig({ name: "test", defaults, configPath });
    expect(result.ok).toBe(true);
    if (result.ok) {
      expect(result.value.config.host).toBe("from-env");
    }
  });

  test("returns err for invalid JSON", () => {
    const configPath = join(TMP, "config.json");
    writeFileSync(configPath, "not json{{{");

    const result = loadJsonConfig({ name: "test", defaults, configPath });
    expect(result.ok).toBe(false);
    if (!result.ok) expect(result.error).toContain("invalid JSON");
  });

  test("returns err for non-object JSON", () => {
    const configPath = join(TMP, "config.json");
    writeFileSync(configPath, "[1, 2, 3]");

    const result = loadJsonConfig({ name: "test", defaults, configPath });
    expect(result.ok).toBe(false);
    if (!result.ok) expect(result.error).toContain("must be a JSON object");
  });

  test("returns err for missing env var in config", () => {
    delete process.env.CORE_TEST_VAR;
    const configPath = join(TMP, "config.json");
    writeFileSync(configPath, JSON.stringify({ host: "${CORE_TEST_VAR}" }));

    const result = loadJsonConfig({ name: "test", defaults, configPath });
    expect(result.ok).toBe(false);
    if (!result.ok) expect(result.error).toContain("CORE_TEST_VAR");
  });
});
|
|
@@ -0,0 +1,89 @@
|
|
|
1
|
+
import { describe, expect, test } from "bun:test";
|
|
2
|
+
import { existsSync, mkdirSync, rmSync, writeFileSync } from "fs";
|
|
3
|
+
import { join } from "path";
|
|
4
|
+
import type { DaemonManager } from "../src/daemon";
|
|
5
|
+
import { createDaemonManager } from "../src/daemon";
|
|
6
|
+
|
|
7
|
+
const TMP = join(import.meta.dir, ".tmp-daemon");
|
|
8
|
+
|
|
9
|
+
function setup(): string {
|
|
10
|
+
if (existsSync(TMP)) rmSync(TMP, { recursive: true });
|
|
11
|
+
mkdirSync(TMP, { recursive: true });
|
|
12
|
+
return TMP;
|
|
13
|
+
}
|
|
14
|
+
|
|
15
|
+
function teardown() {
|
|
16
|
+
if (existsSync(TMP)) rmSync(TMP, { recursive: true });
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
describe("createDaemonManager", () => {
|
|
20
|
+
test("creates a manager with all methods", () => {
|
|
21
|
+
const configDir = setup();
|
|
22
|
+
try {
|
|
23
|
+
const manager = createDaemonManager({
|
|
24
|
+
name: "test",
|
|
25
|
+
configDir,
|
|
26
|
+
cliPath: "/nonexistent/cli.ts",
|
|
27
|
+
});
|
|
28
|
+
expect(typeof manager.start).toBe("function");
|
|
29
|
+
expect(typeof manager.stop).toBe("function");
|
|
30
|
+
expect(typeof manager.restart).toBe("function");
|
|
31
|
+
expect(typeof manager.status).toBe("function");
|
|
32
|
+
} finally {
|
|
33
|
+
teardown();
|
|
34
|
+
}
|
|
35
|
+
});
|
|
36
|
+
|
|
37
|
+
test("status returns not running when no PID file", async () => {
|
|
38
|
+
const configDir = setup();
|
|
39
|
+
try {
|
|
40
|
+
const manager = createDaemonManager({
|
|
41
|
+
name: "test",
|
|
42
|
+
configDir,
|
|
43
|
+
cliPath: "/nonexistent/cli.ts",
|
|
44
|
+
});
|
|
45
|
+
const status = await manager.status();
|
|
46
|
+
expect(status.running).toBe(false);
|
|
47
|
+
expect(status.pid).toBeUndefined();
|
|
48
|
+
} finally {
|
|
49
|
+
teardown();
|
|
50
|
+
}
|
|
51
|
+
});
|
|
52
|
+
|
|
53
|
+
test("status cleans up stale PID file", async () => {
|
|
54
|
+
const configDir = setup();
|
|
55
|
+
try {
|
|
56
|
+
// Write a PID that doesn't correspond to a running process
|
|
57
|
+
writeFileSync(join(configDir, "test.pid"), "999999");
|
|
58
|
+
|
|
59
|
+
const manager = createDaemonManager({
|
|
60
|
+
name: "test",
|
|
61
|
+
configDir,
|
|
62
|
+
cliPath: "/nonexistent/cli.ts",
|
|
63
|
+
});
|
|
64
|
+
const status = await manager.status();
|
|
65
|
+
expect(status.running).toBe(false);
|
|
66
|
+
|
|
67
|
+
// PID file should be cleaned up
|
|
68
|
+
expect(existsSync(join(configDir, "test.pid"))).toBe(false);
|
|
69
|
+
} finally {
|
|
70
|
+
teardown();
|
|
71
|
+
}
|
|
72
|
+
});
|
|
73
|
+
|
|
74
|
+
test("stop returns err when not running", async () => {
|
|
75
|
+
const configDir = setup();
|
|
76
|
+
try {
|
|
77
|
+
const manager = createDaemonManager({
|
|
78
|
+
name: "test",
|
|
79
|
+
configDir,
|
|
80
|
+
cliPath: "/nonexistent/cli.ts",
|
|
81
|
+
});
|
|
82
|
+
const result = await manager.stop();
|
|
83
|
+
expect(result.ok).toBe(false);
|
|
84
|
+
if (!result.ok) expect(result.error).toContain("not running");
|
|
85
|
+
} finally {
|
|
86
|
+
teardown();
|
|
87
|
+
}
|
|
88
|
+
});
|
|
89
|
+
});
|