git-daemon 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.eslintrc.cjs +18 -0
- package/README.md +143 -0
- package/config.schema.json +180 -0
- package/design.md +481 -0
- package/logo.png +0 -0
- package/openapi.yaml +678 -0
- package/package.json +41 -0
- package/src/app.ts +459 -0
- package/src/approvals.ts +35 -0
- package/src/config.ts +104 -0
- package/src/context.ts +64 -0
- package/src/daemon.ts +22 -0
- package/src/deps.ts +134 -0
- package/src/errors.ts +76 -0
- package/src/git.ts +160 -0
- package/src/jobs.ts +194 -0
- package/src/logger.ts +26 -0
- package/src/os.ts +45 -0
- package/src/pairing.ts +52 -0
- package/src/process.ts +55 -0
- package/src/security.ts +80 -0
- package/src/tokens.ts +95 -0
- package/src/tools.ts +45 -0
- package/src/types.ts +111 -0
- package/src/typings/tree-kill.d.ts +9 -0
- package/src/validation.ts +69 -0
- package/src/workspace.ts +83 -0
- package/tests/app.test.ts +122 -0
- package/tsconfig.json +14 -0
- package/vitest.config.ts +8 -0
package/src/process.ts
ADDED
@@ -0,0 +1,55 @@
+import { execa, type Options as ExecaOptions } from "execa";
+import treeKill from "tree-kill";
+import type { JobContext } from "./jobs";
+
+const attachLineReader = (
+  stream: NodeJS.ReadableStream | null,
+  onLine: (line: string) => void,
+) => {
+  if (!stream) {
+    return;
+  }
+  let buffer = "";
+  stream.on("data", (chunk: Buffer) => {
+    buffer += chunk.toString();
+    const lines = buffer.split(/\r?\n/);
+    buffer = lines.pop() ?? "";
+    for (const line of lines) {
+      if (line.length > 0) {
+        onLine(line);
+      }
+    }
+  });
+  stream.on("end", () => {
+    if (buffer.length > 0) {
+      onLine(buffer);
+    }
+  });
+};
+
+export const runCommand = async (
+  ctx: JobContext,
+  command: string,
+  args: string[],
+  options?: ExecaOptions,
+) => {
+  const subprocess = execa(command, args, {
+    ...options,
+    stdout: "pipe",
+    stderr: "pipe",
+  });
+
+  if (subprocess.pid) {
+    ctx.setCancel(
+      () =>
+        new Promise<void>((resolve) => {
+          treeKill(subprocess.pid, "SIGTERM", () => resolve());
+        }),
+    );
+  }
+
+  attachLineReader(subprocess.stdout, ctx.logStdout);
+  attachLineReader(subprocess.stderr, ctx.logStderr);
+
+  await subprocess;
+};
package/src/security.ts
ADDED
@@ -0,0 +1,80 @@
+import type { Request, Response, NextFunction } from "express";
+import { authInvalid, authRequired, originNotAllowed } from "./errors";
+import type { TokenStore } from "./tokens";
+
+const ALLOWED_HOSTS = new Set(["127.0.0.1", "localhost"]);
+
+export const getOrigin = (req: Request) => req.headers.origin || "";
+
+const isLoopbackAddress = (address: string | undefined) => {
+  if (!address) {
+    return false;
+  }
+  if (address === "127.0.0.1" || address === "::1") {
+    return true;
+  }
+  return address.startsWith("::ffff:127.0.0.1");
+};
+
+export const originGuard = (allowlist: string[]) => {
+  return (req: Request, res: Response, next: NextFunction) => {
+    const origin = getOrigin(req);
+    if (!origin || !allowlist.includes(origin)) {
+      return next(originNotAllowed());
+    }
+
+    res.setHeader("Access-Control-Allow-Origin", origin);
+    res.setHeader("Vary", "Origin");
+    res.setHeader(
+      "Access-Control-Allow-Headers",
+      "Authorization, Content-Type",
+    );
+    res.setHeader("Access-Control-Allow-Methods", "GET,POST,OPTIONS");
+    res.setHeader("Access-Control-Max-Age", "600");
+
+    if (req.method === "OPTIONS") {
+      res.status(204).end();
+      return;
+    }
+
+    next();
+  };
+};
+
+export const hostGuard = () => {
+  return (req: Request, _res: Response, next: NextFunction) => {
+    const host = req.headers.host?.split(":")[0];
+    if (!host || !ALLOWED_HOSTS.has(host)) {
+      return next(originNotAllowed());
+    }
+    next();
+  };
+};
+
+export const loopbackGuard = () => {
+  return (req: Request, _res: Response, next: NextFunction) => {
+    if (!isLoopbackAddress(req.socket.remoteAddress)) {
+      return next(originNotAllowed());
+    }
+    next();
+  };
+};
+
+export const authGuard = (tokenStore: TokenStore) => {
+  return (req: Request, _res: Response, next: NextFunction) => {
+    const origin = getOrigin(req);
+    const auth = req.headers.authorization;
+    if (!auth) {
+      return next(authRequired());
+    }
+    const match = auth.match(/^Bearer (.+)$/i);
+    if (!match) {
+      return next(authInvalid());
+    }
+    const token = match[1];
+    if (!tokenStore.verifyToken(origin, token)) {
+      return next(authInvalid());
+    }
+    next();
+  };
+};
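For context, the guards above are plain Express middleware factories. The sketch below shows one plausible way to chain them in front of the API; the package's real composition lives in src/app.ts (not included in this excerpt), so the setup function and route prefix here are illustrative assumptions only.

```ts
import express from "express";
import { loopbackGuard, hostGuard, originGuard, authGuard } from "./security";
import { TokenStore } from "./tokens";

// Hypothetical wiring; the actual composition is defined in src/app.ts.
export const buildGuardedApp = async (
  configDir: string,
  originAllowlist: string[],
) => {
  const tokenStore = new TokenStore(configDir);
  await tokenStore.load();

  const app = express();
  app.use(express.json());
  app.use(loopbackGuard());                  // reject non-loopback sockets
  app.use(hostGuard());                      // Host must be 127.0.0.1 or localhost
  app.use(originGuard(originAllowlist));     // CORS allowlist + OPTIONS preflight
  app.use("/v1/git", authGuard(tokenStore)); // Bearer token checked per origin
  return app;
};
```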
package/src/tokens.ts
ADDED
@@ -0,0 +1,95 @@
+import { promises as fs } from "fs";
+import crypto from "crypto";
+import { getTokensPath } from "./config";
+import type { TokenEntry, TokenStoreData } from "./types";
+
+const TOKEN_BYTES = 32;
+const HASH_BYTES = 32;
+
+export class TokenStore {
+  private entries: TokenEntry[] = [];
+  private readonly tokensPath: string;
+
+  constructor(configDir: string) {
+    this.tokensPath = getTokensPath(configDir);
+  }
+
+  async load() {
+    try {
+      const raw = await fs.readFile(this.tokensPath, "utf8");
+      const data = JSON.parse(raw) as TokenStoreData;
+      this.entries = Array.isArray(data.entries) ? data.entries : [];
+    } catch (err) {
+      if ((err as NodeJS.ErrnoException).code !== "ENOENT") {
+        throw err;
+      }
+      this.entries = [];
+      await this.save();
+    }
+    this.pruneExpired();
+  }
+
+  private async save() {
+    const payload: TokenStoreData = { entries: this.entries };
+    await fs.writeFile(this.tokensPath, JSON.stringify(payload, null, 2));
+  }
+
+  private pruneExpired() {
+    const now = Date.now();
+    this.entries = this.entries.filter((entry) => {
+      const expiresAt = Date.parse(entry.expiresAt);
+      return Number.isNaN(expiresAt) || expiresAt > now;
+    });
+  }
+
+  getActiveToken(origin: string): TokenEntry | undefined {
+    this.pruneExpired();
+    return this.entries.find((entry) => entry.origin === origin);
+  }
+
+  async issueToken(origin: string, ttlDays: number) {
+    const token = crypto.randomBytes(TOKEN_BYTES).toString("base64url");
+    const salt = crypto.randomBytes(16).toString("base64url");
+    const tokenHash = crypto
+      .scryptSync(token, salt, HASH_BYTES)
+      .toString("base64url");
+    const now = new Date();
+    const expiresAt = new Date(now.getTime() + ttlDays * 24 * 60 * 60 * 1000);
+
+    const entry: TokenEntry = {
+      origin,
+      tokenHash,
+      salt,
+      createdAt: now.toISOString(),
+      expiresAt: expiresAt.toISOString(),
+    };
+
+    this.entries = this.entries.filter((item) => item.origin !== origin);
+    this.entries.push(entry);
+    await this.save();
+
+    return {
+      token,
+      expiresAt: entry.expiresAt,
+    };
+  }
+
+  async revokeToken(origin: string) {
+    this.entries = this.entries.filter((item) => item.origin !== origin);
+    await this.save();
+  }
+
+  verifyToken(origin: string, token: string): boolean {
+    this.pruneExpired();
+    const entry = this.entries.find((item) => item.origin === origin);
+    if (!entry) {
+      return false;
+    }
+    const derived = crypto.scryptSync(token, entry.salt, HASH_BYTES);
+    const stored = Buffer.from(entry.tokenHash, "base64url");
+    if (stored.length !== derived.length) {
+      return false;
+    }
+    return crypto.timingSafeEqual(stored, derived);
+  }
+}
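A short usage sketch for TokenStore, assuming a writable config directory (the path below is a placeholder): issueToken returns the raw token exactly once, since only the scrypt hash and salt are persisted, one entry is kept per origin, and verifyToken re-derives the hash and compares it in constant time.

```ts
import { TokenStore } from "./tokens";

const demo = async () => {
  const store = new TokenStore("/tmp/git-daemon-config"); // hypothetical config dir
  await store.load(); // creates an empty token store file on first run, prunes expired entries

  // Issue a 30-day token for an origin; only its scrypt hash is written to disk.
  const { token, expiresAt } = await store.issueToken("http://localhost:5173", 30);
  console.log("send to client:", token, "valid until", expiresAt);

  // Verify the raw token presented later by that origin.
  console.log(store.verifyToken("http://localhost:5173", token)); // true until it expires

  await store.revokeToken("http://localhost:5173");
};

demo().catch(console.error);
```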
package/src/tools.ts
ADDED
@@ -0,0 +1,45 @@
+import { execa } from "execa";
+import type { ToolInfo, Capabilities } from "./types";
+
+const detect = async (
+  command: string,
+  args: string[] = ["--version"],
+): Promise<ToolInfo> => {
+  try {
+    const result = await execa(command, args);
+    const version = result.stdout.trim();
+    return { installed: true, version };
+  } catch (err) {
+    if ((err as NodeJS.ErrnoException).code === "ENOENT") {
+      return { installed: false };
+    }
+    return { installed: false };
+  }
+};
+
+export const detectCapabilities = async (): Promise<Capabilities> => {
+  const [git, node, npm, pnpm, yarn, code] = await Promise.all([
+    detect("git", ["--version"]),
+    detect("node", ["--version"]),
+    detect("npm", ["--version"]),
+    detect("pnpm", ["--version"]),
+    detect("yarn", ["--version"]),
+    detect("code", ["--version"]),
+  ]);
+
+  return {
+    tools: {
+      git,
+      node,
+      npm,
+      pnpm,
+      yarn,
+      code,
+    },
+  };
+};
+
+export const isToolInstalled = async (command: string) => {
+  const info = await detect(command, ["--version"]);
+  return info.installed;
+};
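As a usage sketch (not part of the package), detectCapabilities probes each tool by spawning it with --version and reports { installed, version? } per tool; anything that fails to spawn is simply reported as not installed.

```ts
import { detectCapabilities, isToolInstalled } from "./tools";

const demo = async () => {
  const caps = await detectCapabilities();
  console.log("git:", caps.tools.git); // e.g. { installed: true, version: "git version 2.43.0" }
  console.log("pnpm present:", await isToolInstalled("pnpm"));
};

demo().catch(console.error);
```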
package/src/types.ts
ADDED
@@ -0,0 +1,111 @@
+export type Capability = "open-terminal" | "open-vscode" | "deps/install";
+
+export type ApprovalEntry = {
+  origin: string;
+  repoPath: string;
+  capabilities: Capability[];
+  approvedAt: string;
+};
+
+export type AppConfig = {
+  configVersion: number;
+  server: {
+    host: string;
+    port: number;
+  };
+  originAllowlist: string[];
+  workspaceRoot: string | null;
+  pairing: {
+    tokenTtlDays: number;
+  };
+  jobs: {
+    maxConcurrent: number;
+    timeoutSeconds: number;
+  };
+  deps: {
+    defaultSafer: boolean;
+  };
+  logging: {
+    directory: string;
+    maxFiles: number;
+    maxBytes: number;
+  };
+  approvals: {
+    entries: ApprovalEntry[];
+  };
+};
+
+export type ToolInfo = {
+  installed: boolean;
+  version?: string;
+};
+
+export type Capabilities = {
+  tools: {
+    git?: ToolInfo;
+    node?: ToolInfo;
+    npm?: ToolInfo;
+    pnpm?: ToolInfo;
+    yarn?: ToolInfo;
+    code?: ToolInfo;
+  };
+};
+
+export type TokenEntry = {
+  origin: string;
+  tokenHash: string;
+  salt: string;
+  createdAt: string;
+  expiresAt: string;
+};
+
+export type TokenStoreData = {
+  entries: TokenEntry[];
+};
+
+export type JobState = "queued" | "running" | "done" | "error" | "cancelled";
+
+export type JobEvent =
+  | {
+      type: "log";
+      stream: "stdout" | "stderr";
+      line: string;
+    }
+  | {
+      type: "progress";
+      kind: "git" | "deps";
+      percent?: number;
+      detail?: string;
+    }
+  | {
+      type: "state";
+      state: JobState;
+      message?: string;
+    };
+
+export type JobStatus = {
+  id: string;
+  state: JobState;
+  createdAt: string;
+  startedAt?: string;
+  finishedAt?: string;
+  error?: ApiErrorBody;
+};
+
+export type ApiErrorBody = {
+  errorCode:
+    | "auth_required"
+    | "auth_invalid"
+    | "origin_not_allowed"
+    | "rate_limited"
+    | "request_too_large"
+    | "workspace_required"
+    | "path_outside_workspace"
+    | "invalid_repo_url"
+    | "capability_not_granted"
+    | "job_not_found"
+    | "timeout"
+    | "internal_error";
+  message: string;
+  details?: Record<string, unknown>;
+};
package/src/validation.ts
ADDED
@@ -0,0 +1,69 @@
+import { z } from "zod";
+
+const MAX_PATH_LENGTH = 4096;
+
+const isValidRepoUrl = (value: string) => {
+  if (value.startsWith("file://")) {
+    return false;
+  }
+  if (
+    value.startsWith("/") ||
+    value.startsWith("./") ||
+    value.startsWith("../")
+  ) {
+    return false;
+  }
+  if (/^git@[^:]+:.+/.test(value)) {
+    return true;
+  }
+  if (/^https:\/\/[^/]+\/.+/.test(value)) {
+    return true;
+  }
+  if (/^ssh:\/\/[^/]+\/.+/.test(value)) {
+    return true;
+  }
+  return false;
+};
+
+export const pairRequestSchema = z.discriminatedUnion("step", [
+  z.object({
+    step: z.literal("start"),
+  }),
+  z.object({
+    step: z.literal("confirm"),
+    code: z.string().min(1),
+  }),
+]);
+
+export const gitCloneRequestSchema = z.object({
+  repoUrl: z.string().min(1).refine(isValidRepoUrl),
+  destRelative: z.string().min(1).max(MAX_PATH_LENGTH),
+  options: z
+    .object({
+      branch: z.string().min(1).optional(),
+      depth: z.number().int().min(1).optional(),
+    })
+    .optional(),
+});
+
+export const gitFetchRequestSchema = z.object({
+  repoPath: z.string().min(1).max(MAX_PATH_LENGTH),
+  remote: z.string().min(1).optional(),
+  prune: z.boolean().optional(),
+});
+
+export const gitStatusQuerySchema = z.object({
+  repoPath: z.string().min(1).max(MAX_PATH_LENGTH),
+});
+
+export const osOpenRequestSchema = z.object({
+  target: z.enum(["folder", "terminal", "vscode"]),
+  path: z.string().min(1).max(MAX_PATH_LENGTH),
+});
+
+export const depsInstallRequestSchema = z.object({
+  repoPath: z.string().min(1).max(MAX_PATH_LENGTH),
+  manager: z.enum(["auto", "npm", "pnpm", "yarn"]).optional(),
+  mode: z.enum(["auto", "ci", "install"]).optional(),
+  safer: z.boolean().optional(),
+});
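To illustrate the schemas above, the sketch below runs safeParse against two hypothetical clone payloads; isValidRepoUrl accepts https://, ssh://, and scp-style git@ URLs while rejecting file:// URLs and local paths.

```ts
import { gitCloneRequestSchema } from "./validation";

// Accepted: an https remote with optional shallow-clone options.
const ok = gitCloneRequestSchema.safeParse({
  repoUrl: "https://github.com/user/repo.git",
  destRelative: "repo",
  options: { branch: "main", depth: 1 },
});
console.log(ok.success); // true

// Rejected: file:// URLs fail the isValidRepoUrl refinement.
const bad = gitCloneRequestSchema.safeParse({
  repoUrl: "file:///tmp/repo",
  destRelative: "repo",
});
console.log(bad.success); // false
```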
package/src/workspace.ts
ADDED
@@ -0,0 +1,83 @@
+import path from "path";
+import { promises as fs } from "fs";
+import { pathOutsideWorkspace, workspaceRequired } from "./errors";
+
+export class MissingPathError extends Error {}
+
+const MAX_PATH_LENGTH = 4096;
+
+export const ensureWorkspaceRoot = (root: string | null) => {
+  if (!root) {
+    throw workspaceRequired();
+  }
+  if (root.length > MAX_PATH_LENGTH) {
+    throw pathOutsideWorkspace();
+  }
+  return root;
+};
+
+const realpathSafe = async (target: string) => {
+  try {
+    return await fs.realpath(target);
+  } catch (err) {
+    if ((err as NodeJS.ErrnoException).code === "ENOENT") {
+      return null;
+    }
+    throw err;
+  }
+};
+
+export const resolveInsideWorkspace = async (
+  workspaceRoot: string,
+  candidate: string,
+  allowMissing = false,
+) => {
+  if (candidate.length > MAX_PATH_LENGTH) {
+    throw pathOutsideWorkspace();
+  }
+
+  const rootReal = await fs.realpath(workspaceRoot);
+  const resolved = path.resolve(rootReal, candidate);
+
+  if (!isInside(rootReal, resolved)) {
+    throw pathOutsideWorkspace();
+  }
+
+  const realResolved = await realpathSafe(resolved);
+  if (realResolved) {
+    if (!isInside(rootReal, realResolved)) {
+      throw pathOutsideWorkspace();
+    }
+    return realResolved;
+  }
+
+  if (!allowMissing) {
+    throw new MissingPathError("Path does not exist.");
+  }
+
+  const parent = path.dirname(resolved);
+  const parentReal = await realpathSafe(parent);
+  if (parentReal && !isInside(rootReal, parentReal)) {
+    throw pathOutsideWorkspace();
+  }
+  return resolved;
+};
+
+const isInside = (root: string, candidate: string) => {
+  const relative = path.relative(root, candidate);
+  if (relative === "") {
+    return true;
+  }
+  return !relative.startsWith("..") && !path.isAbsolute(relative);
+};
+
+export const ensureRelative = (target: string) => {
+  if (path.isAbsolute(target)) {
+    throw pathOutsideWorkspace();
+  }
+  const normalized = path.normalize(target);
+  if (normalized === "." || normalized.startsWith("..")) {
+    throw pathOutsideWorkspace();
+  }
+  return target;
+};
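A brief sketch of the containment helpers above, using a placeholder workspace root: ensureRelative rejects absolute or parent-escaping destinations, and resolveInsideWorkspace resolves a candidate against the realpath of the root and rejects anything (including symlink targets) that lands outside it.

```ts
import { resolveInsideWorkspace, ensureRelative } from "./workspace";

const workspaceRoot = "/home/user/workspace"; // hypothetical existing directory

const demo = async () => {
  ensureRelative("repo/src"); // ok; "../elsewhere" or "/etc" would throw pathOutsideWorkspace()

  // allowMissing = true lets a not-yet-created clone destination pass, provided
  // its parent directory (if it exists) still resolves inside the workspace root.
  const target = await resolveInsideWorkspace(workspaceRoot, "repo", true);
  console.log("clone target:", target);

  // With allowMissing left false, a missing path raises MissingPathError instead.
};

demo().catch(console.error);
```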
package/tests/app.test.ts
ADDED
@@ -0,0 +1,122 @@
+import { describe, expect, it } from "vitest";
+import request from "supertest";
+import path from "path";
+import os from "os";
+import { promises as fs } from "fs";
+import pino from "pino";
+import { createApp, type DaemonContext } from "../src/app";
+import { TokenStore } from "../src/tokens";
+import { PairingManager } from "../src/pairing";
+import { JobManager } from "../src/jobs";
+import type { AppConfig } from "../src/types";
+
+const createTempDir = async () =>
+  fs.mkdtemp(path.join(os.tmpdir(), "git-daemon-test-"));
+
+const createConfig = (
+  workspaceRoot: string | null,
+  origin: string,
+): AppConfig => ({
+  configVersion: 1,
+  server: { host: "127.0.0.1", port: 0 },
+  originAllowlist: [origin],
+  workspaceRoot,
+  pairing: { tokenTtlDays: 30 },
+  jobs: { maxConcurrent: 1, timeoutSeconds: 60 },
+  deps: { defaultSafer: true },
+  logging: { directory: "logs", maxFiles: 1, maxBytes: 1024 },
+  approvals: { entries: [] },
+});
+
+const createContext = async (workspaceRoot: string | null, origin: string) => {
+  const configDir = await createTempDir();
+  const config = createConfig(workspaceRoot, origin);
+  const tokenStore = new TokenStore(configDir);
+  await tokenStore.load();
+  const pairingManager = new PairingManager(
+    tokenStore,
+    config.pairing.tokenTtlDays,
+  );
+  const jobManager = new JobManager(
+    config.jobs.maxConcurrent,
+    config.jobs.timeoutSeconds,
+  );
+  const logger = pino({ enabled: false });
+  const capabilities = { tools: {} };
+  const ctx: DaemonContext = {
+    config,
+    configDir,
+    tokenStore,
+    pairingManager,
+    jobManager,
+    capabilities,
+    logger,
+    version: "0.1.0",
+    build: undefined,
+  };
+  return { ctx, app: createApp(ctx) };
+};
+
+describe("Git Daemon API", () => {
+  const origin = "http://localhost:5173";
+
+  it("rejects missing Origin header", async () => {
+    const { app } = await createContext(null, origin);
+    const res = await request(app).get("/v1/meta").set("Host", "127.0.0.1");
+    expect(res.status).toBe(403);
+    expect(res.body.errorCode).toBe("origin_not_allowed");
+  });
+
+  it("returns meta for allowed origin", async () => {
+    const { app } = await createContext(null, origin);
+    const res = await request(app)
+      .get("/v1/meta")
+      .set("Origin", origin)
+      .set("Host", "127.0.0.1");
+    expect(res.status).toBe(200);
+    expect(res.body.version).toBeTypeOf("string");
+    expect(res.body.pairing).toBeTruthy();
+  });
+
+  it("requires auth for protected routes", async () => {
+    const { app } = await createContext(null, origin);
+    const res = await request(app)
+      .get("/v1/git/status")
+      .query({ repoPath: "repo" })
+      .set("Origin", origin)
+      .set("Host", "127.0.0.1");
+    expect(res.status).toBe(401);
+    expect(res.body.errorCode).toBe("auth_required");
+  });
+
+  it("returns workspace_required when not configured", async () => {
+    const { app, ctx } = await createContext(null, origin);
+    const { token } = await ctx.tokenStore.issueToken(origin, 30);
+
+    const res = await request(app)
+      .get("/v1/git/status")
+      .query({ repoPath: "repo" })
+      .set("Origin", origin)
+      .set("Host", "127.0.0.1")
+      .set("Authorization", `Bearer ${token}`);
+
+    expect(res.status).toBe(409);
+    expect(res.body.errorCode).toBe("workspace_required");
+  });
+
+  it("validates repoUrl on clone", async () => {
+    const workspaceRoot = await createTempDir();
+    const { app, ctx } = await createContext(workspaceRoot, origin);
+    const { token } = await ctx.tokenStore.issueToken(origin, 30);
+
+    const res = await request(app)
+      .post("/v1/git/clone")
+      .set("Origin", origin)
+      .set("Host", "127.0.0.1")
+      .set("Authorization", `Bearer ${token}`)
+      .send({ repoUrl: "file:///tmp/repo", destRelative: "repo" });
+
+    expect(res.status).toBe(422);
+    expect(res.body.errorCode).toBe("invalid_repo_url");
+  });
+});
package/tsconfig.json
ADDED
@@ -0,0 +1,14 @@
+{
+  "compilerOptions": {
+    "target": "ES2020",
+    "module": "commonjs",
+    "rootDir": "src",
+    "outDir": "dist",
+    "strict": true,
+    "esModuleInterop": true,
+    "forceConsistentCasingInFileNames": true,
+    "skipLibCheck": true,
+    "resolveJsonModule": true
+  },
+  "include": ["src", "tests", "vitest.config.ts"]
+}