@wastedcode/memex 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +291 -0
- package/dist/cli/client.d.ts +35 -0
- package/dist/cli/client.js +183 -0
- package/dist/cli/client.js.map +1 -0
- package/dist/cli/commands/chown.d.ts +2 -0
- package/dist/cli/commands/chown.js +22 -0
- package/dist/cli/commands/chown.js.map +1 -0
- package/dist/cli/commands/config.d.ts +2 -0
- package/dist/cli/commands/config.js +132 -0
- package/dist/cli/commands/config.js.map +1 -0
- package/dist/cli/commands/create.d.ts +2 -0
- package/dist/cli/commands/create.js +21 -0
- package/dist/cli/commands/create.js.map +1 -0
- package/dist/cli/commands/destroy.d.ts +2 -0
- package/dist/cli/commands/destroy.js +34 -0
- package/dist/cli/commands/destroy.js.map +1 -0
- package/dist/cli/commands/ingest.d.ts +2 -0
- package/dist/cli/commands/ingest.js +74 -0
- package/dist/cli/commands/ingest.js.map +1 -0
- package/dist/cli/commands/lint.d.ts +2 -0
- package/dist/cli/commands/lint.js +46 -0
- package/dist/cli/commands/lint.js.map +1 -0
- package/dist/cli/commands/list.d.ts +2 -0
- package/dist/cli/commands/list.js +28 -0
- package/dist/cli/commands/list.js.map +1 -0
- package/dist/cli/commands/login.d.ts +2 -0
- package/dist/cli/commands/login.js +51 -0
- package/dist/cli/commands/login.js.map +1 -0
- package/dist/cli/commands/logs.d.ts +2 -0
- package/dist/cli/commands/logs.js +26 -0
- package/dist/cli/commands/logs.js.map +1 -0
- package/dist/cli/commands/query.d.ts +2 -0
- package/dist/cli/commands/query.js +48 -0
- package/dist/cli/commands/query.js.map +1 -0
- package/dist/cli/commands/serve.d.ts +2 -0
- package/dist/cli/commands/serve.js +14 -0
- package/dist/cli/commands/serve.js.map +1 -0
- package/dist/cli/commands/status.d.ts +2 -0
- package/dist/cli/commands/status.js +66 -0
- package/dist/cli/commands/status.js.map +1 -0
- package/dist/daemon/auth.d.ts +31 -0
- package/dist/daemon/auth.js +84 -0
- package/dist/daemon/auth.js.map +1 -0
- package/dist/daemon/db.d.ts +36 -0
- package/dist/daemon/db.js +181 -0
- package/dist/daemon/db.js.map +1 -0
- package/dist/daemon/namespace.d.ts +34 -0
- package/dist/daemon/namespace.js +74 -0
- package/dist/daemon/namespace.js.map +1 -0
- package/dist/daemon/peercred.d.ts +15 -0
- package/dist/daemon/peercred.js +19 -0
- package/dist/daemon/peercred.js.map +1 -0
- package/dist/daemon/queue.d.ts +26 -0
- package/dist/daemon/queue.js +126 -0
- package/dist/daemon/queue.js.map +1 -0
- package/dist/daemon/routes.d.ts +38 -0
- package/dist/daemon/routes.js +258 -0
- package/dist/daemon/routes.js.map +1 -0
- package/dist/daemon/runner.d.ts +25 -0
- package/dist/daemon/runner.js +195 -0
- package/dist/daemon/runner.js.map +1 -0
- package/dist/daemon/scaffold.d.ts +38 -0
- package/dist/daemon/scaffold.js +141 -0
- package/dist/daemon/scaffold.js.map +1 -0
- package/dist/daemon/server.d.ts +11 -0
- package/dist/daemon/server.js +145 -0
- package/dist/daemon/server.js.map +1 -0
- package/dist/daemon.d.ts +1 -0
- package/dist/daemon.js +55 -0
- package/dist/daemon.js.map +1 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +36 -0
- package/dist/index.js.map +1 -0
- package/dist/lib/constants.d.ts +17 -0
- package/dist/lib/constants.js +30 -0
- package/dist/lib/constants.js.map +1 -0
- package/dist/lib/errors.d.ts +32 -0
- package/dist/lib/errors.js +64 -0
- package/dist/lib/errors.js.map +1 -0
- package/dist/lib/prompts/ingest.d.ts +9 -0
- package/dist/lib/prompts/ingest.js +48 -0
- package/dist/lib/prompts/ingest.js.map +1 -0
- package/dist/lib/prompts/lint.d.ts +8 -0
- package/dist/lib/prompts/lint.js +62 -0
- package/dist/lib/prompts/lint.js.map +1 -0
- package/dist/lib/prompts/query.d.ts +8 -0
- package/dist/lib/prompts/query.js +37 -0
- package/dist/lib/prompts/query.js.map +1 -0
- package/dist/lib/prompts/wiki.d.ts +11 -0
- package/dist/lib/prompts/wiki.js +112 -0
- package/dist/lib/prompts/wiki.js.map +1 -0
- package/dist/lib/types.d.ts +76 -0
- package/dist/lib/types.js +3 -0
- package/dist/lib/types.js.map +1 -0
- package/dist/standalone/memex.mjs +2313 -0
- package/dist/standalone/memex.mjs.map +7 -0
- package/package.json +54 -0
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
import { Command } from 'commander';
|
|
2
|
+
import { MemexClient } from '../client.js';
|
|
3
|
+
/**
 * `memex status <wikiId> [jobId]`
 *
 * With a jobId: print the job's type/status/timestamps and, when present,
 * its parsed result (output / error / duration). Without a jobId: print a
 * fixed-width table of the wiki's recent jobs.
 */
export const statusCommand = new Command('status')
    .description('Check job status for a wiki')
    .argument('<wikiId>', 'Target wiki')
    .argument('[jobId]', 'Specific job ID (omit to list recent jobs)')
    .action(async (wikiId, jobId) => {
    const client = new MemexClient();
    if (jobId) {
        // Show single job
        const id = parseInt(jobId, 10);
        // Reject non-numeric IDs up front instead of sending NaN to the daemon.
        if (Number.isNaN(id)) {
            console.error(`Error: invalid job ID '${jobId}'`);
            process.exit(1);
        }
        const resp = await client.getJob(wikiId, id);
        if (!resp.ok) {
            console.error(`Error: ${resp.error}`);
            process.exit(1);
        }
        const job = resp.data;
        console.log(`Job #${job.id}`);
        console.log(` Type: ${job.type}`);
        console.log(` Status: ${job.status}`);
        console.log(` Created: ${job.created_at}`);
        if (job.started_at)
            console.log(` Started: ${job.started_at}`);
        if (job.completed_at)
            console.log(` Completed: ${job.completed_at}`);
        if (job.result) {
            try {
                const parsed = JSON.parse(job.result);
                if (parsed.output) {
                    console.log(`\nOutput:\n${parsed.output}`);
                }
                if (parsed.error) {
                    console.log(`\nError:\n${parsed.error}`);
                }
                // Compare against undefined so a legitimate 0ms duration still prints.
                if (parsed.duration_ms !== undefined) {
                    console.log(`\nDuration: ${parsed.duration_ms}ms`);
                }
            }
            catch {
                // Result was not JSON; show it verbatim.
                console.log(`\nResult: ${job.result}`);
            }
        }
    }
    else {
        // List recent jobs
        const resp = await client.listJobs(wikiId);
        if (!resp.ok) {
            console.error(`Error: ${resp.error}`);
            process.exit(1);
        }
        const jobs = (resp.data ?? []);
        if (jobs.length === 0) {
            console.log(`No jobs for wiki '${wikiId}'.`);
            return;
        }
        const header = padRow('ID', 'TYPE', 'STATUS', 'CREATED');
        console.log(header);
        console.log('-'.repeat(header.length));
        for (const job of jobs) {
            console.log(padRow(String(job.id), job.type, job.status, job.created_at));
        }
    }
});
|
|
63
|
+
/**
 * Format one fixed-width table row: id (8 cols), type (10), status (12),
 * then the created timestamp, separated by single spaces.
 */
function padRow(id, type, status, created) {
    const cells = [id.padEnd(8), type.padEnd(10), status.padEnd(12), created];
    return cells.join(' ');
}
|
|
66
|
+
//# sourceMappingURL=status.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"status.js","sourceRoot":"","sources":["../../../src/cli/commands/status.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,WAAW,CAAC;AACpC,OAAO,EAAE,WAAW,EAAE,MAAM,cAAc,CAAC;AAG3C,MAAM,CAAC,MAAM,aAAa,GAAG,IAAI,OAAO,CAAC,QAAQ,CAAC;KAC/C,WAAW,CAAC,6BAA6B,CAAC;KAC1C,QAAQ,CAAC,UAAU,EAAE,aAAa,CAAC;KACnC,QAAQ,CAAC,SAAS,EAAE,4CAA4C,CAAC;KACjE,MAAM,CAAC,KAAK,EAAE,MAAc,EAAE,KAAc,EAAE,EAAE;IAC/C,MAAM,MAAM,GAAG,IAAI,WAAW,EAAE,CAAC;IAEjC,IAAI,KAAK,EAAE,CAAC;QACV,kBAAkB;QAClB,MAAM,IAAI,GAAG,MAAM,MAAM,CAAC,MAAM,CAAC,MAAM,EAAE,QAAQ,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,CAAC;QAC9D,IAAI,CAAC,IAAI,CAAC,EAAE,EAAE,CAAC;YACb,OAAO,CAAC,KAAK,CAAC,UAAU,IAAI,CAAC,KAAK,EAAE,CAAC,CAAC;YACtC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;QAClB,CAAC;QAED,MAAM,GAAG,GAAG,IAAI,CAAC,IAAgB,CAAC;QAClC,OAAO,CAAC,GAAG,CAAC,QAAQ,GAAG,CAAC,EAAE,EAAE,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,gBAAgB,GAAG,CAAC,IAAI,EAAE,CAAC,CAAC;QACxC,OAAO,CAAC,GAAG,CAAC,gBAAgB,GAAG,CAAC,MAAM,EAAE,CAAC,CAAC;QAC1C,OAAO,CAAC,GAAG,CAAC,gBAAgB,GAAG,CAAC,UAAU,EAAE,CAAC,CAAC;QAC9C,IAAI,GAAG,CAAC,UAAU;YAAE,OAAO,CAAC,GAAG,CAAC,gBAAgB,GAAG,CAAC,UAAU,EAAE,CAAC,CAAC;QAClE,IAAI,GAAG,CAAC,YAAY;YAAE,OAAO,CAAC,GAAG,CAAC,gBAAgB,GAAG,CAAC,YAAY,EAAE,CAAC,CAAC;QAEtE,IAAI,GAAG,CAAC,MAAM,EAAE,CAAC;YACf,IAAI,CAAC;gBACH,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;gBACtC,IAAI,MAAM,CAAC,MAAM,EAAE,CAAC;oBAClB,OAAO,CAAC,GAAG,CAAC,cAAc,MAAM,CAAC,MAAM,EAAE,CAAC,CAAC;gBAC7C,CAAC;gBACD,IAAI,MAAM,CAAC,KAAK,EAAE,CAAC;oBACjB,OAAO,CAAC,GAAG,CAAC,aAAa,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC;gBAC3C,CAAC;gBACD,IAAI,MAAM,CAAC,WAAW,EAAE,CAAC;oBACvB,OAAO,CAAC,GAAG,CAAC,eAAe,MAAM,CAAC,WAAW,IAAI,CAAC,CAAC;gBACrD,CAAC;YACH,CAAC;YAAC,MAAM,CAAC;gBACP,OAAO,CAAC,GAAG,CAAC,aAAa,GAAG,CAAC,MAAM,EAAE,CAAC,CAAC;YACzC,CAAC;QACH,CAAC;IACH,CAAC;SAAM,CAAC;QACN,mBAAmB;QACnB,MAAM,IAAI,GAAG,MAAM,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;QAC3C,IAAI,CAAC,IAAI,CAAC,EAAE,EAAE,CAAC;YACb,OAAO,CAAC,KAAK,CAAC,UAAU,IAAI,CAAC,KAAK,EAAE,CAAC,CAAC;YACtC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;QACl
B,CAAC;QAED,MAAM,IAAI,GAAG,CAAC,IAAI,CAAC,IAAI,IAAI,EAAE,CAAe,CAAC;QAC7C,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACtB,OAAO,CAAC,GAAG,CAAC,qBAAqB,MAAM,IAAI,CAAC,CAAC;YAC7C,OAAO;QACT,CAAC;QAED,MAAM,MAAM,GAAG,MAAM,CAAC,IAAI,EAAE,MAAM,EAAE,QAAQ,EAAE,SAAS,CAAC,CAAC;QACzD,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;QACpB,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC;QAEvC,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE,CAAC;YACvB,OAAO,CAAC,GAAG,CAAC,MAAM,CAChB,MAAM,CAAC,GAAG,CAAC,EAAE,CAAC,EACd,GAAG,CAAC,IAAI,EACR,GAAG,CAAC,MAAM,EACV,GAAG,CAAC,UAAU,CACf,CAAC,CAAC;QACL,CAAC;IACH,CAAC;AACH,CAAC,CAAC,CAAC;AAEL,SAAS,MAAM,CAAC,EAAU,EAAE,IAAY,EAAE,MAAc,EAAE,OAAe;IACvE,OAAO,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,IAAI,MAAM,CAAC,MAAM,CAAC,EAAE,CAAC,IAAI,OAAO,EAAE,CAAC;AAC9E,CAAC"}
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
export declare class AuthManager {
    private wikisDir;
    private globalApiKey?;
    constructor(wikisDir?: string, globalApiKey?: string | undefined);
    /**
     * Resolve credentials for a wiki, returning environment variables
     * to set on the claude child process.
     *
     * Priority:
     * 1. Per-wiki API key file (.claude/api-key)
     * 2. Per-wiki OAuth credentials (.claude/.credentials.json exists)
     * 3. Global ANTHROPIC_API_KEY from daemon environment
     *
     * @throws NoCredentialsError when none of the three sources is available.
     */
    resolveCredentials(wikiId: string): Record<string, string>;
    /**
     * Store an API key for a wiki.
     */
    setApiKey(wikiId: string, key: string): void;
    /**
     * Get the CLAUDE_CONFIG_DIR path for a wiki.
     */
    configDir(wikiId: string): string;
    /**
     * Store OAuth credentials for a wiki by copying .credentials.json content.
     */
    setCredentials(wikiId: string, credentialsJson: string): void;
    /**
     * Check if a wiki has valid credentials (any method).
     * Presence of the global API key counts for every wiki.
     */
    hasCredentials(wikiId: string): boolean;
}
|
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
import { existsSync, readFileSync, writeFileSync, mkdirSync } from 'node:fs';
|
|
2
|
+
import { join } from 'node:path';
|
|
3
|
+
import { WIKIS_DIR } from '../lib/constants.js';
|
|
4
|
+
import { NoCredentialsError } from '../lib/errors.js';
|
|
5
|
+
/**
 * Manages per-wiki Anthropic credentials stored under each wiki's
 * `.claude` directory, with an optional daemon-wide API key fallback.
 */
export class AuthManager {
    wikisDir;
    globalApiKey;
    constructor(wikisDir = WIKIS_DIR, globalApiKey) {
        this.wikisDir = wikisDir;
        this.globalApiKey = globalApiKey;
    }
    /**
     * Resolve credentials for a wiki as environment variables for the
     * claude child process.
     *
     * Checked in order: per-wiki api-key file, per-wiki OAuth
     * .credentials.json, then the daemon's global ANTHROPIC_API_KEY.
     *
     * @throws NoCredentialsError when no source applies.
     */
    resolveCredentials(wikiId) {
        const dir = this.configDir(wikiId);
        // A per-wiki API key file wins over everything else.
        const keyFile = join(dir, 'api-key');
        if (existsSync(keyFile)) {
            const key = readFileSync(keyFile, 'utf-8').trim();
            if (key) {
                return { ANTHROPIC_API_KEY: key, CLAUDE_CONFIG_DIR: dir };
            }
        }
        // OAuth: the presence of the credentials file is enough; the claude
        // CLI locates it via CLAUDE_CONFIG_DIR.
        if (existsSync(join(dir, '.credentials.json'))) {
            return { CLAUDE_CONFIG_DIR: dir };
        }
        // Fall back to the daemon-wide key, if one was configured.
        if (this.globalApiKey) {
            return { ANTHROPIC_API_KEY: this.globalApiKey, CLAUDE_CONFIG_DIR: dir };
        }
        throw new NoCredentialsError(wikiId);
    }
    /** Store an API key for a wiki (owner-only file permissions). */
    setApiKey(wikiId, key) {
        const dir = this.configDir(wikiId);
        mkdirSync(dir, { recursive: true, mode: 0o700 });
        writeFileSync(join(dir, 'api-key'), key.trim(), { mode: 0o600 });
    }
    /** Get the CLAUDE_CONFIG_DIR path for a wiki. */
    configDir(wikiId) {
        return join(this.wikisDir, wikiId, '.claude');
    }
    /** Store OAuth credentials for a wiki by writing .credentials.json. */
    setCredentials(wikiId, credentialsJson) {
        const dir = this.configDir(wikiId);
        mkdirSync(dir, { recursive: true, mode: 0o700 });
        writeFileSync(join(dir, '.credentials.json'), credentialsJson, { mode: 0o600 });
    }
    /** Check if a wiki has valid credentials (any method). */
    hasCredentials(wikiId) {
        if (this.globalApiKey) {
            return true;
        }
        const dir = this.configDir(wikiId);
        return ['api-key', '.credentials.json'].some((f) => existsSync(join(dir, f)));
    }
}
|
|
84
|
+
//# sourceMappingURL=auth.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"auth.js","sourceRoot":"","sources":["../../src/daemon/auth.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,YAAY,EAAE,aAAa,EAAE,SAAS,EAAE,MAAM,SAAS,CAAC;AAC7E,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AACjC,OAAO,EAAE,SAAS,EAAE,MAAM,qBAAqB,CAAC;AAChD,OAAO,EAAE,kBAAkB,EAAE,MAAM,kBAAkB,CAAC;AAEtD,MAAM,OAAO,WAAW;IAEZ;IACA;IAFV,YACU,WAAmB,SAAS,EAC5B,YAAqB;QADrB,aAAQ,GAAR,QAAQ,CAAoB;QAC5B,iBAAY,GAAZ,YAAY,CAAS;IAC5B,CAAC;IAEJ;;;;;;;;OAQG;IACH,kBAAkB,CAAC,MAAc;QAC/B,MAAM,SAAS,GAAG,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;QAEzC,sBAAsB;QACtB,MAAM,UAAU,GAAG,IAAI,CAAC,SAAS,EAAE,SAAS,CAAC,CAAC;QAC9C,IAAI,UAAU,CAAC,UAAU,CAAC,EAAE,CAAC;YAC3B,MAAM,GAAG,GAAG,YAAY,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC,IAAI,EAAE,CAAC;YACrD,IAAI,GAAG,EAAE,CAAC;gBACR,OAAO;oBACL,iBAAiB,EAAE,GAAG;oBACtB,iBAAiB,EAAE,SAAS;iBAC7B,CAAC;YACJ,CAAC;QACH,CAAC;QAED,gCAAgC;QAChC,MAAM,SAAS,GAAG,IAAI,CAAC,SAAS,EAAE,mBAAmB,CAAC,CAAC;QACvD,IAAI,UAAU,CAAC,SAAS,CAAC,EAAE,CAAC;YAC1B,OAAO;gBACL,iBAAiB,EAAE,SAAS;aAC7B,CAAC;QACJ,CAAC;QAED,oBAAoB;QACpB,IAAI,IAAI,CAAC,YAAY,EAAE,CAAC;YACtB,OAAO;gBACL,iBAAiB,EAAE,IAAI,CAAC,YAAY;gBACpC,iBAAiB,EAAE,SAAS;aAC7B,CAAC;QACJ,CAAC;QAED,MAAM,IAAI,kBAAkB,CAAC,MAAM,CAAC,CAAC;IACvC,CAAC;IAED;;OAEG;IACH,SAAS,CAAC,MAAc,EAAE,GAAW;QACnC,MAAM,SAAS,GAAG,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;QACzC,SAAS,CAAC,SAAS,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC;QACvD,MAAM,UAAU,GAAG,IAAI,CAAC,SAAS,EAAE,SAAS,CAAC,CAAC;QAC9C,aAAa,CAAC,UAAU,EAAE,GAAG,CAAC,IAAI,EAAE,EAAE,EAAE,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC;IACzD,CAAC;IAED;;OAEG;IACH,SAAS,CAAC,MAAc;QACtB,OAAO,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,MAAM,EAAE,SAAS,CAAC,CAAC;IAChD,CAAC;IAED;;OAEG;IACH,cAAc,CAAC,MAAc,EAAE,eAAuB;QACpD,MAAM,SAAS,GAAG,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;QACzC,SAAS,CAAC,SAAS,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC;QACvD,MAAM,SAAS,GAAG,IAAI,CAAC,SAAS,EAAE,mBAAmB,CAAC,CAAC;QACvD,aAAa,CAAC,SAAS,EAAE,eAAe,EAAE,EAAE,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC;IAC7D,CAAC;IAED;;OAEG;IACH,cAAc,CAAC,MAAc;QAC3B,MAAM,SAAS,GAAG,IAAI,C
AAC,SAAS,CAAC,MAAM,CAAC,CAAC;QACzC,MAAM,UAAU,GAAG,IAAI,CAAC,SAAS,EAAE,SAAS,CAAC,CAAC;QAC9C,MAAM,SAAS,GAAG,IAAI,CAAC,SAAS,EAAE,mBAAmB,CAAC,CAAC;QACvD,OAAO,UAAU,CAAC,UAAU,CAAC,IAAI,UAAU,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,YAAY,CAAC;IAChF,CAAC;CACF"}
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import type { Wiki, WikiConfig, QueueJob, JobType, JobStatus, JobResult, AuditEntry } from '../lib/types.js';
|
|
2
|
+
/**
 * Synchronous SQLite persistence layer for wikis, the job queue, and the
 * audit log (backed by better-sqlite3).
 */
export declare class Database {
    private db;
    constructor(dbPath: string);
    /** Create tables and indexes if they do not exist. Idempotent. */
    initialize(): void;
    close(): void;
    createWiki(id: string, name: string, ownerUid: number): Wiki;
    getWiki(id: string): Wiki | undefined;
    /** List all wikis, or only those owned by `ownerUid` when provided. */
    listWikis(ownerUid?: number): Wiki[];
    chownWiki(id: string, newOwnerUid: number): Wiki;
    /** Apply a partial config update (name and/or default_model). */
    updateWiki(id: string, config: WikiConfig): Wiki;
    deleteWiki(id: string): void;
    /** Enqueue a job; payload is JSON-serialized for storage. */
    createJob(wikiId: string, type: JobType, payload: object): QueueJob;
    getJob(jobId: number): QueueJob | undefined;
    /** List jobs for a wiki, newest first, optionally filtered/limited. */
    listJobs(wikiId: string, opts?: {
        status?: JobStatus;
        limit?: number;
    }): QueueJob[];
    /**
     * Atomically claim the next pending job for a wiki.
     * Sets status to 'running' and records started_at.
     */
    claimNextJob(wikiId: string): QueueJob | undefined;
    completeJob(jobId: number, result: JobResult): void;
    failJob(jobId: number, error: string): void;
    /** Count jobs that are still pending OR currently running. */
    getPendingJobCount(wikiId: string): number;
    /**
     * On daemon startup: reset jobs that were 'running' when the process died.
     * They get re-queued as 'pending'.
     */
    resetStaleJobs(): number;
    /** Get wiki IDs that have pending jobs (for startup drain). */
    wikisWithPendingJobs(): string[];
    logAudit(wikiId: string, action: string, detail?: string): void;
    /** Newest-first audit entries for a wiki (default limit 50). */
    getAuditLog(wikiId: string, limit?: number): AuditEntry[];
}
|
|
@@ -0,0 +1,181 @@
|
|
|
1
|
+
import BetterSqlite3 from 'better-sqlite3';
|
|
2
|
+
/**
 * Synchronous SQLite persistence layer (better-sqlite3) for wikis, the
 * job queue, and the audit log.
 */
export class Database {
    // better-sqlite3 connection handle.
    db;
    constructor(dbPath) {
        this.db = new BetterSqlite3(dbPath);
        // WAL: readers don't block the single writer.
        this.db.pragma('journal_mode = WAL');
        // Required for the ON DELETE CASCADE references below.
        this.db.pragma('foreign_keys = ON');
    }
    // ── Schema ───────────────────────────────────────────────────────────────
    // Create tables and indexes if they do not exist. Idempotent.
    initialize() {
        this.db.exec(`
      CREATE TABLE IF NOT EXISTS wikis (
        id TEXT PRIMARY KEY,
        name TEXT NOT NULL,
        owner_uid INTEGER NOT NULL,
        default_model TEXT NOT NULL DEFAULT 'sonnet',
        created_at TEXT NOT NULL DEFAULT (datetime('now'))
      );

      CREATE TABLE IF NOT EXISTS queue_jobs (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        wiki_id TEXT NOT NULL REFERENCES wikis(id) ON DELETE CASCADE,
        type TEXT NOT NULL,
        payload TEXT NOT NULL,
        status TEXT NOT NULL DEFAULT 'pending',
        retry_count INTEGER NOT NULL DEFAULT 0,
        created_at TEXT NOT NULL DEFAULT (datetime('now')),
        started_at TEXT,
        completed_at TEXT,
        result TEXT
      );

      CREATE TABLE IF NOT EXISTS audit_log (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        wiki_id TEXT NOT NULL REFERENCES wikis(id) ON DELETE CASCADE,
        action TEXT NOT NULL,
        detail TEXT,
        created_at TEXT NOT NULL DEFAULT (datetime('now'))
      );

      CREATE INDEX IF NOT EXISTS idx_queue_wiki_status
        ON queue_jobs(wiki_id, status);
      CREATE INDEX IF NOT EXISTS idx_audit_wiki
        ON audit_log(wiki_id);
      CREATE INDEX IF NOT EXISTS idx_audit_created
        ON audit_log(created_at);
    `);
    }
    close() {
        this.db.close();
    }
    // ── Wikis ───────────────────────────────────────────────────────────────
    // Insert a wiki row and return the inserted row.
    createWiki(id, name, ownerUid) {
        const stmt = this.db.prepare(`
      INSERT INTO wikis (id, name, owner_uid) VALUES (?, ?, ?)
      RETURNING *
    `);
        return stmt.get(id, name, ownerUid);
    }
    getWiki(id) {
        return this.db.prepare('SELECT * FROM wikis WHERE id = ?').get(id);
    }
    // List all wikis, or only those owned by ownerUid when provided.
    listWikis(ownerUid) {
        if (ownerUid !== undefined) {
            return this.db.prepare('SELECT * FROM wikis WHERE owner_uid = ? ORDER BY created_at').all(ownerUid);
        }
        return this.db.prepare('SELECT * FROM wikis ORDER BY created_at').all();
    }
    chownWiki(id, newOwnerUid) {
        const stmt = this.db.prepare('UPDATE wikis SET owner_uid = ? WHERE id = ? RETURNING *');
        return stmt.get(newOwnerUid, id);
    }
    // Apply a partial config update; only name and default_model are
    // recognized. With no recognized fields, returns the current row unchanged.
    updateWiki(id, config) {
        const sets = [];
        const values = [];
        if (config.name !== undefined) {
            sets.push('name = ?');
            values.push(config.name);
        }
        if (config.default_model !== undefined) {
            sets.push('default_model = ?');
            values.push(config.default_model);
        }
        if (sets.length === 0) {
            return this.getWiki(id);
        }
        values.push(id);
        const stmt = this.db.prepare(`UPDATE wikis SET ${sets.join(', ')} WHERE id = ? RETURNING *`);
        return stmt.get(...values);
    }
    // Jobs and audit entries cascade via their foreign keys.
    deleteWiki(id) {
        this.db.prepare('DELETE FROM wikis WHERE id = ?').run(id);
    }
    // ── Jobs ─────────────────────────────────────────────────────────────────
    // Enqueue a job (status defaults to 'pending'); payload is JSON-serialized.
    createJob(wikiId, type, payload) {
        const stmt = this.db.prepare(`
      INSERT INTO queue_jobs (wiki_id, type, payload)
      VALUES (?, ?, ?)
      RETURNING *
    `);
        return stmt.get(wikiId, type, JSON.stringify(payload));
    }
    getJob(jobId) {
        return this.db.prepare('SELECT * FROM queue_jobs WHERE id = ?').get(jobId);
    }
    // List a wiki's jobs, newest first, optionally filtered by status
    // and/or limited. SQL is built from fixed fragments; values are bound.
    listJobs(wikiId, opts) {
        let sql = 'SELECT * FROM queue_jobs WHERE wiki_id = ?';
        const params = [wikiId];
        if (opts?.status) {
            sql += ' AND status = ?';
            params.push(opts.status);
        }
        sql += ' ORDER BY id DESC';
        if (opts?.limit) {
            sql += ' LIMIT ?';
            params.push(opts.limit);
        }
        return this.db.prepare(sql).all(...params);
    }
    /**
     * Atomically claim the next pending job for a wiki.
     * Sets status to 'running' and records started_at.
     */
    claimNextJob(wikiId) {
        // Single UPDATE ... RETURNING so claim-and-read cannot race.
        const stmt = this.db.prepare(`
      UPDATE queue_jobs
      SET status = 'running', started_at = datetime('now')
      WHERE id = (
        SELECT id FROM queue_jobs
        WHERE wiki_id = ? AND status = 'pending'
        ORDER BY id ASC
        LIMIT 1
      )
      RETURNING *
    `);
        return stmt.get(wikiId);
    }
    // Mark a job completed and store its JSON-serialized result.
    completeJob(jobId, result) {
        this.db.prepare(`
      UPDATE queue_jobs
      SET status = 'completed', completed_at = datetime('now'), result = ?
      WHERE id = ?
    `).run(JSON.stringify(result), jobId);
    }
    // Mark a job failed; result stores { success: false, error }.
    failJob(jobId, error) {
        this.db.prepare(`
      UPDATE queue_jobs
      SET status = 'failed', completed_at = datetime('now'), result = ?
      WHERE id = ?
    `).run(JSON.stringify({ success: false, error }), jobId);
    }
    // Counts jobs that are pending OR currently running.
    getPendingJobCount(wikiId) {
        const row = this.db.prepare("SELECT COUNT(*) as count FROM queue_jobs WHERE wiki_id = ? AND status IN ('pending', 'running')").get(wikiId);
        return row.count;
    }
    /**
     * On daemon startup: reset jobs that were 'running' when the process died.
     * They get re-queued as 'pending'. Returns the number of jobs reset.
     */
    resetStaleJobs() {
        const info = this.db.prepare(`
      UPDATE queue_jobs
      SET status = 'pending', retry_count = retry_count + 1
      WHERE status = 'running'
    `).run();
        return info.changes;
    }
    /** Get wiki IDs that have pending jobs (for startup drain). */
    wikisWithPendingJobs() {
        const rows = this.db.prepare("SELECT DISTINCT wiki_id FROM queue_jobs WHERE status = 'pending'").all();
        return rows.map(r => r.wiki_id);
    }
    // ── Audit ────────────────────────────────────────────────────────────────
    logAudit(wikiId, action, detail) {
        this.db.prepare('INSERT INTO audit_log (wiki_id, action, detail) VALUES (?, ?, ?)').run(wikiId, action, detail ?? null);
    }
    // Newest-first audit entries for a wiki.
    getAuditLog(wikiId, limit = 50) {
        return this.db.prepare('SELECT * FROM audit_log WHERE wiki_id = ? ORDER BY id DESC LIMIT ?').all(wikiId, limit);
    }
}
|
|
181
|
+
//# sourceMappingURL=db.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"db.js","sourceRoot":"","sources":["../../src/daemon/db.ts"],"names":[],"mappings":"AAAA,OAAO,aAAa,MAAM,gBAAgB,CAAC;AAK3C,MAAM,OAAO,QAAQ;IACX,EAAE,CAAyB;IAEnC,YAAY,MAAc;QACxB,IAAI,CAAC,EAAE,GAAG,IAAI,aAAa,CAAC,MAAM,CAAC,CAAC;QACpC,IAAI,CAAC,EAAE,CAAC,MAAM,CAAC,oBAAoB,CAAC,CAAC;QACrC,IAAI,CAAC,EAAE,CAAC,MAAM,CAAC,mBAAmB,CAAC,CAAC;IACtC,CAAC;IAED,4EAA4E;IAE5E,UAAU;QACR,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;KAoCZ,CAAC,CAAC;IAEL,CAAC;IAED,KAAK;QACH,IAAI,CAAC,EAAE,CAAC,KAAK,EAAE,CAAC;IAClB,CAAC;IAED,2EAA2E;IAE3E,UAAU,CAAC,EAAU,EAAE,IAAY,EAAE,QAAgB;QACnD,MAAM,IAAI,GAAG,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC;;;KAG5B,CAAC,CAAC;QACH,OAAO,IAAI,CAAC,GAAG,CAAC,EAAE,EAAE,IAAI,EAAE,QAAQ,CAAS,CAAC;IAC9C,CAAC;IAED,OAAO,CAAC,EAAU;QAChB,OAAO,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,kCAAkC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAqB,CAAC;IACzF,CAAC;IAED,SAAS,CAAC,QAAiB;QACzB,IAAI,QAAQ,KAAK,SAAS,EAAE,CAAC;YAC3B,OAAO,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,6DAA6D,CAAC,CAAC,GAAG,CAAC,QAAQ,CAAW,CAAC;QAChH,CAAC;QACD,OAAO,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,yCAAyC,CAAC,CAAC,GAAG,EAAY,CAAC;IACpF,CAAC;IAED,SAAS,CAAC,EAAU,EAAE,WAAmB;QACvC,MAAM,IAAI,GAAG,IAAI,CAAC,EAAE,CAAC,OAAO,CAC1B,yDAAyD,CAC1D,CAAC;QACF,OAAO,IAAI,CAAC,GAAG,CAAC,WAAW,EAAE,EAAE,CAAS,CAAC;IAC3C,CAAC;IAED,UAAU,CAAC,EAAU,EAAE,MAAkB;QACvC,MAAM,IAAI,GAAa,EAAE,CAAC;QAC1B,MAAM,MAAM,GAAc,EAAE,CAAC;QAE7B,IAAI,MAAM,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;YAC9B,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;YACtB,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;QAC3B,CAAC;QACD,IAAI,MAAM,CAAC,aAAa,KAAK,SAAS,EAAE,CAAC;YACvC,IAAI,CAAC,IAAI,CAAC,mBAAmB,CAAC,CAAC;YAC/B,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC;QACpC,CAAC;QAED,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACtB,OAAO,IAAI,CAAC,OAAO,CAAC,EAAE,CAAS,CAAC;QAClC,CAAC;QAED,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;QAChB,MAAM,IAAI,GAAG,IAAI,CAAC,EAAE,CAAC,OAAO,CAC1B,oBAAoB,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,2BAA2B,CAC/D,CAAC;QACF,OAAO,IAAI,CAAC,GAAG,CAAC,GAAG,MAAM,CAAS,CAAC;IACrC,CAAC;IAED,UAAU,CAAC,EAAU;QACnB,IAAI,CAAC,EAAE,
CAAC,OAAO,CAAC,gCAAgC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;IAC5D,CAAC;IAED,4EAA4E;IAE5E,SAAS,CAAC,MAAc,EAAE,IAAa,EAAE,OAAe;QACtD,MAAM,IAAI,GAAG,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC;;;;KAI5B,CAAC,CAAC;QACH,OAAO,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAa,CAAC;IACrE,CAAC;IAED,MAAM,CAAC,KAAa;QAClB,OAAO,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,uCAAuC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAyB,CAAC;IACrG,CAAC;IAED,QAAQ,CAAC,MAAc,EAAE,IAA6C;QACpE,IAAI,GAAG,GAAG,4CAA4C,CAAC;QACvD,MAAM,MAAM,GAAc,CAAC,MAAM,CAAC,CAAC;QAEnC,IAAI,IAAI,EAAE,MAAM,EAAE,CAAC;YACjB,GAAG,IAAI,iBAAiB,CAAC;YACzB,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAC3B,CAAC;QAED,GAAG,IAAI,mBAAmB,CAAC;QAE3B,IAAI,IAAI,EAAE,KAAK,EAAE,CAAC;YAChB,GAAG,IAAI,UAAU,CAAC;YAClB,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAC1B,CAAC;QAED,OAAO,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,MAAM,CAAe,CAAC;IAC3D,CAAC;IAED;;;OAGG;IACH,YAAY,CAAC,MAAc;QACzB,MAAM,IAAI,GAAG,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC;;;;;;;;;;KAU5B,CAAC,CAAC;QACH,OAAO,IAAI,CAAC,GAAG,CAAC,MAAM,CAAyB,CAAC;IAClD,CAAC;IAED,WAAW,CAAC,KAAa,EAAE,MAAiB;QAC1C,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC;;;;KAIf,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,EAAE,KAAK,CAAC,CAAC;IACxC,CAAC;IAED,OAAO,CAAC,KAAa,EAAE,KAAa;QAClC,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC;;;;KAIf,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,CAAC,EAAE,KAAK,CAAC,CAAC;IAC3D,CAAC;IAED,kBAAkB,CAAC,MAAc;QAC/B,MAAM,GAAG,GAAG,IAAI,CAAC,EAAE,CAAC,OAAO,CACzB,iGAAiG,CAClG,CAAC,GAAG,CAAC,MAAM,CAAsB,CAAC;QACnC,OAAO,GAAG,CAAC,KAAK,CAAC;IACnB,CAAC;IAED;;;OAGG;IACH,cAAc;QACZ,MAAM,IAAI,GAAG,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC;;;;KAI5B,CAAC,CAAC,GAAG,EAAE,CAAC;QACT,OAAO,IAAI,CAAC,OAAO,CAAC;IACtB,CAAC;IAED,+DAA+D;IAC/D,oBAAoB;QAClB,MAAM,IAAI,GAAG,IAAI,CAAC,EAAE,CAAC,OAAO,CAC1B,kEAAkE,CACnE,CAAC,GAAG,EAA2B,CAAC;QACjC,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;IAClC,CAAC;IAED,4EAA4E;IAE5E,QAAQ,CAAC,MAAc,EAAE,MAAc,EAAE,MAAe;QACtD,IAAI,CAAC,EAAE,CAAC,OAAO,CACb,kEAAkE,CACnE,CA
AC,GAAG,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,IAAI,IAAI,CAAC,CAAC;IACxC,CAAC;IAED,WAAW,CAAC,MAAc,EAAE,QAAgB,EAAE;QAC5C,OAAO,IAAI,CAAC,EAAE,CAAC,OAAO,CACpB,oEAAoE,CACrE,CAAC,GAAG,CAAC,MAAM,EAAE,KAAK,CAAiB,CAAC;IACvC,CAAC;CACF"}
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
/**
 * Per-job mount namespace isolation.
 *
 * Instead of persistent namespaces (which require kernel-specific unshare --mount=<file>),
 * we wrap each job command in `unshare -m -- sh -c '...'`. The namespace lives and dies
 * with the process. Simpler, more portable, same isolation.
 */
export declare class NamespaceManager {
    private wikisDir;
    constructor(wikisDir?: string);
    /**
     * Verify that we have CAP_SYS_ADMIN by attempting a trivial namespace operation.
     * @throws CapabilityError when the probe fails.
     */
    checkCapabilities(): void;
    /**
     * Ensure the /workspace mount target exists on the host.
     */
    ensureDirectories(): void;
    /**
     * Verify a wiki's directory exists.
     * @throws Error when the directory is missing.
     */
    validateWiki(wikiId: string): void;
    /**
     * Build the command + args to run a command inside a fresh mount namespace
     * with the wiki's directory bind-mounted to /workspace.
     *
     * Returns [command, ...args] to pass to spawn().
     * The caller appends their actual command (e.g. claude -p ...) to innerArgs.
     */
    wrapCommand(wikiId: string, innerCommand: string[]): {
        command: string;
        args: string[];
    };
}
|
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
import { execFileSync } from 'node:child_process';
|
|
2
|
+
import { existsSync, mkdirSync } from 'node:fs';
|
|
3
|
+
import { join } from 'node:path';
|
|
4
|
+
import { CapabilityError } from '../lib/errors.js';
|
|
5
|
+
import { WIKIS_DIR, WORKSPACE_MOUNT } from '../lib/constants.js';
|
|
6
|
+
/**
|
|
7
|
+
* Per-job mount namespace isolation.
|
|
8
|
+
*
|
|
9
|
+
* Instead of persistent namespaces (which require kernel-specific unshare --mount=<file>),
|
|
10
|
+
* we wrap each job command in `unshare -m -- sh -c '...'`. The namespace lives and dies
|
|
11
|
+
* with the process. Simpler, more portable, same isolation.
|
|
12
|
+
*/
|
|
13
|
+
/**
 * Per-job mount namespace isolation (see file header). Each job is wrapped in
 * `unshare -m -- sh -c '...'`; the namespace lives and dies with the process.
 */
export class NamespaceManager {
    wikisDir;
    constructor(wikisDir = WIKIS_DIR) {
        this.wikisDir = wikisDir;
    }
    /**
     * Verify that we have CAP_SYS_ADMIN by attempting a trivial namespace operation.
     * @throws {CapabilityError} when the `unshare -m -- true` probe fails.
     */
    checkCapabilities() {
        try {
            // stdio: 'pipe' keeps the probe's output off the daemon's console.
            execFileSync('unshare', ['-m', '--', 'true'], { stdio: 'pipe' });
        }
        catch {
            throw new CapabilityError();
        }
    }
    /**
     * Ensure the /workspace mount target exists on the host.
     */
    ensureDirectories() {
        mkdirSync(WORKSPACE_MOUNT, { recursive: true });
    }
    /**
     * Verify a wiki's directory exists.
     * @throws {Error} when `<wikisDir>/<wikiId>` does not exist.
     */
    validateWiki(wikiId) {
        const wikiDir = join(this.wikisDir, wikiId);
        if (!existsSync(wikiDir)) {
            throw new Error(`Wiki directory does not exist: ${wikiDir}`);
        }
    }
    /**
     * Build the command + args to run a command inside a fresh mount namespace
     * with the wiki's directory bind-mounted to /workspace.
     *
     * Returns [command, ...args] to pass to spawn().
     * The caller appends their actual command (e.g. claude -p ...) to innerArgs.
     */
    wrapCommand(wikiId, innerCommand) {
        const wikiDir = join(this.wikisDir, wikiId);
        // Build a shell script that:
        // 1. Bind-mounts the wiki dir to /workspace
        // 2. Remounts the bind mount nosuid,nodev
        // 3. cd into /workspace
        // 4. exec the inner command
        const script = [
            `mount --bind ${shellEscape(wikiDir)} ${shellEscape(WORKSPACE_MOUNT)}`,
            // BUGFIX: 'bind' must be included when changing a bind mount's flags.
            // A plain `-o remount,nosuid,nodev` targets the underlying
            // filesystem's superblock (changing flags for the whole host fs, or
            // failing with EBUSY), not just this bind mount's per-mount VFS
            // flags. See mount(8) "bind mount operation".
            `mount -o remount,bind,nosuid,nodev ${shellEscape(WORKSPACE_MOUNT)}`,
            `cd ${shellEscape(WORKSPACE_MOUNT)}`,
            `exec ${innerCommand.map(shellEscape).join(' ')}`,
        ].join(' && ');
        return {
            command: 'unshare',
            // --propagation private keeps the bind mount from leaking back into
            // the host's mount table.
            args: ['-m', '--propagation', 'private', '--', 'sh', '-c', script],
        };
    }
}
|
|
70
|
+
/**
 * Quote a string for safe interpolation into a POSIX shell command.
 * Wraps the value in single quotes; an embedded single quote is rendered
 * as '\'' (close quote, escaped quote, reopen quote).
 */
function shellEscape(s) {
    const escaped = s.split("'").join("'\\''");
    return `'${escaped}'`;
}
|
|
74
|
+
//# sourceMappingURL=namespace.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"namespace.js","sourceRoot":"","sources":["../../src/daemon/namespace.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAC;AAClD,OAAO,EAAE,UAAU,EAAE,SAAS,EAAE,MAAM,SAAS,CAAC;AAChD,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AACjC,OAAO,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AACnD,OAAO,EAAE,SAAS,EAAE,eAAe,EAAE,MAAM,qBAAqB,CAAC;AAEjE;;;;;;GAMG;AACH,MAAM,OAAO,gBAAgB;IAEjB;IADV,YACU,WAAmB,SAAS;QAA5B,aAAQ,GAAR,QAAQ,CAAoB;IACnC,CAAC;IAEJ;;OAEG;IACH,iBAAiB;QACf,IAAI,CAAC;YACH,YAAY,CAAC,SAAS,EAAE,CAAC,IAAI,EAAE,IAAI,EAAE,MAAM,CAAC,EAAE,EAAE,KAAK,EAAE,MAAM,EAAE,CAAC,CAAC;QACnE,CAAC;QAAC,MAAM,CAAC;YACP,MAAM,IAAI,eAAe,EAAE,CAAC;QAC9B,CAAC;IACH,CAAC;IAED;;OAEG;IACH,iBAAiB;QACf,SAAS,CAAC,eAAe,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAClD,CAAC;IAED;;OAEG;IACH,YAAY,CAAC,MAAc;QACzB,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;QAC5C,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE,CAAC;YACzB,MAAM,IAAI,KAAK,CAAC,kCAAkC,OAAO,EAAE,CAAC,CAAC;QAC/D,CAAC;IACH,CAAC;IAED;;;;;;OAMG;IACH,WAAW,CAAC,MAAc,EAAE,YAAsB;QAChD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;QAE5C,6BAA6B;QAC7B,4CAA4C;QAC5C,2BAA2B;QAC3B,wBAAwB;QACxB,4BAA4B;QAC5B,MAAM,MAAM,GAAG;YACb,gBAAgB,WAAW,CAAC,OAAO,CAAC,IAAI,WAAW,CAAC,eAAe,CAAC,EAAE;YACtE,iCAAiC,WAAW,CAAC,eAAe,CAAC,EAAE;YAC/D,MAAM,WAAW,CAAC,eAAe,CAAC,EAAE;YACpC,QAAQ,YAAY,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;SAClD,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAEf,OAAO;YACL,OAAO,EAAE,SAAS;YAClB,IAAI,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,MAAM,CAAC;SACnE,CAAC;IACJ,CAAC;CACF;AAED,SAAS,WAAW,CAAC,CAAS;IAC5B,6DAA6D;IAC7D,OAAO,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,IAAI,EAAE,OAAO,CAAC,GAAG,GAAG,CAAC;AAC9C,CAAC"}
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
import type { Socket } from 'node:net';
|
|
2
|
+
/** Kernel-reported credentials of the process on the other end of a Unix socket. */
interface PeerCred {
    uid: number;
    gid: number;
    pid: number;
}
/**
 * Extract the kernel-verified credentials (uid, gid, pid) of the peer
 * process connected to a Unix domain socket.
 *
 * Uses SO_PEERCRED — the kernel fills this in at connect() time,
 * so it cannot be spoofed by the client.
 * @throws Error when the socket's underlying file descriptor is unavailable.
 */
export declare function getPeerCred(socket: Socket): PeerCred;
export {};
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import { createRequire } from 'node:module';
|
|
2
|
+
const require = createRequire(import.meta.url);
|
|
3
|
+
// Load the native N-API addon compiled by node-gyp
|
|
4
|
+
const addon = require('../../build/Release/peercred.node');
|
|
5
|
+
/**
 * Extract the kernel-verified credentials (uid, gid, pid) of the peer
 * process connected to a Unix domain socket.
 *
 * Uses SO_PEERCRED — the kernel fills this in at connect() time,
 * so it cannot be spoofed by the client.
 */
export function getPeerCred(socket) {
    // Reach into Node's private handle to get the raw fd for the addon.
    const fd = socket._handle?.fd;
    if (typeof fd !== 'number' || fd < 0) {
        throw new Error('Cannot get file descriptor from socket');
    }
    return addon.getPeerCred(fd);
}
|
|
19
|
+
//# sourceMappingURL=peercred.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"peercred.js","sourceRoot":"","sources":["../../src/daemon/peercred.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,aAAa,CAAC;AAG5C,MAAM,OAAO,GAAG,aAAa,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAQ/C,mDAAmD;AACnD,MAAM,KAAK,GAA0C,OAAO,CAAC,mCAAmC,CAAC,CAAC;AAElG;;;;;;GAMG;AACH,MAAM,UAAU,WAAW,CAAC,MAAc;IACxC,MAAM,MAAM,GAAI,MAAc,CAAC,OAAO,CAAC;IACvC,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,CAAC,EAAE,KAAK,QAAQ,IAAI,MAAM,CAAC,EAAE,GAAG,CAAC,EAAE,CAAC;QAC9D,MAAM,IAAI,KAAK,CAAC,wCAAwC,CAAC,CAAC;IAC5D,CAAC;IACD,OAAO,KAAK,CAAC,WAAW,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC;AACtC,CAAC"}
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import type { Database } from './db.js';
|
|
2
|
+
import type { ClaudeRunner } from './runner.js';
|
|
3
|
+
/**
 * Coordinates draining of per-wiki job queues (see start/notify below).
 */
export declare class QueueManager {
    /** Persistence layer holding queued jobs. */
    private db;
    /** Runs individual jobs — presumably claude invocations; see runner.js. */
    private runner;
    /** Interval for periodic auto-lint scheduling — units not visible here; TODO confirm. */
    private autoLintInterval;
    /** Per-wiki drain state. */
    private wikis;
    /** Set during stop() so drain loops wind down. */
    private shuttingDown;
    constructor(db: Database, runner: ClaudeRunner, autoLintInterval?: number);
    /**
     * Start draining queues. Called on daemon startup.
     * Kicks off drain loops for all wikis with pending jobs.
     */
    start(): void;
    /**
     * Signal that a wiki has new work to process.
     * If the wiki isn't already draining, starts a drain loop.
     */
    notify(wikiId: string): void;
    /**
     * Graceful shutdown. Waits for active jobs to finish.
     */
    stop(): Promise<void>;
    private drainWiki;
    private scheduleAutoLint;
}
|