gitnexushub 0.2.12 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/api.d.ts +31 -0
- package/dist/api.js +53 -2
- package/dist/cli-helpers.d.ts +23 -0
- package/dist/cli-helpers.js +57 -0
- package/dist/connect-command.d.ts +29 -0
- package/dist/connect-command.js +169 -0
- package/dist/content.js +62 -62
- package/dist/editors/claude-code.js +14 -1
- package/dist/editors/cursor.js +14 -3
- package/dist/editors/opencode.js +14 -3
- package/dist/editors/windsurf.js +14 -3
- package/dist/fingerprint.d.ts +11 -0
- package/dist/fingerprint.js +18 -0
- package/dist/hooks-installer.d.ts +33 -0
- package/dist/hooks-installer.js +114 -0
- package/dist/index.js +23 -171
- package/dist/registry.d.ts +41 -0
- package/dist/registry.js +92 -0
- package/dist/sync-command.d.ts +16 -0
- package/dist/sync-command.js +169 -0
- package/dist/tarball.d.ts +17 -0
- package/dist/tarball.js +75 -0
- package/hooks/gitnexus-enterprise-hook.cjs +415 -0
- package/package.json +58 -53
- package/skills/gitnexus-guide.md +64 -64
- package/skills/gitnexus-refactoring.md +121 -121
|
@@ -0,0 +1,169 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* `gnx sync` — push local working-tree state to the hub for re-indexing.
|
|
3
|
+
*
|
|
4
|
+
* Flow:
|
|
5
|
+
* 1. Resolve cwd → registered repo (from connect-registry.json)
|
|
6
|
+
* 2. If not registered, look up via hub API by GitHub remote, save to registry
|
|
7
|
+
* 3. If still not found, tell user to add via hub UI
|
|
8
|
+
* 4. Compute local HEAD and dirty state
|
|
9
|
+
* 5. Short-circuit if hub already has this commit AND tree is clean
|
|
10
|
+
* 6. Build tarball, upload, optionally poll for completion
|
|
11
|
+
*/
|
|
12
|
+
import { execFileSync } from 'child_process';
|
|
13
|
+
import pc from 'picocolors';
|
|
14
|
+
import { loadConfig } from './config.js';
|
|
15
|
+
import { HubAPI } from './api.js';
|
|
16
|
+
import { resolveCwdToRepo, upsertRegistryEntry } from './registry.js';
|
|
17
|
+
import { buildTarballStream } from './tarball.js';
|
|
18
|
+
import { isGitRepo, getGitRemoteUrl, parseGitRemote } from './project.js';
|
|
19
|
+
/** Current commit hash of HEAD in the working tree at `cwd`. */
function getLocalHead(cwd) {
    const out = execFileSync('git', ['rev-parse', 'HEAD'], { cwd });
    return out.toString('utf-8').trim();
}
|
|
22
|
+
/** True when the working tree at `cwd` has uncommitted changes. */
function isDirty(cwd) {
    // `git status --porcelain` prints nothing at all for a clean tree.
    const status = execFileSync('git', ['status', '--porcelain'], { cwd })
        .toString('utf-8')
        .trim();
    return status !== '';
}
|
|
26
|
+
/** Name of the currently checked-out branch (or 'HEAD' when detached, per git). */
function getCurrentBranch(cwd) {
    const branch = execFileSync('git', ['rev-parse', '--abbrev-ref', 'HEAD'], { cwd });
    return branch.toString('utf-8').trim();
}
|
|
31
|
+
/**
 * Terse output helpers — NO internal-implementation leakage in user-visible strings.
 * ok   → stdout, green check (the success line callers may parse)
 * fail → stderr, red cross (hard failures)
 * info → stderr, yellow clock (transient / retry-later states)
 */
const ok = (msg) => console.log(' ' + pc.green('✓') + ' ' + msg);
const fail = (msg) => console.error(' ' + pc.red('✗') + ' ' + msg);
const info = (msg) => console.error(' ' + pc.yellow('⏱') + ' ' + msg);
|
|
37
|
+
/**
 * Entry point for `gnx sync` — push local working-tree state to the hub
 * for re-indexing. Exits the process (code 1) on every failure path.
 *
 * @param {{hub?: string, wait?: boolean}} opts - CLI flags: `--hub` URL
 *   override and `--wait` to poll until indexing completes.
 */
export async function runSync(opts) {
    const cwd = process.cwd();
    if (!isGitRepo()) {
        fail('Not inside a git repository');
        process.exit(1);
    }
    const config = await loadConfig();
    const token = config.hubToken;
    const hubUrl = opts.hub || config.hubUrl;
    if (!token || !hubUrl) {
        fail('Not connected. Run `gnx connect <token>` first.');
        process.exit(1);
    }
    const api = new HubAPI(hubUrl, token);
    // Erase the in-place "Syncing..." progress line written below.
    const clearProgressLine = () => process.stdout.write('\r' + ' '.repeat(60) + '\r');
    // Step 1: Resolve cwd → registered repo
    let entry = await resolveCwdToRepo(cwd);
    // Step 2: If not registered locally, try to match by GitHub remote
    if (!entry) {
        const remoteUrl = getGitRemoteUrl();
        const fullName = remoteUrl ? parseGitRemote(remoteUrl) : null;
        if (!fullName) {
            fail('Could not determine the GitHub remote for this directory');
            process.exit(1);
        }
        const hubRepos = await api
            .listRepos()
            .catch(() => []);
        const matched = hubRepos.find((r) => r.fullName === fullName);
        if (!matched) {
            fail(`${pc.bold(fullName)} is not indexed on the hub.`);
            console.error(' Add it at: ' + pc.cyan(hubUrl));
            process.exit(1);
        }
        entry = {
            localPath: cwd,
            fullName: matched.fullName,
            hubRepoId: matched.id,
        };
        await upsertRegistryEntry(entry);
    }
    // entry is guaranteed non-null at this point (process.exit above on the failure path)
    const repo = entry;
    // Step 3: Compute local state
    const localHead = getLocalHead(cwd);
    const dirty = isDirty(cwd);
    // Step 4: Short-circuit via hub meta (saves an upload if clean and matching)
    try {
        const meta = await api.meta(repo.hubRepoId);
        if (meta.last_commit === localHead && !dirty) {
            ok('Up to date');
            return;
        }
    }
    catch {
        // Meta fetch failed; proceed with upload anyway (network hiccup, etc.)
    }
    // Step 5: Build tarball and upload
    process.stdout.write(' ' + pc.cyan('→') + ' Syncing...');
    const tarStream = buildTarballStream(cwd, { includeDirty: true });
    let result;
    try {
        result = await api.sync(repo.hubRepoId, {
            metadata: {
                local_head: localHead,
                local_branch: getCurrentBranch(cwd),
                dirty,
            },
            tarball: tarStream,
        });
    }
    catch (err) {
        clearProgressLine();
        // Differentiate retry-later states from real failures so an LLM
        // (or human) reading the transcript can tell whether sync is
        // broken or just deferred.
        if (err?.statusCode === 503) {
            info('Hub busy — retry in a few minutes.');
            process.exit(1);
        }
        if (err?.statusCode === 409) {
            info('Indexing in progress — retry shortly.');
            process.exit(1);
        }
        fail('Sync failed: ' + (err.message || String(err)));
        process.exit(1);
    }
    clearProgressLine();
    // Record the sync locally so later invocations know what was pushed.
    await upsertRegistryEntry({
        ...repo,
        lastSyncedSha: localHead,
        lastSyncedAt: new Date().toISOString(),
    });
    if (result.status === 'already_fresh') {
        ok('Up to date');
        return;
    }
    if (!opts.wait || !result.job_id) {
        ok('Synced');
        return;
    }
    // Step 6: Poll for completion (only with --wait and a job id)
    const jobId = result.job_id;
    const spinnerFrames = ['⠋', '⠙', '⠹', '⠸', '⠼', '⠴', '⠦', '⠧', '⠇', '⠏'];
    let frame = 0;
    // eslint-disable-next-line no-constant-condition
    while (true) {
        await new Promise((r) => setTimeout(r, 2000));
        try {
            const status = await api.syncStatus(repo.hubRepoId, jobId);
            if (status.status === 'done') {
                clearProgressLine();
                ok('Synced');
                return;
            }
            if (status.status === 'failed') {
                clearProgressLine();
                fail('Indexing failed');
                process.exit(1);
            }
            process.stdout.write(`\r ${pc.cyan(spinnerFrames[frame % spinnerFrames.length])} Syncing...`);
            frame++;
        }
        catch {
            // Network hiccup, keep polling
        }
    }
}
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Tarball builder for gnx sync uploads.
|
|
3
|
+
*
|
|
4
|
+
* Uses `git ls-files` to pick files so .gitignore is respected automatically.
|
|
5
|
+
* Optionally includes untracked files via `git ls-files --others --exclude-standard`.
|
|
6
|
+
* Returns a tar-stream Readable so callers can pipe through gzip and HTTP upload
|
|
7
|
+
* without buffering the whole tarball in memory.
|
|
8
|
+
*/
|
|
9
|
+
import { Readable } from 'stream';
|
|
10
|
+
export interface TarballOptions {
    /**
     * When true, also include untracked files (via
     * `git ls-files --others --exclude-standard`, i.e. not-ignored ones).
     */
    includeDirty: boolean;
}
/**
 * Build a tar-stream Readable containing the repo's tracked files
 * (and optionally untracked files).
 *
 * @param repoRoot - path to the repository working tree
 * @param opts - see {@link TarballOptions}
 * @returns a Readable tar stream suitable for piping through gzip/HTTP
 */
export declare function buildTarballStream(repoRoot: string, opts: TarballOptions): Readable;
|
package/dist/tarball.js
ADDED
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Tarball builder for gnx sync uploads.
|
|
3
|
+
*
|
|
4
|
+
* Uses `git ls-files` to pick files so .gitignore is respected automatically.
|
|
5
|
+
* Optionally includes untracked files via `git ls-files --others --exclude-standard`.
|
|
6
|
+
* Returns a tar-stream Readable so callers can pipe through gzip and HTTP upload
|
|
7
|
+
* without buffering the whole tarball in memory.
|
|
8
|
+
*/
|
|
9
|
+
import fsp from 'fs/promises';
|
|
10
|
+
import path from 'path';
|
|
11
|
+
import { execFileSync } from 'child_process';
|
|
12
|
+
import tar from 'tar-stream';
|
|
13
|
+
/**
 * Paths stripped from every upload regardless of .gitignore
 * (belt-and-suspenders). `git ls-files` already omits .git itself; these
 * cover edge cases where users tracked vendor/build directories they
 * didn't mean to ship.
 */
const HARD_EXCLUDES = [
    /(\/|^)node_modules\//, // node_modules at any depth
    /^\.git\//, // defensive: should never appear in ls-files output
    /^\.venv\//, // Python virtualenv at the repo root
    /^target\//, // Rust/Maven build output at the repo root
    /^dist\//, // JS build output at the repo root
    /^build\//, // generic build output at the repo root
];
|
|
26
|
+
/**
 * List repo-relative paths to include in the tarball. Tracked files come
 * from `git ls-files` (so .gitignore is honored); when `includeDirty` is
 * set, untracked-but-not-ignored files are appended. HARD_EXCLUDES are
 * filtered out last.
 */
function listFiles(repoRoot, includeDirty) {
    // NUL-separated output sidesteps quoting issues with unusual filenames.
    const runLsFiles = (extraArgs) => execFileSync('git', ['ls-files', '-z', ...extraArgs], { cwd: repoRoot })
        .toString('utf-8')
        .split('\0')
        .filter((entry) => entry.length > 0);
    const tracked = runLsFiles([]);
    const untracked = includeDirty ? runLsFiles(['--others', '--exclude-standard']) : [];
    return [...tracked, ...untracked].filter((rel) => !HARD_EXCLUDES.some((re) => re.test(rel)));
}
|
|
43
|
+
/**
 * Build a tar-stream Readable containing the repo's tracked files
 * (and optionally untracked files).
 *
 * Entries are written sequentially, awaiting tar-stream's per-entry
 * callback before reading the next file. The previous loop queued every
 * entry without waiting, which buffered the whole tarball in memory when
 * the consumer was slower than disk — contradicting the module's
 * streaming design.
 *
 * @param {string} repoRoot - path to the repository working tree
 * @param {{includeDirty: boolean}} opts - include untracked (not ignored) files
 * @returns tar-stream pack (a Readable)
 */
export function buildTarballStream(repoRoot, opts) {
    const pack = tar.pack();
    const files = listFiles(repoRoot, opts.includeDirty);
    void (async () => {
        for (const rel of files) {
            let stat;
            let content;
            try {
                const abs = path.join(repoRoot, rel);
                stat = await fsp.stat(abs);
                if (!stat.isFile())
                    continue;
                content = await fsp.readFile(abs);
            }
            catch {
                // Skip unreadable files silently — git may have listed files that
                // vanished or lack read permission. A missing file is not a sync failure.
                continue;
            }
            // Await the entry callback so the pack's internal buffering is
            // bounded; an entry-level error rejects and destroys the stream below
            // instead of being swallowed as a "missing file".
            await new Promise((resolve, reject) => {
                pack.entry({ name: rel, size: content.length, mode: stat.mode & 0o777 }, content, (err) => (err ? reject(err) : resolve()));
            });
        }
        pack.finalize();
    })().catch((err) => {
        try {
            pack.destroy(err);
        }
        catch {
            /* ignore */
        }
    });
    return pack;
}
|
|
@@ -0,0 +1,415 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
/**
|
|
3
|
+
* GitNexus Enterprise Hook
|
|
4
|
+
*
|
|
5
|
+
* PreToolUse - POST /api/repos/:id/augment, prepend text to tool result
|
|
6
|
+
* PostToolUse - after git mutations, compare local HEAD vs hub last_commit,
|
|
7
|
+
* emit staleness hint to nudge the agent toward `gnx sync`
|
|
8
|
+
*
|
|
9
|
+
* Resolves cwd -> hub_repo_id via ~/.gitnexus/connect-registry.json.
|
|
10
|
+
* Reads auth from ~/.gitnexus/config.json (written by `gnx connect`).
|
|
11
|
+
*
|
|
12
|
+
* Hard 1500ms timeout on all HTTP calls. Silent failure on error so the
|
|
13
|
+
* original tool runs unchanged. GITNEXUS_NO_AUGMENT=1 disables entirely.
|
|
14
|
+
*
|
|
15
|
+
* CJS, runs as `node gitnexus-enterprise-hook.cjs` without a build step.
|
|
16
|
+
* Dependencies: Node stdlib only (fs, path, os, http, https, child_process,
|
|
17
|
+
* url).
|
|
18
|
+
*/
|
|
19
|
+
|
|
20
|
+
const fs = require('fs');
|
|
21
|
+
const path = require('path');
|
|
22
|
+
const os = require('os');
|
|
23
|
+
const http = require('http');
|
|
24
|
+
const https = require('https');
|
|
25
|
+
const { spawnSync } = require('child_process');
|
|
26
|
+
const { URL } = require('url');
|
|
27
|
+
|
|
28
|
+
// Hard cap on every hub HTTP call so the hook never stalls the editor.
const HOOK_TIMEOUT_MS = 1500;
// Auth token + hub URL, written by `gnx connect`.
const CONFIG_PATH = path.join(os.homedir(), '.gitnexus', 'config.json');
// Maps local checkout paths to hub repo ids.
const REGISTRY_PATH = path.join(os.homedir(), '.gitnexus', 'connect-registry.json');
// Per-repo cached /meta responses, one JSON file per repo id.
const META_CACHE_DIR = path.join(os.homedir(), '.gitnexus', 'meta-cache');
// Freshness window for cached /meta responses.
const META_CACHE_TTL_MS = 30_000;
|
|
33
|
+
|
|
34
|
+
/**
 * Read the hook event JSON from stdin (file descriptor 0). Claude Code /
 * Cursor / OpenCode all send the event payload as a single blob on stdin.
 * Any read or parse failure yields an empty object so downstream code
 * falls through to the silent-failure path.
 */
function readInput() {
  let raw;
  try {
    raw = fs.readFileSync(0, 'utf-8');
  } catch {
    return {};
  }
  try {
    return JSON.parse(raw);
  } catch {
    return {};
  }
}
|
|
47
|
+
|
|
48
|
+
/** Parse ~/.gitnexus/config.json, or null when missing/unreadable/invalid. */
function readConfig() {
  try {
    const raw = fs.readFileSync(CONFIG_PATH, 'utf-8');
    return JSON.parse(raw);
  } catch {
    return null;
  }
}
|
|
55
|
+
|
|
56
|
+
/** Registry entries from ~/.gitnexus/connect-registry.json; [] on any failure. */
function readRegistry() {
  try {
    const parsed = JSON.parse(fs.readFileSync(REGISTRY_PATH, 'utf-8'));
    if (Array.isArray(parsed.entries)) {
      return parsed.entries;
    }
    return [];
  } catch {
    return [];
  }
}
|
|
64
|
+
|
|
65
|
+
/**
 * Resolve the editor's current working directory to the best-matching
 * registry entry: deepest localPath that is an ancestor of (or equal to)
 * cwd, on a path-separator boundary.
 *
 * Mirrors gitnexus-connect/src/registry.ts resolveCwdToRepo
 * (longest-path match + symlink normalization) but reimplemented with
 * sync fs APIs so the hook has no runtime dependency on the compiled
 * connect bundle. On Windows, paths are lowercased before comparison so
 * D:\foo and d:\foo match.
 */
function resolveCwdToRepo(cwd, entries) {
  if (!entries.length) return null;

  const caseFold = process.platform === 'win32';
  // Canonicalize: absolute, symlinks resolved when possible, case-folded on win32.
  const canon = (p) => {
    let real;
    try {
      real = fs.realpathSync(path.resolve(p));
    } catch {
      real = path.resolve(p);
    }
    return caseFold ? real.toLowerCase() : real;
  };

  const target = canon(cwd);
  let winner = null;
  let winnerLen = 0;

  for (const candidate of entries) {
    if (!candidate || !candidate.localPath || !candidate.hubRepoId) continue;
    const base = canon(candidate.localPath);
    const inside = target === base || target.startsWith(base + path.sep);
    if (inside && base.length > winnerLen) {
      winner = candidate;
      winnerLen = base.length;
    }
  }

  return winner;
}
|
|
111
|
+
|
|
112
|
+
/**
 * Best-effort extraction of a usable search pattern from the tool input;
 * null when the tool is not one we augment or no pattern can be found.
 *
 *  - Grep: the pattern field verbatim
 *  - Glob: first word-like token following a '*' or '/'
 *  - Bash: for rg/grep commands, the first bare token after the command
 *    name (flags are skipped; values of flags like -e/-A/--glob too)
 */
function extractPattern(toolName, toolInput) {
  if (toolName === 'Grep') {
    return (toolInput && toolInput.pattern) || null;
  }

  if (toolName === 'Glob') {
    const globPattern = (toolInput && toolInput.pattern) || '';
    const hit = globPattern.match(/[*\/]([a-zA-Z][a-zA-Z0-9_-]{2,})/);
    return hit ? hit[1] : null;
  }

  if (toolName === 'Bash') {
    const command = (toolInput && toolInput.command) || '';
    if (!/\brg\b|\bgrep\b/.test(command)) return null;

    // Flags whose following token is a value, not the search pattern.
    const valueFlags = new Set([
      '-e',
      '-f',
      '-m',
      '-A',
      '-B',
      '-C',
      '-g',
      '--glob',
      '-t',
      '--type',
      '--include',
      '--exclude',
    ]);

    let pastCommand = false;
    let consumeNext = false;
    for (const word of command.split(/\s+/)) {
      if (consumeNext) {
        consumeNext = false;
        continue;
      }
      if (!pastCommand) {
        // Keep scanning until we pass the rg/grep token itself.
        pastCommand = /\brg$|\bgrep$/.test(word);
        continue;
      }
      if (word.startsWith('-')) {
        if (valueFlags.has(word)) consumeNext = true;
        continue;
      }
      // First bare token decides the outcome — too short means give up.
      const bare = word.replace(/['"]/g, '');
      return bare.length >= 3 ? bare : null;
    }
  }

  return null;
}
|
|
174
|
+
|
|
175
|
+
/**
 * POST JSON with a hard 1500ms timeout. Resolves to
 *   {status: number, body: any} on success (body null when not JSON)
 *   null on any error / timeout
 *
 * Never throws so callers can use the short-circuit pattern
 *   const res = await httpPostJson(...); if (!res) return;
 * and rely on silent-failure semantics.
 */
function httpPostJson(urlStr, headers, body) {
  return new Promise((resolve) => {
    try {
      const u = new URL(urlStr);
      const transport = u.protocol === 'https:' ? https : http;
      const payload = JSON.stringify(body);
      const options = {
        method: 'POST',
        hostname: u.hostname,
        port: u.port || (u.protocol === 'https:' ? 443 : 80),
        path: u.pathname + u.search,
        headers: Object.assign(
          {
            'Content-Type': 'application/json',
            'Content-Length': Buffer.byteLength(payload),
          },
          headers || {},
        ),
        timeout: HOOK_TIMEOUT_MS,
      };
      const req = transport.request(options, (res) => {
        const chunks = [];
        res.on('data', (c) => chunks.push(c));
        res.on('end', () => {
          try {
            resolve({ status: res.statusCode, body: JSON.parse(chunks.join('')) });
          } catch {
            // Non-JSON body: still report the status so callers can branch on it.
            resolve({ status: res.statusCode, body: null });
          }
        });
      });
      req.on('error', () => resolve(null));
      req.on('timeout', () => {
        try {
          req.destroy();
        } catch {
          /* ignore */
        }
        resolve(null);
      });
      req.write(payload);
      req.end();
    } catch {
      resolve(null);
    }
  });
}
|
|
233
|
+
|
|
234
|
+
/**
 * GET JSON with a hard 1500ms timeout. Same contract as httpPostJson,
 * except a non-JSON response body resolves to null rather than a
 * {status, body: null} envelope.
 */
function httpGetJson(urlStr, headers) {
  return new Promise((resolve) => {
    try {
      const u = new URL(urlStr);
      const transport = u.protocol === 'https:' ? https : http;
      const options = {
        method: 'GET',
        hostname: u.hostname,
        port: u.port || (u.protocol === 'https:' ? 443 : 80),
        path: u.pathname + u.search,
        headers: headers || {},
        timeout: HOOK_TIMEOUT_MS,
      };
      const req = transport.request(options, (res) => {
        const chunks = [];
        res.on('data', (c) => chunks.push(c));
        res.on('end', () => {
          try {
            resolve({ status: res.statusCode, body: JSON.parse(chunks.join('')) });
          } catch {
            resolve(null);
          }
        });
      });
      req.on('error', () => resolve(null));
      req.on('timeout', () => {
        try {
          req.destroy();
        } catch {
          /* ignore */
        }
        resolve(null);
      });
      req.end();
    } catch {
      resolve(null);
    }
  });
}
|
|
278
|
+
|
|
279
|
+
/**
 * Emit the Claude Code / Cursor / OpenCode hookSpecificOutput envelope on
 * stdout. The editor prepends `message` to the tool's output before
 * handing control back to the model.
 */
function sendResponse(event, message) {
  const envelope = {
    hookSpecificOutput: { hookEventName: event, additionalContext: message },
  };
  process.stdout.write(JSON.stringify(envelope) + '\n');
}
|
|
291
|
+
|
|
292
|
+
/**
 * PreToolUse: for search-style tools (Grep/Glob/Bash), POST the extracted
 * pattern to the hub's /augment endpoint and, when it returns text,
 * surface it to the editor via the hookSpecificOutput envelope.
 * Any failure results in no output (silent-failure semantics).
 */
async function handlePreToolUse(input, config, entry) {
  const toolName = input.tool_name || '';
  const augmentable = toolName === 'Grep' || toolName === 'Glob' || toolName === 'Bash';
  if (!augmentable) return;

  const pattern = extractPattern(toolName, input.tool_input || {});
  if (!pattern || pattern.length < 3) return;

  const url = `${config.hubUrl}/api/repos/${entry.hubRepoId}/augment`;
  const res = await httpPostJson(url, { Authorization: `Bearer ${config.hubToken}` }, { pattern });
  if (!res || res.status !== 200 || !res.body || !res.body.text) return;

  const text = String(res.body.text).trim();
  if (text) {
    sendResponse('PreToolUse', text);
  }
}
|
|
310
|
+
|
|
311
|
+
/**
 * Read the cached /meta response for a repo; null when absent or older
 * than META_CACHE_TTL_MS. The cache keeps the hook from hammering /meta
 * on every git commit in a batch — editors can fire multiple PostToolUse
 * events in quick succession.
 */
function readMetaCache(repoId) {
  const cachePath = path.join(META_CACHE_DIR, `${repoId}.json`);
  try {
    const age = Date.now() - fs.statSync(cachePath).mtimeMs;
    if (age > META_CACHE_TTL_MS) return null;
    return JSON.parse(fs.readFileSync(cachePath, 'utf-8'));
  } catch {
    return null;
  }
}
|
|
327
|
+
|
|
328
|
+
/** Persist a /meta response to the cache dir; failures are ignored (best-effort). */
function writeMetaCache(repoId, meta) {
  try {
    fs.mkdirSync(META_CACHE_DIR, { recursive: true });
    const target = path.join(META_CACHE_DIR, `${repoId}.json`);
    fs.writeFileSync(target, JSON.stringify(meta));
  } catch {
    /* ignore — cache is best-effort */
  }
}
|
|
336
|
+
|
|
337
|
+
/**
 * PostToolUse: after a successful git history mutation run via Bash,
 * compare the local HEAD against the hub's last indexed commit and emit a
 * staleness hint nudging the agent toward `gnx sync`. All failure paths
 * return silently.
 */
async function handlePostToolUse(input, config, entry) {
  if (input.tool_name !== 'Bash') return;
  const cmd = (input.tool_input && input.tool_input.command) || '';
  // Only react to commands that can move HEAD. NOTE(review): this misses
  // forms like `git -C dir commit` where options precede the subcommand.
  if (!/\bgit\s+(commit|merge|rebase|cherry-pick|pull)(\s|$)/.test(cmd)) return;

  // Only nudge when the git command actually succeeded. Some editors
  // omit exit_code; treat missing as success so we don't silently
  // skip legitimate mutations.
  const exitCode = input.tool_output && input.tool_output.exit_code;
  if (exitCode !== undefined && exitCode !== 0) return;

  // Resolve the post-mutation HEAD; bail quietly if git is unavailable,
  // times out, or we're not in a repo (empty stdout).
  let localHead = '';
  try {
    const r = spawnSync('git', ['rev-parse', 'HEAD'], {
      cwd: input.cwd || process.cwd(),
      encoding: 'utf-8',
      timeout: 2000,
    });
    localHead = (r.stdout || '').trim();
  } catch {
    return;
  }
  if (!localHead) return;

  // Prefer the short-TTL cache; fall back to a live /meta fetch and
  // repopulate the cache on success.
  let meta = readMetaCache(entry.hubRepoId);
  if (!meta) {
    const res = await httpGetJson(`${config.hubUrl}/api/repos/${entry.hubRepoId}/meta`, {
      Authorization: `Bearer ${config.hubToken}`,
    });
    if (!res || res.status !== 200 || !res.body) return;
    meta = res.body;
    writeMetaCache(entry.hubRepoId, meta);
  }

  // Hub already indexed this commit: nothing to say.
  if (meta.last_commit === localHead) return;
  const shortOld = meta.last_commit ? String(meta.last_commit).slice(0, 7) : 'none';
  sendResponse(
    'PostToolUse',
    `GitNexus index is stale (last indexed: ${shortOld}). Run \`gnx sync\` to update.`,
  );
}
|
|
378
|
+
|
|
379
|
+
/**
 * Hook entry point: parse the stdin event, resolve auth + repo, then
 * dispatch to the Pre/PostToolUse handler. Every guard exits silently so
 * the original tool runs unchanged; handler errors surface on stderr only
 * when GITNEXUS_DEBUG is set.
 */
async function main() {
  // Global kill switch.
  if (process.env.GITNEXUS_NO_AUGMENT === '1') return;

  const input = readInput();
  const event = input.hook_event_name;
  if (event !== 'PreToolUse' && event !== 'PostToolUse') return;

  const config = readConfig();
  if (!config || !config.hubToken || !config.hubUrl) return;

  const entry = resolveCwdToRepo(input.cwd || process.cwd(), readRegistry());
  if (!entry) return;

  try {
    const handler = event === 'PreToolUse' ? handlePreToolUse : handlePostToolUse;
    await handler(input, config, entry);
  } catch (err) {
    if (process.env.GITNEXUS_DEBUG) {
      const msg = err && err.message ? err.message : String(err);
      process.stderr.write(`gitnexus hook error: ${msg.slice(0, 300)}\n`);
    }
  }
}
|
|
408
|
+
|
|
409
|
+
// Top-level invocation: never let the hook crash the editor. Fatal errors
// are reported on stderr only when GITNEXUS_DEBUG is set (truncated to
// 300 chars); otherwise the hook fails completely silently.
main().catch((err) => {
  if (process.env.GITNEXUS_DEBUG) {
    process.stderr.write(
      `gitnexus hook fatal: ${(err && err.message ? err.message : String(err)).slice(0, 300)}\n`,
    );
  }
});
|