claude-attribution 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +431 -0
- package/bin/claude-attribution +9 -0
- package/package.json +26 -0
- package/src/__tests__/differ.test.ts +250 -0
- package/src/attribution/checkpoint.ts +148 -0
- package/src/attribution/commit.ts +163 -0
- package/src/attribution/differ.ts +154 -0
- package/src/attribution/git-notes.ts +185 -0
- package/src/attribution/otel.ts +233 -0
- package/src/cli.ts +109 -0
- package/src/commands/pr.ts +164 -0
- package/src/export/pr-summary.ts +204 -0
- package/src/hooks/post-tool-use.ts +105 -0
- package/src/hooks/pre-tool-use.ts +95 -0
- package/src/hooks/stop.ts +33 -0
- package/src/hooks/subagent.ts +72 -0
- package/src/lib/hooks.ts +60 -0
- package/src/metrics/calculate.ts +21 -0
- package/src/metrics/collect.ts +369 -0
- package/src/metrics/mark-start.ts +40 -0
- package/src/metrics/transcript.ts +245 -0
- package/src/run.sh +25 -0
- package/src/setup/install.ts +321 -0
- package/src/setup/templates/hooks.json +57 -0
- package/src/setup/templates/metrics-command.md +27 -0
- package/src/setup/templates/post-commit.sh +4 -0
- package/src/setup/templates/pr-command.md +33 -0
- package/src/setup/templates/pre-push.sh +4 -0
- package/src/setup/templates/start-command.md +25 -0
- package/src/setup/uninstall.ts +175 -0
|
@@ -0,0 +1,164 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* claude-attribution pr — create a PR with AI metrics embedded automatically.
|
|
3
|
+
*
|
|
4
|
+
* Usage: claude-attribution pr [title] [--draft] [--base <branch>]
|
|
5
|
+
*
|
|
6
|
+
* If title is omitted, it is derived from the current branch name.
|
|
7
|
+
* Reads .github/PULL_REQUEST_TEMPLATE.md if present; otherwise uses a
|
|
8
|
+
* built-in minimal template. Metrics are injected at the
|
|
9
|
+
* <!-- claude-attribution metrics --> placeholder, or appended if missing.
|
|
10
|
+
*
|
|
11
|
+
* Requires `gh` (GitHub CLI) to be installed and authenticated.
|
|
12
|
+
*/
|
|
13
|
+
import { readFile, writeFile, unlink, mkdtemp } from "fs/promises";
|
|
14
|
+
import { existsSync } from "fs";
|
|
15
|
+
import { resolve, join } from "path";
|
|
16
|
+
import { execFile } from "child_process";
|
|
17
|
+
import { promisify } from "util";
|
|
18
|
+
import { tmpdir } from "os";
|
|
19
|
+
import { collectMetrics, renderMetrics } from "../metrics/collect.ts";
|
|
20
|
+
|
|
21
|
+
const execFileAsync = promisify(execFile);
|
|
22
|
+
|
|
23
|
+
const METRICS_PLACEHOLDER = "<!-- claude-attribution metrics -->";
|
|
24
|
+
|
|
25
|
+
const BUILTIN_TEMPLATE = `## Description
|
|
26
|
+
|
|
27
|
+
<!-- What does this PR do? -->
|
|
28
|
+
|
|
29
|
+
## Testing
|
|
30
|
+
|
|
31
|
+
<!-- How has this been tested? -->
|
|
32
|
+
|
|
33
|
+
## Checklist
|
|
34
|
+
|
|
35
|
+
- [ ] Code follows project coding standards
|
|
36
|
+
- [ ] Tests pass
|
|
37
|
+
|
|
38
|
+
${METRICS_PLACEHOLDER}
|
|
39
|
+
`;
|
|
40
|
+
|
|
41
|
+
/**
|
|
42
|
+
* Derive a PR title from the current branch name.
|
|
43
|
+
* "COMM-1234/add-user-auth" → "add user auth"
|
|
44
|
+
* "feature/my-great-feature" → "my great feature"
|
|
45
|
+
* "fix-null-pointer" → "fix null pointer"
|
|
46
|
+
*/
|
|
47
|
+
function titleFromBranch(branch: string): string {
|
|
48
|
+
// Strip leading ticket prefix (e.g. COMM-1234/, JIRA-99/)
|
|
49
|
+
const withoutTicket = branch.replace(/^[A-Z]+-\d+\//, "");
|
|
50
|
+
// Strip common prefixes like feature/, fix/, chore/
|
|
51
|
+
const withoutPrefix = withoutTicket.replace(
|
|
52
|
+
/^(feature|fix|chore|feat|refactor|docs|test)\//,
|
|
53
|
+
"",
|
|
54
|
+
);
|
|
55
|
+
// Replace hyphens and underscores with spaces
|
|
56
|
+
return withoutPrefix.replace(/[-_]/g, " ").trim();
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
async function getCurrentBranch(repoRoot: string): Promise<string> {
|
|
60
|
+
const { stdout } = await execFileAsync(
|
|
61
|
+
"git",
|
|
62
|
+
["rev-parse", "--abbrev-ref", "HEAD"],
|
|
63
|
+
{ cwd: repoRoot },
|
|
64
|
+
);
|
|
65
|
+
return stdout.trim();
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
async function checkGhInstalled(): Promise<boolean> {
|
|
69
|
+
try {
|
|
70
|
+
await execFileAsync("gh", ["--version"]);
|
|
71
|
+
return true;
|
|
72
|
+
} catch {
|
|
73
|
+
return false;
|
|
74
|
+
}
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
async function main() {
|
|
78
|
+
const repoRoot = resolve(process.cwd());
|
|
79
|
+
|
|
80
|
+
// Parse args: [title] [--draft] [--base <branch>]
|
|
81
|
+
const args = process.argv.slice(2);
|
|
82
|
+
let title: string | undefined;
|
|
83
|
+
let draft = false;
|
|
84
|
+
let base: string | undefined;
|
|
85
|
+
|
|
86
|
+
for (let i = 0; i < args.length; i++) {
|
|
87
|
+
if (args[i] === "--draft") {
|
|
88
|
+
draft = true;
|
|
89
|
+
} else if (args[i] === "--base" && i + 1 < args.length) {
|
|
90
|
+
base = args[++i];
|
|
91
|
+
} else if (!args[i].startsWith("--")) {
|
|
92
|
+
title = args[i];
|
|
93
|
+
}
|
|
94
|
+
}
|
|
95
|
+
|
|
96
|
+
// Derive title from branch name if not provided
|
|
97
|
+
if (!title) {
|
|
98
|
+
const branch = await getCurrentBranch(repoRoot);
|
|
99
|
+
title = titleFromBranch(branch);
|
|
100
|
+
if (!title) {
|
|
101
|
+
console.error("Error: Could not derive PR title from branch name.");
|
|
102
|
+
console.error(
|
|
103
|
+
'Usage: claude-attribution pr "feat: my title" [--draft] [--base <branch>]',
|
|
104
|
+
);
|
|
105
|
+
process.exit(1);
|
|
106
|
+
}
|
|
107
|
+
console.log(`Using title derived from branch: "${title}"`);
|
|
108
|
+
}
|
|
109
|
+
|
|
110
|
+
// Check gh is available
|
|
111
|
+
if (!(await checkGhInstalled())) {
|
|
112
|
+
console.error("Error: GitHub CLI (gh) is not installed or not on PATH.");
|
|
113
|
+
console.error("Install it from: https://cli.github.com");
|
|
114
|
+
process.exit(1);
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
// Collect metrics
|
|
118
|
+
console.log("Collecting metrics...");
|
|
119
|
+
const metricsData = await collectMetrics(undefined, repoRoot);
|
|
120
|
+
const metricsBlock = renderMetrics(metricsData);
|
|
121
|
+
|
|
122
|
+
// Load PR template
|
|
123
|
+
const templatePath = join(repoRoot, ".github", "PULL_REQUEST_TEMPLATE.md");
|
|
124
|
+
let template = existsSync(templatePath)
|
|
125
|
+
? await readFile(templatePath, "utf8")
|
|
126
|
+
: BUILTIN_TEMPLATE;
|
|
127
|
+
|
|
128
|
+
// Inject metrics at placeholder or append
|
|
129
|
+
let body: string;
|
|
130
|
+
if (template.includes(METRICS_PLACEHOLDER)) {
|
|
131
|
+
body = template.replace(METRICS_PLACEHOLDER, metricsBlock);
|
|
132
|
+
} else {
|
|
133
|
+
body = template.trimEnd() + "\n\n" + metricsBlock;
|
|
134
|
+
}
|
|
135
|
+
|
|
136
|
+
// Write body to a temp file (avoids shell quoting issues)
|
|
137
|
+
const tmpDir = await mkdtemp(join(tmpdir(), "claude-attribution-pr-"));
|
|
138
|
+
const bodyFile = join(tmpDir, "pr-body.md");
|
|
139
|
+
try {
|
|
140
|
+
await writeFile(bodyFile, body);
|
|
141
|
+
|
|
142
|
+
// Build gh pr create args
|
|
143
|
+
const ghArgs = ["pr", "create", "--title", title, "--body-file", bodyFile];
|
|
144
|
+
if (draft) ghArgs.push("--draft");
|
|
145
|
+
if (base) ghArgs.push("--base", base);
|
|
146
|
+
|
|
147
|
+
console.log("Creating PR...");
|
|
148
|
+
const { stdout } = await execFileAsync("gh", ghArgs, { cwd: repoRoot });
|
|
149
|
+
const prUrl = stdout.trim();
|
|
150
|
+
console.log(`\n✓ PR created: ${prUrl}`);
|
|
151
|
+
} catch (err) {
|
|
152
|
+
const msg = err instanceof Error ? err.message : String(err);
|
|
153
|
+
console.error(`Error creating PR: ${msg}`);
|
|
154
|
+
process.exit(1);
|
|
155
|
+
} finally {
|
|
156
|
+
await unlink(bodyFile).catch(() => {});
|
|
157
|
+
await execFileAsync("rmdir", [tmpDir]).catch(() => {});
|
|
158
|
+
}
|
|
159
|
+
}
|
|
160
|
+
|
|
161
|
+
main().catch((err) => {
|
|
162
|
+
console.error("Error:", err);
|
|
163
|
+
process.exit(1);
|
|
164
|
+
});
|
|
@@ -0,0 +1,204 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* PR-level attribution aggregator + Datadog push.
|
|
3
|
+
*
|
|
4
|
+
* Run from GitHub Actions on PR merge. Reads attribution git notes for the
|
|
5
|
+
* PR's commits, aggregates to a PR-level payload, and pushes metrics + an
|
|
6
|
+
* event to Datadog using the v2 REST API.
|
|
7
|
+
*
|
|
8
|
+
* Required env (supplied by GHA workflow):
|
|
9
|
+
* PR_NUMBER — PR number (e.g. "42")
|
|
10
|
+
* GITHUB_REPOSITORY — "owner/repo"
|
|
11
|
+
* PR_AUTHOR — GitHub login of PR author
|
|
12
|
+
* DATADOG_API_KEY — Datadog API key (org-level GHA secret)
|
|
13
|
+
*
|
|
14
|
+
* Optional env:
|
|
15
|
+
* DATADOG_SITE — Datadog intake site (default: "datadoghq.com")
|
|
16
|
+
* GITHUB_BASE_REF — base branch name for tagging (default: "main")
|
|
17
|
+
*
|
|
18
|
+
* If DATADOG_API_KEY is unset the script runs in dry-run mode: it prints
|
|
19
|
+
* the payload as JSON to stdout and exits 0. This lets you test the script
|
|
20
|
+
* locally without a key.
|
|
21
|
+
*/
|
|
22
|
+
import { resolve } from "path";
|
|
23
|
+
import {
|
|
24
|
+
listNotes,
|
|
25
|
+
readNote,
|
|
26
|
+
getBranchCommitShas,
|
|
27
|
+
} from "../attribution/git-notes.ts";
|
|
28
|
+
import {
|
|
29
|
+
aggregateTotals,
|
|
30
|
+
type AttributionResult,
|
|
31
|
+
type FileAttribution,
|
|
32
|
+
} from "../attribution/differ.ts";
|
|
33
|
+
|
|
34
|
+
/** A single timestamped value in a Datadog metric series. */
interface MetricPoint {
  // Unix epoch seconds (see Math.floor(Date.now() / 1000) in main).
  timestamp: number;
  value: number;
}
|
|
38
|
+
|
|
39
|
+
/** One series in the Datadog v2 metrics submission payload. */
interface MetricSeries {
  metric: string;
  /** 3 = gauge */
  type: 3;
  points: MetricPoint[];
  tags: string[];
}
|
|
46
|
+
|
|
47
|
+
/** Request body for the Datadog v1 events API. */
interface DatadogEventPayload {
  title: string;
  text: string;
  tags: string[];
  // Only informational events are emitted by this script.
  alert_type: "info";
}
|
|
53
|
+
|
|
54
|
+
async function pushMetrics(
|
|
55
|
+
series: MetricSeries[],
|
|
56
|
+
apiKey: string,
|
|
57
|
+
site: string,
|
|
58
|
+
): Promise<void> {
|
|
59
|
+
const res = await fetch(`https://api.${site}/api/v2/series`, {
|
|
60
|
+
method: "POST",
|
|
61
|
+
headers: { "Content-Type": "application/json", "DD-API-KEY": apiKey },
|
|
62
|
+
body: JSON.stringify({ series }),
|
|
63
|
+
});
|
|
64
|
+
if (!res.ok) {
|
|
65
|
+
throw new Error(`Datadog metrics API ${res.status}: ${await res.text()}`);
|
|
66
|
+
}
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
async function pushEvent(
|
|
70
|
+
payload: DatadogEventPayload,
|
|
71
|
+
apiKey: string,
|
|
72
|
+
site: string,
|
|
73
|
+
): Promise<void> {
|
|
74
|
+
const res = await fetch(`https://api.${site}/api/v1/events`, {
|
|
75
|
+
method: "POST",
|
|
76
|
+
headers: { "Content-Type": "application/json", "DD-API-KEY": apiKey },
|
|
77
|
+
body: JSON.stringify(payload),
|
|
78
|
+
});
|
|
79
|
+
if (!res.ok) {
|
|
80
|
+
throw new Error(`Datadog events API ${res.status}: ${await res.text()}`);
|
|
81
|
+
}
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
/**
 * Aggregate per-commit attribution notes for the current PR into a single
 * payload and push metrics + an event to Datadog. When DATADOG_API_KEY is
 * unset, prints the payload as JSON and exits 0 (dry run).
 */
async function main() {
  const repoRoot = resolve(process.cwd());

  // Environment supplied by the GitHub Actions workflow (see file header).
  const prNumber = process.env.PR_NUMBER ?? "";
  const repo = process.env.GITHUB_REPOSITORY ?? "";
  const author = process.env.PR_AUTHOR ?? "";
  const baseRef = process.env.GITHUB_BASE_REF ?? "main";
  const apiKey = process.env.DATADOG_API_KEY ?? "";
  const site = process.env.DATADOG_SITE ?? "datadoghq.com";
  const dryRun = !apiKey;

  // Gather commits on this branch that have attribution notes
  const [allShas, branchShas] = await Promise.all([
    listNotes(repoRoot),
    getBranchCommitShas(repoRoot),
  ]);
  const branchSet = new Set(branchShas);
  // When branch detection yields no commits, fall back to ALL noted commits.
  const shasToRead =
    branchShas.length > 0
      ? allShas.filter((sha) => branchSet.has(sha))
      : allShas;

  // Read notes with concurrency limit to avoid process storms on large PRs
  const CONCURRENCY = 8;
  const notes: Awaited<ReturnType<typeof readNote>>[] = [];
  for (let i = 0; i < shasToRead.length; i += CONCURRENCY) {
    const batch = shasToRead.slice(i, i + CONCURRENCY);
    notes.push(
      ...(await Promise.all(batch.map((sha) => readNote(repoRoot, sha)))),
    );
  }
  // Drop commits whose note is missing/unreadable (readNote returned null).
  const results = notes.filter((n): n is AttributionResult => n !== null);

  if (results.length === 0) {
    console.log(
      "[pr-summary] No attribution notes found for this PR — skipping Datadog push.",
    );
    process.exit(0);
  }

  // Last-wins per file: sort ascending so later commits overwrite earlier ones.
  // Prevents line-count inflation when the same file appears in multiple commits.
  const sorted = [...results].sort(
    (a, b) => new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime(),
  );
  const lastSeenByFile = new Map<string, FileAttribution>();
  for (const result of sorted) {
    for (const file of result.files) {
      lastSeenByFile.set(file.path, file);
    }
  }
  const { ai, human, total, pctAi } = aggregateTotals([
    ...lastSeenByFile.values(),
  ]);

  // Use branch from the most recent note; fall back to env var
  // NOTE(review): `results` is in note-listing order, not timestamp order —
  // `sorted[sorted.length - 1]` may be the intended "most recent"; confirm.
  const branch = results[results.length - 1]?.branch ?? baseRef;

  // Common tag set applied to every series and the event.
  const tags = [
    `repo:${repo}`,
    `pr:${prNumber}`,
    `branch:${branch}`,
    `author:${author}`,
    `tool:claude-code`,
  ];
  // Datadog expects Unix epoch seconds.
  const timestamp = Math.floor(Date.now() / 1000);

  // Four gauges (type 3): AI lines, human lines, total lines, and % AI.
  const series: MetricSeries[] = [
    {
      metric: "claude_attribution.ai_lines",
      type: 3,
      points: [{ timestamp, value: ai }],
      tags,
    },
    {
      metric: "claude_attribution.human_lines",
      type: 3,
      points: [{ timestamp, value: human }],
      tags,
    },
    {
      metric: "claude_attribution.total_lines",
      type: 3,
      points: [{ timestamp, value: total }],
      tags,
    },
    {
      metric: "claude_attribution.pct_ai",
      type: 3,
      points: [{ timestamp, value: pctAi }],
      tags,
    },
  ];

  const event: DatadogEventPayload = {
    title: `PR #${prNumber} merged — ${pctAi}% AI (claude-code)`,
    text: `repo: ${repo}\nbranch: ${branch}\nai: ${ai} / human: ${human} / total: ${total}`,
    tags,
    alert_type: "info",
  };

  if (dryRun) {
    console.log("[pr-summary] DRY RUN — DATADOG_API_KEY not set. Would push:");
    console.log(JSON.stringify({ series, event }, null, 2));
    process.exit(0);
  }

  // Metrics and event are independent; push them in parallel.
  await Promise.all([
    pushMetrics(series, apiKey, site),
    pushEvent(event, apiKey, site),
  ]);

  console.log(
    `[pr-summary] Pushed to Datadog: PR #${prNumber} — ${pctAi}% AI, ${ai}/${total} lines`,
  );
}

main().catch((err) => {
  console.error("[pr-summary] Error:", err);
  process.exit(1);
});
|
|
@@ -0,0 +1,105 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* PostToolUse hook — snapshot file AFTER Claude edits + log all tool calls.
|
|
3
|
+
*
|
|
4
|
+
* Registered in .claude/settings.json for matcher: ".*"
|
|
5
|
+
* (Runs for every tool, but only saves checkpoints for write tools.)
|
|
6
|
+
*
|
|
7
|
+
* Claude Code passes a JSON payload via stdin:
|
|
8
|
+
* {
|
|
9
|
+
* session_id: string,
|
|
10
|
+
* tool_name: string,
|
|
11
|
+
* tool_input: { file_path?: string, ... },
|
|
12
|
+
* tool_response: unknown
|
|
13
|
+
* }
|
|
14
|
+
*/
|
|
15
|
+
import { resolve, join } from "path";
|
|
16
|
+
import { mkdir, appendFile } from "fs/promises";
|
|
17
|
+
import { saveCheckpoint, SESSION_ID_RE } from "../attribution/checkpoint.ts";
|
|
18
|
+
import { readStdin, WRITE_TOOLS, getFilePath } from "../lib/hooks.ts";
|
|
19
|
+
import {
|
|
20
|
+
otelEndpoint,
|
|
21
|
+
otelHeaders,
|
|
22
|
+
readOtelContext,
|
|
23
|
+
buildToolCallSpan,
|
|
24
|
+
exportOtlpSpans,
|
|
25
|
+
} from "../attribution/otel.ts";
|
|
26
|
+
|
|
27
|
+
/** Shape of the JSON payload Claude Code writes to this hook's stdin. */
interface HookPayload {
  session_id: string;
  tool_name: string;
  // Tool arguments; write tools carry a file path (extracted via getFilePath).
  tool_input: Record<string, unknown>;
  tool_response?: unknown;
}
|
|
33
|
+
|
|
34
|
+
async function main() {
|
|
35
|
+
const raw = await readStdin();
|
|
36
|
+
if (!raw.trim()) process.exit(0);
|
|
37
|
+
|
|
38
|
+
let payload: HookPayload;
|
|
39
|
+
try {
|
|
40
|
+
payload = JSON.parse(raw) as HookPayload;
|
|
41
|
+
} catch {
|
|
42
|
+
process.exit(0);
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
const { session_id, tool_name, tool_input } = payload;
|
|
46
|
+
const repoRoot = resolve(process.cwd());
|
|
47
|
+
|
|
48
|
+
// Log every tool call
|
|
49
|
+
const logEntry = {
|
|
50
|
+
timestamp: new Date().toISOString(),
|
|
51
|
+
session: session_id,
|
|
52
|
+
tool: tool_name,
|
|
53
|
+
};
|
|
54
|
+
|
|
55
|
+
try {
|
|
56
|
+
const logDir = join(repoRoot, ".claude", "logs");
|
|
57
|
+
await mkdir(join(logDir, "sessions"), { recursive: true });
|
|
58
|
+
const line = JSON.stringify(logEntry) + "\n";
|
|
59
|
+
await appendFile(join(logDir, "tool-usage.jsonl"), line);
|
|
60
|
+
// Guard against path traversal via a crafted session_id
|
|
61
|
+
if (SESSION_ID_RE.test(session_id)) {
|
|
62
|
+
await appendFile(join(logDir, "sessions", `${session_id}.jsonl`), line);
|
|
63
|
+
}
|
|
64
|
+
} catch {
|
|
65
|
+
// Non-fatal
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
// Save after-checkpoint for write tools
|
|
69
|
+
if (!WRITE_TOOLS.has(tool_name)) process.exit(0);
|
|
70
|
+
|
|
71
|
+
const filePath = getFilePath(tool_input);
|
|
72
|
+
if (!filePath) process.exit(0);
|
|
73
|
+
|
|
74
|
+
const absPath = resolve(filePath);
|
|
75
|
+
|
|
76
|
+
try {
|
|
77
|
+
await saveCheckpoint(session_id, absPath, "after");
|
|
78
|
+
} catch (err) {
|
|
79
|
+
console.error("[claude-attribution] post-tool-use error:", err);
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
// OTel: emit child span for this tool call
|
|
83
|
+
const endpoint = otelEndpoint();
|
|
84
|
+
if (endpoint) {
|
|
85
|
+
try {
|
|
86
|
+
const repoRoot = resolve(process.cwd());
|
|
87
|
+
const ctx = await readOtelContext(repoRoot);
|
|
88
|
+
if (ctx?.lastToolCallStart) {
|
|
89
|
+
const span = buildToolCallSpan(
|
|
90
|
+
ctx,
|
|
91
|
+
tool_name,
|
|
92
|
+
getFilePath(tool_input) ?? null,
|
|
93
|
+
new Date().toISOString(),
|
|
94
|
+
);
|
|
95
|
+
await exportOtlpSpans([span], endpoint, otelHeaders());
|
|
96
|
+
}
|
|
97
|
+
} catch {
|
|
98
|
+
// Silent — never block Claude
|
|
99
|
+
}
|
|
100
|
+
}
|
|
101
|
+
|
|
102
|
+
process.exit(0);
|
|
103
|
+
}
|
|
104
|
+
|
|
105
|
+
main().catch(() => process.exit(0));
|
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* PreToolUse hook — snapshot file content BEFORE Claude edits it.
|
|
3
|
+
*
|
|
4
|
+
* Registered in .claude/settings.json for tools: Edit, Write, MultiEdit
|
|
5
|
+
*
|
|
6
|
+
* Claude Code passes a JSON payload via stdin:
|
|
7
|
+
* {
|
|
8
|
+
* session_id: string,
|
|
9
|
+
* tool_name: string,
|
|
10
|
+
* tool_input: { file_path?: string, path?: string, ... }
|
|
11
|
+
* }
|
|
12
|
+
*/
|
|
13
|
+
import { resolve } from "path";
|
|
14
|
+
import {
|
|
15
|
+
loadCheckpoint,
|
|
16
|
+
saveCheckpoint,
|
|
17
|
+
writeCurrentSession,
|
|
18
|
+
} from "../attribution/checkpoint.ts";
|
|
19
|
+
import { readStdin, WRITE_TOOLS, getFilePath } from "../lib/hooks.ts";
|
|
20
|
+
import {
|
|
21
|
+
otelEndpoint,
|
|
22
|
+
otelHeaders,
|
|
23
|
+
readOtelContext,
|
|
24
|
+
writeOtelContext,
|
|
25
|
+
makeTraceId,
|
|
26
|
+
makeSpanId,
|
|
27
|
+
} from "../attribution/otel.ts";
|
|
28
|
+
|
|
29
|
+
/** Shape of the JSON payload Claude Code writes to this hook's stdin. */
interface HookPayload {
  session_id: string;
  tool_name: string;
  // Tool arguments; the edited file's path is extracted via getFilePath.
  tool_input: Record<string, unknown>;
}
|
|
34
|
+
|
|
35
|
+
/**
 * PreToolUse hook entry point.
 *
 * For write tools (Edit/Write/MultiEdit) saves a "before" checkpoint of the
 * target file — only the FIRST time the file is touched in a session — and
 * records the current session id. Optionally records the tool-call start
 * time in the OTel context file. Always exits 0; this hook must never block
 * Claude.
 */
async function main() {
  const raw = await readStdin();
  // Empty stdin: nothing to do.
  if (!raw.trim()) process.exit(0);

  let payload: HookPayload;
  try {
    payload = JSON.parse(raw) as HookPayload;
  } catch {
    // Malformed payload — ignore rather than block Claude.
    process.exit(0);
  }

  const { session_id, tool_name, tool_input } = payload;
  // Only write tools need a before-snapshot.
  if (!WRITE_TOOLS.has(tool_name)) process.exit(0);

  const filePath = getFilePath(tool_input);
  if (!filePath) process.exit(0);

  const absPath = resolve(filePath);
  const repoRoot = resolve(process.cwd());

  try {
    // Only save before-checkpoint if one doesn't already exist for this session+file.
    // This preserves the original human-authored state when Claude edits a file
    // multiple times in one session — we want "before Claude touched it at all",
    // not "before Claude's second edit".
    const existing = await loadCheckpoint(session_id, absPath, "before");
    if (!existing) {
      await saveCheckpoint(session_id, absPath, "before");
    }
    await writeCurrentSession(repoRoot, session_id);
  } catch (err) {
    // Never block Claude — soft fail
    console.error("[claude-attribution] pre-tool-use error:", err);
  }

  // OTel: record tool call start time in context file
  if (otelEndpoint()) {
    try {
      const now = new Date().toISOString();
      let ctx = await readOtelContext(repoRoot);
      if (!ctx) {
        // First tool call of the session: start a fresh trace.
        ctx = {
          traceId: makeTraceId(),
          rootSpanId: makeSpanId(),
          sessionId: session_id,
          startTime: now,
          lastToolCallStart: now,
        };
      } else {
        ctx.lastToolCallStart = now;
      }
      await writeOtelContext(repoRoot, ctx);
    } catch {
      // Silent — never block Claude
    }
  }

  process.exit(0);
}

main().catch(() => process.exit(0));
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Stop hook (SessionEnd) — runs when a Claude Code session ends.
|
|
3
|
+
*
|
|
4
|
+
* Intentionally does NOT clean up checkpoints. The .after snapshots in
|
|
5
|
+
* /tmp/claude-attribution/<session_id>/ need to survive session close because
|
|
6
|
+
* a developer may close Claude Code before committing. If checkpoints were
|
|
7
|
+
* deleted here, commits made after reopening Claude Code would show 0% AI.
|
|
8
|
+
*
|
|
9
|
+
* /tmp is cleaned by the OS on reboot. Stale checkpoints are harmless.
|
|
10
|
+
*/
|
|
11
|
+
|
|
12
|
+
import { readStdin } from "../lib/hooks.ts";
|
|
13
|
+
|
|
14
|
+
/** SessionEnd payload shape — parsed for validation, otherwise unused. */
interface HookPayload {
  session_id: string;
  stop_hook_active?: boolean;
}
|
|
18
|
+
|
|
19
|
+
async function main() {
|
|
20
|
+
const raw = await readStdin();
|
|
21
|
+
if (!raw.trim()) process.exit(0);
|
|
22
|
+
|
|
23
|
+
// Parse to validate payload shape — no action needed
|
|
24
|
+
try {
|
|
25
|
+
JSON.parse(raw) as HookPayload;
|
|
26
|
+
} catch {
|
|
27
|
+
// ignore
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
process.exit(0);
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
main().catch(() => process.exit(0));
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* SubagentStart / SubagentStop hook — log subagent activity.
|
|
3
|
+
*
|
|
4
|
+
* Registered in .claude/settings.json for both SubagentStart and SubagentStop events.
|
|
5
|
+
* Runs whenever Claude Code launches a subagent (Agent tool call) or when one completes.
|
|
6
|
+
*
|
|
7
|
+
* Logs each event to:
|
|
8
|
+
* .claude/logs/agent-activity.jsonl (all sessions, append-only)
|
|
9
|
+
* .claude/logs/sessions/<session_id>.jsonl (per-session, append-only)
|
|
10
|
+
*
|
|
11
|
+
* The /metrics command reads agent-activity.jsonl to count agent invocations by type.
|
|
12
|
+
* `promptPreview` is truncated to 200 characters to limit log file size.
|
|
13
|
+
*/
|
|
14
|
+
import { resolve, join } from "path";
|
|
15
|
+
import { mkdir, appendFile } from "fs/promises";
|
|
16
|
+
import { SESSION_ID_RE } from "../attribution/checkpoint.ts";
|
|
17
|
+
import { readStdin } from "../lib/hooks.ts";
|
|
18
|
+
|
|
19
|
+
/** Payload for SubagentStart / SubagentStop hook events. */
interface HookPayload {
  // Per the file header, either "SubagentStart" or "SubagentStop".
  hook_event_name: string;
  session_id: string;
  agent_id?: string;
  subagent_type?: string;
  subagent_prompt?: string;
}
|
|
26
|
+
|
|
27
|
+
async function main() {
|
|
28
|
+
const raw = await readStdin();
|
|
29
|
+
if (!raw.trim()) process.exit(0);
|
|
30
|
+
|
|
31
|
+
let payload: HookPayload;
|
|
32
|
+
try {
|
|
33
|
+
payload = JSON.parse(raw) as HookPayload;
|
|
34
|
+
} catch {
|
|
35
|
+
process.exit(0);
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
const {
|
|
39
|
+
hook_event_name,
|
|
40
|
+
session_id,
|
|
41
|
+
agent_id,
|
|
42
|
+
subagent_type,
|
|
43
|
+
subagent_prompt,
|
|
44
|
+
} = payload;
|
|
45
|
+
|
|
46
|
+
const entry = {
|
|
47
|
+
timestamp: new Date().toISOString(),
|
|
48
|
+
event: hook_event_name,
|
|
49
|
+
session: session_id,
|
|
50
|
+
agentId: agent_id ?? null,
|
|
51
|
+
subagentType: subagent_type ?? null,
|
|
52
|
+
promptPreview: subagent_prompt ? subagent_prompt.slice(0, 200) : null,
|
|
53
|
+
};
|
|
54
|
+
|
|
55
|
+
try {
|
|
56
|
+
const repoRoot = resolve(process.cwd());
|
|
57
|
+
const logDir = join(repoRoot, ".claude", "logs");
|
|
58
|
+
await mkdir(join(logDir, "sessions"), { recursive: true });
|
|
59
|
+
const line = JSON.stringify(entry) + "\n";
|
|
60
|
+
await appendFile(join(logDir, "agent-activity.jsonl"), line);
|
|
61
|
+
// Guard against path traversal via a crafted session_id
|
|
62
|
+
if (SESSION_ID_RE.test(session_id)) {
|
|
63
|
+
await appendFile(join(logDir, "sessions", `${session_id}.jsonl`), line);
|
|
64
|
+
}
|
|
65
|
+
} catch {
|
|
66
|
+
// Non-fatal
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
process.exit(0);
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
main().catch(() => process.exit(0));
|