@apdesign/cursor-roi-tracker 0.5.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,233 @@
1
+ const path = require("path");
2
+ const { loadConfig, createPathFilters } = require("./config");
3
+ const { CLI_VERSION } = require("./constants");
4
+ const { parseUnifiedDiff } = require("./diff-parser");
5
+ const {
6
+ getRepoRoot,
7
+ getBranch,
8
+ getAuthor,
9
+ getAuthorEmail,
10
+ getRemoteUrl,
11
+ maskRemoteUrl,
12
+ isMergeCommitInProgress,
13
+ listStagedFiles,
14
+ getStagedUnifiedDiff,
15
+ } = require("./git");
16
+ const { loadAiEventsWithMeta, buildAiMetricsFromIntersections } = require("./ai-events");
17
+ const { writePendingReport, writeErrorStatus } = require("./report");
18
+ const { extractTaskSerials, resolveMemberMatch, ratio } = require("./metrics");
19
+ const { acquireMutexLock, releaseMutexLock, resolveEventLockPath } = require("./mutex-lock");
20
+
21
/**
 * Pre-commit hook entry point: collects staged-diff metrics, intersects them
 * with recorded AI events, and writes a pending report into the repo.
 *
 * Never rethrows: any failure is downgraded to a PRE_COMMIT_COLLECT_ERROR
 * status written via writeErrorStatus, so the hook does not block the commit.
 */
async function runPreCommit() {
  // Fall back to cwd so the catch block can still write an error status
  // even if getRepoRoot() itself throws.
  let repoRoot = process.cwd();
  try {
    repoRoot = getRepoRoot(process.cwd());
    const branch = getBranch(repoRoot);
    const author = getAuthor(repoRoot);
    const authorEmail = getAuthorEmail(repoRoot);
    // Remote URL is masked before it ever reaches a report (credentials stripping).
    const remoteUrl = maskRemoteUrl(getRemoteUrl(repoRoot));
    const config = loadConfig(repoRoot);

    // Merge commits are skipped entirely: their staged diff mixes both parents'
    // changes and would inflate the metrics.
    if (isMergeCommitInProgress(repoRoot)) {
      const skipped = createSkippedReport({
        repoRoot,
        branch,
        author,
        authorEmail,
        remoteUrl,
        reason: "mergeCommit",
      });
      writePendingReport(repoRoot, skipped);
      return;
    }

    const isSourceFile = createPathFilters(config);
    const stagedFiles = listStagedFiles(repoRoot);
    const sourceFiles = stagedFiles.filter((item) => isSourceFile(item));

    // Nothing trackable staged: still emit a report so downstream tooling
    // sees an explicit "noSourceFiles" marker rather than a missing file.
    if (!sourceFiles.length) {
      const emptyReport = createBaseReport({
        repoRoot,
        branch,
        author,
        authorEmail,
        remoteUrl,
      });
      emptyReport.sourceFiles = [];
      emptyReport.skipped = "noSourceFiles";
      writePendingReport(repoRoot, emptyReport);
      return;
    }

    const diffText = getStagedUnifiedDiff(repoRoot, sourceFiles);
    const parsed = parseUnifiedDiff(diffText);

    // Event files are shared with the probe that appends to them; read under
    // the mutex so we never see a half-written line. Lock released in finally.
    const lockHandle = await acquireMutexLock(resolveEventLockPath(repoRoot));
    let eventsResult;
    try {
      eventsResult = loadAiEventsWithMeta(repoRoot, branch, config);
    } finally {
      await releaseMutexLock(lockHandle);
    }
    const events = eventsResult.events;
    const aiMetrics = buildAiMetricsFromIntersections(
      parsed.fileMetrics,
      events,
      config.highThreshold
    );
    const taskSerials = extractTaskSerials(branch);
    const member = resolveMemberMatch(repoRoot, config, author);

    const report = createBaseReport({
      repoRoot,
      branch,
      author,
      authorEmail,
      remoteUrl,
    });
    report.sourceFiles = sourceFiles;
    report.totalAddedLines = parsed.totalAddedLines;
    report.totalDeletedLines = parsed.totalDeletedLines;
    report.affectedFilesCount = parsed.affectedFilesCount;
    report.aiAddedLines = aiMetrics.aiAddedLines;
    report.aiDeletedLines = aiMetrics.aiDeletedLines;
    report.aiTouchedFilesCount = aiMetrics.aiTouchedFilesCount;
    // ratio() is expected to handle a zero denominator (base report defaults
    // these fields to null).
    report.aiPenetrationAdded = ratio(report.aiAddedLines, report.totalAddedLines);
    report.aiPenetrationChanged = ratio(
      report.aiAddedLines + report.aiDeletedLines,
      report.totalAddedLines + report.totalDeletedLines
    );
    report.taskSerials = taskSerials;
    report.memberMatched = member.memberMatched;
    report.matchedMemberName = member.matchedMemberName;
    // Records which event IDs this commit consumed, per source file, so the
    // post-commit stage can compact those event files.
    report.cleanupPlan = buildCleanupPlan(events, aiMetrics.aiBlocks);
    report.eventReadStats = {
      filesRead: eventsResult.stats.filesRead,
      missingEventIdLines: eventsResult.stats.missingEventIdLines,
      readAtMs: Date.now(),
    };
    report.fileMetrics = parsed.fileMetrics.map((item) => ({
      filePath: item.filePath,
      language: extToLanguage(item.filePath),
      addedLines: item.addedLines,
      deletedLines: item.deletedLines,
      aiAddedLines: item.aiAddedLines,
      aiDeletedLines: item.aiDeletedLines,
      aiTouched: item.aiTouched,
      // Normalize falsy attribution to undefined so JSON.stringify drops the key.
      deletedAttribution: item.deletedAttribution || undefined,
      hunks: item.hunks,
    }));
    report.aiBlocks = aiMetrics.aiBlocks;

    // At most one non-fatal status is attached, in priority order:
    // config parse error > no events found > events with missing IDs.
    if (config.configError) {
      report.errorStatus = { code: "CONFIG_PARSE_ERROR", message: config.configError };
      writeErrorStatus(repoRoot, report.errorStatus, {
        stage: "pre-commit",
        branch,
        author,
      });
    } else if (!events.length) {
      // No AI events means AI metrics are expected to be 0; surface this explicitly.
      const eventPaths = Array.isArray(config.eventsDirectory)
        ? config.eventsDirectory.join(", ")
        : String(config.eventsDirectory || "");
      report.errorStatus = {
        code: "AI_EVENTS_NOT_FOUND",
        message: `No AI events found under ${eventPaths}; AI metrics set to 0.`,
      };
    } else if (eventsResult.stats.missingEventIdLines > 0) {
      report.errorStatus = {
        code: "AI_EVENTS_MISSING_EVENT_ID",
        message: `${eventsResult.stats.missingEventIdLines} probe event lines missing eventId were ignored.`,
      };
    }

    writePendingReport(repoRoot, report);
  } catch (error) {
    // Best-effort error reporting only; a hook failure must not block commits.
    const errorStatus = {
      code: "PRE_COMMIT_COLLECT_ERROR",
      message: error?.message || "unknown error",
    };
    writeErrorStatus(repoRoot, errorStatus, { stage: "pre-commit" });
  }
}
154
+
155
/**
 * Group the event IDs consumed by this commit by the event file they came
 * from, so post-commit compaction knows which lines to remove from where.
 *
 * @param {Array} events - AI events loaded from the event files.
 * @param {Array} aiBlocks - matched AI blocks carrying a `sourceEventId`.
 * @returns {Array<{sourceFile: string, consumedEventIds: string[]}>}
 */
function buildCleanupPlan(events, aiBlocks) {
  // Collect the set of event IDs actually matched against staged hunks.
  const usedIds = new Set();
  for (const block of Array.isArray(aiBlocks) ? aiBlocks : []) {
    const id = String(block?.sourceEventId || "").trim();
    if (id) {
      usedIds.add(id);
    }
  }
  if (usedIds.size === 0) {
    return [];
  }

  // Bucket consumed IDs by the event file that recorded them; events without
  // a usable eventId or sourceFile are ignored.
  const bySource = new Map();
  for (const evt of Array.isArray(events) ? events : []) {
    const id = String(evt?.eventId || "").trim();
    if (!id || !usedIds.has(id)) {
      continue;
    }
    const src = String(evt?.sourceFile || "").trim();
    if (!src) {
      continue;
    }
    const bucket = bySource.get(src) ?? new Set();
    bucket.add(id);
    bySource.set(src, bucket);
  }

  return [...bySource].map(([sourceFile, ids]) => ({
    sourceFile,
    consumedEventIds: [...ids],
  }));
}
186
+
187
/**
 * Build the skeleton shared by every pre-commit report: identity fields from
 * the arguments, with all metric/attribution fields zeroed or nulled for the
 * caller to fill in. commitSha/commitTime stay null until post-commit.
 */
function createBaseReport({ repoRoot, branch, author, authorEmail, remoteUrl }) {
  const identity = {
    schemaVersion: "0.1.0",
    cliVersion: CLI_VERSION,
    generatedAt: Date.now(),
    repoRoot,
    repoName: path.basename(repoRoot),
    branch,
    remoteUrl: remoteUrl || null,
    commitSha: null,
    commitTime: null,
    author,
    authorEmail,
  };

  const lineMetrics = {
    totalAddedLines: 0,
    totalDeletedLines: 0,
    affectedFilesCount: 0,
    aiAddedLines: 0,
    aiDeletedLines: 0,
    aiTouchedFilesCount: 0,
    // Penetration ratios stay null until there is a non-zero denominator.
    aiPenetrationAdded: null,
    aiPenetrationChanged: null,
  };

  const attribution = {
    taskSerials: [],
    memberMatched: false,
    matchedMemberName: null,
    cleanupPlan: [],
    eventReadStats: null,
    fileMetrics: [],
    aiBlocks: [],
    errorStatus: null,
  };

  return { ...identity, ...lineMetrics, ...attribution };
}
218
+
219
/**
 * Build a base report flagged as skipped (e.g. reason "mergeCommit"),
 * preserving all identity fields so the skip is still attributable.
 */
function createSkippedReport({ repoRoot, branch, author, authorEmail, remoteUrl, reason }) {
  const report = createBaseReport({ repoRoot, branch, author, authorEmail, remoteUrl });
  report.skipped = reason;
  return report;
}
225
+
226
/**
 * Derive a language tag from a file's extension ("src/a.TSX" -> "tsx").
 * Returns null for extension-less paths (e.g. "Makefile", dotfiles).
 */
function extToLanguage(filePath) {
  const extension = path.extname(filePath).toLowerCase();
  if (!extension) {
    return null;
  }
  // Drop the leading dot.
  return extension.slice(1);
}
230
+
231
// Public surface of the pre-commit module: the hook entry point only.
module.exports = {
  runPreCommit,
};
package/src/config.js ADDED
@@ -0,0 +1,108 @@
1
+ const fs = require("fs");
2
+ const path = require("path");
3
+ const {
4
+ CONFIG_FILE_NAME,
5
+ DEFAULT_EXCLUDE_PATTERNS,
6
+ DEFAULT_SOURCE_EXTENSIONS,
7
+ } = require("./constants");
8
+
9
/**
 * Load the tracker configuration for a repository.
 *
 * Reads CONFIG_FILE_NAME from the repo root and merges it over built-in
 * defaults, field by field, with type validation per field. A missing file
 * yields pure defaults; an unparseable file yields defaults plus a
 * `configError` message the caller surfaces as CONFIG_PARSE_ERROR.
 *
 * @param {string} repoRoot - absolute path to the repository root.
 * @returns {object} resolved configuration (never throws).
 */
function loadConfig(repoRoot) {
  const defaults = {
    highThreshold: 0.85,
    sourceExtensions: DEFAULT_SOURCE_EXTENSIONS,
    excludeRegexes: DEFAULT_EXCLUDE_PATTERNS.map((item) => item.source),
    eventsDirectory: [".cursor/animus-ai-events.jsonl", ".cursor/local-ai-events"],
    membersFile: null,
    serverBaseUrl: null,
    silentTokenPath: "/api/cursor-board/auth/silent-token",
    commitReportPath: "/api/cursor-board/commit-reports",
    requestTimeoutMs: 5000,
  };

  const configPath = path.join(repoRoot, CONFIG_FILE_NAME);
  if (!fs.existsSync(configPath)) {
    return defaults;
  }

  try {
    const parsed = JSON.parse(fs.readFileSync(configPath, "utf8"));
    return {
      // Bug fix: `typeof NaN === "number"`, so the old check let NaN (and
      // Infinity) through and broke downstream threshold comparisons.
      // Only finite numbers are accepted; anything else falls back.
      highThreshold: Number.isFinite(parsed.highThreshold)
        ? parsed.highThreshold
        : defaults.highThreshold,
      sourceExtensions: Array.isArray(parsed.sourceExtensions)
        ? parsed.sourceExtensions
        : defaults.sourceExtensions,
      excludeRegexes: Array.isArray(parsed.excludeRegexes)
        ? parsed.excludeRegexes
        : defaults.excludeRegexes,
      eventsDirectory: normalizeEventsDirectoryConfig(parsed.eventsDirectory, defaults.eventsDirectory),
      membersFile:
        typeof parsed.membersFile === "string" ? parsed.membersFile : defaults.membersFile,
      serverBaseUrl:
        typeof parsed.serverBaseUrl === "string"
          ? parsed.serverBaseUrl
          : defaults.serverBaseUrl,
      silentTokenPath:
        typeof parsed.silentTokenPath === "string"
          ? parsed.silentTokenPath
          : defaults.silentTokenPath,
      commitReportPath:
        typeof parsed.commitReportPath === "string"
          ? parsed.commitReportPath
          : defaults.commitReportPath,
      requestTimeoutMs:
        // Accept any value coercible to a positive finite number.
        Number.isFinite(Number(parsed.requestTimeoutMs)) && Number(parsed.requestTimeoutMs) > 0
          ? Number(parsed.requestTimeoutMs)
          : defaults.requestTimeoutMs,
    };
  } catch (error) {
    // Keep the hook usable on a broken config: fall back to defaults and
    // carry the parse error so the caller can report CONFIG_PARSE_ERROR.
    return { ...defaults, configError: error.message };
  }
}
64
+
65
/**
 * Normalize the `eventsDirectory` config value to a non-empty string array.
 * Accepts a single string or an array; blank entries are dropped. When the
 * input yields nothing usable, the fallback is returned (copied, trimmed).
 */
function normalizeEventsDirectoryConfig(input, fallback) {
  if (typeof input === "string") {
    const single = input.trim();
    if (single) {
      return [single];
    }
  }

  if (Array.isArray(input)) {
    const cleaned = [];
    for (const entry of input) {
      const value = String(entry || "").trim();
      if (value) {
        cleaned.push(value);
      }
    }
    if (cleaned.length) {
      return cleaned;
    }
  }

  // Input unusable: fall back. Array fallbacks are copied defensively.
  if (Array.isArray(fallback)) {
    return [...fallback];
  }
  const fallbackValue = String(fallback || "").trim();
  return fallbackValue ? [fallbackValue] : [];
}
77
+
78
/**
 * Build the staged-file predicate from the resolved config.
 *
 * A path is a "source file" when its (lowercased) extension is in
 * `sourceExtensions` and no `excludeRegexes` pattern matches it. Invalid
 * user-supplied patterns are silently skipped rather than crashing the hook.
 *
 * @returns {(filePath: string) => boolean}
 */
function createPathFilters(config) {
  const allowedExtensions = new Set();
  for (const ext of config.sourceExtensions || []) {
    const cleaned = ext.toLowerCase().trim();
    if (cleaned) {
      allowedExtensions.add(cleaned);
    }
  }

  const excludeMatchers = [];
  for (const pattern of config.excludeRegexes || []) {
    try {
      excludeMatchers.push(new RegExp(pattern, "i"));
    } catch (_error) {
      // Deliberate best-effort: drop unparseable patterns.
    }
  }

  return function isSourceFile(filePath) {
    // Normalize Windows separators so patterns only need to handle "/".
    const normalized = filePath.replace(/\\/g, "/");
    if (!normalized || normalized === "/dev/null") {
      return false;
    }
    for (const matcher of excludeMatchers) {
      if (matcher.test(normalized)) {
        return false;
      }
    }
    return allowedExtensions.has(path.extname(normalized).toLowerCase());
  };
}
104
+
105
// Public surface of the config module; normalizeEventsDirectoryConfig
// stays internal.
module.exports = {
  loadConfig,
  createPathFilters,
};
@@ -0,0 +1,61 @@
1
+ const path = require("path");
2
+ const { version: CLI_VERSION = "0.0.0" } = require("../package.json");
3
+
4
// File extensions treated as "source code" when filtering staged files.
// Used as the default for the repo config's `sourceExtensions`; anything
// outside this list is ignored by the metrics pipeline unless overridden.
const DEFAULT_SOURCE_EXTENSIONS = [
  ".ts",
  ".tsx",
  ".js",
  ".jsx",
  ".vue",
  ".css",
  ".scss",
  ".less",
  ".html",
  ".json",
  ".mdx",
  ".py",
  ".go",
  ".java",
  ".kt",
  ".rs",
  ".swift",
  ".rb",
  ".php",
  ".c",
  ".cc",
  ".cpp",
  ".h",
  ".hpp",
];
30
+
31
// Paths excluded from metrics even when their extension matches:
// build artifacts, coverage output, minified bundles, lockfiles, generated
// code. Only the `.source` of each regex is exported into config defaults.
const DEFAULT_EXCLUDE_PATTERNS = [
  /(^|\/)dist\//i,
  /(^|\/)build\//i,
  /(^|\/)coverage\//i,
  /\.min\./i,
  // NOTE(review): this matches ANY filename ending in "lock" or "lock.json"
  // (e.g. "block.json", "deadlock"), not just lockfiles, and makes the
  // specific lockfile patterns below redundant — confirm the broad match
  // is intended.
  /(^|\/).*lock(\.json)?$/i,
  /(^|\/)pnpm-lock\.yaml$/i,
  /(^|\/)yarn\.lock$/i,
  /(^|\/)bun\.lockb?$/i,
  /(^|\/)package-lock\.json$/i,
  /(^|\/)generated\//i,
];
43
+
44
// Name of the optional per-repo configuration file (JSON, at the repo root).
const CONFIG_FILE_NAME = ".cursor-roi-tracker.json";
// All tracker state is kept under the repo's .cursor directory.
const CURSOR_DIR = ".cursor";
// Report written by the pre-commit hook before the commit lands.
const PENDING_REPORT_PATH = path.join(CURSOR_DIR, "ai-commit-report.pending.json");
// Presumably the report enriched with the commit SHA after the commit
// completes — the writer is not in this file; verify against the
// post-commit stage.
const FINAL_REPORT_PATH = path.join(CURSOR_DIR, "ai-commit-report.final.json");
// Append-only JSONL logs: one for past reports, one for recorded errors.
const HISTORY_PATH = path.join(CURSOR_DIR, "ai-commit-history.jsonl");
const ERROR_HISTORY_PATH = path.join(CURSOR_DIR, "ai-commit-errors.jsonl");

module.exports = {
  DEFAULT_SOURCE_EXTENSIONS,
  DEFAULT_EXCLUDE_PATTERNS,
  CONFIG_FILE_NAME,
  CURSOR_DIR,
  CLI_VERSION,
  PENDING_REPORT_PATH,
  FINAL_REPORT_PATH,
  HISTORY_PATH,
  ERROR_HISTORY_PATH,
};
@@ -0,0 +1,99 @@
1
/**
 * Parse `git diff` unified output into per-file added/deleted line counts
 * plus hunk ranges, and aggregate totals across all files.
 *
 * Deleted files (`+++ /dev/null`) are intentionally dropped: their removed
 * lines do not contribute to any metric.
 *
 * @param {string} diffText - raw unified diff text (null/undefined tolerated).
 * @returns {{fileMetrics: Array, totalAddedLines: number,
 *            totalDeletedLines: number, affectedFilesCount: number}}
 */
function parseUnifiedDiff(diffText) {
  const fileMetrics = [];
  let activeFile = null;

  // Close out the file currently being accumulated, if any.
  const flush = () => {
    if (activeFile) {
      fileMetrics.push(finalizeFile(activeFile));
    }
    activeFile = null;
  };

  for (const line of String(diffText || "").split("\n")) {
    if (line.startsWith("diff --git")) {
      // A new file section starts; commit the previous one.
      flush();
      continue;
    }

    if (line.startsWith("+++ b/")) {
      activeFile = {
        filePath: line.slice("+++ b/".length).trim(),
        addedLines: 0,
        deletedLines: 0,
        hunks: [],
      };
      continue;
    }

    if (line.startsWith("+++ /dev/null")) {
      // File deletion: skip the whole section.
      activeFile = null;
      continue;
    }

    if (!activeFile) {
      continue;
    }

    if (line.startsWith("@@ ")) {
      const hunk = parseHunkHeader(line);
      if (hunk) {
        activeFile.hunks.push(hunk);
      }
      continue;
    }

    // "+++"/"---" headers were handled above; plain +/- are content lines.
    if (line.startsWith("+") && !line.startsWith("+++")) {
      activeFile.addedLines += 1;
    } else if (line.startsWith("-") && !line.startsWith("---")) {
      activeFile.deletedLines += 1;
    }
  }
  flush();

  const totalAddedLines = fileMetrics.reduce((sum, item) => sum + item.addedLines, 0);
  const totalDeletedLines = fileMetrics.reduce((sum, item) => sum + item.deletedLines, 0);

  return {
    fileMetrics,
    totalAddedLines,
    totalDeletedLines,
    affectedFilesCount: fileMetrics.length,
  };
}

/**
 * Parse a "@@ -a,b +c,d @@" hunk header. Per the unified-diff format, an
 * omitted count defaults to 1. Returns null for malformed headers.
 */
function parseHunkHeader(line) {
  const match = /^@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@/.exec(line);
  if (!match) {
    return null;
  }
  const [, oldStart, oldCount, newStart, newCount] = match;
  return {
    oldStart: Number(oldStart),
    oldCount: oldCount === undefined ? 1 : Number(oldCount),
    newStart: Number(newStart),
    newCount: newCount === undefined ? 1 : Number(newCount),
  };
}

/**
 * Freeze an accumulator into the public per-file metric shape, seeding the
 * AI-attribution fields that a later pass fills in.
 */
function finalizeFile(file) {
  const { filePath, addedLines, deletedLines, hunks } = file;
  return {
    filePath,
    addedLines,
    deletedLines,
    aiAddedLines: 0,
    aiDeletedLines: 0,
    aiTouched: false,
    hunks,
  };
}
96
+
97
// Public surface of the diff-parser module; hunk/file helpers stay internal.
module.exports = {
  parseUnifiedDiff,
};
@@ -0,0 +1,91 @@
1
+ const fs = require("fs");
2
+ const path = require("path");
3
+ const { acquireMutexLock, releaseMutexLock, resolveEventLockPath } = require("./mutex-lock");
4
+
5
/**
 * Remove consumed AI events from their source files, per the cleanup plan
 * produced at pre-commit time.
 *
 * The whole pass runs under the event mutex so a concurrently appending
 * probe never races the rewrite; the lock is released even on failure.
 *
 * @param {string} repoRoot - repository root path.
 * @param {Array<{sourceFile: string, consumedEventIds: string[]}>} cleanupPlan
 * @returns {Promise<{compactedFiles: number, removedEvents: number, parseErrors: number}>}
 */
async function compactConsumedEvents(repoRoot, cleanupPlan) {
  const entries = Array.isArray(cleanupPlan) ? cleanupPlan : [];
  if (entries.length === 0) {
    return { compactedFiles: 0, removedEvents: 0, parseErrors: 0 };
  }

  const lockHandle = await acquireMutexLock(resolveEventLockPath(repoRoot));
  try {
    const totals = { compactedFiles: 0, removedEvents: 0, parseErrors: 0 };
    // Sources are compacted sequentially; each file rewrite is independent.
    for (const entry of entries) {
      const outcome = await compactSingleSource(repoRoot, entry);
      if (outcome.compacted) {
        totals.compactedFiles += 1;
      }
      totals.removedEvents += outcome.removedEvents;
      totals.parseErrors += outcome.parseErrors;
    }
    return totals;
  } finally {
    await releaseMutexLock(lockHandle);
  }
}
29
+
30
/**
 * Rewrite one JSONL event file with the consumed events removed.
 *
 * Blank lines are preserved as-is; lines that fail to parse as JSON are
 * counted in `parseErrors` but kept (best-effort: never destroy data we
 * cannot interpret). The rewrite is write-to-temp + rename so readers never
 * see a partially written file. If nothing was removed the file is left
 * untouched.
 *
 * @param {string} repoRoot - repository root; resolves relative sourceFile paths.
 * @param {{sourceFile?: string, consumedEventIds?: string[]}} entry
 * @returns {Promise<{compacted: boolean, removedEvents: number, parseErrors: number}>}
 */
async function compactSingleSource(repoRoot, entry) {
  const sourceFile = String(entry?.sourceFile || "").trim();
  if (!sourceFile) {
    return { compacted: false, removedEvents: 0, parseErrors: 0 };
  }
  const consumedIds = new Set(
    Array.isArray(entry?.consumedEventIds)
      ? entry.consumedEventIds.map((item) => String(item || "").trim()).filter(Boolean)
      : []
  );
  if (!consumedIds.size) {
    return { compacted: false, removedEvents: 0, parseErrors: 0 };
  }

  const filePath = path.isAbsolute(sourceFile) ? sourceFile : path.join(repoRoot, sourceFile);
  if (!fs.existsSync(filePath)) {
    return { compacted: false, removedEvents: 0, parseErrors: 0 };
  }

  const originalText = await fs.promises.readFile(filePath, "utf8");
  const hadTrailingNewline = originalText.endsWith("\n");
  const lines = originalText.split("\n");
  const keptLines = [];
  let removedEvents = 0;
  let parseErrors = 0;

  for (const line of lines) {
    if (!line.trim()) {
      keptLines.push(line);
      continue;
    }
    try {
      const parsed = JSON.parse(line);
      // Accept both camelCase and snake_case event IDs.
      const eventId = String(parsed?.eventId || parsed?.event_id || "").trim();
      if (eventId && consumedIds.has(eventId)) {
        removedEvents += 1;
        continue;
      }
      keptLines.push(line);
    } catch (_error) {
      parseErrors += 1;
      keptLines.push(line);
    }
  }

  if (removedEvents <= 0) {
    return { compacted: false, removedEvents: 0, parseErrors };
  }

  let nextText = keptLines.join("\n");
  if (hadTrailingNewline && nextText && !nextText.endsWith("\n")) {
    nextText += "\n";
  }
  const tempPath = `${filePath}.${process.pid}.${Date.now()}.tmp`;
  try {
    await fs.promises.writeFile(tempPath, nextText, "utf8");
    await fs.promises.rename(tempPath, filePath);
  } catch (error) {
    // Bug fix: previously a failed write/rename leaked the .tmp file.
    // Best-effort cleanup, then propagate the original failure.
    await fs.promises.unlink(tempPath).catch(() => {});
    throw error;
  }
  return { compacted: true, removedEvents, parseErrors };
}
88
+
89
// Public surface of the compaction module; compactSingleSource stays internal.
module.exports = {
  compactConsumedEvents,
};