chekk 0.5.4 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +17 -0
- package/dist/index.js +448 -0
- package/package.json +18 -34
- package/bin/chekk.js +0 -62
- package/src/detect.js +0 -146
- package/src/display.js +0 -1153
- package/src/index.js +0 -281
- package/src/insights.js +0 -661
- package/src/metrics/ai-leverage.js +0 -186
- package/src/metrics/debug-cycles.js +0 -204
- package/src/metrics/decomposition.js +0 -158
- package/src/metrics/session-structure.js +0 -199
- package/src/metrics/token-efficiency.js +0 -258
- package/src/parsers/claude-code.js +0 -231
- package/src/parsers/codex.js +0 -188
- package/src/parsers/cursor.js +0 -281
- package/src/scorer.js +0 -228
- package/src/upload.js +0 -140
package/src/parsers/codex.js
DELETED
|
@@ -1,188 +0,0 @@
|
|
|
1
|
-
import { existsSync, readFileSync, readdirSync, statSync } from 'fs';
|
|
2
|
-
import { join } from 'path';
|
|
3
|
-
|
|
4
|
-
/**
|
|
5
|
-
* Parse OpenAI Codex CLI session files into normalized format.
|
|
6
|
-
*
|
|
7
|
-
* Codex stores sessions under:
|
|
8
|
-
* ~/.codex/sessions/YYYY/MM/DD/rollout-*.jsonl
|
|
9
|
-
*
|
|
10
|
-
* Each JSONL line follows the OpenAI message format:
|
|
11
|
-
* { "role": "user"|"assistant"|"system", "content": "..." | [...], "timestamp": "..." }
|
|
12
|
-
*
|
|
13
|
-
* Tool calls appear as content blocks with type "function_call" or "tool_use".
|
|
14
|
-
* We normalize into the same session format as Claude Code.
|
|
15
|
-
*/
|
|
16
|
-
|
|
17
|
-
/**
 * Safely parse one JSONL line.
 * @param {string} line - raw line from a .jsonl file
 * @returns {object|null} the parsed value, or null for malformed JSON
 */
function parseJsonlLine(line) {
  let parsed = null;
  try {
    parsed = JSON.parse(line);
  } catch {
    // Partially-written or corrupt lines are expected; skip them.
  }
  return parsed;
}
|
|
24
|
-
|
|
25
|
-
/**
 * Flatten message content into a plain string.
 * Strings pass through; arrays contribute their "text"/"output_text"
 * blocks joined with newlines; anything else yields ''.
 */
function extractText(content) {
  if (typeof content === 'string') return content;
  if (!Array.isArray(content)) return '';

  const pieces = [];
  for (const block of content) {
    if (block.type === 'text' || block.type === 'output_text') {
      pieces.push(block.text || block.content || '');
    }
  }
  return pieces.join('\n');
}
|
|
33
|
-
|
|
34
|
-
/**
 * Pull normalized tool-call records out of an assistant content array.
 * Recognizes "function_call", "tool_use" and "tool_call" block types.
 * @returns {Array<{tool: string, input: object}>}
 */
function extractToolCalls(content) {
  if (!Array.isArray(content)) return [];

  const TOOL_BLOCK_TYPES = new Set(['function_call', 'tool_use', 'tool_call']);
  const calls = [];
  for (const block of content) {
    if (!TOOL_BLOCK_TYPES.has(block.type)) continue;
    calls.push({
      tool: block.name || block.function?.name || 'unknown',
      input: block.arguments || block.input || {},
    });
  }
  return calls;
}
|
|
47
|
-
|
|
48
|
-
/**
|
|
49
|
-
* Parse a single Codex rollout JSONL file.
|
|
50
|
-
*/
|
|
51
|
-
/**
 * Parse a single Codex rollout JSONL file into a list of exchanges.
 * Each exchange pairs one user prompt with the assistant activity
 * (responses, tool calls, reasoning) that followed it.
 * Returns [] when the file cannot be read.
 */
function parseSessionFile(filePath) {
  let contents;
  try {
    contents = readFileSync(filePath, 'utf-8');
  } catch {
    return [];
  }

  const exchanges = [];
  let openExchange = null;

  for (const rawLine of contents.split('\n')) {
    if (!rawLine.trim()) continue;

    const entry = parseJsonlLine(rawLine);
    if (!entry) continue;

    const role = entry.role;
    // Only user/assistant turns matter; system messages and tool-result
    // messages are dropped from the normalized view.
    if (!role || role === 'system' || role === 'tool' || role === 'function') continue;

    const text = extractText(entry.content);
    const timestamp = entry.timestamp || entry.created_at || null;

    if (role === 'user') {
      if (!text.trim()) continue;
      if (openExchange) exchanges.push(openExchange);
      openExchange = {
        userPrompt: text,
        userTimestamp: timestamp,
        assistantResponses: [],
        toolCalls: [],
        thinkingContent: [],
      };
    } else if (role === 'assistant' && openExchange) {
      if (text) openExchange.assistantResponses.push(text);
      openExchange.toolCalls.push(...extractToolCalls(entry.content));
      // Codex may include reasoning/thinking alongside the answer text.
      const reasoning = entry.reasoning || entry.thinking;
      if (reasoning) openExchange.thinkingContent.push(reasoning);
    }
  }

  if (openExchange) exchanges.push(openExchange);
  return exchanges;
}
|
|
104
|
-
|
|
105
|
-
/**
|
|
106
|
-
* Recursively find all rollout-*.jsonl files under sessions dir.
|
|
107
|
-
*/
|
|
108
|
-
/**
 * Recursively collect every .jsonl file under `dir`.
 * Unreadable entries and permission errors are skipped silently.
 * @returns {string[]} absolute paths of the files found
 */
function findSessionFiles(dir) {
  if (!existsSync(dir)) return [];

  let names;
  try {
    names = readdirSync(dir);
  } catch {
    // Ignore permission errors
    return [];
  }

  const found = [];
  for (const name of names) {
    const candidate = join(dir, name);
    try {
      const info = statSync(candidate);
      if (info.isDirectory()) {
        found.push(...findSessionFiles(candidate));
      } else if (name.endsWith('.jsonl')) {
        found.push(candidate);
      }
    } catch {
      continue;
    }
  }
  return found;
}
|
|
133
|
-
|
|
134
|
-
/**
|
|
135
|
-
* Extract project name from session file path or content.
|
|
136
|
-
* Codex sessions are in ~/.codex/sessions/YYYY/MM/DD/rollout-*.jsonl
|
|
137
|
-
* Try to get the working directory from the session content.
|
|
138
|
-
*/
|
|
139
|
-
/**
 * Extract a project label from a Codex session file path.
 * Codex sessions live in ~/.codex/sessions/YYYY/MM/DD/rollout-*.jsonl,
 * so the label is built from the date segments: 'codex/YYYY/MM/DD'.
 * Falls back to plain 'codex' when the path has no 'sessions' segment.
 *
 * @param {string} filePath - path to the rollout .jsonl file
 * @param {Array} exchanges - parsed exchanges (currently unused; kept
 *   so callers that pass it keep working)
 */
function extractProjectName(filePath, exchanges) {
  // BUG FIX: split on both separators so Windows paths (backslashes)
  // are handled too — the old split('/') only worked on POSIX.
  const parts = filePath.split(/[\\/]/);
  const dateIdx = parts.findIndex(p => p === 'sessions');
  if (dateIdx >= 0 && parts.length > dateIdx + 3) {
    return `codex/${parts[dateIdx + 1]}/${parts[dateIdx + 2]}/${parts[dateIdx + 3]}`;
  }
  return 'codex';
}
|
|
148
|
-
|
|
149
|
-
/**
|
|
150
|
-
* Parse all Codex sessions from ~/.codex/sessions.
|
|
151
|
-
*/
|
|
152
|
-
/**
 * Parse all Codex sessions from <basePath>/sessions into normalized
 * session objects (same shape as the Claude Code parser's output).
 *
 * @param {string} basePath - typically ~/.codex
 * @returns {Array} session summaries with exchanges and timing info
 */
export function parseAllSessions(basePath) {
  const sessionsDir = join(basePath, 'sessions');
  if (!existsSync(sessionsDir)) return [];

  const sessionFiles = findSessionFiles(sessionsDir);
  const allSessions = [];

  for (const filePath of sessionFiles) {
    const exchanges = parseSessionFile(filePath);
    if (exchanges.length === 0) continue;

    // Normalize timestamps to epoch milliseconds. Numeric values are
    // treated as epoch seconds (hence * 1000); strings are Date-parsed.
    const timestamps = exchanges
      .map(e => e.userTimestamp)
      .filter(Boolean)
      .map(t => (typeof t === 'number' ? t * 1000 : new Date(t).getTime()))
      .filter(t => !Number.isNaN(t))
      // BUG FIX: default Array#sort compares as strings; epoch-ms
      // numbers need an explicit numeric comparator.
      .sort((a, b) => a - b);

    // Split on both separators so Windows paths work too.
    const fileName = filePath.split(/[\\/]/).pop();

    allSessions.push({
      id: fileName.replace('.jsonl', ''),
      file: fileName,
      project: extractProjectName(filePath, exchanges),
      exchanges,
      turnCount: exchanges.length * 2,
      exchangeCount: exchanges.length,
      // BUG FIX: length check instead of truthiness, so a timestamp of
      // 0 (epoch) would not be silently dropped.
      startTime: timestamps.length > 0 ? new Date(timestamps[0]).toISOString() : null,
      endTime: timestamps.length > 0 ? new Date(timestamps[timestamps.length - 1]).toISOString() : null,
      durationMinutes: timestamps.length >= 2
        ? Math.round((timestamps[timestamps.length - 1] - timestamps[0]) / 60000)
        : 0,
    });
  }

  return allSessions;
}
|
package/src/parsers/cursor.js
DELETED
|
@@ -1,281 +0,0 @@
|
|
|
1
|
-
import { existsSync, readFileSync, readdirSync, statSync } from 'fs';
|
|
2
|
-
import { join } from 'path';
|
|
3
|
-
import { execSync } from 'child_process';
|
|
4
|
-
|
|
5
|
-
/**
|
|
6
|
-
* Parse Cursor chat history from SQLite state.vscdb files.
|
|
7
|
-
*
|
|
8
|
-
* Cursor stores chats in SQLite databases under:
|
|
9
|
-
* ~/Library/Application Support/Cursor/User/workspaceStorage/<hash>/state.vscdb
|
|
10
|
-
*
|
|
11
|
-
* Two storage formats exist:
|
|
12
|
-
* - Legacy: key = 'workbench.panel.aichat.view.aichat.chatdata' → JSON with tabs/bubbles
|
|
13
|
-
* - Current: key like 'composerData:%' in table cursorDiskKV → JSON with conversation data
|
|
14
|
-
* Bubble text stored separately under 'bubbleId:<composerId>:<bubbleId>'
|
|
15
|
-
*
|
|
16
|
-
* We normalize into the same session format as Claude Code.
|
|
17
|
-
*/
|
|
18
|
-
|
|
19
|
-
/**
 * Run a query against a SQLite DB via the `sqlite3` CLI and return the
 * rows as parsed JSON (uses `sqlite3 -json`). Returns [] when the
 * sqlite3 binary is missing, the query fails, the 5s timeout fires, or
 * the output is not valid JSON.
 *
 * NOTE(review): the command line is built by string interpolation; a
 * double quote in `dbPath` or backslash sequences in `query` can break
 * out of the shell quoting. Acceptable for local, trusted workspace
 * paths — do not pass untrusted input here.
 */
function querySqlite(dbPath, query) {
  try {
    const result = execSync(
      `sqlite3 -json "${dbPath}" "${query.replace(/"/g, '\\"')}"`,
      // piped stdio keeps sqlite3's stderr out of the parent terminal
      { encoding: 'utf-8', timeout: 5000, stdio: ['pipe', 'pipe', 'pipe'] }
    );
    // sqlite3 -json prints nothing at all for an empty result set
    return JSON.parse(result || '[]');
  } catch {
    return [];
  }
}
|
|
30
|
-
|
|
31
|
-
/**
 * Run a query via the `sqlite3` CLI and return its raw (non-JSON)
 * stdout, trimmed. Returns '' on any failure (missing binary, bad
 * query, 5s timeout). See querySqlite for the JSON variant; the same
 * shell-quoting caveat applies to both.
 */
function querySqliteRaw(dbPath, query) {
  try {
    return execSync(
      `sqlite3 "${dbPath}" "${query.replace(/"/g, '\\"')}"`,
      { encoding: 'utf-8', timeout: 5000, stdio: ['pipe', 'pipe', 'pipe'] }
    ).trim();
  } catch {
    return '';
  }
}
|
|
41
|
-
|
|
42
|
-
/**
|
|
43
|
-
* Try to detect which table the DB uses (ItemTable vs cursorDiskKV).
|
|
44
|
-
*/
|
|
45
|
-
/**
 * Identify which key-value table this Cursor DB uses.
 * Prefers the newer 'cursorDiskKV' over the legacy 'ItemTable'.
 * @returns {string|null} the table name, or null when neither exists
 */
function detectTable(dbPath) {
  const tableList = querySqliteRaw(dbPath, ".tables");
  for (const candidate of ['cursorDiskKV', 'ItemTable']) {
    if (tableList.includes(candidate)) return candidate;
  }
  return null;
}
|
|
51
|
-
|
|
52
|
-
/**
|
|
53
|
-
* Parse legacy aichat format (tabs with bubbles inline).
|
|
54
|
-
*/
|
|
55
|
-
/**
 * Parse the legacy Cursor aichat format: a single JSON blob stored
 * under the 'workbench.panel.aichat.view.aichat.chatdata' key,
 * containing tabs whose bubbles hold the conversation inline.
 *
 * @param {string} dbPath - path to the workspace's state.vscdb
 * @param {string} table - table holding the key ('ItemTable' or 'cursorDiskKV')
 * @returns {Array} normalized sessions ([] on any parse failure)
 */
function parseLegacyChat(dbPath, table) {
  const raw = querySqliteRaw(
    dbPath,
    `SELECT value FROM ${table} WHERE key = 'workbench.panel.aichat.view.aichat.chatdata';`
  );
  if (!raw) return [];

  try {
    const data = JSON.parse(raw);
    if (!data.tabs || !Array.isArray(data.tabs)) return [];

    const sessions = [];
    for (const tab of data.tabs) {
      if (!tab.bubbles || !Array.isArray(tab.bubbles)) continue;

      const exchanges = [];
      let current = null;

      for (const bubble of tab.bubbles) {
        const text = bubble.text || '';
        if (!text.trim()) continue;

        // type 1 = user, type 2 = assistant
        if (bubble.type === 1) {
          if (current) exchanges.push(current);
          current = {
            userPrompt: text,
            userTimestamp: bubble.createdAt || null,
            assistantResponses: [],
            toolCalls: [],
            thinkingContent: [],
          };
        } else if (bubble.type === 2 && current) {
          current.assistantResponses.push(text);
          // Extract tool info if available
          if (bubble.toolFormerData && Array.isArray(bubble.toolFormerData)) {
            for (const tool of bubble.toolFormerData) {
              current.toolCalls.push({ tool: tool.toolName || 'unknown', input: {} });
            }
          }
        }
      }
      if (current) exchanges.push(current);
      if (exchanges.length === 0) continue;

      const timestamps = exchanges
        .map(e => e.userTimestamp)
        .filter(Boolean)
        .map(t => new Date(t).getTime())
        // BUG FIX: drop unparseable dates (the Codex parser already did
        // this) and sort numerically — default sort is lexicographic.
        .filter(t => !Number.isNaN(t))
        .sort((a, b) => a - b);

      sessions.push({
        id: tab.tabId || `cursor-${sessions.length}`,
        file: 'state.vscdb',
        exchanges,
        turnCount: exchanges.length * 2,
        exchangeCount: exchanges.length,
        // BUG FIX: length check, not truthiness, so epoch-0 still counts
        startTime: timestamps.length > 0 ? new Date(timestamps[0]).toISOString() : null,
        endTime: timestamps.length > 0 ? new Date(timestamps[timestamps.length - 1]).toISOString() : null,
        durationMinutes: timestamps.length >= 2
          ? Math.round((timestamps[timestamps.length - 1] - timestamps[0]) / 60000)
          : 0,
      });
    }
    return sessions;
  } catch {
    return [];
  }
}
|
|
124
|
-
|
|
125
|
-
/**
|
|
126
|
-
* Parse composer format (cursorDiskKV with separate bubble keys).
|
|
127
|
-
*/
|
|
128
|
-
/**
 * Parse the current Cursor composer format: conversation metadata under
 * 'composerData:%' keys in cursorDiskKV, with each bubble's text stored
 * separately under 'bubbleId:<composerId>:<bubbleId>'.
 *
 * @param {string} dbPath - path to the workspace's state.vscdb
 * @returns {Array} normalized sessions ([] when none found)
 */
function parseComposerChat(dbPath) {
  // Get all composer conversation metadata
  const composerRows = querySqlite(
    dbPath,
    `SELECT key, value FROM cursorDiskKV WHERE key LIKE 'composerData:%';`
  );

  if (!composerRows.length) return [];

  const sessions = [];

  for (const row of composerRows) {
    try {
      const composerData = JSON.parse(row.value || '{}');
      const composerId = composerData.composerId || row.key.replace('composerData:', '');
      const headers = composerData.fullConversationHeadersOnly || [];

      if (headers.length === 0) continue;

      const exchanges = [];
      let current = null;

      for (const header of headers) {
        const bubbleKey = `bubbleId:${composerId}:${header.bubbleId}`;
        // BUG FIX: escape single quotes before interpolating the key
        // into SQL — IDs come from DB content, and an embedded quote
        // would otherwise break (or inject into) the statement.
        const bubbleRaw = querySqliteRaw(
          dbPath,
          `SELECT value FROM cursorDiskKV WHERE key = '${bubbleKey.replace(/'/g, "''")}';`
        );

        if (!bubbleRaw) continue;

        let bubbleData;
        try {
          bubbleData = JSON.parse(bubbleRaw);
        } catch {
          continue;
        }

        const text = bubbleData.text || '';
        if (!text.trim()) continue;

        // type 1 = user, type 2 = assistant
        if (header.type === 1) {
          if (current) exchanges.push(current);
          current = {
            userPrompt: text,
            userTimestamp: bubbleData.createdAt || bubbleData.timingInfo?.startedAt || null,
            assistantResponses: [],
            toolCalls: [],
            thinkingContent: [],
          };
        } else if (header.type === 2 && current) {
          current.assistantResponses.push(text);
          if (bubbleData.toolFormerData && Array.isArray(bubbleData.toolFormerData)) {
            for (const tool of bubbleData.toolFormerData) {
              current.toolCalls.push({ tool: tool.toolName || String(tool.tool || 'unknown'), input: {} });
            }
          }
        }
      }
      if (current) exchanges.push(current);
      if (exchanges.length === 0) continue;

      const timestamps = exchanges
        .map(e => e.userTimestamp)
        .filter(Boolean)
        .map(t => typeof t === 'number' ? t : new Date(t).getTime())
        // BUG FIX: drop unparseable dates and sort numerically — the
        // default sort is lexicographic (matches the Codex parser).
        .filter(t => !Number.isNaN(t))
        .sort((a, b) => a - b);

      sessions.push({
        id: composerId,
        file: 'state.vscdb',
        exchanges,
        turnCount: exchanges.length * 2,
        exchangeCount: exchanges.length,
        // BUG FIX: length check, not truthiness, so epoch-0 still counts
        startTime: timestamps.length > 0 ? new Date(timestamps[0]).toISOString() : null,
        endTime: timestamps.length > 0 ? new Date(timestamps[timestamps.length - 1]).toISOString() : null,
        durationMinutes: timestamps.length >= 2
          ? Math.round((timestamps[timestamps.length - 1] - timestamps[0]) / 60000)
          : 0,
      });
    } catch {
      continue;
    }
  }

  return sessions;
}
|
|
216
|
-
|
|
217
|
-
/**
|
|
218
|
-
* Get workspace name from workspace.json.
|
|
219
|
-
*/
|
|
220
|
-
/**
 * Derive a human-readable project name for a Cursor workspace.
 * Reads <workspaceDir>/workspace.json and returns the last two path
 * segments of its "folder" URI; falls back to the workspace dir name
 * when the file is missing or unparseable.
 */
function getWorkspaceName(workspaceDir) {
  const fallback = workspaceDir.split('/').pop();
  const wsFile = join(workspaceDir, 'workspace.json');
  if (!existsSync(wsFile)) return fallback;
  try {
    // BUG FIX: read the file directly instead of shelling out to
    // `execSync('cat ...')` — the old call was non-portable (no `cat`
    // on Windows) and shell-injectable via the directory path.
    const data = JSON.parse(readFileSync(wsFile, 'utf-8'));
    // workspace.json has a "folder" field with the project path
    const folder = data.folder || data.workspace || '';
    // Strip the file:// scheme and percent-encoding to get a plain path
    const decoded = decodeURIComponent(folder.replace('file://', ''));
    const parts = decoded.split('/').filter(Boolean);
    return parts.slice(-2).join('/') || fallback;
  } catch {
    return fallback;
  }
}
|
|
235
|
-
|
|
236
|
-
/**
|
|
237
|
-
* Parse all Cursor workspaces from workspaceStorage.
|
|
238
|
-
*/
|
|
239
|
-
/**
 * Parse all Cursor workspaces under workspaceStorage into normalized
 * sessions, tagging each session with its workspace's project name.
 *
 * @param {string} basePath - the workspaceStorage directory
 * @returns {Array} all sessions across all workspaces
 */
export function parseAllWorkspaces(basePath) {
  if (!existsSync(basePath)) return [];

  let workspaceIds;
  try {
    workspaceIds = readdirSync(basePath).filter(entry => {
      try {
        return statSync(join(basePath, entry)).isDirectory();
      } catch {
        return false;
      }
    });
  } catch {
    return [];
  }

  const collected = [];
  for (const id of workspaceIds) {
    const workspaceDir = join(basePath, id);
    const dbPath = join(workspaceDir, 'state.vscdb');
    if (!existsSync(dbPath)) continue;

    const projectName = getWorkspaceName(workspaceDir);
    const table = detectTable(dbPath);
    if (!table) continue;

    // Prefer the newer composer format; fall back to the legacy key
    // (which may live in either table).
    let sessions = table === 'cursorDiskKV'
      ? parseComposerChat(dbPath)
      : parseLegacyChat(dbPath, table);
    if (table === 'cursorDiskKV' && sessions.length === 0) {
      sessions = parseLegacyChat(dbPath, table);
    }

    for (const session of sessions) {
      session.project = projectName;
      collected.push(session);
    }
  }

  return collected;
}
|
package/src/scorer.js
DELETED
|
@@ -1,228 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* Combine individual metric scores into composite scores and assign archetype.
|
|
3
|
-
*
|
|
4
|
-
* Archetypes and tiers are consistent with the GitHub analysis system.
|
|
5
|
-
* 15 archetypes across 5 tiers, mapped from repo-based signals to
|
|
6
|
-
* prompt-behavior signals measured by the CLI.
|
|
7
|
-
*/
|
|
8
|
-
|
|
9
|
-
// ── Tier thresholds (match GitHub analysis) ──
|
|
10
|
-
|
|
11
|
-
// Ordered highest threshold first: computeOverallScore returns the first
// entry whose `min` the overall score meets, so this ordering matters.
// `badge` values are emoji/symbol escapes (🌟🌟🌟, 🌟🌟, ⭐, ◆, ●).
const TIERS = [
  { name: 'LEGENDARY', min: 85, badge: '\uD83C\uDF1F\uD83C\uDF1F\uD83C\uDF1F', percentile: 'Top 1%' },
  { name: 'ULTRA RARE', min: 73, badge: '\uD83C\uDF1F\uD83C\uDF1F', percentile: 'Top 5%' },
  { name: 'RARE', min: 60, badge: '\u2B50', percentile: 'Top 15%' },
  { name: 'UNCOMMON', min: 47, badge: '\u25C6', percentile: 'Top 30%' },
  { name: 'COMMON', min: 0, badge: '\u25CF', percentile: 'Top 50%' },
];
|
|
18
|
-
|
|
19
|
-
// ── Archetype definitions ──
|
|
20
|
-
|
|
21
|
-
// Archetype table consumed by computeOverallScore. Fields:
//   tierRange    - tier names this archetype is eligible for
//   match        - predicate over the per-metric scores object
//   priority     - lower wins; candidates are sorted by this before matching
//   distribution - display-only share of the population
// Each tier ends with a broad catch-all (priority 99 THE_APPRENTICE is the
// global fallback via match: () => true).
const ARCHETYPES = [
  // ── LEGENDARY (85+) ──
  {
    id: 'THE_10X_ENGINEER',
    name: 'THE 10X ENGINEER',
    tierRange: ['LEGENDARY'],
    match: (s) => true,
    priority: 1,
    distribution: '< 1%',
    description: 'Engineers classified as 10x demonstrate exceptional capability across all dimensions. They decompose complex systems methodically, debug with surgical precision, leverage AI as a strategic partner, and maintain disciplined workflows. This classification indicates mastery-level AI-augmented engineering.',
    bestFor: 'CTO, founding engineer, or principal engineer roles requiring full-stack autonomy.',
  },

  // ── ULTRA RARE (73-84) ──
  {
    id: 'THE_ARCHITECT',
    name: 'THE ARCHITECT',
    tierRange: ['ULTRA RARE'],
    match: (s) => s.decomposition >= 70 && s.sessionStructure >= 65,
    priority: 2,
    distribution: '3%',
    description: 'Architects demonstrate exceptional problem decomposition paired with disciplined session structure. They think in systems, set context deliberately, and use AI for design-level decisions rather than line-level coding. Their sessions show a plan-first, build-second pattern.',
    bestFor: 'Staff engineer, technical lead, or systems architect roles.',
  },
  {
    id: 'THE_PROFESSOR',
    name: 'THE PROFESSOR',
    tierRange: ['ULTRA RARE'],
    match: (s) => s.decomposition >= 65 && s.sessionStructure >= 70,
    priority: 3,
    distribution: '2%',
    description: 'Professors combine deep analytical thinking with structured, pedagogical workflows. Their prompts read like detailed specifications. They set context meticulously and review output critically, treating AI as a collaborator that needs clear direction.',
    bestFor: 'Technical leadership, developer education, or architecture review roles.',
  },

  // ── RARE (60-72) ──
  {
    id: 'THE_SPECIALIST',
    name: 'THE SPECIALIST',
    tierRange: ['RARE'],
    match: (s) => s.debugCycles >= 70 && s.aiLeverage >= 65,
    priority: 4,
    distribution: '6%',
    description: 'Specialists demonstrate deep expertise in targeted domains with precise, efficient debugging patterns and strategic AI tool usage. They favor depth over breadth and tend toward long, focused sessions on complex problems.',
    bestFor: 'Senior engineer roles requiring deep domain expertise and autonomous execution.',
  },
  {
    id: 'THE_SYSTEMS_THINKER',
    name: 'THE SYSTEMS THINKER',
    tierRange: ['RARE'],
    match: (s) => s.decomposition >= 65 && s.sessionStructure >= 60,
    priority: 5,
    distribution: '5%',
    description: 'Systems Thinkers break complex problems into well-structured components with deliberate context-setting. Their prompt patterns show they understand how parts connect to wholes, making them effective at designing and maintaining large systems.',
    bestFor: 'Platform engineering, infrastructure, or technical architecture roles.',
  },
  {
    id: 'THE_MAINTAINER',
    name: 'THE MAINTAINER',
    tierRange: ['RARE'],
    match: (s) => s.sessionStructure >= 60 && s.debugCycles >= 60,
    priority: 6,
    distribution: '4%',
    description: 'Maintainers combine reliable workflow discipline with solid debugging efficiency. They review before shipping, set context for each task, and resolve issues methodically. Their consistency makes them the backbone of production systems.',
    bestFor: 'Senior engineer or tech lead roles where reliability and code quality are paramount.',
  },

  // ── UNCOMMON (47-59) ──
  {
    id: 'THE_CRAFTSPERSON',
    name: 'THE CRAFTSPERSON',
    tierRange: ['UNCOMMON'],
    match: (s) => s.debugCycles >= 60 && s.sessionStructure >= 55,
    priority: 7,
    distribution: '8%',
    description: 'Craftspeople show attention to quality through precise debugging and structured sessions. They may not yet operate at the architectural level, but their care for correctness and process indicates strong engineering foundations.',
    bestFor: 'Mid-to-senior engineer roles focused on quality and reliability.',
  },
  {
    id: 'THE_BUILDER',
    name: 'THE BUILDER',
    tierRange: ['UNCOMMON'],
    match: (s) => {
      const min = Math.min(s.decomposition, s.debugCycles, s.aiLeverage, s.sessionStructure);
      return min >= 45;
    },
    priority: 8,
    distribution: '10%',
    description: 'Builders are well-rounded engineers with competence across all dimensions. No major weaknesses, no extreme spikes. They get things done consistently and adapt to different types of problems without over-specializing.',
    bestFor: 'Full-stack or generalist engineer roles requiring versatility.',
  },
  {
    id: 'THE_CONTRIBUTOR',
    name: 'THE CONTRIBUTOR',
    tierRange: ['UNCOMMON'],
    match: (s) => s.aiLeverage >= 55 && s.decomposition >= 50,
    priority: 9,
    distribution: '7%',
    description: 'Contributors show strong AI collaboration skills with good problem decomposition. They leverage AI effectively as a coding partner and break work into manageable pieces, though workflow discipline and debugging precision have room to grow.',
    bestFor: 'Engineer roles in AI-forward teams with collaborative development practices.',
  },
  {
    id: 'THE_HIDDEN_GEM',
    name: 'THE HIDDEN GEM',
    tierRange: ['UNCOMMON'],
    match: (s) => (s.debugCycles >= 55 || s.aiLeverage >= 55) && s.sessionStructure < 50,
    priority: 10,
    distribution: '6%',
    description: 'Hidden Gems demonstrate strong technical skills in debugging or AI leverage but lack workflow structure. They likely produce good output in bursts but could dramatically improve by adding session discipline. High ceiling, underdeveloped process.',
    bestFor: 'Fast-moving startup roles where output matters more than process.',
  },
  {
    id: 'THE_EXPLORER',
    name: 'THE EXPLORER',
    tierRange: ['UNCOMMON'],
    match: (s) => s.aiLeverage >= 50,
    priority: 11,
    distribution: '9%',
    description: 'Explorers are curiosity-driven engineers who use AI heavily for research, investigation, and learning. Their exploratory prompting shows intellectual breadth, though they may benefit from more structured approaches to execution.',
    bestFor: 'R&D, prototyping, or innovation-focused engineer roles.',
  },

  // ── COMMON (0-46) ──
  {
    id: 'THE_TINKERER',
    name: 'THE TINKERER',
    tierRange: ['COMMON'],
    match: (s) => s.debugCycles >= 40 && s.aiLeverage >= 35,
    priority: 12,
    distribution: '12%',
    description: 'Tinkerers show practical problem-solving instincts with emerging AI skills. They debug adequately and are beginning to use AI beyond basic code generation, but all dimensions have significant room for growth.',
    bestFor: 'Junior-to-mid engineer roles with growth opportunities.',
  },
  {
    id: 'THE_GRINDER',
    name: 'THE GRINDER',
    tierRange: ['COMMON'],
    match: (s) => s.decomposition >= 35,
    priority: 13,
    distribution: '11%',
    description: 'Grinders are high-volume, iterative engineers. They keep going through problems with persistence. The volume of work is there but the approach could benefit from more strategic thinking and structured workflows.',
    bestFor: 'Environments where persistence and volume are valued.',
  },
  {
    id: 'THE_HOBBYIST',
    name: 'THE HOBBYIST',
    tierRange: ['COMMON'],
    match: (s) => s.aiLeverage >= 30 || s.debugCycles >= 30,
    priority: 14,
    distribution: '10%',
    description: 'Hobbyists are active but early in their AI-augmented engineering journey. They show some skill in debugging or AI usage but haven\'t yet developed consistent patterns across dimensions.',
    bestFor: 'Early-career roles with mentorship and structured onboarding.',
  },
  {
    id: 'THE_APPRENTICE',
    name: 'THE APPRENTICE',
    tierRange: ['COMMON'],
    match: () => true,
    priority: 99,
    distribution: '7%',
    description: 'Apprentices are at the beginning of their AI-augmented engineering practice. Their prompt patterns suggest they\'re still learning how to effectively collaborate with AI tools. Significant growth potential across all dimensions.',
    bestFor: 'Entry-level roles with training programs and mentorship.',
  },
];
|
|
185
|
-
|
|
186
|
-
/**
 * Combine the four metric scores into a weighted overall score, then
 * assign a tier (by threshold) and an archetype within that tier.
 *
 * @param {object} metrics - per-metric results, each with a numeric `.score`
 * @returns {{overall: number, scores: object, archetype: object,
 *            tier: string, tierBadge: string, tierPercentile: string}}
 */
export function computeOverallScore(metrics) {
  const scores = {
    decomposition: metrics.decomposition.score,
    debugCycles: metrics.debugCycles.score,
    aiLeverage: metrics.aiLeverage.score,
    sessionStructure: metrics.sessionStructure.score,
  };

  // Weighted average; AI leverage carries the most weight.
  const overall = Math.round(
    scores.decomposition * 0.25 +
    scores.debugCycles * 0.25 +
    scores.aiLeverage * 0.3 +
    scores.sessionStructure * 0.2
  );

  // TIERS is ordered highest threshold first, so the first match wins.
  const tier = TIERS.find(t => overall >= t.min) || TIERS[TIERS.length - 1];

  // Candidates are restricted to the tier, tried in priority order.
  const candidates = ARCHETYPES
    .filter(a => a.tierRange.includes(tier.name))
    .sort((a, b) => a.priority - b.priority);

  const archetype =
    candidates.find(a => a.match(scores)) ||
    candidates[candidates.length - 1] ||
    ARCHETYPES[ARCHETYPES.length - 1];

  const { id, name, description, distribution, bestFor } = archetype;

  return {
    overall,
    scores,
    archetype: { id, name, description, distribution, bestFor },
    tier: tier.name,
    tierBadge: tier.badge,
    tierPercentile: tier.percentile,
  };
}
|