chekk 0.5.4 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +17 -0
- package/dist/index.js +448 -0
- package/package.json +18 -34
- package/bin/chekk.js +0 -62
- package/src/detect.js +0 -146
- package/src/display.js +0 -1153
- package/src/index.js +0 -281
- package/src/insights.js +0 -661
- package/src/metrics/ai-leverage.js +0 -186
- package/src/metrics/debug-cycles.js +0 -204
- package/src/metrics/decomposition.js +0 -158
- package/src/metrics/session-structure.js +0 -199
- package/src/metrics/token-efficiency.js +0 -258
- package/src/parsers/claude-code.js +0 -231
- package/src/parsers/codex.js +0 -188
- package/src/parsers/cursor.js +0 -281
- package/src/scorer.js +0 -228
- package/src/upload.js +0 -140
package/src/upload.js
DELETED
|
@@ -1,140 +0,0 @@
|
|
|
1
|
-
import { createInterface } from 'readline';
|
|
2
|
-
|
|
3
|
-
// Base URL of the hosted Chekk API (production Railway deployment); all endpoints below are relative to this.
const API_BASE = 'https://chekk-production.up.railway.app/api/v1';
|
|
4
|
-
|
|
5
|
-
/**
|
|
6
|
-
* Call the Chekk API to generate personalized prose from metrics.
|
|
7
|
-
*/
|
|
8
|
-
/**
 * Collapse whitespace in example prompts and cap their length for API payloads.
 *
 * Note: the length check runs against the RAW prompt (before whitespace
 * collapsing), so a prompt padded past `maxLen` with whitespace still gets
 * the '...' suffix — this matches the original behavior.
 *
 * @param {Array<{type: string, prompt?: string}>} examples - Raw examples.
 * @param {number} [maxLen=120] - Maximum prompt length before truncation.
 * @returns {Array<{type: string, prompt: string}>} Sanitized examples; [] when input is empty/nullish.
 */
function truncateExamples(examples, maxLen = 120) {
  if (!examples || !examples.length) return [];
  const collapse = (text) => text.replace(/\s+/g, ' ').trim();
  return examples.map((example) => {
    const raw = example.prompt;
    const overLimit = Boolean(raw) && raw.length > maxLen;
    const prompt = overLimit
      ? collapse(raw).slice(0, maxLen) + '...'
      : collapse(raw || '');
    return { type: example.type, prompt };
  });
}
|
|
17
|
-
|
|
18
|
-
/**
 * Call the Chekk API to generate personalized prose from metrics.
 *
 * Sends scores, details, and truncated examples for all four metrics, the
 * overall result, session stats, and (when available) a token-analytics
 * summary to the public analyze endpoint.
 *
 * @param {object} metrics - Per-metric results keyed by metric name.
 * @param {object} result - Aggregate scoring result (overall, scores, archetype, tier).
 * @param {object} sessionStats - Session-level statistics, forwarded as-is.
 * @param {object|null} [tokenEfficiency=null] - Optional token analytics; only sent when it hasData.
 * @returns {Promise<object>} Parsed JSON response from the API.
 * @throws {Error} When the API responds with a non-2xx status.
 */
export async function generateProse(metrics, result, sessionStats, tokenEfficiency = null) {
  // Shape a single metric for the payload (key order matters for byte-stable JSON).
  const metricBody = (metric) => ({
    score: metric.score,
    details: metric.details,
    examples: truncateExamples(metric.examples),
  });

  const hasTokenData = Boolean(tokenEfficiency && tokenEfficiency.hasData);

  const payload = {
    metrics: {
      decomposition: metricBody(metrics.decomposition),
      debugCycles: metricBody(metrics.debugCycles),
      aiLeverage: metricBody(metrics.aiLeverage),
      sessionStructure: metricBody(metrics.sessionStructure),
    },
    result: {
      overall: result.overall,
      scores: result.scores,
      archetype: result.archetype,
      tier: result.tier,
    },
    sessionStats,
    // Include token analytics summary for richer prose generation
    tokenEfficiency: hasTokenData
      ? {
          grandTotal: tokenEfficiency.grandTotal,
          estimatedCostTotal: tokenEfficiency.estimatedCostTotal,
          contextRereadRatio: tokenEfficiency.contextRereadRatio,
          composition: tokenEfficiency.composition,
          avgTokensPerExchange: tokenEfficiency.avgTokensPerExchange,
          sessionsAnalyzed: tokenEfficiency.sessionsAnalyzed,
        }
      : null,
  };

  const response = await fetch(`${API_BASE}/public/cli/analyze`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(payload),
  });

  if (!response.ok) {
    throw new Error(`API returned ${response.status}`);
  }

  return response.json();
}
|
|
72
|
-
|
|
73
|
-
/**
|
|
74
|
-
* Ask user if they want to see detailed breakdown.
|
|
75
|
-
*/
|
|
76
|
-
/**
 * Ask the user on stdin whether to show the detailed breakdown.
 *
 * @returns {Promise<boolean>} True when the answer starts with 'y' (case-insensitive).
 */
export function askVerbose() {
  return new Promise((resolve) => {
    const prompt = createInterface({
      input: process.stdin,
      output: process.stdout,
    });

    prompt.question(' See detailed breakdown? (y/n) ', (reply) => {
      prompt.close();
      // Accept anything beginning with y/Y ("y", "yes", "Yep", ...).
      resolve(/^y/.test(reply.toLowerCase()));
    });
  });
}
|
|
89
|
-
|
|
90
|
-
/**
|
|
91
|
-
* Ask user if they want to claim their profile.
|
|
92
|
-
*/
|
|
93
|
-
/**
 * Ask the user on stdin whether to push their score to chekk.dev.
 *
 * @returns {Promise<boolean>} True when the answer starts with 'y' (case-insensitive).
 */
export function askClaim() {
  return new Promise((resolve) => {
    const prompt = createInterface({
      input: process.stdin,
      output: process.stdout,
    });

    prompt.question(' Push your score to chekk.dev? Your raw prompts never leave your machine. (y/n) ', (reply) => {
      prompt.close();
      // Accept anything beginning with y/Y ("y", "yes", "Yep", ...).
      resolve(/^y/.test(reply.toLowerCase()));
    });
  });
}
|
|
106
|
-
|
|
107
|
-
/**
|
|
108
|
-
* Upload score and get a claim URL.
|
|
109
|
-
*/
|
|
110
|
-
/**
 * Upload the user's score to the Chekk API and get back a claim URL.
 *
 * Only per-metric scores (not details or examples) are uploaded, alongside
 * the aggregate result, generated prose, session stats, and a claim timestamp.
 *
 * @param {object} metrics - Per-metric results; only each `.score` is sent.
 * @param {object} result - Aggregate scoring result (overall, archetype, tier).
 * @param {object} sessionStats - Session-level statistics; `.tools` is also sent top-level.
 * @param {object} prose - Prose previously generated by the API, echoed back.
 * @returns {Promise<object>} Parsed JSON response (claim details) from the API.
 * @throws {Error} When the API responds with a non-2xx status.
 */
export async function uploadAndClaim(metrics, result, sessionStats, prose) {
  // Uploaded metric names, in payload key order.
  const metricNames = ['decomposition', 'debugCycles', 'aiLeverage', 'sessionStructure'];

  const payload = {
    metrics: Object.fromEntries(metricNames.map((name) => [name, metrics[name].score])),
    result: {
      overall: result.overall,
      archetype: result.archetype,
      tier: result.tier,
    },
    prose,
    sessionStats,
    tools: sessionStats.tools,
    claimedAt: new Date().toISOString(),
  };

  const response = await fetch(`${API_BASE}/public/cli/claim`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(payload),
  });

  if (!response.ok) {
    throw new Error(`Claim API returned ${response.status}`);
  }

  return response.json();
}
|