fraim-framework 2.0.52 → 2.0.55
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/registry/scripts/profile-server.js +2 -1
- package/dist/src/ai-manager/ai-manager.js +49 -1
- package/dist/src/ai-manager/phase-flow.js +68 -0
- package/dist/src/utils/digest-utils.js +18 -7
- package/dist/tests/test-debug-session.js +6 -2
- package/dist/tests/test-enhanced-session-init.js +6 -2
- package/dist/tests/test-mcp-lifecycle-methods.js +1 -2
- package/dist/tests/test-mcp-template-processing.js +6 -2
- package/dist/tests/test-modular-issue-tracking.js +6 -2
- package/dist/tests/test-node-compatibility.js +4 -2
- package/dist/tests/test-npm-install.js +4 -2
- package/dist/tests/test-productivity-integration.js +157 -0
- package/dist/tests/test-session-rehydration.js +1 -2
- package/dist/tests/test-telemetry.js +1 -2
- package/dist/tests/test-users-to-target-workflow.js +253 -0
- package/index.js +44 -55
- package/package.json +5 -5
- package/registry/agent-guardrails.md +62 -62
- package/registry/scripts/detect-tautological-tests.sh +38 -38
- package/registry/scripts/productivity/build-productivity-csv.mjs +242 -0
- package/registry/scripts/productivity/fetch-pr-details.mjs +144 -0
- package/registry/scripts/productivity/productivity-report.sh +147 -0
- package/registry/scripts/profile-server.ts +1 -1
- package/registry/scripts/validate-openapi-limits.ts +366 -366
- package/registry/scripts/validate-test-coverage.ts +280 -280
- package/registry/stubs/workflows/customer-development/ai-coach-phases/phase1-customer-profiling.md +11 -0
- package/registry/stubs/workflows/customer-development/ai-coach-phases/phase1-survey-scoping.md +11 -0
- package/registry/stubs/workflows/customer-development/ai-coach-phases/phase2-platform-discovery.md +11 -0
- package/registry/stubs/workflows/customer-development/ai-coach-phases/phase2-survey-build-linkedin.md +11 -0
- package/registry/stubs/workflows/customer-development/ai-coach-phases/phase3-prospect-qualification.md +11 -0
- package/registry/stubs/workflows/customer-development/ai-coach-phases/phase3-survey-build-reddit.md +11 -0
- package/registry/stubs/workflows/customer-development/ai-coach-phases/phase4-inventory-compilation.md +11 -0
- package/registry/stubs/workflows/customer-development/ai-coach-phases/phase4-survey-build-x.md +11 -0
- package/registry/stubs/workflows/customer-development/ai-coach-phases/phase5-survey-build-facebook.md +11 -0
- package/registry/stubs/workflows/customer-development/ai-coach-phases/phase6-survey-build-custom.md +11 -0
- package/registry/stubs/workflows/customer-development/ai-coach-phases/phase7-survey-dispatch.md +11 -0
- package/registry/stubs/workflows/customer-development/templates/customer-persona-template.md +11 -0
- package/registry/stubs/workflows/customer-development/templates/search-strategy-template.md +11 -0
- package/registry/stubs/workflows/customer-development/user-survey-dispatch.md +11 -0
- package/registry/stubs/workflows/customer-development/users-to-target.md +11 -0
- package/registry/stubs/workflows/productivity-report/productivity-report.md +11 -0
- package/bin/fraim.js +0 -8
- package/dist/registry/ai-manager-rules/design-phases/design.md +0 -108
- package/dist/registry/ai-manager-rules/design-phases/finalize.md +0 -60
- package/dist/registry/ai-manager-rules/design-phases/validate.md +0 -125
- package/dist/registry/ai-manager-rules/design.json +0 -97
- package/dist/registry/ai-manager-rules/implement-phases/code.md +0 -323
- package/dist/registry/ai-manager-rules/implement-phases/completeness-review.md +0 -94
- package/dist/registry/ai-manager-rules/implement-phases/finalize.md +0 -177
- package/dist/registry/ai-manager-rules/implement-phases/quality-review.md +0 -304
- package/dist/registry/ai-manager-rules/implement-phases/regression.md +0 -159
- package/dist/registry/ai-manager-rules/implement-phases/repro.md +0 -101
- package/dist/registry/ai-manager-rules/implement-phases/scoping.md +0 -93
- package/dist/registry/ai-manager-rules/implement-phases/smoke.md +0 -225
- package/dist/registry/ai-manager-rules/implement-phases/spike.md +0 -118
- package/dist/registry/ai-manager-rules/implement-phases/validate.md +0 -347
- package/dist/registry/ai-manager-rules/implement.json +0 -153
- package/dist/registry/ai-manager-rules/shared-phases/finalize.md +0 -169
- package/dist/registry/ai-manager-rules/spec-phases/finalize.md +0 -60
- package/dist/registry/ai-manager-rules/spec-phases/spec.md +0 -102
- package/dist/registry/ai-manager-rules/spec-phases/validate.md +0 -118
- package/dist/registry/ai-manager-rules/spec.json +0 -112
- package/dist/registry/ai-manager-rules/test.json +0 -98
- package/dist/registry/scripts/build-scripts-generator.js +0 -205
- package/dist/registry/scripts/fraim-config.js +0 -61
- package/dist/registry/scripts/generic-issues-api.js +0 -100
- package/dist/registry/scripts/openapi-generator.js +0 -664
- package/dist/registry/scripts/performance/profile-server.js +0 -390
- package/dist/src/ai-manager/evidence-validator.js +0 -309
- package/dist/src/fraim/issue-tracking/ado-provider.js +0 -304
- package/dist/src/fraim/issue-tracking/factory.js +0 -63
- package/dist/src/fraim/issue-tracking/github-provider.js +0 -200
- package/dist/src/fraim/issue-tracking/types.js +0 -7
- package/dist/src/fraim/issue-tracking-config.js +0 -83
- package/dist/src/static-website-middleware.js +0 -75
- package/dist/test-utils.js +0 -96
- package/dist/tests/esm-compat.js +0 -11
- package/dist/tests/test-ai-manager-phase-protocol.js +0 -147
- package/dist/tests/test-ai-manager.js +0 -118
- package/dist/tests/test-chalk-esm-issue.js +0 -159
- package/dist/tests/test-chalk-real-world.js +0 -265
- package/dist/tests/test-chalk-regression.js +0 -377
- package/dist/tests/test-chalk-resolution-issue.js +0 -304
- package/dist/tests/test-evidence-validation.js +0 -221
- package/dist/tests/test-first-run-interactive.js +0 -1
- package/dist/tests/test-fraim-install-chalk-issue.js +0 -254
- package/dist/tests/test-markdown-to-pdf.js +0 -454
- package/dist/tests/test-npm-resolution-diagnostic.js +0 -140
- package/dist/tests/test-pr-review-integration.js +0 -1
- package/dist/website/.nojekyll +0 -0
- package/dist/website/404.html +0 -101
- package/dist/website/CNAME +0 -1
- package/dist/website/README.md +0 -22
- package/dist/website/demo.html +0 -604
- package/dist/website/images/.gitkeep +0 -1
- package/dist/website/images/fraim-logo.png +0 -0
- package/dist/website/index.html +0 -290
- package/dist/website/pricing.html +0 -414
- package/dist/website/script.js +0 -55
- package/dist/website/styles.css +0 -2647
|
@@ -0,0 +1,242 @@
|
|
|
1
|
+
#!/usr/bin/env node
/**
 * Build productivity-report.csv from pre-fetched data.
 * Repository-agnostic version - works with any repository's data files.
 *
 * Reads the text files produced by productivity-report.sh out of
 * .productivity-data/ in the current working directory and aggregates
 * them into per-day / per-week / per-month CSV rows.
 *
 * Run: node build-productivity-csv.mjs after productivity-report.sh (or manual gh fetches).
 */

import fs from 'fs';
import path from 'path';

// Input files, all resolved relative to the invoking directory.
const workDir = process.cwd();
const dataDir = path.join(workDir, '.productivity-data');
const commitsPath = path.join(dataDir, 'commits.txt');
const prsDetailsPath = path.join(dataDir, 'prs_details.txt');
const prsPath = path.join(dataDir, 'prs.txt'); // fallback from list API
const issuesPath = path.join(dataDir, 'issues_closed.txt');

// Output: docs/productivity-report/productivity-report-<YYYYMMDD-HHMM>.csv
// (timestamp taken from the first 16 chars of the ISO string, separators stripped).
const outputDir = path.join(workDir, 'docs', 'productivity-report');
const timestamp = new Date().toISOString().slice(0, 16).replace(/[:-]/g, '').replace('T', '-');
const outPath = path.join(outputDir, `productivity-report-${timestamp}.csv`);

// Ensure output directory exists
if (!fs.existsSync(outputDir)) {
  fs.mkdirSync(outputDir, { recursive: true });
  console.error(`✅ Created output directory: ${outputDir}`);
}
|
|
29
|
+
|
|
30
|
+
// Utility functions for date processing
|
|
31
|
+
/**
 * Reduce an ISO-8601 timestamp to its calendar-date key ("YYYY-MM-DD").
 * Pure string truncation; no Date parsing involved, so it never throws
 * for string input.
 */
function getDateKey(iso) {
  return iso.substring(0, 10);
}
|
|
34
|
+
|
|
35
|
+
/**
 * Map an ISO-8601 timestamp to the "YYYY-MM-DD" of the Sunday that starts
 * its week.
 *
 * Fix: the original used the local-time accessors getDate()/getDay() but
 * then serialized with toISOString() (which is UTC). On hosts whose
 * timezone is not UTC the computed week start could shift by a day. All
 * input timestamps come from the GitHub API in UTC, so the computation
 * now stays in UTC throughout.
 *
 * Throws a RangeError (via toISOString on an Invalid Date) when `iso` is
 * not parseable — callers rely on that throw to skip malformed lines.
 */
function getWeekKey(iso) {
  const start = new Date(iso);
  start.setUTCDate(start.getUTCDate() - start.getUTCDay()); // Sunday as week start
  return start.toISOString().slice(0, 10);
}
|
|
41
|
+
|
|
42
|
+
/**
 * Reduce an ISO-8601 timestamp to its month key ("YYYY-MM").
 * Pure string truncation; never throws for string input.
 */
function getMonthKey(iso) {
  return iso.substring(0, 7);
}
|
|
45
|
+
|
|
46
|
+
// Aggregation buckets keyed by the period strings produced by the
// getDateKey/getWeekKey/getMonthKey helpers.
const byDay = {};
const byWeek = {};
const byMonth = {};

/**
 * Guarantee that zeroed counter objects exist for the given day, week and
 * month keys. Buckets that already exist are left untouched.
 */
function ensurePeriod(day, week, month) {
  const blank = () => ({ commits: 0, prs: 0, issuesResolved: 0, comments: 0 });
  if (!byDay[day]) byDay[day] = blank();
  if (!byWeek[week]) byWeek[week] = blank();
  if (!byMonth[month]) byMonth[month] = blank();
}
|
|
56
|
+
|
|
57
|
+
// Check if data directory exists
|
|
58
|
+
// Abort early: without the data directory there is nothing to aggregate.
if (!fs.existsSync(dataDir)) {
  console.error(`Error: Data directory not found: ${dataDir}`);
  console.error('Run productivity-report.sh first to fetch data.');
  process.exit(1);
}

console.error(`Processing data from: ${dataDir}`);

// Process commits: commits.txt holds one ISO-8601 author date per line.
if (fs.existsSync(commitsPath)) {
  try {
    const content = fs.readFileSync(commitsPath, 'utf8').trim();
    const lines = content ? content.split('\n').filter(Boolean) : [];

    for (const line of lines) {
      const trimmedLine = line.trim();
      if (!trimmedLine) continue;

      // getWeekKey throws on unparseable dates (Invalid Date ->
      // toISOString RangeError); such lines are skipped with a warning.
      try {
        const d = getDateKey(trimmedLine);
        const w = getWeekKey(trimmedLine);
        const m = getMonthKey(trimmedLine);
        ensurePeriod(d, w, m);
        byDay[d].commits++;
        byWeek[w].commits++;
        byMonth[m].commits++;
      } catch (dateError) {
        console.error(`Warning: Invalid date format in commits: ${trimmedLine}`);
      }
    }
    // NOTE(review): this count includes lines skipped above as invalid.
    console.error(`✅ Processed ${lines.length} commits`);
  } catch (error) {
    console.error(`Warning: Could not process commits file:`, error.message);
  }
} else {
  console.error(`Warning: Commits file not found: ${commitsPath}`);
}
|
|
95
|
+
|
|
96
|
+
// Process PRs: "created_at,comments,review_comments" (prs_details.txt has full counts)
|
|
97
|
+
// Process PRs. Each line is "created_at,comments,review_comments".
// prs_details.txt (from fetch-pr-details.mjs) carries real comment counts;
// prs.txt from the list API is a fallback whose counts may be absent.
const prsSource = fs.existsSync(prsDetailsPath) ? prsDetailsPath : prsPath;
if (fs.existsSync(prsSource)) {
  try {
    const content = fs.readFileSync(prsSource, 'utf8').trim();
    const lines = content ? content.split('\n').filter(Boolean) : [];

    let validPRs = 0;
    let totalCommentCount = 0;

    for (const line of lines) {
      const trimmedLine = line.trim();
      if (!trimmedLine) continue;

      const parts = trimmedLine.split(',');
      // NOTE(review): String.split always yields at least one element, so
      // this guard can never fire; harmless but dead.
      if (parts.length < 1) {
        console.error(`Warning: Invalid PR line format: ${trimmedLine}`);
        continue;
      }

      // Missing or non-numeric count fields default to 0.
      const createdAt = parts[0];
      const comments = parseInt(parts[1] || '0', 10) || 0;
      const reviewComments = parseInt(parts[2] || '0', 10) || 0;
      const totalComments = comments + reviewComments;

      // getWeekKey throws on unparseable dates; skip such rows.
      try {
        const d = getDateKey(createdAt);
        const w = getWeekKey(createdAt);
        const m = getMonthKey(createdAt);
        ensurePeriod(d, w, m);
        byDay[d].prs++;
        byDay[d].comments += totalComments;
        byWeek[w].prs++;
        byWeek[w].comments += totalComments;
        byMonth[m].prs++;
        byMonth[m].comments += totalComments;

        validPRs++;
        totalCommentCount += totalComments;
      } catch (dateError) {
        console.error(`Warning: Invalid date format in PRs: ${createdAt}`);
      }
    }

    const sourceType = prsSource.includes('details') ? 'detailed' : 'list API';
    console.error(`✅ Processed ${validPRs} PRs (${sourceType}) - Total comments: ${totalCommentCount}`);
  } catch (error) {
    console.error(`Warning: Could not process PRs file:`, error.message);
  }
} else {
  console.error(`Warning: PRs file not found: ${prsSource}`);
}
|
|
148
|
+
|
|
149
|
+
// Process closed issues (non-PR): "closed_at"
|
|
150
|
+
// Process closed non-PR issues: issues_closed.txt holds one ISO-8601
// closed_at timestamp per line.
if (fs.existsSync(issuesPath)) {
  try {
    const content = fs.readFileSync(issuesPath, 'utf8').trim();
    const lines = content ? content.split('\n').filter(Boolean) : [];

    let validIssues = 0;

    for (const line of lines) {
      const closedAt = line.trim();
      if (!closedAt) continue;

      // getWeekKey throws on unparseable dates; skip such rows.
      try {
        const d = getDateKey(closedAt);
        const w = getWeekKey(closedAt);
        const m = getMonthKey(closedAt);
        ensurePeriod(d, w, m);
        byDay[d].issuesResolved++;
        byWeek[w].issuesResolved++;
        byMonth[m].issuesResolved++;
        validIssues++;
      } catch (dateError) {
        console.error(`Warning: Invalid date format in issues: ${closedAt}`);
      }
    }
    console.error(`✅ Processed ${validIssues} closed issues`);
  } catch (error) {
    console.error(`Warning: Could not process issues file:`, error.message);
  }
} else {
  console.error(`Warning: Issues file not found: ${issuesPath}`);
}
|
|
181
|
+
|
|
182
|
+
// Build CSV content
|
|
183
|
+
// Assemble the CSV: one header row, then day rows, a blank separator line,
// week rows, another separator, month rows — each section sorted by key.
const lines = ['period_type,period,commits,prs_created,issues_resolved,comments_total'];
const sortKeys = (obj) => Object.keys(obj).sort();

// Add daily data
const dailyKeys = sortKeys(byDay);
for (const key of dailyKeys) {
  const data = byDay[key];
  lines.push(`day,${key},${data.commits},${data.prs},${data.issuesResolved},${data.comments}`);
}

// Add separator and weekly data
lines.push('');
const weeklyKeys = sortKeys(byWeek);
for (const key of weeklyKeys) {
  const data = byWeek[key];
  lines.push(`week,${key},${data.commits},${data.prs},${data.issuesResolved},${data.comments}`);
}

// Add separator and monthly data
lines.push('');
const monthlyKeys = sortKeys(byMonth);
for (const key of monthlyKeys) {
  const data = byMonth[key];
  lines.push(`month,${key},${data.commits},${data.prs},${data.issuesResolved},${data.comments}`);
}

const content = lines.join('\n');

// Write the CSV. EBUSY/EPERM (e.g. the file is held open by a spreadsheet
// on Windows) falls back to a fixed "-new" filename; any other error is fatal.
try {
  fs.writeFileSync(outPath, content, 'utf8');
  console.error(`✅ Successfully wrote: ${outPath}`);

  // Report summary statistics
  const totalDays = dailyKeys.length;
  const totalWeeks = weeklyKeys.length;
  const totalMonths = monthlyKeys.length;
  const csvLines = lines.length;

  console.error(`📊 Report Summary:`);
  console.error(` Daily periods: ${totalDays}`);
  console.error(` Weekly periods: ${totalWeeks}`);
  console.error(` Monthly periods: ${totalMonths}`);
  console.error(` CSV lines: ${csvLines}`);

} catch (error) {
  if (error.code === 'EBUSY' || error.code === 'EPERM') {
    const altPath = path.join(outputDir, 'productivity-report-new.csv');
    try {
      fs.writeFileSync(altPath, content, 'utf8');
      console.error(`⚠️ Original file locked - wrote to: ${altPath}`);
    } catch (altError) {
      console.error(`❌ Error: Could not write to either file:`, altError.message);
      process.exit(1);
    }
  } else {
    console.error(`❌ Error writing CSV file:`, error.message);
    process.exit(1);
  }
}
|
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
/**
|
|
3
|
+
* Fetch comments + review_comments for each PR (list API returns null).
|
|
4
|
+
* Reads .productivity-data/prs_list_raw.json, outputs prs_details.txt
|
|
5
|
+
*
|
|
6
|
+
* Repository-agnostic version - uses REPO_FULL environment variable or auto-detects
|
|
7
|
+
*/
|
|
8
|
+
|
|
9
|
+
import { execSync } from 'child_process';
|
|
10
|
+
import fs from 'fs';
|
|
11
|
+
import path from 'path';
|
|
12
|
+
|
|
13
|
+
// Function to detect repository from git remote
|
|
14
|
+
/**
 * Infer "owner/repo" from the `origin` git remote of the current directory.
 * Handles SSH (git@github.com:owner/repo.git) and HTTPS URLs, with or
 * without a trailing ".git". Returns null when there is no usable remote
 * or it is not a GitHub URL.
 */
function detectRepository() {
  let remoteUrl;
  try {
    remoteUrl = execSync('git remote get-url origin', { encoding: 'utf8' }).trim();
  } catch (error) {
    return null; // not a git repo, or no `origin` remote
  }
  const match = remoteUrl.match(/github\.com[:/]([^/]+\/[^/]+?)(?:\.git)?$/);
  return match === null ? null : match[1];
}
|
|
23
|
+
|
|
24
|
+
// Determine repository
|
|
25
|
+
// Resolve the target repository ("owner/repo") with this precedence:
//   1. REPO_FULL env var
//   2. REPO_OWNER + REPO_NAME env vars
//   3. auto-detection from the `origin` git remote
let REPO = process.env.REPO_FULL;

if (!REPO && process.env.REPO_OWNER && process.env.REPO_NAME) {
  REPO = `${process.env.REPO_OWNER}/${process.env.REPO_NAME}`;
}

if (!REPO) {
  REPO = detectRepository();
}

if (!REPO) {
  console.error('Error: Could not determine repository.');
  console.error('Set REPO_FULL environment variable or run from a git repository with GitHub remote.');
  console.error('Example: export REPO_FULL="owner/repo"');
  process.exit(1);
}

// Reject anything that is not "owner/repo". REPO is later interpolated into
// a `gh api` shell command, so this also restricts the characters that can
// reach that command line.
if (!/^[a-zA-Z0-9._-]+\/[a-zA-Z0-9._-]+$/.test(REPO)) {
  console.error(`Error: Invalid repository format: ${REPO}`);
  console.error('Expected format: owner/repository-name');
  process.exit(1);
}

console.error(`Fetching PR details for repository: ${REPO}`);
|
|
50
|
+
|
|
51
|
+
const DATA_DIR = path.join(process.cwd(), '.productivity-data');
const prsListPath = path.join(DATA_DIR, 'prs_list_raw.json');

// The PR list must have been produced first (by productivity-report.sh):
// one JSON object per line of the form {number, created_at}.
if (!fs.existsSync(prsListPath)) {
  console.error(`Error: ${prsListPath} not found`);
  console.error('Run the main productivity-report.sh script first, or fetch PRs list manually.');
  process.exit(1);
}

// Read and parse the JSON-Lines PR list. Unparseable lines are warned
// about and dropped rather than aborting the whole run.
let prs;
try {
  const raw = fs.readFileSync(prsListPath, 'utf8');
  prs = raw.trim().split('\n').filter(Boolean).map((line) => {
    try {
      return JSON.parse(line);
    } catch (parseError) {
      console.error(`Warning: Could not parse line: ${line}`);
      return null;
    }
  }).filter(Boolean);
} catch (error) {
  console.error(`Error reading ${prsListPath}:`, error.message);
  process.exit(1);
}

// Nothing to do: still emit an (empty) output file so downstream steps
// that expect prs_details.txt to exist can proceed.
if (prs.length === 0) {
  console.error('No PRs found in the list. Creating empty prs_details.txt');
  fs.writeFileSync(path.join(DATA_DIR, 'prs_details.txt'), '');
  process.exit(0);
}

console.error(`Processing ${prs.length} PRs...`);
|
|
85
|
+
|
|
86
|
+
// Fetch per-PR details sequentially and accumulate "created_at,comments,
// review_comments" lines. Failed fetches degrade to zero counts so the
// report still covers every PR.
const outPath = path.join(DATA_DIR, 'prs_details.txt');
const out = [];
let successCount = 0;
let errorCount = 0;

for (let i = 0; i < prs.length; i++) {
  const pr = prs[i];

  // Fix: pr.number is read from a file on disk and interpolated into a
  // shell command below, so accept only a plain positive integer instead
  // of any truthy value (shell-injection hardening).
  const prNumber = Number(pr.number);
  if (!Number.isInteger(prNumber) || prNumber <= 0 || !pr.created_at) {
    console.error(`Warning: Invalid PR data at index ${i}:`, pr);
    out.push(`${pr.created_at || 'unknown'},0,0`);
    errorCount++;
    continue;
  }

  try {
    // The list API returns null comment counts, so ask the individual PR
    // endpoint for comments/review_comments.
    const json = execSync(`gh api "repos/${REPO}/pulls/${prNumber}"`, {
      encoding: 'utf8',
      timeout: 10000 // 10 second timeout per request
    });
    const details = JSON.parse(json);
    const comments = details.comments ?? 0;
    const reviewComments = details.review_comments ?? 0;

    out.push(`${pr.created_at},${comments},${reviewComments}`);
    successCount++;
  } catch (error) {
    console.error(`Warning: Failed to fetch PR #${prNumber}:`, error.message);
    out.push(`${pr.created_at},0,0`);
    errorCount++;
  }

  // Progress reporting and incremental save every 50 PRs
  if ((i + 1) % 50 === 0) {
    const progress = Math.round(((i + 1) / prs.length) * 100);
    process.stderr.write(` Progress: ${i + 1}/${prs.length} (${progress}%) - Success: ${successCount}, Errors: ${errorCount}\n`);

    // Incremental save to prevent data loss
    try {
      fs.writeFileSync(outPath, out.join('\n'));
    } catch (saveError) {
      console.error(`Warning: Could not save incremental progress:`, saveError.message);
    }
  }
}

// Final save
try {
  fs.writeFileSync(outPath, out.join('\n'));
  console.error(`✅ Completed: Wrote prs_details.txt (${prs.length} PRs processed)`);
  console.error(` Success: ${successCount}, Errors: ${errorCount}`);

  if (errorCount > 0) {
    console.error(`⚠️ Warning: ${errorCount} PRs had errors - comment counts may be incomplete`);
  }
} catch (error) {
  console.error(`❌ Error: Could not write final output:`, error.message);
  process.exit(1);
}
|
|
@@ -0,0 +1,147 @@
|
|
|
1
|
+
#!/bin/bash
|
|
2
|
+
# Generate productivity-report.csv for any GitHub repository
|
|
3
|
+
# Requires: gh CLI (gh auth login), node, jq
|
|
4
|
+
# Usage: bash productivity-report.sh [REPO_OWNER/REPO_NAME]
|
|
5
|
+
|
|
6
|
+
set -e
|
|
7
|
+
|
|
8
|
+
# Function to detect repository from git remote
|
|
9
|
+
# Derive "owner/repo" from the git `origin` remote URL, if any.
# Fix: the original sed required a ".git" suffix, so HTTPS remotes cloned
# without ".git" fell through unchanged and the whole URL was returned.
# Now we strip everything up to "github.com:" or "github.com/", then any
# trailing slashes and an optional ".git" — covering both SSH
# (git@github.com:owner/repo.git) and HTTPS (https://github.com/owner/repo)
# forms. Non-GitHub remotes still pass through and are rejected later by
# validate_repo_format, as before.
detect_repository() {
  if git remote get-url origin >/dev/null 2>&1; then
    git remote get-url origin \
      | sed -e 's#.*github\.com[:/]##' -e 's#/*$##' -e 's#\.git$##'
  else
    echo ""
  fi
}
|
|
16
|
+
|
|
17
|
+
# Function to validate repository format
|
|
18
|
+
# Return 0 when $1 looks like "owner/repository-name" (each segment made of
# letters, digits, dots, underscores or hyphens); return 1 otherwise.
validate_repo_format() {
  local candidate="$1"
  [[ "$candidate" =~ ^[a-zA-Z0-9._-]+/[a-zA-Z0-9._-]+$ ]] && return 0 || return 1
}
|
|
26
|
+
|
|
27
|
+
# Determine repository
|
|
28
|
+
# Resolve the target repository with this precedence:
#   1. positional argument, 2. $REPO_FULL, 3. $REPO_OWNER + $REPO_NAME,
#   4. auto-detection from the git `origin` remote.
if [ -n "$1" ]; then
  # Repository provided as argument
  REPO="$1"
  echo "Using repository from argument: $REPO" >&2
elif [ -n "$REPO_FULL" ]; then
  # Repository from environment variable
  REPO="$REPO_FULL"
  echo "Using repository from REPO_FULL environment variable: $REPO" >&2
elif [ -n "$REPO_OWNER" ] && [ -n "$REPO_NAME" ]; then
  # Repository from separate environment variables
  REPO="$REPO_OWNER/$REPO_NAME"
  echo "Using repository from REPO_OWNER/REPO_NAME environment variables: $REPO" >&2
else
  # Auto-detect from git remote
  REPO=$(detect_repository)
  if [ -n "$REPO" ]; then
    echo "Auto-detected repository from git remote: $REPO" >&2
  else
    echo "Error: Could not determine repository." >&2
    echo "Usage: $0 [REPO_OWNER/REPO_NAME]" >&2
    echo "Or set environment variables:" >&2
    echo " export REPO_FULL=\"owner/repo\"" >&2
    echo " export REPO_OWNER=\"owner\" REPO_NAME=\"repo\"" >&2
    echo "Or run from a git repository with GitHub remote." >&2
    exit 1
  fi
fi

# Reject anything that is not "owner/repo" before it reaches the gh api URLs.
if ! validate_repo_format "$REPO"; then
  echo "Error: Invalid repository format: $REPO" >&2
  echo "Expected format: owner/repository-name" >&2
  exit 1
fi
|
|
62
|
+
|
|
63
|
+
# Check GitHub CLI authentication
|
|
64
|
+
# gh must be authenticated before any API calls are attempted.
if ! gh auth status >/dev/null 2>&1; then
  echo "Error: GitHub CLI not authenticated." >&2
  echo "Please run: gh auth login" >&2
  exit 1
fi

# Set up directories: raw API data is cached under .productivity-data in
# the current working directory, the final CSV goes to
# docs/productivity-report, and the helper .mjs scripts are resolved
# relative to this script's own location.
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
REPO_ROOT="$(pwd)"
DATA_DIR="$REPO_ROOT/.productivity-data"
OUTPUT_DIR="$REPO_ROOT/docs/productivity-report"

echo "Repository: $REPO" >&2
echo "Data directory: $DATA_DIR" >&2
echo "Output directory: $OUTPUT_DIR" >&2
echo "Script directory: $SCRIPT_DIR" >&2

mkdir -p "$DATA_DIR"
mkdir -p "$OUTPUT_DIR"

# Keep the generated data cache out of version control (idempotent: only
# appended when the entry is not already present).
GITIGNORE_FILE="$REPO_ROOT/.gitignore"
if [ -f "$GITIGNORE_FILE" ]; then
  if ! grep -q "^\.productivity-data/" "$GITIGNORE_FILE" 2>/dev/null; then
    echo "" >> "$GITIGNORE_FILE"
    echo "# Productivity report data (generated)" >> "$GITIGNORE_FILE"
    echo ".productivity-data/" >> "$GITIGNORE_FILE"
    echo "Added .productivity-data/ to .gitignore" >&2
  fi
else
  echo "# Productivity report data (generated)" > "$GITIGNORE_FILE"
  echo ".productivity-data/" >> "$GITIGNORE_FILE"
  echo "Created .gitignore with .productivity-data/ entry" >&2
fi
|
+
|
|
99
|
+
# Fetch raw data: commit author dates, PR list (number + created_at as JSON
# lines), and closed non-PR issue timestamps — one value per line.
echo "Fetching commits..." >&2
gh api "repos/$REPO/commits?per_page=100" --paginate --jq '.[].commit.author.date' > "$DATA_DIR/commits.txt"
COMMIT_COUNT=$(wc -l < "$DATA_DIR/commits.txt" | tr -d ' ')
echo " $COMMIT_COUNT commits" >&2

echo "Fetching PRs list..." >&2
gh api "repos/$REPO/pulls?state=all&per_page=100" --paginate --jq '.[] | {number, created_at}' > "$DATA_DIR/prs_list_raw.json"
PR_COUNT=$(wc -l < "$DATA_DIR/prs_list_raw.json" | tr -d ' ')
echo " $PR_COUNT PRs" >&2

echo "Fetching closed issues (non-PR)..." >&2
gh api "repos/$REPO/issues?state=closed&per_page=100" --paginate --jq '.[] | select(.pull_request == null) | .closed_at' > "$DATA_DIR/issues_closed.txt"
ISSUE_COUNT=$(wc -l < "$DATA_DIR/issues_closed.txt" | tr -d ' ')
echo " $ISSUE_COUNT closed issues" >&2

# Estimate time for the per-PR detail fetch (~1.5 s per gh api call).
if [ "$PR_COUNT" -gt 0 ]; then
  ESTIMATED_MINUTES=$((PR_COUNT * 15 / 10 / 60 + 1))
  echo "Fetching PR details (comments + review_comments) - estimated ${ESTIMATED_MINUTES} minutes..." >&2
else
  echo "Fetching PR details (comments + review_comments)..." >&2
fi

# Export repository for the Node.js scripts
export REPO_FULL="$REPO"
node "$SCRIPT_DIR/fetch-pr-details.mjs"

echo "Building CSV..." >&2
node "$SCRIPT_DIR/build-productivity-csv.mjs"

# Verify a CSV was produced and pick the newest one.
# Fix: the original used an unquoted `ls $OUTPUT_PATTERN`, which word-splits
# and breaks when the repository path contains spaces; iterate the glob with
# the directory part quoted instead.
LATEST_CSV=""
for f in "$OUTPUT_DIR"/productivity-report-*.csv; do
  [ -e "$f" ] || continue
  if [ -z "$LATEST_CSV" ] || [ "$f" -nt "$LATEST_CSV" ]; then
    LATEST_CSV="$f"
  fi
done

if [ -n "$LATEST_CSV" ]; then
  CSV_LINES=$(wc -l < "$LATEST_CSV" | tr -d ' ')
  FILENAME=$(basename "$LATEST_CSV")
  echo "Done. Output: docs/productivity-report/$FILENAME ($CSV_LINES lines)" >&2
  echo "" >&2
  echo "Summary:" >&2
  echo " Repository: $REPO" >&2
  echo " Commits: $COMMIT_COUNT" >&2
  echo " PRs: $PR_COUNT" >&2
  echo " Closed Issues: $ISSUE_COUNT" >&2
  echo " Report Lines: $CSV_LINES" >&2
  echo " Output File: docs/productivity-report/$FILENAME" >&2
else
  echo "Error: No CSV output file was created in $OUTPUT_DIR" >&2
  exit 1
fi
|
|
@@ -196,7 +196,7 @@ async function getSystemInformation(appName: string): Promise<void> {
|
|
|
196
196
|
try {
|
|
197
197
|
const response = await axios.post(`https://${appName}.scm.azurewebsites.net/api/command`, {
|
|
198
198
|
command: command.cmd,
|
|
199
|
-
dir: '/home'
|
|
199
|
+
dir: path.join('/', 'home')
|
|
200
200
|
}, {
|
|
201
201
|
headers: { Authorization: `Bearer ${token}` },
|
|
202
202
|
timeout: 5000
|