@equilateral_ai/mindmeld 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +300 -0
- package/hooks/README.md +494 -0
- package/hooks/pre-compact.js +392 -0
- package/hooks/session-start.js +264 -0
- package/package.json +90 -0
- package/scripts/harvest.js +561 -0
- package/scripts/init-project.js +437 -0
- package/scripts/inject.js +388 -0
- package/src/collaboration/CollaborationPrompt.js +460 -0
- package/src/core/AlertEngine.js +813 -0
- package/src/core/AlertNotifier.js +363 -0
- package/src/core/CorrelationAnalyzer.js +774 -0
- package/src/core/CurationEngine.js +688 -0
- package/src/core/LLMPatternDetector.js +508 -0
- package/src/core/LoadBearingDetector.js +242 -0
- package/src/core/NotificationService.js +1032 -0
- package/src/core/PatternValidator.js +355 -0
- package/src/core/README.md +160 -0
- package/src/core/RapportOrchestrator.js +446 -0
- package/src/core/RelevanceDetector.js +577 -0
- package/src/core/StandardsIngestion.js +575 -0
- package/src/core/TeamLoadBearingDetector.js +431 -0
- package/src/database/dbOperations.js +105 -0
- package/src/handlers/activity/activityGetMe.js +98 -0
- package/src/handlers/activity/activityGetTeam.js +130 -0
- package/src/handlers/alerts/alertsAcknowledge.js +91 -0
- package/src/handlers/alerts/alertsGet.js +250 -0
- package/src/handlers/collaborators/collaboratorAdd.js +201 -0
- package/src/handlers/collaborators/collaboratorInvite.js +218 -0
- package/src/handlers/collaborators/collaboratorList.js +88 -0
- package/src/handlers/collaborators/collaboratorRemove.js +127 -0
- package/src/handlers/collaborators/inviteAccept.js +122 -0
- package/src/handlers/context/contextGet.js +57 -0
- package/src/handlers/context/invariantsGet.js +74 -0
- package/src/handlers/context/loopsGet.js +82 -0
- package/src/handlers/context/notesCreate.js +74 -0
- package/src/handlers/context/purposeGet.js +78 -0
- package/src/handlers/correlations/correlationsDeveloperGet.js +226 -0
- package/src/handlers/correlations/correlationsGet.js +93 -0
- package/src/handlers/correlations/correlationsProjectGet.js +161 -0
- package/src/handlers/github/githubConnectionStatus.js +49 -0
- package/src/handlers/github/githubDiscoverPatterns.js +364 -0
- package/src/handlers/github/githubOAuthCallback.js +166 -0
- package/src/handlers/github/githubOAuthStart.js +59 -0
- package/src/handlers/github/githubPatternsReview.js +109 -0
- package/src/handlers/github/githubReposList.js +105 -0
- package/src/handlers/helpers/checkSuperAdmin.js +85 -0
- package/src/handlers/helpers/dbOperations.js +53 -0
- package/src/handlers/helpers/errorHandler.js +49 -0
- package/src/handlers/helpers/index.js +106 -0
- package/src/handlers/helpers/lambdaWrapper.js +60 -0
- package/src/handlers/helpers/responseUtil.js +55 -0
- package/src/handlers/helpers/subscriptionTiers.js +1168 -0
- package/src/handlers/notifications/getPreferences.js +84 -0
- package/src/handlers/notifications/sendNotification.js +170 -0
- package/src/handlers/notifications/updatePreferences.js +316 -0
- package/src/handlers/patterns/patternUsagePost.js +182 -0
- package/src/handlers/patterns/patternViolationPost.js +185 -0
- package/src/handlers/projects/projectCreate.js +107 -0
- package/src/handlers/projects/projectDelete.js +82 -0
- package/src/handlers/projects/projectGet.js +95 -0
- package/src/handlers/projects/projectUpdate.js +118 -0
- package/src/handlers/reports/aiLeverage.js +206 -0
- package/src/handlers/reports/engineeringInvestment.js +132 -0
- package/src/handlers/reports/riskForecast.js +186 -0
- package/src/handlers/reports/standardsRoi.js +162 -0
- package/src/handlers/scheduled/analyzeCorrelations.js +178 -0
- package/src/handlers/scheduled/analyzeGitHistory.js +510 -0
- package/src/handlers/scheduled/generateAlerts.js +135 -0
- package/src/handlers/scheduled/refreshActivity.js +21 -0
- package/src/handlers/scheduled/scanCompliance.js +334 -0
- package/src/handlers/sessions/sessionEndPost.js +180 -0
- package/src/handlers/sessions/sessionStandardsPost.js +135 -0
- package/src/handlers/stripe/addonManagePost.js +240 -0
- package/src/handlers/stripe/billingPortalPost.js +93 -0
- package/src/handlers/stripe/enterpriseCheckoutPost.js +272 -0
- package/src/handlers/stripe/seatsUpdatePost.js +185 -0
- package/src/handlers/stripe/subscriptionCancelDelete.js +169 -0
- package/src/handlers/stripe/subscriptionCreatePost.js +221 -0
- package/src/handlers/stripe/subscriptionUpdatePut.js +163 -0
- package/src/handlers/stripe/webhookPost.js +454 -0
- package/src/handlers/users/cognitoPostConfirmation.js +150 -0
- package/src/handlers/users/userEntitlementsGet.js +89 -0
- package/src/handlers/users/userGet.js +114 -0
- package/src/handlers/webhooks/githubWebhook.js +223 -0
- package/src/index.js +969 -0
|
@@ -0,0 +1,510 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Analyze Git History Scheduled Job
|
|
3
|
+
* Extracts BluOptima-style metrics from git repositories
|
|
4
|
+
*
|
|
5
|
+
* Schedule: Daily at 2am (or triggered via API)
|
|
6
|
+
* Auth: None (Lambda scheduled event)
|
|
7
|
+
*
|
|
8
|
+
* Features:
|
|
9
|
+
* - Commit frequency and volume metrics
|
|
10
|
+
* - Working pattern analysis (time of day, day of week)
|
|
11
|
+
* - File churn and bus factor detection
|
|
12
|
+
* - Standards compliance scanning
|
|
13
|
+
*/
|
|
14
|
+
|
|
15
|
+
const { wrapHandler, executeQuery, createSuccessResponse } = require('./helpers');
|
|
16
|
+
const { execSync } = require('child_process');
|
|
17
|
+
const fs = require('fs');
|
|
18
|
+
const path = require('path');
|
|
19
|
+
|
|
20
|
+
/**
|
|
21
|
+
* Main handler - wrapped with wrapHandler for consistent error handling
|
|
22
|
+
*/
|
|
23
|
+
exports.handler = wrapHandler(async (event, context) => {
|
|
24
|
+
// Get all repositories that need analysis
|
|
25
|
+
const repos = await getRepositoriesToAnalyze();
|
|
26
|
+
|
|
27
|
+
if (repos.length === 0) {
|
|
28
|
+
return createSuccessResponse({ repositories_analyzed: 0 }, 'No repositories to analyze');
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
const results = [];
|
|
32
|
+
for (const repo of repos) {
|
|
33
|
+
try {
|
|
34
|
+
const result = await analyzeRepository(repo);
|
|
35
|
+
results.push({ repo_id: repo.repo_id, success: true, ...result });
|
|
36
|
+
} catch (error) {
|
|
37
|
+
results.push({ repo_id: repo.repo_id, success: false, error: error.message });
|
|
38
|
+
}
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
return createSuccessResponse({
|
|
42
|
+
repositories_analyzed: results.length,
|
|
43
|
+
results
|
|
44
|
+
}, 'Git history analysis complete');
|
|
45
|
+
});
|
|
46
|
+
|
|
47
|
+
/**
 * Fetch up to 10 repositories that are due for analysis: never analyzed,
 * or last analyzed more than one day ago (oldest / never-analyzed first).
 */
async function getRepositoriesToAnalyze() {
  const { rows } = await executeQuery(`
    SELECT repo_id, Company_ID, repo_url, repo_name, default_branch,
           clone_path, last_commit_sha, settings
    FROM rapport.git_repositories
    WHERE last_analyzed_at IS NULL
       OR last_analyzed_at < NOW() - INTERVAL '1 day'
    ORDER BY last_analyzed_at ASC NULLS FIRST
    LIMIT 10
  `);
  return rows;
}
|
|
62
|
+
|
|
63
|
+
/**
 * Run the full analysis pipeline for a single repository:
 * fetch the recent commit log, aggregate developer/file/working-pattern
 * metrics, persist them, run the compliance scan and bus-factor update,
 * then stamp the repository as analyzed.
 *
 * @param {Object} repo - Row from rapport.git_repositories.
 * @returns {Object} Summary counts for the job result payload.
 */
async function analyzeRepository(repo) {
  const periodEnd = new Date();
  const periodStart = new Date();
  periodStart.setDate(periodStart.getDate() - 7); // analysis window: trailing 7 days

  const commits = await fetchGitLog(repo, periodStart);
  if (!commits || commits.length === 0) {
    return { commits: 0, developers: 0 };
  }

  // Aggregate the three metric families from the same commit list.
  const devMetrics = aggregateDeveloperMetrics(commits, periodStart, periodEnd);
  const fileStats = aggregateFileMetrics(commits, periodStart, periodEnd);
  const workPatterns = analyzeWorkingPatterns(commits, periodStart, periodEnd);

  // Persist (sequentially, matching prior behavior).
  await storeDeveloperMetrics(repo.repo_id, devMetrics);
  await storeFileMetrics(repo.repo_id, fileStats);
  await storeWorkingPatterns(repo.repo_id, workPatterns);

  const compliance = await scanForCompliance(repo, commits);
  await updateBusFactor(repo.repo_id);

  // Record analysis time and newest SHA (commits[0] is the most recent).
  await executeQuery(`
    UPDATE rapport.git_repositories
    SET last_analyzed_at = NOW(),
        last_commit_sha = $2
    WHERE repo_id = $1
  `, [repo.repo_id, commits[0]?.sha || repo.last_commit_sha]);

  return {
    commits: commits.length,
    developers: Object.keys(devMetrics).length,
    files: Object.keys(fileStats).length,
    compliance
  };
}
|
|
109
|
+
|
|
110
|
+
/**
 * Fetch the commit log for a repository via the GitHub REST API.
 *
 * @param {Object} repo - Row from rapport.git_repositories (uses repo_url).
 * @param {Date} since - Only commits after this timestamp are returned.
 * @returns {Promise<Array<Object>>} Commits as {sha, message, author_email,
 *   author_name, committed_at, files}. Returns [] on unparsable URLs or
 *   non-200 API responses; rejects on network/parse errors and timeout.
 *
 * Limitations (unchanged): only the first page (100 commits) is fetched,
 * and per-file stats require a separate API call per commit, so files is [].
 */
async function fetchGitLog(repo, since) {
  // For now, use GitHub API. In production, could use local clone for speed
  const sinceDate = since.toISOString();

  // Parse "owner/name" from an https or ssh GitHub URL.
  // Fix: the previous pattern ([^/.]+) stopped at the first dot, truncating
  // repository names like "next.js" to "next". Use a lazy match with an
  // optional ".git" suffix instead.
  const match = repo.repo_url.match(/github\.com[/:]([^/]+)\/([^/]+?)(?:\.git)?(?:[/?#]|$)/);
  if (!match) {
    console.warn(`Cannot parse GitHub URL: ${repo.repo_url}`);
    return [];
  }

  const [, owner, repoName] = match;

  // Use GitHub API via https
  const https = require('https');

  return new Promise((resolve, reject) => {
    const options = {
      hostname: 'api.github.com',
      // Encode the ISO timestamp (contains ':') for the query string.
      path: `/repos/${owner}/${repoName}/commits?since=${encodeURIComponent(sinceDate)}&per_page=100`,
      method: 'GET',
      headers: {
        'User-Agent': 'MindMeld-GitAnalyzer/1.0',
        'Accept': 'application/vnd.github.v3+json'
      },
      timeout: 30000
    };

    // Add auth if available (raises rate limits, allows private repos).
    if (process.env.GITHUB_TOKEN) {
      options.headers['Authorization'] = `token ${process.env.GITHUB_TOKEN}`;
    }

    const req = https.request(options, (res) => {
      let data = '';
      res.on('data', chunk => data += chunk);
      res.on('end', () => {
        try {
          if (res.statusCode !== 200) {
            // Non-200 is treated as "no commits" rather than a hard failure.
            console.warn(`GitHub API returned ${res.statusCode}: ${data}`);
            resolve([]);
            return;
          }
          const commits = JSON.parse(data);
          resolve(commits.map(c => ({
            sha: c.sha,
            message: c.commit.message,
            // Optional chaining guards the rare case of a missing author block.
            author_email: c.commit.author?.email,
            author_name: c.commit.author?.name,
            committed_at: new Date(c.commit.author?.date),
            // Note: file stats require separate API call per commit
            files: []
          })));
        } catch (e) {
          reject(e);
        }
      });
    });

    req.on('error', reject);
    req.on('timeout', () => {
      req.destroy();
      reject(new Error('GitHub API timeout'));
    });

    req.end();
  });
}
|
|
182
|
+
|
|
183
|
+
/**
 * Aggregate per-developer commit metrics from a parsed git log.
 *
 * @param {Array<Object>} gitLog - Commits with author_email, author_name,
 *   committed_at (Date) and optional files [{additions, deletions}].
 * @param {Date} periodStart - Start of the reporting window.
 * @param {Date} periodEnd - End of the reporting window.
 * @returns {Object} Map of developer email -> aggregated metrics.
 */
function aggregateDeveloperMetrics(gitLog, periodStart, periodEnd) {
  const metrics = {};

  for (const commit of gitLog) {
    const email = commit.author_email;

    if (!metrics[email]) {
      metrics[email] = {
        developer_email: email,
        developer_name: commit.author_name,
        period_start: periodStart,
        period_end: periodEnd,
        commit_count: 0,
        lines_added: 0,
        lines_removed: 0,
        files_changed: 0,
        active_days: new Set(),
        commit_times: [],
        first_commit_time: null,
        last_commit_time: null
      };
    }

    const m = metrics[email];
    m.commit_count++;

    // Track active days (UTC calendar dates via toISOString)
    const dateStr = commit.committed_at.toISOString().split('T')[0];
    m.active_days.add(dateStr);

    // Track commit times
    m.commit_times.push(commit.committed_at);

    // Track earliest/latest time-of-day (local time, "HH:MM:SS").
    // Fix: zero-padded time strings compare correctly lexicographically.
    // The previous code compared the full commit Date against a Date built
    // from "1970-01-01T<time>", which is effectively constant for modern
    // timestamps, so first_commit_time never updated after the first commit
    // and last_commit_time updated on every commit regardless of time.
    const timeStr = commit.committed_at.toTimeString().split(' ')[0];
    if (!m.first_commit_time || timeStr < m.first_commit_time) {
      m.first_commit_time = timeStr;
    }
    if (!m.last_commit_time || timeStr > m.last_commit_time) {
      m.last_commit_time = timeStr;
    }

    // Aggregate file stats (if available)
    if (commit.files) {
      m.files_changed += commit.files.length;
      for (const file of commit.files) {
        m.lines_added += file.additions || 0;
        m.lines_removed += file.deletions || 0;
      }
    }
  }

  // Finalize: collapse the day Set to a count and derive the average.
  for (const email in metrics) {
    const m = metrics[email];
    m.active_days = m.active_days.size;
    m.avg_commits_per_active_day = m.active_days > 0
      ? (m.commit_count / m.active_days).toFixed(2)
      : 0;
    delete m.commit_times; // Don't store raw times
  }

  return metrics;
}
|
|
250
|
+
|
|
251
|
+
/**
 * Aggregate per-file churn metrics from a parsed git log.
 * Files are keyed by path; contributor uniqueness drives the bus factor.
 *
 * @param {Array<Object>} gitLog - Commits with author_email, committed_at,
 *   and files [{filename|path, additions, deletions}].
 * @param {Date} periodStart - Start of the reporting window.
 * @param {Date} periodEnd - End of the reporting window.
 * @returns {Object} Map of file path -> churn metrics.
 */
function aggregateFileMetrics(gitLog, periodStart, periodEnd) {
  const byPath = {};

  for (const commit of gitLog) {
    for (const file of commit.files || []) {
      const filePath = file.filename || file.path;
      if (!filePath) continue;

      let entry = byPath[filePath];
      if (!entry) {
        entry = byPath[filePath] = {
          file_path: filePath,
          period_start: periodStart,
          period_end: periodEnd,
          change_count: 0,
          lines_added: 0,
          lines_removed: 0,
          contributors: new Set(),
          last_modified_by: null,
          last_modified_at: null
        };
      }

      entry.change_count += 1;
      entry.lines_added += file.additions || 0;
      entry.lines_removed += file.deletions || 0;
      entry.contributors.add(commit.author_email);
      entry.last_modified_by = commit.author_email;
      entry.last_modified_at = commit.committed_at;
    }
  }

  // Finalize: replace the contributor Set with derived counts.
  for (const entry of Object.values(byPath)) {
    entry.unique_contributors = entry.contributors.size;
    entry.is_single_contributor = entry.unique_contributors === 1;
    entry.bus_factor = entry.unique_contributors;
    delete entry.contributors;
  }

  return byPath;
}
|
|
299
|
+
|
|
300
|
+
/**
 * Derive per-developer working-pattern metrics (hour/day histograms,
 * weekend and after-hours counts, peak hour/day) from a git log.
 * Hours/days use the process-local timezone via getHours()/getDay().
 *
 * @param {Array<Object>} gitLog - Commits with author_email, committed_at.
 * @param {Date} periodStart - Start of the reporting window.
 * @param {Date} periodEnd - End of the reporting window.
 * @returns {Object} Map of developer email -> pattern summary.
 */
function analyzeWorkingPatterns(gitLog, periodStart, periodEnd) {
  const byDeveloper = {};

  const emptyPattern = (email) => ({
    developer_email: email,
    period_start: periodStart,
    period_end: periodEnd,
    commits_by_hour: {},
    commits_by_day: {},
    weekend_commits: 0,
    after_hours_commits: 0
  });

  for (const commit of gitLog) {
    const email = commit.author_email;
    const entry = byDeveloper[email] || (byDeveloper[email] = emptyPattern(email));

    const hour = commit.committed_at.getHours();
    const day = commit.committed_at.getDay();

    entry.commits_by_hour[hour] = (entry.commits_by_hour[hour] || 0) + 1;
    entry.commits_by_day[day] = (entry.commits_by_day[day] || 0) + 1;

    // Weekend: Sunday (0) or Saturday (6)
    if (day === 0 || day === 6) entry.weekend_commits++;

    // After hours: before 09:00 or 18:00 onward
    if (hour < 9 || hour >= 18) entry.after_hours_commits++;
  }

  // Derive the busiest hour/day for each developer.
  for (const entry of Object.values(byDeveloper)) {
    entry.peak_hour = findPeak(entry.commits_by_hour);
    entry.peak_day = findPeak(entry.commits_by_day);
  }

  return byDeveloper;
}
|
|
351
|
+
|
|
352
|
+
/**
 * Return the numeric key with the highest count in a {key: count} map.
 * Ties keep the first key encountered; returns null for an empty map
 * (or when no count is positive).
 *
 * @param {Object} obj - Map of numeric-string keys to counts.
 * @returns {?number} Peak key as a number, or null.
 */
function findPeak(obj) {
  let maxKey = null;
  let maxVal = 0;
  for (const key in obj) {
    if (obj[key] > maxVal) {
      maxVal = obj[key];
      // Always pass the radix: bare parseInt is a classic footgun.
      maxKey = Number.parseInt(key, 10);
    }
  }
  return maxKey;
}
|
|
363
|
+
|
|
364
|
+
/**
 * Upsert one rapport.developer_metrics row per developer for the period.
 * Conflict key: (repo_id, developer_email, period_start, period_end).
 */
async function storeDeveloperMetrics(repoId, metrics) {
  for (const m of Object.values(metrics)) {
    await executeQuery(`
      INSERT INTO rapport.developer_metrics (
        repo_id, developer_email, developer_name,
        period_start, period_end,
        commit_count, lines_added, lines_removed, files_changed,
        first_commit_time, last_commit_time,
        active_days, avg_commits_per_active_day
      ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
      ON CONFLICT (repo_id, developer_email, period_start, period_end)
      DO UPDATE SET
        commit_count = EXCLUDED.commit_count,
        lines_added = EXCLUDED.lines_added,
        lines_removed = EXCLUDED.lines_removed,
        files_changed = EXCLUDED.files_changed,
        first_commit_time = EXCLUDED.first_commit_time,
        last_commit_time = EXCLUDED.last_commit_time,
        active_days = EXCLUDED.active_days,
        avg_commits_per_active_day = EXCLUDED.avg_commits_per_active_day
    `, [
      repoId, m.developer_email, m.developer_name,
      m.period_start, m.period_end,
      m.commit_count, m.lines_added, m.lines_removed, m.files_changed,
      m.first_commit_time, m.last_commit_time,
      m.active_days, m.avg_commits_per_active_day
    ]);
  }
}
|
|
397
|
+
|
|
398
|
+
/**
 * Upsert one rapport.file_metrics row per file for the period.
 * Conflict key: (repo_id, file_path, period_start, period_end).
 */
async function storeFileMetrics(repoId, metrics) {
  for (const m of Object.values(metrics)) {
    await executeQuery(`
      INSERT INTO rapport.file_metrics (
        repo_id, file_path, period_start, period_end,
        change_count, lines_added, lines_removed,
        unique_contributors, is_single_contributor, bus_factor,
        last_modified_by, last_modified_at
      ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)
      ON CONFLICT (repo_id, file_path, period_start, period_end)
      DO UPDATE SET
        change_count = EXCLUDED.change_count,
        lines_added = EXCLUDED.lines_added,
        lines_removed = EXCLUDED.lines_removed,
        unique_contributors = EXCLUDED.unique_contributors,
        is_single_contributor = EXCLUDED.is_single_contributor,
        bus_factor = EXCLUDED.bus_factor,
        last_modified_by = EXCLUDED.last_modified_by,
        last_modified_at = EXCLUDED.last_modified_at
    `, [
      repoId, m.file_path, m.period_start, m.period_end,
      m.change_count, m.lines_added, m.lines_removed,
      m.unique_contributors, m.is_single_contributor, m.bus_factor,
      m.last_modified_by, m.last_modified_at
    ]);
  }
}
|
|
429
|
+
|
|
430
|
+
/**
 * Upsert one rapport.working_patterns row per developer for the period.
 * Hour/day histograms are serialized to JSON before storage.
 * Conflict key: (repo_id, developer_email, period_start, period_end).
 */
async function storeWorkingPatterns(repoId, patterns) {
  for (const p of Object.values(patterns)) {
    await executeQuery(`
      INSERT INTO rapport.working_patterns (
        repo_id, developer_email, period_start, period_end,
        commits_by_hour, commits_by_day,
        peak_hour, peak_day,
        weekend_commits, after_hours_commits
      ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
      ON CONFLICT (repo_id, developer_email, period_start, period_end)
      DO UPDATE SET
        commits_by_hour = EXCLUDED.commits_by_hour,
        commits_by_day = EXCLUDED.commits_by_day,
        peak_hour = EXCLUDED.peak_hour,
        peak_day = EXCLUDED.peak_day,
        weekend_commits = EXCLUDED.weekend_commits,
        after_hours_commits = EXCLUDED.after_hours_commits
    `, [
      repoId, p.developer_email, p.period_start, p.period_end,
      JSON.stringify(p.commits_by_hour), JSON.stringify(p.commits_by_day),
      p.peak_hour, p.peak_day,
      p.weekend_commits, p.after_hours_commits
    ]);
  }
}
|
|
459
|
+
|
|
460
|
+
/**
 * Scan commits for standards compliance.
 *
 * Currently a stub: it loads the enabled pattern definitions but does not
 * fetch any file content, so no scanning is performed and a placeholder
 * result is returned unconditionally.
 *
 * @param {Object} repo - Repository row (unused by the stub).
 * @param {Array<Object>} gitLog - Parsed commits (unused by the stub).
 * @returns {Object} {scanned: false, reason} placeholder.
 */
async function scanForCompliance(repo, gitLog) {
  // Get patterns to scan for
  const patternsResult = await executeQuery(`
    SELECT pattern_id, pattern_name, pattern_type, language, regex_pattern, severity
    FROM rapport.code_patterns
    WHERE enabled = true
  `);
  // NOTE(review): `patterns` is fetched but never used below — presumably
  // kept for the planned content-scanning implementation; confirm intent.
  const patterns = patternsResult.rows;

  // This would need file content - for now, return placeholder
  // In production, fetch file content via GitHub API or local clone
  return {
    scanned: false,
    reason: 'File content scanning requires GitHub token with contents permission'
  };
}
|
|
479
|
+
|
|
480
|
+
/**
 * Update bus factor analysis.
 *
 * Upserts rapport.knowledge_silos rows derived from file_metrics: for each
 * single-contributor file, the top-level path segment becomes the "module",
 * its last modifier becomes the primary contributor, and a risk level is
 * assigned from the contributor count (1 -> critical, 2 -> high,
 * <=3 -> medium, else low). contribution_percentage is 100/contributors,
 * with NULLIF guarding against division by zero.
 *
 * @param {string|number} repoId - Repository whose silos are refreshed.
 */
async function updateBusFactor(repoId) {
  // Single set-based upsert; conflict key is (repo_id, module_path).
  await executeQuery(`
    INSERT INTO rapport.knowledge_silos (repo_id, module_path, primary_contributor, contribution_percentage, total_contributors, risk_level)
    SELECT
      $1,
      SUBSTRING(file_path FROM '^[^/]+'),
      last_modified_by,
      100.0 / NULLIF(unique_contributors, 0),
      unique_contributors,
      CASE
        WHEN unique_contributors = 1 THEN 'critical'
        WHEN unique_contributors = 2 THEN 'high'
        WHEN unique_contributors <= 3 THEN 'medium'
        ELSE 'low'
      END
    FROM rapport.file_metrics
    WHERE repo_id = $1
      AND is_single_contributor = true
    ON CONFLICT (repo_id, module_path) DO UPDATE SET
      primary_contributor = EXCLUDED.primary_contributor,
      contribution_percentage = EXCLUDED.contribution_percentage,
      total_contributors = EXCLUDED.total_contributors,
      risk_level = EXCLUDED.risk_level,
      last_analyzed_at = NOW()
  `, [repoId]);
}
|
|
509
|
+
|
|
510
|
+
// Removed manual success/failure helpers - using wrapHandler pattern
|
|
@@ -0,0 +1,135 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Generate Alerts Scheduled Job
|
|
3
|
+
* Generates attention alerts for developers who may need support
|
|
4
|
+
*
|
|
5
|
+
* Schedule: Every hour
|
|
6
|
+
* Auth: None (Lambda scheduled event)
|
|
7
|
+
*
|
|
8
|
+
* Alert Types:
|
|
9
|
+
* - stale_commits: No commits in X days
|
|
10
|
+
* - low_conversion: Low session-to-commit conversion rate
|
|
11
|
+
* - no_ai_usage: Active committer not using AI assistance
|
|
12
|
+
* - high_violation_rate: Developer frequently violating standards
|
|
13
|
+
* - stalled_patterns: Developer's patterns not maturing/being used
|
|
14
|
+
* - declining_activity: Activity trending downward
|
|
15
|
+
*/
|
|
16
|
+
|
|
17
|
+
const { wrapHandler, createSuccessResponse, executeQuery } = require('./helpers');
|
|
18
|
+
const { AlertEngine, setExecuteQuery } = require('../../core/AlertEngine');
|
|
19
|
+
|
|
20
|
+
// Initialize AlertEngine with database connection: inject the shared
// executeQuery at module load so the engine's queries use the same helper.
setExecuteQuery(executeQuery);
|
|
22
|
+
|
|
23
|
+
// Configuration can be overridden via environment variables
/**
 * Build an AlertEngine threshold-override object from environment variables.
 * Only sections with at least one env var set appear in the result, so an
 * empty object means "use AlertEngine defaults".
 *
 * @returns {Object} Partial threshold configuration.
 */
const getThresholdConfig = () => {
  const config = {};

  // Ensure a nested section exists, then return it.
  const section = (name) => (config[name] = config[name] || {});

  // Numeric threshold overrides: [env var, section, key].
  // Table-driven to avoid the previous copy-paste blocks, and parsed with
  // an explicit radix.
  const numericOverrides = [
    ['ALERT_STALE_COMMITS_CRITICAL_DAYS', 'staleCommits', 'criticalDays'],
    ['ALERT_STALE_COMMITS_WARNING_DAYS', 'staleCommits', 'warningDays'],
    ['ALERT_STALE_COMMITS_INFO_DAYS', 'staleCommits', 'infoDays'],
    ['ALERT_LOW_CONVERSION_MIN_SESSIONS', 'lowConversion', 'minSessions'],
    ['ALERT_LOW_CONVERSION_THRESHOLD', 'lowConversion', 'infoThreshold'],
    ['ALERT_VIOLATION_RATE_CRITICAL', 'highViolationRate', 'criticalThreshold'],
    ['ALERT_VIOLATION_RATE_WARNING', 'highViolationRate', 'warningThreshold'],
    ['ALERT_COOLDOWN_HOURS', 'aggregation', 'cooldownHours'],
    ['ALERT_EXPIRATION_DAYS', 'aggregation', 'expirationDays']
  ];
  for (const [envVar, sectionName, key] of numericOverrides) {
    if (process.env[envVar]) {
      section(sectionName)[key] = Number.parseInt(process.env[envVar], 10);
    }
  }

  // Feature flags: setting the env var to the string 'true' disables
  // that alert type.
  const disableFlags = [
    ['ALERT_DISABLE_STALE_COMMITS', 'staleCommits'],
    ['ALERT_DISABLE_LOW_CONVERSION', 'lowConversion'],
    ['ALERT_DISABLE_NO_AI_USAGE', 'noAiUsage'],
    ['ALERT_DISABLE_VIOLATION_RATE', 'highViolationRate'],
    ['ALERT_DISABLE_STALLED_PATTERNS', 'stalledPatterns'],
    ['ALERT_DISABLE_DECLINING_ACTIVITY', 'decliningActivity']
  ];
  for (const [envVar, sectionName] of disableFlags) {
    if (process.env[envVar] === 'true') {
      section(sectionName).enabled = false;
    }
  }

  return config;
};
|
|
99
|
+
|
|
100
|
+
exports.handler = wrapHandler(async (event, context) => {
|
|
101
|
+
console.log('[GenerateAlerts] Starting scheduled alert generation');
|
|
102
|
+
|
|
103
|
+
// Get optional company filter from event (for testing or targeted runs)
|
|
104
|
+
const companyId = event?.companyId || null;
|
|
105
|
+
const dryRun = event?.dryRun || process.env.ALERT_DRY_RUN === 'true';
|
|
106
|
+
|
|
107
|
+
// Initialize AlertEngine with configuration
|
|
108
|
+
const thresholds = getThresholdConfig();
|
|
109
|
+
const alertEngine = new AlertEngine({
|
|
110
|
+
thresholds,
|
|
111
|
+
dryRun
|
|
112
|
+
});
|
|
113
|
+
|
|
114
|
+
// Log configuration for debugging
|
|
115
|
+
console.log('[GenerateAlerts] Configuration:', {
|
|
116
|
+
companyId: companyId || 'all',
|
|
117
|
+
dryRun,
|
|
118
|
+
thresholdsOverride: Object.keys(thresholds).length > 0 ? thresholds : 'defaults'
|
|
119
|
+
});
|
|
120
|
+
|
|
121
|
+
// Generate alerts
|
|
122
|
+
const summary = await alertEngine.generateAlerts(companyId);
|
|
123
|
+
|
|
124
|
+
console.log('[GenerateAlerts] Completed:', summary);
|
|
125
|
+
|
|
126
|
+
return createSuccessResponse({
|
|
127
|
+
alerts_created: summary.alertsCreated,
|
|
128
|
+
alerts_expired: summary.alertsExpired,
|
|
129
|
+
by_type: summary.byType,
|
|
130
|
+
errors: summary.errors,
|
|
131
|
+
started_at: summary.startedAt,
|
|
132
|
+
completed_at: summary.completedAt,
|
|
133
|
+
dry_run: dryRun
|
|
134
|
+
}, 'Alerts generated successfully');
|
|
135
|
+
});
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Refresh Activity Scheduled Job
|
|
3
|
+
* Refreshes materialized views for developer and project activity
|
|
4
|
+
*
|
|
5
|
+
* Schedule: Every 15 minutes
|
|
6
|
+
* Auth: None (Lambda scheduled event)
|
|
7
|
+
*/
|
|
8
|
+
|
|
9
|
+
const { wrapHandler, executeQuery, createSuccessResponse } = require('./helpers');
|
|
10
|
+
|
|
11
|
+
exports.handler = wrapHandler(async (event, context) => {
|
|
12
|
+
// Refresh developer activity view
|
|
13
|
+
await executeQuery('SELECT rapport.refresh_developer_activity()');
|
|
14
|
+
|
|
15
|
+
// Refresh project activity view
|
|
16
|
+
await executeQuery('SELECT rapport.refresh_project_activity()');
|
|
17
|
+
|
|
18
|
+
return createSuccessResponse({
|
|
19
|
+
views_refreshed: ['developer_activity', 'project_activity']
|
|
20
|
+
}, 'Activity views refreshed');
|
|
21
|
+
});
|