@littlebearapps/create-platform 1.0.0 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +98 -0
- package/dist/index.d.ts +6 -1
- package/dist/index.js +36 -6
- package/dist/prompts.d.ts +14 -2
- package/dist/prompts.js +29 -7
- package/dist/templates.js +78 -0
- package/package.json +3 -2
- package/templates/full/workers/lib/pattern-discovery/ai-prompt.ts +644 -0
- package/templates/full/workers/lib/pattern-discovery/clustering.ts +278 -0
- package/templates/full/workers/lib/pattern-discovery/shadow-evaluation.ts +603 -0
- package/templates/full/workers/lib/pattern-discovery/storage.ts +806 -0
- package/templates/full/workers/lib/pattern-discovery/types.ts +159 -0
- package/templates/full/workers/lib/pattern-discovery/validation.ts +278 -0
- package/templates/full/workers/pattern-discovery.ts +661 -0
- package/templates/full/workers/platform-alert-router.ts +1809 -0
- package/templates/full/workers/platform-notifications.ts +424 -0
- package/templates/full/workers/platform-search.ts +480 -0
- package/templates/full/workers/platform-settings.ts +436 -0
- package/templates/shared/workers/lib/analytics-engine.ts +357 -0
- package/templates/shared/workers/lib/billing.ts +293 -0
- package/templates/shared/workers/lib/circuit-breaker-middleware.ts +25 -0
- package/templates/shared/workers/lib/control.ts +292 -0
- package/templates/shared/workers/lib/economics.ts +368 -0
- package/templates/shared/workers/lib/metrics.ts +103 -0
- package/templates/shared/workers/lib/platform-settings.ts +407 -0
- package/templates/shared/workers/lib/shared/allowances.ts +333 -0
- package/templates/shared/workers/lib/shared/cloudflare.ts +1362 -0
- package/templates/shared/workers/lib/shared/types.ts +58 -0
- package/templates/shared/workers/lib/telemetry-sampling.ts +360 -0
- package/templates/shared/workers/lib/usage/collectors/example.ts +96 -0
- package/templates/shared/workers/lib/usage/collectors/index.ts +128 -0
- package/templates/shared/workers/lib/usage/handlers/audit.ts +306 -0
- package/templates/shared/workers/lib/usage/handlers/backfill.ts +845 -0
- package/templates/shared/workers/lib/usage/handlers/behavioral.ts +429 -0
- package/templates/shared/workers/lib/usage/handlers/data-queries.ts +507 -0
- package/templates/shared/workers/lib/usage/handlers/dlq-admin.ts +364 -0
- package/templates/shared/workers/lib/usage/handlers/health-trends.ts +222 -0
- package/templates/shared/workers/lib/usage/handlers/index.ts +35 -0
- package/templates/shared/workers/lib/usage/handlers/usage-admin.ts +421 -0
- package/templates/shared/workers/lib/usage/handlers/usage-features.ts +1262 -0
- package/templates/shared/workers/lib/usage/handlers/usage-metrics.ts +2420 -0
- package/templates/shared/workers/lib/usage/handlers/usage-settings.ts +610 -0
- package/templates/shared/workers/lib/usage/queue/budget-enforcement.ts +1032 -0
- package/templates/shared/workers/lib/usage/queue/cost-budget-enforcement.ts +128 -0
- package/templates/shared/workers/lib/usage/queue/cost-calculator.ts +77 -0
- package/templates/shared/workers/lib/usage/queue/dlq-handler.ts +161 -0
- package/templates/shared/workers/lib/usage/queue/index.ts +19 -0
- package/templates/shared/workers/lib/usage/queue/telemetry-processor.ts +790 -0
- package/templates/shared/workers/lib/usage/scheduled/anomaly-detection.ts +732 -0
- package/templates/shared/workers/lib/usage/scheduled/data-collection.ts +956 -0
- package/templates/shared/workers/lib/usage/scheduled/error-digest.ts +343 -0
- package/templates/shared/workers/lib/usage/scheduled/index.ts +18 -0
- package/templates/shared/workers/lib/usage/scheduled/rollups.ts +1561 -0
- package/templates/shared/workers/lib/usage/shared/constants.ts +362 -0
- package/templates/shared/workers/lib/usage/shared/index.ts +14 -0
- package/templates/shared/workers/lib/usage/shared/types.ts +1066 -0
- package/templates/shared/workers/lib/usage/shared/utils.ts +795 -0
- package/templates/shared/workers/platform-usage.ts +1915 -0
- package/templates/standard/workers/error-collector.ts +2670 -0
- package/templates/standard/workers/lib/error-collector/capture.ts +213 -0
- package/templates/standard/workers/lib/error-collector/digest.ts +448 -0
- package/templates/standard/workers/lib/error-collector/email-health-alerts.ts +262 -0
- package/templates/standard/workers/lib/error-collector/fingerprint.ts +258 -0
- package/templates/standard/workers/lib/error-collector/gap-alerts.ts +293 -0
- package/templates/standard/workers/lib/error-collector/github.ts +329 -0
- package/templates/standard/workers/lib/error-collector/types.ts +262 -0
- package/templates/standard/workers/lib/sentinel/gap-detection.ts +734 -0
- package/templates/standard/workers/lib/shared/slack-alerts.ts +585 -0
- package/templates/standard/workers/platform-sentinel.ts +1744 -0
|
@@ -0,0 +1,213 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Error Capture Decision Logic
|
|
3
|
+
* Determines which tail events should create/update GitHub issues
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import type { TailEvent, CaptureDecision, ErrorType, Priority } from './types';
|
|
7
|
+
|
|
8
|
+
/**
|
|
9
|
+
* Normalize URL by removing dynamic path segments and query params
|
|
10
|
+
* This helps group similar errors together
|
|
11
|
+
*/
|
|
12
|
+
export function normalizeUrl(url: string | undefined): string {
|
|
13
|
+
if (!url) return 'no-url';
|
|
14
|
+
|
|
15
|
+
try {
|
|
16
|
+
const parsed = new URL(url);
|
|
17
|
+
// Remove query string
|
|
18
|
+
let path = parsed.pathname;
|
|
19
|
+
|
|
20
|
+
// Replace common dynamic segments with placeholders
|
|
21
|
+
// UUIDs
|
|
22
|
+
path = path.replace(/[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}/gi, ':id');
|
|
23
|
+
// Numeric IDs
|
|
24
|
+
path = path.replace(/\/\d+/g, '/:id');
|
|
25
|
+
// Hash-like segments (e.g., /abc123def456/)
|
|
26
|
+
path = path.replace(/\/[a-f0-9]{16,}/gi, '/:hash');
|
|
27
|
+
|
|
28
|
+
return `${parsed.hostname}${path}`;
|
|
29
|
+
} catch {
|
|
30
|
+
return url.slice(0, 100);
|
|
31
|
+
}
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
/**
|
|
35
|
+
* Determine if this tail event should create/update a GitHub issue
|
|
36
|
+
*/
|
|
37
|
+
export function shouldCapture(event: TailEvent): CaptureDecision {
|
|
38
|
+
// Resource limit failures - always capture
|
|
39
|
+
if (event.outcome === 'exceededCpu') {
|
|
40
|
+
return { capture: true, type: 'cpu_limit' };
|
|
41
|
+
}
|
|
42
|
+
if (event.outcome === 'exceededMemory') {
|
|
43
|
+
return { capture: true, type: 'memory_limit' };
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
// Hard exceptions - always capture
|
|
47
|
+
if (event.outcome === 'exception') {
|
|
48
|
+
return { capture: true, type: 'exception' };
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
// Check for soft errors (console.error with 'ok' outcome)
|
|
52
|
+
const hasErrorLogs = event.logs.some((l) => l.level === 'error');
|
|
53
|
+
if (hasErrorLogs) {
|
|
54
|
+
return { capture: true, type: 'soft_error' };
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
// Check for warnings (console.warn)
|
|
58
|
+
const hasWarnings = event.logs.some((l) => l.level === 'warn');
|
|
59
|
+
if (hasWarnings) {
|
|
60
|
+
return { capture: true, type: 'warning' };
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
// Don't capture successful invocations without errors/warnings
|
|
64
|
+
return { capture: false };
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
/**
|
|
68
|
+
* Calculate priority based on error type, project tier, and occurrence count
|
|
69
|
+
*/
|
|
70
|
+
export function calculatePriority(
|
|
71
|
+
errorType: ErrorType,
|
|
72
|
+
tier: number,
|
|
73
|
+
occurrenceCount: number
|
|
74
|
+
): Priority {
|
|
75
|
+
// Resource limits are always critical
|
|
76
|
+
if (errorType === 'cpu_limit' || errorType === 'memory_limit') {
|
|
77
|
+
return 'P0';
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
// Exceptions based on project tier
|
|
81
|
+
if (errorType === 'exception') {
|
|
82
|
+
if (tier === 0) return 'P0'; // Tier 0 = Critical (revenue-generating)
|
|
83
|
+
if (tier === 1) return 'P1'; // Tier 1 = High priority
|
|
84
|
+
return 'P2'; // Tier 2+ = Medium priority
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
// Soft errors escalate with repeated occurrences
|
|
88
|
+
if (errorType === 'soft_error') {
|
|
89
|
+
return occurrenceCount > 5 ? 'P2' : 'P3';
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
// Warnings are lowest priority
|
|
93
|
+
return 'P4';
|
|
94
|
+
}
|
|
95
|
+
|
|
96
|
+
/**
|
|
97
|
+
* Get GitHub labels for an error
|
|
98
|
+
*/
|
|
99
|
+
export function getLabels(errorType: ErrorType, priority: Priority): string[] {
|
|
100
|
+
const labels: string[] = ['cf:error:auto-generated'];
|
|
101
|
+
|
|
102
|
+
// Priority label
|
|
103
|
+
switch (priority) {
|
|
104
|
+
case 'P0':
|
|
105
|
+
labels.push('cf:priority:critical');
|
|
106
|
+
break;
|
|
107
|
+
case 'P1':
|
|
108
|
+
labels.push('cf:priority:high');
|
|
109
|
+
break;
|
|
110
|
+
case 'P2':
|
|
111
|
+
labels.push('cf:priority:medium');
|
|
112
|
+
break;
|
|
113
|
+
case 'P3':
|
|
114
|
+
labels.push('cf:priority:low');
|
|
115
|
+
break;
|
|
116
|
+
case 'P4':
|
|
117
|
+
labels.push('cf:priority:warning');
|
|
118
|
+
break;
|
|
119
|
+
}
|
|
120
|
+
|
|
121
|
+
// Error type label
|
|
122
|
+
switch (errorType) {
|
|
123
|
+
case 'exception':
|
|
124
|
+
labels.push('cf:error:exception');
|
|
125
|
+
break;
|
|
126
|
+
case 'cpu_limit':
|
|
127
|
+
labels.push('cf:error:cpu-limit');
|
|
128
|
+
break;
|
|
129
|
+
case 'memory_limit':
|
|
130
|
+
labels.push('cf:error:memory-limit');
|
|
131
|
+
break;
|
|
132
|
+
case 'soft_error':
|
|
133
|
+
labels.push('cf:error:soft-error');
|
|
134
|
+
break;
|
|
135
|
+
case 'warning':
|
|
136
|
+
labels.push('cf:error:warning');
|
|
137
|
+
break;
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
return labels;
|
|
141
|
+
}
|
|
142
|
+
|
|
143
|
+
/**
|
|
144
|
+
* Extract the core message from a log entry, stripping JSON wrapper and dynamic fields
|
|
145
|
+
*/
|
|
146
|
+
export function extractCoreMessage(message: unknown): string {
|
|
147
|
+
if (typeof message === 'string') {
|
|
148
|
+
// Try to parse as JSON to extract just the message field
|
|
149
|
+
try {
|
|
150
|
+
const parsed = JSON.parse(message);
|
|
151
|
+
if (parsed && typeof parsed.message === 'string') {
|
|
152
|
+
return parsed.message;
|
|
153
|
+
}
|
|
154
|
+
} catch {
|
|
155
|
+
// Not JSON, use as-is
|
|
156
|
+
}
|
|
157
|
+
return message;
|
|
158
|
+
}
|
|
159
|
+
|
|
160
|
+
if (message && typeof message === 'object') {
|
|
161
|
+
// If it's an object with a message field, extract it
|
|
162
|
+
const obj = message as Record<string, unknown>;
|
|
163
|
+
if (typeof obj.message === 'string') {
|
|
164
|
+
return obj.message;
|
|
165
|
+
}
|
|
166
|
+
}
|
|
167
|
+
|
|
168
|
+
return String(message);
|
|
169
|
+
}
|
|
170
|
+
|
|
171
|
+
/**
|
|
172
|
+
* Format error title for GitHub issue
|
|
173
|
+
* Extracts clean message from JSON log entries for readable titles
|
|
174
|
+
*/
|
|
175
|
+
export function formatErrorTitle(
|
|
176
|
+
errorType: ErrorType,
|
|
177
|
+
event: TailEvent,
|
|
178
|
+
scriptName: string
|
|
179
|
+
): string {
|
|
180
|
+
const maxLength = 100;
|
|
181
|
+
|
|
182
|
+
if (errorType === 'cpu_limit') {
|
|
183
|
+
return `[${scriptName}] Exceeded CPU limit`;
|
|
184
|
+
}
|
|
185
|
+
|
|
186
|
+
if (errorType === 'memory_limit') {
|
|
187
|
+
return `[${scriptName}] Exceeded memory limit`;
|
|
188
|
+
}
|
|
189
|
+
|
|
190
|
+
if (errorType === 'exception' && event.exceptions.length > 0) {
|
|
191
|
+
const exc = event.exceptions[0];
|
|
192
|
+
const msg = exc.message.slice(0, 60);
|
|
193
|
+
return `[${scriptName}] ${exc.name}: ${msg}`.slice(0, maxLength);
|
|
194
|
+
}
|
|
195
|
+
|
|
196
|
+
if (errorType === 'soft_error') {
|
|
197
|
+
const errorLog = event.logs.find((l) => l.level === 'error');
|
|
198
|
+
if (errorLog) {
|
|
199
|
+
const msg = extractCoreMessage(errorLog.message[0]).slice(0, 60);
|
|
200
|
+
return `[${scriptName}] Error: ${msg}`.slice(0, maxLength);
|
|
201
|
+
}
|
|
202
|
+
}
|
|
203
|
+
|
|
204
|
+
if (errorType === 'warning') {
|
|
205
|
+
const warnLog = event.logs.find((l) => l.level === 'warn');
|
|
206
|
+
if (warnLog) {
|
|
207
|
+
const msg = extractCoreMessage(warnLog.message[0]).slice(0, 60);
|
|
208
|
+
return `[${scriptName}] Warning: ${msg}`.slice(0, maxLength);
|
|
209
|
+
}
|
|
210
|
+
}
|
|
211
|
+
|
|
212
|
+
return `[${scriptName}] ${errorType} error`;
|
|
213
|
+
}
|
|
@@ -0,0 +1,448 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Warning Digest Module
|
|
3
|
+
*
|
|
4
|
+
* Handles daily digest creation for P4 warnings.
|
|
5
|
+
* Groups warnings by type (normalized fingerprint) and creates one issue per type.
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import type { Env, PendingDigestWarning, ScriptMapping } from './types';
|
|
9
|
+
import { GitHubClient } from './github';
|
|
10
|
+
|
|
11
|
+
/**
|
|
12
|
+
* Get today's date in YYYY-MM-DD format (UTC)
|
|
13
|
+
*/
|
|
14
|
+
export function getDigestDate(): string {
|
|
15
|
+
return new Date().toISOString().slice(0, 10);
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
/**
|
|
19
|
+
* Get yesterday's date in YYYY-MM-DD format (UTC)
|
|
20
|
+
* Used for creating digests of the previous day's warnings
|
|
21
|
+
*/
|
|
22
|
+
export function getYesterdayDigestDate(): string {
|
|
23
|
+
const yesterday = new Date();
|
|
24
|
+
yesterday.setUTCDate(yesterday.getUTCDate() - 1);
|
|
25
|
+
return yesterday.toISOString().slice(0, 10);
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
/**
|
|
29
|
+
* Query pending warnings grouped by script and fingerprint
|
|
30
|
+
*/
|
|
31
|
+
export async function getPendingWarnings(
|
|
32
|
+
db: D1Database,
|
|
33
|
+
digestDate: string
|
|
34
|
+
): Promise<Map<string, PendingDigestWarning[]>> {
|
|
35
|
+
// Get all pending_digest warnings from yesterday (or specified date)
|
|
36
|
+
const result = await db
|
|
37
|
+
.prepare(
|
|
38
|
+
`
|
|
39
|
+
SELECT
|
|
40
|
+
id,
|
|
41
|
+
fingerprint,
|
|
42
|
+
script_name,
|
|
43
|
+
project,
|
|
44
|
+
github_repo,
|
|
45
|
+
normalized_message,
|
|
46
|
+
last_exception_message as raw_message,
|
|
47
|
+
last_seen_at as event_timestamp,
|
|
48
|
+
occurrence_count
|
|
49
|
+
FROM error_occurrences
|
|
50
|
+
WHERE status = 'pending_digest'
|
|
51
|
+
AND error_type = 'warning'
|
|
52
|
+
AND date(last_seen_at, 'unixepoch') = ?
|
|
53
|
+
ORDER BY script_name, fingerprint, last_seen_at
|
|
54
|
+
`
|
|
55
|
+
)
|
|
56
|
+
.bind(digestDate)
|
|
57
|
+
.all<PendingDigestWarning>();
|
|
58
|
+
|
|
59
|
+
// Group by script_name:fingerprint
|
|
60
|
+
const grouped = new Map<string, PendingDigestWarning[]>();
|
|
61
|
+
|
|
62
|
+
for (const warning of result.results || []) {
|
|
63
|
+
const key = `${warning.script_name}:${warning.fingerprint}`;
|
|
64
|
+
if (!grouped.has(key)) {
|
|
65
|
+
grouped.set(key, []);
|
|
66
|
+
}
|
|
67
|
+
grouped.get(key)!.push(warning);
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
return grouped;
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
/**
|
|
74
|
+
* Format the digest issue body
|
|
75
|
+
*/
|
|
76
|
+
function formatDigestBody(
|
|
77
|
+
warnings: PendingDigestWarning[],
|
|
78
|
+
digestDate: string,
|
|
79
|
+
mapping: ScriptMapping
|
|
80
|
+
): string {
|
|
81
|
+
const totalOccurrences = warnings.reduce((sum, w) => sum + w.occurrence_count, 0);
|
|
82
|
+
const firstSeen = new Date(
|
|
83
|
+
Math.min(...warnings.map((w) => w.event_timestamp)) * 1000
|
|
84
|
+
).toISOString();
|
|
85
|
+
const lastSeen = new Date(
|
|
86
|
+
Math.max(...warnings.map((w) => w.event_timestamp)) * 1000
|
|
87
|
+
).toISOString();
|
|
88
|
+
|
|
89
|
+
// Extract normalized message from first warning
|
|
90
|
+
const normalizedMessage = warnings[0]?.normalized_message || 'Unknown warning';
|
|
91
|
+
|
|
92
|
+
let body = `## Daily Warning Digest: ${normalizedMessage}\n\n`;
|
|
93
|
+
|
|
94
|
+
body += `| | |\n|---|---|\n`;
|
|
95
|
+
body += `| **Date** | ${digestDate} |\n`;
|
|
96
|
+
body += `| **Project** | ${mapping.displayName} |\n`;
|
|
97
|
+
body += `| **Worker** | \`${warnings[0]?.script_name}\` |\n`;
|
|
98
|
+
body += `| **Total Occurrences** | ${totalOccurrences} |\n`;
|
|
99
|
+
body += `| **Unique Events** | ${warnings.length} |\n`;
|
|
100
|
+
body += `| **First Seen** | ${firstSeen.slice(11, 19)} UTC |\n`;
|
|
101
|
+
body += `| **Last Seen** | ${lastSeen.slice(11, 19)} UTC |\n\n`;
|
|
102
|
+
|
|
103
|
+
// Timeline table (show up to 50 occurrences)
|
|
104
|
+
body += `### Timeline\n\n`;
|
|
105
|
+
body += `| Time (UTC) | Occurrences | Details |\n`;
|
|
106
|
+
body += `|------------|-------------|----------|\n`;
|
|
107
|
+
|
|
108
|
+
const displayWarnings = warnings.slice(0, 50);
|
|
109
|
+
for (const w of displayWarnings) {
|
|
110
|
+
const time = new Date(w.event_timestamp * 1000).toISOString().slice(11, 19);
|
|
111
|
+
const details = (w.raw_message || '').slice(0, 60).replace(/\|/g, '\\|');
|
|
112
|
+
body += `| ${time} | ${w.occurrence_count} | ${details}${w.raw_message && w.raw_message.length > 60 ? '...' : ''} |\n`;
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
if (warnings.length > 50) {
|
|
116
|
+
body += `| ... | ... | *(${warnings.length - 50} more occurrences)* |\n`;
|
|
117
|
+
}
|
|
118
|
+
|
|
119
|
+
body += `\n`;
|
|
120
|
+
|
|
121
|
+
// Sample log entry
|
|
122
|
+
if (warnings[0]?.raw_message) {
|
|
123
|
+
body += `### Sample Log Entry\n`;
|
|
124
|
+
body += `\`\`\`\n${warnings[0].raw_message.slice(0, 500)}\n\`\`\`\n\n`;
|
|
125
|
+
}
|
|
126
|
+
|
|
127
|
+
// Quick links
|
|
128
|
+
body += `### Quick Links\n`;
|
|
129
|
+
body += `- [Worker Dashboard](https://dash.cloudflare.com/?to=/:account/workers/services/view/${warnings[0]?.script_name})\n`;
|
|
130
|
+
body += `- [Repository](https://github.com/${mapping.repository})\n`;
|
|
131
|
+
body += `- [CLAUDE.md](https://github.com/${mapping.repository}/blob/main/CLAUDE.md)\n\n`;
|
|
132
|
+
|
|
133
|
+
// Investigation guidance
|
|
134
|
+
body += `### Action Required\n`;
|
|
135
|
+
body += `This warning occurred ${totalOccurrences} times on ${digestDate}. Consider:\n`;
|
|
136
|
+
body += `1. Is this expected behavior? If so, consider reducing log level to \`debug\`\n`;
|
|
137
|
+
body += `2. Does this indicate a performance issue that should be addressed?\n`;
|
|
138
|
+
body += `3. Can the underlying condition be fixed to prevent the warning?\n\n`;
|
|
139
|
+
|
|
140
|
+
body += `---\n`;
|
|
141
|
+
body += `_Daily digest generated by Platform Error Collector_\n`;
|
|
142
|
+
|
|
143
|
+
return body;
|
|
144
|
+
}
|
|
145
|
+
|
|
146
|
+
/**
 * Create or update the daily digest issue for one warning type, i.e. one
 * (digest_date, script_name, fingerprint) triple.
 *
 * Concurrency strategy: a placeholder row is inserted into `warning_digests`
 * BEFORE the GitHub issue is created. The table's UNIQUE(digest_date,
 * script_name, fingerprint) constraint makes that insert act as a lock, so
 * concurrent invocations cannot create duplicate issues for the same digest.
 *
 * @param db - Platform D1 database.
 * @param github - Authenticated GitHub API client.
 * @param warnings - All pending warnings for this script+fingerprint; the
 *                   first element supplies the identifying fields.
 * @param mapping - Script → repository routing info.
 * @param digestDate - YYYY-MM-DD day the digest covers.
 * @returns The digest issue's number and URL. NOTE(review): if a concurrent
 *          invocation holds the lock but never records an issue within the
 *          ~7s retry window, this returns issueNumber 0 / empty URL, which
 *          callers go on to persist — confirm callers tolerate that sentinel.
 */
async function createOrUpdateDigestIssue(
  db: D1Database,
  github: GitHubClient,
  warnings: PendingDigestWarning[],
  mapping: ScriptMapping,
  digestDate: string
): Promise<{ issueNumber: number; issueUrl: string }> {
  // All warnings in the group share these identifying fields; take the first.
  const [owner, repo] = mapping.repository.split('/');
  const fingerprint = warnings[0]?.fingerprint || '';
  const scriptName = warnings[0]?.script_name || '';
  const normalizedMessage = warnings[0]?.normalized_message || 'Unknown warning';

  // Check if we already have a digest issue for this fingerprint today
  const existing = await db
    .prepare(
      `
      SELECT github_issue_number, github_issue_url
      FROM warning_digests
      WHERE digest_date = ? AND script_name = ? AND fingerprint = ?
      `
    )
    .bind(digestDate, scriptName, fingerprint)
    .first<{ github_issue_number: number; github_issue_url: string }>();

  const body = formatDigestBody(warnings, digestDate, mapping);
  const title = `[${scriptName}] Daily Digest: ${normalizedMessage.slice(0, 50)} (${digestDate})`;
  const totalOccurrences = warnings.reduce((sum, w) => sum + w.occurrence_count, 0);

  if (existing?.github_issue_number) {
    // Update existing issue
    await github.updateIssue({
      owner,
      repo,
      issue_number: existing.github_issue_number,
      body,
    });

    // Update digest record
    await db
      .prepare(
        `
        UPDATE warning_digests
        SET occurrence_count = ?,
            last_occurrence_at = unixepoch(),
            updated_at = unixepoch()
        WHERE digest_date = ? AND script_name = ? AND fingerprint = ?
        `
      )
      .bind(totalOccurrences, digestDate, scriptName, fingerprint)
      .run();

    return {
      issueNumber: existing.github_issue_number,
      issueUrl: existing.github_issue_url,
    };
  }

  // Create placeholder record FIRST to prevent race condition
  // If another invocation is processing the same digest, this INSERT will fail
  // due to UNIQUE(digest_date, script_name, fingerprint) constraint
  const id = crypto.randomUUID();
  const now = Math.floor(Date.now() / 1000);

  try {
    await db
      .prepare(
        `
        INSERT INTO warning_digests (
          id, digest_date, script_name, fingerprint, normalized_message,
          github_repo, github_issue_number, github_issue_url,
          occurrence_count, first_occurrence_at, last_occurrence_at,
          created_at, updated_at
        ) VALUES (?, ?, ?, ?, ?, ?, NULL, NULL, ?, ?, ?, ?, ?)
        `
      )
      .bind(
        id,
        digestDate,
        scriptName,
        fingerprint,
        normalizedMessage,
        mapping.repository,
        totalOccurrences,
        now,
        now,
        now,
        now
      )
      .run();
  } catch (e) {
    // Unique constraint violation - another invocation already processing this digest
    // Re-fetch the existing record and return it
    // NOTE(review): ANY insert failure lands here, not just UNIQUE violations —
    // unrelated DB errors are silently treated as "someone else holds the lock".
    // Consider inspecting the error message before assuming a conflict.
    console.log(`Digest already being processed for ${scriptName}:${fingerprint}, skipping`);
    const existing = await db
      .prepare(
        `
        SELECT github_issue_number, github_issue_url
        FROM warning_digests
        WHERE digest_date = ? AND script_name = ? AND fingerprint = ?
        `
      )
      .bind(digestDate, scriptName, fingerprint)
      .first<{ github_issue_number: number; github_issue_url: string }>();

    if (existing?.github_issue_number) {
      return {
        issueNumber: existing.github_issue_number,
        issueUrl: existing.github_issue_url,
      };
    }
    // Other invocation is creating the issue — retry with exponential backoff
    for (const delayMs of [1000, 2000, 4000]) {
      await new Promise((resolve) => setTimeout(resolve, delayMs));
      const retried = await db
        .prepare(
          `
          SELECT github_issue_number, github_issue_url
          FROM warning_digests
          WHERE digest_date = ? AND script_name = ? AND fingerprint = ?
          `
        )
        .bind(digestDate, scriptName, fingerprint)
        .first<{ github_issue_number: number; github_issue_url: string }>();

      if (retried?.github_issue_number) {
        return {
          issueNumber: retried.github_issue_number,
          issueUrl: retried.github_issue_url,
        };
      }
    }

    // Gave up waiting: return a sentinel rather than throwing.
    // NOTE(review): callers persist issueNumber 0 onto the warnings they
    // mark as digested — verify downstream consumers handle issue #0.
    return {
      issueNumber: 0,
      issueUrl: '',
    };
  }

  // Now create the GitHub issue (we hold the "lock" via the DB record)
  const issue = await github.createIssue({
    owner,
    repo,
    title,
    body,
    labels: ['cf:error:auto-generated', 'cf:error:warning', 'cf:digest'],
    type: 'Task',
  });

  // Update the placeholder record with the actual issue details
  await db
    .prepare(
      `
      UPDATE warning_digests
      SET github_issue_number = ?,
          github_issue_url = ?,
          updated_at = unixepoch()
      WHERE id = ?
      `
    )
    .bind(issue.number, issue.html_url, id)
    .run();

  return {
    issueNumber: issue.number,
    issueUrl: issue.html_url,
  };
}
|
|
316
|
+
|
|
317
|
+
/**
|
|
318
|
+
* Process all pending warnings and create digest issues
|
|
319
|
+
* Called by the daily cron job
|
|
320
|
+
*/
|
|
321
|
+
export async function processWarningDigests(env: Env): Promise<{
|
|
322
|
+
processed: number;
|
|
323
|
+
issuesCreated: number;
|
|
324
|
+
issuesUpdated: number;
|
|
325
|
+
}> {
|
|
326
|
+
const github = new GitHubClient(env);
|
|
327
|
+
const digestDate = getYesterdayDigestDate(); // Process yesterday's warnings
|
|
328
|
+
|
|
329
|
+
console.log(`Processing warning digests for ${digestDate}`);
|
|
330
|
+
|
|
331
|
+
// Get all pending warnings grouped by script:fingerprint
|
|
332
|
+
const grouped = await getPendingWarnings(env.PLATFORM_DB, digestDate);
|
|
333
|
+
|
|
334
|
+
let processed = 0;
|
|
335
|
+
let issuesCreated = 0;
|
|
336
|
+
let issuesUpdated = 0;
|
|
337
|
+
|
|
338
|
+
for (const [key, warnings] of grouped) {
|
|
339
|
+
const [scriptName] = key.split(':');
|
|
340
|
+
|
|
341
|
+
// Get script mapping
|
|
342
|
+
const mappingStr = await env.PLATFORM_CACHE.get(`SCRIPT_MAP:${scriptName}`);
|
|
343
|
+
if (!mappingStr) {
|
|
344
|
+
console.log(`No mapping found for script: ${scriptName}, skipping digest`);
|
|
345
|
+
continue;
|
|
346
|
+
}
|
|
347
|
+
|
|
348
|
+
const mapping = JSON.parse(mappingStr) as ScriptMapping;
|
|
349
|
+
|
|
350
|
+
try {
|
|
351
|
+
// Create or update digest issue
|
|
352
|
+
const result = await createOrUpdateDigestIssue(
|
|
353
|
+
env.PLATFORM_DB,
|
|
354
|
+
github,
|
|
355
|
+
warnings,
|
|
356
|
+
mapping,
|
|
357
|
+
digestDate
|
|
358
|
+
);
|
|
359
|
+
|
|
360
|
+
// Mark warnings as digested
|
|
361
|
+
const ids = warnings.map((w) => w.id);
|
|
362
|
+
for (const id of ids) {
|
|
363
|
+
await env.PLATFORM_DB.prepare(
|
|
364
|
+
`
|
|
365
|
+
UPDATE error_occurrences
|
|
366
|
+
SET status = 'digested',
|
|
367
|
+
digest_date = ?,
|
|
368
|
+
digest_issue_number = ?,
|
|
369
|
+
updated_at = unixepoch()
|
|
370
|
+
WHERE id = ?
|
|
371
|
+
`
|
|
372
|
+
)
|
|
373
|
+
.bind(digestDate, result.issueNumber, id)
|
|
374
|
+
.run();
|
|
375
|
+
}
|
|
376
|
+
|
|
377
|
+
processed += warnings.length;
|
|
378
|
+
|
|
379
|
+
// Check if this was a new issue or update
|
|
380
|
+
const isNew = !warnings.some((w) => w.fingerprint === key.split(':')[1]);
|
|
381
|
+
if (isNew) {
|
|
382
|
+
issuesCreated++;
|
|
383
|
+
} else {
|
|
384
|
+
issuesUpdated++;
|
|
385
|
+
}
|
|
386
|
+
|
|
387
|
+
console.log(
|
|
388
|
+
`Digest created/updated for ${scriptName}: ${warnings[0]?.normalized_message} (${warnings.length} occurrences) → issue #${result.issueNumber}`
|
|
389
|
+
);
|
|
390
|
+
} catch (e) {
|
|
391
|
+
console.error(`Failed to create digest for ${key}: ${e}`);
|
|
392
|
+
}
|
|
393
|
+
}
|
|
394
|
+
|
|
395
|
+
console.log(
|
|
396
|
+
`Digest processing complete: ${processed} warnings processed, ${issuesCreated} issues created, ${issuesUpdated} issues updated`
|
|
397
|
+
);
|
|
398
|
+
|
|
399
|
+
return { processed, issuesCreated, issuesUpdated };
|
|
400
|
+
}
|
|
401
|
+
|
|
402
|
+
/**
|
|
403
|
+
* Store a warning for later digest (instead of creating immediate issue)
|
|
404
|
+
*/
|
|
405
|
+
export async function storeWarningForDigest(
|
|
406
|
+
db: D1Database,
|
|
407
|
+
kv: KVNamespace,
|
|
408
|
+
fingerprint: string,
|
|
409
|
+
scriptName: string,
|
|
410
|
+
project: string,
|
|
411
|
+
repo: string,
|
|
412
|
+
normalizedMessage: string,
|
|
413
|
+
rawMessage: string
|
|
414
|
+
): Promise<void> {
|
|
415
|
+
const id = crypto.randomUUID();
|
|
416
|
+
const now = Math.floor(Date.now() / 1000);
|
|
417
|
+
|
|
418
|
+
await db
|
|
419
|
+
.prepare(
|
|
420
|
+
`
|
|
421
|
+
INSERT INTO error_occurrences (
|
|
422
|
+
id, fingerprint, script_name, project, error_type, priority,
|
|
423
|
+
github_repo, status, normalized_message, last_exception_message,
|
|
424
|
+
first_seen_at, last_seen_at, occurrence_count,
|
|
425
|
+
created_at, updated_at
|
|
426
|
+
) VALUES (?, ?, ?, ?, 'warning', 'P4', ?, 'pending_digest', ?, ?, ?, ?, 1, ?, ?)
|
|
427
|
+
ON CONFLICT(fingerprint) DO UPDATE SET
|
|
428
|
+
occurrence_count = occurrence_count + 1,
|
|
429
|
+
last_seen_at = excluded.last_seen_at,
|
|
430
|
+
last_exception_message = excluded.last_exception_message,
|
|
431
|
+
updated_at = excluded.updated_at
|
|
432
|
+
`
|
|
433
|
+
)
|
|
434
|
+
.bind(
|
|
435
|
+
id,
|
|
436
|
+
fingerprint,
|
|
437
|
+
scriptName,
|
|
438
|
+
project,
|
|
439
|
+
repo,
|
|
440
|
+
normalizedMessage,
|
|
441
|
+
rawMessage,
|
|
442
|
+
now,
|
|
443
|
+
now,
|
|
444
|
+
now,
|
|
445
|
+
now
|
|
446
|
+
)
|
|
447
|
+
.run();
|
|
448
|
+
}
|