@littlebearapps/platform-admin-sdk 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +112 -0
- package/dist/index.d.ts +16 -0
- package/dist/index.js +89 -0
- package/dist/prompts.d.ts +27 -0
- package/dist/prompts.js +80 -0
- package/dist/scaffold.d.ts +5 -0
- package/dist/scaffold.js +65 -0
- package/dist/templates.d.ts +16 -0
- package/dist/templates.js +131 -0
- package/package.json +46 -0
- package/templates/full/migrations/006_pattern_discovery.sql +199 -0
- package/templates/full/migrations/007_notifications_search.sql +127 -0
- package/templates/full/workers/lib/pattern-discovery/ai-prompt.ts +644 -0
- package/templates/full/workers/lib/pattern-discovery/clustering.ts +278 -0
- package/templates/full/workers/lib/pattern-discovery/shadow-evaluation.ts +603 -0
- package/templates/full/workers/lib/pattern-discovery/storage.ts +806 -0
- package/templates/full/workers/lib/pattern-discovery/types.ts +159 -0
- package/templates/full/workers/lib/pattern-discovery/validation.ts +278 -0
- package/templates/full/workers/pattern-discovery.ts +661 -0
- package/templates/full/workers/platform-alert-router.ts +1809 -0
- package/templates/full/workers/platform-notifications.ts +424 -0
- package/templates/full/workers/platform-search.ts +480 -0
- package/templates/full/workers/platform-settings.ts +436 -0
- package/templates/full/wrangler.alert-router.jsonc.hbs +34 -0
- package/templates/full/wrangler.notifications.jsonc.hbs +23 -0
- package/templates/full/wrangler.pattern-discovery.jsonc.hbs +33 -0
- package/templates/full/wrangler.search.jsonc.hbs +16 -0
- package/templates/full/wrangler.settings.jsonc.hbs +23 -0
- package/templates/shared/README.md.hbs +69 -0
- package/templates/shared/config/budgets.yaml.hbs +72 -0
- package/templates/shared/config/services.yaml.hbs +45 -0
- package/templates/shared/migrations/001_core_tables.sql +117 -0
- package/templates/shared/migrations/002_usage_warehouse.sql +830 -0
- package/templates/shared/migrations/003_feature_tracking.sql +250 -0
- package/templates/shared/migrations/004_settings_alerts.sql +452 -0
- package/templates/shared/migrations/seed.sql.hbs +4 -0
- package/templates/shared/package.json.hbs +21 -0
- package/templates/shared/scripts/sync-config.ts +242 -0
- package/templates/shared/tsconfig.json +12 -0
- package/templates/shared/workers/lib/analytics-engine.ts +357 -0
- package/templates/shared/workers/lib/billing.ts +293 -0
- package/templates/shared/workers/lib/circuit-breaker-middleware.ts +25 -0
- package/templates/shared/workers/lib/control.ts +292 -0
- package/templates/shared/workers/lib/economics.ts +368 -0
- package/templates/shared/workers/lib/metrics.ts +103 -0
- package/templates/shared/workers/lib/platform-settings.ts +407 -0
- package/templates/shared/workers/lib/shared/allowances.ts +333 -0
- package/templates/shared/workers/lib/shared/cloudflare.ts +1362 -0
- package/templates/shared/workers/lib/shared/types.ts +58 -0
- package/templates/shared/workers/lib/telemetry-sampling.ts +360 -0
- package/templates/shared/workers/lib/usage/collectors/example.ts +96 -0
- package/templates/shared/workers/lib/usage/collectors/index.ts +128 -0
- package/templates/shared/workers/lib/usage/handlers/audit.ts +306 -0
- package/templates/shared/workers/lib/usage/handlers/backfill.ts +845 -0
- package/templates/shared/workers/lib/usage/handlers/behavioral.ts +429 -0
- package/templates/shared/workers/lib/usage/handlers/data-queries.ts +507 -0
- package/templates/shared/workers/lib/usage/handlers/dlq-admin.ts +364 -0
- package/templates/shared/workers/lib/usage/handlers/health-trends.ts +222 -0
- package/templates/shared/workers/lib/usage/handlers/index.ts +35 -0
- package/templates/shared/workers/lib/usage/handlers/usage-admin.ts +421 -0
- package/templates/shared/workers/lib/usage/handlers/usage-features.ts +1262 -0
- package/templates/shared/workers/lib/usage/handlers/usage-metrics.ts +2420 -0
- package/templates/shared/workers/lib/usage/handlers/usage-settings.ts +610 -0
- package/templates/shared/workers/lib/usage/queue/budget-enforcement.ts +1032 -0
- package/templates/shared/workers/lib/usage/queue/cost-budget-enforcement.ts +128 -0
- package/templates/shared/workers/lib/usage/queue/cost-calculator.ts +77 -0
- package/templates/shared/workers/lib/usage/queue/dlq-handler.ts +161 -0
- package/templates/shared/workers/lib/usage/queue/index.ts +19 -0
- package/templates/shared/workers/lib/usage/queue/telemetry-processor.ts +790 -0
- package/templates/shared/workers/lib/usage/scheduled/anomaly-detection.ts +732 -0
- package/templates/shared/workers/lib/usage/scheduled/data-collection.ts +956 -0
- package/templates/shared/workers/lib/usage/scheduled/error-digest.ts +343 -0
- package/templates/shared/workers/lib/usage/scheduled/index.ts +18 -0
- package/templates/shared/workers/lib/usage/scheduled/rollups.ts +1561 -0
- package/templates/shared/workers/lib/usage/shared/constants.ts +362 -0
- package/templates/shared/workers/lib/usage/shared/index.ts +14 -0
- package/templates/shared/workers/lib/usage/shared/types.ts +1066 -0
- package/templates/shared/workers/lib/usage/shared/utils.ts +795 -0
- package/templates/shared/workers/platform-usage.ts +1915 -0
- package/templates/shared/wrangler.usage.jsonc.hbs +58 -0
- package/templates/standard/migrations/005_error_collection.sql +162 -0
- package/templates/standard/workers/error-collector.ts +2670 -0
- package/templates/standard/workers/lib/error-collector/capture.ts +213 -0
- package/templates/standard/workers/lib/error-collector/digest.ts +448 -0
- package/templates/standard/workers/lib/error-collector/email-health-alerts.ts +262 -0
- package/templates/standard/workers/lib/error-collector/fingerprint.ts +258 -0
- package/templates/standard/workers/lib/error-collector/gap-alerts.ts +293 -0
- package/templates/standard/workers/lib/error-collector/github.ts +329 -0
- package/templates/standard/workers/lib/error-collector/types.ts +262 -0
- package/templates/standard/workers/lib/sentinel/gap-detection.ts +734 -0
- package/templates/standard/workers/lib/shared/slack-alerts.ts +585 -0
- package/templates/standard/workers/platform-sentinel.ts +1744 -0
- package/templates/standard/wrangler.error-collector.jsonc.hbs +44 -0
- package/templates/standard/wrangler.sentinel.jsonc.hbs +45 -0
|
@@ -0,0 +1,845 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Backfill Handler for Platform Usage
|
|
3
|
+
*
|
|
4
|
+
* Provides endpoints for:
|
|
5
|
+
* - GET /usage/gaps - Current gap status
|
|
6
|
+
* - GET /usage/gaps/history - Gap detection history
|
|
7
|
+
* - POST /usage/gaps/backfill - Trigger backfill for date range
|
|
8
|
+
*
|
|
9
|
+
* @module workers/lib/usage/handlers/backfill
|
|
10
|
+
* @created 2026-01-29
|
|
11
|
+
*/
|
|
12
|
+
|
|
13
|
+
import type { D1Database, KVNamespace } from '@cloudflare/workers-types';
|
|
14
|
+
import { PRICING_TIERS } from '@littlebearapps/platform-consumer-sdk';
|
|
15
|
+
|
|
16
|
+
/**
 * Environment bindings required for backfill
 */
export interface BackfillEnv {
  // D1 database holding gap_detection_log, backfill_log, daily_usage_rollups
  // and hourly_usage_snapshots — the only binding these handlers read/write.
  PLATFORM_DB: D1Database;
  // KV cache namespace. NOTE(review): not used by the handlers in this module;
  // presumably required by sibling handlers sharing this env shape.
  PLATFORM_CACHE: KVNamespace;
  // Cloudflare account id. NOTE(review): unused in this module's visible code.
  CLOUDFLARE_ACCOUNT_ID: string;
  // Optional API token. NOTE(review): unused in this module's visible code.
  CLOUDFLARE_API_TOKEN?: string;
}
|
|
25
|
+
|
|
26
|
+
/**
 * Backfill request payload
 */
export interface BackfillRequest {
  startDate: string; // Inclusive range start, YYYY-MM-DD
  endDate: string; // Inclusive range end, YYYY-MM-DD (max 30 days after startDate)
  projects?: string[]; // Specific projects; omitted => handler's default list
  dryRun?: boolean; // Preview: count would-be creates/updates without writing
}
|
|
35
|
+
|
|
36
|
+
/**
 * Backfill result
 *
 * Summary of one backfill run (dry runs report the same shape, with
 * counts describing what *would* have been written).
 */
export interface BackfillResult {
  // Run id; for non-dry runs this is also the backfill_log row id.
  id: string;
  // Echoed from the request (YYYY-MM-DD, inclusive).
  startDate: string;
  endDate: string;
  // Projects the run covered (request value or the default list).
  projects: string[];
  // Hourly slots examined: 24 per (day, project) row found in the rollups.
  hoursProcessed: number;
  // New hourly snapshots inserted (or would-be, when dryRun).
  hoursCreated: number;
  // Existing low-confidence snapshots overwritten (or would-be, when dryRun).
  hoursUpdated: number;
  // Per-hour failures; the run keeps going past individual hour errors.
  errors: Array<{ hour: string; project: string; error: string }>;
  // Confidence score stamped on backfilled rows (fixed at 75 in this module).
  averageConfidence: number;
  // 'failed' only when an error escapes the per-hour loop.
  status: 'completed' | 'failed';
  dryRun: boolean;
}
|
|
52
|
+
|
|
53
|
+
/**
 * Gap status response
 *
 * Snapshot built from the most recent gap-detection run plus any non-ok
 * detections recorded in the last 24 hours.
 */
export interface GapStatus {
  // Severity of the latest detection run; 'ok' when no run is recorded.
  currentStatus: 'ok' | 'warning' | 'critical';
  // Timestamp of the latest detection run, or null if none exists.
  lastCheck: string | null;
  // Missing-hour count reported by the latest detection run.
  missingHoursLast24h: number;
  // Project names extracted from the latest run's stored JSON report.
  staleProjects: string[];
  // Up to 10 non-ok detections from the last 24 hours, newest first.
  recentGaps: Array<{
    detectionTime: string;
    missingHours: number;
    staleProjects: number;
    severity: string;
  }>;
}
|
|
68
|
+
|
|
69
|
+
/**
|
|
70
|
+
* Handle GET /usage/gaps - Current gap status
|
|
71
|
+
*/
|
|
72
|
+
export async function handleGapsStatus(env: BackfillEnv): Promise<Response> {
|
|
73
|
+
try {
|
|
74
|
+
// Get latest gap detection result
|
|
75
|
+
const latest = await env.PLATFORM_DB.prepare(
|
|
76
|
+
`
|
|
77
|
+
SELECT detection_time, missing_hours_count, stale_projects_count, severity, report_json
|
|
78
|
+
FROM gap_detection_log
|
|
79
|
+
ORDER BY detection_time DESC
|
|
80
|
+
LIMIT 1
|
|
81
|
+
`
|
|
82
|
+
).first<{
|
|
83
|
+
detection_time: string;
|
|
84
|
+
missing_hours_count: number;
|
|
85
|
+
stale_projects_count: number;
|
|
86
|
+
severity: string;
|
|
87
|
+
report_json: string;
|
|
88
|
+
}>();
|
|
89
|
+
|
|
90
|
+
// Get recent gap events (last 24h)
|
|
91
|
+
const recentResult = await env.PLATFORM_DB.prepare(
|
|
92
|
+
`
|
|
93
|
+
SELECT detection_time, missing_hours_count, stale_projects_count, severity
|
|
94
|
+
FROM gap_detection_log
|
|
95
|
+
WHERE detection_time >= datetime('now', '-24 hours')
|
|
96
|
+
AND severity != 'ok'
|
|
97
|
+
ORDER BY detection_time DESC
|
|
98
|
+
LIMIT 10
|
|
99
|
+
`
|
|
100
|
+
).all<{
|
|
101
|
+
detection_time: string;
|
|
102
|
+
missing_hours_count: number;
|
|
103
|
+
stale_projects_count: number;
|
|
104
|
+
severity: string;
|
|
105
|
+
}>();
|
|
106
|
+
|
|
107
|
+
// Parse stale projects from latest report
|
|
108
|
+
let staleProjects: string[] = [];
|
|
109
|
+
if (latest?.report_json) {
|
|
110
|
+
try {
|
|
111
|
+
const report = JSON.parse(latest.report_json);
|
|
112
|
+
staleProjects = report.staleProjects?.map((p: { project: string }) => p.project) ?? [];
|
|
113
|
+
} catch {
|
|
114
|
+
// Ignore parse errors
|
|
115
|
+
}
|
|
116
|
+
}
|
|
117
|
+
|
|
118
|
+
const status: GapStatus = {
|
|
119
|
+
currentStatus: (latest?.severity as 'ok' | 'warning' | 'critical') ?? 'ok',
|
|
120
|
+
lastCheck: latest?.detection_time ?? null,
|
|
121
|
+
missingHoursLast24h: latest?.missing_hours_count ?? 0,
|
|
122
|
+
staleProjects,
|
|
123
|
+
recentGaps:
|
|
124
|
+
recentResult.results?.map((r) => ({
|
|
125
|
+
detectionTime: r.detection_time,
|
|
126
|
+
missingHours: r.missing_hours_count,
|
|
127
|
+
staleProjects: r.stale_projects_count,
|
|
128
|
+
severity: r.severity,
|
|
129
|
+
})) ?? [],
|
|
130
|
+
};
|
|
131
|
+
|
|
132
|
+
return new Response(JSON.stringify({ success: true, data: status }), {
|
|
133
|
+
headers: { 'Content-Type': 'application/json' },
|
|
134
|
+
});
|
|
135
|
+
} catch (error) {
|
|
136
|
+
return new Response(
|
|
137
|
+
JSON.stringify({
|
|
138
|
+
success: false,
|
|
139
|
+
error: error instanceof Error ? error.message : 'Unknown error',
|
|
140
|
+
}),
|
|
141
|
+
{
|
|
142
|
+
status: 500,
|
|
143
|
+
headers: { 'Content-Type': 'application/json' },
|
|
144
|
+
}
|
|
145
|
+
);
|
|
146
|
+
}
|
|
147
|
+
}
|
|
148
|
+
|
|
149
|
+
/**
|
|
150
|
+
* Handle GET /usage/gaps/history - Gap detection history
|
|
151
|
+
*/
|
|
152
|
+
export async function handleGapsHistory(env: BackfillEnv, url: URL): Promise<Response> {
|
|
153
|
+
try {
|
|
154
|
+
const limit = parseInt(url.searchParams.get('limit') ?? '50', 10);
|
|
155
|
+
const offset = parseInt(url.searchParams.get('offset') ?? '0', 10);
|
|
156
|
+
const severityFilter = url.searchParams.get('severity'); // 'ok' | 'warning' | 'critical'
|
|
157
|
+
|
|
158
|
+
let query = `
|
|
159
|
+
SELECT id, detection_time, missing_hours_count, stale_projects_count, severity
|
|
160
|
+
FROM gap_detection_log
|
|
161
|
+
`;
|
|
162
|
+
const params: (string | number)[] = [];
|
|
163
|
+
|
|
164
|
+
if (severityFilter) {
|
|
165
|
+
query += ' WHERE severity = ?';
|
|
166
|
+
params.push(severityFilter);
|
|
167
|
+
}
|
|
168
|
+
|
|
169
|
+
query += ' ORDER BY detection_time DESC LIMIT ? OFFSET ?';
|
|
170
|
+
params.push(limit, offset);
|
|
171
|
+
|
|
172
|
+
const result = await env.PLATFORM_DB.prepare(query)
|
|
173
|
+
.bind(...params)
|
|
174
|
+
.all<{
|
|
175
|
+
id: string;
|
|
176
|
+
detection_time: string;
|
|
177
|
+
missing_hours_count: number;
|
|
178
|
+
stale_projects_count: number;
|
|
179
|
+
severity: string;
|
|
180
|
+
}>();
|
|
181
|
+
|
|
182
|
+
// Get total count
|
|
183
|
+
let countQuery = 'SELECT COUNT(*) as total FROM gap_detection_log';
|
|
184
|
+
if (severityFilter) {
|
|
185
|
+
countQuery += ' WHERE severity = ?';
|
|
186
|
+
}
|
|
187
|
+
const countResult = await env.PLATFORM_DB.prepare(countQuery)
|
|
188
|
+
.bind(...(severityFilter ? [severityFilter] : []))
|
|
189
|
+
.first<{ total: number }>();
|
|
190
|
+
|
|
191
|
+
return new Response(
|
|
192
|
+
JSON.stringify({
|
|
193
|
+
success: true,
|
|
194
|
+
data: result.results ?? [],
|
|
195
|
+
pagination: {
|
|
196
|
+
total: countResult?.total ?? 0,
|
|
197
|
+
limit,
|
|
198
|
+
offset,
|
|
199
|
+
},
|
|
200
|
+
}),
|
|
201
|
+
{
|
|
202
|
+
headers: { 'Content-Type': 'application/json' },
|
|
203
|
+
}
|
|
204
|
+
);
|
|
205
|
+
} catch (error) {
|
|
206
|
+
return new Response(
|
|
207
|
+
JSON.stringify({
|
|
208
|
+
success: false,
|
|
209
|
+
error: error instanceof Error ? error.message : 'Unknown error',
|
|
210
|
+
}),
|
|
211
|
+
{
|
|
212
|
+
status: 500,
|
|
213
|
+
headers: { 'Content-Type': 'application/json' },
|
|
214
|
+
}
|
|
215
|
+
);
|
|
216
|
+
}
|
|
217
|
+
}
|
|
218
|
+
|
|
219
|
+
/**
|
|
220
|
+
* Handle POST /usage/gaps/backfill - Trigger backfill
|
|
221
|
+
*/
|
|
222
|
+
export async function handleGapsBackfill(request: Request, env: BackfillEnv): Promise<Response> {
|
|
223
|
+
try {
|
|
224
|
+
const body = (await request.json()) as BackfillRequest;
|
|
225
|
+
|
|
226
|
+
// Validate request
|
|
227
|
+
if (!body.startDate || !body.endDate) {
|
|
228
|
+
return new Response(
|
|
229
|
+
JSON.stringify({
|
|
230
|
+
success: false,
|
|
231
|
+
error: 'startDate and endDate are required',
|
|
232
|
+
}),
|
|
233
|
+
{
|
|
234
|
+
status: 400,
|
|
235
|
+
headers: { 'Content-Type': 'application/json' },
|
|
236
|
+
}
|
|
237
|
+
);
|
|
238
|
+
}
|
|
239
|
+
|
|
240
|
+
// Validate date format
|
|
241
|
+
const dateRegex = /^\d{4}-\d{2}-\d{2}$/;
|
|
242
|
+
if (!dateRegex.test(body.startDate) || !dateRegex.test(body.endDate)) {
|
|
243
|
+
return new Response(
|
|
244
|
+
JSON.stringify({
|
|
245
|
+
success: false,
|
|
246
|
+
error: 'Dates must be in YYYY-MM-DD format',
|
|
247
|
+
}),
|
|
248
|
+
{
|
|
249
|
+
status: 400,
|
|
250
|
+
headers: { 'Content-Type': 'application/json' },
|
|
251
|
+
}
|
|
252
|
+
);
|
|
253
|
+
}
|
|
254
|
+
|
|
255
|
+
// Validate date range (max 30 days)
|
|
256
|
+
const start = new Date(body.startDate);
|
|
257
|
+
const end = new Date(body.endDate);
|
|
258
|
+
const daysDiff = (end.getTime() - start.getTime()) / (1000 * 60 * 60 * 24);
|
|
259
|
+
|
|
260
|
+
if (daysDiff < 0) {
|
|
261
|
+
return new Response(
|
|
262
|
+
JSON.stringify({
|
|
263
|
+
success: false,
|
|
264
|
+
error: 'endDate must be after startDate',
|
|
265
|
+
}),
|
|
266
|
+
{
|
|
267
|
+
status: 400,
|
|
268
|
+
headers: { 'Content-Type': 'application/json' },
|
|
269
|
+
}
|
|
270
|
+
);
|
|
271
|
+
}
|
|
272
|
+
|
|
273
|
+
if (daysDiff > 30) {
|
|
274
|
+
return new Response(
|
|
275
|
+
JSON.stringify({
|
|
276
|
+
success: false,
|
|
277
|
+
error: 'Date range cannot exceed 30 days',
|
|
278
|
+
}),
|
|
279
|
+
{
|
|
280
|
+
status: 400,
|
|
281
|
+
headers: { 'Content-Type': 'application/json' },
|
|
282
|
+
}
|
|
283
|
+
);
|
|
284
|
+
}
|
|
285
|
+
|
|
286
|
+
// Execute backfill
|
|
287
|
+
const result = await executeBackfill(env, body);
|
|
288
|
+
|
|
289
|
+
return new Response(JSON.stringify({ success: true, data: result }), {
|
|
290
|
+
headers: { 'Content-Type': 'application/json' },
|
|
291
|
+
});
|
|
292
|
+
} catch (error) {
|
|
293
|
+
return new Response(
|
|
294
|
+
JSON.stringify({
|
|
295
|
+
success: false,
|
|
296
|
+
error: error instanceof Error ? error.message : 'Unknown error',
|
|
297
|
+
}),
|
|
298
|
+
{
|
|
299
|
+
status: 500,
|
|
300
|
+
headers: { 'Content-Type': 'application/json' },
|
|
301
|
+
}
|
|
302
|
+
);
|
|
303
|
+
}
|
|
304
|
+
}
|
|
305
|
+
|
|
306
|
+
/**
 * Execute backfill operation
 *
 * Approximates missing hourly snapshots from daily rollups: each
 * daily_usage_rollups row in the range is spread evenly across 24
 * hourly_usage_snapshots rows (costs divided by 24; counters divided by
 * 24 and rounded). Backfilled rows are stamped source='backfill',
 * confidence=75, backfill_reason='gap_backfill'; existing rows with
 * confidence >= 75 are left untouched.
 *
 * Lifecycle (non-dry runs only): a 'running' backfill_log row is inserted
 * up front, then marked 'completed' with counts, or 'failed' if an error
 * escapes the per-hour loop. Dry runs only count would-be writes.
 *
 * @param env - D1 binding used for all reads and writes
 * @param options - Request payload; the caller is expected to have
 *   validated the date range before invoking this
 * @returns Summary counts; per-hour failures accumulate in result.errors
 * @throws Rethrows errors raised outside the per-hour loop (source query,
 *   log updates) after marking the log row 'failed'
 */
async function executeBackfill(
  env: BackfillEnv,
  options: BackfillRequest
): Promise<BackfillResult> {
  const id = crypto.randomUUID();
  // TODO: Add your project IDs here (must match project_registry in D1)
  const projects = options.projects ?? ['all', 'platform'];

  // Accumulator; starts optimistic ('completed') and is only flipped to
  // 'failed' by the outer catch below. averageConfidence mirrors the fixed
  // confidence written to backfilled rows.
  const result: BackfillResult = {
    id,
    startDate: options.startDate,
    endDate: options.endDate,
    projects,
    hoursProcessed: 0,
    hoursCreated: 0,
    hoursUpdated: 0,
    errors: [],
    averageConfidence: 75,
    status: 'completed',
    dryRun: options.dryRun ?? false,
  };

  // Log backfill start (real runs only — dry runs leave no log trail).
  if (!options.dryRun) {
    await env.PLATFORM_DB.prepare(
      `
      INSERT INTO backfill_log (id, start_date, end_date, projects, triggered_by, status)
      VALUES (?, ?, ?, ?, 'manual', 'running')
    `
    )
      .bind(id, options.startDate, options.endDate, JSON.stringify(projects))
      .run();
  }

  try {
    // Get existing daily rollup data to use as source. The IN-list is built
    // from '?' placeholders (one per project), so values stay parameterized.
    const dailyData = await env.PLATFORM_DB.prepare(
      `
      SELECT
        snapshot_date,
        project,
        workers_cost_usd,
        d1_cost_usd,
        kv_cost_usd,
        r2_cost_usd,
        do_cost_usd,
        vectorize_cost_usd,
        aigateway_cost_usd,
        pages_cost_usd,
        queues_cost_usd,
        workersai_cost_usd,
        total_cost_usd,
        workers_requests,
        workers_errors,
        workers_cpu_time_ms,
        d1_rows_read,
        d1_rows_written,
        kv_reads,
        kv_writes,
        r2_class_a_ops,
        r2_class_b_ops,
        do_requests,
        do_gb_seconds
      FROM daily_usage_rollups
      WHERE snapshot_date >= ? AND snapshot_date <= ?
        AND project IN (${projects.map(() => '?').join(',')})
      ORDER BY snapshot_date, project
    `
    )
      .bind(options.startDate, options.endDate, ...projects)
      .all();

    // Process each (day, project) source row.
    for (const day of dailyData.results ?? []) {
      const date = day.snapshot_date as string;
      const project = day.project as string;

      // Create 24 hourly entries for this day.
      for (let hour = 0; hour < 24; hour++) {
        const snapshotHour = `${date}T${hour.toString().padStart(2, '0')}:00:00Z`;
        result.hoursProcessed++;

        try {
          // Check if a snapshot already exists for this hour/project.
          const existing = await env.PLATFORM_DB.prepare(
            `SELECT id, source, confidence FROM hourly_usage_snapshots WHERE snapshot_hour = ? AND project = ?`
          )
            .bind(snapshotHour, project)
            .first<{ id: string; source: string; confidence: number }>();

          // Skip rows that already meet or exceed backfill confidence (75):
          // never overwrite better data with an estimate.
          if (existing && existing.confidence >= 75) {
            continue;
          }

          // Calculate hourly values (divide daily by 24; counters rounded).
          const hourlyData = {
            workers_cost_usd: ((day.workers_cost_usd as number) ?? 0) / 24,
            d1_cost_usd: ((day.d1_cost_usd as number) ?? 0) / 24,
            kv_cost_usd: ((day.kv_cost_usd as number) ?? 0) / 24,
            r2_cost_usd: ((day.r2_cost_usd as number) ?? 0) / 24,
            do_cost_usd: ((day.do_cost_usd as number) ?? 0) / 24,
            vectorize_cost_usd: ((day.vectorize_cost_usd as number) ?? 0) / 24,
            aigateway_cost_usd: ((day.aigateway_cost_usd as number) ?? 0) / 24,
            pages_cost_usd: ((day.pages_cost_usd as number) ?? 0) / 24,
            queues_cost_usd: ((day.queues_cost_usd as number) ?? 0) / 24,
            workersai_cost_usd: ((day.workersai_cost_usd as number) ?? 0) / 24,
            total_cost_usd: ((day.total_cost_usd as number) ?? 0) / 24,
            workers_requests: Math.round(((day.workers_requests as number) ?? 0) / 24),
            workers_errors: Math.round(((day.workers_errors as number) ?? 0) / 24),
            workers_cpu_time_ms: Math.round(((day.workers_cpu_time_ms as number) ?? 0) / 24),
            d1_rows_read: Math.round(((day.d1_rows_read as number) ?? 0) / 24),
            d1_rows_written: Math.round(((day.d1_rows_written as number) ?? 0) / 24),
            kv_reads: Math.round(((day.kv_reads as number) ?? 0) / 24),
            kv_writes: Math.round(((day.kv_writes as number) ?? 0) / 24),
            r2_class_a_ops: Math.round(((day.r2_class_a_ops as number) ?? 0) / 24),
            r2_class_b_ops: Math.round(((day.r2_class_b_ops as number) ?? 0) / 24),
            do_requests: Math.round(((day.do_requests as number) ?? 0) / 24),
            do_gb_seconds: ((day.do_gb_seconds as number) ?? 0) / 24,
          };

          if (!options.dryRun) {
            if (existing) {
              // Update existing (low-confidence) row with backfill data.
              // NOTE(review): only the five core cost columns + total are
              // rewritten here; other cost columns and counters keep their
              // prior values — confirm this asymmetry is intentional.
              await env.PLATFORM_DB.prepare(
                `
                UPDATE hourly_usage_snapshots
                SET
                  workers_cost_usd = ?,
                  d1_cost_usd = ?,
                  kv_cost_usd = ?,
                  r2_cost_usd = ?,
                  do_cost_usd = ?,
                  total_cost_usd = ?,
                  source = 'backfill',
                  confidence = 75,
                  backfill_reason = 'gap_backfill'
                WHERE id = ?
              `
              )
                .bind(
                  hourlyData.workers_cost_usd,
                  hourlyData.d1_cost_usd,
                  hourlyData.kv_cost_usd,
                  hourlyData.r2_cost_usd,
                  hourlyData.do_cost_usd,
                  hourlyData.total_cost_usd,
                  existing.id
                )
                .run();
              result.hoursUpdated++;
            } else {
              // Insert a new hourly snapshot carrying all estimated columns.
              const newId = crypto.randomUUID();
              await env.PLATFORM_DB.prepare(
                `
                INSERT INTO hourly_usage_snapshots (
                  id, snapshot_hour, project,
                  workers_cost_usd, d1_cost_usd, kv_cost_usd, r2_cost_usd, do_cost_usd,
                  vectorize_cost_usd, aigateway_cost_usd, pages_cost_usd, queues_cost_usd,
                  workersai_cost_usd, total_cost_usd,
                  workers_requests, workers_errors, workers_cpu_time_ms,
                  d1_rows_read, d1_rows_written,
                  kv_reads, kv_writes,
                  r2_class_a_ops, r2_class_b_ops,
                  do_requests, do_gb_seconds,
                  source, confidence, backfill_reason,
                  collection_timestamp, sampling_mode
                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 'backfill', 75, 'gap_backfill', ?, 'normal')
              `
              )
                .bind(
                  newId,
                  snapshotHour,
                  project,
                  hourlyData.workers_cost_usd,
                  hourlyData.d1_cost_usd,
                  hourlyData.kv_cost_usd,
                  hourlyData.r2_cost_usd,
                  hourlyData.do_cost_usd,
                  hourlyData.vectorize_cost_usd,
                  hourlyData.aigateway_cost_usd,
                  hourlyData.pages_cost_usd,
                  hourlyData.queues_cost_usd,
                  hourlyData.workersai_cost_usd,
                  hourlyData.total_cost_usd,
                  hourlyData.workers_requests,
                  hourlyData.workers_errors,
                  hourlyData.workers_cpu_time_ms,
                  hourlyData.d1_rows_read,
                  hourlyData.d1_rows_written,
                  hourlyData.kv_reads,
                  hourlyData.kv_writes,
                  hourlyData.r2_class_a_ops,
                  hourlyData.r2_class_b_ops,
                  hourlyData.do_requests,
                  hourlyData.do_gb_seconds,
                  new Date().toISOString()
                )
                .run();
              result.hoursCreated++;
            }
          } else {
            // Dry run - just count what a real run would have done.
            if (existing) {
              result.hoursUpdated++;
            } else {
              result.hoursCreated++;
            }
          }
        } catch (error) {
          // Per-hour failures are recorded and the loop continues, so one
          // bad hour doesn't abort the whole range.
          result.errors.push({
            hour: snapshotHour,
            project,
            error: error instanceof Error ? error.message : 'Unknown error',
          });
        }
      }
    }

    // Update backfill log with final counts and completion status.
    if (!options.dryRun) {
      await env.PLATFORM_DB.prepare(
        `
        UPDATE backfill_log
        SET
          hours_processed = ?,
          hours_created = ?,
          hours_updated = ?,
          errors_count = ?,
          errors_json = ?,
          average_confidence = ?,
          status = 'completed',
          completed_at = datetime('now')
        WHERE id = ?
      `
      )
        .bind(
          result.hoursProcessed,
          result.hoursCreated,
          result.hoursUpdated,
          result.errors.length,
          result.errors.length > 0 ? JSON.stringify(result.errors) : null,
          result.averageConfidence,
          id
        )
        .run();
    }
  } catch (error) {
    // An error escaped the per-hour loop (source query or log update):
    // mark the log row failed, then surface the error to the caller.
    result.status = 'failed';

    if (!options.dryRun) {
      await env.PLATFORM_DB.prepare(
        `
        UPDATE backfill_log
        SET status = 'failed', completed_at = datetime('now')
        WHERE id = ?
      `
      )
        .bind(id)
        .run();
    }

    throw error;
  }

  return result;
}
|
|
577
|
+
|
|
578
|
+
/**
|
|
579
|
+
* Handle GET /usage/gaps/backfill/history - Backfill history
|
|
580
|
+
*/
|
|
581
|
+
export async function handleBackfillHistory(env: BackfillEnv, url: URL): Promise<Response> {
|
|
582
|
+
try {
|
|
583
|
+
const limit = parseInt(url.searchParams.get('limit') ?? '20', 10);
|
|
584
|
+
const offset = parseInt(url.searchParams.get('offset') ?? '0', 10);
|
|
585
|
+
|
|
586
|
+
const result = await env.PLATFORM_DB.prepare(
|
|
587
|
+
`
|
|
588
|
+
SELECT
|
|
589
|
+
id, start_date, end_date, projects,
|
|
590
|
+
hours_processed, hours_created, hours_updated, errors_count,
|
|
591
|
+
average_confidence, triggered_by, status,
|
|
592
|
+
started_at, completed_at
|
|
593
|
+
FROM backfill_log
|
|
594
|
+
ORDER BY started_at DESC
|
|
595
|
+
LIMIT ? OFFSET ?
|
|
596
|
+
`
|
|
597
|
+
)
|
|
598
|
+
.bind(limit, offset)
|
|
599
|
+
.all();
|
|
600
|
+
|
|
601
|
+
const countResult = await env.PLATFORM_DB.prepare(
|
|
602
|
+
'SELECT COUNT(*) as total FROM backfill_log'
|
|
603
|
+
).first<{ total: number }>();
|
|
604
|
+
|
|
605
|
+
return new Response(
|
|
606
|
+
JSON.stringify({
|
|
607
|
+
success: true,
|
|
608
|
+
data: result.results ?? [],
|
|
609
|
+
pagination: {
|
|
610
|
+
total: countResult?.total ?? 0,
|
|
611
|
+
limit,
|
|
612
|
+
offset,
|
|
613
|
+
},
|
|
614
|
+
}),
|
|
615
|
+
{
|
|
616
|
+
headers: { 'Content-Type': 'application/json' },
|
|
617
|
+
}
|
|
618
|
+
);
|
|
619
|
+
} catch (error) {
|
|
620
|
+
return new Response(
|
|
621
|
+
JSON.stringify({
|
|
622
|
+
success: false,
|
|
623
|
+
error: error instanceof Error ? error.message : 'Unknown error',
|
|
624
|
+
}),
|
|
625
|
+
{
|
|
626
|
+
status: 500,
|
|
627
|
+
headers: { 'Content-Type': 'application/json' },
|
|
628
|
+
}
|
|
629
|
+
);
|
|
630
|
+
}
|
|
631
|
+
}
|
|
632
|
+
|
|
633
|
+
/**
 * Project health status
 *
 * Per-project coverage summary built from resource_usage_snapshots
 * (see handleProjectsHealth).
 */
export interface ProjectHealth {
  // Project identifier.
  project: string;
  // Percent of known resources with data in the last 24h.
  coveragePct: number;
  // NOTE(review): despite the name, the handler populates this with the
  // count of *resources* seen recently (active_resources) — confirm whether
  // a rename is feasible for consumers.
  hoursWithData: number;
  // Total distinct resources known for the project.
  expectedHours: number;
  // Derived from coveragePct: >= 90 healthy, >= 70 warning, else critical.
  status: 'healthy' | 'warning' | 'critical';
  // Most recent snapshot hour observed in the last 24h, or null.
  lastDataHour: string | null;
  // Optional per-resource-type coverage breakdown.
  resourceBreakdown?: Array<{
    resourceType: string;
    hoursWithData: number;
    coveragePct: number;
  }>;
}
|
|
649
|
+
|
|
650
|
+
/**
|
|
651
|
+
* Handle GET /usage/gaps/projects - Per-project health status
|
|
652
|
+
*
|
|
653
|
+
* Returns coverage percentage for ALL projects (not just those below threshold).
|
|
654
|
+
* Used by dashboard to show per-project health scores.
|
|
655
|
+
*/
|
|
656
|
+
export async function handleProjectsHealth(env: BackfillEnv): Promise<Response> {
|
|
657
|
+
try {
|
|
658
|
+
// Query resource-based coverage per project from resource_usage_snapshots.
|
|
659
|
+
// Measures: how many distinct resources have data in the last 24h vs total known resources.
|
|
660
|
+
// This gives genuinely different numbers per project (each project has different resource counts)
|
|
661
|
+
// unlike hour-based counting which is identical for all
|
|
662
|
+
// because the central collector runs for everyone simultaneously.
|
|
663
|
+
const coverageResult = await env.PLATFORM_DB.prepare(
|
|
664
|
+
`
|
|
665
|
+
WITH recent AS (
|
|
666
|
+
SELECT project, resource_type, resource_id
|
|
667
|
+
FROM resource_usage_snapshots
|
|
668
|
+
WHERE snapshot_hour >= datetime('now', '-24 hours')
|
|
669
|
+
AND project IS NOT NULL
|
|
670
|
+
AND project NOT IN ('unknown', 'all')
|
|
671
|
+
),
|
|
672
|
+
known AS (
|
|
673
|
+
SELECT project, resource_type, resource_id
|
|
674
|
+
FROM resource_usage_snapshots
|
|
675
|
+
WHERE project IS NOT NULL
|
|
676
|
+
AND project NOT IN ('unknown', 'all')
|
|
677
|
+
)
|
|
678
|
+
SELECT
|
|
679
|
+
k.project,
|
|
680
|
+
COUNT(DISTINCT k.resource_type || ':' || k.resource_id) as expected_resources,
|
|
681
|
+
COUNT(DISTINCT r.resource_type || ':' || r.resource_id) as active_resources,
|
|
682
|
+
ROUND(
|
|
683
|
+
COUNT(DISTINCT r.resource_type || ':' || r.resource_id) * 100.0 /
|
|
684
|
+
MAX(COUNT(DISTINCT k.resource_type || ':' || k.resource_id), 1),
|
|
685
|
+
1
|
|
686
|
+
) as coverage_pct,
|
|
687
|
+
MAX(r.snapshot_hour) as last_data_hour
|
|
688
|
+
FROM known k
|
|
689
|
+
LEFT JOIN recent r
|
|
690
|
+
ON k.project = r.project
|
|
691
|
+
AND k.resource_type = r.resource_type
|
|
692
|
+
AND k.resource_id = r.resource_id
|
|
693
|
+
GROUP BY k.project
|
|
694
|
+
ORDER BY coverage_pct ASC, k.project ASC
|
|
695
|
+
`
|
|
696
|
+
).all<{
|
|
697
|
+
project: string;
|
|
698
|
+
expected_resources: number;
|
|
699
|
+
active_resources: number;
|
|
700
|
+
coverage_pct: number;
|
|
701
|
+
last_data_hour: string | null;
|
|
702
|
+
}>();
|
|
703
|
+
|
|
704
|
+
const projects: ProjectHealth[] = [];
|
|
705
|
+
|
|
706
|
+
for (const row of coverageResult.results ?? []) {
|
|
707
|
+
// Determine status based on coverage percentage
|
|
708
|
+
let status: 'healthy' | 'warning' | 'critical';
|
|
709
|
+
if (row.coverage_pct >= 90) {
|
|
710
|
+
status = 'healthy';
|
|
711
|
+
} else if (row.coverage_pct >= 70) {
|
|
712
|
+
status = 'warning';
|
|
713
|
+
} else {
|
|
714
|
+
status = 'critical';
|
|
715
|
+
}
|
|
716
|
+
|
|
717
|
+
// Get resource-level breakdown: distinct resources per type
|
|
718
|
+
let resourceBreakdown: ProjectHealth['resourceBreakdown'];
|
|
719
|
+
try {
|
|
720
|
+
const resourceResult = await env.PLATFORM_DB.prepare(
|
|
721
|
+
`
|
|
722
|
+
WITH recent AS (
|
|
723
|
+
SELECT resource_type, resource_id
|
|
724
|
+
FROM resource_usage_snapshots
|
|
725
|
+
WHERE snapshot_hour >= datetime('now', '-24 hours')
|
|
726
|
+
AND project = ?
|
|
727
|
+
),
|
|
728
|
+
known AS (
|
|
729
|
+
SELECT resource_type, resource_id
|
|
730
|
+
FROM resource_usage_snapshots
|
|
731
|
+
WHERE project = ?
|
|
732
|
+
)
|
|
733
|
+
SELECT
|
|
734
|
+
k.resource_type,
|
|
735
|
+
COUNT(DISTINCT k.resource_id) as total_resources,
|
|
736
|
+
COUNT(DISTINCT r.resource_id) as active_resources,
|
|
737
|
+
ROUND(
|
|
738
|
+
COUNT(DISTINCT r.resource_id) * 100.0 /
|
|
739
|
+
MAX(COUNT(DISTINCT k.resource_id), 1),
|
|
740
|
+
1
|
|
741
|
+
) as coverage_pct
|
|
742
|
+
FROM known k
|
|
743
|
+
LEFT JOIN recent r
|
|
744
|
+
ON k.resource_type = r.resource_type
|
|
745
|
+
AND k.resource_id = r.resource_id
|
|
746
|
+
GROUP BY k.resource_type
|
|
747
|
+
ORDER BY coverage_pct ASC
|
|
748
|
+
`
|
|
749
|
+
)
|
|
750
|
+
.bind(row.project, row.project)
|
|
751
|
+
.all<{ resource_type: string; total_resources: number; active_resources: number; coverage_pct: number }>();
|
|
752
|
+
|
|
753
|
+
if (resourceResult.results && resourceResult.results.length > 0) {
|
|
754
|
+
resourceBreakdown = resourceResult.results.map((r) => ({
|
|
755
|
+
resourceType: r.resource_type,
|
|
756
|
+
hoursWithData: r.active_resources,
|
|
757
|
+
coveragePct: r.coverage_pct,
|
|
758
|
+
}));
|
|
759
|
+
}
|
|
760
|
+
} catch {
|
|
761
|
+
// Ignore resource breakdown errors
|
|
762
|
+
}
|
|
763
|
+
|
|
764
|
+
projects.push({
|
|
765
|
+
project: row.project,
|
|
766
|
+
coveragePct: row.coverage_pct,
|
|
767
|
+
hoursWithData: row.active_resources,
|
|
768
|
+
expectedHours: row.expected_resources,
|
|
769
|
+
status,
|
|
770
|
+
lastDataHour: row.last_data_hour,
|
|
771
|
+
resourceBreakdown,
|
|
772
|
+
});
|
|
773
|
+
}
|
|
774
|
+
|
|
775
|
+
// Also get projects from project_registry that may have 0 data
|
|
776
|
+
const registryResult = await env.PLATFORM_DB.prepare(
|
|
777
|
+
`
|
|
778
|
+
SELECT project_id, display_name, status
|
|
779
|
+
FROM project_registry
|
|
780
|
+
WHERE status = 'active'
|
|
781
|
+
AND project_id NOT IN (${projects.map(() => '?').join(',') || "''"})
|
|
782
|
+
`
|
|
783
|
+
)
|
|
784
|
+
.bind(...projects.map((p) => p.project))
|
|
785
|
+
.all<{ project_id: string; display_name: string; status: string }>();
|
|
786
|
+
|
|
787
|
+
// Add projects with 0 coverage
|
|
788
|
+
for (const row of registryResult.results ?? []) {
|
|
789
|
+
projects.push({
|
|
790
|
+
project: row.project_id,
|
|
791
|
+
coveragePct: 0,
|
|
792
|
+
hoursWithData: 0,
|
|
793
|
+
expectedHours: 24,
|
|
794
|
+
status: 'critical',
|
|
795
|
+
lastDataHour: null,
|
|
796
|
+
});
|
|
797
|
+
}
|
|
798
|
+
|
|
799
|
+
// Sort: critical first, then warning, then healthy
|
|
800
|
+
projects.sort((a, b) => {
|
|
801
|
+
const statusOrder = { critical: 0, warning: 1, healthy: 2 };
|
|
802
|
+
return statusOrder[a.status] - statusOrder[b.status] || a.project.localeCompare(b.project);
|
|
803
|
+
});
|
|
804
|
+
|
|
805
|
+
// Calculate summary stats
|
|
806
|
+
const healthyCount = projects.filter((p) => p.status === 'healthy').length;
|
|
807
|
+
const warningCount = projects.filter((p) => p.status === 'warning').length;
|
|
808
|
+
const criticalCount = projects.filter((p) => p.status === 'critical').length;
|
|
809
|
+
const avgCoverage =
|
|
810
|
+
projects.length > 0
|
|
811
|
+
? Math.round((projects.reduce((sum, p) => sum + p.coveragePct, 0) / projects.length) * 10) /
|
|
812
|
+
10
|
|
813
|
+
: 0;
|
|
814
|
+
|
|
815
|
+
return new Response(
|
|
816
|
+
JSON.stringify({
|
|
817
|
+
success: true,
|
|
818
|
+
data: {
|
|
819
|
+
projects,
|
|
820
|
+
summary: {
|
|
821
|
+
total: projects.length,
|
|
822
|
+
healthy: healthyCount,
|
|
823
|
+
warning: warningCount,
|
|
824
|
+
critical: criticalCount,
|
|
825
|
+
averageCoverage: avgCoverage,
|
|
826
|
+
},
|
|
827
|
+
},
|
|
828
|
+
}),
|
|
829
|
+
{
|
|
830
|
+
headers: { 'Content-Type': 'application/json' },
|
|
831
|
+
}
|
|
832
|
+
);
|
|
833
|
+
} catch (error) {
|
|
834
|
+
return new Response(
|
|
835
|
+
JSON.stringify({
|
|
836
|
+
success: false,
|
|
837
|
+
error: error instanceof Error ? error.message : 'Unknown error',
|
|
838
|
+
}),
|
|
839
|
+
{
|
|
840
|
+
status: 500,
|
|
841
|
+
headers: { 'Content-Type': 'application/json' },
|
|
842
|
+
}
|
|
843
|
+
);
|
|
844
|
+
}
|
|
845
|
+
}
|