@littlebearapps/platform-admin-sdk 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (94)
  1. package/README.md +112 -0
  2. package/dist/index.d.ts +16 -0
  3. package/dist/index.js +89 -0
  4. package/dist/prompts.d.ts +27 -0
  5. package/dist/prompts.js +80 -0
  6. package/dist/scaffold.d.ts +5 -0
  7. package/dist/scaffold.js +65 -0
  8. package/dist/templates.d.ts +16 -0
  9. package/dist/templates.js +131 -0
  10. package/package.json +46 -0
  11. package/templates/full/migrations/006_pattern_discovery.sql +199 -0
  12. package/templates/full/migrations/007_notifications_search.sql +127 -0
  13. package/templates/full/workers/lib/pattern-discovery/ai-prompt.ts +644 -0
  14. package/templates/full/workers/lib/pattern-discovery/clustering.ts +278 -0
  15. package/templates/full/workers/lib/pattern-discovery/shadow-evaluation.ts +603 -0
  16. package/templates/full/workers/lib/pattern-discovery/storage.ts +806 -0
  17. package/templates/full/workers/lib/pattern-discovery/types.ts +159 -0
  18. package/templates/full/workers/lib/pattern-discovery/validation.ts +278 -0
  19. package/templates/full/workers/pattern-discovery.ts +661 -0
  20. package/templates/full/workers/platform-alert-router.ts +1809 -0
  21. package/templates/full/workers/platform-notifications.ts +424 -0
  22. package/templates/full/workers/platform-search.ts +480 -0
  23. package/templates/full/workers/platform-settings.ts +436 -0
  24. package/templates/full/wrangler.alert-router.jsonc.hbs +34 -0
  25. package/templates/full/wrangler.notifications.jsonc.hbs +23 -0
  26. package/templates/full/wrangler.pattern-discovery.jsonc.hbs +33 -0
  27. package/templates/full/wrangler.search.jsonc.hbs +16 -0
  28. package/templates/full/wrangler.settings.jsonc.hbs +23 -0
  29. package/templates/shared/README.md.hbs +69 -0
  30. package/templates/shared/config/budgets.yaml.hbs +72 -0
  31. package/templates/shared/config/services.yaml.hbs +45 -0
  32. package/templates/shared/migrations/001_core_tables.sql +117 -0
  33. package/templates/shared/migrations/002_usage_warehouse.sql +830 -0
  34. package/templates/shared/migrations/003_feature_tracking.sql +250 -0
  35. package/templates/shared/migrations/004_settings_alerts.sql +452 -0
  36. package/templates/shared/migrations/seed.sql.hbs +4 -0
  37. package/templates/shared/package.json.hbs +21 -0
  38. package/templates/shared/scripts/sync-config.ts +242 -0
  39. package/templates/shared/tsconfig.json +12 -0
  40. package/templates/shared/workers/lib/analytics-engine.ts +357 -0
  41. package/templates/shared/workers/lib/billing.ts +293 -0
  42. package/templates/shared/workers/lib/circuit-breaker-middleware.ts +25 -0
  43. package/templates/shared/workers/lib/control.ts +292 -0
  44. package/templates/shared/workers/lib/economics.ts +368 -0
  45. package/templates/shared/workers/lib/metrics.ts +103 -0
  46. package/templates/shared/workers/lib/platform-settings.ts +407 -0
  47. package/templates/shared/workers/lib/shared/allowances.ts +333 -0
  48. package/templates/shared/workers/lib/shared/cloudflare.ts +1362 -0
  49. package/templates/shared/workers/lib/shared/types.ts +58 -0
  50. package/templates/shared/workers/lib/telemetry-sampling.ts +360 -0
  51. package/templates/shared/workers/lib/usage/collectors/example.ts +96 -0
  52. package/templates/shared/workers/lib/usage/collectors/index.ts +128 -0
  53. package/templates/shared/workers/lib/usage/handlers/audit.ts +306 -0
  54. package/templates/shared/workers/lib/usage/handlers/backfill.ts +845 -0
  55. package/templates/shared/workers/lib/usage/handlers/behavioral.ts +429 -0
  56. package/templates/shared/workers/lib/usage/handlers/data-queries.ts +507 -0
  57. package/templates/shared/workers/lib/usage/handlers/dlq-admin.ts +364 -0
  58. package/templates/shared/workers/lib/usage/handlers/health-trends.ts +222 -0
  59. package/templates/shared/workers/lib/usage/handlers/index.ts +35 -0
  60. package/templates/shared/workers/lib/usage/handlers/usage-admin.ts +421 -0
  61. package/templates/shared/workers/lib/usage/handlers/usage-features.ts +1262 -0
  62. package/templates/shared/workers/lib/usage/handlers/usage-metrics.ts +2420 -0
  63. package/templates/shared/workers/lib/usage/handlers/usage-settings.ts +610 -0
  64. package/templates/shared/workers/lib/usage/queue/budget-enforcement.ts +1032 -0
  65. package/templates/shared/workers/lib/usage/queue/cost-budget-enforcement.ts +128 -0
  66. package/templates/shared/workers/lib/usage/queue/cost-calculator.ts +77 -0
  67. package/templates/shared/workers/lib/usage/queue/dlq-handler.ts +161 -0
  68. package/templates/shared/workers/lib/usage/queue/index.ts +19 -0
  69. package/templates/shared/workers/lib/usage/queue/telemetry-processor.ts +790 -0
  70. package/templates/shared/workers/lib/usage/scheduled/anomaly-detection.ts +732 -0
  71. package/templates/shared/workers/lib/usage/scheduled/data-collection.ts +956 -0
  72. package/templates/shared/workers/lib/usage/scheduled/error-digest.ts +343 -0
  73. package/templates/shared/workers/lib/usage/scheduled/index.ts +18 -0
  74. package/templates/shared/workers/lib/usage/scheduled/rollups.ts +1561 -0
  75. package/templates/shared/workers/lib/usage/shared/constants.ts +362 -0
  76. package/templates/shared/workers/lib/usage/shared/index.ts +14 -0
  77. package/templates/shared/workers/lib/usage/shared/types.ts +1066 -0
  78. package/templates/shared/workers/lib/usage/shared/utils.ts +795 -0
  79. package/templates/shared/workers/platform-usage.ts +1915 -0
  80. package/templates/shared/wrangler.usage.jsonc.hbs +58 -0
  81. package/templates/standard/migrations/005_error_collection.sql +162 -0
  82. package/templates/standard/workers/error-collector.ts +2670 -0
  83. package/templates/standard/workers/lib/error-collector/capture.ts +213 -0
  84. package/templates/standard/workers/lib/error-collector/digest.ts +448 -0
  85. package/templates/standard/workers/lib/error-collector/email-health-alerts.ts +262 -0
  86. package/templates/standard/workers/lib/error-collector/fingerprint.ts +258 -0
  87. package/templates/standard/workers/lib/error-collector/gap-alerts.ts +293 -0
  88. package/templates/standard/workers/lib/error-collector/github.ts +329 -0
  89. package/templates/standard/workers/lib/error-collector/types.ts +262 -0
  90. package/templates/standard/workers/lib/sentinel/gap-detection.ts +734 -0
  91. package/templates/standard/workers/lib/shared/slack-alerts.ts +585 -0
  92. package/templates/standard/workers/platform-sentinel.ts +1744 -0
  93. package/templates/standard/wrangler.error-collector.jsonc.hbs +44 -0
  94. package/templates/standard/wrangler.sentinel.jsonc.hbs +45 -0
@@ -0,0 +1,1262 @@
1
+ /**
2
+ * Feature Usage Handlers
3
+ *
4
+ * Handlers for feature-level usage endpoints including circuit breakers,
5
+ * budgets, history, and Workers AI metrics.
6
+ *
7
+ * Extracted from platform-usage.ts as part of Phase B migration.
8
+ */
9
+
10
+ import { createLoggerFromEnv } from '@littlebearapps/platform-consumer-sdk';
11
+ import { CloudflareGraphQL } from '../../shared/cloudflare';
12
+ import {
13
+ queryUsageByTimeBucket,
14
+ type TimeBucketedUsage,
15
+ type TimeBucketQueryParams,
16
+ } from '../../analytics-engine';
17
+ import type { Env, FeatureUsageData, WorkersAIResponse, WorkersAISummary } from '../shared';
18
+ import { FEATURE_KV_KEYS, FEATURE_METRIC_FIELDS, jsonResponse, parseQueryParams } from '../shared';
19
+ import { queryAIGatewayMetrics } from './data-queries';
20
+
21
+ // =============================================================================
22
+ // HELPER: D1 FALLBACK FOR ANALYTICS ENGINE
23
+ // =============================================================================
24
+
25
+ /**
26
+ * Query D1 daily_usage_rollups as fallback when Analytics Engine has insufficient data.
27
+ * Transforms D1 columns to match TimeBucketedUsage interface.
28
+ *
29
+ * Analytics Engine has 7-day retention (free tier), so historical queries
30
+ * beyond that window need to fall back to D1 which stores 90 days of daily rollups.
31
+ *
32
+ * @param db D1 database binding
33
+ * @param params Query parameters
34
+ * @returns Time-bucketed usage data from D1
35
+ */
36
+ async function queryUsageFromD1(
37
+ db: D1Database,
38
+ params: TimeBucketQueryParams
39
+ ): Promise<TimeBucketedUsage[]> {
40
+ // Calculate date range based on period
41
+ const now = new Date();
42
+ const daysBack = params.period === '24h' ? 1 : params.period === '7d' ? 7 : 30;
43
+ const startDate = new Date(now);
44
+ startDate.setUTCDate(startDate.getUTCDate() - daysBack);
45
+ const startDateStr = startDate.toISOString().split('T')[0];
46
+ const endDateStr = now.toISOString().split('T')[0];
47
+
48
+ // Build project filter
49
+ const projectFilter = params.project ? `AND project = ?` : `AND project != 'all'`;
50
+ const bindParams = params.project
51
+ ? [startDateStr, endDateStr, params.project]
52
+ : [startDateStr, endDateStr];
53
+
54
+ // Query D1 - groupBy 'hour' not supported in D1 (only daily data), fall back to day
55
+ const query = `
56
+ SELECT
57
+ snapshot_date || 'T00:00:00Z' as time_bucket,
58
+ project as project_id,
59
+ 0 as d1_writes,
60
+ 0 as d1_reads,
61
+ COALESCE(d1_rows_read, 0) as d1_rows_read,
62
+ COALESCE(d1_rows_written, 0) as d1_rows_written,
63
+ COALESCE(kv_reads, 0) as kv_reads,
64
+ COALESCE(kv_writes, 0) as kv_writes,
65
+ COALESCE(kv_deletes, 0) as kv_deletes,
66
+ COALESCE(kv_list_ops, 0) as kv_lists,
67
+ COALESCE(do_requests, 0) as do_requests,
68
+ COALESCE(do_gb_seconds, 0) as do_gb_seconds,
69
+ COALESCE(r2_class_a_ops, 0) as r2_class_a,
70
+ COALESCE(r2_class_b_ops, 0) as r2_class_b,
71
+ COALESCE(workersai_neurons, 0) as ai_neurons,
72
+ COALESCE(workersai_requests, 0) as ai_requests,
73
+ COALESCE(queues_messages_produced, 0) as queue_messages,
74
+ COALESCE(workers_requests, 0) as requests,
75
+ COALESCE(workers_cpu_time_ms, 0) as cpu_ms,
76
+ COALESCE(vectorize_queries, 0) as vectorize_queries,
77
+ 0 as vectorize_inserts,
78
+ COALESCE(workflows_executions, 0) as workflow_invocations,
79
+ COALESCE(samples_count, 1) as interaction_count
80
+ FROM daily_usage_rollups
81
+ WHERE snapshot_date >= ?
82
+ AND snapshot_date <= ?
83
+ ${projectFilter}
84
+ ORDER BY snapshot_date ASC, project ASC
85
+ `;
86
+
87
+ const result = await db
88
+ .prepare(query)
89
+ .bind(...bindParams)
90
+ .all<TimeBucketedUsage>();
91
+ return result.results ?? [];
92
+ }
93
+
94
+ // =============================================================================
95
+ // HANDLERS
96
+ // =============================================================================
97
+
98
+ /**
99
+ * Handle GET /usage/features
100
+ *
101
+ * Returns feature-level usage from Analytics Engine with circuit breaker status.
102
+ * Query params:
103
+ * - period: 'hour' | 'day' (default: 'hour')
104
+ * - feature: optional specific feature key to filter
105
+ */
106
+ export async function handleFeatures(url: URL, env: Env): Promise<Response> {
107
+ const reqStartTime = Date.now();
108
+ const period = url.searchParams.get('period') ?? 'hour';
109
+ const featureFilter = url.searchParams.get('feature');
110
+
111
+ try {
112
+ // Calculate time range for query
113
+ const now = new Date();
114
+ let queryStartTime: Date;
115
+ if (period === 'day') {
116
+ queryStartTime = new Date(now);
117
+ queryStartTime.setUTCHours(0, 0, 0, 0);
118
+ } else {
119
+ queryStartTime = new Date(now);
120
+ queryStartTime.setMinutes(0, 0, 0);
121
+ }
122
+
123
+ // =========================================================================
124
+ // Step 1: Fetch ALL registered features from D1 feature_registry
125
+ // =========================================================================
126
+ // This ensures features appear even when idle (no AE activity)
127
+ let registeredFeatures: Array<{
128
+ feature_key: string;
129
+ project_id: string;
130
+ category: string;
131
+ feature: string;
132
+ display_name: string;
133
+ circuit_breaker_enabled: number;
134
+ daily_limits_json: string | null;
135
+ }> = [];
136
+
137
+ // Also fetch heartbeat data from system_health_checks
138
+ const heartbeatMap = new Map<string, { lastHeartbeat: string; status: string }>();
139
+ try {
140
+ const healthResult = await env.PLATFORM_DB.prepare(
141
+ `SELECT project_id, feature_id, last_heartbeat, status FROM system_health_checks`
142
+ ).all<{ project_id: string; feature_id: string; last_heartbeat: number; status: string }>();
143
+
144
+ for (const row of healthResult.results ?? []) {
145
+ // feature_id is stored as the full feature_key
146
+ heartbeatMap.set(row.feature_id, {
147
+ lastHeartbeat: new Date(row.last_heartbeat * 1000).toISOString(),
148
+ status: row.status,
149
+ });
150
+ }
151
+ } catch (err) {
152
+ // system_health_checks may not have data yet - continue without heartbeat info
153
+ const log = createLoggerFromEnv(env, 'platform-usage', 'platform:usage:features');
154
+ log.debug('Could not query system_health_checks', {
155
+ error: err instanceof Error ? err.message : String(err),
156
+ });
157
+ }
158
+
159
+ try {
160
+ let registryQuery = `
161
+ SELECT feature_key, project_id, category, feature, display_name,
162
+ circuit_breaker_enabled, daily_limits_json
163
+ FROM feature_registry
164
+ `;
165
+ if (featureFilter) {
166
+ registryQuery += ` WHERE feature_key = ?`;
167
+ const stmt = env.PLATFORM_DB.prepare(registryQuery).bind(featureFilter);
168
+ const registryResult = await stmt.all();
169
+ registeredFeatures = (registryResult.results ?? []) as typeof registeredFeatures;
170
+ } else {
171
+ const registryResult = await env.PLATFORM_DB.prepare(registryQuery).all();
172
+ registeredFeatures = (registryResult.results ?? []) as typeof registeredFeatures;
173
+ }
174
+ const log = createLoggerFromEnv(env, 'platform-usage', 'platform:usage:features');
175
+ log.info('Loaded features from registry', { count: registeredFeatures.length });
176
+ } catch (err) {
177
+ // feature_registry may not exist in some environments - proceed with AE only
178
+ const log = createLoggerFromEnv(env, 'platform-usage', 'platform:usage:features');
179
+ log.warn('Could not query feature_registry', err instanceof Error ? err : undefined);
180
+ }
181
+
182
+ // =========================================================================
183
+ // Step 2: Query Analytics Engine for features with activity
184
+ // =========================================================================
185
+ const sumClauses = FEATURE_METRIC_FIELDS.map(
186
+ (field, i) => `SUM(double${i + 1}) as ${field}`
187
+ ).join(', ');
188
+
189
+ // Analytics Engine toDateTime() requires 'YYYY-MM-DD HH:MM:SS' format
190
+ // (no 'T' separator, no milliseconds, no timezone suffix)
191
+ const formattedStartTime = queryStartTime
192
+ .toISOString()
193
+ .replace('T', ' ')
194
+ .replace(/\.\d{3}Z$/, '');
195
+ let whereClause = `timestamp >= toDateTime('${formattedStartTime}')`;
196
+ if (featureFilter) {
197
+ whereClause += ` AND index1 = '${featureFilter.replace(/'/g, "''")}'`;
198
+ }
199
+
200
+ const query = `
201
+ SELECT
202
+ index1 as feature_key,
203
+ blob1 as project,
204
+ blob2 as category,
205
+ blob3 as feature,
206
+ ${sumClauses}
207
+ FROM "platform-analytics"
208
+ WHERE ${whereClause}
209
+ GROUP BY index1, blob1, blob2, blob3
210
+ ORDER BY feature_key
211
+ FORMAT JSON
212
+ `;
213
+
214
+ // Build a map of AE data for quick lookup
215
+ const aeDataMap = new Map<
216
+ string,
217
+ {
218
+ project: string;
219
+ category: string;
220
+ feature: string;
221
+ metrics: Record<string, number>;
222
+ }
223
+ >();
224
+
225
+ const response = await fetch(
226
+ `https://api.cloudflare.com/client/v4/accounts/${env.CLOUDFLARE_ACCOUNT_ID}/analytics_engine/sql`,
227
+ {
228
+ method: 'POST',
229
+ headers: {
230
+ Authorization: `Bearer ${env.CLOUDFLARE_API_TOKEN}`,
231
+ 'Content-Type': 'text/plain',
232
+ },
233
+ body: query,
234
+ }
235
+ );
236
+
237
+ if (response.ok) {
238
+ const result = (await response.json()) as {
239
+ data: Array<{
240
+ feature_key: string;
241
+ project: string;
242
+ category: string;
243
+ feature: string;
244
+ [key: string]: string | number;
245
+ }>;
246
+ };
247
+
248
+ for (const row of result.data ?? []) {
249
+ const metrics: Record<string, number> = {};
250
+ for (const field of FEATURE_METRIC_FIELDS) {
251
+ metrics[field] = typeof row[field] === 'number' ? row[field] : 0;
252
+ }
253
+ aeDataMap.set(row.feature_key, {
254
+ project: row.project,
255
+ category: row.category,
256
+ feature: row.feature,
257
+ metrics,
258
+ });
259
+ }
260
+ const log = createLoggerFromEnv(env, 'platform-usage', 'platform:usage:features');
261
+ log.info('Loaded features from Analytics Engine', { count: aeDataMap.size });
262
+ } else {
263
+ const errorText = await response.text();
264
+ const log = createLoggerFromEnv(env, 'platform-usage', 'platform:usage:features');
265
+ // Handle empty dataset - not an error, just no activity yet
266
+ if (!errorText.includes('unable to find type of column')) {
267
+ log.error('Analytics Engine query failed', undefined, {
268
+ status: response.status,
269
+ errorText,
270
+ });
271
+ } else {
272
+ log.info('No data in platform-analytics dataset yet');
273
+ }
274
+ }
275
+
276
+ // =========================================================================
277
+ // Step 3: Merge registered features with AE data
278
+ // =========================================================================
279
+ // Build zero metrics object for features without activity
280
+ const zeroMetrics: Record<string, number> = {};
281
+ for (const field of FEATURE_METRIC_FIELDS) {
282
+ zeroMetrics[field] = 0;
283
+ }
284
+
285
+ // Create a Set of feature keys we've already processed
286
+ const processedKeys = new Set<string>();
287
+ const features: FeatureUsageData[] = [];
288
+
289
+ // Process registered features first (ensures all appear)
290
+ for (const reg of registeredFeatures) {
291
+ const featureKey = reg.feature_key;
292
+ processedKeys.add(featureKey);
293
+
294
+ const aeData = aeDataMap.get(featureKey);
295
+
296
+ // Get circuit breaker state from KV
297
+ const [enabledStr, disabledReason, disabledAt, autoResetAt] = await Promise.all([
298
+ env.PLATFORM_CACHE.get(FEATURE_KV_KEYS.enabled(featureKey)),
299
+ env.PLATFORM_CACHE.get(FEATURE_KV_KEYS.disabledReason(featureKey)),
300
+ env.PLATFORM_CACHE.get(FEATURE_KV_KEYS.disabledAt(featureKey)),
301
+ env.PLATFORM_CACHE.get(FEATURE_KV_KEYS.autoResetAt(featureKey)),
302
+ ]);
303
+
304
+ // Get heartbeat info for this feature
305
+ const heartbeat = heartbeatMap.get(featureKey);
306
+
307
+ features.push({
308
+ featureKey,
309
+ project: aeData?.project ?? reg.project_id,
310
+ category: aeData?.category ?? reg.category,
311
+ feature: aeData?.feature ?? reg.feature,
312
+ metrics: aeData?.metrics ?? { ...zeroMetrics },
313
+ circuitBreaker: {
314
+ enabled: enabledStr !== 'false',
315
+ disabledReason: disabledReason ?? undefined,
316
+ disabledAt: disabledAt ?? undefined,
317
+ autoResetAt: autoResetAt ?? undefined,
318
+ },
319
+ // Include budget info from registry
320
+ budget: reg.daily_limits_json ? JSON.parse(reg.daily_limits_json) : undefined,
321
+ circuitBreakerEnabled: reg.circuit_breaker_enabled === 1,
322
+ hasActivity: !!aeData,
323
+ // Heartbeat info from system_health_checks
324
+ lastHeartbeat: heartbeat?.lastHeartbeat,
325
+ healthStatus: heartbeat?.status,
326
+ });
327
+ }
328
+
329
+ // Add any AE features not in registry (shouldn't happen, but be safe)
330
+ for (const [featureKey, aeData] of aeDataMap) {
331
+ if (processedKeys.has(featureKey)) continue;
332
+
333
+ const [enabledStr, disabledReason, disabledAt, autoResetAt] = await Promise.all([
334
+ env.PLATFORM_CACHE.get(FEATURE_KV_KEYS.enabled(featureKey)),
335
+ env.PLATFORM_CACHE.get(FEATURE_KV_KEYS.disabledReason(featureKey)),
336
+ env.PLATFORM_CACHE.get(FEATURE_KV_KEYS.disabledAt(featureKey)),
337
+ env.PLATFORM_CACHE.get(FEATURE_KV_KEYS.autoResetAt(featureKey)),
338
+ ]);
339
+
340
+ // Get heartbeat info for this feature
341
+ const heartbeat = heartbeatMap.get(featureKey);
342
+
343
+ features.push({
344
+ featureKey,
345
+ project: aeData.project,
346
+ category: aeData.category,
347
+ feature: aeData.feature,
348
+ metrics: aeData.metrics,
349
+ circuitBreaker: {
350
+ enabled: enabledStr !== 'false',
351
+ disabledReason: disabledReason ?? undefined,
352
+ disabledAt: disabledAt ?? undefined,
353
+ autoResetAt: autoResetAt ?? undefined,
354
+ },
355
+ hasActivity: true,
356
+ // Heartbeat info from system_health_checks
357
+ lastHeartbeat: heartbeat?.lastHeartbeat,
358
+ healthStatus: heartbeat?.status,
359
+ });
360
+ }
361
+
362
+ // Sort by feature key for consistent ordering
363
+ features.sort((a, b) => a.featureKey.localeCompare(b.featureKey));
364
+
365
+ return jsonResponse({
366
+ success: true,
367
+ period,
368
+ queryStartTime: queryStartTime.toISOString(),
369
+ features,
370
+ registeredCount: registeredFeatures.length,
371
+ activeCount: aeDataMap.size,
372
+ timestamp: new Date().toISOString(),
373
+ responseTimeMs: Date.now() - reqStartTime,
374
+ });
375
+ } catch (error) {
376
+ const errorMessage = error instanceof Error ? error.message : String(error);
377
+ const log = createLoggerFromEnv(env, 'platform-usage', 'platform:usage:features');
378
+ log.error('Error fetching feature usage', error instanceof Error ? error : undefined, {
379
+ errorMessage,
380
+ });
381
+
382
+ return jsonResponse(
383
+ { success: false, error: 'Failed to fetch feature usage', message: errorMessage },
384
+ 500
385
+ );
386
+ }
387
+ }
388
+
389
+ /**
390
+ * Handle GET /usage/query
391
+ *
392
+ * Returns time-bucketed aggregated usage data from Analytics Engine,
393
+ * with D1 fallback for historical data beyond AE retention (7 days).
394
+ * Supports period (24h/7d/30d), groupBy (hour/day), and optional project filter.
395
+ *
396
+ * Query params:
397
+ * - period: '24h' | '7d' | '30d' (default: '24h')
398
+ * - groupBy: 'hour' | 'day' (default: 'hour')
399
+ * - project: optional project ID filter
400
+ *
401
+ * Response format:
402
+ * {
403
+ * success: true,
404
+ * data: [{ time_bucket: "2026-01-20T14:00:00Z", project_id: "my-app", ... }],
405
+ * meta: { period: "24h", groupBy: "hour", rowCount: 24, queryTimeMs: 45, source: "ae" | "d1" },
406
+ * timestamp: "2026-01-20T15:30:00Z"
407
+ * }
408
+ */
409
+ export async function handleUsageQuery(url: URL, env: Env): Promise<Response> {
410
+ const startTime = Date.now();
411
+
412
+ // Parse and validate query params
413
+ const periodParam = url.searchParams.get('period') ?? '24h';
414
+ const groupByParam = url.searchParams.get('groupBy') ?? 'hour';
415
+ const projectParam = url.searchParams.get('project') ?? undefined;
416
+
417
+ // Validate period
418
+ const validPeriods = ['24h', '7d', '30d'] as const;
419
+ if (!validPeriods.includes(periodParam as (typeof validPeriods)[number])) {
420
+ return jsonResponse(
421
+ {
422
+ success: false,
423
+ error: 'Invalid period parameter',
424
+ code: 'INVALID_PERIOD',
425
+ message: `period must be one of: ${validPeriods.join(', ')}`,
426
+ },
427
+ 400
428
+ );
429
+ }
430
+
431
+ // Validate groupBy
432
+ const validGroupBy = ['hour', 'day'] as const;
433
+ if (!validGroupBy.includes(groupByParam as (typeof validGroupBy)[number])) {
434
+ return jsonResponse(
435
+ {
436
+ success: false,
437
+ error: 'Invalid groupBy parameter',
438
+ code: 'INVALID_GROUP_BY',
439
+ message: `groupBy must be one of: ${validGroupBy.join(', ')}`,
440
+ },
441
+ 400
442
+ );
443
+ }
444
+
445
+ const period = periodParam as TimeBucketQueryParams['period'];
446
+ const groupBy = groupByParam as TimeBucketQueryParams['groupBy'];
447
+
448
+ // Determine expected data points based on period and groupBy
449
+ // Analytics Engine has ~7 day retention (free tier)
450
+ const expectedDays = period === '24h' ? 1 : period === '7d' ? 7 : 30;
451
+ const aeRetentionDays = 7; // Free tier Analytics Engine retention
452
+
453
+ let data: TimeBucketedUsage[] = [];
454
+ let source: 'ae' | 'd1' | 'ae+d1' = 'ae';
455
+
456
+ try {
457
+ // Try Analytics Engine first (real-time SDK telemetry)
458
+ data = await queryUsageByTimeBucket(
459
+ env.CLOUDFLARE_ACCOUNT_ID,
460
+ env.CLOUDFLARE_API_TOKEN,
461
+ { period, groupBy, project: projectParam },
462
+ 'platform-analytics'
463
+ );
464
+
465
+ // Check if AE returned sufficient data for the requested period
466
+ // For periods beyond AE retention, fall back to D1
467
+ const log = createLoggerFromEnv(env, 'platform-usage', 'platform:usage:query');
468
+ if (expectedDays > aeRetentionDays && data.length === 0) {
469
+ // AE has no data, use D1 exclusively
470
+ log.info('AE returned 0 rows, falling back to D1 daily_usage_rollups', { period });
471
+ data = await queryUsageFromD1(env.PLATFORM_DB, { period, groupBy, project: projectParam });
472
+ source = 'd1';
473
+ } else if (expectedDays > aeRetentionDays) {
474
+ // AE has partial data, supplement with D1 for older dates
475
+ // Get unique dates from AE data
476
+ const aeDates = new Set(data.map((r) => r.time_bucket.split('T')[0]));
477
+
478
+ // Query D1 for the full period
479
+ const d1Data = await queryUsageFromD1(env.PLATFORM_DB, {
480
+ period,
481
+ groupBy: 'day', // D1 only has daily granularity
482
+ project: projectParam,
483
+ });
484
+
485
+ // Filter D1 data to only include dates not in AE
486
+ const d1OnlyData = d1Data.filter((r) => {
487
+ const date = r.time_bucket.split('T')[0];
488
+ return !aeDates.has(date);
489
+ });
490
+
491
+ if (d1OnlyData.length > 0) {
492
+ log.info('Supplementing AE rows with D1 rows', {
493
+ aeRows: data.length,
494
+ d1Rows: d1OnlyData.length,
495
+ });
496
+ // Combine D1 historical + AE recent, sorted by time
497
+ data = [...d1OnlyData, ...data].sort((a, b) => a.time_bucket.localeCompare(b.time_bucket));
498
+ source = 'ae+d1';
499
+ }
500
+ }
501
+
502
+ const queryTimeMs = Date.now() - startTime;
503
+
504
+ // Return response with 5-minute cache header (Analytics Engine eventual consistency)
505
+ return new Response(
506
+ JSON.stringify({
507
+ success: true,
508
+ data,
509
+ meta: {
510
+ period,
511
+ groupBy,
512
+ project: projectParam ?? 'all',
513
+ rowCount: data.length,
514
+ queryTimeMs,
515
+ source,
516
+ },
517
+ timestamp: new Date().toISOString(),
518
+ }),
519
+ {
520
+ status: 200,
521
+ headers: {
522
+ 'Content-Type': 'application/json',
523
+ 'Cache-Control': 'public, max-age=300',
524
+ },
525
+ }
526
+ );
527
+ } catch (error) {
528
+ const errorMessage = error instanceof Error ? error.message : String(error);
529
+ const logQuery = createLoggerFromEnv(env, 'platform-usage', 'platform:usage:query');
530
+ logQuery.error('Usage query error', error instanceof Error ? error : undefined, {
531
+ errorMessage,
532
+ });
533
+
534
+ // Handle empty dataset case gracefully - try D1 fallback
535
+ if (errorMessage.includes('unable to find type of column')) {
536
+ logQuery.info('AE empty dataset error, trying D1 fallback');
537
+ try {
538
+ data = await queryUsageFromD1(env.PLATFORM_DB, { period, groupBy, project: projectParam });
539
+ source = 'd1';
540
+
541
+ return jsonResponse({
542
+ success: true,
543
+ data,
544
+ meta: {
545
+ period,
546
+ groupBy,
547
+ project: projectParam ?? 'all',
548
+ rowCount: data.length,
549
+ queryTimeMs: Date.now() - startTime,
550
+ source,
551
+ },
552
+ note:
553
+ data.length === 0
554
+ ? 'No usage data found. Data will appear after features report usage.'
555
+ : 'Data from D1 daily_usage_rollups (Analytics Engine empty)',
556
+ timestamp: new Date().toISOString(),
557
+ });
558
+ } catch (d1Error) {
559
+ logQuery.error('D1 fallback also failed', d1Error instanceof Error ? d1Error : undefined);
560
+ return jsonResponse({
561
+ success: true,
562
+ data: [],
563
+ meta: {
564
+ period,
565
+ groupBy,
566
+ project: projectParam ?? 'all',
567
+ rowCount: 0,
568
+ queryTimeMs: Date.now() - startTime,
569
+ source: 'none',
570
+ },
571
+ note: 'No telemetry data collected yet. Data will appear after features report usage.',
572
+ timestamp: new Date().toISOString(),
573
+ });
574
+ }
575
+ }
576
+
577
+ // For other errors, try D1 as a fallback before returning error
578
+ try {
579
+ logQuery.info('AE error, trying D1 fallback', { errorMessage });
580
+ data = await queryUsageFromD1(env.PLATFORM_DB, { period, groupBy, project: projectParam });
581
+ source = 'd1';
582
+
583
+ return jsonResponse({
584
+ success: true,
585
+ data,
586
+ meta: {
587
+ period,
588
+ groupBy,
589
+ project: projectParam ?? 'all',
590
+ rowCount: data.length,
591
+ queryTimeMs: Date.now() - startTime,
592
+ source,
593
+ },
594
+ note: 'Data from D1 daily_usage_rollups (Analytics Engine unavailable)',
595
+ timestamp: new Date().toISOString(),
596
+ });
597
+ } catch {
598
+ // Both AE and D1 failed
599
+ return jsonResponse(
600
+ {
601
+ success: false,
602
+ error: 'Failed to query usage data',
603
+ code: 'QUERY_ERROR',
604
+ message: errorMessage,
605
+ },
606
+ 500
607
+ );
608
+ }
609
+ }
610
+ }
611
+
612
+ /**
613
+ * Handle GET /usage/features/circuit-breakers
614
+ *
615
+ * Returns all feature-level circuit breaker states from KV.
616
+ */
617
+ export async function handleGetFeatureCircuitBreakers(env: Env): Promise<Response> {
618
+ const startTime = Date.now();
619
+
620
+ try {
621
+ // List all FEATURE:* keys from KV
622
+ const { keys } = await env.PLATFORM_CACHE.list({ prefix: 'FEATURE:' });
623
+
624
+ // Group by feature key
625
+ const features: Record<
626
+ string,
627
+ { enabled?: boolean; disabledReason?: string; disabledAt?: string; autoResetAt?: string }
628
+ > = {};
629
+
630
+ for (const key of keys) {
631
+ // Parse key: FEATURE:{project}:{category}:{feature}:{field}
632
+ const match = key.name.match(/^FEATURE:([^:]+:[^:]+:[^:]+):(.+)$/);
633
+ if (match) {
634
+ const [, featureKey, field] = match;
635
+ if (!features[featureKey]) features[featureKey] = {};
636
+
637
+ const value = await env.PLATFORM_CACHE.get(key.name);
638
+ if (field === 'enabled') features[featureKey].enabled = value !== 'false';
639
+ if (field === 'disabled_reason') features[featureKey].disabledReason = value ?? undefined;
640
+ if (field === 'disabled_at') features[featureKey].disabledAt = value ?? undefined;
641
+ if (field === 'auto_reset_at') features[featureKey].autoResetAt = value ?? undefined;
642
+ }
643
+ }
644
+
645
+ return jsonResponse({
646
+ success: true,
647
+ circuitBreakers: features,
648
+ timestamp: new Date().toISOString(),
649
+ responseTimeMs: Date.now() - startTime,
650
+ });
651
+ } catch (error) {
652
+ const errorMessage = error instanceof Error ? error.message : String(error);
653
+ const log = createLoggerFromEnv(env, 'platform-usage', 'platform:usage:features');
654
+ log.error('Error fetching circuit breakers', error instanceof Error ? error : undefined, {
655
+ tag: 'CB_FETCH_ERROR',
656
+ errorMessage,
657
+ });
658
+
659
+ return jsonResponse(
660
+ { success: false, error: 'Failed to fetch circuit breakers', message: errorMessage },
661
+ 500
662
+ );
663
+ }
664
+ }
665
+
666
+ /**
667
+ * Handle PUT /usage/features/circuit-breakers
668
+ *
669
+ * Toggle a feature circuit breaker.
670
+ * Body: { featureKey: string, enabled: boolean }
671
+ */
672
+ export async function handlePutFeatureCircuitBreakers(
673
+ request: Request,
674
+ env: Env
675
+ ): Promise<Response> {
676
+ try {
677
+ const body = (await request.json()) as { featureKey?: string; enabled?: boolean };
678
+
679
+ if (!body.featureKey) {
680
+ return jsonResponse({ success: false, error: 'Missing featureKey' }, 400);
681
+ }
682
+ if (typeof body.enabled !== 'boolean') {
683
+ return jsonResponse({ success: false, error: 'Missing or invalid enabled (boolean)' }, 400);
684
+ }
685
+
686
+ const featureKey = body.featureKey;
687
+
688
+ if (body.enabled) {
689
+ // Re-enable: delete all disable-related keys
690
+ await Promise.all([
691
+ env.PLATFORM_CACHE.delete(FEATURE_KV_KEYS.enabled(featureKey)),
692
+ env.PLATFORM_CACHE.delete(FEATURE_KV_KEYS.disabledReason(featureKey)),
693
+ env.PLATFORM_CACHE.delete(FEATURE_KV_KEYS.disabledAt(featureKey)),
694
+ env.PLATFORM_CACHE.delete(FEATURE_KV_KEYS.autoResetAt(featureKey)),
695
+ ]);
696
+
697
+ const log = createLoggerFromEnv(env, 'platform-usage', 'platform:usage:features');
698
+ log.info('Manually enabled feature', { tag: 'FEATURE_ENABLED', featureKey });
699
+ } else {
700
+ // Disable: set enabled=false with manual reason
701
+ const now = new Date().toISOString();
702
+ await Promise.all([
703
+ env.PLATFORM_CACHE.put(FEATURE_KV_KEYS.enabled(featureKey), 'false'),
704
+ env.PLATFORM_CACHE.put(
705
+ FEATURE_KV_KEYS.disabledReason(featureKey),
706
+ 'Manually disabled via dashboard'
707
+ ),
708
+ env.PLATFORM_CACHE.put(FEATURE_KV_KEYS.disabledAt(featureKey), now),
709
+ // No auto-reset for manual disables
710
+ env.PLATFORM_CACHE.delete(FEATURE_KV_KEYS.autoResetAt(featureKey)),
711
+ ]);
712
+
713
+ const log = createLoggerFromEnv(env, 'platform-usage', 'platform:usage:features');
714
+ log.info('Manually disabled feature', { tag: 'FEATURE_DISABLED', featureKey });
715
+ }
716
+
717
+ return jsonResponse({
718
+ success: true,
719
+ featureKey,
720
+ enabled: body.enabled,
721
+ timestamp: new Date().toISOString(),
722
+ });
723
+ } catch (error) {
724
+ const errorMessage = error instanceof Error ? error.message : String(error);
725
+ const log = createLoggerFromEnv(env, 'platform-usage', 'platform:usage:features');
726
+ log.error('Error toggling circuit breaker', error instanceof Error ? error : undefined, {
727
+ tag: 'CB_TOGGLE_ERROR',
728
+ errorMessage,
729
+ });
730
+
731
+ return jsonResponse(
732
+ { success: false, error: 'Failed to toggle circuit breaker', message: errorMessage },
733
+ 500
734
+ );
735
+ }
736
+ }
737
+
738
+ /**
739
+ * Handle GET /usage/features/circuit-breaker-events
740
+ *
741
+ * Returns recent circuit breaker events from D1.
742
+ * Query params:
743
+ * - limit: max events to return (default: 50, max: 200)
744
+ * - featureKey: filter by feature key (optional)
745
+ * - eventType: filter by event type: 'trip', 'reset', 'manual_disable', 'manual_enable' (optional)
746
+ */
747
+ export async function handleGetCircuitBreakerEvents(url: URL, env: Env): Promise<Response> {
748
+ const startTime = Date.now();
749
+
750
+ try {
751
+ const limitParam = url.searchParams.get('limit');
752
+ const limit = Math.min(Math.max(parseInt(limitParam ?? '50', 10) || 50, 1), 200);
753
+ const featureKey = url.searchParams.get('featureKey');
754
+ const eventType = url.searchParams.get('eventType');
755
+
756
+ // Build dynamic query based on filters
757
+ let query = `
758
+ SELECT
759
+ id,
760
+ feature_key,
761
+ event_type,
762
+ reason,
763
+ violated_resource,
764
+ current_value,
765
+ budget_limit,
766
+ auto_reset,
767
+ alert_sent,
768
+ datetime(created_at, 'unixepoch') as created_at_iso
769
+ FROM feature_circuit_breaker_events
770
+ `;
771
+ const params: (string | number)[] = [];
772
+
773
+ const conditions: string[] = [];
774
+ if (featureKey) {
775
+ conditions.push('feature_key = ?');
776
+ params.push(featureKey);
777
+ }
778
+ if (eventType) {
779
+ conditions.push('event_type = ?');
780
+ params.push(eventType);
781
+ }
782
+
783
+ if (conditions.length > 0) {
784
+ query += ` WHERE ${conditions.join(' AND ')}`;
785
+ }
786
+
787
+ query += ` ORDER BY created_at DESC LIMIT ?`;
788
+ params.push(limit);
789
+
790
+ const { results } = await env.PLATFORM_DB.prepare(query)
791
+ .bind(...params)
792
+ .all<{
793
+ id: string;
794
+ feature_key: string;
795
+ event_type: string;
796
+ reason: string | null;
797
+ violated_resource: string | null;
798
+ current_value: number | null;
799
+ budget_limit: number | null;
800
+ auto_reset: number;
801
+ alert_sent: number;
802
+ created_at_iso: string;
803
+ }>();
804
+
805
+ // Transform to camelCase
806
+ const events = (results ?? []).map((row) => ({
807
+ id: row.id,
808
+ featureKey: row.feature_key,
809
+ eventType: row.event_type,
810
+ reason: row.reason,
811
+ violatedResource: row.violated_resource,
812
+ currentValue: row.current_value,
813
+ budgetLimit: row.budget_limit,
814
+ autoReset: row.auto_reset === 1,
815
+ alertSent: row.alert_sent === 1,
816
+ createdAt: row.created_at_iso,
817
+ }));
818
+
819
+ const duration = Date.now() - startTime;
820
+ const log = createLoggerFromEnv(env, 'platform-usage', 'platform:usage:features');
821
+ log.info('Fetched circuit breaker events', {
822
+ tag: 'CB_EVENTS_FETCHED',
823
+ eventCount: events.length,
824
+ durationMs: duration,
825
+ });
826
+
827
+ return jsonResponse({
828
+ success: true,
829
+ events,
830
+ count: events.length,
831
+ limit,
832
+ filters: { featureKey, eventType },
833
+ timestamp: new Date().toISOString(),
834
+ });
835
+ } catch (error) {
836
+ const errorMessage = error instanceof Error ? error.message : String(error);
837
+ const log = createLoggerFromEnv(env, 'platform-usage', 'platform:usage:features');
838
+ log.error('Error fetching circuit breaker events', error instanceof Error ? error : undefined, {
839
+ tag: 'CB_EVENTS_ERROR',
840
+ errorMessage,
841
+ });
842
+
843
+ return jsonResponse(
844
+ { success: false, error: 'Failed to fetch circuit breaker events', message: errorMessage },
845
+ 500
846
+ );
847
+ }
848
+ }
849
+
850
+ /**
851
+ * Handle GET /usage/features/budgets
852
+ *
853
+ * Returns the feature budgets configuration.
854
+ * Primary: KV FEATURE_KV_KEYS.BUDGETS
855
+ * Fallback: Build from feature_registry.daily_limits_json
856
+ */
857
+ export async function handleGetFeatureBudgets(env: Env): Promise<Response> {
858
+ const startTime = Date.now();
859
+
860
+ try {
861
+ const budgetsJson = await env.PLATFORM_CACHE.get(FEATURE_KV_KEYS.BUDGETS);
862
+
863
+ if (budgetsJson) {
864
+ const budgets = JSON.parse(budgetsJson);
865
+ return jsonResponse({
866
+ success: true,
867
+ budgets,
868
+ source: 'kv',
869
+ timestamp: new Date().toISOString(),
870
+ responseTimeMs: Date.now() - startTime,
871
+ });
872
+ }
873
+
874
+ // =========================================================================
875
+ // Fallback: Build budgets from feature_registry if KV is empty
876
+ // =========================================================================
877
+ const log = createLoggerFromEnv(env, 'platform-usage', 'platform:usage:features');
878
+ log.info('KV budgets empty, building from feature_registry', { tag: 'BUDGETS_FALLBACK' });
879
+
880
+ // Default budget limits (match budgets.yaml defaults)
881
+ const defaults: Record<string, { hourly: number; daily?: number }> = {
882
+ d1Writes: { hourly: 10000, daily: 100000 },
883
+ d1Reads: { hourly: 100000, daily: 1000000 },
884
+ kvReads: { hourly: 50000, daily: 500000 },
885
+ kvWrites: { hourly: 5000, daily: 50000 },
886
+ requests: { hourly: 50000, daily: 500000 },
887
+ };
888
+
889
+ const features: Record<string, Record<string, { hourly?: number; daily?: number }>> = {};
890
+
891
+ try {
892
+ const registryResult = await env.PLATFORM_DB.prepare(
893
+ `
894
+ SELECT feature_key, daily_limits_json
895
+ FROM feature_registry
896
+ WHERE daily_limits_json IS NOT NULL
897
+ `
898
+ ).all();
899
+
900
+ for (const row of registryResult.results ?? []) {
901
+ const featureKey = row.feature_key as string;
902
+ const limitsJson = row.daily_limits_json as string;
903
+ if (limitsJson) {
904
+ try {
905
+ const limits = JSON.parse(limitsJson);
906
+ // Transform daily_limits_json format to budgets format
907
+ // Registry format: { d1_writes: 5000, kv_reads: 10000, ... }
908
+ // Budgets format: { d1Writes: { hourly: 5000 }, ... }
909
+ const featureBudget: Record<string, { hourly?: number; daily?: number }> = {};
910
+ for (const [key, value] of Object.entries(limits)) {
911
+ // Convert snake_case to camelCase
912
+ const camelKey = key.replace(/_([a-z])/g, (_, c) => c.toUpperCase());
913
+ if (typeof value === 'number') {
914
+ featureBudget[camelKey] = { hourly: value };
915
+ } else if (typeof value === 'object' && value !== null) {
916
+ featureBudget[camelKey] = value as { hourly?: number; daily?: number };
917
+ }
918
+ }
919
+ if (Object.keys(featureBudget).length > 0) {
920
+ features[featureKey] = featureBudget;
921
+ }
922
+ } catch {
923
+ log.warn('Invalid JSON in daily_limits_json', undefined, {
924
+ tag: 'INVALID_LIMITS_JSON',
925
+ featureKey,
926
+ });
927
+ }
928
+ }
929
+ }
930
+
931
+ log.info('Built budgets from registry', {
932
+ tag: 'BUDGETS_BUILT',
933
+ featureCount: Object.keys(features).length,
934
+ });
935
+ } catch (err) {
936
+ log.warn('Could not query feature_registry for budgets', undefined, {
937
+ tag: 'REGISTRY_QUERY_ERROR',
938
+ error: err instanceof Error ? err.message : String(err),
939
+ });
940
+ }
941
+
942
+ const budgets = { _defaults: defaults, features };
943
+
944
+ return jsonResponse({
945
+ success: true,
946
+ budgets,
947
+ source: 'registry',
948
+ timestamp: new Date().toISOString(),
949
+ responseTimeMs: Date.now() - startTime,
950
+ });
951
+ } catch (error) {
952
+ const errorMessage = error instanceof Error ? error.message : String(error);
953
+ const logErr = createLoggerFromEnv(env, 'platform-usage', 'platform:usage:features');
954
+ logErr.error('Error fetching budgets', error instanceof Error ? error : undefined, {
955
+ tag: 'BUDGETS_FETCH_ERROR',
956
+ errorMessage,
957
+ });
958
+
959
+ return jsonResponse(
960
+ { success: false, error: 'Failed to fetch budgets', message: errorMessage },
961
+ 500
962
+ );
963
+ }
964
+ }
965
+
966
+ /**
967
+ * Handle PUT /usage/features/budgets
968
+ *
969
+ * Updates the feature budgets configuration in KV.
970
+ * Body: { _defaults: {...}, features: {...} }
971
+ */
972
+ export async function handlePutFeatureBudgets(request: Request, env: Env): Promise<Response> {
973
+ try {
974
+ const body = (await request.json()) as {
975
+ _defaults?: Record<string, { hourly?: number; daily?: number }>;
976
+ features?: Record<string, Record<string, { hourly?: number; daily?: number }>>;
977
+ };
978
+
979
+ // Validate structure
980
+ if (!body._defaults && !body.features) {
981
+ return jsonResponse(
982
+ { success: false, error: 'Body must contain _defaults or features' },
983
+ 400
984
+ );
985
+ }
986
+
987
+ // Load existing budgets to merge
988
+ const existingJson = await env.PLATFORM_CACHE.get(FEATURE_KV_KEYS.BUDGETS);
989
+ const existing = existingJson ? JSON.parse(existingJson) : { _defaults: {}, features: {} };
990
+
991
+ // Merge updates
992
+ const updated = {
993
+ _defaults: body._defaults ?? existing._defaults,
994
+ features: body.features ? { ...existing.features, ...body.features } : existing.features,
995
+ };
996
+
997
+ // Save to KV
998
+ await env.PLATFORM_CACHE.put(FEATURE_KV_KEYS.BUDGETS, JSON.stringify(updated));
999
+
1000
+ const log = createLoggerFromEnv(env, 'platform-usage', 'platform:usage:features');
1001
+ log.info('Updated budgets config', { tag: 'BUDGETS_UPDATED' });
1002
+
1003
+ return jsonResponse({
1004
+ success: true,
1005
+ budgets: updated,
1006
+ timestamp: new Date().toISOString(),
1007
+ });
1008
+ } catch (error) {
1009
+ const errorMessage = error instanceof Error ? error.message : String(error);
1010
+ const log = createLoggerFromEnv(env, 'platform-usage', 'platform:usage:features');
1011
+ log.error('Error updating budgets', error instanceof Error ? error : undefined, {
1012
+ tag: 'BUDGETS_UPDATE_ERROR',
1013
+ errorMessage,
1014
+ });
1015
+
1016
+ return jsonResponse(
1017
+ { success: false, error: 'Failed to update budgets', message: errorMessage },
1018
+ 500
1019
+ );
1020
+ }
1021
+ }
1022
+
1023
+ /**
1024
+ * Handle GET /usage/features/history
1025
+ *
1026
+ * Returns historical feature usage data from D1 for sparkline charts.
1027
+ * Query params:
1028
+ * - days: number of days (default: 7, max: 90)
1029
+ * - featureKey: optional, filter by feature key
1030
+ */
1031
+ export async function handleGetFeatureHistory(url: URL, env: Env): Promise<Response> {
1032
+ const startTime = Date.now();
1033
+
1034
+ try {
1035
+ // Parse query params
1036
+ const daysParam = url.searchParams.get('days');
1037
+ const days = Math.min(Math.max(parseInt(daysParam ?? '7', 10) || 7, 1), 90);
1038
+ const featureKey = url.searchParams.get('featureKey');
1039
+
1040
+ // Calculate date range
1041
+ const endDate = new Date();
1042
+ const startDate = new Date();
1043
+ startDate.setUTCDate(startDate.getUTCDate() - days);
1044
+
1045
+ const startDateStr = startDate.toISOString().split('T')[0];
1046
+ const endDateStr = endDate.toISOString().split('T')[0];
1047
+
1048
+ // Build query
1049
+ let query = `
1050
+ SELECT
1051
+ feature_key,
1052
+ usage_date,
1053
+ d1_writes,
1054
+ d1_reads,
1055
+ kv_reads,
1056
+ kv_writes,
1057
+ do_requests,
1058
+ do_gb_seconds,
1059
+ r2_class_a,
1060
+ r2_class_b,
1061
+ ai_neurons,
1062
+ queue_messages,
1063
+ requests,
1064
+ times_disabled
1065
+ FROM feature_usage_daily
1066
+ WHERE usage_date >= ? AND usage_date <= ?
1067
+ `;
1068
+
1069
+ const params: (string | number)[] = [startDateStr, endDateStr];
1070
+
1071
+ if (featureKey) {
1072
+ query += ' AND feature_key = ?';
1073
+ params.push(featureKey);
1074
+ }
1075
+
1076
+ query += ' ORDER BY feature_key, usage_date ASC';
1077
+
1078
+ const result = await env.PLATFORM_DB.prepare(query)
1079
+ .bind(...params)
1080
+ .all();
1081
+
1082
+ if (!result.success) {
1083
+ throw new Error('D1 query failed');
1084
+ }
1085
+
1086
+ // Group by feature_key for sparkline data
1087
+ const byFeature: Record<
1088
+ string,
1089
+ Array<{
1090
+ date: string;
1091
+ d1Writes: number;
1092
+ d1Reads: number;
1093
+ kvReads: number;
1094
+ kvWrites: number;
1095
+ doRequests: number;
1096
+ doGbSeconds: number;
1097
+ r2ClassA: number;
1098
+ r2ClassB: number;
1099
+ aiNeurons: number;
1100
+ queueMessages: number;
1101
+ requests: number;
1102
+ timesDisabled: number;
1103
+ }>
1104
+ > = {};
1105
+
1106
+ for (const row of result.results ?? []) {
1107
+ const key = row.feature_key as string;
1108
+ if (!byFeature[key]) byFeature[key] = [];
1109
+
1110
+ byFeature[key].push({
1111
+ date: row.usage_date as string,
1112
+ d1Writes: row.d1_writes as number,
1113
+ d1Reads: row.d1_reads as number,
1114
+ kvReads: row.kv_reads as number,
1115
+ kvWrites: row.kv_writes as number,
1116
+ doRequests: row.do_requests as number,
1117
+ doGbSeconds: row.do_gb_seconds as number,
1118
+ r2ClassA: row.r2_class_a as number,
1119
+ r2ClassB: row.r2_class_b as number,
1120
+ aiNeurons: row.ai_neurons as number,
1121
+ queueMessages: row.queue_messages as number,
1122
+ requests: row.requests as number,
1123
+ timesDisabled: row.times_disabled as number,
1124
+ });
1125
+ }
1126
+
1127
+ return jsonResponse({
1128
+ success: true,
1129
+ days,
1130
+ startDate: startDateStr,
1131
+ endDate: endDateStr,
1132
+ features: byFeature,
1133
+ timestamp: new Date().toISOString(),
1134
+ responseTimeMs: Date.now() - startTime,
1135
+ });
1136
+ } catch (error) {
1137
+ const errorMessage = error instanceof Error ? error.message : String(error);
1138
+ const log = createLoggerFromEnv(env, 'platform-usage', 'platform:usage:features');
1139
+ log.error('Error fetching history', error instanceof Error ? error : undefined, {
1140
+ tag: 'HISTORY_FETCH_ERROR',
1141
+ errorMessage,
1142
+ });
1143
+
1144
+ return jsonResponse(
1145
+ { success: false, error: 'Failed to fetch history', message: errorMessage },
1146
+ 500
1147
+ );
1148
+ }
1149
+ }
1150
+
1151
+ /**
1152
+ * Handle GET /usage/workersai
1153
+ *
1154
+ * Returns Workers AI usage metrics from Analytics Engine.
1155
+ * Aggregates data from all registered projects.
1156
+ *
1157
+ * Query params:
1158
+ * - period: '24h' | '7d' | '30d' (default: '7d')
1159
+ */
1160
+ export async function handleWorkersAI(url: URL, env: Env): Promise<Response> {
1161
+ const startTime = Date.now();
1162
+ const { period } = parseQueryParams(url);
1163
+ const log = createLoggerFromEnv(env, 'platform-usage', 'platform:usage:workersai');
1164
+
1165
+ // Use 15-minute cache (shorter than main usage cache since AI usage changes more frequently)
1166
+ const cacheKey = `workersai:${period}:${Math.floor(Date.now() / 900000)}`;
1167
+
1168
+ // Check cache first
1169
+ try {
1170
+ const cached = (await env.PLATFORM_CACHE.get(cacheKey, 'json')) as WorkersAIResponse | null;
1171
+ if (cached) {
1172
+ log.info('Workers AI cache hit', { tag: 'CACHE_HIT', cacheKey });
1173
+ return jsonResponse({
1174
+ ...cached,
1175
+ cached: true,
1176
+ responseTimeMs: Date.now() - startTime,
1177
+ });
1178
+ }
1179
+ } catch (error) {
1180
+ log.error('Workers AI cache read error', error instanceof Error ? error : undefined, {
1181
+ tag: 'CACHE_READ_ERROR',
1182
+ cacheKey,
1183
+ });
1184
+ }
1185
+
1186
+ log.info('Workers AI cache miss, fetching from Analytics Engine', {
1187
+ tag: 'CACHE_MISS',
1188
+ cacheKey,
1189
+ });
1190
+
1191
+ if (!env.CLOUDFLARE_ACCOUNT_ID || !env.CLOUDFLARE_API_TOKEN) {
1192
+ return jsonResponse(
1193
+ {
1194
+ success: false,
1195
+ error: 'Configuration Error',
1196
+ message: 'Missing CLOUDFLARE_ACCOUNT_ID or CLOUDFLARE_API_TOKEN',
1197
+ },
1198
+ 500
1199
+ );
1200
+ }
1201
+
1202
+ try {
1203
+ // Fetch Workers AI metrics from Analytics Engine AND AI Gateway data from D1 in parallel
1204
+ const client = new CloudflareGraphQL(env);
1205
+ const [metrics, aiGatewayData] = await Promise.all([
1206
+ client.getWorkersAIMetrics(period),
1207
+ queryAIGatewayMetrics(env, period),
1208
+ ]);
1209
+
1210
+ // Merge AI Gateway data into the response if available
1211
+ const metricsWithGateway: WorkersAISummary = {
1212
+ ...metrics,
1213
+ aiGateway: aiGatewayData ?? undefined,
1214
+ };
1215
+
1216
+ const response: WorkersAIResponse = {
1217
+ success: true,
1218
+ period,
1219
+ data: metricsWithGateway,
1220
+ cached: false,
1221
+ timestamp: new Date().toISOString(),
1222
+ };
1223
+
1224
+ // Cache for 15 minutes
1225
+ try {
1226
+ await env.PLATFORM_CACHE.put(cacheKey, JSON.stringify(response), { expirationTtl: 900 });
1227
+ log.info('Workers AI cached response', { tag: 'CACHE_WRITE', cacheKey });
1228
+ } catch (error) {
1229
+ log.error('Workers AI cache write error', error instanceof Error ? error : undefined, {
1230
+ tag: 'CACHE_WRITE_ERROR',
1231
+ cacheKey,
1232
+ });
1233
+ }
1234
+
1235
+ const duration = Date.now() - startTime;
1236
+ log.info('Workers AI data fetched', {
1237
+ tag: 'DATA_FETCHED',
1238
+ durationMs: duration,
1239
+ hasAiGateway: !!aiGatewayData,
1240
+ });
1241
+
1242
+ return jsonResponse({
1243
+ ...response,
1244
+ responseTimeMs: duration,
1245
+ });
1246
+ } catch (error) {
1247
+ const errorMessage = error instanceof Error ? error.message : String(error);
1248
+ log.error('Error fetching Workers AI metrics', error instanceof Error ? error : undefined, {
1249
+ tag: 'WORKERSAI_ERROR',
1250
+ errorMessage,
1251
+ });
1252
+
1253
+ return jsonResponse(
1254
+ {
1255
+ success: false,
1256
+ error: 'Failed to fetch Workers AI metrics',
1257
+ message: errorMessage,
1258
+ },
1259
+ 500
1260
+ );
1261
+ }
1262
+ }