@littlebearapps/platform-admin-sdk 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (94)
  1. package/README.md +112 -0
  2. package/dist/index.d.ts +16 -0
  3. package/dist/index.js +89 -0
  4. package/dist/prompts.d.ts +27 -0
  5. package/dist/prompts.js +80 -0
  6. package/dist/scaffold.d.ts +5 -0
  7. package/dist/scaffold.js +65 -0
  8. package/dist/templates.d.ts +16 -0
  9. package/dist/templates.js +131 -0
  10. package/package.json +46 -0
  11. package/templates/full/migrations/006_pattern_discovery.sql +199 -0
  12. package/templates/full/migrations/007_notifications_search.sql +127 -0
  13. package/templates/full/workers/lib/pattern-discovery/ai-prompt.ts +644 -0
  14. package/templates/full/workers/lib/pattern-discovery/clustering.ts +278 -0
  15. package/templates/full/workers/lib/pattern-discovery/shadow-evaluation.ts +603 -0
  16. package/templates/full/workers/lib/pattern-discovery/storage.ts +806 -0
  17. package/templates/full/workers/lib/pattern-discovery/types.ts +159 -0
  18. package/templates/full/workers/lib/pattern-discovery/validation.ts +278 -0
  19. package/templates/full/workers/pattern-discovery.ts +661 -0
  20. package/templates/full/workers/platform-alert-router.ts +1809 -0
  21. package/templates/full/workers/platform-notifications.ts +424 -0
  22. package/templates/full/workers/platform-search.ts +480 -0
  23. package/templates/full/workers/platform-settings.ts +436 -0
  24. package/templates/full/wrangler.alert-router.jsonc.hbs +34 -0
  25. package/templates/full/wrangler.notifications.jsonc.hbs +23 -0
  26. package/templates/full/wrangler.pattern-discovery.jsonc.hbs +33 -0
  27. package/templates/full/wrangler.search.jsonc.hbs +16 -0
  28. package/templates/full/wrangler.settings.jsonc.hbs +23 -0
  29. package/templates/shared/README.md.hbs +69 -0
  30. package/templates/shared/config/budgets.yaml.hbs +72 -0
  31. package/templates/shared/config/services.yaml.hbs +45 -0
  32. package/templates/shared/migrations/001_core_tables.sql +117 -0
  33. package/templates/shared/migrations/002_usage_warehouse.sql +830 -0
  34. package/templates/shared/migrations/003_feature_tracking.sql +250 -0
  35. package/templates/shared/migrations/004_settings_alerts.sql +452 -0
  36. package/templates/shared/migrations/seed.sql.hbs +4 -0
  37. package/templates/shared/package.json.hbs +21 -0
  38. package/templates/shared/scripts/sync-config.ts +242 -0
  39. package/templates/shared/tsconfig.json +12 -0
  40. package/templates/shared/workers/lib/analytics-engine.ts +357 -0
  41. package/templates/shared/workers/lib/billing.ts +293 -0
  42. package/templates/shared/workers/lib/circuit-breaker-middleware.ts +25 -0
  43. package/templates/shared/workers/lib/control.ts +292 -0
  44. package/templates/shared/workers/lib/economics.ts +368 -0
  45. package/templates/shared/workers/lib/metrics.ts +103 -0
  46. package/templates/shared/workers/lib/platform-settings.ts +407 -0
  47. package/templates/shared/workers/lib/shared/allowances.ts +333 -0
  48. package/templates/shared/workers/lib/shared/cloudflare.ts +1362 -0
  49. package/templates/shared/workers/lib/shared/types.ts +58 -0
  50. package/templates/shared/workers/lib/telemetry-sampling.ts +360 -0
  51. package/templates/shared/workers/lib/usage/collectors/example.ts +96 -0
  52. package/templates/shared/workers/lib/usage/collectors/index.ts +128 -0
  53. package/templates/shared/workers/lib/usage/handlers/audit.ts +306 -0
  54. package/templates/shared/workers/lib/usage/handlers/backfill.ts +845 -0
  55. package/templates/shared/workers/lib/usage/handlers/behavioral.ts +429 -0
  56. package/templates/shared/workers/lib/usage/handlers/data-queries.ts +507 -0
  57. package/templates/shared/workers/lib/usage/handlers/dlq-admin.ts +364 -0
  58. package/templates/shared/workers/lib/usage/handlers/health-trends.ts +222 -0
  59. package/templates/shared/workers/lib/usage/handlers/index.ts +35 -0
  60. package/templates/shared/workers/lib/usage/handlers/usage-admin.ts +421 -0
  61. package/templates/shared/workers/lib/usage/handlers/usage-features.ts +1262 -0
  62. package/templates/shared/workers/lib/usage/handlers/usage-metrics.ts +2420 -0
  63. package/templates/shared/workers/lib/usage/handlers/usage-settings.ts +610 -0
  64. package/templates/shared/workers/lib/usage/queue/budget-enforcement.ts +1032 -0
  65. package/templates/shared/workers/lib/usage/queue/cost-budget-enforcement.ts +128 -0
  66. package/templates/shared/workers/lib/usage/queue/cost-calculator.ts +77 -0
  67. package/templates/shared/workers/lib/usage/queue/dlq-handler.ts +161 -0
  68. package/templates/shared/workers/lib/usage/queue/index.ts +19 -0
  69. package/templates/shared/workers/lib/usage/queue/telemetry-processor.ts +790 -0
  70. package/templates/shared/workers/lib/usage/scheduled/anomaly-detection.ts +732 -0
  71. package/templates/shared/workers/lib/usage/scheduled/data-collection.ts +956 -0
  72. package/templates/shared/workers/lib/usage/scheduled/error-digest.ts +343 -0
  73. package/templates/shared/workers/lib/usage/scheduled/index.ts +18 -0
  74. package/templates/shared/workers/lib/usage/scheduled/rollups.ts +1561 -0
  75. package/templates/shared/workers/lib/usage/shared/constants.ts +362 -0
  76. package/templates/shared/workers/lib/usage/shared/index.ts +14 -0
  77. package/templates/shared/workers/lib/usage/shared/types.ts +1066 -0
  78. package/templates/shared/workers/lib/usage/shared/utils.ts +795 -0
  79. package/templates/shared/workers/platform-usage.ts +1915 -0
  80. package/templates/shared/wrangler.usage.jsonc.hbs +58 -0
  81. package/templates/standard/migrations/005_error_collection.sql +162 -0
  82. package/templates/standard/workers/error-collector.ts +2670 -0
  83. package/templates/standard/workers/lib/error-collector/capture.ts +213 -0
  84. package/templates/standard/workers/lib/error-collector/digest.ts +448 -0
  85. package/templates/standard/workers/lib/error-collector/email-health-alerts.ts +262 -0
  86. package/templates/standard/workers/lib/error-collector/fingerprint.ts +258 -0
  87. package/templates/standard/workers/lib/error-collector/gap-alerts.ts +293 -0
  88. package/templates/standard/workers/lib/error-collector/github.ts +329 -0
  89. package/templates/standard/workers/lib/error-collector/types.ts +262 -0
  90. package/templates/standard/workers/lib/sentinel/gap-detection.ts +734 -0
  91. package/templates/standard/workers/lib/shared/slack-alerts.ts +585 -0
  92. package/templates/standard/workers/platform-sentinel.ts +1744 -0
  93. package/templates/standard/wrangler.error-collector.jsonc.hbs +44 -0
  94. package/templates/standard/wrangler.sentinel.jsonc.hbs +45 -0
@@ -0,0 +1,21 @@
1
+ {
2
+ "name": "{{projectSlug}}-platform",
3
+ "version": "1.0.0",
4
+ "private": true,
5
+ "type": "module",
6
+ "scripts": {
7
+ "typecheck": "tsc --noEmit",
8
+ "sync:config": "npx tsx scripts/sync-config.ts",
9
+ "deploy:usage": "wrangler deploy -c wrangler.{{projectSlug}}-usage.jsonc"
10
+ },
11
+ "dependencies": {
12
+ "@littlebearapps/platform-consumer-sdk": "^{{sdkVersion}}",
13
+ "yaml": "^2.6.0"
14
+ },
15
+ "devDependencies": {
16
+ "@cloudflare/workers-types": "^4.20250214.0",
17
+ "tsx": "^4.19.0",
18
+ "typescript": "^5.7.3",
19
+ "wrangler": "^3.100.0"
20
+ }
21
+ }
@@ -0,0 +1,242 @@
1
+ #!/usr/bin/env npx tsx
2
+ /**
3
+ * Sync Service Registry Configuration
4
+ *
5
+ * Reads services.yaml and budgets.yaml from platform/config/ and syncs to:
6
+ * - D1: project_registry + feature_registry tables
7
+ * - KV: CONFIG:FEATURE:{feature_key}:BUDGET keys
8
+ *
9
+ * Usage:
10
+ * npx tsx scripts/sync-config.ts [--dry-run] [--verbose]
11
+ *
12
+ * YAML files in git are the Source of Truth.
13
+ */
14
+
15
+ import { execSync } from 'child_process';
16
+ import { readFileSync, writeFileSync, existsSync, mkdtempSync, rmSync } from 'fs';
17
+ import { join } from 'path';
18
+ import { tmpdir } from 'os';
19
+ import { parse as parseYAML } from 'yaml';
20
+
21
+ // =============================================================================
22
+ // CONFIGURATION — Update these after creating your Cloudflare resources
23
+ // =============================================================================
24
+
25
+ const CONFIG_DIR = join(process.cwd(), 'platform', 'config');
26
+ const SERVICES_FILE = join(CONFIG_DIR, 'services.yaml');
27
+ const BUDGETS_FILE = join(CONFIG_DIR, 'budgets.yaml');
28
+
29
+ // TODO: Replace with your actual KV namespace ID and D1 database name
30
+ const KV_NAMESPACE_ID = 'YOUR_KV_NAMESPACE_ID';
31
+ const D1_DATABASE_NAME = 'YOUR_D1_DATABASE_NAME';
32
+
33
+ // =============================================================================
34
+ // TYPES
35
+ // =============================================================================
36
+
37
+ interface FeatureDefinition {
38
+ display_name: string;
39
+ feature_id?: string;
40
+ circuit_breaker: boolean;
41
+ description?: string;
42
+ cost_tier: string;
43
+ }
44
+
45
+ interface FeatureCategory {
46
+ [feature: string]: FeatureDefinition;
47
+ }
48
+
49
+ interface Project {
50
+ display_name: string;
51
+ status: string;
52
+ tier: string;
53
+ repository?: string;
54
+ features?: Record<string, FeatureCategory>;
55
+ }
56
+
57
+ interface Services {
58
+ metadata: { version: string };
59
+ projects: Record<string, Project>;
60
+ }
61
+
62
+ interface BudgetLimit {
63
+ d1_writes?: number;
64
+ d1_reads?: number;
65
+ kv_reads?: number;
66
+ kv_writes?: number;
67
+ queue_messages?: number;
68
+ requests?: number;
69
+ cpu_ms?: number;
70
+ }
71
+
72
+ interface Budgets {
73
+ defaults: {
74
+ daily: BudgetLimit;
75
+ circuit_breaker: {
76
+ auto_reset_seconds: number;
77
+ cooldown_seconds: number;
78
+ };
79
+ thresholds: { warning: number; critical: number };
80
+ };
81
+ feature_overrides: Record<string, BudgetLimit>;
82
+ }
83
+
84
+ // =============================================================================
85
+ // YAML 1.2 UNDERSCORE FIX
86
+ // =============================================================================
87
+
88
+ /**
89
+ * YAML 1.2 parses numbers with underscores (e.g. 1_000_000) as strings.
90
+ * This normalises them back to numbers.
91
+ */
92
+ function normaliseBudgetLimits(obj: unknown): unknown {
93
+ if (obj === null || obj === undefined) return obj;
94
+ if (typeof obj === 'string' && /^\d[\d_]*$/.test(obj)) {
95
+ return Number(obj.replace(/_/g, ''));
96
+ }
97
+ if (Array.isArray(obj)) return obj.map(normaliseBudgetLimits);
98
+ if (typeof obj === 'object') {
99
+ const result: Record<string, unknown> = {};
100
+ for (const [key, value] of Object.entries(obj as Record<string, unknown>)) {
101
+ result[key] = normaliseBudgetLimits(value);
102
+ }
103
+ return result;
104
+ }
105
+ return obj;
106
+ }
107
+
108
+ // =============================================================================
109
+ // HELPERS
110
+ // =============================================================================
111
+
112
+ const DRY_RUN = process.argv.includes('--dry-run');
113
+ const VERBOSE = process.argv.includes('--verbose');
114
+
115
+ function log(msg: string): void {
116
+ console.log(`[sync-config] ${msg}`);
117
+ }
118
+
119
+ function verbose(msg: string): void {
120
+ if (VERBOSE) console.log(` ${msg}`);
121
+ }
122
+
123
+ function sanitise(value: string): string {
124
+ return value.replace(/'/g, "''");
125
+ }
126
+
127
+ function runD1(sql: string): void {
128
+ if (DRY_RUN) {
129
+ verbose(`[dry-run] D1: ${sql.substring(0, 100)}...`);
130
+ return;
131
+ }
132
+
133
+ const tmpDir = mkdtempSync(join(tmpdir(), 'sync-config-'));
134
+ const sqlFile = join(tmpDir, 'query.sql');
135
+ writeFileSync(sqlFile, sql);
136
+
137
+ try {
138
+ execSync(
139
+ `wrangler d1 execute ${D1_DATABASE_NAME} --remote --file="${sqlFile}"`,
140
+ { stdio: VERBOSE ? 'inherit' : 'pipe' }
141
+ );
142
+ } finally {
143
+ rmSync(tmpDir, { recursive: true, force: true });
144
+ }
145
+ }
146
+
147
+ function runKVPut(key: string, value: string): void {
148
+ if (DRY_RUN) {
149
+ verbose(`[dry-run] KV PUT: ${key} = ${value.substring(0, 60)}...`);
150
+ return;
151
+ }
152
+
153
+ execSync(
154
+ `wrangler kv key put --namespace-id="${KV_NAMESPACE_ID}" "${key}" '${sanitise(value)}'`,
155
+ { stdio: VERBOSE ? 'inherit' : 'pipe' }
156
+ );
157
+ }
158
+
159
+ // =============================================================================
160
+ // MAIN
161
+ // =============================================================================
162
+
163
+ function main(): void {
164
+ log('Starting config sync...');
165
+
166
+ if (!existsSync(SERVICES_FILE)) {
167
+ console.error(`Missing: ${SERVICES_FILE}`);
168
+ process.exit(1);
169
+ }
170
+ if (!existsSync(BUDGETS_FILE)) {
171
+ console.error(`Missing: ${BUDGETS_FILE}`);
172
+ process.exit(1);
173
+ }
174
+
175
+ const services = normaliseBudgetLimits(
176
+ parseYAML(readFileSync(SERVICES_FILE, 'utf-8'))
177
+ ) as Services;
178
+ const budgets = normaliseBudgetLimits(
179
+ parseYAML(readFileSync(BUDGETS_FILE, 'utf-8'))
180
+ ) as Budgets;
181
+
182
+ // Sync projects to D1 project_registry
183
+ const projectSql: string[] = [];
184
+ for (const [projectId, project] of Object.entries(services.projects)) {
185
+ projectSql.push(
186
+ `INSERT INTO project_registry (project_id, display_name, status, tier, repository)
187
+ VALUES ('${sanitise(projectId)}', '${sanitise(project.display_name)}', '${sanitise(project.status)}', '${sanitise(String(project.tier))}', '${sanitise(project.repository ?? '')}')
188
+ ON CONFLICT (project_id) DO UPDATE SET
189
+ display_name = excluded.display_name,
190
+ status = excluded.status,
191
+ tier = excluded.tier,
192
+ repository = excluded.repository;`
193
+ );
194
+ }
195
+
196
+ if (projectSql.length > 0) {
197
+ log(`Syncing ${projectSql.length} project(s) to D1...`);
198
+ runD1(projectSql.join('\n'));
199
+ }
200
+
201
+ // Sync features to D1 feature_registry + KV budgets
202
+ let featureCount = 0;
203
+ const featureSql: string[] = [];
204
+
205
+ for (const [projectId, project] of Object.entries(services.projects)) {
206
+ if (!project.features) continue;
207
+
208
+ for (const [category, features] of Object.entries(project.features)) {
209
+ for (const [featureName, feature] of Object.entries(features)) {
210
+ const featureKey = feature.feature_id ?? `${projectId}:${category}:${featureName}`;
211
+ const cbEnabled = feature.circuit_breaker ? 1 : 0;
212
+
213
+ featureSql.push(
214
+ `INSERT INTO feature_registry (feature_key, project, category, feature, display_name, circuit_breaker_enabled, cost_tier)
215
+ VALUES ('${sanitise(featureKey)}', '${sanitise(projectId)}', '${sanitise(category)}', '${sanitise(featureName)}', '${sanitise(feature.display_name)}', ${cbEnabled}, '${sanitise(feature.cost_tier)}')
216
+ ON CONFLICT (feature_key) DO UPDATE SET
217
+ display_name = excluded.display_name,
218
+ circuit_breaker_enabled = excluded.circuit_breaker_enabled,
219
+ cost_tier = excluded.cost_tier;`
220
+ );
221
+
222
+ // Sync budget to KV
223
+ const override = budgets.feature_overrides?.[featureKey];
224
+ const budget = override ?? budgets.defaults.daily;
225
+ const kvKey = `CONFIG:FEATURE:${featureKey}:BUDGET`;
226
+ runKVPut(kvKey, JSON.stringify(budget));
227
+
228
+ featureCount++;
229
+ }
230
+ }
231
+ }
232
+
233
+ if (featureSql.length > 0) {
234
+ log(`Syncing ${featureCount} feature(s) to D1...`);
235
+ runD1(featureSql.join('\n'));
236
+ }
237
+
238
+ log(`Done! ${projectSql.length} projects, ${featureCount} features synced.`);
239
+ if (DRY_RUN) log('(dry run — no changes made)');
240
+ }
241
+
242
+ main();
@@ -0,0 +1,12 @@
1
+ {
2
+ "compilerOptions": {
3
+ "target": "ES2022",
4
+ "module": "ESNext",
5
+ "moduleResolution": "bundler",
6
+ "strict": true,
7
+ "esModuleInterop": true,
8
+ "skipLibCheck": true,
9
+ "types": ["@cloudflare/workers-types"]
10
+ },
11
+ "include": ["workers/**/*", "scripts/**/*"]
12
+ }
@@ -0,0 +1,357 @@
1
+ /**
2
+ * Analytics Engine SQL API Helper
3
+ *
4
+ * Provides helpers for querying Analytics Engine via the SQL API.
5
+ * Used by the daily rollup to aggregate SDK telemetry from PLATFORM_ANALYTICS.
6
+ *
7
+ * @module workers/lib/analytics-engine
8
+ */
9
+
10
+ import { withExponentialBackoff } from '@littlebearapps/platform-consumer-sdk';
11
+
12
+ // =============================================================================
13
+ // TYPES
14
+ // =============================================================================
15
+
16
+ /**
17
+ * Analytics Engine SQL API response structure.
18
+ *
19
+ * The SQL API returns data in one of two formats:
20
+ * 1. Direct format (success): { meta: [...], data: [...], rows: N }
21
+ * 2. Wrapped format (via REST API): { success: true, result: { meta, data, rows } }
22
+ * 3. Error format: { errors: [...] }
23
+ */
24
+ interface AnalyticsEngineResponse {
25
+ // Direct format (SQL API)
26
+ meta?: Array<{ name: string; type: string }>;
27
+ data?: unknown[];
28
+ rows?: number;
29
+ rows_before_limit_at_least?: number;
30
+
31
+ // Wrapped format (REST API)
32
+ success?: boolean;
33
+ errors?: Array<{ code: number; message: string }>;
34
+ result?: {
35
+ data: unknown[];
36
+ meta: Array<{ name: string; type: string }>;
37
+ rows: number;
38
+ rows_before_limit_at_least: number;
39
+ };
40
+ }
41
+
42
+ /**
43
+ * Daily usage aggregation from Analytics Engine
44
+ */
45
+ export interface DailyUsageAggregation {
46
+ project_id: string;
47
+ feature_id: string;
48
+ d1_reads: number;
49
+ d1_writes: number;
50
+ d1_rows_read: number;
51
+ d1_rows_written: number;
52
+ kv_reads: number;
53
+ kv_writes: number;
54
+ kv_deletes: number;
55
+ kv_lists: number;
56
+ ai_requests: number;
57
+ ai_neurons: number;
58
+ vectorize_queries: number;
59
+ vectorize_inserts: number;
60
+ vectorize_deletes: number;
61
+ interaction_count: number;
62
+ }
63
+
64
+ // =============================================================================
65
+ // ANALYTICS ENGINE SQL API CLIENT
66
+ // =============================================================================
67
+
68
+ /**
69
+ * Query Analytics Engine via the SQL API.
70
+ *
71
+ * @param accountId Cloudflare account ID
72
+ * @param apiToken Cloudflare API token with Analytics Engine read access
73
+ * @param sql SQL query to execute
74
+ * @returns Query results
75
+ */
76
+ export async function queryAnalyticsEngine<T>(
77
+ accountId: string,
78
+ apiToken: string,
79
+ sql: string
80
+ ): Promise<T[]> {
81
+ const url = `https://api.cloudflare.com/client/v4/accounts/${accountId}/analytics_engine/sql`;
82
+
83
+ const response = await fetch(url, {
84
+ method: 'POST',
85
+ headers: {
86
+ Authorization: `Bearer ${apiToken}`,
87
+ 'Content-Type': 'text/plain',
88
+ },
89
+ body: sql,
90
+ });
91
+
92
+ if (!response.ok) {
93
+ const text = await response.text();
94
+ throw new Error(`Analytics Engine API error: ${response.status} - ${text}`);
95
+ }
96
+
97
+ const rawText = await response.text();
98
+ let data: AnalyticsEngineResponse;
99
+ try {
100
+ data = JSON.parse(rawText) as AnalyticsEngineResponse;
101
+ } catch {
102
+ throw new Error(`Analytics Engine returned invalid JSON: ${rawText.slice(0, 500)}`);
103
+ }
104
+
105
+ // Check for error response
106
+ if (data.errors && data.errors.length > 0) {
107
+ const errorMessages = data.errors.map((e) => e.message).join(', ');
108
+ throw new Error(`Analytics Engine query failed: ${errorMessages}`);
109
+ }
110
+
111
+ // Handle both response formats:
112
+ // 1. Direct format: { meta, data, rows }
113
+ // 2. Wrapped format: { success, result: { meta, data, rows } }
114
+ const meta = data.meta ?? data.result?.meta;
115
+ const resultData = data.data ?? data.result?.data;
116
+
117
+ // Validate response structure
118
+ if (!meta || !resultData) {
119
+ throw new Error(
120
+ `Analytics Engine response missing expected fields. ` +
121
+ `Got keys: ${JSON.stringify(Object.keys(data))}`
122
+ );
123
+ }
124
+
125
+ // Map the result data to typed objects using column metadata
126
+ // Analytics Engine can return data in two formats:
127
+ // 1. Array of arrays: [[val1, val2], [val1, val2]] - needs column mapping
128
+ // 2. Array of objects: [{col1: val1, col2: val2}, ...] - already in object format
129
+ const columns = meta.map((m) => m.name);
130
+
131
+ return resultData.map((row) => {
132
+ // If row is already an object (not an array), return it directly
133
+ if (row !== null && typeof row === 'object' && !Array.isArray(row)) {
134
+ return row as T;
135
+ }
136
+
137
+ // Row is an array - map using column metadata
138
+ const rowArray = row as unknown[];
139
+ const obj: Record<string, unknown> = {};
140
+ columns.forEach((col, i) => {
141
+ obj[col] = rowArray[i];
142
+ });
143
+ return obj as T;
144
+ });
145
+ }
146
+
147
+ /**
148
+ * Get daily usage aggregation from Analytics Engine.
149
+ * Queries the PLATFORM_ANALYTICS dataset for yesterday's telemetry data.
150
+ *
151
+ * @param accountId Cloudflare account ID
152
+ * @param apiToken Cloudflare API token
153
+ * @param datasetName Analytics Engine dataset name (default: platform-analytics)
154
+ * @returns Aggregated usage by project and feature
155
+ */
156
+ export async function getDailyUsageFromAnalyticsEngine(
157
+ accountId: string,
158
+ apiToken: string,
159
+ datasetName = 'platform-analytics'
160
+ ): Promise<DailyUsageAggregation[]> {
161
+ // Query for yesterday's data (00:00:00 to 23:59:59 UTC)
162
+ // Analytics Engine uses blob1-20 and double1-20 naming convention
163
+ //
164
+ // Data schema from queue handler (platform-usage.ts):
165
+ // blobs: [project, category, feature] (feature_key is in indexes)
166
+ // doubles: [d1Writes, d1Reads, kvReads, kvWrites, doRequests, doGbSeconds,
167
+ // r2ClassA, r2ClassB, aiNeurons, queueMessages, requests, cpuMs,
168
+ // d1RowsRead, d1RowsWritten, kvDeletes, kvLists, aiRequests,
169
+ // vectorizeQueries, vectorizeInserts, workflowInvocations]
170
+ // indexes: [feature_key]
171
+ //
172
+ // NOTE: Table name must be quoted because it contains a hyphen
173
+ const sql = `
174
+ SELECT
175
+ blob1 as project_id,
176
+ index1 as feature_id,
177
+ SUM(double2) as d1_reads,
178
+ SUM(double1) as d1_writes,
179
+ SUM(double13) as d1_rows_read,
180
+ SUM(double14) as d1_rows_written,
181
+ SUM(double3) as kv_reads,
182
+ SUM(double4) as kv_writes,
183
+ SUM(double15) as kv_deletes,
184
+ SUM(double16) as kv_lists,
185
+ SUM(double17) as ai_requests,
186
+ SUM(double9) as ai_neurons,
187
+ SUM(double18) as vectorize_queries,
188
+ SUM(double19) as vectorize_inserts,
189
+ 0 as vectorize_deletes,
190
+ count() as interaction_count
191
+ FROM "${datasetName}"
192
+ WHERE timestamp >= NOW() - INTERVAL '1' DAY
193
+ GROUP BY project_id, feature_id
194
+ ORDER BY project_id, feature_id
195
+ `;
196
+
197
+ return queryAnalyticsEngine<DailyUsageAggregation>(accountId, apiToken, sql);
198
+ }
199
+
200
+ /**
201
+ * Get aggregated project-level usage from Analytics Engine.
202
+ * Groups all features by project for higher-level reporting.
203
+ *
204
+ * @param accountId Cloudflare account ID
205
+ * @param apiToken Cloudflare API token
206
+ * @param datasetName Analytics Engine dataset name
207
+ * @returns Aggregated usage by project
208
+ */
209
+ export async function getProjectUsageFromAnalyticsEngine(
210
+ accountId: string,
211
+ apiToken: string,
212
+ datasetName = 'platform-analytics'
213
+ ): Promise<Omit<DailyUsageAggregation, 'feature_id'>[]> {
214
+ // NOTE: Table name must be quoted because it contains a hyphen
215
+ // Schema matches METRIC_FIELDS order from platform-sdk/constants.ts
216
+ const sql = `
217
+ SELECT
218
+ blob1 as project_id,
219
+ SUM(double2) as d1_reads,
220
+ SUM(double1) as d1_writes,
221
+ SUM(double13) as d1_rows_read,
222
+ SUM(double14) as d1_rows_written,
223
+ SUM(double3) as kv_reads,
224
+ SUM(double4) as kv_writes,
225
+ SUM(double15) as kv_deletes,
226
+ SUM(double16) as kv_lists,
227
+ SUM(double17) as ai_requests,
228
+ SUM(double9) as ai_neurons,
229
+ SUM(double18) as vectorize_queries,
230
+ SUM(double19) as vectorize_inserts,
231
+ 0 as vectorize_deletes,
232
+ count() as interaction_count
233
+ FROM "${datasetName}"
234
+ WHERE timestamp >= NOW() - INTERVAL '1' DAY
235
+ GROUP BY project_id
236
+ ORDER BY project_id
237
+ `;
238
+
239
+ return queryAnalyticsEngine<Omit<DailyUsageAggregation, 'feature_id'>>(accountId, apiToken, sql);
240
+ }
241
+
242
+ // =============================================================================
243
+ // TIME-BUCKETED QUERIES
244
+ // =============================================================================
245
+
246
+ /**
247
+ * Time-bucketed usage data from Analytics Engine.
248
+ * Aggregates metrics by time bucket (hour/day) and project.
249
+ */
250
+ export interface TimeBucketedUsage {
251
+ time_bucket: string;
252
+ project_id: string;
253
+ d1_writes: number;
254
+ d1_reads: number;
255
+ d1_rows_read: number;
256
+ d1_rows_written: number;
257
+ kv_reads: number;
258
+ kv_writes: number;
259
+ kv_deletes: number;
260
+ kv_lists: number;
261
+ do_requests: number;
262
+ do_gb_seconds: number;
263
+ r2_class_a: number;
264
+ r2_class_b: number;
265
+ ai_neurons: number;
266
+ ai_requests: number;
267
+ queue_messages: number;
268
+ requests: number;
269
+ cpu_ms: number;
270
+ vectorize_queries: number;
271
+ vectorize_inserts: number;
272
+ workflow_invocations: number;
273
+ interaction_count: number;
274
+ }
275
+
276
+ /**
277
+ * Query parameters for time-bucketed usage.
278
+ */
279
+ export interface TimeBucketQueryParams {
280
+ period: '24h' | '7d' | '30d';
281
+ groupBy: 'hour' | 'day';
282
+ project?: string;
283
+ }
284
+
285
+ /**
286
+ * Query usage by time bucket from Analytics Engine.
287
+ * Returns aggregated metrics grouped by time interval (hour/day) and project.
288
+ *
289
+ * @param accountId Cloudflare account ID
290
+ * @param apiToken Cloudflare API token
291
+ * @param params Query parameters (period, groupBy, optional project filter)
292
+ * @param datasetName Analytics Engine dataset name
293
+ * @returns Time-bucketed usage data
294
+ */
295
+ export async function queryUsageByTimeBucket(
296
+ accountId: string,
297
+ apiToken: string,
298
+ params: TimeBucketQueryParams,
299
+ datasetName = 'platform-analytics'
300
+ ): Promise<TimeBucketedUsage[]> {
301
+ // Determine interval based on groupBy
302
+ const interval = params.groupBy === 'hour' ? 'HOUR' : 'DAY';
303
+
304
+ // Map period to interval parts (number and unit must be separate for Analytics Engine)
305
+ const periodMap: Record<string, { num: string; unit: string }> = {
306
+ '24h': { num: '1', unit: 'DAY' },
307
+ '7d': { num: '7', unit: 'DAY' },
308
+ '30d': { num: '30', unit: 'DAY' },
309
+ };
310
+ const periodParts = periodMap[params.period] ?? { num: '1', unit: 'DAY' };
311
+
312
+ // Build project filter clause
313
+ const projectFilter = params.project ? `AND blob1 = '${params.project}'` : '';
314
+
315
+ // NOTE: Table name must be quoted because it contains a hyphen
316
+ // Analytics Engine columns map (from platform-sdk/constants.ts METRIC_FIELDS):
317
+ // double1=d1Writes, double2=d1Reads, double3=kvReads, double4=kvWrites,
318
+ // double5=doRequests, double6=doGbSeconds, double7=r2ClassA, double8=r2ClassB,
319
+ // double9=aiNeurons, double10=queueMessages, double11=requests, double12=cpuMs,
320
+ // double13=d1RowsRead, double14=d1RowsWritten, double15=kvDeletes, double16=kvLists,
321
+ // double17=aiRequests, double18=vectorizeQueries, double19=vectorizeInserts,
322
+ // double20=workflowInvocations
323
+ // blobs: blob1=project, blob2=category, blob3=feature
324
+ const sql = `
325
+ SELECT
326
+ toStartOfInterval(timestamp, INTERVAL '1' ${interval}) as time_bucket,
327
+ blob1 as project_id,
328
+ SUM(double1) as d1_writes,
329
+ SUM(double2) as d1_reads,
330
+ SUM(double13) as d1_rows_read,
331
+ SUM(double14) as d1_rows_written,
332
+ SUM(double3) as kv_reads,
333
+ SUM(double4) as kv_writes,
334
+ SUM(double15) as kv_deletes,
335
+ SUM(double16) as kv_lists,
336
+ SUM(double5) as do_requests,
337
+ SUM(double6) as do_gb_seconds,
338
+ SUM(double7) as r2_class_a,
339
+ SUM(double8) as r2_class_b,
340
+ SUM(double9) as ai_neurons,
341
+ SUM(double17) as ai_requests,
342
+ SUM(double10) as queue_messages,
343
+ SUM(double11) as requests,
344
+ SUM(double12) as cpu_ms,
345
+ SUM(double18) as vectorize_queries,
346
+ SUM(double19) as vectorize_inserts,
347
+ SUM(double20) as workflow_invocations,
348
+ count() as interaction_count
349
+ FROM "${datasetName}"
350
+ WHERE timestamp >= NOW() - INTERVAL '${periodParts.num}' ${periodParts.unit}
351
+ ${projectFilter}
352
+ GROUP BY time_bucket, project_id
353
+ ORDER BY time_bucket ASC, project_id ASC
354
+ `;
355
+
356
+ return queryAnalyticsEngine<TimeBucketedUsage>(accountId, apiToken, sql);
357
+ }