@littlebearapps/create-platform 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (31)
  1. package/dist/index.d.ts +11 -0
  2. package/dist/index.js +59 -0
  3. package/dist/prompts.d.ts +15 -0
  4. package/dist/prompts.js +58 -0
  5. package/dist/scaffold.d.ts +5 -0
  6. package/dist/scaffold.js +65 -0
  7. package/dist/templates.d.ts +16 -0
  8. package/dist/templates.js +53 -0
  9. package/package.json +46 -0
  10. package/templates/full/migrations/006_pattern_discovery.sql +199 -0
  11. package/templates/full/migrations/007_notifications_search.sql +127 -0
  12. package/templates/full/wrangler.alert-router.jsonc.hbs +34 -0
  13. package/templates/full/wrangler.notifications.jsonc.hbs +23 -0
  14. package/templates/full/wrangler.pattern-discovery.jsonc.hbs +33 -0
  15. package/templates/full/wrangler.search.jsonc.hbs +16 -0
  16. package/templates/full/wrangler.settings.jsonc.hbs +23 -0
  17. package/templates/shared/README.md.hbs +69 -0
  18. package/templates/shared/config/budgets.yaml.hbs +72 -0
  19. package/templates/shared/config/services.yaml.hbs +45 -0
  20. package/templates/shared/migrations/001_core_tables.sql +117 -0
  21. package/templates/shared/migrations/002_usage_warehouse.sql +830 -0
  22. package/templates/shared/migrations/003_feature_tracking.sql +250 -0
  23. package/templates/shared/migrations/004_settings_alerts.sql +452 -0
  24. package/templates/shared/migrations/seed.sql.hbs +4 -0
  25. package/templates/shared/package.json.hbs +21 -0
  26. package/templates/shared/scripts/sync-config.ts +242 -0
  27. package/templates/shared/tsconfig.json +12 -0
  28. package/templates/shared/wrangler.usage.jsonc.hbs +58 -0
  29. package/templates/standard/migrations/005_error_collection.sql +162 -0
  30. package/templates/standard/wrangler.error-collector.jsonc.hbs +44 -0
  31. package/templates/standard/wrangler.sentinel.jsonc.hbs +45 -0
@@ -0,0 +1,21 @@
1
+ {
2
+ "name": "{{projectSlug}}-platform",
3
+ "version": "1.0.0",
4
+ "private": true,
5
+ "type": "module",
6
+ "scripts": {
7
+ "typecheck": "tsc --noEmit",
8
+ "sync:config": "npx tsx scripts/sync-config.ts",
9
+ "deploy:usage": "wrangler deploy -c wrangler.{{projectSlug}}-usage.jsonc"
10
+ },
11
+ "dependencies": {
12
+ "@littlebearapps/platform-sdk": "^{{sdkVersion}}",
13
+ "yaml": "^2.6.0"
14
+ },
15
+ "devDependencies": {
16
+ "@cloudflare/workers-types": "^4.20250214.0",
17
+ "tsx": "^4.19.0",
18
+ "typescript": "^5.7.3",
19
+ "wrangler": "^3.100.0"
20
+ }
21
+ }
@@ -0,0 +1,242 @@
1
+ #!/usr/bin/env npx tsx
2
+ /**
3
+ * Sync Service Registry Configuration
4
+ *
5
+ * Reads services.yaml and budgets.yaml from platform/config/ and syncs to:
6
+ * - D1: project_registry + feature_registry tables
7
+ * - KV: CONFIG:FEATURE:{feature_key}:BUDGET keys
8
+ *
9
+ * Usage:
10
+ * npx tsx scripts/sync-config.ts [--dry-run] [--verbose]
11
+ *
12
+ * YAML files in git are the Source of Truth.
13
+ */
14
+
15
+ import { execSync } from 'child_process';
16
+ import { readFileSync, writeFileSync, existsSync, mkdtempSync, rmSync } from 'fs';
17
+ import { join } from 'path';
18
+ import { tmpdir } from 'os';
19
+ import { parse as parseYAML } from 'yaml';
20
+
21
+ // =============================================================================
22
+ // CONFIGURATION — Update these after creating your Cloudflare resources
23
+ // =============================================================================
24
+
25
+ const CONFIG_DIR = join(process.cwd(), 'platform', 'config');
26
+ const SERVICES_FILE = join(CONFIG_DIR, 'services.yaml');
27
+ const BUDGETS_FILE = join(CONFIG_DIR, 'budgets.yaml');
28
+
29
+ // TODO: Replace with your actual KV namespace ID and D1 database name
30
+ const KV_NAMESPACE_ID = 'YOUR_KV_NAMESPACE_ID';
31
+ const D1_DATABASE_NAME = 'YOUR_D1_DATABASE_NAME';
32
+
33
+ // =============================================================================
34
+ // TYPES
35
+ // =============================================================================
36
+
37
+ interface FeatureDefinition {
38
+ display_name: string;
39
+ feature_id?: string;
40
+ circuit_breaker: boolean;
41
+ description?: string;
42
+ cost_tier: string;
43
+ }
44
+
45
+ interface FeatureCategory {
46
+ [feature: string]: FeatureDefinition;
47
+ }
48
+
49
+ interface Project {
50
+ display_name: string;
51
+ status: string;
52
+ tier: string;
53
+ repository?: string;
54
+ features?: Record<string, FeatureCategory>;
55
+ }
56
+
57
+ interface Services {
58
+ metadata: { version: string };
59
+ projects: Record<string, Project>;
60
+ }
61
+
62
+ interface BudgetLimit {
63
+ d1_writes?: number;
64
+ d1_reads?: number;
65
+ kv_reads?: number;
66
+ kv_writes?: number;
67
+ queue_messages?: number;
68
+ requests?: number;
69
+ cpu_ms?: number;
70
+ }
71
+
72
+ interface Budgets {
73
+ defaults: {
74
+ daily: BudgetLimit;
75
+ circuit_breaker: {
76
+ auto_reset_seconds: number;
77
+ cooldown_seconds: number;
78
+ };
79
+ thresholds: { warning: number; critical: number };
80
+ };
81
+ feature_overrides: Record<string, BudgetLimit>;
82
+ }
83
+
84
+ // =============================================================================
85
+ // YAML 1.2 UNDERSCORE FIX
86
+ // =============================================================================
87
+
88
+ /**
89
+ * YAML 1.2 parses numbers with underscores (e.g. 1_000_000) as strings.
90
+ * This normalises them back to numbers.
91
+ */
92
+ function normaliseBudgetLimits(obj: unknown): unknown {
93
+ if (obj === null || obj === undefined) return obj;
94
+ if (typeof obj === 'string' && /^\d[\d_]*$/.test(obj)) {
95
+ return Number(obj.replace(/_/g, ''));
96
+ }
97
+ if (Array.isArray(obj)) return obj.map(normaliseBudgetLimits);
98
+ if (typeof obj === 'object') {
99
+ const result: Record<string, unknown> = {};
100
+ for (const [key, value] of Object.entries(obj as Record<string, unknown>)) {
101
+ result[key] = normaliseBudgetLimits(value);
102
+ }
103
+ return result;
104
+ }
105
+ return obj;
106
+ }
107
+
108
+ // =============================================================================
109
+ // HELPERS
110
+ // =============================================================================
111
+
112
+ const DRY_RUN = process.argv.includes('--dry-run');
113
+ const VERBOSE = process.argv.includes('--verbose');
114
+
115
+ function log(msg: string): void {
116
+ console.log(`[sync-config] ${msg}`);
117
+ }
118
+
119
+ function verbose(msg: string): void {
120
+ if (VERBOSE) console.log(` ${msg}`);
121
+ }
122
+
123
+ function sanitise(value: string): string {
124
+ return value.replace(/'/g, "''");
125
+ }
126
+
127
+ function runD1(sql: string): void {
128
+ if (DRY_RUN) {
129
+ verbose(`[dry-run] D1: ${sql.substring(0, 100)}...`);
130
+ return;
131
+ }
132
+
133
+ const tmpDir = mkdtempSync(join(tmpdir(), 'sync-config-'));
134
+ const sqlFile = join(tmpDir, 'query.sql');
135
+ writeFileSync(sqlFile, sql);
136
+
137
+ try {
138
+ execSync(
139
+ `wrangler d1 execute ${D1_DATABASE_NAME} --remote --file="${sqlFile}"`,
140
+ { stdio: VERBOSE ? 'inherit' : 'pipe' }
141
+ );
142
+ } finally {
143
+ rmSync(tmpDir, { recursive: true, force: true });
144
+ }
145
+ }
146
+
147
+ function runKVPut(key: string, value: string): void {
148
+ if (DRY_RUN) {
149
+ verbose(`[dry-run] KV PUT: ${key} = ${value.substring(0, 60)}...`);
150
+ return;
151
+ }
152
+
153
+ execSync(
154
+ `wrangler kv key put --namespace-id="${KV_NAMESPACE_ID}" "${key}" '${sanitise(value)}'`,
155
+ { stdio: VERBOSE ? 'inherit' : 'pipe' }
156
+ );
157
+ }
158
+
159
+ // =============================================================================
160
+ // MAIN
161
+ // =============================================================================
162
+
163
+ function main(): void {
164
+ log('Starting config sync...');
165
+
166
+ if (!existsSync(SERVICES_FILE)) {
167
+ console.error(`Missing: ${SERVICES_FILE}`);
168
+ process.exit(1);
169
+ }
170
+ if (!existsSync(BUDGETS_FILE)) {
171
+ console.error(`Missing: ${BUDGETS_FILE}`);
172
+ process.exit(1);
173
+ }
174
+
175
+ const services = normaliseBudgetLimits(
176
+ parseYAML(readFileSync(SERVICES_FILE, 'utf-8'))
177
+ ) as Services;
178
+ const budgets = normaliseBudgetLimits(
179
+ parseYAML(readFileSync(BUDGETS_FILE, 'utf-8'))
180
+ ) as Budgets;
181
+
182
+ // Sync projects to D1 project_registry
183
+ const projectSql: string[] = [];
184
+ for (const [projectId, project] of Object.entries(services.projects)) {
185
+ projectSql.push(
186
+ `INSERT INTO project_registry (project_id, display_name, status, tier, repository)
187
+ VALUES ('${sanitise(projectId)}', '${sanitise(project.display_name)}', '${sanitise(project.status)}', '${sanitise(String(project.tier))}', '${sanitise(project.repository ?? '')}')
188
+ ON CONFLICT (project_id) DO UPDATE SET
189
+ display_name = excluded.display_name,
190
+ status = excluded.status,
191
+ tier = excluded.tier,
192
+ repository = excluded.repository;`
193
+ );
194
+ }
195
+
196
+ if (projectSql.length > 0) {
197
+ log(`Syncing ${projectSql.length} project(s) to D1...`);
198
+ runD1(projectSql.join('\n'));
199
+ }
200
+
201
+ // Sync features to D1 feature_registry + KV budgets
202
+ let featureCount = 0;
203
+ const featureSql: string[] = [];
204
+
205
+ for (const [projectId, project] of Object.entries(services.projects)) {
206
+ if (!project.features) continue;
207
+
208
+ for (const [category, features] of Object.entries(project.features)) {
209
+ for (const [featureName, feature] of Object.entries(features)) {
210
+ const featureKey = feature.feature_id ?? `${projectId}:${category}:${featureName}`;
211
+ const cbEnabled = feature.circuit_breaker ? 1 : 0;
212
+
213
+ featureSql.push(
214
+ `INSERT INTO feature_registry (feature_key, project, category, feature, display_name, circuit_breaker_enabled, cost_tier)
215
+ VALUES ('${sanitise(featureKey)}', '${sanitise(projectId)}', '${sanitise(category)}', '${sanitise(featureName)}', '${sanitise(feature.display_name)}', ${cbEnabled}, '${sanitise(feature.cost_tier)}')
216
+ ON CONFLICT (feature_key) DO UPDATE SET
217
+ display_name = excluded.display_name,
218
+ circuit_breaker_enabled = excluded.circuit_breaker_enabled,
219
+ cost_tier = excluded.cost_tier;`
220
+ );
221
+
222
+ // Sync budget to KV
223
+ const override = budgets.feature_overrides?.[featureKey];
224
+ const budget = override ?? budgets.defaults.daily;
225
+ const kvKey = `CONFIG:FEATURE:${featureKey}:BUDGET`;
226
+ runKVPut(kvKey, JSON.stringify(budget));
227
+
228
+ featureCount++;
229
+ }
230
+ }
231
+ }
232
+
233
+ if (featureSql.length > 0) {
234
+ log(`Syncing ${featureCount} feature(s) to D1...`);
235
+ runD1(featureSql.join('\n'));
236
+ }
237
+
238
+ log(`Done! ${projectSql.length} projects, ${featureCount} features synced.`);
239
+ if (DRY_RUN) log('(dry run — no changes made)');
240
+ }
241
+
242
+ main();
@@ -0,0 +1,12 @@
1
+ {
2
+ "compilerOptions": {
3
+ "target": "ES2022",
4
+ "module": "ESNext",
5
+ "moduleResolution": "bundler",
6
+ "strict": true,
7
+ "esModuleInterop": true,
8
+ "skipLibCheck": true,
9
+ "types": ["@cloudflare/workers-types"]
10
+ },
11
+ "include": ["workers/**/*", "scripts/**/*"]
12
+ }
@@ -0,0 +1,58 @@
1
+ {
2
+ "$schema": "./node_modules/wrangler/config-schema.json",
3
+ "name": "{{projectSlug}}-usage",
4
+ "main": "workers/platform-usage.ts",
5
+ "compatibility_date": "2026-01-01",
6
+ "compatibility_flags": ["nodejs_compat_v2"],
7
+ "observability": { "enabled": true },
8
+
9
+ // Cron: hourly data collection + midnight rollups
10
+ "triggers": {
11
+ "crons": ["0 * * * *", "0 0 * * *"]
12
+ },
13
+
14
+ "d1_databases": [
15
+ {
16
+ "binding": "PLATFORM_DB",
17
+ "database_name": "{{projectSlug}}-metrics",
18
+ "database_id": "YOUR_D1_DATABASE_ID",
19
+ "migrations_dir": "storage/d1/migrations"
20
+ }
21
+ ],
22
+
23
+ "kv_namespaces": [
24
+ {
25
+ "binding": "PLATFORM_CACHE",
26
+ "id": "YOUR_KV_NAMESPACE_ID"
27
+ }
28
+ ],
29
+
30
+ "queues": {
31
+ "consumers": [
32
+ {
33
+ "queue": "{{projectSlug}}-telemetry",
34
+ "max_batch_size": 100,
35
+ "max_batch_timeout": 30,
36
+ "dead_letter_queue": "{{projectSlug}}-telemetry-dlq",
37
+ "max_retries": 3
38
+ }
39
+ ],
40
+ "producers": [
41
+ { "binding": "TELEMETRY_QUEUE", "queue": "{{projectSlug}}-telemetry" },
42
+ { "binding": "TELEMETRY_DLQ", "queue": "{{projectSlug}}-telemetry-dlq" }
43
+ ]
44
+ },
45
+
46
+ "analytics_engine_datasets": [
47
+ { "binding": "PLATFORM_ANALYTICS", "dataset": "{{projectSlug}}-analytics" }
48
+ ],
49
+
50
+ "vars": {
51
+ "CLOUDFLARE_ACCOUNT_ID": "YOUR_CLOUDFLARE_ACCOUNT_ID"
52
+ }
53
+
54
+ // Uncomment when you add error-collector (standard tier):
55
+ // "services": [
56
+ // { "binding": "NOTIFICATIONS_API", "service": "{{projectSlug}}-notifications" }
57
+ // ]
58
+ }
@@ -0,0 +1,162 @@
1
+ -- =============================================================================
2
+ -- 005_error_collection.sql — Error tracking (standard tier)
3
+ -- =============================================================================
4
+ -- Consolidated from original migrations: 038, 039, 041, 043
5
+ --
6
+ -- Tables:
7
+ -- error_occurrences — Error tracking with deduplication and GitHub linkage
8
+ -- warning_digests — Daily digest tracking for P4 warnings
9
+ -- fingerprint_decisions — Fingerprint decision audit log for post-hoc analysis
10
+ --
11
+ -- The error_occurrences table uses the FINAL schema from migration 043 which
12
+ -- recreated the table with the correct CHECK constraint (adding 'pending_digest'
13
+ -- and 'digested' status values) and merged columns from 039 and 041.
14
+ -- =============================================================================
15
+
16
+
17
+ -- =============================================================================
18
+ -- ERROR OCCURRENCES (final schema from 043, merging 038 + 039 + 041)
19
+ -- =============================================================================
20
+ -- Error tracking table for deduplication, GitHub issue linkage, and history.
21
+ -- Includes digest columns (from 039) and error_category (from 041).
22
+
23
+ CREATE TABLE IF NOT EXISTS error_occurrences (
24
+ id TEXT PRIMARY KEY,
25
+ fingerprint TEXT NOT NULL,
26
+ script_name TEXT NOT NULL,
27
+ project TEXT NOT NULL,
28
+ error_type TEXT NOT NULL CHECK (error_type IN ('exception', 'cpu_limit', 'memory_limit', 'soft_error', 'warning')),
29
+ priority TEXT NOT NULL CHECK (priority IN ('P0', 'P1', 'P2', 'P3', 'P4')),
30
+
31
+ -- GitHub linkage
32
+ github_issue_number INTEGER,
33
+ github_issue_url TEXT,
34
+ github_repo TEXT NOT NULL,
35
+
36
+ -- Status tracking (includes 'pending_digest' and 'digested' from 043)
37
+ status TEXT DEFAULT 'open' CHECK (status IN ('open', 'resolved', 'wont_fix', 'pending_digest', 'digested')),
38
+ resolved_at INTEGER,
39
+ resolved_by TEXT, -- Commit SHA or 'auto-close'
40
+
41
+ -- Occurrence tracking
42
+ first_seen_at INTEGER NOT NULL,
43
+ last_seen_at INTEGER NOT NULL,
44
+ occurrence_count INTEGER DEFAULT 1,
45
+
46
+ -- Request context (last occurrence)
47
+ last_request_url TEXT,
48
+ last_request_method TEXT,
49
+ last_colo TEXT,
50
+ last_country TEXT,
51
+ last_cf_ray TEXT,
52
+
53
+ -- Error details (last occurrence)
54
+ last_exception_name TEXT,
55
+ last_exception_message TEXT,
56
+ last_logs_json TEXT, -- JSON array of last 20 log entries
57
+
58
+ -- Digest columns (from 039)
59
+ digest_date TEXT,
60
+ digest_issue_number INTEGER,
61
+ normalized_message TEXT,
62
+
63
+ -- Error category for transient error grouping (from 041)
64
+ error_category TEXT,
65
+
66
+ -- Timestamps
67
+ created_at INTEGER DEFAULT (unixepoch()),
68
+ updated_at INTEGER DEFAULT (unixepoch()),
69
+
70
+ UNIQUE(fingerprint)
71
+ );
72
+
73
+ CREATE INDEX IF NOT EXISTS idx_error_occurrences_status ON error_occurrences(status);
74
+ CREATE INDEX IF NOT EXISTS idx_error_occurrences_project ON error_occurrences(project);
75
+ CREATE INDEX IF NOT EXISTS idx_error_occurrences_script ON error_occurrences(script_name);
76
+ CREATE INDEX IF NOT EXISTS idx_error_occurrences_fingerprint ON error_occurrences(fingerprint);
77
+ CREATE INDEX IF NOT EXISTS idx_error_occurrences_last_seen ON error_occurrences(last_seen_at DESC);
78
+ CREATE INDEX IF NOT EXISTS idx_error_occurrences_priority ON error_occurrences(priority, status);
79
+ CREATE INDEX IF NOT EXISTS idx_error_occurrences_github ON error_occurrences(github_issue_number) WHERE github_issue_number IS NOT NULL;
80
+ CREATE INDEX IF NOT EXISTS idx_error_occurrences_pending_digest ON error_occurrences(status, error_type, script_name, fingerprint) WHERE status = 'pending_digest';
81
+ CREATE INDEX IF NOT EXISTS idx_error_occurrences_digest_date ON error_occurrences(digest_date, script_name) WHERE digest_date IS NOT NULL;
82
+ CREATE INDEX IF NOT EXISTS idx_error_occurrences_category ON error_occurrences(error_category) WHERE error_category IS NOT NULL;
83
+
84
+
85
+ -- =============================================================================
86
+ -- WARNING DIGESTS (from 039)
87
+ -- =============================================================================
88
+ -- Tracks daily digest issues. Allows finding/updating existing digest issues
89
+ -- for a given day to batch P4 warnings into single GitHub issues.
90
+
91
+ CREATE TABLE IF NOT EXISTS warning_digests (
92
+ id TEXT PRIMARY KEY,
93
+ digest_date TEXT NOT NULL, -- YYYY-MM-DD
94
+ script_name TEXT NOT NULL,
95
+ fingerprint TEXT NOT NULL, -- Normalised fingerprint (groups similar warnings)
96
+ normalized_message TEXT NOT NULL, -- Human-readable warning type
97
+ github_repo TEXT NOT NULL,
98
+ github_issue_number INTEGER,
99
+ github_issue_url TEXT,
100
+ occurrence_count INTEGER DEFAULT 0,
101
+ first_occurrence_at INTEGER NOT NULL,
102
+ last_occurrence_at INTEGER NOT NULL,
103
+ created_at INTEGER NOT NULL,
104
+ updated_at INTEGER NOT NULL,
105
+ UNIQUE(digest_date, script_name, fingerprint)
106
+ );
107
+
108
+ CREATE INDEX IF NOT EXISTS idx_warning_digests_lookup
109
+ ON warning_digests(digest_date, script_name);
110
+
111
+
112
+ -- =============================================================================
113
+ -- FINGERPRINT DECISIONS (from 041)
114
+ -- =============================================================================
115
+ -- Stores fingerprint decisions for post-hoc analysis of error classification.
116
+ -- Tracks why each error was handled the way it was.
117
+
118
+ CREATE TABLE IF NOT EXISTS fingerprint_decisions (
119
+ id TEXT PRIMARY KEY,
120
+ timestamp INTEGER NOT NULL DEFAULT (unixepoch()),
121
+
122
+ -- Context
123
+ script_name TEXT NOT NULL,
124
+ error_type TEXT NOT NULL,
125
+
126
+ -- Fingerprint details
127
+ raw_message TEXT, -- Original error message (first 500 chars)
128
+ normalized_message TEXT, -- Normalised message used for fingerprinting
129
+ computed_fingerprint TEXT NOT NULL,
130
+ category TEXT, -- Transient error category if classified
131
+
132
+ -- Decision outcome
133
+ decision TEXT NOT NULL CHECK (decision IN (
134
+ 'new_issue', -- Created new GitHub issue
135
+ 'existing_issue', -- Updated existing issue
136
+ 'transient_window', -- Transient error, issue exists for today
137
+ 'suppressed', -- Suppressed (e.g., muted issue)
138
+ 'rate_limited', -- Rate limited, no action taken
139
+ 'digest' -- Stored for daily digest
140
+ )),
141
+
142
+ -- GitHub linkage
143
+ github_issue_number INTEGER,
144
+ github_repo TEXT,
145
+
146
+ -- Metadata
147
+ is_transient INTEGER DEFAULT 0,
148
+ occurrence_count INTEGER,
149
+
150
+ created_at INTEGER DEFAULT (unixepoch())
151
+ );
152
+
153
+ CREATE INDEX IF NOT EXISTS idx_fingerprint_decisions_script
154
+ ON fingerprint_decisions(script_name, timestamp DESC);
155
+ CREATE INDEX IF NOT EXISTS idx_fingerprint_decisions_fingerprint
156
+ ON fingerprint_decisions(computed_fingerprint);
157
+ CREATE INDEX IF NOT EXISTS idx_fingerprint_decisions_category
158
+ ON fingerprint_decisions(category) WHERE category IS NOT NULL;
159
+ CREATE INDEX IF NOT EXISTS idx_fingerprint_decisions_decision
160
+ ON fingerprint_decisions(decision, timestamp DESC);
161
+ CREATE INDEX IF NOT EXISTS idx_fingerprint_decisions_timestamp
162
+ ON fingerprint_decisions(timestamp DESC);
@@ -0,0 +1,44 @@
1
+ {
2
+ "$schema": "./node_modules/wrangler/config-schema.json",
3
+ "name": "{{projectSlug}}-error-collector",
4
+ "main": "workers/error-collector.ts",
5
+ "compatibility_date": "2026-01-01",
6
+ "compatibility_flags": ["nodejs_compat_v2"],
7
+ "observability": { "enabled": true },
8
+
9
+ // Cron: 15-minute error processing + midnight daily digest
10
+ "triggers": {
11
+ "crons": ["*/15 * * * *", "0 0 * * *"]
12
+ },
13
+
14
+ // Tail worker: receives logs from other workers
15
+ "tail_consumers": [
16
+ { "service": "{{projectSlug}}-usage" }
17
+ ],
18
+
19
+ "d1_databases": [
20
+ {
21
+ "binding": "PLATFORM_DB",
22
+ "database_name": "{{projectSlug}}-metrics",
23
+ "database_id": "YOUR_D1_DATABASE_ID"
24
+ }
25
+ ],
26
+
27
+ "kv_namespaces": [
28
+ {
29
+ "binding": "PLATFORM_CACHE",
30
+ "id": "YOUR_KV_NAMESPACE_ID"
31
+ }
32
+ ],
33
+
34
+ "vars": {
35
+ "GITHUB_ORG": "{{githubOrg}}",
36
+ "DEFAULT_ASSIGNEE": "{{defaultAssignee}}",
37
+ "GATUS_HEARTBEAT_URL": "{{gatusUrl}}"
38
+ }
39
+
40
+ // Secrets needed (set via wrangler secret put):
41
+ // GITHUB_APP_ID
42
+ // GITHUB_APP_PRIVATE_KEY
43
+ // GITHUB_APP_INSTALLATION_ID
44
+ }
@@ -0,0 +1,45 @@
1
+ {
2
+ "$schema": "./node_modules/wrangler/config-schema.json",
3
+ "name": "{{projectSlug}}-sentinel",
4
+ "main": "workers/platform-sentinel.ts",
5
+ "compatibility_date": "2026-01-01",
6
+ "compatibility_flags": ["nodejs_compat_v2"],
7
+ "observability": { "enabled": true },
8
+
9
+ // Cron: 15-minute gap detection + cost spike monitoring
10
+ "triggers": {
11
+ "crons": ["*/15 * * * *"]
12
+ },
13
+
14
+ "d1_databases": [
15
+ {
16
+ "binding": "PLATFORM_DB",
17
+ "database_name": "{{projectSlug}}-metrics",
18
+ "database_id": "YOUR_D1_DATABASE_ID"
19
+ }
20
+ ],
21
+
22
+ "kv_namespaces": [
23
+ {
24
+ "binding": "PLATFORM_CACHE",
25
+ "id": "YOUR_KV_NAMESPACE_ID"
26
+ },
27
+ {
28
+ "binding": "PLATFORM_ALERTS",
29
+ "id": "YOUR_KV_ALERTS_NAMESPACE_ID"
30
+ }
31
+ ],
32
+
33
+ "services": [
34
+ { "binding": "ERROR_COLLECTOR", "service": "{{projectSlug}}-error-collector" }
35
+ ],
36
+
37
+ "vars": {
38
+ "CLOUDFLARE_ACCOUNT_ID": "YOUR_CLOUDFLARE_ACCOUNT_ID",
39
+ "GATUS_HEARTBEAT_URL": "{{gatusUrl}}"
40
+ }
41
+
42
+ // Secrets needed (set via wrangler secret put):
43
+ // CLOUDFLARE_API_TOKEN
44
+ // SLACK_WEBHOOK_URL (optional)
45
+ }