@littlebearapps/create-platform 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (31)
  1. package/dist/index.d.ts +11 -0
  2. package/dist/index.js +59 -0
  3. package/dist/prompts.d.ts +15 -0
  4. package/dist/prompts.js +58 -0
  5. package/dist/scaffold.d.ts +5 -0
  6. package/dist/scaffold.js +65 -0
  7. package/dist/templates.d.ts +16 -0
  8. package/dist/templates.js +53 -0
  9. package/package.json +46 -0
  10. package/templates/full/migrations/006_pattern_discovery.sql +199 -0
  11. package/templates/full/migrations/007_notifications_search.sql +127 -0
  12. package/templates/full/wrangler.alert-router.jsonc.hbs +34 -0
  13. package/templates/full/wrangler.notifications.jsonc.hbs +23 -0
  14. package/templates/full/wrangler.pattern-discovery.jsonc.hbs +33 -0
  15. package/templates/full/wrangler.search.jsonc.hbs +16 -0
  16. package/templates/full/wrangler.settings.jsonc.hbs +23 -0
  17. package/templates/shared/README.md.hbs +69 -0
  18. package/templates/shared/config/budgets.yaml.hbs +72 -0
  19. package/templates/shared/config/services.yaml.hbs +45 -0
  20. package/templates/shared/migrations/001_core_tables.sql +117 -0
  21. package/templates/shared/migrations/002_usage_warehouse.sql +830 -0
  22. package/templates/shared/migrations/003_feature_tracking.sql +250 -0
  23. package/templates/shared/migrations/004_settings_alerts.sql +452 -0
  24. package/templates/shared/migrations/seed.sql.hbs +4 -0
  25. package/templates/shared/package.json.hbs +21 -0
  26. package/templates/shared/scripts/sync-config.ts +242 -0
  27. package/templates/shared/tsconfig.json +12 -0
  28. package/templates/shared/wrangler.usage.jsonc.hbs +58 -0
  29. package/templates/standard/migrations/005_error_collection.sql +162 -0
  30. package/templates/standard/wrangler.error-collector.jsonc.hbs +44 -0
  31. package/templates/standard/wrangler.sentinel.jsonc.hbs +45 -0
@@ -0,0 +1,11 @@
1
+ #!/usr/bin/env node
2
+ /**
3
+ * @littlebearapps/create-platform
4
+ *
5
+ * Scaffolds a Cloudflare Workers platform with SDK integration,
6
+ * circuit breakers, and cost protection.
7
+ *
8
+ * Usage:
9
+ * npx @littlebearapps/create-platform [project-name]
10
+ */
11
+ export {};
package/dist/index.js ADDED
#!/usr/bin/env node
/**
 * @littlebearapps/create-platform
 *
 * Scaffolds a Cloudflare Workers platform with SDK integration,
 * circuit breakers, and cost protection.
 *
 * Usage:
 *   npx @littlebearapps/create-platform [project-name]
 */
import { resolve } from 'node:path';
import pc from 'picocolors';
import { collectOptions } from './prompts.js';
import { scaffold } from './scaffold.js';

const BANNER = `
${pc.bold(pc.cyan('Platform SDK'))} — Cloudflare Cost Protection
${pc.dim('Automatic circuit breakers, budget enforcement, and error collection')}
`;

/** Echo the chosen configuration before scaffolding begins. */
function printSummary(options, outputDir) {
    console.log();
    console.log(` ${pc.bold('Project')}: ${options.projectName}`);
    console.log(` ${pc.bold('Tier')}: ${options.tier}`);
    console.log(` ${pc.bold('Output')}: ${outputDir}`);
    console.log();
}

/** Print the post-scaffold checklist: resource creation, config sync, deploy. */
function printNextSteps(options) {
    console.log();
    console.log(pc.green(pc.bold(' Done!')));
    console.log();
    console.log(` ${pc.bold('Next steps:')}`);
    console.log();
    console.log(` ${pc.cyan('cd')} ${options.projectName}`);
    console.log(` ${pc.cyan('npm install')}`);
    console.log();
    console.log(` ${pc.dim('# Create Cloudflare resources:')}`);
    console.log(` ${pc.cyan('npx wrangler d1 create')} ${options.projectSlug}-metrics`);
    console.log(` ${pc.cyan('npx wrangler kv namespace create')} PLATFORM_CACHE`);
    if (options.tier !== 'minimal') {
        console.log(` ${pc.cyan('npx wrangler kv namespace create')} PLATFORM_ALERTS`);
    }
    console.log(` ${pc.cyan('npx wrangler queues create')} ${options.projectSlug}-telemetry`);
    console.log(` ${pc.cyan('npx wrangler queues create')} ${options.projectSlug}-telemetry-dlq`);
    console.log();
    console.log(` ${pc.dim('# Update resource IDs in wrangler.*.jsonc, then:')}`);
    console.log(` ${pc.cyan('npm run sync:config')}`);
    console.log(` ${pc.cyan('npx wrangler d1 migrations apply')} ${options.projectSlug}-metrics --remote`);
    console.log(` ${pc.cyan('npx wrangler deploy')} -c wrangler.${options.projectSlug}-usage.jsonc`);
    console.log();
    console.log(` ${pc.dim('# In your consumer projects:')}`);
    console.log(` ${pc.cyan('npm install @littlebearapps/platform-sdk')}`);
    console.log();
}

/** CLI entry point: collect options, scaffold into CWD, print next steps. */
async function main() {
    console.log(BANNER);
    const options = await collectOptions(process.argv[2]);
    const outputDir = resolve(process.cwd(), options.projectName);
    printSummary(options, outputDir);
    await scaffold(options, outputDir);
    printNextSteps(options);
}

main().catch((error) => {
    const message = error instanceof Error ? error.message : String(error);
    console.error(pc.red('Error:'), message);
    process.exit(1);
});
@@ -0,0 +1,15 @@
1
+ /**
2
+ * Interactive CLI prompts for project scaffolding configuration.
3
+ *
4
+ * Falls back to sensible defaults when running non-interactively.
5
+ */
6
+ export type Tier = 'minimal' | 'standard' | 'full';
7
+ export interface ScaffoldOptions {
8
+ projectName: string;
9
+ projectSlug: string;
10
+ githubOrg: string;
11
+ tier: Tier;
12
+ gatusUrl: string;
13
+ defaultAssignee: string;
14
+ }
15
+ export declare function collectOptions(projectNameArg?: string): Promise<ScaffoldOptions>;
/**
 * Interactive CLI prompts for project scaffolding configuration.
 *
 * Falls back to sensible defaults when running non-interactively.
 */
import * as readline from 'node:readline';

/** Lowercase a name and collapse non-alphanumeric runs into single hyphens. */
function slugify(name) {
    const lowered = name.toLowerCase();
    return lowered.replace(/[^a-z0-9]+/g, '-').replace(/^-|-$/g, '');
}

/**
 * Ask a single free-text question; returns the trimmed answer or the default.
 * Non-interactive sessions (piped stdin, CI) get the default without asking.
 */
async function prompt(question, defaultValue) {
    if (!process.stdin.isTTY) {
        // Non-interactive: use defaults
        return defaultValue;
    }
    const display = defaultValue ? `${question} (${defaultValue}): ` : `${question}: `;
    const rl = readline.createInterface({
        input: process.stdin,
        output: process.stdout,
    });
    const raw = await new Promise((done) => {
        rl.question(` ${display}`, (line) => {
            rl.close();
            done(line);
        });
    });
    return raw.trim() || defaultValue;
}

/**
 * Ask the user to pick one option by number; falls back to the default
 * selection when non-interactive or when the answer is not a valid index.
 */
async function promptSelect(question, options, defaultIndex = 0) {
    if (!process.stdin.isTTY) {
        return options[defaultIndex];
    }
    console.log(` ${question}`);
    for (let i = 0; i < options.length; i++) {
        const marker = i === defaultIndex ? '>' : ' ';
        console.log(` ${marker} ${i + 1}. ${options[i]}`);
    }
    const choice = await prompt('Choose', String(defaultIndex + 1));
    const index = parseInt(choice, 10) - 1;
    return options[index] ?? options[defaultIndex];
}

/**
 * Collect all scaffolding options, preferring the CLI argument for the
 * project name and deriving the slug default from it.
 */
export async function collectOptions(projectNameArg) {
    const projectName = projectNameArg || await prompt('Project name', 'my-platform');
    const projectSlug = await prompt('Project slug (for resource names)', slugify(projectName));
    const githubOrg = await prompt('GitHub org (for error issue creation)', '');
    const tier = await promptSelect('Setup tier:', ['minimal', 'standard', 'full'], 1);
    const gatusUrl = await prompt('Gatus status page URL (optional)', '');
    const defaultAssignee = await prompt('Default GitHub assignee (optional)', '');
    return {
        projectName,
        projectSlug,
        githubOrg,
        tier,
        gatusUrl,
        defaultAssignee,
    };
}
@@ -0,0 +1,5 @@
1
+ /**
2
+ * Scaffolding orchestrator — copies and renders templates into the output directory.
3
+ */
4
+ import type { ScaffoldOptions } from './prompts.js';
5
+ export declare function scaffold(options: ScaffoldOptions, outputDir: string): Promise<void>;
/**
 * Scaffolding orchestrator — copies and renders templates into the output directory.
 */
import { readFileSync, writeFileSync, mkdirSync, existsSync } from 'node:fs';
import { resolve, dirname, join, relative } from 'node:path';
import { fileURLToPath } from 'node:url';
import Handlebars from 'handlebars';
import pc from 'picocolors';
import { getFilesForTier } from './templates.js';

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

/**
 * Resolve the bundled templates/ directory.
 * In development and in the published package the templates live one level
 * above this file; the ../../ path is a fallback for nested build output.
 */
function getTemplatesDir() {
    // In development: ../templates/
    // In published package: ../templates/ (relative to dist/)
    const devPath = resolve(__dirname, '..', 'templates');
    if (existsSync(devPath))
        return devPath;
    return resolve(__dirname, '..', '..', 'templates');
}

/**
 * Simple {{var}} replacement for file paths (not Handlebars — just string replace).
 *
 * The replacement is passed as a function so that values containing `$`
 * (e.g. a user-entered GitHub org) are inserted literally instead of being
 * interpreted as String.prototype.replace special patterns such as `$&`.
 */
function renderString(template, context) {
    let result = template;
    for (const [key, value] of Object.entries(context)) {
        result = result.replace(new RegExp(`\\{\\{${key}\\}\\}`, 'g'), () => String(value));
    }
    return result;
}

/**
 * Copy/render every template file for the selected tier into outputDir.
 *
 * @param options   Answers collected by collectOptions().
 * @param outputDir Absolute path of the new project directory.
 * @throws Error if outputDir already exists — never overwrites user files.
 */
export async function scaffold(options, outputDir) {
    if (existsSync(outputDir)) {
        throw new Error(`Directory already exists: ${outputDir}`);
    }
    const templatesDir = getTemplatesDir();
    const files = getFilesForTier(options.tier);
    // Context shared by Handlebars templates and {{var}} path rendering.
    const context = {
        projectName: options.projectName,
        projectSlug: options.projectSlug,
        githubOrg: options.githubOrg,
        tier: options.tier,
        gatusUrl: options.gatusUrl,
        defaultAssignee: options.defaultAssignee,
        sdkVersion: '0.2.0', // SDK version pinned into generated files
    };
    mkdirSync(outputDir, { recursive: true });
    for (const file of files) {
        const srcPath = join(templatesDir, file.src);
        const destPath = join(outputDir, renderString(file.dest, context));
        // Ensure destination directory exists
        mkdirSync(dirname(destPath), { recursive: true });
        if (!existsSync(srcPath)) {
            // Missing templates are skipped with a warning rather than aborting.
            console.log(` ${pc.yellow('skip')} ${file.src} ${pc.dim('(template not found)')}`);
            continue;
        }
        const raw = readFileSync(srcPath, 'utf-8');
        if (file.template) {
            // noEscape: output is config/code, not HTML — keep values verbatim.
            const compiled = Handlebars.compile(raw, { noEscape: true });
            const rendered = compiled(context);
            writeFileSync(destPath, rendered);
        }
        else {
            writeFileSync(destPath, raw);
        }
        // path.relative is separator-aware; the previous
        // destPath.replace(outputDir + '/', '') broke on Windows backslash paths.
        const relDest = relative(outputDir, destPath);
        console.log(` ${pc.green('create')} ${relDest}`);
    }
}
@@ -0,0 +1,16 @@
1
+ /**
2
+ * Template manifest — maps tiers to files that should be scaffolded.
3
+ *
4
+ * Files ending in .hbs are rendered through Handlebars.
5
+ * All other files are copied verbatim.
6
+ */
7
+ import type { Tier } from './prompts.js';
8
+ export interface TemplateFile {
9
+ /** Path relative to the templates/ directory */
10
+ src: string;
11
+ /** Path relative to the output directory */
12
+ dest: string;
13
+ /** Whether this file uses Handlebars templating */
14
+ template: boolean;
15
+ }
16
+ export declare function getFilesForTier(tier: Tier): TemplateFile[];
/**
 * Template manifest — maps tiers to files that should be scaffolded.
 *
 * Files ending in .hbs are rendered through Handlebars.
 * All other files are copied verbatim.
 */

/** Build a verbatim-copy migration entry for storage/d1/migrations/. */
const sqlMigration = (src, filename) => ({
    src,
    dest: `storage/d1/migrations/${filename}`,
    template: false,
});

/** Build a Handlebars-rendered wrangler config entry named after the project slug. */
const wranglerConfig = (src, suffix) => ({
    src,
    dest: `wrangler.{{projectSlug}}-${suffix}.jsonc`,
    template: true,
});

const SHARED_FILES = [
    // Config
    { src: 'shared/config/services.yaml.hbs', dest: 'platform/config/services.yaml', template: true },
    { src: 'shared/config/budgets.yaml.hbs', dest: 'platform/config/budgets.yaml', template: true },
    // Scripts
    { src: 'shared/scripts/sync-config.ts', dest: 'scripts/sync-config.ts', template: false },
    // Migrations (minimal tier)
    sqlMigration('shared/migrations/001_core_tables.sql', '001_core_tables.sql'),
    sqlMigration('shared/migrations/002_usage_warehouse.sql', '002_usage_warehouse.sql'),
    sqlMigration('shared/migrations/003_feature_tracking.sql', '003_feature_tracking.sql'),
    sqlMigration('shared/migrations/004_settings_alerts.sql', '004_settings_alerts.sql'),
    { src: 'shared/migrations/seed.sql.hbs', dest: 'storage/d1/migrations/seed.sql', template: true },
    // Wrangler config (minimal)
    wranglerConfig('shared/wrangler.usage.jsonc.hbs', 'usage'),
    // Project files
    { src: 'shared/package.json.hbs', dest: 'package.json', template: true },
    { src: 'shared/tsconfig.json', dest: 'tsconfig.json', template: false },
    { src: 'shared/README.md.hbs', dest: 'README.md', template: true },
];

const STANDARD_FILES = [
    // Additional migrations
    sqlMigration('standard/migrations/005_error_collection.sql', '005_error_collection.sql'),
    // Wrangler configs
    wranglerConfig('standard/wrangler.error-collector.jsonc.hbs', 'error-collector'),
    wranglerConfig('standard/wrangler.sentinel.jsonc.hbs', 'sentinel'),
];

const FULL_FILES = [
    // Additional migrations
    sqlMigration('full/migrations/006_pattern_discovery.sql', '006_pattern_discovery.sql'),
    sqlMigration('full/migrations/007_notifications_search.sql', '007_notifications_search.sql'),
    // Wrangler configs
    wranglerConfig('full/wrangler.pattern-discovery.jsonc.hbs', 'pattern-discovery'),
    wranglerConfig('full/wrangler.alert-router.jsonc.hbs', 'alert-router'),
    wranglerConfig('full/wrangler.notifications.jsonc.hbs', 'notifications'),
    wranglerConfig('full/wrangler.search.jsonc.hbs', 'search'),
    wranglerConfig('full/wrangler.settings.jsonc.hbs', 'settings'),
];

/**
 * Return the manifest for a tier. Tiers are cumulative:
 * minimal ⊂ standard ⊂ full.
 */
export function getFilesForTier(tier) {
    const manifest = [...SHARED_FILES];
    if (tier === 'standard' || tier === 'full') {
        manifest.push(...STANDARD_FILES);
    }
    if (tier === 'full') {
        manifest.push(...FULL_FILES);
    }
    return manifest;
}
package/package.json ADDED
@@ -0,0 +1,46 @@
1
+ {
2
+ "name": "@littlebearapps/create-platform",
3
+ "version": "1.0.0",
4
+ "description": "Scaffold a Cloudflare Workers platform with SDK integration, circuit breakers, and cost protection",
5
+ "type": "module",
6
+ "bin": {
7
+ "create-platform": "./dist/index.js"
8
+ },
9
+ "files": [
10
+ "dist/",
11
+ "templates/"
12
+ ],
13
+ "scripts": {
14
+ "build": "tsc",
15
+ "test": "vitest run",
16
+ "typecheck": "tsc --noEmit",
17
+ "prepublishOnly": "npm run build"
18
+ },
19
+ "dependencies": {
20
+ "handlebars": "^4.7.8",
21
+ "picocolors": "^1.1.1"
22
+ },
23
+ "devDependencies": {
24
+ "@types/node": "^22.0.0",
25
+ "typescript": "^5.7.3",
26
+ "vitest": "^3.0.5"
27
+ },
28
+ "publishConfig": {
29
+ "access": "public"
30
+ },
31
+ "repository": {
32
+ "type": "git",
33
+ "url": "https://github.com/littlebearapps/platform-sdk.git",
34
+ "directory": "packages/create-platform"
35
+ },
36
+ "keywords": [
37
+ "cloudflare-workers",
38
+ "platform-sdk",
39
+ "scaffold",
40
+ "cost-protection",
41
+ "circuit-breaker",
42
+ "create"
43
+ ],
44
+ "author": "Little Bear Apps",
45
+ "license": "MIT"
46
+ }
-- =============================================================================
-- 006_pattern_discovery.sql — AI pattern discovery (full tier)
-- =============================================================================
-- Consolidated from original migrations: 042, 049, 050
--
-- Tables:
--   transient_pattern_suggestions — AI-suggested patterns with approval workflow
--   pattern_audit_log             — Pattern lifecycle event audit trail
--   error_clusters                — Groups of similar unclassified errors
--   pattern_match_evidence        — Detailed match evidence for evaluation
--
-- Uses the FINAL schema from migration 049 which expanded the status constraint
-- to include 'shadow' and 'stale' states, and added protection/source tracking.
-- Also includes review_context from migration 050.
-- =============================================================================


-- =============================================================================
-- TRANSIENT PATTERN SUGGESTIONS (final schema from 049, with 050 additions)
-- =============================================================================
-- Pattern suggestions from AI analysis with human-in-the-loop approval.
-- Patterns go through: pending -> shadow -> ready for review -> approved/rejected
-- Static-imported patterns are protected (is_protected=1, cannot auto-demote).

CREATE TABLE IF NOT EXISTS transient_pattern_suggestions (
  id TEXT PRIMARY KEY,

  -- Pattern definition (supports DSL types for safety)
  pattern_type TEXT NOT NULL CHECK (pattern_type IN ('regex', 'contains', 'startsWith', 'statusCode')),
  pattern_value TEXT NOT NULL,
  category TEXT NOT NULL,
  scope TEXT DEFAULT 'global', -- 'global', 'service:name', 'upstream:name'

  -- AI metadata
  confidence_score REAL,
  sample_messages TEXT, -- JSON array of sample error messages
  ai_reasoning TEXT,
  cluster_id TEXT, -- Reference to error cluster

  -- Approval workflow (expanded states from 049)
  status TEXT DEFAULT 'pending' CHECK (status IN ('pending', 'shadow', 'approved', 'stale', 'rejected', 'disabled')),
  reviewed_by TEXT,
  reviewed_at INTEGER,
  rejection_reason TEXT,

  -- Validation results (backtest against historical errors)
  backtest_match_count INTEGER,
  backtest_total_errors INTEGER,
  backtest_match_rate REAL,
  backtest_run_at INTEGER,

  -- Shadow mode (run pattern but don't apply)
  shadow_mode_start INTEGER,
  shadow_mode_end INTEGER,
  shadow_mode_matches INTEGER DEFAULT 0,

  -- Shadow evaluation tracking (from 049)
  shadow_match_days TEXT, -- JSON array of unique days with matches

  -- Lifecycle tracking
  enabled_at INTEGER,
  disabled_at INTEGER,
  last_matched_at INTEGER,
  match_count INTEGER DEFAULT 0,

  -- Protection and source tracking (from 049)
  is_protected INTEGER DEFAULT 0, -- 1 = cannot be auto-demoted (for static patterns)
  source TEXT DEFAULT 'ai-discovered' CHECK (source IN ('ai-discovered', 'static-import', 'manual')),
  original_regex TEXT, -- Original regex if converted from static

  -- Review context (from 050) — JSON with match evidence summary
  review_context TEXT,
  -- review_context stores:
  -- {
  --   "totalMatches": number,
  --   "matchesByProject": { "project-a": 5, "project-b": 3 },
  --   "matchesByScript": { "worker-a": 5, "worker-b": 3 },
  --   "sampleMessages": ["msg1", "msg2", "msg3"],
  --   "distinctDays": 5,
  --   "aiExplainer": "This pattern catches X errors across Y projects...",
  --   "readyForReviewAt": timestamp
  -- }

  -- Timestamps (unix epoch seconds)
  created_at INTEGER DEFAULT (unixepoch()),
  updated_at INTEGER DEFAULT (unixepoch())
);

CREATE INDEX IF NOT EXISTS idx_suggestions_status ON transient_pattern_suggestions(status);
CREATE INDEX IF NOT EXISTS idx_suggestions_category ON transient_pattern_suggestions(category);
CREATE INDEX IF NOT EXISTS idx_suggestions_scope ON transient_pattern_suggestions(scope);
CREATE INDEX IF NOT EXISTS idx_suggestions_created ON transient_pattern_suggestions(created_at DESC);
CREATE INDEX IF NOT EXISTS idx_suggestions_source ON transient_pattern_suggestions(source);
CREATE INDEX IF NOT EXISTS idx_suggestions_protected ON transient_pattern_suggestions(is_protected);


-- =============================================================================
-- PATTERN AUDIT LOG (final schema from 049)
-- =============================================================================
-- Audit log for pattern lifecycle events including self-tuning actions.

CREATE TABLE IF NOT EXISTS pattern_audit_log (
  id TEXT PRIMARY KEY,
  pattern_id TEXT NOT NULL,
  action TEXT NOT NULL CHECK (action IN (
    'created',           -- AI suggested pattern
    'approved',          -- Human approved
    'rejected',          -- Human rejected
    'enabled',           -- Activated in production
    'disabled',          -- Manually disabled
    'auto-disabled',     -- System disabled due to anomaly
    'backtest-passed',   -- Backtest validation passed
    'backtest-failed',   -- Backtest validation failed
    'shadow-started',    -- Shadow mode started
    'shadow-completed',  -- Shadow mode completed
    'expired',           -- Auto-expired due to inactivity
    'auto-promoted',     -- Auto-promoted from shadow to approved
    'auto-demoted',      -- Auto-demoted from approved to stale
    'reactivated',       -- Reactivated from stale to shadow
    'imported',          -- Imported from static patterns
    'ready-for-review'   -- Pattern ready for human review
  )),
  actor TEXT, -- 'ai:model', 'human:name', 'system:evaluator'
  reason TEXT,
  metadata TEXT, -- JSON with additional context
  created_at INTEGER DEFAULT (unixepoch()),

  FOREIGN KEY (pattern_id) REFERENCES transient_pattern_suggestions(id)
);

CREATE INDEX IF NOT EXISTS idx_audit_pattern ON pattern_audit_log(pattern_id);
CREATE INDEX IF NOT EXISTS idx_audit_action ON pattern_audit_log(action);
CREATE INDEX IF NOT EXISTS idx_audit_created ON pattern_audit_log(created_at DESC);


-- =============================================================================
-- ERROR CLUSTERS (from 042)
-- =============================================================================
-- Groups similar unclassified errors for AI pattern discovery.

CREATE TABLE IF NOT EXISTS error_clusters (
  id TEXT PRIMARY KEY,

  -- Cluster identification
  cluster_hash TEXT NOT NULL UNIQUE, -- Hash of normalised message for dedup
  representative_message TEXT NOT NULL, -- Sample message for display

  -- Statistics
  occurrence_count INTEGER DEFAULT 1,
  unique_fingerprints INTEGER DEFAULT 1,
  first_seen_at INTEGER NOT NULL,
  last_seen_at INTEGER NOT NULL,

  -- Scripts affected
  scripts TEXT, -- JSON array of script names

  -- Processing status
  status TEXT DEFAULT 'pending' CHECK (status IN ('pending', 'processing', 'suggested', 'ignored')),
  suggestion_id TEXT, -- Reference to created suggestion

  created_at INTEGER DEFAULT (unixepoch()),
  updated_at INTEGER DEFAULT (unixepoch()),

  FOREIGN KEY (suggestion_id) REFERENCES transient_pattern_suggestions(id)
);

CREATE INDEX IF NOT EXISTS idx_clusters_status ON error_clusters(status);
CREATE INDEX IF NOT EXISTS idx_clusters_count ON error_clusters(occurrence_count DESC);
CREATE INDEX IF NOT EXISTS idx_clusters_hash ON error_clusters(cluster_hash);


-- =============================================================================
-- PATTERN MATCH EVIDENCE (from 050)
-- =============================================================================
-- Tracks when patterns match errors during shadow evaluation.
-- Provides rich context for human review decisions.

CREATE TABLE IF NOT EXISTS pattern_match_evidence (
  id TEXT PRIMARY KEY,
  pattern_id TEXT NOT NULL,

  -- Match context
  script_name TEXT NOT NULL,
  project TEXT,
  error_fingerprint TEXT,
  normalized_message TEXT,

  -- Match metadata
  matched_at INTEGER NOT NULL DEFAULT (unixepoch()),
  error_type TEXT, -- 'exception', 'soft_error', 'warning'
  priority TEXT, -- 'P0' - 'P4'

  FOREIGN KEY (pattern_id) REFERENCES transient_pattern_suggestions(id)
);

CREATE INDEX IF NOT EXISTS idx_pattern_match_evidence_pattern_id ON pattern_match_evidence(pattern_id);
CREATE INDEX IF NOT EXISTS idx_pattern_match_evidence_matched_at ON pattern_match_evidence(matched_at);
CREATE INDEX IF NOT EXISTS idx_pattern_match_evidence_project ON pattern_match_evidence(project);
CREATE INDEX IF NOT EXISTS idx_pattern_match_evidence_script ON pattern_match_evidence(script_name);
-- =============================================================================
-- 007_notifications_search.sql — Notifications, search, settings (full tier)
-- =============================================================================
-- Consolidated from original migrations: 044, 045, 046
--
-- Tables:
--   notifications     — Cross-project notifications from various sources
--   platform_settings — Unified settings with project/category/key namespacing
--   search_index      — Main search index table
--   search_fts        — FTS5 virtual table for full-text search
--
-- Triggers:
--   search_fts_ai — Sync FTS index on INSERT
--   search_fts_ad — Sync FTS index on DELETE
--   search_fts_au — Sync FTS index on UPDATE
-- =============================================================================


-- =============================================================================
-- NOTIFICATIONS (from 044)
-- =============================================================================
-- Unified notifications from various sources:
--   - error-collector: P0-P2 errors needing attention
--   - pattern-discovery: AI-suggested patterns pending approval
--   - circuit-breaker: Feature budget warnings and pauses
--   - usage: Cost threshold warnings
--
-- Per-user read state is stored in KV for fast access.

CREATE TABLE IF NOT EXISTS notifications (
  id TEXT PRIMARY KEY,
  category TEXT NOT NULL CHECK (category IN ('error', 'warning', 'info', 'success')),
  source TEXT NOT NULL, -- 'error-collector', 'pattern-discovery', etc.
  source_id TEXT, -- Reference to source record
  title TEXT NOT NULL,
  description TEXT,
  priority TEXT DEFAULT 'info' CHECK (priority IN ('critical', 'high', 'medium', 'low', 'info')),
  action_url TEXT, -- Deep link to relevant page
  action_label TEXT, -- Button text
  project TEXT, -- Project slug or NULL for global
  created_at INTEGER DEFAULT (unixepoch()),
  expires_at INTEGER -- Optional expiry for transient notifications
);

CREATE INDEX IF NOT EXISTS idx_notifications_created ON notifications(created_at DESC);
CREATE INDEX IF NOT EXISTS idx_notifications_project ON notifications(project);
CREATE INDEX IF NOT EXISTS idx_notifications_source ON notifications(source);
CREATE INDEX IF NOT EXISTS idx_notifications_priority ON notifications(priority);
CREATE INDEX IF NOT EXISTS idx_notifications_project_created ON notifications(project, created_at DESC);


-- =============================================================================
-- PLATFORM SETTINGS (from 045)
-- =============================================================================
-- Unified settings with project/category/key namespacing.
-- Categories: notifications, thresholds, display, api.

CREATE TABLE IF NOT EXISTS platform_settings (
  id TEXT PRIMARY KEY,
  project TEXT NOT NULL, -- 'global', 'platform', or project slug
  category TEXT NOT NULL, -- 'notifications', 'thresholds', 'display', 'api'
  key TEXT NOT NULL,
  value TEXT NOT NULL, -- JSON-encoded value
  description TEXT,
  updated_at INTEGER DEFAULT (unixepoch()),
  updated_by TEXT,
  UNIQUE(project, category, key)
);

CREATE INDEX IF NOT EXISTS idx_settings_project_category ON platform_settings(project, category);
CREATE INDEX IF NOT EXISTS idx_settings_project_key ON platform_settings(project, key);


-- =============================================================================
-- SEARCH INDEX (from 046)
-- =============================================================================
-- Platform-wide search across errors, patterns, settings, pages, services.
-- Uses SQLite FTS5 for efficient full-text search with:
--   - Prefix matching (e.g., "err*")
--   - Phrase matching (e.g., "circuit breaker")
--   - Boolean operators (AND, OR, NOT)

-- Main search index table
CREATE TABLE IF NOT EXISTS search_index (
  id TEXT PRIMARY KEY,
  content_type TEXT NOT NULL, -- 'error', 'pattern', 'setting', 'page', 'service'
  project TEXT, -- Project slug or NULL for global
  title TEXT NOT NULL,
  content TEXT NOT NULL, -- Searchable content (normalised)
  url TEXT NOT NULL, -- Deep link
  metadata TEXT, -- JSON with type-specific data
  indexed_at INTEGER DEFAULT (unixepoch()),
  source_updated_at INTEGER -- When the source record was last updated
);

CREATE INDEX IF NOT EXISTS idx_search_content_type ON search_index(content_type, project);
CREATE INDEX IF NOT EXISTS idx_search_source_updated ON search_index(source_updated_at);

-- FTS5 virtual table for full-text search
-- content='search_index' makes it a content-less FTS table referencing the main table
CREATE VIRTUAL TABLE IF NOT EXISTS search_fts USING fts5(
  title,
  content,
  content='search_index',
  content_rowid='rowid',
  tokenize='porter unicode61' -- Porter stemming + Unicode support
);

-- Trigger to keep FTS index in sync on INSERT
CREATE TRIGGER IF NOT EXISTS search_fts_ai AFTER INSERT ON search_index BEGIN
  INSERT INTO search_fts(rowid, title, content)
  VALUES (new.rowid, new.title, new.content);
END;

-- Trigger to keep FTS index in sync on DELETE
-- (external-content FTS5 deletes are expressed as a special 'delete' insert)
CREATE TRIGGER IF NOT EXISTS search_fts_ad AFTER DELETE ON search_index BEGIN
  INSERT INTO search_fts(search_fts, rowid, title, content)
  VALUES ('delete', old.rowid, old.title, old.content);
END;

-- Trigger to keep FTS index in sync on UPDATE (delete old row, insert new)
CREATE TRIGGER IF NOT EXISTS search_fts_au AFTER UPDATE ON search_index BEGIN
  INSERT INTO search_fts(search_fts, rowid, title, content)
  VALUES ('delete', old.rowid, old.title, old.content);
  INSERT INTO search_fts(rowid, title, content)
  VALUES (new.rowid, new.title, new.content);
END;