@delegance/claude-autopilot 5.0.8 → 5.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +42 -0
- package/README.md +5 -1
- package/dist/src/cli/index.js +130 -2
- package/dist/src/cli/init-migrate.d.ts +35 -0
- package/dist/src/cli/init-migrate.js +299 -0
- package/dist/src/cli/migrate-doctor.d.ts +19 -0
- package/dist/src/cli/migrate-doctor.js +191 -0
- package/dist/src/core/migrate/alias-resolver.d.ts +18 -0
- package/dist/src/core/migrate/alias-resolver.js +150 -0
- package/dist/src/core/migrate/audit-log.d.ts +30 -0
- package/dist/src/core/migrate/audit-log.js +100 -0
- package/dist/src/core/migrate/contract.d.ts +27 -0
- package/dist/src/core/migrate/contract.js +35 -0
- package/dist/src/core/migrate/detector-rules.d.ts +26 -0
- package/dist/src/core/migrate/detector-rules.js +147 -0
- package/dist/src/core/migrate/detector.d.ts +16 -0
- package/dist/src/core/migrate/detector.js +105 -0
- package/dist/src/core/migrate/dispatcher.d.ts +19 -0
- package/dist/src/core/migrate/dispatcher.js +358 -0
- package/dist/src/core/migrate/doctor-checks.d.ts +19 -0
- package/dist/src/core/migrate/doctor-checks.js +304 -0
- package/dist/src/core/migrate/envelope.d.ts +25 -0
- package/dist/src/core/migrate/envelope.js +84 -0
- package/dist/src/core/migrate/executor.d.ts +33 -0
- package/dist/src/core/migrate/executor.js +102 -0
- package/dist/src/core/migrate/handshake.d.ts +17 -0
- package/dist/src/core/migrate/handshake.js +130 -0
- package/dist/src/core/migrate/migrator.d.ts +34 -0
- package/dist/src/core/migrate/migrator.js +302 -0
- package/dist/src/core/migrate/monorepo.d.ts +2 -0
- package/dist/src/core/migrate/monorepo.js +114 -0
- package/dist/src/core/migrate/policy-enforcer.d.ts +28 -0
- package/dist/src/core/migrate/policy-enforcer.js +111 -0
- package/dist/src/core/migrate/result-parser.d.ts +16 -0
- package/dist/src/core/migrate/result-parser.js +152 -0
- package/dist/src/core/migrate/schema-validator.d.ts +11 -0
- package/dist/src/core/migrate/schema-validator.js +103 -0
- package/dist/src/core/migrate/types.d.ts +49 -0
- package/dist/src/core/migrate/types.js +3 -0
- package/package.json +5 -1
- package/presets/aliases.lock.json +20 -0
- package/presets/schemas/migrate.schema.json +134 -0
- package/skills/autopilot/SKILL.md +29 -9
- package/skills/migrate/skill.manifest.json +7 -0
- package/skills/migrate-none/SKILL.md +40 -0
- package/skills/migrate-none/skill.manifest.json +7 -0
- package/skills/migrate-supabase/SKILL.md +126 -0
- package/skills/migrate-supabase/skill.manifest.json +7 -0
|
@@ -0,0 +1,130 @@
|
|
|
1
|
+
// src/core/migrate/handshake.ts
|
|
2
|
+
//
|
|
3
|
+
// Reads <skillPath>/skill.manifest.json and verifies compatibility:
|
|
4
|
+
// - runtimeVersion must satisfy [min_runtime, max_runtime] (semver, no pre-release)
|
|
5
|
+
// - skill_runtime_api_version major must equal envelopeContractVersion major
|
|
6
|
+
//
|
|
7
|
+
// Fails closed: missing/invalid manifest is rejected, not silently allowed.
|
|
8
|
+
import * as fs from 'node:fs';
|
|
9
|
+
import * as path from 'node:path';
|
|
10
|
+
/**
 * Parse a strict "X.Y.Z" or "X.Y.Z-prerelease" semver string.
 * Returns { major, minor, patch[, prerelease] } — the `prerelease` key is
 * present only when the string carries one — or null for any other shape.
 */
function parseSemver(s) {
    const match = /^(\d+)\.(\d+)\.(\d+)(?:-([0-9A-Za-z.-]+))?$/.exec(s);
    if (match === null) {
        return null;
    }
    const [, majorStr, minorStr, patchStr, prerelease] = match;
    return {
        major: parseInt(majorStr, 10),
        minor: parseInt(minorStr, 10),
        patch: parseInt(patchStr, 10),
        ...(prerelease ? { prerelease } : {}),
    };
}
|
|
22
|
+
/**
 * Parse a range bound expressed as either:
 * - "X.Y.Z"                 -> exact bound (isWildcard: false)
 * - "X.x", "X.*", "X.x.x"   -> major-only wildcard (isWildcard: true)
 * Returns null for anything else.
 */
function parseRangeBound(s) {
    // Wildcard forms pin only the major; minor/patch are zeroed.
    for (const re of [/^(\d+)\.[xX*]$/, /^(\d+)\.[xX*]\.[xX*]$/]) {
        const hit = re.exec(s);
        if (hit) {
            return { major: parseInt(hit[1], 10), minor: 0, patch: 0, isWildcard: true };
        }
    }
    const exact = parseSemver(s);
    if (!exact) {
        return null;
    }
    // Copy only the numeric triple; any prerelease tag on a bound is dropped.
    return { major: exact.major, minor: exact.minor, patch: exact.patch, isWildcard: false };
}
|
|
38
|
+
/**
 * Total order on {major, minor, patch}: negative when a < b, zero when
 * equal, positive when a > b. Fields are compared most-significant first.
 */
function compare(a, b) {
    const componentDiffs = [
        a.major - b.major,
        a.minor - b.minor,
        a.patch - b.patch,
    ];
    return componentDiffs.find(d => d !== 0) ?? 0;
}
|
|
47
|
+
/**
 * Check `version` against the [min, max] bounds (each "X.Y.Z" or "X.x"/"X.*").
 * Fails closed: an unparseable version, an unparseable bound, or a
 * pre-release runtime is rejected (all reported as 'runtime-below-min').
 */
function isWithinRange(version, min, max) {
    const rejectedBelow = () => ({ ok: false, reason: 'runtime-below-min' });
    const rejectedAbove = () => ({ ok: false, reason: 'runtime-above-max' });
    const v = parseSemver(version);
    // Strict semver: pre-release versions don't satisfy plain ranges.
    if (!v || v.prerelease) {
        return rejectedBelow();
    }
    const lo = parseRangeBound(min);
    const hi = parseRangeBound(max);
    if (!lo || !hi) {
        return rejectedBelow();
    }
    if (compare(v, lo) < 0) {
        return rejectedBelow();
    }
    if (hi.isWildcard) {
        // "N.x" as an upper bound means < (N+1).0.0, so only the major matters.
        if (v.major > hi.major) {
            return rejectedAbove();
        }
    }
    else if (compare(v, hi) > 0) {
        return rejectedAbove();
    }
    return { ok: true };
}
|
|
71
|
+
/**
 * True iff `o` is an object carrying all four required string fields of a
 * skill manifest (skillId, skill_runtime_api_version, min_runtime,
 * max_runtime).
 */
function isValidManifest(o) {
    if (!o || typeof o !== 'object') {
        return false;
    }
    const requiredStringFields = [
        'skillId',
        'skill_runtime_api_version',
        'min_runtime',
        'max_runtime',
    ];
    return requiredStringFields.every(field => typeof o[field] === 'string');
}
|
|
80
|
+
/**
 * Verify <opts.skillPath>/skill.manifest.json against the running runtime:
 * - the manifest must exist, parse as JSON, and carry the required fields;
 * - skill_runtime_api_version major must equal envelopeContractVersion major;
 * - opts.runtimeVersion must satisfy [min_runtime, max_runtime].
 * Fails closed on every error path; on success returns the parsed manifest.
 */
export function performHandshake(opts) {
    const manifestPath = path.join(opts.skillPath, 'skill.manifest.json');
    const fail = (reasonCode, message) => ({ ok: false, reasonCode, message });
    if (!fs.existsSync(manifestPath)) {
        return fail('manifest-missing', `skill.manifest.json not found at ${manifestPath}`);
    }
    let raw;
    try {
        raw = fs.readFileSync(manifestPath, 'utf8');
    }
    catch (err) {
        return fail('manifest-invalid', `cannot read manifest: ${err.message}`);
    }
    let parsed;
    try {
        parsed = JSON.parse(raw);
    }
    catch (err) {
        return fail('manifest-invalid', `manifest JSON parse failed: ${err.message}`);
    }
    if (!isValidManifest(parsed)) {
        return fail('manifest-invalid', 'manifest missing required fields (skillId, skill_runtime_api_version, min_runtime, max_runtime)');
    }
    // Major components of the skill API and envelope contract must agree.
    const majorOf = (version) => version.split('.')[0];
    if (majorOf(parsed.skill_runtime_api_version) !== majorOf(opts.envelopeContractVersion)) {
        return fail('api-version-mismatch', `skill API version ${parsed.skill_runtime_api_version} incompatible with envelope contract ${opts.envelopeContractVersion} (major must match)`);
    }
    // Runtime must fall inside the manifest's declared [min, max] window.
    const range = isWithinRange(opts.runtimeVersion, parsed.min_runtime, parsed.max_runtime);
    if (!range.ok) {
        const hint = range.reason === 'runtime-below-min'
            ? `requires runtime >= ${parsed.min_runtime}, got ${opts.runtimeVersion} -- run \`npm install -g @delegance/claude-autopilot@latest\``
            : `requires runtime <= ${parsed.max_runtime}, got ${opts.runtimeVersion} -- pin an older runtime or upgrade the skill`;
        return fail(range.reason, `skill ${parsed.skillId} ${hint}`);
    }
    return { ok: true, manifest: parsed };
}
|
|
130
|
+
//# sourceMappingURL=handshake.js.map
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
export interface MigrateLegacySkillOptions {
    /** Repository root the migrator operates under; resolved to an absolute path. */
    repoRoot: string;
    /**
     * If an archive directory with the same ISO timestamp already exists, the
     * migrator appends a monotonic counter rather than overwriting. `force`
     * has no destructive meaning today; reserved for future "yes, replace
     * already-migrated content" semantics. Currently informational only.
     */
    force?: boolean;
}
|
|
11
|
+
export interface MigrateLegacySkillResult {
    /** True when the legacy SKILL.md was archived and replaced with the thin reference. */
    migrated: boolean;
    /** Short string describing why we did or did not migrate. */
    reason?: string;
    /** Absolute path to the archive directory (only set when migrated=true). */
    archivePath?: string;
    /** Human-readable per-step audit trail of every move/skip decision. */
    report: string[];
}
|
|
20
|
+
/**
 * The thin reference SKILL.md content for `skills/migrate/`. After migration
 * this is what the slot must contain. Kept in sync with the dispatcher's
 * envelope contract (see src/core/migrate/dispatcher.ts).
 */
export declare const THIN_MIGRATE_SKILL_MD = "---\nname: migrate\ndescription: Generic migration orchestrator. Reads .autopilot/stack.md, builds an invocation envelope, dispatches to the configured rich migrate skill (migrate.supabase@1, migrate.prisma@1, \u2026), and parses the ResultArtifact.\n---\n\n# /migrate \u2014 Generic migration orchestrator\n\nThis is the thin entrypoint. It does not run migrations itself \u2014 it dispatches\nto the rich skill named in `.autopilot/stack.md` under `migrate.skill`.\n\n## How it works\n\n1. Read `.autopilot/stack.md` for `migrate.skill` (e.g. `migrate.supabase@1`,\n   `migrate.prisma@1`, `none@1`).\n2. Build an invocation envelope (contractVersion, invocationId, nonce, env,\n   dryRun, gitBase/Head, changedFiles, etc.).\n3. Resolve the stable skill ID via `presets/aliases.lock.json`.\n4. Spawn the resolved skill subprocess with `AUTOPILOT_ENVELOPE` set.\n5. Read the `ResultArtifact` written to `AUTOPILOT_RESULT_PATH`.\n6. Branch the pipeline on `status` and `nextActions`.\n\n## Configuration\n\nSet `migrate.skill` in `.autopilot/stack.md`:\n\n```yaml\nschema_version: 1\nmigrate:\n  skill: \"migrate.supabase@1\"   # or migrate.prisma@1, none@1, \u2026\n  policy:\n    allow_prod_in_ci: false\n    require_clean_git: true\n    require_manual_approval: true\n    require_dry_run_first: false\n```\n\nFor the rich Supabase runner (`data/deltas`, `types/supabase.ts`,\n`.claude/supabase-envs.json`) see `skills/migrate-supabase/SKILL.md`.\n\nFor an explicit no-op (docs-only PRs, no DB yet) see\n`skills/migrate-none/SKILL.md`.\n";
|
|
26
|
+
/**
 * Archive a legacy Supabase-shaped `skills/migrate/SKILL.md` (timestamped,
 * never deleted) and install the thin generic orchestrator reference in its
 * place. Idempotent: a second invocation on the same repo is a no-op.
 */
export declare function migrateLegacySkill(opts: MigrateLegacySkillOptions): MigrateLegacySkillResult;
/**
 * Returns true iff `skills/migrate/SKILL.md` looks like the legacy Delegance
 * Supabase shape (clean OR user-edited). Used by the doctor for *detection*
 * before deciding whether to run the migrator. Never reads from outside
 * `repoRoot`.
 */
export declare function detectsLegacyMigrateSkill(repoRoot: string): boolean;
|
|
34
|
+
//# sourceMappingURL=migrator.d.ts.map
|
|
@@ -0,0 +1,302 @@
|
|
|
1
|
+
// src/core/migrate/migrator.ts
|
|
2
|
+
//
|
|
3
|
+
// Task 8.1 — Legacy `/migrate` skill migrator.
|
|
4
|
+
//
|
|
5
|
+
// Migrates a repo where `skills/migrate/SKILL.md` still holds the original
|
|
6
|
+
// Delegance Supabase-shaped content over to the generalised post-Phase-8
|
|
7
|
+
// layout:
|
|
8
|
+
//
|
|
9
|
+
// skills/migrate/SKILL.md — thin generic orchestrator (this file
|
|
10
|
+
// describes the dispatcher contract)
|
|
11
|
+
// skills/migrate-supabase/SKILL.md — rich Supabase runner (already shipped)
|
|
12
|
+
//
|
|
13
|
+
// Detection rules (legacy shape):
|
|
14
|
+
// 1. SKILL.md content sha256 matches the known legacy fingerprint, OR
|
|
15
|
+
// 2. Frontmatter `description:` contains "Supabase" (case-insensitive)
|
|
16
|
+
// AND `name:` is "migrate" (i.e. the slot is the generic skill but the
|
|
17
|
+
// content describes the rich Supabase variant).
|
|
18
|
+
//
|
|
19
|
+
// Outcomes:
|
|
20
|
+
// - "clean" — content matches legacy fingerprint exactly. The
|
|
21
|
+
// legacy file is moved to a timestamped archive
|
|
22
|
+
// directory: skills/migrate.archive-<ISO>/SKILL.md.
|
|
23
|
+
// Then the thin reference is written to
|
|
24
|
+
// skills/migrate/SKILL.md.
|
|
25
|
+
// - "user-edited" — description matches but content drifted. We write
|
|
26
|
+
// a backup at skills/migrate.backup-<ISO>/SKILL.md
|
|
27
|
+
// (preserving user diffs) before installing the thin
|
|
28
|
+
// reference. `force: false` (default) still archives
|
|
29
|
+
// and rewrites — `force` only affects whether we
|
|
30
|
+
// overwrite an existing archive collision.
|
|
31
|
+
// - "already-migrated" — content already matches the thin reference (or
|
|
32
|
+
// frontmatter `name: migrate` with description that
|
|
33
|
+
// does NOT mention Supabase). Returns `migrated:
|
|
34
|
+
// false` and a non-error reason.
|
|
35
|
+
//
|
|
36
|
+
// Safety guarantees:
|
|
37
|
+
// - Never `rm` — archives are timestamped paths; if a collision occurs we
|
|
38
|
+
// append a monotonic counter (`-2`, `-3`, …) instead of overwriting.
|
|
39
|
+
// - Reference content is written via `fs.writeFileSync`. The legacy file is
|
|
40
|
+
// copied via `fs.copyFileSync` into the archive before being replaced.
|
|
41
|
+
// - Idempotent: invoking twice on the same repo is a no-op the second
|
|
42
|
+
// time.
|
|
43
|
+
//
|
|
44
|
+
// See spec § "Task 8.1 (migrator) — collision handling" for archive naming.
|
|
45
|
+
import * as fs from 'node:fs';
|
|
46
|
+
import * as path from 'node:path';
|
|
47
|
+
/**
 * The thin reference SKILL.md content for `skills/migrate/`. After migration
 * this is what the slot must contain. Kept in sync with the dispatcher's
 * envelope contract (see src/core/migrate/dispatcher.ts).
 *
 * NOTE: compared byte-for-byte in detectLegacyShape — do not reformat.
 */
export const THIN_MIGRATE_SKILL_MD = `---
name: migrate
description: Generic migration orchestrator. Reads .autopilot/stack.md, builds an invocation envelope, dispatches to the configured rich migrate skill (migrate.supabase@1, migrate.prisma@1, …), and parses the ResultArtifact.
---

# /migrate — Generic migration orchestrator

This is the thin entrypoint. It does not run migrations itself — it dispatches
to the rich skill named in \`.autopilot/stack.md\` under \`migrate.skill\`.

## How it works

1. Read \`.autopilot/stack.md\` for \`migrate.skill\` (e.g. \`migrate.supabase@1\`,
   \`migrate.prisma@1\`, \`none@1\`).
2. Build an invocation envelope (contractVersion, invocationId, nonce, env,
   dryRun, gitBase/Head, changedFiles, etc.).
3. Resolve the stable skill ID via \`presets/aliases.lock.json\`.
4. Spawn the resolved skill subprocess with \`AUTOPILOT_ENVELOPE\` set.
5. Read the \`ResultArtifact\` written to \`AUTOPILOT_RESULT_PATH\`.
6. Branch the pipeline on \`status\` and \`nextActions\`.

## Configuration

Set \`migrate.skill\` in \`.autopilot/stack.md\`:

\`\`\`yaml
schema_version: 1
migrate:
  skill: "migrate.supabase@1"   # or migrate.prisma@1, none@1, …
  policy:
    allow_prod_in_ci: false
    require_clean_git: true
    require_manual_approval: true
    require_dry_run_first: false
\`\`\`

For the rich Supabase runner (\`data/deltas\`, \`types/supabase.ts\`,
\`.claude/supabase-envs.json\`) see \`skills/migrate-supabase/SKILL.md\`.

For an explicit no-op (docs-only PRs, no DB yet) see
\`skills/migrate-none/SKILL.md\`.
`;
|
|
94
|
+
/**
 * Canonical text of the original Delegance legacy SKILL.md (the one shipped
 * in commit d84f8ff before Task 4.1 added the manifest). detectLegacyShape
 * compares against this text with exact string equality to recognise a
 * "clean" legacy install.
 *
 * We embed the canonical text itself rather than just a sha256 hash so
 * future maintainers can audit exactly what is being matched.
 *
 * NOTE: compared byte-for-byte — do not reformat.
 */
const LEGACY_CANONICAL_SKILL_MD = `---
name: migrate
description: Run database migrations against Supabase environments (dev → QA → prod). Validates SQL, executes with ledger tracking, and auto-generates types/supabase.ts.
---

# Database Migration

Run a migration through the dev → QA → prod pipeline with validation at each step.

## Usage

### 1. Identify the migration file

If given as argument, use that. Otherwise find the most recently modified \`.sql\` file in \`data/deltas/\`.

### 2. Validate (dry run on dev)

\`\`\`bash
npx tsx scripts/supabase/migrate.ts <file> --env dev --dry-run
\`\`\`

Present validation results. If errors, help the user fix them before proceeding.

### 3. Run on dev

\`\`\`bash
npx tsx scripts/supabase/migrate.ts <file> --env dev
\`\`\`

### 4. Ask the user

> "Migration succeeded on dev. \`types/supabase.ts\` updated. Promote to QA?"

### 5. Run on QA

\`\`\`bash
npx tsx scripts/supabase/migrate.ts --promote qa
\`\`\`

### 6. Ask the user

> "Migration succeeded on QA. Promote to prod?"

### 7. Run on prod

\`\`\`bash
npx tsx scripts/supabase/migrate.ts --promote prod --confirm-prod
\`\`\`

### 8. Commit

After all environments are done, commit the updated \`types/supabase.ts\` and the migration file:

\`\`\`bash
git add types/supabase.ts data/deltas/<migration-file>
git commit -m "feat: <description of schema change>"
\`\`\`

## Flags

| Flag | Purpose |
|------|---------|
| \`--env dev\\|qa\\|prod\` | Target environment |
| \`--dry-run\` | Validate only, don't execute |
| \`--force\` | Allow destructive operations (DROP, TRUNCATE) |
| \`--confirm-prod\` | Required for prod execution |
| \`--promote qa\\|prod\` | Run missing migrations from source env |

## Validation Checks

The system validates before every execution:
- Duplicate table/column detection
- snake_case naming enforcement
- RLS + policy required for every new table
- Destructive operation blocking (unless --force)
- Cross-env prerequisite verification
- Checksum integrity (modified files are rejected)
- Promotion chain enforcement (prod requires QA first)

## Requirements

- \`.claude/supabase-envs.json\` with \`dbUrl\` for each env (gitignored)
- \`postgres\` npm package installed
`;
|
|
186
|
+
/**
 * Minimal YAML-frontmatter reader: extracts only top-level `name:` and
 * `description:` scalars from a leading `--- ... ---` block. Returns null
 * when the content has no frontmatter delimiters at all; returns an object
 * (possibly empty) otherwise.
 */
function parseFrontmatter(content) {
    if (!content.startsWith('---')) {
        return null;
    }
    const closing = content.indexOf('\n---', 3);
    if (closing < 0) {
        return null;
    }
    const result = {};
    const keyValueRe = /^([a-zA-Z_][a-zA-Z0-9_-]*):\s*(.*)$/;
    for (const rawLine of content.slice(3, closing).trim().split('\n')) {
        const match = rawLine.trimEnd().match(keyValueRe);
        if (!match) {
            continue;
        }
        const value = (match[2] ?? '').trim();
        if (match[1] === 'name') {
            // Strip one layer of surrounding quotes from the name.
            result.name = value.replace(/^["']|["']$/g, '');
        }
        else if (match[1] === 'description') {
            result.description = value;
        }
    }
    return result;
}
|
|
208
|
+
/**
 * Classify skills/migrate/SKILL.md content:
 *  - 'clean'        exact match for the shipped legacy Supabase text
 *  - 'already-thin' exact thin reference, or a user-authored generic variant
 *  - 'user-edited'  name: migrate with a Supabase-flavoured description
 *  - 'not-legacy'   anything else — never touched
 */
function detectLegacyShape(content) {
    // Exact-match classifications first: clean legacy install or thin slot.
    switch (content) {
        case LEGACY_CANONICAL_SKILL_MD:
            return { kind: 'clean' };
        case THIN_MIGRATE_SKILL_MD:
            return { kind: 'already-thin' };
    }
    const fm = parseFrontmatter(content);
    // No frontmatter, or the slot holds some other skill entirely: hands off.
    if (!fm || fm.name !== 'migrate') {
        return { kind: 'not-legacy' };
    }
    // name: migrate but the content drifted. A Supabase-flavoured description
    // means a legacy copy the user has edited.
    if (fm.description && /supabase/i.test(fm.description)) {
        return { kind: 'user-edited' };
    }
    // name: migrate without any Supabase mention → already a generic / thin
    // variant the user authored themselves.
    return { kind: 'already-thin' };
}
|
|
232
|
+
/** Filesystem-safe timestamp: ISO-8601 now, with ':' and '.' turned into '-'. */
function isoStamp() {
    const iso = new Date().toISOString();
    return iso.split(/[.:]/).join('-');
}
|
|
235
|
+
/**
 * Pick a non-colliding archive path. If `base` already exists, append `-2`,
 * `-3`, … until a free slot is found (counter bounded below 1000). Never
 * overwrites; throws once the counter space is exhausted.
 */
function pickArchivePath(base) {
    let candidate = base;
    let counter = 1;
    while (fs.existsSync(candidate)) {
        counter += 1;
        if (counter >= 1000) {
            // Extremely unlikely; surface as an error rather than silently overwriting.
            throw new Error(`Archive collision: exhausted counter at ${base}`);
        }
        candidate = `${base}-${counter}`;
    }
    return candidate;
}
|
|
250
|
+
/**
 * Migrate a legacy Supabase-shaped `skills/migrate/SKILL.md` to the thin
 * generic orchestrator layout.
 *
 * A 'clean' legacy file is moved to skills/migrate.archive-<ISO>/SKILL.md;
 * a 'user-edited' one to skills/migrate.backup-<ISO>/SKILL.md (preserving
 * the user's diffs). The thin reference is then written into the slot.
 * Nothing is ever deleted; collisions get a monotonic counter suffix.
 * Returns migrated=false with a non-error reason when there is nothing to do.
 */
export function migrateLegacySkill(opts) {
    const repoRoot = path.resolve(opts.repoRoot);
    const skillMdPath = path.join(repoRoot, 'skills', 'migrate', 'SKILL.md');
    const rel = (p) => path.relative(repoRoot, p);
    const report = [];
    if (!fs.existsSync(skillMdPath)) {
        report.push(`skipped: ${rel(skillMdPath)} not present — nothing to migrate`);
        return { migrated: false, reason: 'no-skill-md', report };
    }
    const detection = detectLegacyShape(fs.readFileSync(skillMdPath, 'utf8'));
    switch (detection.kind) {
        case 'already-thin':
            report.push(`skipped: ${rel(skillMdPath)} already matches the thin reference (or a custom non-Supabase variant)`);
            return { migrated: false, reason: 'already-migrated', report };
        case 'not-legacy':
            report.push(`skipped: ${rel(skillMdPath)} has unexpected frontmatter — leaving untouched`);
            return { migrated: false, reason: 'not-legacy-shape', report };
    }
    // 'clean' is archived; 'user-edited' is backed up (user diffs preserved).
    const prefix = detection.kind === 'clean' ? 'migrate.archive' : 'migrate.backup';
    const archiveDir = pickArchivePath(path.join(repoRoot, 'skills', `${prefix}-${isoStamp()}`));
    fs.mkdirSync(archiveDir, { recursive: true });
    const archivedSkillMd = path.join(archiveDir, 'SKILL.md');
    // Copy first so the legacy content is safe before the slot is rewritten.
    fs.copyFileSync(skillMdPath, archivedSkillMd);
    report.push(`archived: ${rel(skillMdPath)} → ${rel(archivedSkillMd)} (${detection.kind})`);
    fs.writeFileSync(skillMdPath, THIN_MIGRATE_SKILL_MD, 'utf8');
    report.push(`wrote: ${rel(skillMdPath)} (thin generic orchestrator reference)`);
    return {
        migrated: true,
        reason: detection.kind === 'clean' ? 'clean-archive' : 'user-edited-backup',
        archivePath: archiveDir,
        report,
    };
}
|
|
288
|
+
/**
 * Returns true iff `skills/migrate/SKILL.md` looks like the legacy Delegance
 * Supabase shape (clean OR user-edited). Used by the doctor for *detection*
 * before deciding whether to run the migrator. Never reads from outside
 * `repoRoot`.
 */
export function detectsLegacyMigrateSkill(repoRoot) {
    const skillMdPath = path.join(path.resolve(repoRoot), 'skills', 'migrate', 'SKILL.md');
    if (!fs.existsSync(skillMdPath)) {
        return false;
    }
    const { kind } = detectLegacyShape(fs.readFileSync(skillMdPath, 'utf8'));
    return kind === 'clean' || kind === 'user-edited';
}
|
|
302
|
+
//# sourceMappingURL=migrator.js.map
|
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
// src/core/migrate/monorepo.ts
|
|
2
|
+
//
|
|
3
|
+
// Discovers monorepo workspaces from common declarations:
|
|
4
|
+
// pnpm-workspace.yaml, package.json#workspaces, nx.json. Falls back to
|
|
5
|
+
// [repoRoot] for single-workspace repos. Glob patterns expanded
|
|
6
|
+
// (packages/*, apps/*).
|
|
7
|
+
//
|
|
8
|
+
// Workspace paths are filtered to those that actually exist as
|
|
9
|
+
// directories AND stay within repoRoot (no path-escape).
|
|
10
|
+
import * as fs from 'node:fs';
|
|
11
|
+
import * as path from 'node:path';
|
|
12
|
+
import * as yaml from 'js-yaml';
|
|
13
|
+
/**
 * Expand one workspace pattern under `repoRoot` into absolute directory
 * paths. Only simple patterns are handled: a literal directory, or a single
 * trailing star ('packages/*', 'apps/*'). Anything more complex yields [].
 *
 * Fix: the containment guard previously used a bare `startsWith(repoRoot)`,
 * which lets a *sibling* directory that merely shares the prefix escape the
 * repo (e.g. '/repo-evil'.startsWith('/repo') is true). The check now
 * requires a path-separator boundary (or exact equality).
 */
function expandGlob(repoRoot, pattern) {
    // Containment with a separator boundary so prefix-sharing siblings
    // cannot slip through.
    const isInsideRepo = (abs) => abs === repoRoot || abs.startsWith(repoRoot + path.sep);
    // Only handle simple patterns: 'packages/*', 'apps/*', 'libs/*'
    // (single trailing star). Anything more complex is ignored.
    if (!pattern.includes('*')) {
        const abs = path.resolve(repoRoot, pattern);
        return isInsideRepo(abs) && fs.existsSync(abs) && fs.statSync(abs).isDirectory()
            ? [abs]
            : [];
    }
    const idx = pattern.indexOf('*');
    const prefix = pattern.slice(0, idx).replace(/\/$/, '');
    const baseAbs = path.resolve(repoRoot, prefix);
    if (!isInsideRepo(baseAbs))
        return [];
    if (!fs.existsSync(baseAbs))
        return [];
    let entries;
    try {
        entries = fs.readdirSync(baseAbs, { withFileTypes: true });
    }
    catch {
        return [];
    }
    return entries
        .filter(e => e.isDirectory())
        .map(e => path.join(baseAbs, e.name));
}
|
|
40
|
+
/**
 * Read the `packages:` list from pnpm-workspace.yaml. Returns null when the
 * file is absent, unreadable, fails to parse, or lacks the key.
 */
function readPnpmWorkspace(repoRoot) {
    const file = path.join(repoRoot, 'pnpm-workspace.yaml');
    if (!fs.existsSync(file)) {
        return null;
    }
    try {
        const doc = yaml.load(fs.readFileSync(file, 'utf8'));
        return doc?.packages ?? null;
    }
    catch {
        return null;
    }
}
|
|
52
|
+
/**
 * Read workspace globs from package.json#workspaces. Supports both the array
 * form and the object form ({ packages: [...] }); non-string entries are
 * dropped. Returns null when the file/field is absent or malformed.
 */
function readPackageJsonWorkspaces(repoRoot) {
    const file = path.join(repoRoot, 'package.json');
    if (!fs.existsSync(file)) {
        return null;
    }
    try {
        const pkg = JSON.parse(fs.readFileSync(file, 'utf8'));
        const ws = pkg.workspaces;
        if (!ws) {
            return null;
        }
        const onlyStrings = (xs) => xs.filter((w) => typeof w === 'string');
        if (Array.isArray(ws)) {
            return onlyStrings(ws);
        }
        if (typeof ws === 'object' && ws !== null && Array.isArray(ws.packages)) {
            return onlyStrings(ws.packages);
        }
        return null;
    }
    catch {
        return null;
    }
}
|
|
71
|
+
/**
 * Read project roots from nx.json. Supports the array form (returned as-is)
 * and the object form (values whose `root` is a string). Returns null when
 * the file/field is absent or malformed.
 */
function readNxProjects(repoRoot) {
    const file = path.join(repoRoot, 'nx.json');
    if (!fs.existsSync(file)) {
        return null;
    }
    try {
        const nx = JSON.parse(fs.readFileSync(file, 'utf8'));
        const projects = nx.projects;
        if (Array.isArray(projects)) {
            return projects;
        }
        if (projects && typeof projects === 'object') {
            return Object.values(projects)
                .map(p => p?.root)
                .filter((r) => typeof r === 'string');
        }
        return null;
    }
    catch {
        return null;
    }
}
|
|
90
|
+
/**
 * Discover workspace directories from the first matching declaration:
 * pnpm-workspace.yaml, then package.json#workspaces, then nx.json. Expanded
 * matches are de-duplicated, kept only when inside repoRoot, and sorted.
 * Falls back to [repoRoot] for single-workspace repos (no declaration, or
 * nothing matched).
 */
export function findWorkspaces(repoRoot) {
    // Resolve to absolute path (no symlink follow needed; repoRoot is canonical
    // by caller's contract).
    const repoAbs = path.resolve(repoRoot);
    const patterns = readPnpmWorkspace(repoAbs) ??
        readPackageJsonWorkspaces(repoAbs) ??
        readNxProjects(repoAbs);
    if (!patterns || patterns.length === 0) {
        return [repoAbs];
    }
    // Path-escape guard: a match must remain under repoAbs.
    const inRepo = (abs) => abs === repoAbs || abs.startsWith(repoAbs + path.sep);
    const found = new Set();
    for (const pattern of patterns) {
        for (const abs of expandGlob(repoAbs, pattern)) {
            if (inRepo(abs)) {
                found.add(abs);
            }
        }
    }
    return found.size === 0 ? [repoAbs] : Array.from(found).sort();
}
|
|
114
|
+
//# sourceMappingURL=monorepo.js.map
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
/** Policy switches for migration runs (see `migrate.policy` in the thin SKILL.md example). */
export interface PolicyConfig {
    allow_prod_in_ci: boolean;
    require_clean_git: boolean;
    require_manual_approval: boolean;
    require_dry_run_first: boolean;
}
/** Inputs enforcePolicy evaluates when deciding whether a run may proceed. */
export interface EnforcementContext {
    policy: PolicyConfig;
    env: string;
    repoRoot: string;
    ci: boolean;
    yesFlag: boolean;
    nonInteractive: boolean;
    gitHead: string;
    /** When set, override AUTOPILOT_TARGET_ENV check with this value (mainly for tests) */
    _targetEnvOverride?: string;
}
/** Discriminated on `ok`: pass with an audit trail, or fail with a reason code and message. */
export type EnforcementResult = {
    ok: true;
    decisions: string[];
} | {
    ok: false;
    reasonCode: string;
    message: string;
    decisions: string[];
};
export declare function enforcePolicy(ctx: EnforcementContext): EnforcementResult;
|
|
28
|
+
//# sourceMappingURL=policy-enforcer.d.ts.map
|