sdx-cli 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +266 -0
- package/bin/dev.js +11 -0
- package/bin/run.js +3 -0
- package/dist/commands/bootstrap/consumer.js +75 -0
- package/dist/commands/bootstrap/org.js +29 -0
- package/dist/commands/bootstrap/quick.js +82 -0
- package/dist/commands/codex/run.js +36 -0
- package/dist/commands/contracts/extract.js +22 -0
- package/dist/commands/docs/generate.js +22 -0
- package/dist/commands/handoff/draft.js +41 -0
- package/dist/commands/init.js +14 -0
- package/dist/commands/map/build.js +44 -0
- package/dist/commands/map/create.js +40 -0
- package/dist/commands/map/exclude.js +25 -0
- package/dist/commands/map/include.js +25 -0
- package/dist/commands/map/remove-override.js +25 -0
- package/dist/commands/map/status.js +30 -0
- package/dist/commands/migrate/artifacts.js +68 -0
- package/dist/commands/plan/review.js +60 -0
- package/dist/commands/prompt.js +62 -0
- package/dist/commands/publish/notices.js +98 -0
- package/dist/commands/publish/sync.js +67 -0
- package/dist/commands/publish/wiki.js +39 -0
- package/dist/commands/repo/add.js +29 -0
- package/dist/commands/repo/sync.js +30 -0
- package/dist/commands/service/propose.js +40 -0
- package/dist/commands/status.js +37 -0
- package/dist/commands/version.js +16 -0
- package/dist/index.js +10 -0
- package/dist/lib/artifactMigration.js +29 -0
- package/dist/lib/bootstrap.js +43 -0
- package/dist/lib/bootstrapConsumer.js +187 -0
- package/dist/lib/bootstrapQuick.js +27 -0
- package/dist/lib/codex.js +138 -0
- package/dist/lib/config.js +40 -0
- package/dist/lib/constants.js +26 -0
- package/dist/lib/contractChanges.js +347 -0
- package/dist/lib/contracts.js +93 -0
- package/dist/lib/db.js +41 -0
- package/dist/lib/docs.js +46 -0
- package/dist/lib/fileScan.js +34 -0
- package/dist/lib/fs.js +36 -0
- package/dist/lib/github.js +52 -0
- package/dist/lib/githubPublish.js +161 -0
- package/dist/lib/handoff.js +62 -0
- package/dist/lib/mapBuilder.js +182 -0
- package/dist/lib/paths.js +39 -0
- package/dist/lib/planReview.js +88 -0
- package/dist/lib/project.js +65 -0
- package/dist/lib/promptParser.js +88 -0
- package/dist/lib/publishContracts.js +876 -0
- package/dist/lib/repoRegistry.js +92 -0
- package/dist/lib/scope.js +110 -0
- package/dist/lib/serviceNoticePlan.js +130 -0
- package/dist/lib/serviceProposal.js +82 -0
- package/dist/lib/status.js +34 -0
- package/dist/lib/types.js +2 -0
- package/dist/lib/version.js +17 -0
- package/dist/lib/workflows.js +70 -0
- package/package.json +50 -0
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const node_path_1 = __importDefault(require("node:path"));
const core_1 = require("@oclif/core");
const fs_1 = require("../../lib/fs");
const mapBuilder_1 = require("../../lib/mapBuilder");
const project_1 = require("../../lib/project");
const repoRegistry_1 = require("../../lib/repoRegistry");
const scope_1 = require("../../lib/scope");
/**
 * `sdx map build <mapId>` — renders the service-map artifacts (JSON,
 * Markdown, Mermaid) for a named map from its scope manifest and the
 * repository registry stored in the project database.
 */
class MapBuildCommand extends core_1.Command {
    static description = 'Generate map artifacts (markdown, mermaid, json) for a named map';
    static args = {
        mapId: core_1.Args.string({ required: true, description: 'Map identifier' }),
    };
    async run() {
        const { args } = await this.parse(MapBuildCommand);
        const context = (0, project_1.loadProject)(process.cwd());
        try {
            // Load then immediately re-save the manifest so the on-disk copy
            // is normalized before artifacts are generated from it.
            const scope = (0, scope_1.loadScopeManifest)(args.mapId, context.cwd);
            (0, scope_1.saveScopeManifest)(scope, context.cwd);
            const repoMap = new Map((0, repoRegistry_1.listAllRepos)(context.db).map((repo) => [repo.name, repo]));
            const artifact = (0, mapBuilder_1.buildServiceMapArtifact)(args.mapId, scope, repoMap);
            const mapDir = node_path_1.default.join(context.cwd, 'maps', args.mapId);
            const jsonPath = node_path_1.default.join(mapDir, 'service-map.json');
            const mdPath = node_path_1.default.join(mapDir, 'service-map.md');
            const mmdPath = node_path_1.default.join(mapDir, 'service-map.mmd');
            (0, fs_1.writeJsonFile)(jsonPath, artifact);
            (0, fs_1.writeTextFile)(mdPath, (0, mapBuilder_1.renderServiceMapMarkdown)(artifact));
            (0, fs_1.writeTextFile)(mmdPath, (0, mapBuilder_1.renderServiceMapMermaid)(artifact));
            (0, project_1.recordRun)(context.db, 'map_build', 'ok', args.mapId, {
                repos: artifact.repos.length,
                nodes: artifact.nodes.length,
                edges: artifact.edges.length,
            });
            this.log(`Built map '${args.mapId}'.`);
            this.log(`JSON: ${jsonPath}`);
            this.log(`Markdown: ${mdPath}`);
            this.log(`Mermaid: ${mmdPath}`);
        }
        finally {
            // Fix: previously the handle leaked if any build/write step threw.
            context.db.close();
        }
    }
}
exports.default = MapBuildCommand;
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const node_path_1 = __importDefault(require("node:path"));
const core_1 = require("@oclif/core");
const project_1 = require("../../lib/project");
const scope_1 = require("../../lib/scope");
const repoRegistry_1 = require("../../lib/repoRegistry");
/**
 * `sdx map create <mapId> --org <org>` — creates a scope manifest for a new
 * map from repositories previously discovered for a GitHub organization.
 * Archived and forked repositories are pre-excluded.
 */
class MapCreateCommand extends core_1.Command {
    static description = 'Create a named service map scope from discovered repositories';
    static flags = {
        org: core_1.Flags.string({ char: 'o', required: true, description: 'GitHub organization to source repositories from' }),
    };
    static args = {
        mapId: core_1.Args.string({ required: true, description: 'Map identifier' }),
    };
    async run() {
        const { args, flags } = await this.parse(MapCreateCommand);
        const context = (0, project_1.loadProject)(process.cwd());
        try {
            const repos = (0, repoRegistry_1.listReposByOrg)(context.db, flags.org);
            if (repos.length === 0) {
                // Fix: this throw previously escaped before context.db.close(),
                // leaking the database handle.
                throw new Error(`No repositories found for org '${flags.org}'. Run 'sdx repo sync --org ${flags.org}' first.`);
            }
            (0, project_1.ensureMapDir)(args.mapId, context.cwd);
            const manifest = (0, scope_1.createScopeManifest)(args.mapId, flags.org, repos.map((repo) => repo.name), context.cwd);
            // Archived/fork repos start out excluded; dedupe and sort for a
            // stable manifest on disk.
            const archivedOrFork = repos.filter((repo) => repo.archived || repo.fork).map((repo) => repo.name);
            manifest.explicitExclude = [...new Set([...manifest.explicitExclude, ...archivedOrFork])].sort((a, b) => a.localeCompare(b));
            const scopePath = (0, scope_1.saveScopeManifest)(manifest, context.cwd);
            const readmePath = node_path_1.default.join(node_path_1.default.dirname(scopePath), 'README.md');
            this.log(`Map created: ${args.mapId}`);
            this.log(`Scope manifest: ${scopePath}`);
            this.log(`Map directory: ${node_path_1.default.dirname(scopePath)}`);
            this.log(`Create notes in: ${readmePath}`);
            (0, project_1.recordRun)(context.db, 'map_create', 'ok', args.mapId, { org: flags.org, discovered: repos.length });
        }
        finally {
            context.db.close();
        }
    }
}
exports.default = MapCreateCommand;
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const core_1 = require("@oclif/core");
const project_1 = require("../../lib/project");
const scope_1 = require("../../lib/scope");
/**
 * `sdx map exclude <mapId> <repos...>` — adds explicit exclude overrides to
 * a map's scope manifest. Repo names may be space- or comma-separated.
 */
class MapExcludeCommand extends core_1.Command {
    static description = 'Add repositories to explicit map exclude overrides';
    static args = {
        mapId: core_1.Args.string({ required: true, description: 'Map identifier' }),
        repos: core_1.Args.string({ required: true, multiple: true, description: 'Repository names (space- or comma-separated)' }),
    };
    async run() {
        const { args } = await this.parse(MapExcludeCommand);
        const context = (0, project_1.loadProject)(process.cwd());
        try {
            // Accept both "a b c" and "a,b,c"; drop empty fragments.
            const repos = args.repos
                .flatMap((entry) => entry.split(','))
                .map((entry) => entry.trim())
                .filter(Boolean);
            const updated = (0, scope_1.applyScopeChange)(args.mapId, 'exclude', repos, 'Manual exclude via CLI', context.cwd);
            (0, project_1.recordRun)(context.db, 'map_exclude', 'ok', args.mapId, { repos });
            this.log(`Updated '${args.mapId}'. Effective repos: ${updated.effective.length}`);
        }
        finally {
            // Fix: the handle previously leaked if applyScopeChange threw.
            context.db.close();
        }
    }
}
exports.default = MapExcludeCommand;
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const core_1 = require("@oclif/core");
const project_1 = require("../../lib/project");
const scope_1 = require("../../lib/scope");
/**
 * `sdx map include <mapId> <repos...>` — adds explicit include overrides to
 * a map's scope manifest. Repo names may be space- or comma-separated.
 */
class MapIncludeCommand extends core_1.Command {
    static description = 'Add repositories to explicit map include overrides';
    static args = {
        mapId: core_1.Args.string({ required: true, description: 'Map identifier' }),
        repos: core_1.Args.string({ required: true, multiple: true, description: 'Repository names (space- or comma-separated)' }),
    };
    async run() {
        const { args } = await this.parse(MapIncludeCommand);
        const context = (0, project_1.loadProject)(process.cwd());
        try {
            // Accept both "a b c" and "a,b,c"; drop empty fragments.
            const repos = args.repos
                .flatMap((entry) => entry.split(','))
                .map((entry) => entry.trim())
                .filter(Boolean);
            const updated = (0, scope_1.applyScopeChange)(args.mapId, 'include', repos, 'Manual include via CLI', context.cwd);
            (0, project_1.recordRun)(context.db, 'map_include', 'ok', args.mapId, { repos });
            this.log(`Updated '${args.mapId}'. Effective repos: ${updated.effective.length}`);
        }
        finally {
            // Fix: the handle previously leaked if applyScopeChange threw.
            context.db.close();
        }
    }
}
exports.default = MapIncludeCommand;
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const core_1 = require("@oclif/core");
const project_1 = require("../../lib/project");
const scope_1 = require("../../lib/scope");
/**
 * `sdx map remove-override <mapId> <repos...>` — removes include/exclude
 * overrides for the given repositories from a map's scope manifest.
 */
class MapRemoveOverrideCommand extends core_1.Command {
    static description = 'Remove include/exclude overrides for repositories in a map';
    static args = {
        mapId: core_1.Args.string({ required: true, description: 'Map identifier' }),
        repos: core_1.Args.string({ required: true, multiple: true, description: 'Repository names (space- or comma-separated)' }),
    };
    async run() {
        const { args } = await this.parse(MapRemoveOverrideCommand);
        const context = (0, project_1.loadProject)(process.cwd());
        try {
            // Accept both "a b c" and "a,b,c"; drop empty fragments.
            const repos = args.repos
                .flatMap((entry) => entry.split(','))
                .map((entry) => entry.trim())
                .filter(Boolean);
            const updated = (0, scope_1.applyScopeChange)(args.mapId, 'remove_override', repos, 'Override removed via CLI', context.cwd);
            (0, project_1.recordRun)(context.db, 'map_remove_override', 'ok', args.mapId, { repos });
            this.log(`Updated '${args.mapId}'. Effective repos: ${updated.effective.length}`);
        }
        finally {
            // Fix: the handle previously leaked if applyScopeChange threw.
            context.db.close();
        }
    }
}
exports.default = MapRemoveOverrideCommand;
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const core_1 = require("@oclif/core");
const project_1 = require("../../lib/project");
const scope_1 = require("../../lib/scope");
/**
 * `sdx map status <mapId>` — prints a read-only summary of a map's scope:
 * discovered repos, explicit include/exclude overrides, and effective set.
 */
class MapStatusCommand extends core_1.Command {
    static description = 'Show current map scope and overrides';
    static args = {
        mapId: core_1.Args.string({ required: true, description: 'Map identifier' }),
    };
    async run() {
        const { args } = await this.parse(MapStatusCommand);
        const context = (0, project_1.loadProject)(process.cwd());
        let scope;
        try {
            scope = (0, scope_1.loadScopeManifest)(args.mapId, context.cwd);
        }
        finally {
            // Fix: the handle previously leaked if loadScopeManifest threw.
            // The DB is only needed to open the project, so close it as soon
            // as the manifest has been read.
            context.db.close();
        }
        this.log(`Map: ${scope.mapId}`);
        this.log(`Org: ${scope.org}`);
        this.log(`Discovered: ${scope.discovered.length}`);
        this.log(`Explicit include: ${scope.explicitInclude.length}`);
        this.log(`Explicit exclude: ${scope.explicitExclude.length}`);
        this.log(`Effective: ${scope.effective.length}`);
        if (scope.explicitInclude.length > 0) {
            this.log(`Include overrides: ${scope.explicitInclude.join(', ')}`);
        }
        if (scope.explicitExclude.length > 0) {
            this.log(`Exclude overrides: ${scope.explicitExclude.join(', ')}`);
        }
    }
}
exports.default = MapStatusCommand;
|
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const node_fs_1 = __importDefault(require("node:fs"));
const node_path_1 = __importDefault(require("node:path"));
const core_1 = require("@oclif/core");
const artifactMigration_1 = require("../../lib/artifactMigration");
const constants_1 = require("../../lib/constants");
const project_1 = require("../../lib/project");
/**
 * `sdx migrate artifacts` — walks the maps/plans/handoffs/codex trees,
 * finds managed JSON artifacts whose schemaVersion is stale, and rewrites
 * them in place at the current SCHEMA_VERSION. Migration is best-effort:
 * unreadable or unmanaged files are skipped, never fatal.
 */
class MigrateArtifactsCommand extends core_1.Command {
    static description = 'Migrate artifact files to the current schema version';
    static flags = {
        from: core_1.Flags.string({ required: false, description: 'Optional source version hint' }),
    };
    async run() {
        const { flags } = await this.parse(MigrateArtifactsCommand);
        const context = (0, project_1.loadProject)(process.cwd());
        try {
            // Phase 1: collect candidate JSON files via an iterative DFS so
            // deep trees cannot blow the call stack.
            const targets = [];
            const roots = [node_path_1.default.join(context.cwd, 'maps'), node_path_1.default.join(context.cwd, 'plans'), node_path_1.default.join(context.cwd, 'handoffs'), node_path_1.default.join(context.cwd, 'codex')];
            for (const root of roots) {
                if (!node_fs_1.default.existsSync(root)) {
                    continue;
                }
                const stack = [root];
                while (stack.length > 0) {
                    const current = stack.pop();
                    for (const entry of node_fs_1.default.readdirSync(current, { withFileTypes: true })) {
                        const nextPath = node_path_1.default.join(current, entry.name);
                        if (entry.isDirectory()) {
                            stack.push(nextPath);
                            continue;
                        }
                        if (entry.name.endsWith('.json') && (0, artifactMigration_1.isManagedArtifactPath)(context.cwd, nextPath)) {
                            targets.push(nextPath);
                        }
                    }
                }
            }
            // Phase 2: migrate each candidate. Failures on individual files
            // (parse errors, permissions) are deliberately swallowed so one
            // bad artifact does not abort the whole migration.
            let migrated = 0;
            for (const filePath of targets) {
                try {
                    const data = JSON.parse(node_fs_1.default.readFileSync(filePath, 'utf8'));
                    if (!(0, artifactMigration_1.isManagedArtifactPayload)(data)) {
                        continue;
                    }
                    if (data.schemaVersion === constants_1.SCHEMA_VERSION) {
                        continue;
                    }
                    data.schemaVersion = constants_1.SCHEMA_VERSION;
                    data.migratedAt = new Date().toISOString();
                    if (flags.from) {
                        data.migratedFrom = flags.from;
                    }
                    node_fs_1.default.writeFileSync(filePath, `${JSON.stringify(data, null, 2)}\n`, 'utf8');
                    migrated += 1;
                }
                catch {
                    continue;
                }
            }
            (0, project_1.recordRun)(context.db, 'migrate_artifacts', 'ok', undefined, { migrated, scanned: targets.length });
            this.log(`Migrated ${migrated} artifact files to schema ${constants_1.SCHEMA_VERSION}.`);
        }
        finally {
            // Fix: previously the handle leaked if the scan or recordRun threw.
            context.db.close();
        }
    }
}
exports.default = MigrateArtifactsCommand;
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const node_path_1 = __importDefault(require("node:path"));
const core_1 = require("@oclif/core");
const fs_1 = require("../../lib/fs");
const mapBuilder_1 = require("../../lib/mapBuilder");
const planReview_1 = require("../../lib/planReview");
const project_1 = require("../../lib/project");
const repoRegistry_1 = require("../../lib/repoRegistry");
const scope_1 = require("../../lib/scope");
/**
 * `sdx plan review --map <id> --plan <file>` — reviews a proposed plan
 * against the map's service architecture. Prefers the previously built
 * service-map.json on disk; falls back to a freshly built artifact when
 * the file is absent, unreadable, or empty. Exits non-zero when required
 * NFR checks fail.
 */
class PlanReviewCommand extends core_1.Command {
    static description = 'Review a proposed service plan against current architecture';
    static flags = {
        map: core_1.Flags.string({ required: true, description: 'Map identifier' }),
        plan: core_1.Flags.string({ required: true, description: 'Path to plan file (markdown or text)' }),
    };
    static args = {
        target: core_1.Args.string({ required: false, description: 'Reserved positional arg' }),
    };
    async run() {
        const { flags } = await this.parse(PlanReviewCommand);
        const context = (0, project_1.loadProject)(process.cwd());
        try {
            const scope = (0, scope_1.loadScopeManifest)(flags.map, context.cwd);
            const repoMap = new Map((0, repoRegistry_1.listAllRepos)(context.db).map((repo) => [repo.name, repo]));
            const mapPath = node_path_1.default.join(context.cwd, 'maps', flags.map, 'service-map.json');
            const fallbackMap = (0, mapBuilder_1.buildServiceMapArtifact)(flags.map, scope, repoMap);
            let selectedMap = fallbackMap;
            if ((0, fs_1.fileExists)(mapPath)) {
                try {
                    const serviceMap = (0, fs_1.readJsonFile)(mapPath);
                    // Only trust the on-disk map when it actually has nodes.
                    if (serviceMap?.nodes?.length > 0) {
                        selectedMap = serviceMap;
                    }
                }
                catch {
                    // Corrupt on-disk map: fall back to the freshly built one.
                    selectedMap = fallbackMap;
                }
            }
            const review = (0, planReview_1.reviewPlan)(flags.map, flags.plan, scope, selectedMap);
            // Timestamped filenames keep successive reviews side by side.
            const stamp = new Date().toISOString().replace(/[:.]/g, '-');
            const outJsonPath = node_path_1.default.join(context.cwd, 'plans', 'reviews', `${stamp}-${flags.map}.json`);
            const outMdPath = node_path_1.default.join(context.cwd, 'plans', 'reviews', `${stamp}-${flags.map}.md`);
            (0, fs_1.writeJsonFile)(outJsonPath, review);
            (0, fs_1.writeTextFile)(outMdPath, (0, planReview_1.renderPlanReviewMarkdown)(review));
            (0, project_1.recordRun)(context.db, 'plan_review', review.accepted ? 'ok' : 'error', flags.map, {
                missingNfrs: review.missingNfrs,
                impactedRepos: review.impactedRepos,
                output: outMdPath,
            });
            this.log(`Plan review written to ${outMdPath}`);
            if (!review.accepted) {
                throw new Error(`Plan review failed required NFR checks: ${review.missingNfrs.join(', ')}`);
            }
        }
        finally {
            // Fix: previously the handle leaked if any review step threw.
            context.db.close();
        }
    }
}
exports.default = PlanReviewCommand;
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const core_1 = require("@oclif/core");
const promptParser_1 = require("../lib/promptParser");
const scope_1 = require("../lib/scope");
const workflows_1 = require("../lib/workflows");
const project_1 = require("../lib/project");
/**
 * `sdx prompt --map <id> [--apply] <instruction>` — deterministically parses
 * a natural-language instruction into a map action (include / exclude /
 * build / status), previews it, and optionally applies it.
 */
class PromptCommand extends core_1.Command {
    static description = 'Parse natural-language map instructions with preview and optional apply';
    static flags = {
        map: core_1.Flags.string({ required: true, description: 'Map identifier' }),
        apply: core_1.Flags.boolean({ required: false, default: false, description: 'Apply the parsed action' }),
    };
    static args = {
        instruction: core_1.Args.string({ required: true, description: 'Natural language instruction' }),
    };
    async run() {
        const { args, flags } = await this.parse(PromptCommand);
        const context = (0, project_1.loadProject)(process.cwd());
        try {
            const scope = (0, scope_1.loadScopeManifest)(flags.map, context.cwd);
            // Repo names the parser may match: everything the map has ever seen.
            const knownRepos = [...new Set([...scope.discovered, ...scope.explicitInclude, ...scope.explicitExclude])];
            const intent = (0, promptParser_1.parsePromptIntent)(args.instruction, knownRepos);
            this.log((0, promptParser_1.renderPromptPreview)(intent));
            if (!flags.apply) {
                this.log('Preview only. Re-run with --apply to persist changes.');
                return;
            }
            if (intent.action === 'include' || intent.action === 'exclude') {
                if (intent.repos.length === 0) {
                    // Fix: this throw previously escaped before context.db.close(),
                    // leaking the database handle.
                    throw new Error('No known repositories were parsed from prompt. Use explicit map commands or mention exact repo names.');
                }
                const updated = (0, scope_1.applyScopeChange)(flags.map, intent.action, intent.repos, `${intent.action} via prompt`, context.cwd);
                (0, project_1.recordRun)(context.db, 'prompt_apply', 'ok', flags.map, {
                    action: intent.action,
                    repos: intent.repos,
                    effectiveCount: updated.effective.length,
                });
                this.log(`Applied ${intent.action} to map '${flags.map}'.`);
                return;
            }
            if (intent.action === 'build') {
                const result = (0, workflows_1.buildMapArtifacts)(flags.map, context.db, context.cwd);
                (0, project_1.recordRun)(context.db, 'prompt_build', 'ok', flags.map, result);
                this.log(`Built map '${flags.map}' via prompt.`);
                return;
            }
            if (intent.action === 'status') {
                // Re-load so the status reflects the state on disk right now.
                const scope = (0, scope_1.loadScopeManifest)(flags.map, context.cwd);
                this.log(`Map ${scope.mapId}: effective=${scope.effective.length}, include=${scope.explicitInclude.length}, exclude=${scope.explicitExclude.length}`);
                (0, project_1.recordRun)(context.db, 'prompt_status', 'ok', flags.map);
                return;
            }
            (0, project_1.recordRun)(context.db, 'prompt_unknown', 'error', flags.map, { instruction: args.instruction });
            throw new Error('Unable to determine a deterministic action from prompt. Use explicit map commands.');
        }
        finally {
            // Single close point replaces the four per-branch close calls and
            // guarantees the handle is released on every exit path.
            context.db.close();
        }
    }
}
exports.default = PromptCommand;
|
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const core_1 = require("@oclif/core");
const publishContracts_1 = require("../../lib/publishContracts");
const project_1 = require("../../lib/project");
/**
 * `sdx publish notices` — publishes cross-repo Contract Change (or service)
 * notice PRs to target repositories. Requires a GitHub token (taken from the
 * env var named in project config) unless --dry-run is set. Exits non-zero
 * when any target fails.
 */
class PublishNoticesCommand extends core_1.Command {
    static description = 'Publish cross-repo spec-system Contract Change PRs to target repositories';
    static flags = {
        map: core_1.Flags.string({ required: true, description: 'Map identifier' }),
        'source-repo': core_1.Flags.string({
            required: true,
            description: 'Source repository name or owner/repo',
        }),
        'contract-change-id': core_1.Flags.string({
            required: false,
            description: 'Optional single contract change filter (example: CC-101)',
        }),
        'dry-run': core_1.Flags.boolean({
            default: false,
            description: 'Preview actions without writing branches, files, or PRs',
        }),
        'max-targets': core_1.Flags.integer({
            required: false,
            description: 'Maximum number of pending targets to publish in this run',
        }),
        'notice-type': core_1.Flags.string({
            required: false,
            default: 'contract',
            options: ['contract', 'service'],
            description: 'Notice generation mode',
        }),
        plan: core_1.Flags.string({
            required: false,
            description: 'Required for --notice-type service. Path to structured service plan markdown.',
        }),
        ready: core_1.Flags.boolean({
            default: false,
            description: 'Create ready PRs instead of draft PRs',
        }),
    };
    async run() {
        const { flags } = await this.parse(PublishNoticesCommand);
        const context = (0, project_1.loadProject)(process.cwd());
        try {
            // Token is only mandatory for real writes; dry runs can proceed
            // without one.
            const tokenEnv = context.config.github.tokenEnv;
            const token = process.env[tokenEnv];
            if (!flags['dry-run'] && !token) {
                throw new Error(`Missing ${tokenEnv}. Set it before running publish notices.`);
            }
            if (flags['notice-type'] === 'service' && !flags.plan) {
                throw new Error('`--plan <file>` is required when --notice-type service is used.');
            }
            const result = await (0, publishContracts_1.publishNotices)({
                db: context.db,
                mapId: flags.map,
                sourceRepo: flags['source-repo'],
                contractChangeId: flags['contract-change-id'],
                dryRun: flags['dry-run'],
                maxTargets: flags['max-targets'],
                noticeType: flags['notice-type'],
                planPath: flags.plan,
                ready: flags.ready,
                githubToken: token,
                cwd: context.cwd,
            });
            const status = result.totals.failed > 0 ? 'error' : 'ok';
            (0, project_1.recordRun)(context.db, 'publish_notices', status, flags.map, {
                sourceRepo: result.sourceRepo,
                contractChangeId: result.contractChangeId,
                noticeType: result.noticeType,
                planPath: result.planPath,
                dryRun: result.dryRun,
                created: result.totals.created,
                updated: result.totals.updated,
                skipped: result.totals.skipped,
                failed: result.totals.failed,
                sourceSyncPrUrls: result.sourceSyncPrUrls,
            });
            this.log(`Mode=${result.noticeType} Targets: created=${result.totals.created}, updated=${result.totals.updated}, skipped=${result.totals.skipped}, failed=${result.totals.failed}`);
            for (const contract of result.contracts) {
                for (const target of contract.targetResults) {
                    this.log(`${contract.contractChangeId} ${target.targetRepoInput}: ${target.status} (${target.stateBefore} -> ${target.stateAfter})${target.prUrl ? ` ${target.prUrl}` : ''}${target.reason ? ` - ${target.reason}` : ''}`);
                }
            }
            this.log(`Artifact JSON: ${result.artifactJsonPath}`);
            this.log(`Artifact Markdown: ${result.artifactMarkdownPath}`);
            if (result.failFastStoppedAt) {
                this.log(`Fail-fast stop: ${result.failFastStoppedAt}`);
            }
            if (result.sourceSyncPrUrls.length > 0) {
                this.log(`Source Sync PRs: ${result.sourceSyncPrUrls.join(', ')}`);
            }
            if (result.totals.failed > 0) {
                this.error(`publish notices completed with ${result.totals.failed} failed target(s).`, { exit: 1 });
            }
        }
        finally {
            // Fix: previously the validation throws and a rejected
            // publishNotices() left the database handle open.
            context.db.close();
        }
    }
}
exports.default = PublishNoticesCommand;
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const core_1 = require("@oclif/core");
const project_1 = require("../../lib/project");
const publishContracts_1 = require("../../lib/publishContracts");
/**
 * `sdx publish sync` — refreshes downstream PR lifecycle state and syncs
 * source Contract Change artifacts. Requires a GitHub token (taken from the
 * env var named in project config) unless --dry-run is set. Exits non-zero
 * when any target fails.
 */
class PublishSyncCommand extends core_1.Command {
    static description = 'Refresh downstream PR lifecycle state and sync source CC artifacts';
    static flags = {
        map: core_1.Flags.string({ required: true, description: 'Map identifier' }),
        'source-repo': core_1.Flags.string({
            required: true,
            description: 'Source repository name or owner/repo',
        }),
        'contract-change-id': core_1.Flags.string({
            required: false,
            description: 'Optional single contract change filter (example: CC-101)',
        }),
        'dry-run': core_1.Flags.boolean({
            default: false,
            description: 'Preview sync state transitions without writing source sync PRs',
        }),
    };
    async run() {
        const { flags } = await this.parse(PublishSyncCommand);
        const context = (0, project_1.loadProject)(process.cwd());
        try {
            // Token is only mandatory for real writes; dry runs can proceed
            // without one.
            const tokenEnv = context.config.github.tokenEnv;
            const token = process.env[tokenEnv];
            if (!flags['dry-run'] && !token) {
                throw new Error(`Missing ${tokenEnv}. Set it before running publish sync.`);
            }
            const result = await (0, publishContracts_1.publishSync)({
                db: context.db,
                mapId: flags.map,
                sourceRepo: flags['source-repo'],
                contractChangeId: flags['contract-change-id'],
                dryRun: flags['dry-run'],
                githubToken: token,
                cwd: context.cwd,
            });
            const status = result.totals.failed > 0 ? 'error' : 'ok';
            (0, project_1.recordRun)(context.db, 'publish_sync', status, flags.map, {
                sourceRepo: result.sourceRepo,
                contractChangeId: result.contractChangeId,
                dryRun: result.dryRun,
                updated: result.totals.updated,
                skipped: result.totals.skipped,
                failed: result.totals.failed,
                sourceSyncPrUrls: result.sourceSyncPrUrls,
            });
            this.log(`Targets: updated=${result.totals.updated}, skipped=${result.totals.skipped}, failed=${result.totals.failed}`);
            for (const contract of result.contracts) {
                for (const target of contract.targetResults) {
                    this.log(`${contract.contractChangeId} ${target.repo}: ${target.status} (${target.stateBefore} -> ${target.stateAfter})${target.prUrl ? ` ${target.prUrl}` : ''}${target.reason ? ` - ${target.reason}` : ''}`);
                }
            }
            this.log(`Artifact JSON: ${result.artifactJsonPath}`);
            this.log(`Artifact Markdown: ${result.artifactMarkdownPath}`);
            if (result.sourceSyncPrUrls.length > 0) {
                this.log(`Source Sync PRs: ${result.sourceSyncPrUrls.join(', ')}`);
            }
            if (result.totals.failed > 0) {
                this.error(`publish sync completed with ${result.totals.failed} failed target(s).`, { exit: 1 });
            }
        }
        finally {
            // Fix: previously the missing-token throw and a rejected
            // publishSync() left the database handle open.
            context.db.close();
        }
    }
}
exports.default = PublishSyncCommand;
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
const node_fs_1 = __importDefault(require("node:fs"));
|
|
7
|
+
const node_path_1 = __importDefault(require("node:path"));
|
|
8
|
+
const core_1 = require("@oclif/core");
|
|
9
|
+
const fs_1 = require("../../lib/fs");
|
|
10
|
+
const project_1 = require("../../lib/project");
|
|
11
|
+
class PublishWikiCommand extends core_1.Command {
|
|
12
|
+
static description = 'Export docs-first artifacts to a wiki-friendly directory';
|
|
13
|
+
static flags = {
|
|
14
|
+
map: core_1.Flags.string({ required: true, description: 'Map identifier' }),
|
|
15
|
+
};
|
|
16
|
+
async run() {
|
|
17
|
+
const { flags } = await this.parse(PublishWikiCommand);
|
|
18
|
+
const context = (0, project_1.loadProject)(process.cwd());
|
|
19
|
+
const sourceFiles = [
|
|
20
|
+
node_path_1.default.join(context.cwd, 'maps', flags.map, 'service-map.md'),
|
|
21
|
+
node_path_1.default.join(context.cwd, 'maps', flags.map, 'contracts.md'),
|
|
22
|
+
node_path_1.default.join(context.cwd, 'docs', 'architecture', `${flags.map}.md`),
|
|
23
|
+
];
|
|
24
|
+
const wikiDir = node_path_1.default.join(context.cwd, 'wiki-export', flags.map);
|
|
25
|
+
(0, fs_1.ensureDir)(wikiDir);
|
|
26
|
+
for (const source of sourceFiles) {
|
|
27
|
+
if (!node_fs_1.default.existsSync(source)) {
|
|
28
|
+
continue;
|
|
29
|
+
}
|
|
30
|
+
const target = node_path_1.default.join(wikiDir, node_path_1.default.basename(source));
|
|
31
|
+
const body = node_fs_1.default.readFileSync(source, 'utf8');
|
|
32
|
+
(0, fs_1.writeTextFile)(target, body);
|
|
33
|
+
}
|
|
34
|
+
(0, project_1.recordRun)(context.db, 'publish_wiki', 'ok', flags.map, { wikiDir });
|
|
35
|
+
context.db.close();
|
|
36
|
+
this.log(`Wiki export generated at ${wikiDir}`);
|
|
37
|
+
}
|
|
38
|
+
}
|
|
39
|
+
exports.default = PublishWikiCommand;
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
const node_fs_1 = __importDefault(require("node:fs"));
|
|
7
|
+
const core_1 = require("@oclif/core");
|
|
8
|
+
const project_1 = require("../../lib/project");
|
|
9
|
+
const repoRegistry_1 = require("../../lib/repoRegistry");
|
|
10
|
+
class RepoAddCommand extends core_1.Command {
|
|
11
|
+
static description = 'Register a local clone path for a repository';
|
|
12
|
+
static flags = {
|
|
13
|
+
name: core_1.Flags.string({ char: 'n', required: true, description: 'Repository name' }),
|
|
14
|
+
path: core_1.Flags.string({ char: 'p', required: true, description: 'Local path to repository clone' }),
|
|
15
|
+
org: core_1.Flags.string({ char: 'o', required: false, description: 'Organization override' }),
|
|
16
|
+
};
|
|
17
|
+
async run() {
|
|
18
|
+
const { flags } = await this.parse(RepoAddCommand);
|
|
19
|
+
const context = (0, project_1.loadProject)(process.cwd());
|
|
20
|
+
if (!node_fs_1.default.existsSync(flags.path)) {
|
|
21
|
+
throw new Error(`Local path does not exist: ${flags.path}`);
|
|
22
|
+
}
|
|
23
|
+
const repo = (0, repoRegistry_1.setLocalRepoPath)(context.db, flags.name, flags.path, flags.org ?? context.config.github.defaultOrg);
|
|
24
|
+
(0, project_1.recordRun)(context.db, 'repo_add', 'ok', undefined, { name: repo.name, localPath: repo.localPath });
|
|
25
|
+
context.db.close();
|
|
26
|
+
this.log(`Registered local repo '${repo.name}' at ${repo.localPath}`);
|
|
27
|
+
}
|
|
28
|
+
}
|
|
29
|
+
exports.default = RepoAddCommand;
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
const core_1 = require("@oclif/core");
|
|
4
|
+
const github_1 = require("../../lib/github");
|
|
5
|
+
const project_1 = require("../../lib/project");
|
|
6
|
+
const repoRegistry_1 = require("../../lib/repoRegistry");
|
|
7
|
+
class RepoSyncCommand extends core_1.Command {
|
|
8
|
+
static description = 'Sync repository inventory from a GitHub organization';
|
|
9
|
+
static flags = {
|
|
10
|
+
org: core_1.Flags.string({ char: 'o', required: false, description: 'GitHub org (defaults to config.github.defaultOrg)' }),
|
|
11
|
+
};
|
|
12
|
+
async run() {
|
|
13
|
+
const { flags } = await this.parse(RepoSyncCommand);
|
|
14
|
+
const context = (0, project_1.loadProject)(process.cwd());
|
|
15
|
+
const org = flags.org ?? context.config.github.defaultOrg;
|
|
16
|
+
if (!org) {
|
|
17
|
+
throw new Error('No organization provided. Use --org or set one via `sdx bootstrap org`.');
|
|
18
|
+
}
|
|
19
|
+
const token = process.env[context.config.github.tokenEnv];
|
|
20
|
+
if (!token) {
|
|
21
|
+
throw new Error(`Missing GitHub token. Set ${context.config.github.tokenEnv}.`);
|
|
22
|
+
}
|
|
23
|
+
const repos = await (0, github_1.fetchOrgRepos)(org, token);
|
|
24
|
+
(0, repoRegistry_1.upsertRepos)(context.db, repos);
|
|
25
|
+
(0, project_1.recordRun)(context.db, 'repo_sync', 'ok', undefined, { org, count: repos.length });
|
|
26
|
+
context.db.close();
|
|
27
|
+
this.log(`Synced ${repos.length} repositories for org '${org}'.`);
|
|
28
|
+
}
|
|
29
|
+
}
|
|
30
|
+
exports.default = RepoSyncCommand;
|