sdx-cli 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +266 -0
- package/bin/dev.js +11 -0
- package/bin/run.js +3 -0
- package/dist/commands/bootstrap/consumer.js +75 -0
- package/dist/commands/bootstrap/org.js +29 -0
- package/dist/commands/bootstrap/quick.js +82 -0
- package/dist/commands/codex/run.js +36 -0
- package/dist/commands/contracts/extract.js +22 -0
- package/dist/commands/docs/generate.js +22 -0
- package/dist/commands/handoff/draft.js +41 -0
- package/dist/commands/init.js +14 -0
- package/dist/commands/map/build.js +44 -0
- package/dist/commands/map/create.js +40 -0
- package/dist/commands/map/exclude.js +25 -0
- package/dist/commands/map/include.js +25 -0
- package/dist/commands/map/remove-override.js +25 -0
- package/dist/commands/map/status.js +30 -0
- package/dist/commands/migrate/artifacts.js +68 -0
- package/dist/commands/plan/review.js +60 -0
- package/dist/commands/prompt.js +62 -0
- package/dist/commands/publish/notices.js +98 -0
- package/dist/commands/publish/sync.js +67 -0
- package/dist/commands/publish/wiki.js +39 -0
- package/dist/commands/repo/add.js +29 -0
- package/dist/commands/repo/sync.js +30 -0
- package/dist/commands/service/propose.js +40 -0
- package/dist/commands/status.js +37 -0
- package/dist/commands/version.js +16 -0
- package/dist/index.js +10 -0
- package/dist/lib/artifactMigration.js +29 -0
- package/dist/lib/bootstrap.js +43 -0
- package/dist/lib/bootstrapConsumer.js +187 -0
- package/dist/lib/bootstrapQuick.js +27 -0
- package/dist/lib/codex.js +138 -0
- package/dist/lib/config.js +40 -0
- package/dist/lib/constants.js +26 -0
- package/dist/lib/contractChanges.js +347 -0
- package/dist/lib/contracts.js +93 -0
- package/dist/lib/db.js +41 -0
- package/dist/lib/docs.js +46 -0
- package/dist/lib/fileScan.js +34 -0
- package/dist/lib/fs.js +36 -0
- package/dist/lib/github.js +52 -0
- package/dist/lib/githubPublish.js +161 -0
- package/dist/lib/handoff.js +62 -0
- package/dist/lib/mapBuilder.js +182 -0
- package/dist/lib/paths.js +39 -0
- package/dist/lib/planReview.js +88 -0
- package/dist/lib/project.js +65 -0
- package/dist/lib/promptParser.js +88 -0
- package/dist/lib/publishContracts.js +876 -0
- package/dist/lib/repoRegistry.js +92 -0
- package/dist/lib/scope.js +110 -0
- package/dist/lib/serviceNoticePlan.js +130 -0
- package/dist/lib/serviceProposal.js +82 -0
- package/dist/lib/status.js +34 -0
- package/dist/lib/types.js +2 -0
- package/dist/lib/version.js +17 -0
- package/dist/lib/workflows.js +70 -0
- package/package.json +50 -0
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.upsertRepos = upsertRepos;
|
|
7
|
+
exports.setLocalRepoPath = setLocalRepoPath;
|
|
8
|
+
exports.getRepoByName = getRepoByName;
|
|
9
|
+
exports.listReposByOrg = listReposByOrg;
|
|
10
|
+
exports.listAllRepos = listAllRepos;
|
|
11
|
+
const node_path_1 = __importDefault(require("node:path"));
|
|
12
|
+
/**
 * Map a snake_case `repo_registry` row to the camelCase record shape used
 * throughout the CLI. SQLite 0/1 integers become booleans, and nullable
 * columns surface as `undefined` instead of `null`.
 */
function toRecord(row) {
    const orUndefined = (value) => value ?? undefined;
    return {
        name: row.name,
        fullName: row.full_name,
        org: row.org,
        defaultBranch: orUndefined(row.default_branch),
        archived: Boolean(row.archived),
        fork: Boolean(row.fork),
        htmlUrl: orUndefined(row.html_url),
        localPath: orUndefined(row.local_path),
        source: row.source,
        lastSyncedAt: orUndefined(row.last_synced_at),
    };
}
|
26
|
+
/**
 * Insert or update repository rows in `repo_registry`, keyed by `name`.
 *
 * Conflict semantics:
 *  - `local_path` is never clobbered: COALESCE keeps the already-stored
 *    local path over the incoming one.
 *  - `source` is promoted to 'hybrid' when a row that already has a local
 *    path is re-synced from GitHub; otherwise the incoming source wins.
 * All rows are written inside a single better-sqlite3 transaction, so a
 * failing row rolls back the whole batch.
 *
 * @param db    better-sqlite3 database handle
 * @param repos array of repo records in the camelCase shape produced by toRecord
 */
function upsertRepos(db, repos) {
    const stmt = db.prepare(`
    INSERT INTO repo_registry (name, full_name, org, default_branch, archived, fork, html_url, local_path, source, last_synced_at)
    VALUES (@name, @full_name, @org, @default_branch, @archived, @fork, @html_url, @local_path, @source, @last_synced_at)
    ON CONFLICT(name) DO UPDATE SET
      full_name=excluded.full_name,
      org=excluded.org,
      default_branch=excluded.default_branch,
      archived=excluded.archived,
      fork=excluded.fork,
      html_url=excluded.html_url,
      local_path=COALESCE(repo_registry.local_path, excluded.local_path),
      source=CASE
        WHEN repo_registry.local_path IS NOT NULL AND excluded.source='github' THEN 'hybrid'
        ELSE excluded.source
      END,
      last_synced_at=excluded.last_synced_at
  `);
    const insertMany = db.transaction((rows) => {
        for (const repo of rows) {
            stmt.run({
                name: repo.name,
                full_name: repo.fullName,
                org: repo.org,
                default_branch: repo.defaultBranch ?? null,
                // SQLite has no boolean type; store flags as 1/0.
                archived: repo.archived ? 1 : 0,
                fork: repo.fork ? 1 : 0,
                html_url: repo.htmlUrl ?? null,
                local_path: repo.localPath ?? null,
                source: repo.source,
                last_synced_at: repo.lastSyncedAt ?? null,
            });
        }
    });
    insertMany(repos);
}
|
62
|
+
/**
 * Record the local filesystem checkout path for a repository, inserting a
 * registry row if none exists yet.
 *
 * Existing rows: `local_path` is replaced and `source` is promoted
 * 'github' -> 'hybrid'; any other source becomes 'local'.
 * NOTE(review): this demotes an existing 'hybrid' row back to 'local',
 * which looks inconsistent with upsertRepos' hybrid-preserving CASE —
 * confirm whether that is intended.
 * New rows are inserted with source='local' and a synthesized full_name.
 *
 * @param org optional org used for the synthesized full_name/org of new
 *            rows; falls back to the bare name and org 'local'.
 * @returns the refreshed record via getRepoByName.
 */
function setLocalRepoPath(db, name, localPath, org) {
    // Store an absolute path so later lookups are independent of cwd.
    const normalized = node_path_1.default.resolve(localPath);
    const existing = db
        .prepare('SELECT * FROM repo_registry WHERE name = ?')
        .get(name);
    if (existing) {
        db.prepare(`UPDATE repo_registry SET local_path = ?, source = CASE WHEN source='github' THEN 'hybrid' ELSE 'local' END WHERE name = ?`).run(normalized, name);
    }
    else {
        db.prepare(`INSERT INTO repo_registry (name, full_name, org, archived, fork, local_path, source)
      VALUES (?, ?, ?, 0, 0, ?, 'local')`).run(name, org ? `${org}/${name}` : name, org ?? 'local', normalized);
    }
    return getRepoByName(db, name);
}
|
76
|
+
/**
 * Fetch a single repository record by name.
 * @throws {Error} when no registry row with that name exists.
 */
function getRepoByName(db, name) {
    const stmt = db.prepare('SELECT * FROM repo_registry WHERE name = ?');
    const row = stmt.get(name);
    if (row === undefined) {
        throw new Error(`Repository not found: ${name}`);
    }
    return toRecord(row);
}
|
83
|
+
/**
 * List all registry records belonging to one org, ordered by repo name.
 */
function listReposByOrg(db, org) {
    const query = 'SELECT * FROM repo_registry WHERE org = ? ORDER BY name';
    return db.prepare(query).all(org).map((row) => toRecord(row));
}
|
89
|
+
/**
 * List every registry record, ordered by org then repo name.
 */
function listAllRepos(db) {
    const query = 'SELECT * FROM repo_registry ORDER BY org, name';
    const records = [];
    for (const row of db.prepare(query).all()) {
        records.push(toRecord(row));
    }
    return records;
}
|
@@ -0,0 +1,110 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.computeEffectiveScope = computeEffectiveScope;
|
|
7
|
+
exports.loadScopeManifest = loadScopeManifest;
|
|
8
|
+
exports.saveScopeManifest = saveScopeManifest;
|
|
9
|
+
exports.createScopeManifest = createScopeManifest;
|
|
10
|
+
exports.applyScopeChange = applyScopeChange;
|
|
11
|
+
const node_path_1 = __importDefault(require("node:path"));
|
|
12
|
+
const constants_1 = require("./constants");
|
|
13
|
+
const fs_1 = require("./fs");
|
|
14
|
+
const paths_1 = require("./paths");
|
|
15
|
+
/**
 * Dedupe a string array and return it locale-sorted ascending.
 */
function uniqueSorted(values) {
    const deduped = Array.from(new Set(values));
    deduped.sort((left, right) => left.localeCompare(right));
    return deduped;
}
|
18
|
+
/**
 * Compute the effective repo scope: discovered repos plus explicit
 * includes, minus explicit excludes, deduped and locale-sorted.
 * @throws {Error} if any repo appears in both include and exclude.
 */
function computeEffectiveScope(discovered, explicitInclude, explicitExclude) {
    const excluded = new Set(explicitExclude);
    const conflicts = explicitInclude.filter((repo) => excluded.has(repo));
    if (conflicts.length > 0) {
        throw new Error(`Invalid scope overrides. Same repo appears in include and exclude: ${conflicts.join(', ')}`);
    }
    const effective = new Set(discovered);
    explicitInclude.forEach((repo) => effective.add(repo));
    explicitExclude.forEach((repo) => effective.delete(repo));
    return [...effective].sort((a, b) => a.localeCompare(b));
}
|
32
|
+
/**
 * Read the scope manifest (scope.json) for a map from disk.
 * @throws {Error} with a remediation hint when the manifest is missing.
 */
function loadScopeManifest(mapId, cwd = process.cwd()) {
    const mapDir = (0, paths_1.getMapDir)(mapId, cwd);
    const scopePath = node_path_1.default.join(mapDir, 'scope.json');
    if ((0, fs_1.fileExists)(scopePath)) {
        return (0, fs_1.readJsonFile)(scopePath);
    }
    throw new Error(`Scope manifest not found for map '${mapId}'. Run 'sdx map create ${mapId} --org <org>' first.`);
}
|
39
|
+
/**
 * Normalize and persist a scope manifest, then regenerate the map's
 * scope-change-log markdown from the manifest's full history.
 *
 * NOTE: mutates `manifest` in place before writing — refreshes
 * `generatedAt`, canonicalizes the three repo lists (deduped, sorted),
 * and recomputes `effective`.
 *
 * @returns the path of the written scope.json
 */
function saveScopeManifest(manifest, cwd = process.cwd()) {
    const mapDir = (0, paths_1.getMapDir)(manifest.mapId, cwd);
    const scopePath = node_path_1.default.join(mapDir, 'scope.json');
    manifest.generatedAt = new Date().toISOString();
    // Keep stored lists canonical so diffs of scope.json stay stable.
    manifest.discovered = uniqueSorted(manifest.discovered);
    manifest.explicitInclude = uniqueSorted(manifest.explicitInclude);
    manifest.explicitExclude = uniqueSorted(manifest.explicitExclude);
    manifest.effective = computeEffectiveScope(manifest.discovered, manifest.explicitInclude, manifest.explicitExclude);
    (0, fs_1.writeJsonFile)(scopePath, manifest);
    // Rebuild the human-readable change log from scratch on every save.
    const logPath = node_path_1.default.join(mapDir, 'scope-change-log.md');
    const rows = [
        '# Scope Change Log',
        '',
        '| Time | Action | Repositories | Note |',
        '|---|---|---|---|',
        ...manifest.history.map((entry) => {
            const repos = entry.repos.length > 0 ? entry.repos.join(', ') : '-';
            const note = entry.note ?? '-';
            return `| ${entry.at} | ${entry.action} | ${repos} | ${note} |`;
        }),
        '',
    ];
    (0, fs_1.writeTextFile)(logPath, rows.join('\n'));
    return scopePath;
}
|
64
|
+
/**
 * Build a fresh scope manifest for a newly created map.
 *
 * Repos whose names end in '-fork' or '-archive' are excluded by default;
 * `effective` is derived from discovered/include/exclude before returning.
 * `cwd` is accepted for signature parity with the other scope helpers but
 * is not used here.
 */
function createScopeManifest(mapId, org, discovered, cwd = process.cwd()) {
    const isDefaultExcluded = (repo) => repo.endsWith('-fork') || repo.endsWith('-archive');
    const manifest = {
        schemaVersion: constants_1.SCHEMA_VERSION,
        generatedAt: new Date().toISOString(),
        mapId,
        org,
        discovered: uniqueSorted(discovered),
        explicitInclude: [],
        explicitExclude: uniqueSorted(discovered.filter(isDefaultExcluded)),
        effective: [],
        history: [
            {
                at: new Date().toISOString(),
                action: 'create',
                repos: uniqueSorted(discovered),
                note: 'Map created from discovered repositories',
            },
        ],
    };
    manifest.effective = computeEffectiveScope(manifest.discovered, manifest.explicitInclude, manifest.explicitExclude);
    return manifest;
}
|
87
|
+
/**
 * Apply an include/exclude/remove_override change to a map's scope,
 * append it to the history, and persist the manifest.
 *
 * An 'include' clears any conflicting excludes (and vice versa);
 * 'remove_override' drops the repos from both override lists. Unknown
 * actions still get a history entry but change no lists.
 *
 * @returns the updated (and saved) manifest.
 */
function applyScopeChange(mapId, action, repos, note, cwd = process.cwd()) {
    const manifest = loadScopeManifest(mapId, cwd);
    const cleanRepos = uniqueSorted(repos);
    const without = (list) => list.filter((repo) => !cleanRepos.includes(repo));
    switch (action) {
        case 'include':
            manifest.explicitInclude = uniqueSorted([...manifest.explicitInclude, ...cleanRepos]);
            manifest.explicitExclude = without(manifest.explicitExclude);
            break;
        case 'exclude':
            manifest.explicitExclude = uniqueSorted([...manifest.explicitExclude, ...cleanRepos]);
            manifest.explicitInclude = without(manifest.explicitInclude);
            break;
        case 'remove_override':
            manifest.explicitInclude = without(manifest.explicitInclude);
            manifest.explicitExclude = without(manifest.explicitExclude);
            break;
    }
    manifest.history.push({
        at: new Date().toISOString(),
        action,
        repos: cleanRepos,
        note,
    });
    saveScopeManifest(manifest, cwd);
    return manifest;
}
|
@@ -0,0 +1,130 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.parseServiceNoticePlan = parseServiceNoticePlan;
|
|
4
|
+
/**
 * Lowercase a string and collapse every run of non-alphanumerics into a
 * single '-', trimming leading/trailing dashes.
 */
function slugify(value) {
    const lowered = value.toLowerCase();
    const dashed = lowered.replace(/[^a-z0-9]+/g, '-');
    return dashed.replace(/^-+/, '').replace(/-+$/, '');
}
|
10
|
+
/**
 * Split one markdown table row into trimmed cell values, stripping at most
 * one leading and one trailing pipe.
 */
function splitMarkdownTableRow(value) {
    let body = value.trim();
    if (body.startsWith('|')) {
        body = body.slice(1);
    }
    if (body.endsWith('|')) {
        body = body.slice(0, -1);
    }
    return body.split('|').map((cell) => cell.trim());
}
|
18
|
+
/**
 * Split markdown into a map of level-2 heading -> trimmed body text.
 * Text before the first '## ' heading is discarded; deeper/shallower
 * headings are treated as ordinary body lines.
 */
function parseSections(markdown) {
    const sections = {};
    let currentHeading = '';
    let collected = [];
    const commit = () => {
        if (currentHeading) {
            sections[currentHeading] = collected.join('\n').trim();
            collected = [];
        }
    };
    for (const line of markdown.split(/\r?\n/)) {
        const match = /^##\s+(.+)$/.exec(line);
        if (match) {
            commit();
            currentHeading = match[1].trim();
        }
        else if (currentHeading) {
            collected.push(line);
        }
    }
    commit();
    return sections;
}
|
44
|
+
/**
 * Parse the "Target Repositories" markdown table into target objects.
 *
 * Expects a header row and a separator row followed by data rows, and
 * requires the (case-insensitive) columns repo/owner/context. Rows where
 * all three values are blank are skipped; missing cells read as ''.
 * @throws {Error} on a malformed table, missing columns, or zero targets.
 */
function parseTargets(section) {
    const tableLines = [];
    for (const raw of section.split(/\r?\n/)) {
        const line = raw.trim();
        if (line.startsWith('|')) {
            tableLines.push(line);
        }
    }
    if (tableLines.length < 2) {
        throw new Error('Service plan section "Target Repositories" must include a markdown table with header and separator.');
    }
    const header = splitMarkdownTableRow(tableLines[0]).map((value) => value.toLowerCase());
    const missing = ['repo', 'owner', 'context'].filter((name) => !header.includes(name));
    if (missing.length > 0) {
        throw new Error(`Service plan target table is missing required columns: ${missing.join(', ')}`);
    }
    const targets = [];
    for (const line of tableLines.slice(2)) {
        const cells = splitMarkdownTableRow(line);
        const byColumn = {};
        header.forEach((column, index) => {
            byColumn[column] = cells[index] ?? '';
        });
        const repo = byColumn.repo.trim();
        const owner = byColumn.owner.trim();
        const context = byColumn.context.trim();
        if (repo || owner || context) {
            targets.push({ repo, owner, context });
        }
    }
    if (targets.length === 0) {
        throw new Error('Service plan target table must include at least one target row.');
    }
    return targets;
}
|
79
|
+
/**
 * Extract the service identity from a "Service Identity" bullet list.
 *
 * Bullets shaped like "- Key: value" are collected into a map (keys are
 * lowercased, spaces -> underscores). Name comes from name/service_name,
 * id from service_id/id; either can be derived from the other (the id via
 * slugify). Non-bullet lines are ignored.
 * @throws {Error} when neither a name nor a service id can be found.
 */
function parseIdentity(section) {
    const bulletPattern = /^[-*]\s*([a-zA-Z0-9_ -]+):\s*(.+)$/;
    const fields = {};
    for (const raw of section.split(/\r?\n/)) {
        const line = raw.trim();
        if (!line) {
            continue;
        }
        const match = bulletPattern.exec(line);
        if (match) {
            const key = match[1].trim().toLowerCase().replace(/\s+/g, '_');
            fields[key] = match[2].trim();
        }
    }
    const rawName = fields.name ?? fields.service_name ?? '';
    const rawServiceId = fields.service_id ?? fields.id ?? '';
    if (!rawName && !rawServiceId) {
        throw new Error('Service plan "Service Identity" section must include bullet keys for at least "name" or "service_id".');
    }
    const name = rawName || rawServiceId;
    const serviceId = rawServiceId || slugify(rawName);
    if (!serviceId) {
        throw new Error('Unable to derive service_id from "Service Identity" section.');
    }
    return { serviceId, name };
}
|
105
|
+
/**
 * Return the trimmed body of a required section, or throw with the
 * section heading when it is absent or blank.
 */
function requireSection(sections, key) {
    const value = sections[key];
    if (value === undefined || value.trim().length === 0) {
        throw new Error(`Service plan is missing required section: "## ${key}"`);
    }
    return value.trim();
}
|
112
|
+
/**
 * Parse a full service notice plan document into its structured form.
 *
 * Validates the required sections in a fixed order (Service Identity,
 * Summary, Contract Surface, Compatibility and Migration Guidance,
 * Target Repositories) so error messages are deterministic; the optional
 * "Change Details" section falls back to boilerplate text.
 */
function parseServiceNoticePlan(markdown) {
    const sections = parseSections(markdown);
    const identity = parseIdentity(requireSection(sections, 'Service Identity'));
    const summary = requireSection(sections, 'Summary');
    const contractSurface = requireSection(sections, 'Contract Surface');
    const migrationGuidance = requireSection(sections, 'Compatibility and Migration Guidance');
    const targets = parseTargets(requireSection(sections, 'Target Repositories'));
    const changeDetails = sections['Change Details']?.trim() || 'See service plan for implementation details.';
    const { serviceId, name } = identity;
    return {
        serviceId,
        name,
        summary,
        contractSurface,
        changeDetails,
        migrationGuidance,
        targets,
    };
}
|
@@ -0,0 +1,82 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.proposeService = proposeService;
|
|
7
|
+
exports.renderServiceProposalMarkdown = renderServiceProposalMarkdown;
|
|
8
|
+
const node_fs_1 = __importDefault(require("node:fs"));
|
|
9
|
+
const node_path_1 = __importDefault(require("node:path"));
|
|
10
|
+
const constants_1 = require("./constants");
|
|
11
|
+
/**
 * Draft a service proposal from a markdown brief.
 *
 * The proposed name comes from the brief's first level-1 heading, falling
 * back to the brief's file name. Integration candidates are in-scope repos
 * mentioned (case-insensitively) in the brief text, narrowed to repos seen
 * among the first 50 extracted contracts when that intersection is
 * non-empty.
 */
function proposeService(mapId, briefPath, scope, contracts) {
    const absolute = node_path_1.default.resolve(briefPath);
    const briefText = node_fs_1.default.readFileSync(absolute, 'utf8');
    const headingMatch = briefText.match(/^#\s+(.+)$/m);
    const fallbackName = node_path_1.default.basename(briefPath, node_path_1.default.extname(briefPath));
    const proposedServiceName = headingMatch?.[1]?.trim() || fallbackName;
    const briefLower = briefText.toLowerCase();
    const integrationCandidates = scope.effective.filter((repo) => briefLower.includes(repo.toLowerCase()));
    const contractHeavyRepos = new Set();
    for (const contract of contracts.slice(0, 50)) {
        contractHeavyRepos.add(contract.repo);
    }
    const likelyDependencies = integrationCandidates.filter((repo) => contractHeavyRepos.has(repo));
    const options = [
        {
            name: 'Option A: Isolated service with explicit contracts',
            summary: 'Create a standalone service with versioned API contracts and async event boundaries.',
            tradeoffs: [
                'Higher initial setup cost, cleaner ownership boundaries.',
                'Simpler rollback and blast-radius control.',
            ],
        },
        {
            name: 'Option B: Incremental extension of existing service surface',
            summary: 'Ship initial capability through an existing service boundary before splitting out.',
            tradeoffs: [
                'Faster initial delivery, but weaker long-term modularity.',
                'Requires careful migration and deprecation sequencing.',
            ],
        },
    ];
    return {
        schemaVersion: '1.0.0',
        generatedAt: new Date().toISOString(),
        mapId,
        briefPath: absolute,
        proposedServiceName,
        options,
        integrationCandidates: likelyDependencies.length > 0 ? likelyDependencies : integrationCandidates,
        coverageTags: [...constants_1.PRIMER_DIMENSIONS],
    };
}
|
48
|
+
/**
 * Render a service proposal as a markdown document, ending with a
 * trailing newline.
 */
function renderServiceProposalMarkdown(proposal) {
    const out = [];
    out.push(`# Service Proposal: ${proposal.proposedServiceName}`, '');
    out.push(`- Generated: ${proposal.generatedAt}`);
    out.push(`- Map: ${proposal.mapId}`);
    out.push(`- Brief: ${proposal.briefPath}`, '');
    out.push('## Candidate Integration Repositories', '');
    if (proposal.integrationCandidates.length > 0) {
        for (const repo of proposal.integrationCandidates) {
            out.push(`- ${repo}`);
        }
    }
    else {
        out.push('- none detected from brief text');
    }
    out.push('', '## Architecture Options', '');
    for (const option of proposal.options) {
        out.push(`### ${option.name}`, '', option.summary, '');
        for (const tradeoff of option.tradeoffs) {
            out.push(`- ${tradeoff}`);
        }
        out.push('');
    }
    out.push('## Coverage Tags', '');
    for (const tag of proposal.coverageTags) {
        out.push(`- ${tag}`);
    }
    out.push('');
    return `${out.join('\n')}\n`;
}
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.getStatusSummary = getStatusSummary;
|
|
7
|
+
const node_path_1 = __importDefault(require("node:path"));
|
|
8
|
+
const node_fs_1 = __importDefault(require("node:fs"));
|
|
9
|
+
const fs_1 = require("./fs");
|
|
10
|
+
const paths_1 = require("./paths");
|
|
11
|
+
/**
 * Summarize workspace state: repo/run counts from the database plus, for
 * each map id seen in run_log, whether its scope and service-map artifacts
 * exist on disk (updatedAt is scope.json's mtime when present).
 */
function getStatusSummary(db, cwd = process.cwd()) {
    const repoCount = db.prepare('SELECT COUNT(*) AS count FROM repo_registry').get().count;
    const runCount = db.prepare('SELECT COUNT(*) AS count FROM run_log').get().count;
    const mapIds = db
        .prepare("SELECT DISTINCT map_id FROM run_log WHERE map_id IS NOT NULL AND map_id != '' ORDER BY map_id")
        .all()
        .map((row) => row.map_id);
    const maps = [];
    for (const mapId of mapIds) {
        const mapDir = (0, paths_1.getMapDir)(mapId, cwd);
        const scopePath = node_path_1.default.join(mapDir, 'scope.json');
        const hasScope = (0, fs_1.fileExists)(scopePath);
        maps.push({
            mapId,
            hasScope,
            hasServiceMap: (0, fs_1.fileExists)(node_path_1.default.join(mapDir, 'service-map.json')),
            updatedAt: hasScope ? node_fs_1.default.statSync(scopePath).mtime.toISOString() : undefined,
        });
    }
    return {
        initialized: true,
        repoCount,
        runCount,
        maps,
    };
}
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.getCliPackageVersion = getCliPackageVersion;
|
|
7
|
+
const node_fs_1 = __importDefault(require("node:fs"));
|
|
8
|
+
const node_path_1 = __importDefault(require("node:path"));
|
|
9
|
+
/**
 * Read the CLI's own version from the package.json two directories above
 * the compiled module (dist/lib -> package root).
 * @throws {Error} when the version field is absent or not a string.
 */
function getCliPackageVersion() {
    const packageJsonPath = node_path_1.default.resolve(__dirname, '..', '..', 'package.json');
    const manifest = JSON.parse(node_fs_1.default.readFileSync(packageJsonPath, 'utf8'));
    const { version } = manifest;
    if (typeof version !== 'string' || version.length === 0) {
        throw new Error(`Unable to resolve CLI version from ${packageJsonPath}`);
    }
    return version;
}
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.buildMapArtifacts = buildMapArtifacts;
|
|
7
|
+
exports.extractContractArtifacts = extractContractArtifacts;
|
|
8
|
+
exports.generateDocsArtifacts = generateDocsArtifacts;
|
|
9
|
+
const node_path_1 = __importDefault(require("node:path"));
|
|
10
|
+
const contracts_1 = require("./contracts");
|
|
11
|
+
const docs_1 = require("./docs");
|
|
12
|
+
const fs_1 = require("./fs");
|
|
13
|
+
const mapBuilder_1 = require("./mapBuilder");
|
|
14
|
+
const repoRegistry_1 = require("./repoRegistry");
|
|
15
|
+
const scope_1 = require("./scope");
|
|
16
|
+
/**
 * Build and write the service-map artifacts (JSON, markdown, mermaid) for
 * a map into <cwd>/maps/<mapId>.
 *
 * The scope manifest is re-saved first, refreshing its derived fields and
 * change log before the map is built.
 * @returns paths of the three written artifacts plus the map directory.
 */
function buildMapArtifacts(mapId, db, cwd = process.cwd()) {
    const scope = (0, scope_1.loadScopeManifest)(mapId, cwd);
    (0, scope_1.saveScopeManifest)(scope, cwd);
    const repoMap = new Map();
    for (const repo of (0, repoRegistry_1.listAllRepos)(db)) {
        repoMap.set(repo.name, repo);
    }
    const artifact = (0, mapBuilder_1.buildServiceMapArtifact)(mapId, scope, repoMap);
    const mapDir = node_path_1.default.join(cwd, 'maps', mapId);
    const jsonPath = node_path_1.default.join(mapDir, 'service-map.json');
    const markdownPath = node_path_1.default.join(mapDir, 'service-map.md');
    const mermaidPath = node_path_1.default.join(mapDir, 'service-map.mmd');
    (0, fs_1.writeJsonFile)(jsonPath, artifact);
    (0, fs_1.writeTextFile)(markdownPath, (0, mapBuilder_1.renderServiceMapMarkdown)(artifact));
    (0, fs_1.writeTextFile)(mermaidPath, (0, mapBuilder_1.renderServiceMapMermaid)(artifact));
    return { jsonPath, markdownPath, mermaidPath, mapDir };
}
|
35
|
+
/**
 * Extract contracts for a map and write them as contracts.json plus a
 * rendered contracts.md under <cwd>/maps/<mapId>.
 * @returns the two artifact paths and the number of contracts found.
 */
function extractContractArtifacts(mapId, db, cwd = process.cwd()) {
    const scope = (0, scope_1.loadScopeManifest)(mapId, cwd);
    const repoMap = new Map();
    for (const repo of (0, repoRegistry_1.listAllRepos)(db)) {
        repoMap.set(repo.name, repo);
    }
    const contracts = (0, contracts_1.extractContracts)(mapId, scope, repoMap);
    const mapDir = node_path_1.default.join(cwd, 'maps', mapId);
    const jsonPath = node_path_1.default.join(mapDir, 'contracts.json');
    const markdownPath = node_path_1.default.join(mapDir, 'contracts.md');
    (0, fs_1.writeJsonFile)(jsonPath, contracts);
    (0, fs_1.writeTextFile)(markdownPath, (0, contracts_1.renderContractsMarkdown)(contracts, mapId));
    return { jsonPath, markdownPath, count: contracts.length };
}
|
50
|
+
/**
 * Generate the architecture doc (docs/architecture/<mapId>.md) and a
 * dependency summary (catalog/dependencies/<mapId>.md) for a map.
 *
 * The service map and contracts are rebuilt in-memory here; nothing is
 * written to the maps/ directory by this function.
 * @returns paths of the two generated documents.
 */
function generateDocsArtifacts(mapId, db, cwd = process.cwd()) {
    const scope = (0, scope_1.loadScopeManifest)(mapId, cwd);
    const repoMap = new Map();
    for (const repo of (0, repoRegistry_1.listAllRepos)(db)) {
        repoMap.set(repo.name, repo);
    }
    const serviceMap = (0, mapBuilder_1.buildServiceMapArtifact)(mapId, scope, repoMap);
    const contracts = (0, contracts_1.extractContracts)(mapId, scope, repoMap);
    const architecturePath = node_path_1.default.join(cwd, 'docs', 'architecture', `${mapId}.md`);
    (0, fs_1.writeTextFile)(architecturePath, (0, docs_1.renderArchitectureDoc)(mapId, scope, serviceMap, contracts));
    const dependencyPath = node_path_1.default.join(cwd, 'catalog', 'dependencies', `${mapId}.md`);
    const dependencyLines = [
        `# Dependency Summary: ${mapId}`,
        '',
        ...serviceMap.edges.map((edge) => `- ${edge.from} ${edge.relation} ${edge.to}`),
        '',
    ];
    (0, fs_1.writeTextFile)(dependencyPath, dependencyLines.join('\n'));
    return { architecturePath, dependencyPath };
}
package/package.json
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "sdx-cli",
|
|
3
|
+
"version": "0.2.1",
|
|
4
|
+
"description": "System Design Intelligence CLI",
|
|
5
|
+
"type": "commonjs",
|
|
6
|
+
"bin": {
|
|
7
|
+
"sdx": "./bin/run.js"
|
|
8
|
+
},
|
|
9
|
+
"files": [
|
|
10
|
+
"bin",
|
|
11
|
+
"dist"
|
|
12
|
+
],
|
|
13
|
+
"scripts": {
|
|
14
|
+
"build": "tsc -p tsconfig.json",
|
|
15
|
+
"clean": "rm -rf dist",
|
|
16
|
+
"dev": "tsx src/index.ts",
|
|
17
|
+
"changeset": "changeset",
|
|
18
|
+
"version-packages": "changeset version",
|
|
19
|
+
"release": "changeset publish",
|
|
20
|
+
"test": "vitest run",
|
|
21
|
+
"typecheck": "tsc -p tsconfig.json --noEmit"
|
|
22
|
+
},
|
|
23
|
+
"oclif": {
|
|
24
|
+
"bin": "sdx",
|
|
25
|
+
"dirname": "sdx",
|
|
26
|
+
"commands": "./dist/commands",
|
|
27
|
+
"topicSeparator": " "
|
|
28
|
+
},
|
|
29
|
+
"engines": {
|
|
30
|
+
"node": ">=20"
|
|
31
|
+
},
|
|
32
|
+
"license": "MIT",
|
|
33
|
+
"dependencies": {
|
|
34
|
+
"@oclif/core": "^4.10.3",
|
|
35
|
+
"@octokit/rest": "^22.0.1",
|
|
36
|
+
"better-sqlite3": "^12.8.0",
|
|
37
|
+
"globby": "^16.2.0",
|
|
38
|
+
"semver": "^7.7.4",
|
|
39
|
+
"yaml": "^2.8.3",
|
|
40
|
+
"zod": "^4.3.6"
|
|
41
|
+
},
|
|
42
|
+
"devDependencies": {
|
|
43
|
+
"@changesets/cli": "^2.29.7",
|
|
44
|
+
"@types/better-sqlite3": "^7.6.13",
|
|
45
|
+
"@types/node": "^25.5.0",
|
|
46
|
+
"tsx": "^4.21.0",
|
|
47
|
+
"typescript": "^6.0.2",
|
|
48
|
+
"vitest": "^4.1.2"
|
|
49
|
+
}
|
|
50
|
+
}
|