sdx-cli 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +266 -0
- package/bin/dev.js +11 -0
- package/bin/run.js +3 -0
- package/dist/commands/bootstrap/consumer.js +75 -0
- package/dist/commands/bootstrap/org.js +29 -0
- package/dist/commands/bootstrap/quick.js +82 -0
- package/dist/commands/codex/run.js +36 -0
- package/dist/commands/contracts/extract.js +22 -0
- package/dist/commands/docs/generate.js +22 -0
- package/dist/commands/handoff/draft.js +41 -0
- package/dist/commands/init.js +14 -0
- package/dist/commands/map/build.js +44 -0
- package/dist/commands/map/create.js +40 -0
- package/dist/commands/map/exclude.js +25 -0
- package/dist/commands/map/include.js +25 -0
- package/dist/commands/map/remove-override.js +25 -0
- package/dist/commands/map/status.js +30 -0
- package/dist/commands/migrate/artifacts.js +68 -0
- package/dist/commands/plan/review.js +60 -0
- package/dist/commands/prompt.js +62 -0
- package/dist/commands/publish/notices.js +98 -0
- package/dist/commands/publish/sync.js +67 -0
- package/dist/commands/publish/wiki.js +39 -0
- package/dist/commands/repo/add.js +29 -0
- package/dist/commands/repo/sync.js +30 -0
- package/dist/commands/service/propose.js +40 -0
- package/dist/commands/status.js +37 -0
- package/dist/commands/version.js +16 -0
- package/dist/index.js +10 -0
- package/dist/lib/artifactMigration.js +29 -0
- package/dist/lib/bootstrap.js +43 -0
- package/dist/lib/bootstrapConsumer.js +187 -0
- package/dist/lib/bootstrapQuick.js +27 -0
- package/dist/lib/codex.js +138 -0
- package/dist/lib/config.js +40 -0
- package/dist/lib/constants.js +26 -0
- package/dist/lib/contractChanges.js +347 -0
- package/dist/lib/contracts.js +93 -0
- package/dist/lib/db.js +41 -0
- package/dist/lib/docs.js +46 -0
- package/dist/lib/fileScan.js +34 -0
- package/dist/lib/fs.js +36 -0
- package/dist/lib/github.js +52 -0
- package/dist/lib/githubPublish.js +161 -0
- package/dist/lib/handoff.js +62 -0
- package/dist/lib/mapBuilder.js +182 -0
- package/dist/lib/paths.js +39 -0
- package/dist/lib/planReview.js +88 -0
- package/dist/lib/project.js +65 -0
- package/dist/lib/promptParser.js +88 -0
- package/dist/lib/publishContracts.js +876 -0
- package/dist/lib/repoRegistry.js +92 -0
- package/dist/lib/scope.js +110 -0
- package/dist/lib/serviceNoticePlan.js +130 -0
- package/dist/lib/serviceProposal.js +82 -0
- package/dist/lib/status.js +34 -0
- package/dist/lib/types.js +2 -0
- package/dist/lib/version.js +17 -0
- package/dist/lib/workflows.js +70 -0
- package/package.json +50 -0
|
@@ -0,0 +1,161 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.createGithubPublishOps = createGithubPublishOps;
|
|
4
|
+
const node_buffer_1 = require("node:buffer");
|
|
5
|
+
// True when a GitHub API error carries an HTTP 404 status (resource missing).
// The status may arrive as a number or a numeric string, hence the coercion.
function isNotFound(error) {
    const status = Number(error.status);
    return status === 404;
}
|
|
8
|
+
// Extracts { owner, repo, number } from a GitHub pull-request URL of the form
// https://github.com/<owner>/<repo>/pull/<n>. Throws on anything else.
function parsePrUrl(prUrl) {
    const PR_PATTERN = /github\.com\/([^/]+)\/([^/]+)\/pull\/(\d+)/i;
    const parts = PR_PATTERN.exec(prUrl);
    if (parts === null) {
        throw new Error(`Invalid PR URL: ${prUrl}`);
    }
    const [, owner, repo, pullNumber] = parts;
    return { owner, repo, number: Number(pullNumber) };
}
|
|
19
|
+
// Lazily loads the Octokit REST client (an ESM-only package, hence the
// dynamic import) and returns an instance authenticated with `token`.
async function createOctokit(token) {
    const mod = await import('@octokit/rest');
    return new mod.Octokit({ auth: token });
}
|
|
23
|
+
/**
 * Creates GitHub publishing operations (branch, file, and pull-request
 * management) bound to a single auth token. Octokit is imported lazily once
 * and the same promise is awaited by every operation.
 *
 * Fix: `upsertTextFile` previously initialized the remote body to '' even
 * when the file did not exist, so upserting an empty file was skipped as
 * "unchanged" and the file was never created. The remote body now stays
 * undefined until a file is actually read, so `'' === content` can only be
 * true for a file that really exists with that content.
 */
function createGithubPublishOps(token) {
    const octokitPromise = createOctokit(token);
    return {
        // Ensures `branch` exists, creating it from the head of the repo's
        // default branch when missing. Returns { defaultBranch } either way.
        async ensureBranch(owner, repo, branch) {
            const octokit = await octokitPromise;
            const repoData = await octokit.rest.repos.get({ owner, repo });
            const defaultBranch = repoData.data.default_branch;
            try {
                await octokit.rest.git.getRef({ owner, repo, ref: `heads/${branch}` });
                return { defaultBranch };
            }
            catch (error) {
                // 404 means the branch does not exist yet; anything else is fatal.
                if (!isNotFound(error)) {
                    throw error;
                }
            }
            const baseRef = await octokit.rest.git.getRef({ owner, repo, ref: `heads/${defaultBranch}` });
            await octokit.rest.git.createRef({
                owner,
                repo,
                ref: `refs/heads/${branch}`,
                sha: baseRef.data.object.sha,
            });
            return { defaultBranch };
        },
        // Creates or updates a UTF-8 text file on `branch`. Returns
        // { changed: false } when the remote content already matches `content`.
        async upsertTextFile(owner, repo, branch, filePath, content, commitMessage) {
            const octokit = await octokitPromise;
            let existingSha;
            let existingBody; // stays undefined while the file does not exist
            try {
                const current = await octokit.rest.repos.getContent({
                    owner,
                    repo,
                    path: filePath,
                    ref: branch,
                });
                // getContent returns an array for directories; only a plain
                // file can be compared and updated in place.
                if (!Array.isArray(current.data) && current.data.type === 'file') {
                    existingSha = current.data.sha;
                    const encoded = current.data.content ?? '';
                    existingBody = node_buffer_1.Buffer.from(encoded, current.data.encoding === 'base64' ? 'base64' : 'utf8').toString('utf8');
                }
            }
            catch (error) {
                if (!isNotFound(error)) {
                    throw error;
                }
            }
            if (existingBody === content) {
                return { changed: false };
            }
            await octokit.rest.repos.createOrUpdateFileContents({
                owner,
                repo,
                path: filePath,
                branch,
                message: commitMessage,
                content: node_buffer_1.Buffer.from(content, 'utf8').toString('base64'),
                sha: existingSha,
            });
            return { changed: true };
        },
        // Reads a UTF-8 text file at `ref`. Returns undefined when the path
        // is missing or resolves to a directory / non-file entry.
        async readTextFile(owner, repo, filePath, ref) {
            const octokit = await octokitPromise;
            try {
                const current = await octokit.rest.repos.getContent({
                    owner,
                    repo,
                    path: filePath,
                    ref,
                });
                if (Array.isArray(current.data) || current.data.type !== 'file') {
                    return undefined;
                }
                const encoded = current.data.content ?? '';
                return node_buffer_1.Buffer.from(encoded, current.data.encoding === 'base64' ? 'base64' : 'utf8').toString('utf8');
            }
            catch (error) {
                if (isNotFound(error)) {
                    return undefined;
                }
                throw error;
            }
        },
        // Opens a PR from `branch` onto the default branch, or refreshes the
        // title/body of the already-open PR for that head branch.
        async upsertPullRequest(input) {
            const octokit = await octokitPromise;
            const { owner, repo, branch, title, body, draft } = input;
            const repoData = await octokit.rest.repos.get({ owner, repo });
            const base = repoData.data.default_branch;
            const open = await octokit.rest.pulls.list({
                owner,
                repo,
                state: 'open',
                head: `${owner}:${branch}`,
            });
            if (open.data.length > 0) {
                const existing = open.data[0];
                // NOTE: draft status cannot be toggled via pulls.update, so it
                // is intentionally not passed here.
                await octokit.rest.pulls.update({
                    owner,
                    repo,
                    pull_number: existing.number,
                    title,
                    body,
                });
                return {
                    number: existing.number,
                    url: existing.html_url,
                    created: false,
                };
            }
            const created = await octokit.rest.pulls.create({
                owner,
                repo,
                title,
                body,
                head: branch,
                base,
                draft,
            });
            return {
                number: created.data.number,
                url: created.data.html_url,
                created: true,
            };
        },
        // Maps a PR URL to a lifecycle state: 'merged', 'opened' (still open),
        // or 'blocked' (closed without merging).
        async getPullLifecycle(prUrl) {
            const octokit = await octokitPromise;
            const parsed = parsePrUrl(prUrl);
            const pull = await octokit.rest.pulls.get({
                owner: parsed.owner,
                repo: parsed.repo,
                pull_number: parsed.number,
            });
            if (pull.data.merged_at) {
                return 'merged';
            }
            return pull.data.state === 'open' ? 'opened' : 'blocked';
        },
    };
}
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.buildHandoff = buildHandoff;
|
|
4
|
+
exports.renderHandoffMarkdown = renderHandoffMarkdown;
|
|
5
|
+
const constants_1 = require("./constants");
|
|
6
|
+
// Builds a handoff artifact that tells every other repo in the effective
// scope what to verify now that service `serviceId` exists.
function buildHandoff(mapId, serviceId, scope, contracts) {
    const targets = [];
    for (const repo of scope.effective) {
        if (repo === serviceId) {
            continue;
        }
        // NOTE(review): the `||` below attributes every contract produced by
        // serviceId to every target repo, not only contracts owned by `repo`
        // — confirm the intent was not `&&` before tightening this.
        const impactedContracts = contracts
            .filter((contract) => contract.repo === repo || contract.producers.includes(serviceId))
            .slice(0, 8);
        const contractImpacts = impactedContracts.length > 0
            ? impactedContracts.map((contract) => `${contract.type}:${contract.path}`)
            : ['No direct contracts detected; validate runtime dependencies manually.'];
        targets.push({
            repo,
            summary: `New service '${serviceId}' is available. Validate integration touchpoints for ${repo}.`,
            requiredChanges: [
                `Review service-to-service dependency requirements for ${serviceId}.`,
                `Update integration configuration and environment variables if ${repo} consumes ${serviceId}.`,
                'Add compatibility checks in CI for the updated API or event contracts.',
            ],
            contractImpacts,
            sequencing: 'Integrate in staging first, validate observability signals, then promote to production.',
        });
    }
    return {
        schemaVersion: constants_1.SCHEMA_VERSION,
        generatedAt: new Date().toISOString(),
        mapId,
        serviceId,
        targets,
    };
}
|
|
35
|
+
// Renders a handoff artifact as a human-readable Markdown document with one
// "## <repo>" section per target.
function renderHandoffMarkdown(handoff) {
    const out = [
        `# Handoff Draft: ${handoff.serviceId}`,
        '',
        `- Generated: ${handoff.generatedAt}`,
        `- Map: ${handoff.mapId}`,
        '',
    ];
    for (const target of handoff.targets) {
        out.push(`## ${target.repo}`, '', target.summary, '', 'Required changes:');
        out.push(...target.requiredChanges.map((item) => `- ${item}`));
        out.push('', 'Contract impacts:');
        out.push(...target.contractImpacts.map((impact) => `- ${impact}`));
        out.push('', `Sequencing: ${target.sequencing}`, '');
    }
    return `${out.join('\n')}\n`;
}
|
|
@@ -0,0 +1,182 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.buildServiceMapArtifact = buildServiceMapArtifact;
|
|
7
|
+
exports.renderServiceMapMermaid = renderServiceMapMermaid;
|
|
8
|
+
exports.renderServiceMapMarkdown = renderServiceMapMarkdown;
|
|
9
|
+
const node_fs_1 = __importDefault(require("node:fs"));
|
|
10
|
+
const node_path_1 = __importDefault(require("node:path"));
|
|
11
|
+
const constants_1 = require("./constants");
|
|
12
|
+
const fileScan_1 = require("./fileScan");
|
|
13
|
+
// File-name patterns that identify machine-readable contract documents:
// OpenAPI/Swagger/AsyncAPI specs, GraphQL schemas, and protobuf definitions.
const CONTRACT_REGEX = [
    /openapi.*\.(ya?ml|json)$/i,
    /swagger\.(ya?ml|json)$/i,
    /asyncapi.*\.(ya?ml|json)$/i,
    /\.graphql$/i,
    /\.gql$/i,
    /\.proto$/i,
];
// Classifies a contract file path as an 'event' contract (AsyncAPI or
// event-related naming anywhere in the path) or a plain 'api' contract.
function classifyContract(filePath) {
    const normalized = filePath.toLowerCase();
    const isEvent = normalized.includes('asyncapi') || normalized.includes('event');
    return isEvent ? 'event' : 'api';
}
|
|
28
|
+
// Reads package.json under `repoPath` and returns the union of dependency,
// devDependency, and peerDependency names. Returns [] when the manifest is
// missing or unparseable — this is a best-effort scan, not a validator.
function readPackageDependencies(repoPath) {
    const packagePath = node_path_1.default.join(repoPath, 'package.json');
    if (!node_fs_1.default.existsSync(packagePath)) {
        return [];
    }
    try {
        const manifest = JSON.parse(node_fs_1.default.readFileSync(packagePath, 'utf8'));
        const sections = [manifest.dependencies, manifest.devDependencies, manifest.peerDependencies];
        const names = new Set();
        for (const section of sections) {
            for (const name of Object.keys(section ?? {})) {
                names.add(name);
            }
        }
        return [...names];
    }
    catch {
        // Malformed JSON is treated the same as a missing manifest.
        return [];
    }
}
|
|
46
|
+
// Returns `nodes` with duplicate ids removed, keeping the first occurrence
// of each id and preserving the original order.
function dedupeNodes(nodes) {
    const byId = new Map();
    for (const node of nodes) {
        if (!byId.has(node.id)) {
            byId.set(node.id, node);
        }
    }
    return [...byId.values()];
}
|
|
58
|
+
// Returns `edges` with duplicate (from, to, relation) triples removed,
// keeping the first occurrence and preserving the original order.
function dedupeEdges(edges) {
    const byKey = new Map();
    for (const edge of edges) {
        const key = `${edge.from}|${edge.to}|${edge.relation}`;
        if (!byKey.has(key)) {
            byKey.set(key, edge);
        }
    }
    return [...byKey.values()];
}
|
|
71
|
+
/**
 * Builds the service-map artifact for `mapId` from the effective scope.
 *
 * For every repo in scope it emits a `service:` node, a `repo:` node, and an
 * `owns` edge between them. When the repo has a local checkout on disk it
 * additionally emits `depends_on` edges toward other in-scope services found
 * in package.json, and one node + edge per contract file (OpenAPI/AsyncAPI/
 * GraphQL/proto) discovered by scanning the checkout.
 *
 * @param mapId       identifier stamped onto the returned artifact
 * @param scope       expects `{ org, effective: string[] }` — effective repo names
 * @param reposByName Map of repo name -> repo record (`org`, `source`,
 *                    `localPath` are read; all are optional here)
 * @returns the artifact object with deduplicated nodes/edges and a fresh
 *          `generatedAt` timestamp
 */
function buildServiceMapArtifact(mapId, scope, reposByName) {
    const nodes = [];
    const edges = [];
    // Set for O(1) membership checks when filtering package.json deps below.
    const effectiveSet = new Set(scope.effective);
    for (const repoName of scope.effective) {
        const repo = reposByName.get(repoName);
        nodes.push({
            id: `service:${repoName}`,
            type: 'service',
            label: repoName,
            repo: repoName,
            metadata: {
                // Fall back to the scope-level org when the registry record
                // is missing or has no org of its own.
                org: repo?.org ?? scope.org,
            },
        });
        nodes.push({
            id: `repo:${repoName}`,
            type: 'repo',
            label: repoName,
            repo: repoName,
            metadata: {
                source: repo?.source ?? 'github',
                localPath: repo?.localPath,
            },
        });
        edges.push({
            from: `service:${repoName}`,
            to: `repo:${repoName}`,
            relation: 'owns',
        });
        // Dependency and contract discovery require a local checkout; repos
        // without one still appear in the map, just without detail edges.
        if (!repo?.localPath || !node_fs_1.default.existsSync(repo.localPath)) {
            continue;
        }
        const deps = readPackageDependencies(repo.localPath);
        for (const dep of deps) {
            // Only package names that are themselves in-scope services become
            // service-to-service edges; external packages are ignored.
            if (!effectiveSet.has(dep)) {
                continue;
            }
            edges.push({
                from: `service:${repoName}`,
                to: `service:${dep}`,
                relation: 'depends_on',
            });
        }
        const allFiles = (0, fileScan_1.listFilesRecursive)(repo.localPath);
        const contractFiles = allFiles.filter((candidate) => CONTRACT_REGEX.some((pattern) => pattern.test(candidate)));
        for (const contractPath of contractFiles) {
            const relPath = node_path_1.default.relative(repo.localPath, contractPath);
            const nodeType = classifyContract(relPath);
            const contractNodeId = `${nodeType}:${repoName}:${relPath}`;
            nodes.push({
                id: contractNodeId,
                type: nodeType === 'event' ? 'event' : 'api',
                label: relPath,
                repo: repoName,
            });
            edges.push({
                from: `service:${repoName}`,
                to: contractNodeId,
                // Event contracts are modeled as publications; API contracts
                // as call surfaces.
                relation: nodeType === 'event' ? 'publishes' : 'calls',
            });
        }
    }
    return {
        schemaVersion: constants_1.SCHEMA_VERSION,
        generatedAt: new Date().toISOString(),
        mapId,
        org: scope.org,
        repos: [...scope.effective],
        nodes: dedupeNodes(nodes),
        edges: dedupeEdges(edges),
        coverageTags: [...constants_1.PRIMER_DIMENSIONS],
    };
}
|
|
145
|
+
/**
 * Renders the service map as a Mermaid flowchart.
 *
 * Node ids are sanitized to Mermaid-safe identifiers. Fix: double quotes in
 * labels are now escaped with Mermaid's `#quot;` entity — labels are
 * arbitrary file paths, and an embedded `"` would otherwise terminate the
 * quoted label early and break the diagram syntax.
 *
 * NOTE(review): sanitizing ids can collide distinct ids (e.g. `a.b` vs
 * `a_b`); presumably acceptable for these generated ids — confirm.
 */
function renderServiceMapMermaid(artifact) {
    const toMermaidId = (value) => value.replace(/[^a-zA-Z0-9_]/g, '_');
    const escapeLabel = (value) => value.replace(/"/g, '#quot;');
    const lines = ['flowchart LR'];
    for (const node of artifact.nodes) {
        lines.push(`  ${toMermaidId(node.id)}["${escapeLabel(node.label)}"]`);
    }
    for (const edge of artifact.edges) {
        lines.push(`  ${toMermaidId(edge.from)} -->|"${edge.relation}"| ${toMermaidId(edge.to)}`);
    }
    return `${lines.join('\n')}\n`;
}
|
|
158
|
+
// Renders the service-map artifact as a Markdown summary document.
function renderServiceMapMarkdown(artifact) {
    const header = [
        `# Service Map: ${artifact.mapId}`,
        '',
        `- Generated: ${artifact.generatedAt}`,
        `- Org: ${artifact.org}`,
        `- Repos in scope: ${artifact.repos.length}`,
        `- Nodes: ${artifact.nodes.length}`,
        `- Edges: ${artifact.edges.length}`,
        '',
    ];
    const repoSection = ['## Repositories', '', ...artifact.repos.map((repo) => `- ${repo}`), ''];
    // Only the first 30 edges are listed, to keep the document readable.
    const relationSection = [
        '## Top Relations',
        '',
        ...artifact.edges.slice(0, 30).map((edge) => `- ${edge.from} ${edge.relation} ${edge.to}`),
        '',
    ];
    const coverageSection = ['## Coverage Tags', '', ...artifact.coverageTags.map((tag) => `- ${tag}`), ''];
    return `${[...header, ...repoSection, ...relationSection, ...coverageSection].join('\n')}\n`;
}
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.getProjectRoot = getProjectRoot;
|
|
7
|
+
exports.getAppDir = getAppDir;
|
|
8
|
+
exports.getConfigPath = getConfigPath;
|
|
9
|
+
exports.getDbPath = getDbPath;
|
|
10
|
+
exports.getMapDir = getMapDir;
|
|
11
|
+
exports.getCatalogDir = getCatalogDir;
|
|
12
|
+
exports.getDocsDir = getDocsDir;
|
|
13
|
+
exports.getCodexDir = getCodexDir;
|
|
14
|
+
const node_path_1 = __importDefault(require("node:path"));
|
|
15
|
+
const constants_1 = require("./constants");
|
|
16
|
+
// Path helpers for the sdx workspace layout. Every location resolves under
// the project root, which is currently just the working directory.
function getProjectRoot(cwd = process.cwd()) {
    return cwd;
}
// Joins path segments under the project root for the given cwd.
function fromProjectRoot(cwd, ...segments) {
    return node_path_1.default.join(getProjectRoot(cwd), ...segments);
}
// Directory that holds sdx's own files (config, db).
function getAppDir(cwd = process.cwd()) {
    return fromProjectRoot(cwd, constants_1.APP_DIR);
}
function getConfigPath(cwd = process.cwd()) {
    return node_path_1.default.join(getAppDir(cwd), constants_1.CONFIG_FILE);
}
function getDbPath(cwd = process.cwd()) {
    return node_path_1.default.join(getAppDir(cwd), constants_1.DB_FILE);
}
// Per-map artifact directory: <root>/maps/<mapId>.
function getMapDir(mapId, cwd = process.cwd()) {
    return fromProjectRoot(cwd, 'maps', mapId);
}
function getCatalogDir(cwd = process.cwd()) {
    return fromProjectRoot(cwd, 'catalog');
}
function getDocsDir(cwd = process.cwd()) {
    return fromProjectRoot(cwd, 'docs');
}
function getCodexDir(cwd = process.cwd()) {
    return fromProjectRoot(cwd, 'codex');
}
|
|
@@ -0,0 +1,88 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.reviewPlan = reviewPlan;
|
|
7
|
+
exports.renderPlanReviewMarkdown = renderPlanReviewMarkdown;
|
|
8
|
+
const node_fs_1 = __importDefault(require("node:fs"));
|
|
9
|
+
const node_path_1 = __importDefault(require("node:path"));
|
|
10
|
+
const constants_1 = require("./constants");
|
|
11
|
+
// Returns the required NFR keywords (from the taxonomy baseline) that the
// plan text never mentions, using a case-insensitive substring check.
function detectMissingNfrs(planText) {
    const haystack = planText.toLowerCase();
    return constants_1.REQUIRED_NFR_KEYWORDS.filter((keyword) => !haystack.includes(keyword));
}
|
|
15
|
+
function detectImpactedRepos(planText, scope) {
|
|
16
|
+
const lower = planText.toLowerCase();
|
|
17
|
+
return scope.effective.filter((repo) => lower.includes(repo.toLowerCase()));
|
|
18
|
+
}
|
|
19
|
+
// Collects up to 20 plan lines that look like open questions or assumptions
// (mentioning tbd/todo/assume/unknown anywhere, case-insensitive).
function detectAssumptions(planText) {
    const flagged = [];
    for (const line of planText.split(/\r?\n/)) {
        if (flagged.length === 20) {
            break;
        }
        if (/(tbd|todo|assume|unknown)/i.test(line)) {
            flagged.push(line);
        }
    }
    return flagged;
}
|
|
23
|
+
/**
 * Reviews a plan document on disk against the map scope and service map.
 *
 * Reads the plan file synchronously, derives heuristic findings (missing NFR
 * keywords, repos mentioned by name, assumption-like lines), and returns a
 * review artifact. The plan is "accepted" only when no required NFR keyword
 * is missing.
 *
 * @param mapId      identifier stamped onto the returned artifact
 * @param planPath   path to the plan file (resolved to absolute)
 * @param scope      expects `{ effective: string[] }` of repo names
 * @param serviceMap service-map artifact; only `coverageTags.length` is read
 * @throws if the plan file cannot be read
 */
function reviewPlan(mapId, planPath, scope, serviceMap) {
    const absolute = node_path_1.default.resolve(planPath);
    const planText = node_fs_1.default.readFileSync(absolute, 'utf8');
    const missingNfrs = detectMissingNfrs(planText);
    const impactedRepos = detectImpactedRepos(planText, scope);
    const unresolvedAssumptions = detectAssumptions(planText);
    // Fixed set of heuristic decisions; confidence values are hand-tuned
    // constants, not computed scores.
    const decisions = [
        {
            title: 'Service boundary fit',
            rationale: impactedRepos.length > 0
                ? 'Plan references existing repository surfaces and should align integration ownership before implementation.'
                : 'Plan does not reference existing repos. Validate downstream integration points before implementation.',
            confidence: impactedRepos.length > 0 ? 0.72 : 0.55,
            dimensions: ['api_style', 'operational_tradeoffs', 'reliability'],
        },
        {
            title: 'NFR completeness',
            rationale: missingNfrs.length === 0
                ? 'Plan includes required NFR categories for latency, availability, durability, SLO intent, and failure handling.'
                : `Plan is missing required NFR categories: ${missingNfrs.join(', ')}.`,
            // A concrete gap is reported with higher confidence than a pass.
            confidence: missingNfrs.length === 0 ? 0.86 : 0.9,
            dimensions: ['scalability', 'reliability', 'observability', 'consistency_model'],
        },
        {
            title: 'System design coverage breadth',
            rationale: `Review used ${serviceMap.coverageTags.length} coverage dimensions aligned to the taxonomy baseline.`,
            confidence: 0.8,
            dimensions: [...constants_1.PRIMER_DIMENSIONS],
        },
    ];
    return {
        schemaVersion: constants_1.SCHEMA_VERSION,
        generatedAt: new Date().toISOString(),
        mapId,
        planPath: absolute,
        missingNfrs,
        accepted: missingNfrs.length === 0,
        decisions,
        impactedRepos,
        unresolvedAssumptions,
    };
}
|
|
65
|
+
// Renders a plan-review artifact as a Markdown report.
function renderPlanReviewMarkdown(review) {
    const out = [];
    out.push(`# Plan Review: ${review.mapId}`, '');
    out.push(`- Generated: ${review.generatedAt}`);
    out.push(`- Plan: ${review.planPath}`);
    out.push(`- Accepted: ${review.accepted ? 'yes' : 'no'}`);
    const missing = review.missingNfrs.length === 0 ? 'none' : review.missingNfrs.join(', ');
    out.push(`- Missing NFRs: ${missing}`, '');
    out.push('## Decisions', '');
    for (const decision of review.decisions) {
        out.push(`- ${decision.title} (confidence ${decision.confidence.toFixed(2)}): ${decision.rationale} [${decision.dimensions.join(', ')}]`);
    }
    out.push('', '## Impacted Repositories', '');
    if (review.impactedRepos.length > 0) {
        out.push(...review.impactedRepos.map((repo) => `- ${repo}`));
    }
    else {
        out.push('- none detected');
    }
    out.push('', '## Unresolved Assumptions', '');
    if (review.unresolvedAssumptions.length > 0) {
        out.push(...review.unresolvedAssumptions.map((line) => `- ${line}`));
    }
    else {
        out.push('- none');
    }
    out.push('');
    return `${out.join('\n')}\n`;
}
|
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.ensureStandardDirs = ensureStandardDirs;
|
|
7
|
+
exports.initProject = initProject;
|
|
8
|
+
exports.loadProject = loadProject;
|
|
9
|
+
exports.recordRun = recordRun;
|
|
10
|
+
exports.ensureMapDir = ensureMapDir;
|
|
11
|
+
const node_path_1 = __importDefault(require("node:path"));
|
|
12
|
+
const config_1 = require("./config");
|
|
13
|
+
const db_1 = require("./db");
|
|
14
|
+
const fs_1 = require("./fs");
|
|
15
|
+
const paths_1 = require("./paths");
|
|
16
|
+
// Creates the standard sdx workspace directory layout under `cwd`.
// Idempotent: existing directories are left untouched.
function ensureStandardDirs(cwd = process.cwd()) {
    const catalogDir = (0, paths_1.getCatalogDir)(cwd);
    const docsDir = (0, paths_1.getDocsDir)(cwd);
    const codexDir = (0, paths_1.getCodexDir)(cwd);
    const layout = [
        catalogDir,
        node_path_1.default.join(catalogDir, 'services'),
        node_path_1.default.join(catalogDir, 'contracts'),
        node_path_1.default.join(catalogDir, 'dependencies'),
        docsDir,
        node_path_1.default.join(docsDir, 'architecture'),
        node_path_1.default.join(docsDir, 'adr'),
        node_path_1.default.join(cwd, 'plans', 'reviews'),
        node_path_1.default.join(cwd, 'handoffs'),
        node_path_1.default.join(cwd, 'publish', 'notices'),
        node_path_1.default.join(cwd, 'publish', 'sync'),
        node_path_1.default.join(codexDir, 'context-packs'),
        node_path_1.default.join(codexDir, 'runs'),
        node_path_1.default.join(cwd, 'diagrams'),
        node_path_1.default.join(cwd, 'snapshots'),
    ];
    for (const dir of layout) {
        (0, fs_1.ensureDir)(dir);
    }
}
|
|
33
|
+
// Builds the in-memory project context (root dir, DB handle, loaded config)
// shared by initProject and loadProject. Assumes the config exists and the
// standard directories have already been ensured.
function openProjectContext(cwd) {
    return {
        cwd,
        rootDir: (0, paths_1.getProjectRoot)(cwd),
        db: (0, db_1.openDb)(cwd),
        config: (0, config_1.loadConfig)(cwd),
    };
}
/**
 * Initializes the sdx workspace in `cwd`: writes a default config when none
 * exists, creates the standard directory layout, and returns the project
 * context. Safe to call on an already-initialized workspace.
 */
function initProject(cwd = process.cwd()) {
    if (!(0, config_1.hasConfig)(cwd)) {
        const config = (0, config_1.createDefaultConfig)(cwd);
        (0, config_1.saveConfig)(config, cwd);
    }
    ensureStandardDirs(cwd);
    return openProjectContext(cwd);
}
/**
 * Loads an existing sdx workspace in `cwd`, re-ensuring the standard
 * directory layout, and returns the project context.
 * @throws {Error} when the workspace has not been initialized via `sdx init`.
 */
function loadProject(cwd = process.cwd()) {
    if (!(0, config_1.hasConfig)(cwd)) {
        throw new Error('sdx is not initialized. Run `sdx init` first.');
    }
    ensureStandardDirs(cwd);
    return openProjectContext(cwd);
}
|
|
58
|
+
// Appends one entry to the run_log table. `mapId` and `metadata` are
// optional; metadata is stored as JSON text when provided.
function recordRun(db, runType, status, mapId, metadata) {
    const insert = db.prepare('INSERT INTO run_log (run_type, map_id, status, created_at, metadata_json) VALUES (?, ?, ?, ?, ?)');
    const metadataJson = metadata ? JSON.stringify(metadata) : null;
    insert.run(runType, mapId ?? null, status, new Date().toISOString(), metadataJson);
}
|
|
61
|
+
// Ensures the artifact directory for `mapId` exists and returns its path.
function ensureMapDir(mapId, cwd = process.cwd()) {
    const dir = (0, paths_1.getMapDir)(mapId, cwd);
    (0, fs_1.ensureDir)(dir);
    return dir;
}
|
|
@@ -0,0 +1,88 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.parsePromptIntent = parsePromptIntent;
|
|
4
|
+
exports.renderPromptPreview = renderPromptPreview;
|
|
5
|
+
// Escapes regular-expression metacharacters so `text` can be embedded
// literally inside a RegExp pattern.
function escapeRegExp(text) {
    return text.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
}
|
|
8
|
+
// Finds repository names mentioned in free-form text.
// When knownRepos is non-empty, only known names count: a name matches when
// it appears bare (delimited by non-name characters) or as the trailing
// segment of an org/repo reference. With no known repos, falls back to
// extracting identifier-like tokens and dropping common command words.
function extractRepoTokens(input, knownRepos) {
    const candidates = [...new Set(knownRepos.map((repo) => repo.trim()).filter(Boolean))];
    if (candidates.length > 0) {
        const haystack = input.toLowerCase();
        const found = new Set();
        for (const repo of candidates) {
            const escaped = escapeRegExp(repo.toLowerCase());
            const bare = new RegExp(`(^|[^a-zA-Z0-9._-])${escaped}([^a-zA-Z0-9._-]|$)`);
            const qualified = new RegExp(`[a-zA-Z0-9._-]+/${escaped}([^a-zA-Z0-9._-]|$)`);
            if (bare.test(haystack) || qualified.test(haystack)) {
                found.add(repo);
            }
        }
        return [...found];
    }
    const STOPWORDS = new Set([
        'include', 'exclude', 'remove', 'status', 'build', 'map',
        'from', 'the', 'and', 'in', 'to', 'for', 'of', 'with',
    ]);
    const tokens = input.match(/[a-zA-Z0-9._-]+\/[a-zA-Z0-9._-]+|[a-zA-Z0-9._-]+/g) ?? [];
    return [...new Set(tokens.filter((token) => !STOPWORDS.has(token.toLowerCase())))];
}
|
|
42
|
+
// Maps a free-form instruction to a deterministic map action. Verbs are
// checked in priority order (status, build, include/add, exclude/remove/drop)
// and anything else falls through to 'unknown' with extracted repo tokens.
function parsePromptIntent(instruction, knownRepos = []) {
    const normalized = instruction.toLowerCase();
    if (/\bstatus\b/.test(normalized)) {
        return { action: 'status', repos: [], explanation: 'Show map status.' };
    }
    if (/\bbuild\b/.test(normalized)) {
        return { action: 'build', repos: [], explanation: 'Build map artifacts for current scope.' };
    }
    if (/\b(include|add)\b/.test(normalized)) {
        return {
            action: 'include',
            repos: extractRepoTokens(instruction, knownRepos),
            explanation: 'Add repositories to explicit include overrides.',
        };
    }
    if (/\b(exclude|remove|drop)\b/.test(normalized)) {
        return {
            action: 'exclude',
            repos: extractRepoTokens(instruction, knownRepos),
            explanation: 'Add repositories to explicit exclude overrides.',
        };
    }
    return {
        action: 'unknown',
        repos: extractRepoTokens(instruction, knownRepos),
        explanation: 'No deterministic action matched.',
    };
}
|
|
78
|
+
// Renders a parsed prompt intent as a short Markdown preview.
function renderPromptPreview(intent) {
    const lines = ['# Prompt Preview', ''];
    lines.push(`- Action: ${intent.action}`);
    lines.push(`- Repositories: ${intent.repos.length > 0 ? intent.repos.join(', ') : 'none'}`);
    lines.push(`- Explanation: ${intent.explanation}`);
    lines.push('');
    return lines.join('\n');
}
|