sdx-cli 0.2.1 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -17,7 +17,9 @@
17
17
  </p>
18
18
 
19
19
  <p align="center">
20
+ <a href="#install-from-npm">Install from npm</a> •
20
21
  <a href="#one-command-setup">One-Command Setup</a> •
22
+ <a href="#architecture-pack-org--service-deep-dives">Architecture Pack</a> •
21
23
  <a href="#daily-workflow">Daily Workflow</a> •
22
24
  <a href="#for-codex-agents">For Codex Agents</a> •
23
25
  <a href="#release-process">Release Process</a>
@@ -36,6 +38,28 @@ You use it to:
36
38
 
37
39
  v1 remains manual-triggered. SDX can open draft notice PRs when you run publish commands, but it does not autonomously mutate runtime infrastructure.
38
40
 
41
+ ## Install from npm
42
+ Prerequisite: Node.js `20+`.
43
+
44
+ Choose one install mode:
45
+
46
+ ```bash
47
+ # A) No install, run directly (recommended for first run)
48
+ npx --yes sdx-cli@latest --help
49
+
50
+ # B) Global install
51
+ npm install -g sdx-cli
52
+ sdx --help
53
+
54
+ # C) Project-pinned install (recommended for teams)
55
+ npm install --save-dev sdx-cli
56
+ npx sdx --help
57
+ ```
58
+
59
+ Team recommendation:
60
+ - use `bootstrap quick` once per org workspace,
61
+ - then run `./scripts/sdx ...` so the workspace stays pinned to one CLI version.
62
+
39
63
  ## One-Command Setup
40
64
  ### Org Initialization
41
65
  Run this to initialize SDX for a GitHub org:
@@ -108,6 +132,7 @@ From your SDX workspace root:
108
132
 
109
133
  ./scripts/sdx contracts extract --map platform-core
110
134
  ./scripts/sdx docs generate --map platform-core
135
+ ./scripts/sdx architecture generate --map platform-core
111
136
  ```
112
137
 
113
138
  For planning and rollout:
@@ -124,6 +149,32 @@ For Codex:
124
149
  ./scripts/sdx codex run implementation-plan --map platform-core --input ./plans/new-service.md
125
150
  ```
126
151
 
152
+ ### Architecture Pack (Org + Service Deep Dives)
153
+ Generate an executive-ready architecture pack from your initialized consumer workspace:
154
+
155
+ ```bash
156
+ # full pack (org-level + per-service docs/diagrams)
157
+ ./scripts/sdx architecture generate --map platform-core
158
+
159
+ # org-level only
160
+ ./scripts/sdx architecture generate --map platform-core --depth org
161
+
162
+ # targeted service rebuild
163
+ ./scripts/sdx architecture generate --map platform-core --service payments-api
164
+
165
+ # explicit validation pass (override integrity + completeness checks)
166
+ ./scripts/sdx architecture validate --map platform-core
167
+ ```
168
+
169
+ Override source of truth:
170
+ - `maps/<map-id>/architecture-overrides.json`
171
+
172
+ Use overrides to:
173
+ - declare hidden or external dependencies,
174
+ - assert missing relationships,
175
+ - suppress incorrect inferred edges,
176
+ - attach service owner/criticality/business context metadata.
177
+
127
178
  ## Cross-Repo Tech-Lead PRs (Spec-System Native)
128
179
  Use this flow when SDX should create real `CC-*` contract-change PRs in downstream repos that have spec-system initialized.
129
180
 
@@ -181,11 +232,16 @@ Use this minimal runbook when an agent needs architecture context quickly:
181
232
  2. `./scripts/sdx map status <map-id>`
182
233
  3. `./scripts/sdx map build <map-id>`
183
234
  4. `./scripts/sdx contracts extract --map <map-id>`
184
- 5. `./scripts/sdx codex run <task-type> --map <map-id> --input <file>`
235
+ 5. `./scripts/sdx architecture generate --map <map-id>`
236
+ 6. `./scripts/sdx codex run <task-type> --map <map-id> --input <file>`
185
237
 
186
238
  Where outputs land:
187
239
  - `maps/<map-id>/service-map.json|md|mmd`
188
240
  - `maps/<map-id>/contracts.json|md`
241
+ - `maps/<map-id>/architecture/model.json|validation.json`
242
+ - `maps/<map-id>/architecture-overrides.json`
243
+ - `docs/architecture/<map-id>/index.md`
244
+ - `docs/architecture/<map-id>/services/*.md`
189
245
  - `codex/context-packs/*.json`
190
246
  - `codex/runs/*.md|json`
191
247
 
@@ -202,6 +258,7 @@ sdx repo add
202
258
  sdx map create|include|exclude|remove-override|status|build
203
259
  sdx prompt
204
260
 
261
+ sdx architecture generate|validate
205
262
  sdx contracts extract
206
263
  sdx docs generate
207
264
  sdx plan review
@@ -239,6 +296,14 @@ This repo uses Changesets and releases from `main`.
239
296
  - Publish prerequisites:
240
297
  - configure npm auth for CI (`NPM_TOKEN` repo secret),
241
298
  - allow workflow pushes to `main` under your branch protection policy.
299
+ - use an npm automation token with package `Read and write` and 2FA bypass enabled for CI publish.
300
+
301
+ Set npm token secret (maintainers):
302
+
303
+ ```bash
304
+ gh secret set NPM_TOKEN --repo dana0550/system-desiigner
305
+ gh secret list --repo dana0550/system-desiigner | rg NPM_TOKEN
306
+ ```
242
307
 
243
308
  Maintainer commands:
244
309
 
@@ -0,0 +1,70 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ const core_1 = require("@oclif/core");
4
+ const architecture_1 = require("../../lib/architecture");
5
+ const project_1 = require("../../lib/project");
6
+ const workflows_1 = require("../../lib/workflows");
7
+ class ArchitectureGenerateCommand extends core_1.Command {
8
+ static description = 'Generate architecture pack artifacts and diagrams for a map';
9
+ static flags = {
10
+ map: core_1.Flags.string({ required: true, description: 'Map identifier' }),
11
+ depth: core_1.Flags.string({
12
+ required: false,
13
+ options: ['org', 'full'],
14
+ default: 'full',
15
+ description: 'Generation depth: org-only or full (org + per-service packs)',
16
+ }),
17
+ service: core_1.Flags.string({
18
+ required: false,
19
+ description: 'Generate only one service deep-dive (service id/repo name)',
20
+ }),
21
+ };
22
+ async run() {
23
+ const { flags } = await this.parse(ArchitectureGenerateCommand);
24
+ if (flags.depth === 'org' && flags.service) {
25
+ throw new Error('Cannot use --service with --depth org. Use --depth full for targeted service generation.');
26
+ }
27
+ const context = (0, project_1.loadProject)(process.cwd());
28
+ const mapArtifacts = (0, workflows_1.buildMapArtifacts)(flags.map, context.db, context.cwd);
29
+ const contractArtifacts = (0, workflows_1.extractContractArtifacts)(flags.map, context.db, context.cwd);
30
+ const docsArtifacts = (0, workflows_1.generateDocsArtifacts)(flags.map, context.db, context.cwd);
31
+ const result = (0, architecture_1.generateArchitecturePack)({
32
+ mapId: flags.map,
33
+ db: context.db,
34
+ cwd: context.cwd,
35
+ depth: flags.depth,
36
+ serviceId: flags.service,
37
+ });
38
+ (0, project_1.recordRun)(context.db, 'architecture_generate', result.validation.valid ? 'ok' : 'error', flags.map, {
39
+ depth: flags.depth,
40
+ service: flags.service,
41
+ generatedServices: result.generatedServices.length,
42
+ validation: result.validation,
43
+ modelPath: result.modelPath,
44
+ indexDocPath: result.indexDocPath,
45
+ baseline: {
46
+ mapArtifacts,
47
+ contractArtifacts,
48
+ docsArtifacts,
49
+ },
50
+ });
51
+ context.db.close();
52
+ this.log(`Generated architecture pack for map '${flags.map}'.`);
53
+ this.log(`Model: ${result.modelPath}`);
54
+ this.log(`Overrides: ${result.overridesPath}`);
55
+ this.log(`Baseline service map: ${result.baselineArtifacts.serviceMapPath}`);
56
+ this.log(`Baseline contracts: ${result.baselineArtifacts.contractsPath}`);
57
+ this.log(`Baseline architecture doc: ${result.baselineArtifacts.architectureDocPath}`);
58
+ if (result.indexDocPath) {
59
+ this.log(`Architecture index: ${result.indexDocPath}`);
60
+ }
61
+ if (result.generatedServices.length > 0) {
62
+ this.log(`Service deep dives: ${result.generatedServices.length}`);
63
+ }
64
+ this.log(`Validation: ${result.validation.valid ? 'pass' : 'fail'} (errors=${result.validation.errors.length}, warnings=${result.validation.warnings.length})`);
65
+ if (!result.validation.valid) {
66
+ this.error('Architecture validation failed. Resolve override/model issues and rerun.', { exit: 1 });
67
+ }
68
+ }
69
+ }
70
+ exports.default = ArchitectureGenerateCommand;
@@ -0,0 +1,70 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ const node_path_1 = __importDefault(require("node:path"));
7
+ const core_1 = require("@oclif/core");
8
+ const architecture_1 = require("../../lib/architecture");
9
+ const fs_1 = require("../../lib/fs");
10
+ const project_1 = require("../../lib/project");
11
+ class ArchitectureValidateCommand extends core_1.Command {
12
+ static description = 'Validate architecture model completeness and override integrity for a map';
13
+ static flags = {
14
+ map: core_1.Flags.string({ required: true, description: 'Map identifier' }),
15
+ };
16
+ async run() {
17
+ const { flags } = await this.parse(ArchitectureValidateCommand);
18
+ const context = (0, project_1.loadProject)(process.cwd());
19
+ const result = (0, architecture_1.validateArchitecture)({
20
+ mapId: flags.map,
21
+ db: context.db,
22
+ cwd: context.cwd,
23
+ });
24
+ const outDir = node_path_1.default.join(context.cwd, 'maps', flags.map, 'architecture');
25
+ const jsonPath = node_path_1.default.join(outDir, 'validation.json');
26
+ const mdPath = node_path_1.default.join(outDir, 'validation.md');
27
+ (0, fs_1.writeJsonFile)(jsonPath, result);
28
+ const lines = [
29
+ `# Architecture Validation: ${flags.map}`,
30
+ '',
31
+ `- Generated: ${result.generatedAt}`,
32
+ `- Valid: ${result.valid ? 'yes' : 'no'}`,
33
+ `- Errors: ${result.errors.length}`,
34
+ `- Warnings: ${result.warnings.length}`,
35
+ '',
36
+ ];
37
+ if (result.errors.length > 0) {
38
+ lines.push('## Errors');
39
+ lines.push('');
40
+ for (const err of result.errors) {
41
+ lines.push(`- ${err}`);
42
+ }
43
+ lines.push('');
44
+ }
45
+ if (result.warnings.length > 0) {
46
+ lines.push('## Warnings');
47
+ lines.push('');
48
+ for (const warning of result.warnings) {
49
+ lines.push(`- ${warning}`);
50
+ }
51
+ lines.push('');
52
+ }
53
+ (0, fs_1.writeTextFile)(mdPath, `${lines.join('\n')}\n`);
54
+ (0, project_1.recordRun)(context.db, 'architecture_validate', result.valid ? 'ok' : 'error', flags.map, {
55
+ validationPath: jsonPath,
56
+ errorCount: result.errors.length,
57
+ warningCount: result.warnings.length,
58
+ stats: result.stats,
59
+ });
60
+ context.db.close();
61
+ this.log(`Validated architecture for map '${flags.map}'.`);
62
+ this.log(`JSON: ${jsonPath}`);
63
+ this.log(`Markdown: ${mdPath}`);
64
+ this.log(`Result: ${result.valid ? 'pass' : 'fail'}`);
65
+ if (!result.valid) {
66
+ this.error('Architecture validation failed. Resolve errors and rerun.', { exit: 1 });
67
+ }
68
+ }
69
+ }
70
+ exports.default = ArchitectureValidateCommand;
@@ -70,6 +70,7 @@ class BootstrapQuickCommand extends core_1.Command {
70
70
  this.log('- ./scripts/sdx map create all-services --org <org>');
71
71
  this.log('- ./scripts/sdx map build all-services');
72
72
  }
73
+ this.log('- ./scripts/sdx architecture generate --map all-services');
73
74
  if (result.warnings.length > 0) {
74
75
  this.log('');
75
76
  this.log('Warnings:');
@@ -8,6 +8,19 @@ const node_path_1 = __importDefault(require("node:path"));
8
8
  const core_1 = require("@oclif/core");
9
9
  const fs_1 = require("../../lib/fs");
10
10
  const project_1 = require("../../lib/project");
11
+ function copyRecursive(source, target) {
12
+ const stat = node_fs_1.default.statSync(source);
13
+ if (stat.isDirectory()) {
14
+ (0, fs_1.ensureDir)(target);
15
+ const entries = node_fs_1.default.readdirSync(source, { withFileTypes: true });
16
+ for (const entry of entries) {
17
+ copyRecursive(node_path_1.default.join(source, entry.name), node_path_1.default.join(target, entry.name));
18
+ }
19
+ return;
20
+ }
21
+ const body = node_fs_1.default.readFileSync(source, 'utf8');
22
+ (0, fs_1.writeTextFile)(target, body);
23
+ }
11
24
  class PublishWikiCommand extends core_1.Command {
12
25
  static description = 'Export docs-first artifacts to a wiki-friendly directory';
13
26
  static flags = {
@@ -21,6 +34,9 @@ class PublishWikiCommand extends core_1.Command {
21
34
  node_path_1.default.join(context.cwd, 'maps', flags.map, 'contracts.md'),
22
35
  node_path_1.default.join(context.cwd, 'docs', 'architecture', `${flags.map}.md`),
23
36
  ];
37
+ const sourceDirs = [
38
+ node_path_1.default.join(context.cwd, 'docs', 'architecture', flags.map),
39
+ ];
24
40
  const wikiDir = node_path_1.default.join(context.cwd, 'wiki-export', flags.map);
25
41
  (0, fs_1.ensureDir)(wikiDir);
26
42
  for (const source of sourceFiles) {
@@ -28,8 +44,14 @@ class PublishWikiCommand extends core_1.Command {
28
44
  continue;
29
45
  }
30
46
  const target = node_path_1.default.join(wikiDir, node_path_1.default.basename(source));
31
- const body = node_fs_1.default.readFileSync(source, 'utf8');
32
- (0, fs_1.writeTextFile)(target, body);
47
+ copyRecursive(source, target);
48
+ }
49
+ for (const source of sourceDirs) {
50
+ if (!node_fs_1.default.existsSync(source)) {
51
+ continue;
52
+ }
53
+ const target = node_path_1.default.join(wikiDir, node_path_1.default.basename(source));
54
+ copyRecursive(source, target);
33
55
  }
34
56
  (0, project_1.recordRun)(context.db, 'publish_wiki', 'ok', flags.map, { wikiDir });
35
57
  context.db.close();
@@ -29,7 +29,8 @@ class StatusCommand extends core_1.Command {
29
29
  const mapDir = node_path_1.default.join(mapsDir, mapId);
30
30
  const hasScope = (0, fs_1.fileExists)(node_path_1.default.join(mapDir, 'scope.json'));
31
31
  const hasServiceMap = (0, fs_1.fileExists)(node_path_1.default.join(mapDir, 'service-map.json'));
32
- this.log(`- ${mapId}: scope=${hasScope ? 'yes' : 'no'}, service-map=${hasServiceMap ? 'yes' : 'no'}`);
32
+ const hasArchitectureModel = (0, fs_1.fileExists)(node_path_1.default.join(mapDir, 'architecture', 'model.json'));
33
+ this.log(`- ${mapId}: scope=${hasScope ? 'yes' : 'no'}, service-map=${hasServiceMap ? 'yes' : 'no'}, architecture=${hasArchitectureModel ? 'yes' : 'no'}`);
33
34
  }
34
35
  context.db.close();
35
36
  }
@@ -0,0 +1,793 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.getArchitectureOverridesPath = getArchitectureOverridesPath;
7
+ exports.ensureArchitectureOverridesFile = ensureArchitectureOverridesFile;
8
+ exports.loadArchitectureOverrides = loadArchitectureOverrides;
9
+ exports.buildArchitectureModel = buildArchitectureModel;
10
+ exports.validateArchitecture = validateArchitecture;
11
+ exports.generateArchitecturePack = generateArchitecturePack;
12
+ const node_fs_1 = __importDefault(require("node:fs"));
13
+ const node_path_1 = __importDefault(require("node:path"));
14
+ const zod_1 = require("zod");
15
+ const constants_1 = require("./constants");
16
+ const contracts_1 = require("./contracts");
17
+ const fileScan_1 = require("./fileScan");
18
+ const fs_1 = require("./fs");
19
+ const mapBuilder_1 = require("./mapBuilder");
20
+ const paths_1 = require("./paths");
21
+ const repoRegistry_1 = require("./repoRegistry");
22
+ const scope_1 = require("./scope");
23
+ const DATASTORE_KEYWORDS = ['postgres', 'mysql', 'mongodb', 'mongo', 'dynamodb', 'redis', 'cassandra', 'sqlite'];
24
+ const QUEUE_KEYWORDS = ['kafka', 'sqs', 'sns', 'rabbitmq', 'nats', 'pubsub', 'pulsar', 'kinesis'];
25
+ const OVERRIDES_SCHEMA = zod_1.z.object({
26
+ schemaVersion: zod_1.z.string().optional(),
27
+ generatedAt: zod_1.z.string().optional(),
28
+ mapId: zod_1.z.string().optional(),
29
+ serviceMetadata: zod_1.z
30
+ .record(zod_1.z.string(), zod_1.z.object({
31
+ owner: zod_1.z.string().optional(),
32
+ criticality: zod_1.z.enum(['low', 'medium', 'high', 'critical']).optional(),
33
+ businessContext: zod_1.z.string().optional(),
34
+ }))
35
+ .default({}),
36
+ assertedNodes: zod_1.z
37
+ .array(zod_1.z.object({
38
+ id: zod_1.z.string().min(1),
39
+ type: zod_1.z.enum(['service', 'repo', 'api', 'event', 'datastore', 'queue', 'team', 'external']),
40
+ label: zod_1.z.string().min(1),
41
+ metadata: zod_1.z.record(zod_1.z.string(), zod_1.z.unknown()).optional(),
42
+ }))
43
+ .default([]),
44
+ assertedEdges: zod_1.z
45
+ .array(zod_1.z.object({
46
+ from: zod_1.z.string().min(1),
47
+ to: zod_1.z.string().min(1),
48
+ relation: zod_1.z.enum(['calls', 'publishes', 'consumes', 'owns', 'depends_on']),
49
+ metadata: zod_1.z.record(zod_1.z.string(), zod_1.z.unknown()).optional(),
50
+ }))
51
+ .default([]),
52
+ suppressedEdges: zod_1.z
53
+ .array(zod_1.z.object({
54
+ from: zod_1.z.string().min(1),
55
+ to: zod_1.z.string().min(1),
56
+ relation: zod_1.z.enum(['calls', 'publishes', 'consumes', 'owns', 'depends_on']),
57
+ }))
58
+ .default([]),
59
+ });
60
+ function toNodeRef(input, nodes) {
61
+ if (nodes.has(input)) {
62
+ return input;
63
+ }
64
+ const prefixes = [
65
+ 'service',
66
+ 'repo',
67
+ 'external',
68
+ 'datastore',
69
+ 'queue',
70
+ 'team',
71
+ 'api',
72
+ 'event',
73
+ ];
74
+ for (const prefix of prefixes) {
75
+ const ref = `${prefix}:${input}`;
76
+ if (nodes.has(ref)) {
77
+ return ref;
78
+ }
79
+ }
80
+ return undefined;
81
+ }
82
+ function dedupeNodes(nodes) {
83
+ const byId = new Map();
84
+ for (const node of nodes) {
85
+ const existing = byId.get(node.id);
86
+ if (!existing) {
87
+ byId.set(node.id, node);
88
+ continue;
89
+ }
90
+ byId.set(node.id, {
91
+ ...existing,
92
+ label: node.label,
93
+ type: node.type,
94
+ metadata: {
95
+ ...(existing.metadata ?? {}),
96
+ ...(node.metadata ?? {}),
97
+ },
98
+ provenance: node.provenance.source === 'override' || existing.provenance.source !== 'override' ? node.provenance : existing.provenance,
99
+ });
100
+ }
101
+ return [...byId.values()].sort((a, b) => a.id.localeCompare(b.id));
102
+ }
103
+ function dedupeEdges(edges) {
104
+ const byKey = new Map();
105
+ for (const edge of edges) {
106
+ const key = `${edge.from}|${edge.to}|${edge.relation}`;
107
+ const existing = byKey.get(key);
108
+ if (!existing) {
109
+ byKey.set(key, edge);
110
+ continue;
111
+ }
112
+ byKey.set(key, {
113
+ ...existing,
114
+ metadata: {
115
+ ...(existing.metadata ?? {}),
116
+ ...(edge.metadata ?? {}),
117
+ },
118
+ provenance: edge.provenance.source === 'override' || existing.provenance.source !== 'override' ? edge.provenance : existing.provenance,
119
+ });
120
+ }
121
+ return [...byKey.values()].sort((a, b) => {
122
+ const left = `${a.from}|${a.to}|${a.relation}`;
123
+ const right = `${b.from}|${b.to}|${b.relation}`;
124
+ return left.localeCompare(right);
125
+ });
126
+ }
127
+ function keywordMatches(allFiles, keywords) {
128
+ const out = new Map();
129
+ for (const filePath of allFiles) {
130
+ const lower = filePath.toLowerCase();
131
+ for (const keyword of keywords) {
132
+ if (!lower.includes(keyword)) {
133
+ continue;
134
+ }
135
+ const previous = out.get(keyword) ?? [];
136
+ if (previous.length < 3) {
137
+ previous.push(filePath);
138
+ }
139
+ out.set(keyword, previous);
140
+ }
141
+ }
142
+ return out;
143
+ }
144
+ function inferInfra(repo, nodes, edges) {
145
+ if (!repo.localPath || !node_fs_1.default.existsSync(repo.localPath)) {
146
+ return;
147
+ }
148
+ const files = (0, fileScan_1.listFilesRecursive)(repo.localPath);
149
+ const serviceNodeId = `service:${repo.name}`;
150
+ const dataStoreHits = keywordMatches(files, DATASTORE_KEYWORDS);
151
+ for (const [keyword, hitFiles] of dataStoreHits.entries()) {
152
+ const nodeId = `datastore:${keyword}`;
153
+ nodes.push({
154
+ id: nodeId,
155
+ type: 'datastore',
156
+ label: keyword,
157
+ metadata: {
158
+ inferredFrom: hitFiles.map((candidate) => node_path_1.default.relative(repo.localPath, candidate)),
159
+ },
160
+ provenance: {
161
+ source: 'inferred',
162
+ confidence: 0.6,
163
+ evidence: [`repo:${repo.name}:keyword:${keyword}`],
164
+ },
165
+ });
166
+ edges.push({
167
+ from: serviceNodeId,
168
+ to: nodeId,
169
+ relation: 'depends_on',
170
+ provenance: {
171
+ source: 'inferred',
172
+ confidence: 0.6,
173
+ evidence: [`repo:${repo.name}:keyword:${keyword}`],
174
+ },
175
+ metadata: {
176
+ inferredFrom: 'file_keyword_scan',
177
+ },
178
+ });
179
+ }
180
+ const queueHits = keywordMatches(files, QUEUE_KEYWORDS);
181
+ for (const [keyword, hitFiles] of queueHits.entries()) {
182
+ const nodeId = `queue:${keyword}`;
183
+ nodes.push({
184
+ id: nodeId,
185
+ type: 'queue',
186
+ label: keyword,
187
+ metadata: {
188
+ inferredFrom: hitFiles.map((candidate) => node_path_1.default.relative(repo.localPath, candidate)),
189
+ },
190
+ provenance: {
191
+ source: 'inferred',
192
+ confidence: 0.6,
193
+ evidence: [`repo:${repo.name}:keyword:${keyword}`],
194
+ },
195
+ });
196
+ edges.push({
197
+ from: serviceNodeId,
198
+ to: nodeId,
199
+ relation: 'depends_on',
200
+ provenance: {
201
+ source: 'inferred',
202
+ confidence: 0.6,
203
+ evidence: [`repo:${repo.name}:keyword:${keyword}`],
204
+ },
205
+ metadata: {
206
+ inferredFrom: 'file_keyword_scan',
207
+ },
208
+ });
209
+ }
210
+ }
211
+ function contractNodeId(contract) {
212
+ const prefix = contract.type === 'asyncapi' ? 'event' : 'api';
213
+ return `${prefix}:${contract.repo}:${contract.path}`;
214
+ }
215
+ function addConsumerSignals(contracts, baseEdges, nodes, edges) {
216
+ const byRepo = new Map();
217
+ for (const contract of contracts) {
218
+ const entries = byRepo.get(contract.repo) ?? [];
219
+ entries.push(contract);
220
+ byRepo.set(contract.repo, entries);
221
+ }
222
+ const hasNode = new Set(nodes.map((node) => node.id));
223
+ for (const relation of baseEdges) {
224
+ if (relation.relation !== 'depends_on' || !relation.from.startsWith('service:') || !relation.to.startsWith('service:')) {
225
+ continue;
226
+ }
227
+ const sourceRepo = relation.from.slice('service:'.length);
228
+ const targetRepo = relation.to.slice('service:'.length);
229
+ const targetContracts = byRepo.get(targetRepo) ?? [];
230
+ if (targetContracts.length === 0) {
231
+ continue;
232
+ }
233
+ edges.push({
234
+ from: relation.from,
235
+ to: relation.to,
236
+ relation: 'calls',
237
+ provenance: {
238
+ source: 'inferred',
239
+ confidence: 0.7,
240
+ evidence: [`dependency:${sourceRepo}->${targetRepo}`],
241
+ },
242
+ metadata: {
243
+ inferredFrom: 'dependency_plus_contract',
244
+ },
245
+ });
246
+ for (const contract of targetContracts) {
247
+ const targetNodeId = contractNodeId(contract);
248
+ if (!hasNode.has(targetNodeId)) {
249
+ continue;
250
+ }
251
+ edges.push({
252
+ from: relation.from,
253
+ to: targetNodeId,
254
+ relation: contract.type === 'asyncapi' ? 'consumes' : 'calls',
255
+ provenance: {
256
+ source: 'inferred',
257
+ confidence: 0.55,
258
+ evidence: [`dependency:${sourceRepo}->${targetRepo}`, `contract:${contract.path}`],
259
+ },
260
+ metadata: {
261
+ inferredFrom: 'dependency_plus_contract',
262
+ },
263
+ });
264
+ }
265
+ }
266
+ }
267
+ function defaultOverrides(mapId) {
268
+ return {
269
+ schemaVersion: constants_1.SCHEMA_VERSION,
270
+ generatedAt: new Date().toISOString(),
271
+ mapId,
272
+ serviceMetadata: {},
273
+ assertedNodes: [],
274
+ assertedEdges: [],
275
+ suppressedEdges: [],
276
+ };
277
+ }
278
+ function getArchitectureOverridesPath(mapId, cwd = process.cwd()) {
279
+ return node_path_1.default.join((0, paths_1.getMapDir)(mapId, cwd), 'architecture-overrides.json');
280
+ }
281
+ function ensureArchitectureOverridesFile(mapId, cwd = process.cwd()) {
282
+ const overridesPath = getArchitectureOverridesPath(mapId, cwd);
283
+ if ((0, fs_1.fileExists)(overridesPath)) {
284
+ return overridesPath;
285
+ }
286
+ (0, fs_1.writeJsonFile)(overridesPath, defaultOverrides(mapId));
287
+ return overridesPath;
288
+ }
289
+ function loadArchitectureOverrides(mapId, cwd = process.cwd()) {
290
+ const overridesPath = ensureArchitectureOverridesFile(mapId, cwd);
291
+ const payload = (0, fs_1.readJsonFile)(overridesPath);
292
+ const parsed = OVERRIDES_SCHEMA.parse(payload);
293
+ return {
294
+ schemaVersion: parsed.schemaVersion ?? constants_1.SCHEMA_VERSION,
295
+ generatedAt: parsed.generatedAt ?? new Date().toISOString(),
296
+ mapId: parsed.mapId ?? mapId,
297
+ serviceMetadata: parsed.serviceMetadata,
298
+ assertedNodes: parsed.assertedNodes,
299
+ assertedEdges: parsed.assertedEdges,
300
+ suppressedEdges: parsed.suppressedEdges,
301
+ };
302
+ }
303
+ function applyOverrides(overrides, modelNodes, modelEdges) {
304
+ const errors = [];
305
+ const warnings = [];
306
+ const nodes = [...modelNodes];
307
+ const edges = [...modelEdges];
308
+ const nodeMap = new Map(nodes.map((node) => [node.id, node]));
309
+ for (const [serviceName, metadata] of Object.entries(overrides.serviceMetadata)) {
310
+ const id = `service:${serviceName}`;
311
+ const node = nodeMap.get(id);
312
+ if (!node) {
313
+ errors.push(`Override serviceMetadata references unknown service '${serviceName}'.`);
314
+ continue;
315
+ }
316
+ node.metadata = {
317
+ ...(node.metadata ?? {}),
318
+ ...metadata,
319
+ };
320
+ node.provenance = {
321
+ source: 'declared',
322
+ confidence: Math.max(node.provenance.confidence, 0.9),
323
+ evidence: [...new Set([...node.provenance.evidence, `override:serviceMetadata:${serviceName}`])],
324
+ };
325
+ nodeMap.set(id, node);
326
+ }
327
+ for (const assertedNode of overrides.assertedNodes) {
328
+ const existing = nodeMap.get(assertedNode.id);
329
+ if (existing) {
330
+ if (existing.type !== assertedNode.type) {
331
+ errors.push(`assertedNode '${assertedNode.id}' conflicts with existing type '${existing.type}' (override type '${assertedNode.type}').`);
332
+ continue;
333
+ }
334
+ existing.label = assertedNode.label;
335
+ existing.metadata = {
336
+ ...(existing.metadata ?? {}),
337
+ ...(assertedNode.metadata ?? {}),
338
+ };
339
+ existing.provenance = {
340
+ source: 'override',
341
+ confidence: 0.98,
342
+ evidence: [...new Set([...existing.provenance.evidence, `override:assertedNode:${assertedNode.id}`])],
343
+ };
344
+ nodeMap.set(assertedNode.id, existing);
345
+ continue;
346
+ }
347
+ const created = {
348
+ ...assertedNode,
349
+ provenance: {
350
+ source: 'override',
351
+ confidence: 0.98,
352
+ evidence: [`override:assertedNode:${assertedNode.id}`],
353
+ },
354
+ };
355
+ nodeMap.set(created.id, created);
356
+ }
357
+ const resolvedNodes = [...nodeMap.values()];
358
+ const resolvedNodeMap = new Map(resolvedNodes.map((node) => [node.id, node]));
359
+ const resolvedEdges = [...edges];
360
+ for (const assertedEdge of overrides.assertedEdges) {
361
+ const from = toNodeRef(assertedEdge.from, resolvedNodeMap);
362
+ const to = toNodeRef(assertedEdge.to, resolvedNodeMap);
363
+ if (!from || !to) {
364
+ errors.push(`assertedEdge '${assertedEdge.from} -> ${assertedEdge.to}' references unknown node(s). Add assertedNodes first or use canonical IDs.`);
365
+ continue;
366
+ }
367
+ resolvedEdges.push({
368
+ from,
369
+ to,
370
+ relation: assertedEdge.relation,
371
+ metadata: {
372
+ ...(assertedEdge.metadata ?? {}),
373
+ inferredFrom: 'override_asserted_edge',
374
+ },
375
+ provenance: {
376
+ source: 'override',
377
+ confidence: 0.98,
378
+ evidence: [`override:assertedEdge:${from}|${to}|${assertedEdge.relation}`],
379
+ },
380
+ });
381
+ }
382
+ for (const suppressed of overrides.suppressedEdges) {
383
+ const from = toNodeRef(suppressed.from, resolvedNodeMap);
384
+ const to = toNodeRef(suppressed.to, resolvedNodeMap);
385
+ if (!from || !to) {
386
+ errors.push(`suppressedEdge '${suppressed.from} -> ${suppressed.to}' references unknown node(s). Use canonical IDs or add assertedNodes first.`);
387
+ continue;
388
+ }
389
+ const before = resolvedEdges.length;
390
+ for (let i = resolvedEdges.length - 1; i >= 0; i -= 1) {
391
+ const edge = resolvedEdges[i];
392
+ if (edge.from === from && edge.to === to && edge.relation === suppressed.relation) {
393
+ resolvedEdges.splice(i, 1);
394
+ }
395
+ }
396
+ if (before === resolvedEdges.length) {
397
+ warnings.push(`suppressedEdge '${from} -> ${to} (${suppressed.relation})' did not match any existing edge.`);
398
+ }
399
+ }
400
+ return {
401
+ nodes: dedupeNodes([...resolvedNodes]),
402
+ edges: dedupeEdges(resolvedEdges),
403
+ errors,
404
+ warnings,
405
+ };
406
+ }
407
+ function buildArchitectureModel(mapId, db, cwd = process.cwd()) {
408
+ const scope = (0, scope_1.loadScopeManifest)(mapId, cwd);
409
+ const repoMap = new Map((0, repoRegistry_1.listAllRepos)(db).map((repo) => [repo.name, repo]));
410
+ const serviceMap = (0, mapBuilder_1.buildServiceMapArtifact)(mapId, scope, repoMap);
411
+ const contracts = (0, contracts_1.extractContracts)(mapId, scope, repoMap);
412
+ const nodes = serviceMap.nodes.map((node) => ({
413
+ ...node,
414
+ provenance: {
415
+ source: 'inferred',
416
+ confidence: node.type === 'service' || node.type === 'repo' ? 0.95 : 0.8,
417
+ evidence: [`service-map:${node.id}`],
418
+ },
419
+ }));
420
+ const edges = serviceMap.edges.map((edge) => ({
421
+ ...edge,
422
+ provenance: {
423
+ source: 'inferred',
424
+ confidence: edge.relation === 'owns' ? 0.95 : 0.75,
425
+ evidence: [`service-map:${edge.from}->${edge.to}:${edge.relation}`],
426
+ },
427
+ }));
428
+ const warnings = [];
429
+ for (const repoName of scope.effective) {
430
+ const repo = repoMap.get(repoName);
431
+ if (!repo?.localPath || !node_fs_1.default.existsSync(repo.localPath)) {
432
+ warnings.push(`Repository '${repoName}' is in scope but has no local path; deep inference may be partial.`);
433
+ continue;
434
+ }
435
+ inferInfra(repo, nodes, edges);
436
+ }
437
+ addConsumerSignals(contracts, serviceMap.edges, nodes, edges);
438
+ const overridesPath = ensureArchitectureOverridesFile(mapId, cwd);
439
+ const overrides = loadArchitectureOverrides(mapId, cwd);
440
+ const applied = applyOverrides(overrides, dedupeNodes(nodes), dedupeEdges(edges));
441
+ warnings.push(...applied.warnings);
442
+ const confidences = applied.edges.map((edge) => edge.provenance.confidence);
443
+ const coverageConfidence = confidences.length === 0 ? 0 : Number((confidences.reduce((sum, c) => sum + c, 0) / confidences.length).toFixed(3));
444
+ return {
445
+ schemaVersion: constants_1.SCHEMA_VERSION,
446
+ generatedAt: new Date().toISOString(),
447
+ mapId,
448
+ org: scope.org,
449
+ overridesPath: node_path_1.default.relative(cwd, overridesPath),
450
+ coverageConfidence,
451
+ errors: applied.errors,
452
+ warnings,
453
+ nodes: applied.nodes,
454
+ edges: applied.edges,
455
+ };
456
+ }
457
/**
 * Validate an architecture model against the contracts extracted for the map.
 *
 * Produces warnings for services that have no non-ownership communication
 * edges and for services with no detected contract files; model-level error
 * signals are promoted to validation errors.
 *
 * @param {object} model - Architecture model (nodes, edges, errors, mapId).
 * @param {Array<object>} contracts - Extracted contract records (record.repo names the owning service).
 * @returns {object} Validation report with `valid`, errors/warnings and edge stats.
 */
function modelValidation(model, contracts) {
    const warnings = [];
    // Services are identified by contract records whose repo matches the service name.
    const contractRepos = new Set(contracts.map((record) => record.repo));
    const services = model.nodes.filter((node) => node.type === 'service');
    for (const service of services) {
        const communicates = model.edges.some((edge) => (edge.from === service.id || edge.to === service.id) && edge.relation !== 'owns');
        if (!communicates) {
            warnings.push(`Service '${service.label}' has no communication edges outside ownership links.`);
        }
        if (!contractRepos.has(service.id.replace('service:', ''))) {
            warnings.push(`Service '${service.label}' has no detected contract files.`);
        }
    }
    // Model-level error signals become validation errors verbatim.
    const errors = [...model.errors];
    // Tally edge provenance in a single pass; a source is either inferred or override.
    let inferredEdges = 0;
    let overrideEdges = 0;
    for (const edge of model.edges) {
        if (edge.provenance.source === 'inferred') {
            inferredEdges += 1;
        }
        else if (edge.provenance.source === 'override') {
            overrideEdges += 1;
        }
    }
    return {
        schemaVersion: constants_1.SCHEMA_VERSION,
        generatedAt: new Date().toISOString(),
        mapId: model.mapId,
        valid: errors.length === 0,
        errors,
        warnings,
        stats: {
            serviceCount: services.length,
            edgeCount: model.edges.length,
            inferredEdges,
            overrideEdges,
        },
    };
}
493
// Sanitize an arbitrary graph node id into a Mermaid-safe identifier:
// every character outside [A-Za-z0-9_] becomes an underscore.
function mermaidId(nodeId) {
    return nodeId
        .split('')
        .map((ch) => (/[A-Za-z0-9_]/.test(ch) ? ch : '_'))
        .join('');
}
496
/**
 * Render a node/edge graph as Mermaid `flowchart LR` source.
 *
 * Fix: node labels and edge relations are interpolated inside double-quoted
 * Mermaid strings; a literal `"` in either would previously break the diagram
 * syntax. Quotes are now escaped with Mermaid's `#quot;` entity code, which is
 * a no-op for quote-free labels (the common case).
 *
 * @param {Array<object>} nodes - Nodes with `id` and `label`.
 * @param {Array<object>} edges - Edges with `from`, `to` and `relation`.
 * @param {string} [title] - Optional title emitted as a Mermaid comment.
 * @returns {string} Mermaid source terminated by a trailing newline.
 */
function renderMermaid(nodes, edges, title) {
    // Mermaid entity code for a double quote inside a quoted label.
    const escapeQuotes = (text) => String(text).replaceAll('"', '#quot;');
    const lines = ['flowchart LR'];
    if (title) {
        lines.push(` %% ${title}`);
    }
    for (const node of nodes) {
        lines.push(` ${mermaidId(node.id)}["${escapeQuotes(node.label)}"]`);
    }
    for (const edge of edges) {
        lines.push(` ${mermaidId(edge.from)} -->|"${escapeQuotes(edge.relation)}"| ${mermaidId(edge.to)}`);
    }
    return `${lines.join('\n')}\n`;
}
509
/**
 * Render the top-level architecture pack index document as Markdown.
 *
 * @param {string} mapId - Map identifier used in the document title.
 * @param {object} model - Architecture model (generatedAt, coverageConfidence, overridesPath, nodes, edges).
 * @param {object} validation - Validation report (valid, errors, warnings).
 * @param {string} systemLandscapeMermaid - Mermaid source for the system landscape diagram.
 * @param {string} containerMermaid - Mermaid source for the container communication diagram.
 * @param {Array<object>} services - Service nodes used to build the deep-dive link list.
 * @returns {string} Markdown document terminated by a trailing newline.
 */
function renderArchitectureIndexMarkdown(mapId, model, validation, systemLandscapeMermaid, containerMermaid, services) {
    const out = [];
    out.push(`# Architecture Pack: ${mapId}`, '');
    out.push(`- Generated: ${model.generatedAt}`);
    out.push(`- Coverage confidence: ${model.coverageConfidence}`);
    out.push(`- Override file: ${model.overridesPath}`);
    out.push(`- Services: ${services.length}`);
    out.push(`- Nodes: ${model.nodes.length}`);
    out.push(`- Edges: ${model.edges.length}`);
    out.push('', '## Facts vs Inferred', '');
    out.push('- Facts: repository scope, discovered contracts, registered repos/local paths.');
    out.push('- Inferred: communication edges from dependency/config signals.');
    out.push('- Overrides: explicit asserted/suppressed relationships from architecture-overrides.json.');
    out.push('', '## Validation', '');
    out.push(validation.valid ? '- Valid: yes' : '- Valid: no');
    out.push(`- Errors: ${validation.errors.length}`);
    out.push(`- Warnings: ${validation.warnings.length}`);
    out.push('');
    if (validation.errors.length > 0) {
        out.push('### Validation Errors', '');
        for (const message of validation.errors) {
            out.push(`- ${message}`);
        }
        out.push('');
    }
    if (validation.warnings.length > 0) {
        out.push('### Validation Warnings', '');
        // Only the first 30 warnings are listed, to keep the index readable.
        for (const message of validation.warnings.slice(0, 30)) {
            out.push(`- ${message}`);
        }
        out.push('');
    }
    out.push('## System Landscape', '');
    out.push('```mermaid', systemLandscapeMermaid.trimEnd(), '```', '');
    out.push('## Container Communication', '');
    out.push('```mermaid', containerMermaid.trimEnd(), '```', '');
    out.push('## Service Deep Dives', '');
    for (const service of services) {
        out.push(`- [${service.label}](./services/${service.id.replace('service:', '')}.md)`);
    }
    out.push('');
    return `${out.join('\n')}\n`;
}
570
/**
 * Render the Markdown deep-dive document for a single service.
 *
 * @param {object} service - Service node from the model (id has the `service:` prefix).
 * @param {Array<object>} contracts - All extracted contract records for the map.
 * @param {string} contextDiagram - Mermaid source for the service context diagram.
 * @param {string} contractDiagram - Mermaid source for the contract interaction diagram.
 * @param {object} model - Full architecture model (nodes, edges, mapId, coverageConfidence).
 * @returns {string} Markdown document terminated by a trailing newline.
 */
function renderServiceDoc(service, contracts, contextDiagram, contractDiagram, model) {
    // Strip the node-id prefix to recover the plain service/repo name.
    const serviceId = service.id.replace('service:', '');
    // Contracts owned by this service: contract.repo matches the service name.
    const serviceContracts = contracts.filter((contract) => contract.repo === serviceId);
    // Every model edge touching this service, in either direction.
    const relatedEdges = model.edges.filter((edge) => edge.from === service.id || edge.to === service.id);
    const repoNode = model.nodes.find((node) => node.id === `repo:${serviceId}`);
    const repoPath = String(repoNode?.metadata?.['localPath'] ?? 'not-registered');
    const repoHtmlUrl = typeof repoNode?.metadata?.['htmlUrl'] === 'string' ? String(repoNode.metadata['htmlUrl']) : undefined;
    // Fall back to 'main' when repo metadata has no non-empty default branch.
    const defaultBranch = typeof repoNode?.metadata?.['defaultBranch'] === 'string' && String(repoNode.metadata['defaultBranch']).length > 0
        ? String(repoNode.metadata['defaultBranch'])
        : 'main';
    // contract node id (api:*/event:*) -> sorted, de-duplicated consumer service names.
    const consumersByContract = new Map();
    for (const edge of model.edges) {
        // Only service -> contract edges count as consumption signals.
        if (!edge.to.startsWith('api:') && !edge.to.startsWith('event:')) {
            continue;
        }
        if (!edge.from.startsWith('service:')) {
            continue;
        }
        const consumer = edge.from.replace('service:', '');
        const current = consumersByContract.get(edge.to) ?? [];
        if (!current.includes(consumer)) {
            current.push(consumer);
        }
        consumersByContract.set(edge.to, current.sort((a, b) => a.localeCompare(b)));
    }
    // Render a contract's source as a GitHub blob link when the repo URL is
    // known; otherwise fall back to the inline-code source pointer.
    function contractSource(contract) {
        if (!repoHtmlUrl) {
            return `\`${contract.sourcePointer}\``;
        }
        const url = `${repoHtmlUrl.replace(/\/$/, '')}/blob/${defaultBranch}/${contract.path}`;
        return `[${contract.path}](${url})`;
    }
    const lines = [
        `# Service Architecture: ${service.label}`,
        '',
        `- Generated: ${model.generatedAt}`,
        `- Map: ${model.mapId}`,
        `- Coverage confidence: ${model.coverageConfidence}`,
        `- Owner: ${String(service.metadata?.['owner'] ?? 'unknown')}`,
        `- Criticality: ${String(service.metadata?.['criticality'] ?? 'unknown')}`,
        `- Business context: ${String(service.metadata?.['businessContext'] ?? 'not declared')}`,
        `- Repo path: ${repoPath}`,
        '',
        '## Facts vs Inferred',
        '',
        '- Facts: service membership, contract files, explicit overrides.',
        '- Inferred: integration relationships from dependencies and config signals.',
        '',
        '## Service Context Diagram',
        '',
        '```mermaid',
        contextDiagram.trimEnd(),
        '```',
        '',
        '## Contract Interaction Diagram',
        '',
        '```mermaid',
        contractDiagram.trimEnd(),
        '```',
        '',
        '## Contract Catalog',
        '',
        '| Type | Contract | Version | Compatibility | Producers | Consumers |',
        '|---|---|---|---|---|---|',
        // One table row per owned contract; consumers come from the edge scan above.
        ...serviceContracts.map((contract) => {
            const consumers = consumersByContract.get(contractNodeId(contract)) ?? [];
            return `| ${contract.type} | ${contractSource(contract)} | ${contract.version ?? '-'} | ${contract.compatibilityStatus} | ${contract.producers.join(', ') || '-'} | ${consumers.join(', ') || '-'} |`;
        }),
        '',
        '## Migration Guidance',
        '',
        '- Review the source contract docs linked above for rollout and compatibility instructions.',
        '- Validate all consuming services against changed contract versions before cutover.',
        '',
        '## Integration Signals',
        '',
        // Provenance makes each signal auditable (inferred vs override, with confidence).
        ...relatedEdges.map((edge) => `- ${edge.from} ${edge.relation} ${edge.to} (source=${edge.provenance.source}, confidence=${edge.provenance.confidence})`),
        '',
    ];
    return `${lines.join('\n')}\n`;
}
651
// Return the model's service nodes sorted by label (locale-aware).
function serviceNodes(model) {
    const services = model.nodes.filter((node) => node.type === 'service');
    services.sort((a, b) => a.label.localeCompare(b.label));
    return services;
}
656
// Org-level view: services, externals, datastores, queues and teams only,
// with edges restricted to the surviving nodes.
function renderSystemLandscape(model) {
    const keep = new Set(['service', 'external', 'datastore', 'queue', 'team']);
    const nodes = [];
    const visibleIds = new Set();
    for (const node of model.nodes) {
        if (keep.has(node.type)) {
            nodes.push(node);
            visibleIds.add(node.id);
        }
    }
    const edges = model.edges.filter((edge) => visibleIds.has(edge.from) && visibleIds.has(edge.to));
    return renderMermaid(nodes, edges, 'System Landscape');
}
663
// Container-level view: adds api/event contract nodes to the landscape set
// (but drops teams), with edges restricted to the surviving nodes.
function renderContainerCommunication(model) {
    const visibleTypes = ['service', 'api', 'event', 'datastore', 'queue', 'external'];
    const nodes = model.nodes.filter((node) => visibleTypes.includes(node.type));
    const visibleIds = new Set();
    for (const node of nodes) {
        visibleIds.add(node.id);
    }
    const edges = model.edges.filter((edge) => visibleIds.has(edge.from) && visibleIds.has(edge.to));
    return renderMermaid(nodes, edges, 'Container Communication');
}
670
// Service context view: the focus service plus its direct neighbors,
// hiding contract (api/event) and repo nodes.
function renderServiceContextDiagram(model, serviceId) {
    const focus = `service:${serviceId}`;
    const neighborhood = new Set([focus]);
    for (const edge of model.edges) {
        if (edge.from === focus) {
            neighborhood.add(edge.to);
        }
        else if (edge.to === focus) {
            neighborhood.add(edge.from);
        }
    }
    const hiddenTypes = new Set(['api', 'event', 'repo']);
    const nodes = model.nodes.filter((node) => neighborhood.has(node.id) && !hiddenTypes.has(node.type));
    const visibleIds = new Set(nodes.map((node) => node.id));
    const edges = model.edges.filter((edge) => visibleIds.has(edge.from) && visibleIds.has(edge.to));
    return renderMermaid(nodes, edges, `Service Context: ${serviceId}`);
}
687
// Contract interaction view: the focus service, the api/event contracts it
// produces, and every service consuming one of those contracts.
function renderServiceContractsDiagram(model, serviceId) {
    const producerId = `service:${serviceId}`;
    const isContractNode = (id) => id.startsWith('api:') || id.startsWith('event:');
    const ownedContracts = new Set(model.edges
        .filter((edge) => edge.from === producerId && isContractNode(edge.to))
        .map((edge) => edge.to));
    const included = new Set([producerId, ...ownedContracts]);
    for (const edge of model.edges) {
        // Any node pointing at an owned contract is a consumer of it.
        if (ownedContracts.has(edge.to)) {
            included.add(edge.from);
        }
    }
    const nodes = model.nodes.filter((node) => included.has(node.id));
    const visibleIds = new Set(nodes.map((node) => node.id));
    const edges = model.edges.filter((edge) => visibleIds.has(edge.from) && visibleIds.has(edge.to));
    return renderMermaid(nodes, edges, `Service Contracts: ${serviceId}`);
}
710
/**
 * Build the architecture model for a map and validate it against the
 * contracts extracted for the same scope.
 *
 * @param {object} options - { mapId, db, cwd? }.
 * @returns {object} Validation report from modelValidation.
 */
function validateArchitecture(options) {
    const workingDir = options.cwd ?? process.cwd();
    const { mapId, db } = options;
    // Build the model first: it also ensures the overrides file exists on disk.
    const model = buildArchitectureModel(mapId, db, workingDir);
    const scopeManifest = (0, scope_1.loadScopeManifest)(mapId, workingDir);
    const reposByName = new Map();
    for (const repo of (0, repoRegistry_1.listAllRepos)(db)) {
        reposByName.set(repo.name, repo);
    }
    const contractRecords = (0, contracts_1.extractContracts)(mapId, scopeManifest, reposByName);
    return modelValidation(model, contractRecords);
}
719
/**
 * Generate the architecture pack for a map: model/validation JSON artifacts,
 * Mermaid diagram sources, the org-level index document and (unless
 * depth === 'org') one deep-dive document per service.
 *
 * @param {object} options - { mapId, db, cwd?, depth?, serviceId? }.
 *   depth defaults to 'full'; serviceId restricts output to one service and
 *   skips the org-level index/diagram files.
 * @returns {object} Paths and metadata for everything that was written.
 * @throws {Error} When options.serviceId does not match any service in the map.
 */
function generateArchitecturePack(options) {
    const cwd = options.cwd ?? process.cwd();
    const depth = options.depth ?? 'full';
    const mapId = options.mapId;
    const model = buildArchitectureModel(mapId, options.db, cwd);
    const scope = (0, scope_1.loadScopeManifest)(mapId, cwd);
    const repoMap = new Map((0, repoRegistry_1.listAllRepos)(options.db).map((repo) => [repo.name, repo]));
    const contracts = (0, contracts_1.extractContracts)(mapId, scope, repoMap);
    const validation = modelValidation(model, contracts);
    // Machine-readable artifacts live under the map dir; human docs under docs/.
    const mapArchitectureDir = node_path_1.default.join((0, paths_1.getMapDir)(mapId, cwd), 'architecture');
    const docsDir = node_path_1.default.join(cwd, 'docs', 'architecture', mapId);
    const diagramsDir = node_path_1.default.join(docsDir, 'diagrams');
    const servicesDocsDir = node_path_1.default.join(docsDir, 'services');
    const modelPath = node_path_1.default.join(mapArchitectureDir, 'model.json');
    const validationPath = node_path_1.default.join(mapArchitectureDir, 'validation.json');
    // Guarantees the overrides file exists so operators can edit it later.
    const overridesPath = ensureArchitectureOverridesFile(mapId, cwd);
    (0, fs_1.writeJsonFile)(modelPath, model);
    (0, fs_1.writeJsonFile)(validationPath, validation);
    const systemLandscape = renderSystemLandscape(model);
    const containerCommunication = renderContainerCommunication(model);
    const systemLandscapePath = node_path_1.default.join(diagramsDir, 'system-landscape.mmd');
    const containerPath = node_path_1.default.join(diagramsDir, 'container-communication.mmd');
    const output = {
        modelPath,
        overridesPath,
        // Pointers to the pre-existing baseline artifacts for this map.
        baselineArtifacts: {
            serviceMapPath: node_path_1.default.join(cwd, 'maps', mapId, 'service-map.json'),
            contractsPath: node_path_1.default.join(cwd, 'maps', mapId, 'contracts.json'),
            architectureDocPath: node_path_1.default.join(cwd, 'docs', 'architecture', `${mapId}.md`),
        },
        serviceDocPaths: [],
        generatedServices: [],
        generatedAt: model.generatedAt,
        validation,
        indexDocPath: undefined,
        overviewPath: undefined,
    };
    // Org-level outputs are only written for a whole-map run.
    if (!options.serviceId) {
        (0, fs_1.writeTextFile)(systemLandscapePath, systemLandscape);
        (0, fs_1.writeTextFile)(containerPath, containerCommunication);
        const services = serviceNodes(model);
        const indexMarkdown = renderArchitectureIndexMarkdown(mapId, model, validation, systemLandscape, containerCommunication, services);
        const indexDocPath = node_path_1.default.join(docsDir, 'index.md');
        const overviewPath = node_path_1.default.join(mapArchitectureDir, 'overview.md');
        // The same markdown is written to both the docs tree and the map dir.
        (0, fs_1.writeTextFile)(indexDocPath, indexMarkdown);
        (0, fs_1.writeTextFile)(overviewPath, indexMarkdown);
        output.indexDocPath = indexDocPath;
        output.overviewPath = overviewPath;
    }
    // Per-service deep dives are skipped entirely at depth 'org'.
    if (depth !== 'org') {
        const services = serviceNodes(model)
            .map((node) => node.id.replace('service:', ''))
            .filter((serviceId) => (options.serviceId ? serviceId === options.serviceId : true));
        if (options.serviceId && services.length === 0) {
            throw new Error(`Unknown service '${options.serviceId}' for map '${mapId}'.`);
        }
        for (const serviceId of services) {
            const serviceNode = model.nodes.find((node) => node.id === `service:${serviceId}`);
            if (!serviceNode) {
                continue;
            }
            const contextMermaid = renderServiceContextDiagram(model, serviceId);
            const contractsMermaid = renderServiceContractsDiagram(model, serviceId);
            const serviceDoc = renderServiceDoc(serviceNode, contracts, contextMermaid, contractsMermaid, model);
            const serviceDocPath = node_path_1.default.join(servicesDocsDir, `${serviceId}.md`);
            // Raw .mmd diagram sources sit in a per-service subdirectory.
            const serviceDiagramDir = node_path_1.default.join(servicesDocsDir, serviceId);
            (0, fs_1.writeTextFile)(serviceDocPath, serviceDoc);
            (0, fs_1.writeTextFile)(node_path_1.default.join(serviceDiagramDir, 'context.mmd'), contextMermaid);
            (0, fs_1.writeTextFile)(node_path_1.default.join(serviceDiagramDir, 'contracts.mmd'), contractsMermaid);
            output.serviceDocPaths.push(serviceDocPath);
            output.generatedServices.push(serviceId);
        }
    }
    return output;
}
@@ -8,6 +8,8 @@ exports.isManagedArtifactPayload = isManagedArtifactPayload;
8
8
  const node_path_1 = __importDefault(require("node:path"));
9
9
  const MANAGED_ARTIFACT_PATTERNS = [
10
10
  /^maps\/[^/]+\/(scope|service-map|contracts)\.json$/,
11
+ /^maps\/[^/]+\/architecture\/(model|validation)\.json$/,
12
+ /^maps\/[^/]+\/architecture-overrides\.json$/,
11
13
  /^plans\/reviews\/.*\.json$/,
12
14
  /^plans\/.*-service-proposal\.json$/,
13
15
  /^handoffs\/.*\.json$/,
@@ -21,8 +21,8 @@ function createBootstrapStructure(org, repoName, cwd = process.cwd()) {
21
21
  ' build-test:',
22
22
  ' runs-on: ubuntu-latest',
23
23
  ' steps:',
24
- ' - uses: actions/checkout@v4',
25
- ' - uses: actions/setup-node@v4',
24
+ ' - uses: actions/checkout@v5',
25
+ ' - uses: actions/setup-node@v5',
26
26
  ' with:',
27
27
  ' node-version: 20',
28
28
  ' - run: npm ci',
@@ -34,7 +34,7 @@ function readPackageDependencies(repoPath) {
34
34
  const data = JSON.parse(node_fs_1.default.readFileSync(packagePath, 'utf8'));
35
35
  const names = new Set([
36
36
  ...Object.keys(data.dependencies ?? {}),
37
- ...Object.keys(data.devDependencies ?? {}),
37
+ ...Object.keys(data.optionalDependencies ?? {}),
38
38
  ...Object.keys(data.peerDependencies ?? {}),
39
39
  ]);
40
40
  return [...names];
@@ -91,6 +91,8 @@ function buildServiceMapArtifact(mapId, scope, reposByName) {
91
91
  metadata: {
92
92
  source: repo?.source ?? 'github',
93
93
  localPath: repo?.localPath,
94
+ htmlUrl: repo?.htmlUrl,
95
+ defaultBranch: repo?.defaultBranch,
94
96
  },
95
97
  });
96
98
  edges.push({
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "sdx-cli",
3
- "version": "0.2.1",
3
+ "version": "0.3.0",
4
4
  "description": "System Design Intelligence CLI",
5
5
  "type": "commonjs",
6
6
  "bin": {