jumpstart-mode 1.1.12 → 1.1.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/agents/jumpstart-adversary.agent.md +2 -1
- package/.github/agents/jumpstart-architect.agent.md +5 -6
- package/.github/agents/jumpstart-challenger.agent.md +2 -1
- package/.github/agents/jumpstart-devops.agent.md +2 -2
- package/.github/agents/jumpstart-diagram-verifier.agent.md +2 -1
- package/.github/agents/jumpstart-maintenance.agent.md +1 -0
- package/.github/agents/jumpstart-performance.agent.md +1 -0
- package/.github/agents/jumpstart-pm.agent.md +1 -1
- package/.github/agents/jumpstart-refactor.agent.md +1 -0
- package/.github/agents/jumpstart-requirements-extractor.agent.md +1 -0
- package/.github/agents/jumpstart-researcher.agent.md +1 -0
- package/.github/agents/jumpstart-retrospective.agent.md +1 -0
- package/.github/agents/jumpstart-reviewer.agent.md +2 -0
- package/.github/agents/jumpstart-scout.agent.md +1 -1
- package/.github/agents/jumpstart-scrum-master.agent.md +1 -0
- package/.github/agents/jumpstart-security.agent.md +2 -1
- package/.github/agents/jumpstart-tech-writer.agent.md +1 -0
- package/.github/workflows/quality.yml +19 -2
- package/.jumpstart/agents/analyst.md +38 -0
- package/.jumpstart/agents/architect.md +38 -0
- package/.jumpstart/agents/challenger.md +38 -0
- package/.jumpstart/agents/developer.md +41 -0
- package/.jumpstart/agents/pm.md +38 -0
- package/.jumpstart/agents/scout.md +33 -0
- package/.jumpstart/agents/ux-designer.md +4 -0
- package/.jumpstart/config.yaml +24 -0
- package/.jumpstart/schemas/timeline.schema.json +1 -0
- package/.jumpstart/skills/skill-creator/SKILL.md +485 -357
- package/.jumpstart/skills/skill-creator/agents/analyzer.md +274 -0
- package/.jumpstart/skills/skill-creator/agents/comparator.md +202 -0
- package/.jumpstart/skills/skill-creator/agents/grader.md +223 -0
- package/.jumpstart/skills/skill-creator/assets/eval_review.html +146 -0
- package/.jumpstart/skills/skill-creator/eval-viewer/generate_review.py +471 -0
- package/.jumpstart/skills/skill-creator/eval-viewer/viewer.html +1325 -0
- package/.jumpstart/skills/skill-creator/references/schemas.md +430 -0
- package/.jumpstart/skills/skill-creator/scripts/__init__.py +0 -0
- package/.jumpstart/skills/skill-creator/scripts/aggregate_benchmark.py +401 -0
- package/.jumpstart/skills/skill-creator/scripts/generate_report.py +326 -0
- package/.jumpstart/skills/skill-creator/scripts/improve_description.py +247 -0
- package/.jumpstart/skills/skill-creator/scripts/package_skill.py +136 -110
- package/.jumpstart/skills/skill-creator/scripts/run_eval.py +310 -0
- package/.jumpstart/skills/skill-creator/scripts/run_loop.py +328 -0
- package/.jumpstart/skills/skill-creator/scripts/utils.py +47 -0
- package/.jumpstart/state/timeline.json +659 -0
- package/.jumpstart/usage-log.json +74 -3
- package/README.md +62 -1
- package/bin/cli.js +3217 -1
- package/bin/headless-runner.js +62 -2
- package/bin/lib/agent-checkpoint.js +168 -0
- package/bin/lib/ai-evaluation.js +104 -0
- package/bin/lib/ai-intake.js +152 -0
- package/bin/lib/ambiguity-heatmap.js +152 -0
- package/bin/lib/artifact-comparison.js +104 -0
- package/bin/lib/ast-edit-engine.js +157 -0
- package/bin/lib/backlog-sync.js +338 -0
- package/bin/lib/bcdr-planning.js +158 -0
- package/bin/lib/bidirectional-trace.js +199 -0
- package/bin/lib/branch-workflow.js +266 -0
- package/bin/lib/cab-output.js +119 -0
- package/bin/lib/chat-integration.js +122 -0
- package/bin/lib/ci-cd-integration.js +208 -0
- package/bin/lib/codebase-retrieval.js +125 -0
- package/bin/lib/collaboration.js +168 -0
- package/bin/lib/compliance-packs.js +213 -0
- package/bin/lib/context-chunker.js +128 -0
- package/bin/lib/context-onboarding.js +122 -0
- package/bin/lib/contract-first.js +124 -0
- package/bin/lib/cost-router.js +148 -0
- package/bin/lib/credential-boundary.js +155 -0
- package/bin/lib/data-classification.js +180 -0
- package/bin/lib/data-contracts.js +129 -0
- package/bin/lib/db-evolution.js +158 -0
- package/bin/lib/decision-conflicts.js +299 -0
- package/bin/lib/delivery-confidence.js +361 -0
- package/bin/lib/dependency-upgrade.js +153 -0
- package/bin/lib/design-system.js +133 -0
- package/bin/lib/deterministic-artifacts.js +151 -0
- package/bin/lib/diagram-studio.js +115 -0
- package/bin/lib/domain-ontology.js +140 -0
- package/bin/lib/ea-review-packet.js +151 -0
- package/bin/lib/enterprise-search.js +123 -0
- package/bin/lib/enterprise-templates.js +140 -0
- package/bin/lib/environment-promotion.js +220 -0
- package/bin/lib/estimation-studio.js +130 -0
- package/bin/lib/event-modeling.js +133 -0
- package/bin/lib/evidence-collector.js +179 -0
- package/bin/lib/finops-planner.js +182 -0
- package/bin/lib/fitness-functions.js +279 -0
- package/bin/lib/focus.js +448 -0
- package/bin/lib/governance-dashboard.js +165 -0
- package/bin/lib/guided-handoff.js +120 -0
- package/bin/lib/impact-analysis.js +190 -0
- package/bin/lib/incident-feedback.js +157 -0
- package/bin/lib/integrate.js +1 -1
- package/bin/lib/knowledge-graph.js +122 -0
- package/bin/lib/legacy-modernizer.js +160 -0
- package/bin/lib/migration-planner.js +144 -0
- package/bin/lib/model-governance.js +185 -0
- package/bin/lib/model-router.js +144 -0
- package/bin/lib/multi-repo.js +272 -0
- package/bin/lib/next-phase.js +53 -8
- package/bin/lib/ops-ownership.js +152 -0
- package/bin/lib/parallel-agents.js +257 -0
- package/bin/lib/pattern-library.js +115 -0
- package/bin/lib/persona-packs.js +99 -0
- package/bin/lib/plan-executor.js +366 -0
- package/bin/lib/platform-engineering.js +119 -0
- package/bin/lib/playback-summaries.js +126 -0
- package/bin/lib/policy-engine.js +240 -0
- package/bin/lib/portfolio-reporting.js +357 -0
- package/bin/lib/pr-package.js +197 -0
- package/bin/lib/project-memory.js +235 -0
- package/bin/lib/prompt-governance.js +130 -0
- package/bin/lib/promptless-mode.js +128 -0
- package/bin/lib/quality-graph.js +193 -0
- package/bin/lib/raci-matrix.js +188 -0
- package/bin/lib/refactor-planner.js +167 -0
- package/bin/lib/reference-architectures.js +304 -0
- package/bin/lib/release-readiness.js +171 -0
- package/bin/lib/repo-graph.js +262 -0
- package/bin/lib/requirements-baseline.js +358 -0
- package/bin/lib/risk-register.js +211 -0
- package/bin/lib/role-approval.js +249 -0
- package/bin/lib/role-views.js +142 -0
- package/bin/lib/root-cause-analysis.js +132 -0
- package/bin/lib/runtime-debugger.js +154 -0
- package/bin/lib/safe-rename.js +135 -0
- package/bin/lib/semantic-diff.js +335 -0
- package/bin/lib/sla-slo.js +210 -0
- package/bin/lib/spec-comments.js +147 -0
- package/bin/lib/spec-maturity.js +287 -0
- package/bin/lib/sre-integration.js +154 -0
- package/bin/lib/structured-elicitation.js +174 -0
- package/bin/lib/telemetry-feedback.js +118 -0
- package/bin/lib/test-generator.js +146 -0
- package/bin/lib/timeline.js +2 -1
- package/bin/lib/tool-bridge.js +107 -0
- package/bin/lib/tool-guardrails.js +139 -0
- package/bin/lib/tool-schemas.js +172 -3
- package/bin/lib/transcript-ingestion.js +150 -0
- package/bin/lib/vendor-risk.js +173 -0
- package/bin/lib/waiver-workflow.js +174 -0
- package/bin/lib/web-dashboard.js +126 -0
- package/bin/lib/workshop-mode.js +165 -0
- package/bin/lib/workstream-ownership.js +104 -0
- package/package.json +1 -1
|
@@ -0,0 +1,151 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* deterministic-artifacts.js — Deterministic Artifact Generation Mode (Item 56)
|
|
3
|
+
*
|
|
4
|
+
* Make outputs as stable as possible for governance and repeatability.
|
|
5
|
+
*
|
|
6
|
+
* Usage:
|
|
7
|
+
* node bin/lib/deterministic-artifacts.js normalize|verify|diff [options]
|
|
8
|
+
*/
|
|
9
|
+
|
|
10
|
+
'use strict';
|
|
11
|
+
|
|
12
|
+
const fs = require('fs');
|
|
13
|
+
const path = require('path');
|
|
14
|
+
const crypto = require('crypto');
|
|
15
|
+
|
|
16
|
+
/**
 * Normalize markdown content so that semantically identical artifacts
 * compare equal: unify line endings and whitespace, strip HTML comments,
 * and replace volatile values (ISO timestamps, UUIDs) with stable
 * placeholders.
 *
 * @param {string} content - Raw markdown content.
 * @returns {string} Normalized content, always ending in a single newline.
 */
function normalizeMarkdown(content) {
  let text = content.replace(/\r\n/g, '\n'); // CRLF -> LF
  text = text.replace(/\t/g, ' ');           // tabs -> spaces
  text = text.replace(/[ \t]+$/gm, '');      // strip trailing whitespace
  text = text.replace(/\n{3,}/g, '\n\n');    // collapse blank-line runs to one blank line
  text = text.replace(/<!--.*?-->/gs, '');   // drop HTML comments
  // Replace volatile values with placeholders so re-generation is stable.
  text = text.replace(/\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}[^\s]*/g, '[TIMESTAMP]');
  text = text.replace(/\b[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}\b/gi, '[UUID]');
  return text.trim() + '\n';
}

/**
 * Produce a short, stable content hash for an artifact. Content is
 * normalized first so cosmetic differences (line endings, trailing
 * whitespace, volatile values) do not change the hash.
 *
 * @param {string} content - Raw artifact content.
 * @returns {string} First 16 hex chars of the SHA-256 of the normalized content.
 */
function hashContent(content) {
  return crypto
    .createHash('sha256')
    .update(normalizeMarkdown(content))
    .digest('hex')
    .slice(0, 16);
}
|
|
44
|
+
|
|
45
|
+
/**
 * Normalize a markdown file and report its stable hash.
 *
 * @param {string} filePath - Path to the markdown file.
 * @param {object} [options]
 * @param {boolean} [options.write] - When true, write normalized content back.
 * @returns {object} { success, file, original_length, normalized_length, hash, modified }
 *   or { success: false, error } when the file does not exist.
 */
function normalizeFile(filePath, options = {}) {
  if (!fs.existsSync(filePath)) {
    return { success: false, error: `File not found: ${filePath}` };
  }

  const content = fs.readFileSync(filePath, 'utf8');
  const normalized = normalizeMarkdown(content);
  const hash = hashContent(content);
  const modified = content !== normalized;

  // Only rewrite when normalization actually changed something, so an
  // already-normalized file keeps its mtime — important for deterministic
  // artifact pipelines that watch timestamps (the original rewrote
  // identical bytes unconditionally).
  if (options.write && modified) {
    fs.writeFileSync(filePath, normalized, 'utf8');
  }

  return {
    success: true,
    file: filePath,
    original_length: content.length,
    normalized_length: normalized.length,
    hash,
    modified
  };
}
|
|
74
|
+
|
|
75
|
+
/**
 * Compare two artifact files after normalization and report how stable
 * the content is between them.
 *
 * @param {string} file1 - Path to the first file.
 * @param {string} file2 - Path to the second file.
 * @returns {object} { success, identical, similarity, hash1, hash2, diff_lines, total_lines }
 *   or { success: false, error } when either file is missing.
 */
function verifyStability(file1, file2) {
  for (const f of [file1, file2]) {
    if (!fs.existsSync(f)) return { success: false, error: `File not found: ${f}` };
  }

  const content1 = normalizeMarkdown(fs.readFileSync(file1, 'utf8'));
  const content2 = normalizeMarkdown(fs.readFileSync(file2, 'utf8'));
  const hash1 = hashContent(content1);
  const hash2 = hashContent(content2);

  // Count line-by-line differences over the longer of the two files;
  // missing lines on the shorter side count as differences.
  const lines1 = content1.split('\n');
  const lines2 = content2.split('\n');
  const maxLines = Math.max(lines1.length, lines2.length);
  let diffLines = 0;
  for (let i = 0; i < maxLines; i++) {
    if (lines1[i] !== lines2[i]) diffLines += 1;
  }

  const similarity = maxLines > 0 ? Math.round(((maxLines - diffLines) / maxLines) * 100) : 100;

  return {
    success: true,
    identical: hash1 === hash2,
    similarity,
    hash1,
    hash2,
    diff_lines: diffLines,
    total_lines: maxLines
  };
}
|
|
112
|
+
|
|
113
|
+
/**
 * Recursively normalize every markdown file under <root>/specs.
 *
 * @param {string} root - Project root directory.
 * @param {object} [options] - Forwarded to normalizeFile (e.g. { write: true }).
 * @returns {object} { success, files, modified, results }, or a no-op result
 *   when the specs directory does not exist.
 */
function normalizeSpecs(root, options = {}) {
  const specsDir = path.join(root, 'specs');
  if (!fs.existsSync(specsDir)) {
    return { success: true, files: 0, message: 'No specs directory found' };
  }

  const results = [];
  // Depth-first walk, processing .md files in directory-listing order.
  const walk = (dir) => {
    for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
      const full = path.join(dir, entry.name);
      if (entry.isDirectory()) {
        walk(full);
      } else if (entry.isFile() && entry.name.endsWith('.md')) {
        results.push(normalizeFile(full, options));
      }
    }
  };
  walk(specsDir);

  return {
    success: true,
    files: results.length,
    modified: results.filter((r) => r.modified).length,
    results
  };
}
|
|
144
|
+
|
|
145
|
+
// Public API: normalization primitives plus the batch/verify entry points
// used by the CLI dispatcher.
module.exports = {
  normalizeMarkdown,
  hashContent,
  normalizeFile,
  verifyStability,
  normalizeSpecs
};
|
|
@@ -0,0 +1,115 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* diagram-studio.js — Diagram Studio (Item 70)
|
|
3
|
+
*
|
|
4
|
+
* Generate, validate, compare, and refine C4, BPMN, sequence,
|
|
5
|
+
* data flow, and deployment diagrams.
|
|
6
|
+
*
|
|
7
|
+
* Usage:
|
|
8
|
+
* node bin/lib/diagram-studio.js generate|validate|compare|list [options]
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
'use strict';
|
|
12
|
+
|
|
13
|
+
const fs = require('fs');
|
|
14
|
+
const path = require('path');
|
|
15
|
+
|
|
16
|
+
// Supported diagram types; types without a curated template fall back to a stub.
const DIAGRAM_TYPES = ['c4-context', 'c4-container', 'c4-component', 'sequence', 'data-flow', 'deployment', 'bpmn', 'erd'];

// Ready-made Mermaid starter templates keyed by diagram type.
const DIAGRAM_TEMPLATES = {
  'c4-context': '```mermaid\nC4Context\n title System Context Diagram\n Person(user, "User")\n System(system, "System")\n Rel(user, system, "Uses")\n```',
  'c4-container': '```mermaid\nC4Container\n title Container Diagram\n Container(api, "API", "Node.js")\n ContainerDb(db, "Database", "PostgreSQL")\n Rel(api, db, "Reads/Writes")\n```',
  'sequence': '```mermaid\nsequenceDiagram\n participant Client\n participant Server\n Client->>Server: Request\n Server-->>Client: Response\n```',
  'data-flow': '```mermaid\nflowchart LR\n A[Input] --> B[Process]\n B --> C[Output]\n```',
  'deployment': '```mermaid\nflowchart TB\n subgraph Cloud\n LB[Load Balancer]\n APP[App Server]\n DB[(Database)]\n end\n LB --> APP --> DB\n```'
};

/**
 * Produce a starter Mermaid diagram for the requested type.
 *
 * @param {string} type - One of DIAGRAM_TYPES.
 * @param {object} [options] - Reserved for future use.
 * @returns {object} { success, type, content, editable, generated_at }
 *   or { success: false, error } for an unknown type.
 */
function generateDiagram(type, options = {}) {
  if (!DIAGRAM_TYPES.includes(type)) {
    return { success: false, error: `Unknown type: ${type}. Valid: ${DIAGRAM_TYPES.join(', ')}` };
  }

  // Types without a curated template get a minimal editable flowchart stub.
  const fallback = `\`\`\`mermaid\nflowchart LR\n A[${type}] --> B[TODO]\n\`\`\``;
  const content = DIAGRAM_TEMPLATES[type] || fallback;

  return {
    success: true,
    type,
    content,
    editable: true,
    generated_at: new Date().toISOString()
  };
}
|
|
44
|
+
|
|
45
|
+
/**
 * Lightweight sanity checks for a Mermaid diagram string. This is heuristic
 * validation (fence/type detection, bracket balance, empty labels) — not a
 * full Mermaid parse.
 *
 * @param {string} content - Diagram source (fenced or raw Mermaid).
 * @param {object} [options] - Reserved for future use.
 * @returns {object} { success, valid, issues, diagram_type }
 *   or { success: false, error } when content is empty.
 */
function validateDiagram(content, options = {}) {
  if (!content) return { success: false, error: 'Diagram content is required' };

  const issues = [];
  const hasFence = content.includes('```mermaid');

  // A diagram must either be fenced or start with a known Mermaid keyword.
  const looksLikeMermaid = /^(graph|flowchart|sequenceDiagram|classDiagram|C4)/m.test(content);
  if (!hasFence && !looksLikeMermaid) {
    issues.push({ type: 'syntax', message: 'No recognized Mermaid diagram type found' });
  }

  // Bracket balance is a cheap proxy for malformed node/edge syntax.
  const open = (content.match(/[\[{(]/g) || []).length;
  const close = (content.match(/[\]})]/g) || []).length;
  if (open !== close) {
    issues.push({ type: 'syntax', message: `Unbalanced brackets: ${open} open, ${close} close` });
  }

  if (/\[\s*\]/.test(content)) {
    issues.push({ type: 'warning', message: 'Empty node labels detected' });
  }

  return {
    success: true,
    valid: issues.length === 0,
    issues,
    diagram_type: hasFence ? 'mermaid-fenced' : 'mermaid-raw'
  };
}
|
|
78
|
+
|
|
79
|
+
/**
 * Structurally diff two diagrams by their node identifiers. Nodes are
 * detected as word characters immediately followed by an opening
 * bracket/brace/paren (e.g. "A[", "Person(").
 *
 * @param {string} diagramA - Baseline diagram source.
 * @param {string} diagramB - Updated diagram source.
 * @returns {object} { success, added, removed, unchanged, has_changes }
 *   or { success: false, error } when either input is missing.
 */
function compareDiagrams(diagramA, diagramB, options = {}) {
  if (!diagramA || !diagramB) return { success: false, error: 'Both diagrams are required' };

  const extractNodes = (src) =>
    new Set((src.match(/\w+[\[({]/g) || []).map((token) => token.slice(0, -1)));

  const nodesA = extractNodes(diagramA);
  const nodesB = extractNodes(diagramB);

  const added = [...nodesB].filter((n) => !nodesA.has(n));
  const removed = [...nodesA].filter((n) => !nodesB.has(n));
  const unchanged = [...nodesA].filter((n) => nodesB.has(n));

  return {
    success: true,
    added,
    removed,
    unchanged,
    has_changes: added.length > 0 || removed.length > 0
  };
}
|
|
100
|
+
|
|
101
|
+
/**
 * Enumerate supported diagram types and which of them ship with a template.
 *
 * @returns {object} { success, types, templates_available }
 */
function listDiagramTypes() {
  const templates_available = Object.keys(DIAGRAM_TEMPLATES);
  return { success: true, types: DIAGRAM_TYPES, templates_available };
}
|
|
111
|
+
|
|
112
|
+
// Public API surface for the diagram-studio CLI commands, plus the type and
// template tables for callers that need to introspect them.
module.exports = {
  generateDiagram, validateDiagram, compareDiagrams, listDiagramTypes,
  DIAGRAM_TYPES, DIAGRAM_TEMPLATES
};
|
|
@@ -0,0 +1,140 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* domain-ontology.js — Domain Ontology Support (Item 83)
|
|
3
|
+
*
|
|
4
|
+
* Canonical terms, entities, events, and constraints per business domain.
|
|
5
|
+
*
|
|
6
|
+
* Usage:
|
|
7
|
+
* node bin/lib/domain-ontology.js define|query|validate|report [options]
|
|
8
|
+
*
|
|
9
|
+
* State file: .jumpstart/state/domain-ontology.json
|
|
10
|
+
*/
|
|
11
|
+
|
|
12
|
+
'use strict';
|
|
13
|
+
|
|
14
|
+
const fs = require('fs');
|
|
15
|
+
const path = require('path');
|
|
16
|
+
|
|
17
|
+
// Default on-disk location for the ontology state.
const DEFAULT_STATE_FILE = path.join('.jumpstart', 'state', 'domain-ontology.json');

// The kinds of domain elements that can be defined.
const ELEMENT_TYPES = ['entity', 'event', 'command', 'value-object', 'aggregate', 'constraint'];

/**
 * Build an empty ontology state object.
 * @returns {object} { version, domains, last_updated }
 */
function defaultState() {
  return { version: '1.0.0', domains: {}, last_updated: null };
}

/**
 * Load ontology state from disk, falling back to an empty state when the
 * file is missing or unparsable (best-effort by design).
 *
 * @param {string} [stateFile] - Override for the state file path.
 * @returns {object} Parsed state or defaultState().
 */
function loadState(stateFile) {
  const file = stateFile || DEFAULT_STATE_FILE;
  if (!fs.existsSync(file)) return defaultState();
  try {
    return JSON.parse(fs.readFileSync(file, 'utf8'));
  } catch {
    // Corrupt state is treated as absent rather than fatal.
    return defaultState();
  }
}

/**
 * Persist ontology state, creating parent directories as needed and
 * stamping last_updated with the current time.
 *
 * @param {object} state - State to write (mutated: last_updated is set).
 * @param {string} [stateFile] - Override for the state file path.
 */
function saveState(state, stateFile) {
  const file = stateFile || DEFAULT_STATE_FILE;
  fs.mkdirSync(path.dirname(file), { recursive: true }); // no-op if it exists
  state.last_updated = new Date().toISOString();
  fs.writeFileSync(file, JSON.stringify(state, null, 2) + '\n', 'utf8');
}
|
|
39
|
+
|
|
40
|
+
/**
 * Define a new ontology element (entity, event, …) within a domain and
 * persist it to the state file.
 *
 * @param {string} domain - Business domain name.
 * @param {string} name - Canonical element name.
 * @param {string} type - One of ELEMENT_TYPES.
 * @param {object} [options] - { description, properties, constraints, stateFile }.
 * @returns {object} { success, element } or { success: false, error }.
 */
function defineElement(domain, name, type, options = {}) {
  if (!domain || !name || !type) {
    return { success: false, error: 'domain, name, and type are required' };
  }
  if (!ELEMENT_TYPES.includes(type)) {
    return { success: false, error: `Unknown type: ${type}. Valid: ${ELEMENT_TYPES.join(', ')}` };
  }

  const stateFile = options.stateFile || DEFAULT_STATE_FILE;
  const state = loadState(stateFile);
  const domainData =
    state.domains[domain] || (state.domains[domain] = { elements: [], relationships: [] });

  // NOTE(review): Date.now()-based IDs can collide when two elements are
  // defined within the same millisecond — consider a monotonic suffix.
  const element = {
    id: `ONT-${Date.now()}`,
    name,
    type,
    description: options.description || '',
    properties: options.properties || [],
    constraints: options.constraints || [],
    created_at: new Date().toISOString()
  };

  domainData.elements.push(element);
  saveState(state, stateFile);

  return { success: true, element };
}
|
|
66
|
+
|
|
67
|
+
/**
 * List elements defined for a domain, optionally filtered by element type.
 *
 * @param {string} domain - Domain to query.
 * @param {object} [options] - { type, stateFile }.
 * @returns {object} { success, domain, elements, total }
 */
function queryOntology(domain, options = {}) {
  const state = loadState(options.stateFile || DEFAULT_STATE_FILE);
  const domainData = state.domains[domain];
  if (!domainData) return { success: true, domain, elements: [], total: 0 };

  const elements = options.type
    ? domainData.elements.filter((e) => e.type === options.type)
    : domainData.elements;

  return { success: true, domain, elements, total: elements.length };
}
|
|
78
|
+
|
|
79
|
+
/**
 * Scan free text for probable misspellings of a domain's canonical terms.
 * A word is flagged when it is within Levenshtein distance 2 of a canonical
 * term word (length > 3) without being an exact match.
 *
 * @param {string} domain - Domain whose vocabulary to check against.
 * @param {string} text - Text to validate.
 * @param {object} [options] - { stateFile }.
 * @returns {object} { success, domain, issues, canonical_terms }
 *   or { success: false, error } when arguments are missing.
 */
function validateTermUsage(domain, text, options = {}) {
  if (!domain || !text) return { success: false, error: 'domain and text are required' };

  const state = loadState(options.stateFile || DEFAULT_STATE_FILE);
  const domainData = state.domains[domain];
  if (!domainData) return { success: true, domain, issues: [], canonical_terms: 0 };

  const canonicalNames = domainData.elements.map((e) => e.name.toLowerCase());
  const words = text.toLowerCase().split(/\W+/);
  const issues = [];

  for (const name of canonicalNames) {
    for (const nw of name.split(/\s+/)) {
      if (nw.length <= 3) continue; // too short to flag typos reliably
      // Compute the edit distance once per word pair — the original called
      // levenshtein() twice for every comparison.
      const isNearMiss = words.some((w) => {
        if (w === nw) return false;
        const d = levenshtein(w, nw);
        return d > 0 && d <= 2;
      });
      if (isNearMiss) {
        issues.push({ type: 'possible_typo', canonical: name, severity: 'warning' });
      }
    }
  }

  return { success: true, domain, issues, canonical_terms: canonicalNames.length };
}
|
|
109
|
+
|
|
110
|
+
/**
 * Classic dynamic-programming Levenshtein edit distance.
 *
 * @param {string} a
 * @param {string} b
 * @returns {number} Minimum number of insert/delete/substitute edits.
 */
function levenshtein(a, b) {
  const m = a.length;
  const n = b.length;
  // dp[i][j] = distance between a[0..i) and b[0..j).
  const dp = [];
  for (let i = 0; i <= m; i++) {
    dp.push(new Array(n + 1).fill(0));
    dp[i][0] = i;
  }
  for (let j = 0; j <= n; j++) dp[0][j] = j;

  for (let i = 1; i <= m; i++) {
    for (let j = 1; j <= n; j++) {
      dp[i][j] = a[i - 1] === b[j - 1]
        ? dp[i - 1][j - 1]
        : 1 + Math.min(dp[i - 1][j], dp[i][j - 1], dp[i - 1][j - 1]);
    }
  }
  return dp[m][n];
}
|
|
118
|
+
|
|
119
|
+
/**
 * Summarize the ontology: per-domain element counts broken down by type.
 *
 * @param {object} [options] - { stateFile }.
 * @returns {object} { success, total_domains, domains }
 */
function generateReport(options = {}) {
  const state = loadState(options.stateFile || DEFAULT_STATE_FILE);

  const report = { success: true, total_domains: 0, domains: {} };
  for (const [domainName, data] of Object.entries(state.domains)) {
    report.total_domains += 1;
    const byType = {};
    for (const element of data.elements) {
      byType[element.type] = (byType[element.type] || 0) + 1;
    }
    report.domains[domainName] = { total_elements: data.elements.length, by_type: byType };
  }

  return report;
}
|
|
135
|
+
|
|
136
|
+
// Public API: ontology operations plus state helpers (exposed for the CLI
// dispatcher and tests).
module.exports = {
  defineElement, queryOntology, validateTermUsage, generateReport,
  loadState, saveState, defaultState,
  ELEMENT_TYPES
};
|
|
@@ -0,0 +1,151 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* ea-review-packet.js — Enterprise Architecture Review Packet (Item 32)
|
|
3
|
+
*
|
|
4
|
+
* Auto-produce diagrams, decision summaries, standards alignment,
|
|
5
|
+
* and exception lists.
|
|
6
|
+
*
|
|
7
|
+
* Usage:
|
|
8
|
+
* node bin/lib/ea-review-packet.js generate|status [options]
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
'use strict';
|
|
12
|
+
|
|
13
|
+
const fs = require('fs');
|
|
14
|
+
const path = require('path');
|
|
15
|
+
|
|
16
|
+
// The sections an EA review packet is expected to contain.
const PACKET_SECTIONS = ['architecture-overview', 'decision-summary', 'standards-alignment',
  'exception-list', 'risk-assessment', 'diagrams', 'compliance-status'];

/**
 * Read and JSON-parse a file, returning null when missing or invalid.
 * @param {string} file
 * @returns {object|null}
 */
function readJsonSafe(file) {
  if (!fs.existsSync(file)) return null;
  try {
    return JSON.parse(fs.readFileSync(file, 'utf8'));
  } catch {
    return null;
  }
}

/**
 * Read a text file as UTF-8, returning null when missing or unreadable.
 * @param {string} file
 * @returns {string|null}
 */
function readTextSafe(file) {
  if (!fs.existsSync(file)) return null;
  try {
    return fs.readFileSync(file, 'utf8');
  } catch {
    return null;
  }
}

/**
 * Generate an Enterprise Architecture review packet from project artifacts:
 * architecture overview, ADR summary, standards alignment, waiver exceptions,
 * risk assessment, diagram inventory, and compliance status.
 *
 * @param {string} root - Project root.
 * @param {object} [options] - Reserved for future use.
 * @returns {object} { success, packet_id, completeness, sections_present,
 *   sections_total, sections, gaps }
 */
function generatePacket(root, options = {}) {
  const sections = {};

  // Architecture overview + diagrams: read specs/architecture.md ONCE.
  // (The original implementation read the same file twice from disk.)
  const archContent = readTextSafe(path.join(root, 'specs', 'architecture.md'));
  if (archContent !== null) {
    const headings = archContent.match(/^##\s+.+$/gm) || [];
    sections['architecture-overview'] = {
      present: true,
      sections: headings.map((h) => h.replace(/^##\s+/, '')),
      word_count: archContent.split(/\s+/).length
    };
    const mermaidBlocks = (archContent.match(/```mermaid/g) || []).length;
    sections.diagrams = { present: mermaidBlocks > 0, count: mermaidBlocks };
  } else {
    sections['architecture-overview'] = { present: false };
    sections.diagrams = { present: false, count: 0 };
  }

  // Decision summary (ADRs under specs/decisions).
  const decisionsDir = path.join(root, 'specs', 'decisions');
  if (fs.existsSync(decisionsDir)) {
    const adrs = fs.readdirSync(decisionsDir).filter((f) => f.endsWith('.md'));
    sections['decision-summary'] = {
      present: adrs.length > 0,
      total_adrs: adrs.length,
      adrs: adrs.map((f) => f.replace('.md', ''))
    };
  } else {
    sections['decision-summary'] = { present: false, total_adrs: 0, adrs: [] };
  }

  // Standards alignment (declared policy count).
  const policies = readJsonSafe(path.join(root, '.jumpstart', 'policies.json'));
  sections['standards-alignment'] = policies
    ? { present: true, total_policies: policies.policies ? policies.policies.length : 0 }
    : { present: false };

  // Exception list: approved waivers only.
  const waivers = readJsonSafe(path.join(root, '.jumpstart', 'state', 'waivers.json'));
  if (waivers) {
    const active = (waivers.waivers || []).filter((w) => w.status === 'approved');
    sections['exception-list'] = {
      present: true,
      total_exceptions: active.length,
      exceptions: active.map((w) => ({ id: w.id, title: w.title, expires_at: w.expires_at }))
    };
  } else {
    // Missing and unparsable state now yield the same shape (the original
    // omitted total_exceptions on parse failure).
    sections['exception-list'] = { present: false, total_exceptions: 0 };
  }

  // Risk assessment: score >= 15 counts as high risk.
  const risks = readJsonSafe(path.join(root, '.jumpstart', 'state', 'risk-register.json'));
  if (risks) {
    const all = risks.risks || [];
    sections['risk-assessment'] = {
      present: true,
      total_risks: all.length,
      high_risks: all.filter((r) => r.score >= 15).length
    };
  } else {
    sections['risk-assessment'] = { present: false };
  }

  // Compliance status.
  const compliance = readJsonSafe(path.join(root, '.jumpstart', 'state', 'compliance.json'));
  sections['compliance-status'] = compliance
    ? { present: true, frameworks: compliance.applied_frameworks || [] }
    : { present: false };

  const presentSections = Object.values(sections).filter((s) => s.present).length;

  return {
    success: true,
    packet_id: `EA-${Date.now()}`,
    completeness: Math.round((presentSections / PACKET_SECTIONS.length) * 100),
    sections_present: presentSections,
    sections_total: PACKET_SECTIONS.length,
    sections,
    gaps: PACKET_SECTIONS.filter((s) => !sections[s] || !sections[s].present)
  };
}
|
|
147
|
+
|
|
148
|
+
// Public API: packet generation plus the expected section list so callers
// can validate completeness themselves.
module.exports = {
  generatePacket,
  PACKET_SECTIONS
};
|
|
@@ -0,0 +1,123 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* enterprise-search.js — Enterprise Search Over Artifacts (Item 96)
|
|
3
|
+
*
|
|
4
|
+
* Ask questions across specs, code, ADRs, incidents, and release records.
|
|
5
|
+
*
|
|
6
|
+
* Usage:
|
|
7
|
+
* node bin/lib/enterprise-search.js index|search|report [options]
|
|
8
|
+
*/
|
|
9
|
+
|
|
10
|
+
'use strict';
|
|
11
|
+
|
|
12
|
+
const fs = require('fs');
|
|
13
|
+
const path = require('path');
|
|
14
|
+
|
|
15
|
+
// Artifact categories the search index understands.
const SEARCHABLE_TYPES = ['spec', 'code', 'adr', 'incident', 'release', 'config'];

/**
 * Build a lightweight index of project artifacts (specs, ADRs, code, config).
 *
 * @param {string} root - Project root.
 * @param {object} [options] - Reserved for future use.
 * @returns {object} { success, total_entries, index }
 */
function indexProject(root, options = {}) {
  const index = {
    root,
    indexed_at: new Date().toISOString(),
    entries: []
  };

  // Index ADRs first, then the rest of specs with the decisions folder
  // excluded — previously files under specs/decisions were indexed twice
  // (once as 'spec' via recursion, once as 'adr'), inflating total_entries.
  indexDirectory(path.join(root, 'specs', 'decisions'), 'adr', root, index.entries);
  indexDirectory(path.join(root, 'specs'), 'spec', root, index.entries, undefined, ['decisions']);

  indexDirectory(path.join(root, 'src'), 'code', root, index.entries, ['.js', '.ts', '.py', '.java', '.go']);

  const configFile = path.join(root, '.jumpstart', 'config.yaml');
  if (fs.existsSync(configFile)) {
    index.entries.push({
      type: 'config',
      path: '.jumpstart/config.yaml',
      size: fs.statSync(configFile).size
    });
  }

  return { success: true, total_entries: index.entries.length, index };
}

/**
 * Recursively add files under dir to entries, tagged with the given type.
 *
 * @param {string} dir - Directory to walk (silently ignored if missing).
 * @param {string} type - Entry type label ('spec', 'adr', 'code', ...).
 * @param {string} root - Project root, used to build relative paths.
 * @param {object[]} entries - Accumulator, mutated in place.
 * @param {string[]} [extensions] - When given, only files with these extensions.
 * @param {string[]} [excludeNames] - Child directory names to skip at THIS
 *   level only (not propagated into subdirectories).
 */
function indexDirectory(dir, type, root, entries, extensions, excludeNames = []) {
  if (!fs.existsSync(dir)) return;
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    if (entry.isFile()) {
      if (entry.name.startsWith('.')) continue;
      const ext = path.extname(entry.name).toLowerCase();
      if (extensions && !extensions.includes(ext)) continue;
      const fp = path.join(dir, entry.name);
      entries.push({
        type,
        path: path.relative(root, fp).replace(/\\/g, '/'),
        name: entry.name,
        size: fs.statSync(fp).size
      });
    } else if (
      entry.isDirectory() &&
      !['node_modules', '.git', 'dist'].includes(entry.name) &&
      !excludeNames.includes(entry.name)
    ) {
      indexDirectory(path.join(dir, entry.name), type, root, entries, extensions);
    }
  }
}
|
|
70
|
+
|
|
71
|
+
/**
 * Case-insensitive substring search across specs, ADRs, and source code.
 *
 * @param {string} root - Project root.
 * @param {string} query - Search string (required).
 * @param {object} [options] - { maxResults } (default 20).
 * @returns {object} { success, query, total_results, results }
 *   or { success: false, error } when query is empty.
 */
function searchProject(root, query, options = {}) {
  if (!query) return { success: false, error: 'Search query is required' };

  const q = query.toLowerCase();
  const maxResults = options.maxResults || 20;
  const results = [];

  // specs/decisions is searched separately as 'adr', so it is excluded from
  // the 'spec' pass — previously matching ADR files were reported twice
  // (once as 'spec', once as 'adr').
  searchInDirectory(path.join(root, 'specs', 'decisions'), q, 'adr', root, results, maxResults);
  searchInDirectory(path.join(root, 'specs'), q, 'spec', root, results, maxResults, ['decisions']);
  searchInDirectory(path.join(root, 'src'), q, 'code', root, results, maxResults);

  return {
    success: true,
    query,
    total_results: results.length,
    results: results.slice(0, maxResults)
  };
}

/**
 * Recursively search files under dir for the (already lowercased) query.
 * Pushes { type, path, matches, preview } for each matching file; preview
 * holds up to 3 matching lines. Stops once maxResults is reached.
 *
 * @param {string[]} [excludeNames] - Child directory names to skip at THIS
 *   level only (not propagated into subdirectories).
 */
function searchInDirectory(dir, query, type, root, results, maxResults, excludeNames = []) {
  if (!fs.existsSync(dir) || results.length >= maxResults) return;
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    if (results.length >= maxResults) return;
    if (entry.isFile() && !entry.name.startsWith('.')) {
      const fp = path.join(dir, entry.name);
      try {
        const content = fs.readFileSync(fp, 'utf8');
        if (!content.toLowerCase().includes(query)) continue;
        const preview = content
          .split('\n')
          .map((text, i) => ({ line: i + 1, text: text.trim() }))
          .filter((l) => l.text.toLowerCase().includes(query))
          .slice(0, 3);
        results.push({
          type,
          path: path.relative(root, fp).replace(/\\/g, '/'),
          matches: preview.length,
          preview
        });
      } catch {
        // Binary or unreadable file — skip silently (best-effort search).
      }
    } else if (
      entry.isDirectory() &&
      !['node_modules', '.git', 'dist'].includes(entry.name) &&
      !excludeNames.includes(entry.name)
    ) {
      searchInDirectory(path.join(dir, entry.name), query, type, root, results, maxResults);
    }
  }
}
|
|
119
|
+
|
|
120
|
+
// Public API: index/search entry points and the supported artifact types.
// indexDirectory/searchInDirectory remain private helpers.
module.exports = {
  indexProject, searchProject,
  SEARCHABLE_TYPES
};
|