iikit-dashboard 1.7.0 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "iikit-dashboard",
3
- "version": "1.7.0",
3
+ "version": "2.0.0",
4
4
  "description": "Real-time dashboard for Intent Integrity Kit (IIKit) — visualizes every phase of specification-driven AI development",
5
5
  "main": "src/generate-dashboard.js",
6
6
  "bin": {
package/src/generate-dashboard.js CHANGED
@@ -11,7 +11,7 @@ const { computePipelineState } = require('./pipeline');
11
11
  const { computeStoryMapState } = require('./storymap');
12
12
  const { computePlanViewState } = require('./planview');
13
13
  const { computeChecklistViewState } = require('./checklist');
14
- const { computeTestifyState } = require('./testify');
14
+ const { computeTestifyState, getFeatureFiles } = require('./testify');
15
15
  const { computeAnalyzeState } = require('./analyze');
16
16
  const { computeBugsState } = require('./bugs');
17
17
 
@@ -62,7 +62,6 @@ function getBoardState(projectPath, featureId) {
62
62
  const featureDir = path.join(projectPath, 'specs', featureId);
63
63
  const specPath = path.join(featureDir, 'spec.md');
64
64
  const tasksPath = path.join(featureDir, 'tasks.md');
65
- const testSpecsPath = path.join(featureDir, 'tests', 'test-specs.md');
66
65
  const contextPath = path.join(featureDir, 'context.json');
67
66
 
68
67
  const specContent = fs.existsSync(specPath) ? fs.readFileSync(specPath, 'utf-8') : '';
@@ -73,9 +72,10 @@ function getBoardState(projectPath, featureId) {
73
72
  const board = computeBoardState(stories, tasks);
74
73
 
75
74
  let integrity = { status: 'missing', currentHash: null, storedHash: null };
76
- if (fs.existsSync(testSpecsPath)) {
77
- const testSpecsContent = fs.readFileSync(testSpecsPath, 'utf-8');
78
- const currentHash = computeAssertionHash(testSpecsContent);
75
+ const featureFiles = getFeatureFiles(featureDir);
76
+ if (featureFiles.length > 0) {
77
+ const allFeatureContent = featureFiles.map(f => fs.readFileSync(f, 'utf-8')).join('\n');
78
+ const currentHash = computeAssertionHash(allFeatureContent);
79
79
  let storedHash = null;
80
80
  if (fs.existsSync(contextPath)) {
81
81
  try {
@@ -260,6 +260,7 @@ async function main() {
260
260
 
261
261
  const watchGlobs = [
262
262
  path.join(projectPath, 'specs', '**', '*.md'),
263
+ path.join(projectPath, 'specs', '**', '*.feature'),
263
264
  path.join(projectPath, 'CONSTITUTION.md'),
264
265
  path.join(projectPath, 'PREMISE.md')
265
266
  ];
package/src/integrity.js CHANGED
@@ -3,10 +3,13 @@
3
3
  const crypto = require('crypto');
4
4
 
5
5
  /**
6
- * Extract Given/When/Then lines from test-specs.md,
7
- * normalize whitespace, sort, and compute SHA256 hash.
6
+ * Extract Gherkin step lines (Given/When/Then/And/But) from .feature content,
7
+ * normalize whitespace, and compute SHA256 hash.
8
8
  *
9
- * @param {string} content - Raw content of test-specs.md
9
+ * Order is preserved (deterministic ordering comes from sorted filenames — caller
10
+ * concatenates all .feature file contents sorted by filename before calling).
11
+ *
12
+ * @param {string} content - Concatenated content of .feature files
10
13
  * @returns {string|null} SHA256 hex hash, or null if no assertions found
11
14
  */
12
15
  function computeAssertionHash(content) {
@@ -16,23 +19,15 @@ function computeAssertionHash(content) {
16
19
  const assertionLines = [];
17
20
 
18
21
  for (const line of lines) {
19
- const trimmed = line.trim();
20
- if (
21
- trimmed.startsWith('**Given**:') ||
22
- trimmed.startsWith('**When**:') ||
23
- trimmed.startsWith('**Then**:')
24
- ) {
22
+ if (/^\s*(Given|When|Then|And|But) /.test(line)) {
25
23
  // Normalize whitespace: collapse multiple spaces to single space
26
- const normalized = trimmed.replace(/\s+/g, ' ').trim();
24
+ const normalized = line.replace(/\s+/g, ' ').trim();
27
25
  assertionLines.push(normalized);
28
26
  }
29
27
  }
30
28
 
31
29
  if (assertionLines.length === 0) return null;
32
30
 
33
- // Sort for deterministic ordering
34
- assertionLines.sort();
35
-
36
31
  const joined = assertionLines.join('\n');
37
32
  return crypto.createHash('sha256').update(joined, 'utf8').digest('hex');
38
33
  }
@@ -40,7 +35,7 @@ function computeAssertionHash(content) {
40
35
  /**
41
36
  * Compare current assertion hash against stored hash.
42
37
  *
43
- * @param {string|null} currentHash - Hash computed from current test-specs.md
38
+ * @param {string|null} currentHash - Hash computed from current .feature files
44
39
  * @param {string|null} storedHash - Hash from context.json
45
40
  * @returns {{status: string, currentHash: string|null, storedHash: string|null}}
46
41
  */
package/src/parser.js CHANGED
@@ -881,58 +881,64 @@ function parseResearchDecisions(content) {
881
881
  }
882
882
 
883
883
  /**
884
- * Parse tests/test-specs.md to extract test specification entries.
885
- * Pattern: ### TS-XXX: Title, then **Type**: value, **Priority**: value, **Traceability**: refs
884
+ * Parse Gherkin .feature file content to extract test specification entries.
885
+ * Collects @tags before Scenario:/Scenario Outline: lines.
886
+ * Tags: @TS-XXX (id), @P1/@P2/@P3 (priority), @acceptance/@contract/@validation (type),
887
+ * @FR-XXX/@SC-XXX (traceability — @US-XXX filtered out).
886
888
  *
887
- * @param {string} content - Raw markdown content of test-specs.md
889
+ * @param {string} content - Raw content of one or more .feature files
888
890
  * @returns {Array<{id: string, title: string, type: string, priority: string, traceability: string[]}>}
889
891
  */
890
892
  function parseTestSpecs(content) {
891
893
  if (!content || typeof content !== 'string') return [];
892
894
 
893
895
  const specs = [];
894
- const headingRegex = /### TS-(\d+): (.+)/g;
895
- const headingStarts = [];
896
- let match;
896
+ const lines = content.split('\n');
897
+ let pendingTags = [];
897
898
 
898
- while ((match = headingRegex.exec(content)) !== null) {
899
- headingStarts.push({
900
- id: `TS-${match[1]}`,
901
- title: match[2].trim(),
902
- index: match.index
903
- });
904
- }
899
+ for (const line of lines) {
900
+ const trimmed = line.trim();
905
901
 
906
- for (let i = 0; i < headingStarts.length; i++) {
907
- const start = headingStarts[i].index;
908
- const end = i + 1 < headingStarts.length ? headingStarts[i + 1].index : content.length;
909
- const section = content.substring(start, end);
902
+ // Collect tag lines (may have multiple tags per line)
903
+ if (trimmed.startsWith('@')) {
904
+ const tags = trimmed.match(/@[\w-]+/g) || [];
905
+ pendingTags.push(...tags);
906
+ continue;
907
+ }
908
+
909
+ // Match Scenario or Scenario Outline
910
+ const scenarioMatch = trimmed.match(/^Scenario(?: Outline)?:\s*(.+)/);
911
+ if (scenarioMatch && pendingTags.length > 0) {
912
+ const title = scenarioMatch[1].trim();
913
+
914
+ // Extract id from @TS-XXX
915
+ const idTag = pendingTags.find(t => /^@TS-\d+$/.test(t));
916
+ const id = idTag ? idTag.slice(1) : null;
917
+ if (!id) { pendingTags = []; continue; }
918
+
919
+ // Extract type from @acceptance/@contract/@validation
920
+ const typeTag = pendingTags.find(t => /^@(acceptance|contract|validation)$/.test(t));
921
+ const type = typeTag ? typeTag.slice(1) : 'validation';
922
+
923
+ // Extract priority from @P1/@P2/@P3
924
+ const priorityTag = pendingTags.find(t => /^@P\d+$/.test(t));
925
+ const priority = priorityTag ? priorityTag.slice(1) : 'P3';
910
926
 
911
- // Extract type
912
- const typeMatch = section.match(/\*\*Type\*\*:\s*(acceptance|contract|validation)/);
913
- const type = typeMatch ? typeMatch[1] : 'validation';
914
-
915
- // Extract priority
916
- const priorityMatch = section.match(/\*\*Priority\*\*:\s*(P\d+)/);
917
- const priority = priorityMatch ? priorityMatch[1] : 'P3';
918
-
919
- // Extract traceability — comma-separated IDs, filter to FR-/SC- only
920
- let traceability = [];
921
- const traceMatch = section.match(/\*\*Traceability\*\*:\s*(.+)/);
922
- if (traceMatch) {
923
- traceability = traceMatch[1]
924
- .split(/,\s*/)
925
- .map(s => s.trim())
926
- .filter(s => /^(FR|SC)-\d+$/.test(s));
927
+ // Extract traceability from @FR-XXX/@SC-XXX (filter out @US-XXX)
928
+ const traceability = pendingTags
929
+ .filter(t => /^@(FR|SC)-\d+$/.test(t))
930
+ .map(t => t.slice(1));
931
+
932
+ specs.push({ id, title, type, priority, traceability });
933
+ pendingTags = [];
934
+ continue;
927
935
  }
928
936
 
929
- specs.push({
930
- id: headingStarts[i].id,
931
- title: headingStarts[i].title,
932
- type,
933
- priority,
934
- traceability
935
- });
937
+ // Skip Background:, Rule:, Feature:, Examples: — just reset tags on non-tag, non-scenario lines
938
+ if (trimmed.startsWith('Feature:') || trimmed.startsWith('Background:') ||
939
+ trimmed.startsWith('Rule:') || trimmed.startsWith('Examples:')) {
940
+ pendingTags = [];
941
+ }
936
942
  }
937
943
 
938
944
  return specs;
package/src/pipeline.js CHANGED
@@ -3,6 +3,7 @@
3
3
  const fs = require('fs');
4
4
  const path = require('path');
5
5
  const { parseTasks, parseChecklists, parseConstitutionTDD, hasClarifications } = require('./parser');
6
+ const { getFeatureFiles } = require('./testify');
6
7
 
7
8
  /**
8
9
  * Compute pipeline phase states for a feature by examining artifacts on disk.
@@ -17,7 +18,6 @@ function computePipelineState(projectPath, featureId) {
17
18
  const specPath = path.join(featureDir, 'spec.md');
18
19
  const planPath = path.join(featureDir, 'plan.md');
19
20
  const checklistDir = path.join(featureDir, 'checklists');
20
- const testSpecsPath = path.join(featureDir, 'tests', 'test-specs.md');
21
21
  const tasksPath = path.join(featureDir, 'tasks.md');
22
22
 
23
23
  const analysisPath = path.join(featureDir, 'analysis.md');
@@ -25,7 +25,7 @@ function computePipelineState(projectPath, featureId) {
25
25
  const specExists = fs.existsSync(specPath);
26
26
  const planExists = fs.existsSync(planPath);
27
27
  const tasksExists = fs.existsSync(tasksPath);
28
- const testSpecsExists = fs.existsSync(testSpecsPath);
28
+ const testSpecsExists = getFeatureFiles(featureDir).length > 0;
29
29
  const constitutionExists = fs.existsSync(constitutionPath);
30
30
  const premiseExists = fs.existsSync(path.join(projectPath, 'PREMISE.md'));
31
31
  const analysisExists = fs.existsSync(analysisPath);
(file header missing from this diff — the @@ -5331 line numbers and DOM content place this hunk outside package/src/pipeline.js, likely in the generated dashboard client script)
@@ -5331,7 +5331,7 @@
5331
5331
  case 'missing':
5332
5332
  textEl.textContent = 'Missing';
5333
5333
  badge.setAttribute('aria-label', 'Test integrity: no hash data available');
5334
- badge.title = 'No test-specs.md or context.json found';
5334
+ badge.title = 'No test specifications or context.json found';
5335
5335
  break;
5336
5336
  }
5337
5337
  }
package/src/testify.js CHANGED
@@ -5,6 +5,21 @@ const fs = require('fs');
5
5
  const { parseRequirements, parseSuccessCriteria, parseTestSpecs, parseTasks, parseTaskTestRefs } = require('./parser');
6
6
  const { computeAssertionHash, checkIntegrity } = require('./integrity');
7
7
 
8
+ /**
9
+ * Get sorted list of .feature file paths in a feature's tests/features/ directory.
10
+ *
11
+ * @param {string} featureDir - Path to the feature directory (e.g., specs/001-auth)
12
+ * @returns {string[]} Sorted absolute paths to .feature files
13
+ */
14
+ function getFeatureFiles(featureDir) {
15
+ const featuresDir = path.join(featureDir, 'tests', 'features');
16
+ if (!fs.existsSync(featuresDir)) return [];
17
+ return fs.readdirSync(featuresDir)
18
+ .filter(f => f.endsWith('.feature'))
19
+ .sort()
20
+ .map(f => path.join(featuresDir, f));
21
+ }
22
+
8
23
  /**
9
24
  * Build edges between requirements, test specs, and tasks.
10
25
  * Only creates edges where both source and target nodes exist.
@@ -98,7 +113,6 @@ function buildPyramid(testSpecs) {
98
113
  function computeTestifyState(projectPath, featureId) {
99
114
  const featureDir = path.join(projectPath, 'specs', featureId);
100
115
  const specPath = path.join(featureDir, 'spec.md');
101
- const testSpecsPath = path.join(featureDir, 'tests', 'test-specs.md');
102
116
  const tasksPath = path.join(featureDir, 'tasks.md');
103
117
  const contextPath = path.join(featureDir, 'context.json');
104
118
 
@@ -125,10 +139,14 @@ function computeTestifyState(projectPath, featureId) {
125
139
  const scReqs = parseSuccessCriteria(specContent);
126
140
  const requirements = [...frReqs, ...scReqs];
127
141
 
128
- // Parse test specs
129
- const testSpecsExist = fs.existsSync(testSpecsPath);
130
- const testSpecsContent = testSpecsExist ? fs.readFileSync(testSpecsPath, 'utf-8') : '';
131
- const testSpecs = testSpecsExist ? parseTestSpecs(testSpecsContent) : [];
142
+ // Parse test specs from .feature files
143
+ const featureFiles = getFeatureFiles(featureDir);
144
+ const testSpecsExist = featureFiles.length > 0;
145
+ const featureContents = featureFiles.map(f => fs.readFileSync(f, 'utf-8'));
146
+ const allFeatureContent = featureContents.join('\n');
147
+ const testSpecs = testSpecsExist
148
+ ? featureContents.reduce((acc, content) => acc.concat(parseTestSpecs(content)), [])
149
+ : [];
132
150
 
133
151
  // Parse tasks and extract test spec refs
134
152
  const tasksContent = fs.existsSync(tasksPath) ? fs.readFileSync(tasksPath, 'utf-8') : '';
@@ -148,7 +166,7 @@ function computeTestifyState(projectPath, featureId) {
148
166
  // Integrity check
149
167
  let integrity = { status: 'missing', currentHash: null, storedHash: null };
150
168
  if (testSpecsExist) {
151
- const currentHash = computeAssertionHash(testSpecsContent);
169
+ const currentHash = computeAssertionHash(allFeatureContent);
152
170
 
153
171
  let storedHash = null;
154
172
  if (fs.existsSync(contextPath)) {
@@ -175,4 +193,4 @@ function computeTestifyState(projectPath, featureId) {
175
193
  };
176
194
  }
177
195
 
178
- module.exports = { buildEdges, findGaps, buildPyramid, computeTestifyState };
196
+ module.exports = { buildEdges, findGaps, buildPyramid, computeTestifyState, getFeatureFiles };