agileflow 2.90.7 → 2.92.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +10 -0
- package/README.md +6 -6
- package/lib/README.md +178 -0
- package/lib/codebase-indexer.js +818 -0
- package/lib/colors.js +190 -12
- package/lib/consent.js +232 -0
- package/lib/correlation.js +277 -0
- package/lib/error-codes.js +46 -0
- package/lib/errors.js +48 -6
- package/lib/file-cache.js +182 -0
- package/lib/format-error.js +156 -0
- package/lib/path-resolver.js +155 -7
- package/lib/paths.js +212 -20
- package/lib/placeholder-registry.js +205 -0
- package/lib/registry-di.js +358 -0
- package/lib/result-schema.js +363 -0
- package/lib/result.js +210 -0
- package/lib/session-registry.js +13 -0
- package/lib/session-state-machine.js +465 -0
- package/lib/validate-commands.js +308 -0
- package/lib/validate-names.js +3 -3
- package/lib/validate.js +116 -52
- package/package.json +4 -1
- package/scripts/af +34 -0
- package/scripts/agent-loop.js +63 -9
- package/scripts/agileflow-configure.js +2 -2
- package/scripts/agileflow-welcome.js +435 -23
- package/scripts/archive-completed-stories.sh +57 -11
- package/scripts/claude-tmux.sh +102 -0
- package/scripts/damage-control-bash.js +3 -70
- package/scripts/damage-control-edit.js +3 -20
- package/scripts/damage-control-write.js +3 -20
- package/scripts/dependency-check.js +310 -0
- package/scripts/get-env.js +11 -4
- package/scripts/lib/configure-detect.js +23 -1
- package/scripts/lib/configure-features.js +43 -2
- package/scripts/lib/context-formatter.js +771 -0
- package/scripts/lib/context-loader.js +699 -0
- package/scripts/lib/damage-control-utils.js +107 -0
- package/scripts/lib/json-utils.sh +162 -0
- package/scripts/lib/state-migrator.js +353 -0
- package/scripts/lib/story-state-machine.js +437 -0
- package/scripts/obtain-context.js +118 -1048
- package/scripts/pre-push-check.sh +46 -0
- package/scripts/precompact-context.sh +36 -11
- package/scripts/query-codebase.js +538 -0
- package/scripts/ralph-loop.js +5 -5
- package/scripts/session-manager.js +220 -42
- package/scripts/spawn-parallel.js +651 -0
- package/scripts/tui/blessed/data/watcher.js +180 -0
- package/scripts/tui/blessed/index.js +244 -0
- package/scripts/tui/blessed/panels/output.js +101 -0
- package/scripts/tui/blessed/panels/sessions.js +150 -0
- package/scripts/tui/blessed/panels/trace.js +97 -0
- package/scripts/tui/blessed/ui/help.js +77 -0
- package/scripts/tui/blessed/ui/screen.js +52 -0
- package/scripts/tui/blessed/ui/statusbar.js +47 -0
- package/scripts/tui/blessed/ui/tabbar.js +99 -0
- package/scripts/tui/index.js +38 -30
- package/scripts/validators/README.md +143 -0
- package/scripts/validators/component-validator.js +239 -0
- package/scripts/validators/json-schema-validator.js +186 -0
- package/scripts/validators/markdown-validator.js +152 -0
- package/scripts/validators/migration-validator.js +129 -0
- package/scripts/validators/security-validator.js +380 -0
- package/scripts/validators/story-format-validator.js +197 -0
- package/scripts/validators/test-result-validator.js +114 -0
- package/scripts/validators/workflow-validator.js +247 -0
- package/src/core/agents/accessibility.md +6 -0
- package/src/core/agents/adr-writer.md +6 -0
- package/src/core/agents/analytics.md +6 -0
- package/src/core/agents/api.md +6 -0
- package/src/core/agents/ci.md +6 -0
- package/src/core/agents/codebase-query.md +261 -0
- package/src/core/agents/compliance.md +6 -0
- package/src/core/agents/configuration-damage-control.md +6 -0
- package/src/core/agents/configuration-visual-e2e.md +6 -0
- package/src/core/agents/database.md +10 -0
- package/src/core/agents/datamigration.md +6 -0
- package/src/core/agents/design.md +6 -0
- package/src/core/agents/devops.md +6 -0
- package/src/core/agents/documentation.md +6 -0
- package/src/core/agents/epic-planner.md +6 -0
- package/src/core/agents/integrations.md +6 -0
- package/src/core/agents/mentor.md +6 -0
- package/src/core/agents/mobile.md +6 -0
- package/src/core/agents/monitoring.md +6 -0
- package/src/core/agents/multi-expert.md +6 -0
- package/src/core/agents/performance.md +6 -0
- package/src/core/agents/product.md +6 -0
- package/src/core/agents/qa.md +6 -0
- package/src/core/agents/readme-updater.md +6 -0
- package/src/core/agents/refactor.md +6 -0
- package/src/core/agents/research.md +6 -0
- package/src/core/agents/security.md +6 -0
- package/src/core/agents/testing.md +10 -0
- package/src/core/agents/ui.md +6 -0
- package/src/core/commands/adr.md +114 -0
- package/src/core/commands/agent.md +120 -0
- package/src/core/commands/assign.md +145 -0
- package/src/core/commands/audit.md +401 -0
- package/src/core/commands/babysit.md +32 -5
- package/src/core/commands/board.md +1 -0
- package/src/core/commands/changelog.md +118 -0
- package/src/core/commands/configure.md +42 -6
- package/src/core/commands/diagnose.md +114 -0
- package/src/core/commands/epic.md +205 -1
- package/src/core/commands/handoff.md +128 -0
- package/src/core/commands/help.md +76 -0
- package/src/core/commands/metrics.md +1 -0
- package/src/core/commands/pr.md +96 -0
- package/src/core/commands/research/analyze.md +1 -0
- package/src/core/commands/research/ask.md +2 -0
- package/src/core/commands/research/import.md +1 -0
- package/src/core/commands/research/list.md +2 -0
- package/src/core/commands/research/synthesize.md +584 -0
- package/src/core/commands/research/view.md +2 -0
- package/src/core/commands/roadmap/analyze.md +400 -0
- package/src/core/commands/session/new.md +113 -6
- package/src/core/commands/session/spawn.md +197 -0
- package/src/core/commands/sprint.md +22 -0
- package/src/core/commands/status.md +200 -1
- package/src/core/commands/story/list.md +9 -9
- package/src/core/commands/story/view.md +1 -0
- package/src/core/commands/story.md +143 -4
- package/src/core/experts/codebase-query/expertise.yaml +190 -0
- package/src/core/experts/codebase-query/question.md +73 -0
- package/src/core/experts/codebase-query/self-improve.md +105 -0
- package/src/core/templates/agileflow-metadata.json +55 -2
- package/src/core/templates/plan-template.md +125 -0
- package/src/core/templates/story-lifecycle.md +213 -0
- package/src/core/templates/story-template.md +4 -0
- package/src/core/templates/tdd-test-template.js +241 -0
- package/tools/cli/commands/setup.js +86 -0
- package/tools/cli/installers/core/installer.js +94 -0
- package/tools/cli/installers/ide/_base-ide.js +20 -11
- package/tools/cli/installers/ide/codex.js +29 -47
- package/tools/cli/lib/config-manager.js +17 -2
- package/tools/cli/lib/content-transformer.js +271 -0
- package/tools/cli/lib/error-handler.js +14 -22
- package/tools/cli/lib/ide-error-factory.js +421 -0
- package/tools/cli/lib/ide-health-monitor.js +364 -0
- package/tools/cli/lib/ide-registry.js +114 -1
- package/tools/cli/lib/ui.js +14 -25
|
@@ -387,6 +387,111 @@ function validatePathAgainstPatterns(filePath, config, operation = 'access') {
|
|
|
387
387
|
return { action: 'allow' };
|
|
388
388
|
}
|
|
389
389
|
|
|
390
|
+
/**
 * Factory that builds a path-based damage control hook runner.
 *
 * Edit/Write/Delete hooks all share the same shape: read a file path from
 * the tool input, load path patterns, and validate the path for the given
 * operation. This factory captures that shared wiring once.
 *
 * @param {string} operation - Operation type ('edit', 'write', 'delete')
 * @returns {function} Zero-argument hook runner
 *
 * @example
 * // In damage-control-edit.js:
 * const { createPathHook } = require('./lib/damage-control-utils');
 * createPathHook('edit')();
 */
function createPathHook(operation) {
  return function runHook() {
    const root = findProjectRoot();
    // Empty pattern lists = allow everything when no config is present.
    const fallback = { zeroAccessPaths: [], readOnlyPaths: [], noDeletePaths: [] };

    // Path may arrive at the top level or nested under tool_input.
    const extractPath = input => input.file_path || input.tool_input?.file_path;

    runDamageControlHook({
      getInputValue: extractPath,
      loadConfig: () => loadPatterns(root, parsePathPatterns, fallback),
      validate: (filePath, config) => validatePathAgainstPatterns(filePath, config, operation),
      onBlock: (result, filePath) =>
        outputBlocked(result.reason, result.detail, `File: ${filePath}`),
    });
  };
}
|
|
418
|
+
|
|
419
|
+
/**
 * Factory that builds the bash damage control hook runner.
 *
 * Commands are matched against blocked patterns (bashToolPatterns plus
 * agileflowProtections) and then against ask patterns; the first match
 * wins. Unmatched commands are allowed.
 *
 * @returns {function} Zero-argument hook runner
 *
 * @example
 * // In damage-control-bash.js:
 * const { createBashHook } = require('./lib/damage-control-utils');
 * createBashHook()();
 */
function createBashHook() {
  return function runHook() {
    const root = findProjectRoot();
    // Empty pattern lists = allow everything when no config is present.
    const fallback = { bashToolPatterns: [], askPatterns: [], agileflowProtections: [] };

    // Test command against a single pattern rule.
    // A rule with an invalid regex never matches (skipped, not fatal).
    const matchesPattern = (command, rule) => {
      try {
        return new RegExp(rule.pattern, rule.flags || '').test(command);
      } catch {
        return false;
      }
    };

    // Validate command against all patterns; first matching rule decides.
    const validateCommand = (command, config) => {
      const blockedRules = [
        ...(config.bashToolPatterns || []),
        ...(config.agileflowProtections || []),
      ];

      const blockedHit = blockedRules.find(rule => matchesPattern(command, rule));
      if (blockedHit) {
        return {
          action: 'block',
          reason: blockedHit.reason || 'Command blocked by damage control',
        };
      }

      const askHit = (config.askPatterns || []).find(rule => matchesPattern(command, rule));
      if (askHit) {
        return {
          action: 'ask',
          reason: askHit.reason || 'Please confirm this command',
        };
      }

      // Allow by default.
      return { action: 'allow' };
    };

    runDamageControlHook({
      getInputValue: input => input.command || input.tool_input?.command,
      loadConfig: () => loadPatterns(root, parseBashPatterns, fallback),
      validate: validateCommand,
      onBlock: (result, command) => {
        // Truncate very long commands in the block message.
        const preview =
          command.length > 100 ? `${command.substring(0, 100)}...` : command;
        outputBlocked(result.reason, `Command: ${preview}`);
      },
    });
  };
}
|
|
494
|
+
|
|
390
495
|
module.exports = {
|
|
391
496
|
c,
|
|
392
497
|
findProjectRoot,
|
|
@@ -399,6 +504,8 @@ module.exports = {
|
|
|
399
504
|
parseBashPatterns,
|
|
400
505
|
parsePathPatterns,
|
|
401
506
|
validatePathAgainstPatterns,
|
|
507
|
+
createPathHook,
|
|
508
|
+
createBashHook,
|
|
402
509
|
CONFIG_PATHS,
|
|
403
510
|
STDIN_TIMEOUT_MS,
|
|
404
511
|
};
|
|
@@ -0,0 +1,162 @@
|
|
|
1
|
+
#!/bin/bash
# AgileFlow JSON Utilities - Bash Edition
#
# Source this file: source "$(dirname "${BASH_SOURCE[0]}")/lib/json-utils.sh"
#
# Provides safe JSON parsing functions with error handling.
# All functions return 0 and print a default on failure rather than
# aborting, so callers never need `set -e` guards around them.

# Source colors from canonical source if not already loaded.
# $RESET being set is used as the "already loaded" sentinel — presumably
# exported by colors.sh; verify if colors.sh changes its variable names.
UTILS_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
if [[ -z "$RESET" ]] && [[ -f "$UTILS_DIR/colors.sh" ]]; then
  source "$UTILS_DIR/colors.sh"
fi
|
|
13
|
+
|
|
14
|
+
# ============================================================================
|
|
15
|
+
# Safe JSON Parsing
|
|
16
|
+
# ============================================================================
|
|
17
|
+
|
|
18
|
+
# safeJsonParse - Parse JSON file safely with validation
# Returns the extracted value or a default if parsing fails.
# Logs errors to stderr but doesn't crash the script.
#
# Usage:
#   value=$(safeJsonParse "/path/to/file.json" ".key.path" "default_value")
#   value=$(safeJsonParse "$FILE" ".archival.enabled" "true")
#
# Arguments:
#   $1 - File path to parse
#   $2 - jq query path (e.g., ".key.subkey")
#   $3 - Default value if parse fails (required for safety)
#
safeJsonParse() {
  local file="$1"
  local query="$2"
  local default="$3"

  # Validate arguments
  if [[ -z "$file" ]] || [[ -z "$query" ]]; then
    echo "$default"
    return 0
  fi

  # Check file exists
  if [[ ! -f "$file" ]]; then
    echo "$default"
    return 0
  fi

  # Check file is readable
  if [[ ! -r "$file" ]]; then
    echo -e "\033[0;31m[json-utils] Cannot read file: $file\033[0m" >&2
    echo "$default"
    return 0
  fi

  # Try jq first (preferred, more robust).
  # FIX: map only null to "missing" explicitly. The previous
  # `"$query // empty"` form also discarded a legitimate JSON `false`
  # (false is falsy in jq's // operator), so boolean false fields fell
  # through to the fallback/default; and the `!= "null"` guard rejected a
  # legitimate string value "null".
  if command -v jq &> /dev/null; then
    local result
    result=$(jq -r "$query | if . == null then empty else . end" "$file" 2>/dev/null)
    if [[ -n "$result" ]]; then
      echo "$result"
      return 0
    fi
  fi

  # Fallback to Node.js if jq unavailable or failed
  if command -v node &> /dev/null; then
    local result
    # Security: Pass file path via environment variable
    result=$(JSON_FILE="$file" JSON_QUERY="$query" node -pe "
      try {
        const fs = require('fs');
        const content = fs.readFileSync(process.env.JSON_FILE, 'utf8');
        const data = JSON.parse(content);
        const keys = process.env.JSON_QUERY.replace(/^\\./, '').split('.');
        let result = data;
        for (const key of keys) {
          if (result && typeof result === 'object' && key in result) {
            result = result[key];
          } else {
            result = null;
            break;
          }
        }
        result !== null && result !== undefined ? String(result) : '';
      } catch (e) {
        '';
      }
    " 2>/dev/null)

    if [[ -n "$result" ]]; then
      echo "$result"
      return 0
    fi
  fi

  # If both methods fail, return default
  echo "$default"
  return 0
}
|
|
100
|
+
|
|
101
|
+
# safeJsonValidate - Check if file contains valid JSON
# Returns 0 if valid, 1 if invalid
#
# Usage:
#   if safeJsonValidate "/path/to/file.json"; then
#     echo "Valid JSON"
#   fi
#
safeJsonValidate() {
  local file="$1"

  # Unreadable or missing files are treated as invalid.
  [[ -f "$file" && -r "$file" ]] || return 1

  # Prefer jq when available; `jq empty` parses without producing output.
  if command -v jq &> /dev/null; then
    jq empty "$file" 2>/dev/null
    return $?
  fi

  # Fallback to Node.js (file path passed via env for safety).
  if command -v node &> /dev/null; then
    JSON_FILE="$file" node -e "
      try {
        const fs = require('fs');
        JSON.parse(fs.readFileSync(process.env.JSON_FILE, 'utf8'));
        process.exit(0);
      } catch {
        process.exit(1);
      }
    " 2>/dev/null
    return $?
  fi

  # Neither jq nor node available: assume invalid.
  return 1
}
|
|
139
|
+
|
|
140
|
+
# safeJsonRead - Read entire JSON file safely
# Returns the JSON content, or "{}" if the file is missing, unreadable,
# or contains invalid JSON (an error is logged to stderr in that case).
#
# Usage:
#   content=$(safeJsonRead "/path/to/file.json")
#
safeJsonRead() {
  local file="$1"

  if [[ ! -f "$file" || ! -r "$file" ]]; then
    echo "{}"
    return 0
  fi

  # Only emit the content if it parses as JSON.
  if ! safeJsonValidate "$file"; then
    echo -e "\033[0;31m[json-utils] Invalid JSON in: $file\033[0m" >&2
    echo "{}"
    return 0
  fi

  cat "$file"
}
|
|
@@ -0,0 +1,353 @@
|
|
|
1
|
+
/**
 * AgileFlow CLI - State Schema Migrator
 *
 * Handles schema versioning and automatic migrations for status.json.
 * Ensures backwards compatibility as the schema evolves.
 *
 * Schema Version History:
 * - 1.0.0: Original schema (no version field)
 * - 2.0.0: Added schema_version field, normalized story structure
 */

const fs = require('fs');
// NOTE(review): `path` is not referenced in this chunk — confirm it is
// used elsewhere in the file before removing.
const path = require('path');

// Import status constants from single source of truth
const { VALID_STATUSES } = require('./story-state-machine');

// Current schema version (target that migrate() upgrades data to)
const CURRENT_SCHEMA_VERSION = '2.0.0';

// Migration log: module-level mutable state, reset at the start of each
// migrate() run via clearMigrationLog(). Not safe for concurrent migrations.
let migrationLog = [];
|
|
23
|
+
|
|
24
|
+
/**
 * Parse a semantic version string into numeric components.
 *
 * Missing, empty, or "unknown" input maps to the pre-versioning default
 * 1.0.0; any individual component that is absent or non-numeric falls
 * back to its default (major 1, minor 0, patch 0).
 *
 * @param {string} version - Version string (e.g., "1.2.3")
 * @returns {{ major: number, minor: number, patch: number }}
 */
function parseVersion(version) {
  if (!version || version === 'unknown') {
    return { major: 1, minor: 0, patch: 0 };
  }
  const defaults = [1, 0, 0];
  const nums = version.split('.').map(Number);
  const component = i =>
    nums[i] !== undefined && !Number.isNaN(nums[i]) ? nums[i] : defaults[i];
  return { major: component(0), minor: component(1), patch: component(2) };
}
|
|
40
|
+
|
|
41
|
+
/**
 * Compare two semantic version strings component by component.
 *
 * @param {string} v1 - First version
 * @param {string} v2 - Second version
 * @returns {number} -1 if v1 < v2, 0 if equal, 1 if v1 > v2
 */
function compareVersions(v1, v2) {
  const a = parseVersion(v1);
  const b = parseVersion(v2);

  // Highest-significance component that differs decides the ordering.
  for (const key of ['major', 'minor', 'patch']) {
    if (a[key] < b[key]) return -1;
    if (a[key] > b[key]) return 1;
  }
  return 0;
}
|
|
56
|
+
|
|
57
|
+
/**
 * Detect the schema version of a status.json object.
 *
 * @param {Object} data - Parsed status.json data
 * @returns {string} Detected schema version
 */
function detectSchemaVersion(data) {
  // No (or empty) version field means the original pre-versioning schema.
  return data.schema_version || '1.0.0';
}
|
|
71
|
+
|
|
72
|
+
/**
 * Append a timestamped entry to the module-level migration log.
 *
 * @param {string} message - Migration log message
 */
function logMigration(message) {
  migrationLog.push({ timestamp: new Date().toISOString(), message });
}
|
|
83
|
+
|
|
84
|
+
/**
 * Clear migration log.
 * Reassigns (rather than truncates) so previously returned copies from
 * getMigrationLog() are unaffected. Called at the start of each migrate() run.
 */
function clearMigrationLog() {
  migrationLog = [];
}
|
|
90
|
+
|
|
91
|
+
/**
 * Get a shallow copy of the migration log (callers cannot mutate the
 * module-level log through it).
 *
 * @returns {Array} Migration log entries
 */
function getMigrationLog() {
  return migrationLog.slice();
}
|
|
98
|
+
|
|
99
|
+
/**
 * Migration: 1.0.0 -> 2.0.0
 * - Adds schema_version field
 * - Normalizes story status values
 * - Adds migrated_at timestamp
 *
 * NOTE: nested story objects are normalized in place (shared with the
 * input); migrate() deep-clones first when dryRun is requested.
 *
 * @param {Object} data - Status data at v1.0.0
 * @returns {Object} Migrated data at v2.0.0
 */
function migrate_1_0_0_to_2_0_0(data) {
  logMigration('Starting migration from 1.0.0 to 2.0.0');

  // FIX: spread the input first, THEN set schema_version. The previous
  // order ({ schema_version: '2.0.0', ...data }) let a stray legacy
  // schema_version field in the input overwrite the new version, so the
  // saved file would still claim the old version and be re-migrated on
  // every load.
  const migrated = {
    ...data,
    schema_version: '2.0.0',
  };

  // Map legacy/ad-hoc status values onto the canonical set.
  const statusNormalization = {
    todo: 'ready',
    new: 'ready',
    pending: 'ready',
    open: 'ready',
    wip: 'in_progress',
    working: 'in_progress',
    in_review: 'in_progress',
    closed: 'completed',
    done: 'completed',
    finished: 'completed',
    resolved: 'completed',
  };

  let normalizedCount = 0;
  if (migrated.stories) {
    for (const [storyId, story] of Object.entries(migrated.stories)) {
      if (story.status && statusNormalization[story.status.toLowerCase()]) {
        const oldStatus = story.status;
        story.status = statusNormalization[story.status.toLowerCase()];
        normalizedCount++;
        logMigration(`Normalized ${storyId} status: ${oldStatus} -> ${story.status}`);
      }
    }
  }

  // Add migration metadata
  migrated.migrated_at = new Date().toISOString();
  migrated.migrated_from = '1.0.0';

  logMigration(`Migration to 2.0.0 complete. Normalized ${normalizedCount} story statuses.`);

  return migrated;
}
|
|
151
|
+
|
|
152
|
+
/**
 * Registry of migration functions.
 * Key: "fromVersion->toVersion" — must match the step strings produced
 * by getMigrationPath(), which migrate() uses to look up each function.
 */
const MIGRATIONS = {
  '1.0.0->2.0.0': migrate_1_0_0_to_2_0_0,
};
|
|
159
|
+
|
|
160
|
+
/**
 * Get the migration path from one version to another.
 *
 * Currently a single hard-coded step; a graph traversal can replace this
 * once multi-step migrations exist.
 *
 * @param {string} fromVersion - Starting version
 * @param {string} toVersion - Target version
 * @returns {Array<string>} Array of version steps (keys into MIGRATIONS)
 */
function getMigrationPath(fromVersion, toVersion) {
  const steps = [];

  const crossesV2 =
    compareVersions(fromVersion, '2.0.0') < 0 && compareVersions(toVersion, '2.0.0') >= 0;
  if (crossesV2) {
    steps.push('1.0.0->2.0.0');
  }

  return steps;
}
|
|
177
|
+
|
|
178
|
+
/**
 * Migrate status.json data to the current schema version.
 *
 * @param {Object} data - Parsed status.json data
 * @param {Object} [options] - Migration options
 * @param {boolean} [options.dryRun=false] - If true, don't modify data
 * @returns {{ data: Object, migrated: boolean, fromVersion: string, toVersion: string, log: Array }}
 */
function migrate(data, options = {}) {
  const { dryRun = false } = options;

  clearMigrationLog();

  const fromVersion = detectSchemaVersion(data);
  const toVersion = CURRENT_SCHEMA_VERSION;

  // Shared shape for the "nothing to do" early exits.
  const unchanged = () => ({
    data,
    migrated: false,
    fromVersion,
    toVersion,
    log: getMigrationLog(),
  });

  // Already at (or beyond) the current version.
  if (compareVersions(fromVersion, toVersion) >= 0) {
    return unchanged();
  }

  logMigration(`Detected schema version: ${fromVersion}`);
  logMigration(`Target schema version: ${toVersion}`);

  const steps = getMigrationPath(fromVersion, toVersion);
  if (steps.length === 0) {
    logMigration('No migration path found');
    return unchanged();
  }

  // Dry runs operate on a deep copy so the caller's object is untouched.
  let working = dryRun ? JSON.parse(JSON.stringify(data)) : data;

  // Apply each migration step in sequence.
  for (const step of steps) {
    const migrationFn = MIGRATIONS[step];
    if (!migrationFn) {
      throw new Error(`Missing migration function for: ${step}`);
    }
    logMigration(`Applying migration: ${step}`);
    working = migrationFn(working);
  }

  return {
    data: working,
    migrated: true,
    fromVersion,
    toVersion,
    log: getMigrationLog(),
  };
}
|
|
241
|
+
|
|
242
|
+
/**
 * Load status.json with automatic migration.
 *
 * @param {string} filePath - Path to status.json
 * @param {Object} [options] - Options
 * @param {boolean} [options.autoSave=true] - Automatically save migrated data
 * @returns {{ data: Object, migrated: boolean, fromVersion: string, toVersion: string, log: Array }}
 * @throws {Error} If the file is missing or does not contain valid JSON
 */
function loadWithMigration(filePath, options = {}) {
  const { autoSave = true } = options;

  if (!fs.existsSync(filePath)) {
    throw new Error(`File not found: ${filePath}`);
  }

  const raw = fs.readFileSync(filePath, 'utf8');
  let parsed;
  try {
    parsed = JSON.parse(raw);
  } catch (e) {
    throw new Error(`Invalid JSON in ${filePath}: ${e.message}`);
  }

  const result = migrate(parsed);

  if (result.migrated && autoSave) {
    // Keep the original bytes around in case the migration is wrong.
    const backupPath = `${filePath}.backup.${Date.now()}`;
    fs.writeFileSync(backupPath, raw);
    logMigration(`Created backup: ${backupPath}`);

    fs.writeFileSync(filePath, JSON.stringify(result.data, null, 2) + '\n');
    logMigration(`Saved migrated data to: ${filePath}`);

    // Refresh the log so the backup/save entries are visible to callers.
    result.log = getMigrationLog();
  }

  return result;
}
|
|
280
|
+
|
|
281
|
+
/**
 * Check whether status.json data is behind the current schema version.
 *
 * @param {Object} data - Parsed status.json data
 * @returns {{ needsMigration: boolean, currentVersion: string, targetVersion: string }}
 */
function needsMigration(data) {
  const currentVersion = detectSchemaVersion(data);
  const targetVersion = CURRENT_SCHEMA_VERSION;
  const behind = compareVersions(currentVersion, targetVersion) < 0;

  return { needsMigration: behind, currentVersion, targetVersion };
}
|
|
294
|
+
|
|
295
|
+
/**
 * Validate migrated data against the expected (v2.0.0) schema.
 *
 * Collects every problem rather than stopping at the first one.
 *
 * @param {Object} data - Data to validate
 * @returns {{ valid: boolean, errors: Array<string> }}
 */
function validateSchema(data) {
  const errors = [];

  // schema_version is mandatory from v2.0.0 onward.
  if (!data.schema_version) {
    errors.push('Missing required field: schema_version');
  }

  // Each story needs a title and a canonical status.
  for (const [storyId, story] of Object.entries(data.stories || {})) {
    if (!story.title) {
      errors.push(`Story ${storyId} missing required field: title`);
    }
    if (!story.status) {
      errors.push(`Story ${storyId} missing required field: status`);
    }
    // VALID_STATUSES comes from story-state-machine.js (single source of truth).
    if (story.status && !VALID_STATUSES.includes(story.status)) {
      errors.push(`Story ${storyId} has invalid status: ${story.status}`);
    }
  }

  // Each epic needs a title.
  for (const [epicId, epic] of Object.entries(data.epics || {})) {
    if (!epic.title) {
      errors.push(`Epic ${epicId} missing required field: title`);
    }
  }

  return { valid: errors.length === 0, errors };
}
|
|
338
|
+
|
|
339
|
+
// Public API of the state migrator. The last two exports are internal
// helpers exposed only for unit testing.
module.exports = {
  CURRENT_SCHEMA_VERSION,
  parseVersion,
  compareVersions,
  detectSchemaVersion,
  migrate,
  loadWithMigration,
  needsMigration,
  validateSchema,
  getMigrationLog,
  clearMigrationLog,
  // For testing
  migrate_1_0_0_to_2_0_0,
  getMigrationPath,
};
|