ripp-cli 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +292 -0
- package/index.js +1350 -0
- package/lib/ai-provider.js +354 -0
- package/lib/analyzer.js +394 -0
- package/lib/build.js +338 -0
- package/lib/config.js +277 -0
- package/lib/confirmation.js +183 -0
- package/lib/discovery.js +119 -0
- package/lib/evidence.js +368 -0
- package/lib/init.js +488 -0
- package/lib/linter.js +309 -0
- package/lib/migrate.js +203 -0
- package/lib/packager.js +374 -0
- package/package.json +40 -0
package/lib/linter.js
ADDED
@@ -0,0 +1,309 @@
/**
 * RIPP Linter
 *
 * Best-practice checker that runs AFTER schema validation.
 * Performs deterministic checks NOT covered by the JSON Schema.
 *
 * Guardrails:
 * - Does NOT reimplement schema validation
 * - Does NOT modify source packets
 * - Checks conventions and best practices only
 */

/**
 * Lint a RIPP packet for best practices
 * @param {Object} packet - Validated RIPP packet
 * @param {string} filePath - Path to the packet file
 * @returns {Object} Lint result with errors and warnings
 */
function lintPacket(packet, filePath) {
  const errors = [];
  const warnings = [];

  // Rule 1: Check for missing critical optional sections
  checkCriticalSections(packet, warnings);

  // Rule 2: Check for missing non_goals (best practice)
  checkNonGoals(packet, warnings);

  // Rule 3: Check for undefined ID references
  checkIdReferences(packet, errors);

  // Rule 4: Check for placeholder text
  checkPlaceholders(packet, warnings);

  // Rule 5: Check for missing verification.done_when in acceptance tests (Level 3)
  checkVerification(packet, warnings);

  return {
    errors,
    warnings,
    errorCount: errors.length,
    warningCount: warnings.length
  };
}

/**
 * Check for missing critical optional sections
 */
function checkCriticalSections(packet, warnings) {
  const level = packet.level || 1;

  // purpose.out_of_scope is recommended
  if (!packet.purpose?.out_of_scope) {
    warnings.push({
      rule: 'missing-out-of-scope',
      severity: 'warn',
      message:
        "Missing 'purpose.out_of_scope' - explicitly defining what is NOT included improves clarity",
      location: 'purpose.out_of_scope'
    });
  }

  // purpose.assumptions is recommended
  if (!packet.purpose?.assumptions || packet.purpose.assumptions.length === 0) {
    warnings.push({
      rule: 'missing-assumptions',
      severity: 'warn',
      message: "Missing 'purpose.assumptions' - documenting assumptions prevents surprises",
      location: 'purpose.assumptions'
    });
  }

  // For Level 2+, check security-related NFRs
  if (level >= 2 && packet.nfrs && !packet.nfrs.security) {
    warnings.push({
      rule: 'missing-security-nfrs',
      severity: 'warn',
      message: "Missing 'nfrs.security' - security requirements should be explicit for Level 2+",
      location: 'nfrs.security'
    });
  }
}

/**
 * Check for missing non_goals (deprecated - now called out_of_scope)
 */
function checkNonGoals(packet, warnings) {
  // This is handled by out_of_scope check above
  // Kept as separate function for future expansion
}

/**
 * Check for undefined ID references in cross-references
 */
function checkIdReferences(packet, errors) {
  // Collect all defined IDs
  const definedIds = new Set();

  // Add test IDs from acceptance_tests
  if (packet.acceptance_tests) {
    packet.acceptance_tests.forEach(test => {
      if (test.test_id) {
        definedIds.add(test.test_id);
      }
    });
  }

  // Add entity names from data_contracts
  if (packet.data_contracts) {
    if (packet.data_contracts.inputs) {
      packet.data_contracts.inputs.forEach(entity => {
        if (entity.name) {
          definedIds.add(entity.name);
        }
      });
    }
    if (packet.data_contracts.outputs) {
      packet.data_contracts.outputs.forEach(entity => {
        if (entity.name) {
          definedIds.add(entity.name);
        }
      });
    }
  }

  // Check for schema_ref references in api_contracts
  if (packet.api_contracts) {
    packet.api_contracts.forEach((api, idx) => {
      // Check request schema_ref
      if (api.request?.schema_ref && !definedIds.has(api.request.schema_ref)) {
        errors.push({
          rule: 'undefined-schema-ref',
          severity: 'error',
          message: `Undefined schema reference '${api.request.schema_ref}' in api_contracts[${idx}].request.schema_ref`,
          location: `api_contracts[${idx}].request.schema_ref`,
          reference: api.request.schema_ref
        });
      }

      // Check response schema_ref
      if (api.response?.success?.schema_ref && !definedIds.has(api.response.success.schema_ref)) {
        errors.push({
          rule: 'undefined-schema-ref',
          severity: 'error',
          message: `Undefined schema reference '${api.response.success.schema_ref}' in api_contracts[${idx}].response.success.schema_ref`,
          location: `api_contracts[${idx}].response.success.schema_ref`,
          reference: api.response.success.schema_ref
        });
      }
    });
  }
}

/**
 * Check for placeholder text that should be replaced
 */
function checkPlaceholders(packet, warnings) {
  const placeholderPatterns = [
    /\bTODO\b/i,
    /\bTBD\b/i,
    /\bFIXME\b/i,
    /lorem ipsum/i,
    /\bplaceholder\b/i,
    /\bexample\.com\b/i
  ];

  function checkValue(value, path) {
    if (typeof value === 'string') {
      placeholderPatterns.forEach(pattern => {
        if (pattern.test(value)) {
          warnings.push({
            rule: 'placeholder-text',
            severity: 'warn',
            message: `Possible placeholder text found: "${value.substring(0, 50)}..."`,
            location: path,
            pattern: pattern.toString()
          });
        }
      });
    } else if (typeof value === 'object' && value !== null) {
      if (Array.isArray(value)) {
        value.forEach((item, idx) => checkValue(item, `${path}[${idx}]`));
      } else {
        Object.keys(value).forEach(key => checkValue(value[key], `${path}.${key}`));
      }
    }
  }

  // Scan entire packet for placeholders
  checkValue(packet, 'root');
}

/**
 * Check for missing or incomplete verification in acceptance tests
 */
function checkVerification(packet, warnings) {
  if (packet.level === 3 && packet.acceptance_tests) {
    packet.acceptance_tests.forEach((test, idx) => {
      // Check if verification array is empty
      if (!test.verification || test.verification.length === 0) {
        warnings.push({
          rule: 'missing-verification',
          severity: 'warn',
          message: `Acceptance test '${test.test_id || idx}' has no verification steps`,
          location: `acceptance_tests[${idx}].verification`
        });
      } else {
        // Check for vague verification steps
        test.verification.forEach((step, stepIdx) => {
          if (step.length < 10 || (/\b(check|verify|ensure)\b/i.test(step) && step.length < 20)) {
            warnings.push({
              rule: 'vague-verification',
              severity: 'warn',
              message: `Verification step may be too vague: "${step}"`,
              location: `acceptance_tests[${idx}].verification[${stepIdx}]`
            });
          }
        });
      }
    });
  }
}

/**
 * Generate a lint report in JSON format
 */
function generateJsonReport(results) {
  return JSON.stringify(
    {
      summary: {
        totalFiles: results.length,
        filesWithErrors: results.filter(r => r.errorCount > 0).length,
        filesWithWarnings: results.filter(r => r.warningCount > 0).length,
        totalErrors: results.reduce((sum, r) => sum + r.errorCount, 0),
        totalWarnings: results.reduce((sum, r) => sum + r.warningCount, 0)
      },
      results: results.map(r => ({
        file: r.file,
        errorCount: r.errorCount,
        warningCount: r.warningCount,
        errors: r.errors,
        warnings: r.warnings
      }))
    },
    null,
    2
  );
}

/**
 * Generate a lint report in Markdown format
 */
function generateMarkdownReport(results) {
  let md = '# RIPP Lint Report\n\n';

  const totalFiles = results.length;
  const filesWithErrors = results.filter(r => r.errorCount > 0).length;
  const filesWithWarnings = results.filter(r => r.warningCount > 0).length;
  const totalErrors = results.reduce((sum, r) => sum + r.errorCount, 0);
  const totalWarnings = results.reduce((sum, r) => sum + r.warningCount, 0);

  md += '## Summary\n\n';
  md += `- **Total Files**: ${totalFiles}\n`;
  md += `- **Files with Errors**: ${filesWithErrors}\n`;
  md += `- **Files with Warnings**: ${filesWithWarnings}\n`;
  md += `- **Total Errors**: ${totalErrors}\n`;
  md += `- **Total Warnings**: ${totalWarnings}\n\n`;

  md += '---\n\n';

  md += '## Files\n\n';

  results.forEach(result => {
    md += `### ${result.file}\n\n`;

    if (result.errorCount === 0 && result.warningCount === 0) {
      md += '✅ No issues found\n\n';
    } else {
      if (result.errors.length > 0) {
        md += '#### Errors\n\n';
        result.errors.forEach(error => {
          md += `- **[${error.rule}]** ${error.message}\n`;
          md += `  - Location: \`${error.location}\`\n`;
          if (error.reference) {
            md += `  - Reference: \`${error.reference}\`\n`;
          }
        });
        md += '\n';
      }

      if (result.warnings.length > 0) {
        md += '#### Warnings\n\n';
        result.warnings.forEach(warning => {
          md += `- **[${warning.rule}]** ${warning.message}\n`;
          md += `  - Location: \`${warning.location}\`\n`;
        });
        md += '\n';
      }
    }
  });

  return md;
}

module.exports = {
  lintPacket,
  generateJsonReport,
  generateMarkdownReport
};
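
lintPacket produces the errors and warnings for a single packet, while generateJsonReport and generateMarkdownReport expect an array of per-file results that also carry a file field, which lintPacket does not attach itself. A minimal usage sketch, assuming the packet has already passed schema validation; the packet path and the relative require path are illustrative only:

const fs = require('fs');
// Require path is relative to the package root; adjust for your install location.
const { lintPacket, generateJsonReport, generateMarkdownReport } = require('./lib/linter');

// Illustrative packet path; any schema-valid RIPP packet works here.
const file = 'ripp/output/handoffs/example-feature.ripp.json';
const packet = JSON.parse(fs.readFileSync(file, 'utf8'));

// Lint one packet, then attach the file name the report generators expect.
const result = lintPacket(packet, file);
const results = [{ file, ...result }];

console.log(generateJsonReport(results));
fs.writeFileSync('ripp-lint-report.md', generateMarkdownReport(results));
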
package/lib/migrate.js
ADDED
@@ -0,0 +1,203 @@
const fs = require('fs');
const path = require('path');

/**
 * Migrate RIPP directory structure from legacy to new layout
 *
 * Legacy structure:
 *   ripp/features/ → ripp/intent/
 *   ripp/handoffs/ → ripp/output/handoffs/
 *   ripp/packages/ → ripp/output/packages/
 *
 * New structure:
 *   ripp/intent/          (human-authored intent)
 *   ripp/output/handoffs/ (finalized packets)
 *   ripp/output/packages/ (generated outputs)
 */
function migrateDirectoryStructure({ dryRun = false } = {}) {
  const cwd = process.cwd();
  const results = {
    moved: [],
    created: [],
    skipped: [],
    warnings: []
  };

  console.log('🔍 Scanning for legacy RIPP directories...\n');

  const rippDir = path.join(cwd, 'ripp');
  if (!fs.existsSync(rippDir)) {
    results.warnings.push('No ripp/ directory found. Run "ripp init" first.');
    return results;
  }

  // Check legacy directories
  const legacyFeatures = path.join(rippDir, 'features');
  const legacyHandoffs = path.join(rippDir, 'handoffs');
  const legacyPackages = path.join(rippDir, 'packages');

  // New directories
  const newIntent = path.join(rippDir, 'intent');
  const newOutput = path.join(rippDir, 'output');
  const newHandoffs = path.join(newOutput, 'handoffs');
  const newPackages = path.join(newOutput, 'packages');

  let hasLegacy = false;

  /**
   * Safely move directory with cross-device support
   */
  function safeMove(source, dest, description) {
    try {
      // Try rename first (faster, atomic)
      fs.renameSync(source, dest);
      results.moved.push(`Moved: ${description}`);
    } catch (error) {
      // If rename fails (cross-device), use copy+remove
      if (error.code === 'EXDEV') {
        fs.cpSync(source, dest, { recursive: true });
        fs.rmSync(source, { recursive: true });
        results.moved.push(`Moved: ${description}`);
      } else {
        throw error;
      }
    }
  }

  // 1. Migrate features/ → intent/
  if (fs.existsSync(legacyFeatures)) {
    hasLegacy = true;
    if (fs.existsSync(newIntent)) {
      results.warnings.push('Both ripp/features/ and ripp/intent/ exist. Manual merge required.');
    } else {
      if (dryRun) {
        results.moved.push('Would move: ripp/features/ → ripp/intent/');
      } else {
        safeMove(legacyFeatures, newIntent, 'ripp/features/ → ripp/intent/');
      }
    }
  }

  // 2. Create output/ directory if needed
  if (fs.existsSync(legacyHandoffs) || fs.existsSync(legacyPackages)) {
    hasLegacy = true;
    if (!fs.existsSync(newOutput)) {
      if (dryRun) {
        results.created.push('Would create: ripp/output/');
      } else {
        fs.mkdirSync(newOutput, { recursive: true });
        results.created.push('Created: ripp/output/');
      }
    }
  }

  // 3. Migrate handoffs/ → output/handoffs/
  if (fs.existsSync(legacyHandoffs)) {
    if (fs.existsSync(newHandoffs)) {
      results.warnings.push(
        'Both ripp/handoffs/ and ripp/output/handoffs/ exist. Manual merge required.'
      );
    } else {
      if (dryRun) {
        results.moved.push('Would move: ripp/handoffs/ → ripp/output/handoffs/');
      } else {
        safeMove(legacyHandoffs, newHandoffs, 'ripp/handoffs/ → ripp/output/handoffs/');
      }
    }
  }

  // 4. Migrate packages/ → output/packages/
  if (fs.existsSync(legacyPackages)) {
    if (fs.existsSync(newPackages)) {
      results.warnings.push(
        'Both ripp/packages/ and ripp/output/packages/ exist. Manual merge required.'
      );
    } else {
      if (dryRun) {
        results.moved.push('Would move: ripp/packages/ → ripp/output/packages/');
      } else {
        safeMove(legacyPackages, newPackages, 'ripp/packages/ → ripp/output/packages/');
        // safeMove already records this move in results.moved; no extra push needed
      }
    }
  }

  if (!hasLegacy) {
    results.skipped.push('No legacy directories found. Already using new structure!');
  }

  return results;
}

/**
 * Detect if repository uses legacy directory structure
 */
function detectLegacyStructure(cwd = process.cwd()) {
  const rippDir = path.join(cwd, 'ripp');

  if (!fs.existsSync(rippDir)) {
    return { hasLegacy: false, directories: [] };
  }

  const legacyDirs = [];

  const legacyFeatures = path.join(rippDir, 'features');
  const legacyHandoffs = path.join(rippDir, 'handoffs');
  const legacyPackages = path.join(rippDir, 'packages');

  if (fs.existsSync(legacyFeatures)) {
    legacyDirs.push('ripp/features/');
  }
  if (fs.existsSync(legacyHandoffs)) {
    legacyDirs.push('ripp/handoffs/');
  }
  if (fs.existsSync(legacyPackages)) {
    legacyDirs.push('ripp/packages/');
  }

  return {
    hasLegacy: legacyDirs.length > 0,
    directories: legacyDirs
  };
}

/**
 * Get search paths supporting both legacy and new structures
 * Returns an array of paths to search for RIPP packets
 */
function getSearchPaths(basePath, cwd = process.cwd()) {
  const paths = [];

  // Normalize base path - handle both with and without 'ripp/' prefix
  let normalized = basePath;
  if (normalized.startsWith('ripp/')) {
    normalized = normalized.replace(/^ripp\//, '');
  }

  // Map of legacy → new paths
  const pathMappings = {
    features: ['intent', 'features'],
    intent: ['intent', 'features'],
    handoffs: ['output/handoffs', 'handoffs'],
    'output/handoffs': ['output/handoffs', 'handoffs'],
    packages: ['output/packages', 'packages'],
    'output/packages': ['output/packages', 'packages']
  };

  const mappings = pathMappings[normalized] || [normalized];

  for (const mapping of mappings) {
    const fullPath = path.join(cwd, 'ripp', mapping);
    if (fs.existsSync(fullPath)) {
      paths.push(fullPath);
    }
  }

  return paths;
}

module.exports = {
  migrateDirectoryStructure,
  detectLegacyStructure,
  getSearchPaths
};
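
detectLegacyStructure and migrateDirectoryStructure combine into a preview-then-apply flow: detect legacy directories, dry-run to list what would move, then migrate for real. A minimal usage sketch; the relative require path is illustrative, and both functions resolve directories against process.cwd():

const { detectLegacyStructure, migrateDirectoryStructure } = require('./lib/migrate');

const { hasLegacy, directories } = detectLegacyStructure();
if (hasLegacy) {
  console.log('Legacy directories found:', directories.join(', '));

  // Dry run: collects "Would move"/"Would create" entries without touching the filesystem.
  const preview = migrateDirectoryStructure({ dryRun: true });
  [...preview.moved, ...preview.created, ...preview.warnings].forEach(line => console.log(line));

  // Real run: performs the moves and reports what actually happened.
  const applied = migrateDirectoryStructure();
  console.log(`Moved: ${applied.moved.length}, created: ${applied.created.length}, warnings: ${applied.warnings.length}`);
}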