happyskills 0.18.0 → 0.19.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +16 -0
- package/package.json +2 -1
- package/src/commands/diff.js +12 -6
- package/src/commands/publish.js +4 -1
- package/src/commands/pull.js +93 -26
- package/src/commands/status.js +12 -8
- package/src/commands/validate.js +4 -1
- package/src/merge/changelog_merge.js +117 -0
- package/src/merge/changelog_merge.test.js +92 -0
- package/src/merge/json_merge.js +152 -0
- package/src/merge/json_merge.test.js +148 -0
- package/src/merge/text_merge.js +58 -0
- package/src/merge/text_merge.test.js +100 -0
- package/src/validation/conflict_marker_rules.js +54 -0
- package/src/validation/conflict_marker_rules.test.js +90 -0
package/CHANGELOG.md
CHANGED
|
@@ -7,6 +7,22 @@ and this project adheres to [Semantic Versioning](https://semver.org/).
|
|
|
7
7
|
|
|
8
8
|
## [Unreleased]
|
|
9
9
|
|
|
10
|
+
## [0.19.0] - 2026-03-29
|
|
11
|
+
|
|
12
|
+
### Added
|
|
13
|
+
- Add three-way auto-merge to `pull` — when both local and remote have changes and no `--theirs`/`--ours` flag is set, `pull` now auto-merges using `node-diff3` for text files, structured JSON merge for `skill.json` (always valid JSON, no conflict markers), and section-aware merge for `CHANGELOG.md` (preserves remote history, prepends local sections)
|
|
14
|
+
- Add `conflict_files` tracking in lock file — `pull` writes an array of files with unresolved conflict markers to the lock entry; `status` shows `conflicts` state when present
|
|
15
|
+
- Add conflict marker validation (`validate_no_conflict_markers`) to `validate` and `publish` — scans all skill files for `<<<<<<< LOCAL` markers and blocks publishing with errors. `--force` does NOT bypass this check (it only bypasses divergence)
|
|
16
|
+
- Add `conflicts` status to `status` command for skills with unresolved merge conflicts
|
|
17
|
+
|
|
18
|
+
### Changed
|
|
19
|
+
- `pull` without `--theirs`/`--ours` no longer exits with an error on `both_modified` files — it attempts auto-merge and reports any remaining conflicts with marker locations
|
|
20
|
+
|
|
21
|
+
## [0.18.1] - 2026-03-29
|
|
22
|
+
|
|
23
|
+
### Fixed
|
|
24
|
+
- Fix `pull` and `diff` recording stale commit SHA from CloudFront-cached clone responses — now clones at the specific head commit from the compare endpoint, ensuring the lock file matches the remote after pull
|
|
25
|
+
|
|
10
26
|
## [0.18.0] - 2026-03-29
|
|
11
27
|
|
|
12
28
|
### Added
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "happyskills",
|
|
3
|
-
"version": "0.
|
|
3
|
+
"version": "0.19.0",
|
|
4
4
|
"description": "Package manager for AI agent skills",
|
|
5
5
|
"license": "SEE LICENSE IN LICENSE",
|
|
6
6
|
"author": "Nicolas Dao <nic@cloudlesslabs.com> (https://cloudlesslabs.com)",
|
|
@@ -43,6 +43,7 @@
|
|
|
43
43
|
"node": ">=22.0.0"
|
|
44
44
|
},
|
|
45
45
|
"dependencies": {
|
|
46
|
+
"node-diff3": "^3.2.0",
|
|
46
47
|
"puffy-core": "^1.3.1",
|
|
47
48
|
"semver": "^7.6.0"
|
|
48
49
|
},
|
package/src/commands/diff.js
CHANGED
|
@@ -136,15 +136,18 @@ const run = (args) => catch_errors('Diff failed', async () => {
|
|
|
136
136
|
}
|
|
137
137
|
|
|
138
138
|
if (mode === 'remote') {
|
|
139
|
-
// Base vs remote —
|
|
140
|
-
const [
|
|
139
|
+
// Base vs remote — get head commit via compare, clone at that commit
|
|
140
|
+
const [cmp_err, cmp_data] = await repos_api.compare(owner, repo, lock_entry.base_commit)
|
|
141
|
+
if (cmp_err) throw e('Compare failed', cmp_err)
|
|
142
|
+
|
|
143
|
+
const [remote_err, remote_clone] = await repos_api.clone(owner, repo, null, { commit: cmp_data.head_commit })
|
|
141
144
|
if (remote_err) throw e('Failed to fetch remote files', remote_err)
|
|
142
145
|
const remote_files = (remote_clone.files || []).map(f => ({ path: f.path, sha: f.sha }))
|
|
143
146
|
|
|
144
147
|
const classified = classify_changes(base_files, base_files, remote_files)
|
|
145
148
|
|
|
146
149
|
if (args.flags.json) {
|
|
147
|
-
const report = build_report(skill_name, lock_entry.version,
|
|
150
|
+
const report = build_report(skill_name, lock_entry.version, cmp_data.head_version, classified)
|
|
148
151
|
print_json({ data: { mode, report } })
|
|
149
152
|
} else {
|
|
150
153
|
print_file_table(classified)
|
|
@@ -152,16 +155,19 @@ const run = (args) => catch_errors('Diff failed', async () => {
|
|
|
152
155
|
return
|
|
153
156
|
}
|
|
154
157
|
|
|
155
|
-
// Full three-way diff
|
|
158
|
+
// Full three-way diff — get head commit via compare
|
|
159
|
+
const [cmp_err, cmp_data] = await repos_api.compare(owner, repo, lock_entry.base_commit)
|
|
160
|
+
if (cmp_err) throw e('Compare failed', cmp_err)
|
|
161
|
+
|
|
156
162
|
const [local_err, local_files] = await build_local_entries(skill_dir)
|
|
157
163
|
if (local_err) throw e('Failed to read local files', local_err)
|
|
158
164
|
|
|
159
|
-
const [remote_err, remote_clone] = await repos_api.clone(owner, repo, null)
|
|
165
|
+
const [remote_err, remote_clone] = await repos_api.clone(owner, repo, null, { commit: cmp_data.head_commit })
|
|
160
166
|
if (remote_err) throw e('Failed to fetch remote files', remote_err)
|
|
161
167
|
const remote_files = (remote_clone.files || []).map(f => ({ path: f.path, sha: f.sha }))
|
|
162
168
|
|
|
163
169
|
const classified = classify_changes(base_files, local_files, remote_files)
|
|
164
|
-
const report = build_report(skill_name, lock_entry.version,
|
|
170
|
+
const report = build_report(skill_name, lock_entry.version, cmp_data.head_version, classified)
|
|
165
171
|
|
|
166
172
|
if (args.flags.json) {
|
|
167
173
|
print_json({ data: { mode, report } })
|
package/src/commands/publish.js
CHANGED
|
@@ -16,6 +16,7 @@ const { hash_directory } = require('../lock/integrity')
|
|
|
16
16
|
const { validate_skill_md } = require('../validation/skill_md_rules')
|
|
17
17
|
const { validate_skill_json } = require('../validation/skill_json_rules')
|
|
18
18
|
const { validate_cross } = require('../validation/cross_rules')
|
|
19
|
+
const { validate_no_conflict_markers } = require('../validation/conflict_marker_rules')
|
|
19
20
|
const { create_spinner } = require('../ui/spinner')
|
|
20
21
|
const { print_help, print_success, print_error, print_warn, print_hint, print_json, code } = require('../ui/output')
|
|
21
22
|
const { exit_with_error, UsageError, CliError } = require('../utils/errors')
|
|
@@ -95,8 +96,10 @@ const run = (args) => catch_errors('Publish failed', async () => {
|
|
|
95
96
|
if (json_err) throw json_err
|
|
96
97
|
const [cross_err, cross_results] = await validate_cross(dir, md_data.frontmatter, json_data.manifest, md_data.content, skill_type)
|
|
97
98
|
if (cross_err) throw cross_err
|
|
99
|
+
const [marker_err, marker_results] = await validate_no_conflict_markers(dir)
|
|
100
|
+
if (marker_err) throw marker_err
|
|
98
101
|
|
|
99
|
-
const all_results = [...md_data.results, ...json_data.results, ...cross_results]
|
|
102
|
+
const all_results = [...md_data.results, ...json_data.results, ...cross_results, ...marker_results]
|
|
100
103
|
const validation_errors = all_results.filter(r => r.severity === 'error')
|
|
101
104
|
const validation_warnings = all_results.filter(r => r.severity === 'warning')
|
|
102
105
|
|
package/src/commands/pull.js
CHANGED
|
@@ -6,6 +6,9 @@ const repos_api = require('../api/repos')
|
|
|
6
6
|
const { detect_status } = require('../merge/detector')
|
|
7
7
|
const { classify_changes } = require('../merge/comparator')
|
|
8
8
|
const { build_report } = require('../merge/report')
|
|
9
|
+
const { three_way_merge } = require('../merge/text_merge')
|
|
10
|
+
const { merge_skill_json } = require('../merge/json_merge')
|
|
11
|
+
const { merge_changelog } = require('../merge/changelog_merge')
|
|
9
12
|
const { hash_blob } = require('../utils/git_hash')
|
|
10
13
|
const { read_lock, get_all_locked_skills } = require('../lock/reader')
|
|
11
14
|
const { write_lock, update_lock_skills } = require('../lock/writer')
|
|
@@ -142,7 +145,8 @@ const run = (args) => catch_errors('Pull failed', async () => {
|
|
|
142
145
|
// 5. No local mods → fast-forward
|
|
143
146
|
if (!det.local_modified || strategy === 'force') {
|
|
144
147
|
spinner.update('Fast-forwarding...')
|
|
145
|
-
|
|
148
|
+
// Clone at the specific head commit from compare (bypasses CDN cache)
|
|
149
|
+
const [clone_err, clone_data] = await repos_api.clone(owner, repo, null, { commit: cmp_data.head_commit })
|
|
146
150
|
if (clone_err) { spinner.fail('Clone failed'); throw clone_err[0] }
|
|
147
151
|
|
|
148
152
|
// Remove existing files and write fresh
|
|
@@ -159,12 +163,12 @@ const run = (args) => catch_errors('Pull failed', async () => {
|
|
|
159
163
|
const [wf_err] = await write_files_from_clone(clone_data, skill_dir)
|
|
160
164
|
if (wf_err) { spinner.fail('Failed to write files'); throw wf_err[0] }
|
|
161
165
|
|
|
162
|
-
// Update lock
|
|
166
|
+
// Update lock — use head_commit from compare (authoritative, not cached)
|
|
163
167
|
const [hash_err, new_integrity] = await hash_directory(skill_dir)
|
|
164
168
|
const updated_entry = {
|
|
165
169
|
...lock_entry,
|
|
166
|
-
commit:
|
|
167
|
-
base_commit:
|
|
170
|
+
commit: cmp_data.head_commit,
|
|
171
|
+
base_commit: cmp_data.head_commit,
|
|
168
172
|
integrity: new_integrity || lock_entry.integrity,
|
|
169
173
|
base_integrity: new_integrity || lock_entry.base_integrity,
|
|
170
174
|
version: cmp_data.head_version || lock_entry.version,
|
|
@@ -196,34 +200,75 @@ const run = (args) => catch_errors('Pull failed', async () => {
|
|
|
196
200
|
const [local_err, local_files] = await build_local_entries(skill_dir)
|
|
197
201
|
if (local_err) { spinner.fail('Failed to read local files'); throw local_err[0] }
|
|
198
202
|
|
|
199
|
-
// Get remote files (clone
|
|
200
|
-
const [remote_err, remote_clone] = await repos_api.clone(owner, repo, null)
|
|
203
|
+
// Get remote files (clone at head commit — bypasses CDN cache)
|
|
204
|
+
const [remote_err, remote_clone] = await repos_api.clone(owner, repo, null, { commit: cmp_data.head_commit })
|
|
201
205
|
if (remote_err) { spinner.fail('Failed to fetch remote files'); throw remote_err[0] }
|
|
202
206
|
const remote_files = (remote_clone.files || []).map(f => ({ path: f.path, sha: f.sha }))
|
|
203
207
|
|
|
204
208
|
const classified = classify_changes(base_files, local_files, remote_files)
|
|
205
209
|
const report = build_report(skill_name, lock_entry.version, cmp_data.head_version, classified)
|
|
206
210
|
|
|
207
|
-
//
|
|
211
|
+
// Apply changes
|
|
212
|
+
spinner.update('Applying changes...')
|
|
213
|
+
const base_file_map = new Map((base_clone.files || []).map(f => [f.path, f]))
|
|
214
|
+
const remote_file_map = new Map((remote_clone.files || []).map(f => [f.path, f]))
|
|
215
|
+
|
|
216
|
+
// Auto-merge both_modified files when no strategy is set
|
|
217
|
+
const conflict_files = []
|
|
218
|
+
const json_conflicts = []
|
|
208
219
|
if (classified.both_modified.length > 0 && !strategy) {
|
|
209
|
-
spinner.
|
|
210
|
-
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
|
|
214
|
-
|
|
215
|
-
|
|
220
|
+
spinner.update('Auto-merging...')
|
|
221
|
+
for (const entry of classified.both_modified) {
|
|
222
|
+
const report_file = report.files.find(f => f.path === entry.path)
|
|
223
|
+
|
|
224
|
+
// Delete-vs-modify — cannot auto-merge, keep what exists
|
|
225
|
+
if (entry.local_sha === null || entry.remote_sha === null) {
|
|
226
|
+
conflict_files.push(entry.path)
|
|
227
|
+
if (report_file) report_file.conflict_written = true
|
|
228
|
+
if (entry.local_sha === null && entry.remote_sha !== null) {
|
|
229
|
+
// Local deleted, remote modified — write remote version
|
|
230
|
+
const [af_err] = await apply_remote_file(skill_dir, owner, repo, entry)
|
|
231
|
+
if (af_err) { spinner.fail(`Failed to apply ${entry.path}`); throw af_err[0] }
|
|
232
|
+
}
|
|
233
|
+
// Local modified, remote deleted — keep local as-is
|
|
234
|
+
continue
|
|
235
|
+
}
|
|
236
|
+
|
|
237
|
+
const base_file = base_file_map.get(entry.path)
|
|
238
|
+
const remote_file = remote_file_map.get(entry.path)
|
|
239
|
+
const base_text = base_file ? Buffer.from(base_file.content, 'base64').toString('utf-8') : ''
|
|
240
|
+
const local_text = await fs.promises.readFile(path.join(skill_dir, entry.path), 'utf-8')
|
|
241
|
+
const remote_text = remote_file ? Buffer.from(remote_file.content, 'base64').toString('utf-8') : ''
|
|
242
|
+
|
|
243
|
+
if (entry.path === 'skill.json') {
|
|
244
|
+
const result = merge_skill_json(
|
|
245
|
+
JSON.parse(base_text || '{}'),
|
|
246
|
+
JSON.parse(local_text || '{}'),
|
|
247
|
+
JSON.parse(remote_text || '{}')
|
|
248
|
+
)
|
|
249
|
+
await fs.promises.writeFile(path.join(skill_dir, entry.path), JSON.stringify(result.merged, null, '\t') + '\n')
|
|
250
|
+
if (result.conflicts.length > 0) json_conflicts.push(...result.conflicts)
|
|
251
|
+
if (report_file) report_file.merge_result = { type: 'json', conflicts: result.conflicts }
|
|
252
|
+
} else if (entry.path.toLowerCase() === 'changelog.md') {
|
|
253
|
+
const result = merge_changelog(base_text, local_text, remote_text)
|
|
254
|
+
await fs.promises.writeFile(path.join(skill_dir, entry.path), result.merged)
|
|
255
|
+
if (result.has_conflicts) conflict_files.push(entry.path)
|
|
256
|
+
if (report_file) {
|
|
257
|
+
report_file.conflict_written = result.has_conflicts
|
|
258
|
+
report_file.merge_result = { type: 'changelog', has_conflicts: result.has_conflicts, used_fallback: result.used_fallback }
|
|
259
|
+
}
|
|
260
|
+
} else {
|
|
261
|
+
const result = three_way_merge(base_text, local_text, remote_text)
|
|
262
|
+
await fs.promises.writeFile(path.join(skill_dir, entry.path), result.merged)
|
|
263
|
+
if (result.has_conflicts) conflict_files.push(entry.path)
|
|
264
|
+
if (report_file) {
|
|
265
|
+
report_file.conflict_written = result.has_conflicts
|
|
266
|
+
report_file.merge_result = { type: 'text', conflict_count: result.conflict_count, conflict_regions: result.conflict_regions }
|
|
267
|
+
}
|
|
216
268
|
}
|
|
217
|
-
console.error('')
|
|
218
|
-
print_hint('Auto-merge not yet available. Use --theirs (take remote) or --ours (keep local) for all conflicting files, or manually merge and publish with --force.')
|
|
219
269
|
}
|
|
220
|
-
return process.exit(EXIT_CODES.ERROR)
|
|
221
270
|
}
|
|
222
271
|
|
|
223
|
-
// Apply changes
|
|
224
|
-
spinner.update('Applying changes...')
|
|
225
|
-
const remote_file_map = new Map((remote_clone.files || []).map(f => [f.path, f]))
|
|
226
|
-
|
|
227
272
|
// Remote-only modified/added → write remote version
|
|
228
273
|
for (const entry of [...classified.remote_only_modified, ...classified.remote_only_added]) {
|
|
229
274
|
const remote_file = remote_file_map.get(entry.path)
|
|
@@ -262,17 +307,22 @@ const run = (args) => catch_errors('Pull failed', async () => {
|
|
|
262
307
|
}
|
|
263
308
|
// strategy === 'ours' → keep local files as-is (no action needed)
|
|
264
309
|
|
|
265
|
-
// Update lock
|
|
310
|
+
// Update lock — use head_commit from compare (authoritative, not cached)
|
|
266
311
|
const [hash_err, new_integrity] = await hash_directory(skill_dir)
|
|
267
312
|
const updated_entry = {
|
|
268
313
|
...lock_entry,
|
|
269
|
-
commit:
|
|
270
|
-
base_commit:
|
|
314
|
+
commit: cmp_data.head_commit,
|
|
315
|
+
base_commit: cmp_data.head_commit,
|
|
271
316
|
integrity: new_integrity || lock_entry.integrity,
|
|
272
317
|
base_integrity: new_integrity || lock_entry.base_integrity,
|
|
273
318
|
version: cmp_data.head_version || lock_entry.version,
|
|
274
319
|
ref: remote_clone.ref || lock_entry.ref
|
|
275
320
|
}
|
|
321
|
+
if (conflict_files.length > 0) {
|
|
322
|
+
updated_entry.conflict_files = conflict_files
|
|
323
|
+
} else {
|
|
324
|
+
delete updated_entry.conflict_files
|
|
325
|
+
}
|
|
276
326
|
const merged_skills = update_lock_skills(lock_data, { [skill_name]: updated_entry })
|
|
277
327
|
const [wl_err] = await write_lock(lock_root(is_global, project_root), merged_skills)
|
|
278
328
|
if (wl_err) { spinner.fail('Failed to write lock file'); throw wl_err[0] }
|
|
@@ -280,15 +330,32 @@ const run = (args) => catch_errors('Pull failed', async () => {
|
|
|
280
330
|
spinner.stop()
|
|
281
331
|
|
|
282
332
|
if (args.flags.json) {
|
|
283
|
-
|
|
333
|
+
const status = conflict_files.length > 0 ? 'conflicts' : 'merged'
|
|
334
|
+
print_json({ data: { status, report, conflict_files, json_conflicts } })
|
|
284
335
|
} else {
|
|
285
336
|
print_success(`Pulled ${skill_name} → ${cmp_data.head_version || 'latest'}`)
|
|
286
337
|
if (report.summary.auto_merged > 0) {
|
|
287
338
|
print_info(`${report.summary.auto_merged} file(s) auto-applied (non-conflicting changes)`)
|
|
288
339
|
}
|
|
289
|
-
if (classified.both_modified.length > 0) {
|
|
340
|
+
if (classified.both_modified.length > 0 && strategy) {
|
|
290
341
|
print_info(`${classified.both_modified.length} conflict(s) resolved with --${strategy}`)
|
|
291
342
|
}
|
|
343
|
+
const auto_merged_count = classified.both_modified.length - conflict_files.length
|
|
344
|
+
if (auto_merged_count > 0 && !strategy) {
|
|
345
|
+
print_info(`${auto_merged_count} file(s) auto-merged`)
|
|
346
|
+
}
|
|
347
|
+
if (json_conflicts.length > 0) {
|
|
348
|
+
print_warn(`skill.json merge suggestions (review before publishing):`)
|
|
349
|
+
for (const c of json_conflicts) {
|
|
350
|
+
console.error(` - ${c.field}: ${JSON.stringify(c.suggestion)}`)
|
|
351
|
+
}
|
|
352
|
+
}
|
|
353
|
+
if (conflict_files.length > 0) {
|
|
354
|
+
print_warn(`${conflict_files.length} file(s) with conflict markers — resolve before publishing:`)
|
|
355
|
+
for (const f of conflict_files) {
|
|
356
|
+
console.error(` - ${f}`)
|
|
357
|
+
}
|
|
358
|
+
}
|
|
292
359
|
}
|
|
293
360
|
|
|
294
361
|
// 7. Dependency reconciliation
|
package/src/commands/status.js
CHANGED
|
@@ -25,7 +25,8 @@ Examples:
|
|
|
25
25
|
happyskills st acme/deploy-aws
|
|
26
26
|
happyskills status --json`
|
|
27
27
|
|
|
28
|
-
const classify = (local_modified, remote_updated) => {
|
|
28
|
+
const classify = (local_modified, remote_updated, has_conflicts) => {
|
|
29
|
+
if (has_conflicts) return 'conflicts'
|
|
29
30
|
if (local_modified && remote_updated) return 'diverged'
|
|
30
31
|
if (local_modified) return 'modified'
|
|
31
32
|
if (remote_updated) return 'outdated'
|
|
@@ -78,6 +79,7 @@ const run = (args) => catch_errors('Status failed', async () => {
|
|
|
78
79
|
const short_name = name.split('/')[1] || name
|
|
79
80
|
const dir = skill_install_dir(base_dir, short_name)
|
|
80
81
|
const [, det] = await detect_status(data, dir)
|
|
82
|
+
const has_conflicts = (data.conflict_files || []).length > 0
|
|
81
83
|
results.push({
|
|
82
84
|
skill: name,
|
|
83
85
|
base_version: data.version || null,
|
|
@@ -87,7 +89,8 @@ const run = (args) => catch_errors('Status failed', async () => {
|
|
|
87
89
|
remote_updated: false,
|
|
88
90
|
remote_version: null,
|
|
89
91
|
remote_commit: null,
|
|
90
|
-
|
|
92
|
+
conflict_files: data.conflict_files || [],
|
|
93
|
+
status: has_conflicts ? 'conflicts' : 'clean'
|
|
91
94
|
})
|
|
92
95
|
}
|
|
93
96
|
|
|
@@ -112,7 +115,7 @@ const run = (args) => catch_errors('Status failed', async () => {
|
|
|
112
115
|
// Classify each result
|
|
113
116
|
for (const r of results) {
|
|
114
117
|
if (r.status !== 'not_found') {
|
|
115
|
-
r.status = classify(r.local_modified, r.remote_updated)
|
|
118
|
+
r.status = classify(r.local_modified, r.remote_updated, r.conflict_files.length > 0)
|
|
116
119
|
}
|
|
117
120
|
}
|
|
118
121
|
|
|
@@ -131,11 +134,12 @@ const run = (args) => catch_errors('Status failed', async () => {
|
|
|
131
134
|
skill: r.skill,
|
|
132
135
|
base: r.base_version || '?',
|
|
133
136
|
remote: r.remote_version || '?',
|
|
134
|
-
status: r.status === '
|
|
135
|
-
: r.status === '
|
|
136
|
-
: r.status === '
|
|
137
|
-
: r.status === '
|
|
138
|
-
: '
|
|
137
|
+
status: r.status === 'conflicts' ? 'conflicts (unresolved merge conflicts)'
|
|
138
|
+
: r.status === 'diverged' ? 'diverged (local + remote changes)'
|
|
139
|
+
: r.status === 'modified' ? 'modified (local changes)'
|
|
140
|
+
: r.status === 'outdated' ? 'outdated (remote changes)'
|
|
141
|
+
: r.status === 'not_found' ? 'not found'
|
|
142
|
+
: 'clean'
|
|
139
143
|
}))
|
|
140
144
|
|
|
141
145
|
const w_skill = Math.max(col_skill.length, ...rows.map(r => r.skill.length))
|
package/src/commands/validate.js
CHANGED
|
@@ -3,6 +3,7 @@ const { error: { catch_errors } } = require('puffy-core')
|
|
|
3
3
|
const { validate_skill_md } = require('../validation/skill_md_rules')
|
|
4
4
|
const { validate_skill_json } = require('../validation/skill_json_rules')
|
|
5
5
|
const { validate_cross } = require('../validation/cross_rules')
|
|
6
|
+
const { validate_no_conflict_markers } = require('../validation/conflict_marker_rules')
|
|
6
7
|
const { file_exists, read_json } = require('../utils/fs')
|
|
7
8
|
const { skills_dir, find_project_root } = require('../config/paths')
|
|
8
9
|
const { print_help, print_json } = require('../ui/output')
|
|
@@ -144,8 +145,10 @@ const run = (args) => catch_errors('Validate failed', async () => {
|
|
|
144
145
|
skill_type
|
|
145
146
|
)
|
|
146
147
|
if (cross_err) throw cross_err
|
|
148
|
+
const [marker_err, marker_results] = await validate_no_conflict_markers(skill_dir)
|
|
149
|
+
if (marker_err) throw marker_err
|
|
147
150
|
|
|
148
|
-
const all_results = [...md_data.results, ...json_data.results, ...cross_results]
|
|
151
|
+
const all_results = [...md_data.results, ...json_data.results, ...cross_results, ...marker_results]
|
|
149
152
|
const type_label = is_kit ? ' [kit]' : ''
|
|
150
153
|
|
|
151
154
|
if (args.flags.json) {
|
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Three-way merge for CHANGELOG.md files.
|
|
3
|
+
*
|
|
4
|
+
* Strategy:
|
|
5
|
+
* - Parse into version sections (split on ## headings)
|
|
6
|
+
* - Take all remote sections (published history is authoritative)
|
|
7
|
+
* - Prepend local unreleased/new section content
|
|
8
|
+
* - Fall back to text_merge if structure is too ambiguous
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
const { three_way_merge } = require('./text_merge')
|
|
12
|
+
|
|
13
|
+
const VERSION_HEADING_RE = /^##\s+\[/m
|
|
14
|
+
|
|
15
|
+
/**
|
|
16
|
+
* Parse a changelog into header (preamble before first version section) and version sections.
|
|
17
|
+
* Each section starts with a `## [` heading line.
|
|
18
|
+
*
|
|
19
|
+
* @param {string} text
|
|
20
|
+
* @returns {{ header: string, sections: string[] } | null} null if structure is ambiguous
|
|
21
|
+
*/
|
|
22
|
+
const parse_sections = (text) => {
|
|
23
|
+
if (!text) return null
|
|
24
|
+
|
|
25
|
+
const lines = text.split('\n')
|
|
26
|
+
let header_end = -1
|
|
27
|
+
const section_starts = []
|
|
28
|
+
|
|
29
|
+
for (let i = 0; i < lines.length; i++) {
|
|
30
|
+
if (VERSION_HEADING_RE.test(lines[i])) {
|
|
31
|
+
if (header_end === -1) header_end = i
|
|
32
|
+
section_starts.push(i)
|
|
33
|
+
}
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
// Ambiguous — no version headings found
|
|
37
|
+
if (section_starts.length === 0) return null
|
|
38
|
+
|
|
39
|
+
const header = lines.slice(0, header_end).join('\n')
|
|
40
|
+
const sections = []
|
|
41
|
+
|
|
42
|
+
for (let i = 0; i < section_starts.length; i++) {
|
|
43
|
+
const start = section_starts[i]
|
|
44
|
+
const end = i + 1 < section_starts.length ? section_starts[i + 1] : lines.length
|
|
45
|
+
sections.push(lines.slice(start, end).join('\n'))
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
return { header, sections }
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
/**
|
|
52
|
+
* Extract the version string from a section heading.
|
|
53
|
+
* e.g. "## [1.5.0] - 2026-03-29" → "1.5.0"
|
|
54
|
+
* e.g. "## [Unreleased]" → "Unreleased"
|
|
55
|
+
*/
|
|
56
|
+
const extract_version = (section) => {
|
|
57
|
+
const match = section.match(/^##\s+\[([^\]]+)\]/)
|
|
58
|
+
return match ? match[1] : null
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
/**
|
|
62
|
+
* @param {string} base_text
|
|
63
|
+
* @param {string} local_text
|
|
64
|
+
* @param {string} remote_text
|
|
65
|
+
* @returns {{ merged: string, has_conflicts: boolean, conflict_count: number, conflict_regions: Array, used_fallback: boolean }}
|
|
66
|
+
*/
|
|
67
|
+
const merge_changelog = (base_text, local_text, remote_text) => {
|
|
68
|
+
const base_parsed = parse_sections(base_text)
|
|
69
|
+
const local_parsed = parse_sections(local_text)
|
|
70
|
+
const remote_parsed = parse_sections(remote_text)
|
|
71
|
+
|
|
72
|
+
// Fall back to text merge if any side is too ambiguous to parse
|
|
73
|
+
if (!base_parsed || !local_parsed || !remote_parsed) {
|
|
74
|
+
const result = three_way_merge(base_text, local_text, remote_text)
|
|
75
|
+
return { ...result, used_fallback: true }
|
|
76
|
+
}
|
|
77
|
+
|
|
78
|
+
// Collect remote version strings (published history)
|
|
79
|
+
const remote_versions = new Set(remote_parsed.sections.map(extract_version).filter(Boolean))
|
|
80
|
+
|
|
81
|
+
// Find local-only sections: sections in local that are NOT in base and NOT in remote
|
|
82
|
+
const base_versions = new Set(base_parsed.sections.map(extract_version).filter(Boolean))
|
|
83
|
+
const local_new_sections = local_parsed.sections.filter(s => {
|
|
84
|
+
const v = extract_version(s)
|
|
85
|
+
if (!v) return false
|
|
86
|
+
if (v === 'Unreleased') return true
|
|
87
|
+
return !base_versions.has(v) && !remote_versions.has(v)
|
|
88
|
+
})
|
|
89
|
+
|
|
90
|
+
// Build merged changelog: remote header + local new sections + all remote sections
|
|
91
|
+
const parts = [remote_parsed.header]
|
|
92
|
+
|
|
93
|
+
// Add local new sections (unreleased or new version entries)
|
|
94
|
+
for (const section of local_new_sections) {
|
|
95
|
+
parts.push(section)
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
// Add all remote sections (authoritative published history)
|
|
99
|
+
for (const section of remote_parsed.sections) {
|
|
100
|
+
const v = extract_version(section)
|
|
101
|
+
// Skip duplicate unreleased if we already included it from local
|
|
102
|
+
if (v === 'Unreleased' && local_new_sections.some(s => extract_version(s) === 'Unreleased')) {
|
|
103
|
+
continue
|
|
104
|
+
}
|
|
105
|
+
parts.push(section)
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
return {
|
|
109
|
+
merged: parts.join('\n'),
|
|
110
|
+
has_conflicts: false,
|
|
111
|
+
conflict_count: 0,
|
|
112
|
+
conflict_regions: [],
|
|
113
|
+
used_fallback: false
|
|
114
|
+
}
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
module.exports = { merge_changelog, parse_sections, extract_version }
|
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
const { describe, it } = require('node:test')
|
|
2
|
+
const assert = require('node:assert/strict')
|
|
3
|
+
const { merge_changelog, parse_sections, extract_version } = require('./changelog_merge')
|
|
4
|
+
|
|
5
|
+
describe('parse_sections', () => {
|
|
6
|
+
it('parses header and version sections', () => {
|
|
7
|
+
const text = '# Changelog\n\nPreamble\n\n## [1.0.0] - 2026-01-01\n\nFirst release\n\n## [0.9.0] - 2025-12-01\n\nBeta'
|
|
8
|
+
const r = parse_sections(text)
|
|
9
|
+
assert.strictEqual(r.sections.length, 2)
|
|
10
|
+
assert.ok(r.header.includes('Preamble'))
|
|
11
|
+
assert.ok(r.sections[0].includes('[1.0.0]'))
|
|
12
|
+
assert.ok(r.sections[1].includes('[0.9.0]'))
|
|
13
|
+
})
|
|
14
|
+
|
|
15
|
+
it('returns null for text without version headings', () => {
|
|
16
|
+
assert.strictEqual(parse_sections('just some text'), null)
|
|
17
|
+
assert.strictEqual(parse_sections(null), null)
|
|
18
|
+
})
|
|
19
|
+
|
|
20
|
+
it('handles Unreleased section', () => {
|
|
21
|
+
const text = '# Changelog\n\n## [Unreleased]\n\nWIP\n\n## [1.0.0]\n\nDone'
|
|
22
|
+
const r = parse_sections(text)
|
|
23
|
+
assert.strictEqual(r.sections.length, 2)
|
|
24
|
+
assert.ok(r.sections[0].includes('Unreleased'))
|
|
25
|
+
})
|
|
26
|
+
})
|
|
27
|
+
|
|
28
|
+
describe('extract_version', () => {
|
|
29
|
+
it('extracts version from heading', () => {
|
|
30
|
+
assert.strictEqual(extract_version('## [1.5.0] - 2026-03-29'), '1.5.0')
|
|
31
|
+
assert.strictEqual(extract_version('## [Unreleased]'), 'Unreleased')
|
|
32
|
+
})
|
|
33
|
+
|
|
34
|
+
it('returns null for non-heading text', () => {
|
|
35
|
+
assert.strictEqual(extract_version('some text'), null)
|
|
36
|
+
})
|
|
37
|
+
})
|
|
38
|
+
|
|
39
|
+
describe('merge_changelog', () => {
|
|
40
|
+
const header = '# Changelog\n\nAll notable changes.\n'
|
|
41
|
+
|
|
42
|
+
it('preserves remote history and prepends local new sections', () => {
|
|
43
|
+
const base = `${header}\n## [1.0.0]\n\nBase release`
|
|
44
|
+
const local = `${header}\n## [Unreleased]\n\nLocal work\n\n## [1.0.0]\n\nBase release`
|
|
45
|
+
const remote = `${header}\n## [1.1.0]\n\nRemote release\n\n## [1.0.0]\n\nBase release`
|
|
46
|
+
const r = merge_changelog(base, local, remote)
|
|
47
|
+
assert.strictEqual(r.has_conflicts, false)
|
|
48
|
+
assert.strictEqual(r.used_fallback, false)
|
|
49
|
+
// Local unreleased should appear before remote versions
|
|
50
|
+
const unreleased_pos = r.merged.indexOf('[Unreleased]')
|
|
51
|
+
const v11_pos = r.merged.indexOf('[1.1.0]')
|
|
52
|
+
const v10_pos = r.merged.indexOf('[1.0.0]')
|
|
53
|
+
assert.ok(unreleased_pos < v11_pos, 'Unreleased before 1.1.0')
|
|
54
|
+
assert.ok(v11_pos < v10_pos, '1.1.0 before 1.0.0')
|
|
55
|
+
})
|
|
56
|
+
|
|
57
|
+
it('avoids duplicate Unreleased sections', () => {
|
|
58
|
+
const base = `${header}\n## [1.0.0]\n\nRelease`
|
|
59
|
+
const local = `${header}\n## [Unreleased]\n\nLocal\n\n## [1.0.0]\n\nRelease`
|
|
60
|
+
const remote = `${header}\n## [Unreleased]\n\nRemote\n\n## [1.0.0]\n\nRelease`
|
|
61
|
+
const r = merge_changelog(base, local, remote)
|
|
62
|
+
const count = (r.merged.match(/\[Unreleased\]/g) || []).length
|
|
63
|
+
assert.strictEqual(count, 1, 'Should have exactly one Unreleased section')
|
|
64
|
+
})
|
|
65
|
+
|
|
66
|
+
it('falls back to text_merge for ambiguous structure', () => {
|
|
67
|
+
const base = 'just text'
|
|
68
|
+
const local = 'local text'
|
|
69
|
+
const remote = 'remote text'
|
|
70
|
+
const r = merge_changelog(base, local, remote)
|
|
71
|
+
assert.strictEqual(r.used_fallback, true)
|
|
72
|
+
})
|
|
73
|
+
|
|
74
|
+
it('handles no local changes (remote-only update)', () => {
|
|
75
|
+
const base = `${header}\n## [1.0.0]\n\nRelease`
|
|
76
|
+
const local = base
|
|
77
|
+
const remote = `${header}\n## [1.1.0]\n\nNew\n\n## [1.0.0]\n\nRelease`
|
|
78
|
+
const r = merge_changelog(base, local, remote)
|
|
79
|
+
assert.strictEqual(r.has_conflicts, false)
|
|
80
|
+
assert.ok(r.merged.includes('[1.1.0]'))
|
|
81
|
+
})
|
|
82
|
+
|
|
83
|
+
it('preserves all remote sections as authoritative', () => {
|
|
84
|
+
const base = `${header}\n## [1.0.0]\n\nOriginal`
|
|
85
|
+
const local = `${header}\n## [1.0.0]\n\nOriginal`
|
|
86
|
+
const remote = `${header}\n## [1.2.0]\n\nTwo\n\n## [1.1.0]\n\nOne\n\n## [1.0.0]\n\nOriginal`
|
|
87
|
+
const r = merge_changelog(base, local, remote)
|
|
88
|
+
assert.ok(r.merged.includes('[1.2.0]'))
|
|
89
|
+
assert.ok(r.merged.includes('[1.1.0]'))
|
|
90
|
+
assert.ok(r.merged.includes('[1.0.0]'))
|
|
91
|
+
})
|
|
92
|
+
})
|
|
@@ -0,0 +1,152 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Structured three-way merge for skill.json files.
|
|
3
|
+
*
|
|
4
|
+
* Always produces valid JSON — no conflict markers.
|
|
5
|
+
* Conflicts are reported structurally so the CLI can display them
|
|
6
|
+
* and the user can resolve before publishing.
|
|
7
|
+
*/
|
|
8
|
+
|
|
9
|
+
const semver = require('semver')
|
|
10
|
+
|
|
11
|
+
/**
 * Three-way merge of a skill.json manifest.
 *
 * @param {object} base   - skill.json at the common ancestor (base_commit); may be null.
 * @param {object} local  - skill.json currently on disk.
 * @param {object} remote - skill.json at the remote head (head_commit).
 * @returns {{ merged: object, conflicts: Array<{ field: string, base_value: *, local_value: *, remote_value: *, suggestion: * }> }}
 *	`merged` is always a plain JSON-serializable object (never contains conflict
 *	markers); `conflicts` lists every field where local and remote diverged incompatibly.
 */
const merge_skill_json = (base, local, remote) => {
	const merged = {}
	const conflicts = []

	// Union of every key seen on any of the three sides (any side may be null).
	const keys = new Set()
	for (const side of [base, local, remote]) {
		for (const k of Object.keys(side || {})) keys.add(k)
	}

	for (const k of keys) {
		const base_val = base?.[k]
		const local_val = local?.[k]
		const remote_val = remote?.[k]

		// 'version' and 'dependencies' get field-aware merge strategies;
		// every other field is merged as an opaque value.
		if (k === 'version') merge_version(base_val, local_val, remote_val, merged, conflicts)
		else if (k === 'dependencies') merge_dependencies(base_val, local_val, remote_val, merged, conflicts)
		else merge_scalar(k, base_val, local_val, remote_val, merged, conflicts)
	}

	return { merged, conflicts }
}
|
|
43
|
+
|
|
44
|
+
/**
 * Merge the 'version' field.
 *
 * Single-sided changes win outright; identical changes on both sides are not a
 * conflict. When both sides changed to different values, the merged version is
 * the next patch after the higher of the two (e.g. local 1.6.0 vs remote 1.8.0
 * → 1.8.1) and a conflict entry is recorded so the user can confirm before
 * publishing.
 *
 * Robustness fix: semver.gt / semver.inc throw (or return null) on strings
 * that are not valid semver, which would crash the whole pull merge on a
 * malformed skill.json. Invalid versions now compare as 0.0.0, and if the
 * winning side is not valid semver it is kept verbatim as the suggestion.
 * Behavior for valid semver input is unchanged.
 *
 * @param {string} base_v    - version at the common ancestor
 * @param {string} local_v   - version on disk
 * @param {string} remote_v  - version at the remote head
 * @param {object} merged    - accumulator; merged.version is written here
 * @param {Array}  conflicts - accumulator; a conflict entry is pushed on divergence
 */
const merge_version = (base_v, local_v, remote_v, merged, conflicts) => {
	const local_changed = local_v !== base_v
	const remote_changed = remote_v !== base_v

	if (!local_changed && !remote_changed) {
		merged.version = base_v
	} else if (local_changed && !remote_changed) {
		merged.version = local_v
	} else if (!local_changed && remote_changed) {
		merged.version = remote_v
	} else if (local_v === remote_v) {
		// Both changed to the same value — no conflict
		merged.version = local_v
	} else {
		// Both changed differently — suggest next patch after the higher version.
		// semver.valid returns the cleaned version string or null, so invalid or
		// missing values fall back to '0.0.0' for the comparison only.
		const local_cmp = semver.valid(local_v) || '0.0.0'
		const remote_cmp = semver.valid(remote_v) || '0.0.0'
		const higher = semver.gt(remote_cmp, local_cmp) ? remote_v : local_v
		// semver.inc returns null for invalid input — keep the raw string instead.
		const suggestion = semver.valid(higher) ? semver.inc(higher, 'patch') : higher
		merged.version = suggestion
		conflicts.push({
			field: 'version',
			base_value: base_v,
			local_value: local_v,
			remote_value: remote_v,
			suggestion
		})
	}
}
|
|
71
|
+
|
|
72
|
+
/**
 * Merge the 'dependencies' map entry by entry.
 *
 * For each dependency name: a change on only one side wins; additions from
 * both sides are combined; a removal on one side (with no change on the other)
 * drops the entry. When both sides set different constraints for the same
 * dependency, the remote (published) constraint is kept as the suggestion and
 * a conflict entry is recorded.
 *
 * merged.dependencies is only assigned when the resulting map is non-empty.
 */
const merge_dependencies = (base_deps, local_deps, remote_deps, merged, conflicts) => {
	const base_map = base_deps || {}
	const local_map = local_deps || {}
	const remote_map = remote_deps || {}

	const names = new Set([
		...Object.keys(base_map),
		...Object.keys(local_map),
		...Object.keys(remote_map)
	])

	const out = {}
	for (const name of names) {
		const from_base = base_map[name]
		const from_local = local_map[name]
		const from_remote = remote_map[name]

		const local_changed = from_local !== from_base
		const remote_changed = from_remote !== from_base

		if (!local_changed && !remote_changed) {
			// Untouched — keep the base constraint (if the dep existed at all).
			if (from_base !== undefined) out[name] = from_base
			continue
		}
		if (local_changed !== remote_changed) {
			// Exactly one side changed — that side wins. An undefined winner
			// means that side removed the dependency, so the entry is dropped.
			const winner = local_changed ? from_local : from_remote
			if (winner !== undefined) out[name] = winner
			continue
		}
		// Both sides changed.
		if (from_local === from_remote) {
			// Identical change — no conflict.
			if (from_local !== undefined) out[name] = from_local
			continue
		}
		// Divergent change — prefer the remote (published) constraint.
		const suggestion = from_remote !== undefined ? from_remote : from_local
		out[name] = suggestion
		conflicts.push({
			field: `dependencies.${name}`,
			base_value: from_base || null,
			local_value: from_local || null,
			remote_value: from_remote || null,
			suggestion
		})
	}

	if (Object.keys(out).length > 0) {
		merged.dependencies = out
	}
}
|
|
118
|
+
|
|
119
|
+
/**
 * Merge a generic skill.json field by value comparison.
 *
 * Values are compared through their JSON serialization, so nested objects and
 * arrays compare deeply (note: object key order matters to this comparison).
 * Single-sided changes win; identical changes on both sides are not a
 * conflict; divergent changes keep the remote value as the suggestion and
 * record a conflict entry. An undefined winner means the field was removed,
 * so the key is simply omitted from the result.
 */
const merge_scalar = (key, base_val, local_val, remote_val, merged, conflicts) => {
	const b_json = JSON.stringify(base_val)
	const l_json = JSON.stringify(local_val)
	const r_json = JSON.stringify(remote_val)

	const local_changed = l_json !== b_json
	const remote_changed = r_json !== b_json

	// Writes the chosen value, dropping the key entirely when it is undefined.
	const assign = (value) => {
		if (value !== undefined) merged[key] = value
	}

	if (!local_changed && !remote_changed) {
		assign(base_val)
	} else if (local_changed && !remote_changed) {
		assign(local_val)
	} else if (!local_changed && remote_changed) {
		assign(remote_val)
	} else if (l_json === r_json) {
		// Both sides made the identical change — no conflict.
		assign(local_val)
	} else {
		// Divergent change — prefer the remote (published) value.
		const suggestion = remote_val !== undefined ? remote_val : local_val
		assign(suggestion)
		conflicts.push({
			field: key,
			base_value: base_val !== undefined ? base_val : null,
			local_value: local_val !== undefined ? local_val : null,
			remote_value: remote_val !== undefined ? remote_val : null,
			suggestion: suggestion !== undefined ? suggestion : null
		})
	}
}
|
|
151
|
+
|
|
152
|
+
module.exports = { merge_skill_json }
|
|
@@ -0,0 +1,148 @@
|
|
|
1
|
+
// Unit tests for merge_skill_json (src/merge/json_merge.js).
// Covers: no-op merges, single-sided wins, both-changed conflicts for scalars,
// the version next-patch suggestion, dependency add/remove/conflict handling,
// null base, and the guarantee that output is always marker-free valid JSON.
const { describe, it } = require('node:test')
const assert = require('node:assert/strict')
const { merge_skill_json } = require('./json_merge')

describe('merge_skill_json', () => {
	it('returns base unchanged when no changes', () => {
		const base = { name: 'acme/deploy', version: '1.0.0' }
		const r = merge_skill_json(base, { ...base }, { ...base })
		assert.deepStrictEqual(r.merged, base)
		assert.strictEqual(r.conflicts.length, 0)
	})

	it('takes local-only scalar change', () => {
		const base = { name: 'acme/deploy', version: '1.0.0', description: 'old' }
		const local = { ...base, description: 'new local' }
		const r = merge_skill_json(base, local, { ...base })
		assert.strictEqual(r.merged.description, 'new local')
		assert.strictEqual(r.conflicts.length, 0)
	})

	it('takes remote-only scalar change', () => {
		const base = { name: 'acme/deploy', version: '1.0.0', description: 'old' }
		const remote = { ...base, description: 'new remote' }
		const r = merge_skill_json(base, { ...base }, remote)
		assert.strictEqual(r.merged.description, 'new remote')
		assert.strictEqual(r.conflicts.length, 0)
	})

	it('reports conflict for both-changed scalar', () => {
		const base = { name: 'acme/deploy', description: 'old' }
		const local = { name: 'acme/deploy', description: 'local desc' }
		const remote = { name: 'acme/deploy', description: 'remote desc' }
		const r = merge_skill_json(base, local, remote)
		assert.strictEqual(r.conflicts.length, 1)
		assert.strictEqual(r.conflicts[0].field, 'description')
		// Remote (published) value is the suggested resolution.
		assert.strictEqual(r.conflicts[0].suggestion, 'remote desc')
	})

	it('reports version conflict with suggested next patch', () => {
		const base = { version: '1.5.0' }
		const local = { version: '1.6.0' }
		const remote = { version: '1.8.0' }
		const r = merge_skill_json(base, local, remote)
		assert.strictEqual(r.conflicts.length, 1)
		assert.strictEqual(r.conflicts[0].field, 'version')
		// Next patch after the higher of the two divergent versions.
		assert.strictEqual(r.merged.version, '1.8.1')
		assert.strictEqual(r.conflicts[0].suggestion, '1.8.1')
	})

	it('takes local-only version change without conflict', () => {
		const base = { version: '1.0.0' }
		const local = { version: '1.1.0' }
		const r = merge_skill_json(base, local, { ...base })
		assert.strictEqual(r.merged.version, '1.1.0')
		assert.strictEqual(r.conflicts.length, 0)
	})

	it('takes remote-only version change without conflict', () => {
		const base = { version: '1.0.0' }
		const remote = { version: '1.2.0' }
		const r = merge_skill_json(base, { ...base }, remote)
		assert.strictEqual(r.merged.version, '1.2.0')
		assert.strictEqual(r.conflicts.length, 0)
	})

	it('merges dependencies — local-only add', () => {
		const base = { dependencies: { 'acme/utils': '^2.0.0' } }
		const local = { dependencies: { 'acme/utils': '^2.0.0', 'acme/logger': '^1.0.0' } }
		const r = merge_skill_json(base, local, { ...base })
		assert.strictEqual(r.merged.dependencies['acme/logger'], '^1.0.0')
		assert.strictEqual(r.merged.dependencies['acme/utils'], '^2.0.0')
		assert.strictEqual(r.conflicts.length, 0)
	})

	it('merges dependencies — remote-only add', () => {
		const base = { dependencies: { 'acme/utils': '^2.0.0' } }
		const remote = { dependencies: { 'acme/utils': '^2.0.0', 'acme/auth': '^1.0.0' } }
		const r = merge_skill_json(base, { ...base }, remote)
		assert.strictEqual(r.merged.dependencies['acme/auth'], '^1.0.0')
		assert.strictEqual(r.conflicts.length, 0)
	})

	it('merges dependencies — both add different deps (clean)', () => {
		const base = { dependencies: {} }
		const local = { dependencies: { 'acme/logger': '^1.0.0' } }
		const remote = { dependencies: { 'acme/auth': '^1.0.0' } }
		const r = merge_skill_json(base, local, remote)
		assert.strictEqual(r.merged.dependencies['acme/logger'], '^1.0.0')
		assert.strictEqual(r.merged.dependencies['acme/auth'], '^1.0.0')
		assert.strictEqual(r.conflicts.length, 0)
	})

	it('reports conflict for same dep with different constraints', () => {
		const base = { dependencies: { 'acme/utils': '^2.0.0' } }
		const local = { dependencies: { 'acme/utils': '^2.5.0' } }
		const remote = { dependencies: { 'acme/utils': '^3.0.0' } }
		const r = merge_skill_json(base, local, remote)
		assert.strictEqual(r.conflicts.length, 1)
		assert.strictEqual(r.conflicts[0].field, 'dependencies.acme/utils')
		// Remote (published) constraint is the suggested resolution.
		assert.strictEqual(r.conflicts[0].suggestion, '^3.0.0')
		assert.strictEqual(r.merged.dependencies['acme/utils'], '^3.0.0')
	})

	it('handles local dep removal', () => {
		const base = { dependencies: { 'acme/utils': '^2.0.0', 'acme/logger': '^1.0.0' } }
		const local = { dependencies: { 'acme/utils': '^2.0.0' } }
		const r = merge_skill_json(base, local, { ...base })
		assert.strictEqual(r.merged.dependencies['acme/logger'], undefined)
		assert.strictEqual(r.merged.dependencies['acme/utils'], '^2.0.0')
		assert.strictEqual(r.conflicts.length, 0)
	})

	it('handles null base (no previous skill.json)', () => {
		const local = { name: 'acme/deploy', version: '1.0.0' }
		const remote = { name: 'acme/deploy', version: '1.0.0' }
		const r = merge_skill_json(null, local, remote)
		assert.strictEqual(r.merged.name, 'acme/deploy')
		assert.strictEqual(r.conflicts.length, 0)
	})

	it('always produces valid JSON (no conflict markers)', () => {
		const base = { version: '1.0.0', name: 'a' }
		const local = { version: '2.0.0', name: 'b' }
		const remote = { version: '3.0.0', name: 'c' }
		const r = merge_skill_json(base, local, remote)
		const json_str = JSON.stringify(r.merged)
		assert.doesNotThrow(() => JSON.parse(json_str))
		assert.ok(!json_str.includes('<<<<<<<'))
	})

	it('handles the full spec example correctly', () => {
		const base = { version: '1.5.0', dependencies: { 'acme/utils': '^2.0.0' } }
		const local = { version: '1.6.0', dependencies: { 'acme/utils': '^2.0.0', 'acme/logger': '^1.0.0' } }
		const remote = { version: '1.8.0', dependencies: { 'acme/utils': '^3.0.0', 'acme/auth': '^1.0.0' } }
		const r = merge_skill_json(base, local, remote)
		// Version: both changed → suggestion = 1.8.1
		assert.strictEqual(r.merged.version, '1.8.1')
		// Utils: remote-only change → ^3.0.0
		assert.strictEqual(r.merged.dependencies['acme/utils'], '^3.0.0')
		// Logger: local-only add
		assert.strictEqual(r.merged.dependencies['acme/logger'], '^1.0.0')
		// Auth: remote-only add
		assert.strictEqual(r.merged.dependencies['acme/auth'], '^1.0.0')
		// Only version should be a conflict
		const version_conflicts = r.conflicts.filter(c => c.field === 'version')
		assert.strictEqual(version_conflicts.length, 1)
	})
})
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Three-way text merge using node-diff3.
|
|
3
|
+
*
|
|
4
|
+
* Wraps the library's output into a structured format with conflict regions.
|
|
5
|
+
* Conflict markers use: <<<<<<< LOCAL / ======= / >>>>>>> REMOTE
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
const { merge, diff3MergeRegions } = require('node-diff3')
|
|
9
|
+
|
|
10
|
+
// Conflict-marker labels handed to node-diff3 (a = local side, b = remote side).
const LABELS = { a: 'LOCAL', b: 'REMOTE' }

/**
 * Three-way merge of plain-text content via node-diff3.
 *
 * On conflict, the merged text contains git-style markers
 * (`<<<<<<< LOCAL` / `=======` / `>>>>>>> REMOTE`) and each conflicting
 * region is additionally reported structurally with its line span in the
 * merged output plus the base/local/remote content of the region.
 *
 * @param {string} base_text   - common-ancestor content (null/undefined treated as empty)
 * @param {string} local_text  - local content
 * @param {string} remote_text - remote content
 * @returns {{ merged: string, has_conflicts: boolean, conflict_count: number, conflict_regions: Array }}
 */
const three_way_merge = (base_text, local_text, remote_text) => {
	const split = (text) => (text || '').split('\n')
	const local_lines = split(local_text)
	const base_lines = split(base_text)
	const remote_lines = split(remote_text)

	// node-diff3 param order: merge(local, base, remote)
	const outcome = merge(local_lines, base_lines, remote_lines, { label: LABELS })

	// When conflicts exist, re-derive the regions via diff3MergeRegions so the
	// caller gets structured data (line spans + per-side content), not just markers.
	const conflict_regions = []
	if (outcome.conflict) {
		let cursor = 1 // 1-based line position within the merged output
		for (const region of diff3MergeRegions(local_lines, base_lines, remote_lines)) {
			if (region.stable) {
				cursor += region.bufferLength
				continue
			}
			// Unstable region: a = local, o = base, b = remote. In the merged
			// output it renders as:
			//   <<<<<<< LOCAL, local lines, =======, remote lines, >>>>>>> REMOTE
			const local_len = region.aLength || 0
			const remote_len = region.bLength || 0
			conflict_regions.push({
				start_line: cursor,
				end_line: cursor + 2 + local_len + remote_len,
				base_content: (region.oContent || []).join('\n'),
				local_content: (region.aContent || []).join('\n'),
				remote_content: (region.bContent || []).join('\n')
			})
			cursor += 3 + local_len + remote_len
		}
	}

	return {
		merged: outcome.result.join('\n'),
		has_conflicts: outcome.conflict,
		conflict_count: conflict_regions.length,
		conflict_regions
	}
}
|
|
57
|
+
|
|
58
|
+
module.exports = { three_way_merge }
|
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
// Unit tests for three_way_merge (src/merge/text_merge.js).
// Covers: clean merges (disjoint edits, identical edits, additions), conflict
// marker output, delete-vs-modify, structured conflict-region details,
// multiple regions, and empty/null base handling.
const { describe, it } = require('node:test')
const assert = require('node:assert/strict')
const { three_way_merge } = require('./text_merge')

describe('three_way_merge', () => {
	it('returns clean merge for non-overlapping edits', () => {
		const base = 'line1\nline2\nline3'
		const local = 'LOCAL1\nline2\nline3'
		const remote = 'line1\nline2\nREMOTE3'
		const r = three_way_merge(base, local, remote)
		assert.strictEqual(r.has_conflicts, false)
		assert.strictEqual(r.conflict_count, 0)
		assert.strictEqual(r.merged, 'LOCAL1\nline2\nREMOTE3')
	})

	it('returns conflict markers for overlapping edits', () => {
		const base = 'line1\nline2\nline3'
		const local = 'line1\nLOCAL\nline3'
		const remote = 'line1\nREMOTE\nline3'
		const r = three_way_merge(base, local, remote)
		assert.strictEqual(r.has_conflicts, true)
		assert.strictEqual(r.conflict_count, 1)
		assert.ok(r.merged.includes('<<<<<<< LOCAL'))
		assert.ok(r.merged.includes('======='))
		assert.ok(r.merged.includes('>>>>>>> REMOTE'))
		assert.ok(r.merged.includes('LOCAL'))
		assert.ok(r.merged.includes('REMOTE'))
	})

	it('returns clean merge for identical edits on both sides', () => {
		const base = 'line1\nline2\nline3'
		const local = 'line1\nSAME\nline3'
		const remote = 'line1\nSAME\nline3'
		const r = three_way_merge(base, local, remote)
		assert.strictEqual(r.has_conflicts, false)
		assert.strictEqual(r.merged, 'line1\nSAME\nline3')
	})

	it('handles add on both sides at different positions', () => {
		const base = 'line1\nline3'
		const local = 'line1\nLOCAL_ADD\nline3'
		const remote = 'line1\nline3\nREMOTE_ADD'
		const r = three_way_merge(base, local, remote)
		assert.strictEqual(r.has_conflicts, false)
		assert.ok(r.merged.includes('LOCAL_ADD'))
		assert.ok(r.merged.includes('REMOTE_ADD'))
	})

	it('handles completely disjoint changes', () => {
		const base = 'aaa\nbbb\nccc\nddd\neee'
		const local = 'AAA\nbbb\nccc\nddd\neee'
		const remote = 'aaa\nbbb\nccc\nddd\nEEE'
		const r = three_way_merge(base, local, remote)
		assert.strictEqual(r.has_conflicts, false)
		assert.strictEqual(r.merged, 'AAA\nbbb\nccc\nddd\nEEE')
	})

	it('handles delete-vs-modify as conflict', () => {
		const base = 'line1\nline2\nline3'
		const local = 'line1\nline3'
		const remote = 'line1\nMODIFIED\nline3'
		const r = three_way_merge(base, local, remote)
		assert.strictEqual(r.has_conflicts, true)
		assert.strictEqual(r.conflict_count, 1)
	})

	it('provides conflict region details', () => {
		const base = 'a\nb\nc'
		const local = 'a\nLOCAL\nc'
		const remote = 'a\nREMOTE\nc'
		const r = three_way_merge(base, local, remote)
		assert.strictEqual(r.conflict_regions.length, 1)
		const region = r.conflict_regions[0]
		assert.strictEqual(region.base_content, 'b')
		assert.strictEqual(region.local_content, 'LOCAL')
		assert.strictEqual(region.remote_content, 'REMOTE')
		assert.ok(region.start_line > 0)
	})

	it('handles multiple conflict regions', () => {
		const base = 'a\nb\nc\nd\ne'
		const local = 'a\nL1\nc\nL2\ne'
		const remote = 'a\nR1\nc\nR2\ne'
		const r = three_way_merge(base, local, remote)
		assert.strictEqual(r.has_conflicts, true)
		assert.strictEqual(r.conflict_count, 2)
		assert.strictEqual(r.conflict_regions.length, 2)
	})

	it('handles empty base with local-only change as clean merge', () => {
		const r = three_way_merge('', 'new content', '')
		assert.strictEqual(r.has_conflicts, false)
		assert.ok(r.merged.includes('new content'))
	})

	it('handles null base (both sides added new file)', () => {
		const r = three_way_merge(null, 'local stuff', 'remote stuff')
		assert.strictEqual(r.has_conflicts, true)
	})
})
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
const fs = require('fs')
|
|
2
|
+
const path = require('path')
|
|
3
|
+
const { error: { catch_errors } } = require('puffy-core')
|
|
4
|
+
|
|
5
|
+
const MARKER_RE = /^<{7}/
|
|
6
|
+
|
|
7
|
+
// Builds a single validation result for a file containing conflict markers.
// The shape (file/field/rule/severity/message) matches the other validation
// rules so the CLI can render it uniformly.
const result = (file, line_numbers, marker_count) => {
	const region_word = marker_count === 1 ? 'region' : 'regions'
	const line_word = line_numbers.length === 1 ? 'line' : 'lines'
	const message = `Unresolved merge conflict markers (${marker_count} ${region_word}, ${line_word} ${line_numbers.join(', ')})`
	return {
		file,
		field: null,
		rule: 'no_conflict_markers',
		severity: 'error',
		message,
		line_numbers,
		marker_count
	}
}
|
|
16
|
+
|
|
17
|
+
/**
 * Scans every file under a skill directory for conflict markers (lines
 * starting with `<<<<<<<`) and returns one error result per affected file.
 * Dotfiles and dot-directories are skipped (consistent with hash_directory
 * exclusions); unreadable entries are silently ignored (best-effort scan).
 *
 * @param {string} skill_dir - Absolute path to the skill directory
 * @returns {[errors, results[]]} — results with severity 'error' for each file with markers
 */
const validate_no_conflict_markers = (skill_dir) => catch_errors('Failed to check conflict markers', async () => {
	const findings = []

	// Depth-first scan; `prefix` accumulates the path relative to skill_dir.
	const scan_dir = async (dir, prefix) => {
		let entries
		try { entries = await fs.promises.readdir(dir, { withFileTypes: true }) } catch { return }
		for (const entry of entries) {
			if (entry.name.startsWith('.')) continue
			const rel = prefix ? `${prefix}/${entry.name}` : entry.name
			const abs = path.join(dir, entry.name)
			if (entry.isDirectory()) {
				await scan_dir(abs, rel)
				continue
			}
			let content
			try { content = await fs.promises.readFile(abs, 'utf-8') } catch { continue }
			// Collect 1-based line numbers of every marker line in this file.
			const marker_lines = []
			content.split('\n').forEach((text, idx) => {
				if (MARKER_RE.test(text)) marker_lines.push(idx + 1)
			})
			if (marker_lines.length > 0) {
				findings.push(result(rel, marker_lines, marker_lines.length))
			}
		}
	}

	await scan_dir(skill_dir, '')
	return findings
})
|
|
53
|
+
|
|
54
|
+
module.exports = { validate_no_conflict_markers }
|
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
// Unit tests for validate_no_conflict_markers (src/validation/conflict_marker_rules.js).
// Each test builds a throwaway temp directory, writes fixture files, and checks
// the [err, results] pair: marker detection (including orphan `<<<<<<<` lines),
// line numbers, recursion into subdirectories, dotfile exclusion, and clean dirs.
const { describe, it, beforeEach, afterEach } = require('node:test')
const assert = require('node:assert/strict')
const fs = require('fs')
const path = require('path')
const os = require('os')
const { validate_no_conflict_markers } = require('./conflict_marker_rules')

const make_temp_dir = () => fs.mkdtempSync(path.join(os.tmpdir(), 'conflict-marker-test-'))
const clean = (dir) => fs.rmSync(dir, { recursive: true, force: true })

describe('validate_no_conflict_markers', () => {
	let dir

	beforeEach(() => { dir = make_temp_dir() })
	afterEach(() => { clean(dir) })

	it('returns error for file with <<<<<<< LOCAL marker', async () => {
		fs.writeFileSync(path.join(dir, 'SKILL.md'), 'line1\n<<<<<<< LOCAL\nlocal\n=======\nremote\n>>>>>>> REMOTE\nline2')
		const [err, results] = await validate_no_conflict_markers(dir)
		assert.strictEqual(err, null)
		assert.strictEqual(results.length, 1)
		assert.strictEqual(results[0].severity, 'error')
		assert.strictEqual(results[0].file, 'SKILL.md')
		assert.deepStrictEqual(results[0].line_numbers, [2])
		assert.strictEqual(results[0].marker_count, 1)
	})

	it('catches partial markers (only <<<<<<< without =======)', async () => {
		fs.writeFileSync(path.join(dir, 'test.md'), 'some text\n<<<<<<< orphan marker\nmore text')
		const [err, results] = await validate_no_conflict_markers(dir)
		assert.strictEqual(err, null)
		assert.strictEqual(results.length, 1)
		assert.strictEqual(results[0].file, 'test.md')
	})

	it('returns empty for clean files', async () => {
		fs.writeFileSync(path.join(dir, 'clean.md'), '# Clean file\n\nNo conflicts here.')
		const [err, results] = await validate_no_conflict_markers(dir)
		assert.strictEqual(err, null)
		assert.strictEqual(results.length, 0)
	})

	it('reports multiple conflict regions in one file', async () => {
		const content = [
			'line1',
			'<<<<<<< LOCAL',
			'a',
			'=======',
			'b',
			'>>>>>>> REMOTE',
			'middle',
			'<<<<<<< LOCAL',
			'c',
			'=======',
			'd',
			'>>>>>>> REMOTE',
			'end'
		].join('\n')
		fs.writeFileSync(path.join(dir, 'multi.md'), content)
		const [err, results] = await validate_no_conflict_markers(dir)
		assert.strictEqual(err, null)
		assert.strictEqual(results.length, 1)
		assert.deepStrictEqual(results[0].line_numbers, [2, 8])
		assert.strictEqual(results[0].marker_count, 2)
	})

	it('scans nested directories recursively', async () => {
		const sub = path.join(dir, 'references')
		fs.mkdirSync(sub)
		fs.writeFileSync(path.join(sub, 'ref.md'), '<<<<<<< LOCAL\nstuff\n=======\nother\n>>>>>>> REMOTE')
		const [err, results] = await validate_no_conflict_markers(dir)
		assert.strictEqual(err, null)
		assert.strictEqual(results.length, 1)
		assert.strictEqual(results[0].file, 'references/ref.md')
	})

	it('excludes dotfiles', async () => {
		fs.writeFileSync(path.join(dir, '.hidden'), '<<<<<<< LOCAL\nstuff')
		fs.writeFileSync(path.join(dir, 'visible.md'), 'clean')
		const [err, results] = await validate_no_conflict_markers(dir)
		assert.strictEqual(err, null)
		assert.strictEqual(results.length, 0)
	})

	it('handles empty directory', async () => {
		const [err, results] = await validate_no_conflict_markers(dir)
		assert.strictEqual(err, null)
		assert.strictEqual(results.length, 0)
	})
})
|