happyskills 0.17.0 → 0.18.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +10 -0
- package/package.json +1 -1
- package/src/api/repos.js +19 -4
- package/src/commands/diff.js +178 -0
- package/src/commands/pull.js +339 -0
- package/src/constants.js +3 -0
- package/src/index.js +3 -0
- package/src/merge/comparator.js +98 -0
- package/src/merge/comparator.test.js +161 -0
- package/src/merge/report.js +59 -0
- package/src/merge/report.test.js +85 -0
package/CHANGELOG.md
CHANGED
|
@@ -7,6 +7,16 @@ and this project adheres to [Semantic Versioning](https://semver.org/).
|
|
|
7
7
|
|
|
8
8
|
## [Unreleased]
|
|
9
9
|
|
|
10
|
+
## [0.18.0] - 2026-03-29
|
|
11
|
+
|
|
12
|
+
### Added
|
|
13
|
+
- Add `pull` command for pulling remote changes and merging with local files — supports `--theirs`, `--ours`, and `--force` strategies with three-way file classification and dependency reconciliation
|
|
14
|
+
- Add `diff` command (`d` alias) for file-level difference display — supports `--remote` (base vs remote), `--full` (three-way), and default local vs base modes
|
|
15
|
+
- Add `cli/src/merge/comparator.js` module with `classify_changes()` for three-way file classification into 8 categories
|
|
16
|
+
- Add `cli/src/merge/report.js` module with `build_report()` for structured JSON merge reports (v1 contract)
|
|
17
|
+
- Add `compare()` and `get_blob()` to CLI API client for server-side tree comparison and individual blob downloads
|
|
18
|
+
- Extend `clone()` API client to accept `options.commit` for cloning at a specific commit SHA
|
|
19
|
+
|
|
10
20
|
## [0.17.0] - 2026-03-29
|
|
11
21
|
|
|
12
22
|
### Added
|
package/package.json
CHANGED
package/src/api/repos.js
CHANGED
|
@@ -22,9 +22,12 @@ const resolve_dependencies = (skill, version, installed = {}) => catch_errors('D
|
|
|
22
22
|
return data
|
|
23
23
|
})
|
|
24
24
|
|
|
25
|
-
const clone = (owner, repo, ref) => catch_errors(`Clone ${owner}/${repo} failed`, async () => {
|
|
26
|
-
const params =
|
|
27
|
-
|
|
25
|
+
// Clone a repo's file tree from the registry.
// A pinned `options.commit` takes precedence over `ref`; with neither,
// the server clones its default branch. Resolves to the server payload
// (presumably { files, commit, ref, ... } — confirm against server API).
const clone = (owner, repo, ref, options = {}) => catch_errors(`Clone ${owner}/${repo} failed`, async () => {
  const search = new URLSearchParams()
  if (options.commit) {
    search.set('commit', options.commit)
  } else if (ref) {
    search.set('ref', ref)
  }
  const query = search.toString()
  const endpoint = query
    ? `/repos/${owner}/${repo}/clone?${query}`
    : `/repos/${owner}/${repo}/clone`
  const [errors, data] = await client.get(endpoint)
  if (errors) throw errors[errors.length - 1]
  return data
})
|
|
@@ -65,4 +68,16 @@ const patch_repo = (owner, name, fields) => catch_errors(`Failed to update ${own
|
|
|
65
68
|
return data
|
|
66
69
|
})
|
|
67
70
|
|
|
68
|
-
|
|
71
|
+
// Ask the server to diff its current head against `base_commit`.
// NOTE(review): callers read { added, removed, modified, head_version }
// off the result — confirm shape against the server's compare endpoint.
const compare = (owner, repo, base_commit) => catch_errors(`Compare ${owner}/${repo} failed`, async () => {
  const endpoint = `/repos/${owner}/${repo}/compare`
  const payload = { base_commit }
  const [errors, data] = await client.post(endpoint, payload)
  if (errors) {
    throw errors[errors.length - 1]
  }
  return data
})
|
|
76
|
+
|
|
77
|
+
// Fetch a single blob by SHA. The payload carries base64 content
// (see apply_remote_file, which decodes `blob.content` as base64).
const get_blob = (owner, repo, sha) => catch_errors(`Get blob ${owner}/${repo}/${sha} failed`, async () => {
  const endpoint = `/repos/${owner}/${repo}/blob/${sha}`
  const [errors, data] = await client.get(endpoint)
  if (errors) {
    throw errors[errors.length - 1]
  }
  return data
})
|
|
82
|
+
|
|
83
|
+
module.exports = { search, resolve_dependencies, clone, push, get_refs, get_repo, check_updates, del_repo, patch_repo, compare, get_blob }
|
|
@@ -0,0 +1,178 @@
|
|
|
1
|
+
const fs = require('fs')
|
|
2
|
+
const path = require('path')
|
|
3
|
+
const { error: { catch_errors, wrap_errors: e } } = require('puffy-core')
|
|
4
|
+
const repos_api = require('../api/repos')
|
|
5
|
+
const { detect_status } = require('../merge/detector')
|
|
6
|
+
const { classify_changes } = require('../merge/comparator')
|
|
7
|
+
const { build_report } = require('../merge/report')
|
|
8
|
+
const { hash_blob } = require('../utils/git_hash')
|
|
9
|
+
const { read_lock, get_all_locked_skills } = require('../lock/reader')
|
|
10
|
+
const { find_project_root, lock_root, skills_dir, skill_install_dir } = require('../config/paths')
|
|
11
|
+
const { print_help, print_info, print_json, print_warn, code } = require('../ui/output')
|
|
12
|
+
const { exit_with_error, UsageError } = require('../utils/errors')
|
|
13
|
+
const { EXIT_CODES } = require('../constants')
|
|
14
|
+
|
|
15
|
+
const HELP_TEXT = `Usage: happyskills diff <owner/skill> [options]
|
|
16
|
+
|
|
17
|
+
Show file-level differences for a skill.
|
|
18
|
+
|
|
19
|
+
Arguments:
|
|
20
|
+
owner/skill Skill to diff (required)
|
|
21
|
+
|
|
22
|
+
Options:
|
|
23
|
+
--remote Show base vs remote changes (default: local vs base)
|
|
24
|
+
--full Show three-way diff (base vs local vs remote)
|
|
25
|
+
-g, --global Diff globally installed skill
|
|
26
|
+
--json Output as JSON
|
|
27
|
+
|
|
28
|
+
Aliases: d
|
|
29
|
+
|
|
30
|
+
Examples:
|
|
31
|
+
happyskills diff acme/deploy-aws
|
|
32
|
+
happyskills diff acme/deploy-aws --remote
|
|
33
|
+
happyskills diff acme/deploy-aws --full`
|
|
34
|
+
|
|
35
|
+
// ─── Helpers ──────────────────────────────────────────────────────────────────
|
|
36
|
+
|
|
37
|
+
// Recursively scan `skill_dir` and return [{ path, sha }] for every
// regular file, where `sha` is a git-style blob hash of the file bytes.
// Dotfiles and dot-directories are skipped; paths are relative to
// `skill_dir` and use forward slashes.
const build_local_entries = (skill_dir) => catch_errors('Failed to build local entries', async () => {
  const collect = async (dir, prefix) => {
    const found = []
    for (const dirent of await fs.promises.readdir(dir, { withFileTypes: true })) {
      if (dirent.name.startsWith('.')) continue
      const rel = prefix ? `${prefix}/${dirent.name}` : dirent.name
      const abs = path.join(dir, dirent.name)
      if (dirent.isDirectory()) {
        // Recurse inline so entry order matches readdir traversal.
        found.push(...await collect(abs, rel))
      } else {
        const buffer = await fs.promises.readFile(abs)
        found.push({ path: rel, sha: hash_blob(buffer) })
      }
    }
    return found
  }
  return await collect(skill_dir, '')
})
|
|
56
|
+
|
|
57
|
+
// Display labels for each classify_changes() category.
// NOTE: Object key order is behavior — print_file_table iterates
// Object.entries(STATUS_LABELS), so rows appear modified → added →
// deleted. `unchanged` is deliberately absent: unchanged files are
// never listed in the diff table.
const STATUS_LABELS = {
  remote_only_modified: 'M (remote)',
  local_only_modified: 'M (local)',
  both_modified: 'M (both)',
  remote_only_added: 'A (remote)',
  local_only_added: 'A (local)',
  remote_only_deleted: 'D (remote)',
  local_only_deleted: 'D (local)'
}
|
|
66
|
+
|
|
67
|
+
// Render a two-column status/path table for a classify_changes() result.
// Rows follow STATUS_LABELS key order; categories missing from
// `classified` are tolerated. Prints a friendly message (via print_info)
// when there is nothing to show; otherwise writes rows to stdout.
const print_file_table = (classified) => {
  const lines = []
  for (const [category, label] of Object.entries(STATUS_LABELS)) {
    for (const entry of (classified[category] || [])) {
      lines.push({ status: label, path: entry.path })
    }
  }

  if (lines.length === 0) {
    print_info('No differences found.')
    return
  }

  // Right-pad each status to the widest label so paths line up.
  const max_status = Math.max(...lines.map(l => l.status.length))
  for (const line of lines) {
    console.log(` ${line.status.padEnd(max_status)} ${line.path}`)
  }
}
|
|
86
|
+
|
|
87
|
+
// ─── Main ─────────────────────────────────────────────────────────────────────
|
|
88
|
+
|
|
89
|
+
// Entry point for `happyskills diff` (alias: d).
//
// Resolves the skill from the lock file, fetches the base snapshot
// (a clone pinned at lock_entry.base_commit), then shows file-level
// differences in one of three modes:
//   local  (default) — working copy vs base
//   remote           — base vs registry head
//   full             — three-way: base vs local vs remote
//
// Exits 0 on --help; UsageErrors and API failures propagate to the
// trailing .then() handler, which maps them through exit_with_error.
const run = (args) => catch_errors('Diff failed', async () => {
  if (args.flags._show_help) {
    print_help(HELP_TEXT)
    return process.exit(EXIT_CODES.SUCCESS)
  }

  // Positional arg must be owner/name — validated before any I/O.
  const skill_name = args._[0]
  if (!skill_name || !skill_name.includes('/')) {
    throw new UsageError('Skill name required in owner/name format. Example: happyskills diff acme/deploy-aws')
  }

  const is_global = args.flags.global || false
  // --full wins over --remote when both flags are passed.
  const mode = args.flags.full ? 'full' : args.flags.remote ? 'remote' : 'local'
  const project_root = find_project_root()

  // Read lock
  const [lock_err, lock_data] = await read_lock(lock_root(is_global, project_root))
  if (lock_err || !lock_data) throw new UsageError('No lock file found.')

  const all_skills = get_all_locked_skills(lock_data)
  const lock_entry = all_skills[skill_name]
  if (!lock_entry) throw new UsageError(`${skill_name} is not installed.`)
  // base_commit only exists in lock files written by newer versions.
  if (!lock_entry.base_commit) throw new UsageError(`${skill_name} has no base_commit. Run ${code('happyskills install --fresh')} to upgrade the lock file.`)

  const [owner, repo] = skill_name.split('/')
  const base_dir = skills_dir(is_global, project_root)
  const skill_dir = skill_install_dir(base_dir, repo)

  // Always need base files
  const [base_err, base_clone] = await repos_api.clone(owner, repo, null, { commit: lock_entry.base_commit })
  if (base_err) throw e('Failed to fetch base files', base_err)
  const base_files = (base_clone.files || []).map(f => ({ path: f.path, sha: f.sha }))

  if (mode === 'local') {
    // Local vs base — use classify_changes with base as the "remote" side
    // (passing base for the remote slot makes every remote-side change vanish).
    const [local_err, local_files] = await build_local_entries(skill_dir)
    if (local_err) throw e('Failed to read local files', local_err)

    const classified = classify_changes(base_files, local_files, base_files)

    if (args.flags.json) {
      // head_version === installed version here: no remote side is involved.
      const report = build_report(skill_name, lock_entry.version, lock_entry.version, classified)
      print_json({ data: { mode, report } })
    } else {
      print_file_table(classified)
    }
    return
  }

  if (mode === 'remote') {
    // Base vs remote — use classify_changes with base as the "local" side
    const [remote_err, remote_clone] = await repos_api.clone(owner, repo, null)
    if (remote_err) throw e('Failed to fetch remote files', remote_err)
    const remote_files = (remote_clone.files || []).map(f => ({ path: f.path, sha: f.sha }))

    const classified = classify_changes(base_files, base_files, remote_files)

    if (args.flags.json) {
      // NOTE(review): head_version is passed as null here — the remote
      // version isn't resolved in diff mode; confirm report consumers
      // tolerate a null head version.
      const report = build_report(skill_name, lock_entry.version, null, classified)
      print_json({ data: { mode, report } })
    } else {
      print_file_table(classified)
    }
    return
  }

  // Full three-way diff
  const [local_err, local_files] = await build_local_entries(skill_dir)
  if (local_err) throw e('Failed to read local files', local_err)

  const [remote_err, remote_clone] = await repos_api.clone(owner, repo, null)
  if (remote_err) throw e('Failed to fetch remote files', remote_err)
  const remote_files = (remote_clone.files || []).map(f => ({ path: f.path, sha: f.sha }))

  const classified = classify_changes(base_files, local_files, remote_files)
  const report = build_report(skill_name, lock_entry.version, null, classified)

  if (args.flags.json) {
    print_json({ data: { mode, report } })
  } else {
    print_file_table(classified)
    // Surface conflicts (both sides touched the same file) on stderr.
    if (report.summary.conflicted > 0) {
      console.error('')
      print_warn(`${report.summary.conflicted} file(s) modified on both sides`)
    }
  }

}).then(([errors]) => { if (errors) { exit_with_error(errors); return } })
|
|
177
|
+
|
|
178
|
+
module.exports = { run }
|
|
@@ -0,0 +1,339 @@
|
|
|
1
|
+
const fs = require('fs')
|
|
2
|
+
const path = require('path')
|
|
3
|
+
const os = require('os')
|
|
4
|
+
const { error: { catch_errors, wrap_errors: e } } = require('puffy-core')
|
|
5
|
+
const repos_api = require('../api/repos')
|
|
6
|
+
const { detect_status } = require('../merge/detector')
|
|
7
|
+
const { classify_changes } = require('../merge/comparator')
|
|
8
|
+
const { build_report } = require('../merge/report')
|
|
9
|
+
const { hash_blob } = require('../utils/git_hash')
|
|
10
|
+
const { read_lock, get_all_locked_skills } = require('../lock/reader')
|
|
11
|
+
const { write_lock, update_lock_skills } = require('../lock/writer')
|
|
12
|
+
const { hash_directory } = require('../lock/integrity')
|
|
13
|
+
const { find_project_root, lock_root, skills_dir, skill_install_dir } = require('../config/paths')
|
|
14
|
+
const { ensure_dir } = require('../utils/fs')
|
|
15
|
+
const { create_spinner } = require('../ui/spinner')
|
|
16
|
+
const { print_help, print_success, print_error, print_warn, print_info, print_json, print_hint, code } = require('../ui/output')
|
|
17
|
+
const { exit_with_error, UsageError } = require('../utils/errors')
|
|
18
|
+
const { EXIT_CODES } = require('../constants')
|
|
19
|
+
|
|
20
|
+
const HELP_TEXT = `Usage: happyskills pull <owner/skill> [options]
|
|
21
|
+
|
|
22
|
+
Pull remote changes and merge with local files.
|
|
23
|
+
|
|
24
|
+
Arguments:
|
|
25
|
+
owner/skill Skill to pull (required)
|
|
26
|
+
|
|
27
|
+
Options:
|
|
28
|
+
--theirs Take remote version on conflicts
|
|
29
|
+
--ours Keep local version on conflicts
|
|
30
|
+
--force Discard all local changes, take remote entirely
|
|
31
|
+
-g, --global Pull globally installed skill
|
|
32
|
+
--json Output as JSON
|
|
33
|
+
|
|
34
|
+
Examples:
|
|
35
|
+
happyskills pull acme/deploy-aws
|
|
36
|
+
happyskills pull acme/deploy-aws --theirs
|
|
37
|
+
happyskills pull acme/deploy-aws --force`
|
|
38
|
+
|
|
39
|
+
// ─── Helpers ──────────────────────────────────────────────────────────────────
|
|
40
|
+
|
|
41
|
+
// Build [{ path, sha }] entries for every regular file under `skill_dir`,
// hashing contents with the git-style blob hash. Dot-prefixed names are
// skipped at every depth; paths are slash-separated and relative to
// `skill_dir`.
const build_local_entries = (skill_dir) => catch_errors('Failed to build local entries', async () => {
  const results = []
  const visit = async (current_dir, rel_prefix) => {
    const children = await fs.promises.readdir(current_dir, { withFileTypes: true })
    for (const child of children) {
      if (child.name.startsWith('.')) continue
      const rel_path = rel_prefix ? `${rel_prefix}/${child.name}` : child.name
      const abs_path = path.join(current_dir, child.name)
      if (child.isDirectory()) {
        await visit(abs_path, rel_path)
        continue
      }
      const bytes = await fs.promises.readFile(abs_path)
      results.push({ path: rel_path, sha: hash_blob(bytes) })
    }
  }
  await visit(skill_dir, '')
  return results
})
|
|
60
|
+
|
|
61
|
+
// Write every file from a clone payload into `skill_dir`, creating
// parent directories as needed. File contents arrive base64-encoded in
// `file.content`; a missing/empty files array is a no-op.
const write_files_from_clone = (clone_data, skill_dir) => catch_errors('Failed to write files', async () => {
  const files = clone_data.files || []
  for (const file of files) {
    const target = path.join(skill_dir, file.path)
    const [dir_err] = await ensure_dir(path.dirname(target))
    if (dir_err) throw e(`Failed to create directory for ${file.path}`, dir_err)
    const payload = Buffer.from(file.content, 'base64')
    await fs.promises.writeFile(target, payload)
  }
})
|
|
69
|
+
|
|
70
|
+
// Best-effort removal of skill-relative `paths` under `skill_dir`.
// Files that are already gone (or otherwise un-unlinkable) are ignored.
const remove_files = (skill_dir, paths) => catch_errors('Failed to remove files', async () => {
  for (const rel_path of paths) {
    const absolute = path.join(skill_dir, rel_path)
    try {
      await fs.promises.unlink(absolute)
    } catch {
      // already gone
    }
  }
})
|
|
76
|
+
|
|
77
|
+
// Download one blob by SHA and write it to its path under `skill_dir`.
// Prefers the entry's `remote_sha`, falling back to `sha`; silently
// no-ops when the entry carries neither. Blob content is base64-encoded.
const apply_remote_file = (skill_dir, owner, repo, file_entry) => catch_errors('Failed to apply remote file', async () => {
  const blob_sha = file_entry.remote_sha || file_entry.sha
  if (!blob_sha) return

  const [blob_err, blob] = await repos_api.get_blob(owner, repo, blob_sha)
  if (blob_err) throw e(`Failed to download ${file_entry.path}`, blob_err)

  const destination = path.join(skill_dir, file_entry.path)
  const [dir_err] = await ensure_dir(path.dirname(destination))
  if (dir_err) throw e(`Failed to create directory for ${file_entry.path}`, dir_err)

  const decoded = Buffer.from(blob.content, 'base64')
  await fs.promises.writeFile(destination, decoded)
})
|
|
87
|
+
|
|
88
|
+
// ─── Main ─────────────────────────────────────────────────────────────────────
|
|
89
|
+
|
|
90
|
+
// Entry point for `happyskills pull`.
//
// Flow: read lock → detect local mods → server-side compare against
// base_commit → then one of:
//   up-to-date      — no remote changes; report and stop.
//   fast-forward    — no local mods (or --force): wipe and re-clone,
//                     rewrite the lock entry.
//   three-way merge — both sides changed: classify files, bail with a
//                     conflict report if both_modified and no strategy,
//                     otherwise apply remote-only changes, resolve
//                     conflicts per --theirs/--ours, rewrite the lock,
//                     and reconcile newly-declared dependencies.
//
// Exits 0 on --help, EXIT_CODES.ERROR on unresolved conflicts; other
// failures propagate to the trailing .then() → exit_with_error.
const run = (args) => catch_errors('Pull failed', async () => {
  if (args.flags._show_help) {
    print_help(HELP_TEXT)
    return process.exit(EXIT_CODES.SUCCESS)
  }

  const skill_name = args._[0]
  if (!skill_name || !skill_name.includes('/')) {
    throw new UsageError('Skill name required in owner/name format. Example: happyskills pull acme/deploy-aws')
  }

  const is_global = args.flags.global || false
  // Flag precedence: --theirs > --ours > --force; none → null (no strategy).
  const strategy = args.flags.theirs ? 'theirs' : args.flags.ours ? 'ours' : args.flags.force ? 'force' : null
  const project_root = find_project_root()

  // 1. Read lock file
  const [lock_err, lock_data] = await read_lock(lock_root(is_global, project_root))
  if (lock_err || !lock_data) throw new UsageError('No lock file found. Install the skill first.')

  const all_skills = get_all_locked_skills(lock_data)
  const lock_entry = all_skills[skill_name]
  if (!lock_entry) throw new UsageError(`${skill_name} is not installed. Run ${code(`happyskills install ${skill_name}`)} first.`)
  // base_commit only exists in lock files written by newer versions.
  if (!lock_entry.base_commit) throw new UsageError(`${skill_name} has no base_commit. Run ${code('happyskills install --fresh')} to upgrade the lock file.`)

  const [owner, repo] = skill_name.split('/')
  const base_dir = skills_dir(is_global, project_root)
  const skill_dir = skill_install_dir(base_dir, repo)

  const spinner = create_spinner(`Pulling ${skill_name}...`)

  // 2. Detect local modifications
  const [det_err, det] = await detect_status(lock_entry, skill_dir)
  if (det_err) { spinner.fail('Failed to detect local status'); throw det_err[0] }

  // 3. Compare with remote
  spinner.update('Comparing with remote...')
  const [cmp_err, cmp_data] = await repos_api.compare(owner, repo, lock_entry.base_commit)
  // NOTE(review): `throw cmp_data || cmp_err[0]` — cmp_data is expected
  // to be falsy on error; confirm compare() never resolves both slots.
  if (cmp_err) { spinner.fail('Compare failed'); throw cmp_data || cmp_err[0] }

  const no_remote_changes = cmp_data.added.length === 0 && cmp_data.removed.length === 0 && cmp_data.modified.length === 0

  // 4. Already up to date?
  if (no_remote_changes) {
    spinner.stop()
    if (args.flags.json) {
      print_json({ data: { status: 'up_to_date', skill: skill_name } })
    } else {
      print_info(`${skill_name} is already up to date.`)
    }
    return
  }

  // 5. No local mods → fast-forward
  // --force also lands here: local changes are discarded wholesale.
  if (!det.local_modified || strategy === 'force') {
    spinner.update('Fast-forwarding...')
    const [clone_err, clone_data] = await repos_api.clone(owner, repo, null)
    if (clone_err) { spinner.fail('Clone failed'); throw clone_err[0] }

    // Remove existing files and write fresh (dot-entries are preserved).
    const [rm_err] = await catch_errors('Cleanup failed', async () => {
      const items = await fs.promises.readdir(skill_dir, { withFileTypes: true })
      for (const item of items) {
        if (item.name.startsWith('.')) continue
        const full = path.join(skill_dir, item.name)
        await fs.promises.rm(full, { recursive: true, force: true })
      }
    })
    if (rm_err) { spinner.fail('Failed to clean skill directory'); throw rm_err[0] }

    const [wf_err] = await write_files_from_clone(clone_data, skill_dir)
    if (wf_err) { spinner.fail('Failed to write files'); throw wf_err[0] }

    // Update lock. NOTE(review): hash_err is deliberately ignored —
    // integrity falls back to the previous lock values when hashing
    // fails; confirm this best-effort behavior is intended.
    const [hash_err, new_integrity] = await hash_directory(skill_dir)
    const updated_entry = {
      ...lock_entry,
      commit: clone_data.commit,
      // base and head converge after a fast-forward.
      base_commit: clone_data.commit,
      integrity: new_integrity || lock_entry.integrity,
      base_integrity: new_integrity || lock_entry.base_integrity,
      version: cmp_data.head_version || lock_entry.version,
      ref: clone_data.ref || lock_entry.ref
    }
    const merged_skills = update_lock_skills(lock_data, { [skill_name]: updated_entry })
    const [wl_err] = await write_lock(lock_root(is_global, project_root), merged_skills)
    if (wl_err) { spinner.fail('Failed to write lock file'); throw wl_err[0] }

    spinner.stop()
    if (args.flags.json) {
      print_json({ data: { status: 'fast_forward', skill: skill_name, version: cmp_data.head_version } })
    } else {
      const label = strategy === 'force' ? 'Force-pulled' : 'Fast-forwarded'
      print_success(`${label} ${skill_name} → ${cmp_data.head_version || 'latest'}`)
    }
    // NOTE(review): the fast-forward path returns before step 7, so new
    // dependencies declared by the updated manifest are NOT installed
    // here — confirm whether that is intentional.
    return
  }

  // 6. Both changed — three-way classification
  spinner.update('Classifying changes...')

  // Get base files (clone at base_commit)
  const [base_err, base_clone] = await repos_api.clone(owner, repo, null, { commit: lock_entry.base_commit })
  if (base_err) { spinner.fail('Failed to fetch base files'); throw base_err[0] }
  const base_files = (base_clone.files || []).map(f => ({ path: f.path, sha: f.sha }))

  // Get local files (from disk)
  const [local_err, local_files] = await build_local_entries(skill_dir)
  if (local_err) { spinner.fail('Failed to read local files'); throw local_err[0] }

  // Get remote files (clone latest)
  const [remote_err, remote_clone] = await repos_api.clone(owner, repo, null)
  if (remote_err) { spinner.fail('Failed to fetch remote files'); throw remote_err[0] }
  const remote_files = (remote_clone.files || []).map(f => ({ path: f.path, sha: f.sha }))

  const classified = classify_changes(base_files, local_files, remote_files)
  const report = build_report(skill_name, lock_entry.version, cmp_data.head_version, classified)

  // If both_modified exists and no strategy → exit with report
  if (classified.both_modified.length > 0 && !strategy) {
    spinner.stop()
    if (args.flags.json) {
      print_json({ data: { status: 'conflicts', report } })
    } else {
      print_warn(`${report.summary.conflicted} file(s) modified on both sides:`)
      for (const f of classified.both_modified) {
        console.error(` - ${f.path}`)
      }
      console.error('')
      print_hint('Auto-merge not yet available. Use --theirs (take remote) or --ours (keep local) for all conflicting files, or manually merge and publish with --force.')
    }
    return process.exit(EXIT_CODES.ERROR)
  }

  // Apply changes
  spinner.update('Applying changes...')
  // Index remote files by path for O(1) lookups below.
  const remote_file_map = new Map((remote_clone.files || []).map(f => [f.path, f]))

  // Remote-only modified/added → write remote version
  for (const entry of [...classified.remote_only_modified, ...classified.remote_only_added]) {
    const remote_file = remote_file_map.get(entry.path)
    if (remote_file) {
      const full = path.join(skill_dir, entry.path)
      const [dir_err] = await ensure_dir(path.dirname(full))
      if (dir_err) { spinner.fail(`Failed to create dir for ${entry.path}`); throw dir_err[0] }
      await fs.promises.writeFile(full, Buffer.from(remote_file.content, 'base64'))
    }
  }

  // Remote-only deleted → remove local
  const [del_err] = await remove_files(skill_dir, classified.remote_only_deleted.map(f => f.path))
  if (del_err) { spinner.fail('Failed to remove deleted files'); throw del_err[0] }

  // Local-only modified/added → keep (no action needed)
  // Local-only deleted → keep deleted (no action needed)

  // Both-modified → apply strategy
  if (strategy === 'theirs') {
    for (const entry of classified.both_modified) {
      if (entry.remote_sha === null) {
        // Remote deleted — remove local
        const full = path.join(skill_dir, entry.path)
        try { await fs.promises.unlink(full) } catch { /* ok */ }
      } else {
        const remote_file = remote_file_map.get(entry.path)
        if (remote_file) {
          const full = path.join(skill_dir, entry.path)
          const [dir_err] = await ensure_dir(path.dirname(full))
          if (dir_err) { spinner.fail(`Failed to create dir for ${entry.path}`); throw dir_err[0] }
          await fs.promises.writeFile(full, Buffer.from(remote_file.content, 'base64'))
        }
      }
    }
  }
  // strategy === 'ours' → keep local files as-is (no action needed)

  // Update lock. Same best-effort integrity fallback as the
  // fast-forward path above (hash_err deliberately ignored).
  const [hash_err, new_integrity] = await hash_directory(skill_dir)
  const updated_entry = {
    ...lock_entry,
    commit: remote_clone.commit,
    base_commit: remote_clone.commit,
    integrity: new_integrity || lock_entry.integrity,
    base_integrity: new_integrity || lock_entry.base_integrity,
    version: cmp_data.head_version || lock_entry.version,
    ref: remote_clone.ref || lock_entry.ref
  }
  const merged_skills = update_lock_skills(lock_data, { [skill_name]: updated_entry })
  const [wl_err] = await write_lock(lock_root(is_global, project_root), merged_skills)
  if (wl_err) { spinner.fail('Failed to write lock file'); throw wl_err[0] }

  spinner.stop()

  if (args.flags.json) {
    print_json({ data: { status: 'merged', report } })
  } else {
    print_success(`Pulled ${skill_name} → ${cmp_data.head_version || 'latest'}`)
    if (report.summary.auto_merged > 0) {
      print_info(`${report.summary.auto_merged} file(s) auto-applied (non-conflicting changes)`)
    }
    if (classified.both_modified.length > 0) {
      print_info(`${classified.both_modified.length} conflict(s) resolved with --${strategy}`)
    }
  }

  // 7. Dependency reconciliation
  await reconcile_dependencies(skill_dir, skill_name, lock_data, is_global, project_root, spinner, args)

}).then(([errors]) => { if (errors) { exit_with_error(errors); return } })
|
|
298
|
+
|
|
299
|
+
// ─── Dependency reconciliation ────────────────────────────────────────────────
|
|
300
|
+
|
|
301
|
+
// Install any dependencies newly declared by the pulled skill's manifest.
//
// Reads <skill_dir>/skill.json and compares its `dependencies` map with
// the lock file: deps already installed are left untouched (version
// reconciliation / higher-version-wins is the resolver's job); deps not
// yet installed are installed sequentially via the install command.
// Silently no-ops when the manifest is missing, unparseable, or declares
// no dependencies.
const reconcile_dependencies = (skill_dir, skill_name, lock_data, is_global, project_root, spinner, args) =>
  catch_errors('Dependency reconciliation failed', async () => {
    const manifest_path = path.join(skill_dir, 'skill.json')
    let manifest
    try {
      manifest = JSON.parse(await fs.promises.readFile(manifest_path, 'utf-8'))
    } catch {
      return // No manifest or invalid JSON — nothing to reconcile
    }

    const new_deps = manifest.dependencies || {}
    if (Object.keys(new_deps).length === 0) return

    const all_skills = get_all_locked_skills(lock_data)
    // Only brand-new deps need installing; existing lock entries are
    // skipped (higher-version-wins is handled by the resolver).
    const to_install = Object.keys(new_deps).filter(dep => !all_skills[dep])

    if (to_install.length === 0) return

    if (!args.flags.json) {
      print_info(`Installing ${to_install.length} new dependenc${to_install.length === 1 ? 'y' : 'ies'}...`)
    }

    // Lazy require avoids a circular dependency with the install command.
    const { run: run_install } = require('./install')
    // Sequential on purpose: each install mutates the shared lock file.
    for (const dep of to_install) {
      await run_install({ _: [dep], flags: { global: is_global, json: args.flags.json || false, yes: true } })
    }
  })
|
|
338
|
+
|
|
339
|
+
module.exports = { run }
|
package/src/constants.js
CHANGED
|
@@ -32,6 +32,7 @@ const COMMAND_ALIASES = {
|
|
|
32
32
|
s: 'search',
|
|
33
33
|
r: 'refresh',
|
|
34
34
|
st: 'status',
|
|
35
|
+
d: 'diff',
|
|
35
36
|
up: 'update',
|
|
36
37
|
pub: 'publish',
|
|
37
38
|
v: 'validate',
|
|
@@ -50,6 +51,8 @@ const COMMANDS = [
|
|
|
50
51
|
'check',
|
|
51
52
|
'refresh',
|
|
52
53
|
'status',
|
|
54
|
+
'pull',
|
|
55
|
+
'diff',
|
|
53
56
|
'update',
|
|
54
57
|
'bump',
|
|
55
58
|
'publish',
|
package/src/index.js
CHANGED
|
@@ -93,6 +93,9 @@ Commands:
|
|
|
93
93
|
search <query> Search the registry (alias: s)
|
|
94
94
|
check [owner/skill] Check for available updates
|
|
95
95
|
refresh Check + update all outdated skills (alias: r)
|
|
96
|
+
status [owner/skill] Show divergence status (alias: st)
|
|
97
|
+
pull <owner/skill> Pull remote changes and merge
|
|
98
|
+
diff <owner/skill> Show file-level differences (alias: d)
|
|
96
99
|
update [owner/skill] Upgrade to latest versions (alias: up)
|
|
97
100
|
publish Push skill to registry (alias: pub)
|
|
98
101
|
validate <skill-name> Validate skill against all rules (alias: v)
|
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
/**
 * Three-way file classification for merge operations.
 *
 * Given base, local, and remote file listings — each an array of
 * { path, sha } entries — buckets every path into one of 8 categories
 * depending on which side(s) diverged from the base snapshot.
 *
 * Entry shapes per bucket (matched exactly by the test suite):
 *   - *_modified / both_modified carry the relevant shas (null when absent)
 *   - *_added carry only the adding side's sha
 *   - *_deleted carry only { path }
 *   - unchanged carries { path, sha } — or bare { path } when both sides deleted
 */

const classify_changes = (base_files, local_files, remote_files) => {
  // Index each side by path for O(1) sha lookups.
  const to_map = (entries) => new Map(entries.map(({ path, sha }) => [path, sha]))
  const base_map = to_map(base_files)
  const local_map = to_map(local_files)
  const remote_map = to_map(remote_files)

  // Union of every path seen on any side (base → local → remote order).
  const all_paths = new Set([...base_map.keys(), ...local_map.keys(), ...remote_map.keys()])

  const result = {
    remote_only_modified: [],
    local_only_modified: [],
    both_modified: [],
    remote_only_added: [],
    local_only_added: [],
    remote_only_deleted: [],
    local_only_deleted: [],
    unchanged: []
  }

  for (const path of all_paths) {
    const base_sha = base_map.get(path) || null
    const local_sha = local_map.get(path) || null
    const remote_sha = remote_map.get(path) || null

    if (base_sha === null) {
      // Absent from base — added by one or both sides.
      if (local_sha !== null && remote_sha !== null) {
        // Added on both sides: identical content agrees, divergent content conflicts.
        if (local_sha === remote_sha) result.unchanged.push({ path, sha: local_sha })
        else result.both_modified.push({ path, base_sha: null, local_sha, remote_sha })
      } else if (local_sha !== null) {
        result.local_only_added.push({ path, local_sha })
      } else {
        result.remote_only_added.push({ path, remote_sha })
      }
      continue
    }

    // Present in base — determine which side(s) diverged.
    if (local_sha === null && remote_sha === null) {
      // Both sides deleted it: they agree, nothing to merge.
      result.unchanged.push({ path })
    } else if (local_sha === null) {
      // Deleted locally; a concurrent remote edit makes this a conflict.
      if (remote_sha !== base_sha) {
        result.both_modified.push({ path, base_sha, local_sha: null, remote_sha })
      } else {
        result.local_only_deleted.push({ path })
      }
    } else if (remote_sha === null) {
      // Deleted remotely; a concurrent local edit makes this a conflict.
      if (local_sha !== base_sha) {
        result.both_modified.push({ path, base_sha, local_sha, remote_sha: null })
      } else {
        result.remote_only_deleted.push({ path })
      }
    } else if (local_sha !== base_sha && remote_sha !== base_sha) {
      // Edited on both sides — identical edits cancel out.
      if (local_sha === remote_sha) result.unchanged.push({ path, sha: local_sha })
      else result.both_modified.push({ path, base_sha, local_sha, remote_sha })
    } else if (local_sha !== base_sha) {
      result.local_only_modified.push({ path, base_sha, local_sha })
    } else if (remote_sha !== base_sha) {
      result.remote_only_modified.push({ path, base_sha, remote_sha })
    } else {
      result.unchanged.push({ path, sha: base_sha })
    }
  }

  return result
}
|
|
97
|
+
|
|
98
|
+
module.exports = { classify_changes }
|
|
@@ -0,0 +1,161 @@
|
|
|
1
|
+
// Unit tests for classify_changes() (./comparator).
// Each case builds base/local/remote listings as { path, sha } arrays and
// asserts both the bucket a path lands in and the exact entry shape.
const { describe, it } = require('node:test')
const assert = require('node:assert/strict')
const { classify_changes } = require('./comparator')

describe('classify_changes', () => {
  it('classifies remote_only_modified', () => {
    const base = [{ path: 'a.md', sha: '111' }]
    const local = [{ path: 'a.md', sha: '111' }]
    const remote = [{ path: 'a.md', sha: '222' }]
    const r = classify_changes(base, local, remote)
    assert.strictEqual(r.remote_only_modified.length, 1)
    assert.deepStrictEqual(r.remote_only_modified[0], { path: 'a.md', base_sha: '111', remote_sha: '222' })
  })

  it('classifies local_only_modified', () => {
    const base = [{ path: 'a.md', sha: '111' }]
    const local = [{ path: 'a.md', sha: '222' }]
    const remote = [{ path: 'a.md', sha: '111' }]
    const r = classify_changes(base, local, remote)
    assert.strictEqual(r.local_only_modified.length, 1)
    assert.deepStrictEqual(r.local_only_modified[0], { path: 'a.md', base_sha: '111', local_sha: '222' })
  })

  it('classifies both_modified (different changes)', () => {
    const base = [{ path: 'a.md', sha: '111' }]
    const local = [{ path: 'a.md', sha: '222' }]
    const remote = [{ path: 'a.md', sha: '333' }]
    const r = classify_changes(base, local, remote)
    assert.strictEqual(r.both_modified.length, 1)
    assert.deepStrictEqual(r.both_modified[0], { path: 'a.md', base_sha: '111', local_sha: '222', remote_sha: '333' })
  })

  // Identical edits on both sides are not a conflict: they agree.
  it('classifies identical changes on both sides as unchanged', () => {
    const base = [{ path: 'a.md', sha: '111' }]
    const local = [{ path: 'a.md', sha: '222' }]
    const remote = [{ path: 'a.md', sha: '222' }]
    const r = classify_changes(base, local, remote)
    assert.strictEqual(r.both_modified.length, 0)
    assert.strictEqual(r.unchanged.length, 1)
    assert.deepStrictEqual(r.unchanged[0], { path: 'a.md', sha: '222' })
  })

  it('classifies remote_only_added', () => {
    const base = []
    const local = []
    const remote = [{ path: 'new.md', sha: 'aaa' }]
    const r = classify_changes(base, local, remote)
    assert.strictEqual(r.remote_only_added.length, 1)
    assert.deepStrictEqual(r.remote_only_added[0], { path: 'new.md', remote_sha: 'aaa' })
  })

  it('classifies local_only_added', () => {
    const base = []
    const local = [{ path: 'new.md', sha: 'aaa' }]
    const remote = []
    const r = classify_changes(base, local, remote)
    assert.strictEqual(r.local_only_added.length, 1)
    assert.deepStrictEqual(r.local_only_added[0], { path: 'new.md', local_sha: 'aaa' })
  })

  // Deletion entries carry only the path — no shas.
  it('classifies remote_only_deleted', () => {
    const base = [{ path: 'gone.md', sha: '111' }]
    const local = [{ path: 'gone.md', sha: '111' }]
    const remote = []
    const r = classify_changes(base, local, remote)
    assert.strictEqual(r.remote_only_deleted.length, 1)
    assert.deepStrictEqual(r.remote_only_deleted[0], { path: 'gone.md' })
  })

  it('classifies local_only_deleted', () => {
    const base = [{ path: 'gone.md', sha: '111' }]
    const local = []
    const remote = [{ path: 'gone.md', sha: '111' }]
    const r = classify_changes(base, local, remote)
    assert.strictEqual(r.local_only_deleted.length, 1)
    assert.deepStrictEqual(r.local_only_deleted[0], { path: 'gone.md' })
  })

  it('unchanged when nothing changed', () => {
    const base = [{ path: 'a.md', sha: '111' }]
    const local = [{ path: 'a.md', sha: '111' }]
    const remote = [{ path: 'a.md', sha: '111' }]
    const r = classify_changes(base, local, remote)
    assert.strictEqual(r.unchanged.length, 1)
    assert.deepStrictEqual(r.unchanged[0], { path: 'a.md', sha: '111' })
  })

  // Exercises every bucket at once: one file per classification.
  it('handles mixed scenario with all 8 types', () => {
    const base = [
      { path: 'unchanged.md', sha: '000' },
      { path: 'remote_mod.md', sha: '111' },
      { path: 'local_mod.md', sha: '222' },
      { path: 'both_mod.md', sha: '333' },
      { path: 'remote_del.md', sha: '444' },
      { path: 'local_del.md', sha: '555' },
    ]
    const local = [
      { path: 'unchanged.md', sha: '000' },
      { path: 'remote_mod.md', sha: '111' },
      { path: 'local_mod.md', sha: 'L22' },
      { path: 'both_mod.md', sha: 'L33' },
      { path: 'remote_del.md', sha: '444' },
      // local_del.md removed
      { path: 'local_add.md', sha: 'LA1' },
    ]
    const remote = [
      { path: 'unchanged.md', sha: '000' },
      { path: 'remote_mod.md', sha: 'R11' },
      { path: 'local_mod.md', sha: '222' },
      { path: 'both_mod.md', sha: 'R33' },
      // remote_del.md removed
      { path: 'local_del.md', sha: '555' },
      { path: 'remote_add.md', sha: 'RA1' },
    ]
    const r = classify_changes(base, local, remote)
    assert.strictEqual(r.unchanged.length, 1)
    assert.strictEqual(r.remote_only_modified.length, 1)
    assert.strictEqual(r.local_only_modified.length, 1)
    assert.strictEqual(r.both_modified.length, 1)
    assert.strictEqual(r.remote_only_deleted.length, 1)
    assert.strictEqual(r.local_only_deleted.length, 1)
    assert.strictEqual(r.local_only_added.length, 1)
    assert.strictEqual(r.remote_only_added.length, 1)
  })

  it('handles empty base with both sides adding same file differently', () => {
    const base = []
    const local = [{ path: 'new.md', sha: 'L11' }]
    const remote = [{ path: 'new.md', sha: 'R11' }]
    const r = classify_changes(base, local, remote)
    assert.strictEqual(r.both_modified.length, 1)
    assert.deepStrictEqual(r.both_modified[0], { path: 'new.md', base_sha: null, local_sha: 'L11', remote_sha: 'R11' })
  })

  it('handles empty base with both sides adding same file identically', () => {
    const base = []
    const local = [{ path: 'new.md', sha: 'same' }]
    const remote = [{ path: 'new.md', sha: 'same' }]
    const r = classify_changes(base, local, remote)
    assert.strictEqual(r.unchanged.length, 1)
    assert.strictEqual(r.both_modified.length, 0)
  })

  // Delete-vs-modify is a genuine conflict on either side.
  it('handles delete-vs-modify as both_modified (local deleted, remote modified)', () => {
    const base = [{ path: 'a.md', sha: '111' }]
    const local = []
    const remote = [{ path: 'a.md', sha: '222' }]
    const r = classify_changes(base, local, remote)
    assert.strictEqual(r.both_modified.length, 1)
    assert.deepStrictEqual(r.both_modified[0], { path: 'a.md', base_sha: '111', local_sha: null, remote_sha: '222' })
  })

  it('handles delete-vs-modify as both_modified (local modified, remote deleted)', () => {
    const base = [{ path: 'a.md', sha: '111' }]
    const local = [{ path: 'a.md', sha: '222' }]
    const remote = []
    const r = classify_changes(base, local, remote)
    assert.strictEqual(r.both_modified.length, 1)
    assert.deepStrictEqual(r.both_modified[0], { path: 'a.md', base_sha: '111', local_sha: '222', remote_sha: null })
  })
})
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
/**
 * Generates a structured JSON merge report (v1 contract).
 *
 * The report is consumed by both human-readable CLI output and the AI agent
 * semantic review layer (Layer 2). It must include enough data for the agent
 * to reason about conflicts without additional API calls.
 */

// Canonical classification vocabulary; iteration order here determines the
// order of entries in the report's `files` array.
const CLASSIFICATIONS = [
  'remote_only_modified', 'local_only_modified', 'both_modified',
  'remote_only_added', 'local_only_added',
  'remote_only_deleted', 'local_only_deleted',
  'unchanged'
]

/**
 * Build a v1 merge report from classified file buckets.
 *
 * @param {string} skill - Skill identifier (e.g. "owner/name").
 * @param {string|null} base_version - Version the local copy was based on.
 * @param {string|null} remote_version - Version being merged in.
 * @param {object} classified - Buckets keyed by classification name (missing
 *   keys are treated as empty arrays).
 * @returns {object} { skill, base_version, remote_version, files, summary }.
 *   `unchanged` entries are counted in summary.clean but omitted from `files`;
 *   `both_modified` counts as conflicted, everything else as auto_merged.
 */
const build_report = (skill, base_version, remote_version, classified) => {
  const files = []
  const counts = { clean: 0, auto_merged: 0, conflicted: 0 }

  for (const classification of CLASSIFICATIONS) {
    for (const entry of (classified[classification] || [])) {
      // Clean files are tallied but never listed.
      if (classification === 'unchanged') {
        counts.clean += 1
        continue
      }

      if (classification === 'both_modified') counts.conflicted += 1
      else counts.auto_merged += 1

      files.push({
        path: entry.path,
        classification,
        // Always false at build time; the pull flow may flip it later —
        // TODO confirm against src/commands/pull.js.
        conflict_written: false,
        base_sha: entry.base_sha || null,
        local_sha: entry.local_sha || null,
        remote_sha: entry.remote_sha || null
      })
    }
  }

  return {
    skill,
    base_version: base_version || null,
    remote_version: remote_version || null,
    files,
    summary: {
      total: counts.clean + counts.auto_merged + counts.conflicted,
      clean: counts.clean,
      auto_merged: counts.auto_merged,
      conflicted: counts.conflicted
    }
  }
}
|
|
58
|
+
|
|
59
|
+
module.exports = { build_report, CLASSIFICATIONS }
|
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
// Unit tests for build_report() (./report), pinning the v1 report contract:
// top-level fields, the files array contents, and summary arithmetic.
const { describe, it } = require('node:test')
const assert = require('node:assert/strict')
const { build_report, CLASSIFICATIONS } = require('./report')

describe('build_report', () => {
  it('produces v1 contract fields', () => {
    const classified = {
      remote_only_modified: [{ path: 'a.md', base_sha: '111', remote_sha: '222' }],
      local_only_modified: [],
      both_modified: [{ path: 'b.md', base_sha: '333', local_sha: '444', remote_sha: '555' }],
      remote_only_added: [],
      local_only_added: [],
      remote_only_deleted: [],
      local_only_deleted: [],
      unchanged: [{ path: 'c.md', sha: '666' }]
    }
    const report = build_report('acme/deploy-aws', '1.15.0', '1.17.0', classified)

    assert.strictEqual(report.skill, 'acme/deploy-aws')
    assert.strictEqual(report.base_version, '1.15.0')
    assert.strictEqual(report.remote_version, '1.17.0')
    assert.strictEqual(report.files.length, 2)
    assert.deepStrictEqual(report.summary, { total: 3, clean: 1, auto_merged: 1, conflicted: 1 })
  })

  // Invariants: files lists only non-clean entries; total is the sum of parts.
  it('summary counts match file array', () => {
    const classified = {
      remote_only_modified: [{ path: 'a.md', base_sha: '1', remote_sha: '2' }],
      local_only_modified: [{ path: 'b.md', base_sha: '3', local_sha: '4' }],
      both_modified: [],
      remote_only_added: [{ path: 'c.md', remote_sha: '5' }],
      local_only_added: [],
      remote_only_deleted: [{ path: 'd.md' }],
      local_only_deleted: [],
      unchanged: [{ path: 'e.md', sha: '6' }, { path: 'f.md', sha: '7' }]
    }
    const report = build_report('test/skill', '1.0.0', '2.0.0', classified)

    assert.strictEqual(report.files.length, report.summary.auto_merged + report.summary.conflicted)
    assert.strictEqual(report.summary.total, report.summary.clean + report.summary.auto_merged + report.summary.conflicted)
  })

  it('classifications are from the defined vocabulary', () => {
    const classified = {
      remote_only_modified: [{ path: 'a.md', base_sha: '1', remote_sha: '2' }],
      local_only_modified: [],
      both_modified: [{ path: 'b.md', base_sha: '3', local_sha: '4', remote_sha: '5' }],
      remote_only_added: [],
      local_only_added: [],
      remote_only_deleted: [],
      local_only_deleted: [],
      unchanged: []
    }
    const report = build_report('test/skill', '1.0.0', '2.0.0', classified)

    for (const file of report.files) {
      assert.ok(CLASSIFICATIONS.includes(file.classification), `Unknown classification: ${file.classification}`)
    }
  })

  it('handles null versions gracefully', () => {
    const classified = {
      remote_only_modified: [], local_only_modified: [], both_modified: [],
      remote_only_added: [], local_only_added: [],
      remote_only_deleted: [], local_only_deleted: [],
      unchanged: [{ path: 'a.md', sha: '111' }]
    }
    const report = build_report('test/skill', null, null, classified)
    assert.strictEqual(report.base_version, null)
    assert.strictEqual(report.remote_version, null)
    assert.strictEqual(report.summary.total, 1)
  })

  it('unchanged files are not included in files array', () => {
    const classified = {
      remote_only_modified: [], local_only_modified: [], both_modified: [],
      remote_only_added: [], local_only_added: [],
      remote_only_deleted: [], local_only_deleted: [],
      unchanged: [{ path: 'a.md', sha: '111' }, { path: 'b.md', sha: '222' }]
    }
    const report = build_report('test/skill', '1.0.0', '1.0.0', classified)
    assert.strictEqual(report.files.length, 0)
    assert.strictEqual(report.summary.clean, 2)
  })
})
|