happyskills 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +21 -0
- package/LICENSE +116 -0
- package/README.md +76 -0
- package/bin/happyskills.js +5 -0
- package/package.json +37 -0
- package/src/api/auth.js +40 -0
- package/src/api/client.js +65 -0
- package/src/api/repos.js +44 -0
- package/src/api/workspaces.js +10 -0
- package/src/auth/token_store.js +69 -0
- package/src/commands/check.js +108 -0
- package/src/commands/fork.js +101 -0
- package/src/commands/init.js +74 -0
- package/src/commands/install.js +68 -0
- package/src/commands/list.js +61 -0
- package/src/commands/login.js +97 -0
- package/src/commands/logout.js +26 -0
- package/src/commands/publish.js +152 -0
- package/src/commands/search.js +58 -0
- package/src/commands/uninstall.js +48 -0
- package/src/commands/update.js +73 -0
- package/src/commands/whoami.js +66 -0
- package/src/config/index.js +16 -0
- package/src/config/paths.js +41 -0
- package/src/config/paths.test.js +100 -0
- package/src/constants.js +59 -0
- package/src/engine/downloader.js +10 -0
- package/src/engine/extractor.js +35 -0
- package/src/engine/extractor.test.js +37 -0
- package/src/engine/installer.js +150 -0
- package/src/engine/resolver.js +36 -0
- package/src/engine/system_deps.js +58 -0
- package/src/engine/uninstaller.js +73 -0
- package/src/engine/uninstaller.test.js +98 -0
- package/src/index.js +118 -0
- package/src/index.test.js +63 -0
- package/src/lock/integrity.js +54 -0
- package/src/lock/reader.js +29 -0
- package/src/lock/writer.js +31 -0
- package/src/lock/writer.test.js +74 -0
- package/src/manifest/reader.js +13 -0
- package/src/manifest/validator.js +44 -0
- package/src/manifest/validator.test.js +101 -0
- package/src/manifest/writer.js +12 -0
- package/src/ui/colors.js +17 -0
- package/src/ui/output.js +48 -0
- package/src/ui/spinner.js +59 -0
- package/src/utils/errors.js +69 -0
- package/src/utils/errors.test.js +96 -0
- package/src/utils/fs.js +49 -0
- package/src/utils/semver.js +27 -0
- package/src/utils/semver.test.js +101 -0
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
const { error: { catch_errors, wrap_errors: e } } = require('puffy-core')
|
|
2
|
+
const { read_lock, get_locked_skill, get_all_locked_skills } = require('../lock/reader')
|
|
3
|
+
const { write_lock, update_lock_skills } = require('../lock/writer')
|
|
4
|
+
const { skills_dir, skill_install_dir } = require('../config/paths')
|
|
5
|
+
const { remove_dir } = require('../utils/fs')
|
|
6
|
+
const { print_success, print_info } = require('../ui/output')
|
|
7
|
+
|
|
8
|
+
// Collect skills that would be left without any live requester once
// `removed_skill` is gone. A requester only counts if it is a key in the
// `skills` map and is not the skill being removed. Only direct orphans are
// reported — transitively orphaned skills surface on subsequent passes.
const find_orphans = (skills, removed_skill) => {
  const has_live_requester = (name) => {
    const requesters = skills[name].requested_by
    if (!requesters || requesters.length === 0) return false
    return requesters.some(r => r !== removed_skill && Boolean(skills[r]))
  }
  return Object.keys(skills)
    .filter(name => name !== removed_skill)
    .filter(name => !has_live_requester(name))
}
|
|
23
|
+
|
|
24
|
+
// Remove an installed skill, prune any dependencies left without a requester,
// and persist the result to skills-lock.json.
//
// skill   - 'owner/name' identifier of the skill to remove.
// options - { global: bool (install scope, default false), project_root }.
//
// Resolves (via puffy-core catch_errors) to { removed: [names...] }. Throws
// when the lock file is missing or the skill is not installed.
const uninstall = (skill, options = {}) => catch_errors('Uninstall failed', async () => {
  const { global: is_global = false, project_root } = options
  const base_dir = skills_dir(is_global, project_root)

  const [lock_errors, lock_data] = await read_lock(project_root)
  if (lock_errors || !lock_data) {
    throw new Error('No skills-lock.json found. Nothing to uninstall.')
  }

  const locked = get_locked_skill(lock_data, skill)
  if (!locked) {
    throw new Error(`${skill} is not installed.`)
  }

  // Shallow copy: the top-level map is copied but each per-skill object is
  // still shared with lock_data.skills.
  const all_skills = { ...get_all_locked_skills(lock_data) }

  // Strip the removed skill from every requested_by list. Because the skill
  // objects are shared (see above), this also mutates lock_data.skills, which
  // is what update_lock_skills reads from below — so surviving entries are
  // written back without stale references to the removed skill.
  for (const [name, data] of Object.entries(all_skills)) {
    if (data.requested_by) {
      data.requested_by = data.requested_by.filter(r => r !== skill)
    }
  }

  // Direct orphans only — skills orphaned transitively are not pruned in
  // this pass (see find_orphans).
  const orphans = find_orphans(all_skills, skill)
  const to_remove = [skill, ...orphans]

  // Delete the install directories first, then update the lock, so a failed
  // removal leaves the lock still referencing what is actually on disk.
  for (const name of to_remove) {
    const [owner, repo] = name.split('/')
    const install_dir = skill_install_dir(base_dir, owner, repo)
    const [rm_errors] = await remove_dir(install_dir)
    if (rm_errors) throw e(`Failed to remove ${name}`, rm_errors)
  }

  // A null value marks an entry for deletion in update_lock_skills.
  const updates = {}
  for (const name of to_remove) {
    updates[name] = null
  }

  const new_skills = update_lock_skills(lock_data, updates)
  const [write_errors] = await write_lock(project_root, new_skills)
  if (write_errors) throw e('Failed to update lock file', write_errors)

  print_success(`Removed ${skill}`)
  if (orphans.length > 0) {
    print_info(`Pruned ${orphans.length} orphaned dependency(s): ${orphans.join(', ')}`)
  }

  return { removed: to_remove }
})

module.exports = { uninstall, find_orphans }
|
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
const { describe, it } = require('node:test')
|
|
2
|
+
const assert = require('node:assert')
|
|
3
|
+
const { find_orphans } = require('./uninstaller')
|
|
4
|
+
|
|
5
|
+
// Unit tests for find_orphans (pure dependency-pruning helper).
// An orphan is a skill whose every requester is either the removed skill or
// absent from the skills map; only direct orphans are reported.
describe('find_orphans', () => {
  it('returns empty when all skills have requesters in the skills map', () => {
    const skills = {
      'acme/app': { requested_by: ['acme/root'] },
      'acme/root': { requested_by: ['acme/app'] },
      'acme/auth': { requested_by: ['acme/app'] }
    }
    const result = find_orphans(skills, 'acme/other')
    assert.deepStrictEqual(result, [])
  })

  it('finds orphans whose only requester is the removed skill', () => {
    const skills = {
      'acme/deploy': { requested_by: ['acme/app'] },
      'acme/app': { requested_by: ['acme/deploy'] },
      'acme/auth': { requested_by: ['acme/deploy'] }
    }
    const result = find_orphans(skills, 'acme/deploy')
    assert.ok(result.includes('acme/auth'))
  })

  it('skips the removed skill itself', () => {
    const skills = {
      'acme/deploy': { requested_by: ['acme/app'] },
      'acme/app': { requested_by: ['acme/deploy'] },
      'acme/auth': { requested_by: ['acme/deploy'] }
    }
    const result = find_orphans(skills, 'acme/deploy')
    // The removed skill must never appear in its own orphan list.
    assert.ok(!result.includes('acme/deploy'))
  })

  it('keeps skills that have other valid requesters in the map', () => {
    const skills = {
      'acme/deploy': { requested_by: ['acme/monitor'] },
      'acme/monitor': { requested_by: ['acme/deploy'] },
      'acme/auth': { requested_by: ['acme/deploy', 'acme/monitor'] }
    }
    const result = find_orphans(skills, 'acme/deploy')
    // acme/monitor still requests acme/auth, so it survives the prune.
    assert.ok(!result.includes('acme/auth'))
  })

  it('finds skills with empty requested_by', () => {
    const skills = {
      'acme/deploy': { requested_by: ['acme/root'] },
      'acme/root': { requested_by: ['acme/deploy'] },
      'acme/orphan': { requested_by: [] }
    }
    const result = find_orphans(skills, 'acme/other')
    assert.ok(result.includes('acme/orphan'))
  })

  it('finds skills with no requested_by field', () => {
    const skills = {
      'acme/deploy': { requested_by: ['acme/root'] },
      'acme/root': { requested_by: ['acme/deploy'] },
      'acme/orphan': {}
    }
    const result = find_orphans(skills, 'acme/other')
    assert.ok(result.includes('acme/orphan'))
  })

  it('handles cascading orphans (only finds direct)', () => {
    const skills = {
      'acme/deploy': { requested_by: ['acme/app'] },
      'acme/app': { requested_by: ['acme/deploy'] },
      'acme/auth': { requested_by: ['acme/deploy'] },
      'acme/crypto': { requested_by: ['acme/auth'] }
    }
    // find_orphans only finds direct orphans, not cascading
    const result = find_orphans(skills, 'acme/deploy')
    // acme/auth is orphaned (only requester is removed skill)
    assert.ok(result.includes('acme/auth'))
    // acme/crypto still has acme/auth in skills map, so it's not orphaned yet
    assert.ok(!result.includes('acme/crypto'))
  })

  it('filters out requesters that are not in the skills map', () => {
    const skills = {
      'acme/auth': { requested_by: ['acme/gone'] }
    }
    // acme/gone doesn't exist in skills, so acme/auth is orphaned
    const result = find_orphans(skills, 'acme/other')
    assert.ok(result.includes('acme/auth'))
  })

  it('treats __root__ like any other requester (not in skills map)', () => {
    const skills = {
      'acme/deploy': { requested_by: ['__root__'] }
    }
    // __root__ is not a key in skills, so deploy is considered orphaned
    const result = find_orphans(skills, 'acme/other')
    assert.ok(result.includes('acme/deploy'))
  })
})
|
package/src/index.js
ADDED
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
const { CLI_VERSION, EXIT_CODES, COMMAND_ALIASES, COMMANDS } = require('./constants')
|
|
2
|
+
const { print_error, print_help } = require('./ui/output')
|
|
3
|
+
|
|
4
|
+
// Minimal argv parser: positional tokens collect into `_`, `--long` and
// single-letter `-s` flags collect into `flags`. A flag consumes the next
// token as its value unless that token is absent or itself starts with '-',
// in which case the flag is boolean true. Multi-letter short tokens such as
// '-abc' are treated as positionals.
const parse_args = (argv) => {
  const positionals = []
  const flags = {}
  for (let idx = 0; idx < argv.length; idx++) {
    const token = argv[idx]
    const is_long = token.startsWith('--')
    const is_short = !is_long && token.startsWith('-') && token.length === 2
    if (!is_long && !is_short) {
      positionals.push(token)
      continue
    }
    const key = is_long ? token.slice(2) : token.slice(1)
    const lookahead = argv[idx + 1]
    if (!lookahead || lookahead.startsWith('-')) {
      flags[key] = true
    } else {
      flags[key] = lookahead
      idx++ // value consumed
    }
  }
  return { _: positionals, flags }
}
|
|
34
|
+
|
|
35
|
+
// Single-letter flag aliases and their canonical long names.
const SHORT_FLAGS = {
  g: 'global',
  y: 'yes',
}

// Rewrite every short flag key to its long form; unknown keys pass through
// untouched. Returns a new object — the input is not mutated.
const normalize_flags = (flags) =>
  Object.fromEntries(
    Object.entries(flags).map(([key, val]) => [SHORT_FLAGS[key] || key, val])
  )
|
|
48
|
+
|
|
49
|
+
// Print the CLI version string to stdout.
const show_version = () => {
  console.log(`happyskills v${CLI_VERSION}`)
}

// Print the top-level usage screen listing every command (with aliases) and
// the global flags. Rendered through print_help so formatting matches the
// rest of the UI layer.
const show_help = () => {
  print_help(`happyskills v${CLI_VERSION} — Package manager for AI agent skills

Usage: happyskills <command> [options]

Commands:
  init [name]              Scaffold SKILL.md + skill.json
  install [owner/skill]    Install skill + dependencies (alias: i, add)
  uninstall <owner/skill>  Remove skill + prune orphans (alias: rm, remove)
  list                     List installed skills (alias: ls)
  search <query>           Search the registry (alias: s)
  check [owner/skill]      Check for available updates
  update [owner/skill]     Upgrade to latest versions (alias: up)
  publish                  Push skill to registry (alias: pub)
  fork <owner/skill>       Fork a skill to your workspace
  login                    Authenticate with the registry
  logout                   Clear stored credentials
  whoami                   Show current user

Global flags:
  --help                   Show help for a command
  --version                Show CLI version
  -y, --yes                Skip confirmation prompts
  --json                   Output as JSON (list, search, check)`)
}
|
|
78
|
+
|
|
79
|
+
// CLI entry point: parse argv, dispatch to the matching command module, and
// surface async command failures via exit_with_error. Exits the process
// directly for --version, bare invocations (help), and unknown commands.
const run = (argv) => {
  const args = parse_args(argv)
  args.flags = normalize_flags(args.flags)

  // Only treat --version as a CLI-version request when no command was given;
  // otherwise `happyskills install foo --version 1.0.0` would print the CLI
  // version and exit instead of running the install command.
  if (args.flags.version && args._.length === 0) {
    show_version()
    return process.exit(EXIT_CODES.SUCCESS)
  }

  const command_name = args._.shift()

  // No command at all (with or without --help): show the global help screen.
  if (!command_name) {
    show_help()
    return process.exit(EXIT_CODES.SUCCESS)
  }

  const resolved = COMMAND_ALIASES[command_name] || command_name

  if (!COMMANDS.includes(resolved)) {
    print_error(`Unknown command: ${command_name}`)
    console.error(`Run 'happyskills --help' for usage information.`)
    return process.exit(EXIT_CODES.USAGE)
  }

  // Let the resolved command render its own, more specific help screen.
  if (args.flags.help) {
    args.flags._show_help = true
  }

  // Lazy-require so only the invoked command's module graph is loaded.
  const command_module = require(`./commands/${resolved}`)
  const result = command_module.run(args)

  // Commands may be sync or async; attach a rejection handler so async
  // failures exit cleanly instead of raising an unhandled rejection.
  if (result && typeof result.then === 'function') {
    result.catch((err) => {
      const { exit_with_error } = require('./utils/errors')
      exit_with_error(err)
    })
  }
}

module.exports = { run, parse_args }
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
const { describe, it } = require('node:test')
|
|
2
|
+
const assert = require('node:assert')
|
|
3
|
+
const { parse_args } = require('./index')
|
|
4
|
+
|
|
5
|
+
// Unit tests for parse_args (pure argv tokenizer). A flag's value is the
// following token unless that token is absent or starts with '-'.
describe('parse_args', () => {
  it('parses positional arguments', () => {
    const result = parse_args(['install', 'acme/deploy-aws'])
    assert.deepStrictEqual(result._, ['install', 'acme/deploy-aws'])
    assert.deepStrictEqual(result.flags, {})
  })

  it('parses long boolean flags', () => {
    const result = parse_args(['--force'])
    assert.deepStrictEqual(result._, [])
    assert.strictEqual(result.flags.force, true)
  })

  it('parses long flags with values', () => {
    const result = parse_args(['--version', '1.2.0'])
    assert.strictEqual(result.flags.version, '1.2.0')
    assert.deepStrictEqual(result._, [])
  })

  it('parses short boolean flags', () => {
    const result = parse_args(['-g'])
    assert.strictEqual(result.flags.g, true)
  })

  it('parses short flags with values', () => {
    const result = parse_args(['-v', '1.0.0'])
    assert.strictEqual(result.flags.v, '1.0.0')
  })

  it('treats flag followed by another flag as boolean', () => {
    // '--yes' starts with '-', so it is not consumed as '--force''s value.
    const result = parse_args(['--force', '--yes'])
    assert.strictEqual(result.flags.force, true)
    assert.strictEqual(result.flags.yes, true)
  })

  it('mixes positionals and flags', () => {
    const result = parse_args(['install', 'acme/deploy-aws', '--version', '1.0.0', '-g'])
    assert.deepStrictEqual(result._, ['install', 'acme/deploy-aws'])
    assert.strictEqual(result.flags.version, '1.0.0')
    assert.strictEqual(result.flags.g, true)
  })

  it('returns empty result for no arguments', () => {
    const result = parse_args([])
    assert.deepStrictEqual(result._, [])
    assert.deepStrictEqual(result.flags, {})
  })

  it('treats short flag at end as boolean', () => {
    const result = parse_args(['install', '-y'])
    assert.deepStrictEqual(result._, ['install'])
    assert.strictEqual(result.flags.y, true)
  })

  it('treats long flag at end as boolean', () => {
    const result = parse_args(['--json'])
    assert.strictEqual(result.flags.json, true)
  })
})
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
const crypto = require('crypto')
|
|
2
|
+
const fs = require('fs')
|
|
3
|
+
const path = require('path')
|
|
4
|
+
const { error: { catch_errors } } = require('puffy-core')
|
|
5
|
+
|
|
6
|
+
// Compute the SHA-256 digest of a single file, returned as a bare hex string
// (note: unlike hash_directory, no "sha256-" prefix).
const hash_file = (file_path) => catch_errors(`Failed to hash file ${file_path}`, async () => {
  const hasher = crypto.createHash('sha256')
  const bytes = await fs.promises.readFile(file_path)
  hasher.update(bytes)
  return hasher.digest('hex')
})
|
|
10
|
+
|
|
11
|
+
// Compute a deterministic SHA-256 digest over a directory tree: each file's
// relative path followed by its content, folded into one hash in sorted path
// order. Dotfiles and dot-directories are excluded (see collect_files).
// Returns a "sha256-"-prefixed hex string.
const hash_directory = (dir_path) => catch_errors(`Failed to hash directory ${dir_path}`, async () => {
  const hash = crypto.createHash('sha256')
  const entries = await collect_files(dir_path)

  // Normalize separators to '/' before sorting/hashing so the digest is
  // identical on Windows (path.sep === '\\') and POSIX — otherwise the same
  // tree would fail integrity verification across platforms.
  const normalized = entries.map(entry => ({
    ...entry,
    relative: entry.relative.split(path.sep).join('/')
  }))
  normalized.sort((a, b) => a.relative.localeCompare(b.relative))

  for (const entry of normalized) {
    hash.update(entry.relative)
    const content = await fs.promises.readFile(entry.absolute)
    hash.update(content)
  }

  return `sha256-${hash.digest('hex')}`
})
|
|
25
|
+
|
|
26
|
+
// Recursively list regular files under dir_path, skipping any entry whose
// name starts with '.' (hidden files and whole hidden directories). Each
// result carries its path relative to base_path plus its absolute path.
// Order follows readdir order and is not guaranteed sorted — callers sort.
const collect_files = async (dir_path, base_path = dir_path) => {
  const dirents = await fs.promises.readdir(dir_path, { withFileTypes: true })
  const collected = []

  for (const dirent of dirents) {
    if (dirent.name.startsWith('.')) continue
    const absolute = path.join(dir_path, dirent.name)

    if (dirent.isDirectory()) {
      collected.push(...await collect_files(absolute, base_path))
    } else if (dirent.isFile()) {
      collected.push({
        relative: path.relative(base_path, absolute),
        absolute
      })
    }
  }

  return collected
}
|
|
47
|
+
|
|
48
|
+
// Re-hash dir_path and report whether the digest matches expected_hash.
// Resolves to a boolean; hashing failures are rethrown and surface through
// catch_errors' error channel.
const verify_integrity = (dir_path, expected_hash) => catch_errors('Integrity verification failed', async () => {
  const [hash_errors, computed] = await hash_directory(dir_path)
  if (hash_errors) throw hash_errors[0]
  return computed === expected_hash
})

module.exports = { hash_file, hash_directory, verify_integrity }
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
const { error: { catch_errors } } = require('puffy-core')
|
|
2
|
+
const { read_json } = require('../utils/fs')
|
|
3
|
+
const { lock_file_path } = require('../config/paths')
|
|
4
|
+
const { LOCK_VERSION } = require('../constants')
|
|
5
|
+
|
|
6
|
+
// Read and parse skills-lock.json for the given project root.
// Resolves to the parsed lock object, or null when the file is missing or
// unreadable (callers treat null as "nothing installed"). A lockVersion
// mismatch only warns — the stale lock data is still returned.
const read_lock = (project_root) => catch_errors('Failed to read lock file', async () => {
  const lock_path = lock_file_path(project_root)
  const [errors, data] = await read_json(lock_path)
  // A missing or unparseable lock file is an expected state, not an error.
  if (errors) return null

  if (data.lockVersion !== LOCK_VERSION) {
    // Lazy require — NOTE(review): presumably to avoid a load-time module
    // cycle with the UI layer; confirm before hoisting to the top of file.
    const { print_warn } = require('../ui/output')
    print_warn(`Lock file version mismatch (found ${data.lockVersion}, expected ${LOCK_VERSION}). Consider running with --fresh.`)
  }

  return data
})
|
|
18
|
+
|
|
19
|
+
// Look up one skill's lock entry by name; null when the lock data, its
// skills map, or the entry itself is absent.
const get_locked_skill = (lock_data, skill_name) => lock_data?.skills?.[skill_name] || null

// Return the lock file's skills map, or an empty object when absent.
// Returns the live map, not a copy — callers must copy before mutating.
const get_all_locked_skills = (lock_data) => lock_data?.skills || {}
|
|
28
|
+
|
|
29
|
+
module.exports = { read_lock, get_locked_skill, get_all_locked_skills }
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
const { error: { catch_errors } } = require('puffy-core')
|
|
2
|
+
const { atomic_write_json } = require('../utils/fs')
|
|
3
|
+
const { lock_file_path } = require('../config/paths')
|
|
4
|
+
const { LOCK_VERSION } = require('../constants')
|
|
5
|
+
|
|
6
|
+
// Atomically write skills-lock.json for project_root, stamping the current
// lock schema version and generation timestamp. `skills` defaults to an
// empty map when falsy.
const write_lock = (project_root, skills) => catch_errors('Failed to write lock file', async () => {
  const payload = {
    lockVersion: LOCK_VERSION,
    generatedAt: new Date().toISOString(),
    skills: skills || {}
  }
  const [write_errors] = await atomic_write_json(lock_file_path(project_root), payload)
  if (write_errors) throw write_errors[0]
})
|
|
16
|
+
|
|
17
|
+
// Produce a new skills map from an existing lock plus a set of updates.
// An update value of null deletes the entry; anything else overwrites it.
// Pure with respect to inputs: neither existing_lock nor updates is mutated.
const update_lock_skills = (existing_lock, updates) => {
  const merged = { ...(existing_lock?.skills || {}) }

  Object.entries(updates).forEach(([name, data]) => {
    if (data === null) {
      delete merged[name]
    } else {
      merged[name] = data
    }
  })

  return merged
}
|
|
30
|
+
|
|
31
|
+
module.exports = { write_lock, update_lock_skills }
|
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
const { describe, it } = require('node:test')
|
|
2
|
+
const assert = require('node:assert')
|
|
3
|
+
const { update_lock_skills } = require('./writer')
|
|
4
|
+
|
|
5
|
+
// Unit tests for update_lock_skills (pure lock-map merge helper).
// null update values delete entries; the input lock is never mutated.
describe('update_lock_skills', () => {
  it('adds new skills to empty lock', () => {
    const result = update_lock_skills(null, {
      'acme/deploy': { version: '1.0.0', requested_by: ['__root__'] }
    })
    assert.deepStrictEqual(result, {
      'acme/deploy': { version: '1.0.0', requested_by: ['__root__'] }
    })
  })

  it('merges new skills into existing lock', () => {
    const existing = {
      skills: {
        'acme/deploy': { version: '1.0.0', requested_by: ['__root__'] }
      }
    }
    const result = update_lock_skills(existing, {
      'acme/auth': { version: '2.0.0', requested_by: ['acme/deploy'] }
    })
    assert.ok(result['acme/deploy'])
    assert.ok(result['acme/auth'])
    assert.strictEqual(result['acme/auth'].version, '2.0.0')
  })

  it('removes skills when set to null', () => {
    const existing = {
      skills: {
        'acme/deploy': { version: '1.0.0' },
        'acme/auth': { version: '2.0.0' }
      }
    }
    const result = update_lock_skills(existing, {
      'acme/deploy': null
    })
    assert.strictEqual(result['acme/deploy'], undefined)
    assert.ok(result['acme/auth'])
  })

  it('overwrites existing skill data', () => {
    const existing = {
      skills: {
        'acme/deploy': { version: '1.0.0', requested_by: ['__root__'] }
      }
    }
    const result = update_lock_skills(existing, {
      'acme/deploy': { version: '2.0.0', requested_by: ['__root__'] }
    })
    assert.strictEqual(result['acme/deploy'].version, '2.0.0')
  })

  it('handles empty updates', () => {
    const existing = {
      skills: {
        'acme/deploy': { version: '1.0.0' }
      }
    }
    const result = update_lock_skills(existing, {})
    assert.deepStrictEqual(result, { 'acme/deploy': { version: '1.0.0' } })
  })

  it('does not mutate the original lock data', () => {
    const existing = {
      skills: {
        'acme/deploy': { version: '1.0.0' }
      }
    }
    update_lock_skills(existing, { 'acme/deploy': null })
    // Deleting from the returned map must not touch the input lock.
    assert.ok(existing.skills['acme/deploy'])
  })
})
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
const path = require('path')
|
|
2
|
+
const { error: { catch_errors } } = require('puffy-core')
|
|
3
|
+
const { read_json } = require('../utils/fs')
|
|
4
|
+
const { SKILL_JSON } = require('../constants')
|
|
5
|
+
|
|
6
|
+
// Load and parse the skill manifest (skill.json) from `dir` (defaults to the
// current working directory). Throws a user-facing error pointing at
// `happyskills init` when the file is missing or unreadable.
const read_manifest = (dir = process.cwd()) => catch_errors('Failed to read skill.json', async () => {
  const [read_errors, manifest] = await read_json(path.join(dir, SKILL_JSON))
  if (read_errors) throw new Error(`No ${SKILL_JSON} found in ${dir}. Run 'happyskills init' to create one.`)
  return manifest
})

module.exports = { read_manifest }
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
const { valid } = require('../utils/semver')
|
|
2
|
+
|
|
3
|
+
// Fields every manifest must define.
const REQUIRED_FIELDS = ['name', 'version']
// Skill names: lowercase alphanumeric start, then lowercase/digits/_/-.
const NAME_PATTERN = /^[a-z0-9][a-z0-9_-]*$/

// Validate a parsed skill.json manifest. Returns { valid, errors } where
// errors is a list of human-readable problems and valid is true when the
// list is empty. Does not throw.
const validate_manifest = (manifest) => {
  const errors = []

  REQUIRED_FIELDS
    .filter(field => !manifest[field])
    .forEach(field => errors.push(`Missing required field: ${field}`))

  if (manifest.name && !NAME_PATTERN.test(manifest.name)) {
    errors.push(`Invalid name "${manifest.name}". Use lowercase letters, numbers, hyphens, and underscores. Must start with a letter or number.`)
  }

  if (manifest.version && !valid(manifest.version)) {
    errors.push(`Invalid version "${manifest.version}". Must be valid semver (e.g., 1.0.0).`)
  }

  if (manifest.dependencies) {
    const is_plain_object = typeof manifest.dependencies === 'object' && !Array.isArray(manifest.dependencies)
    if (!is_plain_object) {
      errors.push('dependencies must be an object mapping skill names to version ranges.')
    } else {
      for (const [dep, range] of Object.entries(manifest.dependencies)) {
        if (!dep.includes('/')) {
          errors.push(`Invalid dependency "${dep}". Must be in owner/name format.`)
        }
        if (typeof range !== 'string') {
          errors.push(`Invalid version range for "${dep}". Must be a string.`)
        }
      }
    }
  }

  return {
    valid: errors.length === 0,
    errors
  }
}
|
|
43
|
+
|
|
44
|
+
module.exports = { validate_manifest }
|