happyskills 0.16.0 → 0.17.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -7,6 +7,23 @@ and this project adheres to [Semantic Versioning](https://semver.org/).
7
7
 
8
8
  ## [Unreleased]
9
9
 
10
+ ## [0.17.0] - 2026-03-29
11
+
12
+ ### Added
13
+ - Add `status` command (`st` alias) for divergence detection — shows whether installed skills have local modifications, remote updates, or both
14
+ - Add `--force` flag to `publish` to bypass divergence check when deliberately overwriting remote changes
15
+ - Add `--force` flag to `update` to overwrite skills with local modifications
16
+ - Add `base_commit` and `base_integrity` fields to lock file entries — tracking the install-time commit SHA and integrity hash used for divergence detection
17
+ - Add `cli/src/merge/detector.js` module with `detect_status()` for local modification detection via integrity comparison
18
+ - Add `cli/src/utils/git_hash.js` with Git-style blob hashing — SHA-256 over Git's blob header format (`sha256("blob <size>\0" + content)`; note this differs from `git hash-object`, which uses SHA-1)
19
+
20
+ ### Changed
21
+ - Bump lock file version from 1 to 2 (new `base_commit`/`base_integrity` fields)
22
+ - Switch file hashing in `file_collector.js` from raw SHA-256 to SHA-256 over the Git blob header format (`hash_blob`) so file SHAs match the registry's Git object format
23
+ - Send `base_commit` and `force` fields with push requests for server-side divergence checking
24
+ - Update `publish` to read `base_commit` from lock file, handle `409 DIVERGED` responses, and update `base_commit`/`base_integrity` in lock on success
25
+ - Update `update` to refuse overwriting skills with local modifications unless `--force` is passed
26
+
10
27
  ## [0.16.0] - 2026-03-28
11
28
 
12
29
  ### Added
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "happyskills",
3
- "version": "0.16.0",
3
+ "version": "0.17.0",
4
4
  "description": "Package manager for AI agent skills",
5
5
  "license": "SEE LICENSE IN LICENSE",
6
6
  "author": "Nicolas Dao <nic@cloudlesslabs.com> (https://cloudlesslabs.com)",
package/src/api/push.js CHANGED
@@ -12,13 +12,13 @@ const estimate_payload_size = (files) => {
12
12
  return size
13
13
  }
14
14
 
15
- const smart_push = (owner, repo, { version, message, files, visibility }, on_progress) =>
15
+ const smart_push = (owner, repo, { version, message, files, visibility, base_commit, force }, on_progress) =>
16
16
  catch_errors('Smart push failed', async () => {
17
17
  const payload_size = estimate_payload_size(files)
18
18
 
19
19
  if (payload_size < DIRECT_PUSH_THRESHOLD) {
20
20
  // Small payload — use direct push
21
- const [err, data] = await repos_api.push(owner, repo, { version, message, files, visibility })
21
+ const [err, data] = await repos_api.push(owner, repo, { version, message, files, visibility, base_commit, force })
22
22
  if (err) throw e('Direct push failed', err)
23
23
  return data
24
24
  }
@@ -28,7 +28,7 @@ const smart_push = (owner, repo, { version, message, files, visibility }, on_pro
28
28
 
29
29
  // Step 1: Initiate
30
30
  const [init_err, init_data] = await initiate_upload(owner, repo, {
31
- version, message, files: file_meta, visibility
31
+ version, message, files: file_meta, visibility, base_commit, force
32
32
  })
33
33
  if (init_err) throw e('Upload initiation failed', init_err)
34
34
 
@@ -53,7 +53,7 @@ const smart_push = (owner, repo, { version, message, files, visibility }, on_pro
53
53
 
54
54
  // Step 3: Complete
55
55
  const [complete_err, complete_data] = await complete_upload(owner, repo, {
56
- upload_id, version, message, files: file_meta
56
+ upload_id, version, message, files: file_meta, base_commit, force
57
57
  })
58
58
  if (complete_err) throw e('Upload completion failed', complete_err)
59
59
 
@@ -32,6 +32,7 @@ Options:
32
32
  --bump <type> Auto-bump version before publishing (patch, minor, major)
33
33
  --workspace <slug> Target workspace (overrides lock file owner)
34
34
  --public Publish as public (default is private)
35
+ --force Bypass divergence check (may overwrite remote changes)
35
36
 
36
37
  Aliases: pub
37
38
 
@@ -153,6 +154,26 @@ const run = (args) => catch_errors('Publish failed', async () => {
153
154
  }
154
155
  }
155
156
 
157
+ // Read base_commit from lock file for divergence check
158
+ const full_name_pre = `${workspace.slug}/${manifest.name}`
159
+ const project_root = find_project_root()
160
+ const [lock_err, lock_data] = await read_lock(project_root)
161
+ let base_commit = null
162
+ if (!lock_err && lock_data) {
163
+ const all_skills = get_all_locked_skills(lock_data)
164
+ const suffix = `/${skill_name}`
165
+ const lock_key = Object.keys(all_skills).find(k => k.endsWith(suffix))
166
+ if (lock_key && all_skills[lock_key]) {
167
+ base_commit = all_skills[lock_key].base_commit || null
168
+ }
169
+ }
170
+
171
+ const force = !!args.flags.force
172
+ if (force) {
173
+ print_warn('Force publishing — this may overwrite remote changes that haven\'t been merged.')
174
+ print_hint(`Consider ${code('happyskills pull')} first to merge safely.`)
175
+ }
176
+
156
177
  spinner.update('Packaging skill...')
157
178
  const [collect_err, skill_files] = await collect_files(dir)
158
179
  if (collect_err) { spinner.fail('Failed to collect files'); throw e('File collection failed', collect_err) }
@@ -166,15 +187,26 @@ const run = (args) => catch_errors('Publish failed', async () => {
166
187
  version: manifest.version,
167
188
  message: `Release ${manifest.version}`,
168
189
  files: skill_files,
169
- visibility
190
+ visibility,
191
+ base_commit: force ? null : base_commit,
192
+ force
170
193
  }, on_progress)
171
- if (push_err) { spinner.fail('Publish failed'); throw e('Push failed', push_err) }
194
+ if (push_err) {
195
+ const last = push_err[push_err.length - 1]
196
+ if (last?.message?.includes('diverged') || last?.message?.includes('DIVERGED')) {
197
+ spinner.fail('Remote has diverged')
198
+ print_error('Remote has newer changes. Run \'happyskills pull\' to merge, then publish again.')
199
+ print_hint(`Or use ${code('happyskills publish ' + skill_name + ' --force')} to overwrite remote changes.`)
200
+ return process.exit(EXIT_CODES.ERROR)
201
+ }
202
+ spinner.fail('Publish failed')
203
+ throw e('Push failed', push_err)
204
+ }
172
205
 
173
206
  spinner.succeed(`Published ${workspace.slug}/${manifest.name}@${manifest.version}`)
174
207
 
175
- const full_name = `${workspace.slug}/${manifest.name}`
176
- const project_root = find_project_root()
177
- const [lock_err, lock_data] = await read_lock(project_root)
208
+ // Update lock file: set base_commit and base_integrity to new values
209
+ const full_name = full_name_pre
178
210
  if (!lock_err && lock_data) {
179
211
  const all_skills = get_all_locked_skills(lock_data)
180
212
  const suffix = `/${skill_name}`
@@ -185,7 +217,9 @@ const run = (args) => catch_errors('Publish failed', async () => {
185
217
  ...all_skills[lock_key],
186
218
  version: manifest.version,
187
219
  ref: push_data?.ref || `refs/tags/v${manifest.version}`,
188
- commit: push_data?.commit || null
220
+ commit: push_data?.commit || null,
221
+ base_commit: push_data?.commit || null,
222
+ base_integrity: (!hash_err && integrity) ? integrity : null
189
223
  }
190
224
  if (!hash_err && integrity) updated_entry.integrity = integrity
191
225
  const updated_skills = update_lock_skills(lock_data, { [lock_key]: updated_entry })
@@ -196,6 +230,8 @@ const run = (args) => catch_errors('Publish failed', async () => {
196
230
  ref: push_data?.ref || `refs/tags/v${manifest.version}`,
197
231
  commit: push_data?.commit || null,
198
232
  integrity: (!hash_err && integrity) ? integrity : null,
233
+ base_commit: push_data?.commit || null,
234
+ base_integrity: (!hash_err && integrity) ? integrity : null,
199
235
  requested_by: ['__root__'],
200
236
  dependencies: manifest.dependencies || {}
201
237
  }
@@ -0,0 +1,153 @@
1
+ const { error: { catch_errors, wrap_errors: e } } = require('puffy-core')
2
+ const { read_lock, get_all_locked_skills } = require('../lock/reader')
3
+ const { detect_status } = require('../merge/detector')
4
+ const repos_api = require('../api/repos')
5
+ const { print_help, print_info, print_json } = require('../ui/output')
6
+ const { exit_with_error, UsageError } = require('../utils/errors')
7
+ const { find_project_root, lock_root, skills_dir, skill_install_dir } = require('../config/paths')
8
+ const { EXIT_CODES } = require('../constants')
9
+
10
+ const HELP_TEXT = `Usage: happyskills status [owner/skill] [options]
11
+
12
+ Show divergence status for installed skills.
13
+
14
+ Arguments:
15
+ owner/skill Check specific skill (optional, defaults to all)
16
+
17
+ Options:
18
+ -g, --global Check globally installed skills
19
+ --json Output as JSON
20
+
21
+ Aliases: st
22
+
23
+ Examples:
24
+ happyskills status
25
+ happyskills st acme/deploy-aws
26
+ happyskills status --json`
27
+
28
+ const classify = (local_modified, remote_updated) => {
29
+ if (local_modified && remote_updated) return 'diverged'
30
+ if (local_modified) return 'modified'
31
+ if (remote_updated) return 'outdated'
32
+ return 'clean'
33
+ }
34
+
35
+ const run = (args) => catch_errors('Status failed', async () => {
36
+ if (args.flags._show_help) {
37
+ print_help(HELP_TEXT)
38
+ return process.exit(EXIT_CODES.SUCCESS)
39
+ }
40
+
41
+ const project_root = find_project_root()
42
+ const is_global = args.flags.global || false
43
+ const target_skill = args._[0]
44
+
45
+ const [lock_err, lock_data] = await read_lock(lock_root(is_global, project_root))
46
+ if (lock_err || !lock_data) {
47
+ if (args.flags.json) {
48
+ print_json({ data: { results: [] } })
49
+ return
50
+ }
51
+ print_info('No lock file found. Nothing to check.')
52
+ return
53
+ }
54
+
55
+ const all_skills = get_all_locked_skills(lock_data)
56
+ const entries = target_skill
57
+ ? [[target_skill, all_skills[target_skill]]]
58
+ : Object.entries(all_skills).filter(([, data]) => data?.requested_by?.includes('__root__'))
59
+
60
+ if (entries.length === 0) {
61
+ if (args.flags.json) {
62
+ print_json({ data: { results: [] } })
63
+ return
64
+ }
65
+ print_info('No root-level skills found.')
66
+ return
67
+ }
68
+
69
+ const base_dir = skills_dir(is_global, project_root)
70
+
71
+ // Detect local modifications for each skill
72
+ const results = []
73
+ for (const [name, data] of entries) {
74
+ if (!data) {
75
+ results.push({ skill: name, status: 'not_found', local_modified: false, remote_updated: false })
76
+ continue
77
+ }
78
+ const short_name = name.split('/')[1] || name
79
+ const dir = skill_install_dir(base_dir, short_name)
80
+ const [, det] = await detect_status(data, dir)
81
+ results.push({
82
+ skill: name,
83
+ base_version: data.version || null,
84
+ base_commit: data.base_commit || null,
85
+ local_modified: det?.local_modified || false,
86
+ // remote_updated is populated below after API call
87
+ remote_updated: false,
88
+ remote_version: null,
89
+ remote_commit: null,
90
+ status: 'clean'
91
+ })
92
+ }
93
+
94
+ // Check remote for updates
95
+ const skill_names = results.filter(r => r.status !== 'not_found').map(r => r.skill)
96
+ if (skill_names.length > 0) {
97
+ const [api_err, api_data] = await repos_api.check_updates(skill_names)
98
+ if (!api_err && api_data?.results) {
99
+ for (const r of results) {
100
+ const remote = api_data.results[r.skill]
101
+ if (!remote || remote.access_denied) continue
102
+ r.remote_version = remote.latest_version || null
103
+ r.remote_commit = remote.commit || null
104
+ // Remote is updated if the commit differs from our base
105
+ if (r.base_commit && r.remote_commit && r.base_commit !== r.remote_commit) {
106
+ r.remote_updated = true
107
+ }
108
+ }
109
+ }
110
+ }
111
+
112
+ // Classify each result
113
+ for (const r of results) {
114
+ if (r.status !== 'not_found') {
115
+ r.status = classify(r.local_modified, r.remote_updated)
116
+ }
117
+ }
118
+
119
+ if (args.flags.json) {
120
+ print_json({ data: { results } })
121
+ return
122
+ }
123
+
124
+ // Human-readable table
125
+ const col_skill = 'Skill'
126
+ const col_base = 'Base'
127
+ const col_remote = 'Remote'
128
+ const col_status = 'Status'
129
+
130
+ const rows = results.map(r => ({
131
+ skill: r.skill,
132
+ base: r.base_version || '?',
133
+ remote: r.remote_version || '?',
134
+ status: r.status === 'diverged' ? 'diverged (local + remote changes)'
135
+ : r.status === 'modified' ? 'modified (local changes)'
136
+ : r.status === 'outdated' ? 'outdated (remote changes)'
137
+ : r.status === 'not_found' ? 'not found'
138
+ : 'clean'
139
+ }))
140
+
141
+ const w_skill = Math.max(col_skill.length, ...rows.map(r => r.skill.length))
142
+ const w_base = Math.max(col_base.length, ...rows.map(r => r.base.length))
143
+ const w_remote = Math.max(col_remote.length, ...rows.map(r => r.remote.length))
144
+
145
+ const pad = (s, w) => s + ' '.repeat(Math.max(0, w - s.length))
146
+
147
+ console.log(`${pad(col_skill, w_skill)} ${pad(col_base, w_base)} ${pad(col_remote, w_remote)} ${col_status}`)
148
+ for (const r of rows) {
149
+ console.log(`${pad(r.skill, w_skill)} ${pad(r.base, w_base)} ${pad(r.remote, w_remote)} ${r.status}`)
150
+ }
151
+ }).then(([errors]) => { if (errors) { exit_with_error(errors); return } })
152
+
153
+ module.exports = { run }
@@ -1,9 +1,11 @@
1
+ const path = require('path')
1
2
  const { error: { catch_errors, wrap_errors: e } } = require('puffy-core')
2
3
  const { install } = require('../engine/installer')
3
4
  const { read_lock, get_all_locked_skills } = require('../lock/reader')
4
- const { print_help, print_success, print_info, print_json } = require('../ui/output')
5
+ const { detect_status } = require('../merge/detector')
6
+ const { print_help, print_success, print_info, print_warn, print_json } = require('../ui/output')
5
7
  const { exit_with_error, UsageError } = require('../utils/errors')
6
- const { find_project_root, lock_root } = require('../config/paths')
8
+ const { find_project_root, lock_root, skills_dir, skill_install_dir } = require('../config/paths')
7
9
  const { EXIT_CODES } = require('../constants')
8
10
 
9
11
  const HELP_TEXT = `Usage: happyskills update [owner/skill|--all] [options]
@@ -67,7 +69,21 @@ const run = (args) => catch_errors('Update failed', async () => {
67
69
  const updated = []
68
70
  const already_up_to_date = []
69
71
 
72
+ const base_dir = skills_dir(is_global, project_root)
73
+ const force = args.flags.force || false
74
+
70
75
  for (const [name, data] of to_update) {
76
+ // Check for local modifications before overwriting
77
+ if (!force && data) {
78
+ const short_name = name.split('/')[1] || name
79
+ const dir = skill_install_dir(base_dir, short_name)
80
+ const [, det] = await detect_status(data, dir)
81
+ if (det?.local_modified) {
82
+ print_warn(`${name} has local modifications. Use --force to discard, or 'happyskills pull' to merge.`)
83
+ continue
84
+ }
85
+ }
86
+
71
87
  const before_version = data?.version || null
72
88
  const [errors, result] = await install(name, options)
73
89
  if (errors) throw e(`Update ${name} failed`, errors)
package/src/constants.js CHANGED
@@ -16,7 +16,7 @@ const SKILL_TYPES = { SKILL: 'skill', KIT: 'kit' }
16
16
  const KIT_PREFIX = '_kit-'
17
17
  const VALID_SKILL_TYPES = ['skill', 'kit']
18
18
 
19
- const LOCK_VERSION = 1
19
+ const LOCK_VERSION = 2
20
20
 
21
21
  const SKILL_JSON = 'skill.json'
22
22
  const SKILL_MD = 'SKILL.md'
@@ -31,6 +31,7 @@ const COMMAND_ALIASES = {
31
31
  ls: 'list',
32
32
  s: 'search',
33
33
  r: 'refresh',
34
+ st: 'status',
34
35
  up: 'update',
35
36
  pub: 'publish',
36
37
  v: 'validate',
@@ -48,6 +49,7 @@ const COMMANDS = [
48
49
  'search',
49
50
  'check',
50
51
  'refresh',
52
+ 'status',
51
53
  'update',
52
54
  'bump',
53
55
  'publish',
@@ -168,6 +168,8 @@ const install = (skill, options = {}) => catch_errors('Install failed', async ()
168
168
  ref: pkg.ref,
169
169
  commit: pkg.commit || null,
170
170
  integrity: integrity || null,
171
+ base_commit: pkg.commit || null,
172
+ base_integrity: integrity || null,
171
173
  requested_by: pkg.skill === skill ? ['__root__'] : [skill],
172
174
  dependencies: pkg.dependencies || {},
173
175
  ...(pkg_type ? { type: pkg_type } : {}),
@@ -0,0 +1,31 @@
1
+ const { error: { catch_errors } } = require('puffy-core')
2
+ const { hash_directory } = require('../lock/integrity')
3
+
4
+ /**
5
+ * Detects whether a skill has been locally modified since install/pull.
6
+ *
7
+ * Compares the current directory hash against the base_integrity recorded
8
+ * in the lock file. If they differ, the user has local modifications.
9
+ *
10
+ * @param {object} lock_entry - Lock file entry for the skill
11
+ * @param {string} skill_dir - Absolute path to the skill directory
12
+ * @returns {[errors, { local_modified, current_integrity, base_integrity, base_commit }]}
13
+ */
14
+ const detect_status = (lock_entry, skill_dir) => catch_errors('Failed to detect status', async () => {
15
+ const base_commit = lock_entry?.base_commit || null
16
+ const base_integrity = lock_entry?.base_integrity || null
17
+
18
+ if (!base_integrity) return { local_modified: false, current_integrity: null, base_integrity: null, base_commit }
19
+
20
+ const [hash_err, current_integrity] = await hash_directory(skill_dir)
21
+ if (hash_err) return { local_modified: false, current_integrity: null, base_integrity, base_commit }
22
+
23
+ return {
24
+ local_modified: current_integrity !== base_integrity,
25
+ current_integrity,
26
+ base_integrity,
27
+ base_commit
28
+ }
29
+ })
30
+
31
+ module.exports = { detect_status }
@@ -0,0 +1,77 @@
1
+ const { describe, it, afterEach } = require('node:test')
2
+ const assert = require('node:assert/strict')
3
+ const fs = require('fs')
4
+ const path = require('path')
5
+ const os = require('os')
6
+ const { detect_status } = require('./detector')
7
+ const { hash_directory } = require('../lock/integrity')
8
+
9
+ const make_tmp = () => {
10
+ const dir = fs.mkdtempSync(path.join(os.tmpdir(), 'detector-test-'))
11
+ return dir
12
+ }
13
+
14
+ const rm = (dir) => { try { fs.rmSync(dir, { recursive: true }) } catch (_) {} }
15
+
16
+ describe('detect_status', () => {
17
+ let tmp_dir
18
+
19
+ afterEach(() => { if (tmp_dir) rm(tmp_dir) })
20
+
21
+ it('returns local_modified: false when integrity matches', async () => {
22
+ tmp_dir = make_tmp()
23
+ fs.writeFileSync(path.join(tmp_dir, 'SKILL.md'), 'hello')
24
+ fs.writeFileSync(path.join(tmp_dir, 'skill.json'), '{}')
25
+
26
+ const [, integrity] = await hash_directory(tmp_dir)
27
+ const lock_entry = { base_integrity: integrity, base_commit: 'abc123' }
28
+
29
+ const [err, result] = await detect_status(lock_entry, tmp_dir)
30
+ assert.equal(err, null)
31
+ assert.equal(result.local_modified, false)
32
+ assert.equal(result.current_integrity, integrity)
33
+ assert.equal(result.base_commit, 'abc123')
34
+ })
35
+
36
+ it('returns local_modified: true when integrity differs', async () => {
37
+ tmp_dir = make_tmp()
38
+ fs.writeFileSync(path.join(tmp_dir, 'SKILL.md'), 'hello')
39
+
40
+ const [, integrity] = await hash_directory(tmp_dir)
41
+
42
+ // Modify the file
43
+ fs.writeFileSync(path.join(tmp_dir, 'SKILL.md'), 'modified')
44
+
45
+ const lock_entry = { base_integrity: integrity, base_commit: 'abc123' }
46
+ const [err, result] = await detect_status(lock_entry, tmp_dir)
47
+ assert.equal(err, null)
48
+ assert.equal(result.local_modified, true)
49
+ assert.notEqual(result.current_integrity, integrity)
50
+ })
51
+
52
+ it('returns local_modified: false when base_integrity is missing', async () => {
53
+ tmp_dir = make_tmp()
54
+ fs.writeFileSync(path.join(tmp_dir, 'SKILL.md'), 'hello')
55
+
56
+ const lock_entry = { base_commit: 'abc123' }
57
+ const [err, result] = await detect_status(lock_entry, tmp_dir)
58
+ assert.equal(err, null)
59
+ assert.equal(result.local_modified, false)
60
+ assert.equal(result.base_integrity, null)
61
+ })
62
+
63
+ it('handles null lock_entry gracefully', async () => {
64
+ tmp_dir = make_tmp()
65
+ const [err, result] = await detect_status(null, tmp_dir)
66
+ assert.equal(err, null)
67
+ assert.equal(result.local_modified, false)
68
+ assert.equal(result.base_commit, null)
69
+ })
70
+
71
+ it('handles nonexistent skill dir without crashing', async () => {
72
+ const lock_entry = { base_integrity: 'sha256-abc', base_commit: 'abc123' }
73
+ const [err, result] = await detect_status(lock_entry, '/tmp/nonexistent-dir-12345')
74
+ assert.equal(err, null)
75
+ assert.equal(result.local_modified, false)
76
+ })
77
+ })
@@ -1,7 +1,7 @@
1
- const crypto = require('crypto')
2
1
  const fs = require('fs')
3
2
  const path = require('path')
4
3
  const { error: { catch_errors, wrap_errors: e } } = require('puffy-core')
4
+ const { hash_blob } = require('./git_hash')
5
5
 
6
6
  const EXCLUDE_PATTERNS = [
7
7
  'node_modules',
@@ -38,7 +38,7 @@ const collect_files = (dir, base_dir) => catch_errors('Failed to collect files',
38
38
  files.push(...sub_files)
39
39
  } else if (entry.isFile()) {
40
40
  const content = await fs.promises.readFile(full_path)
41
- const sha = crypto.createHash('sha256').update(content).digest('hex')
41
+ const sha = hash_blob(content)
42
42
  files.push({ path: rel_path, content: content.toString('base64'), size: content.length, sha })
43
43
  }
44
44
  }
@@ -0,0 +1,9 @@
1
+ const crypto = require('crypto')
2
+
3
+ const hash_blob = (buf) => {
4
+ if (!Buffer.isBuffer(buf)) buf = Buffer.from(buf)
5
+ const header = Buffer.from(`blob ${buf.length}\0`)
6
+ return crypto.createHash('sha256').update(header).update(buf).digest('hex')
7
+ }
8
+
9
+ module.exports = { hash_blob }
@@ -0,0 +1,49 @@
1
+ const { describe, it } = require('node:test')
2
+ const assert = require('node:assert/strict')
3
+ const crypto = require('crypto')
4
+ const { hash_blob } = require('./git_hash')
5
+
6
+ describe('hash_blob', () => {
7
+ it('produces sha256 with git blob header', () => {
8
+ const content = Buffer.from('hello world')
9
+ const expected = crypto.createHash('sha256')
10
+ .update(`blob ${content.length}\0`)
11
+ .update(content)
12
+ .digest('hex')
13
+ assert.equal(hash_blob(content), expected)
14
+ })
15
+
16
+ it('accepts string input', () => {
17
+ const from_string = hash_blob('hello')
18
+ const from_buffer = hash_blob(Buffer.from('hello'))
19
+ assert.equal(from_string, from_buffer)
20
+ })
21
+
22
+ it('handles empty content', () => {
23
+ const sha = hash_blob(Buffer.alloc(0))
24
+ assert.equal(typeof sha, 'string')
25
+ assert.equal(sha.length, 64)
26
+ })
27
+
28
+ it('handles binary content', () => {
29
+ const buf = Buffer.from([0x00, 0xff, 0x80, 0x01])
30
+ const sha = hash_blob(buf)
31
+ assert.equal(typeof sha, 'string')
32
+ assert.equal(sha.length, 64)
33
+ })
34
+
35
+ it('handles unicode content', () => {
36
+ const sha = hash_blob(Buffer.from('日本語テスト'))
37
+ assert.equal(typeof sha, 'string')
38
+ assert.equal(sha.length, 64)
39
+ })
40
+
41
+ it('matches api/app/utils/git_objects hash_blob', () => {
42
+ // CLI and API must produce identical SHAs for the same input
43
+ const api_hash_blob = require('../../../api/app/utils/git_objects').hash_blob
44
+ const inputs = ['hello', '', 'binary\x00data', '日本語']
45
+ for (const input of inputs) {
46
+ assert.equal(hash_blob(input), api_hash_blob(input))
47
+ }
48
+ })
49
+ })