packattest 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +248 -0
- package/cli/commands/publish.js +92 -0
- package/cli/commands/review.js +190 -0
- package/cli/commands/verify.js +92 -0
- package/cli/index.js +30 -0
- package/cli/lib/artifact.js +95 -0
- package/cli/lib/attestation.js +54 -0
- package/cli/lib/diff.js +29 -0
- package/cli/lib/policy.js +38 -0
- package/cli/lib/registry.js +62 -0
- package/cli/lib/repack.js +73 -0
- package/cli/lib/ui.js +35 -0
- package/package.json +44 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 PackAttest contributors
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,248 @@
|
|
|
1
|
+
# PackAttest
|
|
2
|
+
|
|
3
|
+
> No file is published unless it is seen, selected, and confirmed.
|
|
4
|
+
|
|
5
|
+
Review the exact npm package artifact before publish.
|
|
6
|
+
PackAttest shows the final tarball, highlights what changed since the previous release, and publishes only the files you explicitly approve.
|
|
7
|
+
|
|
8
|
+
## Quick Start
|
|
9
|
+
|
|
10
|
+
```bash
|
|
11
|
+
git clone https://github.com/Divohna/PackAttest.git
|
|
12
|
+
cd <repo-dir>
|
|
13
|
+
npm install
|
|
14
|
+
npm install -g .
|
|
15
|
+
cd /path/to/your-package
|
|
16
|
+
pa review
|
|
17
|
+
```
|
|
18
|
+
|
|
19
|
+
Typical flow:
|
|
20
|
+
|
|
21
|
+
- run `pa review` before releasing
|
|
22
|
+
- inspect the exact packaged files and warnings
|
|
23
|
+
- select only the files you want shipped
|
|
24
|
+
- commit the resulting `.packattest` file for CI
|
|
25
|
+
- run `pa publish` locally or `pa verify` in CI
|
|
26
|
+
|
|
27
|
+
## Demo
|
|
28
|
+
|
|
29
|
+
Example review session:
|
|
30
|
+
|
|
31
|
+
```text
|
|
32
|
+
PackAttest — review
|
|
33
|
+
|
|
34
|
+
Packing artifact... your-package-1.4.0.tgz
|
|
35
|
+
Fetching previous published version... v1.3.9
|
|
36
|
+
|
|
37
|
+
Diff vs previous: 2 added, 1 modified, 8 unchanged
|
|
38
|
+
|
|
39
|
+
? Select files to publish (Space to toggle, A to select all, Enter to confirm)
|
|
40
|
+
❯ ◯ + package/dist/index.js (18.4KB)
|
|
41
|
+
◯ + package/dist/index.js.map (41.2KB) [source map]
|
|
42
|
+
◯ + package/.env.production (0.6KB) [env file]
|
|
43
|
+
◯ ~ package/package.json (0.8KB)
|
|
44
|
+
◯ package/README.md (5.6KB)
|
|
45
|
+
|
|
46
|
+
? Type "publish 3 files" to confirm › publish 3 files
|
|
47
|
+
|
|
48
|
+
Attestation written: .packattest
|
|
49
|
+
? Publish now? › Yes
|
|
50
|
+
|
|
51
|
+
Repacking constrained artifact... ok
|
|
52
|
+
Publishing...
|
|
53
|
+
Done.
|
|
54
|
+
```
|
|
55
|
+
|
|
56
|
+
The point is not to guess what is safe. The point is to force review of the actual package payload before it leaves your machine.
|
|
57
|
+
|
|
58
|
+
## Problem
|
|
59
|
+
|
|
60
|
+
Modern package publishing workflows rely on ignore files and implicit inclusion rules.
|
|
61
|
+
That is fragile for three reasons:
|
|
62
|
+
|
|
63
|
+
1. ignore rules are easy to forget or misconfigure
|
|
64
|
+
2. they are defined before the final build artifact exists
|
|
65
|
+
3. they do not force review of what will actually be published
|
|
66
|
+
|
|
67
|
+
As a result, accidental leaks often happen at the artifact stage rather than the source stage.
|
|
68
|
+
|
|
69
|
+
## Solution
|
|
70
|
+
|
|
71
|
+
PackAttest is a publish-time verification layer for package release workflows.
|
|
72
|
+
|
|
73
|
+
Before a package is published, PackAttest:
|
|
74
|
+
|
|
75
|
+
- enumerates the exact files in the final artifact
|
|
76
|
+
- shows the full file list to the user
|
|
77
|
+
- requires explicit user selection of files to publish
|
|
78
|
+
- compares the current artifact against the previous published version
|
|
79
|
+
- blocks publish if changes have not been reviewed
|
|
80
|
+
|
|
81
|
+
## Core Principles
|
|
82
|
+
|
|
83
|
+
1. **Artifact is truth**
|
|
84
|
+
Decisions are based on the final package contents, not source-tree assumptions.
|
|
85
|
+
|
|
86
|
+
2. **No implicit inclusion**
|
|
87
|
+
Nothing is published merely because it exists in a folder or matched an old rule.
|
|
88
|
+
|
|
89
|
+
3. **Explicit human intent**
|
|
90
|
+
The user must actively choose what to publish.
|
|
91
|
+
|
|
92
|
+
4. **Diff-based review**
|
|
93
|
+
Review effort should focus on what changed since the previous published version.
|
|
94
|
+
|
|
95
|
+
5. **Federated trust**
|
|
96
|
+
No single baseline file is trusted on its own. Decisions combine:
|
|
97
|
+
- previous published artifact
|
|
98
|
+
- current artifact
|
|
99
|
+
- explicit user selection
|
|
100
|
+
- policy checks
|
|
101
|
+
|
|
102
|
+
## Installation
|
|
103
|
+
|
|
104
|
+
Install from source:
|
|
105
|
+
|
|
106
|
+
```bash
|
|
107
|
+
git clone https://github.com/Divohna/PackAttest.git
|
|
108
|
+
cd <repo-dir>
|
|
109
|
+
npm install
|
|
110
|
+
npm install -g .
|
|
111
|
+
```
|
|
112
|
+
|
|
113
|
+
## Commands
|
|
114
|
+
|
|
115
|
+
| Command | Description |
|
|
116
|
+
|---|---|
|
|
117
|
+
| `pa review` | Enumerate artifact, diff against previous release, interactively select files, write attestation |
|
|
118
|
+
| `pa publish` | Verify the current artifact against an existing `.packattest`, repack the selected files, and publish |
|
|
119
|
+
| `pa verify` | CI mode: run the same verification, repack, and publish flow with step-by-step log output |
|
|
120
|
+
|
|
121
|
+
## How It Works
|
|
122
|
+
|
|
123
|
+
Run `pa review` from your package directory to inspect the exact artifact that `npm pack` would publish.
|
|
124
|
+
|
|
125
|
+
The `.packattest` file records the exact artifact hash, selected files, reviewer identity, and source commit. Commit it to your repository for CI use.
|
|
126
|
+
|
|
127
|
+
### CI mode
|
|
128
|
+
|
|
129
|
+
Add to your release workflow:
|
|
130
|
+
|
|
131
|
+
```bash
|
|
132
|
+
pa verify
|
|
133
|
+
```
|
|
134
|
+
|
|
135
|
+
`pa verify` re-packs the artifact, checks its hash against the `.packattest` attestation, repacks the constrained artifact, and publishes. It fails hard if the artifact has changed since the last `pa review`.
|
|
136
|
+
|
|
137
|
+
`pa publish` performs the same guarded publish flow locally, but with simpler interactive-oriented output.
|
|
138
|
+
|
|
139
|
+
## Why This Model Is Different
|
|
140
|
+
|
|
141
|
+
Traditional systems trust configuration.
|
|
142
|
+
PackAttest trusts reality and requires the user to confirm intent.
|
|
143
|
+
|
|
144
|
+
That means:
|
|
145
|
+
|
|
146
|
+
- the system may enumerate files automatically
|
|
147
|
+
- the system must not silently choose files on the user's behalf
|
|
148
|
+
- the system must not pre-select files by default
|
|
149
|
+
- the system must block unreviewed additions
|
|
150
|
+
|
|
151
|
+
## Security Properties
|
|
152
|
+
|
|
153
|
+
PackAttest is designed to prevent:
|
|
154
|
+
|
|
155
|
+
- accidental inclusion of source maps
|
|
156
|
+
- accidental inclusion of archives or debug files
|
|
157
|
+
- drift in build outputs
|
|
158
|
+
- silent scope expansion between releases
|
|
159
|
+
|
|
160
|
+
PackAttest does not claim to prevent:
|
|
161
|
+
|
|
162
|
+
- deliberate malicious publishing
|
|
163
|
+
- secrets embedded inside an explicitly approved file
|
|
164
|
+
- compromise of the developer workstation
|
|
165
|
+
|
|
166
|
+
## MVP Scope
|
|
167
|
+
|
|
168
|
+
Version 0.1.0 is a proof of concept focused on:
|
|
169
|
+
|
|
170
|
+
- `npm pack` integration
|
|
171
|
+
- file enumeration
|
|
172
|
+
- diff against previous published package
|
|
173
|
+
- interactive file selection
|
|
174
|
+
- publish confirmation
|
|
175
|
+
- blocking unselected files
|
|
176
|
+
|
|
177
|
+
## Repository Layout
|
|
178
|
+
|
|
179
|
+
```text
|
|
180
|
+
packattest/
|
|
181
|
+
├── README.md
|
|
182
|
+
├── LICENSE
|
|
183
|
+
├── package.json
|
|
184
|
+
├── .packattest ← written by pa review, commit for CI use
|
|
185
|
+
├── docs/
|
|
186
|
+
│ ├── RFC.md
|
|
187
|
+
│ └── design.md
|
|
188
|
+
├── test/
|
|
189
|
+
│ ├── helpers.js
|
|
190
|
+
│ ├── artifact.test.js
|
|
191
|
+
│ ├── attestation.test.js
|
|
192
|
+
│ ├── diff.test.js
|
|
193
|
+
│ ├── policy.test.js
|
|
194
|
+
│ ├── repack.test.js
|
|
195
|
+
│ └── ui.test.js
|
|
196
|
+
└── cli/
|
|
197
|
+
├── index.js
|
|
198
|
+
├── commands/
|
|
199
|
+
│ ├── review.js
|
|
200
|
+
│ ├── publish.js
|
|
201
|
+
│ └── verify.js
|
|
202
|
+
└── lib/
|
|
203
|
+
├── artifact.js
|
|
204
|
+
├── attestation.js
|
|
205
|
+
├── diff.js
|
|
206
|
+
├── policy.js
|
|
207
|
+
├── registry.js
|
|
208
|
+
├── repack.js
|
|
209
|
+
└── ui.js
|
|
210
|
+
```
|
|
211
|
+
|
|
212
|
+
## Status
|
|
213
|
+
|
|
214
|
+
The CLI is fully implemented. All three commands (`pa review`, `pa publish`, `pa verify`) are functional.
|
|
215
|
+
|
|
216
|
+
Current version: **v0.1.0**
|
|
217
|
+
|
|
218
|
+
## Contributing
|
|
219
|
+
|
|
220
|
+
Feedback, critiques, design objections, and implementation help are welcome.
|
|
221
|
+
|
|
222
|
+
### Development setup
|
|
223
|
+
|
|
224
|
+
```bash
|
|
225
|
+
git clone https://github.com/Divohna/PackAttest.git
|
|
226
|
+
cd <repo-dir>
|
|
227
|
+
npm install
|
|
228
|
+
```
|
|
229
|
+
|
|
230
|
+
Run tests:
|
|
231
|
+
|
|
232
|
+
```bash
|
|
233
|
+
npm test
|
|
234
|
+
```
|
|
235
|
+
|
|
236
|
+
Run the CLI locally without installing globally:
|
|
237
|
+
|
|
238
|
+
```bash
|
|
239
|
+
node cli/index.js review
|
|
240
|
+
node cli/index.js publish
|
|
241
|
+
node cli/index.js verify
|
|
242
|
+
```
|
|
243
|
+
|
|
244
|
+
After making changes, regenerate the `.packattest` attestation by running `pa review` (or `node cli/index.js review`) from the repo root and committing the updated file.
|
|
245
|
+
|
|
246
|
+
## License
|
|
247
|
+
|
|
248
|
+
MIT
|
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const path = require('path');
|
|
4
|
+
const os = require('os');
|
|
5
|
+
const fs = require('fs');
|
|
6
|
+
const { execSync } = require('child_process');
|
|
7
|
+
const { read: readAttestation } = require('../lib/attestation');
|
|
8
|
+
const { pack, enumerate, canonicalHash } = require('../lib/artifact');
|
|
9
|
+
const { repack } = require('../lib/repack');
|
|
10
|
+
|
|
11
|
+
/**
 * Verify the current artifact against the committed .packattest attestation,
 * repack only the attested files, and publish the constrained tarball.
 *
 * @param {{ otp?: string }} [options] - optional npm one-time password for 2FA.
 * @returns {Promise<void>} exits the process with code 1 on any failure.
 */
async function publish({ otp } = {}) {
  // Local require: execFileSync takes arguments as an array, so the tarball
  // path and OTP are never interpolated into a shell string (avoids
  // shell-injection and quoting issues present with execSync + template).
  const { execFileSync } = require('child_process');
  const cwd = process.cwd();

  // Remove a temp file, ignoring errors (it may not exist yet).
  const cleanup = (p) => { if (p) { try { fs.unlinkSync(p); } catch {} } };
  // `finally` blocks do NOT run after process.exit(), so failure paths must
  // clean up explicitly before exiting or temp tarballs would leak.
  const fail = (paths) => { paths.forEach(cleanup); process.exit(1); };

  console.log('PackAttest — publish\n');

  // Load the attestation written by `pa review`.
  const attestation = readAttestation(cwd);
  if (!attestation) {
    console.error('No .packattest file found. Run `pa review` first.');
    process.exit(1);
  }

  console.log(`Attested by: ${attestation.reviewer}`);
  console.log(`Attested at: ${attestation.reviewed_at}`);
  console.log(`Attested hash: ${attestation.artifact_hash}`);
  console.log(`Files selected: ${attestation.selected_files.length}\n`);

  // Pack + enumerate the current artifact exactly as npm would publish it.
  process.stdout.write('Packing current artifact... ');
  let tarballPath;
  try {
    tarballPath = await pack(cwd);
  } catch (err) {
    console.error('\nnpm pack failed:', err.message);
    process.exit(1);
  }
  console.log(path.relative(cwd, tarballPath));

  const entries = await enumerate(tarballPath);
  const hash = canonicalHash(entries);

  // The canonical hash must match what the reviewer attested; otherwise the
  // artifact drifted since review and must be re-reviewed.
  process.stdout.write('Verifying artifact hash... ');
  if (hash !== attestation.artifact_hash) {
    console.error('\nFAIL: artifact has changed since review.');
    console.error(`  Attested: ${attestation.artifact_hash}`);
    console.error(`  Current: ${hash}`);
    console.error('\nRun `pa review` again.');
    fail([tarballPath]);
  }
  console.log('ok');

  // Every attested file must still exist in the freshly packed artifact.
  const entryPaths = new Set(entries.map(e => e.path));
  for (const f of attestation.selected_files) {
    if (!entryPaths.has(f)) {
      console.error(`\nFAIL: attested file not found in artifact: ${f}`);
      fail([tarballPath]);
    }
  }

  // Repack a constrained tarball containing only the attested files.
  const tmpPath = path.join(os.tmpdir(), `packattest-${Date.now()}.tgz`);
  process.stdout.write('Repacking constrained artifact... ');
  try {
    await repack(entries, attestation.selected_files, tmpPath);
  } catch (err) {
    console.error('\nRepack failed:', err.message);
    fail([tarballPath, tmpPath]);
  }
  console.log('ok');

  // Publish the constrained artifact.
  console.log('Publishing...\n');
  const args = ['publish', tmpPath];
  if (otp) args.push('--otp', otp);
  try {
    execFileSync('npm', args, { stdio: 'inherit', cwd });
  } catch {
    console.error('\nPublish failed.');
    fail([tmpPath, tarballPath]);
  }
  cleanup(tmpPath);
  cleanup(tarballPath);

  console.log('\nDone.');
}
|
|
91
|
+
|
|
92
|
+
module.exports = publish;
|
|
@@ -0,0 +1,190 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const fs = require('fs');
|
|
4
|
+
const path = require('path');
|
|
5
|
+
const prompts = require('prompts');
|
|
6
|
+
const { pack, enumerate, canonicalHash } = require('../lib/artifact');
|
|
7
|
+
const { computeDiff } = require('../lib/diff');
|
|
8
|
+
const { checkPolicy } = require('../lib/policy');
|
|
9
|
+
const { write: writeAttestation } = require('../lib/attestation');
|
|
10
|
+
const { repack } = require('../lib/repack');
|
|
11
|
+
const { fetchPreviousArtifact } = require('../lib/registry');
|
|
12
|
+
const { choiceTitle, printDiffSummary } = require('../lib/ui');
|
|
13
|
+
|
|
14
|
+
/**
 * Interactive review flow: pack the artifact, diff it against the previous
 * published version, let the user pick files explicitly, write a .packattest
 * attestation, and optionally publish immediately.
 *
 * @param {{ otp?: string }} [options] - optional npm one-time password,
 *   forwarded to the publish step.
 * @returns {Promise<void>} exits the process on abort or failure.
 */
async function review({ otp } = {}) {
  const cwd = process.cwd();
  const pkgJson = require(path.join(cwd, 'package.json'));

  // Temp files created along the way; best-effort cleanup when the process
  // exits on any path (the 'exit' event also fires on process.exit calls).
  const tempPaths = [];
  process.on('exit', () => {
    for (const file of tempPaths) {
      try { fs.unlinkSync(file); } catch {}
    }
  });

  // Shared abort behavior for prompts that should cancel the review.
  const exitOnAbort = (state) => {
    if (state.aborted) {
      console.log('\nAborted.');
      process.exit(1);
    }
  };

  console.log('PackAttest — review\n');

  // 1. Pack the artifact exactly as npm would.
  process.stdout.write('Packing artifact... ');
  let tarballPath;
  try {
    tarballPath = await pack(cwd);
  } catch (err) {
    console.error('\nnpm pack failed:', err.message);
    process.exit(1);
  }
  tempPaths.push(tarballPath);
  console.log(path.relative(cwd, tarballPath));

  // 2. Enumerate contents and compute the canonical hash.
  const entries = await enumerate(tarballPath);
  const hash = canonicalHash(entries);

  // 3. Fetch the previously published artifact; a registry failure aborts so
  //    the diff-based review step can never be silently skipped.
  process.stdout.write('Fetching previous published version... ');
  let prev;
  try {
    prev = await fetchPreviousArtifact(pkgJson.name);
  } catch (err) {
    console.error(`\nRegistry lookup failed: ${err.message}`);
    console.error('Cannot verify diff against previous version. Aborting to avoid skipping review.');
    process.exit(1);
  }

  let diffEntries;
  if (prev) {
    tempPaths.push(prev.path);
    console.log(`v${prev.version}`);
    const prevEntries = await enumerate(prev.path);
    diffEntries = computeDiff(entries, prevEntries);
    printDiffSummary(diffEntries);
  } else {
    // No previous release: treat every file as newly added.
    console.log('not found (first-publish mode)');
    diffEntries = entries.map(e => ({ ...e, status: 'added' }));
  }

  // 4. Policy checks, keyed by file path.
  const policyMap = new Map();
  for (const entry of entries) {
    const warnings = checkPolicy(entry);
    if (warnings.length) policyMap.set(entry.path, warnings);
  }

  // 5. Build multiselect choices from current-artifact files only.
  const current = diffEntries.filter(e => e.status !== 'removed');
  if (current.length === 0) {
    console.log('\nNo files in artifact.');
    process.exit(0);
  }

  const choices = current.map(entry => ({
    title: choiceTitle(entry, policyMap.get(entry.path) || []),
    value: entry.path,
  }));

  // 6. Interactive file selection — nothing is pre-selected.
  console.log('');
  const { selectedPaths } = await prompts({
    type: 'multiselect',
    name: 'selectedPaths',
    message: 'Select files to publish',
    choices,
    hint: '- Space to toggle, A to select all, Enter to confirm',
    min: 1,
    onState: exitOnAbort,
  });

  if (!selectedPaths || selectedPaths.length === 0) {
    console.log('No files selected. Aborting.');
    process.exit(1);
  }

  // 7. Confirmation phrase: the user must type the exact file count back.
  const n = selectedPaths.length;
  const expected = `publish ${n} file${n === 1 ? '' : 's'}`;

  console.log(`\nReady to publish ${n} file${n === 1 ? '' : 's'}:\n`);
  for (const p of selectedPaths) console.log(`  ${p}`);
  console.log('');

  const { confirmation } = await prompts({
    type: 'text',
    name: 'confirmation',
    message: `Type "${expected}" to confirm`,
    onState: exitOnAbort,
  });

  if (confirmation !== expected) {
    console.log('Confirmation did not match. Aborting.');
    process.exit(1);
  }

  // 8. Persist the attestation next to package.json.
  const { path: attPath } = writeAttestation(cwd, {
    packageName: pkgJson.name,
    packageVersion: pkgJson.version,
    artifactHash: hash,
    selectedFiles: selectedPaths,
  });
  console.log(`\nAttestation written: ${path.relative(cwd, attPath)}`);

  // 9. Offer to publish now; aborting here is a clean exit (attestation kept).
  const { publishNow } = await prompts({
    type: 'confirm',
    name: 'publishNow',
    message: 'Publish now?',
    initial: true,
    onState(state) {
      if (state.aborted) {
        console.log('');
        process.exit(0);
      }
    },
  });

  if (!publishNow) {
    console.log('\nRun `pa publish` when ready.');
    return;
  }

  // 10. Repack the approved subset and publish it.
  await runPublish({ entries, selectedPaths, cwd, tmpFiles: tempPaths, otp });
}
|
|
162
|
+
|
|
163
|
+
/**
 * Repack only the selected files into a temporary tarball and publish it.
 *
 * @param {object} params
 * @param {Array<object>} params.entries - enumerated artifact entries.
 * @param {string[]} params.selectedPaths - attested file paths to include.
 * @param {string} params.cwd - package directory (npm publish working dir).
 * @param {string[]} params.tmpFiles - shared list of temp files; the repacked
 *   tarball is appended so the caller's process-exit handler cleans it up.
 * @param {string} [params.otp] - optional npm one-time password for 2FA.
 * @returns {Promise<void>} exits the process with code 1 on failure.
 */
async function runPublish({ entries, selectedPaths, cwd, tmpFiles, otp }) {
  const os = require('os');
  // execFileSync takes arguments as an array, so the tarball path and OTP
  // are never interpolated into a shell string (no quoting/injection risk).
  const { execFileSync } = require('child_process');
  const tmpPath = path.join(os.tmpdir(), `packattest-${Date.now()}.tgz`);
  tmpFiles.push(tmpPath);

  process.stdout.write('\nRepacking constrained artifact... ');
  try {
    await repack(entries, selectedPaths, tmpPath);
  } catch (err) {
    console.error('\nRepack failed:', err.message);
    process.exit(1);
  }
  console.log('ok');

  console.log('Publishing...\n');
  const args = ['publish', tmpPath];
  if (otp) args.push('--otp', otp);
  try {
    execFileSync('npm', args, { stdio: 'inherit', cwd });
  } catch {
    console.error('\nPublish failed.');
    process.exit(1);
  }

  console.log('\nDone.');
}
|
|
189
|
+
|
|
190
|
+
module.exports = review;
|
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const path = require('path');
|
|
4
|
+
const os = require('os');
|
|
5
|
+
const fs = require('fs');
|
|
6
|
+
const { execSync } = require('child_process');
|
|
7
|
+
const { read: readAttestation } = require('../lib/attestation');
|
|
8
|
+
const { pack, enumerate, canonicalHash } = require('../lib/artifact');
|
|
9
|
+
const { repack } = require('../lib/repack');
|
|
10
|
+
|
|
11
|
+
/**
 * CI-mode guarded publish: verify the freshly packed artifact against the
 * committed .packattest attestation, repack only the attested files, and
 * publish, with step-by-step log output.
 *
 * @param {{ otp?: string }} [options] - optional npm one-time password for 2FA.
 * @returns {Promise<void>} exits the process with code 1 on any failure.
 */
async function verify({ otp } = {}) {
  // Local require: execFileSync takes arguments as an array, so the tarball
  // path and OTP are never interpolated into a shell string.
  const { execFileSync } = require('child_process');
  const cwd = process.cwd();

  // Remove a temp file, ignoring errors (it may not exist yet).
  const cleanup = (p) => { if (p) { try { fs.unlinkSync(p); } catch {} } };
  // `finally` blocks do NOT run after process.exit(), so failure paths must
  // clean up explicitly before exiting or temp tarballs would leak.
  const fail = (paths) => { paths.forEach(cleanup); process.exit(1); };

  console.log('PackAttest — verify (CI mode)\n');

  const attestation = readAttestation(cwd);
  if (!attestation) {
    console.error('FAIL: no .packattest attestation found.');
    process.exit(1);
  }

  console.log(`Package: ${attestation.package_name}@${attestation.package_version}`);
  console.log(`Attested by: ${attestation.reviewer}`);
  console.log(`Attested at: ${attestation.reviewed_at}`);
  console.log(`Source commit: ${attestation.source_commit}`);
  console.log(`Files selected: ${attestation.selected_files.length}\n`);

  // Step 1: pack + enumerate the artifact exactly as npm would publish it.
  process.stdout.write('[1/5] Packing artifact... ');
  let tarballPath;
  try {
    tarballPath = await pack(cwd);
  } catch (err) {
    console.error('\nFAIL: npm pack failed:', err.message);
    process.exit(1);
  }
  console.log('ok');

  const entries = await enumerate(tarballPath);
  const hash = canonicalHash(entries);

  // Step 2: the canonical hash must match the attested hash.
  process.stdout.write('[2/5] Verifying artifact hash... ');
  if (hash !== attestation.artifact_hash) {
    console.error('\nFAIL: artifact has changed since review.');
    console.error(`  Attested: ${attestation.artifact_hash}`);
    console.error(`  Current: ${hash}`);
    fail([tarballPath]);
  }
  console.log('ok');

  // Step 3: every attested file must exist in the artifact.
  process.stdout.write('[3/5] Verifying selected files exist... ');
  const entryPaths = new Set(entries.map(e => e.path));
  for (const f of attestation.selected_files) {
    if (!entryPaths.has(f)) {
      console.error(`\nFAIL: attested file missing from artifact: ${f}`);
      fail([tarballPath]);
    }
  }
  console.log('ok');

  // Step 4: repack a constrained tarball with only the attested files.
  const tmpPath = path.join(os.tmpdir(), `packattest-ci-${Date.now()}.tgz`);
  process.stdout.write('[4/5] Repacking constrained artifact... ');
  try {
    await repack(entries, attestation.selected_files, tmpPath);
  } catch (err) {
    console.error('\nFAIL: repack failed:', err.message);
    fail([tarballPath]);
  }
  console.log('ok');

  // Step 5: publish the constrained artifact.
  process.stdout.write('[5/5] Publishing... ');
  const args = ['publish', tmpPath];
  if (otp) args.push('--otp', otp);
  try {
    execFileSync('npm', args, { stdio: 'inherit', cwd });
  } catch {
    console.error('\nFAIL: publish failed.');
    fail([tmpPath, tarballPath]);
  }
  cleanup(tmpPath);
  cleanup(tarballPath);
  console.log('\nDone.');
}
|
|
91
|
+
|
|
92
|
+
module.exports = verify;
|
package/cli/index.js
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
#!/usr/bin/env node
'use strict';

const { program } = require('commander');
const pkg = require('../package.json');

// Load a command module lazily on invocation and surface any rejected
// promise as a CLI failure (message to stderr, exit code 1).
const runCommand = (modulePath) => (opts) =>
  require(modulePath)(opts).catch(err => { console.error(err.message); process.exit(1); });

program
  .name('pa')
  .description('Artifact review and attestation for package publishes')
  .version(pkg.version);

program
  .command('review')
  .description('Enumerate artifact, review diff, select files, write attestation')
  .option('--otp <code>', 'npm one-time password for 2FA')
  .action(runCommand('./commands/review'));

program
  .command('publish')
  .description('Verify current artifact against attestation, repack, and publish')
  .option('--otp <code>', 'npm one-time password for 2FA')
  .action(runCommand('./commands/publish'));

program
  .command('verify')
  .description('CI: same verification, repack, and publish flow with step-by-step logs')
  .option('--otp <code>', 'npm one-time password for 2FA')
  .action(runCommand('./commands/verify'));

program.parse();
|
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const { execSync } = require('child_process');
|
|
4
|
+
const fs = require('fs');
|
|
5
|
+
const path = require('path');
|
|
6
|
+
const crypto = require('crypto');
|
|
7
|
+
const zlib = require('zlib');
|
|
8
|
+
const tar = require('tar-stream');
|
|
9
|
+
|
|
10
|
+
/**
 * Run `npm pack --json` in the given directory and return the absolute path
 * of the generated tarball.
 *
 * @param {string} [cwd] - package directory; defaults to process.cwd().
 * @returns {Promise<string>} absolute path to the packed .tgz file.
 */
async function pack(cwd = process.cwd()) {
  const output = execSync('npm pack --json', {
    cwd,
    stdio: ['pipe', 'pipe', 'pipe'],
  });
  // npm reports an array of pack results; a single-package pack yields one.
  const [first] = JSON.parse(output.toString());
  return path.resolve(cwd, first.filename);
}
|
|
18
|
+
|
|
19
|
+
/**
 * Read a gzipped tarball and return one entry per packaged file, each with
 * its path, size, content sha256, raw content buffer, and tar header fields.
 *
 * Notes:
 * - symlinks are recorded with size 0 and a sha256 of the link target, so a
 *   retargeted symlink changes the manifest;
 * - entry types other than 'file' and 'symlink' are skipped.
 *
 * @param {string} tarballPath - path to a .tgz artifact.
 * @returns {Promise<Array<object>>} enumerated entries.
 */
async function enumerate(tarballPath) {
  return new Promise((resolve, reject) => {
    const collected = [];
    const extract = tar.extract();

    extract.on('entry', (header, stream, next) => {
      if (header.type === 'symlink') {
        const target = header.linkname || '';
        collected.push({
          path: header.name,
          size: 0,
          sha256: crypto.createHash('sha256').update(target).digest('hex'),
          _content: Buffer.alloc(0),
          _header: { mode: header.mode, mtime: header.mtime, uid: header.uid, gid: header.gid, uname: header.uname, gname: header.gname, type: 'symlink', linkname: target },
        });
        stream.resume();
        next();
        return;
      }

      if (header.type !== 'file') {
        // Directories, hardlinks, etc. are not part of the file manifest.
        stream.resume();
        next();
        return;
      }

      // Regular file: buffer the content while hashing it incrementally.
      const chunks = [];
      const digest = crypto.createHash('sha256');

      stream.on('data', (chunk) => {
        chunks.push(chunk);
        digest.update(chunk);
      });

      stream.on('end', () => {
        const content = Buffer.concat(chunks);
        collected.push({
          path: header.name,
          size: content.length,
          sha256: digest.digest('hex'),
          _content: content,
          _header: { mode: header.mode, mtime: header.mtime, uid: header.uid, gid: header.gid, uname: header.uname, gname: header.gname },
        });
        next();
      });

      stream.on('error', reject);
    });

    extract.on('finish', () => resolve(collected));
    extract.on('error', reject);

    fs.createReadStream(tarballPath)
      .on('error', reject)
      .pipe(zlib.createGunzip())
      .on('error', reject)
      .pipe(extract);
  });
}
|
|
79
|
+
|
|
80
|
+
/**
 * Produce the canonical, order-independent manifest for a set of entries:
 * sorted by path, reduced to the fields that define artifact identity.
 *
 * @param {Array<{path: string, size: number, sha256: string}>} entries
 * @returns {Array<{path: string, size: number, sha256: string}>}
 */
function canonicalManifest(entries) {
  const sorted = entries.slice().sort((x, y) => x.path.localeCompare(y.path));
  return sorted.map(({ path, size, sha256 }) => ({ path, size, sha256 }));
}
|
|
85
|
+
|
|
86
|
+
/**
 * Compute the canonical artifact hash: sha256 of the JSON-serialized
 * canonical manifest (entries sorted by path, reduced to path/size/sha256).
 *
 * @param {Array<{path: string, size: number, sha256: string}>} entries
 * @returns {string} hash in the form "sha256:<hex>".
 */
function canonicalHash(entries) {
  // Canonical manifest, inlined: sort by path, keep identity fields only.
  const manifest = [...entries]
    .sort((a, b) => a.path.localeCompare(b.path))
    .map(e => ({ path: e.path, size: e.size, sha256: e.sha256 }));
  const digest = crypto.createHash('sha256').update(JSON.stringify(manifest)).digest('hex');
  return 'sha256:' + digest;
}
|
|
94
|
+
|
|
95
|
+
module.exports = { pack, enumerate, canonicalManifest, canonicalHash };
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const fs = require('fs');
|
|
4
|
+
const path = require('path');
|
|
5
|
+
const { execSync } = require('child_process');
|
|
6
|
+
|
|
7
|
+
const ATTESTATION_FILE = '.packattest';
|
|
8
|
+
|
|
9
|
+
/**
 * Best-effort reviewer identity from git config, formatted "git:<name>".
 * Falls back to "git:unknown" when git is unavailable or unconfigured.
 *
 * @returns {string}
 */
function gitUser() {
  try {
    const name = execSync('git config user.name', { stdio: ['pipe', 'pipe', 'pipe'] })
      .toString()
      .trim();
    return `git:${name}`;
  } catch {
    return 'git:unknown';
  }
}
|
|
16
|
+
|
|
17
|
+
/**
 * Current git HEAD commit hash, or "unknown" outside a git repository.
 *
 * @returns {string}
 */
function sourceCommit() {
  try {
    const out = execSync('git rev-parse HEAD', { stdio: ['pipe', 'pipe', 'pipe'] });
    return out.toString().trim();
  } catch {
    return 'unknown';
  }
}
|
|
24
|
+
|
|
25
|
+
/**
 * Write a .packattest attestation record into the package directory.
 *
 * @param {string} cwd - package directory to write the attestation into.
 * @param {object} data
 * @param {string} data.packageName - name of the package being attested.
 * @param {string} data.packageVersion - version being attested.
 * @param {string} data.artifactHash - canonical hash of the reviewed artifact.
 * @param {string[]} data.selectedFiles - file paths the reviewer approved.
 * @returns {{ path: string, record: object }} destination path and record.
 */
function write(cwd, { packageName, packageVersion, artifactHash, selectedFiles }) {
  // The tool's own package.json supplies tool_version for the record.
  const toolPkg = require('../../package.json');
  const record = {
    version: 1,
    package_name: packageName,
    package_version: packageVersion,
    artifact_hash: artifactHash,
    selected_files: selectedFiles,
    reviewed_at: new Date().toISOString(),
    reviewer: gitUser(),
    source_commit: sourceCommit(),
    tool_version: toolPkg.version,
  };

  const dest = path.join(cwd, ATTESTATION_FILE);
  fs.writeFileSync(dest, `${JSON.stringify(record, null, 2)}\n`);
  return { path: dest, record };
}
|
|
43
|
+
|
|
44
|
+
/**
 * Read and parse the attestation record from <cwd>/.packattest.
 * @param {string} cwd - Directory expected to contain the record.
 * @returns {object|null} Parsed record, or null when the file is absent
 *   or its contents are not valid JSON.
 */
function read(cwd) {
  const source = path.join(cwd, ATTESTATION_FILE);
  let raw;
  try {
    raw = fs.readFileSync(source, 'utf8');
  } catch {
    return null; // missing file
  }
  try {
    return JSON.parse(raw);
  } catch {
    return null; // corrupt / non-JSON record
  }
}
|
|
53
|
+
|
|
54
|
+
// Public surface: attestation record I/O plus the on-disk filename constant.
module.exports = { write, read, ATTESTATION_FILE };
|
package/cli/lib/diff.js
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
/**
 * Compare two artifact entry lists and tag each entry with a status:
 * 'added' | 'modified' | 'unchanged' for entries present in the current
 * list, 'removed' for entries only in the previous list. Modification is
 * detected by sha256 mismatch. The result is sorted by path.
 *
 * @param {Array<{path: string, sha256: string}>} currentEntries
 * @param {Array<{path: string, sha256: string}>} previousEntries
 * @returns {Array<object>} Entries (shallow-copied) with a `status` field.
 */
function computeDiff(currentEntries, previousEntries) {
  const previousByPath = new Map(previousEntries.map(e => [e.path, e]));
  const currentPaths = new Set(currentEntries.map(e => e.path));

  const statusOf = entry => {
    const before = previousByPath.get(entry.path);
    if (!before) return 'added';
    return before.sha256 === entry.sha256 ? 'unchanged' : 'modified';
  };

  const tagged = currentEntries.map(entry => ({ ...entry, status: statusOf(entry) }));
  const removed = previousEntries
    .filter(entry => !currentPaths.has(entry.path))
    .map(entry => ({ ...entry, status: 'removed' }));

  return [...tagged, ...removed].sort((a, b) => a.path.localeCompare(b.path));
}
|
|
28
|
+
|
|
29
|
+
// Public surface: entry-level diff between two artifact listings.
module.exports = { computeDiff };
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const path = require('path');
|
|
4
|
+
|
|
5
|
+
// Review heuristics: each rule flags a path (and size) pattern that deserves
// extra scrutiny before publishing. Rule order here determines warning order.
const RULES = [
  { label: 'source map', test: p => p.endsWith('.map') },
  { label: 'archive', test: p => /\.(zip|tar|tar\.gz|tgz|rar|7z)$/i.test(p) },
  { label: 'log file', test: p => p.endsWith('.log') },
  // Matches ".env", ".env.production", etc., by basename only.
  { label: 'env file', test: p => /\.env(\.|$)/i.test(path.basename(p)) },
  { label: 'key/cert file', test: p => /\.(pem|key|p12|pfx|jks)$/i.test(p) },
  { label: 'large file (>1MB)', test: (p, size) => size > 1024 * 1024 },
];

/**
 * Evaluate every policy rule against one artifact entry.
 * @param {{path: string, size: number}} entry
 * @returns {string[]} Labels of all matching rules (empty when clean).
 */
function checkPolicy(entry) {
  const warnings = [];
  for (const { test, label } of RULES) {
    if (test(entry.path, entry.size)) warnings.push(label);
  }
  return warnings;
}
|
|
37
|
+
|
|
38
|
+
// Public surface: per-entry publish-policy evaluation.
module.exports = { checkPolicy };
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const https = require('https');
|
|
4
|
+
const fs = require('fs');
|
|
5
|
+
const path = require('path');
|
|
6
|
+
const os = require('os');
|
|
7
|
+
|
|
8
|
+
/**
 * Minimal HTTPS GET helper that buffers the full response body.
 * Follows HTTP redirects (301/302/303/307/308), resolving relative
 * Location headers against the current URL, up to a fixed hop limit.
 *
 * Fixes over the original: 303/307/308 are now followed; a redirect loop
 * can no longer recurse forever; a relative Location no longer produces an
 * invalid request; the discarded redirect response is drained so its
 * socket is released.
 *
 * @param {string} url - Absolute https:// URL to fetch.
 * @param {number} [redirectsLeft=5] - Remaining redirect hops before failing.
 * @returns {Promise<{status: number, body: Buffer}>}
 */
function httpsGet(url, redirectsLeft = 5) {
  const REDIRECT_CODES = new Set([301, 302, 303, 307, 308]);
  return new Promise((resolve, reject) => {
    https
      .get(url, { headers: { 'User-Agent': 'packattest/0.1.0' } }, res => {
        if (REDIRECT_CODES.has(res.statusCode) && res.headers.location) {
          // Drain this response so the underlying socket is freed.
          res.resume();
          if (redirectsLeft <= 0) {
            return reject(new Error(`Too many redirects fetching ${url}`));
          }
          const next = new URL(res.headers.location, url).toString();
          return httpsGet(next, redirectsLeft - 1).then(resolve).catch(reject);
        }
        const chunks = [];
        res.on('data', c => chunks.push(c));
        res.on('end', () => resolve({ status: res.statusCode, body: Buffer.concat(chunks) }));
        res.on('error', reject);
      })
      .on('error', reject);
  });
}
|
|
23
|
+
|
|
24
|
+
/**
 * Download the latest published tarball of `packageName` from the npm
 * registry into the OS temp directory, for diffing against the local build.
 *
 * @param {string} packageName - Possibly-scoped package name (e.g. "@scope/pkg").
 * @param {object} [opts]
 * @param {Function} [opts._fetch=httpsGet] - Injectable fetcher for tests;
 *   must resolve to `{ status, body }` like httpsGet.
 * @returns {Promise<{path: string, version: string}|null>} Temp-file path and
 *   version of the downloaded tarball, or null when the package has never
 *   been published (registry 404).
 * @throws {Error} On non-404 registry errors, missing tarball URL, or a
 *   failed tarball download.
 */
async function fetchPreviousArtifact(packageName, { _fetch = httpsGet } = {}) {
  // Scoped names keep their leading '@'; the rest (including the '/') is
  // percent-encoded, matching the registry's expected URL form.
  const encoded = packageName.startsWith('@')
    ? '@' + encodeURIComponent(packageName.slice(1))
    : encodeURIComponent(packageName);

  let manifest;
  try {
    const { status, body } = await _fetch(`https://registry.npmjs.org/${encoded}/latest`);
    if (status === 404) return null; // genuinely unpublished
    if (status !== 200) {
      throw new Error(`Registry returned HTTP ${status} for ${packageName}`);
    }
    manifest = JSON.parse(body.toString());
  } catch (err) {
    // Re-throw our own HTTP-status error unchanged; wrap everything else
    // (network failures, JSON parse errors) with context.
    // NOTE(review): matching by message prefix is fragile — a custom error
    // class or marker property would be safer; confirm before relying on it.
    if (err.message && err.message.startsWith('Registry returned')) throw err;
    throw new Error(`Failed to fetch registry metadata for ${packageName}: ${err.message}`);
  }

  if (!manifest?.dist?.tarball) {
    throw new Error(`Registry metadata for ${packageName}@${manifest.version} has no tarball URL`);
  }

  const version = manifest.version;
  // Sanitize the name (scope '@'/'/' included) so it is a safe filename component.
  const safe = packageName.replace(/[^a-z0-9]/gi, '-');
  const tmpPath = path.join(os.tmpdir(), `packattest-prev-${safe}-${version}.tgz`);

  try {
    const { status, body } = await _fetch(manifest.dist.tarball);
    if (status !== 200) {
      throw new Error(`Tarball download returned HTTP ${status}`);
    }
    fs.writeFileSync(tmpPath, body);
    return { path: tmpPath, version };
  } catch (err) {
    // Wraps both HTTP-status failures and filesystem write errors.
    throw new Error(`Failed to download previous tarball for ${packageName}@${version}: ${err.message}`);
  }
}
|
|
61
|
+
|
|
62
|
+
// Public surface: download of the latest published tarball for comparison.
module.exports = { fetchPreviousArtifact };
|
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const fs = require('fs');
|
|
4
|
+
const zlib = require('zlib');
|
|
5
|
+
const tar = require('tar-stream');
|
|
6
|
+
const { enumerate } = require('./artifact');
|
|
7
|
+
|
|
8
|
+
/**
 * Build a constrained tarball containing only the selected entries, then
 * re-enumerate the written file to verify it matches the selection exactly.
 *
 * Fix over the original: the gzip transform in the pipe chain had no
 * 'error' listener, so a compression failure surfaced as an unhandled
 * stream error instead of rejecting; it is now wired to reject. The
 * twice-repeated header-metadata copy is also factored into one helper.
 *
 * @param {Array<object>} entries - Entries from enumerate(); each carries its
 *   original tar header (entry._header) and, for regular files, buffered
 *   content (entry._content).
 * @param {string[]} selectedPaths - Tarball paths to keep.
 * @param {string} outputPath - Destination .tgz path.
 * @throws {Error} If a selected path is absent from `entries`, or the
 *   written artifact fails post-repack verification.
 */
async function repack(entries, selectedPaths, outputPath) {
  const selectedSet = new Set(selectedPaths);
  const selectedEntries = entries.filter(e => selectedSet.has(e.path));

  if (selectedEntries.length !== selectedPaths.length) {
    const found = new Set(selectedEntries.map(e => e.path));
    const missing = selectedPaths.filter(p => !found.has(p));
    throw new Error(`Selected files not found in artifact: ${missing.join(', ')}`);
  }

  await new Promise((resolve, reject) => {
    const pack = tar.pack();
    const gzip = zlib.createGzip();
    const out = fs.createWriteStream(outputPath);

    pack.pipe(gzip).pipe(out);
    out.on('finish', resolve);
    out.on('error', reject);
    gzip.on('error', reject); // previously unhandled: a gzip failure crashed the process
    pack.on('error', reject);

    // Copy optional metadata fields from the preserved original header.
    const copyMeta = (hdr, original) => {
      for (const field of ['mode', 'mtime', 'uid', 'gid', 'uname', 'gname']) {
        if (original[field] != null) hdr[field] = original[field];
      }
      return hdr;
    };

    // Append one entry to the pack stream; resolves when tar-stream accepts it.
    const addEntry = entry =>
      new Promise((res, rej) => {
        const done = err => (err ? rej(err) : res());
        if (entry._header && entry._header.type === 'symlink') {
          const hdr = copyMeta(
            { name: entry.path, type: 'symlink', linkname: entry._header.linkname },
            entry._header
          );
          pack.entry(hdr, done);
        } else {
          let hdr = { name: entry.path, size: entry._content.length };
          if (entry._header) hdr = copyMeta(hdr, entry._header);
          pack.entry(hdr, entry._content, done);
        }
      });

    (async () => {
      for (const entry of selectedEntries) {
        await addEntry(entry); // sequential: tar entries must be written in order
      }
      pack.finalize();
    })().catch(reject);
  });

  // Verify: re-read the artifact we just wrote and confirm the exact file set.
  const repacked = await enumerate(outputPath);
  const repackedPaths = new Set(repacked.map(e => e.path));

  for (const p of selectedPaths) {
    if (!repackedPaths.has(p)) {
      throw new Error(`Verification failed: ${p} is missing from constrained artifact`);
    }
  }
  if (repacked.length !== selectedPaths.length) {
    throw new Error(
      `Verification failed: constrained artifact has ${repacked.length} files, expected ${selectedPaths.length}`
    );
  }
}
|
|
72
|
+
|
|
73
|
+
// Public surface: constrained-tarball creation with post-write verification.
module.exports = { repack };
|
package/cli/lib/ui.js
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
/**
 * Render a byte count as a short human-readable string.
 * @param {number} bytes
 * @returns {string} e.g. "512B", "1.5KB", "2.0MB" (MB is the largest unit).
 */
function formatSize(bytes) {
  const KB = 1024;
  const MB = KB * 1024;
  if (bytes >= MB) return `${(bytes / MB).toFixed(1)}MB`;
  if (bytes >= KB) return `${(bytes / KB).toFixed(1)}KB`;
  return `${bytes}B`;
}
|
|
8
|
+
|
|
9
|
+
// One-character marker shown before each path in the selection list.
const STATUS_SYMBOL = {
  added: '+',
  modified: '~',
  removed: '-',
  unchanged: ' ',
};

/**
 * Render one selectable row: status marker, path, human-readable size, and
 * any bracketed policy warnings.
 * @param {{path: string, size: number, status: string}} entry
 * @param {string[]} warnings - Policy labels to display after the size.
 * @returns {string}
 */
function choiceTitle(entry, warnings) {
  const marker = STATUS_SYMBOL[entry.status] || ' ';
  let suffix = '';
  if (warnings.length) {
    suffix = ' ' + warnings.map(w => `[${w}]`).join(' ');
  }
  return `${marker} ${entry.path} (${formatSize(entry.size)})${suffix}`;
}
|
|
21
|
+
|
|
22
|
+
/**
 * Print a one-line summary of the diff, e.g. "2 added, 1 modified".
 * Statuses with a zero count are omitted; an empty diff prints "no changes".
 * @param {Array<{status: string}>} diffEntries - Output of computeDiff().
 */
function printDiffSummary(diffEntries) {
  const counts = { added: 0, modified: 0, removed: 0, unchanged: 0 };
  for (const entry of diffEntries) {
    counts[entry.status] = (counts[entry.status] || 0) + 1;
  }

  const parts = ['added', 'modified', 'removed', 'unchanged']
    .filter(status => counts[status])
    .map(status => `${counts[status]} ${status}`);

  console.log(`\nDiff vs previous: ${parts.join(', ') || 'no changes'}`);
}
|
|
34
|
+
|
|
35
|
+
// Public surface: terminal formatting helpers for the review UI.
module.exports = { formatSize, choiceTitle, printDiffSummary };
|
package/package.json
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "packattest",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "Artifact review and attestation for package publishes",
|
|
5
|
+
"license": "MIT",
|
|
6
|
+
"private": false,
|
|
7
|
+
"repository": {
|
|
8
|
+
"type": "git",
|
|
9
|
+
"url": "git+https://github.com/Divohna/PackAttest.git"
|
|
10
|
+
},
|
|
11
|
+
"homepage": "https://github.com/Divohna/PackAttest#readme",
|
|
12
|
+
"bugs": {
|
|
13
|
+
"url": "https://github.com/Divohna/PackAttest/issues"
|
|
14
|
+
},
|
|
15
|
+
"type": "commonjs",
|
|
16
|
+
"files": [
|
|
17
|
+
"cli/",
|
|
18
|
+
"LICENSE",
|
|
19
|
+
"README.md"
|
|
20
|
+
],
|
|
21
|
+
"bin": {
|
|
22
|
+
"pa": "./cli/index.js"
|
|
23
|
+
},
|
|
24
|
+
"scripts": {
|
|
25
|
+
"start": "node cli/index.js",
|
|
26
|
+
"test": "node --test test/*.test.js"
|
|
27
|
+
},
|
|
28
|
+
"keywords": [
|
|
29
|
+
"security",
|
|
30
|
+
"publishing",
|
|
31
|
+
"npm",
|
|
32
|
+
"artifact",
|
|
33
|
+
"release",
|
|
34
|
+
"cli"
|
|
35
|
+
],
|
|
36
|
+
"engines": {
|
|
37
|
+
"node": ">=18"
|
|
38
|
+
},
|
|
39
|
+
"dependencies": {
|
|
40
|
+
"commander": "^12.0.0",
|
|
41
|
+
"prompts": "^2.4.2",
|
|
42
|
+
"tar-stream": "^3.1.7"
|
|
43
|
+
}
|
|
44
|
+
}
|