compressing 1.10.3 → 2.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -75,7 +75,7 @@ fs.createReadStream('file/path/to/compress')
   .on('error', handleError);

 // You should take care of stream errors in caution, use pump to handle error in one place
-const pump = require('pump');
+const { pipeline: pump } = require('stream');
 const sourceStream = fs.createReadStream('file/path/to/compress');
 const gzipStream = new compressing.gzip.FileStream();
 const destStream = fs.createWriteStream('path/to/destination.gz');
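Note on the pump swap above: a minimal sketch (not part of the package diff), reusing the streams from the README excerpt, showing that require('stream').pipeline takes the same argument shape pump did (streams followed by one error-first callback), so aliasing it as pump is a drop-in change:

// Sketch only: stream.pipeline destroys all streams and reports the first error once,
// which is what the README previously pulled in pump for.
const fs = require('fs');
const { pipeline: pump } = require('stream');
const compressing = require('compressing');

pump(
  fs.createReadStream('file/path/to/compress'),
  new compressing.gzip.FileStream(),
  fs.createWriteStream('path/to/destination.gz'),
  err => {
    if (err) return console.error('compress failed:', err);
    console.log('compress done');
  }
);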
@@ -169,7 +169,7 @@ const urllib = require('urllib');
 const targetDir = require('os').tmpdir();
 const compressing = require('compressing');

-urllib.request('http://registry.npmjs.org/pedding/-/pedding-1.1.0.tgz', {
+urllib.request('http://registry.npmjs.org/compressing/-/compressing-2.0.0.tgz', {
   streaming: true,
   followRedirect: true,
 })
@@ -193,7 +193,7 @@ function onEntry(header, stream, next) => {
   if (header.type === 'file') {
     stream.pipe(fs.createWriteStream(path.join(destDir, header.name)));
   } else { // directory
-    mkdirp(path.join(destDir, header.name), err => {
+    fs.mkdir(path.join(destDir, header.name), { recursive: true }, err => {
       if (err) return handleError(err);
       stream.resume();
     });
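The mkdirp removal above relies on the recursive option that has been built into fs.mkdir since Node.js 10.12; a minimal standalone sketch of the equivalent call:

const fs = require('fs');

// With { recursive: true }, intermediate directories are created as needed and an
// already-existing directory is not treated as an error, matching mkdirp's behaviour.
fs.mkdir('path/to/nested/dir', { recursive: true }, err => {
  if (err) return console.error('mkdir failed:', err);
  console.log('directory ready');
});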
@@ -4,7 +4,7 @@ const tar = require('../tar');
 const gzip = require('../gzip');
 const utils = require('../utils');
 const stream = require('stream');
-const pump = require('pump');
+const { pipeline: pump } = require('stream');
 const ready = require('get-ready');

 class TgzFileStream extends stream.Transform {
package/lib/utils.js CHANGED
@@ -2,8 +2,23 @@

 const fs = require('fs');
 const path = require('path');
-const mkdirp = require('mkdirp');
-const pump = require('pump');
+const { pipeline: pump } = require('stream');
+
+/**
+ * Check if childPath is within parentPath (prevents path traversal attacks)
+ * @param {string} childPath - The path to check
+ * @param {string} parentPath - The parent directory path
+ * @returns {boolean} - True if childPath is within parentPath
+ */
+function isPathWithinParent(childPath, parentPath) {
+  const normalizedChild = path.resolve(childPath);
+  const normalizedParent = path.resolve(parentPath);
+  const parentWithSep = normalizedParent.endsWith(path.sep)
+    ? normalizedParent
+    : normalizedParent + path.sep;
+  return normalizedChild === normalizedParent ||
+    normalizedChild.startsWith(parentWithSep);
+}

 // file/fileBuffer/stream
 exports.sourceType = source => {
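For reference, a sketch of how the new helper behaves (POSIX paths, hypothetical inputs; assumes isPathWithinParent from the hunk above is in scope, as it is not exported in the hunk shown):

const path = require('path'); // used by the helper

isPathWithinParent('/tmp/out/a/b.txt', '/tmp/out');   // true  - nested entry
isPathWithinParent('/tmp/out', '/tmp/out');           // true  - the directory itself
isPathWithinParent('/tmp/out/../pwned', '/tmp/out');  // false - resolves to /tmp/pwned
isPathWithinParent('/tmp/outfit', '/tmp/out');        // false - the trailing path.sep check rejects prefix-only matches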
@@ -90,9 +105,12 @@ exports.makeUncompressFn = StreamClass => {
     }

     return new Promise((resolve, reject) => {
-      mkdirp(destDir, err => {
+      fs.mkdir(destDir, { recursive: true }, err => {
         if (err) return reject(err);

+        // Resolve destDir to absolute path for security validation
+        const resolvedDestDir = path.resolve(destDir);
+
         let entryCount = 0;
         let successCount = 0;
         let isFinish = false;
@@ -109,11 +127,19 @@ exports.makeUncompressFn = StreamClass => {
          .on('error', reject)
          .on('entry', (header, stream, next) => {
            stream.on('end', next);
-            const destFilePath = path.join(destDir, header.name);
+            const destFilePath = path.join(resolvedDestDir, header.name);
+            const resolvedDestPath = path.resolve(destFilePath);
+
+            // Security: Validate that the entry path doesn't escape the destination directory
+            if (!isPathWithinParent(resolvedDestPath, resolvedDestDir)) {
+              console.warn(`[compressing] Skipping entry with path traversal: "${header.name}" -> "${resolvedDestPath}"`);
+              stream.resume();
+              return;
+            }

            if (header.type === 'file') {
              const dir = path.dirname(destFilePath);
-              mkdirp(dir, err => {
+              fs.mkdir(dir, { recursive: true }, err => {
                if (err) return reject(err);

                entryCount++;
@@ -126,9 +152,17 @@ exports.makeUncompressFn = StreamClass => {
            } else if (header.type === 'symlink') {
              const dir = path.dirname(destFilePath);
              const target = path.resolve(dir, header.linkname);
+
+              // Security: Validate that the symlink target doesn't escape the destination directory
+              if (!isPathWithinParent(target, resolvedDestDir)) {
+                console.warn(`[compressing] Skipping symlink "${header.name}": target "${target}" escapes extraction directory`);
+                stream.resume();
+                return;
+              }
+
              entryCount++;

-              mkdirp(dir, err => {
+              fs.mkdir(dir, { recursive: true }, err => {
                if (err) return reject(err);

                const relativeTarget = path.relative(dir, target);
@@ -139,7 +173,7 @@ exports.makeUncompressFn = StreamClass => {
                });
              });
            } else { // directory
-              mkdirp(destFilePath, err => {
+              fs.mkdir(destFilePath, { recursive: true }, err => {
                if (err) return reject(err);
                stream.resume();
              });
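To illustrate the symlink check added above: a sketch with a hypothetical archive entry, showing how the link target is resolved relative to the entry's own directory and rejected once it lands outside the extraction root:

const path = require('path');

// Hypothetical tar entry: a symlink inside the archive pointing far above its own directory.
const resolvedDestDir = path.resolve('/tmp/extract-here');
const header = { name: 'bin/run', type: 'symlink', linkname: '../../../etc/passwd' };

const dir = path.dirname(path.join(resolvedDestDir, header.name)); // /tmp/extract-here/bin
const target = path.resolve(dir, header.linkname);                 // /etc/passwd

// isPathWithinParent(target, resolvedDestDir) is false here, so uncompress now skips
// the entry with a warning instead of creating a link that escapes the destination.
console.log(target);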
@@ -2,6 +2,7 @@

 // https://github.com/thejoshwolfe/yauzl#no-streaming-unzip-api

+const debug = require('util').debuglog('compressing/zip/uncompress_stream');
 const yauzl = require('@eggjs/yauzl');
 const stream = require('stream');
 const UncompressBaseStream = require('../base_write_stream');
@@ -38,12 +39,20 @@ class ZipUncompressStream extends UncompressBaseStream {
     if (this._zipFileNameEncoding === 'utf-8') {
       this._zipFileNameEncoding = 'utf8';
     }
+    this._finalCallback = err => {
+      if (err) {
+        debug('finalCallback, error: %j', err);
+        return this.emit('error', err);
+      }
+      this.emit('finish');
+    };

     this[YAUZL_CALLBACK] = this[YAUZL_CALLBACK].bind(this);

     const sourceType = utils.sourceType(opts.source);

     const yauzlOpts = this._yauzlOpts = Object.assign({}, DEFAULTS, opts.yauzl);
+    debug('sourceType: %s, yauzlOpts: %j', sourceType, yauzlOpts);
     if (sourceType === 'file') {
       yauzl.open(opts.source, yauzlOpts, this[YAUZL_CALLBACK]);
       return;
@@ -60,27 +69,26 @@ class ZipUncompressStream extends UncompressBaseStream {
         .catch(e => this.emit('error', e));
       return;
     }
-
-    this.on('pipe', srcStream => {
-      srcStream.unpipe(srcStream);
-
-      utils.streamToBuffer(srcStream)
-        .then(buf => {
-          this._chunks.push(buf);
-          buf = Buffer.concat(this._chunks);
-          yauzl.fromBuffer(buf, yauzlOpts, this[YAUZL_CALLBACK]);
-        })
-        .catch(e => this.emit('error', e));
-    });
   }

-  _write(chunk) {
-    // push to _chunks array, this will only happen once, for stream will be unpiped.
+  _write(chunk, _encoding, callback) {
     this._chunks.push(chunk);
+    debug('write size: %d, chunks: %d', chunk.length, this._chunks.length);
+    callback();
+  }
+
+  _final(callback) {
+    const buf = Buffer.concat(this._chunks);
+    debug('final, buf size: %d, chunks: %d', buf.length, this._chunks.length);
+    this._finalCallback = callback;
+    yauzl.fromBuffer(buf, this._yauzlOpts, this[YAUZL_CALLBACK]);
   }

   [YAUZL_CALLBACK](err, zipFile) {
-    if (err) return this.emit('error', err);
+    if (err) {
+      debug('yauzl error', err);
+      return this._finalCallback(err);
+    }

     zipFile.readEntry();

@@ -106,17 +114,22 @@ class ZipUncompressStream extends UncompressBaseStream {

         if (type === 'file') {
           zipFile.openReadStream(entry, (err, readStream) => {
-            if (err) return this.emit('error', err);
+            if (err) {
+              debug('file, error: %j', err);
+              return this._finalCallback(err);
+            }
+            debug('file, header: %j', header);
             this.emit('entry', header, readStream, next);
           });
         } else { // directory
           const placeholder = new stream.Readable({ read() {} });
+          debug('directory, header: %j', header);
           this.emit('entry', header, placeholder, next);
           setImmediate(() => placeholder.emit('end'));
         }
       })
-      .on('end', () => this.emit('finish'))
-      .on('error', err => this.emit('error', err));
+      .on('end', () => this._finalCallback())
+      .on('error', err => this._finalCallback(err));

     function next() {
       zipFile.readEntry();
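The hunks above replace the old 'pipe'/unpipe trick with the standard Writable _write/_final protocol: every piped chunk is buffered, and the full buffer is handed to yauzl.fromBuffer once the source ends (yauzl has no streaming unzip API, per the comment at the top of the file). A generic sketch of that pattern, not the package's actual class:

const { Writable } = require('stream');

class BufferingSink extends Writable {
  constructor() {
    super();
    this._chunks = [];
  }
  _write(chunk, _encoding, callback) {
    // Called once per incoming chunk; just buffer it and signal readiness for more.
    this._chunks.push(chunk);
    callback();
  }
  _final(callback) {
    // Called after end(), so the complete input is available here.
    const buf = Buffer.concat(this._chunks);
    console.log('received %d bytes', buf.length);
    callback();
  }
}

require('fs').createReadStream('archive.zip').pipe(new BufferingSink());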
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "compressing",
-  "version": "1.10.3",
+  "version": "2.0.1",
   "description": "Everything you need for compressing and uncompressing",
   "main": "index.js",
   "scripts": {
@@ -39,14 +39,12 @@
   },
   "homepage": "https://github.com/node-modules/compressing#readme",
   "dependencies": {
+    "@eggjs/yauzl": "^2.11.0",
     "flushwritable": "^1.0.0",
     "get-ready": "^1.0.0",
     "iconv-lite": "^0.5.0",
-    "mkdirp": "^0.5.1",
-    "pump": "^3.0.0",
     "streamifier": "^0.1.1",
     "tar-stream": "^1.5.2",
-    "@eggjs/yauzl": "^2.11.0",
     "yazl": "^2.4.2"
   },
   "devDependencies": {
@@ -62,6 +60,6 @@
     "uuid": "^3.0.1"
   },
   "engines": {
-    "node": ">= 4.0.0"
+    "node": ">= 18.0.0"
   }
 }