mem-fs-editor 9.0.1 → 9.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +8 -0
- package/lib/actions/append-tpl.js +2 -2
- package/lib/actions/append.js +3 -3
- package/lib/actions/commit-file-async.js +16 -9
- package/lib/actions/commit.js +1 -1
- package/lib/actions/copy-async.js +25 -29
- package/lib/actions/copy-tpl-async.js +5 -5
- package/lib/actions/copy-tpl.js +6 -6
- package/lib/actions/copy.js +28 -21
- package/lib/actions/delete.js +13 -15
- package/lib/actions/dump.js +9 -3
- package/lib/actions/exists.js +1 -1
- package/lib/actions/extend-json.js +3 -3
- package/lib/actions/read.js +1 -1
- package/lib/actions/write-json.js +2 -2
- package/lib/actions/write.js +16 -8
- package/lib/state.js +1 -1
- package/lib/transform.js +2 -2
- package/lib/util.js +13 -13
- package/package.json +8 -7
package/README.md
CHANGED
|
@@ -82,6 +82,8 @@ Optionally, pass an `options.process` function (`process(contents)`) returning a
|
|
|
82
82
|
|
|
83
83
|
Optionally, when `from` is a glob pattern, pass an `options.processDestinationPath` function (`processDestinationPath(destinationFile)`) returning a string that will become the new file name.
|
|
84
84
|
|
|
85
|
+
`options.noGlob` can be used to bypass glob matching entirely. In that case, `from` will directly match file paths against the file system.
|
|
86
|
+
|
|
85
87
|
### `#copyAsync(from, to, [options], context[, templateOptions ])`
|
|
86
88
|
|
|
87
89
|
Async version of `copy`.
|
|
@@ -140,3 +142,9 @@ If provided, `filters` is an array of TransformStream to be applied on a stream
|
|
|
140
142
|
If provided, `stream` is a stream of vinyl files.
|
|
141
143
|
|
|
142
144
|
`callback` is called once the files are updated on disk.
|
|
145
|
+
|
|
146
|
+
### `#dump([cwd,] [filter])`
|
|
147
|
+
|
|
148
|
+
Dump files to compare expected result.
|
|
149
|
+
Provide a `cwd` for relative path. Allows to omit temporary path.
|
|
150
|
+
Provide a `filter` function or glob to focus on specific files.
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
'use strict';
|
|
2
2
|
|
|
3
|
-
|
|
3
|
+
const ejs = require('ejs');
|
|
4
4
|
|
|
5
5
|
module.exports = function (to, contents, context, tplSettings, options) {
|
|
6
6
|
context = context || {};
|
|
@@ -11,7 +11,7 @@ module.exports = function (to, contents, context, tplSettings, options) {
|
|
|
11
11
|
ejs.render(
|
|
12
12
|
contents.toString(),
|
|
13
13
|
context,
|
|
14
|
-
tplSettings
|
|
14
|
+
tplSettings,
|
|
15
15
|
),
|
|
16
16
|
options);
|
|
17
17
|
};
|
package/lib/actions/append.js
CHANGED
|
@@ -1,12 +1,12 @@
|
|
|
1
1
|
'use strict';
|
|
2
2
|
|
|
3
|
-
|
|
3
|
+
const {EOL} = require('os');
|
|
4
4
|
|
|
5
5
|
module.exports = function (to, contents, options) {
|
|
6
6
|
options = {
|
|
7
7
|
trimEnd: true,
|
|
8
8
|
separator: EOL,
|
|
9
|
-
...options
|
|
9
|
+
...options,
|
|
10
10
|
};
|
|
11
11
|
|
|
12
12
|
if (!this.exists(to) && options.create) {
|
|
@@ -14,7 +14,7 @@ module.exports = function (to, contents, options) {
|
|
|
14
14
|
return;
|
|
15
15
|
}
|
|
16
16
|
|
|
17
|
-
|
|
17
|
+
let currentContents = this.read(to);
|
|
18
18
|
if (options.trimEnd) {
|
|
19
19
|
currentContents = currentContents.replace(/\s+$/, '');
|
|
20
20
|
}
|
|
@@ -1,11 +1,11 @@
|
|
|
1
1
|
'use strict';
|
|
2
2
|
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
3
|
+
const fs = require('fs').promises;
|
|
4
|
+
const path = require('path');
|
|
5
|
+
const {clearFileState, isFileStateModified, isFileStateDeleted, setCommittedFile} = require('../state');
|
|
6
6
|
|
|
7
7
|
async function write(file) {
|
|
8
|
-
|
|
8
|
+
const dir = path.dirname(file.path);
|
|
9
9
|
try {
|
|
10
10
|
if (!(await fs.stat(dir)).isDirectory()) {
|
|
11
11
|
throw new Error(`${file.path} is not a directory`);
|
|
@@ -18,18 +18,25 @@ async function write(file) {
|
|
|
18
18
|
}
|
|
19
19
|
}
|
|
20
20
|
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
21
|
+
const options = {};
|
|
22
|
+
if (file.stat) {
|
|
23
|
+
options.mode = file.stat.mode;
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
await fs.writeFile(file.path, file.contents, options);
|
|
24
27
|
}
|
|
25
28
|
|
|
26
29
|
async function remove(file) {
|
|
27
|
-
|
|
30
|
+
const remove = fs.rm || fs.rmdir;
|
|
28
31
|
await remove(file.path, {recursive: true});
|
|
29
32
|
}
|
|
30
33
|
|
|
31
34
|
module.exports = async function (file) {
|
|
32
|
-
this.store.
|
|
35
|
+
const existingFile = this.store.get(file.path);
|
|
36
|
+
if (!existingFile || existingFile !== file) {
|
|
37
|
+
this.store.add(file);
|
|
38
|
+
}
|
|
39
|
+
|
|
33
40
|
if (isFileStateModified(file)) {
|
|
34
41
|
setCommittedFile(file);
|
|
35
42
|
await write(file);
|
package/lib/actions/commit.js
CHANGED
|
@@ -1,17 +1,17 @@
|
|
|
1
1
|
'use strict';
|
|
2
2
|
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
3
|
+
const assert = require('assert');
|
|
4
|
+
const fs = require('fs');
|
|
5
|
+
const fsPromises = require('fs').promises;
|
|
6
|
+
const path = require('path');
|
|
7
|
+
const globby = require('globby');
|
|
8
|
+
const multimatch = require('multimatch');
|
|
9
|
+
const ejs = require('ejs');
|
|
10
|
+
const util = require('../util');
|
|
11
|
+
const normalize = require('normalize-path');
|
|
12
12
|
|
|
13
13
|
async function applyProcessingFileFunc(processFile, filename) {
|
|
14
|
-
|
|
14
|
+
const output = await Promise.resolve(processFile.call(this, filename));
|
|
15
15
|
return Buffer.isBuffer(output) ? output : Buffer.from(output);
|
|
16
16
|
}
|
|
17
17
|
|
|
@@ -24,14 +24,14 @@ function renderFilepath(filepath, context, tplSettings) {
|
|
|
24
24
|
}
|
|
25
25
|
|
|
26
26
|
async function getOneFile(from) {
|
|
27
|
-
|
|
27
|
+
let oneFile;
|
|
28
28
|
if (typeof from === 'string') {
|
|
29
29
|
oneFile = from;
|
|
30
30
|
} else {
|
|
31
31
|
return undefined;
|
|
32
32
|
}
|
|
33
33
|
|
|
34
|
-
|
|
34
|
+
const resolved = path.resolve(oneFile);
|
|
35
35
|
try {
|
|
36
36
|
if ((await fsPromises.stat(resolved)).isFile()) {
|
|
37
37
|
return resolved;
|
|
@@ -45,16 +45,16 @@ async function getOneFile(from) {
|
|
|
45
45
|
exports.copyAsync = async function (from, to, options, context, tplSettings) {
|
|
46
46
|
to = path.resolve(to);
|
|
47
47
|
options = options || {};
|
|
48
|
-
|
|
48
|
+
const oneFile = await getOneFile(from);
|
|
49
49
|
if (oneFile) {
|
|
50
50
|
return this._copySingleAsync(oneFile, renderFilepath(to, context, tplSettings), options);
|
|
51
51
|
}
|
|
52
52
|
|
|
53
|
-
|
|
53
|
+
const fromGlob = util.globify(from);
|
|
54
54
|
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
55
|
+
const globOptions = {...options.globOptions, nodir: true};
|
|
56
|
+
const diskFiles = globby.sync(fromGlob, globOptions).map(filepath => path.resolve(filepath));
|
|
57
|
+
const storeFiles = [];
|
|
58
58
|
this.store.each(file => {
|
|
59
59
|
// The store may have a glob path and when we try to copy it will fail because not real file
|
|
60
60
|
if (!globby.hasMagic(normalize(file.path)) && multimatch([file.path], fromGlob).length !== 0 && !diskFiles.includes(file.path)) {
|
|
@@ -62,17 +62,17 @@ exports.copyAsync = async function (from, to, options, context, tplSettings) {
|
|
|
62
62
|
}
|
|
63
63
|
});
|
|
64
64
|
|
|
65
|
-
|
|
65
|
+
let generateDestination = () => to;
|
|
66
66
|
if (Array.isArray(from) || !this.exists(from) || globby.hasMagic(normalize(from))) {
|
|
67
67
|
assert(
|
|
68
68
|
!this.exists(to) || fs.statSync(to).isDirectory(),
|
|
69
|
-
'When copying multiple files, provide a directory as destination'
|
|
69
|
+
'When copying multiple files, provide a directory as destination',
|
|
70
70
|
);
|
|
71
71
|
|
|
72
72
|
const processDestinationPath = options.processDestinationPath || (path => path);
|
|
73
|
-
|
|
73
|
+
const root = util.getCommonPath(from);
|
|
74
74
|
generateDestination = filepath => {
|
|
75
|
-
|
|
75
|
+
const toFile = path.relative(root, filepath);
|
|
76
76
|
return processDestinationPath(path.join(to, toFile));
|
|
77
77
|
};
|
|
78
78
|
}
|
|
@@ -81,12 +81,8 @@ exports.copyAsync = async function (from, to, options, context, tplSettings) {
|
|
|
81
81
|
assert(options.ignoreNoMatch || diskFiles.length > 0 || storeFiles.length > 0, 'Trying to copy from a source that does not exist: ' + from);
|
|
82
82
|
|
|
83
83
|
await Promise.all([
|
|
84
|
-
...diskFiles.map(file =>
|
|
85
|
-
|
|
86
|
-
}),
|
|
87
|
-
...storeFiles.map(file => {
|
|
88
|
-
return Promise.resolve(this._copySingle(file, renderFilepath(generateDestination(file), context, tplSettings), options));
|
|
89
|
-
})
|
|
84
|
+
...diskFiles.map(file => this._copySingleAsync(file, renderFilepath(generateDestination(file), context, tplSettings), options)),
|
|
85
|
+
...storeFiles.map(file => Promise.resolve(this._copySingle(file, renderFilepath(generateDestination(file), context, tplSettings), options))),
|
|
90
86
|
]);
|
|
91
87
|
};
|
|
92
88
|
|
|
@@ -95,7 +91,7 @@ exports._copySingleAsync = async function (from, to, options = {}) {
|
|
|
95
91
|
return this._copySingle(from, to, options);
|
|
96
92
|
}
|
|
97
93
|
|
|
98
|
-
|
|
94
|
+
const contents = await applyProcessingFileFunc.call(this, options.processFile, from);
|
|
99
95
|
|
|
100
96
|
if (options.append) {
|
|
101
97
|
if (!this.store.existsInMemory) {
|
|
@@ -108,6 +104,6 @@ exports._copySingleAsync = async function (from, to, options = {}) {
|
|
|
108
104
|
}
|
|
109
105
|
}
|
|
110
106
|
|
|
111
|
-
|
|
107
|
+
const stat = await fsPromises.stat(from);
|
|
112
108
|
this.write(to, contents, stat);
|
|
113
109
|
};
|
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
'use strict';
|
|
2
2
|
|
|
3
|
-
|
|
4
|
-
|
|
3
|
+
const ejs = require('ejs');
|
|
4
|
+
const fs = require('fs').promises;
|
|
5
5
|
const {isBinary} = require('../util');
|
|
6
6
|
|
|
7
7
|
module.exports = async function (from, to, context, tplSettings, options) {
|
|
@@ -14,16 +14,16 @@ module.exports = async function (from, to, context, tplSettings, options) {
|
|
|
14
14
|
{
|
|
15
15
|
processDestinationPath: path => path.replace(/.ejs$/, ''),
|
|
16
16
|
...options,
|
|
17
|
-
|
|
17
|
+
async processFile(filename) {
|
|
18
18
|
if (isBinary(filename, null)) {
|
|
19
19
|
return fs.readFile(filename);
|
|
20
20
|
}
|
|
21
21
|
|
|
22
22
|
return ejs.renderFile(filename, context, tplSettings);
|
|
23
23
|
},
|
|
24
|
-
process: (contents, filename, destination) => this._processTpl({contents, filename, destination, context, tplSettings})
|
|
24
|
+
process: (contents, filename, destination) => this._processTpl({contents, filename, destination, context, tplSettings}),
|
|
25
25
|
},
|
|
26
26
|
context,
|
|
27
|
-
tplSettings
|
|
27
|
+
tplSettings,
|
|
28
28
|
);
|
|
29
29
|
};
|
package/lib/actions/copy-tpl.js
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
'use strict';
|
|
2
2
|
|
|
3
|
-
|
|
3
|
+
const ejs = require('ejs');
|
|
4
4
|
const {isBinary} = require('../util');
|
|
5
5
|
|
|
6
6
|
module.exports._processTpl = function ({contents, filename, context, tplSettings}) {
|
|
@@ -13,9 +13,9 @@ module.exports._processTpl = function ({contents, filename, context, tplSettings
|
|
|
13
13
|
context,
|
|
14
14
|
{
|
|
15
15
|
// Setting filename by default allow including partials.
|
|
16
|
-
filename
|
|
17
|
-
...tplSettings
|
|
18
|
-
}
|
|
16
|
+
filename,
|
|
17
|
+
...tplSettings,
|
|
18
|
+
},
|
|
19
19
|
);
|
|
20
20
|
};
|
|
21
21
|
|
|
@@ -29,9 +29,9 @@ module.exports.copyTpl = function (from, to, context, tplSettings, options) {
|
|
|
29
29
|
{
|
|
30
30
|
processDestinationPath: path => path.replace(/.ejs$/, ''),
|
|
31
31
|
...options,
|
|
32
|
-
process: (contents, filename) => this._processTpl({contents, filename, context, tplSettings})
|
|
32
|
+
process: (contents, filename) => this._processTpl({contents, filename, context, tplSettings}),
|
|
33
33
|
},
|
|
34
34
|
context,
|
|
35
|
-
tplSettings
|
|
35
|
+
tplSettings,
|
|
36
36
|
);
|
|
37
37
|
};
|
package/lib/actions/copy.js
CHANGED
|
@@ -1,46 +1,53 @@
|
|
|
1
1
|
'use strict';
|
|
2
2
|
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
3
|
+
const assert = require('assert');
|
|
4
|
+
const fs = require('fs');
|
|
5
|
+
const path = require('path');
|
|
6
|
+
const globby = require('globby');
|
|
7
|
+
const multimatch = require('multimatch');
|
|
8
|
+
const ejs = require('ejs');
|
|
9
|
+
const util = require('../util');
|
|
10
|
+
const normalize = require('normalize-path');
|
|
11
11
|
|
|
12
12
|
function applyProcessingFunc(process, contents, filename) {
|
|
13
|
-
|
|
13
|
+
const output = process(contents, filename);
|
|
14
14
|
return Buffer.isBuffer(output) ? output : Buffer.from(output);
|
|
15
15
|
}
|
|
16
16
|
|
|
17
17
|
exports.copy = function (from, to, options, context, tplSettings) {
|
|
18
18
|
to = path.resolve(to);
|
|
19
19
|
options = options || {};
|
|
20
|
-
|
|
20
|
+
const fromGlob = util.globify(from);
|
|
21
21
|
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
22
|
+
let diskFiles = [];
|
|
23
|
+
if (options.noGlob) {
|
|
24
|
+
const fromFiles = Array.isArray(fromGlob) ? fromGlob : [fromGlob];
|
|
25
|
+
diskFiles = fromFiles.filter(filepath => fs.existsSync(filepath));
|
|
26
|
+
} else {
|
|
27
|
+
const globOptions = {...options.globOptions, nodir: true};
|
|
28
|
+
diskFiles = globby.sync(fromGlob, globOptions).map(file => path.resolve(file));
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
const storeFiles = [];
|
|
25
32
|
this.store.each(file => {
|
|
26
33
|
// The store may have a glob path and when we try to copy it will fail because not real file
|
|
27
34
|
if (!globby.hasMagic(normalize(file.path)) && multimatch([file.path], fromGlob).length !== 0 && !diskFiles.includes(file.path)) {
|
|
28
35
|
storeFiles.push(file.path);
|
|
29
36
|
}
|
|
30
37
|
});
|
|
31
|
-
|
|
38
|
+
const files = diskFiles.concat(storeFiles);
|
|
32
39
|
|
|
33
|
-
|
|
40
|
+
let generateDestination = () => to;
|
|
34
41
|
if (Array.isArray(from) || !this.exists(from) || globby.hasMagic(normalize(from))) {
|
|
35
42
|
assert(
|
|
36
43
|
!this.exists(to) || fs.statSync(to).isDirectory(),
|
|
37
|
-
'When copying multiple files, provide a directory as destination'
|
|
44
|
+
'When copying multiple files, provide a directory as destination',
|
|
38
45
|
);
|
|
39
46
|
|
|
40
47
|
const processDestinationPath = options.processDestinationPath || (path => path);
|
|
41
|
-
|
|
48
|
+
const root = util.getCommonPath(from);
|
|
42
49
|
generateDestination = filepath => {
|
|
43
|
-
|
|
50
|
+
const toFile = path.relative(root, filepath);
|
|
44
51
|
return processDestinationPath(path.join(to, toFile));
|
|
45
52
|
};
|
|
46
53
|
}
|
|
@@ -49,7 +56,7 @@ exports.copy = function (from, to, options, context, tplSettings) {
|
|
|
49
56
|
assert(options.ignoreNoMatch || files.length > 0, 'Trying to copy from a source that does not exist: ' + from);
|
|
50
57
|
|
|
51
58
|
files.forEach(file => {
|
|
52
|
-
|
|
59
|
+
let toFile = generateDestination(file);
|
|
53
60
|
if (context) {
|
|
54
61
|
toFile = ejs.render(toFile, context, tplSettings);
|
|
55
62
|
}
|
|
@@ -61,9 +68,9 @@ exports.copy = function (from, to, options, context, tplSettings) {
|
|
|
61
68
|
exports._copySingle = function (from, to, options = {}) {
|
|
62
69
|
assert(this.exists(from), 'Trying to copy from a source that does not exist: ' + from);
|
|
63
70
|
|
|
64
|
-
|
|
71
|
+
const file = this.store.get(from);
|
|
65
72
|
|
|
66
|
-
|
|
73
|
+
let {contents} = file;
|
|
67
74
|
if (options.process) {
|
|
68
75
|
contents = applyProcessingFunc(options.process, file.contents, file.path);
|
|
69
76
|
}
|
package/lib/actions/delete.js
CHANGED
|
@@ -1,13 +1,13 @@
|
|
|
1
1
|
'use strict';
|
|
2
2
|
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
3
|
+
const path = require('path');
|
|
4
|
+
const globby = require('globby');
|
|
5
|
+
const multimatch = require('multimatch');
|
|
6
|
+
const util = require('../util');
|
|
7
|
+
const {setDeletedFileState} = require('../state');
|
|
8
8
|
|
|
9
9
|
function deleteFile(path, store) {
|
|
10
|
-
|
|
10
|
+
const file = store.get(path);
|
|
11
11
|
setDeletedFileState(file);
|
|
12
12
|
file.contents = null;
|
|
13
13
|
store.add(file);
|
|
@@ -18,21 +18,19 @@ module.exports = function (paths, options) {
|
|
|
18
18
|
paths = [paths];
|
|
19
19
|
}
|
|
20
20
|
|
|
21
|
-
paths = paths.map(
|
|
22
|
-
return path.resolve(filePath);
|
|
23
|
-
});
|
|
21
|
+
paths = paths.map(filePath => path.resolve(filePath));
|
|
24
22
|
paths = util.globify(paths);
|
|
25
23
|
options = options || {};
|
|
26
24
|
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
files.forEach(
|
|
25
|
+
const globOptions = options.globOptions || {};
|
|
26
|
+
const files = globby.sync(paths, globOptions);
|
|
27
|
+
files.forEach(file => {
|
|
30
28
|
deleteFile(file, this.store);
|
|
31
|
-
}
|
|
29
|
+
});
|
|
32
30
|
|
|
33
|
-
this.store.each(
|
|
31
|
+
this.store.each(file => {
|
|
34
32
|
if (multimatch([file.path], paths).length !== 0) {
|
|
35
33
|
deleteFile(file.path, this.store);
|
|
36
34
|
}
|
|
37
|
-
}
|
|
35
|
+
});
|
|
38
36
|
};
|
package/lib/actions/dump.js
CHANGED
|
@@ -2,15 +2,21 @@
|
|
|
2
2
|
|
|
3
3
|
const path = require('path');
|
|
4
4
|
const normalize = require('normalize-path');
|
|
5
|
+
const minimatch = require('minimatch');
|
|
5
6
|
|
|
6
7
|
const {hasClearedState, hasState, STATE, STATE_CLEARED} = require('../state');
|
|
7
8
|
|
|
8
9
|
module.exports = function (cwd = process.cwd(), filter = file => hasClearedState(file) || hasState(file)) {
|
|
10
|
+
if (typeof filter === 'string') {
|
|
11
|
+
const pattern = filter;
|
|
12
|
+
filter = file => minimatch(file.path, pattern);
|
|
13
|
+
}
|
|
14
|
+
|
|
9
15
|
return Object.fromEntries(
|
|
10
|
-
this.store.all().filter(file => filter(file)).map(file => {
|
|
16
|
+
this.store.all().filter(file => filter(file, cwd)).map(file => {
|
|
11
17
|
const filePath = normalize(cwd ? path.relative(cwd, file.path) : file.path);
|
|
12
18
|
const fileDump = {
|
|
13
|
-
contents: file.contents ? file.contents.toString() : file.contents
|
|
19
|
+
contents: file.contents ? file.contents.toString() : file.contents,
|
|
14
20
|
};
|
|
15
21
|
if (file[STATE]) {
|
|
16
22
|
fileDump[STATE] = file[STATE];
|
|
@@ -21,6 +27,6 @@ module.exports = function (cwd = process.cwd(), filter = file => hasClearedState
|
|
|
21
27
|
}
|
|
22
28
|
|
|
23
29
|
return [filePath, fileDump];
|
|
24
|
-
})
|
|
30
|
+
}),
|
|
25
31
|
);
|
|
26
32
|
};
|
package/lib/actions/exists.js
CHANGED
|
@@ -1,10 +1,10 @@
|
|
|
1
1
|
'use strict';
|
|
2
2
|
|
|
3
|
-
|
|
3
|
+
const extend = require('deep-extend');
|
|
4
4
|
|
|
5
5
|
module.exports = function (filepath, contents, replacer, space) {
|
|
6
|
-
|
|
7
|
-
|
|
6
|
+
const originalContent = this.readJSON(filepath, {});
|
|
7
|
+
const newContent = extend({}, originalContent, contents);
|
|
8
8
|
|
|
9
9
|
this.writeJSON(filepath, newContent, replacer, space);
|
|
10
10
|
};
|
package/lib/actions/read.js
CHANGED
|
@@ -1,9 +1,9 @@
|
|
|
1
1
|
'use strict';
|
|
2
2
|
|
|
3
|
-
|
|
3
|
+
const DEFAULT_INDENTATION = 2;
|
|
4
4
|
|
|
5
5
|
module.exports = function (filepath, contents, replacer, space) {
|
|
6
|
-
|
|
6
|
+
const jsonStr = JSON.stringify(contents, replacer || null, space || DEFAULT_INDENTATION) + '\n';
|
|
7
7
|
|
|
8
8
|
return this.write(filepath, jsonStr);
|
|
9
9
|
};
|
package/lib/actions/write.js
CHANGED
|
@@ -1,19 +1,27 @@
|
|
|
1
1
|
'use strict';
|
|
2
2
|
|
|
3
|
-
|
|
4
|
-
|
|
3
|
+
const assert = require('assert');
|
|
4
|
+
const {isFileStateModified, setModifiedFileState} = require('../state');
|
|
5
5
|
|
|
6
6
|
module.exports = function (filepath, contents, stat) {
|
|
7
7
|
assert(
|
|
8
8
|
typeof contents === 'string' || Buffer.isBuffer(contents),
|
|
9
|
-
'Expected `contents` to be a String or a Buffer'
|
|
9
|
+
'Expected `contents` to be a String or a Buffer',
|
|
10
10
|
);
|
|
11
11
|
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
12
|
+
const file = this.store.get(filepath);
|
|
13
|
+
const newContents = Buffer.isBuffer(contents) ? contents : Buffer.from(contents);
|
|
14
|
+
if (
|
|
15
|
+
!isFileStateModified(file)
|
|
16
|
+
|| !Buffer.isBuffer(file.contents)
|
|
17
|
+
|| !newContents.equals(file.contents)
|
|
18
|
+
|| (stat !== undefined && file.stat !== stat)
|
|
19
|
+
) {
|
|
20
|
+
setModifiedFileState(file);
|
|
21
|
+
file.contents = newContents;
|
|
22
|
+
file.stat = stat;
|
|
23
|
+
this.store.add(file);
|
|
24
|
+
}
|
|
17
25
|
|
|
18
26
|
return file.contents.toString();
|
|
19
27
|
};
|
package/lib/state.js
CHANGED
package/lib/transform.js
CHANGED
|
@@ -1,12 +1,12 @@
|
|
|
1
1
|
const {createTransform} = require('./util');
|
|
2
2
|
const {isFilePending} = require('./state');
|
|
3
3
|
|
|
4
|
-
const createPendingFilesPassthrough = () => createTransform(
|
|
4
|
+
const createPendingFilesPassthrough = () => createTransform((file, _enc, cb) => {
|
|
5
5
|
// Don't process deleted file who haven't been commited yet.
|
|
6
6
|
cb(undefined, isFilePending(file) ? file : undefined);
|
|
7
7
|
});
|
|
8
8
|
|
|
9
|
-
const createCommitTransform = memFsEditor => createTransform(
|
|
9
|
+
const createCommitTransform = memFsEditor => createTransform((file, _enc, cb) => {
|
|
10
10
|
memFsEditor.commitFileAsync(file).then(() => cb()).catch(error => cb(error));
|
|
11
11
|
});
|
|
12
12
|
|
package/lib/util.js
CHANGED
|
@@ -1,12 +1,12 @@
|
|
|
1
1
|
'use strict';
|
|
2
2
|
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
3
|
+
const fs = require('fs');
|
|
4
|
+
const path = require('path');
|
|
5
|
+
const commondir = require('commondir');
|
|
6
|
+
const globby = require('globby');
|
|
7
|
+
const normalize = require('normalize-path');
|
|
8
8
|
const {Transform} = require('stream');
|
|
9
|
-
|
|
9
|
+
const {isBinaryFileSync} = require('isbinaryfile');
|
|
10
10
|
|
|
11
11
|
const {default: textextensions} = require('textextensions');
|
|
12
12
|
const {default: binaryextensions} = require('binaryextensions');
|
|
@@ -24,7 +24,7 @@ exports.getCommonPath = function (filePath) {
|
|
|
24
24
|
return commondir(filePath);
|
|
25
25
|
}
|
|
26
26
|
|
|
27
|
-
|
|
27
|
+
const globStartIndex = filePath.indexOf('*');
|
|
28
28
|
if (globStartIndex !== -1) {
|
|
29
29
|
filePath = filePath.substring(0, globStartIndex + 1);
|
|
30
30
|
} else if (fs.existsSync(filePath) && fs.statSync(filePath).isDirectory()) {
|
|
@@ -50,11 +50,11 @@ exports.globify = function (filePath) {
|
|
|
50
50
|
// entity on the disk is ambiguous. As such, match both files and directories.
|
|
51
51
|
return [
|
|
52
52
|
filePath,
|
|
53
|
-
normalize(path.join(filePath, '**'))
|
|
53
|
+
normalize(path.join(filePath, '**')),
|
|
54
54
|
];
|
|
55
55
|
}
|
|
56
56
|
|
|
57
|
-
|
|
57
|
+
const fsStats = fs.statSync(filePath);
|
|
58
58
|
if (fsStats.isFile()) {
|
|
59
59
|
return filePath;
|
|
60
60
|
}
|
|
@@ -69,9 +69,9 @@ exports.globify = function (filePath) {
|
|
|
69
69
|
exports.createTransform = function (transform) {
|
|
70
70
|
const stream = new Transform({
|
|
71
71
|
objectMode: true,
|
|
72
|
-
transform
|
|
72
|
+
transform(...args) {
|
|
73
73
|
return transform.apply(this, args);
|
|
74
|
-
}
|
|
74
|
+
},
|
|
75
75
|
});
|
|
76
76
|
return stream;
|
|
77
77
|
};
|
|
@@ -87,7 +87,7 @@ exports.isBinary = (filePath, newFileContents) => {
|
|
|
87
87
|
}
|
|
88
88
|
|
|
89
89
|
return (
|
|
90
|
-
(fs.existsSync(filePath) && isBinaryFileSync(filePath))
|
|
91
|
-
(newFileContents && isBinaryFileSync(Buffer.isBuffer(newFileContents) ? newFileContents : Buffer.from(newFileContents)))
|
|
90
|
+
(fs.existsSync(filePath) && isBinaryFileSync(filePath))
|
|
91
|
+
|| (newFileContents && isBinaryFileSync(Buffer.isBuffer(newFileContents) ? newFileContents : Buffer.from(newFileContents)))
|
|
92
92
|
);
|
|
93
93
|
};
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "mem-fs-editor",
|
|
3
|
-
"version": "9.0.1",
|
|
3
|
+
"version": "9.3.0",
|
|
4
4
|
"description": "File edition helpers working on top of mem-fs",
|
|
5
5
|
"scripts": {
|
|
6
6
|
"fix": "eslint . --fix",
|
|
@@ -15,15 +15,16 @@
|
|
|
15
15
|
"lib"
|
|
16
16
|
],
|
|
17
17
|
"dependencies": {
|
|
18
|
-
"binaryextensions": "^4.
|
|
18
|
+
"binaryextensions": "^4.16.0",
|
|
19
19
|
"commondir": "^1.0.1",
|
|
20
20
|
"deep-extend": "^0.6.0",
|
|
21
21
|
"ejs": "^3.1.6",
|
|
22
22
|
"globby": "^11.0.3",
|
|
23
23
|
"isbinaryfile": "^4.0.8",
|
|
24
|
+
"minimatch": "^3.0.4",
|
|
24
25
|
"multimatch": "^5.0.0",
|
|
25
26
|
"normalize-path": "^3.0.0",
|
|
26
|
-
"textextensions": "^5.
|
|
27
|
+
"textextensions": "^5.13.0"
|
|
27
28
|
},
|
|
28
29
|
"peerDependencies": {
|
|
29
30
|
"mem-fs": "^2.1.0"
|
|
@@ -37,10 +38,10 @@
|
|
|
37
38
|
"coveralls": "^3.0.3",
|
|
38
39
|
"escape-regexp": "0.0.1",
|
|
39
40
|
"eslint": "^7.26.0",
|
|
40
|
-
"eslint-config-xo-space": "^0.
|
|
41
|
-
"jest": "^
|
|
42
|
-
"mem-fs": "^2.1
|
|
43
|
-
"sinon": "^
|
|
41
|
+
"eslint-config-xo-space": "^0.29.0",
|
|
42
|
+
"jest": "^27.0.6",
|
|
43
|
+
"mem-fs": "^2.2.1",
|
|
44
|
+
"sinon": "^11.1.2"
|
|
44
45
|
},
|
|
45
46
|
"jest": {
|
|
46
47
|
"collectCoverage": true,
|