@tangelo/tangelo-configuration-toolkit 1.21.0 → 1.22.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/index.js +7 -2
- package/package.json +1 -1
- package/src/lib/tcl-config.js +5 -0
- package/src/modules/build/index.js +8 -2
- package/src/modules/deploy/config.js +0 -3
- package/src/modules/deploy/index.js +46 -2
- package/src/modules/deploy/srcset.js +4 -2
- package/src/modules/fonto/commands.js +11 -6
- package/src/modules/fonto/index.js +1 -1
- package/src/modules/git/index.js +14 -6
- package/src/modules/migrate/index.js +3 -1
- package/src/modules/migrate/steps.js +29 -19
package/README.md
CHANGED
package/index.js
CHANGED
@@ -83,8 +83,10 @@ global._git = {
 {commitHash: '77b8ea9', regex: /^7\.14\./}, // 7.14.x
 {commitHash: '8066c44', regex: /^7\.13\./}, // 7.13.x
 {commitHash: 'a2b2d4e', regex: /^7\.12\./} // 7.12.x
-]
-
+],
+hasUncommittedFiles: memoize(() => execGitCommand(`status --short`, path.join(_paths.repo, _paths.tdi)).length > 0),
+},
+hasUncommittedFiles: memoize(() => execGitCommand(`status --short`, _paths.repo).length > 0),
 };
 
 global._tdiSubmoduleExists = () => fs.existsSync(path.join(_paths.repo, _paths.tdi));
@@ -122,6 +124,9 @@ global._modulesTdi = {
 
 global._settingsTdi = memoize(() => Object.assign(_package.settingsTdiDefault, _modulesTdi.require('settings.json', {throws: false, message: false}) ?? {}));
 
+global._filters = {
+fontoSources: ['!**/node_modules/**', '!**/fonto/packages/**', '!**/fonto/packages_shared/**', '!**/fonto/platform/**']
+};
 
 process.on('beforeExit', () => {
 _write(); // print empty line before and after update check
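The two `hasUncommittedFiles` helpers added here (one scoped to the TDI submodule, one to the repository root) memoize a `git status --short` check and report a dirty working tree whenever the command prints anything. A minimal sketch of the same idea, with plain `child_process` and a one-value cache standing in for the toolkit's `execGitCommand` and `memoize` helpers (both assumptions):

```js
// Sketch only: execGitCommand/memoize are toolkit internals, so stand-ins are used here.
const {execSync} = require('child_process');

const memoize = fn => {               // cache the first result, like the memoize used above
  let called = false, value;
  return () => {
    if (!called) { value = fn(); called = true; }
    return value;
  };
};

const hasUncommittedFiles = cwd => memoize(() =>
  // `git status --short` prints one line per modified/untracked file,
  // so any output at all means there are uncommitted changes
  execSync('git status --short', {cwd, encoding: 'utf8'}).trim().length > 0
);

const repoDirty = hasUncommittedFiles('.');
console.log(repoDirty()); // later calls reuse the first answer
```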
package/package.json
CHANGED
package/src/lib/tcl-config.js
CHANGED
@@ -1,4 +1,5 @@
 const fs = require('fs-extra');
+const globby = require('globby');
 const path = require('path');
 const sass = require('sass');
 const SaxonJS = require('saxon-js');
@@ -53,6 +54,10 @@ module.exports = class TclConfig {
 }
 }
 
+static findProjects() {
+return globby.sync('config/cmscustom/!(tdi)/!(fonts)').map(p => ({path: p, tcl: fs.existsSync(path.join(p, this.#TCL_FILE_NAME))}));
+}
+
 #outputFn;
 #doModifyFontoHtml;
 #tclFilePathRel;
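`findProjects()` globs the non-`tdi` project paths under `config/cmscustom/` (skipping `fonts` entries) and tags each match with whether it contains the TCL configuration file, so callers can tell TCL projects from traditional ones. A hedged sketch of the call and the shape it returns, with the private `#TCL_FILE_NAME` assumed to resolve to `project.tcl`:

```js
const fs = require('fs-extra');
const globby = require('globby');
const path = require('path');

// Assumption: the class's private #TCL_FILE_NAME resolves to something like this.
const TCL_FILE_NAME = 'project.tcl';

// '!(tdi)' and '!(fonts)' are extglob negations: any directory name except those.
// onlyFiles: false keeps directories in the result on newer globby versions.
const findProjects = () =>
  globby.sync('config/cmscustom/!(tdi)/!(fonts)', {onlyFiles: false})
    .map(p => ({path: p, tcl: fs.existsSync(path.join(p, TCL_FILE_NAME))}));

// Possible result: [{path: 'config/cmscustom/projectA/xml', tcl: true}, ...]
```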
package/src/modules/build/index.js
CHANGED
@@ -4,11 +4,12 @@ const inquirer = require('inquirer');
 const oxygen = require('./oxygen');
 const path = require('path');
 
+
 const createSymlink = (t, p) => {
 const up = p.replace(/[^/]+/g, '..').replace(/^..\//, '');
 fs.symlink(path.join(up, _paths.tdi, t), p, 'dir', err => {
 if (err) {
-if (err.code
+if (err.code==='EEXIST') _write(`Symlink already exists: ${p}`);
 else _error((err.code!=='EPERM' ? err : `Can't create symlinks, restart your console as admin, or fix it permanently:\n 1. Open 'Local Security Policy' (secpol.msc)\n 2. Go to 'Security Settings > Local Policies > User Rights Assignment > Create symbolic links'\n 3. Add your own account\n 4. Restart Windows`));
 }
 else _write(`Symlink created: ${p}`);
@@ -22,7 +23,12 @@ const createSymlinks = () => {
 globby
 .sync('config/cmscustom/!(tdi)/**/fonto')
 .forEach(p => {
-
+const tdiCodebasePath =
+_settingsTdi().codebase === 'dual' && fs.existsSync(path.join(p, '../project.tcl')) ? '/tcl' :
+_settingsTdi().codebase === 'dual' ? '/traditional' :
+''
+;
+createSymlink(src + `/config/cmscustom/tdi${tdiCodebasePath}/fonto/packages-shared`, p + '/packages-shared');
 });
 };
 
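The symlink target now depends on the TDI settings: with a `dual` codebase, projects that carry a `project.tcl` file link against the `tdi/tcl` tree, other projects against `tdi/traditional`, and any other codebase setting keeps the flat `tdi` layout. A small sketch of that decision, with the toolkit's `_settingsTdi()` and the project's fonto directory passed in as plain parameters (assumptions):

```js
const fs = require('fs-extra');
const path = require('path');

// Returns the extra path segment appended to ".../cmscustom/tdi" when linking
// a project's fonto/packages-shared directory.
const tdiCodebasePath = (codebase, projectFontoDir) =>
  codebase === 'dual' && fs.existsSync(path.join(projectFontoDir, '../project.tcl')) ? '/tcl' :
  codebase === 'dual' ? '/traditional' :
  '';

// e.g. with codebase 'dual': '/tcl' when config/cmscustom/projectA/project.tcl exists,
// '/traditional' otherwise; any other codebase value yields '' (pre-1.22 behaviour).
```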
package/src/modules/deploy/config.js
CHANGED
@@ -129,9 +129,6 @@ function prepareForCopy (filter) {
 _info(`Target: ${this.getRemotePath(transferPattern)}`);
 _write();
 
-if (_git.commitLocal().date < _git.commitRemote().date) {
-_warn(`You're not deploying from the most recent git commit!\n`);
-}
 }
 
 
package/src/modules/deploy/index.js
CHANGED
@@ -8,17 +8,60 @@ const tinylr = require('tiny-lr');
 const transfer = require('./execute');
 const c = require('./config');
 const s = require('./srcset');
+const inquirer = require('inquirer');
+
+const checkDeployment = async (ftpConfig) => {
+const deployToDevServer = /\.dev\./.test( ftpConfig?.host );
+const deployToTestServer = /\.t(e)?st\./.test( ftpConfig?.host );
+const deployToProdServer = c?.deliveryPack || !( deployToDevServer || deployToTestServer );
+const isBehind = _git.commitLocal().date < _git.commitRemote().date;
+// In most cases productionBranches are called 'master' or 'main'; Some customers use a development, test and production branches structure; hence we allow 'producti(on|e)'
+const isPrdBranch = ( /master|main|producti(on|e)/.test( _git.commitLocal().branch ) );
+
+if (!deployToTestServer && !deployToProdServer) {
+// dev-environements
+if (isBehind) {
+/* Warn for dev deployements that are behind */
+_warn(`You're not deploying from the most recent commit in branch "${_git.commitLocal().branch}"! Update your repository: ${'tct git --update-repo'.cyan}`);
+}
+return;
+}
 
+// Checks for test / prd environments:
+if ( !isPrdBranch ) {
+_warn(`You want to deploy from branch "${_git.commitLocal().branch}" to a production server. It is strongly advised to deploy from the master branch.`);
+}
+if (_git.hasUncommittedFiles()) {
+_warn(`You have uncommitted changes!`);
+if (_git.commitTdi.hasUncommittedFiles()) _warn(`You have uncommitted changes in the TDI submodule!`);
+}
+if ( isBehind ) {
+_error(`You're not deploying from the most recent commit in branch "${_git.commitLocal().branch}"! Update your repository: ${'tct git --update-repo'.cyan}`);
+}
 
-
+// Ask for continuation of deployment for production environements only; and only if not deploying from a prd branch or when there exist uncommitted changes
+if ( (!isPrdBranch || _git.hasUncommittedFiles()) && deployToProdServer &&
+!( await inquirer.prompt(
+{type: 'confirm', message: 'Are you sure you want to continue?', name: 'confirm', default: false}
+).then( res => res.confirm )
+)
+) {
+return {cancel: true};
+}
+};
+
+module.exports = async function deploy (argv) {
 
 // common setup
 
 c.setServer(argv.server);
 c.prepareForCopy(argv.filter);
-
 const {ftpConfig, remotedir} = c.server;
 
+// Deployment checks; cancel deployment when checks require
+const deploy = await checkDeployment(ftpConfig);
+if (deploy?.cancel) { return; }
+
 if (ftpConfig) {
 const logPath = path.join(remotedir, 'log/deployments.log').toFws;
 const re = new RemoteExec(ftpConfig);
@@ -59,6 +102,7 @@ module.exports = function deploy (argv) {
 }
 
 if (argv.watch || argv.live) {
+if (c?.deliveryPack) _error('You cannot use watch with a delivery-pack.');
 _write(`Watching... (press ctrl+c to quit)\n`);
 
 const lrServer = argv.live && tinylr();
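The warning that `deploy/config.js` used to print is replaced by this `checkDeployment` gate: the FTP host name decides whether the target counts as dev, test or production, dev targets only get a warning when the local commit is behind, and production targets additionally check the branch name and uncommitted changes and ask for confirmation before continuing. A condensed sketch of that flow, with the toolkit's `_git`/`_warn`/`_error` globals replaced by plain parameters (assumptions about their exact shape):

```js
const inquirer = require('inquirer');

// Classify a target from its FTP host name; anything that is neither *.dev.* nor
// *.tst./*.test.* is treated as production, as in the diff above.
const classifyServer = host => {
  if (/\.dev\./.test(host ?? '')) return 'dev';
  if (/\.t(e)?st\./.test(host ?? '')) return 'test';
  return 'prod';
};

// Ask the operator to confirm a risky production deployment; the caller aborts
// when {cancel: true} comes back.
const confirmRiskyDeploy = async () => {
  const {confirm} = await inquirer.prompt({
    type: 'confirm',
    name: 'confirm',
    message: 'Are you sure you want to continue?',
    default: false
  });
  return confirm ? undefined : {cancel: true};
};

// Usage sketch: if (classifyServer(ftpConfig?.host) === 'prod' && riskyState) {
//   if ((await confirmRiskyDeploy())?.cancel) return;
// }
```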
package/src/modules/deploy/srcset.js
CHANGED
@@ -12,8 +12,10 @@ module.exports = {
 create (paths, {test, watch} = {}) {
 const startTime = new Date();
 
-this.filesToTransfer = watch ? paths : globby.sync(paths, {nodir: true}); // get list of files to transfer
-
+this.filesToTransfer = watch ? paths : globby.sync([...paths, ..._filters.fontoSources], {nodir: true}); // get list of files to transfer
+
+// get list of potential base files, if not previously done. Globby 6.x does not expand ** to symlinks, so use **/** to include those too
+this.potentialBaseFilesCache ??= globby.sync(['**/**/*.{xml,xsd,xsl,scss}', ..._filters.fontoSources]);
 this.potentialBaseFiles = this.potentialBaseFilesCache
 .filter(p => !this.filesToTransfer.includes(p)) // subtract files to transfer from list of potential base files
 .map(path => [path]); // make array so file contents can be added later
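`_filters.fontoSources` (defined in `package/index.js` above) is a list of negative glob patterns; spreading it into the `globby.sync` calls keeps `node_modules` and the Fonto SDK directories out of both the transfer list and the base-file cache. A minimal sketch of how such exclusion patterns behave:

```js
const globby = require('globby');

// '!' marks a pattern as an exclusion; the same list is reused by several modules.
const fontoSources = [
  '!**/node_modules/**',
  '!**/fonto/packages/**',
  '!**/fonto/packages_shared/**',
  '!**/fonto/platform/**'
];

// Everything matching the positive pattern except files under the excluded trees.
const potentialBaseFiles = globby.sync(['**/*.{xml,xsd,xsl,scss}', ...fontoSources]);
console.log(potentialBaseFiles.length);
```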
package/src/modules/fonto/commands.js
CHANGED
@@ -10,7 +10,7 @@ const TclConfig = require('../../lib/tcl-config');
 const wws = require('../../lib/worker-with-spinner');
 
 
-g_print.setLogFunction(
+g_print.setLogFunction(filepath => _write(filepath.nostyle));
 
 
 const compileSass = () => new Promise((resolve, reject) => {
@@ -26,14 +26,13 @@ const compileSass = () => new Promise((resolve, reject) => {
 ;
 });
 
-const cmdExec = command => new Promise(
+const cmdExec = command => new Promise(resolve => {
 _info(`Executing command (${command}):`);
 const cp = exec(command);
 const log = msg => {
 const line = msg.replace(/\s+$/, ''); // remove excessive whitespace
 if (line) console.log(line);
 };
-
 cp.stdout.setEncoding('utf8');
 cp.stdout.on('data', data => {
 if (/Error|Input error/.test(data)) _warn(data);
@@ -48,13 +47,19 @@ const cmdExec = command => new Promise((resolve) => {
 });
 
 
+
 module.exports = {
 
 init ([fdt, fontoVersion]) {
 return new Promise((resolve, reject) => {
 _info('Ensure symlink exists:');
-
-
+const tdiCodebasePath =
+_settingsTdi().codebase === 'dual' && fs.existsSync('../project.tcl') ? '/tcl' :
+_settingsTdi().codebase === 'dual' ? '/traditional' :
+''
+;
+fs.symlink(path.join(`../../../tdi${tdiCodebasePath}/fonto/packages-shared`), 'packages-shared', 'dir', err => {
+if (err && err.code!=='EEXIST') reject(err.code==='EPERM' ? 'Start your console as admin for symlink creation!' : err);
 else _write('Done.\n');
 resolve();
 });
@@ -180,7 +185,7 @@ module.exports = {
 Object.entries(rootSchemas).forEach(([, obj]) => {
 const data = execSync(`${fdt} attributes --schema packages/${obj.packageName}/src/assets/schemas/${obj.packageName}.json`, {encoding: 'UTF-8'});
 const attributes = data.replace(/(^.*?Default value\s+)|(\s+Printed name\*.*$)/gs, '').split(/\n\s+/).map(a => a.split(/\s+/)).map(a =>
-a[0] + (a[2]
+a[0] + (a[2]==='required' ? ' (required)' : '') + (a[3]==='-' ? ' (no default)' : '')
 );
 const customAttributes = [...new Set(attributes)].filter(e => e && !ignoreAttributes.includes(e.replace(/ .*/, '')));
 
package/src/modules/fonto/index.js
CHANGED
@@ -29,7 +29,7 @@ module.exports = function fonto (argv) {
 process.chdir(_paths.apply || '.');
 
 // find fonto instances by searching for fonto/manifest.json files
-const fontoPaths = globby.sync(['**/fonto/manifest.json', 'manifest.json', `!${_paths.tdi}
+const fontoPaths = globby.sync(['**/fonto/manifest.json', 'manifest.json', `!${_paths.tdi}/**`, ..._filters.fontoSources])
 .map(p => ([p.replace('manifest.json', ''), fs.readJsonSync(p).sdkVersion.replace(/Nightlies.*/, 'nightly')]))
 ;
 
package/src/modules/git/index.js
CHANGED
@@ -169,9 +169,15 @@ function updateRepo () {
 * @param {string[]} options.dates - An array of two strings representing the start and end dates for the custom date range.
 * @param {string} options.updateSubmodule - The target branch name for the submodule update.
 */
-function updateSubmodule({dates = [], updateSubmodule:
+function updateSubmodule({dates = [], updateSubmodule: releaseBranchName}) {
 const [dateCustomFrom, dateCustomTo] = dates;
-const
+const toBranchName = typeof releaseBranchName !== 'boolean' ? releaseBranchName : null;
+
+if (dateCustomFrom) _warn(`While testing the "TDI commits requiring migration" note that the submodule is not changed.`);
+if (dateCustomFrom && !toBranchName) _error(`Specify a release branch for testing the migration, e.g. "--us 5.6".`);
+
+// For testing the migrations (dates argument is specified); submodule pull, fetch, updates fail or result in switching to the release-branch; while we want to test the migration in the current submodule branch / symlink to TDI-project. Therefore all submodule actions should not be applied when the dates argument is specified. As the release of the current submodule cannot be determined you also need to specify the toBranchName so you can test the migrations you want.
+const branch = !dateCustomFrom ? getTdiBranch(toBranchName) : {'name': `release/${toBranchName}`};
 const branchUpgrade = branch.from ? branch.from.name < branch.name : false;
 
 if (branchUpgrade) _info(`Current branch '${branch.from.name}' will be updated to '${branch.name}'\nCommon ancestor will be used in selecting TDI commits requiring migration: ${_formatDate(branch.commonAncestor.date)}`);
@@ -184,10 +190,12 @@ function updateSubmodule({dates = [], updateSubmodule: toBranchName}) {
 if (branch.commitsBehind > 0 || dateCustomFrom) {
 const dateBeforeUpdate = _git.commitTdi.local().date;
 
-
-
-
-
+if (!dateCustomFrom) {
+// update submodule
+const updateSubmoduleMsg = execGitCommand(`submodule update --remote`, _paths.repo);
+if (updateSubmoduleMsg.error) _error(`Update submodule failed\n${updateSubmoduleMsg.error}`);
+_info(`TDI submodule updated:\n${updateSubmoduleMsg}\n`);
+}
 
 // tdiMigrationFilePath should exist in latest commits of releases 5.3+; As we updated to the latest version this should work
 const migrations = _modulesTdi.require('git/tdiCommitsRequiringMigration.js');
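When no custom date range is given, the submodule is now updated in place via `git submodule update --remote`; with a date range (the migration-testing path) the submodule is left untouched and the release branch must be named explicitly. A rough equivalent of the guarded update, using `child_process` directly instead of the toolkit's `execGitCommand` wrapper (an assumption about that wrapper's behaviour):

```js
const {execSync} = require('child_process');

// Pull the TDI submodule up to the tip of its configured tracking branch.
// Only called when no custom date range was passed, mirroring the diff above.
const updateTdiSubmodule = repoPath =>
  execSync('git submodule update --remote', {cwd: repoPath, encoding: 'utf8'});

// e.g. const msg = updateTdiSubmodule('/path/to/repo'); console.log(msg);
```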
package/src/modules/migrate/index.js
CHANGED
@@ -1,6 +1,7 @@
 const globby = require('globby');
 const inquirer = require('inquirer');
 const path = require('path');
+const TclConfig = require('../../lib/tcl-config');
 
 
 module.exports = function migrate (argv) {
@@ -76,8 +77,9 @@ module.exports = function migrate (argv) {
 _write();
 const startTime = new Date();
 
+const projectsWithType = TclConfig.findProjects();
 const scriptPath = path.join('migrate', a.script);
-_modulesTdi.require(scriptPath).forEach(step => require('./steps')(step, argv.dry, filter));
+_modulesTdi.require(scriptPath).forEach(step => require('./steps')(step, argv.dry, filter, projectsWithType));
 
 _perf(startTime);
 });
package/src/modules/migrate/steps.js
CHANGED
@@ -5,26 +5,37 @@ const path = require('path');
 const rif = require('replace-in-file');
 
 
-
-
-
-
-
-
+class PathSearcher {
+#filter;
+#ignorePaths = [_paths.tdi + '/**', 'config/cmscustom/tdi/**'];
+constructor (filter, ignoreProjectPaths) {
+this.#filter = filter;
+this.#ignorePaths.push(...ignoreProjectPaths);
+}
+#doFilter (paths) {
+return this.#filter ? paths.filter(p => minimatch(p, this.#filter)) : paths;
+}
+get(globPath) {
+return this.#doFilter(globby.sync(globPath, {dot: true, ignore: this.#ignorePaths}));
+}
+}
 
-module.exports = function steps (step, dry, filter) {
 
-
-const setFilter = applyFilter && filter;
+module.exports = function steps (step, dry, passedFilter, projectsWithType) {
 
 if (Array.isArray(step)) {
-step.forEach(substep =>
+step.forEach(substep => steps(substep, dry, passedFilter, projectsWithType));
+return;
 }
 
+const {projectType = 'all', applyFilter = true, deletePaths, renamePaths, replaceInFiles, customAction} = step;
+const projectsToIgnore = projectType === 'all' ? [] : projectsWithType.filter(p => p.tcl !== (projectType === 'tcl'));
+const pathSearcher = new PathSearcher(applyFilter && passedFilter, projectsToIgnore.map(p => p.path + '/**'));
+
 if (deletePaths) {
 deletePaths.forEach(curpath => {
 _info(`Deleting paths: ${curpath}`);
-const paths =
+const paths = pathSearcher.get(curpath);
 if (!dry) paths.forEach(p => fs.removeSync(p));
 _write('Paths deleted:', paths.length);
 });
@@ -33,7 +44,7 @@ module.exports = function steps (step, dry, filter) {
 if (renamePaths) {
 renamePaths.forEach(([curpath, change]) => {
 _info(`Renaming paths: ${curpath}`);
-const paths =
+const paths = pathSearcher.get(curpath);
 if (!dry) paths.forEach(p => fs.moveSync(p, path.join(p, change), {overwrite: true}));
 _write('Paths renamed:', paths.length);
 });
@@ -43,7 +54,7 @@ module.exports = function steps (step, dry, filter) {
 replaceInFiles.forEach(r => {
 _info(`Executing replacements in files: ${r.files}`);
 
-r.files = r.files.map(curpath =>
+r.files = r.files.map(curpath => pathSearcher.get(curpath)).flat();
 r.dry = dry;
 
 if (r.fromtoPairs) {
@@ -51,12 +62,11 @@ module.exports = function steps (step, dry, filter) {
 r.to = r.fromtoPairs.map(p => p[1]);
 }
 
-let filesModCount
-let
-for (let i=0; i<maxRepeat && r.files[0]; i++) { // execute repeatedly for modified files only (with safety limit of 20)
+let filesModCount;
+for (let i=15/* safety limit*/; i>0 && r.files[0]; i--) { // execute repeatedly for modified files only
 r.files = rif.sync(r).filter(f => f.hasChanged).map(f => f.file);
-
-if (i===
+filesModCount ??= r.files.length; // save count only after first run (after this only subsets are processed)
+if (i===1 && r.files[0]) _warn(`Repeated replacement stopped by safety limit - check file changes for too many content occurrences`);
 if (dry) break;
 }
 
@@ -66,7 +76,7 @@ module.exports = function steps (step, dry, filter) {
 
 if (!dry && customAction) {
 customAction.forEach(([dirpath, actionFn]) => {
-
+pathSearcher.get(dirpath).forEach(p => actionFn(p));
 });
 }
 
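Migration steps can now be restricted to one project type: `TclConfig.findProjects()` (threaded through from `migrate/index.js`) tells `steps()` which projects are TCL-based, and `PathSearcher` drops the TDI paths plus every project of the wrong type before the user's `--filter` glob is applied. A hypothetical step object illustrating the fields the updated `steps()` consumes (the glob and replacement values are made up for illustration):

```js
// Hypothetical migration step, shown only to illustrate the recognised fields.
const exampleStep = {
  projectType: 'tcl',       // 'all' (default) runs everywhere; 'tcl' limits the step to
                            // projects whose directory contains the TCL config file
  applyFilter: true,        // honour the --filter glob passed on the command line
  deletePaths: ['**/obsolete-folder'],
  renamePaths: [['**/old-name.xml', '../new-name.xml']],  // second entry is resolved relative to each match
  replaceInFiles: [{files: ['**/*.scss'], from: [/old-mixin/g], to: ['new-mixin']}]
};

// steps(exampleStep, /* dry */ true, /* passedFilter */ 'config/cmscustom/projectA/**', projectsWithType);
```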