@tangelo/tangelo-configuration-toolkit 1.21.0 → 1.23.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -83,7 +83,7 @@ ENDLOCAL
  EXIT /b %errorlevel%
  :find_dp0
  SET dp0=%~dp0
- EXIT
+ EXIT /b
  ```

  Then:
package/index.js CHANGED
@@ -83,8 +83,10 @@ global._git = {
  {commitHash: '77b8ea9', regex: /^7\.14\./}, // 7.14.x
  {commitHash: '8066c44', regex: /^7\.13\./}, // 7.13.x
  {commitHash: 'a2b2d4e', regex: /^7\.12\./} // 7.12.x
- ]
- }
+ ],
+ hasUncommittedFiles: memoize(() => execGitCommand(`status --short`, path.join(_paths.repo, _paths.tdi)).length > 0),
+ },
+ hasUncommittedFiles: memoize(() => execGitCommand(`status --short`, _paths.repo).length > 0),
  };

  global._tdiSubmoduleExists = () => fs.existsSync(path.join(_paths.repo, _paths.tdi));
@@ -122,6 +124,9 @@ global._modulesTdi = {

  global._settingsTdi = memoize(() => Object.assign(_package.settingsTdiDefault, _modulesTdi.require('settings.json', {throws: false, message: false}) ?? {}));

+ global._filters = {
+ fontoSources: ['!**/node_modules/**', '!**/fonto/packages/**', '!**/fonto/packages_shared/**', '!**/fonto/platform/**']
+ };

  process.on('beforeExit', () => {
  _write(); // print empty line before and after update check
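
The two `hasUncommittedFiles` helpers added here memoize a `git status --short` check, one scoped to the TDI submodule and one to the repository root. A minimal standalone sketch of the same pattern, assuming a lodash-style `memoize` and a synchronous git wrapper (the helper names below are illustrative, not the package's globals):

```js
const {execSync} = require('child_process');
const memoize = require('lodash.memoize'); // assumption: any memoize with this signature works

// Run git in the given working directory and return trimmed stdout.
const execGit = (args, cwd) => execSync(`git ${args}`, {cwd, encoding: 'utf8'}).trim();

// Memoized per cwd, so repeated checks during one run hit git only once.
const hasUncommittedFiles = memoize(cwd => execGit('status --short', cwd).length > 0);

// hasUncommittedFiles('/path/to/repo') -> true when `git status --short` prints anything
```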
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@tangelo/tangelo-configuration-toolkit",
- "version": "1.21.0",
+ "version": "1.23.0",
  "engines": {
  "node": ">=14.0.0"
  },
@@ -36,7 +36,7 @@
  "gulp-filter": "^7.0.0",
  "gulp-plumber": "^1.2.1",
  "gulp-print": "^5.0.2",
- "gulp-sass": "^5.0.0",
+ "gulp-sass": "^6.0.0",
  "gulp-sourcemaps": "^3.0.0",
  "inquirer": "^8.2.0",
  "istextorbinary": "^6.0.0",
@@ -45,7 +45,7 @@
  "object-assign-deep": "^0.4.0",
  "p-limit": "^3.1.0",
  "replace-in-file": "^6.3.2",
- "sass": "^1.43.5",
+ "sass": "^1.79.0",
  "saxon-js": "^2.5.0",
  "ssh2-sftp-client": "^9.1.0",
  "through2": "^4.0.2",
package/src/cli.js CHANGED
@@ -34,7 +34,8 @@ module.exports = function cli () {
  init: {alias: 'i', desc: 'Create repository content structure', conflicts: ['p', 's', 'c']},
  project: {alias: 'p', desc: 'Create project configuration', conflicts: ['i', 's', 'c']},
  symlinks: {alias: 's', desc: 'Recreate symlinks to TDI', conflicts: ['p', 'i', 'c']},
- oxygen: {alias: 'x', desc: 'Create or update oXygen project file (.xpr)', conflicts: ['p', 'i', 'c']}
+ oxygen: {alias: 'x', desc: 'Create or update oXygen project file (.xpr)', conflicts: ['p', 'i', 'c']},
+ file: {alias: 'f', desc: '[hidden] Use preset answers from appconfig', requiresArg: true, hidden: !_devmode}
  },
  handler: require('./modules/build')
  })
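
The new `file` option takes an argument (`requiresArg`) and stays out of the help output unless dev mode is active (`hidden: !_devmode`). A sketch of how a hidden, argument-taking option is declared in a yargs-style builder (assuming yargs; the command and handler below are illustrative):

```js
const yargs = require('yargs');

yargs.command('build', 'Create configuration', y => y.options({
  project: {alias: 'p', desc: 'Create project configuration'},
  // Parsed normally, but omitted from --help while hidden is true
  file: {alias: 'f', desc: 'Use preset answers from appconfig', requiresArg: true, hidden: true}
}), argv => {
  if (argv.file) console.log(`Using preset answers: ${argv.file}`);
}).parse();
```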
@@ -7,8 +7,8 @@ module.exports = function getRepoconfig(repoPath) {
  return globby
  .sync('database/config/**/txd_document_types.sql', {cwd: repoPath, absolute: true})
  .map(p => {
- const dtSqlInsert = fs.readFileSync(p).toString().match(/select(.*?)from/s)[1];
- const ntSqlInsert = fs.readFileSync(p.replace('txd_document_types', 'txd_node_types')).toString().match(/select(.*?)from/s)[1];
+ const dtSqlInsert = fs.readFileSync(p).toString().match(/select(.*?)from(?=\s+dual)/s)[1];
+ const ntSqlInsert = fs.readFileSync(p.replace('txd_document_types', 'txd_node_types')).toString().match(/select(.*?)from(?=\s+dual)/s)[1];

  const dtDisplayName = dtSqlInsert.match(/'([^']+)' display_name/)[1];
  const ntName = ntSqlInsert.match(/'([^']+)' name/)[1];
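
The added lookahead means the lazy match only ends at the `from` that introduces `dual`, so a value that itself contains the word `from` no longer truncates the captured select list. A small illustration with a hypothetical statement:

```js
// Hypothetical SQL line; the quoted display name contains the word "from".
const sql = "insert into txd_document_types select 'Letter from board' display_name from dual;";

console.log(sql.match(/select(.*?)from/s)[1]);            // " 'Letter " — old pattern stops too early
console.log(sql.match(/select(.*?)from(?=\s+dual)/s)[1]); // " 'Letter from board' display_name "
```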
@@ -1,4 +1,5 @@
  const fs = require('fs-extra');
+ const globby = require('globby');
  const path = require('path');
  const sass = require('sass');
  const SaxonJS = require('saxon-js');
@@ -53,6 +54,10 @@ module.exports = class TclConfig {
  }
  }

+ static findProjects() {
+ return globby.sync('config/cmscustom/!(tdi)/!(fonts)').map(p => ({path: p, tcl: fs.existsSync(path.join(p, this.#TCL_FILE_NAME))}));
+ }
+
  #outputFn;
  #doModifyFontoHtml;
  #tclFilePathRel;
@@ -4,11 +4,12 @@ const inquirer = require('inquirer');
  const oxygen = require('./oxygen');
  const path = require('path');

+
  const createSymlink = (t, p) => {
  const up = p.replace(/[^/]+/g, '..').replace(/^..\//, '');
  fs.symlink(path.join(up, _paths.tdi, t), p, 'dir', err => {
  if (err) {
- if (err.code=='EEXIST') _write(`Symlink already exists: ${p}`);
+ if (err.code==='EEXIST') _write(`Symlink already exists: ${p}`);
  else _error((err.code!=='EPERM' ? err : `Can't create symlinks, restart your console as admin, or fix it permanently:\n 1. Open 'Local Security Policy' (secpol.msc)\n 2. Go to 'Security Settings > Local Policies > User Rights Assignment > Create symbolic links'\n 3. Add your own account\n 4. Restart Windows`));
  }
  else _write(`Symlink created: ${p}`);
@@ -22,7 +23,12 @@ const createSymlinks = () => {
  globby
  .sync('config/cmscustom/!(tdi)/**/fonto')
  .forEach(p => {
- createSymlink(src + '/config/cmscustom/tdi/fonto/packages-shared', p + '/packages-shared');
+ const tdiCodebasePath =
+ _settingsTdi().codebase === 'dual' && fs.existsSync(path.join(p, '../project.tcl')) ? '/tcl' :
+ _settingsTdi().codebase === 'dual' ? '/traditional' :
+ ''
+ ;
+ createSymlink(src + `/config/cmscustom/tdi${tdiCodebasePath}/fonto/packages-shared`, p + '/packages-shared');
  });
  };

@@ -31,13 +37,13 @@ module.exports = function build (argv) {

  if ((argv.init || argv.project) && _isPre51()) _error('This option only works when using branch release/5.1 and up.');

- const predefinedAnswers = (argv.init === 'file' || argv.project === 'file') ? _appconfig.build : undefined;
+ const predefinedAnswers = argv.file ? _appconfig.build?.[argv.file] : undefined;
  // if not chosen to use predefined values by passing "file", erase the "build" property for old TDI commits
  _appconfig.build = predefinedAnswers;

  if (argv.init) _modulesTdi.require('build/init')(createSymlinks, predefinedAnswers);

- if (argv.project === 'file') _modulesTdi.require('build/project').projectNew(createSymlinks, _repoconfig[0], predefinedAnswers);
+ if (argv.project && argv.file) _modulesTdi.require('build/project').projectNew(createSymlinks, _repoconfig[0], predefinedAnswers);
  else if (argv.project) {
  inquirer
  .prompt([{
@@ -51,8 +51,8 @@ function setServer (searchString) {
  if (!(port && parallel && username)) _error('Config is incomplete!');
  this.server.ftpConfig = {
  host, port, parallel, username,
- agent: process.platform === 'win32' ? 'pageant' : process.env.SSH_AUTH_SOCK,
- agentForward: process.platform !== 'win32',
+ agent: process.env.SSH_AUTH_SOCK ?? process.platform === 'win32' ? '\\\\.\\pipe\\openssh-ssh-agent' : undefined,
+ agentForward: true,
  readyTimeout: 15000,
  retries: 1
  };
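
The `agent` setting now falls back to the Windows OpenSSH agent named pipe instead of Pageant, and agent forwarding is always requested. A sketch of that resolution written with explicit parentheses, assuming the intent is to prefer an existing `SSH_AUTH_SOCK` (the standalone helper is illustrative):

```js
// Illustrative: use SSH_AUTH_SOCK when set; otherwise the Windows OpenSSH
// agent pipe on win32, and no agent elsewhere.
const resolveAgent = () =>
  process.env.SSH_AUTH_SOCK ??
  (process.platform === 'win32' ? '\\\\.\\pipe\\openssh-ssh-agent' : undefined);
```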
@@ -129,9 +129,6 @@ function prepareForCopy (filter) {
  _info(`Target: ${this.getRemotePath(transferPattern)}`);
  _write();

- if (_git.commitLocal().date < _git.commitRemote().date) {
- _warn(`You're not deploying from the most recent git commit!\n`);
- }
  }

@@ -8,24 +8,68 @@ const tinylr = require('tiny-lr');
  const transfer = require('./execute');
  const c = require('./config');
  const s = require('./srcset');
+ const inquirer = require('inquirer');
+
+ const checkDeployment = async ftpConfig => {
+ const deployToDevServer = /\.dev\./.test( ftpConfig?.host );
+ const deployToTestServer = /\.t(e)?st\./.test( ftpConfig?.host );
+ const deployToProdServer = c?.deliveryPack || !( deployToDevServer || deployToTestServer );
+ const isBehind = _git.commitLocal().date < _git.commitRemote().date;
+ // In most cases productionBranches are called 'master' or 'main'; Some customers use a development, test and production branches structure; hence we allow 'producti(on|e)'
+ const isPrdBranch = ( /master|main|producti(on|e)/.test( _git.commitLocal().branch ) );
+
+ if (!deployToTestServer && !deployToProdServer) {
+ // dev-environements
+ if (isBehind) {
+ /* Warn for dev deployements that are behind */
+ _warn(`You're not deploying from the most recent commit in branch "${_git.commitLocal().branch}"! Update your repository: ${'tct git --update-repo'.cyan}`);
+ }
+ return;
+ }
+
+ // Checks for test / prd environments:
+ if ( !isPrdBranch ) {
+ _warn(`You want to deploy from branch "${_git.commitLocal().branch}" to a production server. It is strongly advised to deploy from the master branch.`);
+ }
+ if (_git.hasUncommittedFiles()) {
+ _warn(`You have uncommitted changes!`);
+ if (_git.commitTdi.hasUncommittedFiles()) _warn(`You have uncommitted changes in the TDI submodule!`);
+ }
+ if ( isBehind ) {
+ _error(`You're not deploying from the most recent commit in branch "${_git.commitLocal().branch}"! Update your repository: ${'tct git --update-repo'.cyan}`);
+ }

+ // Ask for continuation of deployment for production environments only; and only if not deploying from a prd branch or when there exist uncommitted changes
+ if ( (!isPrdBranch || _git.hasUncommittedFiles()) && deployToProdServer &&
+ !( await inquirer.prompt(
+ {type: 'confirm', message: 'Are you sure you want to continue?', name: 'confirm', default: false}
+ ).then( res => res.confirm )
+ )
+ ) {
+ return {cancel: true};
+ }
+ };

- module.exports = function deploy (argv) {
+ module.exports = async function deploy (argv) {

  // common setup

  c.setServer(argv.server);
  c.prepareForCopy(argv.filter);
-
  const {ftpConfig, remotedir} = c.server;

+ if (!argv.test && ftpConfig) { // only perform deployment checks when actually transferring to remote server
+ const deploy = await checkDeployment(ftpConfig);
+ if (deploy?.cancel) return;
+ }
+
  if (ftpConfig) {
  const logPath = path.join(remotedir, 'log/deployments.log').toFws;
  const re = new RemoteExec(ftpConfig);

  ftpConfig.eventPut = file => {
  _write(file.destination.replace(remotedir, ''));
- if (path.extname(file.destination)=='.sh')
+ if (path.extname(file.destination)==='.sh')
  re.add('chmod 755 '+file.destination, 'Permissions set: '+file.destination);
  else if (file.destination.match(/cmscustom.*hce.*config.xml/))
  re.add('touch '+c.getRemotePath('hce/hce-config.xml'), 'Touched: '+c.getRemotePath('hce/hce-config.xml'));
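
`checkDeployment` classifies the target from the SFTP host name: hosts containing `.dev.` are development, `.test.`/`.tst.` hosts are test, and everything else (or a delivery pack) is treated as production. A quick illustration with hypothetical host names:

```js
// Hypothetical hosts, matched with the same patterns as checkDeployment.
['cms.dev.example.com', 'cms.tst.example.com', 'cms.example.com'].forEach(host => {
  const dev = /\.dev\./.test(host);
  const test = /\.t(e)?st\./.test(host);
  console.log(host, dev ? 'dev' : test ? 'test' : 'production');
});
// cms.dev.example.com dev
// cms.tst.example.com test
// cms.example.com production
```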
@@ -59,6 +103,7 @@ module.exports = function deploy (argv) {
  }

  if (argv.watch || argv.live) {
+ if (c?.deliveryPack) _error('You cannot use watch with a delivery-pack.');
  _write(`Watching... (press ctrl+c to quit)\n`);

  const lrServer = argv.live && tinylr();
@@ -90,7 +135,7 @@ module.exports = function deploy (argv) {
  gulp.watch(c.transferPatterns)
  .on('all', (event, filepath) => {

- if ((event=='add' || event=='change') && (
+ if ((event==='add' || event==='change') && (
  // within fonto, transfer build files only, but also schema files, because
  // the "dist" folder isn't watched properly: it does not detect "assets" anymore after building once
  !/cmscustom.+fonto[\\/]/.test(filepath) ||
@@ -102,7 +147,7 @@ module.exports = function deploy (argv) {
  transfers.add(filepath);
  }

- else if (event=='unlink' && !/fonto/.test(filepath)) { // ignore fonto files
+ else if (event==='unlink' && !/fonto/.test(filepath)) { // ignore fonto files
  s.removeFromCache(filepath);

  if (!path.parse(filepath).base.match(/\.scss/)) {
@@ -127,14 +172,14 @@ function deployLogInfo (watch, filter, files, action) {

  let logline = `${timestamp} ${action.padEnd(5)} [${user}] [${branch}:${hash.substring(0, 7)}${uncommittedChanges}] [${filter}]`;

- if (action == 'START') {
+ if (action === 'START') {
  if (uncommittedChanges) {
  const uncommittedChanges = _git.status().replace(/\?\?/g, ' U').split('\n');
  logline += '\n Uncommitted changes:\n ' + uncommittedChanges.join('\n ');
  }
  const filepaths = files.map(({destination}) => destination);
  logline += `\n Transferring ${filepaths.length} file${filepaths.length > 1 ? 's' : ''}`;
- if (watch || filepaths.length == 1) logline += ':\n ' + filepaths.join('\n ');
+ if (watch || filepaths.length === 1) logline += ':\n ' + filepaths.join('\n ');
  else logline += ' in dir:\n ' + getCommonPath(filepaths);
  }
  return logline.replace(/"/g, '\\"');
@@ -12,8 +12,10 @@ module.exports = {
  create (paths, {test, watch} = {}) {
  const startTime = new Date();

- this.filesToTransfer = watch ? paths : globby.sync(paths, {nodir: true}); // get list of files to transfer
- this.potentialBaseFilesCache ??= globby.sync('**/*.{xml,xsd,xsl,scss}'); // get list of potential base files, if not previously done
+ this.filesToTransfer = watch ? paths : globby.sync([...paths, ..._filters.fontoSources], {nodir: true}); // get list of files to transfer
+
+ // get list of potential base files, if not previously done. Globby 6.x does not expand ** to symlinks, so use **/** to include those too
+ this.potentialBaseFilesCache ??= globby.sync(['**/**/*.{xml,xsd,xsl,scss}', ..._filters.fontoSources]);
  this.potentialBaseFiles = this.potentialBaseFilesCache
  .filter(p => !this.filesToTransfer.includes(p)) // subtract files to transfer from list of potential base files
  .map(path => [path]); // make array so file contents can be added later
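
`_filters.fontoSources` (defined in index.js above) is a list of negative glob patterns; globby applies them alongside the positive patterns, so `node_modules` and the Fonto packages/platform trees are excluded from both file lists. A minimal sketch of the same call shape, assuming globby's sync API:

```js
const globby = require('globby');

// Same negative patterns as _filters.fontoSources; anything they match is excluded.
const fontoSources = ['!**/node_modules/**', '!**/fonto/packages/**', '!**/fonto/packages_shared/**', '!**/fonto/platform/**'];

// Candidate base files: every xml/xsd/xsl/scss file outside the excluded trees.
const potentialBaseFiles = globby.sync(['**/**/*.{xml,xsd,xsl,scss}', ...fontoSources]);
```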
@@ -10,7 +10,7 @@ const TclConfig = require('../../lib/tcl-config');
  const wws = require('../../lib/worker-with-spinner');


- g_print.setLogFunction((filepath) => _write(filepath.nostyle));
+ g_print.setLogFunction(filepath => _write(filepath.nostyle));


  const compileSass = () => new Promise((resolve, reject) => {
@@ -26,14 +26,13 @@ const compileSass = () => new Promise((resolve, reject) => {
  ;
  });

- const cmdExec = command => new Promise((resolve) => {
+ const cmdExec = command => new Promise(resolve => {
  _info(`Executing command (${command}):`);
  const cp = exec(command);
  const log = msg => {
  const line = msg.replace(/\s+$/, ''); // remove excessive whitespace
  if (line) console.log(line);
  };
-
  cp.stdout.setEncoding('utf8');
  cp.stdout.on('data', data => {
  if (/Error|Input error/.test(data)) _warn(data);
@@ -48,13 +47,19 @@ const cmdExec = command => new Promise((resolve) => {
  });


+
  module.exports = {

  init ([fdt, fontoVersion]) {
  return new Promise((resolve, reject) => {
  _info('Ensure symlink exists:');
- fs.symlink(path.join('../../../tdi/fonto/packages-shared'), 'packages-shared', 'dir', err => {
- if (err && err.code!='EEXIST') reject(err.code=='EPERM' ? 'Start your console as admin for symlink creation!' : err);
+ const tdiCodebasePath =
+ _settingsTdi().codebase === 'dual' && fs.existsSync('../project.tcl') ? '/tcl' :
+ _settingsTdi().codebase === 'dual' ? '/traditional' :
+ ''
+ ;
+ fs.symlink(path.join(`../../../tdi${tdiCodebasePath}/fonto/packages-shared`), 'packages-shared', 'dir', err => {
+ if (err && err.code!=='EEXIST') reject(err.code==='EPERM' ? 'Start your console as admin for symlink creation!' : err);
  else _write('Done.\n');
  resolve();
  });
@@ -180,7 +185,7 @@ module.exports = {
  Object.entries(rootSchemas).forEach(([, obj]) => {
  const data = execSync(`${fdt} attributes --schema packages/${obj.packageName}/src/assets/schemas/${obj.packageName}.json`, {encoding: 'UTF-8'});
  const attributes = data.replace(/(^.*?Default value\s+)|(\s+Printed name\*.*$)/gs, '').split(/\n\s+/).map(a => a.split(/\s+/)).map(a =>
- a[0] + (a[2]=='required' ? ' (required)' : '') + (a[3]=='-' ? ' (no default)' : '')
+ a[0] + (a[2]==='required' ? ' (required)' : '') + (a[3]==='-' ? ' (no default)' : '')
  );
  const customAttributes = [...new Set(attributes)].filter(e => e && !ignoreAttributes.includes(e.replace(/ .*/, '')));

@@ -29,7 +29,7 @@ module.exports = function fonto (argv) {
  process.chdir(_paths.apply || '.');

  // find fonto instances by searching for fonto/manifest.json files
- const fontoPaths = globby.sync(['**/fonto/manifest.json', 'manifest.json', `!${_paths.tdi}/**`])
+ const fontoPaths = globby.sync(['**/fonto/manifest.json', 'manifest.json', `!${_paths.tdi}/**`, ..._filters.fontoSources])
  .map(p => ([p.replace('manifest.json', ''), fs.readJsonSync(p).sdkVersion.replace(/Nightlies.*/, 'nightly')]))
  ;

@@ -169,9 +169,15 @@ function updateRepo () {
  * @param {string[]} options.dates - An array of two strings representing the start and end dates for the custom date range.
  * @param {string} options.updateSubmodule - The target branch name for the submodule update.
  */
- function updateSubmodule({dates = [], updateSubmodule: toBranchName}) {
+ function updateSubmodule({dates = [], updateSubmodule: releaseBranchName}) {
  const [dateCustomFrom, dateCustomTo] = dates;
- const branch = getTdiBranch(typeof toBranchName !== 'boolean' && toBranchName);
+ const toBranchName = typeof releaseBranchName !== 'boolean' ? releaseBranchName : null;
+
+ if (dateCustomFrom) _warn(`While testing the "TDI commits requiring migration" note that the submodule is not changed.`);
+ if (dateCustomFrom && !toBranchName) _error(`Specify a release branch for testing the migration, e.g. "--us 5.6".`);
+
+ // For testing the migrations (dates argument is specified); submodule pull, fetch, updates fail or result in switching to the release-branch; while we want to test the migration in the current submodule branch / symlink to TDI-project. Therefore all submodule actions should not be applied when the dates argument is specified. As the release of the current submodule cannot be determined you also need to specify the toBranchName so you can test the migrations you want.
+ const branch = !dateCustomFrom ? getTdiBranch(toBranchName) : {'name': `release/${toBranchName}`};
  const branchUpgrade = branch.from ? branch.from.name < branch.name : false;

  if (branchUpgrade) _info(`Current branch '${branch.from.name}' will be updated to '${branch.name}'\nCommon ancestor will be used in selecting TDI commits requiring migration: ${_formatDate(branch.commonAncestor.date)}`);
@@ -184,10 +190,13 @@ function updateSubmodule({dates = [], updateSubmodule: toBranchName}) {
  if (branch.commitsBehind > 0 || dateCustomFrom) {
  const dateBeforeUpdate = _git.commitTdi.local().date;

- // update submodule
- const updateSubmoduleMsg = execGitCommand(`submodule update --remote`, _paths.repo);
- if (updateSubmoduleMsg.error) _error(`Update submodule failed\n${updateSubmoduleMsg.error}`);
- if (updateSubmoduleMsg) _info(`TDI submodule updated:\n${updateSubmoduleMsg}\n`);
+ if (!dateCustomFrom) {
+ // update submodule
+ const updateSubmoduleMsg = execGitCommand(`submodule update --remote`, _paths.repo);
+ if (updateSubmoduleMsg.error) _error(`Update submodule failed\n${updateSubmoduleMsg.error}`);
+ execGitCommand(`add tangelo-default-implementation`, _paths.repo);
+ _info(`TDI submodule updated:\n${updateSubmoduleMsg}\n`);
+ }

  // tdiMigrationFilePath should exist in latest commits of releases 5.3+; As we updated to the latest version this should work
  const migrations = _modulesTdi.require('git/tdiCommitsRequiringMigration.js');
@@ -1,6 +1,7 @@
  const globby = require('globby');
  const inquirer = require('inquirer');
  const path = require('path');
+ const TclConfig = require('../../lib/tcl-config');


  module.exports = function migrate (argv) {
@@ -76,8 +77,9 @@ module.exports = function migrate (argv) {
  _write();
  const startTime = new Date();

+ const projectsWithType = TclConfig.findProjects();
  const scriptPath = path.join('migrate', a.script);
- _modulesTdi.require(scriptPath).forEach(step => require('./steps')(step, argv.dry, filter));
+ _modulesTdi.require(scriptPath).forEach(step => require('./steps')(step, argv.dry, filter, projectsWithType));

  _perf(startTime);
  });
@@ -5,26 +5,37 @@ const path = require('path');
  const rif = require('replace-in-file');


- const getPaths = (search, filter) =>
- globby
- .sync(search, {dot: true, ignore: [_paths.tdi + '/**', '**/cmscustom/tdi/**']})
- .filter(p => !filter || minimatch(p, filter)
- );
-
+ class PathSearcher {
+ #filter;
+ #ignorePaths = [_paths.tdi + '/**', 'config/cmscustom/tdi/**'];
+ constructor (filter, ignoreProjectPaths) {
+ this.#filter = filter;
+ this.#ignorePaths.push(...ignoreProjectPaths);
+ }
+ #doFilter (paths) {
+ return this.#filter ? paths.filter(p => minimatch(p, this.#filter)) : paths;
+ }
+ get(globPath) {
+ return this.#doFilter(globby.sync(globPath, {dot: true, ignore: this.#ignorePaths}));
+ }
+ }

- module.exports = function steps (step, dry, filter) {

- const {deletePaths, renamePaths, replaceInFiles, customAction, applyFilter = true} = step;
- const setFilter = applyFilter && filter;
+ module.exports = function steps (step, dry, passedFilter, projectsWithType) {

  if (Array.isArray(step)) {
- step.forEach(substep => require('./steps')(substep, dry, filter));
+ step.forEach(substep => steps(substep, dry, passedFilter, projectsWithType));
+ return;
  }

+ const {projectType = 'all', applyFilter = true, deletePaths, renamePaths, replaceInFiles, customAction} = step;
+ const projectsToIgnore = projectType === 'all' ? [] : projectsWithType.filter(p => p.tcl !== (projectType === 'tcl'));
+ const pathSearcher = new PathSearcher(applyFilter && passedFilter, projectsToIgnore.map(p => p.path + '/**'));
+
  if (deletePaths) {
  deletePaths.forEach(curpath => {
  _info(`Deleting paths: ${curpath}`);
- const paths = getPaths(curpath, setFilter);
+ const paths = pathSearcher.get(curpath);
  if (!dry) paths.forEach(p => fs.removeSync(p));
  _write('Paths deleted:', paths.length);
  });
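
`PathSearcher` replaces the old `getPaths` helper: the optional minimatch filter and the per-project ignore globs are fixed once in the constructor, and `get()` then resolves one glob at a time against them. A hedged usage sketch (the project paths and globs below are illustrative):

```js
// Illustrative: a step with projectType 'tcl' ignores the non-TCL projects.
const projectsWithType = [
  {path: 'config/cmscustom/projectA', tcl: true},
  {path: 'config/cmscustom/projectB', tcl: false}
];
const projectsToIgnore = projectsWithType.filter(p => !p.tcl);
const searcher = new PathSearcher('**/hce/**', projectsToIgnore.map(p => p.path + '/**'));

// Each lookup applies the same filter and ignore list.
const hceConfigs = searcher.get('config/cmscustom/*/hce/*.xml');
```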
@@ -33,7 +44,7 @@ module.exports = function steps (step, dry, filter) {
  if (renamePaths) {
  renamePaths.forEach(([curpath, change]) => {
  _info(`Renaming paths: ${curpath}`);
- const paths = getPaths(curpath, setFilter);
+ const paths = pathSearcher.get(curpath);
  if (!dry) paths.forEach(p => fs.moveSync(p, path.join(p, change), {overwrite: true}));
  _write('Paths renamed:', paths.length);
  });
@@ -43,7 +54,7 @@ module.exports = function steps (step, dry, filter) {
  replaceInFiles.forEach(r => {
  _info(`Executing replacements in files: ${r.files}`);

- r.files = r.files.map(curpath => getPaths(curpath, setFilter)).flat();
+ r.files = r.files.map(curpath => pathSearcher.get(curpath)).flat();
  r.dry = dry;

  if (r.fromtoPairs) {
@@ -51,12 +62,11 @@ module.exports = function steps (step, dry, filter) {
  r.to = r.fromtoPairs.map(p => p[1]);
  }

- let filesModCount = 0;
- let maxRepeat = 15;
- for (let i=0; i<maxRepeat && r.files[0]; i++) { // execute repeatedly for modified files only (with safety limit of 20)
+ let filesModCount;
+ for (let i=15/* safety limit*/; i>0 && r.files[0]; i--) { // execute repeatedly for modified files only
  r.files = rif.sync(r).filter(f => f.hasChanged).map(f => f.file);
- if (i===0) filesModCount = r.files.length; // save count only after first run (after this only subsets are processed)
- if (i===maxRepeat-1 && r.files[0]) _warn(`Repeated replacement stopped by safety limit - check file changes for too many content occurrences`);
+ filesModCount ??= r.files.length; // save count only after first run (after this only subsets are processed)
+ if (i===1 && r.files[0]) _warn(`Repeated replacement stopped by safety limit - check file changes for too many content occurrences`);
  if (dry) break;
  }

@@ -66,7 +76,7 @@ module.exports = function steps (step, dry, filter) {

  if (!dry && customAction) {
  customAction.forEach(([dirpath, actionFn]) => {
- getPaths(dirpath, setFilter).forEach(p => actionFn(p));
+ pathSearcher.get(dirpath).forEach(p => actionFn(p));
  });
  }
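
For reference, a hypothetical migration step in the shape `steps()` now consumes: `projectType` restricts the step to TCL or non-TCL projects (via the `projectsWithType` list from `TclConfig.findProjects()`), `applyFilter` decides whether the user's path filter applies, and the remaining keys drive the delete, rename, replace and custom actions. Everything below is illustrative, not taken from the TDI repository:

```js
// Hypothetical migration step definition.
module.exports = [
  {
    projectType: 'tcl',   // 'all' (default) or 'tcl'; any other value targets the non-TCL projects
    applyFilter: true,    // honour the path filter passed on the command line
    deletePaths: ['config/cmscustom/*/obsolete-folder'],
    renamePaths: [['config/cmscustom/*/old-name.xml', '../new-name.xml']],
    replaceInFiles: [{
      files: ['config/cmscustom/*/hce/*.xml'],
      fromtoPairs: [[/oldElement/g, 'newElement']]
    }],
    customAction: [['config/cmscustom/*/fonto', p => console.log('post-process', p)]]
  }
];
```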