@tangelo/tangelo-configuration-toolkit 1.20.2 → 1.22.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -83,7 +83,7 @@ ENDLOCAL
83
83
  EXIT /b %errorlevel%
84
84
  :find_dp0
85
85
  SET dp0=%~dp0
86
- EXIT
86
+ EXIT /b
87
87
  ```
88
88
 
89
89
  Then:
package/index.js CHANGED
@@ -72,6 +72,7 @@ global._git = {
72
72
  if (!_appdata.gitUser) _appdata._update({gitUser: execGitCommand(`config --get user.email`, _paths.repo)});
73
73
  return _appdata.gitUser;
74
74
  },
75
+ status: memoize(() => execGitCommand(`status -s`, _paths.repo)),
75
76
  commitLocal: memoize(() => execGitCommand(`log -1 --format=%D;%H;%cd --date=iso-strict`, _paths.repo, ['branch', 'hash', 'date'])),
76
77
  commitRemote: memoize(() => execGitCommand('log -1 --format=%cd --date=iso-strict origin/' + _git.commitLocal().branch, _paths.repo, ['date'])),
77
78
  commitTdi: {
@@ -82,8 +83,10 @@ global._git = {
82
83
  {commitHash: '77b8ea9', regex: /^7\.14\./}, // 7.14.x
83
84
  {commitHash: '8066c44', regex: /^7\.13\./}, // 7.13.x
84
85
  {commitHash: 'a2b2d4e', regex: /^7\.12\./} // 7.12.x
85
- ]
86
- }
86
+ ],
87
+ hasUncommittedFiles: memoize(() => execGitCommand(`status --short`, path.join(_paths.repo, _paths.tdi)).length > 0),
88
+ },
89
+ hasUncommittedFiles: memoize(() => execGitCommand(`status --short`, _paths.repo).length > 0),
87
90
  };
88
91
 
89
92
  global._tdiSubmoduleExists = () => fs.existsSync(path.join(_paths.repo, _paths.tdi));
@@ -121,6 +124,9 @@ global._modulesTdi = {
121
124
 
122
125
  global._settingsTdi = memoize(() => Object.assign(_package.settingsTdiDefault, _modulesTdi.require('settings.json', {throws: false, message: false}) ?? {}));
123
126
 
127
+ global._filters = {
128
+ fontoSources: ['!**/node_modules/**', '!**/fonto/packages/**', '!**/fonto/packages_shared/**', '!**/fonto/platform/**']
129
+ };
124
130
 
125
131
  process.on('beforeExit', () => {
126
132
  _write(); // print empty line before and after update check
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@tangelo/tangelo-configuration-toolkit",
3
- "version": "1.20.2",
3
+ "version": "1.22.1",
4
4
  "engines": {
5
5
  "node": ">=14.0.0"
6
6
  },
@@ -21,13 +21,15 @@ module.exports = function(ftpConfig, remotedir) {
21
21
  return cb();
22
22
  },
23
23
 
24
- function flush(cb) {
24
+ async function flush(cb) {
25
25
  if (!files[0]) cb();
26
26
 
27
27
  const paths = [...new Set(files.map(({destination}) => path.dirname(destination)))] // collect unique paths
28
28
  .filter((p1, i, a) => !a.find(p2 => p1 != p2 && p2.includes(p1+'/'))) // remove paths being part of others
29
29
  ;
30
30
 
31
+ await ftpConfig.eventBeforeAll(files);
32
+
31
33
  sftp.connect(ftpConfig)
32
34
  .then(() => Promise.all(paths.map(p => parallel( // check if all directories exist
33
35
  () => sftp.exists(p).then(r => {
@@ -48,8 +50,8 @@ module.exports = function(ftpConfig, remotedir) {
48
50
  ).catch(err => _warn(`File transfer failed${err ? ': '+err : ''}`));
49
51
  }
50
52
  ))))
51
- .then(() => {
52
- _info(`${files.length} file(s) transferred`);
53
+ .then(async () => {
54
+ await ftpConfig.eventAfterAll(files);
53
55
  cb();
54
56
  return sftp.end();
55
57
  })
@@ -0,0 +1,44 @@
1
+ const {NodeSSH} = require('node-ssh');
2
+ const pLimit = require('p-limit');
3
+
4
+
5
+ module.exports = class RemoteExec {
6
+
7
+ #ftpConfig;
8
+ #queue = [];
9
+
10
+ constructor(ftpConfig) {
11
+ this.#ftpConfig = ftpConfig;
12
+ }
13
+
14
+ add(cmd, msg) {
15
+ if (!this.#queue.find(e => e[0]==cmd)) this.#queue.push([cmd, msg]);
16
+ return this;
17
+ }
18
+
19
+ async process() {
20
+ if (!this.#queue[0]) return;
21
+
22
+ const sshI = new NodeSSH();
23
+ const limit = pLimit(this.#ftpConfig.parallel);
24
+
25
+ await sshI.connect(this.#ftpConfig) // set up connection once
26
+ .then(() => Promise.all(this.#queue.map(([cmd, msg]) => limit(
27
+ () => sshI.execCommand(cmd).then(result => {
28
+ if (result.stderr) _warn(result.stderr);
29
+ else if (msg === 'STDOUT') _write(result.stdout);
30
+ else if (msg) _info(msg === 'CMD' ? cmd : msg);
31
+ })
32
+ ))))
33
+ .catch(err => {
34
+ if (err.code=='ECONNRESET' || err.code=='ECONNABORTED') _warn(`Server aborted connection. Retrying.`);
35
+ else _warn(err);
36
+ this.process(); // retry set up connection
37
+ })
38
+ .finally(() => {
39
+ sshI.dispose();
40
+ this.#queue = [];
41
+ });
42
+ }
43
+
44
+ };
@@ -1,4 +1,5 @@
1
1
  const fs = require('fs-extra');
2
+ const globby = require('globby');
2
3
  const path = require('path');
3
4
  const sass = require('sass');
4
5
  const SaxonJS = require('saxon-js');
@@ -53,6 +54,10 @@ module.exports = class TclConfig {
53
54
  }
54
55
  }
55
56
 
57
+ static findProjects() {
58
+ return globby.sync('config/cmscustom/!(tdi)/!(fonts)').map(p => ({path: p, tcl: fs.existsSync(path.join(p, this.#TCL_FILE_NAME))}));
59
+ }
60
+
56
61
  #outputFn;
57
62
  #doModifyFontoHtml;
58
63
  #tclFilePathRel;
@@ -4,11 +4,12 @@ const inquirer = require('inquirer');
4
4
  const oxygen = require('./oxygen');
5
5
  const path = require('path');
6
6
 
7
+
7
8
  const createSymlink = (t, p) => {
8
9
  const up = p.replace(/[^/]+/g, '..').replace(/^..\//, '');
9
10
  fs.symlink(path.join(up, _paths.tdi, t), p, 'dir', err => {
10
11
  if (err) {
11
- if (err.code=='EEXIST') _write(`Symlink already exists: ${p}`);
12
+ if (err.code==='EEXIST') _write(`Symlink already exists: ${p}`);
12
13
  else _error((err.code!=='EPERM' ? err : `Can't create symlinks, restart your console as admin, or fix it permanently:\n 1. Open 'Local Security Policy' (secpol.msc)\n 2. Go to 'Security Settings > Local Policies > User Rights Assignment > Create symbolic links'\n 3. Add your own account\n 4. Restart Windows`));
13
14
  }
14
15
  else _write(`Symlink created: ${p}`);
@@ -22,7 +23,12 @@ const createSymlinks = () => {
22
23
  globby
23
24
  .sync('config/cmscustom/!(tdi)/**/fonto')
24
25
  .forEach(p => {
25
- createSymlink(src + '/config/cmscustom/tdi/fonto/packages-shared', p + '/packages-shared');
26
+ const tdiCodebasePath =
27
+ _settingsTdi().codebase === 'dual' && fs.existsSync(path.join(p, '../project.tcl')) ? '/tcl' :
28
+ _settingsTdi().codebase === 'dual' ? '/traditional' :
29
+ ''
30
+ ;
31
+ createSymlink(src + `/config/cmscustom/tdi${tdiCodebasePath}/fonto/packages-shared`, p + '/packages-shared');
26
32
  });
27
33
  };
28
34
 
@@ -129,9 +129,6 @@ function prepareForCopy (filter) {
129
129
  _info(`Target: ${this.getRemotePath(transferPattern)}`);
130
130
  _write();
131
131
 
132
- if (_git.commitLocal().date < _git.commitRemote().date) {
133
- _warn(`You're not deploying from the most recent git commit!\n`);
134
- }
135
132
  }
136
133
 
137
134
 
@@ -3,7 +3,6 @@ const execGitCommand = require('../../lib/exec-git-command');
3
3
  const fs = require('fs-extra');
4
4
  const globby = require('globby');
5
5
  const gulp = require('gulp');
6
- const {NodeSSH} = require('node-ssh');
7
6
  const g_tcl = require('../../lib/gulp-tcl-config');
8
7
  const through2 = require('through2');
9
8
  const {spawnSync} = require('child_process');
@@ -24,59 +23,6 @@ const c = require('./config');
24
23
  const s = require('./srcset');
25
24
 
26
25
 
27
- const remote = {
28
- queue: [],
29
- add(cmd, msg) {
30
- if (!this.queue.find(e => e[0]==cmd)) this.queue.push([cmd, msg]);
31
- return this;
32
- },
33
- do(sshI) {
34
- let executing = Math.min(this.queue.length, c.server.ftpConfig.parallel);
35
-
36
- if (executing===0) { // close connection and return callback after last exec command
37
- sshI.dispose();
38
- _write();
39
- }
40
-
41
- // execute each command from the queue in batches
42
- this.queue.filter((e, i) => i < executing).forEach(()=>{
43
- const command = this.queue.shift();
44
-
45
- sshI.execCommand(command[0])
46
- .then(result => {
47
- if (result.stderr) _warn(result.stderr);
48
- else if (command[1] === '') _write(result.stdout);
49
- else _info(command[1] || command[0], true);
50
-
51
- executing--;
52
- if (executing===0) this.do(sshI);
53
- })
54
- .catch(() => {
55
- _warn(`Server aborted connection. Retrying.`);
56
- this.queue.unshift(command);
57
-
58
- executing--;
59
- if (executing===0) this.do(sshI);
60
- });
61
- });
62
- },
63
- process() {
64
- if (!this.queue[0]) return;
65
-
66
- const sshI = new NodeSSH();
67
- sshI.connect(c.server.ftpConfig) // set up connection once
68
- .then(() => {
69
- this.do(sshI);
70
- })
71
- .catch(err => {
72
- if (err.code=='ECONNRESET' || err.code=='ECONNABORTED') _warn(`Server aborted connection. Retrying.`);
73
- else _warn(err);
74
- this.process(); // retry set up connection
75
- });
76
- }
77
- };
78
-
79
-
80
26
  const createDeliveryPack = () => { // create install scripts if necessary, zip output and remove temp folder
81
27
  const dbiSql = [];
82
28
  const dbiPs1 = [];
@@ -120,7 +66,7 @@ const createDeliveryPack = () => { // create install scripts if necessary, zip o
120
66
  };
121
67
 
122
68
 
123
- const transfer = (paths, {watch, lrServer} = {}) => {
69
+ module.exports = function transfer (paths, {watch, lrServer} = {}) {
124
70
  const jsFGlob = _settingsTdi().transpileToES5 ? ['**/*.js', '!**/hce/**/template_*', '!**/fonto/**', '!**/vendor/**', '!**/*.min.js'] : [];
125
71
  const jsF = g_filter(jsFGlob, {restore: true});
126
72
  const xpsF = g_filter(['**/xopus/**/*.x*'], {restore: true});
@@ -189,8 +135,6 @@ const transfer = (paths, {watch, lrServer} = {}) => {
189
135
  _info('Finished transferring\n', true);
190
136
  if (fontoPaths.outdated[0]) _warn(`Fonto build files in the following folders were outdated and therefore skipped:\n ${fontoPaths.outdated.map(v => v.slice(0, -1)).join('\n ')}`);
191
137
 
192
- remote.process();
193
-
194
138
  if (lrServer) { // reload specific resources if files are all image or css, else reload page
195
139
  const reloadPage = !files.every(f => /.(?:css|jpe?g|png|gif)$/i.test(f));
196
140
  lrServer.changed({params: {files: reloadPage ? ['page'] : files}});
@@ -199,7 +143,4 @@ const transfer = (paths, {watch, lrServer} = {}) => {
199
143
 
200
144
  if (c.deliveryPack) createDeliveryPack();
201
145
  });
202
- };
203
-
204
-
205
- module.exports = {remote, transfer};
146
+ };
@@ -1,32 +1,93 @@
1
1
  const del = require('del');
2
2
  const gulp = require('gulp');
3
3
  const path = require('path');
4
+ const RemoteExec = require('../../lib/remote-exec');
4
5
  const sftpClient = require('ssh2-sftp-client');
5
6
  const tinylr = require('tiny-lr');
6
7
 
7
- const {remote, transfer} = require('./execute');
8
+ const transfer = require('./execute');
8
9
  const c = require('./config');
9
10
  const s = require('./srcset');
11
+ const inquirer = require('inquirer');
12
+
13
+ const checkDeployment = async (ftpConfig) => {
14
+ const deployToDevServer = /\.dev\./.test( ftpConfig?.host );
15
+ const deployToTestServer = /\.t(e)?st\./.test( ftpConfig?.host );
16
+ const deployToProdServer = c?.deliveryPack || !( deployToDevServer || deployToTestServer );
17
+ const isBehind = _git.commitLocal().date < _git.commitRemote().date;
18
+ // In most cases production branches are called 'master' or 'main'; some customers use a development, test and production branch structure; hence we allow 'producti(on|e)'
19
+ const isPrdBranch = ( /master|main|producti(on|e)/.test( _git.commitLocal().branch ) );
20
+
21
+ if (!deployToTestServer && !deployToProdServer) {
22
+ // dev-environments
23
+ if (isBehind) {
24
+ /* Warn for dev deployments that are behind */
25
+ _warn(`You're not deploying from the most recent commit in branch "${_git.commitLocal().branch}"! Update your repository: ${'tct git --update-repo'.cyan}`);
26
+ }
27
+ return;
28
+ }
10
29
 
30
+ // Checks for test / prd environments:
31
+ if ( !isPrdBranch ) {
32
+ _warn(`You want to deploy from branch "${_git.commitLocal().branch}" to a production server. It is strongly advised to deploy from the master branch.`);
33
+ }
34
+ if (_git.hasUncommittedFiles()) {
35
+ _warn(`You have uncommitted changes!`);
36
+ if (_git.commitTdi.hasUncommittedFiles()) _warn(`You have uncommitted changes in the TDI submodule!`);
37
+ }
38
+ if ( isBehind ) {
39
+ _error(`You're not deploying from the most recent commit in branch "${_git.commitLocal().branch}"! Update your repository: ${'tct git --update-repo'.cyan}`);
40
+ }
41
+
42
+ // Ask for continuation of deployment for production environments only; and only if not deploying from a prd branch or when there exist uncommitted changes
43
+ if ( (!isPrdBranch || _git.hasUncommittedFiles()) && deployToProdServer &&
44
+ !( await inquirer.prompt(
45
+ {type: 'confirm', message: 'Are you sure you want to continue?', name: 'confirm', default: false}
46
+ ).then( res => res.confirm )
47
+ )
48
+ ) {
49
+ return {cancel: true};
50
+ }
51
+ };
11
52
 
12
- module.exports = function deploy (argv) {
53
+ module.exports = async function deploy (argv) {
13
54
 
14
55
  // common setup
15
56
 
16
57
  c.setServer(argv.server);
17
58
  c.prepareForCopy(argv.filter);
59
+ const {ftpConfig, remotedir} = c.server;
60
+
61
+ // Deployment checks; cancel deployment when checks require
62
+ const deploy = await checkDeployment(ftpConfig);
63
+ if (deploy?.cancel) { return; }
18
64
 
19
- if (c.server.ftpConfig) {
20
- c.server.ftpConfig.eventPut = file => {
21
- _write(file.destination.replace(c.server.remotedir, ''));
65
+ if (ftpConfig) {
66
+ const logPath = path.join(remotedir, 'log/deployments.log').toFws;
67
+ const re = new RemoteExec(ftpConfig);
68
+
69
+ ftpConfig.eventPut = file => {
70
+ _write(file.destination.replace(remotedir, ''));
22
71
  if (path.extname(file.destination)=='.sh')
23
- remote.add('chmod 755 '+file.destination, 'Permissions set: '+file.destination);
72
+ re.add('chmod 755 '+file.destination, 'Permissions set: '+file.destination);
24
73
  else if (file.destination.match(/cmscustom.*hce.*config.xml/))
25
- remote.add('touch '+c.getRemotePath('hce/hce-config.xml'), 'Touched: '+c.getRemotePath('hce/hce-config.xml'));
74
+ re.add('touch '+c.getRemotePath('hce/hce-config.xml'), 'Touched: '+c.getRemotePath('hce/hce-config.xml'));
26
75
  else if (file.destination.match(/cmscustom.*od[fts].*config.xml/))
27
- remote.add('touch '+c.getRemotePath('odf/odf-config.xml'), 'Touched: '+c.getRemotePath('odf/odf-config.xml'));
76
+ re.add('touch '+c.getRemotePath('odf/odf-config.xml'), 'Touched: '+c.getRemotePath('odf/odf-config.xml'));
28
77
  else if (file.destination.match(/txp\/site-configs/))
29
- remote.add('touch '+c.getRemotePath('txp/xmlpages-config.xml'), 'Touched: '+c.getRemotePath('txp/xmlpages-config.xml'));
78
+ re.add('touch '+c.getRemotePath('txp/xmlpages-config.xml'), 'Touched: '+c.getRemotePath('txp/xmlpages-config.xml'));
79
+ };
80
+
81
+ ftpConfig.eventBeforeAll = async files => {
82
+ const dli = deployLogInfo(!argv.copy, c.transferPatterns[0], files, 'START');
83
+ await re.add(`echo -e "${dli}" >> ${logPath}`).process();
84
+ };
85
+
86
+ ftpConfig.eventAfterAll = async files => {
87
+ _info(`${files.length} file(s) transferred`);
88
+ const dli = deployLogInfo(!argv.copy, c.transferPatterns[0], files, 'DONE');
89
+ re.add(`echo -e "${dli}" >> ${logPath}`).add(`echo "$(tail -10000 ${logPath})" > ${logPath}`);
90
+ await re.process();
30
91
  };
31
92
  }
32
93
 
@@ -41,6 +102,7 @@ module.exports = function deploy (argv) {
41
102
  }
42
103
 
43
104
  if (argv.watch || argv.live) {
105
+ if (c?.deliveryPack) _error('You cannot use watch with a delivery-pack.');
44
106
  _write(`Watching... (press ctrl+c to quit)\n`);
45
107
 
46
108
  const lrServer = argv.live && tinylr();
@@ -64,9 +126,9 @@ module.exports = function deploy (argv) {
64
126
  };
65
127
 
66
128
  // check connection
67
- if (c.server.ftpConfig) {
129
+ if (ftpConfig) {
68
130
  const sftp = new sftpClient();
69
- sftp.connect(c.server.ftpConfig).then(() => sftp.end()).catch(err => _error(`Could not connect to server${err ? ': '+err.message : ''}`));
131
+ sftp.connect(ftpConfig).then(() => sftp.end()).catch(err => _error(`Could not connect to server${err ? ': '+err.message : ''}`));
70
132
  }
71
133
 
72
134
  gulp.watch(c.transferPatterns)
@@ -89,8 +151,8 @@ module.exports = function deploy (argv) {
89
151
 
90
152
  if (!path.parse(filepath).base.match(/\.scss/)) {
91
153
  const rp = c.getRemotePath(filepath);
92
- const msg = 'Removed: ' + rp.replace(c.server.remotedir, '').white;
93
- if (c.server.ftpConfig) remote.add(`rm -rf "${rp}"`, msg).process();
154
+ const msg = 'Removed: ' + rp.replace(remotedir, '').white;
155
+ if (ftpConfig) new RemoteExec(ftpConfig).add(`rm -rf "${rp}"`, msg).process();
94
156
  else del([rp], {force: true}).then(() => _info(msg, true));
95
157
  }
96
158
  }
@@ -98,4 +160,46 @@ module.exports = function deploy (argv) {
98
160
 
99
161
  }
100
162
 
101
- };
163
+ };
164
+
165
+
166
+ function deployLogInfo (watch, filter, files, action) {
167
+ const timestamp = new Date().toISOString();
168
+ const user = _git.user().split('@')[0];
169
+ const {branch, hash} = _git.commitLocal();
170
+ const uncommittedChanges = _git.status().trim() ? ':~' : '';
171
+
172
+ let logline = `${timestamp} ${action.padEnd(5)} [${user}] [${branch}:${hash.substring(0, 7)}${uncommittedChanges}] [${filter}]`;
173
+
174
+ if (action == 'START') {
175
+ if (uncommittedChanges) {
176
+ const uncommittedChanges = _git.status().replace(/\?\?/g, ' U').split('\n');
177
+ logline += '\n Uncommitted changes:\n ' + uncommittedChanges.join('\n ');
178
+ }
179
+ const filepaths = files.map(({destination}) => destination);
180
+ logline += `\n Transferring ${filepaths.length} file${filepaths.length > 1 ? 's' : ''}`;
181
+ if (watch || filepaths.length == 1) logline += ':\n ' + filepaths.join('\n ');
182
+ else logline += ' in dir:\n ' + getCommonPath(filepaths);
183
+ }
184
+ return logline.replace(/"/g, '\\"');
185
+ }
186
+
187
+
188
+ function getCommonPath(paths) {
189
+ const splitPaths = paths.map(p => p.split('/'));
190
+ let commonPath = splitPaths[0];
191
+
192
+ for (let i = 1; i < splitPaths.length; i++) {
193
+ const pathParts = splitPaths[i];
194
+ commonPath = commonPath.slice(0, Math.min(commonPath.length, pathParts.length));
195
+
196
+ for (let j = 0; j < commonPath.length; j++) {
197
+ if (commonPath[j] !== pathParts[j]) {
198
+ commonPath = commonPath.slice(0, j);
199
+ break;
200
+ }
201
+ }
202
+ }
203
+
204
+ return commonPath.length === 0 ? '' : commonPath.join('/');
205
+ }
@@ -12,8 +12,10 @@ module.exports = {
12
12
  create (paths, {test, watch} = {}) {
13
13
  const startTime = new Date();
14
14
 
15
- this.filesToTransfer = watch ? paths : globby.sync(paths, {nodir: true}); // get list of files to transfer
16
- this.potentialBaseFilesCache ??= globby.sync('**/*.{xml,xsd,xsl,scss}'); // get list of potential base files, if not previously done
15
+ this.filesToTransfer = watch ? paths : globby.sync([...paths, ..._filters.fontoSources], {nodir: true}); // get list of files to transfer
16
+
17
+ // get list of potential base files, if not previously done. Globby 6.x does not expand ** to symlinks, so use **/** to include those too
18
+ this.potentialBaseFilesCache ??= globby.sync(['**/**/*.{xml,xsd,xsl,scss}', ..._filters.fontoSources]);
17
19
  this.potentialBaseFiles = this.potentialBaseFilesCache
18
20
  .filter(p => !this.filesToTransfer.includes(p)) // subtract files to transfer from list of potential base files
19
21
  .map(path => [path]); // make array so file contents can be added later
@@ -10,7 +10,7 @@ const TclConfig = require('../../lib/tcl-config');
10
10
  const wws = require('../../lib/worker-with-spinner');
11
11
 
12
12
 
13
- g_print.setLogFunction((filepath) => _write(filepath.nostyle));
13
+ g_print.setLogFunction(filepath => _write(filepath.nostyle));
14
14
 
15
15
 
16
16
  const compileSass = () => new Promise((resolve, reject) => {
@@ -26,14 +26,13 @@ const compileSass = () => new Promise((resolve, reject) => {
26
26
  ;
27
27
  });
28
28
 
29
- const cmdExec = command => new Promise((resolve) => {
29
+ const cmdExec = command => new Promise(resolve => {
30
30
  _info(`Executing command (${command}):`);
31
31
  const cp = exec(command);
32
32
  const log = msg => {
33
33
  const line = msg.replace(/\s+$/, ''); // remove excessive whitespace
34
34
  if (line) console.log(line);
35
35
  };
36
-
37
36
  cp.stdout.setEncoding('utf8');
38
37
  cp.stdout.on('data', data => {
39
38
  if (/Error|Input error/.test(data)) _warn(data);
@@ -48,13 +47,19 @@ const cmdExec = command => new Promise((resolve) => {
48
47
  });
49
48
 
50
49
 
50
+
51
51
  module.exports = {
52
52
 
53
53
  init ([fdt, fontoVersion]) {
54
54
  return new Promise((resolve, reject) => {
55
55
  _info('Ensure symlink exists:');
56
- fs.symlink(path.join('../../../tdi/fonto/packages-shared'), 'packages-shared', 'dir', err => {
57
- if (err && err.code!='EEXIST') reject(err.code=='EPERM' ? 'Start your console as admin for symlink creation!' : err);
56
+ const tdiCodebasePath =
57
+ _settingsTdi().codebase === 'dual' && fs.existsSync('../project.tcl') ? '/tcl' :
58
+ _settingsTdi().codebase === 'dual' ? '/traditional' :
59
+ ''
60
+ ;
61
+ fs.symlink(path.join(`../../../tdi${tdiCodebasePath}/fonto/packages-shared`), 'packages-shared', 'dir', err => {
62
+ if (err && err.code!=='EEXIST') reject(err.code==='EPERM' ? 'Start your console as admin for symlink creation!' : err);
58
63
  else _write('Done.\n');
59
64
  resolve();
60
65
  });
@@ -180,7 +185,7 @@ module.exports = {
180
185
  Object.entries(rootSchemas).forEach(([, obj]) => {
181
186
  const data = execSync(`${fdt} attributes --schema packages/${obj.packageName}/src/assets/schemas/${obj.packageName}.json`, {encoding: 'UTF-8'});
182
187
  const attributes = data.replace(/(^.*?Default value\s+)|(\s+Printed name\*.*$)/gs, '').split(/\n\s+/).map(a => a.split(/\s+/)).map(a =>
183
- a[0] + (a[2]=='required' ? ' (required)' : '') + (a[3]=='-' ? ' (no default)' : '')
188
+ a[0] + (a[2]==='required' ? ' (required)' : '') + (a[3]==='-' ? ' (no default)' : '')
184
189
  );
185
190
  const customAttributes = [...new Set(attributes)].filter(e => e && !ignoreAttributes.includes(e.replace(/ .*/, '')));
186
191
 
@@ -29,7 +29,7 @@ module.exports = function fonto (argv) {
29
29
  process.chdir(_paths.apply || '.');
30
30
 
31
31
  // find fonto instances by searching for fonto/manifest.json files
32
- const fontoPaths = globby.sync(['**/fonto/manifest.json', 'manifest.json', `!${_paths.tdi}/**`])
32
+ const fontoPaths = globby.sync(['**/fonto/manifest.json', 'manifest.json', `!${_paths.tdi}/**`, ..._filters.fontoSources])
33
33
  .map(p => ([p.replace('manifest.json', ''), fs.readJsonSync(p).sdkVersion.replace(/Nightlies.*/, 'nightly')]))
34
34
  ;
35
35
 
@@ -169,9 +169,15 @@ function updateRepo () {
169
169
  * @param {string[]} options.dates - An array of two strings representing the start and end dates for the custom date range.
170
170
  * @param {string} options.updateSubmodule - The target branch name for the submodule update.
171
171
  */
172
- function updateSubmodule({dates = [], updateSubmodule: toBranchName}) {
172
+ function updateSubmodule({dates = [], updateSubmodule: releaseBranchName}) {
173
173
  const [dateCustomFrom, dateCustomTo] = dates;
174
- const branch = getTdiBranch(typeof toBranchName !== 'boolean' && toBranchName);
174
+ const toBranchName = typeof releaseBranchName !== 'boolean' ? releaseBranchName : null;
175
+
176
+ if (dateCustomFrom) _warn(`While testing the "TDI commits requiring migration" note that the submodule is not changed.`);
177
+ if (dateCustomFrom && !toBranchName) _error(`Specify a release branch for testing the migration, e.g. "--us 5.6".`);
178
+
179
+ // For testing the migrations (dates argument is specified); submodule pull, fetch, updates fail or result in switching to the release-branch; while we want to test the migration in the current submodule branch / symlink to TDI-project. Therefore all submodule actions should not be applied when the dates argument is specified. As the release of the current submodule cannot be determined you also need to specify the toBranchName so you can test the migrations you want.
180
+ const branch = !dateCustomFrom ? getTdiBranch(toBranchName) : {'name': `release/${toBranchName}`};
175
181
  const branchUpgrade = branch.from ? branch.from.name < branch.name : false;
176
182
 
177
183
  if (branchUpgrade) _info(`Current branch '${branch.from.name}' will be updated to '${branch.name}'\nCommon ancestor will be used in selecting TDI commits requiring migration: ${_formatDate(branch.commonAncestor.date)}`);
@@ -184,10 +190,12 @@ function updateSubmodule({dates = [], updateSubmodule: toBranchName}) {
184
190
  if (branch.commitsBehind > 0 || dateCustomFrom) {
185
191
  const dateBeforeUpdate = _git.commitTdi.local().date;
186
192
 
187
- // update submodule
188
- const updateSubmoduleMsg = execGitCommand(`submodule update --remote`, _paths.repo);
189
- if (updateSubmoduleMsg.error) _error(`Update submodule failed\n${updateSubmoduleMsg.error}`);
190
- if (updateSubmoduleMsg) _info(`TDI submodule updated:\n${updateSubmoduleMsg}\n`);
193
+ if (!dateCustomFrom) {
194
+ // update submodule
195
+ const updateSubmoduleMsg = execGitCommand(`submodule update --remote`, _paths.repo);
196
+ if (updateSubmoduleMsg.error) _error(`Update submodule failed\n${updateSubmoduleMsg.error}`);
197
+ _info(`TDI submodule updated:\n${updateSubmoduleMsg}\n`);
198
+ }
191
199
 
192
200
  // tdiMigrationFilePath should exist in latest commits of releases 5.3+; As we updated to the latest version this should work
193
201
  const migrations = _modulesTdi.require('git/tdiCommitsRequiringMigration.js');
@@ -6,7 +6,7 @@ const {Table} = require('console-table-printer');
6
6
  const execGitCommand = require('../../lib/exec-git-command');
7
7
  const getTdiBranch = require('../../lib/get-tdi-branch');
8
8
  const c = require('../deploy/config');
9
- const {remote} = require('../deploy/execute');
9
+ const RemoteExec = require('../../lib/remote-exec');
10
10
 
11
11
 
12
12
  const getGitInfo = () => {
@@ -142,7 +142,7 @@ const getServerInfo = (server) => {
142
142
 
143
143
  if (!c.envDev) {
144
144
  _info(`Remote version info for '${c.server.ftpConfig.host}':\n`);
145
- remote.add('sudo ~root/scripts/version.sh', '').process();
145
+ new RemoteExec(c.server.ftpConfig).add('sudo ~root/scripts/version.sh', 'STDOUT').process();
146
146
  }
147
147
  else {
148
148
  _info('For development environments no server version information is available. Check rancher / database for this information.\nAdd the --server option with a non-dev environment to see version information for that server.');
@@ -1,6 +1,7 @@
1
1
  const globby = require('globby');
2
2
  const inquirer = require('inquirer');
3
3
  const path = require('path');
4
+ const TclConfig = require('../../lib/tcl-config');
4
5
 
5
6
 
6
7
  module.exports = function migrate (argv) {
@@ -76,8 +77,9 @@ module.exports = function migrate (argv) {
76
77
  _write();
77
78
  const startTime = new Date();
78
79
 
80
+ const projectsWithType = TclConfig.findProjects();
79
81
  const scriptPath = path.join('migrate', a.script);
80
- _modulesTdi.require(scriptPath).forEach(step => require('./steps')(step, argv.dry, filter));
82
+ _modulesTdi.require(scriptPath).forEach(step => require('./steps')(step, argv.dry, filter, projectsWithType));
81
83
 
82
84
  _perf(startTime);
83
85
  });
@@ -5,26 +5,37 @@ const path = require('path');
5
5
  const rif = require('replace-in-file');
6
6
 
7
7
 
8
- const getPaths = (search, filter) =>
9
- globby
10
- .sync(search, {dot: true, ignore: [_paths.tdi + '/**', '**/cmscustom/tdi/**']})
11
- .filter(p => !filter || minimatch(p, filter)
12
- );
13
-
8
+ class PathSearcher {
9
+ #filter;
10
+ #ignorePaths = [_paths.tdi + '/**', 'config/cmscustom/tdi/**'];
11
+ constructor (filter, ignoreProjectPaths) {
12
+ this.#filter = filter;
13
+ this.#ignorePaths.push(...ignoreProjectPaths);
14
+ }
15
+ #doFilter (paths) {
16
+ return this.#filter ? paths.filter(p => minimatch(p, this.#filter)) : paths;
17
+ }
18
+ get(globPath) {
19
+ return this.#doFilter(globby.sync(globPath, {dot: true, ignore: this.#ignorePaths}));
20
+ }
21
+ }
14
22
 
15
- module.exports = function steps (step, dry, filter) {
16
23
 
17
- const {deletePaths, renamePaths, replaceInFiles, customAction, applyFilter = true} = step;
18
- const setFilter = applyFilter && filter;
24
+ module.exports = function steps (step, dry, passedFilter, projectsWithType) {
19
25
 
20
26
  if (Array.isArray(step)) {
21
- step.forEach(substep => require('./steps')(substep, dry, filter));
27
+ step.forEach(substep => steps(substep, dry, passedFilter, projectsWithType));
28
+ return;
22
29
  }
23
30
 
31
+ const {projectType = 'all', applyFilter = true, deletePaths, renamePaths, replaceInFiles, customAction} = step;
32
+ const projectsToIgnore = projectType === 'all' ? [] : projectsWithType.filter(p => p.tcl !== (projectType === 'tcl'));
33
+ const pathSearcher = new PathSearcher(applyFilter && passedFilter, projectsToIgnore.map(p => p.path + '/**'));
34
+
24
35
  if (deletePaths) {
25
36
  deletePaths.forEach(curpath => {
26
37
  _info(`Deleting paths: ${curpath}`);
27
- const paths = getPaths(curpath, setFilter);
38
+ const paths = pathSearcher.get(curpath);
28
39
  if (!dry) paths.forEach(p => fs.removeSync(p));
29
40
  _write('Paths deleted:', paths.length);
30
41
  });
@@ -33,7 +44,7 @@ module.exports = function steps (step, dry, filter) {
33
44
  if (renamePaths) {
34
45
  renamePaths.forEach(([curpath, change]) => {
35
46
  _info(`Renaming paths: ${curpath}`);
36
- const paths = getPaths(curpath, setFilter);
47
+ const paths = pathSearcher.get(curpath);
37
48
  if (!dry) paths.forEach(p => fs.moveSync(p, path.join(p, change), {overwrite: true}));
38
49
  _write('Paths renamed:', paths.length);
39
50
  });
@@ -43,7 +54,7 @@ module.exports = function steps (step, dry, filter) {
43
54
  replaceInFiles.forEach(r => {
44
55
  _info(`Executing replacements in files: ${r.files}`);
45
56
 
46
- r.files = r.files.map(curpath => getPaths(curpath, setFilter)).flat();
57
+ r.files = r.files.map(curpath => pathSearcher.get(curpath)).flat();
47
58
  r.dry = dry;
48
59
 
49
60
  if (r.fromtoPairs) {
@@ -51,12 +62,11 @@ module.exports = function steps (step, dry, filter) {
51
62
  r.to = r.fromtoPairs.map(p => p[1]);
52
63
  }
53
64
 
54
- let filesModCount = 0;
55
- let maxRepeat = 15;
56
- for (let i=0; i<maxRepeat && r.files[0]; i++) { // execute repeatedly for modified files only (with safety limit of 20)
65
+ let filesModCount;
66
+ for (let i=15/* safety limit*/; i>0 && r.files[0]; i--) { // execute repeatedly for modified files only
57
67
  r.files = rif.sync(r).filter(f => f.hasChanged).map(f => f.file);
58
- if (i===0) filesModCount = r.files.length; // save count only after first run (after this only subsets are processed)
59
- if (i===maxRepeat-1 && r.files[0]) _warn(`Repeated replacement stopped by safety limit - check file changes for too many content occurrences`);
68
+ filesModCount ??= r.files.length; // save count only after first run (after this only subsets are processed)
69
+ if (i===1 && r.files[0]) _warn(`Repeated replacement stopped by safety limit - check file changes for too many content occurrences`);
60
70
  if (dry) break;
61
71
  }
62
72
 
@@ -66,7 +76,7 @@ module.exports = function steps (step, dry, filter) {
66
76
 
67
77
  if (!dry && customAction) {
68
78
  customAction.forEach(([dirpath, actionFn]) => {
69
- getPaths(dirpath, setFilter).forEach(p => actionFn(p));
79
+ pathSearcher.get(dirpath).forEach(p => actionFn(p));
70
80
  });
71
81
  }
72
82