@tangelo/tangelo-configuration-toolkit 1.8.3 → 1.9.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/index.js CHANGED
@@ -106,6 +106,7 @@ else {
  }
 
 
+ global._tdiSubmoduleExists = fs.existsSync(path.join(_paths.repo, _paths.tdi));
  global._isPre42 = fs.existsSync(path.join(_paths.repo, _paths.tdi, 'create_new_project')); // folder changed in 4.2
  global._isPre51 = !fs.existsSync(path.join(_paths.repo, _paths.tdi, 'src')); // folder changed in 5.1 (check new folder because old one could still exist after branch switch)
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@tangelo/tangelo-configuration-toolkit",
- "version": "1.8.3",
+ "version": "1.9.2",
  "description": "Tangelo Configuration Toolkit is a command-line toolkit which offers support for developing a Tangelo configuration.",
  "bin": {
  "tct": "bin/index.js",
@@ -17,8 +17,8 @@
  "author": "Iddo Fontijn <iddo.fontijn@tangelo.nl>",
  "license": "SEE LICENSE IN LICENSE.md",
  "dependencies": {
- "babel-core": "^6.26.3",
- "babel-preset-es2015-without-strict": "0.0.4",
+ "@babel/core": "^7.17.4",
+ "@babel/preset-env": "^7.16.11",
  "cli-spinner": "^0.2.10",
  "compare-versions": "^4.1.1",
  "del": "^6.0.0",
@@ -28,7 +28,7 @@
  "global-modules-path": "^2.3.1",
  "globby": "^6.1.0",
  "gulp": "^4.0.2",
- "gulp-babel": "^7.0.1",
+ "gulp-babel": "^8.0.0",
  "gulp-eol": "^0.2.0",
  "gulp-filter": "^7.0.0",
  "gulp-plumber": "^1.2.1",
@@ -37,13 +37,14 @@
  "gulp-sourcemaps": "^3.0.0",
  "inquirer": "^8.2.0",
  "istextorbinary": "^6.0.0",
- "minimatch": "^3.0.4",
+ "minimatch": "^5.0.0",
  "node-ssh": "^12.0.2",
  "object-assign-deep": "^0.4.0",
- "replace-in-file": "^3.4.4",
+ "p-limit": "^3.1.0",
+ "replace-in-file": "^6.3.2",
  "sass": "^1.43.5",
  "saxon-js": "^2.3.0",
- "scp2": "^0.5.0",
+ "ssh2-sftp-client": "^7.2.2",
  "through2": "^4.0.2",
  "tiny-lr": "^2.0.0",
  "yargs": "^16.2.0"
@@ -0,0 +1,61 @@
+ const path = require('path');
+ const pLimit = require('p-limit');
+ const sftpClient = require('ssh2-sftp-client');
+ const through2 = require('through2');
+
+
+ module.exports = function(ftpConfig, remotedir) {
+ const sftp = new sftpClient();
+ const serial = pLimit(1);
+ const parallel = pLimit(ftpConfig.parallel);
+ const files = [];
+
+ return through2.obj(
+
+ function transform (file, _, cb) {
+ if (file.isStream()) return cb(new Error('Streaming not supported.'));
+ if (file.stat.isDirectory()) return cb();
+
+ file.destination = path.join(remotedir, file.relative).toFws();
+ files.push(file); // collect all files in array
+ return cb();
+ },
+
+ function flush(cb) {
+ if (!files[0]) cb();
+
+ const paths = [...new Set(files.map(({destination}) => path.dirname(destination)))] // collect unique paths
+ .filter((p1, i, a) => !a.find(p2 => p1 != p2 && p2.includes(p1+'/'))) // remove paths being part of others
+ ;
+
+ sftp.connect(ftpConfig)
+ .then(() => Promise.all(paths.map(p => parallel( // check if all directories exist
+ () => sftp.exists(p).then(r => {
+ if (!!r) paths.splice(paths.indexOf(p), 1) // if exists, remove from paths
+ })
+ ))))
+ .then(() => Promise.all(paths.map(p => serial( // create directories that do not exist, which cannot be done parallel
+ () => {
+ _write(p.replace(remotedir, '').lblack);
+ return sftp.mkdir(p, true).catch(err => _warn(err));
+ }
+ ))))
+ .then(() => Promise.all(files.map(file => parallel( // upload files
+ () => {
+ ftpConfig.eventPut(file);
+ return ( // use fastPut for files larger than 3mb; since it requires a local path we assume these files are not changed in the gulp pipeline
+ file.contents.length > 3000000 ? sftp.fastPut(file.originalRelativePath, file.destination) : sftp.put(file.contents, file.destination)
+ ).catch(err => _warn(`File transfer failed${err ? ': '+err : ''}`));
+ }
+ ))))
+ .then(() => {
+ _info(`${files.length} file(s) transferred`);
+ cb();
+ return sftp.end();
+ })
+ .catch(err => _error(`Could not connect to server${err ? ': '+err.message : ''}`));
+ }
+
+ )
+ .resume(); // required for triggering 'end' event
+ };
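The new plugin above pairs ssh2-sftp-client's promise API with p-limit to cap concurrency: directory creation is funnelled through a limit of 1, while uploads run in parallel up to ftpConfig.parallel. A minimal standalone sketch of that pattern follows; the concurrency value, the connect config and the file-list shape are placeholders, not values taken from this package.

const pLimit = require('p-limit');
const SftpClient = require('ssh2-sftp-client');

const sftp = new SftpClient();
const serial = pLimit(1);   // directory creation: one at a time
const parallel = pLimit(4); // uploads: up to 4 at once (placeholder value)

// files: [{localPath, remotePath, remoteDir}] -- placeholder shape for illustration
async function upload(files, connectConfig) {
  await sftp.connect(connectConfig);
  await Promise.all(files.map(f => serial(() => sftp.mkdir(f.remoteDir, true).catch(() => {}))));
  await Promise.all(files.map(f => parallel(() => sftp.fastPut(f.localPath, f.remotePath))));
  return sftp.end();
}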
@@ -54,7 +54,9 @@ module.exports = {
  parallel: serverConfig.config.parallel,
  username: serverConfig.config.username,
  agent: process.platform=='win32' ? 'pageant' : process.env.SSH_AUTH_SOCK,
- agentForward: process.platform!='win32'
+ agentForward: process.platform!='win32',
+ readyTimeout: 10000,
+ retries: 1
  }
  };
 
@@ -105,7 +107,7 @@ module.exports = {
 
  this.transferPatterns = [transferPattern, tdiPattern, ...this.giPatterns, '!**/*.crdownload']; // ignore patterns must come last
 
- if (this.deliveryPack && transferPattern.includes(_paths.tdi)) this.transferPatterns.push(`${_paths.tdi}/src/database/create.sql`); // create.sql is called by tdi install
+ if (this.deliveryPack && transferPattern.includes(_paths.tdi)) this.transferPatterns.push(`${_paths.tdi}/src/database/create*.sql`); // create(_exists).sql is called by tdi install
 
  // add time parameters to all xopus requests to overcome cache
  const ts = new Date().toISOString().replace(/-|:/g, '').substring(0,15);
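The readyTimeout and retries options added to the ftpConfig hunk above are passed straight through to ssh2-sftp-client's connect(): readyTimeout is the underlying ssh2 handshake timeout in milliseconds, and retries is the number of reconnection attempts the client makes before giving up. A minimal connection-check sketch with these options; the host and username are placeholders, not values from this package.

const SftpClient = require('ssh2-sftp-client');

const sftp = new SftpClient();
sftp.connect({
  host: 'example.com',              // placeholder
  username: 'deploy',               // placeholder
  agent: process.env.SSH_AUTH_SOCK,
  readyTimeout: 10000,              // abort the SSH handshake after 10s
  retries: 1                        // retry the connection once
})
  .then(() => sftp.end())
  .catch(err => console.error(`Could not connect to server: ${err.message}`));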
@@ -1,23 +1,22 @@
- const fs = require('fs-extra');
- const globby = require('globby');
- const gulp = require('gulp');
- const {NodeSSH} = require('node-ssh');
- const through2 = require('through2');
- const {spawnSync} = require('child_process');
-
- const g_babel_env = require('babel-preset-es2015-without-strict');
- const g_babel = require('gulp-babel');
- const g_eol = require('gulp-eol');
- const g_filter = require('gulp-filter');
- const g_plumber = require('gulp-plumber');
- const g_print = require('gulp-print');
- const g_replace = require('../../lib/gulp-batch-replace-with-filter');
- const g_resolveIncl = require('../../lib/gulp-resolve-includes');
- const g_sass = require('gulp-sass')(require('sass'));
- const g_scp = require('../../lib/gulp-scp2-async');
- const g_sourcemaps = require('gulp-sourcemaps');
-
- const execGitCommand = require('../../lib/exec-git-command');
+ const execGitCommand = require('../../lib/exec-git-command');
+ const fs = require('fs-extra');
+ const globby = require('globby');
+ const gulp = require('gulp');
+ const {NodeSSH} = require('node-ssh');
+ const through2 = require('through2');
+ const {spawnSync} = require('child_process');
+
+ const g_babel = require('gulp-babel')({presets: [[require('@babel/preset-env'), {modules: false}]], comments: false, minified: true});
+ const g_eol = require('gulp-eol');
+ const g_filter = require('gulp-filter');
+ const g_plumber = require('gulp-plumber');
+ const g_print = require('gulp-print');
+ const g_replace = require('../../lib/gulp-batch-replace-with-filter');
+ const g_resolveIncl = require('../../lib/gulp-resolve-includes');
+ const g_sass = require('gulp-sass')(require('sass'));
+ const g_sftp = require('../../lib/gulp-sftp');
+ const g_sourcemaps = require('gulp-sourcemaps');
+
 
  const c = require('./config');
  const s = require('./srcset');
@@ -123,26 +122,27 @@ const transfer = (paths, lrServer) => {
  const sassF = g_filter(['**/*.scss'], {restore: true});
  const jsF = g_filter(['**/*.js', '!**/hce/**/template_*', '!**/fonto/**', '!**/vendor/**', '!**/*.min.js'], {restore: true});
  const shF = g_filter(['**/*.sh'], {restore: true});
- const ftpC = Object.assign({dest: c.server.remotedir}, c.server.ftpConfig); // new obj because of async transfers
  const srcset = s.create(paths);
  const files = [];
 
- const skippedFontoBuildFiles = [];
+ const fontoPaths = {outdated: [], uptodate: []};
  const removeOutdatedFontoBuild = g_filter(file => { // fonto build files must be newer than the last commit date containing fonto changes
  const prPath = 'config\\'+file.relative.replace(/fonto.+$/, '');
- if (skippedFontoBuildFiles.includes(prPath)) return false;
+ if (fontoPaths.outdated.includes(prPath)) return false;
+ if (fontoPaths.uptodate.includes(prPath)) return true;
  if (file.relative.match(/fonto[\\\/]dist/)) {
  const paths = `${prPath}\\fonto ${prPath}\\schema ${_paths.tdi}`; // only check paths containing fonto sources
  const lastFontoCommitDate = execGitCommand(`log -1 --format=%cd --date=iso-strict origin/${_commit.branch} ${paths}`, _paths.repo);
  if (fs.statSync(file.relative).mtime < lastFontoCommitDate) {
- skippedFontoBuildFiles.push(prPath);
+ fontoPaths.outdated.push(prPath);
  return false;
  }
+ else fontoPaths.uptodate.push(prPath);
  }
  return true;
  });
 
- // disable file logging for ftp (c.ftpConfig.watch function does it already)
+ // disable file logging for ftp (gulp-sftp does it already)
  g_print.setLogFunction((filepath) => c.localTransfer ? _write(filepath.nostyle) : null);
 
  _info('Start transferring', true);
@@ -155,7 +155,7 @@ const transfer = (paths, lrServer) => {
  .pipe(xpsF.restore)
  .pipe(jsF)
  .pipe(g_sourcemaps.init())
- .pipe(g_babel({presets: [g_babel_env], comments: false, minified: true}))
+ .pipe(g_babel)
  .pipe(g_sourcemaps.write('.'))
  .pipe(jsF.restore)
  .pipe(sassF)
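gulp-babel 8 is a thin wrapper around the @babel/core 7 peer dependency added to package.json, so presets such as @babel/preset-env are passed in the plugin's options object. A rough sketch of the conventional standalone gulp-babel 8 usage for comparison; the src/dest globs are illustrative, not paths from this package.

const gulp = require('gulp');
const babel = require('gulp-babel');

// transpile with @babel/preset-env, keeping ES modules untouched for downstream tooling
const scripts = () =>
  gulp.src('src/**/*.js') // illustrative glob
    .pipe(babel({presets: [['@babel/preset-env', {modules: false}]], comments: false, minified: true}))
    .pipe(gulp.dest('build')); // illustrative destination

exports.scripts = scripts;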
@@ -169,15 +169,16 @@ const transfer = (paths, lrServer) => {
  .pipe(g_plumber.stop())
  .pipe(g_replace(c.replaceStrings))
  .pipe(through2.obj((file, enc, cb) => {
- file.path = file.path.replace(/(fonto)(?:[\\\/]dist|.+([\\\/]assets[\\\/]schemas))/, '$1$2'); // change destination path for fonto build and schemas
+ file.originalRelativePath = file.relative; // original path needed for sftp.fastPut
+ file.path = file.path.replace(/(fonto)[\\\/](dev|dist)/, '$1'); // change destination path for fonto build
  if (!file.relative.endsWith('.map')) files.push(file.relative); // collect all file paths in array for livereload
  cb(null, file);
  }))
  .pipe(g_print.default())
- .pipe(c.localTransfer ? gulp.dest(c.server.remotedir) : g_scp(ftpC))
+ .pipe(c.localTransfer ? gulp.dest(c.server.remotedir) : g_sftp(c.server.ftpConfig, c.server.remotedir))
  .on('end', () => {
  _info('Finished transferring\n', true);
- if (skippedFontoBuildFiles[0]) _warn(`Fonto build files in the following folders were outdated and therefore skipped:\n ${skippedFontoBuildFiles.map(v => v.slice(0, -1)).join('\n ')}`);
+ if (fontoPaths.outdated[0]) _warn(`Fonto build files in the following folders were outdated and therefore skipped:\n ${fontoPaths.outdated.map(v => v.slice(0, -1)).join('\n ')}`);
 
  remote.process();
 
@@ -1,7 +1,8 @@
- const del = require('del');
- const gulp = require('gulp');
- const path = require('path');
- const tinylr = require('tiny-lr');
+ const del = require('del');
+ const gulp = require('gulp');
+ const path = require('path');
+ const sftpClient = require('ssh2-sftp-client');
+ const tinylr = require('tiny-lr');
 
  const {remote, transfer} = require('./execute');
  const c = require('./config');
@@ -16,7 +17,7 @@ module.exports = function deploy (argv) {
  c.prepareForCopy(argv.filter);
 
  if (!c.localTransfer) {
- c.server.ftpConfig.eventWrite = (file) => {
+ c.server.ftpConfig.eventPut = (file) => {
  _write(file.destination.replace(c.server.remotedir, ''));
  if (path.extname(file.destination)=='.sh')
  remote.add('chmod 755 '+file.destination, 'Permissions set: '+file.destination);
@@ -60,12 +61,18 @@ module.exports = function deploy (argv) {
  }
  };
 
- // for fonto, only watch packages (for schemas) and dist (for the build), but not dist/assets (causes build error / does not detect all files)
- gulp.watch([...c.transferPatterns, '!**/fonto/{*.*, !({dist,packages})/**, dist/*/**}'])
+ // check connection
+ if (!c.localTransfer) {
+ const sftp = new sftpClient();
+ sftp.connect(c.server.ftpConfig).then(() => sftp.end()).catch(err => _error(`Could not connect to server${err ? ': '+err.message : ''}`));
+ }
+
+ // for fonto, only watch dev/dist (for the build, does not detect "assets" most of the times btw)
+ gulp.watch([...c.transferPatterns, `!**/fonto/!(${c.envDev?'dev|':''}dist)/**`])
  .on('all', (event, filepath) => {
 
  if ((event=='add' || event=='change') &&
- (/fonto(.dist|.+schemas.+\.json)/.test(filepath) || !/fonto/.test(filepath)) // within fonto, only transfer build and schemas
+ (/fonto.(dev|dist)/.test(filepath) || !/fonto/.test(filepath)) // within fonto, only transfer build files
  ) {
  transfers.add(filepath);
  }
@@ -43,7 +43,7 @@ const cmdExec = command => new Promise((resolve, reject) => {
 
  cp.stdout.setEncoding('utf8');
  cp.stdout.on('data', data => {
- if (data.includes('Error') || data.includes('Input error')) reject(data);
+ if (/Error|Input error/.test(data)) _warn(data);
  else log(data);
  });
  cp.stderr.setEncoding('utf8');
@@ -203,7 +203,7 @@ module.exports = {
  fs.removeSync('dist/assets');
  _write('Done.\n');
  })
- .then(() => cmdExec('fdt editor build -M'))
+ .then(() => cmdExec('fdt editor build'))
  ;
  },
 
@@ -226,7 +226,7 @@ module.exports = {
  });
  _write('Done.\n');
  })
- .then(() => cmdExec('fdt editor run'))
+ .then(() => cmdExec('fdt editor run --write-to-disk'))
  ;
  }
 
@@ -6,18 +6,20 @@ const path = require('path');
 
  module.exports = function fonto (argv) {
 
+ if (!_tdiSubmoduleExists) _error('TDI submodule folder is missing.');
+
  process.chdir(_paths.apply || '.');
-
+
  // find fonto instances by searching for manifest.json files having a fonto property
  const fontoPaths = globby.sync(['**/manifest.json', `!${_paths.tdi}/**`])
  .filter(p => (fs.readJsonSync(p, {throws: false}) || {}).edition == 'fontoxml-platform-base')
  .map(p => p.replace('manifest.json', ''))
  ;
-
+
  if (fontoPaths.length==0) _error('No Fonto instance found.');
-
+
  let promiseChain = Promise.resolve(); // for sequentially executing commands for each fonto instance
-
+
  fontoPaths.forEach((p, i) => {
  promiseChain = promiseChain
  .then(() => {
@@ -40,5 +42,5 @@ module.exports = function fonto (argv) {
  .catch(error => {
  if (error) _warn(error.message || error);
  });
-
+
  };
@@ -2,10 +2,10 @@ const fs = require('fs-extra');
  const globby = require('globby');
  const minimatch = require('minimatch');
  const path = require('path');
- const rif = require('replace-in-file');
+ const rif = require('replace-in-file');
 
 
- const getPaths = (search, filter) =>
+ const getPaths = (search, filter) =>
  globby
  .sync(search, {dot: true, ignore: [_paths.tdi + '/**','**/cmscustom/tdi/**']})
  .filter(p => !filter || minimatch(p, filter)
@@ -47,9 +47,11 @@ module.exports = function steps (step, dry, filter) {
  r.to = r.fromtoPairs.map(p => p[1]);
  }
 
- r.files = rif.sync(r); // execute first time
- const filesModCount = r.files.length; // save file count
- for (let i=0; i<20 && !dry && r.files[0]; i++) r.files = rif.sync(r); // execute repeatedly for modified files only (with safety limit of 20)
+ let filesModCount;
+ for (let i=0; i<20 && !dry && r.files[0]; i++) { // execute repeatedly for modified files only (with safety limit of 20)
+ r.files = rif.sync(r).filter(f => f.hasChanged).map(f => f.file);
+ if (i==0) filesModCount = r.files.length; // save count only after first run (after this only subsets are processed)
+ }
 
  _write('Files modified:', filesModCount);
  });
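The rewritten loop matches the return shape of replace-in-file 6.x: sync() now returns an array of result objects with file and hasChanged fields (the 3.x releases returned only the names of changed files), so changed files have to be filtered out explicitly. A minimal sketch of that API; the files/from/to values are placeholders, not values from this package.

const rif = require('replace-in-file');

const results = rif.sync({
  files: 'config/**/*.xml', // placeholder glob
  from: /old-host/g,        // placeholder pattern
  to: 'new-host'            // placeholder replacement
});
const changed = results.filter(r => r.hasChanged).map(r => r.file);
console.log('Files modified:', changed.length);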
@@ -1,95 +0,0 @@
- // based on gulp-scp2
-
- // core
- const path = require('path');
- // other
- const through2 = require('through2'), Client = require('scp2').Client;
-
- const fixWinPath = (str) => str.replace(/\\/g, '/');
-
- module.exports = function(options) {
- const files = [];
-
- options.watch = function(client){ // set file transfer watch function
- client
- .on('write', options.eventWrite)
- .on('error', err => {})
- ;
- };
-
- const stream = through2.obj(
- function transform(file, enc, callback) {
- if (file.isStream()) return callback(new Error('Streaming not supported.'));
- if (file.stat.isDirectory()) return callback();
-
- files.push(file); // collect all files in array
-
- return callback();
- },
- function flush(callback) { // on callback start transfering all files in parallel threads
- if (!files[0]) callback();
-
- let transferring = 0, transferred = 0;
-
- function uploadFile(client) {
- const file = files.shift();
-
- if (file) {
- const filepath = fixWinPath(path.join(options.dest, file.relative));
- transferring++;
-
- client.mkdir(path.dirname(filepath), (err) => { // try creating dir
- if (err) {
- const fail = err.message=='Permission denied';
- transferring--;
- if (!fail) files.unshift(file); // add file back to queue
- _warn((fail ? `Fail: ${err.message}` : `Server aborted connection. Retrying.`));
- uploadFile(client); // retry
- }
- else {
- client.write({ // upload file
- destination: filepath,
- content: file.contents
- }, (err) => {
- transferring--;
- transferred++;
- uploadFile(client);
- });
- }
- });
-
- }
- else {
- client.close();
- if (transferring==0) {
- transferring--;
- _info(`${transferred} file(s) transferred`);
- return callback();
- }
- }
- }
-
- function uploadThread() {
- const client = new Client(options);
- options.watch(client);
- client.sftp(err => {
- if (err) {
- if (err.toString().toLowerCase().includes('timed out')) _warn(`Connection timed out. Retrying.`);
- else if (err.code=='ECONNRESET' || err.code=='ECONNABORTED') _warn(`Server aborted connection. Retrying.`);
- else if (err.message=='All configured authentication methods failed') _error('Authentication failed. Check if SSH key is loaded.');
- else _warn(err.toString());
-
- uploadThread(); // retry
- }
- else uploadFile(client);
- });
- }
-
- const parallel = Math.min(Math.round(files.length / 2), options.parallel);
- for (let i = 0; i < parallel; i++) setTimeout(uploadThread, 100 * i);
- }
- );
-
- stream.resume(); // required for triggering 'end' event
- return stream;
- };