@tangelo/tangelo-configuration-toolkit 1.8.0-beta.3 → 1.9.0

This diff shows the contents of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
package/index.js CHANGED
@@ -62,7 +62,7 @@ global._packages = {
  TCT: {name: '@tangelo/tangelo-configuration-toolkit', version: require('./package.json')?.version},
  FDT: {name: '@fontoxml/fontoxml-development-tools'}
  }
- _packages.FDT.version = require(`${getPath(_packages.FDT.name)}/package.json`)?.version;
+ _packages.FDT.version = fs.readJsonSync(`${getPath(_packages.FDT.name)}/package.json`, {throws: false})?.version;

  try { global._appconfig = _paths.appconfig && fs.readJsonSync(_paths.appconfig) || {}; }
  catch({message}) { _error('Error in '+message); }
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@tangelo/tangelo-configuration-toolkit",
- "version": "1.8.0-beta.3",
+ "version": "1.9.0",
  "description": "Tangelo Configuration Toolkit is a command-line toolkit which offers support for developing a Tangelo configuration.",
  "bin": {
  "tct": "bin/index.js",
@@ -34,17 +34,18 @@
  "gulp-plumber": "^1.2.1",
  "gulp-print": "^5.0.2",
  "gulp-sass": "^5.0.0",
- "gulp-simple-rename": "^0.1.3",
  "gulp-sourcemaps": "^3.0.0",
  "inquirer": "^8.2.0",
  "istextorbinary": "^6.0.0",
  "minimatch": "^3.0.4",
  "node-ssh": "^12.0.2",
  "object-assign-deep": "^0.4.0",
- "replace-in-file": "^6.0.0",
- "sass": "^1.43.4",
+ "p-limit": "^3.1.0",
+ "replace-in-file": "^3.4.4",
+ "sass": "^1.43.5",
  "saxon-js": "^2.3.0",
- "scp2": "^0.5.0",
+ "ssh2-sftp-client": "^7.2.2",
+ "through2": "^4.0.2",
  "tiny-lr": "^2.0.0",
  "yargs": "^16.2.0"
  },
package/src/cli.js CHANGED
@@ -10,7 +10,8 @@ const checkForPackageUpdate = (package) => (
  exec(`npm view -g ${_packages[package].name} version`)
  .then(r => {
  const versionAvailable = r.stdout.match(/([\d/.]+)/)[1];
- if (compare(_packages[package].version, versionAvailable, '<')) {
+ if (!_packages[package].version) _warn(`${package} is not installed! Run ` + `npm i -g ${_packages[package].name}`.white);
+ else if (compare(_packages[package].version, versionAvailable, '<')) {
  updateAppdata({[`updateCheck${package}`]: {executed: new Date(), versionAvailable}});
  return versionAvailable;
  }
@@ -0,0 +1,61 @@
+ const path = require('path');
+ const pLimit = require('p-limit');
+ const sftpClient = require('ssh2-sftp-client');
+ const through2 = require('through2');
+
+
+ module.exports = function(ftpConfig, remotedir) {
+   const sftp = new sftpClient();
+   const serial = pLimit(1);
+   const parallel = pLimit(ftpConfig.parallel);
+   const files = [];
+
+   return through2.obj(
+
+     function transform (file, _, cb) {
+       if (file.isStream()) return cb(new Error('Streaming not supported.'));
+       if (file.stat.isDirectory()) return cb();
+
+       file.destination = path.join(remotedir, file.relative).toFws();
+       files.push(file); // collect all files in array
+       return cb();
+     },
+
+     function flush(cb) {
+       if (!files[0]) cb();
+
+       const paths = [...new Set(files.map(({destination}) => path.dirname(destination)))] // collect unique paths
+         .filter((p1, i, a) => !a.find(p2 => p1 != p2 && p2.includes(p1))) // remove paths being part of others
+       ;
+
+       sftp.connect(ftpConfig)
+         .then(() => Promise.all(paths.map(p => parallel( // check if all directories exist
+           () => sftp.exists(p).then(r => {
+             if (!!r) paths.splice(paths.indexOf(p), 1) // if exists, remove from paths
+           })
+         ))))
+         .then(() => Promise.all(paths.map(p => serial( // create directories that do not exist, which cannot be done parallel
+           () => {
+             _write(p.replace(remotedir, '').lblack);
+             return sftp.mkdir(p, true).catch(err => _warn(err));
+           }
+         ))))
+         .then(() => Promise.all(files.map(file => parallel( // upload files
+           () => {
+             ftpConfig.eventPut(file);
+             return ( // use fastPut for files larger than 3mb; since it requires a local path we assume these files are not changed in the gulp pipeline
+               file.contents.length > 3000000 ? sftp.fastPut(file.originalRelativePath, file.destination) : sftp.put(file.contents, file.destination)
+             ).catch(err => _warn(`File transfer failed${err ? ': '+err : ''}`));
+           }
+         ))))
+         .then(() => {
+           _info(`${files.length} file(s) transferred`);
+           cb();
+           return sftp.end();
+         })
+         .catch(err => _error(err));
+     }
+
+   )
+   .resume(); // required for triggering 'end' event
+ };
@@ -0,0 +1,11 @@
+ // replacement for the official (but deprecated) gulp-simple-rename
+ // kept same name because of usage in tdi
+
+ const through2 = require('through2');
+
+ module.exports = (fn) => {
+   return through2.obj((file, enc, cb) => {
+     file.path = fn(file.path);
+     cb(null, file);
+   });
+ };
@@ -1,25 +1,23 @@
- const fs = require('fs-extra');
- const globby = require('globby');
- const gulp = require('gulp');
- const {NodeSSH} = require('node-ssh');
- const path = require('path');
- const through2 = require('through2');
- const {spawnSync} = require('child_process');
-
- const g_babel = require('gulp-babel');
- const g_babel_env = require('babel-preset-es2015-without-strict');
- const g_eol = require('gulp-eol');
- const g_filter = require('gulp-filter');
- const g_plumber = require('gulp-plumber');
- const g_print = require('gulp-print');
- const g_rename = require('gulp-simple-rename');
- const g_replace = require('../../lib/gulp-batch-replace-with-filter');
- const g_resolveIncl = require('../../lib/gulp-resolve-includes');
- const g_sass = require('gulp-sass')(require('sass'));
- const g_scp = require('../../lib/gulp-scp2-async');
- const g_sourcemaps = require('gulp-sourcemaps');
-
- const execGitCommand = require('../../lib/exec-git-command');
+ const execGitCommand = require('../../lib/exec-git-command');
+ const fs = require('fs-extra');
+ const globby = require('globby');
+ const gulp = require('gulp');
+ const {NodeSSH} = require('node-ssh');
+ const through2 = require('through2');
+ const {spawnSync} = require('child_process');
+
+ const g_babel_env = require('babel-preset-es2015-without-strict');
+ const g_babel = require('gulp-babel');
+ const g_eol = require('gulp-eol');
+ const g_filter = require('gulp-filter');
+ const g_plumber = require('gulp-plumber');
+ const g_print = require('gulp-print');
+ const g_replace = require('../../lib/gulp-batch-replace-with-filter');
+ const g_resolveIncl = require('../../lib/gulp-resolve-includes');
+ const g_sass = require('gulp-sass')(require('sass'));
+ const g_sftp = require('../../lib/gulp-sftp');
+ const g_sourcemaps = require('gulp-sourcemaps');
+

  const c = require('./config');
  const s = require('./srcset');
@@ -125,26 +123,27 @@ const transfer = (paths, lrServer) => {
  const sassF = g_filter(['**/*.scss'], {restore: true});
  const jsF = g_filter(['**/*.js', '!**/hce/**/template_*', '!**/fonto/**', '!**/vendor/**', '!**/*.min.js'], {restore: true});
  const shF = g_filter(['**/*.sh'], {restore: true});
- const ftpC = Object.assign({dest: c.server.remotedir}, c.server.ftpConfig); // new obj because of async transfers
  const srcset = s.create(paths);
  const files = [];

- const skippedFontoBuildFiles = [];
+ const fontoPaths = {outdated: [], uptodate: []};
  const removeOutdatedFontoBuild = g_filter(file => { // fonto build files must be newer than the last commit date containing fonto changes
  const prPath = 'config\\'+file.relative.replace(/fonto.+$/, '');
- if (skippedFontoBuildFiles.includes(prPath)) return false;
+ if (fontoPaths.outdated.includes(prPath)) return false;
+ if (fontoPaths.uptodate.includes(prPath)) return true;
  if (file.relative.match(/fonto[\\\/]dist/)) {
  const paths = `${prPath}\\fonto ${prPath}\\schema ${_paths.tdi}`; // only check paths containing fonto sources
  const lastFontoCommitDate = execGitCommand(`log -1 --format=%cd --date=iso-strict origin/${_commit.branch} ${paths}`, _paths.repo);
  if (fs.statSync(file.relative).mtime < lastFontoCommitDate) {
- skippedFontoBuildFiles.push(prPath);
+ fontoPaths.outdated.push(prPath);
  return false;
  }
+ else fontoPaths.uptodate.push(prPath);
  }
  return true;
  });

- // disable file logging for ftp (c.ftpConfig.watch function does it already)
+ // disable file logging for ftp (gulp-sftp does it already)
  g_print.setLogFunction((filepath) => c.localTransfer ? _write(filepath.nostyle) : null);

  _info('Start transferring', true);
@@ -157,7 +156,7 @@ const transfer = (paths, lrServer) => {
  .pipe(xpsF.restore)
  .pipe(jsF)
  .pipe(g_sourcemaps.init())
- .pipe(g_babel({ presets: [g_babel_env], comments: false, minified: true }))
+ .pipe(g_babel({presets: [g_babel_env], comments: false, minified: true}))
  .pipe(g_sourcemaps.write('.'))
  .pipe(jsF.restore)
  .pipe(sassF)
@@ -170,16 +169,17 @@ const transfer = (paths, lrServer) => {
  .pipe(shF.restore)
  .pipe(g_plumber.stop())
  .pipe(g_replace(c.replaceStrings))
- .pipe(g_rename(path => path.replace(/fonto[\\\/]dist/, 'fonto').replace(/(fonto[\\\/]).+(assets[\\\/]schemas)/, '$1$2'))) // change destination path for fonto build and schemas
  .pipe(through2.obj((file, enc, cb) => {
+ file.originalRelativePath = file.relative; // original path needed for sftp.fastPut
+ file.path = file.path.replace(/(fonto)[\\\/](dev|dist)/, '$1'); // change destination path for fonto build
  if (!file.relative.endsWith('.map')) files.push(file.relative); // collect all file paths in array for livereload
  cb(null, file);
  }))
  .pipe(g_print.default())
- .pipe(c.localTransfer ? gulp.dest(c.server.remotedir) : g_scp(ftpC))
+ .pipe(c.localTransfer ? gulp.dest(c.server.remotedir) : g_sftp(c.server.ftpConfig, c.server.remotedir))
  .on('end', () => {
  _info('Finished transferring\n', true);
- if (skippedFontoBuildFiles[0]) _warn(`Fonto build files in the following folders were outdated and therefore skipped:\n ${skippedFontoBuildFiles.map(v => v.slice(0, -1)).join('\n ')}`);
+ if (fontoPaths.outdated[0]) _warn(`Fonto build files in the following folders were outdated and therefore skipped:\n ${fontoPaths.outdated.map(v => v.slice(0, -1)).join('\n ')}`);

  remote.process();

@@ -16,7 +16,7 @@ module.exports = function deploy (argv) {
  c.prepareForCopy(argv.filter);

  if (!c.localTransfer) {
- c.server.ftpConfig.eventWrite = (file) => {
+ c.server.ftpConfig.eventPut = (file) => {
  _write(file.destination.replace(c.server.remotedir, ''));
  if (path.extname(file.destination)=='.sh')
  remote.add('chmod 755 '+file.destination, 'Permissions set: '+file.destination);
@@ -60,12 +60,12 @@ module.exports = function deploy (argv) {
  }
  };

- // for fonto, only watch packages (for schemas) and dist (for the build), but not dist/assets (causes build error / does not detect all files)
- gulp.watch([...c.transferPatterns, '!**/fonto/{*.*, !({dist,packages})/**, dist/*/**}'])
+ // for fonto, only watch dev/dist (for the build, does not detect "assets" most of the times btw)
+ gulp.watch([...c.transferPatterns, `!**/fonto/!(${c.envDev?'dev|':''}dist)/**`])
  .on('all', (event, filepath) => {

  if ((event=='add' || event=='change') &&
- (/fonto(.dist|.+schemas.+\.json)/.test(filepath) || !/fonto/.test(filepath)) // within fonto, only transfer build and schemas
+ (/fonto.(dev|dist)/.test(filepath) || !/fonto/.test(filepath)) // within fonto, only transfer build files
  ) {
  transfers.add(filepath);
  }
@@ -43,7 +43,7 @@ const cmdExec = command => new Promise((resolve, reject) => {

  cp.stdout.setEncoding('utf8');
  cp.stdout.on('data', data => {
- if (data.includes('Error') || data.includes('Input error')) reject(data);
+ if (/Error|Input error/.test(data)) _warn(data);
  else log(data);
  });
  cp.stderr.setEncoding('utf8');
@@ -203,7 +203,7 @@ module.exports = {
  fs.removeSync('dist/assets');
  _write('Done.\n');
  })
- .then(() => cmdExec('fdt editor build -M'))
+ .then(() => cmdExec('fdt editor build'))
  ;
  },

@@ -226,7 +226,7 @@ module.exports = {
  });
  _write('Done.\n');
  })
- .then(() => cmdExec('fdt editor run'))
+ .then(() => cmdExec('fdt editor run --write-to-disk'))
  ;
  }

@@ -37,9 +37,12 @@ module.exports = function git (argv) {

  if (argv.hasOwnProperty('init')) {
  const remoteTdiUrl = `${_paths.gitremote}/${_paths.tdi}.git`;
- const branches =
- execSync('git ls-remote --heads ' + remoteTdiUrl)
- .toString()
+ let branches;
+
+ try { branches = execSync('git ls-remote --heads ' + remoteTdiUrl, {encoding: 'UTF-8'}) }
+ catch (e) { _error(' '); } // execSync already prints an error to the console
+
+ branches = branches
  .match(/refs\/heads\/\S+/g)
  .map(s => s.replace('refs/heads/', ''))
  .sort((a, b) => {
@@ -5,7 +5,6 @@ const path = require('path');
  const rif = require('replace-in-file');


- const rifSync = (options) => rif.sync(options).filter(result => result.hasChanged).map(result => result.file);
  const getPaths = (search, filter) =>
  globby
  .sync(search, {dot: true, ignore: [_paths.tdi + '/**','**/cmscustom/tdi/**']})
@@ -48,9 +47,9 @@ module.exports = function steps (step, dry, filter) {
  r.to = r.fromtoPairs.map(p => p[1]);
  }

- r.files = rifSync(r); // execute first time
+ r.files = rif.sync(r); // execute first time
  const filesModCount = r.files.length; // save file count
- for (let i=0; i<20 && !dry && r.files[0]; i++) r.files = rifSync(r); // execute repeatedly for modified files only (with safety limit of 20)
+ for (let i=0; i<20 && !dry && r.files[0]; i++) r.files = rif.sync(r); // execute repeatedly for modified files only (with safety limit of 20)

  _write('Files modified:', filesModCount);
  });
@@ -1,95 +0,0 @@
- // based on gulp-scp2
-
- // core
- const path = require('path');
- // other
- const through2 = require('through2'), Client = require('scp2').Client;
-
- const fixWinPath = (str) => str.replace(/\\/g, '/');
-
- module.exports = function(options) {
-   const files = [];
-
-   options.watch = function(client){ // set file transfer watch function
-     client
-       .on('write', options.eventWrite)
-       .on('error', err => {})
-     ;
-   };
-
-   const stream = through2.obj(
-     function transform(file, enc, callback) {
-       if (file.isStream()) return callback(new Error('Streaming not supported.'));
-       if (file.stat.isDirectory()) return callback();
-
-       files.push(file); // collect all files in array
-
-       return callback();
-     },
-     function flush(callback) { // on callback start transfering all files in parallel threads
-       if (!files[0]) callback();
-
-       let transferring = 0, transferred = 0;
-
-       function uploadFile(client) {
-         const file = files.shift();
-
-         if (file) {
-           const filepath = fixWinPath(path.join(options.dest, file.relative));
-           transferring++;
-
-           client.mkdir(path.dirname(filepath), (err) => { // try creating dir
-             if (err) {
-               const fail = err.message=='Permission denied';
-               transferring--;
-               if (!fail) files.unshift(file); // add file back to queue
-               _warn((fail ? `Fail: ${err.message}` : `Server aborted connection. Retrying.`));
-               uploadFile(client); // retry
-             }
-             else {
-               client.write({ // upload file
-                 destination: filepath,
-                 content: file.contents
-               }, (err) => {
-                 transferring--;
-                 transferred++;
-                 uploadFile(client);
-               });
-             }
-           });
-
-         }
-         else {
-           client.close();
-           if (transferring==0) {
-             transferring--;
-             _info(`${transferred} file(s) transferred`);
-             return callback();
-           }
-         }
-       }
-
-       function uploadThread() {
-         const client = new Client(options);
-         options.watch(client);
-         client.sftp(err => {
-           if (err) {
-             if (err.toString().toLowerCase().includes('timed out')) _warn(`Connection timed out. Retrying.`);
-             else if (err.code=='ECONNRESET' || err.code=='ECONNABORTED') _warn(`Server aborted connection. Retrying.`);
-             else if (err.message=='All configured authentication methods failed') _error('Authentication failed. Check if SSH key is loaded.');
-             else _warn(err.toString());
-
-             uploadThread(); // retry
-           }
-           else uploadFile(client);
-         });
-       }
-
-       const parallel = Math.min(Math.round(files.length / 2), options.parallel);
-       for (let i = 0; i < parallel; i++) setTimeout(uploadThread, 100 * i);
-     }
-   );
-
-   stream.resume(); // required for triggering 'end' event
-   return stream;
- };