@tangelo/tangelo-configuration-toolkit 1.8.3 → 1.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@tangelo/tangelo-configuration-toolkit",
-  "version": "1.8.3",
+  "version": "1.9.0",
   "description": "Tangelo Configuration Toolkit is a command-line toolkit which offers support for developing a Tangelo configuration.",
   "bin": {
     "tct": "bin/index.js",
@@ -40,10 +40,11 @@
     "minimatch": "^3.0.4",
     "node-ssh": "^12.0.2",
     "object-assign-deep": "^0.4.0",
+    "p-limit": "^3.1.0",
     "replace-in-file": "^3.4.4",
     "sass": "^1.43.5",
     "saxon-js": "^2.3.0",
-    "scp2": "^0.5.0",
+    "ssh2-sftp-client": "^7.2.2",
     "through2": "^4.0.2",
     "tiny-lr": "^2.0.0",
     "yargs": "^16.2.0"
package/lib/gulp-sftp.js ADDED
@@ -0,0 +1,61 @@
+const path = require('path');
+const pLimit = require('p-limit');
+const sftpClient = require('ssh2-sftp-client');
+const through2 = require('through2');
+
+
+module.exports = function(ftpConfig, remotedir) {
+  const sftp = new sftpClient();
+  const serial = pLimit(1);
+  const parallel = pLimit(ftpConfig.parallel);
+  const files = [];
+
+  return through2.obj(
+
+    function transform (file, _, cb) {
+      if (file.isStream()) return cb(new Error('Streaming not supported.'));
+      if (file.stat.isDirectory()) return cb();
+
+      file.destination = path.join(remotedir, file.relative).toFws();
+      files.push(file); // collect all files in array
+      return cb();
+    },
+
+    function flush(cb) {
+      if (!files[0]) cb();
+
+      const paths = [...new Set(files.map(({destination}) => path.dirname(destination)))] // collect unique paths
+        .filter((p1, i, a) => !a.find(p2 => p1 != p2 && p2.includes(p1))) // remove paths being part of others
+      ;
+
+      sftp.connect(ftpConfig)
+        .then(() => Promise.all(paths.map(p => parallel( // check if all directories exist
+          () => sftp.exists(p).then(r => {
+            if (!!r) paths.splice(paths.indexOf(p), 1) // if exists, remove from paths
+          })
+        ))))
+        .then(() => Promise.all(paths.map(p => serial( // create directories that do not exist, which cannot be done parallel
+          () => {
+            _write(p.replace(remotedir, '').lblack);
+            return sftp.mkdir(p, true).catch(err => _warn(err));
+          }
+        ))))
+        .then(() => Promise.all(files.map(file => parallel( // upload files
+          () => {
+            ftpConfig.eventPut(file);
+            return ( // use fastPut for files larger than 3mb; since it requires a local path we assume these files are not changed in the gulp pipeline
+              file.contents.length > 3000000 ? sftp.fastPut(file.originalRelativePath, file.destination) : sftp.put(file.contents, file.destination)
+            ).catch(err => _warn(`File transfer failed${err ? ': '+err : ''}`));
+          }
+        ))))
+        .then(() => {
+          _info(`${files.length} file(s) transferred`);
+          cb();
+          return sftp.end();
+        })
+        .catch(err => _error(err));
+    }
+
+  )
+  .resume(); // required for triggering 'end' event
+};
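The new module above splits its work with p-limit: a limit of 1 serialises mkdir calls (creating nested remote directories concurrently can conflict), while ftpConfig.parallel bounds the number of concurrent uploads. A standalone sketch of that pattern, assuming hypothetical makeDir and uploadFile helpers that stand in for the real SFTP calls:

const pLimit = require('p-limit');

const serial = pLimit(1);    // directory creation: strictly one at a time
const parallel = pLimit(4);  // uploads: at most 4 in flight (the package reads this from ftpConfig.parallel)

// Illustrative only: run all mkdirs sequentially, then all uploads with bounded concurrency.
async function transferAll(dirs, files) {
  await Promise.all(dirs.map(d => serial(() => makeDir(d))));
  await Promise.all(files.map(f => parallel(() => uploadFile(f))));
}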
@@ -1,23 +1,23 @@
-const fs = require('fs-extra');
-const globby = require('globby');
-const gulp = require('gulp');
-const {NodeSSH} = require('node-ssh');
-const through2 = require('through2');
-const {spawnSync} = require('child_process');
-
-const g_babel_env = require('babel-preset-es2015-without-strict');
-const g_babel = require('gulp-babel');
-const g_eol = require('gulp-eol');
-const g_filter = require('gulp-filter');
-const g_plumber = require('gulp-plumber');
-const g_print = require('gulp-print');
-const g_replace = require('../../lib/gulp-batch-replace-with-filter');
-const g_resolveIncl = require('../../lib/gulp-resolve-includes');
-const g_sass = require('gulp-sass')(require('sass'));
-const g_scp = require('../../lib/gulp-scp2-async');
-const g_sourcemaps = require('gulp-sourcemaps');
-
-const execGitCommand = require('../../lib/exec-git-command');
+const execGitCommand = require('../../lib/exec-git-command');
+const fs = require('fs-extra');
+const globby = require('globby');
+const gulp = require('gulp');
+const {NodeSSH} = require('node-ssh');
+const through2 = require('through2');
+const {spawnSync} = require('child_process');
+
+const g_babel_env = require('babel-preset-es2015-without-strict');
+const g_babel = require('gulp-babel');
+const g_eol = require('gulp-eol');
+const g_filter = require('gulp-filter');
+const g_plumber = require('gulp-plumber');
+const g_print = require('gulp-print');
+const g_replace = require('../../lib/gulp-batch-replace-with-filter');
+const g_resolveIncl = require('../../lib/gulp-resolve-includes');
+const g_sass = require('gulp-sass')(require('sass'));
+const g_sftp = require('../../lib/gulp-sftp');
+const g_sourcemaps = require('gulp-sourcemaps');
+

 const c = require('./config');
 const s = require('./srcset');
@@ -123,26 +123,27 @@ const transfer = (paths, lrServer) => {
   const sassF = g_filter(['**/*.scss'], {restore: true});
   const jsF = g_filter(['**/*.js', '!**/hce/**/template_*', '!**/fonto/**', '!**/vendor/**', '!**/*.min.js'], {restore: true});
   const shF = g_filter(['**/*.sh'], {restore: true});
-  const ftpC = Object.assign({dest: c.server.remotedir}, c.server.ftpConfig); // new obj because of async transfers
   const srcset = s.create(paths);
   const files = [];

-  const skippedFontoBuildFiles = [];
+  const fontoPaths = {outdated: [], uptodate: []};
   const removeOutdatedFontoBuild = g_filter(file => { // fonto build files must be newer than the last commit date containing fonto changes
     const prPath = 'config\\'+file.relative.replace(/fonto.+$/, '');
-    if (skippedFontoBuildFiles.includes(prPath)) return false;
+    if (fontoPaths.outdated.includes(prPath)) return false;
+    if (fontoPaths.uptodate.includes(prPath)) return true;
     if (file.relative.match(/fonto[\\\/]dist/)) {
       const paths = `${prPath}\\fonto ${prPath}\\schema ${_paths.tdi}`; // only check paths containing fonto sources
       const lastFontoCommitDate = execGitCommand(`log -1 --format=%cd --date=iso-strict origin/${_commit.branch} ${paths}`, _paths.repo);
       if (fs.statSync(file.relative).mtime < lastFontoCommitDate) {
-        skippedFontoBuildFiles.push(prPath);
+        fontoPaths.outdated.push(prPath);
         return false;
       }
+      else fontoPaths.uptodate.push(prPath);
     }
     return true;
   });

-  // disable file logging for ftp (c.ftpConfig.watch function does it already)
+  // disable file logging for ftp (gulp-sftp does it already)
   g_print.setLogFunction((filepath) => c.localTransfer ? _write(filepath.nostyle) : null);

   _info('Start transferring', true);
@@ -169,15 +170,16 @@ const transfer = (paths, lrServer) => {
     .pipe(g_plumber.stop())
     .pipe(g_replace(c.replaceStrings))
     .pipe(through2.obj((file, enc, cb) => {
-      file.path = file.path.replace(/(fonto)(?:[\\\/]dist|.+([\\\/]assets[\\\/]schemas))/, '$1$2'); // change destination path for fonto build and schemas
+      file.originalRelativePath = file.relative; // original path needed for sftp.fastPut
+      file.path = file.path.replace(/(fonto)[\\\/](dev|dist)/, '$1'); // change destination path for fonto build
       if (!file.relative.endsWith('.map')) files.push(file.relative); // collect all file paths in array for livereload
       cb(null, file);
     }))
     .pipe(g_print.default())
-    .pipe(c.localTransfer ? gulp.dest(c.server.remotedir) : g_scp(ftpC))
+    .pipe(c.localTransfer ? gulp.dest(c.server.remotedir) : g_sftp(c.server.ftpConfig, c.server.remotedir))
     .on('end', () => {
       _info('Finished transferring\n', true);
-      if (skippedFontoBuildFiles[0]) _warn(`Fonto build files in the following folders were outdated and therefore skipped:\n ${skippedFontoBuildFiles.map(v => v.slice(0, -1)).join('\n ')}`);
+      if (fontoPaths.outdated[0]) _warn(`Fonto build files in the following folders were outdated and therefore skipped:\n ${fontoPaths.outdated.map(v => v.slice(0, -1)).join('\n ')}`);

       remote.process();

@@ -16,7 +16,7 @@ module.exports = function deploy (argv) {
   c.prepareForCopy(argv.filter);

   if (!c.localTransfer) {
-    c.server.ftpConfig.eventWrite = (file) => {
+    c.server.ftpConfig.eventPut = (file) => {
       _write(file.destination.replace(c.server.remotedir, ''));
       if (path.extname(file.destination)=='.sh')
         remote.add('chmod 755 '+file.destination, 'Permissions set: '+file.destination);
@@ -60,12 +60,12 @@ module.exports = function deploy (argv) {
     }
   };

-  // for fonto, only watch packages (for schemas) and dist (for the build), but not dist/assets (causes build error / does not detect all files)
-  gulp.watch([...c.transferPatterns, '!**/fonto/{*.*, !({dist,packages})/**, dist/*/**}'])
+  // for fonto, only watch dev/dist (for the build, does not detect "assets" most of the times btw)
+  gulp.watch([...c.transferPatterns, `!**/fonto/!(${c.envDev?'dev|':''}dist)/**`])
     .on('all', (event, filepath) => {

       if ((event=='add' || event=='change') &&
-        (/fonto(.dist|.+schemas.+\.json)/.test(filepath) || !/fonto/.test(filepath)) // within fonto, only transfer build and schemas
+        (/fonto.(dev|dist)/.test(filepath) || !/fonto/.test(filepath)) // within fonto, only transfer build files
       ) {
         transfers.add(filepath);
       }
@@ -43,7 +43,7 @@ const cmdExec = command => new Promise((resolve, reject) => {

   cp.stdout.setEncoding('utf8');
   cp.stdout.on('data', data => {
-    if (data.includes('Error') || data.includes('Input error')) reject(data);
+    if (/Error|Input error/.test(data)) _warn(data);
     else log(data);
   });
   cp.stderr.setEncoding('utf8');
@@ -203,7 +203,7 @@ module.exports = {
         fs.removeSync('dist/assets');
         _write('Done.\n');
       })
-      .then(() => cmdExec('fdt editor build -M'))
+      .then(() => cmdExec('fdt editor build'))
     ;
   },

@@ -226,7 +226,7 @@ module.exports = {
         });
         _write('Done.\n');
       })
-      .then(() => cmdExec('fdt editor run'))
+      .then(() => cmdExec('fdt editor run --write-to-disk'))
     ;
   }

package/lib/gulp-scp2-async.js DELETED
@@ -1,95 +0,0 @@
-// based on gulp-scp2
-
-// core
-const path = require('path');
-// other
-const through2 = require('through2'), Client = require('scp2').Client;
-
-const fixWinPath = (str) => str.replace(/\\/g, '/');
-
-module.exports = function(options) {
-  const files = [];
-
-  options.watch = function(client){ // set file transfer watch function
-    client
-      .on('write', options.eventWrite)
-      .on('error', err => {})
-    ;
-  };
-
-  const stream = through2.obj(
-    function transform(file, enc, callback) {
-      if (file.isStream()) return callback(new Error('Streaming not supported.'));
-      if (file.stat.isDirectory()) return callback();
-
-      files.push(file); // collect all files in array
-
-      return callback();
-    },
-    function flush(callback) { // on callback start transfering all files in parallel threads
-      if (!files[0]) callback();
-
-      let transferring = 0, transferred = 0;
-
-      function uploadFile(client) {
-        const file = files.shift();
-
-        if (file) {
-          const filepath = fixWinPath(path.join(options.dest, file.relative));
-          transferring++;
-
-          client.mkdir(path.dirname(filepath), (err) => { // try creating dir
-            if (err) {
-              const fail = err.message=='Permission denied';
-              transferring--;
-              if (!fail) files.unshift(file); // add file back to queue
-              _warn((fail ? `Fail: ${err.message}` : `Server aborted connection. Retrying.`));
-              uploadFile(client); // retry
-            }
-            else {
-              client.write({ // upload file
-                destination: filepath,
-                content: file.contents
-              }, (err) => {
-                transferring--;
-                transferred++;
-                uploadFile(client);
-              });
-            }
-          });
-
-        }
-        else {
-          client.close();
-          if (transferring==0) {
-            transferring--;
-            _info(`${transferred} file(s) transferred`);
-            return callback();
-          }
-        }
-      }
-
-      function uploadThread() {
-        const client = new Client(options);
-        options.watch(client);
-        client.sftp(err => {
-          if (err) {
-            if (err.toString().toLowerCase().includes('timed out')) _warn(`Connection timed out. Retrying.`);
-            else if (err.code=='ECONNRESET' || err.code=='ECONNABORTED') _warn(`Server aborted connection. Retrying.`);
-            else if (err.message=='All configured authentication methods failed') _error('Authentication failed. Check if SSH key is loaded.');
-            else _warn(err.toString());
-
-            uploadThread(); // retry
-          }
-          else uploadFile(client);
-        });
-      }
-
-      const parallel = Math.min(Math.round(files.length / 2), options.parallel);
-      for (let i = 0; i < parallel; i++) setTimeout(uploadThread, 100 * i);
-    }
-  );
-
-  stream.resume(); // required for triggering 'end' event
-  return stream;
-};