@tangelo/tangelo-configuration-toolkit 1.7.0 → 1.8.0-beta.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -4,7 +4,7 @@ Tangelo Configuration Toolkit is a command-line toolkit which offers support for

  ## Installation

- The toolkit requires [NPM](https://www.npmjs.com/get-npm) on [Node.js®](https://nodejs.org/) version 12.x or 14.x. An active or maintenance LTS release is recommended. After installing Node.js, you can install the latest version of the Tangelo Configuration Toolkit globally on your system using the following command:
+ The toolkit requires [NPM](https://www.npmjs.com/get-npm) on [Node.js®](https://nodejs.org/) (at least version 14.x). An active or maintenance LTS release is recommended. After installing Node.js, you can install the latest version of the Tangelo Configuration Toolkit globally on your system using the following command:

  npm i -g @tangelo/tangelo-configuration-toolkit

package/index.js CHANGED
@@ -5,9 +5,10 @@ String.prototype.toFws = function(){
  };


- const findUp = require('find-up');
- const fs = require('fs-extra');
- const path = require('path');
+ const findUp = require('find-up');
+ const fs = require('fs-extra');
+ const {getPath} = require('global-modules-path');
+ const path = require('path');

  const execGitCommand = require('./src/lib/exec-git-command');

@@ -56,8 +57,12 @@ _paths.repoconfig = path.join(_paths.repo, appname+'-repoconfig.json');
  _paths.apply = process.cwd().replace(_paths.repo, '').substr(1);


- global._package = require('./package.json');
- global._appdata = fs.readJsonSync(_paths.appdata, {throws: false}) || {versionLastChecked: 0, versionIsLatest: true};
+ global._appdata = fs.readJsonSync(_paths.appdata, {throws: false}) || {};
+ global._packages = {
+ TCT: {name: '@tangelo/tangelo-configuration-toolkit', version: require('./package.json')?.version},
+ FDT: {name: '@fontoxml/fontoxml-development-tools'}
+ }
+ _packages.FDT.version = require(`${getPath(_packages.FDT.name)}/package.json`)?.version;

  try { global._appconfig = _paths.appconfig && fs.readJsonSync(_paths.appconfig) || {}; }
  catch({message}) { _error('Error in '+message); }
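
For context on the new `global-modules-path` dependency used above: a minimal, hypothetical sketch (not part of the package) of how `getPath` can resolve a globally installed package's directory so its version can be read. The helper name `globalVersionOf` is illustrative only.

    // Sketch: read the installed version of a globally installed package.
    // Assumes global-modules-path@2 is installed; getPath returns the package
    // directory, or a falsy value when the package is not installed globally.
    const {getPath} = require('global-modules-path');

    const globalVersionOf = (packageName) => {
      const dir = getPath(packageName);               // e.g. .../node_modules/<packageName>
      if (!dir) return undefined;                     // not installed globally
      return require(`${dir}/package.json`).version;  // read its package.json
    };

    console.log(globalVersionOf('@fontoxml/fontoxml-development-tools'));
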
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@tangelo/tangelo-configuration-toolkit",
- "version": "1.7.0",
+ "version": "1.8.0-beta.3",
  "description": "Tangelo Configuration Toolkit is a command-line toolkit which offers support for developing a Tangelo configuration.",
  "bin": {
  "tct": "bin/index.js",
@@ -20,31 +20,33 @@
  "babel-core": "^6.26.3",
  "babel-preset-es2015-without-strict": "0.0.4",
  "cli-spinner": "^0.2.10",
- "del": "^5.1.0",
+ "compare-versions": "^4.1.1",
+ "del": "^6.0.0",
  "event-stream": "^4.0.1",
  "find-up": "^5.0.0",
- "fs-extra": "^9.0.1",
+ "fs-extra": "^10.0.0",
+ "global-modules-path": "^2.3.1",
  "globby": "^6.1.0",
  "gulp": "^4.0.2",
  "gulp-babel": "^7.0.1",
  "gulp-eol": "^0.2.0",
- "gulp-filter": "^6.0.0",
+ "gulp-filter": "^7.0.0",
  "gulp-plumber": "^1.2.1",
  "gulp-print": "^5.0.2",
- "gulp-sass": "^4.1.0",
+ "gulp-sass": "^5.0.0",
  "gulp-simple-rename": "^0.1.3",
- "gulp-sourcemaps": "^2.6.5",
- "inquirer": "^7.3.3",
- "istextorbinary": "^5.11.0",
+ "gulp-sourcemaps": "^3.0.0",
+ "inquirer": "^8.2.0",
+ "istextorbinary": "^6.0.0",
  "minimatch": "^3.0.4",
- "node-ssh": "^10.0.0",
+ "node-ssh": "^12.0.2",
  "object-assign-deep": "^0.4.0",
- "replace-in-file": "^3.4.4",
+ "replace-in-file": "^6.0.0",
+ "sass": "^1.43.4",
  "saxon-js": "^2.3.0",
  "scp2": "^0.5.0",
- "selfupdate": "^1.1.0",
  "tiny-lr": "^2.0.0",
- "yargs": "^16.0.3"
+ "yargs": "^16.2.0"
  },
  "repository": {
  "type": "git",
package/src/cli.js CHANGED
@@ -1,9 +1,23 @@
- const fs = require('fs-extra');
- const selfupdate = require('selfupdate');
- const yargs = require('yargs');
+ const {compare} = require('compare-versions');
+ const exec = require('util').promisify(require('child_process').exec);
+ const fs = require('fs-extra');
+ const yargs = require('yargs');


- const updateJsonFile = (path, json = {}, obj = {}) => fs.writeJsonSync(path, Object.assign(json, obj), {spaces: 2});
+ const updateAppdata = (data) => Object.assign(_appdata, data, {_changed: true});
+
+ const checkForPackageUpdate = (package) => (
+ exec(`npm view -g ${_packages[package].name} version`)
+ .then(r => {
+ const versionAvailable = r.stdout.match(/([\d/.]+)/)[1];
+ if (compare(_packages[package].version, versionAvailable, '<')) {
+ updateAppdata({[`updateCheck${package}`]: {executed: new Date(), versionAvailable}});
+ return versionAvailable;
+ }
+ else updateAppdata({[`updateCheck${package}`]: {executed: new Date()}});
+ })
+ .catch(e => _warn(`Failed checking latest version of ${package}.`))
+ );


  module.exports = function cli () {
@@ -11,7 +25,7 @@ module.exports = function cli () {
  _write();

  const txtTitle = 'Tangelo Configuration Toolkit'.bold.underline.cyan;
- const txtVersion = `v${_package.version}${_appdata.versionIsLatest ? '' : ' (new version available)'}`.lblack;
+ const txtVersion = `v${_packages.TCT.version}`.lblack;

  const {argv} = yargs
  .scriptName('tct')
@@ -99,7 +113,6 @@ module.exports = function cli () {
  })
  .recommendCommands()
  .option('config', {alias: 'c', desc: 'Show loaded appconfig', global: false})
- .option('update', {alias: 'u', desc: 'Update this package', global: false})
  .version(false)
  .help(false)
  .example([
@@ -126,28 +139,36 @@ module.exports = function cli () {
  _write(_appconfig);
  }

- if (argv.update) {
- _info('Updating...');
- selfupdate.update(_package, (error, version) => {
- if (!error) updateJsonFile(_paths.appdata, _appdata, {versionLastChecked: new Date(), versionIsLatest: true});
- _write((error||{}).message || 'Updated to version ' + version);
- });
- }
-
  }


+ let checkUpdatesDone = false;
+
  process.on('beforeExit', () => {
- _write(); // always end with empty line
-
- if (!argv.hasOwnProperty('update') && _appdata.versionIsLatest && new Date() - new Date(_appdata.versionLastChecked || 0) > 1000*3600*24*3) { // check every 3 days
- _info('Checking for updates...');
- selfupdate.isUpdated(_package, (error, isUpdated) => {
- if (!error) updateJsonFile(_paths.appdata, _appdata, {versionLastChecked: new Date(), versionIsLatest: isUpdated});
- _write((error || isUpdated ? 'Using latest version.' : 'New version available!') + '\n');
- process.exit(); // otherwise this event keeps being triggered
+ _write(); // print empty line before and after update check
+
+ if (!checkUpdatesDone) { // check if updatecheck has ran before because async calls below trigger beforeExit again
+ checkUpdatesDone = true;
+
+ ['TCT', 'FDT'].forEach(p => {
+ const updateMsg = (va) => `| Update ${p} to ${va} | ` + `npm i -g ${_packages[p].name}`.white;
+ const {versionAvailable} = _appdata[`updateCheck${p}`] || {};
+
+ if (new Date() - new Date(_appdata[`updateCheck${p}`]?.executed || 0) > 1000*3600*24*7) { // check every week
+ checkForPackageUpdate(p).then(r => r && _warn(updateMsg(r)));
+ }
+ else if (versionAvailable) {
+ if (compare(_packages[p].version, versionAvailable, '<')) _warn(updateMsg(versionAvailable));
+ else updateAppdata({[`updateCheck${p}`]: {executed: new Date()}});
+ }
  });
  }
+
+ if (_appdata._changed) {
+ delete _appdata._changed;
+ fs.writeJsonSync(_paths.appdata, _appdata, {spaces: 2});
+ }
+

  });

  }
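
The update check introduced above relies on `compare-versions`; as a minimal, standalone sketch (illustrative only, not part of the package), the comparison it performs boils down to the following. The version strings are hypothetical examples.

    // Sketch: decide whether an update notice should be shown.
    // compare(a, b, '<') returns true when version a is older than version b (semver rules).
    const {compare} = require('compare-versions');

    const installed = '1.7.0';          // hypothetical installed version
    const available = '1.8.0-beta.3';   // hypothetical version reported by the registry
    if (compare(installed, available, '<')) {
      console.log(`Update available: ${available}`);
    }
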
@@ -1,19 +1,19 @@
- // based on gulp-batch-replace
-
- const es = require('event-stream'), minimatch = require("minimatch"), istextorbinary = require('istextorbinary');
-
- const execReplace = (c, s, r) => Buffer.from(s instanceof RegExp ? String(c).replace(s,r) : String(c).split(s).join(r));
-
- module.exports = (arr) => {
- return es.map((file, callback) => {
- if(file.contents instanceof Buffer) {
- arr.forEach(e => {
- // exec if no glob is passed or if glob matches, and it's a text file
- if ((!e[2] || minimatch(file.path,e[2])) && istextorbinary.isText(file.path,file)) {
- file.contents = execReplace(file.contents, e[0], e[1]);
- }
- });
- }
- callback(null,file);
- });
- };
+ // based on gulp-batch-replace
+
+ const es = require('event-stream'), minimatch = require("minimatch"), istextorbinary = require('istextorbinary');
+
+ const execReplace = (c, s, r) => Buffer.from(s instanceof RegExp ? String(c).replace(s,r) : String(c).split(s).join(r));
+
+ module.exports = (arr) => {
+ return es.map((file, callback) => {
+ if(file.contents instanceof Buffer) {
+ arr.forEach(e => {
+ // exec if no glob is passed or if glob matches, and it's a text file
+ if ((!e[2] || minimatch(file.path,e[2])) && istextorbinary.isText(file.path,file)) {
+ file.contents = execReplace(file.contents, e[0], e[1]);
+ }
+ });
+ }
+ callback(null,file);
+ });
+ };
@@ -78,7 +78,7 @@ const getTransformations = (config) => {
  });
  });
  } else {
- _write(`No transformation scenarios found in ${cf} for '${config.extracts.base}'`);
+ _write(`No transformation scenarios found in ${f} for '${config.extracts.base}'`);
  }
  });
  });
@@ -1,8 +1,8 @@
  const fs = require('fs-extra');
  const globby = require('globby');
  const gulp = require('gulp');
+ const {NodeSSH} = require('node-ssh');
  const path = require('path');
- const ssh = require('node-ssh');
  const through2 = require('through2');
  const {spawnSync} = require('child_process');

@@ -15,7 +15,7 @@ const g_print = require('gulp-print');
  const g_rename = require('gulp-simple-rename');
  const g_replace = require('../../lib/gulp-batch-replace-with-filter');
  const g_resolveIncl = require('../../lib/gulp-resolve-includes');
- const g_sass = require('gulp-sass');
+ const g_sass = require('gulp-sass')(require('sass'));
  const g_scp = require('../../lib/gulp-scp2-async');
  const g_sourcemaps = require('gulp-sourcemaps');

@@ -63,7 +63,7 @@ const remote = {
  process() {
  if (!this.queue[0]) return;

- const sshI = new ssh();
+ const sshI = new NodeSSH();
  sshI.connect(c.server.ftpConfig) // set up connection once
  .then(() => {
  this.do(sshI);
@@ -112,7 +112,9 @@ const createDeliveryPack = () => { // create install scripts if necessary, zip o
  _info('PowerShell script for database install created');
  }

- spawnSync('powershell.exe', [`Compress-Archive -Path * -DestinationPath ../${c.deliveryPackName}`], {cwd: c.server.remotedir});
+ const result = spawnSync('powershell.exe', [`Compress-Archive -Path * -DestinationPath ../${c.deliveryPackName}`], {cwd: c.server.remotedir});
+ if (result.stderr!='') {_warn(`\nLong paths may cause zip-creation to fail; Try to set a short remotedir, for example 'C:/deliveryPacks'.`);_error(`${result.stderr}\n`);}
+
  fs.removeSync(c.server.remotedir);
  _info(`Zipped package as: ${c.deliveryPackName}`);
  };
@@ -3,7 +3,7 @@ const fs = require('fs-extra');
  const gulp = require('gulp');
  const g_print = require('gulp-print');
  const path = require('path');
- const sass = require('gulp-sass');
+ const sass = require('gulp-sass')(require('sass'));
  const wws = require('../../lib/worker-with-spinner');


@@ -95,6 +95,16 @@ module.exports = function git (argv) {
  // Create symlinks for TDI
  ['tct b -s', 'Create TDI symlinks'],
  ]);
- })
+ }).catch((e) => {
+ // Show warnings for missing, expected, system environment variables
+ if (!process.env.GIT_SSH) {
+ _warn(`System environment variable 'GIT_SSH' is missing. (link to putty plink.exe)`);
+ }
+ if (!process.env.GIT_SSH_COMMAND) {
+ _warn(`System environment variable 'GIT_SSH_COMMAND' is missing. (link to putty plink.exe)`);
+ }
+ // continue display stack trace
+ throw e;
+ });
  }
  };
@@ -5,7 +5,8 @@ const path = require('path');
  const rif = require('replace-in-file');


- const getPaths = (search, filter) =>
+ const rifSync = (options) => rif.sync(options).filter(result => result.hasChanged).map(result => result.file);
+ const getPaths = (search, filter) =>
  globby
  .sync(search, {dot: true, ignore: [_paths.tdi + '/**','**/cmscustom/tdi/**']})
  .filter(p => !filter || minimatch(p, filter)
@@ -25,7 +26,7 @@ module.exports = function steps (step, dry, filter) {
  _write('Paths deleted:', paths.length);
  });
  }
-
+
  if (renamePaths) {
  renamePaths.forEach(([curpath, change]) => {
  _info(`Renaming paths: ${curpath}`);
@@ -34,7 +35,7 @@ module.exports = function steps (step, dry, filter) {
  _write('Paths renamed:', paths.length);
  });
  }
-
+
  if (replaceInFiles) {
  replaceInFiles.forEach(r => {
  _info(`Executing replacements in files: ${r.files}`);
@@ -47,9 +48,9 @@ module.exports = function steps (step, dry, filter) {
  r.to = r.fromtoPairs.map(p => p[1]);
  }

- r.files = rif.sync(r); // execute first time
- const filesModCount = r.files.length; // save file count
- for (let i=0; i<20 && !dry && r.files[0]; i++) r.files = rif.sync(r); // execute repeatedly for modified files only (with safety limit of 20)
+ r.files = rifSync(r); // execute first time
+ const filesModCount = r.files.length; // save file count
+ for (let i=0; i<20 && !dry && r.files[0]; i++) r.files = rifSync(r); // execute repeatedly for modified files only (with safety limit of 20)

  _write('Files modified:', filesModCount);
  });
@@ -14,6 +14,26 @@ const runSqlScript = (path, db, un, pw) => {
  _write('Done.\n');
  };

+ const checkLog = (logFilePath, remove=true) => {
+ if (fs.existsSync(logFilePath)) {
+ const logFile = fs.readFileSync(logFilePath).toString();
+ // Check for ORA- error messages
+ if (logFile.match(RegExp(`ORA-`, 'gm'))) {
+ // Select each ORA- error message and up to 6 lines before (the lines before may not contain ORA- themselves)
+ const errors = logFile.match(RegExp(`((?![^\n]*ORA-)[^\n]*\n){0,6}ORA-[^\n\r]*`,`gms`));
+ _info(`${errors.length} error(s) during SQL script:\n`)
+ // Print each error + lines before:
+ errors.forEach((e, i) => {
+ _info(`Tail of error ${i+1}:\n`)
+ _warn(`${e.trim()}\n`);
+ })
+ }
+ if (remove) {
+ // remove log file
+ fs.removeSync(logFilePath);
+ }
+ }
+ }

  module.exports = function sql (argv) {

@@ -25,8 +45,10 @@ module.exports = function sql (argv) {
  // tdi
  const dir = `${_paths.tdi}/${_isPre51 ? 'sources' : 'src'}/database/tdi/${_isPre42 ? 'install/' : ''}`;
  runSqlScript(dir + 'install.sql');
+ checkLog(dir + 'tdi-install.log',false);
  // custom
  runSqlScript('database/install.sql');
+ checkLog('database/install.log',false);
  }

  if (argv.configure) {
@@ -52,17 +74,20 @@ module.exports = function sql (argv) {

  const script = [`define l_env = ${a.pw=='tancms' ? 'dev' : 'prd'}`, 'set verify off define off'];

+ script.push(`spool install-config.log`);
  a.projects.forEach(p => {
  const dir = p.path_dbconfig ? p.path_dbconfig.join('/') : (_repoconfig.customer.dirname+'/'+p.dirname);
  script.push('prompt ', `prompt Loading configuration for: ${p.name}`);
  script.push(...globby.sync(`${dir}/*_install.sql`, {cwd}).map(s => `@${s}`));
  });

+ script.push(`spool off`);
  script.push('prompt ', 'pause Press ENTER to exit', 'exit');

  fs.writeFileSync(file, script.join('\n'));
  runSqlScript(file, a.db, 'tancms', a.pw);
  fs.removeSync(file);
+ checkLog(cwd + 'install-config.log');
  });

  }
@@ -75,11 +100,13 @@ module.exports = function sql (argv) {
  fs.ensureSymlinkSync(_paths.apply, dir + 'dist', 'junction');
  runSqlScript(dir + 'start.sql');
  fs.removeSync(dir + 'dist');
+ checkLog(dir + 'generate-config.log');
  }

  if (argv.remove) {
  const dir = _paths.tdi + (_isPre51 ? '/util/db-config-remover/' : '/tct/sql/remove/');
  runSqlScript(dir + 'start.sql');
+ checkLog(dir + 'remove-config.log');
  }

  };