@tangelo/tangelo-configuration-toolkit 1.14.0 → 1.14.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +67 -67
- package/bin/index.js +2 -2
- package/package.json +1 -1
- package/src/lib/get-tdi-branch.js +45 -40
- package/src/lib/gulp-batch-replace-with-filter.js +19 -19
- package/src/lib/gulp-simple-rename.js +11 -11
- package/src/modules/build/index.js +1 -1
- package/src/modules/build/oxygen.js +167 -167
- package/src/modules/git/index.js +3 -2
- package/src/modules/info/index.js +201 -201
- package/src/modules/migrate/steps.js +3 -1
package/README.md
CHANGED
@@ -1,67 +1,67 @@

All 67 lines of the README are marked as removed and re-added, but the visible content of both sides is identical, so the change appears to be limited to whitespace or line endings. The file reads:

# tangelo-configuration-toolkit

Tangelo Configuration Toolkit is a command-line toolkit which offers support for developing a Tangelo configuration.

## Installation

The toolkit requires [NPM](https://www.npmjs.com/get-npm) on [Node.js®](https://nodejs.org/) (at least version 14.x). An active or maintenance LTS release is recommended. After installing Node.js, you can install the latest version of the Tangelo Configuration Toolkit globally on your system using the following command:

    npm i -g @tangelo/tangelo-configuration-toolkit

## Usage

Get help for the available commands and see the version:

    tangelo-configuration-toolkit

Get help for a specific command, detailing all its arguments:

    tangelo-configuration-toolkit <command>

Use the `tct` shorthand instead of `tangelo-configuration-toolkit`:

    tct <command>

## Config

### Global

The `deploy` command requires server connection information, so you have to create a configuration file named `tangelo-configuration-toolkit-appconfig.json` in which this information can be stored. The file is searched for in an ancestor directory of the current working directory.

The contents look like this (all properties are optional):

    {
      "sharedConfigPath": "absolute/or/relative/path/to/folder/containing/shared/config",
      "servers": [{
        "config": {
          "port": 22,
          "parallel": 4,
          "username": "username",
          "remotedir": "/absolute/path/to/tangelo/config/folder/on/server"
        },
        "domains": ["domain.name.com"],
        "name": "name-for-local-deploy"
      }],
      "serverDefaults": {
        "config": {
          ...
        }
      },
      "defaultServer": "can be set to the name of e.g. your favorite dev server",
      "defaultDatabase": "can be set to the tnsname of e.g. your favorite dev server"
    }

When passing a server name, `tct` will look for a full match with a name or a partial match (the start) with a domain.

### Repo

The `build` and `sql` commands make use of a configuration file in the repository named `tangelo-configuration-toolkit-repoconfig.json`. It contains information about the customer projects.

For a new repository, running `tct build --init` also creates the repoconfig file. For existing projects that do not have the repoconfig file yet, you can use `tct build --config` to generate it.

### oXygen

The `build -x` command sets the projects' transformation scenarios and masterfiles in the oXygen project file with the following functionality:
- It will try to preserve manually added entries in the transformation scenarios and masterfiles
- It will remove non-existing masterfiles and masterfiles that start with a '_'
- No masterfiles / scenarios will be added if their path matches oXygen's hidden directory patterns
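
Before moving on to the next file, a small illustration of the server-name matching rule from the Global config section above. This is a hedged sketch in comments only, not package code; the resolution of an omitted name to "defaultServer" is presumed from the property's description:

    // Illustrative resolution of a passed server name against the sample appconfig above:
    //   "name-for-local-deploy"  -> full match on servers[].name
    //   "domain.name"            -> partial match: domain "domain.name.com" starts with the passed value
    //   no name passed           -> presumably falls back to "defaultServer", if set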
package/bin/index.js
CHANGED
@@ -1,3 +1,3 @@

Lines 1-2 (the shebang and a blank line) are removed and re-added with identical visible content; line 3 is unchanged:

    #!/usr/bin/env node

    require('..');
package/package.json
CHANGED
The one-line change in package.json is not shown in this extract; given the release it is presumably the version field bump from 1.14.0 to 1.14.1.

package/src/lib/get-tdi-branch.js
CHANGED

@@ -1,41 +1,46 @@

Lines 1-14 are removed and re-added with identical visible content and the closing `};` is unchanged; the content of the removed implementation (old lines 15-40) is not shown in this extract apart from a fragment (`tdiBranch.`). The new file reads:

    const execGitCommand = require('./exec-git-command');
    const path = require('path');

    module.exports = function getTdiBranch(toBranchName) {
      const tdiBranch = {};
      let toBranch;
      if (toBranchName) {
        // Check if specified branch exists; we will update to this branch
        toBranch = String(toBranchName).replace(/(?:release\/)?(\d+(?:\.[\dx]+)*)/, `release/$1`);
        const branchExists = execGitCommand(`branch --remote`, path.join(_paths.repo, _paths.tdi)).match(`origin/${toBranch}`);
        if (!branchExists) _error(`TDI branch "${toBranch}" does not exist. Note that TCT can only update to a release branch.`);
      }

      // Get the current TDI branch:

      // Fetch all
      _info(`Fetch TDI submodule`);
      const cmdFetch = execGitCommand('fetch -pf --all', path.join(_paths.repo, _paths.tdi));
      if (cmdFetch.error) _warn(`Fetch failed\n${cmdFetch.error}`);

      // Get remote release branches containing TDI HEAD commit
      const releaseBranches = execGitCommand(`branch --all --contains ${_git.commitTdi.local().hash}`, path.join(_paths.repo, _paths.tdi)).match(/remotes\/origin\/release\/[^\s]+/gsm);
      if (!releaseBranches || releaseBranches.error) _error(`Could not retrieve TDI release branches`);

      // Get the first possible branch; prefer release/5.1 over release/5.2:
      releaseBranches.sort((a, b) => a > b ? 1 : -1);

      // Set branch name of firstBranch without 'remotes/origin/'
      tdiBranch.name = releaseBranches[0].replace(/remotes\/origin\//g, '');

      // In case of branch switch set from.name to the old branch and name to the new branch
      if (toBranch) {
        if (tdiBranch.name > toBranch) _error(`You cannot downgrade to a lower release branch with TCT.`);
        tdiBranch.from = {name: tdiBranch.name};
        tdiBranch.name = toBranch;

        const branchHash = execGitCommand(`rev-parse origin/${toBranch}`, path.join(_paths.repo, _paths.tdi));
        const commonAncestorHash = execGitCommand(`merge-base ${_git.commitTdi.local().hash} ${branchHash}`, path.join(_paths.repo, _paths.tdi));
        const commonAncestorDate = execGitCommand(`show ${commonAncestorHash} --no-patch --format=%cd --date=iso-strict `, path.join(_paths.repo, _paths.tdi), ['date']).date;
        tdiBranch.commonAncestor = {date: new Date(commonAncestorDate)};
      }

      // Get number of commits behind
      tdiBranch.commitsBehind = execGitCommand(`rev-list HEAD...origin/${tdiBranch.name} --count`, path.join(_paths.repo, _paths.tdi));
      return tdiBranch;
    };
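
As a brief aside on the branch-name normalization at the top of `getTdiBranch`: the regex prefixes a bare release number with `release/` and leaves an already prefixed name untouched. A sketch with made-up inputs (not package code):

    // Same regex as in getTdiBranch, applied to hypothetical inputs:
    const normalize = (name) =>
      String(name).replace(/(?:release\/)?(\d+(?:\.[\dx]+)*)/, 'release/$1');

    normalize('5.2');          // -> 'release/5.2'
    normalize('release/5.2');  // -> 'release/5.2'
    normalize('5.2.x');        // -> 'release/5.2.x'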
package/src/lib/gulp-batch-replace-with-filter.js
CHANGED

@@ -1,19 +1,19 @@

All 19 lines are removed and re-added with identical visible content, so the change appears to be whitespace or line endings only. The file reads:

    // based on gulp-batch-replace

    const es = require('event-stream'), minimatch = require('minimatch'), istextorbinary = require('istextorbinary');

    const execReplace = (c, s, r) => Buffer.from(s instanceof RegExp ? String(c).replace(s, r) : String(c).split(s).join(r));

    module.exports = (arr) => {
      return es.map((file, callback) => {
        if(file.contents instanceof Buffer) {
          arr.forEach(e => {
            // exec if no glob is passed or if glob matches, and it's a text file
            if ((!e[2] || minimatch(file.path, e[2])) && istextorbinary.isText(file.path, file)) {
              file.contents = execReplace(file.contents, e[0], e[1]);
            }
          });
        }
        callback(null, file);
      });
    };
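
For context, a minimal usage sketch of how such a replace stream is typically wired into a gulp pipeline. The gulpfile task, globs and replacement values are made up for illustration; only the `[search, replacement, optional glob]` entry shape comes from the module above:

    // Hypothetical gulpfile snippet (illustrative only):
    const gulp = require('gulp');
    const batchReplace = require('./src/lib/gulp-batch-replace-with-filter');

    gulp.task('replace-tokens', () =>
      gulp.src('src/**/*')
        .pipe(batchReplace([
          ['[customer]', 'acme'],              // plain string replacement in every text file
          [/\bDEBUG\b/g, 'false', '**/*.js'],  // regex replacement, limited to files matching the glob
        ]))
        .pipe(gulp.dest('dist'))
    );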
package/src/lib/gulp-simple-rename.js
CHANGED

@@ -1,11 +1,11 @@

All 11 lines are removed and re-added with identical visible content. The file reads:

    // replacement for the official (but deprecated) gulp-simple-rename
    // kept same name because of usage in tdi

    const through2 = require('through2');

    module.exports = (fn) => {
      return through2.obj((file, enc, cb) => {
        file.path = fn(file.path);
        cb(null, file);
      });
    };
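
A small hedged sketch (not from the package; the source glob and extension mapping are illustrative) of the typical way such a rename stream is used: the exported factory receives a function that maps each vinyl file path to a new path before the file is written:

    // Hypothetical usage:
    const gulp = require('gulp');
    const simpleRename = require('./src/lib/gulp-simple-rename');

    gulp.task('rename-templates', () =>
      gulp.src('templates/**/*.tpl')
        .pipe(simpleRename(p => p.replace(/\.tpl$/, '.xml'))) // rewrite the file path in the stream
        .pipe(gulp.dest('build'))
    );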
package/src/modules/build/index.js
CHANGED

@@ -9,7 +9,7 @@ const createSymlink = (t, p) => {
The single removed line (old line 12) is truncated in this extract.

   fs.symlink(path.join(up, _paths.tdi, t), p, 'dir', err => {
     if (err) {
       if (err.code=='EEXIST') _write(`Symlink already exists: ${p}`);
-      else _error((err.code
+      else _error((err.code!=='EPERM' ? err : `Can't create symlinks, restart your console as admin, or fix it permanently:\n 1. Open 'Local Security Policy' (secpol.msc)\n 2. Go to 'Security Settings > Local Policies > User Rights Assignment > Create symbolic links'\n 3. Add your own account\n 4. Restart Windows`));
     }
     else _write(`Symlink created: ${p}`);
   });
package/src/modules/build/oxygen.js
CHANGED

@@ -1,168 +1,168 @@

All 167 changed lines are removed and re-added with identical visible content (line 168, the closing `};`, is unchanged), so the change appears to be whitespace or line endings only. The file reads:

    const fs = require('fs-extra');
    const globby = require('globby');
    const path = require('path');
    const SaxonJS = require('saxon-js');

    const spConfigPath = path.join(_paths.repo, _paths.tdi, 'tct/build/oxygen/stylesheetPaths.json');
    const sefFilePath = path.join(_paths.repo, _paths.tdi, 'tct/build/oxygen/createProjectFile.sef.json');
    const cmscustomPath = 'config/cmscustom/';
    const siteStylesheetsPath = 'config/txp/site-stylesheets/';

    const masterFiles = new Set;
    const transformationScenarios = [];

    const convertToValidFilename = (string) => string.replace(/[\/|\\:*?"<>]/g, " ");

    const createProjectFile = (config) => {
      _info('Initializing xpr file(s):');
      const xprFiles = globby.sync(`*.xpr`);

      // Copy xpr file from TDI if it does not exists yet;
      if (xprFiles[0]) _write(`Found: ${xprFiles.join(', ')}`);
      else {
        const customers = new Set;
        _repoconfig.forEach(p => customers.add(p.customer_name));
        xprFiles.push(convertToValidFilename([...customers].join(' - ')) + '.xpr');
        fs.copySync(path.join(_paths.repo, 'tangelo-default-implementation/src/[customer].xpr'), path.join(_paths.repo, xprFiles[0]));
        _write(`Created: '${xprFiles[0]}'`);
      }

      // Search for transformationScenarios/masterfiles based on TDI submodule oxygenProjectFile config
      _info('\nSearching for transformationScenarios/masterfiles');
      config.oxygenProjectFile.forEach(
        pf => {
          // Collect transformation scenarios and add them to the transformationScenarios array; add individual stylesheets to the masterFiles set.
          if (pf.transformation) getTransformations(pf.transformation);
          // Add files to the masterfiles set.
          else if (pf.masterfile) getMasterfiles(pf.masterfile);
        }
      );

      // update all .xpr files with collected transformation scenarios and masterfiles.
      transformXprFile(xprFiles);
    };

    const getTransformations = (config) => {
      _repoconfig.forEach(rc => {
        // get pathname of customer/project
        const [customerPath, projectPath] = config.location == 'database' ? rc.path_dbconfig : rc.path_cmscustom;

        // set pathname of customer/project in location glob-expression
        const location = path.join(
          _paths.repo,
          config.files.replace(/\[customer\]/, customerPath).replace(/\[project\]/, projectPath)
        );

        globby
          .sync(`${location}`)
          .forEach(f => {
            // extract baseStrings from file
            const fileString = fs.readFileSync(f).toString();
            const baseStrings = fileString.match(RegExp(config.extracts.base, 'gm'));

            if (fileString.replace(/\s|^prompt\s.*$/gm,'')!=='') {
              if (baseStrings) {
                baseStrings.forEach(s => {
                  // extract type, name, files info from baseString
                  const type = config.extracts.type ? s.match(RegExp(config.extracts.type))[1] : config.values.type;
                  const name = config.extracts.name ? s.match(RegExp(config.extracts.name))[1] : config.values.name;
                  const files = s.match(RegExp(config.extracts.files))[1];

                  // Add transformation scenario to the transformationScenario array
                  transformationScenarios.push({
                    name: `${type}: ${name} (${rc.customer_name}, ${rc.project_name})`, // note that in createProjectFile.xsl a regex is added that matches scenarios based on this name. This to preserve manually added scenarios.
                    transformationScenario: files,
                    location: config.location == 'txp' ? siteStylesheetsPath : cmscustomPath
                  });

                  // Add each non-tdi stylesheet in transformation scenario to the masterFiles set
                  files.split(',').forEach(f => {
                    const filePath = `${config.location == 'txp' ? siteStylesheetsPath : cmscustomPath}${f}`;
                    if (!f.startsWith('tdi')) masterFiles.add(filePath);
                  });
                });
              } else {
                _write(`No transformation scenarios found in ${f} for '${config.extracts.base}'`);
              }
            }
          });
      });

    };


    const getMasterfiles = (config) => {
      globby
        .sync(`${path.join(_paths.repo, config.files)}`)
        .forEach(cf => {
          // Check if masterfile should be extracted from file
          const fileString = fs.readFileSync(cf).toString();
          if (fileString.replace(/\s|^prompt\s.*$/gm,'')!=='') {
            if (config.extracts) {
              // extract baseStrings from file
              const baseStrings = fileString.match(RegExp(config.extracts.base, 'gm'));

              if (baseStrings) {
                baseStrings.forEach(s => {
                  // extract (comma-separated) list of masterfiles
                  const filesString = s.match(RegExp(config.extracts.files))[1];
                  if (!filesString) _error(`No masterfiles found in '${s}' for '${config.extracts.files}'`);

                  // Add each non-tdi masterfile to the masterFiles set
                  filesString.split(',').forEach(f => {
                    if (!f.startsWith('tdi')){
                      const filePath = `${config.location == 'txp' ? siteStylesheetsPath : cmscustomPath}${f}`;
                      masterFiles.add(filePath);
                    }
                  });
                });
              } else {
                _write(`No masterfiles found in ${cf} for '${config.extracts.base}'`);
              }
            } else { // Add synced file to masterfiles; strip path from c:/... hence it starts with config/cmscustom or config/txp/site-stylesheets
              const filePath = config.location == 'txp' ? `${siteStylesheetsPath}${cf.split(siteStylesheetsPath)[1]}` : `${cmscustomPath}${cf.split(cmscustomPath)[1]}`;
              masterFiles.add(filePath);
            }
          }
        });

    };

    const transformXprFile = (xprFiles) => {
      _info('\nUpdating xpr file(s):');
      // create with: xslt3 -t -xsl:createProjectFile.xsl -export:createProjectFile.sef.json v -nogo'

      xprFiles
        .forEach(xprFile => {
          // Transform xpr; add masterfiles and transformationScenarios as parameters of the stylesheet
          _write(`${xprFile}\n`);
          SaxonJS.transform({
            stylesheetText: fs.readFileSync(sefFilePath),
            stylesheetBaseURI: 'createProjectFile.sef.json',
            sourceFileName: path.join(_paths.repo, xprFile),
            destination: 'serialized',
            stylesheetParams: {
              'masterfiles': [...masterFiles],
              'Q{}transformationScenarios': transformationScenarios
            }
          }, 'async')
          .then(output => {
            // Write result of transformation to xpr file
            fs.writeFileSync(path.join(_paths.repo, xprFile), output.principalResult);
          })
          .catch(e => _warn(`Failed to update: ${xprFile}\n ${e}`));
        });
    };


    module.exports = function oxygen () {
      // Set projects transformation scenarios and masterfiles in oXygen project file
      // - Will try to preserve manually added entries in the transformation scenarios and masterfiles
      // - Will remove non existing masterfiles or masterfiles that start with a '_'

      if (!fs.existsSync(spConfigPath) || !fs.existsSync(sefFilePath)) {
        _error(`Cannot find required files in TDI submodule. Try updating TDI submodule.`);
      }

      createProjectFile(require(spConfigPath));
    };
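
One detail of the `SaxonJS.transform` call above that may be worth spelling out (an explanatory aside based on SaxonJS's documented EQName convention, not on anything in this diff): `stylesheetParams` keys are EQNames, so a bare local name and the explicit `Q{}` empty-namespace form both address a no-namespace stylesheet parameter.

    // Illustrative only; the values are placeholders, not package data.
    const stylesheetParams = {
      'masterfiles': ['config/cmscustom/example.xsl'],   // bare local name (no namespace)
      'Q{}transformationScenarios': []                   // explicit empty-namespace EQName
    };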
package/src/modules/git/index.js
CHANGED

@@ -124,8 +124,9 @@ module.exports = function git (argv) {
The removed lines in both hunks below are truncated in this extract.

   if (update === `tdi`) {

     const tdiBranch = getTdiBranch(argv.branch);
+    const tdiBranchUpgrade = tdiBranch.from ? (tdiBranch.from.name < tdiBranch.name) : false;

-    if (
+    if (tdiBranchUpgrade) _info(`Current branch '${tdiBranch.from.name}' will be updated to '${tdiBranch.name}'\nCommon ancestor will be used in selecting TDI commits requiring migration: ${_formatDate(tdiBranch.commonAncestor.date)}`);
     _info(`Branch ${tdiBranch.name} is ${tdiBranch.commitsBehind} commits behind.`);

     // Set branch in .gitmodules file; This ensures submodule update will follow this branch to the latest commit

@@ -150,7 +151,7 @@ module.exports = function git (argv) {
     const migrationsFiltered = migrations
       .filter((m) => m.releases.find((r) => {
         const time = new Date(r.date).getTime();
-        return ((
+        return ((tdiBranchUpgrade)
           ? `release/${r.release}` == tdiBranch.name || (tdiBranch.from.name <= `release/${r.release}` && `release/${r.release}` < tdiBranch.name && (fromTdiDate.getTime() < time && time < toTdiDate.getTime()))
           : `release/${r.release}` == tdiBranch.name && (fromTdiDate.getTime() < time && time < toTdiDate.getTime())
         );
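
A brief aside on the `tdiBranchUpgrade` check added above: branch names are compared as plain JavaScript strings, so the upgrade test is lexicographic. A sketch with hypothetical values (not package code):

    // Plain string comparison, as used by tdiBranch.from.name < tdiBranch.name:
    'release/5.1' < 'release/5.2';   // true  -> counts as an upgrade
    'release/5.2' < 'release/5.2';   // false -> same branch, no upgrade
    'release/5.2' < 'release/5.10';  // false -> lexicographic: '2' sorts after '1' at the differing character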
package/src/modules/info/index.js
CHANGED

@@ -1,201 +1,201 @@

All 201 lines are removed and re-added with identical visible content, so the change appears to be whitespace or line endings only. The file reads:

    const fs = require('fs-extra');
    const globby = require('globby');
    const path = require('path');
    const {Table} = require('console-table-printer');

    const execGitCommand = require('../../lib/exec-git-command');
    const getTdiBranch = require('../../lib/get-tdi-branch');
    const c = require('../deploy/config');
    const {remote} = require('../deploy/execute');


    const getGitInfo = () => {
      // Version info TDI submodule
      const gitSubmoduleInfo = new Table({
        columns: [
          {name: 'property', title: 'TDI - submodule', alignment: 'left'},
          {name: 'value', alignment: 'left'}
        ],
      });

      // Fetch all
      const cmdFetch = execGitCommand('fetch -pf --all', path.join(_paths.repo, _paths.tdi));
      if (cmdFetch.error) _warn(`Fetch failed\n${cmdFetch.error}`);

      // Set branch name of firstBranch without 'remotes/origin/'
      const tdiBranch = getTdiBranch();

      // Get number of commits behind
      tdiBranch.commitsBehind = execGitCommand(`rev-list HEAD...origin/${tdiBranch.name} --count`, path.join(_paths.repo, _paths.tdi));

      // Create table rows for TDI submodule info
      gitSubmoduleInfo.addRow({
        property: 'Commit date',
        value: _formatDate(_git.commitTdi.local().date)
      });
      if (tdiBranch) {
        gitSubmoduleInfo.addRow({
          property: 'Branch',
          value: tdiBranch.name
        });
        gitSubmoduleInfo.addRow({
          property: 'Commits behind',
          value: tdiBranch.commitsBehind
        });
      } else {
        gitSubmoduleInfo.addRow({
          property: 'Branch could not be determined',
          value: ''
        });
      }

      // Print TDI submodule info
      gitSubmoduleInfo.printTable();
    };

    const getFileExtractInfo = (sorting) => {
      // version info miscellaneous
      const projects = new Set;
      const types = new Set;
      const versionInfoConfigPath = path.join(_paths.repo, _paths.tdi, 'tct/version/versionInfo.js');
      const versionInfo = new Table({
        columns: [
          {name: 'path', alignment: 'left'},
          {name: 'type', alignment: 'left'},
          {name: 'version', alignment: 'left'},
          {name: 'sort'}
        ],
        disabledColumns: ['sort'],
        sort: (a, b) => a.sort.toLowerCase() > b.sort.toLowerCase() ? 1 : -1
      });

      if (fs.existsSync(versionInfoConfigPath)) {
        const config = require(versionInfoConfigPath);

        config.forEach(v => {
          const location = path.join(_paths.repo, v.glob);

          globby
            .sync(location)
            .forEach(f => {
              const filePathExtract = f.match(/.*(?<path>(cmscustom|site-stylesheets)\/(?<customer>[^/]*)\/(?<project>[^/]*)\/.*)/);
              const path = filePathExtract.groups.path || '';
              const project = filePathExtract.groups.project || '';

              const fileContent = fs.readFileSync(f).toString();
              v.extracts.forEach(e => {
                const extract = fileContent.match(e.regex);
                if (extract) {
                  projects.add(project); // Store the projects where versioninfo is found
                  types.add(e.type); // Store the types for which versioninfo is found
                  versionInfo.addRow({ // Create row with version information to output
                    path,
                    sort: `${sorting=='project' ? project : e.type}_2${sorting=='project' ? e.type : extract.groups.version}`, // Output is sorted on project or type: '_2' ensures it is rendered after the empty row and the row with the project name
                    type: e.type,
                    version: extract.groups.version
                  });
                }
              });
            });
        });

        if (sorting=='project') {
          // For projects containing version information
          projects.forEach(p => {
            versionInfo.addRow({ // Add empty row after project
              path: '',
              sort: `${p}_3`,
              type: '',
              version: ''
            });
            versionInfo.addRow({ // Add row with project name
              path: `-- ${p}:`,
              sort: `${p}_1`,
              type: '',
              version: ''
            }, {
              color: 'yellow'
            });
          });
        }
        else if (sorting=='type') {
          types.forEach(t => {
            versionInfo.addRow({ // Add empty row after type
              path: '',
              sort: `${t}_3`,
              type: '',
              version: ''
            });
          });
        }
        versionInfo.printTable();
      }
      else {
        _warn('Version info of miscellaneous items cannot be extracted:\nCannot find required files in TDI submodule. Try updating TDI submodule.');
      }
    };

    const getServerInfo = (server) => {
      // Remote server info
      // common setup
      _write();
      c.setServer(server);

      if (!c.envDev) {
        _info(`Remote version info for '${c.server.ftpConfig.host}':\n`);
        remote.add('sudo ~root/scripts/version.sh', '').process();
      }
      else {
        _info('For development environments no server version information is available. Check rancher / database for this information.\nAdd the --server option with a non-dev environment to see version information for that server.');
      }
    };


    module.exports = function info (argv) {

      if (argv.doctypes) {
        _info('Document type information for this git repository\n');

        const doctypesInfo = new Table({
          columns: [
            {name: 'id', alignment: 'right'},
            {name: 'name', alignment: 'left'},
            {name: 'paths', alignment: 'left'}
          ],
        });

        globby
          .sync(_paths.repo + '/database/config/**/txd_document_types.sql')
          .forEach((p, i, a) => {
            fs.readFileSync(p).toString().match(/select([\s\S]+?)from\s+dual/gmi)
            .forEach((dtRow, i, a) => {
              const ntSqlInsert = fs.readFileSync(p.replace('txd_document_types', 'txd_node_types')).toString().match(/select(.*?)from\s+dual/s)[1];
              const id = dtRow.match(/(\d+) id/)?.[1];
              const name = dtRow.match(/'([^']+)' display_name/)?.[1];
              const dbPath = p.match(/(database\/config\/(:?.*)\/)txd_document_types.sql/i)?.[1];
              const prPath = ntSqlInsert.match(/'([^']+)' xsl_prep_inc/)[1].replace('prepare_xincludes.xsl', '');

              doctypesInfo.addRows([
                {id, name, paths: 'config/cmscustom/'+ prPath},
                {paths: dbPath}
              ]);

              if (i!==a.length-1) doctypesInfo.addRow({});
            });

            if (i!==a.length-1) doctypesInfo.addRow({});
          });


        doctypesInfo.printTable();
      }

      if (argv.versions) {
        _info('Version information for this git repository\n');

        getGitInfo();
        getFileExtractInfo(argv.versions);
        getServerInfo(argv.server);
      }

    };
package/src/modules/migrate/steps.js
CHANGED

@@ -52,9 +52,11 @@ module.exports = function steps (step, dry, filter) {
The content of the single removed line (old line 55) is not shown in this extract.

   }

   let filesModCount = 0;
-
+  let maxRepeat = 20;
+  for (let i=0; i<maxRepeat && r.files[0]; i++) { // execute repeatedly for modified files only (with safety limit of 20)
     r.files = rif.sync(r).filter(f => f.hasChanged).map(f => f.file);
     if (i===0) filesModCount = r.files.length; // save count only after first run (after this only subsets are processed)
+    if (i===maxRepeat-1 && r.files[0]) _warn(`Repeated replacement stopped by safety limit - check file changes for too many content occurrences`);
     if (dry) break;
   }

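
An illustrative sketch of the situation the new `maxRepeat` guard protects against, assuming `rif` is the replace-in-file package (which the `{file, hasChanged}` result shape suggests); the option values are hypothetical, not from the package:

    // A function replacement whose output still matches `from` keeps marking the file as
    // changed on every pass:
    const options = {
      files: 'config/**/*.xml',
      from: /v\d+/g,
      to: (match) => `v${Number(match.slice(1)) + 1}`,  // 'v1' -> 'v2' -> 'v3' -> ... matches again each pass
    };
    // Without a cap such a step would repeat indefinitely; the added guard stops after 20
    // passes and emits the new warning instead.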