@tangelo/tangelo-configuration-toolkit 1.23.2 → 1.24.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +94 -94
- package/Tangelo-Configuration-Toolkit.code-workspace +8 -0
- package/bin/index.js +2 -2
- package/package.json +1 -1
- package/src/lib/get-tdi-branch.js +45 -45
- package/src/lib/gulp-batch-replace-with-filter.js +19 -19
- package/src/lib/gulp-simple-rename.js +11 -11
- package/src/lib/gulp-tcl-config.js +2 -2
- package/src/lib/tcl-config.js +1 -1
- package/src/modules/build/oxygen.js +177 -177
- package/src/modules/deploy/execute.js +37 -1
- package/src/modules/git/index.js +12 -5
- package/src/modules/info/index.js +201 -201
- package/src/modules/migrate/steps.js +1 -1
- package/src/modules/sql/index.js +8 -1
package/README.md
CHANGED
@@ -1,94 +1,94 @@

All 94 lines were removed and re-added with identical text; the content is shown once below.

# tangelo-configuration-toolkit

Tangelo Configuration Toolkit is a command-line toolkit which offers support for developing a Tangelo configuration.

## Installation

The toolkit requires [NPM](https://www.npmjs.com/get-npm) on [Node.js®](https://nodejs.org/) (at least version 18.x). An active or maintenance LTS release is recommended. After installing Node.js, you can install the latest version of the Tangelo Configuration Toolkit globally on your system using the following command:

    npm i -g @tangelo/tangelo-configuration-toolkit

## Usage

Get help for the available commands and see the version:

    tangelo-configuration-toolkit

Get help for a specific command, detailing all its arguments:

    tangelo-configuration-toolkit <command>

Use the `tct` shorthand instead of `tangelo-configuration-toolkit`:

    tct <command>

## Config

### Global

TCT requires a config file to work: `${userhome}/.tct/appconfig.json`\
Several commands, like the `deploy` command, require server connection information, which can be stored here.

The contents look like this (all properties are optional):

    {
      "sharedConfigPath": "absolute/or/relative/path/to/folder/containing/shared/config",
      "servers": [{
        "config": {
          "port": 22,
          "parallel": 4,
          "username": "username",
          "remotedir": "/absolute/path/to/tangelo/config/folder/on/server"
        },
        "domains": ["domain.name.com"],
        "name": "name-for-local-deploy"
      }],
      "serverDefaults": {
        "config": {
          ...
        }
      },
      "defaultServer": "can be set to the name of e.g. your favorite dev server",
      "defaultDatabase": "can be set to the tnsname of e.g. your favorite dev server"
    }

When passing a server name, `tct` will look for a full match with a name or a partial match (the start) with a domain.
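
As an illustration only (the helper below is not part of the toolkit; it merely mirrors the matching rule described above, and the sample values are made up):

```js
// Resolve a server entry from appconfig.json by exact name, or by the start of one of its domains.
function findServer(servers, query) {
  return servers.find(s =>
    s.name === query ||                               // full match on "name"
    (s.domains || []).some(d => d.startsWith(query))  // partial match: the start of a "domain"
  );
}

// findServer(appconfig.servers, 'domain.name') would match the example server above via "domain.name.com".
```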

### oXygen

The `build -x` commands set the project's transformation scenarios and masterfiles in the oXygen project file with the following functionality:
- Will try to preserve manually added entries in the transformation scenarios and masterfiles
- Will remove non-existing masterfiles or masterfiles that start with a '_'
- No masterfiles / scenarios will be added if their path matches one of oXygen's hidden directory patterns

## Debug

Save this in the root of your local clone as `tct-dev.cmd`:

```
@ECHO off
SETLOCAL
CALL :find_dp0

IF EXIST "%dp0%\node.exe" (
  SET "_prog=%dp0%\node.exe"
) ELSE (
  SET "_prog=node"
  SET PATHEXT=%PATHEXT:;.JS;=;%
)

"%_prog%" "%dp0%\index.js" %*
ENDLOCAL
EXIT /b %errorlevel%
:find_dp0
SET dp0=%~dp0
EXIT /b
```

Then:

- open a 'JavaScript Debug Terminal' in VSCode
- jump to a customer folder (you can use `tan-cust` for this)
- set a breakpoint
- type (for instance): `tct-dev d -c`
package/bin/index.js
CHANGED
@@ -1,3 +1,3 @@

Lines 1–2 (the shebang and the blank line after it) were removed and re-added with identical text; line 3 is unchanged:

#!/usr/bin/env node

require('..');
package/src/lib/get-tdi-branch.js
CHANGED

@@ -1,46 +1,46 @@

Lines 1–45 were removed and re-added with identical text (line 46, the closing `};`, is unchanged); the content is shown once below.

const execGitCommand = require('./exec-git-command');
const path = require('path');

module.exports = function getTdiBranch(toBranchName) {
  const tdiPath = path.join(_paths.repo, _paths.tdi);
  const tdiBranch = {};

  // Fetch all
  _info(`Fetch TDI submodule`);
  const cmdFetch = execGitCommand('fetch -pf --all', tdiPath);
  if (cmdFetch.error) _warn(`Fetch failed\n${cmdFetch.error}`);

  let toBranch;
  if (toBranchName) {
    // Check if specified branch exists; we will update to this branch
    toBranch = String(toBranchName).replace(/(?:release\/)?(\d+(?:\.[\dxt]+)*)/, `release/$1`);
    const branchExists = execGitCommand(`branch --remote`, tdiPath).match(`origin/${toBranch}`);
    if (!branchExists) _error(`TDI branch "${toBranch}" does not exist. Note that TCT can only update to a release branch.`);
  }

  // Get remote release branches containing TDI HEAD commit
  const releaseBranches = execGitCommand(`branch --all --contains ${_git.commitTdi.local().hash}`, tdiPath).match(/remotes\/origin\/release\/[^\s]+/gsm);
  if (!releaseBranches || releaseBranches.error) _error(`Could not retrieve TDI release branches`);

  // Get the first possible branch; prefer release/5.1 over release/5.2:
  releaseBranches.sort((a, b) => a > b ? 1 : -1);

  // Set branch name of firstBranch without 'remotes/origin/'
  tdiBranch.name = releaseBranches[0].replace(/remotes\/origin\//g, '');

  // In case of branch switch set from.name to the old branch and name to the new branch
  if (toBranch) {
    if (tdiBranch.name > toBranch) _error(`You cannot downgrade to a lower release branch with TCT.`);
    tdiBranch.from = {name: tdiBranch.name};
    tdiBranch.name = toBranch;

    const branchHash = execGitCommand(`rev-parse origin/${toBranch}`, tdiPath);
    const commonAncestorHash = execGitCommand(`merge-base ${_git.commitTdi.local().hash} ${branchHash}`, tdiPath);
    const commonAncestorDate = execGitCommand(`show ${commonAncestorHash} --no-patch --format=%cd --date=iso-strict `, tdiPath, ['date']).date;
    tdiBranch.commonAncestor = {date: new Date(commonAncestorDate)};
  }

  // Get number of commits behind
  tdiBranch.commitsBehind = execGitCommand(`rev-list HEAD...origin/${tdiBranch.name} --count`, tdiPath);
  return tdiBranch;
};
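
The branch-name normalisation in `getTdiBranch` turns a bare version number into a `release/…` branch name; a small illustration of that regex with made-up inputs (not taken from the package):

```js
// Same regex as used in getTdiBranch above.
const toReleaseBranch = v => String(v).replace(/(?:release\/)?(\d+(?:\.[\dxt]+)*)/, 'release/$1');

toReleaseBranch('5.8');          // "release/5.8"
toReleaseBranch('release/5.8');  // "release/5.8" (already prefixed, left as is)
toReleaseBranch('5.8.x');        // "release/5.8.x"
```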
package/src/lib/gulp-batch-replace-with-filter.js
CHANGED

@@ -1,19 +1,19 @@

All 19 lines were removed and re-added with identical text; the content is shown once below.

// based on gulp-batch-replace

const es = require('event-stream'), minimatch = require('minimatch'), istextorbinary = require('istextorbinary');

const execReplace = (c, s, r) => Buffer.from(s instanceof RegExp ? String(c).replace(s, r) : String(c).split(s).join(r));

module.exports = arr => {
  return es.map((file, callback) => {
    if(file.contents instanceof Buffer) {
      for (const e of arr) {
        // exec if no glob is passed or if glob matches, and it's a text file
        if ((!e[2] || minimatch(file.path, e[2])) && istextorbinary.isText(file.path, file)) {
          file.contents = execReplace(file.contents, e[0], e[1]);
        }
      }
    }
    callback(null, file);
  });
};
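
A hedged usage sketch of this plugin in a gulpfile; the plugin only expects an array of `[search, replace, optionalGlob]` entries, as the code above shows, and the globs and replacement values here are invented:

```js
const gulp = require('gulp');
const batchReplace = require('./src/lib/gulp-batch-replace-with-filter');

// Replace a literal token in every text file, and a regex only in XML files.
gulp.src('config/**/*')
  .pipe(batchReplace([
    ['[customer]', 'acme'],           // plain string replacement, applied to all text files
    [/\bDEV\b/g, 'PRD', '**/*.xml']   // RegExp replacement, limited to files matching the glob
  ]))
  .pipe(gulp.dest('dist'));
```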
package/src/lib/gulp-simple-rename.js
CHANGED

@@ -1,11 +1,11 @@

All 11 lines were removed and re-added with identical text; the content is shown once below.

// replacement for the official (but deprecated) gulp-simple-rename
// kept same name because of usage in tdi

const through2 = require('through2');

module.exports = (fn) => {
  return through2.obj((file, enc, cb) => {
    file.path = fn(file.path);
    cb(null, file);
  });
};
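
A minimal usage sketch (the glob and the rename rule are illustrative, not from the package): the callback receives each file's path and returns the new one.

```js
const gulp = require('gulp');
const rename = require('./src/lib/gulp-simple-rename');

// Rewrite each file's extension before it continues down the pipe.
gulp.src('src/**/*.xsl')
  .pipe(rename(p => p.replace(/\.xsl$/, '.xslt')))
  .pipe(gulp.dest('out'));
```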
package/src/lib/gulp-tcl-config.js
CHANGED

@@ -29,8 +29,8 @@ module.exports = function() {
     const tclConfig = new TclConfig(tclFileDir, createAddToStreamFn(this));
     tclConfig.outputJson(path.resolve(tclFileDir, 'fonto/assets'));
     tclConfig.outputCss(path.resolve(tclFileDir, 'fonto'));
-    const
-    if (fs.existsSync(
+    const xbrlCssDir = path.resolve(tclFileDir, 'export/xbrl/xbrl_out/css');
+    if (fs.existsSync(xbrlCssDir)) tclConfig.outputCss(xbrlCssDir);
   }
   cb();
 }
package/src/lib/tcl-config.js
CHANGED
@@ -91,7 +91,7 @@ module.exports = class TclConfig {
   outputCss(dir) {
     if (this.#tclFileExists) {
       const filepath = path.join(dir, TclConfig.#TCL_CSS_FILE_NAME);
-      const moduleName = dir.includes('
+      const moduleName = dir.includes('xbrl') ? 'XBRL' : 'FONTO';
       this.#outputFn(filepath, this.#getCss(moduleName));
       this.modifyFontoHtml(dir);
     }
package/src/modules/build/oxygen.js
CHANGED

@@ -1,178 +1,178 @@

All 177 changed lines were removed and re-added with identical text (line 178, the closing `};`, is unchanged); the content is shown once below.

const fs = require('fs-extra');
const globby = require('globby');
const path = require('path');
const SaxonJS = require('saxon-js');

const spConfigPath = 'build/oxygen/stylesheetPaths.json';
const sefFilePath = 'build/oxygen/createProjectFile.sef.json';
const cmscustomPath = 'config/cmscustom/';
const siteStylesheetsPath = 'config/txp/site-stylesheets/';

const masterFiles = new Set;
const transformationScenarios = [];

let spConfig, sefFile;

const convertToValidFilename = string => string.replace(/[/|\\:*?"<>]/g, ' ');

const createProjectFile = (config, newXprFile) => {
  _info('Initializing xpr file(s):');
  const xprFiles = [...globby.sync(`*.xpr`)];
  // Add newXprFile at the start of xprFiles if it does not already exists:
  if (newXprFile && xprFiles.indexOf(newXprFile)===-1) xprFiles.unshift(newXprFile);

  // Copy xpr file from TDI if it does not exists yet;
  if (xprFiles[0] && !newXprFile) _write(`Found: ${xprFiles.join(', ')}`);
  else {
    if (!newXprFile) {
      // Set xpr filename to customer name (assumes correct upsert scripts structure)
      const customers = new Set;
      _repoconfig.forEach(p => customers.add(p.customer_name));
      xprFiles.push(convertToValidFilename([...customers].join(' - ')) + '.xpr');
    }
    // Copy new xpr file
    fs.copySync(path.join(_paths.repo, 'tangelo-default-implementation/src/[customer].xpr'), path.join(_paths.repo, xprFiles[0]));
    _write(`Created: '${xprFiles[0]}'`);
  }

  // Search for transformationScenarios/masterfiles based on TDI submodule oxygenProjectFile config
  _info('\nSearching for transformationScenarios/masterfiles');
  config.oxygenProjectFile.forEach(
    pf => {
      // Collect transformation scenarios and add them to the transformationScenarios array; add individual stylesheets to the masterFiles set.
      if (pf.transformation) getTransformations(pf.transformation);
      // Add files to the masterfiles set.
      else if (pf.masterfile) getMasterfiles(pf.masterfile);
    }
  );

  // update all .xpr files with collected transformation scenarios and masterfiles.
  transformXprFile(xprFiles);
};

const getTransformations = config => {
  _repoconfig.forEach(rc => {
    // get pathname of customer/project
    const [customerPath, projectPath] = config.location === 'database' ? rc.path_dbconfig : rc.path_cmscustom;

    // set pathname of customer/project in location glob-expression
    const location = path.join(
      _paths.repo,
      config.files.replace(/\[customer\]/, customerPath).replace(/\[project\]/, projectPath)
    );

    globby
      .sync(`${location}`)
      .forEach(f => {
        // extract baseStrings from file
        const fileString = fs.readFileSync(f).toString();
        const baseStrings = fileString.match(RegExp(config.extracts.base, 'gm'));

        if (fileString.replace(/\s|^prompt\s.*$/gm, '') !== '') {
          if (baseStrings) {
            baseStrings.forEach(s => {
              // extract type, name, files info from baseString
              const type = config.extracts.type ? s.match(RegExp(config.extracts.type))[1] : config.values.type;
              const name = config.extracts.name ? s.match(RegExp(config.extracts.name))[1] : config.values.name;
              const files = s.match(RegExp(config.extracts.files))[1];

              // Add transformation scenario to the transformationScenario array
              transformationScenarios.push({
                name: `${type}: ${name} (${rc.customer_name}, ${rc.project_name})`, // note that in createProjectFile.xsl a regex is added that matches scenarios based on this name. This to preserve manually added scenarios.
                transformationScenario: files,
                location: config.location === 'txp' ? siteStylesheetsPath : cmscustomPath
              });

              // Add each non-tdi stylesheet in transformation scenario to the masterFiles set
              files.split(',').forEach(f => {
                const filePath = `${config.location === 'txp' ? siteStylesheetsPath : cmscustomPath}${f}`;
                if (!f.startsWith('tdi')) masterFiles.add(filePath);
              });
            });
          } else {
            _write(`No transformation scenarios found in ${f} for '${config.extracts.base}'`);
          }
        }
      });
  });

};


const getMasterfiles = config => {
  globby
    .sync(`${path.join(_paths.repo, config.files)}`)
    .forEach(cf => {
      // Check if masterfile should be extracted from file
      const fileString = fs.readFileSync(cf).toString();
      if (fileString.replace(/\s|^prompt\s.*$/gm, '')!=='') {
        if (config.extracts) {
          // extract baseStrings from file
          const baseStrings = fileString.match(RegExp(config.extracts.base, 'gm'));

          if (baseStrings) {
            baseStrings.forEach(s => {
              // extract (comma-separated) list of masterfiles
              const filesString = s.match(RegExp(config.extracts.files))[1];
              if (!filesString) _error(`No masterfiles found in '${s}' for '${config.extracts.files}'`);

              // Add each non-tdi masterfile to the masterFiles set
              filesString.split(',').forEach(f => {
                if (!f.startsWith('tdi')){
                  const filePath = `${config.location === 'txp' ? siteStylesheetsPath : cmscustomPath}${f}`;
                  masterFiles.add(filePath);
                }
              });
            });
          } else {
            _write(`No masterfiles found in ${cf} for '${config.extracts.base}'`);
          }
        } else { // Add synced file to masterfiles; strip path from c:/... hence it starts with config/cmscustom or config/txp/site-stylesheets
          const filePath = config.location === 'txp' ? `${siteStylesheetsPath}${cf.split(siteStylesheetsPath)[1]}` : `${cmscustomPath}${cf.split(cmscustomPath)[1]}`;
          masterFiles.add(filePath);
        }
      }
    });

};

const transformXprFile = xprFiles => {
  _info('\nUpdating xpr file(s):');
  // create with: xslt3 -t -xsl:createProjectFile.xsl -export:createProjectFile.sef.json v -nogo'

  xprFiles
    .forEach(xprFile => {
      // Transform xpr; add masterfiles and transformationScenarios as parameters of the stylesheet
      _write(`${xprFile}\n`);
      SaxonJS.transform({
        stylesheetText: JSON.stringify(sefFile),
        stylesheetBaseURI: 'createProjectFile.sef.json',
        sourceFileName: path.join(_paths.repo, xprFile),
        destination: 'serialized',
        stylesheetParams: {
          'masterfiles': [...masterFiles],
          'Q{}transformationScenarios': transformationScenarios
        }
      }, 'async')
      .then(output => {
        // Write result of transformation to xpr file
        fs.writeFileSync(path.join(_paths.repo, xprFile), output.principalResult);
      })
      .catch(e => _warn(`Failed to update: ${xprFile}\n ${e}`));
    });
};


module.exports = function oxygen (arg) {
  // Set projects transformation scenarios and masterfiles in oXygen project file
  // - Will try to preserve manually added entries in the transformation scenarios and masterfiles
  // - Will remove non existing masterfiles or masterfiles that start with a '_'

  spConfig = _modulesTdi.require(spConfigPath);
  sefFile = _modulesTdi.require(sefFilePath);

  if (!spConfig || !sefFile) _error(`Cannot find required files in TDI submodule. Try updating TDI submodule.`);

  const newXprFile = (typeof arg === 'string') ? convertToValidFilename(arg) + '.xpr' : null;
  createProjectFile(spConfig, newXprFile);
};
package/src/modules/deploy/execute.js
CHANGED

@@ -4,6 +4,7 @@ const fs = require('fs-extra');
 const globby = require('globby');
 const gulp = require('gulp');
 const g_tcl = require('../../lib/gulp-tcl-config');
+const path = require('path');
 const through2 = require('through2');
 const {spawnSync} = require('child_process');
 
@@ -35,6 +36,8 @@ const createDeliveryPack = () => { // create install scripts if necessary, zip o
   });
 
   if (dbiSql[0]) { // complete and output sql script
+
+    const checkVersion = fs.existsSync(path.join(_paths.tdi, 'src/database/tdi/scripts/_check_version.sql')) ? '@@tangelo-default-implementation/src/database/tdi/scripts/_check_version.sql' : '';
     dbiSql.unshift(
       `accept l_tns char prompt 'Database TNS name : '`,
       `accept l_pw_tancms char prompt 'TANCMS password : '`,
@@ -43,6 +46,7 @@ const createDeliveryPack = () => { // create install scripts if necessary, zip o
       'prompt ',
       'connect tancms/&l_pw_tancms@&l_tns',
       'prompt ',
+      `${checkVersion}`,
       'set verify off define off'
     );
     dbiSql.push('prompt ', 'pause Press ENTER to exit', 'exit');
@@ -123,6 +127,7 @@ module.exports = function transfer (paths, {watch, lrServer} = {}) {
     .pipe(shF.restore)
     .pipe(g_plumber.stop())
     .pipe(g_replace(c.replaceStrings))
+    .pipe(through2.obj(combineTranslations))
     .pipe(through2.obj((file, enc, cb) => {
       file.originalRelativePath = file.relative; // original path needed for sftp.fastPut
       file.path = file.path.replace(/(fonto).(?:dev|dist|packages.sx-shell-.+src)(.+)/, '$1$2'); // change destination path for fonto build files
@@ -143,4 +148,35 @@ module.exports = function transfer (paths, {watch, lrServer} = {}) {
 
     if (c.deliveryPack) createDeliveryPack();
   });
-};
+};
+
+// Combine custom translations/resource bundle with tdi translations
+async function combineTranslations(file, enc, cb) {
+  const isTranslationFile = file.basename.match(/custom-UI-text.*\.properties/);
+
+  // Ignore non-translation files
+  if (!isTranslationFile) return cb(null, file);
+
+  // Remove tdi translations from pipe
+  if (file.path.match(/tdi/)) return cb();
+
+  // Get language from file name
+  const language = file.basename.match(/custom-UI-text(_.+?)?\.properties/)[1] ?? '';
+
+  // Read matching tdi translation file
+  const tdiTranslationPath = `cmscustom/tdi/custom-UI-text/custom-UI-text${language}.properties`;
+  let tdiTranslations;
+  try {
+    tdiTranslations = await fs.readFile(tdiTranslationPath, enc);
+  }
+  catch {
+    // If tdi translation file does not exist (<5.8), leave custom translations unchanged
+    return cb(null, file);
+  }
+
+  // Combine file content
+  const separator = `# Contents of ${tdiTranslationPath}`;
+  const newText = [file.contents, separator, tdiTranslations].join('\n');
+  file.contents = Buffer.from(newText);
+  return cb(null, file);
+}
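
The new `combineTranslations` step appends the TDI resource bundle to the customer's bundle before upload; an illustration of the resulting `.properties` content (keys and file contents are made up, only the separator line comes from the code above):

```js
// Illustrative only: mirrors the join performed in combineTranslations.
const customerBundle = 'save.button=Save\ncancel.button=Cancel';   // made-up custom-UI-text_en.properties
const tdiTranslationPath = 'cmscustom/tdi/custom-UI-text/custom-UI-text_en.properties';
const tdiBundle = 'save.button=Store\nlogout.link=Log out';        // made-up TDI bundle

const combined = [customerBundle, `# Contents of ${tdiTranslationPath}`, tdiBundle].join('\n');
// combined now reads:
//   save.button=Save
//   cancel.button=Cancel
//   # Contents of cmscustom/tdi/custom-UI-text/custom-UI-text_en.properties
//   save.button=Store
//   logout.link=Log out
```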
package/src/modules/git/index.js
CHANGED
@@ -3,6 +3,7 @@ const fs = require('fs-extra');
 const globby = require('globby');
 const inquirer = require('inquirer');
 const path = require('path');
+const {compare} = require('compare-versions');
 
 const execGitCommand = require('../../lib/exec-git-command');
 const getTdiBranch = require('../../lib/get-tdi-branch');
@@ -202,17 +203,23 @@ function updateSubmodule({dates = [], updateSubmodule: releaseBranchName}) {
   const migrations = _modulesTdi.require('git/tdiCommitsRequiringMigration.js');
   const fromDate = dateCustomFrom ? new Date(dateCustomFrom) : branch.commonAncestor?.date ?? dateBeforeUpdate;
   const toDate = dateCustomTo ? new Date(dateCustomTo) : new Date();
+  const fromVersion = branch.from?.name?.replace('release/', '');
+  const toVersion = branch.name.replace('release/', '');
 
   _info(`TDI commits requiring migration between ${_formatDate(fromDate)} and ${_formatDate(toDate)}`);
   // Filter the migrations that should be applied/considered; Also display older releases migrations
   // For a branch upgrade, all the migrations need to be executed from the beginning of the target branch, not just within the given date range
   const migrationsFiltered = migrations.filter(m => m.releases.find(r => {
     const withinDateRange = fromDate < new Date(r.date) && new Date(r.date) < toDate;
-
-
-
-    isTargetBranch &&
-
+
+    if (branchUpgrade) {
+      // For branch upgrades, include all migrations for versions newer than the current one, and include new migrations for the current version
+      const isTargetBranch = compare(r.release, toVersion, '<=') && compare(r.release, fromVersion, '>');
+      return isTargetBranch || (r.release === fromVersion && withinDateRange);
+    } else {
+      // For non-branch upgrades, only include new migrations for the current branch
+      return r.release === toVersion && withinDateRange;
+    }
   }));
 
   let relevantMigrationCount = 0;
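
The new filter uses `compare()` from the `compare-versions` package (two version strings plus an operator, returning a boolean). A small illustration of the branch-upgrade condition above, with made-up version numbers:

```js
const {compare} = require('compare-versions');

// Upgrading from release/5.7 to release/5.9: which releases fall inside the target range?
const fromVersion = '5.7', toVersion = '5.9';
const isTargetBranch = release => compare(release, toVersion, '<=') && compare(release, fromVersion, '>');

isTargetBranch('5.8');  // true  – newer than the old branch and not past the target
isTargetBranch('5.9');  // true  – the target release itself
isTargetBranch('5.7');  // false – old-branch migrations are only included when they fall within the date range
```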
package/src/modules/info/index.js
CHANGED

@@ -1,201 +1,201 @@

All 201 lines were removed and re-added with identical text; the content is shown once below.

const fs = require('fs-extra');
const globby = require('globby');
const path = require('path');
const {Table} = require('console-table-printer');

const execGitCommand = require('../../lib/exec-git-command');
const getTdiBranch = require('../../lib/get-tdi-branch');
const c = require('../deploy/config');
const RemoteExec = require('../../lib/remote-exec');


const getGitInfo = () => {
  // Version info TDI submodule
  const gitSubmoduleInfo = new Table({
    columns: [
      {name: 'property', title: 'TDI - submodule', alignment: 'left'},
      {name: 'value', alignment: 'left'}
    ],
  });

  // Fetch all
  const cmdFetch = execGitCommand('fetch -pf --all', path.join(_paths.repo, _paths.tdi));
  if (cmdFetch.error) _warn(`Fetch failed\n${cmdFetch.error}`);

  // Set branch name of firstBranch without 'remotes/origin/'
  const tdiBranch = getTdiBranch();

  // Get number of commits behind
  tdiBranch.commitsBehind = execGitCommand(`rev-list HEAD...origin/${tdiBranch.name} --count`, path.join(_paths.repo, _paths.tdi));

  // Create table rows for TDI submodule info
  gitSubmoduleInfo.addRow({
    property: 'Commit date',
    value: _formatDate(_git.commitTdi.local().date)
  });
  if (tdiBranch) {
    gitSubmoduleInfo.addRow({
      property: 'Branch',
      value: tdiBranch.name
    });
    gitSubmoduleInfo.addRow({
      property: 'Commits behind',
      value: tdiBranch.commitsBehind
    });
  } else {
    gitSubmoduleInfo.addRow({
      property: 'Branch could not be determined',
      value: ''
    });
  }

  // Print TDI submodule info
  gitSubmoduleInfo.printTable();
};

const getFileExtractInfo = (sorting) => {
  // version info miscellaneous
  const projects = new Set;
  const types = new Set;
  const versionInfo = new Table({
    columns: [
      {name: 'path', alignment: 'left'},
      {name: 'type', alignment: 'left'},
      {name: 'version', alignment: 'left'},
      {name: 'sort'}
    ],
    disabledColumns: ['sort'],
    sort: (a, b) => a.sort.toLowerCase() > b.sort.toLowerCase() ? 1 : -1
  });

  const versionInfoConfig = _modulesTdi.require('version/versionInfo.js');
  if (versionInfoConfig) {

    versionInfoConfig.forEach(v => {
      const location = path.join(_paths.repo, v.glob);

      globby
        .sync(location)
        .forEach(f => {
          const filePathExtract = f.match(/.*(?<path>(cmscustom|site-stylesheets)\/(?<customer>[^/]*)\/(?<project>[^/]*)\/.*)/);
          const path = filePathExtract.groups.path || '';
          const project = filePathExtract.groups.project || '';

          const fileContent = fs.readFileSync(f).toString();
          v.extracts.forEach(e => {
            const extract = fileContent.match(e.regex);
            if (extract) {
              projects.add(project); // Store the projects where versioninfo is found
              types.add(e.type); // Store the types for which versioninfo is found
              versionInfo.addRow({ // Create row with version information to output
                path,
                sort: `${sorting=='project' ? project : e.type}_2${sorting=='project' ? e.type : extract.groups.version}`, // Output is sorted on project or type: '_2' ensures it is rendered after the empty row and the row with the project name
                type: e.type,
                version: extract.groups.version
              });
            }
          });
        });
    });

    if (sorting=='project') {
      // For projects containing version information
      projects.forEach(p => {
        versionInfo.addRow({ // Add empty row after project
          path: '',
          sort: `${p}_3`,
          type: '',
          version: ''
        });
        versionInfo.addRow({ // Add row with project name
          path: `-- ${p}:`,
          sort: `${p}_1`,
          type: '',
          version: ''
        }, {
          color: 'yellow'
        });
      });
    }
    else if (sorting=='type') {
      types.forEach(t => {
        versionInfo.addRow({ // Add empty row after type
          path: '',
          sort: `${t}_3`,
          type: '',
          version: ''
        });
      });
    }
    versionInfo.printTable();
  }
  else {
    _warn('Version info of miscellaneous items cannot be extracted:\nCannot find required files in TDI submodule. Try updating TDI submodule.');
  }
};

const getServerInfo = (server) => {
  // Remote server info
  // common setup
  _write();
  c.setServer(server);

  if (!c.envDev) {
    _info(`Remote version info for '${c.server.ftpConfig.host}':\n`);
    new RemoteExec(c.server.ftpConfig).add('sudo ~root/scripts/version.sh', 'STDOUT').process();
  }
  else {
    _info('For development environments no server version information is available. Check rancher / database for this information.\nAdd the --server option with a non-dev environment to see version information for that server.');
  }
};


module.exports = function info (argv) {

  if (argv.doctypes) {
    _info('Document type information for this git repository\n');

    const doctypesInfo = new Table({
      columns: [
        {name: 'id', alignment: 'right'},
        {name: 'name', alignment: 'left'},
        {name: 'paths', alignment: 'left'}
      ],
    });

    globby
      .sync(_paths.repo + '/database/config/**/txd_document_types.sql')
      .forEach((p, i, a) => {
        fs.readFileSync(p).toString().match(/select([\s\S]+?)from\s+dual/gmi)
          .forEach((dtRow, i, a) => {
            const ntSqlInsert = fs.readFileSync(p.replace('txd_document_types', 'txd_node_types')).toString().match(/select(.*?)from\s+dual/s)[1];
            const id = dtRow.match(/(\d+) id/)?.[1];
            const name = dtRow.match(/'([^']+)' display_name/)?.[1];
            const dbPath = p.match(/(database\/config\/(:?.*)\/)txd_document_types.sql/i)?.[1];
            const xincl = ntSqlInsert.match(/'([^']+)' xsl_prep_inc/)?.[1] ?? dtRow.match(/'([^']+)' xsl_xincludes/)[1];
            const prPath = xincl.replace('prepare_xincludes.xsl', '');

            doctypesInfo.addRows([
              {id, name, paths: 'config/cmscustom/'+ prPath},
              {paths: dbPath}
            ]);

            if (i!==a.length-1) doctypesInfo.addRow({});
          });

        if (i!==a.length-1) doctypesInfo.addRow({});
      });


    doctypesInfo.printTable();
  }

  if (argv.versions) {
    _info('Version information for this git repository\n');

    getGitInfo();
    getFileExtractInfo(argv.versions);
    getServerInfo(argv.server);
  }

};
package/src/modules/migrate/steps.js
CHANGED

@@ -7,7 +7,7 @@ const rif = require('replace-in-file');
 
 class PathSearcher {
   #filter;
-  #ignorePaths = [_paths.tdi + '/**', 'config/cmscustom/tdi/**'];
+  #ignorePaths = [_paths.tdi + '/**', 'config/cmscustom/tdi/**', '**/node_modules/**', '**/fonto/packages_shared/**', '**/fonto/platform/**'];
   constructor (filter, ignoreProjectPaths) {
     this.#filter = filter;
     this.#ignorePaths.push(...ignoreProjectPaths);
package/src/modules/sql/index.js
CHANGED
@@ -2,7 +2,7 @@ const {execSync} = require('child_process');
 const fs = require('fs-extra');
 const globby = require('globby');
 const inquirer = require('inquirer');
-
+const path = require('path');
 
 const runSqlScript = (path, db, un, pw) => {
   const connect = db ? `${un}/${pw}@${db}` : '/nolog';
@@ -72,6 +72,13 @@ module.exports = function sql (argv) {
   const script = [`define l_env = ${a.pw=='tancms' ? 'dev' : 'prd'}`, 'set verify off define off'];
 
   script.push(`spool install-config.log`);
+
+  if (fs.existsSync(path.join(_paths.tdi, 'src/database/tdi/scripts/_check_version.sql'))) {
+    script.push(`whenever sqlerror exit sql.sqlcode`);
+    script.push(`@@../../tangelo-default-implementation/src/database/tdi/scripts/_check_version.sql`);
+    script.push(`whenever sqlerror continue`);
+  }
+
   a.projects.forEach(p => {
     const dir = p.path_dbconfig ? p.path_dbconfig.join('/') : (_repoconfig.customer.dirname+'/'+p.dirname);
     script.push('prompt ', `prompt Loading configuration for: ${p.name}`);