@uxf/scripts 11.74.5 → 11.77.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json
CHANGED
package/src/GitLab.js
CHANGED
|
@@ -80,11 +80,9 @@ async function findMigrationFiles(commits) {
|
|
|
80
80
|
return migrationList;
|
|
81
81
|
}
|
|
82
82
|
|
|
83
|
-
async function getLastTag() {
|
|
83
|
+
async function getLastTag(tagPrefix = "release-") {
|
|
84
84
|
const response = await axios.get(`/projects/${env.CI_PROJECT_ID}/repository/tags`, {
|
|
85
|
-
params: {
|
|
86
|
-
search: "^release-",
|
|
87
|
-
},
|
|
85
|
+
params: { search: `^${tagPrefix}` },
|
|
88
86
|
});
|
|
89
87
|
|
|
90
88
|
const tags = response.data;
|
|
@@ -98,8 +96,8 @@ async function getLastTag() {
|
|
|
98
96
|
return tags[0];
|
|
99
97
|
}
|
|
100
98
|
|
|
101
|
-
async function createRelease(description, dryRun = false) {
|
|
102
|
-
const tag =
|
|
99
|
+
async function createRelease(description, tagPrefix = "release-", dryRun = false) {
|
|
100
|
+
const tag = tagPrefix + dayjs().format("YYYY-MM-DD-HH-mm");
|
|
103
101
|
|
|
104
102
|
if (dryRun) {
|
|
105
103
|
console.log(`\n🎉🎉🎉 Release "${tag}" published (skipped in dry run)\n\n${description}`);
|
|
@@ -11,19 +11,18 @@ module.exports = async () => {
|
|
|
11
11
|
.option("o", { alias: "output", default: "i18n-pages.json" })
|
|
12
12
|
.option("n", { alias: "defaultNamespace", array: true, default: ["common"] })
|
|
13
13
|
.option("p", { alias: "pagesDirectory", default: "src/pages" })
|
|
14
|
-
.option("e", { alias: "fileExtension", array: true, default: ["ts", "tsx"] })
|
|
15
14
|
.option("h", { alias: "help" })
|
|
16
15
|
.strict(false)
|
|
17
16
|
.exitProcess(false);
|
|
18
17
|
|
|
19
18
|
try {
|
|
20
|
-
const { help, include, output, defaultNamespace, pagesDirectory
|
|
19
|
+
const { help, include, output, defaultNamespace, pagesDirectory } = cli.parse(argv.slice(2));
|
|
21
20
|
|
|
22
21
|
if (Boolean(help)) {
|
|
23
22
|
return 0;
|
|
24
23
|
}
|
|
25
24
|
|
|
26
|
-
await require("./index")(include, output, defaultNamespace, pagesDirectory
|
|
25
|
+
await require("./index")(include, output, defaultNamespace, pagesDirectory);
|
|
27
26
|
} catch (e) {
|
|
28
27
|
console.error(e);
|
|
29
28
|
return 1;
|
|
@@ -1,14 +1,134 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
2
|
const madge = require("madge");
|
|
3
3
|
const path = require("path");
|
|
4
|
-
const
|
|
5
|
-
const { readFileSync, readdirSync, writeFileSync } = require("fs");
|
|
4
|
+
const { readFileSync, readdirSync, writeFileSync, statSync, existsSync } = require("fs");
|
|
6
5
|
const { findTFunctionNamespaces } = require("./utils/find-t-function-namespaces");
|
|
7
6
|
const { findTransComponentNamespaces } = require("./utils/find-trans-component-namespaces");
|
|
8
7
|
const join = require("node:path").join;
|
|
9
8
|
|
|
9
|
+
const UXF_PACKAGES_PATH = "node_modules/@uxf";
|
|
10
|
+
const FILE_EXTENSIONS = ["js", "mjs", "cjs", "ts", "tsx", "d.ts", "mts", "cts", "d.mts", "d.cts"];
|
|
10
11
|
const TS_CONFIG_PATH = path.resolve(process.cwd(), "tsconfig.json");
|
|
11
|
-
const TS_CONFIG =
|
|
12
|
+
const TS_CONFIG = existsSync(TS_CONFIG_PATH) ? TS_CONFIG_PATH : undefined;
|
|
13
|
+
|
|
14
|
+
// Lazy-read tsconfig for paths alias resolution
|
|
15
|
+
let TS_PATHS_CACHE = null;
|
|
16
|
+
function getTsPaths() {
|
|
17
|
+
if (!TS_CONFIG) return null;
|
|
18
|
+
if (TS_PATHS_CACHE) return TS_PATHS_CACHE;
|
|
19
|
+
try {
|
|
20
|
+
const raw = JSON.parse(readFileSync(TS_CONFIG, "utf8"));
|
|
21
|
+
const compilerOptions = raw && raw.compilerOptions ? raw.compilerOptions : {};
|
|
22
|
+
const baseUrl = compilerOptions.baseUrl ? path.resolve(process.cwd(), compilerOptions.baseUrl) : process.cwd();
|
|
23
|
+
const paths = compilerOptions.paths || {};
|
|
24
|
+
TS_PATHS_CACHE = { baseUrl, paths };
|
|
25
|
+
return TS_PATHS_CACHE;
|
|
26
|
+
} catch {
|
|
27
|
+
return null;
|
|
28
|
+
}
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
/**
|
|
32
|
+
* Check if a file is allowed to be scanned for translations.
|
|
33
|
+
* @param {string} file
|
|
34
|
+
* @returns {boolean}
|
|
35
|
+
*/
|
|
36
|
+
function isAllowedFile(file) {
|
|
37
|
+
return !file.includes("node_modules") || file.includes(UXF_PACKAGES_PATH);
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
/**
|
|
41
|
+
* Resolve a module specifier (bare, relative, or absolute) to a real filesystem path if possible.
|
|
42
|
+
* Falls back to the original specifier when not resolvable.
|
|
43
|
+
* Only allows project files and node_modules/@uxf/* to avoid scanning the entire npm tree.
|
|
44
|
+
* @param {string} spec
|
|
45
|
+
* @returns {string}
|
|
46
|
+
*/
|
|
47
|
+
function resolveModuleSpecifier(spec) {
|
|
48
|
+
try {
|
|
49
|
+
if (!spec || typeof spec !== "string") return spec;
|
|
50
|
+
if (existsSync(spec)) return spec;
|
|
51
|
+
|
|
52
|
+
const ts = getTsPaths();
|
|
53
|
+
if (ts?.paths) {
|
|
54
|
+
for (const [pattern, targets] of Object.entries(ts.paths)) {
|
|
55
|
+
const starIndex = pattern.indexOf("*");
|
|
56
|
+
|
|
57
|
+
if (starIndex !== -1) {
|
|
58
|
+
const prefix = pattern.slice(0, starIndex);
|
|
59
|
+
const suffix = pattern.slice(starIndex + 1);
|
|
60
|
+
|
|
61
|
+
if (spec.startsWith(prefix) && spec.endsWith(suffix)) {
|
|
62
|
+
const middle = spec.slice(prefix.length, spec.length - suffix.length);
|
|
63
|
+
|
|
64
|
+
for (const target of targets) {
|
|
65
|
+
const mapped = target.includes("*") ? target.replace("*", middle) : target;
|
|
66
|
+
|
|
67
|
+
const candidates = [
|
|
68
|
+
path.resolve(ts.baseUrl, mapped),
|
|
69
|
+
...FILE_EXTENSIONS.map((e) => path.resolve(ts.baseUrl, mapped + "." + e)),
|
|
70
|
+
...FILE_EXTENSIONS.map((e) => path.join(ts.baseUrl, mapped, "index." + e)),
|
|
71
|
+
].find(existsSync);
|
|
72
|
+
|
|
73
|
+
if (candidates) return candidates;
|
|
74
|
+
}
|
|
75
|
+
}
|
|
76
|
+
} else if (spec === pattern) {
|
|
77
|
+
const candidate = targets.map((t) => path.resolve(ts.baseUrl, t)).find(existsSync);
|
|
78
|
+
if (candidate) return candidate;
|
|
79
|
+
}
|
|
80
|
+
}
|
|
81
|
+
}
|
|
82
|
+
|
|
83
|
+
const resolved = require.resolve(spec, { paths: [process.cwd()] });
|
|
84
|
+
return isAllowedFile(resolved) ? resolved : spec;
|
|
85
|
+
} catch {
|
|
86
|
+
return spec;
|
|
87
|
+
}
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
/**
|
|
91
|
+
* Resolve import specifier in the context of a file path (handles relative imports correctly).
|
|
92
|
+
* @param {string} spec
|
|
93
|
+
* @param {string} fromFile
|
|
94
|
+
* @returns {string}
|
|
95
|
+
*/
|
|
96
|
+
function resolveImportFrom(spec, fromFile) {
|
|
97
|
+
try {
|
|
98
|
+
if (!spec || typeof spec !== "string") return spec;
|
|
99
|
+
|
|
100
|
+
// Relative path from the file's directory
|
|
101
|
+
if (spec.startsWith(".")) {
|
|
102
|
+
const baseDir = path.dirname(fromFile);
|
|
103
|
+
let candidate = path.resolve(baseDir, spec);
|
|
104
|
+
|
|
105
|
+
// Try as file
|
|
106
|
+
for (const e of FILE_EXTENSIONS) {
|
|
107
|
+
const c = candidate + "." + e;
|
|
108
|
+
if (existsSync(c)) return c;
|
|
109
|
+
}
|
|
110
|
+
// Try as directory index
|
|
111
|
+
if (existsSync(candidate) && statSync(candidate).isDirectory()) {
|
|
112
|
+
for (const e of FILE_EXTENSIONS) {
|
|
113
|
+
const idx = path.join(candidate, "index." + e);
|
|
114
|
+
if (existsSync(idx)) return idx;
|
|
115
|
+
}
|
|
116
|
+
}
|
|
117
|
+
|
|
118
|
+
// Fallback to Node resolver with file dir as base
|
|
119
|
+
const resolved = require.resolve(spec, { paths: [baseDir] });
|
|
120
|
+
if (isAllowedFile(resolved)) {
|
|
121
|
+
return resolved;
|
|
122
|
+
}
|
|
123
|
+
return spec;
|
|
124
|
+
}
|
|
125
|
+
|
|
126
|
+
// Bare/absolute spec
|
|
127
|
+
return resolveModuleSpecifier(spec);
|
|
128
|
+
} catch (_e) {
|
|
129
|
+
return spec;
|
|
130
|
+
}
|
|
131
|
+
}
|
|
12
132
|
|
|
13
133
|
function removeTrailingSlash(str) {
|
|
14
134
|
return str.replace(/\/$/, "");
|
|
@@ -64,83 +184,101 @@ const filePathToRoute = (filePath) => {
|
|
|
64
184
|
return removeTrailingSlash(route);
|
|
65
185
|
};
|
|
66
186
|
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
const resolvedFiles = [];
|
|
187
|
+
async function getMadgeTree(entries, include) {
|
|
188
|
+
// Build madge options; allow traversing UXF packages in node_modules.
|
|
189
|
+
const options = {
|
|
190
|
+
tsConfig: TS_CONFIG,
|
|
191
|
+
fileExtensions: FILE_EXTENSIONS,
|
|
192
|
+
includeNpm: true,
|
|
193
|
+
dependencyFilter: (dependency) => {
|
|
194
|
+
return !dependency.includes("node_modules") || dependency.includes(UXF_PACKAGES_PATH);
|
|
195
|
+
},
|
|
196
|
+
};
|
|
78
197
|
|
|
79
|
-
|
|
80
|
-
|
|
198
|
+
// If include is provided, limit traversal to those prefixes AND node_modules/@uxf.
|
|
199
|
+
if (Array.isArray(include) && include.length > 0) {
|
|
200
|
+
const allowedPrefixes = [...include, UXF_PACKAGES_PATH];
|
|
201
|
+
const searchDirs = new RegExp(`^(?!(${allowedPrefixes.join("|")}))`, "i");
|
|
202
|
+
options.excludeRegExp = [searchDirs];
|
|
81
203
|
}
|
|
82
204
|
|
|
83
|
-
|
|
84
|
-
|
|
205
|
+
const res = await madge(entries, options);
|
|
206
|
+
return res.obj();
|
|
207
|
+
}
|
|
85
208
|
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
const reexportMatches = content.match(/export\s*(?:\{[^}]*\}|\*)\s*from\s*['"](\.[^'"]+)['"]/g);
|
|
209
|
+
async function getFiles(entryPoint, tree) {
|
|
210
|
+
const filesOnPath = [];
|
|
89
211
|
|
|
90
|
-
|
|
91
|
-
|
|
212
|
+
// Always include the entry point file itself (for cases when page does not import anything with translations)
|
|
213
|
+
filesOnPath.push(entryPoint);
|
|
92
214
|
|
|
93
|
-
|
|
94
|
-
const pathMatch = match.match(/from\s*['"]([^'"]+)['"]/);
|
|
95
|
-
if (pathMatch) {
|
|
96
|
-
let relativePath = pathMatch[1];
|
|
215
|
+
getTree([entryPoint], tree, filesOnPath);
|
|
97
216
|
|
|
98
|
-
|
|
99
|
-
let resolvedPath = path.resolve(fileDir, relativePath);
|
|
217
|
+
let flattenFilesOnPath = Array.from(new Set(filesOnPath.flat(Number.POSITIVE_INFINITY)));
|
|
100
218
|
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
219
|
+
// Helper: extract module specifiers from file content
|
|
220
|
+
const extractSpecs = (content) => {
|
|
221
|
+
const specs = new Set();
|
|
222
|
+
const reImportFrom = /import\s+[^'"\n;]*?from\s*['"]([^'"\n]+)['"]/g;
|
|
223
|
+
const reDynamicImport = /import\s*\(\s*['"]([^'"\n]+)['"]\s*\)/g;
|
|
224
|
+
const reRequire = /require\(\s*['"]([^'"\n]+)['"]\s*\)/g;
|
|
225
|
+
const reExportFrom = /export\s*(?:\{[^}]*\}|\*)\s*from\s*['"]([^'"\n]+)['"]/g;
|
|
226
|
+
for (const re of [reImportFrom, reDynamicImport, reRequire, reExportFrom]) {
|
|
227
|
+
for (const m of content.matchAll(re)) {
|
|
228
|
+
if (m && m[1]) specs.add(m[1]);
|
|
229
|
+
}
|
|
230
|
+
}
|
|
231
|
+
return Array.from(specs);
|
|
232
|
+
};
|
|
104
233
|
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
234
|
+
// Resolve re-exports to get actual component files and traverse imports recursively
|
|
235
|
+
const resolvedFiles = [];
|
|
236
|
+
const queue = [];
|
|
237
|
+
const visited = new Set();
|
|
238
|
+
|
|
239
|
+
// Seed queue with initial files from madge tree
|
|
240
|
+
for (const file of flattenFilesOnPath) {
|
|
241
|
+
if (file && typeof file === "string") {
|
|
242
|
+
const fsPath = resolveModuleSpecifier(file);
|
|
243
|
+
if (typeof fsPath === "string") {
|
|
244
|
+
try {
|
|
245
|
+
if (isAllowedFile(fsPath) && statSync(fsPath).isFile()) {
|
|
246
|
+
queue.push(fsPath);
|
|
112
247
|
}
|
|
248
|
+
} catch {}
|
|
249
|
+
}
|
|
250
|
+
}
|
|
251
|
+
}
|
|
113
252
|
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
253
|
+
while (queue.length > 0) {
|
|
254
|
+
const fsPath = queue.shift();
|
|
255
|
+
if (!fsPath || visited.has(fsPath)) continue;
|
|
256
|
+
visited.add(fsPath);
|
|
257
|
+
resolvedFiles.push(fsPath);
|
|
258
|
+
|
|
259
|
+
if (!existsSync(fsPath)) continue;
|
|
260
|
+
|
|
261
|
+
// Scan direct imports/requires/exports-from and follow them
|
|
262
|
+
try {
|
|
263
|
+
const content = readFileSync(fsPath, "utf8");
|
|
264
|
+
const specs = extractSpecs(content);
|
|
265
|
+
for (const spec of specs) {
|
|
266
|
+
const p = resolveImportFrom(spec, fsPath);
|
|
267
|
+
// Only follow project files and UXF packages (resolvers already filter others)
|
|
268
|
+
if (typeof p === "string" && (!p.includes("node_modules") || p.includes(UXF_PACKAGES_PATH))) {
|
|
269
|
+
if (!visited.has(p)) {
|
|
270
|
+
try {
|
|
271
|
+
if (isAllowedFile(p) && statSync(p).isFile()) {
|
|
272
|
+
queue.push(p);
|
|
123
273
|
}
|
|
124
|
-
}
|
|
125
|
-
}
|
|
126
|
-
|
|
127
|
-
if (!foundFile) {
|
|
128
|
-
console.warn(`Could not resolve re-export: ${relativePath} from ${filePath}`);
|
|
274
|
+
} catch {}
|
|
129
275
|
}
|
|
130
276
|
}
|
|
131
277
|
}
|
|
132
|
-
|
|
133
|
-
// If we found re-exports, don't include the original index file
|
|
134
|
-
if (resolvedFiles.length > 0) {
|
|
135
|
-
return resolvedFiles;
|
|
136
|
-
}
|
|
137
|
-
}
|
|
138
|
-
} catch (error) {
|
|
139
|
-
console.warn(`Error reading file ${filePath}:`, error.message);
|
|
278
|
+
} catch {}
|
|
140
279
|
}
|
|
141
280
|
|
|
142
|
-
|
|
143
|
-
return [filePath];
|
|
281
|
+
return Array.from(new Set(resolvedFiles));
|
|
144
282
|
}
|
|
145
283
|
|
|
146
284
|
/**
|
|
@@ -148,76 +286,47 @@ function resolveReexports(filePath) {
|
|
|
148
286
|
* @param output string
|
|
149
287
|
* @param defaultNamespaces string[]
|
|
150
288
|
* @param pagesDirectory string
|
|
151
|
-
* @param fileExtensions string[]
|
|
152
289
|
*/
|
|
153
|
-
function main(include, output, defaultNamespaces, pagesDirectory
|
|
290
|
+
async function main(include, output, defaultNamespaces, pagesDirectory) {
|
|
154
291
|
const result = { "*": defaultNamespaces };
|
|
155
|
-
// Negative lookahead – ignore searching for any files
|
|
156
|
-
// that aren't part of our include list
|
|
157
|
-
const searchDirs = new RegExp(`^(?!(${include.join("|")}))`, "i");
|
|
158
292
|
|
|
159
293
|
const pages = walk(pagesDirectory).flat(Number.POSITIVE_INFINITY);
|
|
160
294
|
|
|
161
|
-
madge
|
|
162
|
-
|
|
163
|
-
excludeRegExp: [searchDirs],
|
|
164
|
-
fileExtensions: fileExtensions,
|
|
165
|
-
includeNpm: true,
|
|
166
|
-
dependencyFilter: (dependency) => {
|
|
167
|
-
return !dependency.includes("node_modules") || dependency.includes("node_modules/@uxf");
|
|
168
|
-
},
|
|
169
|
-
}).then((res) => {
|
|
170
|
-
const tree = res.obj();
|
|
171
|
-
|
|
172
|
-
for (const entryPoint of pages) {
|
|
173
|
-
let namespaces = [];
|
|
174
|
-
const filesOnPath = [];
|
|
175
|
-
|
|
176
|
-
// Always include the entry point file itself (for cases when page does not import anything with translations)
|
|
177
|
-
filesOnPath.push(entryPoint);
|
|
295
|
+
// Build a global dependency tree rooted at project to leverage madge resolution across aliases/packages
|
|
296
|
+
const tree = await getMadgeTree(process.cwd(), include);
|
|
178
297
|
|
|
179
|
-
|
|
298
|
+
for (const entryPoint of pages) {
|
|
299
|
+
let namespaces = [];
|
|
180
300
|
|
|
181
|
-
|
|
301
|
+
const uniqueResolvedFiles = await getFiles(entryPoint, tree);
|
|
182
302
|
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
|
|
186
|
-
if (file && typeof file === "string") {
|
|
187
|
-
resolvedFiles.push(...resolveReexports(file));
|
|
188
|
-
}
|
|
303
|
+
for (const file of uniqueResolvedFiles) {
|
|
304
|
+
if (!file || !existsSync(file)) {
|
|
305
|
+
continue;
|
|
189
306
|
}
|
|
190
307
|
|
|
191
|
-
|
|
192
|
-
|
|
193
|
-
|
|
194
|
-
if (!file || !fs.existsSync(file)) {
|
|
195
|
-
continue;
|
|
196
|
-
}
|
|
197
|
-
|
|
198
|
-
try {
|
|
199
|
-
const fileContent = readFileSync(file).toString();
|
|
200
|
-
const fileNamespaces = findNamespaces(fileContent);
|
|
308
|
+
try {
|
|
309
|
+
const fileContent = readFileSync(file).toString();
|
|
310
|
+
const fileNamespaces = findNamespaces(fileContent);
|
|
201
311
|
|
|
202
|
-
|
|
203
|
-
|
|
204
|
-
|
|
205
|
-
}
|
|
312
|
+
namespaces = [...namespaces, ...fileNamespaces];
|
|
313
|
+
} catch (error) {
|
|
314
|
+
console.warn(`Error reading file ${file}:`, error.message);
|
|
206
315
|
}
|
|
316
|
+
}
|
|
207
317
|
|
|
208
|
-
|
|
318
|
+
namespaces = Array.from(new Set(namespaces)).sort();
|
|
209
319
|
|
|
210
|
-
|
|
320
|
+
const page = filePathToRoute(entryPoint);
|
|
211
321
|
|
|
212
|
-
|
|
213
|
-
|
|
214
|
-
}
|
|
322
|
+
if (namespaces.length > 0) {
|
|
323
|
+
result[page] = namespaces;
|
|
215
324
|
}
|
|
325
|
+
}
|
|
216
326
|
|
|
217
|
-
|
|
327
|
+
writeFileSync(path.resolve(process.cwd(), output), JSON.stringify(result, null, 4));
|
|
218
328
|
|
|
219
|
-
|
|
220
|
-
});
|
|
329
|
+
console.log("Namespaces generated!");
|
|
221
330
|
}
|
|
222
331
|
|
|
223
332
|
module.exports = main;
|
package/src/uxf-release/cli.js
CHANGED
|
@@ -3,7 +3,7 @@ const { argv, env } = require("process");
|
|
|
3
3
|
module.exports = async () => {
|
|
4
4
|
const cli = require("yargs")
|
|
5
5
|
.command("$0", "UXF release helper", (yargs) => {
|
|
6
|
-
yargs.demandCommand(0, 0).usage(`
|
|
6
|
+
yargs.demandCommand(0, 0).usage(`
|
|
7
7
|
Usage:
|
|
8
8
|
uxf-release [options]
|
|
9
9
|
|
|
@@ -14,6 +14,18 @@ Environment variables:
|
|
|
14
14
|
SLACK_TOKEN - optional
|
|
15
15
|
GOOGLE_WEBHOOK_URL - optional`);
|
|
16
16
|
})
|
|
17
|
+
.option("t", {
|
|
18
|
+
alias: "tag-prefix",
|
|
19
|
+
describe: "Git tag prefix",
|
|
20
|
+
type: "string",
|
|
21
|
+
group: "Options",
|
|
22
|
+
})
|
|
23
|
+
.option("g", {
|
|
24
|
+
alias: "google-chat-webhook",
|
|
25
|
+
describe: "Google chat webhook url",
|
|
26
|
+
type: "string",
|
|
27
|
+
group: "Options",
|
|
28
|
+
})
|
|
17
29
|
.option("m", {
|
|
18
30
|
alias: "message",
|
|
19
31
|
describe: "Message title",
|
|
@@ -43,10 +55,11 @@ Environment variables:
|
|
|
43
55
|
.exitProcess(false);
|
|
44
56
|
|
|
45
57
|
try {
|
|
46
|
-
const { help, p: projectId, d: dryRun, s, m, ...options } = cli.parse(argv.slice(2));
|
|
58
|
+
const { help, p: projectId, d: dryRun, s, m, g: googleChatWebhook, t, ...options } = cli.parse(argv.slice(2));
|
|
47
59
|
|
|
48
60
|
const slackChannel = s || options["slack-channel"];
|
|
49
61
|
const messageTitle = m || options["message"];
|
|
62
|
+
const tagPrefix = t || options["tag-prefix"];
|
|
50
63
|
|
|
51
64
|
if (Boolean(help)) {
|
|
52
65
|
return 0;
|
|
@@ -71,7 +84,13 @@ Environment variables:
|
|
|
71
84
|
return 1;
|
|
72
85
|
}
|
|
73
86
|
|
|
74
|
-
await require("./index")(
|
|
87
|
+
await require("./index")({
|
|
88
|
+
dryRun,
|
|
89
|
+
messageTitle,
|
|
90
|
+
tagPrefix,
|
|
91
|
+
channel: slackChannel,
|
|
92
|
+
googleChatWebhook: googleChatWebhook || env.GOOGLE_WEBHOOK_URL,
|
|
93
|
+
});
|
|
75
94
|
} catch (e) {
|
|
76
95
|
console.error(e);
|
|
77
96
|
return 1;
|
package/src/uxf-release/index.js
CHANGED
|
@@ -89,32 +89,35 @@ function generateMigrationWarning(migrationFiles = []) {
|
|
|
89
89
|
return `⚠️ VAROVÁNÍ: Součástí této verze jsou i změny v databázi.\n\n` + `Seznam migrací:\n${migrations}\n`;
|
|
90
90
|
}
|
|
91
91
|
|
|
92
|
-
|
|
93
|
-
|
|
92
|
+
/**
|
|
93
|
+
* @param config {{dryRun?: boolean, channel?: string, messageTitle?: string, googleChatWebhook?: string, tagPrefix?: string}}
|
|
94
|
+
*/
|
|
95
|
+
module.exports = async function (config) {
|
|
96
|
+
const lastTag = await GitLab.getLastTag(config.tagPrefix);
|
|
94
97
|
const commits = await GitLab.loadCommits(lastTag ? lastTag.commit.committed_date : null);
|
|
95
98
|
const migrationFiles = await GitLab.findMigrationFiles(commits);
|
|
96
99
|
|
|
97
100
|
const migrationWarning = generateMigrationWarning(migrationFiles);
|
|
98
101
|
|
|
99
102
|
// Add migration warning to Slack message if needed
|
|
100
|
-
const slackMessage = generateSlackMessage(commits, messageTitle);
|
|
103
|
+
const slackMessage = generateSlackMessage(commits, config.messageTitle);
|
|
101
104
|
if (migrationWarning) {
|
|
102
105
|
slackMessage.text = `${slackMessage.text}\n\n${migrationWarning}`;
|
|
103
106
|
}
|
|
104
107
|
|
|
105
|
-
await Slack.chatPostMessage(channel, { text: slackMessage.text }, dryRun);
|
|
108
|
+
await Slack.chatPostMessage(config.channel, { text: slackMessage.text }, config.dryRun);
|
|
106
109
|
|
|
107
110
|
// Add migration warning to Google Chat message if needed
|
|
108
|
-
const googleMessage = generateGoogleMessage(commits, messageTitle);
|
|
111
|
+
const googleMessage = generateGoogleMessage(commits, config.messageTitle);
|
|
109
112
|
if (migrationWarning) {
|
|
110
113
|
googleMessage.text = `${googleMessage.text}\n\n${migrationWarning}`;
|
|
111
114
|
}
|
|
112
115
|
|
|
113
|
-
await GoogleChat.chatPostMessage(googleMessage, { dryRun });
|
|
116
|
+
await GoogleChat.chatPostMessage(googleMessage, { dryRun: config.dryRun, webhookUrl: config.googleChatWebhook });
|
|
114
117
|
|
|
115
118
|
// Add migration warning to release notes if needed
|
|
116
119
|
const releaseNotes = commits.map(generateReleaseCommitMessage).join("\n");
|
|
117
120
|
const fullReleaseNotes = migrationWarning + releaseNotes;
|
|
118
121
|
|
|
119
|
-
await GitLab.createRelease(fullReleaseNotes, dryRun);
|
|
122
|
+
await GitLab.createRelease(fullReleaseNotes, config.tagPrefix, config.dryRun);
|
|
120
123
|
};
|