keycloakify 7.3.0 → 7.3.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/download-builtin-keycloak-theme.js +9 -8
- package/bin/download-builtin-keycloak-theme.js.map +1 -1
- package/bin/tools/downloadAndUnzip.d.ts +1 -3
- package/bin/tools/downloadAndUnzip.js +85 -371
- package/bin/tools/downloadAndUnzip.js.map +1 -1
- package/bin/tools/jar.js +9 -5
- package/bin/tools/jar.js.map +1 -1
- package/bin/tools/partitionPromiseSettledResults.d.ts +2 -0
- package/bin/tools/partitionPromiseSettledResults.js +41 -0
- package/bin/tools/partitionPromiseSettledResults.js.map +1 -0
- package/bin/tools/trimIndent.d.ts +9 -0
- package/bin/tools/trimIndent.js +98 -0
- package/bin/tools/trimIndent.js.map +1 -0
- package/bin/tools/unzip.d.ts +30 -0
- package/bin/tools/unzip.js +345 -0
- package/bin/tools/unzip.js.map +1 -0
- package/package.json +13 -1
- package/src/bin/download-builtin-keycloak-theme.ts +17 -17
- package/src/bin/tools/downloadAndUnzip.ts +52 -236
- package/src/bin/tools/jar.ts +16 -24
- package/src/bin/tools/partitionPromiseSettledResults.ts +11 -0
- package/src/bin/tools/trimIndent.ts +51 -0
- package/src/bin/tools/unzip.ts +184 -0
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "keycloakify",
-  "version": "7.3.0",
+  "version": "7.3.2",
   "description": "Create Keycloak themes using React",
   "repository": {
     "type": "git",
@@ -99,9 +99,12 @@
     "src/bin/tools/logger.ts",
     "src/bin/tools/octokit-addons/getLatestsSemVersionedTag.ts",
     "src/bin/tools/octokit-addons/listTags.ts",
+    "src/bin/tools/partitionPromiseSettledResults.ts",
     "src/bin/tools/pathJoin.ts",
     "src/bin/tools/tee.ts",
     "src/bin/tools/transformCodebase.ts",
+    "src/bin/tools/trimIndent.ts",
+    "src/bin/tools/unzip.ts",
     "src/bin/tools/walk.ts",
     "src/bin/tools/zip.ts",
     "src/bin/tsconfig.json",
@@ -401,6 +404,9 @@
     "bin/tools/octokit-addons/listTags.d.ts",
     "bin/tools/octokit-addons/listTags.js",
     "bin/tools/octokit-addons/listTags.js.map",
+    "bin/tools/partitionPromiseSettledResults.d.ts",
+    "bin/tools/partitionPromiseSettledResults.js",
+    "bin/tools/partitionPromiseSettledResults.js.map",
     "bin/tools/pathJoin.d.ts",
     "bin/tools/pathJoin.js",
     "bin/tools/pathJoin.js.map",
@@ -410,6 +416,12 @@
     "bin/tools/transformCodebase.d.ts",
     "bin/tools/transformCodebase.js",
     "bin/tools/transformCodebase.js.map",
+    "bin/tools/trimIndent.d.ts",
+    "bin/tools/trimIndent.js",
+    "bin/tools/trimIndent.js.map",
+    "bin/tools/unzip.d.ts",
+    "bin/tools/unzip.js",
+    "bin/tools/unzip.js.map",
     "bin/tools/walk.d.ts",
     "bin/tools/walk.js",
     "bin/tools/walk.js.map",
package/src/bin/download-builtin-keycloak-theme.ts
CHANGED
@@ -8,35 +8,35 @@ import { getCliOptions } from "./tools/cliOptions";
 import { getLogger } from "./tools/logger";
 
 export async function downloadBuiltinKeycloakTheme(params: { keycloakVersion: string; destDirPath: string; isSilent: boolean }) {
-    const { keycloakVersion, destDirPath, isSilent } = params;
+    const { keycloakVersion, destDirPath } = params;
 
     await Promise.all(
         ["", "-community"].map(ext =>
             downloadAndUnzip({
                 "destDirPath": destDirPath,
                 "url": `https://github.com/keycloak/keycloak/archive/refs/tags/${keycloakVersion}.zip`,
-                "pathOfDirToExtractInArchive": `keycloak-${keycloakVersion}/themes/src/main/resources${ext}/theme`,
-                "cacheDirPath": pathJoin(keycloakThemeBuildingDirPath, ".cache"),
-                isSilent
+                "pathOfDirToExtractInArchive": `keycloak-${keycloakVersion}/themes/src/main/resources${ext}/theme`
             })
         )
     );
 }
 
-if (require.main === module) {
-    (async () => {
-        const { isSilent } = getCliOptions(process.argv.slice(2));
-        const logger = getLogger({ isSilent });
-        const { keycloakVersion } = await promptKeycloakVersion();
+async function main() {
+    const { isSilent } = getCliOptions(process.argv.slice(2));
+    const logger = getLogger({ isSilent });
+    const { keycloakVersion } = await promptKeycloakVersion();
+
+    const destDirPath = pathJoin(keycloakThemeBuildingDirPath, "src", "main", "resources", "theme");
 
-        const destDirPath = pathJoin(keycloakThemeBuildingDirPath, "src", "main", "resources", "theme");
+    logger.log(`Downloading builtins theme of Keycloak ${keycloakVersion} here ${destDirPath}`);
 
-        logger.log(`Downloading builtins theme of Keycloak ${keycloakVersion} here ${destDirPath}`);
+    await downloadBuiltinKeycloakTheme({
+        keycloakVersion,
+        destDirPath,
+        isSilent
+    });
+}
 
-        await downloadBuiltinKeycloakTheme({
-            keycloakVersion,
-            destDirPath,
-            isSilent
-        });
-    })();
+if (require.main === module) {
+    main().catch(e => console.error(e));
 }
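The entry-point refactor above turns the CLI body into a named main() guarded by require.main, so the exported function can also be called programmatically. A hypothetical consumer script (the deep import path and the version string are illustrative, not a documented API):

import { join } from "path";
import { downloadBuiltinKeycloakTheme } from "keycloakify/bin/download-builtin-keycloak-theme";

// Fetch Keycloak's built-in themes into a local scratch directory.
// The Keycloak version and destination below are example values.
downloadBuiltinKeycloakTheme({
    "keycloakVersion": "21.0.1",
    "destDirPath": join(process.cwd(), "build_keycloak", "builtin_themes"),
    "isSilent": false
}).catch(e => {
    console.error(e);
    process.exit(1);
});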
package/src/bin/tools/downloadAndUnzip.ts
CHANGED
@@ -1,15 +1,13 @@
-import { dirname as pathDirname, basename as pathBasename, join as pathJoin, join } from "path";
-import { createReadStream, createWriteStream } from "fs";
-import { stat, mkdir, unlink, writeFile } from "fs/promises";
-import { transformCodebase } from "./transformCodebase";
-import { createHash } from "crypto";
-import fetch from "make-fetch-happen";
-import { createInflateRaw } from "zlib";
-import type { Readable } from "stream";
-import { homedir } from "os";
-import { FetchOptions } from "make-fetch-happen";
 import { exec as execCallback } from "child_process";
+import { createHash } from "crypto";
+import { mkdir, stat, writeFile } from "fs/promises";
+import fetch, { type FetchOptions } from "make-fetch-happen";
+import { dirname as pathDirname, join as pathJoin } from "path";
+import { assert } from "tsafe";
 import { promisify } from "util";
+import { getProjectRoot } from "./getProjectRoot";
+import { transformCodebase } from "./transformCodebase";
+import { unzip } from "./unzip";
 
 const exec = promisify(execCallback);
 
@@ -17,25 +15,27 @@ function hash(s: string) {
     return createHash("sha256").update(s).digest("hex");
 }
 
-async function
+async function exists(path: string) {
     try {
-
+        await stat(path);
+        return true;
     } catch (error) {
-        if ((error as Error & { code: string }).code === "ENOENT") return
+        if ((error as Error & { code: string }).code === "ENOENT") return false;
         throw error;
     }
 }
 
 /**
- * Get
- *
- * @param key
- * @returns string or undefined
+ * Get npm configuration as map
  */
-async function getNmpConfig(key: string): Promise<string | undefined> {
-    const { stdout } = await exec(
-
-
+async function getNmpConfig(): Promise<Record<string, string>> {
+    const { stdout } = await exec("npm config get", { encoding: "utf8" });
+    return stdout
+        .split("\n")
+        .filter(line => !line.startsWith(";"))
+        .map(line => line.trim())
+        .map(line => line.split("=", 2))
+        .reduce((cfg, [key, value]) => ({ ...cfg, [key]: value }), {});
 }
 
 /**
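The rewritten getNmpConfig in the hunk above shells out once for the full `npm config get` dump and reduces it into a key/value map, instead of spawning npm per key. A standalone sketch of the same idea (names are illustrative; unlike the diff it splits on the first "=", trims both sides, and strips npm's quoting, so keys stay clean even with `key = value` formatting):

import { exec as execCallback } from "child_process";
import { promisify } from "util";

const exec = promisify(execCallback);

// Dump the entire npm configuration once and parse it into a map.
// Output lines look like `https-proxy = "http://proxy:3128/"`;
// lines starting with ";" are comments.
async function readNpmConfig(): Promise<Record<string, string>> {
    const { stdout } = await exec("npm config get", { encoding: "utf8" });
    const cfg: Record<string, string> = {};
    for (const line of stdout.split("\n")) {
        if (!line.trim() || line.startsWith(";")) continue;
        const i = line.indexOf("=");
        if (i === -1) continue;
        const key = line.slice(0, i).trim();
        const value = line.slice(i + 1).trim().replace(/^"(.*)"$/, "$1");
        cfg[key] = value;
    }
    return cfg;
}

// Proxy resolution then mirrors the diff: https-proxy wins over proxy.
readNpmConfig().then(cfg => console.log(cfg["https-proxy"] ?? cfg["proxy"]));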
@@ -45,233 +45,49 @@ async function getNmpConfig(key: string): Promise<string | undefined> {
  * @returns proxy configuration
  */
 async function getNpmProxyConfig(): Promise<Pick<FetchOptions, "proxy" | "noProxy">> {
-    const proxy = (await getNmpConfig("https-proxy")) ?? (await getNmpConfig("proxy"));
-    const noProxy = (await getNmpConfig("noproxy")) ?? (await getNmpConfig("no-proxy"));
+    const cfg = await getNmpConfig();
 
-    return { proxy, noProxy };
-}
+    const proxy = cfg["https-proxy"] ?? cfg["proxy"];
+    const noProxy = cfg["noproxy"] ?? cfg["no-proxy"];
 
-/**
- * Download a file from `url` to `dir`. Will try to avoid downloading existing
- * files by using the cache directory ~/.keycloakify/cache
- *
- * If the target directory does not exist, it will be created.
- *
- * If the target file exists, it will be overwritten.
- *
- * We use make-fetch-happen's internal file cache here, so we don't need to
- * worry about redownloading the same file over and over. Unfortunately, that
- * cache does not have a single file per entry, but bundles and indexes them,
- * so we still need to write the contents to the target directory (possibly
- * over and over), cause the current unzip implementation wants random access.
- *
- * @param url download url
- * @param dir target directory
- * @param filename target filename
- * @returns promise for the full path of the downloaded file
- */
-async function download(url: string, dir: string, filename: string): Promise<string> {
-    const proxyOpts = await getNpmProxyConfig();
-    const cacheRoot = process.env.XDG_CACHE_HOME ?? homedir();
-    const cachePath = join(cacheRoot, ".keycloakify/cache");
-    const opts: FetchOptions = { cachePath, ...proxyOpts };
-    const response = await fetch(url, opts);
-    const filepath = pathJoin(dir, filename);
-    await mkdir(dir, { recursive: true });
-    await writeFile(filepath, response.body);
-    return filepath;
-}
-
-/**
- * @typedef
- * @type MultiError = Error & { cause: Error[] }
- */
-
-/**
- * Extract the archive `zipFile` into the directory `dir`. If `archiveDir` is given,
- * only that directory will be extracted, stripping the given path components.
- *
- * If dir does not exist, it will be created.
- *
- * If any archive file exists, it will be overwritten.
- *
- * Will unzip using all available nodejs worker threads.
- *
- * Will try to clean up extracted files on failure.
- *
- * If unpacking fails, will either throw an regular error, or
- * possibly an `MultiError`, which contains a `cause` field with
- * a number of root cause errors.
- *
- * Warning this method is not optimized for continuous reading of the zip
- * archive, but is a trade-off between simplicity and allowing extraction
- * of a single directory from the archive.
- *
- * @param zipFile the file to unzip
- * @param dir the target directory
- * @param archiveDir if given, unpack only files from this archive directory
- * @throws {MultiError} error
- * @returns Promise for a list of full file paths pointing to actually extracted files
- */
-async function unzip(zipFile: string, dir: string, archiveDir?: string): Promise<string[]> {
-    await mkdir(dir, { recursive: true });
-    const promises: Promise<string>[] = [];
-
-    // Iterate over all files in the zip, skip files which are not in archiveDir,
-    // if given.
-    for await (const record of iterateZipArchive(zipFile)) {
-        const { path: recordPath, createReadStream: createRecordReadStream } = record;
-        const filePath = pathJoin(dir, recordPath);
-        const parent = pathDirname(filePath);
-        if (archiveDir && !recordPath.startsWith(archiveDir)) continue;
-        promises.push(
-            new Promise<string>(async (resolve, reject) => {
-                await mkdir(parent, { recursive: true });
-                // Pull the file out of the archive, write it to the target directory
-                const input = createRecordReadStream();
-                const output = createWriteStream(filePath);
-                output.setMaxListeners(Infinity);
-                output.on("error", e => reject(Object.assign(e, { filePath })));
-                output.on("finish", () => resolve(filePath));
-                input.pipe(output);
-            })
-        );
-    }
-
-    // Wait until _all_ files are either extracted or failed
-    const results = await Promise.allSettled(promises);
-    const success = results.filter(r => r.status === "fulfilled").map(r => (r as PromiseFulfilledResult<string>).value);
-    const failure = results.filter(r => r.status === "rejected").map(r => (r as PromiseRejectedResult).reason);
-
-    // If any extraction failed, try to clean up, then throw a MultiError,
-    // which has a `cause` field, containing a list of root cause errors.
-    if (failure.length) {
-        await Promise.all(success.map(path => unlink(path)));
-        await Promise.all(failure.map(e => e && e.path && unlink(e.path as string)));
-        const e = new Error("Failed to extract: " + failure.map(e => e.message).join(";"));
-        (e as any).cause = failure;
-        throw e;
-    }
-
-    return success;
-}
-
-/**
- *
- * @param file file to read
- * @param start first byte to read
- * @param end last byte to read
- * @returns Promise of a buffer of read bytes
- */
-async function readFileChunk(file: string, start: number, end: number): Promise<Buffer> {
-    const chunks: Buffer[] = [];
-    return new Promise((resolve, reject) => {
-        const stream = createReadStream(file, { start, end });
-        stream.setMaxListeners(Infinity);
-        stream.on("error", e => reject(e));
-        stream.on("end", () => resolve(Buffer.concat(chunks)));
-        stream.on("data", chunk => chunks.push(chunk as Buffer));
-    });
-}
-
-type ZipRecord = {
-    path: string;
-    createReadStream: () => Readable;
-    compressionMethod: "deflate" | undefined;
-};
-
-type ZipRecordGenerator = AsyncGenerator<ZipRecord, void, unknown>;
-
-/**
- * Iterate over all records of a zipfile, and yield a ZipRecord.
- * Use `record.createReadStream()` to actually read the file.
- *
- * Warning this method will only work with single-disk zip files.
- * Warning this method may fail if the zip archive has an crazy amount
- * of files and the central directory is not fully contained within the
- * last 65k bytes of the zip file.
- *
- * @param zipFile
- * @returns AsyncGenerator which will yield ZipRecords
- */
-async function* iterateZipArchive(zipFile: string): ZipRecordGenerator {
-    // Need to know zip file size before we can do anything else
-    const { size } = await stat(zipFile);
-    const chunkSize = 65_535 + 22 + 1; // max comment size + end header size + wiggle
-    // Read last ~65k bytes. Zip files have an comment up to 65_535 bytes at the very end,
-    // before that comes the zip central directory end header.
-    let chunk = await readFileChunk(zipFile, size - chunkSize, size);
-    const unread = size - chunk.length;
-    let i = chunk.length - 4;
-    let found = false;
-    // Find central directory end header, reading backwards from the end
-    while (!found && i-- > 0) if (chunk[i] === 0x50 && chunk.readUInt32LE(i) === 0x06054b50) found = true;
-    if (!found) throw new Error("Not a zip file");
-    // This method will fail on a multi-disk zip, so bail early.
-    if (chunk.readUInt16LE(i + 4) !== 0) throw new Error("Multi-disk zip not supported");
-    let nFiles = chunk.readUint16LE(i + 10);
-    // Get the position of the central directory
-    const directorySize = chunk.readUint32LE(i + 12);
-    const directoryOffset = chunk.readUint32LE(i + 16);
-    if (directoryOffset === 0xffff_ffff) throw new Error("zip64 not supported");
-    if (directoryOffset > size) throw new Error(`Central directory offset ${directoryOffset} is outside file`);
-    i = directoryOffset - unread;
-    // If i < 0, it means that the central directory is not contained within `chunk`
-    if (i < 0) {
-        chunk = await readFileChunk(zipFile, directoryOffset, directoryOffset + directorySize);
-        i = 0;
-    }
-    // Now iterate the central directory records, yield an `ZipRecord` for every entry
-    while (nFiles-- > 0) {
-        // Check for marker bytes
-        if (chunk.readUInt32LE(i) !== 0x02014b50) throw new Error("No central directory record at position " + (unread + i));
-        const compressionMethod = ({ 8: "deflate" } as const)[chunk.readUint16LE(i + 10)];
-        const compressedFileSize = chunk.readUint32LE(i + 20);
-        const filenameLength = chunk.readUint16LE(i + 28);
-        const extraLength = chunk.readUint16LE(i + 30);
-        const commentLength = chunk.readUint16LE(i + 32);
-        // Start of the actual content byte stream is after the 'local' record header,
-        // which is 30 bytes long plus filename and extra field
-        const start = chunk.readUint32LE(i + 42) + 30 + filenameLength + extraLength;
-        const end = start + compressedFileSize;
-        const filename = chunk.slice(i + 46, i + 46 + filenameLength).toString("utf-8");
-        const createRecordReadStream = () => {
-            const input = createReadStream(zipFile, { start, end });
-            if (compressionMethod === "deflate") {
-                const inflate = createInflateRaw();
-                input.pipe(inflate);
-                return inflate;
-            }
-            return input;
-        };
-        if (end > start) yield { path: filename, createReadStream: createRecordReadStream, compressionMethod };
-        // advance pointer to next central directory entry
-        i += 46 + filenameLength + extraLength + commentLength;
-    }
+    return { proxy, noProxy };
 }
 
 export async function downloadAndUnzip({
     url,
     destDirPath,
-    pathOfDirToExtractInArchive,
-    cacheDirPath
+    pathOfDirToExtractInArchive
 }: {
-    isSilent: boolean;
     url: string;
     destDirPath: string;
     pathOfDirToExtractInArchive?: string;
-    cacheDirPath: string;
 }) {
-    const downloadHash = hash(JSON.stringify({ url
-    const
-
-    const
-    const
-
-
+    const downloadHash = hash(JSON.stringify({ url })).substring(0, 15);
+    const projectRoot = getProjectRoot();
+    const cacheRoot = process.env.XDG_CACHE_HOME ?? `${projectRoot}/node_modules/.cache`;
+    const zipFilePath = pathJoin(cacheRoot, "keycloakify", "zip", `_${downloadHash}.zip`);
+    const extractDirPath = pathJoin(cacheRoot, "keycloakify", "unzip", `_${downloadHash}`);
+
+    if (!(await exists(zipFilePath))) {
+        const proxyOpts = await getNpmProxyConfig();
+        const response = await fetch(url, proxyOpts);
+        await mkdir(pathDirname(zipFilePath), { recursive: true });
+        /**
+         * The correct way to fix this is to upgrade node-fetch beyond 3.2.5
+         * (see https://github.com/node-fetch/node-fetch/issues/1295#issuecomment-1144061991.)
+         * Unfortunately, octokit (a dependency of keycloakify) also uses node-fetch, and
+         * does not support node-fetch 3.x. So we stick around with this band-aid until
+         * octokit upgrades.
+         */
+        response.body?.setMaxListeners(Number.MAX_VALUE);
+        assert(typeof response.body !== "undefined" && response.body != null);
+        await writeFile(zipFilePath, response.body);
+    }
 
-
+    await unzip(zipFilePath, extractDirPath, pathOfDirToExtractInArchive);
 
-
-
+    transformCodebase({
+        "srcDirPath": extractDirPath,
+        "destDirPath": destDirPath
+    });
 }
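The new downloadAndUnzip drops make-fetch-happen's internal cache and the hand-rolled zip reader in favor of a content-addressed cache under $XDG_CACHE_HOME (falling back to node_modules/.cache) plus the new unzip helper. A minimal sketch of just the cache keying used above (cachePathsFor is a hypothetical name):

import { createHash } from "crypto";
import { join } from "path";

// Both the downloaded zip and its extraction directory are addressed by a
// 15-hex-char prefix of sha256(JSON.stringify({ url })), so repeated runs
// with the same URL hit the cache instead of re-downloading.
function cachePathsFor(url: string, cacheRoot: string) {
    const downloadHash = createHash("sha256").update(JSON.stringify({ url })).digest("hex").substring(0, 15);
    return {
        zipFilePath: join(cacheRoot, "keycloakify", "zip", `_${downloadHash}.zip`),
        extractDirPath: join(cacheRoot, "keycloakify", "unzip", `_${downloadHash}`)
    };
}

// Deterministic for a given URL; cacheRoot here is an example value.
console.log(cachePathsFor("https://github.com/keycloak/keycloak/archive/refs/tags/21.0.1.zip", "/tmp/.cache"));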
package/src/bin/tools/jar.ts
CHANGED
@@ -3,12 +3,9 @@ import { dirname, relative, sep } from "path";
 import { createWriteStream } from "fs";
 
 import walk from "./walk";
-import type { ZipSource } from "./zip";
-import zip from "./zip";
+import zip, { type ZipSource } from "./zip";
 import { mkdir } from "fs/promises";
-
-/** Trim leading whitespace from every line */
-const trimIndent = (s: string) => s.replace(/(\n)\s+/g, "$1");
+import trimIndent from "./trimIndent";
 
 type JarArgs = {
     rootPath: string;
@@ -26,28 +23,23 @@ type JarArgs = {
 export default async function jar({ groupId, artifactId, version, rootPath, targetPath }: JarArgs) {
     const manifest: ZipSource = {
         path: "META-INF/MANIFEST.MF",
-        data: Buffer.from(
-
-
-
-
-
-
-            )
-        )
+        data: Buffer.from(trimIndent`
+            Manifest-Version: 1.0
+            Archiver-Version: Plexus Archiver
+            Created-By: Keycloakify
+            Built-By: unknown
+            Build-Jdk: 19.0.0
+            `)
     };
 
     const pomProps: ZipSource = {
         path: `META-INF/maven/${groupId}/${artifactId}/pom.properties`,
-        data: Buffer.from(
-
-
-
-
-
-            version=${version}`
-        )
+        data: Buffer.from(trimIndent`# Generated by keycloakify
+            # ${new Date().toString()}
+            artifactId=${artifactId}
+            groupId=${groupId}
+            version=${version}
+            `)
     };
 
 /**
@@ -98,5 +90,5 @@ if (require.main === module) {
         groupId: process.env.GROUP_ID ?? "group",
         version: process.env.VERSION ?? "1.0.0"
     });
-    main()
+    main();
 }
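With trimIndent as a tagged template, the manifest and pom.properties blocks above keep their source indentation but yield flat text at runtime. A small sketch of the effect (assuming the trimIndent module added in this release resolves from the script's location):

import trimIndent from "./trimIndent";

const version = "1.2.3";

// The smallest common indentation (4 spaces here) is stripped from every
// line and the leading newline is dropped, so the resulting string is a
// flat block of "Key: value" lines ending in a single newline.
const manifest = trimIndent`
    Manifest-Version: 1.0
    Created-By: Keycloakify
    Implementation-Version: ${version}
    `;

console.log(manifest);
// Manifest-Version: 1.0
// Created-By: Keycloakify
// Implementation-Version: 1.2.3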
package/src/bin/tools/partitionPromiseSettledResults.ts
ADDED
@@ -0,0 +1,11 @@
+export type PromiseSettledAndPartitioned<T> = [T[], any[]];
+
+export function partitionPromiseSettledResults<T>() {
+    return [
+        ([successes, failures]: PromiseSettledAndPartitioned<T>, item: PromiseSettledResult<T>) =>
+            item.status === "rejected"
+                ? ([successes, [item.reason, ...failures]] as PromiseSettledAndPartitioned<T>)
+                : ([[item.value, ...successes], failures] as PromiseSettledAndPartitioned<T>),
+        [[], []] as PromiseSettledAndPartitioned<T>
+    ] as const;
+}
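The helper returns a [reducer, initialValue] pair meant to be spread directly into Array.prototype.reduce over Promise.allSettled results. A usage sketch (the deep import path is illustrative):

import { partitionPromiseSettledResults } from "keycloakify/bin/tools/partitionPromiseSettledResults";

async function demo() {
    const results = await Promise.allSettled([
        Promise.resolve("a"),
        Promise.reject(new Error("boom")),
        Promise.resolve("b")
    ]);

    // Spreading the [reducer, initialValue] pair into reduce splits
    // fulfilled values from rejection reasons in a single pass.
    const [successes, failures] = results.reduce(...partitionPromiseSettledResults<string>());

    console.log(successes); // ["b", "a"]  (values are prepended, so order reverses)
    console.log(failures);  // [Error: boom]
}

demo();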
package/src/bin/tools/trimIndent.ts
ADDED
@@ -0,0 +1,51 @@
+/**
+ * Concatenate the string fragments and interpolated values
+ * to get a single string.
+ */
+function populateTemplate(strings: TemplateStringsArray, ...args: any[]) {
+    const chunks = [];
+    for (let i = 0; i < strings.length; i++) {
+        let lastStringLineLength = 0;
+        if (strings[i]) {
+            chunks.push(strings[i]);
+            // remember last indent of the string portion
+            lastStringLineLength = strings[i].split("\n").at(-1)?.length ?? 0;
+        }
+        if (args[i]) {
+            // if the interpolation value has newlines, indent the interpolation values
+            // using the last known string indent
+            chunks.push(args[i].replace(/([\r?\n])/g, "$1" + " ".repeat(lastStringLineLength)));
+        }
+    }
+    return chunks.join("");
+}
+
+function trimIndentPrivate(removeEmptyLeadingAndTrailingLines: boolean, strings: TemplateStringsArray, ...args: any[]) {
+    // Remove initial and final newlines
+    let string = populateTemplate(strings, ...args);
+    if (removeEmptyLeadingAndTrailingLines) string = string.replace(/^[\r\n]/, "").replace(/[^\S\r\n]*[\r\n]$/, "");
+    const dents = string.match(/^([ \t])+/gm)?.map(s => s.length) ?? [];
+    // No dents? no change required
+    if (!dents || dents.length == 0) return string;
+    const minDent = Math.min(...dents);
+    // The min indentation is 0, no change needed
+    if (!minDent) return string;
+    const dedented = string.replace(new RegExp(`^${" ".repeat(minDent)}`, "gm"), "");
+    return dedented;
+}
+
+/**
+ * Shift all lines left by the *smallest* indentation level,
+ * and remove initial newline and all trailing spaces.
+ */
+export default function trimIndent(strings: TemplateStringsArray, ...args: any[]) {
+    return trimIndentPrivate(true, strings, ...args);
+}
+
+/**
+ * Shift all lines left by the *smallest* indentation level,
+ * and _keep_ initial newline and all trailing spaces.
+ */
+trimIndent.keepLeadingAndTrailingNewlines = function (strings: TemplateStringsArray, ...args: any[]) {
+    return trimIndentPrivate(false, strings, ...args);
+};
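A quick sketch of the two variants: the default export strips the leading newline and dedents by the smallest indentation, while keepLeadingAndTrailingNewlines only dedents:

import trimIndent from "./trimIndent";

const a = trimIndent`
    line one
      line two
    `;
// a === "line one\n  line two\n"
// smallest indent (4 spaces) stripped; leading newline removed

const b = trimIndent.keepLeadingAndTrailingNewlines`
    line one
      line two
    `;
// b === "\nline one\n  line two\n"
// same dedent, but the leading newline is preserved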