keycloakify 6.9.1 → 6.10.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +4 -0
- package/bin/create-keycloak-email-directory.js +0 -0
- package/bin/download-builtin-keycloak-theme.d.ts +1 -1
- package/bin/download-builtin-keycloak-theme.js +50 -26
- package/bin/download-builtin-keycloak-theme.js.map +1 -1
- package/bin/keycloakify/generateKeycloakThemeResources.d.ts +2 -2
- package/bin/keycloakify/generateKeycloakThemeResources.js +134 -92
- package/bin/keycloakify/generateKeycloakThemeResources.js.map +1 -1
- package/bin/keycloakify/index.js +0 -0
- package/bin/keycloakify/keycloakify.d.ts +1 -1
- package/bin/keycloakify/keycloakify.js +135 -89
- package/bin/keycloakify/keycloakify.js.map +1 -1
- package/bin/link_in_test_app.js +4 -1
- package/bin/link_in_test_app.js.map +1 -1
- package/bin/tools/downloadAndUnzip.d.ts +2 -3
- package/bin/tools/downloadAndUnzip.js +460 -70
- package/bin/tools/downloadAndUnzip.js.map +1 -1
- package/bin/tools/grant-exec-perms.js +66 -21
- package/bin/tools/grant-exec-perms.js.map +1 -1
- package/bin/tsconfig.tsbuildinfo +1 -1
- package/lib/tsconfig.tsbuildinfo +1 -1
- package/package.json +3 -3
- package/src/bin/download-builtin-keycloak-theme.ts +3 -3
- package/src/bin/keycloakify/generateKeycloakThemeResources.ts +4 -6
- package/src/bin/keycloakify/keycloakify.ts +2 -2
- package/src/bin/link_in_test_app.ts +2 -1
- package/src/bin/tools/downloadAndUnzip.ts +267 -58
- package/src/bin/tools/grant-exec-perms.ts +14 -7
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
     "name": "keycloakify",
-    "version": "6.9.1",
+    "version": "6.10.1",
     "description": "Keycloak theme generator for Reacts app",
     "repository": {
         "type": "git",
@@ -1294,11 +1294,11 @@
         "minimal-polyfills": "^2.2.2",
         "minimist": "^1.2.6",
         "path-browserify": "^1.0.1",
-        "powerhooks": "^0.22.
+        "powerhooks": "^0.22.1",
         "react-markdown": "^5.0.3",
         "rfc4648": "^1.5.2",
         "scripting-tools": "^0.19.13",
-        "tsafe": "^1.4.
+        "tsafe": "^1.4.2",
         "tss-react": "4.4.1-rc.0",
         "zod": "^3.17.10"
     }
package/src/bin/download-builtin-keycloak-theme.ts
CHANGED
@@ -7,11 +7,11 @@ import { promptKeycloakVersion } from "./promptKeycloakVersion";
 import { getCliOptions } from "./tools/cliOptions";
 import { getLogger } from "./tools/logger";
 
-export function downloadBuiltinKeycloakTheme(params: { keycloakVersion: string; destDirPath: string; isSilent: boolean }) {
+export async function downloadBuiltinKeycloakTheme(params: { keycloakVersion: string; destDirPath: string; isSilent: boolean }) {
     const { keycloakVersion, destDirPath, isSilent } = params;
 
     for (const ext of ["", "-community"]) {
-        downloadAndUnzip({
+        await downloadAndUnzip({
             "destDirPath": destDirPath,
             "url": `https://github.com/keycloak/keycloak/archive/refs/tags/${keycloakVersion}.zip`,
             "pathOfDirToExtractInArchive": `keycloak-${keycloakVersion}/themes/src/main/resources${ext}/theme`,
@@ -31,7 +31,7 @@ if (require.main === module) {
 
     logger.log(`Downloading builtins theme of Keycloak ${keycloakVersion} here ${destDirPath}`);
 
-    downloadBuiltinKeycloakTheme({
+    await downloadBuiltinKeycloakTheme({
         keycloakVersion,
         destDirPath,
         isSilent
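`downloadAndUnzip` now returns a promise, so everything up the call chain becomes `async`: the exported function as well as the `require.main === module` entry point. A minimal sketch of consuming the new API from another script in the same `bin` tree (version and destination are illustrative):

```ts
import { downloadBuiltinKeycloakTheme } from "./download-builtin-keycloak-theme";

(async () => {
    // Resolves only after both the base and the "-community" theme
    // directories have been downloaded and extracted.
    await downloadBuiltinKeycloakTheme({
        "keycloakVersion": "11.0.3",
        "destDirPath": "/tmp/keycloak_builtin_themes",
        "isSilent": false
    });
})();
```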
package/src/bin/keycloakify/generateKeycloakThemeResources.ts
CHANGED
@@ -5,7 +5,6 @@ import { replaceImportsFromStaticInJsCode } from "./replacers/replaceImportsFromStaticInJsCode";
 import { replaceImportsInCssCode } from "./replacers/replaceImportsInCssCode";
 import { generateFtlFilesCodeFactory, pageIds } from "./generateFtl";
 import { downloadBuiltinKeycloakTheme } from "../download-builtin-keycloak-theme";
-import * as child_process from "child_process";
 import { mockTestingResourcesCommonPath, mockTestingResourcesPath, mockTestingSubDirOfPublicDirBasename } from "../mockTestingResourcesPath";
 import { isInside } from "../tools/isInside";
 import type { BuildOptions } from "./BuildOptions";
@@ -53,13 +52,13 @@ export namespace BuildOptionsLike {
     assert<typeof buildOptions extends BuildOptionsLike ? true : false>();
 }
 
-export function generateKeycloakThemeResources(params: {
+export async function generateKeycloakThemeResources(params: {
     reactAppBuildDirPath: string;
     keycloakThemeBuildingDirPath: string;
     keycloakThemeEmailDirPath: string;
     keycloakVersion: string;
     buildOptions: BuildOptionsLike;
-}): { doBundlesEmailTemplate: boolean } {
+}): Promise<{ doBundlesEmailTemplate: boolean }> {
     const { reactAppBuildDirPath, keycloakThemeBuildingDirPath, keycloakThemeEmailDirPath, keycloakVersion, buildOptions } = params;
 
     const logger = getLogger({ isSilent: buildOptions.isSilent });
@@ -155,7 +154,7 @@ export function generateKeycloakThemeResources(params: {
     {
         const tmpDirPath = pathJoin(themeDirPath, "..", "tmp_xxKdLpdIdLd");
 
-        downloadBuiltinKeycloakTheme({
+        await downloadBuiltinKeycloakTheme({
             keycloakVersion,
             "destDirPath": tmpDirPath,
             isSilent: buildOptions.isSilent
@@ -190,8 +189,7 @@ export function generateKeycloakThemeResources(params: {
     );
 
     fs.writeFileSync(pathJoin(keycloakResourcesWithinPublicDirPath, ".gitignore"), Buffer.from("*", "utf8"));
-
-    child_process.execSync(`rm -r ${tmpDirPath}`);
+    fs.rmSync(tmpDirPath, { recursive: true, force: true });
 }
 
 fs.writeFileSync(
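Cleanup of the temporary directory no longer shells out to `rm -r`, which assumed a Unix userland; `fs.rmSync` (available since Node 14.14) does the same removal in-process, and `force: true` makes it tolerate an already-missing path. A self-contained illustration, with an arbitrary temp dir name:

```ts
import * as fs from "fs";
import * as os from "os";
import { join } from "path";

const tmpDirPath = join(os.tmpdir(), "keycloakify_demo");
fs.mkdirSync(tmpDirPath, { recursive: true });

fs.rmSync(tmpDirPath, { recursive: true, force: true }); // removes the tree, no shell involved
fs.rmSync(tmpDirPath, { recursive: true, force: true }); // force: true → no ENOENT on the second call
```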
package/src/bin/keycloakify/keycloakify.ts
CHANGED
@@ -13,7 +13,7 @@ const reactProjectDirPath = process.cwd();
 export const keycloakThemeBuildingDirPath = pathJoin(reactProjectDirPath, "build_keycloak");
 export const keycloakThemeEmailDirPath = pathJoin(keycloakThemeBuildingDirPath, "..", "keycloak_email");
 
-export function main() {
+export async function main() {
     const { isSilent, hasExternalAssets } = getCliOptions(process.argv.slice(2));
     const logger = getLogger({ isSilent });
     logger.log("🔏 Building the keycloak theme...⌚");
@@ -33,7 +33,7 @@ export function main() {
         "isSilent": isSilent
     });
 
-    const { doBundlesEmailTemplate } = generateKeycloakThemeResources({
+    const { doBundlesEmailTemplate } = await generateKeycloakThemeResources({
         keycloakThemeBuildingDirPath,
         keycloakThemeEmailDirPath,
         "reactAppBuildDirPath": pathJoin(reactProjectDirPath, "build"),
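With `main` now `async`, whatever invokes it has to consume the returned promise. Schematically, a bin entry point would look like this (the entry file appears in the file list above, but its contents are not shown in this diff):

```ts
#!/usr/bin/env node
import { main } from "./keycloakify";

main().catch(error => {
    console.error(error);
    process.exit(1);
});
```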
package/src/bin/link_in_test_app.ts
CHANGED
@@ -15,7 +15,8 @@ fs.writeFileSync(
     return {
         ...packageJsonParsed,
         "main": packageJsonParsed["main"].replace(/^dist\//, ""),
-        "types": packageJsonParsed["types"].replace(/^dist\//, "")
+        "types": packageJsonParsed["types"].replace(/^dist\//, ""),
+        "bin": Object.fromEntries(Object.entries<string>(packageJsonParsed["bin"]).map(([k, v]) => [k, v.replace(/^dist\//, "")]))
     };
 })(),
 null,
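The test-app linking script rewrites `package.json` so paths that point into `dist/` resolve from the package root; this hunk extends that rewrite from `main` and `types` to every entry of the `bin` map. A standalone sketch of the transformation (the sample `bin` value is illustrative, not copied from the package):

```ts
const bin = { "keycloakify": "dist/bin/keycloakify/index.js" };

// Strip the leading "dist/" from every script path, keeping command names.
const rewritten = Object.fromEntries(
    Object.entries(bin).map(([command, scriptPath]) => [command, scriptPath.replace(/^dist\//, "")])
);

console.log(rewritten); // { keycloakify: "bin/keycloakify/index.js" }
```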
package/src/bin/tools/downloadAndUnzip.ts
CHANGED
@@ -1,80 +1,289 @@
-import { basename as pathBasename, join as pathJoin } from "path";
-import {
-import
+import { dirname as pathDirname, basename as pathBasename, join as pathJoin } from "path";
+import { createReadStream, createWriteStream, unlinkSync } from "fs";
+import { stat, mkdir, unlink, readFile, writeFile } from "fs/promises";
 import { transformCodebase } from "./transformCodebase";
-import
+import { createHash } from "crypto";
+import http from "http";
+import https from "https";
+import { createInflateRaw } from "zlib";
 
-
-export function downloadAndUnzip(params: {
-    isSilent: boolean;
-    url: string;
-    destDirPath: string;
-    pathOfDirToExtractInArchive?: string;
-    cacheDirPath: string;
-}) {
-    const { url, destDirPath, pathOfDirToExtractInArchive, cacheDirPath } = params;
-
-    const extractDirPath = pathJoin(
-        cacheDirPath,
-        `_${crypto.createHash("sha256").update(JSON.stringify({ url, pathOfDirToExtractInArchive })).digest("hex").substring(0, 15)}`
-    );
+import type { Readable } from "stream";
 
-
+function hash(s: string) {
+    return createHash("sha256").update(s).digest("hex");
+}
 
-
-
+async function maybeReadFile(path: string) {
+    try {
+        return await readFile(path, "utf-8");
+    } catch (error) {
+        if ((error as Error & { code: string }).code === "ENOENT") return undefined;
+        throw error;
+    }
+}
 
-
+async function maybeStat(path: string) {
+    try {
+        return await stat(path);
+    } catch (error) {
+        if ((error as Error & { code: string }).code === "ENOENT") return undefined;
+        throw error;
+    }
+}
 
-
-
-
-
+/**
+ * Download a file from `url` to `dir`. Will try to avoid downloading existing
+ * files by using an `{hash(url)}.etag` file. If this file exists, we add an
+ * etag headear, so server can tell us if file changed and we should re-download
+ * or if our file is up-to-date.
+ *
+ * Warning, this method assumes that the target filename can be extracted from
+ * url, content-disposition headers are ignored.
+ *
+ * If the target directory does not exist, it will be created.
+ *
+ * If the target file exists and is out of date, it will be overwritten.
+ * If the target file exists and there is no etag file, the target file will
+ * be overwritten.
+ *
+ * @param url download url
+ * @param dir target directory
+ * @returns promise for the full path of the downloaded file
+ */
+async function download(url: string, dir: string): Promise<string> {
+    await mkdir(dir, { recursive: true });
+    const filename = pathBasename(url);
+    const filepath = pathJoin(dir, filename);
+    // If downloaded file exists already and has an `.etag` companion file,
+    // read the etag from that file. This will avoid re-downloading the file
+    // if it is up to date.
+    const exists = await maybeStat(filepath);
+    const etagFilepath = pathJoin(dir, "_" + hash(url).substring(0, 15) + ".etag");
+    const etag = !exists ? undefined : await maybeReadFile(etagFilepath);
 
-
+    return new Promise((resolve, reject) => {
+        // use inner method to allow following redirects
+        function request(url1: URL) {
+            const headers: Record<string, string> = {};
+            if (etag) headers["If-None-Match"] = etag;
+            (url1.protocol === "https:" ? https : http).get(url1, { headers }, response => {
+                if (response.statusCode === 301 || response.statusCode === 302) {
+                    // follow redirects
+                    request(new URL(response.headers.location!!));
+                } else if (response.statusCode === 304) {
+                    // up-to-date, resolve now
+                    resolve(filepath);
+                } else if (response.statusCode !== 200) {
+                    reject(new Error(`Request to ${url1} returned status ${response.statusCode}.`));
+                } else {
+                    const fp = createWriteStream(filepath, { autoClose: true });
+                    fp.on("err", e => {
+                        fp.close();
+                        unlinkSync(filepath);
+                        reject(e);
+                    });
+                    fp.on("finish", async () => {
+                        // when targetfile has been written, write etag file so that
+                        // next time around we don't need to re-download
+                        const responseEtag = response.headers.etag;
+                        if (responseEtag) await writeFile(etagFilepath, responseEtag, "utf-8");
+                        resolve(filepath);
+                    });
+                    response.pipe(fp);
+                }
+            });
         }
+        request(new URL(url));
+    });
+}
 
-
-
-
+/**
+ * @typedef
+ * @type MultiError = Error & { cause: Error[] }
+ */
 
-
-
+/**
+ * Extract the archive `zipFile` into the directory `dir`. If `archiveDir` is given,
+ * only that directory will be extracted, stripping the given path components.
+ *
+ * If dir does not exist, it will be created.
+ *
+ * If any archive file exists, it will be overwritten.
+ *
+ * Will unzip using all available nodejs worker threads.
+ *
+ * Will try to clean up extracted files on failure.
+ *
+ * If unpacking fails, will either throw an regular error, or
+ * possibly an `MultiError`, which contains a `cause` field with
+ * a number of root cause errors.
+ *
+ * Warning this method is not optimized for continuous reading of the zip
+ * archive, but is a trade-off between simplicity and allowing extraction
+ * of a single directory from the archive.
+ *
+ * @param zipFile the file to unzip
+ * @param dir the target directory
+ * @param archiveDir if given, unpack only files from this archive directory
+ * @throws {MultiError} error
+ * @returns Promise for a list of full file paths pointing to actually extracted files
+ */
+async function unzip(zipFile: string, dir: string, archiveDir?: string): Promise<string[]> {
+    await mkdir(dir, { recursive: true });
+    const promises: Promise<string>[] = [];
 
-
-
+    // Iterate over all files in the zip, skip files which are not in archiveDir,
+    // if given.
+    for await (const record of iterateZipArchive(zipFile)) {
+        const { path: recordPath, createReadStream: createRecordReadStream } = record;
+        const filePath = pathJoin(dir, recordPath);
+        const parent = pathDirname(filePath);
+        if (archiveDir && !recordPath.startsWith(archiveDir)) continue;
+        promises.push(
+            new Promise<string>(async (resolve, reject) => {
+                await mkdir(parent, { recursive: true });
+                // Pull the file out of the archive, write it to the target directory
+                const input = createRecordReadStream();
+                const output = createWriteStream(filePath);
+                output.on("error", e => reject(Object.assign(e, { filePath })));
+                output.on("finish", () => resolve(filePath));
+                input.pipe(output);
+            })
+        );
+    }
 
-
-
-
+    // Wait until _all_ files are either extracted or failed
+    const results = await Promise.allSettled(promises);
+    const success = results.filter(r => r.status === "fulfilled").map(r => (r as PromiseFulfilledResult<string>).value);
+    const failure = results.filter(r => r.status === "rejected").map(r => (r as PromiseRejectedResult).reason);
 
-
-
-
-
+    // If any extraction failed, try to clean up, then throw a MultiError,
+    // which has a `cause` field, containing a list of root cause errors.
+    if (failure.length) {
+        await Promise.all(success.map(path => unlink(path)));
+        await Promise.all(failure.map(e => e && e.path && unlink(e.path as string)));
+        const e = new Error("Failed to extract: " + failure.map(e => e.message).join(";"));
+        (e as any).cause = failure;
+        throw e;
+    }
 
-
+    return success;
+}
 
-
+/**
+ *
+ * @param file file to read
+ * @param start first byte to read
+ * @param end last byte to read
+ * @returns Promise of a buffer of read bytes
+ */
+async function readFileChunk(file: string, start: number, end: number): Promise<Buffer> {
+    const chunks: Buffer[] = [];
+    return new Promise((resolve, reject) => {
+        const stream = createReadStream(file, { start, end });
+        stream.on("error", e => reject(e));
+        stream.on("end", () => resolve(Buffer.concat(chunks)));
+        stream.on("data", chunk => chunks.push(chunk as Buffer));
+    });
+}
 
-
+type ZipRecord = {
+    path: string;
+    createReadStream: () => Readable;
+    compressionMethod: "deflate" | undefined;
+};
 
-
+type ZipRecordGenerator = AsyncGenerator<ZipRecord, void, unknown>;
 
-
-
-
+/**
+ * Iterate over all records of a zipfile, and yield a ZipRecord.
+ * Use `record.createReadStream()` to actually read the file.
+ *
+ * Warning this method will only work with single-disk zip files.
+ * Warning this method may fail if the zip archive has an crazy amount
+ * of files and the central directory is not fully contained within the
+ * last 65k bytes of the zip file.
+ *
+ * @param zipFile
+ * @returns AsyncGenerator which will yield ZipRecords
+ */
+async function* iterateZipArchive(zipFile: string): ZipRecordGenerator {
+    // Need to know zip file size before we can do anything else
+    const { size } = await stat(zipFile);
+    const chunkSize = 65_535 + 22 + 1; // max comment size + end header size + wiggle
+    // Read last ~65k bytes. Zip files have an comment up to 65_535 bytes at the very end,
+    // before that comes the zip central directory end header.
+    let chunk = await readFileChunk(zipFile, size - chunkSize, size);
+    const unread = size - chunk.length;
+    let i = chunk.length - 4;
+    let found = false;
+    // Find central directory end header, reading backwards from the end
+    while (!found && i-- > 0) if (chunk[i] === 0x50 && chunk.readUInt32LE(i) === 0x06054b50) found = true;
+    if (!found) throw new Error("Not a zip file");
+    // This method will fail on a multi-disk zip, so bail early.
+    if (chunk.readUInt16LE(i + 4) !== 0) throw new Error("Multi-disk zip not supported");
+    let nFiles = chunk.readUint16LE(i + 10);
+    // Get the position of the central directory
+    const directorySize = chunk.readUint32LE(i + 12);
+    const directoryOffset = chunk.readUint32LE(i + 16);
+    if (directoryOffset === 0xffff_ffff) throw new Error("zip64 not supported");
+    if (directoryOffset > size) throw new Error(`Central directory offset ${directoryOffset} is outside file`);
+    i = directoryOffset - unread;
+    // If i < 0, it means that the central directory is not contained within `chunk`
+    if (i < 0) {
+        chunk = await readFileChunk(zipFile, directoryOffset, directoryOffset + directorySize);
+        i = 0;
+    }
+    // Now iterate the central directory records, yield an `ZipRecord` for every entry
+    while (nFiles-- > 0) {
+        // Check for marker bytes
+        if (chunk.readUInt32LE(i) !== 0x02014b50) throw new Error("No central directory record at position " + (unread + i));
+        const compressionMethod = ({ 8: "deflate" } as const)[chunk.readUint16LE(i + 10)];
+        const compressedFileSize = chunk.readUint32LE(i + 20);
+        const filenameLength = chunk.readUint16LE(i + 28);
+        const extraLength = chunk.readUint16LE(i + 30);
+        const commentLength = chunk.readUint16LE(i + 32);
+        // Start of thea actual content byte stream is after the 'local' record header,
+        // which is 30 bytes long plus filename and extra field
+        const start = chunk.readUint32LE(i + 42) + 30 + filenameLength + extraLength;
+        const end = start + compressedFileSize;
+        const filename = chunk.slice(i + 46, i + 46 + filenameLength).toString("utf-8");
+        const createRecordReadStream = () => {
+            const input = createReadStream(zipFile, { start, end });
+            if (compressionMethod === "deflate") {
+                const inflate = createInflateRaw();
+                input.pipe(inflate);
+                return inflate;
+            }
+            return input;
+        };
+        if (end > start) yield { path: filename, createReadStream: createRecordReadStream, compressionMethod };
+        // advance pointer to next central directory entry
+        i += 46 + filenameLength + extraLength + commentLength;
+    }
+}
 
-
+export async function downloadAndUnzip({
+    url,
+    destDirPath,
+    pathOfDirToExtractInArchive,
+    cacheDirPath
+}: {
+    isSilent: boolean;
+    url: string;
+    destDirPath: string;
+    pathOfDirToExtractInArchive?: string;
+    cacheDirPath: string;
+}) {
+    const downloadHash = hash(JSON.stringify({ url, pathOfDirToExtractInArchive })).substring(0, 15);
+    const extractDirPath = pathJoin(cacheDirPath, `_${downloadHash}`);
 
-
-
-
-});
-}
+    const zipFilepath = await download(url, cacheDirPath);
+    const zipMtime = (await stat(zipFilepath)).mtimeMs;
+    const unzipMtime = (await maybeStat(extractDirPath))?.mtimeMs;
 
-
-
-
-});
+    if (!unzipMtime || zipMtime > unzipMtime) await unzip(zipFilepath, extractDirPath, pathOfDirToExtractInArchive);
+
+    const srcDirPath = pathOfDirToExtractInArchive === undefined ? extractDirPath : pathJoin(extractDirPath, pathOfDirToExtractInArchive);
+    transformCodebase({ srcDirPath, destDirPath });
 }
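Taken together, the rewritten `downloadAndUnzip` is a pure-Node pipeline that caches at two levels: `download` keeps the zip plus an `.etag` companion in `cacheDirPath` (a later run sends `If-None-Match` and a `304` skips the transfer), and the extracted tree is reused as long as its mtime is newer than the zip's. The zip side reads only the central directory plus each file's byte range instead of streaming the whole archive, per the trade-off described in the doc comments. A sketch of a caller, using the Keycloak URL pattern from the hunks above and illustrative cache and destination paths:

```ts
import { join } from "path";
import { downloadAndUnzip } from "./downloadAndUnzip";

(async () => {
    await downloadAndUnzip({
        "isSilent": false,
        "url": "https://github.com/keycloak/keycloak/archive/refs/tags/11.0.3.zip",
        "pathOfDirToExtractInArchive": "keycloak-11.0.3/themes/src/main/resources/theme",
        "destDirPath": join(process.cwd(), "build_keycloak", "theme"),
        "cacheDirPath": join(process.cwd(), "node_modules", ".cache", "keycloakify")
    });
    // Re-running is cheap: the etag spares the download and the
    // extract dir's mtime spares the unzip.
})();
```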
package/src/bin/tools/grant-exec-perms.ts
CHANGED
@@ -1,10 +1,17 @@
 import { getProjectRoot } from "./getProjectRoot";
 import { join as pathJoin } from "path";
-import
-import
+import { constants } from "fs";
+import { chmod, stat } from "fs/promises";
 
-
-
-
-
-);
+async () => {
+    var { bin } = await import(pathJoin(getProjectRoot(), "package.json"));
+
+    var promises = Object.values<string>(bin).map(async scriptPath => {
+        const fullPath = pathJoin(getProjectRoot(), scriptPath);
+        const oldMode = (await stat(fullPath)).mode;
+        const newMode = oldMode | constants.S_IXUSR | constants.S_IXGRP | constants.S_IXOTH;
+        await chmod(fullPath, newMode);
+    });
+
+    await Promise.all(promises);
+};