keycloakify 6.12.8 → 6.13.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,7 +1,7 @@
 {
     "name": "keycloakify",
-    "version": "6.12.8",
-    "description": "Keycloak theme generator for Reacts app",
+    "version": "6.13.0",
+    "description": "Create Keycloak themes using React",
     "repository": {
         "type": "git",
         "url": "git://github.com/garronej/keycloakify.git"
@@ -1303,7 +1303,7 @@
     },
     "devDependencies": {
         "@babel/core": "^7.0.0",
-        "@types/memoizee": "^0.4.7",
+        "@types/make-fetch-happen": "^10.0.1",
         "@types/minimist": "^1.2.2",
         "@types/node": "^18.14.1",
         "@types/react": "18.0.9",
@@ -1314,15 +1314,16 @@
         "properties-parser": "^0.3.1",
         "react": "18.1.0",
         "rimraf": "^3.0.2",
-        "typescript": "^4.9.5",
+        "scripting-tools": "^0.19.13",
         "ts-node": "^10.9.1",
-        "scripting-tools": "^0.19.13"
+        "typescript": "^4.9.5"
     },
     "dependencies": {
         "@octokit/rest": "^18.12.0",
         "cheerio": "^1.0.0-rc.5",
         "cli-select": "^1.1.2",
         "evt": "^2.4.15",
+        "make-fetch-happen": "^11.0.3",
         "minimal-polyfills": "^2.2.2",
         "minimist": "^1.2.6",
         "path-browserify": "^1.0.1",
@@ -1,27 +1,22 @@
-import { dirname as pathDirname, basename as pathBasename, join as pathJoin } from "path";
-import { createReadStream, createWriteStream, unlinkSync } from "fs";
-import { stat, mkdir, unlink, readFile, writeFile } from "fs/promises";
+import { dirname as pathDirname, basename as pathBasename, join as pathJoin, join } from "path";
+import { createReadStream, createWriteStream } from "fs";
+import { stat, mkdir, unlink, writeFile } from "fs/promises";
 import { transformCodebase } from "./transformCodebase";
 import { createHash } from "crypto";
-import http from "http";
-import https from "https";
+import fetch from "make-fetch-happen";
 import { createInflateRaw } from "zlib";
-
 import type { Readable } from "stream";
+import { homedir } from "os";
+import { FetchOptions } from "make-fetch-happen";
+import { exec as execCallback } from "child_process";
+import { promisify } from "util";
+
+const exec = promisify(execCallback);
 
 function hash(s: string) {
     return createHash("sha256").update(s).digest("hex");
 }
 
-async function maybeReadFile(path: string) {
-    try {
-        return await readFile(path, "utf-8");
-    } catch (error) {
-        if ((error as Error & { code: string }).code === "ENOENT") return undefined;
-        throw error;
-    }
-}
-
 async function maybeStat(path: string) {
     try {
         return await stat(path);
@@ -32,69 +27,59 @@ async function maybeStat(path: string) {
 }
 
 /**
- * Download a file from `url` to `dir`. Will try to avoid downloading existing
- * files by using an `{hash(url)}.etag` file. If this file exists, we add an
- * etag headear, so server can tell us if file changed and we should re-download
- * or if our file is up-to-date.
+ * Get an npm configuration value as string, undefined if not set.
  *
- * Warning, this method assumes that the target filename can be extracted from
- * url, content-disposition headers are ignored.
+ * @param key
+ * @returns string or undefined
+ */
+async function getNmpConfig(key: string): Promise<string | undefined> {
+    const { stdout } = await exec(`npm config get ${key}`);
+    const value = stdout.trim();
+    return value && value !== "null" ? value : undefined;
+}
+
+/**
+ * Get proxy configuration from npm config files. Note that we don't care about
+ * proxy config in env vars, because make-fetch-happen will do that for us.
+ *
+ * @returns proxy configuration
+ */
+async function getNpmProxyConfig(): Promise<Pick<FetchOptions, "proxy" | "noProxy">> {
+    const proxy = (await getNmpConfig("https-proxy")) ?? (await getNmpConfig("proxy"));
+    const noProxy = (await getNmpConfig("noproxy")) ?? (await getNmpConfig("no-proxy"));
+
+    return { proxy, noProxy };
+}
+
+/**
+ * Download a file from `url` to `dir`. Will try to avoid downloading existing
+ * files by using the cache directory ~/.keycloakify/cache
  *
  * If the target directory does not exist, it will be created.
  *
- * If the target file exists and is out of date, it will be overwritten.
- * If the target file exists and there is no etag file, the target file will
- * be overwritten.
+ * If the target file exists, it will be overwritten.
+ *
+ * We use make-fetch-happen's internal file cache here, so we don't need to
+ * worry about redownloading the same file over and over. Unfortunately, that
+ * cache does not have a single file per entry, but bundles and indexes them,
+ * so we still need to write the contents to the target directory (possibly
+ * over and over), cause the current unzip implementation wants random access.
  *
  * @param url download url
 * @param dir target directory
+ * @param filename target filename
 * @returns promise for the full path of the downloaded file
 */
-async function download(url: string, dir: string): Promise<string> {
-    await mkdir(dir, { recursive: true });
-    const filename = pathBasename(url);
+async function download(url: string, dir: string, filename: string): Promise<string> {
+    const proxyOpts = await getNpmProxyConfig();
+    const cacheRoot = process.env.XDG_CACHE_HOME ?? homedir();
+    const cachePath = join(cacheRoot, ".keycloakify/cache");
+    const opts: FetchOptions = { cachePath, ...proxyOpts };
+    const response = await fetch(url, opts);
     const filepath = pathJoin(dir, filename);
-    // If downloaded file exists already and has an `.etag` companion file,
-    // read the etag from that file. This will avoid re-downloading the file
-    // if it is up to date.
-    const exists = await maybeStat(filepath);
-    const etagFilepath = pathJoin(dir, "_" + hash(url).substring(0, 15) + ".etag");
-    const etag = !exists ? undefined : await maybeReadFile(etagFilepath);
-
-    return new Promise((resolve, reject) => {
-        // use inner method to allow following redirects
-        function request(url1: URL) {
-            const headers: Record<string, string> = {};
-            if (etag) headers["If-None-Match"] = etag;
-            (url1.protocol === "https:" ? https : http).get(url1, { headers }, response => {
-                if (response.statusCode === 301 || response.statusCode === 302) {
-                    // follow redirects
-                    request(new URL(response.headers.location!!));
-                } else if (response.statusCode === 304) {
-                    // up-to-date, resolve now
-                    resolve(filepath);
-                } else if (response.statusCode !== 200) {
-                    reject(new Error(`Request to ${url1} returned status ${response.statusCode}.`));
-                } else {
-                    const fp = createWriteStream(filepath, { autoClose: true });
-                    fp.on("err", e => {
-                        fp.close();
-                        unlinkSync(filepath);
-                        reject(e);
-                    });
-                    fp.on("finish", async () => {
-                        // when targetfile has been written, write etag file so that
-                        // next time around we don't need to re-download
-                        const responseEtag = response.headers.etag;
-                        if (responseEtag) await writeFile(etagFilepath, responseEtag, "utf-8");
-                        resolve(filepath);
-                    });
-                    response.pipe(fp);
-                }
-            });
-        }
-        request(new URL(url));
-    });
+    await mkdir(dir, { recursive: true });
+    await writeFile(filepath, response.body);
+    return filepath;
 }
 
 /**
@@ -278,7 +263,8 @@ export async function downloadAndUnzip({
     const downloadHash = hash(JSON.stringify({ url, pathOfDirToExtractInArchive })).substring(0, 15);
     const extractDirPath = pathJoin(cacheDirPath, `_${downloadHash}`);
 
-    const zipFilepath = await download(url, cacheDirPath);
+    const filename = pathBasename(url);
+    const zipFilepath = await download(url, cacheDirPath, filename);
     const zipMtime = (await stat(zipFilepath)).mtimeMs;
     const unzipMtime = (await maybeStat(extractDirPath))?.mtimeMs;
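
The new download path boils down to three steps: read proxy settings from the npm config, fetch the archive through make-fetch-happen's on-disk cache, then write the response body into the target directory. The sketch below is not code from the package; it only illustrates that flow end to end under the same assumptions the diff makes (make-fetch-happen ^11 and Node 18). The function name, URL and target paths are invented for illustration.

import fetch from "make-fetch-happen";
import type { FetchOptions } from "make-fetch-happen";
import { exec as execCallback } from "child_process";
import { promisify } from "util";
import { mkdir, writeFile } from "fs/promises";
import { join } from "path";
import { homedir } from "os";

const exec = promisify(execCallback);

// `npm config get <key>` prints "null" (or an empty string) when the key is unset.
async function getNpmConfig(key: string): Promise<string | undefined> {
    const { stdout } = await exec(`npm config get ${key}`);
    const value = stdout.trim();
    return value && value !== "null" ? value : undefined;
}

async function demo() {
    // Proxy settings come from the npm config; proxy environment variables are
    // already honored by make-fetch-happen itself.
    const proxy = (await getNpmConfig("https-proxy")) ?? (await getNpmConfig("proxy"));
    const noProxy = (await getNpmConfig("noproxy")) ?? (await getNpmConfig("no-proxy"));

    // Same cache location as the diff: XDG_CACHE_HOME if set, otherwise the home directory.
    const cachePath = join(process.env.XDG_CACHE_HOME ?? homedir(), ".keycloakify/cache");
    const opts: FetchOptions = { cachePath, proxy, noProxy };

    // Repeat runs are served from the cacache-backed cache instead of the network.
    const response = await fetch("https://example.org/some-archive.zip", opts);

    // The cache bundles and indexes entries, so the body is still written out
    // to a plain file that the unzip step can access directly.
    const targetDir = "/tmp/keycloakify-demo";
    await mkdir(targetDir, { recursive: true });
    await writeFile(join(targetDir, "some-archive.zip"), response.body);
}

demo().catch(console.error);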