electron-incremental-update 0.5.0 → 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +7 -7
- package/dist/chunk-VADH6AZA.mjs +51 -0
- package/dist/index.cjs +129 -104
- package/dist/index.d.ts +26 -15
- package/dist/index.mjs +118 -94
- package/dist/vite.cjs +72 -52
- package/dist/vite.d.ts +24 -2
- package/dist/vite.mjs +63 -34
- package/package.json +3 -2
- package/dist/chunk-SSJ6PDMK.mjs +0 -61
package/dist/index.mjs
CHANGED
@@ -1,24 +1,33 @@
 import {
   __require,
   verify
-} from "./chunk-
+} from "./chunk-VADH6AZA.mjs";

 // src/index.ts
-import { resolve } from "node:path";
+import { resolve as resolve2 } from "node:path";
 import { app as app3 } from "electron";

 // src/updater/index.ts
 import { EventEmitter } from "node:events";
+import { Buffer as Buffer2 } from "node:buffer";
 import { createGunzip } from "node:zlib";
-import { createReadStream, createWriteStream, existsSync } from "node:fs";
-import { rm, writeFile } from "node:fs/promises";
+import { createReadStream, createWriteStream, existsSync, rmSync } from "node:fs";
+import { readFile, rename, rm, writeFile } from "node:fs/promises";
+import { resolve } from "node:path";
 import { app as app2 } from "electron";

 // src/updater/defaultFunctions.ts
 import { Buffer } from "node:buffer";
 import https from "node:https";
+
+// src/updater/types.ts
+function isUpdateJSON(json) {
+  return "signature" in json && "version" in json && "size" in json;
+}
+
+// src/updater/defaultFunctions.ts
 function downloadJSONDefault(url, updater, headers) {
-  return new Promise((
+  return new Promise((resolve3, reject) => {
     https.get(url, (res) => {
       let data = "";
       res.setEncoding("utf8");
@@ -27,8 +36,8 @@ function downloadJSONDefault(url, updater, headers) {
       res.on("end", () => {
         try {
           const json = JSON.parse(data);
-          if (
-
+          if (isUpdateJSON(json)) {
+            resolve3(json);
           } else {
             throw Error;
           }
@@ -43,7 +52,7 @@ function downloadJSONDefault(url, updater, headers) {
 }
 function downloadBufferDefault(url, updater, headers) {
   let progress = 0;
-  return new Promise((
+  return new Promise((resolve3, reject) => {
     https.get(url, (res) => {
       let data = [];
       res.headers = headers;
@@ -53,7 +62,7 @@ function downloadBufferDefault(url, updater, headers) {
         data.push(chunk);
       });
       res.on("end", () => {
-
+        resolve3(Buffer.concat(data));
       });
     }).on("error", (e) => {
       reject(e);
@@ -87,14 +96,14 @@ function compareVersionDefault(oldVersion, newVersion) {
 import { readFileSync } from "node:fs";
 import { dirname, join } from "node:path";
 import { app } from "electron";
-function
+function getProductAsarPath(name) {
   return app.isPackaged ? join(dirname(app.getAppPath()), `${name}.asar`) : "dev";
 }
 function getEntryVersion() {
   return app.getVersion();
 }
-function
-  return app.isPackaged ? readFileSync(join(
+function getProductVersion(name) {
+  return app.isPackaged ? readFileSync(join(getProductAsarPath(name), "version"), "utf-8") : getEntryVersion();
 }
 function requireNative(packageName) {
   const path = app.isPackaged ? join(app.getAppPath(), "node_modules", packageName) : packageName;
@@ -134,7 +143,7 @@ function restartApp() {

 // src/updater/index.ts
 function createUpdater({
-
+  SIGNATURE_CERT,
   repository,
   productName,
   releaseAsarURL: _release,
@@ -145,94 +154,103 @@ function createUpdater({
 }) {
   const updater = new EventEmitter();
   let signature = "";
-
-  const gzipPath =
-  const
+  const asarPath = getProductAsarPath(productName);
+  const gzipPath = `${asarPath}.gz`;
+  const tmpFilePath = gzipPath.replace(".asar.gz", ".tmp.asar");
   const { downloadBuffer, downloadJSON, extraHeader, userAgent } = downloadConfig || {};
   function log(msg) {
     debug && updater.emit("debug", msg);
   }
-  async function
-
-
-      Accept: `application/${format === "json" ? "json" : "octet-stream"}`,
-      UserAgent: ua,
-      ...extraHeader
-    };
-    log(`download headers: ${JSON.stringify(headers, null, 2)}`);
-    const downloadFn = format === "json" ? downloadJSON ?? downloadJSONDefault : downloadBuffer ?? downloadBufferDefault;
-    log(`download ${format} from ${url}`);
-    const ret = await downloadFn(url, updater, headers);
-    log(`download ${format} success`);
-    return ret;
-  }
-  async function extractFile(gzipFilePath) {
-    if (!gzipFilePath.endsWith(".asar.gz") || !existsSync(gzipFilePath)) {
-      log("update .asar.gz file not exist");
-      return;
+  async function extractFile() {
+    if (!gzipPath.endsWith(".asar.gz") || !existsSync(gzipPath)) {
+      throw new Error(".asar.gz file not exist");
     }
-
-    return new Promise((resolve2, reject) => {
+    return new Promise((resolve3, reject) => {
       const gunzip = createGunzip();
-      const input = createReadStream(
-      const
-
-      log(`outputFilePath: ${outputFilePath}`);
+      const input = createReadStream(gzipPath);
+      const output = createWriteStream(tmpFilePath);
+      log(`outputFilePath: ${tmpFilePath}`);
       input.pipe(gunzip).pipe(output).on("finish", async () => {
-        await rm(
-        log(`${
-
+        await rm(gzipPath);
+        log(`${gzipPath} unzipped`);
+        resolve3(null);
       }).on("error", async (err) => {
-        await rm(
+        await rm(gzipPath);
         output.destroy(err);
         reject(err);
       });
     });
   }
-  function needUpdate(
+  function needUpdate(version) {
     if (!app2.isPackaged) {
       log("in dev mode, no need to update");
       return false;
     }
     const currentVersion = getEntryVersion();
-    log(`check update:
-    current version is ${currentVersion},
-    new version is ${version2}`);
+    log(`check update: current version is ${currentVersion}, new version is ${version}`);
     const _compare = compareVersion ?? compareVersionDefault;
-    return _compare(currentVersion,
+    return _compare(currentVersion, version);
   }
-
-
-
-
-
+  async function parseData(format, data) {
+    if (existsSync(tmpFilePath)) {
+      log(`remove tmp file: ${tmpFilePath}`);
+      await rm(tmpFilePath);
+    }
+    if (existsSync(gzipPath)) {
+      log(`remove .gz file: ${gzipPath}`);
+      await rm(gzipPath);
+    }
+    if (typeof data === "object") {
+      if (format === "json" && isUpdateJSON(data) || format === "buffer" && Buffer2.isBuffer(data)) {
+        return data;
+      } else {
+        throw new Error(`invalid type at format '${format}': ${data}`);
+      }
+    } else if (["string", "undefined"].includes(typeof data)) {
+      const ua = userAgent || "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.183 Safari/537.36";
+      const headers = {
+        Accept: `application/${format === "json" ? "json" : "octet-stream"}`,
+        UserAgent: ua,
+        ...extraHeader
+      };
+      log(`download headers: ${JSON.stringify(headers, null, 2)}`);
+      const info = format === "json" ? {
+        name: "updateJsonURL",
+        url: _update,
+        repoFallback: `${repository.replace("github.com", "raw.githubusercontent.com")}/master/version.json`,
+        fn: downloadJSON ?? downloadJSONDefault
+      } : {
+        name: "releaseAsarURL",
+        url: _release,
+        repoFallback: `${repository}/releases/download/latest/${productName}.asar.gz`,
+        fn: downloadBuffer ?? downloadBufferDefault
+      };
+      data ??= info.url;
+      if (!data) {
+        log(`no ${info.name}, fallback to use repository`);
         if (!repository) {
-          throw new Error(
+          throw new Error(`${info.name} or repository are not set`);
         }
-
+        data = info.repoFallback;
       }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-      log(`update info: ${JSON.stringify(json, null, 2)}`);
-    if (!await needUpdate(_v)) {
-      log(`update unavailable: ${_v}`);
+      log(`download ${format} from ${data}`);
+      const ret = await info.fn(data, updater, headers);
+      log(`download ${format} success`);
+      return ret;
+    } else {
+      throw new Error(`invalid type at format '${format}': ${data}`);
+    }
+  }
+  updater.checkUpdate = async (data) => {
+    try {
+      const { signature: _sig, size, version } = await parseData("json", data);
+      log(`checked version: ${version}, size: ${size}`);
+      if (!await needUpdate(version)) {
+        log(`update unavailable: ${version}`);
        return void 0;
       } else {
-        log(`update available: ${
+        log(`update available: ${version}`);
         signature = _sig;
-        version = _v;
         return { size, version };
       }
     } catch (error) {
@@ -240,30 +258,35 @@ function createUpdater({
       return error;
     }
   };
-  updater.
+  updater.downloadAndInstall = async (data, sig) => {
     try {
-
-
-
-          log("no releaseAsarURL, fallback to use repository");
-          if (!repository) {
-            throw new Error("releaseAsarURL or repository are not set");
-          }
-          _url = `${repository}/releases/download/latest/${productName}.asar.gz`;
-        }
-        src = await download(_url, "buffer");
+      const _sig = sig ?? signature;
+      if (!_sig) {
+        throw new Error("signature are not set, please checkUpdate first or set the second parameter");
       }
+      const buffer = await parseData("buffer", data);
       log("verify start");
-
-
-        throw new Error("invalid signature");
+      const version = verify(buffer, _sig, SIGNATURE_CERT);
+      if (!version) {
+        throw new Error("verify failed, invalid signature");
       }
       log("verify success");
+      if (!await needUpdate(version)) {
+        throw new Error(`update unavailable: ${version}`);
+      }
       log(`write file: ${gzipPath}`);
-      await writeFile(gzipPath,
+      await writeFile(gzipPath, buffer);
      log(`extract file: ${gzipPath}`);
-      await extractFile(
+      await extractFile();
+      const asarVersion = await readFile(resolve(tmpFilePath, "version"), "utf8");
+      if (asarVersion !== version) {
+        rmSync(tmpFilePath);
+        throw new Error(`update failed: asar version is ${asarVersion}, but it should be ${version}`);
+      } else {
+        await rename(tmpFilePath, asarPath);
+      }
       log(`update success, version: ${version}`);
+      signature = "";
       return true;
     } catch (error) {
       log(error);
@@ -281,7 +304,7 @@ function initApp(appOptions, updaterOptions) {
     mainPath = "main/index.js"
   } = appOptions ?? {};
   const mainDir = app3.isPackaged ? `../${productName}.asar` : electronDistPath;
-  const entry =
+  const entry = resolve2(__dirname, mainDir, mainPath);
   if (updaterOptions) {
     __require(entry)(
       createUpdater({ ...updaterOptions, productName })
@@ -296,11 +319,12 @@ function initApp(appOptions, updaterOptions) {
 }
 export {
   createUpdater,
-  getAppAsarPath,
-  getAppVersion,
   getEntryVersion,
   getGithubReleaseCdnGroup,
+  getProductAsarPath,
+  getProductVersion,
   initApp,
+  isUpdateJSON,
   parseGithubCdnURL,
   requireNative,
   restartApp
package/dist/vite.cjs
CHANGED
@@ -11,9 +11,9 @@ var __export = (target, all) => {
 };
 var __copyProps = (to, from, except, desc) => {
   if (from && typeof from === "object" || typeof from === "function") {
-    for (let
-      if (!__hasOwnProp.call(to,
-        __defProp(to,
+    for (let key2 of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key2) && key2 !== except)
+        __defProp(to, key2, { get: () => from[key2], enumerable: !(desc = __getOwnPropDesc(from, key2)) || desc.enumerable });
   }
   return to;
 };
@@ -35,45 +35,36 @@ __export(vite_exports, {
 module.exports = __toCommonJS(vite_exports);
 var import_vite = require("vite");

-// src/build-plugins/
+// src/build-plugins/build.ts
 var import_node_fs = require("fs");
 var import_promises = require("fs/promises");
 var import_node_zlib = __toESM(require("zlib"), 1);
+var import_esbuild = require("esbuild");

 // src/crypto.ts
 var import_node_crypto = require("crypto");
 var import_node_buffer = require("buffer");
 var aesEncode = "base64url";
-function
-  const
-  const privateKey = pair.privateKey.export({ type: "pkcs1", format: "pem" });
-  const publicKey = pair.publicKey.export({ type: "pkcs1", format: "pem" });
-  return {
-    privateKey,
-    publicKey
-  };
-}
-function encrypt(plainText, key, iv) {
-  const cipher = (0, import_node_crypto.createCipheriv)("aes-256-cbc", key, iv);
+function encrypt(plainText, key2, iv) {
+  const cipher = (0, import_node_crypto.createCipheriv)("aes-256-cbc", key2, iv);
   let encrypted = cipher.update(plainText, "utf8", aesEncode);
   encrypted += cipher.final(aesEncode);
   return encrypted;
 }
-function
-
-  const hash = (0, import_node_crypto.createHash)("SHA256").update(str).digest("binary");
+function key(data, length) {
+  const hash = (0, import_node_crypto.createHash)("SHA256").update(data).digest("binary");
   return import_node_buffer.Buffer.from(hash).subarray(0, length);
 }
-function signature(buffer, privateKey,
+function signature(buffer, privateKey, cert, version) {
   const sig = (0, import_node_crypto.createSign)("RSA-SHA256").update(buffer).sign({
     key: privateKey,
     padding: import_node_crypto.constants.RSA_PKCS1_PADDING,
     saltLength: import_node_crypto.constants.RSA_PSS_SALTLEN_DIGEST
   }, "base64");
-  return encrypt(sig
+  return encrypt(`${sig}%${version}`, key(cert, 32), key(buffer, 16));
 }

-// src/build-plugins/
+// src/build-plugins/build.ts
 function gzipFile(filePath) {
   return new Promise((resolve, reject) => {
     const gzip = import_node_zlib.default.createGzip();
@@ -110,24 +101,20 @@ async function buildAsar({
   await pack(electronDistPath, asarOutputPath);
   await gzipFile(asarOutputPath);
 }
-async function
+async function buildVersion({
   asarOutputPath,
   versionPath,
   privateKey,
-
-  productName,
+  cert,
   version
 }) {
   const buffer = await (0, import_promises.readFile)(`${asarOutputPath}.gz`);
   await (0, import_promises.writeFile)(versionPath, JSON.stringify({
-    signature: signature(buffer, privateKey,
+    signature: signature(buffer, privateKey, cert, version),
     version,
     size: buffer.length
   }, null, 2));
 }
-
-// src/build-plugins/entry.ts
-var import_esbuild = require("esbuild");
 async function buildEntry({
   entryPath,
   entryOutputPath: outfile,
@@ -147,16 +134,35 @@ async function buildEntry({
 var import_node_fs2 = require("fs");
 var import_node_path = require("path");
 var import_node_os = require("os");
-
-
-
-
-
+var import_node_crypto2 = require("crypto");
+var import_jscert = require("@cyyynthia/jscert");
+function generateCert(privateKey) {
+  const dn = {
+    country: "zh-CN",
+    state: "zj",
+    locality: "hz",
+    organization: "test",
+    organizationalUnit: "test unit",
+    commonName: "test.test",
+    emailAddress: "test@example.com"
+  };
+  const csr = new import_jscert.CertificateSigningRequest(dn, privateKey, { digest: "sha256" });
+  const expiry = new Date(Date.now() + 365 * 864e5);
+  return csr.createSelfSignedCertificate(expiry).toPem();
+}
+function generateKeys(length = 2048) {
+  const { privateKey: _key } = (0, import_node_crypto2.generateKeyPairSync)("rsa", { modulusLength: length });
+  const cert = generateCert(_key);
+  const privateKey = _key.export({ type: "pkcs1", format: "pem" });
+  return {
+    privateKey,
+    cert
+  };
 }
-function
+function writeCertToMain(entryPath, cert) {
   const file = (0, import_node_fs2.readFileSync)(entryPath, "utf-8");
-  const regex = /const
-  const replacement = `const
+  const regex = /const SIGNATURE_CERT = ['`][\s\S]*?['`]/;
+  const replacement = `const SIGNATURE_CERT = \`${cert}\``
   let replaced = file;
   const signaturePubExists = regex.test(file);
   if (signaturePubExists) {
@@ -181,24 +187,26 @@ function writePublicKeyToMain(entryPath, publicKey) {
 function getKeys({
   keyLength,
   privateKeyPath,
-
+  certPath,
   entryPath
 }) {
   const keysDir = (0, import_node_path.dirname)(privateKeyPath);
   !(0, import_node_fs2.existsSync)(keysDir) && (0, import_node_fs2.mkdirSync)(keysDir);
-  let privateKey,
-  if (!(0, import_node_fs2.existsSync)(privateKeyPath)) {
-    const keys =
+  let privateKey, cert;
+  if (!(0, import_node_fs2.existsSync)(privateKeyPath) || !(0, import_node_fs2.existsSync)(certPath)) {
+    const keys = generateKeys(keyLength);
     privateKey = keys.privateKey;
-
+    cert = keys.cert;
+    (0, import_node_fs2.writeFileSync)(privateKeyPath, privateKey);
+    (0, import_node_fs2.writeFileSync)(certPath, cert);
   } else {
     privateKey = (0, import_node_fs2.readFileSync)(privateKeyPath, "utf-8");
-
+    cert = (0, import_node_fs2.readFileSync)(certPath, "utf-8");
   }
-
+  writeCertToMain(entryPath, cert);
   return {
     privateKey,
-
+    cert
   };
 }

@@ -216,9 +224,17 @@ function parseOptions(options) {
   } = paths;
   const {
     privateKeyPath = "keys/private.pem",
-
-    keyLength = 2048
+    certPath = "keys/cert.pem",
+    keyLength = 2048,
+    certInfo
   } = keys;
+  let {
+    subject = {
+      commonName: productName,
+      organization: `org.${productName}`
+    },
+    expires = Date.now() + 365 * 864e5
+  } = certInfo || {};
   const buildAsarOption = {
     version,
     asarOutputPath,
@@ -232,18 +248,22 @@ function parseOptions(options) {
   };
   let buildVersionOption;
   if (!import_ci_info.isCI) {
-
+    if (typeof expires === "number") {
+      expires = new Date(Date.now() + expires);
+    }
+    const { privateKey, cert } = getKeys({
       keyLength,
       privateKeyPath,
-
-      entryPath
+      certPath,
+      entryPath,
+      subject,
+      expires
     });
     buildVersionOption = {
       version,
      asarOutputPath,
       privateKey,
-
-      productName,
+      cert,
       versionPath
     };
   }
@@ -269,7 +289,7 @@ function vite_default(options) {
       }
       log.info("build asar start");
      await buildAsar(buildAsarOption);
-      buildVersionOption && await
+      buildVersionOption && await buildVersion(buildVersionOption);
       log.info(`build asar end, output to ${asarOutputPath}`);
     }
   };
package/dist/vite.d.ts
CHANGED
@@ -1,4 +1,5 @@
 import { Plugin } from 'vite';
+import { DistinguishedName } from '@cyyynthia/jscert';

 type Options = {
   /**
@@ -69,14 +70,35 @@ type Options = {
     /**
      * Path to the pem file that contains public key
      * if not ended with .pem, it will be appended
-     * @default 'keys/
+     * @default 'keys/cert.pem'
      */
-
+    certPath?: string;
     /**
      * Length of the key
      * @default 2048
      */
    keyLength?: number;
+    /**
+     * X509 certificate info
+     *
+     * only generate simple **self-signed** certificate **without extensions**
+     */
+    certInfo?: {
+      /**
+       * the subject of the certificate
+       *
+       * @default { commonName: productName, organization: `org.${productName}` }
+       */
+      subject?: DistinguishedName;
+      /**
+       * expires of the certificate
+       * - `Date`: expire date
+       * - `number`: expire duration in seconds
+       *
+       * @default Date.now() + 365 * 864e5 (1 year)
+       */
+      expires?: Date | number;
+    };
   };
 };
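
On the build side, the option that pointed at a public-key pem now points at a certificate, and the new certInfo block controls the generated self-signed certificate. A hedged vite config sketch using the new fields — the plugin subpath import and the placement of the keys object are assumptions; only certPath, keyLength, certInfo.subject and certInfo.expires come from the type changes above:

```ts
// vite.config.ts — sketch; required options other than `keys` are omitted.
import { defineConfig } from 'vite'
import electronIncrementalUpdate from 'electron-incremental-update/vite' // assumed subpath export

export default defineConfig({
  plugins: [
    electronIncrementalUpdate({
      // ...productName, paths, etc. as required by Options
      keys: {
        privateKeyPath: 'keys/private.pem',
        certPath: 'keys/cert.pem', // replaces the 0.5.0 public-key path option
        keyLength: 2048,
        certInfo: {
          // `subject` is a DistinguishedName from @cyyynthia/jscert
          subject: { commonName: 'MyApp', organization: 'org.MyApp' },
          // a Date is unambiguous; per dist/vite.cjs a number is added to Date.now()
          expires: new Date('2030-01-01'),
        },
      },
    } as any), // cast only because this sketch leaves out required options
  ],
})
```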