electron-incremental-update 0.4.0 → 0.5.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +7 -15
- package/dist/chunk-OBERMV66.mjs +60 -0
- package/dist/index.cjs +85 -75
- package/dist/index.d.ts +12 -13
- package/dist/index.mjs +63 -75
- package/dist/vite.cjs +110 -59
- package/dist/vite.mjs +81 -61
- package/package.json +2 -2
- package/dist/chunk-AKU6F3WT.mjs +0 -11
package/README.md
CHANGED
@@ -91,9 +91,12 @@ export default function (updater: Updater) {
   console.log(`\tentry: ${getEntryVersion()}`)
   console.log(`\tapp: ${getAppVersion(name)}`)
   let size = 0
-
-
-
+  updater.on('downloading', (progress) => {
+    console.log(`${(progress / size).toFixed(2)}%`)
+  })
+  updater.on('debug', data => console.log('[updater]:', data))
+  updater.checkUpdate().then(async (result) => {
+    if (result === undefined) {
       console.log('Update Unavailable')
     } else if (result instanceof Error) {
       console.error(result)
@@ -105,20 +108,9 @@ export default function (updater: Updater) {
         buttons: ['Download', 'Later'],
         message: 'Application update available!',
       })
-      response === 0 && await updater.downloadUpdate()
+      response === 0 && console.log(await updater.downloadUpdate())
     }
   })
-  updater.on('download', () => console.log('download start'))
-  updater.on('downloading', (len) => {
-    currentSize += len
-    console.log(`${(currentSize / size).toFixed(2)}%`)
-  })
-  updater.on('downloaded', () => console.log('download end'))
-  updater.on('donwnloadError', console.error)
-  // to debug, it need to set debug to true in updater options
-  updater.on('debug', data => console.log('[updater]:', data))
-  updater.checkUpdate()
-
   // app logics
   app.whenReady().then(() => {
     // ...
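Read together, the two hunks above turn the README example into a promise-based flow: checkUpdate() resolves with the result instead of emitting checkResult, the downloading event carries cumulative progress, and downloadUpdate() resolves with true or an Error. Roughly, the full updated example reads as below; the lines between the two hunks (storing result.size and showing the dialog) are not visible in this diff and are reconstructed as an assumption:

export default function (updater: Updater) {
  console.log(`\tentry: ${getEntryVersion()}`)
  console.log(`\tapp: ${getAppVersion(name)}`)
  let size = 0
  updater.on('downloading', (progress) => {
    console.log(`${(progress / size).toFixed(2)}%`)
  })
  updater.on('debug', data => console.log('[updater]:', data))
  updater.checkUpdate().then(async (result) => {
    if (result === undefined) {
      console.log('Update Unavailable')
    } else if (result instanceof Error) {
      console.error(result)
    } else {
      size = result.size // assumed: keep the size for the progress handler
      const { response } = await dialog.showMessageBox({ // assumed dialog call
        buttons: ['Download', 'Later'],
        message: 'Application update available!',
      })
      response === 0 && console.log(await updater.downloadUpdate())
    }
  })
  // app logics
  app.whenReady().then(() => {
    // ...
  })
}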
package/dist/chunk-OBERMV66.mjs
ADDED
@@ -0,0 +1,60 @@
+var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
+  get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
+}) : x)(function(x) {
+  if (typeof require !== "undefined")
+    return require.apply(this, arguments);
+  throw new Error('Dynamic require of "' + x + '" is not supported');
+});
+
+// src/crypto.ts
+import { constants, createCipheriv, createDecipheriv, createHash, createSign, createVerify, generateKeyPairSync } from "node:crypto";
+import { Buffer as Buffer2 } from "node:buffer";
+var aesEncode = "base64url";
+function generateRSA(length = 2048) {
+  const pair = generateKeyPairSync("rsa", { modulusLength: length });
+  const privateKey = pair.privateKey.export({ type: "pkcs1", format: "pem" });
+  const publicKey = pair.publicKey.export({ type: "pkcs1", format: "pem" });
+  return {
+    privateKey,
+    publicKey
+  };
+}
+function encrypt(plainText, key, iv) {
+  const cipher = createCipheriv("aes-256-cbc", key, iv);
+  let encrypted = cipher.update(plainText, "utf8", aesEncode);
+  encrypted += cipher.final(aesEncode);
+  return encrypted;
+}
+function decrypt(encryptedText, key, iv) {
+  const decipher = createDecipheriv("aes-256-cbc", key, iv);
+  let decrypted = decipher.update(encryptedText, aesEncode, "utf8");
+  decrypted += decipher.final("utf8");
+  return decrypted;
+}
+function generateKey(data, length) {
+  const hash = createHash("SHA256").update(data).digest("binary");
+  return Buffer2.from(hash).subarray(0, length);
+}
+function signature(buffer, privateKey, publicKey) {
+  const sig = createSign("RSA-SHA256").update(buffer).sign({
+    key: privateKey,
+    padding: constants.RSA_PKCS1_PADDING,
+    saltLength: constants.RSA_PSS_SALTLEN_DIGEST
+  }, "base64");
+  return encrypt(sig, generateKey(publicKey, 32), generateKey(buffer, 16));
+}
+function verify(buffer, signature2, publicKey) {
+  try {
+    const sig = decrypt(signature2, generateKey(publicKey, 32), generateKey(buffer, 16));
+    return createVerify("RSA-SHA256").update(buffer).verify(publicKey, sig, "base64");
+  } catch (error) {
+    return false;
+  }
+}
+
+export {
+  __require,
+  generateRSA,
+  signature,
+  verify
+};
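The new shared chunk centralizes the signing scheme: signature() produces an RSA-SHA256 signature of a buffer and then AES-256-CBC-encrypts it, deriving the 32-byte key from a SHA-256 hash of the public key and the 16-byte IV from a hash of the buffer itself; verify() reverses both steps. A minimal round-trip sketch using these exports (the payload is a placeholder; in the real build the buffer is the gzipped asar, and the chunk path is an internal dist artifact shown only for illustration):

import { Buffer } from 'node:buffer'
import { generateRSA, signature, verify } from './chunk-OBERMV66.mjs'

const { privateKey, publicKey } = generateRSA() // 2048-bit RSA pair, PKCS#1 PEM strings
const payload = Buffer.from('asar.gz contents would go here')

const sig = signature(payload, privateKey, publicKey) // sign, then AES-encrypt the signature
console.log(verify(payload, sig, publicKey))                 // true
console.log(verify(Buffer.from('tampered'), sig, publicKey)) // false: decrypt/verify fails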
package/dist/index.cjs
CHANGED
@@ -46,14 +46,36 @@ var import_electron3 = require("electron");
 
 // src/updater/index.ts
 var import_node_events = require("events");
-var import_node_crypto = require("crypto");
 var import_node_zlib = require("zlib");
 var import_node_fs2 = require("fs");
 var import_promises = require("fs/promises");
 var import_electron2 = require("electron");
 
-// src/
+// src/crypto.ts
+var import_node_crypto = require("crypto");
 var import_node_buffer = require("buffer");
+var aesEncode = "base64url";
+function decrypt(encryptedText, key, iv) {
+  const decipher = (0, import_node_crypto.createDecipheriv)("aes-256-cbc", key, iv);
+  let decrypted = decipher.update(encryptedText, aesEncode, "utf8");
+  decrypted += decipher.final("utf8");
+  return decrypted;
+}
+function generateKey(data, length) {
+  const hash = (0, import_node_crypto.createHash)("SHA256").update(data).digest("binary");
+  return import_node_buffer.Buffer.from(hash).subarray(0, length);
+}
+function verify(buffer, signature, publicKey) {
+  try {
+    const sig = decrypt(signature, generateKey(publicKey, 32), generateKey(buffer, 16));
+    return (0, import_node_crypto.createVerify)("RSA-SHA256").update(buffer).verify(publicKey, sig, "base64");
+  } catch (error) {
+    return false;
+  }
+}
+
+// src/updater/defaultFunctions.ts
+var import_node_buffer2 = require("buffer");
 var import_node_https = __toESM(require("https"), 1);
 function downloadJSONDefault(url, updater, headers) {
   return new Promise((resolve2, reject) => {
@@ -80,16 +102,18 @@ function downloadJSONDefault(url, updater, headers) {
   });
 }
 function downloadBufferDefault(url, updater, headers) {
+  let progress = 0;
   return new Promise((resolve2, reject) => {
     import_node_https.default.get(url, (res) => {
       let data = [];
       res.headers = headers;
       res.on("data", (chunk) => {
-
+        progress += chunk.length;
+        updater.emit("downloading", progress);
         data.push(chunk);
       });
       res.on("end", () => {
-        resolve2(
+        resolve2(import_node_buffer2.Buffer.concat(data));
       });
     }).on("error", (e) => {
       reject(e);
@@ -225,9 +249,6 @@ function createUpdater({
     });
   });
 }
-function verify(buffer, signature2) {
-  return (0, import_node_crypto.createVerify)("RSA-SHA256").update(buffer).verify(SIGNATURE_PUB, signature2, "base64");
-}
 function needUpdate(version2) {
   if (!import_electron2.app.isPackaged) {
     log("in dev mode, no need to update");
@@ -240,86 +261,75 @@
   const _compare = compareVersion ?? compareVersionDefault;
   return _compare(currentVersion, version2);
 }
-async
-
-
-
-
-        throw new Error("updateJsonURL or repository are not set");
-      }
-      url = `${repository.replace("github.com", "raw.githubusercontent.com")}/master/version.json`;
-    }
-    if ((0, import_node_fs2.existsSync)(tmpFile)) {
-      log(`remove tmp file: ${tmpFile}`);
-      await (0, import_promises.rm)(tmpFile);
-    }
-    if ((0, import_node_fs2.existsSync)(gzipPath)) {
-      log(`remove .gz file: ${gzipPath}`);
-      await (0, import_promises.rm)(gzipPath);
-    }
-    const json = await download(url, "json");
-    const {
-      signature: _sig,
-      version: _v,
-      size
-    } = json;
-    log(`update info: ${JSON.stringify(json, null, 2)}`);
-    if (!await needUpdate(_v)) {
-      log(`update unavailable: ${_v}`);
-      return false;
-    } else {
-      log(`update available: ${_v}`);
-      signature = _sig;
-      version = _v;
-      return { size, version };
-    }
-  }
-  async function downloadUpdate(src) {
-    if (typeof src !== "object") {
-      let _url = src ?? _release;
-      if (!_url) {
-        log("no releaseAsarURL, fallback to use repository");
+  updater.checkUpdate = async (url) => {
+    try {
+      url ??= _update;
+      if (!url) {
+        log("no updateJsonURL, fallback to use repository");
         if (!repository) {
-          throw new Error("
+          throw new Error("updateJsonURL or repository are not set");
         }
-
+        url = `${repository.replace("github.com", "raw.githubusercontent.com")}/master/version.json`;
+      }
+      if ((0, import_node_fs2.existsSync)(tmpFile)) {
+        log(`remove tmp file: ${tmpFile}`);
+        await (0, import_promises.rm)(tmpFile);
+      }
+      if ((0, import_node_fs2.existsSync)(gzipPath)) {
+        log(`remove .gz file: ${gzipPath}`);
+        await (0, import_promises.rm)(gzipPath);
+      }
+      const json = await download(url, "json");
+      const {
+        signature: _sig,
+        version: _v,
+        size
+      } = json;
+      log(`update info: ${JSON.stringify(json, null, 2)}`);
+      if (!await needUpdate(_v)) {
+        log(`update unavailable: ${_v}`);
+        return void 0;
+      } else {
+        log(`update available: ${_v}`);
+        signature = _sig;
+        version = _v;
+        return { size, version };
      }
-      src = await download(_url, "buffer");
-    }
-    log("verify start");
-    if (!verify(src, signature)) {
-      log("verify failed");
-      throw new Error("invalid signature");
-    }
-    log("verify success");
-    log(`write file: ${gzipPath}`);
-    await (0, import_promises.writeFile)(gzipPath, src);
-    log(`extract file: ${gzipPath}`);
-    await extractFile(gzipPath);
-    log(`update success, version: ${version}`);
-    updater.emit("downloaded");
-  }
-  const onCheck = async (url) => {
-    try {
-      const result = await checkUpdate(url);
-      updater.emit("checkResult", result);
    } catch (error) {
      log(error);
-
+      return error;
    }
  };
-  updater.
-  updater.checkUpdate = onCheck;
-  const onDownload = async (src) => {
+  updater.downloadUpdate = async (src) => {
    try {
-
+      if (typeof src !== "object") {
+        let _url = src ?? _release;
+        if (!_url) {
+          log("no releaseAsarURL, fallback to use repository");
+          if (!repository) {
+            throw new Error("releaseAsarURL or repository are not set");
+          }
+          _url = `${repository}/releases/download/latest/${productName}.asar.gz`;
+        }
+        src = await download(_url, "buffer");
+      }
+      log("verify start");
+      if (!verify(src, signature, SIGNATURE_PUB)) {
+        log("verify failed");
+        throw new Error("invalid signature");
+      }
+      log("verify success");
+      log(`write file: ${gzipPath}`);
+      await (0, import_promises.writeFile)(gzipPath, src);
+      log(`extract file: ${gzipPath}`);
+      await extractFile(gzipPath);
+      log(`update success, version: ${version}`);
+      return true;
    } catch (error) {
      log(error);
-
+      return error;
    }
  };
-  updater.on("download", onDownload);
-  updater.downloadUpdate = onDownload;
  return updater;
 }
 
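One behavioral consequence of the change to downloadBufferDefault: the downloading event now carries the cumulative number of bytes received rather than a per-chunk length (the removed line is truncated above, but the 0.4.0 README listener accumulated lengths itself), so a listener no longer needs its own counter. A small sketch of the difference:

// 0.4.0 — the listener accumulated chunk lengths itself
let currentSize = 0
updater.on('downloading', (len) => {
  currentSize += len
  console.log(`${(currentSize / size).toFixed(2)}%`)
})

// 0.5.x — the event already carries the running total
updater.on('downloading', (progress) => {
  console.log(`${(progress / size).toFixed(2)}%`)
})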
package/dist/index.d.ts
CHANGED
@@ -1,13 +1,9 @@
 import { Buffer } from 'node:buffer';
 
-type CheckResultType =
+type CheckResultType = Omit<UpdateJSON, 'signature'> | undefined | Error;
+type DownloadResult = true | Error;
 type UpdateEvents = {
-
-  checkResult: [data: CheckResultType];
-  download: [src?: string | Buffer];
-  downloading: [current: number];
-  downloaded: null;
-  donwnloadError: [error: unknown];
+  downloading: [progress: number];
   debug: [msg: string | Error];
 };
 type UpdateJSON = {
@@ -21,16 +17,19 @@ interface TypedUpdater<T extends Record<string | symbol, MaybeArray<any>>, Event
   listeners<E extends Event>(eventName: E): Function[];
   eventNames(): (Event)[];
   on<E extends Event>(eventName: E, listener: (...data: MaybeArray<T[E]>) => void): this;
-  once<E extends Event>(eventName: E, listener: (...args: MaybeArray<T[E]>) => void): this;
   emit<E extends Event>(eventName: E, ...args: MaybeArray<T[E]>): boolean;
   off<E extends Event>(eventName: E, listener: (...args: MaybeArray<T[E]>) => void): this;
   /**
-   * - `
+   * - `{size: number, version: string}`: available
    * - `false`: unavailable
-   * - `
+   * - `Error`: fail
    */
-  checkUpdate(url?: string): Promise<
-
+  checkUpdate(url?: string): Promise<CheckResultType>;
+  /**
+   * - `true`: success
+   * - `Error`: fail
+   */
+  downloadUpdate(url?: string | Buffer): Promise<DownloadResult>;
 }
 type Updater = TypedUpdater<UpdateEvents>;
 interface UpdaterOption {
@@ -210,4 +209,4 @@ declare function initApp(appOptions: AppOption): {
  */
 declare function initApp(appOptions: AppOption, updaterOptions: InitUpdaterOptions): undefined;
 
-export { AppOption, CheckResultType, InitUpdaterOptions, UpdateJSON, Updater, UpdaterOption, createUpdater, getAppAsarPath, getAppVersion, getEntryVersion, getGithubReleaseCdnGroup, initApp, parseGithubCdnURL, requireNative, restartApp };
+export { AppOption, CheckResultType, DownloadResult, InitUpdaterOptions, UpdateJSON, Updater, UpdaterOption, createUpdater, getAppAsarPath, getAppVersion, getEntryVersion, getGithubReleaseCdnGroup, initApp, parseGithubCdnURL, requireNative, restartApp };
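Since checkUpdate and downloadUpdate now report through their return values, a caller can be written directly against the new types. A hedged sketch, assuming the types are imported from the package root as the export list above suggests:

import type { CheckResultType, DownloadResult, Updater } from 'electron-incremental-update'

async function runUpdate(updater: Updater): Promise<void> {
  const check: CheckResultType = await updater.checkUpdate()
  if (check === undefined) {
    return // update unavailable
  }
  if (check instanceof Error) {
    throw check // check failed
  }
  const { size, version } = check // Omit<UpdateJSON, 'signature'>, i.e. { size, version }
  console.log(`update available: v${version} (${size} bytes)`)

  const result: DownloadResult = await updater.downloadUpdate()
  if (result instanceof Error) {
    throw result // download or signature verification failed
  }
  // result === true: the .asar.gz was verified and extracted
}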
package/dist/index.mjs
CHANGED
@@ -1,6 +1,7 @@
 import {
-  __require
-
+  __require,
+  verify
+} from "./chunk-OBERMV66.mjs";
 
 // src/index.ts
 import { resolve } from "node:path";
@@ -8,7 +9,6 @@ import { app as app3 } from "electron";
 
 // src/updater/index.ts
 import { EventEmitter } from "node:events";
-import { createVerify } from "node:crypto";
 import { createGunzip } from "node:zlib";
 import { createReadStream, createWriteStream, existsSync } from "node:fs";
 import { rm, writeFile } from "node:fs/promises";
@@ -42,12 +42,14 @@ function downloadJSONDefault(url, updater, headers) {
   });
 }
 function downloadBufferDefault(url, updater, headers) {
+  let progress = 0;
   return new Promise((resolve2, reject) => {
     https.get(url, (res) => {
       let data = [];
       res.headers = headers;
       res.on("data", (chunk) => {
-
+        progress += chunk.length;
+        updater.emit("downloading", progress);
         data.push(chunk);
       });
       res.on("end", () => {
@@ -187,9 +189,6 @@ function createUpdater({
     });
   });
 }
-function verify(buffer, signature2) {
-  return createVerify("RSA-SHA256").update(buffer).verify(SIGNATURE_PUB, signature2, "base64");
-}
 function needUpdate(version2) {
   if (!app2.isPackaged) {
     log("in dev mode, no need to update");
@@ -202,86 +201,75 @@ function createUpdater({
   const _compare = compareVersion ?? compareVersionDefault;
   return _compare(currentVersion, version2);
 }
-async
-
-
-
-
-        throw new Error("updateJsonURL or repository are not set");
-      }
-      url = `${repository.replace("github.com", "raw.githubusercontent.com")}/master/version.json`;
-    }
-    if (existsSync(tmpFile)) {
-      log(`remove tmp file: ${tmpFile}`);
-      await rm(tmpFile);
-    }
-    if (existsSync(gzipPath)) {
-      log(`remove .gz file: ${gzipPath}`);
-      await rm(gzipPath);
-    }
-    const json = await download(url, "json");
-    const {
-      signature: _sig,
-      version: _v,
-      size
-    } = json;
-    log(`update info: ${JSON.stringify(json, null, 2)}`);
-    if (!await needUpdate(_v)) {
-      log(`update unavailable: ${_v}`);
-      return false;
-    } else {
-      log(`update available: ${_v}`);
-      signature = _sig;
-      version = _v;
-      return { size, version };
-    }
-  }
-  async function downloadUpdate(src) {
-    if (typeof src !== "object") {
-      let _url = src ?? _release;
-      if (!_url) {
-        log("no releaseAsarURL, fallback to use repository");
+  updater.checkUpdate = async (url) => {
+    try {
+      url ??= _update;
+      if (!url) {
+        log("no updateJsonURL, fallback to use repository");
         if (!repository) {
-          throw new Error("
+          throw new Error("updateJsonURL or repository are not set");
        }
-
+        url = `${repository.replace("github.com", "raw.githubusercontent.com")}/master/version.json`;
+      }
+      if (existsSync(tmpFile)) {
+        log(`remove tmp file: ${tmpFile}`);
+        await rm(tmpFile);
+      }
+      if (existsSync(gzipPath)) {
+        log(`remove .gz file: ${gzipPath}`);
+        await rm(gzipPath);
+      }
+      const json = await download(url, "json");
+      const {
+        signature: _sig,
+        version: _v,
+        size
+      } = json;
+      log(`update info: ${JSON.stringify(json, null, 2)}`);
+      if (!await needUpdate(_v)) {
+        log(`update unavailable: ${_v}`);
+        return void 0;
+      } else {
+        log(`update available: ${_v}`);
+        signature = _sig;
+        version = _v;
+        return { size, version };
      }
-      src = await download(_url, "buffer");
-    }
-    log("verify start");
-    if (!verify(src, signature)) {
-      log("verify failed");
-      throw new Error("invalid signature");
-    }
-    log("verify success");
-    log(`write file: ${gzipPath}`);
-    await writeFile(gzipPath, src);
-    log(`extract file: ${gzipPath}`);
-    await extractFile(gzipPath);
-    log(`update success, version: ${version}`);
-    updater.emit("downloaded");
-  }
-  const onCheck = async (url) => {
-    try {
-      const result = await checkUpdate(url);
-      updater.emit("checkResult", result);
    } catch (error) {
      log(error);
-
+      return error;
    }
  };
-  updater.
-  updater.checkUpdate = onCheck;
-  const onDownload = async (src) => {
+  updater.downloadUpdate = async (src) => {
    try {
-
+      if (typeof src !== "object") {
+        let _url = src ?? _release;
+        if (!_url) {
+          log("no releaseAsarURL, fallback to use repository");
+          if (!repository) {
+            throw new Error("releaseAsarURL or repository are not set");
+          }
+          _url = `${repository}/releases/download/latest/${productName}.asar.gz`;
+        }
+        src = await download(_url, "buffer");
+      }
+      log("verify start");
+      if (!verify(src, signature, SIGNATURE_PUB)) {
+        log("verify failed");
+        throw new Error("invalid signature");
+      }
+      log("verify success");
+      log(`write file: ${gzipPath}`);
+      await writeFile(gzipPath, src);
+      log(`extract file: ${gzipPath}`);
+      await extractFile(gzipPath);
+      log(`update success, version: ${version}`);
+      return true;
    } catch (error) {
      log(error);
-
+      return error;
    }
  };
-  updater.on("download", onDownload);
-  updater.downloadUpdate = onDownload;
  return updater;
 }
 
package/dist/vite.cjs
CHANGED
@@ -36,11 +36,43 @@ module.exports = __toCommonJS(vite_exports);
 var import_vite = require("vite");
 
 // src/build-plugins/asar.ts
-var import_node_crypto = require("crypto");
 var import_node_fs = require("fs");
 var import_promises = require("fs/promises");
 var import_node_zlib = __toESM(require("zlib"), 1);
-
+
+// src/crypto.ts
+var import_node_crypto = require("crypto");
+var import_node_buffer = require("buffer");
+var aesEncode = "base64url";
+function generateRSA(length = 2048) {
+  const pair = (0, import_node_crypto.generateKeyPairSync)("rsa", { modulusLength: length });
+  const privateKey = pair.privateKey.export({ type: "pkcs1", format: "pem" });
+  const publicKey = pair.publicKey.export({ type: "pkcs1", format: "pem" });
+  return {
+    privateKey,
+    publicKey
+  };
+}
+function encrypt(plainText, key, iv) {
+  const cipher = (0, import_node_crypto.createCipheriv)("aes-256-cbc", key, iv);
+  let encrypted = cipher.update(plainText, "utf8", aesEncode);
+  encrypted += cipher.final(aesEncode);
+  return encrypted;
+}
+function generateKey(data, length) {
+  const hash = (0, import_node_crypto.createHash)("SHA256").update(data).digest("binary");
+  return import_node_buffer.Buffer.from(hash).subarray(0, length);
+}
+function signature(buffer, privateKey, publicKey) {
+  const sig = (0, import_node_crypto.createSign)("RSA-SHA256").update(buffer).sign({
+    key: privateKey,
+    padding: import_node_crypto.constants.RSA_PKCS1_PADDING,
+    saltLength: import_node_crypto.constants.RSA_PSS_SALTLEN_DIGEST
+  }, "base64");
+  return encrypt(sig, generateKey(publicKey, 32), generateKey(buffer, 16));
+}
+
+// src/build-plugins/asar.ts
 function gzipFile(filePath) {
   return new Promise((resolve, reject) => {
     const gzip = import_node_zlib.default.createGzip();
@@ -49,13 +81,6 @@ function gzipFile(filePath) {
     input.pipe(gzip).pipe(output).on("finish", () => resolve(null)).on("error", (err) => reject(err));
   });
 }
-function generateSignature(buffer, privateKey) {
-  return (0, import_node_crypto.createSign)("RSA-SHA256").update(buffer).sign({
-    key: privateKey,
-    padding: import_node_crypto.constants.RSA_PKCS1_PADDING,
-    saltLength: import_node_crypto.constants.RSA_PSS_SALTLEN_DIGEST
-  }, "base64");
-}
 async function pack(dir, target) {
   let asar = null;
   try {
@@ -76,47 +101,60 @@ async function pack(dir, target) {
 async function buildAsar({
   version,
   asarOutputPath,
-  privateKeyPath,
   electronDistPath,
-  rendererDistPath
-  versionPath
+  rendererDistPath
 }) {
   await (0, import_promises.rename)(rendererDistPath, `${electronDistPath}/renderer`);
   await (0, import_promises.writeFile)(`${electronDistPath}/version`, version);
   await pack(electronDistPath, asarOutputPath);
   await gzipFile(asarOutputPath);
-
-
-
+}
+async function generateVersion({
+  asarOutputPath,
+  versionPath,
+  privateKey,
+  publicKey,
+  version
+}) {
   const buffer = await (0, import_promises.readFile)(`${asarOutputPath}.gz`);
-  const signature = generateSignature(buffer, await (0, import_promises.readFile)(privateKeyPath, "utf-8"));
   await (0, import_promises.writeFile)(versionPath, JSON.stringify({
-    signature,
+    signature: signature(buffer, privateKey, publicKey),
     version,
     size: buffer.length
   }, null, 2));
 }
 
 // src/build-plugins/entry.ts
+var import_esbuild = require("esbuild");
+async function buildEntry({
+  entryPath,
+  entryOutputPath: outfile,
+  minify
+}) {
+  await (0, import_esbuild.build)({
+    entryPoints: [entryPath],
+    bundle: true,
+    platform: "node",
+    outfile,
+    minify,
+    external: ["electron"]
+  });
+}
+
+// src/build-plugins/key.ts
 var import_node_fs2 = require("fs");
 var import_node_path = require("path");
-var import_promises2 = require("fs/promises");
-var import_node_crypto2 = require("crypto");
 var import_node_os = require("os");
-
-
-
-
-
-  const publicKey = pair.publicKey.export({ type: "pkcs1", format: "pem" });
-  await (0, import_promises2.writeFile)(privateKeyPath, privateKey);
-  await (0, import_promises2.writeFile)(publicKeyPath, publicKey);
+function generateKeyFile(privateKeyPath, publicKeyPath, length) {
+  const ret = generateRSA(length);
+  (0, import_node_fs2.writeFileSync)(privateKeyPath, ret.privateKey);
+  (0, import_node_fs2.writeFileSync)(publicKeyPath, ret.publicKey);
+  return ret;
 }
-
-  const file =
-  const key = await (0, import_promises2.readFile)(publicKeyPath, "utf-8");
+function writePublicKeyToMain(entryPath, publicKey) {
+  const file = (0, import_node_fs2.readFileSync)(entryPath, "utf-8");
   const regex = /const SIGNATURE_PUB = ['`][\s\S]*?['`]/;
-  const replacement = `const SIGNATURE_PUB = \`${
+  const replacement = `const SIGNATURE_PUB = \`${publicKey}\``;
   let replaced = file;
   const signaturePubExists = regex.test(file);
   if (signaturePubExists) {
@@ -136,33 +174,34 @@ async function writePublicKeyToMain(updatePath, publicKeyPath) {
     !isMatched && lines.push(r);
     replaced = lines.join(import_node_os.EOL);
   }
-
+  (0, import_node_fs2.writeFileSync)(entryPath, replaced);
 }
-
+function getKeys({
+  keyLength,
   privateKeyPath,
   publicKeyPath,
-  entryPath
-  entryOutputPath: outfile,
-  minify,
-  keyLength
+  entryPath
 }) {
-
-
-
-
-
+  const keysDir = (0, import_node_path.dirname)(privateKeyPath);
+  !(0, import_node_fs2.existsSync)(keysDir) && (0, import_node_fs2.mkdirSync)(keysDir);
+  let privateKey, publicKey;
+  if (!(0, import_node_fs2.existsSync)(privateKeyPath)) {
+    const keys = generateKeyFile(privateKeyPath, publicKeyPath, keyLength);
+    privateKey = keys.privateKey;
+    publicKey = keys.publicKey;
+  } else {
+    privateKey = (0, import_node_fs2.readFileSync)(privateKeyPath, "utf-8");
+    publicKey = (0, import_node_fs2.readFileSync)(publicKeyPath, "utf-8");
  }
-
-
-
-
-
-    minify,
-    external: ["electron"]
-  });
+  writePublicKeyToMain(entryPath, publicKey);
+  return {
+    privateKey,
+    publicKey
+  };
 }
 
 // src/build-plugins/option.ts
+var import_ci_info = require("ci-info");
 function parseOptions(options) {
   const { isBuild, productName, version, minify = false, paths = {}, keys = {} } = options;
   const {
@@ -181,25 +220,36 @@ function parseOptions(options) {
   const buildAsarOption = {
     version,
     asarOutputPath,
-    privateKeyPath,
     electronDistPath,
-    rendererDistPath
-    versionPath
+    rendererDistPath
   };
   const buildEntryOption = {
-    privateKeyPath,
-    publicKeyPath,
     entryPath,
     entryOutputPath,
-    minify
-    keyLength
+    minify
   };
-
+  let buildVersionOption;
+  if (!import_ci_info.isCI) {
+    const { privateKey, publicKey } = getKeys({
+      keyLength,
+      privateKeyPath,
+      publicKeyPath,
+      entryPath
+    });
+    buildVersionOption = {
+      version,
+      asarOutputPath,
+      privateKey,
+      publicKey,
+      versionPath
+    };
+  }
+  return { isBuild, buildAsarOption, buildEntryOption, buildVersionOption };
 }
 
 // src/vite.ts
 function vite_default(options) {
-  const { isBuild, buildAsarOption, buildEntryOption } = parseOptions(options);
+  const { isBuild, buildAsarOption, buildEntryOption, buildVersionOption } = parseOptions(options);
   const { entryPath, entryOutputPath } = buildEntryOption;
   const { asarOutputPath } = buildAsarOption;
   const id = "electron-incremental-updater";
@@ -216,6 +266,7 @@ function vite_default(options) {
       }
       log.info("build asar start");
       await buildAsar(buildAsarOption);
+      buildVersionOption && await generateVersion(buildVersionOption);
       log.info(`build asar end, output to ${asarOutputPath}`);
     }
   };
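generateVersion now receives the key pair directly and writes the update manifest at versionPath after the asar is packed and gzipped. From the code above, that manifest has the following shape (illustrative field comments; the updater fetches it, by default from the repository's master/version.json on raw.githubusercontent.com):

// Shape of the generated version.json (field values vary per build)
interface VersionJSON {
  signature: string // RSA-SHA256 signature of the .asar.gz, AES-encrypted as in src/crypto.ts
  version: string   // the app version baked into the asar
  size: number      // byte length of the gzipped asar, used for download progress
}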
package/dist/vite.mjs
CHANGED
@@ -1,14 +1,15 @@
-import
+import {
+  generateRSA,
+  signature
+} from "./chunk-OBERMV66.mjs";
 
 // src/vite.ts
 import { createLogger } from "vite";
 
 // src/build-plugins/asar.ts
-import { constants, createSign } from "node:crypto";
 import { createReadStream, createWriteStream } from "node:fs";
 import { readFile, rename, writeFile } from "node:fs/promises";
 import zlib from "node:zlib";
-import { isCI } from "ci-info";
 function gzipFile(filePath) {
   return new Promise((resolve, reject) => {
     const gzip = zlib.createGzip();
@@ -17,13 +18,6 @@ function gzipFile(filePath) {
     input.pipe(gzip).pipe(output).on("finish", () => resolve(null)).on("error", (err) => reject(err));
   });
 }
-function generateSignature(buffer, privateKey) {
-  return createSign("RSA-SHA256").update(buffer).sign({
-    key: privateKey,
-    padding: constants.RSA_PKCS1_PADDING,
-    saltLength: constants.RSA_PSS_SALTLEN_DIGEST
-  }, "base64");
-}
 async function pack(dir, target) {
   let asar = null;
   try {
@@ -44,47 +38,60 @@ async function pack(dir, target) {
 async function buildAsar({
   version,
   asarOutputPath,
-  privateKeyPath,
   electronDistPath,
-  rendererDistPath
-  versionPath
+  rendererDistPath
 }) {
   await rename(rendererDistPath, `${electronDistPath}/renderer`);
   await writeFile(`${electronDistPath}/version`, version);
   await pack(electronDistPath, asarOutputPath);
   await gzipFile(asarOutputPath);
-
-
-
+}
+async function generateVersion({
+  asarOutputPath,
+  versionPath,
+  privateKey,
+  publicKey,
+  version
+}) {
   const buffer = await readFile(`${asarOutputPath}.gz`);
-  const signature = generateSignature(buffer, await readFile(privateKeyPath, "utf-8"));
   await writeFile(versionPath, JSON.stringify({
-    signature,
+    signature: signature(buffer, privateKey, publicKey),
     version,
     size: buffer.length
   }, null, 2));
 }
 
 // src/build-plugins/entry.ts
-import {
+import { build } from "esbuild";
+async function buildEntry({
+  entryPath,
+  entryOutputPath: outfile,
+  minify
+}) {
+  await build({
+    entryPoints: [entryPath],
+    bundle: true,
+    platform: "node",
+    outfile,
+    minify,
+    external: ["electron"]
+  });
+}
+
+// src/build-plugins/key.ts
+import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs";
 import { dirname } from "node:path";
-import { mkdir, readFile as readFile2, writeFile as writeFile2 } from "node:fs/promises";
-import { generateKeyPairSync } from "node:crypto";
 import { EOL } from "node:os";
-
-
-
-
-
-  const publicKey = pair.publicKey.export({ type: "pkcs1", format: "pem" });
-  await writeFile2(privateKeyPath, privateKey);
-  await writeFile2(publicKeyPath, publicKey);
+function generateKeyFile(privateKeyPath, publicKeyPath, length) {
+  const ret = generateRSA(length);
+  writeFileSync(privateKeyPath, ret.privateKey);
+  writeFileSync(publicKeyPath, ret.publicKey);
+  return ret;
 }
-
-  const file =
-  const key = await readFile2(publicKeyPath, "utf-8");
+function writePublicKeyToMain(entryPath, publicKey) {
+  const file = readFileSync(entryPath, "utf-8");
   const regex = /const SIGNATURE_PUB = ['`][\s\S]*?['`]/;
-  const replacement = `const SIGNATURE_PUB = \`${
+  const replacement = `const SIGNATURE_PUB = \`${publicKey}\``;
   let replaced = file;
   const signaturePubExists = regex.test(file);
   if (signaturePubExists) {
@@ -104,33 +111,34 @@ async function writePublicKeyToMain(updatePath, publicKeyPath) {
     !isMatched && lines.push(r);
     replaced = lines.join(EOL);
   }
-
+  writeFileSync(entryPath, replaced);
 }
-
+function getKeys({
+  keyLength,
   privateKeyPath,
   publicKeyPath,
-  entryPath
-  entryOutputPath: outfile,
-  minify,
-  keyLength
+  entryPath
 }) {
-
-
-
-
-
+  const keysDir = dirname(privateKeyPath);
+  !existsSync(keysDir) && mkdirSync(keysDir);
+  let privateKey, publicKey;
+  if (!existsSync(privateKeyPath)) {
+    const keys = generateKeyFile(privateKeyPath, publicKeyPath, keyLength);
+    privateKey = keys.privateKey;
+    publicKey = keys.publicKey;
+  } else {
+    privateKey = readFileSync(privateKeyPath, "utf-8");
+    publicKey = readFileSync(publicKeyPath, "utf-8");
  }
-
-
-
-
-
-    minify,
-    external: ["electron"]
-  });
+  writePublicKeyToMain(entryPath, publicKey);
+  return {
+    privateKey,
+    publicKey
+  };
 }
 
 // src/build-plugins/option.ts
+import { isCI } from "ci-info";
 function parseOptions(options) {
   const { isBuild, productName, version, minify = false, paths = {}, keys = {} } = options;
   const {
@@ -149,25 +157,36 @@ function parseOptions(options) {
   const buildAsarOption = {
     version,
     asarOutputPath,
-    privateKeyPath,
     electronDistPath,
-    rendererDistPath
-    versionPath
+    rendererDistPath
  };
  const buildEntryOption = {
-    privateKeyPath,
-    publicKeyPath,
    entryPath,
    entryOutputPath,
-    minify
-    keyLength
+    minify
  };
-
+  let buildVersionOption;
+  if (!isCI) {
+    const { privateKey, publicKey } = getKeys({
+      keyLength,
+      privateKeyPath,
+      publicKeyPath,
+      entryPath
+    });
+    buildVersionOption = {
+      version,
+      asarOutputPath,
+      privateKey,
+      publicKey,
+      versionPath
+    };
+  }
+  return { isBuild, buildAsarOption, buildEntryOption, buildVersionOption };
 }
 
 // src/vite.ts
 function vite_default(options) {
-  const { isBuild, buildAsarOption, buildEntryOption } = parseOptions(options);
+  const { isBuild, buildAsarOption, buildEntryOption, buildVersionOption } = parseOptions(options);
   const { entryPath, entryOutputPath } = buildEntryOption;
   const { asarOutputPath } = buildAsarOption;
   const id = "electron-incremental-updater";
@@ -184,6 +203,7 @@ function vite_default(options) {
       }
       log.info("build asar start");
       await buildAsar(buildAsarOption);
+      buildVersionOption && await generateVersion(buildVersionOption);
       log.info(`build asar end, output to ${asarOutputPath}`);
     }
   };
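parseOptions shows the plugin's top-level options (isBuild, productName, version, minify, paths, keys), and key generation plus version-file signing is now skipped when ci-info detects a CI environment. A hypothetical wiring in vite.config.ts; the import specifier and the option values are assumptions, only the option names come from the code above:

import { defineConfig } from 'vite'
// assumed subpath export; the compiled plugin ships as dist/vite.mjs / dist/vite.cjs
import updater from 'electron-incremental-update/vite'
import { name, version } from './package.json'

export default defineConfig(({ command }) => ({
  plugins: [
    updater({
      isBuild: command === 'build',
      productName: name,
      version,
      minify: true,
      // paths: { ... } and keys: { ... } are optional overrides (see parseOptions defaults)
    }),
  ],
}))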
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "electron-incremental-update",
-  "version": "0.
+  "version": "0.5.1",
   "description": "electron incremental update tools, powered by vite",
   "scripts": {
     "build": "tsup",
@@ -63,4 +63,4 @@
   "dependencies": {
     "ci-info": "^3.8.0"
   }
-}
+}
package/dist/chunk-AKU6F3WT.mjs
DELETED
@@ -1,11 +0,0 @@
-var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
-  get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
-}) : x)(function(x) {
-  if (typeof require !== "undefined")
-    return require.apply(this, arguments);
-  throw new Error('Dynamic require of "' + x + '" is not supported');
-});
-
-export {
-  __require
-};