electrobun 0.0.19-beta.7 → 0.0.19-beta.71
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/BUILD.md +90 -0
- package/bin/electrobun.cjs +165 -0
- package/debug.js +5 -0
- package/dist/api/browser/builtinrpcSchema.ts +19 -0
- package/dist/api/browser/index.ts +409 -0
- package/dist/api/browser/rpc/webview.ts +79 -0
- package/dist/api/browser/stylesAndElements.ts +3 -0
- package/dist/api/browser/webviewtag.ts +534 -0
- package/dist/api/bun/core/ApplicationMenu.ts +66 -0
- package/dist/api/bun/core/BrowserView.ts +349 -0
- package/dist/api/bun/core/BrowserWindow.ts +191 -0
- package/dist/api/bun/core/ContextMenu.ts +67 -0
- package/dist/api/bun/core/Paths.ts +5 -0
- package/dist/api/bun/core/Socket.ts +181 -0
- package/dist/api/bun/core/Tray.ts +107 -0
- package/dist/api/bun/core/Updater.ts +395 -0
- package/dist/api/bun/core/Utils.ts +48 -0
- package/dist/api/bun/events/ApplicationEvents.ts +14 -0
- package/dist/api/bun/events/event.ts +29 -0
- package/dist/api/bun/events/eventEmitter.ts +45 -0
- package/dist/api/bun/events/trayEvents.ts +9 -0
- package/dist/api/bun/events/webviewEvents.ts +16 -0
- package/dist/api/bun/events/windowEvents.ts +12 -0
- package/dist/api/bun/index.ts +45 -0
- package/dist/api/bun/proc/linux.md +43 -0
- package/dist/api/bun/proc/native.ts +1217 -0
- package/dist/api/shared/platform.ts +48 -0
- package/dist/main.js +12 -0
- package/package.json +13 -7
- package/src/cli/index.ts +621 -203
- package/templates/hello-world/README.md +57 -0
- package/templates/hello-world/bun.lock +63 -0
- package/templates/hello-world/electrobun.config +17 -0
- package/templates/hello-world/package.json +16 -0
- package/templates/hello-world/src/bun/index.ts +15 -0
- package/templates/hello-world/src/mainview/index.css +124 -0
- package/templates/hello-world/src/mainview/index.html +47 -0
- package/templates/hello-world/src/mainview/index.ts +5 -0
- package/bin/electrobun +0 -0
|
@@ -0,0 +1,181 @@
|
|
|
1
|
+
import type { ServerWebSocket } from "bun";
|
|
2
|
+
import { BrowserView } from "./BrowserView";
|
|
3
|
+
import { createCipheriv, createDecipheriv, randomBytes } from "crypto";
|
|
4
|
+
|
|
5
|
+
function base64ToUint8Array(base64: string) {
|
|
6
|
+
{
|
|
7
|
+
return new Uint8Array(
|
|
8
|
+
atob(base64)
|
|
9
|
+
.split("")
|
|
10
|
+
.map((char) => char.charCodeAt(0))
|
|
11
|
+
);
|
|
12
|
+
}
|
|
13
|
+
}
|
|
14
|
+
|
|
15
|
+
// Encrypt function
|
|
16
|
+
function encrypt(secretKey: Uint8Array, text: string) {
|
|
17
|
+
const iv = new Uint8Array(randomBytes(12)); // IV for AES-GCM
|
|
18
|
+
const cipher = createCipheriv("aes-256-gcm", secretKey, iv);
|
|
19
|
+
const encrypted = Buffer.concat([
|
|
20
|
+
new Uint8Array(cipher.update(text, "utf8")),
|
|
21
|
+
new Uint8Array(cipher.final()),
|
|
22
|
+
]).toString("base64");
|
|
23
|
+
const tag = cipher.getAuthTag().toString("base64");
|
|
24
|
+
return { encrypted, iv: Buffer.from(iv).toString("base64"), tag };
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
// Decrypt function
|
|
28
|
+
function decrypt(
|
|
29
|
+
secretKey: Uint8Array,
|
|
30
|
+
encryptedData: Uint8Array,
|
|
31
|
+
iv: Uint8Array,
|
|
32
|
+
tag: Uint8Array
|
|
33
|
+
) {
|
|
34
|
+
const decipher = createDecipheriv("aes-256-gcm", secretKey, iv);
|
|
35
|
+
decipher.setAuthTag(tag);
|
|
36
|
+
const decrypted = Buffer.concat([
|
|
37
|
+
new Uint8Array(decipher.update(encryptedData)),
|
|
38
|
+
new Uint8Array(decipher.final()),
|
|
39
|
+
]);
|
|
40
|
+
return decrypted.toString("utf8");
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
// Registry of live websocket connections, keyed by webview id.
// `socket` is set on open and nulled on close (see startRPCServer), so a
// null socket means "webview currently disconnected".
// NOTE(review): nothing in this file pushes to or drains `queue` — it is
// initialized empty on first open; confirm it is consumed elsewhere before
// relying on it.
export const socketMap: {
  [webviewId: string]: {
    socket: null | ServerWebSocket<unknown>;
    queue: string[];
  };
} = {};
|
|
49
|
+
|
|
50
|
+
// Start the local RPC websocket server used to talk to webviews.
// Scans ports 50000..65535 and binds the first free one; returns both the
// Bun server handle and the port that was chosen (callers embed the port
// in the webview preload so the page knows where to connect).
// NOTE(review): the commented-out token check below means any local process
// can connect; message authenticity instead relies on the per-webview
// AES-GCM secretKey — confirm that is the intended trust model.
const startRPCServer = () => {
  const startPort = 50000;
  const endPort = 65535;
  const payloadLimit = 1024 * 1024 * 500; // 500MB
  let port = startPort;
  let server = null;

  // Try each port in order until Bun.serve succeeds.
  while (port <= endPort) {
    try {
      server = Bun.serve<{ webviewId: number }>({
        port,
        fetch(req, server) {
          const url = new URL(req.url);
          // const token = new URL(req.url).searchParams.get("token");
          // if (token !== AUTH_TOKEN)
          //   return new Response("Unauthorized", { status: 401 });
          // console.log("fetch!!", url.pathname);
          if (url.pathname === "/socket") {
            const webviewIdString = url.searchParams.get("webviewId");
            if (!webviewIdString) {
              return new Response("Missing webviewId", { status: 400 });
            }
            const webviewId = parseInt(webviewIdString, 10);
            // Upgrade to a websocket; webviewId travels in ws.data so the
            // socket handlers below can route messages to the right view.
            const success = server.upgrade(req, { data: { webviewId } });
            return success
              ? undefined
              : new Response("Upgrade failed", { status: 500 });
          }

          // Any other path is unexpected; log it. NOTE(review): this branch
          // returns undefined (no Response) for non-/socket requests.
          console.log("unhandled RPC Server request", req.url);
        },
        websocket: {
          idleTimeout: 960,
          // 500MB max payload should be plenty
          maxPayloadLength: payloadLimit,
          // Anything beyond the backpressure limit will be dropped
          backpressureLimit: payloadLimit * 2,
          open(ws) {
            const { webviewId } = ws.data;

            // Register (or re-register after a reconnect) this webview's
            // socket in the shared socketMap.
            if (!socketMap[webviewId]) {
              socketMap[webviewId] = { socket: ws, queue: [] };
            } else {
              socketMap[webviewId].socket = ws;
            }
          },
          close(ws, code, reason) {
            const { webviewId } = ws.data;
            console.log("Closed:", webviewId, code, reason);
            // Keep the map entry but null the socket so senders fall back
            // to the non-socket RPC path (see sendMessageToWebviewViaSocket).
            socketMap[webviewId].socket = null;
          },

          message(ws, message) {
            const { webviewId } = ws.data;
            const browserView = BrowserView.getById(webviewId);

            if (browserView.rpcHandler) {
              if (typeof message === "string") {
                try {
                  // Incoming packets are JSON envelopes of base64 fields:
                  // { encryptedData, iv, tag } — decrypt with this view's key.
                  const encryptedPacket = JSON.parse(message);
                  const decrypted = decrypt(
                    browserView.secretKey,
                    base64ToUint8Array(encryptedPacket.encryptedData),
                    base64ToUint8Array(encryptedPacket.iv),
                    base64ToUint8Array(encryptedPacket.tag)
                  );

                  // Note: At this point the secretKey for the webview id would
                  // have had to match the encrypted packet data, so we can trust
                  // that this message can be passed to this browserview's rpc
                  // methods.
                  browserView.rpcHandler(JSON.parse(decrypted));
                } catch (error) {
                  // Bad JSON or failed GCM auth both land here.
                  console.log("Error handling message:", error);
                }
              } else if (message instanceof ArrayBuffer) {
                console.log("TODO: Received ArrayBuffer message:", message);
              }
            }
          },
        },
      });

      break;
    } catch (error: any) {
      // Port taken — advance and retry; anything else is fatal.
      if (error.code === "EADDRINUSE") {
        console.log(`Port ${port} in use, trying next port...`);
        port++;
      } else {
        throw error;
      }
    }
  }

  return { rpcServer: server, rpcPort: port };
};
|
|
146
|
+
|
|
147
|
+
// Start the server eagerly at module load; rpcPort is the port actually bound.
export const { rpcServer, rpcPort } = startRPCServer();

// Will return true if message was sent over websocket
// false if it was not (caller should fallback to postMessage/evaluateJS rpc)
export const sendMessageToWebviewViaSocket = (
  webviewId: number,
  message: any
): boolean => {
  const rpc = socketMap[webviewId];
  const browserView = BrowserView.getById(webviewId);

  // Only attempt the socket path when this webview has an open connection.
  if (rpc?.socket?.readyState === WebSocket.OPEN) {
    try {
      // Serialize, then encrypt with the per-webview AES-GCM key so the
      // page can verify the message came from this process.
      const unencryptedString = JSON.stringify(message);
      const encrypted = encrypt(browserView.secretKey, unencryptedString);

      // Envelope mirrors what the `message` handler in startRPCServer expects.
      const encryptedPacket = {
        encryptedData: encrypted.encrypted,
        iv: encrypted.iv,
        tag: encrypted.tag,
      };

      const encryptedPacketString = JSON.stringify(encryptedPacket);

      rpc.socket.send(encryptedPacketString);
      return true;
    } catch (error) {
      // Fall through to `return false` so the caller uses the fallback path.
      console.error("Error sending message to webview via socket:", error);
    }
  }

  return false;
};

console.log("Server started at", rpcServer?.url.origin);
|
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
import { ffi, type MenuItemConfig } from "../proc/native";
|
|
2
|
+
import electrobunEventEmitter from "../events/eventEmitter";
|
|
3
|
+
import { VIEWS_FOLDER } from "./Paths";
|
|
4
|
+
import { join } from "path";
|
|
5
|
+
import {FFIType} from 'bun:ffi';
|
|
6
|
+
|
|
7
|
+
// Monotonically increasing id handed to each Tray instance; 1-based and
// never reused within a process.
let nextTrayId = 1;
// Live Tray instances keyed by id, so native tray events can be routed back
// (see Tray.getById / Tray.on).
const TrayMap = {};

// Options accepted by the Tray constructor. All fields are optional.
type ConstructorOptions = {
  // Text shown next to (or instead of) the tray icon.
  title?: string;
  // Icon path; "views://" paths resolve inside the bundled views folder
  // (see Tray.resolveImagePath), anything else is used as a plain file path.
  image?: string;
  // Presumably macOS "template image" rendering so the OS can recolor the
  // icon for light/dark menu bars — TODO confirm against the native layer.
  template?: boolean;
  // Icon dimensions in points; 16x16 is the conventional menubar size.
  width?: number;
  height?: number;
};
|
|
17
|
+
|
|
18
|
+
export class Tray {
|
|
19
|
+
id: number = nextTrayId++;
|
|
20
|
+
ptr: FFIType.ptr;
|
|
21
|
+
|
|
22
|
+
constructor({
|
|
23
|
+
title = "",
|
|
24
|
+
image = "",
|
|
25
|
+
template = true,
|
|
26
|
+
width = 16,
|
|
27
|
+
height = 16,
|
|
28
|
+
}: ConstructorOptions = {}) {
|
|
29
|
+
console.log("img", image);
|
|
30
|
+
console.log("img", this.resolveImagePath(image));
|
|
31
|
+
this.ptr = ffi.request.createTray({
|
|
32
|
+
id: this.id,
|
|
33
|
+
title,
|
|
34
|
+
image: this.resolveImagePath(image),
|
|
35
|
+
template,
|
|
36
|
+
width,
|
|
37
|
+
height,
|
|
38
|
+
});
|
|
39
|
+
|
|
40
|
+
TrayMap[this.id] = this;
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
resolveImagePath(imgPath: string) {
|
|
44
|
+
if (imgPath.startsWith("views://")) {
|
|
45
|
+
return join(VIEWS_FOLDER, imgPath.replace("views://", ""));
|
|
46
|
+
} else {
|
|
47
|
+
// can specify any file path here
|
|
48
|
+
return imgPath;
|
|
49
|
+
}
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
setTitle(title: string) {
|
|
53
|
+
ffi.request.setTrayTitle({ id: this.id, title });
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
setImage(imgPath: string) {
|
|
57
|
+
ffi.request.setTrayImage({
|
|
58
|
+
id: this.id,
|
|
59
|
+
image: this.resolveImagePath(imgPath),
|
|
60
|
+
});
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
setMenu(menu: Array<MenuItemConfig>) {
|
|
64
|
+
const menuWithDefaults = menuConfigWithDefaults(menu);
|
|
65
|
+
ffi.request.setTrayMenu({
|
|
66
|
+
id: this.id,
|
|
67
|
+
menuConfig: JSON.stringify(menuWithDefaults),
|
|
68
|
+
});
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
on(name: "tray-clicked", handler) {
|
|
72
|
+
const specificName = `${name}-${this.id}`;
|
|
73
|
+
electrobunEventEmitter.on(specificName, handler);
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
static getById(id: number) {
|
|
77
|
+
return TrayMap[id];
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
static getAll() {
|
|
81
|
+
return Object.values(TrayMap);
|
|
82
|
+
}
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
const menuConfigWithDefaults = (
|
|
86
|
+
menu: Array<MenuItemConfig>
|
|
87
|
+
): Array<MenuItemConfig> => {
|
|
88
|
+
return menu.map((item) => {
|
|
89
|
+
if (item.type === "divider" || item.type === "separator") {
|
|
90
|
+
return { type: "divider" };
|
|
91
|
+
} else {
|
|
92
|
+
return {
|
|
93
|
+
label: item.label || "",
|
|
94
|
+
type: item.type || "normal",
|
|
95
|
+
action: item.action || "",
|
|
96
|
+
// default enabled to true unless explicitly set to false
|
|
97
|
+
enabled: item.enabled === false ? false : true,
|
|
98
|
+
checked: Boolean(item.checked),
|
|
99
|
+
hidden: Boolean(item.hidden),
|
|
100
|
+
tooltip: item.tooltip || undefined,
|
|
101
|
+
...(item.submenu
|
|
102
|
+
? { submenu: menuConfigWithDefaults(item.submenu) }
|
|
103
|
+
: {}),
|
|
104
|
+
};
|
|
105
|
+
}
|
|
106
|
+
});
|
|
107
|
+
};
|
|
@@ -0,0 +1,395 @@
|
|
|
1
|
+
import { join, dirname, resolve } from "path";
|
|
2
|
+
import { homedir } from "os";
|
|
3
|
+
import { renameSync, unlinkSync, mkdirSync, rmdirSync, statSync } from "fs";
|
|
4
|
+
import tar from "tar";
|
|
5
|
+
import { ZstdInit } from "@oneidentity/zstd-js/wasm";
|
|
6
|
+
import { OS as currentOS, ARCH as currentArch } from '../../shared/platform';
|
|
7
|
+
|
|
8
|
+
// User-level Application Support directory; updater state lives under
// <appSupportDir>/<identifier>/<name>. NOTE(review): this path is
// macOS-specific — confirm behavior before running on other platforms.
const appSupportDir = join(homedir(), "Library", "Application Support");

// todo (yoav): share type with cli
// Contents of the bundled Resources/version.json; loaded lazily by
// Updater.getLocallocalInfo() and cached for the life of the process.
let localInfo: {
  version: string;
  hash: string;
  bucketUrl: string;
  channel: string;
  name: string;
  identifier: string;
};

// Most recent update.json fetched from the release bucket, plus the
// updater's own progress flags (updateAvailable / updateReady / error).
let updateInfo: {
  version: string;
  hash: string;
  updateAvailable: boolean;
  updateReady: boolean;
  error: string;
};
|
|
27
|
+
|
|
28
|
+
// Self-updater: checks a release bucket for a newer build, downloads it
// (via incremental bspatch patches when possible, full .tar.zst otherwise)
// and swaps the running .app bundle in place. The flow is macOS-oriented
// (app bundles, `open`, Contents/MacOS cwd) — hedge before reusing elsewhere.
// NOTE(review): `join()` is used to build URLs below; the WHATWG URL parser
// tolerates the collapsed "https:/" it produces for special schemes, but
// confirm bucketUrl values survive this on all code paths.
const Updater = {
  // workaround for some weird state stuff in this old version of bun
  // todo: revisit after updating to the latest bun
  updateInfo: () => {
    return updateInfo;
  },
  // todo: allow switching channels, by default will check the current channel
  // Fetch update.json for this channel/platform and compare hashes against
  // the local build. Returns the shared updateInfo object on success, or a
  // synthetic error-shaped object on fetch failure or on the dev channel.
  checkForUpdate: async () => {
    const localInfo = await Updater.getLocallocalInfo();

    // Dev builds never self-update.
    if (localInfo.channel === "dev") {
      return {
        version: localInfo.version,
        hash: localInfo.hash,
        updateAvailable: false,
        updateReady: false,
        error: "",
      };
    }

    // NOTE(review): channelBucketUrl is computed but unused here.
    const channelBucketUrl = await Updater.channelBucketUrl();
    // Random query param defeats CDN/browser caching of update.json.
    const cacheBuster = Math.random().toString(36).substring(7);
    const platformFolder = `${localInfo.channel}-${currentOS}-${currentArch}`;
    const updateInfoUrl = join(localInfo.bucketUrl, platformFolder, `update.json?${cacheBuster}`);

    try {
      const updateInfoResponse = await fetch(updateInfoUrl);

      if (updateInfoResponse.ok) {
        // todo: this seems brittle
        updateInfo = await updateInfoResponse.json();

        // Any hash difference counts as "update available" (no ordering).
        if (updateInfo.hash !== localInfo.hash) {
          updateInfo.updateAvailable = true;
        }
      } else {
        return {
          version: "",
          hash: "",
          updateAvailable: false,
          updateReady: false,
          error: `Failed to fetch update info from ${updateInfoUrl}`,
        };
      }
    } catch (error) {
      return {
        version: "",
        hash: "",
        updateAvailable: false,
        updateReady: false,
        error: `Failed to fetch update info from ${updateInfoUrl}`,
      };
    }

    return updateInfo;
  },

  // Bring <latestHash>.tar into the self-extraction folder: first by chaining
  // bspatch patches from the current tar, falling back to downloading the
  // full .tar.zst. Sets updateInfo.updateReady / updateInfo.error.
  downloadUpdate: async () => {
    const appDataFolder = await Updater.appDataFolder();
    // NOTE(review): channelBucketUrl is computed but unused here.
    const channelBucketUrl = await Updater.channelBucketUrl();
    const appFileName = localInfo.name;

    let currentHash = (await Updater.getLocallocalInfo()).hash;
    let latestHash = (await Updater.checkForUpdate()).hash;

    const extractionFolder = join(appDataFolder, "self-extraction");
    if (!(await Bun.file(extractionFolder).exists())) {
      mkdirSync(extractionFolder, { recursive: true });
    }

    let currentTarPath = join(extractionFolder, `${currentHash}.tar`);
    const latestTarPath = join(extractionFolder, `${latestHash}.tar`);

    // Hashes visited while patching, used to detect cyclical update chains.
    const seenHashes = [];

    // todo (yoav): add a check to the while loop that checks for a hash we've seen before
    // so that update loops that are cyclical can be broken
    if (!(await Bun.file(latestTarPath).exists())) {
      // Patch chain: currentHash -> ... -> latestHash, one .patch per hop.
      while (currentHash !== latestHash) {
        seenHashes.push(currentHash);
        const currentTar = Bun.file(currentTarPath);

        if (!(await currentTar.exists())) {
          // tar file of the current version not found
          // so we can't patch it. We need the byte-for-byte tar file
          // so break out and download the full version
          break;
        }

        // check if there's a patch file for it
        const platformFolder = `${localInfo.channel}-${currentOS}-${currentArch}`;
        const patchResponse = await fetch(
          join(localInfo.bucketUrl, platformFolder, `${currentHash}.patch`)
        );

        if (!patchResponse.ok) {
          // patch not found
          break;
        }

        // The patch file's name is the hash of the "from" version
        const patchFilePath = join(
          appDataFolder,
          "self-extraction",
          `${currentHash}.patch`
        );
        await Bun.write(patchFilePath, await patchResponse.arrayBuffer());
        // patch it to a tmp name
        const tmpPatchedTarFilePath = join(
          appDataFolder,
          "self-extraction",
          `from-${currentHash}.tar`
        );

        // Note: cwd should be Contents/MacOS/ where the binaries are in the amc app bundle
        // bspatch is shipped alongside the app binary; it rewrites the tar
        // to the next version in the chain.
        try {
          Bun.spawnSync([
            "bspatch",
            currentTarPath,
            tmpPatchedTarFilePath,
            patchFilePath,
          ]);
        } catch (error) {
          break;
        }

        let versionSubpath = "";
        const untarDir = join(appDataFolder, "self-extraction", "tmpuntar");
        mkdirSync(untarDir, { recursive: true });

        // extract just the version.json from the patched tar file so we can see what hash it is now
        const resourcesDir = 'Resources'; // Always use capitalized Resources
        await tar.x({
          // gzip: false,
          file: tmpPatchedTarFilePath,
          cwd: untarDir,
          filter: (path, stat) => {
            if (path.endsWith(`${resourcesDir}/version.json`)) {
              versionSubpath = path;
              return true;
            } else {
              return false;
            }
          },
        });

        const currentVersionJson = await Bun.file(
          join(untarDir, versionSubpath)
        ).json();
        const nextHash = currentVersionJson.hash;

        // A hash we've already visited means the patch chain loops — bail.
        if (seenHashes.includes(nextHash)) {
          console.log("Warning: cyclical update detected");
          break;
        }

        seenHashes.push(nextHash);

        if (!nextHash) {
          break;
        }
        // Sync the patched tar file to the new hash
        const updatedTarPath = join(
          appDataFolder,
          "self-extraction",
          `${nextHash}.tar`
        );
        renameSync(tmpPatchedTarFilePath, updatedTarPath);

        // delete the old tar file
        unlinkSync(currentTarPath);
        unlinkSync(patchFilePath);
        rmdirSync(untarDir, { recursive: true });

        currentHash = nextHash;
        currentTarPath = join(
          appDataFolder,
          "self-extraction",
          `${currentHash}.tar`
        );
        // loop through applying patches until we reach the latest version
        // if we get stuck then exit and just download the full latest version
      }

      // If we weren't able to apply patches to the current version,
      // then just download it and unpack it
      if (currentHash !== latestHash) {
        const cacheBuster = Math.random().toString(36).substring(7);
        const platformFolder = `${localInfo.channel}-${currentOS}-${currentArch}`;
        const urlToLatestTarball = join(
          localInfo.bucketUrl,
          platformFolder,
          `${appFileName}.app.tar.zst`
        );
        const prevVersionCompressedTarballPath = join(
          appDataFolder,
          "self-extraction",
          "latest.tar.zst"
        );
        const response = await fetch(urlToLatestTarball + `?${cacheBuster}`);

        // Stream the archive to disk rather than buffering it in memory.
        if (response.ok && response.body) {
          const reader = response.body.getReader();

          const writer = Bun.file(prevVersionCompressedTarballPath).writer();

          while (true) {
            const { done, value } = await reader.read();
            if (done) break;
            await writer.write(value);
          }
          await writer.flush();
          writer.end();
        } else {
          console.log("latest version not found at: ", urlToLatestTarball);
        }

        // Decompress .tar.zst -> .tar via the wasm zstd build.
        // NOTE(review): this runs even when the fetch above failed, reading
        // a possibly stale/absent latest.tar.zst — confirm intended.
        await ZstdInit().then(async ({ ZstdSimple }) => {
          const data = new Uint8Array(
            await Bun.file(prevVersionCompressedTarballPath).arrayBuffer()
          );
          const uncompressedData = ZstdSimple.decompress(data);

          await Bun.write(latestTarPath, uncompressedData);
        });

        unlinkSync(prevVersionCompressedTarballPath);
        try {
          unlinkSync(currentTarPath);
        } catch (error) {
          // Note: ignore the error. it may have already been deleted by the patching process
          // if the patching process only got halfway
        }
      }
    }

    // Note: Bun.file().exists() caches the result, so we need a new instance
    // of Bun.file() here to check again
    if (await Bun.file(latestTarPath).exists()) {
      // download patch for this version, apply it.
      // check for patch from that tar and apply it, until it matches the latest version
      // as a fallback it should just download and unpack the latest version
      updateInfo.updateReady = true;
    } else {
      updateInfo.error = "Failed to download latest version";
    }
  },

  // todo (yoav): this should emit an event so app can cleanup or block the restart
  // todo (yoav): rename this to quitAndApplyUpdate or something
  // Extract the downloaded tar, swap the running .app bundle for the new
  // one (keeping the old one as backup.app), relaunch, and exit.
  applyUpdate: async () => {
    if (updateInfo?.updateReady) {
      const appDataFolder = await Updater.appDataFolder();
      const extractionFolder = join(appDataFolder, "self-extraction");
      if (!(await Bun.file(extractionFolder).exists())) {
        mkdirSync(extractionFolder, { recursive: true });
      }

      let latestHash = (await Updater.checkForUpdate()).hash;
      const latestTarPath = join(extractionFolder, `${latestHash}.tar`);

      let appBundleSubpath: string = "";

      if (await Bun.file(latestTarPath).exists()) {
        await tar.x({
          // gzip: false,
          file: latestTarPath,
          cwd: extractionFolder,
          onentry: (entry) => {
            // find the first .app bundle in the tarball
            // Some apps may have nested .app bundles
            if (!appBundleSubpath && entry.path.endsWith(".app/")) {
              appBundleSubpath = entry.path;
            }
          },
        });

        if (!appBundleSubpath) {
          console.error("Failed to find app bundle in tarball");
          return;
        }

        // Note: resolve here removes the extra trailing / that the tar file adds
        const newAppBundlePath = resolve(
          join(extractionFolder, appBundleSubpath)
        );
        // Note: dirname(process.execPath) is the path to the running app bundle's
        // Contents/MacOS directory
        const runningAppBundlePath = resolve(
          dirname(process.execPath),
          "..",
          ".."
        );
        const backupAppBundlePath = join(extractionFolder, "backup.app");

        try {
          // const backupState = statSync(backupAppBundlePath);
          // Drop any stale backup before moving the live bundle aside.
          if (statSync(backupAppBundlePath, { throwIfNoEntry: false })) {
            rmdirSync(backupAppBundlePath, { recursive: true });
          } else {
            console.log("backupAppBundlePath does not exist");
          }
          renameSync(runningAppBundlePath, backupAppBundlePath);
          renameSync(newAppBundlePath, runningAppBundlePath);
        } catch (error) {
          console.error("Failed to replace app with new version", error);
          return;
        }

        // Relaunch the (now updated) bundle, then terminate this process.
        await Bun.spawn(["open", runningAppBundlePath]);
        process.exit(0);
      }
    }
  },

  // Base URL for this channel/platform's folder in the release bucket.
  channelBucketUrl: async () => {
    await Updater.getLocallocalInfo();
    const platformFolder = `${localInfo.channel}-${currentOS}-${currentArch}`;
    return join(localInfo.bucketUrl, platformFolder);
  },

  // Per-app writable data folder under Application Support.
  appDataFolder: async () => {
    await Updater.getLocallocalInfo();
    const appDataFolder = join(
      appSupportDir,
      localInfo.identifier,
      localInfo.name
    );

    return appDataFolder;
  },

  // TODO: consider moving this from "Updater.localInfo" to "BuildVars"
  // Async accessors for individual fields of the cached version.json.
  localInfo: {
    version: async () => {
      return (await Updater.getLocallocalInfo()).version;
    },
    hash: async () => {
      return (await Updater.getLocallocalInfo()).hash;
    },
    channel: async () => {
      return (await Updater.getLocallocalInfo()).channel;
    },
    bucketUrl: async () => {
      return (await Updater.getLocallocalInfo()).bucketUrl;
    },
  },

  // Lazily load ../Resources/version.json (relative to the binary's cwd)
  // into the module-level localInfo cache.
  getLocallocalInfo: async () => {
    if (localInfo) {
      return localInfo;
    }

    try {
      const resourcesDir = 'Resources'; // Always use capitalized Resources
      localInfo = await Bun.file(`../${resourcesDir}/version.json`).json();
      return localInfo;
    } catch (error) {
      // Handle the error
      console.error("Failed to read version.json", error);

      // Then rethrow so the app crashes
      throw error;
    }
  },
};

export { Updater };
|