@utoo/pack 1.1.2 → 1.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -2
- package/cjs/index.d.ts +7 -6
- package/cjs/index.js +6 -5
- package/config_schema.json +1 -1
- package/esm/index.d.ts +7 -6
- package/esm/index.js +6 -5
- package/package.json +19 -18
- package/cjs/build.d.ts +0 -3
- package/cjs/build.js +0 -82
- package/cjs/dev.d.ts +0 -43
- package/cjs/dev.js +0 -376
- package/cjs/find-root.d.ts +0 -4
- package/cjs/find-root.js +0 -75
- package/cjs/hmr.d.ts +0 -80
- package/cjs/hmr.js +0 -286
- package/cjs/loaderWorkerPool.d.ts +0 -1
- package/cjs/loaderWorkerPool.js +0 -35
- package/cjs/mkcert.d.ts +0 -7
- package/cjs/mkcert.js +0 -183
- package/cjs/project.d.ts +0 -43
- package/cjs/project.js +0 -285
- package/cjs/types.d.ts +0 -300
- package/cjs/types.js +0 -2
- package/cjs/util.d.ts +0 -19
- package/cjs/util.js +0 -155
- package/cjs/webpackCompat.d.ts +0 -7
- package/cjs/webpackCompat.js +0 -408
- package/cjs/xcodeProfile.d.ts +0 -1
- package/cjs/xcodeProfile.js +0 -16
- package/esm/build.d.ts +0 -3
- package/esm/build.js +0 -79
- package/esm/dev.d.ts +0 -43
- package/esm/dev.js +0 -360
- package/esm/find-root.d.ts +0 -4
- package/esm/find-root.js +0 -66
- package/esm/hmr.d.ts +0 -80
- package/esm/hmr.js +0 -279
- package/esm/loaderWorkerPool.d.ts +0 -1
- package/esm/loaderWorkerPool.js +0 -32
- package/esm/mkcert.d.ts +0 -7
- package/esm/mkcert.js +0 -176
- package/esm/project.d.ts +0 -43
- package/esm/project.js +0 -247
- package/esm/types.d.ts +0 -300
- package/esm/types.js +0 -1
- package/esm/util.d.ts +0 -19
- package/esm/util.js +0 -141
- package/esm/webpackCompat.d.ts +0 -7
- package/esm/webpackCompat.js +0 -401
- package/esm/xcodeProfile.d.ts +0 -1
- package/esm/xcodeProfile.js +0 -13
package/cjs/hmr.js
DELETED
@@ -1,286 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.FAST_REFRESH_RUNTIME_RELOAD = void 0;
-exports.createHotReloader = createHotReloader;
-const nanoid_1 = require("nanoid");
-const ws_1 = __importDefault(require("ws"));
-const project_1 = require("./project");
-const util_1 = require("./util");
-const wsServer = new ws_1.default.Server({ noServer: true });
-const sessionId = Math.floor(Number.MAX_SAFE_INTEGER * Math.random());
-exports.FAST_REFRESH_RUNTIME_RELOAD = "Fast Refresh had to perform a full reload due to a runtime error.";
-async function createHotReloader(bundleOptions, projectPath, rootPath) {
-    var _a;
-    const createProject = (0, project_1.projectFactory)();
-    const project = await createProject({
-        processEnv: (_a = bundleOptions.processEnv) !== null && _a !== void 0 ? _a : {},
-        defineEnv: (0, util_1.createDefineEnv)({
-            config: bundleOptions.config,
-            dev: true,
-            optionDefineEnv: bundleOptions.defineEnv,
-        }),
-        watch: {
-            enable: true,
-        },
-        dev: true,
-        buildId: bundleOptions.buildId || (0, nanoid_1.nanoid)(),
-        config: {
-            ...bundleOptions.config,
-            mode: "development",
-            optimization: {
-                ...bundleOptions.config.optimization,
-                minify: false,
-                moduleIds: "named",
-            },
-        },
-        projectPath: projectPath || process.cwd(),
-        rootPath: rootPath || projectPath || process.cwd(),
-        packPath: (0, util_1.getPackPath)(),
-    }, {
-        persistentCaching: true,
-    });
-    const entrypointsSubscription = project.entrypointsSubscribe();
-    let currentEntriesHandlingResolve;
-    let currentEntriesHandling = new Promise((resolve) => (currentEntriesHandlingResolve = resolve));
-    let hmrEventHappened = false;
-    let hmrHash = 0;
-    const clients = new Set();
-    const clientStates = new WeakMap();
-    function sendToClient(client, payload) {
-        client.send(JSON.stringify(payload));
-    }
-    function sendEnqueuedMessages() {
-        for (const client of clients) {
-            const state = clientStates.get(client);
-            if (!state) {
-                continue;
-            }
-            for (const payload of state.hmrPayloads.values()) {
-                sendToClient(client, payload);
-            }
-            state.hmrPayloads.clear();
-            if (state.turbopackUpdates.length > 0) {
-                sendToClient(client, {
-                    action: "turbopack-message" /* HMR_ACTIONS_SENT_TO_BROWSER.TURBOPACK_MESSAGE */,
-                    data: state.turbopackUpdates,
-                });
-                state.turbopackUpdates.length = 0;
-            }
-        }
-    }
-    const sendEnqueuedMessagesDebounce = (0, util_1.debounce)(sendEnqueuedMessages, 2);
-    function sendTurbopackMessage(payload) {
-        var _a;
-        payload.diagnostics = [];
-        payload.issues = [];
-        for (const client of clients) {
-            (_a = clientStates.get(client)) === null || _a === void 0 ? void 0 : _a.turbopackUpdates.push(payload);
-        }
-        hmrEventHappened = true;
-        sendEnqueuedMessagesDebounce();
-    }
-    async function subscribeToHmrEvents(client, id) {
-        const state = clientStates.get(client);
-        if (!state || state.subscriptions.has(id)) {
-            return;
-        }
-        const subscription = project.hmrEvents(id);
-        state.subscriptions.set(id, subscription);
-        // The subscription will always emit once, which is the initial
-        // computation. This is not a change, so swallow it.
-        try {
-            await subscription.next();
-            for await (const data of subscription) {
-                (0, util_1.processIssues)(data, true, true);
-                if (data.type !== "issues") {
-                    sendTurbopackMessage(data);
-                }
-            }
-        }
-        catch (e) {
-            // The client might be using an HMR session from a previous server, tell them
-            // to fully reload the page to resolve the issue. We can't use
-            // `hotReloader.send` since that would force every connected client to
-            // reload, only this client is out of date.
-            const reloadAction = {
-                action: "reload" /* HMR_ACTIONS_SENT_TO_BROWSER.RELOAD */,
-                data: `error in HMR event subscription for ${id}: ${e}`,
-            };
-            sendToClient(client, reloadAction);
-            client.close();
-            return;
-        }
-    }
-    function unsubscribeFromHmrEvents(client, id) {
-        const state = clientStates.get(client);
-        if (!state) {
-            return;
-        }
-        const subscription = state.subscriptions.get(id);
-        subscription === null || subscription === void 0 ? void 0 : subscription.return();
-    }
-    async function handleEntrypointsSubscription() {
-        for await (const entrypoints of entrypointsSubscription) {
-            if (!currentEntriesHandlingResolve) {
-                currentEntriesHandling = new Promise(
-                // eslint-disable-next-line no-loop-func
-                (resolve) => (currentEntriesHandlingResolve = resolve));
-            }
-            await Promise.all(entrypoints.apps.map((l) => l.writeToDisk().then((res) => (0, util_1.processIssues)(res, true, true))));
-            currentEntriesHandlingResolve();
-            currentEntriesHandlingResolve = undefined;
-        }
-    }
-    const hotReloader = {
-        turbopackProject: project,
-        serverStats: null,
-        onHMR(req, socket, head, onUpgrade) {
-            wsServer.handleUpgrade(req, socket, head, (client) => {
-                onUpgrade === null || onUpgrade === void 0 ? void 0 : onUpgrade(client);
-                const subscriptions = new Map();
-                clients.add(client);
-                clientStates.set(client, {
-                    hmrPayloads: new Map(),
-                    turbopackUpdates: [],
-                    subscriptions,
-                });
-                client.on("close", () => {
-                    var _a;
-                    // Remove active subscriptions
-                    for (const subscription of subscriptions.values()) {
-                        (_a = subscription.return) === null || _a === void 0 ? void 0 : _a.call(subscription);
-                    }
-                    clientStates.delete(client);
-                    clients.delete(client);
-                });
-                client.addEventListener("message", ({ data }) => {
-                    const parsedData = JSON.parse(typeof data !== "string" ? data.toString() : data);
-                    // messages
-                    switch (parsedData.event) {
-                        case "client-error": // { errorCount, clientId }
-                        case "client-warning": // { warningCount, clientId }
-                        case "client-success": // { clientId }
-                        case "client-full-reload": // { stackTrace, hadRuntimeError }
-                            const { hadRuntimeError, dependencyChain } = parsedData;
-                            if (hadRuntimeError) {
-                                console.warn(exports.FAST_REFRESH_RUNTIME_RELOAD);
-                            }
-                            if (Array.isArray(dependencyChain) &&
-                                typeof dependencyChain[0] === "string") {
-                                const cleanedModulePath = dependencyChain[0]
-                                    .replace(/^\[project\]/, ".")
-                                    .replace(/ \[.*\] \(.*\)$/, "");
-                                console.warn(`Fast Refresh had to perform a full reload when ${cleanedModulePath} changed.`);
-                            }
-                            break;
-                        default:
-                            // Might be a Turbopack message...
-                            if (!parsedData.type) {
-                                throw new Error(`unrecognized HMR message "${data}"`);
-                            }
-                    }
-                    // Turbopack messages
-                    switch (parsedData.type) {
-                        case "turbopack-subscribe":
-                            subscribeToHmrEvents(client, parsedData.path);
-                            break;
-                        case "turbopack-unsubscribe":
-                            unsubscribeFromHmrEvents(client, parsedData.path);
-                            break;
-                        default:
-                            if (!parsedData.event) {
-                                throw new Error(`unrecognized Turbopack HMR message "${data}"`);
-                            }
-                    }
-                });
-                const turbopackConnected = {
-                    action: "turbopack-connected" /* HMR_ACTIONS_SENT_TO_BROWSER.TURBOPACK_CONNECTED */,
-                    data: { sessionId },
-                };
-                sendToClient(client, turbopackConnected);
-                const errors = [];
-                (async function () {
-                    const sync = {
-                        action: "sync" /* HMR_ACTIONS_SENT_TO_BROWSER.SYNC */,
-                        errors,
-                        warnings: [],
-                        hash: "",
-                    };
-                    sendToClient(client, sync);
-                })();
-            });
-        },
-        send(action) {
-            const payload = JSON.stringify(action);
-            for (const client of clients) {
-                client.send(payload);
-            }
-        },
-        setHmrServerError(_error) {
-            // Not implemented yet.
-        },
-        clearHmrServerError() {
-            // Not implemented yet.
-        },
-        async start() { },
-        async buildFallbackError() {
-            // Not implemented yet.
-        },
-        close() {
-            for (const wsClient of clients) {
-                // it's okay to not cleanly close these websocket connections, this is dev
-                wsClient.terminate();
-            }
-            clients.clear();
-        },
-    };
-    handleEntrypointsSubscription().catch((err) => {
-        console.error(err);
-        process.exit(1);
-    });
-    // Write empty manifests
-    await currentEntriesHandling;
-    async function handleProjectUpdates() {
-        for await (const updateMessage of project.updateInfoSubscribe(30)) {
-            switch (updateMessage.updateType) {
-                case "start": {
-                    hotReloader.send({ action: "building" /* HMR_ACTIONS_SENT_TO_BROWSER.BUILDING */ });
-                    break;
-                }
-                case "end": {
-                    sendEnqueuedMessages();
-                    const errors = new Map();
-                    for (const client of clients) {
-                        const state = clientStates.get(client);
-                        if (!state) {
-                            continue;
-                        }
-                        const clientErrors = new Map(errors);
-                        sendToClient(client, {
-                            action: "built" /* HMR_ACTIONS_SENT_TO_BROWSER.BUILT */,
-                            hash: String(++hmrHash),
-                            errors: [...clientErrors.values()],
-                            warnings: [],
-                        });
-                    }
-                    if (hmrEventHappened) {
-                        const time = updateMessage.value.duration;
-                        const timeMessage = time > 2000 ? `${Math.round(time / 100) / 10}s` : `${time}ms`;
-                        console.log(`Compiled in ${timeMessage}`);
-                        hmrEventHappened = false;
-                    }
-                    break;
-                }
-                default:
-            }
-        }
-    }
-    handleProjectUpdates().catch((err) => {
-        console.error(err);
-        process.exit(1);
-    });
-    return hotReloader;
-}
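For orientation, the deleted hmr.js exported createHotReloader(bundleOptions, projectPath, rootPath) and returned a hotReloader whose onHMR method handles WebSocket upgrade requests. Below is a minimal, hypothetical wiring sketch based only on the signatures visible in this diff; the import path and the exact bundleOptions shape are assumptions, not part of the published API surface.

```ts
// Sketch only: wiring the removed hot reloader into a plain HTTP dev server.
// The import path is assumed from the published file layout (cjs/hmr.js).
import http from "node:http";
import { createHotReloader } from "@utoo/pack/cjs/hmr";

async function main() {
  // bundleOptions shape inferred from the deleted code: it carried `config`
  // (spread into the project config) and optionally processEnv/defineEnv/buildId.
  const hotReloader = await createHotReloader(
    { config: { mode: "development", optimization: {} } },
    process.cwd(),
  );
  const server = http.createServer((_req, res) => res.end("ok"));
  // onHMR(req, socket, head, onUpgrade?) upgrades the connection to the HMR WebSocket.
  server.on("upgrade", (req, socket, head) => hotReloader.onHMR(req, socket, head));
  server.listen(3000);
}

main().catch(console.error);
```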
package/cjs/loaderWorkerPool.d.ts
DELETED
@@ -1 +0,0 @@
-export declare function runLoaderWorkerPool(binding: typeof import("./binding"), bindingPath: string): Promise<void>;
package/cjs/loaderWorkerPool.js
DELETED
@@ -1,35 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.runLoaderWorkerPool = runLoaderWorkerPool;
-const worker_threads_1 = require("worker_threads");
-const loaderWorkers = {};
-function getPoolId(cwd, filename) {
-    return `${cwd}:${filename}`;
-}
-let workerSchedulerRegistered = false;
-async function runLoaderWorkerPool(binding, bindingPath) {
-    if (workerSchedulerRegistered) {
-        return;
-    }
-    binding.registerWorkerScheduler((creation) => {
-        const { options: { filename, cwd }, } = creation;
-        let poolId = getPoolId(cwd, filename);
-        const worker = new worker_threads_1.Worker(filename, {
-            workerData: {
-                bindingPath,
-                cwd,
-            },
-        });
-        worker.unref();
-        const workers = loaderWorkers[poolId] || (loaderWorkers[poolId] = new Map());
-        workers.set(worker.threadId, worker);
-    }, (termination) => {
-        var _a;
-        const { options: { filename, cwd }, workerId, } = termination;
-        let poolId = getPoolId(cwd, filename);
-        const workers = loaderWorkers[poolId];
-        (_a = workers.get(workerId)) === null || _a === void 0 ? void 0 : _a.terminate();
-        workers.delete(workerId);
-    });
-    workerSchedulerRegistered = true;
-}
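The removed runLoaderWorkerPool registered a worker scheduler on the native binding so loader work could run in worker_threads, forwarding the binding path to each Worker via workerData. A hypothetical invocation is sketched below; the import paths and the binding path are illustrative assumptions only.

```ts
// Sketch only: how runLoaderWorkerPool was presumably driven by the caller.
import { createRequire } from "node:module";

const require = createRequire(import.meta.url);

async function startLoaderWorkers(bindingPath: string) {
  // First argument: the loaded binding module; second: its path, which the
  // scheduler passed through to each spawned Worker's workerData.
  const binding = require(bindingPath);
  const { runLoaderWorkerPool } = require("@utoo/pack/cjs/loaderWorkerPool"); // assumed path
  await runLoaderWorkerPool(binding, bindingPath);
}

startLoaderWorkers("/path/to/binding.js").catch(console.error); // illustrative path
```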
package/cjs/mkcert.d.ts
DELETED
@@ -1,7 +0,0 @@
-export interface SelfSignedCertificate {
-    key: string;
-    cert: string;
-    rootCA?: string;
-}
-export declare function createSelfSignedCertificate(host?: string, certDir?: string): Promise<SelfSignedCertificate | undefined>;
-export declare function getCacheDirectory(fileDirectory: string, envPath?: string): string;
package/cjs/mkcert.js
DELETED
@@ -1,183 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.createSelfSignedCertificate = createSelfSignedCertificate;
-exports.getCacheDirectory = getCacheDirectory;
-const node_child_process_1 = require("node:child_process");
-const node_crypto_1 = require("node:crypto");
-const node_fs_1 = __importDefault(require("node:fs"));
-const node_path_1 = __importDefault(require("node:path"));
-const os_1 = __importDefault(require("os"));
-const { WritableStream } = require("node:stream/web");
-const MKCERT_VERSION = "v1.4.4";
-function getBinaryName() {
-    const platform = process.platform;
-    const arch = process.arch === "x64" ? "amd64" : process.arch;
-    if (platform === "win32") {
-        return `mkcert-${MKCERT_VERSION}-windows-${arch}.exe`;
-    }
-    if (platform === "darwin") {
-        return `mkcert-${MKCERT_VERSION}-darwin-${arch}`;
-    }
-    if (platform === "linux") {
-        return `mkcert-${MKCERT_VERSION}-linux-${arch}`;
-    }
-    throw new Error(`Unsupported platform: ${platform}`);
-}
-async function downloadBinary() {
-    try {
-        const binaryName = getBinaryName();
-        const cacheDirectory = getCacheDirectory("mkcert");
-        const binaryPath = node_path_1.default.join(cacheDirectory, binaryName);
-        if (node_fs_1.default.existsSync(binaryPath)) {
-            return binaryPath;
-        }
-        const downloadUrl = `https://github.com/FiloSottile/mkcert/releases/download/${MKCERT_VERSION}/${binaryName}`;
-        await node_fs_1.default.promises.mkdir(cacheDirectory, { recursive: true });
-        console.log(`Downloading mkcert package...`);
-        const response = await fetch(downloadUrl);
-        if (!response.ok || !response.body) {
-            throw new Error(`request failed with status ${response.status}`);
-        }
-        console.log(`Download response was successful, writing to disk`);
-        const binaryWriteStream = node_fs_1.default.createWriteStream(binaryPath);
-        await response.body.pipeTo(new WritableStream({
-            write(chunk) {
-                return new Promise((resolve, reject) => {
-                    binaryWriteStream.write(chunk, (error) => {
-                        if (error) {
-                            reject(error);
-                            return;
-                        }
-                        resolve();
-                    });
-                });
-            },
-            close() {
-                return new Promise((resolve, reject) => {
-                    binaryWriteStream.close((error) => {
-                        if (error) {
-                            reject(error);
-                            return;
-                        }
-                        resolve();
-                    });
-                });
-            },
-        }));
-        await node_fs_1.default.promises.chmod(binaryPath, 0o755);
-        return binaryPath;
-    }
-    catch (err) {
-        console.error("Error downloading mkcert:", err);
-    }
-}
-async function createSelfSignedCertificate(host, certDir = "certificates") {
-    try {
-        const binaryPath = await downloadBinary();
-        if (!binaryPath)
-            throw new Error("missing mkcert binary");
-        const resolvedCertDir = node_path_1.default.resolve(process.cwd(), `./${certDir}`);
-        await node_fs_1.default.promises.mkdir(resolvedCertDir, {
-            recursive: true,
-        });
-        const keyPath = node_path_1.default.resolve(resolvedCertDir, "localhost-key.pem");
-        const certPath = node_path_1.default.resolve(resolvedCertDir, "localhost.pem");
-        if (node_fs_1.default.existsSync(keyPath) && node_fs_1.default.existsSync(certPath)) {
-            const cert = new node_crypto_1.X509Certificate(node_fs_1.default.readFileSync(certPath));
-            const key = node_fs_1.default.readFileSync(keyPath);
-            if (cert.checkHost(host !== null && host !== void 0 ? host : "localhost") &&
-                cert.checkPrivateKey((0, node_crypto_1.createPrivateKey)(key))) {
-                console.log("Using already generated self signed certificate");
-                const caLocation = (0, node_child_process_1.execSync)(`"${binaryPath}" -CAROOT`)
-                    .toString()
-                    .trim();
-                return {
-                    key: keyPath,
-                    cert: certPath,
-                    rootCA: `${caLocation}/rootCA.pem`,
-                };
-            }
-        }
-        console.log("Attempting to generate self signed certificate. This may prompt for your password");
-        const defaultHosts = ["localhost", "127.0.0.1", "::1"];
-        const hosts = host && !defaultHosts.includes(host)
-            ? [...defaultHosts, host]
-            : defaultHosts;
-        (0, node_child_process_1.execSync)(`"${binaryPath}" -install -key-file "${keyPath}" -cert-file "${certPath}" ${hosts.join(" ")}`, { stdio: "ignore" });
-        const caLocation = (0, node_child_process_1.execSync)(`"${binaryPath}" -CAROOT`).toString().trim();
-        if (!node_fs_1.default.existsSync(keyPath) || !node_fs_1.default.existsSync(certPath)) {
-            throw new Error("Certificate files not found");
-        }
-        console.log(`CA Root certificate created in ${caLocation}`);
-        console.log(`Certificates created in ${resolvedCertDir}`);
-        const gitignorePath = node_path_1.default.resolve(process.cwd(), "./.gitignore");
-        if (node_fs_1.default.existsSync(gitignorePath)) {
-            const gitignore = await node_fs_1.default.promises.readFile(gitignorePath, "utf8");
-            if (!gitignore.includes(certDir)) {
-                console.log("Adding certificates to .gitignore");
-                await node_fs_1.default.promises.appendFile(gitignorePath, `\n${certDir}`);
-            }
-        }
-        return {
-            key: keyPath,
-            cert: certPath,
-            rootCA: `${caLocation}/rootCA.pem`,
-        };
-    }
-    catch (err) {
-        console.error("Failed to generate self-signed certificate. Falling back to http.", err);
-    }
-}
-// get platform specific cache directory adapted from playwright's handling
-// https://github.com/microsoft/playwright/blob/7d924470d397975a74a19184c136b3573a974e13/packages/playwright-core/src/utils/registry.ts#L141
-function getCacheDirectory(fileDirectory, envPath) {
-    let result;
-    if (envPath) {
-        result = envPath;
-    }
-    else {
-        let systemCacheDirectory;
-        if (process.platform === "linux") {
-            systemCacheDirectory =
-                process.env.XDG_CACHE_HOME || node_path_1.default.join(os_1.default.homedir(), ".cache");
-        }
-        else if (process.platform === "darwin") {
-            systemCacheDirectory = node_path_1.default.join(os_1.default.homedir(), "Library", "Caches");
-        }
-        else if (process.platform === "win32") {
-            systemCacheDirectory =
-                process.env.LOCALAPPDATA || node_path_1.default.join(os_1.default.homedir(), "AppData", "Local");
-        }
-        else {
-            /// Attempt to use generic tmp location for un-handled platform
-            if (!systemCacheDirectory) {
-                for (const dir of [
-                    node_path_1.default.join(os_1.default.homedir(), ".cache"),
-                    node_path_1.default.join(os_1.default.tmpdir()),
-                ]) {
-                    if (node_fs_1.default.existsSync(dir)) {
-                        systemCacheDirectory = dir;
-                        break;
-                    }
-                }
-            }
-            if (!systemCacheDirectory) {
-                console.error(new Error("Unsupported platform: " + process.platform));
-                process.exit(0);
-            }
-        }
-        result = node_path_1.default.join(systemCacheDirectory, fileDirectory);
-    }
-    if (!node_path_1.default.isAbsolute(result)) {
-        // It is important to resolve to the absolute path:
-        // - for unzipping to work correctly;
-        // - so that registry directory matches between installation and execution.
-        // INIT_CWD points to the root of `npm/yarn install` and is probably what
-        // the user meant when typing the relative path.
-        result = node_path_1.default.resolve(process.env["INIT_CWD"] || process.cwd(), result);
-    }
-    return result;
-}
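As the deleted mkcert.d.ts shows, createSelfSignedCertificate resolved to file paths (not PEM contents), so a caller would read the files before handing them to an HTTPS server. A minimal usage sketch follows; the import path is an assumption based on the published file layout, and the server wiring is illustrative only.

```ts
// Sketch: starting a local HTTPS dev server with the removed mkcert helper.
import fs from "node:fs";
import https from "node:https";
import { createSelfSignedCertificate } from "@utoo/pack/cjs/mkcert"; // assumed path

async function startHttps() {
  const certificate = await createSelfSignedCertificate("localhost");
  if (!certificate) {
    // The helper logged the failure and returned undefined; fall back to plain HTTP here.
    return;
  }
  // `key` and `cert` are paths under ./certificates, so read the PEM contents from disk.
  const server = https.createServer(
    { key: fs.readFileSync(certificate.key), cert: fs.readFileSync(certificate.cert) },
    (_req, res) => res.end("hello over https"),
  );
  server.listen(8443);
}

startHttps().catch(console.error);
```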
package/cjs/project.d.ts
DELETED
@@ -1,43 +0,0 @@
-import type { HmrIdentifiers, NapiUpdateMessage, NapiWrittenEndpoint, StackFrame } from "./binding";
-import * as binding from "./binding";
-import { ProjectOptions, RawEntrypoints, Update } from "./types";
-export declare class TurbopackInternalError extends Error {
-    name: string;
-    constructor(cause: Error);
-}
-export declare function projectFactory(): (options: Required<ProjectOptions>, turboEngineOptions: binding.NapiTurboEngineOptions) => Promise<{
-    readonly _nativeProject: {
-        __napiType: "Project";
-    };
-    update(options: Partial<ProjectOptions>): Promise<void>;
-    writeAllEntrypointsToDisk(): Promise<TurbopackResult<RawEntrypoints>>;
-    entrypointsSubscribe(): AsyncGenerator<{
-        apps: {
-            readonly _nativeEndpoint: {
-                __napiType: "Endpoint";
-            };
-            writeToDisk(): Promise<TurbopackResult<NapiWrittenEndpoint>>;
-            clientChanged(): Promise<AsyncIterableIterator<TurbopackResult<{}>>>;
-            serverChanged(includeIssues: boolean): Promise<AsyncIterableIterator<TurbopackResult<{}>>>;
-        }[];
-        libraries: {
-            readonly _nativeEndpoint: {
-                __napiType: "Endpoint";
-            };
-            writeToDisk(): Promise<TurbopackResult<NapiWrittenEndpoint>>;
-            clientChanged(): Promise<AsyncIterableIterator<TurbopackResult<{}>>>;
-            serverChanged(includeIssues: boolean): Promise<AsyncIterableIterator<TurbopackResult<{}>>>;
-        }[];
-        issues: binding.NapiIssue[];
-        diagnostics: binding.NapiDiagnostic[];
-    }, void, unknown>;
-    hmrEvents(identifier: string): AsyncIterableIterator<TurbopackResult<Update>>;
-    hmrIdentifiersSubscribe(): AsyncIterableIterator<TurbopackResult<HmrIdentifiers>>;
-    traceSource(stackFrame: StackFrame, currentDirectoryFileUrl: string): Promise<StackFrame | null>;
-    getSourceForAsset(filePath: string): Promise<string | null>;
-    getSourceMap(filePath: string): Promise<string | null>;
-    getSourceMapSync(filePath: string): string | null;
-    updateInfoSubscribe(aggregationMs: number): AsyncIterableIterator<TurbopackResult<NapiUpdateMessage>>;
-    shutdown(): Promise<void>;
-    onExit(): Promise<void>;
-}>;
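The factory returned by projectFactory produces a project whose entrypointsSubscribe() async generator drives builds, which is how the deleted hmr.js consumed it. A compressed sketch of that consumption pattern is below; the import path is assumed, and the option arguments are placeholders rather than valid ProjectOptions values.

```ts
// Sketch: consuming the project API described by the deleted project.d.ts,
// mirroring the entrypoints loop in the deleted hmr.js.
import { projectFactory } from "@utoo/pack/cjs/project"; // assumed path

async function watchEntrypoints(options: any, engineOptions: any) {
  // projectFactory() returns the async create function; options/engineOptions
  // are placeholders for Required<ProjectOptions> and NapiTurboEngineOptions.
  const createProject = projectFactory();
  const project = await createProject(options, engineOptions);
  for await (const entrypoints of project.entrypointsSubscribe()) {
    // Each app endpoint can be written to disk; issues and diagnostics ride along.
    await Promise.all(entrypoints.apps.map((endpoint) => endpoint.writeToDisk()));
    console.log(`issues: ${entrypoints.issues.length}`);
  }
  await project.shutdown();
}
```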