@unpackjs/core 1.7.2 → 1.7.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/compiled/css-loader/index.js +20 -20
- package/compiled/less-loader/index.js +8 -8
- package/compiled/postcss-loader/index.js +8 -8
- package/compiled/sass-loader/index.js +9 -8
- package/compiled/sass-loader/package.json +1 -1
- package/compiled/style-loader/index.js +10 -10
- package/dist/bundler-config/chunkSplit.cjs +0 -4
- package/dist/bundler-config/chunkSplit.d.ts.map +1 -1
- package/dist/bundler-config/chunkSplit.js +0 -4
- package/dist/bundler-config/css.cjs +1 -2
- package/dist/bundler-config/css.d.ts.map +1 -1
- package/dist/bundler-config/css.js +1 -2
- package/dist/bundler-config/experimentCss.cjs +4 -1
- package/dist/bundler-config/experimentCss.d.ts.map +1 -1
- package/dist/bundler-config/experimentCss.js +4 -1
- package/dist/bundler-config/index.cjs +6 -6
- package/dist/bundler-config/index.d.ts.map +1 -1
- package/dist/bundler-config/index.js +6 -6
- package/dist/bundler-config/jsMinify.cjs +101 -0
- package/dist/bundler-config/jsMinify.d.ts +18 -0
- package/dist/bundler-config/jsMinify.d.ts.map +1 -0
- package/dist/bundler-config/jsMinify.js +77 -0
- package/dist/createUnpack.cjs +7 -2
- package/dist/createUnpack.d.ts.map +1 -1
- package/dist/createUnpack.js +7 -2
- package/dist/lightningcss/minimizer.cjs +1 -1
- package/dist/lightningcss/minimizer.js +1 -1
- package/dist/plugin-progress/webpack.d.ts.map +1 -1
- package/dist/prebundleDeps.cjs +2 -1
- package/dist/prebundleDeps.d.ts.map +1 -1
- package/dist/prebundleDeps.js +2 -1
- package/dist/run/dev.cjs +1 -1
- package/dist/run/dev.js +1 -1
- package/dist/types/config.d.ts +5 -2
- package/dist/types/config.d.ts.map +1 -1
- package/package.json +8 -10
- package/dist/thread-loader/WorkerError.cjs +0 -41
- package/dist/thread-loader/WorkerError.js +0 -31
- package/dist/thread-loader/WorkerPool.cjs +0 -407
- package/dist/thread-loader/WorkerPool.js +0 -387
- package/dist/thread-loader/index.cjs +0 -100
- package/dist/thread-loader/index.js +0 -85
- package/dist/thread-loader/readBuffer.cjs +0 -55
- package/dist/thread-loader/readBuffer.js +0 -45
- package/dist/thread-loader/serializer.cjs +0 -46
- package/dist/thread-loader/serializer.js +0 -31
- package/dist/thread-loader/utils.cjs +0 -84
- package/dist/thread-loader/utils.js +0 -60
- package/dist/thread-loader/worker.cjs +0 -377
- package/dist/thread-loader/worker.js +0 -382
- package/dist/thread-loader/workerPools.cjs +0 -57
- package/dist/thread-loader/workerPools.js +0 -33
package/dist/thread-loader/WorkerPool.cjs
@@ -1,407 +0,0 @@
-var __create = Object.create;
-var __defProp = Object.defineProperty;
-var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
-var __getOwnPropNames = Object.getOwnPropertyNames;
-var __getProtoOf = Object.getPrototypeOf;
-var __hasOwnProp = Object.prototype.hasOwnProperty;
-var __export = (target, all) => {
-  for (var name in all)
-    __defProp(target, name, { get: all[name], enumerable: true });
-};
-var __copyProps = (to, from, except, desc) => {
-  if (from && typeof from === "object" || typeof from === "function") {
-    for (let key of __getOwnPropNames(from))
-      if (!__hasOwnProp.call(to, key) && key !== except)
-        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
-  }
-  return to;
-};
-var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
-  // If the importer is in node compatibility mode or this is not an ESM
-  // file that has been converted to a CommonJS file using a Babel-
-  // compatible transform (i.e. "__esModule" has not been set), then set
-  // "default" to the CommonJS "module.exports" for node compatibility.
-  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
-  mod
-));
-var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
-var WorkerPool_exports = {};
-__export(WorkerPool_exports, {
-  default: () => WorkerPool
-});
-module.exports = __toCommonJS(WorkerPool_exports);
-var import_node_child_process = __toESM(require("node:child_process"));
-var import_WorkerError = __toESM(require("./WorkerError.cjs"));
-var import_readBuffer = __toESM(require("./readBuffer.cjs"));
-var import_serializer = require("./serializer.cjs");
-const asyncMapSeries = require("neo-async/mapSeries.js");
-const asyncQueue = require("neo-async/queue.js");
-const workerPath = require.resolve("./worker");
-let workerId = 0;
-class PoolWorker {
-  constructor(options, onJobDone) {
-    this.disposed = false;
-    this.nextJobId = 0;
-    this.jobs = /* @__PURE__ */ Object.create(null);
-    this.activeJobs = 0;
-    this.onJobDone = onJobDone;
-    this.id = workerId;
-    workerId += 1;
-    const sanitizedNodeArgs = (options.nodeArgs || []).filter((opt) => !!opt);
-    this.worker = import_node_child_process.default.spawn(
-      process.execPath,
-      [].concat(sanitizedNodeArgs).concat(workerPath, options.parallelJobs),
-      {
-        detached: true,
-        stdio: ["ignore", "pipe", "pipe", "pipe", "pipe"]
-      }
-    );
-    this.worker.unref();
-    if (!this.worker.stdio) {
-      throw new Error(
-        `Failed to create the worker pool with workerId: ${workerId} and ${""}configuration: ${JSON.stringify(
-          options
-        )}. Please verify if you hit the OS open files limit.`
-      );
-    }
-    const [, , , readPipe, writePipe] = this.worker.stdio;
-    this.readPipe = readPipe;
-    this.writePipe = writePipe;
-    this.listenStdOutAndErrFromWorker(this.worker.stdout, this.worker.stderr);
-    this.readNextMessage();
-  }
-  listenStdOutAndErrFromWorker(workerStdout, workerStderr) {
-    if (workerStdout) {
-      workerStdout.on("data", this.writeToStdout);
-    }
-    if (workerStderr) {
-      workerStderr.on("data", this.writeToStderr);
-    }
-  }
-  ignoreStdOutAndErrFromWorker(workerStdout, workerStderr) {
-    if (workerStdout) {
-      workerStdout.removeListener("data", this.writeToStdout);
-    }
-    if (workerStderr) {
-      workerStderr.removeListener("data", this.writeToStderr);
-    }
-  }
-  writeToStdout(data) {
-    if (!this.disposed) {
-      process.stdout.write(data);
-    }
-  }
-  writeToStderr(data) {
-    if (!this.disposed) {
-      process.stderr.write(data);
-    }
-  }
-  run(data, callback) {
-    const jobId = this.nextJobId;
-    this.nextJobId += 1;
-    this.jobs[jobId] = { data, callback };
-    this.activeJobs += 1;
-    this.writeJson({
-      type: "job",
-      id: jobId,
-      data
-    });
-  }
-  warmup(requires) {
-    this.writeJson({
-      type: "warmup",
-      requires
-    });
-  }
-  writeJson(data) {
-    const lengthBuffer = Buffer.alloc(4);
-    const messageBuffer = Buffer.from(JSON.stringify(data, import_serializer.replacer), "utf-8");
-    lengthBuffer.writeInt32BE(messageBuffer.length, 0);
-    this.writePipe.write(lengthBuffer);
-    this.writePipe.write(messageBuffer);
-  }
-  writeEnd() {
-    const lengthBuffer = Buffer.alloc(4);
-    lengthBuffer.writeInt32BE(0, 0);
-    this.writePipe.write(lengthBuffer);
-  }
-  readNextMessage() {
-    this.state = "read length";
-    this.readBuffer(4, (lengthReadError, lengthBuffer) => {
-      if (lengthReadError) {
-        console.error(`Failed to communicate with worker (read length) ${lengthReadError}`);
-        return;
-      }
-      this.state = "length read";
-      const length = lengthBuffer.readInt32BE(0);
-      this.state = "read message";
-      this.readBuffer(length, (messageError, messageBuffer) => {
-        if (messageError) {
-          console.error(`Failed to communicate with worker (read message) ${messageError}`);
-          return;
-        }
-        this.state = "message read";
-        const messageString = messageBuffer.toString("utf-8");
-        const message = JSON.parse(messageString, import_serializer.reviver);
-        this.state = "process message";
-        this.onWorkerMessage(message, (err) => {
-          if (err) {
-            console.error(`Failed to communicate with worker (process message) ${err}`);
-            return;
-          }
-          this.state = "soon next";
-          setImmediate(() => this.readNextMessage());
-        });
-      });
-    });
-  }
-  onWorkerMessage(message, finalCallback) {
-    const { type, id } = message;
-    switch (type) {
-      case "job": {
-        const { data, error, result } = message;
-        asyncMapSeries(
-          data,
-          (length, callback) => this.readBuffer(length, callback),
-          (eachErr, buffers) => {
-            const { callback: jobCallback } = this.jobs[id];
-            const callback = (err, arg) => {
-              if (jobCallback) {
-                delete this.jobs[id];
-                this.activeJobs -= 1;
-                this.onJobDone();
-                if (err) {
-                  jobCallback(err instanceof Error ? err : new Error(err), arg);
-                } else {
-                  jobCallback(null, arg);
-                }
-              }
-              finalCallback();
-            };
-            if (eachErr) {
-              callback(eachErr);
-              return;
-            }
-            let bufferPosition = 0;
-            if (result.result) {
-              result.result = result.result.map((r) => {
-                if (r.buffer) {
-                  const buffer = buffers[bufferPosition];
-                  bufferPosition += 1;
-                  if (r.string) {
-                    return buffer.toString("utf-8");
-                  }
-                  return buffer;
-                }
-                return r.data;
-              });
-            }
-            if (error) {
-              callback(this.fromErrorObj(error), result);
-              return;
-            }
-            callback(null, result);
-          }
-        );
-        break;
-      }
-      case "loadModule": {
-        const { request, questionId } = message;
-        const { data } = this.jobs[id];
-        data.loadModule(request, (error, source, sourceMap) => {
-          this.writeJson({
-            type: "result",
-            id: questionId,
-            error: error ? {
-              message: error.message,
-              details: error.details,
-              missing: error.missing
-            } : null,
-            result: [
-              source,
-              sourceMap
-              // TODO: Serialize module?
-              // module,
-            ]
-          });
-        });
-        finalCallback();
-        break;
-      }
-      case "resolve": {
-        const { context, request, options, questionId } = message;
-        const { data } = this.jobs[id];
-        if (options) {
-          data.getResolve(options)(context, request, (error, result) => {
-            this.writeJson({
-              type: "result",
-              id: questionId,
-              error: error ? {
-                message: error.message,
-                details: error.details,
-                missing: error.missing
-              } : null,
-              result
-            });
-          });
-        } else {
-          data.resolve(context, request, (error, result) => {
-            this.writeJson({
-              type: "result",
-              id: questionId,
-              error: error ? {
-                message: error.message,
-                details: error.details,
-                missing: error.missing
-              } : null,
-              result
-            });
-          });
-        }
-        finalCallback();
-        break;
-      }
-      case "emitWarning": {
-        const { data } = message;
-        const { data: jobData } = this.jobs[id];
-        jobData.emitWarning(this.fromErrorObj(data));
-        finalCallback();
-        break;
-      }
-      case "emitError": {
-        const { data } = message;
-        const { data: jobData } = this.jobs[id];
-        jobData.emitError(this.fromErrorObj(data));
-        finalCallback();
-        break;
-      }
-      case "getLogger": {
-        const { data } = message;
-        const { data: jobData } = this.jobs[id];
-        if (!Object.hasOwnProperty.call(jobData.loggers, data.name)) {
-          jobData.loggers[data.name] = jobData.getLogger(data.name);
-        }
-        finalCallback();
-        break;
-      }
-      case "logger": {
-        const { data } = message;
-        const { data: jobData } = this.jobs[id];
-        jobData.loggers[data.name][data.severity](data.message);
-        finalCallback();
-        break;
-      }
-      default: {
-        console.error(`Unexpected worker message ${type} in WorkerPool.`);
-        finalCallback();
-        break;
-      }
-    }
-  }
-  fromErrorObj(arg) {
-    let obj;
-    if (typeof arg === "string") {
-      obj = { message: arg };
-    } else {
-      obj = arg;
-    }
-    return new import_WorkerError.default(obj, this.id);
-  }
-  readBuffer(length, callback) {
-    (0, import_readBuffer.default)(this.readPipe, length, callback);
-  }
-  dispose() {
-    if (!this.disposed) {
-      this.disposed = true;
-      this.ignoreStdOutAndErrFromWorker(this.worker.stdout, this.worker.stderr);
-      this.writeEnd();
-    }
-  }
-}
-class WorkerPool {
-  constructor(options) {
-    this.options = options || {};
-    this.numberOfWorkers = options.numberOfWorkers;
-    this.poolTimeout = options.poolTimeout;
-    this.workerNodeArgs = options.workerNodeArgs;
-    this.workerParallelJobs = options.workerParallelJobs;
-    this.workers = /* @__PURE__ */ new Set();
-    this.activeJobs = 0;
-    this.timeout = null;
-    this.poolQueue = asyncQueue(this.distributeJob.bind(this), options.poolParallelJobs);
-    this.terminated = false;
-    this.setupLifeCycle();
-  }
-  isAbleToRun() {
-    return !this.terminated;
-  }
-  terminate() {
-    if (this.terminated) {
-      return;
-    }
-    this.terminated = true;
-    this.poolQueue.kill();
-    this.disposeWorkers(true);
-  }
-  setupLifeCycle() {
-    process.on("exit", () => {
-      this.terminate();
-    });
-  }
-  run(data, callback) {
-    if (this.timeout) {
-      clearTimeout(this.timeout);
-      this.timeout = null;
-    }
-    this.activeJobs += 1;
-    this.poolQueue.push(data, callback);
-  }
-  distributeJob(data, callback) {
-    let bestWorker;
-    for (const worker of this.workers) {
-      if (!bestWorker || worker.activeJobs < bestWorker.activeJobs) {
-        bestWorker = worker;
-      }
-    }
-    if (bestWorker && (bestWorker.activeJobs === 0 || this.workers.size >= this.numberOfWorkers)) {
-      bestWorker.run(data, callback);
-      return;
-    }
-    const newWorker = this.createWorker();
-    newWorker.run(data, callback);
-  }
-  createWorker() {
-    const newWorker = new PoolWorker(
-      {
-        nodeArgs: this.workerNodeArgs,
-        parallelJobs: this.workerParallelJobs
-      },
-      () => this.onJobDone()
-    );
-    this.workers.add(newWorker);
-    return newWorker;
-  }
-  warmup(requires) {
-    while (this.workers.size < this.numberOfWorkers) {
-      this.createWorker().warmup(requires);
-    }
-  }
-  onJobDone() {
-    this.activeJobs -= 1;
-    if (this.activeJobs === 0 && Number.isFinite(this.poolTimeout)) {
-      this.timeout = setTimeout(() => this.disposeWorkers(), this.poolTimeout);
-    }
-  }
-  disposeWorkers(fromTerminate) {
-    if (!this.options.poolRespawn && !fromTerminate) {
-      this.terminate();
-      return;
-    }
-    if (this.activeJobs === 0 || fromTerminate) {
-      for (const worker of this.workers) {
-        worker.dispose();
-      }
-      this.workers.clear();
-    }
-  }
-}
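
The removed WorkerPool talked to its worker processes over extra stdio pipes using a simple framing: a 4-byte big-endian length prefix followed by a UTF-8 JSON payload, with a zero-length frame signalling end of input (see writeJson, writeEnd, and readNextMessage in the deleted code above). Below is a minimal sketch of that framing only, assuming a generic Node.js writable stream in place of the worker's write pipe; the helper name is hypothetical, and the original additionally passed a custom replacer/reviver from ./serializer.cjs, omitted here.

// Hypothetical helper illustrating the length-prefixed JSON framing used by the removed WorkerPool.
// `pipe` stands in for the worker's write pipe; any Node.js Writable stream works.
function writeFramedMessage(pipe, message) {
  const payload = Buffer.from(JSON.stringify(message), "utf-8");
  const header = Buffer.alloc(4);
  header.writeInt32BE(payload.length, 0); // a zero-length frame marked end-of-stream in the original code
  pipe.write(header);
  pipe.write(payload);
}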