@andersbakken/fisk 4.0.40 → 4.0.42
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/builder/VM_runtime.js +2 -2
- package/builder/fisk-builder.js +55 -22
- package/package.json +2 -2
package/builder/VM_runtime.js
CHANGED
|
@@ -3205,7 +3205,7 @@ class Compile extends EventEmitter__default["default"] {
|
|
|
3205
3205
|
if (!fs.existsSync("/usr/bin/as")) {
|
|
3206
3206
|
this.emit("stderr", "as doesn't exist");
|
|
3207
3207
|
}
|
|
3208
|
-
console.log(
|
|
3208
|
+
console.log(`Compiling source file: ${sourceFile}\n${[compiler, ...args].join(" ")}`);
|
|
3209
3209
|
// const env = Object.assign({ TMPDIR: dir, TEMPDIR: dir, TEMP: dir }, process.env);
|
|
3210
3210
|
const proc = child_process__default["default"].spawn(compiler, args, {
|
|
3211
3211
|
/*env: env, */ cwd: dir // , maxBuffer: 1024 * 1024 * 16
|
|
@@ -3263,7 +3263,7 @@ class Compile extends EventEmitter__default["default"] {
|
|
|
3263
3263
|
}
|
|
3264
3264
|
}
|
|
3265
3265
|
catch (err) {
|
|
3266
|
-
|
|
3266
|
+
console.error("Got an error file", path__default["default"].join(directory, file), err);
|
|
3267
3267
|
}
|
|
3268
3268
|
});
|
|
3269
3269
|
}
|
package/builder/fisk-builder.js
CHANGED
|
@@ -4322,7 +4322,7 @@ class ObjectCacheItem {
|
|
|
4322
4322
|
}
|
|
4323
4323
|
get contentsSize() {
|
|
4324
4324
|
return this.response.index.reduce((total, item) => {
|
|
4325
|
-
return total + item.uncompressedSize;
|
|
4325
|
+
return total + item.bytes; // Cache stores compressed, so use compressed size
|
|
4326
4326
|
}, 0);
|
|
4327
4327
|
}
|
|
4328
4328
|
get fileSize() {
|
|
@@ -7641,7 +7641,7 @@ class ObjectCache extends EventEmitter__default["default"] {
|
|
|
7641
7641
|
}
|
|
7642
7642
|
let remaining = 0;
|
|
7643
7643
|
response.index.forEach((file) => {
|
|
7644
|
-
remaining += file.uncompressedSize;
|
|
7644
|
+
remaining += file.bytes; // Cache stores compressed, so use compressed size
|
|
7645
7645
|
});
|
|
7646
7646
|
const pendingItem = new ObjectCachePendingItem(response, absolutePath, remaining);
|
|
7647
7647
|
pendingItem.file.on("error", (err) => {
|
|
@@ -7650,8 +7650,7 @@ class ObjectCache extends EventEmitter__default["default"] {
|
|
|
7650
7650
|
});
|
|
7651
7651
|
this.pending[response.sha1] = pendingItem;
|
|
7652
7652
|
contents.forEach((c) => {
|
|
7653
|
-
|
|
7654
|
-
pendingItem.write((_a = c.uncompressed) !== null && _a !== void 0 ? _a : c.contents);
|
|
7653
|
+
pendingItem.write(c.contents); // Cache stores compressed data
|
|
7655
7654
|
});
|
|
7656
7655
|
pendingItem.end().then(() => {
|
|
7657
7656
|
if (this.pending[response.sha1] === pendingItem) {
|
|
@@ -7764,6 +7763,7 @@ class Job extends EventEmitter__default["default"] {
|
|
|
7764
7763
|
this.sha1 = data.sha1;
|
|
7765
7764
|
this.id = data.id;
|
|
7766
7765
|
this.builderIp = data.builderIp;
|
|
7766
|
+
this.supportsCompressedResponse = data.supportsCompressedResponse;
|
|
7767
7767
|
}
|
|
7768
7768
|
get readyState() {
|
|
7769
7769
|
return this.ws.readyState;
|
|
@@ -51229,6 +51229,7 @@ class Server extends EventEmitter__default["default"] {
|
|
|
51229
51229
|
sha1: String(req.headers["x-fisk-sha1"]),
|
|
51230
51230
|
sourceFile: String(req.headers["x-fisk-sourcefile"]),
|
|
51231
51231
|
user: String(req.headers["x-fisk-user"]),
|
|
51232
|
+
supportsCompressedResponse: req.headers["x-fisk-supports-compressed-response"] === "true",
|
|
51232
51233
|
ws
|
|
51233
51234
|
});
|
|
51234
51235
|
break;
|
|
@@ -55255,7 +55256,15 @@ function getFromCache(job, cb) {
|
|
|
55255
55256
|
if (!item) {
|
|
55256
55257
|
throw new Error("Couldn't find item " + job.sha1);
|
|
55257
55258
|
}
|
|
55258
|
-
|
|
55259
|
+
// Cache stores compressed data.
|
|
55260
|
+
// Only send compressed if client both wants compression AND supports compressed responses from cache
|
|
55261
|
+
const sendCompressed = job.compressed && job.supportsCompressedResponse;
|
|
55262
|
+
const response = Object.assign({ objectCache: true }, item === null || item === void 0 ? void 0 : item.response);
|
|
55263
|
+
if (!sendCompressed) {
|
|
55264
|
+
// Client wants uncompressed (or doesn't support compressed cache), update metadata
|
|
55265
|
+
response.index = response.index.map((f) => (Object.assign(Object.assign({}, f), { bytes: f.uncompressedSize })));
|
|
55266
|
+
}
|
|
55267
|
+
job.send(response);
|
|
55259
55268
|
job.objectcache = true;
|
|
55260
55269
|
pointOfNoReturn = true;
|
|
55261
55270
|
fd = fs$3.openSync(path__default["default"].join(objectCache.dir, item.response.sha1), "r");
|
|
@@ -55283,6 +55292,7 @@ function getFromCache(job, cb) {
|
|
|
55283
55292
|
finish();
|
|
55284
55293
|
return;
|
|
55285
55294
|
}
|
|
55295
|
+
// Cache stores compressed data (f.bytes is compressed size)
|
|
55286
55296
|
const buffer = Buffer.allocUnsafe(f.bytes);
|
|
55287
55297
|
// console.log("reading from", file, path.join(objectCache.dir, item.response.sha1), pos);
|
|
55288
55298
|
assert__default["default"](fd !== undefined, "Must have fd");
|
|
@@ -55299,7 +55309,12 @@ function getFromCache(job, cb) {
|
|
|
55299
55309
|
else {
|
|
55300
55310
|
// console.log("got good response from file", file);
|
|
55301
55311
|
// console.log("sending some data", buffer.length, fileIdx, item.response.index.length);
|
|
55302
|
-
|
|
55312
|
+
let sendBuffer = buffer;
|
|
55313
|
+
// Decompress if client doesn't support compressed responses from cache
|
|
55314
|
+
if (!sendCompressed && buffer.byteLength > 0) {
|
|
55315
|
+
sendBuffer = zlib__default["default"].gunzipSync(buffer);
|
|
55316
|
+
}
|
|
55317
|
+
job.send(sendBuffer);
|
|
55303
55318
|
pos += read;
|
|
55304
55319
|
if (++fileIdx < item.response.index.length) {
|
|
55305
55320
|
work();
|
|
@@ -55947,27 +55962,36 @@ server.on("job", (job) => {
|
|
|
55947
55962
|
return;
|
|
55948
55963
|
}
|
|
55949
55964
|
// this can't be async, the directory is removed after the event is fired
|
|
55965
|
+
// Always compress for cache storage, but send based on client preference
|
|
55950
55966
|
const contents = event.files.map((f) => {
|
|
55951
|
-
|
|
55952
|
-
|
|
55953
|
-
if (j.job.compressed) {
|
|
55954
|
-
uncompressed = fileContents;
|
|
55955
|
-
fileContents = zlib__default["default"].gzipSync(fileContents);
|
|
55956
|
-
}
|
|
55967
|
+
const uncompressed = fs$3.readFileSync(f.absolute);
|
|
55968
|
+
const compressed = uncompressed.byteLength > 0 ? zlib__default["default"].gzipSync(uncompressed) : uncompressed;
|
|
55957
55969
|
return {
|
|
55958
|
-
contents: fileContents,
|
|
55970
|
+
contents: compressed,
|
|
55959
55971
|
uncompressed,
|
|
55960
55972
|
path: f.path
|
|
55961
55973
|
};
|
|
55962
55974
|
});
|
|
55975
|
+
// Prepare data to send to client (compressed or uncompressed based on preference and capability)
|
|
55976
|
+
// Only send compressed if client wants it AND supports compressed responses
|
|
55977
|
+
const sendCompressed = j.job.compressed && j.job.supportsCompressedResponse;
|
|
55978
|
+
const toSend = contents.map((item) => {
|
|
55979
|
+
assert__default["default"](item.uncompressed, "Must have uncompressed data");
|
|
55980
|
+
return {
|
|
55981
|
+
contents: sendCompressed ? item.contents : item.uncompressed,
|
|
55982
|
+
path: item.path
|
|
55983
|
+
};
|
|
55984
|
+
});
|
|
55963
55985
|
const response = {
|
|
55964
55986
|
type: "response",
|
|
55965
|
-
index:
|
|
55966
|
-
|
|
55987
|
+
index: toSend.map((item) => {
|
|
55988
|
+
const original = contents.find((c) => c.path === item.path);
|
|
55989
|
+
assert__default["default"](original, "Must have original contents");
|
|
55990
|
+
assert__default["default"](original.uncompressed, "Must have uncompressed data");
|
|
55967
55991
|
return {
|
|
55968
55992
|
path: item.path,
|
|
55969
55993
|
bytes: item.contents.length,
|
|
55970
|
-
uncompressedSize:
|
|
55994
|
+
uncompressedSize: original.uncompressed.byteLength
|
|
55971
55995
|
};
|
|
55972
55996
|
}),
|
|
55973
55997
|
success: event.success,
|
|
@@ -55988,13 +56012,22 @@ server.on("job", (job) => {
|
|
|
55988
56012
|
objectCache &&
|
|
55989
56013
|
response.sha1 &&
|
|
55990
56014
|
objectCache.state(response.sha1) === "none") {
|
|
55991
|
-
|
|
55992
|
-
response.
|
|
55993
|
-
|
|
55994
|
-
|
|
56015
|
+
// Cache metadata needs to reflect compressed sizes since we store compressed
|
|
56016
|
+
const cacheResponse = Object.assign(Object.assign({}, response), { index: contents.map((item) => {
|
|
56017
|
+
assert__default["default"](item.uncompressed, "Must have uncompressed data");
|
|
56018
|
+
return {
|
|
56019
|
+
path: item.path,
|
|
56020
|
+
bytes: item.contents.length,
|
|
56021
|
+
uncompressedSize: item.uncompressed.byteLength
|
|
56022
|
+
};
|
|
56023
|
+
}) });
|
|
56024
|
+
cacheResponse.sourceFile = jobJob.sourceFile;
|
|
56025
|
+
cacheResponse.commandLine = jobJob.commandLine;
|
|
56026
|
+
cacheResponse.environment = jobJob.hash;
|
|
56027
|
+
objectCache.add(cacheResponse, contents);
|
|
55995
56028
|
}
|
|
55996
|
-
|
|
55997
|
-
if (x.contents.byteLength) {
|
|
56029
|
+
toSend.forEach((x) => {
|
|
56030
|
+
if (x.contents && x.contents.byteLength) {
|
|
55998
56031
|
jobJob.send(x.contents);
|
|
55999
56032
|
}
|
|
56000
56033
|
});
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@andersbakken/fisk",
|
|
3
|
-
"version": "4.0.
|
|
3
|
+
"version": "4.0.42",
|
|
4
4
|
"description": "Fisk, a distributed compile system",
|
|
5
5
|
"scripts": {
|
|
6
6
|
"lint": "eslint . --ext .ts",
|
|
@@ -30,7 +30,7 @@
|
|
|
30
30
|
],
|
|
31
31
|
"dependencies": {
|
|
32
32
|
"@andersbakken/blessed": "^0.1.82",
|
|
33
|
-
"@andersbakken/fisk-native": "^
|
|
33
|
+
"@andersbakken/fisk-native": "^17.0.0",
|
|
34
34
|
"@jhanssen/options": "^10.0.0",
|
|
35
35
|
"axios": "^0.21.1",
|
|
36
36
|
"bufferutil": "^4.0.7",
|