@andersbakken/fisk 3.5.7 → 3.6.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,899 +0,0 @@
1
- #!/usr/bin/env node
2
-
3
- const option = require("@jhanssen/options")({ prefix: "fisk/builder",
4
- applicationPath: false,
5
- additionalFiles: [ "fisk/builder.conf.override" ] });
6
-
7
- const axios = require("axios");
8
- const ws = require("ws");
9
- const Url = require("url");
10
- const common = require("../common")(option);
11
- const Server = require("./server");
12
- const Client = require("./client");
13
- const Compile = require("./compile");
14
- const bytes = require("bytes");
15
- const parse_duration = require("parse-duration");
16
- const fs = require("fs-extra");
17
- const path = require("path");
18
- const os = require("os");
19
- const child_process = require("child_process");
20
- const VM = require("./VM");
21
- const load = require("./load");
22
- const ObjectCache = require("./objectcache");
23
- const quitOnError = require("./quit-on-error")(option);
24
- const zlib = require("zlib");
25
-
26
// The builder chroots into downloaded toolchain environments, which requires
// root privileges; refuse to start otherwise.
if (process.getuid() !== 0) {
    console.error("fisk builder needs to run as root to be able to chroot");
    process.exit(1);
}

// Last-resort handlers: report the failure to the scheduler (when connected)
// and let quitOnError() decide whether to exit/restart.
process.on("unhandledRejection", (reason, p) => {
    console.log("Unhandled Rejection at: Promise", p, "reason:", reason.stack);
    if (client) {
        client.send("log", { message: `Unhandled Rejection at: Promise ${p}, reason: ${reason.stack}` });
    }
    quitOnError();
});

process.on("uncaughtException", err => {
    console.error("Uncaught exception", err);
    if (client) {
        client.send("log", { message: `Uncaught exception ${err.toString()} ${err.stack}` });
    }
    quitOnError();
});
45
-
46
// Runtime-togglable verbose logging (see the /debug and /nodebug endpoints).
let debug = option("debug");

// Optional idle shutdown: accepts milliseconds or a human-readable duration
// string such as "2h" (parsed by parse-duration).
let restartOnInactivity = option("restart-on-inactivity");
if (typeof restartOnInactivity === "string")
    restartOnInactivity = parse_duration(restartOnInactivity);

let shutdownTimer;

// (Re)arm the inactivity timer; called on connect and on every incoming job.
// When more than 10s of budget remains, a warning is logged 10 seconds before
// the (currently disabled) shutdown command would run.
function restartShutdownTimer()
{
    if (!(restartOnInactivity > 0))
        return;
    if (shutdownTimer)
        clearTimeout(shutdownTimer);
    const shutdownNow = () => {
        console.log("shutting down now due to inactivity");
        // child_process.exec("shutdown -h now");
    };
    if (restartOnInactivity <= 10000) {
        shutdownTimer = setTimeout(shutdownNow, restartOnInactivity);
    } else {
        shutdownTimer = setTimeout(() => {
            console.log("shutting down in 10 seconds due to inactivity");
            shutdownTimer = setTimeout(shutdownNow, 10000);
        }, restartOnInactivity - 10000);
    }
}

if (restartOnInactivity)
    restartShutdownTimer();
76
-
77
// Multi-port mode: "--ports 8096,8098" forks one builder child per port,
// splitting the CPU slots evenly among them, then the parent exits.
let ports = ("" + option("ports", "")).split(",").filter(x => x).map(x => parseInt(x));
if (ports.length) {
    const baseName = option("name") || option("hostname") || os.hostname();
    const slotsPerChild = Math.round(os.cpus().length / ports.length);
    ports.forEach(port => {
        child_process.fork(__filename, [
            "--port", port,
            "--name", baseName + "_" + port,
            "--cache-dir", path.join(common.cacheDir(), "" + port),
            "--slots", slotsPerChild
        ]);
    });
    process.exit();
}
93
-
94
// Local object cache (created on demand when the scheduler enables it).
let objectCache;

// Try to satisfy `job` straight from the local object cache.
// Returns true when the cache takes ownership of the job (cb is then invoked
// exactly once, with an error on failure), false when the caller must compile.
function getFromCache(job, cb)
{
    if (!objectCache || objectCache.state(job.sha1) != "exists")
        return false;
    const cachePath = path.join(objectCache.dir, job.sha1);
    if (!fs.existsSync(cachePath)) {
        // Index says we have it but the file is gone; drop the stale entry.
        console.log("The file is not even there", cachePath);
        objectCache.remove(job.sha1);
        return false;
    }

    let pointOfNoReturn = false; // set once the response header has been sent
    let fd;
    try {
        const item = objectCache.get(job.sha1);
        job.send(Object.assign({objectCache: true}, item.response));
        job.objectcache = true;
        pointOfNoReturn = true;
        fd = fs.openSync(path.join(objectCache.dir, item.response.sha1), "r");
        let pos = 4 + item.headerSize; // skip the length prefix and header
        let fileIdx = 0;
        // Stream each indexed output file to the client, one read at a time.
        const work = () => {
            function finish(err)
            {
                fs.closeSync(fd);
                if (err) {
                    objectCache.remove(job.sha1);
                    job.close();
                } else {
                    ++item.cacheHits;
                }
                cb(err);
            }
            const file = item.response.index[fileIdx];
            if (!file) {
                finish();
                return;
            }
            const buffer = Buffer.allocUnsafe(file.bytes);
            fs.read(fd, buffer, 0, file.bytes, pos, (err, read) => {
                if (err || read != file.bytes) {
                    if (!err) {
                        err = `Short read ${read}/${file.bytes}`;
                    }
                    console.error(`Failed to read ${file.bytes} from ${path.join(objectCache.dir, item.response.sha1)} got ${read} ${err}`);
                    finish(err);
                } else {
                    job.send(buffer);
                    pos += read;
                    if (++fileIdx < item.response.index.length) {
                        work();
                    } else {
                        finish();
                    }
                }
            });
        };
        work();
        return true;
    } catch (err) {
        if (err.code != "ENOENT")
            console.error("Got some error here", err);
        if (fd)
            fs.closeSync(fd);
        if (pointOfNoReturn) {
            // The client already received our header; all we can do is drop
            // the connection and let it retry elsewhere.
            job.close();
            return true; // hehe
        }
        return false;
    }
}
179
-
180
// Map of environment hash -> VM instance for every toolchain on disk.
let environments = {};

// Persistent websocket connection to the scheduler.
const client = new Client(option, common.Version);

// The scheduler tells us whether the object cache should be enabled; build or
// tear down the ObjectCache accordingly, forwarding add/remove events so the
// scheduler can track what we have.
client.on("objectCache", enabled => {
    const objectCacheSize = bytes.parse(option("object-cache-size"));
    if (!enabled || !objectCacheSize) {
        objectCache = undefined;
        return;
    }
    const objectCacheDir = option("object-cache-dir") || path.join(common.cacheDir(), "objectcache");

    objectCache = new ObjectCache(objectCacheDir, objectCacheSize, option.int("object-cache-purge-size") || objectCacheSize);
    objectCache.on("added", data => {
        client.send({ type: "objectCacheAdded", sha1: data.sha1, sourceFile: data.sourceFile, cacheSize: objectCache.size, fileSize: data.fileSize });
    });

    objectCache.on("removed", data => {
        client.send({ type: "objectCacheRemoved", sha1: data.sha1, sourceFile: data.sourceFile, cacheSize: objectCache.size, fileSize: data.fileSize });
    });
});
199
-
200
// The scheduler can ask us to pre-seed our object cache by downloading
// compiled objects from other builders. Up to 10 downloads run concurrently;
// each promise chain processes every promises.length-th object.
client.on("fetch_cache_objects", message => {
    console.log("Fetching", message.objects.length, "objects");
    let filesReceived = 0;
    let promises = [];
    const max = Math.min(10, message.objects.length);
    for (let idx=0; idx<max; ++idx) {
        promises.push(Promise.resolve());
    }
    message.objects.forEach((operation, idx) => {
        promises[idx % promises.length] = promises[idx % promises.length].then(() => {
            return new Promise((resolve, reject) => {
                const file = path.join(objectCache.dir, operation.sha1);
                const url = `http://${operation.source}/objectcache/${operation.sha1}`;
                console.log("Downloading", url, "->", file);
                let expectedSize;
                let stream;
                try {
                    stream = fs.createWriteStream(file);
                } catch (err) {
                    console.error("Got some error from write stream", err);
                    try {
                        fs.unlinkSync(file);
                    } catch (e) {
                    }
                    // Best effort: skip this object and keep the chain going.
                    resolve();
                    return;
                }
                axios({ method: 'get', url: url, responseType: 'stream' })
                    .then(response => {
                        expectedSize = parseInt(response.headers["content-length"]);
                        response.data.pipe(stream);
                        // BUGFIX: this handler previously took no parameter
                        // but referenced `err`, throwing a ReferenceError
                        // whenever the response stream actually errored.
                        response.data.on("error", err => {
                            console.error("Got some error from stream", err);
                            stream.destroy(new Error("http stream error"));
                        });
                    }).catch(err => {
                        console.error("Got some error", err);
                        stream.destroy(new Error("http error"));
                    });
                stream.on("finish", () => {
                    console.log("Finished writing file", file);
                    let stat;
                    try {
                        stat = fs.statSync(file);
                    } catch (err) {
                    }
                    // Only fully intact files (size matches Content-Length)
                    // are handed to the object cache; partial files are
                    // deleted.
                    if (!stat || stat.size != expectedSize) {
                        console.log("Got wrong size for", file, url, "\nGot", (stat ? stat.size : -1), "expected", expectedSize);
                        try {
                            fs.unlinkSync(file);
                        } catch (err) {
                        }
                    } else {
                        ++filesReceived;
                        objectCache.loadFile(file, stat.size);
                    }
                    resolve();
                });
                stream.on("error", err => {
                    console.error("Got stream error", err);
                    resolve();
                });
            });
        });
    });
    Promise.all(promises).then(() => {
        console.log("got results", filesReceived);
    });
});

// Root directory holding one subdirectory per toolchain environment.
const environmentsRoot = path.join(common.cacheDir(), "environments");
278
-
279
// Promisified child_process.exec.
// Resolves with the command's stdout on success; rejects with a descriptive
// Error on failure. Stderr output is logged (with its contents — previously
// only the command name was printed) but does not fail the command, since
// tools like tar emit harmless warnings there.
function exec(command, options)
{
    return new Promise((resolve, reject) => {
        child_process.exec(command, options, (err, stdout, stderr) => {
            if (stderr) {
                console.error("Got stderr from", command, stderr);
            }
            if (err) {
                reject(new Error(`Failed to run command ${command}: ${err.message}`));
            } else {
                console.log(command, "finished");
                // Resolving with stdout is backward compatible: existing
                // callers ignore the resolution value.
                resolve(stdout);
            }
        });
    });
}
295
-
296
// Scan environmentsRoot and boot a VM for every valid environment directory
// (one containing a parseable environment.json with a hash). Invalid entries
// are deleted. Resolves once every discovered VM has either become ready or
// failed; rejects only on an unreadable/uncreatable root directory.
function loadEnvironments()
{
    return new Promise((resolve, reject) => {
        fs.readdir(environmentsRoot, (err, files) => {
            if (err) {
                if (err.code == "ENOENT") {
                    // First run: create the root and resolve with no envs.
                    fs.mkdirp(environmentsRoot).then(() => {
                        resolve();
                    }).catch((err) => {
                        reject(new Error("Failed to create directory " + err.message));
                    });
                    return;
                }
                reject(err);
            } else {
                if (files) {
                    let pending = 0; // VMs created but not yet ready/failed
                    for (let i=0; i<files.length; ++i) {
                        try {
                            let dir = path.join(environmentsRoot, files[i]);
                            let stat = fs.statSync(dir);
                            if (!stat.isDirectory()) {
                                fs.removeSync(dir);
                                continue;
                            }
                            let env;
                            try {
                                // BUGFIX: the file was previously read from
                                // disk twice (once unused); read and parse it
                                // once.
                                env = JSON.parse(fs.readFileSync(path.join(dir, "environment.json")));
                            } catch (err) {
                                // Missing/corrupt json: handled below by the
                                // removal branch.
                            }
                            if (env && env.hash) {
                                let vm = new VM(dir, env.hash, option);
                                ++pending;
                                environments[env.hash] = vm;
                                let errorHandler = () => {
                                    // A VM that errors before becoming ready
                                    // still counts towards completion.
                                    if (!vm.ready && !--pending) {
                                        resolve();
                                    }
                                };
                                vm.once("error", errorHandler);
                                vm.once("ready", () => {
                                    vm.ready = true;
                                    vm.removeListener("error", errorHandler);
                                    if (!--pending)
                                        resolve();
                                });
                            } else {
                                console.log("Removing directory", dir);
                                fs.removeSync(dir);
                            }
                        } catch (err) {
                            console.error(`Got error loading environment ${files[i]} ${err.stack} ${err.message}`);
                        }
                    }
                    if (!pending)
                        resolve();
                }
            }
        });
    });
}
376
-
377
// Reconnect timer; set while we are disconnected from the scheduler.
let connectInterval;

// Scheduler-initiated shutdown; optionally wipes all cached environments
// before exiting with the requested code.
client.on("quit", message => {
    console.log(`Server wants us to quit: ${message.code || 0} purge environments: ${message.purgeEnvironments}`);
    if (message.purgeEnvironments) {
        try {
            fs.removeSync(environmentsRoot);
        } catch (err) {
            console.error("Failed to remove environments", environmentsRoot);
        }
    }
    process.exit(message.code || 0);
});

// The scheduler requires a different npm package version. Record the wanted
// version (for a wrapper script to pick up) and exit.
client.on("version_mismatch", message => {
    console.log(`We have the wrong version. We have ${client.npmVersion} but we need ${message.required_version}`);
    const versionFile = option("npm-version-file");
    if (versionFile) {
        try {
            fs.writeFileSync(versionFile, "@" + message.required_version);
        } catch (err) {
            console.error("Failed to write version file", versionFile, err);
        }
    }
    process.exit(message.code || 0);
});

client.on("clearObjectCache", () => {
    if (objectCache)
        objectCache.clear();
});

// Drop specific environments by hash: destroy the VM and forget about it.
client.on("dropEnvironments", message => {
    console.log(`Dropping environments ${message.environments}`);
    message.environments.forEach(env => {
        const environment = environments[env];
        if (environment) {
            const dir = path.join(environmentsRoot, env);
            console.log(`Purge environment ${env} ${dir}`);
            environment.destroy();
            delete environments[env];
        }
    });
});
421
-
422
// Download missing environments (tarballs) from the scheduler, unpack each
// into its own directory and boot a VM for it. Environments are fetched one
// at a time: work() consumes the head of message.environments until empty,
// then either informs the scheduler of our final list or restarts.
client.on("getEnvironments", message => {
    console.log(`Getting environments ${message.environments}`);
    // Normalize the scheduler address into "http://host:port/environment/".
    let base = option("scheduler", "localhost:8097");
    let idx = base.indexOf("://");
    if (idx != -1)
        base = base.substr(idx + 3);
    base = "http://" + base;
    if (!/:[0-9]+$/.exec(base))
        base += ":8097";
    base += "/environment/";
    function work()
    {
        if (!message.environments.length) {
            let restart = option("restart-on-new-environments");
            if (!restart) {
                setTimeout(() => {
                    client.send("environments", { environments: Object.keys(environments) });
                    console.log("Informing scheduler about our environments:", Object.keys(environments));
                }, option.int("inform-delay", 5000));
            } else {
                console.log("Restarting after we got our new environments");
                process.exit();
            }
            return;
        }
        let env = message.environments.splice(0, 1)[0];
        const url = base + env;
        console.log("Got environment url", url);

        const dir = path.join(environmentsRoot, env);
        try {
            fs.removeSync(dir);
        } catch (err) {
        }
        // NOTE(review): fs-extra's mkdirpSync return value is not a reliable
        // success indicator in all versions — TODO confirm; the falsy check
        // is kept as-is.
        if (!fs.mkdirpSync(dir)) {
            console.error("Can't create environment directory for builder: " + dir);
            setTimeout(work, 0);
            return;
        }

        let file = path.join(dir, "env.tar.gz");
        let stream = fs.createWriteStream(file);
        stream.on("finish", () => {
            console.log("Got finish", env);
            exec("tar xf '" + file + "'", { cwd: dir }).
                then(() => {
                    const json = path.join(dir, "environment.json");
                    console.log("Writing json file", json);
                    return fs.writeFile(json, JSON.stringify({ hash: env, created: new Date().toString() }));
                }).then(() => {
                    console.log(`Unlink ${file} ${env}`);
                    return fs.unlink(file);
                }).then(() => {
                    let vm = new VM(dir, env, option);
                    // Wait until the VM is either ready or errors out.
                    return new Promise((resolve, reject) => {
                        let done = false;
                        vm.on("error", err => {
                            if (!done) {
                                reject(err);
                            }
                        });
                        vm.on("ready", () => {
                            done = true;
                            resolve(vm);
                        });
                    });
                }).then(vm => {
                    environments[env] = vm;
                    setTimeout(work, 0);
                }).catch((err) => {
                    console.error("Got failure setting up environment", err);
                    try {
                        fs.removeSync(dir);
                    } catch (rmdirErr) {
                        console.error("Failed to remove directory", dir, rmdirErr);
                    }
                    setTimeout(work, 0);
                });
        });
        axios({ method: 'get', url: url, responseType: 'stream' })
            .then(response => {
                response.data.pipe(stream);
            }).catch(error => {
                console.log("Got error from request", error);
                const destroyed = stream.destroy instanceof Function;
                if (destroyed) {
                    stream.destroy();
                } else {
                    stream.end(); // the "finish" handler will continue the queue
                }
                try {
                    fs.removeSync(dir);
                } catch (err) {
                }
                if (!fs.mkdirpSync(dir)) {
                    console.error("Can't create environment directory for builder: " + dir);
                }
                if (destroyed) {
                    // BUGFIX: destroy() never emits "finish", so nothing
                    // rescheduled work() and the remaining downloads stalled
                    // (previously work() was only rescheduled when recreating
                    // the directory failed). Always move on to the next
                    // environment.
                    setTimeout(work, 0);
                }
            });
    }
    work();
});
524
-
525
// Re-announce our environment list on demand.
client.on("requestEnvironments", message => {
    console.log("scheduler wants us to inform of current environments", Object.keys(environments));
    client.send("environments", { environments: Object.keys(environments) });
});

// On (re)connect: stop any reconnect loop, start periodic load reporting and
// sync our object-cache contents to the scheduler.
client.on("connect", () => {
    restartShutdownTimer();
    console.log("connected");
    if (connectInterval) {
        clearInterval(connectInterval);
        connectInterval = undefined;
    }
    if (!load.running) {
        load.start(option("loadInterval", 1000));
    }
    if (objectCache) {
        client.send({ type: "objectCache", sha1s: objectCache.syncData(), maxSize: objectCache.maxSize, cacheSize: objectCache.size });
    }
});

client.on("error", err => {
    console.error("client error", err);
    if (load.running) {
        load.stop();
    }
});

// On disconnect: stop load reporting and retry the connection every second
// until "connect" fires again.
client.on("close", () => {
    console.log("client closed");
    if (load.running) {
        load.stop();
    }
    if (!connectInterval) {
        connectInterval = setInterval(() => {
            console.log("Reconnecting...");
            client.connect(Object.keys(environments));
        }, 1000);
    }
});
560
-
561
-
562
// HTTP/websocket server that compiler clients connect to.
const server = new Server(option, common.Version);

// Pending + running jobs, kept sorted by descending priority. The first
// client.slots entries are (potentially) running; the rest are queued.
let jobQueue = [];

// Advertise in the websocket handshake whether the client will have to wait:
// no wait when we can serve it straight from the object cache, otherwise the
// queue decides based on the request's priority.
server.on("headers", (headers, req) => {
    let wait = false;
    if (objectCache && objectCache.state(req.headers["x-fisk-sha1"]) == "exists") {
        wait = true;
    } else if (jobQueue.length >= client.slots) {
        // BUGFIX: the priority was previously parsed out of the sha1 header
        // (a hex digest, not a number — parseInt almost always yielded NaN
        // and thus 0); read the dedicated priority header instead.
        const priority = parseInt(req.headers["x-fisk-priority"]) || 0;
        // Walk the queued tail from the back; a higher-priority request may
        // jump ahead of queued lower-priority jobs and avoid waiting.
        let idx = jobQueue.length - 1;
        while (idx >= client.slots) {
            let job = jobQueue[idx].job;
            if (job.priority >= priority) {
                break;
            }
            --idx;
        }

        wait = idx >= client.slots;
    }
    headers.push(`x-fisk-wait: ${wait}`);
});
585
-
586
server.on("listen", app => {
    // Toggle verbose response logging here and in every loaded VM.
    function setDebug(enabled) {
        debug = enabled;
        for (const key in environments) {
            environments[key].setDebug(debug);
        }
    }
    app.get("/debug", (req, res) => {
        setDebug(true);
        res.sendStatus(200);
    });
    app.get("/nodebug", (req, res) => {
        setDebug(false);
        res.sendStatus(200);
    });

    // Serve cached object files to other builders (see fetch_cache_objects),
    // plus a JSON "info" endpoint describing the cache contents.
    app.get("/objectcache/*", (req, res) => {
        if (!objectCache) {
            res.sendStatus(404);
            return;
        }

        const parsed = Url.parse(req.url);
        const urlPath = parsed.pathname.substr(13); // strip "/objectcache/"
        if (urlPath == "info") {
            res.send(JSON.stringify(objectCache.info(req.query || {}), null, 4));
            return;
        }
        const data = objectCache.get(urlPath, true);
        if (!data) {
            res.sendStatus(404);
            return;
        }
        const file = path.join(objectCache.dir, urlPath);
        try {
            const stat = fs.statSync(file);
            res.set("Content-Length", stat.size);
            const rstream = fs.createReadStream(file);
            rstream.on("error", err => {
                console.error("Got read stream error for", file, err);
                rstream.close();
            });
            rstream.pipe(res);
        } catch (err) {
            console.error("Got some error", err);
            res.sendStatus(500);
        }
    });
});
637
-
638
// Start the first queued entry that is neither compiling (op set) nor being
// served from the object cache. Called whenever a slot may have freed up.
function startPending()
{
    for (const entry of jobQueue) {
        if (!entry.op && !entry.objectCache) {
            entry.start();
            break;
        }
    }
}
650
-
651
// Main job pipeline: a compiler client connected with a job. We wrap it in a
// queue entry `j`, insert it into jobQueue by priority, and start it when a
// slot is free. start() first consults the object cache; on a miss it streams
// the preprocessed source into a VM compile and sends the outputs back.
server.on("job", job => {
    restartShutdownTimer();
    let vm = environments[job.hash];
    if (!vm) {
        // We don't have the requested toolchain; the client will compile
        // locally or go elsewhere.
        console.error("No vm for this hash", job.hash);
        job.close();
        return;
    }
    const jobStartTime = Date.now();
    let uploadDuration;

    const j = {
        id: job.id,
        job: job,
        op: undefined,          // the VM compile operation, once started
        done: false,
        aborted: false,
        started: false,         // true once jobStarted was sent to the scheduler
        heartbeatTimer: undefined,
        buffer: undefined,      // data received before the compile op exists
        stdout: "",
        stderr: "",
        start: function() {
            let job = this.job; // shadows the outer `job` on purpose
            if (j.aborted)
                return;
            // getFromCache returns true when the cache takes ownership; the
            // callback then runs exactly once when streaming completes.
            if (getFromCache(job, err => {
                if (j.aborted)
                    return;
                if (err) {
                    console.error("cache failed, let the client handle doing it itself");
                    job.close();
                } else {
                    let info = {
                        type: "cacheHit",
                        client: {
                            hostname: job.hostname,
                            ip: job.ip,
                            name: job.name,
                            user: job.user
                        },
                        sourceFile: job.sourceFile,
                        sha1: job.sha1,
                        id: job.id
                    };
                    client.send(info);

                    console.log("Job finished from cache", j.id, job.sourceFile, "for", job.ip, job.name);
                }
                j.done = true;
                let idx = jobQueue.indexOf(j);
                if (idx != -1)
                    jobQueue.splice(idx, 1);
                startPending();
            })) {
                j.objectCache = true;
                return;
            }
            // Cache miss: really compile.
            j.started = true;
            client.send("jobStarted", {
                id: job.id,
                sourceFile: job.sourceFile,
                client: {
                    name: job.name,
                    hostname: job.hostname,
                    ip: job.ip,
                    user: job.user
                },
                builder: {
                    ip: job.builderIp,
                    name: option("name"),
                    hostname: option("hostname") || os.hostname(),
                    port: server.port
                }
            });

            console.log("Starting job", j.id, job.sourceFile, "for", job.ip, job.name, "wait", job.wait);
            j.op = vm.startCompile(job.commandLine, job.argv0, job.id);
            // Flush any source data that arrived before the op existed.
            if (j.buffer) {
                j.op.feed(j.buffer);
                j.buffer = undefined;
            }
            if (job.wait) {
                // The client was told to wait in the handshake; resume it.
                job.send("resume", {});
            }
            j.op.on("stdout", data => { j.stdout += data; }); // ### is there ever any stdout? If there is, does the order matter for stdout vs stderr?
            j.op.on("stderr", data => { j.stderr += data; });
            j.op.on("finished", event => {
                j.done = true;
                if (j.aborted)
                    return;
                const end = Date.now();
                let idx = jobQueue.indexOf(j);
                console.log("Job finished", j.id, job.sourceFile, "for", job.ip, job.name, "exitCode", event.exitCode, "error", event.error, "in", (end - jobStartTime) + "ms");
                if (idx != -1) {
                    jobQueue.splice(idx, 1);
                } else {
                    console.error("Can't find j?");
                    return;
                }

                // this can't be async, the directory is removed after the event is fired
                const forCache = event.files.map(f => ({ contents: fs.readFileSync(f.absolute), path: f.path }));
                // Compress outputs for the wire when the client asked for it;
                // the object cache always stores the uncompressed contents.
                const contents = !j.job.compressed ? forCache : forCache.map(x => ({
                    path: x.path,
                    contents: x.contents.byteLength ? zlib.gzipSync(x.contents) : x.contents
                }));
                let response = {
                    type: "response",
                    index: contents.map(item => { return { path: item.path, bytes: item.contents.length }; }),
                    success: event.success,
                    exitCode: event.exitCode,
                    sha1: job.sha1,
                    stderr: j.stderr,
                    stdout: j.stdout
                };
                if (event.error)
                    response.error = event.error;
                if (debug) {
                    console.log("Sending response", job.ip, job.hostname, response);
                }
                // Header first, then the raw file payloads in index order.
                job.send(response);
                if (response.exitCode === 0 && event.success && objectCache && response.sha1 && objectCache.state(response.sha1) == "none") {
                    response.sourceFile = job.sourceFile;
                    response.commandLine = job.commandLine;
                    response.environment = job.hash;
                    objectCache.add(response, forCache);
                }

                contents.forEach(x => {
                    if (x.contents.byteLength) {
                        job.send(x.contents);
                    }
                });
                if (event.success) {
                    client.send("jobFinished", {
                        id: j.id,
                        cppSize: event.cppSize,
                        compileDuration: event.compileDuration,
                        compileSpeed: (event.cppSize / event.compileDuration),
                        uploadDuration: uploadDuration,
                        uploadSpeed: (event.cppSize / uploadDuration)
                    });
                } else {
                    client.send("jobAborted", {
                        id: j.id,
                        cppSize: event.cppSize,
                        compileDuration: event.compileDuration,
                        compileSpeed: (event.cppSize / event.compileDuration),
                        uploadDuration: uploadDuration,
                        uploadSpeed: (event.cppSize / uploadDuration)
                    });
                }
                startPending();
            });
        },
        cancel: function() {
            if (!j.done && j.op) {
                j.done = true;
                j.op.cancel();
            }
        }
    };

    // Keep the client websocket alive while the job runs.
    // NOTE(review): this is a setInterval cleared with clearTimeout (works in
    // Node, but clearInterval would be the matching call), and it checks
    // job.done/job.aborted (set in the "close" handler below) rather than
    // j.done/j.aborted — confirm before changing either.
    job.heartbeatTimer = setInterval(() => {
        if (job.done || job.aborted || job.readyState !== ws.OPEN) {
            clearTimeout(job.heartbeatTimer);
        } else {
            job.send("heartbeat", {});
        }
    }, 5000);

    job.on("error", err => {
        job.webSocketError = `${err} from ${job.name} ${job.hostname} ${job.ip}`;
        console.error("got error from job", job.webSocketError);
        j.done = true;
    });
    job.on("close", () => {
        job.removeAllListeners();
        job.done = true;
        // If we're still queued/running, abort and free the slot.
        let idx = jobQueue.indexOf(j);
        if (idx != -1) {
            j.aborted = true;
            jobQueue.splice(idx, 1);
            j.cancel();
            if (j.started)
                client.send("jobAborted", { id: j.id, webSocketError: job.webSocketError });
            startPending();
        }
    });

    // Preprocessed source data from the client; buffered until the compile
    // op exists, then fed straight into it.
    job.on("data", data => {
        uploadDuration = Date.now() - jobStartTime;
        if (!j.op) {
            j.buffer = data.data;
            console.log("buffering...", j.buffer.byteLength);
        } else {
            j.op.feed(data.data);
        }
    });

    // Insert into jobQueue keeping it sorted by descending priority (stable:
    // new jobs go after existing jobs of equal priority).
    let idx = jobQueue.length;
    while (idx > 0) {
        const jobJob = jobQueue[idx - 1].job;
        if (jobJob.priority >= job.priority) {
            break;
        }
        --idx;
    }
    jobQueue.splice(idx, 0, j);

    // Start immediately when a slot is free; otherwise startPending() will
    // pick it up when one opens.
    if (jobQueue.length <= client.slots) {
        j.start();
    }
});
876
-
877
server.on("error", err => {
    console.error("server error", err);
});

// Load environments from disk, then connect to the scheduler and start
// accepting jobs; on failure, retry from scratch every second.
function start() {
    loadEnvironments().then(() => {
        console.log(`Loaded ${Object.keys(environments).length} environments from ${environmentsRoot}`);
        console.log("environments", Object.keys(environments));
        client.connect(Object.keys(environments));
        server.listen();
    }).catch((err) => {
        console.error(`Failed to initialize ${err.message}`);
        setTimeout(start, 1000);
    });
}

// Forward load measurements to the scheduler. Best effort: failures while
// disconnected are intentionally ignored.
load.on("data", measure => {
    try {
        client.send("load", { measure: measure });
    } catch (err) {
    }
});

start();