querysub 0.284.0 → 0.286.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "querysub",
- "version": "0.284.0",
+ "version": "0.286.0",
  "main": "index.js",
  "license": "MIT",
  "note1": "note on node-forge fork, see https://github.com/digitalbazaar/forge/issues/744 for details",
@@ -65,7 +65,8 @@ let curHash = "";
  function onLiveHashChange(liveHash: string, refreshThresholdTime: number) {
  if (liveHash === curHash) return;
  let prevHash = curHash;
- let notifyIntervals = [0, 0.1, 0.5, 1];
+ // Don't notify the user right away. Hopefully they refresh naturally, and we never have to notify them at all!
+ let notifyIntervals = [0.4, 0.75, 1];
  console.log(blue(`Client liveHash changed ${liveHash}, prev hash: ${prevHash}`));
  // If we are replacing an already existing notification, don't show immediately
  let skipFirst = false;
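
A minimal sketch (not part of the package diff) of one plausible reading of the new notifyIntervals values, assuming each entry is a fraction of refreshThresholdTime (in milliseconds) at which the user is nudged to refresh; showRefreshNotification is a hypothetical stand-in for the real notification UI.

    // Hypothetical stand-in for the real notification call.
    function showRefreshNotification(fraction: number) {
        console.log(`Please refresh (at ${Math.round(fraction * 100)}% of the refresh window)`);
    }
    // Assumption: each interval is a fraction of refreshThresholdTime, so with
    // [0.4, 0.75, 1] nothing fires before 40% of the window has elapsed.
    function scheduleRefreshNotifications(refreshThresholdTime: number) {
        const notifyIntervals = [0.4, 0.75, 1];
        for (const fraction of notifyIntervals) {
            setTimeout(() => showRefreshNotification(fraction), fraction * refreshThresholdTime);
        }
    }
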
@@ -45,7 +45,8 @@ export class ServicesListPage extends qreact.Component {
  repoUrl: gitInfo.repoUrl,
  gitRef: gitInfo.latestRef,
  command: "ping 1.1.1.1",
- deploy: false
+ deploy: false,
+ rollingWindow: 0,
  },
  info: {
  title: `New Service created at ${formatNiceDateTime(Date.now())}`,
@@ -107,8 +108,9 @@ export class ServicesListPage extends qreact.Component {
  Deploy: {config.parameters.deploy ? "enabled" : "disabled"}
  </div>
  </div>
- <div>
- Updated {formatDateJSX(config.info.lastUpdatedTime)}
+ <div className={css.vbox(4)}>
+ <div>Updated {formatDateJSX(config.info.lastUpdatedTime)} AGO</div>
+ {config.parameters.rollingWindow && <div>Rolling window: {formatTime(config.parameters.rollingWindow)}</div>}
  </div>
  </div>
  </Anchor>
@@ -21,7 +21,7 @@ import { lazy } from "socket-function/src/caching";
  import { getGitRefLive, getGitURLLive, setGitRef } from "../4-deploy/git";
  import { blue, green, magenta, red } from "socket-function/src/formatting/logColors";
  import { shutdown } from "../diagnostics/periodic";
- import { onServiceConfigChange } from "./machineController";
+ import { onServiceConfigChange, triggerRollingUpdate } from "./machineController";
  import { PromiseObj } from "../promise";
  import path from "path";
 
@@ -314,13 +314,56 @@ async function removeOldNodeId(screenName: string) {
  await forceRemoveNode(nodeId);
  }
  }
+
+
+ // originalName => info
+ let rollingKeepScreenAlive = new Map<string, {
+ rollingScreenName: string;
+ originalScreenName: string;
+ pinnedDuration: number;
+ pinnedTime: number;
+ }>();
+ // rollingScreenName => originalScreenName
+ let rollingScreens = new Map<string, string>();
+
  const runScreenCommand = measureWrap(async function runScreenCommand(config: {
  screenName: string;
  command: string;
+ rollingWindow: number | undefined;
  }) {
- let screenName = config.screenName;
- // C:/cygwin64/bin/tmux has-session -t myproj 2>/dev/null || C:/cygwin64/bin/tmux new-session -d -s myproj
  let prefix = getTmuxPrefix();
+ let screenName = config.screenName;
+
+ if (config.rollingWindow) {
+ let screens = await getScreenState();
+ let rollingObj = rollingKeepScreenAlive.get(screenName);
+ if (rollingObj) {
+ rollingObj.pinnedDuration = config.rollingWindow;
+ }
+ let existingScreen = screens.find(x => x.screenName === screenName);
+ if (existingScreen && !rollingObj) {
+ let nodeIdPath = os.homedir() + "/" + SERVICE_FOLDER + screenName + "/" + SERVICE_NODE_FILE_NAME;
+ let rollingFinalTime = Date.now() + config.rollingWindow;
+ if (fs.existsSync(nodeIdPath)) {
+ let nodeId = await fs.promises.readFile(nodeIdPath, "utf8");
+ await triggerRollingUpdate({
+ nodeId,
+ time: rollingFinalTime,
+ });
+ }
+ console.log(green(`Renaming screen ${screenName} to for rolling interval ${config.rollingWindow} at ${new Date().toLocaleString()}`));
+ let rollingScreenName = screenName + "-rolling";
+ await runPromise(`${prefix}tmux rename-window -t ${screenName} ${rollingScreenName}`);
+ rollingScreens.set(rollingScreenName, screenName);
+ rollingKeepScreenAlive.set(rollingScreenName, {
+ rollingScreenName,
+ originalScreenName: screenName,
+ pinnedDuration: config.rollingWindow,
+ pinnedTime: Date.now(),
+ });
+ }
+ }
+
  try {
  // Throw if it already exists
  await runPromise(`${prefix}tmux new -s ${screenName} -d`);
@@ -352,6 +395,7 @@ const runScreenCommand = measureWrap(async function runScreenCommand(config: {
  await runScreenCommand({
  screenName,
  command: config.command,
+ rollingWindow: config.rollingWindow,
  });
  return;
  }
@@ -520,7 +564,7 @@ const resyncServicesBase = runInSerial(measureWrap(async function resyncServices
  continue;
  }
 
- console.log(`Resyncing service ${magenta(screenName)}`);
+ console.log(`Resyncing service ${magenta(screenName)}, with ${JSON.stringify(config.parameters)}`);
 
  await fs.promises.writeFile(parameterPath, newParametersString);
 
@@ -533,6 +577,7 @@ const resyncServicesBase = runInSerial(measureWrap(async function resyncServices
  await runScreenCommand({
  screenName,
  command: config.parameters.command,
+ rollingWindow: config.parameters.rollingWindow,
  });
  await delay(2000);
  let newScreens = await getScreenState(false);
@@ -566,6 +611,20 @@ const resyncServicesBase = runInSerial(measureWrap(async function resyncServices
 
  for (let { screenName } of screenState) {
  if (screenNamesUsed.has(screenName)) continue;
+ let rollingScreenRemainder = rollingScreens.get(screenName);
+ if (rollingScreenRemainder) {
+ let rollingInfo = rollingKeepScreenAlive.get(rollingScreenRemainder);
+ if (!rollingInfo) {
+ rollingScreens.delete(screenName);
+ }
+ if (rollingInfo && rollingInfo.pinnedTime + rollingInfo.pinnedDuration < Date.now()) {
+ console.log(green(`Killing rolling screen ${screenName} because it's expired at ${new Date().toLocaleString()}`));
+ rollingScreens.delete(screenName);
+ rollingKeepScreenAlive.delete(rollingScreenRemainder);
+ rollingInfo = undefined;
+ }
+ if (rollingInfo) continue;
+ }
  await killScreen({
  screenName,
  });
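
A minimal sketch (not part of the package diff) isolating the expiry rule used in the hunk above: a rolling screen pinned at pinnedTime is kept alive for pinnedDuration milliseconds, after which it becomes eligible for killScreen.

    // Expiry predicate, as implied by the check above; times are in milliseconds.
    function isRollingScreenExpired(
        info: { pinnedTime: number; pinnedDuration: number },
        now: number = Date.now()
    ): boolean {
        return info.pinnedTime + info.pinnedDuration < now;
    }
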
@@ -44,6 +44,12 @@ export function watchOnRollingUpdate(config: {
  }) {
  rollingUpdateWatchers.add(config.callback);
  }
+ export async function triggerRollingUpdate(config: {
+ nodeId: string;
+ time: number;
+ }) {
+ await OnServiceChange.nodes[config.nodeId].onRollingUpdate(config.time);
+ }
  class OnServiceChangeBase {
  public async onServiceConfigChange() {
  await triggerServiceConfigChangeCallbacks();
@@ -78,9 +78,8 @@ export type ServiceConfig = {
  /** Not set by default, so we can setup the configuration before deploying it (or so we can undeploy easily without deleting it) */
  deploy?: boolean;
 
-
- /** TODO: */
- //rollingWindow?: number
+ /** Keep the instances of the service alive for this long after we start a new one. Only keeps the oldest alive, so multiple updates won't break things (or update the oldest one, so if the oldest one is bad, set this to 0 to make sure it dies immediately). */
+ rollingWindow?: number;
  };
  info: {
  title: string;
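
A minimal sketch (not part of the package diff) of the new field in use. rollingWindow is in milliseconds (runScreenCommand adds it directly to Date.now()); the surrounding field names follow the parameters object built in ServicesListPage above, and the 10-minute value is only the test value suggested in the notes file below.

    // Hypothetical parameters object exercising the new rollingWindow field.
    let exampleParameters = {
        command: "ping 1.1.1.1",
        deploy: true,
        // Keep the previous instance alive for 10 minutes after the new one starts.
        rollingWindow: 10 * 60 * 1000,
    };
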
@@ -1,15 +1,3 @@
- OH! the bootstrap root index file has CORS issues
- - We didn't notice this before because the cached value always worked?
-
- The actual http registration isn't working? And our edgeNode file is so messy it's hard to see if it's there or not... ugh...
-
-
- 8) Use a special service for the HTTP bootstrapper, and then have 2 others that are on other ports
- --bootstraponly is added, and should work?
-
- 9) Verify the bootstrap server can't be picked as the endpoint server
-
-
  9) Rolling service updates
  - Add rollingWindow to the definition
  - Set for public facing services (but not for scripts)
@@ -28,6 +16,10 @@ The actual http registration isn't working? And our edgeNode file is so messy it
  - In HTTP server, notify users, in the same way we notify for hash updates, that they will need to switch servers
  - Verify this update works with a relatively low rolling update window, ensuring it force refreshes before the server actually restarts.
 
+ 10) Verify we can using a rolling update with CYOA, and that it'll notify users, then refresh (eventually)
+ - I guess set the rollingWindow to 10 minutes for testing.
+
+
  10) Add RAM total, ram % used, cpu count, CPU %, disk size, disk % used to machine info
  - Show this all in the list page, with nice bars?
  - Filled, but the total size will also depend on the maximum (to a degree), so it's very nice.