@openfn/ws-worker 1.18.0 → 1.19.0

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,27 @@
1
1
  # ws-worker
2
2
 
3
+ ## 1.19.0
4
+
5
+ ### Minor Changes
6
+
7
+ - 475f8cf: Add options to profile memory usage in workflows. This should give a more accurate reading of the memory used by a run, but may have a small impact on performance
8
+
9
+ ### Patch Changes
10
+
11
+ - 34edb8b: Improved logging of available memory on claim
12
+ - Updated dependencies [3cd3cd5]
13
+ - Updated dependencies [475f8cf]
14
+ - @openfn/runtime@1.7.4
15
+ - @openfn/engine-multi@1.8.0
16
+
17
+ ## 1.18.1
18
+
19
+ ### Patch Changes
20
+
21
+ - b61bf9b: Fix an issue where memory may not be released after runs
22
+ - Updated dependencies [b61bf9b]
23
+ - @openfn/engine-multi@1.7.1
24
+
3
25
  ## 1.18.0
4
26
 
5
27
  ### Minor Changes
package/dist/index.js CHANGED
@@ -120,6 +120,7 @@ var tryWithBackoff = (fn, opts = {}) => {
120
120
  var try_with_backoff_default = tryWithBackoff;
121
121
 
122
122
  // src/api/claim.ts
123
+ import v8 from "node:v8";
123
124
  import * as Sentry from "@sentry/node";
124
125
  import crypto from "node:crypto";
125
126
  import * as jose from "jose";
@@ -177,7 +178,13 @@ var claim = (app, logger = mockLogger, options = {}) => {
177
178
  }
178
179
  const claimId = ++claimIdGen;
179
180
  app.openClaims[claimId] = demand;
180
- logger.debug(`requesting run (capacity ${activeWorkers}/${maxWorkers})`);
181
+ const { used_heap_size, heap_size_limit } = v8.getHeapStatistics();
182
+ const usedHeapMb = Math.round(used_heap_size / 1024 / 1024);
183
+ const totalHeapMb = Math.round(heap_size_limit / 1024 / 1024);
184
+ const memPercent = Math.round(usedHeapMb / totalHeapMb * 100);
185
+ logger.debug(
186
+ `Claiming runs :: demand ${demand} | capacity ${activeWorkers}/${maxWorkers} | memory ${memPercent}% (${usedHeapMb}/${totalHeapMb}mb)`
187
+ );
181
188
  app.events.emit(INTERNAL_CLAIM_START);
182
189
  const start = Date.now();
183
190
  app.queueChannel.push(CLAIM, {
@@ -889,8 +896,11 @@ function execute(channel, engine, logger, plan, input, options = {}, onFinish =
889
896
  }
890
897
  const lightningEvent = eventMap[eventName] ?? eventName;
891
898
  try {
899
+ let start = Date.now();
892
900
  await handler(context, event);
893
- logger.info(`${plan.id} :: ${lightningEvent} :: OK`);
901
+ logger.info(
902
+ `${plan.id} :: sent ${lightningEvent} :: OK :: ${Date.now() - start}ms`
903
+ );
894
904
  } catch (e) {
895
905
  if (!e.reportedToSentry) {
896
906
  Sentry3.captureException(e);
package/dist/start.js CHANGED
@@ -269,6 +269,7 @@ var tryWithBackoff = (fn, opts = {}) => {
269
269
  var try_with_backoff_default = tryWithBackoff;
270
270
 
271
271
  // src/api/claim.ts
272
+ import v8 from "node:v8";
272
273
  import * as Sentry from "@sentry/node";
273
274
  import crypto2 from "node:crypto";
274
275
  import * as jose from "jose";
@@ -326,7 +327,13 @@ var claim = (app, logger2 = mockLogger, options = {}) => {
326
327
  }
327
328
  const claimId = ++claimIdGen;
328
329
  app.openClaims[claimId] = demand;
329
- logger2.debug(`requesting run (capacity ${activeWorkers}/${maxWorkers})`);
330
+ const { used_heap_size, heap_size_limit } = v8.getHeapStatistics();
331
+ const usedHeapMb = Math.round(used_heap_size / 1024 / 1024);
332
+ const totalHeapMb = Math.round(heap_size_limit / 1024 / 1024);
333
+ const memPercent = Math.round(usedHeapMb / totalHeapMb * 100);
334
+ logger2.debug(
335
+ `Claiming runs :: demand ${demand} | capacity ${activeWorkers}/${maxWorkers} | memory ${memPercent}% (${usedHeapMb}/${totalHeapMb}mb)`
336
+ );
330
337
  app.events.emit(INTERNAL_CLAIM_START);
331
338
  const start = Date.now();
332
339
  app.queueChannel.push(CLAIM, {
@@ -1038,8 +1045,11 @@ function execute(channel, engine, logger2, plan, input, options = {}, onFinish =
1038
1045
  }
1039
1046
  const lightningEvent = eventMap[eventName] ?? eventName;
1040
1047
  try {
1048
+ let start = Date.now();
1041
1049
  await handler(context, event);
1042
- logger2.info(`${plan.id} :: ${lightningEvent} :: OK`);
1050
+ logger2.info(
1051
+ `${plan.id} :: sent ${lightningEvent} :: OK :: ${Date.now() - start}ms`
1052
+ );
1043
1053
  } catch (e) {
1044
1054
  if (!e.reportedToSentry) {
1045
1055
  Sentry3.captureException(e);
@@ -6418,6 +6428,9 @@ function setArg(argValue, envValue, defaultValue) {
6418
6428
  if (typeof defaultValue === "number" && envValue && !argValue) {
6419
6429
  return parseInt(envValue);
6420
6430
  }
6431
+ if (typeof defaultValue === "boolean" && envValue && argValue === void 0) {
6432
+ return envValue === "true" || envValue === "1";
6433
+ }
6421
6434
  return argValue ?? envValue ?? defaultValue;
6422
6435
  }
6423
6436
  function parseArgs(argv) {
@@ -6436,6 +6449,8 @@ function parseArgs(argv) {
6436
6449
  WORKER_MAX_RUN_MEMORY_MB,
6437
6450
  WORKER_MESSAGE_TIMEOUT_SECONDS,
6438
6451
  WORKER_PORT,
6452
+ WORKER_PROFILE,
6453
+ WORKER_PROFILE_POLL_INTERVAL_MS,
6439
6454
  WORKER_REPO_DIR,
6440
6455
  WORKER_SECRET,
6441
6456
  WORKER_SENTRY_DSN,
@@ -6518,6 +6533,12 @@ function parseArgs(argv) {
6518
6533
  }).option("engine-validation-retries", {
6519
6534
  description: "The number of times to retry engine validation. Useful in hosted environments. Default 3. ENV: WORKER_VALIDATION_RETRIES",
6520
6535
  type: "number"
6536
+ }).option("profile", {
6537
+ description: "Enable profiling for runs. Default false. Env: WORKER_PROFILE",
6538
+ type: "boolean"
6539
+ }).option("profile-poll-interval-ms", {
6540
+ description: "Interval for polling profile data, in milliseconds. Default 10. Env: WORKER_PROFILE_POLL_INTERVAL_MS",
6541
+ type: "number"
6521
6542
  });
6522
6543
  const args2 = parser2.parse();
6523
6544
  return {
@@ -6580,6 +6601,12 @@ function parseArgs(argv) {
6580
6601
  args2.engineValidationTimeoutMs,
6581
6602
  WORKER_VALIDATION_TIMEOUT_MS,
6582
6603
  5e3
6604
+ ),
6605
+ profile: setArg(args2.profile, WORKER_PROFILE, false),
6606
+ profilePollIntervalMs: setArg(
6607
+ args2.profilePollIntervalMs,
6608
+ WORKER_PROFILE_POLL_INTERVAL_MS,
6609
+ 10
6583
6610
  )
6584
6611
  };
6585
6612
  }
@@ -6659,7 +6686,9 @@ if (args.mock) {
6659
6686
  statePropsToRemove: args.statePropsToRemove,
6660
6687
  runTimeoutMs: args.maxRunDurationSeconds * 1e3,
6661
6688
  workerValidationTimeout: args.engineValidationTimeoutMs,
6662
- workerValidationRetries: args.engineValidationRetries
6689
+ workerValidationRetries: args.engineValidationRetries,
6690
+ profile: args.profile,
6691
+ profilePollInterval: args.profilePollIntervalMs
6663
6692
  };
6664
6693
  logger.debug("Creating runtime engine...");
6665
6694
  logger.debug("Engine options:", engineOptions);
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@openfn/ws-worker",
3
- "version": "1.18.0",
3
+ "version": "1.19.0",
4
4
  "description": "A Websocket Worker to connect Lightning to a Runtime Engine",
5
5
  "main": "dist/index.js",
6
6
  "type": "module",
@@ -23,10 +23,10 @@
23
23
  "koa-logger": "^3.2.1",
24
24
  "phoenix": "1.7.10",
25
25
  "ws": "^8.18.3",
26
- "@openfn/engine-multi": "1.7.0",
27
- "@openfn/runtime": "1.7.3",
28
26
  "@openfn/lexicon": "^1.2.5",
29
- "@openfn/logger": "1.0.6"
27
+ "@openfn/runtime": "1.7.4",
28
+ "@openfn/logger": "1.0.6",
29
+ "@openfn/engine-multi": "1.8.0"
30
30
  },
31
31
  "devDependencies": {
32
32
  "@types/koa": "^2.15.0",
@@ -43,7 +43,7 @@
43
43
  "tsup": "^6.7.0",
44
44
  "typescript": "^4.9.5",
45
45
  "yargs": "^17.7.2",
46
- "@openfn/lightning-mock": "2.3.1"
46
+ "@openfn/lightning-mock": "2.3.3"
47
47
  },
48
48
  "files": [
49
49
  "dist",