swarpc 0.18.0 → 0.20.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -210,6 +210,39 @@ for (const result of await client.initDB.broadcast("localhost:5432")) {
210
210
  }
211
211
  ```
212
212
 
213
+ You also have a very convenient way to aggregate the results of all nodes, if you don't need to handle errors in a fine-grained way:
214
+
215
+ ```ts
216
+ const userbase = await client.tableSize.broadcast
217
+ .orThrow("users")
218
+ .then((counts) => sum(counts))
219
+ .catch((e) => {
220
+ // e is an AggregateError with every failing node's error
221
+ console.error("Could not get total user count:", e);
222
+ });
223
+ ```
224
+
225
+ Otherwise, you have access to a handful of convenience properties on the returned array, to help you narrow down what happened on each node:
226
+
227
+ ```ts
228
+ async function userbase() {
229
+ const counts = await client.tableSize.broadcast("users");
230
+
231
+ if (counts.ko) {
232
+ throw new Error(
233
+ `All nodes failed to get table size: ${counts.failureSummary}`,
234
+ );
235
+ }
236
+
237
+ return {
238
+ exact: counts.ok,
239
+ count:
240
+ sum(counts.successes) +
241
+ average(counts.successes) * counts.failures.length,
242
+ };
243
+ }
244
+ ```
245
+
213
246
  ### Make cancelable requests
214
247
 
215
248
  #### Implementation
@@ -217,21 +250,16 @@ for (const result of await client.initDB.broadcast("localhost:5432")) {
217
250
  To make your procedures meaningfully cancelable, you have to make use of the [`AbortSignal`](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal) API. This is passed as a third argument when implementing your procedures:
218
251
 
219
252
  ```js
220
- server.searchIMDb(async ({ query }, onProgress, abort) => {
253
+ server.searchIMDb(async ({ query }, onProgress, { abortSignal }) => {
221
254
  // If you're doing heavy computation without fetch:
222
- let aborted = false
223
- abort?.addEventListener("abort", () => {
224
- aborted = true
225
- })
226
-
227
- // Use `aborted` to check if the request was canceled within your hot loop
255
+ // Use `abortSignal?.throwIfAborted()` within hot loops and at key points
228
256
  for (...) {
229
- /* here */ if (aborted) return
257
+ abortSignal?.throwIfAborted();
230
258
  ...
231
259
  }
232
260
 
233
261
  // When using fetch:
234
- await fetch(..., { signal: abort })
262
+ await fetch(..., { signal: abortSignal })
235
263
  })
236
264
  ```
237
265
 
@@ -298,6 +326,16 @@ const result = await swarpc.onceBy("global-search").searchIMDb({ query });
298
326
 
299
327
  This is useful when you want to ensure only one operation of a certain type is running at a time, regardless of which procedure is being called.
300
328
 
329
+ #### With broadcasting
330
+
331
+ You can combine "once" mode with broadcasting as well, just use `.broadcast.once` or `.broadcast.onceBy` instead of `.once` or `.onceBy`:
332
+
333
+ ```js
334
+ // Load the inference model on all nodes. If we call this again before the previous model finishes loading,
335
+ // the previous load requests get cancelled.
336
+ await swarpc.loadInferenceModel.broadcast.once({ url });
337
+ ```
338
+
301
339
  ### Polyfill a `localStorage` for the Server to access
302
340
 
303
341
  You might call third-party code that accesses `localStorage` from within your procedures.
package/dist/client.d.ts CHANGED
@@ -18,9 +18,20 @@ export type SwarpcClient<Procedures extends ProceduresMap> = {
18
18
  onceBy: (key: string) => {
19
19
  [F in keyof Procedures]: ClientMethodCallable<Procedures[F]>;
20
20
  };
21
+ /**
22
+ * Disconnects all event listeners created by the client, and:
23
+ * - for Shared Workers: closes the port started by the client
24
+ * - for Dedicated Workers: terminates the worker instance
25
+ * - for Service Workers: does nothing (there is no connection to close)
26
+ */
27
+ destroy(): void;
21
28
  } & {
22
29
  [F in keyof Procedures]: ClientMethod<Procedures[F]>;
23
30
  };
31
+ /**
32
+ * Names that can't be used as procedure names. Using one will fail at runtime when starting the client.
33
+ */
34
+ export declare const RESERVED_PROCEDURE_NAMES: readonly ["onceBy", "destroy"];
24
35
  /**
25
36
  *
26
37
  * @param procedures procedures the client will be able to call, see {@link ProceduresMap}
@@ -34,16 +45,18 @@ export type SwarpcClient<Procedures extends ProceduresMap> = {
34
45
  * @param options.restartListener If true, will force the listener to restart even if it has already been started. You should probably leave this to false, unless you are testing and want to reset the client state.
35
46
  * @param options.localStorage Define a in-memory localStorage with the given key-value pairs. Allows code called on the server to access localStorage (even though SharedWorkers don't have access to the browser's real localStorage)
36
47
  * @param options.nodes the number of workers to use for the server, defaults to {@link navigator.hardwareConcurrency}.
48
+ * @param options.nodeIds node IDs to use. If not provided, random IDs will be generated for each node.
37
49
  * @returns a sw&rpc client instance. Each property of the procedures map will be a method, that accepts an input and an optional onProgress callback, see {@link ClientMethod}
38
50
  *
39
51
  * An example of defining and using a client:
40
52
  * {@includeCode ../example/src/routes/+page.svelte}
41
53
  */
42
- export declare function Client<Procedures extends ProceduresMap>(procedures: Procedures, { worker, nodes: nodeCount, loglevel, restartListener, hooks, localStorage, }?: {
54
+ export declare function Client<Procedures extends ProceduresMap>(procedures: Procedures, { worker, nodes: nodeCount, loglevel, restartListener, hooks, localStorage, nodeIds, }?: {
43
55
  worker?: WorkerConstructor | string;
44
56
  nodes?: number;
45
57
  hooks?: Hooks<Procedures>;
46
58
  loglevel?: LogLevel;
47
59
  restartListener?: boolean;
48
60
  localStorage?: Record<string, any>;
61
+ nodeIds?: string[];
49
62
  }): SwarpcClient<Procedures>;
package/dist/client.js CHANGED
@@ -1,24 +1,21 @@
1
1
  import { createLogger, } from "./log.js";
2
- import { makeNodeId, nodeIdOrSW, whoToSendTo } from "./nodes.js";
3
- import { zProcedures, } from "./types.js";
4
- import { findTransferables } from "./utils.js";
2
+ import { broadcastNodes, makeNodeId, nodeIdOrSW, whoToSendTo, } from "./nodes.js";
3
+ import { RequestCancelledError, zProcedures, } from "./types.js";
4
+ import { findTransferables, extractFulfilleds, extractRejecteds, sizedArray, } from "./utils.js";
5
+ export const RESERVED_PROCEDURE_NAMES = ["onceBy", "destroy"];
5
6
  const pendingRequests = new Map();
6
- const onceByMethod = new Map();
7
- const onceByMethodAndKey = new Map();
8
- const onceByGlobalKey = new Map();
9
7
  const emptyProgressCallback = () => { };
10
- let _clientListenerStarted = new Set();
11
- export function Client(procedures, { worker, nodes: nodeCount, loglevel = "debug", restartListener = false, hooks = {}, localStorage = {}, } = {}) {
8
+ let _clientListeners = new Map();
9
+ export function Client(procedures, { worker, nodes: nodeCount, loglevel = "debug", restartListener = false, hooks = {}, localStorage = {}, nodeIds = [], } = {}) {
12
10
  const l = createLogger("client", loglevel);
13
11
  if (restartListener)
14
- _clientListenerStarted.clear();
15
- const instance = { [zProcedures]: procedures };
12
+ _clientListeners.clear();
16
13
  nodeCount ??= navigator.hardwareConcurrency || 1;
17
14
  let nodes;
18
15
  if (worker) {
19
16
  nodes = {};
20
- for (const _ of Array.from({ length: nodeCount })) {
21
- const id = makeNodeId();
17
+ for (const [i] of Array.from({ length: nodeCount }).entries()) {
18
+ const id = nodeIds[i] ?? makeNodeId();
22
19
  if (typeof worker === "string") {
23
20
  nodes[id] = new Worker(worker, { name: id });
24
21
  }
@@ -28,14 +25,55 @@ export function Client(procedures, { worker, nodes: nodeCount, loglevel = "debug
28
25
  }
29
26
  l.info(null, `Started ${nodeCount} node${nodeCount > 1 ? "s" : ""}`, Object.keys(nodes));
30
27
  }
31
- const cancelRequest = (requestId, reason, functionName) => {
28
+ const instance = {
29
+ [zProcedures]: procedures,
30
+ destroy() {
31
+ for (const [nodeId, listener] of _clientListeners.entries()) {
32
+ l.debug(null, `Destroying listener for node ${nodeId}`);
33
+ listener.disconnect();
34
+ _clientListeners.delete(nodeId);
35
+ }
36
+ for (const [nodeId, node] of Object.entries(nodes ?? {})) {
37
+ l.debug(null, `Terminating worker for node ${nodeId}`);
38
+ if (node instanceof SharedWorker) {
39
+ node.port.close();
40
+ }
41
+ else {
42
+ node.terminate();
43
+ }
44
+ }
45
+ },
46
+ };
47
+ function cancelRequests(reason, criterias) {
48
+ const { nodeIds, functionName, concurrencyKey } = criterias;
49
+ if (!nodeIds && !functionName && !concurrencyKey) {
50
+ throw new Error("At least one criteria must be provided to cancel requests");
51
+ }
52
+ if (nodeIds?.length === 0) {
53
+ console.warn("[SWARPC Client] cancelRequests called with empty nodeIds array, no requests will be cancelled");
54
+ return;
55
+ }
56
+ const trackingKey = concurrencyKey
57
+ ? functionName
58
+ ? `${functionName}:${concurrencyKey}`
59
+ : concurrencyKey
60
+ : undefined;
61
+ const criteria = (param, fn) => param ? fn(param) : true;
62
+ const toCancel = [...pendingRequests.entries()].filter(([_, p]) => criteria(nodeIds, (ns) => !p.nodeId || ns.includes(p.nodeId)) &&
63
+ criteria(functionName, (fn) => p.functionName === fn) &&
64
+ criteria(trackingKey, (key) => p.concurrencyKey === key));
65
+ for (const [requestId, { functionName }] of toCancel) {
66
+ cancelRequest(requestId, reason, functionName);
67
+ }
68
+ }
69
+ function cancelRequest(requestId, reason, functionName) {
32
70
  const pending = pendingRequests.get(requestId);
33
71
  if (!pending)
34
72
  return;
35
73
  const nodeId = pending.nodeId;
36
74
  const l = createLogger("client", loglevel, nodeIdOrSW(nodeId), requestId);
37
75
  l.debug(requestId, `Cancelling ${functionName} with`, reason);
38
- pending.reject(new Error(reason));
76
+ pending.reject(new RequestCancelledError(reason));
39
77
  postMessageSync(l, nodeId ? nodes?.[nodeId] : undefined, {
40
78
  by: "sw&rpc",
41
79
  requestId,
@@ -43,17 +81,21 @@ export function Client(procedures, { worker, nodes: nodeCount, loglevel = "debug
43
81
  abort: { reason },
44
82
  });
45
83
  pendingRequests.delete(requestId);
46
- };
84
+ }
47
85
  const runProcedureFunctions = new Map();
48
86
  for (const functionName of Object.keys(procedures)) {
49
87
  if (typeof functionName !== "string") {
50
88
  throw new Error(`[SWARPC Client] Invalid function name, don't use symbols`);
51
89
  }
90
+ if (RESERVED_PROCEDURE_NAMES.includes(functionName)) {
91
+ throw new Error(`[SWARPC Client] Invalid function name: "${functionName}" is a reserved word and can't be used as a procedure name. Reserved names: ${RESERVED_PROCEDURE_NAMES}`);
92
+ }
52
93
  const send = async (node, nodeId, requestId, msg, options) => {
53
94
  const ctx = {
54
95
  logger: l,
55
96
  node,
56
97
  nodeId,
98
+ allNodeIDs: new Set(nodes ? Object.keys(nodes) : []),
57
99
  hooks,
58
100
  localStorage,
59
101
  };
@@ -64,13 +106,13 @@ export function Client(procedures, { worker, nodes: nodeCount, loglevel = "debug
64
106
  functionName,
65
107
  }, options);
66
108
  };
67
- const _runProcedure = async (input, onProgress = emptyProgressCallback, reqid, nodeId) => {
109
+ const _runProcedure = async ({ input, onProgress, requestId: explicitRequestId, nodeId, concurrencyKey, }) => {
68
110
  const validation = procedures[functionName].input["~standard"].validate(input);
69
111
  if (validation instanceof Promise)
70
112
  throw new Error("Validations must not be async");
71
113
  if (validation.issues)
72
114
  throw new Error(`Invalid input: ${validation.issues}`);
73
- const requestId = reqid ?? makeRequestId();
115
+ const requestId = explicitRequestId ?? makeRequestId();
74
116
  nodeId ??= whoToSendTo(nodes, pendingRequests);
75
117
  const node = nodes && nodeId ? nodes[nodeId] : undefined;
76
118
  const l = createLogger("client", loglevel, nodeIdOrSW(nodeId), requestId);
@@ -78,8 +120,10 @@ export function Client(procedures, { worker, nodes: nodeCount, loglevel = "debug
78
120
  pendingRequests.set(requestId, {
79
121
  nodeId,
80
122
  functionName,
123
+ startedAt: performance.now(),
124
+ concurrencyKey,
81
125
  resolve,
82
- onProgress,
126
+ onProgress: onProgress ?? emptyProgressCallback,
83
127
  reject,
84
128
  });
85
129
  const transfer = procedures[functionName].autotransfer === "always"
@@ -91,14 +135,8 @@ export function Client(procedures, { worker, nodes: nodeCount, loglevel = "debug
91
135
  .catch(reject);
92
136
  });
93
137
  };
94
- runProcedureFunctions.set(functionName, _runProcedure);
95
- instance[functionName] = _runProcedure;
96
- instance[functionName].broadcast = async (input, onProgresses, nodesCount) => {
97
- let nodesToUse = [undefined];
98
- if (nodes)
99
- nodesToUse = Object.keys(nodes);
100
- if (nodesCount)
101
- nodesToUse = nodesToUse.slice(0, nodesCount);
138
+ const _broadcastProcedure = async ({ input, onProgresses, nodesCountOrIDs, concurrencyKey }) => {
139
+ const nodesToUse = broadcastNodes(nodes ? Object.keys(nodes) : undefined, nodesCountOrIDs);
102
140
  const progresses = new Map();
103
141
  function onProgress(nodeId) {
104
142
  if (!onProgresses)
@@ -108,65 +146,93 @@ export function Client(procedures, { worker, nodes: nodeCount, loglevel = "debug
108
146
  onProgresses(progresses);
109
147
  };
110
148
  }
111
- const results = await Promise.allSettled(nodesToUse.map(async (id) => _runProcedure(input, onProgress(id), undefined, id)));
112
- return results.map((r, i) => ({ ...r, node: nodeIdOrSW(nodesToUse[i]) }));
149
+ const settleds = await Promise.allSettled(nodesToUse.map(async (id) => _runProcedure({
150
+ input,
151
+ onProgress: onProgress(id),
152
+ nodeId: id,
153
+ concurrencyKey,
154
+ }))).then((results) => results.map((result, index) => ({
155
+ ...result,
156
+ node: nodeIdOrSW(nodesToUse[index]),
157
+ })));
158
+ const _extras = {
159
+ byNode: new Map(settleds.map(({ node, ...result }) => [node, result])),
160
+ successes: sizedArray(extractFulfilleds(settleds).map((r) => r.value)),
161
+ failures: sizedArray(extractRejecteds(settleds)),
162
+ get failureSummary() {
163
+ return this.failures
164
+ ?.map(({ node, reason }) => `Node ${node}: ${reason}`)
165
+ .join(";\n");
166
+ },
167
+ get ok() {
168
+ return this.failures.length === 0;
169
+ },
170
+ get ko() {
171
+ return this.successes.length === 0;
172
+ },
173
+ get status() {
174
+ if (this.ok)
175
+ return "fulfilled";
176
+ if (this.ko)
177
+ return "rejected";
178
+ return "mixed";
179
+ },
180
+ };
181
+ const extras = _extras;
182
+ return Object.assign(settleds, extras);
183
+ };
184
+ runProcedureFunctions.set(functionName, _runProcedure);
185
+ instance[functionName] = (input, onProgress) => _runProcedure({ input, onProgress });
186
+ instance[functionName].broadcast = (input, onProgresses, nodes) => _broadcastProcedure({ input, onProgresses, nodesCountOrIDs: nodes });
187
+ instance[functionName].broadcast.orThrow = async (...args) => handleBroadcastOrThrowResults(await instance[functionName].broadcast(...args));
188
+ instance[functionName].broadcast.once = async (input, onProgresses, nodesCountOrIDs) => {
189
+ const nodesToUse = broadcastNodes(nodes ? Object.keys(nodes) : undefined, nodesCountOrIDs);
190
+ cancelRequests("Cancelled by .broadcast.once() call", {
191
+ functionName,
192
+ nodeIds: nodesToUse.filter((x) => x !== undefined),
193
+ });
194
+ return _broadcastProcedure({
195
+ input,
196
+ onProgresses,
197
+ nodesCountOrIDs: nodesToUse,
198
+ });
199
+ };
200
+ instance[functionName].broadcast.once.orThrow = async (...args) => handleBroadcastOrThrowResults(await instance[functionName].broadcast.once(...args));
201
+ instance[functionName].broadcast.onceBy = async (concurrencyKey, input, onProgresses, nodesCountOrIDs) => {
202
+ const nodesToUse = broadcastNodes(nodes ? Object.keys(nodes) : undefined, nodesCountOrIDs);
203
+ cancelRequests("Cancelled by .broadcast.once() call", {
204
+ concurrencyKey,
205
+ functionName,
206
+ nodeIds: nodesToUse.filter((x) => x !== undefined),
207
+ });
208
+ return _broadcastProcedure({
209
+ input,
210
+ onProgresses,
211
+ nodesCountOrIDs: nodesToUse,
212
+ concurrencyKey,
213
+ });
113
214
  };
215
+ instance[functionName].broadcast.onceBy.orThrow = async (...args) => handleBroadcastOrThrowResults(await instance[functionName].broadcast.onceBy(...args));
114
216
  instance[functionName].cancelable = (input, onProgress) => {
115
217
  const requestId = makeRequestId();
116
218
  const nodeId = whoToSendTo(nodes, pendingRequests);
117
- const l = createLogger("client", loglevel, nodeIdOrSW(nodeId), requestId);
118
219
  return {
119
- request: _runProcedure(input, onProgress, requestId, nodeId),
220
+ request: _runProcedure({ input, onProgress, requestId, nodeId }),
120
221
  cancel(reason) {
121
- if (!pendingRequests.has(requestId)) {
122
- l.warn(requestId, `Cannot cancel ${functionName} request, it has already been resolved or rejected`);
123
- return;
124
- }
125
- l.debug(requestId, `Cancelling ${functionName} with`, reason);
126
- postMessageSync(l, nodeId ? nodes?.[nodeId] : undefined, {
127
- by: "sw&rpc",
128
- requestId,
129
- functionName,
130
- abort: { reason },
131
- });
132
- pendingRequests.delete(requestId);
222
+ cancelRequest(requestId, reason, functionName);
133
223
  },
134
224
  };
135
225
  };
136
226
  instance[functionName].once = async (input, onProgress) => {
137
- const previousRequestId = onceByMethod.get(functionName);
138
- if (previousRequestId) {
139
- cancelRequest(previousRequestId, "Cancelled by .once() call", functionName);
140
- onceByMethod.delete(functionName);
141
- }
142
- const requestId = makeRequestId();
143
- onceByMethod.set(functionName, requestId);
144
- try {
145
- return await _runProcedure(input, onProgress, requestId);
146
- }
147
- finally {
148
- if (onceByMethod.get(functionName) === requestId) {
149
- onceByMethod.delete(functionName);
150
- }
151
- }
227
+ cancelRequests("Cancelled by .once() call", { functionName });
228
+ return await _runProcedure({ input, onProgress });
152
229
  };
153
- instance[functionName].onceBy = async (key, input, onProgress) => {
154
- const trackingKey = `${functionName}:${key}`;
155
- const previousRequestId = onceByMethodAndKey.get(trackingKey);
156
- if (previousRequestId) {
157
- cancelRequest(previousRequestId, `Cancelled by .onceBy("${key}") call`, functionName);
158
- onceByMethodAndKey.delete(trackingKey);
159
- }
160
- const requestId = makeRequestId();
161
- onceByMethodAndKey.set(trackingKey, requestId);
162
- try {
163
- return await _runProcedure(input, onProgress, requestId);
164
- }
165
- finally {
166
- if (onceByMethodAndKey.get(trackingKey) === requestId) {
167
- onceByMethodAndKey.delete(trackingKey);
168
- }
169
- }
230
+ instance[functionName].onceBy = async (concurrencyKey, input, onProgress) => {
231
+ cancelRequests(`Cancelled by .onceBy("${concurrencyKey}") call`, {
232
+ functionName,
233
+ concurrencyKey,
234
+ });
235
+ return await _runProcedure({ input, onProgress, concurrencyKey });
170
236
  };
171
237
  }
172
238
  instance.onceBy = (globalKey) => {
@@ -175,28 +241,20 @@ export function Client(procedures, { worker, nodes: nodeCount, loglevel = "debug
175
241
  if (typeof functionName !== "string")
176
242
  continue;
177
243
  proxy[functionName] = async (input, onProgress) => {
178
- const previousRequestId = onceByGlobalKey.get(globalKey);
179
- if (previousRequestId) {
180
- const pending = pendingRequests.get(previousRequestId);
181
- if (pending) {
182
- cancelRequest(previousRequestId, `Cancelled by global onceBy("${globalKey}") call`, pending.functionName);
183
- }
184
- onceByGlobalKey.delete(globalKey);
185
- }
244
+ cancelRequests(`Cancelled by global onceBy("${globalKey}") call`, {
245
+ concurrencyKey: globalKey,
246
+ });
186
247
  const requestId = makeRequestId();
187
- onceByGlobalKey.set(globalKey, requestId);
188
248
  const _runProcedure = runProcedureFunctions.get(functionName);
189
249
  if (!_runProcedure) {
190
250
  throw new Error(`No procedure found for ${functionName}`);
191
251
  }
192
- try {
193
- return await _runProcedure(input, onProgress ?? emptyProgressCallback, requestId);
194
- }
195
- finally {
196
- if (onceByGlobalKey.get(globalKey) === requestId) {
197
- onceByGlobalKey.delete(globalKey);
198
- }
199
- }
252
+ return await _runProcedure({
253
+ input,
254
+ onProgress,
255
+ requestId,
256
+ concurrencyKey: globalKey,
257
+ });
200
258
  };
201
259
  }
202
260
  return proxy;
@@ -232,7 +290,7 @@ function postMessageSync(l, worker, message, options) {
232
290
  w.postMessage(message, options);
233
291
  }
234
292
  async function startClientListener(ctx) {
235
- if (_clientListenerStarted.has(nodeIdOrSW(ctx.nodeId)))
293
+ if (_clientListeners.has(nodeIdOrSW(ctx.nodeId)))
236
294
  return;
237
295
  const { logger: l, node: worker } = ctx;
238
296
  if (!worker) {
@@ -263,10 +321,12 @@ async function startClientListener(ctx) {
263
321
  if (!handlers) {
264
322
  throw new Error(`[SWARPC Client] ${requestId} has no active request handlers, cannot process ${JSON.stringify(data)}`);
265
323
  }
324
+ const duration = performance.now() - handlers.startedAt;
266
325
  if ("error" in data) {
267
326
  ctx.hooks.error?.({
268
327
  procedure: data.functionName,
269
328
  error: new Error(data.error.message),
329
+ duration,
270
330
  });
271
331
  handlers.reject(new Error(data.error.message));
272
332
  pendingRequests.delete(requestId);
@@ -275,6 +335,7 @@ async function startClientListener(ctx) {
275
335
  ctx.hooks.progress?.({
276
336
  procedure: data.functionName,
277
337
  data: data.progress,
338
+ duration,
278
339
  });
279
340
  handlers.onProgress(data.progress);
280
341
  }
@@ -282,6 +343,7 @@ async function startClientListener(ctx) {
282
343
  ctx.hooks.success?.({
283
344
  procedure: data.functionName,
284
345
  data: data.result,
346
+ duration,
285
347
  });
286
348
  handlers.resolve(data.result);
287
349
  pendingRequests.delete(requestId);
@@ -294,15 +356,31 @@ async function startClientListener(ctx) {
294
356
  else {
295
357
  w.addEventListener("message", listener);
296
358
  }
297
- _clientListenerStarted.add(nodeIdOrSW(ctx.nodeId));
359
+ _clientListeners.set(nodeIdOrSW(ctx.nodeId), {
360
+ disconnect() {
361
+ if (w instanceof SharedWorker) {
362
+ w.port.removeEventListener("message", listener);
363
+ }
364
+ else {
365
+ w.removeEventListener("message", listener);
366
+ }
367
+ },
368
+ });
298
369
  await postMessage(ctx, {
299
370
  by: "sw&rpc",
300
371
  functionName: "#initialize",
301
372
  isInitializeRequest: true,
302
373
  localStorageData: ctx.localStorage,
303
374
  nodeId: nodeIdOrSW(ctx.nodeId),
375
+ allNodeIDs: ctx.allNodeIDs,
304
376
  });
305
377
  }
306
378
  function makeRequestId() {
307
379
  return Math.random().toString(16).substring(2, 8).toUpperCase();
308
380
  }
381
+ function handleBroadcastOrThrowResults(results) {
382
+ if (results.ok) {
383
+ return results.successes;
384
+ }
385
+ throw new AggregateError(results.failures.map((f) => f.reason));
386
+ }
package/dist/index.d.ts CHANGED
@@ -3,8 +3,8 @@
3
3
  * @mergeModuleWith <project>
4
4
  */
5
5
  import "./polyfills.js";
6
- export type { ProceduresMap, CancelablePromise } from "./types.js";
7
- export { Client } from "./client.js";
6
+ export { type ProceduresMap, type CancelablePromise, RequestCancelledError, } from "./types.js";
7
+ export { Client, RESERVED_PROCEDURE_NAMES } from "./client.js";
8
8
  export type { SwarpcClient } from "./client.js";
9
9
  export { Server } from "./server.js";
10
10
  export type { SwarpcServer } from "./server.js";
package/dist/index.js CHANGED
@@ -1,3 +1,4 @@
1
1
  import "./polyfills.js";
2
- export { Client } from "./client.js";
2
+ export { RequestCancelledError, } from "./types.js";
3
+ export { Client, RESERVED_PROCEDURE_NAMES } from "./client.js";
3
4
  export { Server } from "./server.js";
package/dist/nodes.js CHANGED
@@ -30,3 +30,13 @@ const serviceWorkerNodeId = "(SW)";
30
30
  export function nodeIdOrSW(id) {
31
31
  return id ?? serviceWorkerNodeId;
32
32
  }
33
+ export function broadcastNodes(nodes, target) {
34
+ if (target && Array.isArray(target))
35
+ return target;
36
+ let nodesToUse = [undefined];
37
+ if (nodes)
38
+ nodesToUse = [...nodes];
39
+ if (typeof target === "number")
40
+ nodesToUse = nodesToUse.slice(0, target);
41
+ return nodesToUse;
42
+ }
package/dist/server.js CHANGED
@@ -1,5 +1,5 @@
1
1
  import { createLogger, injectIntoConsoleGlobal } from "./log.js";
2
- import { isPayloadHeader, isPayloadInitialize, validatePayloadCore as validatePayloadCore, zImplementations, zProcedures, } from "./types.js";
2
+ import { isPayloadHeader, isPayloadInitialize, RequestCancelledError, validatePayloadCore, zImplementations, zProcedures, } from "./types.js";
3
3
  import { findTransferables } from "./utils.js";
4
4
  import { FauxLocalStorage } from "./localstorage.js";
5
5
  import { scopeIsDedicated, scopeIsShared, scopeIsService } from "./scopes.js";
@@ -9,6 +9,7 @@ const abortedRequests = new Set();
9
9
  export function Server(procedures, { loglevel = "debug", scope, _scopeType, } = {}) {
10
10
  scope ??= self;
11
11
  const nodeId = nodeIdFromScope(scope, _scopeType);
12
+ let allNodeIDs = new Set();
12
13
  const l = createLogger("server", loglevel, nodeId);
13
14
  const instance = {
14
15
  [zProcedures]: procedures,
@@ -63,6 +64,7 @@ export function Server(procedures, { loglevel = "debug", scope, _scopeType, } =
63
64
  l.debug(null, "Setting up faux localStorage", localStorageData);
64
65
  new FauxLocalStorage(localStorageData).register(scope);
65
66
  injectIntoConsoleGlobal(scope, nodeId, null);
67
+ event.data.allNodeIDs.forEach((id) => allNodeIDs.add(id));
66
68
  return;
67
69
  }
68
70
  if (!isPayloadHeader(procedures, event.data)) {
@@ -99,7 +101,7 @@ export function Server(procedures, { loglevel = "debug", scope, _scopeType, } =
99
101
  const controller = abortControllers.get(requestId);
100
102
  if (!controller)
101
103
  await postError("No abort controller found for request");
102
- controller?.abort(payload.abort.reason);
104
+ controller?.abort(new RequestCancelledError(payload.abort.reason));
103
105
  return;
104
106
  }
105
107
  abortControllers.set(requestId, new AbortController());
@@ -113,6 +115,7 @@ export function Server(procedures, { loglevel = "debug", scope, _scopeType, } =
113
115
  await postMsg({ progress });
114
116
  }, {
115
117
  nodeId,
118
+ nodes: allNodeIDs,
116
119
  abortSignal: abortControllers.get(requestId)?.signal,
117
120
  });
118
121
  l.debug(requestId, `Result for ${functionName}`, result);
package/dist/types.d.ts CHANGED
@@ -3,6 +3,7 @@
3
3
  * @mergeModuleWith <project>
4
4
  */
5
5
  import type { StandardSchemaV1 as Schema } from "./standardschema.js";
6
+ import { ArrayOneOrMore } from "./utils.js";
6
7
  /**
7
8
  * A procedure declaration
8
9
  */
@@ -73,6 +74,10 @@ tools: {
73
74
  * ID of the Node the request is being processed on.
74
75
  */
75
76
  nodeId: string;
77
+ /**
78
+ * IDs of all available Nodes.
79
+ */
80
+ nodes: Set<string>;
76
81
  }) => Promise<Schema.InferInput<S>>;
77
82
  /**
78
83
  * Declarations of procedures by name.
@@ -85,6 +90,8 @@ type ProcedureNameAndData<Procedures extends ProceduresMap, Key extends "progres
85
90
  [K in keyof Procedures]: {
86
91
  procedure: K;
87
92
  data: Schema.InferOutput<Procedures[K][Key]>;
93
+ /** Time in milliseconds the procedure call took */
94
+ duration: number;
88
95
  };
89
96
  }[keyof Procedures];
90
97
  /**
@@ -97,9 +104,12 @@ export type Hooks<Procedures extends ProceduresMap> = {
97
104
  success?: (arg: ProcedureNameAndData<Procedures, "success">) => void;
98
105
  /**
99
106
  * Called when a procedure call has failed.
107
+ * @param arg
108
+ * @param arg.duration time in milliseconds the procedure call took
100
109
  */
101
110
  error?: (arg: {
102
111
  procedure: keyof Procedures;
112
+ duration: number;
103
113
  error: Error;
104
114
  }) => void;
105
115
  /**
@@ -127,9 +137,75 @@ export type PayloadCore<PM extends ProceduresMap, Name extends keyof PM = keyof
127
137
  */
128
138
  export type ClientMethodCallable<P extends Procedure<Schema, Schema, Schema>> = (input: Schema.InferInput<P["input"]>, onProgress?: (progress: Schema.InferOutput<P["progress"]>) => void) => Promise<Schema.InferOutput<P["success"]>>;
129
139
  /**
130
- * A procedure's corresponding method on the client instance -- used to call the procedure. If you want to be able to cancel the request, you can use the `cancelable` method instead of running the procedure directly.
140
+ * A procedure that broadcasts its request to multiple nodes.
141
+ * The return value is an array of results, along with extra properties:
142
+ * see {@link BroadcasterResultExtrasMixed}, {@link BroadcasterResultExtrasSuccess} and {@link BroadcasterResultExtrasFailure}
131
143
  */
132
- export type ClientMethod<P extends Procedure<Schema, Schema, Schema>> = ClientMethodCallable<P> & {
144
+ export type Broadcaster<P extends Procedure<Schema, Schema, Schema>> = {
145
+ /**
146
+ * Returns an array of result values for each node.
147
+ * @throws {AggregateError} with every failing node's error
148
+ */
149
+ orThrow: (input: Schema.InferInput<P["input"]>, onProgress?: (
150
+ /** Map of node IDs to their progress updates */
151
+ progresses: Map<string, Schema.InferOutput<P["progress"]>>) => void,
152
+ /** Node IDs or number of nodes to send the request to. Leave undefined to send to all nodes. When sending a list of node IDs, "undefined" is interpreted as "run on the service worker" */
153
+ nodes?: number | Array<string | undefined>) => Promise<Array<Schema.InferOutput<P["success"]>>>;
154
+ } & ((input: Schema.InferInput<P["input"]>, onProgress?: (
155
+ /** Map of node IDs to their progress updates */
156
+ progresses: Map<string, Schema.InferOutput<P["progress"]>>) => void,
157
+ /** Node IDs or number of nodes to send the request to. Leave undefined to send to all nodes. When sending a list of node IDs, "undefined" is interpreted as "run on the service worker" */
158
+ nodes?: number | Array<string | undefined>) => Promise<Array<PromiseSettledResult<Schema.InferOutput<P["success"]>> & {
159
+ node: string;
160
+ }> & (BroadcasterResultExtrasMixed<P> | BroadcasterResultExtrasSuccess<P> | BroadcasterResultExtrasFailure)>);
161
+ /**
162
+ * Extra properties on the result of a broadcaster call, when some nodes succeeded and some failed
163
+ */
164
+ export type BroadcasterResultExtrasMixed<P extends Procedure<Schema, Schema, Schema>> = {
165
+ /** All failed calls — at least one in the mixed case */
166
+ failures: ArrayOneOrMore<PromiseRejectedResult & {
167
+ node: string;
168
+ }>;
169
+ /** Formatted error string, undefined if no failures */
170
+ failureSummary: string;
171
+ /** True if only failures */
172
+ ko: false;
173
+ /** True if no failures */
174
+ ok: false;
175
+ /** "mixed" if some failed and some succeeded, "fulfilled" if all succeeded and "rejected" if everything failed */
176
+ status: "mixed";
177
+ /** All values of successful calls */
178
+ successes: ArrayOneOrMore<Schema.InferOutput<P["success"]>>;
179
+ /** Map of node ID to its result or failure */
180
+ byNode: Map<string, PromiseSettledResult<Schema.InferOutput<P["success"]>>>;
181
+ };
182
+ /**
183
+ * Extra properties on the result of a broadcaster call, when all nodes succeeded
184
+ */
185
+ export type BroadcasterResultExtrasSuccess<P extends Procedure<Schema, Schema, Schema>> = {
186
+ failures: [];
187
+ failureSummary: undefined;
188
+ ko: false;
189
+ ok: true;
190
+ status: "fulfilled";
191
+ successes: ArrayOneOrMore<Schema.InferOutput<P["success"]>>;
192
+ byNode: Map<string, PromiseFulfilledResult<Schema.InferOutput<P["success"]>>>;
193
+ };
194
+ /**
195
+ * Extra properties on the result of a broadcaster call, when all nodes failed
196
+ */
197
+ export type BroadcasterResultExtrasFailure = {
198
+ failures: ArrayOneOrMore<PromiseRejectedResult & {
199
+ node: string;
200
+ }>;
201
+ failureSummary: string;
202
+ ko: true;
203
+ ok: false;
204
+ status: "rejected";
205
+ successes: [];
206
+ byNode: Map<string, PromiseRejectedResult>;
207
+ };
208
+ export type ClientMethodExtraCallables<P extends Procedure<Schema, Schema, Schema>> = {
133
209
  /**
134
210
  * A method that returns a `CancelablePromise`. Cancel it by calling `.cancel(reason)` on it, and wait for the request to resolve by awaiting the `request` property on the returned object.
135
211
  */
@@ -138,14 +214,31 @@ export type ClientMethod<P extends Procedure<Schema, Schema, Schema>> = ClientMe
138
214
  * Send the request to specific nodes, or all nodes.
139
215
  * Returns an array of results, one for each node the request was sent to.
140
216
  * Each result is a {@link PromiseSettledResult}, with also an additional property, the node ID of the request
217
+ * The results array also has extra properties for convenience, see {@link BroadcasterResultExtrasMixed}, {@link BroadcasterResultExtrasSuccess} and {@link BroadcasterResultExtrasFailure}
141
218
  */
142
- broadcast: (input: Schema.InferInput<P["input"]>, onProgress?: (
143
- /** Map of node IDs to their progress updates */
144
- progresses: Map<string, Schema.InferOutput<P["progress"]>>) => void,
145
- /** Number of nodes to send the request to. Leave undefined to send to all nodes */
146
- nodes?: number) => Promise<Array<PromiseSettledResult<Schema.InferOutput<P["success"]>> & {
147
- node: string;
148
- }>>;
219
+ broadcast: Broadcaster<P> & {
220
+ /**
221
+ * Send the request to specific nodes, or all nodes.
222
+ * Cancels any previous ongoing calls of this procedure on the nodes beforehand.
223
+ * Returns an array of results, one for each node the request was sent to.
224
+ * Each result is a {@link PromiseSettledResult}, with also an additional property, the node ID of the request. The results array also has extra properties for convenience, see {@link BroadcasterResultExtrasMixed}, {@link BroadcasterResultExtrasSuccess} and {@link BroadcasterResultExtrasFailure}
225
+ */
226
+ once: Broadcaster<P>;
227
+ /**
228
+ * Send the request to specific nodes, or all nodes.
229
+ * Cancels any previous ongoing calls of this procedure on the nodes beforehand that were also run with the specified concurrency key (first argument). See .onceBy for more details.
230
+ * Returns an array of results, one for each node the request was sent to.
231
+ * Each result is a {@link PromiseSettledResult}, with also an additional property, the node ID of the request. The results array also has extra properties for convenience, see {@link BroadcasterResultExtrasMixed}, {@link BroadcasterResultExtrasSuccess} and {@link BroadcasterResultExtrasFailure}
232
+ */
233
+ onceBy: ((key: string, ...args: Parameters<Broadcaster<P>>) => ReturnType<Broadcaster<P>>) & {
234
+ /**
235
+ * Returns an array of result values for each node.
236
+ * Throws if any node failed.
237
+ * @throws {AggregateError} with every failing node's error
238
+ */
239
+ orThrow: (key: string, ...args: Parameters<Broadcaster<P>>) => Promise<Array<Schema.InferOutput<P["success"]>>>;
240
+ };
241
+ };
149
242
  /**
150
243
  * Call the procedure, cancelling any previous ongoing call of this procedure beforehand.
151
244
  */
@@ -155,9 +248,20 @@ export type ClientMethod<P extends Procedure<Schema, Schema, Schema>> = ClientMe
155
248
  */
156
249
  onceBy: (key: string, input: Schema.InferInput<P["input"]>, onProgress?: (progress: Schema.InferOutput<P["progress"]>) => void) => Promise<Schema.InferOutput<P["success"]>>;
157
250
  };
251
+ /**
252
+ * A procedure's corresponding method on the client instance -- used to call the procedure. If you want to be able to cancel the request, you can use the `cancelable` method instead of running the procedure directly.
253
+ */
254
+ export type ClientMethod<P extends Procedure<Schema, Schema, Schema>> = ClientMethodCallable<P> & ClientMethodExtraCallables<P>;
255
+ export declare const zImplementations: unique symbol;
158
256
  export type WorkerConstructor<T extends Worker | SharedWorker = Worker | SharedWorker> = {
159
257
  new (opts?: {
160
258
  name?: string;
161
259
  }): T;
162
260
  };
261
+ /**
262
+ * A cancelable request was cancelled (either via .cancelable's .cancel() or via a .once / .onceBy call)
263
+ */
264
+ export declare class RequestCancelledError extends Error {
265
+ constructor(reason: string);
266
+ }
163
267
  export {};
package/dist/types.js CHANGED
@@ -13,6 +13,8 @@ export function isPayloadInitialize(payload) {
13
13
  return false;
14
14
  if (!("isInitializeRequest" in payload))
15
15
  return false;
16
+ if (!("allNodeIDs" in payload))
17
+ return false;
16
18
  if (payload.by !== "sw&rpc")
17
19
  return false;
18
20
  if (payload.functionName !== "#initialize")
@@ -86,3 +88,9 @@ export function validatePayloadCore(procedure, payload) {
86
88
  }
87
89
  export const zImplementations = Symbol("SWARPC implementations");
88
90
  export const zProcedures = Symbol("SWARPC procedures");
91
+ export class RequestCancelledError extends Error {
92
+ constructor(reason) {
93
+ super(`Request was cancelled: ${reason}`);
94
+ this.name = "RequestCancelledError";
95
+ }
96
+ }
package/dist/utils.js CHANGED
@@ -23,3 +23,15 @@ export function findTransferables(value) {
23
23
  }
24
24
  return [];
25
25
  }
26
+ export function sizedArray(array) {
27
+ if (array.length === 0) {
28
+ return [];
29
+ }
30
+ return array;
31
+ }
32
+ export function extractFulfilleds(settleds) {
33
+ return settleds.filter((settled) => settled.status === "fulfilled");
34
+ }
35
+ export function extractRejecteds(settleds) {
36
+ return settleds.filter((settled) => settled.status === "rejected");
37
+ }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "swarpc",
3
- "version": "0.18.0",
3
+ "version": "0.20.0",
4
4
  "description": "Full type-safe RPC library for service worker -- move things off of the UI thread with ease!",
5
5
  "keywords": [
6
6
  "service-workers",
@@ -47,36 +47,36 @@
47
47
  },
48
48
  "devDependencies": {
49
49
  "@8hobbies/typedoc-plugin-plausible": "^2.2.0",
50
- "@playwright/test": "^1.57.0",
50
+ "@playwright/test": "^1.58.2",
51
51
  "@size-limit/esbuild-why": "^12.0.0",
52
52
  "@size-limit/preset-small-lib": "^12.0.0",
53
- "@vitest/web-worker": "^4.0.16",
53
+ "@vitest/web-worker": "^4.0.18",
54
54
  "arktype": "^2.1.29",
55
55
  "date-fns": "^4.1.0",
56
56
  "husky": "^9.1.7",
57
57
  "kacl": "^1.1.1",
58
- "knip": "^5.80.0",
58
+ "knip": "^5.85.0",
59
59
  "lint-staged": "^16.2.7",
60
- "nodemon": "^3.1.11",
61
- "oxlint": "^1.37.0",
62
- "pkg-pr-new": "^0.0.62",
63
- "prettier": "^3.7.4",
60
+ "nodemon": "^3.1.14",
61
+ "oxlint": "^1.50.0",
62
+ "pkg-pr-new": "^0.0.63",
63
+ "prettier": "^3.8.1",
64
64
  "sirv-cli": "^3.0.1",
65
65
  "size-limit": "^12.0.0",
66
- "typedoc": "^0.28.15",
66
+ "typedoc": "^0.28.17",
67
67
  "typedoc-material-theme": "^1.4.1",
68
- "typedoc-plugin-dt-links": "^2.0.36",
68
+ "typedoc-plugin-dt-links": "^2.0.43",
69
69
  "typedoc-plugin-extras": "^4.0.1",
70
70
  "typedoc-plugin-inline-sources": "^1.3.0",
71
- "typedoc-plugin-mdn-links": "^5.0.10",
72
- "typedoc-plugin-redirect": "^1.2.1",
71
+ "typedoc-plugin-mdn-links": "^5.1.1",
72
+ "typedoc-plugin-redirect": "^1.3.0",
73
73
  "typescript": "^5.9.3",
74
- "vite": "^7.3.0",
75
- "vitest": "^4.0.16"
74
+ "vite": "^7.3.1",
75
+ "vitest": "^4.0.18"
76
76
  },
77
77
  "volta": {
78
- "node": "24.12.0",
79
- "npm": "11.7.0"
78
+ "node": "24.13.1",
79
+ "npm": "11.10.1"
80
80
  },
81
81
  "lint-staged": {
82
82
  "*.{ts,js,md,json,yaml,yml}": [