@djangocfg/centrifugo 2.1.71 → 2.1.73

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -34,10 +34,13 @@ src/
  │ │ ├── CentrifugoRPCClient.ts # Main facade (~165 lines)
  │ │ ├── connection.ts # Connection lifecycle
  │ │ ├── subscriptions.ts # Channel subscriptions
- │ │ ├── rpc.ts # RPC methods (namedRPC, namedRPCNoWait)
+ │ │ ├── rpc.ts # RPC methods (namedRPC, namedRPCWithRetry, namedRPCNoWait)
  │ │ ├── version.ts # API version checking
  │ │ ├── types.ts # Type definitions
  │ │ └── index.ts # Exports
+ │ ├── errors/ # Error handling with retry logic
+ │ │ ├── RPCError.ts # Typed RPC errors (isRetryable, userMessage)
+ │ │ └── RPCRetryHandler.ts # Exponential backoff retry
  │ ├── logger/ # Logging system with circular buffer
  │ │ ├── createLogger.ts # Logger factory (supports string prefix)
  │ │ └── LogsStore.ts # In-memory logs accumulation
@@ -598,6 +601,98 @@ client?.namedRPCNoWait('terminal.input', { session_id, data }, {
  | `namedRPC()` | ~800-1800ms | Commands that need response |
  | `namedRPCNoWait()` | ~10-30ms | Real-time input, fire-and-forget |

+ ### namedRPCWithRetry() - RPC with Timeout and Retry
+
+ For operations that need both timeout protection and automatic retry on transient failures.
+
+ ```tsx
+ import { useCentrifugo } from '@djangocfg/centrifugo';
+
+ function FileList() {
+   const { client } = useCentrifugo();
+
+   const loadFiles = async () => {
+     // Automatically retries on timeout/network errors
+     const files = await client?.namedRPCWithRetry('files.list',
+       { path: '/home' },
+       {
+         timeout: 5000,     // 5 second timeout per attempt
+         maxRetries: 3,     // Up to 3 retries
+         baseDelayMs: 1000, // Start with 1s delay
+         maxDelayMs: 10000, // Cap at 10s
+         onRetry: (attempt, error, delay) => {
+           console.log(`Retry ${attempt}: ${error.userMessage}, waiting ${delay}ms`);
+         }
+       }
+     );
+     return files;
+   };
+ }
+ ```
+
+ ### RPCError - Typed Error Handling
+
+ All RPC methods now throw `RPCError` with classification for better error handling:
+
+ ```tsx
+ import { RPCError } from '@djangocfg/centrifugo';
+
+ try {
+   await client.namedRPC('files.list', { path: '/' });
+ } catch (error) {
+   if (error instanceof RPCError) {
+     // Error classification
+     console.log(error.code);                // 'timeout' | 'network_error' | 'server_error' | ...
+     console.log(error.isRetryable);         // true for transient errors
+     console.log(error.userMessage);         // User-friendly message
+     console.log(error.suggestedRetryDelay); // Recommended delay in ms
+
+     // Show user-friendly message
+     toast.error(error.userMessage);
+
+     // Decide if retry makes sense
+     if (error.isRetryable) {
+       // Schedule retry
+     }
+   }
+ }
+ ```
+
+ **Error Codes:**
+
+ | Code | Retryable | Description |
+ |------|-----------|-------------|
+ | `timeout` | ✅ | Request timed out |
+ | `network_error` | ✅ | Network connectivity issue |
+ | `connection_failed` | ✅ | WebSocket connection failed |
+ | `websocket_error` | ✅ | WebSocket protocol error |
+ | `server_error` | ✅ (5xx only) | Server returned error |
+ | `not_connected` | ❌ | Client not connected |
+ | `encoding_error` | ❌ | Failed to encode request |
+ | `decoding_error` | ❌ | Failed to decode response |
+ | `cancelled` | ❌ | Request was cancelled |
+
+ ### withRetry() - Generic Retry Utility
+
+ For custom retry logic outside of RPC:
+
+ ```tsx
+ import { withRetry, RPCError } from '@djangocfg/centrifugo';
+
+ const result = await withRetry(
+   () => fetchSomething(),
+   {
+     maxRetries: 3,
+     baseDelayMs: 1000,
+     maxDelayMs: 10000,
+     jitterFactor: 0.2, // ±20% randomization
+   },
+   (state, delay) => {
+     console.log(`Retry ${state.attempt}, waiting ${delay}ms`);
+   }
+ );
+ ```
+
  ### checkApiVersion() - API Contract Validation

  Validates that the client API version matches the server. Useful for detecting when the frontend needs to refresh after a backend deployment.
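To make the `RPCError` classification added above concrete, here is a minimal illustrative sketch of a caller that schedules one follow-up attempt based on `isRetryable` and `suggestedRetryDelay`; the `loadWithSingleRetry` helper is hypothetical and not part of the package:

```tsx
import { RPCError } from '@djangocfg/centrifugo';

// Illustrative helper: retry once, after the delay the error suggests.
async function loadWithSingleRetry<T>(load: () => Promise<T>): Promise<T> {
  try {
    return await load();
  } catch (error) {
    if (error instanceof RPCError && error.isRetryable) {
      // Wait the recommended delay (e.g. 1000ms for 'timeout', 3000ms for 'server_error')
      await new Promise((resolve) => setTimeout(resolve, error.suggestedRetryDelay));
      return load(); // second and final attempt
    }
    throw error; // non-retryable: surface to the caller
  }
}
```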
@@ -1087,6 +1182,12 @@ import type {
  RPCOptions,
  RetryOptions,
  VersionCheckResult,
+ NamedRPCWithRetryOptions,
+
+ // Errors
+ RPCErrorCode,
+ RetryConfig,
+ RetryState,

  // Connection
  ConnectionState,
@@ -1121,6 +1222,9 @@ import type {
  SubscriptionsListProps,
  CentrifugoMonitorProps,
  } from '@djangocfg/centrifugo';
+
+ // Error classes
+ import { RPCError, withRetry, createRetryHandler } from '@djangocfg/centrifugo';
  ```

  ## Unified Event System
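The import hunk above also exposes `createRetryHandler`, which the README does not demonstrate. A hedged usage sketch, based on the `RPCRetryHandler.ts` source added later in this diff (`fetchDashboard` is a placeholder for any async call):

```tsx
import { createRetryHandler } from '@djangocfg/centrifugo';

// Reusable handler with a shared backoff policy.
const retry = createRetryHandler({ maxRetries: 2, baseDelayMs: 500 });

// execute() wraps any async operation with the configured retry logic.
const dashboard = await retry.execute(
  () => fetchDashboard(), // placeholder operation
  (state, delayMs) => console.log(`attempt ${state.attempt}, next try in ${delayMs}ms`)
);
```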
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@djangocfg/centrifugo",
-   "version": "2.1.71",
+   "version": "2.1.73",
    "description": "Production-ready Centrifugo WebSocket client for React with real-time subscriptions, RPC patterns, and connection state management",
    "keywords": [
      "centrifugo",
@@ -51,9 +51,9 @@
      "centrifuge": "^5.2.2"
    },
    "peerDependencies": {
-     "@djangocfg/api": "^2.1.71",
-     "@djangocfg/ui-nextjs": "^2.1.71",
-     "@djangocfg/layouts": "^2.1.71",
+     "@djangocfg/api": "^2.1.73",
+     "@djangocfg/ui-nextjs": "^2.1.73",
+     "@djangocfg/layouts": "^2.1.73",
      "consola": "^3.4.2",
      "lucide-react": "^0.545.0",
      "moment": "^2.30.1",
@@ -61,7 +61,7 @@
      "react-dom": "^19.1.0"
    },
    "devDependencies": {
-     "@djangocfg/typescript-config": "^2.1.71",
+     "@djangocfg/typescript-config": "^2.1.73",
      "@types/react": "^19.1.0",
      "@types/react-dom": "^19.1.0",
      "moment": "^2.30.1",
@@ -47,7 +47,9 @@ import {
    rpc as legacyRpc,
    namedRPC as nativeNamedRPC,
    namedRPCNoWait as nativeNamedRPCNoWait,
+   namedRPCWithRetry as nativeNamedRPCWithRetry,
    type RPCManager,
+   type NamedRPCWithRetryOptions,
  } from './rpc';

  import { checkApiVersion as checkVersion } from './version';
@@ -158,6 +160,14 @@ export class CentrifugoRPCClient {
      nativeNamedRPCNoWait(this.rpcManager, method, data, options);
    }

+   async namedRPCWithRetry<TRequest = any, TResponse = any>(
+     method: string,
+     data: TRequest,
+     options?: NamedRPCWithRetryOptions
+   ): Promise<TResponse> {
+     return nativeNamedRPCWithRetry(this.rpcManager, method, data, options);
+   }
+
    // ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
    // API Version Checking
    // ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
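The facade method added above is generic over request and response types, so the resolved value is typed at the call site. A brief illustrative sketch (the `client` comes from `useCentrifugo()` as in the README; `FilesListResponse` is a hypothetical shape, not a package type):

```tsx
// Hypothetical response shape for illustration only.
interface FilesListResponse {
  files: Array<{ name: string; size: number }>;
}

// The generics flow through to the resolved value, so `result` is typed.
const result = await client.namedRPCWithRetry<{ path: string }, FilesListResponse>(
  'files.list',
  { path: '/home' },
  { timeout: 5000, maxRetries: 2 }
);
```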
@@ -44,7 +44,9 @@ export {
    rpc,
    namedRPC,
    namedRPCNoWait,
+   namedRPCWithRetry,
    type RPCManager,
+   type NamedRPCWithRetryOptions,
  } from './rpc';

  // Version module (for advanced use cases)
@@ -7,11 +7,15 @@
  import type { Centrifuge, Subscription } from 'centrifuge';

  import { dispatchCentrifugoError } from '../../events';
+ import { RPCError } from '../errors/RPCError';

  import type { Logger } from '../logger';
  import type { PendingRequest, RPCOptions, RetryOptions } from './types';
  import { generateCorrelationId } from './connection';

+ /** Default RPC timeout in milliseconds */
+ const DEFAULT_RPC_TIMEOUT = 30000;
+
  export interface RPCManager {
    centrifuge: Centrifuge;
    pendingRequests: Map<string, PendingRequest>;
@@ -194,6 +198,17 @@ export async function rpc<TRequest = any, TResponse = any>(
    });
  }

+ /**
+  * Create a timeout promise that rejects after specified ms.
+  */
+ function createTimeoutPromise(timeoutMs: number, method: string): Promise<never> {
+   return new Promise((_, reject) => {
+     setTimeout(() => {
+       reject(new RPCError('timeout', `RPC timeout after ${timeoutMs}ms: ${method}`, { method }));
+     }, timeoutMs);
+   });
+ }
+
  /**
   * Call RPC method via native Centrifugo RPC proxy.
   *
@@ -206,6 +221,11 @@ export async function rpc<TRequest = any, TResponse = any>(
   * 3. Centrifugo proxies to Django: POST /centrifugo/rpc/
   * 4. Django routes to @websocket_rpc handler
   * 5. Response returned to client
+  *
+  * Features:
+  * - Configurable timeout (default: 30s)
+  * - Returns typed RPCError for better error handling
+  * - Dispatches error events for monitoring
   */
  export async function namedRPC<TRequest = any, TResponse = any>(
    manager: RPCManager,
@@ -214,11 +234,16 @@ export async function namedRPC<TRequest = any, TResponse = any>(
    options?: { timeout?: number }
  ): Promise<TResponse> {
    const { centrifuge, logger } = manager;
+   const timeoutMs = options?.timeout ?? DEFAULT_RPC_TIMEOUT;

-   logger.info(`Native RPC: ${method}`, { data });
+   logger.info(`Native RPC: ${method}`, { data, timeout: timeoutMs });

    try {
-     const result = await centrifuge.rpc(method, data);
+     // Race between RPC call and timeout
+     const result = await Promise.race([
+       centrifuge.rpc(method, data),
+       createTimeoutPromise(timeoutMs, method),
+     ]);

      logger.success(`Native RPC success: ${method}`, {
        hasData: !!result.data,
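The hunk above implements the timeout by racing the RPC call against a rejecting timer; the losing timer keeps running until it fires. A standalone sketch of the same pattern that also clears the timer once the call settles, shown purely as an illustration of the technique, not code from the package:

```tsx
// Illustrative helper: same Promise.race pattern, but the pending timer is
// cleared once the call settles so it does not linger until timeoutMs.
function raceWithTimeout<T>(call: Promise<T>, timeoutMs: number, label: string): Promise<T> {
  let timer: ReturnType<typeof setTimeout> | undefined;
  const timeout = new Promise<never>((_, reject) => {
    timer = setTimeout(
      () => reject(new Error(`Timeout after ${timeoutMs}ms: ${label}`)),
      timeoutMs
    );
  });
  return Promise.race([call, timeout]).finally(() => clearTimeout(timer));
}
```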
@@ -226,29 +251,24 @@
      });

      return result.data as TResponse;
    } catch (error) {
-     logger.error(`Native RPC failed: ${method}`, error);
+     // Convert to RPCError for consistent error handling
+     const rpcError = RPCError.fromError(error, method);
+
+     logger.error(`Native RPC failed: ${method}`, {
+       code: rpcError.code,
+       isRetryable: rpcError.isRetryable,
+       message: rpcError.message,
+     });

      // Dispatch error event for ErrorsTracker
-     // Handle different error formats: Error objects, plain objects with message, or stringify
-     let errorMessage: string;
-     if (error instanceof Error) {
-       errorMessage = error.message;
-     } else if (typeof error === 'object' && error !== null) {
-       // Try to extract message from object, fallback to JSON stringify
-       const errObj = error as Record<string, unknown>;
-       errorMessage = (errObj.message as string) || (errObj.error as string) || JSON.stringify(error);
-     } else {
-       errorMessage = String(error);
-     }
-     const errorCode = (error as any)?.code;
      dispatchCentrifugoError({
        method,
-       error: errorMessage,
-       code: errorCode,
+       error: rpcError.message,
+       code: rpcError.serverCode,
        data,
      });

-     throw error;
+     throw rpcError;
    }
  }
@@ -275,21 +295,26 @@ export function namedRPCNoWait<TRequest = any>(

    const attemptSend = (attempt: number): void => {
      centrifuge.rpc(method, data).catch((error) => {
-       if (attempt < maxRetries) {
-         // Exponential backoff: 100ms, 200ms, 400ms... capped at maxDelayMs
-         const delay = Math.min(baseDelayMs * Math.pow(2, attempt), maxDelayMs);
+       const rpcError = RPCError.fromError(error, method);
+
+       if (attempt < maxRetries && rpcError.isRetryable) {
+         // Exponential backoff with jitter
+         const baseDelay = rpcError.suggestedRetryDelay || baseDelayMs;
+         const delay = Math.min(baseDelay * Math.pow(2, attempt), maxDelayMs);
+         const jitter = delay * 0.2 * (Math.random() * 2 - 1);
+         const finalDelay = Math.max(0, Math.round(delay + jitter));

          logger.warning(
-           `Fire-and-forget RPC failed (attempt ${attempt + 1}/${maxRetries + 1}), retrying in ${delay}ms: ${method}`,
-           error
+           `Fire-and-forget RPC failed (attempt ${attempt + 1}/${maxRetries + 1}), retrying in ${finalDelay}ms: ${method}`,
+           { code: rpcError.code, isRetryable: rpcError.isRetryable }
          );

-         setTimeout(() => attemptSend(attempt + 1), delay);
+         setTimeout(() => attemptSend(attempt + 1), finalDelay);
        } else {
-         // All retries exhausted
+         // All retries exhausted or non-retryable error
          logger.error(
-           `Fire-and-forget RPC failed after ${maxRetries + 1} attempts: ${method}`,
-           error
+           `Fire-and-forget RPC failed after ${attempt + 1} attempts: ${method}`,
+           { code: rpcError.code, message: rpcError.message }
          );
        }
      });
@@ -298,3 +323,83 @@
    // Start first attempt immediately
    attemptSend(0);
  }
+
+ export interface NamedRPCWithRetryOptions {
+   timeout?: number;
+   maxRetries?: number;
+   baseDelayMs?: number;
+   maxDelayMs?: number;
+   onRetry?: (attempt: number, error: RPCError, delayMs: number) => void;
+ }
+
+ /**
+  * Call RPC method with timeout and automatic retry.
+  *
+  * Combines namedRPC timeout with retry logic for robust RPC calls.
+  * Uses RPCError.isRetryable to determine if retry should happen.
+  *
+  * @example
+  * const files = await namedRPCWithRetry(manager, 'files.list', { path: '/' }, {
+  *   timeout: 5000,
+  *   maxRetries: 3,
+  *   onRetry: (attempt, error, delay) => {
+  *     console.log(`Retry ${attempt} after ${delay}ms: ${error.userMessage}`);
+  *   }
+  * });
+  */
+ export async function namedRPCWithRetry<TRequest = any, TResponse = any>(
+   manager: RPCManager,
+   method: string,
+   data: TRequest,
+   options?: NamedRPCWithRetryOptions
+ ): Promise<TResponse> {
+   const { logger } = manager;
+   const maxRetries = options?.maxRetries ?? 3;
+   const baseDelayMs = options?.baseDelayMs ?? 1000;
+   const maxDelayMs = options?.maxDelayMs ?? 10000;
+
+   let lastError: RPCError | null = null;
+
+   for (let attempt = 0; attempt <= maxRetries; attempt++) {
+     try {
+       return await namedRPC<TRequest, TResponse>(manager, method, data, {
+         timeout: options?.timeout,
+       });
+     } catch (error) {
+       lastError = error instanceof RPCError ? error : RPCError.fromError(error, method);
+
+       // Don't retry non-retryable errors
+       if (!lastError.isRetryable) {
+         throw lastError;
+       }
+
+       // Check if we have retries left
+       if (attempt >= maxRetries) {
+         throw lastError;
+       }
+
+       // Calculate delay with exponential backoff and jitter
+       const suggestedDelay = lastError.suggestedRetryDelay || baseDelayMs;
+       const exponentialDelay = suggestedDelay * Math.pow(2, attempt);
+       const cappedDelay = Math.min(exponentialDelay, maxDelayMs);
+       const jitter = cappedDelay * 0.2 * (Math.random() * 2 - 1);
+       const delayMs = Math.max(0, Math.round(cappedDelay + jitter));
+
+       logger.warning(
+         `RPC retry (${attempt + 1}/${maxRetries}): ${method} in ${delayMs}ms`,
+         { code: lastError.code, message: lastError.message }
+       );
+
+       // Notify callback if provided
+       if (options?.onRetry) {
+         options.onRetry(attempt + 1, lastError, delayMs);
+       }
+
+       // Wait before retry
+       await new Promise((resolve) => setTimeout(resolve, delayMs));
+     }
+   }
+
+   // Should not reach here, but TypeScript needs this
+   throw lastError ?? new RPCError('unknown', 'Retry failed', { method });
+ }
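With the defaults above (`baseDelayMs` 1000, `maxDelayMs` 10000, ±20% jitter), a retryable timeout whose suggested delay is 1000ms waits roughly 1s → 2s → 4s → 8s between attempts. A small sketch of the same arithmetic, jitter omitted:

```tsx
// Backoff schedule implied by the formula above, before jitter:
// delay(attempt) = min(suggestedDelay * 2^attempt, maxDelayMs)
const suggestedDelay = 1000;
const maxDelayMs = 10000;
const schedule = [0, 1, 2, 3].map((attempt) =>
  Math.min(suggestedDelay * Math.pow(2, attempt), maxDelayMs)
);
console.log(schedule); // [1000, 2000, 4000, 8000]; jitter then shifts each value by up to ±20%
```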
@@ -0,0 +1,187 @@
+ /**
+  * RPC Error with classification for retry logic.
+  *
+  * Mirrors Swift's RPCError implementation with:
+  * - isRetryable: Whether the error should trigger a retry
+  * - suggestedRetryDelay: Recommended delay before retry
+  * - userMessage: User-friendly error message
+  */
+
+ export type RPCErrorCode =
+   | 'not_connected'
+   | 'timeout'
+   | 'server_error'
+   | 'encoding_error'
+   | 'decoding_error'
+   | 'websocket_error'
+   | 'connection_failed'
+   | 'cancelled'
+   | 'network_error'
+   | 'unknown';
+
+ export class RPCError extends Error {
+   readonly code: RPCErrorCode;
+   readonly serverCode?: number;
+   readonly method?: string;
+   readonly isRetryable: boolean;
+   readonly suggestedRetryDelay: number;
+   readonly userMessage: string;
+
+   constructor(
+     code: RPCErrorCode,
+     message: string,
+     options?: {
+       serverCode?: number;
+       method?: string;
+       cause?: unknown;
+     }
+   ) {
+     super(message, { cause: options?.cause });
+     this.name = 'RPCError';
+     this.code = code;
+     this.serverCode = options?.serverCode;
+     this.method = options?.method;
+     this.isRetryable = this.determineRetryable();
+     this.suggestedRetryDelay = this.determineSuggestedDelay();
+     this.userMessage = this.determineUserMessage();
+   }
+
+   /**
+    * Determine if this error should trigger a retry.
+    * Transient errors (timeout, network) are retryable.
+    * Permanent errors (encoding, 4xx) are not.
+    */
+   private determineRetryable(): boolean {
+     switch (this.code) {
+       // Transient errors - retry
+       case 'timeout':
+       case 'websocket_error':
+       case 'connection_failed':
+       case 'network_error':
+         return true;
+
+       // Server errors - retry only 5xx
+       case 'server_error':
+         return this.serverCode ? this.serverCode >= 500 : false;
+
+       // Permanent errors - don't retry
+       case 'not_connected':
+       case 'encoding_error':
+       case 'decoding_error':
+       case 'cancelled':
+       case 'unknown':
+         return false;
+
+       default:
+         return false;
+     }
+   }
+
+   /**
+    * Suggested delay before retry based on error type.
+    */
+   private determineSuggestedDelay(): number {
+     switch (this.code) {
+       case 'timeout':
+         return 1000;
+       case 'websocket_error':
+         return 2000;
+       case 'server_error':
+         return 3000;
+       case 'connection_failed':
+         return 2000;
+       case 'network_error':
+         return 1500;
+       default:
+         return 1000;
+     }
+   }
+
+   /**
+    * User-friendly message for UI display.
+    */
+   private determineUserMessage(): string {
+     switch (this.code) {
+       case 'not_connected':
+         return 'Not connected. Please check your internet connection.';
+       case 'timeout':
+         return 'Request timed out. Please try again.';
+       case 'server_error':
+         return this.message || 'Server error. Please try again later.';
+       case 'websocket_error':
+         return 'Connection error. Please try again.';
+       case 'connection_failed':
+         return 'Unable to connect. Please check your internet connection.';
+       case 'encoding_error':
+       case 'decoding_error':
+         return 'Data error. Please try again or contact support.';
+       case 'cancelled':
+         return 'Request cancelled.';
+       case 'network_error':
+         return 'Network error. Please check your connection.';
+       default:
+         return 'An unexpected error occurred. Please try again.';
+     }
+   }
+
+   /**
+    * Create RPCError from Centrifugo/unknown error.
+    */
+   static fromError(error: unknown, method?: string): RPCError {
+     if (error instanceof RPCError) {
+       return error;
+     }
+
+     // Handle Centrifugo error object
+     if (typeof error === 'object' && error !== null) {
+       const err = error as Record<string, unknown>;
+       const code = err.code as number | undefined;
+       const message =
+         (err.message as string) || (err.error as string) || 'Unknown error';
+
+       // Timeout detection
+       if (message.includes('timeout') || message.includes('Timeout')) {
+         return new RPCError('timeout', message, { method });
+       }
+
+       // Connection issues
+       if (
+         message.includes('disconnect') ||
+         message.includes('connection') ||
+         message.includes('not connected')
+       ) {
+         return new RPCError('connection_failed', message, { method });
+       }
+
+       // Server error codes
+       if (code !== undefined) {
+         if (code >= 500) {
+           return new RPCError('server_error', message, {
+             serverCode: code,
+             method,
+           });
+         }
+         if (code >= 400) {
+           return new RPCError('server_error', message, {
+             serverCode: code,
+             method,
+           });
+         }
+       }
+
+       return new RPCError('unknown', message, { method, cause: error });
+     }
+
+     // Standard Error
+     if (error instanceof Error) {
+       // Check for abort/cancel
+       if (error.name === 'AbortError') {
+         return new RPCError('cancelled', 'Request cancelled', { method });
+       }
+
+       return new RPCError('unknown', error.message, { method, cause: error });
+     }
+
+     return new RPCError('unknown', String(error), { method });
+   }
+ }
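Based on the `fromError` and `determineRetryable` logic above, classification behaves roughly like this; a small illustrative sketch (not a test from the package):

```tsx
import { RPCError } from '@djangocfg/centrifugo';

// Message-based detection: anything mentioning "timeout" becomes a retryable timeout.
const timedOut = RPCError.fromError({ message: 'timeout exceeded' }, 'files.list');
console.log(timedOut.code, timedOut.isRetryable); // 'timeout' true

// Numeric codes map to server_error; only 5xx counts as retryable.
const serverDown = RPCError.fromError({ code: 503, message: 'unavailable' });
const badRequest = RPCError.fromError({ code: 400, message: 'bad request' });
console.log(serverDown.isRetryable, badRequest.isRetryable); // true false
```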
@@ -0,0 +1,154 @@
+ /**
+  * RPC Retry Handler with exponential backoff.
+  *
+  * Mirrors Swift's RPCRetryHandler implementation with:
+  * - Configurable max retries and delays
+  * - Exponential backoff with jitter
+  * - Retry decision based on RPCError.isRetryable
+  */
+
+ import { RPCError } from './RPCError';
+
+ export interface RetryConfig {
+   maxRetries: number;
+   baseDelayMs: number;
+   maxDelayMs: number;
+   jitterFactor: number;
+ }
+
+ export const DEFAULT_RETRY_CONFIG: RetryConfig = {
+   maxRetries: 3,
+   baseDelayMs: 1000,
+   maxDelayMs: 10000,
+   jitterFactor: 0.2,
+ };
+
+ export interface RetryState {
+   attempt: number;
+   lastError: RPCError | null;
+   totalDelayMs: number;
+ }
+
+ /**
+  * Calculate delay with exponential backoff and jitter.
+  */
+ export function calculateDelay(
+   attempt: number,
+   config: RetryConfig,
+   error?: RPCError
+ ): number {
+   // Use error's suggested delay if available
+   const baseDelay = error?.suggestedRetryDelay ?? config.baseDelayMs;
+
+   // Exponential: base * 2^attempt
+   const exponentialDelay = baseDelay * Math.pow(2, attempt);
+
+   // Cap at max
+   const cappedDelay = Math.min(exponentialDelay, config.maxDelayMs);
+
+   // Add jitter: delay * (1 ± jitterFactor * random)
+   const jitter = cappedDelay * config.jitterFactor * (Math.random() * 2 - 1);
+
+   return Math.max(0, Math.round(cappedDelay + jitter));
+ }
+
+ /**
+  * Sleep for specified milliseconds.
+  */
+ export function sleep(ms: number): Promise<void> {
+   return new Promise((resolve) => setTimeout(resolve, ms));
+ }
+
+ /**
+  * Execute operation with retry logic.
+  *
+  * @param operation - Async function to execute
+  * @param config - Retry configuration
+  * @param onRetry - Optional callback before each retry
+  * @returns Promise with operation result
+  * @throws RPCError after all retries exhausted
+  */
+ export async function withRetry<T>(
+   operation: () => Promise<T>,
+   config: Partial<RetryConfig> = {},
+   onRetry?: (state: RetryState, delayMs: number) => void
+ ): Promise<T> {
+   const fullConfig: RetryConfig = { ...DEFAULT_RETRY_CONFIG, ...config };
+   const state: RetryState = {
+     attempt: 0,
+     lastError: null,
+     totalDelayMs: 0,
+   };
+
+   while (state.attempt <= fullConfig.maxRetries) {
+     try {
+       return await operation();
+     } catch (error) {
+       const rpcError = RPCError.fromError(error);
+       state.lastError = rpcError;
+
+       // Don't retry non-retryable errors
+       if (!rpcError.isRetryable) {
+         throw rpcError;
+       }
+
+       // Check if we have retries left
+       if (state.attempt >= fullConfig.maxRetries) {
+         throw rpcError;
+       }
+
+       // Calculate delay and wait
+       const delayMs = calculateDelay(state.attempt, fullConfig, rpcError);
+       state.totalDelayMs += delayMs;
+
+       // Notify about retry
+       if (onRetry) {
+         onRetry(state, delayMs);
+       }
+
+       await sleep(delayMs);
+       state.attempt++;
+     }
+   }
+
+   // Should not reach here, but TypeScript needs this
+   throw state.lastError ?? new RPCError('unknown', 'Retry failed');
+ }
+
+ /**
+  * Create a retry wrapper for RPC calls.
+  */
+ export function createRetryHandler(config: Partial<RetryConfig> = {}) {
+   const fullConfig: RetryConfig = { ...DEFAULT_RETRY_CONFIG, ...config };
+
+   return {
+     config: fullConfig,
+
+     /**
+      * Execute with retry.
+      */
+     execute: <T>(
+       operation: () => Promise<T>,
+       onRetry?: (state: RetryState, delayMs: number) => void
+     ) => withRetry(operation, fullConfig, onRetry),
+
+     /**
+      * Check if error should be retried.
+      */
+     shouldRetry: (error: unknown, attempt: number): boolean => {
+       if (attempt >= fullConfig.maxRetries) {
+         return false;
+       }
+       const rpcError = RPCError.fromError(error);
+       return rpcError.isRetryable;
+     },
+
+     /**
+      * Get delay for next retry.
+      */
+     getDelay: (attempt: number, error?: unknown): number => {
+       const rpcError = error ? RPCError.fromError(error) : undefined;
+       return calculateDelay(attempt, fullConfig, rpcError);
+     },
+   };
+ }
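Besides `execute`, the handler above exposes `shouldRetry` and `getDelay` for callers that manage their own retry loop. A brief hedged sketch (the failing `pollStatus` call is a placeholder; `jitterFactor: 0` just makes the delays deterministic):

```tsx
import { createRetryHandler } from '@djangocfg/centrifugo';

const handler = createRetryHandler({ maxRetries: 3, jitterFactor: 0 });

// Manual loop: the caller decides when to stop and how to wait.
for (let attempt = 0; attempt <= 3; attempt++) {
  try {
    await pollStatus(); // placeholder operation
    break;
  } catch (error) {
    if (!handler.shouldRetry(error, attempt)) throw error;
    await new Promise((r) => setTimeout(r, handler.getDelay(attempt, error)));
  }
}
```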
@@ -0,0 +1,14 @@
+ /**
+  * Error Types
+  */
+
+ export { RPCError, type RPCErrorCode } from './RPCError';
+ export {
+   withRetry,
+   createRetryHandler,
+   calculateDelay,
+   sleep,
+   DEFAULT_RETRY_CONFIG,
+   type RetryConfig,
+   type RetryState,
+ } from './RPCRetryHandler';
package/src/core/index.ts CHANGED
@@ -11,5 +11,14 @@ export { CentrifugoRPCClient } from './client';
  export { createLogger, LogsStore, getGlobalLogsStore } from './logger';
  export type { Logger, LoggerConfig } from './logger';

+ // Errors
+ export {
+   RPCError,
+   withRetry,
+   createRetryHandler,
+   DEFAULT_RETRY_CONFIG,
+ } from './errors';
+ export type { RPCErrorCode, RetryConfig, RetryState } from './errors';
+
  // Types
  export type * from './types';