@markwharton/liquidplanner 1.7.0 → 1.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/client.d.ts CHANGED
@@ -36,6 +36,7 @@ export declare class LPClient {
     private readonly onRequest?;
     private readonly cache?;
     private readonly cacheTtl;
+    private readonly retryConfig?;
     constructor(config: LPConfig);
     /**
      * Route through cache if enabled, otherwise call factory directly.
@@ -54,6 +55,10 @@ export declare class LPClient {
    invalidateTimesheetCache(): void;
    /**
     * Make an authenticated request to the LP API
+    *
+    * When retry is configured, automatically retries on HTTP 429 (Too Many Requests)
+    * and 503 (Service Unavailable) with exponential backoff.
+    * Respects the Retry-After header when present.
     */
    private fetch;
    /**
@@ -99,7 +104,7 @@ export declare class LPClient {
    /**
     * Get the ancestry chain for an item
     *
-    * Returns ancestors from root to immediate parent (excludes the item itself).
+    * Returns ancestors from root to immediate parent (root→child order, excludes the item itself).
     * Uses the items/{itemId}/ancestors endpoint.
     *
     * @param itemId - The item ID to get ancestors for
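A quick usage sketch of the new retry surface, not taken from the package docs: the apiToken field is assumed from the buildAuthHeader(this.apiToken) call in client.js, and the token value is a placeholder.

import { LPClient } from '@markwharton/liquidplanner';

const client = new LPClient({
    apiToken: 'lp_placeholder_token', // assumed field; implied by buildAuthHeader(this.apiToken) in client.js
    // New in 1.8.0: opt in to retry; omit this block to keep the previous fail-fast behavior.
    retry: { maxRetries: 3, initialDelayMs: 1000, maxDelayMs: 10000 },
    // Retries are reported through the same onRequest debug hook as ordinary requests.
    onRequest: ({ method, url, description }) => console.log(method, url, description ?? ''),
});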
package/dist/client.js CHANGED
@@ -6,7 +6,7 @@
  *
  * @see https://api-docs.liquidplanner.com/
  */
-import { buildAuthHeader, hoursToMinutes, normalizeItemType, filterIs, filterIn, paginatedFetch, } from './utils.js';
+import { buildAuthHeader, hoursToMinutes, normalizeItemType, filterIs, filterIn, paginatedFetch, batchMap, } from './utils.js';
 import { parseLPErrorResponse, getErrorMessage } from './errors.js';
 import { LP_API_BASE } from './constants.js';
 import { TTLCache } from './cache.js';
@@ -60,6 +60,14 @@ export class LPClient {
             assignmentsTtl: config.cache?.assignmentsTtl ?? 120000,
             itemsTtl: config.cache?.itemsTtl ?? 300000,
         };
+        // Initialize retry config with defaults if provided
+        if (config.retry) {
+            this.retryConfig = {
+                maxRetries: config.retry.maxRetries ?? 3,
+                initialDelayMs: config.retry.initialDelayMs ?? 1000,
+                maxDelayMs: config.retry.maxDelayMs ?? 10000,
+            };
+        }
     }
     /**
      * Route through cache if enabled, otherwise call factory directly.
@@ -87,19 +95,56 @@ export class LPClient {
     }
     /**
      * Make an authenticated request to the LP API
+     *
+     * When retry is configured, automatically retries on HTTP 429 (Too Many Requests)
+     * and 503 (Service Unavailable) with exponential backoff.
+     * Respects the Retry-After header when present.
      */
     async fetch(url, options = {}) {
         const { method = 'GET', body, description } = options;
         // Notify listener of request (for debugging)
         this.onRequest?.({ method, url, description });
-        return fetch(url, {
-            method,
-            headers: {
-                Authorization: buildAuthHeader(this.apiToken),
-                'Content-Type': 'application/json',
-            },
-            body: body ? JSON.stringify(body) : undefined,
-        });
+        const maxAttempts = this.retryConfig ? 1 + this.retryConfig.maxRetries : 1;
+        let lastResponse;
+        for (let attempt = 0; attempt < maxAttempts; attempt++) {
+            lastResponse = await fetch(url, {
+                method,
+                headers: {
+                    Authorization: buildAuthHeader(this.apiToken),
+                    'Content-Type': 'application/json',
+                },
+                body: body ? JSON.stringify(body) : undefined,
+            });
+            // Check if this is a retryable status
+            if (this.retryConfig && (lastResponse.status === 429 || lastResponse.status === 503)) {
+                if (attempt >= this.retryConfig.maxRetries) {
+                    return lastResponse; // Exhausted retries
+                }
+                // Calculate delay: respect Retry-After header, or use exponential backoff
+                let delayMs;
+                const retryAfterHeader = lastResponse.headers.get('Retry-After');
+                if (retryAfterHeader) {
+                    const retryAfterSeconds = parseInt(retryAfterHeader, 10);
+                    delayMs = Number.isFinite(retryAfterSeconds)
+                        ? retryAfterSeconds * 1000
+                        : this.retryConfig.initialDelayMs * Math.pow(2, attempt);
+                }
+                else {
+                    delayMs = this.retryConfig.initialDelayMs * Math.pow(2, attempt);
+                }
+                delayMs = Math.min(delayMs, this.retryConfig.maxDelayMs);
+                // Notify listener of retry (for debugging)
+                this.onRequest?.({
+                    method,
+                    url,
+                    description: `Retry ${attempt + 1}/${this.retryConfig.maxRetries} after ${delayMs}ms (HTTP ${lastResponse.status})`,
+                });
+                await new Promise(resolve => setTimeout(resolve, delayMs));
+                continue;
+            }
+            return lastResponse;
+        }
+        return lastResponse;
     }
     // ============================================================================
     // Workspace & Validation
@@ -219,7 +264,7 @@ export class LPClient {
     /**
      * Get the ancestry chain for an item
      *
-     * Returns ancestors from root to immediate parent (excludes the item itself).
+     * Returns ancestors from root to immediate parent (root→child order, excludes the item itself).
     * Uses the items/{itemId}/ancestors endpoint.
     *
     * @param itemId - The item ID to get ancestors for
@@ -243,7 +288,7 @@ export class LPClient {
                 id: a.id,
                 name: a.name || null,
                 itemType: normalizeItemType(a.itemType),
-            }));
+            })).reverse(); // LP API returns child→root, normalize to root→child
             return { ancestors };
         }
         catch (error) {
@@ -319,10 +364,11 @@ export class LPClient {
                 assignmentsByParent.set(a.parentId, a);
             }
         }
-        const ancestorResults = await Promise.all([...assignmentsByParent.entries()].map(async ([parentId, assignment]) => {
+        const parentEntries = [...assignmentsByParent.entries()];
+        const ancestorResults = await batchMap(parentEntries, 5, async ([parentId, assignment]) => {
            const { ancestors, error } = await this.getItemAncestors(assignment.id);
            return { parentId, ancestors, error };
-        }));
+        });
        const firstError = ancestorResults.find(r => r.error);
        if (firstError) {
            return { error: firstError.error };
@@ -366,8 +412,7 @@ export class LPClient {
        // Build hierarchyPath from Project and Folder ancestors
        // Exclude system containers (Package, WorkspaceRoot) and Tasks
        const hierarchyAncestors = ancestors
-            .filter(anc => anc.itemType === 'Project' || anc.itemType === 'Folder')
-            .reverse(); // LP returns child→root, we want root→child
+            .filter(anc => anc.itemType === 'Project' || anc.itemType === 'Folder');
        if (hierarchyAncestors.length > 0) {
            result.hierarchyPath = hierarchyAncestors
                .map(anc => anc.name ?? `[${anc.id}]`)
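To make the backoff schedule concrete: with the defaults (initialDelayMs 1000, maxDelayMs 10000, maxRetries 3) and no Retry-After header, the waits between attempts are 1000ms, 2000ms, and 4000ms. The sketch below mirrors the delay computation in LPClient.fetch for illustration only; computeDelayMs is a hypothetical standalone helper, not part of the package.

// Mirrors the delay logic in LPClient.fetch; for illustration only.
function computeDelayMs(attempt: number, retryAfterSeconds?: number): number {
    const initialDelayMs = 1000; // default
    const maxDelayMs = 10000;    // default cap
    // A numeric Retry-After (in seconds) wins; otherwise exponential backoff doubles per attempt.
    const delayMs = retryAfterSeconds !== undefined && Number.isFinite(retryAfterSeconds)
        ? retryAfterSeconds * 1000
        : initialDelayMs * Math.pow(2, attempt);
    return Math.min(delayMs, maxDelayMs);
}

// computeDelayMs(0) === 1000, computeDelayMs(1) === 2000, computeDelayMs(2) === 4000
// computeDelayMs(4) would be capped at 10000; computeDelayMs(0, 30) is likewise capped at 10000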
package/dist/index.d.ts CHANGED
@@ -28,8 +28,8 @@
  */
 export { LPClient } from './client.js';
 export { resolveTaskToAssignment } from './workflows.js';
-export type { LPConfig, LPCacheConfig, LPItemType, HierarchyItem, LPItem, LPAncestor, LPWorkspace, LPMember, LPCostCode, LPSyncResult, LPTimesheetEntry, LPTimesheetEntryWithId, LPTaskResolution, LPResult, LPUpsertOptions, LPAssignmentWithContext, LPErrorInfo, } from './types.js';
-export { hoursToMinutes, normalizeItemType, buildAuthHeader, filterIs, filterIn, paginatedFetch, } from './utils.js';
+export type { LPConfig, LPCacheConfig, LPRetryConfig, LPItemType, HierarchyItem, LPItem, LPAncestor, LPWorkspace, LPMember, LPCostCode, LPSyncResult, LPTimesheetEntry, LPTimesheetEntryWithId, LPTaskResolution, LPResult, LPUpsertOptions, LPAssignmentWithContext, LPErrorInfo, } from './types.js';
+export { hoursToMinutes, normalizeItemType, buildAuthHeader, filterIs, filterIn, paginatedFetch, batchMap, } from './utils.js';
 export type { PaginateOptions } from './utils.js';
 export { LP_API_BASE } from './constants.js';
 export { LPError, parseLPErrorResponse, getErrorMessage } from './errors.js';
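For reference, the names added to the public entry point in this release, as an import sketch (the retry object here is illustrative only):

import { batchMap } from '@markwharton/liquidplanner';
import type { LPConfig, LPRetryConfig } from '@markwharton/liquidplanner';

const retry: LPRetryConfig = { maxRetries: 2 }; // remaining fields fall back to the documented defaults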
package/dist/index.js CHANGED
@@ -31,7 +31,7 @@ export { LPClient } from './client.js';
 // Workflows
 export { resolveTaskToAssignment } from './workflows.js';
 // Utilities
-export { hoursToMinutes, normalizeItemType, buildAuthHeader, filterIs, filterIn, paginatedFetch, } from './utils.js';
+export { hoursToMinutes, normalizeItemType, buildAuthHeader, filterIs, filterIn, paginatedFetch, batchMap, } from './utils.js';
 // Constants
 export { LP_API_BASE } from './constants.js';
 // Errors
package/dist/types.d.ts CHANGED
@@ -129,6 +129,21 @@ export interface LPCacheConfig {
     /** TTL for items and ancestors (default: 300000 = 5 min) */
     itemsTtl?: number;
 }
+/**
+ * Retry configuration for LPClient
+ *
+ * Controls automatic retry behavior for transient failures
+ * (HTTP 429 Too Many Requests, 503 Service Unavailable).
+ * Uses exponential backoff with optional Retry-After header support.
+ */
+export interface LPRetryConfig {
+    /** Maximum number of retry attempts (default: 3) */
+    maxRetries?: number;
+    /** Initial delay in milliseconds before first retry (default: 1000) */
+    initialDelayMs?: number;
+    /** Maximum delay cap in milliseconds (default: 10000) */
+    maxDelayMs?: number;
+}
 /**
  * LiquidPlanner configuration for API access
  */
@@ -147,6 +162,8 @@ export interface LPConfig {
     }) => void;
     /** Enable caching with optional TTL overrides. Omit to disable caching. */
     cache?: LPCacheConfig;
+    /** Retry configuration for transient failures (429, 503). Omit to disable retry. */
+    retry?: LPRetryConfig;
 }
 /**
  * Result of a timesheet sync operation
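A small sketch of how a partial retry config is normalized, following the defaults documented above and applied in the LPClient constructor:

// Passing an empty object still enables retry; each missing field falls back to its default.
const retry: LPRetryConfig = {};
// Inside the client this is treated as:
// { maxRetries: 3, initialDelayMs: 1000, maxDelayMs: 10000 }
// Omitting the retry field from LPConfig entirely disables retry, matching pre-1.8.0 behavior.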
package/dist/utils.d.ts CHANGED
@@ -54,3 +54,16 @@ export declare function normalizeItemType(apiItemType: string): LPItemType;
  * Build the Authorization header for LP API requests
  */
 export declare function buildAuthHeader(apiToken: string): string;
+/**
+ * Map over items with bounded concurrency
+ *
+ * Processes items in batches of `concurrency`, waiting for each batch
+ * to complete before starting the next. This prevents overwhelming
+ * APIs with too many simultaneous requests.
+ *
+ * @param items - Array of items to process
+ * @param concurrency - Maximum number of concurrent operations
+ * @param fn - Async function to apply to each item
+ * @returns Array of results in the same order as input items
+ */
+export declare function batchMap<T, R>(items: T[], concurrency: number, fn: (item: T) => Promise<R>): Promise<R[]>;
package/dist/utils.js CHANGED
@@ -104,3 +104,27 @@ export function normalizeItemType(apiItemType) {
 export function buildAuthHeader(apiToken) {
     return `Bearer ${apiToken}`;
 }
+// ============================================================================
+// Concurrency Helper
+// ============================================================================
+/**
+ * Map over items with bounded concurrency
+ *
+ * Processes items in batches of `concurrency`, waiting for each batch
+ * to complete before starting the next. This prevents overwhelming
+ * APIs with too many simultaneous requests.
+ *
+ * @param items - Array of items to process
+ * @param concurrency - Maximum number of concurrent operations
+ * @param fn - Async function to apply to each item
+ * @returns Array of results in the same order as input items
+ */
+export async function batchMap(items, concurrency, fn) {
+    const results = [];
+    for (let i = 0; i < items.length; i += concurrency) {
+        const batch = items.slice(i, i + concurrency);
+        const batchResults = await Promise.all(batch.map(fn));
+        results.push(...batchResults);
+    }
+    return results;
+}
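A usage sketch for the new helper; the id list and the per-item function are hypothetical stand-ins, only batchMap itself comes from the package:

import { batchMap } from '@markwharton/liquidplanner';

const ids = [101, 102, 103, 104, 105, 106, 107];

// At most 5 operations in flight per batch; results preserve the input order.
const results = await batchMap(ids, 5, async (id) => {
    return { id, doubled: id * 2 }; // stand-in for a real per-item API call
});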
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@markwharton/liquidplanner",
-  "version": "1.7.0",
+  "version": "1.8.0",
   "description": "LiquidPlanner API client for timesheet integration",
   "type": "module",
   "main": "dist/index.js",