payment-kit 1.18.46 → 1.18.47
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/api/src/libs/pagination.ts +403 -0
- package/api/src/routes/invoices.ts +295 -103
- package/api/tests/libs/pagination.spec.ts +549 -0
- package/blocklet.yml +1 -1
- package/package.json +10 -10
- package/src/components/subscription/portal/list.tsx +36 -26
- package/src/libs/util.ts +11 -0
- package/src/pages/admin/billing/subscriptions/detail.tsx +2 -10
- package/src/pages/customer/invoice/past-due.tsx +1 -0
|
@@ -0,0 +1,403 @@
|
|
|
1
|
+
import logger from './logger';
|
|
2
|
+
|
|
3
|
+
// In-memory cache with TTL support for simulated data
|
|
4
|
+
const cache = new Map<string, { data: any; expireTime: number }>();
|
|
5
|
+
const DEFAULT_TTL = 60 * 60 * 6 * 1000; // 6 hours
|
|
6
|
+
|
|
7
|
+
/**
|
|
8
|
+
* Fetches data from cache if available and not expired, otherwise fetches fresh data
|
|
9
|
+
* Implements lazy cache cleanup to prevent memory leaks
|
|
10
|
+
*/
|
|
11
|
+
export async function getCachedOrFetch<T>(key: string, fetcher: () => Promise<T>, ttl?: number): Promise<T> {
|
|
12
|
+
const now = Date.now();
|
|
13
|
+
const cached = cache.get(key);
|
|
14
|
+
|
|
15
|
+
// Return cached data if valid
|
|
16
|
+
if (cached && now < cached.expireTime) {
|
|
17
|
+
return cached.data;
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
// Fetch and cache new data
|
|
21
|
+
const result = await fetcher();
|
|
22
|
+
cache.set(key, {
|
|
23
|
+
data: result,
|
|
24
|
+
expireTime: now + (ttl || DEFAULT_TTL),
|
|
25
|
+
});
|
|
26
|
+
|
|
27
|
+
// Lazy cleanup: Randomly clean expired entries to prevent memory leaks
|
|
28
|
+
if (Math.random() < 0.1) {
|
|
29
|
+
// 10% chance to trigger cleanup
|
|
30
|
+
for (const [k, v] of cache.entries()) {
|
|
31
|
+
if (now > v.expireTime) {
|
|
32
|
+
cache.delete(k);
|
|
33
|
+
}
|
|
34
|
+
}
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
return result;
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
// Input options for a paginated query. `page` is 1-based
// (mergePaginate computes offset as (page - 1) * pageSize).
export interface PaginationOptions {
  page: number;
  pageSize?: number; // Make optional to support pageSize = 0
}

// Result envelope returned by mergePaginate: the full item count,
// the page of items, and the paging descriptor echoed back to callers.
export interface PaginatedResult<T> {
  total: number;
  data: T[];
  paging: {
    page: number;
    pageSize: number;
    totalPages: number;
  };
}

// Generic data source interface
export interface DataSource<T> {
  // Total number of items this source can provide.
  count: () => Promise<number>;
  // Returns up to `limit` items; `offset` is optional and passed through
  // by the pagination strategy when precise skipping is possible.
  fetch: (limit: number, offset?: number) => Promise<T[]>;
  // Optional metadata for optimization hints
  meta?: {
    type?: 'database' | 'cached' | 'computed';
    estimatedSize?: number;
    cacheable?: boolean;
  };
}
|
|
66
|
+
|
|
67
|
+
/**
|
|
68
|
+
* Merges multiple sorted arrays efficiently
|
|
69
|
+
* Handles undefined values and provides type safety
|
|
70
|
+
*/
|
|
71
|
+
function mergeSortedArrays<T>(arrays: T[][], orderBy: (a: T, b: T) => number, offset: number, limit: number): T[] {
|
|
72
|
+
// Initialize pointers for each array
|
|
73
|
+
const pointers = new Array(arrays.length).fill(0);
|
|
74
|
+
const result: T[] = [];
|
|
75
|
+
let skipped = 0;
|
|
76
|
+
|
|
77
|
+
while (result.length < limit) {
|
|
78
|
+
// Find the next item to process
|
|
79
|
+
let bestArrayIndex = -1;
|
|
80
|
+
let bestItem: T | undefined;
|
|
81
|
+
|
|
82
|
+
for (let i = 0; i < arrays.length; i++) {
|
|
83
|
+
const array = arrays[i];
|
|
84
|
+
const pointer = pointers[i];
|
|
85
|
+
|
|
86
|
+
if (!array || pointer >= array.length) {
|
|
87
|
+
// Array is undefined or exhausted
|
|
88
|
+
// eslint-disable-next-line no-continue
|
|
89
|
+
continue;
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
const currentItem = array[pointer];
|
|
93
|
+
if (currentItem === undefined) {
|
|
94
|
+
// Safety check for undefined items
|
|
95
|
+
// eslint-disable-next-line no-continue
|
|
96
|
+
continue;
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
if (bestItem === undefined || orderBy(currentItem, bestItem) < 0) {
|
|
100
|
+
bestItem = currentItem;
|
|
101
|
+
bestArrayIndex = i;
|
|
102
|
+
}
|
|
103
|
+
}
|
|
104
|
+
|
|
105
|
+
// No more items available
|
|
106
|
+
if (bestArrayIndex === -1 || bestItem === undefined) break;
|
|
107
|
+
|
|
108
|
+
// Advance the pointer for the selected array
|
|
109
|
+
pointers[bestArrayIndex]++;
|
|
110
|
+
|
|
111
|
+
// Apply pagination logic
|
|
112
|
+
if (skipped < offset) {
|
|
113
|
+
skipped++;
|
|
114
|
+
} else {
|
|
115
|
+
result.push(bestItem);
|
|
116
|
+
}
|
|
117
|
+
}
|
|
118
|
+
|
|
119
|
+
return result;
|
|
120
|
+
}
|
|
121
|
+
|
|
122
|
+
/**
|
|
123
|
+
* Estimates the optimal fetch strategy for data sources
|
|
124
|
+
*/
|
|
125
|
+
function calculateFetchStrategy<T>(
|
|
126
|
+
sources: DataSource<T>[],
|
|
127
|
+
options: PaginationOptions,
|
|
128
|
+
totalCounts: number[]
|
|
129
|
+
): { fetchLimit: number; fetchOffset: number }[] {
|
|
130
|
+
const { page, pageSize = 0 } = options;
|
|
131
|
+
const offset = (page - 1) * pageSize;
|
|
132
|
+
const total = totalCounts.reduce((sum, count) => sum + count, 0);
|
|
133
|
+
|
|
134
|
+
return sources.map((source, index) => {
|
|
135
|
+
const sourceCount = totalCounts[index] ?? 0;
|
|
136
|
+
const sourceMeta = source.meta;
|
|
137
|
+
|
|
138
|
+
// Handle pageSize = 0: fetch all data
|
|
139
|
+
if (!pageSize) {
|
|
140
|
+
return { fetchLimit: Math.max(sourceCount, 1000), fetchOffset: 0 };
|
|
141
|
+
}
|
|
142
|
+
|
|
143
|
+
// For database sources with multiple sources, use conservative strategy
|
|
144
|
+
if (sourceMeta?.type === 'database') {
|
|
145
|
+
if (sources.length > 1) {
|
|
146
|
+
// For multi-source scenarios, we need more data to ensure correct merging
|
|
147
|
+
// Especially for later pages, estimation can be inaccurate
|
|
148
|
+
const bufferMultiplier = Math.max(2, Math.ceil(page / 2)); // More buffer for later pages
|
|
149
|
+
const fetchLimit = Math.min(
|
|
150
|
+
sourceCount,
|
|
151
|
+
Math.max(pageSize * bufferMultiplier, Math.ceil(sourceCount * 0.5)) // At least 50% of source data
|
|
152
|
+
);
|
|
153
|
+
return { fetchLimit, fetchOffset: 0 };
|
|
154
|
+
}
|
|
155
|
+
// Single database source can use precise offset
|
|
156
|
+
const estimatedRatio = total > 0 ? sourceCount / total : 0;
|
|
157
|
+
const estimatedOffset = Math.max(0, Math.floor(offset * estimatedRatio) - pageSize);
|
|
158
|
+
const fetchLimit = Math.min(pageSize * 3, Math.max(pageSize, sourceCount - estimatedOffset));
|
|
159
|
+
return { fetchLimit, fetchOffset: estimatedOffset };
|
|
160
|
+
}
|
|
161
|
+
|
|
162
|
+
// For cached/computed sources, always fetch more data to ensure accuracy
|
|
163
|
+
if (sourceMeta?.type === 'cached' || sourceMeta?.type === 'computed') {
|
|
164
|
+
// For multi-source, be more conservative
|
|
165
|
+
if (sources.length > 1) {
|
|
166
|
+
const fetchLimit = Math.min(sourceCount, Math.max(pageSize * 3, Math.ceil(sourceCount * 0.8))); // Get 80% of data
|
|
167
|
+
return { fetchLimit, fetchOffset: 0 };
|
|
168
|
+
}
|
|
169
|
+
const bufferSize = Math.min(sourceMeta.estimatedSize ?? sourceCount, pageSize * 2);
|
|
170
|
+
return { fetchLimit: Math.max(pageSize, bufferSize), fetchOffset: 0 };
|
|
171
|
+
}
|
|
172
|
+
|
|
173
|
+
// Default strategy: more conservative for multi-source
|
|
174
|
+
if (sources.length > 1) {
|
|
175
|
+
const fetchLimit = Math.min(sourceCount, Math.max(pageSize * 2, Math.ceil(sourceCount * 0.6))); // Get 60% of data
|
|
176
|
+
return { fetchLimit, fetchOffset: 0 };
|
|
177
|
+
}
|
|
178
|
+
const fetchLimit = Math.min(pageSize * 2, sourceCount);
|
|
179
|
+
return { fetchLimit: Math.max(pageSize, fetchLimit), fetchOffset: 0 };
|
|
180
|
+
});
|
|
181
|
+
}
|
|
182
|
+
|
|
183
|
+
/**
 * Enhanced merge pagination with intelligent fetching strategy.
 * Supports multiple data sources with different characteristics.
 *
 * Behavior summary (all grounded in the branches below):
 * - pageSize = 0 returns ALL data from every source in one sorted page.
 * - A single source takes a fast path; database sources are fetched with
 *   exact limit/offset (the DB is assumed to sort consistently with
 *   `orderBy` — NOTE(review): confirm against callers), others are fully
 *   fetched and sorted in memory.
 * - Multiple sources are fetched per calculateFetchStrategy and merged
 *   via mergeSortedArrays.
 * - Every failure path logs and degrades to an empty result rather than
 *   throwing, so callers always receive a PaginatedResult.
 *
 * @param sources - Data sources to paginate across
 * @param options - Page number (1-based) and page size (0 = all)
 * @param orderBy - Comparator defining the merged sort order
 * @returns A PaginatedResult; empty (total 0) on unrecoverable errors
 */
export async function mergePaginate<T>(
  sources: DataSource<T>[],
  options: PaginationOptions,
  orderBy: (a: T, b: T) => number
): Promise<PaginatedResult<T>> {
  // Clamp page to at least 1; options.page of 0/NaN also falls back to 1.
  const page = Math.max(1, options.page || 1);
  const pageSize = options.pageSize ?? 0;
  const offset = (page - 1) * pageSize;

  try {
    // Get total counts from all sources with error handling:
    // a failing source contributes 0 instead of failing the whole call.
    const totalCounts = await Promise.all(
      sources.map(async (source, index) => {
        try {
          return await source.count();
        } catch (error) {
          logger.error('Failed to get count from data source', { error, sourceIndex: index });
          return 0;
        }
      })
    );

    const total = totalCounts.reduce((sum, count) => sum + count, 0);

    // Handle pageSize = 0: return all data as a single page.
    if (!pageSize) {
      const allData = await Promise.all(
        sources.map(async (source, index) => {
          try {
            const sourceTotal = totalCounts[index] ?? 0;
            // Floor of 1000 guards against a source under-reporting its count.
            const fetchLimit = Math.max(sourceTotal, 1000);
            return await source.fetch(fetchLimit, 0);
          } catch (error) {
            logger.error('Failed to fetch all data from source', { error, sourceIndex: index });
            return [];
          }
        })
      ).then((arrays) => arrays.flat());

      return {
        total,
        data: allData.sort(orderBy),
        paging: {
          page,
          pageSize: 0,
          totalPages: 1,
        },
      };
    }

    // Fast path: Single data source optimization
    if (sources.length === 1) {
      const source = sources[0];
      if (!source) {
        // Defensive: sources[0] can be undefined under noUncheckedIndexedAccess.
        return {
          total: 0,
          data: [],
          paging: {
            page,
            pageSize,
            totalPages: 0,
          },
        };
      }

      try {
        // For single source, we need to get enough data to sort correctly.
        // We can't just fetch the page directly because sorting might change the order.
        const sourceTotal = totalCounts[0] ?? 0;

        // For database sources, we can try to be smart about fetching
        if (source.meta?.type === 'database') {
          // For database sources, assume they can handle sorting at the database level
          // and fetch the exact page we need
          const data = await source.fetch(pageSize, offset);
          return {
            total,
            data,
            paging: {
              page,
              pageSize,
              totalPages: Math.ceil(total / pageSize),
            },
          };
        }
        // For other sources (cached, computed), fetch all data and sort in memory.
        const allData = await source.fetch(sourceTotal, 0);
        const sortedData = allData.sort(orderBy);
        const pageData = sortedData.slice(offset, offset + pageSize);

        return {
          total,
          data: pageData,
          paging: {
            page,
            pageSize,
            totalPages: Math.ceil(total / pageSize),
          },
        };
      } catch (error) {
        logger.error('Failed to fetch data from single source', { error });
        return {
          total: 0,
          data: [],
          paging: {
            page,
            pageSize,
            totalPages: 0,
          },
        };
      }
    }

    // Calculate optimal fetch strategy for each source
    const fetchStrategies = calculateFetchStrategy(sources, { page, pageSize }, totalCounts);

    // Fetch data from all sources; a failing source contributes an empty array.
    const dataArrays = await Promise.all(
      sources.map(async (source, index) => {
        try {
          const strategy = fetchStrategies[index];
          if (!strategy) {
            logger.warn('No fetch strategy found for source', { sourceIndex: index });
            return [];
          }

          const data = await source.fetch(strategy.fetchLimit, strategy.fetchOffset);

          // Only sort non-database sources (database sources are assumed to be pre-sorted)
          if (source.meta?.type === 'database') {
            // Database sources are assumed to handle sorting at the database level
            return data;
          }
          // Non-database sources need to be sorted in memory
          return data.sort(orderBy);
        } catch (error) {
          logger.error('Failed to fetch data from source', { error, sourceIndex: index });
          return [];
        }
      })
    );

    // Merge all sorted arrays efficiently
    const paged = mergeSortedArrays(dataArrays, orderBy, offset, pageSize);

    return {
      total,
      data: paged,
      paging: {
        page,
        pageSize,
        totalPages: Math.ceil(total / pageSize),
      },
    };
  } catch (error) {
    // Catch-all: never propagate — return an empty result envelope instead.
    logger.error('Failed to merge paginate data', { error, options });
    return {
      total: 0,
      data: [],
      paging: {
        page,
        pageSize,
        totalPages: 0,
      },
    };
  }
}
|
|
354
|
+
|
|
355
|
+
/**
|
|
356
|
+
* Creates a time-based sorting function with optional secondary sort
|
|
357
|
+
* Extended to support custom secondary field and maintains backward compatibility
|
|
358
|
+
*
|
|
359
|
+
* @param order - Sort direction ('asc' or 'desc')
|
|
360
|
+
* @param secondaryField - Secondary sort field (defaults to 'id')
|
|
361
|
+
* @returns Sorting function that compares timestamps and secondary field
|
|
362
|
+
*/
|
|
363
|
+
export function defaultTimeOrderBy(
|
|
364
|
+
order: 'asc' | 'desc' = 'desc', // Keep original default
|
|
365
|
+
secondaryField: string = 'id'
|
|
366
|
+
): (a: any, b: any) => number {
|
|
367
|
+
return (a, b) => {
|
|
368
|
+
const dateA = new Date(a.created_at).getTime();
|
|
369
|
+
const dateB = new Date(b.created_at).getTime();
|
|
370
|
+
|
|
371
|
+
// First compare by timestamp
|
|
372
|
+
if (dateA !== dateB) {
|
|
373
|
+
return order === 'asc' ? dateA - dateB : dateB - dateA;
|
|
374
|
+
}
|
|
375
|
+
|
|
376
|
+
// If timestamps are equal, compare by secondary field
|
|
377
|
+
const aValue = a[secondaryField];
|
|
378
|
+
const bValue = b[secondaryField];
|
|
379
|
+
|
|
380
|
+
// If secondary field is undefined or not comparable, use id as fallback
|
|
381
|
+
if (aValue === undefined || bValue === undefined || typeof aValue !== typeof bValue) {
|
|
382
|
+
// Use ID as secondary sort key when timestamps are equal - original behavior
|
|
383
|
+
if (a.id && b.id) {
|
|
384
|
+
return order === 'asc' ? a.id.localeCompare(b.id) : b.id.localeCompare(a.id);
|
|
385
|
+
}
|
|
386
|
+
return 0;
|
|
387
|
+
}
|
|
388
|
+
|
|
389
|
+
// Compare based on field type
|
|
390
|
+
if (typeof aValue === 'string') {
|
|
391
|
+
return order === 'asc' ? aValue.localeCompare(bValue) : bValue.localeCompare(aValue);
|
|
392
|
+
}
|
|
393
|
+
if (typeof aValue === 'number') {
|
|
394
|
+
return order === 'asc' ? aValue - bValue : bValue - aValue;
|
|
395
|
+
}
|
|
396
|
+
|
|
397
|
+
// For unsupported types, fallback to id comparison
|
|
398
|
+
if (a.id && b.id) {
|
|
399
|
+
return order === 'asc' ? a.id.localeCompare(b.id) : b.id.localeCompare(a.id);
|
|
400
|
+
}
|
|
401
|
+
return 0;
|
|
402
|
+
};
|
|
403
|
+
}
|