claude-autopm 1.28.0 → 1.29.0
This diff covers publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in the public registry.
- package/README.md +47 -15
- package/autopm/.claude/scripts/pm/analytics.js +425 -0
- package/autopm/.claude/scripts/pm/sync-batch.js +337 -0
- package/lib/README-FILTER-SEARCH.md +285 -0
- package/lib/analytics-engine.js +689 -0
- package/lib/batch-processor-integration.js +366 -0
- package/lib/batch-processor.js +278 -0
- package/lib/burndown-chart.js +415 -0
- package/lib/dependency-analyzer.js +466 -0
- package/lib/filter-engine.js +414 -0
- package/lib/query-parser.js +322 -0
- package/package.json +5 -4
package/lib/batch-processor-integration.js
@@ -0,0 +1,366 @@
+/**
+ * Batch Processor Integration Examples
+ *
+ * This file demonstrates how to integrate BatchProcessor with existing
+ * GitHub sync operations from pm-sync-upload-local.js
+ *
+ * @example
+ * const { batchSyncPRDs, batchSyncAll } = require('./lib/batch-processor-integration');
+ *
+ * // Batch sync all PRDs
+ * const results = await batchSyncPRDs({
+ *   basePath: '.claude',
+ *   owner: 'user',
+ *   repo: 'repository',
+ *   octokit,
+ *   dryRun: false
+ * });
+ */
+
+const fs = require('fs').promises;
+const path = require('path');
+const BatchProcessor = require('./batch-processor');
+const {
+  syncPRDToGitHub,
+  syncEpicToGitHub,
+  syncTaskToGitHub,
+  loadSyncMap,
+  saveSyncMap
+} = require('../autopm/.claude/scripts/pm-sync-upload-local');
+
+/**
+ * Batch sync PRDs to GitHub
+ *
+ * @param {Object} options
+ * @param {string} options.basePath - Base path containing PRDs
+ * @param {string} options.owner - GitHub repo owner
+ * @param {string} options.repo - GitHub repo name
+ * @param {Object} options.octokit - Octokit instance
+ * @param {boolean} options.dryRun - Dry run mode
+ * @param {number} options.maxConcurrent - Max concurrent uploads (default: 10)
+ * @param {Function} options.onProgress - Progress callback
+ * @returns {Promise<Object>} Results summary
+ */
+async function batchSyncPRDs({
+  basePath,
+  owner,
+  repo,
+  octokit,
+  dryRun = false,
+  maxConcurrent = 10,
+  onProgress = null
+}) {
+  const syncMapPath = path.join(basePath, 'sync-map.json');
+  const syncMap = await loadSyncMap(syncMapPath);
+
+  // Load all PRD files
+  const prdDir = path.join(basePath, 'prds');
+  let prdFiles = [];
+  try {
+    const files = await fs.readdir(prdDir);
+    prdFiles = files
+      .filter(f => f.endsWith('.md'))
+      .map(f => ({
+        path: path.join(prdDir, f),
+        id: f.replace('.md', ''),
+        type: 'prd'
+      }));
+  } catch (err) {
+    console.error(`Error reading PRD directory: ${err.message}`);
+    return {
+      total: 0,
+      succeeded: 0,
+      failed: 0,
+      duration: 0,
+      errors: []
+    };
+  }
+
+  // Create batch processor
+  const processor = new BatchProcessor({ maxConcurrent });
+
+  // Create wrapper function for syncPRDToGitHub
+  const syncFn = async (item, repo, octokit, syncMap, dryRun) => {
+    return await syncPRDToGitHub(item.path, repo, octokit, syncMap, dryRun);
+  };
+
+  // Batch upload
+  const results = await processor.batchUpload({
+    items: prdFiles,
+    syncFn,
+    repo: { owner, repo },
+    octokit,
+    syncMap,
+    dryRun,
+    onProgress
+  });
+
+  // Save sync map if not dry run
+  if (!dryRun && results.succeeded > 0) {
+    await saveSyncMap(syncMapPath, syncMap);
+  }
+
+  return results;
+}
+
+/**
+ * Batch sync Epics to GitHub
+ *
+ * @param {Object} options - Same as batchSyncPRDs
+ * @returns {Promise<Object>} Results summary
+ */
+async function batchSyncEpics({
+  basePath,
+  owner,
+  repo,
+  octokit,
+  dryRun = false,
+  maxConcurrent = 10,
+  onProgress = null
+}) {
+  const syncMapPath = path.join(basePath, 'sync-map.json');
+  const syncMap = await loadSyncMap(syncMapPath);
+
+  // Load all Epic files
+  const epicDir = path.join(basePath, 'epics');
+  let epicFiles = [];
+  try {
+    const dirs = await fs.readdir(epicDir);
+    for (const dir of dirs) {
+      const epicPath = path.join(epicDir, dir, 'epic.md');
+      try {
+        await fs.access(epicPath);
+        epicFiles.push({
+          path: epicPath,
+          id: dir,
+          type: 'epic'
+        });
+      } catch (err) {
+        // Skip if epic.md doesn't exist
+      }
+    }
+  } catch (err) {
+    console.error(`Error reading Epic directory: ${err.message}`);
+    return {
+      total: 0,
+      succeeded: 0,
+      failed: 0,
+      duration: 0,
+      errors: []
+    };
+  }
+
+  // Create batch processor
+  const processor = new BatchProcessor({ maxConcurrent });
+
+  // Create wrapper function for syncEpicToGitHub
+  const syncFn = async (item, repo, octokit, syncMap, dryRun) => {
+    return await syncEpicToGitHub(item.path, repo, octokit, syncMap, dryRun);
+  };
+
+  // Batch upload
+  const results = await processor.batchUpload({
+    items: epicFiles,
+    syncFn,
+    repo: { owner, repo },
+    octokit,
+    syncMap,
+    dryRun,
+    onProgress
+  });
+
+  // Save sync map if not dry run
+  if (!dryRun && results.succeeded > 0) {
+    await saveSyncMap(syncMapPath, syncMap);
+  }
+
+  return results;
+}
+
+/**
+ * Batch sync Tasks to GitHub
+ *
+ * @param {Object} options - Same as batchSyncPRDs
+ * @returns {Promise<Object>} Results summary
+ */
+async function batchSyncTasks({
+  basePath,
+  owner,
+  repo,
+  octokit,
+  dryRun = false,
+  maxConcurrent = 10,
+  onProgress = null
+}) {
+  const syncMapPath = path.join(basePath, 'sync-map.json');
+  const syncMap = await loadSyncMap(syncMapPath);
+
+  // Load all Task files
+  const epicDir = path.join(basePath, 'epics');
+  let taskFiles = [];
+  try {
+    const dirs = await fs.readdir(epicDir);
+    for (const dir of dirs) {
+      const tasksDir = path.join(epicDir, dir);
+      try {
+        const files = await fs.readdir(tasksDir);
+        for (const file of files) {
+          if (file.startsWith('task-') && file.endsWith('.md')) {
+            taskFiles.push({
+              path: path.join(tasksDir, file),
+              id: `${dir}-${file.replace('.md', '')}`,
+              type: 'task'
+            });
+          }
+        }
+      } catch (err) {
+        // Skip if directory can't be read
+      }
+    }
+  } catch (err) {
+    console.error(`Error reading Task directories: ${err.message}`);
+    return {
+      total: 0,
+      succeeded: 0,
+      failed: 0,
+      duration: 0,
+      errors: []
+    };
+  }
+
+  // Create batch processor
+  const processor = new BatchProcessor({ maxConcurrent });
+
+  // Create wrapper function for syncTaskToGitHub
+  const syncFn = async (item, repo, octokit, syncMap, dryRun) => {
+    return await syncTaskToGitHub(item.path, repo, octokit, syncMap, dryRun);
+  };
+
+  // Batch upload
+  const results = await processor.batchUpload({
+    items: taskFiles,
+    syncFn,
+    repo: { owner, repo },
+    octokit,
+    syncMap,
+    dryRun,
+    onProgress
+  });
+
+  // Save sync map if not dry run
+  if (!dryRun && results.succeeded > 0) {
+    await saveSyncMap(syncMapPath, syncMap);
+  }
+
+  return results;
+}
+
+/**
+ * Batch sync all items (PRDs, Epics, Tasks) to GitHub
+ *
+ * @param {Object} options - Sync options
+ * @returns {Promise<Object>} Combined results summary
+ */
+async function batchSyncAll({
+  basePath,
+  owner,
+  repo,
+  octokit,
+  dryRun = false,
+  maxConcurrent = 10,
+  onProgress = null
+}) {
+  const startTime = Date.now();
+  const combinedResults = {
+    prds: null,
+    epics: null,
+    tasks: null,
+    total: 0,
+    succeeded: 0,
+    failed: 0,
+    duration: 0,
+    errors: []
+  };
+
+  console.log('🚀 Starting batch sync to GitHub...\n');
+
+  // Sync PRDs
+  console.log('📋 Syncing PRDs...');
+  const prdResults = await batchSyncPRDs({
+    basePath,
+    owner,
+    repo,
+    octokit,
+    dryRun,
+    maxConcurrent,
+    onProgress: onProgress
+      ? (current, total, item) => onProgress('PRD', current, total, item)
+      : null
+  });
+  combinedResults.prds = prdResults;
+  console.log(`✅ PRDs: ${prdResults.succeeded}/${prdResults.total} succeeded in ${prdResults.duration}ms\n`);
+
+  // Sync Epics
+  console.log('📦 Syncing Epics...');
+  const epicResults = await batchSyncEpics({
+    basePath,
+    owner,
+    repo,
+    octokit,
+    dryRun,
+    maxConcurrent,
+    onProgress: onProgress
+      ? (current, total, item) => onProgress('Epic', current, total, item)
+      : null
+  });
+  combinedResults.epics = epicResults;
+  console.log(`✅ Epics: ${epicResults.succeeded}/${epicResults.total} succeeded in ${epicResults.duration}ms\n`);
+
+  // Sync Tasks
+  console.log('✓ Syncing Tasks...');
+  const taskResults = await batchSyncTasks({
+    basePath,
+    owner,
+    repo,
+    octokit,
+    dryRun,
+    maxConcurrent,
+    onProgress: onProgress
+      ? (current, total, item) => onProgress('Task', current, total, item)
+      : null
+  });
+  combinedResults.tasks = taskResults;
+  console.log(`✅ Tasks: ${taskResults.succeeded}/${taskResults.total} succeeded in ${taskResults.duration}ms\n`);
+
+  // Combine results
+  combinedResults.total = prdResults.total + epicResults.total + taskResults.total;
+  combinedResults.succeeded = prdResults.succeeded + epicResults.succeeded + taskResults.succeeded;
+  combinedResults.failed = prdResults.failed + epicResults.failed + taskResults.failed;
+  combinedResults.duration = Date.now() - startTime;
+  combinedResults.errors = [
+    ...prdResults.errors,
+    ...epicResults.errors,
+    ...taskResults.errors
+  ];
+
+  console.log('🎉 Batch sync complete!');
+  console.log(`  Total: ${combinedResults.succeeded}/${combinedResults.total} items synced`);
+  console.log(`  Duration: ${(combinedResults.duration / 1000).toFixed(2)}s`);
+
+  if (combinedResults.failed > 0) {
+    console.log(`  ⚠️ Failures: ${combinedResults.failed}`);
+    console.log('\nErrors:');
+    combinedResults.errors.forEach(err => {
+      console.log(`  - ${err.item.path}: ${err.error}`);
+    });
+  }
+
+  return combinedResults;
+}
+
+module.exports = {
+  batchSyncPRDs,
+  batchSyncEpics,
+  batchSyncTasks,
+  batchSyncAll
+};
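A minimal usage sketch for the integration module above, following the module's own @example. The owner/repo values, the GITHUB_TOKEN environment variable, and the use of @octokit/rest to construct the Octokit instance are illustrative assumptions, not part of the package.

// Sketch: batch-sync everything under .claude/ in dry-run mode.
// Octokit construction and owner/repo values are placeholders.
const { Octokit } = require('@octokit/rest');
const { batchSyncAll } = require('./lib/batch-processor-integration');

async function main() {
  const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

  const results = await batchSyncAll({
    basePath: '.claude',
    owner: 'user',        // placeholder
    repo: 'repository',   // placeholder
    octokit,
    dryRun: true,         // inspect what would be synced without writing to GitHub
    maxConcurrent: 5,
    onProgress: (type, current, total, item) =>
      console.log(`[${type}] ${current}/${total} ${item.path}`)
  });

  if (results.failed > 0) {
    process.exitCode = 1;
  }
}

main().catch(err => {
  console.error(err);
  process.exit(1);
});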
package/lib/batch-processor.js
@@ -0,0 +1,278 @@
+/**
+ * Batch Processor for GitHub Sync Operations
+ *
+ * Handles batch uploading of PRDs, Epics, and Tasks to GitHub with:
+ * - Parallel processing with concurrency control
+ * - Rate limiting and exponential backoff
+ * - Progress tracking
+ * - Error recovery
+ * - Dry run mode
+ *
+ * @example
+ * const BatchProcessor = require('./lib/batch-processor');
+ *
+ * const processor = new BatchProcessor({
+ *   maxConcurrent: 10,
+ *   rateLimit: {
+ *     requestsPerHour: 5000,
+ *     retryDelay: 1000,
+ *     maxRetries: 3
+ *   }
+ * });
+ *
+ * const results = await processor.batchUpload({
+ *   items: ['.claude/prds/*.md'],
+ *   syncFn: syncPRDToGitHub,
+ *   repo: { owner, repo },
+ *   octokit,
+ *   syncMap,
+ *   dryRun: false,
+ *   onProgress: (current, total, item) => {
+ *     console.log(`[${current}/${total}] ${item.path}`);
+ *   }
+ * });
+ */
+
+class BatchProcessor {
+  /**
+   * Create a new BatchProcessor instance
+   *
+   * @param {Object} options - Configuration options
+   * @param {number} options.maxConcurrent - Maximum concurrent uploads (default: 10)
+   * @param {Object} options.rateLimit - Rate limiting configuration
+   * @param {number} options.rateLimit.requestsPerHour - GitHub API rate limit (default: 5000)
+   * @param {number} options.rateLimit.retryDelay - Initial retry delay in ms (default: 1000)
+   * @param {number} options.rateLimit.maxRetries - Maximum retry attempts (default: 3)
+   * @param {number} options.rateLimit.threshold - Remaining requests threshold to trigger wait (default: 10)
+   */
+  constructor(options = {}) {
+    // Validate configuration
+    if (options.maxConcurrent !== undefined) {
+      if (typeof options.maxConcurrent !== 'number' || options.maxConcurrent <= 0) {
+        throw new Error('maxConcurrent must be a positive number');
+      }
+    }
+
+    this.maxConcurrent = options.maxConcurrent || 10;
+    this.rateLimit = {
+      requestsPerHour: options.rateLimit?.requestsPerHour || 5000,
+      retryDelay: options.rateLimit?.retryDelay || 1000,
+      maxRetries: options.rateLimit?.maxRetries || 3,
+      threshold: options.rateLimit?.threshold || 10,
+      remaining: options.rateLimit?.requestsPerHour || 5000,
+      resetTime: Math.floor(Date.now() / 1000) + 3600
+    };
+  }
+
+  /**
+   * Batch upload items to GitHub
+   *
+   * @param {Object} params - Upload parameters
+   * @param {Array} params.items - Items to upload
+   * @param {Function} params.syncFn - Sync function to call for each item
+   * @param {Object} params.repo - Repository info {owner, repo}
+   * @param {Object} params.octokit - Octokit instance
+   * @param {Object} params.syncMap - Sync mapping object
+   * @param {boolean} params.dryRun - Dry run mode
+   * @param {Function} params.onProgress - Progress callback (current, total, item)
+   * @returns {Promise<Object>} Results summary
+   */
+  async batchUpload({
+    items,
+    syncFn,
+    repo,
+    octokit,
+    syncMap,
+    dryRun = false,
+    onProgress = null
+  }) {
+    const startTime = Date.now();
+    const results = {
+      total: items.length,
+      succeeded: 0,
+      failed: 0,
+      duration: 0,
+      errors: [],
+      rateLimit: {
+        remaining: this.rateLimit.remaining,
+        reset: this.rateLimit.resetTime
+      }
+    };
+
+    // Handle empty item list
+    if (items.length === 0) {
+      results.duration = Date.now() - startTime;
+      return results;
+    }
+
+    // Process items with concurrency control
+    const processingQueue = [];
+    let currentIndex = 0;
+    let completedCount = 0;
+
+    const processNextItem = async () => {
+      if (currentIndex >= items.length) {
+        return;
+      }
+
+      const item = items[currentIndex];
+      currentIndex++;
+
+      try {
+        // Check rate limit before processing
+        if (this.shouldWaitForRateLimit()) {
+          await this.waitForRateLimit();
+        }
+
+        // Execute sync with retry logic
+        await this.executeWithRetry(
+          syncFn,
+          item,
+          repo,
+          octokit,
+          syncMap,
+          dryRun
+        );
+
+        results.succeeded++;
+      } catch (error) {
+        results.failed++;
+        results.errors.push({
+          item,
+          error: error.message || String(error)
+        });
+      } finally {
+        completedCount++;
+
+        // Update progress
+        if (onProgress) {
+          onProgress(completedCount, items.length, item);
+        }
+
+        // Process next item
+        await processNextItem();
+      }
+    };
+
+    // Start initial batch of workers
+    const workerCount = Math.min(this.maxConcurrent, items.length);
+    for (let i = 0; i < workerCount; i++) {
+      processingQueue.push(processNextItem());
+    }
+
+    // Wait for all workers to complete
+    await Promise.all(processingQueue);
+
+    // Calculate final duration
+    results.duration = Date.now() - startTime;
+
+    // Update final rate limit info
+    results.rateLimit = {
+      remaining: this.rateLimit.remaining,
+      reset: this.rateLimit.resetTime
+    };
+
+    return results;
+  }
+
+  /**
+   * Execute sync function with retry logic for rate limit errors
+   *
+   * @param {Function} syncFn - Sync function to execute
+   * @param {Object} item - Item to sync
+   * @param {Object} repo - Repository info
+   * @param {Object} octokit - Octokit instance
+   * @param {Object} syncMap - Sync mapping
+   * @param {boolean} dryRun - Dry run mode
+   * @returns {Promise<Object>} Sync result
+   */
+  async executeWithRetry(syncFn, item, repo, octokit, syncMap, dryRun) {
+    let lastError = null;
+
+    for (let attempt = 1; attempt <= this.rateLimit.maxRetries; attempt++) {
+      try {
+        const result = await syncFn(item, repo, octokit, syncMap, dryRun);
+        return result;
+      } catch (error) {
+        lastError = error;
+
+        // Check if it's a rate limit error
+        if (error.status === 429) {
+          // Update rate limit from error response if available
+          if (error.response?.headers) {
+            this.updateRateLimit(error.response.headers);
+          }
+
+          // Calculate backoff delay for this attempt
+          const backoffDelay = this.calculateBackoffDelay(attempt);
+
+          // Wait before retrying
+          await new Promise(resolve => setTimeout(resolve, backoffDelay));
+
+          // Continue to next retry attempt
+          continue;
+        }
+
+        // For non-rate-limit errors, throw immediately
+        throw error;
+      }
+    }
+
+    // If we exhausted all retries, throw the last error
+    throw lastError;
+  }
+
+  /**
+   * Calculate exponential backoff delay
+   *
+   * @param {number} attempt - Current attempt number (1-based)
+   * @returns {number} Delay in milliseconds
+   */
+  calculateBackoffDelay(attempt) {
+    // Exponential backoff: delay * (2 ^ (attempt - 1))
+    return this.rateLimit.retryDelay * Math.pow(2, attempt - 1);
+  }
+
+  /**
+   * Update rate limit information from response headers
+   *
+   * @param {Object} headers - Response headers
+   */
+  updateRateLimit(headers) {
+    if (headers['x-ratelimit-remaining']) {
+      this.rateLimit.remaining = parseInt(headers['x-ratelimit-remaining'], 10);
+    }
+
+    if (headers['x-ratelimit-reset']) {
+      this.rateLimit.resetTime = parseInt(headers['x-ratelimit-reset'], 10);
+    }
+  }
+
+  /**
+   * Check if we should wait for rate limit to reset
+   *
+   * @returns {boolean} True if should wait
+   */
+  shouldWaitForRateLimit() {
+    return this.rateLimit.remaining <= this.rateLimit.threshold;
+  }
+
+  /**
+   * Wait for rate limit to reset
+   *
+   * @returns {Promise<void>}
+   */
+  async waitForRateLimit() {
+    const now = Math.floor(Date.now() / 1000);
+    const waitSeconds = Math.max(0, this.rateLimit.resetTime - now);
+
+    if (waitSeconds > 0) {
+      await new Promise(resolve => setTimeout(resolve, waitSeconds * 1000));
+    }
+
+    // Reset to full limit after waiting
+    this.rateLimit.remaining = this.rateLimit.requestsPerHour;
+  }
+}
+
+module.exports = BatchProcessor;
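A minimal sketch of driving BatchProcessor directly with a custom syncFn. The syncFn below is a stand-in that only records items in the sync map; the package's real sync functions live in pm-sync-upload-local (see the integration module above), and the require path follows the class's own @example.

// Sketch: use BatchProcessor with a stand-in syncFn (not the package's real
// sync functions). Demonstrates concurrency, progress reporting, and the
// result shape returned by batchUpload.
const BatchProcessor = require('./lib/batch-processor');

const processor = new BatchProcessor({
  maxConcurrent: 5,
  rateLimit: { requestsPerHour: 5000, retryDelay: 500, maxRetries: 3 }
});

// syncFn receives (item, repo, octokit, syncMap, dryRun); throwing an error
// with status === 429 would exercise the exponential-backoff retry path.
const syncFn = async (item, repo, octokit, syncMap, dryRun) => {
  if (!dryRun) {
    syncMap[item.id] = { path: item.path, syncedAt: Date.now() };
  }
  return { id: item.id };
};

(async () => {
  const results = await processor.batchUpload({
    items: [{ path: '.claude/prds/example.md', id: 'example', type: 'prd' }],
    syncFn,
    repo: { owner: 'user', repo: 'repository' },  // placeholders
    octokit: null,                                // unused by this stand-in syncFn
    syncMap: {},
    dryRun: true,
    onProgress: (current, total, item) => console.log(`[${current}/${total}] ${item.path}`)
  });
  console.log(results);  // { total, succeeded, failed, duration, errors, rateLimit }
})();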