@aikotools/repo-maintenance 1.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29) hide show
  1. package/README.md +227 -0
  2. package/bin/loader.js +10 -0
  3. package/bin/repohub.js +5 -0
  4. package/dist/client/assets/index-Bh_JYZxI.js +15 -0
  5. package/dist/client/assets/index-D-CxJxP4.css +1 -0
  6. package/dist/client/index.html +13 -0
  7. package/dist/server/server/index.js +118 -0
  8. package/dist/server/server/services/bulk-service.js +157 -0
  9. package/dist/server/server/services/cascade-service.js +474 -0
  10. package/dist/server/server/services/config-service.js +343 -0
  11. package/dist/server/server/services/dependency-resolver.js +144 -0
  12. package/dist/server/server/services/git-service.js +320 -0
  13. package/dist/server/server/services/package-service.js +152 -0
  14. package/dist/server/server/services/process.js +51 -0
  15. package/dist/server/server/services/pull-all-service.js +415 -0
  16. package/dist/server/server/services/repo-scanner.js +230 -0
  17. package/dist/server/server/services/task-queue.js +29 -0
  18. package/dist/server/server/trpc/context.js +4 -0
  19. package/dist/server/server/trpc/init.js +7 -0
  20. package/dist/server/server/trpc/procedures/bulk.js +110 -0
  21. package/dist/server/server/trpc/procedures/cascade.js +207 -0
  22. package/dist/server/server/trpc/procedures/dependencies.js +26 -0
  23. package/dist/server/server/trpc/procedures/git.js +151 -0
  24. package/dist/server/server/trpc/procedures/package.js +43 -0
  25. package/dist/server/server/trpc/procedures/project.js +181 -0
  26. package/dist/server/server/trpc/procedures/repos.js +42 -0
  27. package/dist/server/server/trpc/router.js +20 -0
  28. package/dist/server/shared/types.js +3 -0
  29. package/package.json +68 -0
@@ -0,0 +1,415 @@
1
+ /**
2
+ * Service for pulling all repos in parallel with progress tracking,
3
+ * cancellation support, "no tracking information" fallback,
4
+ * and GitHub-based clone-missing + pull-existing flow.
5
+ * Follows the BulkService fire-and-forget pattern.
6
+ */
7
+ import { existsSync, readFileSync } from 'fs';
8
+ import path from 'path';
9
+ import simpleGit from 'simple-git';
10
+ import { spawnProcess } from './process';
11
+ import { TaskQueue } from './task-queue';
12
/**
 * In-memory execution engine that pulls (and, when a GitHub repo mapping is
 * configured, clones) every repository in parallel, with progress tracking,
 * cancellation support and a "no tracking information" pull fallback.
 * Follows the BulkService fire-and-forget pattern: startPullAll() returns an
 * execution id immediately and the work continues in the background.
 */
export class PullAllService {
    // Maximum number of concurrent clone/pull tasks.
    defaultConcurrency;
    // Config service used to auto-import mappings, reload config and persist history.
    configService;
    // executionId -> execution state, kept in memory for polling.
    executions = new Map();
    // executionId -> AbortController driving cancellation of that execution.
    abortControllers = new Map();
    constructor(defaultConcurrency, configService) {
        this.defaultConcurrency = defaultConcurrency;
        this.configService = configService;
    }
    /**
     * Start pulling all repos in the background.
     * If repoMapping is configured, fetches GitHub repos, clones missing, and pulls existing.
     * Otherwise falls back to pulling only locally known repos.
     * @returns execution id for polling via getExecution().
     */
    startPullAll(repos, config) {
        const id = `pull-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
        const execution = {
            id,
            status: 'running',
            results: [],
            completedCount: 0,
            failedCount: 0,
            withChanges: 0,
            clonedCount: 0,
            skippedCount: 0,
            unmappedCount: 0,
            total: 0,
            startedAt: new Date().toISOString(),
        };
        this.executions.set(id, execution);
        const controller = new AbortController();
        this.abortControllers.set(id, controller);
        // Fire-and-forget: resolve config (auto-import if needed) then execute.
        this.resolveConfigAndExecute(id, repos, config, controller.signal).catch((err) => {
            console.error(`[PullAll] Execution ${id} failed:`, err);
            const exec = this.executions.get(id);
            if (exec && exec.status === 'running') {
                exec.status = 'completed';
                exec.completedAt = new Date().toISOString();
            }
            // Fix: release the controller on crash as well (it was leaked before).
            this.abortControllers.delete(id);
        });
        return id;
    }
    /** Look up an execution by id; undefined if unknown. */
    getExecution(id) {
        return this.executions.get(id);
    }
    /**
     * Abort a running execution. Pending results are marked aborted and the
     * partial outcome is persisted to history exactly once.
     * @returns true if a running execution was aborted, false otherwise.
     */
    abort(id) {
        const exec = this.executions.get(id);
        if (!exec || exec.status !== 'running')
            return false;
        const controller = this.abortControllers.get(id);
        if (controller)
            controller.abort();
        // Fix: the controller was never removed from the map (memory leak).
        this.abortControllers.delete(id);
        exec.status = 'aborted';
        exec.completedAt = new Date().toISOString();
        for (const result of exec.results) {
            if (result.status === 'pending') {
                result.status = 'aborted';
                result.message = 'Aborted';
            }
        }
        this.saveToHistory(exec);
        return true;
    }
    // ── Config resolution: auto-import mapping if repo-maintenance.sh exists ──
    async resolveConfigAndExecute(executionId, repos, config, signal) {
        const exec = this.executions.get(executionId);
        if (!exec)
            return;
        let effectiveConfig = config;
        // Auto-import the mapping from repo-maintenance.sh if not yet configured.
        const hasMapping = config.repoMapping && Object.keys(config.repoMapping).length > 0;
        if (!hasMapping) {
            const scriptPath = path.join(config.rootFolder, 'repo-maintenance.sh');
            if (existsSync(scriptPath)) {
                try {
                    console.log('[PullAll] Auto-importing repo mapping from repo-maintenance.sh...');
                    const scriptContent = readFileSync(scriptPath, 'utf-8');
                    const result = await this.configService.importRepoMapping(scriptContent);
                    console.log(`[PullAll] Imported ${Object.keys(result.mapping).length} mappings, ${result.ignore.length} ignore rules`);
                    // Reload config with the newly imported mapping.
                    effectiveConfig = await this.configService.getProjectConfig();
                }
                catch (err) {
                    // Best-effort: keep the original config if the import fails.
                    console.error('[PullAll] Failed to auto-import mapping:', err);
                }
            }
        }
        const hasMappingNow = effectiveConfig.repoMapping && Object.keys(effectiveConfig.repoMapping).length > 0;
        if (hasMappingNow) {
            await this.executeWithGitHub(executionId, repos, effectiveConfig, signal);
        }
        else {
            // Fallback: only pull locally known repos (old behavior).
            exec.results = repos.map((r) => ({
                repoId: r.path,
                success: false,
                message: '',
                changes: 0,
                status: 'pending',
            }));
            exec.total = repos.length;
            await this.executeLocalOnly(executionId, repos, effectiveConfig.defaultBranch, signal);
        }
    }
    // ── GitHub-based flow: fetch repos, clone missing, pull existing ──
    async executeWithGitHub(executionId, localRepos, config, signal) {
        const exec = this.executions.get(executionId);
        if (!exec)
            return;
        const org = config.githubOrganizations[0];
        if (!org) {
            // Fix: close out via finishExecution so the (empty) run is persisted
            // once and the controller released; the original set 'completed'
            // unconditionally (even over 'aborted') and never saved history here.
            this.finishExecution(executionId);
            return;
        }
        // 1. Fetch GitHub repos.
        let githubRepos;
        try {
            githubRepos = await this.fetchGithubRepos(org);
        }
        catch (err) {
            console.error('[PullAll] Failed to fetch GitHub repos:', err);
            // Fallback to local-only pull.
            exec.results = localRepos.map((r) => ({
                repoId: r.path,
                success: false,
                message: '',
                changes: 0,
                status: 'pending',
            }));
            exec.total = localRepos.length;
            await this.executeLocalOnly(executionId, localRepos, config.defaultBranch, signal);
            return;
        }
        if (signal.aborted)
            return; // abort() already finalized and persisted the execution
        // 2. Build local lookup: repo directory name -> Repo.
        const localMap = new Map();
        for (const repo of localRepos) {
            localMap.set(path.basename(repo.absolutePath), repo);
        }
        // 3. Build execution items from GitHub repos.
        const items = [];
        for (const ghRepo of githubRepos) {
            if (config.ignoreRepos?.includes(ghRepo)) {
                items.push({ name: ghRepo, action: 'skip' });
            }
            else if (!config.repoMapping?.[ghRepo]) {
                items.push({ name: ghRepo, action: 'unmapped' });
            }
            else if (localMap.has(ghRepo)) {
                items.push({ name: ghRepo, action: 'pull', repo: localMap.get(ghRepo) });
            }
            else {
                items.push({
                    name: ghRepo,
                    action: 'clone',
                    targetDir: config.repoMapping[ghRepo],
                });
            }
        }
        // 4. Initialize execution results (skip/unmapped are resolved immediately).
        exec.results = items.map((item) => {
            const repoId = item.action === 'pull'
                ? item.repo.path
                : item.action === 'clone'
                    ? `${item.targetDir}/${item.name}`
                    : item.name;
            if (item.action === 'skip') {
                exec.skippedCount++;
                return {
                    repoId,
                    success: true,
                    message: 'Ignored',
                    changes: 0,
                    status: 'skipped',
                };
            }
            if (item.action === 'unmapped') {
                exec.unmappedCount++;
                return {
                    repoId,
                    success: true,
                    message: 'No mapping defined',
                    changes: 0,
                    status: 'unmapped',
                };
            }
            return {
                repoId,
                success: false,
                message: '',
                changes: 0,
                status: 'pending',
            };
        });
        exec.total = items.length;
        // 5. Execute clone/pull items in parallel.
        const actionItems = items.filter((i) => i.action === 'pull' || i.action === 'clone');
        const queue = new TaskQueue(this.defaultConcurrency);
        await queue.run(actionItems, async (item) => {
            if (signal.aborted)
                return;
            const repoId = item.action === 'pull'
                ? item.repo.path
                : `${item.targetDir}/${item.name}`;
            const result = exec.results.find((r) => r.repoId === repoId);
            if (!result)
                return;
            result.status = 'running';
            try {
                if (item.action === 'pull') {
                    const pullResult = await this.pullWithFallback(item.repo.absolutePath, config.defaultBranch);
                    if (signal.aborted) {
                        // Fix: don't mutate counters/status after abort() has
                        // persisted the execution; mark the late finisher aborted.
                        result.status = 'aborted';
                        result.message = 'Aborted';
                        return;
                    }
                    this.recordPullOutcome(exec, result, pullResult);
                }
                else if (item.action === 'clone') {
                    const cloneResult = await this.cloneRepo(org, item.name, path.join(config.rootFolder, item.targetDir, item.name));
                    if (signal.aborted) {
                        result.status = 'aborted';
                        result.message = 'Aborted';
                        return;
                    }
                    result.success = cloneResult.success;
                    result.message = cloneResult.message;
                    result.changes = 0;
                    result.status = cloneResult.success ? 'cloned' : 'failed';
                    if (cloneResult.success) {
                        exec.clonedCount++;
                    }
                    else {
                        exec.failedCount++;
                    }
                }
            }
            catch (err) {
                if (signal.aborted)
                    return;
                result.status = 'failed';
                result.success = false;
                result.message = `Failed (${item.name}): ${err instanceof Error ? err.message : String(err)}`;
                exec.failedCount++;
            }
        });
        this.finishExecution(executionId);
    }
    // ── Fallback: pull only local repos (old behavior) ──
    async executeLocalOnly(executionId, repos, defaultBranch, signal) {
        const exec = this.executions.get(executionId);
        if (!exec)
            return;
        const queue = new TaskQueue(this.defaultConcurrency);
        await queue.run(repos, async (repo) => {
            if (signal.aborted)
                return;
            const result = exec.results.find((r) => r.repoId === repo.path);
            if (!result)
                return;
            result.status = 'running';
            try {
                const pullResult = await this.pullWithFallback(repo.absolutePath, defaultBranch);
                if (signal.aborted) {
                    // Fix: a pull finishing after abort must not bump counters or
                    // overwrite the aborted state persisted by abort().
                    result.status = 'aborted';
                    result.message = 'Aborted';
                    return;
                }
                this.recordPullOutcome(exec, result, pullResult);
            }
            catch (err) {
                if (signal.aborted)
                    return;
                result.status = 'failed';
                result.success = false;
                result.message = `Pull failed (${repo.path}): ${err instanceof Error ? err.message : String(err)}`;
                exec.failedCount++;
            }
        });
        this.finishExecution(executionId);
    }
    // ── Helper methods ──
    /** Fold a pull result into the per-repo result and the aggregate counters. */
    recordPullOutcome(exec, result, pullResult) {
        result.success = pullResult.success;
        result.message = pullResult.message;
        result.changes = pullResult.changes;
        result.status = pullResult.success ? 'completed' : 'failed';
        if (pullResult.success) {
            exec.completedCount++;
            if (pullResult.changes > 0)
                exec.withChanges++;
        }
        else {
            exec.failedCount++;
        }
    }
    /**
     * Mark a still-running execution completed, persist it to history once, and
     * release its AbortController. Aborted executions were already persisted by
     * abort(), so they are intentionally not saved again (the original flow
     * saved aborted executions twice: once in abort() and once here).
     */
    finishExecution(executionId) {
        this.abortControllers.delete(executionId);
        const exec = this.executions.get(executionId);
        if (exec && exec.status === 'running') {
            exec.status = 'completed';
            exec.completedAt = new Date().toISOString();
            this.saveToHistory(exec);
        }
    }
    /**
     * Fetch all non-archived repo names from a GitHub organization via `gh` CLI.
     * @throws Error when `gh repo list` exits non-zero.
     */
    async fetchGithubRepos(org) {
        const { promise } = spawnProcess([
            'gh', 'repo', 'list', org, '--limit', '200', '--no-archived', '--json', 'name', '--jq', '.[].name',
        ]);
        const result = await promise;
        if (result.exitCode !== 0) {
            throw new Error(`gh repo list failed (exit ${result.exitCode}): ${result.stderr}`);
        }
        return result.stdout
            .trim()
            .split('\n')
            .filter(Boolean)
            .sort();
    }
    /**
     * Clone a repo from GitHub into the target path.
     * A directory that already contains a .git folder is treated as cloned.
     */
    async cloneRepo(org, name, targetPath) {
        if (existsSync(path.join(targetPath, '.git'))) {
            return { success: true, message: 'Already exists locally' };
        }
        const { promise } = spawnProcess([
            'gh', 'repo', 'clone', `${org}/${name}`, targetPath, '--', '--quiet',
        ]);
        const result = await promise;
        if (result.exitCode !== 0) {
            return { success: false, message: `Clone failed: ${result.stderr.trim()}` };
        }
        return { success: true, message: 'Cloned successfully' };
    }
    /**
     * Pull a repo. If the pull fails with "no tracking information", retry with
     * explicit origin + defaultBranch. Never throws; failures are reported in
     * the returned { success, message, changes } object.
     */
    async pullWithFallback(repoPath, defaultBranch) {
        const git = simpleGit(repoPath);
        try {
            const result = await git.pull();
            return {
                success: true,
                message: result.summary.changes
                    ? `${result.summary.changes} changes, ${result.summary.insertions} insertions, ${result.summary.deletions} deletions`
                    : 'Already up to date',
                changes: result.summary.changes,
            };
        }
        catch (err) {
            const errMsg = err instanceof Error ? err.message : String(err);
            // Fallback: try explicit origin + defaultBranch.
            if (errMsg.includes('no tracking information') || errMsg.includes('There is no tracking')) {
                try {
                    const result = await git.pull('origin', defaultBranch);
                    return {
                        success: true,
                        message: result.summary.changes
                            ? `${result.summary.changes} changes (via origin/${defaultBranch})`
                            : `Already up to date (via origin/${defaultBranch})`,
                        changes: result.summary.changes,
                    };
                }
                catch (retryErr) {
                    return {
                        success: false,
                        message: `Pull failed: ${retryErr instanceof Error ? retryErr.message : String(retryErr)}`,
                        changes: 0,
                    };
                }
            }
            return {
                success: false,
                message: `Pull failed: ${errMsg}`,
                changes: 0,
            };
        }
    }
    /** Persist a finished/aborted execution to history (best-effort, async). */
    saveToHistory(exec) {
        this.configService
            .savePullAllHistory({
            id: exec.id,
            status: exec.status === 'aborted' ? 'aborted' : 'completed',
            total: exec.total,
            completedCount: exec.completedCount,
            failedCount: exec.failedCount,
            withChanges: exec.withChanges,
            clonedCount: exec.clonedCount,
            skippedCount: exec.skippedCount,
            unmappedCount: exec.unmappedCount,
            startedAt: exec.startedAt,
            completedAt: exec.completedAt,
            results: exec.results,
        })
            .catch((err) => {
            console.error('[PullAll] Failed to save history:', err);
        });
    }
}
@@ -0,0 +1,230 @@
1
+ /**
2
+ * Service for scanning the repo/ directory and extracting package info.
3
+ * Builds the complete list of repos with their metadata, domains, and internal dependencies.
4
+ */
5
+ import { readFile, readdir, stat } from 'fs/promises';
6
+ import path from 'path';
7
/** Known sub-group directories within domains (e.g. invoice/outbound/). */
const KNOWN_SUB_GROUPS = new Set(['outbound', 'inbound', 'validators', 'gov-api', 'tax', 'export']);
/** Directory names that are never descended into while scanning. */
const SKIP_DIRS = new Set(['node_modules', 'dist', '.git', 'coverage', '.repoMaintenance']);
18
/**
 * Service for scanning the repo/ directory and extracting package info.
 * Builds the complete list of repos with their metadata, domains, and
 * internal dependencies.
 */
export class RepoScanner {
    // Absolute path of the root folder containing the domain directories.
    rootFolder;
    // npm organization scopes (e.g. '@acme') whose packages count as internal deps.
    npmOrgs;
    constructor(rootFolder, npmOrgs) {
        this.rootFolder = rootFolder;
        this.npmOrgs = npmOrgs;
    }
    /**
     * Scan all domain directories and return { repos, domains }.
     * Dependencies are resolved to repo ids; unresolvable deps are dropped;
     * dependents are computed as the reverse edge set.
     * @param domainOverrides optional map repoId -> domain name applied after scanning.
     */
    async scan(domainOverrides) {
        const repoDir = this.rootFolder;
        const repos = [];
        // Scan all domain directories under the root.
        const domainDirs = await this.listDirs(repoDir);
        for (const domainName of domainDirs) {
            const domainPath = path.join(repoDir, domainName);
            await this.scanDomain(domainPath, domainName, repos);
        }
        // Apply domain overrides (repoId -> domain).
        if (domainOverrides) {
            for (const repo of repos) {
                const override = domainOverrides[repo.id];
                if (override) {
                    repo.domain = override;
                }
            }
        }
        // Build npm name -> repo ID lookup (last repo wins on duplicate npm names).
        const npmToRepo = new Map();
        for (const repo of repos) {
            npmToRepo.set(repo.npmPackage, repo.id);
        }
        // Resolve dependency repo IDs; drop deps that don't map to a known repo.
        for (const repo of repos) {
            for (const dep of repo.dependencies) {
                const repoId = npmToRepo.get(dep.npmName);
                if (repoId) {
                    dep.repoId = repoId;
                }
            }
            repo.dependencies = repo.dependencies.filter((d) => d.repoId);
        }
        // Compute dependents (reverse lookup). Fix: use an id -> repo map
        // instead of repos.find() inside the nested loop, which was O(n²)
        // over the whole workspace.
        const repoById = new Map(repos.map((r) => [r.id, r]));
        for (const repo of repos) {
            for (const dep of repo.dependencies) {
                const depRepo = repoById.get(dep.repoId);
                if (depRepo && !depRepo.dependents.includes(repo.id)) {
                    depRepo.dependents.push(repo.id);
                }
            }
        }
        // Build domain structure.
        const domains = this.buildDomains(repos);
        return { repos, domains };
    }
    /**
     * Scan one domain directory, appending parsed repos to `repos`.
     * Handles three layouts: known sub-groups (domain/subgroup/repo),
     * the nested apps/ layout, and direct domain/repo directories.
     */
    async scanDomain(domainPath, domainName, repos) {
        const entries = await this.listDirs(domainPath);
        for (const entry of entries) {
            const entryPath = path.join(domainPath, entry);
            if (KNOWN_SUB_GROUPS.has(entry)) {
                // This is a sub-group (e.g. invoice/outbound/) - scan its children.
                const subEntries = await this.listDirs(entryPath);
                for (const subEntry of subEntries) {
                    const subPath = path.join(entryPath, subEntry);
                    const repo = await this.tryParseRepo(subPath, domainName, entry);
                    if (repo)
                        repos.push(repo);
                }
            }
            else if (domainName === 'apps') {
                // apps/ has nested structure: apps/invoice/saas-invoice-backend.
                const appEntries = await this.listDirs(entryPath);
                for (const appEntry of appEntries) {
                    const appPath = path.join(entryPath, appEntry);
                    const repo = await this.tryParseRepo(appPath, domainName, entry);
                    if (repo)
                        repos.push(repo);
                }
                // Also check if the entry itself is a repo (e.g. apps/invoice.xhub-customer-saas).
                const directRepo = await this.tryParseRepo(entryPath, domainName);
                if (directRepo)
                    repos.push(directRepo);
            }
            else {
                // Direct repo directory.
                const repo = await this.tryParseRepo(entryPath, domainName);
                if (repo)
                    repos.push(repo);
            }
        }
    }
    /**
     * Parse repoPath/package.json into a repo record, or return null when the
     * directory has no readable package.json or the package has no name.
     */
    async tryParseRepo(repoPath, domain, subGroup) {
        const pkgPath = path.join(repoPath, 'package.json');
        try {
            const content = await readFile(pkgPath, 'utf-8');
            const pkg = JSON.parse(content);
            if (!pkg.name)
                return null;
            const relativePath = path.relative(this.rootFolder, repoPath);
            const dirName = path.basename(repoPath);
            // Extract internal dependencies from runtime + peer deps.
            const allDeps = {
                ...pkg.dependencies,
                ...pkg.peerDependencies,
            };
            const internalDeps = this.filterInternalDeps(allDeps);
            return {
                id: dirName,
                path: relativePath,
                absolutePath: repoPath,
                domain,
                subGroup,
                type: this.detectRepoType(pkg.name, dirName),
                npmPackage: pkg.name,
                version: pkg.version || '0.0.0',
                dependencies: internalDeps,
                dependents: [],
            };
        }
        catch {
            // Missing or malformed package.json => not a repo; skip silently.
            return null;
        }
    }
    /**
     * Classify a repo by naming convention. The npm scope is stripped first;
     * match order matters (e.g. exact 'kernel' before the 'kernel-plugin-' prefix).
     */
    detectRepoType(npmName, dirName) {
        const name = npmName.replace(/^@[^/]+\//, '');
        if (name === 'kernel')
            return 'kernel';
        if (name === 'frontend-kernel')
            return 'frontend-kernel';
        if (name.startsWith('kernel-plugin-'))
            return 'kernel-plugin';
        if (name.startsWith('frontend-plugin-'))
            return 'frontend-plugin';
        if (name.startsWith('frontend-ui-') || name === 'frontend-ui-components')
            return 'frontend-ui';
        if (name.startsWith('frontend-app-'))
            return 'frontend-ui';
        if (name.startsWith('lib-'))
            return 'lib';
        if (name.startsWith('saas-'))
            return 'app';
        if (name.startsWith('tool-'))
            return 'tool';
        if (name.startsWith('mock-'))
            return 'mock';
        if (dirName.includes('xhub-'))
            return 'integration';
        return 'lib';
    }
    /**
     * Keep only dependencies whose package name belongs to one of the
     * configured npm orgs. repoId is resolved later in scan().
     */
    filterInternalDeps(deps) {
        if (!deps)
            return [];
        const result = [];
        for (const [name, version] of Object.entries(deps)) {
            const isInternal = this.npmOrgs.some((org) => name.startsWith(`${org}/`));
            if (isInternal) {
                result.push({
                    npmName: name,
                    repoId: '',
                    versionSpec: version,
                });
            }
        }
        return result;
    }
    /** Aggregate repos into domain records (with sub-groups), sorted by domain id. */
    buildDomains(repos) {
        const domainMap = new Map();
        for (const repo of repos) {
            if (!domainMap.has(repo.domain)) {
                domainMap.set(repo.domain, {
                    id: repo.domain,
                    path: `${repo.domain}/`,
                    repoCount: 0,
                    hasUncommitted: false,
                    subGroups: [],
                });
            }
            const domain = domainMap.get(repo.domain);
            domain.repoCount++;
            if (repo.subGroup) {
                let subGroup = domain.subGroups.find((sg) => sg.id === repo.subGroup);
                if (!subGroup) {
                    subGroup = {
                        id: repo.subGroup,
                        path: `${repo.domain}/${repo.subGroup}/`,
                        repoIds: [],
                    };
                    domain.subGroups.push(subGroup);
                }
                subGroup.repoIds.push(repo.id);
            }
        }
        return Array.from(domainMap.values()).sort((a, b) => a.id.localeCompare(b.id));
    }
    /**
     * List sub-directories of dirPath, sorted, skipping SKIP_DIRS and dotfiles.
     * Uses stat() (not dirents) so symlinked directories are followed —
     * NOTE(review): presumably intentional; confirm before switching to
     * readdir withFileTypes. Returns [] when dirPath is unreadable.
     */
    async listDirs(dirPath) {
        try {
            const entries = await readdir(dirPath);
            const dirs = [];
            for (const entry of entries) {
                if (SKIP_DIRS.has(entry) || entry.startsWith('.'))
                    continue;
                const entryPath = path.join(dirPath, entry);
                const s = await stat(entryPath);
                if (s.isDirectory()) {
                    dirs.push(entry);
                }
            }
            return dirs.sort();
        }
        catch {
            return [];
        }
    }
}
@@ -0,0 +1,29 @@
1
+ /**
2
+ * Generic parallel task execution helper with controlled concurrency.
3
+ */
4
/**
 * Generic parallel task execution helper with bounded concurrency.
 * run() resolves once every item has been processed; per-item failures are
 * captured in the corresponding result slot instead of rejecting the run.
 */
export class TaskQueue {
    // Maximum number of concurrent worker loops.
    concurrency;
    constructor(concurrency) {
        this.concurrency = concurrency;
    }
    /**
     * Process `items` with async `fn`, at most `concurrency` at a time.
     * @returns array aligned with `items`; each slot is { item, result }
     *          on success or { item, error } when fn threw/rejected.
     */
    async run(items, fn) {
        const results = [];
        let index = 0;
        // Each worker pulls the next unclaimed index until items are exhausted.
        const worker = async () => {
            while (index < items.length) {
                const currentIndex = index++;
                const item = items[currentIndex];
                try {
                    const result = await fn(item);
                    results[currentIndex] = { item, result };
                }
                catch (err) {
                    // Normalize non-Error throwables so callers can rely on .message.
                    results[currentIndex] = { item, error: err instanceof Error ? err : new Error(String(err)) };
                }
            }
        };
        // Fix: clamp to at least one worker — a concurrency of 0 (or negative)
        // previously spawned no workers and silently resolved with an empty
        // result set even when items were pending.
        const workerCount = Math.max(1, Math.min(this.concurrency, items.length));
        await Promise.all(Array.from({ length: workerCount }, () => worker()));
        return results;
    }
}
@@ -0,0 +1,4 @@
1
+ /**
2
+ * tRPC context - holds all services and in-memory state.
3
+ * No auth needed since this is a local-only tool.
4
+ */
@@ -0,0 +1,7 @@
1
+ /**
2
+ * tRPC initialization with context type.
3
+ */
4
+ import { initTRPC } from '@trpc/server';
5
// Single shared tRPC instance; all routers and procedures derive from it.
const trpc = initTRPC.context().create();
export const { router, procedure: publicProcedure } = trpc;