@capgo/capacitor-patch 8.2.1 → 8.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -143,20 +143,44 @@ The bundled catalog tracks external fix PRs mirrored by Capacitor+ auto-sync bra
143
143
 
144
144
  Run `capgo-capacitor-patch list --all` to see the shipped catalog. Each entry includes the original upstream Capacitor PR URL, the Capacitor+ sync branch, target package, supported version range, and patch file.
145
145
 
146
- ## Future Automation
146
+ ## Recurring Patch Automation
147
147
 
148
- The long-term goal is to make this repository the fast path for Capacitor fixes that are waiting upstream.
148
+ This repository is the fast path for Capacitor fixes that are waiting upstream.
149
149
 
150
- For every external PR opened against `ionic-team/capacitor`, the automation should:
150
+ The `Sync upstream Capacitor patches` workflow runs every 6 hours and can also be started manually from GitHub Actions. It:
151
151
 
152
- 1. Detect whether the PR is a fix that changes shipped Capacitor code.
153
- 2. Wait until the upstream PR, or the matching Capacitor+ `sync/upstream-pr-*` branch, passes its test suite.
154
- 3. Generate package-ready patch files in this repository.
155
- 4. Open or update a pull request here with the new `patches/catalog.json` entries and patch files.
156
- 5. Run this repository's tests against supported Capacitor versions.
157
- 6. Comment on the original upstream PR with the quick-patch ID and install snippet once the patch package PR is ready.
152
+ 1. Checks out this repository and `Cap-go/capacitor-plus`.
153
+ 2. Fetches Capacitor+ `sync/upstream-pr-*` branches.
154
+ 3. Reads the matching `ionic-team/capacitor` PR metadata.
155
+ 4. Skips PRs from Capacitor team members and collaborators.
156
+ 5. Skips branches whose Capacitor+ checks are not passing.
157
+ 6. Generates package-ready patch files and `patches/catalog.json` entries.
158
+ 7. Runs this repository's verification.
159
+ 8. Opens or updates a pull request with the generated changes.
158
160
 
159
- The upstream PR comment should only be posted when the patch applies cleanly and this repository's checks pass. A good comment looks like:
161
+ The generator handles direct Android and iOS package source changes. When an upstream PR changes TypeScript source that users do not receive directly in `node_modules`, it can also build compiled package artifacts for `@capacitor/core` and `@capacitor/cli`, as well as native-bridge asset patches.
162
+
163
+ Manual run:
164
+
165
+ ```bash
166
+ bun run sync:patches -- \
167
+ --capacitor-plus-dir ../capacitor-plus \
168
+ --remote capgo \
169
+ --base-ref capgo/plus \
170
+ --require-checks
171
+ ```
172
+
173
+ Useful options:
174
+
175
+ - `--pr <number>` only processes a specific upstream PR branch.
176
+ - `--refresh-existing` regenerates patches for entries that already exist.
177
+ - `--no-require-checks` allows local dry-runs before Capacitor+ CI finishes.
178
+ - `--max-build-prs <count>` limits expensive compiled artifact generation.
179
+ - `--dry-run` reports what would be generated without writing files.
180
+
181
+ After a generated patch PR is merged, the `Comment upstream quick patches` workflow comments on the original upstream Capacitor PR when `PERSONAL_ACCESS_TOKEN` is configured with permission to comment there.
182
+
183
+ The upstream PR comment is only posted after the patch entry lands in this repository. A good comment looks like:
160
184
 
161
185
  ````md
162
186
  This fix is available as a quick patch through `@capgo/capacitor-patch`.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@capgo/capacitor-patch",
3
- "version": "8.2.1",
3
+ "version": "8.3.0",
4
4
  "description": "Capacitor plugin for applying vetted Capgo patches during cap sync and cap update.",
5
5
  "main": "dist/plugin.cjs.js",
6
6
  "module": "dist/esm/index.js",
@@ -41,7 +41,9 @@
41
41
  "verify": "bun run verify:web",
42
42
  "verify:web": "bun run build && bun run test:patch",
43
43
  "test": "bun run test:patch",
44
- "test:patch": "node --test scripts/test-capacitor-patch.mjs",
44
+ "test:patch": "node --test scripts/test-capacitor-patch.mjs scripts/test-upstream-sync.mjs",
45
+ "sync:patches": "node scripts/sync-upstream-patches.mjs",
46
+ "comment:upstream-patches": "node scripts/comment-upstream-patches.mjs",
45
47
  "lint": "bun run eslint && bun run prettier -- --check",
46
48
  "fmt": "bun run eslint -- --fix && bun run prettier -- --write",
47
49
  "eslint": "eslint . --ext .ts",
@@ -0,0 +1,671 @@
1
+ import { spawnSync } from 'node:child_process';
2
+ import fs from 'node:fs';
3
+ import os from 'node:os';
4
+ import path from 'node:path';
5
+
6
// GitHub author associations that mark a PR author as part of the Capacitor
// team; such PRs are skipped when only external contributions are synced.
const INTERNAL_AUTHOR_ASSOCIATIONS = new Set(['COLLABORATOR', 'MEMBER', 'OWNER']);
// Check-run conclusions treated as passing when gating on Capacitor+ CI.
const SUCCESSFUL_CHECK_CONCLUSIONS = new Set(['success', 'neutral', 'skipped']);
// Check-run conclusions treated as failing.
const FAILED_CHECK_CONCLUSIONS = new Set(['action_required', 'cancelled', 'failure', 'startup_failure', 'timed_out']);
9
+
10
// Targets whose published package ships the repository files directly
// (`direct: true`): a plain `git diff` of the accepted files is the patch.
export const PACKAGE_TARGETS = [
  {
    key: 'android',
    packageName: '@capacitor/android',
    root: 'android',
    suffix: 'android',
    titleSuffix: 'android',
    versionRange: '>=8.0.0 <9.0.0',
    direct: true,
    // Only files under android/ that actually ship in the package
    // (no tests, dotfiles, or repo metadata) qualify.
    accepts: (file) => file.startsWith('android/') && isShippedPackageFile(file, 'android'),
  },
  {
    key: 'ios',
    packageName: '@capacitor/ios',
    root: 'ios',
    suffix: 'ios',
    titleSuffix: 'ios',
    versionRange: '>=8.0.0 <9.0.0',
    direct: true,
    accepts: (file) => file.startsWith('ios/') && isShippedPackageFile(file, 'ios'),
  },
];
32
+
33
// Targets whose published artifacts are compiled from TypeScript sources.
// `triggers` decides whether a changed source file requires a build; `build`
// names the build recipe (see buildNeededTargets) and `generatedFiles` the
// artifacts to diff afterwards.
export const COMPILED_TARGETS = [
  {
    key: 'core',
    packageName: '@capacitor/core',
    root: 'core',
    suffix: 'core',
    titleSuffix: 'core',
    versionRange: '>=8.0.0 <9.0.0',
    build: 'core',
    generatedFiles: ['dist/index.js', 'dist/index.cjs.js'],
    triggers: (file) => file.startsWith('core/src/') && isSourceRuntimeFile(file),
  },
  {
    key: 'cli',
    packageName: '@capacitor/cli',
    root: 'cli',
    suffix: 'cli',
    titleSuffix: 'cli',
    versionRange: '>=8.0.0 <9.0.0',
    build: 'cli',
    // The whole dist directory is expanded (see expandGeneratedFiles).
    generatedFiles: ['dist'],
    triggers: (file) => file.startsWith('cli/src/') && isSourceRuntimeFile(file),
  },
  {
    // core/native-bridge.ts is bundled into a platform asset for Android …
    key: 'android-native-bridge',
    packageName: '@capacitor/android',
    root: 'android',
    suffix: 'android-native-bridge',
    titleSuffix: 'android-native-bridge',
    versionRange: '>=8.0.0 <9.0.0',
    build: 'nativebridge',
    generatedFiles: ['capacitor/src/main/assets/native-bridge.js'],
    triggers: (file) => file === 'core/native-bridge.ts',
  },
  {
    // … and for iOS.
    key: 'ios-native-bridge',
    packageName: '@capacitor/ios',
    root: 'ios',
    suffix: 'ios-native-bridge',
    titleSuffix: 'ios-native-bridge',
    versionRange: '>=8.0.0 <9.0.0',
    build: 'nativebridge',
    generatedFiles: ['Capacitor/Capacitor/assets/native-bridge.js'],
    triggers: (file) => file === 'core/native-bridge.ts',
  },
];
79
+
80
/**
 * Extract the upstream PR number from a `sync/upstream-pr-<n>` branch name.
 * Accepts bare and remote-qualified refs (e.g. `capgo/sync/upstream-pr-42`).
 *
 * @param {string} branchName git branch or ref name.
 * @returns {number|null} the PR number, or null when the ref does not match.
 */
export function parseSyncBranchNumber(branchName) {
  const matched = branchName.match(/(?:^|\/)sync\/upstream-pr-(\d+)$/);
  if (!matched) {
    return null;
  }
  return Number(matched[1]);
}
84
+
85
/**
 * Return a copy of `entries` ordered by upstream PR number, then by id.
 * The input array is not mutated.
 */
export function sortCatalogEntries(entries) {
  const ordered = [...entries];
  ordered.sort((left, right) => {
    const leftPr = getEntryPullRequestNumber(left);
    const rightPr = getEntryPullRequestNumber(right);
    if (leftPr === rightPr) {
      return String(left.id).localeCompare(String(right.id));
    }
    return leftPr - rightPr;
  });
  return ordered;
}
95
+
96
/**
 * Resolve the upstream Capacitor PR number for a catalog entry.
 * Prefers the `/pull/<n>` segment of `source.upstreamPullRequest`, then the
 * `upstream-pr-<n>` prefix of the entry id, and finally
 * Number.MAX_SAFE_INTEGER so unknown entries sort last.
 */
export function getEntryPullRequestNumber(entry) {
  const sourceUrl = entry?.source?.upstreamPullRequest ?? '';
  const sourceMatch = sourceUrl.match(/\/pull\/(\d+)/);
  if (sourceMatch) {
    return Number(sourceMatch[1]);
  }
  const idMatch = (entry?.id ?? '').match(/^upstream-pr-(\d+)/);
  return idMatch ? Number(idMatch[1]) : Number.MAX_SAFE_INTEGER;
}
101
+
102
/**
 * Partition a sync branch's changed files into patchable targets.
 *
 * @param {string[]} changedFiles repo-relative paths changed on the branch.
 * @returns {{directTargets: Array, compiledTargets: Array, allTargets: Array}}
 */
export function groupPatchTargets(changedFiles) {
  const anyFile = (predicate) => changedFiles.some(predicate);
  const directTargets = PACKAGE_TARGETS.filter((target) => anyFile((file) => target.accepts(file)));
  const compiledTargets = COMPILED_TARGETS.filter((target) => anyFile((file) => target.triggers(file)));
  return {
    directTargets,
    compiledTargets,
    allTargets: directTargets.concat(compiledTargets),
  };
}
111
+
112
/**
 * Build one `patches/catalog.json` entry for a (PR, target) pair.
 *
 * @param {{pr: object, target: object, patchFile: string,
 *          upstreamStatus: object, branchUrl: string}} args
 * @returns {object} a catalog entry keyed by `upstream-pr-<n>-<suffix>`.
 */
export function createCatalogEntry({ pr, target, patchFile, upstreamStatus, branchUrl }) {
  const authorKind = isExternalAuthor(pr.authorAssociation) ? 'external' : 'internal';
  const entry = {
    id: `upstream-pr-${pr.number}-${target.suffix}`,
    title: `${pr.title} (${target.titleSuffix})`,
    recommended: false,
    phase: 'package',
    target: {
      type: 'package',
      packageName: target.packageName,
      versionRange: target.versionRange,
    },
    source: {
      upstreamPullRequest: `https://github.com/ionic-team/capacitor/pull/${pr.number}`,
      capacitorPlusBranch: branchUrl,
      author: pr.author,
      authorAssociation: authorKind,
    },
    upstream: upstreamStatus,
    patchFile,
  };
  return entry;
}
133
+
134
/**
 * Summarize the upstream PR merge state for a catalog entry.
 * `status` is 'merged-upstream' when the PR has a merge timestamp,
 * otherwise 'not-merged'.
 */
export function createUpstreamStatus(pr) {
  const mergedAt = pr.mergedAt ?? null;
  return {
    state: pr.state,
    mergedAt,
    status: mergedAt ? 'merged-upstream' : 'not-merged',
  };
}
150
+
151
/**
 * True when the PR author is not a Capacitor collaborator/member/owner.
 * Missing or unrecognized associations count as external.
 */
export function isExternalAuthor(authorAssociation) {
  const normalized = String(authorAssociation ?? '').toUpperCase();
  return !INTERNAL_AUTHOR_ASSOCIATIONS.has(normalized);
}
154
+
155
/**
 * Decide whether a repo file under `root` ships in the published package.
 * Rejects dotfiles, common repo metadata (changelog/license/readme/
 * package.json), and test or generated files.
 *
 * @param {string} file repo-relative path beginning with `<root>/`.
 * @param {string} root package directory name, e.g. 'android'.
 */
export function isShippedPackageFile(file, root) {
  const relative = file.slice(root.length + 1);
  if (!relative || relative.startsWith('.')) {
    return false;
  }

  const metadataBasenames = ['CHANGELOG.md', 'LICENSE', 'LICENSE.md', 'README.md', 'package.json'];
  if (metadataBasenames.includes(path.basename(relative))) {
    return false;
  }

  return !isTestOrGeneratedFile(relative);
}
172
+
173
/**
 * True for TypeScript/JavaScript runtime sources that are neither tests nor
 * generated artifacts.
 */
export function isSourceRuntimeFile(file) {
  const isScript = /\.(ts|tsx|js|mjs|cjs)$/.test(file);
  return isScript && !isTestOrGeneratedFile(file);
}
179
+
180
/**
 * Render the markdown body posted on upstream Capacitor PRs, listing the
 * quick-patch IDs and a ready-to-paste CapacitorPatch plugin config.
 * The `<!-- capgo-capacitor-patch:quick-patch -->` marker on the first line
 * lets upsertIssueComment find and update an existing comment.
 *
 * NOTE(review): indentation inside the template literals may have been
 * collapsed by the diff rendering this was recovered from — confirm against
 * the published package before relying on exact whitespace.
 */
export function buildQuickPatchComment(entries) {
  const ids = entries.map((entry) => entry.id).sort();
  const firstId = ids[0];
  // Single id renders inline; multiple ids render one per line.
  const patches = ids.length === 1 ? `'${firstId}'` : ids.map((id) => ` '${id}',`).join('\n');
  const patchConfig = ids.length === 1 ? ` patches: [${patches}],` : ` patches: [\n${patches}\n ],`;

  return `<!-- capgo-capacitor-patch:quick-patch -->
This fix is available as a quick patch through \`@capgo/capacitor-patch\`.

Patch ${ids.length === 1 ? 'ID' : 'IDs'}: ${ids.map((id) => `\`${id}\``).join(', ')}

\`\`\`ts
plugins: {
 CapacitorPatch: {
${patchConfig}
 strict: true,
 },
}
\`\`\`

Run \`npx cap sync\` after installing \`@capgo/capacitor-patch\`.`;
}
202
+
203
/**
 * Generate patch files and catalog entries from Capacitor+
 * `sync/upstream-pr-*` branches.
 *
 * @param {object} options
 * @param {string} options.capacitorPlusDir path to the Cap-go/capacitor-plus checkout (required).
 * @param {string} [options.rootDir] this repository's root; defaults to cwd.
 * @param {string} [options.remote] git remote name inside the Capacitor+ checkout; default 'origin'.
 * @param {string} [options.baseRef] diff base ref; default `<remote>/plus`.
 * @param {number[]} [options.prNumbers] restrict processing to these PR branches.
 * @param {boolean} [options.externalOnly] skip team-authored PRs unless explicitly false.
 * @param {boolean} [options.requireChecks] skip branches whose Capacitor+ checks are not passing.
 * @param {boolean} [options.refreshExisting] regenerate entries already present in the catalog.
 * @param {number} [options.maxBuildPrs] cap on PRs getting expensive compiled builds; default 3.
 * @param {boolean} [options.dryRun] report without writing patch files or the catalog.
 * @param {string} [options.githubToken] token for GitHub metadata/check lookups.
 * @returns {Promise<{generatedEntries: Array, skipped: Array, catalogPath: string}>}
 */
export async function syncUpstreamPatches(options) {
  const rootDir = path.resolve(options.rootDir ?? process.cwd());
  const capacitorPlusDir = path.resolve(options.capacitorPlusDir);
  const remote = options.remote ?? 'origin';
  const baseRef = options.baseRef ?? `${remote}/plus`;
  const patchDir = path.join(rootDir, 'patches');
  const catalogPath = path.join(patchDir, 'catalog.json');
  const existingCatalog = readJson(catalogPath);
  // Map lets new entries replace same-id ones without duplicating.
  const catalogById = new Map(existingCatalog.map((entry) => [entry.id, entry]));
  let remainingBuildPrs = options.maxBuildPrs ?? 3;
  const branches = options.prNumbers?.length
    ? options.prNumbers.map((number) => `${remote}/sync/upstream-pr-${number}`)
    : listSyncBranches(capacitorPlusDir, remote);
  const generatedEntries = [];
  const skipped = [];
  // Scratch space for compiled-artifact worktrees; removed in finally.
  const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'capgo-capacitor-patch-sync-'));

  try {
    for (const branch of branches) {
      const prNumber = parseSyncBranchNumber(branch);
      if (!prNumber) {
        continue;
      }

      const pr = await getPullRequestMetadata(prNumber, options.githubToken);
      // Default policy: only mirror external contributions.
      if (options.externalOnly !== false && !isExternalAuthor(pr.authorAssociation)) {
        skipped.push({ pr: prNumber, reason: `internal author association: ${pr.authorAssociation}` });
        continue;
      }

      const headSha = git(capacitorPlusDir, ['rev-parse', branch]).trim();
      if (options.requireChecks) {
        // Gate on Capacitor+ CI for the branch tip commit.
        const checkState = await getCommitCheckState({
          owner: 'Cap-go',
          repo: 'capacitor-plus',
          ref: headSha,
          token: options.githubToken,
        });
        if (checkState.state !== 'success') {
          skipped.push({ pr: prNumber, reason: `checks are ${checkState.state}: ${checkState.summary}` });
          continue;
        }
      }

      const mergeBase = git(capacitorPlusDir, ['merge-base', baseRef, branch]).trim();
      const changedFiles = listChangedFiles(capacitorPlusDir, mergeBase, branch);
      const { directTargets, compiledTargets } = groupPatchTargets(changedFiles);
      // Unless refreshing, only targets without an existing catalog entry.
      const selectedCompiledTargets = compiledTargets.filter(
        (target) => options.refreshExisting || !catalogById.has(`upstream-pr-${prNumber}-${target.suffix}`),
      );
      const selectedDirectTargets = directTargets.filter(
        (target) => options.refreshExisting || !catalogById.has(`upstream-pr-${prNumber}-${target.suffix}`),
      );

      if (!selectedDirectTargets.length && !selectedCompiledTargets.length) {
        skipped.push({ pr: prNumber, reason: 'no new patchable package files' });
        continue;
      }

      const branchEntries = [];
      let skippedCompiledForLimit = false;
      // Direct targets: the diff of shipped files is the patch.
      for (const target of selectedDirectTargets) {
        const files = changedFiles.filter((file) => target.accepts(file));
        const diff = git(capacitorPlusDir, [
          'diff',
          `--relative=${target.root}`,
          `${mergeBase}..${branch}`,
          '--',
          ...files,
        ]);
        if (!diff.trim()) {
          continue;
        }
        const patchFile = writePatchFile(patchDir, prNumber, target, diff, options.dryRun);
        branchEntries.push(createCatalogEntryForTarget({ pr, target, patchFile, prNumber }));
      }

      // Compiled targets require full builds at both refs — rate-limited.
      if (selectedCompiledTargets.length) {
        if (remainingBuildPrs <= 0) {
          skippedCompiledForLimit = true;
          skipped.push({ pr: prNumber, reason: 'compiled patch generation limit reached' });
        } else {
          remainingBuildPrs -= 1;
          const built = generateCompiledDiffs({
            capacitorPlusDir,
            mergeBase,
            branch,
            targets: selectedCompiledTargets,
            tmpDir,
          });

          for (const item of built) {
            if (!item.diff.trim()) {
              continue;
            }
            const patchFile = writePatchFile(patchDir, prNumber, item.target, item.diff, options.dryRun);
            branchEntries.push(createCatalogEntryForTarget({ pr, target: item.target, patchFile, prNumber }));
          }
        }
      }

      if (!branchEntries.length) {
        // Avoid double-reporting a PR already skipped for the build limit.
        if (!skippedCompiledForLimit) {
          skipped.push({ pr: prNumber, reason: 'generated diffs were empty' });
        }
        continue;
      }

      generatedEntries.push(...branchEntries);
      for (const entry of branchEntries) {
        catalogById.set(entry.id, entry);
      }
    }

    const nextCatalog = sortCatalogEntries([...catalogById.values()]);
    // Only rewrite the catalog when something was generated and not dry-running.
    if (!options.dryRun && generatedEntries.length) {
      fs.writeFileSync(catalogPath, `${JSON.stringify(nextCatalog, null, 2)}\n`);
    }

    return {
      generatedEntries,
      skipped,
      catalogPath,
    };
  } finally {
    fs.rmSync(tmpDir, { recursive: true, force: true });
  }
}
331
+
332
/**
 * Comment on upstream ionic-team/capacitor PRs for every catalog entry added
 * between `options.baseRef` and `options.headRef`. Entries are grouped per
 * PR so each PR receives one comment listing all of its patch IDs.
 *
 * @param {{baseRef: string, headRef: string, dryRun?: boolean,
 *          githubToken?: string}} options
 * @returns {Promise<{posted: Array, added: Array}>}
 * @throws {Error} when posting is attempted without a token.
 */
export async function commentOnUpstreamPullRequests(options) {
  const baseCatalog = readCatalogFromGit(options.baseRef);
  const headCatalog = readCatalogFromGit(options.headRef);
  const baseIds = new Set(baseCatalog.map((entry) => entry.id));
  // Only entries new at head that reference an upstream PR qualify.
  const added = headCatalog.filter((entry) => !baseIds.has(entry.id) && entry.source?.upstreamPullRequest);
  const byPullRequest = new Map();

  for (const entry of added) {
    const prNumber = getEntryPullRequestNumber(entry);
    if (!Number.isFinite(prNumber)) {
      continue;
    }
    const entries = byPullRequest.get(prNumber) ?? [];
    entries.push(entry);
    byPullRequest.set(prNumber, entries);
  }

  const posted = [];
  for (const [prNumber, entries] of byPullRequest) {
    const body = buildQuickPatchComment(entries);
    if (options.dryRun) {
      posted.push({ pr: prNumber, entries: entries.map((entry) => entry.id), dryRun: true });
      continue;
    }
    if (!options.githubToken) {
      throw new Error('GITHUB_TOKEN or PERSONAL_ACCESS_TOKEN is required to comment on upstream pull requests.');
    }

    // Upsert keyed on the marker so re-runs edit the existing comment
    // instead of posting duplicates.
    await upsertIssueComment({
      owner: 'ionic-team',
      repo: 'capacitor',
      issueNumber: prNumber,
      token: options.githubToken,
      marker: '<!-- capgo-capacitor-patch:quick-patch -->',
      body,
    });
    posted.push({ pr: prNumber, entries: entries.map((entry) => entry.id) });
  }

  return { posted, added };
}
373
+
374
/**
 * Convenience wrapper around createCatalogEntry that derives the upstream
 * status and the Capacitor+ sync-branch URL for a PR number.
 */
function createCatalogEntryForTarget({ pr, target, patchFile, prNumber }) {
  const branchUrl = `https://github.com/Cap-go/capacitor-plus/tree/sync/upstream-pr-${prNumber}`;
  return createCatalogEntry({
    pr,
    target,
    patchFile,
    upstreamStatus: createUpstreamStatus(pr),
    branchUrl,
  });
}
383
+
384
/**
 * Persist a diff to `patches/upstream-pr-<n>-<suffix>.patch`, ensuring a
 * trailing newline. The write is skipped under `dryRun`; the repo-relative
 * patch path is returned either way.
 */
function writePatchFile(patchDir, prNumber, target, diff, dryRun) {
  const relativePath = `patches/upstream-pr-${prNumber}-${target.suffix}.patch`;
  if (dryRun) {
    return relativePath;
  }
  const contents = diff.endsWith('\n') ? diff : `${diff}\n`;
  fs.writeFileSync(path.join(patchDir, path.basename(relativePath)), contents);
  return relativePath;
}
391
+
392
/**
 * List the remote's `sync/upstream-pr-*` branch refs in the Capacitor+
 * checkout, sorted by upstream PR number.
 */
function listSyncBranches(repoDir, remote) {
  const raw = git(repoDir, [
    'for-each-ref',
    '--format=%(refname:short)',
    `refs/remotes/${remote}/sync/upstream-pr-*`,
  ]);
  const branches = raw
    .split('\n')
    .map((name) => name.trim())
    .filter((name) => name.length > 0);
  branches.sort((left, right) => (parseSyncBranchNumber(left) ?? 0) - (parseSyncBranchNumber(right) ?? 0));
  return branches;
}
404
+
405
/** Paths changed between two refs, via `git diff --name-only`. */
function listChangedFiles(repoDir, baseRef, headRef) {
  const output = git(repoDir, ['diff', '--name-only', `${baseRef}..${headRef}`]);
  const names = [];
  for (const line of output.split('\n')) {
    const trimmed = line.trim();
    if (trimmed) {
      names.push(trimmed);
    }
  }
  return names;
}
411
+
412
/**
 * Build compiled artifacts at both the merge-base and the branch tip inside
 * throwaway git worktrees, then diff the generated files per target.
 * Worktrees are force-removed even when a build fails.
 *
 * @returns {Array<{target: object, diff: string}>}
 */
function generateCompiledDiffs({ capacitorPlusDir, mergeBase, branch, targets, tmpDir }) {
  // pid + timestamp keeps repeated runs from colliding inside tmpDir.
  const baseDir = path.join(tmpDir, `base-${process.pid}-${Date.now()}`);
  const headDir = path.join(tmpDir, `head-${process.pid}-${Date.now()}`);
  git(capacitorPlusDir, ['worktree', 'add', '--detach', baseDir, mergeBase]);
  git(capacitorPlusDir, ['worktree', 'add', '--detach', headDir, branch]);

  try {
    buildNeededTargets(baseDir, targets);
    buildNeededTargets(headDir, targets);

    return targets.map((target) => ({
      target,
      diff: createFileSetDiff({
        baseRoot: path.join(baseDir, target.root),
        headRoot: path.join(headDir, target.root),
        // The head side decides which generated files exist to compare.
        files: expandGeneratedFiles(path.join(headDir, target.root), target.generatedFiles),
      }),
    }));
  } finally {
    git(capacitorPlusDir, ['worktree', 'remove', '--force', baseDir], { allowFailure: true });
    git(capacitorPlusDir, ['worktree', 'remove', '--force', headDir], { allowFailure: true });
  }
}
435
+
436
/**
 * Run the build steps required by the targets' `build` kinds inside one
 * worktree: 'nativebridge' (tsc + rollup of core/native-bridge.ts),
 * 'core' (clean + tsc + rollup), 'cli' (clean + assets + tsc).
 * No-op when no target declares a build kind.
 */
function buildNeededTargets(worktreeDir, targets) {
  const buildTypes = new Set(targets.map((target) => target.build));
  if (!buildTypes.size) {
    return;
  }

  // A single dependency install covers every build below.
  run('bun', ['install', '--frozen-lockfile'], { cwd: worktreeDir });

  if (buildTypes.has('nativebridge')) {
    run('bunx', ['tsc', 'native-bridge.ts', '--target', 'es2017', '--moduleResolution', 'node', '--outDir', 'build'], {
      cwd: path.join(worktreeDir, 'core'),
    });
    run('bunx', ['rollup', '--config', 'rollup.bridge.config.js'], { cwd: path.join(worktreeDir, 'core') });
  }

  if (buildTypes.has('core')) {
    run('bun', ['run', 'clean'], { cwd: path.join(worktreeDir, 'core') });
    run('bunx', ['tsc'], { cwd: path.join(worktreeDir, 'core') });
    run('bunx', ['rollup', '--config', 'rollup.config.js'], { cwd: path.join(worktreeDir, 'core') });
  }

  if (buildTypes.has('cli')) {
    run('bun', ['run', 'clean'], { cwd: path.join(worktreeDir, 'cli') });
    run('bun', ['run', 'assets'], { cwd: path.join(worktreeDir, 'cli') });
    run('bunx', ['tsc'], { cwd: path.join(worktreeDir, 'cli') });
  }
}
463
+
464
/**
 * Produce a unified diff between the base and head versions of the listed
 * files using a scratch git repository: stage the base copies, overwrite
 * them with the head copies, then take the unstaged diff.
 * Returns '' when `files` is empty; the scratch repo is always deleted.
 */
function createFileSetDiff({ baseRoot, headRoot, files }) {
  if (!files.length) {
    return '';
  }

  const diffRoot = fs.mkdtempSync(path.join(os.tmpdir(), 'capgo-capacitor-patch-diff-'));
  try {
    copySelectedFiles(baseRoot, diffRoot, files);
    git(diffRoot, ['init', '--quiet']);
    git(diffRoot, ['add', '-A']);
    copySelectedFiles(headRoot, diffRoot, files);
    // intent-to-add so files that only exist at head appear in the diff.
    git(diffRoot, ['add', '-N', '.'], { allowFailure: true });
    return git(diffRoot, ['diff', '--', ...files], { allowFailure: true });
  } finally {
    fs.rmSync(diffRoot, { recursive: true, force: true });
  }
}
481
+
482
/**
 * Expand a target's `generatedFiles` entries into concrete root-relative
 * file paths. File entries are kept as-is; directory entries are walked
 * recursively, excluding test/generated files and source maps. Entries that
 * do not exist on disk are ignored. The result is sorted.
 */
function expandGeneratedFiles(rootDir, entries) {
  const collected = [];
  for (const entry of entries) {
    const absolute = path.join(rootDir, entry);
    if (!fs.existsSync(absolute)) {
      continue;
    }
    if (fs.statSync(absolute).isFile()) {
      collected.push(entry);
      continue;
    }
    // Directory: gather every shippable file beneath it.
    for (const absoluteFile of walkFiles(absolute)) {
      const relative = path.relative(rootDir, absoluteFile).split(path.sep).join('/');
      const excluded = isTestOrGeneratedFile(relative) || relative.endsWith('.map');
      if (!excluded) {
        collected.push(relative);
      }
    }
  }
  return collected.sort();
}
503
+
504
/**
 * Mirror the listed files from `sourceRoot` into `destRoot`, creating parent
 * directories as needed. A file missing at the source is deleted at the
 * destination so the mirror also reflects removals.
 */
function copySelectedFiles(sourceRoot, destRoot, files) {
  for (const relative of files) {
    const from = path.join(sourceRoot, relative);
    const to = path.join(destRoot, relative);
    if (!fs.existsSync(from)) {
      fs.rmSync(to, { force: true });
      continue;
    }
    fs.mkdirSync(path.dirname(to), { recursive: true });
    fs.copyFileSync(from, to);
  }
}
516
+
517
/**
 * Recursively collect the absolute paths of all regular files under
 * `rootDir` (iterative depth-first; entries that are neither files nor
 * directories, e.g. symlinks, are skipped).
 */
function walkFiles(rootDir) {
  const files = [];
  const pending = [rootDir];
  while (pending.length > 0) {
    const current = pending.pop();
    for (const dirent of fs.readdirSync(current, { withFileTypes: true })) {
      const absolute = path.join(current, dirent.name);
      if (dirent.isDirectory()) {
        pending.push(absolute);
      } else if (dirent.isFile()) {
        files.push(absolute);
      }
    }
  }
  return files;
}
533
+
534
/**
 * True for paths that are test suites, build/coverage output, or source
 * maps — files that never ship to package consumers. Windows separators are
 * normalized before matching.
 */
function isTestOrGeneratedFile(file) {
  const normalized = file.split(path.sep).join('/');
  const basename = path.basename(normalized);
  const generatedDirs = ['/build/', '/coverage/', '/test/', '/tests/', '/__tests__/'];
  if (generatedDirs.some((dir) => normalized.includes(dir))) {
    return true;
  }
  if (/\.(spec|test)\.[cm]?[jt]sx?$/.test(basename)) {
    return true;
  }
  return basename.endsWith('.map');
}
547
+
548
/**
 * Fetch title/state/author metadata for an upstream Capacitor PR.
 * When the API responds 404 (the request is made with `optional`), a
 * placeholder record is returned whose author association 'NONE' means the
 * PR is treated as external by isExternalAuthor.
 */
async function getPullRequestMetadata(number, token) {
  const fallback = {
    number,
    title: `Upstream Capacitor PR #${number}`,
    state: 'unknown',
    mergedAt: null,
    author: 'unknown',
    authorAssociation: 'NONE',
  };

  const data = await githubJson(`/repos/ionic-team/capacitor/pulls/${number}`, token, { optional: true });
  if (!data) {
    return fallback;
  }

  return {
    number,
    title: data.title,
    state: data.state,
    mergedAt: data.merged_at,
    author: data.user?.login ?? 'unknown',
    authorAssociation: data.author_association ?? 'NONE',
  };
}
572
+
573
/**
 * Aggregate GitHub check runs and legacy commit statuses for a ref into a
 * single state: 'missing' (nothing reported), 'pending' (runs still
 * executing), 'failure' (any failed run or non-success status), 'unknown'
 * (unrecognized run conclusions), or 'success'.
 * Only the first 100 check runs are considered (per_page=100).
 *
 * @returns {Promise<{state: string, summary: string}>}
 */
async function getCommitCheckState({ owner, repo, ref, token }) {
  const checkRuns = await githubJson(`/repos/${owner}/${repo}/commits/${ref}/check-runs?per_page=100`, token, {
    optional: true,
  });
  const status = await githubJson(`/repos/${owner}/${repo}/commits/${ref}/status`, token, { optional: true });
  const runs = checkRuns?.check_runs ?? [];
  const statuses = status?.statuses ?? [];
  const pendingRuns = runs.filter((run) => run.status !== 'completed');
  const failedRuns = runs.filter((run) => FAILED_CHECK_CONCLUSIONS.has(run.conclusion));
  const failedStatuses = statuses.filter((item) => item.state !== 'success');

  if (!runs.length && !statuses.length) {
    return { state: 'missing', summary: 'no check runs or statuses found' };
  }
  if (pendingRuns.length) {
    return { state: 'pending', summary: pendingRuns.map((run) => run.name).join(', ') };
  }
  if (failedRuns.length || failedStatuses.length) {
    return {
      state: 'failure',
      summary: [
        ...failedRuns.map((run) => `${run.name}:${run.conclusion}`),
        ...failedStatuses.map((item) => item.context),
      ].join(', '),
    };
  }
  // Conclusions outside the known success/failure sets are surfaced as
  // 'unknown' rather than silently counting as passing.
  if (runs.some((run) => !SUCCESSFUL_CHECK_CONCLUSIONS.has(run.conclusion))) {
    return { state: 'unknown', summary: 'one or more check runs have unknown conclusions' };
  }
  return { state: 'success', summary: `${runs.length + statuses.length} checks passed` };
}
604
+
605
/**
 * Create or update a single marker-tagged comment on a GitHub issue/PR.
 * Scans the first 100 comments for `marker`; PATCHes the matching comment,
 * or POSTs a new one when none is found.
 */
async function upsertIssueComment({ owner, repo, issueNumber, token, marker, body }) {
  const comments = await githubJson(`/repos/${owner}/${repo}/issues/${issueNumber}/comments?per_page=100`, token);
  const existing = comments.find((comment) => comment.body?.includes(marker));
  if (existing) {
    await githubJson(`/repos/${owner}/${repo}/issues/comments/${existing.id}`, token, {
      method: 'PATCH',
      body: { body },
    });
    return;
  }
  await githubJson(`/repos/${owner}/${repo}/issues/${issueNumber}/comments`, token, {
    method: 'POST',
    body: { body },
  });
}
620
+
621
/**
 * Minimal GitHub REST helper.
 *
 * @param {string} endpoint path beginning with '/', appended to api.github.com.
 * @param {string} [token] bearer token; omitted for anonymous requests.
 * @param {{method?: string, body?: object, optional?: boolean}} [options]
 *   `optional` turns a 404 response into a null return instead of a throw.
 * @returns {Promise<object|Array|null>} parsed JSON, or null for 204 / optional 404.
 * @throws {Error} on any other non-2xx response.
 */
async function githubJson(endpoint, token, options = {}) {
  const response = await fetch(`https://api.github.com${endpoint}`, {
    method: options.method ?? 'GET',
    headers: {
      Accept: 'application/vnd.github+json',
      'X-GitHub-Api-Version': '2022-11-28',
      ...(token ? { Authorization: `Bearer ${token}` } : {}),
    },
    body: options.body ? JSON.stringify(options.body) : undefined,
  });

  if (options.optional && response.status === 404) {
    return null;
  }

  if (!response.ok) {
    throw new Error(`GitHub API ${response.status} ${response.statusText}: ${await response.text()}`);
  }

  return response.status === 204 ? null : response.json();
}
642
+
643
/**
 * Read `patches/catalog.json` as it exists at a git ref of the current
 * repository. Returns an empty catalog when the ref or file is missing or
 * the content is not valid JSON.
 */
function readCatalogFromGit(ref) {
  try {
    const raw = run('git', ['show', `${ref}:patches/catalog.json`], { allowFailure: false });
    return JSON.parse(raw);
  } catch {
    return [];
  }
}
650
+
651
/** Parse a UTF-8 JSON file from disk. */
function readJson(file) {
  const raw = fs.readFileSync(file, 'utf8');
  return JSON.parse(raw);
}
654
+
655
/** Run a git command inside `cwd` (via `git -C`) and return its stdout. */
function git(cwd, args, options = {}) {
  const fullArgs = ['-C', cwd].concat(args);
  return run('git', fullArgs, options);
}
658
+
659
/**
 * Run a command synchronously and return its captured stdout.
 *
 * @param {string} command executable to run.
 * @param {string[]} args argument vector.
 * @param {{cwd?: string, allowFailure?: boolean}} [options] `allowFailure`
 *   suppresses the throw on non-zero exit or spawn failure.
 * @returns {string} captured stdout (may be empty, or null if spawn failed
 *   and `allowFailure` was set).
 * @throws {Error} when the command exits non-zero or cannot be spawned and
 *   `allowFailure` is not set.
 */
function run(command, args, options = {}) {
  const result = spawnSync(command, args, {
    cwd: options.cwd,
    encoding: 'utf8',
    stdio: ['ignore', 'pipe', 'pipe'],
  });

  // `status` is null when the process never started (e.g. ENOENT), which
  // also leaves stderr/stdout null — surface the spawn error instead of an
  // empty message in that case.
  if (result.status !== 0 && !options.allowFailure) {
    const detail = result.error?.message ?? (result.stderr || result.stdout);
    throw new Error(`${command} ${args.join(' ')} failed:\n${detail}`);
  }

  return result.stdout;
}
+ }