@remixhq/core 0.1.13 → 0.1.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/api.d.ts +76 -1
- package/dist/api.js +1 -1
- package/dist/auth.js +1 -1
- package/dist/binding-WiIRI2fl.d.ts +43 -0
- package/dist/binding.d.ts +1 -21
- package/dist/binding.js +2 -0
- package/dist/chunk-P6JHXOV4.js +236 -0
- package/dist/chunk-US5SM7ZC.js +433 -0
- package/dist/collab.d.ts +376 -611
- package/dist/collab.js +2382 -377
- package/dist/contracts-NbV3P_Rl.d.ts +677 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.js +2 -2
- package/package.json +4 -2
package/dist/collab.js
CHANGED
|
@@ -38,6 +38,75 @@ import {
|
|
|
38
38
|
RemixError
|
|
39
39
|
} from "./chunk-YZ34ICNN.js";
|
|
40
40
|
|
|
41
|
+
// src/application/collab/appDeltaCache.ts
// Short-lived (5s) memoization of app-delta responses, keyed by app id plus
// every payload field that can influence the server's answer.
var APP_DELTA_CACHE_TTL_MS = 5e3;
var appDeltaCache = /* @__PURE__ */ new Map();
var cacheClock = () => Date.now();

// Builds a "|"-joined composite key; absent payload fields collapse to "".
function buildAppDeltaCacheKey(appId, payload) {
  const parts = [
    appId,
    payload.baseHeadHash,
    payload.targetHeadHash ?? "",
    payload.localSnapshotHash ?? "",
    payload.repoFingerprint ?? "",
    payload.remoteUrl ?? "",
    payload.defaultBranch ?? ""
  ];
  return parts.join("|");
}

// Returns the cached value, or undefined on miss; expired entries are
// evicted eagerly during the read.
function readAppDeltaCache(key) {
  const hit = appDeltaCache.get(key);
  if (!hit) return undefined;
  const age = cacheClock() - hit.cachedAt;
  if (age > APP_DELTA_CACHE_TTL_MS) {
    appDeltaCache.delete(key);
    return undefined;
  }
  return hit.value;
}

// Stores a value stamped with the current clock.
function writeAppDeltaCache(key, value) {
  appDeltaCache.set(key, { value, cachedAt: cacheClock() });
}

// Cache-through wrapper around api.getAppDelta.
async function getAppDeltaCached(api, appId, payload) {
  const cacheKey = buildAppDeltaCacheKey(appId, payload);
  const hit = readAppDeltaCache(cacheKey);
  if (hit !== undefined) return hit;
  const response = await api.getAppDelta(appId, payload);
  writeAppDeltaCache(cacheKey, response);
  return response;
}

// Drops every cached delta belonging to the given app (keys share the
// `${appId}|` prefix by construction).
function invalidateAppDeltaCacheForApp(appId) {
  const appPrefix = `${appId}|`;
  for (const cacheKey of appDeltaCache.keys()) {
    if (cacheKey.startsWith(appPrefix)) appDeltaCache.delete(cacheKey);
  }
}
|
|
82
|
+
|
|
83
|
+
// src/application/collab/appHeadCache.ts
// Short-lived (5s) memoization of per-app head lookups, keyed by app id.
var APP_HEAD_CACHE_TTL_MS = 5e3;
var appHeadCache = /* @__PURE__ */ new Map();
var cacheClock2 = () => Date.now();

// Returns the cached head for appId, evicting-and-missing on expiry.
function readAppHeadCache(appId) {
  const hit = appHeadCache.get(appId);
  if (!hit) return undefined;
  if (cacheClock2() - hit.cachedAt > APP_HEAD_CACHE_TTL_MS) {
    appHeadCache.delete(appId);
    return undefined;
  }
  return hit.value;
}

// Stores a head value stamped with the current clock.
function writeAppHeadCache(appId, value) {
  appHeadCache.set(appId, { value, cachedAt: cacheClock2() });
}

// Cache-through wrapper around api.getAppHead.
async function getAppHeadCached(api, appId) {
  const hit = readAppHeadCache(appId);
  if (hit !== undefined) return hit;
  const response = await api.getAppHead(appId);
  writeAppHeadCache(appId, response);
  return response;
}

// Forgets the cached head for a single app.
function invalidateAppHeadCache(appId) {
  appHeadCache.delete(appId);
}
|
|
109
|
+
|
|
41
110
|
// src/application/collab/branchPolicy.ts
|
|
42
111
|
function describeBranch(value) {
|
|
43
112
|
const normalized = String(value ?? "").trim();
|
|
@@ -71,7 +140,8 @@ function assertBoundBranchMatch(params) {
|
|
|
71
140
|
});
|
|
72
141
|
}
|
|
73
142
|
|
|
74
|
-
// src/infrastructure/collab/
|
|
143
|
+
// src/infrastructure/collab/asyncJobStore.ts
|
|
144
|
+
import { randomUUID } from "crypto";
|
|
75
145
|
import fs from "fs/promises";
|
|
76
146
|
import path2 from "path";
|
|
77
147
|
|
|
@@ -110,14 +180,317 @@ function getBaselinesRoot() {
|
|
|
110
180
|
function getFinalizeQueueRoot() {
|
|
111
181
|
return path.join(getCollabStateRoot(), "finalize-queue");
|
|
112
182
|
}
|
|
183
|
+
// Filesystem layout helpers for async-job state under the collab state root.
function getAsyncJobsRoot() {
  return path.join(getCollabStateRoot(), "jobs");
}
// One directory per job id.
function getAsyncJobDir(jobId) {
  return path.join(getAsyncJobsRoot(), jobId);
}
// Per-job metadata record.
function getAsyncJobFilePath(jobId) {
  return path.join(getAsyncJobDir(jobId), "job.json");
}
// Per-job bundle artifact.
function getAsyncJobBundlePath(jobId) {
  return path.join(getAsyncJobDir(jobId), "bundle.bundle");
}
// Shared log directory for background workers.
function getLogsRoot() {
  return path.join(getCollabStateRoot(), "logs");
}
function getDrainerLogPath() {
  return path.join(getLogsRoot(), "drainer.log");
}
function getDrainerPidPath() {
  return path.join(getCollabStateRoot(), "drainer.pid");
}
|
|
204
|
+
|
|
205
|
+
// src/infrastructure/collab/asyncJobStore.ts
// A lock left untouched for 10 minutes is considered abandoned.
var ASYNC_JOB_LOCK_STALE_MS = 10 * 60 * 1e3;
// Completed/failed jobs are retained for 24h before pruning.
var TERMINAL_ASYNC_JOB_RETENTION_MS = 24 * 60 * 60 * 1e3;

// Lock directory marking a job as claimed by a worker.
function getJobLockPath(id) {
  return path2.join(getAsyncJobDir(id), "lock");
}

// Terminal statuses never transition again.
function isTerminalStatus(status) {
  return ["completed", "failed"].includes(status);
}

// Statuses indicating a worker is actively moving the job forward.
function isProcessingStatus(status) {
  return ["submitting", "uploading", "server_processing"].includes(status);
}

// Baseline summary for a repo with no live jobs.
function createEmptySummary() {
  return {
    state: "idle",
    activeJobCount: 0,
    queuedJobCount: 0,
    processingJobCount: 0,
    failedJobCount: 0,
    oldestCreatedAt: null,
    newestCreatedAt: null,
    latestError: null,
    kinds: []
  };
}
|
|
230
|
+
// Fills defaults for a raw job input and returns the canonical persisted
// shape. Missing id/timestamps are generated; retryCount is clamped to a
// finite non-negative number; any unrecognized kind is coerced to
// "re_anchor" (matching the original fall-through branch).
function normalizeJob(input) {
  const nowIso = (/* @__PURE__ */ new Date()).toISOString();
  const retryCount = Number.isFinite(input.retryCount) ? Math.max(0, Number(input.retryCount)) : 0;
  const kind = input.kind === "init" || input.kind === "init_post" ? input.kind : "re_anchor";
  return {
    schemaVersion: 1,
    id: input.id ?? randomUUID(),
    kind,
    status: input.status,
    repoRoot: input.repoRoot,
    repoFingerprint: input.repoFingerprint ?? null,
    branchName: input.branchName ?? null,
    laneId: input.laneId ?? null,
    createdAt: input.createdAt ?? nowIso,
    updatedAt: input.updatedAt ?? nowIso,
    retryCount,
    error: input.error ?? null,
    idempotencyKey: input.idempotencyKey ?? null,
    payload: input.payload
  };
}
|
|
255
|
+
// Persists a normalized job under its own directory via an atomic JSON
// write, and returns the stored record.
async function enqueueAsyncJob(input) {
  const job = normalizeJob(input);
  const jobDir = getAsyncJobDir(job.id);
  await fs.mkdir(jobDir, { recursive: true });
  await writeJsonAtomic(getAsyncJobFilePath(job.id), job);
  return job;
}
|
|
261
|
+
// Validates the minimal persisted-job envelope; anything malformed reads
// as null. On success the input object itself is returned.
function parseAsyncJob(raw) {
  if (raw === null || raw === undefined || typeof raw !== "object") return null;
  const candidate = raw;
  const validEnvelope = candidate.schemaVersion === 1 && typeof candidate.id === "string";
  if (!validEnvelope) return null;
  if (!["init", "init_post", "re_anchor"].includes(candidate.kind)) return null;
  const payload = candidate.payload;
  if (!payload || typeof payload !== "object") return null;
  return candidate;
}
|
|
270
|
+
// Loads and validates one job file. Missing, unreadable, or malformed
// files all read as null rather than throwing.
async function readAsyncJob(jobId) {
  try {
    const serialized = await fs.readFile(getAsyncJobFilePath(jobId), "utf8");
    return parseAsyncJob(JSON.parse(serialized));
  } catch {
    return null;
  }
}
|
|
279
|
+
// Lists every valid job: each subdirectory of the jobs root is a candidate,
// unreadable/malformed entries are dropped, and the result is ordered by
// createdAt (ISO timestamps sort lexicographically).
async function listAsyncJobs() {
  let dirents;
  try {
    dirents = await fs.readdir(getAsyncJobsRoot(), { withFileTypes: true });
  } catch (error) {
    if (error?.code !== "ENOENT") throw error;
    return [];
  }
  const candidates = dirents.filter((dirent) => dirent.isDirectory());
  const loaded = await Promise.all(candidates.map((dirent) => readAsyncJob(dirent.name)));
  const jobs = loaded.filter((job) => job !== null && job !== undefined);
  jobs.sort((a, b) => a.createdAt.localeCompare(b.createdAt));
  return jobs;
}
|
|
293
|
+
// Scopes the full job list to one repo root, optionally narrowing further
// by branch name and job kind. A job with a null branchName matches any
// requested branch (same as the original truthiness guard).
async function listAsyncJobsForRepo(params) {
  const allJobs = await listAsyncJobs();
  const matchesScope = (job) =>
    job.repoRoot === params.repoRoot &&
    !(params.branchName && job.branchName && job.branchName !== params.branchName) &&
    !(params.kind && job.kind !== params.kind);
  return allJobs.filter(matchesScope);
}
|
|
302
|
+
// First non-terminal job (createdAt order) for the given repo scope, or null.
async function findPendingAsyncJob(params) {
  const scoped = await listAsyncJobsForRepo({
    repoRoot: params.repoRoot,
    branchName: params.branchName,
    kind: params.kind
  });
  const pending = scoped.find((job) => !isTerminalStatus(job.status));
  return pending ?? null;
}
// First failed job (createdAt order) for the given repo scope, or null.
async function findFailedAsyncJob(params) {
  const scoped = await listAsyncJobsForRepo({
    repoRoot: params.repoRoot,
    branchName: params.branchName,
    kind: params.kind
  });
  const failed = scoped.find((job) => job.status === "failed");
  return failed ?? null;
}
|
|
318
|
+
// Read-modify-write of a persisted job. Fields from `update` are merged
// over the existing record, but the immutable envelope (schemaVersion, id,
// kind, createdAt) is re-pinned afterwards so a caller cannot change it,
// and updatedAt is always refreshed. Returns the stored job, or null when
// jobId does not resolve to a valid job file.
//
// The previous implementation repeated this merge once per job kind with
// byte-identical bodies (only the kind literal differed, and it always
// equaled existing.kind); a single merge pinned to existing.kind is
// behaviorally equivalent and removes the triplication.
async function updateAsyncJob(jobId, update) {
  const existing = await readAsyncJob(jobId);
  if (!existing) return null;
  const now = (/* @__PURE__ */ new Date()).toISOString();
  const next = {
    ...existing,
    ...update,
    schemaVersion: 1,
    id: existing.id,
    kind: existing.kind,
    createdAt: existing.createdAt,
    updatedAt: now,
    // payload falls back to the existing one when the update omits it.
    payload: update.payload ?? existing.payload
  };
  await writeJsonAtomic(getAsyncJobFilePath(jobId), next);
  return next;
}
|
|
366
|
+
// Removes the whole job directory; `force` makes a missing directory a no-op.
async function deleteAsyncJob(jobId) {
  const jobDir = getAsyncJobDir(jobId);
  await fs.rm(jobDir, { recursive: true, force: true });
}
|
|
369
|
+
// Directory-as-mutex: mkdir is atomic, so whoever creates the lock dir owns
// the job. A lock older than ASYNC_JOB_LOCK_STALE_MS is treated as
// abandoned and may be cleared, after which one more acquisition attempt
// is made. Returns true when this caller holds the lock.
async function acquireJobLock(jobId) {
  const lockPath = getJobLockPath(jobId);
  await fs.mkdir(getAsyncJobDir(jobId), { recursive: true });
  const tryCreateLock = async () => {
    try {
      await fs.mkdir(lockPath);
      return true;
    } catch (error) {
      if (error?.code !== "EEXIST") throw error;
      return false;
    }
  };
  if (await tryCreateLock()) return true;
  // Lock already exists: check staleness and clear it if abandoned.
  try {
    const lockStat = await fs.stat(lockPath);
    if (Date.now() - lockStat.mtimeMs < ASYNC_JOB_LOCK_STALE_MS) return false;
    await fs.rm(lockPath, { recursive: true, force: true });
  } catch (error) {
    // ENOENT means the holder released between our checks; retry below.
    if (error?.code !== "ENOENT") throw error;
  }
  // One retry after clearing; losing the race again means another worker won.
  return tryCreateLock();
}
|
|
393
|
+
// Attempts to claim a job for processing. Returns { job, release } when
// this process wins the lock and the job is still live; null otherwise.
// release() is idempotent and best-effort (removal errors are swallowed).
async function claimAsyncJob(jobId) {
  const acquired = await acquireJobLock(jobId);
  if (!acquired) return null;
  const lockPath = getJobLockPath(jobId);
  let lockHeld = true;
  const release = async () => {
    if (!lockHeld) return;
    lockHeld = false;
    await fs.rm(lockPath, { recursive: true, force: true }).catch(() => void 0);
  };
  try {
    const job = await readAsyncJob(jobId);
    // A missing or already-terminal job is not claimable: drop the lock.
    if (!job || isTerminalStatus(job.status)) {
      await release();
      return null;
    }
    return { job, release };
  } catch (error) {
    await release();
    throw error;
  }
}
|
|
419
|
+
// Aggregates job state for one repo scope. "Relevant" jobs are everything
// not yet terminal, plus failed terminal jobs (surfaced so callers can show
// the failure until it is pruned or retried).
async function summarizeAsyncJobs(params) {
  const jobs = await listAsyncJobsForRepo({ repoRoot: params.repoRoot, branchName: params.branchName ?? null });
  const relevant = jobs.filter((job) => job.status === "failed" || !isTerminalStatus(job.status));
  const summary = createEmptySummary();
  if (relevant.length === 0) return summary;
  const seenKinds = new Set();
  for (const job of relevant) {
    seenKinds.add(job.kind);
    // Later jobs overwrite: latestError ends up from the last erroring job.
    if (job.error) summary.latestError = job.error;
    if (job.status === "failed") summary.failedJobCount += 1;
    else if (isProcessingStatus(job.status)) summary.processingJobCount += 1;
    else summary.queuedJobCount += 1;
  }
  summary.activeJobCount = summary.queuedJobCount + summary.processingJobCount;
  summary.kinds = [...seenKinds];
  // Jobs arrive sorted by createdAt, so the list ends are the extremes.
  summary.oldestCreatedAt = relevant[0]?.createdAt ?? null;
  summary.newestCreatedAt = relevant[relevant.length - 1]?.createdAt ?? null;
  if (summary.processingJobCount > 0) summary.state = "processing";
  else if (summary.queuedJobCount > 0) summary.state = "queued";
  else if (summary.failedJobCount > 0) summary.state = "failed";
  return summary;
}
|
|
445
|
+
// Deletes terminal (completed/failed) jobs once they exceed the retention
// window; a job whose updatedAt does not parse is deleted immediately.
async function pruneTerminalAsyncJobs() {
  const jobs = await listAsyncJobs();
  const now = Date.now();
  const expired = jobs.filter((job) => {
    if (!isTerminalStatus(job.status)) return false;
    const updatedMs = Date.parse(job.updatedAt);
    if (!Number.isFinite(updatedMs)) return true;
    return now - updatedMs >= TERMINAL_ASYNC_JOB_RETENTION_MS;
  });
  await Promise.all(expired.map((job) => deleteAsyncJob(job.id)));
}
|
|
457
|
+
// Polls the job file until it reaches a terminal status or the deadline
// passes. A job file that cannot be read reports "timeout" immediately,
// carrying the last snapshot we observed (or null if never seen).
async function awaitAsyncJob(params) {
  const pollIntervalMs = Math.max(50, params.pollIntervalMs ?? 500);
  const deadline = Date.now() + params.timeoutMs;
  let lastSeen = null;
  const classify = (job) => {
    if (job.status === "completed") return { status: "completed", job };
    if (job.status === "failed") return { status: "failed", job };
    return null;
  };
  while (Date.now() < deadline) {
    const job = await readAsyncJob(params.jobId);
    if (!job) return { status: "timeout", job: lastSeen };
    lastSeen = job;
    const terminal = classify(job);
    if (terminal) return terminal;
    const remaining = deadline - Date.now();
    if (remaining <= 0) break;
    // Never sleep past the deadline.
    await new Promise((resolve) => setTimeout(resolve, Math.min(pollIntervalMs, remaining)));
  }
  // Deadline hit: take one final reading before reporting.
  const finalJob = await readAsyncJob(params.jobId);
  if (!finalJob) return { status: "timeout", job: lastSeen };
  return classify(finalJob) ?? { status: "timeout", job: finalJob };
}
|
|
113
484
|
|
|
114
485
|
// src/infrastructure/collab/localBaselineStore.ts
|
|
486
|
+
import fs2 from "fs/promises";
|
|
487
|
+
import path3 from "path";
|
|
115
488
|
// One JSON baseline file per lane-state key under the baselines root.
function getBaselinePath(params) {
  const fileName = `${buildLaneStateKey(params)}.json`;
  return path3.join(getBaselinesRoot(), fileName);
}
|
|
118
491
|
async function readLocalBaseline(params) {
|
|
119
492
|
try {
|
|
120
|
-
const raw = await
|
|
493
|
+
const raw = await fs2.readFile(getBaselinePath(params), "utf8");
|
|
121
494
|
const parsed = JSON.parse(raw);
|
|
122
495
|
if (!parsed || typeof parsed !== "object") return null;
|
|
123
496
|
if (parsed.schemaVersion !== 1 || typeof parsed.key !== "string" || typeof parsed.repoRoot !== "string") {
|
|
@@ -162,19 +535,19 @@ async function writeLocalBaseline(baseline) {
|
|
|
162
535
|
}
|
|
163
536
|
|
|
164
537
|
// src/infrastructure/collab/localSnapshotStore.ts
|
|
165
|
-
import { createHash as createHash2, randomUUID } from "crypto";
|
|
166
|
-
import
|
|
538
|
+
import { createHash as createHash2, randomUUID as randomUUID2 } from "crypto";
|
|
539
|
+
import fs3 from "fs/promises";
|
|
167
540
|
import os2 from "os";
|
|
168
|
-
import
|
|
541
|
+
import path4 from "path";
|
|
169
542
|
import { execa } from "execa";
|
|
170
543
|
// Hex-encoded SHA-256 digest of the given string/Buffer.
function sha256Hex2(value) {
  const hasher = createHash2("sha256");
  hasher.update(value);
  return hasher.digest("hex");
}
|
|
173
546
|
// Snapshot metadata lives as <id>.json in the records root.
function getSnapshotRecordPath(snapshotId) {
  return path4.join(getSnapshotRecordsRoot(), `${snapshotId}.json`);
}
// Content-addressed layout: blobs are fanned out by their first two hex
// characters to keep directory sizes bounded.
function getBlobPath(blobHash) {
  const fanout = blobHash.slice(0, 2);
  return path4.join(getSnapshotBlobsRoot(), fanout, blobHash);
}
|
|
179
552
|
async function runGitZ(args, cwd) {
|
|
180
553
|
const res = await execa("git", args, {
|
|
@@ -191,9 +564,9 @@ async function listWorkspaceFiles(repoRoot) {
|
|
|
191
564
|
for (const entry of raw.split("\0")) {
|
|
192
565
|
const relativePath = entry.trim();
|
|
193
566
|
if (!relativePath || seen.has(relativePath)) continue;
|
|
194
|
-
const absolutePath =
|
|
567
|
+
const absolutePath = path4.join(repoRoot, relativePath);
|
|
195
568
|
try {
|
|
196
|
-
const stat = await
|
|
569
|
+
const stat = await fs3.lstat(absolutePath);
|
|
197
570
|
if (stat.isFile() || stat.isSymbolicLink()) {
|
|
198
571
|
seen.add(relativePath);
|
|
199
572
|
result.push(relativePath);
|
|
@@ -206,13 +579,13 @@ async function listWorkspaceFiles(repoRoot) {
|
|
|
206
579
|
// Content-addressed write: an already-present blob is left untouched;
// otherwise the fan-out directory is created and the content written
// (utf8 for strings, raw bytes otherwise).
async function persistBlob(blobHash, content) {
  const blobPath = getBlobPath(blobHash);
  try {
    await fs3.access(blobPath);
    return;
  } catch {
    // Fall through: blob not present yet.
  }
  await fs3.mkdir(path4.dirname(blobPath), { recursive: true });
  if (typeof content === "string") {
    await fs3.writeFile(blobPath, content, "utf8");
  } else {
    await fs3.writeFile(blobPath, content);
  }
}
|
|
@@ -225,10 +598,10 @@ async function inspectLocalSnapshot(params) {
|
|
|
225
598
|
const files = await listWorkspaceFiles(repoRoot);
|
|
226
599
|
const manifest = [];
|
|
227
600
|
for (const relativePath of files) {
|
|
228
|
-
const absolutePath =
|
|
229
|
-
const stat = await
|
|
601
|
+
const absolutePath = path4.join(repoRoot, relativePath);
|
|
602
|
+
const stat = await fs3.lstat(absolutePath);
|
|
230
603
|
if (stat.isSymbolicLink()) {
|
|
231
|
-
const linkTarget = await
|
|
604
|
+
const linkTarget = await fs3.readlink(absolutePath);
|
|
232
605
|
const blobHash2 = sha256Hex2(`symlink:${linkTarget}`);
|
|
233
606
|
if (params.persistBlobs !== false) {
|
|
234
607
|
await persistBlob(blobHash2, linkTarget);
|
|
@@ -241,7 +614,7 @@ async function inspectLocalSnapshot(params) {
|
|
|
241
614
|
});
|
|
242
615
|
continue;
|
|
243
616
|
}
|
|
244
|
-
const content = await
|
|
617
|
+
const content = await fs3.readFile(absolutePath);
|
|
245
618
|
const blobHash = sha256Hex2(content);
|
|
246
619
|
if (params.persistBlobs !== false) {
|
|
247
620
|
await persistBlob(blobHash, content);
|
|
@@ -269,7 +642,7 @@ async function captureLocalSnapshot(params) {
|
|
|
269
642
|
const inspection = await inspectLocalSnapshot({ ...params, persistBlobs: true });
|
|
270
643
|
const snapshot = {
|
|
271
644
|
schemaVersion: 1,
|
|
272
|
-
id:
|
|
645
|
+
id: randomUUID2(),
|
|
273
646
|
...inspection
|
|
274
647
|
};
|
|
275
648
|
await writeJsonAtomic(getSnapshotRecordPath(snapshot.id), snapshot);
|
|
@@ -278,7 +651,7 @@ async function captureLocalSnapshot(params) {
|
|
|
278
651
|
async function readLocalSnapshot(snapshotId) {
|
|
279
652
|
if (!snapshotId) return null;
|
|
280
653
|
try {
|
|
281
|
-
const raw = await
|
|
654
|
+
const raw = await fs3.readFile(getSnapshotRecordPath(snapshotId), "utf8");
|
|
282
655
|
const parsed = JSON.parse(raw);
|
|
283
656
|
if (!parsed || parsed.schemaVersion !== 1) return null;
|
|
284
657
|
return parsed;
|
|
@@ -288,71 +661,71 @@ async function readLocalSnapshot(snapshotId) {
|
|
|
288
661
|
}
|
|
289
662
|
// Reconstructs a snapshot's files under targetDir from the blob store.
// An unknown snapshot id still creates targetDir but writes nothing.
async function materializeLocalSnapshot(snapshotId, targetDir) {
  const snapshot = await readLocalSnapshot(snapshotId);
  await fs3.mkdir(targetDir, { recursive: true });
  if (!snapshot) return;
  const writeEntry = async (entry) => {
    const destination = path4.join(targetDir, entry.path);
    await fs3.mkdir(path4.dirname(destination), { recursive: true });
    const blobPath = getBlobPath(entry.blobHash);
    if (entry.mode === "symlink") {
      // Symlink blobs store the link target as utf8 text.
      const target = await fs3.readFile(blobPath, "utf8");
      await fs3.symlink(target, destination);
      return;
    }
    await fs3.copyFile(blobPath, destination);
    // 493 === 0o755: restore the executable bit.
    if (entry.mode === "executable") await fs3.chmod(destination, 493);
  };
  for (const entry of snapshot.files) {
    await writeEntry(entry);
  }
}
|
|
308
681
|
// Walks upward from filePath's directory, removing now-empty directories,
// stopping at repoRoot or the first non-empty/unreadable directory.
// Removal itself is best-effort (rmdir failures are ignored).
async function pruneEmptyParentDirectories(repoRoot, filePath) {
  let cursor = path4.dirname(filePath);
  while (cursor !== repoRoot) {
    const children = await fs3.readdir(cursor).catch(() => null);
    const isEmpty = children !== null && children.length === 0;
    if (!isEmpty) return;
    await fs3.rmdir(cursor).catch(() => void 0);
    cursor = path4.dirname(cursor);
  }
}
|
|
317
690
|
// Makes the worktree match the snapshot exactly: files not in the snapshot
// are deleted (with empty parent dirs pruned), then every snapshot entry is
// rewritten from the blob store. A missing snapshot only runs the delete
// pass against an empty desired set.
async function restoreLocalSnapshotToWorktree(snapshotId, repoRoot) {
  const snapshot = await readLocalSnapshot(snapshotId);
  await fs3.mkdir(repoRoot, { recursive: true });
  const wanted = new Set(snapshot?.files.map((entry) => entry.path) ?? []);
  const present = await listWorkspaceFiles(repoRoot);
  for (const relativePath of present) {
    if (wanted.has(relativePath)) continue;
    const absolutePath = path4.join(repoRoot, relativePath);
    await fs3.rm(absolutePath, { recursive: true, force: true }).catch(() => void 0);
    await pruneEmptyParentDirectories(repoRoot, absolutePath);
  }
  if (!snapshot) return;
  for (const entry of snapshot.files) {
    const destination = path4.join(repoRoot, entry.path);
    await fs3.mkdir(path4.dirname(destination), { recursive: true });
    // Clear whatever occupies the path so copy/symlink never collides.
    await fs3.rm(destination, { recursive: true, force: true }).catch(() => void 0);
    const blobPath = getBlobPath(entry.blobHash);
    if (entry.mode === "symlink") {
      const target = await fs3.readFile(blobPath, "utf8");
      await fs3.symlink(target, destination);
    } else {
      await fs3.copyFile(blobPath, destination);
      // 493 === 0o755: restore executable permission.
      if (entry.mode === "executable") await fs3.chmod(destination, 493);
    }
  }
}
|
|
345
718
|
// Empties targetDir while preserving the .git directory.
async function clearDirectoryExceptGit(targetDir) {
  const dirents = await fs3.readdir(targetDir, { withFileTypes: true });
  const toDelete = dirents.filter((dirent) => dirent.name !== ".git");
  for (const dirent of toDelete) {
    await fs3.rm(path4.join(targetDir, dirent.name), { recursive: true, force: true });
  }
}
|
|
352
725
|
async function diffLocalSnapshots(params) {
|
|
353
|
-
const tempRoot = await
|
|
354
|
-
const repoDir =
|
|
355
|
-
await
|
|
726
|
+
const tempRoot = await fs3.mkdtemp(path4.join(os2.tmpdir(), "remix-snapshot-diff-"));
|
|
727
|
+
const repoDir = path4.join(tempRoot, "repo");
|
|
728
|
+
await fs3.mkdir(repoDir, { recursive: true });
|
|
356
729
|
try {
|
|
357
730
|
await materializeLocalSnapshot(params.baseSnapshotId, repoDir);
|
|
358
731
|
await execa("git", ["init"], { cwd: repoDir, stderr: "ignore" });
|
|
@@ -388,21 +761,22 @@ async function diffLocalSnapshots(params) {
|
|
|
388
761
|
stats: summarizeUnifiedDiff(diff)
|
|
389
762
|
};
|
|
390
763
|
} finally {
|
|
391
|
-
await
|
|
764
|
+
await fs3.rm(tempRoot, { recursive: true, force: true });
|
|
392
765
|
}
|
|
393
766
|
}
|
|
394
767
|
|
|
395
768
|
// src/infrastructure/collab/pendingFinalizeQueue.ts
|
|
396
|
-
import { randomUUID as
|
|
397
|
-
import
|
|
398
|
-
import
|
|
399
|
-
var FINALIZE_JOB_LOCK_STALE_MS =
|
|
769
|
+
import { randomUUID as randomUUID3 } from "crypto";
|
|
770
|
+
import fs4 from "fs/promises";
|
|
771
|
+
import path5 from "path";
|
|
772
|
+
// A finalize lock untouched for 90s is considered stale.
var FINALIZE_JOB_LOCK_STALE_MS = 90 * 1e3;
// An attempt older than 10 minutes is considered stale.
var FINALIZE_ATTEMPT_STALE_MS = 10 * 60 * 1e3;
// Terminal finalize jobs are retained for 24h before pruning.
var TERMINAL_FINALIZE_JOB_RETENTION_MS = 24 * 60 * 60 * 1e3;
// Job record (<id>.json) and lock (<id>.lock) live side by side in the
// finalize queue directory.
function getJobPath(id) {
  return path5.join(getFinalizeQueueRoot(), `${id}.json`);
}
function getJobLockPath2(id) {
  return path5.join(getFinalizeQueueRoot(), `${id}.lock`);
}
|
|
407
781
|
function isPastDue(isoTimestamp) {
|
|
408
782
|
if (!isoTimestamp) return true;
|
|
@@ -414,7 +788,7 @@ function isStaleAttempt(job) {
|
|
|
414
788
|
if (!job.lastAttemptAt) return true;
|
|
415
789
|
const parsed = Date.parse(job.lastAttemptAt);
|
|
416
790
|
if (!Number.isFinite(parsed)) return true;
|
|
417
|
-
return Date.now() - parsed >=
|
|
791
|
+
return Date.now() - parsed >= FINALIZE_ATTEMPT_STALE_MS;
|
|
418
792
|
}
|
|
419
793
|
function readMetadataDisposition(job) {
|
|
420
794
|
const value = job.metadata.failureDisposition;
|
|
@@ -443,6 +817,7 @@ function createEmptyPendingFinalizeQueueSummary() {
|
|
|
443
817
|
queuedJobCount: 0,
|
|
444
818
|
processingJobCount: 0,
|
|
445
819
|
retryScheduledJobCount: 0,
|
|
820
|
+
awaitingUsageJobCount: 0,
|
|
446
821
|
failedJobCount: 0,
|
|
447
822
|
oldestCapturedAt: null,
|
|
448
823
|
newestCapturedAt: null,
|
|
@@ -450,10 +825,10 @@ function createEmptyPendingFinalizeQueueSummary() {
|
|
|
450
825
|
latestError: null
|
|
451
826
|
};
|
|
452
827
|
}
|
|
453
|
-
async function
|
|
454
|
-
const lockPath =
|
|
828
|
+
async function acquireJobLock2(jobId) {
|
|
829
|
+
const lockPath = getJobLockPath2(jobId);
|
|
455
830
|
try {
|
|
456
|
-
await
|
|
831
|
+
await fs4.mkdir(lockPath);
|
|
457
832
|
return true;
|
|
458
833
|
} catch (error) {
|
|
459
834
|
if (error?.code !== "EEXIST") {
|
|
@@ -461,18 +836,18 @@ async function acquireJobLock(jobId) {
|
|
|
461
836
|
}
|
|
462
837
|
}
|
|
463
838
|
try {
|
|
464
|
-
const stat = await
|
|
839
|
+
const stat = await fs4.stat(lockPath);
|
|
465
840
|
if (Date.now() - stat.mtimeMs < FINALIZE_JOB_LOCK_STALE_MS) {
|
|
466
841
|
return false;
|
|
467
842
|
}
|
|
468
|
-
await
|
|
843
|
+
await fs4.rm(lockPath, { recursive: true, force: true });
|
|
469
844
|
} catch (error) {
|
|
470
845
|
if (error?.code !== "ENOENT") {
|
|
471
846
|
throw error;
|
|
472
847
|
}
|
|
473
848
|
}
|
|
474
849
|
try {
|
|
475
|
-
await
|
|
850
|
+
await fs4.mkdir(lockPath);
|
|
476
851
|
return true;
|
|
477
852
|
} catch (error) {
|
|
478
853
|
if (error?.code === "EEXIST") {
|
|
@@ -481,11 +856,52 @@ async function acquireJobLock(jobId) {
|
|
|
481
856
|
throw error;
|
|
482
857
|
}
|
|
483
858
|
}
|
|
484
|
-
function
|
|
859
|
+
// Refreshes the lock directory's mtime so other workers keep treating the
// lock as live. Errors are deliberately ignored: a missing lock just means
// the heartbeat is moot.
async function heartbeatJobLock(jobId) {
  const lockPath = getJobLockPath2(jobId);
  const timestamp = /* @__PURE__ */ new Date();
  try {
    await fs4.utimes(lockPath, timestamp, timestamp);
  } catch {
    // best-effort heartbeat
  }
}
|
|
867
|
+
// Sweeps the finalize queue for *.lock directories and removes those whose
// mtime exceeds the staleness threshold. Returns which job ids were
// removed, which were skipped (still fresh), and per-lock errors.
async function cleanStaleFinalizeJobLocks() {
  const removed = [];
  const skipped = [];
  const errors = [];
  let dirents;
  try {
    dirents = await fs4.readdir(getFinalizeQueueRoot(), { withFileTypes: true });
  } catch (error) {
    // No queue directory yet means nothing to clean.
    if (error?.code === "ENOENT") return { removed, skipped, errors };
    throw error;
  }
  const lockDirs = dirents.filter((dirent) => dirent.isDirectory() && dirent.name.endsWith(".lock"));
  for (const dirent of lockDirs) {
    const jobId = dirent.name.replace(/\.lock$/, "");
    const lockPath = path5.join(getFinalizeQueueRoot(), dirent.name);
    try {
      const lockStat = await fs4.stat(lockPath);
      const ageMs = Date.now() - lockStat.mtimeMs;
      if (ageMs < FINALIZE_JOB_LOCK_STALE_MS) {
        skipped.push({ jobId, reason: `fresh (age=${Math.round(ageMs / 1e3)}s)` });
        continue;
      }
      await fs4.rm(lockPath, { recursive: true, force: true });
      removed.push(jobId);
    } catch (error) {
      errors.push({ jobId, error: error instanceof Error ? error.message : String(error) });
    }
  }
  return { removed, skipped, errors };
}
|
|
900
|
+
function normalizeJob2(input) {
|
|
485
901
|
const now = (/* @__PURE__ */ new Date()).toISOString();
|
|
486
902
|
return {
|
|
487
903
|
schemaVersion: 1,
|
|
488
|
-
id: input.id ??
|
|
904
|
+
id: input.id ?? randomUUID3(),
|
|
489
905
|
status: input.status,
|
|
490
906
|
repoRoot: input.repoRoot,
|
|
491
907
|
repoFingerprint: input.repoFingerprint ?? null,
|
|
@@ -509,16 +925,16 @@ function normalizeJob(input) {
|
|
|
509
925
|
};
|
|
510
926
|
}
|
|
511
927
|
// Normalize the caller-supplied job payload and persist it atomically into
// the finalize queue; returns the normalized job record.
async function enqueuePendingFinalizeJob(input) {
  const normalized = normalizeJob2(input);
  const destination = getJobPath(normalized.id);
  await writeJsonAtomic(destination, normalized);
  return normalized;
}
|
|
516
932
|
async function readPendingFinalizeJob(jobId) {
|
|
517
933
|
try {
|
|
518
|
-
const raw = await
|
|
934
|
+
const raw = await fs4.readFile(getJobPath(jobId), "utf8");
|
|
519
935
|
const parsed = JSON.parse(raw);
|
|
520
936
|
if (!parsed || parsed.schemaVersion !== 1 || typeof parsed.id !== "string") return null;
|
|
521
|
-
return
|
|
937
|
+
return normalizeJob2({
|
|
522
938
|
id: parsed.id,
|
|
523
939
|
status: parsed.status ?? "queued",
|
|
524
940
|
repoRoot: String(parsed.repoRoot ?? ""),
|
|
@@ -547,7 +963,7 @@ async function readPendingFinalizeJob(jobId) {
|
|
|
547
963
|
}
|
|
548
964
|
async function listPendingFinalizeJobs() {
|
|
549
965
|
try {
|
|
550
|
-
const entries = await
|
|
966
|
+
const entries = await fs4.readdir(getFinalizeQueueRoot(), { withFileTypes: true });
|
|
551
967
|
const jobs = await Promise.all(
|
|
552
968
|
entries.filter((entry) => entry.isFile() && entry.name.endsWith(".json")).map((entry) => readPendingFinalizeJob(entry.name.replace(/\.json$/, "")))
|
|
553
969
|
);
|
|
@@ -588,18 +1004,24 @@ async function summarizePendingFinalizeJobs(scope) {
|
|
|
588
1004
|
continue;
|
|
589
1005
|
}
|
|
590
1006
|
if (!isPastDue(job.nextRetryAt)) {
|
|
591
|
-
|
|
1007
|
+
if (job.retryCount === 0 && !job.error) {
|
|
1008
|
+
summary.awaitingUsageJobCount += 1;
|
|
1009
|
+
} else {
|
|
1010
|
+
summary.retryScheduledJobCount += 1;
|
|
1011
|
+
}
|
|
592
1012
|
continue;
|
|
593
1013
|
}
|
|
594
1014
|
summary.queuedJobCount += 1;
|
|
595
1015
|
}
|
|
596
|
-
summary.activeJobCount = summary.queuedJobCount + summary.processingJobCount + summary.retryScheduledJobCount;
|
|
1016
|
+
summary.activeJobCount = summary.queuedJobCount + summary.processingJobCount + summary.retryScheduledJobCount + summary.awaitingUsageJobCount;
|
|
597
1017
|
if (summary.processingJobCount > 0) {
|
|
598
1018
|
summary.state = "processing";
|
|
599
1019
|
} else if (summary.queuedJobCount > 0) {
|
|
600
1020
|
summary.state = "queued";
|
|
601
1021
|
} else if (summary.retryScheduledJobCount > 0) {
|
|
602
1022
|
summary.state = "retry_scheduled";
|
|
1023
|
+
} else if (summary.awaitingUsageJobCount > 0) {
|
|
1024
|
+
summary.state = "awaiting_usage";
|
|
603
1025
|
} else if (summary.failedJobCount > 0) {
|
|
604
1026
|
summary.state = "failed";
|
|
605
1027
|
}
|
|
@@ -621,14 +1043,14 @@ async function updatePendingFinalizeJob(jobId, update) {
|
|
|
621
1043
|
return next;
|
|
622
1044
|
}
|
|
623
1045
|
async function claimPendingFinalizeJob(jobId) {
|
|
624
|
-
const lockPath =
|
|
625
|
-
const lockAcquired = await
|
|
1046
|
+
const lockPath = getJobLockPath2(jobId);
|
|
1047
|
+
const lockAcquired = await acquireJobLock2(jobId);
|
|
626
1048
|
if (!lockAcquired) return null;
|
|
627
1049
|
let released = false;
|
|
628
1050
|
const release = async () => {
|
|
629
1051
|
if (released) return;
|
|
630
1052
|
released = true;
|
|
631
|
-
await
|
|
1053
|
+
await fs4.rm(lockPath, { recursive: true, force: true }).catch(() => void 0);
|
|
632
1054
|
};
|
|
633
1055
|
try {
|
|
634
1056
|
let existing = await readPendingFinalizeJob(jobId);
|
|
@@ -679,13 +1101,35 @@ async function claimPendingFinalizeJob(jobId) {
|
|
|
679
1101
|
}
|
|
680
1102
|
// Delete a finalize job's JSON record (tolerating an already-missing file)
// and best-effort drop its lock directory.
async function removePendingFinalizeJob(jobId) {
  try {
    await fs4.unlink(getJobPath(jobId));
  } catch (error) {
    // Already gone is fine; anything else is a real failure.
    if (error?.code !== "ENOENT") throw error;
  }
  // Lock cleanup is best-effort; ignore any error.
  await fs4.rm(getJobLockPath2(jobId), { recursive: true, force: true }).catch(() => void 0);
}
var forgetPendingFinalizeJob = removePendingFinalizeJob;
|
|
1113
|
+
// Operator-initiated retry: put an existing job back into "queued", clearing
// its error/retry schedule and resetting the failure-tracking metadata.
// Returns null when the job does not exist.
async function requeuePendingFinalizeJob(jobId) {
  const job = await readPendingFinalizeJob(jobId);
  if (!job) return null;
  // Drop any stale lock first so a drainer can claim the job right away.
  await fs4.rm(getJobLockPath2(jobId), { recursive: true, force: true }).catch(() => void 0);
  const metadata = {
    // Reset the auto-terminal counter so an operator-initiated retry gets a
    // fresh budget instead of being escalated to "failed" on the very next
    // attempt.
    consecutiveFailures: 0,
    consecutiveFailureReason: null,
    // Clear any previous terminal disposition so claim doesn't short-circuit.
    failureDisposition: null,
    failureReason: null,
    requeuedAt: (/* @__PURE__ */ new Date()).toISOString()
  };
  return updatePendingFinalizeJob(jobId, {
    status: "queued",
    error: null,
    nextRetryAt: null,
    metadata
  });
}
|
|
690
1134
|
|
|
691
1135
|
// src/application/collab/shared.ts
|
|
@@ -811,6 +1255,52 @@ async function pollAppReady(api, appId) {
|
|
|
811
1255
|
}
|
|
812
1256
|
throw new RemixError("Timed out waiting for app to become ready.", { exitCode: 1 });
|
|
813
1257
|
}
|
|
1258
|
+
// Default wait for an app import to finish, and the point at which a
// "still waiting" warning is emitted.
var DEFAULT_APP_IMPORTED_TIMEOUT_MS = 6e4;
var APP_IMPORTED_SLOW_WARNING_MS = 3e4;
// Resolve the import-poll timeout: an explicit positive finite override wins,
// then the REMIX_INIT_POLL_TIMEOUT_MS environment variable, then the default.
function resolveImportedTimeoutMs(override) {
  const isUsable = (value) => typeof value === "number" && Number.isFinite(value) && value > 0;
  if (isUsable(override)) return override;
  const envValue = Number(process.env.REMIX_INIT_POLL_TIMEOUT_MS);
  return isUsable(envValue) ? envValue : DEFAULT_APP_IMPORTED_TIMEOUT_MS;
}
|
|
1266
|
+
// Poll the API until the app reports a head commit (import complete), with
// exponential backoff capped at 2s. Emits a one-time slow warning (or calls
// opts.onSlow) once the wait crosses the slow threshold, throws on an app
// "error" status, and throws on overall timeout.
async function pollAppImported(api, appId, opts) {
  const timeoutMs = resolveImportedTimeoutMs(opts?.timeoutMs);
  const slowWarningMs = opts?.slowWarningMs ?? APP_IMPORTED_SLOW_WARNING_MS;
  const startedAt = Date.now();
  let pollDelayMs = 500;
  let slowWarningFired = false;
  while (Date.now() - startedAt < timeoutMs) {
    const app = unwrapResponseObject(await api.getApp(appId), "app");
    const status = typeof app.status === "string" ? app.status : "";
    if (status === "error") {
      throw new RemixError("App is in error state.", {
        exitCode: 1,
        hint: typeof app.statusError === "string" ? app.statusError : null
      });
    }
    // A non-empty headCommitId means the backend finished importing.
    if (typeof app.headCommitId === "string" && app.headCommitId) return app;
    const elapsed = Date.now() - startedAt;
    if (!slowWarningFired && elapsed >= slowWarningMs) {
      slowWarningFired = true;
      if (opts?.onSlow) {
        opts.onSlow(elapsed);
      } else {
        console.warn(
          `[remix init] still waiting for app import (>${Math.round(elapsed / 1e3)}s); will time out at ${Math.round(timeoutMs / 1e3)}s`
        );
      }
    }
    await sleep(pollDelayMs);
    pollDelayMs = Math.min(2e3, Math.floor(pollDelayMs * 1.4));
  }
  throw new RemixError("Timed out waiting for app to be imported.", {
    exitCode: 1,
    hint: `head_commit_id was not set within ${Math.round(timeoutMs / 1e3)}s. The backend import worker may have hung; check the app's status in the dashboard, or set REMIX_INIT_POLL_TIMEOUT_MS to a higher value if the bundle is unusually large.`
  });
}
|
|
814
1304
|
async function pollChangeStep(api, appId, changeStepId) {
|
|
815
1305
|
const started = Date.now();
|
|
816
1306
|
let delay = 1500;
|
|
@@ -997,8 +1487,39 @@ function buildAmbiguousResolution(params) {
|
|
|
997
1487
|
defaultBranch: params.lane.defaultBranch ?? params.state.defaultBranch
|
|
998
1488
|
};
|
|
999
1489
|
}
|
|
1490
|
+
// Short-lived (5s TTL) memoization of lane-binding resolutions, keyed by repo
// root, normalized branch, and API availability. Only successful ("resolved")
// outcomes are cached so transient failures are always re-attempted.
var RESOLUTION_CACHE_TTL_MS = 5e3;
var resolutionCache = /* @__PURE__ */ new Map();
var cacheClock3 = () => Date.now();
function buildResolutionCacheKey(params) {
  return `${params.repoRoot}|${params.currentBranch ?? ""}|${params.hasApi ? "1" : "0"}`;
}
function readResolutionCache(key) {
  const hit = resolutionCache.get(key);
  if (!hit) return null;
  const expired = cacheClock3() - hit.cachedAt > RESOLUTION_CACHE_TTL_MS;
  if (expired) {
    resolutionCache.delete(key);
    return null;
  }
  return hit.resolution;
}
function writeResolutionCache(key, resolution) {
  // Never cache ambiguous/unbound outcomes.
  if (resolution.status === "resolved") {
    resolutionCache.set(key, { resolution, cachedAt: cacheClock3() });
  }
}
|
|
1000
1509
|
// Resolve the active lane binding for a repo, consulting the short-lived
// resolution cache before doing the full (potentially API-backed) lookup.
async function resolveActiveLaneBinding(params) {
  const state = await readCollabBindingState(params.repoRoot);
  const key = buildResolutionCacheKey({
    repoRoot: params.repoRoot,
    currentBranch: state ? normalizeBranchName(state.currentBranch) : null,
    hasApi: Boolean(params.api)
  });
  const hit = readResolutionCache(key);
  if (hit) return hit;
  const resolved = await resolveActiveLaneBindingUncached(params, state);
  // Only "resolved" outcomes are actually retained (see writeResolutionCache).
  writeResolutionCache(key, resolved);
  return resolved;
}
|
|
1522
|
+
async function resolveActiveLaneBindingUncached(params, state) {
|
|
1002
1523
|
if (!state) {
|
|
1003
1524
|
return { status: "not_bound", currentBranch: null };
|
|
1004
1525
|
}
|
|
@@ -1189,12 +1710,24 @@ function buildBaseState() {
|
|
|
1189
1710
|
queuedJobCount: 0,
|
|
1190
1711
|
processingJobCount: 0,
|
|
1191
1712
|
retryScheduledJobCount: 0,
|
|
1713
|
+
awaitingUsageJobCount: 0,
|
|
1192
1714
|
failedJobCount: 0,
|
|
1193
1715
|
oldestCapturedAt: null,
|
|
1194
1716
|
newestCapturedAt: null,
|
|
1195
1717
|
nextRetryAt: null,
|
|
1196
1718
|
latestError: null
|
|
1197
1719
|
},
|
|
1720
|
+
pendingAsyncJobs: {
|
|
1721
|
+
state: "idle",
|
|
1722
|
+
activeJobCount: 0,
|
|
1723
|
+
queuedJobCount: 0,
|
|
1724
|
+
processingJobCount: 0,
|
|
1725
|
+
failedJobCount: 0,
|
|
1726
|
+
oldestCreatedAt: null,
|
|
1727
|
+
newestCreatedAt: null,
|
|
1728
|
+
latestError: null,
|
|
1729
|
+
kinds: []
|
|
1730
|
+
},
|
|
1198
1731
|
warnings: [],
|
|
1199
1732
|
hint: null,
|
|
1200
1733
|
metadataWarnings: [],
|
|
@@ -1273,7 +1806,7 @@ async function collabDetectRepoState(params) {
|
|
|
1273
1806
|
return detected;
|
|
1274
1807
|
}
|
|
1275
1808
|
if (!params.api) {
|
|
1276
|
-
const [inspection, pendingFinalize] = await Promise.all([
|
|
1809
|
+
const [inspection, pendingFinalize, pendingAsyncJobs] = await Promise.all([
|
|
1277
1810
|
inspectLocalSnapshot({
|
|
1278
1811
|
repoRoot,
|
|
1279
1812
|
repoFingerprint: binding.repoFingerprint,
|
|
@@ -1286,15 +1819,30 @@ async function collabDetectRepoState(params) {
|
|
|
1286
1819
|
repoFingerprint: binding.repoFingerprint,
|
|
1287
1820
|
currentAppId: binding.currentAppId,
|
|
1288
1821
|
laneId: binding.laneId
|
|
1289
|
-
})
|
|
1822
|
+
}),
|
|
1823
|
+
summarizeAsyncJobs({ repoRoot, branchName: binding.branchName ?? null })
|
|
1290
1824
|
]);
|
|
1291
1825
|
detected.currentSnapshotHash = inspection.snapshotHash;
|
|
1292
1826
|
detected.pendingFinalize = pendingFinalize;
|
|
1827
|
+
detected.pendingAsyncJobs = pendingAsyncJobs;
|
|
1293
1828
|
return detected;
|
|
1294
1829
|
}
|
|
1295
1830
|
try {
|
|
1296
|
-
const
|
|
1297
|
-
|
|
1831
|
+
const baseline = await readLocalBaseline({
|
|
1832
|
+
repoFingerprint: binding.repoFingerprint,
|
|
1833
|
+
laneId: binding.laneId,
|
|
1834
|
+
repoRoot
|
|
1835
|
+
});
|
|
1836
|
+
const hasFullBaseline = Boolean(baseline?.lastSnapshotHash && baseline?.lastServerHeadHash);
|
|
1837
|
+
const metadataIdentityPromise = hasFullBaseline ? getAppDeltaCached(params.api, binding.currentAppId, {
|
|
1838
|
+
baseHeadHash: baseline.lastServerHeadHash,
|
|
1839
|
+
targetHeadHash: baseline.lastServerHeadHash,
|
|
1840
|
+
repoFingerprint: binding.repoFingerprint ?? void 0,
|
|
1841
|
+
remoteUrl: binding.remoteUrl ?? void 0,
|
|
1842
|
+
defaultBranch: binding.defaultBranch ?? void 0
|
|
1843
|
+
}) : Promise.resolve(null);
|
|
1844
|
+
const [headResp, inspection, parallelMetadataResp, pendingFinalize, pendingAsyncJobs] = await Promise.all([
|
|
1845
|
+
getAppHeadCached(params.api, binding.currentAppId),
|
|
1298
1846
|
inspectLocalSnapshot({
|
|
1299
1847
|
repoRoot,
|
|
1300
1848
|
repoFingerprint: binding.repoFingerprint,
|
|
@@ -1302,23 +1850,21 @@ async function collabDetectRepoState(params) {
|
|
|
1302
1850
|
branchName: binding.branchName,
|
|
1303
1851
|
persistBlobs: false
|
|
1304
1852
|
}),
|
|
1305
|
-
|
|
1306
|
-
repoFingerprint: binding.repoFingerprint,
|
|
1307
|
-
laneId: binding.laneId,
|
|
1308
|
-
repoRoot
|
|
1309
|
-
}),
|
|
1853
|
+
metadataIdentityPromise,
|
|
1310
1854
|
summarizePendingFinalizeJobs({
|
|
1311
1855
|
repoRoot,
|
|
1312
1856
|
repoFingerprint: binding.repoFingerprint,
|
|
1313
1857
|
currentAppId: binding.currentAppId,
|
|
1314
1858
|
laneId: binding.laneId
|
|
1315
|
-
})
|
|
1859
|
+
}),
|
|
1860
|
+
summarizeAsyncJobs({ repoRoot, branchName: binding.branchName ?? null })
|
|
1316
1861
|
]);
|
|
1317
1862
|
const appHead = unwrapResponseObject(headResp, "app head");
|
|
1318
1863
|
detected.currentServerHeadHash = appHead.headCommitHash;
|
|
1319
1864
|
detected.currentServerHeadCommitId = appHead.headCommitId;
|
|
1320
1865
|
detected.currentSnapshotHash = inspection.snapshotHash;
|
|
1321
1866
|
detected.pendingFinalize = pendingFinalize;
|
|
1867
|
+
detected.pendingAsyncJobs = pendingAsyncJobs;
|
|
1322
1868
|
detected.baseline = {
|
|
1323
1869
|
lastSnapshotId: baseline?.lastSnapshotId ?? null,
|
|
1324
1870
|
lastSnapshotHash: baseline?.lastSnapshotHash ?? null,
|
|
@@ -1362,15 +1908,7 @@ async function collabDetectRepoState(params) {
|
|
|
1362
1908
|
"Local Git HEAD changed since the last Remix baseline. Remix will use the current workspace snapshot to detect divergence."
|
|
1363
1909
|
);
|
|
1364
1910
|
}
|
|
1365
|
-
const
|
|
1366
|
-
const metadataResp = await params.api.getAppDelta(binding.currentAppId, {
|
|
1367
|
-
baseHeadHash: metadataBaseHeadHash,
|
|
1368
|
-
targetHeadHash: metadataBaseHeadHash,
|
|
1369
|
-
repoFingerprint: binding.repoFingerprint ?? void 0,
|
|
1370
|
-
remoteUrl: binding.remoteUrl ?? void 0,
|
|
1371
|
-
defaultBranch: binding.defaultBranch ?? void 0
|
|
1372
|
-
});
|
|
1373
|
-
const metadataCheck = unwrapResponseObject(metadataResp, "app delta metadata");
|
|
1911
|
+
const metadataCheck = unwrapResponseObject(parallelMetadataResp, "app delta metadata");
|
|
1374
1912
|
detected.metadataWarnings = metadataCheck.warnings;
|
|
1375
1913
|
detected.warnings.push(...metadataCheck.warnings);
|
|
1376
1914
|
if (metadataCheck.status === "conflict_risk") {
|
|
@@ -1404,26 +1942,347 @@ async function collabDetectRepoState(params) {
|
|
|
1404
1942
|
}
|
|
1405
1943
|
}
|
|
1406
1944
|
|
|
1407
|
-
// src/
|
|
1408
|
-
|
|
1409
|
-
var
|
|
1410
|
-
|
|
1411
|
-
|
|
1412
|
-
|
|
1945
|
+
// src/infrastructure/git/gitEventHarvester.ts
|
|
1946
|
+
import { execa as execa2 } from "execa";
|
|
1947
|
+
var GIT_TIMEOUT_MS = 1e4;
|
|
1948
|
+
var GIT_MAX_BUFFER = 64 * 1024 * 1024;
|
|
1949
|
+
var MAX_COMMITS = 100;
|
|
1950
|
+
var RECORD_SEPARATOR = "";
|
|
1951
|
+
var FIELD_SEPARATOR = "";
|
|
1952
|
+
var LOG_FORMAT = `${RECORD_SEPARATOR}%H${FIELD_SEPARATOR}%P${FIELD_SEPARATOR}%an${FIELD_SEPARATOR}%ae${FIELD_SEPARATOR}%cn${FIELD_SEPARATOR}%ce${FIELD_SEPARATOR}%aI${FIELD_SEPARATOR}%cI${FIELD_SEPARATOR}%s`;
|
|
1953
|
+
// Return a copy of process.env with Git's repo-override variables removed so
// child `git` invocations operate on the target cwd rather than whatever
// repository the parent process happened to be pointed at.
function sanitizedEnv() {
  const env = { ...process.env };
  for (const key of ["GIT_DIR", "GIT_WORK_TREE", "GIT_COMMON_DIR", "GIT_INDEX_FILE"]) {
    delete env[key];
  }
  return env;
}
|
|
1961
|
+
// Run `git` with the given args under a hard timeout, never rejecting; the
// caller inspects exitCode/stdout/stderr/timedOut instead of catching.
async function runGitEvent(args, cwd) {
  const result = await execa2("git", args, {
    cwd,
    reject: false,
    stderr: "pipe",
    timeout: GIT_TIMEOUT_MS,
    env: sanitizedEnv(),
    maxBuffer: GIT_MAX_BUFFER
  });
  const { exitCode, stdout, stderr, timedOut } = result;
  return {
    exitCode: exitCode ?? -1,
    stdout: String(stdout ?? ""),
    stderr: String(stderr ?? ""),
    timedOut: timedOut === true
  };
}
|
|
1414
|
-
function
|
|
1415
|
-
const
|
|
1416
|
-
|
|
1977
|
+
// Parse one `git --numstat` output line of the form
// "<added><SEP><removed><SEP><path>" into { added, removed, path };
// returns null for lines that don't have both separators or have no path.
// NOTE(review): git numstat separates fields with a TAB; the separator
// literal below renders as a space in this view — confirm it is "\t" in the
// actual source before editing.
function parseNumstatLine(line) {
  const tab1 = line.indexOf(" ");
  if (tab1 < 0) return null;
  const tab2 = line.indexOf(" ", tab1 + 1);
  if (tab2 < 0) return null;
  const addedRaw = line.slice(0, tab1);
  const removedRaw = line.slice(tab1 + 1, tab2);
  const filePath = line.slice(tab2 + 1);
  if (!filePath) return null;
  // "-" marks a binary file in numstat output; treat it as zero lines.
  const added = addedRaw === "-" ? 0 : Number.parseInt(addedRaw, 10);
  const removed = removedRaw === "-" ? 0 : Number.parseInt(removedRaw, 10);
  return {
    // Unparseable counts degrade to 0 rather than NaN-poisoning the totals.
    added: Number.isFinite(added) ? added : 0,
    removed: Number.isFinite(removed) ? removed : 0,
    path: filePath
  };
}
|
|
1418
|
-
function
|
|
1419
|
-
const
|
|
1420
|
-
const
|
|
1421
|
-
|
|
1994
|
+
// Split `git log --format=<LOG_FORMAT> --numstat` output into raw commit
// records. Records are delimited by RECORD_SEPARATOR and metadata fields by
// FIELD_SEPARATOR (module constants whose control-character values render as
// empty strings in this view — presumably ASCII separator characters; TODO
// confirm against the source).
function parseLogOutput(stdout) {
  const records = [];
  const parts = stdout.split(RECORD_SEPARATOR);
  for (const part of parts) {
    if (!part) continue;
    // First line of each record is the separator-joined metadata; the
    // remainder (if any) is that commit's numstat lines.
    const newlineIdx = part.indexOf("\n");
    const metadataLine = newlineIdx === -1 ? part : part.slice(0, newlineIdx);
    const rest = newlineIdx === -1 ? "" : part.slice(newlineIdx + 1);
    const fields = metadataLine.split(FIELD_SEPARATOR);
    // LOG_FORMAT emits exactly 9 fields; skip anything malformed.
    if (fields.length < 9) continue;
    const [hash, parentsField, authorName, authorEmail, committerName, committerEmail, authorDateIso, committerDateIso, subject] = fields;
    const parents = parentsField ? parentsField.split(" ").filter(Boolean) : [];
    const commit = {
      hash,
      parents,
      authorName,
      authorEmail,
      committerName,
      committerEmail,
      authorDateIso,
      committerDateIso,
      subject,
      // Filled in from the numstat lines below.
      filesChanged: 0,
      linesAdded: 0,
      linesRemoved: 0
    };
    // Aggregate numstat totals, counting each touched path only once.
    const touchedPaths = /* @__PURE__ */ new Set();
    for (const rawLine of rest.split("\n")) {
      if (!rawLine) continue;
      const parsed = parseNumstatLine(rawLine);
      if (!parsed) continue;
      commit.linesAdded += parsed.added;
      commit.linesRemoved += parsed.removed;
      touchedPaths.add(parsed.path);
    }
    commit.filesChanged = touchedPaths.size;
    records.push(commit);
  }
  return records;
}
|
|
1423
|
-
function
|
|
1424
|
-
|
|
1425
|
-
|
|
1426
|
-
|
|
2034
|
+
// Shape a parsed raw log record into the public commit-event structure.
function buildCommitEvent(raw) {
  const { hash, parents } = raw;
  return {
    hash,
    shortHash: hash.slice(0, 7),
    // Two or more parents marks the commit as a merge.
    isMerge: parents.length >= 2,
    parents,
    author: { name: raw.authorName, email: raw.authorEmail },
    committer: { name: raw.committerName, email: raw.committerEmail },
    message: raw.subject,
    timestamp: raw.authorDateIso,
    filesChanged: raw.filesChanged,
    linesAdded: raw.linesAdded,
    linesRemoved: raw.linesRemoved
  };
}
|
|
2049
|
+
// Patterns for extracting the source branch from common merge-commit
// subjects: plain merges, remote-tracking merges, and GitHub PR merges.
var MERGE_BRANCH_REGEXES = [
  /^Merge branch '([^']+)'(?: into .+)?$/,
  /^Merge remote-tracking branch '([^']+)'(?: into .+)?$/,
  /^Merge pull request #\d+ from (\S+)$/
];
// Return the merged-in branch named by a merge-commit subject, with any
// leading "origin/" prefix stripped; null when no pattern matches.
function parseBranchFromMergeMessage(subject) {
  for (const pattern of MERGE_BRANCH_REGEXES) {
    const captured = pattern.exec(subject)?.[1];
    if (!captured) continue;
    return captured.startsWith("origin/") ? captured.slice("origin/".length) : captured;
  }
  return null;
}
|
|
2064
|
+
// Map the second parent of a merge to a branch name via `git name-rev`.
// Strips remote prefixes; returns null when name-rev fails/times out, or the
// name is relative (~/^), a tag ref, empty, or literally "undefined".
async function resolveBranchFromNameRev(secondParent, repoRoot) {
  const res = await runGitEvent(
    ["name-rev", "--name-only", "--no-undefined", secondParent],
    repoRoot
  );
  if (res.timedOut || res.exitCode !== 0) return null;
  let candidate = res.stdout.trim();
  if (!candidate) return null;
  if (candidate.startsWith("remotes/origin/")) {
    candidate = candidate.slice("remotes/origin/".length);
  } else if (candidate.startsWith("origin/")) {
    candidate = candidate.slice("origin/".length);
  }
  const unusable =
    candidate.includes("~") ||
    candidate.includes("^") ||
    candidate.startsWith("tags/") ||
    candidate === "undefined";
  return unusable ? null : candidate;
}
|
|
2077
|
+
// Summarize uncommitted work in the repo: staged/unstaged/untracked entry
// counts from `git status --porcelain=v1`, plus line totals from
// `git diff --numstat HEAD`. Best-effort — a failed or timed-out git call
// simply leaves its portion of the summary at zero.
async function collectUncommittedSummary(repoRoot) {
  const summary = {
    filesChanged: 0,
    linesAdded: 0,
    linesRemoved: 0,
    staged: 0,
    unstaged: 0,
    untracked: 0
  };
  const statusRes = await runGitEvent(["status", "--porcelain=v1"], repoRoot);
  if (statusRes.exitCode === 0 && !statusRes.timedOut) {
    const uniquePaths = /* @__PURE__ */ new Set();
    for (const rawLine of statusRes.stdout.split("\n")) {
      if (!rawLine) continue;
      if (rawLine.startsWith("??")) {
        summary.untracked += 1;
        // Porcelain v1 lines are "XY path" — the path starts at column 3.
        uniquePaths.add(rawLine.slice(3));
        continue;
      }
      if (rawLine.length < 3) continue;
      // 63 is the char code of "?".
      const indexCode = rawLine.charCodeAt(0);
      const worktreeCode = rawLine.charCodeAt(1);
      const indexCh = rawLine[0];
      const worktreeCh = rawLine[1];
      const pathPart = rawLine.slice(3);
      // Any non-blank, non-"?" index column counts as staged.
      if (indexCh !== " " && indexCode !== 63) summary.staged += 1;
      // NOTE(review): the `worktreeCode !== 63` guard is redundant here since
      // worktreeCh is already required to be "M" or "D" — confirm intent.
      if ((worktreeCh === "M" || worktreeCh === "D") && worktreeCode !== 63) summary.unstaged += 1;
      uniquePaths.add(pathPart);
    }
    summary.filesChanged = uniquePaths.size;
  }
  const diffRes = await runGitEvent(["diff", "--numstat", "HEAD"], repoRoot);
  if (diffRes.exitCode === 0 && !diffRes.timedOut) {
    for (const rawLine of diffRes.stdout.split("\n")) {
      if (!rawLine) continue;
      const parsed = parseNumstatLine(rawLine);
      if (!parsed) continue;
      summary.linesAdded += parsed.added;
      summary.linesRemoved += parsed.removed;
    }
  }
  return summary;
}
|
|
2120
|
+
// Build a zeroed preTurnEvents payload (no commit range to harvest), carrying
// only the uncommitted-work summary and the history-rewritten flag.
function buildEmptyPreTurnEvents(params) {
  const summary = {
    totalCommits: 0,
    totalMerges: 0,
    totalFilesChanged: 0,
    totalLinesAdded: 0,
    totalLinesRemoved: 0,
    elapsedMs: null,
    truncated: false,
    rewritten: params.rewritten
  };
  return {
    commits: [],
    merges: [],
    uncommittedAtFinalize: params.uncommitted,
    summary,
    range: { fromCommit: params.fromCommit, toCommit: params.toCommit },
    schemaVersion: 1
  };
}
|
|
2139
|
+
// Harvest the git activity between params.fromCommit and params.toCommit:
// first-parent commits, merge events, an overall diff summary, and the
// uncommitted-work snapshot. Returns null when a git call fails or times out
// (callers treat that as "no events available"); never throws.
async function collectPreTurnGitEvents(params) {
  try {
    const uncommitted = await collectUncommittedSummary(params.repoRoot);
    // No range (or an empty range) -> empty payload with just the snapshot.
    if (!params.fromCommit || params.fromCommit === params.toCommit) {
      return buildEmptyPreTurnEvents({
        fromCommit: params.fromCommit,
        toCommit: params.toCommit,
        uncommitted,
        rewritten: false
      });
    }
    // merge-base --is-ancestor: exit 0 = reachable, 1 = history was rewritten
    // (fromCommit no longer an ancestor), anything else = error.
    const reachabilityRes = await runGitEvent(
      ["merge-base", "--is-ancestor", params.fromCommit, params.toCommit],
      params.repoRoot
    );
    if (reachabilityRes.timedOut) return null;
    if (reachabilityRes.exitCode === 1) {
      return buildEmptyPreTurnEvents({
        fromCommit: null,
        toCommit: params.toCommit,
        uncommitted,
        rewritten: true
      });
    }
    if (reachabilityRes.exitCode !== 0) return null;
    // First-parent log with per-commit numstat over the range.
    const logRes = await runGitEvent(
      [
        "log",
        "--first-parent",
        "-m",
        "--numstat",
        "--no-renames",
        `--format=${LOG_FORMAT}`,
        `${params.fromCommit}..${params.toCommit}`
      ],
      params.repoRoot
    );
    if (logRes.timedOut || logRes.exitCode !== 0) return null;
    const rawCommits = parseLogOutput(logRes.stdout);
    const totalCommits = rawCommits.length;
    const totalMerges = rawCommits.reduce((n, c) => n + (c.parents.length >= 2 ? 1 : 0), 0);
    // git log is newest-first; reverse to oldest-first, then keep only the
    // newest MAX_COMMITS entries when over the cap.
    const ordered = [...rawCommits].reverse();
    const truncated = ordered.length > MAX_COMMITS;
    const kept = truncated ? ordered.slice(ordered.length - MAX_COMMITS) : ordered;
    const commits = kept.map(buildCommitEvent);
    // Build a MergeEvent for every merge, resolving the source branch from
    // both the commit subject and `git name-rev` (sequential git calls).
    const merges = [];
    for (const raw of rawCommits) {
      if (raw.parents.length < 2) continue;
      const secondParent = raw.parents[1];
      const fromBranchFromMessage = parseBranchFromMergeMessage(raw.subject);
      const fromBranchFromNameRev = await resolveBranchFromNameRev(secondParent, params.repoRoot);
      merges.push({
        hash: raw.hash,
        shortHash: raw.hash.slice(0, 7),
        fromBranchFromMessage,
        fromBranchFromNameRev,
        fromCommit: secondParent,
        filesChanged: raw.filesChanged,
        linesAdded: raw.linesAdded,
        linesRemoved: raw.linesRemoved,
        message: raw.subject
      });
    }
    // Overall range totals come from a single endpoint-to-endpoint diff, not
    // from summing per-commit stats (which would double-count).
    let totalFilesChanged = 0;
    let totalLinesAdded = 0;
    let totalLinesRemoved = 0;
    const diffRes = await runGitEvent(
      ["diff", "--numstat", "--no-renames", params.fromCommit, params.toCommit],
      params.repoRoot
    );
    if (diffRes.exitCode === 0 && !diffRes.timedOut) {
      const uniquePaths = /* @__PURE__ */ new Set();
      for (const rawLine of diffRes.stdout.split("\n")) {
        if (!rawLine) continue;
        const parsed = parseNumstatLine(rawLine);
        if (!parsed) continue;
        uniquePaths.add(parsed.path);
        totalLinesAdded += parsed.added;
        totalLinesRemoved += parsed.removed;
      }
      totalFilesChanged = uniquePaths.size;
    }
    // elapsedMs: span between the earliest and latest committer timestamps.
    let elapsedMs = null;
    if (rawCommits.length > 0) {
      const committerTimes = rawCommits.map((c) => Date.parse(c.committerDateIso)).filter((t) => Number.isFinite(t));
      if (committerTimes.length > 0) {
        const min = Math.min(...committerTimes);
        const max = Math.max(...committerTimes);
        elapsedMs = max - min;
      }
    }
    return {
      // commits[] is a first-parent view: it contains the commits on the
      // receiving branch plus merges as single entries. Commits that entered
      // via merges are represented by the MergeEvent, not listed individually.
      commits,
      merges,
      uncommittedAtFinalize: uncommitted,
      summary: {
        totalCommits,
        totalMerges,
        totalFilesChanged,
        totalLinesAdded,
        totalLinesRemoved,
        elapsedMs,
        truncated,
        rewritten: false
      },
      range: { fromCommit: params.fromCommit, toCommit: params.toCommit },
      schemaVersion: 1
    };
  } catch (err) {
    // Harvesting is strictly best-effort; log and degrade to "no events".
    console.warn(
      `[remix-core] preTurnEvents harvester failed: ${err instanceof Error ? err.message : String(err)}`
    );
    return null;
  }
}
|
|
2257
|
+
|
|
2258
|
+
// src/application/collab/collabFinalizeProcessing.ts
// Exponential-backoff schedule for retrying failed finalize jobs.
var FINALIZE_RETRY_BASE_DELAY_MS = 15e3;
var FINALIZE_RETRY_MAX_DELAY_MS = 5 * 60 * 1e3;
// Wall-clock cap for processing a single job in one drainer pass; the mutable
// binding allows the cap to be adjusted as module state.
var FINALIZE_JOB_PROCESSING_TIMEOUT_MS_DEFAULT = 90 * 1e3;
var finalizeJobProcessingTimeoutMs = FINALIZE_JOB_PROCESSING_TIMEOUT_MS_DEFAULT;
// Refresh the job lock at a third of the staleness window, but never more
// often than every 15s.
var FINALIZE_LOCK_HEARTBEAT_MS = Math.max(15e3, Math.floor(FINALIZE_JOB_LOCK_STALE_MS / 3));
// Consecutive same-reason failures at which a job is escalated to terminal.
var FINALIZE_AUTO_TERMINAL_THRESHOLD = 5;
|
|
2265
|
+
// Read a string field from job.metadata; returns the trimmed value, or null
// when the field is missing, non-string, or whitespace-only.
function readMetadataString(job, key) {
  const raw = job.metadata[key];
  if (typeof raw !== "string") return null;
  const trimmed = raw.trim();
  return trimmed ? trimmed : null;
}
|
|
2269
|
+
// Extract the structured actor object from job.metadata, or undefined when
// the field is absent or not an object.
function readMetadataActor(job) {
  const { actor } = job.metadata;
  if (!actor || typeof actor !== "object") return void 0;
  return actor;
}
|
|
2273
|
+
// Extract the turn-usage payload from job.metadata; null when absent or not
// an object.
function readMetadataTurnUsage(job) {
  const { turnUsage } = job.metadata;
  if (!turnUsage || typeof turnUsage !== "object") return null;
  return turnUsage;
}
|
|
2277
|
+
// Compute the next retry timestamp (ISO string) using exponential backoff:
// base * 2^(retryCount - 1), capped at FINALIZE_RETRY_MAX_DELAY_MS.
function buildNextRetryAt(retryCount) {
  const exponent = Math.max(0, retryCount - 1);
  const backoffMs = FINALIZE_RETRY_BASE_DELAY_MS * 2 ** exponent;
  const delayMs = Math.min(backoffMs, FINALIZE_RETRY_MAX_DELAY_MS);
  return new Date(Date.now() + delayMs).toISOString();
}
|
|
2282
|
+
function buildFinalizeCliError(params) {
|
|
2283
|
+
const error = new RemixError(params.message, {
|
|
2284
|
+
exitCode: params.exitCode,
|
|
2285
|
+
hint: params.hint
|
|
1427
2286
|
});
|
|
1428
2287
|
error.finalizeDisposition = params.disposition;
|
|
1429
2288
|
error.finalizeReason = params.reason;
|
|
@@ -1437,8 +2296,50 @@ function classifyFinalizeError(error) {
|
|
|
1437
2296
|
message: error instanceof Error ? error.message : String(error)
|
|
1438
2297
|
};
|
|
1439
2298
|
}
|
|
1440
|
-
function
|
|
2299
|
+
// Race `work` against the drainer wall-clock cap. On timeout the returned
// promise rejects with a retryable finalize error; the timer is cleared once
// the race settles and is unref'd so it never keeps the process alive.
// NOTE(review): a losing `work` promise is not cancelled — it keeps running
// in the background after the timeout fires.
function runWithFinalizeTimeout(work, jobId) {
  let timer;
  // Snapshot the module-level cap so a concurrent reconfiguration cannot
  // change this job's deadline mid-flight.
  const cap = finalizeJobProcessingTimeoutMs;
  const timeout = new Promise((_, reject) => {
    timer = setTimeout(() => {
      reject(
        buildFinalizeCliError({
          message: `Finalize job ${jobId} exceeded the ${Math.round(
            cap / 1e3
          )}s drainer wall-clock timeout. The slow request will be retried by a future drainer pass.`,
          exitCode: 1,
          hint: "If this keeps recurring, run `remix collab finalize-queue list` to inspect the job, then `remix collab finalize-queue forget <id>` to drop it manually.",
          disposition: "retryable",
          reason: "processing_timeout"
        })
      );
    }, cap);
    timer.unref?.();
  });
  return Promise.race([work, timeout]).finally(() => {
    if (timer) clearTimeout(timer);
  });
}
|
|
2322
|
+
// Start periodically refreshing the job lock so other drainers do not treat
// it as stale; returns a function that stops the heartbeat.
function startLockHeartbeat(jobId) {
  const timer = setInterval(() => {
    // Fire-and-forget: a failed heartbeat is retried on the next tick.
    void heartbeatJobLock(jobId);
  }, FINALIZE_LOCK_HEARTBEAT_MS);
  // Do not keep the process alive just for the heartbeat.
  timer.unref?.();
  return () => clearInterval(timer);
}
|
|
2329
|
+
function computeFailureEscalation(job, reason) {
|
|
2330
|
+
const previousReason = job.metadata.consecutiveFailureReason;
|
|
2331
|
+
const previousCountRaw = job.metadata.consecutiveFailures;
|
|
2332
|
+
const previousCount = typeof previousCountRaw === "number" && Number.isFinite(previousCountRaw) && previousCountRaw > 0 ? Math.floor(previousCountRaw) : 0;
|
|
2333
|
+
const sameReason = previousReason === reason;
|
|
2334
|
+
const next = sameReason ? previousCount + 1 : 1;
|
|
1441
2335
|
return {
|
|
2336
|
+
consecutiveFailures: next,
|
|
2337
|
+
consecutiveFailureReason: reason,
|
|
2338
|
+
shouldEscalateToTerminal: next >= FINALIZE_AUTO_TERMINAL_THRESHOLD
|
|
2339
|
+
};
|
|
2340
|
+
}
|
|
2341
|
+
function buildWorkspaceMetadata(params) {
|
|
2342
|
+
const metadata = {
|
|
1442
2343
|
branch: params.branchName,
|
|
1443
2344
|
repoRoot: params.repoRoot,
|
|
1444
2345
|
remoteUrl: params.remoteUrl,
|
|
@@ -1450,126 +2351,202 @@ function buildWorkspaceMetadata(params) {
|
|
|
1450
2351
|
currentSnapshotHash: params.currentSnapshotHash,
|
|
1451
2352
|
localCommitHash: params.localCommitHash,
|
|
1452
2353
|
repoStateAtCapture: params.repoState,
|
|
1453
|
-
replayedFromBaseHash: params.replayedFromBaseHash ?? null
|
|
2354
|
+
replayedFromBaseHash: params.replayedFromBaseHash ?? null,
|
|
2355
|
+
previousLocalCommitHash: params.previousLocalCommitHash ?? null
|
|
1454
2356
|
};
|
|
2357
|
+
if (params.preTurnEvents) {
|
|
2358
|
+
metadata.preTurnEvents = params.preTurnEvents;
|
|
2359
|
+
}
|
|
2360
|
+
if (params.turnUsage) {
|
|
2361
|
+
metadata.turnUsage = params.turnUsage;
|
|
2362
|
+
}
|
|
2363
|
+
if (typeof params.promptedAt === "string" && params.promptedAt.trim()) {
|
|
2364
|
+
metadata.promptedAt = params.promptedAt.trim();
|
|
2365
|
+
}
|
|
2366
|
+
return metadata;
|
|
2367
|
+
}
|
|
2368
|
+
async function harvestPreTurnEvents(repoRoot, fromCommit, toCommit) {
|
|
2369
|
+
if (!toCommit) return null;
|
|
2370
|
+
try {
|
|
2371
|
+
return await collectPreTurnGitEvents({ repoRoot, fromCommit, toCommit });
|
|
2372
|
+
} catch (err) {
|
|
2373
|
+
console.warn(
|
|
2374
|
+
`[remix-core] preTurnEvents harvester threw: ${err instanceof Error ? err.message : String(err)}`
|
|
2375
|
+
);
|
|
2376
|
+
return null;
|
|
2377
|
+
}
|
|
1455
2378
|
}
|
|
1456
2379
|
async function processClaimedPendingFinalizeJob(params) {
|
|
1457
2380
|
const job = params.job;
|
|
2381
|
+
const stopHeartbeat = startLockHeartbeat(job.id);
|
|
1458
2382
|
try {
|
|
1459
|
-
|
|
1460
|
-
|
|
1461
|
-
|
|
1462
|
-
|
|
1463
|
-
|
|
1464
|
-
|
|
1465
|
-
|
|
1466
|
-
|
|
1467
|
-
|
|
1468
|
-
|
|
2383
|
+
return await runWithFinalizeTimeout(processClaimedPendingFinalizeJobInner(params), job.id);
|
|
2384
|
+
} catch (error) {
|
|
2385
|
+
const classified = classifyFinalizeError(error);
|
|
2386
|
+
const escalation = computeFailureEscalation(job, classified.reason);
|
|
2387
|
+
const finalDisposition = classified.disposition === "terminal" || escalation.shouldEscalateToTerminal ? "terminal" : "retryable";
|
|
2388
|
+
await updatePendingFinalizeJob(job.id, {
|
|
2389
|
+
status: finalDisposition === "terminal" ? "failed" : "queued",
|
|
2390
|
+
error: finalDisposition === "terminal" && escalation.shouldEscalateToTerminal && classified.disposition !== "terminal" ? `${classified.message} (auto-escalated to terminal after ${escalation.consecutiveFailures} consecutive ${classified.reason} failures)` : classified.message,
|
|
2391
|
+
nextRetryAt: finalDisposition === "terminal" ? null : buildNextRetryAt(job.retryCount),
|
|
2392
|
+
metadata: {
|
|
2393
|
+
failureDisposition: finalDisposition,
|
|
2394
|
+
failureReason: classified.reason,
|
|
2395
|
+
consecutiveFailures: escalation.consecutiveFailures,
|
|
2396
|
+
consecutiveFailureReason: escalation.consecutiveFailureReason
|
|
2397
|
+
}
|
|
2398
|
+
});
|
|
2399
|
+
throw error;
|
|
2400
|
+
} finally {
|
|
2401
|
+
stopHeartbeat();
|
|
2402
|
+
await params.release();
|
|
2403
|
+
}
|
|
2404
|
+
}
|
|
2405
|
+
async function processClaimedPendingFinalizeJobInner(params) {
|
|
2406
|
+
const job = params.job;
|
|
2407
|
+
const [snapshot, baseline, appHeadResp] = await Promise.all([
|
|
2408
|
+
readLocalSnapshot(job.currentSnapshotId),
|
|
2409
|
+
readLocalBaseline({
|
|
2410
|
+
repoFingerprint: job.repoFingerprint,
|
|
2411
|
+
laneId: job.laneId,
|
|
2412
|
+
repoRoot: job.repoRoot
|
|
2413
|
+
}),
|
|
2414
|
+
getAppHeadCached(params.api, job.currentAppId)
|
|
2415
|
+
]);
|
|
2416
|
+
if (!snapshot) {
|
|
2417
|
+
throw buildFinalizeCliError({
|
|
2418
|
+
message: "Captured snapshot is missing from the local snapshot store.",
|
|
2419
|
+
exitCode: 1,
|
|
2420
|
+
disposition: "terminal",
|
|
2421
|
+
reason: "snapshot_missing"
|
|
2422
|
+
});
|
|
2423
|
+
}
|
|
2424
|
+
if (!baseline) {
|
|
2425
|
+
throw buildFinalizeCliError({
|
|
2426
|
+
message: "Local baseline is missing for this queued finalize job.",
|
|
2427
|
+
exitCode: 2,
|
|
2428
|
+
hint: "Run `remix collab re-anchor` to anchor the repository again.",
|
|
2429
|
+
disposition: "terminal",
|
|
2430
|
+
reason: "baseline_missing"
|
|
2431
|
+
});
|
|
2432
|
+
}
|
|
2433
|
+
const baselineDrifted = baseline.lastSnapshotId !== job.baselineSnapshotId || baseline.lastServerHeadHash !== job.baselineServerHeadHash;
|
|
2434
|
+
const appHead = unwrapResponseObject(appHeadResp, "app head");
|
|
2435
|
+
const remoteUrl = readMetadataString(job, "remoteUrl");
|
|
2436
|
+
const defaultBranch = readMetadataString(job, "defaultBranch");
|
|
2437
|
+
const repoState = readMetadataString(job, "repoState");
|
|
2438
|
+
const actor = readMetadataActor(job);
|
|
2439
|
+
const turnUsage = readMetadataTurnUsage(job);
|
|
2440
|
+
const promptedAt = readMetadataString(job, "promptedAt");
|
|
2441
|
+
const previousLocalCommitHash = baseline.lastSeenLocalCommitHash ?? null;
|
|
2442
|
+
const preTurnEvents = await harvestPreTurnEvents(
|
|
2443
|
+
job.repoRoot,
|
|
2444
|
+
previousLocalCommitHash,
|
|
2445
|
+
snapshot.localCommitHash
|
|
2446
|
+
);
|
|
2447
|
+
const diffResult = await diffLocalSnapshots({
|
|
2448
|
+
baseSnapshotId: job.baselineSnapshotId,
|
|
2449
|
+
targetSnapshotId: job.currentSnapshotId
|
|
2450
|
+
});
|
|
2451
|
+
if (!diffResult.diff.trim()) {
|
|
2452
|
+
if (baselineDrifted && snapshot.snapshotHash !== baseline.lastSnapshotHash) {
|
|
1469
2453
|
throw buildFinalizeCliError({
|
|
1470
|
-
message: "
|
|
2454
|
+
message: "Finalize queue baseline drifted before this job was processed.",
|
|
1471
2455
|
exitCode: 1,
|
|
2456
|
+
hint: "Process queued finalize jobs in capture order, or re-anchor the repository before retrying.",
|
|
1472
2457
|
disposition: "terminal",
|
|
1473
|
-
reason: "
|
|
2458
|
+
reason: "baseline_drifted"
|
|
1474
2459
|
});
|
|
1475
2460
|
}
|
|
1476
|
-
if (
|
|
2461
|
+
if (appHead.headCommitHash !== job.baselineServerHeadHash) {
|
|
1477
2462
|
throw buildFinalizeCliError({
|
|
1478
|
-
message: "
|
|
2463
|
+
message: "Server lane changed before a no-diff turn could be recorded.",
|
|
1479
2464
|
exitCode: 2,
|
|
1480
|
-
hint: "
|
|
1481
|
-
disposition: "terminal",
|
|
1482
|
-
reason: "baseline_missing"
|
|
1483
|
-
});
|
|
1484
|
-
}
|
|
1485
|
-
if (baseline.lastSnapshotId !== job.baselineSnapshotId || baseline.lastServerHeadHash !== job.baselineServerHeadHash) {
|
|
1486
|
-
throw buildFinalizeCliError({
|
|
1487
|
-
message: "Finalize queue baseline drifted before this job was processed.",
|
|
1488
|
-
exitCode: 1,
|
|
1489
|
-
hint: "Process queued finalize jobs in capture order, or re-anchor the repository before retrying.",
|
|
2465
|
+
hint: "Pull the server changes locally before recording another no-diff turn.",
|
|
1490
2466
|
disposition: "terminal",
|
|
1491
|
-
reason: "
|
|
2467
|
+
reason: "server_lane_changed"
|
|
1492
2468
|
});
|
|
1493
2469
|
}
|
|
1494
|
-
const
|
|
1495
|
-
|
|
1496
|
-
|
|
1497
|
-
|
|
1498
|
-
|
|
1499
|
-
|
|
1500
|
-
|
|
1501
|
-
targetSnapshotId: job.currentSnapshotId
|
|
1502
|
-
});
|
|
1503
|
-
if (!diffResult.diff.trim()) {
|
|
1504
|
-
if (appHead.headCommitHash !== job.baselineServerHeadHash) {
|
|
1505
|
-
throw buildFinalizeCliError({
|
|
1506
|
-
message: "Server lane changed before a no-diff turn could be recorded.",
|
|
1507
|
-
exitCode: 2,
|
|
1508
|
-
hint: "Pull the server changes locally before recording another no-diff turn.",
|
|
1509
|
-
disposition: "terminal",
|
|
1510
|
-
reason: "server_lane_changed"
|
|
1511
|
-
});
|
|
1512
|
-
}
|
|
1513
|
-
const collabTurnResp = await params.api.createCollabTurn(job.currentAppId, {
|
|
1514
|
-
threadId: job.threadId ?? void 0,
|
|
1515
|
-
collabLaneId: job.laneId ?? void 0,
|
|
1516
|
-
prompt: job.prompt,
|
|
1517
|
-
assistantResponse: job.assistantResponse,
|
|
1518
|
-
actor,
|
|
1519
|
-
workspaceMetadata: buildWorkspaceMetadata({
|
|
1520
|
-
repoRoot: job.repoRoot,
|
|
1521
|
-
branchName: job.branchName,
|
|
1522
|
-
remoteUrl,
|
|
1523
|
-
defaultBranch,
|
|
1524
|
-
baselineSnapshotId: job.baselineSnapshotId,
|
|
1525
|
-
currentSnapshotId: job.currentSnapshotId,
|
|
1526
|
-
baselineServerHeadHash: job.baselineServerHeadHash,
|
|
1527
|
-
currentSnapshotHash: snapshot.snapshotHash,
|
|
1528
|
-
localCommitHash: snapshot.localCommitHash,
|
|
1529
|
-
repoState
|
|
1530
|
-
}),
|
|
1531
|
-
idempotencyKey: job.idempotencyKey ?? void 0
|
|
1532
|
-
});
|
|
1533
|
-
const collabTurn = unwrapResponseObject(collabTurnResp, "collab turn");
|
|
1534
|
-
await writeLocalBaseline({
|
|
2470
|
+
const collabTurnResp = await params.api.createCollabTurn(job.currentAppId, {
|
|
2471
|
+
threadId: job.threadId ?? void 0,
|
|
2472
|
+
collabLaneId: job.laneId ?? void 0,
|
|
2473
|
+
prompt: job.prompt,
|
|
2474
|
+
assistantResponse: job.assistantResponse,
|
|
2475
|
+
actor,
|
|
2476
|
+
workspaceMetadata: buildWorkspaceMetadata({
|
|
1535
2477
|
repoRoot: job.repoRoot,
|
|
1536
|
-
repoFingerprint: job.repoFingerprint,
|
|
1537
|
-
laneId: job.laneId,
|
|
1538
|
-
currentAppId: job.currentAppId,
|
|
1539
2478
|
branchName: job.branchName,
|
|
1540
|
-
|
|
1541
|
-
|
|
1542
|
-
|
|
1543
|
-
|
|
1544
|
-
|
|
1545
|
-
|
|
1546
|
-
|
|
1547
|
-
metadata: { collabTurnId: collabTurn.id }
|
|
1548
|
-
});
|
|
1549
|
-
return {
|
|
1550
|
-
mode: "no_diff_turn",
|
|
1551
|
-
idempotencyKey: job.idempotencyKey ?? "",
|
|
1552
|
-
queued: false,
|
|
1553
|
-
jobId: job.id,
|
|
2479
|
+
remoteUrl,
|
|
2480
|
+
defaultBranch,
|
|
2481
|
+
baselineSnapshotId: job.baselineSnapshotId,
|
|
2482
|
+
currentSnapshotId: job.currentSnapshotId,
|
|
2483
|
+
baselineServerHeadHash: job.baselineServerHeadHash,
|
|
2484
|
+
currentSnapshotHash: snapshot.snapshotHash,
|
|
2485
|
+
localCommitHash: snapshot.localCommitHash,
|
|
1554
2486
|
repoState,
|
|
1555
|
-
|
|
1556
|
-
|
|
1557
|
-
|
|
1558
|
-
|
|
1559
|
-
}
|
|
1560
|
-
|
|
1561
|
-
|
|
1562
|
-
|
|
1563
|
-
|
|
1564
|
-
|
|
2487
|
+
previousLocalCommitHash,
|
|
2488
|
+
preTurnEvents,
|
|
2489
|
+
turnUsage,
|
|
2490
|
+
promptedAt
|
|
2491
|
+
}),
|
|
2492
|
+
idempotencyKey: job.idempotencyKey ?? void 0
|
|
2493
|
+
});
|
|
2494
|
+
const collabTurn = unwrapResponseObject(collabTurnResp, "collab turn");
|
|
2495
|
+
await writeLocalBaseline({
|
|
2496
|
+
repoRoot: job.repoRoot,
|
|
2497
|
+
repoFingerprint: job.repoFingerprint,
|
|
2498
|
+
laneId: job.laneId,
|
|
2499
|
+
currentAppId: job.currentAppId,
|
|
2500
|
+
branchName: job.branchName,
|
|
2501
|
+
lastSnapshotId: snapshot.id,
|
|
2502
|
+
lastSnapshotHash: snapshot.snapshotHash,
|
|
2503
|
+
lastServerHeadHash: appHead.headCommitHash,
|
|
2504
|
+
lastSeenLocalCommitHash: snapshot.localCommitHash
|
|
2505
|
+
});
|
|
2506
|
+
await updatePendingFinalizeJob(job.id, {
|
|
2507
|
+
status: "completed",
|
|
2508
|
+
metadata: { collabTurnId: collabTurn.id }
|
|
2509
|
+
});
|
|
2510
|
+
return {
|
|
2511
|
+
mode: "no_diff_turn",
|
|
2512
|
+
idempotencyKey: job.idempotencyKey ?? "",
|
|
2513
|
+
queued: false,
|
|
2514
|
+
jobId: job.id,
|
|
2515
|
+
repoState,
|
|
2516
|
+
changeStep: null,
|
|
2517
|
+
collabTurn,
|
|
2518
|
+
autoSync: null,
|
|
2519
|
+
warnings: []
|
|
2520
|
+
};
|
|
2521
|
+
}
|
|
2522
|
+
const localBaselineAdvanced = baseline.lastSnapshotId !== job.baselineSnapshotId;
|
|
2523
|
+
const serverHeadAdvanced = appHead.headCommitHash !== job.baselineServerHeadHash;
|
|
2524
|
+
if (baselineDrifted) {
|
|
2525
|
+
const consistentAdvance = localBaselineAdvanced && serverHeadAdvanced;
|
|
2526
|
+
if (!consistentAdvance) {
|
|
1565
2527
|
throw buildFinalizeCliError({
|
|
1566
|
-
message:
|
|
2528
|
+
message: `Finalize queue baseline advanced inconsistently before this job was processed (localBaselineAdvanced=${localBaselineAdvanced}, serverHeadAdvanced=${serverHeadAdvanced}, jobBaselineSnapshotId=${job.baselineSnapshotId ?? "null"}, liveBaselineSnapshotId=${baseline.lastSnapshotId ?? "null"}, jobBaselineServerHeadHash=${job.baselineServerHeadHash ?? "null"}, liveBaselineServerHeadHash=${baseline.lastServerHeadHash ?? "null"}, currentAppHeadHash=${appHead.headCommitHash}). This indicates local Remix state diverged from the backend in a way that should not be reachable in normal operation; please report this as a bug.`,
|
|
1567
2529
|
exitCode: 1,
|
|
2530
|
+
hint: "Run `remix collab status` to inspect, then `remix collab re-anchor` only if the lane has no valid baseline.",
|
|
1568
2531
|
disposition: "terminal",
|
|
1569
|
-
reason: "
|
|
2532
|
+
reason: "baseline_drifted"
|
|
1570
2533
|
});
|
|
1571
2534
|
}
|
|
1572
|
-
|
|
2535
|
+
}
|
|
2536
|
+
let submissionDiff = diffResult.diff;
|
|
2537
|
+
let submissionBaseHeadHash = job.baselineServerHeadHash;
|
|
2538
|
+
let replayedFromBaseHash = null;
|
|
2539
|
+
if (!submissionBaseHeadHash) {
|
|
2540
|
+
throw buildFinalizeCliError({
|
|
2541
|
+
message: "Baseline server head is missing for this finalize job.",
|
|
2542
|
+
exitCode: 1,
|
|
2543
|
+
disposition: "terminal",
|
|
2544
|
+
reason: "baseline_server_head_missing"
|
|
2545
|
+
});
|
|
2546
|
+
}
|
|
2547
|
+
const replayNeeded = appHead.headCommitHash !== submissionBaseHeadHash || baselineDrifted;
|
|
2548
|
+
if (replayNeeded) {
|
|
2549
|
+
try {
|
|
1573
2550
|
const replayResp = await params.api.startChangeStepReplay(job.currentAppId, {
|
|
1574
2551
|
prompt: job.prompt,
|
|
1575
2552
|
assistantResponse: job.assistantResponse,
|
|
@@ -1588,7 +2565,11 @@ async function processClaimedPendingFinalizeJob(params) {
|
|
|
1588
2565
|
baselineServerHeadHash: job.baselineServerHeadHash,
|
|
1589
2566
|
currentSnapshotHash: snapshot.snapshotHash,
|
|
1590
2567
|
localCommitHash: snapshot.localCommitHash,
|
|
1591
|
-
repoState
|
|
2568
|
+
repoState,
|
|
2569
|
+
previousLocalCommitHash,
|
|
2570
|
+
preTurnEvents,
|
|
2571
|
+
turnUsage,
|
|
2572
|
+
promptedAt
|
|
1592
2573
|
}),
|
|
1593
2574
|
idempotencyKey: buildDeterministicIdempotencyKey({
|
|
1594
2575
|
kind: "collab_finalize_turn_replay_v1",
|
|
@@ -1606,79 +2587,91 @@ async function processClaimedPendingFinalizeJob(params) {
|
|
|
1606
2587
|
submissionDiff = replayDiff.diff;
|
|
1607
2588
|
replayedFromBaseHash = submissionBaseHeadHash;
|
|
1608
2589
|
submissionBaseHeadHash = appHead.headCommitHash;
|
|
2590
|
+
} catch (error) {
|
|
2591
|
+
if (error instanceof RemixError && error.finalizeDisposition === void 0) {
|
|
2592
|
+
const detail = error.hint ? `${error.message} (${error.hint})` : error.message;
|
|
2593
|
+
throw buildFinalizeCliError({
|
|
2594
|
+
message: "Server-side replay could not adapt the captured diff to the current server head: " + detail,
|
|
2595
|
+
exitCode: 1,
|
|
2596
|
+
hint: error.hint ?? void 0,
|
|
2597
|
+
disposition: "terminal",
|
|
2598
|
+
reason: "replay_unrecoverable"
|
|
2599
|
+
});
|
|
2600
|
+
}
|
|
2601
|
+
throw error;
|
|
1609
2602
|
}
|
|
1610
|
-
|
|
1611
|
-
|
|
1612
|
-
|
|
1613
|
-
|
|
1614
|
-
|
|
1615
|
-
|
|
1616
|
-
|
|
1617
|
-
|
|
1618
|
-
|
|
1619
|
-
|
|
1620
|
-
|
|
1621
|
-
|
|
1622
|
-
|
|
1623
|
-
|
|
1624
|
-
branchName: job.branchName,
|
|
1625
|
-
remoteUrl,
|
|
1626
|
-
defaultBranch,
|
|
1627
|
-
baselineSnapshotId: job.baselineSnapshotId,
|
|
1628
|
-
currentSnapshotId: job.currentSnapshotId,
|
|
1629
|
-
baselineServerHeadHash: job.baselineServerHeadHash,
|
|
1630
|
-
currentSnapshotHash: snapshot.snapshotHash,
|
|
1631
|
-
localCommitHash: snapshot.localCommitHash,
|
|
1632
|
-
repoState,
|
|
1633
|
-
replayedFromBaseHash
|
|
1634
|
-
}),
|
|
1635
|
-
idempotencyKey: job.idempotencyKey ?? void 0
|
|
1636
|
-
});
|
|
1637
|
-
const createdStep = unwrapResponseObject(changeStepResp, "change step");
|
|
1638
|
-
const changeStep = await pollChangeStep(params.api, job.currentAppId, String(createdStep.id));
|
|
1639
|
-
const nextHeadResp = await params.api.getAppHead(job.currentAppId);
|
|
1640
|
-
const nextHead = unwrapResponseObject(nextHeadResp, "app head");
|
|
1641
|
-
await writeLocalBaseline({
|
|
2603
|
+
}
|
|
2604
|
+
const changeStepResp = await params.api.createChangeStep(job.currentAppId, {
|
|
2605
|
+
threadId: job.threadId ?? void 0,
|
|
2606
|
+
collabLaneId: job.laneId ?? void 0,
|
|
2607
|
+
prompt: job.prompt,
|
|
2608
|
+
assistantResponse: job.assistantResponse,
|
|
2609
|
+
diff: submissionDiff,
|
|
2610
|
+
baseCommitHash: submissionBaseHeadHash,
|
|
2611
|
+
headCommitHash: submissionBaseHeadHash,
|
|
2612
|
+
changedFilesCount: diffResult.stats.changedFilesCount,
|
|
2613
|
+
insertions: diffResult.stats.insertions,
|
|
2614
|
+
deletions: diffResult.stats.deletions,
|
|
2615
|
+
actor,
|
|
2616
|
+
workspaceMetadata: buildWorkspaceMetadata({
|
|
1642
2617
|
repoRoot: job.repoRoot,
|
|
1643
|
-
repoFingerprint: job.repoFingerprint,
|
|
1644
|
-
laneId: job.laneId,
|
|
1645
|
-
currentAppId: job.currentAppId,
|
|
1646
2618
|
branchName: job.branchName,
|
|
1647
|
-
|
|
1648
|
-
|
|
1649
|
-
|
|
1650
|
-
|
|
1651
|
-
|
|
1652
|
-
|
|
1653
|
-
|
|
1654
|
-
metadata: { changeStepId: String(changeStep.id ?? "") }
|
|
1655
|
-
});
|
|
1656
|
-
return {
|
|
1657
|
-
mode: "changed_turn",
|
|
1658
|
-
idempotencyKey: job.idempotencyKey ?? "",
|
|
1659
|
-
queued: false,
|
|
1660
|
-
jobId: job.id,
|
|
2619
|
+
remoteUrl,
|
|
2620
|
+
defaultBranch,
|
|
2621
|
+
baselineSnapshotId: job.baselineSnapshotId,
|
|
2622
|
+
currentSnapshotId: job.currentSnapshotId,
|
|
2623
|
+
baselineServerHeadHash: job.baselineServerHeadHash,
|
|
2624
|
+
currentSnapshotHash: snapshot.snapshotHash,
|
|
2625
|
+
localCommitHash: snapshot.localCommitHash,
|
|
1661
2626
|
repoState,
|
|
1662
|
-
|
|
1663
|
-
|
|
1664
|
-
|
|
1665
|
-
|
|
1666
|
-
|
|
1667
|
-
|
|
1668
|
-
|
|
1669
|
-
|
|
1670
|
-
|
|
1671
|
-
|
|
1672
|
-
|
|
1673
|
-
|
|
1674
|
-
|
|
1675
|
-
|
|
1676
|
-
|
|
2627
|
+
replayedFromBaseHash,
|
|
2628
|
+
previousLocalCommitHash,
|
|
2629
|
+
preTurnEvents,
|
|
2630
|
+
turnUsage,
|
|
2631
|
+
promptedAt
|
|
2632
|
+
}),
|
|
2633
|
+
idempotencyKey: job.idempotencyKey ?? void 0
|
|
2634
|
+
});
|
|
2635
|
+
const createdStep = unwrapResponseObject(changeStepResp, "change step");
|
|
2636
|
+
const changeStep = await pollChangeStep(params.api, job.currentAppId, String(createdStep.id));
|
|
2637
|
+
invalidateAppHeadCache(job.currentAppId);
|
|
2638
|
+
invalidateAppDeltaCacheForApp(job.currentAppId);
|
|
2639
|
+
const nextServerHeadHash = typeof changeStep.headCommitHash === "string" ? changeStep.headCommitHash.trim() : "";
|
|
2640
|
+
if (!nextServerHeadHash) {
|
|
2641
|
+
throw buildFinalizeCliError({
|
|
2642
|
+
message: "Backend returned a succeeded change step without a head commit hash.",
|
|
2643
|
+
exitCode: 1,
|
|
2644
|
+
hint: "This is a backend invariant violation; retry will not help. Re-anchor and try again.",
|
|
2645
|
+
disposition: "terminal",
|
|
2646
|
+
reason: "missing_head_commit_hash"
|
|
1677
2647
|
});
|
|
1678
|
-
throw error;
|
|
1679
|
-
} finally {
|
|
1680
|
-
await params.release();
|
|
1681
2648
|
}
|
|
2649
|
+
await writeLocalBaseline({
|
|
2650
|
+
repoRoot: job.repoRoot,
|
|
2651
|
+
repoFingerprint: job.repoFingerprint,
|
|
2652
|
+
laneId: job.laneId,
|
|
2653
|
+
currentAppId: job.currentAppId,
|
|
2654
|
+
branchName: job.branchName,
|
|
2655
|
+
lastSnapshotId: snapshot.id,
|
|
2656
|
+
lastSnapshotHash: snapshot.snapshotHash,
|
|
2657
|
+
lastServerHeadHash: nextServerHeadHash,
|
|
2658
|
+
lastSeenLocalCommitHash: snapshot.localCommitHash
|
|
2659
|
+
});
|
|
2660
|
+
await updatePendingFinalizeJob(job.id, {
|
|
2661
|
+
status: "completed",
|
|
2662
|
+
metadata: { changeStepId: String(changeStep.id ?? "") }
|
|
2663
|
+
});
|
|
2664
|
+
return {
|
|
2665
|
+
mode: "changed_turn",
|
|
2666
|
+
idempotencyKey: job.idempotencyKey ?? "",
|
|
2667
|
+
queued: false,
|
|
2668
|
+
jobId: job.id,
|
|
2669
|
+
repoState,
|
|
2670
|
+
changeStep,
|
|
2671
|
+
collabTurn: null,
|
|
2672
|
+
autoSync: null,
|
|
2673
|
+
warnings: []
|
|
2674
|
+
};
|
|
1682
2675
|
}
|
|
1683
2676
|
async function processPendingFinalizeJob(params) {
|
|
1684
2677
|
const claimed = await claimPendingFinalizeJob(params.jobId);
|
|
@@ -1715,7 +2708,7 @@ async function enqueueCapturedFinalizeTurn(params) {
|
|
|
1715
2708
|
error: null,
|
|
1716
2709
|
retryCount: 0,
|
|
1717
2710
|
lastAttemptAt: null,
|
|
1718
|
-
nextRetryAt: null,
|
|
2711
|
+
nextRetryAt: params.nextRetryAt ?? null,
|
|
1719
2712
|
metadata: params.metadata ?? {}
|
|
1720
2713
|
});
|
|
1721
2714
|
}
|
|
@@ -1741,6 +2734,7 @@ async function drainPendingFinalizeQueue(params) {
|
|
|
1741
2734
|
}
|
|
1742
2735
|
|
|
1743
2736
|
// src/application/collab/collabFinalizeTurn.ts
|
|
2737
|
+
var FINALIZE_AWAIT_INIT_POST_TIMEOUT_MS = 6e4;
|
|
1744
2738
|
function collectWarnings(value) {
|
|
1745
2739
|
if (!Array.isArray(value)) return [];
|
|
1746
2740
|
return value.filter((entry) => typeof entry === "string" && entry.trim().length > 0);
|
|
@@ -1769,37 +2763,99 @@ async function collabFinalizeTurn(params) {
|
|
|
1769
2763
|
hint: "Finalize turns now capture the real workspace boundary from the local snapshot store."
|
|
1770
2764
|
});
|
|
1771
2765
|
}
|
|
2766
|
+
const pendingInit = await findPendingAsyncJob({
|
|
2767
|
+
repoRoot,
|
|
2768
|
+
branchName: binding.branchName ?? null,
|
|
2769
|
+
kind: "init"
|
|
2770
|
+
});
|
|
2771
|
+
if (pendingInit) {
|
|
2772
|
+
throw new RemixError("Cannot finalize a turn while the initial Remix import is still processing.", {
|
|
2773
|
+
exitCode: 2,
|
|
2774
|
+
hint: `Init job ${pendingInit.id} is still in the background queue. Run \`remix collab status\` to check progress.`
|
|
2775
|
+
});
|
|
2776
|
+
}
|
|
2777
|
+
const pendingInitPost = await findPendingAsyncJob({
|
|
2778
|
+
repoRoot,
|
|
2779
|
+
branchName: binding.branchName ?? null,
|
|
2780
|
+
kind: "init_post"
|
|
2781
|
+
});
|
|
2782
|
+
if (pendingInitPost) {
|
|
2783
|
+
const result = await awaitAsyncJob({
|
|
2784
|
+
jobId: pendingInitPost.id,
|
|
2785
|
+
timeoutMs: FINALIZE_AWAIT_INIT_POST_TIMEOUT_MS,
|
|
2786
|
+
pollIntervalMs: 500
|
|
2787
|
+
});
|
|
2788
|
+
if (result.status === "failed") {
|
|
2789
|
+
throw new RemixError("The initial Remix import failed; this turn cannot be recorded.", {
|
|
2790
|
+
exitCode: 2,
|
|
2791
|
+
hint: (result.job?.error ? `Last error: ${result.job.error}
|
|
2792
|
+
|
|
2793
|
+
` : "") + "Run `remix collab init` again to retry \u2014 the post-init drainer cleared the local binding so this is safe."
|
|
2794
|
+
});
|
|
2795
|
+
}
|
|
2796
|
+
if (result.status === "timeout") {
|
|
2797
|
+
throw new RemixError("Timed out waiting for the initial Remix import to finish provisioning.", {
|
|
2798
|
+
exitCode: 2,
|
|
2799
|
+
hint: `Init post job ${pendingInitPost.id} did not complete within ${Math.round(
|
|
2800
|
+
FINALIZE_AWAIT_INIT_POST_TIMEOUT_MS / 1e3
|
|
2801
|
+
)}s. Run \`remix collab status\` to check progress, then retry once it reports ready.`
|
|
2802
|
+
});
|
|
2803
|
+
}
|
|
2804
|
+
}
|
|
2805
|
+
const pendingReAnchor = await findPendingAsyncJob({
|
|
2806
|
+
repoRoot,
|
|
2807
|
+
branchName: binding.branchName ?? null,
|
|
2808
|
+
kind: "re_anchor"
|
|
2809
|
+
});
|
|
2810
|
+
if (pendingReAnchor) {
|
|
2811
|
+
throw new RemixError("Cannot finalize a turn while a re-anchor is still processing.", {
|
|
2812
|
+
exitCode: 2,
|
|
2813
|
+
hint: `Re-anchor job ${pendingReAnchor.id} is still in the background queue. Run \`remix collab status\` to check progress.`
|
|
2814
|
+
});
|
|
2815
|
+
}
|
|
1772
2816
|
const detected = await collabDetectRepoState({
|
|
1773
2817
|
api: params.api,
|
|
1774
2818
|
cwd: repoRoot,
|
|
1775
2819
|
allowBranchMismatch: params.allowBranchMismatch
|
|
1776
2820
|
});
|
|
1777
2821
|
if (detected.status === "not_bound") {
|
|
1778
|
-
throw new RemixError("Repository is not bound to Remix.", {
|
|
2822
|
+
throw new RemixError("Repository is not bound to Remix.", {
|
|
2823
|
+
code: "not_bound",
|
|
2824
|
+
exitCode: 2,
|
|
2825
|
+
hint: detected.hint
|
|
2826
|
+
});
|
|
1779
2827
|
}
|
|
1780
2828
|
if (detected.status === "branch_binding_missing" || detected.status === "family_ambiguous") {
|
|
1781
|
-
throw new RemixError(detected.hint || "Current branch is not ready for Remix recording.", {
|
|
2829
|
+
throw new RemixError(detected.hint || "Current branch is not ready for Remix recording.", {
|
|
2830
|
+
code: detected.status,
|
|
2831
|
+
exitCode: 2,
|
|
2832
|
+
hint: detected.hint
|
|
2833
|
+
});
|
|
1782
2834
|
}
|
|
1783
2835
|
if (detected.status === "metadata_conflict" || detected.status === "branch_mismatch") {
|
|
1784
2836
|
throw new RemixError("Repository must be realigned before finalizing the turn.", {
|
|
2837
|
+
code: detected.status,
|
|
1785
2838
|
exitCode: 2,
|
|
1786
2839
|
hint: detected.hint
|
|
1787
2840
|
});
|
|
1788
2841
|
}
|
|
1789
2842
|
if (detected.status === "missing_head" || detected.status === "remote_error") {
|
|
1790
2843
|
throw new RemixError(detected.hint || "Failed to determine the current repo state.", {
|
|
2844
|
+
code: detected.status,
|
|
1791
2845
|
exitCode: 1,
|
|
1792
2846
|
hint: detected.hint
|
|
1793
2847
|
});
|
|
1794
2848
|
}
|
|
1795
2849
|
if (detected.repoState === "server_only_changed") {
|
|
1796
2850
|
throw new RemixError("Server changes must be pulled locally before finalizing this turn.", {
|
|
2851
|
+
code: "pull_required",
|
|
1797
2852
|
exitCode: 2,
|
|
1798
2853
|
hint: detected.hint
|
|
1799
2854
|
});
|
|
1800
2855
|
}
|
|
1801
2856
|
if (detected.repoState === "external_local_base_changed") {
|
|
1802
2857
|
throw new RemixError("The local checkout must be re-anchored before finalizing this turn.", {
|
|
2858
|
+
code: "re_anchor_required",
|
|
1803
2859
|
exitCode: 2,
|
|
1804
2860
|
hint: detected.hint
|
|
1805
2861
|
});
|
|
@@ -1834,6 +2890,8 @@ async function collabFinalizeTurn(params) {
|
|
|
1834
2890
|
prompt,
|
|
1835
2891
|
assistantResponse
|
|
1836
2892
|
});
|
|
2893
|
+
const awaitingDeadlineMs = typeof params.awaitingUsageDeadlineMs === "number" && params.awaitingUsageDeadlineMs > 0 ? params.awaitingUsageDeadlineMs : null;
|
|
2894
|
+
const nextRetryAt = awaitingDeadlineMs === null ? null : new Date(Date.now() + awaitingDeadlineMs).toISOString();
|
|
1837
2895
|
const job = await enqueueCapturedFinalizeTurn({
|
|
1838
2896
|
repoRoot,
|
|
1839
2897
|
repoFingerprint: binding.repoFingerprint,
|
|
@@ -1851,8 +2909,11 @@ async function collabFinalizeTurn(params) {
|
|
|
1851
2909
|
remoteUrl: binding.remoteUrl,
|
|
1852
2910
|
defaultBranch: binding.defaultBranch,
|
|
1853
2911
|
actor: params.actor ?? null,
|
|
1854
|
-
repoState: detected.repoState
|
|
1855
|
-
|
|
2912
|
+
repoState: detected.repoState,
|
|
2913
|
+
turnUsage: params.turnUsage ?? null,
|
|
2914
|
+
promptedAt: typeof params.promptedAt === "string" && params.promptedAt.trim() ? params.promptedAt.trim() : null
|
|
2915
|
+
},
|
|
2916
|
+
nextRetryAt
|
|
1856
2917
|
});
|
|
1857
2918
|
return {
|
|
1858
2919
|
mode,
|
|
@@ -1867,6 +2928,37 @@ async function collabFinalizeTurn(params) {
|
|
|
1867
2928
|
};
|
|
1868
2929
|
}
|
|
1869
2930
|
|
|
2931
|
+
// src/application/collab/finalizePreflightCodes.ts
|
|
2932
|
+
var FINALIZE_PREFLIGHT_FAILURE_CODES = [
|
|
2933
|
+
// Repo has no .remix/ binding at all. Fix: `remix collab init`.
|
|
2934
|
+
"not_bound",
|
|
2935
|
+
// Binding file exists but the current branch has no entry. Fix: same as
|
|
2936
|
+
// not_bound — `remix collab init` records this branch into the binding.
|
|
2937
|
+
"branch_binding_missing",
|
|
2938
|
+
// Branch matches multiple lane families (very rare race). Fix: human
|
|
2939
|
+
// investigation; auto-fixers should treat as warn-only.
|
|
2940
|
+
"family_ambiguous",
|
|
2941
|
+
// Local-vs-server metadata diverged on app id / lane id. Fix: human
|
|
2942
|
+
// investigation; auto-fixers should treat as warn-only.
|
|
2943
|
+
"metadata_conflict",
|
|
2944
|
+
// Currently checked-out branch differs from the one in the binding.
|
|
2945
|
+
// Fix: human (either checkout the recorded branch or update binding).
|
|
2946
|
+
"branch_mismatch",
|
|
2947
|
+
// HEAD ref unreadable / git reports an error. Fix: human investigation.
|
|
2948
|
+
"missing_head",
|
|
2949
|
+
"remote_error",
|
|
2950
|
+
// Server has commits we don't. Fix: `remix collab sync` (safe to
|
|
2951
|
+
// auto-run for fast-forward; non-FF refused by the command itself).
|
|
2952
|
+
"pull_required",
|
|
2953
|
+
// Local base hash doesn't match the recorded baseline (force-push,
|
|
2954
|
+
// hard reset, rebase). Fix: `remix collab re-anchor`.
|
|
2955
|
+
"re_anchor_required"
|
|
2956
|
+
];
|
|
2957
|
+
var CODE_SET = new Set(FINALIZE_PREFLIGHT_FAILURE_CODES);
|
|
2958
|
+
function isFinalizePreflightFailureCode(value) {
|
|
2959
|
+
return typeof value === "string" && CODE_SET.has(value);
|
|
2960
|
+
}
|
|
2961
|
+
|
|
1870
2962
|
// src/application/collab/recordingPreflight.ts
|
|
1871
2963
|
async function collabRecordingPreflight(params) {
|
|
1872
2964
|
const detected = await collabDetectRepoState({
|
|
@@ -1924,9 +3016,9 @@ async function collabRecordingPreflight(params) {
|
|
|
1924
3016
|
}
|
|
1925
3017
|
|
|
1926
3018
|
// src/infrastructure/locking/repoMutationLock.ts
|
|
1927
|
-
import
|
|
3019
|
+
import fs5 from "fs/promises";
|
|
1928
3020
|
import os3 from "os";
|
|
1929
|
-
import
|
|
3021
|
+
import path6 from "path";
|
|
1930
3022
|
var DEFAULT_ACQUIRE_TIMEOUT_MS = 15e3;
|
|
1931
3023
|
var DEFAULT_STALE_MS = 45e3;
|
|
1932
3024
|
var DEFAULT_HEARTBEAT_MS = 5e3;
|
|
@@ -1949,12 +3041,12 @@ function createOwner(params) {
|
|
|
1949
3041
|
};
|
|
1950
3042
|
}
|
|
1951
3043
|
async function writeOwnerMetadata(ownerPath, owner) {
|
|
1952
|
-
await
|
|
3044
|
+
await fs5.writeFile(ownerPath, `${JSON.stringify(owner, null, 2)}
|
|
1953
3045
|
`, "utf8");
|
|
1954
3046
|
}
|
|
1955
3047
|
async function readOwnerMetadata(ownerPath) {
|
|
1956
3048
|
try {
|
|
1957
|
-
const raw = await
|
|
3049
|
+
const raw = await fs5.readFile(ownerPath, "utf8");
|
|
1958
3050
|
const parsed = JSON.parse(raw);
|
|
1959
3051
|
if (!parsed || typeof parsed !== "object") return null;
|
|
1960
3052
|
if (!parsed.operation || !parsed.repoRoot || typeof parsed.pid !== "number" || !parsed.startedAt || !parsed.heartbeatAt) {
|
|
@@ -1991,23 +3083,23 @@ async function getLastKnownUpdateMs(lockDir, ownerPath, owner) {
|
|
|
1991
3083
|
if (Number.isFinite(heartbeatMs)) return heartbeatMs;
|
|
1992
3084
|
const startedMs = owner ? Date.parse(owner.startedAt) : Number.NaN;
|
|
1993
3085
|
if (Number.isFinite(startedMs)) return startedMs;
|
|
1994
|
-
const stat = await
|
|
3086
|
+
const stat = await fs5.stat(ownerPath).catch(() => null);
|
|
1995
3087
|
if (stat) return stat.mtimeMs;
|
|
1996
|
-
const dirStat = await
|
|
3088
|
+
const dirStat = await fs5.stat(lockDir).catch(() => null);
|
|
1997
3089
|
if (dirStat) return dirStat.mtimeMs;
|
|
1998
3090
|
return 0;
|
|
1999
3091
|
}
|
|
2000
3092
|
async function ensureLockDir(lockDir) {
|
|
2001
|
-
await
|
|
3093
|
+
await fs5.mkdir(path6.dirname(lockDir), { recursive: true });
|
|
2002
3094
|
}
|
|
2003
3095
|
async function tryAcquireLock(lockDir, ownerPath, owner) {
|
|
2004
3096
|
try {
|
|
2005
3097
|
await ensureLockDir(lockDir);
|
|
2006
|
-
await
|
|
3098
|
+
await fs5.mkdir(lockDir);
|
|
2007
3099
|
try {
|
|
2008
3100
|
await writeOwnerMetadata(ownerPath, owner);
|
|
2009
3101
|
} catch (error) {
|
|
2010
|
-
await
|
|
3102
|
+
await fs5.rm(lockDir, { recursive: true, force: true }).catch(() => void 0);
|
|
2011
3103
|
throw error;
|
|
2012
3104
|
}
|
|
2013
3105
|
return true;
|
|
@@ -2056,7 +3148,7 @@ async function acquirePhysicalLock(lockDir, ownerPath, owner, options) {
|
|
|
2056
3148
|
const alive = await isProcessAlive(currentOwner2);
|
|
2057
3149
|
if (ageMs >= options.staleMs && alive !== true) {
|
|
2058
3150
|
notices.push(buildStaleRecoveryNotice(currentOwner2));
|
|
2059
|
-
await
|
|
3151
|
+
await fs5.rm(lockDir, { recursive: true, force: true }).catch(() => void 0);
|
|
2060
3152
|
continue;
|
|
2061
3153
|
}
|
|
2062
3154
|
await sleep2(RETRY_DELAY_MS);
|
|
@@ -2081,7 +3173,7 @@ function startHeartbeat(lockDir, ownerPath, owner, heartbeatMs) {
|
|
|
2081
3173
|
};
|
|
2082
3174
|
owner.heartbeatAt = nextOwner.heartbeatAt;
|
|
2083
3175
|
void writeOwnerMetadata(ownerPath, nextOwner).catch(() => void 0);
|
|
2084
|
-
void
|
|
3176
|
+
void fs5.utimes(lockDir, /* @__PURE__ */ new Date(), /* @__PURE__ */ new Date()).catch(() => void 0);
|
|
2085
3177
|
}, heartbeatMs);
|
|
2086
3178
|
}
|
|
2087
3179
|
async function releaseReentrantLock(lockDir) {
|
|
@@ -2091,12 +3183,12 @@ async function releaseReentrantLock(lockDir) {
|
|
|
2091
3183
|
if (held.count > 0) return;
|
|
2092
3184
|
clearInterval(held.heartbeatTimer);
|
|
2093
3185
|
heldLocks.delete(lockDir);
|
|
2094
|
-
await
|
|
3186
|
+
await fs5.rm(lockDir, { recursive: true, force: true }).catch(() => void 0);
|
|
2095
3187
|
}
|
|
2096
3188
|
async function withRepoMutationLock(options, fn) {
|
|
2097
3189
|
const repoRoot = await findGitRoot(options.cwd);
|
|
2098
3190
|
const gitCommonDir = await getGitCommonDir(repoRoot);
|
|
2099
|
-
const lockDir =
|
|
3191
|
+
const lockDir = path6.join(gitCommonDir, "remix", "locks", "repo-mutation.lock");
|
|
2100
3192
|
const owner = createOwner({
|
|
2101
3193
|
operation: options.operation,
|
|
2102
3194
|
repoRoot,
|
|
@@ -2108,11 +3200,11 @@ async function withRepoMutationLock(options, fn) {
|
|
|
2108
3200
|
const existing = heldLocks.get(lockDir);
|
|
2109
3201
|
let notices = [];
|
|
2110
3202
|
if (!existing) {
|
|
2111
|
-
notices = await acquirePhysicalLock(lockDir,
|
|
3203
|
+
notices = await acquirePhysicalLock(lockDir, path6.join(lockDir, "owner.json"), owner, {
|
|
2112
3204
|
acquireTimeoutMs,
|
|
2113
3205
|
staleMs
|
|
2114
3206
|
});
|
|
2115
|
-
const ownerPath =
|
|
3207
|
+
const ownerPath = path6.join(lockDir, "owner.json");
|
|
2116
3208
|
heldLocks.set(lockDir, {
|
|
2117
3209
|
count: 1,
|
|
2118
3210
|
lockDir,
|
|
@@ -2224,7 +3316,7 @@ async function collabSync(params) {
|
|
|
2224
3316
|
if (!headCommitHash) {
|
|
2225
3317
|
throw new RemixError("Failed to resolve local HEAD commit for the initial sync bootstrap.", {
|
|
2226
3318
|
exitCode: 1,
|
|
2227
|
-
hint: "Retry after Git HEAD is available, or run `remix collab re-anchor` if this checkout
|
|
3319
|
+
hint: "Retry after Git HEAD is available, or run `remix collab re-anchor` if this checkout has no local Remix baseline yet."
|
|
2228
3320
|
});
|
|
2229
3321
|
}
|
|
2230
3322
|
baselineServerHeadHash = headCommitHash;
|
|
@@ -2275,7 +3367,7 @@ async function collabSync(params) {
|
|
|
2275
3367
|
if (delta.status === "base_unknown") {
|
|
2276
3368
|
throw new RemixError("Direct pull is unavailable because Remix can no longer diff from the last acknowledged server head.", {
|
|
2277
3369
|
exitCode: 2,
|
|
2278
|
-
hint: "Run `remix collab reconcile --dry-run` to inspect recovery options before retrying. If this checkout
|
|
3370
|
+
hint: "Run `remix collab reconcile --dry-run` to inspect recovery options before retrying. If this checkout has no local Remix baseline yet for this lane, `remix collab re-anchor` may be required."
|
|
2279
3371
|
});
|
|
2280
3372
|
}
|
|
2281
3373
|
if (delta.status !== "delta_ready") {
|
|
@@ -2418,38 +3510,38 @@ async function collabApprove(params) {
|
|
|
2418
3510
|
}
|
|
2419
3511
|
|
|
2420
3512
|
// src/application/collab/checkoutWorkspace.ts
|
|
2421
|
-
import
|
|
3513
|
+
import fs6 from "fs/promises";
|
|
2422
3514
|
import os4 from "os";
|
|
2423
|
-
import
|
|
3515
|
+
import path7 from "path";
|
|
2424
3516
|
async function pathExists(targetPath) {
|
|
2425
3517
|
try {
|
|
2426
|
-
await
|
|
3518
|
+
await fs6.access(targetPath);
|
|
2427
3519
|
return true;
|
|
2428
3520
|
} catch {
|
|
2429
3521
|
return false;
|
|
2430
3522
|
}
|
|
2431
3523
|
}
|
|
2432
3524
|
async function statIsDirectory(targetPath) {
|
|
2433
|
-
const stats = await
|
|
3525
|
+
const stats = await fs6.stat(targetPath).catch(() => null);
|
|
2434
3526
|
return Boolean(stats?.isDirectory());
|
|
2435
3527
|
}
|
|
2436
3528
|
async function findContainingGitRoot(startPath) {
|
|
2437
|
-
let current =
|
|
3529
|
+
let current = path7.resolve(startPath);
|
|
2438
3530
|
while (true) {
|
|
2439
|
-
if (await pathExists(
|
|
2440
|
-
const parent =
|
|
3531
|
+
if (await pathExists(path7.join(current, ".git"))) return current;
|
|
3532
|
+
const parent = path7.dirname(current);
|
|
2441
3533
|
if (parent === current) return null;
|
|
2442
3534
|
current = parent;
|
|
2443
3535
|
}
|
|
2444
3536
|
}
|
|
2445
3537
|
function isSubpath(parentPath, candidatePath) {
|
|
2446
|
-
const relative =
|
|
2447
|
-
return relative === "" || !relative.startsWith("..") && !
|
|
3538
|
+
const relative = path7.relative(parentPath, candidatePath);
|
|
3539
|
+
return relative === "" || !relative.startsWith("..") && !path7.isAbsolute(relative);
|
|
2448
3540
|
}
|
|
2449
3541
|
async function resolveCheckoutDestination(params) {
|
|
2450
3542
|
if (params.outputDir?.trim()) {
|
|
2451
|
-
const preferredRepoRoot =
|
|
2452
|
-
const parentDir2 =
|
|
3543
|
+
const preferredRepoRoot = path7.resolve(params.outputDir.trim());
|
|
3544
|
+
const parentDir2 = path7.dirname(preferredRepoRoot);
|
|
2453
3545
|
if (!await statIsDirectory(parentDir2)) {
|
|
2454
3546
|
throw new RemixError("Remix output parent directory does not exist.", {
|
|
2455
3547
|
exitCode: 2,
|
|
@@ -2462,7 +3554,7 @@ async function resolveCheckoutDestination(params) {
|
|
|
2462
3554
|
explicitOutputDir: true
|
|
2463
3555
|
};
|
|
2464
3556
|
}
|
|
2465
|
-
const parentDir =
|
|
3557
|
+
const parentDir = path7.resolve(params.cwd);
|
|
2466
3558
|
if (!await statIsDirectory(parentDir)) {
|
|
2467
3559
|
throw new RemixError("Remix output parent directory does not exist.", {
|
|
2468
3560
|
exitCode: 2,
|
|
@@ -2470,7 +3562,7 @@ async function resolveCheckoutDestination(params) {
|
|
|
2470
3562
|
});
|
|
2471
3563
|
}
|
|
2472
3564
|
return {
|
|
2473
|
-
preferredRepoRoot:
|
|
3565
|
+
preferredRepoRoot: path7.join(parentDir, params.defaultDirName),
|
|
2474
3566
|
parentDir,
|
|
2475
3567
|
explicitOutputDir: false
|
|
2476
3568
|
};
|
|
@@ -2504,11 +3596,11 @@ async function materializeAppCheckout(params) {
|
|
|
2504
3596
|
explicitOutputDir: destination.explicitOutputDir
|
|
2505
3597
|
});
|
|
2506
3598
|
const repoRoot = destination.explicitOutputDir ? await reserveDirectory(destination.preferredRepoRoot) : await reserveAvailableDirPath(destination.preferredRepoRoot);
|
|
2507
|
-
const bundleTempDir = await
|
|
2508
|
-
const bundlePath =
|
|
3599
|
+
const bundleTempDir = await fs6.mkdtemp(path7.join(os4.tmpdir(), "remix-checkout-"));
|
|
3600
|
+
const bundlePath = path7.join(bundleTempDir, "repository.bundle");
|
|
2509
3601
|
try {
|
|
2510
3602
|
const bundle = await params.api.downloadAppBundle(params.appId);
|
|
2511
|
-
await
|
|
3603
|
+
await fs6.writeFile(bundlePath, bundle.data);
|
|
2512
3604
|
await cloneGitBundleToDirectory(bundlePath, repoRoot);
|
|
2513
3605
|
if (params.expectedBranchName?.trim()) {
|
|
2514
3606
|
await checkoutLocalBranch(repoRoot, params.expectedBranchName.trim());
|
|
@@ -2518,11 +3610,11 @@ async function materializeAppCheckout(params) {
|
|
|
2518
3610
|
}
|
|
2519
3611
|
await ensureGitInfoExcludeEntries(repoRoot, [".remix/"]);
|
|
2520
3612
|
} catch (err) {
|
|
2521
|
-
await
|
|
3613
|
+
await fs6.rm(repoRoot, { recursive: true, force: true }).catch(() => {
|
|
2522
3614
|
});
|
|
2523
3615
|
throw err;
|
|
2524
3616
|
} finally {
|
|
2525
|
-
await
|
|
3617
|
+
await fs6.rm(bundleTempDir, { recursive: true, force: true });
|
|
2526
3618
|
}
|
|
2527
3619
|
const branchName = await getCurrentBranch(repoRoot) ?? params.expectedBranchName?.trim() ?? null;
|
|
2528
3620
|
const remoteUrl = normalizeGitRemote(params.expectedRemoteUrl ?? await getRemoteOriginUrl(repoRoot));
|
|
@@ -2652,7 +3744,7 @@ async function resolveQueueAppId(params) {
|
|
|
2652
3744
|
if (bindingResolution.status === "missing_branch_binding") {
|
|
2653
3745
|
throw new RemixError("Current branch is not yet bound to a Remix lane.", {
|
|
2654
3746
|
exitCode: 2,
|
|
2655
|
-
hint: `Switch back to a bound branch or
|
|
3747
|
+
hint: `Switch back to a bound branch, or run \`remix collab init\` on ${bindingResolution.currentBranch ?? "this branch"} before listing merge requests.`
|
|
2656
3748
|
});
|
|
2657
3749
|
}
|
|
2658
3750
|
if (bindingResolution.status === "binding_conflict") {
|
|
@@ -2797,16 +3889,38 @@ async function collabUpdateMemberRole(params) {
|
|
|
2797
3889
|
}
|
|
2798
3890
|
|
|
2799
3891
|
// src/application/collab/collabInit.ts
|
|
2800
|
-
import
|
|
2801
|
-
import
|
|
3892
|
+
import { randomUUID as randomUUID4 } from "crypto";
|
|
3893
|
+
import fs10 from "fs/promises";
|
|
3894
|
+
import path9 from "path";
|
|
3895
|
+
|
|
3896
|
+
// src/infrastructure/collab/drainerLog.ts
|
|
3897
|
+
import fs7 from "fs/promises";
|
|
3898
|
+
import path8 from "path";
|
|
3899
|
+
async function logDrainerEvent(jobId, event, fields = {}) {
|
|
3900
|
+
const entry = {
|
|
3901
|
+
ts: (/* @__PURE__ */ new Date()).toISOString(),
|
|
3902
|
+
jobId,
|
|
3903
|
+
event,
|
|
3904
|
+
...fields
|
|
3905
|
+
};
|
|
3906
|
+
try {
|
|
3907
|
+
await fs7.mkdir(getLogsRoot(), { recursive: true });
|
|
3908
|
+
await fs7.appendFile(getDrainerLogPath(), `${JSON.stringify(entry)}
|
|
3909
|
+
`, "utf8");
|
|
3910
|
+
} catch {
|
|
3911
|
+
}
|
|
3912
|
+
}
|
|
3913
|
+
function drainerLogPath() {
|
|
3914
|
+
return getDrainerLogPath();
|
|
3915
|
+
}
|
|
2802
3916
|
|
|
2803
3917
|
// src/shared/hash.ts
|
|
2804
3918
|
import crypto from "crypto";
|
|
2805
|
-
import
|
|
3919
|
+
import fs8 from "fs";
|
|
2806
3920
|
async function sha256FileHex(filePath) {
|
|
2807
3921
|
const hash = crypto.createHash("sha256");
|
|
2808
3922
|
await new Promise((resolve, reject) => {
|
|
2809
|
-
const stream =
|
|
3923
|
+
const stream = fs8.createReadStream(filePath);
|
|
2810
3924
|
stream.on("data", (chunk) => hash.update(chunk));
|
|
2811
3925
|
stream.on("error", reject);
|
|
2812
3926
|
stream.on("end", () => resolve());
|
|
@@ -2815,15 +3929,15 @@ async function sha256FileHex(filePath) {
|
|
|
2815
3929
|
}
|
|
2816
3930
|
|
|
2817
3931
|
// src/shared/upload.ts
|
|
2818
|
-
import
|
|
3932
|
+
import fs9 from "fs";
|
|
2819
3933
|
import { PassThrough } from "stream";
|
|
2820
3934
|
async function uploadPresigned(params) {
|
|
2821
|
-
const stats = await
|
|
3935
|
+
const stats = await fs9.promises.stat(params.filePath).catch(() => null);
|
|
2822
3936
|
if (!stats || !stats.isFile()) {
|
|
2823
3937
|
throw new RemixError("Upload file not found.", { exitCode: 2 });
|
|
2824
3938
|
}
|
|
2825
3939
|
const totalBytes = stats.size;
|
|
2826
|
-
const fileStream =
|
|
3940
|
+
const fileStream = fs9.createReadStream(params.filePath);
|
|
2827
3941
|
const pass = new PassThrough();
|
|
2828
3942
|
let sentBytes = 0;
|
|
2829
3943
|
fileStream.on("data", (chunk) => {
|
|
@@ -3122,6 +4236,60 @@ async function collabInit(params) {
|
|
|
3122
4236
|
const remoteUrl = persistedRemoteUrl ?? normalizeGitRemote(await getRemoteOriginUrl(repoRoot));
|
|
3123
4237
|
const repoFingerprint = localBindingState?.repoFingerprint ?? await buildRepoFingerprint({ gitRoot: repoRoot, remoteUrl, defaultBranch });
|
|
3124
4238
|
const repoSnapshot = await captureRepoSnapshot(repoRoot);
|
|
4239
|
+
if (params.asyncSubmit) {
|
|
4240
|
+
if (params.forceNew) {
|
|
4241
|
+
for (const kind of ["init", "init_post"]) {
|
|
4242
|
+
const existing = await listAsyncJobsForRepo({ repoRoot, branchName, kind });
|
|
4243
|
+
for (const job of existing) {
|
|
4244
|
+
await deleteAsyncJob(job.id);
|
|
4245
|
+
await logDrainerEvent(job.id, "replaced", {
|
|
4246
|
+
kind,
|
|
4247
|
+
reason: "force_new",
|
|
4248
|
+
previousStatus: job.status,
|
|
4249
|
+
previousError: job.error
|
|
4250
|
+
});
|
|
4251
|
+
}
|
|
4252
|
+
}
|
|
4253
|
+
} else {
|
|
4254
|
+
const pendingPost = await findPendingAsyncJob({ repoRoot, branchName, kind: "init_post" });
|
|
4255
|
+
const pending = pendingPost ?? await findPendingAsyncJob({ repoRoot, branchName, kind: "init" });
|
|
4256
|
+
if (pending) {
|
|
4257
|
+
const existingBinding = await readCollabBinding(repoRoot);
|
|
4258
|
+
if (existingBinding && existingBinding.currentAppId) {
|
|
4259
|
+
return {
|
|
4260
|
+
queued: true,
|
|
4261
|
+
jobId: pending.id,
|
|
4262
|
+
repoRoot,
|
|
4263
|
+
projectId: existingBinding.projectId ?? "",
|
|
4264
|
+
appId: existingBinding.currentAppId,
|
|
4265
|
+
upstreamAppId: existingBinding.upstreamAppId ?? existingBinding.currentAppId,
|
|
4266
|
+
dashboardUrl: buildDashboardAppUrl(existingBinding.currentAppId),
|
|
4267
|
+
bindingPath: getCollabBindingPath(repoRoot),
|
|
4268
|
+
bindingMode: existingBinding.bindingMode,
|
|
4269
|
+
createdCanonicalFamily: false,
|
|
4270
|
+
remoteUrl: existingBinding.remoteUrl ?? remoteUrl,
|
|
4271
|
+
defaultBranch: existingBinding.defaultBranch ?? defaultBranch ?? null,
|
|
4272
|
+
...warnings.length > 0 ? { warnings } : {}
|
|
4273
|
+
};
|
|
4274
|
+
}
|
|
4275
|
+
return {
|
|
4276
|
+
queued: true,
|
|
4277
|
+
jobId: pending.id,
|
|
4278
|
+
repoRoot,
|
|
4279
|
+
projectId: "",
|
|
4280
|
+
appId: "",
|
|
4281
|
+
upstreamAppId: "",
|
|
4282
|
+
dashboardUrl: "",
|
|
4283
|
+
bindingPath: getCollabBindingPath(repoRoot),
|
|
4284
|
+
bindingMode: "lane",
|
|
4285
|
+
createdCanonicalFamily: false,
|
|
4286
|
+
remoteUrl,
|
|
4287
|
+
defaultBranch: defaultBranch ?? null,
|
|
4288
|
+
...warnings.length > 0 ? { warnings } : {}
|
|
4289
|
+
};
|
|
4290
|
+
}
|
|
4291
|
+
}
|
|
4292
|
+
}
|
|
3125
4293
|
if (!params.forceNew && localBindingState?.explicitRootBinding && branchName) {
|
|
3126
4294
|
const explicitRoot = localBindingState.explicitRootBinding;
|
|
3127
4295
|
const explicitProjectId = explicitRoot.projectId ?? localBindingState.projectId;
|
|
@@ -3172,7 +4340,7 @@ async function collabInit(params) {
|
|
|
3172
4340
|
boundThreadId2 = canonicalLane2.threadId ?? boundThreadId2;
|
|
3173
4341
|
boundLaneId2 = canonicalLane2.laneId ?? null;
|
|
3174
4342
|
}
|
|
3175
|
-
const readyApp = await
|
|
4343
|
+
const readyApp = await pollAppImported(params.api, boundCurrentAppId2);
|
|
3176
4344
|
boundProjectId2 = String(readyApp.projectId ?? boundProjectId2);
|
|
3177
4345
|
boundThreadId2 = readyApp.threadId ? String(readyApp.threadId) : boundThreadId2;
|
|
3178
4346
|
await assertRepoSnapshotUnchanged(repoRoot, repoSnapshot, {
|
|
@@ -3211,7 +4379,7 @@ async function collabInit(params) {
|
|
|
3211
4379
|
appId: boundCurrentAppId2,
|
|
3212
4380
|
dashboardUrl: buildDashboardAppUrl(boundCurrentAppId2),
|
|
3213
4381
|
upstreamAppId: boundUpstreamAppId2,
|
|
3214
|
-
bindingPath:
|
|
4382
|
+
bindingPath: path9.join(repoRoot, ".remix", "config.json"),
|
|
3215
4383
|
repoRoot,
|
|
3216
4384
|
bindingMode: defaultBranch && branchName !== defaultBranch ? "lane" : "explicit_root",
|
|
3217
4385
|
createdCanonicalFamily: false,
|
|
@@ -3284,7 +4452,7 @@ async function collabInit(params) {
|
|
|
3284
4452
|
boundThreadId2 = canonicalLane2.threadId ?? boundThreadId2;
|
|
3285
4453
|
boundLaneId2 = canonicalLane2.laneId ?? null;
|
|
3286
4454
|
}
|
|
3287
|
-
const readyApp = await
|
|
4455
|
+
const readyApp = await pollAppImported(params.api, boundCurrentAppId2);
|
|
3288
4456
|
boundProjectId2 = String(readyApp.projectId ?? boundProjectId2);
|
|
3289
4457
|
boundThreadId2 = readyApp.threadId ? String(readyApp.threadId) : boundThreadId2;
|
|
3290
4458
|
await assertRepoSnapshotUnchanged(repoRoot, repoSnapshot, {
|
|
@@ -3413,7 +4581,7 @@ async function collabInit(params) {
|
|
|
3413
4581
|
}
|
|
3414
4582
|
}
|
|
3415
4583
|
if (boundCurrentAppId2) {
|
|
3416
|
-
const readyApp = await
|
|
4584
|
+
const readyApp = await pollAppImported(params.api, boundCurrentAppId2);
|
|
3417
4585
|
boundProjectId2 = String(readyApp.projectId ?? boundProjectId2);
|
|
3418
4586
|
boundThreadId2 = readyApp.threadId ? String(readyApp.threadId) : boundThreadId2;
|
|
3419
4587
|
}
|
|
@@ -3435,46 +4603,251 @@ async function collabInit(params) {
|
|
|
3435
4603
|
bindingMode: "lane"
|
|
3436
4604
|
});
|
|
3437
4605
|
}
|
|
3438
|
-
const bindingPath2 = await writeCollabBinding(repoRoot, {
|
|
3439
|
-
projectId: boundProjectId2,
|
|
3440
|
-
currentAppId: boundCurrentAppId2,
|
|
3441
|
-
upstreamAppId: boundUpstreamAppId2,
|
|
3442
|
-
threadId: boundThreadId2,
|
|
4606
|
+
const bindingPath2 = await writeCollabBinding(repoRoot, {
|
|
4607
|
+
projectId: boundProjectId2,
|
|
4608
|
+
currentAppId: boundCurrentAppId2,
|
|
4609
|
+
upstreamAppId: boundUpstreamAppId2,
|
|
4610
|
+
threadId: boundThreadId2,
|
|
4611
|
+
repoFingerprint,
|
|
4612
|
+
remoteUrl,
|
|
4613
|
+
defaultBranch: defaultBranch ?? null,
|
|
4614
|
+
laneId: boundLaneId2,
|
|
4615
|
+
branchName,
|
|
4616
|
+
bindingMode: "lane"
|
|
4617
|
+
});
|
|
4618
|
+
return {
|
|
4619
|
+
reused: true,
|
|
4620
|
+
projectId: boundProjectId2,
|
|
4621
|
+
appId: boundCurrentAppId2,
|
|
4622
|
+
dashboardUrl: buildDashboardAppUrl(boundCurrentAppId2),
|
|
4623
|
+
upstreamAppId: boundUpstreamAppId2,
|
|
4624
|
+
bindingPath: bindingPath2,
|
|
4625
|
+
repoRoot,
|
|
4626
|
+
bindingMode: "lane",
|
|
4627
|
+
createdCanonicalFamily: false,
|
|
4628
|
+
baselineStatus: await resolveInitBaselineStatus({
|
|
4629
|
+
api: params.api,
|
|
4630
|
+
repoRoot,
|
|
4631
|
+
repoFingerprint,
|
|
4632
|
+
laneId: boundLaneId2,
|
|
4633
|
+
currentAppId: boundCurrentAppId2,
|
|
4634
|
+
upstreamAppId: boundUpstreamAppId2,
|
|
4635
|
+
branchName,
|
|
4636
|
+
remoteUrl,
|
|
4637
|
+
defaultBranch
|
|
4638
|
+
}),
|
|
4639
|
+
...warnings.length > 0 ? { warnings } : {}
|
|
4640
|
+
};
|
|
4641
|
+
}
|
|
4642
|
+
}
|
|
4643
|
+
if (params.asyncSubmit) {
|
|
4644
|
+
if (!params.forceNew) {
|
|
4645
|
+
const failed = await findFailedAsyncJob({ repoRoot, branchName, kind: "init" });
|
|
4646
|
+
if (failed) {
|
|
4647
|
+
await deleteAsyncJob(failed.id);
|
|
4648
|
+
}
|
|
4649
|
+
const failedPost = await findFailedAsyncJob({ repoRoot, branchName, kind: "init_post" });
|
|
4650
|
+
if (failedPost) {
|
|
4651
|
+
await deleteAsyncJob(failedPost.id);
|
|
4652
|
+
}
|
|
4653
|
+
}
|
|
4654
|
+
const syncPhaseStartedAt = Date.now();
|
|
4655
|
+
const { bundlePath: bundleFilePath, headCommitHash: bundledHeadCommitHash } = await createGitBundle(
|
|
4656
|
+
repoRoot,
|
|
4657
|
+
"repository.bundle"
|
|
4658
|
+
);
|
|
4659
|
+
const tmpBundleDir = path9.dirname(bundleFilePath);
|
|
4660
|
+
try {
|
|
4661
|
+
const bundleSha2 = await sha256FileHex(bundleFilePath);
|
|
4662
|
+
const bundleSize2 = (await fs10.stat(bundleFilePath)).size;
|
|
4663
|
+
const presignResp2 = await params.api.presignImportUploadFirstParty({
|
|
4664
|
+
file: {
|
|
4665
|
+
name: "repository.bundle",
|
|
4666
|
+
mimeType: "application/x-git-bundle",
|
|
4667
|
+
size: bundleSize2,
|
|
4668
|
+
checksumSha256: bundleSha2
|
|
4669
|
+
}
|
|
4670
|
+
});
|
|
4671
|
+
const presign2 = unwrapResponseObject(presignResp2, "upload");
|
|
4672
|
+
await uploadPresigned({
|
|
4673
|
+
uploadUrl: String(presign2.uploadUrl),
|
|
4674
|
+
headers: presign2.headers ?? {},
|
|
4675
|
+
filePath: bundleFilePath
|
|
4676
|
+
});
|
|
4677
|
+
const importResp2 = await params.api.importFromUploadFirstParty({
|
|
4678
|
+
uploadId: String(presign2.uploadId),
|
|
4679
|
+
appName: params.appName?.trim() || path9.basename(repoRoot),
|
|
4680
|
+
platform: "generic",
|
|
4681
|
+
isPublic: false,
|
|
4682
|
+
branch: defaultBranch && branchName && branchName !== defaultBranch ? defaultBranch : currentBranch ?? void 0,
|
|
4683
|
+
remoteUrl: remoteUrl ?? void 0,
|
|
4684
|
+
defaultBranch: defaultBranch ?? void 0,
|
|
4685
|
+
repoFingerprint,
|
|
4686
|
+
headCommitHash: bundledHeadCommitHash,
|
|
4687
|
+
forceNew: params.forceNew
|
|
4688
|
+
});
|
|
4689
|
+
const imported2 = unwrapResponseObject(importResp2, "import");
|
|
4690
|
+
const importedAppId = String(imported2.appId);
|
|
4691
|
+
let boundProjectId2 = String(imported2.projectId ?? "");
|
|
4692
|
+
let boundCurrentAppId2 = importedAppId;
|
|
4693
|
+
let boundUpstreamAppId2 = importedAppId;
|
|
4694
|
+
let boundThreadId2 = imported2.threadId ? String(imported2.threadId) : null;
|
|
4695
|
+
let boundLaneId2 = null;
|
|
4696
|
+
let canonicalLane2 = null;
|
|
4697
|
+
if (branchName) {
|
|
4698
|
+
if (defaultBranch && branchName !== defaultBranch) {
|
|
4699
|
+
canonicalLane2 = await resolveOrEnsureLaneBinding({
|
|
4700
|
+
api: params.api,
|
|
4701
|
+
projectId: boundProjectId2 || void 0,
|
|
4702
|
+
repoFingerprint,
|
|
4703
|
+
remoteUrl,
|
|
4704
|
+
defaultBranch,
|
|
4705
|
+
branchName: defaultBranch,
|
|
4706
|
+
seedAppId: importedAppId,
|
|
4707
|
+
operation: "`remix collab init`"
|
|
4708
|
+
});
|
|
4709
|
+
const lane = await resolveOrEnsureLaneBinding({
|
|
4710
|
+
api: params.api,
|
|
4711
|
+
projectId: canonicalLane2.projectId ?? boundProjectId2 ?? void 0,
|
|
4712
|
+
repoFingerprint,
|
|
4713
|
+
remoteUrl,
|
|
4714
|
+
defaultBranch,
|
|
4715
|
+
branchName,
|
|
4716
|
+
seedHeadCommitHash: bundledHeadCommitHash,
|
|
4717
|
+
operation: "`remix collab init`"
|
|
4718
|
+
});
|
|
4719
|
+
boundProjectId2 = lane.projectId ?? boundProjectId2;
|
|
4720
|
+
boundCurrentAppId2 = lane.currentAppId ?? boundCurrentAppId2;
|
|
4721
|
+
boundUpstreamAppId2 = lane.upstreamAppId ?? boundUpstreamAppId2;
|
|
4722
|
+
boundThreadId2 = lane.threadId ?? boundThreadId2;
|
|
4723
|
+
boundLaneId2 = lane.laneId ?? null;
|
|
4724
|
+
} else {
|
|
4725
|
+
const lane = await resolveOrEnsureLaneBinding({
|
|
4726
|
+
api: params.api,
|
|
4727
|
+
projectId: boundProjectId2 || void 0,
|
|
4728
|
+
repoFingerprint,
|
|
4729
|
+
remoteUrl,
|
|
4730
|
+
defaultBranch,
|
|
4731
|
+
branchName,
|
|
4732
|
+
seedAppId: importedAppId,
|
|
4733
|
+
operation: "`remix collab init`"
|
|
4734
|
+
});
|
|
4735
|
+
canonicalLane2 = lane;
|
|
4736
|
+
boundProjectId2 = lane.projectId ?? boundProjectId2;
|
|
4737
|
+
boundCurrentAppId2 = lane.currentAppId ?? boundCurrentAppId2;
|
|
4738
|
+
boundUpstreamAppId2 = lane.upstreamAppId ?? boundUpstreamAppId2;
|
|
4739
|
+
boundThreadId2 = lane.threadId ?? boundThreadId2;
|
|
4740
|
+
boundLaneId2 = lane.laneId ?? null;
|
|
4741
|
+
}
|
|
4742
|
+
}
|
|
4743
|
+
await assertRepoSnapshotUnchanged(repoRoot, repoSnapshot, {
|
|
4744
|
+
operation: "`remix collab init`",
|
|
4745
|
+
recoveryHint: "The repository changed before the Remix binding was written. Review the local changes and rerun `remix collab init`."
|
|
4746
|
+
});
|
|
4747
|
+
const bindingMode2 = params.forceNew && (!defaultBranch || branchName === defaultBranch) ? "explicit_root" : "lane";
|
|
4748
|
+
let bindingPath2;
|
|
4749
|
+
if (params.forceNew && defaultBranch && canonicalLane2) {
|
|
4750
|
+
const canonicalBinding = branchBindingFromLane(canonicalLane2, "explicit_root", {
|
|
4751
|
+
projectId: canonicalLane2.projectId ?? boundProjectId2,
|
|
4752
|
+
currentAppId: canonicalLane2.currentAppId ?? boundCurrentAppId2,
|
|
4753
|
+
upstreamAppId: canonicalLane2.upstreamAppId ?? canonicalLane2.currentAppId ?? boundCurrentAppId2,
|
|
4754
|
+
threadId: canonicalLane2.threadId ?? boundThreadId2
|
|
4755
|
+
});
|
|
4756
|
+
const branchBindings = {
|
|
4757
|
+
[defaultBranch]: canonicalBinding
|
|
4758
|
+
};
|
|
4759
|
+
if (branchName && branchName !== defaultBranch) {
|
|
4760
|
+
branchBindings[branchName] = {
|
|
4761
|
+
projectId: boundProjectId2,
|
|
4762
|
+
currentAppId: boundCurrentAppId2,
|
|
4763
|
+
upstreamAppId: boundUpstreamAppId2,
|
|
4764
|
+
threadId: boundThreadId2,
|
|
4765
|
+
laneId: boundLaneId2,
|
|
4766
|
+
bindingMode: "lane"
|
|
4767
|
+
};
|
|
4768
|
+
}
|
|
4769
|
+
bindingPath2 = await writeCollabBindingSnapshot({
|
|
4770
|
+
repoRoot,
|
|
4771
|
+
repoFingerprint,
|
|
4772
|
+
remoteUrl,
|
|
4773
|
+
defaultBranch,
|
|
4774
|
+
branchBindings,
|
|
4775
|
+
explicitRootBinding: canonicalBinding
|
|
4776
|
+
});
|
|
4777
|
+
} else {
|
|
4778
|
+
if (canonicalLane2 && defaultBranch && branchName && branchName !== defaultBranch) {
|
|
4779
|
+
await writeCollabBinding(repoRoot, {
|
|
4780
|
+
projectId: canonicalLane2.projectId ?? null,
|
|
4781
|
+
currentAppId: canonicalLane2.currentAppId ?? boundCurrentAppId2,
|
|
4782
|
+
upstreamAppId: canonicalLane2.upstreamAppId ?? canonicalLane2.currentAppId ?? boundCurrentAppId2,
|
|
4783
|
+
threadId: canonicalLane2.threadId ?? null,
|
|
4784
|
+
repoFingerprint: canonicalLane2.repoFingerprint ?? repoFingerprint,
|
|
4785
|
+
remoteUrl: canonicalLane2.remoteUrl ?? remoteUrl,
|
|
4786
|
+
defaultBranch: canonicalLane2.defaultBranch ?? defaultBranch,
|
|
4787
|
+
laneId: canonicalLane2.laneId ?? null,
|
|
4788
|
+
branchName: defaultBranch,
|
|
4789
|
+
bindingMode: params.forceNew ? "explicit_root" : "lane"
|
|
4790
|
+
});
|
|
4791
|
+
}
|
|
4792
|
+
bindingPath2 = await writeCollabBinding(repoRoot, {
|
|
4793
|
+
projectId: boundProjectId2,
|
|
4794
|
+
currentAppId: boundCurrentAppId2,
|
|
4795
|
+
upstreamAppId: boundUpstreamAppId2,
|
|
4796
|
+
threadId: boundThreadId2,
|
|
4797
|
+
repoFingerprint,
|
|
4798
|
+
remoteUrl,
|
|
4799
|
+
defaultBranch: defaultBranch ?? null,
|
|
4800
|
+
laneId: boundLaneId2,
|
|
4801
|
+
branchName,
|
|
4802
|
+
bindingMode: bindingMode2
|
|
4803
|
+
});
|
|
4804
|
+
}
|
|
4805
|
+
const syncPhaseFinishedAt = Date.now();
|
|
4806
|
+
const jobId = randomUUID4();
|
|
4807
|
+
const job = await enqueueAsyncJob({
|
|
4808
|
+
id: jobId,
|
|
4809
|
+
kind: "init_post",
|
|
4810
|
+
status: "queued",
|
|
4811
|
+
repoRoot,
|
|
3443
4812
|
repoFingerprint,
|
|
3444
|
-
remoteUrl,
|
|
3445
|
-
defaultBranch: defaultBranch ?? null,
|
|
3446
|
-
laneId: boundLaneId2,
|
|
3447
4813
|
branchName,
|
|
3448
|
-
|
|
4814
|
+
laneId: boundLaneId2,
|
|
4815
|
+
retryCount: 0,
|
|
4816
|
+
error: null,
|
|
4817
|
+
idempotencyKey: null,
|
|
4818
|
+
payload: {
|
|
4819
|
+
appId: boundCurrentAppId2,
|
|
4820
|
+
syncPhaseStartedAt,
|
|
4821
|
+
syncPhaseFinishedAt
|
|
4822
|
+
}
|
|
4823
|
+
});
|
|
4824
|
+
await logDrainerEvent(job.id, "submitted", {
|
|
4825
|
+
kind: "init_post",
|
|
4826
|
+
appId: boundCurrentAppId2,
|
|
4827
|
+
["init.sync_phase_ms"]: syncPhaseFinishedAt - syncPhaseStartedAt
|
|
3449
4828
|
});
|
|
3450
4829
|
return {
|
|
3451
|
-
|
|
4830
|
+
queued: true,
|
|
4831
|
+
jobId: job.id,
|
|
4832
|
+
repoRoot,
|
|
3452
4833
|
projectId: boundProjectId2,
|
|
3453
4834
|
appId: boundCurrentAppId2,
|
|
3454
|
-
dashboardUrl: buildDashboardAppUrl(boundCurrentAppId2),
|
|
3455
4835
|
upstreamAppId: boundUpstreamAppId2,
|
|
4836
|
+
dashboardUrl: buildDashboardAppUrl(boundCurrentAppId2),
|
|
3456
4837
|
bindingPath: bindingPath2,
|
|
3457
|
-
|
|
3458
|
-
|
|
3459
|
-
|
|
3460
|
-
|
|
3461
|
-
api: params.api,
|
|
3462
|
-
repoRoot,
|
|
3463
|
-
repoFingerprint,
|
|
3464
|
-
laneId: boundLaneId2,
|
|
3465
|
-
currentAppId: boundCurrentAppId2,
|
|
3466
|
-
upstreamAppId: boundUpstreamAppId2,
|
|
3467
|
-
branchName,
|
|
3468
|
-
remoteUrl,
|
|
3469
|
-
defaultBranch
|
|
3470
|
-
}),
|
|
4838
|
+
bindingMode: bindingMode2,
|
|
4839
|
+
createdCanonicalFamily: Boolean(params.forceNew),
|
|
4840
|
+
remoteUrl,
|
|
4841
|
+
defaultBranch,
|
|
3471
4842
|
...warnings.length > 0 ? { warnings } : {}
|
|
3472
4843
|
};
|
|
4844
|
+
} finally {
|
|
4845
|
+
await fs10.rm(tmpBundleDir, { recursive: true, force: true }).catch(() => void 0);
|
|
3473
4846
|
}
|
|
3474
4847
|
}
|
|
3475
4848
|
const { bundlePath, headCommitHash } = await createGitBundle(repoRoot, "repository.bundle");
|
|
3476
4849
|
const bundleSha = await sha256FileHex(bundlePath);
|
|
3477
|
-
const bundleSize = (await
|
|
4850
|
+
const bundleSize = (await fs10.stat(bundlePath)).size;
|
|
3478
4851
|
const presignResp = await params.api.presignImportUploadFirstParty({
|
|
3479
4852
|
file: {
|
|
3480
4853
|
name: "repository.bundle",
|
|
@@ -3491,7 +4864,7 @@ async function collabInit(params) {
|
|
|
3491
4864
|
});
|
|
3492
4865
|
const importResp = await params.api.importFromUploadFirstParty({
|
|
3493
4866
|
uploadId: String(presign.uploadId),
|
|
3494
|
-
appName: params.appName?.trim() ||
|
|
4867
|
+
appName: params.appName?.trim() || path9.basename(repoRoot),
|
|
3495
4868
|
path: params.path?.trim() || void 0,
|
|
3496
4869
|
platform: "generic",
|
|
3497
4870
|
isPublic: false,
|
|
@@ -3499,10 +4872,11 @@ async function collabInit(params) {
|
|
|
3499
4872
|
remoteUrl: remoteUrl ?? void 0,
|
|
3500
4873
|
defaultBranch: defaultBranch ?? void 0,
|
|
3501
4874
|
repoFingerprint,
|
|
3502
|
-
headCommitHash
|
|
4875
|
+
headCommitHash,
|
|
4876
|
+
forceNew: params.forceNew
|
|
3503
4877
|
});
|
|
3504
4878
|
const imported = unwrapResponseObject(importResp, "import");
|
|
3505
|
-
const app = await
|
|
4879
|
+
const app = await pollAppImported(params.api, String(imported.appId));
|
|
3506
4880
|
let boundProjectId = String(app.projectId);
|
|
3507
4881
|
let boundCurrentAppId = String(app.id);
|
|
3508
4882
|
let boundUpstreamAppId = String(app.id);
|
|
@@ -3556,7 +4930,7 @@ async function collabInit(params) {
|
|
|
3556
4930
|
}
|
|
3557
4931
|
}
|
|
3558
4932
|
if (boundCurrentAppId) {
|
|
3559
|
-
const readyApp = await
|
|
4933
|
+
const readyApp = await pollAppImported(params.api, boundCurrentAppId);
|
|
3560
4934
|
boundProjectId = String(readyApp.projectId ?? boundProjectId);
|
|
3561
4935
|
boundThreadId = readyApp.threadId ? String(readyApp.threadId) : boundThreadId;
|
|
3562
4936
|
}
|
|
@@ -3648,6 +5022,9 @@ async function collabInit(params) {
|
|
|
3648
5022
|
}
|
|
3649
5023
|
);
|
|
3650
5024
|
}
|
|
5025
|
+
async function collabInitSubmit(params) {
|
|
5026
|
+
return collabInit({ ...params, asyncSubmit: true });
|
|
5027
|
+
}
|
|
3651
5028
|
|
|
3652
5029
|
// src/application/collab/collabInvite.ts
|
|
3653
5030
|
async function collabInvite(params) {
|
|
@@ -3692,8 +5069,9 @@ async function collabList(params) {
|
|
|
3692
5069
|
}
|
|
3693
5070
|
|
|
3694
5071
|
// src/application/collab/collabReAnchor.ts
|
|
3695
|
-
import
|
|
3696
|
-
import
|
|
5072
|
+
import { randomUUID as randomUUID5 } from "crypto";
|
|
5073
|
+
import fs11 from "fs/promises";
|
|
5074
|
+
import path10 from "path";
|
|
3697
5075
|
|
|
3698
5076
|
// src/application/collab/pendingFinalize.ts
|
|
3699
5077
|
function hasPendingFinalize(summary) {
|
|
@@ -3723,7 +5101,7 @@ async function collabReAnchor(params) {
|
|
|
3723
5101
|
allowBranchMismatch: params.allowBranchMismatch
|
|
3724
5102
|
});
|
|
3725
5103
|
if (detected.status === "metadata_conflict" || detected.status === "branch_mismatch") {
|
|
3726
|
-
throw new RemixError("Repository must be realigned before
|
|
5104
|
+
throw new RemixError("Repository must be realigned before seeding a fresh local Remix baseline.", {
|
|
3727
5105
|
exitCode: 2,
|
|
3728
5106
|
hint: detected.hint
|
|
3729
5107
|
});
|
|
@@ -3753,15 +5131,15 @@ async function collabReAnchor(params) {
|
|
|
3753
5131
|
hint: buildPendingFinalizeHint()
|
|
3754
5132
|
});
|
|
3755
5133
|
}
|
|
3756
|
-
throw new RemixError("Re-anchor is not
|
|
5134
|
+
throw new RemixError("Re-anchor is not the right command for local content changes.", {
|
|
3757
5135
|
exitCode: 2,
|
|
3758
|
-
hint: "
|
|
5136
|
+
hint: "Remix is source-blind: any local content change since the last recorded turn \u2014 including manual commits, pulls, merges, and rebases \u2014 is recorded with `remix collab finalize-turn`. Use `remix collab re-anchor` only when no local Remix baseline exists yet for this lane (status reports `re_anchor`)."
|
|
3759
5137
|
});
|
|
3760
5138
|
}
|
|
3761
5139
|
if (detected.repoState === "idle") {
|
|
3762
5140
|
throw new RemixError("This checkout is already aligned with Remix.", {
|
|
3763
5141
|
exitCode: 2,
|
|
3764
|
-
hint: "No re-anchor step is needed
|
|
5142
|
+
hint: "No re-anchor step is needed. Re-anchor only applies when no local Remix baseline exists yet for this lane."
|
|
3765
5143
|
});
|
|
3766
5144
|
}
|
|
3767
5145
|
await ensureCleanWorktree(repoRoot, "`remix collab re-anchor`");
|
|
@@ -3770,6 +5148,25 @@ async function collabReAnchor(params) {
|
|
|
3770
5148
|
if (!headCommitHash) {
|
|
3771
5149
|
throw new RemixError("Failed to resolve local HEAD commit.", { exitCode: 1 });
|
|
3772
5150
|
}
|
|
5151
|
+
if (params.asyncSubmit && !params.dryRun) {
|
|
5152
|
+
const pending = await findPendingAsyncJob({
|
|
5153
|
+
repoRoot,
|
|
5154
|
+
branchName: binding.branchName ?? branch,
|
|
5155
|
+
kind: "re_anchor"
|
|
5156
|
+
});
|
|
5157
|
+
if (pending) {
|
|
5158
|
+
return {
|
|
5159
|
+
status: "queued",
|
|
5160
|
+
queued: true,
|
|
5161
|
+
jobId: pending.id,
|
|
5162
|
+
repoRoot,
|
|
5163
|
+
branch,
|
|
5164
|
+
currentAppId: binding.currentAppId,
|
|
5165
|
+
dryRun: false,
|
|
5166
|
+
applied: false
|
|
5167
|
+
};
|
|
5168
|
+
}
|
|
5169
|
+
}
|
|
3773
5170
|
const preflightResp = await params.api.preflightAppReconcile(binding.currentAppId, {
|
|
3774
5171
|
localHeadCommitHash: headCommitHash,
|
|
3775
5172
|
repoFingerprint: binding.repoFingerprint ?? void 0,
|
|
@@ -3799,15 +5196,78 @@ async function collabReAnchor(params) {
|
|
|
3799
5196
|
return preview;
|
|
3800
5197
|
}
|
|
3801
5198
|
let anchoredServerHeadHash = preflight.targetHeadCommitHash;
|
|
5199
|
+
if (params.asyncSubmit && preflight.status === "ready_to_reconcile") {
|
|
5200
|
+
const failed = await findFailedAsyncJob({
|
|
5201
|
+
repoRoot,
|
|
5202
|
+
branchName: binding.branchName ?? branch,
|
|
5203
|
+
kind: "re_anchor"
|
|
5204
|
+
});
|
|
5205
|
+
if (failed) {
|
|
5206
|
+
await deleteAsyncJob(failed.id);
|
|
5207
|
+
}
|
|
5208
|
+
const { bundlePath: tmpBundlePath, headCommitHash: bundledHeadCommitHash } = await createGitBundle(
|
|
5209
|
+
repoRoot,
|
|
5210
|
+
"re-anchor.bundle"
|
|
5211
|
+
);
|
|
5212
|
+
const tmpBundleDir = path10.dirname(tmpBundlePath);
|
|
5213
|
+
try {
|
|
5214
|
+
const jobId = randomUUID5();
|
|
5215
|
+
const durableBundlePath = getAsyncJobBundlePath(jobId);
|
|
5216
|
+
await fs11.mkdir(getAsyncJobDir(jobId), { recursive: true });
|
|
5217
|
+
try {
|
|
5218
|
+
await fs11.rename(tmpBundlePath, durableBundlePath);
|
|
5219
|
+
} catch (error) {
|
|
5220
|
+
if (error?.code !== "EXDEV") throw error;
|
|
5221
|
+
await fs11.copyFile(tmpBundlePath, durableBundlePath);
|
|
5222
|
+
await fs11.unlink(tmpBundlePath).catch(() => void 0);
|
|
5223
|
+
}
|
|
5224
|
+
const bundleSha = await sha256FileHex(durableBundlePath);
|
|
5225
|
+
const job = await enqueueAsyncJob({
|
|
5226
|
+
id: jobId,
|
|
5227
|
+
kind: "re_anchor",
|
|
5228
|
+
status: "queued",
|
|
5229
|
+
repoRoot,
|
|
5230
|
+
repoFingerprint: binding.repoFingerprint,
|
|
5231
|
+
branchName: binding.branchName ?? branch,
|
|
5232
|
+
laneId: binding.laneId,
|
|
5233
|
+
retryCount: 0,
|
|
5234
|
+
error: null,
|
|
5235
|
+
idempotencyKey: null,
|
|
5236
|
+
payload: {
|
|
5237
|
+
bundlePath: durableBundlePath,
|
|
5238
|
+
bundleSha256: bundleSha,
|
|
5239
|
+
localHeadCommitHash: bundledHeadCommitHash,
|
|
5240
|
+
targetHeadCommitHash: preflight.targetHeadCommitHash,
|
|
5241
|
+
appId: binding.currentAppId
|
|
5242
|
+
}
|
|
5243
|
+
});
|
|
5244
|
+
await logDrainerEvent(job.id, "submitted", { kind: "re_anchor" });
|
|
5245
|
+
return {
|
|
5246
|
+
status: "queued",
|
|
5247
|
+
queued: true,
|
|
5248
|
+
jobId: job.id,
|
|
5249
|
+
repoRoot,
|
|
5250
|
+
branch,
|
|
5251
|
+
currentAppId: binding.currentAppId,
|
|
5252
|
+
localHeadCommitHash: bundledHeadCommitHash,
|
|
5253
|
+
targetHeadCommitHash: preflight.targetHeadCommitHash,
|
|
5254
|
+
warnings: preflight.warnings,
|
|
5255
|
+
dryRun: false,
|
|
5256
|
+
applied: false
|
|
5257
|
+
};
|
|
5258
|
+
} finally {
|
|
5259
|
+
await fs11.rm(tmpBundleDir, { recursive: true, force: true }).catch(() => void 0);
|
|
5260
|
+
}
|
|
5261
|
+
}
|
|
3802
5262
|
if (preflight.status === "ready_to_reconcile") {
|
|
3803
5263
|
const { bundlePath, headCommitHash: bundledHeadCommitHash } = await createGitBundle(repoRoot, "re-anchor.bundle");
|
|
3804
|
-
const bundleTempDir =
|
|
5264
|
+
const bundleTempDir = path10.dirname(bundlePath);
|
|
3805
5265
|
try {
|
|
3806
|
-
const bundleStat = await
|
|
5266
|
+
const bundleStat = await fs11.stat(bundlePath);
|
|
3807
5267
|
const checksumSha256 = await sha256FileHex(bundlePath);
|
|
3808
5268
|
const presignResp = await params.api.presignImportUploadFirstParty({
|
|
3809
5269
|
file: {
|
|
3810
|
-
name:
|
|
5270
|
+
name: path10.basename(bundlePath),
|
|
3811
5271
|
mimeType: "application/x-git-bundle",
|
|
3812
5272
|
size: bundleStat.size,
|
|
3813
5273
|
checksumSha256
|
|
@@ -3836,7 +5296,7 @@ async function collabReAnchor(params) {
|
|
|
3836
5296
|
const reconcile = await pollReconcile(params.api, binding.currentAppId, started.id);
|
|
3837
5297
|
anchoredServerHeadHash = reconcile.reconciledHeadCommitHash ?? reconcile.targetHeadCommitHash ?? preflight.targetHeadCommitHash;
|
|
3838
5298
|
} finally {
|
|
3839
|
-
await
|
|
5299
|
+
await fs11.rm(bundleTempDir, { recursive: true, force: true });
|
|
3840
5300
|
}
|
|
3841
5301
|
}
|
|
3842
5302
|
const snapshot = await captureLocalSnapshot({
|
|
@@ -3864,12 +5324,15 @@ async function collabReAnchor(params) {
|
|
|
3864
5324
|
dryRun: false
|
|
3865
5325
|
};
|
|
3866
5326
|
}
|
|
5327
|
+
async function collabReAnchorSubmit(params) {
|
|
5328
|
+
return collabReAnchor({ ...params, asyncSubmit: true });
|
|
5329
|
+
}
|
|
3867
5330
|
|
|
3868
5331
|
// src/application/collab/collabReconcile.ts
|
|
3869
|
-
import
|
|
5332
|
+
import fs12 from "fs/promises";
|
|
3870
5333
|
import os5 from "os";
|
|
3871
|
-
import
|
|
3872
|
-
import { execa as
|
|
5334
|
+
import path11 from "path";
|
|
5335
|
+
import { execa as execa3 } from "execa";
|
|
3873
5336
|
async function reconcileBothChanged(params) {
|
|
3874
5337
|
const repoRoot = await findGitRoot(params.cwd);
|
|
3875
5338
|
const binding = await ensureActiveLaneBinding({
|
|
@@ -3989,13 +5452,13 @@ async function reconcileBothChanged(params) {
|
|
|
3989
5452
|
const replay = await pollChangeStepReplay(params.api, binding.currentAppId, String(replayStart.id));
|
|
3990
5453
|
const replayDiffResp = await params.api.getChangeStepReplayDiff(binding.currentAppId, replay.id);
|
|
3991
5454
|
const replayDiff = unwrapResponseObject(replayDiffResp, "change step replay diff");
|
|
3992
|
-
const tempRoot = await
|
|
5455
|
+
const tempRoot = await fs12.mkdtemp(path11.join(os5.tmpdir(), "remix-reconcile-"));
|
|
3993
5456
|
let serverHeadSnapshot = null;
|
|
3994
5457
|
let mergedSnapshot = null;
|
|
3995
5458
|
try {
|
|
3996
|
-
const tempRepoRoot =
|
|
3997
|
-
await
|
|
3998
|
-
await
|
|
5459
|
+
const tempRepoRoot = path11.join(tempRoot, "repo");
|
|
5460
|
+
await fs12.mkdir(tempRepoRoot, { recursive: true });
|
|
5461
|
+
await execa3("git", ["init"], { cwd: tempRepoRoot, stderr: "ignore" });
|
|
3999
5462
|
await materializeLocalSnapshot(baseline.lastSnapshotId, tempRepoRoot);
|
|
4000
5463
|
if (delta.status === "delta_ready" && delta.diff.trim()) {
|
|
4001
5464
|
await applyUnifiedDiffToWorktree(tempRepoRoot, delta.diff, "`remix collab reconcile`");
|
|
@@ -4016,7 +5479,7 @@ async function reconcileBothChanged(params) {
|
|
|
4016
5479
|
branchName: binding.branchName
|
|
4017
5480
|
});
|
|
4018
5481
|
} finally {
|
|
4019
|
-
await
|
|
5482
|
+
await fs12.rm(tempRoot, { recursive: true, force: true }).catch(() => void 0);
|
|
4020
5483
|
}
|
|
4021
5484
|
if (!serverHeadSnapshot || !mergedSnapshot) {
|
|
4022
5485
|
throw new RemixError("Failed to materialize the reconciled local workspace.", { exitCode: 1 });
|
|
@@ -4239,7 +5702,7 @@ async function collabRequestMerge(params) {
|
|
|
4239
5702
|
|
|
4240
5703
|
// src/application/collab/collabStatus.ts
|
|
4241
5704
|
function isFinalizeQueueBlocking(state) {
|
|
4242
|
-
return state === "queued" || state === "processing" || state === "retry_scheduled";
|
|
5705
|
+
return state === "queued" || state === "processing" || state === "retry_scheduled" || state === "awaiting_usage";
|
|
4243
5706
|
}
|
|
4244
5707
|
function createBaseStatus() {
|
|
4245
5708
|
return {
|
|
@@ -4268,6 +5731,7 @@ function createBaseStatus() {
|
|
|
4268
5731
|
threadId: null,
|
|
4269
5732
|
repoFingerprint: null,
|
|
4270
5733
|
remoteUrl: null,
|
|
5734
|
+
dashboardUrl: null,
|
|
4271
5735
|
defaultBranch: null,
|
|
4272
5736
|
laneId: null,
|
|
4273
5737
|
branchName: null,
|
|
@@ -4312,6 +5776,7 @@ function createBaseStatus() {
|
|
|
4312
5776
|
queuedJobCount: 0,
|
|
4313
5777
|
processingJobCount: 0,
|
|
4314
5778
|
retryScheduledJobCount: 0,
|
|
5779
|
+
awaitingUsageJobCount: 0,
|
|
4315
5780
|
failedJobCount: 0,
|
|
4316
5781
|
oldestCapturedAt: null,
|
|
4317
5782
|
newestCapturedAt: null,
|
|
@@ -4394,6 +5859,10 @@ async function collabStatus(params) {
|
|
|
4394
5859
|
threadId: detected.binding.threadId,
|
|
4395
5860
|
repoFingerprint: detected.binding.repoFingerprint,
|
|
4396
5861
|
remoteUrl: detected.binding.remoteUrl,
|
|
5862
|
+
// Dashboard URL is derived (not stored on the binding) — compute it
|
|
5863
|
+
// here so every status caller, including MCP/agent tools, gets it
|
|
5864
|
+
// alongside `remoteUrl` without having to know the URL convention.
|
|
5865
|
+
dashboardUrl: detected.binding.currentAppId ? buildDashboardAppUrl(detected.binding.currentAppId) : null,
|
|
4397
5866
|
defaultBranch: detected.binding.defaultBranch,
|
|
4398
5867
|
laneId: detected.binding.laneId,
|
|
4399
5868
|
branchName: detected.binding.branchName,
|
|
@@ -4633,16 +6102,534 @@ async function collabView(params) {
|
|
|
4633
6102
|
const resp = await params.api.getMergeRequestReview(params.mrId);
|
|
4634
6103
|
return unwrapMergeRequestReview(resp);
|
|
4635
6104
|
}
|
|
6105
|
+
|
|
6106
|
+
// src/application/collab/collabAsyncProcessing.ts
|
|
6107
|
+
import fs13 from "fs/promises";
|
|
6108
|
+
import path12 from "path";
|
|
6109
|
+
var MAX_TRANSIENT_RETRIES = 5;
|
|
6110
|
+
var TRANSIENT_NETWORK_CODES = /* @__PURE__ */ new Set([
|
|
6111
|
+
"ECONNREFUSED",
|
|
6112
|
+
"ECONNRESET",
|
|
6113
|
+
"ETIMEDOUT",
|
|
6114
|
+
"EAI_AGAIN",
|
|
6115
|
+
"ENOTFOUND",
|
|
6116
|
+
"ENETUNREACH",
|
|
6117
|
+
"EHOSTUNREACH",
|
|
6118
|
+
"EPIPE",
|
|
6119
|
+
"UND_ERR_SOCKET",
|
|
6120
|
+
"UND_ERR_CONNECT_TIMEOUT",
|
|
6121
|
+
"UND_ERR_HEADERS_TIMEOUT"
|
|
6122
|
+
]);
|
|
6123
|
+
function formatError(error) {
|
|
6124
|
+
if (error instanceof RemixError) {
|
|
6125
|
+
return [error.message, error.hint].filter(Boolean).join("\n\n") || error.message || "Unknown error.";
|
|
6126
|
+
}
|
|
6127
|
+
if (error instanceof Error) {
|
|
6128
|
+
return error.message || "Unknown error.";
|
|
6129
|
+
}
|
|
6130
|
+
if (typeof error === "string") return error;
|
|
6131
|
+
try {
|
|
6132
|
+
return JSON.stringify(error);
|
|
6133
|
+
} catch {
|
|
6134
|
+
return "Unknown error.";
|
|
6135
|
+
}
|
|
6136
|
+
}
|
|
6137
|
+
function readNumberCandidate(error, key) {
|
|
6138
|
+
if (!error || typeof error !== "object") return null;
|
|
6139
|
+
const value = error[key];
|
|
6140
|
+
if (typeof value === "number" && Number.isFinite(value)) return value;
|
|
6141
|
+
return null;
|
|
6142
|
+
}
|
|
6143
|
+
function readStringCandidate(error, key) {
|
|
6144
|
+
if (!error || typeof error !== "object") return null;
|
|
6145
|
+
const value = error[key];
|
|
6146
|
+
if (typeof value === "string" && value.length > 0) return value;
|
|
6147
|
+
return null;
|
|
6148
|
+
}
|
|
6149
|
+
function classifyError(error) {
|
|
6150
|
+
if (error instanceof RemixError) return "jobSpecific";
|
|
6151
|
+
const candidates = [error];
|
|
6152
|
+
if (error && typeof error === "object" && "cause" in error) {
|
|
6153
|
+
candidates.push(error.cause);
|
|
6154
|
+
}
|
|
6155
|
+
for (const candidate of candidates) {
|
|
6156
|
+
const status = readNumberCandidate(candidate, "status") ?? readNumberCandidate(candidate, "statusCode");
|
|
6157
|
+
if (status !== null) {
|
|
6158
|
+
if (status === 401 || status === 403) return "transient";
|
|
6159
|
+
if (status >= 500 && status < 600) return "transient";
|
|
6160
|
+
if (status >= 400 && status < 500) return "jobSpecific";
|
|
6161
|
+
}
|
|
6162
|
+
const code = readStringCandidate(candidate, "code");
|
|
6163
|
+
if (code && TRANSIENT_NETWORK_CODES.has(code)) return "transient";
|
|
6164
|
+
if (code === "ENOENT" || code === "EACCES") return "jobSpecific";
|
|
6165
|
+
const name = readStringCandidate(candidate, "name");
|
|
6166
|
+
if (name === "AbortError" || name === "TimeoutError") return "transient";
|
|
6167
|
+
if (name === "SyntaxError") return "jobSpecific";
|
|
6168
|
+
}
|
|
6169
|
+
return "jobSpecific";
|
|
6170
|
+
}
|
|
6171
|
+
async function handleProcessError(job, error) {
|
|
6172
|
+
const message = formatError(error);
|
|
6173
|
+
const kind = classifyError(error);
|
|
6174
|
+
if (kind === "transient") {
|
|
6175
|
+
const nextRetry = (job.retryCount ?? 0) + 1;
|
|
6176
|
+
if (nextRetry < MAX_TRANSIENT_RETRIES) {
|
|
6177
|
+
await updateAsyncJob(job.id, {
|
|
6178
|
+
status: "queued",
|
|
6179
|
+
retryCount: nextRetry,
|
|
6180
|
+
error: message
|
|
6181
|
+
});
|
|
6182
|
+
await logDrainerEvent(job.id, "retrying", {
|
|
6183
|
+
kind: job.kind,
|
|
6184
|
+
retryCount: nextRetry,
|
|
6185
|
+
error: message
|
|
6186
|
+
});
|
|
6187
|
+
return { status: "retry_scheduled", jobId: job.id, error: message, retryCount: nextRetry };
|
|
6188
|
+
}
|
|
6189
|
+
const finalMessage = `exceeded retry limit (${MAX_TRANSIENT_RETRIES} attempts). Last error: ${message}`;
|
|
6190
|
+
await updateAsyncJob(job.id, { status: "failed", error: finalMessage, retryCount: nextRetry });
|
|
6191
|
+
await logDrainerEvent(job.id, "failed", { kind: job.kind, error: finalMessage, retryCount: nextRetry });
|
|
6192
|
+
return { status: "failed", jobId: job.id, error: finalMessage };
|
|
6193
|
+
}
|
|
6194
|
+
await updateAsyncJob(job.id, { status: "failed", error: message });
|
|
6195
|
+
await logDrainerEvent(job.id, "failed", { kind: job.kind, error: message });
|
|
6196
|
+
return { status: "failed", jobId: job.id, error: message };
|
|
6197
|
+
}
|
|
6198
|
+
async function processInitJob(job, api) {
|
|
6199
|
+
if (job.kind !== "init") return { status: "skipped", jobId: job.id };
|
|
6200
|
+
const startedAt = Date.now();
|
|
6201
|
+
try {
|
|
6202
|
+
await updateAsyncJob(job.id, { status: "submitting", error: null });
|
|
6203
|
+
await logDrainerEvent(job.id, "claimed", { kind: "init" });
|
|
6204
|
+
const bundleStat = await fs13.stat(job.payload.bundlePath);
|
|
6205
|
+
const presignResp = await api.presignImportUploadFirstParty({
|
|
6206
|
+
file: {
|
|
6207
|
+
name: path12.basename(job.payload.bundlePath),
|
|
6208
|
+
mimeType: "application/x-git-bundle",
|
|
6209
|
+
size: bundleStat.size,
|
|
6210
|
+
checksumSha256: job.payload.bundleSha256
|
|
6211
|
+
}
|
|
6212
|
+
});
|
|
6213
|
+
const presign = unwrapResponseObject(presignResp, "upload");
|
|
6214
|
+
await updateAsyncJob(job.id, { status: "uploading" });
|
|
6215
|
+
await logDrainerEvent(job.id, "uploading", { kind: "init" });
|
|
6216
|
+
await uploadPresigned({
|
|
6217
|
+
uploadUrl: String(presign.uploadUrl),
|
|
6218
|
+
headers: presign.headers ?? {},
|
|
6219
|
+
filePath: job.payload.bundlePath
|
|
6220
|
+
});
|
|
6221
|
+
await updateAsyncJob(job.id, { status: "server_processing" });
|
|
6222
|
+
const importResp = await api.importFromUploadFirstParty({
|
|
6223
|
+
uploadId: String(presign.uploadId),
|
|
6224
|
+
appName: job.payload.appName?.trim() || path12.basename(job.repoRoot),
|
|
6225
|
+
platform: "generic",
|
|
6226
|
+
isPublic: false,
|
|
6227
|
+
branch: job.payload.defaultBranch && job.branchName && job.branchName !== job.payload.defaultBranch ? job.payload.defaultBranch : job.branchName ?? void 0,
|
|
6228
|
+
remoteUrl: job.payload.remoteUrl ?? void 0,
|
|
6229
|
+
defaultBranch: job.payload.defaultBranch ?? void 0,
|
|
6230
|
+
repoFingerprint: job.repoFingerprint ?? void 0,
|
|
6231
|
+
headCommitHash: job.payload.headCommitHash
|
|
6232
|
+
});
|
|
6233
|
+
const imported = unwrapResponseObject(importResp, "import");
|
|
6234
|
+
await logDrainerEvent(job.id, "server_accepted", { kind: "init", appId: String(imported.appId ?? "") });
|
|
6235
|
+
const app = await pollAppReady(api, String(imported.appId));
|
|
6236
|
+
let boundProjectId = String(app.projectId);
|
|
6237
|
+
let boundCurrentAppId = String(app.id);
|
|
6238
|
+
let boundUpstreamAppId = String(app.id);
|
|
6239
|
+
let boundThreadId = app.threadId ? String(app.threadId) : null;
|
|
6240
|
+
let boundLaneId = null;
|
|
6241
|
+
let canonicalLane = null;
|
|
6242
|
+
const branchName = job.branchName;
|
|
6243
|
+
const defaultBranch = job.payload.defaultBranch;
|
|
6244
|
+
const repoRoot = job.repoRoot;
|
|
6245
|
+
const repoFingerprint = job.repoFingerprint;
|
|
6246
|
+
const remoteUrl = job.payload.remoteUrl;
|
|
6247
|
+
const headCommitHash = job.payload.headCommitHash;
|
|
6248
|
+
if (branchName) {
|
|
6249
|
+
if (defaultBranch && branchName !== defaultBranch) {
|
|
6250
|
+
canonicalLane = await resolveOrEnsureLaneBinding({
|
|
6251
|
+
api,
|
|
6252
|
+
projectId: boundProjectId,
|
|
6253
|
+
repoFingerprint: repoFingerprint ?? "",
|
|
6254
|
+
remoteUrl,
|
|
6255
|
+
defaultBranch,
|
|
6256
|
+
branchName: defaultBranch,
|
|
6257
|
+
seedAppId: String(app.id),
|
|
6258
|
+
operation: "`remix collab init` (background)"
|
|
6259
|
+
});
|
|
6260
|
+
const lane = await resolveOrEnsureLaneBinding({
|
|
6261
|
+
api,
|
|
6262
|
+
projectId: canonicalLane.projectId ?? boundProjectId,
|
|
6263
|
+
repoFingerprint: repoFingerprint ?? "",
|
|
6264
|
+
remoteUrl,
|
|
6265
|
+
defaultBranch,
|
|
6266
|
+
branchName,
|
|
6267
|
+
seedHeadCommitHash: headCommitHash,
|
|
6268
|
+
operation: "`remix collab init` (background)"
|
|
6269
|
+
});
|
|
6270
|
+
boundProjectId = lane.projectId ?? boundProjectId;
|
|
6271
|
+
boundCurrentAppId = lane.currentAppId ?? boundCurrentAppId;
|
|
6272
|
+
boundUpstreamAppId = lane.upstreamAppId ?? boundUpstreamAppId;
|
|
6273
|
+
boundThreadId = lane.threadId ?? boundThreadId;
|
|
6274
|
+
boundLaneId = lane.laneId ?? null;
|
|
6275
|
+
} else {
|
|
6276
|
+
const lane = await resolveOrEnsureLaneBinding({
|
|
6277
|
+
api,
|
|
6278
|
+
projectId: boundProjectId,
|
|
6279
|
+
repoFingerprint: repoFingerprint ?? "",
|
|
6280
|
+
remoteUrl,
|
|
6281
|
+
defaultBranch,
|
|
6282
|
+
branchName,
|
|
6283
|
+
seedAppId: String(app.id),
|
|
6284
|
+
operation: "`remix collab init` (background)"
|
|
6285
|
+
});
|
|
6286
|
+
canonicalLane = lane;
|
|
6287
|
+
boundProjectId = lane.projectId ?? boundProjectId;
|
|
6288
|
+
boundCurrentAppId = lane.currentAppId ?? boundCurrentAppId;
|
|
6289
|
+
boundUpstreamAppId = lane.upstreamAppId ?? boundUpstreamAppId;
|
|
6290
|
+
boundThreadId = lane.threadId ?? boundThreadId;
|
|
6291
|
+
boundLaneId = lane.laneId ?? null;
|
|
6292
|
+
}
|
|
6293
|
+
}
|
|
6294
|
+
if (boundCurrentAppId) {
|
|
6295
|
+
const readyApp = await pollAppReady(api, boundCurrentAppId);
|
|
6296
|
+
boundProjectId = String(readyApp.projectId ?? boundProjectId);
|
|
6297
|
+
boundThreadId = readyApp.threadId ? String(readyApp.threadId) : boundThreadId;
|
|
6298
|
+
}
|
|
6299
|
+
const bindingMode = job.payload.forceNew && (!defaultBranch || branchName === defaultBranch) ? "explicit_root" : "lane";
|
|
6300
|
+
if (job.payload.forceNew && defaultBranch && canonicalLane) {
|
|
6301
|
+
const canonicalBinding = branchBindingFromLane(canonicalLane, "explicit_root", {
|
|
6302
|
+
projectId: canonicalLane.projectId ?? boundProjectId,
|
|
6303
|
+
currentAppId: canonicalLane.currentAppId ?? boundCurrentAppId,
|
|
6304
|
+
upstreamAppId: canonicalLane.upstreamAppId ?? canonicalLane.currentAppId ?? boundCurrentAppId,
|
|
6305
|
+
threadId: canonicalLane.threadId ?? boundThreadId
|
|
6306
|
+
});
|
|
6307
|
+
const branchBindings = { [defaultBranch]: canonicalBinding };
|
|
6308
|
+
if (branchName && branchName !== defaultBranch) {
|
|
6309
|
+
branchBindings[branchName] = {
|
|
6310
|
+
projectId: boundProjectId,
|
|
6311
|
+
currentAppId: boundCurrentAppId,
|
|
6312
|
+
upstreamAppId: boundUpstreamAppId,
|
|
6313
|
+
threadId: boundThreadId,
|
|
6314
|
+
laneId: boundLaneId,
|
|
6315
|
+
bindingMode: "lane"
|
|
6316
|
+
};
|
|
6317
|
+
}
|
|
6318
|
+
await writeCollabBindingSnapshot({
|
|
6319
|
+
repoRoot,
|
|
6320
|
+
repoFingerprint: repoFingerprint ?? "",
|
|
6321
|
+
remoteUrl,
|
|
6322
|
+
defaultBranch,
|
|
6323
|
+
branchBindings,
|
|
6324
|
+
explicitRootBinding: canonicalBinding
|
|
6325
|
+
});
|
|
6326
|
+
} else {
|
|
6327
|
+
if (canonicalLane && defaultBranch && branchName && branchName !== defaultBranch) {
|
|
6328
|
+
await writeCollabBinding(repoRoot, {
|
|
6329
|
+
projectId: canonicalLane.projectId ?? null,
|
|
6330
|
+
currentAppId: canonicalLane.currentAppId ?? boundCurrentAppId,
|
|
6331
|
+
upstreamAppId: canonicalLane.upstreamAppId ?? canonicalLane.currentAppId ?? boundCurrentAppId,
|
|
6332
|
+
threadId: canonicalLane.threadId ?? null,
|
|
6333
|
+
repoFingerprint: canonicalLane.repoFingerprint ?? repoFingerprint ?? "",
|
|
6334
|
+
remoteUrl: canonicalLane.remoteUrl ?? remoteUrl,
|
|
6335
|
+
defaultBranch: canonicalLane.defaultBranch ?? defaultBranch,
|
|
6336
|
+
laneId: canonicalLane.laneId ?? null,
|
|
6337
|
+
branchName: defaultBranch,
|
|
6338
|
+
bindingMode: job.payload.forceNew ? "explicit_root" : "lane"
|
|
6339
|
+
});
|
|
6340
|
+
}
|
|
6341
|
+
await writeCollabBinding(repoRoot, {
|
|
6342
|
+
projectId: boundProjectId,
|
|
6343
|
+
currentAppId: boundCurrentAppId,
|
|
6344
|
+
upstreamAppId: boundUpstreamAppId,
|
|
6345
|
+
threadId: boundThreadId,
|
|
6346
|
+
repoFingerprint: repoFingerprint ?? "",
|
|
6347
|
+
remoteUrl,
|
|
6348
|
+
defaultBranch: defaultBranch ?? null,
|
|
6349
|
+
laneId: boundLaneId,
|
|
6350
|
+
branchName,
|
|
6351
|
+
bindingMode
|
|
6352
|
+
});
|
|
6353
|
+
}
|
|
6354
|
+
await seedImportedInitBaseline({
|
|
6355
|
+
api,
|
|
6356
|
+
repoRoot,
|
|
6357
|
+
repoFingerprint: repoFingerprint ?? "",
|
|
6358
|
+
laneId: boundLaneId,
|
|
6359
|
+
currentAppId: boundCurrentAppId,
|
|
6360
|
+
branchName
|
|
6361
|
+
});
|
|
6362
|
+
await logDrainerEvent(job.id, "server_completed", {
|
|
6363
|
+
kind: "init",
|
|
6364
|
+
appId: boundCurrentAppId,
|
|
6365
|
+
dashboardUrl: buildDashboardAppUrl(boundCurrentAppId),
|
|
6366
|
+
durationMs: Date.now() - startedAt
|
|
6367
|
+
});
|
|
6368
|
+
await updateAsyncJob(job.id, { status: "completed", error: null });
|
|
6369
|
+
await logDrainerEvent(job.id, "completed", { kind: "init", durationMs: Date.now() - startedAt });
|
|
6370
|
+
return { status: "completed", jobId: job.id };
|
|
6371
|
+
} catch (error) {
|
|
6372
|
+
return await handleProcessError(job, error);
|
|
6373
|
+
}
|
|
6374
|
+
}
|
|
6375
|
+
async function processInitPostJob(job, api) {
|
|
6376
|
+
if (job.kind !== "init_post") return { status: "skipped", jobId: job.id };
|
|
6377
|
+
const startedAt = Date.now();
|
|
6378
|
+
const syncPhaseMs = Math.max(0, job.payload.syncPhaseFinishedAt - job.payload.syncPhaseStartedAt);
|
|
6379
|
+
try {
|
|
6380
|
+
await updateAsyncJob(job.id, { status: "server_processing", error: null });
|
|
6381
|
+
await logDrainerEvent(job.id, "claimed", {
|
|
6382
|
+
kind: "init_post",
|
|
6383
|
+
appId: job.payload.appId,
|
|
6384
|
+
syncPhaseMs
|
|
6385
|
+
});
|
|
6386
|
+
await pollAppReady(api, job.payload.appId);
|
|
6387
|
+
await seedImportedInitBaseline({
|
|
6388
|
+
api,
|
|
6389
|
+
repoRoot: job.repoRoot,
|
|
6390
|
+
repoFingerprint: job.repoFingerprint ?? "",
|
|
6391
|
+
laneId: job.laneId,
|
|
6392
|
+
currentAppId: job.payload.appId,
|
|
6393
|
+
branchName: job.branchName
|
|
6394
|
+
});
|
|
6395
|
+
const asyncPhaseMs = Date.now() - startedAt;
|
|
6396
|
+
await updateAsyncJob(job.id, { status: "completed", error: null });
|
|
6397
|
+
await logDrainerEvent(job.id, "completed", {
|
|
6398
|
+
kind: "init_post",
|
|
6399
|
+
appId: job.payload.appId,
|
|
6400
|
+
durationMs: asyncPhaseMs,
|
|
6401
|
+
// Top-level metrics requested in the rollout PR description so the
|
|
6402
|
+
// sync vs async timing breakdown is grep-able in one place.
|
|
6403
|
+
["init.sync_phase_ms"]: syncPhaseMs,
|
|
6404
|
+
["init.async_phase_ms"]: asyncPhaseMs,
|
|
6405
|
+
["init.total_ms"]: syncPhaseMs + asyncPhaseMs
|
|
6406
|
+
});
|
|
6407
|
+
return { status: "completed", jobId: job.id };
|
|
6408
|
+
} catch (error) {
|
|
6409
|
+
const outcome = await handleProcessError(job, error);
|
|
6410
|
+
if (outcome.status === "failed") {
|
|
6411
|
+
const bindingPath = getCollabBindingPath(job.repoRoot);
|
|
6412
|
+
try {
|
|
6413
|
+
await fs13.unlink(bindingPath);
|
|
6414
|
+
await logDrainerEvent(job.id, "binding_cleared", {
|
|
6415
|
+
kind: "init_post",
|
|
6416
|
+
appId: job.payload.appId,
|
|
6417
|
+
repoRoot: job.repoRoot,
|
|
6418
|
+
bindingPath,
|
|
6419
|
+
reason: "init_post_terminal_failure"
|
|
6420
|
+
});
|
|
6421
|
+
} catch (cleanupError) {
|
|
6422
|
+
await logDrainerEvent(job.id, "binding_cleared", {
|
|
6423
|
+
kind: "init_post",
|
|
6424
|
+
appId: job.payload.appId,
|
|
6425
|
+
repoRoot: job.repoRoot,
|
|
6426
|
+
bindingPath,
|
|
6427
|
+
reason: "init_post_terminal_failure",
|
|
6428
|
+
cleanupError: formatError(cleanupError)
|
|
6429
|
+
});
|
|
6430
|
+
}
|
|
6431
|
+
}
|
|
6432
|
+
return outcome;
|
|
6433
|
+
}
|
|
6434
|
+
}
|
|
6435
|
+
async function processReAnchorJob(job, api) {
|
|
6436
|
+
if (job.kind !== "re_anchor") return { status: "skipped", jobId: job.id };
|
|
6437
|
+
const startedAt = Date.now();
|
|
6438
|
+
try {
|
|
6439
|
+
await updateAsyncJob(job.id, { status: "submitting", error: null });
|
|
6440
|
+
await logDrainerEvent(job.id, "claimed", { kind: "re_anchor" });
|
|
6441
|
+
const preflightResp = await api.preflightAppReconcile(job.payload.appId, {
|
|
6442
|
+
localHeadCommitHash: job.payload.localHeadCommitHash,
|
|
6443
|
+
repoFingerprint: job.repoFingerprint ?? void 0
|
|
6444
|
+
});
|
|
6445
|
+
const preflight = unwrapResponseObject(preflightResp, "reconcile preflight");
|
|
6446
|
+
if (preflight.status === "metadata_conflict") {
|
|
6447
|
+
throw new RemixError("Local repository metadata conflicts with the bound Remix app.", {
|
|
6448
|
+
exitCode: 2,
|
|
6449
|
+
hint: preflight.warnings.join("\n") || null
|
|
6450
|
+
});
|
|
6451
|
+
}
|
|
6452
|
+
let anchoredServerHeadHash = preflight.targetHeadCommitHash;
|
|
6453
|
+
if (preflight.status === "ready_to_reconcile") {
|
|
6454
|
+
const bundleStat = await fs13.stat(job.payload.bundlePath);
|
|
6455
|
+
const presignResp = await api.presignImportUploadFirstParty({
|
|
6456
|
+
file: {
|
|
6457
|
+
name: path12.basename(job.payload.bundlePath),
|
|
6458
|
+
mimeType: "application/x-git-bundle",
|
|
6459
|
+
size: bundleStat.size,
|
|
6460
|
+
checksumSha256: job.payload.bundleSha256
|
|
6461
|
+
}
|
|
6462
|
+
});
|
|
6463
|
+
const uploadTarget = unwrapResponseObject(presignResp, "import upload target");
|
|
6464
|
+
await updateAsyncJob(job.id, { status: "uploading" });
|
|
6465
|
+
await logDrainerEvent(job.id, "uploading", { kind: "re_anchor" });
|
|
6466
|
+
await uploadPresigned({
|
|
6467
|
+
uploadUrl: String(uploadTarget.uploadUrl),
|
|
6468
|
+
filePath: job.payload.bundlePath,
|
|
6469
|
+
headers: uploadTarget.headers ?? {}
|
|
6470
|
+
});
|
|
6471
|
+
await updateAsyncJob(job.id, { status: "server_processing" });
|
|
6472
|
+
const startResp = await api.startAppReconcile(job.payload.appId, {
|
|
6473
|
+
uploadId: String(uploadTarget.uploadId),
|
|
6474
|
+
localHeadCommitHash: job.payload.localHeadCommitHash,
|
|
6475
|
+
repoFingerprint: job.repoFingerprint ?? void 0,
|
|
6476
|
+
idempotencyKey: buildDeterministicIdempotencyKey({
|
|
6477
|
+
kind: "collab_re_anchor_v1",
|
|
6478
|
+
appId: job.payload.appId,
|
|
6479
|
+
localHeadCommitHash: job.payload.localHeadCommitHash,
|
|
6480
|
+
targetHeadCommitHash: job.payload.targetHeadCommitHash
|
|
6481
|
+
})
|
|
6482
|
+
});
|
|
6483
|
+
const started = unwrapResponseObject(startResp, "reconcile");
|
|
6484
|
+
await logDrainerEvent(job.id, "server_accepted", { kind: "re_anchor", appId: job.payload.appId });
|
|
6485
|
+
const reconcile = await pollReconcile(api, job.payload.appId, started.id);
|
|
6486
|
+
anchoredServerHeadHash = reconcile.reconciledHeadCommitHash ?? reconcile.targetHeadCommitHash ?? preflight.targetHeadCommitHash;
|
|
6487
|
+
}
|
|
6488
|
+
const snapshot = await captureLocalSnapshot({
|
|
6489
|
+
repoRoot: job.repoRoot,
|
|
6490
|
+
repoFingerprint: job.repoFingerprint,
|
|
6491
|
+
laneId: job.laneId,
|
|
6492
|
+
branchName: job.branchName
|
|
6493
|
+
});
|
|
6494
|
+
await writeLocalBaseline({
|
|
6495
|
+
repoRoot: job.repoRoot,
|
|
6496
|
+
repoFingerprint: job.repoFingerprint,
|
|
6497
|
+
laneId: job.laneId,
|
|
6498
|
+
currentAppId: job.payload.appId,
|
|
6499
|
+
branchName: job.branchName,
|
|
6500
|
+
lastSnapshotId: snapshot.id,
|
|
6501
|
+
lastSnapshotHash: snapshot.snapshotHash,
|
|
6502
|
+
lastServerHeadHash: anchoredServerHeadHash,
|
|
6503
|
+
lastSeenLocalCommitHash: snapshot.localCommitHash
|
|
6504
|
+
});
|
|
6505
|
+
await logDrainerEvent(job.id, "server_completed", {
|
|
6506
|
+
kind: "re_anchor",
|
|
6507
|
+
appId: job.payload.appId,
|
|
6508
|
+
durationMs: Date.now() - startedAt
|
|
6509
|
+
});
|
|
6510
|
+
await updateAsyncJob(job.id, { status: "completed", error: null });
|
|
6511
|
+
await logDrainerEvent(job.id, "completed", { kind: "re_anchor", durationMs: Date.now() - startedAt });
|
|
6512
|
+
return { status: "completed", jobId: job.id };
|
|
6513
|
+
} catch (error) {
|
|
6514
|
+
return await handleProcessError(job, error);
|
|
6515
|
+
}
|
|
6516
|
+
}
|
|
6517
|
+
async function collabInitProcess(jobId, opts) {
|
|
6518
|
+
const claim = await claimAsyncJob(jobId);
|
|
6519
|
+
if (!claim) {
|
|
6520
|
+
const job = await readAsyncJob(jobId);
|
|
6521
|
+
if (!job) return { status: "skipped", jobId };
|
|
6522
|
+
return { status: "skipped", jobId };
|
|
6523
|
+
}
|
|
6524
|
+
try {
|
|
6525
|
+
if (claim.job.kind !== "init") return { status: "skipped", jobId };
|
|
6526
|
+
return await processInitJob(claim.job, opts.api);
|
|
6527
|
+
} finally {
|
|
6528
|
+
await claim.release();
|
|
6529
|
+
}
|
|
6530
|
+
}
|
|
6531
|
+
/**
 * Claims and processes a queued "init_post" async job.
 *
 * @param {string} jobId - Identifier of the async job to process.
 * @param {{ api: unknown }} opts - Carries the API client handed to the job processor.
 * @returns {Promise<{ status: string, jobId: string }>} The processor's outcome, or a
 *   "skipped" result when the job cannot be claimed or is not an "init_post" job.
 */
async function collabInitPostProcess(jobId, opts) {
  const claim = await claimAsyncJob(jobId);
  // No claim means the job is gone or held by another worker; either way we skip.
  // (The original re-read the job here only to return the same "skipped" object in
  // both branches, so that redundant readAsyncJob call has been removed.)
  if (!claim) {
    return { status: "skipped", jobId };
  }
  try {
    if (claim.job.kind !== "init_post") return { status: "skipped", jobId };
    return await processInitPostJob(claim.job, opts.api);
  } finally {
    // Always release the claim, even when processing throws.
    await claim.release();
  }
}
|
|
6545
|
+
/**
 * Claims and processes a queued "re_anchor" async job.
 *
 * @param {string} jobId - Identifier of the async job to process.
 * @param {{ api: unknown }} opts - Carries the API client handed to the job processor.
 * @returns {Promise<{ status: string, jobId: string }>} The processor's outcome, or a
 *   "skipped" result when the job cannot be claimed or is not a "re_anchor" job.
 */
async function collabReAnchorProcess(jobId, opts) {
  const claim = await claimAsyncJob(jobId);
  // No claim means the job is gone or held by another worker; either way we skip.
  // (The original re-read the job here only to return the same "skipped" object in
  // both branches, so that redundant readAsyncJob call has been removed.)
  if (!claim) {
    return { status: "skipped", jobId };
  }
  try {
    if (claim.job.kind !== "re_anchor") return { status: "skipped", jobId };
    return await processReAnchorJob(claim.job, opts.api);
  } finally {
    // Always release the claim, even when processing throws.
    await claim.release();
  }
}
|
|
6559
|
+
/**
 * Attempts to take the drainer PID lock file for the current process.
 *
 * @returns {Promise<{ release: () => Promise<void> } | null>} A handle whose
 *   `release()` removes the lock file (only if this process still owns it), or
 *   `null` when another live process holds the lock or any filesystem step fails.
 */
async function acquireDrainerPidLock() {
  const lockFile = getDrainerPidPath();
  await fs13.mkdir(path12.dirname(lockFile), { recursive: true });
  try {
    const raw = await fs13.readFile(lockFile, "utf8").catch(() => "");
    const ownerPid = parseInt(raw.trim(), 10);
    const heldByOther = Number.isFinite(ownerPid) && ownerPid > 0 && ownerPid !== process.pid;
    if (heldByOther) {
      try {
        // Signal 0 only probes for existence; an ESRCH throw means the owner died.
        process.kill(ownerPid, 0);
        return null; // Owner is still alive - the lock is taken.
      } catch (probeError) {
        // Anything other than ESRCH (e.g. EPERM) means a live process we can't signal.
        if (probeError?.code !== "ESRCH") return null;
      }
    }
    await fs13.writeFile(lockFile, String(process.pid), "utf8");
    return {
      release: async () => {
        try {
          const contents = (await fs13.readFile(lockFile, "utf8")).trim();
          // Only delete the file if we still own it - a newer holder keeps its lock.
          if (contents === String(process.pid)) {
            await fs13.unlink(lockFile).catch(() => void 0);
          }
        } catch {
          // Best-effort cleanup; ignore failures.
        }
      }
    };
  } catch {
    return null;
  }
}
|
|
6589
|
+
/**
 * Drains every non-terminal async job, dispatching each to its kind-specific
 * processor, optionally guarded by the drainer PID lock.
 *
 * @param {{ api: unknown, respectPidLock?: boolean }} opts - API client plus an
 *   opt-out flag for the PID lock (any value other than `false` enables it).
 * @returns {Promise<Array<object>>} One outcome per drained job; empty when the
 *   PID lock is required but could not be acquired.
 */
async function drainAsyncJobs(opts) {
  const usePidLock = opts.respectPidLock !== false;
  const lock = usePidLock ? await acquireDrainerPidLock() : null;
  // When the lock is mandatory and another drainer holds it, do nothing.
  if (usePidLock && !lock) return [];
  const results = [];
  try {
    // Best-effort cleanup of finished jobs before listing.
    await pruneTerminalAsyncJobs().catch(() => void 0);
    const allJobs = await listAsyncJobs();
    for (const job of allJobs) {
      // Skip jobs already in a terminal state.
      if (job.status === "completed" || job.status === "failed") continue;
      const ctx = { api: opts.api };
      let result;
      switch (job.kind) {
        case "init":
          result = await collabInitProcess(job.id, ctx);
          break;
        case "init_post":
          result = await collabInitPostProcess(job.id, ctx);
          break;
        default:
          result = await collabReAnchorProcess(job.id, ctx);
          break;
      }
      results.push(result);
    }
  } finally {
    if (lock) await lock.release();
  }
  return results;
}
|
|
4636
6615
|
export {
|
|
6616
|
+
FINALIZE_JOB_LOCK_STALE_MS,
|
|
6617
|
+
FINALIZE_PREFLIGHT_FAILURE_CODES,
|
|
6618
|
+
awaitAsyncJob,
|
|
6619
|
+
cleanStaleFinalizeJobLocks,
|
|
4637
6620
|
collabApprove,
|
|
4638
6621
|
collabCheckout,
|
|
4639
6622
|
collabFinalizeTurn,
|
|
4640
6623
|
collabInit,
|
|
6624
|
+
collabInitProcess,
|
|
6625
|
+
collabInitSubmit,
|
|
4641
6626
|
collabInvite,
|
|
4642
6627
|
collabList,
|
|
4643
6628
|
collabListMembers,
|
|
4644
6629
|
collabListMergeRequests,
|
|
4645
6630
|
collabReAnchor,
|
|
6631
|
+
collabReAnchorProcess,
|
|
6632
|
+
collabReAnchorSubmit,
|
|
4646
6633
|
collabReconcile,
|
|
4647
6634
|
collabRecordingPreflight,
|
|
4648
6635
|
collabReject,
|
|
@@ -4653,8 +6640,26 @@ export {
|
|
|
4653
6640
|
collabSyncUpstream,
|
|
4654
6641
|
collabUpdateMemberRole,
|
|
4655
6642
|
collabView,
|
|
6643
|
+
deleteAsyncJob,
|
|
6644
|
+
drainAsyncJobs,
|
|
4656
6645
|
drainPendingFinalizeQueue,
|
|
6646
|
+
drainerLogPath,
|
|
6647
|
+
findFailedAsyncJob,
|
|
6648
|
+
findPendingAsyncJob,
|
|
6649
|
+
forgetPendingFinalizeJob,
|
|
6650
|
+
getDrainerLogPath,
|
|
6651
|
+
getDrainerPidPath,
|
|
4657
6652
|
getMemberRolesForScope,
|
|
6653
|
+
isFinalizePreflightFailureCode,
|
|
6654
|
+
listAsyncJobs,
|
|
6655
|
+
listAsyncJobsForRepo,
|
|
6656
|
+
listPendingFinalizeJobs,
|
|
4658
6657
|
processPendingFinalizeJob,
|
|
6658
|
+
pruneTerminalAsyncJobs,
|
|
6659
|
+
readAsyncJob,
|
|
6660
|
+
readPendingFinalizeJob,
|
|
6661
|
+
requeuePendingFinalizeJob,
|
|
6662
|
+
summarizeAsyncJobs,
|
|
6663
|
+
updatePendingFinalizeJob,
|
|
4659
6664
|
validateMemberRole
|
|
4660
6665
|
};
|