@remixhq/claude-plugin 0.1.21 → 0.1.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude-plugin/plugin.json +1 -1
- package/agents/remix-collab.md +1 -1
- package/dist/hook-post-collab.cjs +6 -6
- package/dist/hook-post-collab.cjs.map +1 -1
- package/dist/hook-pre-git.cjs +2 -2
- package/dist/hook-pre-git.cjs.map +1 -1
- package/dist/hook-stop-collab.cjs +2252 -858
- package/dist/hook-stop-collab.cjs.map +1 -1
- package/dist/hook-user-prompt.cjs +28588 -312
- package/dist/hook-user-prompt.cjs.map +1 -1
- package/dist/index.js +4 -4
- package/dist/index.js.map +1 -1
- package/dist/mcp-server.cjs +386 -493
- package/dist/mcp-server.cjs.map +1 -1
- package/package.json +4 -4
- package/skills/init-or-remix/SKILL.md +5 -3
- package/skills/safe-collab-workflow/SKILL.md +15 -8
- package/skills/submit-change-step/SKILL.md +13 -11
- package/skills/sync-and-reconcile/SKILL.md +1 -1
|
@@ -37,8 +37,8 @@ var require_windows = __commonJS({
|
|
|
37
37
|
"use strict";
|
|
38
38
|
module2.exports = isexe;
|
|
39
39
|
isexe.sync = sync;
|
|
40
|
-
var
|
|
41
|
-
function checkPathExt(
|
|
40
|
+
var fs13 = require("fs");
|
|
41
|
+
function checkPathExt(path16, options) {
|
|
42
42
|
var pathext = options.pathExt !== void 0 ? options.pathExt : process.env.PATHEXT;
|
|
43
43
|
if (!pathext) {
|
|
44
44
|
return true;
|
|
@@ -49,25 +49,25 @@ var require_windows = __commonJS({
|
|
|
49
49
|
}
|
|
50
50
|
for (var i2 = 0; i2 < pathext.length; i2++) {
|
|
51
51
|
var p = pathext[i2].toLowerCase();
|
|
52
|
-
if (p &&
|
|
52
|
+
if (p && path16.substr(-p.length).toLowerCase() === p) {
|
|
53
53
|
return true;
|
|
54
54
|
}
|
|
55
55
|
}
|
|
56
56
|
return false;
|
|
57
57
|
}
|
|
58
|
-
function checkStat(stat,
|
|
58
|
+
function checkStat(stat, path16, options) {
|
|
59
59
|
if (!stat.isSymbolicLink() && !stat.isFile()) {
|
|
60
60
|
return false;
|
|
61
61
|
}
|
|
62
|
-
return checkPathExt(
|
|
62
|
+
return checkPathExt(path16, options);
|
|
63
63
|
}
|
|
64
|
-
function isexe(
|
|
65
|
-
|
|
66
|
-
cb(er, er ? false : checkStat(stat,
|
|
64
|
+
function isexe(path16, options, cb) {
|
|
65
|
+
fs13.stat(path16, function(er, stat) {
|
|
66
|
+
cb(er, er ? false : checkStat(stat, path16, options));
|
|
67
67
|
});
|
|
68
68
|
}
|
|
69
|
-
function sync(
|
|
70
|
-
return checkStat(
|
|
69
|
+
function sync(path16, options) {
|
|
70
|
+
return checkStat(fs13.statSync(path16), path16, options);
|
|
71
71
|
}
|
|
72
72
|
}
|
|
73
73
|
});
|
|
@@ -78,14 +78,14 @@ var require_mode = __commonJS({
|
|
|
78
78
|
"use strict";
|
|
79
79
|
module2.exports = isexe;
|
|
80
80
|
isexe.sync = sync;
|
|
81
|
-
var
|
|
82
|
-
function isexe(
|
|
83
|
-
|
|
81
|
+
var fs13 = require("fs");
|
|
82
|
+
function isexe(path16, options, cb) {
|
|
83
|
+
fs13.stat(path16, function(er, stat) {
|
|
84
84
|
cb(er, er ? false : checkStat(stat, options));
|
|
85
85
|
});
|
|
86
86
|
}
|
|
87
|
-
function sync(
|
|
88
|
-
return checkStat(
|
|
87
|
+
function sync(path16, options) {
|
|
88
|
+
return checkStat(fs13.statSync(path16), options);
|
|
89
89
|
}
|
|
90
90
|
function checkStat(stat, options) {
|
|
91
91
|
return stat.isFile() && checkMode(stat, options);
|
|
@@ -110,7 +110,7 @@ var require_mode = __commonJS({
|
|
|
110
110
|
var require_isexe = __commonJS({
|
|
111
111
|
"node_modules/isexe/index.js"(exports2, module2) {
|
|
112
112
|
"use strict";
|
|
113
|
-
var
|
|
113
|
+
var fs13 = require("fs");
|
|
114
114
|
var core;
|
|
115
115
|
if (process.platform === "win32" || global.TESTING_WINDOWS) {
|
|
116
116
|
core = require_windows();
|
|
@@ -119,7 +119,7 @@ var require_isexe = __commonJS({
|
|
|
119
119
|
}
|
|
120
120
|
module2.exports = isexe;
|
|
121
121
|
isexe.sync = sync;
|
|
122
|
-
function isexe(
|
|
122
|
+
function isexe(path16, options, cb) {
|
|
123
123
|
if (typeof options === "function") {
|
|
124
124
|
cb = options;
|
|
125
125
|
options = {};
|
|
@@ -129,7 +129,7 @@ var require_isexe = __commonJS({
|
|
|
129
129
|
throw new TypeError("callback not provided");
|
|
130
130
|
}
|
|
131
131
|
return new Promise(function(resolve, reject) {
|
|
132
|
-
isexe(
|
|
132
|
+
isexe(path16, options || {}, function(er, is) {
|
|
133
133
|
if (er) {
|
|
134
134
|
reject(er);
|
|
135
135
|
} else {
|
|
@@ -138,7 +138,7 @@ var require_isexe = __commonJS({
|
|
|
138
138
|
});
|
|
139
139
|
});
|
|
140
140
|
}
|
|
141
|
-
core(
|
|
141
|
+
core(path16, options || {}, function(er, is) {
|
|
142
142
|
if (er) {
|
|
143
143
|
if (er.code === "EACCES" || options && options.ignoreErrors) {
|
|
144
144
|
er = null;
|
|
@@ -148,9 +148,9 @@ var require_isexe = __commonJS({
|
|
|
148
148
|
cb(er, is);
|
|
149
149
|
});
|
|
150
150
|
}
|
|
151
|
-
function sync(
|
|
151
|
+
function sync(path16, options) {
|
|
152
152
|
try {
|
|
153
|
-
return core.sync(
|
|
153
|
+
return core.sync(path16, options || {});
|
|
154
154
|
} catch (er) {
|
|
155
155
|
if (options && options.ignoreErrors || er.code === "EACCES") {
|
|
156
156
|
return false;
|
|
@@ -167,7 +167,7 @@ var require_which = __commonJS({
|
|
|
167
167
|
"node_modules/which/which.js"(exports2, module2) {
|
|
168
168
|
"use strict";
|
|
169
169
|
var isWindows = process.platform === "win32" || process.env.OSTYPE === "cygwin" || process.env.OSTYPE === "msys";
|
|
170
|
-
var
|
|
170
|
+
var path16 = require("path");
|
|
171
171
|
var COLON = isWindows ? ";" : ":";
|
|
172
172
|
var isexe = require_isexe();
|
|
173
173
|
var getNotFoundError = (cmd) => Object.assign(new Error(`not found: ${cmd}`), { code: "ENOENT" });
|
|
@@ -205,7 +205,7 @@ var require_which = __commonJS({
|
|
|
205
205
|
return opt.all && found.length ? resolve(found) : reject(getNotFoundError(cmd));
|
|
206
206
|
const ppRaw = pathEnv[i2];
|
|
207
207
|
const pathPart = /^".*"$/.test(ppRaw) ? ppRaw.slice(1, -1) : ppRaw;
|
|
208
|
-
const pCmd =
|
|
208
|
+
const pCmd = path16.join(pathPart, cmd);
|
|
209
209
|
const p = !pathPart && /^\.[\\\/]/.test(cmd) ? cmd.slice(0, 2) + pCmd : pCmd;
|
|
210
210
|
resolve(subStep(p, i2, 0));
|
|
211
211
|
});
|
|
@@ -232,7 +232,7 @@ var require_which = __commonJS({
|
|
|
232
232
|
for (let i2 = 0; i2 < pathEnv.length; i2++) {
|
|
233
233
|
const ppRaw = pathEnv[i2];
|
|
234
234
|
const pathPart = /^".*"$/.test(ppRaw) ? ppRaw.slice(1, -1) : ppRaw;
|
|
235
|
-
const pCmd =
|
|
235
|
+
const pCmd = path16.join(pathPart, cmd);
|
|
236
236
|
const p = !pathPart && /^\.[\\\/]/.test(cmd) ? cmd.slice(0, 2) + pCmd : pCmd;
|
|
237
237
|
for (let j = 0; j < pathExt.length; j++) {
|
|
238
238
|
const cur = p + pathExt[j];
|
|
@@ -280,7 +280,7 @@ var require_path_key = __commonJS({
|
|
|
280
280
|
var require_resolveCommand = __commonJS({
|
|
281
281
|
"node_modules/cross-spawn/lib/util/resolveCommand.js"(exports2, module2) {
|
|
282
282
|
"use strict";
|
|
283
|
-
var
|
|
283
|
+
var path16 = require("path");
|
|
284
284
|
var which = require_which();
|
|
285
285
|
var getPathKey = require_path_key();
|
|
286
286
|
function resolveCommandAttempt(parsed, withoutPathExt) {
|
|
@@ -298,7 +298,7 @@ var require_resolveCommand = __commonJS({
|
|
|
298
298
|
try {
|
|
299
299
|
resolved = which.sync(parsed.command, {
|
|
300
300
|
path: env[getPathKey({ env })],
|
|
301
|
-
pathExt: withoutPathExt ?
|
|
301
|
+
pathExt: withoutPathExt ? path16.delimiter : void 0
|
|
302
302
|
});
|
|
303
303
|
} catch (e) {
|
|
304
304
|
} finally {
|
|
@@ -307,7 +307,7 @@ var require_resolveCommand = __commonJS({
|
|
|
307
307
|
}
|
|
308
308
|
}
|
|
309
309
|
if (resolved) {
|
|
310
|
-
resolved =
|
|
310
|
+
resolved = path16.resolve(hasCustomCwd ? parsed.options.cwd : "", resolved);
|
|
311
311
|
}
|
|
312
312
|
return resolved;
|
|
313
313
|
}
|
|
@@ -361,8 +361,8 @@ var require_shebang_command = __commonJS({
|
|
|
361
361
|
if (!match) {
|
|
362
362
|
return null;
|
|
363
363
|
}
|
|
364
|
-
const [
|
|
365
|
-
const binary =
|
|
364
|
+
const [path16, argument] = match[0].replace(/#! ?/, "").split(" ");
|
|
365
|
+
const binary = path16.split("/").pop();
|
|
366
366
|
if (binary === "env") {
|
|
367
367
|
return argument;
|
|
368
368
|
}
|
|
@@ -375,16 +375,16 @@ var require_shebang_command = __commonJS({
|
|
|
375
375
|
var require_readShebang = __commonJS({
|
|
376
376
|
"node_modules/cross-spawn/lib/util/readShebang.js"(exports2, module2) {
|
|
377
377
|
"use strict";
|
|
378
|
-
var
|
|
378
|
+
var fs13 = require("fs");
|
|
379
379
|
var shebangCommand = require_shebang_command();
|
|
380
380
|
function readShebang(command) {
|
|
381
381
|
const size = 150;
|
|
382
382
|
const buffer = Buffer.alloc(size);
|
|
383
383
|
let fd;
|
|
384
384
|
try {
|
|
385
|
-
fd =
|
|
386
|
-
|
|
387
|
-
|
|
385
|
+
fd = fs13.openSync(command, "r");
|
|
386
|
+
fs13.readSync(fd, buffer, 0, size, 0);
|
|
387
|
+
fs13.closeSync(fd);
|
|
388
388
|
} catch (e) {
|
|
389
389
|
}
|
|
390
390
|
return shebangCommand(buffer.toString());
|
|
@@ -397,7 +397,7 @@ var require_readShebang = __commonJS({
|
|
|
397
397
|
var require_parse = __commonJS({
|
|
398
398
|
"node_modules/cross-spawn/lib/parse.js"(exports2, module2) {
|
|
399
399
|
"use strict";
|
|
400
|
-
var
|
|
400
|
+
var path16 = require("path");
|
|
401
401
|
var resolveCommand = require_resolveCommand();
|
|
402
402
|
var escape = require_escape();
|
|
403
403
|
var readShebang = require_readShebang();
|
|
@@ -422,7 +422,7 @@ var require_parse = __commonJS({
|
|
|
422
422
|
const needsShell = !isExecutableRegExp.test(commandFile);
|
|
423
423
|
if (parsed.options.forceShell || needsShell) {
|
|
424
424
|
const needsDoubleEscapeMetaChars = isCmdShimRegExp.test(commandFile);
|
|
425
|
-
parsed.command =
|
|
425
|
+
parsed.command = path16.normalize(parsed.command);
|
|
426
426
|
parsed.command = escape.command(parsed.command);
|
|
427
427
|
parsed.args = parsed.args.map((arg) => escape.argument(arg, needsDoubleEscapeMetaChars));
|
|
428
428
|
const shellCommand = [parsed.command].concat(parsed.args).join(" ");
|
|
@@ -512,7 +512,7 @@ var require_cross_spawn = __commonJS({
|
|
|
512
512
|
var cp = require("child_process");
|
|
513
513
|
var parse = require_parse();
|
|
514
514
|
var enoent = require_enoent();
|
|
515
|
-
function
|
|
515
|
+
function spawn5(command, args, options) {
|
|
516
516
|
const parsed = parse(command, args, options);
|
|
517
517
|
const spawned = cp.spawn(parsed.command, parsed.args, parsed.options);
|
|
518
518
|
enoent.hookChildProcess(spawned, parsed);
|
|
@@ -524,8 +524,8 @@ var require_cross_spawn = __commonJS({
|
|
|
524
524
|
result.error = result.error || enoent.verifyENOENTSync(result.status, parsed);
|
|
525
525
|
return result;
|
|
526
526
|
}
|
|
527
|
-
module2.exports =
|
|
528
|
-
module2.exports.spawn =
|
|
527
|
+
module2.exports = spawn5;
|
|
528
|
+
module2.exports.spawn = spawn5;
|
|
529
529
|
module2.exports.sync = spawnSync3;
|
|
530
530
|
module2.exports._parse = parse;
|
|
531
531
|
module2.exports._enoent = enoent;
|
|
@@ -538,21 +538,25 @@ __export(hook_stop_collab_exports, {
|
|
|
538
538
|
runHookStopCollab: () => runHookStopCollab
|
|
539
539
|
});
|
|
540
540
|
module.exports = __toCommonJS(hook_stop_collab_exports);
|
|
541
|
-
var
|
|
542
|
-
var import_node_fs7 = require("fs");
|
|
543
|
-
var import_node_path10 = __toESM(require("path"), 1);
|
|
541
|
+
var import_node_child_process9 = require("child_process");
|
|
544
542
|
|
|
545
|
-
// node_modules/@remixhq/core/dist/chunk-
|
|
543
|
+
// node_modules/@remixhq/core/dist/chunk-7XJGOKEO.js
|
|
546
544
|
var RemixError = class extends Error {
|
|
547
545
|
code;
|
|
548
546
|
exitCode;
|
|
549
547
|
hint;
|
|
548
|
+
// HTTP status code when this error originates from an API response.
|
|
549
|
+
// null for non-HTTP errors (validation, local IO, programming bugs).
|
|
550
|
+
// Callers use this to distinguish transient (5xx) from permanent (4xx)
|
|
551
|
+
// API failures without resorting to error-message string matching.
|
|
552
|
+
statusCode;
|
|
550
553
|
constructor(message, opts) {
|
|
551
554
|
super(message);
|
|
552
555
|
this.name = "RemixError";
|
|
553
556
|
this.code = opts?.code ?? null;
|
|
554
557
|
this.exitCode = opts?.exitCode ?? 1;
|
|
555
558
|
this.hint = opts?.hint ?? null;
|
|
559
|
+
this.statusCode = opts?.statusCode ?? null;
|
|
556
560
|
}
|
|
557
561
|
};
|
|
558
562
|
|
|
@@ -4937,13 +4941,13 @@ var logOutputSync = ({ serializedResult, fdNumber, state, verboseInfo, encoding,
|
|
|
4937
4941
|
}
|
|
4938
4942
|
};
|
|
4939
4943
|
var writeToFiles = (serializedResult, stdioItems, outputFiles) => {
|
|
4940
|
-
for (const { path:
|
|
4941
|
-
const pathString = typeof
|
|
4944
|
+
for (const { path: path16, append } of stdioItems.filter(({ type }) => FILE_TYPES.has(type))) {
|
|
4945
|
+
const pathString = typeof path16 === "string" ? path16 : path16.toString();
|
|
4942
4946
|
if (append || outputFiles.has(pathString)) {
|
|
4943
|
-
(0, import_node_fs4.appendFileSync)(
|
|
4947
|
+
(0, import_node_fs4.appendFileSync)(path16, serializedResult);
|
|
4944
4948
|
} else {
|
|
4945
4949
|
outputFiles.add(pathString);
|
|
4946
|
-
(0, import_node_fs4.writeFileSync)(
|
|
4950
|
+
(0, import_node_fs4.writeFileSync)(path16, serializedResult);
|
|
4947
4951
|
}
|
|
4948
4952
|
}
|
|
4949
4953
|
};
|
|
@@ -7331,7 +7335,7 @@ var {
|
|
|
7331
7335
|
getCancelSignal: getCancelSignal2
|
|
7332
7336
|
} = getIpcExport();
|
|
7333
7337
|
|
|
7334
|
-
// node_modules/@remixhq/core/dist/chunk-
|
|
7338
|
+
// node_modules/@remixhq/core/dist/chunk-S4ECO35X.js
|
|
7335
7339
|
async function runGit(args, cwd) {
|
|
7336
7340
|
const res = await execa("git", args, { cwd, stderr: "ignore" });
|
|
7337
7341
|
return String(res.stdout || "").trim();
|
|
@@ -7386,7 +7390,7 @@ function summarizeUnifiedDiff(diff) {
|
|
|
7386
7390
|
return { changedFilesCount, insertions, deletions };
|
|
7387
7391
|
}
|
|
7388
7392
|
|
|
7389
|
-
// node_modules/@remixhq/core/dist/chunk-
|
|
7393
|
+
// node_modules/@remixhq/core/dist/chunk-DBVN42RF.js
|
|
7390
7394
|
var import_promises12 = __toESM(require("fs/promises"), 1);
|
|
7391
7395
|
var import_path = __toESM(require("path"), 1);
|
|
7392
7396
|
var import_promises13 = __toESM(require("fs/promises"), 1);
|
|
@@ -7685,6 +7689,8 @@ function buildAppDeltaCacheKey(appId, payload) {
|
|
|
7685
7689
|
appId,
|
|
7686
7690
|
payload.baseHeadHash,
|
|
7687
7691
|
payload.targetHeadHash ?? "",
|
|
7692
|
+
payload.baseRevisionId ?? "",
|
|
7693
|
+
payload.targetRevisionId ?? "",
|
|
7688
7694
|
payload.localSnapshotHash ?? "",
|
|
7689
7695
|
payload.repoFingerprint ?? "",
|
|
7690
7696
|
payload.remoteUrl ?? "",
|
|
@@ -7931,11 +7937,11 @@ async function readLocalBaseline(params) {
|
|
|
7931
7937
|
const raw = await import_promises15.default.readFile(getBaselinePath(params), "utf8");
|
|
7932
7938
|
const parsed = JSON.parse(raw);
|
|
7933
7939
|
if (!parsed || typeof parsed !== "object") return null;
|
|
7934
|
-
if (parsed.schemaVersion
|
|
7940
|
+
if (![1, 2].includes(Number(parsed.schemaVersion)) || typeof parsed.key !== "string" || typeof parsed.repoRoot !== "string") {
|
|
7935
7941
|
return null;
|
|
7936
7942
|
}
|
|
7937
7943
|
return {
|
|
7938
|
-
schemaVersion: 1,
|
|
7944
|
+
schemaVersion: Number(parsed.schemaVersion) === 2 ? 2 : 1,
|
|
7939
7945
|
key: parsed.key,
|
|
7940
7946
|
repoRoot: parsed.repoRoot,
|
|
7941
7947
|
repoFingerprint: parsed.repoFingerprint ?? null,
|
|
@@ -7944,6 +7950,8 @@ async function readLocalBaseline(params) {
|
|
|
7944
7950
|
branchName: parsed.branchName ?? null,
|
|
7945
7951
|
lastSnapshotId: parsed.lastSnapshotId ?? null,
|
|
7946
7952
|
lastSnapshotHash: parsed.lastSnapshotHash ?? null,
|
|
7953
|
+
lastServerRevisionId: parsed.lastServerRevisionId ?? null,
|
|
7954
|
+
lastServerTreeHash: parsed.lastServerTreeHash ?? null,
|
|
7947
7955
|
lastServerHeadHash: parsed.lastServerHeadHash ?? null,
|
|
7948
7956
|
lastSeenLocalCommitHash: parsed.lastSeenLocalCommitHash ?? null,
|
|
7949
7957
|
updatedAt: String(parsed.updatedAt ?? "")
|
|
@@ -7955,7 +7963,7 @@ async function readLocalBaseline(params) {
|
|
|
7955
7963
|
async function writeLocalBaseline(baseline) {
|
|
7956
7964
|
const key = buildLaneStateKey(baseline);
|
|
7957
7965
|
const normalized = {
|
|
7958
|
-
schemaVersion:
|
|
7966
|
+
schemaVersion: 2,
|
|
7959
7967
|
key,
|
|
7960
7968
|
repoRoot: baseline.repoRoot,
|
|
7961
7969
|
repoFingerprint: baseline.repoFingerprint ?? null,
|
|
@@ -7964,6 +7972,8 @@ async function writeLocalBaseline(baseline) {
|
|
|
7964
7972
|
branchName: baseline.branchName ?? null,
|
|
7965
7973
|
lastSnapshotId: baseline.lastSnapshotId ?? null,
|
|
7966
7974
|
lastSnapshotHash: baseline.lastSnapshotHash ?? null,
|
|
7975
|
+
lastServerRevisionId: baseline.lastServerRevisionId ?? null,
|
|
7976
|
+
lastServerTreeHash: baseline.lastServerTreeHash ?? null,
|
|
7967
7977
|
lastServerHeadHash: baseline.lastServerHeadHash ?? null,
|
|
7968
7978
|
lastSeenLocalCommitHash: baseline.lastSeenLocalCommitHash ?? null,
|
|
7969
7979
|
updatedAt: baseline.updatedAt ?? (/* @__PURE__ */ new Date()).toISOString()
|
|
@@ -8268,6 +8278,7 @@ function normalizeJob2(input) {
|
|
|
8268
8278
|
prompt: input.prompt,
|
|
8269
8279
|
assistantResponse: input.assistantResponse,
|
|
8270
8280
|
baselineSnapshotId: input.baselineSnapshotId ?? null,
|
|
8281
|
+
baselineServerRevisionId: input.baselineServerRevisionId ?? null,
|
|
8271
8282
|
baselineServerHeadHash: input.baselineServerHeadHash ?? null,
|
|
8272
8283
|
currentSnapshotId: input.currentSnapshotId,
|
|
8273
8284
|
capturedAt: input.capturedAt ?? now,
|
|
@@ -8302,6 +8313,7 @@ async function readPendingFinalizeJob(jobId) {
|
|
|
8302
8313
|
prompt: String(parsed.prompt ?? ""),
|
|
8303
8314
|
assistantResponse: String(parsed.assistantResponse ?? ""),
|
|
8304
8315
|
baselineSnapshotId: parsed.baselineSnapshotId ?? null,
|
|
8316
|
+
baselineServerRevisionId: parsed.baselineServerRevisionId ?? null,
|
|
8305
8317
|
baselineServerHeadHash: parsed.baselineServerHeadHash ?? null,
|
|
8306
8318
|
currentSnapshotId: String(parsed.currentSnapshotId ?? ""),
|
|
8307
8319
|
capturedAt: parsed.capturedAt,
|
|
@@ -8803,6 +8815,8 @@ function buildBaseState() {
|
|
|
8803
8815
|
branchName: null,
|
|
8804
8816
|
localCommitHash: null,
|
|
8805
8817
|
currentSnapshotHash: null,
|
|
8818
|
+
currentServerRevisionId: null,
|
|
8819
|
+
currentServerTreeHash: null,
|
|
8806
8820
|
currentServerHeadHash: null,
|
|
8807
8821
|
currentServerHeadCommitId: null,
|
|
8808
8822
|
worktreeClean: false,
|
|
@@ -8836,6 +8850,8 @@ function buildBaseState() {
|
|
|
8836
8850
|
baseline: {
|
|
8837
8851
|
lastSnapshotId: null,
|
|
8838
8852
|
lastSnapshotHash: null,
|
|
8853
|
+
lastServerRevisionId: null,
|
|
8854
|
+
lastServerTreeHash: null,
|
|
8839
8855
|
lastServerHeadHash: null,
|
|
8840
8856
|
lastSeenLocalCommitHash: null
|
|
8841
8857
|
}
|
|
@@ -8962,6 +8978,8 @@ async function collabDetectRepoState(params) {
|
|
|
8962
8978
|
summarizeAsyncJobs({ repoRoot, branchName: binding.branchName ?? null })
|
|
8963
8979
|
]);
|
|
8964
8980
|
const appHead = unwrapResponseObject(headResp, "app head");
|
|
8981
|
+
detected.currentServerRevisionId = appHead.headRevisionId ?? null;
|
|
8982
|
+
detected.currentServerTreeHash = appHead.treeHash ?? null;
|
|
8965
8983
|
detected.currentServerHeadHash = appHead.headCommitHash;
|
|
8966
8984
|
detected.currentServerHeadCommitId = appHead.headCommitId;
|
|
8967
8985
|
detected.currentSnapshotHash = inspection.snapshotHash;
|
|
@@ -8970,6 +8988,8 @@ async function collabDetectRepoState(params) {
|
|
|
8970
8988
|
detected.baseline = {
|
|
8971
8989
|
lastSnapshotId: baseline?.lastSnapshotId ?? null,
|
|
8972
8990
|
lastSnapshotHash: baseline?.lastSnapshotHash ?? null,
|
|
8991
|
+
lastServerRevisionId: baseline?.lastServerRevisionId ?? null,
|
|
8992
|
+
lastServerTreeHash: baseline?.lastServerTreeHash ?? null,
|
|
8973
8993
|
lastServerHeadHash: baseline?.lastServerHeadHash ?? null,
|
|
8974
8994
|
lastSeenLocalCommitHash: baseline?.lastSeenLocalCommitHash ?? null
|
|
8975
8995
|
};
|
|
@@ -8979,6 +8999,7 @@ async function collabDetectRepoState(params) {
|
|
|
8979
8999
|
const bootstrapResp = await params.api.getAppDelta(binding.currentAppId, {
|
|
8980
9000
|
baseHeadHash: localCommitHash,
|
|
8981
9001
|
targetHeadHash: appHead.headCommitHash,
|
|
9002
|
+
targetRevisionId: appHead.headRevisionId,
|
|
8982
9003
|
repoFingerprint: binding.repoFingerprint ?? void 0,
|
|
8983
9004
|
remoteUrl: binding.remoteUrl ?? void 0,
|
|
8984
9005
|
defaultBranch: binding.defaultBranch ?? void 0
|
|
@@ -9001,7 +9022,7 @@ async function collabDetectRepoState(params) {
|
|
|
9001
9022
|
}
|
|
9002
9023
|
}
|
|
9003
9024
|
detected.repoState = "external_local_base_changed";
|
|
9004
|
-
detected.hint = "No local Remix baseline exists for this lane yet. Run `remix collab
|
|
9025
|
+
detected.hint = "No local Remix revision baseline exists for this lane yet. Run `remix collab init` or sync this lane to seed the baseline.";
|
|
9005
9026
|
return detected;
|
|
9006
9027
|
}
|
|
9007
9028
|
const localHeadMovedSinceBaseline = Boolean(baseline.lastSeenLocalCommitHash) && localCommitHash !== baseline.lastSeenLocalCommitHash;
|
|
@@ -9020,7 +9041,30 @@ async function collabDetectRepoState(params) {
|
|
|
9020
9041
|
return detected;
|
|
9021
9042
|
}
|
|
9022
9043
|
const localChanged = inspection.snapshotHash !== baseline.lastSnapshotHash;
|
|
9023
|
-
const
|
|
9044
|
+
const serverHeadChanged = appHead.headCommitHash !== baseline.lastServerHeadHash;
|
|
9045
|
+
const revisionChanged = Boolean(
|
|
9046
|
+
baseline.lastServerRevisionId && (appHead.headRevisionId ?? null) !== baseline.lastServerRevisionId
|
|
9047
|
+
);
|
|
9048
|
+
const equivalentRevisionDrift = revisionChanged && !serverHeadChanged;
|
|
9049
|
+
if (equivalentRevisionDrift) {
|
|
9050
|
+
await writeLocalBaseline({
|
|
9051
|
+
repoRoot,
|
|
9052
|
+
repoFingerprint: binding.repoFingerprint,
|
|
9053
|
+
laneId: binding.laneId,
|
|
9054
|
+
currentAppId: binding.currentAppId,
|
|
9055
|
+
branchName: binding.branchName,
|
|
9056
|
+
lastSnapshotId: baseline.lastSnapshotId,
|
|
9057
|
+
lastSnapshotHash: baseline.lastSnapshotHash,
|
|
9058
|
+
lastServerRevisionId: appHead.headRevisionId ?? null,
|
|
9059
|
+
lastServerTreeHash: appHead.treeHash ?? baseline.lastServerTreeHash ?? null,
|
|
9060
|
+
lastServerHeadHash: appHead.headCommitHash,
|
|
9061
|
+
lastSeenLocalCommitHash: baseline.lastSeenLocalCommitHash
|
|
9062
|
+
});
|
|
9063
|
+
detected.baseline.lastServerRevisionId = appHead.headRevisionId ?? null;
|
|
9064
|
+
detected.baseline.lastServerTreeHash = appHead.treeHash ?? baseline.lastServerTreeHash ?? null;
|
|
9065
|
+
detected.baseline.lastServerHeadHash = appHead.headCommitHash;
|
|
9066
|
+
}
|
|
9067
|
+
const serverChanged = serverHeadChanged;
|
|
9024
9068
|
if (!localChanged && !serverChanged) {
|
|
9025
9069
|
detected.repoState = "idle";
|
|
9026
9070
|
return detected;
|
|
@@ -9444,6 +9488,7 @@ function buildWorkspaceMetadata(params) {
|
|
|
9444
9488
|
recordingMode: "boundary_delta",
|
|
9445
9489
|
baselineSnapshotId: params.baselineSnapshotId,
|
|
9446
9490
|
currentSnapshotId: params.currentSnapshotId,
|
|
9491
|
+
baselineServerRevisionId: params.baselineServerRevisionId ?? null,
|
|
9447
9492
|
baselineServerHeadHash: params.baselineServerHeadHash,
|
|
9448
9493
|
currentSnapshotHash: params.currentSnapshotHash,
|
|
9449
9494
|
localCommitHash: params.localCommitHash,
|
|
@@ -9522,12 +9567,12 @@ async function processClaimedPendingFinalizeJobInner(params) {
|
|
|
9522
9567
|
throw buildFinalizeCliError({
|
|
9523
9568
|
message: "Local baseline is missing for this queued finalize job.",
|
|
9524
9569
|
exitCode: 2,
|
|
9525
|
-
hint: "Run `remix collab
|
|
9570
|
+
hint: "Run `remix collab init` to seed this checkout's revision baseline.",
|
|
9526
9571
|
disposition: "terminal",
|
|
9527
9572
|
reason: "baseline_missing"
|
|
9528
9573
|
});
|
|
9529
9574
|
}
|
|
9530
|
-
const baselineDrifted = baseline.lastSnapshotId !== job.baselineSnapshotId || baseline.lastServerHeadHash !== job.baselineServerHeadHash;
|
|
9575
|
+
const baselineDrifted = baseline.lastSnapshotId !== job.baselineSnapshotId || (job.baselineServerRevisionId ? baseline.lastServerRevisionId !== job.baselineServerRevisionId : false) || baseline.lastServerHeadHash !== job.baselineServerHeadHash;
|
|
9531
9576
|
const appHead = unwrapResponseObject(appHeadResp, "app head");
|
|
9532
9577
|
const remoteUrl = readMetadataString(job, "remoteUrl");
|
|
9533
9578
|
const defaultBranch = readMetadataString(job, "defaultBranch");
|
|
@@ -9550,12 +9595,13 @@ async function processClaimedPendingFinalizeJobInner(params) {
|
|
|
9550
9595
|
throw buildFinalizeCliError({
|
|
9551
9596
|
message: "Finalize queue baseline drifted before this job was processed.",
|
|
9552
9597
|
exitCode: 1,
|
|
9553
|
-
hint: "Process queued finalize jobs in capture order, or
|
|
9598
|
+
hint: "Process queued finalize jobs in capture order, or run `remix collab init` to refresh the revision baseline before retrying.",
|
|
9554
9599
|
disposition: "terminal",
|
|
9555
9600
|
reason: "baseline_drifted"
|
|
9556
9601
|
});
|
|
9557
9602
|
}
|
|
9558
|
-
|
|
9603
|
+
const serverStillAtBaseline = job.baselineServerRevisionId ? appHead.headRevisionId === job.baselineServerRevisionId : appHead.headCommitHash === job.baselineServerHeadHash;
|
|
9604
|
+
if (!serverStillAtBaseline) {
|
|
9559
9605
|
throw buildFinalizeCliError({
|
|
9560
9606
|
message: "Server lane changed before a no-diff turn could be recorded.",
|
|
9561
9607
|
exitCode: 2,
|
|
@@ -9577,6 +9623,7 @@ async function processClaimedPendingFinalizeJobInner(params) {
|
|
|
9577
9623
|
defaultBranch,
|
|
9578
9624
|
baselineSnapshotId: job.baselineSnapshotId,
|
|
9579
9625
|
currentSnapshotId: job.currentSnapshotId,
|
|
9626
|
+
baselineServerRevisionId: job.baselineServerRevisionId,
|
|
9580
9627
|
baselineServerHeadHash: job.baselineServerHeadHash,
|
|
9581
9628
|
currentSnapshotHash: snapshot.snapshotHash,
|
|
9582
9629
|
localCommitHash: snapshot.localCommitHash,
|
|
@@ -9597,6 +9644,8 @@ async function processClaimedPendingFinalizeJobInner(params) {
|
|
|
9597
9644
|
branchName: job.branchName,
|
|
9598
9645
|
lastSnapshotId: snapshot.id,
|
|
9599
9646
|
lastSnapshotHash: snapshot.snapshotHash,
|
|
9647
|
+
lastServerRevisionId: appHead.headRevisionId ?? null,
|
|
9648
|
+
lastServerTreeHash: appHead.treeHash ?? null,
|
|
9600
9649
|
lastServerHeadHash: appHead.headCommitHash,
|
|
9601
9650
|
lastSeenLocalCommitHash: snapshot.localCommitHash
|
|
9602
9651
|
});
|
|
@@ -9617,14 +9666,14 @@ async function processClaimedPendingFinalizeJobInner(params) {
|
|
|
9617
9666
|
};
|
|
9618
9667
|
}
|
|
9619
9668
|
const localBaselineAdvanced = baseline.lastSnapshotId !== job.baselineSnapshotId;
|
|
9620
|
-
const serverHeadAdvanced = appHead.headCommitHash !== job.baselineServerHeadHash;
|
|
9669
|
+
const serverHeadAdvanced = job.baselineServerRevisionId ? appHead.headRevisionId !== job.baselineServerRevisionId : appHead.headCommitHash !== job.baselineServerHeadHash;
|
|
9621
9670
|
if (baselineDrifted) {
|
|
9622
9671
|
const consistentAdvance = localBaselineAdvanced && serverHeadAdvanced;
|
|
9623
9672
|
if (!consistentAdvance) {
|
|
9624
9673
|
throw buildFinalizeCliError({
|
|
9625
9674
|
message: `Finalize queue baseline advanced inconsistently before this job was processed (localBaselineAdvanced=${localBaselineAdvanced}, serverHeadAdvanced=${serverHeadAdvanced}, jobBaselineSnapshotId=${job.baselineSnapshotId ?? "null"}, liveBaselineSnapshotId=${baseline.lastSnapshotId ?? "null"}, jobBaselineServerHeadHash=${job.baselineServerHeadHash ?? "null"}, liveBaselineServerHeadHash=${baseline.lastServerHeadHash ?? "null"}, currentAppHeadHash=${appHead.headCommitHash}). This indicates local Remix state diverged from the backend in a way that should not be reachable in normal operation; please report this as a bug.`,
|
|
9626
9675
|
exitCode: 1,
|
|
9627
|
-
hint: "Run `remix collab status` to inspect, then
|
|
9676
|
+
hint: "Run `remix collab status` to inspect, then sync or reconcile before retrying.",
|
|
9628
9677
|
disposition: "terminal",
|
|
9629
9678
|
reason: "baseline_drifted"
|
|
9630
9679
|
});
|
|
@@ -9632,6 +9681,7 @@ async function processClaimedPendingFinalizeJobInner(params) {
|
|
|
9632
9681
|
}
|
|
9633
9682
|
let submissionDiff = diffResult.diff;
|
|
9634
9683
|
let submissionBaseHeadHash = job.baselineServerHeadHash;
|
|
9684
|
+
let submissionBaseRevisionId = job.baselineServerRevisionId;
|
|
9635
9685
|
let replayedFromBaseHash = null;
|
|
9636
9686
|
if (!submissionBaseHeadHash) {
|
|
9637
9687
|
throw buildFinalizeCliError({
|
|
@@ -9649,7 +9699,9 @@ async function processClaimedPendingFinalizeJobInner(params) {
|
|
|
9649
9699
|
assistantResponse: job.assistantResponse,
|
|
9650
9700
|
diff: diffResult.diff,
|
|
9651
9701
|
baseCommitHash: submissionBaseHeadHash,
|
|
9702
|
+
baseRevisionId: job.baselineServerRevisionId,
|
|
9652
9703
|
targetHeadCommitHash: appHead.headCommitHash,
|
|
9704
|
+
targetRevisionId: appHead.headRevisionId,
|
|
9653
9705
|
expectedPaths: diffResult.changedPaths,
|
|
9654
9706
|
actor,
|
|
9655
9707
|
workspaceMetadata: buildWorkspaceMetadata({
|
|
@@ -9659,6 +9711,7 @@ async function processClaimedPendingFinalizeJobInner(params) {
|
|
|
9659
9711
|
defaultBranch,
|
|
9660
9712
|
baselineSnapshotId: job.baselineSnapshotId,
|
|
9661
9713
|
currentSnapshotId: job.currentSnapshotId,
|
|
9714
|
+
baselineServerRevisionId: job.baselineServerRevisionId,
|
|
9662
9715
|
baselineServerHeadHash: job.baselineServerHeadHash,
|
|
9663
9716
|
currentSnapshotHash: snapshot.snapshotHash,
|
|
9664
9717
|
localCommitHash: snapshot.localCommitHash,
|
|
@@ -9684,6 +9737,7 @@ async function processClaimedPendingFinalizeJobInner(params) {
|
|
|
9684
9737
|
submissionDiff = replayDiff.diff;
|
|
9685
9738
|
replayedFromBaseHash = submissionBaseHeadHash;
|
|
9686
9739
|
submissionBaseHeadHash = appHead.headCommitHash;
|
|
9740
|
+
submissionBaseRevisionId = appHead.headRevisionId;
|
|
9687
9741
|
} catch (error) {
|
|
9688
9742
|
if (error instanceof RemixError && error.finalizeDisposition === void 0) {
|
|
9689
9743
|
const detail = error.hint ? `${error.message} (${error.hint})` : error.message;
|
|
@@ -9705,6 +9759,7 @@ async function processClaimedPendingFinalizeJobInner(params) {
|
|
|
9705
9759
|
assistantResponse: job.assistantResponse,
|
|
9706
9760
|
diff: submissionDiff,
|
|
9707
9761
|
baseCommitHash: submissionBaseHeadHash,
|
|
9762
|
+
baseRevisionId: submissionBaseRevisionId,
|
|
9708
9763
|
headCommitHash: submissionBaseHeadHash,
|
|
9709
9764
|
changedFilesCount: diffResult.stats.changedFilesCount,
|
|
9710
9765
|
insertions: diffResult.stats.insertions,
|
|
@@ -9717,6 +9772,7 @@ async function processClaimedPendingFinalizeJobInner(params) {
|
|
|
9717
9772
|
defaultBranch,
|
|
9718
9773
|
baselineSnapshotId: job.baselineSnapshotId,
|
|
9719
9774
|
currentSnapshotId: job.currentSnapshotId,
|
|
9775
|
+
baselineServerRevisionId: job.baselineServerRevisionId,
|
|
9720
9776
|
baselineServerHeadHash: job.baselineServerHeadHash,
|
|
9721
9777
|
currentSnapshotHash: snapshot.snapshotHash,
|
|
9722
9778
|
localCommitHash: snapshot.localCommitHash,
|
|
@@ -9738,11 +9794,28 @@ async function processClaimedPendingFinalizeJobInner(params) {
|
|
|
9738
9794
|
throw buildFinalizeCliError({
|
|
9739
9795
|
message: "Backend returned a succeeded change step without a head commit hash.",
|
|
9740
9796
|
exitCode: 1,
|
|
9741
|
-
hint: "This is a backend invariant violation; retry will not help.
|
|
9797
|
+
hint: "This is a backend invariant violation; retry will not help. Run `remix collab status` before trying again.",
|
|
9742
9798
|
disposition: "terminal",
|
|
9743
9799
|
reason: "missing_head_commit_hash"
|
|
9744
9800
|
});
|
|
9745
9801
|
}
|
|
9802
|
+
let nextServerRevisionId = typeof changeStep.resultRevisionId === "string" ? changeStep.resultRevisionId.trim() : "";
|
|
9803
|
+
let nextServerTreeHash = null;
|
|
9804
|
+
if (!nextServerRevisionId) {
|
|
9805
|
+
const freshHeadResp = await params.api.getAppHead(job.currentAppId);
|
|
9806
|
+
const freshHead = unwrapResponseObject(freshHeadResp, "app head");
|
|
9807
|
+
if (freshHead.headCommitHash !== nextServerHeadHash || !freshHead.headRevisionId) {
|
|
9808
|
+
throw buildFinalizeCliError({
|
|
9809
|
+
message: "Backend returned a succeeded change step without a matching result revision.",
|
|
9810
|
+
exitCode: 1,
|
|
9811
|
+
hint: "The local baseline was not advanced because the post-step revision could not be verified. Restart the backend/CLI and retry after checking `remix collab status`.",
|
|
9812
|
+
disposition: "terminal",
|
|
9813
|
+
reason: "missing_result_revision_id"
|
|
9814
|
+
});
|
|
9815
|
+
}
|
|
9816
|
+
nextServerRevisionId = freshHead.headRevisionId;
|
|
9817
|
+
nextServerTreeHash = freshHead.treeHash ?? null;
|
|
9818
|
+
}
|
|
9746
9819
|
await writeLocalBaseline({
|
|
9747
9820
|
repoRoot: job.repoRoot,
|
|
9748
9821
|
repoFingerprint: job.repoFingerprint,
|
|
@@ -9751,6 +9824,8 @@ async function processClaimedPendingFinalizeJobInner(params) {
|
|
|
9751
9824
|
branchName: job.branchName,
|
|
9752
9825
|
lastSnapshotId: snapshot.id,
|
|
9753
9826
|
lastSnapshotHash: snapshot.snapshotHash,
|
|
9827
|
+
lastServerRevisionId: nextServerRevisionId,
|
|
9828
|
+
lastServerTreeHash: nextServerTreeHash,
|
|
9754
9829
|
lastServerHeadHash: nextServerHeadHash,
|
|
9755
9830
|
lastSeenLocalCommitHash: snapshot.localCommitHash
|
|
9756
9831
|
});
|
|
@@ -9782,6 +9857,7 @@ async function enqueueCapturedFinalizeTurn(params) {
|
|
|
9782
9857
|
prompt: params.prompt,
|
|
9783
9858
|
assistantResponse: params.assistantResponse,
|
|
9784
9859
|
baselineSnapshotId: params.baselineSnapshotId,
|
|
9860
|
+
baselineServerRevisionId: params.baselineServerRevisionId ?? null,
|
|
9785
9861
|
baselineServerHeadHash: params.baselineServerHeadHash,
|
|
9786
9862
|
currentSnapshotId: params.currentSnapshotId,
|
|
9787
9863
|
idempotencyKey: params.idempotencyKey,
|
|
@@ -9880,17 +9956,6 @@ async function collabFinalizeTurn(params) {
|
|
|
9880
9956
|
});
|
|
9881
9957
|
}
|
|
9882
9958
|
}
|
|
9883
|
-
const pendingReAnchor = await findPendingAsyncJob({
|
|
9884
|
-
repoRoot,
|
|
9885
|
-
branchName: binding.branchName ?? null,
|
|
9886
|
-
kind: "re_anchor"
|
|
9887
|
-
});
|
|
9888
|
-
if (pendingReAnchor) {
|
|
9889
|
-
throw new RemixError("Cannot finalize a turn while a re-anchor is still processing.", {
|
|
9890
|
-
exitCode: 2,
|
|
9891
|
-
hint: `Re-anchor job ${pendingReAnchor.id} is still in the background queue. Run \`remix collab status\` to check progress.`
|
|
9892
|
-
});
|
|
9893
|
-
}
|
|
9894
9959
|
const detected = await collabDetectRepoState({
|
|
9895
9960
|
api: params.api,
|
|
9896
9961
|
cwd: repoRoot,
|
|
@@ -9931,9 +9996,16 @@ async function collabFinalizeTurn(params) {
|
|
|
9931
9996
|
hint: detected.hint
|
|
9932
9997
|
});
|
|
9933
9998
|
}
|
|
9999
|
+
if (detected.repoState === "both_changed") {
|
|
10000
|
+
throw new RemixError("Local and server changes must be reconciled before finalizing this turn.", {
|
|
10001
|
+
code: "reconcile_required",
|
|
10002
|
+
exitCode: 2,
|
|
10003
|
+
hint: detected.hint || "Run `remix collab reconcile --dry-run` to inspect recovery options before retrying."
|
|
10004
|
+
});
|
|
10005
|
+
}
|
|
9934
10006
|
if (detected.repoState === "external_local_base_changed") {
|
|
9935
|
-
throw new RemixError("The local checkout
|
|
9936
|
-
code: "
|
|
10007
|
+
throw new RemixError("The local checkout is missing a Remix revision baseline for this lane.", {
|
|
10008
|
+
code: "baseline_missing",
|
|
9937
10009
|
exitCode: 2,
|
|
9938
10010
|
hint: detected.hint
|
|
9939
10011
|
});
|
|
@@ -9945,8 +10017,9 @@ async function collabFinalizeTurn(params) {
|
|
|
9945
10017
|
});
|
|
9946
10018
|
if (!baseline) {
|
|
9947
10019
|
throw new RemixError("Local Remix baseline is missing for this lane.", {
|
|
10020
|
+
code: "baseline_missing",
|
|
9948
10021
|
exitCode: 2,
|
|
9949
|
-
hint: "Run `remix collab
|
|
10022
|
+
hint: "Run `remix collab init` or sync this lane to create a fresh revision baseline."
|
|
9950
10023
|
});
|
|
9951
10024
|
}
|
|
9952
10025
|
const snapshot = await captureLocalSnapshot({
|
|
@@ -9957,10 +10030,11 @@ async function collabFinalizeTurn(params) {
|
|
|
9957
10030
|
});
|
|
9958
10031
|
const mode = snapshot.snapshotHash === baseline.lastSnapshotHash ? "no_diff_turn" : "changed_turn";
|
|
9959
10032
|
const idempotencyKey = params.idempotencyKey?.trim() || buildDeterministicIdempotencyKey({
|
|
9960
|
-
kind: "
|
|
10033
|
+
kind: "collab_finalize_turn_boundary_v2",
|
|
9961
10034
|
appId: binding.currentAppId,
|
|
9962
10035
|
laneId: binding.laneId,
|
|
9963
10036
|
baselineSnapshotId: baseline.lastSnapshotId,
|
|
10037
|
+
baselineServerRevisionId: baseline.lastServerRevisionId,
|
|
9964
10038
|
baselineServerHeadHash: baseline.lastServerHeadHash,
|
|
9965
10039
|
currentSnapshotId: snapshot.id,
|
|
9966
10040
|
currentSnapshotHash: snapshot.snapshotHash,
|
|
@@ -9980,6 +10054,7 @@ async function collabFinalizeTurn(params) {
|
|
|
9980
10054
|
prompt,
|
|
9981
10055
|
assistantResponse,
|
|
9982
10056
|
baselineSnapshotId: baseline.lastSnapshotId,
|
|
10057
|
+
baselineServerRevisionId: baseline.lastServerRevisionId,
|
|
9983
10058
|
baselineServerHeadHash: baseline.lastServerHeadHash,
|
|
9984
10059
|
currentSnapshotId: snapshot.id,
|
|
9985
10060
|
idempotencyKey,
|
|
@@ -10026,186 +10101,1087 @@ var FINALIZE_PREFLIGHT_FAILURE_CODES = [
|
|
|
10026
10101
|
// Server has commits we don't. Fix: `remix collab sync` (safe to
|
|
10027
10102
|
// auto-run for fast-forward; non-FF refused by the command itself).
|
|
10028
10103
|
"pull_required",
|
|
10029
|
-
//
|
|
10030
|
-
|
|
10031
|
-
|
|
10104
|
+
// Both local and server changed. Fix: inspect and apply reconcile.
|
|
10105
|
+
"reconcile_required",
|
|
10106
|
+
// Local revision baseline is missing. Fix: `remix collab init` or sync.
|
|
10107
|
+
"baseline_missing"
|
|
10032
10108
|
];
|
|
10033
10109
|
var CODE_SET = new Set(FINALIZE_PREFLIGHT_FAILURE_CODES);
|
|
10110
|
+
function isFinalizePreflightFailureCode(value) {
|
|
10111
|
+
return typeof value === "string" && CODE_SET.has(value);
|
|
10112
|
+
}
|
|
10034
10113
|
|
|
10035
|
-
//
|
|
10036
|
-
|
|
10037
|
-
|
|
10038
|
-
|
|
10114
|
+
// src/auto-fix-dispatcher.ts
|
|
10115
|
+
var import_node_child_process6 = require("child_process");
|
|
10116
|
+
var import_node_fs6 = require("fs");
|
|
10117
|
+
var import_node_path9 = __toESM(require("path"), 1);
|
|
10118
|
+
|
|
10119
|
+
// src/finalize-failure-marker.ts
|
|
10120
|
+
var import_promises18 = __toESM(require("fs/promises"), 1);
|
|
10121
|
+
var import_node_path6 = __toESM(require("path"), 1);
|
|
10122
|
+
var FINALIZE_FAILURE_MARKER_REL = import_node_path6.default.join(".remix", ".last-finalize-failure.json");
|
|
10123
|
+
function markerPath(repoRoot) {
|
|
10124
|
+
return import_node_path6.default.join(repoRoot, FINALIZE_FAILURE_MARKER_REL);
|
|
10125
|
+
}
|
|
10126
|
+
async function writeFinalizeFailureMarker(marker) {
|
|
10127
|
+
const filePath = markerPath(marker.repoRoot);
|
|
10128
|
+
await import_promises18.default.mkdir(import_node_path6.default.dirname(filePath), { recursive: true });
|
|
10129
|
+
const tmpPath = `${filePath}.tmp-${process.pid}-${Date.now()}`;
|
|
10130
|
+
await import_promises18.default.writeFile(tmpPath, JSON.stringify(marker, null, 2), "utf8");
|
|
10131
|
+
await import_promises18.default.rename(tmpPath, filePath);
|
|
10132
|
+
}
|
|
10133
|
+
async function clearFinalizeFailureMarker(repoRoot) {
|
|
10134
|
+
await import_promises18.default.rm(markerPath(repoRoot), { force: true }).catch(() => void 0);
|
|
10135
|
+
}
|
|
10136
|
+
function buildFreshFailureMarker(params) {
|
|
10137
|
+
return {
|
|
10138
|
+
schemaVersion: 1,
|
|
10139
|
+
failedAt: (/* @__PURE__ */ new Date()).toISOString(),
|
|
10140
|
+
repoRoot: params.repoRoot,
|
|
10141
|
+
preflightCode: params.preflightCode,
|
|
10142
|
+
message: params.message,
|
|
10143
|
+
hint: params.hint,
|
|
10144
|
+
recommendedCommand: params.recommendedCommand,
|
|
10145
|
+
autoFix: {
|
|
10146
|
+
status: "not_attempted",
|
|
10147
|
+
command: null,
|
|
10148
|
+
pid: null,
|
|
10149
|
+
logPath: null,
|
|
10150
|
+
attemptedAt: null,
|
|
10151
|
+
failureMessage: null
|
|
10152
|
+
}
|
|
10153
|
+
};
|
|
10154
|
+
}
|
|
10155
|
+
|
|
10156
|
+
// src/hook-diagnostics.ts
|
|
10157
|
+
var import_node_crypto2 = require("crypto");
|
|
10158
|
+
var import_promises20 = __toESM(require("fs/promises"), 1);
|
|
10159
|
+
var import_node_os5 = __toESM(require("os"), 1);
|
|
10160
|
+
var import_node_path8 = __toESM(require("path"), 1);
|
|
10161
|
+
|
|
10162
|
+
// src/hook-state.ts
|
|
10163
|
+
var import_promises19 = __toESM(require("fs/promises"), 1);
|
|
10164
|
+
var import_node_os4 = __toESM(require("os"), 1);
|
|
10165
|
+
var import_node_path7 = __toESM(require("path"), 1);
|
|
10166
|
+
var import_node_crypto = require("crypto");
|
|
10167
|
+
function stateRoot() {
|
|
10168
|
+
const configured = process.env.REMIX_CLAUDE_PLUGIN_HOOK_STATE_ROOT?.trim();
|
|
10169
|
+
return configured || import_node_path7.default.join(import_node_os4.default.tmpdir(), "remix-claude-plugin-hooks");
|
|
10170
|
+
}
|
|
10171
|
+
function statePath(sessionId) {
|
|
10172
|
+
return import_node_path7.default.join(stateRoot(), `${sessionId}.json`);
|
|
10173
|
+
}
|
|
10174
|
+
function stateLockPath(sessionId) {
|
|
10175
|
+
return import_node_path7.default.join(stateRoot(), `${sessionId}.lock`);
|
|
10176
|
+
}
|
|
10177
|
+
function stateLockMetaPath(sessionId) {
|
|
10178
|
+
return import_node_path7.default.join(stateLockPath(sessionId), "owner.json");
|
|
10179
|
+
}
|
|
10180
|
+
async function writeJsonAtomic2(filePath, value) {
|
|
10181
|
+
await import_promises19.default.mkdir(import_node_path7.default.dirname(filePath), { recursive: true });
|
|
10182
|
+
const tmpPath = `${filePath}.tmp-${Date.now()}-${Math.random().toString(16).slice(2)}`;
|
|
10183
|
+
await import_promises19.default.writeFile(tmpPath, JSON.stringify(value, null, 2) + "\n", "utf8");
|
|
10184
|
+
await import_promises19.default.rename(tmpPath, filePath);
|
|
10185
|
+
}
|
|
10186
|
+
var STATE_LOCK_WAIT_MS = 2e3;
|
|
10187
|
+
var STATE_LOCK_POLL_MS = 25;
|
|
10188
|
+
var STATE_LOCK_STALE_MS = 3e4;
|
|
10189
|
+
var STATE_LOCK_HEARTBEAT_MS = 5e3;
|
|
10190
|
+
async function sleep2(ms) {
|
|
10191
|
+
await new Promise((resolve) => setTimeout(resolve, ms));
|
|
10192
|
+
}
|
|
10193
|
+
async function readStateLockMetadata(sessionId) {
|
|
10194
|
+
const raw = await import_promises19.default.readFile(stateLockMetaPath(sessionId), "utf8").catch(() => null);
|
|
10195
|
+
if (!raw) return null;
|
|
10039
10196
|
try {
|
|
10040
|
-
|
|
10197
|
+
const parsed = JSON.parse(raw);
|
|
10198
|
+
if (typeof parsed.ownerId !== "string" || typeof parsed.pid !== "number" || typeof parsed.createdAt !== "string" || typeof parsed.heartbeatAt !== "string") {
|
|
10199
|
+
return null;
|
|
10200
|
+
}
|
|
10201
|
+
return {
|
|
10202
|
+
ownerId: parsed.ownerId,
|
|
10203
|
+
pid: parsed.pid,
|
|
10204
|
+
createdAt: parsed.createdAt,
|
|
10205
|
+
heartbeatAt: parsed.heartbeatAt
|
|
10206
|
+
};
|
|
10041
10207
|
} catch {
|
|
10042
10208
|
return null;
|
|
10043
10209
|
}
|
|
10044
10210
|
}
|
|
10045
|
-
function
|
|
10046
|
-
|
|
10047
|
-
|
|
10048
|
-
|
|
10049
|
-
|
|
10050
|
-
|
|
10051
|
-
|
|
10052
|
-
|
|
10053
|
-
|
|
10054
|
-
|
|
10211
|
+
async function writeStateLockMetadata(sessionId, metadata) {
|
|
10212
|
+
await writeJsonAtomic2(stateLockMetaPath(sessionId), metadata);
|
|
10213
|
+
}
|
|
10214
|
+
async function tryRemoveStaleStateLock(sessionId) {
|
|
10215
|
+
const lockPath = stateLockPath(sessionId);
|
|
10216
|
+
const metadata = await readStateLockMetadata(sessionId);
|
|
10217
|
+
const staleByHeartbeat = metadata && Date.now() - new Date(metadata.heartbeatAt).getTime() > STATE_LOCK_STALE_MS;
|
|
10218
|
+
if (staleByHeartbeat) {
|
|
10219
|
+
await import_promises19.default.rm(lockPath, { recursive: true, force: true }).catch(() => void 0);
|
|
10220
|
+
return true;
|
|
10221
|
+
}
|
|
10222
|
+
if (!metadata) {
|
|
10223
|
+
const lockStat = await import_promises19.default.stat(lockPath).catch(() => null);
|
|
10224
|
+
if (lockStat && Date.now() - lockStat.mtimeMs > STATE_LOCK_STALE_MS) {
|
|
10225
|
+
await import_promises19.default.rm(lockPath, { recursive: true, force: true }).catch(() => void 0);
|
|
10226
|
+
return true;
|
|
10055
10227
|
}
|
|
10056
|
-
|
|
10057
|
-
|
|
10058
|
-
|
|
10059
|
-
|
|
10060
|
-
|
|
10061
|
-
|
|
10062
|
-
|
|
10063
|
-
|
|
10064
|
-
|
|
10065
|
-
|
|
10228
|
+
}
|
|
10229
|
+
return false;
|
|
10230
|
+
}
|
|
10231
|
+
async function acquireStateLock(sessionId) {
|
|
10232
|
+
const lockPath = stateLockPath(sessionId);
|
|
10233
|
+
const deadline = Date.now() + STATE_LOCK_WAIT_MS;
|
|
10234
|
+
await import_promises19.default.mkdir(stateRoot(), { recursive: true });
|
|
10235
|
+
while (true) {
|
|
10236
|
+
try {
|
|
10237
|
+
await import_promises19.default.mkdir(lockPath);
|
|
10238
|
+
const ownerId = (0, import_node_crypto.randomUUID)();
|
|
10239
|
+
const createdAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
10240
|
+
const metadata = {
|
|
10241
|
+
ownerId,
|
|
10242
|
+
pid: process.pid,
|
|
10243
|
+
createdAt,
|
|
10244
|
+
heartbeatAt: createdAt
|
|
10245
|
+
};
|
|
10246
|
+
await writeStateLockMetadata(sessionId, metadata);
|
|
10247
|
+
let released = false;
|
|
10248
|
+
const heartbeat = setInterval(() => {
|
|
10249
|
+
if (released) return;
|
|
10250
|
+
void writeStateLockMetadata(sessionId, {
|
|
10251
|
+
...metadata,
|
|
10252
|
+
heartbeatAt: (/* @__PURE__ */ new Date()).toISOString()
|
|
10253
|
+
}).catch(() => void 0);
|
|
10254
|
+
}, STATE_LOCK_HEARTBEAT_MS);
|
|
10255
|
+
heartbeat.unref?.();
|
|
10256
|
+
return async () => {
|
|
10257
|
+
if (released) return;
|
|
10258
|
+
released = true;
|
|
10259
|
+
clearInterval(heartbeat);
|
|
10260
|
+
const currentMetadata = await readStateLockMetadata(sessionId);
|
|
10261
|
+
if (currentMetadata?.ownerId === ownerId) {
|
|
10262
|
+
await import_promises19.default.rm(lockPath, { recursive: true, force: true }).catch(() => void 0);
|
|
10263
|
+
}
|
|
10264
|
+
};
|
|
10265
|
+
} catch (error) {
|
|
10266
|
+
const code = error && typeof error === "object" && "code" in error ? error.code : null;
|
|
10267
|
+
if (code !== "EEXIST") {
|
|
10268
|
+
throw error;
|
|
10066
10269
|
}
|
|
10067
|
-
|
|
10068
|
-
|
|
10069
|
-
|
|
10070
|
-
|
|
10071
|
-
|
|
10072
|
-
|
|
10073
|
-
|
|
10074
|
-
const body = await readJsonSafe(res);
|
|
10075
|
-
const msg = (body && typeof body === "object" && body && "message" in body && typeof body.message === "string" ? body.message : null) ?? `Request failed (status ${res.status})`;
|
|
10076
|
-
throw new RemixError(msg, { exitCode: 1, hint: body ? JSON.stringify(body, null, 2) : null });
|
|
10270
|
+
if (await tryRemoveStaleStateLock(sessionId)) {
|
|
10271
|
+
continue;
|
|
10272
|
+
}
|
|
10273
|
+
if (Date.now() >= deadline) {
|
|
10274
|
+
throw new Error(`Timed out acquiring hook state lock for session ${sessionId}.`);
|
|
10275
|
+
}
|
|
10276
|
+
await sleep2(STATE_LOCK_POLL_MS);
|
|
10077
10277
|
}
|
|
10078
|
-
const json = await readJsonSafe(res);
|
|
10079
|
-
return json ?? null;
|
|
10080
10278
|
}
|
|
10081
|
-
|
|
10082
|
-
|
|
10083
|
-
|
|
10084
|
-
|
|
10085
|
-
|
|
10086
|
-
|
|
10279
|
+
}
|
|
10280
|
+
async function withStateLock(sessionId, fn) {
|
|
10281
|
+
const release = await acquireStateLock(sessionId);
|
|
10282
|
+
try {
|
|
10283
|
+
return await fn();
|
|
10284
|
+
} finally {
|
|
10285
|
+
await release();
|
|
10286
|
+
}
|
|
10287
|
+
}
|
|
10288
|
+
function normalizeIntent(value) {
|
|
10289
|
+
return value === "memory_first" || value === "collab_state" || value === "git_facts" ? value : "neutral";
|
|
10290
|
+
}
|
|
10291
|
+
function normalizeString(value) {
|
|
10292
|
+
return typeof value === "string" && value.trim() ? value.trim() : null;
|
|
10293
|
+
}
|
|
10294
|
+
function normalizeStringArray(value) {
|
|
10295
|
+
if (!Array.isArray(value)) return [];
|
|
10296
|
+
return Array.from(
|
|
10297
|
+
new Set(
|
|
10298
|
+
value.filter((entry) => typeof entry === "string" && entry.trim().length > 0).map((entry) => entry.trim())
|
|
10299
|
+
)
|
|
10300
|
+
);
|
|
10301
|
+
}
|
|
10302
|
+
function normalizeManualRecordingScope(value) {
|
|
10303
|
+
if (value === "full_turn") {
|
|
10304
|
+
return "full_turn";
|
|
10305
|
+
}
|
|
10306
|
+
return null;
|
|
10307
|
+
}
|
|
10308
|
+
function normalizeTouchedRepo(value, repoRoot) {
|
|
10309
|
+
if (!value || typeof value !== "object") return null;
|
|
10310
|
+
const parsed = value;
|
|
10311
|
+
const normalizedRepoRoot = normalizeString(parsed.repoRoot) ?? repoRoot.trim();
|
|
10312
|
+
if (!normalizedRepoRoot) return null;
|
|
10313
|
+
return {
|
|
10314
|
+
repoRoot: normalizedRepoRoot,
|
|
10315
|
+
projectId: normalizeString(parsed.projectId),
|
|
10316
|
+
currentAppId: normalizeString(parsed.currentAppId),
|
|
10317
|
+
upstreamAppId: normalizeString(parsed.upstreamAppId),
|
|
10318
|
+
firstTouchedAt: normalizeString(parsed.firstTouchedAt) ?? (/* @__PURE__ */ new Date()).toISOString(),
|
|
10319
|
+
lastTouchedAt: normalizeString(parsed.lastTouchedAt) ?? (/* @__PURE__ */ new Date()).toISOString(),
|
|
10320
|
+
lastObservedWriteAt: normalizeString(parsed.lastObservedWriteAt),
|
|
10321
|
+
touchedBy: normalizeStringArray(parsed.touchedBy),
|
|
10322
|
+
hasObservedWrite: Boolean(parsed.hasObservedWrite),
|
|
10323
|
+
manuallyRecorded: Boolean(parsed.manuallyRecorded),
|
|
10324
|
+
manuallyRecordedAt: normalizeString(parsed.manuallyRecordedAt),
|
|
10325
|
+
manuallyRecordedByTool: normalizeString(parsed.manuallyRecordedByTool),
|
|
10326
|
+
manualRecordingScope: normalizeManualRecordingScope(parsed.manualRecordingScope),
|
|
10327
|
+
manualRemoteChangeRecordedAt: normalizeString(parsed.manualRemoteChangeRecordedAt),
|
|
10328
|
+
stopAttempted: Boolean(parsed.stopAttempted),
|
|
10329
|
+
stopRecorded: Boolean(parsed.stopRecorded),
|
|
10330
|
+
stopRecordedAt: normalizeString(parsed.stopRecordedAt),
|
|
10331
|
+
stopRecordedMode: parsed.stopRecordedMode === "changed_turn" || parsed.stopRecordedMode === "no_diff_turn" ? parsed.stopRecordedMode : null,
|
|
10332
|
+
recordingFailureMessage: normalizeString(parsed.recordingFailureMessage),
|
|
10333
|
+
recordingFailureHint: normalizeString(parsed.recordingFailureHint),
|
|
10334
|
+
recordingFailedAt: normalizeString(parsed.recordingFailedAt)
|
|
10335
|
+
};
|
|
10336
|
+
}
|
|
10337
|
+
function normalizeTouchedRepos(value) {
|
|
10338
|
+
if (!value || typeof value !== "object") return {};
|
|
10339
|
+
const entries = Object.entries(value).map(([repoRoot, repo]) => normalizeTouchedRepo(repo, repoRoot)).filter((repo) => repo !== null).sort((a2, b) => a2.repoRoot.localeCompare(b.repoRoot));
|
|
10340
|
+
return Object.fromEntries(entries.map((repo) => [repo.repoRoot, repo]));
|
|
10341
|
+
}
|
|
10342
|
+
function createTouchedRepo(params) {
|
|
10343
|
+
const now = (/* @__PURE__ */ new Date()).toISOString();
|
|
10344
|
+
const touchedBy = params.touchedBy?.trim() ? [params.touchedBy.trim()] : [];
|
|
10345
|
+
return {
|
|
10346
|
+
repoRoot: params.repoRoot,
|
|
10347
|
+
projectId: normalizeString(params.projectId),
|
|
10348
|
+
currentAppId: normalizeString(params.currentAppId),
|
|
10349
|
+
upstreamAppId: normalizeString(params.upstreamAppId),
|
|
10350
|
+
firstTouchedAt: now,
|
|
10351
|
+
lastTouchedAt: now,
|
|
10352
|
+
lastObservedWriteAt: params.hasObservedWrite ? now : null,
|
|
10353
|
+
touchedBy,
|
|
10354
|
+
hasObservedWrite: Boolean(params.hasObservedWrite),
|
|
10355
|
+
manuallyRecorded: false,
|
|
10356
|
+
manuallyRecordedAt: null,
|
|
10357
|
+
manuallyRecordedByTool: null,
|
|
10358
|
+
manualRecordingScope: null,
|
|
10359
|
+
manualRemoteChangeRecordedAt: null,
|
|
10360
|
+
stopAttempted: false,
|
|
10361
|
+
stopRecorded: false,
|
|
10362
|
+
stopRecordedAt: null,
|
|
10363
|
+
stopRecordedMode: null,
|
|
10364
|
+
recordingFailureMessage: null,
|
|
10365
|
+
recordingFailureHint: null,
|
|
10366
|
+
recordingFailedAt: null
|
|
10367
|
+
};
|
|
10368
|
+
}
|
|
10369
|
+
async function updatePendingTurnState(sessionId, updater) {
|
|
10370
|
+
return withStateLock(sessionId, async () => {
|
|
10371
|
+
const existing = await loadPendingTurnState(sessionId);
|
|
10372
|
+
if (!existing) return null;
|
|
10373
|
+
const result = updater(existing);
|
|
10374
|
+
if (result === false) return existing;
|
|
10375
|
+
await savePendingTurnState(existing);
|
|
10376
|
+
return existing;
|
|
10377
|
+
});
|
|
10378
|
+
}
|
|
10379
|
+
async function loadPendingTurnState(sessionId) {
|
|
10380
|
+
const raw = await import_promises19.default.readFile(statePath(sessionId), "utf8").catch(() => null);
|
|
10381
|
+
if (!raw) return null;
|
|
10382
|
+
try {
|
|
10383
|
+
const parsed = JSON.parse(raw);
|
|
10384
|
+
if (!parsed || typeof parsed !== "object") return null;
|
|
10385
|
+
if (typeof parsed.sessionId !== "string" || typeof parsed.turnId !== "string" || typeof parsed.prompt !== "string") {
|
|
10386
|
+
return null;
|
|
10087
10387
|
}
|
|
10088
|
-
|
|
10089
|
-
|
|
10090
|
-
|
|
10091
|
-
|
|
10092
|
-
|
|
10093
|
-
|
|
10094
|
-
|
|
10095
|
-
|
|
10096
|
-
|
|
10097
|
-
|
|
10388
|
+
return {
|
|
10389
|
+
sessionId: parsed.sessionId,
|
|
10390
|
+
turnId: parsed.turnId,
|
|
10391
|
+
prompt: parsed.prompt,
|
|
10392
|
+
initialCwd: normalizeString(parsed.initialCwd),
|
|
10393
|
+
intent: normalizeIntent(parsed.intent),
|
|
10394
|
+
submittedAt: typeof parsed.submittedAt === "string" ? parsed.submittedAt : (/* @__PURE__ */ new Date()).toISOString(),
|
|
10395
|
+
consultedMemory: Boolean(parsed.consultedMemory),
|
|
10396
|
+
touchedRepos: normalizeTouchedRepos(parsed.touchedRepos),
|
|
10397
|
+
turnFailureMessage: normalizeString(parsed.turnFailureMessage),
|
|
10398
|
+
turnFailureHint: normalizeString(parsed.turnFailureHint),
|
|
10399
|
+
turnFailedAt: normalizeString(parsed.turnFailedAt)
|
|
10400
|
+
};
|
|
10401
|
+
} catch {
|
|
10402
|
+
return null;
|
|
10403
|
+
}
|
|
10404
|
+
}
|
|
10405
|
+
async function savePendingTurnState(state) {
|
|
10406
|
+
await writeJsonAtomic2(statePath(state.sessionId), state);
|
|
10407
|
+
}
|
|
10408
|
+
async function upsertTouchedRepo(sessionId, params) {
|
|
10409
|
+
const normalizedRepoRoot = params.repoRoot.trim();
|
|
10410
|
+
if (!normalizedRepoRoot) return null;
|
|
10411
|
+
const state = await updatePendingTurnState(sessionId, (existing) => {
|
|
10412
|
+
const current = existing.touchedRepos[normalizedRepoRoot] ?? createTouchedRepo({
|
|
10413
|
+
repoRoot: normalizedRepoRoot,
|
|
10414
|
+
projectId: params.projectId,
|
|
10415
|
+
currentAppId: params.currentAppId,
|
|
10416
|
+
upstreamAppId: params.upstreamAppId,
|
|
10417
|
+
touchedBy: params.touchedBy,
|
|
10418
|
+
hasObservedWrite: params.hasObservedWrite
|
|
10098
10419
|
});
|
|
10099
|
-
|
|
10100
|
-
|
|
10101
|
-
|
|
10102
|
-
|
|
10420
|
+
current.projectId = normalizeString(params.projectId) ?? current.projectId;
|
|
10421
|
+
current.currentAppId = normalizeString(params.currentAppId) ?? current.currentAppId;
|
|
10422
|
+
current.upstreamAppId = normalizeString(params.upstreamAppId) ?? current.upstreamAppId;
|
|
10423
|
+
current.lastTouchedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
10424
|
+
if (params.touchedBy?.trim() && !current.touchedBy.includes(params.touchedBy.trim())) {
|
|
10425
|
+
current.touchedBy = [...current.touchedBy, params.touchedBy.trim()].sort((a2, b) => a2.localeCompare(b));
|
|
10103
10426
|
}
|
|
10104
|
-
if (
|
|
10105
|
-
|
|
10106
|
-
|
|
10107
|
-
throw new RemixError(msg, { exitCode: 1, hint: body ? JSON.stringify(body, null, 2) : null });
|
|
10427
|
+
if (params.hasObservedWrite) {
|
|
10428
|
+
current.hasObservedWrite = true;
|
|
10429
|
+
current.lastObservedWriteAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
10108
10430
|
}
|
|
10109
|
-
|
|
10110
|
-
|
|
10111
|
-
|
|
10112
|
-
|
|
10113
|
-
|
|
10114
|
-
|
|
10115
|
-
|
|
10431
|
+
existing.touchedRepos[normalizedRepoRoot] = current;
|
|
10432
|
+
});
|
|
10433
|
+
return state?.touchedRepos[normalizedRepoRoot] ?? null;
|
|
10434
|
+
}
|
|
10435
|
+
async function markTouchedRepoStopAttempted(sessionId, repoRoot) {
|
|
10436
|
+
await updatePendingTurnState(sessionId, (existing) => {
|
|
10437
|
+
const current = existing.touchedRepos[repoRoot];
|
|
10438
|
+
if (!current) return false;
|
|
10439
|
+
current.stopAttempted = true;
|
|
10440
|
+
current.lastTouchedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
10441
|
+
});
|
|
10442
|
+
}
|
|
10443
|
+
async function markTouchedRepoStopRecorded(sessionId, repoRoot, params) {
|
|
10444
|
+
await updatePendingTurnState(sessionId, (existing) => {
|
|
10445
|
+
const current = existing.touchedRepos[repoRoot];
|
|
10446
|
+
if (!current) return false;
|
|
10447
|
+
current.stopAttempted = true;
|
|
10448
|
+
current.stopRecorded = true;
|
|
10449
|
+
current.stopRecordedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
10450
|
+
current.stopRecordedMode = params.mode;
|
|
10451
|
+
current.recordingFailureMessage = null;
|
|
10452
|
+
current.recordingFailureHint = null;
|
|
10453
|
+
current.recordingFailedAt = null;
|
|
10454
|
+
current.lastTouchedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
10455
|
+
});
|
|
10456
|
+
}
|
|
10457
|
+
async function markTouchedRepoRecordingFailure(sessionId, repoRoot, params) {
|
|
10458
|
+
await updatePendingTurnState(sessionId, (existing) => {
|
|
10459
|
+
const current = existing.touchedRepos[repoRoot];
|
|
10460
|
+
if (!current) return false;
|
|
10461
|
+
current.stopAttempted = true;
|
|
10462
|
+
current.recordingFailureMessage = params.message.trim();
|
|
10463
|
+
current.recordingFailureHint = params.hint?.trim() || null;
|
|
10464
|
+
current.recordingFailedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
10465
|
+
current.lastTouchedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
10466
|
+
});
|
|
10467
|
+
}
|
|
10468
|
+
function lastFinalizedPath(sessionId) {
|
|
10469
|
+
return import_node_path7.default.join(stateRoot(), `${sessionId}.last-finalized.json`);
|
|
10470
|
+
}
|
|
10471
|
+
async function markLastFinalizedTurn(sessionId, turnId, prompt) {
|
|
10472
|
+
const record = {
|
|
10473
|
+
sessionId,
|
|
10474
|
+
turnId,
|
|
10475
|
+
prompt,
|
|
10476
|
+
finalizedAt: (/* @__PURE__ */ new Date()).toISOString()
|
|
10477
|
+
};
|
|
10478
|
+
await writeJsonAtomic2(lastFinalizedPath(sessionId), record);
|
|
10479
|
+
}
|
|
10480
|
+
async function loadLastFinalizedTurn(sessionId) {
|
|
10481
|
+
const raw = await import_promises19.default.readFile(lastFinalizedPath(sessionId), "utf8").catch(() => null);
|
|
10482
|
+
if (!raw) return null;
|
|
10483
|
+
try {
|
|
10484
|
+
const parsed = JSON.parse(raw);
|
|
10485
|
+
if (typeof parsed.sessionId === "string" && typeof parsed.turnId === "string" && typeof parsed.prompt === "string" && typeof parsed.finalizedAt === "string") {
|
|
10486
|
+
return {
|
|
10487
|
+
sessionId: parsed.sessionId,
|
|
10488
|
+
turnId: parsed.turnId,
|
|
10489
|
+
prompt: parsed.prompt,
|
|
10490
|
+
finalizedAt: parsed.finalizedAt
|
|
10491
|
+
};
|
|
10492
|
+
}
|
|
10493
|
+
return null;
|
|
10494
|
+
} catch {
|
|
10495
|
+
return null;
|
|
10116
10496
|
}
|
|
10117
|
-
|
|
10118
|
-
|
|
10119
|
-
|
|
10120
|
-
|
|
10121
|
-
|
|
10122
|
-
|
|
10123
|
-
|
|
10124
|
-
|
|
10125
|
-
|
|
10126
|
-
|
|
10127
|
-
|
|
10128
|
-
|
|
10129
|
-
|
|
10130
|
-
|
|
10131
|
-
|
|
10132
|
-
|
|
10133
|
-
|
|
10134
|
-
|
|
10135
|
-
|
|
10136
|
-
|
|
10137
|
-
|
|
10138
|
-
|
|
10139
|
-
|
|
10140
|
-
|
|
10141
|
-
|
|
10142
|
-
|
|
10143
|
-
|
|
10144
|
-
|
|
10145
|
-
|
|
10146
|
-
|
|
10147
|
-
|
|
10148
|
-
|
|
10149
|
-
|
|
10150
|
-
|
|
10151
|
-
|
|
10152
|
-
|
|
10153
|
-
|
|
10154
|
-
|
|
10155
|
-
|
|
10156
|
-
|
|
10157
|
-
|
|
10158
|
-
|
|
10159
|
-
|
|
10160
|
-
|
|
10161
|
-
|
|
10162
|
-
|
|
10163
|
-
|
|
10164
|
-
|
|
10165
|
-
|
|
10166
|
-
|
|
10167
|
-
|
|
10168
|
-
|
|
10169
|
-
|
|
10170
|
-
|
|
10171
|
-
|
|
10172
|
-
|
|
10173
|
-
|
|
10174
|
-
|
|
10175
|
-
|
|
10176
|
-
|
|
10177
|
-
|
|
10178
|
-
|
|
10179
|
-
|
|
10180
|
-
|
|
10181
|
-
|
|
10182
|
-
|
|
10183
|
-
|
|
10184
|
-
|
|
10185
|
-
|
|
10186
|
-
|
|
10187
|
-
|
|
10188
|
-
|
|
10189
|
-
|
|
10190
|
-
|
|
10191
|
-
|
|
10192
|
-
|
|
10193
|
-
|
|
10194
|
-
|
|
10195
|
-
|
|
10196
|
-
|
|
10197
|
-
|
|
10198
|
-
|
|
10199
|
-
|
|
10200
|
-
|
|
10201
|
-
|
|
10202
|
-
|
|
10203
|
-
|
|
10204
|
-
|
|
10205
|
-
|
|
10206
|
-
|
|
10207
|
-
|
|
10208
|
-
|
|
10497
|
+
}
|
|
10498
|
+
async function markPendingTurnFailure(sessionId, params) {
|
|
10499
|
+
await updatePendingTurnState(sessionId, (existing) => {
|
|
10500
|
+
existing.turnFailureMessage = params.message.trim();
|
|
10501
|
+
existing.turnFailureHint = params.hint?.trim() || null;
|
|
10502
|
+
existing.turnFailedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
10503
|
+
});
|
|
10504
|
+
}
|
|
10505
|
+
async function listTouchedRepos(sessionId) {
|
|
10506
|
+
const existing = await loadPendingTurnState(sessionId);
|
|
10507
|
+
if (!existing) return [];
|
|
10508
|
+
return Object.values(existing.touchedRepos).sort((a2, b) => a2.repoRoot.localeCompare(b.repoRoot));
|
|
10509
|
+
}
|
|
10510
|
+
async function clearPendingTurnState(sessionId) {
|
|
10511
|
+
await withStateLock(sessionId, async () => {
|
|
10512
|
+
await import_promises19.default.rm(statePath(sessionId), { force: true }).catch(() => void 0);
|
|
10513
|
+
});
|
|
10514
|
+
}
|
|
10515
|
+
|
|
10516
|
+
// package.json
|
|
10517
|
+
var package_default = {
|
|
10518
|
+
name: "@remixhq/claude-plugin",
|
|
10519
|
+
version: "0.1.23",
|
|
10520
|
+
description: "Claude Code plugin for Remix collaboration workflows",
|
|
10521
|
+
homepage: "https://github.com/RemixDotOne/remix-claude-plugin",
|
|
10522
|
+
license: "MIT",
|
|
10523
|
+
repository: {
|
|
10524
|
+
type: "git",
|
|
10525
|
+
url: "https://github.com/RemixDotOne/remix-claude-plugin.git"
|
|
10526
|
+
},
|
|
10527
|
+
type: "module",
|
|
10528
|
+
engines: {
|
|
10529
|
+
node: ">=20"
|
|
10530
|
+
},
|
|
10531
|
+
publishConfig: {
|
|
10532
|
+
access: "public"
|
|
10533
|
+
},
|
|
10534
|
+
files: [
|
|
10535
|
+
"dist",
|
|
10536
|
+
".claude-plugin/plugin.json",
|
|
10537
|
+
".mcp.json",
|
|
10538
|
+
"skills",
|
|
10539
|
+
"hooks",
|
|
10540
|
+
"agents"
|
|
10541
|
+
],
|
|
10542
|
+
exports: {
|
|
10543
|
+
".": {
|
|
10544
|
+
types: "./dist/index.d.ts",
|
|
10545
|
+
import: "./dist/index.js"
|
|
10546
|
+
}
|
|
10547
|
+
},
|
|
10548
|
+
scripts: {
|
|
10549
|
+
build: "tsup",
|
|
10550
|
+
postbuild: `node -e "const fs=require('node:fs'); for (const p of ['dist/mcp-server.cjs','dist/hook-pre-git.cjs','dist/hook-user-prompt.cjs','dist/hook-post-collab.cjs','dist/hook-stop-collab.cjs']) fs.chmodSync(p, 0o755);"`,
|
|
10551
|
+
dev: "tsx src/mcp-server.ts",
|
|
10552
|
+
typecheck: "tsc -p tsconfig.json --noEmit",
|
|
10553
|
+
test: "node --import tsx --test 'src/**/*.test.ts'",
|
|
10554
|
+
prepack: "npm run build"
|
|
10555
|
+
},
|
|
10556
|
+
dependencies: {
|
|
10557
|
+
"@remixhq/core": "^0.1.18",
|
|
10558
|
+
"@remixhq/mcp": "^0.1.18"
|
|
10559
|
+
},
|
|
10560
|
+
devDependencies: {
|
|
10561
|
+
"@types/node": "^25.4.0",
|
|
10562
|
+
tsup: "^8.5.1",
|
|
10563
|
+
tsx: "^4.21.0",
|
|
10564
|
+
typescript: "^5.9.3"
|
|
10565
|
+
}
|
|
10566
|
+
};
|
|
10567
|
+
|
|
10568
|
+
// src/metadata.ts
|
|
10569
|
+
// Plugin identity shared by the hooks and diagnostics below. Name,
// version, and description mirror the inlined manifest; pluginId and
// agentName are fixed identifiers used to build on-disk paths and
// addressing.
var pluginMetadata = {
  name: package_default.name,
  version: package_default.version,
  description: package_default.description,
  pluginId: "remix",
  agentName: "remix-collab"
};
|
|
10576
|
+
|
|
10577
|
+
// src/hook-diagnostics.ts
|
|
10578
|
+
// Size threshold (512 KiB) at which rotateLogIfNeeded rotates the
// hooks diagnostics log to `<log>.1`.
var MAX_LOG_BYTES = 512 * 1024;
|
|
10579
|
+
// Resolve the root of the Claude configuration directory.
// Honors a non-blank CLAUDE_CONFIG_DIR environment override; otherwise
// defaults to ~/.claude.
function resolveClaudeRoot() {
  const override = process.env.CLAUDE_CONFIG_DIR?.trim();
  if (override) {
    return override;
  }
  return import_node_path8.default.join(import_node_os5.default.homedir(), ".claude");
}
|
|
10583
|
+
// Directory name for this plugin's data under the Claude plugins tree.
// NOTE(review): the id is repeated on purposely-identical halves here
// (yields "remix-remix"). This looks like a `<marketplace>-<plugin>`
// pattern where both happen to be "remix" — confirm against the
// original TypeScript source before changing.
function resolvePluginDataDirName() {
  const id = pluginMetadata.pluginId;
  return `${id}-${id}`;
}
|
|
10586
|
+
// Directory holding the hook diagnostics log. A non-blank
// REMIX_CLAUDE_PLUGIN_HOOK_DIAGNOSTICS_DIR overrides the default
// location under `<claude root>/plugins/data/<plugin data dir>`.
function getHookDiagnosticsDirPath() {
  const override = process.env.REMIX_CLAUDE_PLUGIN_HOOK_DIAGNOSTICS_DIR?.trim();
  if (override) {
    return override;
  }
  return import_node_path8.default.join(resolveClaudeRoot(), "plugins", "data", resolvePluginDataDirName());
}
|
|
10590
|
+
// Full path of the NDJSON diagnostics log inside the diagnostics dir.
function getHookDiagnosticsLogPath() {
  const dir = getHookDiagnosticsDirPath();
  return import_node_path8.default.join(dir, "hooks.ndjson");
}
|
|
10593
|
+
// Coerce an arbitrary value into a JSON-safe diagnostics field value.
// Accepts null, strings, booleans, and finite numbers as-is; everything
// else (objects, NaN/Infinity, undefined, symbols, functions, ...) maps
// to undefined so normalizeFields can drop the entry.
function toFieldValue(value) {
  if (value === null) return null;
  switch (typeof value) {
    case "string":
    case "boolean":
      return value;
    case "number":
      return Number.isFinite(value) ? value : void 0;
    default:
      return void 0;
  }
}
|
|
10600
|
+
// Normalize a caller-supplied fields bag for diagnostics logging:
// keep only entries whose values survive toFieldValue (null, string,
// boolean, finite number); drop everything else. A missing/falsy bag
// normalizes to an empty object.
function normalizeFields(fields) {
  const result = {};
  if (!fields) {
    return result;
  }
  for (const [key, value] of Object.entries(fields)) {
    const coerced = toFieldValue(value);
    if (coerced !== void 0) {
      result[key] = coerced;
    }
  }
  return result;
}
|
|
10608
|
+
// Rotate the diagnostics log once it reaches MAX_LOG_BYTES: the current
// file becomes `<logPath>.1` (overwriting any previous rotation). All
// filesystem errors are ignored — rotation is best-effort.
async function rotateLogIfNeeded(logPath) {
  const info = await import_promises20.default.stat(logPath).catch(() => null);
  if (info === null || info.size < MAX_LOG_BYTES) {
    return;
  }
  const backupPath = `${logPath}.1`;
  await import_promises20.default.rm(backupPath, { force: true }).catch(() => void 0);
  await import_promises20.default.rename(logPath, backupPath).catch(() => void 0);
}
|
|
10617
|
+
// Privacy-preserving summary of a text value for diagnostics: never
// logs the content itself, only presence, trimmed length, and a
// 12-hex-char SHA-256 prefix of the trimmed text. Non-strings and
// blank strings summarize as absent.
function summarizeText(value) {
  const trimmed = typeof value === "string" ? value.trim() : "";
  if (!trimmed) {
    return { present: false, length: 0, sha256Prefix: null };
  }
  const digest = (0, import_node_crypto2.createHash)("sha256").update(trimmed).digest("hex");
  return {
    present: true,
    length: trimmed.length,
    sha256Prefix: digest.slice(0, 12)
  };
}
|
|
10632
|
+
// Append one structured event to the hooks NDJSON diagnostics log.
// Best-effort by design: the whole body is wrapped in try/catch and any
// failure (mkdir, rotation, append) is silently dropped so diagnostics
// can never break a hook. Optional string params are trimmed, with
// blank values recorded as null.
async function appendHookDiagnosticsEvent(params) {
  try {
    const logPath = getHookDiagnosticsLogPath();
    // Ensure the diagnostics directory exists, then rotate before
    // appending so the active log stays under MAX_LOG_BYTES.
    await import_promises20.default.mkdir(import_node_path8.default.dirname(logPath), { recursive: true });
    await rotateLogIfNeeded(logPath);
    const event = {
      ts: (/* @__PURE__ */ new Date()).toISOString(),
      hook: params.hook,
      pluginVersion: pluginMetadata.version,
      pid: process.pid,
      sessionId: params.sessionId?.trim() || null,
      turnId: params.turnId?.trim() || null,
      // stage is required (no optional chaining) — a missing stage
      // throws and is swallowed by the outer catch.
      stage: params.stage.trim(),
      result: params.result,
      reason: params.reason?.trim() || null,
      toolName: params.toolName?.trim() || null,
      repoRoot: params.repoRoot?.trim() || null,
      message: params.message?.trim() || null,
      fields: normalizeFields(params.fields)
    };
    // One JSON object per line (NDJSON); the template literal embeds a
    // real newline after the serialized event.
    await import_promises20.default.appendFile(logPath, `${JSON.stringify(event)}
`, "utf8");
  } catch {
  }
}
|
|
10657
|
+
|
|
10658
|
+
// src/auto-fix-dispatcher.ts
|
|
10659
|
+
// Map from finalize-preflight failure code to the `remix` CLI argv that
// can self-heal it. Codes absent from this map are warn-only (see
// dispatchFinalizeFailure).
var AUTO_FIX_COMMAND = {
  // Already auto-spawned by hook-user-prompt's branch-init path, but we
  // include it here too so a finalize-time failure (e.g. binding got
  // deleted between init and the next finalize) also self-heals.
  branch_binding_missing: ["collab", "init"],
  // Local revision baseline is missing. Init seeds the branch/lane baseline
  // without requiring the user to know about the recording internals.
  baseline_missing: ["collab", "init"],
  // Server moved ahead. `collab sync` is fast-forward-safe by default;
  // it refuses non-FF on its own, so we don't need to gate here.
  pull_required: ["collab", "sync"]
};
|
|
10671
|
+
// True when the given preflight failure code has an automatic fix
// registered in AUTO_FIX_COMMAND (null/undefined/empty codes are not
// auto-fixable).
function isAutoFixableFinalizeFailureCode(code) {
  if (!code) {
    return false;
  }
  return AUTO_FIX_COMMAND[code] !== void 0;
}
|
|
10674
|
+
// Map from preflight failure code to the CLI command a human should run
// to investigate or recover. Unlike AUTO_FIX_COMMAND these are surfaced
// to the user rather than executed automatically.
var RECOMMENDED_USER_COMMAND = {
  not_bound: "remix collab init",
  branch_binding_missing: "remix collab init",
  family_ambiguous: "remix collab status",
  metadata_conflict: "remix collab status",
  branch_mismatch: "remix collab status",
  missing_head: "remix collab status",
  remote_error: "remix collab status",
  pull_required: "remix collab sync",
  baseline_missing: "remix collab init"
};
|
|
10685
|
+
// Repo-relative path of the throttle lock file for one auto-fix command.
var SPAWN_LOCK_REL = (cmdSlug) => import_node_path9.default.join(".remix", `.${cmdSlug}-spawning`);
// Repo-relative path of the log file the detached auto-fix writes to.
var SPAWN_LOG_REL = (cmdSlug) => import_node_path9.default.join(".remix", `${cmdSlug}.log`);
// Minimum age (5 minutes) of an existing lock before the same auto-fix
// command may be spawned again.
var SPAWN_THROTTLE_MS = 5 * 60 * 1e3;
|
|
10688
|
+
// Build a filesystem-safe slug from a command argv, e.g.
// ["collab", "init"] -> "collab-init". Characters outside
// [A-Za-z0-9_-] are replaced with underscores.
function commandSlug(args) {
  const joined = args.join("-");
  return joined.replace(/[^a-zA-Z0-9_-]/g, "_");
}
|
|
10691
|
+
// Spawn `remix <args>` as a detached background process to self-heal a
// finalize failure, logging to .remix/<slug>.log in the repo.
// Returns a discriminated outcome:
//   { kind: "spawned", command, pid, logPath }
//   { kind: "spawn_throttled", command, reason }   // lock younger than 5 min
//   { kind: "spawn_failed", command, reason, message }
// A per-command lock file throttles repeated spawns; lock and mkdir
// errors are deliberately ignored (best-effort), but failure to open
// the log or to spawn is reported.
function spawnFixDetached(repoRoot, args) {
  const slug = commandSlug(args);
  const command = `remix ${args.join(" ")}`;
  const remixDir = import_node_path9.default.join(repoRoot, ".remix");
  const lockPath = import_node_path9.default.join(repoRoot, SPAWN_LOCK_REL(slug));
  const logPath = import_node_path9.default.join(repoRoot, SPAWN_LOG_REL(slug));
  // Throttle: if a lock exists and is fresher than SPAWN_THROTTLE_MS,
  // assume a fix is already in flight and do not spawn another.
  try {
    if ((0, import_node_fs6.existsSync)(lockPath)) {
      const ageMs = Date.now() - (0, import_node_fs6.statSync)(lockPath).mtimeMs;
      if (ageMs < SPAWN_THROTTLE_MS) {
        return { kind: "spawn_throttled", command, reason: "spawn_lock_held" };
      }
    }
  } catch {
  }
  try {
    (0, import_node_fs6.mkdirSync)(remixDir, { recursive: true });
  } catch {
  }
  // Open the log twice (stdout and stderr get separate descriptors).
  let out;
  let err;
  try {
    out = (0, import_node_fs6.openSync)(logPath, "a");
    err = (0, import_node_fs6.openSync)(logPath, "a");
  } catch (logErr) {
    return {
      kind: "spawn_failed",
      command,
      reason: "log_open_failed",
      message: logErr instanceof Error ? logErr.message : String(logErr)
    };
  }
  try {
    // detached + unref so the hook process can exit without waiting;
    // REMIX_AUTO_FIX_SPAWN=1 marks the child as hook-spawned.
    const child = (0, import_node_child_process6.spawn)("remix", [...args], {
      cwd: repoRoot,
      detached: true,
      stdio: ["ignore", out, err],
      env: { ...process.env, REMIX_AUTO_FIX_SPAWN: "1" }
    });
    child.unref();
    // Record the child pid in the lock and refresh its mtime so the
    // throttle window starts now. Best-effort.
    try {
      (0, import_node_fs6.writeFileSync)(lockPath, String(child.pid ?? ""), "utf8");
      (0, import_node_fs6.utimesSync)(lockPath, /* @__PURE__ */ new Date(), /* @__PURE__ */ new Date());
    } catch {
    }
    return { kind: "spawned", command, pid: child.pid, logPath };
  } catch (spawnErr) {
    return {
      kind: "spawn_failed",
      command,
      reason: "spawn_failed",
      message: spawnErr instanceof Error ? spawnErr.message : String(spawnErr)
    };
  }
}
|
|
10746
|
+
// Handle a finalize-preflight failure end to end:
//   1. Build a fresh failure marker (buildFreshFailureMarker — defined
//      elsewhere in this bundle) including the user-facing recommended
//      command for the failure code, if any.
//   2. If the code is auto-fixable, spawn the detached fix and fold the
//      spawn outcome into the marker; otherwise mark warn-only.
//   3. Persist the marker (write failures are themselves logged to
//      diagnostics rather than thrown).
//   4. Emit an "auto_fix_dispatched" diagnostics event and return the
//      outcome to the caller.
async function dispatchFinalizeFailure(input) {
  const recommendedCommand = input.preflightCode ? RECOMMENDED_USER_COMMAND[input.preflightCode] ?? null : null;
  const marker = buildFreshFailureMarker({
    repoRoot: input.repoRoot,
    preflightCode: input.preflightCode,
    message: input.message,
    hint: input.hint,
    recommendedCommand
  });
  let outcome;
  const autoFixArgs = input.preflightCode ? AUTO_FIX_COMMAND[input.preflightCode] : void 0;
  if (!autoFixArgs) {
    // No registered auto-fix: record why (unknown vs. known-but-manual).
    outcome = {
      kind: "warn_only",
      reason: input.preflightCode ? "no_auto_fix_for_code" : "unknown_code"
    };
  } else {
    outcome = spawnFixDetached(input.repoRoot, autoFixArgs);
    marker.autoFix = mergeOutcomeIntoMarker(marker.autoFix, outcome);
  }
  try {
    await writeFinalizeFailureMarker(marker);
  } catch (writeErr) {
    // Marker persistence failed — surface via diagnostics, do not abort.
    await appendHookDiagnosticsEvent({
      hook: input.hook,
      sessionId: input.sessionId,
      turnId: input.turnId ?? void 0,
      stage: "finalize_failure_marker_write_failed",
      result: "error",
      reason: "exception",
      repoRoot: input.repoRoot,
      message: writeErr instanceof Error ? writeErr.message : String(writeErr)
    });
  }
  await appendHookDiagnosticsEvent({
    hook: input.hook,
    sessionId: input.sessionId,
    turnId: input.turnId ?? void 0,
    stage: "auto_fix_dispatched",
    // spawned -> success; warn_only -> info; throttled/failed -> error.
    result: outcome.kind === "spawned" ? "success" : outcome.kind === "warn_only" ? "info" : "error",
    reason: outcome.kind,
    repoRoot: input.repoRoot,
    fields: {
      preflightCode: input.preflightCode,
      command: "command" in outcome ? outcome.command : null,
      pid: outcome.kind === "spawned" ? outcome.pid ?? null : null,
      logPath: outcome.kind === "spawned" ? outcome.logPath : null,
      recommendedCommand
    },
    message: outcome.kind === "spawn_failed" ? outcome.message : null
  });
  return outcome;
}
|
|
10799
|
+
// Merge a spawn outcome into a failure marker's autoFix record.
// - spawned: fresh in_progress record stamped with the new pid/log/time.
// - spawn_throttled: still in_progress, but pid/log/attemptedAt are
//   carried over from the existing record (the earlier spawn owns them).
// - spawn_failed: terminal record carrying the failure message.
// - anything else: the existing record is returned untouched.
function mergeOutcomeIntoMarker(existing, outcome) {
  switch (outcome.kind) {
    case "spawned":
      return {
        status: "in_progress",
        command: outcome.command,
        pid: outcome.pid ?? null,
        logPath: outcome.logPath,
        attemptedAt: (/* @__PURE__ */ new Date()).toISOString(),
        failureMessage: null
      };
    case "spawn_throttled":
      return {
        status: "in_progress",
        command: outcome.command,
        pid: existing.pid,
        logPath: existing.logPath,
        attemptedAt: existing.attemptedAt,
        failureMessage: null
      };
    case "spawn_failed":
      return {
        status: "spawn_failed",
        command: outcome.command,
        pid: null,
        logPath: null,
        attemptedAt: (/* @__PURE__ */ new Date()).toISOString(),
        failureMessage: outcome.message
      };
    default:
      return existing;
  }
}
|
|
10832
|
+
|
|
10833
|
+
// src/deferred-turn-queue.ts
|
|
10834
|
+
var import_promises21 = __toESM(require("fs/promises"), 1);
|
|
10835
|
+
var import_node_os6 = __toESM(require("os"), 1);
|
|
10836
|
+
var import_node_path10 = __toESM(require("path"), 1);
|
|
10837
|
+
// On-disk schema version for deferred-turn records; readers reject
// other versions (see readDeferredTurnFile).
var DEFERRED_TURN_SCHEMA_VERSION = 1;
// A record is dropped after this many failed drain attempts.
var DEFERRED_TURN_MAX_ATTEMPTS = 10;
// Records older than 24 hours are pruned (see pruneStaleDeferredTurns).
var DEFERRED_TURN_TTL_MS = 24 * 60 * 60 * 1e3;
// Subdirectory of the hook state root that holds deferred-turn files.
var DEFERRED_TURN_DIR = "deferred-turns";
|
|
10841
|
+
// Root directory for this plugin's hook state. A non-blank
// REMIX_CLAUDE_PLUGIN_HOOK_STATE_ROOT overrides the default location
// under the OS temp directory.
function stateRoot2() {
  const override = process.env.REMIX_CLAUDE_PLUGIN_HOOK_STATE_ROOT?.trim();
  if (override) {
    return override;
  }
  return import_node_path10.default.join(import_node_os6.default.tmpdir(), "remix-claude-plugin-hooks");
}
|
|
10845
|
+
// Directory where deferred-turn JSON records are stored.
function getDeferredTurnDirPath() {
  const root = stateRoot2();
  return import_node_path10.default.join(root, DEFERRED_TURN_DIR);
}
|
|
10848
|
+
// File name for one deferred turn: `<session>-<turn>.json`, with any
// character outside [A-Za-z0-9_-] in either id replaced by "_".
function deferredTurnFileName(sessionId, turnId) {
  const sanitize = (value) => value.replace(/[^A-Za-z0-9_-]/g, "_");
  return `${sanitize(sessionId)}-${sanitize(turnId)}.json`;
}
|
|
10852
|
+
// Absolute path of the deferred-turn record for a session/turn pair.
function getDeferredTurnFilePath(sessionId, turnId) {
  const fileName = deferredTurnFileName(sessionId, turnId);
  return import_node_path10.default.join(getDeferredTurnDirPath(), fileName);
}
|
|
10855
|
+
// Persist a deferred-turn record to disk atomically.
// Throws on an unsupported schemaVersion or on a blank prompt /
// assistantResponse; otherwise writes to a pid+timestamp temp file and
// renames into place so readers never observe a partial record.
// Returns the final file path.
async function writeDeferredTurn(record) {
  if (record.schemaVersion !== DEFERRED_TURN_SCHEMA_VERSION) {
    throw new Error(`writeDeferredTurn: unsupported schemaVersion ${record.schemaVersion}`);
  }
  if (!record.prompt.trim() || !record.assistantResponse.trim()) {
    throw new Error("writeDeferredTurn: prompt and assistantResponse must be non-empty");
  }
  const dir = getDeferredTurnDirPath();
  await import_promises21.default.mkdir(dir, { recursive: true });
  const filePath = getDeferredTurnFilePath(record.sessionId, record.turnId);
  // Temp name includes pid + time to avoid collisions between writers.
  const tmpPath = `${filePath}.tmp-${process.pid}-${Date.now()}`;
  await import_promises21.default.writeFile(tmpPath, JSON.stringify(record), "utf8");
  // rename is the atomic commit step.
  await import_promises21.default.rename(tmpPath, filePath);
  return filePath;
}
|
|
10870
|
+
// Read and validate one deferred-turn record from disk.
// Returns null for any unreadable, unparsable, wrong-schema, or
// structurally invalid file (callers treat null as "ignore / prunable").
// Optional fields are coerced to safe defaults rather than rejected —
// see the inline notes on each.
async function readDeferredTurnFile(filePath) {
  const raw = await import_promises21.default.readFile(filePath, "utf8").catch(() => null);
  if (!raw) return null;
  let parsed;
  try {
    parsed = JSON.parse(raw);
  } catch {
    return null;
  }
  if (!parsed || typeof parsed !== "object") return null;
  const record = parsed;
  if (record.schemaVersion !== DEFERRED_TURN_SCHEMA_VERSION) return null;
  // All required fields must be strings and the reason must be one of
  // the three known defer reasons; otherwise the record is invalid.
  if (typeof record.sessionId !== "string" || typeof record.turnId !== "string" || typeof record.repoRoot !== "string" || typeof record.prompt !== "string" || typeof record.assistantResponse !== "string" || typeof record.submittedAt !== "string" || typeof record.deferredAt !== "string" || record.reason !== "current_branch_unbound" && record.reason !== "recovery_in_progress" && record.reason !== "transient_recording_failure") {
    return null;
  }
  return {
    schemaVersion: DEFERRED_TURN_SCHEMA_VERSION,
    sessionId: record.sessionId,
    turnId: record.turnId,
    repoRoot: record.repoRoot,
    prompt: record.prompt,
    assistantResponse: record.assistantResponse,
    submittedAt: record.submittedAt,
    deferredAt: record.deferredAt,
    reason: record.reason,
    branchAtDefer: typeof record.branchAtDefer === "string" || record.branchAtDefer === null ? record.branchAtDefer : null,
    // Additive fields: pre-appId-aware records on disk won't have these
    // keys at all. Coerce missing/invalid to `null` (drainer treats
    // null as "legacy, drain as today" — see drainer for the policy).
    appIdAtDefer: typeof record.appIdAtDefer === "string" ? record.appIdAtDefer : null,
    projectIdAtDefer: typeof record.projectIdAtDefer === "string" ? record.projectIdAtDefer : null,
    // Pre-attemptCount records coerce to 0 — they've never been
    // counted against the cap, so giving them the cap's full budget
    // is correct (we'd rather over-retry a legacy record than drop it
    // unexpectedly). Negative or non-finite values also coerce to 0.
    attemptCount: typeof record.attemptCount === "number" && Number.isFinite(record.attemptCount) && record.attemptCount >= 0 ? Math.floor(record.attemptCount) : 0
  };
}
|
|
10908
|
+
// List all valid deferred-turn records belonging to `repoRoot`, sorted
// oldest-first by submittedAt (entries with unparsable timestamps keep
// their directory order). A missing directory yields an empty list.
async function listDeferredTurnsForRepo(repoRoot) {
  const dir = getDeferredTurnDirPath();
  const dirents = await import_promises21.default.readdir(dir, { withFileTypes: true }).catch(() => []);
  const matches = [];
  for (const dirent of dirents) {
    if (!dirent.isFile()) continue;
    if (!dirent.name.endsWith(".json")) continue;
    const filePath = import_node_path10.default.join(dir, dirent.name);
    const record = await readDeferredTurnFile(filePath);
    if (record === null) continue;
    if (record.repoRoot !== repoRoot) continue;
    matches.push({ filePath, record });
  }
  matches.sort((left, right) => {
    const leftMs = Date.parse(left.record.submittedAt);
    const rightMs = Date.parse(right.record.submittedAt);
    if (!Number.isFinite(leftMs) || !Number.isFinite(rightMs)) return 0;
    return leftMs - rightMs;
  });
  return matches;
}
|
|
10928
|
+
// Best-effort removal of one deferred-turn file; missing files and
// filesystem errors are ignored.
async function deleteDeferredTurnFile(filePath) {
  try {
    await import_promises21.default.rm(filePath, { force: true });
  } catch {
  }
}
|
|
10931
|
+
// Delete deferred-turn files older than `maxAgeMs` (default: the
// 24-hour TTL). Two aging rules:
//   - unreadable/invalid records age by file mtime;
//   - valid records age by their recorded `deferredAt` timestamp
//     (records with an unparsable deferredAt are kept).
// Returns the list of deleted file paths.
async function pruneStaleDeferredTurns(maxAgeMs = DEFERRED_TURN_TTL_MS) {
  const dir = getDeferredTurnDirPath();
  const dirEntries = await import_promises21.default.readdir(dir, { withFileTypes: true }).catch(() => []);
  const pruned = [];
  const now = Date.now();
  for (const entry of dirEntries) {
    if (!entry.isFile() || !entry.name.endsWith(".json")) continue;
    const filePath = import_node_path10.default.join(dir, entry.name);
    const record = await readDeferredTurnFile(filePath);
    if (!record) {
      // Corrupt/invalid file: prune once the file itself is stale.
      const stat = await import_promises21.default.stat(filePath).catch(() => null);
      if (stat && now - stat.mtimeMs > maxAgeMs) {
        await deleteDeferredTurnFile(filePath);
        pruned.push(filePath);
      }
      continue;
    }
    const deferredAtMs = Date.parse(record.deferredAt);
    if (!Number.isFinite(deferredAtMs)) continue;
    if (now - deferredAtMs > maxAgeMs) {
      await deleteDeferredTurnFile(filePath);
      pruned.push(filePath);
    }
  }
  return pruned;
}
|
|
10957
|
+
// Construct a fresh schema-v1 deferred-turn record from the given
// params. `deferredAt` is stamped now; `reason` defaults to
// "current_branch_unbound"; app/project ids default to null; and
// attemptCount starts at 0 — the next drain pass is the first attempt
// and bumps it to 1 if it fails.
function buildDeferredTurnRecord(params) {
  const deferredAt = (/* @__PURE__ */ new Date()).toISOString();
  const reason = params.reason ?? "current_branch_unbound";
  return {
    schemaVersion: DEFERRED_TURN_SCHEMA_VERSION,
    sessionId: params.sessionId,
    turnId: params.turnId,
    repoRoot: params.repoRoot,
    prompt: params.prompt,
    assistantResponse: params.assistantResponse,
    submittedAt: params.submittedAt,
    deferredAt,
    reason,
    branchAtDefer: params.branchAtDefer,
    appIdAtDefer: params.appIdAtDefer ?? null,
    projectIdAtDefer: params.projectIdAtDefer ?? null,
    attemptCount: 0
  };
}
|
|
10976
|
+
// Record one failed drain attempt against a deferred-turn file.
// - File gone/invalid: report promoted (nothing left to retry).
// - Attempt cap reached: delete the file and report promoted.
// - Otherwise: persist the incremented count and report not promoted.
// NOTE(review): the promoted branches return `finalAttemptCount` while
// the retry branch returns `newAttemptCount` — asymmetric key names;
// presumably intentional in the TS source, confirm before relying on it.
async function recordDeferredTurnFailedAttempt(filePath) {
  const current = await readDeferredTurnFile(filePath);
  if (!current) {
    return { promoted: true, finalAttemptCount: DEFERRED_TURN_MAX_ATTEMPTS };
  }
  const newAttemptCount = current.attemptCount + 1;
  if (newAttemptCount >= DEFERRED_TURN_MAX_ATTEMPTS) {
    await deleteDeferredTurnFile(filePath);
    return { promoted: true, finalAttemptCount: newAttemptCount };
  }
  const next = { ...current, attemptCount: newAttemptCount };
  await writeDeferredTurn(next);
  return { promoted: false, newAttemptCount };
}
|
|
10990
|
+
|
|
10991
|
+
// src/deferred-turn-drainer.ts
|
|
10992
|
+
var import_promises23 = __toESM(require("fs/promises"), 1);
|
|
10993
|
+
var import_node_path11 = __toESM(require("path"), 1);
|
|
10994
|
+
var import_node_crypto3 = require("crypto");
|
|
10995
|
+
|
|
10996
|
+
// node_modules/@remixhq/core/dist/chunk-RCNOSZP6.js
|
|
10997
|
+
// Parse a fetch Response body as JSON without ever throwing.
// Returns null when the content-type is not application/json or when
// parsing fails.
async function readJsonSafe(res) {
  const contentType = (res.headers.get("content-type") ?? "").toLowerCase();
  if (!contentType.includes("application/json")) {
    return null;
  }
  try {
    return await res.json();
  } catch {
    return null;
  }
}
|
|
11006
|
+
function createApiClient(config, opts) {
|
|
11007
|
+
const apiKey = (opts?.apiKey ?? "").trim();
|
|
11008
|
+
const tokenProvider = opts?.tokenProvider;
|
|
11009
|
+
const defaultTimeoutMs = typeof opts?.defaultRequestTimeoutMs === "number" && opts.defaultRequestTimeoutMs > 0 ? opts.defaultRequestTimeoutMs : null;
|
|
11010
|
+
const CLIENT_KEY_HEADER = "x-comerge-api-key";
|
|
11011
|
+
function makeTimeoutSignal(timeoutMs) {
|
|
11012
|
+
const ms = typeof timeoutMs === "number" && timeoutMs > 0 ? timeoutMs : defaultTimeoutMs;
|
|
11013
|
+
return ms != null ? AbortSignal.timeout(ms) : void 0;
|
|
11014
|
+
}
|
|
11015
|
+
async function request(path16, init, opts2) {
|
|
11016
|
+
if (!tokenProvider) {
|
|
11017
|
+
throw new RemixError("API client is missing a token provider.", {
|
|
11018
|
+
exitCode: 1,
|
|
11019
|
+
hint: "Configure auth before creating the Remix API client."
|
|
11020
|
+
});
|
|
11021
|
+
}
|
|
11022
|
+
const auth = await tokenProvider();
|
|
11023
|
+
const url = new URL(path16, config.apiUrl).toString();
|
|
11024
|
+
const doFetch = async (bearer) => fetch(url, {
|
|
11025
|
+
...init,
|
|
11026
|
+
signal: makeTimeoutSignal(opts2?.timeoutMs),
|
|
11027
|
+
headers: {
|
|
11028
|
+
Accept: "application/json",
|
|
11029
|
+
"Content-Type": "application/json",
|
|
11030
|
+
...init?.headers ?? {},
|
|
11031
|
+
Authorization: `Bearer ${bearer}`,
|
|
11032
|
+
...apiKey ? { [CLIENT_KEY_HEADER]: apiKey } : {}
|
|
11033
|
+
}
|
|
11034
|
+
});
|
|
11035
|
+
let res = await doFetch(auth.token);
|
|
11036
|
+
if (res.status === 401 && !auth.fromEnv && auth.session?.refresh_token) {
|
|
11037
|
+
const refreshed = await tokenProvider({ forceRefresh: true });
|
|
11038
|
+
res = await doFetch(refreshed.token);
|
|
11039
|
+
}
|
|
11040
|
+
if (!res.ok) {
|
|
11041
|
+
const body = await readJsonSafe(res);
|
|
11042
|
+
const msg = (body && typeof body === "object" && body && "message" in body && typeof body.message === "string" ? body.message : null) ?? `Request failed (status ${res.status})`;
|
|
11043
|
+
throw new RemixError(msg, {
|
|
11044
|
+
exitCode: 1,
|
|
11045
|
+
hint: body ? JSON.stringify(body, null, 2) : null,
|
|
11046
|
+
statusCode: res.status
|
|
11047
|
+
});
|
|
11048
|
+
}
|
|
11049
|
+
const json = await readJsonSafe(res);
|
|
11050
|
+
return json ?? null;
|
|
11051
|
+
}
|
|
11052
|
+
async function requestBinary(path16, init, opts2) {
|
|
11053
|
+
if (!tokenProvider) {
|
|
11054
|
+
throw new RemixError("API client is missing a token provider.", {
|
|
11055
|
+
exitCode: 1,
|
|
11056
|
+
hint: "Configure auth before creating the Remix API client."
|
|
11057
|
+
});
|
|
11058
|
+
}
|
|
11059
|
+
const auth = await tokenProvider();
|
|
11060
|
+
const url = new URL(path16, config.apiUrl).toString();
|
|
11061
|
+
const doFetch = async (bearer) => fetch(url, {
|
|
11062
|
+
...init,
|
|
11063
|
+
signal: makeTimeoutSignal(opts2?.timeoutMs),
|
|
11064
|
+
headers: {
|
|
11065
|
+
Accept: "*/*",
|
|
11066
|
+
...init?.headers ?? {},
|
|
11067
|
+
Authorization: `Bearer ${bearer}`,
|
|
11068
|
+
...apiKey ? { [CLIENT_KEY_HEADER]: apiKey } : {}
|
|
11069
|
+
}
|
|
11070
|
+
});
|
|
11071
|
+
let res = await doFetch(auth.token);
|
|
11072
|
+
if (res.status === 401 && !auth.fromEnv && auth.session?.refresh_token) {
|
|
11073
|
+
const refreshed = await tokenProvider({ forceRefresh: true });
|
|
11074
|
+
res = await doFetch(refreshed.token);
|
|
11075
|
+
}
|
|
11076
|
+
if (!res.ok) {
|
|
11077
|
+
const body = await readJsonSafe(res);
|
|
11078
|
+
const msg = (body && typeof body === "object" && body && "message" in body && typeof body.message === "string" ? body.message : null) ?? `Request failed (status ${res.status})`;
|
|
11079
|
+
throw new RemixError(msg, {
|
|
11080
|
+
exitCode: 1,
|
|
11081
|
+
hint: body ? JSON.stringify(body, null, 2) : null,
|
|
11082
|
+
statusCode: res.status
|
|
11083
|
+
});
|
|
11084
|
+
}
|
|
11085
|
+
const contentDisposition = res.headers.get("content-disposition") ?? "";
|
|
11086
|
+
const fileNameMatch = contentDisposition.match(/filename=\"([^\"]+)\"/i);
|
|
11087
|
+
return {
|
|
11088
|
+
data: Buffer.from(await res.arrayBuffer()),
|
|
11089
|
+
fileName: fileNameMatch?.[1] ?? null,
|
|
11090
|
+
contentType: res.headers.get("content-type")
|
|
11091
|
+
};
|
|
11092
|
+
}
|
|
11093
|
+
return {
|
|
11094
|
+
getMe: () => request("/v1/me", { method: "GET" }),
|
|
11095
|
+
listOrganizations: () => request("/v1/organizations", { method: "GET" }),
|
|
11096
|
+
getOrganization: (orgId) => request(`/v1/organizations/${encodeURIComponent(orgId)}`, { method: "GET" }),
|
|
11097
|
+
listProjects: (params) => {
|
|
11098
|
+
const qs = new URLSearchParams();
|
|
11099
|
+
if (params?.organizationId) qs.set("organizationId", params.organizationId);
|
|
11100
|
+
if (params?.clientAppId) qs.set("clientAppId", params.clientAppId);
|
|
11101
|
+
const suffix = qs.toString() ? `?${qs.toString()}` : "";
|
|
11102
|
+
return request(`/v1/projects${suffix}`, { method: "GET" });
|
|
11103
|
+
},
|
|
11104
|
+
getProject: (projectId) => request(`/v1/projects/${encodeURIComponent(projectId)}`, { method: "GET" }),
|
|
11105
|
+
resolveProjectBinding: (params) => {
|
|
11106
|
+
const qs = new URLSearchParams();
|
|
11107
|
+
if (params.repoFingerprint) qs.set("repoFingerprint", params.repoFingerprint);
|
|
11108
|
+
if (params.remoteUrl) qs.set("remoteUrl", params.remoteUrl);
|
|
11109
|
+
if (params.branchName) qs.set("branchName", params.branchName);
|
|
11110
|
+
return request(`/v1/projects/bindings/resolve?${qs.toString()}`, { method: "GET" });
|
|
11111
|
+
},
|
|
11112
|
+
resolveProjectLaneBinding: (params) => {
|
|
11113
|
+
const qs = new URLSearchParams();
|
|
11114
|
+
if (params.projectId) qs.set("projectId", params.projectId);
|
|
11115
|
+
if (params.repoFingerprint) qs.set("repoFingerprint", params.repoFingerprint);
|
|
11116
|
+
if (params.remoteUrl) qs.set("remoteUrl", params.remoteUrl);
|
|
11117
|
+
if (params.defaultBranch) qs.set("defaultBranch", params.defaultBranch);
|
|
11118
|
+
qs.set("branchName", params.branchName);
|
|
11119
|
+
return request(`/v1/projects/bindings/resolve-lane?${qs.toString()}`, { method: "GET" });
|
|
11120
|
+
},
|
|
11121
|
+
ensureProjectLaneBinding: (payload) => request("/v1/projects/bindings/ensure-lane", { method: "POST", body: JSON.stringify(payload) }),
|
|
11122
|
+
bootstrapFreshProjectLane: (payload) => request("/v1/projects/bindings/bootstrap-fresh-lane", { method: "POST", body: JSON.stringify(payload) }),
|
|
11123
|
+
autoEnableDeveloper: () => request("/v1/developer/auto-enable", { method: "POST" }),
|
|
11124
|
+
listClientApps: (params) => {
|
|
11125
|
+
const qs = params?.orgId ? `?orgId=${encodeURIComponent(params.orgId)}` : "";
|
|
11126
|
+
return request(`/v1/developer/client-apps${qs}`, { method: "GET" });
|
|
11127
|
+
},
|
|
11128
|
+
createClientApp: (payload) => request("/v1/developer/client-apps", { method: "POST", body: JSON.stringify(payload) }),
|
|
11129
|
+
createClientAppKey: (clientAppId, payload) => request(`/v1/developer/client-apps/${encodeURIComponent(clientAppId)}/keys`, {
|
|
11130
|
+
method: "POST",
|
|
11131
|
+
body: JSON.stringify(payload ?? {})
|
|
11132
|
+
}),
|
|
11133
|
+
listApps: (params) => {
|
|
11134
|
+
const qs = new URLSearchParams();
|
|
11135
|
+
if (params?.projectId) qs.set("projectId", params.projectId);
|
|
11136
|
+
if (params?.organizationId) qs.set("organizationId", params.organizationId);
|
|
11137
|
+
if (params?.ownership) qs.set("ownership", params.ownership);
|
|
11138
|
+
if (params?.accessScope) qs.set("accessScope", params.accessScope);
|
|
11139
|
+
if (params?.createdBy) qs.set("createdBy", params.createdBy);
|
|
11140
|
+
if (params?.forked) qs.set("forked", params.forked);
|
|
11141
|
+
if (typeof params?.limit === "number") qs.set("limit", String(params.limit));
|
|
11142
|
+
if (typeof params?.offset === "number") qs.set("offset", String(params.offset));
|
|
11143
|
+
const suffix = qs.toString() ? `?${qs.toString()}` : "";
|
|
11144
|
+
return request(`/v1/apps${suffix}`, { method: "GET" });
|
|
11145
|
+
},
|
|
11146
|
+
getApp: (appId) => request(`/v1/apps/${encodeURIComponent(appId)}`, { method: "GET" }),
|
|
11147
|
+
getAppContext: (appId) => request(`/v1/apps/${encodeURIComponent(appId)}/context`, { method: "GET" }),
|
|
11148
|
+
getAppOverview: (appId) => request(`/v1/apps/${encodeURIComponent(appId)}/overview`, { method: "GET" }),
|
|
11149
|
+
listAppTimeline: (appId, params) => {
|
|
11150
|
+
const qs = new URLSearchParams();
|
|
11151
|
+
if (typeof params?.limit === "number") qs.set("limit", String(params.limit));
|
|
11152
|
+
if (params?.cursor) qs.set("cursor", params.cursor);
|
|
11153
|
+
const suffix = qs.toString() ? `?${qs.toString()}` : "";
|
|
11154
|
+
return request(`/v1/apps/${encodeURIComponent(appId)}/timeline${suffix}`, { method: "GET" });
|
|
11155
|
+
},
|
|
11156
|
+
getAppTimelineEvent: (appId, eventId) => request(`/v1/apps/${encodeURIComponent(appId)}/timeline/${encodeURIComponent(eventId)}`, { method: "GET" }),
|
|
11157
|
+
listAppEditQueue: (appId, params) => {
|
|
11158
|
+
const qs = new URLSearchParams();
|
|
11159
|
+
if (typeof params?.limit === "number") qs.set("limit", String(params.limit));
|
|
11160
|
+
if (typeof params?.offset === "number") qs.set("offset", String(params.offset));
|
|
11161
|
+
const suffix = qs.toString() ? `?${qs.toString()}` : "";
|
|
11162
|
+
return request(`/v1/apps/${encodeURIComponent(appId)}/edit-queue${suffix}`, { method: "GET" });
|
|
11163
|
+
},
|
|
11164
|
+
listAppJobQueue: (appId, params) => {
|
|
11165
|
+
const qs = new URLSearchParams();
|
|
11166
|
+
if (typeof params?.limit === "number") qs.set("limit", String(params.limit));
|
|
11167
|
+
if (typeof params?.offset === "number") qs.set("offset", String(params.offset));
|
|
11168
|
+
for (const kind of params?.kind ?? []) qs.append("kind", kind);
|
|
11169
|
+
for (const status of params?.status ?? []) qs.append("status", status);
|
|
11170
|
+
const suffix = qs.toString() ? `?${qs.toString()}` : "";
|
|
11171
|
+
return request(`/v1/apps/${encodeURIComponent(appId)}/job-queue${suffix}`, { method: "GET" });
|
|
11172
|
+
},
|
|
11173
|
+
getMergeRequest: (mrId) => request(`/v1/merge-requests/${encodeURIComponent(mrId)}`, { method: "GET" }),
|
|
11174
|
+
presignImportUpload: (payload) => request("/v1/apps/import/upload/presign", { method: "POST", body: JSON.stringify(payload) }),
|
|
11175
|
+
importFromUpload: (payload) => request("/v1/apps/import/upload", { method: "POST", body: JSON.stringify(payload) }),
|
|
11176
|
+
presignImportUploadFirstParty: (payload) => request("/v1/apps/import/upload/presign/first-party", { method: "POST", body: JSON.stringify(payload) }),
|
|
11177
|
+
importFromUploadFirstParty: (payload) => request("/v1/apps/import/upload/first-party", { method: "POST", body: JSON.stringify(payload) }),
|
|
11178
|
+
importFromGithubFirstParty: (payload) => request("/v1/apps/import/github/first-party", { method: "POST", body: JSON.stringify(payload) }),
|
|
11179
|
+
forkApp: (appId, payload) => request(`/v1/apps/${encodeURIComponent(appId)}/fork`, { method: "POST", body: JSON.stringify(payload ?? {}) }),
|
|
11180
|
+
getAppHead: (appId) => request(`/v1/apps/${encodeURIComponent(appId)}/head`, { method: "GET" }),
|
|
11181
|
+
getAppDelta: (appId, payload) => request(`/v1/apps/${encodeURIComponent(appId)}/delta`, {
|
|
11182
|
+
method: "POST",
|
|
11183
|
+
body: JSON.stringify(payload)
|
|
11184
|
+
}),
|
|
10209
11185
|
downloadAppBundle: (appId) => requestBinary(`/v1/apps/${encodeURIComponent(appId)}/download.bundle`, { method: "GET" }),
|
|
10210
11186
|
createChangeStep: (appId, payload) => request(`/v1/apps/${encodeURIComponent(appId)}/change-steps`, {
|
|
10211
11187
|
method: "POST",
|
|
@@ -10936,8 +11912,8 @@ function getErrorMap() {
|
|
|
10936
11912
|
|
|
10937
11913
|
// node_modules/zod/v3/helpers/parseUtil.js
|
|
10938
11914
|
var makeIssue = (params) => {
|
|
10939
|
-
const { data, path:
|
|
10940
|
-
const fullPath = [...
|
|
11915
|
+
const { data, path: path16, errorMaps, issueData } = params;
|
|
11916
|
+
const fullPath = [...path16, ...issueData.path || []];
|
|
10941
11917
|
const fullIssue = {
|
|
10942
11918
|
...issueData,
|
|
10943
11919
|
path: fullPath
|
|
@@ -11053,11 +12029,11 @@ var errorUtil;
|
|
|
11053
12029
|
|
|
11054
12030
|
// node_modules/zod/v3/types.js
|
|
11055
12031
|
var ParseInputLazyPath = class {
|
|
11056
|
-
constructor(parent, value,
|
|
12032
|
+
constructor(parent, value, path16, key) {
|
|
11057
12033
|
this._cachedPath = [];
|
|
11058
12034
|
this.parent = parent;
|
|
11059
12035
|
this.data = value;
|
|
11060
|
-
this._path =
|
|
12036
|
+
this._path = path16;
|
|
11061
12037
|
this._key = key;
|
|
11062
12038
|
}
|
|
11063
12039
|
get path() {
|
|
@@ -14499,8 +15475,8 @@ var coerce = {
|
|
|
14499
15475
|
};
|
|
14500
15476
|
var NEVER = INVALID;
|
|
14501
15477
|
|
|
14502
|
-
// node_modules/@remixhq/core/dist/chunk-
|
|
14503
|
-
var
|
|
15478
|
+
// node_modules/@remixhq/core/dist/chunk-XETDXVGM.js
|
|
15479
|
+
var import_promises22 = __toESM(require("fs/promises"), 1);
|
|
14504
15480
|
var import_os3 = __toESM(require("os"), 1);
|
|
14505
15481
|
var import_path7 = __toESM(require("path"), 1);
|
|
14506
15482
|
|
|
@@ -14908,7 +15884,7 @@ var PostgrestError = class extends Error {
|
|
|
14908
15884
|
};
|
|
14909
15885
|
}
|
|
14910
15886
|
};
|
|
14911
|
-
function
|
|
15887
|
+
function sleep3(ms, signal) {
|
|
14912
15888
|
return new Promise((resolve) => {
|
|
14913
15889
|
if (signal === null || signal === void 0 ? void 0 : signal.aborted) {
|
|
14914
15890
|
resolve();
|
|
@@ -15104,7 +16080,7 @@ var PostgrestBuilder = class {
|
|
|
15104
16080
|
if (_this.retryEnabled && attemptCount < DEFAULT_MAX_RETRIES) {
|
|
15105
16081
|
const delay = getRetryDelay(attemptCount);
|
|
15106
16082
|
attemptCount++;
|
|
15107
|
-
await
|
|
16083
|
+
await sleep3(delay, _this.signal);
|
|
15108
16084
|
continue;
|
|
15109
16085
|
}
|
|
15110
16086
|
throw fetchError;
|
|
@@ -15115,7 +16091,7 @@ var PostgrestBuilder = class {
|
|
|
15115
16091
|
const delay = retryAfterHeader !== null ? Math.max(0, parseInt(retryAfterHeader, 10) || 0) * 1e3 : getRetryDelay(attemptCount);
|
|
15116
16092
|
await res$1.text();
|
|
15117
16093
|
attemptCount++;
|
|
15118
|
-
await
|
|
16094
|
+
await sleep3(delay, _this.signal);
|
|
15119
16095
|
continue;
|
|
15120
16096
|
}
|
|
15121
16097
|
return await _this.processResponse(res$1);
|
|
@@ -23605,8 +24581,8 @@ var IcebergError = class extends Error {
|
|
|
23605
24581
|
return this.status === 419;
|
|
23606
24582
|
}
|
|
23607
24583
|
};
|
|
23608
|
-
function buildUrl(baseUrl,
|
|
23609
|
-
const url = new URL(
|
|
24584
|
+
function buildUrl(baseUrl, path16, query) {
|
|
24585
|
+
const url = new URL(path16, baseUrl);
|
|
23610
24586
|
if (query) {
|
|
23611
24587
|
for (const [key, value] of Object.entries(query)) {
|
|
23612
24588
|
if (value !== void 0) {
|
|
@@ -23636,12 +24612,12 @@ function createFetchClient(options) {
|
|
|
23636
24612
|
return {
|
|
23637
24613
|
async request({
|
|
23638
24614
|
method,
|
|
23639
|
-
path:
|
|
24615
|
+
path: path16,
|
|
23640
24616
|
query,
|
|
23641
24617
|
body,
|
|
23642
24618
|
headers
|
|
23643
24619
|
}) {
|
|
23644
|
-
const url = buildUrl(options.baseUrl,
|
|
24620
|
+
const url = buildUrl(options.baseUrl, path16, query);
|
|
23645
24621
|
const authHeaders = await buildAuthHeaders(options.auth);
|
|
23646
24622
|
const res = await fetchFn(url, {
|
|
23647
24623
|
method,
|
|
@@ -24479,7 +25455,7 @@ var StorageFileApi = class extends BaseApiClient {
|
|
|
24479
25455
|
* @param path The relative file path. Should be of the format `folder/subfolder/filename.png`. The bucket must already exist before attempting to upload.
|
|
24480
25456
|
* @param fileBody The body of the file to be stored in the bucket.
|
|
24481
25457
|
*/
|
|
24482
|
-
async uploadOrUpdate(method,
|
|
25458
|
+
async uploadOrUpdate(method, path16, fileBody, fileOptions) {
|
|
24483
25459
|
var _this = this;
|
|
24484
25460
|
return _this.handleOperation(async () => {
|
|
24485
25461
|
let body;
|
|
@@ -24503,7 +25479,7 @@ var StorageFileApi = class extends BaseApiClient {
|
|
|
24503
25479
|
if ((typeof ReadableStream !== "undefined" && body instanceof ReadableStream || body && typeof body === "object" && "pipe" in body && typeof body.pipe === "function") && !options.duplex) options.duplex = "half";
|
|
24504
25480
|
}
|
|
24505
25481
|
if (fileOptions === null || fileOptions === void 0 ? void 0 : fileOptions.headers) for (const [key, value] of Object.entries(fileOptions.headers)) headers = setHeader(headers, key, value);
|
|
24506
|
-
const cleanPath = _this._removeEmptyFolders(
|
|
25482
|
+
const cleanPath = _this._removeEmptyFolders(path16);
|
|
24507
25483
|
const _path = _this._getFinalPath(cleanPath);
|
|
24508
25484
|
const data = await (method == "PUT" ? put : post)(_this.fetch, `${_this.url}/object/${_path}`, body, _objectSpread22({ headers }, (options === null || options === void 0 ? void 0 : options.duplex) ? { duplex: options.duplex } : {}));
|
|
24509
25485
|
return {
|
|
@@ -24564,8 +25540,8 @@ var StorageFileApi = class extends BaseApiClient {
|
|
|
24564
25540
|
* - Refer to the [Storage guide](/docs/guides/storage/security/access-control) on how access control works
|
|
24565
25541
|
* - For React Native, using either `Blob`, `File` or `FormData` does not work as intended. Upload file using `ArrayBuffer` from base64 file data instead, see example below.
|
|
24566
25542
|
*/
|
|
24567
|
-
async upload(
|
|
24568
|
-
return this.uploadOrUpdate("POST",
|
|
25543
|
+
async upload(path16, fileBody, fileOptions) {
|
|
25544
|
+
return this.uploadOrUpdate("POST", path16, fileBody, fileOptions);
|
|
24569
25545
|
}
|
|
24570
25546
|
/**
|
|
24571
25547
|
* Upload a file with a token generated from `createSignedUploadUrl`.
|
|
@@ -24604,9 +25580,9 @@ var StorageFileApi = class extends BaseApiClient {
|
|
|
24604
25580
|
* - `objects` table permissions: none
|
|
24605
25581
|
* - Refer to the [Storage guide](/docs/guides/storage/security/access-control) on how access control works
|
|
24606
25582
|
*/
|
|
24607
|
-
async uploadToSignedUrl(
|
|
25583
|
+
async uploadToSignedUrl(path16, token, fileBody, fileOptions) {
|
|
24608
25584
|
var _this3 = this;
|
|
24609
|
-
const cleanPath = _this3._removeEmptyFolders(
|
|
25585
|
+
const cleanPath = _this3._removeEmptyFolders(path16);
|
|
24610
25586
|
const _path = _this3._getFinalPath(cleanPath);
|
|
24611
25587
|
const url = new URL(_this3.url + `/object/upload/sign/${_path}`);
|
|
24612
25588
|
url.searchParams.set("token", token);
|
|
@@ -24668,10 +25644,10 @@ var StorageFileApi = class extends BaseApiClient {
|
|
|
24668
25644
|
* - `objects` table permissions: `insert`
|
|
24669
25645
|
* - Refer to the [Storage guide](/docs/guides/storage/security/access-control) on how access control works
|
|
24670
25646
|
*/
|
|
24671
|
-
async createSignedUploadUrl(
|
|
25647
|
+
async createSignedUploadUrl(path16, options) {
|
|
24672
25648
|
var _this4 = this;
|
|
24673
25649
|
return _this4.handleOperation(async () => {
|
|
24674
|
-
let _path = _this4._getFinalPath(
|
|
25650
|
+
let _path = _this4._getFinalPath(path16);
|
|
24675
25651
|
const headers = _objectSpread22({}, _this4.headers);
|
|
24676
25652
|
if (options === null || options === void 0 ? void 0 : options.upsert) headers["x-upsert"] = "true";
|
|
24677
25653
|
const data = await post(_this4.fetch, `${_this4.url}/object/upload/sign/${_path}`, {}, { headers });
|
|
@@ -24680,7 +25656,7 @@ var StorageFileApi = class extends BaseApiClient {
|
|
|
24680
25656
|
if (!token) throw new StorageError("No token returned by API");
|
|
24681
25657
|
return {
|
|
24682
25658
|
signedUrl: url.toString(),
|
|
24683
|
-
path:
|
|
25659
|
+
path: path16,
|
|
24684
25660
|
token
|
|
24685
25661
|
};
|
|
24686
25662
|
});
|
|
@@ -24736,8 +25712,8 @@ var StorageFileApi = class extends BaseApiClient {
|
|
|
24736
25712
|
* - Refer to the [Storage guide](/docs/guides/storage/security/access-control) on how access control works
|
|
24737
25713
|
* - For React Native, using either `Blob`, `File` or `FormData` does not work as intended. Update file using `ArrayBuffer` from base64 file data instead, see example below.
|
|
24738
25714
|
*/
|
|
24739
|
-
async update(
|
|
24740
|
-
return this.uploadOrUpdate("PUT",
|
|
25715
|
+
async update(path16, fileBody, fileOptions) {
|
|
25716
|
+
return this.uploadOrUpdate("PUT", path16, fileBody, fileOptions);
|
|
24741
25717
|
}
|
|
24742
25718
|
/**
|
|
24743
25719
|
* Moves an existing file to a new path in the same bucket.
|
|
@@ -24885,10 +25861,10 @@ var StorageFileApi = class extends BaseApiClient {
|
|
|
24885
25861
|
* - `objects` table permissions: `select`
|
|
24886
25862
|
* - Refer to the [Storage guide](/docs/guides/storage/security/access-control) on how access control works
|
|
24887
25863
|
*/
|
|
24888
|
-
async createSignedUrl(
|
|
25864
|
+
async createSignedUrl(path16, expiresIn, options) {
|
|
24889
25865
|
var _this8 = this;
|
|
24890
25866
|
return _this8.handleOperation(async () => {
|
|
24891
|
-
let _path = _this8._getFinalPath(
|
|
25867
|
+
let _path = _this8._getFinalPath(path16);
|
|
24892
25868
|
const hasTransform = typeof (options === null || options === void 0 ? void 0 : options.transform) === "object" && options.transform !== null && Object.keys(options.transform).length > 0;
|
|
24893
25869
|
let data = await post(_this8.fetch, `${_this8.url}/object/sign/${_path}`, _objectSpread22({ expiresIn }, hasTransform ? { transform: options.transform } : {}), { headers: _this8.headers });
|
|
24894
25870
|
const query = new URLSearchParams();
|
|
@@ -25022,13 +25998,13 @@ var StorageFileApi = class extends BaseApiClient {
|
|
|
25022
25998
|
* - `objects` table permissions: `select`
|
|
25023
25999
|
* - Refer to the [Storage guide](/docs/guides/storage/security/access-control) on how access control works
|
|
25024
26000
|
*/
|
|
25025
|
-
download(
|
|
26001
|
+
download(path16, options, parameters) {
|
|
25026
26002
|
const renderPath = typeof (options === null || options === void 0 ? void 0 : options.transform) === "object" && options.transform !== null && Object.keys(options.transform).length > 0 ? "render/image/authenticated" : "object";
|
|
25027
26003
|
const query = new URLSearchParams();
|
|
25028
26004
|
if (options === null || options === void 0 ? void 0 : options.transform) this.applyTransformOptsToQuery(query, options.transform);
|
|
25029
26005
|
if ((options === null || options === void 0 ? void 0 : options.cacheNonce) != null) query.set("cacheNonce", String(options.cacheNonce));
|
|
25030
26006
|
const queryString = query.toString();
|
|
25031
|
-
const _path = this._getFinalPath(
|
|
26007
|
+
const _path = this._getFinalPath(path16);
|
|
25032
26008
|
const downloadFn = () => get(this.fetch, `${this.url}/${renderPath}/${_path}${queryString ? `?${queryString}` : ""}`, {
|
|
25033
26009
|
headers: this.headers,
|
|
25034
26010
|
noResolveJson: true
|
|
@@ -25058,9 +26034,9 @@ var StorageFileApi = class extends BaseApiClient {
|
|
|
25058
26034
|
* }
|
|
25059
26035
|
* ```
|
|
25060
26036
|
*/
|
|
25061
|
-
async info(
|
|
26037
|
+
async info(path16) {
|
|
25062
26038
|
var _this10 = this;
|
|
25063
|
-
const _path = _this10._getFinalPath(
|
|
26039
|
+
const _path = _this10._getFinalPath(path16);
|
|
25064
26040
|
return _this10.handleOperation(async () => {
|
|
25065
26041
|
return recursiveToCamel(await get(_this10.fetch, `${_this10.url}/object/info/${_path}`, { headers: _this10.headers }));
|
|
25066
26042
|
});
|
|
@@ -25080,9 +26056,9 @@ var StorageFileApi = class extends BaseApiClient {
|
|
|
25080
26056
|
* .exists('folder/avatar1.png')
|
|
25081
26057
|
* ```
|
|
25082
26058
|
*/
|
|
25083
|
-
async exists(
|
|
26059
|
+
async exists(path16) {
|
|
25084
26060
|
var _this11 = this;
|
|
25085
|
-
const _path = _this11._getFinalPath(
|
|
26061
|
+
const _path = _this11._getFinalPath(path16);
|
|
25086
26062
|
try {
|
|
25087
26063
|
await head(_this11.fetch, `${_this11.url}/object/${_path}`, { headers: _this11.headers });
|
|
25088
26064
|
return {
|
|
@@ -25160,8 +26136,8 @@ var StorageFileApi = class extends BaseApiClient {
|
|
|
25160
26136
|
* - `objects` table permissions: none
|
|
25161
26137
|
* - Refer to the [Storage guide](/docs/guides/storage/security/access-control) on how access control works
|
|
25162
26138
|
*/
|
|
25163
|
-
getPublicUrl(
|
|
25164
|
-
const _path = this._getFinalPath(
|
|
26139
|
+
getPublicUrl(path16, options) {
|
|
26140
|
+
const _path = this._getFinalPath(path16);
|
|
25165
26141
|
const query = new URLSearchParams();
|
|
25166
26142
|
if (options === null || options === void 0 ? void 0 : options.download) query.set("download", options.download === true ? "" : options.download);
|
|
25167
26143
|
if (options === null || options === void 0 ? void 0 : options.transform) this.applyTransformOptsToQuery(query, options.transform);
|
|
@@ -25298,10 +26274,10 @@ var StorageFileApi = class extends BaseApiClient {
|
|
|
25298
26274
|
* - `objects` table permissions: `select`
|
|
25299
26275
|
* - Refer to the [Storage guide](/docs/guides/storage/security/access-control) on how access control works
|
|
25300
26276
|
*/
|
|
25301
|
-
async list(
|
|
26277
|
+
async list(path16, options, parameters) {
|
|
25302
26278
|
var _this13 = this;
|
|
25303
26279
|
return _this13.handleOperation(async () => {
|
|
25304
|
-
const body = _objectSpread22(_objectSpread22(_objectSpread22({}, DEFAULT_SEARCH_OPTIONS), options), {}, { prefix:
|
|
26280
|
+
const body = _objectSpread22(_objectSpread22(_objectSpread22({}, DEFAULT_SEARCH_OPTIONS), options), {}, { prefix: path16 || "" });
|
|
25305
26281
|
return await post(_this13.fetch, `${_this13.url}/object/list/${_this13.bucketId}`, body, { headers: _this13.headers }, parameters);
|
|
25306
26282
|
});
|
|
25307
26283
|
}
|
|
@@ -25365,11 +26341,11 @@ var StorageFileApi = class extends BaseApiClient {
|
|
|
25365
26341
|
if (typeof Buffer !== "undefined") return Buffer.from(data).toString("base64");
|
|
25366
26342
|
return btoa(data);
|
|
25367
26343
|
}
|
|
25368
|
-
_getFinalPath(
|
|
25369
|
-
return `${this.bucketId}/${
|
|
26344
|
+
_getFinalPath(path16) {
|
|
26345
|
+
return `${this.bucketId}/${path16.replace(/^\/+/, "")}`;
|
|
25370
26346
|
}
|
|
25371
|
-
_removeEmptyFolders(
|
|
25372
|
-
return
|
|
26347
|
+
_removeEmptyFolders(path16) {
|
|
26348
|
+
return path16.replace(/^\/|\/$/g, "").replace(/\/+/g, "/");
|
|
25373
26349
|
}
|
|
25374
26350
|
/** Modifies the `query`, appending values the from `transform` */
|
|
25375
26351
|
applyTransformOptsToQuery(query, transform) {
|
|
@@ -27112,7 +28088,7 @@ function decodeJWT(token) {
|
|
|
27112
28088
|
};
|
|
27113
28089
|
return data;
|
|
27114
28090
|
}
|
|
27115
|
-
async function
|
|
28091
|
+
async function sleep4(time) {
|
|
27116
28092
|
return await new Promise((accept) => {
|
|
27117
28093
|
setTimeout(() => accept(null), time);
|
|
27118
28094
|
});
|
|
@@ -32905,7 +33881,7 @@ var GoTrueClient = class _GoTrueClient {
|
|
|
32905
33881
|
const startedAt = Date.now();
|
|
32906
33882
|
return await retryable(async (attempt) => {
|
|
32907
33883
|
if (attempt > 0) {
|
|
32908
|
-
await
|
|
33884
|
+
await sleep4(200 * Math.pow(2, attempt - 1));
|
|
32909
33885
|
}
|
|
32910
33886
|
this._debug(debugName, "refreshing attempt", attempt);
|
|
32911
33887
|
return await _request(this.fetch, "POST", `${this.url}/token?grant_type=refresh_token`, {
|
|
@@ -34444,7 +35420,7 @@ function shouldShowDeprecationWarning() {
|
|
|
34444
35420
|
}
|
|
34445
35421
|
if (shouldShowDeprecationWarning()) console.warn("\u26A0\uFE0F Node.js 18 and below are deprecated and will no longer be supported in future versions of @supabase/supabase-js. Please upgrade to Node.js 20 or later. For more information, visit: https://github.com/orgs/supabase/discussions/37217");
|
|
34446
35422
|
|
|
34447
|
-
// node_modules/@remixhq/core/dist/chunk-
|
|
35423
|
+
// node_modules/@remixhq/core/dist/chunk-XETDXVGM.js
|
|
34448
35424
|
var storedSessionSchema = external_exports.object({
|
|
34449
35425
|
access_token: external_exports.string().min(1),
|
|
34450
35426
|
refresh_token: external_exports.string().min(1),
|
|
@@ -34477,24 +35453,24 @@ async function maybeLoadKeytar() {
|
|
|
34477
35453
|
}
|
|
34478
35454
|
async function ensurePathPermissions(filePath) {
|
|
34479
35455
|
const dir = import_path7.default.dirname(filePath);
|
|
34480
|
-
await
|
|
35456
|
+
await import_promises22.default.mkdir(dir, { recursive: true });
|
|
34481
35457
|
try {
|
|
34482
|
-
await
|
|
35458
|
+
await import_promises22.default.chmod(dir, 448);
|
|
34483
35459
|
} catch {
|
|
34484
35460
|
}
|
|
34485
35461
|
try {
|
|
34486
|
-
await
|
|
35462
|
+
await import_promises22.default.chmod(filePath, 384);
|
|
34487
35463
|
} catch {
|
|
34488
35464
|
}
|
|
34489
35465
|
}
|
|
34490
|
-
async function
|
|
34491
|
-
await
|
|
35466
|
+
async function writeJsonAtomic3(filePath, value) {
|
|
35467
|
+
await import_promises22.default.mkdir(import_path7.default.dirname(filePath), { recursive: true });
|
|
34492
35468
|
const tmpPath = `${filePath}.tmp-${Date.now()}-${Math.random().toString(16).slice(2)}`;
|
|
34493
|
-
await
|
|
34494
|
-
await
|
|
35469
|
+
await import_promises22.default.writeFile(tmpPath, JSON.stringify(value, null, 2) + "\n", "utf8");
|
|
35470
|
+
await import_promises22.default.rename(tmpPath, filePath);
|
|
34495
35471
|
}
|
|
34496
35472
|
async function writeSessionFileFallback(filePath, session) {
|
|
34497
|
-
await
|
|
35473
|
+
await writeJsonAtomic3(filePath, session);
|
|
34498
35474
|
await ensurePathPermissions(filePath);
|
|
34499
35475
|
}
|
|
34500
35476
|
function createLocalSessionStore(params) {
|
|
@@ -34514,7 +35490,7 @@ function createLocalSessionStore(params) {
|
|
|
34514
35490
|
}
|
|
34515
35491
|
}
|
|
34516
35492
|
async function readFile() {
|
|
34517
|
-
const raw = await
|
|
35493
|
+
const raw = await import_promises22.default.readFile(filePath, "utf8").catch(() => null);
|
|
34518
35494
|
if (!raw) return null;
|
|
34519
35495
|
try {
|
|
34520
35496
|
const parsed = storedSessionSchema.safeParse(JSON.parse(raw));
|
|
@@ -34658,7 +35634,7 @@ function createSupabaseAuthHelpers(config) {
|
|
|
34658
35634
|
};
|
|
34659
35635
|
}
|
|
34660
35636
|
|
|
34661
|
-
// node_modules/@remixhq/core/dist/chunk-
|
|
35637
|
+
// node_modules/@remixhq/core/dist/chunk-XCZRNB35.js
|
|
34662
35638
|
var DEFAULT_API_URL = "https://api.remix.one";
|
|
34663
35639
|
var DEFAULT_SUPABASE_URL = "https://xtfxwbckjpfmqubnsusu.supabase.co";
|
|
34664
35640
|
var DEFAULT_SUPABASE_ANON_KEY = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6Inh0Znh3YmNranBmbXF1Ym5zdXN1Iiwicm9sZSI6ImFub24iLCJpYXQiOjE3NjA2MDEyMzAsImV4cCI6MjA3NjE3NzIzMH0.dzWGAWrK4CvrmHVHzf8w7JlUZohdap0ZPnLZnABMV8s";
|
|
@@ -34696,6 +35672,7 @@ async function resolveConfig(_opts) {
|
|
|
34696
35672
|
}
|
|
34697
35673
|
|
|
34698
35674
|
// src/hook-auth.ts
|
|
35675
|
+
var HOOK_API_REQUEST_TIMEOUT_MS = 6e4;
|
|
34699
35676
|
async function createHookCollabApiClient() {
|
|
34700
35677
|
const config = await resolveConfig();
|
|
34701
35678
|
const sessionStore = createLocalSessionStore();
|
|
@@ -34708,518 +35685,485 @@ async function createHookCollabApiClient() {
|
|
|
34708
35685
|
}
|
|
34709
35686
|
});
|
|
34710
35687
|
return createApiClient(config, {
|
|
34711
|
-
tokenProvider
|
|
35688
|
+
tokenProvider,
|
|
35689
|
+
defaultRequestTimeoutMs: HOOK_API_REQUEST_TIMEOUT_MS
|
|
34712
35690
|
});
|
|
34713
35691
|
}
|
|
34714
35692
|
|
|
34715
|
-
// src/
|
|
34716
|
-
var
|
|
34717
|
-
var
|
|
34718
|
-
var
|
|
34719
|
-
|
|
34720
|
-
|
|
34721
|
-
|
|
34722
|
-
|
|
34723
|
-
|
|
34724
|
-
|
|
34725
|
-
|
|
34726
|
-
|
|
34727
|
-
|
|
34728
|
-
|
|
34729
|
-
}
|
|
34730
|
-
|
|
34731
|
-
|
|
34732
|
-
}
|
|
34733
|
-
|
|
34734
|
-
|
|
34735
|
-
|
|
34736
|
-
|
|
34737
|
-
|
|
35693
|
+
// src/deferred-turn-drainer.ts
|
|
35694
|
+
var collabFinalizeTurn2 = collabFinalizeTurn;
|
|
35695
|
+
var drainPendingFinalizeQueue2 = drainPendingFinalizeQueue;
|
|
35696
|
+
var HOOK_ACTOR = {
|
|
35697
|
+
type: "agent",
|
|
35698
|
+
name: "claude-code",
|
|
35699
|
+
version: pluginMetadata.version,
|
|
35700
|
+
provider: "anthropic"
|
|
35701
|
+
};
|
|
35702
|
+
function getDrainerErrorDetails(error) {
|
|
35703
|
+
if (error instanceof Error) {
|
|
35704
|
+
const hint = typeof error.hint === "string" ? String(error.hint) : null;
|
|
35705
|
+
const codeRaw = error.code;
|
|
35706
|
+
const preflightCode = isFinalizePreflightFailureCode(codeRaw) ? codeRaw : null;
|
|
35707
|
+
return { message: error.message || "Deferred turn recording failed.", hint, preflightCode };
|
|
35708
|
+
}
|
|
35709
|
+
const message = typeof error === "string" && error.trim() ? error.trim() : "Deferred turn recording failed.";
|
|
35710
|
+
return { message, hint: null, preflightCode: null };
|
|
35711
|
+
}
|
|
35712
|
+
var DEFERRED_TURN_DRAIN_POLL_INTERVAL_MS = 3e3;
|
|
35713
|
+
var DEFERRED_TURN_DRAIN_MAX_WAIT_MS = 15 * 60 * 1e3;
|
|
35714
|
+
var DEFERRED_TURN_DRAIN_LOCK_HEARTBEAT_MS = 3e4;
|
|
35715
|
+
var DEFERRED_TURN_DRAIN_LOCK_STALE_MS = 9e4;
|
|
35716
|
+
function isPidAlive(pid) {
|
|
35717
|
+
if (!Number.isFinite(pid) || pid <= 0) return false;
|
|
35718
|
+
try {
|
|
35719
|
+
process.kill(pid, 0);
|
|
35720
|
+
return true;
|
|
35721
|
+
} catch {
|
|
35722
|
+
return false;
|
|
35723
|
+
}
|
|
34738
35724
|
}
|
|
34739
|
-
|
|
34740
|
-
|
|
34741
|
-
|
|
34742
|
-
await import_promises19.default.writeFile(tmpPath, JSON.stringify(value, null, 2) + "\n", "utf8");
|
|
34743
|
-
await import_promises19.default.rename(tmpPath, filePath);
|
|
35725
|
+
function repoLockFileName(repoRoot) {
|
|
35726
|
+
const hash = (0, import_node_crypto3.createHash)("sha256").update(repoRoot).digest("hex").slice(0, 16);
|
|
35727
|
+
return `.drainer-${hash}.lock`;
|
|
34744
35728
|
}
|
|
34745
|
-
|
|
34746
|
-
|
|
34747
|
-
var STATE_LOCK_STALE_MS = 3e4;
|
|
34748
|
-
var STATE_LOCK_HEARTBEAT_MS = 5e3;
|
|
34749
|
-
async function sleep4(ms) {
|
|
34750
|
-
await new Promise((resolve) => setTimeout(resolve, ms));
|
|
35729
|
+
function repoLockPath(repoRoot) {
|
|
35730
|
+
return import_node_path11.default.join(getDeferredTurnDirPath(), repoLockFileName(repoRoot));
|
|
34751
35731
|
}
|
|
34752
|
-
async function
|
|
34753
|
-
const raw = await
|
|
35732
|
+
async function readDrainLockMetadata(lockPath) {
|
|
35733
|
+
const raw = await import_promises23.default.readFile(lockPath, "utf8").catch(() => null);
|
|
34754
35734
|
if (!raw) return null;
|
|
34755
35735
|
try {
|
|
34756
35736
|
const parsed = JSON.parse(raw);
|
|
34757
|
-
if (typeof parsed.
|
|
35737
|
+
if (typeof parsed.pid !== "number" || typeof parsed.repoRoot !== "string" || typeof parsed.startedAt !== "string") {
|
|
34758
35738
|
return null;
|
|
34759
35739
|
}
|
|
34760
|
-
return {
|
|
34761
|
-
ownerId: parsed.ownerId,
|
|
34762
|
-
pid: parsed.pid,
|
|
34763
|
-
createdAt: parsed.createdAt,
|
|
34764
|
-
heartbeatAt: parsed.heartbeatAt
|
|
34765
|
-
};
|
|
35740
|
+
return { pid: parsed.pid, repoRoot: parsed.repoRoot, startedAt: parsed.startedAt };
|
|
34766
35741
|
} catch {
|
|
34767
35742
|
return null;
|
|
34768
35743
|
}
|
|
34769
35744
|
}
|
|
34770
|
-
async function
|
|
34771
|
-
|
|
35745
|
+
async function writeDrainLockMetadata(lockPath, metadata) {
|
|
35746
|
+
const tmpPath = `${lockPath}.tmp-${process.pid}-${Date.now()}`;
|
|
35747
|
+
await import_promises23.default.writeFile(tmpPath, JSON.stringify(metadata), "utf8");
|
|
35748
|
+
await import_promises23.default.rename(tmpPath, lockPath);
|
|
34772
35749
|
}
|
|
34773
|
-
async function
|
|
34774
|
-
const lockPath =
|
|
34775
|
-
|
|
34776
|
-
const
|
|
34777
|
-
if (
|
|
34778
|
-
await
|
|
34779
|
-
|
|
34780
|
-
|
|
34781
|
-
|
|
34782
|
-
|
|
34783
|
-
|
|
34784
|
-
await import_promises19.default.rm(lockPath, { recursive: true, force: true }).catch(() => void 0);
|
|
34785
|
-
return true;
|
|
34786
|
-
}
|
|
34787
|
-
}
|
|
34788
|
-
return false;
|
|
34789
|
-
}
|
|
34790
|
-
async function acquireStateLock(sessionId) {
|
|
34791
|
-
const lockPath = stateLockPath(sessionId);
|
|
34792
|
-
const deadline = Date.now() + STATE_LOCK_WAIT_MS;
|
|
34793
|
-
await import_promises19.default.mkdir(stateRoot(), { recursive: true });
|
|
34794
|
-
while (true) {
|
|
34795
|
-
try {
|
|
34796
|
-
await import_promises19.default.mkdir(lockPath);
|
|
34797
|
-
const ownerId = (0, import_node_crypto.randomUUID)();
|
|
34798
|
-
const createdAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
34799
|
-
const metadata = {
|
|
34800
|
-
ownerId,
|
|
34801
|
-
pid: process.pid,
|
|
34802
|
-
createdAt,
|
|
34803
|
-
heartbeatAt: createdAt
|
|
34804
|
-
};
|
|
34805
|
-
await writeStateLockMetadata(sessionId, metadata);
|
|
34806
|
-
let released = false;
|
|
34807
|
-
const heartbeat = setInterval(() => {
|
|
34808
|
-
if (released) return;
|
|
34809
|
-
void writeStateLockMetadata(sessionId, {
|
|
34810
|
-
...metadata,
|
|
34811
|
-
heartbeatAt: (/* @__PURE__ */ new Date()).toISOString()
|
|
34812
|
-
}).catch(() => void 0);
|
|
34813
|
-
}, STATE_LOCK_HEARTBEAT_MS);
|
|
34814
|
-
heartbeat.unref?.();
|
|
34815
|
-
return async () => {
|
|
34816
|
-
if (released) return;
|
|
34817
|
-
released = true;
|
|
34818
|
-
clearInterval(heartbeat);
|
|
34819
|
-
const currentMetadata = await readStateLockMetadata(sessionId);
|
|
34820
|
-
if (currentMetadata?.ownerId === ownerId) {
|
|
34821
|
-
await import_promises19.default.rm(lockPath, { recursive: true, force: true }).catch(() => void 0);
|
|
34822
|
-
}
|
|
34823
|
-
};
|
|
34824
|
-
} catch (error) {
|
|
34825
|
-
const code = error && typeof error === "object" && "code" in error ? error.code : null;
|
|
34826
|
-
if (code !== "EEXIST") {
|
|
34827
|
-
throw error;
|
|
34828
|
-
}
|
|
34829
|
-
if (await tryRemoveStaleStateLock(sessionId)) {
|
|
34830
|
-
continue;
|
|
34831
|
-
}
|
|
34832
|
-
if (Date.now() >= deadline) {
|
|
34833
|
-
throw new Error(`Timed out acquiring hook state lock for session ${sessionId}.`);
|
|
34834
|
-
}
|
|
34835
|
-
await sleep4(STATE_LOCK_POLL_MS);
|
|
35750
|
+
async function tryAcquireDrainLock(repoRoot) {
|
|
35751
|
+
const lockPath = repoLockPath(repoRoot);
|
|
35752
|
+
await import_promises23.default.mkdir(import_node_path11.default.dirname(lockPath), { recursive: true });
|
|
35753
|
+
const existingMeta = await readDrainLockMetadata(lockPath);
|
|
35754
|
+
if (existingMeta) {
|
|
35755
|
+
const lockStat = await import_promises23.default.stat(lockPath).catch(() => null);
|
|
35756
|
+
const ageMs = lockStat ? Date.now() - lockStat.mtimeMs : Number.POSITIVE_INFINITY;
|
|
35757
|
+
const fresh = ageMs <= DEFERRED_TURN_DRAIN_LOCK_STALE_MS;
|
|
35758
|
+
const alive = isPidAlive(existingMeta.pid);
|
|
35759
|
+
if (fresh && alive) {
|
|
35760
|
+
return { acquired: false, lockPath };
|
|
34836
35761
|
}
|
|
34837
35762
|
}
|
|
35763
|
+
await writeDrainLockMetadata(lockPath, {
|
|
35764
|
+
pid: process.pid,
|
|
35765
|
+
repoRoot,
|
|
35766
|
+
startedAt: (/* @__PURE__ */ new Date()).toISOString()
|
|
35767
|
+
});
|
|
35768
|
+
return { acquired: true, lockPath };
|
|
34838
35769
|
}
|
|
34839
|
-
async function
|
|
34840
|
-
const
|
|
34841
|
-
|
|
34842
|
-
|
|
34843
|
-
} finally {
|
|
34844
|
-
await release();
|
|
34845
|
-
}
|
|
35770
|
+
async function releaseDrainLock(lockPath) {
|
|
35771
|
+
const meta = await readDrainLockMetadata(lockPath);
|
|
35772
|
+
if (meta && meta.pid !== process.pid) return;
|
|
35773
|
+
await import_promises23.default.rm(lockPath, { force: true }).catch(() => void 0);
|
|
34846
35774
|
}
|
|
34847
|
-
function
|
|
34848
|
-
|
|
35775
|
+
async function heartbeatDrainLock(lockPath) {
|
|
35776
|
+
const now = /* @__PURE__ */ new Date();
|
|
35777
|
+
await import_promises23.default.utimes(lockPath, now, now).catch(() => void 0);
|
|
34849
35778
|
}
|
|
34850
|
-
function
|
|
34851
|
-
|
|
35779
|
+
async function sleep5(ms) {
|
|
35780
|
+
await new Promise((resolve) => setTimeout(resolve, ms));
|
|
34852
35781
|
}
|
|
34853
|
-
function
|
|
34854
|
-
|
|
34855
|
-
return Array.from(
|
|
34856
|
-
new Set(
|
|
34857
|
-
value.filter((entry) => typeof entry === "string" && entry.trim().length > 0).map((entry) => entry.trim())
|
|
34858
|
-
)
|
|
34859
|
-
);
|
|
35782
|
+
function buildIdempotencyKey(turnId, repoRoot) {
|
|
35783
|
+
return `${turnId}:${repoRoot}:finalize_turn`;
|
|
34860
35784
|
}
|
|
34861
|
-
function
|
|
34862
|
-
if (
|
|
34863
|
-
|
|
35785
|
+
async function pushPendingFinalizeQueueToServer(params) {
|
|
35786
|
+
if (typeof drainPendingFinalizeQueue2 !== "function") {
|
|
35787
|
+
await appendHookDiagnosticsEvent({
|
|
35788
|
+
hook: "deferredTurnDrainer",
|
|
35789
|
+
sessionId: params.sessionMarker,
|
|
35790
|
+
stage: "finalize_queue_push_skipped",
|
|
35791
|
+
result: "info",
|
|
35792
|
+
reason: "drain_pending_finalize_queue_unavailable",
|
|
35793
|
+
repoRoot: params.repoRoot
|
|
35794
|
+
});
|
|
35795
|
+
return;
|
|
34864
35796
|
}
|
|
34865
|
-
return null;
|
|
34866
|
-
}
|
|
34867
|
-
function normalizeTouchedRepo(value, repoRoot) {
|
|
34868
|
-
if (!value || typeof value !== "object") return null;
|
|
34869
|
-
const parsed = value;
|
|
34870
|
-
const normalizedRepoRoot = normalizeString(parsed.repoRoot) ?? repoRoot.trim();
|
|
34871
|
-
if (!normalizedRepoRoot) return null;
|
|
34872
|
-
return {
|
|
34873
|
-
repoRoot: normalizedRepoRoot,
|
|
34874
|
-
projectId: normalizeString(parsed.projectId),
|
|
34875
|
-
currentAppId: normalizeString(parsed.currentAppId),
|
|
34876
|
-
upstreamAppId: normalizeString(parsed.upstreamAppId),
|
|
34877
|
-
firstTouchedAt: normalizeString(parsed.firstTouchedAt) ?? (/* @__PURE__ */ new Date()).toISOString(),
|
|
34878
|
-
lastTouchedAt: normalizeString(parsed.lastTouchedAt) ?? (/* @__PURE__ */ new Date()).toISOString(),
|
|
34879
|
-
lastObservedWriteAt: normalizeString(parsed.lastObservedWriteAt),
|
|
34880
|
-
touchedBy: normalizeStringArray(parsed.touchedBy),
|
|
34881
|
-
hasObservedWrite: Boolean(parsed.hasObservedWrite),
|
|
34882
|
-
manuallyRecorded: Boolean(parsed.manuallyRecorded),
|
|
34883
|
-
manuallyRecordedAt: normalizeString(parsed.manuallyRecordedAt),
|
|
34884
|
-
manuallyRecordedByTool: normalizeString(parsed.manuallyRecordedByTool),
|
|
34885
|
-
manualRecordingScope: normalizeManualRecordingScope(parsed.manualRecordingScope),
|
|
34886
|
-
manualRemoteChangeRecordedAt: normalizeString(parsed.manualRemoteChangeRecordedAt),
|
|
34887
|
-
stopAttempted: Boolean(parsed.stopAttempted),
|
|
34888
|
-
stopRecorded: Boolean(parsed.stopRecorded),
|
|
34889
|
-
stopRecordedAt: normalizeString(parsed.stopRecordedAt),
|
|
34890
|
-
stopRecordedMode: parsed.stopRecordedMode === "changed_turn" || parsed.stopRecordedMode === "no_diff_turn" ? parsed.stopRecordedMode : null,
|
|
34891
|
-
recordingFailureMessage: normalizeString(parsed.recordingFailureMessage),
|
|
34892
|
-
recordingFailureHint: normalizeString(parsed.recordingFailureHint),
|
|
34893
|
-
recordingFailedAt: normalizeString(parsed.recordingFailedAt)
|
|
34894
|
-
};
|
|
34895
|
-
}
|
|
34896
|
-
function normalizeTouchedRepos(value) {
|
|
34897
|
-
if (!value || typeof value !== "object") return {};
|
|
34898
|
-
const entries = Object.entries(value).map(([repoRoot, repo]) => normalizeTouchedRepo(repo, repoRoot)).filter((repo) => repo !== null).sort((a2, b) => a2.repoRoot.localeCompare(b.repoRoot));
|
|
34899
|
-
return Object.fromEntries(entries.map((repo) => [repo.repoRoot, repo]));
|
|
34900
|
-
}
|
|
34901
|
-
function createTouchedRepo(params) {
|
|
34902
|
-
const now = (/* @__PURE__ */ new Date()).toISOString();
|
|
34903
|
-
const touchedBy = params.touchedBy?.trim() ? [params.touchedBy.trim()] : [];
|
|
34904
|
-
return {
|
|
34905
|
-
repoRoot: params.repoRoot,
|
|
34906
|
-
projectId: normalizeString(params.projectId),
|
|
34907
|
-
currentAppId: normalizeString(params.currentAppId),
|
|
34908
|
-
upstreamAppId: normalizeString(params.upstreamAppId),
|
|
34909
|
-
firstTouchedAt: now,
|
|
34910
|
-
lastTouchedAt: now,
|
|
34911
|
-
lastObservedWriteAt: params.hasObservedWrite ? now : null,
|
|
34912
|
-
touchedBy,
|
|
34913
|
-
hasObservedWrite: Boolean(params.hasObservedWrite),
|
|
34914
|
-
manuallyRecorded: false,
|
|
34915
|
-
manuallyRecordedAt: null,
|
|
34916
|
-
manuallyRecordedByTool: null,
|
|
34917
|
-
manualRecordingScope: null,
|
|
34918
|
-
manualRemoteChangeRecordedAt: null,
|
|
34919
|
-
stopAttempted: false,
|
|
34920
|
-
stopRecorded: false,
|
|
34921
|
-
stopRecordedAt: null,
|
|
34922
|
-
stopRecordedMode: null,
|
|
34923
|
-
recordingFailureMessage: null,
|
|
34924
|
-
recordingFailureHint: null,
|
|
34925
|
-
recordingFailedAt: null
|
|
34926
|
-
};
|
|
34927
|
-
}
|
|
34928
|
-
async function updatePendingTurnState(sessionId, updater) {
|
|
34929
|
-
return withStateLock(sessionId, async () => {
|
|
34930
|
-
const existing = await loadPendingTurnState(sessionId);
|
|
34931
|
-
if (!existing) return null;
|
|
34932
|
-
const result = updater(existing);
|
|
34933
|
-
if (result === false) return existing;
|
|
34934
|
-
await savePendingTurnState(existing);
|
|
34935
|
-
return existing;
|
|
34936
|
-
});
|
|
34937
|
-
}
|
|
34938
|
-
async function loadPendingTurnState(sessionId) {
|
|
34939
|
-
const raw = await import_promises19.default.readFile(statePath(sessionId), "utf8").catch(() => null);
|
|
34940
|
-
if (!raw) return null;
|
|
34941
35797
|
try {
|
|
34942
|
-
|
|
34943
|
-
|
|
34944
|
-
|
|
34945
|
-
|
|
34946
|
-
|
|
34947
|
-
|
|
34948
|
-
|
|
34949
|
-
|
|
34950
|
-
|
|
34951
|
-
|
|
34952
|
-
|
|
34953
|
-
|
|
34954
|
-
|
|
34955
|
-
|
|
34956
|
-
|
|
34957
|
-
|
|
34958
|
-
|
|
34959
|
-
};
|
|
34960
|
-
} catch {
|
|
34961
|
-
return null;
|
|
35798
|
+
await drainPendingFinalizeQueue2({ api: params.api });
|
|
35799
|
+
await appendHookDiagnosticsEvent({
|
|
35800
|
+
hook: "deferredTurnDrainer",
|
|
35801
|
+
sessionId: params.sessionMarker,
|
|
35802
|
+
stage: "finalize_queue_pushed",
|
|
35803
|
+
result: "success",
|
|
35804
|
+
repoRoot: params.repoRoot
|
|
35805
|
+
});
|
|
35806
|
+
} catch (err) {
|
|
35807
|
+
await appendHookDiagnosticsEvent({
|
|
35808
|
+
hook: "deferredTurnDrainer",
|
|
35809
|
+
sessionId: params.sessionMarker,
|
|
35810
|
+
stage: "finalize_queue_push_failed",
|
|
35811
|
+
result: "error",
|
|
35812
|
+
reason: "exception",
|
|
35813
|
+
repoRoot: params.repoRoot,
|
|
35814
|
+
message: err instanceof Error ? err.message : String(err)
|
|
35815
|
+
});
|
|
34962
35816
|
}
|
|
34963
35817
|
}
|
|
34964
|
-
async function
|
|
34965
|
-
|
|
35818
|
+
async function recordOneDeferredTurn(params) {
|
|
35819
|
+
const { entry, api } = params;
|
|
35820
|
+
const { record, filePath } = entry;
|
|
35821
|
+
try {
|
|
35822
|
+
await collabFinalizeTurn2({
|
|
35823
|
+
api,
|
|
35824
|
+
cwd: record.repoRoot,
|
|
35825
|
+
prompt: record.prompt,
|
|
35826
|
+
assistantResponse: record.assistantResponse,
|
|
35827
|
+
idempotencyKey: buildIdempotencyKey(record.turnId, record.repoRoot),
|
|
35828
|
+
actor: HOOK_ACTOR,
|
|
35829
|
+
turnUsage: null,
|
|
35830
|
+
// The deferred queue can hold a turn for a long time (until the next
|
|
35831
|
+
// `remix collab init` lands a binding), so the server's ingestion
|
|
35832
|
+
// timestamp would otherwise be hours/days off from the real prompt
|
|
35833
|
+
// time. Forward the original submit time so the dashboard timeline
|
|
35834
|
+
// sorts this turn into its true position relative to siblings.
|
|
35835
|
+
promptedAt: record.submittedAt
|
|
35836
|
+
});
|
|
35837
|
+
await deleteDeferredTurnFile(filePath);
|
|
35838
|
+
return { recorded: true };
|
|
35839
|
+
} catch (error) {
|
|
35840
|
+
return { recorded: false, error };
|
|
35841
|
+
}
|
|
34966
35842
|
}
|
|
34967
|
-
async function
|
|
34968
|
-
const
|
|
34969
|
-
|
|
34970
|
-
const
|
|
34971
|
-
|
|
34972
|
-
|
|
34973
|
-
|
|
34974
|
-
|
|
34975
|
-
|
|
34976
|
-
|
|
34977
|
-
|
|
35843
|
+
async function runStandaloneDeferredTurnDrainer(repoRoot) {
|
|
35844
|
+
const startedAt = Date.now();
|
|
35845
|
+
const sessionMarker = `drainer-${process.pid}-${Math.random().toString(36).slice(2, 10)}`;
|
|
35846
|
+
const acquireResult = await tryAcquireDrainLock(repoRoot);
|
|
35847
|
+
if (!acquireResult.acquired) {
|
|
35848
|
+
await appendHookDiagnosticsEvent({
|
|
35849
|
+
hook: "deferredTurnDrainer",
|
|
35850
|
+
sessionId: sessionMarker,
|
|
35851
|
+
stage: "lock_skipped",
|
|
35852
|
+
result: "skip",
|
|
35853
|
+
reason: "another_drainer_active",
|
|
35854
|
+
repoRoot
|
|
34978
35855
|
});
|
|
34979
|
-
|
|
34980
|
-
|
|
34981
|
-
|
|
34982
|
-
|
|
34983
|
-
|
|
34984
|
-
|
|
34985
|
-
|
|
34986
|
-
|
|
34987
|
-
|
|
34988
|
-
|
|
35856
|
+
return;
|
|
35857
|
+
}
|
|
35858
|
+
await appendHookDiagnosticsEvent({
|
|
35859
|
+
hook: "deferredTurnDrainer",
|
|
35860
|
+
sessionId: sessionMarker,
|
|
35861
|
+
stage: "drainer_started",
|
|
35862
|
+
result: "info",
|
|
35863
|
+
repoRoot,
|
|
35864
|
+
fields: {
|
|
35865
|
+
pid: process.pid,
|
|
35866
|
+
maxWaitMs: DEFERRED_TURN_DRAIN_MAX_WAIT_MS,
|
|
35867
|
+
pollIntervalMs: DEFERRED_TURN_DRAIN_POLL_INTERVAL_MS
|
|
34989
35868
|
}
|
|
34990
|
-
existing.touchedRepos[normalizedRepoRoot] = current;
|
|
34991
35869
|
});
|
|
34992
|
-
|
|
34993
|
-
|
|
34994
|
-
|
|
34995
|
-
|
|
34996
|
-
|
|
34997
|
-
|
|
34998
|
-
|
|
34999
|
-
|
|
35000
|
-
|
|
35001
|
-
}
|
|
35002
|
-
async function markTouchedRepoStopRecorded(sessionId, repoRoot, params) {
|
|
35003
|
-
await updatePendingTurnState(sessionId, (existing) => {
|
|
35004
|
-
const current = existing.touchedRepos[repoRoot];
|
|
35005
|
-
if (!current) return false;
|
|
35006
|
-
current.stopAttempted = true;
|
|
35007
|
-
current.stopRecorded = true;
|
|
35008
|
-
current.stopRecordedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
35009
|
-
current.stopRecordedMode = params.mode;
|
|
35010
|
-
current.recordingFailureMessage = null;
|
|
35011
|
-
current.recordingFailureHint = null;
|
|
35012
|
-
current.recordingFailedAt = null;
|
|
35013
|
-
current.lastTouchedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
35014
|
-
});
|
|
35015
|
-
}
|
|
35016
|
-
async function markTouchedRepoRecordingFailure(sessionId, repoRoot, params) {
|
|
35017
|
-
await updatePendingTurnState(sessionId, (existing) => {
|
|
35018
|
-
const current = existing.touchedRepos[repoRoot];
|
|
35019
|
-
if (!current) return false;
|
|
35020
|
-
current.stopAttempted = true;
|
|
35021
|
-
current.recordingFailureMessage = params.message.trim();
|
|
35022
|
-
current.recordingFailureHint = params.hint?.trim() || null;
|
|
35023
|
-
current.recordingFailedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
35024
|
-
current.lastTouchedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
35025
|
-
});
|
|
35026
|
-
}
|
|
35027
|
-
function lastFinalizedPath(sessionId) {
|
|
35028
|
-
return import_node_path6.default.join(stateRoot(), `${sessionId}.last-finalized.json`);
|
|
35029
|
-
}
|
|
35030
|
-
async function markLastFinalizedTurn(sessionId, turnId, prompt) {
|
|
35031
|
-
const record = {
|
|
35032
|
-
sessionId,
|
|
35033
|
-
turnId,
|
|
35034
|
-
prompt,
|
|
35035
|
-
finalizedAt: (/* @__PURE__ */ new Date()).toISOString()
|
|
35036
|
-
};
|
|
35037
|
-
await writeJsonAtomic3(lastFinalizedPath(sessionId), record);
|
|
35038
|
-
}
|
|
35039
|
-
async function loadLastFinalizedTurn(sessionId) {
|
|
35040
|
-
const raw = await import_promises19.default.readFile(lastFinalizedPath(sessionId), "utf8").catch(() => null);
|
|
35041
|
-
if (!raw) return null;
|
|
35870
|
+
const heartbeat = setInterval(() => {
|
|
35871
|
+
void heartbeatDrainLock(acquireResult.lockPath).catch(() => void 0);
|
|
35872
|
+
}, DEFERRED_TURN_DRAIN_LOCK_HEARTBEAT_MS);
|
|
35873
|
+
heartbeat.unref?.();
|
|
35874
|
+
let api = null;
|
|
35875
|
+
let recordedTotal = 0;
|
|
35876
|
+
let failedTotal = 0;
|
|
35877
|
+
let droppedTotal = 0;
|
|
35878
|
+
let exitReason = "queue_empty";
|
|
35042
35879
|
try {
|
|
35043
|
-
|
|
35044
|
-
|
|
35045
|
-
|
|
35046
|
-
|
|
35047
|
-
|
|
35048
|
-
|
|
35049
|
-
|
|
35050
|
-
|
|
35880
|
+
while (true) {
|
|
35881
|
+
if (Date.now() - startedAt > DEFERRED_TURN_DRAIN_MAX_WAIT_MS) {
|
|
35882
|
+
exitReason = "timeout";
|
|
35883
|
+
break;
|
|
35884
|
+
}
|
|
35885
|
+
let entries = [];
|
|
35886
|
+
try {
|
|
35887
|
+
entries = await listDeferredTurnsForRepo(repoRoot);
|
|
35888
|
+
} catch (listErr) {
|
|
35889
|
+
await appendHookDiagnosticsEvent({
|
|
35890
|
+
hook: "deferredTurnDrainer",
|
|
35891
|
+
sessionId: sessionMarker,
|
|
35892
|
+
stage: "list_failed",
|
|
35893
|
+
result: "error",
|
|
35894
|
+
reason: "exception",
|
|
35895
|
+
repoRoot,
|
|
35896
|
+
message: listErr instanceof Error ? listErr.message : String(listErr)
|
|
35897
|
+
});
|
|
35898
|
+
await sleep5(DEFERRED_TURN_DRAIN_POLL_INTERVAL_MS);
|
|
35899
|
+
continue;
|
|
35900
|
+
}
|
|
35901
|
+
if (entries.length === 0) {
|
|
35902
|
+
exitReason = "queue_empty";
|
|
35903
|
+
break;
|
|
35904
|
+
}
|
|
35905
|
+
const bindingState = await readCollabBindingState(repoRoot).catch(() => null);
|
|
35906
|
+
const currentBranch = bindingState?.currentBranch ?? null;
|
|
35907
|
+
const isCurrentBranchBound = bindingState?.binding != null;
|
|
35908
|
+
const currentAppId = bindingState?.binding?.currentAppId ?? null;
|
|
35909
|
+
const currentProjectId = bindingState?.binding?.projectId ?? bindingState?.projectId ?? null;
|
|
35910
|
+
let droppedThisPass = 0;
|
|
35911
|
+
const liveEntries = [];
|
|
35912
|
+
for (const entry of entries) {
|
|
35913
|
+
const appIdMismatch = entry.record.appIdAtDefer != null && currentAppId != null && entry.record.appIdAtDefer !== currentAppId;
|
|
35914
|
+
const projectIdMismatch = entry.record.projectIdAtDefer != null && currentProjectId != null && entry.record.projectIdAtDefer !== currentProjectId;
|
|
35915
|
+
if (appIdMismatch || projectIdMismatch) {
|
|
35916
|
+
await deleteDeferredTurnFile(entry.filePath);
|
|
35917
|
+
droppedThisPass += 1;
|
|
35918
|
+
await appendHookDiagnosticsEvent({
|
|
35919
|
+
hook: "deferredTurnDrainer",
|
|
35920
|
+
sessionId: sessionMarker,
|
|
35921
|
+
stage: "deferred_turn_dropped",
|
|
35922
|
+
result: "info",
|
|
35923
|
+
reason: appIdMismatch ? "app_id_mismatch" : "project_id_mismatch",
|
|
35924
|
+
repoRoot,
|
|
35925
|
+
fields: {
|
|
35926
|
+
deferredTurnId: entry.record.turnId,
|
|
35927
|
+
deferredSessionId: entry.record.sessionId,
|
|
35928
|
+
appIdAtDefer: entry.record.appIdAtDefer,
|
|
35929
|
+
projectIdAtDefer: entry.record.projectIdAtDefer,
|
|
35930
|
+
currentAppId,
|
|
35931
|
+
currentProjectId
|
|
35932
|
+
}
|
|
35933
|
+
});
|
|
35934
|
+
continue;
|
|
35935
|
+
}
|
|
35936
|
+
liveEntries.push(entry);
|
|
35937
|
+
}
|
|
35938
|
+
if (droppedThisPass > 0) {
|
|
35939
|
+
droppedTotal += droppedThisPass;
|
|
35940
|
+
}
|
|
35941
|
+
if (liveEntries.length === 0) {
|
|
35942
|
+
const remaining = await listDeferredTurnsForRepo(repoRoot).catch(() => []);
|
|
35943
|
+
if (remaining.length === 0) {
|
|
35944
|
+
exitReason = "queue_empty";
|
|
35945
|
+
break;
|
|
35946
|
+
}
|
|
35947
|
+
await sleep5(DEFERRED_TURN_DRAIN_POLL_INTERVAL_MS);
|
|
35948
|
+
continue;
|
|
35949
|
+
}
|
|
35950
|
+
const attemptable = liveEntries.filter(
|
|
35951
|
+
(e) => isCurrentBranchBound && (!e.record.branchAtDefer || e.record.branchAtDefer === currentBranch)
|
|
35952
|
+
);
|
|
35953
|
+
if (attemptable.length === 0) {
|
|
35954
|
+
await sleep5(DEFERRED_TURN_DRAIN_POLL_INTERVAL_MS);
|
|
35955
|
+
continue;
|
|
35956
|
+
}
|
|
35957
|
+
if (!api) {
|
|
35958
|
+
try {
|
|
35959
|
+
api = await createHookCollabApiClient();
|
|
35960
|
+
} catch (apiErr) {
|
|
35961
|
+
await appendHookDiagnosticsEvent({
|
|
35962
|
+
hook: "deferredTurnDrainer",
|
|
35963
|
+
sessionId: sessionMarker,
|
|
35964
|
+
stage: "api_client_failed",
|
|
35965
|
+
result: "error",
|
|
35966
|
+
reason: "exception",
|
|
35967
|
+
repoRoot,
|
|
35968
|
+
message: apiErr instanceof Error ? apiErr.message : String(apiErr)
|
|
35969
|
+
});
|
|
35970
|
+
exitReason = "api_init_failed";
|
|
35971
|
+
break;
|
|
35972
|
+
}
|
|
35973
|
+
}
|
|
35974
|
+
let recordedThisPass = 0;
|
|
35975
|
+
let failedThisPass = 0;
|
|
35976
|
+
for (const entry of attemptable) {
|
|
35977
|
+
const result = await recordOneDeferredTurn({ entry, api });
|
|
35978
|
+
if (result.recorded) {
|
|
35979
|
+
recordedThisPass += 1;
|
|
35980
|
+
recordedTotal += 1;
|
|
35981
|
+
await appendHookDiagnosticsEvent({
|
|
35982
|
+
hook: "deferredTurnDrainer",
|
|
35983
|
+
sessionId: sessionMarker,
|
|
35984
|
+
stage: "deferred_turn_recorded",
|
|
35985
|
+
result: "success",
|
|
35986
|
+
repoRoot,
|
|
35987
|
+
fields: {
|
|
35988
|
+
deferredTurnId: entry.record.turnId,
|
|
35989
|
+
deferredSessionId: entry.record.sessionId,
|
|
35990
|
+
deferredAt: entry.record.deferredAt,
|
|
35991
|
+
submittedAt: entry.record.submittedAt,
|
|
35992
|
+
recordingDelayMs: Math.max(0, Date.now() - Date.parse(entry.record.deferredAt)),
|
|
35993
|
+
recoveredBy: "standalone_drainer"
|
|
35994
|
+
}
|
|
35995
|
+
});
|
|
35996
|
+
} else {
|
|
35997
|
+
failedThisPass += 1;
|
|
35998
|
+
failedTotal += 1;
|
|
35999
|
+
const outcome = await recordDeferredTurnFailedAttempt(entry.filePath).catch(() => null);
|
|
36000
|
+
const promoted = outcome?.promoted === true;
|
|
36001
|
+
await appendHookDiagnosticsEvent({
|
|
36002
|
+
hook: "deferredTurnDrainer",
|
|
36003
|
+
sessionId: sessionMarker,
|
|
36004
|
+
stage: "deferred_turn_record_failed",
|
|
36005
|
+
result: "error",
|
|
36006
|
+
reason: "exception",
|
|
36007
|
+
repoRoot,
|
|
36008
|
+
message: result.error instanceof Error ? result.error.message : String(result.error ?? ""),
|
|
36009
|
+
fields: {
|
|
36010
|
+
deferredTurnId: entry.record.turnId,
|
|
36011
|
+
deferredSessionId: entry.record.sessionId,
|
|
36012
|
+
attemptCount: outcome?.promoted === false ? outcome.newAttemptCount : outcome?.promoted === true ? outcome.finalAttemptCount : null,
|
|
36013
|
+
promoted
|
|
36014
|
+
}
|
|
36015
|
+
});
|
|
36016
|
+
if (promoted) {
|
|
36017
|
+
const errorDetails = getDrainerErrorDetails(result.error);
|
|
36018
|
+
await dispatchFinalizeFailure({
|
|
36019
|
+
// The dispatcher only knows about the two real Claude hook
|
|
36020
|
+
// entrypoints. The standalone drainer is logically a
|
|
36021
|
+
// post-Stop background process and the marker we're about
|
|
36022
|
+
// to write is consumed by the next prompt's UserPromptSubmit
|
|
36023
|
+
// hook, so attributing the failure to "Stop" matches what
|
|
36024
|
+
// the user will see.
|
|
36025
|
+
hook: "Stop",
|
|
36026
|
+
sessionId: sessionMarker,
|
|
36027
|
+
turnId: entry.record.turnId,
|
|
36028
|
+
repoRoot,
|
|
36029
|
+
preflightCode: errorDetails.preflightCode,
|
|
36030
|
+
message: `Deferred turn could not be recorded after ${outcome?.finalAttemptCount ?? "max"} attempts: ${errorDetails.message}`,
|
|
36031
|
+
hint: errorDetails.hint
|
|
36032
|
+
}).catch(async (dispatchErr) => {
|
|
36033
|
+
await appendHookDiagnosticsEvent({
|
|
36034
|
+
hook: "deferredTurnDrainer",
|
|
36035
|
+
sessionId: sessionMarker,
|
|
36036
|
+
stage: "deferred_turn_promotion_dispatch_failed",
|
|
36037
|
+
result: "error",
|
|
36038
|
+
reason: "exception",
|
|
36039
|
+
repoRoot,
|
|
36040
|
+
message: dispatchErr instanceof Error ? dispatchErr.message : String(dispatchErr),
|
|
36041
|
+
fields: {
|
|
36042
|
+
deferredTurnId: entry.record.turnId,
|
|
36043
|
+
deferredSessionId: entry.record.sessionId
|
|
36044
|
+
}
|
|
36045
|
+
});
|
|
36046
|
+
});
|
|
36047
|
+
}
|
|
36048
|
+
}
|
|
36049
|
+
}
|
|
36050
|
+
if (recordedThisPass > 0) {
|
|
36051
|
+
await pushPendingFinalizeQueueToServer({
|
|
36052
|
+
sessionMarker,
|
|
36053
|
+
repoRoot,
|
|
36054
|
+
api
|
|
36055
|
+
});
|
|
36056
|
+
}
|
|
36057
|
+
if (recordedThisPass > 0 && failedThisPass === 0) {
|
|
36058
|
+
const remaining = await listDeferredTurnsForRepo(repoRoot).catch(() => []);
|
|
36059
|
+
if (remaining.length === 0) {
|
|
36060
|
+
exitReason = "queue_empty";
|
|
36061
|
+
break;
|
|
36062
|
+
}
|
|
36063
|
+
}
|
|
36064
|
+
await sleep5(DEFERRED_TURN_DRAIN_POLL_INTERVAL_MS);
|
|
35051
36065
|
}
|
|
35052
|
-
|
|
35053
|
-
|
|
35054
|
-
|
|
36066
|
+
if (recordedTotal > 0 && api) {
|
|
36067
|
+
await pushPendingFinalizeQueueToServer({
|
|
36068
|
+
sessionMarker,
|
|
36069
|
+
repoRoot,
|
|
36070
|
+
api
|
|
36071
|
+
});
|
|
36072
|
+
}
|
|
36073
|
+
try {
|
|
36074
|
+
const pruned = await pruneStaleDeferredTurns();
|
|
36075
|
+
if (pruned.length > 0) {
|
|
36076
|
+
await appendHookDiagnosticsEvent({
|
|
36077
|
+
hook: "deferredTurnDrainer",
|
|
36078
|
+
sessionId: sessionMarker,
|
|
36079
|
+
stage: "ttl_pruned",
|
|
36080
|
+
result: "info",
|
|
36081
|
+
repoRoot,
|
|
36082
|
+
fields: { prunedCount: pruned.length }
|
|
36083
|
+
});
|
|
36084
|
+
}
|
|
36085
|
+
} catch {
|
|
36086
|
+
}
|
|
36087
|
+
} finally {
|
|
36088
|
+
clearInterval(heartbeat);
|
|
36089
|
+
await releaseDrainLock(acquireResult.lockPath);
|
|
36090
|
+
await appendHookDiagnosticsEvent({
|
|
36091
|
+
hook: "deferredTurnDrainer",
|
|
36092
|
+
sessionId: sessionMarker,
|
|
36093
|
+
stage: "drainer_completed",
|
|
36094
|
+
result: exitReason === "queue_empty" ? "success" : "info",
|
|
36095
|
+
reason: exitReason,
|
|
36096
|
+
repoRoot,
|
|
36097
|
+
fields: {
|
|
36098
|
+
recordedTotal,
|
|
36099
|
+
failedTotal,
|
|
36100
|
+
droppedTotal,
|
|
36101
|
+
elapsedMs: Date.now() - startedAt
|
|
36102
|
+
}
|
|
36103
|
+
});
|
|
35055
36104
|
}
|
|
35056
36105
|
}
|
|
35057
|
-
|
|
35058
|
-
|
|
35059
|
-
|
|
35060
|
-
|
|
35061
|
-
|
|
35062
|
-
|
|
35063
|
-
|
|
35064
|
-
|
|
35065
|
-
|
|
35066
|
-
|
|
35067
|
-
|
|
35068
|
-
|
|
35069
|
-
async function clearPendingTurnState(sessionId) {
|
|
35070
|
-
await withStateLock(sessionId, async () => {
|
|
35071
|
-
await import_promises19.default.rm(statePath(sessionId), { force: true }).catch(() => void 0);
|
|
35072
|
-
});
|
|
35073
|
-
}
|
|
35074
|
-
|
|
35075
|
-
// package.json
|
|
35076
|
-
var package_default = {
|
|
35077
|
-
name: "@remixhq/claude-plugin",
|
|
35078
|
-
version: "0.1.21",
|
|
35079
|
-
description: "Claude Code plugin for Remix collaboration workflows",
|
|
35080
|
-
homepage: "https://github.com/RemixDotOne/remix-claude-plugin",
|
|
35081
|
-
license: "MIT",
|
|
35082
|
-
repository: {
|
|
35083
|
-
type: "git",
|
|
35084
|
-
url: "https://github.com/RemixDotOne/remix-claude-plugin.git"
|
|
35085
|
-
},
|
|
35086
|
-
type: "module",
|
|
35087
|
-
engines: {
|
|
35088
|
-
node: ">=20"
|
|
35089
|
-
},
|
|
35090
|
-
publishConfig: {
|
|
35091
|
-
access: "public"
|
|
35092
|
-
},
|
|
35093
|
-
files: [
|
|
35094
|
-
"dist",
|
|
35095
|
-
".claude-plugin/plugin.json",
|
|
35096
|
-
".mcp.json",
|
|
35097
|
-
"skills",
|
|
35098
|
-
"hooks",
|
|
35099
|
-
"agents"
|
|
35100
|
-
],
|
|
35101
|
-
exports: {
|
|
35102
|
-
".": {
|
|
35103
|
-
types: "./dist/index.d.ts",
|
|
35104
|
-
import: "./dist/index.js"
|
|
36106
|
+
function parseDeferredTurnDrainerArgv(argv) {
|
|
36107
|
+
for (let i2 = 0; i2 < argv.length; i2 += 1) {
|
|
36108
|
+
const arg = argv[i2];
|
|
36109
|
+
if (!arg) continue;
|
|
36110
|
+
if (arg === "--drain-deferred-turns") {
|
|
36111
|
+
const next = argv[i2 + 1];
|
|
36112
|
+
if (next && !next.startsWith("--")) return next;
|
|
36113
|
+
return null;
|
|
36114
|
+
}
|
|
36115
|
+
if (arg.startsWith("--drain-deferred-turns=")) {
|
|
36116
|
+
const value = arg.slice("--drain-deferred-turns=".length);
|
|
36117
|
+
return value || null;
|
|
35105
36118
|
}
|
|
35106
|
-
},
|
|
35107
|
-
scripts: {
|
|
35108
|
-
build: "tsup",
|
|
35109
|
-
postbuild: `node -e "const fs=require('node:fs'); for (const p of ['dist/mcp-server.cjs','dist/hook-pre-git.cjs','dist/hook-user-prompt.cjs','dist/hook-post-collab.cjs','dist/hook-stop-collab.cjs']) fs.chmodSync(p, 0o755);"`,
|
|
35110
|
-
dev: "tsx src/mcp-server.ts",
|
|
35111
|
-
typecheck: "tsc -p tsconfig.json --noEmit",
|
|
35112
|
-
test: "node --import tsx --test src/**/*.test.ts",
|
|
35113
|
-
prepack: "npm run build"
|
|
35114
|
-
},
|
|
35115
|
-
dependencies: {
|
|
35116
|
-
"@remixhq/core": "^0.1.15",
|
|
35117
|
-
"@remixhq/mcp": "^0.1.16"
|
|
35118
|
-
},
|
|
35119
|
-
devDependencies: {
|
|
35120
|
-
"@types/node": "^25.4.0",
|
|
35121
|
-
tsup: "^8.5.1",
|
|
35122
|
-
tsx: "^4.21.0",
|
|
35123
|
-
typescript: "^5.9.3"
|
|
35124
36119
|
}
|
|
35125
|
-
|
|
35126
|
-
|
|
35127
|
-
// src/metadata.ts
|
|
35128
|
-
var pluginMetadata = {
|
|
35129
|
-
name: package_default.name,
|
|
35130
|
-
version: package_default.version,
|
|
35131
|
-
description: package_default.description,
|
|
35132
|
-
pluginId: "remix",
|
|
35133
|
-
agentName: "remix-collab"
|
|
35134
|
-
};
|
|
35135
|
-
|
|
35136
|
-
// src/hook-diagnostics.ts
|
|
35137
|
-
var MAX_LOG_BYTES = 512 * 1024;
|
|
35138
|
-
function resolveClaudeRoot() {
|
|
35139
|
-
const configured = process.env.CLAUDE_CONFIG_DIR?.trim();
|
|
35140
|
-
return configured || import_node_path7.default.join(import_node_os5.default.homedir(), ".claude");
|
|
35141
|
-
}
|
|
35142
|
-
function resolvePluginDataDirName() {
|
|
35143
|
-
return `${pluginMetadata.pluginId}-${pluginMetadata.pluginId}`;
|
|
35144
|
-
}
|
|
35145
|
-
function getHookDiagnosticsDirPath() {
|
|
35146
|
-
const configured = process.env.REMIX_CLAUDE_PLUGIN_HOOK_DIAGNOSTICS_DIR?.trim();
|
|
35147
|
-
return configured || import_node_path7.default.join(resolveClaudeRoot(), "plugins", "data", resolvePluginDataDirName());
|
|
35148
|
-
}
|
|
35149
|
-
function getHookDiagnosticsLogPath() {
|
|
35150
|
-
return import_node_path7.default.join(getHookDiagnosticsDirPath(), "hooks.ndjson");
|
|
35151
|
-
}
|
|
35152
|
-
function toFieldValue(value) {
|
|
35153
|
-
if (value === null) return null;
|
|
35154
|
-
if (typeof value === "string") return value;
|
|
35155
|
-
if (typeof value === "number" && Number.isFinite(value)) return value;
|
|
35156
|
-
if (typeof value === "boolean") return value;
|
|
35157
|
-
return void 0;
|
|
36120
|
+
return null;
|
|
35158
36121
|
}
|
|
35159
|
-
function
|
|
35160
|
-
|
|
35161
|
-
|
|
35162
|
-
|
|
35163
|
-
|
|
35164
|
-
}).filter((entry) => entry !== null);
|
|
35165
|
-
return Object.fromEntries(normalizedEntries);
|
|
36122
|
+
async function maybeRunDeferredTurnDrainerFromArgv() {
|
|
36123
|
+
const repoRoot = parseDeferredTurnDrainerArgv(process.argv);
|
|
36124
|
+
if (!repoRoot) return false;
|
|
36125
|
+
await runStandaloneDeferredTurnDrainer(repoRoot);
|
|
36126
|
+
return true;
|
|
35166
36127
|
}
|
|
35167
|
-
|
|
35168
|
-
|
|
35169
|
-
|
|
35170
|
-
|
|
35171
|
-
|
|
35172
|
-
|
|
35173
|
-
|
|
35174
|
-
|
|
36128
|
+
|
|
36129
|
+
// src/spawn-helpers.ts
|
|
36130
|
+
var import_node_child_process7 = require("child_process");
|
|
36131
|
+
function spawnDeferredTurnDrainer(repoRoot) {
|
|
36132
|
+
const entrypoint = process.argv[1];
|
|
36133
|
+
if (!entrypoint) return;
|
|
36134
|
+
if (!repoRoot) return;
|
|
36135
|
+
const child = (0, import_node_child_process7.spawn)(
|
|
36136
|
+
process.execPath,
|
|
36137
|
+
[...process.execArgv, entrypoint, "--drain-deferred-turns", repoRoot],
|
|
36138
|
+
{
|
|
36139
|
+
detached: true,
|
|
36140
|
+
stdio: "ignore",
|
|
36141
|
+
env: process.env
|
|
36142
|
+
}
|
|
36143
|
+
);
|
|
36144
|
+
child.unref();
|
|
35175
36145
|
}
|
|
35176
|
-
|
|
35177
|
-
|
|
35178
|
-
|
|
35179
|
-
|
|
35180
|
-
|
|
35181
|
-
|
|
35182
|
-
|
|
36146
|
+
|
|
36147
|
+
// src/transient-failure.ts
|
|
36148
|
+
function isTransientRecordingFailure(error) {
|
|
36149
|
+
if (!error || typeof error !== "object") return false;
|
|
36150
|
+
if (error instanceof Error) {
|
|
36151
|
+
if (error.name === "AbortError" || error.name === "TimeoutError") return true;
|
|
36152
|
+
if (error instanceof TypeError && /fetch failed/i.test(error.message)) return true;
|
|
35183
36153
|
}
|
|
35184
|
-
const
|
|
35185
|
-
|
|
35186
|
-
|
|
35187
|
-
length: trimmed.length,
|
|
35188
|
-
sha256Prefix: (0, import_node_crypto2.createHash)("sha256").update(trimmed).digest("hex").slice(0, 12)
|
|
35189
|
-
};
|
|
35190
|
-
}
|
|
35191
|
-
async function appendHookDiagnosticsEvent(params) {
|
|
35192
|
-
try {
|
|
35193
|
-
const logPath = getHookDiagnosticsLogPath();
|
|
35194
|
-
await import_promises20.default.mkdir(import_node_path7.default.dirname(logPath), { recursive: true });
|
|
35195
|
-
await rotateLogIfNeeded(logPath);
|
|
35196
|
-
const event = {
|
|
35197
|
-
ts: (/* @__PURE__ */ new Date()).toISOString(),
|
|
35198
|
-
hook: params.hook,
|
|
35199
|
-
pluginVersion: pluginMetadata.version,
|
|
35200
|
-
pid: process.pid,
|
|
35201
|
-
sessionId: params.sessionId?.trim() || null,
|
|
35202
|
-
turnId: params.turnId?.trim() || null,
|
|
35203
|
-
stage: params.stage.trim(),
|
|
35204
|
-
result: params.result,
|
|
35205
|
-
reason: params.reason?.trim() || null,
|
|
35206
|
-
toolName: params.toolName?.trim() || null,
|
|
35207
|
-
repoRoot: params.repoRoot?.trim() || null,
|
|
35208
|
-
message: params.message?.trim() || null,
|
|
35209
|
-
fields: normalizeFields(params.fields)
|
|
35210
|
-
};
|
|
35211
|
-
await import_promises20.default.appendFile(logPath, `${JSON.stringify(event)}
|
|
35212
|
-
`, "utf8");
|
|
35213
|
-
} catch {
|
|
36154
|
+
const candidate = error;
|
|
36155
|
+
if (typeof candidate.statusCode === "number" && candidate.statusCode >= 500 && candidate.statusCode < 600) {
|
|
36156
|
+
return true;
|
|
35214
36157
|
}
|
|
36158
|
+
return false;
|
|
35215
36159
|
}
|
|
35216
36160
|
|
|
35217
36161
|
// node_modules/@remixhq/core/dist/history.js
|
|
35218
|
-
var
|
|
36162
|
+
var import_promises24 = __toESM(require("fs/promises"), 1);
|
|
35219
36163
|
async function readAndParseTranscript(transcriptPath) {
|
|
35220
36164
|
let raw;
|
|
35221
36165
|
try {
|
|
35222
|
-
raw = await
|
|
36166
|
+
raw = await import_promises24.default.readFile(transcriptPath, "utf8");
|
|
35223
36167
|
} catch (err) {
|
|
35224
36168
|
const code = err && typeof err === "object" && "code" in err ? err.code : null;
|
|
35225
36169
|
if (code === "ENOENT") {
|
|
@@ -35662,10 +36606,10 @@ function harvestClaudeCodeUsage(input) {
|
|
|
35662
36606
|
}
|
|
35663
36607
|
|
|
35664
36608
|
// src/usage/claudeCodeSession.ts
|
|
35665
|
-
var
|
|
35666
|
-
var
|
|
35667
|
-
var
|
|
35668
|
-
var
|
|
36609
|
+
var import_node_child_process8 = require("child_process");
|
|
36610
|
+
var import_node_fs7 = require("fs");
|
|
36611
|
+
var import_node_os7 = require("os");
|
|
36612
|
+
var import_node_path12 = require("path");
|
|
35669
36613
|
var CACHE_SCHEMA_VERSION = 1;
|
|
35670
36614
|
var SUCCESS_TTL_MS = 60 * 60 * 1e3;
|
|
35671
36615
|
var FAILURE_TTL_MS = 5 * 60 * 1e3;
|
|
@@ -35675,7 +36619,7 @@ var spawnerImpl = defaultSpawnClaudeAuthStatus;
|
|
|
35675
36619
|
function defaultSpawnClaudeAuthStatus(timeoutMs) {
|
|
35676
36620
|
let result;
|
|
35677
36621
|
try {
|
|
35678
|
-
result = (0,
|
|
36622
|
+
result = (0, import_node_child_process8.spawnSync)("claude", ["auth", "status", "--json"], {
|
|
35679
36623
|
stdio: ["ignore", "pipe", "pipe"],
|
|
35680
36624
|
timeout: timeoutMs,
|
|
35681
36625
|
env: process.env
|
|
@@ -35694,10 +36638,10 @@ function defaultSpawnClaudeAuthStatus(timeoutMs) {
|
|
|
35694
36638
|
}
|
|
35695
36639
|
function getCollabStateRoot2() {
|
|
35696
36640
|
const configured = process.env.REMIX_COLLAB_STATE_ROOT?.trim();
|
|
35697
|
-
return configured || (0,
|
|
36641
|
+
return configured || (0, import_node_path12.join)((0, import_node_os7.homedir)(), ".remix", "collab-state");
|
|
35698
36642
|
}
|
|
35699
36643
|
function getAuthCachePath() {
|
|
35700
|
-
return (0,
|
|
36644
|
+
return (0, import_node_path12.join)(getCollabStateRoot2(), "claude-auth-cache.json");
|
|
35701
36645
|
}
|
|
35702
36646
|
function getSpawnTimeoutMs() {
|
|
35703
36647
|
const raw = process.env.REMIX_CLAUDE_AUTH_TIMEOUT_MS?.trim();
|
|
@@ -35709,7 +36653,7 @@ function getSpawnTimeoutMs() {
|
|
|
35709
36653
|
function readAuthCache() {
|
|
35710
36654
|
let raw;
|
|
35711
36655
|
try {
|
|
35712
|
-
raw = (0,
|
|
36656
|
+
raw = (0, import_node_fs7.readFileSync)(getAuthCachePath(), "utf8");
|
|
35713
36657
|
} catch {
|
|
35714
36658
|
return null;
|
|
35715
36659
|
}
|
|
@@ -35738,10 +36682,10 @@ function isCacheFresh(record) {
|
|
|
35738
36682
|
function writeAuthCache(record) {
|
|
35739
36683
|
const cachePath = getAuthCachePath();
|
|
35740
36684
|
try {
|
|
35741
|
-
(0,
|
|
36685
|
+
(0, import_node_fs7.mkdirSync)((0, import_node_path12.dirname)(cachePath), { recursive: true });
|
|
35742
36686
|
const tmpPath = `${cachePath}.${process.pid}.${Date.now()}.tmp`;
|
|
35743
|
-
(0,
|
|
35744
|
-
(0,
|
|
36687
|
+
(0, import_node_fs7.writeFileSync)(tmpPath, JSON.stringify(record), "utf8");
|
|
36688
|
+
(0, import_node_fs7.renameSync)(tmpPath, cachePath);
|
|
35745
36689
|
} catch {
|
|
35746
36690
|
}
|
|
35747
36691
|
}
|
|
@@ -35795,8 +36739,8 @@ function resolveClaudeCodeSession(hookPayload) {
|
|
|
35795
36739
|
}
|
|
35796
36740
|
|
|
35797
36741
|
// src/hook-utils.ts
|
|
35798
|
-
var
|
|
35799
|
-
var
|
|
36742
|
+
var import_promises25 = __toESM(require("fs/promises"), 1);
|
|
36743
|
+
var import_node_path13 = __toESM(require("path"), 1);
|
|
35800
36744
|
async function readJsonStdin() {
|
|
35801
36745
|
const chunks = [];
|
|
35802
36746
|
for await (const chunk of process.stdin) {
|
|
@@ -35858,16 +36802,16 @@ function extractBoolean(input, keys) {
|
|
|
35858
36802
|
}
|
|
35859
36803
|
async function findBoundRepo(startPath) {
|
|
35860
36804
|
if (!startPath) return null;
|
|
35861
|
-
let current =
|
|
35862
|
-
let stats = await
|
|
36805
|
+
let current = import_node_path13.default.resolve(startPath);
|
|
36806
|
+
let stats = await import_promises25.default.stat(current).catch(() => null);
|
|
35863
36807
|
if (stats?.isFile()) {
|
|
35864
|
-
current =
|
|
36808
|
+
current = import_node_path13.default.dirname(current);
|
|
35865
36809
|
}
|
|
35866
36810
|
while (true) {
|
|
35867
|
-
const bindingPath =
|
|
35868
|
-
const bindingStats = await
|
|
36811
|
+
const bindingPath = import_node_path13.default.join(current, ".remix", "config.json");
|
|
36812
|
+
const bindingStats = await import_promises25.default.stat(bindingPath).catch(() => null);
|
|
35869
36813
|
if (bindingStats?.isFile()) return current;
|
|
35870
|
-
const parent =
|
|
36814
|
+
const parent = import_node_path13.default.dirname(current);
|
|
35871
36815
|
if (parent === current) return null;
|
|
35872
36816
|
current = parent;
|
|
35873
36817
|
}
|
|
@@ -35886,23 +36830,27 @@ async function resolveBoundRepoSummary(startPath) {
|
|
|
35886
36830
|
}
|
|
35887
36831
|
|
|
35888
36832
|
// src/hook-stop-collab.ts
|
|
35889
|
-
var
|
|
36833
|
+
var HOOK_ACTOR2 = {
|
|
35890
36834
|
type: "agent",
|
|
35891
36835
|
name: "claude-code",
|
|
35892
36836
|
version: pluginMetadata.version,
|
|
35893
36837
|
provider: "anthropic"
|
|
35894
36838
|
};
|
|
35895
|
-
var
|
|
36839
|
+
var collabFinalizeTurn3 = collabFinalizeTurn;
|
|
35896
36840
|
function getErrorDetails(error) {
|
|
35897
36841
|
if (error instanceof Error) {
|
|
35898
36842
|
const hint = typeof error.hint === "string" ? String(error.hint) : null;
|
|
36843
|
+
const codeRaw = error.code;
|
|
36844
|
+
const preflightCode = isFinalizePreflightFailureCode(codeRaw) ? codeRaw : null;
|
|
35899
36845
|
return {
|
|
35900
36846
|
message: error.message || "Fallback Remix turn recording failed.",
|
|
35901
|
-
hint
|
|
36847
|
+
hint,
|
|
36848
|
+
preflightCode,
|
|
36849
|
+
isTransient: isTransientRecordingFailure(error)
|
|
35902
36850
|
};
|
|
35903
36851
|
}
|
|
35904
36852
|
const message = typeof error === "string" && error.trim() ? error.trim() : "Fallback Remix turn recording failed.";
|
|
35905
|
-
return { message, hint: null };
|
|
36853
|
+
return { message, hint: null, preflightCode: null, isTransient: false };
|
|
35906
36854
|
}
|
|
35907
36855
|
function buildRepoIdempotencyKey(turnId, repo) {
|
|
35908
36856
|
const repoToken = repo.currentAppId?.trim() || repo.repoRoot;
|
|
@@ -35957,7 +36905,7 @@ function createFallbackTouchedRepo(params) {
|
|
|
35957
36905
|
};
|
|
35958
36906
|
}
|
|
35959
36907
|
var TRANSCRIPT_FLUSH_RETRY_DELAYS_MS = [50, 100, 200];
|
|
35960
|
-
function
|
|
36908
|
+
function sleep6(ms) {
|
|
35961
36909
|
return new Promise((resolve) => setTimeout(resolve, ms));
|
|
35962
36910
|
}
|
|
35963
36911
|
async function harvestTurnUsage(params) {
|
|
@@ -36002,7 +36950,7 @@ async function harvestTurnUsage(params) {
|
|
|
36002
36950
|
while (!currentResult.ok && currentResult.reason === "no_messages_for_turn") {
|
|
36003
36951
|
if (retriesUsed >= TRANSCRIPT_FLUSH_RETRY_DELAYS_MS.length) break;
|
|
36004
36952
|
const delayMs = TRANSCRIPT_FLUSH_RETRY_DELAYS_MS[retriesUsed];
|
|
36005
|
-
await
|
|
36953
|
+
await sleep6(delayMs);
|
|
36006
36954
|
retriesUsed += 1;
|
|
36007
36955
|
totalBackoffMs += delayMs;
|
|
36008
36956
|
const reparsed = await readAndParseTranscript(transcriptPath);
|
|
@@ -36207,9 +37155,23 @@ async function recordTouchedRepo(params) {
|
|
|
36207
37155
|
try {
|
|
36208
37156
|
const binding = await readCollabBinding(repo.repoRoot).catch(() => null);
|
|
36209
37157
|
if (!binding) {
|
|
36210
|
-
|
|
37158
|
+
const failure = {
|
|
37159
|
+
repoRoot: repo.repoRoot,
|
|
36211
37160
|
message: "Fallback Remix turn recording failed because the repository is no longer bound to Remix.",
|
|
36212
|
-
hint: `Repo root: ${repo.repoRoot}
|
|
37161
|
+
hint: `Repo root: ${repo.repoRoot}`,
|
|
37162
|
+
// Equivalent to the not_bound preflight code — the binding
|
|
37163
|
+
// disappeared between touch-time and finalize-time. Reusing the
|
|
37164
|
+
// code lets the dispatcher route this through the same recovery.
|
|
37165
|
+
preflightCode: "not_bound",
|
|
37166
|
+
// Missing-binding is a permanent state mismatch (the user
|
|
37167
|
+
// unbinded mid-flight); not transient. Spell it out so the
|
|
37168
|
+
// upstream loop routes via dispatchFinalizeFailure instead of
|
|
37169
|
+
// silent defer.
|
|
37170
|
+
isTransient: false
|
|
37171
|
+
};
|
|
37172
|
+
await markTouchedRepoRecordingFailure(sessionId, repo.repoRoot, {
|
|
37173
|
+
message: failure.message,
|
|
37174
|
+
hint: failure.hint
|
|
36213
37175
|
});
|
|
36214
37176
|
await appendHookDiagnosticsEvent({
|
|
36215
37177
|
hook,
|
|
@@ -36220,19 +37182,20 @@ async function recordTouchedRepo(params) {
|
|
|
36220
37182
|
reason: "repo_not_bound",
|
|
36221
37183
|
repoRoot: repo.repoRoot
|
|
36222
37184
|
});
|
|
36223
|
-
return { recorded: false, queued: false };
|
|
37185
|
+
return { recorded: false, queued: false, failure };
|
|
36224
37186
|
}
|
|
36225
|
-
const result = await
|
|
37187
|
+
const result = await collabFinalizeTurn3({
|
|
36226
37188
|
api,
|
|
36227
37189
|
cwd: repo.repoRoot,
|
|
36228
37190
|
prompt,
|
|
36229
37191
|
assistantResponse,
|
|
36230
37192
|
idempotencyKey: buildRepoIdempotencyKey(turnId, repo),
|
|
36231
|
-
actor:
|
|
37193
|
+
actor: HOOK_ACTOR2,
|
|
36232
37194
|
turnUsage,
|
|
36233
37195
|
promptedAt: promptedAt ?? null
|
|
36234
37196
|
});
|
|
36235
37197
|
await markTouchedRepoStopRecorded(sessionId, repo.repoRoot, { mode: result.mode });
|
|
37198
|
+
await clearFinalizeFailureMarker(repo.repoRoot).catch(() => void 0);
|
|
36236
37199
|
await appendHookDiagnosticsEvent({
|
|
36237
37200
|
hook,
|
|
36238
37201
|
sessionId,
|
|
@@ -36242,10 +37205,17 @@ async function recordTouchedRepo(params) {
|
|
|
36242
37205
|
reason: result.mode,
|
|
36243
37206
|
repoRoot: repo.repoRoot
|
|
36244
37207
|
});
|
|
36245
|
-
return {
|
|
37208
|
+
return {
|
|
37209
|
+
recorded: true,
|
|
37210
|
+
queued: result.queued === true,
|
|
37211
|
+
failure: null
|
|
37212
|
+
};
|
|
36246
37213
|
} catch (error) {
|
|
36247
37214
|
const details = getErrorDetails(error);
|
|
36248
|
-
await markTouchedRepoRecordingFailure(sessionId, repo.repoRoot,
|
|
37215
|
+
await markTouchedRepoRecordingFailure(sessionId, repo.repoRoot, {
|
|
37216
|
+
message: details.message,
|
|
37217
|
+
hint: details.hint
|
|
37218
|
+
});
|
|
36249
37219
|
await appendHookDiagnosticsEvent({
|
|
36250
37220
|
hook,
|
|
36251
37221
|
sessionId,
|
|
@@ -36256,68 +37226,374 @@ async function recordTouchedRepo(params) {
|
|
|
36256
37226
|
repoRoot: repo.repoRoot,
|
|
36257
37227
|
message: details.message,
|
|
36258
37228
|
fields: {
|
|
36259
|
-
hint: details.hint
|
|
37229
|
+
hint: details.hint,
|
|
37230
|
+
preflightCode: details.preflightCode,
|
|
37231
|
+
// Logged so a hung backend or DNS hiccup is greppable in the
|
|
37232
|
+
// diagnostics file alongside the Cursor mirror — the next
|
|
37233
|
+
// prompt's drain log will pair with this for the recovery.
|
|
37234
|
+
isTransient: details.isTransient
|
|
36260
37235
|
}
|
|
36261
37236
|
});
|
|
36262
|
-
return {
|
|
37237
|
+
return {
|
|
37238
|
+
recorded: false,
|
|
37239
|
+
queued: false,
|
|
37240
|
+
failure: {
|
|
37241
|
+
repoRoot: repo.repoRoot,
|
|
37242
|
+
message: details.message,
|
|
37243
|
+
hint: details.hint,
|
|
37244
|
+
preflightCode: details.preflightCode,
|
|
37245
|
+
isTransient: details.isTransient
|
|
37246
|
+
}
|
|
37247
|
+
};
|
|
36263
37248
|
}
|
|
36264
37249
|
}
|
|
36265
37250
|
function spawnFinalizeQueueDrainer() {
|
|
36266
37251
|
const entrypoint = process.argv[1];
|
|
36267
37252
|
if (!entrypoint) return;
|
|
36268
|
-
const child = (0,
|
|
37253
|
+
const child = (0, import_node_child_process9.spawn)(process.execPath, [...process.execArgv, entrypoint, "--drain-finalize-queue"], {
|
|
36269
37254
|
detached: true,
|
|
36270
37255
|
stdio: "ignore",
|
|
36271
37256
|
env: process.env
|
|
36272
37257
|
});
|
|
36273
37258
|
child.unref();
|
|
36274
37259
|
}
|
|
36275
|
-
var
|
|
36276
|
-
|
|
36277
|
-
|
|
37260
|
+
var DEFERRED_TURN_PRUNE_LOG_LIMIT = 5;
|
|
37261
|
+
async function drainDeferredTurnsForRepo(params) {
|
|
37262
|
+
const { hook, sessionId, triggerTurnId, repoRoot, api } = params;
|
|
37263
|
+
let entries = [];
|
|
36278
37264
|
try {
|
|
36279
|
-
|
|
36280
|
-
|
|
37265
|
+
entries = await listDeferredTurnsForRepo(repoRoot);
|
|
37266
|
+
} catch (listErr) {
|
|
37267
|
+
await appendHookDiagnosticsEvent({
|
|
37268
|
+
hook,
|
|
37269
|
+
sessionId,
|
|
37270
|
+
turnId: triggerTurnId,
|
|
37271
|
+
stage: "deferred_turn_list_failed",
|
|
37272
|
+
result: "error",
|
|
37273
|
+
reason: "exception",
|
|
37274
|
+
repoRoot,
|
|
37275
|
+
message: listErr instanceof Error ? listErr.message : String(listErr)
|
|
37276
|
+
});
|
|
37277
|
+
return;
|
|
37278
|
+
}
|
|
37279
|
+
if (entries.length === 0) return;
|
|
37280
|
+
const bindingState = await readCollabBindingState(repoRoot).catch(() => null);
|
|
37281
|
+
const currentBranch = bindingState?.currentBranch ?? null;
|
|
37282
|
+
const isCurrentBranchBound = bindingState?.binding != null;
|
|
37283
|
+
const currentAppId = bindingState?.binding?.currentAppId ?? null;
|
|
37284
|
+
const currentProjectId = bindingState?.binding?.projectId ?? bindingState?.projectId ?? null;
|
|
37285
|
+
await appendHookDiagnosticsEvent({
|
|
37286
|
+
hook,
|
|
37287
|
+
sessionId,
|
|
37288
|
+
turnId: triggerTurnId,
|
|
37289
|
+
stage: "deferred_turn_drain_started",
|
|
37290
|
+
result: "info",
|
|
37291
|
+
repoRoot,
|
|
37292
|
+
fields: {
|
|
37293
|
+
candidateCount: entries.length,
|
|
37294
|
+
currentBranch,
|
|
37295
|
+
currentBranchBound: isCurrentBranchBound,
|
|
37296
|
+
currentAppId,
|
|
37297
|
+
currentProjectId
|
|
37298
|
+
}
|
|
37299
|
+
});
|
|
37300
|
+
let recordedCount = 0;
|
|
37301
|
+
let skippedCount = 0;
|
|
37302
|
+
let failedCount = 0;
|
|
37303
|
+
let droppedCount = 0;
|
|
37304
|
+
for (const entry of entries) {
|
|
37305
|
+
const { record, filePath } = entry;
|
|
37306
|
+
const appIdMismatch = record.appIdAtDefer != null && currentAppId != null && record.appIdAtDefer !== currentAppId;
|
|
37307
|
+
const projectIdMismatch = record.projectIdAtDefer != null && currentProjectId != null && record.projectIdAtDefer !== currentProjectId;
|
|
37308
|
+
if (appIdMismatch || projectIdMismatch) {
|
|
37309
|
+
droppedCount += 1;
|
|
37310
|
+
await deleteDeferredTurnFile(filePath);
|
|
37311
|
+
await appendHookDiagnosticsEvent({
|
|
37312
|
+
hook,
|
|
37313
|
+
sessionId,
|
|
37314
|
+
turnId: triggerTurnId,
|
|
37315
|
+
stage: "deferred_turn_dropped",
|
|
37316
|
+
result: "info",
|
|
37317
|
+
reason: appIdMismatch ? "app_id_mismatch" : "project_id_mismatch",
|
|
37318
|
+
repoRoot,
|
|
37319
|
+
fields: {
|
|
37320
|
+
deferredTurnId: record.turnId,
|
|
37321
|
+
deferredSessionId: record.sessionId,
|
|
37322
|
+
appIdAtDefer: record.appIdAtDefer,
|
|
37323
|
+
projectIdAtDefer: record.projectIdAtDefer,
|
|
37324
|
+
currentAppId,
|
|
37325
|
+
currentProjectId
|
|
37326
|
+
}
|
|
37327
|
+
});
|
|
37328
|
+
continue;
|
|
37329
|
+
}
|
|
37330
|
+
if (!isCurrentBranchBound || record.branchAtDefer && record.branchAtDefer !== currentBranch) {
|
|
37331
|
+
skippedCount += 1;
|
|
37332
|
+
await appendHookDiagnosticsEvent({
|
|
37333
|
+
hook,
|
|
37334
|
+
sessionId,
|
|
37335
|
+
turnId: triggerTurnId,
|
|
37336
|
+
stage: "deferred_turn_skipped",
|
|
37337
|
+
result: "info",
|
|
37338
|
+
reason: "branch_mismatch",
|
|
37339
|
+
repoRoot,
|
|
37340
|
+
fields: {
|
|
37341
|
+
deferredTurnId: record.turnId,
|
|
37342
|
+
deferredSessionId: record.sessionId,
|
|
37343
|
+
branchAtDefer: record.branchAtDefer,
|
|
37344
|
+
currentBranch
|
|
37345
|
+
}
|
|
37346
|
+
});
|
|
37347
|
+
continue;
|
|
37348
|
+
}
|
|
37349
|
+
try {
|
|
37350
|
+
const idempotencyKey = `${record.turnId}:${repoRoot}:finalize_turn`;
|
|
37351
|
+
await collabFinalizeTurn3({
|
|
37352
|
+
api,
|
|
37353
|
+
cwd: repoRoot,
|
|
37354
|
+
prompt: record.prompt,
|
|
37355
|
+
assistantResponse: record.assistantResponse,
|
|
37356
|
+
idempotencyKey,
|
|
37357
|
+
actor: HOOK_ACTOR2,
|
|
37358
|
+
turnUsage: null,
|
|
37359
|
+
promptedAt: record.submittedAt
|
|
37360
|
+
});
|
|
37361
|
+
await deleteDeferredTurnFile(filePath);
|
|
37362
|
+
recordedCount += 1;
|
|
37363
|
+
await appendHookDiagnosticsEvent({
|
|
37364
|
+
hook,
|
|
37365
|
+
sessionId,
|
|
37366
|
+
turnId: triggerTurnId,
|
|
37367
|
+
stage: "deferred_turn_recorded",
|
|
37368
|
+
result: "success",
|
|
37369
|
+
repoRoot,
|
|
37370
|
+
fields: {
|
|
37371
|
+
deferredTurnId: record.turnId,
|
|
37372
|
+
deferredSessionId: record.sessionId,
|
|
37373
|
+
deferredAt: record.deferredAt,
|
|
37374
|
+
submittedAt: record.submittedAt,
|
|
37375
|
+
recordingDelayMs: Math.max(0, Date.now() - Date.parse(record.deferredAt))
|
|
37376
|
+
}
|
|
37377
|
+
});
|
|
37378
|
+
} catch (recordErr) {
|
|
37379
|
+
failedCount += 1;
|
|
37380
|
+
const outcome = await recordDeferredTurnFailedAttempt(filePath).catch(() => null);
|
|
37381
|
+
const promoted = outcome?.promoted === true;
|
|
37382
|
+
await appendHookDiagnosticsEvent({
|
|
37383
|
+
hook,
|
|
37384
|
+
sessionId,
|
|
37385
|
+
turnId: triggerTurnId,
|
|
37386
|
+
stage: "deferred_turn_record_failed",
|
|
37387
|
+
result: "error",
|
|
37388
|
+
reason: "exception",
|
|
37389
|
+
repoRoot,
|
|
37390
|
+
message: recordErr instanceof Error ? recordErr.message : String(recordErr),
|
|
37391
|
+
fields: {
|
|
37392
|
+
deferredTurnId: record.turnId,
|
|
37393
|
+
deferredSessionId: record.sessionId,
|
|
37394
|
+
attemptCount: outcome?.promoted === false ? outcome.newAttemptCount : outcome?.promoted === true ? outcome.finalAttemptCount : null,
|
|
37395
|
+
promoted
|
|
37396
|
+
}
|
|
37397
|
+
});
|
|
37398
|
+
if (promoted) {
|
|
37399
|
+
const errorDetails = getErrorDetails(recordErr);
|
|
37400
|
+
await dispatchFinalizeFailure({
|
|
37401
|
+
hook,
|
|
37402
|
+
sessionId,
|
|
37403
|
+
turnId: triggerTurnId,
|
|
37404
|
+
repoRoot,
|
|
37405
|
+
preflightCode: errorDetails.preflightCode,
|
|
37406
|
+
message: `Deferred turn could not be recorded after ${outcome?.finalAttemptCount ?? "max"} attempts: ${errorDetails.message}`,
|
|
37407
|
+
hint: errorDetails.hint
|
|
37408
|
+
}).catch(async (dispatchErr) => {
|
|
37409
|
+
await appendHookDiagnosticsEvent({
|
|
37410
|
+
hook,
|
|
37411
|
+
sessionId,
|
|
37412
|
+
turnId: triggerTurnId,
|
|
37413
|
+
stage: "deferred_turn_promotion_dispatch_failed",
|
|
37414
|
+
result: "error",
|
|
37415
|
+
reason: "exception",
|
|
37416
|
+
repoRoot,
|
|
37417
|
+
message: dispatchErr instanceof Error ? dispatchErr.message : String(dispatchErr),
|
|
37418
|
+
fields: {
|
|
37419
|
+
deferredTurnId: record.turnId,
|
|
37420
|
+
deferredSessionId: record.sessionId
|
|
37421
|
+
}
|
|
37422
|
+
});
|
|
37423
|
+
});
|
|
37424
|
+
}
|
|
36281
37425
|
}
|
|
36282
|
-
} catch (markerErr) {
|
|
36283
|
-
return {
|
|
36284
|
-
spawned: false,
|
|
36285
|
-
reason: "marker_check_failed",
|
|
36286
|
-
message: markerErr instanceof Error ? markerErr.message : String(markerErr)
|
|
36287
|
-
};
|
|
36288
37426
|
}
|
|
36289
|
-
const remixDir = import_node_path10.default.join(repoRoot, ".remix");
|
|
36290
37427
|
try {
|
|
36291
|
-
|
|
37428
|
+
const pruned = await pruneStaleDeferredTurns();
|
|
37429
|
+
for (const prunedPath of pruned.slice(0, DEFERRED_TURN_PRUNE_LOG_LIMIT)) {
|
|
37430
|
+
await appendHookDiagnosticsEvent({
|
|
37431
|
+
hook,
|
|
37432
|
+
sessionId,
|
|
37433
|
+
turnId: triggerTurnId,
|
|
37434
|
+
stage: "deferred_turn_pruned",
|
|
37435
|
+
result: "info",
|
|
37436
|
+
reason: "stale",
|
|
37437
|
+
repoRoot,
|
|
37438
|
+
fields: { prunedFilePath: prunedPath }
|
|
37439
|
+
});
|
|
37440
|
+
}
|
|
37441
|
+
if (pruned.length > DEFERRED_TURN_PRUNE_LOG_LIMIT) {
|
|
37442
|
+
await appendHookDiagnosticsEvent({
|
|
37443
|
+
hook,
|
|
37444
|
+
sessionId,
|
|
37445
|
+
turnId: triggerTurnId,
|
|
37446
|
+
stage: "deferred_turn_pruned",
|
|
37447
|
+
result: "info",
|
|
37448
|
+
reason: "stale_truncated",
|
|
37449
|
+
repoRoot,
|
|
37450
|
+
fields: {
|
|
37451
|
+
totalPruned: pruned.length,
|
|
37452
|
+
loggedPruned: DEFERRED_TURN_PRUNE_LOG_LIMIT
|
|
37453
|
+
}
|
|
37454
|
+
});
|
|
37455
|
+
}
|
|
36292
37456
|
} catch {
|
|
36293
37457
|
}
|
|
36294
|
-
|
|
36295
|
-
|
|
36296
|
-
|
|
37458
|
+
await appendHookDiagnosticsEvent({
|
|
37459
|
+
hook,
|
|
37460
|
+
sessionId,
|
|
37461
|
+
turnId: triggerTurnId,
|
|
37462
|
+
stage: "deferred_turn_drain_completed",
|
|
37463
|
+
result: "info",
|
|
37464
|
+
repoRoot,
|
|
37465
|
+
fields: {
|
|
37466
|
+
recordedCount,
|
|
37467
|
+
skippedCount,
|
|
37468
|
+
failedCount,
|
|
37469
|
+
droppedCount
|
|
37470
|
+
}
|
|
37471
|
+
});
|
|
37472
|
+
}
|
|
37473
|
+
async function deferTurnForTransientFailure(params) {
|
|
37474
|
+
const { hook, sessionId, turnId, repoRoot, prompt, assistantResponse, submittedAt, failureMessage } = params;
|
|
37475
|
+
const bindingState = await readCollabBindingState(repoRoot).catch(() => null);
|
|
37476
|
+
const branchAtDefer = bindingState?.currentBranch ?? null;
|
|
37477
|
+
const appIdAtDefer = bindingState?.binding?.currentAppId ?? null;
|
|
37478
|
+
const projectIdAtDefer = bindingState?.binding?.projectId ?? bindingState?.projectId ?? null;
|
|
36297
37479
|
try {
|
|
36298
|
-
|
|
36299
|
-
|
|
36300
|
-
|
|
36301
|
-
|
|
36302
|
-
|
|
36303
|
-
|
|
36304
|
-
|
|
36305
|
-
|
|
37480
|
+
const deferredFilePath = await writeDeferredTurn(
|
|
37481
|
+
buildDeferredTurnRecord({
|
|
37482
|
+
sessionId,
|
|
37483
|
+
turnId,
|
|
37484
|
+
repoRoot,
|
|
37485
|
+
prompt,
|
|
37486
|
+
assistantResponse,
|
|
37487
|
+
submittedAt,
|
|
37488
|
+
branchAtDefer,
|
|
37489
|
+
appIdAtDefer,
|
|
37490
|
+
projectIdAtDefer,
|
|
37491
|
+
reason: "transient_recording_failure"
|
|
37492
|
+
})
|
|
37493
|
+
);
|
|
37494
|
+
await appendHookDiagnosticsEvent({
|
|
37495
|
+
hook,
|
|
37496
|
+
sessionId,
|
|
37497
|
+
turnId,
|
|
37498
|
+
stage: "turn_deferred",
|
|
37499
|
+
result: "success",
|
|
37500
|
+
reason: "transient_recording_failure",
|
|
37501
|
+
repoRoot,
|
|
37502
|
+
fields: {
|
|
37503
|
+
deferredFilePath,
|
|
37504
|
+
promptLength: prompt.length,
|
|
37505
|
+
assistantResponseLength: assistantResponse.length,
|
|
37506
|
+
branchAtDefer,
|
|
37507
|
+
// Forwarded so the diagnostics timeline pairs the defer with
|
|
37508
|
+
// the originating recording_failed event without needing a
|
|
37509
|
+
// join across stages.
|
|
37510
|
+
failureMessage
|
|
37511
|
+
}
|
|
37512
|
+
});
|
|
37513
|
+
return deferredFilePath;
|
|
37514
|
+
} catch (deferErr) {
|
|
37515
|
+
await appendHookDiagnosticsEvent({
|
|
37516
|
+
hook,
|
|
37517
|
+
sessionId,
|
|
37518
|
+
turnId,
|
|
37519
|
+
stage: "deferred_turn_write_failed",
|
|
37520
|
+
result: "error",
|
|
37521
|
+
reason: "exception",
|
|
37522
|
+
repoRoot,
|
|
37523
|
+
message: deferErr instanceof Error ? deferErr.message : String(deferErr),
|
|
37524
|
+
fields: {
|
|
37525
|
+
triggeredBy: "transient_recording_failure"
|
|
37526
|
+
}
|
|
37527
|
+
});
|
|
37528
|
+
return null;
|
|
36306
37529
|
}
|
|
37530
|
+
}
|
|
37531
|
+
async function deferTurnForRecoveryInProgress(params) {
|
|
37532
|
+
const { hook, sessionId, turnId, repoRoot, prompt, assistantResponse, submittedAt, preflightCode } = params;
|
|
37533
|
+
const bindingState = await readCollabBindingState(repoRoot).catch(() => null);
|
|
37534
|
+
const branchAtDefer = bindingState?.currentBranch ?? null;
|
|
37535
|
+
const appIdAtDefer = bindingState?.binding?.currentAppId ?? null;
|
|
37536
|
+
const projectIdAtDefer = bindingState?.binding?.projectId ?? bindingState?.projectId ?? null;
|
|
36307
37537
|
try {
|
|
36308
|
-
const
|
|
36309
|
-
|
|
36310
|
-
|
|
36311
|
-
|
|
37538
|
+
const deferredFilePath = await writeDeferredTurn(
|
|
37539
|
+
buildDeferredTurnRecord({
|
|
37540
|
+
sessionId,
|
|
37541
|
+
turnId,
|
|
37542
|
+
repoRoot,
|
|
37543
|
+
prompt,
|
|
37544
|
+
assistantResponse,
|
|
37545
|
+
submittedAt,
|
|
37546
|
+
branchAtDefer,
|
|
37547
|
+
appIdAtDefer,
|
|
37548
|
+
projectIdAtDefer,
|
|
37549
|
+
reason: "recovery_in_progress"
|
|
37550
|
+
})
|
|
37551
|
+
);
|
|
37552
|
+
await appendHookDiagnosticsEvent({
|
|
37553
|
+
hook,
|
|
37554
|
+
sessionId,
|
|
37555
|
+
turnId,
|
|
37556
|
+
stage: "turn_deferred",
|
|
37557
|
+
result: "success",
|
|
37558
|
+
reason: "recovery_in_progress",
|
|
37559
|
+
repoRoot,
|
|
37560
|
+
fields: {
|
|
37561
|
+
deferredFilePath,
|
|
37562
|
+
promptLength: prompt.length,
|
|
37563
|
+
assistantResponseLength: assistantResponse.length,
|
|
37564
|
+
branchAtDefer,
|
|
37565
|
+
preflightCode
|
|
37566
|
+
}
|
|
36312
37567
|
});
|
|
36313
|
-
|
|
36314
|
-
|
|
36315
|
-
|
|
36316
|
-
|
|
36317
|
-
|
|
36318
|
-
|
|
36319
|
-
|
|
36320
|
-
|
|
37568
|
+
spawnDeferredTurnDrainer(repoRoot);
|
|
37569
|
+
await appendHookDiagnosticsEvent({
|
|
37570
|
+
hook,
|
|
37571
|
+
sessionId,
|
|
37572
|
+
turnId,
|
|
37573
|
+
stage: "deferred_turn_drainer_spawned",
|
|
37574
|
+
result: "info",
|
|
37575
|
+
repoRoot,
|
|
37576
|
+
fields: {
|
|
37577
|
+
triggeredBy: "recovery_in_progress",
|
|
37578
|
+
preflightCode
|
|
37579
|
+
}
|
|
37580
|
+
});
|
|
37581
|
+
return deferredFilePath;
|
|
37582
|
+
} catch (deferErr) {
|
|
37583
|
+
await appendHookDiagnosticsEvent({
|
|
37584
|
+
hook,
|
|
37585
|
+
sessionId,
|
|
37586
|
+
turnId,
|
|
37587
|
+
stage: "deferred_turn_write_failed",
|
|
37588
|
+
result: "error",
|
|
37589
|
+
reason: "exception",
|
|
37590
|
+
repoRoot,
|
|
37591
|
+
message: deferErr instanceof Error ? deferErr.message : String(deferErr),
|
|
37592
|
+
fields: {
|
|
37593
|
+
preflightCode
|
|
37594
|
+
}
|
|
37595
|
+
});
|
|
37596
|
+
return null;
|
|
36321
37597
|
}
|
|
36322
37598
|
}
|
|
36323
37599
|
async function runHookStopCollab(payload) {
|
|
@@ -36384,6 +37660,7 @@ async function runHookStopCollab(payload) {
|
|
|
36384
37660
|
let unboundBranchRepoRoot = null;
|
|
36385
37661
|
let unboundBranchName = null;
|
|
36386
37662
|
let unboundBranchKnownCount = 0;
|
|
37663
|
+
let unboundProjectIdAtDefer = null;
|
|
36387
37664
|
const candidateRepoRoot = await findBoundRepo(state.initialCwd).catch(() => null);
|
|
36388
37665
|
if (candidateRepoRoot) {
|
|
36389
37666
|
const bindingState = await readCollabBindingState(candidateRepoRoot).catch(() => null);
|
|
@@ -36393,6 +37670,41 @@ async function runHookStopCollab(payload) {
|
|
|
36393
37670
|
unboundBranchRepoRoot = candidateRepoRoot;
|
|
36394
37671
|
unboundBranchName = bindingState.currentBranch;
|
|
36395
37672
|
unboundBranchKnownCount = knownBoundBranches.length;
|
|
37673
|
+
unboundProjectIdAtDefer = bindingState.projectId ?? null;
|
|
37674
|
+
}
|
|
37675
|
+
}
|
|
37676
|
+
const promptTextForDefer = state.prompt.trim();
|
|
37677
|
+
const assistantResponseForDefer = (extractAssistantResponse(payload) || "").trim();
|
|
37678
|
+
let deferredFilePath = null;
|
|
37679
|
+
if (skipReason === "current_branch_unbound" && unboundBranchRepoRoot && promptTextForDefer && assistantResponseForDefer) {
|
|
37680
|
+
try {
|
|
37681
|
+
deferredFilePath = await writeDeferredTurn(
|
|
37682
|
+
buildDeferredTurnRecord({
|
|
37683
|
+
sessionId,
|
|
37684
|
+
turnId: state.turnId,
|
|
37685
|
+
repoRoot: unboundBranchRepoRoot,
|
|
37686
|
+
prompt: promptTextForDefer,
|
|
37687
|
+
assistantResponse: assistantResponseForDefer,
|
|
37688
|
+
submittedAt: state.submittedAt,
|
|
37689
|
+
branchAtDefer: unboundBranchName,
|
|
37690
|
+
// No appId for an unbound lane (the binding is null
|
|
37691
|
+
// by construction); project id still anchors against
|
|
37692
|
+
// `force-new`-style identity rotations.
|
|
37693
|
+
appIdAtDefer: null,
|
|
37694
|
+
projectIdAtDefer: unboundProjectIdAtDefer
|
|
37695
|
+
})
|
|
37696
|
+
);
|
|
37697
|
+
} catch (deferErr) {
|
|
37698
|
+
await appendHookDiagnosticsEvent({
|
|
37699
|
+
hook,
|
|
37700
|
+
sessionId,
|
|
37701
|
+
turnId: state.turnId,
|
|
37702
|
+
stage: "deferred_turn_write_failed",
|
|
37703
|
+
result: "error",
|
|
37704
|
+
reason: "exception",
|
|
37705
|
+
repoRoot: unboundBranchRepoRoot,
|
|
37706
|
+
message: deferErr instanceof Error ? deferErr.message : String(deferErr)
|
|
37707
|
+
});
|
|
36396
37708
|
}
|
|
36397
37709
|
}
|
|
36398
37710
|
await clearPendingTurnState(sessionId);
|
|
@@ -36406,16 +37718,44 @@ async function runHookStopCollab(payload) {
|
|
|
36406
37718
|
repoRoot: unboundBranchRepoRoot,
|
|
36407
37719
|
fields: skipReason === "current_branch_unbound" ? {
|
|
36408
37720
|
currentBranch: unboundBranchName,
|
|
36409
|
-
knownBoundBranchCount: unboundBranchKnownCount
|
|
37721
|
+
knownBoundBranchCount: unboundBranchKnownCount,
|
|
37722
|
+
deferredForRetry: deferredFilePath !== null
|
|
36410
37723
|
} : {}
|
|
36411
37724
|
});
|
|
37725
|
+
if (deferredFilePath && unboundBranchRepoRoot) {
|
|
37726
|
+
await appendHookDiagnosticsEvent({
|
|
37727
|
+
hook,
|
|
37728
|
+
sessionId,
|
|
37729
|
+
turnId: state.turnId,
|
|
37730
|
+
stage: "turn_deferred",
|
|
37731
|
+
result: "success",
|
|
37732
|
+
reason: "current_branch_unbound",
|
|
37733
|
+
repoRoot: unboundBranchRepoRoot,
|
|
37734
|
+
fields: {
|
|
37735
|
+
deferredFilePath,
|
|
37736
|
+
promptLength: promptTextForDefer.length,
|
|
37737
|
+
assistantResponseLength: assistantResponseForDefer.length,
|
|
37738
|
+
branchAtDefer: unboundBranchName
|
|
37739
|
+
}
|
|
37740
|
+
});
|
|
37741
|
+
spawnDeferredTurnDrainer(unboundBranchRepoRoot);
|
|
37742
|
+
await appendHookDiagnosticsEvent({
|
|
37743
|
+
hook,
|
|
37744
|
+
sessionId,
|
|
37745
|
+
turnId: state.turnId,
|
|
37746
|
+
stage: "deferred_turn_drainer_spawned",
|
|
37747
|
+
result: "info",
|
|
37748
|
+
repoRoot: unboundBranchRepoRoot,
|
|
37749
|
+
fields: { triggeredBy: "defer" }
|
|
37750
|
+
});
|
|
37751
|
+
}
|
|
36412
37752
|
await appendHookDiagnosticsEvent({
|
|
36413
37753
|
hook,
|
|
36414
37754
|
sessionId,
|
|
36415
37755
|
turnId: state.turnId,
|
|
36416
37756
|
stage: "state_cleanup",
|
|
36417
37757
|
result: "success",
|
|
36418
|
-
reason: "cleared_without_bound_repo"
|
|
37758
|
+
reason: deferredFilePath ? "cleared_after_defer" : "cleared_without_bound_repo"
|
|
36419
37759
|
});
|
|
36420
37760
|
return;
|
|
36421
37761
|
}
|
|
@@ -36491,10 +37831,20 @@ async function runHookStopCollab(payload) {
|
|
|
36491
37831
|
state: { turnId: state.turnId, prompt, submittedAt: state.submittedAt },
|
|
36492
37832
|
payload
|
|
36493
37833
|
});
|
|
37834
|
+
for (const repo of touchedRepos) {
|
|
37835
|
+
await drainDeferredTurnsForRepo({
|
|
37836
|
+
hook,
|
|
37837
|
+
sessionId,
|
|
37838
|
+
triggerTurnId: state.turnId,
|
|
37839
|
+
repoRoot: repo.repoRoot,
|
|
37840
|
+
api
|
|
37841
|
+
}).catch(() => void 0);
|
|
37842
|
+
}
|
|
36494
37843
|
let hadFailure = false;
|
|
36495
37844
|
let queuedFinalizeWork = false;
|
|
36496
37845
|
let anyRecorded = false;
|
|
36497
37846
|
let anyTurnExists = false;
|
|
37847
|
+
const failures = [];
|
|
36498
37848
|
for (const repo of touchedRepos) {
|
|
36499
37849
|
if (shouldSkipStopRecording(repo)) {
|
|
36500
37850
|
const legacyMcpFinalizeQueued = repo.manuallyRecordedByTool === "remix_collab_finalize_turn" && repo.manualRecordingScope === "full_turn";
|
|
@@ -36541,31 +37891,11 @@ async function runHookStopCollab(payload) {
|
|
|
36541
37891
|
if (recording.recorded) {
|
|
36542
37892
|
anyRecorded = true;
|
|
36543
37893
|
anyTurnExists = true;
|
|
36544
|
-
const autoSpawn = maybeAutoSpawnHistoryImportFromStopHook(repo.repoRoot);
|
|
36545
|
-
if (autoSpawn.spawned) {
|
|
36546
|
-
await appendHookDiagnosticsEvent({
|
|
36547
|
-
hook,
|
|
36548
|
-
sessionId,
|
|
36549
|
-
turnId: state.turnId,
|
|
36550
|
-
stage: "history_import_auto_spawned_from_stop",
|
|
36551
|
-
result: "success",
|
|
36552
|
-
repoRoot: repo.repoRoot,
|
|
36553
|
-
fields: { pid: autoSpawn.pid ?? null, logPath: autoSpawn.logPath }
|
|
36554
|
-
});
|
|
36555
|
-
} else if (autoSpawn.reason !== "marker_present") {
|
|
36556
|
-
await appendHookDiagnosticsEvent({
|
|
36557
|
-
hook,
|
|
36558
|
-
sessionId,
|
|
36559
|
-
turnId: state.turnId,
|
|
36560
|
-
stage: "history_import_auto_spawn_skipped",
|
|
36561
|
-
result: "info",
|
|
36562
|
-
reason: autoSpawn.reason,
|
|
36563
|
-
repoRoot: repo.repoRoot,
|
|
36564
|
-
message: autoSpawn.message ?? null
|
|
36565
|
-
});
|
|
36566
|
-
}
|
|
36567
37894
|
} else {
|
|
36568
37895
|
hadFailure = true;
|
|
37896
|
+
if (recording.failure) {
|
|
37897
|
+
failures.push(recording.failure);
|
|
37898
|
+
}
|
|
36569
37899
|
}
|
|
36570
37900
|
}
|
|
36571
37901
|
if (anyRecorded || anyTurnExists) {
|
|
@@ -36574,7 +37904,64 @@ async function runHookStopCollab(payload) {
|
|
|
36574
37904
|
if (queuedFinalizeWork) {
|
|
36575
37905
|
spawnFinalizeQueueDrainer();
|
|
36576
37906
|
}
|
|
36577
|
-
|
|
37907
|
+
let deferredFailureCount = 0;
|
|
37908
|
+
let dispatchFailureCount = 0;
|
|
37909
|
+
for (const failure of failures) {
|
|
37910
|
+
if (failure.isTransient) {
|
|
37911
|
+
const deferredFilePath = await deferTurnForTransientFailure({
|
|
37912
|
+
hook,
|
|
37913
|
+
sessionId,
|
|
37914
|
+
turnId: state.turnId,
|
|
37915
|
+
repoRoot: failure.repoRoot,
|
|
37916
|
+
prompt,
|
|
37917
|
+
assistantResponse,
|
|
37918
|
+
submittedAt: state.submittedAt,
|
|
37919
|
+
failureMessage: failure.message
|
|
37920
|
+
});
|
|
37921
|
+
if (deferredFilePath) {
|
|
37922
|
+
deferredFailureCount += 1;
|
|
37923
|
+
}
|
|
37924
|
+
continue;
|
|
37925
|
+
}
|
|
37926
|
+
const outcome = await dispatchFinalizeFailure({
|
|
37927
|
+
hook: "Stop",
|
|
37928
|
+
sessionId,
|
|
37929
|
+
turnId: state.turnId,
|
|
37930
|
+
repoRoot: failure.repoRoot,
|
|
37931
|
+
preflightCode: failure.preflightCode,
|
|
37932
|
+
message: failure.message,
|
|
37933
|
+
hint: failure.hint
|
|
37934
|
+
}).catch((dispatchErr) => {
|
|
37935
|
+
dispatchFailureCount += 1;
|
|
37936
|
+
return appendHookDiagnosticsEvent({
|
|
37937
|
+
hook,
|
|
37938
|
+
sessionId,
|
|
37939
|
+
turnId: state.turnId,
|
|
37940
|
+
stage: "auto_fix_dispatch_failed",
|
|
37941
|
+
result: "error",
|
|
37942
|
+
reason: "exception",
|
|
37943
|
+
repoRoot: failure.repoRoot,
|
|
37944
|
+
message: dispatchErr instanceof Error ? dispatchErr.message : String(dispatchErr)
|
|
37945
|
+
}).then(() => null);
|
|
37946
|
+
});
|
|
37947
|
+
if (outcome && isAutoFixableFinalizeFailureCode(failure.preflightCode) && (outcome.kind === "spawned" || outcome.kind === "spawn_throttled")) {
|
|
37948
|
+
const deferredFilePath = await deferTurnForRecoveryInProgress({
|
|
37949
|
+
hook,
|
|
37950
|
+
sessionId,
|
|
37951
|
+
turnId: state.turnId,
|
|
37952
|
+
repoRoot: failure.repoRoot,
|
|
37953
|
+
prompt,
|
|
37954
|
+
assistantResponse,
|
|
37955
|
+
submittedAt: state.submittedAt,
|
|
37956
|
+
preflightCode: failure.preflightCode
|
|
37957
|
+
});
|
|
37958
|
+
if (deferredFilePath) {
|
|
37959
|
+
deferredFailureCount += 1;
|
|
37960
|
+
}
|
|
37961
|
+
}
|
|
37962
|
+
}
|
|
37963
|
+
const allFailuresDeferred = failures.length > 0 && deferredFailureCount === failures.length && dispatchFailureCount === 0;
|
|
37964
|
+
if (!hadFailure || allFailuresDeferred) {
|
|
36578
37965
|
await clearPendingTurnState(sessionId);
|
|
36579
37966
|
await appendHookDiagnosticsEvent({
|
|
36580
37967
|
hook,
|
|
@@ -36582,7 +37969,10 @@ async function runHookStopCollab(payload) {
|
|
|
36582
37969
|
turnId: state.turnId,
|
|
36583
37970
|
stage: "state_cleanup",
|
|
36584
37971
|
result: "success",
|
|
36585
|
-
reason: "cleared_after_success"
|
|
37972
|
+
reason: allFailuresDeferred ? "cleared_after_recovery_defer" : "cleared_after_success",
|
|
37973
|
+
fields: allFailuresDeferred ? {
|
|
37974
|
+
deferredFailureCount
|
|
37975
|
+
} : void 0
|
|
36586
37976
|
});
|
|
36587
37977
|
return;
|
|
36588
37978
|
}
|
|
@@ -36596,7 +37986,7 @@ async function runHookStopCollab(payload) {
|
|
|
36596
37986
|
});
|
|
36597
37987
|
} catch (error) {
|
|
36598
37988
|
const details = getErrorDetails(error);
|
|
36599
|
-
await markPendingTurnFailure(sessionId, details);
|
|
37989
|
+
await markPendingTurnFailure(sessionId, { message: details.message, hint: details.hint });
|
|
36600
37990
|
await appendHookDiagnosticsEvent({
|
|
36601
37991
|
hook,
|
|
36602
37992
|
sessionId,
|
|
@@ -36606,17 +37996,21 @@ async function runHookStopCollab(payload) {
|
|
|
36606
37996
|
reason: "exception",
|
|
36607
37997
|
message: details.message,
|
|
36608
37998
|
fields: {
|
|
36609
|
-
hint: details.hint
|
|
37999
|
+
hint: details.hint,
|
|
38000
|
+
preflightCode: details.preflightCode
|
|
36610
38001
|
}
|
|
36611
38002
|
});
|
|
36612
38003
|
}
|
|
36613
38004
|
}
|
|
36614
38005
|
async function main() {
|
|
38006
|
+
if (await maybeRunDeferredTurnDrainerFromArgv()) {
|
|
38007
|
+
return;
|
|
38008
|
+
}
|
|
36615
38009
|
if (process.argv.includes("--drain-finalize-queue")) {
|
|
36616
38010
|
const api = await createHookCollabApiClient();
|
|
36617
|
-
const
|
|
36618
|
-
if (typeof
|
|
36619
|
-
await
|
|
38011
|
+
const drainPendingFinalizeQueue3 = drainPendingFinalizeQueue;
|
|
38012
|
+
if (typeof drainPendingFinalizeQueue3 === "function") {
|
|
38013
|
+
await drainPendingFinalizeQueue3({ api });
|
|
36620
38014
|
}
|
|
36621
38015
|
return;
|
|
36622
38016
|
}
|