@remixhq/claude-plugin 0.1.18 → 0.1.19

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -37,8 +37,8 @@ var require_windows = __commonJS({
37
37
  "use strict";
38
38
  module2.exports = isexe;
39
39
  isexe.sync = sync;
40
- var fs8 = require("fs");
41
- function checkPathExt(path12, options) {
40
+ var fs10 = require("fs");
41
+ function checkPathExt(path13, options) {
42
42
  var pathext = options.pathExt !== void 0 ? options.pathExt : process.env.PATHEXT;
43
43
  if (!pathext) {
44
44
  return true;
@@ -49,25 +49,25 @@ var require_windows = __commonJS({
49
49
  }
50
50
  for (var i2 = 0; i2 < pathext.length; i2++) {
51
51
  var p = pathext[i2].toLowerCase();
52
- if (p && path12.substr(-p.length).toLowerCase() === p) {
52
+ if (p && path13.substr(-p.length).toLowerCase() === p) {
53
53
  return true;
54
54
  }
55
55
  }
56
56
  return false;
57
57
  }
58
- function checkStat(stat, path12, options) {
58
+ function checkStat(stat, path13, options) {
59
59
  if (!stat.isSymbolicLink() && !stat.isFile()) {
60
60
  return false;
61
61
  }
62
- return checkPathExt(path12, options);
62
+ return checkPathExt(path13, options);
63
63
  }
64
- function isexe(path12, options, cb) {
65
- fs8.stat(path12, function(er, stat) {
66
- cb(er, er ? false : checkStat(stat, path12, options));
64
+ function isexe(path13, options, cb) {
65
+ fs10.stat(path13, function(er, stat) {
66
+ cb(er, er ? false : checkStat(stat, path13, options));
67
67
  });
68
68
  }
69
- function sync(path12, options) {
70
- return checkStat(fs8.statSync(path12), path12, options);
69
+ function sync(path13, options) {
70
+ return checkStat(fs10.statSync(path13), path13, options);
71
71
  }
72
72
  }
73
73
  });
@@ -78,14 +78,14 @@ var require_mode = __commonJS({
78
78
  "use strict";
79
79
  module2.exports = isexe;
80
80
  isexe.sync = sync;
81
- var fs8 = require("fs");
82
- function isexe(path12, options, cb) {
83
- fs8.stat(path12, function(er, stat) {
81
+ var fs10 = require("fs");
82
+ function isexe(path13, options, cb) {
83
+ fs10.stat(path13, function(er, stat) {
84
84
  cb(er, er ? false : checkStat(stat, options));
85
85
  });
86
86
  }
87
- function sync(path12, options) {
88
- return checkStat(fs8.statSync(path12), options);
87
+ function sync(path13, options) {
88
+ return checkStat(fs10.statSync(path13), options);
89
89
  }
90
90
  function checkStat(stat, options) {
91
91
  return stat.isFile() && checkMode(stat, options);
@@ -110,7 +110,7 @@ var require_mode = __commonJS({
110
110
  var require_isexe = __commonJS({
111
111
  "node_modules/isexe/index.js"(exports2, module2) {
112
112
  "use strict";
113
- var fs8 = require("fs");
113
+ var fs10 = require("fs");
114
114
  var core;
115
115
  if (process.platform === "win32" || global.TESTING_WINDOWS) {
116
116
  core = require_windows();
@@ -119,7 +119,7 @@ var require_isexe = __commonJS({
119
119
  }
120
120
  module2.exports = isexe;
121
121
  isexe.sync = sync;
122
- function isexe(path12, options, cb) {
122
+ function isexe(path13, options, cb) {
123
123
  if (typeof options === "function") {
124
124
  cb = options;
125
125
  options = {};
@@ -129,7 +129,7 @@ var require_isexe = __commonJS({
129
129
  throw new TypeError("callback not provided");
130
130
  }
131
131
  return new Promise(function(resolve, reject) {
132
- isexe(path12, options || {}, function(er, is) {
132
+ isexe(path13, options || {}, function(er, is) {
133
133
  if (er) {
134
134
  reject(er);
135
135
  } else {
@@ -138,7 +138,7 @@ var require_isexe = __commonJS({
138
138
  });
139
139
  });
140
140
  }
141
- core(path12, options || {}, function(er, is) {
141
+ core(path13, options || {}, function(er, is) {
142
142
  if (er) {
143
143
  if (er.code === "EACCES" || options && options.ignoreErrors) {
144
144
  er = null;
@@ -148,9 +148,9 @@ var require_isexe = __commonJS({
148
148
  cb(er, is);
149
149
  });
150
150
  }
151
- function sync(path12, options) {
151
+ function sync(path13, options) {
152
152
  try {
153
- return core.sync(path12, options || {});
153
+ return core.sync(path13, options || {});
154
154
  } catch (er) {
155
155
  if (options && options.ignoreErrors || er.code === "EACCES") {
156
156
  return false;
@@ -167,7 +167,7 @@ var require_which = __commonJS({
167
167
  "node_modules/which/which.js"(exports2, module2) {
168
168
  "use strict";
169
169
  var isWindows = process.platform === "win32" || process.env.OSTYPE === "cygwin" || process.env.OSTYPE === "msys";
170
- var path12 = require("path");
170
+ var path13 = require("path");
171
171
  var COLON = isWindows ? ";" : ":";
172
172
  var isexe = require_isexe();
173
173
  var getNotFoundError = (cmd) => Object.assign(new Error(`not found: ${cmd}`), { code: "ENOENT" });
@@ -205,7 +205,7 @@ var require_which = __commonJS({
205
205
  return opt.all && found.length ? resolve(found) : reject(getNotFoundError(cmd));
206
206
  const ppRaw = pathEnv[i2];
207
207
  const pathPart = /^".*"$/.test(ppRaw) ? ppRaw.slice(1, -1) : ppRaw;
208
- const pCmd = path12.join(pathPart, cmd);
208
+ const pCmd = path13.join(pathPart, cmd);
209
209
  const p = !pathPart && /^\.[\\\/]/.test(cmd) ? cmd.slice(0, 2) + pCmd : pCmd;
210
210
  resolve(subStep(p, i2, 0));
211
211
  });
@@ -232,7 +232,7 @@ var require_which = __commonJS({
232
232
  for (let i2 = 0; i2 < pathEnv.length; i2++) {
233
233
  const ppRaw = pathEnv[i2];
234
234
  const pathPart = /^".*"$/.test(ppRaw) ? ppRaw.slice(1, -1) : ppRaw;
235
- const pCmd = path12.join(pathPart, cmd);
235
+ const pCmd = path13.join(pathPart, cmd);
236
236
  const p = !pathPart && /^\.[\\\/]/.test(cmd) ? cmd.slice(0, 2) + pCmd : pCmd;
237
237
  for (let j = 0; j < pathExt.length; j++) {
238
238
  const cur = p + pathExt[j];
@@ -280,7 +280,7 @@ var require_path_key = __commonJS({
280
280
  var require_resolveCommand = __commonJS({
281
281
  "node_modules/cross-spawn/lib/util/resolveCommand.js"(exports2, module2) {
282
282
  "use strict";
283
- var path12 = require("path");
283
+ var path13 = require("path");
284
284
  var which = require_which();
285
285
  var getPathKey = require_path_key();
286
286
  function resolveCommandAttempt(parsed, withoutPathExt) {
@@ -298,7 +298,7 @@ var require_resolveCommand = __commonJS({
298
298
  try {
299
299
  resolved = which.sync(parsed.command, {
300
300
  path: env[getPathKey({ env })],
301
- pathExt: withoutPathExt ? path12.delimiter : void 0
301
+ pathExt: withoutPathExt ? path13.delimiter : void 0
302
302
  });
303
303
  } catch (e) {
304
304
  } finally {
@@ -307,7 +307,7 @@ var require_resolveCommand = __commonJS({
307
307
  }
308
308
  }
309
309
  if (resolved) {
310
- resolved = path12.resolve(hasCustomCwd ? parsed.options.cwd : "", resolved);
310
+ resolved = path13.resolve(hasCustomCwd ? parsed.options.cwd : "", resolved);
311
311
  }
312
312
  return resolved;
313
313
  }
@@ -361,8 +361,8 @@ var require_shebang_command = __commonJS({
361
361
  if (!match) {
362
362
  return null;
363
363
  }
364
- const [path12, argument] = match[0].replace(/#! ?/, "").split(" ");
365
- const binary = path12.split("/").pop();
364
+ const [path13, argument] = match[0].replace(/#! ?/, "").split(" ");
365
+ const binary = path13.split("/").pop();
366
366
  if (binary === "env") {
367
367
  return argument;
368
368
  }
@@ -375,16 +375,16 @@ var require_shebang_command = __commonJS({
375
375
  var require_readShebang = __commonJS({
376
376
  "node_modules/cross-spawn/lib/util/readShebang.js"(exports2, module2) {
377
377
  "use strict";
378
- var fs8 = require("fs");
378
+ var fs10 = require("fs");
379
379
  var shebangCommand = require_shebang_command();
380
380
  function readShebang(command) {
381
381
  const size = 150;
382
382
  const buffer = Buffer.alloc(size);
383
383
  let fd;
384
384
  try {
385
- fd = fs8.openSync(command, "r");
386
- fs8.readSync(fd, buffer, 0, size, 0);
387
- fs8.closeSync(fd);
385
+ fd = fs10.openSync(command, "r");
386
+ fs10.readSync(fd, buffer, 0, size, 0);
387
+ fs10.closeSync(fd);
388
388
  } catch (e) {
389
389
  }
390
390
  return shebangCommand(buffer.toString());
@@ -397,7 +397,7 @@ var require_readShebang = __commonJS({
397
397
  var require_parse = __commonJS({
398
398
  "node_modules/cross-spawn/lib/parse.js"(exports2, module2) {
399
399
  "use strict";
400
- var path12 = require("path");
400
+ var path13 = require("path");
401
401
  var resolveCommand = require_resolveCommand();
402
402
  var escape = require_escape();
403
403
  var readShebang = require_readShebang();
@@ -422,7 +422,7 @@ var require_parse = __commonJS({
422
422
  const needsShell = !isExecutableRegExp.test(commandFile);
423
423
  if (parsed.options.forceShell || needsShell) {
424
424
  const needsDoubleEscapeMetaChars = isCmdShimRegExp.test(commandFile);
425
- parsed.command = path12.normalize(parsed.command);
425
+ parsed.command = path13.normalize(parsed.command);
426
426
  parsed.command = escape.command(parsed.command);
427
427
  parsed.args = parsed.args.map((arg) => escape.argument(arg, needsDoubleEscapeMetaChars));
428
428
  const shellCommand = [parsed.command].concat(parsed.args).join(" ");
@@ -518,7 +518,7 @@ var require_cross_spawn = __commonJS({
518
518
  enoent.hookChildProcess(spawned, parsed);
519
519
  return spawned;
520
520
  }
521
- function spawnSync2(command, args, options) {
521
+ function spawnSync3(command, args, options) {
522
522
  const parsed = parse(command, args, options);
523
523
  const result = cp.spawnSync(parsed.command, parsed.args, parsed.options);
524
524
  result.error = result.error || enoent.verifyENOENTSync(result.status, parsed);
@@ -526,7 +526,7 @@ var require_cross_spawn = __commonJS({
526
526
  }
527
527
  module2.exports = spawn3;
528
528
  module2.exports.spawn = spawn3;
529
- module2.exports.sync = spawnSync2;
529
+ module2.exports.sync = spawnSync3;
530
530
  module2.exports._parse = parse;
531
531
  module2.exports._enoent = enoent;
532
532
  }
@@ -538,7 +538,9 @@ __export(hook_stop_collab_exports, {
538
538
  runHookStopCollab: () => runHookStopCollab
539
539
  });
540
540
  module.exports = __toCommonJS(hook_stop_collab_exports);
541
- var import_node_child_process6 = require("child_process");
541
+ var import_node_child_process7 = require("child_process");
542
+ var import_node_fs7 = require("fs");
543
+ var import_node_path10 = __toESM(require("path"), 1);
542
544
 
543
545
  // node_modules/@remixhq/core/dist/chunk-YZ34ICNN.js
544
546
  var RemixError = class extends Error {
@@ -4935,13 +4937,13 @@ var logOutputSync = ({ serializedResult, fdNumber, state, verboseInfo, encoding,
4935
4937
  }
4936
4938
  };
4937
4939
  var writeToFiles = (serializedResult, stdioItems, outputFiles) => {
4938
- for (const { path: path12, append } of stdioItems.filter(({ type }) => FILE_TYPES.has(type))) {
4939
- const pathString = typeof path12 === "string" ? path12 : path12.toString();
4940
+ for (const { path: path13, append } of stdioItems.filter(({ type }) => FILE_TYPES.has(type))) {
4941
+ const pathString = typeof path13 === "string" ? path13 : path13.toString();
4940
4942
  if (append || outputFiles.has(pathString)) {
4941
- (0, import_node_fs4.appendFileSync)(path12, serializedResult);
4943
+ (0, import_node_fs4.appendFileSync)(path13, serializedResult);
4942
4944
  } else {
4943
4945
  outputFiles.add(pathString);
4944
- (0, import_node_fs4.writeFileSync)(path12, serializedResult);
4946
+ (0, import_node_fs4.writeFileSync)(path13, serializedResult);
4945
4947
  }
4946
4948
  }
4947
4949
  };
@@ -7662,18 +7664,84 @@ async function writeCollabBinding(repoRoot, binding) {
7662
7664
 
7663
7665
  // node_modules/@remixhq/core/dist/collab.js
7664
7666
  var import_promises14 = __toESM(require("fs/promises"), 1);
7665
- var import_path3 = __toESM(require("path"), 1);
7666
7667
  var import_crypto = require("crypto");
7667
7668
  var import_os = __toESM(require("os"), 1);
7669
+ var import_path3 = __toESM(require("path"), 1);
7670
+ var import_promises15 = __toESM(require("fs/promises"), 1);
7668
7671
  var import_path4 = __toESM(require("path"), 1);
7669
7672
  var import_crypto2 = require("crypto");
7670
- var import_promises15 = __toESM(require("fs/promises"), 1);
7673
+ var import_promises16 = __toESM(require("fs/promises"), 1);
7671
7674
  var import_os2 = __toESM(require("os"), 1);
7672
7675
  var import_path5 = __toESM(require("path"), 1);
7673
7676
  var import_crypto3 = require("crypto");
7674
- var import_promises16 = __toESM(require("fs/promises"), 1);
7677
+ var import_promises17 = __toESM(require("fs/promises"), 1);
7675
7678
  var import_path6 = __toESM(require("path"), 1);
7676
7679
  var import_crypto4 = require("crypto");
7680
+ var APP_DELTA_CACHE_TTL_MS = 5e3;
7681
+ var appDeltaCache = /* @__PURE__ */ new Map();
7682
+ var cacheClock = () => Date.now();
7683
+ function buildAppDeltaCacheKey(appId, payload) {
7684
+ return [
7685
+ appId,
7686
+ payload.baseHeadHash,
7687
+ payload.targetHeadHash ?? "",
7688
+ payload.localSnapshotHash ?? "",
7689
+ payload.repoFingerprint ?? "",
7690
+ payload.remoteUrl ?? "",
7691
+ payload.defaultBranch ?? ""
7692
+ ].join("|");
7693
+ }
7694
+ function readAppDeltaCache(key) {
7695
+ const entry = appDeltaCache.get(key);
7696
+ if (!entry) return void 0;
7697
+ if (cacheClock() - entry.cachedAt > APP_DELTA_CACHE_TTL_MS) {
7698
+ appDeltaCache.delete(key);
7699
+ return void 0;
7700
+ }
7701
+ return entry.value;
7702
+ }
7703
+ function writeAppDeltaCache(key, value) {
7704
+ appDeltaCache.set(key, { value, cachedAt: cacheClock() });
7705
+ }
7706
+ async function getAppDeltaCached(api, appId, payload) {
7707
+ const key = buildAppDeltaCacheKey(appId, payload);
7708
+ const cached = readAppDeltaCache(key);
7709
+ if (cached !== void 0) return cached;
7710
+ const fresh = await api.getAppDelta(appId, payload);
7711
+ writeAppDeltaCache(key, fresh);
7712
+ return fresh;
7713
+ }
7714
+ function invalidateAppDeltaCacheForApp(appId) {
7715
+ const prefix = `${appId}|`;
7716
+ for (const key of appDeltaCache.keys()) {
7717
+ if (key.startsWith(prefix)) appDeltaCache.delete(key);
7718
+ }
7719
+ }
7720
+ var APP_HEAD_CACHE_TTL_MS = 5e3;
7721
+ var appHeadCache = /* @__PURE__ */ new Map();
7722
+ var cacheClock2 = () => Date.now();
7723
+ function readAppHeadCache(appId) {
7724
+ const entry = appHeadCache.get(appId);
7725
+ if (!entry) return void 0;
7726
+ if (cacheClock2() - entry.cachedAt > APP_HEAD_CACHE_TTL_MS) {
7727
+ appHeadCache.delete(appId);
7728
+ return void 0;
7729
+ }
7730
+ return entry.value;
7731
+ }
7732
+ function writeAppHeadCache(appId, value) {
7733
+ appHeadCache.set(appId, { value, cachedAt: cacheClock2() });
7734
+ }
7735
+ async function getAppHeadCached(api, appId) {
7736
+ const cached = readAppHeadCache(appId);
7737
+ if (cached !== void 0) return cached;
7738
+ const fresh = await api.getAppHead(appId);
7739
+ writeAppHeadCache(appId, fresh);
7740
+ return fresh;
7741
+ }
7742
+ function invalidateAppHeadCache(appId) {
7743
+ appHeadCache.delete(appId);
7744
+ }
7677
7745
  function describeBranch(value) {
7678
7746
  const normalized = String(value ?? "").trim();
7679
7747
  return normalized || "(detached)";
@@ -7697,7 +7765,7 @@ function sha256Hex(value) {
7697
7765
  }
7698
7766
  function getCollabStateRoot() {
7699
7767
  const configured = process.env.REMIX_COLLAB_STATE_ROOT?.trim();
7700
- return configured || import_path4.default.join(import_os.default.homedir(), ".remix", "collab-state");
7768
+ return configured || import_path3.default.join(import_os.default.homedir(), ".remix", "collab-state");
7701
7769
  }
7702
7770
  function buildLaneStateKey(params) {
7703
7771
  const fingerprint = params.repoFingerprint?.trim();
@@ -7709,26 +7777,158 @@ function buildLaneStateKey(params) {
7709
7777
  return sha256Hex(`${stableSource}::${fingerprintSource}::${laneSource}`);
7710
7778
  }
7711
7779
  function getSnapshotsRoot() {
7712
- return import_path4.default.join(getCollabStateRoot(), "snapshots");
7780
+ return import_path3.default.join(getCollabStateRoot(), "snapshots");
7713
7781
  }
7714
7782
  function getSnapshotRecordsRoot() {
7715
- return import_path4.default.join(getSnapshotsRoot(), "records");
7783
+ return import_path3.default.join(getSnapshotsRoot(), "records");
7716
7784
  }
7717
7785
  function getSnapshotBlobsRoot() {
7718
- return import_path4.default.join(getSnapshotsRoot(), "blobs");
7786
+ return import_path3.default.join(getSnapshotsRoot(), "blobs");
7719
7787
  }
7720
7788
  function getBaselinesRoot() {
7721
- return import_path4.default.join(getCollabStateRoot(), "baselines");
7789
+ return import_path3.default.join(getCollabStateRoot(), "baselines");
7722
7790
  }
7723
7791
  function getFinalizeQueueRoot() {
7724
- return import_path4.default.join(getCollabStateRoot(), "finalize-queue");
7792
+ return import_path3.default.join(getCollabStateRoot(), "finalize-queue");
7793
+ }
7794
+ function getAsyncJobsRoot() {
7795
+ return import_path3.default.join(getCollabStateRoot(), "jobs");
7796
+ }
7797
+ function getAsyncJobDir(jobId) {
7798
+ return import_path3.default.join(getAsyncJobsRoot(), jobId);
7799
+ }
7800
+ function getAsyncJobFilePath(jobId) {
7801
+ return import_path3.default.join(getAsyncJobDir(jobId), "job.json");
7802
+ }
7803
+ var ASYNC_JOB_LOCK_STALE_MS = 10 * 60 * 1e3;
7804
+ var TERMINAL_ASYNC_JOB_RETENTION_MS = 24 * 60 * 60 * 1e3;
7805
+ function isTerminalStatus(status) {
7806
+ return status === "completed" || status === "failed";
7807
+ }
7808
+ function isProcessingStatus(status) {
7809
+ return status === "submitting" || status === "uploading" || status === "server_processing";
7810
+ }
7811
+ function createEmptySummary() {
7812
+ return {
7813
+ state: "idle",
7814
+ activeJobCount: 0,
7815
+ queuedJobCount: 0,
7816
+ processingJobCount: 0,
7817
+ failedJobCount: 0,
7818
+ oldestCreatedAt: null,
7819
+ newestCreatedAt: null,
7820
+ latestError: null,
7821
+ kinds: []
7822
+ };
7823
+ }
7824
+ function parseAsyncJob(raw) {
7825
+ if (!raw || typeof raw !== "object") return null;
7826
+ const data = raw;
7827
+ if (data.schemaVersion !== 1) return null;
7828
+ if (typeof data.id !== "string") return null;
7829
+ if (data.kind !== "init" && data.kind !== "init_post" && data.kind !== "re_anchor") return null;
7830
+ if (!data.payload || typeof data.payload !== "object") return null;
7831
+ return data;
7832
+ }
7833
+ async function readAsyncJob(jobId) {
7834
+ try {
7835
+ const raw = await import_promises14.default.readFile(getAsyncJobFilePath(jobId), "utf8");
7836
+ const parsed = JSON.parse(raw);
7837
+ return parseAsyncJob(parsed);
7838
+ } catch {
7839
+ return null;
7840
+ }
7841
+ }
7842
+ async function listAsyncJobs() {
7843
+ const root = getAsyncJobsRoot();
7844
+ let entries;
7845
+ try {
7846
+ entries = await import_promises14.default.readdir(root, { withFileTypes: true });
7847
+ } catch (error) {
7848
+ if (error?.code === "ENOENT") return [];
7849
+ throw error;
7850
+ }
7851
+ const jobs = await Promise.all(
7852
+ entries.filter((entry) => entry.isDirectory()).map((entry) => readAsyncJob(entry.name))
7853
+ );
7854
+ return jobs.filter((job) => Boolean(job)).sort((a2, b) => a2.createdAt.localeCompare(b.createdAt));
7855
+ }
7856
+ async function listAsyncJobsForRepo(params) {
7857
+ const jobs = await listAsyncJobs();
7858
+ return jobs.filter((job) => {
7859
+ if (job.repoRoot !== params.repoRoot) return false;
7860
+ if (params.branchName && job.branchName && job.branchName !== params.branchName) return false;
7861
+ if (params.kind && job.kind !== params.kind) return false;
7862
+ return true;
7863
+ });
7864
+ }
7865
+ async function findPendingAsyncJob(params) {
7866
+ const jobs = await listAsyncJobsForRepo({
7867
+ repoRoot: params.repoRoot,
7868
+ branchName: params.branchName,
7869
+ kind: params.kind
7870
+ });
7871
+ return jobs.find((job) => !isTerminalStatus(job.status)) ?? null;
7872
+ }
7873
+ async function summarizeAsyncJobs(params) {
7874
+ const jobs = await listAsyncJobsForRepo({ repoRoot: params.repoRoot, branchName: params.branchName ?? null });
7875
+ const active = jobs.filter((job) => !isTerminalStatus(job.status) || job.status === "failed");
7876
+ const summary = createEmptySummary();
7877
+ if (active.length === 0) return summary;
7878
+ const kinds = /* @__PURE__ */ new Set();
7879
+ for (const job of active) {
7880
+ kinds.add(job.kind);
7881
+ if (job.error) summary.latestError = job.error;
7882
+ if (job.status === "failed") {
7883
+ summary.failedJobCount += 1;
7884
+ } else if (isProcessingStatus(job.status)) {
7885
+ summary.processingJobCount += 1;
7886
+ } else {
7887
+ summary.queuedJobCount += 1;
7888
+ }
7889
+ }
7890
+ summary.activeJobCount = summary.queuedJobCount + summary.processingJobCount;
7891
+ summary.kinds = Array.from(kinds);
7892
+ summary.oldestCreatedAt = active[0]?.createdAt ?? null;
7893
+ summary.newestCreatedAt = active[active.length - 1]?.createdAt ?? null;
7894
+ if (summary.processingJobCount > 0) summary.state = "processing";
7895
+ else if (summary.queuedJobCount > 0) summary.state = "queued";
7896
+ else if (summary.failedJobCount > 0) summary.state = "failed";
7897
+ return summary;
7898
+ }
7899
+ async function awaitAsyncJob(params) {
7900
+ const pollIntervalMs = Math.max(50, params.pollIntervalMs ?? 500);
7901
+ const deadline = Date.now() + params.timeoutMs;
7902
+ let lastSeen = null;
7903
+ while (Date.now() < deadline) {
7904
+ const job = await readAsyncJob(params.jobId);
7905
+ if (job) {
7906
+ lastSeen = job;
7907
+ if (job.status === "completed") return { status: "completed", job };
7908
+ if (job.status === "failed") return { status: "failed", job };
7909
+ } else if (lastSeen) {
7910
+ return { status: "timeout", job: lastSeen };
7911
+ } else {
7912
+ return { status: "timeout", job: null };
7913
+ }
7914
+ const remaining = deadline - Date.now();
7915
+ if (remaining <= 0) break;
7916
+ await new Promise((resolve) => setTimeout(resolve, Math.min(pollIntervalMs, remaining)));
7917
+ }
7918
+ const final = await readAsyncJob(params.jobId);
7919
+ if (final) {
7920
+ if (final.status === "completed") return { status: "completed", job: final };
7921
+ if (final.status === "failed") return { status: "failed", job: final };
7922
+ return { status: "timeout", job: final };
7923
+ }
7924
+ return { status: "timeout", job: lastSeen };
7725
7925
  }
7726
7926
  function getBaselinePath(params) {
7727
- return import_path3.default.join(getBaselinesRoot(), `${buildLaneStateKey(params)}.json`);
7927
+ return import_path4.default.join(getBaselinesRoot(), `${buildLaneStateKey(params)}.json`);
7728
7928
  }
7729
7929
  async function readLocalBaseline(params) {
7730
7930
  try {
7731
- const raw = await import_promises14.default.readFile(getBaselinePath(params), "utf8");
7931
+ const raw = await import_promises15.default.readFile(getBaselinePath(params), "utf8");
7732
7932
  const parsed = JSON.parse(raw);
7733
7933
  if (!parsed || typeof parsed !== "object") return null;
7734
7934
  if (parsed.schemaVersion !== 1 || typeof parsed.key !== "string" || typeof parsed.repoRoot !== "string") {
@@ -7797,7 +7997,7 @@ async function listWorkspaceFiles(repoRoot) {
7797
7997
  if (!relativePath || seen.has(relativePath)) continue;
7798
7998
  const absolutePath = import_path5.default.join(repoRoot, relativePath);
7799
7999
  try {
7800
- const stat = await import_promises15.default.lstat(absolutePath);
8000
+ const stat = await import_promises16.default.lstat(absolutePath);
7801
8001
  if (stat.isFile() || stat.isSymbolicLink()) {
7802
8002
  seen.add(relativePath);
7803
8003
  result.push(relativePath);
@@ -7810,13 +8010,13 @@ async function listWorkspaceFiles(repoRoot) {
7810
8010
  async function persistBlob(blobHash, content) {
7811
8011
  const blobPath = getBlobPath(blobHash);
7812
8012
  try {
7813
- await import_promises15.default.access(blobPath);
8013
+ await import_promises16.default.access(blobPath);
7814
8014
  } catch {
7815
- await import_promises15.default.mkdir(import_path5.default.dirname(blobPath), { recursive: true });
8015
+ await import_promises16.default.mkdir(import_path5.default.dirname(blobPath), { recursive: true });
7816
8016
  if (typeof content === "string") {
7817
- await import_promises15.default.writeFile(blobPath, content, "utf8");
8017
+ await import_promises16.default.writeFile(blobPath, content, "utf8");
7818
8018
  } else {
7819
- await import_promises15.default.writeFile(blobPath, content);
8019
+ await import_promises16.default.writeFile(blobPath, content);
7820
8020
  }
7821
8021
  }
7822
8022
  }
@@ -7830,9 +8030,9 @@ async function inspectLocalSnapshot(params) {
7830
8030
  const manifest = [];
7831
8031
  for (const relativePath of files) {
7832
8032
  const absolutePath = import_path5.default.join(repoRoot, relativePath);
7833
- const stat = await import_promises15.default.lstat(absolutePath);
8033
+ const stat = await import_promises16.default.lstat(absolutePath);
7834
8034
  if (stat.isSymbolicLink()) {
7835
- const linkTarget = await import_promises15.default.readlink(absolutePath);
8035
+ const linkTarget = await import_promises16.default.readlink(absolutePath);
7836
8036
  const blobHash2 = sha256Hex2(`symlink:${linkTarget}`);
7837
8037
  if (params.persistBlobs !== false) {
7838
8038
  await persistBlob(blobHash2, linkTarget);
@@ -7845,7 +8045,7 @@ async function inspectLocalSnapshot(params) {
7845
8045
  });
7846
8046
  continue;
7847
8047
  }
7848
- const content = await import_promises15.default.readFile(absolutePath);
8048
+ const content = await import_promises16.default.readFile(absolutePath);
7849
8049
  const blobHash = sha256Hex2(content);
7850
8050
  if (params.persistBlobs !== false) {
7851
8051
  await persistBlob(blobHash, content);
@@ -7882,7 +8082,7 @@ async function captureLocalSnapshot(params) {
7882
8082
  async function readLocalSnapshot(snapshotId) {
7883
8083
  if (!snapshotId) return null;
7884
8084
  try {
7885
- const raw = await import_promises15.default.readFile(getSnapshotRecordPath(snapshotId), "utf8");
8085
+ const raw = await import_promises16.default.readFile(getSnapshotRecordPath(snapshotId), "utf8");
7886
8086
  const parsed = JSON.parse(raw);
7887
8087
  if (!parsed || parsed.schemaVersion !== 1) return null;
7888
8088
  return parsed;
@@ -7892,34 +8092,34 @@ async function readLocalSnapshot(snapshotId) {
7892
8092
  }
7893
8093
  async function materializeLocalSnapshot(snapshotId, targetDir) {
7894
8094
  const snapshot = await readLocalSnapshot(snapshotId);
7895
- await import_promises15.default.mkdir(targetDir, { recursive: true });
8095
+ await import_promises16.default.mkdir(targetDir, { recursive: true });
7896
8096
  if (!snapshot) return;
7897
8097
  for (const entry of snapshot.files) {
7898
8098
  const destination = import_path5.default.join(targetDir, entry.path);
7899
- await import_promises15.default.mkdir(import_path5.default.dirname(destination), { recursive: true });
8099
+ await import_promises16.default.mkdir(import_path5.default.dirname(destination), { recursive: true });
7900
8100
  const blobPath = getBlobPath(entry.blobHash);
7901
8101
  if (entry.mode === "symlink") {
7902
- const linkTarget = await import_promises15.default.readFile(blobPath, "utf8");
7903
- await import_promises15.default.symlink(linkTarget, destination);
8102
+ const linkTarget = await import_promises16.default.readFile(blobPath, "utf8");
8103
+ await import_promises16.default.symlink(linkTarget, destination);
7904
8104
  continue;
7905
8105
  }
7906
- await import_promises15.default.copyFile(blobPath, destination);
8106
+ await import_promises16.default.copyFile(blobPath, destination);
7907
8107
  if (entry.mode === "executable") {
7908
- await import_promises15.default.chmod(destination, 493);
8108
+ await import_promises16.default.chmod(destination, 493);
7909
8109
  }
7910
8110
  }
7911
8111
  }
7912
8112
  async function clearDirectoryExceptGit(targetDir) {
7913
- const entries = await import_promises15.default.readdir(targetDir, { withFileTypes: true });
8113
+ const entries = await import_promises16.default.readdir(targetDir, { withFileTypes: true });
7914
8114
  for (const entry of entries) {
7915
8115
  if (entry.name === ".git") continue;
7916
- await import_promises15.default.rm(import_path5.default.join(targetDir, entry.name), { recursive: true, force: true });
8116
+ await import_promises16.default.rm(import_path5.default.join(targetDir, entry.name), { recursive: true, force: true });
7917
8117
  }
7918
8118
  }
7919
8119
  async function diffLocalSnapshots(params) {
7920
- const tempRoot = await import_promises15.default.mkdtemp(import_path5.default.join(import_os2.default.tmpdir(), "remix-snapshot-diff-"));
8120
+ const tempRoot = await import_promises16.default.mkdtemp(import_path5.default.join(import_os2.default.tmpdir(), "remix-snapshot-diff-"));
7921
8121
  const repoDir = import_path5.default.join(tempRoot, "repo");
7922
- await import_promises15.default.mkdir(repoDir, { recursive: true });
8122
+ await import_promises16.default.mkdir(repoDir, { recursive: true });
7923
8123
  try {
7924
8124
  await materializeLocalSnapshot(params.baseSnapshotId, repoDir);
7925
8125
  await execa("git", ["init"], { cwd: repoDir, stderr: "ignore" });
@@ -7955,15 +8155,16 @@ async function diffLocalSnapshots(params) {
7955
8155
  stats: summarizeUnifiedDiff(diff)
7956
8156
  };
7957
8157
  } finally {
7958
- await import_promises15.default.rm(tempRoot, { recursive: true, force: true });
8158
+ await import_promises16.default.rm(tempRoot, { recursive: true, force: true });
7959
8159
  }
7960
8160
  }
7961
- var FINALIZE_JOB_LOCK_STALE_MS = 10 * 60 * 1e3;
8161
+ var FINALIZE_JOB_LOCK_STALE_MS = 90 * 1e3;
8162
+ var FINALIZE_ATTEMPT_STALE_MS = 10 * 60 * 1e3;
7962
8163
  var TERMINAL_FINALIZE_JOB_RETENTION_MS = 24 * 60 * 60 * 1e3;
7963
8164
  function getJobPath(id) {
7964
8165
  return import_path6.default.join(getFinalizeQueueRoot(), `${id}.json`);
7965
8166
  }
7966
- function getJobLockPath(id) {
8167
+ function getJobLockPath2(id) {
7967
8168
  return import_path6.default.join(getFinalizeQueueRoot(), `${id}.lock`);
7968
8169
  }
7969
8170
  function isPastDue(isoTimestamp) {
@@ -7976,7 +8177,7 @@ function isStaleAttempt(job) {
7976
8177
  if (!job.lastAttemptAt) return true;
7977
8178
  const parsed = Date.parse(job.lastAttemptAt);
7978
8179
  if (!Number.isFinite(parsed)) return true;
7979
- return Date.now() - parsed >= FINALIZE_JOB_LOCK_STALE_MS;
8180
+ return Date.now() - parsed >= FINALIZE_ATTEMPT_STALE_MS;
7980
8181
  }
7981
8182
  function readMetadataDisposition(job) {
7982
8183
  const value = job.metadata.failureDisposition;
@@ -8005,6 +8206,7 @@ function createEmptyPendingFinalizeQueueSummary() {
8005
8206
  queuedJobCount: 0,
8006
8207
  processingJobCount: 0,
8007
8208
  retryScheduledJobCount: 0,
8209
+ awaitingUsageJobCount: 0,
8008
8210
  failedJobCount: 0,
8009
8211
  oldestCapturedAt: null,
8010
8212
  newestCapturedAt: null,
@@ -8012,10 +8214,10 @@ function createEmptyPendingFinalizeQueueSummary() {
8012
8214
  latestError: null
8013
8215
  };
8014
8216
  }
8015
- async function acquireJobLock(jobId) {
8016
- const lockPath = getJobLockPath(jobId);
8217
+ async function acquireJobLock2(jobId) {
8218
+ const lockPath = getJobLockPath2(jobId);
8017
8219
  try {
8018
- await import_promises16.default.mkdir(lockPath);
8220
+ await import_promises17.default.mkdir(lockPath);
8019
8221
  return true;
8020
8222
  } catch (error) {
8021
8223
  if (error?.code !== "EEXIST") {
@@ -8023,18 +8225,18 @@ async function acquireJobLock(jobId) {
8023
8225
  }
8024
8226
  }
8025
8227
  try {
8026
- const stat = await import_promises16.default.stat(lockPath);
8228
+ const stat = await import_promises17.default.stat(lockPath);
8027
8229
  if (Date.now() - stat.mtimeMs < FINALIZE_JOB_LOCK_STALE_MS) {
8028
8230
  return false;
8029
8231
  }
8030
- await import_promises16.default.rm(lockPath, { recursive: true, force: true });
8232
+ await import_promises17.default.rm(lockPath, { recursive: true, force: true });
8031
8233
  } catch (error) {
8032
8234
  if (error?.code !== "ENOENT") {
8033
8235
  throw error;
8034
8236
  }
8035
8237
  }
8036
8238
  try {
8037
- await import_promises16.default.mkdir(lockPath);
8239
+ await import_promises17.default.mkdir(lockPath);
8038
8240
  return true;
8039
8241
  } catch (error) {
8040
8242
  if (error?.code === "EEXIST") {
@@ -8043,7 +8245,15 @@ async function acquireJobLock(jobId) {
8043
8245
  throw error;
8044
8246
  }
8045
8247
  }
8046
- function normalizeJob(input) {
8248
+ async function heartbeatJobLock(jobId) {
8249
+ const lockPath = getJobLockPath2(jobId);
8250
+ const now = /* @__PURE__ */ new Date();
8251
+ try {
8252
+ await import_promises17.default.utimes(lockPath, now, now);
8253
+ } catch {
8254
+ }
8255
+ }
8256
+ function normalizeJob2(input) {
8047
8257
  const now = (/* @__PURE__ */ new Date()).toISOString();
8048
8258
  return {
8049
8259
  schemaVersion: 1,
@@ -8071,16 +8281,16 @@ function normalizeJob(input) {
8071
8281
  };
8072
8282
  }
8073
8283
  async function enqueuePendingFinalizeJob(input) {
8074
- const job = normalizeJob(input);
8284
+ const job = normalizeJob2(input);
8075
8285
  await writeJsonAtomic(getJobPath(job.id), job);
8076
8286
  return job;
8077
8287
  }
8078
8288
  async function readPendingFinalizeJob(jobId) {
8079
8289
  try {
8080
- const raw = await import_promises16.default.readFile(getJobPath(jobId), "utf8");
8290
+ const raw = await import_promises17.default.readFile(getJobPath(jobId), "utf8");
8081
8291
  const parsed = JSON.parse(raw);
8082
8292
  if (!parsed || parsed.schemaVersion !== 1 || typeof parsed.id !== "string") return null;
8083
- return normalizeJob({
8293
+ return normalizeJob2({
8084
8294
  id: parsed.id,
8085
8295
  status: parsed.status ?? "queued",
8086
8296
  repoRoot: String(parsed.repoRoot ?? ""),
@@ -8109,7 +8319,7 @@ async function readPendingFinalizeJob(jobId) {
8109
8319
  }
8110
8320
  async function listPendingFinalizeJobs() {
8111
8321
  try {
8112
- const entries = await import_promises16.default.readdir(getFinalizeQueueRoot(), { withFileTypes: true });
8322
+ const entries = await import_promises17.default.readdir(getFinalizeQueueRoot(), { withFileTypes: true });
8113
8323
  const jobs = await Promise.all(
8114
8324
  entries.filter((entry) => entry.isFile() && entry.name.endsWith(".json")).map((entry) => readPendingFinalizeJob(entry.name.replace(/\.json$/, "")))
8115
8325
  );
@@ -8150,18 +8360,24 @@ async function summarizePendingFinalizeJobs(scope) {
8150
8360
  continue;
8151
8361
  }
8152
8362
  if (!isPastDue(job.nextRetryAt)) {
8153
- summary.retryScheduledJobCount += 1;
8363
+ if (job.retryCount === 0 && !job.error) {
8364
+ summary.awaitingUsageJobCount += 1;
8365
+ } else {
8366
+ summary.retryScheduledJobCount += 1;
8367
+ }
8154
8368
  continue;
8155
8369
  }
8156
8370
  summary.queuedJobCount += 1;
8157
8371
  }
8158
- summary.activeJobCount = summary.queuedJobCount + summary.processingJobCount + summary.retryScheduledJobCount;
8372
+ summary.activeJobCount = summary.queuedJobCount + summary.processingJobCount + summary.retryScheduledJobCount + summary.awaitingUsageJobCount;
8159
8373
  if (summary.processingJobCount > 0) {
8160
8374
  summary.state = "processing";
8161
8375
  } else if (summary.queuedJobCount > 0) {
8162
8376
  summary.state = "queued";
8163
8377
  } else if (summary.retryScheduledJobCount > 0) {
8164
8378
  summary.state = "retry_scheduled";
8379
+ } else if (summary.awaitingUsageJobCount > 0) {
8380
+ summary.state = "awaiting_usage";
8165
8381
  } else if (summary.failedJobCount > 0) {
8166
8382
  summary.state = "failed";
8167
8383
  }
@@ -8183,14 +8399,14 @@ async function updatePendingFinalizeJob(jobId, update) {
8183
8399
  return next;
8184
8400
  }
8185
8401
  async function claimPendingFinalizeJob(jobId) {
8186
- const lockPath = getJobLockPath(jobId);
8187
- const lockAcquired = await acquireJobLock(jobId);
8402
+ const lockPath = getJobLockPath2(jobId);
8403
+ const lockAcquired = await acquireJobLock2(jobId);
8188
8404
  if (!lockAcquired) return null;
8189
8405
  let released = false;
8190
8406
  const release = async () => {
8191
8407
  if (released) return;
8192
8408
  released = true;
8193
- await import_promises16.default.rm(lockPath, { recursive: true, force: true }).catch(() => void 0);
8409
+ await import_promises17.default.rm(lockPath, { recursive: true, force: true }).catch(() => void 0);
8194
8410
  };
8195
8411
  try {
8196
8412
  let existing = await readPendingFinalizeJob(jobId);
@@ -8241,13 +8457,13 @@ async function claimPendingFinalizeJob(jobId) {
8241
8457
  }
8242
8458
  async function removePendingFinalizeJob(jobId) {
8243
8459
  try {
8244
- await import_promises16.default.unlink(getJobPath(jobId));
8460
+ await import_promises17.default.unlink(getJobPath(jobId));
8245
8461
  } catch (error) {
8246
8462
  if (error?.code !== "ENOENT") {
8247
8463
  throw error;
8248
8464
  }
8249
8465
  }
8250
- await import_promises16.default.rm(getJobLockPath(jobId), { recursive: true, force: true }).catch(() => void 0);
8466
+ await import_promises17.default.rm(getJobLockPath2(jobId), { recursive: true, force: true }).catch(() => void 0);
8251
8467
  }
8252
8468
  function unwrapResponseObject(resp, label) {
8253
8469
  const obj = resp?.responseObject;
@@ -8375,8 +8591,39 @@ function buildAmbiguousResolution(params) {
8375
8591
  defaultBranch: params.lane.defaultBranch ?? params.state.defaultBranch
8376
8592
  };
8377
8593
  }
8594
+ var RESOLUTION_CACHE_TTL_MS = 5e3;
8595
+ var resolutionCache = /* @__PURE__ */ new Map();
8596
+ var cacheClock3 = () => Date.now();
8597
+ function buildResolutionCacheKey(params) {
8598
+ return `${params.repoRoot}|${params.currentBranch ?? ""}|${params.hasApi ? "1" : "0"}`;
8599
+ }
8600
+ function readResolutionCache(key) {
8601
+ const entry = resolutionCache.get(key);
8602
+ if (!entry) return null;
8603
+ if (cacheClock3() - entry.cachedAt > RESOLUTION_CACHE_TTL_MS) {
8604
+ resolutionCache.delete(key);
8605
+ return null;
8606
+ }
8607
+ return entry.resolution;
8608
+ }
8609
+ function writeResolutionCache(key, resolution) {
8610
+ if (resolution.status !== "resolved") return;
8611
+ resolutionCache.set(key, { resolution, cachedAt: cacheClock3() });
8612
+ }
8378
8613
  async function resolveActiveLaneBinding(params) {
8379
8614
  const state = await readCollabBindingState(params.repoRoot);
8615
+ const cacheKey = buildResolutionCacheKey({
8616
+ repoRoot: params.repoRoot,
8617
+ currentBranch: state ? normalizeBranchName2(state.currentBranch) : null,
8618
+ hasApi: Boolean(params.api)
8619
+ });
8620
+ const cached = readResolutionCache(cacheKey);
8621
+ if (cached) return cached;
8622
+ const resolution = await resolveActiveLaneBindingUncached(params, state);
8623
+ writeResolutionCache(cacheKey, resolution);
8624
+ return resolution;
8625
+ }
8626
+ async function resolveActiveLaneBindingUncached(params, state) {
8380
8627
  if (!state) {
8381
8628
  return { status: "not_bound", currentBranch: null };
8382
8629
  }
@@ -8565,12 +8812,24 @@ function buildBaseState() {
8565
8812
  queuedJobCount: 0,
8566
8813
  processingJobCount: 0,
8567
8814
  retryScheduledJobCount: 0,
8815
+ awaitingUsageJobCount: 0,
8568
8816
  failedJobCount: 0,
8569
8817
  oldestCapturedAt: null,
8570
8818
  newestCapturedAt: null,
8571
8819
  nextRetryAt: null,
8572
8820
  latestError: null
8573
8821
  },
8822
+ pendingAsyncJobs: {
8823
+ state: "idle",
8824
+ activeJobCount: 0,
8825
+ queuedJobCount: 0,
8826
+ processingJobCount: 0,
8827
+ failedJobCount: 0,
8828
+ oldestCreatedAt: null,
8829
+ newestCreatedAt: null,
8830
+ latestError: null,
8831
+ kinds: []
8832
+ },
8574
8833
  warnings: [],
8575
8834
  hint: null,
8576
8835
  metadataWarnings: [],
@@ -8649,7 +8908,7 @@ async function collabDetectRepoState(params) {
8649
8908
  return detected;
8650
8909
  }
8651
8910
  if (!params.api) {
8652
- const [inspection, pendingFinalize] = await Promise.all([
8911
+ const [inspection, pendingFinalize, pendingAsyncJobs] = await Promise.all([
8653
8912
  inspectLocalSnapshot({
8654
8913
  repoRoot,
8655
8914
  repoFingerprint: binding.repoFingerprint,
@@ -8662,15 +8921,30 @@ async function collabDetectRepoState(params) {
8662
8921
  repoFingerprint: binding.repoFingerprint,
8663
8922
  currentAppId: binding.currentAppId,
8664
8923
  laneId: binding.laneId
8665
- })
8924
+ }),
8925
+ summarizeAsyncJobs({ repoRoot, branchName: binding.branchName ?? null })
8666
8926
  ]);
8667
8927
  detected.currentSnapshotHash = inspection.snapshotHash;
8668
8928
  detected.pendingFinalize = pendingFinalize;
8929
+ detected.pendingAsyncJobs = pendingAsyncJobs;
8669
8930
  return detected;
8670
8931
  }
8671
8932
  try {
8672
- const [headResp, inspection, baseline, pendingFinalize] = await Promise.all([
8673
- params.api.getAppHead(binding.currentAppId),
8933
+ const baseline = await readLocalBaseline({
8934
+ repoFingerprint: binding.repoFingerprint,
8935
+ laneId: binding.laneId,
8936
+ repoRoot
8937
+ });
8938
+ const hasFullBaseline = Boolean(baseline?.lastSnapshotHash && baseline?.lastServerHeadHash);
8939
+ const metadataIdentityPromise = hasFullBaseline ? getAppDeltaCached(params.api, binding.currentAppId, {
8940
+ baseHeadHash: baseline.lastServerHeadHash,
8941
+ targetHeadHash: baseline.lastServerHeadHash,
8942
+ repoFingerprint: binding.repoFingerprint ?? void 0,
8943
+ remoteUrl: binding.remoteUrl ?? void 0,
8944
+ defaultBranch: binding.defaultBranch ?? void 0
8945
+ }) : Promise.resolve(null);
8946
+ const [headResp, inspection, parallelMetadataResp, pendingFinalize, pendingAsyncJobs] = await Promise.all([
8947
+ getAppHeadCached(params.api, binding.currentAppId),
8674
8948
  inspectLocalSnapshot({
8675
8949
  repoRoot,
8676
8950
  repoFingerprint: binding.repoFingerprint,
@@ -8678,23 +8952,21 @@ async function collabDetectRepoState(params) {
8678
8952
  branchName: binding.branchName,
8679
8953
  persistBlobs: false
8680
8954
  }),
8681
- readLocalBaseline({
8682
- repoFingerprint: binding.repoFingerprint,
8683
- laneId: binding.laneId,
8684
- repoRoot
8685
- }),
8955
+ metadataIdentityPromise,
8686
8956
  summarizePendingFinalizeJobs({
8687
8957
  repoRoot,
8688
8958
  repoFingerprint: binding.repoFingerprint,
8689
8959
  currentAppId: binding.currentAppId,
8690
8960
  laneId: binding.laneId
8691
- })
8961
+ }),
8962
+ summarizeAsyncJobs({ repoRoot, branchName: binding.branchName ?? null })
8692
8963
  ]);
8693
8964
  const appHead = unwrapResponseObject(headResp, "app head");
8694
8965
  detected.currentServerHeadHash = appHead.headCommitHash;
8695
8966
  detected.currentServerHeadCommitId = appHead.headCommitId;
8696
8967
  detected.currentSnapshotHash = inspection.snapshotHash;
8697
8968
  detected.pendingFinalize = pendingFinalize;
8969
+ detected.pendingAsyncJobs = pendingAsyncJobs;
8698
8970
  detected.baseline = {
8699
8971
  lastSnapshotId: baseline?.lastSnapshotId ?? null,
8700
8972
  lastSnapshotHash: baseline?.lastSnapshotHash ?? null,
@@ -8738,15 +9010,7 @@ async function collabDetectRepoState(params) {
8738
9010
  "Local Git HEAD changed since the last Remix baseline. Remix will use the current workspace snapshot to detect divergence."
8739
9011
  );
8740
9012
  }
8741
- const metadataBaseHeadHash = baseline.lastServerHeadHash || appHead.headCommitHash;
8742
- const metadataResp = await params.api.getAppDelta(binding.currentAppId, {
8743
- baseHeadHash: metadataBaseHeadHash,
8744
- targetHeadHash: metadataBaseHeadHash,
8745
- repoFingerprint: binding.repoFingerprint ?? void 0,
8746
- remoteUrl: binding.remoteUrl ?? void 0,
8747
- defaultBranch: binding.defaultBranch ?? void 0
8748
- });
8749
- const metadataCheck = unwrapResponseObject(metadataResp, "app delta metadata");
9013
+ const metadataCheck = unwrapResponseObject(parallelMetadataResp, "app delta metadata");
8750
9014
  detected.metadataWarnings = metadataCheck.warnings;
8751
9015
  detected.warnings.push(...metadataCheck.warnings);
8752
9016
  if (metadataCheck.status === "conflict_risk") {
@@ -8779,8 +9043,322 @@ async function collabDetectRepoState(params) {
8779
9043
  return detected;
8780
9044
  }
8781
9045
  }
9046
+ var GIT_TIMEOUT_MS = 1e4;
9047
+ var GIT_MAX_BUFFER = 64 * 1024 * 1024;
9048
+ var MAX_COMMITS = 100;
9049
+ var RECORD_SEPARATOR = "";
9050
+ var FIELD_SEPARATOR = "";
9051
+ var LOG_FORMAT = `${RECORD_SEPARATOR}%H${FIELD_SEPARATOR}%P${FIELD_SEPARATOR}%an${FIELD_SEPARATOR}%ae${FIELD_SEPARATOR}%cn${FIELD_SEPARATOR}%ce${FIELD_SEPARATOR}%aI${FIELD_SEPARATOR}%cI${FIELD_SEPARATOR}%s`;
9052
+ function sanitizedEnv() {
9053
+ const env = { ...process.env };
9054
+ delete env.GIT_DIR;
9055
+ delete env.GIT_WORK_TREE;
9056
+ delete env.GIT_COMMON_DIR;
9057
+ delete env.GIT_INDEX_FILE;
9058
+ return env;
9059
+ }
9060
+ async function runGitEvent(args, cwd) {
9061
+ const res = await execa("git", args, {
9062
+ cwd,
9063
+ reject: false,
9064
+ stderr: "pipe",
9065
+ timeout: GIT_TIMEOUT_MS,
9066
+ env: sanitizedEnv(),
9067
+ maxBuffer: GIT_MAX_BUFFER
9068
+ });
9069
+ return {
9070
+ exitCode: res.exitCode ?? -1,
9071
+ stdout: String(res.stdout ?? ""),
9072
+ stderr: String(res.stderr ?? ""),
9073
+ timedOut: res.timedOut === true
9074
+ };
9075
+ }
9076
+ function parseNumstatLine(line) {
9077
+ const tab1 = line.indexOf(" ");
9078
+ if (tab1 < 0) return null;
9079
+ const tab2 = line.indexOf(" ", tab1 + 1);
9080
+ if (tab2 < 0) return null;
9081
+ const addedRaw = line.slice(0, tab1);
9082
+ const removedRaw = line.slice(tab1 + 1, tab2);
9083
+ const filePath = line.slice(tab2 + 1);
9084
+ if (!filePath) return null;
9085
+ const added = addedRaw === "-" ? 0 : Number.parseInt(addedRaw, 10);
9086
+ const removed = removedRaw === "-" ? 0 : Number.parseInt(removedRaw, 10);
9087
+ return {
9088
+ added: Number.isFinite(added) ? added : 0,
9089
+ removed: Number.isFinite(removed) ? removed : 0,
9090
+ path: filePath
9091
+ };
9092
+ }
9093
+ function parseLogOutput(stdout) {
9094
+ const records = [];
9095
+ const parts = stdout.split(RECORD_SEPARATOR);
9096
+ for (const part of parts) {
9097
+ if (!part) continue;
9098
+ const newlineIdx = part.indexOf("\n");
9099
+ const metadataLine = newlineIdx === -1 ? part : part.slice(0, newlineIdx);
9100
+ const rest = newlineIdx === -1 ? "" : part.slice(newlineIdx + 1);
9101
+ const fields = metadataLine.split(FIELD_SEPARATOR);
9102
+ if (fields.length < 9) continue;
9103
+ const [hash, parentsField, authorName, authorEmail, committerName, committerEmail, authorDateIso, committerDateIso, subject] = fields;
9104
+ const parents = parentsField ? parentsField.split(" ").filter(Boolean) : [];
9105
+ const commit = {
9106
+ hash,
9107
+ parents,
9108
+ authorName,
9109
+ authorEmail,
9110
+ committerName,
9111
+ committerEmail,
9112
+ authorDateIso,
9113
+ committerDateIso,
9114
+ subject,
9115
+ filesChanged: 0,
9116
+ linesAdded: 0,
9117
+ linesRemoved: 0
9118
+ };
9119
+ const touchedPaths = /* @__PURE__ */ new Set();
9120
+ for (const rawLine of rest.split("\n")) {
9121
+ if (!rawLine) continue;
9122
+ const parsed = parseNumstatLine(rawLine);
9123
+ if (!parsed) continue;
9124
+ commit.linesAdded += parsed.added;
9125
+ commit.linesRemoved += parsed.removed;
9126
+ touchedPaths.add(parsed.path);
9127
+ }
9128
+ commit.filesChanged = touchedPaths.size;
9129
+ records.push(commit);
9130
+ }
9131
+ return records;
9132
+ }
9133
+ function buildCommitEvent(raw) {
9134
+ return {
9135
+ hash: raw.hash,
9136
+ shortHash: raw.hash.slice(0, 7),
9137
+ isMerge: raw.parents.length >= 2,
9138
+ parents: raw.parents,
9139
+ author: { name: raw.authorName, email: raw.authorEmail },
9140
+ committer: { name: raw.committerName, email: raw.committerEmail },
9141
+ message: raw.subject,
9142
+ timestamp: raw.authorDateIso,
9143
+ filesChanged: raw.filesChanged,
9144
+ linesAdded: raw.linesAdded,
9145
+ linesRemoved: raw.linesRemoved
9146
+ };
9147
+ }
9148
+ var MERGE_BRANCH_REGEXES = [
9149
+ /^Merge branch '([^']+)'(?: into .+)?$/,
9150
+ /^Merge remote-tracking branch '([^']+)'(?: into .+)?$/,
9151
+ /^Merge pull request #\d+ from (\S+)$/
9152
+ ];
9153
+ function parseBranchFromMergeMessage(subject) {
9154
+ for (const re of MERGE_BRANCH_REGEXES) {
9155
+ const match = re.exec(subject);
9156
+ if (!match) continue;
9157
+ const raw = match[1];
9158
+ if (!raw) continue;
9159
+ return raw.startsWith("origin/") ? raw.slice("origin/".length) : raw;
9160
+ }
9161
+ return null;
9162
+ }
9163
+ async function resolveBranchFromNameRev(secondParent, repoRoot) {
9164
+ const res = await runGitEvent(
9165
+ ["name-rev", "--name-only", "--no-undefined", secondParent],
9166
+ repoRoot
9167
+ );
9168
+ if (res.exitCode !== 0 || res.timedOut) return null;
9169
+ let name = res.stdout.trim();
9170
+ if (!name) return null;
9171
+ if (name.startsWith("remotes/origin/")) name = name.slice("remotes/origin/".length);
9172
+ else if (name.startsWith("origin/")) name = name.slice("origin/".length);
9173
+ if (name.includes("~") || name.includes("^") || name.startsWith("tags/") || name === "undefined") return null;
9174
+ return name;
9175
+ }
9176
+ async function collectUncommittedSummary(repoRoot) {
9177
+ const summary = {
9178
+ filesChanged: 0,
9179
+ linesAdded: 0,
9180
+ linesRemoved: 0,
9181
+ staged: 0,
9182
+ unstaged: 0,
9183
+ untracked: 0
9184
+ };
9185
+ const statusRes = await runGitEvent(["status", "--porcelain=v1"], repoRoot);
9186
+ if (statusRes.exitCode === 0 && !statusRes.timedOut) {
9187
+ const uniquePaths = /* @__PURE__ */ new Set();
9188
+ for (const rawLine of statusRes.stdout.split("\n")) {
9189
+ if (!rawLine) continue;
9190
+ if (rawLine.startsWith("??")) {
9191
+ summary.untracked += 1;
9192
+ uniquePaths.add(rawLine.slice(3));
9193
+ continue;
9194
+ }
9195
+ if (rawLine.length < 3) continue;
9196
+ const indexCode = rawLine.charCodeAt(0);
9197
+ const worktreeCode = rawLine.charCodeAt(1);
9198
+ const indexCh = rawLine[0];
9199
+ const worktreeCh = rawLine[1];
9200
+ const pathPart = rawLine.slice(3);
9201
+ if (indexCh !== " " && indexCode !== 63) summary.staged += 1;
9202
+ if ((worktreeCh === "M" || worktreeCh === "D") && worktreeCode !== 63) summary.unstaged += 1;
9203
+ uniquePaths.add(pathPart);
9204
+ }
9205
+ summary.filesChanged = uniquePaths.size;
9206
+ }
9207
+ const diffRes = await runGitEvent(["diff", "--numstat", "HEAD"], repoRoot);
9208
+ if (diffRes.exitCode === 0 && !diffRes.timedOut) {
9209
+ for (const rawLine of diffRes.stdout.split("\n")) {
9210
+ if (!rawLine) continue;
9211
+ const parsed = parseNumstatLine(rawLine);
9212
+ if (!parsed) continue;
9213
+ summary.linesAdded += parsed.added;
9214
+ summary.linesRemoved += parsed.removed;
9215
+ }
9216
+ }
9217
+ return summary;
9218
+ }
9219
+ function buildEmptyPreTurnEvents(params) {
9220
+ return {
9221
+ commits: [],
9222
+ merges: [],
9223
+ uncommittedAtFinalize: params.uncommitted,
9224
+ summary: {
9225
+ totalCommits: 0,
9226
+ totalMerges: 0,
9227
+ totalFilesChanged: 0,
9228
+ totalLinesAdded: 0,
9229
+ totalLinesRemoved: 0,
9230
+ elapsedMs: null,
9231
+ truncated: false,
9232
+ rewritten: params.rewritten
9233
+ },
9234
+ range: { fromCommit: params.fromCommit, toCommit: params.toCommit },
9235
+ schemaVersion: 1
9236
+ };
9237
+ }
9238
+ async function collectPreTurnGitEvents(params) {
9239
+ try {
9240
+ const uncommitted = await collectUncommittedSummary(params.repoRoot);
9241
+ if (!params.fromCommit || params.fromCommit === params.toCommit) {
9242
+ return buildEmptyPreTurnEvents({
9243
+ fromCommit: params.fromCommit,
9244
+ toCommit: params.toCommit,
9245
+ uncommitted,
9246
+ rewritten: false
9247
+ });
9248
+ }
9249
+ const reachabilityRes = await runGitEvent(
9250
+ ["merge-base", "--is-ancestor", params.fromCommit, params.toCommit],
9251
+ params.repoRoot
9252
+ );
9253
+ if (reachabilityRes.timedOut) return null;
9254
+ if (reachabilityRes.exitCode === 1) {
9255
+ return buildEmptyPreTurnEvents({
9256
+ fromCommit: null,
9257
+ toCommit: params.toCommit,
9258
+ uncommitted,
9259
+ rewritten: true
9260
+ });
9261
+ }
9262
+ if (reachabilityRes.exitCode !== 0) return null;
9263
+ const logRes = await runGitEvent(
9264
+ [
9265
+ "log",
9266
+ "--first-parent",
9267
+ "-m",
9268
+ "--numstat",
9269
+ "--no-renames",
9270
+ `--format=${LOG_FORMAT}`,
9271
+ `${params.fromCommit}..${params.toCommit}`
9272
+ ],
9273
+ params.repoRoot
9274
+ );
9275
+ if (logRes.timedOut || logRes.exitCode !== 0) return null;
9276
+ const rawCommits = parseLogOutput(logRes.stdout);
9277
+ const totalCommits = rawCommits.length;
9278
+ const totalMerges = rawCommits.reduce((n2, c3) => n2 + (c3.parents.length >= 2 ? 1 : 0), 0);
9279
+ const ordered = [...rawCommits].reverse();
9280
+ const truncated = ordered.length > MAX_COMMITS;
9281
+ const kept = truncated ? ordered.slice(ordered.length - MAX_COMMITS) : ordered;
9282
+ const commits = kept.map(buildCommitEvent);
9283
+ const merges = [];
9284
+ for (const raw of rawCommits) {
9285
+ if (raw.parents.length < 2) continue;
9286
+ const secondParent = raw.parents[1];
9287
+ const fromBranchFromMessage = parseBranchFromMergeMessage(raw.subject);
9288
+ const fromBranchFromNameRev = await resolveBranchFromNameRev(secondParent, params.repoRoot);
9289
+ merges.push({
9290
+ hash: raw.hash,
9291
+ shortHash: raw.hash.slice(0, 7),
9292
+ fromBranchFromMessage,
9293
+ fromBranchFromNameRev,
9294
+ fromCommit: secondParent,
9295
+ filesChanged: raw.filesChanged,
9296
+ linesAdded: raw.linesAdded,
9297
+ linesRemoved: raw.linesRemoved,
9298
+ message: raw.subject
9299
+ });
9300
+ }
9301
+ let totalFilesChanged = 0;
9302
+ let totalLinesAdded = 0;
9303
+ let totalLinesRemoved = 0;
9304
+ const diffRes = await runGitEvent(
9305
+ ["diff", "--numstat", "--no-renames", params.fromCommit, params.toCommit],
9306
+ params.repoRoot
9307
+ );
9308
+ if (diffRes.exitCode === 0 && !diffRes.timedOut) {
9309
+ const uniquePaths = /* @__PURE__ */ new Set();
9310
+ for (const rawLine of diffRes.stdout.split("\n")) {
9311
+ if (!rawLine) continue;
9312
+ const parsed = parseNumstatLine(rawLine);
9313
+ if (!parsed) continue;
9314
+ uniquePaths.add(parsed.path);
9315
+ totalLinesAdded += parsed.added;
9316
+ totalLinesRemoved += parsed.removed;
9317
+ }
9318
+ totalFilesChanged = uniquePaths.size;
9319
+ }
9320
+ let elapsedMs = null;
9321
+ if (rawCommits.length > 0) {
9322
+ const committerTimes = rawCommits.map((c3) => Date.parse(c3.committerDateIso)).filter((t) => Number.isFinite(t));
9323
+ if (committerTimes.length > 0) {
9324
+ const min = Math.min(...committerTimes);
9325
+ const max = Math.max(...committerTimes);
9326
+ elapsedMs = max - min;
9327
+ }
9328
+ }
9329
+ return {
9330
+ // commits[] is a first-parent view: it contains the commits on the
9331
+ // receiving branch plus merges as single entries. Commits that entered
9332
+ // via merges are represented by the MergeEvent, not listed individually.
9333
+ commits,
9334
+ merges,
9335
+ uncommittedAtFinalize: uncommitted,
9336
+ summary: {
9337
+ totalCommits,
9338
+ totalMerges,
9339
+ totalFilesChanged,
9340
+ totalLinesAdded,
9341
+ totalLinesRemoved,
9342
+ elapsedMs,
9343
+ truncated,
9344
+ rewritten: false
9345
+ },
9346
+ range: { fromCommit: params.fromCommit, toCommit: params.toCommit },
9347
+ schemaVersion: 1
9348
+ };
9349
+ } catch (err) {
9350
+ console.warn(
9351
+ `[remix-core] preTurnEvents harvester failed: ${err instanceof Error ? err.message : String(err)}`
9352
+ );
9353
+ return null;
9354
+ }
9355
+ }
8782
9356
  var FINALIZE_RETRY_BASE_DELAY_MS = 15e3;
8783
9357
  var FINALIZE_RETRY_MAX_DELAY_MS = 5 * 60 * 1e3;
9358
+ var FINALIZE_JOB_PROCESSING_TIMEOUT_MS_DEFAULT = 90 * 1e3;
9359
+ var finalizeJobProcessingTimeoutMs = FINALIZE_JOB_PROCESSING_TIMEOUT_MS_DEFAULT;
9360
+ var FINALIZE_LOCK_HEARTBEAT_MS = Math.max(15e3, Math.floor(FINALIZE_JOB_LOCK_STALE_MS / 3));
9361
+ var FINALIZE_AUTO_TERMINAL_THRESHOLD = 5;
8784
9362
  function readMetadataString(job, key) {
8785
9363
  const value = job.metadata[key];
8786
9364
  return typeof value === "string" && value.trim() ? value.trim() : null;
@@ -8789,6 +9367,10 @@ function readMetadataActor(job) {
8789
9367
  const actor = job.metadata.actor;
8790
9368
  return actor && typeof actor === "object" ? actor : void 0;
8791
9369
  }
9370
+ function readMetadataTurnUsage(job) {
9371
+ const value = job.metadata.turnUsage;
9372
+ return value && typeof value === "object" ? value : null;
9373
+ }
8792
9374
  function buildNextRetryAt(retryCount) {
8793
9375
  const exponent = Math.max(0, retryCount - 1);
8794
9376
  const delayMs = Math.min(FINALIZE_RETRY_BASE_DELAY_MS * 2 ** exponent, FINALIZE_RETRY_MAX_DELAY_MS);
@@ -8811,8 +9393,50 @@ function classifyFinalizeError(error) {
8811
9393
  message: error instanceof Error ? error.message : String(error)
8812
9394
  };
8813
9395
  }
8814
- function buildWorkspaceMetadata(params) {
9396
+ function runWithFinalizeTimeout(work, jobId) {
9397
+ let timer;
9398
+ const cap = finalizeJobProcessingTimeoutMs;
9399
+ const timeout = new Promise((_, reject) => {
9400
+ timer = setTimeout(() => {
9401
+ reject(
9402
+ buildFinalizeCliError({
9403
+ message: `Finalize job ${jobId} exceeded the ${Math.round(
9404
+ cap / 1e3
9405
+ )}s drainer wall-clock timeout. The slow request will be retried by a future drainer pass.`,
9406
+ exitCode: 1,
9407
+ hint: "If this keeps recurring, run `remix collab finalize-queue list` to inspect the job, then `remix collab finalize-queue forget <id>` to drop it manually.",
9408
+ disposition: "retryable",
9409
+ reason: "processing_timeout"
9410
+ })
9411
+ );
9412
+ }, cap);
9413
+ timer.unref?.();
9414
+ });
9415
+ return Promise.race([work, timeout]).finally(() => {
9416
+ if (timer) clearTimeout(timer);
9417
+ });
9418
+ }
9419
+ function startLockHeartbeat(jobId) {
9420
+ const handle = setInterval(() => {
9421
+ void heartbeatJobLock(jobId);
9422
+ }, FINALIZE_LOCK_HEARTBEAT_MS);
9423
+ handle.unref?.();
9424
+ return () => clearInterval(handle);
9425
+ }
9426
+ function computeFailureEscalation(job, reason) {
9427
+ const previousReason = job.metadata.consecutiveFailureReason;
9428
+ const previousCountRaw = job.metadata.consecutiveFailures;
9429
+ const previousCount = typeof previousCountRaw === "number" && Number.isFinite(previousCountRaw) && previousCountRaw > 0 ? Math.floor(previousCountRaw) : 0;
9430
+ const sameReason = previousReason === reason;
9431
+ const next = sameReason ? previousCount + 1 : 1;
8815
9432
  return {
9433
+ consecutiveFailures: next,
9434
+ consecutiveFailureReason: reason,
9435
+ shouldEscalateToTerminal: next >= FINALIZE_AUTO_TERMINAL_THRESHOLD
9436
+ };
9437
+ }
9438
+ function buildWorkspaceMetadata(params) {
9439
+ const metadata = {
8816
9440
  branch: params.branchName,
8817
9441
  repoRoot: params.repoRoot,
8818
9442
  remoteUrl: params.remoteUrl,
@@ -8824,126 +9448,202 @@ function buildWorkspaceMetadata(params) {
8824
9448
  currentSnapshotHash: params.currentSnapshotHash,
8825
9449
  localCommitHash: params.localCommitHash,
8826
9450
  repoStateAtCapture: params.repoState,
8827
- replayedFromBaseHash: params.replayedFromBaseHash ?? null
9451
+ replayedFromBaseHash: params.replayedFromBaseHash ?? null,
9452
+ previousLocalCommitHash: params.previousLocalCommitHash ?? null
8828
9453
  };
9454
+ if (params.preTurnEvents) {
9455
+ metadata.preTurnEvents = params.preTurnEvents;
9456
+ }
9457
+ if (params.turnUsage) {
9458
+ metadata.turnUsage = params.turnUsage;
9459
+ }
9460
+ if (typeof params.promptedAt === "string" && params.promptedAt.trim()) {
9461
+ metadata.promptedAt = params.promptedAt.trim();
9462
+ }
9463
+ return metadata;
9464
+ }
9465
+ async function harvestPreTurnEvents(repoRoot, fromCommit, toCommit) {
9466
+ if (!toCommit) return null;
9467
+ try {
9468
+ return await collectPreTurnGitEvents({ repoRoot, fromCommit, toCommit });
9469
+ } catch (err) {
9470
+ console.warn(
9471
+ `[remix-core] preTurnEvents harvester threw: ${err instanceof Error ? err.message : String(err)}`
9472
+ );
9473
+ return null;
9474
+ }
8829
9475
  }
8830
9476
  async function processClaimedPendingFinalizeJob(params) {
8831
9477
  const job = params.job;
9478
+ const stopHeartbeat = startLockHeartbeat(job.id);
8832
9479
  try {
8833
- const [snapshot, baseline, appHeadResp] = await Promise.all([
8834
- readLocalSnapshot(job.currentSnapshotId),
8835
- readLocalBaseline({
8836
- repoFingerprint: job.repoFingerprint,
8837
- laneId: job.laneId,
8838
- repoRoot: job.repoRoot
8839
- }),
8840
- params.api.getAppHead(job.currentAppId)
8841
- ]);
8842
- if (!snapshot) {
9480
+ return await runWithFinalizeTimeout(processClaimedPendingFinalizeJobInner(params), job.id);
9481
+ } catch (error) {
9482
+ const classified = classifyFinalizeError(error);
9483
+ const escalation = computeFailureEscalation(job, classified.reason);
9484
+ const finalDisposition = classified.disposition === "terminal" || escalation.shouldEscalateToTerminal ? "terminal" : "retryable";
9485
+ await updatePendingFinalizeJob(job.id, {
9486
+ status: finalDisposition === "terminal" ? "failed" : "queued",
9487
+ error: finalDisposition === "terminal" && escalation.shouldEscalateToTerminal && classified.disposition !== "terminal" ? `${classified.message} (auto-escalated to terminal after ${escalation.consecutiveFailures} consecutive ${classified.reason} failures)` : classified.message,
9488
+ nextRetryAt: finalDisposition === "terminal" ? null : buildNextRetryAt(job.retryCount),
9489
+ metadata: {
9490
+ failureDisposition: finalDisposition,
9491
+ failureReason: classified.reason,
9492
+ consecutiveFailures: escalation.consecutiveFailures,
9493
+ consecutiveFailureReason: escalation.consecutiveFailureReason
9494
+ }
9495
+ });
9496
+ throw error;
9497
+ } finally {
9498
+ stopHeartbeat();
9499
+ await params.release();
9500
+ }
9501
+ }
9502
+ async function processClaimedPendingFinalizeJobInner(params) {
9503
+ const job = params.job;
9504
+ const [snapshot, baseline, appHeadResp] = await Promise.all([
9505
+ readLocalSnapshot(job.currentSnapshotId),
9506
+ readLocalBaseline({
9507
+ repoFingerprint: job.repoFingerprint,
9508
+ laneId: job.laneId,
9509
+ repoRoot: job.repoRoot
9510
+ }),
9511
+ getAppHeadCached(params.api, job.currentAppId)
9512
+ ]);
9513
+ if (!snapshot) {
9514
+ throw buildFinalizeCliError({
9515
+ message: "Captured snapshot is missing from the local snapshot store.",
9516
+ exitCode: 1,
9517
+ disposition: "terminal",
9518
+ reason: "snapshot_missing"
9519
+ });
9520
+ }
9521
+ if (!baseline) {
9522
+ throw buildFinalizeCliError({
9523
+ message: "Local baseline is missing for this queued finalize job.",
9524
+ exitCode: 2,
9525
+ hint: "Run `remix collab re-anchor` to anchor the repository again.",
9526
+ disposition: "terminal",
9527
+ reason: "baseline_missing"
9528
+ });
9529
+ }
9530
+ const baselineDrifted = baseline.lastSnapshotId !== job.baselineSnapshotId || baseline.lastServerHeadHash !== job.baselineServerHeadHash;
9531
+ const appHead = unwrapResponseObject(appHeadResp, "app head");
9532
+ const remoteUrl = readMetadataString(job, "remoteUrl");
9533
+ const defaultBranch = readMetadataString(job, "defaultBranch");
9534
+ const repoState = readMetadataString(job, "repoState");
9535
+ const actor = readMetadataActor(job);
9536
+ const turnUsage = readMetadataTurnUsage(job);
9537
+ const promptedAt = readMetadataString(job, "promptedAt");
9538
+ const previousLocalCommitHash = baseline.lastSeenLocalCommitHash ?? null;
9539
+ const preTurnEvents = await harvestPreTurnEvents(
9540
+ job.repoRoot,
9541
+ previousLocalCommitHash,
9542
+ snapshot.localCommitHash
9543
+ );
9544
+ const diffResult = await diffLocalSnapshots({
9545
+ baseSnapshotId: job.baselineSnapshotId,
9546
+ targetSnapshotId: job.currentSnapshotId
9547
+ });
9548
+ if (!diffResult.diff.trim()) {
9549
+ if (baselineDrifted && snapshot.snapshotHash !== baseline.lastSnapshotHash) {
8843
9550
  throw buildFinalizeCliError({
8844
- message: "Captured snapshot is missing from the local snapshot store.",
9551
+ message: "Finalize queue baseline drifted before this job was processed.",
8845
9552
  exitCode: 1,
9553
+ hint: "Process queued finalize jobs in capture order, or re-anchor the repository before retrying.",
8846
9554
  disposition: "terminal",
8847
- reason: "snapshot_missing"
9555
+ reason: "baseline_drifted"
8848
9556
  });
8849
9557
  }
8850
- if (!baseline) {
9558
+ if (appHead.headCommitHash !== job.baselineServerHeadHash) {
8851
9559
  throw buildFinalizeCliError({
8852
- message: "Local baseline is missing for this queued finalize job.",
9560
+ message: "Server lane changed before a no-diff turn could be recorded.",
8853
9561
  exitCode: 2,
8854
- hint: "Run `remix collab re-anchor` to anchor the repository again.",
9562
+ hint: "Pull the server changes locally before recording another no-diff turn.",
8855
9563
  disposition: "terminal",
8856
- reason: "baseline_missing"
9564
+ reason: "server_lane_changed"
8857
9565
  });
8858
9566
  }
8859
- if (baseline.lastSnapshotId !== job.baselineSnapshotId || baseline.lastServerHeadHash !== job.baselineServerHeadHash) {
8860
- throw buildFinalizeCliError({
8861
- message: "Finalize queue baseline drifted before this job was processed.",
8862
- exitCode: 1,
8863
- hint: "Process queued finalize jobs in capture order, or re-anchor the repository before retrying.",
8864
- disposition: "terminal",
8865
- reason: "baseline_drifted"
8866
- });
8867
- }
8868
- const appHead = unwrapResponseObject(appHeadResp, "app head");
8869
- const remoteUrl = readMetadataString(job, "remoteUrl");
8870
- const defaultBranch = readMetadataString(job, "defaultBranch");
8871
- const repoState = readMetadataString(job, "repoState");
8872
- const actor = readMetadataActor(job);
8873
- const diffResult = await diffLocalSnapshots({
8874
- baseSnapshotId: job.baselineSnapshotId,
8875
- targetSnapshotId: job.currentSnapshotId
8876
- });
8877
- if (!diffResult.diff.trim()) {
8878
- if (appHead.headCommitHash !== job.baselineServerHeadHash) {
8879
- throw buildFinalizeCliError({
8880
- message: "Server lane changed before a no-diff turn could be recorded.",
8881
- exitCode: 2,
8882
- hint: "Pull the server changes locally before recording another no-diff turn.",
8883
- disposition: "terminal",
8884
- reason: "server_lane_changed"
8885
- });
8886
- }
8887
- const collabTurnResp = await params.api.createCollabTurn(job.currentAppId, {
8888
- threadId: job.threadId ?? void 0,
8889
- collabLaneId: job.laneId ?? void 0,
8890
- prompt: job.prompt,
8891
- assistantResponse: job.assistantResponse,
8892
- actor,
8893
- workspaceMetadata: buildWorkspaceMetadata({
8894
- repoRoot: job.repoRoot,
8895
- branchName: job.branchName,
8896
- remoteUrl,
8897
- defaultBranch,
8898
- baselineSnapshotId: job.baselineSnapshotId,
8899
- currentSnapshotId: job.currentSnapshotId,
8900
- baselineServerHeadHash: job.baselineServerHeadHash,
8901
- currentSnapshotHash: snapshot.snapshotHash,
8902
- localCommitHash: snapshot.localCommitHash,
8903
- repoState
8904
- }),
8905
- idempotencyKey: job.idempotencyKey ?? void 0
8906
- });
8907
- const collabTurn = unwrapResponseObject(collabTurnResp, "collab turn");
8908
- await writeLocalBaseline({
9567
+ const collabTurnResp = await params.api.createCollabTurn(job.currentAppId, {
9568
+ threadId: job.threadId ?? void 0,
9569
+ collabLaneId: job.laneId ?? void 0,
9570
+ prompt: job.prompt,
9571
+ assistantResponse: job.assistantResponse,
9572
+ actor,
9573
+ workspaceMetadata: buildWorkspaceMetadata({
8909
9574
  repoRoot: job.repoRoot,
8910
- repoFingerprint: job.repoFingerprint,
8911
- laneId: job.laneId,
8912
- currentAppId: job.currentAppId,
8913
9575
  branchName: job.branchName,
8914
- lastSnapshotId: snapshot.id,
8915
- lastSnapshotHash: snapshot.snapshotHash,
8916
- lastServerHeadHash: appHead.headCommitHash,
8917
- lastSeenLocalCommitHash: snapshot.localCommitHash
8918
- });
8919
- await updatePendingFinalizeJob(job.id, {
8920
- status: "completed",
8921
- metadata: { collabTurnId: collabTurn.id }
8922
- });
8923
- return {
8924
- mode: "no_diff_turn",
8925
- idempotencyKey: job.idempotencyKey ?? "",
8926
- queued: false,
8927
- jobId: job.id,
9576
+ remoteUrl,
9577
+ defaultBranch,
9578
+ baselineSnapshotId: job.baselineSnapshotId,
9579
+ currentSnapshotId: job.currentSnapshotId,
9580
+ baselineServerHeadHash: job.baselineServerHeadHash,
9581
+ currentSnapshotHash: snapshot.snapshotHash,
9582
+ localCommitHash: snapshot.localCommitHash,
8928
9583
  repoState,
8929
- changeStep: null,
8930
- collabTurn,
8931
- autoSync: null,
8932
- warnings: []
8933
- };
8934
- }
8935
- let submissionDiff = diffResult.diff;
8936
- let submissionBaseHeadHash = job.baselineServerHeadHash;
8937
- let replayedFromBaseHash = null;
8938
- if (!submissionBaseHeadHash) {
9584
+ previousLocalCommitHash,
9585
+ preTurnEvents,
9586
+ turnUsage,
9587
+ promptedAt
9588
+ }),
9589
+ idempotencyKey: job.idempotencyKey ?? void 0
9590
+ });
9591
+ const collabTurn = unwrapResponseObject(collabTurnResp, "collab turn");
9592
+ await writeLocalBaseline({
9593
+ repoRoot: job.repoRoot,
9594
+ repoFingerprint: job.repoFingerprint,
9595
+ laneId: job.laneId,
9596
+ currentAppId: job.currentAppId,
9597
+ branchName: job.branchName,
9598
+ lastSnapshotId: snapshot.id,
9599
+ lastSnapshotHash: snapshot.snapshotHash,
9600
+ lastServerHeadHash: appHead.headCommitHash,
9601
+ lastSeenLocalCommitHash: snapshot.localCommitHash
9602
+ });
9603
+ await updatePendingFinalizeJob(job.id, {
9604
+ status: "completed",
9605
+ metadata: { collabTurnId: collabTurn.id }
9606
+ });
9607
+ return {
9608
+ mode: "no_diff_turn",
9609
+ idempotencyKey: job.idempotencyKey ?? "",
9610
+ queued: false,
9611
+ jobId: job.id,
9612
+ repoState,
9613
+ changeStep: null,
9614
+ collabTurn,
9615
+ autoSync: null,
9616
+ warnings: []
9617
+ };
9618
+ }
9619
+ const localBaselineAdvanced = baseline.lastSnapshotId !== job.baselineSnapshotId;
9620
+ const serverHeadAdvanced = appHead.headCommitHash !== job.baselineServerHeadHash;
9621
+ if (baselineDrifted) {
9622
+ const consistentAdvance = localBaselineAdvanced && serverHeadAdvanced;
9623
+ if (!consistentAdvance) {
8939
9624
  throw buildFinalizeCliError({
8940
- message: "Baseline server head is missing for this finalize job.",
9625
+ message: `Finalize queue baseline advanced inconsistently before this job was processed (localBaselineAdvanced=${localBaselineAdvanced}, serverHeadAdvanced=${serverHeadAdvanced}, jobBaselineSnapshotId=${job.baselineSnapshotId ?? "null"}, liveBaselineSnapshotId=${baseline.lastSnapshotId ?? "null"}, jobBaselineServerHeadHash=${job.baselineServerHeadHash ?? "null"}, liveBaselineServerHeadHash=${baseline.lastServerHeadHash ?? "null"}, currentAppHeadHash=${appHead.headCommitHash}). This indicates local Remix state diverged from the backend in a way that should not be reachable in normal operation; please report this as a bug.`,
8941
9626
  exitCode: 1,
9627
+ hint: "Run `remix collab status` to inspect, then `remix collab re-anchor` only if the lane has no valid baseline.",
8942
9628
  disposition: "terminal",
8943
- reason: "baseline_server_head_missing"
9629
+ reason: "baseline_drifted"
8944
9630
  });
8945
9631
  }
8946
- if (appHead.headCommitHash !== submissionBaseHeadHash) {
9632
+ }
9633
+ let submissionDiff = diffResult.diff;
9634
+ let submissionBaseHeadHash = job.baselineServerHeadHash;
9635
+ let replayedFromBaseHash = null;
9636
+ if (!submissionBaseHeadHash) {
9637
+ throw buildFinalizeCliError({
9638
+ message: "Baseline server head is missing for this finalize job.",
9639
+ exitCode: 1,
9640
+ disposition: "terminal",
9641
+ reason: "baseline_server_head_missing"
9642
+ });
9643
+ }
9644
+ const replayNeeded = appHead.headCommitHash !== submissionBaseHeadHash || baselineDrifted;
9645
+ if (replayNeeded) {
9646
+ try {
8947
9647
  const replayResp = await params.api.startChangeStepReplay(job.currentAppId, {
8948
9648
  prompt: job.prompt,
8949
9649
  assistantResponse: job.assistantResponse,
@@ -8962,7 +9662,11 @@ async function processClaimedPendingFinalizeJob(params) {
8962
9662
  baselineServerHeadHash: job.baselineServerHeadHash,
8963
9663
  currentSnapshotHash: snapshot.snapshotHash,
8964
9664
  localCommitHash: snapshot.localCommitHash,
8965
- repoState
9665
+ repoState,
9666
+ previousLocalCommitHash,
9667
+ preTurnEvents,
9668
+ turnUsage,
9669
+ promptedAt
8966
9670
  }),
8967
9671
  idempotencyKey: buildDeterministicIdempotencyKey({
8968
9672
  kind: "collab_finalize_turn_replay_v1",
@@ -8980,79 +9684,91 @@ async function processClaimedPendingFinalizeJob(params) {
8980
9684
  submissionDiff = replayDiff.diff;
8981
9685
  replayedFromBaseHash = submissionBaseHeadHash;
8982
9686
  submissionBaseHeadHash = appHead.headCommitHash;
9687
+ } catch (error) {
9688
+ if (error instanceof RemixError && error.finalizeDisposition === void 0) {
9689
+ const detail = error.hint ? `${error.message} (${error.hint})` : error.message;
9690
+ throw buildFinalizeCliError({
9691
+ message: "Server-side replay could not adapt the captured diff to the current server head: " + detail,
9692
+ exitCode: 1,
9693
+ hint: error.hint ?? void 0,
9694
+ disposition: "terminal",
9695
+ reason: "replay_unrecoverable"
9696
+ });
9697
+ }
9698
+ throw error;
8983
9699
  }
8984
- const changeStepResp = await params.api.createChangeStep(job.currentAppId, {
8985
- threadId: job.threadId ?? void 0,
8986
- collabLaneId: job.laneId ?? void 0,
8987
- prompt: job.prompt,
8988
- assistantResponse: job.assistantResponse,
8989
- diff: submissionDiff,
8990
- baseCommitHash: submissionBaseHeadHash,
8991
- headCommitHash: submissionBaseHeadHash,
8992
- changedFilesCount: diffResult.stats.changedFilesCount,
8993
- insertions: diffResult.stats.insertions,
8994
- deletions: diffResult.stats.deletions,
8995
- actor,
8996
- workspaceMetadata: buildWorkspaceMetadata({
8997
- repoRoot: job.repoRoot,
8998
- branchName: job.branchName,
8999
- remoteUrl,
9000
- defaultBranch,
9001
- baselineSnapshotId: job.baselineSnapshotId,
9002
- currentSnapshotId: job.currentSnapshotId,
9003
- baselineServerHeadHash: job.baselineServerHeadHash,
9004
- currentSnapshotHash: snapshot.snapshotHash,
9005
- localCommitHash: snapshot.localCommitHash,
9006
- repoState,
9007
- replayedFromBaseHash
9008
- }),
9009
- idempotencyKey: job.idempotencyKey ?? void 0
9010
- });
9011
- const createdStep = unwrapResponseObject(changeStepResp, "change step");
9012
- const changeStep = await pollChangeStep(params.api, job.currentAppId, String(createdStep.id));
9013
- const nextHeadResp = await params.api.getAppHead(job.currentAppId);
9014
- const nextHead = unwrapResponseObject(nextHeadResp, "app head");
9015
- await writeLocalBaseline({
9700
+ }
9701
+ const changeStepResp = await params.api.createChangeStep(job.currentAppId, {
9702
+ threadId: job.threadId ?? void 0,
9703
+ collabLaneId: job.laneId ?? void 0,
9704
+ prompt: job.prompt,
9705
+ assistantResponse: job.assistantResponse,
9706
+ diff: submissionDiff,
9707
+ baseCommitHash: submissionBaseHeadHash,
9708
+ headCommitHash: submissionBaseHeadHash,
9709
+ changedFilesCount: diffResult.stats.changedFilesCount,
9710
+ insertions: diffResult.stats.insertions,
9711
+ deletions: diffResult.stats.deletions,
9712
+ actor,
9713
+ workspaceMetadata: buildWorkspaceMetadata({
9016
9714
  repoRoot: job.repoRoot,
9017
- repoFingerprint: job.repoFingerprint,
9018
- laneId: job.laneId,
9019
- currentAppId: job.currentAppId,
9020
9715
  branchName: job.branchName,
9021
- lastSnapshotId: snapshot.id,
9022
- lastSnapshotHash: snapshot.snapshotHash,
9023
- lastServerHeadHash: nextHead.headCommitHash,
9024
- lastSeenLocalCommitHash: snapshot.localCommitHash
9025
- });
9026
- await updatePendingFinalizeJob(job.id, {
9027
- status: "completed",
9028
- metadata: { changeStepId: String(changeStep.id ?? "") }
9029
- });
9030
- return {
9031
- mode: "changed_turn",
9032
- idempotencyKey: job.idempotencyKey ?? "",
9033
- queued: false,
9034
- jobId: job.id,
9716
+ remoteUrl,
9717
+ defaultBranch,
9718
+ baselineSnapshotId: job.baselineSnapshotId,
9719
+ currentSnapshotId: job.currentSnapshotId,
9720
+ baselineServerHeadHash: job.baselineServerHeadHash,
9721
+ currentSnapshotHash: snapshot.snapshotHash,
9722
+ localCommitHash: snapshot.localCommitHash,
9035
9723
  repoState,
9036
- changeStep,
9037
- collabTurn: null,
9038
- autoSync: null,
9039
- warnings: []
9040
- };
9041
- } catch (error) {
9042
- const classified = classifyFinalizeError(error);
9043
- await updatePendingFinalizeJob(job.id, {
9044
- status: classified.disposition === "terminal" ? "failed" : "queued",
9045
- error: classified.message,
9046
- nextRetryAt: classified.disposition === "terminal" ? null : buildNextRetryAt(job.retryCount),
9047
- metadata: {
9048
- failureDisposition: classified.disposition,
9049
- failureReason: classified.reason
9050
- }
9051
- });
9052
- throw error;
9053
- } finally {
9054
- await params.release();
9055
- }
9724
+ replayedFromBaseHash,
9725
+ previousLocalCommitHash,
9726
+ preTurnEvents,
9727
+ turnUsage,
9728
+ promptedAt
9729
+ }),
9730
+ idempotencyKey: job.idempotencyKey ?? void 0
9731
+ });
9732
+ const createdStep = unwrapResponseObject(changeStepResp, "change step");
9733
+ const changeStep = await pollChangeStep(params.api, job.currentAppId, String(createdStep.id));
9734
+ invalidateAppHeadCache(job.currentAppId);
9735
+ invalidateAppDeltaCacheForApp(job.currentAppId);
9736
+ const nextServerHeadHash = typeof changeStep.headCommitHash === "string" ? changeStep.headCommitHash.trim() : "";
9737
+ if (!nextServerHeadHash) {
9738
+ throw buildFinalizeCliError({
9739
+ message: "Backend returned a succeeded change step without a head commit hash.",
9740
+ exitCode: 1,
9741
+ hint: "This is a backend invariant violation; retry will not help. Re-anchor and try again.",
9742
+ disposition: "terminal",
9743
+ reason: "missing_head_commit_hash"
9744
+ });
9745
+ }
9746
+ await writeLocalBaseline({
9747
+ repoRoot: job.repoRoot,
9748
+ repoFingerprint: job.repoFingerprint,
9749
+ laneId: job.laneId,
9750
+ currentAppId: job.currentAppId,
9751
+ branchName: job.branchName,
9752
+ lastSnapshotId: snapshot.id,
9753
+ lastSnapshotHash: snapshot.snapshotHash,
9754
+ lastServerHeadHash: nextServerHeadHash,
9755
+ lastSeenLocalCommitHash: snapshot.localCommitHash
9756
+ });
9757
+ await updatePendingFinalizeJob(job.id, {
9758
+ status: "completed",
9759
+ metadata: { changeStepId: String(changeStep.id ?? "") }
9760
+ });
9761
+ return {
9762
+ mode: "changed_turn",
9763
+ idempotencyKey: job.idempotencyKey ?? "",
9764
+ queued: false,
9765
+ jobId: job.id,
9766
+ repoState,
9767
+ changeStep,
9768
+ collabTurn: null,
9769
+ autoSync: null,
9770
+ warnings: []
9771
+ };
9056
9772
  }
9057
9773
  async function enqueueCapturedFinalizeTurn(params) {
9058
9774
  return enqueuePendingFinalizeJob({
@@ -9072,7 +9788,7 @@ async function enqueueCapturedFinalizeTurn(params) {
9072
9788
  error: null,
9073
9789
  retryCount: 0,
9074
9790
  lastAttemptAt: null,
9075
- nextRetryAt: null,
9791
+ nextRetryAt: params.nextRetryAt ?? null,
9076
9792
  metadata: params.metadata ?? {}
9077
9793
  });
9078
9794
  }
@@ -9096,6 +9812,7 @@ async function drainPendingFinalizeQueue(params) {
9096
9812
  }
9097
9813
  return results;
9098
9814
  }
9815
+ var FINALIZE_AWAIT_INIT_POST_TIMEOUT_MS = 6e4;
9099
9816
  function collectWarnings(value) {
9100
9817
  if (!Array.isArray(value)) return [];
9101
9818
  return value.filter((entry) => typeof entry === "string" && entry.trim().length > 0);
@@ -9124,37 +9841,99 @@ async function collabFinalizeTurn(params) {
9124
9841
  hint: "Finalize turns now capture the real workspace boundary from the local snapshot store."
9125
9842
  });
9126
9843
  }
9844
+ const pendingInit = await findPendingAsyncJob({
9845
+ repoRoot,
9846
+ branchName: binding.branchName ?? null,
9847
+ kind: "init"
9848
+ });
9849
+ if (pendingInit) {
9850
+ throw new RemixError("Cannot finalize a turn while the initial Remix import is still processing.", {
9851
+ exitCode: 2,
9852
+ hint: `Init job ${pendingInit.id} is still in the background queue. Run \`remix collab status\` to check progress.`
9853
+ });
9854
+ }
9855
+ const pendingInitPost = await findPendingAsyncJob({
9856
+ repoRoot,
9857
+ branchName: binding.branchName ?? null,
9858
+ kind: "init_post"
9859
+ });
9860
+ if (pendingInitPost) {
9861
+ const result = await awaitAsyncJob({
9862
+ jobId: pendingInitPost.id,
9863
+ timeoutMs: FINALIZE_AWAIT_INIT_POST_TIMEOUT_MS,
9864
+ pollIntervalMs: 500
9865
+ });
9866
+ if (result.status === "failed") {
9867
+ throw new RemixError("The initial Remix import failed; this turn cannot be recorded.", {
9868
+ exitCode: 2,
9869
+ hint: (result.job?.error ? `Last error: ${result.job.error}
9870
+
9871
+ ` : "") + "Run `remix collab init` again to retry \u2014 the post-init drainer cleared the local binding so this is safe."
9872
+ });
9873
+ }
9874
+ if (result.status === "timeout") {
9875
+ throw new RemixError("Timed out waiting for the initial Remix import to finish provisioning.", {
9876
+ exitCode: 2,
9877
+ hint: `Init post job ${pendingInitPost.id} did not complete within ${Math.round(
9878
+ FINALIZE_AWAIT_INIT_POST_TIMEOUT_MS / 1e3
9879
+ )}s. Run \`remix collab status\` to check progress, then retry once it reports ready.`
9880
+ });
9881
+ }
9882
+ }
9883
+ const pendingReAnchor = await findPendingAsyncJob({
9884
+ repoRoot,
9885
+ branchName: binding.branchName ?? null,
9886
+ kind: "re_anchor"
9887
+ });
9888
+ if (pendingReAnchor) {
9889
+ throw new RemixError("Cannot finalize a turn while a re-anchor is still processing.", {
9890
+ exitCode: 2,
9891
+ hint: `Re-anchor job ${pendingReAnchor.id} is still in the background queue. Run \`remix collab status\` to check progress.`
9892
+ });
9893
+ }
9127
9894
  const detected = await collabDetectRepoState({
9128
9895
  api: params.api,
9129
9896
  cwd: repoRoot,
9130
9897
  allowBranchMismatch: params.allowBranchMismatch
9131
9898
  });
9132
9899
  if (detected.status === "not_bound") {
9133
- throw new RemixError("Repository is not bound to Remix.", { exitCode: 2, hint: detected.hint });
9900
+ throw new RemixError("Repository is not bound to Remix.", {
9901
+ code: "not_bound",
9902
+ exitCode: 2,
9903
+ hint: detected.hint
9904
+ });
9134
9905
  }
9135
9906
  if (detected.status === "branch_binding_missing" || detected.status === "family_ambiguous") {
9136
- throw new RemixError(detected.hint || "Current branch is not ready for Remix recording.", { exitCode: 2, hint: detected.hint });
9907
+ throw new RemixError(detected.hint || "Current branch is not ready for Remix recording.", {
9908
+ code: detected.status,
9909
+ exitCode: 2,
9910
+ hint: detected.hint
9911
+ });
9137
9912
  }
9138
9913
  if (detected.status === "metadata_conflict" || detected.status === "branch_mismatch") {
9139
9914
  throw new RemixError("Repository must be realigned before finalizing the turn.", {
9915
+ code: detected.status,
9140
9916
  exitCode: 2,
9141
9917
  hint: detected.hint
9142
9918
  });
9143
9919
  }
9144
9920
  if (detected.status === "missing_head" || detected.status === "remote_error") {
9145
9921
  throw new RemixError(detected.hint || "Failed to determine the current repo state.", {
9922
+ code: detected.status,
9146
9923
  exitCode: 1,
9147
9924
  hint: detected.hint
9148
9925
  });
9149
9926
  }
9150
9927
  if (detected.repoState === "server_only_changed") {
9151
9928
  throw new RemixError("Server changes must be pulled locally before finalizing this turn.", {
9929
+ code: "pull_required",
9152
9930
  exitCode: 2,
9153
9931
  hint: detected.hint
9154
9932
  });
9155
9933
  }
9156
9934
  if (detected.repoState === "external_local_base_changed") {
9157
9935
  throw new RemixError("The local checkout must be re-anchored before finalizing this turn.", {
9936
+ code: "re_anchor_required",
9158
9937
  exitCode: 2,
9159
9938
  hint: detected.hint
9160
9939
  });
@@ -9189,6 +9968,8 @@ async function collabFinalizeTurn(params) {
9189
9968
  prompt,
9190
9969
  assistantResponse
9191
9970
  });
9971
+ const awaitingDeadlineMs = typeof params.awaitingUsageDeadlineMs === "number" && params.awaitingUsageDeadlineMs > 0 ? params.awaitingUsageDeadlineMs : null;
9972
+ const nextRetryAt = awaitingDeadlineMs === null ? null : new Date(Date.now() + awaitingDeadlineMs).toISOString();
9192
9973
  const job = await enqueueCapturedFinalizeTurn({
9193
9974
  repoRoot,
9194
9975
  repoFingerprint: binding.repoFingerprint,
@@ -9206,8 +9987,11 @@ async function collabFinalizeTurn(params) {
9206
9987
  remoteUrl: binding.remoteUrl,
9207
9988
  defaultBranch: binding.defaultBranch,
9208
9989
  actor: params.actor ?? null,
9209
- repoState: detected.repoState
9210
- }
9990
+ repoState: detected.repoState,
9991
+ turnUsage: params.turnUsage ?? null,
9992
+ promptedAt: typeof params.promptedAt === "string" && params.promptedAt.trim() ? params.promptedAt.trim() : null
9993
+ },
9994
+ nextRetryAt
9211
9995
  });
9212
9996
  return {
9213
9997
  mode,
@@ -9221,8 +10005,34 @@ async function collabFinalizeTurn(params) {
9221
10005
  warnings: [FINALIZE_QUEUED_WARNING, ...collectWarnings(detected.warnings)]
9222
10006
  };
9223
10007
  }
10008
+ var FINALIZE_PREFLIGHT_FAILURE_CODES = [
10009
+ // Repo has no .remix/ binding at all. Fix: `remix collab init`.
10010
+ "not_bound",
10011
+ // Binding file exists but the current branch has no entry. Fix: same as
10012
+ // not_bound — `remix collab init` records this branch into the binding.
10013
+ "branch_binding_missing",
10014
+ // Branch matches multiple lane families (very rare race). Fix: human
10015
+ // investigation; auto-fixers should treat as warn-only.
10016
+ "family_ambiguous",
10017
+ // Local-vs-server metadata diverged on app id / lane id. Fix: human
10018
+ // investigation; auto-fixers should treat as warn-only.
10019
+ "metadata_conflict",
10020
+ // Currently checked-out branch differs from the one in the binding.
10021
+ // Fix: human (either checkout the recorded branch or update binding).
10022
+ "branch_mismatch",
10023
+ // HEAD ref unreadable / git reports an error. Fix: human investigation.
10024
+ "missing_head",
10025
+ "remote_error",
10026
+ // Server has commits we don't. Fix: `remix collab sync` (safe to
10027
+ // auto-run for fast-forward; non-FF refused by the command itself).
10028
+ "pull_required",
10029
+ // Local base hash doesn't match the recorded baseline (force-push,
10030
+ // hard reset, rebase). Fix: `remix collab re-anchor`.
10031
+ "re_anchor_required"
10032
+ ];
10033
+ var CODE_SET = new Set(FINALIZE_PREFLIGHT_FAILURE_CODES);
9224
10034
 
9225
- // node_modules/@remixhq/core/dist/chunk-R7FVSCQW.js
10035
+ // node_modules/@remixhq/core/dist/chunk-US5SM7ZC.js
9226
10036
  async function readJsonSafe(res) {
9227
10037
  const ct = res.headers.get("content-type") ?? "";
9228
10038
  if (!ct.toLowerCase().includes("application/json")) return null;
@@ -9236,7 +10046,7 @@ function createApiClient(config, opts) {
9236
10046
  const apiKey = (opts?.apiKey ?? "").trim();
9237
10047
  const tokenProvider = opts?.tokenProvider;
9238
10048
  const CLIENT_KEY_HEADER = "x-comerge-api-key";
9239
- async function request(path12, init) {
10049
+ async function request(path13, init) {
9240
10050
  if (!tokenProvider) {
9241
10051
  throw new RemixError("API client is missing a token provider.", {
9242
10052
  exitCode: 1,
@@ -9244,7 +10054,7 @@ function createApiClient(config, opts) {
9244
10054
  });
9245
10055
  }
9246
10056
  const auth = await tokenProvider();
9247
- const url = new URL(path12, config.apiUrl).toString();
10057
+ const url = new URL(path13, config.apiUrl).toString();
9248
10058
  const doFetch = async (bearer) => fetch(url, {
9249
10059
  ...init,
9250
10060
  headers: {
@@ -9268,7 +10078,7 @@ function createApiClient(config, opts) {
9268
10078
  const json = await readJsonSafe(res);
9269
10079
  return json ?? null;
9270
10080
  }
9271
- async function requestBinary(path12, init) {
10081
+ async function requestBinary(path13, init) {
9272
10082
  if (!tokenProvider) {
9273
10083
  throw new RemixError("API client is missing a token provider.", {
9274
10084
  exitCode: 1,
@@ -9276,7 +10086,7 @@ function createApiClient(config, opts) {
9276
10086
  });
9277
10087
  }
9278
10088
  const auth = await tokenProvider();
9279
- const url = new URL(path12, config.apiUrl).toString();
10089
+ const url = new URL(path13, config.apiUrl).toString();
9280
10090
  const doFetch = async (bearer) => fetch(url, {
9281
10091
  ...init,
9282
10092
  headers: {
@@ -9405,6 +10215,10 @@ function createApiClient(config, opts) {
9405
10215
  method: "POST",
9406
10216
  body: JSON.stringify(payload)
9407
10217
  }),
10218
+ attachCollabTurnUsage: (appId, payload) => request(`/v1/apps/${encodeURIComponent(appId)}/collab-turns/attach-usage`, {
10219
+ method: "POST",
10220
+ body: JSON.stringify(payload)
10221
+ }),
9408
10222
  listCollabTurns: (appId, params) => {
9409
10223
  const qs = new URLSearchParams();
9410
10224
  if (params?.limit !== void 0) qs.set("limit", String(params.limit));
@@ -9626,7 +10440,21 @@ function createApiClient(config, opts) {
9626
10440
  method: "GET"
9627
10441
  });
9628
10442
  },
9629
- getSandboxStatus: (appId) => request(`/v1/apps/${encodeURIComponent(appId)}/sandbox/status`, { method: "GET" })
10443
+ getSandboxStatus: (appId) => request(`/v1/apps/${encodeURIComponent(appId)}/sandbox/status`, { method: "GET" }),
10444
+ importHistory: async (payload) => {
10445
+ const res = await request("/v1/history/import", {
10446
+ method: "POST",
10447
+ body: JSON.stringify(payload)
10448
+ });
10449
+ const envelope = res;
10450
+ if (!envelope || typeof envelope !== "object" || !envelope.responseObject) {
10451
+ throw new RemixError("Malformed importHistory response from server.", {
10452
+ exitCode: 1,
10453
+ hint: "Expected `{ responseObject: ImportHistoryResponse }`."
10454
+ });
10455
+ }
10456
+ return envelope.responseObject;
10457
+ }
9630
10458
  };
9631
10459
  }
9632
10460
 
@@ -10108,8 +10936,8 @@ function getErrorMap() {
10108
10936
 
10109
10937
  // node_modules/zod/v3/helpers/parseUtil.js
10110
10938
  var makeIssue = (params) => {
10111
- const { data, path: path12, errorMaps, issueData } = params;
10112
- const fullPath = [...path12, ...issueData.path || []];
10939
+ const { data, path: path13, errorMaps, issueData } = params;
10940
+ const fullPath = [...path13, ...issueData.path || []];
10113
10941
  const fullIssue = {
10114
10942
  ...issueData,
10115
10943
  path: fullPath
@@ -10225,11 +11053,11 @@ var errorUtil;
10225
11053
 
10226
11054
  // node_modules/zod/v3/types.js
10227
11055
  var ParseInputLazyPath = class {
10228
- constructor(parent, value, path12, key) {
11056
+ constructor(parent, value, path13, key) {
10229
11057
  this._cachedPath = [];
10230
11058
  this.parent = parent;
10231
11059
  this.data = value;
10232
- this._path = path12;
11060
+ this._path = path13;
10233
11061
  this._key = key;
10234
11062
  }
10235
11063
  get path() {
@@ -13671,8 +14499,8 @@ var coerce = {
13671
14499
  };
13672
14500
  var NEVER = INVALID;
13673
14501
 
13674
- // node_modules/@remixhq/core/dist/chunk-EVWDYCBL.js
13675
- var import_promises17 = __toESM(require("fs/promises"), 1);
14502
+ // node_modules/@remixhq/core/dist/chunk-P6JHXOV4.js
14503
+ var import_promises18 = __toESM(require("fs/promises"), 1);
13676
14504
  var import_os3 = __toESM(require("os"), 1);
13677
14505
  var import_path7 = __toESM(require("path"), 1);
13678
14506
 
@@ -13731,6 +14559,13 @@ var FunctionsError = class extends Error {
13731
14559
  this.name = name;
13732
14560
  this.context = context;
13733
14561
  }
14562
+ toJSON() {
14563
+ return {
14564
+ name: this.name,
14565
+ message: this.message,
14566
+ context: this.context
14567
+ };
14568
+ }
13734
14569
  };
13735
14570
  var FunctionsFetchError = class extends FunctionsError {
13736
14571
  constructor(context) {
@@ -13771,24 +14606,22 @@ var FunctionsClient = class {
13771
14606
  /**
13772
14607
  * Creates a new Functions client bound to an Edge Functions URL.
13773
14608
  *
13774
- * @example
14609
+ * @example Using supabase-js (recommended)
13775
14610
  * ```ts
13776
- * import { FunctionsClient, FunctionRegion } from '@supabase/functions-js'
14611
+ * import { createClient } from '@supabase/supabase-js'
13777
14612
  *
13778
- * const functions = new FunctionsClient('https://xyzcompany.supabase.co/functions/v1', {
13779
- * headers: { apikey: 'public-anon-key' },
13780
- * region: FunctionRegion.UsEast1,
13781
- * })
14613
+ * const supabase = createClient('https://xyzcompany.supabase.co', 'publishable-or-anon-key')
14614
+ * const { data, error } = await supabase.functions.invoke('hello-world')
13782
14615
  * ```
13783
14616
  *
13784
14617
  * @category Functions
13785
14618
  *
13786
- * @example Creating a Functions client
14619
+ * @example Standalone import for bundle-sensitive environments
13787
14620
  * ```ts
13788
14621
  * import { FunctionsClient, FunctionRegion } from '@supabase/functions-js'
13789
14622
  *
13790
14623
  * const functions = new FunctionsClient('https://xyzcompany.supabase.co/functions/v1', {
13791
- * headers: { apikey: 'public-anon-key' },
14624
+ * headers: { apikey: 'publishable-or-anon-key' },
13792
14625
  * region: FunctionRegion.UsEast1,
13793
14626
  * })
13794
14627
  * ```
@@ -14036,6 +14869,14 @@ var FunctionsClient = class {
14036
14869
  };
14037
14870
 
14038
14871
  // node_modules/@supabase/postgrest-js/dist/index.mjs
14872
+ var DEFAULT_MAX_RETRIES = 3;
14873
+ var getRetryDelay = (attemptIndex) => Math.min(1e3 * 2 ** attemptIndex, 3e4);
14874
+ var RETRYABLE_STATUS_CODES = [520, 503];
14875
+ var RETRYABLE_METHODS = [
14876
+ "GET",
14877
+ "HEAD",
14878
+ "OPTIONS"
14879
+ ];
14039
14880
  var PostgrestError = class extends Error {
14040
14881
  /**
14041
14882
  * @example
@@ -14057,36 +14898,67 @@ var PostgrestError = class extends Error {
14057
14898
  this.hint = context.hint;
14058
14899
  this.code = context.code;
14059
14900
  }
14901
+ toJSON() {
14902
+ return {
14903
+ name: this.name,
14904
+ message: this.message,
14905
+ details: this.details,
14906
+ hint: this.hint,
14907
+ code: this.code
14908
+ };
14909
+ }
14060
14910
  };
14911
+ function sleep2(ms, signal) {
14912
+ return new Promise((resolve) => {
14913
+ if (signal === null || signal === void 0 ? void 0 : signal.aborted) {
14914
+ resolve();
14915
+ return;
14916
+ }
14917
+ const id = setTimeout(() => {
14918
+ signal === null || signal === void 0 || signal.removeEventListener("abort", onAbort);
14919
+ resolve();
14920
+ }, ms);
14921
+ function onAbort() {
14922
+ clearTimeout(id);
14923
+ resolve();
14924
+ }
14925
+ signal === null || signal === void 0 || signal.addEventListener("abort", onAbort);
14926
+ });
14927
+ }
14928
+ function shouldRetry(method, status, attemptCount, retryEnabled) {
14929
+ if (!retryEnabled || attemptCount >= DEFAULT_MAX_RETRIES) return false;
14930
+ if (!RETRYABLE_METHODS.includes(method)) return false;
14931
+ if (!RETRYABLE_STATUS_CODES.includes(status)) return false;
14932
+ return true;
14933
+ }
14061
14934
  var PostgrestBuilder = class {
14062
14935
  /**
14063
14936
  * Creates a builder configured for a specific PostgREST request.
14064
14937
  *
14065
- * @example
14938
+ * @example Using supabase-js (recommended)
14066
14939
  * ```ts
14067
- * import { PostgrestQueryBuilder } from '@supabase/postgrest-js'
14940
+ * import { createClient } from '@supabase/supabase-js'
14068
14941
  *
14069
- * const builder = new PostgrestQueryBuilder(
14070
- * new URL('https://xyzcompany.supabase.co/rest/v1/users'),
14071
- * { headers: new Headers({ apikey: 'public-anon-key' }) }
14072
- * )
14942
+ * const supabase = createClient('https://xyzcompany.supabase.co', 'publishable-or-anon-key')
14943
+ * const { data, error } = await supabase.from('users').select('*')
14073
14944
  * ```
14074
14945
  *
14075
14946
  * @category Database
14076
14947
  *
14077
- * @example Creating a Postgrest query builder
14948
+ * @example Standalone import for bundle-sensitive environments
14078
14949
  * ```ts
14079
14950
  * import { PostgrestQueryBuilder } from '@supabase/postgrest-js'
14080
14951
  *
14081
14952
  * const builder = new PostgrestQueryBuilder(
14082
14953
  * new URL('https://xyzcompany.supabase.co/rest/v1/users'),
14083
- * { headers: new Headers({ apikey: 'public-anon-key' }) }
14954
+ * { headers: new Headers({ apikey: 'publishable-or-anon-key' }) }
14084
14955
  * )
14085
14956
  * ```
14086
14957
  */
14087
14958
  constructor(builder) {
14088
- var _builder$shouldThrowO, _builder$isMaybeSingl, _builder$urlLengthLim;
14959
+ var _builder$shouldThrowO, _builder$isMaybeSingl, _builder$shouldStripN, _builder$urlLengthLim, _builder$retry;
14089
14960
  this.shouldThrowOnError = false;
14961
+ this.retryEnabled = true;
14090
14962
  this.method = builder.method;
14091
14963
  this.url = builder.url;
14092
14964
  this.headers = new Headers(builder.headers);
@@ -14095,7 +14967,9 @@ var PostgrestBuilder = class {
14095
14967
  this.shouldThrowOnError = (_builder$shouldThrowO = builder.shouldThrowOnError) !== null && _builder$shouldThrowO !== void 0 ? _builder$shouldThrowO : false;
14096
14968
  this.signal = builder.signal;
14097
14969
  this.isMaybeSingle = (_builder$isMaybeSingl = builder.isMaybeSingle) !== null && _builder$isMaybeSingl !== void 0 ? _builder$isMaybeSingl : false;
14970
+ this.shouldStripNulls = (_builder$shouldStripN = builder.shouldStripNulls) !== null && _builder$shouldStripN !== void 0 ? _builder$shouldStripN : false;
14098
14971
  this.urlLengthLimit = (_builder$urlLengthLim = builder.urlLengthLimit) !== null && _builder$urlLengthLim !== void 0 ? _builder$urlLengthLim : 8e3;
14972
+ this.retryEnabled = (_builder$retry = builder.retry) !== null && _builder$retry !== void 0 ? _builder$retry : true;
14099
14973
  if (builder.fetch) this.fetch = builder.fetch;
14100
14974
  else this.fetch = fetch;
14101
14975
  }
@@ -14112,6 +14986,60 @@ var PostgrestBuilder = class {
14112
14986
  return this;
14113
14987
  }
14114
14988
  /**
14989
+ * Strip null values from the response data. Properties with `null` values
14990
+ * will be omitted from the returned JSON objects.
14991
+ *
14992
+ * Requires PostgREST 11.2.0+.
14993
+ *
14994
+ * {@link https://docs.postgrest.org/en/stable/references/api/resource_representation.html#stripped-nulls}
14995
+ *
14996
+ * @category Database
14997
+ *
14998
+ * @example With `select()`
14999
+ * ```ts
15000
+ * const { data, error } = await supabase
15001
+ * .from('characters')
15002
+ * .select()
15003
+ * .stripNulls()
15004
+ * ```
15005
+ *
15006
+ * @exampleSql With `select()`
15007
+ * ```sql
15008
+ * create table
15009
+ * characters (id int8 primary key, name text, bio text);
15010
+ *
15011
+ * insert into
15012
+ * characters (id, name, bio)
15013
+ * values
15014
+ * (1, 'Luke', null),
15015
+ * (2, 'Leia', 'Princess of Alderaan');
15016
+ * ```
15017
+ *
15018
+ * @exampleResponse With `select()`
15019
+ * ```json
15020
+ * {
15021
+ * "data": [
15022
+ * {
15023
+ * "id": 1,
15024
+ * "name": "Luke"
15025
+ * },
15026
+ * {
15027
+ * "id": 2,
15028
+ * "name": "Leia",
15029
+ * "bio": "Princess of Alderaan"
15030
+ * }
15031
+ * ],
15032
+ * "status": 200,
15033
+ * "statusText": "OK"
15034
+ * }
15035
+ * ```
15036
+ */
15037
+ stripNulls() {
15038
+ if (this.headers.get("Accept") === "text/csv") throw new Error("stripNulls() cannot be used with csv()");
15039
+ this.shouldStripNulls = true;
15040
+ return this;
15041
+ }
15042
+ /**
14115
15043
  * Set an HTTP header for the request.
14116
15044
  *
14117
15045
  * @category Database
@@ -14121,79 +15049,79 @@ var PostgrestBuilder = class {
14121
15049
  this.headers.set(name, value);
14122
15050
  return this;
14123
15051
  }
14124
- /** *
15052
+ /**
14125
15053
  * @category Database
15054
+ *
15055
+ * Configure retry behavior for this request.
15056
+ *
15057
+ * By default, retries are enabled for idempotent requests (GET, HEAD, OPTIONS)
15058
+ * that fail with network errors or specific HTTP status codes (503, 520).
15059
+ * Retries use exponential backoff (1s, 2s, 4s) with a maximum of 3 attempts.
15060
+ *
15061
+ * @param enabled - Whether to enable retries for this request
15062
+ *
15063
+ * @example
15064
+ * ```ts
15065
+ * // Disable retries for a specific query
15066
+ * const { data, error } = await supabase
15067
+ * .from('users')
15068
+ * .select()
15069
+ * .retry(false)
15070
+ * ```
14126
15071
  */
15072
+ retry(enabled) {
15073
+ this.retryEnabled = enabled;
15074
+ return this;
15075
+ }
14127
15076
  then(onfulfilled, onrejected) {
14128
15077
  var _this = this;
14129
15078
  if (this.schema === void 0) {
14130
15079
  } else if (["GET", "HEAD"].includes(this.method)) this.headers.set("Accept-Profile", this.schema);
14131
15080
  else this.headers.set("Content-Profile", this.schema);
14132
15081
  if (this.method !== "GET" && this.method !== "HEAD") this.headers.set("Content-Type", "application/json");
15082
+ if (this.shouldStripNulls) {
15083
+ const currentAccept = this.headers.get("Accept");
15084
+ if (currentAccept === "application/vnd.pgrst.object+json") this.headers.set("Accept", "application/vnd.pgrst.object+json;nulls=stripped");
15085
+ else if (!currentAccept || currentAccept === "application/json") this.headers.set("Accept", "application/vnd.pgrst.array+json;nulls=stripped");
15086
+ }
14133
15087
  const _fetch = this.fetch;
14134
- let res = _fetch(this.url.toString(), {
14135
- method: this.method,
14136
- headers: this.headers,
14137
- body: JSON.stringify(this.body),
14138
- signal: this.signal
14139
- }).then(async (res$1) => {
14140
- let error = null;
14141
- let data = null;
14142
- let count2 = null;
14143
- let status = res$1.status;
14144
- let statusText = res$1.statusText;
14145
- if (res$1.ok) {
14146
- var _this$headers$get2, _res$headers$get;
14147
- if (_this.method !== "HEAD") {
14148
- var _this$headers$get;
14149
- const body = await res$1.text();
14150
- if (body === "") {
14151
- } else if (_this.headers.get("Accept") === "text/csv") data = body;
14152
- else if (_this.headers.get("Accept") && ((_this$headers$get = _this.headers.get("Accept")) === null || _this$headers$get === void 0 ? void 0 : _this$headers$get.includes("application/vnd.pgrst.plan+text"))) data = body;
14153
- else data = JSON.parse(body);
14154
- }
14155
- const countHeader = (_this$headers$get2 = _this.headers.get("Prefer")) === null || _this$headers$get2 === void 0 ? void 0 : _this$headers$get2.match(/count=(exact|planned|estimated)/);
14156
- const contentRange = (_res$headers$get = res$1.headers.get("content-range")) === null || _res$headers$get === void 0 ? void 0 : _res$headers$get.split("/");
14157
- if (countHeader && contentRange && contentRange.length > 1) count2 = parseInt(contentRange[1]);
14158
- if (_this.isMaybeSingle && Array.isArray(data)) if (data.length > 1) {
14159
- error = {
14160
- code: "PGRST116",
14161
- details: `Results contain ${data.length} rows, application/vnd.pgrst.object+json requires 1 row`,
14162
- hint: null,
14163
- message: "JSON object requested, multiple (or no) rows returned"
14164
- };
14165
- data = null;
14166
- count2 = null;
14167
- status = 406;
14168
- statusText = "Not Acceptable";
14169
- } else if (data.length === 1) data = data[0];
14170
- else data = null;
14171
- } else {
14172
- const body = await res$1.text();
15088
+ const executeWithRetry = async () => {
15089
+ let attemptCount = 0;
15090
+ while (true) {
15091
+ const requestHeaders = new Headers(_this.headers);
15092
+ if (attemptCount > 0) requestHeaders.set("X-Retry-Count", String(attemptCount));
15093
+ let res$1;
14173
15094
  try {
14174
- error = JSON.parse(body);
14175
- if (Array.isArray(error) && res$1.status === 404) {
14176
- data = [];
14177
- error = null;
14178
- status = 200;
14179
- statusText = "OK";
15095
+ res$1 = await _fetch(_this.url.toString(), {
15096
+ method: _this.method,
15097
+ headers: requestHeaders,
15098
+ body: JSON.stringify(_this.body, (_, value) => typeof value === "bigint" ? value.toString() : value),
15099
+ signal: _this.signal
15100
+ });
15101
+ } catch (fetchError) {
15102
+ if ((fetchError === null || fetchError === void 0 ? void 0 : fetchError.name) === "AbortError" || (fetchError === null || fetchError === void 0 ? void 0 : fetchError.code) === "ABORT_ERR") throw fetchError;
15103
+ if (!RETRYABLE_METHODS.includes(_this.method)) throw fetchError;
15104
+ if (_this.retryEnabled && attemptCount < DEFAULT_MAX_RETRIES) {
15105
+ const delay = getRetryDelay(attemptCount);
15106
+ attemptCount++;
15107
+ await sleep2(delay, _this.signal);
15108
+ continue;
14180
15109
  }
14181
- } catch (_unused) {
14182
- if (res$1.status === 404 && body === "") {
14183
- status = 204;
14184
- statusText = "No Content";
14185
- } else error = { message: body };
15110
+ throw fetchError;
15111
+ }
15112
+ if (shouldRetry(_this.method, res$1.status, attemptCount, _this.retryEnabled)) {
15113
+ var _res$headers$get, _res$headers;
15114
+ const retryAfterHeader = (_res$headers$get = (_res$headers = res$1.headers) === null || _res$headers === void 0 ? void 0 : _res$headers.get("Retry-After")) !== null && _res$headers$get !== void 0 ? _res$headers$get : null;
15115
+ const delay = retryAfterHeader !== null ? Math.max(0, parseInt(retryAfterHeader, 10) || 0) * 1e3 : getRetryDelay(attemptCount);
15116
+ await res$1.text();
15117
+ attemptCount++;
15118
+ await sleep2(delay, _this.signal);
15119
+ continue;
14186
15120
  }
14187
- if (error && _this.shouldThrowOnError) throw new PostgrestError(error);
15121
+ return await _this.processResponse(res$1);
14188
15122
  }
14189
- return {
14190
- error,
14191
- data,
14192
- count: count2,
14193
- status,
14194
- statusText
14195
- };
14196
- });
15123
+ };
15124
+ let res = executeWithRetry();
14197
15125
  if (!this.shouldThrowOnError) res = res.catch((fetchError) => {
14198
15126
  var _fetchError$name2;
14199
15127
  let errorDetails = "";
@@ -14226,6 +15154,7 @@ ${cause.stack}`;
14226
15154
  if (urlLength > this.urlLengthLimit) hint += `. Your request URL is ${urlLength} characters. If selecting many fields, consider using views. If filtering with large arrays (e.g., .in('id', [200+ IDs])), consider using an RPC function instead.`;
14227
15155
  }
14228
15156
  return {
15157
+ success: false,
14229
15158
  error: {
14230
15159
  message: `${(_fetchError$name2 = fetchError === null || fetchError === void 0 ? void 0 : fetchError.name) !== null && _fetchError$name2 !== void 0 ? _fetchError$name2 : "FetchError"}: ${fetchError === null || fetchError === void 0 ? void 0 : fetchError.message}`,
14231
15160
  details: errorDetails,
@@ -14241,6 +15170,69 @@ ${cause.stack}`;
14241
15170
  return res.then(onfulfilled, onrejected);
14242
15171
  }
14243
15172
  /**
15173
+ * Process a fetch response and return the standardized postgrest response.
15174
+ */
15175
+ async processResponse(res) {
15176
+ var _this2 = this;
15177
+ let error = null;
15178
+ let data = null;
15179
+ let count2 = null;
15180
+ let status = res.status;
15181
+ let statusText = res.statusText;
15182
+ if (res.ok) {
15183
+ var _this$headers$get2, _res$headers$get2;
15184
+ if (_this2.method !== "HEAD") {
15185
+ var _this$headers$get;
15186
+ const body = await res.text();
15187
+ if (body === "") {
15188
+ } else if (_this2.headers.get("Accept") === "text/csv") data = body;
15189
+ else if (_this2.headers.get("Accept") && ((_this$headers$get = _this2.headers.get("Accept")) === null || _this$headers$get === void 0 ? void 0 : _this$headers$get.includes("application/vnd.pgrst.plan+text"))) data = body;
15190
+ else data = JSON.parse(body);
15191
+ }
15192
+ const countHeader = (_this$headers$get2 = _this2.headers.get("Prefer")) === null || _this$headers$get2 === void 0 ? void 0 : _this$headers$get2.match(/count=(exact|planned|estimated)/);
15193
+ const contentRange = (_res$headers$get2 = res.headers.get("content-range")) === null || _res$headers$get2 === void 0 ? void 0 : _res$headers$get2.split("/");
15194
+ if (countHeader && contentRange && contentRange.length > 1) count2 = parseInt(contentRange[1]);
15195
+ if (_this2.isMaybeSingle && Array.isArray(data)) if (data.length > 1) {
15196
+ error = {
15197
+ code: "PGRST116",
15198
+ details: `Results contain ${data.length} rows, application/vnd.pgrst.object+json requires 1 row`,
15199
+ hint: null,
15200
+ message: "JSON object requested, multiple (or no) rows returned"
15201
+ };
15202
+ data = null;
15203
+ count2 = null;
15204
+ status = 406;
15205
+ statusText = "Not Acceptable";
15206
+ } else if (data.length === 1) data = data[0];
15207
+ else data = null;
15208
+ } else {
15209
+ const body = await res.text();
15210
+ try {
15211
+ error = JSON.parse(body);
15212
+ if (Array.isArray(error) && res.status === 404) {
15213
+ data = [];
15214
+ error = null;
15215
+ status = 200;
15216
+ statusText = "OK";
15217
+ }
15218
+ } catch (_unused) {
15219
+ if (res.status === 404 && body === "") {
15220
+ status = 204;
15221
+ statusText = "No Content";
15222
+ } else error = { message: body };
15223
+ }
15224
+ if (error && _this2.shouldThrowOnError) throw new PostgrestError(error);
15225
+ }
15226
+ return {
15227
+ success: error === null,
15228
+ error,
15229
+ data,
15230
+ count: count2,
15231
+ status,
15232
+ statusText
15233
+ };
15234
+ }
15235
+ /**
14244
15236
  * Override the type of the returned `data`.
14245
15237
  *
14246
15238
  * @typeParam NewResult - The new result type to override with
@@ -16913,22 +17905,39 @@ var PostgrestQueryBuilder = class {
16913
17905
  *
16914
17906
  * @category Database
16915
17907
  *
16916
- * @example Creating a Postgrest query builder
17908
+ * @param url - The URL for the query
17909
+ * @param options - Named parameters
17910
+ * @param options.headers - Custom headers
17911
+ * @param options.schema - Postgres schema to use
17912
+ * @param options.fetch - Custom fetch implementation
17913
+ * @param options.urlLengthLimit - Maximum URL length before warning
17914
+ * @param options.retry - Enable automatic retries for transient errors (default: true)
17915
+ *
17916
+ * @example Using supabase-js (recommended)
17917
+ * ```ts
17918
+ * import { createClient } from '@supabase/supabase-js'
17919
+ *
17920
+ * const supabase = createClient('https://xyzcompany.supabase.co', 'publishable-or-anon-key')
17921
+ * const { data, error } = await supabase.from('users').select('*')
17922
+ * ```
17923
+ *
17924
+ * @example Standalone import for bundle-sensitive environments
16917
17925
  * ```ts
16918
17926
  * import { PostgrestQueryBuilder } from '@supabase/postgrest-js'
16919
17927
  *
16920
17928
  * const query = new PostgrestQueryBuilder(
16921
17929
  * new URL('https://xyzcompany.supabase.co/rest/v1/users'),
16922
- * { headers: { apikey: 'public-anon-key' } }
17930
+ * { headers: { apikey: 'publishable-or-anon-key' }, retry: true }
16923
17931
  * )
16924
17932
  * ```
16925
17933
  */
16926
- constructor(url, { headers = {}, schema, fetch: fetch$1, urlLengthLimit = 8e3 }) {
17934
+ constructor(url, { headers = {}, schema, fetch: fetch$1, urlLengthLimit = 8e3, retry }) {
16927
17935
  this.url = url;
16928
17936
  this.headers = new Headers(headers);
16929
17937
  this.schema = schema;
16930
17938
  this.fetch = fetch$1;
16931
17939
  this.urlLengthLimit = urlLengthLimit;
17940
+ this.retry = retry;
16932
17941
  }
16933
17942
  /**
16934
17943
  * Clone URL and headers to prevent shared state between operations.
@@ -17736,7 +18745,8 @@ var PostgrestQueryBuilder = class {
17736
18745
  headers,
17737
18746
  schema: this.schema,
17738
18747
  fetch: this.fetch,
17739
- urlLengthLimit: this.urlLengthLimit
18748
+ urlLengthLimit: this.urlLengthLimit,
18749
+ retry: this.retry
17740
18750
  });
17741
18751
  }
17742
18752
  /**
@@ -17870,7 +18880,8 @@ var PostgrestQueryBuilder = class {
17870
18880
  schema: this.schema,
17871
18881
  body: values,
17872
18882
  fetch: (_this$fetch = this.fetch) !== null && _this$fetch !== void 0 ? _this$fetch : fetch,
17873
- urlLengthLimit: this.urlLengthLimit
18883
+ urlLengthLimit: this.urlLengthLimit,
18884
+ retry: this.retry
17874
18885
  });
17875
18886
  }
17876
18887
  /**
@@ -18103,7 +19114,8 @@ var PostgrestQueryBuilder = class {
18103
19114
  schema: this.schema,
18104
19115
  body: values,
18105
19116
  fetch: (_this$fetch2 = this.fetch) !== null && _this$fetch2 !== void 0 ? _this$fetch2 : fetch,
18106
- urlLengthLimit: this.urlLengthLimit
19117
+ urlLengthLimit: this.urlLengthLimit,
19118
+ retry: this.retry
18107
19119
  });
18108
19120
  }
18109
19121
  /**
@@ -18257,7 +19269,8 @@ var PostgrestQueryBuilder = class {
18257
19269
  schema: this.schema,
18258
19270
  body: values,
18259
19271
  fetch: (_this$fetch3 = this.fetch) !== null && _this$fetch3 !== void 0 ? _this$fetch3 : fetch,
18260
- urlLengthLimit: this.urlLengthLimit
19272
+ urlLengthLimit: this.urlLengthLimit,
19273
+ retry: this.retry
18261
19274
  });
18262
19275
  }
18263
19276
  /**
@@ -18389,7 +19402,8 @@ var PostgrestQueryBuilder = class {
18389
19402
  headers,
18390
19403
  schema: this.schema,
18391
19404
  fetch: (_this$fetch4 = this.fetch) !== null && _this$fetch4 !== void 0 ? _this$fetch4 : fetch,
18392
- urlLengthLimit: this.urlLengthLimit
19405
+ urlLengthLimit: this.urlLengthLimit,
19406
+ retry: this.retry
18393
19407
  });
18394
19408
  }
18395
19409
  };
@@ -18455,15 +19469,16 @@ var PostgrestClient = class PostgrestClient2 {
18455
19469
  * @param options.fetch - Custom fetch
18456
19470
  * @param options.timeout - Optional timeout in milliseconds for all requests. When set, requests will automatically abort after this duration to prevent indefinite hangs.
18457
19471
  * @param options.urlLengthLimit - Maximum URL length in characters before warnings/errors are triggered. Defaults to 8000.
18458
- * @example
19472
+ * @param options.retry - Enable or disable automatic retries for transient errors.
19473
+ * When enabled, idempotent requests (GET, HEAD, OPTIONS) that fail with network
19474
+ * errors or HTTP 503/520 responses will be automatically retried up to 3 times
19475
+ * with exponential backoff (1s, 2s, 4s). Defaults to `true`.
19476
+ * @example Using supabase-js (recommended)
18459
19477
  * ```ts
18460
- * import { PostgrestClient } from '@supabase/postgrest-js'
19478
+ * import { createClient } from '@supabase/supabase-js'
18461
19479
  *
18462
- * const postgrest = new PostgrestClient('https://xyzcompany.supabase.co/rest/v1', {
18463
- * headers: { apikey: 'public-anon-key' },
18464
- * schema: 'public',
18465
- * timeout: 30000, // 30 second timeout
18466
- * })
19480
+ * const supabase = createClient('https://xyzcompany.supabase.co', 'publishable-or-anon-key')
19481
+ * const { data, error } = await supabase.from('profiles').select('*')
18467
19482
  * ```
18468
19483
  *
18469
19484
  * @category Database
@@ -18472,28 +19487,18 @@ var PostgrestClient = class PostgrestClient2 {
18472
19487
  * - A `timeout` option (in milliseconds) can be set to automatically abort requests that take too long.
18473
19488
  * - A `urlLengthLimit` option (default: 8000) can be set to control when URL length warnings are included in error messages for aborted requests.
18474
19489
  *
18475
- * @example Creating a Postgrest client
19490
+ * @example Standalone import for bundle-sensitive environments
18476
19491
  * ```ts
18477
19492
  * import { PostgrestClient } from '@supabase/postgrest-js'
18478
19493
  *
18479
19494
  * const postgrest = new PostgrestClient('https://xyzcompany.supabase.co/rest/v1', {
18480
- * headers: { apikey: 'public-anon-key' },
18481
- * schema: 'public',
18482
- * })
18483
- * ```
18484
- *
18485
- * @example With timeout
18486
- * ```ts
18487
- * import { PostgrestClient } from '@supabase/postgrest-js'
18488
- *
18489
- * const postgrest = new PostgrestClient('https://xyzcompany.supabase.co/rest/v1', {
18490
- * headers: { apikey: 'public-anon-key' },
19495
+ * headers: { apikey: 'publishable-or-anon-key' },
18491
19496
  * schema: 'public',
18492
19497
  * timeout: 30000, // 30 second timeout
18493
19498
  * })
18494
19499
  * ```
18495
19500
  */
18496
- constructor(url, { headers = {}, schema, fetch: fetch$1, timeout, urlLengthLimit = 8e3 } = {}) {
19501
+ constructor(url, { headers = {}, schema, fetch: fetch$1, timeout, urlLengthLimit = 8e3, retry } = {}) {
18497
19502
  this.url = url;
18498
19503
  this.headers = new Headers(headers);
18499
19504
  this.schemaName = schema;
@@ -18521,6 +19526,7 @@ var PostgrestClient = class PostgrestClient2 {
18521
19526
  return originalFetch(input, _objectSpread2(_objectSpread2({}, init), {}, { signal: controller.signal })).finally(() => clearTimeout(timeoutId));
18522
19527
  };
18523
19528
  else this.fetch = originalFetch;
19529
+ this.retry = retry;
18524
19530
  }
18525
19531
  /**
18526
19532
  * Perform a query on a table or a view.
@@ -18535,7 +19541,8 @@ var PostgrestClient = class PostgrestClient2 {
18535
19541
  headers: new Headers(this.headers),
18536
19542
  schema: this.schemaName,
18537
19543
  fetch: this.fetch,
18538
- urlLengthLimit: this.urlLengthLimit
19544
+ urlLengthLimit: this.urlLengthLimit,
19545
+ retry: this.retry
18539
19546
  });
18540
19547
  }
18541
19548
  /**
@@ -18552,7 +19559,8 @@ var PostgrestClient = class PostgrestClient2 {
18552
19559
  headers: this.headers,
18553
19560
  schema,
18554
19561
  fetch: this.fetch,
18555
- urlLengthLimit: this.urlLengthLimit
19562
+ urlLengthLimit: this.urlLengthLimit,
19563
+ retry: this.retry
18556
19564
  });
18557
19565
  }
18558
19566
  /**
@@ -18749,7 +19757,8 @@ var PostgrestClient = class PostgrestClient2 {
18749
19757
  schema: this.schemaName,
18750
19758
  body,
18751
19759
  fetch: (_this$fetch = this.fetch) !== null && _this$fetch !== void 0 ? _this$fetch : fetch,
18752
- urlLengthLimit: this.urlLengthLimit
19760
+ urlLengthLimit: this.urlLengthLimit,
19761
+ retry: this.retry
18753
19762
  });
18754
19763
  }
18755
19764
  };
@@ -18868,7 +19877,7 @@ Suggested solution: ${env.workaround}`;
18868
19877
  var websocket_factory_default = WebSocketFactory;
18869
19878
 
18870
19879
  // node_modules/@supabase/realtime-js/dist/module/lib/version.js
18871
- var version = "2.100.1";
19880
+ var version = "2.104.1";
18872
19881
 
18873
19882
  // node_modules/@supabase/realtime-js/dist/module/lib/constants.js
18874
19883
  var DEFAULT_VERSION = `realtime-js/${version}`;
@@ -21143,7 +22152,7 @@ var ChannelAdapter = class {
21143
22152
  try {
21144
22153
  push = this.channel.push(event, payload, timeout);
21145
22154
  } catch (error) {
21146
- throw `tried to push '${event}' to '${this.channel.topic}' before joining. Use channel.subscribe() before pushing events`;
22155
+ throw new Error(`tried to push '${event}' to '${this.channel.topic}' before joining. Use channel.subscribe() before pushing events`);
21147
22156
  }
21148
22157
  if (this.channel.pushBuffer.length > MAX_PUSH_BUFFER_SIZE) {
21149
22158
  const removedPush = this.channel.pushBuffer.shift();
@@ -21243,12 +22252,23 @@ var RealtimeChannel = class _RealtimeChannel {
21243
22252
  *
21244
22253
  * @category Realtime
21245
22254
  *
21246
- * @example Example for a public channel
22255
+ * @example Using supabase-js (recommended)
22256
+ * ```ts
22257
+ * import { createClient } from '@supabase/supabase-js'
22258
+ *
22259
+ * const supabase = createClient('https://xyzcompany.supabase.co', 'publishable-or-anon-key')
22260
+ * const channel = supabase.channel('room1')
22261
+ * channel
22262
+ * .on('broadcast', { event: 'cursor-pos' }, (payload) => console.log(payload))
22263
+ * .subscribe()
22264
+ * ```
22265
+ *
22266
+ * @example Standalone import for bundle-sensitive environments
21247
22267
  * ```ts
21248
22268
  * import RealtimeClient from '@supabase/realtime-js'
21249
22269
  *
21250
22270
  * const client = new RealtimeClient('https://xyzcompany.supabase.co/realtime/v1', {
21251
- * params: { apikey: 'public-anon-key' },
22271
+ * params: { apikey: 'publishable-or-anon-key' },
21252
22272
  * })
21253
22273
  * const channel = new RealtimeChannel('realtime:public:messages', { config: {} }, client)
21254
22274
  * ```
@@ -21274,7 +22294,7 @@ var RealtimeChannel = class _RealtimeChannel {
21274
22294
  this.broadcastEndpointURL = httpEndpointURL(this.socket.socketAdapter.endPointURL());
21275
22295
  this.private = this.params.config.private || false;
21276
22296
  if (!this.private && ((_b = (_a = this.params.config) === null || _a === void 0 ? void 0 : _a.broadcast) === null || _b === void 0 ? void 0 : _b.replay)) {
21277
- throw `tried to use replay on public channel '${this.topic}'. It must be a private channel.`;
22297
+ throw new Error(`tried to use replay on public channel '${this.topic}'. It must be a private channel.`);
21278
22298
  }
21279
22299
  }
21280
22300
  /**
@@ -21542,9 +22562,11 @@ var RealtimeChannel = class _RealtimeChannel {
21542
22562
  * ```
21543
22563
  */
21544
22564
  on(type, filter, callback) {
21545
- if (this.channelAdapter.isJoined() && type === REALTIME_LISTEN_TYPES.PRESENCE) {
21546
- this.socket.log("channel", `cannot add presence callbacks for ${this.topic} after joining.`);
21547
- throw new Error("cannot add presence callbacks after joining a channel");
22565
+ const stateCheck = this.channelAdapter.isJoined() || this.channelAdapter.isJoining();
22566
+ const typeCheck = type === REALTIME_LISTEN_TYPES.PRESENCE || type === REALTIME_LISTEN_TYPES.POSTGRES_CHANGES;
22567
+ if (stateCheck && typeCheck) {
22568
+ this.socket.log("channel", `cannot add \`${type}\` callbacks for ${this.topic} after \`subscribe()\`.`);
22569
+ throw new Error(`cannot add \`${type}\` callbacks for ${this.topic} after \`subscribe()\`.`);
21548
22570
  }
21549
22571
  return this._on(type, filter, callback);
21550
22572
  }
@@ -21564,7 +22586,7 @@ var RealtimeChannel = class _RealtimeChannel {
21564
22586
  async httpSend(event, payload, opts = {}) {
21565
22587
  var _a;
21566
22588
  if (payload === void 0 || payload === null) {
21567
- return Promise.reject("Payload is required for httpSend()");
22589
+ return Promise.reject(new Error("Payload is required for httpSend()"));
21568
22590
  }
21569
22591
  const headers = {
21570
22592
  apikey: this.socket.apiKey ? this.socket.apiKey : "",
@@ -21794,8 +22816,8 @@ var RealtimeChannel = class _RealtimeChannel {
21794
22816
  }
21795
22817
  /** @internal */
21796
22818
  _notThisChannelEvent(event, ref) {
21797
- const { close, error, leave, join } = CHANNEL_EVENTS;
21798
- const events = [close, error, leave, join];
22819
+ const { close, error, leave, join: join2 } = CHANNEL_EVENTS;
22820
+ const events = [close, error, leave, join2];
21799
22821
  return ref && events.includes(event) && ref !== this.joinPush.ref;
21800
22822
  }
21801
22823
  /** @internal */
@@ -21818,6 +22840,16 @@ var RealtimeChannel = class _RealtimeChannel {
21818
22840
  return payload;
21819
22841
  });
21820
22842
  }
22843
+ copyBindings(other) {
22844
+ if (this.joinedOnce) {
22845
+ throw new Error("cannot copy bindings into joined channel");
22846
+ }
22847
+ for (const kind in other.bindings) {
22848
+ for (const binding of other.bindings[kind]) {
22849
+ this._on(binding.type, binding.filter, binding.callback);
22850
+ }
22851
+ }
22852
+ }
21821
22853
  /**
21822
22854
  * Compares two optional filter values for equality.
21823
22855
  * Treats undefined, null, and empty string as equivalent empty values.
@@ -22038,12 +23070,23 @@ var RealtimeClient = class {
22038
23070
  *
22039
23071
  * @category Realtime
22040
23072
  *
22041
- * @example Example for a public channel
23073
+ * @example Using supabase-js (recommended)
23074
+ * ```ts
23075
+ * import { createClient } from '@supabase/supabase-js'
23076
+ *
23077
+ * const supabase = createClient('https://xyzcompany.supabase.co', 'publishable-or-anon-key')
23078
+ * const channel = supabase.channel('room1')
23079
+ * channel
23080
+ * .on('broadcast', { event: 'cursor-pos' }, (payload) => console.log(payload))
23081
+ * .subscribe()
23082
+ * ```
23083
+ *
23084
+ * @example Standalone import for bundle-sensitive environments
22042
23085
  * ```ts
22043
23086
  * import RealtimeClient from '@supabase/realtime-js'
22044
23087
  *
22045
23088
  * const client = new RealtimeClient('https://xyzcompany.supabase.co/realtime/v1', {
22046
- * params: { apikey: 'public-anon-key' },
23089
+ * params: { apikey: 'publishable-or-anon-key' },
22047
23090
  * })
22048
23091
  * client.connect()
22049
23092
  * ```
@@ -22562,8 +23605,8 @@ var IcebergError = class extends Error {
22562
23605
  return this.status === 419;
22563
23606
  }
22564
23607
  };
22565
- function buildUrl(baseUrl, path12, query) {
22566
- const url = new URL(path12, baseUrl);
23608
+ function buildUrl(baseUrl, path13, query) {
23609
+ const url = new URL(path13, baseUrl);
22567
23610
  if (query) {
22568
23611
  for (const [key, value] of Object.entries(query)) {
22569
23612
  if (value !== void 0) {
@@ -22593,12 +23636,12 @@ function createFetchClient(options) {
22593
23636
  return {
22594
23637
  async request({
22595
23638
  method,
22596
- path: path12,
23639
+ path: path13,
22597
23640
  query,
22598
23641
  body,
22599
23642
  headers
22600
23643
  }) {
22601
- const url = buildUrl(options.baseUrl, path12, query);
23644
+ const url = buildUrl(options.baseUrl, path13, query);
22602
23645
  const authHeaders = await buildAuthHeaders(options.auth);
22603
23646
  const res = await fetchFn(url, {
22604
23647
  method,
@@ -23066,6 +24109,57 @@ var IcebergRestCatalog = class {
23066
24109
  };
23067
24110
 
23068
24111
  // node_modules/@supabase/storage-js/dist/index.mjs
24112
+ function _typeof2(o2) {
24113
+ "@babel/helpers - typeof";
24114
+ return _typeof2 = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function(o$1) {
24115
+ return typeof o$1;
24116
+ } : function(o$1) {
24117
+ return o$1 && "function" == typeof Symbol && o$1.constructor === Symbol && o$1 !== Symbol.prototype ? "symbol" : typeof o$1;
24118
+ }, _typeof2(o2);
24119
+ }
24120
+ function toPrimitive2(t, r) {
24121
+ if ("object" != _typeof2(t) || !t) return t;
24122
+ var e = t[Symbol.toPrimitive];
24123
+ if (void 0 !== e) {
24124
+ var i2 = e.call(t, r || "default");
24125
+ if ("object" != _typeof2(i2)) return i2;
24126
+ throw new TypeError("@@toPrimitive must return a primitive value.");
24127
+ }
24128
+ return ("string" === r ? String : Number)(t);
24129
+ }
24130
+ function toPropertyKey2(t) {
24131
+ var i2 = toPrimitive2(t, "string");
24132
+ return "symbol" == _typeof2(i2) ? i2 : i2 + "";
24133
+ }
24134
+ function _defineProperty2(e, r, t) {
24135
+ return (r = toPropertyKey2(r)) in e ? Object.defineProperty(e, r, {
24136
+ value: t,
24137
+ enumerable: true,
24138
+ configurable: true,
24139
+ writable: true
24140
+ }) : e[r] = t, e;
24141
+ }
24142
+ function ownKeys2(e, r) {
24143
+ var t = Object.keys(e);
24144
+ if (Object.getOwnPropertySymbols) {
24145
+ var o2 = Object.getOwnPropertySymbols(e);
24146
+ r && (o2 = o2.filter(function(r$1) {
24147
+ return Object.getOwnPropertyDescriptor(e, r$1).enumerable;
24148
+ })), t.push.apply(t, o2);
24149
+ }
24150
+ return t;
24151
+ }
24152
+ function _objectSpread22(e) {
24153
+ for (var r = 1; r < arguments.length; r++) {
24154
+ var t = null != arguments[r] ? arguments[r] : {};
24155
+ r % 2 ? ownKeys2(Object(t), true).forEach(function(r$1) {
24156
+ _defineProperty2(e, r$1, t[r$1]);
24157
+ }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(e, Object.getOwnPropertyDescriptors(t)) : ownKeys2(Object(t)).forEach(function(r$1) {
24158
+ Object.defineProperty(e, r$1, Object.getOwnPropertyDescriptor(t, r$1));
24159
+ });
24160
+ }
24161
+ return e;
24162
+ }
23069
24163
  var StorageError = class extends Error {
23070
24164
  constructor(message, namespace = "storage", status, statusCode) {
23071
24165
  super(message);
@@ -23075,6 +24169,14 @@ var StorageError = class extends Error {
23075
24169
  this.status = status;
23076
24170
  this.statusCode = statusCode;
23077
24171
  }
24172
+ toJSON() {
24173
+ return {
24174
+ name: this.name,
24175
+ message: this.message,
24176
+ status: this.status,
24177
+ statusCode: this.statusCode
24178
+ };
24179
+ }
23078
24180
  };
23079
24181
  function isStorageError(error) {
23080
24182
  return typeof error === "object" && error !== null && "__isStorageError" in error;
@@ -23087,12 +24189,7 @@ var StorageApiError = class extends StorageError {
23087
24189
  this.statusCode = statusCode;
23088
24190
  }
23089
24191
  toJSON() {
23090
- return {
23091
- name: this.name,
23092
- message: this.message,
23093
- status: this.status,
23094
- statusCode: this.statusCode
23095
- };
24192
+ return _objectSpread22({}, super.toJSON());
23096
24193
  }
23097
24194
  };
23098
24195
  var StorageUnknownError = class extends StorageError {
@@ -23102,6 +24199,18 @@ var StorageUnknownError = class extends StorageError {
23102
24199
  this.originalError = originalError;
23103
24200
  }
23104
24201
  };
24202
+ function setHeader(headers, name, value) {
24203
+ const result = _objectSpread22({}, headers);
24204
+ const nameLower = name.toLowerCase();
24205
+ for (const key of Object.keys(result)) if (key.toLowerCase() === nameLower) delete result[key];
24206
+ result[nameLower] = value;
24207
+ return result;
24208
+ }
24209
+ function normalizeHeaders(headers) {
24210
+ const result = {};
24211
+ for (const [key, value] of Object.entries(headers)) result[key.toLowerCase()] = value;
24212
+ return result;
24213
+ }
23105
24214
  var resolveFetch2 = (customFetch) => {
23106
24215
  if (customFetch) return (...args) => customFetch(...args);
23107
24216
  return (...args) => fetch(...args);
@@ -23128,57 +24237,6 @@ var isValidBucketName = (bucketName) => {
23128
24237
  if (bucketName.includes("/") || bucketName.includes("\\")) return false;
23129
24238
  return /^[\w!.\*'() &$@=;:+,?-]+$/.test(bucketName);
23130
24239
  };
23131
- function _typeof2(o2) {
23132
- "@babel/helpers - typeof";
23133
- return _typeof2 = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function(o$1) {
23134
- return typeof o$1;
23135
- } : function(o$1) {
23136
- return o$1 && "function" == typeof Symbol && o$1.constructor === Symbol && o$1 !== Symbol.prototype ? "symbol" : typeof o$1;
23137
- }, _typeof2(o2);
23138
- }
23139
- function toPrimitive2(t, r) {
23140
- if ("object" != _typeof2(t) || !t) return t;
23141
- var e = t[Symbol.toPrimitive];
23142
- if (void 0 !== e) {
23143
- var i2 = e.call(t, r || "default");
23144
- if ("object" != _typeof2(i2)) return i2;
23145
- throw new TypeError("@@toPrimitive must return a primitive value.");
23146
- }
23147
- return ("string" === r ? String : Number)(t);
23148
- }
23149
- function toPropertyKey2(t) {
23150
- var i2 = toPrimitive2(t, "string");
23151
- return "symbol" == _typeof2(i2) ? i2 : i2 + "";
23152
- }
23153
- function _defineProperty2(e, r, t) {
23154
- return (r = toPropertyKey2(r)) in e ? Object.defineProperty(e, r, {
23155
- value: t,
23156
- enumerable: true,
23157
- configurable: true,
23158
- writable: true
23159
- }) : e[r] = t, e;
23160
- }
23161
- function ownKeys2(e, r) {
23162
- var t = Object.keys(e);
23163
- if (Object.getOwnPropertySymbols) {
23164
- var o2 = Object.getOwnPropertySymbols(e);
23165
- r && (o2 = o2.filter(function(r$1) {
23166
- return Object.getOwnPropertyDescriptor(e, r$1).enumerable;
23167
- })), t.push.apply(t, o2);
23168
- }
23169
- return t;
23170
- }
23171
- function _objectSpread22(e) {
23172
- for (var r = 1; r < arguments.length; r++) {
23173
- var t = null != arguments[r] ? arguments[r] : {};
23174
- r % 2 ? ownKeys2(Object(t), true).forEach(function(r$1) {
23175
- _defineProperty2(e, r$1, t[r$1]);
23176
- }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(e, Object.getOwnPropertyDescriptors(t)) : ownKeys2(Object(t)).forEach(function(r$1) {
23177
- Object.defineProperty(e, r$1, Object.getOwnPropertyDescriptor(t, r$1));
23178
- });
23179
- }
23180
- return e;
23181
- }
23182
24240
  var _getErrorMessage = (err) => {
23183
24241
  var _err$error;
23184
24242
  return err.msg || err.message || err.error_description || (typeof err.error === "string" ? err.error : (_err$error = err.error) === null || _err$error === void 0 ? void 0 : _err$error.message) || JSON.stringify(err);
@@ -23204,7 +24262,11 @@ var _getRequestParams = (method, options, parameters, body) => {
23204
24262
  };
23205
24263
  if (method === "GET" || method === "HEAD" || !body) return _objectSpread22(_objectSpread22({}, params), parameters);
23206
24264
  if (isPlainObject2(body)) {
23207
- params.headers = _objectSpread22({ "Content-Type": "application/json" }, options === null || options === void 0 ? void 0 : options.headers);
24265
+ var _contentType;
24266
+ const headers = (options === null || options === void 0 ? void 0 : options.headers) || {};
24267
+ let contentType;
24268
+ for (const [key, value] of Object.entries(headers)) if (key.toLowerCase() === "content-type") contentType = value;
24269
+ params.headers = setHeader(headers, "Content-Type", (_contentType = contentType) !== null && _contentType !== void 0 ? _contentType : "application/json");
23208
24270
  params.body = JSON.stringify(body);
23209
24271
  } else params.body = body;
23210
24272
  if (options === null || options === void 0 ? void 0 : options.duplex) params.duplex = options.duplex;
@@ -23257,7 +24319,7 @@ var BaseApiClient = class {
23257
24319
  constructor(url, headers = {}, fetch$1, namespace = "storage") {
23258
24320
  this.shouldThrowOnError = false;
23259
24321
  this.url = url;
23260
- this.headers = headers;
24322
+ this.headers = normalizeHeaders(headers);
23261
24323
  this.fetch = resolveFetch2(fetch$1);
23262
24324
  this.namespace = namespace;
23263
24325
  }
@@ -23280,7 +24342,7 @@ var BaseApiClient = class {
23280
24342
  * @returns this - For method chaining
23281
24343
  */
23282
24344
  setHeader(name, value) {
23283
- this.headers = _objectSpread22(_objectSpread22({}, this.headers), {}, { [name]: value });
24345
+ this.headers = setHeader(this.headers, name, value);
23284
24346
  return this;
23285
24347
  }
23286
24348
  /**
@@ -23417,7 +24479,7 @@ var StorageFileApi = class extends BaseApiClient {
23417
24479
  * @param path The relative file path. Should be of the format `folder/subfolder/filename.png`. The bucket must already exist before attempting to upload.
23418
24480
  * @param fileBody The body of the file to be stored in the bucket.
23419
24481
  */
23420
- async uploadOrUpdate(method, path12, fileBody, fileOptions) {
24482
+ async uploadOrUpdate(method, path13, fileBody, fileOptions) {
23421
24483
  var _this = this;
23422
24484
  return _this.handleOperation(async () => {
23423
24485
  let body;
@@ -23440,8 +24502,8 @@ var StorageFileApi = class extends BaseApiClient {
23440
24502
  if (metadata) headers["x-metadata"] = _this.toBase64(_this.encodeMetadata(metadata));
23441
24503
  if ((typeof ReadableStream !== "undefined" && body instanceof ReadableStream || body && typeof body === "object" && "pipe" in body && typeof body.pipe === "function") && !options.duplex) options.duplex = "half";
23442
24504
  }
23443
- if (fileOptions === null || fileOptions === void 0 ? void 0 : fileOptions.headers) headers = _objectSpread22(_objectSpread22({}, headers), fileOptions.headers);
23444
- const cleanPath = _this._removeEmptyFolders(path12);
24505
+ if (fileOptions === null || fileOptions === void 0 ? void 0 : fileOptions.headers) for (const [key, value] of Object.entries(fileOptions.headers)) headers = setHeader(headers, key, value);
24506
+ const cleanPath = _this._removeEmptyFolders(path13);
23445
24507
  const _path = _this._getFinalPath(cleanPath);
23446
24508
  const data = await (method == "PUT" ? put : post)(_this.fetch, `${_this.url}/object/${_path}`, body, _objectSpread22({ headers }, (options === null || options === void 0 ? void 0 : options.duplex) ? { duplex: options.duplex } : {}));
23447
24509
  return {
@@ -23502,8 +24564,8 @@ var StorageFileApi = class extends BaseApiClient {
23502
24564
  * - Refer to the [Storage guide](/docs/guides/storage/security/access-control) on how access control works
23503
24565
  * - For React Native, using either `Blob`, `File` or `FormData` does not work as intended. Upload file using `ArrayBuffer` from base64 file data instead, see example below.
23504
24566
  */
23505
- async upload(path12, fileBody, fileOptions) {
23506
- return this.uploadOrUpdate("POST", path12, fileBody, fileOptions);
24567
+ async upload(path13, fileBody, fileOptions) {
24568
+ return this.uploadOrUpdate("POST", path13, fileBody, fileOptions);
23507
24569
  }
23508
24570
  /**
23509
24571
  * Upload a file with a token generated from `createSignedUploadUrl`.
@@ -23542,9 +24604,9 @@ var StorageFileApi = class extends BaseApiClient {
23542
24604
  * - `objects` table permissions: none
23543
24605
  * - Refer to the [Storage guide](/docs/guides/storage/security/access-control) on how access control works
23544
24606
  */
23545
- async uploadToSignedUrl(path12, token, fileBody, fileOptions) {
24607
+ async uploadToSignedUrl(path13, token, fileBody, fileOptions) {
23546
24608
  var _this3 = this;
23547
- const cleanPath = _this3._removeEmptyFolders(path12);
24609
+ const cleanPath = _this3._removeEmptyFolders(path13);
23548
24610
  const _path = _this3._getFinalPath(cleanPath);
23549
24611
  const url = new URL(_this3.url + `/object/upload/sign/${_path}`);
23550
24612
  url.searchParams.set("token", token);
@@ -23606,10 +24668,10 @@ var StorageFileApi = class extends BaseApiClient {
23606
24668
  * - `objects` table permissions: `insert`
23607
24669
  * - Refer to the [Storage guide](/docs/guides/storage/security/access-control) on how access control works
23608
24670
  */
23609
- async createSignedUploadUrl(path12, options) {
24671
+ async createSignedUploadUrl(path13, options) {
23610
24672
  var _this4 = this;
23611
24673
  return _this4.handleOperation(async () => {
23612
- let _path = _this4._getFinalPath(path12);
24674
+ let _path = _this4._getFinalPath(path13);
23613
24675
  const headers = _objectSpread22({}, _this4.headers);
23614
24676
  if (options === null || options === void 0 ? void 0 : options.upsert) headers["x-upsert"] = "true";
23615
24677
  const data = await post(_this4.fetch, `${_this4.url}/object/upload/sign/${_path}`, {}, { headers });
@@ -23618,7 +24680,7 @@ var StorageFileApi = class extends BaseApiClient {
23618
24680
  if (!token) throw new StorageError("No token returned by API");
23619
24681
  return {
23620
24682
  signedUrl: url.toString(),
23621
- path: path12,
24683
+ path: path13,
23622
24684
  token
23623
24685
  };
23624
24686
  });
@@ -23674,8 +24736,8 @@ var StorageFileApi = class extends BaseApiClient {
23674
24736
  * - Refer to the [Storage guide](/docs/guides/storage/security/access-control) on how access control works
23675
24737
  * - For React Native, using either `Blob`, `File` or `FormData` does not work as intended. Update file using `ArrayBuffer` from base64 file data instead, see example below.
23676
24738
  */
23677
- async update(path12, fileBody, fileOptions) {
23678
- return this.uploadOrUpdate("PUT", path12, fileBody, fileOptions);
24739
+ async update(path13, fileBody, fileOptions) {
24740
+ return this.uploadOrUpdate("PUT", path13, fileBody, fileOptions);
23679
24741
  }
23680
24742
  /**
23681
24743
  * Moves an existing file to a new path in the same bucket.
@@ -23773,6 +24835,7 @@ var StorageFileApi = class extends BaseApiClient {
23773
24835
  * @param expiresIn The number of seconds until the signed URL expires. For example, `60` for a URL which is valid for one minute.
23774
24836
  * @param options.download triggers the file as a download if set to true. Set this parameter as the name of the file if you want to trigger the download with a different filename.
23775
24837
  * @param options.transform Transform the asset before serving it to the client.
24838
+ * @param options.cacheNonce Append a cache nonce parameter to the URL to invalidate the cache.
23776
24839
  * @returns Promise with response containing signed URL or error
23777
24840
  *
23778
24841
  * @example Create Signed URL
@@ -23822,15 +24885,17 @@ var StorageFileApi = class extends BaseApiClient {
23822
24885
  * - `objects` table permissions: `select`
23823
24886
  * - Refer to the [Storage guide](/docs/guides/storage/security/access-control) on how access control works
23824
24887
  */
23825
- async createSignedUrl(path12, expiresIn, options) {
24888
+ async createSignedUrl(path13, expiresIn, options) {
23826
24889
  var _this8 = this;
23827
24890
  return _this8.handleOperation(async () => {
23828
- let _path = _this8._getFinalPath(path12);
24891
+ let _path = _this8._getFinalPath(path13);
23829
24892
  const hasTransform = typeof (options === null || options === void 0 ? void 0 : options.transform) === "object" && options.transform !== null && Object.keys(options.transform).length > 0;
23830
24893
  let data = await post(_this8.fetch, `${_this8.url}/object/sign/${_path}`, _objectSpread22({ expiresIn }, hasTransform ? { transform: options.transform } : {}), { headers: _this8.headers });
23831
- const downloadQueryParam = (options === null || options === void 0 ? void 0 : options.download) ? `&download=${options.download === true ? "" : options.download}` : "";
23832
- const returnedPath = hasTransform && data.signedURL.includes("/object/sign/") ? data.signedURL.replace("/object/sign/", "/render/image/sign/") : data.signedURL;
23833
- return { signedUrl: encodeURI(`${_this8.url}${returnedPath}${downloadQueryParam}`) };
24894
+ const query = new URLSearchParams();
24895
+ if (options === null || options === void 0 ? void 0 : options.download) query.set("download", options.download === true ? "" : options.download);
24896
+ if ((options === null || options === void 0 ? void 0 : options.cacheNonce) != null) query.set("cacheNonce", String(options.cacheNonce));
24897
+ const queryString = query.toString();
24898
+ return { signedUrl: encodeURI(`${_this8.url}${data.signedURL}${queryString ? `&${queryString}` : ""}`) };
23834
24899
  });
23835
24900
  }
23836
24901
  /**
@@ -23840,6 +24905,7 @@ var StorageFileApi = class extends BaseApiClient {
23840
24905
  * @param paths The file paths to be downloaded, including the current file names. For example `['folder/image.png', 'folder2/image2.png']`.
23841
24906
  * @param expiresIn The number of seconds until the signed URLs expire. For example, `60` for URLs which are valid for one minute.
23842
24907
  * @param options.download triggers the file as a download if set to true. Set this parameter as the name of the file if you want to trigger the download with a different filename.
24908
+ * @param options.cacheNonce Append a cache nonce parameter to the URL to invalidate the cache.
23843
24909
  * @returns Promise with response containing array of objects with signedUrl, path, and error or error
23844
24910
  *
23845
24911
  * @example Create Signed URLs
@@ -23884,8 +24950,11 @@ var StorageFileApi = class extends BaseApiClient {
23884
24950
  expiresIn,
23885
24951
  paths
23886
24952
  }, { headers: _this9.headers });
23887
- const downloadQueryParam = (options === null || options === void 0 ? void 0 : options.download) ? `&download=${options.download === true ? "" : options.download}` : "";
23888
- return data.map((datum) => _objectSpread22(_objectSpread22({}, datum), {}, { signedUrl: datum.signedURL ? encodeURI(`${_this9.url}${datum.signedURL}${downloadQueryParam}`) : null }));
24953
+ const query = new URLSearchParams();
24954
+ if (options === null || options === void 0 ? void 0 : options.download) query.set("download", options.download === true ? "" : options.download);
24955
+ if ((options === null || options === void 0 ? void 0 : options.cacheNonce) != null) query.set("cacheNonce", String(options.cacheNonce));
24956
+ const queryString = query.toString();
24957
+ return data.map((datum) => _objectSpread22(_objectSpread22({}, datum), {}, { signedUrl: datum.signedURL ? encodeURI(`${_this9.url}${datum.signedURL}${queryString ? `&${queryString}` : ""}`) : null }));
23889
24958
  });
23890
24959
  }
23891
24960
  /**
@@ -23894,6 +24963,7 @@ var StorageFileApi = class extends BaseApiClient {
23894
24963
  * @category File Buckets
23895
24964
  * @param path The full path and file name of the file to be downloaded. For example `folder/image.png`.
23896
24965
  * @param options.transform Transform the asset before serving it to the client.
24966
+ * @param options.cacheNonce Append a cache nonce parameter to the URL to invalidate the cache.
23897
24967
  * @param parameters Additional fetch parameters like signal for cancellation. Supports standard fetch options including cache control.
23898
24968
  * @returns BlobDownloadBuilder instance for downloading the file
23899
24969
  *
@@ -23952,12 +25022,14 @@ var StorageFileApi = class extends BaseApiClient {
23952
25022
  * - `objects` table permissions: `select`
23953
25023
  * - Refer to the [Storage guide](/docs/guides/storage/security/access-control) on how access control works
23954
25024
  */
23955
- download(path12, options, parameters) {
23956
- const renderPath = typeof (options === null || options === void 0 ? void 0 : options.transform) !== "undefined" ? "render/image/authenticated" : "object";
23957
- const transformationQuery = this.transformOptsToQueryString((options === null || options === void 0 ? void 0 : options.transform) || {});
23958
- const queryString = transformationQuery ? `?${transformationQuery}` : "";
23959
- const _path = this._getFinalPath(path12);
23960
- const downloadFn = () => get(this.fetch, `${this.url}/${renderPath}/${_path}${queryString}`, {
25025
+ download(path13, options, parameters) {
25026
+ const renderPath = typeof (options === null || options === void 0 ? void 0 : options.transform) === "object" && options.transform !== null && Object.keys(options.transform).length > 0 ? "render/image/authenticated" : "object";
25027
+ const query = new URLSearchParams();
25028
+ if (options === null || options === void 0 ? void 0 : options.transform) this.applyTransformOptsToQuery(query, options.transform);
25029
+ if ((options === null || options === void 0 ? void 0 : options.cacheNonce) != null) query.set("cacheNonce", String(options.cacheNonce));
25030
+ const queryString = query.toString();
25031
+ const _path = this._getFinalPath(path13);
25032
+ const downloadFn = () => get(this.fetch, `${this.url}/${renderPath}/${_path}${queryString ? `?${queryString}` : ""}`, {
23961
25033
  headers: this.headers,
23962
25034
  noResolveJson: true
23963
25035
  }, parameters);
@@ -23986,9 +25058,9 @@ var StorageFileApi = class extends BaseApiClient {
23986
25058
  * }
23987
25059
  * ```
23988
25060
  */
23989
- async info(path12) {
25061
+ async info(path13) {
23990
25062
  var _this10 = this;
23991
- const _path = _this10._getFinalPath(path12);
25063
+ const _path = _this10._getFinalPath(path13);
23992
25064
  return _this10.handleOperation(async () => {
23993
25065
  return recursiveToCamel(await get(_this10.fetch, `${_this10.url}/object/info/${_path}`, { headers: _this10.headers }));
23994
25066
  });
@@ -24008,9 +25080,9 @@ var StorageFileApi = class extends BaseApiClient {
24008
25080
  * .exists('folder/avatar1.png')
24009
25081
  * ```
24010
25082
  */
24011
- async exists(path12) {
25083
+ async exists(path13) {
24012
25084
  var _this11 = this;
24013
- const _path = _this11._getFinalPath(path12);
25085
+ const _path = _this11._getFinalPath(path13);
24014
25086
  try {
24015
25087
  await head(_this11.fetch, `${_this11.url}/object/${_path}`, { headers: _this11.headers });
24016
25088
  return {
@@ -24038,6 +25110,7 @@ var StorageFileApi = class extends BaseApiClient {
24038
25110
  * @param path The path and name of the file to generate the public URL for. For example `folder/image.png`.
24039
25111
  * @param options.download Triggers the file as a download if set to true. Set this parameter as the name of the file if you want to trigger the download with a different filename.
24040
25112
  * @param options.transform Transform the asset before serving it to the client.
25113
+ * @param options.cacheNonce Append a cache nonce parameter to the URL to invalidate the cache.
24041
25114
  * @returns Object with public URL
24042
25115
  *
24043
25116
  * @example Returns the URL for an asset in a public bucket
@@ -24087,17 +25160,15 @@ var StorageFileApi = class extends BaseApiClient {
24087
25160
  * - `objects` table permissions: none
24088
25161
  * - Refer to the [Storage guide](/docs/guides/storage/security/access-control) on how access control works
24089
25162
  */
24090
- getPublicUrl(path12, options) {
24091
- const _path = this._getFinalPath(path12);
24092
- const _queryString = [];
24093
- const downloadQueryParam = (options === null || options === void 0 ? void 0 : options.download) ? `download=${options.download === true ? "" : options.download}` : "";
24094
- if (downloadQueryParam !== "") _queryString.push(downloadQueryParam);
24095
- const renderPath = typeof (options === null || options === void 0 ? void 0 : options.transform) !== "undefined" ? "render/image" : "object";
24096
- const transformationQuery = this.transformOptsToQueryString((options === null || options === void 0 ? void 0 : options.transform) || {});
24097
- if (transformationQuery !== "") _queryString.push(transformationQuery);
24098
- let queryString = _queryString.join("&");
24099
- if (queryString !== "") queryString = `?${queryString}`;
24100
- return { data: { publicUrl: encodeURI(`${this.url}/${renderPath}/public/${_path}${queryString}`) } };
25163
+ getPublicUrl(path13, options) {
25164
+ const _path = this._getFinalPath(path13);
25165
+ const query = new URLSearchParams();
25166
+ if (options === null || options === void 0 ? void 0 : options.download) query.set("download", options.download === true ? "" : options.download);
25167
+ if (options === null || options === void 0 ? void 0 : options.transform) this.applyTransformOptsToQuery(query, options.transform);
25168
+ if ((options === null || options === void 0 ? void 0 : options.cacheNonce) != null) query.set("cacheNonce", String(options.cacheNonce));
25169
+ const queryString = query.toString();
25170
+ const renderPath = typeof (options === null || options === void 0 ? void 0 : options.transform) === "object" && options.transform !== null && Object.keys(options.transform).length > 0 ? "render/image" : "object";
25171
+ return { data: { publicUrl: encodeURI(`${this.url}/${renderPath}/public/${_path}`) + (queryString ? `?${queryString}` : "") } };
24101
25172
  }
24102
25173
  /**
24103
25174
  * Deletes files within the same bucket
@@ -24227,10 +25298,10 @@ var StorageFileApi = class extends BaseApiClient {
24227
25298
  * - `objects` table permissions: `select`
24228
25299
  * - Refer to the [Storage guide](/docs/guides/storage/security/access-control) on how access control works
24229
25300
  */
24230
- async list(path12, options, parameters) {
25301
+ async list(path13, options, parameters) {
24231
25302
  var _this13 = this;
24232
25303
  return _this13.handleOperation(async () => {
24233
- const body = _objectSpread22(_objectSpread22(_objectSpread22({}, DEFAULT_SEARCH_OPTIONS), options), {}, { prefix: path12 || "" });
25304
+ const body = _objectSpread22(_objectSpread22(_objectSpread22({}, DEFAULT_SEARCH_OPTIONS), options), {}, { prefix: path13 || "" });
24234
25305
  return await post(_this13.fetch, `${_this13.url}/object/list/${_this13.bucketId}`, body, { headers: _this13.headers }, parameters);
24235
25306
  });
24236
25307
  }
@@ -24294,23 +25365,23 @@ var StorageFileApi = class extends BaseApiClient {
24294
25365
  if (typeof Buffer !== "undefined") return Buffer.from(data).toString("base64");
24295
25366
  return btoa(data);
24296
25367
  }
24297
- _getFinalPath(path12) {
24298
- return `${this.bucketId}/${path12.replace(/^\/+/, "")}`;
25368
+ _getFinalPath(path13) {
25369
+ return `${this.bucketId}/${path13.replace(/^\/+/, "")}`;
24299
25370
  }
24300
- _removeEmptyFolders(path12) {
24301
- return path12.replace(/^\/|\/$/g, "").replace(/\/+/g, "/");
25371
+ _removeEmptyFolders(path13) {
25372
+ return path13.replace(/^\/|\/$/g, "").replace(/\/+/g, "/");
24302
25373
  }
24303
- transformOptsToQueryString(transform) {
24304
- const params = [];
24305
- if (transform.width) params.push(`width=${transform.width}`);
24306
- if (transform.height) params.push(`height=${transform.height}`);
24307
- if (transform.resize) params.push(`resize=${transform.resize}`);
24308
- if (transform.format) params.push(`format=${transform.format}`);
24309
- if (transform.quality) params.push(`quality=${transform.quality}`);
24310
- return params.join("&");
25374
+ /** Modifies the `query`, appending values the from `transform` */
25375
+ applyTransformOptsToQuery(query, transform) {
25376
+ if (transform.width) query.set("width", transform.width.toString());
25377
+ if (transform.height) query.set("height", transform.height.toString());
25378
+ if (transform.resize) query.set("resize", transform.resize);
25379
+ if (transform.format) query.set("format", transform.format);
25380
+ if (transform.quality) query.set("quality", transform.quality.toString());
25381
+ return query;
24311
25382
  }
24312
25383
  };
24313
- var version2 = "2.100.1";
25384
+ var version2 = "2.104.1";
24314
25385
  var DEFAULT_HEADERS = { "X-Client-Info": `storage-js/${version2}` };
24315
25386
  var StorageBucketApi = class extends BaseApiClient {
24316
25387
  constructor(url, headers = {}, fetch$1, opts) {
@@ -24619,8 +25690,18 @@ var StorageAnalyticsClient = class extends BaseApiClient {
24619
25690
  * @param headers - HTTP headers to include in requests
24620
25691
  * @param fetch - Optional custom fetch implementation
24621
25692
  *
24622
- * @example Creating a StorageAnalyticsClient instance
25693
+ * @example Using supabase-js (recommended)
24623
25694
  * ```typescript
25695
+ * import { createClient } from '@supabase/supabase-js'
25696
+ *
25697
+ * const supabase = createClient('https://xyzcompany.supabase.co', 'publishable-or-anon-key')
25698
+ * const { data, error } = await supabase.storage.analytics.listBuckets()
25699
+ * ```
25700
+ *
25701
+ * @example Standalone import for bundle-sensitive environments
25702
+ * ```typescript
25703
+ * import { StorageAnalyticsClient } from '@supabase/storage-js'
25704
+ *
24624
25705
  * const client = new StorageAnalyticsClient(url, headers)
24625
25706
  * ```
24626
25707
  */
@@ -25075,8 +26156,18 @@ var StorageVectorsClient = class extends VectorBucketApi {
25075
26156
  * @param options.headers - Optional headers (for example `Authorization`) applied to every request.
25076
26157
  * @param options.fetch - Optional custom `fetch` implementation for non-browser runtimes.
25077
26158
  *
25078
- * @example Creating a StorageVectorsClient instance
26159
+ * @example Using supabase-js (recommended)
25079
26160
  * ```typescript
26161
+ * import { createClient } from '@supabase/supabase-js'
26162
+ *
26163
+ * const supabase = createClient('https://xyzcompany.supabase.co', 'publishable-or-anon-key')
26164
+ * const bucket = supabase.storage.vectors.from('embeddings-prod')
26165
+ * ```
26166
+ *
26167
+ * @example Standalone import for bundle-sensitive environments
26168
+ * ```typescript
26169
+ * import { StorageVectorsClient } from '@supabase/storage-js'
26170
+ *
25080
26171
  * const client = new StorageVectorsClient(url, options)
25081
26172
  * ```
25082
26173
  */
@@ -25541,12 +26632,20 @@ var StorageClient = class extends StorageBucketApi {
25541
26632
  * Creates a client for Storage buckets, files, analytics, and vectors.
25542
26633
  *
25543
26634
  * @category File Buckets
25544
- * @example Creating a Storage client
26635
+ * @example Using supabase-js (recommended)
26636
+ * ```ts
26637
+ * import { createClient } from '@supabase/supabase-js'
26638
+ *
26639
+ * const supabase = createClient('https://xyzcompany.supabase.co', 'publishable-or-anon-key')
26640
+ * const avatars = supabase.storage.from('avatars')
26641
+ * ```
26642
+ *
26643
+ * @example Standalone import for bundle-sensitive environments
25545
26644
  * ```ts
25546
26645
  * import { StorageClient } from '@supabase/storage-js'
25547
26646
  *
25548
26647
  * const storage = new StorageClient('https://xyzcompany.supabase.co/storage/v1', {
25549
- * apikey: 'public-anon-key',
26648
+ * apikey: 'publishable-or-anon-key',
25550
26649
  * })
25551
26650
  * const avatars = storage.from('avatars')
25552
26651
  * ```
@@ -25602,7 +26701,7 @@ var StorageClient = class extends StorageBucketApi {
25602
26701
  };
25603
26702
 
25604
26703
  // node_modules/@supabase/auth-js/dist/module/lib/version.js
25605
- var version3 = "2.100.1";
26704
+ var version3 = "2.104.1";
25606
26705
 
25607
26706
  // node_modules/@supabase/auth-js/dist/module/lib/constants.js
25608
26707
  var AUTO_REFRESH_TICK_DURATION_MS = 30 * 1e3;
@@ -25630,6 +26729,14 @@ var AuthError = class extends Error {
25630
26729
  this.status = status;
25631
26730
  this.code = code;
25632
26731
  }
26732
+ toJSON() {
26733
+ return {
26734
+ name: this.name,
26735
+ message: this.message,
26736
+ status: this.status,
26737
+ code: this.code
26738
+ };
26739
+ }
25633
26740
  };
25634
26741
  function isAuthError(error) {
25635
26742
  return typeof error === "object" && error !== null && "__isAuthError" in error;
@@ -25684,12 +26791,7 @@ var AuthImplicitGrantRedirectError = class extends CustomAuthError {
25684
26791
  this.details = details;
25685
26792
  }
25686
26793
  toJSON() {
25687
- return {
25688
- name: this.name,
25689
- message: this.message,
25690
- status: this.status,
25691
- details: this.details
25692
- };
26794
+ return Object.assign(Object.assign({}, super.toJSON()), { details: this.details });
25693
26795
  }
25694
26796
  };
25695
26797
  function isAuthImplicitGrantRedirectError(error) {
@@ -25702,12 +26804,7 @@ var AuthPKCEGrantCodeExchangeError = class extends CustomAuthError {
25702
26804
  this.details = details;
25703
26805
  }
25704
26806
  toJSON() {
25705
- return {
25706
- name: this.name,
25707
- message: this.message,
25708
- status: this.status,
25709
- details: this.details
25710
- };
26807
+ return Object.assign(Object.assign({}, super.toJSON()), { details: this.details });
25711
26808
  }
25712
26809
  };
25713
26810
  var AuthPKCECodeVerifierMissingError = class extends CustomAuthError {
@@ -25728,6 +26825,9 @@ var AuthWeakPasswordError = class extends CustomAuthError {
25728
26825
  super(message, "AuthWeakPasswordError", status, "weak_password");
25729
26826
  this.reasons = reasons;
25730
26827
  }
26828
+ toJSON() {
26829
+ return Object.assign(Object.assign({}, super.toJSON()), { reasons: this.reasons });
26830
+ }
25731
26831
  };
25732
26832
  var AuthInvalidJwtError = class extends CustomAuthError {
25733
26833
  constructor(message) {
@@ -26012,7 +27112,7 @@ function decodeJWT(token) {
26012
27112
  };
26013
27113
  return data;
26014
27114
  }
26015
- async function sleep2(time) {
27115
+ async function sleep3(time) {
26016
27116
  return await new Promise((accept) => {
26017
27117
  setTimeout(() => accept(null), time);
26018
27118
  });
@@ -26077,7 +27177,7 @@ async function getCodeChallengeAndMethod(storage, storageKey, isPasswordRecovery
26077
27177
  const codeVerifier = generatePKCEVerifier();
26078
27178
  let storedCodeVerifier = codeVerifier;
26079
27179
  if (isPasswordRecovery) {
26080
- storedCodeVerifier += "/PASSWORD_RECOVERY";
27180
+ storedCodeVerifier += "/recovery";
26081
27181
  }
26082
27182
  await setItemAsync(storage, `${storageKey}-code-verifier`, storedCodeVerifier);
26083
27183
  const codeChallenge = await generatePKCEChallenge(codeVerifier);
@@ -26181,7 +27281,7 @@ function deepClone(obj) {
26181
27281
 
26182
27282
  // node_modules/@supabase/auth-js/dist/module/lib/fetch.js
26183
27283
  var _getErrorMessage2 = (err) => err.msg || err.message || err.error_description || err.error || JSON.stringify(err);
26184
- var NETWORK_ERROR_CODES = [502, 503, 504];
27284
+ var NETWORK_ERROR_CODES = [502, 503, 504, 520, 521, 522, 523, 524, 530];
26185
27285
  async function handleError2(error) {
26186
27286
  var _a;
26187
27287
  if (!looksLikeFetchResponse(error)) {
@@ -26324,7 +27424,15 @@ var GoTrueAdminApi = class {
26324
27424
  /**
26325
27425
  * Creates an admin API client that can be used to manage users and OAuth clients.
26326
27426
  *
26327
- * @example
27427
+ * @example Using supabase-js (recommended)
27428
+ * ```ts
27429
+ * import { createClient } from '@supabase/supabase-js'
27430
+ *
27431
+ * const supabase = createClient('https://xyzcompany.supabase.co', 'secret-or-service-role-key')
27432
+ * const { data, error } = await supabase.auth.admin.listUsers()
27433
+ * ```
27434
+ *
27435
+ * @example Standalone import for bundle-sensitive environments
26328
27436
  * ```ts
26329
27437
  * import { GoTrueAdminApi } from '@supabase/auth-js'
26330
27438
  *
@@ -28283,13 +29391,21 @@ var GoTrueClient = class _GoTrueClient {
28283
29391
  /**
28284
29392
  * Create a new client for use in the browser.
28285
29393
  *
28286
- * @example
29394
+ * @example Using supabase-js (recommended)
29395
+ * ```ts
29396
+ * import { createClient } from '@supabase/supabase-js'
29397
+ *
29398
+ * const supabase = createClient('https://xyzcompany.supabase.co', 'publishable-or-anon-key')
29399
+ * const { data, error } = await supabase.auth.getUser()
29400
+ * ```
29401
+ *
29402
+ * @example Standalone import for bundle-sensitive environments
28287
29403
  * ```ts
28288
29404
  * import { GoTrueClient } from '@supabase/auth-js'
28289
29405
  *
28290
29406
  * const auth = new GoTrueClient({
28291
29407
  * url: 'https://xyzcompany.supabase.co/auth/v1',
28292
- * headers: { apikey: 'public-anon-key' },
29408
+ * headers: { apikey: 'publishable-or-anon-key' },
28293
29409
  * storageKey: 'supabase-auth',
28294
29410
  * })
28295
29411
  * ```
@@ -29599,7 +30715,7 @@ var GoTrueClient = class _GoTrueClient {
29599
30715
  }
29600
30716
  if (data.session) {
29601
30717
  await this._saveSession(data.session);
29602
- await this._notifyAllSubscribers("SIGNED_IN", data.session);
30718
+ await this._notifyAllSubscribers(redirectType === "recovery" ? "PASSWORD_RECOVERY" : "SIGNED_IN", data.session);
29603
30719
  }
29604
30720
  return this._returnResult({ data: Object.assign(Object.assign({}, data), { redirectType: redirectType !== null && redirectType !== void 0 ? redirectType : null }), error });
29605
30721
  } catch (error) {
@@ -31082,6 +32198,7 @@ var GoTrueClient = class _GoTrueClient {
31082
32198
  * Gets the session data from a URL string
31083
32199
  */
31084
32200
  async _getSessionFromURL(params, callbackUrlType) {
32201
+ var _a;
31085
32202
  try {
31086
32203
  if (!isBrowser())
31087
32204
  throw new AuthImplicitGrantRedirectError("No browser detected.");
@@ -31114,7 +32231,10 @@ var GoTrueClient = class _GoTrueClient {
31114
32231
  const url = new URL(window.location.href);
31115
32232
  url.searchParams.delete("code");
31116
32233
  window.history.replaceState(window.history.state, "", url.toString());
31117
- return { data: { session: data2.session, redirectType: null }, error: null };
32234
+ return {
32235
+ data: { session: data2.session, redirectType: (_a = data2.redirectType) !== null && _a !== void 0 ? _a : null },
32236
+ error: null
32237
+ };
31118
32238
  }
31119
32239
  const { provider_token, provider_refresh_token, access_token, refresh_token, expires_in, expires_at, token_type } = params;
31120
32240
  if (!access_token || !expires_in || !refresh_token || !token_type) {
@@ -31457,7 +32577,11 @@ var GoTrueClient = class _GoTrueClient {
31457
32577
  } catch (err) {
31458
32578
  await ((_b = this.stateChangeEmitters.get(id)) === null || _b === void 0 ? void 0 : _b.callback("INITIAL_SESSION", null));
31459
32579
  this._debug("INITIAL_SESSION", "callback id", id, "error", err);
31460
- console.error(err);
32580
+ if (isAuthSessionMissingError(err)) {
32581
+ console.warn(err);
32582
+ } else {
32583
+ console.error(err);
32584
+ }
31461
32585
  }
31462
32586
  });
31463
32587
  }
@@ -31781,7 +32905,7 @@ var GoTrueClient = class _GoTrueClient {
31781
32905
  const startedAt = Date.now();
31782
32906
  return await retryable(async (attempt) => {
31783
32907
  if (attempt > 0) {
31784
- await sleep2(200 * Math.pow(2, attempt - 1));
32908
+ await sleep3(200 * Math.pow(2, attempt - 1));
31785
32909
  }
31786
32910
  this._debug(debugName, "refreshing attempt", attempt);
31787
32911
  return await _request(this.fetch, "POST", `${this.url}/token?grant_type=refresh_token`, {
@@ -32787,7 +33911,7 @@ var AuthClient = GoTrueClient_default;
32787
33911
  var AuthClient_default = AuthClient;
32788
33912
 
32789
33913
  // node_modules/@supabase/supabase-js/dist/index.mjs
32790
- var version4 = "2.100.1";
33914
+ var version4 = "2.104.1";
32791
33915
  var JS_ENV = "";
32792
33916
  if (typeof Deno !== "undefined") JS_ENV = "deno";
32793
33917
  else if (typeof document !== "undefined") JS_ENV = "web";
@@ -33090,7 +34214,7 @@ var SupabaseClient = class {
33090
34214
  * ```ts
33091
34215
  * import { createClient } from '@supabase/supabase-js'
33092
34216
  *
33093
- * const supabase = createClient('https://xyzcompany.supabase.co', 'public-anon-key')
34217
+ * const supabase = createClient('https://xyzcompany.supabase.co', 'publishable-or-anon-key')
33094
34218
  *
33095
34219
  * const { data } = await supabase.from('profiles').select('*')
33096
34220
  * ```
@@ -33128,7 +34252,8 @@ var SupabaseClient = class {
33128
34252
  this.fetch = fetchWithAuth(supabaseKey, this._getAccessToken.bind(this), settings.global.fetch);
33129
34253
  this.realtime = this._initRealtimeClient(_objectSpread23({
33130
34254
  headers: this.headers,
33131
- accessToken: this._getAccessToken.bind(this)
34255
+ accessToken: this._getAccessToken.bind(this),
34256
+ fetch: this.fetch
33132
34257
  }, settings.realtime));
33133
34258
  if (this.accessToken) Promise.resolve(this.accessToken()).then((token) => this.realtime.setAuth(token)).catch((e) => console.warn("Failed to set initial Realtime auth token:", e));
33134
34259
  this.rest = new PostgrestClient(new URL("rest/v1", baseUrl).href, {
@@ -33319,7 +34444,7 @@ function shouldShowDeprecationWarning() {
33319
34444
  }
33320
34445
  if (shouldShowDeprecationWarning()) console.warn("\u26A0\uFE0F Node.js 18 and below are deprecated and will no longer be supported in future versions of @supabase/supabase-js. Please upgrade to Node.js 20 or later. For more information, visit: https://github.com/orgs/supabase/discussions/37217");
33321
34446
 
33322
- // node_modules/@remixhq/core/dist/chunk-EVWDYCBL.js
34447
+ // node_modules/@remixhq/core/dist/chunk-P6JHXOV4.js
33323
34448
  var storedSessionSchema = external_exports.object({
33324
34449
  access_token: external_exports.string().min(1),
33325
34450
  refresh_token: external_exports.string().min(1),
@@ -33352,21 +34477,21 @@ async function maybeLoadKeytar() {
33352
34477
  }
33353
34478
  async function ensurePathPermissions(filePath) {
33354
34479
  const dir = import_path7.default.dirname(filePath);
33355
- await import_promises17.default.mkdir(dir, { recursive: true });
34480
+ await import_promises18.default.mkdir(dir, { recursive: true });
33356
34481
  try {
33357
- await import_promises17.default.chmod(dir, 448);
34482
+ await import_promises18.default.chmod(dir, 448);
33358
34483
  } catch {
33359
34484
  }
33360
34485
  try {
33361
- await import_promises17.default.chmod(filePath, 384);
34486
+ await import_promises18.default.chmod(filePath, 384);
33362
34487
  } catch {
33363
34488
  }
33364
34489
  }
33365
34490
  async function writeJsonAtomic2(filePath, value) {
33366
- await import_promises17.default.mkdir(import_path7.default.dirname(filePath), { recursive: true });
34491
+ await import_promises18.default.mkdir(import_path7.default.dirname(filePath), { recursive: true });
33367
34492
  const tmpPath = `${filePath}.tmp-${Date.now()}-${Math.random().toString(16).slice(2)}`;
33368
- await import_promises17.default.writeFile(tmpPath, JSON.stringify(value, null, 2) + "\n", "utf8");
33369
- await import_promises17.default.rename(tmpPath, filePath);
34493
+ await import_promises18.default.writeFile(tmpPath, JSON.stringify(value, null, 2) + "\n", "utf8");
34494
+ await import_promises18.default.rename(tmpPath, filePath);
33370
34495
  }
33371
34496
  async function writeSessionFileFallback(filePath, session) {
33372
34497
  await writeJsonAtomic2(filePath, session);
@@ -33376,40 +34501,53 @@ function createLocalSessionStore(params) {
33376
34501
  const service = params?.service?.trim() || "remix-cli";
33377
34502
  const account = params?.account?.trim() || "default";
33378
34503
  const filePath = params?.filePath?.trim() || import_path7.default.join(xdgConfigHome(), "remix", "session.json");
34504
+ async function readKeytar() {
34505
+ const keytar = await maybeLoadKeytar();
34506
+ if (!keytar) return null;
34507
+ const raw = await keytar.getPassword(service, account).catch(() => null);
34508
+ if (!raw) return null;
34509
+ try {
34510
+ const parsed = storedSessionSchema.safeParse(JSON.parse(raw));
34511
+ return parsed.success ? parsed.data : null;
34512
+ } catch {
34513
+ return null;
34514
+ }
34515
+ }
34516
+ async function readFile() {
34517
+ const raw = await import_promises18.default.readFile(filePath, "utf8").catch(() => null);
34518
+ if (!raw) return null;
34519
+ try {
34520
+ const parsed = storedSessionSchema.safeParse(JSON.parse(raw));
34521
+ if (!parsed.success) return null;
34522
+ await ensurePathPermissions(filePath);
34523
+ return parsed.data;
34524
+ } catch {
34525
+ return null;
34526
+ }
34527
+ }
34528
+ function pickFreshest(a2, b) {
34529
+ if (!a2) return b;
34530
+ if (!b) return a2;
34531
+ return a2.expires_at >= b.expires_at ? a2 : b;
34532
+ }
33379
34533
  return {
33380
34534
  async getSession() {
33381
- const keytar = await maybeLoadKeytar();
33382
- if (keytar) {
33383
- const raw2 = await keytar.getPassword(service, account);
33384
- if (!raw2) return null;
33385
- try {
33386
- const parsed = storedSessionSchema.safeParse(JSON.parse(raw2));
33387
- return parsed.success ? parsed.data : null;
33388
- } catch {
33389
- return null;
33390
- }
33391
- }
33392
- const raw = await import_promises17.default.readFile(filePath, "utf8").catch(() => null);
33393
- if (!raw) return null;
33394
- try {
33395
- const parsed = storedSessionSchema.safeParse(JSON.parse(raw));
33396
- if (!parsed.success) return null;
33397
- await ensurePathPermissions(filePath);
33398
- return parsed.data;
33399
- } catch {
33400
- return null;
33401
- }
34535
+ const [k, f] = await Promise.all([readKeytar(), readFile()]);
34536
+ return pickFreshest(k, f);
33402
34537
  },
33403
34538
  async setSession(session) {
33404
34539
  const parsed = storedSessionSchema.safeParse(session);
33405
34540
  if (!parsed.success) {
33406
34541
  throw new Error("Session data is invalid and was not stored.");
33407
34542
  }
34543
+ await writeSessionFileFallback(filePath, parsed.data);
33408
34544
  const keytar = await maybeLoadKeytar();
33409
34545
  if (keytar) {
33410
- await keytar.setPassword(service, account, JSON.stringify(parsed.data));
34546
+ try {
34547
+ await keytar.setPassword(service, account, JSON.stringify(parsed.data));
34548
+ } catch {
34549
+ }
33411
34550
  }
33412
- await writeSessionFileFallback(filePath, parsed.data);
33413
34551
  }
33414
34552
  };
33415
34553
  }
@@ -33576,12 +34714,12 @@ async function createHookCollabApiClient() {
33576
34714
 
33577
34715
  // src/hook-diagnostics.ts
33578
34716
  var import_node_crypto2 = require("crypto");
33579
- var import_promises19 = __toESM(require("fs/promises"), 1);
34717
+ var import_promises20 = __toESM(require("fs/promises"), 1);
33580
34718
  var import_node_os5 = __toESM(require("os"), 1);
33581
34719
  var import_node_path7 = __toESM(require("path"), 1);
33582
34720
 
33583
34721
  // src/hook-state.ts
33584
- var import_promises18 = __toESM(require("fs/promises"), 1);
34722
+ var import_promises19 = __toESM(require("fs/promises"), 1);
33585
34723
  var import_node_os4 = __toESM(require("os"), 1);
33586
34724
  var import_node_path6 = __toESM(require("path"), 1);
33587
34725
  var import_node_crypto = require("crypto");
@@ -33599,20 +34737,20 @@ function stateLockMetaPath(sessionId) {
33599
34737
  return import_node_path6.default.join(stateLockPath(sessionId), "owner.json");
33600
34738
  }
33601
34739
  async function writeJsonAtomic3(filePath, value) {
33602
- await import_promises18.default.mkdir(import_node_path6.default.dirname(filePath), { recursive: true });
34740
+ await import_promises19.default.mkdir(import_node_path6.default.dirname(filePath), { recursive: true });
33603
34741
  const tmpPath = `${filePath}.tmp-${Date.now()}-${Math.random().toString(16).slice(2)}`;
33604
- await import_promises18.default.writeFile(tmpPath, JSON.stringify(value, null, 2) + "\n", "utf8");
33605
- await import_promises18.default.rename(tmpPath, filePath);
34742
+ await import_promises19.default.writeFile(tmpPath, JSON.stringify(value, null, 2) + "\n", "utf8");
34743
+ await import_promises19.default.rename(tmpPath, filePath);
33606
34744
  }
33607
34745
  var STATE_LOCK_WAIT_MS = 2e3;
33608
34746
  var STATE_LOCK_POLL_MS = 25;
33609
34747
  var STATE_LOCK_STALE_MS = 3e4;
33610
34748
  var STATE_LOCK_HEARTBEAT_MS = 5e3;
33611
- async function sleep3(ms) {
34749
+ async function sleep4(ms) {
33612
34750
  await new Promise((resolve) => setTimeout(resolve, ms));
33613
34751
  }
33614
34752
  async function readStateLockMetadata(sessionId) {
33615
- const raw = await import_promises18.default.readFile(stateLockMetaPath(sessionId), "utf8").catch(() => null);
34753
+ const raw = await import_promises19.default.readFile(stateLockMetaPath(sessionId), "utf8").catch(() => null);
33616
34754
  if (!raw) return null;
33617
34755
  try {
33618
34756
  const parsed = JSON.parse(raw);
@@ -33637,13 +34775,13 @@ async function tryRemoveStaleStateLock(sessionId) {
33637
34775
  const metadata = await readStateLockMetadata(sessionId);
33638
34776
  const staleByHeartbeat = metadata && Date.now() - new Date(metadata.heartbeatAt).getTime() > STATE_LOCK_STALE_MS;
33639
34777
  if (staleByHeartbeat) {
33640
- await import_promises18.default.rm(lockPath, { recursive: true, force: true }).catch(() => void 0);
34778
+ await import_promises19.default.rm(lockPath, { recursive: true, force: true }).catch(() => void 0);
33641
34779
  return true;
33642
34780
  }
33643
34781
  if (!metadata) {
33644
- const lockStat = await import_promises18.default.stat(lockPath).catch(() => null);
34782
+ const lockStat = await import_promises19.default.stat(lockPath).catch(() => null);
33645
34783
  if (lockStat && Date.now() - lockStat.mtimeMs > STATE_LOCK_STALE_MS) {
33646
- await import_promises18.default.rm(lockPath, { recursive: true, force: true }).catch(() => void 0);
34784
+ await import_promises19.default.rm(lockPath, { recursive: true, force: true }).catch(() => void 0);
33647
34785
  return true;
33648
34786
  }
33649
34787
  }
@@ -33652,10 +34790,10 @@ async function tryRemoveStaleStateLock(sessionId) {
33652
34790
  async function acquireStateLock(sessionId) {
33653
34791
  const lockPath = stateLockPath(sessionId);
33654
34792
  const deadline = Date.now() + STATE_LOCK_WAIT_MS;
33655
- await import_promises18.default.mkdir(stateRoot(), { recursive: true });
34793
+ await import_promises19.default.mkdir(stateRoot(), { recursive: true });
33656
34794
  while (true) {
33657
34795
  try {
33658
- await import_promises18.default.mkdir(lockPath);
34796
+ await import_promises19.default.mkdir(lockPath);
33659
34797
  const ownerId = (0, import_node_crypto.randomUUID)();
33660
34798
  const createdAt = (/* @__PURE__ */ new Date()).toISOString();
33661
34799
  const metadata = {
@@ -33680,7 +34818,7 @@ async function acquireStateLock(sessionId) {
33680
34818
  clearInterval(heartbeat);
33681
34819
  const currentMetadata = await readStateLockMetadata(sessionId);
33682
34820
  if (currentMetadata?.ownerId === ownerId) {
33683
- await import_promises18.default.rm(lockPath, { recursive: true, force: true }).catch(() => void 0);
34821
+ await import_promises19.default.rm(lockPath, { recursive: true, force: true }).catch(() => void 0);
33684
34822
  }
33685
34823
  };
33686
34824
  } catch (error) {
@@ -33694,7 +34832,7 @@ async function acquireStateLock(sessionId) {
33694
34832
  if (Date.now() >= deadline) {
33695
34833
  throw new Error(`Timed out acquiring hook state lock for session ${sessionId}.`);
33696
34834
  }
33697
- await sleep3(STATE_LOCK_POLL_MS);
34835
+ await sleep4(STATE_LOCK_POLL_MS);
33698
34836
  }
33699
34837
  }
33700
34838
  }
@@ -33798,7 +34936,7 @@ async function updatePendingTurnState(sessionId, updater) {
33798
34936
  });
33799
34937
  }
33800
34938
  async function loadPendingTurnState(sessionId) {
33801
- const raw = await import_promises18.default.readFile(statePath(sessionId), "utf8").catch(() => null);
34939
+ const raw = await import_promises19.default.readFile(statePath(sessionId), "utf8").catch(() => null);
33802
34940
  if (!raw) return null;
33803
34941
  try {
33804
34942
  const parsed = JSON.parse(raw);
@@ -33886,6 +35024,36 @@ async function markTouchedRepoRecordingFailure(sessionId, repoRoot, params) {
33886
35024
  current.lastTouchedAt = (/* @__PURE__ */ new Date()).toISOString();
33887
35025
  });
33888
35026
  }
35027
+ function lastFinalizedPath(sessionId) {
35028
+ return import_node_path6.default.join(stateRoot(), `${sessionId}.last-finalized.json`);
35029
+ }
35030
+ async function markLastFinalizedTurn(sessionId, turnId, prompt) {
35031
+ const record = {
35032
+ sessionId,
35033
+ turnId,
35034
+ prompt,
35035
+ finalizedAt: (/* @__PURE__ */ new Date()).toISOString()
35036
+ };
35037
+ await writeJsonAtomic3(lastFinalizedPath(sessionId), record);
35038
+ }
35039
+ async function loadLastFinalizedTurn(sessionId) {
35040
+ const raw = await import_promises19.default.readFile(lastFinalizedPath(sessionId), "utf8").catch(() => null);
35041
+ if (!raw) return null;
35042
+ try {
35043
+ const parsed = JSON.parse(raw);
35044
+ if (typeof parsed.sessionId === "string" && typeof parsed.turnId === "string" && typeof parsed.prompt === "string" && typeof parsed.finalizedAt === "string") {
35045
+ return {
35046
+ sessionId: parsed.sessionId,
35047
+ turnId: parsed.turnId,
35048
+ prompt: parsed.prompt,
35049
+ finalizedAt: parsed.finalizedAt
35050
+ };
35051
+ }
35052
+ return null;
35053
+ } catch {
35054
+ return null;
35055
+ }
35056
+ }
33889
35057
  async function markPendingTurnFailure(sessionId, params) {
33890
35058
  await updatePendingTurnState(sessionId, (existing) => {
33891
35059
  existing.turnFailureMessage = params.message.trim();
@@ -33900,14 +35068,14 @@ async function listTouchedRepos(sessionId) {
33900
35068
  }
33901
35069
  async function clearPendingTurnState(sessionId) {
33902
35070
  await withStateLock(sessionId, async () => {
33903
- await import_promises18.default.rm(statePath(sessionId), { force: true }).catch(() => void 0);
35071
+ await import_promises19.default.rm(statePath(sessionId), { force: true }).catch(() => void 0);
33904
35072
  });
33905
35073
  }
33906
35074
 
33907
35075
  // package.json
33908
35076
  var package_default = {
33909
35077
  name: "@remixhq/claude-plugin",
33910
- version: "0.1.18",
35078
+ version: "0.1.19",
33911
35079
  description: "Claude Code plugin for Remix collaboration workflows",
33912
35080
  homepage: "https://github.com/RemixDotOne/remix-claude-plugin",
33913
35081
  license: "MIT",
@@ -33930,16 +35098,27 @@ var package_default = {
33930
35098
  "hooks",
33931
35099
  "agents"
33932
35100
  ],
35101
+ exports: {
35102
+ ".": {
35103
+ types: "./dist/index.d.ts",
35104
+ import: "./dist/index.js"
35105
+ },
35106
+ "./historical": {
35107
+ types: "./dist/historical.d.ts",
35108
+ import: "./dist/historical.js"
35109
+ }
35110
+ },
33933
35111
  scripts: {
33934
35112
  build: "tsup",
33935
35113
  postbuild: `node -e "const fs=require('node:fs'); for (const p of ['dist/mcp-server.cjs','dist/hook-pre-git.cjs','dist/hook-user-prompt.cjs','dist/hook-post-collab.cjs','dist/hook-stop-collab.cjs']) fs.chmodSync(p, 0o755);"`,
33936
35114
  dev: "tsx src/mcp-server.ts",
33937
35115
  typecheck: "tsc -p tsconfig.json --noEmit",
35116
+ test: "node --import tsx --test src/**/*.test.ts",
33938
35117
  prepack: "npm run build"
33939
35118
  },
33940
35119
  dependencies: {
33941
- "@remixhq/core": "^0.1.13",
33942
- "@remixhq/mcp": "^0.1.13"
35120
+ "@remixhq/core": "^0.1.14",
35121
+ "@remixhq/mcp": "^0.1.14"
33943
35122
  },
33944
35123
  devDependencies: {
33945
35124
  "@types/node": "^25.4.0",
@@ -33990,13 +35169,13 @@ function normalizeFields(fields) {
33990
35169
  return Object.fromEntries(normalizedEntries);
33991
35170
  }
33992
35171
  async function rotateLogIfNeeded(logPath) {
33993
- const stat = await import_promises19.default.stat(logPath).catch(() => null);
35172
+ const stat = await import_promises20.default.stat(logPath).catch(() => null);
33994
35173
  if (!stat || stat.size < MAX_LOG_BYTES) {
33995
35174
  return;
33996
35175
  }
33997
35176
  const rotatedPath = `${logPath}.1`;
33998
- await import_promises19.default.rm(rotatedPath, { force: true }).catch(() => void 0);
33999
- await import_promises19.default.rename(logPath, rotatedPath).catch(() => void 0);
35177
+ await import_promises20.default.rm(rotatedPath, { force: true }).catch(() => void 0);
35178
+ await import_promises20.default.rename(logPath, rotatedPath).catch(() => void 0);
34000
35179
  }
34001
35180
  function summarizeText(value) {
34002
35181
  if (typeof value !== "string" || !value.trim()) {
@@ -34016,7 +35195,7 @@ function summarizeText(value) {
34016
35195
  async function appendHookDiagnosticsEvent(params) {
34017
35196
  try {
34018
35197
  const logPath = getHookDiagnosticsLogPath();
34019
- await import_promises19.default.mkdir(import_node_path7.default.dirname(logPath), { recursive: true });
35198
+ await import_promises20.default.mkdir(import_node_path7.default.dirname(logPath), { recursive: true });
34020
35199
  await rotateLogIfNeeded(logPath);
34021
35200
  const event = {
34022
35201
  ts: (/* @__PURE__ */ new Date()).toISOString(),
@@ -34033,15 +35212,597 @@ async function appendHookDiagnosticsEvent(params) {
34033
35212
  message: params.message?.trim() || null,
34034
35213
  fields: normalizeFields(params.fields)
34035
35214
  };
34036
- await import_promises19.default.appendFile(logPath, `${JSON.stringify(event)}
35215
+ await import_promises20.default.appendFile(logPath, `${JSON.stringify(event)}
34037
35216
  `, "utf8");
34038
35217
  } catch {
34039
35218
  }
34040
35219
  }
34041
35220
 
35221
+ // src/usage/claudeCodeUsageHarvester.ts
35222
+ var BOUNDARY_TIMESTAMP_TOLERANCE_MS = 500;
35223
// Parse an ISO-8601 timestamp string into epoch milliseconds; null when the
// value is not a string or does not parse to a finite number.
function parseTimestamp(value) {
  if (typeof value === "string") {
    const parsedMs = Date.parse(value);
    if (Number.isFinite(parsedMs)) return parsedMs;
  }
  return null;
}
// Extract the plain-text body of a user message. String content is returned
// as-is; array content is reduced to its `text` blocks joined by newlines.
// Returns null when no usable text is present.
function extractUserContentText(message) {
  if (!message || typeof message !== "object") return null;
  const { content } = message;
  if (typeof content === "string") return content;
  if (!Array.isArray(content)) return null;
  const texts = [];
  for (const block of content) {
    if (block && typeof block === "object" && block.type === "text" && typeof block.text === "string") {
      texts.push(block.text);
    }
  }
  return texts.length > 0 ? texts.join("\n") : null;
}
// A turn boundary is a real user event: not meta, not a sidechain.
function isUserBoundary(event) {
  if (event.type !== "user") return false;
  return event.isMeta !== true && event.isSidechain !== true;
}
35242
// Locate the user event that starts the turn for `promptText`.
// Two passes, both scanning newest-first:
//   1. preferred: exact prompt-text equality against real user events;
//   2. fallback: first real user event timestamped at or after
//      submittedAt - BOUNDARY_TIMESTAMP_TOLERANCE_MS (the transcript records
//      the user event slightly before the hook observes the submission).
// `upperMs`, when non-null, excludes events at or past the next turn's
// boundary. Returns { boundary, usedFallback }; boundary is null when
// neither pass matches.
function findBoundary(sessionEvents, promptText, submittedAt, upperMs) {
  const isWithinUpperBound = (ev) => {
    if (upperMs === null) return true;
    const ms = parseTimestamp(ev.timestamp);
    return ms !== null && ms < upperMs;
  };
  let contentMatch = null;
  for (let i2 = sessionEvents.length - 1; i2 >= 0; i2--) {
    const ev = sessionEvents[i2];
    if (!isUserBoundary(ev)) continue;
    if (!isWithinUpperBound(ev)) continue;
    const text = extractUserContentText(ev.message);
    if (text !== null && text === promptText) {
      contentMatch = ev;
      break;
    }
  }
  if (contentMatch) return { boundary: contentMatch, usedFallback: false };
  // Fallback pass requires a parseable submission time.
  const submittedMs = parseTimestamp(submittedAt);
  if (submittedMs === null) return { boundary: null, usedFallback: false };
  for (let i2 = sessionEvents.length - 1; i2 >= 0; i2--) {
    const ev = sessionEvents[i2];
    if (!isUserBoundary(ev)) continue;
    if (!isWithinUpperBound(ev)) continue;
    const ms = parseTimestamp(ev.timestamp);
    if (ms === null) continue;
    if (ms >= submittedMs - BOUNDARY_TIMESTAMP_TOLERANCE_MS) {
      return { boundary: ev, usedFallback: true };
    }
  }
  return { boundary: null, usedFallback: false };
}
35274
// Coerce to a finite number, or null.
function asNumberOrNull(value) {
  if (typeof value !== "number") return null;
  return Number.isFinite(value) ? value : null;
}
// Coerce to a string, or null.
function asStringOrNull(value) {
  return typeof value === "string" ? value : null;
}
// Normalize an assistant event's message into { id, model, content, usage };
// null when the event carries no object-shaped message. content defaults to
// an empty array and usage to null when absent or malformed.
function extractAssistantMessage(event) {
  const raw = event.message;
  if (!raw || typeof raw !== "object") return null;
  return {
    id: asStringOrNull(raw.id),
    model: asStringOrNull(raw.model),
    content: Array.isArray(raw.content) ? raw.content : [],
    usage: raw.usage && typeof raw.usage === "object" ? raw.usage : null
  };
}
// A usage block is complete when input, output, and cache-read token counts
// are all present as numbers.
function usageIsComplete(usage) {
  if (!usage) return false;
  return (
    typeof usage.input_tokens === "number" &&
    typeof usage.output_tokens === "number" &&
    typeof usage.cache_read_input_tokens === "number"
  );
}
// Convert one assistant event plus its normalized message into a ModelCall
// record. Cache-write accounting: when the 5m/1h ephemeral split is present
// in usage.cache_creation, report the split and leave the lump sum null;
// otherwise fall back to lump-sum cache_creation_input_tokens.
function buildModelCall(event, msg) {
  const usage = msg.usage ?? {};
  const split = usage.cache_creation && typeof usage.cache_creation === "object" ? usage.cache_creation : null;
  const has5m = split && typeof split.ephemeral_5m_input_tokens === "number";
  const has1h = split && typeof split.ephemeral_1h_input_tokens === "number";
  return {
    provider: "anthropic",
    model: msg.model,
    tier: asStringOrNull(usage.service_tier),
    requestId: asStringOrNull(event.requestId),
    timestamp: asStringOrNull(event.timestamp),
    isSidechain: event.isSidechain === true,
    inputTokens: asNumberOrNull(usage.input_tokens),
    outputTokens: asNumberOrNull(usage.output_tokens),
    cacheReadTokens: asNumberOrNull(usage.cache_read_input_tokens),
    cacheWriteTokens: has5m || has1h ? null : asNumberOrNull(usage.cache_creation_input_tokens),
    cacheWrite5mTokens: has5m ? split.ephemeral_5m_input_tokens : null,
    cacheWrite1hTokens: has1h ? split.ephemeral_1h_input_tokens : null,
    reasoningTokens: null,
    audioInputTokens: null,
    imageInputTokens: null
  };
}
35329
// Scan assistant content blocks for server-tool invocations, recognized by
// ids carrying the "srvtoolu_" prefix. Known tools get their billing unit;
// anything else is recorded as an unknown per-invocation use.
function scanServerToolUses(content) {
  const knownUnits = new Map([
    ["web_search", "per_request"],
    ["web_fetch", "per_request"],
    ["code_execution", "invocation"]
  ]);
  const uses = [];
  for (const entry of content) {
    if (!entry || typeof entry !== "object") continue;
    const id = entry.id;
    if (typeof id !== "string" || !id.startsWith("srvtoolu_")) continue;
    const name = typeof entry.name === "string" ? entry.name : "";
    const unit = knownUnits.get(name);
    if (unit) {
      uses.push({ tool: name, unit, isKnown: true, id, source: "direct" });
    } else {
      uses.push({ tool: name || "unknown", unit: "invocation", isKnown: false, id, source: "direct" });
    }
  }
  return uses;
}
35354
// Build a map from client tool_use id (the "toolu_" prefix) to its tool
// name by scanning tool_use blocks in the turn's assistant messages.
function buildClientToolNameMap(turnEvents) {
  const names = new Map();
  for (const event of turnEvents) {
    if (event.type !== "assistant") continue;
    const message = event.message;
    if (!message || typeof message !== "object") continue;
    const blocks = Array.isArray(message.content) ? message.content : [];
    for (const block of blocks) {
      if (!block || typeof block !== "object" || block.type !== "tool_use") continue;
      const { id, name } = block;
      if (typeof id === "string" && typeof name === "string" && id.startsWith("toolu_")) {
        names.set(id, name);
      }
    }
  }
  return names;
}
35375
// Resolve which client tool produced a user message's tool_result: returns
// the mapped name for the first tool_result block with a string tool_use_id,
// or "" when none is found.
function lookupParentToolName(message, clientToolNames) {
  if (!message || typeof message !== "object") return "";
  const blocks = Array.isArray(message.content) ? message.content : [];
  for (const block of blocks) {
    if (!block || typeof block !== "object" || block.type !== "tool_result") continue;
    const parentId = block.tool_use_id;
    if (typeof parentId === "string") return clientToolNames.get(parentId) ?? "";
  }
  return "";
}
// Recover server-tool uses embedded inside client tool results
// (toolUseResult.results entries whose ids carry the "srvtoolu_" prefix).
// The parent client tool's name decides the classification: WebFetch parents
// yield web_fetch, everything else is treated as web_search.
function scanEmbeddedServerToolUses(turnEvents, clientToolNames) {
  const uses = [];
  for (const event of turnEvents) {
    if (event.type !== "user") continue;
    const result = event.toolUseResult;
    if (!result || typeof result !== "object") continue;
    if (!Array.isArray(result.results)) continue;
    const parentName = lookupParentToolName(event.message, clientToolNames);
    for (const item of result.results) {
      if (!item || typeof item !== "object") continue;
      const srvId = item.tool_use_id;
      if (typeof srvId !== "string" || !srvId.startsWith("srvtoolu_")) continue;
      const tool = parentName === "WebFetch" ? "web_fetch" : "web_search";
      uses.push({ tool, unit: "per_request", isKnown: true, id: srvId, source: "embedded" });
    }
  }
  return uses;
}
35413
// Collapse duplicate server-tool records sharing an id. A record observed in
// the direct srvtoolu_ scan wins over one recovered from embedded results;
// otherwise the first record seen for an id is kept.
function dedupeByServerToolId(records) {
  const byId = new Map();
  for (const record of records) {
    const prior = byId.get(record.id);
    if (prior === undefined) {
      byId.set(record.id, record);
    } else if (prior.source === "embedded" && record.source === "direct") {
      byId.set(record.id, record);
    }
  }
  return [...byId.values()];
}
// Roll deduped uses up into per-(provider, tool, unit) quantities, noting
// whether any unknown tools or embedded-sourced records were seen.
function aggregateServerTools(uses) {
  const buckets = new Map();
  let sawUnknown = false;
  let sawEmbedded = false;
  for (const use of uses) {
    sawUnknown = sawUnknown || !use.isKnown;
    sawEmbedded = sawEmbedded || use.source === "embedded";
    const key = `anthropic|${use.tool}|${use.unit}`;
    const bucket = buckets.get(key);
    if (bucket) {
      bucket.quantity += 1;
    } else {
      buckets.set(key, { provider: "anthropic", tool: use.tool, unit: use.unit, quantity: 1 });
    }
  }
  return { serverTools: [...buckets.values()], sawUnknown, sawEmbedded };
}
35444
// Sum usage.server_tool_use counters across assistant usage blocks, keyed by
// the canonical tool name. Only web_search_requests / web_fetch_requests are
// recognized; non-finite or non-numeric counters are ignored.
function sumCrossCheckCounts(usageBlocks) {
  const canonical = new Map([
    ["web_search_requests", "web_search"],
    ["web_fetch_requests", "web_fetch"]
  ]);
  const totals = new Map();
  for (const usage of usageBlocks) {
    const counters = usage.server_tool_use;
    if (!counters || typeof counters !== "object") continue;
    for (const [rawKey, rawVal] of Object.entries(counters)) {
      const tool = canonical.get(rawKey);
      if (tool === undefined) continue;
      if (typeof rawVal !== "number" || !Number.isFinite(rawVal)) continue;
      totals.set(tool, (totals.get(tool) ?? 0) + rawVal);
    }
  }
  return totals;
}
// Tally srvtoolu_-derived quantities per tool name.
function primaryToolCounts(serverTools) {
  const counts = new Map();
  for (const { tool, quantity } of serverTools) {
    counts.set(tool, (counts.get(tool) ?? 0) + quantity);
  }
  return counts;
}
// First non-blank `version` string found on any event, trimmed; null if none.
function resolveVersion(events) {
  for (const event of events) {
    const version = event.version;
    if (typeof version === "string" && version.trim()) return version.trim();
  }
  return null;
}
35476
// Aggregate one turn's assistant events into a TurnUsage record.
// Returns { ok: false, reason: "no_messages_for_turn" } when the turn holds
// no usable assistant messages. Confidence is derived from the usage blocks:
// all complete → "exact", mixed → "partial", none complete → "unknown", and
// "exact" can be demoted to "partial" by the truncation probe below.
function buildTurnUsage(args) {
  const assistantEvents = args.turnEvents.filter((ev) => ev.type === "assistant");
  if (assistantEvents.length === 0) {
    return { ok: false, reason: "no_messages_for_turn" };
  }
  const warnings = [...args.initialWarnings];
  const calls = [];
  const usageBlocks = [];
  let anyIncomplete = false;
  let anyComplete = false;
  let sawLumpSumFallback = false;
  const collectedServerToolUses = [];
  // Models seen on the main chain vs. sidechains, for the subagent check.
  const mainModels = /* @__PURE__ */ new Set();
  const sidechainModels = /* @__PURE__ */ new Set();
  for (const ev of assistantEvents) {
    const msg = extractAssistantMessage(ev);
    if (!msg) continue;
    if (usageIsComplete(msg.usage)) {
      anyComplete = true;
    } else {
      anyIncomplete = true;
    }
    if (msg.usage) usageBlocks.push(msg.usage);
    const call = buildModelCall(ev, msg);
    calls.push(call);
    // A cache write reported only as a lump sum (no 5m/1h split) is flagged.
    if (call.cacheWrite5mTokens === null && call.cacheWrite1hTokens === null && call.cacheWriteTokens !== null) {
      sawLumpSumFallback = true;
    }
    collectedServerToolUses.push(...scanServerToolUses(msg.content));
    if (call.model) {
      if (call.isSidechain) sidechainModels.add(call.model);
      else mainModels.add(call.model);
    }
  }
  // Possible when every assistant event lacked an object-shaped message.
  if (calls.length === 0) {
    return { ok: false, reason: "no_messages_for_turn" };
  }
  if (sawLumpSumFallback) {
    warnings.push({
      code: "cache_split_unavailable",
      message: "Assistant message reported lump-sum cache_creation_input_tokens without the 5m/1h split."
    });
  }
  // Merge direct srvtoolu_ sightings with uses embedded in client tool
  // results, dedupe by id, then aggregate into billing rows.
  const clientToolNames = buildClientToolNameMap(args.turnEvents);
  const embeddedUses = scanEmbeddedServerToolUses(args.turnEvents, clientToolNames);
  const merged = dedupeByServerToolId([...collectedServerToolUses, ...embeddedUses]);
  const { serverTools, sawUnknown, sawEmbedded } = aggregateServerTools(merged);
  if (sawUnknown) {
    warnings.push({
      code: "unknown_server_tool",
      message: "Encountered a server tool whose name is not in the known list (web_search, web_fetch, code_execution)."
    });
  }
  // Cross-check the srvtoolu_ tally against usage.server_tool_use counters;
  // warn (once) on the first disagreement. Embedded-only uses legitimately
  // have no counter, so that case is not flagged.
  const crossCheck = sumCrossCheckCounts(usageBlocks);
  const primary = primaryToolCounts(serverTools);
  const allTools = /* @__PURE__ */ new Set([...crossCheck.keys(), ...primary.keys()]);
  for (const tool of allTools) {
    const crossVal = crossCheck.get(tool) ?? 0;
    const primVal = primary.get(tool) ?? 0;
    if (crossVal === primVal) continue;
    if (sawEmbedded && crossVal === 0) continue;
    warnings.push({
      code: "server_tool_count_mismatch",
      message: `Server-tool ${tool} count mismatch: srvtoolu_ scan=${primVal}, usage.server_tool_use=${crossVal}. Trusting srvtoolu_ count.`
    });
    break;
  }
  const subagentMismatch = mainModels.size > 0 && sidechainModels.size > 0 && [...sidechainModels].some((m) => !mainModels.has(m));
  if (subagentMismatch) {
    warnings.push({
      code: "subagent_model_differs",
      message: "At least one sidechain ModelCall uses a model different from the main-chain model in this turn."
    });
  }
  let confidence;
  if (!anyComplete && anyIncomplete) confidence = "unknown";
  else if (anyIncomplete) confidence = "partial";
  else if (anyComplete) confidence = "exact";
  else confidence = "unknown";
  // Truncation probe (used for previous-turn harvests): if nothing in the
  // session follows this turn's last assistant message (below upperMs), the
  // transcript may have been cut mid-turn — demote "exact" to "partial".
  if (args.checkSubsequentEvent) {
    const lastAssistantMs = (() => {
      const msList = assistantEvents.map((ev) => parseTimestamp(ev.timestamp)).filter((ms) => ms !== null);
      return msList.length ? Math.max(...msList) : null;
    })();
    const hasSubsequent = lastAssistantMs !== null && args.sessionEvents.some((ev) => {
      const ms = parseTimestamp(ev.timestamp);
      if (ms === null || ms <= lastAssistantMs) return false;
      if (args.upperMs !== null && ms >= args.upperMs) return false;
      return true;
    });
    if (!hasSubsequent && confidence === "exact") {
      confidence = "partial";
      warnings.push({
        code: "transcript_truncated",
        message: "Previous turn has no subsequent event after its last assistant message; transcript may be truncated."
      });
    }
  }
  // Agent version: explicit value wins, then any event in the turn, then any
  // event in the whole session.
  const resolvedVersion = args.agent.version ?? resolveVersion(args.turnEvents) ?? resolveVersion(args.sessionEvents);
  const usage = {
    schemaVersion: 1,
    capturedAt: args.capturedAt,
    captureSource: args.captureSource,
    confidence,
    agent: {
      name: args.agent.name,
      version: resolvedVersion,
      sessionId: args.agent.sessionId,
      turnId: args.agent.turnId,
      plan: args.agent.plan
    },
    calls,
    serverTools,
    warnings,
    extensions: args.extensions
  };
  return { ok: true, usage };
}
35594
// Harvest TurnUsage for the turn identified by (sessionId, promptText,
// submittedAt) from a parsed transcript: locate the user boundary, slice the
// session's events to the window strictly between that boundary and
// `nextBoundaryAt` (when given), and delegate to buildTurnUsage.
// On success also reports `boundaryAt`, the boundary event's raw timestamp.
function harvestClaudeCodeUsage(input) {
  const sessionEvents = input.events.filter((ev) => ev.sessionId === input.sessionId);
  const upperMs = input.nextBoundaryAt ? parseTimestamp(input.nextBoundaryAt) : null;
  const { boundary, usedFallback } = findBoundary(
    sessionEvents,
    input.promptText,
    input.submittedAt,
    upperMs
  );
  if (!boundary) {
    return { ok: false, reason: "no_user_boundary_found" };
  }
  // A boundary without a parseable timestamp cannot anchor the turn window.
  const boundaryMs = parseTimestamp(boundary.timestamp);
  if (boundaryMs === null) {
    return { ok: false, reason: "no_user_boundary_found" };
  }
  // Turn window: strictly after the boundary, strictly before upperMs.
  const turnEvents = sessionEvents.filter((ev) => {
    const ms = parseTimestamp(ev.timestamp);
    if (ms === null || ms <= boundaryMs) return false;
    if (upperMs !== null && ms >= upperMs) return false;
    return true;
  });
  const initialWarnings = [];
  if (usedFallback) {
    initialWarnings.push({
      code: "transcript_truncated",
      message: "Prompt-text equality match failed; used timestamp-tolerance fallback to locate the user boundary."
    });
  }
  const built = buildTurnUsage({
    sessionEvents,
    turnEvents,
    upperMs,
    initialWarnings,
    agent: input.agent,
    capturedAt: input.capturedAt,
    captureSource: "hook",
    extensions: input.extensions,
    checkSubsequentEvent: input.checkSubsequentEvent === true
  });
  if (!built.ok) return built;
  return {
    ok: true,
    usage: built.usage,
    boundaryAt: typeof boundary.timestamp === "string" ? boundary.timestamp : null
  };
}
35641
+
35642
+ // src/usage/claudeCodeSession.ts
35643
+ var import_node_child_process6 = require("child_process");
35644
+ var import_node_fs6 = require("fs");
35645
+ var import_node_os6 = require("os");
35646
+ var import_node_path8 = require("path");
35647
// Auth-probe cache tuning: successful probes are reused for an hour;
// failures are retried after five minutes.
var CACHE_SCHEMA_VERSION = 1;
var SUCCESS_TTL_MS = 60 * 60 * 1e3;
var FAILURE_TTL_MS = 5 * 60 * 1e3;
var DEFAULT_SPAWN_TIMEOUT_MS = 5e3;
// Per-process memo of the probe result; null until the first probe.
var inProcessCache = null;
// Indirection over the spawner — NOTE(review): presumably replaceable by
// tests; no swap point is visible in this chunk, confirm elsewhere.
var spawnerImpl = defaultSpawnClaudeAuthStatus;
// Run `claude auth status --json` synchronously. Any spawn failure (missing
// binary, timeout, signal) collapses to { stdout: "", status: -1, failed: true }
// so callers never see an exception.
function defaultSpawnClaudeAuthStatus(timeoutMs) {
  let result;
  try {
    result = (0, import_node_child_process6.spawnSync)("claude", ["auth", "status", "--json"], {
      stdio: ["ignore", "pipe", "pipe"],
      timeout: timeoutMs,
      env: process.env
    });
  } catch {
    return { stdout: "", status: -1, failed: true };
  }
  // spawnSync reports launch errors (e.g. ENOENT) via result.error.
  if (result.error) {
    return { stdout: "", status: -1, failed: true };
  }
  return {
    stdout: result.stdout?.toString("utf8") ?? "",
    status: result.status ?? -1,
    failed: false
  };
}
35673
// Root directory for collab-state files; REMIX_COLLAB_STATE_ROOT overrides
// the default of ~/.remix/collab-state.
function getCollabStateRoot2() {
  const fromEnv = process.env.REMIX_COLLAB_STATE_ROOT?.trim();
  if (fromEnv) return fromEnv;
  return (0, import_node_path8.join)((0, import_node_os6.homedir)(), ".remix", "collab-state");
}
// Location of the on-disk claude auth probe cache.
function getAuthCachePath() {
  return (0, import_node_path8.join)(getCollabStateRoot2(), "claude-auth-cache.json");
}
// Spawn timeout for the `claude auth status` probe: a positive integer from
// REMIX_CLAUDE_AUTH_TIMEOUT_MS, otherwise the 5-second default.
function getSpawnTimeoutMs() {
  const override = process.env.REMIX_CLAUDE_AUTH_TIMEOUT_MS?.trim();
  if (override) {
    const value = Number.parseInt(override, 10);
    if (Number.isFinite(value) && value > 0) return value;
  }
  return DEFAULT_SPAWN_TIMEOUT_MS;
}
35687
// Load and validate the on-disk auth cache. Any read failure, JSON error,
// schema mismatch, or malformed field yields null, which forces a fresh
// probe. A stored plan must be a non-empty string; anything else normalizes
// to null while the record itself stays usable.
function readAuthCache() {
  let raw;
  try {
    raw = (0, import_node_fs6.readFileSync)(getAuthCachePath(), "utf8");
  } catch {
    return null;
  }
  let parsed;
  try {
    parsed = JSON.parse(raw);
  } catch {
    return null;
  }
  if (!parsed || typeof parsed !== "object" || Array.isArray(parsed)) return null;
  const record = parsed;
  if (record.schemaVersion !== CACHE_SCHEMA_VERSION) return null;
  const planRaw = record.plan;
  const capturedAtRaw = record.capturedAt;
  const ttlRaw = record.ttlMs;
  const plan = planRaw === null ? null : typeof planRaw === "string" && planRaw.trim().length > 0 ? planRaw.trim() : null;
  if (typeof capturedAtRaw !== "string") return null;
  if (typeof ttlRaw !== "number" || !Number.isFinite(ttlRaw) || ttlRaw <= 0) return null;
  return { schemaVersion: CACHE_SCHEMA_VERSION, plan, capturedAt: capturedAtRaw, ttlMs: ttlRaw };
}
35711
// A cache record is fresh while its age (now minus capturedAt) is below its
// ttlMs. An unparseable capturedAt timestamp is treated as stale.
function isCacheFresh(record) {
  const capturedMs = Date.parse(record.capturedAt);
  if (!Number.isFinite(capturedMs)) return false;
  const ageMs = Date.now() - capturedMs;
  return ageMs < record.ttlMs;
}
35716
// Persist the auth cache atomically: write to a pid/timestamp-suffixed temp
// file in the same directory, then rename over the target. All errors are
// swallowed — the cache is an optimization, never a requirement.
function writeAuthCache(record) {
  const cachePath = getAuthCachePath();
  try {
    (0, import_node_fs6.mkdirSync)((0, import_node_path8.dirname)(cachePath), { recursive: true });
    const tmpPath = `${cachePath}.${process.pid}.${Date.now()}.tmp`;
    (0, import_node_fs6.writeFileSync)(tmpPath, JSON.stringify(record), "utf8");
    (0, import_node_fs6.renameSync)(tmpPath, cachePath);
  } catch {
  }
}
35726
// Parse `claude auth status --json` output into { plan }. Empty, malformed,
// logged-out, or plan-less responses all resolve to { plan: null }.
function parseAuthStatusJson(stdout) {
  if (!stdout) return { plan: null };
  let status;
  try {
    status = JSON.parse(stdout);
  } catch {
    return { plan: null };
  }
  if (!status || typeof status !== "object" || Array.isArray(status)) return { plan: null };
  if (status.loggedIn === false) return { plan: null };
  if (typeof status.subscriptionType !== "string") return { plan: null };
  const plan = status.subscriptionType.trim();
  return plan.length > 0 ? { plan } : { plan: null };
}
35742
// Resolve the Claude subscription plan, cheapest source first: per-process
// memo → fresh on-disk cache → live `claude auth status` probe. Whatever the
// probe returns is memoized and written back with a success/failure TTL.
function probeClaudePlan() {
  if (inProcessCache) return inProcessCache;
  const cached = readAuthCache();
  if (cached && isCacheFresh(cached)) {
    inProcessCache = { plan: cached.plan };
    return inProcessCache;
  }
  const result = spawnerImpl(getSpawnTimeoutMs());
  let probed;
  if (result.failed || result.status !== 0) {
    probed = { plan: null };
  } else {
    probed = parseAuthStatusJson(result.stdout);
  }
  inProcessCache = probed;
  writeAuthCache({
    schemaVersion: CACHE_SCHEMA_VERSION,
    plan: probed.plan,
    capturedAt: (/* @__PURE__ */ new Date()).toISOString(),
    // A null plan (failure or logged out) expires sooner so transient
    // errors are retried quickly.
    ttlMs: probed.plan === null ? FAILURE_TTL_MS : SUCCESS_TTL_MS
  });
  return probed;
}
// Build the agent identity from a Claude Code hook payload: a trimmed
// session_id (or null) plus the probed subscription plan.
function resolveClaudeCodeSession(hookPayload) {
  const sessionIdRaw = hookPayload?.session_id;
  const sessionId = typeof sessionIdRaw === "string" && sessionIdRaw.trim() ? sessionIdRaw.trim() : null;
  const probe = probeClaudePlan();
  return {
    agent: { name: "claude-code", sessionId, plan: probe.plan },
    extensions: null
  };
}
35774
+
35775
+ // src/usage/claudeCodeTranscript.ts
35776
+ var import_promises21 = __toESM(require("fs/promises"), 1);
35777
// Read a Claude Code JSONL transcript and parse it line by line.
// Missing file → { ok: false, reason: "transcript_not_found" }; any other
// read error → "transcript_unreadable". Blank lines and lines that fail to
// parse as JSON objects are skipped silently — a transcript being flushed
// concurrently can end in a torn partial line.
async function readAndParseTranscript(transcriptPath) {
  let raw;
  try {
    raw = await import_promises21.default.readFile(transcriptPath, "utf8");
  } catch (err) {
    const code = err && typeof err === "object" && "code" in err ? err.code : null;
    if (code === "ENOENT") {
      return { ok: false, reason: "transcript_not_found" };
    }
    return { ok: false, reason: "transcript_unreadable" };
  }
  const events = [];
  for (const line of raw.split("\n")) {
    const trimmed = line.trim();
    if (!trimmed) continue;
    try {
      const parsed = JSON.parse(trimmed);
      // Only object-shaped entries are events; arrays/primitives are noise.
      if (parsed && typeof parsed === "object" && !Array.isArray(parsed)) {
        events.push(parsed);
      }
    } catch {
    }
  }
  return { ok: true, events };
}
35802
+
34042
35803
  // src/hook-utils.ts
34043
- var import_promises20 = __toESM(require("fs/promises"), 1);
34044
- var import_node_path8 = __toESM(require("path"), 1);
35804
+ var import_promises22 = __toESM(require("fs/promises"), 1);
35805
+ var import_node_path9 = __toESM(require("path"), 1);
34045
35806
  async function readJsonStdin() {
34046
35807
  const chunks = [];
34047
35808
  for await (const chunk of process.stdin) {
@@ -34103,16 +35864,16 @@ function extractBoolean(input, keys) {
34103
35864
  }
34104
35865
  async function findBoundRepo(startPath) {
34105
35866
  if (!startPath) return null;
34106
- let current = import_node_path8.default.resolve(startPath);
34107
- let stats = await import_promises20.default.stat(current).catch(() => null);
35867
+ let current = import_node_path9.default.resolve(startPath);
35868
+ let stats = await import_promises22.default.stat(current).catch(() => null);
34108
35869
  if (stats?.isFile()) {
34109
- current = import_node_path8.default.dirname(current);
35870
+ current = import_node_path9.default.dirname(current);
34110
35871
  }
34111
35872
  while (true) {
34112
- const bindingPath = import_node_path8.default.join(current, ".remix", "config.json");
34113
- const bindingStats = await import_promises20.default.stat(bindingPath).catch(() => null);
35873
+ const bindingPath = import_node_path9.default.join(current, ".remix", "config.json");
35874
+ const bindingStats = await import_promises22.default.stat(bindingPath).catch(() => null);
34114
35875
  if (bindingStats?.isFile()) return current;
34115
- const parent = import_node_path8.default.dirname(current);
35876
+ const parent = import_node_path9.default.dirname(current);
34116
35877
  if (parent === current) return null;
34117
35878
  current = parent;
34118
35879
  }
@@ -34201,8 +35962,240 @@ function createFallbackTouchedRepo(params) {
34201
35962
  recordingFailedAt: null
34202
35963
  };
34203
35964
  }
35965
// Backoff schedule (ms) for re-reading a transcript that is still flushing.
var TRANSCRIPT_FLUSH_RETRY_DELAYS_MS = [50, 100, 200];
// Promise-based delay helper used between transcript re-read attempts.
function sleep5(ms) {
  return new Promise((done) => {
    setTimeout(done, ms);
  });
}
35969
// Harvest usage for the just-finished ("current") turn and, when an earlier
// finalized turn is on record, re-harvest that previous turn with complete
// boundaries. The transcript is re-read with short backoffs because Claude
// Code may still be flushing it when the Stop hook fires. Never throws:
// every failure path is logged to hook diagnostics and resolves to null.
async function harvestTurnUsage(params) {
  const { hook, sessionId, state, payload } = params;
  const transcriptPath = extractString(payload, ["transcript_path"]);
  if (!transcriptPath) return null;
  const capturedAt = (/* @__PURE__ */ new Date()).toISOString();
  try {
    const session = resolveClaudeCodeSession(payload);
    // Prefer the payload's session id; fall back to the hook's.
    const resolvedSessionId = session.agent.sessionId ?? sessionId;
    let parsed = await readAndParseTranscript(transcriptPath);
    if (!parsed.ok) {
      await appendHookDiagnosticsEvent({
        hook,
        sessionId,
        turnId: state.turnId,
        stage: "usage_harvest_transcript_read",
        result: "info",
        reason: parsed.reason
      });
      return null;
    }
    let events = parsed.events;
    // Closure over `events` so retries re-run against the latest re-read.
    const runCurrentHarvest = () => harvestClaudeCodeUsage({
      events,
      sessionId: resolvedSessionId,
      promptText: state.prompt,
      submittedAt: state.submittedAt,
      agent: {
        name: "claude-code",
        version: null,
        sessionId: resolvedSessionId,
        turnId: state.turnId,
        plan: session.agent.plan
      },
      capturedAt,
      extensions: session.extensions
    });
    let currentResult = runCurrentHarvest();
    let retriesUsed = 0;
    let totalBackoffMs = 0;
    // "no_messages_for_turn" usually means the transcript flush lagged the
    // hook — back off briefly and re-read, up to the schedule's length.
    while (!currentResult.ok && currentResult.reason === "no_messages_for_turn") {
      if (retriesUsed >= TRANSCRIPT_FLUSH_RETRY_DELAYS_MS.length) break;
      const delayMs = TRANSCRIPT_FLUSH_RETRY_DELAYS_MS[retriesUsed];
      await sleep5(delayMs);
      retriesUsed += 1;
      totalBackoffMs += delayMs;
      const reparsed = await readAndParseTranscript(transcriptPath);
      if (!reparsed.ok) break;
      events = reparsed.events;
      currentResult = runCurrentHarvest();
    }
    let currentUsage = null;
    let currentBoundaryAt = null;
    if (currentResult.ok) {
      currentUsage = currentResult.usage;
      currentBoundaryAt = currentResult.boundaryAt;
      if (retriesUsed > 0) {
        await appendHookDiagnosticsEvent({
          hook,
          sessionId,
          turnId: state.turnId,
          stage: "usage_harvest_current",
          result: "info",
          reason: "recovered_after_flush_retry",
          fields: { retriesUsed, totalBackoffMs }
        });
      }
    } else {
      await appendHookDiagnosticsEvent({
        hook,
        sessionId,
        turnId: state.turnId,
        stage: "usage_harvest_current",
        result: "info",
        reason: currentResult.reason,
        fields: retriesUsed > 0 ? { retriesUsed, totalBackoffMs } : void 0
      });
    }
    // Re-harvest the previously finalized turn now that its end boundary
    // (this turn's start) is known.
    let previousPatch = null;
    const lastFinalized = await loadLastFinalizedTurn(sessionId);
    if (lastFinalized && lastFinalized.sessionId && lastFinalized.turnId && lastFinalized.prompt) {
      const prevResult = harvestClaudeCodeUsage({
        events,
        sessionId: lastFinalized.sessionId,
        promptText: lastFinalized.prompt,
        submittedAt: lastFinalized.finalizedAt,
        checkSubsequentEvent: true,
        // Constrain the previous-turn harvest to events strictly before this
        // turn's actual user-event timestamp in the JSONL. Using the hook's
        // submittedAt is too loose: Claude Code records the user event
        // ~100–200 ms before the hook fires, so the current turn's own user
        // message slips under that bound and findBoundary locks onto it.
        // Prefer the timestamp returned by the current-turn harvest; fall
        // back to submittedAt only if the current harvest failed.
        nextBoundaryAt: currentBoundaryAt ?? state.submittedAt,
        agent: {
          name: "claude-code",
          version: null,
          sessionId: lastFinalized.sessionId,
          turnId: lastFinalized.turnId,
          plan: session.agent.plan
        },
        capturedAt,
        extensions: session.extensions
      });
      if (prevResult.ok) {
        previousPatch = {
          previousAgentTurnId: lastFinalized.turnId,
          previousSessionId: lastFinalized.sessionId,
          usage: prevResult.usage
        };
      } else {
        await appendHookDiagnosticsEvent({
          hook,
          sessionId,
          turnId: state.turnId,
          stage: "usage_harvest_previous",
          result: "info",
          reason: prevResult.reason
        });
      }
    }
    if (!currentUsage && !previousPatch) return null;
    return { schemaVersion: 1, currentTurn: currentUsage, previousTurn: previousPatch };
  } catch (err) {
    await appendHookDiagnosticsEvent({
      hook,
      sessionId,
      turnId: state.turnId,
      stage: "usage_harvest_threw",
      result: "error",
      message: err instanceof Error ? err.message : String(err)
    });
    return null;
  }
}
36104
// Attach harvested usage to this repo's MCP-queued finalize jobs that are
// parked in the future (queued, never retried, no error, nextRetryAt > now):
// write the usage into job metadata and clear nextRetryAt so the queue
// drainer picks the job up immediately. Every outcome — including the
// skip/no-match paths — is recorded to hook diagnostics.
async function attachUsageToPendingMcpJobs(params) {
  const { hook, sessionId, turnId, repo, turnUsage } = params;
  const listPendingFinalizeJobs2 = listPendingFinalizeJobs;
  const updatePendingFinalizeJob2 = updatePendingFinalizeJob;
  // Defensive: queue helpers may be absent in older deployments.
  // NOTE(review): presumably guards a version skew between plugin pieces —
  // confirm against the queue module.
  if (typeof listPendingFinalizeJobs2 !== "function" || typeof updatePendingFinalizeJob2 !== "function") {
    await appendHookDiagnosticsEvent({
      hook,
      sessionId,
      turnId,
      stage: "usage_attach_skipped",
      result: "info",
      reason: "queue_helpers_unavailable",
      repoRoot: repo.repoRoot
    });
    return;
  }
  let pending;
  try {
    pending = await listPendingFinalizeJobs2();
  } catch (err) {
    await appendHookDiagnosticsEvent({
      hook,
      sessionId,
      turnId,
      stage: "usage_attach_list_failed",
      result: "error",
      reason: "exception",
      repoRoot: repo.repoRoot,
      message: err instanceof Error ? err.message : String(err)
    });
    return;
  }
  const now = Date.now();
  // Only pristine jobs for this repo whose retry is still scheduled in the
  // future are eligible — anything already retried or failed is left alone.
  const matches = pending.filter(
    (job) => job.status === "queued" && job.repoRoot === repo.repoRoot && job.retryCount === 0 && !job.error && job.nextRetryAt !== null && Number.isFinite(Date.parse(job.nextRetryAt)) && Date.parse(job.nextRetryAt) > now
  );
  if (matches.length === 0) {
    await appendHookDiagnosticsEvent({
      hook,
      sessionId,
      turnId,
      stage: "usage_attach_no_match",
      result: "info",
      reason: turnUsage ? "no_pending_mcp_job" : "no_pending_mcp_job_no_usage",
      repoRoot: repo.repoRoot
    });
    return;
  }
  let updated = 0;
  let skipped = 0;
  let failed = 0;
  for (const job of matches) {
    try {
      const next = await updatePendingFinalizeJob2(job.id, {
        metadata: turnUsage ? { turnUsage } : {},
        // Clearing the schedule releases the job to the drainer now.
        nextRetryAt: null
      });
      if (next) {
        updated += 1;
      } else {
        skipped += 1;
      }
    } catch (err) {
      failed += 1;
      await appendHookDiagnosticsEvent({
        hook,
        sessionId,
        turnId,
        stage: "usage_attach_update_failed",
        result: "error",
        reason: "exception",
        repoRoot: repo.repoRoot,
        message: err instanceof Error ? err.message : String(err),
        fields: { jobId: job.id }
      });
    }
  }
  await appendHookDiagnosticsEvent({
    hook,
    sessionId,
    turnId,
    stage: "usage_attach_completed",
    result: failed > 0 ? "error" : "success",
    repoRoot: repo.repoRoot,
    fields: {
      matchedJobCount: matches.length,
      updatedJobCount: updated,
      skippedJobCount: skipped,
      failedJobCount: failed,
      hasUsage: Boolean(turnUsage)
    }
  });
}
34204
36197
  async function recordTouchedRepo(params) {
34205
- const { hook, sessionId, turnId, repo, prompt, assistantResponse, api } = params;
36198
+ const { hook, sessionId, turnId, repo, prompt, assistantResponse, api, turnUsage, promptedAt } = params;
34206
36199
  await markTouchedRepoStopAttempted(sessionId, repo.repoRoot);
34207
36200
  await appendHookDiagnosticsEvent({
34208
36201
  hook,
@@ -34241,7 +36234,9 @@ async function recordTouchedRepo(params) {
34241
36234
  prompt,
34242
36235
  assistantResponse,
34243
36236
  idempotencyKey: buildRepoIdempotencyKey(turnId, repo),
34244
- actor: HOOK_ACTOR
36237
+ actor: HOOK_ACTOR,
36238
+ turnUsage,
36239
+ promptedAt: promptedAt ?? null
34245
36240
  });
34246
36241
  await markTouchedRepoStopRecorded(sessionId, repo.repoRoot, { mode: result.mode });
34247
36242
  await appendHookDiagnosticsEvent({
@@ -34276,13 +36271,61 @@ async function recordTouchedRepo(params) {
34276
36271
// Fire-and-forget: re-invoke this same executable with --drain-finalize-queue
// in a detached child so queued finalize jobs get processed without blocking
// (or being tied to the lifetime of) the current hook process.
function spawnFinalizeQueueDrainer() {
  // argv[1] is the script Node was started with; without it there is
  // nothing to re-invoke.
  const script = process.argv[1];
  if (!script) {
    return;
  }
  const drainArgs = [...process.execArgv, script, "--drain-finalize-queue"];
  const drainOptions = {
    detached: true,
    stdio: "ignore",
    env: process.env
  };
  const drainer = (0, import_node_child_process7.spawn)(process.execPath, drainArgs, drainOptions);
  // Drop the parent's reference so it can exit without waiting on the drainer.
  drainer.unref();
}
36281
// Repo-relative marker written once a history import has completed, and the
// repo-relative log file the auto-spawned importer appends to.
var HISTORY_IMPORT_MARKER_REL = import_node_path10.default.join(".remix", ".history-imported");
var HISTORY_IMPORT_LOG_REL = import_node_path10.default.join(".remix", "history-import.log");
// Best-effort one-shot: from the Stop hook, launch `remix history import` for
// `repoRoot` as a detached background child, with stdout/stderr appended to
// the repo's history-import log.
//
// Returns a plain result object (never throws):
//   { spawned: true, pid, logPath }                       on success
//   { spawned: false, reason, message? }                  otherwise, where
//   reason is "marker_present" | "marker_check_failed" |
//   "log_open_failed" | "spawn_failed".
//
// Fix vs. previous version: the parent used to leak the two log fds — `out`
// leaked when the second openSync threw, and both fds were left open after
// spawn. The detached child owns its own duplicates, so the parent now closes
// its copies in every path.
function maybeAutoSpawnHistoryImportFromStopHook(repoRoot) {
  // Skip entirely if a prior import already dropped the marker file.
  try {
    if ((0, import_node_fs7.existsSync)(import_node_path10.default.join(repoRoot, HISTORY_IMPORT_MARKER_REL))) {
      return { spawned: false, reason: "marker_present" };
    }
  } catch (markerErr) {
    return {
      spawned: false,
      reason: "marker_check_failed",
      message: markerErr instanceof Error ? markerErr.message : String(markerErr)
    };
  }
  const remixDir = import_node_path10.default.join(repoRoot, ".remix");
  try {
    (0, import_node_fs7.mkdirSync)(remixDir, { recursive: true });
  } catch {
    // Directory may already exist or be uncreatable; openSync below surfaces
    // any real failure as "log_open_failed".
  }
  const logPath = import_node_path10.default.join(repoRoot, HISTORY_IMPORT_LOG_REL);
  // Close an fd without letting a close error mask the primary result.
  const closeQuietly = (fd) => {
    if (fd === void 0) return;
    try {
      (0, import_node_fs7.closeSync)(fd);
    } catch {
    }
  };
  let out;
  let err;
  try {
    out = (0, import_node_fs7.openSync)(logPath, "a");
    err = (0, import_node_fs7.openSync)(logPath, "a");
  } catch (logErr) {
    // The first open may have succeeded before the second threw — don't leak it.
    closeQuietly(out);
    return {
      spawned: false,
      reason: "log_open_failed",
      message: logErr instanceof Error ? logErr.message : String(logErr)
    };
  }
  try {
    const child = (0, import_node_child_process7.spawn)("remix", ["history", "import", "--repo", repoRoot, "--include-prompt-text"], {
      detached: true,
      stdio: ["ignore", out, err],
      env: { ...process.env, REMIX_HISTORY_AUTO_SPAWN: "1" }
    });
    child.unref();
    return { spawned: true, pid: child.pid, logPath };
  } catch (spawnErr) {
    return {
      spawned: false,
      reason: "spawn_failed",
      message: spawnErr instanceof Error ? spawnErr.message : String(spawnErr)
    };
  } finally {
    // spawn() duplicated the fds into the child; release the parent's copies
    // so each Stop-hook invocation no longer leaks two descriptors.
    closeQuietly(out);
    closeQuietly(err);
  }
}
34286
36329
  async function runHookStopCollab(payload) {
34287
36330
  const hook = "Stop";
34288
36331
  if (extractBoolean(payload, ["stop_hook_active"])) {
@@ -34343,6 +36386,21 @@ async function runHookStopCollab(payload) {
34343
36386
  if (touchedRepos.length === 0) {
34344
36387
  const fallbackRepo = await resolveBoundRepoSummary(state.initialCwd);
34345
36388
  if (!fallbackRepo) {
36389
+ let skipReason = "no_bound_repo_for_fallback";
36390
+ let unboundBranchRepoRoot = null;
36391
+ let unboundBranchName = null;
36392
+ let unboundBranchKnownCount = 0;
36393
+ const candidateRepoRoot = await findBoundRepo(state.initialCwd).catch(() => null);
36394
+ if (candidateRepoRoot) {
36395
+ const bindingState = await readCollabBindingState(candidateRepoRoot).catch(() => null);
36396
+ const knownBoundBranches = bindingState ? Object.keys(bindingState.branchBindings) : [];
36397
+ if (bindingState && !bindingState.binding && knownBoundBranches.length > 0) {
36398
+ skipReason = "current_branch_unbound";
36399
+ unboundBranchRepoRoot = candidateRepoRoot;
36400
+ unboundBranchName = bindingState.currentBranch;
36401
+ unboundBranchKnownCount = knownBoundBranches.length;
36402
+ }
36403
+ }
34346
36404
  await clearPendingTurnState(sessionId);
34347
36405
  await appendHookDiagnosticsEvent({
34348
36406
  hook,
@@ -34350,7 +36408,12 @@ async function runHookStopCollab(payload) {
34350
36408
  turnId: state.turnId,
34351
36409
  stage: "fallback_repo_lookup",
34352
36410
  result: "skip",
34353
- reason: "no_bound_repo_for_fallback"
36411
+ reason: skipReason,
36412
+ repoRoot: unboundBranchRepoRoot,
36413
+ fields: skipReason === "current_branch_unbound" ? {
36414
+ currentBranch: unboundBranchName,
36415
+ knownBoundBranchCount: unboundBranchKnownCount
36416
+ } : {}
34354
36417
  });
34355
36418
  await appendHookDiagnosticsEvent({
34356
36419
  hook,
@@ -34428,8 +36491,16 @@ async function runHookStopCollab(payload) {
34428
36491
  touchedRepoCount: touchedRepos.length
34429
36492
  }
34430
36493
  });
36494
+ const turnUsage = await harvestTurnUsage({
36495
+ hook,
36496
+ sessionId,
36497
+ state: { turnId: state.turnId, prompt, submittedAt: state.submittedAt },
36498
+ payload
36499
+ });
34431
36500
  let hadFailure = false;
34432
36501
  let queuedFinalizeWork = false;
36502
+ let anyRecorded = false;
36503
+ let anyTurnExists = false;
34433
36504
  for (const repo of touchedRepos) {
34434
36505
  if (shouldSkipStopRecording(repo)) {
34435
36506
  const backupDrainQueued = repo.manuallyRecordedByTool === "remix_collab_finalize_turn" && repo.manualRecordingScope === "full_turn";
@@ -34445,9 +36516,20 @@ async function runHookStopCollab(payload) {
34445
36516
  fields: {
34446
36517
  manuallyRecorded: repo.manuallyRecorded,
34447
36518
  stopRecorded: repo.stopRecorded,
34448
- backupDrainQueued
36519
+ backupDrainQueued,
36520
+ willAttachUsage: Boolean(turnUsage)
34449
36521
  }
34450
36522
  });
36523
+ anyTurnExists = true;
36524
+ if (backupDrainQueued) {
36525
+ await attachUsageToPendingMcpJobs({
36526
+ hook,
36527
+ sessionId,
36528
+ turnId: state.turnId,
36529
+ repo,
36530
+ turnUsage
36531
+ });
36532
+ }
34451
36533
  continue;
34452
36534
  }
34453
36535
  const recording = await recordTouchedRepo({
@@ -34457,13 +36539,44 @@ async function runHookStopCollab(payload) {
34457
36539
  repo,
34458
36540
  prompt,
34459
36541
  assistantResponse,
34460
- api
36542
+ api,
36543
+ turnUsage,
36544
+ promptedAt: state.submittedAt
34461
36545
  });
34462
36546
  queuedFinalizeWork = queuedFinalizeWork || recording.queued;
34463
- if (!recording.recorded) {
36547
+ if (recording.recorded) {
36548
+ anyRecorded = true;
36549
+ anyTurnExists = true;
36550
+ const autoSpawn = maybeAutoSpawnHistoryImportFromStopHook(repo.repoRoot);
36551
+ if (autoSpawn.spawned) {
36552
+ await appendHookDiagnosticsEvent({
36553
+ hook,
36554
+ sessionId,
36555
+ turnId: state.turnId,
36556
+ stage: "history_import_auto_spawned_from_stop",
36557
+ result: "success",
36558
+ repoRoot: repo.repoRoot,
36559
+ fields: { pid: autoSpawn.pid ?? null, logPath: autoSpawn.logPath }
36560
+ });
36561
+ } else if (autoSpawn.reason !== "marker_present") {
36562
+ await appendHookDiagnosticsEvent({
36563
+ hook,
36564
+ sessionId,
36565
+ turnId: state.turnId,
36566
+ stage: "history_import_auto_spawn_skipped",
36567
+ result: "info",
36568
+ reason: autoSpawn.reason,
36569
+ repoRoot: repo.repoRoot,
36570
+ message: autoSpawn.message ?? null
36571
+ });
36572
+ }
36573
+ } else {
34464
36574
  hadFailure = true;
34465
36575
  }
34466
36576
  }
36577
+ if (anyRecorded || anyTurnExists) {
36578
+ await markLastFinalizedTurn(sessionId, state.turnId, prompt);
36579
+ }
34467
36580
  if (queuedFinalizeWork) {
34468
36581
  spawnFinalizeQueueDrainer();
34469
36582
  }