@valentinkolb/sync 2.1.1 → 2.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/index.js CHANGED
@@ -15745,6 +15745,9 @@ var scheduler = (config2) => {
15745
15745
  dispatchRetried: 0,
15746
15746
  dispatchSkipped: 0,
15747
15747
  dispatchDlq: 0,
15748
+ triggerSubmitted: 0,
15749
+ triggerFailed: 0,
15750
+ triggerRejected: 0,
15748
15751
  tickErrors: 0,
15749
15752
  lastTickAt: null
15750
15753
  };
@@ -15884,22 +15887,17 @@ var scheduler = (config2) => {
15884
15887
  message: cfg.message
15885
15888
  });
15886
15889
  };
15887
- const submitWithRetry = async (cfg) => {
15888
- await retry(async () => {
15889
- if (!await ensureLeadership({ forceRefresh: true })) {
15890
+ const submitScheduledJob = async (cfg) => {
15891
+ return await retry(async () => {
15892
+ if (cfg.requireLeadership && !await ensureLeadership({ forceRefresh: true })) {
15890
15893
  throw new Error("leadership lost during dispatch");
15891
15894
  }
15892
15895
  return await cfg.jobHandle.submit({
15893
15896
  input: cfg.schedule.input,
15894
- key: `${cfg.schedule.id}:${cfg.slotTs}`,
15897
+ key: cfg.key,
15895
15898
  keyTtlMs: scheduledJobKeyTtlMs,
15896
- at: cfg.slotTs,
15897
- meta: {
15898
- ...cfg.schedule.meta ?? {},
15899
- scheduleId: cfg.schedule.id,
15900
- scheduleSlotTs: cfg.slotTs,
15901
- schedulerId: config2.id
15902
- }
15899
+ ...cfg.at !== undefined ? { at: cfg.at } : {},
15900
+ meta: cfg.meta
15903
15901
  });
15904
15902
  }, {
15905
15903
  attempts: submitRetries + 1,
@@ -15911,9 +15909,9 @@ var scheduler = (config2) => {
15911
15909
  const err = asError4(error48);
15912
15910
  if (err.name === "ZodError")
15913
15911
  return false;
15914
- if (err.message === "leadership lost during dispatch")
15912
+ if (cfg.requireLeadership && err.message === "leadership lost during dispatch")
15915
15913
  return false;
15916
- metricsState.dispatchRetried += 1;
15914
+ cfg.onRetry?.();
15917
15915
  return true;
15918
15916
  }
15919
15917
  });
@@ -16000,10 +15998,21 @@ var scheduler = (config2) => {
16000
15998
  break;
16001
15999
  }
16002
16000
  try {
16003
- await submitWithRetry({
16001
+ const jobId = await submitScheduledJob({
16004
16002
  jobHandle,
16005
16003
  schedule,
16006
- slotTs
16004
+ key: `${schedule.id}:${slotTs}`,
16005
+ at: slotTs,
16006
+ meta: {
16007
+ ...schedule.meta ?? {},
16008
+ scheduleId: schedule.id,
16009
+ scheduleSlotTs: slotTs,
16010
+ schedulerId: config2.id
16011
+ },
16012
+ requireLeadership: true,
16013
+ onRetry: () => {
16014
+ metricsState.dispatchRetried += 1;
16015
+ }
16007
16016
  });
16008
16017
  metricsState.dispatchSubmitted += 1;
16009
16018
  submitsRemaining -= 1;
@@ -16014,7 +16023,7 @@ var scheduler = (config2) => {
16014
16023
  ts: Date.now(),
16015
16024
  scheduleId: schedule.id,
16016
16025
  slotTs,
16017
- jobId: schedule.jobId
16026
+ jobId
16018
16027
  });
16019
16028
  } catch (error48) {
16020
16029
  submitFailed = true;
@@ -16188,6 +16197,74 @@ var scheduler = (config2) => {
16188
16197
  jobsById.delete(jobId);
16189
16198
  }
16190
16199
  };
16200
+ const triggerNow = async (cfg) => {
16201
+ const raw = await redis6.get(scheduleKey(cfg.id));
16202
+ if (!raw) {
16203
+ metricsState.triggerRejected += 1;
16204
+ safeMetric(config2.onMetric, {
16205
+ type: "trigger_rejected",
16206
+ ts: Date.now(),
16207
+ scheduleId: cfg.id,
16208
+ reason: "missing_schedule"
16209
+ });
16210
+ throw new Error(`scheduler trigger rejected: missing schedule ${cfg.id}`);
16211
+ }
16212
+ const schedule = parseSchedule(raw);
16213
+ if (!schedule) {
16214
+ metricsState.triggerRejected += 1;
16215
+ safeMetric(config2.onMetric, {
16216
+ type: "trigger_rejected",
16217
+ ts: Date.now(),
16218
+ scheduleId: cfg.id,
16219
+ reason: "invalid_schedule"
16220
+ });
16221
+ throw new Error(`scheduler trigger rejected: invalid schedule ${cfg.id}`);
16222
+ }
16223
+ const jobHandle = jobsById.get(schedule.jobId);
16224
+ if (!jobHandle) {
16225
+ metricsState.triggerRejected += 1;
16226
+ safeMetric(config2.onMetric, {
16227
+ type: "trigger_rejected",
16228
+ ts: Date.now(),
16229
+ scheduleId: schedule.id,
16230
+ reason: "missing_handler"
16231
+ });
16232
+ throw new Error(`scheduler trigger rejected: missing local handler for schedule ${schedule.id}`);
16233
+ }
16234
+ try {
16235
+ const jobId = await submitScheduledJob({
16236
+ jobHandle,
16237
+ schedule,
16238
+ key: cfg.key ? `manual:${schedule.id}:${cfg.key}` : undefined,
16239
+ meta: {
16240
+ ...schedule.meta ?? {},
16241
+ scheduleId: schedule.id,
16242
+ schedulerId: config2.id,
16243
+ scheduleTrigger: "manual",
16244
+ ...cfg.key ? { scheduleManualKey: cfg.key } : {}
16245
+ },
16246
+ requireLeadership: false
16247
+ });
16248
+ metricsState.triggerSubmitted += 1;
16249
+ safeMetric(config2.onMetric, {
16250
+ type: "trigger_submitted",
16251
+ ts: Date.now(),
16252
+ scheduleId: schedule.id,
16253
+ jobId
16254
+ });
16255
+ return jobId;
16256
+ } catch (error48) {
16257
+ metricsState.triggerFailed += 1;
16258
+ const err = asError4(error48);
16259
+ safeMetric(config2.onMetric, {
16260
+ type: "trigger_failed",
16261
+ ts: Date.now(),
16262
+ scheduleId: schedule.id,
16263
+ message: err.message
16264
+ });
16265
+ throw err;
16266
+ }
16267
+ };
16191
16268
  const get = async (cfg) => {
16192
16269
  const raw = await redis6.get(scheduleKey(cfg.id));
16193
16270
  const parsed = parseSchedule(raw);
@@ -16241,6 +16318,7 @@ var scheduler = (config2) => {
16241
16318
  stop,
16242
16319
  register,
16243
16320
  unregister,
16321
+ triggerNow,
16244
16322
  get,
16245
16323
  list,
16246
16324
  metrics
@@ -16915,16 +16993,1555 @@ var ephemeral = (config2) => {
16915
16993
  reader
16916
16994
  };
16917
16995
  };
16996
+ // src/registry.ts
16997
+ var {redis: redis8, RedisClient: RedisClient4 } = globalThis.Bun;
16998
+ var DEFAULT_PREFIX8 = "sync:registry";
16999
+ var DEFAULT_TENANT4 = "default";
17000
+ var DEFAULT_MAX_ENTRIES2 = 1e4;
17001
+ var DEFAULT_MAX_PAYLOAD_BYTES2 = 128 * 1024;
17002
+ var DEFAULT_EVENT_RETENTION_MS2 = 5 * 60 * 1000;
17003
+ var DEFAULT_EVENT_MAXLEN2 = 50000;
17004
+ var DEFAULT_TOMBSTONE_RETENTION_MS = 5 * 60 * 1000;
17005
+ var DEFAULT_RECONCILE_BATCH_SIZE2 = 200;
17006
+ var DEFAULT_LIST_LIMIT = 1000;
17007
+ var DEFAULT_TIMEOUT_MS3 = 30000;
17008
+ var MAX_RECONCILE_LOOPS = 50;
17009
+ var MAX_KEY_BYTES2 = 512;
17010
+ var MAX_IDENTIFIER_LENGTH2 = 256;
17011
+ var MAX_KEY_DEPTH = 8;
17012
+ var textEncoder4 = new TextEncoder;
17013
+ var LUA_HELPERS = `
17014
+ local function ttl_key(ttlPrefix, logicalKey)
17015
+ return ttlPrefix .. string.len(logicalKey) .. ":" .. logicalKey
17016
+ end
17017
+
17018
+ local function key_stream(keyPrefix, logicalKey)
17019
+ return keyPrefix .. logicalKey
17020
+ end
17021
+
17022
+ local function prefix_stream(prefixPrefix, prefix)
17023
+ return prefixPrefix .. prefix
17024
+ end
17025
+
17026
+ local function xadd_bounded(streamKey, maxEventLen, fields)
17027
+ if maxEventLen > 0 then
17028
+ redis.call("XADD", streamKey, "MAXLEN", "~", tostring(maxEventLen), "*", unpack(fields))
17029
+ return
17030
+ end
17031
+ redis.call("XADD", streamKey, "*", unpack(fields))
17032
+ end
17033
+
17034
+ local function trim_root_stream(streamKey, trimMinId)
17035
+ if trimMinId ~= "" then
17036
+ redis.call("XTRIM", streamKey, "MINID", "~", trimMinId)
17037
+ end
17038
+ end
17039
+
17040
+ local function latest_cursor(streamKey)
17041
+ local raw = redis.call("XREVRANGE", streamKey, "+", "-", "COUNT", "1")
17042
+ if type(raw) ~= "table" or #raw == 0 then
17043
+ return "0-0"
17044
+ end
17045
+ local first = raw[1]
17046
+ if type(first) ~= "table" or #first == 0 then
17047
+ return "0-0"
17048
+ end
17049
+ local id = first[1]
17050
+ if type(id) ~= "string" then
17051
+ return "0-0"
17052
+ end
17053
+ return id
17054
+ end
17055
+
17056
+ local function ancestor_prefixes(logicalKey)
17057
+ local prefixes = {}
17058
+ local current = ""
17059
+ for segment in string.gmatch(logicalKey, "[^/]+") do
17060
+ current = current .. segment .. "/"
17061
+ table.insert(prefixes, current)
17062
+ end
17063
+ if #prefixes > 0 then
17064
+ table.remove(prefixes, #prefixes)
17065
+ end
17066
+ return prefixes
17067
+ end
17068
+
17069
+ local function prefix_ref_inc(prefixRefsKey, logicalKey)
17070
+ local prefixes = ancestor_prefixes(logicalKey)
17071
+ for _, prefix in ipairs(prefixes) do
17072
+ redis.call("HINCRBY", prefixRefsKey, prefix, 1)
17073
+ end
17074
+ end
17075
+
17076
+ local function prefix_ref_dec(prefixRefsKey, prefixStreamPrefix, logicalKey)
17077
+ local prefixes = ancestor_prefixes(logicalKey)
17078
+ for _, prefix in ipairs(prefixes) do
17079
+ local nextValue = tonumber(redis.call("HINCRBY", prefixRefsKey, prefix, -1))
17080
+ if nextValue <= 0 then
17081
+ redis.call("HDEL", prefixRefsKey, prefix)
17082
+ end
17083
+ end
17084
+ end
17085
+
17086
+ local function emit_registry_event(rootStream, keyStreamPrefix, prefixStreamPrefix, logicalKey, trimMinId, maxEventLen, ...)
17087
+ local fields = { ... }
17088
+ xadd_bounded(rootStream, maxEventLen, fields)
17089
+ trim_root_stream(rootStream, trimMinId)
17090
+
17091
+ local exactStream = key_stream(keyStreamPrefix, logicalKey)
17092
+ xadd_bounded(exactStream, maxEventLen, fields)
17093
+
17094
+ local prefixes = ancestor_prefixes(logicalKey)
17095
+ for _, prefix in ipairs(prefixes) do
17096
+ local streamKey = prefix_stream(prefixStreamPrefix, prefix)
17097
+ xadd_bounded(streamKey, maxEventLen, fields)
17098
+ end
17099
+ end
17100
+
17101
+ local function parse_json(raw)
17102
+ if not raw then return nil end
17103
+ local ok, decoded = pcall(cjson.decode, raw)
17104
+ if not ok then return nil end
17105
+ return decoded
17106
+ end
17107
+
17108
+ local function store_tombstone(deadKey, deadKeysKey, deadExpKey, logicalKey, tombstone, tombstoneRetentionMs)
17109
+ redis.call("HSET", deadKey, logicalKey, cjson.encode(tombstone))
17110
+ redis.call("ZADD", deadKeysKey, "0", logicalKey)
17111
+ redis.call("ZADD", deadExpKey, tostring(tombstone.removedAt + tombstoneRetentionMs), logicalKey)
17112
+ end
17113
+
17114
+ local function clear_stale_tombstone(deadKey, deadKeysKey, deadExpKey, logicalKey)
17115
+ redis.call("HDEL", deadKey, logicalKey)
17116
+ redis.call("ZREM", deadKeysKey, logicalKey)
17117
+ redis.call("ZREM", deadExpKey, logicalKey)
17118
+ end
17119
+
17120
+ local function expire_loaded_entry(
17121
+ logicalKey,
17122
+ now,
17123
+ existing,
17124
+ stateKey,
17125
+ activeKeysKey,
17126
+ expKey,
17127
+ ttlPrefix,
17128
+ deadKey,
17129
+ deadKeysKey,
17130
+ deadExpKey,
17131
+ tombstoneRetentionMs,
17132
+ rootStream,
17133
+ keyStreamPrefix,
17134
+ prefixStreamPrefix,
17135
+ trimMinId,
17136
+ maxEventLen
17137
+ )
17138
+ redis.call("HDEL", stateKey, logicalKey)
17139
+ redis.call("ZREM", activeKeysKey, logicalKey)
17140
+ redis.call("ZREM", expKey, logicalKey)
17141
+
17142
+ local tombstone = {
17143
+ key = existing.key,
17144
+ value = existing.value,
17145
+ version = tostring(existing.version),
17146
+ status = "expired",
17147
+ createdAt = tonumber(existing.createdAt) or now,
17148
+ updatedAt = tonumber(existing.updatedAt) or now,
17149
+ ttlMs = tonumber(existing.ttlMs),
17150
+ expiresAt = tonumber(existing.expiresAt),
17151
+ removedAt = now,
17152
+ }
17153
+ store_tombstone(deadKey, deadKeysKey, deadExpKey, logicalKey, tombstone, tombstoneRetentionMs)
17154
+
17155
+ emit_registry_event(
17156
+ rootStream,
17157
+ keyStreamPrefix,
17158
+ prefixStreamPrefix,
17159
+ logicalKey,
17160
+ trimMinId,
17161
+ maxEventLen,
17162
+ "type",
17163
+ "expire",
17164
+ "key",
17165
+ logicalKey,
17166
+ "version",
17167
+ tostring(existing.version),
17168
+ "removedAt",
17169
+ tostring(now)
17170
+ )
17171
+
17172
+ return 1
17173
+ end
17174
+
17175
+ local function reconcile_exact(
17176
+ logicalKey,
17177
+ now,
17178
+ stateKey,
17179
+ activeKeysKey,
17180
+ expKey,
17181
+ ttlPrefix,
17182
+ deadKey,
17183
+ deadKeysKey,
17184
+ deadExpKey,
17185
+ tombstoneRetentionMs,
17186
+ rootStream,
17187
+ keyStreamPrefix,
17188
+ prefixStreamPrefix,
17189
+ trimMinId,
17190
+ maxEventLen
17191
+ )
17192
+ local existingRaw = redis.call("HGET", stateKey, logicalKey)
17193
+ if not existingRaw then return nil end
17194
+
17195
+ local existing = parse_json(existingRaw)
17196
+ if not existing then
17197
+ redis.call("HDEL", stateKey, logicalKey)
17198
+ redis.call("ZREM", activeKeysKey, logicalKey)
17199
+ redis.call("ZREM", expKey, logicalKey)
17200
+ return nil
17201
+ end
17202
+
17203
+ local expiresAt = tonumber(existing.expiresAt)
17204
+ local entryTtlMs = tonumber(existing.ttlMs)
17205
+ if not entryTtlMs or not expiresAt then
17206
+ redis.call("ZREM", expKey, logicalKey)
17207
+ return existing
17208
+ end
17209
+
17210
+ if expiresAt > now then
17211
+ local ttlKey = ttl_key(ttlPrefix, logicalKey)
17212
+ if redis.call("EXISTS", ttlKey) == 1 then
17213
+ return existing
17214
+ end
17215
+ end
17216
+
17217
+ expire_loaded_entry(
17218
+ logicalKey,
17219
+ now,
17220
+ existing,
17221
+ stateKey,
17222
+ activeKeysKey,
17223
+ expKey,
17224
+ ttlPrefix,
17225
+ deadKey,
17226
+ deadKeysKey,
17227
+ deadExpKey,
17228
+ tombstoneRetentionMs,
17229
+ rootStream,
17230
+ keyStreamPrefix,
17231
+ prefixStreamPrefix,
17232
+ trimMinId,
17233
+ maxEventLen
17234
+ )
17235
+ return nil
17236
+ end
17237
+
17238
+ local function cleanup_tombstone_entry(deadKey, deadKeysKey, deadExpKey, stateKey, prefixRefsKey, prefixStreamPrefix, logicalKey)
17239
+ redis.call("HDEL", deadKey, logicalKey)
17240
+ redis.call("ZREM", deadKeysKey, logicalKey)
17241
+ redis.call("ZREM", deadExpKey, logicalKey)
17242
+
17243
+ local activeExists = redis.call("HEXISTS", stateKey, logicalKey)
17244
+ if activeExists == 0 then
17245
+ prefix_ref_dec(prefixRefsKey, prefixStreamPrefix, logicalKey)
17246
+ end
17247
+
17248
+ return 1
17249
+ end
17250
+
17251
+ local function cleanup_tombstone(deadKey, deadKeysKey, deadExpKey, stateKey, prefixRefsKey, prefixStreamPrefix, logicalKey)
17252
+ local tombstoneRaw = redis.call("HGET", deadKey, logicalKey)
17253
+ if not tombstoneRaw then
17254
+ redis.call("ZREM", deadExpKey, logicalKey)
17255
+ return 0
17256
+ end
17257
+
17258
+ return cleanup_tombstone_entry(deadKey, deadKeysKey, deadExpKey, stateKey, prefixRefsKey, prefixStreamPrefix, logicalKey)
17259
+ end
17260
+
17261
+ local function reconcile_batch(
17262
+ now,
17263
+ batchSize,
17264
+ stateKey,
17265
+ activeKeysKey,
17266
+ expKey,
17267
+ ttlPrefix,
17268
+ deadKey,
17269
+ deadKeysKey,
17270
+ deadExpKey,
17271
+ prefixRefsKey,
17272
+ tombstoneRetentionMs,
17273
+ rootStream,
17274
+ keyStreamPrefix,
17275
+ prefixStreamPrefix,
17276
+ trimMinId,
17277
+ maxEventLen
17278
+ )
17279
+ local expired = 0
17280
+ local cleaned = 0
17281
+
17282
+ local due = redis.call("ZRANGEBYSCORE", expKey, "-inf", tostring(now), "LIMIT", "0", tostring(batchSize))
17283
+ for _, logicalKey in ipairs(due) do
17284
+ local existingRaw = redis.call("HGET", stateKey, logicalKey)
17285
+ if not existingRaw then
17286
+ redis.call("ZREM", expKey, logicalKey)
17287
+ else
17288
+ local existing = parse_json(existingRaw)
17289
+ if not existing then
17290
+ redis.call("HDEL", stateKey, logicalKey)
17291
+ redis.call("ZREM", activeKeysKey, logicalKey)
17292
+ redis.call("ZREM", expKey, logicalKey)
17293
+ else
17294
+ local entryTtlMs = tonumber(existing.ttlMs)
17295
+ local expiresAt = tonumber(existing.expiresAt)
17296
+ if not entryTtlMs or not expiresAt then
17297
+ redis.call("ZREM", expKey, logicalKey)
17298
+ else
17299
+ local ttlKey = ttl_key(ttlPrefix, logicalKey)
17300
+ if redis.call("EXISTS", ttlKey) == 0 then
17301
+ expired = expired + expire_loaded_entry(
17302
+ logicalKey,
17303
+ now,
17304
+ existing,
17305
+ stateKey,
17306
+ activeKeysKey,
17307
+ expKey,
17308
+ ttlPrefix,
17309
+ deadKey,
17310
+ deadKeysKey,
17311
+ deadExpKey,
17312
+ tombstoneRetentionMs,
17313
+ rootStream,
17314
+ keyStreamPrefix,
17315
+ prefixStreamPrefix,
17316
+ trimMinId,
17317
+ maxEventLen
17318
+ )
17319
+ end
17320
+ end
17321
+ end
17322
+ end
17323
+ end
17324
+
17325
+ local stale = redis.call("ZRANGEBYSCORE", deadExpKey, "-inf", tostring(now), "LIMIT", "0", tostring(batchSize))
17326
+ for _, logicalKey in ipairs(stale) do
17327
+ cleaned = cleaned + cleanup_tombstone(
17328
+ deadKey,
17329
+ deadKeysKey,
17330
+ deadExpKey,
17331
+ stateKey,
17332
+ prefixRefsKey,
17333
+ prefixStreamPrefix,
17334
+ logicalKey
17335
+ )
17336
+ end
17337
+
17338
+ return {
17339
+ expired = expired,
17340
+ cleaned = cleaned,
17341
+ dueCount = #due,
17342
+ staleCount = #stale,
17343
+ }
17344
+ end
17345
+ `;
17346
+ var UPSERT_SCRIPT3 = `
17347
+ ${LUA_HELPERS}
17348
+
17349
+ local now = tonumber(ARGV[1])
17350
+ local ttlMsRaw = ARGV[2]
17351
+ local payloadRaw = ARGV[3]
17352
+ local logicalKey = ARGV[4]
17353
+ local maxEntries = tonumber(ARGV[5])
17354
+ local tombstoneRetentionMs = tonumber(ARGV[6])
17355
+ local trimMinId = ARGV[7]
17356
+ local maxEventLen = tonumber(ARGV[8])
17357
+
17358
+ local existing = reconcile_exact(
17359
+ logicalKey,
17360
+ now,
17361
+ KEYS[1],
17362
+ KEYS[2],
17363
+ KEYS[3],
17364
+ KEYS[4],
17365
+ KEYS[5],
17366
+ KEYS[6],
17367
+ KEYS[7],
17368
+ tombstoneRetentionMs,
17369
+ KEYS[10],
17370
+ KEYS[11],
17371
+ KEYS[12],
17372
+ trimMinId,
17373
+ maxEventLen
17374
+ )
17375
+
17376
+ local payload = parse_json(payloadRaw)
17377
+ if not payload then
17378
+ return "__ERR_PAYLOAD__"
17379
+ end
17380
+
17381
+ local createdAt = now
17382
+ local version
17383
+
17384
+ if existing then
17385
+ createdAt = tonumber(existing.createdAt) or now
17386
+ version = tostring(redis.call("INCR", KEYS[9]))
17387
+ else
17388
+ local count = tonumber(redis.call("HLEN", KEYS[1]))
17389
+ if count >= maxEntries then
17390
+ return "__ERR_CAPACITY__"
17391
+ end
17392
+ version = tostring(redis.call("INCR", KEYS[9]))
17393
+ if redis.call("HEXISTS", KEYS[5], logicalKey) == 0 then
17394
+ prefix_ref_inc(KEYS[8], logicalKey)
17395
+ end
17396
+ end
17397
+
17398
+ local ttlMs = cjson.null
17399
+ local expiresAt = cjson.null
17400
+ if ttlMsRaw ~= "" then
17401
+ ttlMs = tonumber(ttlMsRaw)
17402
+ expiresAt = now + ttlMs
17403
+ end
17404
+ local hasTtl = ttlMs ~= cjson.null
17405
+
17406
+ local entry = {
17407
+ key = logicalKey,
17408
+ value = payload,
17409
+ version = version,
17410
+ status = "active",
17411
+ createdAt = createdAt,
17412
+ updatedAt = now,
17413
+ ttlMs = ttlMs,
17414
+ expiresAt = expiresAt,
17415
+ }
17416
+
17417
+ redis.call("HSET", KEYS[1], logicalKey, cjson.encode(entry))
17418
+ redis.call("ZADD", KEYS[2], "0", logicalKey)
17419
+
17420
+ if hasTtl then
17421
+ redis.call("ZADD", KEYS[3], tostring(expiresAt), logicalKey)
17422
+ redis.call("SET", ttl_key(KEYS[4], logicalKey), "1", "PX", tostring(ttlMs))
17423
+ else
17424
+ redis.call("ZREM", KEYS[3], logicalKey)
17425
+ redis.call("DEL", ttl_key(KEYS[4], logicalKey))
17426
+ end
17427
+
17428
+ clear_stale_tombstone(KEYS[5], KEYS[6], KEYS[7], logicalKey)
17429
+
17430
+ emit_registry_event(
17431
+ KEYS[10],
17432
+ KEYS[11],
17433
+ KEYS[12],
17434
+ logicalKey,
17435
+ trimMinId,
17436
+ maxEventLen,
17437
+ "type",
17438
+ "upsert",
17439
+ "key",
17440
+ logicalKey,
17441
+ "version",
17442
+ version,
17443
+ "createdAt",
17444
+ tostring(createdAt),
17445
+ "updatedAt",
17446
+ tostring(now),
17447
+ "ttlMs",
17448
+ hasTtl and tostring(ttlMs) or "",
17449
+ "expiresAt",
17450
+ hasTtl and tostring(expiresAt) or "",
17451
+ "payload",
17452
+ payloadRaw
17453
+ )
17454
+
17455
+ return cjson.encode(entry)
17456
+ `;
17457
+ var TOUCH_SCRIPT3 = `
17458
+ ${LUA_HELPERS}
17459
+
17460
+ local now = tonumber(ARGV[1])
17461
+ local logicalKey = ARGV[2]
17462
+ local tombstoneRetentionMs = tonumber(ARGV[3])
17463
+ local trimMinId = ARGV[4]
17464
+ local maxEventLen = tonumber(ARGV[5])
17465
+
17466
+ local existing = reconcile_exact(
17467
+ logicalKey,
17468
+ now,
17469
+ KEYS[1],
17470
+ KEYS[2],
17471
+ KEYS[3],
17472
+ KEYS[4],
17473
+ KEYS[5],
17474
+ KEYS[6],
17475
+ KEYS[7],
17476
+ tombstoneRetentionMs,
17477
+ KEYS[10],
17478
+ KEYS[11],
17479
+ KEYS[12],
17480
+ trimMinId,
17481
+ maxEventLen
17482
+ )
17483
+
17484
+ if not existing then
17485
+ return nil
17486
+ end
17487
+
17488
+ local ttlMs = tonumber(existing.ttlMs)
17489
+ if not ttlMs or ttlMs <= 0 then
17490
+ return nil
17491
+ end
17492
+
17493
+ local expiresAt = now + ttlMs
17494
+ existing.updatedAt = now
17495
+ existing.expiresAt = expiresAt
17496
+
17497
+ redis.call("HSET", KEYS[1], logicalKey, cjson.encode(existing))
17498
+ redis.call("ZADD", KEYS[3], tostring(expiresAt), logicalKey)
17499
+ redis.call("SET", ttl_key(KEYS[4], logicalKey), "1", "PX", tostring(ttlMs))
17500
+
17501
+ emit_registry_event(
17502
+ KEYS[10],
17503
+ KEYS[11],
17504
+ KEYS[12],
17505
+ logicalKey,
17506
+ trimMinId,
17507
+ maxEventLen,
17508
+ "type",
17509
+ "touch",
17510
+ "key",
17511
+ logicalKey,
17512
+ "version",
17513
+ tostring(existing.version),
17514
+ "updatedAt",
17515
+ tostring(now),
17516
+ "expiresAt",
17517
+ tostring(expiresAt)
17518
+ )
17519
+
17520
+ return cjson.encode({
17521
+ version = tostring(existing.version),
17522
+ expiresAt = expiresAt,
17523
+ })
17524
+ `;
17525
+ var REMOVE_SCRIPT2 = `
17526
+ ${LUA_HELPERS}
17527
+
17528
+ local now = tonumber(ARGV[1])
17529
+ local logicalKey = ARGV[2]
17530
+ local reason = ARGV[3]
17531
+ local tombstoneRetentionMs = tonumber(ARGV[4])
17532
+ local trimMinId = ARGV[5]
17533
+ local maxEventLen = tonumber(ARGV[6])
17534
+
17535
+ local existing = reconcile_exact(
17536
+ logicalKey,
17537
+ now,
17538
+ KEYS[1],
17539
+ KEYS[2],
17540
+ KEYS[3],
17541
+ KEYS[4],
17542
+ KEYS[5],
17543
+ KEYS[6],
17544
+ KEYS[7],
17545
+ tombstoneRetentionMs,
17546
+ KEYS[10],
17547
+ KEYS[11],
17548
+ KEYS[12],
17549
+ trimMinId,
17550
+ maxEventLen
17551
+ )
17552
+
17553
+ if not existing then
17554
+ return 0
17555
+ end
17556
+
17557
+ redis.call("HDEL", KEYS[1], logicalKey)
17558
+ redis.call("ZREM", KEYS[2], logicalKey)
17559
+ redis.call("ZREM", KEYS[3], logicalKey)
17560
+ redis.call("DEL", ttl_key(KEYS[4], logicalKey))
17561
+
17562
+ local tombstone = {
17563
+ key = logicalKey,
17564
+ value = existing.value,
17565
+ version = tostring(existing.version),
17566
+ status = "deleted",
17567
+ createdAt = tonumber(existing.createdAt) or now,
17568
+ updatedAt = tonumber(existing.updatedAt) or now,
17569
+ ttlMs = tonumber(existing.ttlMs),
17570
+ expiresAt = tonumber(existing.expiresAt),
17571
+ removedAt = now,
17572
+ reason = reason ~= "" and reason or cjson.null,
17573
+ }
17574
+ store_tombstone(KEYS[5], KEYS[6], KEYS[7], logicalKey, tombstone, tombstoneRetentionMs)
17575
+
17576
+ if reason ~= "" then
17577
+ emit_registry_event(
17578
+ KEYS[10],
17579
+ KEYS[11],
17580
+ KEYS[12],
17581
+ logicalKey,
17582
+ trimMinId,
17583
+ maxEventLen,
17584
+ "type",
17585
+ "delete",
17586
+ "key",
17587
+ logicalKey,
17588
+ "version",
17589
+ tostring(existing.version),
17590
+ "removedAt",
17591
+ tostring(now),
17592
+ "reason",
17593
+ reason
17594
+ )
17595
+ else
17596
+ emit_registry_event(
17597
+ KEYS[10],
17598
+ KEYS[11],
17599
+ KEYS[12],
17600
+ logicalKey,
17601
+ trimMinId,
17602
+ maxEventLen,
17603
+ "type",
17604
+ "delete",
17605
+ "key",
17606
+ logicalKey,
17607
+ "version",
17608
+ tostring(existing.version),
17609
+ "removedAt",
17610
+ tostring(now)
17611
+ )
17612
+ end
17613
+
17614
+ return 1
17615
+ `;
17616
+ var CAS_SCRIPT = `
17617
+ ${LUA_HELPERS}
17618
+
17619
+ local now = tonumber(ARGV[1])
17620
+ local logicalKey = ARGV[2]
17621
+ local expectedVersion = ARGV[3]
17622
+ local payloadRaw = ARGV[4]
17623
+ local tombstoneRetentionMs = tonumber(ARGV[5])
17624
+ local trimMinId = ARGV[6]
17625
+ local maxEventLen = tonumber(ARGV[7])
17626
+
17627
+ local existing = reconcile_exact(
17628
+ logicalKey,
17629
+ now,
17630
+ KEYS[1],
17631
+ KEYS[2],
17632
+ KEYS[3],
17633
+ KEYS[4],
17634
+ KEYS[5],
17635
+ KEYS[6],
17636
+ KEYS[7],
17637
+ tombstoneRetentionMs,
17638
+ KEYS[10],
17639
+ KEYS[11],
17640
+ KEYS[12],
17641
+ trimMinId,
17642
+ maxEventLen
17643
+ )
17644
+
17645
+ if not existing then
17646
+ return cjson.encode({ ok = false })
17647
+ end
17648
+
17649
+ if tostring(existing.version) ~= expectedVersion then
17650
+ return cjson.encode({ ok = false })
17651
+ end
17652
+
17653
+ local payload = parse_json(payloadRaw)
17654
+ if not payload then
17655
+ return "__ERR_PAYLOAD__"
17656
+ end
17657
+
17658
+ local version = tostring(redis.call("INCR", KEYS[9]))
17659
+ local entry = {
17660
+ key = logicalKey,
17661
+ value = payload,
17662
+ version = version,
17663
+ status = "active",
17664
+ createdAt = tonumber(existing.createdAt) or now,
17665
+ updatedAt = now,
17666
+ ttlMs = tonumber(existing.ttlMs) or cjson.null,
17667
+ expiresAt = cjson.null,
17668
+ }
17669
+
17670
+ if tonumber(existing.ttlMs) then
17671
+ entry.expiresAt = now + tonumber(existing.ttlMs)
17672
+ end
17673
+ local hasTtl = entry.ttlMs ~= cjson.null and entry.expiresAt ~= cjson.null
17674
+
17675
+ redis.call("HSET", KEYS[1], logicalKey, cjson.encode(entry))
17676
+ if hasTtl then
17677
+ redis.call("ZADD", KEYS[3], tostring(entry.expiresAt), logicalKey)
17678
+ redis.call("SET", ttl_key(KEYS[4], logicalKey), "1", "PX", tostring(entry.ttlMs))
17679
+ else
17680
+ redis.call("ZREM", KEYS[3], logicalKey)
17681
+ redis.call("DEL", ttl_key(KEYS[4], logicalKey))
17682
+ end
17683
+
17684
+ emit_registry_event(
17685
+ KEYS[10],
17686
+ KEYS[11],
17687
+ KEYS[12],
17688
+ logicalKey,
17689
+ trimMinId,
17690
+ maxEventLen,
17691
+ "type",
17692
+ "upsert",
17693
+ "key",
17694
+ logicalKey,
17695
+ "version",
17696
+ version,
17697
+ "createdAt",
17698
+ tostring(entry.createdAt),
17699
+ "updatedAt",
17700
+ tostring(now),
17701
+ "ttlMs",
17702
+ hasTtl and tostring(entry.ttlMs) or "",
17703
+ "expiresAt",
17704
+ hasTtl and tostring(entry.expiresAt) or "",
17705
+ "payload",
17706
+ payloadRaw
17707
+ )
17708
+
17709
+ return cjson.encode({
17710
+ ok = true,
17711
+ entry = entry,
17712
+ })
17713
+ `;
17714
+ var GET_SCRIPT = `
17715
+ ${LUA_HELPERS}
17716
+
17717
+ local now = tonumber(ARGV[1])
17718
+ local logicalKey = ARGV[2]
17719
+ local includeExpired = ARGV[3] == "1"
17720
+ local tombstoneRetentionMs = tonumber(ARGV[4])
17721
+ local trimMinId = ARGV[5]
17722
+ local maxEventLen = tonumber(ARGV[6])
17723
+
17724
+ local existing = reconcile_exact(
17725
+ logicalKey,
17726
+ now,
17727
+ KEYS[1],
17728
+ KEYS[2],
17729
+ KEYS[3],
17730
+ KEYS[4],
17731
+ KEYS[5],
17732
+ KEYS[6],
17733
+ KEYS[7],
17734
+ tombstoneRetentionMs,
17735
+ KEYS[10],
17736
+ KEYS[11],
17737
+ KEYS[12],
17738
+ trimMinId,
17739
+ maxEventLen
17740
+ )
17741
+
17742
+ if existing then
17743
+ return cjson.encode(existing)
17744
+ end
17745
+
17746
+ if includeExpired then
17747
+ local tombstoneRaw = redis.call("HGET", KEYS[5], logicalKey)
17748
+ if tombstoneRaw then
17749
+ local tomb = parse_json(tombstoneRaw)
17750
+ if tomb then
17751
+ local removedAt = tonumber(tomb.removedAt) or 0
17752
+ if removedAt + tombstoneRetentionMs <= now then
17753
+ cleanup_tombstone_entry(KEYS[5], KEYS[6], KEYS[7], KEYS[1], KEYS[8], KEYS[12], logicalKey)
17754
+ return nil
17755
+ end
17756
+ end
17757
+ if tomb and tomb.status == "expired" then
17758
+ return tombstoneRaw
17759
+ end
17760
+ end
17761
+ end
17762
+
17763
+ return nil
17764
+ `;
17765
+ var RECONCILE_BATCH_SCRIPT = `
17766
+ ${LUA_HELPERS}
17767
+
17768
+ local now = tonumber(ARGV[1])
17769
+ local tombstoneRetentionMs = tonumber(ARGV[2])
17770
+ local batchSize = tonumber(ARGV[3])
17771
+ local trimMinId = ARGV[4]
17772
+ local maxEventLen = tonumber(ARGV[5])
17773
+
17774
+ return cjson.encode(reconcile_batch(
17775
+ now,
17776
+ batchSize,
17777
+ KEYS[1],
17778
+ KEYS[2],
17779
+ KEYS[3],
17780
+ KEYS[4],
17781
+ KEYS[5],
17782
+ KEYS[6],
17783
+ KEYS[7],
17784
+ KEYS[8],
17785
+ tombstoneRetentionMs,
17786
+ KEYS[10],
17787
+ KEYS[11],
17788
+ KEYS[12],
17789
+ trimMinId,
17790
+ maxEventLen
17791
+ ))
17792
+ `;
17793
+ var LIST_PAGE_SCRIPT = `
17794
+ ${LUA_HELPERS}
17795
+
17796
+ local rawPrefix = ARGV[1]
17797
+ local status = ARGV[2]
17798
+ local limit = tonumber(ARGV[3])
17799
+ local afterKey = ARGV[4]
17800
+
17801
+ local sourceHash = KEYS[1]
17802
+ local sourceIndex = KEYS[2]
17803
+ if status == "expired" then
17804
+ sourceHash = KEYS[5]
17805
+ sourceIndex = KEYS[6]
17806
+ end
17807
+
17808
+ local lower = "-"
17809
+ local upper = "+"
17810
+ if rawPrefix ~= "" then
17811
+ lower = "[" .. rawPrefix
17812
+ upper = "[" .. rawPrefix .. "\\255"
17813
+ end
17814
+ if afterKey ~= "" then
17815
+ lower = "(" .. afterKey
17816
+ end
17817
+
17818
+ local collected = {}
17819
+ local nextKey = cjson.null
17820
+ local scanLower = lower
17821
+ local chunkSize = limit > 0 and math.max(limit * 2, 32) or 0
17822
+
17823
+ while true do
17824
+ local range
17825
+ if limit > 0 then
17826
+ range = redis.call("ZRANGEBYLEX", sourceIndex, scanLower, upper, "LIMIT", "0", tostring(chunkSize))
17827
+ else
17828
+ range = redis.call("ZRANGEBYLEX", sourceIndex, scanLower, upper)
17829
+ end
17830
+
17831
+ if #range == 0 then
17832
+ break
17833
+ end
17834
+
17835
+ local stop = false
17836
+ for _, logicalKey in ipairs(range) do
17837
+ local raw = redis.call("HGET", sourceHash, logicalKey)
17838
+ if raw then
17839
+ local entry = parse_json(raw)
17840
+ if entry then
17841
+ if status ~= "expired" or entry.status == "expired" then
17842
+ table.insert(collected, entry)
17843
+ if limit > 0 and #collected > limit then
17844
+ nextKey = collected[limit].key
17845
+ local trimmed = {}
17846
+ for i = 1, limit do
17847
+ trimmed[i] = collected[i]
17848
+ end
17849
+ collected = trimmed
17850
+ stop = true
17851
+ break
17852
+ end
17853
+ end
17854
+ end
17855
+ end
17856
+ scanLower = "(" .. logicalKey
17857
+ end
17858
+
17859
+ if stop or limit == 0 or #range < chunkSize then
17860
+ break
17861
+ end
17862
+ end
17863
+
17864
+ local streamKey = KEYS[10]
17865
+ if rawPrefix ~= "" then
17866
+ streamKey = prefix_stream(KEYS[12], rawPrefix)
17867
+ end
17868
+
17869
+ return cjson.encode({
17870
+ entries = collected,
17871
+ cursor = latest_cursor(streamKey),
17872
+ nextKey = nextKey,
17873
+ })
17874
+ `;
17875
+ var asError6 = (error48) => error48 instanceof Error ? error48 : new Error(String(error48));
17876
+ var safeClose4 = (client) => {
17877
+ if (!client.connected)
17878
+ return;
17879
+ try {
17880
+ client.close();
17881
+ } catch {}
17882
+ };
17883
+ var evalScript4 = async (script, keys, args) => {
17884
+ return await redis8.send("EVAL", [script, keys.length.toString(), ...keys, ...args.map((v) => String(v))]);
17885
+ };
17886
+ var blockingReadWithTemporaryClient3 = async (args, signal) => {
17887
+ if (signal?.aborted)
17888
+ return null;
17889
+ const client = new RedisClient4;
17890
+ const onAbort = () => {
17891
+ safeClose4(client);
17892
+ };
17893
+ if (signal)
17894
+ signal.addEventListener("abort", onAbort, { once: true });
17895
+ try {
17896
+ if (!client.connected)
17897
+ await client.connect();
17898
+ return await client.send("XREAD", args);
17899
+ } catch (error48) {
17900
+ if (signal?.aborted)
17901
+ return null;
17902
+ throw asError6(error48);
17903
+ } finally {
17904
+ if (signal)
17905
+ signal.removeEventListener("abort", onAbort);
17906
+ safeClose4(client);
17907
+ }
17908
+ };
17909
// Extract the first [id, fields] tuple from an XRANGE/XREVRANGE reply,
// or null when the reply is empty or malformed.
var parseFirstRangeEntry2 = (raw) => {
  if (!Array.isArray(raw)) {
    return null;
  }
  const [head] = raw;
  if (!Array.isArray(head) || head.length < 2) {
    return null;
  }
  const [id, fieldList] = head;
  if (typeof id !== "string") {
    return null;
  }
  return { id, fields: fieldArrayToObject(fieldList) };
};
17923
/**
 * Coerce a Redis stream field value into a finite number.
 *
 * Accepts finite numbers as-is and non-blank numeric strings; everything
 * else (NaN/Infinity, empty or whitespace-only strings, other types)
 * yields null so callers can treat the field as absent.
 *
 * @param {unknown} value - raw field value from a stream entry
 * @returns {number | null} the finite number, or null when not parseable
 */
var parseOptionalNumber2 = (value) => {
  if (typeof value === "number")
    return Number.isFinite(value) ? value : null;
  // Fix: Number("  ") coerces to 0, so a whitespace-only string previously
  // parsed as the number 0; require non-blank content before converting.
  if (typeof value === "string" && value.trim() !== "") {
    const num = Number(value);
    return Number.isFinite(num) ? num : null;
  }
  return null;
};
17932
// Return the value only when it is a non-empty string; otherwise null.
var parseOptionalString = (value) => {
  const isNonEmptyString = typeof value === "string" && value.length > 0;
  return isNonEmptyString ? value : null;
};
17937
// Percent-encode one identifier segment so it cannot contain the ':'
// separators used in composed Redis key names.
var encodeSegment2 = (value) => {
  return encodeURIComponent(value);
};
17938
// Validate a tenant/registry identifier: non-empty and within the
// configured length cap; throws with the caller-supplied label.
var assertIdentifier2 = (value, label) => {
  if (value.length === 0) {
    throw new Error(`${label} must be non-empty`);
  }
  if (value.length > MAX_IDENTIFIER_LENGTH2) {
    throw new Error(`${label} too long (max ${MAX_IDENTIFIER_LENGTH2} chars)`);
  }
};
17944
// '{' and '}' are reserved by the key scheme — presumably to keep
// user keys from influencing Redis cluster hash-tag routing (TODO confirm).
var assertNoReservedBraces = (value, label) => {
  const hasBrace = value.includes("{") || value.includes("}");
  if (hasBrace) {
    throw new Error(`${label} must not contain '{' or '}'`);
  }
};
17949
// Validate the shape of a logical key or prefix path: non-empty, no NUL
// bytes, no reserved braces, within the byte budget, no leading slash,
// no empty ("//") segments, and at most MAX_KEY_DEPTH segments. A trailing
// slash is tolerated only when allowTrailingSlash is set (prefix mode).
var assertKeyStructure = (value, label, allowTrailingSlash) => {
  if (value.length === 0) {
    throw new Error(`${label} must be non-empty`);
  }
  if (value.includes("\x00")) {
    throw new Error(`${label} must not contain null bytes`);
  }
  assertNoReservedBraces(value, label);
  // Budget is measured in encoded UTF-8 bytes, not UTF-16 code units.
  if (textEncoder4.encode(value).byteLength > MAX_KEY_BYTES2) {
    throw new Error(`${label} exceeds max length (${MAX_KEY_BYTES2} bytes)`);
  }
  if (value.startsWith("/")) {
    throw new Error(`${label} must not start with '/'`);
  }
  if (value.endsWith("/") && !allowTrailingSlash) {
    throw new Error(`${label} must not end with '/'`);
  }
  if (value.includes("//")) {
    throw new Error(`${label} must not contain empty path segments`);
  }
  const normalized = allowTrailingSlash && value.endsWith("/") ? value.slice(0, -1) : value;
  const segments = normalized.split("/").filter(Boolean);
  if (segments.length === 0) {
    throw new Error(`${label} must contain at least one path segment`);
  }
  if (segments.length > MAX_KEY_DEPTH) {
    throw new Error(`${label} exceeds max depth (${MAX_KEY_DEPTH})`);
  }
};
17971
// A logical key is a path with no trailing slash permitted.
var assertLogicalKey2 = (value) => {
  assertKeyStructure(value, "key", /* allowTrailingSlash */ false);
};
17974
// Normalize an optional key prefix: absent/empty collapses to "", any
// other value must be a structurally valid path that ends with '/'.
var normalizePrefix = (value) => {
  if (!value) {
    return "";
  }
  assertKeyStructure(value, "prefix", true);
  const hasTrailingSlash = value.endsWith("/");
  if (!hasTrailingSlash) {
    throw new Error("prefix must end with '/'");
  }
  return value;
};
17983
+
17984
// Raised when an upsert would push a tenant past limits.maxEntries.
class RegistryCapacityError extends Error {
  constructor(reason = "registry capacity reached") {
    super(reason);
    this.name = "RegistryCapacityError";
  }
}
17990
+
17991
// Raised when a serialized payload exceeds limits.maxPayloadBytes.
// Fix: provide a default message so a no-argument construction behaves
// like the sibling RegistryCapacityError instead of yielding an empty
// message. Callers that pass an explicit message are unaffected.
class RegistryPayloadTooLargeError extends Error {
  constructor(message = "registry payload too large") {
    super(message);
    this.name = "RegistryPayloadTooLargeError";
  }
}
17997
+ var registry2 = (config2) => {
17998
+ assertIdentifier2(config2.id, "config.id");
17999
+ const prefix = config2.prefix ?? DEFAULT_PREFIX8;
18000
+ const defaultTenant = config2.tenantId ?? DEFAULT_TENANT4;
18001
+ assertIdentifier2(defaultTenant, "tenantId");
18002
+ const maxEntries = config2.limits?.maxEntries ?? DEFAULT_MAX_ENTRIES2;
18003
+ const maxPayloadBytes = config2.limits?.maxPayloadBytes ?? DEFAULT_MAX_PAYLOAD_BYTES2;
18004
+ const eventRetentionMs = config2.limits?.eventRetentionMs ?? DEFAULT_EVENT_RETENTION_MS2;
18005
+ const eventMaxLen = config2.limits?.eventMaxLen ?? DEFAULT_EVENT_MAXLEN2;
18006
+ const tombstoneRetentionMs = config2.limits?.tombstoneRetentionMs ?? DEFAULT_TOMBSTONE_RETENTION_MS;
18007
+ const reconcileBatchSize = config2.limits?.reconcileBatchSize ?? DEFAULT_RECONCILE_BATCH_SIZE2;
18008
+ if (!Number.isInteger(maxEntries) || maxEntries <= 0)
18009
+ throw new Error("limits.maxEntries must be > 0");
18010
+ if (!Number.isInteger(maxPayloadBytes) || maxPayloadBytes <= 0)
18011
+ throw new Error("limits.maxPayloadBytes must be > 0");
18012
+ if (!Number.isInteger(eventRetentionMs) || eventRetentionMs <= 0)
18013
+ throw new Error("limits.eventRetentionMs must be > 0");
18014
+ if (!Number.isInteger(eventMaxLen) || eventMaxLen <= 0)
18015
+ throw new Error("limits.eventMaxLen must be > 0");
18016
+ if (!Number.isInteger(tombstoneRetentionMs) || tombstoneRetentionMs <= 0)
18017
+ throw new Error("limits.tombstoneRetentionMs must be > 0");
18018
+ if (!Number.isInteger(reconcileBatchSize) || reconcileBatchSize <= 0)
18019
+ throw new Error("limits.reconcileBatchSize must be > 0");
18020
// Resolve an explicit tenant id, falling back to the configured default,
// and validate it before it is used to build Redis key names.
const resolveTenant = (tenantId) => {
  const effective = tenantId ?? defaultTenant;
  assertIdentifier2(effective, "tenantId");
  return effective;
};
18025
// Build the full set of Redis key names for one tenant's registry.
// Tenant and registry ids are percent-encoded so user-supplied values
// cannot break the `prefix:tenant:registry:suffix` layout.
const keysForTenant = (tenantId) => {
  const base = [prefix, encodeSegment2(tenantId), encodeSegment2(config2.id)].join(":");
  return {
    state: `${base}:state`,
    activeKeys: `${base}:keys`,
    expirations: `${base}:exp`,
    ttlPrefix: `${base}:ttl:`,
    tombstones: `${base}:dead`,
    tombstoneKeys: `${base}:deadkeys`,
    tombstoneExpirations: `${base}:deadexp`,
    prefixRefs: `${base}:pref`,
    seq: `${base}:seq`,
    rootStream: `${base}:ev:root`,
    keyStreamPrefix: `${base}:ev:key:`,
    prefixStreamPrefix: `${base}:ev:px:`
  };
};
18042
// Stream-trim threshold id: events older than the retention window are
// eligible for removal (stream ids start with a millisecond timestamp).
const trimMinId = () => {
  const oldestAllowed = Date.now() - eventRetentionMs;
  return `${oldestAllowed}-0`;
};
18043
// Decode one stored-entry JSON blob into a typed registry entry.
// Returns null when the blob is malformed or its value no longer passes
// the configured schema.
const parseStoredEntry = (raw) => {
  try {
    const stored = JSON.parse(raw);
    const checked = config2.schema.safeParse(stored.value);
    if (!checked.success) {
      return null;
    }
    return {
      key: String(stored.key),
      value: checked.data,
      version: String(stored.version),
      // Anything other than the literal "expired" counts as active.
      status: stored.status === "expired" ? "expired" : "active",
      createdAt: Number(stored.createdAt),
      updatedAt: Number(stored.updatedAt),
      ttlMs: stored.ttlMs == null ? null : Number(stored.ttlMs),
      expiresAt: stored.expiresAt == null ? null : Number(stored.expiresAt)
    };
  } catch {
    return null;
  }
};
18064
// Decode an "upsert" stream event carrying a full entry snapshot.
// Returns null when the payload is missing, fails schema validation,
// or either required timestamp is absent/unparseable.
const parseUpsertEvent = (entry) => {
  const { payload, key, version, createdAt, updatedAt, ttlMs, expiresAt } = entry.fields;
  if (!payload) {
    return null;
  }
  try {
    const checked = config2.schema.safeParse(JSON.parse(payload));
    if (!checked.success) {
      return null;
    }
    const createdTs = parseOptionalNumber2(createdAt);
    const updatedTs = parseOptionalNumber2(updatedAt);
    if (createdTs === null || updatedTs === null) {
      return null;
    }
    return {
      type: "upsert",
      cursor: entry.id,
      entry: {
        key: key ?? "",
        value: checked.data,
        version: version ?? "",
        status: "active",
        createdAt: createdTs,
        updatedAt: updatedTs,
        ttlMs: parseOptionalNumber2(ttlMs),
        expiresAt: parseOptionalNumber2(expiresAt)
      }
    };
  } catch {
    return null;
  }
};
18095
// Decode a raw stream entry into a typed registry event.
// Shapes: "upsert" (full snapshot), "touch" (TTL refresh), "delete"
// (explicit removal with optional reason), "expire" (TTL removal).
// Unknown types or entries missing required numeric fields yield null.
const parseEvent = (entry) => {
  const { fields } = entry;
  switch (fields.type) {
    case "upsert":
      return parseUpsertEvent(entry);
    case "touch": {
      const updatedAt = parseOptionalNumber2(fields.updatedAt);
      const expiresAt = parseOptionalNumber2(fields.expiresAt);
      if (updatedAt === null || expiresAt === null) {
        return null;
      }
      return {
        type: "touch",
        cursor: entry.id,
        key: fields.key ?? "",
        version: fields.version ?? "",
        updatedAt,
        expiresAt
      };
    }
    case "delete": {
      const removedAt = parseOptionalNumber2(fields.removedAt);
      if (removedAt === null) {
        return null;
      }
      return {
        type: "delete",
        cursor: entry.id,
        key: fields.key ?? "",
        version: fields.version ?? "",
        removedAt,
        reason: parseOptionalString(fields.reason) ?? undefined
      };
    }
    case "expire": {
      const removedAt = parseOptionalNumber2(fields.removedAt);
      if (removedAt === null) {
        return null;
      }
      return {
        type: "expire",
        cursor: entry.id,
        key: fields.key ?? "",
        version: fields.version ?? "",
        removedAt
      };
    }
    default:
      return null;
  }
};
18140
// Newest event id on a stream, or "0-0" when the stream is empty.
const latestCursor = async (streamKey) => {
  const reply = await redis8.send("XREVRANGE", [streamKey, "+", "-", "COUNT", "1"]);
  const newest = parseFirstRangeEntry2(reply);
  return newest === null ? "0-0" : newest.id;
};
18145
// First event id at or after `cursor`, or null when no such event exists.
const firstAtOrAfterCursor = async (streamKey, cursor) => {
  const reply = await redis8.send("XRANGE", [streamKey, cursor, "+", "COUNT", "1"]);
  const first = parseFirstRangeEntry2(reply);
  return first ? first.id : null;
};
18149
// Pick the event stream for a subscription: a specific key's stream, a
// prefix stream, or the root stream when no selection is given.
// Key takes precedence over prefix when both are (incorrectly) present.
const selectionStreamKey = (keys, selection) => {
  if (selection.key) {
    return keys.keyStreamPrefix + selection.key;
  }
  if (selection.prefix) {
    return keys.prefixStreamPrefix + selection.prefix;
  }
  return keys.rootStream;
};
18156
// Parse the touch script's reply. A falsy or unparsable reply maps to
// { ok: false }; otherwise the new version and expiry are returned.
const parseTouchResult = (raw) => {
  if (!raw) {
    return { ok: false };
  }
  const text = typeof raw === "string" ? raw : String(raw);
  try {
    const reply = JSON.parse(text);
    return {
      ok: true,
      version: String(reply.version),
      expiresAt: Number(reply.expiresAt)
    };
  } catch {
    return { ok: false };
  }
};
18170
// Parse the list script's JSON reply: re-validate each entry through
// parseStoredEntry (dropping any that fail), sort ascending by key, and
// surface the stream cursor plus the pagination key when more remain.
const parseListResult = (raw) => {
  const reply = JSON.parse(typeof raw === "string" ? raw : String(raw));
  const rawEntries = Array.isArray(reply.entries) ? reply.entries : [];
  const entries = rawEntries
    .map((item) => parseStoredEntry(JSON.stringify(item)))
    .filter((entry) => entry !== null);
  entries.sort((a, b) => a.key.localeCompare(b.key));
  const nextKey = typeof reply.nextKey === "string" && reply.nextKey.length > 0 ? reply.nextKey : undefined;
  return {
    entries,
    cursor: typeof reply.cursor === "string" ? reply.cursor : "0-0",
    nextKey
  };
};
18186
// Run one bounded reconcile pass via the Lua script and report the
// counts it returns (missing counters default to 0).
const runReconcileBatch = async (keys, now2) => {
  const scriptKeys = [
    keys.state,
    keys.activeKeys,
    keys.expirations,
    keys.ttlPrefix,
    keys.tombstones,
    keys.tombstoneKeys,
    keys.tombstoneExpirations,
    keys.prefixRefs,
    keys.seq,
    keys.rootStream,
    keys.keyStreamPrefix,
    keys.prefixStreamPrefix
  ];
  const scriptArgs = [now2, tombstoneRetentionMs, reconcileBatchSize, trimMinId(), eventMaxLen];
  const raw = await evalScript4(RECONCILE_BATCH_SCRIPT, scriptKeys, scriptArgs);
  const stats = JSON.parse(typeof raw === "string" ? raw : String(raw));
  return {
    expired: Number(stats.expired ?? 0),
    cleaned: Number(stats.cleaned ?? 0),
    dueCount: Number(stats.dueCount ?? 0),
    staleCount: Number(stats.staleCount ?? 0)
  };
};
18209
// Repeat reconcile batches until a pass reports both work counts below
// the batch size (backlog drained) or the loop cap is reached, yielding
// for 1ms between passes so the event loop is not starved.
const runFullReconcile = async (keys, now2) => {
  for (let pass = 0; pass < MAX_RECONCILE_LOOPS; pass += 1) {
    const batch = await runReconcileBatch(keys, now2);
    const drained = batch.dueCount < reconcileBatchSize && batch.staleCount < reconcileBatchSize;
    if (drained) {
      break;
    }
    await Bun.sleep(1);
  }
};
18220
+ const upsert = async (cfg) => {
18221
+ assertLogicalKey2(cfg.key);
18222
+ const tenantId = resolveTenant(cfg.tenantId);
18223
+ const keys = keysForTenant(tenantId);
18224
+ const parsed = config2.schema.safeParse(cfg.value);
18225
+ if (!parsed.success)
18226
+ throw parsed.error;
18227
+ if (cfg.ttlMs !== undefined) {
18228
+ if (!Number.isFinite(cfg.ttlMs) || cfg.ttlMs <= 0) {
18229
+ throw new Error("ttlMs must be > 0 when provided");
18230
+ }
18231
+ }
18232
+ const payloadRaw = JSON.stringify(parsed.data);
18233
+ const payloadBytes = textEncoder4.encode(payloadRaw).byteLength;
18234
+ if (payloadBytes > maxPayloadBytes) {
18235
+ throw new RegistryPayloadTooLargeError(`payload exceeds limit (${maxPayloadBytes} bytes)`);
18236
+ }
18237
+ const raw = await evalScript4(UPSERT_SCRIPT3, [
18238
+ keys.state,
18239
+ keys.activeKeys,
18240
+ keys.expirations,
18241
+ keys.ttlPrefix,
18242
+ keys.tombstones,
18243
+ keys.tombstoneKeys,
18244
+ keys.tombstoneExpirations,
18245
+ keys.prefixRefs,
18246
+ keys.seq,
18247
+ keys.rootStream,
18248
+ keys.keyStreamPrefix,
18249
+ keys.prefixStreamPrefix
18250
+ ], [
18251
+ Date.now(),
18252
+ cfg.ttlMs ?? "",
18253
+ payloadRaw,
18254
+ cfg.key,
18255
+ maxEntries,
18256
+ tombstoneRetentionMs,
18257
+ trimMinId(),
18258
+ eventMaxLen
18259
+ ]);
18260
+ if (raw === "__ERR_CAPACITY__") {
18261
+ throw new RegistryCapacityError(`maxEntries (${maxEntries}) reached`);
18262
+ }
18263
+ if (raw === "__ERR_PAYLOAD__") {
18264
+ throw new Error("invalid payload encoding");
18265
+ }
18266
+ const entry = parseStoredEntry(typeof raw === "string" ? raw : String(raw ?? ""));
18267
+ if (!entry)
18268
+ throw new Error("failed to parse stored registry entry");
18269
+ return entry;
18270
+ };
18271
// Refresh a key's TTL via the touch script. Resolves { ok: false } when
// the script reports no touchable entry, otherwise the refreshed
// version and new expiry.
const touch = async (cfg) => {
  assertLogicalKey2(cfg.key);
  const keys = keysForTenant(resolveTenant(cfg.tenantId));
  const raw = await evalScript4(TOUCH_SCRIPT3, [
    keys.state,
    keys.activeKeys,
    keys.expirations,
    keys.ttlPrefix,
    keys.tombstones,
    keys.tombstoneKeys,
    keys.tombstoneExpirations,
    keys.prefixRefs,
    keys.seq,
    keys.rootStream,
    keys.keyStreamPrefix,
    keys.prefixStreamPrefix
  ], [Date.now(), cfg.key, tombstoneRetentionMs, trimMinId(), eventMaxLen]);
  return parseTouchResult(raw);
};
18291
// Remove a key (optionally recording a reason string). Resolves true
// when the script reports an entry was actually removed.
const remove = async (cfg) => {
  assertLogicalKey2(cfg.key);
  const keys = keysForTenant(resolveTenant(cfg.tenantId));
  const raw = await evalScript4(REMOVE_SCRIPT2, [
    keys.state,
    keys.activeKeys,
    keys.expirations,
    keys.ttlPrefix,
    keys.tombstones,
    keys.tombstoneKeys,
    keys.tombstoneExpirations,
    keys.prefixRefs,
    keys.seq,
    keys.rootStream,
    keys.keyStreamPrefix,
    keys.prefixStreamPrefix
  ], [Date.now(), cfg.key, cfg.reason ?? "", tombstoneRetentionMs, trimMinId(), eventMaxLen]);
  return Number(raw) > 0;
};
18311
// Fetch a single entry. `includeExpired` is forwarded to the script as a
// 0/1 flag so it can also return entries marked expired. Resolves null
// when the key is absent or the stored blob fails validation.
const get = async (cfg) => {
  assertLogicalKey2(cfg.key);
  const keys = keysForTenant(resolveTenant(cfg.tenantId));
  const raw = await evalScript4(GET_SCRIPT, [
    keys.state,
    keys.activeKeys,
    keys.expirations,
    keys.ttlPrefix,
    keys.tombstones,
    keys.tombstoneKeys,
    keys.tombstoneExpirations,
    keys.prefixRefs,
    keys.seq,
    keys.rootStream,
    keys.keyStreamPrefix,
    keys.prefixStreamPrefix
  ], [Date.now(), cfg.key, cfg.includeExpired ? 1 : 0, tombstoneRetentionMs, trimMinId(), eventMaxLen]);
  if (!raw) {
    return null;
  }
  return parseStoredEntry(typeof raw === "string" ? raw : String(raw ?? ""));
};
18333
+ const list = async (cfg = {}) => {
18334
+ const tenantId = resolveTenant(cfg.tenantId);
18335
+ const keys = keysForTenant(tenantId);
18336
+ const prefixValue = normalizePrefix(cfg.prefix);
18337
+ const status = cfg.status ?? "active";
18338
+ if (status !== "active" && status !== "expired") {
18339
+ throw new Error(`unsupported status: ${status}`);
18340
+ }
18341
+ let limit = cfg.limit ?? 0;
18342
+ if (limit !== 0) {
18343
+ if (!Number.isInteger(limit) || limit <= 0) {
18344
+ throw new Error("limit must be a positive integer when provided");
18345
+ }
18346
+ limit = Math.min(limit, DEFAULT_LIST_LIMIT);
18347
+ }
18348
+ if (cfg.afterKey !== undefined) {
18349
+ assertLogicalKey2(cfg.afterKey);
18350
+ if (prefixValue && !cfg.afterKey.startsWith(prefixValue)) {
18351
+ throw new Error("afterKey must start with prefix");
18352
+ }
18353
+ }
18354
+ const snapshotNow = Date.now();
18355
+ await runFullReconcile(keys, snapshotNow);
18356
+ const raw = await evalScript4(LIST_PAGE_SCRIPT, [
18357
+ keys.state,
18358
+ keys.activeKeys,
18359
+ keys.expirations,
18360
+ keys.ttlPrefix,
18361
+ keys.tombstones,
18362
+ keys.tombstoneKeys,
18363
+ keys.tombstoneExpirations,
18364
+ keys.prefixRefs,
18365
+ keys.seq,
18366
+ keys.rootStream,
18367
+ keys.keyStreamPrefix,
18368
+ keys.prefixStreamPrefix
18369
+ ], [
18370
+ prefixValue,
18371
+ status,
18372
+ limit,
18373
+ cfg.afterKey ?? ""
18374
+ ]);
18375
+ return parseListResult(raw);
18376
+ };
18377
+ const cas = async (cfg) => {
18378
+ assertLogicalKey2(cfg.key);
18379
+ if (cfg.version.length === 0)
18380
+ throw new Error("version must be non-empty");
18381
+ const tenantId = resolveTenant(cfg.tenantId);
18382
+ const keys = keysForTenant(tenantId);
18383
+ const parsed = config2.schema.safeParse(cfg.value);
18384
+ if (!parsed.success)
18385
+ throw parsed.error;
18386
+ const payloadRaw = JSON.stringify(parsed.data);
18387
+ const payloadBytes = textEncoder4.encode(payloadRaw).byteLength;
18388
+ if (payloadBytes > maxPayloadBytes) {
18389
+ throw new RegistryPayloadTooLargeError(`payload exceeds limit (${maxPayloadBytes} bytes)`);
18390
+ }
18391
+ const raw = await evalScript4(CAS_SCRIPT, [
18392
+ keys.state,
18393
+ keys.activeKeys,
18394
+ keys.expirations,
18395
+ keys.ttlPrefix,
18396
+ keys.tombstones,
18397
+ keys.tombstoneKeys,
18398
+ keys.tombstoneExpirations,
18399
+ keys.prefixRefs,
18400
+ keys.seq,
18401
+ keys.rootStream,
18402
+ keys.keyStreamPrefix,
18403
+ keys.prefixStreamPrefix
18404
+ ], [Date.now(), cfg.key, cfg.version, payloadRaw, tombstoneRetentionMs, trimMinId(), eventMaxLen]);
18405
+ if (raw === "__ERR_PAYLOAD__") {
18406
+ throw new Error("invalid payload encoding");
18407
+ }
18408
+ const parsedRaw = JSON.parse(typeof raw === "string" ? raw : String(raw));
18409
+ if (!parsedRaw.ok)
18410
+ return { ok: false };
18411
+ const entry = parsedRaw.entry ? parseStoredEntry(JSON.stringify(parsedRaw.entry)) : null;
18412
+ if (!entry)
18413
+ return { ok: false };
18414
+ return { ok: true, entry };
18415
+ };
18416
+ const reader = (readerCfg = {}) => {
18417
+ if (readerCfg.key && readerCfg.prefix) {
18418
+ throw new Error("reader accepts either key or prefix, not both");
18419
+ }
18420
+ if (readerCfg.key)
18421
+ assertLogicalKey2(readerCfg.key);
18422
+ const prefixValue = readerCfg.prefix ? normalizePrefix(readerCfg.prefix) : "";
18423
+ const tenantId = resolveTenant(readerCfg.tenantId);
18424
+ const keys = keysForTenant(tenantId);
18425
+ const streamKey = selectionStreamKey(keys, { key: readerCfg.key, prefix: prefixValue || undefined });
18426
+ let cursor = readerCfg.after ?? "$";
18427
+ let overflowPending = null;
18428
+ let replayChecked = false;
18429
+ let anchored = false;
18430
+ let blockingClient = null;
18431
+ const resetBlockingClient = () => {
18432
+ if (!blockingClient)
18433
+ return;
18434
+ safeClose4(blockingClient);
18435
+ blockingClient = null;
18436
+ };
18437
+ const ensureBlockingClient = async () => {
18438
+ if (blockingClient?.connected)
18439
+ return blockingClient;
18440
+ resetBlockingClient();
18441
+ blockingClient = new RedisClient4;
18442
+ await blockingClient.connect();
18443
+ return blockingClient;
18444
+ };
18445
+ const checkReplayGap = async () => {
18446
+ if (replayChecked)
18447
+ return;
18448
+ replayChecked = true;
18449
+ const after = readerCfg.after;
18450
+ if (!after || after === "$")
18451
+ return;
18452
+ const firstAvailable = await firstAtOrAfterCursor(streamKey, after);
18453
+ if (!firstAvailable)
18454
+ return;
18455
+ if (after === "0-0" || firstAvailable !== after) {
18456
+ const liveCursor = await latestCursor(streamKey);
18457
+ overflowPending = {
18458
+ type: "overflow",
18459
+ cursor: liveCursor,
18460
+ after,
18461
+ firstAvailable
18462
+ };
18463
+ cursor = liveCursor;
18464
+ }
18465
+ };
18466
+ const anchorLiveCursor = async () => {
18467
+ if (anchored)
18468
+ return;
18469
+ anchored = true;
18470
+ if (cursor !== "$")
18471
+ return;
18472
+ cursor = await latestCursor(streamKey);
18473
+ };
18474
+ const recv = async (cfg = {}) => {
18475
+ await anchorLiveCursor();
18476
+ await checkReplayGap();
18477
+ if (overflowPending) {
18478
+ const pending = overflowPending;
18479
+ overflowPending = null;
18480
+ return pending;
18481
+ }
18482
+ const wait = cfg.wait ?? true;
18483
+ const timeoutMs = cfg.timeoutMs ?? DEFAULT_TIMEOUT_MS3;
18484
+ const args = wait ? ["COUNT", "1", "BLOCK", timeoutMs.toString(), "STREAMS", streamKey, cursor] : ["COUNT", "1", "STREAMS", streamKey, cursor];
18485
+ const raw = cfg.signal ? await blockingReadWithTemporaryClient3(args, cfg.signal) : wait ? await (async () => {
18486
+ const client = await ensureBlockingClient();
18487
+ try {
18488
+ return await client.send("XREAD", args);
18489
+ } catch (error48) {
18490
+ resetBlockingClient();
18491
+ throw asError6(error48);
18492
+ }
18493
+ })() : await redis8.send("XREAD", args);
18494
+ const entry = parseFirstStreamEntry(raw);
18495
+ if (!entry)
18496
+ return null;
18497
+ cursor = entry.id;
18498
+ return parseEvent(entry);
18499
+ };
18500
+ const stream = async function* (cfg = {}) {
18501
+ const wait = cfg.wait ?? true;
18502
+ try {
18503
+ while (!cfg.signal?.aborted) {
18504
+ const next = wait ? await retry(async () => await recv(cfg), {
18505
+ attempts: Number.POSITIVE_INFINITY,
18506
+ signal: cfg.signal,
18507
+ retryIf: isRetryableTransportError
18508
+ }) : await recv(cfg);
18509
+ if (next) {
18510
+ yield next;
18511
+ continue;
18512
+ }
18513
+ if (!wait)
18514
+ break;
18515
+ }
18516
+ } finally {
18517
+ resetBlockingClient();
18518
+ }
18519
+ };
18520
+ return { recv, stream };
18521
+ };
18522
+ return {
18523
+ upsert,
18524
+ touch,
18525
+ remove,
18526
+ get,
18527
+ list,
18528
+ cas,
18529
+ reader
18530
+ };
18531
+ };
16918
18532
  export {
16919
18533
  topic,
16920
18534
  scheduler,
16921
18535
  retry,
18536
+ registry2 as registry,
16922
18537
  ratelimit,
16923
18538
  queue,
16924
18539
  mutex,
16925
18540
  job,
16926
18541
  isRetryableTransportError,
16927
18542
  ephemeral,
18543
+ RegistryPayloadTooLargeError,
18544
+ RegistryCapacityError,
16928
18545
  RateLimitError,
16929
18546
  LockError,
16930
18547
  EphemeralPayloadTooLargeError,