groupmq-plus 1.4.2 → 1.4.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,4 +1,4 @@
1
- var e=Object.create,t=Object.defineProperty,n=Object.getOwnPropertyDescriptor,r=Object.getOwnPropertyNames,i=Object.getPrototypeOf,a=Object.prototype.hasOwnProperty,o=(e,t)=>()=>(t||e((t={exports:{}}).exports,t),t.exports),s=(e,i,o,s)=>{if(i&&typeof i==`object`||typeof i==`function`)for(var c=r(i),l=0,u=c.length,d;l<u;l++)d=c[l],!a.call(e,d)&&d!==o&&t(e,d,{get:(e=>i[e]).bind(null,d),enumerable:!(s=n(i,d))||s.enumerable});return e},c=(n,r,a)=>(a=n==null?{}:e(i(n)),s(r||!n||!n.__esModule?t(a,`default`,{value:n,enumerable:!0}):a,n));let l=require(`node:crypto`);l=c(l);let u=require(`cron-parser`);u=c(u);let d=require(`node:fs`);d=c(d);let f=require(`node:path`);f=c(f);let p=require(`node:url`);p=c(p);var m=o((e=>{Object.defineProperty(e,`__esModule`,{value:!0}),e.BaseAdapter=void 0,e.BaseAdapter=class{constructor(e,t={}){this.formatters=new Map,this._visibilityGuard=()=>!0,this.readOnlyMode=t.readOnlyMode===!0,this.allowRetries=this.readOnlyMode?!1:t.allowRetries!==!1,this.allowCompletedRetries=this.allowRetries&&t.allowCompletedRetries!==!1,this.prefix=t.prefix||``,this.delimiter=t.delimiter||``,this.description=t.description||``,this.displayName=t.displayName||``,this.type=e,this.externalJobUrl=t.externalJobUrl}getDescription(){return this.description}getDisplayName(){return this.displayName}setFormatter(e,t){this.formatters.set(e,t)}format(e,t,n=t){let r=this.formatters.get(e);return typeof r==`function`?r(t):n}setVisibilityGuard(e){this._visibilityGuard=e}isVisible(e){return this._visibilityGuard(e)}}})),h=c(m()),g=class extends h.BaseAdapter{constructor(e,t={}){let n=e.namespace;super(n,t),this.queue=e,this.options=t}getDescription(){return this.options.description||``}getDisplayName(){return this.options.displayName||``}getName(){return`${this.options.prefix||``}${this.options.delimiter||``}${this.queue.rawNamespace}`.replace(/(^[\s:]+)|([\s:]+$)/g,``)}async getRedisInfo(){return this.queue.redis.info()}async getJob(e){return await this.queue.getJob(e)}async 
getJobs(e,t,n){return await this.queue.getJobsByStatus(e,t,n)}async getJobCounts(){let e=await this.queue.getJobCounts();return{latest:0,active:e.active,waiting:e.waiting,"waiting-children":e[`waiting-children`],prioritized:e.prioritized,completed:e.completed,failed:e.failed,delayed:e.delayed,paused:e.paused}}async getJobLogs(e){return[]}getStatuses(){return[`latest`,`active`,`waiting`,`waiting-children`,`prioritized`,`completed`,`failed`,`delayed`,`paused`]}getJobStatuses(){return[`active`,`waiting`,`waiting-children`,`prioritized`,`completed`,`failed`,`delayed`,`paused`]}assertWritable(){if(this.options.readOnlyMode)throw Error(`This adapter is in read-only mode. Mutations are disabled.`)}async clean(e,t){this.assertWritable(),!(e!==`completed`&&e!==`failed`&&e!==`delayed`)&&await this.queue.clean(t,2**53-1,e)}async addJob(e,t,n){return this.assertWritable(),await this.queue.add({groupId:n.groupId??Math.random().toString(36).substring(2,15),data:t,...n})}async isPaused(){return this.queue.isPaused()}async pause(){this.assertWritable(),await this.queue.pause()}async resume(){this.assertWritable(),await this.queue.resume()}async empty(){throw this.assertWritable(),Error(`Not implemented`)}async promoteAll(){throw this.assertWritable(),Error(`Not implemented`)}},_=class extends Error{constructor(e,t){let n=`Active=${e.active}, Waiting=${e.waiting}, Delayed=${e.delayed}, Staged=${e.staged}, Limited=${e.limited}, Ready=${e.ready}`;super(`waitForEmpty timed out after ${t}ms. 
State: ${n}`),this.name=`WaitForEmptyTimeoutError`,this.state=e,this.timeoutMs=t}};async function v(e,t=6e4){let n=typeof t==`number`?{timeoutMs:t}:t;return e.waitForEmpty(n)}async function y(e,t=3e4,n=100){let r=Date.now();for(;Date.now()-r<t;){try{if(await e())return!0}catch{}await x(n)}return!1}async function b(e,t,n=3e4,r=100){if(!await y(e,n,r))throw Error(`${t} (timed out after ${n}ms)`)}function x(e){return new Promise(t=>setTimeout(t,e))}function S(e){let t=e.map((e,t)=>{let n=e.getCurrentJob();return{index:t,isProcessing:e.isProcessing(),currentJob:n?{jobId:n.job.id,groupId:n.job.groupId,processingTimeMs:n.processingTimeMs}:void 0}}),n=t.filter(e=>e.isProcessing).length,r=t.length-n;return{total:e.length,processing:n,idle:r,workers:t}}var C=class e{constructor(e){this.queue=e.queue,this.id=e.id,this.name=e.name??`groupmq`,this.data=e.data,this.groupId=e.groupId,this.attemptsMade=e.attemptsMade,this.opts=e.opts,this.processedOn=e.processedOn,this.finishedOn=e.finishedOn,this.failedReason=e.failedReason,this.stacktrace=e.stacktrace,this.returnvalue=e.returnvalue,this.timestamp=e.timestamp,this.orderMs=e.orderMs,this.status=e.status??`unknown`,this.parentId=e.parentId,this.isFlowParent=e.isFlowParent===!0,this.token=e.token}async getState(){return this.status??`unknown`}toJSON(){return{id:this.id,name:this.name,data:this.data,groupId:this.groupId,attemptsMade:this.attemptsMade,opts:this.opts,processedOn:this.processedOn,finishedOn:this.finishedOn,failedReason:this.failedReason,stacktrace:this.stacktrace?[this.stacktrace]:null,returnvalue:this.returnvalue,timestamp:this.timestamp,orderMs:this.orderMs,status:this.status,progress:0,isFlowParent:this.isFlowParent}}changeDelay(e){return this.queue.changeDelay(this.id,e)}async promote(){await this.queue.promote(this.id)}async remove(){await this.queue.remove(this.id)}async retry(e){if(!this.token)throw Error(`Cannot retry job ${this.id}: token not available`);await 
this.queue.retry({id:this.id,token:this.token})}async updateData(e){await this.queue.updateData(this.id,e),this.data=e}isActive(){return this.status===`active`}isWaiting(){return this.status===`waiting`}isDelayed(){return this.status===`delayed`}isCompleted(){return this.status===`completed`}isFailed(){return this.status===`failed`}isWaitingChildren(){return this.status===`waiting-children`}async waitUntilFinished(e=0){return this.queue.waitUntilFinished(this.id,e)}async getChildren(){return this.queue.getFlowChildren(this.id)}async getChildrenValues(){return this.queue.getFlowResults(this.id)}async getChildrenCount(){return this.queue.getFlowChildrenCount(this.id)}async getRemainingCount(){return this.queue.getFlowRemainingCount(this.id)}async getDependenciesCount(e){let t=await this.queue.getFlowDependenciesCount(this.id);if(!e)return t;let n={};return e.processed&&(n.processed=t.processed),e.unprocessed&&(n.unprocessed=t.unprocessed),e.failed&&(n.failed=t.failed),n}async getParent(){if(this.parentId)try{return await this.queue.getJob(this.parentId)}catch{return}}static fromReserved(t,n,r){return new e({queue:t,id:n.id,name:`groupmq`,data:n.data,groupId:n.groupId,attemptsMade:n.attempts,opts:{attempts:n.maxAttempts,delay:r?.delayMs},processedOn:r?.processedOn,finishedOn:r?.finishedOn,failedReason:r?.failedReason,stacktrace:r?.stacktrace,returnvalue:r?.returnvalue,timestamp:n.timestamp?n.timestamp:Date.now(),orderMs:n.orderMs,status:T(r?.status),isFlowParent:n.isFlowParent,token:n.token})}static fromRawHash(t,n,r,i){let a=r.groupId??``,o=r.data?w(r.data):null,s=r.attempts?parseInt(r.attempts,10):0,c=r.maxAttempts?parseInt(r.maxAttempts,10):t.maxAttemptsDefault,l=r.timestamp?parseInt(r.timestamp,10):0,u=r.orderMs?parseInt(r.orderMs,10):void 0,d=r.delayUntil?parseInt(r.delayUntil,10):0,f=r.processedOn?parseInt(r.processedOn,10):void 0,p=r.finishedOn?parseInt(r.finishedOn,10):void 0,m=(r.failedReason??r.lastErrorMessage)||void 
0,h=(r.stacktrace??r.lastErrorStack)||void 0,g=r.returnvalue?w(r.returnvalue):void 0,_=r.parentId||void 0,v=r.isFlowParent===`1`;return new e({queue:t,id:n,name:`groupmq`,data:o,groupId:a,attemptsMade:s,opts:{attempts:c,delay:d&&d>Date.now()?d-Date.now():void 0},processedOn:f,finishedOn:p,failedReason:m,stacktrace:h,returnvalue:g,timestamp:l||Date.now(),orderMs:u,status:i??T(r.status),parentId:_,isFlowParent:v})}static async fromStore(t,n){let r=`${t.namespace}:job:${n}`,i=await t.redis.hgetall(r);if(!i||Object.keys(i).length===0)throw Error(`Job ${n} not found`);let a=i.groupId??``,o=i.data?w(i.data):null,s=i.attempts?parseInt(i.attempts,10):0,c=i.maxAttempts?parseInt(i.maxAttempts,10):t.maxAttemptsDefault,l=i.timestamp?parseInt(i.timestamp,10):0,u=i.orderMs?parseInt(i.orderMs,10):void 0,d=i.delayUntil?parseInt(i.delayUntil,10):0,f=i.processedOn?parseInt(i.processedOn,10):void 0,p=i.finishedOn?parseInt(i.finishedOn,10):void 0,m=(i.failedReason??i.lastErrorMessage)||void 0,h=(i.stacktrace??i.lastErrorStack)||void 0,g=i.returnvalue?w(i.returnvalue):void 0,_=i.parentId||void 0,v=i.isFlowParent===`1`,[y,b]=await Promise.all([t.redis.zscore(`${t.namespace}:processing`,n),t.redis.zscore(`${t.namespace}:delayed`,n)]),x=i.status;return y===null?b===null?a&&await t.redis.zscore(`${t.namespace}:g:${a}`,n)!==null&&(x=`waiting`):x=`delayed`:x=`active`,new e({queue:t,id:n,name:`groupmq`,data:o,groupId:a,attemptsMade:s,opts:{attempts:c,delay:d&&d>Date.now()?d-Date.now():void 0},processedOn:f,finishedOn:p,failedReason:m,stacktrace:h,returnvalue:g,timestamp:l||Date.now(),orderMs:u,status:T(x),parentId:_,isFlowParent:v})}};function w(e){try{return JSON.parse(e)}catch{return null}}function T(e){return e&&[`latest`,`active`,`waiting`,`waiting-children`,`prioritized`,`completed`,`failed`,`delayed`,`paused`].includes(e)?e:`unknown`}var 
E=class{constructor(e,t){this.enabled=e,this.name=t}debug(...e){this.enabled&&console.debug(`[${this.name}]`,...e)}info(...e){this.enabled&&console.log(`[${this.name}]`,...e)}warn(...e){this.enabled&&console.warn(`⚠️ [${this.name}]`,...e)}error(...e){this.enabled&&console.error(`💥 [${this.name}]`,...e)}};const D=new WeakMap,O=new Map,k=/^[-]{2,3}[ \t]*@include[ \t]+(["'])(.+?)\1[; \t\n]*$/gm;function A(e){let t=f.default.dirname((0,p.fileURLToPath)(require(`url`).pathToFileURL(__filename).href)),n=[f.default.join(t,`${e}.lua`),f.default.join(t,`lua`,`${e}.lua`)];for(let e of n)if(d.default.existsSync(e))return e;return n[0]}function j(e){let t=f.default.dirname((0,p.fileURLToPath)(require(`url`).pathToFileURL(__filename).href)),n=[f.default.join(t,`${e}.lua`),f.default.join(t,`lua`,`${e}.lua`)];for(let e of n)if(d.default.existsSync(e))return e;throw Error(`Include file not found: ${e} (tried: ${n.join(`, `)})`)}function M(e){return Array.from(e.matchAll(k)).map(e=>e[2])}function N(e,t=new Set){let n=f.default.normalize(e);if(O.has(n))return O.get(n);if(t.has(n))throw Error(`Circular dependency detected: ${n}`);if(t.add(n),!d.default.existsSync(n))throw Error(`Script not found: ${n}`);let r=d.default.readFileSync(n,`utf8`),i=M(r),a=[];for(let e of i){let n=N(j(e),new Set(t));a.push(n)}let o={name:f.default.basename(n,`.lua`),path:n,content:r,dependencies:a};return O.set(n,o),o}function P(e){let t=new Set,n=[];function r(e){if(t.has(e.path))return;t.add(e.path);for(let t of e.dependencies)r(t);let i=e.content.replace(k,``).trim();i&&n.push(i)}return r(e),n.join(`
1
+ var e=Object.create,t=Object.defineProperty,n=Object.getOwnPropertyDescriptor,r=Object.getOwnPropertyNames,i=Object.getPrototypeOf,a=Object.prototype.hasOwnProperty,o=(e,t)=>()=>(t||e((t={exports:{}}).exports,t),t.exports),s=(e,i,o,s)=>{if(i&&typeof i==`object`||typeof i==`function`)for(var c=r(i),l=0,u=c.length,d;l<u;l++)d=c[l],!a.call(e,d)&&d!==o&&t(e,d,{get:(e=>i[e]).bind(null,d),enumerable:!(s=n(i,d))||s.enumerable});return e},c=(n,r,a)=>(a=n==null?{}:e(i(n)),s(r||!n||!n.__esModule?t(a,`default`,{value:n,enumerable:!0}):a,n));let l=require(`uuid`);l=c(l);let u=require(`cron-parser`);u=c(u);let d=require(`node:fs`);d=c(d);let f=require(`node:path`);f=c(f);let p=require(`node:url`);p=c(p);var m=o((e=>{Object.defineProperty(e,`__esModule`,{value:!0}),e.BaseAdapter=void 0,e.BaseAdapter=class{constructor(e,t={}){this.formatters=new Map,this._visibilityGuard=()=>!0,this.readOnlyMode=t.readOnlyMode===!0,this.allowRetries=this.readOnlyMode?!1:t.allowRetries!==!1,this.allowCompletedRetries=this.allowRetries&&t.allowCompletedRetries!==!1,this.prefix=t.prefix||``,this.delimiter=t.delimiter||``,this.description=t.description||``,this.displayName=t.displayName||``,this.type=e,this.externalJobUrl=t.externalJobUrl}getDescription(){return this.description}getDisplayName(){return this.displayName}setFormatter(e,t){this.formatters.set(e,t)}format(e,t,n=t){let r=this.formatters.get(e);return typeof r==`function`?r(t):n}setVisibilityGuard(e){this._visibilityGuard=e}isVisible(e){return this._visibilityGuard(e)}}})),h=c(m()),g=class extends h.BaseAdapter{constructor(e,t={}){let n=e.namespace;super(n,t),this.queue=e,this.options=t}getDescription(){return this.options.description||``}getDisplayName(){return this.options.displayName||``}getName(){return`${this.options.prefix||``}${this.options.delimiter||``}${this.queue.rawNamespace}`.replace(/(^[\s:]+)|([\s:]+$)/g,``)}async getRedisInfo(){return this.queue.redis.info()}async getJob(e){return await this.queue.getJob(e)}async 
getJobs(e,t,n){return await this.queue.getJobsByStatus(e,t,n)}async getJobCounts(){let e=await this.queue.getJobCounts();return{latest:0,active:e.active,waiting:e.waiting,"waiting-children":e[`waiting-children`],prioritized:e.prioritized,completed:e.completed,failed:e.failed,delayed:e.delayed,paused:e.paused}}async getJobLogs(e){return[]}getStatuses(){return[`latest`,`active`,`waiting`,`waiting-children`,`prioritized`,`completed`,`failed`,`delayed`,`paused`]}getJobStatuses(){return[`active`,`waiting`,`waiting-children`,`prioritized`,`completed`,`failed`,`delayed`,`paused`]}assertWritable(){if(this.options.readOnlyMode)throw Error(`This adapter is in read-only mode. Mutations are disabled.`)}async clean(e,t){this.assertWritable(),!(e!==`completed`&&e!==`failed`&&e!==`delayed`)&&await this.queue.clean(t,2**53-1,e)}async addJob(e,t,n){return this.assertWritable(),await this.queue.add({groupId:n.groupId??Math.random().toString(36).substring(2,15),data:t,...n})}async isPaused(){return this.queue.isPaused()}async pause(){this.assertWritable(),await this.queue.pause()}async resume(){this.assertWritable(),await this.queue.resume()}async empty(){throw this.assertWritable(),Error(`Not implemented`)}async promoteAll(){throw this.assertWritable(),Error(`Not implemented`)}},_=class extends Error{constructor(e,t){let n=`Active=${e.active}, Waiting=${e.waiting}, Delayed=${e.delayed}, Staged=${e.staged}, Limited=${e.limited}, Ready=${e.ready}`;super(`waitForEmpty timed out after ${t}ms. 
State: ${n}`),this.name=`WaitForEmptyTimeoutError`,this.state=e,this.timeoutMs=t}};async function v(e,t=6e4){let n=typeof t==`number`?{timeoutMs:t}:t;return e.waitForEmpty(n)}async function y(e,t=3e4,n=100){let r=Date.now();for(;Date.now()-r<t;){try{if(await e())return!0}catch{}await x(n)}return!1}async function b(e,t,n=3e4,r=100){if(!await y(e,n,r))throw Error(`${t} (timed out after ${n}ms)`)}function x(e){return new Promise(t=>setTimeout(t,e))}function S(e){let t=e.map((e,t)=>{let n=e.getCurrentJob();return{index:t,isProcessing:e.isProcessing(),currentJob:n?{jobId:n.job.id,groupId:n.job.groupId,processingTimeMs:n.processingTimeMs}:void 0}}),n=t.filter(e=>e.isProcessing).length,r=t.length-n;return{total:e.length,processing:n,idle:r,workers:t}}var C=class e{constructor(e){this.queue=e.queue,this.id=e.id,this.name=e.name??`groupmq`,this.data=e.data,this.groupId=e.groupId,this.attemptsMade=e.attemptsMade,this.opts=e.opts,this.processedOn=e.processedOn,this.finishedOn=e.finishedOn,this.failedReason=e.failedReason,this.stacktrace=e.stacktrace,this.returnvalue=e.returnvalue,this.timestamp=e.timestamp,this.orderMs=e.orderMs,this.status=e.status??`unknown`,this.parentId=e.parentId,this.isFlowParent=e.isFlowParent===!0,this.token=e.token}async getState(){return this.status??`unknown`}toJSON(){return{id:this.id,name:this.name,data:this.data,groupId:this.groupId,attemptsMade:this.attemptsMade,opts:this.opts,processedOn:this.processedOn,finishedOn:this.finishedOn,failedReason:this.failedReason,stacktrace:this.stacktrace?[this.stacktrace]:null,returnvalue:this.returnvalue,timestamp:this.timestamp,orderMs:this.orderMs,status:this.status,progress:0,isFlowParent:this.isFlowParent}}changeDelay(e){return this.queue.changeDelay(this.id,e)}async promote(){await this.queue.promote(this.id)}async remove(){await this.queue.remove(this.id)}async retry(e){if(!this.token)throw Error(`Cannot retry job ${this.id}: token not available`);await 
this.queue.retry({id:this.id,token:this.token})}async updateData(e){await this.queue.updateData(this.id,e),this.data=e}isActive(){return this.status===`active`}isWaiting(){return this.status===`waiting`}isDelayed(){return this.status===`delayed`}isCompleted(){return this.status===`completed`}isFailed(){return this.status===`failed`}isWaitingChildren(){return this.status===`waiting-children`}async waitUntilFinished(e=0){return this.queue.waitUntilFinished(this.id,e)}async getChildren(){return this.queue.getFlowChildren(this.id)}async getChildrenValues(){return this.queue.getFlowResults(this.id)}async getChildrenCount(){return this.queue.getFlowChildrenCount(this.id)}async getRemainingCount(){return this.queue.getFlowRemainingCount(this.id)}async getDependenciesCount(e){let t=await this.queue.getFlowDependenciesCount(this.id);if(!e)return t;let n={};return e.processed&&(n.processed=t.processed),e.unprocessed&&(n.unprocessed=t.unprocessed),e.failed&&(n.failed=t.failed),n}async getParent(){if(this.parentId)try{return await this.queue.getJob(this.parentId)}catch{return}}static fromReserved(t,n,r){return new e({queue:t,id:n.id,name:`groupmq`,data:n.data,groupId:n.groupId,attemptsMade:n.attempts,opts:{attempts:n.maxAttempts,delay:r?.delayMs},processedOn:r?.processedOn,finishedOn:r?.finishedOn,failedReason:r?.failedReason,stacktrace:r?.stacktrace,returnvalue:r?.returnvalue,timestamp:n.timestamp?n.timestamp:Date.now(),orderMs:n.orderMs,status:T(r?.status),isFlowParent:n.isFlowParent,token:n.token})}static fromRawHash(t,n,r,i){let a=r.groupId??``,o=r.data?w(r.data):null,s=r.attempts?parseInt(r.attempts,10):0,c=r.maxAttempts?parseInt(r.maxAttempts,10):t.maxAttemptsDefault,l=r.timestamp?parseInt(r.timestamp,10):0,u=r.orderMs?parseInt(r.orderMs,10):void 0,d=r.delayUntil?parseInt(r.delayUntil,10):0,f=r.processedOn?parseInt(r.processedOn,10):void 0,p=r.finishedOn?parseInt(r.finishedOn,10):void 0,m=(r.failedReason??r.lastErrorMessage)||void 
0,h=(r.stacktrace??r.lastErrorStack)||void 0,g=r.returnvalue?w(r.returnvalue):void 0,_=r.parentId||void 0,v=r.isFlowParent===`1`;return new e({queue:t,id:n,name:`groupmq`,data:o,groupId:a,attemptsMade:s,opts:{attempts:c,delay:d&&d>Date.now()?d-Date.now():void 0},processedOn:f,finishedOn:p,failedReason:m,stacktrace:h,returnvalue:g,timestamp:l||Date.now(),orderMs:u,status:i??T(r.status),parentId:_,isFlowParent:v})}static async fromStore(t,n){let r=`${t.namespace}:job:${n}`,i=await t.redis.hgetall(r);if(!i||Object.keys(i).length===0)throw Error(`Job ${n} not found`);let a=i.groupId??``,o=i.data?w(i.data):null,s=i.attempts?parseInt(i.attempts,10):0,c=i.maxAttempts?parseInt(i.maxAttempts,10):t.maxAttemptsDefault,l=i.timestamp?parseInt(i.timestamp,10):0,u=i.orderMs?parseInt(i.orderMs,10):void 0,d=i.delayUntil?parseInt(i.delayUntil,10):0,f=i.processedOn?parseInt(i.processedOn,10):void 0,p=i.finishedOn?parseInt(i.finishedOn,10):void 0,m=(i.failedReason??i.lastErrorMessage)||void 0,h=(i.stacktrace??i.lastErrorStack)||void 0,g=i.returnvalue?w(i.returnvalue):void 0,_=i.parentId||void 0,v=i.isFlowParent===`1`,[y,b]=await Promise.all([t.redis.zscore(`${t.namespace}:processing`,n),t.redis.zscore(`${t.namespace}:delayed`,n)]),x=i.status;return y===null?b===null?a&&await t.redis.zscore(`${t.namespace}:g:${a}`,n)!==null&&(x=`waiting`):x=`delayed`:x=`active`,new e({queue:t,id:n,name:`groupmq`,data:o,groupId:a,attemptsMade:s,opts:{attempts:c,delay:d&&d>Date.now()?d-Date.now():void 0},processedOn:f,finishedOn:p,failedReason:m,stacktrace:h,returnvalue:g,timestamp:l||Date.now(),orderMs:u,status:T(x),parentId:_,isFlowParent:v})}};function w(e){try{return JSON.parse(e)}catch{return null}}function T(e){return e&&[`latest`,`active`,`waiting`,`waiting-children`,`prioritized`,`completed`,`failed`,`delayed`,`paused`].includes(e)?e:`unknown`}var 
E=class{constructor(e,t){this.enabled=e,this.name=t}debug(...e){this.enabled&&console.debug(`[${this.name}]`,...e)}info(...e){this.enabled&&console.log(`[${this.name}]`,...e)}warn(...e){this.enabled&&console.warn(`⚠️ [${this.name}]`,...e)}error(...e){this.enabled&&console.error(`💥 [${this.name}]`,...e)}};const D=new WeakMap,O=new Map,k=/^[-]{2,3}[ \t]*@include[ \t]+(["'])(.+?)\1[; \t\n]*$/gm;function A(e){let t=f.default.dirname((0,p.fileURLToPath)(require(`url`).pathToFileURL(__filename).href)),n=[f.default.join(t,`${e}.lua`),f.default.join(t,`lua`,`${e}.lua`)];for(let e of n)if(d.default.existsSync(e))return e;return n[0]}function j(e){let t=f.default.dirname((0,p.fileURLToPath)(require(`url`).pathToFileURL(__filename).href)),n=[f.default.join(t,`${e}.lua`),f.default.join(t,`lua`,`${e}.lua`)];for(let e of n)if(d.default.existsSync(e))return e;throw Error(`Include file not found: ${e} (tried: ${n.join(`, `)})`)}function M(e){return Array.from(e.matchAll(k)).map(e=>e[2])}function N(e,t=new Set){let n=f.default.normalize(e);if(O.has(n))return O.get(n);if(t.has(n))throw Error(`Circular dependency detected: ${n}`);if(t.add(n),!d.default.existsSync(n))throw Error(`Script not found: ${n}`);let r=d.default.readFileSync(n,`utf8`),i=M(r),a=[];for(let e of i){let n=N(j(e),new Set(t));a.push(n)}let o={name:f.default.basename(n,`.lua`),path:n,content:r,dependencies:a};return O.set(n,o),o}function P(e){let t=new Set,n=[];function r(e){if(t.has(e.path))return;t.add(e.path);for(let t of e.dependencies)r(t);let i=e.content.replace(k,``).trim();i&&n.push(i)}return r(e),n.join(`
2
2
 
3
3
  `).replace(/\n\s*\n\s*\n/g,`
4
4
 
@@ -52,7 +52,7 @@ var e=Object.create,t=Object.defineProperty,n=Object.getOwnPropertyDescriptor,r=
52
52
  end
53
53
  end
54
54
  return 0
55
- `,1,r,e,String(n))},getConcurrency:async e=>(await this.groups.getConfig(e)).concurrency??1},this._groupCleanupTracking=new Map,this.r=e.redis,this.rawNs=e.namespace,this.name=e.namespace,this.ns=`groupmq:${this.rawNs}`;let t=e.jobTimeoutMs??5e3;this.vt=Math.max(1,t),this.defaultMaxAttempts=e.maxAttempts??3,this.scanLimit=e.reserveScanLimit??20,this.keepCompleted=Math.max(0,e.keepCompleted??0),this.keepFailed=Math.max(0,e.keepFailed??0),this.schedulerLockTtlMs=e.schedulerLockTtlMs??1500,this.orderingDelayMs=e.orderingDelayMs??0,e.autoBatch&&(this.batchConfig=typeof e.autoBatch==`boolean`?{size:10,maxWaitMs:10}:{size:e.autoBatch.size??10,maxWaitMs:e.autoBatch.maxWaitMs??10}),this.logger=typeof e.logger==`object`?e.logger:new E(!!e.logger,this.namespace),this.r.on(`error`,e=>{this.logger.error(`Redis error (main):`,e)})}get redis(){return this.r}get namespace(){return this.ns}get rawNamespace(){return this.rawNs}get jobTimeoutMs(){return this.vt}get maxAttemptsDefault(){return this.defaultMaxAttempts}async add(e){let t=e.maxAttempts??this.defaultMaxAttempts,n=e.orderMs??Date.now(),r=Date.now(),i=e.jobId??(0,l.randomUUID)();if(e.repeat)return this.addRepeatingJob({...e,orderMs:n,maxAttempts:t});let a;if(e.delay!==void 0&&e.delay>0)a=e.delay;else if(e.runAt!==void 0){let t=e.runAt instanceof Date?e.runAt.getTime():e.runAt;a=Math.max(0,t-r)}let o=e.data===void 0?null:e.data;return this.batchConfig?new Promise((r,s)=>{this.batchBuffer.push({groupId:e.groupId,data:o,jobId:i,maxAttempts:t,delayMs:a,orderMs:n,resolve:r,reject:s}),this.batchBuffer.length>=this.batchConfig.size?this.flushBatch():this.batchTimer||=setTimeout(()=>this.flushBatch(),this.batchConfig.maxWaitMs)}):this.addSingle({...e,data:o,jobId:i,maxAttempts:t,orderMs:n,delayMs:a,groupConfig:e.groupConfig})}async addFlow(e){let t=e.parent.jobId??(0,l.randomUUID)(),n=e.parent.maxAttempts??this.defaultMaxAttempts,r=e.parent.orderMs??Date.now(),i=JSON.stringify(e.parent.data===void 
0?null:e.parent.data),a=e.parent.groupConfig?JSON.stringify(e.parent.groupConfig):``,o=[],s=[];for(let t of e.children){let e=t.jobId??(0,l.randomUUID)(),n=t.maxAttempts??this.defaultMaxAttempts,r=t.orderMs??Date.now(),i=t.delay??0,a=JSON.stringify(t.data===void 0?null:t.data),c=t.groupConfig?JSON.stringify(t.groupConfig):``;o.push(e),s.push(e,t.groupId,a,n.toString(),r.toString(),i.toString(),c)}let c=Date.now();return await I(this.r,`enqueue-flow`,[this.ns,t,e.parent.groupId,i,n.toString(),r.toString(),c.toString(),a,...s],1),new C({queue:this,id:t,groupId:e.parent.groupId,data:e.parent.data,status:`waiting-children`,attemptsMade:0,opts:{attempts:n},timestamp:c,orderMs:r,isFlowParent:!0})}async getFlowRemainingCount(e){let t=await this.r.hget(`${this.ns}:job:${e}`,`flowRemaining`);return t===null?null:parseInt(t,10)}async getFlowDependenciesCount(e){let t=await this.getFlowChildrenIds(e);if(t.length===0)return{processed:0,unprocessed:0,failed:0};let n=await this.r.hgetall(`${this.ns}:flow:results:${e}`),r=0,i=0;for(let e of Object.values(n))try{let t=JSON.parse(e);t.status===`completed`?r++:t.status===`failed`&&i++}catch{}let a=t.length-r-i;return{processed:r,unprocessed:Math.max(0,a),failed:i}}async getFlowResults(e){let t=await this.r.hgetall(`${this.ns}:flow:results:${e}`),n=[];for(let[e,r]of Object.entries(t))try{let t=JSON.parse(r),i=t.data;if(typeof t.data==`string`)try{i=JSON.parse(t.data)}catch{}n.push({jobId:e,status:t.status,result:i})}catch(t){this.logger.error(`Failed to parse flow result for child ${e}`,t),n.push({jobId:e,status:`failed`,result:{error:`Corrupted result data`}})}return n}async getFlowChildrenCount(e){return this.r.scard(`${this.ns}:flow:children:${e}`)}async getFlowChildrenIds(e){return this.r.smembers(`${this.ns}:flow:children:${e}`)}async getFlowChildren(e){let t=await this.getFlowChildrenIds(e);if(t.length===0)return[];let n=this.r.multi();for(let e of t)n.hgetall(`${this.ns}:job:${e}`);let r=await n.exec(),i=[];for(let 
e=0;e<t.length;e++){let n=t[e],a=r?.[e]?.[1]||{};if(!a||Object.keys(a).length===0){this.logger.warn(`Skipping child job ${n} - not found (likely cleaned up)`);continue}let o=C.fromRawHash(this,n,a);i.push(o)}return i}async addSingle(e){let t=Date.now(),n=0;e.delayMs!==void 0&&e.delayMs>0&&(n=t+e.delayMs);let r=JSON.stringify(e.data),i=e.groupConfig?JSON.stringify(e.groupConfig):``,a=await I(this.r,`enqueue`,[this.ns,e.groupId,r,String(e.maxAttempts),String(e.orderMs),String(n),String(e.jobId),String(this.keepCompleted),String(t),String(this.orderingDelayMs),i],1);if(Array.isArray(a)){let[e,t,n,r,i,o,s,c,l]=a;return C.fromRawHash(this,e,{id:e,groupId:t,data:n,attempts:r,maxAttempts:i,timestamp:o,orderMs:s,delayUntil:c,status:l},l)}return this.getJob(a)}async flushBatch(){if(this.batchTimer&&=(clearTimeout(this.batchTimer),void 0),this.batchBuffer.length===0||this.flushing)return;this.flushing=!0;let e=this.batchBuffer.splice(0);try{this.logger.debug(`Flushing batch of ${e.length} jobs`);let t=Date.now(),n=e.map(e=>({jobId:e.jobId,groupId:e.groupId,data:JSON.stringify(e.data),maxAttempts:e.maxAttempts,orderMs:e.orderMs,delayMs:e.delayMs})),r=await I(this.r,`enqueue-batch`,[this.ns,JSON.stringify(n),String(this.keepCompleted),String(t),String(this.orderingDelayMs)],1);for(let t=0;t<e.length;t++){let n=e[t],i=r[t];try{if(i&&i.length>=9){let[e,t,r,a,o,s,c,l,u]=i,d=C.fromRawHash(this,e,{id:e,groupId:t,data:r,attempts:a,maxAttempts:o,timestamp:s,orderMs:c,delayUntil:l,status:u},u);n.resolve(d)}else throw Error(`Invalid job data returned from batch enqueue`)}catch(e){n.reject(e instanceof Error?e:Error(String(e)))}}}catch(t){for(let n of e)n.reject(t instanceof Error?t:Error(String(t)))}finally{this.flushing=!1,this.batchBuffer.length>0&&setImmediate(()=>this.flushBatch())}}async reserve(){let e=Date.now(),t=(0,l.randomUUID)(),n=await I(this.r,`reserve`,[this.ns,String(e),String(this.vt),String(this.scanLimit),t],1);if(!n)return null;let 
r=n.split(`|||`);if(r.length!==12)return null;let i;try{i=JSON.parse(r[2])}catch(e){this.logger.warn(`Failed to parse job data: ${e.message}, raw: ${r[2]}`),i=null}let a=Number.parseInt(r[7],10);return{id:r[0],groupId:r[1],data:i,attempts:Number.parseInt(r[3],10),maxAttempts:Number.parseInt(r[4],10),seq:Number.parseInt(r[5],10),timestamp:Number.parseInt(r[6],10),orderMs:Number.isNaN(a)?Number.parseInt(r[6],10):a,score:Number(r[8]),deadlineAt:Number.parseInt(r[9],10),isFlowParent:r[10]===`1`,token:r[11]}}async getGroupJobCount(e){let t=`${this.ns}:g:${e}`;return await this.r.zcard(t)}async complete(e){await I(this.r,`complete-job`,[this.ns,e.id,e.groupId,`completed`,String(Date.now()),JSON.stringify(null),String(this.keepCompleted),String(this.keepFailed),String(Date.now()),String(Date.now()),`0`,`0`,e.token||``],1)}async completeWithMetadata(e,t,n){if(!e.token){this.logger.warn(`completeWithMetadata: Missing token for job ${e.id}`);return}await I(this.r,`complete-job`,[this.ns,e.id,e.groupId,`completed`,String(n.finishedOn),JSON.stringify(t??null),String(this.keepCompleted),String(this.keepFailed),String(n.processedOn),String(n.finishedOn),String(n.attempts),String(n.maxAttempts),e.token],1)}async completeAndReserveNextWithMetadata(e,t,n,r,i){let a=Date.now(),o=(0,l.randomUUID)();try{let s=await I(this.r,`complete-and-reserve-next-with-metadata`,[this.ns,e,t,`completed`,String(i.finishedOn),JSON.stringify(r??null),String(this.keepCompleted),String(this.keepFailed),String(i.processedOn),String(i.finishedOn),String(i.attempts),String(i.maxAttempts),String(a),String(this.jobTimeoutMs),n,o],1);if(!s)return null;let c=s.split(`|||`);if(c.length!==12)return this.logger.error(`Queue completeAndReserveNextWithMetadata: unexpected result 
format:`,s),null;let[l,,u,d,f,p,m,h,g,_,v,y]=c;return{id:l,groupId:t,data:JSON.parse(u),attempts:parseInt(d,10),maxAttempts:parseInt(f,10),seq:parseInt(p,10),timestamp:parseInt(m,10),orderMs:parseInt(h,10),score:parseFloat(g),deadlineAt:parseInt(_,10),isFlowParent:v===`1`,token:y}}catch(e){return this.logger.error(`Queue completeAndReserveNextWithMetadata error:`,e),null}}async isJobProcessing(e){return await this.r.zscore(`${this.ns}:processing`,e)!==null}async retry(e,t=0){return e.token?I(this.r,`retry`,[this.ns,e.id,String(t),e.token],1):(this.logger.warn(`retry called without token for job ${e.id}`),0)}async deadLetter(e,t,n){return I(this.r,`dead-letter`,[this.ns,e,t,n||``],1)}async recordCompleted(e,t,n){let r=n.processedOn??Date.now(),i=n.finishedOn??Date.now(),a=n.attempts??0,o=n.maxAttempts??this.defaultMaxAttempts;try{await I(this.r,`complete-job`,[this.ns,e.id,e.groupId,`completed`,String(i),JSON.stringify(t??null),String(this.keepCompleted),String(this.keepFailed),String(r),String(i),String(a),String(o),``],1)}catch(t){throw this.logger.error(`Error recording completion for job ${e.id}:`,t),t}}async recordAttemptFailure(e,t,n){let r=`${this.ns}:job:${e.id}`,i=n.processedOn??Date.now(),a=n.finishedOn??Date.now(),o=typeof t==`string`?t:t.message??`Error`,s=typeof t==`string`?`Error`:t.name??`Error`,c=typeof t==`string`?``:t.stack??``;await this.r.hset(r,`lastErrorMessage`,o,`lastErrorName`,s,`lastErrorStack`,c,`processedOn`,String(i),`finishedOn`,String(a))}async recordFinalFailure(e,t,n){let r=n.processedOn??Date.now(),i=n.finishedOn??Date.now(),a=n.attempts??0,o=n.maxAttempts??this.defaultMaxAttempts,s=typeof t==`string`?t:t.message??`Error`,c=typeof t==`string`?`Error`:t.name??`Error`,l=typeof t==`string`?``:t.stack??``,u=JSON.stringify({message:s,name:c,stack:l});try{await 
I(this.r,`complete-job`,[this.ns,e.id,e.groupId,`failed`,String(i),u,String(this.keepCompleted),String(this.keepFailed),String(r),String(i),String(a),String(o),e.token||``],1)}catch(t){throw this.logger.error(`Error recording final failure for job ${e.id}:`,t),t}}async getCompleted(e=this.keepCompleted){let t=`${this.ns}:completed`,n=await this.r.zrevrange(t,0,Math.max(0,e-1));if(n.length===0)return[];let r=this.r.multi();for(let e of n)r.hmget(`${this.ns}:job:${e}`,`groupId`,`data`,`returnvalue`,`processedOn`,`finishedOn`,`attempts`,`maxAttempts`);let i=await r.exec()??[];return n.map((e,t)=>{let[n,r,a,o,s,c,l]=i[t]?.[1]||[];return{id:e,groupId:n||``,data:r?R(r):null,returnvalue:a?R(a):null,processedOn:o?parseInt(o,10):void 0,finishedOn:s?parseInt(s,10):void 0,attempts:c?parseInt(c,10):0,maxAttempts:l?parseInt(l,10):this.defaultMaxAttempts}})}async getFailed(e=this.keepFailed){let t=`${this.ns}:failed`,n=await this.r.zrevrange(t,0,Math.max(0,e-1));if(n.length===0)return[];let r=this.r.multi();for(let e of n)r.hmget(`${this.ns}:job:${e}`,`groupId`,`data`,`failedReason`,`stacktrace`,`processedOn`,`finishedOn`,`attempts`,`maxAttempts`);let i=await r.exec()??[];return n.map((e,t)=>{let[n,r,a,o,s,c,l,u]=i[t]?.[1]||[];return{id:e,groupId:n||``,data:r?R(r):null,failedReason:a||``,stacktrace:o||void 0,processedOn:s?parseInt(s,10):void 0,finishedOn:c?parseInt(c,10):void 0,attempts:l?parseInt(l,10):0,maxAttempts:u?parseInt(u,10):this.defaultMaxAttempts}})}async getCompletedJobs(e=this.keepCompleted){let t=`${this.ns}:completed`,n=await this.r.zrevrange(t,0,Math.max(0,e-1));if(n.length===0)return[];let r=this.r.multi();for(let e of n)r.hgetall(`${this.ns}:job:${e}`);let i=await r.exec(),a=[];for(let e=0;e<n.length;e++){let t=n[e],r=i?.[e]?.[1]||{};if(!r||Object.keys(r).length===0){this.logger.warn(`Skipping completed job ${t} - not found (likely cleaned up)`);continue}let o=C.fromRawHash(this,t,r,`completed`);a.push(o)}return a}async getFailedJobs(e=this.keepFailed){let 
t=`${this.ns}:failed`,n=await this.r.zrevrange(t,0,Math.max(0,e-1));if(n.length===0)return[];let r=this.r.multi();for(let e of n)r.hgetall(`${this.ns}:job:${e}`);let i=await r.exec(),a=[];for(let e=0;e<n.length;e++){let t=n[e],r=i?.[e]?.[1]||{};if(!r||Object.keys(r).length===0){this.logger.warn(`Skipping failed job ${t} - not found (likely cleaned up)`);continue}let o=C.fromRawHash(this,t,r,`failed`);a.push(o)}return a}/* retention-set cardinalities */async getCompletedCount(){return this.r.zcard(`${this.ns}:completed`)}async getFailedCount(){return this.r.zcard(`${this.ns}:failed`)}/* heartbeat: extend visibility timeout t (defaults to this.vt) for a reserved job; requires the reservation token */async heartbeat(e,t=this.vt){return e.token?I(this.r,`heartbeat`,[this.ns,e.id,e.groupId,String(t),e.token],1):0}/* getBlockTimeout: seconds to block, clamped to [1ms, 5s]; when a deadline t is supplied, never block past it */getBlockTimeout(e,t){let n=.001;if(t){let e=t-Date.now();return e<=0||e<n*1e3?n:Math.min(e/1e3,5)}return Math.max(n,Math.min(e,5))}/* isConnectionError: heuristic match on common ioredis connection-failure messages */isConnectionError(e){if(!e)return!1;let t=`${e.message||``}`;return t.includes(`Connection is closed`)||t.includes(`ECONNREFUSED`)||t.includes(`ETIMEDOUT`)}/* reserveBlocking: try an immediate reserve first (unless recent reserves came back empty), then BZPOPMIN on <ns>:ready and atomically reserve from the popped group; n is an optional dedicated blocking connection */async reserveBlocking(e=5,t,n){let r=Date.now();if(await this.isPaused())return await B(50),null;if(!(this._consecutiveEmptyReserves>=3)){let e=await this.reserve();if(e)return this.logger.debug(`Immediate reserve successful (${Date.now()-r}ms)`),this._consecutiveEmptyReserves=0,e}let i=this.getBlockTimeout(e,t);this._consecutiveEmptyReserves%10==0&&this.logger.debug(`Starting blocking operation (timeout: ${i}s, consecutive empty: ${this._consecutiveEmptyReserves})`);let a=L(this.ns,`ready`);try{let e=Date.now(),t=await(n??this.r).bzpopmin(a,i),r=Date.now()-e;if(!t||t.length<3)return this.logger.debug(`Blocking timeout/empty (took ${r}ms)`),this._consecutiveEmptyReserves+=1,null;let[,o,s]=t;this._consecutiveEmptyReserves%10==0&&this.logger.debug(`Blocking result: group=${o}, score=${s} (took ${r}ms)`);let c=Date.now(),l=await this.reserveAtomic(o),u=Date.now()-c;if(l.status===`success`){let e=l.job;return this.logger.debug(`Successful job reserve after blocking: ${e.id} from group ${e.groupId} (reserve took 
${u}ms)`),this._consecutiveEmptyReserves=0,e}else if(l.status===`limit_exceeded`)return this.logger.debug(`Blocking found group at capacity: group=${o} (reserve took ${u}ms)`),this._consecutiveEmptyReserves++,null;else return this.logger.debug(`Blocking found empty group: group=${o} (reserve took ${u}ms)`),this._consecutiveEmptyReserves++,this.reserve()}catch(e){let t=Date.now()-r;if(this.logger.error(`Blocking error after ${t}ms:`,e),this.isConnectionError(e))throw this.logger.error(`Connection error detected - rethrowing`),e;return this.logger.warn(`Falling back to regular reserve due to error`),this.reserve()}finally{let e=Date.now()-r;e>1e3&&this.logger.debug(`ReserveBlocking completed in ${e}ms`)}}/* reserveAtomic: run the reserve-atomic script against group e; returns limit_exceeded / empty markers, or parses the |||-delimited 12-field payload into a job (orderMs falls back to timestamp when NaN) */async reserveAtomic(e){let t=Date.now(),n=(0,l.randomUUID)(),r=await I(this.r,`reserve-atomic`,[this.ns,String(t),String(this.vt),String(e),``,n],1);if(r===`E_LIMIT`)return{status:`limit_exceeded`};if(!r)return{status:`empty`};let i=r.split(`|||`);if(i.length<12)return{status:`empty`};let[a,o,s,c,u,d,f,p,m,h,g,_]=i,v=parseInt(f,10),y=parseInt(p,10);return{status:`success`,job:{id:a,groupId:o,data:JSON.parse(s),attempts:parseInt(c,10),maxAttempts:parseInt(u,10),seq:parseInt(d,10),timestamp:v,orderMs:Number.isNaN(y)?v:y,score:parseFloat(m),deadlineAt:parseInt(h,10),isFlowParent:g===`1`,token:_}}}/* getReadyGroups: slice of the :ready zset */async getReadyGroups(e=0,t=-1){return this.r.zrange(`${this.ns}:ready`,e,t)}/* getGroupOldestTimestamp: timestamp of the head job in group e, if any */async getGroupOldestTimestamp(e){let t=`${this.ns}:g:${e}`,n=await this.r.zrange(t,0,0);if(!n||n.length===0)return;let r=n[0],i=await this.r.hget(`${this.ns}:job:${r}`,`timestamp`);return i?parseInt(i,10):void 0}/* reserveBatch: reserve up to e jobs in a single script call; parses each |||-delimited payload */async reserveBatch(e=16){let t=Date.now(),n=(0,l.randomUUID)(),r=await I(this.r,`reserve-batch`,[this.ns,String(t),String(this.vt),String(Math.max(1,e)),n],1),i=[];for(let e of r||[]){if(!e)continue;let 
t=e.split(`|||`);t.length===12&&i.push({id:t[0],groupId:t[1],data:R(t[2]),attempts:parseInt(t[3],10),maxAttempts:parseInt(t[4],10),seq:parseInt(t[5],10),timestamp:parseInt(t[6],10),orderMs:parseInt(t[7],10),score:parseFloat(t[8]),deadlineAt:parseInt(t[9],10),isFlowParent:t[10]===`1`,token:t[11]})}return i}/* the counters below delegate to the get-queue-metrics script or zcard */async getActiveCount(){return I(this.r,`get-queue-metrics`,[this.ns,`active`],1)}async getWaitingCount(){return I(this.r,`get-queue-metrics`,[this.ns,`waiting`],1)}async getDelayedCount(){return I(this.r,`get-queue-metrics`,[this.ns,`delayed`],1)}async getStagedCount(){return this.r.zcard(`${this.ns}:stage`)}async getReadyGroupCount(){return this.r.zcard(`${this.ns}:ready`)}/* getQueueStateSnapshot: gather all state counters in parallel */async getQueueStateSnapshot(){let[e,t,n,r,i,a]=await Promise.all([this.getActiveCount(),this.getWaitingCount(),this.getDelayedCount(),this.getStagedCount(),this.getLimitedGroupCount(),this.getReadyGroupCount()]);return{active:e,waiting:t,delayed:n,staged:r,limited:i,ready:a}}async getQueueMetrics(){let e=await I(this.r,`get-queue-metrics`,[this.ns],1);return JSON.parse(e)}/* job/group listings via scripts */async getActiveJobs(){return I(this.r,`get-jobs`,[this.ns,`active`],1)}async getWaitingJobs(){return I(this.r,`get-jobs`,[this.ns,`waiting`],1)}async getDelayedJobs(){return I(this.r,`get-jobs`,[this.ns,`delayed`],1)}async getUniqueGroups(){return I(this.r,`get-unique-groups`,[this.ns],1)}async getUniqueGroupsCount(){return I(this.r,`get-unique-groups-count`,[this.ns],1)}async getJob(e){return C.fromStore(this,e)}/* setupSubscriber: lazily create a duplicate connection subscribed to <ns>:events and route messages to handleJobEvent */async setupSubscriber(){this.eventsSubscribed&&this.subscriber||(this.subscriber||(this.subscriber=this.r.duplicate(),this.subscriber.on(`message`,(e,t)=>{e===`${this.ns}:events`&&this.handleJobEvent(t)}),this.subscriber.on(`error`,e=>{this.logger.error(`Redis error (events subscriber):`,e)})),await this.subscriber.subscribe(`${this.ns}:events`),this.eventsSubscribed=!0)}/* handleJobEvent: parse an event payload and settle any waitUntilFinished promises registered for that job id */handleJobEvent(e){try{let t=R(e);if(!t||typeof t.id!=`string`)return;let 
n=this.waitingJobs.get(t.id);if(!n||n.length===0)return;if(t.status===`completed`){let e=typeof t.result==`string`?R(t.result)??t.result:t.result;n.forEach(t=>t.resolve(e))}else if(t.status===`failed`){let e=typeof t.result==`string`?R(t.result)??{}:t.result??{},r=Error(e&&e.message||`Job failed`);e&&typeof e==`object`&&(typeof e.name==`string`&&(r.name=e.name),typeof e.stack==`string`&&(r.stack=e.stack)),n.forEach(e=>e.reject(r))}this.waitingJobs.delete(t.id)}catch(e){this.logger.error(`Failed to process job event:`,e)}}/* waitUntilFinished: resolve with the job's return value or reject with its failure; checks current state first, then subscribes and registers a waiter (optional timeout t in ms); re-checks state after registering to close the race window */async waitUntilFinished(e,t=0){let n=await this.getJob(e),r=await n.getState();if(r===`completed`)return n.returnvalue;if(r===`failed`)throw Error(n.failedReason||`Job failed`);return await this.setupSubscriber(),new Promise((n,r)=>{let i,a,o=()=>{i&&clearTimeout(i);let t=this.waitingJobs.get(e);if(!t)return;let n=t.filter(e=>e!==a);n.length===0?this.waitingJobs.delete(e):this.waitingJobs.set(e,n)},s=e=>{o(),n(e)},c=e=>{o(),r(e)};a={resolve:s,reject:c};let l=this.waitingJobs.get(e)??[];l.push(a),this.waitingJobs.set(e,l),t>0&&(i=setTimeout(()=>{c(Error(`Timed out waiting for job ${e} to finish`))},t)),(async()=>{try{let t=await this.getJob(e),n=await t.getState();n===`completed`?s(t.returnvalue):n===`failed`&&c(Error(t.failedReason??`Job failed`))}catch{}})()})}/* getJobsByStatus: best-effort id sampling per requested status (overfetching 2x, capped at 500), then hydrate hashes into job objects; waiting ids are sampled across up to 100 groups */async getJobsByStatus(e,t=0,n=-1){let r=n>=0?n-t+1:100,i=Math.min(r*2,500),a=new Map,o=[],s=async(e,t,n=!1)=>{try{let r=n?await this.r.zrevrange(e,0,i-1):await this.r.zrange(e,0,i-1);for(let e of r)a.set(e,t);o.push(...r)}catch{}},c=new Set(e);if(c.has(`active`)&&await s(`${this.ns}:processing`,`active`),c.has(`delayed`)&&await s(`${this.ns}:delayed`,`delayed`),c.has(`completed`)&&await s(`${this.ns}:completed`,`completed`,!0),c.has(`failed`)&&await s(`${this.ns}:failed`,`failed`,!0),c.has(`waiting`))try{let e=await this.r.smembers(`${this.ns}:groups`);if(e.length>0){let t=e.slice(0,Math.min(100,e.length)),n=this.r.multi(),r=Math.max(1,Math.ceil(i/t.length));for(let e of 
t)n.zrange(`${this.ns}:g:${e}`,0,r-1);let s=await n.exec();for(let e of s||[]){let t=e?.[1]||[];for(let e of t)a.set(e,`waiting`);o.push(...t)}}}catch{}let l=new Set,u=[];for(let e of o)l.has(e)||(l.add(e),u.push(e));let d=n>=0?u.slice(t,n+1):u.slice(t);if(d.length===0)return[];let f=this.r.multi();for(let e of d)f.hgetall(`${this.ns}:job:${e}`);let p=await f.exec(),m=[];for(let e=0;e<d.length;e++){let t=d[e],n=p?.[e]?.[1]||{};if(!n||Object.keys(n).length===0){this.logger.warn(`Skipping job ${t} - not found (likely cleaned up by retention)`);continue}let r=a.get(t),i=C.fromRawHash(this,t,n,r);m.push(i)}return m}/* getJobCounts: BullMQ-compatible counts object; paused / waiting-children / prioritized are not tracked here and always report 0 */async getJobCounts(){let[e,t,n,r,i]=await Promise.all([this.getActiveCount(),this.getWaitingCount(),this.getDelayedCount(),this.getCompletedCount(),this.getFailedCount()]);return{active:e,waiting:t,delayed:n,completed:r,failed:i,paused:0,"waiting-children":0,prioritized:0}}async getLimitedGroupCount(){return this.r.zcard(`${this.ns}:limited`)}async getLimitedGroups(){return this.r.zrange(`${this.ns}:limited`,0,-1)}/* checkStalledJobs: run the check-stalled script; returns a flat [id, group, outcome, ...] list, or [] on error */async checkStalledJobs(e,t,n,r=500){try{return await I(this.r,`check-stalled`,[this.ns,String(e),String(t),String(n),String(r)],1)||[]}catch(e){return this.logger.error(`Error checking stalled jobs:`,e),[]}}/* startPromoter: move staged jobs to ready when orderingDelayMs > 0; prefers Redis keyspace-expiry notifications, with a 100ms polling interval as fallback */async startPromoter(){if(!(this.promoterRunning||this.orderingDelayMs<=0)){this.promoterRunning=!0,this.promoterLockId=(0,l.randomUUID)();try{this.promoterRedis=this.r.duplicate();try{await this.promoterRedis.config(`SET`,`notify-keyspace-events`,`Ex`),this.logger.debug(`Enabled Redis keyspace notifications for staging promoter`)}catch(e){this.logger.warn(`Failed to enable keyspace notifications. 
Promoter will use polling fallback.`,e)}let e=this.promoterRedis.options.db??0,t=`${this.ns}:stage:timer`,n=`__keyevent@${e}__:expired`;await this.promoterRedis.subscribe(n,e=>{e?this.logger.error(`Failed to subscribe to keyspace events:`,e):this.logger.debug(`Subscribed to ${n}`)}),this.promoterRedis.on(`message`,async(e,r)=>{e===n&&r===t&&await this.runPromotion()}),this.promoterInterval=setInterval(async()=>{await this.runPromotion()},100),await this.runPromotion(),this.logger.debug(`Staging promoter started`)}catch(e){this.logger.error(`Failed to start promoter:`,e),this.promoterRunning=!1,await this.stopPromoter()}}}/* runPromotion: single promotion pass guarded by a 30s NX lock tagged with promoterLockId; releases the lock only if still owned */async runPromotion(){if(!this.promoterRunning)return;let e=`${this.ns}:promoter:lock`;try{if(await this.r.set(e,this.promoterLockId,`PX`,3e4,`NX`)===`OK`)try{let e=await I(this.r,`promote-staged`,[this.ns,String(Date.now()),`100`],1);e>0&&this.logger.debug(`Promoted ${e} staged jobs`)}finally{await this.r.get(e)===this.promoterLockId&&await this.r.del(e)}}catch(e){this.logger.error(`Error during promotion:`,e)}}/* stopPromoter: clear the interval and tear down the promoter connection (quit, falling back to disconnect) */async stopPromoter(){if(this.promoterRunning){if(this.promoterRunning=!1,this.promoterInterval&&=(clearInterval(this.promoterInterval),void 0),this.promoterRedis){try{await this.promoterRedis.unsubscribe(),await this.promoterRedis.quit()}catch{try{this.promoterRedis.disconnect()}catch{}}this.promoterRedis=void 0}this.logger.debug(`Staging promoter stopped`)}}/* repairGroups: SSCAN the <ns>:groups set in pages of e and run the repair-groups script per page; returns the repaired count */async repairGroups(e=100){let t=`${this.ns}:groups`,n=0,r=`0`;try{do{let[i,a]=await this.r.sscan(t,r,`COUNT`,e);if(r=i,a.length===0)continue;let o=await I(this.r,`repair-groups`,[this.ns,JSON.stringify(a)],1);n+=o}while(r!==`0`);return n>0&&this.logger.info(`Maintenance: Scanned and repaired states for ${n} groups`),n}catch(e){return this.logger.error(`Failed to repair groups:`,e),0}}/* close: flush pending batched adds, stop the promoter, tear down the events subscriber, reject outstanding waiters, then quit the main connection */async close(){if(this.batchConfig&&this.batchBuffer.length>0&&(this.logger.debug(`Flushing ${this.batchBuffer.length} pending batched jobs before close`),await this.flushBatch()),await 
this.stopPromoter(),this.subscriber){try{await this.subscriber.unsubscribe(`${this.ns}:events`),await this.subscriber.quit()}catch{try{this.subscriber.disconnect()}catch{}}this.subscriber=void 0,this.eventsSubscribed=!1}if(this.waitingJobs.size>0){let e=Error(`Queue closed`);this.waitingJobs.forEach(t=>{t.forEach(t=>t.reject(e))}),this.waitingJobs.clear()}try{await this.r.quit()}catch{try{this.r.disconnect()}catch{}}}/* pause/resume toggle a flag key checked by reserveBlocking */get pausedKey(){return`${this.ns}:paused`}async pause(){await this.r.set(this.pausedKey,`1`)}async resume(){await this.r.del(this.pausedKey)}async isPaused(){return await this.r.get(this.pausedKey)!==null}/* waitForEmpty: poll the is-empty script until true or timeout; e may be a timeout number or an options object; retries through connection errors; optionally throws _ (a timeout error type) carrying a state snapshot */async waitForEmpty(e=6e4){let t=typeof e==`number`?{timeoutMs:e}:e,n=t.timeoutMs??6e4,r=t.intervalMs??200,i=t.ignoreDelayed?`1`:`0`,a=t.ignoreStaged?`1`:`0`,o=t.throwOnTimeout??!1,s=Date.now();for(;Date.now()-s<n;)try{if(await I(this.r,`is-empty`,[this.ns,i,a],1)===1)return await B(0),!0;await B(r)}catch(e){if(this.isConnectionError(e)){this.logger.warn(`Redis connection error in waitForEmpty, retrying...`),await B(1e3);continue}throw e}if(o)throw new _(await this.getQueueStateSnapshot(),n);return!1}/* cleanupPoisonedGroup: probabilistic (1%) and per-group throttled (10s) cleanup of a stuck ready group, pruning the tracking map past 1000 entries */async cleanupPoisonedGroup(e){if(Math.random()>.01)return`skipped`;let t=this._groupCleanupTracking.get(e)||0,n=Date.now();if(n-t<1e4)return`throttled`;if(this._groupCleanupTracking.set(e,n),this._groupCleanupTracking.size>1e3){let e=n-6e4;for(let[t,n]of this._groupCleanupTracking.entries())n<e&&this._groupCleanupTracking.delete(t)}try{let t=await I(this.r,`cleanup-poisoned-group`,[this.ns,e,String(n)],1);return t===`poisoned`?this.logger.warn(`Removed poisoned group ${e} from ready queue`):t===`empty`?this.logger.warn(`Removed empty group ${e} from ready queue`):t===`locked`&&Math.random()<.1&&this.logger.debug(`Detected group ${e} is locked by another worker (this is normal with high concurrency)`),t}catch(t){return this.logger.error(`Error cleaning up group ${e}:`,t),`error`}}maintenanceLockKey(){return`${this.ns}:maintenance:lock`}async 
acquireMaintenanceLock(e=3e4){try{return await this.r.set(this.maintenanceLockKey(),`1`,`PX`,e,`NX`)===`OK`}catch{return!1}}/* scheduler lock: short-lived NX lock so only one worker runs the scheduler tick */schedulerLockKey(){return`${this.ns}:sched:lock`}async acquireSchedulerLock(e=1500){try{return await this.r.set(this.schedulerLockKey(),`1`,`PX`,e,`NX`)===`OK`}catch{return!1}}/* runSchedulerOnce: under the lock, promote due delayed jobs then fire due repeating jobs */async runSchedulerOnce(e=Date.now()){await this.acquireSchedulerLock(this.schedulerLockTtlMs)&&(await this.promoteDelayedJobsBounded(32,e),await this.processRepeatingJobsBounded(16,e))}/* promoteDelayedJobsBounded: promote one delayed job per script call, up to e iterations */async promoteDelayedJobsBounded(e=256,t=Date.now()){let n=0;for(let r=0;r<e;r++)try{let e=await I(this.r,`promote-delayed`,[this.ns,String(t),`1`],1);if(!e||e<=0)break;n+=e}catch{break}return n}/* processRepeatingJobsBounded: pop due entries from <ns>:repeat:schedule, reschedule each (every-interval or cron) and enqueue an occurrence; broken entries are dropped from the schedule */async processRepeatingJobsBounded(e=128,t=Date.now()){let n=`${this.ns}:repeat:schedule`,r=0;for(let i=0;i<e;i++){let e=await this.r.zrangebyscore(n,0,t,`LIMIT`,0,1);if(!e||e.length===0)break;let i=e[0];try{let e=`${this.ns}:repeat:${i}`,a=await this.r.get(e);if(!a){await this.r.zrem(n,i);continue}let o=JSON.parse(a);if(o.removed){await this.r.zrem(n,i),await this.r.del(e);continue}await this.r.zrem(n,i);let s;s=`every`in o.repeat?t+o.repeat.every:this.getNextCronTime(o.repeat.pattern,t),o.nextRunTime=s,o.lastRunTime=t,await this.r.set(e,JSON.stringify(o)),await this.r.zadd(n,s,i),await I(this.r,`enqueue`,[this.ns,o.groupId,JSON.stringify(o.data),String(o.maxAttempts??this.defaultMaxAttempts),String(o.orderMs??t),`0`,String((0,l.randomUUID)()),String(this.keepCompleted)],1),r++}catch(e){this.logger.error(`Error processing repeating job ${i}:`,e),await this.r.zrem(n,i)}}return r}/* promoteDelayedJobs: unbounded single-call promotion */async promoteDelayedJobs(){try{return await I(this.r,`promote-delayed`,[this.ns,String(Date.now())],1)}catch(e){return this.logger.error(`Error promoting delayed jobs:`,e),0}}/* changeDelay: set a new delay-until time (t<=0 clears the delay); promote(e) reuses this with t=0 */async changeDelay(e,t){let n=t>0?Date.now()+t:0;try{return await I(this.r,`change-delay`,[this.ns,e,String(n),String(Date.now())],1)===1}catch(t){return this.logger.error(`Error changing delay for job ${e}:`,t),!1}}async promote(e){return 
this.changeDelay(e,0)}async remove(e){try{return await I(this.r,`remove`,[this.ns,e],1)===1}catch(t){return this.logger.error(`Error removing job ${e}:`,t),!1}}async clean(e,t,n){let r=Date.now()-e;try{return await I(this.r,`clean-status`,[this.ns,n,String(r),String(Math.max(0,Math.min(t,1e5)))],1)??0}catch(e){return console.log(`HERE?`,e),this.logger.error(`Error cleaning ${n} jobs:`,e),0}}async updateData(e,t){let n=`${this.ns}:job:${e}`;if(!await this.r.exists(n))throw Error(`Job ${e} not found`);let r=JSON.stringify(t===void 0?null:t);await this.r.hset(n,`data`,r)}async addRepeatingJob(e){if(!e.repeat)throw Error(`Repeat options are required for repeating jobs`);let t=Date.now(),n=`${e.groupId}:${JSON.stringify(e.repeat)}:${t}:${Math.random().toString(36).slice(2)}`,r;r=`every`in e.repeat?t+e.repeat.every:this.getNextCronTime(e.repeat.pattern,t);let i={groupId:e.groupId,data:e.data===void 0?null:e.data,maxAttempts:e.maxAttempts??this.defaultMaxAttempts,orderMs:e.orderMs,repeat:e.repeat,nextRunTime:r,lastRunTime:null,removed:!1},a=`${this.ns}:repeat:${n}`;await this.r.set(a,JSON.stringify(i)),await this.r.zadd(`${this.ns}:repeat:schedule`,r,n);let o=`${this.ns}:repeat:lookup:${e.groupId}:${JSON.stringify(e.repeat)}`;await this.r.set(o,n);let s=`repeat:${n}`,c=`${this.ns}:job:${s}`;try{await this.r.hmset(c,`id`,s,`groupId`,i.groupId,`data`,JSON.stringify(i.data),`attempts`,`0`,`maxAttempts`,String(i.maxAttempts),`seq`,`0`,`timestamp`,String(Date.now()),`orderMs`,String(i.orderMs??t),`status`,`waiting`)}catch{}return C.fromStore(this,s)}getNextCronTime(e,t){try{return u.default.parseExpression(e,{currentDate:new Date(t)}).next().getTime()}catch{throw Error(`Invalid cron pattern: ${e}`)}}async removeRepeatingJob(e,t){try{let n=`${this.ns}:repeat:lookup:${e}:${JSON.stringify(t)}`,r=await this.r.get(n);if(!r)return!1;let i=`${this.ns}:repeat:${r}`,a=`${this.ns}:repeat:schedule`,o=await this.r.get(i);if(!o)return await this.r.del(n),!1;let 
s=JSON.parse(o);s.removed=!0,await this.r.set(i,JSON.stringify(s)),await this.r.zrem(a,r),await this.r.del(n);try{let e=`repeat:${r}`;await this.r.del(`${this.ns}:job:${e}`)}catch{}return!0}catch(e){return this.logger.error(`Error removing repeating job:`,e),!1}}};/* B: promise-based sleep for e ms */function B(e){return new Promise(t=>setTimeout(t,e))}/* V: singly-linked-list node; H: minimal FIFO linked list (push/shift) */var V=class{constructor(e){this.value=void 0,this.next=null,this.value=e}},H=class{constructor(){this.length=0,this.head=null,this.tail=null}push(e){let t=new V(e);return this.length?this.tail.next=t:this.head=t,this.tail=t,this.length+=1,t}shift(){if(!this.length)return null;let e=this.head;return this.head=this.head.next,--this.length,e}},/* U: async FIFO queue - add() tracks pending promises and queues settled values in settle order; wait()/fetch() hand them out; with ignoreErrors, rejections become undefined entries instead of rejecting the waiter */U=class{constructor(e=!1){this.ignoreErrors=e,this.queue=new H,this.pending=new Set,this.newPromise()}add(e){this.pending.add(e),e.then(t=>{this.pending.delete(e),this.queue.length===0&&this.resolvePromise(t),this.queue.push(t)}).catch(t=>{this.pending.delete(e),this.ignoreErrors?(this.queue.length===0&&this.resolvePromise(void 0),this.queue.push(void 0)):this.rejectPromise(t)})}async waitAll(){await Promise.all(this.pending)}numTotal(){return this.pending.size+this.queue.length}numPending(){return this.pending.size}numQueued(){return this.queue.length}resolvePromise(e){this.resolve(e),this.newPromise()}rejectPromise(e){this.reject(e),this.newPromise()}newPromise(){this.nextPromise=new Promise((e,t)=>{this.resolve=e,this.reject=t})}async wait(){return this.nextPromise}async fetch(){if(!(this.pending.size===0&&this.queue.length===0)){for(;this.queue.length===0;)try{await this.wait()}catch(e){this.ignoreErrors||console.error(`Unexpected Error in AsyncFifoQueue`,e)}return this.queue.shift()?.value}}},/* W: error type that marks a job as not retryable */W=class extends Error{constructor(e){super(e),this.name=`UnrecoverableError`}},/* G: tiny synchronous event emitter (listener errors are caught and logged) */G=class{constructor(){this.listeners=new Map}on(e,t){return this.listeners.has(e)||this.listeners.set(e,[]),this.listeners.get(e).push(t),this}off(e,t){let n=this.listeners.get(e);if(n){let e=n.indexOf(t);e!==-1&&n.splice(e,1)}return 
this}emit(e,...t){let n=this.listeners.get(e);if(n&&n.length>0){for(let r of n)try{r(...t)}catch(t){console.error(`Error in event listener for '${String(e)}':`,t)}return!0}return!1}removeAllListeners(e){return e?this.listeners.delete(e):this.listeners.clear(),this}};/* K: default retry backoff - exponential from 500ms doubling per attempt, capped at 30s, plus up to 25% jitter (second arg unused) */const K=(e,t)=>{let n=Math.min(3e4,2**(e-1)*500);return n+Math.floor(n*.25*Math.random())};/* q: worker - pulls jobs from queue e.queue and runs e.handler with concurrency, heartbeats, stalled-job recovery and maintenance; autoStart unless e.autoStart===false */var q=class extends G{constructor(e){if(super(),this.stopping=!1,this.ready=!1,this.closed=!1,this.blockingClient=null,this.jobsInProgress=new Set,this.lastJobPickupTime=Date.now(),this.totalJobsProcessed=0,this.blockingStats={totalBlockingCalls:0,consecutiveEmptyReserves:0,lastActivityTime:Date.now()},this.emptyReserveBackoffMs=0,!e.handler||typeof e.handler!=`function`)throw Error(`Worker handler must be a function`);this.opts=e,this.q=e.queue,this.name=e.name??this.q.name,this.logger=typeof e.logger==`object`?e.logger:new E(!!e.logger,this.name),this.handler=e.handler;let t=this.q.jobTimeoutMs??3e4;this.hbMs=e.heartbeatMs??Math.max(1e3,Math.floor(t/3)),this.onError=e.onError,this.maxAttempts=e.maxAttempts??this.q.maxAttemptsDefault??3,this.backoff=e.backoff??K,this.schedulerMs=e.schedulerIntervalMs??1e3,this.blockingTimeoutSec=e.blockingTimeoutSec??5,this.concurrency=Math.max(1,e.concurrency??1),this.stalledInterval=e.stalledInterval??2e3,this.maxStalledCount=e.maxStalledCount??3,this.maxJobsPerScan=e.maxJobsPerScan??Math.max(500,this.concurrency*2),this.stalledGracePeriod=e.stalledGracePeriod??0,this.maintenanceIntervalMs=e.maintenanceIntervalMs??6e4,this.setupRedisEventHandlers(),this.q.orderingDelayMs>0&&this.q.startPromoter().catch(e=>{this.logger.error(`Failed to start staging promoter:`,e)}),e.autoStart!==!1&&this.run()}get isClosed(){return this.closed}/* addJitter: e plus up to fraction t of random jitter */addJitter(e,t=.1){return e+Math.random()*e*t}/* setupRedisEventHandlers: mirror the queue connection's close/error/ready events onto this emitter */setupRedisEventHandlers(){let 
e=this.q.redis;e&&(this.redisCloseHandler=()=>{this.ready=!1,this.emit(`ioredis:close`)},this.redisErrorHandler=e=>{this.emit(`error`,e)},this.redisReadyHandler=()=>{!this.ready&&!this.stopping&&(this.ready=!0,this.emit(`ready`))},e.on(`close`,this.redisCloseHandler),e.on(`error`,this.redisErrorHandler),e.on(`ready`,this.redisReadyHandler))}/* run: start the main loop once; repeat calls return the same promise */async run(){if(this.runLoopPromise)return this.runLoopPromise;let e=this._runLoop();return this.runLoopPromise=e,e}/* _runLoop: create a dedicated blocking connection, start scheduler/stalled/maintenance timers, then fetch-and-process until stopping and the in-flight queue drains; connection errors back off exponentially */async _runLoop(){this.logger.info(`🚀 Worker ${this.name} starting...`);try{this.blockingClient=this.q.redis.duplicate({enableAutoPipelining:!0,maxRetriesPerRequest:null,retryStrategy:e=>Math.max(Math.min(Math.exp(e)*1e3,2e4),1e3)}),this.blockingClient.on(`error`,e=>{this.q.isConnectionError(e)?this.logger.warn(`Blocking client connection error:`,e.message):this.logger.error(`Blocking client error (non-connection):`,e),this.emit(`error`,e instanceof Error?e:Error(String(e)))}),this.blockingClient.on(`close`,()=>{!this.stopping&&!this.closed&&this.logger.warn(`Blocking client disconnected, will reconnect on next operation`)}),this.blockingClient.on(`reconnecting`,()=>{!this.stopping&&!this.closed&&this.logger.info(`Blocking client reconnecting...`)}),this.blockingClient.on(`ready`,()=>{!this.stopping&&!this.closed&&this.logger.info(`Blocking client ready`)})}catch(e){this.logger.error(`Failed to create blocking client:`,e),this.blockingClient=null}let e=this.schedulerMs;this.schedulerTimer=setInterval(async()=>{try{await this.q.runSchedulerOnce()}catch{}},this.addJitter(e));try{await this.checkStalled()}catch(e){this.logger.error(`Error in initial stalled job check:`,e)}this.startStalledChecker(),this.startMaintenance();let t=0,n=new U(!0);for(;!this.stopping||n.numTotal()>0;)try{for(;!this.stopping&&!(n.numTotal()>=this.concurrency);){this.blockingStats.totalBlockingCalls++,this.blockingStats.totalBlockingCalls>=1e9&&(this.blockingStats.totalBlockingCalls=0),this.logger.debug(`Fetching job (call 
#${this.blockingStats.totalBlockingCalls}, processing: ${this.jobsInProgress.size}/${this.concurrency}, queue: ${n.numTotal()} (queued: ${n.numQueued()}, pending: ${n.numPending()}), total: ${n.numTotal()}/${this.concurrency})...`);let e;if(this.opts.strategy)e=(async()=>{let e=await this.opts.strategy.acquireJob(this.q);return e||await this.delay(this.opts.strategy.idleInterval),e})();else{let r=this.concurrency-n.numTotal();if(r>0&&n.numTotal()===0){let e=Math.min(r,8),i=await this.q.reserveBatch(e);if(i.length>0){this.logger.debug(`Batch reserved ${i.length} jobs`);for(let e of i)n.add(Promise.resolve(e));t=0,this.lastJobPickupTime=Date.now(),this.blockingStats.consecutiveEmptyReserves=0,this.blockingStats.lastActivityTime=Date.now(),this.emptyReserveBackoffMs=0;continue}}let i=this.blockingStats.consecutiveEmptyReserves>=2&&n.numTotal()===0&&this.jobsInProgress.size===0,a=this.blockingTimeoutSec;e=i?this.q.reserveBlocking(a,void 0,this.blockingClient??void 0):this.q.reserve()}n.add(e);let r=await e;if(r)t=0,this.lastJobPickupTime=Date.now(),this.blockingStats.consecutiveEmptyReserves=0,this.blockingStats.lastActivityTime=Date.now(),this.emptyReserveBackoffMs=0,this.logger.debug(`Fetched job ${r.id} from group ${r.groupId}`);else{if(this.opts.strategy&&n.numTotal()===0&&this.jobsInProgress.size===0)break;this.blockingStats.consecutiveEmptyReserves++,this.blockingStats.consecutiveEmptyReserves%50==0&&this.logger.debug(`No job available (consecutive empty: ${this.blockingStats.consecutiveEmptyReserves})`);let e=this.concurrency>=100?5:3;if(this.blockingStats.consecutiveEmptyReserves>e&&n.numTotal()===0&&this.jobsInProgress.size===0){let e=this.concurrency>=100?2e3:5e3;this.emptyReserveBackoffMs===0?this.emptyReserveBackoffMs=this.concurrency>=100?100:50:this.emptyReserveBackoffMs=Math.min(e,Math.max(100,this.emptyReserveBackoffMs*1.2)),this.blockingStats.consecutiveEmptyReserves%20==0&&this.logger.debug(`Applying backoff: 
${Math.round(this.emptyReserveBackoffMs)}ms (consecutive empty: ${this.blockingStats.consecutiveEmptyReserves}, jobs in progress: ${this.jobsInProgress.size})`),await this.delay(this.emptyReserveBackoffMs)}if(n.numTotal()===0&&this.jobsInProgress.size===0||n.numTotal()>0||this.jobsInProgress.size>0)break}}let e;do e=await n.fetch()??void 0;while(!e&&n.numQueued()>0);if(e&&typeof e==`object`&&`id`in e){this.totalJobsProcessed++,this.logger.debug(`Processing job ${e.id} from group ${e.groupId} immediately`);let t=this.processJob(e,()=>n.numTotal()<=this.concurrency,this.jobsInProgress);n.add(t)}}catch(e){if(this.stopping)return;if(this.q.isConnectionError(e))if(t++,this.logger.error(`Connection error (retry ${t}/10):`,e),t>=10)this.logger.error(`⚠️ Max connection retries (10) exceeded! Worker will continue but may be experiencing persistent Redis issues.`),this.emit(`error`,Error(`Max connection retries (10) exceeded - worker continuing with backoff`)),await this.delay(2e4),t=0;else{let e=Math.max(Math.min(Math.exp(t)*1e3,2e4),1e3);this.logger.debug(`Waiting ${Math.round(e)}ms before retry (exponential backoff)`),await this.delay(e)}else this.logger.error(`Worker loop error (non-connection, continuing):`,e),this.emit(`error`,e instanceof Error?e:Error(String(e))),t=0,await this.delay(200);this.onError?.(e)}this.logger.info(`Stopped`)}/* delay: sleep helper */async delay(e){return new Promise(t=>setTimeout(t,e))}/* processJob: register job e in the in-progress set n (reusing an existing entry when chaining), run it, and if the handler chained a next job track that one too; always deregisters on exit */async processJob(e,t,n){let r=Array.from(n).find(t=>t.job.id===e.id),i;r?(r.ts=Date.now(),i=r):(i={job:e,ts:Date.now()},n.add(i));try{let r=await this.processSingleJob(e,t);if(r&&typeof r==`object`&&`id`in r&&`groupId`in r){let e={job:r,ts:Date.now()};return n.add(e),n.delete(i),r}return r}finally{n.has(i)&&n.delete(i)}}/* completeJob: when capacity allows (n()), atomically complete and reserve the next job of the same group; otherwise just record completion */async completeJob(e,t,n,r,i){if(n?.()){let n=await this.q.completeAndReserveNextWithMetadata(e.id,e.groupId,e.token,t,{processedOn:r||Date.now(),finishedOn:i||Date.now(),attempts:e.attempts,maxAttempts:e.maxAttempts});if(n)return this.logger.debug(`Got next job ${n.id} 
from same group ${n.groupId} atomically`),n;this.logger.debug(`Atomic chaining returned nil for job ${e.id} - job completed, but no next job chained`),Math.random()<.1&&await new Promise(e=>setTimeout(e,Math.random()*100))}else await this.q.completeWithMetadata({id:e.id,groupId:e.groupId,token:e.token},t,{processedOn:r||Date.now(),finishedOn:i||Date.now(),attempts:e.attempts,maxAttempts:e.maxAttempts})}startStalledChecker(){this.stalledInterval<=0||(this.stalledCheckTimer=setInterval(async()=>{try{await this.checkStalled()}catch(e){this.logger.error(`Error in stalled job checker:`,e),this.emit(`error`,e instanceof Error?e:Error(String(e)))}},this.stalledInterval))}async checkStalled(){if(!(this.stopping||this.closed))try{let e=Date.now(),t=await this.q.checkStalledJobs(e,this.stalledGracePeriod,this.maxStalledCount,this.maxJobsPerScan);if(t.length>0)for(let e=0;e<t.length;e+=3){let n=t[e],r=t[e+1],i=t[e+2];i===`recovered`?(this.logger.info(`Recovered stalled job ${n} from group ${r}`),this.emit(`stalled`,n,r)):i===`failed`&&(this.logger.warn(`Failed stalled job ${n} from group ${r} (exceeded max stalled count)`),this.emit(`stalled`,n,r))}}catch(e){this.logger.error(`Error checking stalled jobs:`,e)}}startMaintenance(){if(this.maintenanceIntervalMs<=0)return;let e=Math.random()*this.maintenanceIntervalMs*.1,t=this.maintenanceIntervalMs+e;this.maintenanceTimer=setInterval(async()=>{if(!(this.stopping||this.closed))try{let e=Math.floor(this.maintenanceIntervalMs/2);await this.q.acquireMaintenanceLock(e)&&await this.q.repairGroups()}catch(e){this.logger.error(`Error in maintenance (repairGroups):`,e)}},t)}getWorkerMetrics(){let 
e=Date.now();return{name:this.name,totalJobsProcessed:this.totalJobsProcessed,lastJobPickupTime:this.lastJobPickupTime,timeSinceLastJob:this.lastJobPickupTime>0?e-this.lastJobPickupTime:null,blockingStats:{...this.blockingStats},isProcessing:this.jobsInProgress.size>0,jobsInProgressCount:this.jobsInProgress.size,jobsInProgress:Array.from(this.jobsInProgress).map(t=>({jobId:t.job.id,groupId:t.job.groupId,processingTimeMs:e-t.ts}))}}async close(e=3e4){this.stopping=!0,await this.delay(100),this.schedulerTimer&&clearInterval(this.schedulerTimer),this.stalledCheckTimer&&clearInterval(this.stalledCheckTimer),this.maintenanceTimer&&clearInterval(this.maintenanceTimer);let t=Date.now();for(;this.jobsInProgress.size>0&&Date.now()-t<e;)await Y(100);if(this.blockingClient){try{this.jobsInProgress.size>0&&e>0?(this.logger.debug(`Gracefully closing blocking client (quit)...`),await this.blockingClient.quit()):(this.logger.debug(`Force closing blocking client (disconnect)...`),this.blockingClient.disconnect())}catch(e){this.logger.debug(`Error closing blocking client:`,e)}this.blockingClient=null}if(this.runLoopPromise){let t=this.jobsInProgress.size>0?e:2e3,n=new Promise(e=>{setTimeout(e,t)});try{await Promise.race([this.runLoopPromise,n])}catch(e){this.logger.warn(`Error while waiting for run loop to exit:`,e)}}if(this.jobsInProgress.size>0){this.logger.warn(`Worker stopped with ${this.jobsInProgress.size} jobs still processing after ${e}ms timeout.`);let t=Date.now();for(let e of this.jobsInProgress)this.emit(`graceful-timeout`,C.fromReserved(this.q,e.job,{processedOn:e.ts,finishedOn:t,status:`active`}))}this.jobsInProgress.clear(),this.ready=!1,this.closed=!0;try{let e=this.q.redis;e&&(this.redisCloseHandler&&e.off?.(`close`,this.redisCloseHandler),this.redisErrorHandler&&e.off?.(`error`,this.redisErrorHandler),this.redisReadyHandler&&e.off?.(`ready`,this.redisReadyHandler))}catch{}this.emit(`closed`)}getCurrentJob(){if(this.jobsInProgress.size===0)return null;let 
e=Array.from(this.jobsInProgress)[0],t=Date.now();return{job:C.fromReserved(this.q,e.job,{processedOn:e.ts,status:`active`}),processingTimeMs:t-e.ts}}getCurrentJobs(){let e=Date.now();return Array.from(this.jobsInProgress).map(t=>({job:C.fromReserved(this.q,t.job,{processedOn:t.ts,status:`active`}),processingTimeMs:e-t.ts}))}isProcessing(){return this.jobsInProgress.size>0}async add(e){return this.q.add(e)}async processSingleJob(e,t){let n=Date.now(),r,i,a=()=>{let t=this.q.jobTimeoutMs||3e4,n=Math.min(this.hbMs,Math.floor(t/3),1e4);this.logger.debug(`Starting heartbeat for job ${e.id} (interval: ${n}ms, concurrency: ${this.concurrency})`),r=setInterval(async()=>{try{await this.q.heartbeat({id:e.id,groupId:e.groupId,token:e.token})===0&&(this.logger.warn(`Heartbeat failed for job ${e.id} - job may have been removed or completed elsewhere`),r&&clearInterval(r))}catch(t){let n=this.q.isConnectionError(t);(!n||!this.stopping)&&this.logger.error(`Heartbeat error for job ${e.id}:`,t instanceof Error?t.message:String(t)),this.onError?.(t,C.fromReserved(this.q,e,{status:`active`})),(!n||!this.stopping)&&this.emit(`error`,t instanceof Error?t:Error(String(t)))}},n)};try{let o=this.q.jobTimeoutMs||3e4,s=Math.min(o*.1,2e3);i=setTimeout(()=>{a()},s);let c=C.fromReserved(this.q,e,{processedOn:n,status:`active`}),l=await this.handler(c);i&&clearTimeout(i),r&&clearInterval(r);let u=Date.now(),d=await this.completeJob(e,l,t,n,u);return this.blockingStats.consecutiveEmptyReserves=0,this.emptyReserveBackoffMs=0,this.emit(`completed`,C.fromReserved(this.q,e,{processedOn:n,finishedOn:u,returnvalue:l,status:`completed`})),d}catch(t){i&&clearTimeout(i),r&&clearInterval(r),await this.handleJobFailure(t,e,n)}}async handleJobFailure(e,t,n){let r=e instanceof Error?e:Error(String(e)),i=t.attempts+1;this.blockingStats.consecutiveEmptyReserves=0,this.emptyReserveBackoffMs=0;let a=e instanceof W,o=i>=this.maxAttempts,s=`failed`,c,l=Date.now();if(a||o){s=`failed`;let c=o?t.maxAttempts:i;await 
this.deadLetterJob(e,t,n,l,c),a&&this.logger.info(`Unrecoverable error for job ${t.id}: ${r.message}. Skipping retries.`)}else{let r=this.backoff(i,e);s=r>0?`delayed`:`waiting`,c=r;let a=await this.q.retry({id:t.id,token:t.token},r);if(a===-1)s=`failed`,c=void 0,await this.deadLetterJob(e,t,n,l,t.maxAttempts);else if(a===-2){this.logger.warn(`Lock lost for job ${t.id}: cannot retry as another worker has taken over`);return}else await this.recordFailureAttempt(e,t,n,l,i)}let u=C.fromReserved(this.q,t,{processedOn:n,status:`active`});this.onError?.(e,u),this.emit(`failed`,C.fromReserved(this.q,t,{processedOn:n,finishedOn:l,failedReason:r.message,stacktrace:r.stack,status:s,delayMs:c}));try{this.emit(`error`,r)}catch{}}async deadLetterJob(e,t,n,r,i){this.logger.info(`Dead lettering job ${t.id} from group ${t.groupId} (attempts: ${i}/${t.maxAttempts})`);let a=e instanceof Error?e:Error(String(e));try{await this.q.recordFinalFailure({id:t.id,groupId:t.groupId,token:t.token},{name:a.name,message:a.message,stack:a.stack},{processedOn:n,finishedOn:r,attempts:i,maxAttempts:t.maxAttempts,data:t.data})}catch(e){this.logger.warn(`Failed to record final failure`,e)}await this.q.deadLetter(t.id,t.groupId,t.token)}async recordFailureAttempt(e,t,n,r,i){let a=e instanceof Error?e:Error(String(e));try{await this.q.recordAttemptFailure({id:t.id,groupId:t.groupId},{name:a.name,message:a.message,stack:a.stack},{processedOn:n,finishedOn:r,attempts:i,maxAttempts:t.maxAttempts})}catch(e){this.logger.warn(`Failed to record attempt failure`,e)}}};const J=q;function Y(e){return new Promise(t=>setTimeout(t,e))}const X=`
55
+ `,1,r,e,String(n))},getConcurrency:async e=>(await this.groups.getConfig(e)).concurrency??1},this._groupCleanupTracking=new Map,this.r=e.redis,this.rawNs=e.namespace,this.name=e.namespace,this.ns=`groupmq:${this.rawNs}`;let t=e.jobTimeoutMs??5e3;this.vt=Math.max(1,t),this.defaultMaxAttempts=e.maxAttempts??3,this.scanLimit=e.reserveScanLimit??20,this.keepCompleted=Math.max(0,e.keepCompleted??0),this.keepFailed=Math.max(0,e.keepFailed??0),this.schedulerLockTtlMs=e.schedulerLockTtlMs??1500,this.orderingDelayMs=e.orderingDelayMs??0,e.autoBatch&&(this.batchConfig=typeof e.autoBatch==`boolean`?{size:10,maxWaitMs:10}:{size:e.autoBatch.size??10,maxWaitMs:e.autoBatch.maxWaitMs??10}),this.logger=typeof e.logger==`object`?e.logger:new E(!!e.logger,this.namespace),this.r.on(`error`,e=>{this.logger.error(`Redis error (main):`,e)})}get redis(){return this.r}get namespace(){return this.ns}get rawNamespace(){return this.rawNs}get jobTimeoutMs(){return this.vt}get maxAttemptsDefault(){return this.defaultMaxAttempts}async add(e){let t=e.maxAttempts??this.defaultMaxAttempts,n=e.orderMs??Date.now(),r=Date.now(),i=e.jobId??(0,l.v7)();if(e.repeat)return this.addRepeatingJob({...e,orderMs:n,maxAttempts:t});let a;if(e.delay!==void 0&&e.delay>0)a=e.delay;else if(e.runAt!==void 0){let t=e.runAt instanceof Date?e.runAt.getTime():e.runAt;a=Math.max(0,t-r)}let o=e.data===void 0?null:e.data;return this.batchConfig?new Promise((r,s)=>{this.batchBuffer.push({groupId:e.groupId,data:o,jobId:i,maxAttempts:t,delayMs:a,orderMs:n,resolve:r,reject:s}),this.batchBuffer.length>=this.batchConfig.size?this.flushBatch():this.batchTimer||=setTimeout(()=>this.flushBatch(),this.batchConfig.maxWaitMs)}):this.addSingle({...e,data:o,jobId:i,maxAttempts:t,orderMs:n,delayMs:a,groupConfig:e.groupConfig})}async addFlow(e){let t=e.parent.jobId??(0,l.v7)(),n=e.parent.maxAttempts??this.defaultMaxAttempts,r=e.parent.orderMs??Date.now(),i=JSON.stringify(e.parent.data===void 
0?null:e.parent.data),a=e.parent.groupConfig?JSON.stringify(e.parent.groupConfig):``,o=[],s=[];for(let t of e.children){let e=t.jobId??(0,l.v7)(),n=t.maxAttempts??this.defaultMaxAttempts,r=t.orderMs??Date.now(),i=t.delay??0,a=JSON.stringify(t.data===void 0?null:t.data),c=t.groupConfig?JSON.stringify(t.groupConfig):``;o.push(e),s.push(e,t.groupId,a,n.toString(),r.toString(),i.toString(),c)}let c=Date.now();return await I(this.r,`enqueue-flow`,[this.ns,t,e.parent.groupId,i,n.toString(),r.toString(),c.toString(),a,...s],1),new C({queue:this,id:t,groupId:e.parent.groupId,data:e.parent.data,status:`waiting-children`,attemptsMade:0,opts:{attempts:n},timestamp:c,orderMs:r,isFlowParent:!0})}async getFlowRemainingCount(e){let t=await this.r.hget(`${this.ns}:job:${e}`,`flowRemaining`);return t===null?null:parseInt(t,10)}async getFlowDependenciesCount(e){let t=await this.getFlowChildrenIds(e);if(t.length===0)return{processed:0,unprocessed:0,failed:0};let n=await this.r.hgetall(`${this.ns}:flow:results:${e}`),r=0,i=0;for(let e of Object.values(n))try{let t=JSON.parse(e);t.status===`completed`?r++:t.status===`failed`&&i++}catch{}let a=t.length-r-i;return{processed:r,unprocessed:Math.max(0,a),failed:i}}async getFlowResults(e){let t=await this.r.hgetall(`${this.ns}:flow:results:${e}`),n=[];for(let[e,r]of Object.entries(t))try{let t=JSON.parse(r),i=t.data;if(typeof t.data==`string`)try{i=JSON.parse(t.data)}catch{}n.push({jobId:e,status:t.status,result:i})}catch(t){this.logger.error(`Failed to parse flow result for child ${e}`,t),n.push({jobId:e,status:`failed`,result:{error:`Corrupted result data`}})}return n}async getFlowChildrenCount(e){return this.r.scard(`${this.ns}:flow:children:${e}`)}async getFlowChildrenIds(e){return this.r.smembers(`${this.ns}:flow:children:${e}`)}async getFlowChildren(e){let t=await this.getFlowChildrenIds(e);if(t.length===0)return[];let n=this.r.multi();for(let e of t)n.hgetall(`${this.ns}:job:${e}`);let r=await n.exec(),i=[];for(let 
e=0;e<t.length;e++){let n=t[e],a=r?.[e]?.[1]||{};if(!a||Object.keys(a).length===0){this.logger.warn(`Skipping child job ${n} - not found (likely cleaned up)`);continue}let o=C.fromRawHash(this,n,a);i.push(o)}return i}async addSingle(e){let t=Date.now(),n=0;e.delayMs!==void 0&&e.delayMs>0&&(n=t+e.delayMs);let r=JSON.stringify(e.data),i=e.groupConfig?JSON.stringify(e.groupConfig):``,a=await I(this.r,`enqueue`,[this.ns,e.groupId,r,String(e.maxAttempts),String(e.orderMs),String(n),String(e.jobId),String(this.keepCompleted),String(t),String(this.orderingDelayMs),i],1);if(Array.isArray(a)){let[e,t,n,r,i,o,s,c,l]=a;return C.fromRawHash(this,e,{id:e,groupId:t,data:n,attempts:r,maxAttempts:i,timestamp:o,orderMs:s,delayUntil:c,status:l},l)}return this.getJob(a)}async flushBatch(){if(this.batchTimer&&=(clearTimeout(this.batchTimer),void 0),this.batchBuffer.length===0||this.flushing)return;this.flushing=!0;let e=this.batchBuffer.splice(0);try{this.logger.debug(`Flushing batch of ${e.length} jobs`);let t=Date.now(),n=e.map(e=>({jobId:e.jobId,groupId:e.groupId,data:JSON.stringify(e.data),maxAttempts:e.maxAttempts,orderMs:e.orderMs,delayMs:e.delayMs})),r=await I(this.r,`enqueue-batch`,[this.ns,JSON.stringify(n),String(this.keepCompleted),String(t),String(this.orderingDelayMs)],1);for(let t=0;t<e.length;t++){let n=e[t],i=r[t];try{if(i&&i.length>=9){let[e,t,r,a,o,s,c,l,u]=i,d=C.fromRawHash(this,e,{id:e,groupId:t,data:r,attempts:a,maxAttempts:o,timestamp:s,orderMs:c,delayUntil:l,status:u},u);n.resolve(d)}else throw Error(`Invalid job data returned from batch enqueue`)}catch(e){n.reject(e instanceof Error?e:Error(String(e)))}}}catch(t){for(let n of e)n.reject(t instanceof Error?t:Error(String(t)))}finally{this.flushing=!1,this.batchBuffer.length>0&&setImmediate(()=>this.flushBatch())}}async reserve(){let e=Date.now(),t=(0,l.v7)(),n=await I(this.r,`reserve`,[this.ns,String(e),String(this.vt),String(this.scanLimit),t],1);if(!n)return null;let r=n.split(`|||`);if(r.length!==12)return 
null;let i;try{i=JSON.parse(r[2])}catch(e){this.logger.warn(`Failed to parse job data: ${e.message}, raw: ${r[2]}`),i=null}let a=Number.parseInt(r[7],10);return{id:r[0],groupId:r[1],data:i,attempts:Number.parseInt(r[3],10),maxAttempts:Number.parseInt(r[4],10),seq:Number.parseInt(r[5],10),timestamp:Number.parseInt(r[6],10),orderMs:Number.isNaN(a)?Number.parseInt(r[6],10):a,score:Number(r[8]),deadlineAt:Number.parseInt(r[9],10),isFlowParent:r[10]===`1`,token:r[11]}}async getGroupJobCount(e){let t=`${this.ns}:g:${e}`;return await this.r.zcard(t)}async complete(e){await I(this.r,`complete-job`,[this.ns,e.id,e.groupId,`completed`,String(Date.now()),JSON.stringify(null),String(this.keepCompleted),String(this.keepFailed),String(Date.now()),String(Date.now()),`0`,`0`,e.token||``],1)}async completeWithMetadata(e,t,n){if(!e.token){this.logger.warn(`completeWithMetadata: Missing token for job ${e.id}`);return}await I(this.r,`complete-job`,[this.ns,e.id,e.groupId,`completed`,String(n.finishedOn),JSON.stringify(t??null),String(this.keepCompleted),String(this.keepFailed),String(n.processedOn),String(n.finishedOn),String(n.attempts),String(n.maxAttempts),e.token],1)}async completeAndReserveNextWithMetadata(e,t,n,r,i){let a=Date.now(),o=(0,l.v7)();try{let s=await I(this.r,`complete-and-reserve-next-with-metadata`,[this.ns,e,t,`completed`,String(i.finishedOn),JSON.stringify(r??null),String(this.keepCompleted),String(this.keepFailed),String(i.processedOn),String(i.finishedOn),String(i.attempts),String(i.maxAttempts),String(a),String(this.jobTimeoutMs),n,o],1);if(!s)return null;let c=s.split(`|||`);if(c.length!==12)return this.logger.error(`Queue completeAndReserveNextWithMetadata: unexpected result 
format:`,s),null;let[l,,u,d,f,p,m,h,g,_,v,y]=c;return{id:l,groupId:t,data:JSON.parse(u),attempts:parseInt(d,10),maxAttempts:parseInt(f,10),seq:parseInt(p,10),timestamp:parseInt(m,10),orderMs:parseInt(h,10),score:parseFloat(g),deadlineAt:parseInt(_,10),isFlowParent:v===`1`,token:y}}catch(e){return this.logger.error(`Queue completeAndReserveNextWithMetadata error:`,e),null}}async isJobProcessing(e){return await this.r.zscore(`${this.ns}:processing`,e)!==null}async retry(e,t=0){return e.token?I(this.r,`retry`,[this.ns,e.id,String(t),e.token],1):(this.logger.warn(`retry called without token for job ${e.id}`),0)}async deadLetter(e,t,n){return I(this.r,`dead-letter`,[this.ns,e,t,n||``],1)}async recordCompleted(e,t,n){let r=n.processedOn??Date.now(),i=n.finishedOn??Date.now(),a=n.attempts??0,o=n.maxAttempts??this.defaultMaxAttempts;try{await I(this.r,`complete-job`,[this.ns,e.id,e.groupId,`completed`,String(i),JSON.stringify(t??null),String(this.keepCompleted),String(this.keepFailed),String(r),String(i),String(a),String(o),``],1)}catch(t){throw this.logger.error(`Error recording completion for job ${e.id}:`,t),t}}async recordAttemptFailure(e,t,n){let r=`${this.ns}:job:${e.id}`,i=n.processedOn??Date.now(),a=n.finishedOn??Date.now(),o=typeof t==`string`?t:t.message??`Error`,s=typeof t==`string`?`Error`:t.name??`Error`,c=typeof t==`string`?``:t.stack??``;await this.r.hset(r,`lastErrorMessage`,o,`lastErrorName`,s,`lastErrorStack`,c,`processedOn`,String(i),`finishedOn`,String(a))}async recordFinalFailure(e,t,n){let r=n.processedOn??Date.now(),i=n.finishedOn??Date.now(),a=n.attempts??0,o=n.maxAttempts??this.defaultMaxAttempts,s=typeof t==`string`?t:t.message??`Error`,c=typeof t==`string`?`Error`:t.name??`Error`,l=typeof t==`string`?``:t.stack??``,u=JSON.stringify({message:s,name:c,stack:l});try{await 
I(this.r,`complete-job`,[this.ns,e.id,e.groupId,`failed`,String(i),u,String(this.keepCompleted),String(this.keepFailed),String(r),String(i),String(a),String(o),e.token||``],1)}catch(t){throw this.logger.error(`Error recording final failure for job ${e.id}:`,t),t}}async getCompleted(e=this.keepCompleted){let t=`${this.ns}:completed`,n=await this.r.zrevrange(t,0,Math.max(0,e-1));if(n.length===0)return[];let r=this.r.multi();for(let e of n)r.hmget(`${this.ns}:job:${e}`,`groupId`,`data`,`returnvalue`,`processedOn`,`finishedOn`,`attempts`,`maxAttempts`);let i=await r.exec()??[];return n.map((e,t)=>{let[n,r,a,o,s,c,l]=i[t]?.[1]||[];return{id:e,groupId:n||``,data:r?R(r):null,returnvalue:a?R(a):null,processedOn:o?parseInt(o,10):void 0,finishedOn:s?parseInt(s,10):void 0,attempts:c?parseInt(c,10):0,maxAttempts:l?parseInt(l,10):this.defaultMaxAttempts}})}async getFailed(e=this.keepFailed){let t=`${this.ns}:failed`,n=await this.r.zrevrange(t,0,Math.max(0,e-1));if(n.length===0)return[];let r=this.r.multi();for(let e of n)r.hmget(`${this.ns}:job:${e}`,`groupId`,`data`,`failedReason`,`stacktrace`,`processedOn`,`finishedOn`,`attempts`,`maxAttempts`);let i=await r.exec()??[];return n.map((e,t)=>{let[n,r,a,o,s,c,l,u]=i[t]?.[1]||[];return{id:e,groupId:n||``,data:r?R(r):null,failedReason:a||``,stacktrace:o||void 0,processedOn:s?parseInt(s,10):void 0,finishedOn:c?parseInt(c,10):void 0,attempts:l?parseInt(l,10):0,maxAttempts:u?parseInt(u,10):this.defaultMaxAttempts}})}async getCompletedJobs(e=this.keepCompleted){let t=`${this.ns}:completed`,n=await this.r.zrevrange(t,0,Math.max(0,e-1));if(n.length===0)return[];let r=this.r.multi();for(let e of n)r.hgetall(`${this.ns}:job:${e}`);let i=await r.exec(),a=[];for(let e=0;e<n.length;e++){let t=n[e],r=i?.[e]?.[1]||{};if(!r||Object.keys(r).length===0){this.logger.warn(`Skipping completed job ${t} - not found (likely cleaned up)`);continue}let o=C.fromRawHash(this,t,r,`completed`);a.push(o)}return a}async getFailedJobs(e=this.keepFailed){let 
t=`${this.ns}:failed`,n=await this.r.zrevrange(t,0,Math.max(0,e-1));if(n.length===0)return[];let r=this.r.multi();for(let e of n)r.hgetall(`${this.ns}:job:${e}`);let i=await r.exec(),a=[];for(let e=0;e<n.length;e++){let t=n[e],r=i?.[e]?.[1]||{};if(!r||Object.keys(r).length===0){this.logger.warn(`Skipping failed job ${t} - not found (likely cleaned up)`);continue}let o=C.fromRawHash(this,t,r,`failed`);a.push(o)}return a}async getCompletedCount(){return this.r.zcard(`${this.ns}:completed`)}async getFailedCount(){return this.r.zcard(`${this.ns}:failed`)}async heartbeat(e,t=this.vt){return e.token?I(this.r,`heartbeat`,[this.ns,e.id,e.groupId,String(t),e.token],1):0}getBlockTimeout(e,t){let n=.001;if(t){let e=t-Date.now();return e<=0||e<n*1e3?n:Math.min(e/1e3,5)}return Math.max(n,Math.min(e,5))}isConnectionError(e){if(!e)return!1;let t=`${e.message||``}`;return t.includes(`Connection is closed`)||t.includes(`ECONNREFUSED`)||t.includes(`ETIMEDOUT`)}async reserveBlocking(e=5,t,n){let r=Date.now();if(await this.isPaused())return await B(50),null;if(!(this._consecutiveEmptyReserves>=3)){let e=await this.reserve();if(e)return this.logger.debug(`Immediate reserve successful (${Date.now()-r}ms)`),this._consecutiveEmptyReserves=0,e}let i=this.getBlockTimeout(e,t);this._consecutiveEmptyReserves%10==0&&this.logger.debug(`Starting blocking operation (timeout: ${i}s, consecutive empty: ${this._consecutiveEmptyReserves})`);let a=L(this.ns,`ready`);try{let e=Date.now(),t=await(n??this.r).bzpopmin(a,i),r=Date.now()-e;if(!t||t.length<3)return this.logger.debug(`Blocking timeout/empty (took ${r}ms)`),this._consecutiveEmptyReserves+=1,null;let[,o,s]=t;this._consecutiveEmptyReserves%10==0&&this.logger.debug(`Blocking result: group=${o}, score=${s} (took ${r}ms)`);let c=Date.now(),l=await this.reserveAtomic(o),u=Date.now()-c;if(l.status===`success`){let e=l.job;return this.logger.debug(`Successful job reserve after blocking: ${e.id} from group ${e.groupId} (reserve took 
${u}ms)`),this._consecutiveEmptyReserves=0,e}else if(l.status===`limit_exceeded`)return this.logger.debug(`Blocking found group at capacity: group=${o} (reserve took ${u}ms)`),this._consecutiveEmptyReserves++,null;else return this.logger.debug(`Blocking found empty group: group=${o} (reserve took ${u}ms)`),this._consecutiveEmptyReserves++,this.reserve()}catch(e){let t=Date.now()-r;if(this.logger.error(`Blocking error after ${t}ms:`,e),this.isConnectionError(e))throw this.logger.error(`Connection error detected - rethrowing`),e;return this.logger.warn(`Falling back to regular reserve due to error`),this.reserve()}finally{let e=Date.now()-r;e>1e3&&this.logger.debug(`ReserveBlocking completed in ${e}ms`)}}async reserveAtomic(e){let t=Date.now(),n=(0,l.v7)(),r=await I(this.r,`reserve-atomic`,[this.ns,String(t),String(this.vt),String(e),``,n],1);if(r===`E_LIMIT`)return{status:`limit_exceeded`};if(!r)return{status:`empty`};let i=r.split(`|||`);if(i.length<12)return{status:`empty`};let[a,o,s,c,u,d,f,p,m,h,g,_]=i,v=parseInt(f,10),y=parseInt(p,10);return{status:`success`,job:{id:a,groupId:o,data:JSON.parse(s),attempts:parseInt(c,10),maxAttempts:parseInt(u,10),seq:parseInt(d,10),timestamp:v,orderMs:Number.isNaN(y)?v:y,score:parseFloat(m),deadlineAt:parseInt(h,10),isFlowParent:g===`1`,token:_}}}async getReadyGroups(e=0,t=-1){return this.r.zrange(`${this.ns}:ready`,e,t)}async getGroupOldestTimestamp(e){let t=`${this.ns}:g:${e}`,n=await this.r.zrange(t,0,0);if(!n||n.length===0)return;let r=n[0],i=await this.r.hget(`${this.ns}:job:${r}`,`timestamp`);return i?parseInt(i,10):void 0}async reserveBatch(e=16){let t=Date.now(),n=(0,l.v7)(),r=await I(this.r,`reserve-batch`,[this.ns,String(t),String(this.vt),String(Math.max(1,e)),n],1),i=[];for(let e of r||[]){if(!e)continue;let 
t=e.split(`|||`);t.length===12&&i.push({id:t[0],groupId:t[1],data:R(t[2]),attempts:parseInt(t[3],10),maxAttempts:parseInt(t[4],10),seq:parseInt(t[5],10),timestamp:parseInt(t[6],10),orderMs:parseInt(t[7],10),score:parseFloat(t[8]),deadlineAt:parseInt(t[9],10),isFlowParent:t[10]===`1`,token:t[11]})}return i}async getActiveCount(){return I(this.r,`get-queue-metrics`,[this.ns,`active`],1)}async getWaitingCount(){return I(this.r,`get-queue-metrics`,[this.ns,`waiting`],1)}async getDelayedCount(){return I(this.r,`get-queue-metrics`,[this.ns,`delayed`],1)}async getStagedCount(){return this.r.zcard(`${this.ns}:stage`)}async getReadyGroupCount(){return this.r.zcard(`${this.ns}:ready`)}async getQueueStateSnapshot(){let[e,t,n,r,i,a]=await Promise.all([this.getActiveCount(),this.getWaitingCount(),this.getDelayedCount(),this.getStagedCount(),this.getLimitedGroupCount(),this.getReadyGroupCount()]);return{active:e,waiting:t,delayed:n,staged:r,limited:i,ready:a}}async getQueueMetrics(){let e=await I(this.r,`get-queue-metrics`,[this.ns],1);return JSON.parse(e)}async getActiveJobs(){return I(this.r,`get-jobs`,[this.ns,`active`],1)}async getWaitingJobs(){return I(this.r,`get-jobs`,[this.ns,`waiting`],1)}async getDelayedJobs(){return I(this.r,`get-jobs`,[this.ns,`delayed`],1)}async getUniqueGroups(){return I(this.r,`get-unique-groups`,[this.ns],1)}async getUniqueGroupsCount(){return I(this.r,`get-unique-groups-count`,[this.ns],1)}async getJob(e){return C.fromStore(this,e)}async setupSubscriber(){this.eventsSubscribed&&this.subscriber||(this.subscriber||(this.subscriber=this.r.duplicate(),this.subscriber.on(`message`,(e,t)=>{e===`${this.ns}:events`&&this.handleJobEvent(t)}),this.subscriber.on(`error`,e=>{this.logger.error(`Redis error (events subscriber):`,e)})),await this.subscriber.subscribe(`${this.ns}:events`),this.eventsSubscribed=!0)}handleJobEvent(e){try{let t=R(e);if(!t||typeof t.id!=`string`)return;let 
n=this.waitingJobs.get(t.id);if(!n||n.length===0)return;if(t.status===`completed`){let e=typeof t.result==`string`?R(t.result)??t.result:t.result;n.forEach(t=>t.resolve(e))}else if(t.status===`failed`){let e=typeof t.result==`string`?R(t.result)??{}:t.result??{},r=Error(e&&e.message||`Job failed`);e&&typeof e==`object`&&(typeof e.name==`string`&&(r.name=e.name),typeof e.stack==`string`&&(r.stack=e.stack)),n.forEach(e=>e.reject(r))}this.waitingJobs.delete(t.id)}catch(e){this.logger.error(`Failed to process job event:`,e)}}async waitUntilFinished(e,t=0){let n=await this.getJob(e),r=await n.getState();if(r===`completed`)return n.returnvalue;if(r===`failed`)throw Error(n.failedReason||`Job failed`);return await this.setupSubscriber(),new Promise((n,r)=>{let i,a,o=()=>{i&&clearTimeout(i);let t=this.waitingJobs.get(e);if(!t)return;let n=t.filter(e=>e!==a);n.length===0?this.waitingJobs.delete(e):this.waitingJobs.set(e,n)},s=e=>{o(),n(e)},c=e=>{o(),r(e)};a={resolve:s,reject:c};let l=this.waitingJobs.get(e)??[];l.push(a),this.waitingJobs.set(e,l),t>0&&(i=setTimeout(()=>{c(Error(`Timed out waiting for job ${e} to finish`))},t)),(async()=>{try{let t=await this.getJob(e),n=await t.getState();n===`completed`?s(t.returnvalue):n===`failed`&&c(Error(t.failedReason??`Job failed`))}catch{}})()})}async getJobsByStatus(e,t=0,n=-1){let r=n>=0?n-t+1:100,i=Math.min(r*2,500),a=new Map,o=[],s=async(e,t,n=!1)=>{try{let r=n?await this.r.zrevrange(e,0,i-1):await this.r.zrange(e,0,i-1);for(let e of r)a.set(e,t);o.push(...r)}catch{}},c=new Set(e);if(c.has(`active`)&&await s(`${this.ns}:processing`,`active`),c.has(`delayed`)&&await s(`${this.ns}:delayed`,`delayed`),c.has(`completed`)&&await s(`${this.ns}:completed`,`completed`,!0),c.has(`failed`)&&await s(`${this.ns}:failed`,`failed`,!0),c.has(`waiting`))try{let e=await this.r.smembers(`${this.ns}:groups`);if(e.length>0){let t=e.slice(0,Math.min(100,e.length)),n=this.r.multi(),r=Math.max(1,Math.ceil(i/t.length));for(let e of 
t)n.zrange(`${this.ns}:g:${e}`,0,r-1);let s=await n.exec();for(let e of s||[]){let t=e?.[1]||[];for(let e of t)a.set(e,`waiting`);o.push(...t)}}}catch{}let l=new Set,u=[];for(let e of o)l.has(e)||(l.add(e),u.push(e));let d=n>=0?u.slice(t,n+1):u.slice(t);if(d.length===0)return[];let f=this.r.multi();for(let e of d)f.hgetall(`${this.ns}:job:${e}`);let p=await f.exec(),m=[];for(let e=0;e<d.length;e++){let t=d[e],n=p?.[e]?.[1]||{};if(!n||Object.keys(n).length===0){this.logger.warn(`Skipping job ${t} - not found (likely cleaned up by retention)`);continue}let r=a.get(t),i=C.fromRawHash(this,t,n,r);m.push(i)}return m}async getJobCounts(){let[e,t,n,r,i]=await Promise.all([this.getActiveCount(),this.getWaitingCount(),this.getDelayedCount(),this.getCompletedCount(),this.getFailedCount()]);return{active:e,waiting:t,delayed:n,completed:r,failed:i,paused:0,"waiting-children":0,prioritized:0}}async getLimitedGroupCount(){return this.r.zcard(`${this.ns}:limited`)}async getLimitedGroups(){return this.r.zrange(`${this.ns}:limited`,0,-1)}async checkStalledJobs(e,t,n,r=500){try{return await I(this.r,`check-stalled`,[this.ns,String(e),String(t),String(n),String(r)],1)||[]}catch(e){return this.logger.error(`Error checking stalled jobs:`,e),[]}}async startPromoter(){if(!(this.promoterRunning||this.orderingDelayMs<=0)){this.promoterRunning=!0,this.promoterLockId=(0,l.v7)();try{this.promoterRedis=this.r.duplicate();try{await this.promoterRedis.config(`SET`,`notify-keyspace-events`,`Ex`),this.logger.debug(`Enabled Redis keyspace notifications for staging promoter`)}catch(e){this.logger.warn(`Failed to enable keyspace notifications. 
Promoter will use polling fallback.`,e)}let e=this.promoterRedis.options.db??0,t=`${this.ns}:stage:timer`,n=`__keyevent@${e}__:expired`;await this.promoterRedis.subscribe(n,e=>{e?this.logger.error(`Failed to subscribe to keyspace events:`,e):this.logger.debug(`Subscribed to ${n}`)}),this.promoterRedis.on(`message`,async(e,r)=>{e===n&&r===t&&await this.runPromotion()}),this.promoterInterval=setInterval(async()=>{await this.runPromotion()},100),await this.runPromotion(),this.logger.debug(`Staging promoter started`)}catch(e){this.logger.error(`Failed to start promoter:`,e),this.promoterRunning=!1,await this.stopPromoter()}}}async runPromotion(){if(!this.promoterRunning)return;let e=`${this.ns}:promoter:lock`;try{if(await this.r.set(e,this.promoterLockId,`PX`,3e4,`NX`)===`OK`)try{let e=await I(this.r,`promote-staged`,[this.ns,String(Date.now()),`100`],1);e>0&&this.logger.debug(`Promoted ${e} staged jobs`)}finally{await this.r.get(e)===this.promoterLockId&&await this.r.del(e)}}catch(e){this.logger.error(`Error during promotion:`,e)}}async stopPromoter(){if(this.promoterRunning){if(this.promoterRunning=!1,this.promoterInterval&&=(clearInterval(this.promoterInterval),void 0),this.promoterRedis){try{await this.promoterRedis.unsubscribe(),await this.promoterRedis.quit()}catch{try{this.promoterRedis.disconnect()}catch{}}this.promoterRedis=void 0}this.logger.debug(`Staging promoter stopped`)}}async repairGroups(e=100){let t=`${this.ns}:groups`,n=0,r=`0`;try{do{let[i,a]=await this.r.sscan(t,r,`COUNT`,e);if(r=i,a.length===0)continue;let o=await I(this.r,`repair-groups`,[this.ns,JSON.stringify(a)],1);n+=o}while(r!==`0`);return n>0&&this.logger.info(`Maintenance: Scanned and repaired states for ${n} groups`),n}catch(e){return this.logger.error(`Failed to repair groups:`,e),0}}async close(){if(this.batchConfig&&this.batchBuffer.length>0&&(this.logger.debug(`Flushing ${this.batchBuffer.length} pending batched jobs before close`),await this.flushBatch()),await 
// NOTE(review): minified bundle — we are mid-way through close(). Above:
// runPromotion() takes a short-lived lock (SET NX PX 30000) before running the
// "promote-staged" script and releases it only if it still owns it;
// repairGroups() SSCANs the groups set and calls the "repair-groups" script.
// close() flushed the batch buffer above; below it stops the promoter, tears
// down the pub/sub subscriber and the main connection (quit() falling back to
// disconnect()), and rejects any promises parked in waitingJobs.
this.stopPromoter(),this.subscriber){try{await this.subscriber.unsubscribe(`${this.ns}:events`),await this.subscriber.quit()}catch{try{this.subscriber.disconnect()}catch{}}this.subscriber=void 0,this.eventsSubscribed=!1}if(this.waitingJobs.size>0){let e=Error(`Queue closed`);this.waitingJobs.forEach(t=>{t.forEach(t=>t.reject(e))}),this.waitingJobs.clear()}try{await this.r.quit()}catch{try{this.r.disconnect()}catch{}}}get pausedKey(){return`${this.ns}:paused`}async pause(){await this.r.set(this.pausedKey,`1`)}async resume(){await this.r.del(this.pausedKey)}async isPaused(){return await this.r.get(this.pausedKey)!==null}async waitForEmpty(e=6e4){let t=typeof e==`number`?{timeoutMs:e}:e,n=t.timeoutMs??6e4,r=t.intervalMs??200,i=t.ignoreDelayed?`1`:`0`,a=t.ignoreStaged?`1`:`0`,o=t.throwOnTimeout??!1,s=Date.now();for(;Date.now()-s<n;)try{if(await I(this.r,`is-empty`,[this.ns,i,a],1)===1)return await B(0),!0;await B(r)}catch(e){if(this.isConnectionError(e)){this.logger.warn(`Redis connection error in waitForEmpty, retrying...`),await B(1e3);continue}throw e}if(o)throw new _(await this.getQueueStateSnapshot(),n);return!1}async cleanupPoisonedGroup(e){if(Math.random()>.01)return`skipped`;let t=this._groupCleanupTracking.get(e)||0,n=Date.now();if(n-t<1e4)return`throttled`;if(this._groupCleanupTracking.set(e,n),this._groupCleanupTracking.size>1e3){let e=n-6e4;for(let[t,n]of this._groupCleanupTracking.entries())n<e&&this._groupCleanupTracking.delete(t)}try{let t=await I(this.r,`cleanup-poisoned-group`,[this.ns,e,String(n)],1);return t===`poisoned`?this.logger.warn(`Removed poisoned group ${e} from ready queue`):t===`empty`?this.logger.warn(`Removed empty group ${e} from ready queue`):t===`locked`&&Math.random()<.1&&this.logger.debug(`Detected group ${e} is locked by another worker (this is normal with high concurrency)`),t}catch(t){return this.logger.error(`Error cleaning up group ${e}:`,t),`error`}}maintenanceLockKey(){return`${this.ns}:maintenance:lock`}async 
acquireMaintenanceLock(e=3e4){try{return await this.r.set(this.maintenanceLockKey(),`1`,`PX`,e,`NX`)===`OK`}catch{return!1}}schedulerLockKey(){return`${this.ns}:sched:lock`}async acquireSchedulerLock(e=1500){try{return await this.r.set(this.schedulerLockKey(),`1`,`PX`,e,`NX`)===`OK`}catch{return!1}}async runSchedulerOnce(e=Date.now()){await this.acquireSchedulerLock(this.schedulerLockTtlMs)&&(await this.promoteDelayedJobsBounded(32,e),await this.processRepeatingJobsBounded(16,e))}async promoteDelayedJobsBounded(e=256,t=Date.now()){let n=0;for(let r=0;r<e;r++)try{let e=await I(this.r,`promote-delayed`,[this.ns,String(t),`1`],1);if(!e||e<=0)break;n+=e}catch{break}return n}async processRepeatingJobsBounded(e=128,t=Date.now()){let n=`${this.ns}:repeat:schedule`,r=0;for(let i=0;i<e;i++){let e=await this.r.zrangebyscore(n,0,t,`LIMIT`,0,1);if(!e||e.length===0)break;let i=e[0];try{let e=`${this.ns}:repeat:${i}`,a=await this.r.get(e);if(!a){await this.r.zrem(n,i);continue}let o=JSON.parse(a);if(o.removed){await this.r.zrem(n,i),await this.r.del(e);continue}await this.r.zrem(n,i);let s;s=`every`in o.repeat?t+o.repeat.every:this.getNextCronTime(o.repeat.pattern,t),o.nextRunTime=s,o.lastRunTime=t,await this.r.set(e,JSON.stringify(o)),await this.r.zadd(n,s,i),await I(this.r,`enqueue`,[this.ns,o.groupId,JSON.stringify(o.data),String(o.maxAttempts??this.defaultMaxAttempts),String(o.orderMs??t),`0`,String((0,l.v7)()),String(this.keepCompleted)],1),r++}catch(e){this.logger.error(`Error processing repeating job ${i}:`,e),await this.r.zrem(n,i)}}return r}async promoteDelayedJobs(){try{return await I(this.r,`promote-delayed`,[this.ns,String(Date.now())],1)}catch(e){return this.logger.error(`Error promoting delayed jobs:`,e),0}}async changeDelay(e,t){let n=t>0?Date.now()+t:0;try{return await I(this.r,`change-delay`,[this.ns,e,String(n),String(Date.now())],1)===1}catch(t){return this.logger.error(`Error changing delay for job ${e}:`,t),!1}}async promote(e){return 
this.changeDelay(e,0)}async remove(e){try{return await I(this.r,`remove`,[this.ns,e],1)===1}catch(t){return this.logger.error(`Error removing job ${e}:`,t),!1}}async clean(e,t,n){let r=Date.now()-e;try{return await I(this.r,`clean-status`,[this.ns,n,String(r),String(Math.max(0,Math.min(t,1e5)))],1)??0}catch(e){return console.log(`HERE?`,e),this.logger.error(`Error cleaning ${n} jobs:`,e),0}}async updateData(e,t){let n=`${this.ns}:job:${e}`;if(!await this.r.exists(n))throw Error(`Job ${e} not found`);let r=JSON.stringify(t===void 0?null:t);await this.r.hset(n,`data`,r)}async addRepeatingJob(e){if(!e.repeat)throw Error(`Repeat options are required for repeating jobs`);let t=Date.now(),n=`${e.groupId}:${JSON.stringify(e.repeat)}:${t}:${Math.random().toString(36).slice(2)}`,r;r=`every`in e.repeat?t+e.repeat.every:this.getNextCronTime(e.repeat.pattern,t);let i={groupId:e.groupId,data:e.data===void 0?null:e.data,maxAttempts:e.maxAttempts??this.defaultMaxAttempts,orderMs:e.orderMs,repeat:e.repeat,nextRunTime:r,lastRunTime:null,removed:!1},a=`${this.ns}:repeat:${n}`;await this.r.set(a,JSON.stringify(i)),await this.r.zadd(`${this.ns}:repeat:schedule`,r,n);let o=`${this.ns}:repeat:lookup:${e.groupId}:${JSON.stringify(e.repeat)}`;await this.r.set(o,n);let s=`repeat:${n}`,c=`${this.ns}:job:${s}`;try{await this.r.hmset(c,`id`,s,`groupId`,i.groupId,`data`,JSON.stringify(i.data),`attempts`,`0`,`maxAttempts`,String(i.maxAttempts),`seq`,`0`,`timestamp`,String(Date.now()),`orderMs`,String(i.orderMs??t),`status`,`waiting`)}catch{}return C.fromStore(this,s)}getNextCronTime(e,t){try{return u.default.parseExpression(e,{currentDate:new Date(t)}).next().getTime()}catch{throw Error(`Invalid cron pattern: ${e}`)}}async removeRepeatingJob(e,t){try{let n=`${this.ns}:repeat:lookup:${e}:${JSON.stringify(t)}`,r=await this.r.get(n);if(!r)return!1;let i=`${this.ns}:repeat:${r}`,a=`${this.ns}:repeat:schedule`,o=await this.r.get(i);if(!o)return await this.r.del(n),!1;let 
// NOTE(review): removeRepeatingJob() marks the repeat descriptor removed,
// unschedules it from the zset and deletes the lookup key plus the synthetic
// `repeat:<id>` job hash (best-effort, errors swallowed).
// FIXME: clean() above still contains a leftover debug console.log(`HERE?`, e)
// before the real error log — remove it in an unminified source fix.
// Below the class close: B(ms) = promise-based sleep; V/H = singly-linked list
// node/list; U = async FIFO queue over pending promises (used by the worker
// loop); W = UnrecoverableError; G = a minimal EventEmitter substitute.
s=JSON.parse(o);s.removed=!0,await this.r.set(i,JSON.stringify(s)),await this.r.zrem(a,r),await this.r.del(n);try{let e=`repeat:${r}`;await this.r.del(`${this.ns}:job:${e}`)}catch{}return!0}catch(e){return this.logger.error(`Error removing repeating job:`,e),!1}}};function B(e){return new Promise(t=>setTimeout(t,e))}var V=class{constructor(e){this.value=void 0,this.next=null,this.value=e}},H=class{constructor(){this.length=0,this.head=null,this.tail=null}push(e){let t=new V(e);return this.length?this.tail.next=t:this.head=t,this.tail=t,this.length+=1,t}shift(){if(!this.length)return null;let e=this.head;return this.head=this.head.next,--this.length,e}},U=class{constructor(e=!1){this.ignoreErrors=e,this.queue=new H,this.pending=new Set,this.newPromise()}add(e){this.pending.add(e),e.then(t=>{this.pending.delete(e),this.queue.length===0&&this.resolvePromise(t),this.queue.push(t)}).catch(t=>{this.pending.delete(e),this.ignoreErrors?(this.queue.length===0&&this.resolvePromise(void 0),this.queue.push(void 0)):this.rejectPromise(t)})}async waitAll(){await Promise.all(this.pending)}numTotal(){return this.pending.size+this.queue.length}numPending(){return this.pending.size}numQueued(){return this.queue.length}resolvePromise(e){this.resolve(e),this.newPromise()}rejectPromise(e){this.reject(e),this.newPromise()}newPromise(){this.nextPromise=new Promise((e,t)=>{this.resolve=e,this.reject=t})}async wait(){return this.nextPromise}async fetch(){if(!(this.pending.size===0&&this.queue.length===0)){for(;this.queue.length===0;)try{await this.wait()}catch(e){this.ignoreErrors||console.error(`Unexpected Error in AsyncFifoQueue`,e)}return this.queue.shift()?.value}}},W=class extends Error{constructor(e){super(e),this.name=`UnrecoverableError`}},G=class{constructor(){this.listeners=new Map}on(e,t){return this.listeners.has(e)||this.listeners.set(e,[]),this.listeners.get(e).push(t),this}off(e,t){let n=this.listeners.get(e);if(n){let e=n.indexOf(t);e!==-1&&n.splice(e,1)}return 
// NOTE(review): minified bundle. Below: tail of the minimal emitter G
// (emit/removeAllListeners), K = capped exponential backoff (base 500ms,
// doubling, 30s cap, plus up to 25% random jitter), and q = the Worker class:
// reserve/process loop, stalled-job checker, periodic maintenance, graceful
// close, heartbeat and retry/dead-letter handling. J aliases q; Y is a sleep
// helper duplicating B above.
this}emit(e,...t){let n=this.listeners.get(e);if(n&&n.length>0){for(let r of n)try{r(...t)}catch(t){console.error(`Error in event listener for '${String(e)}':`,t)}return!0}return!1}removeAllListeners(e){return e?this.listeners.delete(e):this.listeners.clear(),this}};const K=(e,t)=>{let n=Math.min(3e4,2**(e-1)*500);return n+Math.floor(n*.25*Math.random())};var q=class extends G{constructor(e){if(super(),this.stopping=!1,this.ready=!1,this.closed=!1,this.blockingClient=null,this.jobsInProgress=new Set,this.lastJobPickupTime=Date.now(),this.totalJobsProcessed=0,this.blockingStats={totalBlockingCalls:0,consecutiveEmptyReserves:0,lastActivityTime:Date.now()},this.emptyReserveBackoffMs=0,!e.handler||typeof e.handler!=`function`)throw Error(`Worker handler must be a function`);this.opts=e,this.q=e.queue,this.name=e.name??this.q.name,this.logger=typeof e.logger==`object`?e.logger:new E(!!e.logger,this.name),this.handler=e.handler;let t=this.q.jobTimeoutMs??3e4;this.hbMs=e.heartbeatMs??Math.max(1e3,Math.floor(t/3)),this.onError=e.onError,this.maxAttempts=e.maxAttempts??this.q.maxAttemptsDefault??3,this.backoff=e.backoff??K,this.schedulerMs=e.schedulerIntervalMs??1e3,this.blockingTimeoutSec=e.blockingTimeoutSec??5,this.concurrency=Math.max(1,e.concurrency??1),this.stalledInterval=e.stalledInterval??2e3,this.maxStalledCount=e.maxStalledCount??3,this.maxJobsPerScan=e.maxJobsPerScan??Math.max(500,this.concurrency*2),this.stalledGracePeriod=e.stalledGracePeriod??0,this.maintenanceIntervalMs=e.maintenanceIntervalMs??6e4,this.setupRedisEventHandlers(),this.q.orderingDelayMs>0&&this.q.startPromoter().catch(e=>{this.logger.error(`Failed to start staging promoter:`,e)}),e.autoStart!==!1&&this.run()}get isClosed(){return this.closed}addJitter(e,t=.1){return e+Math.random()*e*t}setupRedisEventHandlers(){let 
// NOTE(review): setupRedisEventHandlers wires close/error/ready handlers onto
// the queue's shared Redis connection; _runLoop duplicates the connection into
// a dedicated blocking client (maxRetriesPerRequest: null, exponential retry
// strategy capped at 20s), starts the scheduler interval plus stalled and
// maintenance checkers, then loops: batch-reserve first when fully idle, a
// blocking reserve only after >=2 consecutive empty non-blocking reserves,
// with an incremental idle backoff (caps 2s/5s depending on concurrency).
e=this.q.redis;e&&(this.redisCloseHandler=()=>{this.ready=!1,this.emit(`ioredis:close`)},this.redisErrorHandler=e=>{this.emit(`error`,e)},this.redisReadyHandler=()=>{!this.ready&&!this.stopping&&(this.ready=!0,this.emit(`ready`))},e.on(`close`,this.redisCloseHandler),e.on(`error`,this.redisErrorHandler),e.on(`ready`,this.redisReadyHandler))}async run(){if(this.runLoopPromise)return this.runLoopPromise;let e=this._runLoop();return this.runLoopPromise=e,e}async _runLoop(){this.logger.info(`🚀 Worker ${this.name} starting...`);try{this.blockingClient=this.q.redis.duplicate({enableAutoPipelining:!0,maxRetriesPerRequest:null,retryStrategy:e=>Math.max(Math.min(Math.exp(e)*1e3,2e4),1e3)}),this.blockingClient.on(`error`,e=>{this.q.isConnectionError(e)?this.logger.warn(`Blocking client connection error:`,e.message):this.logger.error(`Blocking client error (non-connection):`,e),this.emit(`error`,e instanceof Error?e:Error(String(e)))}),this.blockingClient.on(`close`,()=>{!this.stopping&&!this.closed&&this.logger.warn(`Blocking client disconnected, will reconnect on next operation`)}),this.blockingClient.on(`reconnecting`,()=>{!this.stopping&&!this.closed&&this.logger.info(`Blocking client reconnecting...`)}),this.blockingClient.on(`ready`,()=>{!this.stopping&&!this.closed&&this.logger.info(`Blocking client ready`)})}catch(e){this.logger.error(`Failed to create blocking client:`,e),this.blockingClient=null}let e=this.schedulerMs;this.schedulerTimer=setInterval(async()=>{try{await this.q.runSchedulerOnce()}catch{}},this.addJitter(e));try{await this.checkStalled()}catch(e){this.logger.error(`Error in initial stalled job check:`,e)}this.startStalledChecker(),this.startMaintenance();let t=0,n=new U(!0);for(;!this.stopping||n.numTotal()>0;)try{for(;!this.stopping&&!(n.numTotal()>=this.concurrency);){this.blockingStats.totalBlockingCalls++,this.blockingStats.totalBlockingCalls>=1e9&&(this.blockingStats.totalBlockingCalls=0),this.logger.debug(`Fetching job (call 
#${this.blockingStats.totalBlockingCalls}, processing: ${this.jobsInProgress.size}/${this.concurrency}, queue: ${n.numTotal()} (queued: ${n.numQueued()}, pending: ${n.numPending()}), total: ${n.numTotal()}/${this.concurrency})...`);let e;if(this.opts.strategy)e=(async()=>{let e=await this.opts.strategy.acquireJob(this.q);return e||await this.delay(this.opts.strategy.idleInterval),e})();else{let r=this.concurrency-n.numTotal();if(r>0&&n.numTotal()===0){let e=Math.min(r,8),i=await this.q.reserveBatch(e);if(i.length>0){this.logger.debug(`Batch reserved ${i.length} jobs`);for(let e of i)n.add(Promise.resolve(e));t=0,this.lastJobPickupTime=Date.now(),this.blockingStats.consecutiveEmptyReserves=0,this.blockingStats.lastActivityTime=Date.now(),this.emptyReserveBackoffMs=0;continue}}let i=this.blockingStats.consecutiveEmptyReserves>=2&&n.numTotal()===0&&this.jobsInProgress.size===0,a=this.blockingTimeoutSec;e=i?this.q.reserveBlocking(a,void 0,this.blockingClient??void 0):this.q.reserve()}n.add(e);let r=await e;if(r)t=0,this.lastJobPickupTime=Date.now(),this.blockingStats.consecutiveEmptyReserves=0,this.blockingStats.lastActivityTime=Date.now(),this.emptyReserveBackoffMs=0,this.logger.debug(`Fetched job ${r.id} from group ${r.groupId}`);else{if(this.opts.strategy&&n.numTotal()===0&&this.jobsInProgress.size===0)break;this.blockingStats.consecutiveEmptyReserves++,this.blockingStats.consecutiveEmptyReserves%50==0&&this.logger.debug(`No job available (consecutive empty: ${this.blockingStats.consecutiveEmptyReserves})`);let e=this.concurrency>=100?5:3;if(this.blockingStats.consecutiveEmptyReserves>e&&n.numTotal()===0&&this.jobsInProgress.size===0){let e=this.concurrency>=100?2e3:5e3;this.emptyReserveBackoffMs===0?this.emptyReserveBackoffMs=this.concurrency>=100?100:50:this.emptyReserveBackoffMs=Math.min(e,Math.max(100,this.emptyReserveBackoffMs*1.2)),this.blockingStats.consecutiveEmptyReserves%20==0&&this.logger.debug(`Applying backoff: 
${Math.round(this.emptyReserveBackoffMs)}ms (consecutive empty: ${this.blockingStats.consecutiveEmptyReserves}, jobs in progress: ${this.jobsInProgress.size})`),await this.delay(this.emptyReserveBackoffMs)}if(n.numTotal()===0&&this.jobsInProgress.size===0||n.numTotal()>0||this.jobsInProgress.size>0)break}}let e;do e=await n.fetch()??void 0;while(!e&&n.numQueued()>0);if(e&&typeof e==`object`&&`id`in e){this.totalJobsProcessed++,this.logger.debug(`Processing job ${e.id} from group ${e.groupId} immediately`);let t=this.processJob(e,()=>n.numTotal()<=this.concurrency,this.jobsInProgress);n.add(t)}}catch(e){if(this.stopping)return;if(this.q.isConnectionError(e))if(t++,this.logger.error(`Connection error (retry ${t}/10):`,e),t>=10)this.logger.error(`⚠️ Max connection retries (10) exceeded! Worker will continue but may be experiencing persistent Redis issues.`),this.emit(`error`,Error(`Max connection retries (10) exceeded - worker continuing with backoff`)),await this.delay(2e4),t=0;else{let e=Math.max(Math.min(Math.exp(t)*1e3,2e4),1e3);this.logger.debug(`Waiting ${Math.round(e)}ms before retry (exponential backoff)`),await this.delay(e)}else this.logger.error(`Worker loop error (non-connection, continuing):`,e),this.emit(`error`,e instanceof Error?e:Error(String(e))),t=0,await this.delay(200);this.onError?.(e)}this.logger.info(`Stopped`)}async delay(e){return new Promise(t=>setTimeout(t,e))}async processJob(e,t,n){let r=Array.from(n).find(t=>t.job.id===e.id),i;r?(r.ts=Date.now(),i=r):(i={job:e,ts:Date.now()},n.add(i));try{let r=await this.processSingleJob(e,t);if(r&&typeof r==`object`&&`id`in r&&`groupId`in r){let e={job:r,ts:Date.now()};return n.add(e),n.delete(i),r}return r}finally{n.has(i)&&n.delete(i)}}async completeJob(e,t,n,r,i){if(n?.()){let n=await this.q.completeAndReserveNextWithMetadata(e.id,e.groupId,e.token,t,{processedOn:r||Date.now(),finishedOn:i||Date.now(),attempts:e.attempts,maxAttempts:e.maxAttempts});if(n)return this.logger.debug(`Got next job ${n.id} 
from same group ${n.groupId} atomically`),n;this.logger.debug(`Atomic chaining returned nil for job ${e.id} - job completed, but no next job chained`),Math.random()<.1&&await new Promise(e=>setTimeout(e,Math.random()*100))}else await this.q.completeWithMetadata({id:e.id,groupId:e.groupId,token:e.token},t,{processedOn:r||Date.now(),finishedOn:i||Date.now(),attempts:e.attempts,maxAttempts:e.maxAttempts})}startStalledChecker(){this.stalledInterval<=0||(this.stalledCheckTimer=setInterval(async()=>{try{await this.checkStalled()}catch(e){this.logger.error(`Error in stalled job checker:`,e),this.emit(`error`,e instanceof Error?e:Error(String(e)))}},this.stalledInterval))}async checkStalled(){if(!(this.stopping||this.closed))try{let e=Date.now(),t=await this.q.checkStalledJobs(e,this.stalledGracePeriod,this.maxStalledCount,this.maxJobsPerScan);if(t.length>0)for(let e=0;e<t.length;e+=3){let n=t[e],r=t[e+1],i=t[e+2];i===`recovered`?(this.logger.info(`Recovered stalled job ${n} from group ${r}`),this.emit(`stalled`,n,r)):i===`failed`&&(this.logger.warn(`Failed stalled job ${n} from group ${r} (exceeded max stalled count)`),this.emit(`stalled`,n,r))}}catch(e){this.logger.error(`Error checking stalled jobs:`,e)}}startMaintenance(){if(this.maintenanceIntervalMs<=0)return;let e=Math.random()*this.maintenanceIntervalMs*.1,t=this.maintenanceIntervalMs+e;this.maintenanceTimer=setInterval(async()=>{if(!(this.stopping||this.closed))try{let e=Math.floor(this.maintenanceIntervalMs/2);await this.q.acquireMaintenanceLock(e)&&await this.q.repairGroups()}catch(e){this.logger.error(`Error in maintenance (repairGroups):`,e)}},t)}getWorkerMetrics(){let 
// NOTE(review): getWorkerMetrics() snapshots counters and per-job processing
// times; close(timeout) stops the timers, waits (polling 100ms) for in-flight
// jobs up to the timeout, then quits the blocking client gracefully when jobs
// remain and the timeout is positive, otherwise disconnects; finally it
// detaches the shared-connection handlers and emits `closed`. Jobs still
// running at timeout are reported via the `graceful-timeout` event.
e=Date.now();return{name:this.name,totalJobsProcessed:this.totalJobsProcessed,lastJobPickupTime:this.lastJobPickupTime,timeSinceLastJob:this.lastJobPickupTime>0?e-this.lastJobPickupTime:null,blockingStats:{...this.blockingStats},isProcessing:this.jobsInProgress.size>0,jobsInProgressCount:this.jobsInProgress.size,jobsInProgress:Array.from(this.jobsInProgress).map(t=>({jobId:t.job.id,groupId:t.job.groupId,processingTimeMs:e-t.ts}))}}async close(e=3e4){this.stopping=!0,await this.delay(100),this.schedulerTimer&&clearInterval(this.schedulerTimer),this.stalledCheckTimer&&clearInterval(this.stalledCheckTimer),this.maintenanceTimer&&clearInterval(this.maintenanceTimer);let t=Date.now();for(;this.jobsInProgress.size>0&&Date.now()-t<e;)await Y(100);if(this.blockingClient){try{this.jobsInProgress.size>0&&e>0?(this.logger.debug(`Gracefully closing blocking client (quit)...`),await this.blockingClient.quit()):(this.logger.debug(`Force closing blocking client (disconnect)...`),this.blockingClient.disconnect())}catch(e){this.logger.debug(`Error closing blocking client:`,e)}this.blockingClient=null}if(this.runLoopPromise){let t=this.jobsInProgress.size>0?e:2e3,n=new Promise(e=>{setTimeout(e,t)});try{await Promise.race([this.runLoopPromise,n])}catch(e){this.logger.warn(`Error while waiting for run loop to exit:`,e)}}if(this.jobsInProgress.size>0){this.logger.warn(`Worker stopped with ${this.jobsInProgress.size} jobs still processing after ${e}ms timeout.`);let t=Date.now();for(let e of this.jobsInProgress)this.emit(`graceful-timeout`,C.fromReserved(this.q,e.job,{processedOn:e.ts,finishedOn:t,status:`active`}))}this.jobsInProgress.clear(),this.ready=!1,this.closed=!0;try{let e=this.q.redis;e&&(this.redisCloseHandler&&e.off?.(`close`,this.redisCloseHandler),this.redisErrorHandler&&e.off?.(`error`,this.redisErrorHandler),this.redisReadyHandler&&e.off?.(`ready`,this.redisReadyHandler))}catch{}this.emit(`closed`)}getCurrentJob(){if(this.jobsInProgress.size===0)return null;let 
// NOTE(review): getCurrentJob(s) expose the active jobs; processSingleJob()
// defers heartbeat start by min(10% of jobTimeoutMs, 2s), then heartbeats
// every min(hbMs, timeout/3, 10s), stopping the heartbeat if it returns 0
// (job taken elsewhere). Success path goes through completeJob(); failure
// path falls through to handleJobFailure() below.
e=Array.from(this.jobsInProgress)[0],t=Date.now();return{job:C.fromReserved(this.q,e.job,{processedOn:e.ts,status:`active`}),processingTimeMs:t-e.ts}}getCurrentJobs(){let e=Date.now();return Array.from(this.jobsInProgress).map(t=>({job:C.fromReserved(this.q,t.job,{processedOn:t.ts,status:`active`}),processingTimeMs:e-t.ts}))}isProcessing(){return this.jobsInProgress.size>0}async add(e){return this.q.add(e)}async processSingleJob(e,t){let n=Date.now(),r,i,a=()=>{let t=this.q.jobTimeoutMs||3e4,n=Math.min(this.hbMs,Math.floor(t/3),1e4);this.logger.debug(`Starting heartbeat for job ${e.id} (interval: ${n}ms, concurrency: ${this.concurrency})`),r=setInterval(async()=>{try{await this.q.heartbeat({id:e.id,groupId:e.groupId,token:e.token})===0&&(this.logger.warn(`Heartbeat failed for job ${e.id} - job may have been removed or completed elsewhere`),r&&clearInterval(r))}catch(t){let n=this.q.isConnectionError(t);(!n||!this.stopping)&&this.logger.error(`Heartbeat error for job ${e.id}:`,t instanceof Error?t.message:String(t)),this.onError?.(t,C.fromReserved(this.q,e,{status:`active`})),(!n||!this.stopping)&&this.emit(`error`,t instanceof Error?t:Error(String(t)))}},n)};try{let o=this.q.jobTimeoutMs||3e4,s=Math.min(o*.1,2e3);i=setTimeout(()=>{a()},s);let c=C.fromReserved(this.q,e,{processedOn:n,status:`active`}),l=await this.handler(c);i&&clearTimeout(i),r&&clearInterval(r);let u=Date.now(),d=await this.completeJob(e,l,t,n,u);return this.blockingStats.consecutiveEmptyReserves=0,this.emptyReserveBackoffMs=0,this.emit(`completed`,C.fromReserved(this.q,e,{processedOn:n,finishedOn:u,returnvalue:l,status:`completed`})),d}catch(t){i&&clearTimeout(i),r&&clearInterval(r),await this.handleJobFailure(t,e,n)}}async handleJobFailure(e,t,n){let r=e instanceof Error?e:Error(String(e)),i=t.attempts+1;this.blockingStats.consecutiveEmptyReserves=0,this.emptyReserveBackoffMs=0;let a=e instanceof W,o=i>=this.maxAttempts,s=`failed`,c,l=Date.now();if(a||o){s=`failed`;let c=o?t.maxAttempts:i;await 
// NOTE(review): UnrecoverableError (W) or exhausted attempts => deadLetterJob;
// otherwise retry with this.backoff(attempt, err) — retry() returning -1 means
// dead-letter anyway, -2 means the lock was lost to another worker (abort).
// After the class: J aliases q, Y is a sleep helper, and X (cut off at the end
// of this chunk) opens a template literal holding an embedded Lua script.
this.deadLetterJob(e,t,n,l,c),a&&this.logger.info(`Unrecoverable error for job ${t.id}: ${r.message}. Skipping retries.`)}else{let r=this.backoff(i,e);s=r>0?`delayed`:`waiting`,c=r;let a=await this.q.retry({id:t.id,token:t.token},r);if(a===-1)s=`failed`,c=void 0,await this.deadLetterJob(e,t,n,l,t.maxAttempts);else if(a===-2){this.logger.warn(`Lock lost for job ${t.id}: cannot retry as another worker has taken over`);return}else await this.recordFailureAttempt(e,t,n,l,i)}let u=C.fromReserved(this.q,t,{processedOn:n,status:`active`});this.onError?.(e,u),this.emit(`failed`,C.fromReserved(this.q,t,{processedOn:n,finishedOn:l,failedReason:r.message,stacktrace:r.stack,status:s,delayMs:c}));try{this.emit(`error`,r)}catch{}}async deadLetterJob(e,t,n,r,i){this.logger.info(`Dead lettering job ${t.id} from group ${t.groupId} (attempts: ${i}/${t.maxAttempts})`);let a=e instanceof Error?e:Error(String(e));try{await this.q.recordFinalFailure({id:t.id,groupId:t.groupId,token:t.token},{name:a.name,message:a.message,stack:a.stack},{processedOn:n,finishedOn:r,attempts:i,maxAttempts:t.maxAttempts,data:t.data})}catch(e){this.logger.warn(`Failed to record final failure`,e)}await this.q.deadLetter(t.id,t.groupId,t.token)}async recordFailureAttempt(e,t,n,r,i){let a=e instanceof Error?e:Error(String(e));try{await this.q.recordAttemptFailure({id:t.id,groupId:t.groupId},{name:a.name,message:a.message,stack:a.stack},{processedOn:n,finishedOn:r,attempts:i,maxAttempts:t.maxAttempts})}catch(e){this.logger.warn(`Failed to record attempt failure`,e)}}};const J=q;function Y(e){return new Promise(t=>setTimeout(t,e))}const X=`
56
56
  local ns = KEYS[1]
57
57
  local limit = tonumber(ARGV[1]) or 100
58
58
  local defaultPriority = tonumber(ARGV[2]) or 1