groupmq-plus 1.2.2 → 1.2.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.mts CHANGED
@@ -100,7 +100,10 @@ declare class Job<T = any> {
100
100
  * Get the return values of all child jobs in a flow.
101
101
  * @returns Object mapping child job IDs to their return values
102
102
  */
103
- getChildrenValues(): Promise<Record<string, any>>;
103
+ getChildrenValues(): Promise<{
104
+ jobId: string;
105
+ result: any;
106
+ }[]>;
104
107
  /**
105
108
  * Get the number of remaining child jobs that haven't completed yet.
106
109
  * @returns Number of remaining dependencies, or null if not a parent job
@@ -556,9 +559,12 @@ declare class Queue<T = any> {
556
559
  /**
557
560
  * Gets the results of all child jobs in a flow.
558
561
  * @param parentId The ID of the parent job
559
- * @returns An object mapping child job IDs to their results
562
+ * @returns An array of objects containing child job IDs and their results
560
563
  */
561
- getFlowResults(parentId: string): Promise<Record<string, any>>;
564
+ getFlowResults(parentId: string): Promise<Array<{
565
+ jobId: string;
566
+ result: any;
567
+ }>>;
562
568
  /**
563
569
  * Gets all child job IDs for a parent job in a flow.
564
570
  * @param parentId The ID of the parent job
package/dist/index.d.ts CHANGED
@@ -100,7 +100,10 @@ declare class Job<T = any> {
100
100
  * Get the return values of all child jobs in a flow.
101
101
  * @returns Object mapping child job IDs to their return values
102
102
  */
103
- getChildrenValues(): Promise<Record<string, any>>;
103
+ getChildrenValues(): Promise<{
104
+ jobId: string;
105
+ result: any;
106
+ }[]>;
104
107
  /**
105
108
  * Get the number of remaining child jobs that haven't completed yet.
106
109
  * @returns Number of remaining dependencies, or null if not a parent job
@@ -556,9 +559,12 @@ declare class Queue<T = any> {
556
559
  /**
557
560
  * Gets the results of all child jobs in a flow.
558
561
  * @param parentId The ID of the parent job
559
- * @returns An object mapping child job IDs to their results
562
+ * @returns An array of objects containing child job IDs and their results
560
563
  */
561
- getFlowResults(parentId: string): Promise<Record<string, any>>;
564
+ getFlowResults(parentId: string): Promise<Array<{
565
+ jobId: string;
566
+ result: any;
567
+ }>>;
562
568
  /**
563
569
  * Gets all child job IDs for a parent job in a flow.
564
570
  * @param parentId The ID of the parent job
package/dist/index.js CHANGED
@@ -1,2 +1,2 @@
1
- var e=Object.create,t=Object.defineProperty,n=Object.getOwnPropertyDescriptor,r=Object.getOwnPropertyNames,i=Object.getPrototypeOf,a=Object.prototype.hasOwnProperty,o=(e,t)=>()=>(t||e((t={exports:{}}).exports,t),t.exports),s=(e,i,o,s)=>{if(i&&typeof i==`object`||typeof i==`function`)for(var c=r(i),l=0,u=c.length,d;l<u;l++)d=c[l],!a.call(e,d)&&d!==o&&t(e,d,{get:(e=>i[e]).bind(null,d),enumerable:!(s=n(i,d))||s.enumerable});return e},c=(n,r,a)=>(a=n==null?{}:e(i(n)),s(r||!n||!n.__esModule?t(a,`default`,{value:n,enumerable:!0}):a,n));let l=require(`node:crypto`);l=c(l);let u=require(`cron-parser`);u=c(u);let d=require(`node:fs`);d=c(d);let f=require(`node:path`);f=c(f);let p=require(`node:url`);p=c(p);var m=o((e=>{Object.defineProperty(e,`__esModule`,{value:!0}),e.BaseAdapter=void 0,e.BaseAdapter=class{constructor(e,t={}){this.formatters=new Map,this._visibilityGuard=()=>!0,this.readOnlyMode=t.readOnlyMode===!0,this.allowRetries=this.readOnlyMode?!1:t.allowRetries!==!1,this.allowCompletedRetries=this.allowRetries&&t.allowCompletedRetries!==!1,this.prefix=t.prefix||``,this.delimiter=t.delimiter||``,this.description=t.description||``,this.displayName=t.displayName||``,this.type=e,this.externalJobUrl=t.externalJobUrl}getDescription(){return this.description}getDisplayName(){return this.displayName}setFormatter(e,t){this.formatters.set(e,t)}format(e,t,n=t){let r=this.formatters.get(e);return typeof r==`function`?r(t):n}setVisibilityGuard(e){this._visibilityGuard=e}isVisible(e){return this._visibilityGuard(e)}}})),h=c(m()),g=class extends h.BaseAdapter{constructor(e,t={}){let n=e.namespace;super(n,t),this.queue=e,this.options=t}getDescription(){return this.options.description||``}getDisplayName(){return this.options.displayName||``}getName(){return`${this.options.prefix||``}${this.options.delimiter||``}${this.queue.rawNamespace}`.replace(/(^[\s:]+)|([\s:]+$)/g,``)}async getRedisInfo(){return this.queue.redis.info()}async getJob(e){return await this.queue.getJob(e)}async 
getJobs(e,t,n){return await this.queue.getJobsByStatus(e,t,n)}async getJobCounts(){let e=await this.queue.getJobCounts();return{latest:0,active:e.active,waiting:e.waiting,"waiting-children":e[`waiting-children`],prioritized:e.prioritized,completed:e.completed,failed:e.failed,delayed:e.delayed,paused:e.paused}}async getJobLogs(e){return[]}getStatuses(){return[`latest`,`active`,`waiting`,`waiting-children`,`prioritized`,`completed`,`failed`,`delayed`,`paused`]}getJobStatuses(){return[`active`,`waiting`,`waiting-children`,`prioritized`,`completed`,`failed`,`delayed`,`paused`]}assertWritable(){if(this.options.readOnlyMode)throw Error(`This adapter is in read-only mode. Mutations are disabled.`)}async clean(e,t){this.assertWritable(),!(e!==`completed`&&e!==`failed`&&e!==`delayed`)&&await this.queue.clean(t,2**53-1,e)}async addJob(e,t,n){return this.assertWritable(),await this.queue.add({groupId:n.groupId??Math.random().toString(36).substring(2,15),data:t,...n})}async isPaused(){return this.queue.isPaused()}async pause(){this.assertWritable(),await this.queue.pause()}async resume(){this.assertWritable(),await this.queue.resume()}async empty(){throw this.assertWritable(),Error(`Not implemented`)}async promoteAll(){throw this.assertWritable(),Error(`Not implemented`)}};async function _(e,t=6e4){return e.waitForEmpty(t)}function v(e){let t=e.map((e,t)=>{let n=e.getCurrentJob();return{index:t,isProcessing:e.isProcessing(),currentJob:n?{jobId:n.job.id,groupId:n.job.groupId,processingTimeMs:n.processingTimeMs}:void 0}}),n=t.filter(e=>e.isProcessing).length,r=t.length-n;return{total:e.length,processing:n,idle:r,workers:t}}var y=class 
e{constructor(e){this.queue=e.queue,this.id=e.id,this.name=e.name??`groupmq`,this.data=e.data,this.groupId=e.groupId,this.attemptsMade=e.attemptsMade,this.opts=e.opts,this.processedOn=e.processedOn,this.finishedOn=e.finishedOn,this.failedReason=e.failedReason,this.stacktrace=e.stacktrace,this.returnvalue=e.returnvalue,this.timestamp=e.timestamp,this.orderMs=e.orderMs,this.status=e.status??`unknown`,this.parentId=e.parentId}async getState(){return this.status??`unknown`}toJSON(){return{id:this.id,name:this.name,data:this.data,groupId:this.groupId,attemptsMade:this.attemptsMade,opts:this.opts,processedOn:this.processedOn,finishedOn:this.finishedOn,failedReason:this.failedReason,stacktrace:this.stacktrace?[this.stacktrace]:null,returnvalue:this.returnvalue,timestamp:this.timestamp,orderMs:this.orderMs,status:this.status,progress:0}}changeDelay(e){return this.queue.changeDelay(this.id,e)}async promote(){await this.queue.promote(this.id)}async remove(){await this.queue.remove(this.id)}async retry(e){await this.queue.retry(this.id)}async updateData(e){await this.queue.updateData(this.id,e)}async update(e){await this.updateData(e)}async waitUntilFinished(e=0){return this.queue.waitUntilFinished(this.id,e)}async getChildren(){return this.queue.getFlowChildren(this.id)}async getChildrenValues(){return this.queue.getFlowResults(this.id)}async getDependenciesCount(){return this.queue.getFlowDependencies(this.id)}async getParent(){if(this.parentId)try{return await this.queue.getJob(this.parentId)}catch{return}}static fromReserved(t,n,r){return new e({queue:t,id:n.id,name:`groupmq`,data:n.data,groupId:n.groupId,attemptsMade:n.attempts,opts:{attempts:n.maxAttempts,delay:r?.delayMs},processedOn:r?.processedOn,finishedOn:r?.finishedOn,failedReason:r?.failedReason,stacktrace:r?.stacktrace,returnvalue:r?.returnvalue,timestamp:n.timestamp?n.timestamp:Date.now(),orderMs:n.orderMs,status:x(r?.status)})}static fromRawHash(t,n,r,i){let 
a=r.groupId??``,o=r.data?b(r.data):null,s=r.attempts?parseInt(r.attempts,10):0,c=r.maxAttempts?parseInt(r.maxAttempts,10):t.maxAttemptsDefault,l=r.timestamp?parseInt(r.timestamp,10):0,u=r.orderMs?parseInt(r.orderMs,10):void 0,d=r.delayUntil?parseInt(r.delayUntil,10):0,f=r.processedOn?parseInt(r.processedOn,10):void 0,p=r.finishedOn?parseInt(r.finishedOn,10):void 0,m=(r.failedReason??r.lastErrorMessage)||void 0,h=(r.stacktrace??r.lastErrorStack)||void 0,g=r.returnvalue?b(r.returnvalue):void 0,_=r.parentId||void 0;return new e({queue:t,id:n,name:`groupmq`,data:o,groupId:a,attemptsMade:s,opts:{attempts:c,delay:d&&d>Date.now()?d-Date.now():void 0},processedOn:f,finishedOn:p,failedReason:m,stacktrace:h,returnvalue:g,timestamp:l||Date.now(),orderMs:u,status:i??x(r.status),parentId:_})}static async fromStore(t,n){let r=`${t.namespace}:job:${n}`,i=await t.redis.hgetall(r);if(!i||Object.keys(i).length===0)throw Error(`Job ${n} not found`);let a=i.groupId??``,o=i.data?b(i.data):null,s=i.attempts?parseInt(i.attempts,10):0,c=i.maxAttempts?parseInt(i.maxAttempts,10):t.maxAttemptsDefault,l=i.timestamp?parseInt(i.timestamp,10):0,u=i.orderMs?parseInt(i.orderMs,10):void 0,d=i.delayUntil?parseInt(i.delayUntil,10):0,f=i.processedOn?parseInt(i.processedOn,10):void 0,p=i.finishedOn?parseInt(i.finishedOn,10):void 0,m=(i.failedReason??i.lastErrorMessage)||void 0,h=(i.stacktrace??i.lastErrorStack)||void 0,g=i.returnvalue?b(i.returnvalue):void 0,_=i.parentId||void 0,[v,y]=await Promise.all([t.redis.zscore(`${t.namespace}:processing`,n),t.redis.zscore(`${t.namespace}:delayed`,n)]),S=i.status;return v===null?y===null?a&&await t.redis.zscore(`${t.namespace}:g:${a}`,n)!==null&&(S=`waiting`):S=`delayed`:S=`active`,new e({queue:t,id:n,name:`groupmq`,data:o,groupId:a,attemptsMade:s,opts:{attempts:c,delay:d&&d>Date.now()?d-Date.now():void 0},processedOn:f,finishedOn:p,failedReason:m,stacktrace:h,returnvalue:g,timestamp:l||Date.now(),orderMs:u,status:x(S),parentId:_})}};function b(e){try{return 
JSON.parse(e)}catch{return null}}function x(e){return e&&[`latest`,`active`,`waiting`,`waiting-children`,`prioritized`,`completed`,`failed`,`delayed`,`paused`].includes(e)?e:`unknown`}var S=class{constructor(e,t){this.enabled=e,this.name=t}debug(...e){this.enabled&&console.debug(`[${this.name}]`,...e)}info(...e){this.enabled&&console.log(`[${this.name}]`,...e)}warn(...e){this.enabled&&console.warn(`⚠️ [${this.name}]`,...e)}error(...e){this.enabled&&console.error(`💥 [${this.name}]`,...e)}};const C=new WeakMap;function w(e){let t=f.default.dirname((0,p.fileURLToPath)(require(`url`).pathToFileURL(__filename).href)),n=[f.default.join(t,`${e}.lua`),f.default.join(t,`lua`,`${e}.lua`)];for(let e of n)if(d.default.existsSync(e))return e;return n[0]}async function T(e,t){let n=C.get(e);n||(n=new Map,C.set(e,n));let r=n.get(t);if(r)return r;let i=w(t),a=d.default.readFileSync(i,`utf8`),o=await e.script(`load`,a);return n.set(t,o),o}async function E(e,t,n,r){let i=await T(e,t);return e.evalsha(i,r,...n)}function D(e,...t){return[e,...t].join(`:`)}function O(e){try{return JSON.parse(e)}catch{return null}}var k=class{constructor(e){this._consecutiveEmptyReserves=0,this.eventsSubscribed=!1,this.waitingJobs=new Map,this.promoterRunning=!1,this.batchBuffer=[],this.flushing=!1,this._groupCleanupTracking=new Map,this.r=e.redis,this.rawNs=e.namespace,this.name=e.namespace,this.ns=`groupmq:${this.rawNs}`;let t=e.jobTimeoutMs??3e4;this.vt=Math.max(1,t),this.defaultMaxAttempts=e.maxAttempts??3,this.scanLimit=e.reserveScanLimit??20,this.keepCompleted=Math.max(0,e.keepCompleted??0),this.keepFailed=Math.max(0,e.keepFailed??0),this.schedulerLockTtlMs=e.schedulerLockTtlMs??1500,this.orderingDelayMs=e.orderingDelayMs??0,e.autoBatch&&(this.batchConfig=typeof e.autoBatch==`boolean`?{size:10,maxWaitMs:10}:{size:e.autoBatch.size??10,maxWaitMs:e.autoBatch.maxWaitMs??10}),this.logger=typeof e.logger==`object`?e.logger:new S(!!e.logger,this.namespace),this.r.on(`error`,e=>{this.logger.error(`Redis 
error (main):`,e)})}get redis(){return this.r}get namespace(){return this.ns}get rawNamespace(){return this.rawNs}get jobTimeoutMs(){return this.vt}get maxAttemptsDefault(){return this.defaultMaxAttempts}async add(e){let t=e.maxAttempts??this.defaultMaxAttempts,n=e.orderMs??Date.now(),r=Date.now(),i=e.jobId??(0,l.randomUUID)();if(e.repeat)return this.addRepeatingJob({...e,orderMs:n,maxAttempts:t});let a;if(e.delay!==void 0&&e.delay>0)a=e.delay;else if(e.runAt!==void 0){let t=e.runAt instanceof Date?e.runAt.getTime():e.runAt;a=Math.max(0,t-r)}let o=e.data===void 0?null:e.data;return this.batchConfig?new Promise((r,s)=>{this.batchBuffer.push({groupId:e.groupId,data:o,jobId:i,maxAttempts:t,delayMs:a,orderMs:n,resolve:r,reject:s}),this.batchBuffer.length>=this.batchConfig.size?this.flushBatch():this.batchTimer||=setTimeout(()=>this.flushBatch(),this.batchConfig.maxWaitMs)}):this.addSingle({...e,data:o,jobId:i,maxAttempts:t,orderMs:n,delayMs:a})}async addFlow(e){let t=e.parent.jobId??(0,l.randomUUID)(),n=e.parent.maxAttempts??this.defaultMaxAttempts,r=e.parent.orderMs??Date.now(),i=JSON.stringify(e.parent.data===void 0?null:e.parent.data),a=[],o=[];for(let t of e.children){let e=t.jobId??(0,l.randomUUID)(),n=t.maxAttempts??this.defaultMaxAttempts,r=t.orderMs??Date.now(),i=t.delay??0,s=JSON.stringify(t.data===void 0?null:t.data);a.push(e),o.push(e,t.groupId,s,n.toString(),r.toString(),i.toString())}let s=Date.now();return await E(this.r,`enqueue-flow`,[this.ns,t,e.parent.groupId,i,n.toString(),r.toString(),s.toString(),...o],1),new y({queue:this,id:t,groupId:e.parent.groupId,data:e.parent.data,status:`waiting-children`,attemptsMade:0,opts:{attempts:n},timestamp:s,orderMs:r})}async getFlowDependencies(e){let t=await this.r.hget(`${this.ns}:job:${e}`,`flowRemaining`);return t===null?null:parseInt(t,10)}async getFlowResults(e){let t=await this.r.hgetall(`${this.ns}:flow:results:${e}`),n={};for(let[e,r]of Object.entries(t))try{n[e]=JSON.parse(r)}catch{n[e]=r}return n}async 
getFlowChildrenIds(e){return this.r.smembers(`${this.ns}:flow:children:${e}`)}async getFlowChildren(e){let t=await this.getFlowChildrenIds(e);if(t.length===0)return[];let n=this.r.multi();for(let e of t)n.hgetall(`${this.ns}:job:${e}`);let r=await n.exec(),i=[];for(let e=0;e<t.length;e++){let n=t[e],a=r?.[e]?.[1]||{};if(!a||Object.keys(a).length===0){this.logger.warn(`Skipping child job ${n} - not found (likely cleaned up)`);continue}let o=y.fromRawHash(this,n,a);i.push(o)}return i}async addSingle(e){let t=Date.now(),n=0;e.delayMs!==void 0&&e.delayMs>0&&(n=t+e.delayMs);let r=JSON.stringify(e.data),i=await E(this.r,`enqueue`,[this.ns,e.groupId,r,String(e.maxAttempts),String(e.orderMs),String(n),String(e.jobId),String(this.keepCompleted),String(t),String(this.orderingDelayMs)],1);if(Array.isArray(i)){let[e,t,n,r,a,o,s,c,l]=i;return y.fromRawHash(this,e,{id:e,groupId:t,data:n,attempts:r,maxAttempts:a,timestamp:o,orderMs:s,delayUntil:c,status:l},l)}return this.getJob(i)}async flushBatch(){if(this.batchTimer&&=(clearTimeout(this.batchTimer),void 0),this.batchBuffer.length===0||this.flushing)return;this.flushing=!0;let e=this.batchBuffer.splice(0);try{this.logger.debug(`Flushing batch of ${e.length} jobs`);let t=Date.now(),n=e.map(e=>({jobId:e.jobId,groupId:e.groupId,data:JSON.stringify(e.data),maxAttempts:e.maxAttempts,orderMs:e.orderMs,delayMs:e.delayMs})),r=await E(this.r,`enqueue-batch`,[this.ns,JSON.stringify(n),String(this.keepCompleted),String(t),String(this.orderingDelayMs)],1);for(let t=0;t<e.length;t++){let n=e[t],i=r[t];try{if(i&&i.length>=9){let[e,t,r,a,o,s,c,l,u]=i,d=y.fromRawHash(this,e,{id:e,groupId:t,data:r,attempts:a,maxAttempts:o,timestamp:s,orderMs:c,delayUntil:l,status:u},u);n.resolve(d)}else throw Error(`Invalid job data returned from batch enqueue`)}catch(e){n.reject(e instanceof Error?e:Error(String(e)))}}}catch(t){for(let n of e)n.reject(t instanceof 
Error?t:Error(String(t)))}finally{this.flushing=!1,this.batchBuffer.length>0&&setImmediate(()=>this.flushBatch())}}async reserve(){let e=Date.now(),t=await E(this.r,`reserve`,[this.ns,String(e),String(this.vt),String(this.scanLimit)],1);if(!t)return null;let n=t.split(`|||`);if(n.length!==10)return null;let r;try{r=JSON.parse(n[2])}catch(e){this.logger.warn(`Failed to parse job data: ${e.message}, raw: ${n[2]}`),r=null}let i=Number.parseInt(n[7],10);return{id:n[0],groupId:n[1],data:r,attempts:Number.parseInt(n[3],10),maxAttempts:Number.parseInt(n[4],10),seq:Number.parseInt(n[5],10),timestamp:Number.parseInt(n[6],10),orderMs:Number.isNaN(i)?Number.parseInt(n[6],10):i,score:Number(n[8]),deadlineAt:Number.parseInt(n[9],10)}}async getGroupJobCount(e){let t=`${this.ns}:g:${e}`;return await this.r.zcard(t)}async complete(e){await E(this.r,`complete`,[this.ns,e.id,e.groupId],1)}async completeWithMetadata(e,t,n){await E(this.r,`complete-with-metadata`,[this.ns,e.id,e.groupId,`completed`,String(n.finishedOn),JSON.stringify(t??null),String(this.keepCompleted),String(this.keepFailed),String(n.processedOn),String(n.finishedOn),String(n.attempts),String(n.maxAttempts)],1)}async completeAndReserveNextWithMetadata(e,t,n,r){let i=Date.now();try{let a=await E(this.r,`complete-and-reserve-next-with-metadata`,[this.ns,e,t,`completed`,String(r.finishedOn),JSON.stringify(n??null),String(this.keepCompleted),String(this.keepFailed),String(r.processedOn),String(r.finishedOn),String(r.attempts),String(r.maxAttempts),String(i),String(this.jobTimeoutMs)],1);if(!a)return null;let o=a.split(`|||`);if(o.length!==10)return this.logger.error(`Queue completeAndReserveNextWithMetadata: unexpected result format:`,a),null;let[s,,c,l,u,d,f,p,m,h]=o;return{id:s,groupId:t,data:JSON.parse(c),attempts:parseInt(l,10),maxAttempts:parseInt(u,10),seq:parseInt(d,10),timestamp:parseInt(f,10),orderMs:parseInt(p,10),score:parseFloat(m),deadlineAt:parseInt(h,10)}}catch(e){return this.logger.error(`Queue 
completeAndReserveNextWithMetadata error:`,e),null}}async isJobProcessing(e){return await this.r.zscore(`${this.ns}:processing`,e)!==null}async retry(e,t=0){return E(this.r,`retry`,[this.ns,e,String(t)],1)}async deadLetter(e,t){return E(this.r,`dead-letter`,[this.ns,e,t],1)}async recordCompleted(e,t,n){let r=n.processedOn??Date.now(),i=n.finishedOn??Date.now(),a=n.attempts??0,o=n.maxAttempts??this.defaultMaxAttempts;try{await E(this.r,`record-job-result`,[this.ns,e.id,`completed`,String(i),JSON.stringify(t??null),String(this.keepCompleted),String(this.keepFailed),String(r),String(i),String(a),String(o)],1)}catch(t){throw this.logger.error(`Error recording completion for job ${e.id}:`,t),t}}async recordAttemptFailure(e,t,n){let r=`${this.ns}:job:${e.id}`,i=n.processedOn??Date.now(),a=n.finishedOn??Date.now(),o=typeof t==`string`?t:t.message??`Error`,s=typeof t==`string`?`Error`:t.name??`Error`,c=typeof t==`string`?``:t.stack??``;await this.r.hset(r,`lastErrorMessage`,o,`lastErrorName`,s,`lastErrorStack`,c,`processedOn`,String(i),`finishedOn`,String(a))}async recordFinalFailure(e,t,n){let r=n.processedOn??Date.now(),i=n.finishedOn??Date.now(),a=n.attempts??0,o=n.maxAttempts??this.defaultMaxAttempts,s=typeof t==`string`?t:t.message??`Error`,c=typeof t==`string`?`Error`:t.name??`Error`,l=typeof t==`string`?``:t.stack??``,u=JSON.stringify({message:s,name:c,stack:l});try{await E(this.r,`record-job-result`,[this.ns,e.id,`failed`,String(i),u,String(this.keepCompleted),String(this.keepFailed),String(r),String(i),String(a),String(o)],1)}catch(t){throw this.logger.error(`Error recording final failure for job ${e.id}:`,t),t}}async getCompleted(e=this.keepCompleted){let t=`${this.ns}:completed`,n=await this.r.zrevrange(t,0,Math.max(0,e-1));if(n.length===0)return[];let r=this.r.multi();for(let e of n)r.hmget(`${this.ns}:job:${e}`,`groupId`,`data`,`returnvalue`,`processedOn`,`finishedOn`,`attempts`,`maxAttempts`);let i=await r.exec()??[];return 
n.map((e,t)=>{let[n,r,a,o,s,c,l]=i[t]?.[1]||[];return{id:e,groupId:n||``,data:r?O(r):null,returnvalue:a?O(a):null,processedOn:o?parseInt(o,10):void 0,finishedOn:s?parseInt(s,10):void 0,attempts:c?parseInt(c,10):0,maxAttempts:l?parseInt(l,10):this.defaultMaxAttempts}})}async getFailed(e=this.keepFailed){let t=`${this.ns}:failed`,n=await this.r.zrevrange(t,0,Math.max(0,e-1));if(n.length===0)return[];let r=this.r.multi();for(let e of n)r.hmget(`${this.ns}:job:${e}`,`groupId`,`data`,`failedReason`,`stacktrace`,`processedOn`,`finishedOn`,`attempts`,`maxAttempts`);let i=await r.exec()??[];return n.map((e,t)=>{let[n,r,a,o,s,c,l,u]=i[t]?.[1]||[];return{id:e,groupId:n||``,data:r?O(r):null,failedReason:a||``,stacktrace:o||void 0,processedOn:s?parseInt(s,10):void 0,finishedOn:c?parseInt(c,10):void 0,attempts:l?parseInt(l,10):0,maxAttempts:u?parseInt(u,10):this.defaultMaxAttempts}})}async getCompletedJobs(e=this.keepCompleted){let t=`${this.ns}:completed`,n=await this.r.zrevrange(t,0,Math.max(0,e-1));if(n.length===0)return[];let r=this.r.multi();for(let e of n)r.hgetall(`${this.ns}:job:${e}`);let i=await r.exec(),a=[];for(let e=0;e<n.length;e++){let t=n[e],r=i?.[e]?.[1]||{};if(!r||Object.keys(r).length===0){this.logger.warn(`Skipping completed job ${t} - not found (likely cleaned up)`);continue}let o=y.fromRawHash(this,t,r,`completed`);a.push(o)}return a}async getFailedJobs(e=this.keepFailed){let t=`${this.ns}:failed`,n=await this.r.zrevrange(t,0,Math.max(0,e-1));if(n.length===0)return[];let r=this.r.multi();for(let e of n)r.hgetall(`${this.ns}:job:${e}`);let i=await r.exec(),a=[];for(let e=0;e<n.length;e++){let t=n[e],r=i?.[e]?.[1]||{};if(!r||Object.keys(r).length===0){this.logger.warn(`Skipping failed job ${t} - not found (likely cleaned up)`);continue}let o=y.fromRawHash(this,t,r,`failed`);a.push(o)}return a}async getCompletedCount(){return this.r.zcard(`${this.ns}:completed`)}async getFailedCount(){return this.r.zcard(`${this.ns}:failed`)}async 
heartbeat(e,t=this.vt){return E(this.r,`heartbeat`,[this.ns,e.id,e.groupId,String(t)],1)}async cleanup(){let e=`${this.ns}:cleanup:lock`;try{if(await this.r.set(e,`1`,`PX`,6e4,`NX`)!==`OK`)return 0;let t=Date.now();return E(this.r,`cleanup`,[this.ns,String(t)],1)}catch{return 0}}getBlockTimeout(e,t){let n=.001;if(t){let e=t-Date.now();return e<=0||e<n*1e3?n:Math.min(e/1e3,5)}return Math.max(n,Math.min(e,5))}isConnectionError(e){if(!e)return!1;let t=`${e.message||``}`;return t===`Connection is closed.`||t.includes(`ECONNREFUSED`)}async reserveBlocking(e=5,t,n){let r=Date.now();if(await this.isPaused())return await A(50),null;if(!(this._consecutiveEmptyReserves>=3)){let e=await this.reserve();if(e)return this.logger.debug(`Immediate reserve successful (${Date.now()-r}ms)`),this._consecutiveEmptyReserves=0,e}let i=this.getBlockTimeout(e,t);this._consecutiveEmptyReserves%10==0&&this.logger.debug(`Starting blocking operation (timeout: ${i}s, consecutive empty: ${this._consecutiveEmptyReserves})`);let a=D(this.ns,`ready`);try{let e=Date.now(),t=await(n??this.r).bzpopmin(a,i),r=Date.now()-e;if(!t||t.length<3)return this.logger.debug(`Blocking timeout/empty (took ${r}ms)`),this._consecutiveEmptyReserves+=1,null;let[,o,s]=t;this._consecutiveEmptyReserves%10==0&&this.logger.debug(`Blocking result: group=${o}, score=${s} (took ${r}ms)`);let c=Date.now(),l=await this.reserveAtomic(o),u=Date.now()-c;if(l)this.logger.debug(`Successful job reserve after blocking: ${l.id} from group ${l.groupId} (reserve took ${u}ms)`),this._consecutiveEmptyReserves=0;else{this.logger.warn(`Blocking found group but reserve failed: group=${o} (reserve took ${u}ms)`);try{let e=`${this.ns}:g:${o}`,t=await this.r.zcard(e);t>0?(await this.r.zadd(a,Number(s),o),this.logger.debug(`Restored group ${o} to ready with score ${s} after failed atomic reserve (${t} jobs)`)):this.logger.warn(`Not restoring empty group ${o} - preventing poisoned group loop`)}catch{this.logger.warn(`Failed to check group ${o} job 
count, not restoring`)}return this._consecutiveEmptyReserves+=1,this.reserve()}return l}catch(e){let t=Date.now()-r;if(this.logger.error(`Blocking error after ${t}ms:`,e),this.isConnectionError(e))throw this.logger.error(`Connection error detected - rethrowing`),e;return this.logger.warn(`Falling back to regular reserve due to error`),this.reserve()}finally{let e=Date.now()-r;e>1e3&&this.logger.debug(`ReserveBlocking completed in ${e}ms`)}}async reserveAtomic(e){let t=Date.now(),n=await E(this.r,`reserve-atomic`,[this.ns,String(t),String(this.vt),String(e)],1);if(!n)return null;let r=n.split(`|||`);if(r.length<10)return null;let[i,a,o,s,c,l,u,d,f,p]=r,m=parseInt(u,10),h=parseInt(d,10);return{id:i,groupId:a,data:JSON.parse(o),attempts:parseInt(s,10),maxAttempts:parseInt(c,10),seq:parseInt(l,10),timestamp:m,orderMs:Number.isNaN(h)?m:h,score:parseFloat(f),deadlineAt:parseInt(p,10)}}async getReadyGroups(e=0,t=-1){return this.r.zrange(`${this.ns}:ready`,e,t)}async setGroupConfig(e,t){let n=`${this.ns}:config:${e}`,r=[];t.priority!==void 0&&r.push(`priority`,String(t.priority)),t.concurrency!==void 0&&r.push(`concurrency`,String(t.concurrency)),r.length>0&&await this.r.hset(n,...r)}async getGroupConfig(e){let t=`${this.ns}:config:${e}`,[n,r]=await this.r.hmget(t,`priority`,`concurrency`);return{priority:n?parseInt(n,10):1,concurrency:r?parseInt(r,10):1}}async setGroupConcurrency(e,t){let n=Math.max(1,Math.floor(t));await this.r.hset(`${this.ns}:config:${e}`,`concurrency`,String(n))}async getGroupConcurrency(e){let t=await this.r.hget(`${this.ns}:config:${e}`,`concurrency`);return t?parseInt(t,10):1}async getGroupOldestTimestamp(e){let t=`${this.ns}:g:${e}`,n=await this.r.zrange(t,0,0);if(!n||n.length===0)return;let r=n[0],i=await this.r.hget(`${this.ns}:job:${r}`,`timestamp`);return i?parseInt(i,10):void 0}async reserveBatch(e=16){let t=Date.now(),n=await E(this.r,`reserve-batch`,[this.ns,String(t),String(this.vt),String(Math.max(1,e))],1),r=[];for(let e of 
n||[]){if(!e)continue;let t=e.split(`|||`);t.length===10&&r.push({id:t[0],groupId:t[1],data:O(t[2]),attempts:parseInt(t[3],10),maxAttempts:parseInt(t[4],10),seq:parseInt(t[5],10),timestamp:parseInt(t[6],10),orderMs:parseInt(t[7],10),score:parseFloat(t[8]),deadlineAt:parseInt(t[9],10)})}return r}async getActiveCount(){return E(this.r,`get-active-count`,[this.ns],1)}async getWaitingCount(){return E(this.r,`get-waiting-count`,[this.ns],1)}async getDelayedCount(){return E(this.r,`get-delayed-count`,[this.ns],1)}async getActiveJobs(){return E(this.r,`get-active-jobs`,[this.ns],1)}async getWaitingJobs(){return E(this.r,`get-waiting-jobs`,[this.ns],1)}async getDelayedJobs(){return E(this.r,`get-delayed-jobs`,[this.ns],1)}async getUniqueGroups(){return E(this.r,`get-unique-groups`,[this.ns],1)}async getUniqueGroupsCount(){return E(this.r,`get-unique-groups-count`,[this.ns],1)}async getJob(e){return y.fromStore(this,e)}async setupSubscriber(){this.eventsSubscribed&&this.subscriber||(this.subscriber||(this.subscriber=this.r.duplicate(),this.subscriber.on(`message`,(e,t)=>{e===`${this.ns}:events`&&this.handleJobEvent(t)}),this.subscriber.on(`error`,e=>{this.logger.error(`Redis error (events subscriber):`,e)})),await this.subscriber.subscribe(`${this.ns}:events`),this.eventsSubscribed=!0)}handleJobEvent(e){try{let t=O(e);if(!t||typeof t.id!=`string`)return;let n=this.waitingJobs.get(t.id);if(!n||n.length===0)return;if(t.status===`completed`){let e=typeof t.result==`string`?O(t.result)??t.result:t.result;n.forEach(t=>t.resolve(e))}else if(t.status===`failed`){let e=typeof t.result==`string`?O(t.result)??{}:t.result??{},r=Error(e&&e.message||`Job failed`);e&&typeof e==`object`&&(typeof e.name==`string`&&(r.name=e.name),typeof e.stack==`string`&&(r.stack=e.stack)),n.forEach(e=>e.reject(r))}this.waitingJobs.delete(t.id)}catch(e){this.logger.error(`Failed to process job event:`,e)}}async waitUntilFinished(e,t=0){let n=await this.getJob(e),r=await 
n.getState();if(r===`completed`)return n.returnvalue;if(r===`failed`)throw Error(n.failedReason||`Job failed`);return await this.setupSubscriber(),new Promise((n,r)=>{let i,a,o=()=>{i&&clearTimeout(i);let t=this.waitingJobs.get(e);if(!t)return;let n=t.filter(e=>e!==a);n.length===0?this.waitingJobs.delete(e):this.waitingJobs.set(e,n)},s=e=>{o(),n(e)},c=e=>{o(),r(e)};a={resolve:s,reject:c};let l=this.waitingJobs.get(e)??[];l.push(a),this.waitingJobs.set(e,l),t>0&&(i=setTimeout(()=>{c(Error(`Timed out waiting for job ${e} to finish`))},t)),(async()=>{try{let t=await this.getJob(e),n=await t.getState();n===`completed`?s(t.returnvalue):n===`failed`&&c(Error(t.failedReason??`Job failed`))}catch{}})()})}async getJobsByStatus(e,t=0,n=-1){let r=n>=0?n-t+1:100,i=Math.min(r*2,500),a=new Map,o=[],s=async(e,t,n=!1)=>{try{let r=n?await this.r.zrevrange(e,0,i-1):await this.r.zrange(e,0,i-1);for(let e of r)a.set(e,t);o.push(...r)}catch{}},c=new Set(e);if(c.has(`active`)&&await s(`${this.ns}:processing`,`active`),c.has(`delayed`)&&await s(`${this.ns}:delayed`,`delayed`),c.has(`completed`)&&await s(`${this.ns}:completed`,`completed`,!0),c.has(`failed`)&&await s(`${this.ns}:failed`,`failed`,!0),c.has(`waiting`))try{let e=await this.r.smembers(`${this.ns}:groups`);if(e.length>0){let t=e.slice(0,Math.min(100,e.length)),n=this.r.multi(),r=Math.max(1,Math.ceil(i/t.length));for(let e of t)n.zrange(`${this.ns}:g:${e}`,0,r-1);let s=await n.exec();for(let e of s||[]){let t=e?.[1]||[];for(let e of t)a.set(e,`waiting`);o.push(...t)}}}catch{}let l=new Set,u=[];for(let e of o)l.has(e)||(l.add(e),u.push(e));let d=n>=0?u.slice(t,n+1):u.slice(t);if(d.length===0)return[];let f=this.r.multi();for(let e of d)f.hgetall(`${this.ns}:job:${e}`);let p=await f.exec(),m=[];for(let e=0;e<d.length;e++){let t=d[e],n=p?.[e]?.[1]||{};if(!n||Object.keys(n).length===0){this.logger.warn(`Skipping job ${t} - not found (likely cleaned up by retention)`);continue}let 
r=a.get(t),i=y.fromRawHash(this,t,n,r);m.push(i)}return m}async getJobCounts(){let[e,t,n,r,i]=await Promise.all([this.getActiveCount(),this.getWaitingCount(),this.getDelayedCount(),this.getCompletedCount(),this.getFailedCount()]);return{active:e,waiting:t,delayed:n,completed:r,failed:i,paused:0,"waiting-children":0,prioritized:0}}async checkStalledJobs(e,t,n){try{return await E(this.r,`check-stalled`,[this.ns,String(e),String(t),String(n)],1)||[]}catch(e){return this.logger.error(`Error checking stalled jobs:`,e),[]}}async startPromoter(){if(!(this.promoterRunning||this.orderingDelayMs<=0)){this.promoterRunning=!0,this.promoterLockId=(0,l.randomUUID)();try{this.promoterRedis=this.r.duplicate();try{await this.promoterRedis.config(`SET`,`notify-keyspace-events`,`Ex`),this.logger.debug(`Enabled Redis keyspace notifications for staging promoter`)}catch(e){this.logger.warn(`Failed to enable keyspace notifications. Promoter will use polling fallback.`,e)}let e=this.promoterRedis.options.db??0,t=`${this.ns}:stage:timer`,n=`__keyevent@${e}__:expired`;await this.promoterRedis.subscribe(n,e=>{e?this.logger.error(`Failed to subscribe to keyspace events:`,e):this.logger.debug(`Subscribed to ${n}`)}),this.promoterRedis.on(`message`,async(e,r)=>{e===n&&r===t&&await this.runPromotion()}),this.promoterInterval=setInterval(async()=>{await this.runPromotion()},100),await this.runPromotion(),this.logger.debug(`Staging promoter started`)}catch(e){this.logger.error(`Failed to start promoter:`,e),this.promoterRunning=!1,await this.stopPromoter()}}}async runPromotion(){if(!this.promoterRunning)return;let e=`${this.ns}:promoter:lock`;try{if(await this.r.set(e,this.promoterLockId,`PX`,3e4,`NX`)===`OK`)try{let e=await E(this.r,`promote-staged`,[this.ns,String(Date.now()),`100`],1);e>0&&this.logger.debug(`Promoted ${e} staged jobs`)}finally{await this.r.get(e)===this.promoterLockId&&await this.r.del(e)}}catch(e){this.logger.error(`Error during promotion:`,e)}}async 
stopPromoter(){if(this.promoterRunning){if(this.promoterRunning=!1,this.promoterInterval&&=(clearInterval(this.promoterInterval),void 0),this.promoterRedis){try{await this.promoterRedis.unsubscribe(),await this.promoterRedis.quit()}catch{try{this.promoterRedis.disconnect()}catch{}}this.promoterRedis=void 0}this.logger.debug(`Staging promoter stopped`)}}async close(){if(this.batchConfig&&this.batchBuffer.length>0&&(this.logger.debug(`Flushing ${this.batchBuffer.length} pending batched jobs before close`),await this.flushBatch()),await this.stopPromoter(),this.subscriber){try{await this.subscriber.unsubscribe(`${this.ns}:events`),await this.subscriber.quit()}catch{try{this.subscriber.disconnect()}catch{}}this.subscriber=void 0,this.eventsSubscribed=!1}if(this.waitingJobs.size>0){let e=Error(`Queue closed`);this.waitingJobs.forEach(t=>{t.forEach(t=>t.reject(e))}),this.waitingJobs.clear()}try{await this.r.quit()}catch{try{this.r.disconnect()}catch{}}}get pausedKey(){return`${this.ns}:paused`}async pause(){await this.r.set(this.pausedKey,`1`)}async resume(){await this.r.del(this.pausedKey)}async isPaused(){return await this.r.get(this.pausedKey)!==null}async waitForEmpty(e=6e4){let t=Date.now();for(;Date.now()-t<e;)try{if(await E(this.r,`is-empty`,[this.ns],1)===1)return await A(0),!0;await A(200)}catch(e){if(this.isConnectionError(e)){this.logger.warn(`Redis connection error in waitForEmpty, retrying...`),await A(1e3);continue}throw e}return!1}async cleanupPoisonedGroup(e){if(Math.random()>.01)return`skipped`;let t=this._groupCleanupTracking.get(e)||0,n=Date.now();if(n-t<1e4)return`throttled`;if(this._groupCleanupTracking.set(e,n),this._groupCleanupTracking.size>1e3){let e=n-6e4;for(let[t,n]of this._groupCleanupTracking.entries())n<e&&this._groupCleanupTracking.delete(t)}try{let t=await E(this.r,`cleanup-poisoned-group`,[this.ns,e,String(n)],1);return t===`poisoned`?this.logger.warn(`Removed poisoned group ${e} from ready queue`):t===`empty`?this.logger.warn(`Removed 
empty group ${e} from ready queue`):t===`locked`&&Math.random()<.1&&this.logger.debug(`Detected group ${e} is locked by another worker (this is normal with high concurrency)`),t}catch(t){return this.logger.error(`Error cleaning up group ${e}:`,t),`error`}}schedulerLockKey(){return`${this.ns}:sched:lock`}async acquireSchedulerLock(e=1500){try{return await this.r.set(this.schedulerLockKey(),`1`,`PX`,e,`NX`)===`OK`}catch{return!1}}async runSchedulerOnce(e=Date.now()){await this.acquireSchedulerLock(this.schedulerLockTtlMs)&&(await this.promoteDelayedJobsBounded(32,e),await this.processRepeatingJobsBounded(16,e))}async promoteDelayedJobsBounded(e=256,t=Date.now()){let n=0;for(let r=0;r<e;r++)try{let e=await E(this.r,`promote-delayed-one`,[this.ns,String(t)],1);if(!e||e<=0)break;n+=e}catch{break}return n}async processRepeatingJobsBounded(e=128,t=Date.now()){let n=`${this.ns}:repeat:schedule`,r=0;for(let i=0;i<e;i++){let e=await this.r.zrangebyscore(n,0,t,`LIMIT`,0,1);if(!e||e.length===0)break;let i=e[0];try{let e=`${this.ns}:repeat:${i}`,a=await this.r.get(e);if(!a){await this.r.zrem(n,i);continue}let o=JSON.parse(a);if(o.removed){await this.r.zrem(n,i),await this.r.del(e);continue}await this.r.zrem(n,i);let s;s=`every`in o.repeat?t+o.repeat.every:this.getNextCronTime(o.repeat.pattern,t),o.nextRunTime=s,o.lastRunTime=t,await this.r.set(e,JSON.stringify(o)),await this.r.zadd(n,s,i),await E(this.r,`enqueue`,[this.ns,o.groupId,JSON.stringify(o.data),String(o.maxAttempts??this.defaultMaxAttempts),String(o.orderMs??t),`0`,String((0,l.randomUUID)()),String(this.keepCompleted)],1),r++}catch(e){this.logger.error(`Error processing repeating job ${i}:`,e),await this.r.zrem(n,i)}}return r}async promoteDelayedJobs(){try{return await E(this.r,`promote-delayed-jobs`,[this.ns,String(Date.now())],1)}catch(e){return this.logger.error(`Error promoting delayed jobs:`,e),0}}async changeDelay(e,t){let n=t>0?Date.now()+t:0;try{return await 
E(this.r,`change-delay`,[this.ns,e,String(n),String(Date.now())],1)===1}catch(t){return this.logger.error(`Error changing delay for job ${e}:`,t),!1}}async promote(e){return this.changeDelay(e,0)}async remove(e){try{return await E(this.r,`remove`,[this.ns,e],1)===1}catch(t){return this.logger.error(`Error removing job ${e}:`,t),!1}}async clean(e,t,n){let r=Date.now()-e;try{return await E(this.r,`clean-status`,[this.ns,n,String(r),String(Math.max(0,Math.min(t,1e5)))],1)??0}catch(e){return console.log(`HERE?`,e),this.logger.error(`Error cleaning ${n} jobs:`,e),0}}async updateData(e,t){let n=`${this.ns}:job:${e}`;if(!await this.r.exists(n))throw Error(`Job ${e} not found`);let r=JSON.stringify(t===void 0?null:t);await this.r.hset(n,`data`,r)}async addRepeatingJob(e){if(!e.repeat)throw Error(`Repeat options are required for repeating jobs`);let t=Date.now(),n=`${e.groupId}:${JSON.stringify(e.repeat)}:${t}:${Math.random().toString(36).slice(2)}`,r;r=`every`in e.repeat?t+e.repeat.every:this.getNextCronTime(e.repeat.pattern,t);let i={groupId:e.groupId,data:e.data===void 0?null:e.data,maxAttempts:e.maxAttempts??this.defaultMaxAttempts,orderMs:e.orderMs,repeat:e.repeat,nextRunTime:r,lastRunTime:null,removed:!1},a=`${this.ns}:repeat:${n}`;await this.r.set(a,JSON.stringify(i)),await this.r.zadd(`${this.ns}:repeat:schedule`,r,n);let o=`${this.ns}:repeat:lookup:${e.groupId}:${JSON.stringify(e.repeat)}`;await this.r.set(o,n);let s=`repeat:${n}`,c=`${this.ns}:job:${s}`;try{await this.r.hmset(c,`id`,s,`groupId`,i.groupId,`data`,JSON.stringify(i.data),`attempts`,`0`,`maxAttempts`,String(i.maxAttempts),`seq`,`0`,`timestamp`,String(Date.now()),`orderMs`,String(i.orderMs??t),`status`,`waiting`)}catch{}return y.fromStore(this,s)}getNextCronTime(e,t){try{return u.default.parseExpression(e,{currentDate:new Date(t)}).next().getTime()}catch{throw Error(`Invalid cron pattern: ${e}`)}}async removeRepeatingJob(e,t){try{let n=`${this.ns}:repeat:lookup:${e}:${JSON.stringify(t)}`,r=await 
this.r.get(n);if(!r)return!1;let i=`${this.ns}:repeat:${r}`,a=`${this.ns}:repeat:schedule`,o=await this.r.get(i);if(!o)return await this.r.del(n),!1;let s=JSON.parse(o);s.removed=!0,await this.r.set(i,JSON.stringify(s)),await this.r.zrem(a,r),await this.r.del(n);try{let e=`repeat:${r}`;await this.r.del(`${this.ns}:job:${e}`)}catch{}return!0}catch(e){return this.logger.error(`Error removing repeating job:`,e),!1}}};function A(e){return new Promise(t=>setTimeout(t,e))}var j=class{constructor(e){this.value=void 0,this.next=null,this.value=e}},M=class{constructor(){this.length=0,this.head=null,this.tail=null}push(e){let t=new j(e);return this.length?this.tail.next=t:this.head=t,this.tail=t,this.length+=1,t}shift(){if(!this.length)return null;let e=this.head;return this.head=this.head.next,--this.length,e}},N=class{constructor(e=!1){this.ignoreErrors=e,this.queue=new M,this.pending=new Set,this.newPromise()}add(e){this.pending.add(e),e.then(t=>{this.pending.delete(e),this.queue.length===0&&this.resolvePromise(t),this.queue.push(t)}).catch(t=>{this.pending.delete(e),this.ignoreErrors?(this.queue.length===0&&this.resolvePromise(void 0),this.queue.push(void 0)):this.rejectPromise(t)})}async waitAll(){await Promise.all(this.pending)}numTotal(){return this.pending.size+this.queue.length}numPending(){return this.pending.size}numQueued(){return this.queue.length}resolvePromise(e){this.resolve(e),this.newPromise()}rejectPromise(e){this.reject(e),this.newPromise()}newPromise(){this.nextPromise=new Promise((e,t)=>{this.resolve=e,this.reject=t})}async wait(){return this.nextPromise}async fetch(){if(!(this.pending.size===0&&this.queue.length===0)){for(;this.queue.length===0;)try{await this.wait()}catch(e){this.ignoreErrors||console.error(`Unexpected Error in AsyncFifoQueue`,e)}return this.queue.shift()?.value}}},P=class extends Error{constructor(e){super(e),this.name=`UnrecoverableError`}},F=class{constructor(){this.listeners=new Map}on(e,t){return 
this.listeners.has(e)||this.listeners.set(e,[]),this.listeners.get(e).push(t),this}off(e,t){let n=this.listeners.get(e);if(n){let e=n.indexOf(t);e!==-1&&n.splice(e,1)}return this}emit(e,...t){let n=this.listeners.get(e);if(n&&n.length>0){for(let r of n)try{r(...t)}catch(t){console.error(`Error in event listener for '${String(e)}':`,t)}return!0}return!1}removeAllListeners(e){return e?this.listeners.delete(e):this.listeners.clear(),this}};const I=(e,t)=>{let n=Math.min(3e4,2**(e-1)*500);return n+Math.floor(n*.25*Math.random())};var L=class extends F{constructor(e){if(super(),this.stopping=!1,this.ready=!1,this.closed=!1,this.blockingClient=null,this.jobsInProgress=new Set,this.lastJobPickupTime=Date.now(),this.totalJobsProcessed=0,this.blockingStats={totalBlockingCalls:0,consecutiveEmptyReserves:0,lastActivityTime:Date.now()},this.emptyReserveBackoffMs=0,!e.handler||typeof e.handler!=`function`)throw Error(`Worker handler must be a function`);this.opts=e,this.q=e.queue,this.name=e.name??this.q.name,this.logger=typeof e.logger==`object`?e.logger:new S(!!e.logger,this.name),this.handler=e.handler;let t=this.q.jobTimeoutMs??3e4;this.hbMs=e.heartbeatMs??Math.max(1e3,Math.floor(t/3)),this.onError=e.onError,this.maxAttempts=e.maxAttempts??this.q.maxAttemptsDefault??3,this.backoff=e.backoff??I,this.enableCleanup=e.enableCleanup??!0,this.cleanupMs=e.cleanupIntervalMs??6e4,this.schedulerMs=e.schedulerIntervalMs??1e3,this.blockingTimeoutSec=e.blockingTimeoutSec??5,this.concurrency=Math.max(1,e.concurrency??1),this.stalledInterval=e.stalledInterval??(this.concurrency>50?6e4:3e4),this.maxStalledCount=e.maxStalledCount??(this.concurrency>50?2:1),this.stalledGracePeriod=e.stalledGracePeriod??5e3,this.setupRedisEventHandlers(),this.q.orderingDelayMs>0&&this.q.startPromoter().catch(e=>{this.logger.error(`Failed to start staging promoter:`,e)}),e.autoStart!==!1&&this.run()}get isClosed(){return this.closed}addJitter(e,t=.1){return e+Math.random()*e*t}setupRedisEventHandlers(){let 
e=this.q.redis;e&&(this.redisCloseHandler=()=>{this.ready=!1,this.emit(`ioredis:close`)},this.redisErrorHandler=e=>{this.emit(`error`,e)},this.redisReadyHandler=()=>{!this.ready&&!this.stopping&&(this.ready=!0,this.emit(`ready`))},e.on(`close`,this.redisCloseHandler),e.on(`error`,this.redisErrorHandler),e.on(`ready`,this.redisReadyHandler))}async run(){if(this.runLoopPromise)return this.runLoopPromise;let e=this._runLoop();return this.runLoopPromise=e,e}async _runLoop(){this.logger.info(`🚀 Worker ${this.name} starting...`);let e=this.opts.strategyPollInterval??50;try{this.blockingClient=this.q.redis.duplicate({enableAutoPipelining:!0,maxRetriesPerRequest:null,retryStrategy:e=>Math.max(Math.min(Math.exp(e)*1e3,2e4),1e3)}),this.blockingClient.on(`error`,e=>{this.q.isConnectionError(e)?this.logger.warn(`Blocking client connection error:`,e.message):this.logger.error(`Blocking client error (non-connection):`,e),this.emit(`error`,e instanceof Error?e:Error(String(e)))}),this.blockingClient.on(`close`,()=>{!this.stopping&&!this.closed&&this.logger.warn(`Blocking client disconnected, will reconnect on next operation`)}),this.blockingClient.on(`reconnecting`,()=>{!this.stopping&&!this.closed&&this.logger.info(`Blocking client reconnecting...`)}),this.blockingClient.on(`ready`,()=>{!this.stopping&&!this.closed&&this.logger.info(`Blocking client ready`)})}catch(e){this.logger.error(`Failed to create blocking client:`,e),this.blockingClient=null}if(this.enableCleanup){this.cleanupTimer=setInterval(async()=>{try{await this.q.cleanup()}catch(e){this.onError?.(e)}},this.addJitter(this.cleanupMs));let e=Math.min(this.schedulerMs,this.cleanupMs);this.schedulerTimer=setInterval(async()=>{try{await this.q.runSchedulerOnce()}catch{}},this.addJitter(e))}this.startStalledChecker();let t=0,n=new 
N(!0);for(;!this.stopping||n.numTotal()>0;)try{for(;!this.stopping&&!(n.numTotal()>=this.concurrency);){this.blockingStats.totalBlockingCalls++,this.blockingStats.totalBlockingCalls>=1e9&&(this.blockingStats.totalBlockingCalls=0),this.logger.debug(`Fetching job (call #${this.blockingStats.totalBlockingCalls}, processing: ${this.jobsInProgress.size}/${this.concurrency}, queue: ${n.numTotal()} (queued: ${n.numQueued()}, pending: ${n.numPending()}), total: ${n.numTotal()}/${this.concurrency})...`);let r;if(this.opts.strategy)r=(async()=>{let t=await this.opts.strategy.getNextGroup(this.q);return t?await this.q.reserveAtomic(t)||null:(await this.delay(e),null)})();else{let e=this.concurrency-n.numTotal();if(e>0&&n.numTotal()===0){let r=Math.min(e,8),i=await this.q.reserveBatch(r);if(i.length>0){this.logger.debug(`Batch reserved ${i.length} jobs`);for(let e of i)n.add(Promise.resolve(e));t=0,this.lastJobPickupTime=Date.now(),this.blockingStats.consecutiveEmptyReserves=0,this.blockingStats.lastActivityTime=Date.now(),this.emptyReserveBackoffMs=0;continue}}let i=this.blockingStats.consecutiveEmptyReserves>=2&&n.numTotal()===0&&this.jobsInProgress.size===0,a=this.blockingTimeoutSec;r=i?this.q.reserveBlocking(a,void 0,this.blockingClient??void 0):this.q.reserve()}n.add(r);let i=await r;if(i)t=0,this.lastJobPickupTime=Date.now(),this.blockingStats.consecutiveEmptyReserves=0,this.blockingStats.lastActivityTime=Date.now(),this.emptyReserveBackoffMs=0,this.logger.debug(`Fetched job ${i.id} from group ${i.groupId}`);else{if(this.opts.strategy&&n.numTotal()===0&&this.jobsInProgress.size===0)break;this.blockingStats.consecutiveEmptyReserves++,this.blockingStats.consecutiveEmptyReserves%50==0&&this.logger.debug(`No job available (consecutive empty: ${this.blockingStats.consecutiveEmptyReserves})`);let e=this.concurrency>=100?5:3;if(this.blockingStats.consecutiveEmptyReserves>e&&n.numTotal()===0&&this.jobsInProgress.size===0){let 
e=this.concurrency>=100?2e3:5e3;this.emptyReserveBackoffMs===0?this.emptyReserveBackoffMs=this.concurrency>=100?100:50:this.emptyReserveBackoffMs=Math.min(e,Math.max(100,this.emptyReserveBackoffMs*1.2)),this.blockingStats.consecutiveEmptyReserves%20==0&&this.logger.debug(`Applying backoff: ${Math.round(this.emptyReserveBackoffMs)}ms (consecutive empty: ${this.blockingStats.consecutiveEmptyReserves}, jobs in progress: ${this.jobsInProgress.size})`),await this.delay(this.emptyReserveBackoffMs)}if(n.numTotal()===0&&this.jobsInProgress.size===0||n.numTotal()>0||this.jobsInProgress.size>0)break}}let r;do r=await n.fetch()??void 0;while(!r&&n.numQueued()>0);if(r&&typeof r==`object`&&`id`in r){this.totalJobsProcessed++,this.logger.debug(`Processing job ${r.id} from group ${r.groupId} immediately`);let e=this.processJob(r,()=>n.numTotal()<=this.concurrency,this.jobsInProgress);n.add(e)}}catch(e){if(this.stopping)return;if(this.q.isConnectionError(e))if(t++,this.logger.error(`Connection error (retry ${t}/10):`,e),t>=10)this.logger.error(`⚠️ Max connection retries (10) exceeded! 
Worker will continue but may be experiencing persistent Redis issues.`),this.emit(`error`,Error(`Max connection retries (10) exceeded - worker continuing with backoff`)),await this.delay(2e4),t=0;else{let e=Math.max(Math.min(Math.exp(t)*1e3,2e4),1e3);this.logger.debug(`Waiting ${Math.round(e)}ms before retry (exponential backoff)`),await this.delay(e)}else this.logger.error(`Worker loop error (non-connection, continuing):`,e),this.emit(`error`,e instanceof Error?e:Error(String(e))),t=0,await this.delay(100);this.onError?.(e)}this.logger.info(`Stopped`)}async delay(e){return new Promise(t=>setTimeout(t,e))}async processJob(e,t,n){let r=Array.from(n).find(t=>t.job.id===e.id),i;r?(r.ts=Date.now(),i=r):(i={job:e,ts:Date.now()},n.add(i));try{let r=await this.processSingleJob(e,t);if(r&&typeof r==`object`&&`id`in r&&`groupId`in r){let e={job:r,ts:Date.now()};return n.add(e),n.delete(i),r}return r}finally{n.has(i)&&n.delete(i)}}async completeJob(e,t,n,r,i){if(n?.()){let n=await this.q.completeAndReserveNextWithMetadata(e.id,e.groupId,t,{processedOn:r||Date.now(),finishedOn:i||Date.now(),attempts:e.attempts,maxAttempts:e.maxAttempts});if(n)return this.logger.debug(`Got next job ${n.id} from same group ${n.groupId} atomically`),n;this.logger.debug(`Atomic chaining returned nil for job ${e.id} - job completed, but no next job chained`),Math.random()<.1&&await new Promise(e=>setTimeout(e,Math.random()*100))}else await this.q.completeWithMetadata(e,t,{processedOn:r||Date.now(),finishedOn:i||Date.now(),attempts:e.attempts,maxAttempts:e.maxAttempts})}startStalledChecker(){this.stalledInterval<=0||(this.stalledCheckTimer=setInterval(async()=>{try{await this.checkStalled()}catch(e){this.logger.error(`Error in stalled job checker:`,e),this.emit(`error`,e instanceof Error?e:Error(String(e)))}},this.stalledInterval))}async checkStalled(){if(!(this.stopping||this.closed))try{let e=Date.now(),t=await 
this.q.checkStalledJobs(e,this.stalledGracePeriod,this.maxStalledCount);if(t.length>0)for(let e=0;e<t.length;e+=3){let n=t[e],r=t[e+1],i=t[e+2];i===`recovered`?(this.logger.info(`Recovered stalled job ${n} from group ${r}`),this.emit(`stalled`,n,r)):i===`failed`&&(this.logger.warn(`Failed stalled job ${n} from group ${r} (exceeded max stalled count)`),this.emit(`stalled`,n,r))}}catch(e){this.logger.error(`Error checking stalled jobs:`,e)}}getWorkerMetrics(){let e=Date.now();return{name:this.name,totalJobsProcessed:this.totalJobsProcessed,lastJobPickupTime:this.lastJobPickupTime,timeSinceLastJob:this.lastJobPickupTime>0?e-this.lastJobPickupTime:null,blockingStats:{...this.blockingStats},isProcessing:this.jobsInProgress.size>0,jobsInProgressCount:this.jobsInProgress.size,jobsInProgress:Array.from(this.jobsInProgress).map(t=>({jobId:t.job.id,groupId:t.job.groupId,processingTimeMs:e-t.ts}))}}async close(e=3e4){this.stopping=!0,await this.delay(100),this.cleanupTimer&&clearInterval(this.cleanupTimer),this.schedulerTimer&&clearInterval(this.schedulerTimer),this.stalledCheckTimer&&clearInterval(this.stalledCheckTimer);let t=Date.now();for(;this.jobsInProgress.size>0&&Date.now()-t<e;)await z(100);if(this.blockingClient){try{this.jobsInProgress.size>0&&e>0?(this.logger.debug(`Gracefully closing blocking client (quit)...`),await this.blockingClient.quit()):(this.logger.debug(`Force closing blocking client (disconnect)...`),this.blockingClient.disconnect())}catch(e){this.logger.debug(`Error closing blocking client:`,e)}this.blockingClient=null}if(this.runLoopPromise){let t=this.jobsInProgress.size>0?e:2e3,n=new Promise(e=>{setTimeout(e,t)});try{await Promise.race([this.runLoopPromise,n])}catch(e){this.logger.warn(`Error while waiting for run loop to exit:`,e)}}if(this.jobsInProgress.size>0){this.logger.warn(`Worker stopped with ${this.jobsInProgress.size} jobs still processing after ${e}ms timeout.`);let t=Date.now();for(let e of 
this.jobsInProgress)this.emit(`graceful-timeout`,y.fromReserved(this.q,e.job,{processedOn:e.ts,finishedOn:t,status:`active`}))}this.jobsInProgress.clear(),this.ready=!1,this.closed=!0;try{let e=this.q.redis;e&&(this.redisCloseHandler&&e.off?.(`close`,this.redisCloseHandler),this.redisErrorHandler&&e.off?.(`error`,this.redisErrorHandler),this.redisReadyHandler&&e.off?.(`ready`,this.redisReadyHandler))}catch{}this.emit(`closed`)}getCurrentJob(){if(this.jobsInProgress.size===0)return null;let e=Array.from(this.jobsInProgress)[0],t=Date.now();return{job:y.fromReserved(this.q,e.job,{processedOn:e.ts,status:`active`}),processingTimeMs:t-e.ts}}getCurrentJobs(){let e=Date.now();return Array.from(this.jobsInProgress).map(t=>({job:y.fromReserved(this.q,t.job,{processedOn:t.ts,status:`active`}),processingTimeMs:e-t.ts}))}isProcessing(){return this.jobsInProgress.size>0}async add(e){return this.q.add(e)}async processSingleJob(e,t){let n=Date.now(),r,i,a=()=>{let t=this.q.jobTimeoutMs||3e4,n=Math.min(this.hbMs,Math.floor(t/3),1e4);this.logger.debug(`Starting heartbeat for job ${e.id} (interval: ${n}ms, concurrency: ${this.concurrency})`),r=setInterval(async()=>{try{await this.q.heartbeat(e)===0&&(this.logger.warn(`Heartbeat failed for job ${e.id} - job may have been removed or completed elsewhere`),r&&clearInterval(r))}catch(t){let n=this.q.isConnectionError(t);(!n||!this.stopping)&&this.logger.error(`Heartbeat error for job ${e.id}:`,t instanceof Error?t.message:String(t)),this.onError?.(t,y.fromReserved(this.q,e,{status:`active`})),(!n||!this.stopping)&&this.emit(`error`,t instanceof Error?t:Error(String(t)))}},n)};try{let o=this.q.jobTimeoutMs||3e4,s=Math.min(o*.1,2e3);i=setTimeout(()=>{a()},s);let c=y.fromReserved(this.q,e,{processedOn:n,status:`active`}),l=await this.handler(c);i&&clearTimeout(i),r&&clearInterval(r);let u=Date.now(),d=await this.completeJob(e,l,t,n,u);return 
this.blockingStats.consecutiveEmptyReserves=0,this.emptyReserveBackoffMs=0,this.emit(`completed`,y.fromReserved(this.q,e,{processedOn:n,finishedOn:u,returnvalue:l,status:`completed`})),d}catch(t){i&&clearTimeout(i),r&&clearInterval(r),await this.handleJobFailure(t,e,n)}}async handleJobFailure(e,t,n){let r=y.fromReserved(this.q,t,{processedOn:n,status:`active`});this.onError?.(e,r),this.blockingStats.consecutiveEmptyReserves=0,this.emptyReserveBackoffMs=0;try{this.emit(`error`,e instanceof Error?e:Error(String(e)))}catch{}let i=Date.now();this.emit(`failed`,y.fromReserved(this.q,t,{processedOn:n,finishedOn:i,failedReason:e instanceof Error?e.message:String(e),stacktrace:e instanceof Error||typeof e==`object`&&e?e.stack:void 0,status:`failed`}));let a=t.attempts+1;if(e instanceof P){this.logger.info(`Unrecoverable error for job ${t.id}: ${e instanceof Error?e.message:String(e)}. Skipping retries.`),await this.deadLetterJob(e,t,n,i,a);return}let o=this.backoff(a,e);if(a>=this.maxAttempts){await this.deadLetterJob(e,t,n,i,a);return}if(await this.q.retry(t.id,o)===-1){await this.deadLetterJob(e,t,n,i,t.maxAttempts);return}await this.recordFailureAttempt(e,t,n,i,a)}async deadLetterJob(e,t,n,r,i){this.logger.info(`Dead lettering job ${t.id} from group ${t.groupId} (attempts: ${i}/${t.maxAttempts})`);let a=e instanceof Error?e:Error(String(e));try{await this.q.recordFinalFailure({id:t.id,groupId:t.groupId},{name:a.name,message:a.message,stack:a.stack},{processedOn:n,finishedOn:r,attempts:i,maxAttempts:t.maxAttempts,data:t.data})}catch(e){this.logger.warn(`Failed to record final failure`,e)}await this.q.deadLetter(t.id,t.groupId)}async recordFailureAttempt(e,t,n,r,i){let a=e instanceof Error?e:Error(String(e));try{await this.q.recordAttemptFailure({id:t.id,groupId:t.groupId},{name:a.name,message:a.message,stack:a.stack},{processedOn:n,finishedOn:r,attempts:i,maxAttempts:t.maxAttempts})}catch(e){this.logger.warn(`Failed to record attempt failure`,e)}}};const R=L;function 
z(e){return new Promise(t=>setTimeout(t,e))}var B=class{constructor(e={}){this.cache=new Map,this.overrides=new Map,this.algorithmConfig=e.algorithm??{type:`weighted-random`},this.defaultPriority=e.defaultPriority??1,this.cacheTtlMs=e.cacheTtlMs??5e3,this.onGetPriority=e.onGetPriority}setPriority(e,t){this.overrides.set(e,t)}clearPriority(e){this.overrides.delete(e)}async resolvePriority(e,t){let n=this.overrides.get(t);if(n!==void 0)return n;let r=Date.now(),i=this.cache.get(t);if(i&&i.expiresAt>r)return i.priority;let a=await e.getGroupConfig(t),o;return o=this.onGetPriority?await this.onGetPriority(t,a):a.priority===1?this.defaultPriority:a.priority,this.cacheTtlMs>0&&this.cache.set(t,{priority:o,expiresAt:r+this.cacheTtlMs}),o}selectByStrict(e){return e.sort((e,t)=>t.priority-e.priority),e[0].groupId}selectByWeightedRandom(e){if(e.length===1)return e[0].groupId;let t=this.algorithmConfig.minWeightRatio??.1,n=Math.max(...e.map(e=>e.priority))*t,r=e.map(e=>({groupId:e.groupId,weight:Math.max(e.priority,n)})),i=r.reduce((e,t)=>e+t.weight,0),a=Math.random()*i;for(let e of r)if(a-=e.weight,a<=0)return e.groupId;return e[0].groupId}selectByAging(e){let t=Date.now(),n=this.algorithmConfig.intervalMs??6e4,r=e.map(e=>{let r=0;if(e.oldestTimestamp){let i=t-e.oldestTimestamp;r=Math.floor(i/n)}return{groupId:e.groupId,adjustedPriority:e.priority+r}});return r.sort((e,t)=>t.adjustedPriority-e.adjustedPriority),r[0].groupId}async getNextGroup(e){let t=await e.getReadyGroups(0,100);if(t.length===0)return null;let n=await Promise.all(t.map(async t=>{let n={groupId:t,priority:await this.resolvePriority(e,t)};return this.algorithmConfig.type===`aging`&&(n.oldestTimestamp=await e.getGroupOldestTimestamp(t)),n}));switch(this.algorithmConfig.type){case`strict`:return this.selectByStrict(n);case`aging`:return this.selectByAging(n);case`weighted-random`:default:return 
this.selectByWeightedRandom(n)}}};exports.BullBoardGroupMQAdapter=g,exports.Job=y,exports.PriorityStrategy=B,exports.Queue=k,exports.UnrecoverableError=P,exports.Worker=R,exports.getWorkersStatus=v,exports.waitForQueueToEmpty=_;
1
+ var e=Object.create,t=Object.defineProperty,n=Object.getOwnPropertyDescriptor,r=Object.getOwnPropertyNames,i=Object.getPrototypeOf,a=Object.prototype.hasOwnProperty,o=(e,t)=>()=>(t||e((t={exports:{}}).exports,t),t.exports),s=(e,i,o,s)=>{if(i&&typeof i==`object`||typeof i==`function`)for(var c=r(i),l=0,u=c.length,d;l<u;l++)d=c[l],!a.call(e,d)&&d!==o&&t(e,d,{get:(e=>i[e]).bind(null,d),enumerable:!(s=n(i,d))||s.enumerable});return e},c=(n,r,a)=>(a=n==null?{}:e(i(n)),s(r||!n||!n.__esModule?t(a,`default`,{value:n,enumerable:!0}):a,n));let l=require(`node:crypto`);l=c(l);let u=require(`cron-parser`);u=c(u);let d=require(`node:fs`);d=c(d);let f=require(`node:path`);f=c(f);let p=require(`node:url`);p=c(p);var m=o((e=>{Object.defineProperty(e,`__esModule`,{value:!0}),e.BaseAdapter=void 0,e.BaseAdapter=class{constructor(e,t={}){this.formatters=new Map,this._visibilityGuard=()=>!0,this.readOnlyMode=t.readOnlyMode===!0,this.allowRetries=this.readOnlyMode?!1:t.allowRetries!==!1,this.allowCompletedRetries=this.allowRetries&&t.allowCompletedRetries!==!1,this.prefix=t.prefix||``,this.delimiter=t.delimiter||``,this.description=t.description||``,this.displayName=t.displayName||``,this.type=e,this.externalJobUrl=t.externalJobUrl}getDescription(){return this.description}getDisplayName(){return this.displayName}setFormatter(e,t){this.formatters.set(e,t)}format(e,t,n=t){let r=this.formatters.get(e);return typeof r==`function`?r(t):n}setVisibilityGuard(e){this._visibilityGuard=e}isVisible(e){return this._visibilityGuard(e)}}})),h=c(m()),g=class extends h.BaseAdapter{constructor(e,t={}){let n=e.namespace;super(n,t),this.queue=e,this.options=t}getDescription(){return this.options.description||``}getDisplayName(){return this.options.displayName||``}getName(){return`${this.options.prefix||``}${this.options.delimiter||``}${this.queue.rawNamespace}`.replace(/(^[\s:]+)|([\s:]+$)/g,``)}async getRedisInfo(){return this.queue.redis.info()}async getJob(e){return await this.queue.getJob(e)}async 
getJobs(e,t,n){return await this.queue.getJobsByStatus(e,t,n)}async getJobCounts(){let e=await this.queue.getJobCounts();return{latest:0,active:e.active,waiting:e.waiting,"waiting-children":e[`waiting-children`],prioritized:e.prioritized,completed:e.completed,failed:e.failed,delayed:e.delayed,paused:e.paused}}async getJobLogs(e){return[]}getStatuses(){return[`latest`,`active`,`waiting`,`waiting-children`,`prioritized`,`completed`,`failed`,`delayed`,`paused`]}getJobStatuses(){return[`active`,`waiting`,`waiting-children`,`prioritized`,`completed`,`failed`,`delayed`,`paused`]}assertWritable(){if(this.options.readOnlyMode)throw Error(`This adapter is in read-only mode. Mutations are disabled.`)}async clean(e,t){this.assertWritable(),!(e!==`completed`&&e!==`failed`&&e!==`delayed`)&&await this.queue.clean(t,2**53-1,e)}async addJob(e,t,n){return this.assertWritable(),await this.queue.add({groupId:n.groupId??Math.random().toString(36).substring(2,15),data:t,...n})}async isPaused(){return this.queue.isPaused()}async pause(){this.assertWritable(),await this.queue.pause()}async resume(){this.assertWritable(),await this.queue.resume()}async empty(){throw this.assertWritable(),Error(`Not implemented`)}async promoteAll(){throw this.assertWritable(),Error(`Not implemented`)}};async function _(e,t=6e4){return e.waitForEmpty(t)}function v(e){let t=e.map((e,t)=>{let n=e.getCurrentJob();return{index:t,isProcessing:e.isProcessing(),currentJob:n?{jobId:n.job.id,groupId:n.job.groupId,processingTimeMs:n.processingTimeMs}:void 0}}),n=t.filter(e=>e.isProcessing).length,r=t.length-n;return{total:e.length,processing:n,idle:r,workers:t}}var y=class 
e{constructor(e){this.queue=e.queue,this.id=e.id,this.name=e.name??`groupmq`,this.data=e.data,this.groupId=e.groupId,this.attemptsMade=e.attemptsMade,this.opts=e.opts,this.processedOn=e.processedOn,this.finishedOn=e.finishedOn,this.failedReason=e.failedReason,this.stacktrace=e.stacktrace,this.returnvalue=e.returnvalue,this.timestamp=e.timestamp,this.orderMs=e.orderMs,this.status=e.status??`unknown`,this.parentId=e.parentId}async getState(){return this.status??`unknown`}toJSON(){return{id:this.id,name:this.name,data:this.data,groupId:this.groupId,attemptsMade:this.attemptsMade,opts:this.opts,processedOn:this.processedOn,finishedOn:this.finishedOn,failedReason:this.failedReason,stacktrace:this.stacktrace?[this.stacktrace]:null,returnvalue:this.returnvalue,timestamp:this.timestamp,orderMs:this.orderMs,status:this.status,progress:0}}changeDelay(e){return this.queue.changeDelay(this.id,e)}async promote(){await this.queue.promote(this.id)}async remove(){await this.queue.remove(this.id)}async retry(e){await this.queue.retry(this.id)}async updateData(e){await this.queue.updateData(this.id,e)}async update(e){await this.updateData(e)}async waitUntilFinished(e=0){return this.queue.waitUntilFinished(this.id,e)}async getChildren(){return this.queue.getFlowChildren(this.id)}async getChildrenValues(){return this.queue.getFlowResults(this.id)}async getDependenciesCount(){return this.queue.getFlowDependencies(this.id)}async getParent(){if(this.parentId)try{return await this.queue.getJob(this.parentId)}catch{return}}static fromReserved(t,n,r){return new e({queue:t,id:n.id,name:`groupmq`,data:n.data,groupId:n.groupId,attemptsMade:n.attempts,opts:{attempts:n.maxAttempts,delay:r?.delayMs},processedOn:r?.processedOn,finishedOn:r?.finishedOn,failedReason:r?.failedReason,stacktrace:r?.stacktrace,returnvalue:r?.returnvalue,timestamp:n.timestamp?n.timestamp:Date.now(),orderMs:n.orderMs,status:x(r?.status)})}static fromRawHash(t,n,r,i){let 
a=r.groupId??``,o=r.data?b(r.data):null,s=r.attempts?parseInt(r.attempts,10):0,c=r.maxAttempts?parseInt(r.maxAttempts,10):t.maxAttemptsDefault,l=r.timestamp?parseInt(r.timestamp,10):0,u=r.orderMs?parseInt(r.orderMs,10):void 0,d=r.delayUntil?parseInt(r.delayUntil,10):0,f=r.processedOn?parseInt(r.processedOn,10):void 0,p=r.finishedOn?parseInt(r.finishedOn,10):void 0,m=(r.failedReason??r.lastErrorMessage)||void 0,h=(r.stacktrace??r.lastErrorStack)||void 0,g=r.returnvalue?b(r.returnvalue):void 0,_=r.parentId||void 0;return new e({queue:t,id:n,name:`groupmq`,data:o,groupId:a,attemptsMade:s,opts:{attempts:c,delay:d&&d>Date.now()?d-Date.now():void 0},processedOn:f,finishedOn:p,failedReason:m,stacktrace:h,returnvalue:g,timestamp:l||Date.now(),orderMs:u,status:i??x(r.status),parentId:_})}static async fromStore(t,n){let r=`${t.namespace}:job:${n}`,i=await t.redis.hgetall(r);if(!i||Object.keys(i).length===0)throw Error(`Job ${n} not found`);let a=i.groupId??``,o=i.data?b(i.data):null,s=i.attempts?parseInt(i.attempts,10):0,c=i.maxAttempts?parseInt(i.maxAttempts,10):t.maxAttemptsDefault,l=i.timestamp?parseInt(i.timestamp,10):0,u=i.orderMs?parseInt(i.orderMs,10):void 0,d=i.delayUntil?parseInt(i.delayUntil,10):0,f=i.processedOn?parseInt(i.processedOn,10):void 0,p=i.finishedOn?parseInt(i.finishedOn,10):void 0,m=(i.failedReason??i.lastErrorMessage)||void 0,h=(i.stacktrace??i.lastErrorStack)||void 0,g=i.returnvalue?b(i.returnvalue):void 0,_=i.parentId||void 0,[v,y]=await Promise.all([t.redis.zscore(`${t.namespace}:processing`,n),t.redis.zscore(`${t.namespace}:delayed`,n)]),S=i.status;return v===null?y===null?a&&await t.redis.zscore(`${t.namespace}:g:${a}`,n)!==null&&(S=`waiting`):S=`delayed`:S=`active`,new e({queue:t,id:n,name:`groupmq`,data:o,groupId:a,attemptsMade:s,opts:{attempts:c,delay:d&&d>Date.now()?d-Date.now():void 0},processedOn:f,finishedOn:p,failedReason:m,stacktrace:h,returnvalue:g,timestamp:l||Date.now(),orderMs:u,status:x(S),parentId:_})}};function b(e){try{return 
JSON.parse(e)}catch{return null}}function x(e){return e&&[`latest`,`active`,`waiting`,`waiting-children`,`prioritized`,`completed`,`failed`,`delayed`,`paused`].includes(e)?e:`unknown`}var S=class{constructor(e,t){this.enabled=e,this.name=t}debug(...e){this.enabled&&console.debug(`[${this.name}]`,...e)}info(...e){this.enabled&&console.log(`[${this.name}]`,...e)}warn(...e){this.enabled&&console.warn(`⚠️ [${this.name}]`,...e)}error(...e){this.enabled&&console.error(`💥 [${this.name}]`,...e)}};const C=new WeakMap;function w(e){let t=f.default.dirname((0,p.fileURLToPath)(require(`url`).pathToFileURL(__filename).href)),n=[f.default.join(t,`${e}.lua`),f.default.join(t,`lua`,`${e}.lua`)];for(let e of n)if(d.default.existsSync(e))return e;return n[0]}async function T(e,t){let n=C.get(e);n||(n=new Map,C.set(e,n));let r=n.get(t);if(r)return r;let i=w(t),a=d.default.readFileSync(i,`utf8`),o=await e.script(`load`,a);return n.set(t,o),o}async function E(e,t,n,r){let i=await T(e,t);return e.evalsha(i,r,...n)}function D(e,...t){return[e,...t].join(`:`)}function O(e){try{return JSON.parse(e)}catch{return null}}var k=class{constructor(e){this._consecutiveEmptyReserves=0,this.eventsSubscribed=!1,this.waitingJobs=new Map,this.promoterRunning=!1,this.batchBuffer=[],this.flushing=!1,this._groupCleanupTracking=new Map,this.r=e.redis,this.rawNs=e.namespace,this.name=e.namespace,this.ns=`groupmq:${this.rawNs}`;let t=e.jobTimeoutMs??3e4;this.vt=Math.max(1,t),this.defaultMaxAttempts=e.maxAttempts??3,this.scanLimit=e.reserveScanLimit??20,this.keepCompleted=Math.max(0,e.keepCompleted??0),this.keepFailed=Math.max(0,e.keepFailed??0),this.schedulerLockTtlMs=e.schedulerLockTtlMs??1500,this.orderingDelayMs=e.orderingDelayMs??0,e.autoBatch&&(this.batchConfig=typeof e.autoBatch==`boolean`?{size:10,maxWaitMs:10}:{size:e.autoBatch.size??10,maxWaitMs:e.autoBatch.maxWaitMs??10}),this.logger=typeof e.logger==`object`?e.logger:new S(!!e.logger,this.namespace),this.r.on(`error`,e=>{this.logger.error(`Redis 
error (main):`,e)})}get redis(){return this.r}get namespace(){return this.ns}get rawNamespace(){return this.rawNs}get jobTimeoutMs(){return this.vt}get maxAttemptsDefault(){return this.defaultMaxAttempts}async add(e){let t=e.maxAttempts??this.defaultMaxAttempts,n=e.orderMs??Date.now(),r=Date.now(),i=e.jobId??(0,l.randomUUID)();if(e.repeat)return this.addRepeatingJob({...e,orderMs:n,maxAttempts:t});let a;if(e.delay!==void 0&&e.delay>0)a=e.delay;else if(e.runAt!==void 0){let t=e.runAt instanceof Date?e.runAt.getTime():e.runAt;a=Math.max(0,t-r)}let o=e.data===void 0?null:e.data;return this.batchConfig?new Promise((r,s)=>{this.batchBuffer.push({groupId:e.groupId,data:o,jobId:i,maxAttempts:t,delayMs:a,orderMs:n,resolve:r,reject:s}),this.batchBuffer.length>=this.batchConfig.size?this.flushBatch():this.batchTimer||=setTimeout(()=>this.flushBatch(),this.batchConfig.maxWaitMs)}):this.addSingle({...e,data:o,jobId:i,maxAttempts:t,orderMs:n,delayMs:a})}async addFlow(e){let t=e.parent.jobId??(0,l.randomUUID)(),n=e.parent.maxAttempts??this.defaultMaxAttempts,r=e.parent.orderMs??Date.now(),i=JSON.stringify(e.parent.data===void 0?null:e.parent.data),a=[],o=[];for(let t of e.children){let e=t.jobId??(0,l.randomUUID)(),n=t.maxAttempts??this.defaultMaxAttempts,r=t.orderMs??Date.now(),i=t.delay??0,s=JSON.stringify(t.data===void 0?null:t.data);a.push(e),o.push(e,t.groupId,s,n.toString(),r.toString(),i.toString())}let s=Date.now();return await E(this.r,`enqueue-flow`,[this.ns,t,e.parent.groupId,i,n.toString(),r.toString(),s.toString(),...o],1),new y({queue:this,id:t,groupId:e.parent.groupId,data:e.parent.data,status:`waiting-children`,attemptsMade:0,opts:{attempts:n},timestamp:s,orderMs:r})}async getFlowDependencies(e){let t=await this.r.hget(`${this.ns}:job:${e}`,`flowRemaining`);return t===null?null:parseInt(t,10)}async getFlowResults(e){let t=await this.r.hgetall(`${this.ns}:flow:results:${e}`),n=[];for(let[e,r]of 
Object.entries(t))try{n.push({jobId:e,result:JSON.parse(r)})}catch{n.push({jobId:e,result:r})}return n}async getFlowChildrenIds(e){return this.r.smembers(`${this.ns}:flow:children:${e}`)}async getFlowChildren(e){let t=await this.getFlowChildrenIds(e);if(t.length===0)return[];let n=this.r.multi();for(let e of t)n.hgetall(`${this.ns}:job:${e}`);let r=await n.exec(),i=[];for(let e=0;e<t.length;e++){let n=t[e],a=r?.[e]?.[1]||{};if(!a||Object.keys(a).length===0){this.logger.warn(`Skipping child job ${n} - not found (likely cleaned up)`);continue}let o=y.fromRawHash(this,n,a);i.push(o)}return i}async addSingle(e){let t=Date.now(),n=0;e.delayMs!==void 0&&e.delayMs>0&&(n=t+e.delayMs);let r=JSON.stringify(e.data),i=await E(this.r,`enqueue`,[this.ns,e.groupId,r,String(e.maxAttempts),String(e.orderMs),String(n),String(e.jobId),String(this.keepCompleted),String(t),String(this.orderingDelayMs)],1);if(Array.isArray(i)){let[e,t,n,r,a,o,s,c,l]=i;return y.fromRawHash(this,e,{id:e,groupId:t,data:n,attempts:r,maxAttempts:a,timestamp:o,orderMs:s,delayUntil:c,status:l},l)}return this.getJob(i)}async flushBatch(){if(this.batchTimer&&=(clearTimeout(this.batchTimer),void 0),this.batchBuffer.length===0||this.flushing)return;this.flushing=!0;let e=this.batchBuffer.splice(0);try{this.logger.debug(`Flushing batch of ${e.length} jobs`);let t=Date.now(),n=e.map(e=>({jobId:e.jobId,groupId:e.groupId,data:JSON.stringify(e.data),maxAttempts:e.maxAttempts,orderMs:e.orderMs,delayMs:e.delayMs})),r=await E(this.r,`enqueue-batch`,[this.ns,JSON.stringify(n),String(this.keepCompleted),String(t),String(this.orderingDelayMs)],1);for(let t=0;t<e.length;t++){let n=e[t],i=r[t];try{if(i&&i.length>=9){let[e,t,r,a,o,s,c,l,u]=i,d=y.fromRawHash(this,e,{id:e,groupId:t,data:r,attempts:a,maxAttempts:o,timestamp:s,orderMs:c,delayUntil:l,status:u},u);n.resolve(d)}else throw Error(`Invalid job data returned from batch enqueue`)}catch(e){n.reject(e instanceof Error?e:Error(String(e)))}}}catch(t){for(let n of e)n.reject(t 
instanceof Error?t:Error(String(t)))}finally{this.flushing=!1,this.batchBuffer.length>0&&setImmediate(()=>this.flushBatch())}}async reserve(){let e=Date.now(),t=await E(this.r,`reserve`,[this.ns,String(e),String(this.vt),String(this.scanLimit)],1);if(!t)return null;let n=t.split(`|||`);if(n.length!==10)return null;let r;try{r=JSON.parse(n[2])}catch(e){this.logger.warn(`Failed to parse job data: ${e.message}, raw: ${n[2]}`),r=null}let i=Number.parseInt(n[7],10);return{id:n[0],groupId:n[1],data:r,attempts:Number.parseInt(n[3],10),maxAttempts:Number.parseInt(n[4],10),seq:Number.parseInt(n[5],10),timestamp:Number.parseInt(n[6],10),orderMs:Number.isNaN(i)?Number.parseInt(n[6],10):i,score:Number(n[8]),deadlineAt:Number.parseInt(n[9],10)}}async getGroupJobCount(e){let t=`${this.ns}:g:${e}`;return await this.r.zcard(t)}async complete(e){await E(this.r,`complete`,[this.ns,e.id,e.groupId],1)}async completeWithMetadata(e,t,n){await E(this.r,`complete-with-metadata`,[this.ns,e.id,e.groupId,`completed`,String(n.finishedOn),JSON.stringify(t??null),String(this.keepCompleted),String(this.keepFailed),String(n.processedOn),String(n.finishedOn),String(n.attempts),String(n.maxAttempts)],1)}async completeAndReserveNextWithMetadata(e,t,n,r){let i=Date.now();try{let a=await E(this.r,`complete-and-reserve-next-with-metadata`,[this.ns,e,t,`completed`,String(r.finishedOn),JSON.stringify(n??null),String(this.keepCompleted),String(this.keepFailed),String(r.processedOn),String(r.finishedOn),String(r.attempts),String(r.maxAttempts),String(i),String(this.jobTimeoutMs)],1);if(!a)return null;let o=a.split(`|||`);if(o.length!==10)return this.logger.error(`Queue completeAndReserveNextWithMetadata: unexpected result format:`,a),null;let[s,,c,l,u,d,f,p,m,h]=o;return{id:s,groupId:t,data:JSON.parse(c),attempts:parseInt(l,10),maxAttempts:parseInt(u,10),seq:parseInt(d,10),timestamp:parseInt(f,10),orderMs:parseInt(p,10),score:parseFloat(m),deadlineAt:parseInt(h,10)}}catch(e){return this.logger.error(`Queue 
completeAndReserveNextWithMetadata error:`,e),null}}async isJobProcessing(e){return await this.r.zscore(`${this.ns}:processing`,e)!==null}async retry(e,t=0){return E(this.r,`retry`,[this.ns,e,String(t)],1)}async deadLetter(e,t){return E(this.r,`dead-letter`,[this.ns,e,t],1)}async recordCompleted(e,t,n){let r=n.processedOn??Date.now(),i=n.finishedOn??Date.now(),a=n.attempts??0,o=n.maxAttempts??this.defaultMaxAttempts;try{await E(this.r,`record-job-result`,[this.ns,e.id,`completed`,String(i),JSON.stringify(t??null),String(this.keepCompleted),String(this.keepFailed),String(r),String(i),String(a),String(o)],1)}catch(t){throw this.logger.error(`Error recording completion for job ${e.id}:`,t),t}}async recordAttemptFailure(e,t,n){let r=`${this.ns}:job:${e.id}`,i=n.processedOn??Date.now(),a=n.finishedOn??Date.now(),o=typeof t==`string`?t:t.message??`Error`,s=typeof t==`string`?`Error`:t.name??`Error`,c=typeof t==`string`?``:t.stack??``;await this.r.hset(r,`lastErrorMessage`,o,`lastErrorName`,s,`lastErrorStack`,c,`processedOn`,String(i),`finishedOn`,String(a))}async recordFinalFailure(e,t,n){let r=n.processedOn??Date.now(),i=n.finishedOn??Date.now(),a=n.attempts??0,o=n.maxAttempts??this.defaultMaxAttempts,s=typeof t==`string`?t:t.message??`Error`,c=typeof t==`string`?`Error`:t.name??`Error`,l=typeof t==`string`?``:t.stack??``,u=JSON.stringify({message:s,name:c,stack:l});try{await E(this.r,`record-job-result`,[this.ns,e.id,`failed`,String(i),u,String(this.keepCompleted),String(this.keepFailed),String(r),String(i),String(a),String(o)],1)}catch(t){throw this.logger.error(`Error recording final failure for job ${e.id}:`,t),t}}async getCompleted(e=this.keepCompleted){let t=`${this.ns}:completed`,n=await this.r.zrevrange(t,0,Math.max(0,e-1));if(n.length===0)return[];let r=this.r.multi();for(let e of n)r.hmget(`${this.ns}:job:${e}`,`groupId`,`data`,`returnvalue`,`processedOn`,`finishedOn`,`attempts`,`maxAttempts`);let i=await r.exec()??[];return 
n.map((e,t)=>{let[n,r,a,o,s,c,l]=i[t]?.[1]||[];return{id:e,groupId:n||``,data:r?O(r):null,returnvalue:a?O(a):null,processedOn:o?parseInt(o,10):void 0,finishedOn:s?parseInt(s,10):void 0,attempts:c?parseInt(c,10):0,maxAttempts:l?parseInt(l,10):this.defaultMaxAttempts}})}async getFailed(e=this.keepFailed){let t=`${this.ns}:failed`,n=await this.r.zrevrange(t,0,Math.max(0,e-1));if(n.length===0)return[];let r=this.r.multi();for(let e of n)r.hmget(`${this.ns}:job:${e}`,`groupId`,`data`,`failedReason`,`stacktrace`,`processedOn`,`finishedOn`,`attempts`,`maxAttempts`);let i=await r.exec()??[];return n.map((e,t)=>{let[n,r,a,o,s,c,l,u]=i[t]?.[1]||[];return{id:e,groupId:n||``,data:r?O(r):null,failedReason:a||``,stacktrace:o||void 0,processedOn:s?parseInt(s,10):void 0,finishedOn:c?parseInt(c,10):void 0,attempts:l?parseInt(l,10):0,maxAttempts:u?parseInt(u,10):this.defaultMaxAttempts}})}async getCompletedJobs(e=this.keepCompleted){let t=`${this.ns}:completed`,n=await this.r.zrevrange(t,0,Math.max(0,e-1));if(n.length===0)return[];let r=this.r.multi();for(let e of n)r.hgetall(`${this.ns}:job:${e}`);let i=await r.exec(),a=[];for(let e=0;e<n.length;e++){let t=n[e],r=i?.[e]?.[1]||{};if(!r||Object.keys(r).length===0){this.logger.warn(`Skipping completed job ${t} - not found (likely cleaned up)`);continue}let o=y.fromRawHash(this,t,r,`completed`);a.push(o)}return a}async getFailedJobs(e=this.keepFailed){let t=`${this.ns}:failed`,n=await this.r.zrevrange(t,0,Math.max(0,e-1));if(n.length===0)return[];let r=this.r.multi();for(let e of n)r.hgetall(`${this.ns}:job:${e}`);let i=await r.exec(),a=[];for(let e=0;e<n.length;e++){let t=n[e],r=i?.[e]?.[1]||{};if(!r||Object.keys(r).length===0){this.logger.warn(`Skipping failed job ${t} - not found (likely cleaned up)`);continue}let o=y.fromRawHash(this,t,r,`failed`);a.push(o)}return a}async getCompletedCount(){return this.r.zcard(`${this.ns}:completed`)}async getFailedCount(){return this.r.zcard(`${this.ns}:failed`)}async 
heartbeat(e,t=this.vt){return E(this.r,`heartbeat`,[this.ns,e.id,e.groupId,String(t)],1)}async cleanup(){let e=`${this.ns}:cleanup:lock`;try{if(await this.r.set(e,`1`,`PX`,6e4,`NX`)!==`OK`)return 0;let t=Date.now();return E(this.r,`cleanup`,[this.ns,String(t)],1)}catch{return 0}}getBlockTimeout(e,t){let n=.001;if(t){let e=t-Date.now();return e<=0||e<n*1e3?n:Math.min(e/1e3,5)}return Math.max(n,Math.min(e,5))}isConnectionError(e){if(!e)return!1;let t=`${e.message||``}`;return t===`Connection is closed.`||t.includes(`ECONNREFUSED`)}async reserveBlocking(e=5,t,n){let r=Date.now();if(await this.isPaused())return await A(50),null;if(!(this._consecutiveEmptyReserves>=3)){let e=await this.reserve();if(e)return this.logger.debug(`Immediate reserve successful (${Date.now()-r}ms)`),this._consecutiveEmptyReserves=0,e}let i=this.getBlockTimeout(e,t);this._consecutiveEmptyReserves%10==0&&this.logger.debug(`Starting blocking operation (timeout: ${i}s, consecutive empty: ${this._consecutiveEmptyReserves})`);let a=D(this.ns,`ready`);try{let e=Date.now(),t=await(n??this.r).bzpopmin(a,i),r=Date.now()-e;if(!t||t.length<3)return this.logger.debug(`Blocking timeout/empty (took ${r}ms)`),this._consecutiveEmptyReserves+=1,null;let[,o,s]=t;this._consecutiveEmptyReserves%10==0&&this.logger.debug(`Blocking result: group=${o}, score=${s} (took ${r}ms)`);let c=Date.now(),l=await this.reserveAtomic(o),u=Date.now()-c;if(l)this.logger.debug(`Successful job reserve after blocking: ${l.id} from group ${l.groupId} (reserve took ${u}ms)`),this._consecutiveEmptyReserves=0;else{this.logger.warn(`Blocking found group but reserve failed: group=${o} (reserve took ${u}ms)`);try{let e=`${this.ns}:g:${o}`,t=await this.r.zcard(e);t>0?(await this.r.zadd(a,Number(s),o),this.logger.debug(`Restored group ${o} to ready with score ${s} after failed atomic reserve (${t} jobs)`)):this.logger.warn(`Not restoring empty group ${o} - preventing poisoned group loop`)}catch{this.logger.warn(`Failed to check group ${o} job 
count, not restoring`)}return this._consecutiveEmptyReserves+=1,this.reserve()}return l}catch(e){let t=Date.now()-r;if(this.logger.error(`Blocking error after ${t}ms:`,e),this.isConnectionError(e))throw this.logger.error(`Connection error detected - rethrowing`),e;return this.logger.warn(`Falling back to regular reserve due to error`),this.reserve()}finally{let e=Date.now()-r;e>1e3&&this.logger.debug(`ReserveBlocking completed in ${e}ms`)}}async reserveAtomic(e){let t=Date.now(),n=await E(this.r,`reserve-atomic`,[this.ns,String(t),String(this.vt),String(e)],1);if(!n)return null;let r=n.split(`|||`);if(r.length<10)return null;let[i,a,o,s,c,l,u,d,f,p]=r,m=parseInt(u,10),h=parseInt(d,10);return{id:i,groupId:a,data:JSON.parse(o),attempts:parseInt(s,10),maxAttempts:parseInt(c,10),seq:parseInt(l,10),timestamp:m,orderMs:Number.isNaN(h)?m:h,score:parseFloat(f),deadlineAt:parseInt(p,10)}}async getReadyGroups(e=0,t=-1){return this.r.zrange(`${this.ns}:ready`,e,t)}async setGroupConfig(e,t){let n=`${this.ns}:config:${e}`,r=[];t.priority!==void 0&&r.push(`priority`,String(t.priority)),t.concurrency!==void 0&&r.push(`concurrency`,String(t.concurrency)),r.length>0&&await this.r.hset(n,...r)}async getGroupConfig(e){let t=`${this.ns}:config:${e}`,[n,r]=await this.r.hmget(t,`priority`,`concurrency`);return{priority:n?parseInt(n,10):1,concurrency:r?parseInt(r,10):1}}async setGroupConcurrency(e,t){let n=Math.max(1,Math.floor(t));await this.r.hset(`${this.ns}:config:${e}`,`concurrency`,String(n))}async getGroupConcurrency(e){let t=await this.r.hget(`${this.ns}:config:${e}`,`concurrency`);return t?parseInt(t,10):1}async getGroupOldestTimestamp(e){let t=`${this.ns}:g:${e}`,n=await this.r.zrange(t,0,0);if(!n||n.length===0)return;let r=n[0],i=await this.r.hget(`${this.ns}:job:${r}`,`timestamp`);return i?parseInt(i,10):void 0}async reserveBatch(e=16){let t=Date.now(),n=await E(this.r,`reserve-batch`,[this.ns,String(t),String(this.vt),String(Math.max(1,e))],1),r=[];for(let e of 
n||[]){if(!e)continue;let t=e.split(`|||`);t.length===10&&r.push({id:t[0],groupId:t[1],data:O(t[2]),attempts:parseInt(t[3],10),maxAttempts:parseInt(t[4],10),seq:parseInt(t[5],10),timestamp:parseInt(t[6],10),orderMs:parseInt(t[7],10),score:parseFloat(t[8]),deadlineAt:parseInt(t[9],10)})}return r}async getActiveCount(){return E(this.r,`get-active-count`,[this.ns],1)}async getWaitingCount(){return E(this.r,`get-waiting-count`,[this.ns],1)}async getDelayedCount(){return E(this.r,`get-delayed-count`,[this.ns],1)}async getActiveJobs(){return E(this.r,`get-active-jobs`,[this.ns],1)}async getWaitingJobs(){return E(this.r,`get-waiting-jobs`,[this.ns],1)}async getDelayedJobs(){return E(this.r,`get-delayed-jobs`,[this.ns],1)}async getUniqueGroups(){return E(this.r,`get-unique-groups`,[this.ns],1)}async getUniqueGroupsCount(){return E(this.r,`get-unique-groups-count`,[this.ns],1)}async getJob(e){return y.fromStore(this,e)}async setupSubscriber(){this.eventsSubscribed&&this.subscriber||(this.subscriber||(this.subscriber=this.r.duplicate(),this.subscriber.on(`message`,(e,t)=>{e===`${this.ns}:events`&&this.handleJobEvent(t)}),this.subscriber.on(`error`,e=>{this.logger.error(`Redis error (events subscriber):`,e)})),await this.subscriber.subscribe(`${this.ns}:events`),this.eventsSubscribed=!0)}handleJobEvent(e){try{let t=O(e);if(!t||typeof t.id!=`string`)return;let n=this.waitingJobs.get(t.id);if(!n||n.length===0)return;if(t.status===`completed`){let e=typeof t.result==`string`?O(t.result)??t.result:t.result;n.forEach(t=>t.resolve(e))}else if(t.status===`failed`){let e=typeof t.result==`string`?O(t.result)??{}:t.result??{},r=Error(e&&e.message||`Job failed`);e&&typeof e==`object`&&(typeof e.name==`string`&&(r.name=e.name),typeof e.stack==`string`&&(r.stack=e.stack)),n.forEach(e=>e.reject(r))}this.waitingJobs.delete(t.id)}catch(e){this.logger.error(`Failed to process job event:`,e)}}async waitUntilFinished(e,t=0){let n=await this.getJob(e),r=await 
n.getState();if(r===`completed`)return n.returnvalue;if(r===`failed`)throw Error(n.failedReason||`Job failed`);return await this.setupSubscriber(),new Promise((n,r)=>{let i,a,o=()=>{i&&clearTimeout(i);let t=this.waitingJobs.get(e);if(!t)return;let n=t.filter(e=>e!==a);n.length===0?this.waitingJobs.delete(e):this.waitingJobs.set(e,n)},s=e=>{o(),n(e)},c=e=>{o(),r(e)};a={resolve:s,reject:c};let l=this.waitingJobs.get(e)??[];l.push(a),this.waitingJobs.set(e,l),t>0&&(i=setTimeout(()=>{c(Error(`Timed out waiting for job ${e} to finish`))},t)),(async()=>{try{let t=await this.getJob(e),n=await t.getState();n===`completed`?s(t.returnvalue):n===`failed`&&c(Error(t.failedReason??`Job failed`))}catch{}})()})}async getJobsByStatus(e,t=0,n=-1){let r=n>=0?n-t+1:100,i=Math.min(r*2,500),a=new Map,o=[],s=async(e,t,n=!1)=>{try{let r=n?await this.r.zrevrange(e,0,i-1):await this.r.zrange(e,0,i-1);for(let e of r)a.set(e,t);o.push(...r)}catch{}},c=new Set(e);if(c.has(`active`)&&await s(`${this.ns}:processing`,`active`),c.has(`delayed`)&&await s(`${this.ns}:delayed`,`delayed`),c.has(`completed`)&&await s(`${this.ns}:completed`,`completed`,!0),c.has(`failed`)&&await s(`${this.ns}:failed`,`failed`,!0),c.has(`waiting`))try{let e=await this.r.smembers(`${this.ns}:groups`);if(e.length>0){let t=e.slice(0,Math.min(100,e.length)),n=this.r.multi(),r=Math.max(1,Math.ceil(i/t.length));for(let e of t)n.zrange(`${this.ns}:g:${e}`,0,r-1);let s=await n.exec();for(let e of s||[]){let t=e?.[1]||[];for(let e of t)a.set(e,`waiting`);o.push(...t)}}}catch{}let l=new Set,u=[];for(let e of o)l.has(e)||(l.add(e),u.push(e));let d=n>=0?u.slice(t,n+1):u.slice(t);if(d.length===0)return[];let f=this.r.multi();for(let e of d)f.hgetall(`${this.ns}:job:${e}`);let p=await f.exec(),m=[];for(let e=0;e<d.length;e++){let t=d[e],n=p?.[e]?.[1]||{};if(!n||Object.keys(n).length===0){this.logger.warn(`Skipping job ${t} - not found (likely cleaned up by retention)`);continue}let 
r=a.get(t),i=y.fromRawHash(this,t,n,r);m.push(i)}return m}async getJobCounts(){let[e,t,n,r,i]=await Promise.all([this.getActiveCount(),this.getWaitingCount(),this.getDelayedCount(),this.getCompletedCount(),this.getFailedCount()]);return{active:e,waiting:t,delayed:n,completed:r,failed:i,paused:0,"waiting-children":0,prioritized:0}}async checkStalledJobs(e,t,n){try{return await E(this.r,`check-stalled`,[this.ns,String(e),String(t),String(n)],1)||[]}catch(e){return this.logger.error(`Error checking stalled jobs:`,e),[]}}async startPromoter(){if(!(this.promoterRunning||this.orderingDelayMs<=0)){this.promoterRunning=!0,this.promoterLockId=(0,l.randomUUID)();try{this.promoterRedis=this.r.duplicate();try{await this.promoterRedis.config(`SET`,`notify-keyspace-events`,`Ex`),this.logger.debug(`Enabled Redis keyspace notifications for staging promoter`)}catch(e){this.logger.warn(`Failed to enable keyspace notifications. Promoter will use polling fallback.`,e)}let e=this.promoterRedis.options.db??0,t=`${this.ns}:stage:timer`,n=`__keyevent@${e}__:expired`;await this.promoterRedis.subscribe(n,e=>{e?this.logger.error(`Failed to subscribe to keyspace events:`,e):this.logger.debug(`Subscribed to ${n}`)}),this.promoterRedis.on(`message`,async(e,r)=>{e===n&&r===t&&await this.runPromotion()}),this.promoterInterval=setInterval(async()=>{await this.runPromotion()},100),await this.runPromotion(),this.logger.debug(`Staging promoter started`)}catch(e){this.logger.error(`Failed to start promoter:`,e),this.promoterRunning=!1,await this.stopPromoter()}}}async runPromotion(){if(!this.promoterRunning)return;let e=`${this.ns}:promoter:lock`;try{if(await this.r.set(e,this.promoterLockId,`PX`,3e4,`NX`)===`OK`)try{let e=await E(this.r,`promote-staged`,[this.ns,String(Date.now()),`100`],1);e>0&&this.logger.debug(`Promoted ${e} staged jobs`)}finally{await this.r.get(e)===this.promoterLockId&&await this.r.del(e)}}catch(e){this.logger.error(`Error during promotion:`,e)}}async 
stopPromoter(){if(this.promoterRunning){if(this.promoterRunning=!1,this.promoterInterval&&=(clearInterval(this.promoterInterval),void 0),this.promoterRedis){try{await this.promoterRedis.unsubscribe(),await this.promoterRedis.quit()}catch{try{this.promoterRedis.disconnect()}catch{}}this.promoterRedis=void 0}this.logger.debug(`Staging promoter stopped`)}}async close(){if(this.batchConfig&&this.batchBuffer.length>0&&(this.logger.debug(`Flushing ${this.batchBuffer.length} pending batched jobs before close`),await this.flushBatch()),await this.stopPromoter(),this.subscriber){try{await this.subscriber.unsubscribe(`${this.ns}:events`),await this.subscriber.quit()}catch{try{this.subscriber.disconnect()}catch{}}this.subscriber=void 0,this.eventsSubscribed=!1}if(this.waitingJobs.size>0){let e=Error(`Queue closed`);this.waitingJobs.forEach(t=>{t.forEach(t=>t.reject(e))}),this.waitingJobs.clear()}try{await this.r.quit()}catch{try{this.r.disconnect()}catch{}}}get pausedKey(){return`${this.ns}:paused`}async pause(){await this.r.set(this.pausedKey,`1`)}async resume(){await this.r.del(this.pausedKey)}async isPaused(){return await this.r.get(this.pausedKey)!==null}async waitForEmpty(e=6e4){let t=Date.now();for(;Date.now()-t<e;)try{if(await E(this.r,`is-empty`,[this.ns],1)===1)return await A(0),!0;await A(200)}catch(e){if(this.isConnectionError(e)){this.logger.warn(`Redis connection error in waitForEmpty, retrying...`),await A(1e3);continue}throw e}return!1}async cleanupPoisonedGroup(e){if(Math.random()>.01)return`skipped`;let t=this._groupCleanupTracking.get(e)||0,n=Date.now();if(n-t<1e4)return`throttled`;if(this._groupCleanupTracking.set(e,n),this._groupCleanupTracking.size>1e3){let e=n-6e4;for(let[t,n]of this._groupCleanupTracking.entries())n<e&&this._groupCleanupTracking.delete(t)}try{let t=await E(this.r,`cleanup-poisoned-group`,[this.ns,e,String(n)],1);return t===`poisoned`?this.logger.warn(`Removed poisoned group ${e} from ready queue`):t===`empty`?this.logger.warn(`Removed 
empty group ${e} from ready queue`):t===`locked`&&Math.random()<.1&&this.logger.debug(`Detected group ${e} is locked by another worker (this is normal with high concurrency)`),t}catch(t){return this.logger.error(`Error cleaning up group ${e}:`,t),`error`}}schedulerLockKey(){return`${this.ns}:sched:lock`}async acquireSchedulerLock(e=1500){try{return await this.r.set(this.schedulerLockKey(),`1`,`PX`,e,`NX`)===`OK`}catch{return!1}}async runSchedulerOnce(e=Date.now()){await this.acquireSchedulerLock(this.schedulerLockTtlMs)&&(await this.promoteDelayedJobsBounded(32,e),await this.processRepeatingJobsBounded(16,e))}async promoteDelayedJobsBounded(e=256,t=Date.now()){let n=0;for(let r=0;r<e;r++)try{let e=await E(this.r,`promote-delayed-one`,[this.ns,String(t)],1);if(!e||e<=0)break;n+=e}catch{break}return n}async processRepeatingJobsBounded(e=128,t=Date.now()){let n=`${this.ns}:repeat:schedule`,r=0;for(let i=0;i<e;i++){let e=await this.r.zrangebyscore(n,0,t,`LIMIT`,0,1);if(!e||e.length===0)break;let i=e[0];try{let e=`${this.ns}:repeat:${i}`,a=await this.r.get(e);if(!a){await this.r.zrem(n,i);continue}let o=JSON.parse(a);if(o.removed){await this.r.zrem(n,i),await this.r.del(e);continue}await this.r.zrem(n,i);let s;s=`every`in o.repeat?t+o.repeat.every:this.getNextCronTime(o.repeat.pattern,t),o.nextRunTime=s,o.lastRunTime=t,await this.r.set(e,JSON.stringify(o)),await this.r.zadd(n,s,i),await E(this.r,`enqueue`,[this.ns,o.groupId,JSON.stringify(o.data),String(o.maxAttempts??this.defaultMaxAttempts),String(o.orderMs??t),`0`,String((0,l.randomUUID)()),String(this.keepCompleted)],1),r++}catch(e){this.logger.error(`Error processing repeating job ${i}:`,e),await this.r.zrem(n,i)}}return r}async promoteDelayedJobs(){try{return await E(this.r,`promote-delayed-jobs`,[this.ns,String(Date.now())],1)}catch(e){return this.logger.error(`Error promoting delayed jobs:`,e),0}}async changeDelay(e,t){let n=t>0?Date.now()+t:0;try{return await 
E(this.r,`change-delay`,[this.ns,e,String(n),String(Date.now())],1)===1}catch(t){return this.logger.error(`Error changing delay for job ${e}:`,t),!1}}async promote(e){return this.changeDelay(e,0)}async remove(e){try{return await E(this.r,`remove`,[this.ns,e],1)===1}catch(t){return this.logger.error(`Error removing job ${e}:`,t),!1}}async clean(e,t,n){let r=Date.now()-e;try{return await E(this.r,`clean-status`,[this.ns,n,String(r),String(Math.max(0,Math.min(t,1e5)))],1)??0}catch(e){return console.log(`HERE?`,e),this.logger.error(`Error cleaning ${n} jobs:`,e),0}}async updateData(e,t){let n=`${this.ns}:job:${e}`;if(!await this.r.exists(n))throw Error(`Job ${e} not found`);let r=JSON.stringify(t===void 0?null:t);await this.r.hset(n,`data`,r)}async addRepeatingJob(e){if(!e.repeat)throw Error(`Repeat options are required for repeating jobs`);let t=Date.now(),n=`${e.groupId}:${JSON.stringify(e.repeat)}:${t}:${Math.random().toString(36).slice(2)}`,r;r=`every`in e.repeat?t+e.repeat.every:this.getNextCronTime(e.repeat.pattern,t);let i={groupId:e.groupId,data:e.data===void 0?null:e.data,maxAttempts:e.maxAttempts??this.defaultMaxAttempts,orderMs:e.orderMs,repeat:e.repeat,nextRunTime:r,lastRunTime:null,removed:!1},a=`${this.ns}:repeat:${n}`;await this.r.set(a,JSON.stringify(i)),await this.r.zadd(`${this.ns}:repeat:schedule`,r,n);let o=`${this.ns}:repeat:lookup:${e.groupId}:${JSON.stringify(e.repeat)}`;await this.r.set(o,n);let s=`repeat:${n}`,c=`${this.ns}:job:${s}`;try{await this.r.hmset(c,`id`,s,`groupId`,i.groupId,`data`,JSON.stringify(i.data),`attempts`,`0`,`maxAttempts`,String(i.maxAttempts),`seq`,`0`,`timestamp`,String(Date.now()),`orderMs`,String(i.orderMs??t),`status`,`waiting`)}catch{}return y.fromStore(this,s)}getNextCronTime(e,t){try{return u.default.parseExpression(e,{currentDate:new Date(t)}).next().getTime()}catch{throw Error(`Invalid cron pattern: ${e}`)}}async removeRepeatingJob(e,t){try{let n=`${this.ns}:repeat:lookup:${e}:${JSON.stringify(t)}`,r=await 
this.r.get(n);if(!r)return!1;let i=`${this.ns}:repeat:${r}`,a=`${this.ns}:repeat:schedule`,o=await this.r.get(i);if(!o)return await this.r.del(n),!1;let s=JSON.parse(o);s.removed=!0,await this.r.set(i,JSON.stringify(s)),await this.r.zrem(a,r),await this.r.del(n);try{let e=`repeat:${r}`;await this.r.del(`${this.ns}:job:${e}`)}catch{}return!0}catch(e){return this.logger.error(`Error removing repeating job:`,e),!1}}};function A(e){return new Promise(t=>setTimeout(t,e))}var j=class{constructor(e){this.value=void 0,this.next=null,this.value=e}},M=class{constructor(){this.length=0,this.head=null,this.tail=null}push(e){let t=new j(e);return this.length?this.tail.next=t:this.head=t,this.tail=t,this.length+=1,t}shift(){if(!this.length)return null;let e=this.head;return this.head=this.head.next,--this.length,e}},N=class{constructor(e=!1){this.ignoreErrors=e,this.queue=new M,this.pending=new Set,this.newPromise()}add(e){this.pending.add(e),e.then(t=>{this.pending.delete(e),this.queue.length===0&&this.resolvePromise(t),this.queue.push(t)}).catch(t=>{this.pending.delete(e),this.ignoreErrors?(this.queue.length===0&&this.resolvePromise(void 0),this.queue.push(void 0)):this.rejectPromise(t)})}async waitAll(){await Promise.all(this.pending)}numTotal(){return this.pending.size+this.queue.length}numPending(){return this.pending.size}numQueued(){return this.queue.length}resolvePromise(e){this.resolve(e),this.newPromise()}rejectPromise(e){this.reject(e),this.newPromise()}newPromise(){this.nextPromise=new Promise((e,t)=>{this.resolve=e,this.reject=t})}async wait(){return this.nextPromise}async fetch(){if(!(this.pending.size===0&&this.queue.length===0)){for(;this.queue.length===0;)try{await this.wait()}catch(e){this.ignoreErrors||console.error(`Unexpected Error in AsyncFifoQueue`,e)}return this.queue.shift()?.value}}},P=class extends Error{constructor(e){super(e),this.name=`UnrecoverableError`}},F=class{constructor(){this.listeners=new Map}on(e,t){return 
this.listeners.has(e)||this.listeners.set(e,[]),this.listeners.get(e).push(t),this}off(e,t){let n=this.listeners.get(e);if(n){let e=n.indexOf(t);e!==-1&&n.splice(e,1)}return this}emit(e,...t){let n=this.listeners.get(e);if(n&&n.length>0){for(let r of n)try{r(...t)}catch(t){console.error(`Error in event listener for '${String(e)}':`,t)}return!0}return!1}removeAllListeners(e){return e?this.listeners.delete(e):this.listeners.clear(),this}};/* Default retry backoff: exponential (2^(attempt-1)*500ms) capped at 30s, plus up to 25% random jitter. Second parameter (the error) is accepted but unused. */const I=(e,t)=>{let n=Math.min(3e4,2**(e-1)*500);return n+Math.floor(n*.25*Math.random())};/* Worker: reserves jobs from a Queue and runs the user handler with heartbeats, retry/backoff, dead-lettering, stalled-job recovery and graceful shutdown. Extends the minimal event emitter whose tail ends above. */var L=class extends F{constructor(e){if(super(),this.stopping=!1,this.ready=!1,this.closed=!1,this.blockingClient=null,this.jobsInProgress=new Set,this.lastJobPickupTime=Date.now(),this.totalJobsProcessed=0,this.blockingStats={totalBlockingCalls:0,consecutiveEmptyReserves:0,lastActivityTime:Date.now()},this.emptyReserveBackoffMs=0,!e.handler||typeof e.handler!=`function`)throw Error(`Worker handler must be a function`);this.opts=e,this.q=e.queue,this.name=e.name??this.q.name,this.logger=typeof e.logger==`object`?e.logger:new S(!!e.logger,this.name),this.handler=e.handler;let t=this.q.jobTimeoutMs??3e4;this.hbMs=e.heartbeatMs??Math.max(1e3,Math.floor(t/3)),this.onError=e.onError,this.maxAttempts=e.maxAttempts??this.q.maxAttemptsDefault??3,this.backoff=e.backoff??I,this.enableCleanup=e.enableCleanup??!0,this.cleanupMs=e.cleanupIntervalMs??6e4,this.schedulerMs=e.schedulerIntervalMs??1e3,this.blockingTimeoutSec=e.blockingTimeoutSec??5,this.concurrency=Math.max(1,e.concurrency??1),this.stalledInterval=e.stalledInterval??(this.concurrency>50?6e4:3e4),this.maxStalledCount=e.maxStalledCount??(this.concurrency>50?2:1),this.stalledGracePeriod=e.stalledGracePeriod??5e3,this.setupRedisEventHandlers(),this.q.orderingDelayMs>0&&this.q.startPromoter().catch(e=>{this.logger.error(`Failed to start staging promoter:`,e)}),e.autoStart!==!1&&this.run()}get isClosed(){return this.closed}/* Adds up to t (default 10%) random jitter to an interval, used to de-synchronize periodic timers. */addJitter(e,t=.1){return e+Math.random()*e*t}/* Mirrors the queue's Redis connection state onto this worker's ready flag and re-emits close/error/ready events. */setupRedisEventHandlers(){let 
e=this.q.redis;e&&(this.redisCloseHandler=()=>{this.ready=!1,this.emit(`ioredis:close`)},this.redisErrorHandler=e=>{this.emit(`error`,e)},this.redisReadyHandler=()=>{!this.ready&&!this.stopping&&(this.ready=!0,this.emit(`ready`))},e.on(`close`,this.redisCloseHandler),e.on(`error`,this.redisErrorHandler),e.on(`ready`,this.redisReadyHandler))}/* Starts the main loop once; repeated calls return the same in-flight promise. */async run(){if(this.runLoopPromise)return this.runLoopPromise;let e=this._runLoop();return this.runLoopPromise=e,e}/* Main loop: creates a dedicated blocking Redis client (duplicate with capped exponential retryStrategy), starts cleanup/scheduler/stalled timers, then fetches and processes jobs until stopped. */async _runLoop(){this.logger.info(`🚀 Worker ${this.name} starting...`);let e=this.opts.strategyPollInterval??50;try{this.blockingClient=this.q.redis.duplicate({enableAutoPipelining:!0,maxRetriesPerRequest:null,retryStrategy:e=>Math.max(Math.min(Math.exp(e)*1e3,2e4),1e3)}),this.blockingClient.on(`error`,e=>{this.q.isConnectionError(e)?this.logger.warn(`Blocking client connection error:`,e.message):this.logger.error(`Blocking client error (non-connection):`,e),this.emit(`error`,e instanceof Error?e:Error(String(e)))}),this.blockingClient.on(`close`,()=>{!this.stopping&&!this.closed&&this.logger.warn(`Blocking client disconnected, will reconnect on next operation`)}),this.blockingClient.on(`reconnecting`,()=>{!this.stopping&&!this.closed&&this.logger.info(`Blocking client reconnecting...`)}),this.blockingClient.on(`ready`,()=>{!this.stopping&&!this.closed&&this.logger.info(`Blocking client ready`)})}catch(e){this.logger.error(`Failed to create blocking client:`,e),this.blockingClient=null}if(this.enableCleanup){this.cleanupTimer=setInterval(async()=>{try{await this.q.cleanup()}catch(e){this.onError?.(e)}},this.addJitter(this.cleanupMs));let e=Math.min(this.schedulerMs,this.cleanupMs);this.schedulerTimer=setInterval(async()=>{try{await this.q.runSchedulerOnce()}catch{}},this.addJitter(e))}this.startStalledChecker();let t=0,n=new 
/* n: appears to be an in-flight promise pool bounded by this.concurrency — TODO confirm against the N class definition (outside this view). */N(!0);for(;!this.stopping||n.numTotal()>0;)try{for(;!this.stopping&&!(n.numTotal()>=this.concurrency);){this.blockingStats.totalBlockingCalls++,this.blockingStats.totalBlockingCalls>=1e9&&(this.blockingStats.totalBlockingCalls=0),this.logger.debug(`Fetching job (call #${this.blockingStats.totalBlockingCalls}, processing: ${this.jobsInProgress.size}/${this.concurrency}, queue: ${n.numTotal()} (queued: ${n.numQueued()}, pending: ${n.numPending()}), total: ${n.numTotal()}/${this.concurrency})...`);let r;if(this.opts.strategy)r=(async()=>{let t=await this.opts.strategy.getNextGroup(this.q);return t?await this.q.reserveAtomic(t)||null:(await this.delay(e),null)})();else{let e=this.concurrency-n.numTotal();if(e>0&&n.numTotal()===0){let r=Math.min(e,8),i=await this.q.reserveBatch(r);if(i.length>0){this.logger.debug(`Batch reserved ${i.length} jobs`);for(let e of i)n.add(Promise.resolve(e));t=0,this.lastJobPickupTime=Date.now(),this.blockingStats.consecutiveEmptyReserves=0,this.blockingStats.lastActivityTime=Date.now(),this.emptyReserveBackoffMs=0;continue}}let i=this.blockingStats.consecutiveEmptyReserves>=2&&n.numTotal()===0&&this.jobsInProgress.size===0,a=this.blockingTimeoutSec;r=i?this.q.reserveBlocking(a,void 0,this.blockingClient??void 0):this.q.reserve()}n.add(r);let i=await r;if(i)t=0,this.lastJobPickupTime=Date.now(),this.blockingStats.consecutiveEmptyReserves=0,this.blockingStats.lastActivityTime=Date.now(),this.emptyReserveBackoffMs=0,this.logger.debug(`Fetched job ${i.id} from group ${i.groupId}`);else{if(this.opts.strategy&&n.numTotal()===0&&this.jobsInProgress.size===0)break;this.blockingStats.consecutiveEmptyReserves++,this.blockingStats.consecutiveEmptyReserves%50==0&&this.logger.debug(`No job available (consecutive empty: ${this.blockingStats.consecutiveEmptyReserves})`);let e=this.concurrency>=100?5:3;if(this.blockingStats.consecutiveEmptyReserves>e&&n.numTotal()===0&&this.jobsInProgress.size===0){let 
e=this.concurrency>=100?2e3:5e3;this.emptyReserveBackoffMs===0?this.emptyReserveBackoffMs=this.concurrency>=100?100:50:this.emptyReserveBackoffMs=Math.min(e,Math.max(100,this.emptyReserveBackoffMs*1.2)),this.blockingStats.consecutiveEmptyReserves%20==0&&this.logger.debug(`Applying backoff: ${Math.round(this.emptyReserveBackoffMs)}ms (consecutive empty: ${this.blockingStats.consecutiveEmptyReserves}, jobs in progress: ${this.jobsInProgress.size})`),await this.delay(this.emptyReserveBackoffMs)}if(n.numTotal()===0&&this.jobsInProgress.size===0||n.numTotal()>0||this.jobsInProgress.size>0)break}}let r;do r=await n.fetch()??void 0;while(!r&&n.numQueued()>0);if(r&&typeof r==`object`&&`id`in r){this.totalJobsProcessed++,this.logger.debug(`Processing job ${r.id} from group ${r.groupId} immediately`);let e=this.processJob(r,()=>n.numTotal()<=this.concurrency,this.jobsInProgress);n.add(e)}}catch(e){if(this.stopping)return;if(this.q.isConnectionError(e))if(t++,this.logger.error(`Connection error (retry ${t}/10):`,e),t>=10)this.logger.error(`⚠️ Max connection retries (10) exceeded! 
Worker will continue but may be experiencing persistent Redis issues.`),this.emit(`error`,Error(`Max connection retries (10) exceeded - worker continuing with backoff`)),await this.delay(2e4),t=0;else{let e=Math.max(Math.min(Math.exp(t)*1e3,2e4),1e3);this.logger.debug(`Waiting ${Math.round(e)}ms before retry (exponential backoff)`),await this.delay(e)}else this.logger.error(`Worker loop error (non-connection, continuing):`,e),this.emit(`error`,e instanceof Error?e:Error(String(e))),t=0,await this.delay(100);this.onError?.(e)}this.logger.info(`Stopped`)}/* Promise-based sleep. */async delay(e){return new Promise(t=>setTimeout(t,e))}/* Tracks the job in jobsInProgress while processSingleJob runs; if an atomically chained next job is returned, swaps tracking over to it before releasing the original entry. */async processJob(e,t,n){let r=Array.from(n).find(t=>t.job.id===e.id),i;r?(r.ts=Date.now(),i=r):(i={job:e,ts:Date.now()},n.add(i));try{let r=await this.processSingleJob(e,t);if(r&&typeof r==`object`&&`id`in r&&`groupId`in r){let e={job:r,ts:Date.now()};return n.add(e),n.delete(i),r}return r}finally{n.has(i)&&n.delete(i)}}/* Completes a job; when capacity remains (n?.() truthy) it atomically completes and reserves the next job of the same group, occasionally yielding (10% of the time) to avoid starving other workers. */async completeJob(e,t,n,r,i){if(n?.()){let n=await this.q.completeAndReserveNextWithMetadata(e.id,e.groupId,t,{processedOn:r||Date.now(),finishedOn:i||Date.now(),attempts:e.attempts,maxAttempts:e.maxAttempts});if(n)return this.logger.debug(`Got next job ${n.id} from same group ${n.groupId} atomically`),n;this.logger.debug(`Atomic chaining returned nil for job ${e.id} - job completed, but no next job chained`),Math.random()<.1&&await new Promise(e=>setTimeout(e,Math.random()*100))}else await this.q.completeWithMetadata(e,t,{processedOn:r||Date.now(),finishedOn:i||Date.now(),attempts:e.attempts,maxAttempts:e.maxAttempts})}/* Periodic stalled-job sweep driven by stalledInterval (disabled when <= 0). */startStalledChecker(){this.stalledInterval<=0||(this.stalledCheckTimer=setInterval(async()=>{try{await this.checkStalled()}catch(e){this.logger.error(`Error in stalled job checker:`,e),this.emit(`error`,e instanceof Error?e:Error(String(e)))}},this.stalledInterval))}/* Asks the queue for stalled jobs and emits 'stalled' for each recovered/failed triple [jobId, groupId, status]. */async checkStalled(){if(!(this.stopping||this.closed))try{let e=Date.now(),t=await 
this.q.checkStalledJobs(e,this.stalledGracePeriod,this.maxStalledCount);if(t.length>0)for(let e=0;e<t.length;e+=3){let n=t[e],r=t[e+1],i=t[e+2];i===`recovered`?(this.logger.info(`Recovered stalled job ${n} from group ${r}`),this.emit(`stalled`,n,r)):i===`failed`&&(this.logger.warn(`Failed stalled job ${n} from group ${r} (exceeded max stalled count)`),this.emit(`stalled`,n,r))}}catch(e){this.logger.error(`Error checking stalled jobs:`,e)}}/* Snapshot of throughput/backoff stats and the currently-processing job set. */getWorkerMetrics(){let e=Date.now();return{name:this.name,totalJobsProcessed:this.totalJobsProcessed,lastJobPickupTime:this.lastJobPickupTime,timeSinceLastJob:this.lastJobPickupTime>0?e-this.lastJobPickupTime:null,blockingStats:{...this.blockingStats},isProcessing:this.jobsInProgress.size>0,jobsInProgressCount:this.jobsInProgress.size,jobsInProgress:Array.from(this.jobsInProgress).map(t=>({jobId:t.job.id,groupId:t.job.groupId,processingTimeMs:e-t.ts}))}}/* Graceful shutdown: stop timers, wait up to e ms for in-flight jobs, quit/disconnect the blocking client, race the run loop to exit, emit 'graceful-timeout' for any jobs still running, then detach Redis listeners and emit 'closed'. */async close(e=3e4){this.stopping=!0,await this.delay(100),this.cleanupTimer&&clearInterval(this.cleanupTimer),this.schedulerTimer&&clearInterval(this.schedulerTimer),this.stalledCheckTimer&&clearInterval(this.stalledCheckTimer);let t=Date.now();for(;this.jobsInProgress.size>0&&Date.now()-t<e;)await z(100);if(this.blockingClient){try{this.jobsInProgress.size>0&&e>0?(this.logger.debug(`Gracefully closing blocking client (quit)...`),await this.blockingClient.quit()):(this.logger.debug(`Force closing blocking client (disconnect)...`),this.blockingClient.disconnect())}catch(e){this.logger.debug(`Error closing blocking client:`,e)}this.blockingClient=null}if(this.runLoopPromise){let t=this.jobsInProgress.size>0?e:2e3,n=new Promise(e=>{setTimeout(e,t)});try{await Promise.race([this.runLoopPromise,n])}catch(e){this.logger.warn(`Error while waiting for run loop to exit:`,e)}}if(this.jobsInProgress.size>0){this.logger.warn(`Worker stopped with ${this.jobsInProgress.size} jobs still processing after ${e}ms timeout.`);let t=Date.now();for(let e of 
this.jobsInProgress)this.emit(`graceful-timeout`,y.fromReserved(this.q,e.job,{processedOn:e.ts,finishedOn:t,status:`active`}))}this.jobsInProgress.clear(),this.ready=!1,this.closed=!0;try{let e=this.q.redis;e&&(this.redisCloseHandler&&e.off?.(`close`,this.redisCloseHandler),this.redisErrorHandler&&e.off?.(`error`,this.redisErrorHandler),this.redisReadyHandler&&e.off?.(`ready`,this.redisReadyHandler))}catch{}this.emit(`closed`)}/* Inspection helpers over the active job set. */getCurrentJob(){if(this.jobsInProgress.size===0)return null;let e=Array.from(this.jobsInProgress)[0],t=Date.now();return{job:y.fromReserved(this.q,e.job,{processedOn:e.ts,status:`active`}),processingTimeMs:t-e.ts}}getCurrentJobs(){let e=Date.now();return Array.from(this.jobsInProgress).map(t=>({job:y.fromReserved(this.q,t.job,{processedOn:t.ts,status:`active`}),processingTimeMs:e-t.ts}))}isProcessing(){return this.jobsInProgress.size>0}/* Convenience passthrough to the queue. */async add(e){return this.q.add(e)}/* Runs the user handler for one reserved job. Heartbeating is deferred (min(10% of timeout, 2s)) then runs at min(hbMs, timeout/3, 10s); on success completes (possibly chaining the next same-group job), on error delegates to handleJobFailure. */async processSingleJob(e,t){let n=Date.now(),r,i,a=()=>{let t=this.q.jobTimeoutMs||3e4,n=Math.min(this.hbMs,Math.floor(t/3),1e4);this.logger.debug(`Starting heartbeat for job ${e.id} (interval: ${n}ms, concurrency: ${this.concurrency})`),r=setInterval(async()=>{try{await this.q.heartbeat(e)===0&&(this.logger.warn(`Heartbeat failed for job ${e.id} - job may have been removed or completed elsewhere`),r&&clearInterval(r))}catch(t){let n=this.q.isConnectionError(t);(!n||!this.stopping)&&this.logger.error(`Heartbeat error for job ${e.id}:`,t instanceof Error?t.message:String(t)),this.onError?.(t,y.fromReserved(this.q,e,{status:`active`})),(!n||!this.stopping)&&this.emit(`error`,t instanceof Error?t:Error(String(t)))}},n)};try{let o=this.q.jobTimeoutMs||3e4,s=Math.min(o*.1,2e3);i=setTimeout(()=>{a()},s);let c=y.fromReserved(this.q,e,{processedOn:n,status:`active`}),l=await this.handler(c);i&&clearTimeout(i),r&&clearInterval(r);let u=Date.now(),d=await this.completeJob(e,l,t,n,u);return 
/* NOTE(review): the preceding 'return' ends its displayed line — if that break exists in the built file, ASI terminates the return and this tail (stats reset + 'completed' emit + returning the chained job) is dead code. Verify against the unwrapped dist output; it is most likely a rendering artifact of this view. */this.blockingStats.consecutiveEmptyReserves=0,this.emptyReserveBackoffMs=0,this.emit(`completed`,y.fromReserved(this.q,e,{processedOn:n,finishedOn:u,returnvalue:l,status:`completed`})),d}catch(t){i&&clearTimeout(i),r&&clearInterval(r),await this.handleJobFailure(t,e,n)}}/* Emits 'failed', then either retries with backoff(attempts) or dead-letters once attempts reach maxAttempts; UnrecoverableError (P) skips retries entirely, and a retry() result of -1 also dead-letters. */async handleJobFailure(e,t,n){let r=y.fromReserved(this.q,t,{processedOn:n,status:`active`});this.onError?.(e,r),this.blockingStats.consecutiveEmptyReserves=0,this.emptyReserveBackoffMs=0;try{this.emit(`error`,e instanceof Error?e:Error(String(e)))}catch{}let i=Date.now();this.emit(`failed`,y.fromReserved(this.q,t,{processedOn:n,finishedOn:i,failedReason:e instanceof Error?e.message:String(e),stacktrace:e instanceof Error||typeof e==`object`&&e?e.stack:void 0,status:`failed`}));let a=t.attempts+1;if(e instanceof P){this.logger.info(`Unrecoverable error for job ${t.id}: ${e instanceof Error?e.message:String(e)}. Skipping retries.`),await this.deadLetterJob(e,t,n,i,a);return}let o=this.backoff(a,e);if(a>=this.maxAttempts){await this.deadLetterJob(e,t,n,i,a);return}if(await this.q.retry(t.id,o)===-1){await this.deadLetterJob(e,t,n,i,t.maxAttempts);return}await this.recordFailureAttempt(e,t,n,i,a)}/* Records the final failure (best-effort) then moves the job to the dead-letter set. */async deadLetterJob(e,t,n,r,i){this.logger.info(`Dead lettering job ${t.id} from group ${t.groupId} (attempts: ${i}/${t.maxAttempts})`);let a=e instanceof Error?e:Error(String(e));try{await this.q.recordFinalFailure({id:t.id,groupId:t.groupId},{name:a.name,message:a.message,stack:a.stack},{processedOn:n,finishedOn:r,attempts:i,maxAttempts:t.maxAttempts,data:t.data})}catch(e){this.logger.warn(`Failed to record final failure`,e)}await this.q.deadLetter(t.id,t.groupId)}/* Best-effort bookkeeping of a non-final failed attempt. */async recordFailureAttempt(e,t,n,r,i){let a=e instanceof Error?e:Error(String(e));try{await this.q.recordAttemptFailure({id:t.id,groupId:t.groupId},{name:a.name,message:a.message,stack:a.stack},{processedOn:n,finishedOn:r,attempts:i,maxAttempts:t.maxAttempts})}catch(e){this.logger.warn(`Failed to record attempt failure`,e)}}};/* Exported Worker alias. */const R=L;/* z: module-level sleep helper used by close(). */function 
z(e){return new Promise(t=>setTimeout(t,e))}/* PriorityStrategy: chooses the next group to drain based on per-group priorities, supporting 'strict', 'weighted-random' (default) and 'aging' algorithms, with a short-lived priority cache and manual overrides. */var B=class{constructor(e={}){this.cache=new Map,this.overrides=new Map,this.algorithmConfig=e.algorithm??{type:`weighted-random`},this.defaultPriority=e.defaultPriority??1,this.cacheTtlMs=e.cacheTtlMs??5e3,this.onGetPriority=e.onGetPriority}setPriority(e,t){this.overrides.set(e,t)}clearPriority(e){this.overrides.delete(e)}/* Resolution order: manual override > unexpired cache entry > onGetPriority callback > group config (a config priority of exactly 1 falls back to defaultPriority). */async resolvePriority(e,t){let n=this.overrides.get(t);if(n!==void 0)return n;let r=Date.now(),i=this.cache.get(t);if(i&&i.expiresAt>r)return i.priority;let a=await e.getGroupConfig(t),o;return o=this.onGetPriority?await this.onGetPriority(t,a):a.priority===1?this.defaultPriority:a.priority,this.cacheTtlMs>0&&this.cache.set(t,{priority:o,expiresAt:r+this.cacheTtlMs}),o}/* NOTE(review): sorts the caller's array in place. */selectByStrict(e){return e.sort((e,t)=>t.priority-e.priority),e[0].groupId}/* Weighted-random pick; each group's weight is floored at minWeightRatio (default 0.1) of the max priority so low-priority groups are never starved. */selectByWeightedRandom(e){if(e.length===1)return e[0].groupId;let t=this.algorithmConfig.minWeightRatio??.1,n=Math.max(...e.map(e=>e.priority))*t,r=e.map(e=>({groupId:e.groupId,weight:Math.max(e.priority,n)})),i=r.reduce((e,t)=>e+t.weight,0),a=Math.random()*i;for(let e of r)if(a-=e.weight,a<=0)return e.groupId;return e[0].groupId}/* Aging: boosts priority by 1 per intervalMs (default 60s) of the group's oldest waiting job. */selectByAging(e){let t=Date.now(),n=this.algorithmConfig.intervalMs??6e4,r=e.map(e=>{let r=0;if(e.oldestTimestamp){let i=t-e.oldestTimestamp;r=Math.floor(i/n)}return{groupId:e.groupId,adjustedPriority:e.priority+r}});return r.sort((e,t)=>t.adjustedPriority-e.adjustedPriority),r[0].groupId}/* Chooses among up to 100 ready groups using the configured algorithm; returns null when none are ready. */async getNextGroup(e){let t=await e.getReadyGroups(0,100);if(t.length===0)return null;let n=await Promise.all(t.map(async t=>{let n={groupId:t,priority:await this.resolvePriority(e,t)};return this.algorithmConfig.type===`aging`&&(n.oldestTimestamp=await e.getGroupOldestTimestamp(t)),n}));switch(this.algorithmConfig.type){case`strict`:return this.selectByStrict(n);case`aging`:return this.selectByAging(n);case`weighted-random`:default:return 
/* NOTE(review): same ASI hazard as above — the 'return' ends its displayed line; confirm the built file keeps this expression on the same line. */this.selectByWeightedRandom(n)}}};/* CommonJS export surface. */exports.BullBoardGroupMQAdapter=g,exports.Job=y,exports.PriorityStrategy=B,exports.Queue=k,exports.UnrecoverableError=P,exports.Worker=R,exports.getWorkersStatus=v,exports.waitForQueueToEmpty=_;
2
2
  //# sourceMappingURL=index.js.map