@oss-autopilot/core 0.60.0 → 0.60.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -81,7 +81,7 @@ The gh CLI is recommended - install from https://cli.github.com`);return e}funct
81
81
  onRateLimit: (retryAfter, options) => {/* ... */}
82
82
  }
83
83
  })
84
- `);let u={},l=new o.Events(u);return u.on("secondary-limit",c.onSecondaryRateLimit),u.on("rate-limit",c.onRateLimit),u.on("error",d=>e.log.warn("Error in throttling-plugin limit handler",d)),c.retryLimiter.on("failed",async function(d,p){let[g,w,S]=p.args,{pathname:j}=new URL(S.url,"http://github.test");if(!(j.startsWith("/graphql")&&d.status!==401||d.status===403||d.status===429))return;let L=~~w.retryCount;w.retryCount=L,S.request.retryCount=L;let{wantRetry:F,retryAfter:H=0}=await(async function(){if(/\bsecondary rate\b/i.test(d.message)){let z=Number(d.response.headers["retry-after"])||g.fallbackSecondaryRateRetryAfter;return{wantRetry:await l.trigger("secondary-limit",z,S,e,L),retryAfter:z}}if(d.response.headers!=null&&d.response.headers["x-ratelimit-remaining"]==="0"||(d.response.data?.errors??[]).some(z=>z.type==="RATE_LIMITED")){let z=new Date(~~d.response.headers["x-ratelimit-reset"]*1e3).getTime(),R=Math.max(Math.ceil((z-Date.now())/1e3)+1,0);return{wantRetry:await l.trigger("rate-limit",R,S,e,L),retryAfter:R}}return{}})();if(F)return w.retryCount++,H*g.retryAfterBaseValue}),e.hook.wrap("request",yx.bind(null,c)),{}}var 
x_,bx,Bm,wx,E_,R_,Ir,kx,P_=b(()=>{x_=re(T_(),1),bx="0.0.0-development",Bm=()=>Promise.resolve();wx=["/orgs/{org}/invitations","/orgs/{org}/invitations/{invitation_id}","/orgs/{org}/teams/{team_slug}/discussions","/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments","/repos/{owner}/{repo}/collaborators/{username}","/repos/{owner}/{repo}/commits/{commit_sha}/comments","/repos/{owner}/{repo}/issues","/repos/{owner}/{repo}/issues/{issue_number}/comments","/repos/{owner}/{repo}/issues/{issue_number}/sub_issue","/repos/{owner}/{repo}/issues/{issue_number}/sub_issues/priority","/repos/{owner}/{repo}/pulls","/repos/{owner}/{repo}/pulls/{pull_number}/comments","/repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies","/repos/{owner}/{repo}/pulls/{pull_number}/merge","/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers","/repos/{owner}/{repo}/pulls/{pull_number}/reviews","/repos/{owner}/{repo}/releases","/teams/{team_id}/discussions","/teams/{team_id}/discussions/{discussion_number}/comments"];E_=Sx(wx),R_=E_.test.bind(E_),Ir={},kx=function(e,t){Ir.global=new e.Group({id:"octokit-global",maxConcurrent:10,...t}),Ir.auth=new e.Group({id:"octokit-auth",maxConcurrent:1,...t}),Ir.search=new e.Group({id:"octokit-search",maxConcurrent:1,minTime:2e3,...t}),Ir.write=new e.Group({id:"octokit-write",maxConcurrent:1,minTime:1e3,...t}),Ir.notifications=new e.Group({id:"octokit-notifications",maxConcurrent:1,minTime:3e3,...t})};ra.VERSION=bx;ra.triggersNotification=R_});function ia(e){return e.toLocaleTimeString("en-US",{hour12:!1})}function Ex(){return{onRateLimit:(e,t,n,o)=>{let r=t,i=new Date(Date.now()+e*1e3);return o<2?(v(na,`Rate limit hit (retry ${o+1}/2, waiting ${e}s, resets at ${ia(i)}) \u2014 ${r.method} ${r.url}`),!0):(v(na,`Rate limit exceeded, not retrying \u2014 ${r.method} ${r.url} (resets at ${ia(i)})`),!1)},onSecondaryRateLimit:(e,t,n,o)=>{let r=t,i=new Date(Date.now()+e*1e3);return o<1?(v(na,`Secondary rate limit hit (retry 
${o+1}/1, waiting ${e}s, resets at ${ia(i)}) \u2014 ${r.method} ${r.url}`),!0):(v(na,`Secondary rate limit exceeded, not retrying \u2014 ${r.method} ${r.url} (resets at ${ia(i)})`),!1)}}}function Ue(e){if(oa&&C_===e)return oa;let t=Ex();return oa=new Tx({auth:e,throttle:t}),C_=e,oa}async function Wm(e){let t=Ue(e),{data:n}=await t.rateLimit.get(),o=n.resources.search;return{remaining:o.remaining,limit:o.limit,resetAt:new Date(o.reset*1e3).toISOString()}}var na,Tx,oa,C_,Rn=b(()=>{"use strict";k_();P_();ye();na="github",Tx=S_.plugin(ra),oa=null,C_=null});function Px(e,t){if(!e||!t)return!0;let n=e.toLowerCase();return xx.has(n)?!0:n===t.toLowerCase()}function Cx(e,t){let n=new Date(e).getTime(),o=new Date(t).getTime();return Number.isNaN(n)||Number.isNaN(o)?e>t:n-o>=Rx}function O_(e){let{latestCommitDate:t,latestCommitAuthor:n,contributorUsername:o}=e;if(t)return Px(n,o)?t:void 0}function I_(e,t,n){return!(!e||!t||!Cx(e,t)||n&&e<n)}function A_(e,t){return!e||!t?!1:e>=t}function Ox(e){let{ciStatus:t,hasMergeConflict:n,hasUnrespondedComment:o,hasIncompleteChecklist:r,reviewDecision:i,lastMaintainerCommentDate:s,latestChangesRequestedDate:a,hasActionableCIFailure:c=!0}=e,u=O_(e),l=[];return o&&!I_(u,s,a)&&l.push("needs_response"),i==="changes_requested"&&a&&!A_(u,a)&&l.push("needs_changes"),t==="failing"&&c&&l.push("failing_ci"),n&&l.push("merge_conflict"),r&&l.push("incomplete_checklist"),l.length>0?l:void 0}function Jm(e){let t=Ix(e),n=Ox(e);return n?{...t,actionReasons:n}:t}function Ix(e){let{ciStatus:t,hasMergeConflict:n,hasUnrespondedComment:o,hasIncompleteChecklist:r,reviewDecision:i,daysSinceActivity:s,dormantThreshold:a,approachingThreshold:c,lastMaintainerCommentDate:u,latestChangesRequestedDate:l,hasActionableCIFailure:d=!0}=e,p="active";s>=a?p="dormant":s>=c&&(p="approaching_dormant");let g=O_(e);return 
o?I_(g,u,l)?t==="failing"&&d?{status:"needs_addressing",actionReason:"failing_ci",stalenessTier:p}:{status:"waiting_on_maintainer",waitReason:"changes_addressed",stalenessTier:p}:{status:"needs_addressing",actionReason:"needs_response",stalenessTier:p}:i==="changes_requested"&&l?A_(g,l)?t==="failing"&&d?{status:"needs_addressing",actionReason:"failing_ci",stalenessTier:p}:{status:"waiting_on_maintainer",waitReason:"changes_addressed",stalenessTier:p}:{status:"needs_addressing",actionReason:"needs_changes",stalenessTier:p}:t==="failing"?d?s>=5?{status:"waiting_on_maintainer",waitReason:"stale_ci_failure",stalenessTier:p}:{status:"needs_addressing",actionReason:"failing_ci",stalenessTier:p}:{status:"waiting_on_maintainer",waitReason:"ci_blocked",stalenessTier:p}:n?{status:"needs_addressing",actionReason:"merge_conflict",stalenessTier:p}:r?{status:"needs_addressing",actionReason:"incomplete_checklist",stalenessTier:p}:i==="approved"&&(t==="passing"||t==="unknown")?{status:"waiting_on_maintainer",waitReason:"pending_merge",stalenessTier:p}:{status:"waiting_on_maintainer",waitReason:"pending_review",stalenessTier:p}}var xx,Rx,Km=b(()=>{"use strict";xx=new Set(["autofix-ci[bot]","prettier-ci[bot]","pre-commit-ci[bot]"]),Rx=120*1e3});async function sa(e,t,n){let o=0,r=!1,i=async()=>{for(;o<e.length&&!r;){let a=e[o++];try{await t(a)}catch(c){throw r=!0,c}}},s=Math.min(n,e.length);await Promise.all(Array.from({length:s},()=>i()))}var Xm=b(()=>{"use strict"});async function wt(e,t=100,n=10){let o=[];for(let r=1;r<=n;r++){let{data:i}=await e(r);if(o.push(...i),i.length<t)break}return o}var Ao=b(()=>{"use strict"});function Bt(){return Ym||(Ym=new ca),Ym}async function Do(e,t,n){let o=e.getInflight(t);if(o)return E(mr,`Dedup hit for ${t}`),await o;let i=(async()=>{let a={},c=e.get(t);c&&(a["if-none-match"]=c.etag);try{let u=await n(a),l=u.headers?.etag;return l&&e.set(t,l,u.data),u.data}catch(u){if(Dx(u)){let l=e.get(t);if(l)return E(mr,`304 cache hit for ${t}`),l.body}throw 
u}})(),s=e.setInflight(t,i);try{return await i}finally{s()}}async function ua(e,t,n,o){let r=e.getIfFresh(t,n);if(r)return E(mr,`Time-based cache hit for ${t}`),r;let i=await o();return e.set(t,"",i),i}function Dx(e){return Ze(e)===304}var ct,aa,D_,mr,Ax,ca,Ym,Pn=b(()=>{"use strict";ct=re(require("fs"),1),aa=re(require("path"),1),D_=re(require("crypto"),1);Ve();ye();be();mr="http-cache",Ax=1440*60*1e3,ca=class{cacheDir;inflightRequests=new Map;constructor(t){this.cacheDir=t??Zs()}keyFor(t){return D_.createHash("sha256").update(t).digest("hex")}pathFor(t){return aa.join(this.cacheDir,`${this.keyFor(t)}.json`)}getIfFresh(t,n){let o=this.get(t);if(!o)return null;let r=Date.now()-new Date(o.cachedAt).getTime();return!Number.isFinite(r)||r<0||r>n?null:o.body}get(t){let n=this.pathFor(t);try{let o=ct.readFileSync(n,"utf-8"),r=JSON.parse(o);return r.url!==t?(E(mr,`Cache collision detected for ${t}, ignoring`),null):r}catch{return null}}set(t,n,o){let r={etag:n,url:t,body:o,cachedAt:new Date().toISOString()};try{ct.writeFileSync(this.pathFor(t),JSON.stringify(r),{encoding:"utf-8",mode:384}),E(mr,`Cached response for ${t}`)}catch(i){E(mr,`Failed to write cache for ${t}`,i)}}hasInflight(t){return this.inflightRequests.has(t)}getInflight(t){return this.inflightRequests.get(t)}setInflight(t,n){return this.inflightRequests.set(t,n),()=>{this.inflightRequests.delete(t)}}evictStale(t=Ax){let n=0;try{let o=ct.readdirSync(this.cacheDir),r=Date.now();for(let i of o){if(!i.endsWith(".json"))continue;let s=aa.join(this.cacheDir,i);try{let a=ct.readFileSync(s,"utf-8"),c=JSON.parse(a);r-new Date(c.cachedAt).getTime()>t&&(ct.unlinkSync(s),n++)}catch{try{ct.unlinkSync(s),n++}catch{}}}}catch{}return n>0&&E(mr,`Evicted ${n} stale cache entries`),n}clear(){try{let t=ct.readdirSync(this.cacheDir);for(let n of t)n.endsWith(".json")&&ct.unlinkSync(aa.join(this.cacheDir,n));E(mr,"Cache cleared")}catch{}}size(){try{return 
ct.readdirSync(this.cacheDir).filter(t=>t.endsWith(".json")).length}catch{return 0}}},Ym=null});function la(){return{status:"unknown",failingCheckNames:[],failingCheckConclusions:new Map}}function Qm(e,t,n){if(n==="cancelled"||n==="timed_out")return"infrastructure";let o=e.toLowerCase();if(z_.some(r=>r.test(o)))return"auth_gate";if(U_.some(r=>r.test(o)))return"fork_limitation";if(j_.some(r=>r.test(o)))return"infrastructure";if(t){let r=t.toLowerCase();if(z_.some(i=>i.test(r)))return"auth_gate";if(U_.some(i=>i.test(r)))return"fork_limitation";if(j_.some(i=>i.test(r)))return"infrastructure"}return"actionable"}function da(e,t){return e.map(n=>{let o=t?.get(n);return{name:n,category:Qm(n,void 0,o),conclusion:o}})}function Ux(e){let t=!1,n=!1,o=!1,r=[],i=new Map;for(let s of e)s.conclusion==="failure"||s.conclusion==="cancelled"||s.conclusion==="timed_out"?(t=!0,r.push(s.name),i.set(s.name,s.conclusion)):s.conclusion==="action_required"||s.status==="in_progress"||s.status==="queued"?n=!0:s.conclusion==="success"&&(o=!0);return{hasFailingChecks:t,hasPendingChecks:n,hasSuccessfulChecks:o,failingCheckNames:r,failingCheckConclusions:i}}function zx(e){let t=e.statuses.filter(c=>{let u=(c.description||"").toLowerCase();return!(c.state==="failure"&&(u.includes("authorization required")||u.includes("authorize")))}),n=t.some(c=>c.state==="failure"||c.state==="error"),o=t.some(c=>c.state==="pending"),r=t.some(c=>c.state==="success"),i;n?i="failure":o?i="pending":r||t.length===0?i="success":i=e.state;let s=e.statuses.length>0,a=[];for(let c of t)(c.state==="failure"||c.state==="error")&&a.push(c.context);return{effectiveCombinedState:i,hasStatuses:s,failingStatusNames:a}}function jx(e,t,n){let{hasFailingChecks:o,hasPendingChecks:r,hasSuccessfulChecks:i,failingCheckNames:s,failingCheckConclusions:a}=e,{effectiveCombinedState:c,hasStatuses:u,failingStatusNames:l}=t,d=[...s,...l];return 
o||c==="failure"||c==="error"?{status:"failing",failingCheckNames:d,failingCheckConclusions:a}:r||c==="pending"?{status:"pending",failingCheckNames:[],failingCheckConclusions:new Map}:i||c==="success"?{status:"passing",failingCheckNames:[],failingCheckConclusions:new Map}:la()}async function eg(e,t,n,o){if(!o)return la();try{let[r,i]=await Promise.all([e.repos.getCombinedStatusForRef({owner:t,repo:n,ref:o}),e.checks.listForRef({owner:t,repo:n,ref:o}).catch(p=>{let g=Ze(p);if(g===429)throw p;if(g===403){let w=P(p).toLowerCase();if(w.includes("rate limit")||w.includes("abuse detection"))throw p}return g===404?E("pr-monitor",`Check runs 404 for ${t}/${n}@${o.slice(0,7)} (no checks configured)`):v("pr-monitor",`Non-404 error fetching check runs for ${t}/${n}@${o.slice(0,7)}: ${g??p}`),null})]),s=r.data,a=i?.data?.check_runs||[],c=new Map;for(let p of a){let g=c.get(p.name);(!g||new Date(p.started_at??0)>new Date(g.started_at??0))&&c.set(p.name,p)}let u=[...c.values()],l=Ux(u),d=zx(s);return jx(l,d,u.length)}catch(r){let i=Ze(r);if(i===401||i===403||i===429)throw r;return i===404?E("pr-monitor",`CI check 404 for ${t}/${n} (no CI configured)`):v("pr-monitor",`Failed to check CI for ${t}/${n}@${o.slice(0,7)}: ${P(r)}`),la()}}var U_,z_,j_,tg=b(()=>{"use strict";be();ye();U_=[/vercel/i,/netlify/i,/\bpreview\s*deploy/i,/\bdeploy\s*preview/i,/storybook/i,/chromatic/i,/percy/i,/cloudflare pages/i,/\binternal\b/i],z_=[/authoriz/i,/approval/i,/\bcla\b/i,/license\/cla/i],j_=[/\binstall\s*(os\s*)?dep(endenc|s\b)/i,/\bsetup\s+fail(ed|ure)?\b/i,/\bservice\s*unavailable/i,/\binfrastructure/i,/\bblacksmith\b/i]});function Cn(e){return e.includes("[bot]")||Gx.has(e.toLowerCase())}function Uo(e){if(!e||e.length>100||e.includes("?"))return!1;let t=e.toLowerCase();return["thanks","thank you","lgtm","looks good","will review","we'll review","we'll get to this","noted","got it","will look","will check"].some(o=>t.includes(o))}var Gx,pa=b(()=>{"use strict";Gx=new 
Set(["allcontributors","changeset-bot","claassistant","codecov-commenter","greenkeeper","imgbot","netlify","renovate","snyk-bot","sonarcloud","stale","vercel"])});function G_(e){if(e.length===0)return"review_required";let t=new Map;for(let o of e){let r=o.user?.login,i=o.state;r&&i&&t.set(r,i)}let n=Array.from(t.values());return n.includes("CHANGES_REQUESTED")?"changes_requested":n.includes("APPROVED")?"approved":"review_required"}function F_(e){let t;for(let n of e)n.state==="CHANGES_REQUESTED"&&n.submitted_at&&(!t||n.submitted_at>t)&&(t=n.submitted_at);return t}function Fx(e,t){let n=t.filter(r=>r.pull_request_review_id===e);if(n.length===0)return!1;let o=new Map;for(let r of t)r.user?.login&&o.set(r.id,r.user.login.toLowerCase());return n.every(r=>{if(!r.in_reply_to_id)return!1;let i=o.get(r.in_reply_to_id),s=r.user?.login?.toLowerCase();return!(i==null||s==null||i!==s||!r.body||r.body.includes("?"))})}function Lx(e,t){return t.find(n=>n.pull_request_review_id===e&&n.body?.trim())?.body?.trim()}function L_(e,t,n,o){let r=[],i=o.toLowerCase();for(let c of e){let u=c.user?.login||"unknown";r.push({author:u,body:c.body||"",createdAt:c.created_at,isUser:u.toLowerCase()===i})}for(let c of t){if(!c.submitted_at)continue;let u=(c.body||"").trim();if(!u&&c.state!=="COMMENTED"&&c.state!=="CHANGES_REQUESTED")continue;let l=c.user?.login||"unknown";if(!u&&c.state==="COMMENTED"&&c.id!=null&&Fx(c.id,n))continue;let d=u||(c.id!=null?Lx(c.id,n):void 0)||(c.state==="CHANGES_REQUESTED"?"(requested changes via inline review comments)":"(posted inline review comments)");r.push({author:l,body:d,createdAt:c.submitted_at,isUser:l.toLowerCase()===i})}r.sort((c,u)=>new Date(c.createdAt).getTime()-new Date(u.createdAt).getTime());let s=null;for(let c of r)c.isUser&&(s=new Date(c.createdAt));let a;for(let c of r){if(c.isUser||c.author==="unknown"||Cn(c.author))continue;let u=new 
Date(c.createdAt);(!s||u>s)&&(a={author:c.author,body:c.body.slice(0,200)+(c.body.length>200?"...":""),createdAt:c.createdAt})}return a&&Uo(a.body)&&(a=void 0),{hasUnrespondedComment:!!a,lastMaintainerComment:a}}var N_=b(()=>{"use strict";pa()});function M_(e){return Nx.test(e.toLowerCase())}function Z_(e){if(!e)return{hasIncompleteChecklist:!1};let t=/- \[x\]/gi,n=/^.*- \[ \].*$/gm,o=e.match(t)||[],r=e.match(n)||[],i=o.length;if(i+r.length===0)return{hasIncompleteChecklist:!1};let a=r.filter(u=>!M_(u)),c=i+a.length;return{hasIncompleteChecklist:a.length>0,checklistStats:{checked:i,total:c}}}var Nx,rg=b(()=>{"use strict";Nx=/\(if\s|\bif applicable\b|\bif needed\b|\bif relevant\b|\bonly if\b|\bwhen applicable\b|\(optional\)|- \[ \]\s*optional\b|\bn\/a\b|\bnot applicable\b|\bif required\b|\bif necessary\b/});function V_(e,t){let n=[];if(t==="changes_requested"&&n.push("changes_requested"),!e)return n;let o=e.toLowerCase();return["screenshot","demo","recording","screen recording","before/after","before and after","gif","video","screencast","show me","can you show"].some(c=>o.includes(c))&&n.push("demo_requested"),["add test","test coverage","unit test","missing test","add a test","write test","needs test","need test"].some(c=>o.includes(c))&&n.push("tests_requested"),["documentation","readme","jsdoc","docstring","add docs","update docs","document this"].some(c=>o.includes(c))&&n.push("docs_requested"),["rebase","merge conflict","out of date","behind main","behind master"].some(c=>o.includes(c))&&n.push("rebase_requested"),n}var H_=b(()=>{"use strict"});function Zx(e){let t=q_[e]?.label;return t?t.replace(/[[\]]/g,"").toLowerCase():e}function ma(e){if(e.status==="needs_addressing"&&e.actionReason){let t=q_[e.actionReason];if(t){let n=t.description(e);if(e.actionReasons&&e.actionReasons.length>1){let o=e.actionReasons.filter(r=>r!==e.actionReason).map(Zx);o.length>0&&(n+=` (also: ${o.join(", 
")})`)}return{displayLabel:t.label,displayDescription:n}}}if(e.status==="waiting_on_maintainer"&&e.waitReason){let t=Mx[e.waitReason];if(t)return{displayLabel:t.label,displayDescription:t.description(e)}}return v("display-utils",`PR ${e.url} has status "${e.status}" but no matching reason (actionReason=${e.actionReason}, waitReason=${e.waitReason})`),e.status==="needs_addressing"?{displayLabel:"[Needs Addressing]",displayDescription:"Action required"}:{displayLabel:"[Waiting on Maintainer]",displayDescription:"Awaiting maintainer action"}}var q_,Mx,ng=b(()=>{"use strict";ye();q_={needs_response:{label:"[Needs Response]",description:e=>e.lastMaintainerComment?`@${e.lastMaintainerComment.author} commented`:"Maintainer awaiting response"},needs_changes:{label:"[Needs Changes]",description:()=>"Review requested changes \u2014 push commits to address"},failing_ci:{label:"[CI Failing]",description:e=>{let t=e.classifiedChecks||[],n=t.filter(i=>i.category==="actionable");if(n.length>0)return`${n.length} check${n.length===1?"":"s"} failed: ${n.map(i=>i.name).join(", ")}`;let o=t.filter(i=>i.category==="infrastructure");if(o.length>0)return`${o.length} check${o.length===1?"":"s"} cancelled/timed out (infrastructure)`;let r=e.failingCheckNames||[];return r.length>0?`${r.length} check${r.length===1?"":"s"} failed`:"One or more CI checks are failing"}},merge_conflict:{label:"[Merge Conflict]",description:()=>"PR has merge conflicts with the base branch"},incomplete_checklist:{label:"[Incomplete Checklist]",description:e=>e.checklistStats?`${e.checklistStats.checked}/${e.checklistStats.total} items checked`:"PR body has unchecked required checkboxes"},ci_not_running:{label:"[CI Not Running]",description:()=>"No CI checks have been triggered"},needs_rebase:{label:"[Needs Rebase]",description:()=>"PR branch is significantly behind upstream"},missing_required_files:{label:"[Missing Files]",description:e=>e.missingRequiredFiles?`Missing: ${e.missingRequiredFiles.join(", 
")}`:"Required files are missing"}},Mx={pending_review:{label:"[Waiting on Maintainer]",description:()=>"Awaiting review"},pending_merge:{label:"[Waiting on Maintainer]",description:()=>"Approved and CI passes \u2014 waiting for merge"},changes_addressed:{label:"[Waiting on Maintainer]",description:e=>e.hasUnrespondedComment&&e.lastMaintainerComment?`Changes addressed \u2014 waiting for @${e.lastMaintainerComment.author} to re-review`:"Changes addressed \u2014 awaiting re-review"},ci_blocked:{label:"[CI Blocked]",description:e=>{let t=e.classifiedChecks||[];return t.length>0&&t.every(n=>n.category!=="actionable")?`All failing checks are non-actionable (${[...new Set(t.map(o=>o.category))].join(", ")})`:"CI checks are failing but no action is needed from you"}},stale_ci_failure:{label:"[Stale CI Failure]",description:e=>`CI failing for ${e.daysSinceActivity}+ days \u2014 likely pre-existing or non-actionable`}}});function Dr(){return{repos:new Map,monthlyCounts:{},monthlyOpenedCounts:{},dailyActivityCounts:{}}}function Hx(e){if(typeof e!="object"||e===null)return!1;let t=e;return Array.isArray(t.reposEntries)&&typeof t.monthlyCounts=="object"&&t.monthlyCounts!==null&&typeof t.monthlyOpenedCounts=="object"&&t.monthlyOpenedCounts!==null&&typeof t.dailyActivityCounts=="object"&&t.dailyActivityCounts!==null}async function B_(e,t,n,o,r,i){if(!t)return Dr();let s=Bt(),a=i?`:stars${i.minStars}`:"",c=`pr-counts:v3:${o}:${t}${a}`,u=s.getIfFresh(c,Vx);if(u&&Hx(u))return E(Ee,`Using cached ${o} PR counts for @${t}`),{repos:new Map(u.reposEntries),monthlyCounts:u.monthlyCounts,monthlyOpenedCounts:u.monthlyOpenedCounts,dailyActivityCounts:u.dailyActivityCounts};E(Ee,`Fetching ${o} PR counts for @${t}...`);let l=new Map,d={},p={},g={},w=1,S=0,j;for(;;){let{data:k}=await e.search.issuesAndPullRequests({q:`is:pr ${n} author:${t} -user:${t}`,sort:"updated",order:"desc",per_page:100,page:w});j=k.total_count;for(let L of k.items){let F=xr(L.html_url);if(!F){v(Ee,`Skipping ${o} PR with 
unparseable URL: ${L.html_url}`);continue}let{owner:H}=F,z=`${H}/${F.repo}`;if(_t(H,t)||i&&Ht(i.knownStarCounts.get(z),i.minStars))continue;let R=r(l,z,L);if(R){let G=R.slice(0,7);d[G]=(d[G]||0)+1;let Y=R.slice(0,10);Y.length===10&&(g[Y]=(g[Y]||0)+1)}if(L.created_at){let G=L.created_at.slice(0,7);p[G]=(p[G]||0)+1;let Y=L.created_at.slice(0,10);Y.length===10&&(g[Y]=(g[Y]||0)+1)}}if(S+=k.items.length,S>=k.total_count||S>=1e3||k.items.length===0||w>=Ar)break;w++}return S<j&&w>=Ar&&v(Ee,`Pagination capped at ${Ar} pages: fetched ${S} of ${j} ${o} PRs. Stats may be incomplete for prolific contributors.`),E(Ee,`Found ${S} ${o} PRs across ${l.size} repos`),s.set(c,"",{reposEntries:Array.from(l.entries()),monthlyCounts:d,monthlyOpenedCounts:p,dailyActivityCounts:g}),{repos:l,monthlyCounts:d,monthlyOpenedCounts:p,dailyActivityCounts:g}}function W_(e,t,n){return B_(e,t,"is:merged","merged",(o,r,i)=>{i.pull_request?.merged_at||v(Ee,`merged_at missing for merged PR ${i.html_url}${i.closed_at?", falling back to closed_at":", no date available"}`);let s=i.pull_request?.merged_at||i.closed_at||"",a=o.get(r);return a?(a.count+=1,s&&s>a.lastMergedAt&&(a.lastMergedAt=s)):o.set(r,{count:1,lastMergedAt:s}),s},n)}function J_(e,t,n){return B_(e,t,"is:closed is:unmerged","closed",(o,r,i)=>(o.set(r,(o.get(r)||0)+1),i.closed_at||""),n)}async function K_(e,t,n,o,r,i){if(!t.githubUsername)return v(Ee,`Skipping recently ${o} PRs fetch: no githubUsername configured. 
Run /setup-oss to configure.`),[];let s=new Date;s.setDate(s.getDate()-r);let a=s.toISOString().split("T")[0];E(Ee,`Fetching recently ${o} PRs for @${t.githubUsername} (since ${a})...`);let{data:c}=await e.search.issuesAndPullRequests({q:n.replace("{username}",t.githubUsername).replace("{since}",a),sort:"updated",order:"desc",per_page:100}),u=[];for(let l of c.items){let d=Oe(l.html_url);if(!d){v(Ee,`Could not parse GitHub URL from API response: ${l.html_url}`);continue}let p=`${d.owner}/${d.repo}`;_t(d.owner,t.githubUsername)||u.push(i(l,{owner:d.owner,repo:p,number:d.number}))}return E(Ee,`Found ${u.length} recently ${o} PRs`),u}async function X_(e,t,n=7){return K_(e,t,"is:pr is:closed is:unmerged author:{username} closed:>={since}","closed",n,(o,{repo:r,number:i})=>({url:o.html_url,repo:r,number:i,title:o.title,closedAt:o.closed_at||""}))}async function Y_(e,t,n=7){return K_(e,t,"is:pr is:merged author:{username} merged:>={since}","merged",n,(o,{repo:r,number:i})=>{let s=o.pull_request?.merged_at;return s||v(Ee,`merged_at missing for merged PR ${o.html_url}${o.closed_at?", falling back to closed_at":", no date available"}`),{url:o.html_url,repo:r,number:i,title:o.title,mergedAt:s||o.closed_at||""}})}async function Q_(e,t,n){if(!t.githubUsername)return v(Ee,"Skipping merged PRs fetch: no githubUsername configured."),[];let o=n?` merged:>${n}`:"",r=`is:pr is:merged author:${t.githubUsername} -user:${t.githubUsername}${o}`;E(Ee,`Fetching merged PRs${n?` since ${n}`:" (all time)"}...`);let i=[],s=1,a=0;for(;;){let{data:c}=await e.search.issuesAndPullRequests({q:r,sort:"updated",order:"desc",per_page:100,page:s});for(let u of c.items){let l=Oe(u.html_url);if(!l){v(Ee,`Skipping merged PR with unparseable URL: ${u.html_url}`);continue}if(_t(l.owner,t.githubUsername))continue;let d=u.pull_request?.merged_at||u.closed_at||"";if(!d){v(Ee,`Skipping merged PR with no merge date: 
${u.html_url}`);continue}i.push({url:u.html_url,title:u.title,mergedAt:d})}if(a+=c.items.length,a>=c.total_count||a>=1e3||c.items.length===0||s>=Ar)break;s++}return E(Ee,`Fetched ${i.length} merged PRs${n?" (incremental)":" (initial)"}`),i}async function e$(e,t,n){if(!t.githubUsername)return v(Ee,"Skipping closed PRs fetch: no githubUsername configured."),[];let o=n?` closed:>${n}`:"",r=`is:pr is:closed is:unmerged author:${t.githubUsername} -user:${t.githubUsername}${o}`;E(Ee,`Fetching closed PRs${n?` since ${n}`:" (all time)"}...`);let i=[],s=1,a=0,c;for(;;){let{data:u}=await e.search.issuesAndPullRequests({q:r,sort:"updated",order:"desc",per_page:100,page:s});c=u.total_count;for(let l of u.items){let d=Oe(l.html_url);if(!d){v(Ee,`Skipping closed PR with unparseable URL: ${l.html_url}`);continue}if(_t(d.owner,t.githubUsername))continue;let p=l.closed_at||"";if(!p){v(Ee,`Skipping closed PR with no close date: ${l.html_url}`);continue}i.push({url:l.html_url,title:l.title,closedAt:p})}if(a+=u.items.length,a>=c||a>=1e3||u.items.length===0||s>=Ar)break;s++}return a<c&&s>=Ar&&v(Ee,`Pagination capped at ${Ar} pages: fetched ${a} of ${c} closed PRs. Oldest PRs may be missing.`),E(Ee,`Fetched ${i.length} closed PRs${n?" (incremental)":" (initial)"}`),i}var Ee,Vx,Ar,ga=b(()=>{"use strict";Ve();At();ye();Pn();Ee="github-stats",Vx=1440*60*1e3,Ar=3});function qx(e,t){return e===!1||t==="dirty"}var fa,Bx,Ur,t$=b(()=>{"use strict";Rn();Tn();Ve();Km();Xm();be();Ao();ye();Pn();tg();N_();rg();H_();ng();ga();ng();tg();rg();Km();fa="pr-monitor",Bx=lr,Ur=class{octokit;stateManager;constructor(t){this.octokit=Ue(t),this.stateManager=W()}async fetchUserOpenPRs(){let t=this.stateManager.getState().config;if(!t.githubUsername)throw new Nt("No GitHub username configured. 
Run setup first.");E("pr-monitor",`Fetching open PRs for @${t.githubUsername}...`);let n=[],o=1,r=100,i=await this.octokit.search.issuesAndPullRequests({q:`is:pr is:open author:${t.githubUsername}`,sort:"updated",order:"desc",per_page:r,page:1});n.push(...i.data.items);let s=i.data.total_count;E("pr-monitor",`Found ${s} open PRs`);let a=Math.min(Math.ceil(s/r),10);for(;o<a;){o++;let d=await this.octokit.search.issuesAndPullRequests({q:`is:pr is:open author:${t.githubUsername}`,sort:"updated",order:"desc",per_page:r,page:o});n.push(...d.data.items)}let c=[],u=[],l=n.filter(d=>{if(!d.pull_request)return!1;let p=xr(d.html_url);return p?!_t(p.owner,t.githubUsername):(v("pr-monitor",`Skipping PR with unparseable URL: ${d.html_url}`),!1)});return E("pr-monitor",`Filtered to ${l.length} PRs after excluding own repos`),await wm("pr-monitor",`Fetch details for ${l.length} PRs`,async()=>{await sa(l,async d=>{try{E("pr-monitor",`Fetching details for ${d.html_url}`);let p=await this.fetchPRDetails(d.html_url);p&&c.push(p)}catch(p){let g=P(p);v("pr-monitor",`Error fetching ${d.html_url}: ${g}`),u.push({prUrl:d.html_url,error:g})}},Bx)}),c.sort((d,p)=>d.status===p.status?0:d.status==="needs_addressing"?-1:1),{prs:c,failures:u}}async fetchPRDetails(t){let n=Oe(t);if(!n||n.type!=="pull")throw new _e(`Invalid PR URL format: ${t}`);let{owner:o,repo:r,number:i}=n,s=this.stateManager.getState().config,[a,c,u,l]=await Promise.all([this.octokit.pulls.get({owner:o,repo:r,pull_number:i}),wt(V=>this.octokit.issues.listComments({owner:o,repo:r,issue_number:i,per_page:100,page:V})),this.octokit.pulls.listReviews({owner:o,repo:r,pull_number:i}),wt(V=>this.octokit.pulls.listReviewComments({owner:o,repo:r,pull_number:i,per_page:100,page:V})).catch(V=>{let ee=Ze(V);if(ee===429)throw V;if(ee===403){let fe=P(V).toLowerCase();if(fe.includes("rate limit")||fe.includes("abuse detection"))throw V;return v("pr-monitor",`403 fetching review comments for ${o}/${r}#${i}: ${fe}`),[]}return 
ee===404?E("pr-monitor",`Review comments 404 for ${o}/${r}#${i} (likely no inline comments)`):v("pr-monitor",`Failed to fetch review comments for ${o}/${r}#${i} (status ${ee??"unknown"}): self-reply detection will be skipped`),[]})]),d=a.data,p=u.data,g=G_(p),w=qx(d.mergeable,d.mergeable_state),{hasUnrespondedComment:S,lastMaintainerComment:j}=L_(c,p,l,s.githubUsername),k=eg(this.octokit,o,r,d.head.sha),F=S||g==="changes_requested"?this.octokit.repos.getCommit({owner:o,repo:r,ref:d.head.sha}).then(V=>({date:V.data.commit.author?.date,author:V.data.author?.login})).catch(V=>{let ee=Ze(V);if(ee===429)throw V;if(ee===403){let fe=P(V).toLowerCase();if(fe.includes("rate limit")||fe.includes("abuse detection"))throw V;v("pr-monitor",`403 fetching commit date for ${o}/${r}@${d.head.sha.slice(0,7)}: ${P(V)}`);return}v("pr-monitor",`Failed to fetch commit date for ${o}/${r}@${d.head.sha.slice(0,7)}: ${P(V)}`)}):Promise.resolve(void 0),[{status:H,failingCheckNames:z,failingCheckConclusions:R},G]=await Promise.all([k,F]),Y=G?.date,J=G?.author,{hasIncompleteChecklist:$e,checklistStats:lt}=Z_(d.body||""),le=V_(j?.body,g),we=ft(new Date(d.updated_at),new Date),qe=F_(p),dt=da(z,R),Et=H==="failing"&&dt.some(V=>V.category==="actionable"),{status:Qt,actionReason:je,waitReason:Xe,stalenessTier:Be,actionReasons:K}=Jm({ciStatus:H,hasMergeConflict:w,hasUnrespondedComment:S,hasIncompleteChecklist:$e,reviewDecision:g,daysSinceActivity:we,dormantThreshold:s.dormantThresholdDays,approachingThreshold:s.approachingDormantDays,latestCommitDate:Y,latestCommitAuthor:J,contributorUsername:s.githubUsername,lastMaintainerCommentDate:j?.createdAt,latestChangesRequestedDate:qe,hasActionableCIFailure:Et});return 
this.buildFetchedPR({id:d.id,url:t,repo:`${o}/${r}`,number:i,title:d.title,status:Qt,actionReason:je,waitReason:Xe,stalenessTier:Be,actionReasons:K,createdAt:d.created_at,updatedAt:d.updated_at,daysSinceActivity:we,ciStatus:H,failingCheckNames:z,classifiedChecks:dt,hasMergeConflict:w,reviewDecision:g,hasUnrespondedComment:S,lastMaintainerComment:j,latestCommitDate:Y,hasIncompleteChecklist:$e,checklistStats:lt,maintainerActionHints:le})}buildFetchedPR(t){let n={...t,displayLabel:"",displayDescription:""},{displayLabel:o,displayDescription:r}=ma(n);return n.displayLabel=o,n.displayDescription=r,n}async fetchUserMergedPRCounts(t){let n=this.stateManager.getState().config;return W_(this.octokit,n.githubUsername,t)}async fetchUserClosedPRCounts(t){let n=this.stateManager.getState().config;return J_(this.octokit,n.githubUsername,t)}async fetchRepoMetadata(t){if(t.length===0)return new Map;E(fa,`Fetching repo metadata for ${t.length} repos...`);let n=new Map,o=Bt(),r=[...new Set(t)],i=10;for(let s=0;s<r.length;s+=i){let a=r.slice(s,s+i),c=await Promise.allSettled(a.map(async l=>{let d=l.split("/");if(d.length!==2||!d[0]||!d[1])throw new _e(`Malformed repo identifier: "${l}"`);let[p,g]=d,w=`/repos/${p}/${g}`,S=await Do(o,w,k=>this.octokit.repos.get({owner:p,repo:g,headers:k})),j={stars:S.stargazers_count,language:S.language??null};return{repo:l,metadata:j}})),u=0;for(let l=0;l<c.length;l++){let d=c[l];d.status==="fulfilled"?n.set(d.value.repo,d.value.metadata):(u++,v(fa,`Failed to fetch metadata for ${a[l]}: ${P(d.reason)}`))}if(u===a.length&&a.length>0){let l=r.length-s-i;l>0&&v(fa,`Entire chunk failed, aborting remaining ${l} repos`);break}}return E(fa,`Fetched repo metadata for ${n.size}/${t.length} repos`),n}async fetchRecentlyClosedPRs(t=7){let n=this.stateManager.getState().config;return X_(this.octokit,n,t)}async fetchRecentlyMergedPRs(t=7){let n=this.stateManager.getState().config;return Y_(this.octokit,n,t)}generateDigest(t,n=[],o=[]){let r=new 
Date().toISOString(),i=t.filter(c=>c.status==="needs_addressing"),s=t.filter(c=>c.status==="waiting_on_maintainer"),a=this.stateManager.getStats();return{generatedAt:r,openPRs:t,needsAddressingPRs:i,waitingOnMaintainerPRs:s,recentlyClosedPRs:n,recentlyMergedPRs:o,shelvedPRs:[],autoUnshelvedPRs:[],summary:{totalActivePRs:t.length,totalNeedingAttention:i.length,totalMergedAllTime:a.mergedPRs,mergeRate:parseFloat(a.mergeRate)}}}async updateRepoScoreFromObservedPR(t,n){n?this.stateManager.incrementMergedCount(t):this.stateManager.incrementClosedCount(t)}}});function og(e){if(!e.labels||!Array.isArray(e.labels)||e.labels.length===0)return!1;let n=e.labels.map(o=>(typeof o=="string"?o:o.name||"").toLowerCase()).filter(o=>o.length>0);return n.length===0?!1:n.every(o=>r$.has(o))}function Jx(e){return!e.labels||!Array.isArray(e.labels)?!1:e.labels.map(o=>(typeof o=="string"?o:o.name||"").toLowerCase()).filter(o=>Wx.has(o)).length>=5}function Kx(e){return e?/^.+\s+(question|fact|point|item|task|entry|post|challenge|exercise|example|problem|tip|recipe|snippet)\s+#?\d+$/i.test(e):!1}function n$(e){let t=new Set,n=new Map;for(let o of e){let r=o.repository_url.split("/").slice(-2).join("/");if(Jx(o)){t.add(r);continue}o.title&&Kx(o.title)&&n.set(r,(n.get(r)||0)+1)}for(let[o,r]of n)r>=3&&t.add(o);return t}function ig(e,t){let n=new Map,o=[];for(let r of e){let i=n.get(r.issue.repo)||0;i<t&&(o.push(r),n.set(r.issue.repo,i+1))}return o}var r$,Wx,ha=b(()=>{"use strict";r$=new Set(["documentation","docs","typo","spelling"]);Wx=new Set(["good first issue","hacktoberfest","easy","up-for-grabs","first-timers-only","beginner-friendly","beginner","starter","newbie","low-hanging-fruit","community"])});function o$(e,t){let n=0;return e>=5e3?n+=8:e>=500?n+=5:e>=50&&(n+=3),t>=500?n+=4:t>=50&&(n+=2),n}function i$(e){let t=50;e.repoScore!==null&&(t+=e.repoScore*2),t+=e.repoQualityBonus??0,e.mergedPRCount>0&&(t+=15),e.clearRequirements&&(t+=15);let n=new Date(e.issueUpdatedAt),o=ft(n);return 
o<=14?t+=15:o<=30&&(t+=Math.round(15*(1-(o-14)/16))),e.hasContributionGuidelines&&(t+=10),e.orgHasMergedPRs&&(t+=5),e.matchesPreferredCategory&&(t+=5),e.hasExistingPR&&(t-=30),e.isClaimed&&(t-=20),e.closedWithoutMergeCount>0&&e.mergedPRCount===0&&(t-=15),Math.max(0,Math.min(100,t))}var s$=b(()=>{"use strict";Ve()});function a$(e,t){if(t.length===0)return!1;let n=e.split("/")[0]?.toLowerCase();if(!n)return!1;for(let o of t){let r=Yx[o];if(r&&r.some(i=>i.toLowerCase()===n))return!0}return!1}function c$(e){let t=new Set;for(let n of e){let o=Xx[n];if(o)for(let r of o)t.add(r)}return[...t]}var Xx,Yx,sg=b(()=>{"use strict";Xx={nonprofit:["nonprofit","social-good","humanitarian","charity","social-impact","civic-tech"],devtools:["developer-tools","devtools","cli","sdk","linter","formatter","build-tool"],infrastructure:["infrastructure","cloud","kubernetes","docker","devops","monitoring","observability"],"web-frameworks":["web-framework","frontend","backend","fullstack","nextjs","react","vue"],"data-ml":["machine-learning","data-science","deep-learning","nlp","data-pipeline","analytics"],education:["education","learning","tutorial","courseware","edtech","teaching"]},Yx={nonprofit:["code-for-america","opengovfoundation","ushahidi","hotosm","openfn","democracyearth"],devtools:["eslint","prettier","vitejs","biomejs","oxc-project","ast-grep","turbot"],infrastructure:["kubernetes","hashicorp","grafana","prometheus","open-telemetry","envoyproxy","cncf"],"web-frameworks":["vercel","remix-run","sveltejs","nuxt","astro","redwoodjs","blitz-js"],"data-ml":["huggingface","mlflow","apache","dbt-labs","dagster-io","prefecthq","langchain-ai"],education:["freeCodeCamp","TheOdinProject","exercism","codecademy","oppia","Khan"]}});async function u$(e,t,n,o){try{let{data:r}=await e.search.issuesAndPullRequests({q:`repo:${t}/${n} is:pr ${o}`,per_page:5}),s=(await wt(a=>e.issues.listEventsForTimeline({owner:t,repo:n,issue_number:o,per_page:100,page:a}))).filter(a=>{let c=a;return 
c.event==="cross-referenced"&&c.source?.issue?.pull_request});return{passed:r.total_count===0&&s.length===0}}catch(r){let i=P(r);return v(ag,`Failed to check for existing PRs on ${t}/${n}#${o}: ${i}. Assuming no existing PR.`),{passed:!0,inconclusive:!0,reason:i}}}async function l$(e,t,n){try{let{data:o}=await e.search.issuesAndPullRequests({q:`repo:${t}/${n} is:pr is:merged author:@me`,per_page:1});return o.total_count}catch(o){let r=P(o);return v(ag,`Could not check merged PRs in ${t}/${n}: ${r}. Defaulting to 0.`),0}}async function d$(e,t,n,o,r){if(r===0)return{passed:!0};try{let s=(await e.paginate(e.issues.listComments,{owner:t,repo:n,issue_number:o,per_page:100},a=>a.data)).slice(-100);for(let a of s){let c=(a.body||"").toLowerCase();if(Qx.some(u=>c.includes(u)))return{passed:!1}}return{passed:!0}}catch(i){let s=P(i);return v(ag,`Failed to check claim status on ${t}/${n}#${o}: ${s}. Assuming not claimed.`),{passed:!0,inconclusive:!0,reason:s}}}function p$(e){if(!e||e.length<50)return!1;let t=/\d\.|[-*]\s/.test(e),n=/```/.test(e),o=/expect|should|must|want/i.test(e);return[t,n,o,e.length>200].filter(Boolean).length>=2}var ag,Qx,m$=b(()=>{"use strict";Ao();be();ye();ag="issue-eligibility",Qx=["i'm working on this","i am working on this","i'll take this","i will take this","working on it","i'd like to work on","i would like to work on","can i work on","may i work on","assigned to me","i'm on it","i'll submit a pr","i will submit a pr","working on a fix","working on a pr"]});function f$(){let e=Date.now();for(let[t,n]of Wt.entries())e-n.fetchedAt>h$&&Wt.delete(t);if(Wt.size>g$){let n=Array.from(Wt.entries()).sort((o,r)=>o[1].fetchedAt-r[1].fetchedAt).slice(0,Wt.size-g$);for(let[o]of n)Wt.delete(o)}}async function v$(e,t,n){let o=Bt(),r=`health:${t}/${n}`;try{return await ua(o,r,eR,async()=>{let i=`/repos/${t}/${n}`,s=await Do(o,i,p=>e.repos.get({owner:t,repo:n,headers:p})),{data:a}=await 
e.repos.listCommits({owner:t,repo:n,per_page:1}),u=a[0]?.commit?.author?.date||s.pushed_at,l=ft(new Date(u)),d="unknown";try{let{data:p}=await e.actions.listRepoWorkflows({owner:t,repo:n,per_page:1});p.total_count>0&&(d="passing")}catch(p){let g=P(p);v(cg,`Failed to check CI status for ${t}/${n}: ${g}. Defaulting to unknown.`)}return{repo:`${t}/${n}`,lastCommitAt:u,daysSinceLastCommit:l,openIssuesCount:s.open_issues_count,avgIssueResponseDays:0,ciStatus:d,isActive:l<30,stargazersCount:s.stargazers_count,forksCount:s.forks_count}})}catch(i){let s=P(i);return v(cg,`Error checking project health for ${t}/${n}: ${s}`),{repo:`${t}/${n}`,lastCommitAt:"",daysSinceLastCommit:999,openIssuesCount:0,avgIssueResponseDays:0,ciStatus:"unknown",isActive:!1,checkFailed:!0,failureReason:s}}}async function b$(e,t,n){let o=`${t}/${n}`,r=Wt.get(o);if(r&&Date.now()-r.fetchedAt<h$)return r.guidelines;let i=["CONTRIBUTING.md",".github/CONTRIBUTING.md","docs/CONTRIBUTING.md","contributing.md"],s=await Promise.allSettled(i.map(a=>e.repos.getContent({owner:t,repo:n,path:a}).then(({data:c})=>"content"in c?Buffer.from(c.content,"base64").toString("utf-8"):null)));for(let a=0;a<s.length;a++){let c=s[a];if(c.status==="fulfilled"&&c.value){let u=tR(c.value);return Wt.set(o,{guidelines:u,fetchedAt:Date.now()}),f$(),u}if(c.status==="rejected"){let u=c.reason instanceof Error?c.reason.message:String(c.reason);!u.includes("404")&&!u.includes("Not Found")&&v(cg,`Unexpected error fetching ${i[a]} from ${t}/${n}: ${u}`)}}Wt.set(o,{guidelines:void 0,fetchedAt:Date.now()}),f$()}function tR(e){let t={rawContent:e},n=e.toLowerCase();if(n.includes("branch")){let o=e.match(/branch[^\n]*(?:named?|format|convention)[^\n]*[`"]([^`"]+)[`"]/i);o&&(t.branchNamingConvention=o[1])}if(n.includes("conventional commit"))t.commitMessageFormat="conventional commits";else if(n.includes("commit message")){let o=e.match(/commit message[^\n]*[`"]([^`"]+)[`"]/i);o&&(t.commitMessageFormat=o[1])}return 
n.includes("jest")?t.testFramework="Jest":n.includes("rspec")?t.testFramework="RSpec":n.includes("pytest")?t.testFramework="pytest":n.includes("mocha")&&(t.testFramework="Mocha"),n.includes("eslint")?t.linter="ESLint":n.includes("rubocop")?t.linter="RuboCop":n.includes("prettier")&&(t.formatter="Prettier"),(n.includes("cla")||n.includes("contributor license agreement"))&&(t.claRequired=!0),t}var cg,Wt,h$,eR,g$,y$=b(()=>{"use strict";Ve();be();ye();Pn();cg="repo-health",Wt=new Map,h$=3600*1e3,eR=14400*1e3,g$=100});var _$,rR,va,$$=b(()=>{"use strict";Ve();be();ye();s$();sg();m$();y$();_$="issue-vetting",rR=lr,va=class{octokit;stateManager;constructor(t,n){this.octokit=t,this.stateManager=n}async vetIssue(t){let n=Oe(t);if(!n||n.type!=="issues")throw new _e(`Invalid issue URL: ${t}`);let{owner:o,repo:r,number:i}=n,s=`${o}/${r}`,{data:a}=await this.octokit.issues.get({owner:o,repo:r,issue_number:i}),[c,u,l,d,p]=await Promise.all([u$(this.octokit,o,r,i),d$(this.octokit,o,r,i,a.comments),v$(this.octokit,o,r),b$(this.octokit,o,r),l$(this.octokit,o,r)]),g=c.passed,w=u.passed,S=p$(a.body||""),j=l.checkFailed?!0:l.isActive,k={passedAllChecks:g&&w&&j&&S,checks:{noExistingPR:g,notClaimed:w,projectActive:j,clearRequirements:S,contributionGuidelinesFound:!!d},contributionGuidelines:d,notes:[]};g||k.notes.push("Existing PR found for this issue"),w||k.notes.push("Issue appears to be claimed by someone"),c.inconclusive&&k.notes.push(`Could not verify absence of existing PRs: ${c.reason||"API error"}`),u.inconclusive&&k.notes.push(`Could not verify claim status: ${u.reason||"API error"}`),l.checkFailed?k.notes.push(`Could not verify project activity: ${l.failureReason||"API error"}`):l.isActive||k.notes.push("Project may be inactive"),S||k.notes.push("Issue requirements are unclear"),d||k.notes.push("No CONTRIBUTING.md found");let L={id:a.id,url:t,repo:s,number:i,title:a.title,status:"candidate",labels:a.labels.map(Be=>typeof 
Be=="string"?Be:Be.name||""),createdAt:a.created_at,updatedAt:a.updated_at,vetted:!0,vettingResult:k},F=[],H=[];g||F.push("Has existing PR"),w||F.push("Already claimed"),!l.isActive&&!l.checkFailed&&F.push("Inactive project"),S||F.push("Unclear requirements"),g&&H.push("No existing PR"),w&&H.push("Not claimed"),l.isActive&&!l.checkFailed&&H.push("Active project"),S&&H.push("Clear requirements"),d&&H.push("Has contribution guidelines");let z=this.stateManager.getState().config,R=this.stateManager.getRepoScore(s),G=R&&R.mergedPRCount>0?R.mergedPRCount:p;G>0?H.push(`Trusted project (${G} PR${G>1?"s":""} merged)`):z.trustedProjects.includes(s)&&H.push("Trusted project (previous PR merged)"),R&&(R.closedWithoutMergeCount>0&&G===0?F.push("User has rejected PR(s) in this repo with no successful merges"):R.closedWithoutMergeCount>0&&G>0&&k.notes.push(`Mixed history: ${G} merged, ${R.closedWithoutMergeCount} closed without merge`));let Y=s.split("/")[0],J=!1;Y&&s.includes("/")&&(J=Object.values(this.stateManager.getState().repoScores).some(Be=>Be.repo&&Be.mergedPRCount>0&&Be.repo.startsWith(Y+"/")&&Be.repo!==s)),J&&H.push(`Org affinity (merged PRs in other ${Y} repos)`);let $e=z.projectCategories??[],lt=a$(s,$e);lt&&H.push("Matches preferred project category");let le;k.passedAllChecks?le="approve":F.length>2?le="skip":le="needs_review";let we=l.checkFailed||c.inconclusive||u.inconclusive;le==="approve"&&we&&(le="needs_review",k.notes.push("Recommendation downgraded: one or more checks were inconclusive"));let qe=o$(l.stargazersCount??0,l.forksCount??0);l.checkFailed&&qe===0&&k.notes.push("Repo quality bonus unavailable: could not fetch star/fork counts due to API error");let 
dt=this.getRepoScore(s),Et=i$({repoScore:dt,hasExistingPR:!g,isClaimed:!w,clearRequirements:S,hasContributionGuidelines:!!d,issueUpdatedAt:a.updated_at,closedWithoutMergeCount:R?.closedWithoutMergeCount??0,mergedPRCount:G,orgHasMergedPRs:J,repoQualityBonus:qe,matchesPreferredCategory:lt}),Qt=this.stateManager.getStarredRepos(),je=z.preferredOrgs??[],Xe="normal";return G>0?Xe="merged_pr":je.some(Be=>Be.toLowerCase()===Y?.toLowerCase())?Xe="preferred_org":Qt.includes(s)&&(Xe="starred"),{issue:L,vettingResult:k,projectHealth:l,recommendation:le,reasonsToSkip:F,reasonsToApprove:H,viabilityScore:Et,searchPriority:Xe}}async vetIssuesParallel(t,n,o){let r=[],i=new Map,s=0,a=0,c=0;for(let l of t){if(r.length>=n)break;c++;let d=this.vetIssue(l).then(p=>{r.length<n&&(o&&(p.searchPriority=o),r.push(p))}).catch(p=>{s++,Mt(p)&&a++,v(_$,`Error vetting issue ${l}:`,P(p))}).finally(()=>i.delete(l));i.set(l,d),i.size>=rR&&await Promise.race(i.values())}await Promise.allSettled(i.values());let u=s===c&&c>0;return u&&v(_$,`All ${c} issue(s) failed vetting. 
This may indicate a systemic issue (rate limit, auth, network).`),{candidates:r.slice(0,n),allFailed:u,rateLimitHit:a>0}}getRepoScore(t){return this.stateManager.getState().repoScores?.[t]?.score??null}}});function ug(e){return e.length===0?"":e.length===1?`label:"${e[0]}"`:`(${e.map(t=>`label:"${t}"`).join(" OR ")})`}function w$(e,t){let n=new Set;for(let o of e)for(let r of xo[o]??[])n.add(r);for(let o of t)n.add(o);return[...n]}function S$(e){let t=[],n=Math.max(...e.map(o=>o.length),0);for(let o=0;o<n;o++)for(let r of e)o<r.length&&t.push(r[o]);return t}function nR(e,t){let n=[];for(let o=0;o<e.length;o+=t)n.push(e.slice(o,o+t));return n}async function jo(e,t){let n=`search:${t.q}:${t.sort}:${t.order}:${t.per_page}`;return ua(Bt(),n,oR,async()=>{let{data:o}=await e.search.issuesAndPullRequests(t);return o})}async function lg(e,t,n,o,r,i,s){let a=n$(t);if(a.size>0){let w=t.filter(S=>a.has(S.repository_url.split("/").slice(-2).join("/"))).length;E(zo,`[SPAM_FILTER] Filtered ${w} issues from ${a.size} label-farming repos: ${[...a].join(", ")}`)}let c=n(t).filter(w=>{let S=w.repository_url.split("/").slice(-2).join("/");return a.has(S)?!1:o.every(j=>!j.has(S))}).slice(0,r*2);if(c.length===0)return E(zo,`[${s}] All ${t.length} items filtered before vetting`),{candidates:[],allVetFailed:!1,rateLimitHit:!1};let{candidates:u,allFailed:l,rateLimitHit:d}=await e.vetIssuesParallel(c.map(w=>w.html_url),r,"normal"),p=u.filter(w=>w.projectHealth.checkFailed?!0:(w.projectHealth.stargazersCount??0)>=i),g=u.length-p.length;return g>0&&E(zo,`[STAR_FILTER] Filtered ${g} ${s} candidates below ${i} stars`),{candidates:p,allVetFailed:l,rateLimitHit:d}}async function ba(e,t,n,o,r,i,s){let a=[],u=nR(n,5),l=0,d=0;for(let w of u){if(a.length>=r)break;try{let S=w.map(L=>`repo:${L}`).join(" OR "),j=`${o} (${S})`,k=await jo(e,{q:j,sort:"created",order:"desc",per_page:Math.min(30,(r-a.length)*3)});if(k.items.length>0){let L=s(k.items),F=r-a.length,{candidates:H}=await 
t.vetIssuesParallel(L.slice(0,F*2).map(z=>z.html_url),F,i);a.push(...H)}}catch(S){l++,Mt(S)&&d++;let j=w.join(", ");v(zo,`Error searching issues in batch [${j}]:`,P(S))}}let p=l===u.length&&u.length>0,g=d>0;return p&&v(zo,`All ${u.length} batch(es) failed for ${i} phase. This may indicate a systemic issue (rate limit, auth, network).`),{candidates:a,allBatchesFailed:p,rateLimitHit:g}}var zo,oR,k$=b(()=>{"use strict";At();be();ye();Pn();ha();zo="search-phases";oR=900*1e3});var T$,E$,ge,zr,x$=b(()=>{"use strict";T$=re(require("fs"),1),E$=re(require("path"),1);Rn();Tn();Ve();At();be();ye();ha();$$();sg();k$();ge="issue-discovery",zr=class{octokit;stateManager;githubToken;vetter;rateLimitWarning=null;constructor(t){this.githubToken=t,this.octokit=Ue(t),this.stateManager=W(),this.vetter=new va(this.octokit,this.stateManager)}async fetchStarredRepos(){Ce(ge,"Fetching starred repositories...");let t=[];try{let n=this.octokit.paginate.iterator(this.octokit.activity.listReposStarredByAuthenticatedUser,{per_page:100}),o=0;for await(let{data:r}of n){for(let i of r){let s;"full_name"in i&&typeof i.full_name=="string"?s=i.full_name:"repo"in i&&i.repo&&typeof i.repo=="object"&&"full_name"in i.repo&&(s=i.repo.full_name),s&&t.push(s)}if(o++,o>=5){Ce(ge,"Reached pagination limit for starred repos (500)");break}}return Ce(ge,`Fetched ${t.length} starred repositories`),this.stateManager.setStarredRepos(t),t}catch(n){let o=this.stateManager.getStarredRepos(),r=P(n);return v(ge,"Error fetching starred repos:",r),o.length===0?v(ge,`Failed to fetch starred repositories from GitHub API. No cached repos available. Error: ${r}
84
+ `);let u={},l=new o.Events(u);return u.on("secondary-limit",c.onSecondaryRateLimit),u.on("rate-limit",c.onRateLimit),u.on("error",d=>e.log.warn("Error in throttling-plugin limit handler",d)),c.retryLimiter.on("failed",async function(d,p){let[g,w,S]=p.args,{pathname:j}=new URL(S.url,"http://github.test");if(!(j.startsWith("/graphql")&&d.status!==401||d.status===403||d.status===429))return;let L=~~w.retryCount;w.retryCount=L,S.request.retryCount=L;let{wantRetry:F,retryAfter:H=0}=await(async function(){if(/\bsecondary rate\b/i.test(d.message)){let z=Number(d.response.headers["retry-after"])||g.fallbackSecondaryRateRetryAfter;return{wantRetry:await l.trigger("secondary-limit",z,S,e,L),retryAfter:z}}if(d.response.headers!=null&&d.response.headers["x-ratelimit-remaining"]==="0"||(d.response.data?.errors??[]).some(z=>z.type==="RATE_LIMITED")){let z=new Date(~~d.response.headers["x-ratelimit-reset"]*1e3).getTime(),R=Math.max(Math.ceil((z-Date.now())/1e3)+1,0);return{wantRetry:await l.trigger("rate-limit",R,S,e,L),retryAfter:R}}return{}})();if(F)return w.retryCount++,H*g.retryAfterBaseValue}),e.hook.wrap("request",yx.bind(null,c)),{}}var 
x_,bx,Bm,wx,E_,R_,Ir,kx,P_=b(()=>{x_=re(T_(),1),bx="0.0.0-development",Bm=()=>Promise.resolve();wx=["/orgs/{org}/invitations","/orgs/{org}/invitations/{invitation_id}","/orgs/{org}/teams/{team_slug}/discussions","/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments","/repos/{owner}/{repo}/collaborators/{username}","/repos/{owner}/{repo}/commits/{commit_sha}/comments","/repos/{owner}/{repo}/issues","/repos/{owner}/{repo}/issues/{issue_number}/comments","/repos/{owner}/{repo}/issues/{issue_number}/sub_issue","/repos/{owner}/{repo}/issues/{issue_number}/sub_issues/priority","/repos/{owner}/{repo}/pulls","/repos/{owner}/{repo}/pulls/{pull_number}/comments","/repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies","/repos/{owner}/{repo}/pulls/{pull_number}/merge","/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers","/repos/{owner}/{repo}/pulls/{pull_number}/reviews","/repos/{owner}/{repo}/releases","/teams/{team_id}/discussions","/teams/{team_id}/discussions/{discussion_number}/comments"];E_=Sx(wx),R_=E_.test.bind(E_),Ir={},kx=function(e,t){Ir.global=new e.Group({id:"octokit-global",maxConcurrent:10,...t}),Ir.auth=new e.Group({id:"octokit-auth",maxConcurrent:1,...t}),Ir.search=new e.Group({id:"octokit-search",maxConcurrent:1,minTime:2e3,...t}),Ir.write=new e.Group({id:"octokit-write",maxConcurrent:1,minTime:1e3,...t}),Ir.notifications=new e.Group({id:"octokit-notifications",maxConcurrent:1,minTime:3e3,...t})};ra.VERSION=bx;ra.triggersNotification=R_});function ia(e){return e.toLocaleTimeString("en-US",{hour12:!1})}function Ex(){return{onRateLimit:(e,t,n,o)=>{let r=t,i=new Date(Date.now()+e*1e3);return o<2?(v(na,`Rate limit hit (retry ${o+1}/2, waiting ${e}s, resets at ${ia(i)}) \u2014 ${r.method} ${r.url}`),!0):(v(na,`Rate limit exceeded, not retrying \u2014 ${r.method} ${r.url} (resets at ${ia(i)})`),!1)},onSecondaryRateLimit:(e,t,n,o)=>{let r=t,i=new Date(Date.now()+e*1e3);return o<1?(v(na,`Secondary rate limit hit (retry 
${o+1}/1, waiting ${e}s, resets at ${ia(i)}) \u2014 ${r.method} ${r.url}`),!0):(v(na,`Secondary rate limit exceeded, not retrying \u2014 ${r.method} ${r.url} (resets at ${ia(i)})`),!1)}}}function Ue(e){if(oa&&C_===e)return oa;let t=Ex();return oa=new Tx({auth:e,throttle:t}),C_=e,oa}async function Wm(e){let t=Ue(e),{data:n}=await t.rateLimit.get(),o=n.resources.search;return{remaining:o.remaining,limit:o.limit,resetAt:new Date(o.reset*1e3).toISOString()}}var na,Tx,oa,C_,Rn=b(()=>{"use strict";k_();P_();ye();na="github",Tx=S_.plugin(ra),oa=null,C_=null});function Px(e,t){if(!e||!t)return!0;let n=e.toLowerCase();return xx.has(n)?!0:n===t.toLowerCase()}function Cx(e,t){let n=new Date(e).getTime(),o=new Date(t).getTime();return Number.isNaN(n)||Number.isNaN(o)?e>t:n-o>=Rx}function O_(e){let{latestCommitDate:t,latestCommitAuthor:n,contributorUsername:o}=e;if(t)return Px(n,o)?t:void 0}function I_(e,t,n){return!(!e||!t||!Cx(e,t)||n&&e<n)}function A_(e,t){return!e||!t?!1:e>=t}function Ox(e){let{ciStatus:t,hasMergeConflict:n,hasUnrespondedComment:o,hasIncompleteChecklist:r,reviewDecision:i,lastMaintainerCommentDate:s,latestChangesRequestedDate:a,hasActionableCIFailure:c=!0}=e,u=O_(e),l=[];return o&&!I_(u,s,a)&&l.push("needs_response"),i==="changes_requested"&&a&&!A_(u,a)&&l.push("needs_changes"),t==="failing"&&c&&l.push("failing_ci"),n&&l.push("merge_conflict"),r&&l.push("incomplete_checklist"),l.length>0?l:void 0}function Jm(e){let t=Ix(e),n=Ox(e);return n?{...t,actionReasons:n}:t}function Ix(e){let{ciStatus:t,hasMergeConflict:n,hasUnrespondedComment:o,hasIncompleteChecklist:r,reviewDecision:i,daysSinceActivity:s,dormantThreshold:a,approachingThreshold:c,lastMaintainerCommentDate:u,latestChangesRequestedDate:l,hasActionableCIFailure:d=!0}=e,p="active";s>=a?p="dormant":s>=c&&(p="approaching_dormant");let g=O_(e);return 
o?I_(g,u,l)?t==="failing"&&d?{status:"needs_addressing",actionReason:"failing_ci",stalenessTier:p}:{status:"waiting_on_maintainer",waitReason:"changes_addressed",stalenessTier:p}:{status:"needs_addressing",actionReason:"needs_response",stalenessTier:p}:i==="changes_requested"&&l?A_(g,l)?t==="failing"&&d?{status:"needs_addressing",actionReason:"failing_ci",stalenessTier:p}:{status:"waiting_on_maintainer",waitReason:"changes_addressed",stalenessTier:p}:{status:"needs_addressing",actionReason:"needs_changes",stalenessTier:p}:t==="failing"?d?s>=5?{status:"waiting_on_maintainer",waitReason:"stale_ci_failure",stalenessTier:p}:{status:"needs_addressing",actionReason:"failing_ci",stalenessTier:p}:{status:"waiting_on_maintainer",waitReason:"ci_blocked",stalenessTier:p}:n?{status:"needs_addressing",actionReason:"merge_conflict",stalenessTier:p}:r?{status:"needs_addressing",actionReason:"incomplete_checklist",stalenessTier:p}:i==="approved"&&(t==="passing"||t==="unknown")?{status:"waiting_on_maintainer",waitReason:"pending_merge",stalenessTier:p}:{status:"waiting_on_maintainer",waitReason:"pending_review",stalenessTier:p}}var xx,Rx,Km=b(()=>{"use strict";xx=new Set(["autofix-ci[bot]","prettier-ci[bot]","pre-commit-ci[bot]"]),Rx=120*1e3});async function sa(e,t,n){let o=0,r=!1,i=async()=>{for(;o<e.length&&!r;){let a=e[o++];try{await t(a)}catch(c){throw r=!0,c}}},s=Math.min(n,e.length);await Promise.all(Array.from({length:s},()=>i()))}var Xm=b(()=>{"use strict"});async function wt(e,t=100,n=10){let o=[];for(let r=1;r<=n;r++){let{data:i}=await e(r);if(o.push(...i),i.length<t)break}return o}var Ao=b(()=>{"use strict"});function Bt(){return Ym||(Ym=new ca),Ym}async function Do(e,t,n){let o=e.getInflight(t);if(o)return E(mr,`Dedup hit for ${t}`),await o;let i=(async()=>{let a={},c=e.get(t);c&&(a["if-none-match"]=c.etag);try{let u=await n(a),l=u.headers?.etag;return l&&e.set(t,l,u.data),u.data}catch(u){if(Dx(u)){let l=e.get(t);if(l)return E(mr,`304 cache hit for ${t}`),l.body}throw 
u}})(),s=e.setInflight(t,i);try{return await i}finally{s()}}async function ua(e,t,n,o){let r=e.getIfFresh(t,n);if(r)return E(mr,`Time-based cache hit for ${t}`),r;let i=await o();return e.set(t,"",i),i}function Dx(e){return Ze(e)===304}var ct,aa,D_,mr,Ax,ca,Ym,Pn=b(()=>{"use strict";ct=re(require("fs"),1),aa=re(require("path"),1),D_=re(require("crypto"),1);Ve();ye();be();mr="http-cache",Ax=1440*60*1e3,ca=class{cacheDir;inflightRequests=new Map;constructor(t){this.cacheDir=t??Zs()}keyFor(t){return D_.createHash("sha256").update(t).digest("hex")}pathFor(t){return aa.join(this.cacheDir,`${this.keyFor(t)}.json`)}getIfFresh(t,n){let o=this.get(t);if(!o)return null;let r=Date.now()-new Date(o.cachedAt).getTime();return!Number.isFinite(r)||r<0||r>n?null:o.body}get(t){let n=this.pathFor(t);try{let o=ct.readFileSync(n,"utf-8"),r=JSON.parse(o);return r.url!==t?(E(mr,`Cache collision detected for ${t}, ignoring`),null):r}catch{return null}}set(t,n,o){let r={etag:n,url:t,body:o,cachedAt:new Date().toISOString()};try{ct.writeFileSync(this.pathFor(t),JSON.stringify(r),{encoding:"utf-8",mode:384}),E(mr,`Cached response for ${t}`)}catch(i){E(mr,`Failed to write cache for ${t}`,i)}}hasInflight(t){return this.inflightRequests.has(t)}getInflight(t){return this.inflightRequests.get(t)}setInflight(t,n){return this.inflightRequests.set(t,n),()=>{this.inflightRequests.delete(t)}}evictStale(t=Ax){let n=0;try{let o=ct.readdirSync(this.cacheDir),r=Date.now();for(let i of o){if(!i.endsWith(".json"))continue;let s=aa.join(this.cacheDir,i);try{let a=ct.readFileSync(s,"utf-8"),c=JSON.parse(a);r-new Date(c.cachedAt).getTime()>t&&(ct.unlinkSync(s),n++)}catch{try{ct.unlinkSync(s),n++}catch{}}}}catch{}return n>0&&E(mr,`Evicted ${n} stale cache entries`),n}clear(){try{let t=ct.readdirSync(this.cacheDir);for(let n of t)n.endsWith(".json")&&ct.unlinkSync(aa.join(this.cacheDir,n));E(mr,"Cache cleared")}catch{}}size(){try{return 
ct.readdirSync(this.cacheDir).filter(t=>t.endsWith(".json")).length}catch{return 0}}},Ym=null});function la(){return{status:"unknown",failingCheckNames:[],failingCheckConclusions:new Map}}function Qm(e,t,n){if(n==="cancelled"||n==="timed_out")return"infrastructure";if(n==="action_required")return"auth_gate";let o=e.toLowerCase();if(z_.some(r=>r.test(o)))return"auth_gate";if(U_.some(r=>r.test(o)))return"fork_limitation";if(j_.some(r=>r.test(o)))return"infrastructure";if(t){let r=t.toLowerCase();if(z_.some(i=>i.test(r)))return"auth_gate";if(U_.some(i=>i.test(r)))return"fork_limitation";if(j_.some(i=>i.test(r)))return"infrastructure"}return"actionable"}function da(e,t){return e.map(n=>{let o=t?.get(n);return{name:n,category:Qm(n,void 0,o),conclusion:o}})}function Ux(e){let t=!1,n=!1,o=!1,r=[],i=new Map;for(let s of e)s.conclusion==="failure"||s.conclusion==="cancelled"||s.conclusion==="timed_out"||s.conclusion==="action_required"?(t=!0,r.push(s.name),i.set(s.name,s.conclusion)):s.status==="in_progress"||s.status==="queued"?n=!0:s.conclusion==="success"&&(o=!0);return{hasFailingChecks:t,hasPendingChecks:n,hasSuccessfulChecks:o,failingCheckNames:r,failingCheckConclusions:i}}function zx(e){let t=e.statuses.filter(c=>{let u=(c.description||"").toLowerCase();return!(c.state==="failure"&&(u.includes("authorization required")||u.includes("authorize")))}),n=t.some(c=>c.state==="failure"||c.state==="error"),o=t.some(c=>c.state==="pending"),r=t.some(c=>c.state==="success"),i;n?i="failure":o?i="pending":r||t.length===0?i="success":i=e.state;let s=e.statuses.length>0,a=[];for(let c of t)(c.state==="failure"||c.state==="error")&&a.push(c.context);return{effectiveCombinedState:i,hasStatuses:s,failingStatusNames:a}}function jx(e,t,n){let{hasFailingChecks:o,hasPendingChecks:r,hasSuccessfulChecks:i,failingCheckNames:s,failingCheckConclusions:a}=e,{effectiveCombinedState:c,hasStatuses:u,failingStatusNames:l}=t,d=[...s,...l];return 
o||c==="failure"||c==="error"?{status:"failing",failingCheckNames:d,failingCheckConclusions:a}:r||c==="pending"?{status:"pending",failingCheckNames:[],failingCheckConclusions:new Map}:i||c==="success"?{status:"passing",failingCheckNames:[],failingCheckConclusions:new Map}:la()}async function eg(e,t,n,o){if(!o)return la();try{let[r,i]=await Promise.all([e.repos.getCombinedStatusForRef({owner:t,repo:n,ref:o}),e.checks.listForRef({owner:t,repo:n,ref:o}).catch(p=>{let g=Ze(p);if(g===429)throw p;if(g===403){let w=P(p).toLowerCase();if(w.includes("rate limit")||w.includes("abuse detection"))throw p}return g===404?E("pr-monitor",`Check runs 404 for ${t}/${n}@${o.slice(0,7)} (no checks configured)`):v("pr-monitor",`Non-404 error fetching check runs for ${t}/${n}@${o.slice(0,7)}: ${g??p}`),null})]),s=r.data,a=i?.data?.check_runs||[],c=new Map;for(let p of a){let g=c.get(p.name);(!g||new Date(p.started_at??0)>new Date(g.started_at??0))&&c.set(p.name,p)}let u=[...c.values()],l=Ux(u),d=zx(s);return jx(l,d,u.length)}catch(r){let i=Ze(r);if(i===401||i===403||i===429)throw r;return i===404?E("pr-monitor",`CI check 404 for ${t}/${n} (no CI configured)`):v("pr-monitor",`Failed to check CI for ${t}/${n}@${o.slice(0,7)}: ${P(r)}`),la()}}var U_,z_,j_,tg=b(()=>{"use strict";be();ye();U_=[/vercel/i,/netlify/i,/\bpreview\s*deploy/i,/\bdeploy\s*preview/i,/storybook/i,/chromatic/i,/percy/i,/cloudflare pages/i,/\binternal\b/i],z_=[/authoriz/i,/approval/i,/\bcla\b/i,/license\/cla/i],j_=[/\binstall\s*(os\s*)?dep(endenc|s\b)/i,/\bsetup\s+fail(ed|ure)?\b/i,/\bservice\s*unavailable/i,/\binfrastructure/i,/\bblacksmith\b/i,/\breadthedocs\b/i]});function Cn(e){return e.includes("[bot]")||Gx.has(e.toLowerCase())}function Uo(e){if(!e||e.length>100||e.includes("?"))return!1;let t=e.toLowerCase();return["thanks","thank you","lgtm","looks good","will review","we'll review","we'll get to this","noted","got it","will look","will check"].some(o=>t.includes(o))}var Gx,pa=b(()=>{"use strict";Gx=new 
Set(["allcontributors","changeset-bot","claassistant","codecov-commenter","greenkeeper","imgbot","netlify","renovate","snyk-bot","sonarcloud","stale","vercel"])});function G_(e){if(e.length===0)return"review_required";let t=new Map;for(let o of e){let r=o.user?.login,i=o.state;r&&i&&t.set(r,i)}let n=Array.from(t.values());return n.includes("CHANGES_REQUESTED")?"changes_requested":n.includes("APPROVED")?"approved":"review_required"}function F_(e){let t;for(let n of e)n.state==="CHANGES_REQUESTED"&&n.submitted_at&&(!t||n.submitted_at>t)&&(t=n.submitted_at);return t}function Fx(e,t){let n=t.filter(r=>r.pull_request_review_id===e);if(n.length===0)return!1;let o=new Map;for(let r of t)r.user?.login&&o.set(r.id,r.user.login.toLowerCase());return n.every(r=>{if(!r.in_reply_to_id)return!1;let i=o.get(r.in_reply_to_id),s=r.user?.login?.toLowerCase();return!(i==null||s==null||i!==s||!r.body||r.body.includes("?"))})}function Lx(e,t){return t.find(n=>n.pull_request_review_id===e&&n.body?.trim())?.body?.trim()}function L_(e,t,n,o){let r=[],i=o.toLowerCase();for(let c of e){let u=c.user?.login||"unknown";r.push({author:u,body:c.body||"",createdAt:c.created_at,isUser:u.toLowerCase()===i})}for(let c of t){if(!c.submitted_at)continue;let u=(c.body||"").trim();if(!u&&c.state!=="COMMENTED"&&c.state!=="CHANGES_REQUESTED")continue;let l=c.user?.login||"unknown";if(!u&&c.state==="COMMENTED"&&c.id!=null&&Fx(c.id,n))continue;let d=u||(c.id!=null?Lx(c.id,n):void 0)||(c.state==="CHANGES_REQUESTED"?"(requested changes via inline review comments)":"(posted inline review comments)");r.push({author:l,body:d,createdAt:c.submitted_at,isUser:l.toLowerCase()===i})}r.sort((c,u)=>new Date(c.createdAt).getTime()-new Date(u.createdAt).getTime());let s=null;for(let c of r)c.isUser&&(s=new Date(c.createdAt));let a;for(let c of r){if(c.isUser||c.author==="unknown"||Cn(c.author))continue;let u=new 
Date(c.createdAt);(!s||u>s)&&(a={author:c.author,body:c.body.slice(0,200)+(c.body.length>200?"...":""),createdAt:c.createdAt})}return a&&Uo(a.body)&&(a=void 0),{hasUnrespondedComment:!!a,lastMaintainerComment:a}}var N_=b(()=>{"use strict";pa()});function M_(e){return Nx.test(e.toLowerCase())}function Z_(e){if(!e)return{hasIncompleteChecklist:!1};let t=/- \[x\]/gi,n=/^.*- \[ \].*$/gm,o=e.match(t)||[],r=e.match(n)||[],i=o.length;if(i+r.length===0)return{hasIncompleteChecklist:!1};let a=r.filter(u=>!M_(u)),c=i+a.length;return{hasIncompleteChecklist:a.length>0,checklistStats:{checked:i,total:c}}}var Nx,rg=b(()=>{"use strict";Nx=/\(if\s|\bif applicable\b|\bif needed\b|\bif relevant\b|\bonly if\b|\bwhen applicable\b|\(optional\)|- \[ \]\s*optional\b|\bn\/a\b|\bnot applicable\b|\bif required\b|\bif necessary\b/});function V_(e,t){let n=[];if(t==="changes_requested"&&n.push("changes_requested"),!e)return n;let o=e.toLowerCase();return["screenshot","demo","recording","screen recording","before/after","before and after","gif","video","screencast","show me","can you show"].some(c=>o.includes(c))&&n.push("demo_requested"),["add test","test coverage","unit test","missing test","add a test","write test","needs test","need test"].some(c=>o.includes(c))&&n.push("tests_requested"),["documentation","readme","jsdoc","docstring","add docs","update docs","document this"].some(c=>o.includes(c))&&n.push("docs_requested"),["rebase","merge conflict","out of date","behind main","behind master"].some(c=>o.includes(c))&&n.push("rebase_requested"),n}var H_=b(()=>{"use strict"});function Zx(e){let t=q_[e]?.label;return t?t.replace(/[[\]]/g,"").toLowerCase():e}function ma(e){if(e.status==="needs_addressing"&&e.actionReason){let t=q_[e.actionReason];if(t){let n=t.description(e);if(e.actionReasons&&e.actionReasons.length>1){let o=e.actionReasons.filter(r=>r!==e.actionReason).map(Zx);o.length>0&&(n+=` (also: ${o.join(", 
")})`)}return{displayLabel:t.label,displayDescription:n}}}if(e.status==="waiting_on_maintainer"&&e.waitReason){let t=Mx[e.waitReason];if(t)return{displayLabel:t.label,displayDescription:t.description(e)}}return v("display-utils",`PR ${e.url} has status "${e.status}" but no matching reason (actionReason=${e.actionReason}, waitReason=${e.waitReason})`),e.status==="needs_addressing"?{displayLabel:"[Needs Addressing]",displayDescription:"Action required"}:{displayLabel:"[Waiting on Maintainer]",displayDescription:"Awaiting maintainer action"}}var q_,Mx,ng=b(()=>{"use strict";ye();q_={needs_response:{label:"[Needs Response]",description:e=>e.lastMaintainerComment?`@${e.lastMaintainerComment.author} commented`:"Maintainer awaiting response"},needs_changes:{label:"[Needs Changes]",description:()=>"Review requested changes \u2014 push commits to address"},failing_ci:{label:"[CI Failing]",description:e=>{let t=e.classifiedChecks||[],n=t.filter(i=>i.category==="actionable");if(n.length>0)return`${n.length} check${n.length===1?"":"s"} failed: ${n.map(i=>i.name).join(", ")}`;let o=t.filter(i=>i.category==="infrastructure");if(o.length>0)return`${o.length} check${o.length===1?"":"s"} cancelled/timed out (infrastructure)`;let r=e.failingCheckNames||[];return r.length>0?`${r.length} check${r.length===1?"":"s"} failed`:"One or more CI checks are failing"}},merge_conflict:{label:"[Merge Conflict]",description:()=>"PR has merge conflicts with the base branch"},incomplete_checklist:{label:"[Incomplete Checklist]",description:e=>e.checklistStats?`${e.checklistStats.checked}/${e.checklistStats.total} items checked`:"PR body has unchecked required checkboxes"},ci_not_running:{label:"[CI Not Running]",description:()=>"No CI checks have been triggered"},needs_rebase:{label:"[Needs Rebase]",description:()=>"PR branch is significantly behind upstream"},missing_required_files:{label:"[Missing Files]",description:e=>e.missingRequiredFiles?`Missing: ${e.missingRequiredFiles.join(", 
")}`:"Required files are missing"}},Mx={pending_review:{label:"[Waiting on Maintainer]",description:()=>"Awaiting review"},pending_merge:{label:"[Waiting on Maintainer]",description:()=>"Approved and CI passes \u2014 waiting for merge"},changes_addressed:{label:"[Waiting on Maintainer]",description:e=>e.hasUnrespondedComment&&e.lastMaintainerComment?`Changes addressed \u2014 waiting for @${e.lastMaintainerComment.author} to re-review`:"Changes addressed \u2014 awaiting re-review"},ci_blocked:{label:"[CI Blocked]",description:e=>{let t=e.classifiedChecks||[];return t.length>0&&t.every(n=>n.category!=="actionable")?`All failing checks are non-actionable (${[...new Set(t.map(o=>o.category))].join(", ")})`:"CI checks are failing but no action is needed from you"}},stale_ci_failure:{label:"[Stale CI Failure]",description:e=>`CI failing for ${e.daysSinceActivity}+ days \u2014 likely pre-existing or non-actionable`}}});function Dr(){return{repos:new Map,monthlyCounts:{},monthlyOpenedCounts:{},dailyActivityCounts:{}}}function Hx(e){if(typeof e!="object"||e===null)return!1;let t=e;return Array.isArray(t.reposEntries)&&typeof t.monthlyCounts=="object"&&t.monthlyCounts!==null&&typeof t.monthlyOpenedCounts=="object"&&t.monthlyOpenedCounts!==null&&typeof t.dailyActivityCounts=="object"&&t.dailyActivityCounts!==null}async function B_(e,t,n,o,r,i){if(!t)return Dr();let s=Bt(),a=i?`:stars${i.minStars}`:"",c=`pr-counts:v3:${o}:${t}${a}`,u=s.getIfFresh(c,Vx);if(u&&Hx(u))return E(Ee,`Using cached ${o} PR counts for @${t}`),{repos:new Map(u.reposEntries),monthlyCounts:u.monthlyCounts,monthlyOpenedCounts:u.monthlyOpenedCounts,dailyActivityCounts:u.dailyActivityCounts};E(Ee,`Fetching ${o} PR counts for @${t}...`);let l=new Map,d={},p={},g={},w=1,S=0,j;for(;;){let{data:k}=await e.search.issuesAndPullRequests({q:`is:pr ${n} author:${t} -user:${t}`,sort:"updated",order:"desc",per_page:100,page:w});j=k.total_count;for(let L of k.items){let F=xr(L.html_url);if(!F){v(Ee,`Skipping ${o} PR with 
unparseable URL: ${L.html_url}`);continue}let{owner:H}=F,z=`${H}/${F.repo}`;if(_t(H,t)||i&&Ht(i.knownStarCounts.get(z),i.minStars))continue;let R=r(l,z,L);if(R){let G=R.slice(0,7);d[G]=(d[G]||0)+1;let Y=R.slice(0,10);Y.length===10&&(g[Y]=(g[Y]||0)+1)}if(L.created_at){let G=L.created_at.slice(0,7);p[G]=(p[G]||0)+1;let Y=L.created_at.slice(0,10);Y.length===10&&(g[Y]=(g[Y]||0)+1)}}if(S+=k.items.length,S>=k.total_count||S>=1e3||k.items.length===0||w>=Ar)break;w++}return S<j&&w>=Ar&&v(Ee,`Pagination capped at ${Ar} pages: fetched ${S} of ${j} ${o} PRs. Stats may be incomplete for prolific contributors.`),E(Ee,`Found ${S} ${o} PRs across ${l.size} repos`),s.set(c,"",{reposEntries:Array.from(l.entries()),monthlyCounts:d,monthlyOpenedCounts:p,dailyActivityCounts:g}),{repos:l,monthlyCounts:d,monthlyOpenedCounts:p,dailyActivityCounts:g}}function W_(e,t,n){return B_(e,t,"is:merged","merged",(o,r,i)=>{i.pull_request?.merged_at||v(Ee,`merged_at missing for merged PR ${i.html_url}${i.closed_at?", falling back to closed_at":", no date available"}`);let s=i.pull_request?.merged_at||i.closed_at||"",a=o.get(r);return a?(a.count+=1,s&&s>a.lastMergedAt&&(a.lastMergedAt=s)):o.set(r,{count:1,lastMergedAt:s}),s},n)}function J_(e,t,n){return B_(e,t,"is:closed is:unmerged","closed",(o,r,i)=>(o.set(r,(o.get(r)||0)+1),i.closed_at||""),n)}async function K_(e,t,n,o,r,i){if(!t.githubUsername)return v(Ee,`Skipping recently ${o} PRs fetch: no githubUsername configured. 
Run /setup-oss to configure.`),[];let s=new Date;s.setDate(s.getDate()-r);let a=s.toISOString().split("T")[0];E(Ee,`Fetching recently ${o} PRs for @${t.githubUsername} (since ${a})...`);let{data:c}=await e.search.issuesAndPullRequests({q:n.replace("{username}",t.githubUsername).replace("{since}",a),sort:"updated",order:"desc",per_page:100}),u=[];for(let l of c.items){let d=Oe(l.html_url);if(!d){v(Ee,`Could not parse GitHub URL from API response: ${l.html_url}`);continue}let p=`${d.owner}/${d.repo}`;_t(d.owner,t.githubUsername)||u.push(i(l,{owner:d.owner,repo:p,number:d.number}))}return E(Ee,`Found ${u.length} recently ${o} PRs`),u}async function X_(e,t,n=7){return K_(e,t,"is:pr is:closed is:unmerged author:{username} closed:>={since}","closed",n,(o,{repo:r,number:i})=>({url:o.html_url,repo:r,number:i,title:o.title,closedAt:o.closed_at||""}))}async function Y_(e,t,n=7){return K_(e,t,"is:pr is:merged author:{username} merged:>={since}","merged",n,(o,{repo:r,number:i})=>{let s=o.pull_request?.merged_at;return s||v(Ee,`merged_at missing for merged PR ${o.html_url}${o.closed_at?", falling back to closed_at":", no date available"}`),{url:o.html_url,repo:r,number:i,title:o.title,mergedAt:s||o.closed_at||""}})}async function Q_(e,t,n){if(!t.githubUsername)return v(Ee,"Skipping merged PRs fetch: no githubUsername configured."),[];let o=n?` merged:>${n}`:"",r=`is:pr is:merged author:${t.githubUsername} -user:${t.githubUsername}${o}`;E(Ee,`Fetching merged PRs${n?` since ${n}`:" (all time)"}...`);let i=[],s=1,a=0;for(;;){let{data:c}=await e.search.issuesAndPullRequests({q:r,sort:"updated",order:"desc",per_page:100,page:s});for(let u of c.items){let l=Oe(u.html_url);if(!l){v(Ee,`Skipping merged PR with unparseable URL: ${u.html_url}`);continue}if(_t(l.owner,t.githubUsername))continue;let d=u.pull_request?.merged_at||u.closed_at||"";if(!d){v(Ee,`Skipping merged PR with no merge date: 
${u.html_url}`);continue}i.push({url:u.html_url,title:u.title,mergedAt:d})}if(a+=c.items.length,a>=c.total_count||a>=1e3||c.items.length===0||s>=Ar)break;s++}return E(Ee,`Fetched ${i.length} merged PRs${n?" (incremental)":" (initial)"}`),i}async function e$(e,t,n){if(!t.githubUsername)return v(Ee,"Skipping closed PRs fetch: no githubUsername configured."),[];let o=n?` closed:>${n}`:"",r=`is:pr is:closed is:unmerged author:${t.githubUsername} -user:${t.githubUsername}${o}`;E(Ee,`Fetching closed PRs${n?` since ${n}`:" (all time)"}...`);let i=[],s=1,a=0,c;for(;;){let{data:u}=await e.search.issuesAndPullRequests({q:r,sort:"updated",order:"desc",per_page:100,page:s});c=u.total_count;for(let l of u.items){let d=Oe(l.html_url);if(!d){v(Ee,`Skipping closed PR with unparseable URL: ${l.html_url}`);continue}if(_t(d.owner,t.githubUsername))continue;let p=l.closed_at||"";if(!p){v(Ee,`Skipping closed PR with no close date: ${l.html_url}`);continue}i.push({url:l.html_url,title:l.title,closedAt:p})}if(a+=u.items.length,a>=c||a>=1e3||u.items.length===0||s>=Ar)break;s++}return a<c&&s>=Ar&&v(Ee,`Pagination capped at ${Ar} pages: fetched ${a} of ${c} closed PRs. Oldest PRs may be missing.`),E(Ee,`Fetched ${i.length} closed PRs${n?" (incremental)":" (initial)"}`),i}var Ee,Vx,Ar,ga=b(()=>{"use strict";Ve();At();ye();Pn();Ee="github-stats",Vx=1440*60*1e3,Ar=3});function qx(e,t){return e===!1||t==="dirty"}var fa,Bx,Ur,t$=b(()=>{"use strict";Rn();Tn();Ve();Km();Xm();be();Ao();ye();Pn();tg();N_();rg();H_();ng();ga();ng();tg();rg();Km();fa="pr-monitor",Bx=lr,Ur=class{octokit;stateManager;constructor(t){this.octokit=Ue(t),this.stateManager=W()}async fetchUserOpenPRs(){let t=this.stateManager.getState().config;if(!t.githubUsername)throw new Nt("No GitHub username configured. 
Run setup first.");E("pr-monitor",`Fetching open PRs for @${t.githubUsername}...`);let n=[],o=1,r=100,i=await this.octokit.search.issuesAndPullRequests({q:`is:pr is:open author:${t.githubUsername}`,sort:"updated",order:"desc",per_page:r,page:1});n.push(...i.data.items);let s=i.data.total_count;E("pr-monitor",`Found ${s} open PRs`);let a=Math.min(Math.ceil(s/r),10);for(;o<a;){o++;let d=await this.octokit.search.issuesAndPullRequests({q:`is:pr is:open author:${t.githubUsername}`,sort:"updated",order:"desc",per_page:r,page:o});n.push(...d.data.items)}let c=[],u=[],l=n.filter(d=>{if(!d.pull_request)return!1;let p=xr(d.html_url);return p?!_t(p.owner,t.githubUsername):(v("pr-monitor",`Skipping PR with unparseable URL: ${d.html_url}`),!1)});return E("pr-monitor",`Filtered to ${l.length} PRs after excluding own repos`),await wm("pr-monitor",`Fetch details for ${l.length} PRs`,async()=>{await sa(l,async d=>{try{E("pr-monitor",`Fetching details for ${d.html_url}`);let p=await this.fetchPRDetails(d.html_url);p&&c.push(p)}catch(p){let g=P(p);v("pr-monitor",`Error fetching ${d.html_url}: ${g}`),u.push({prUrl:d.html_url,error:g})}},Bx)}),c.sort((d,p)=>d.status===p.status?0:d.status==="needs_addressing"?-1:1),{prs:c,failures:u}}async fetchPRDetails(t){let n=Oe(t);if(!n||n.type!=="pull")throw new _e(`Invalid PR URL format: ${t}`);let{owner:o,repo:r,number:i}=n,s=this.stateManager.getState().config,[a,c,u,l]=await Promise.all([this.octokit.pulls.get({owner:o,repo:r,pull_number:i}),wt(V=>this.octokit.issues.listComments({owner:o,repo:r,issue_number:i,per_page:100,page:V})),this.octokit.pulls.listReviews({owner:o,repo:r,pull_number:i}),wt(V=>this.octokit.pulls.listReviewComments({owner:o,repo:r,pull_number:i,per_page:100,page:V})).catch(V=>{let ee=Ze(V);if(ee===429)throw V;if(ee===403){let fe=P(V).toLowerCase();if(fe.includes("rate limit")||fe.includes("abuse detection"))throw V;return v("pr-monitor",`403 fetching review comments for ${o}/${r}#${i}: ${fe}`),[]}return 
ee===404?E("pr-monitor",`Review comments 404 for ${o}/${r}#${i} (likely no inline comments)`):v("pr-monitor",`Failed to fetch review comments for ${o}/${r}#${i} (status ${ee??"unknown"}): self-reply detection will be skipped`),[]})]),d=a.data,p=u.data,g=G_(p),w=qx(d.mergeable,d.mergeable_state),{hasUnrespondedComment:S,lastMaintainerComment:j}=L_(c,p,l,s.githubUsername),k=eg(this.octokit,o,r,d.head.sha),F=S||g==="changes_requested"?this.octokit.repos.getCommit({owner:o,repo:r,ref:d.head.sha}).then(V=>({date:V.data.commit.author?.date,author:V.data.author?.login})).catch(V=>{let ee=Ze(V);if(ee===429)throw V;if(ee===403){let fe=P(V).toLowerCase();if(fe.includes("rate limit")||fe.includes("abuse detection"))throw V;v("pr-monitor",`403 fetching commit date for ${o}/${r}@${d.head.sha.slice(0,7)}: ${P(V)}`);return}v("pr-monitor",`Failed to fetch commit date for ${o}/${r}@${d.head.sha.slice(0,7)}: ${P(V)}`)}):Promise.resolve(void 0),[{status:H,failingCheckNames:z,failingCheckConclusions:R},G]=await Promise.all([k,F]),Y=G?.date,J=G?.author,{hasIncompleteChecklist:$e,checklistStats:lt}=Z_(d.body||""),le=V_(j?.body,g),we=ft(new Date(d.updated_at),new Date),qe=F_(p),dt=da(z,R),Et=H==="failing"&&dt.some(V=>V.category==="actionable"),{status:Qt,actionReason:je,waitReason:Xe,stalenessTier:Be,actionReasons:K}=Jm({ciStatus:H,hasMergeConflict:w,hasUnrespondedComment:S,hasIncompleteChecklist:$e,reviewDecision:g,daysSinceActivity:we,dormantThreshold:s.dormantThresholdDays,approachingThreshold:s.approachingDormantDays,latestCommitDate:Y,latestCommitAuthor:J,contributorUsername:s.githubUsername,lastMaintainerCommentDate:j?.createdAt,latestChangesRequestedDate:qe,hasActionableCIFailure:Et});return 
this.buildFetchedPR({id:d.id,url:t,repo:`${o}/${r}`,number:i,title:d.title,status:Qt,actionReason:je,waitReason:Xe,stalenessTier:Be,actionReasons:K,createdAt:d.created_at,updatedAt:d.updated_at,daysSinceActivity:we,ciStatus:H,failingCheckNames:z,classifiedChecks:dt,hasMergeConflict:w,reviewDecision:g,hasUnrespondedComment:S,lastMaintainerComment:j,latestCommitDate:Y,hasIncompleteChecklist:$e,checklistStats:lt,maintainerActionHints:le})}buildFetchedPR(t){let n={...t,displayLabel:"",displayDescription:""},{displayLabel:o,displayDescription:r}=ma(n);return n.displayLabel=o,n.displayDescription=r,n}async fetchUserMergedPRCounts(t){let n=this.stateManager.getState().config;return W_(this.octokit,n.githubUsername,t)}async fetchUserClosedPRCounts(t){let n=this.stateManager.getState().config;return J_(this.octokit,n.githubUsername,t)}async fetchRepoMetadata(t){if(t.length===0)return new Map;E(fa,`Fetching repo metadata for ${t.length} repos...`);let n=new Map,o=Bt(),r=[...new Set(t)],i=10;for(let s=0;s<r.length;s+=i){let a=r.slice(s,s+i),c=await Promise.allSettled(a.map(async l=>{let d=l.split("/");if(d.length!==2||!d[0]||!d[1])throw new _e(`Malformed repo identifier: "${l}"`);let[p,g]=d,w=`/repos/${p}/${g}`,S=await Do(o,w,k=>this.octokit.repos.get({owner:p,repo:g,headers:k})),j={stars:S.stargazers_count,language:S.language??null};return{repo:l,metadata:j}})),u=0;for(let l=0;l<c.length;l++){let d=c[l];d.status==="fulfilled"?n.set(d.value.repo,d.value.metadata):(u++,v(fa,`Failed to fetch metadata for ${a[l]}: ${P(d.reason)}`))}if(u===a.length&&a.length>0){let l=r.length-s-i;l>0&&v(fa,`Entire chunk failed, aborting remaining ${l} repos`);break}}return E(fa,`Fetched repo metadata for ${n.size}/${t.length} repos`),n}async fetchRecentlyClosedPRs(t=7){let n=this.stateManager.getState().config;return X_(this.octokit,n,t)}async fetchRecentlyMergedPRs(t=7){let n=this.stateManager.getState().config;return Y_(this.octokit,n,t)}generateDigest(t,n=[],o=[]){let r=new 
Date().toISOString(),i=t.filter(c=>c.status==="needs_addressing"),s=t.filter(c=>c.status==="waiting_on_maintainer"),a=this.stateManager.getStats();return{generatedAt:r,openPRs:t,needsAddressingPRs:i,waitingOnMaintainerPRs:s,recentlyClosedPRs:n,recentlyMergedPRs:o,shelvedPRs:[],autoUnshelvedPRs:[],summary:{totalActivePRs:t.length,totalNeedingAttention:i.length,totalMergedAllTime:a.mergedPRs,mergeRate:parseFloat(a.mergeRate)}}}async updateRepoScoreFromObservedPR(t,n){n?this.stateManager.incrementMergedCount(t):this.stateManager.incrementClosedCount(t)}}});function og(e){if(!e.labels||!Array.isArray(e.labels)||e.labels.length===0)return!1;let n=e.labels.map(o=>(typeof o=="string"?o:o.name||"").toLowerCase()).filter(o=>o.length>0);return n.length===0?!1:n.every(o=>r$.has(o))}function Jx(e){return!e.labels||!Array.isArray(e.labels)?!1:e.labels.map(o=>(typeof o=="string"?o:o.name||"").toLowerCase()).filter(o=>Wx.has(o)).length>=5}function Kx(e){return e?/^.+\s+(question|fact|point|item|task|entry|post|challenge|exercise|example|problem|tip|recipe|snippet)\s+#?\d+$/i.test(e):!1}function n$(e){let t=new Set,n=new Map;for(let o of e){let r=o.repository_url.split("/").slice(-2).join("/");if(Jx(o)){t.add(r);continue}o.title&&Kx(o.title)&&n.set(r,(n.get(r)||0)+1)}for(let[o,r]of n)r>=3&&t.add(o);return t}function ig(e,t){let n=new Map,o=[];for(let r of e){let i=n.get(r.issue.repo)||0;i<t&&(o.push(r),n.set(r.issue.repo,i+1))}return o}var r$,Wx,ha=b(()=>{"use strict";r$=new Set(["documentation","docs","typo","spelling"]);Wx=new Set(["good first issue","hacktoberfest","easy","up-for-grabs","first-timers-only","beginner-friendly","beginner","starter","newbie","low-hanging-fruit","community"])});function o$(e,t){let n=0;return e>=5e3?n+=8:e>=500?n+=5:e>=50&&(n+=3),t>=500?n+=4:t>=50&&(n+=2),n}function i$(e){let t=50;e.repoScore!==null&&(t+=e.repoScore*2),t+=e.repoQualityBonus??0,e.mergedPRCount>0&&(t+=15),e.clearRequirements&&(t+=15);let n=new Date(e.issueUpdatedAt),o=ft(n);return 
o<=14?t+=15:o<=30&&(t+=Math.round(15*(1-(o-14)/16))),e.hasContributionGuidelines&&(t+=10),e.orgHasMergedPRs&&(t+=5),e.matchesPreferredCategory&&(t+=5),e.hasExistingPR&&(t-=30),e.isClaimed&&(t-=20),e.closedWithoutMergeCount>0&&e.mergedPRCount===0&&(t-=15),Math.max(0,Math.min(100,t))}var s$=b(()=>{"use strict";Ve()});function a$(e,t){if(t.length===0)return!1;let n=e.split("/")[0]?.toLowerCase();if(!n)return!1;for(let o of t){let r=Yx[o];if(r&&r.some(i=>i.toLowerCase()===n))return!0}return!1}function c$(e){let t=new Set;for(let n of e){let o=Xx[n];if(o)for(let r of o)t.add(r)}return[...t]}var Xx,Yx,sg=b(()=>{"use strict";Xx={nonprofit:["nonprofit","social-good","humanitarian","charity","social-impact","civic-tech"],devtools:["developer-tools","devtools","cli","sdk","linter","formatter","build-tool"],infrastructure:["infrastructure","cloud","kubernetes","docker","devops","monitoring","observability"],"web-frameworks":["web-framework","frontend","backend","fullstack","nextjs","react","vue"],"data-ml":["machine-learning","data-science","deep-learning","nlp","data-pipeline","analytics"],education:["education","learning","tutorial","courseware","edtech","teaching"]},Yx={nonprofit:["code-for-america","opengovfoundation","ushahidi","hotosm","openfn","democracyearth"],devtools:["eslint","prettier","vitejs","biomejs","oxc-project","ast-grep","turbot"],infrastructure:["kubernetes","hashicorp","grafana","prometheus","open-telemetry","envoyproxy","cncf"],"web-frameworks":["vercel","remix-run","sveltejs","nuxt","astro","redwoodjs","blitz-js"],"data-ml":["huggingface","mlflow","apache","dbt-labs","dagster-io","prefecthq","langchain-ai"],education:["freeCodeCamp","TheOdinProject","exercism","codecademy","oppia","Khan"]}});async function u$(e,t,n,o){try{let{data:r}=await e.search.issuesAndPullRequests({q:`repo:${t}/${n} is:pr ${o}`,per_page:5}),s=(await wt(a=>e.issues.listEventsForTimeline({owner:t,repo:n,issue_number:o,per_page:100,page:a}))).filter(a=>{let c=a;return 
c.event==="cross-referenced"&&c.source?.issue?.pull_request});return{passed:r.total_count===0&&s.length===0}}catch(r){let i=P(r);return v(ag,`Failed to check for existing PRs on ${t}/${n}#${o}: ${i}. Assuming no existing PR.`),{passed:!0,inconclusive:!0,reason:i}}}async function l$(e,t,n){try{let{data:o}=await e.search.issuesAndPullRequests({q:`repo:${t}/${n} is:pr is:merged author:@me`,per_page:1});return o.total_count}catch(o){let r=P(o);return v(ag,`Could not check merged PRs in ${t}/${n}: ${r}. Defaulting to 0.`),0}}async function d$(e,t,n,o,r){if(r===0)return{passed:!0};try{let s=(await e.paginate(e.issues.listComments,{owner:t,repo:n,issue_number:o,per_page:100},a=>a.data)).slice(-100);for(let a of s){let c=(a.body||"").toLowerCase();if(Qx.some(u=>c.includes(u)))return{passed:!1}}return{passed:!0}}catch(i){let s=P(i);return v(ag,`Failed to check claim status on ${t}/${n}#${o}: ${s}. Assuming not claimed.`),{passed:!0,inconclusive:!0,reason:s}}}function p$(e){if(!e||e.length<50)return!1;let t=/\d\.|[-*]\s/.test(e),n=/```/.test(e),o=/expect|should|must|want/i.test(e);return[t,n,o,e.length>200].filter(Boolean).length>=2}var ag,Qx,m$=b(()=>{"use strict";Ao();be();ye();ag="issue-eligibility",Qx=["i'm working on this","i am working on this","i'll take this","i will take this","working on it","i'd like to work on","i would like to work on","can i work on","may i work on","assigned to me","i'm on it","i'll submit a pr","i will submit a pr","working on a fix","working on a pr"]});function f$(){let e=Date.now();for(let[t,n]of Wt.entries())e-n.fetchedAt>h$&&Wt.delete(t);if(Wt.size>g$){let n=Array.from(Wt.entries()).sort((o,r)=>o[1].fetchedAt-r[1].fetchedAt).slice(0,Wt.size-g$);for(let[o]of n)Wt.delete(o)}}async function v$(e,t,n){let o=Bt(),r=`health:${t}/${n}`;try{return await ua(o,r,eR,async()=>{let i=`/repos/${t}/${n}`,s=await Do(o,i,p=>e.repos.get({owner:t,repo:n,headers:p})),{data:a}=await 
e.repos.listCommits({owner:t,repo:n,per_page:1}),u=a[0]?.commit?.author?.date||s.pushed_at,l=ft(new Date(u)),d="unknown";try{let{data:p}=await e.actions.listRepoWorkflows({owner:t,repo:n,per_page:1});p.total_count>0&&(d="passing")}catch(p){let g=P(p);v(cg,`Failed to check CI status for ${t}/${n}: ${g}. Defaulting to unknown.`)}return{repo:`${t}/${n}`,lastCommitAt:u,daysSinceLastCommit:l,openIssuesCount:s.open_issues_count,avgIssueResponseDays:0,ciStatus:d,isActive:l<30,stargazersCount:s.stargazers_count,forksCount:s.forks_count}})}catch(i){let s=P(i);return v(cg,`Error checking project health for ${t}/${n}: ${s}`),{repo:`${t}/${n}`,lastCommitAt:"",daysSinceLastCommit:999,openIssuesCount:0,avgIssueResponseDays:0,ciStatus:"unknown",isActive:!1,checkFailed:!0,failureReason:s}}}async function b$(e,t,n){let o=`${t}/${n}`,r=Wt.get(o);if(r&&Date.now()-r.fetchedAt<h$)return r.guidelines;let i=["CONTRIBUTING.md",".github/CONTRIBUTING.md","docs/CONTRIBUTING.md","contributing.md"],s=await Promise.allSettled(i.map(a=>e.repos.getContent({owner:t,repo:n,path:a}).then(({data:c})=>"content"in c?Buffer.from(c.content,"base64").toString("utf-8"):null)));for(let a=0;a<s.length;a++){let c=s[a];if(c.status==="fulfilled"&&c.value){let u=tR(c.value);return Wt.set(o,{guidelines:u,fetchedAt:Date.now()}),f$(),u}if(c.status==="rejected"){let u=c.reason instanceof Error?c.reason.message:String(c.reason);!u.includes("404")&&!u.includes("Not Found")&&v(cg,`Unexpected error fetching ${i[a]} from ${t}/${n}: ${u}`)}}Wt.set(o,{guidelines:void 0,fetchedAt:Date.now()}),f$()}function tR(e){let t={rawContent:e},n=e.toLowerCase();if(n.includes("branch")){let o=e.match(/branch[^\n]*(?:named?|format|convention)[^\n]*[`"]([^`"]+)[`"]/i);o&&(t.branchNamingConvention=o[1])}if(n.includes("conventional commit"))t.commitMessageFormat="conventional commits";else if(n.includes("commit message")){let o=e.match(/commit message[^\n]*[`"]([^`"]+)[`"]/i);o&&(t.commitMessageFormat=o[1])}return 
n.includes("jest")?t.testFramework="Jest":n.includes("rspec")?t.testFramework="RSpec":n.includes("pytest")?t.testFramework="pytest":n.includes("mocha")&&(t.testFramework="Mocha"),n.includes("eslint")?t.linter="ESLint":n.includes("rubocop")?t.linter="RuboCop":n.includes("prettier")&&(t.formatter="Prettier"),(n.includes("cla")||n.includes("contributor license agreement"))&&(t.claRequired=!0),t}var cg,Wt,h$,eR,g$,y$=b(()=>{"use strict";Ve();be();ye();Pn();cg="repo-health",Wt=new Map,h$=3600*1e3,eR=14400*1e3,g$=100});var _$,rR,va,$$=b(()=>{"use strict";Ve();be();ye();s$();sg();m$();y$();_$="issue-vetting",rR=lr,va=class{octokit;stateManager;constructor(t,n){this.octokit=t,this.stateManager=n}async vetIssue(t){let n=Oe(t);if(!n||n.type!=="issues")throw new _e(`Invalid issue URL: ${t}`);let{owner:o,repo:r,number:i}=n,s=`${o}/${r}`,{data:a}=await this.octokit.issues.get({owner:o,repo:r,issue_number:i}),[c,u,l,d,p]=await Promise.all([u$(this.octokit,o,r,i),d$(this.octokit,o,r,i,a.comments),v$(this.octokit,o,r),b$(this.octokit,o,r),l$(this.octokit,o,r)]),g=c.passed,w=u.passed,S=p$(a.body||""),j=l.checkFailed?!0:l.isActive,k={passedAllChecks:g&&w&&j&&S,checks:{noExistingPR:g,notClaimed:w,projectActive:j,clearRequirements:S,contributionGuidelinesFound:!!d},contributionGuidelines:d,notes:[]};g||k.notes.push("Existing PR found for this issue"),w||k.notes.push("Issue appears to be claimed by someone"),c.inconclusive&&k.notes.push(`Could not verify absence of existing PRs: ${c.reason||"API error"}`),u.inconclusive&&k.notes.push(`Could not verify claim status: ${u.reason||"API error"}`),l.checkFailed?k.notes.push(`Could not verify project activity: ${l.failureReason||"API error"}`):l.isActive||k.notes.push("Project may be inactive"),S||k.notes.push("Issue requirements are unclear"),d||k.notes.push("No CONTRIBUTING.md found");let L={id:a.id,url:t,repo:s,number:i,title:a.title,status:"candidate",labels:a.labels.map(Be=>typeof 
Be=="string"?Be:Be.name||""),createdAt:a.created_at,updatedAt:a.updated_at,vetted:!0,vettingResult:k},F=[],H=[];g||F.push("Has existing PR"),w||F.push("Already claimed"),!l.isActive&&!l.checkFailed&&F.push("Inactive project"),S||F.push("Unclear requirements"),g&&H.push("No existing PR"),w&&H.push("Not claimed"),l.isActive&&!l.checkFailed&&H.push("Active project"),S&&H.push("Clear requirements"),d&&H.push("Has contribution guidelines");let z=this.stateManager.getState().config,R=this.stateManager.getRepoScore(s),G=R&&R.mergedPRCount>0?R.mergedPRCount:p;G>0?H.push(`Trusted project (${G} PR${G>1?"s":""} merged)`):z.trustedProjects.includes(s)&&H.push("Trusted project (previous PR merged)"),R&&(R.closedWithoutMergeCount>0&&G===0?F.push("User has rejected PR(s) in this repo with no successful merges"):R.closedWithoutMergeCount>0&&G>0&&k.notes.push(`Mixed history: ${G} merged, ${R.closedWithoutMergeCount} closed without merge`));let Y=s.split("/")[0],J=!1;Y&&s.includes("/")&&(J=Object.values(this.stateManager.getState().repoScores).some(Be=>Be.repo&&Be.mergedPRCount>0&&Be.repo.startsWith(Y+"/")&&Be.repo!==s)),J&&H.push(`Org affinity (merged PRs in other ${Y} repos)`);let $e=z.projectCategories??[],lt=a$(s,$e);lt&&H.push("Matches preferred project category");let le;k.passedAllChecks?le="approve":F.length>2?le="skip":le="needs_review";let we=l.checkFailed||c.inconclusive||u.inconclusive;le==="approve"&&we&&(le="needs_review",k.notes.push("Recommendation downgraded: one or more checks were inconclusive"));let qe=o$(l.stargazersCount??0,l.forksCount??0);l.checkFailed&&qe===0&&k.notes.push("Repo quality bonus unavailable: could not fetch star/fork counts due to API error");let 
dt=this.getRepoScore(s),Et=i$({repoScore:dt,hasExistingPR:!g,isClaimed:!w,clearRequirements:S,hasContributionGuidelines:!!d,issueUpdatedAt:a.updated_at,closedWithoutMergeCount:R?.closedWithoutMergeCount??0,mergedPRCount:G,orgHasMergedPRs:J,repoQualityBonus:qe,matchesPreferredCategory:lt}),Qt=this.stateManager.getStarredRepos(),je=z.preferredOrgs??[],Xe="normal";return G>0?Xe="merged_pr":je.some(Be=>Be.toLowerCase()===Y?.toLowerCase())?Xe="preferred_org":Qt.includes(s)&&(Xe="starred"),{issue:L,vettingResult:k,projectHealth:l,recommendation:le,reasonsToSkip:F,reasonsToApprove:H,viabilityScore:Et,searchPriority:Xe}}async vetIssuesParallel(t,n,o){let r=[],i=new Map,s=0,a=0,c=0;for(let l of t){if(r.length>=n)break;c++;let d=this.vetIssue(l).then(p=>{r.length<n&&(o&&(p.searchPriority=o),r.push(p))}).catch(p=>{s++,Mt(p)&&a++,v(_$,`Error vetting issue ${l}:`,P(p))}).finally(()=>i.delete(l));i.set(l,d),i.size>=rR&&await Promise.race(i.values())}await Promise.allSettled(i.values());let u=s===c&&c>0;return u&&v(_$,`All ${c} issue(s) failed vetting. 
This may indicate a systemic issue (rate limit, auth, network).`),{candidates:r.slice(0,n),allFailed:u,rateLimitHit:a>0}}getRepoScore(t){return this.stateManager.getState().repoScores?.[t]?.score??null}}});function ug(e){return e.length===0?"":e.length===1?`label:"${e[0]}"`:`(${e.map(t=>`label:"${t}"`).join(" OR ")})`}function w$(e,t){let n=new Set;for(let o of e)for(let r of xo[o]??[])n.add(r);for(let o of t)n.add(o);return[...n]}function S$(e){let t=[],n=Math.max(...e.map(o=>o.length),0);for(let o=0;o<n;o++)for(let r of e)o<r.length&&t.push(r[o]);return t}function nR(e,t){let n=[];for(let o=0;o<e.length;o+=t)n.push(e.slice(o,o+t));return n}async function jo(e,t){let n=`search:${t.q}:${t.sort}:${t.order}:${t.per_page}`;return ua(Bt(),n,oR,async()=>{let{data:o}=await e.search.issuesAndPullRequests(t);return o})}async function lg(e,t,n,o,r,i,s){let a=n$(t);if(a.size>0){let w=t.filter(S=>a.has(S.repository_url.split("/").slice(-2).join("/"))).length;E(zo,`[SPAM_FILTER] Filtered ${w} issues from ${a.size} label-farming repos: ${[...a].join(", ")}`)}let c=n(t).filter(w=>{let S=w.repository_url.split("/").slice(-2).join("/");return a.has(S)?!1:o.every(j=>!j.has(S))}).slice(0,r*2);if(c.length===0)return E(zo,`[${s}] All ${t.length} items filtered before vetting`),{candidates:[],allVetFailed:!1,rateLimitHit:!1};let{candidates:u,allFailed:l,rateLimitHit:d}=await e.vetIssuesParallel(c.map(w=>w.html_url),r,"normal"),p=u.filter(w=>w.projectHealth.checkFailed?!0:(w.projectHealth.stargazersCount??0)>=i),g=u.length-p.length;return g>0&&E(zo,`[STAR_FILTER] Filtered ${g} ${s} candidates below ${i} stars`),{candidates:p,allVetFailed:l,rateLimitHit:d}}async function ba(e,t,n,o,r,i,s){let a=[],u=nR(n,5),l=0,d=0;for(let w of u){if(a.length>=r)break;try{let S=w.map(L=>`repo:${L}`).join(" OR "),j=`${o} (${S})`,k=await jo(e,{q:j,sort:"created",order:"desc",per_page:Math.min(30,(r-a.length)*3)});if(k.items.length>0){let L=s(k.items),F=r-a.length,{candidates:H}=await 
t.vetIssuesParallel(L.slice(0,F*2).map(z=>z.html_url),F,i);a.push(...H)}}catch(S){l++,Mt(S)&&d++;let j=w.join(", ");v(zo,`Error searching issues in batch [${j}]:`,P(S))}}let p=l===u.length&&u.length>0,g=d>0;return p&&v(zo,`All ${u.length} batch(es) failed for ${i} phase. This may indicate a systemic issue (rate limit, auth, network).`),{candidates:a,allBatchesFailed:p,rateLimitHit:g}}var zo,oR,k$=b(()=>{"use strict";At();be();ye();Pn();ha();zo="search-phases";oR=900*1e3});var T$,E$,ge,zr,x$=b(()=>{"use strict";T$=re(require("fs"),1),E$=re(require("path"),1);Rn();Tn();Ve();At();be();ye();ha();$$();sg();k$();ge="issue-discovery",zr=class{octokit;stateManager;githubToken;vetter;rateLimitWarning=null;constructor(t){this.githubToken=t,this.octokit=Ue(t),this.stateManager=W(),this.vetter=new va(this.octokit,this.stateManager)}async fetchStarredRepos(){Ce(ge,"Fetching starred repositories...");let t=[];try{let n=this.octokit.paginate.iterator(this.octokit.activity.listReposStarredByAuthenticatedUser,{per_page:100}),o=0;for await(let{data:r}of n){for(let i of r){let s;"full_name"in i&&typeof i.full_name=="string"?s=i.full_name:"repo"in i&&i.repo&&typeof i.repo=="object"&&"full_name"in i.repo&&(s=i.repo.full_name),s&&t.push(s)}if(o++,o>=5){Ce(ge,"Reached pagination limit for starred repos (500)");break}}return Ce(ge,`Fetched ${t.length} starred repositories`),this.stateManager.setStarredRepos(t),t}catch(n){let o=this.stateManager.getStarredRepos(),r=P(n);return v(ge,"Error fetching starred repos:",r),o.length===0?v(ge,`Failed to fetch starred repositories from GitHub API. No cached repos available. Error: ${r}
85
85
  Tip: Ensure your GITHUB_TOKEN has the 'read:user' scope and try again.`):v(ge,`Failed to fetch starred repositories from GitHub API. Using ${o.length} cached repos instead. Error: ${r}`),o}}/* Return the cached starred-repo list, refetching via fetchStarredRepos() when the state manager reports it stale. */async getStarredReposWithRefresh(){return this.stateManager.isStarredReposStale()?this.fetchStarredRepos():this.stateManager.getStarredRepos()}/* Multi-phase issue search: Phase 0 merged/open-PR repos, Phase 0.5 preferred orgs, Phase 1 starred repos, Phase 2 general label tiers, Phase 3 actively maintained repos. Candidates are deduped, vetted, then sorted by search priority, recommendation, and viability score. Throws on 401; on rate-limit-only failure returns partial/empty results with this.rateLimitWarning set. */async searchIssues(t={}){let n=this.stateManager.getState().config,o=t.languages||n.languages,r=n.scope,i=t.labels||(r?w$(r,n.labels):n.labels),s=t.maxResults||10,a=n.minStars??50,c=[],u=null,l=null,d=!1;this.rateLimitWarning=null;try{let K=await Wm(this.githubToken);if(K.remaining<5){let V=new Date(K.resetAt).toLocaleTimeString("en-US",{hour12:!1});this.rateLimitWarning=`GitHub search API quota low (${K.remaining}/${K.limit} remaining, resets at ${V}). Search may be slow.`,v(ge,this.rateLimitWarning)}}catch(K){if(Ze(K)===401)throw K;v(ge,"Could not check rate limit:",P(K))}/* Gather dedupe/filter state from the state manager: merged/open-PR repos, starred repos, low-scoring repos, active issue URLs, configured exclusions, and the base search query strings. */let p=this.stateManager.getReposWithMergedPRs(),g=new Set(p),w=this.stateManager.getReposWithOpenPRs(),S=await this.getStarredReposWithRefresh(),j=new Set(S),k=new Set(this.stateManager.getLowScoringRepos(3)),L=new Set(this.stateManager.getState().activeIssues.map(K=>K.url)),F=new Set(n.excludeRepos),H=n.maxIssueAgeDays||90,z=new Date,R=ug(i),Y=o.some(K=>K.toLowerCase()==="any")?"":o.map(K=>`language:${K}`).join(" "),J=`is:issue is:open ${Y} no:assignee`.replace(/ +/g," ").trim(),$e=`is:issue is:open ${R} ${Y} no:assignee`.replace(/ +/g," ").trim(),lt=n.includeDocIssues??!0,le=new Set(n.aiPolicyBlocklist??Sn.aiPolicyBlocklist??[]);le.size>0&&E(ge,`[AI_POLICY_FILTER] Filtering issues from ${le.size} blocklisted repo(s): ${[...le].join(", ")}`);/* we(): drops issues that are already active, excluded, blocklisted, from low-scoring repos, older than maxIssueAgeDays, or docs-only (og) when includeDocIssues is false. */let we=K=>K.filter(V=>{if(L.has(V.html_url))return!1;let ee=V.repository_url.split("/").slice(-2).join("/");if(F.has(ee)||le.has(ee)||k.has(ee))return!1;let fe=new Date(V.updated_at);return!(ft(fe,z)>H||!lt&&og(V))}),qe=[...p,...w.filter(K=>!g.has(K))].slice(0,10),dt=new Set(qe);if(qe.length>0){let 
K=Math.min(p.length,qe.length),V=qe.length-K;Ce(ge,`Phase 0: Searching issues in ${qe.length} repos (${K} merged-PR, ${V} open-PR, no label filter)...`);let ee=qe.slice(0,K);if(ee.length>0){let ke=s-c.length;if(ke>0){let{candidates:ve,allBatchesFailed:de,rateLimitHit:Pe}=await ba(this.octokit,this.vetter,ee,J,ke,"merged_pr",we);c.push(...ve),de&&(u="All merged-PR repo batches failed"),Pe&&(d=!0),Ce(ge,`Found ${ve.length} candidates from merged-PR repos`)}}let fe=qe.slice(K);if(fe.length>0&&c.length<s){let ke=s-c.length;if(ke>0){let{candidates:ve,allBatchesFailed:de,rateLimitHit:Pe}=await ba(this.octokit,this.vetter,fe,J,ke,"starred",we);if(c.push(...ve),de){let Re="All open-PR repo batches failed";u=u?`${u}; ${Re}`:Re}Pe&&(d=!0),Ce(ge,`Found ${ve.length} candidates from open-PR repos`)}}}/* Phase 0.5: search up to five preferred orgs whose owners were not already covered by the Phase 0 repos. */let Et=null,Qt=n.preferredOrgs??[];if(c.length<s&&Qt.length>0){let K=new Set(qe.map(ee=>ee.split("/")[0]?.toLowerCase())),V=Qt.filter(ee=>!K.has(ee.toLowerCase())).slice(0,5);if(V.length>0){Ce(ge,`Phase 0.5: Searching issues in ${V.length} preferred org(s)...`);let ee=s-c.length,fe=V.map(ve=>`org:${ve}`).join(" OR "),ke=`${$e} (${fe})`;try{let ve=await jo(this.octokit,{q:ke,sort:"created",order:"desc",per_page:ee*3});if(ve.items.length>0){let de=we(ve.items).filter(Ye=>{let hr=Ye.repository_url.split("/").slice(-2).join("/");return!dt.has(hr)}),{candidates:Pe,allFailed:Re,rateLimitHit:Ae}=await this.vetter.vetIssuesParallel(de.slice(0,ee*2).map(Ye=>Ye.html_url),ee,"preferred_org");c.push(...Pe),Re&&(Et="All preferred org issue vetting failed"),Ae&&(d=!0),Ce(ge,`Found ${Pe.length} candidates from preferred orgs`)}}catch(ve){let de=P(ve);Et=de,Mt(ve)&&(d=!0),v(ge,`Error searching preferred orgs: ${de}`)}}}/* Phase 1: starred repos not already searched in Phase 0. */if(c.length<s&&S.length>0){let K=S.filter(V=>!dt.has(V));if(K.length>0){Ce(ge,`Phase 1: Searching issues in ${K.length} starred repos...`);let V=s-c.length;if(V>0){let{candidates:ee,allBatchesFailed:fe,rateLimitHit:ke}=await 
ba(this.octokit,this.vetter,K.slice(0,10),$e,V,"starred",we);c.push(...ee),fe&&(l="All starred repo batches failed"),ke&&(d=!0),Ce(ge,`Found ${ee.length} candidates from starred repos`)}}}/* Phase 2: general search, one query per scope label tier (plus a "custom" tier for configured labels outside every scope), interleaving results across tiers via S$. */let je=null;if(c.length<s){Ce(ge,"Phase 2: General issue search...");let K=s-c.length,V=new Set(c.map(de=>de.issue.repo)),ee=[];if(r&&r.length>1){for(let Re of r){let Ae=xo[Re]??[];if(Ae.length===0){v(ge,`Scope "${Re}" has no labels, skipping tier`);continue}ee.push({tier:Re,tierLabels:Ae})}let de=new Set(r.flatMap(Re=>xo[Re]??[])),Pe=n.labels.filter(Re=>!de.has(Re));Pe.length>0&&ee.push({tier:"custom",tierLabels:Pe})}else ee.push({tier:"general",tierLabels:i});let fe=Math.ceil(K/ee.length),ke=[];for(let{tier:de,tierLabels:Pe}of ee){let Re=`is:issue is:open ${ug(Pe)} ${Y} no:assignee`.replace(/ +/g," ").trim();try{let Ae=await jo(this.octokit,{q:Re,sort:"created",order:"desc",per_page:fe*3});Ce(ge,`Phase 2 [${de}]: ${Ae.total_count} total, processing top ${Ae.items.length}...`);let{candidates:Ye,allVetFailed:hr,rateLimitHit:Ga}=await lg(this.vetter,Ae.items,we,[dt,j,V],fe,a,`Phase 2 [${de}]`);ke.push(Ye);for(let Jo of Ye)V.add(Jo.issue.repo);hr&&(je=(je?je+"; ":"")+`${de}: all vetting failed`),Ga&&(d=!0),Ce(ge,`Found ${Ye.length} candidates from ${de} tier`)}catch(Ae){if(Ze(Ae)===401)throw Ae;let Ye=P(Ae);je=(je?je+"; ":"")+`${de}: ${Ye}`,Mt(Ae)&&(d=!0),v(ge,`Error in ${de} tier search: ${Ye}`),ke.push([])}}let ve=S$(ke);ve.length===0&&je&&v(ge,`All ${ee.length} scope tiers failed in Phase 2: ${je}`),c.push(...ve.slice(0,K))}/* Phase 3: fall back to repos meeting the star threshold, pushed within the last 30 days, and not archived; optionally constrained to the first project-category topic. */let Xe=null;if(c.length<s){Ce(ge,"Phase 3: Searching actively maintained repos...");let K=s-c.length,V=new Date;V.setDate(V.getDate()-30);let ee=V.toISOString().split("T")[0],fe=c$(n.projectCategories??[]),ke=fe.length>0?`topic:${fe[0]}`:"",ve=`is:issue is:open no:assignee ${Y} ${ke} stars:>=${a} pushed:>=${ee} archived:false`.replace(/ +/g," ").trim();try{let de=await jo(this.octokit,{q:ve,sort:"updated",order:"desc",per_page:K*3});Ce(ge,`Found 
${de.total_count} issues in maintained-repo search, processing top ${de.items.length}...`);let Pe=new Set(c.map(hr=>hr.issue.repo)),{candidates:Re,allVetFailed:Ae,rateLimitHit:Ye}=await lg(this.vetter,de.items,we,[dt,j,Pe],K,a,"Phase 3");c.push(...Re),Ae&&(Xe="all vetting failed"),Ye&&(d=!0),Ce(ge,`Found ${Re.length} candidates from maintained-repo search`)}catch(de){let Pe=P(de);Xe=Pe,Mt(de)&&(d=!0),v(ge,`Error in maintained-repo search: ${Pe}`)}}/* Nothing found in any phase: assemble per-phase failure reasons; pure rate-limit failures return [] with a warning instead of throwing. */if(c.length===0){let K=[u?`Phase 0 (merged-PR repos): ${u}`:null,Et?`Phase 0.5 (preferred orgs): ${Et}`:null,l?`Phase 1 (starred repos): ${l}`:null,je?`Phase 2 (general): ${je}`:null,Xe?`Phase 3 (maintained repos): ${Xe}`:null].filter(Boolean),V=K.length>0?` ${K.join(". ")}.`:"";if(d)return this.rateLimitWarning=`Search returned no results due to GitHub API rate limits.${V} Try again after the rate limit resets.`,[];throw new _e(`No issue candidates found across all search phases.${V} Try adjusting your search criteria (languages, labels) or check your network connection.`)}return d&&(this.rateLimitWarning=`Search results may be incomplete: GitHub API rate limits were hit during search. Found ${c.length} candidate${c.length===1?"":"s"} but some search phases failed. Try again after the rate limit resets for complete results.`),/* Sort by search priority, then recommendation, then viability score descending; ig(c,2) presumably caps candidates per repo at 2 — TODO confirm against ig's definition. */c.sort((K,V)=>{let ee={merged_pr:0,preferred_org:1,starred:2,normal:3},fe=ee[K.searchPriority]-ee[V.searchPriority];if(fe!==0)return fe;let ke={approve:0,needs_review:1,skip:2},ve=ke[K.recommendation]-ke[V.recommendation];return ve!==0?ve:V.viabilityScore-K.viabilityScore}),ig(c,2).slice(0,s)}/* Delegate single-issue vetting to the vetter. */async vetIssue(t){return this.vetter.vetIssue(t)}/* Persist results, sorted by descending viability score, to found-issues.md (yt() presumably resolves the output directory — confirm). */saveSearchResults(t){let n=[...t].sort((a,c)=>c.viabilityScore-a.viabilityScore),o=yt(),r=E$.join(o,"found-issues.md"),i=new Date().toISOString(),s=`# Found Issues
86
86
 
87
87
  `;s+=`> Generated at: ${i}
@@ -5,14 +5,15 @@
5
5
  import type { Octokit } from '@octokit/rest';
6
6
  import { CIFailureCategory, ClassifiedCheck, CIStatusResult } from './types.js';
7
7
  /**
8
- * Classify a failing CI check as actionable, fork_limitation, auth_gate, or infrastructure (#81, #145).
8
+ * Classify a failing CI check as actionable, fork_limitation, auth_gate, or infrastructure (#81, #145, #743).
9
9
  * Default is 'actionable' — only known patterns get reclassified.
10
- * When conclusion is provided (cancelled, timed_out), the check is classified as infrastructure.
10
+ * Conclusion-based classification (cancelled, timed_out, action_required) takes precedence
11
+ * over name-based pattern matching.
11
12
  */
12
13
  export declare function classifyCICheck(name: string, description?: string, conclusion?: string): CIFailureCategory;
13
14
  /**
14
- * Classify all failing checks and return both the flat names array and classified array (#81, #145).
15
- * Accepts optional conclusion data to detect infrastructure failures.
15
+ * Classify all failing checks and return a ClassifiedCheck array (#81, #145, #743).
16
+ * Accepts optional conclusion data to detect infrastructure failures and auth gates.
16
17
  */
17
18
  export declare function classifyFailingChecks(failingCheckNames: string[], conclusions?: Map<string, string>): ClassifiedCheck[];
18
19
  /**
@@ -38,16 +38,21 @@ const INFRASTRUCTURE_PATTERNS = [
38
38
  /\bservice\s*unavailable/i,
39
39
  /\binfrastructure/i,
40
40
  /\bblacksmith\b/i,
41
+ /\breadthedocs\b/i,
41
42
  ];
42
43
  /**
43
- * Classify a failing CI check as actionable, fork_limitation, auth_gate, or infrastructure (#81, #145).
44
+ * Classify a failing CI check as actionable, fork_limitation, auth_gate, or infrastructure (#81, #145, #743).
44
45
  * Default is 'actionable' — only known patterns get reclassified.
45
- * When conclusion is provided (cancelled, timed_out), the check is classified as infrastructure.
46
+ * Conclusion-based classification (cancelled, timed_out, action_required) takes precedence
47
+ * over name-based pattern matching.
46
48
  */
47
49
  export function classifyCICheck(name, description, conclusion) {
48
50
  // Infrastructure: cancelled or timed_out jobs are transient failures (#145)
49
51
  if (conclusion === 'cancelled' || conclusion === 'timed_out')
50
52
  return 'infrastructure';
53
+ // Auth gate: action_required means the workflow needs external approval (e.g., fork PR or first-time contributor)
54
+ if (conclusion === 'action_required')
55
+ return 'auth_gate';
51
56
  const nameLower = name.toLowerCase();
52
57
  // Check name first (more reliable than description)
53
58
  if (AUTH_GATE_PATTERNS.some((p) => p.test(nameLower)))
@@ -69,8 +74,8 @@ export function classifyCICheck(name, description, conclusion) {
69
74
  return 'actionable';
70
75
  }
71
76
  /**
72
- * Classify all failing checks and return both the flat names array and classified array (#81, #145).
73
- * Accepts optional conclusion data to detect infrastructure failures.
77
+ * Classify all failing checks and return a ClassifiedCheck array (#81, #145, #743).
78
+ * Accepts optional conclusion data to detect infrastructure failures and auth gates.
74
79
  */
75
80
  export function classifyFailingChecks(failingCheckNames, conclusions) {
76
81
  return failingCheckNames.map((name) => {
@@ -93,14 +98,14 @@ export function analyzeCheckRuns(checkRuns) {
93
98
  const failingCheckNames = [];
94
99
  const failingCheckConclusions = new Map();
95
100
  for (const check of checkRuns) {
96
- if (check.conclusion === 'failure' || check.conclusion === 'cancelled' || check.conclusion === 'timed_out') {
101
+ if (check.conclusion === 'failure' ||
102
+ check.conclusion === 'cancelled' ||
103
+ check.conclusion === 'timed_out' ||
104
+ check.conclusion === 'action_required') {
97
105
  hasFailingChecks = true;
98
106
  failingCheckNames.push(check.name);
99
107
  failingCheckConclusions.set(check.name, check.conclusion);
100
108
  }
101
- else if (check.conclusion === 'action_required') {
102
- hasPendingChecks = true; // Maintainer approval gate, not a real failure
103
- }
104
109
  else if (check.status === 'in_progress' || check.status === 'queued') {
105
110
  hasPendingChecks = true;
106
111
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@oss-autopilot/core",
3
- "version": "0.60.0",
3
+ "version": "0.60.1",
4
4
  "description": "CLI and core library for managing open source contributions",
5
5
  "type": "module",
6
6
  "bin": {