@netlify/agent-runner-cli 1.48.1-alpha → 1.48.2-alpha

This diff shows the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
package/dist/bin-local.js CHANGED
@@ -1,28 +1,28 @@
  #!/usr/bin/env node
- import P from"process";import zt from"path";import Zt from"fs";import un from"minimist";import{createRequire as sn}from"module";import{createTracerProvider as Qt}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as et}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as er}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as tr}from"@netlify/otel";import{propagation as tt,context as rt,W3CTraceContextPropagator as rr}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as nr}from"@opentelemetry/exporter-trace-otlp-grpc";function _(e){let t=!process.env.VITEST;return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var Ce=_("tracing"),nt=async(e,t,r)=>(await Qt({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new et(new Pe),new et(new nr({url:r.exporterUrl}))],instrumentations:[new er({skipHeaders:!0})]}),r.traceparent?(tt.setGlobalPropagator(new rr),tt.extract(rt.active(),{traceparent:r.traceparent,isRemote:!0})):rt.active());function A(e,t,r){return Ce.log(`\u23F3 TRACE: ${t} starting...`),tr(e,t,r)}var Pe=class{export(t,r){for(let o of t)this.logSpan(o);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,o=t.attributes,n=[];for(let[l,s]of Object.entries(o))l.includes("duration")&&typeof s=="number"?n.push(`${l}=${s.toFixed(2)}ms`):n.push(`${l}=${s}`);let i=t.status?.code===2?"\u274C":"\u2705",a=n.length>0?` [${n.join(", ")}]`:"";Ce.log(`${i} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${a}`),t.status?.code===2&&t.status.message&&Ce.log(` \u274C Error: ${t.status.message}`)}};var or=["error","failed","exception","fatal","panic","abort","crash"];function ot(e){let t=e.split(`
- `),r=[],o=-1,n=0;for(;n<t.length;){let l=t[n].slice(0,500).toLowerCase();if(or.some(h=>l.includes(h))){let h=Math.max(0,n-10,o+1),d=Math.min(t.length-1,n+20),c=[];for(let m=h;m<=d;m++)c.push(t[m]);r.push(c.join(`
+ import P from"process";import Zt from"path";import Qt from"fs";import cn from"minimist";import{createRequire as an}from"module";import{createTracerProvider as er}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as tt}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as tr}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as rr}from"@netlify/otel";import{propagation as rt,context as nt,W3CTraceContextPropagator as nr}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as or}from"@opentelemetry/exporter-trace-otlp-grpc";function _(e){let t=!process.env.VITEST;return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var Ce=_("tracing"),ot=async(e,t,r)=>(await er({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new tt(new Pe),new tt(new or({url:r.exporterUrl}))],instrumentations:[new tr({skipHeaders:!0})]}),r.traceparent?(rt.setGlobalPropagator(new nr),rt.extract(nt.active(),{traceparent:r.traceparent,isRemote:!0})):nt.active());function A(e,t,r){return Ce.log(`\u23F3 TRACE: ${t} starting...`),rr(e,t,r)}var Pe=class{export(t,r){for(let o of t)this.logSpan(o);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,o=t.attributes,n=[];for(let[l,s]of Object.entries(o))l.includes("duration")&&typeof s=="number"?n.push(`${l}=${s.toFixed(2)}ms`):n.push(`${l}=${s}`);let i=t.status?.code===2?"\u274C":"\u2705",a=n.length>0?` [${n.join(", ")}]`:"";Ce.log(`${i} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${a}`),t.status?.code===2&&t.status.message&&Ce.log(` \u274C Error: ${t.status.message}`)}};var ir=["error","failed","exception","fatal","panic","abort","crash"];function it(e){let t=e.split(`
+ `),r=[],o=-1,n=0;for(;n<t.length;){let l=t[n].slice(0,500).toLowerCase();if(ir.some(h=>l.includes(h))){let h=Math.max(0,n-10,o+1),d=Math.min(t.length-1,n+20),c=[];for(let m=h;m<=d;m++)c.push(t[m]);r.push(c.join(`
  `)),o=d,n=d+1}else n++}if(r.length===0)return e;let i=r.map((a,l)=>`<extracted_error_chunk order="${l+1}">
  ${a}
  </extracted_error_chunk>`).join(`
 
- `);return i.length>e.length*.8?e:i}import ve from"process";import{getTracer as Yr}from"@netlify/otel";import ue from"process";var me=ue.env.NETLIFY_API_URL,he=ue.env.NETLIFY_API_TOKEN,Y=_("api"),ye=()=>ue.env.NETLIFY_LOCAL_MODE==="true",ce=async(e,t={})=>{if(!me||!he)throw new Error("No API URL or token");let r=new URL(e,me),o={...t,headers:{...t.headers,Authorization:`Bearer ${he}`}};ue.env.AGENT_RUNNERS_DEBUG==="true"&&(o.headers["x-nf-debug-logging"]="true"),t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),i=n.ok&&n.status<=299;if(ue.env.AGENT_RUNNERS_DEBUG==="true")Y.log(`Response headers for ${r}:`),n.headers.forEach((l,s)=>{Y.log(` ${s}: ${l}`)});else{let l=n.headers.get("x-request-id")||n.headers.get("x-nf-request-id");Y.log(`Request ID for ${r}: ${l||"N/A"}`)}if(i||Y.error(`Got status ${n.status} for request ${r}`),t.raw){if(!i)throw n;return n}let a=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!i)throw a;return a},it=e=>{Y.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(me=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(he=e.constants.NETLIFY_API_TOKEN)},st=()=>({apiUrl:me,token:he}),de=async(e,t)=>ye()?(Y.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):ce(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),H=async(e,t,r)=>ye()?(Y.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):ce(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var at=async(e,t)=>ye()?(Y.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):ce(`/api/v1/agent_runners/${e}/sessions/${t}`),lt=(e,t,r)=>ce(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r}}),ut=async(e,t)=>ye()?(Y.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):ce(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),Oe=async(e,t)=>{Y.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var te=_("ai_gateway"),Fe=null;var ct=async()=>{if(Fe)return Fe;te.log("Fetching available AI gateway providers");let e=await fetch(`${st().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return Fe=t,te.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},ir=async(e,t)=>{let o=(await ct()).providers[e];if(!o)return te.log(`Provider '${e}' not found`),!1;let n=o.models.includes(t);return te.log(`Model validation for ${e}/${t}`,{isAvailable:n}),n},dt=async({netlify:e,config:t})=>{let r,o,n,i,a=e.constants?.SITE_ID;if(!a)throw new Error("No site id");let l=async()=>{clearTimeout(n),te.log("Requesting AI gateway information");let s=await lt(a,t.id,t.sessionId);if({token:r,url:i}=s,o=s.expires_at?s.expires_at*1e3:void 0,te.log("Got AI gateway information",{token:!!r,expiresAt:o,url:i}),o){let 
h=o-Date.now()-6e4;h>0&&(n=setTimeout(()=>{l()},h))}};return await Promise.all([l(),ct()]),{get url(){return i},get token(){return r},isModelAvailableForProvider:ir}};import K from"process";import Ee from"path";import $e from"fs";import{fileURLToPath as dr}from"url";import{execa as pr,execaCommand as kn}from"execa";import{Transform as sr}from"stream";var ar=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","ERROR_LOGS_PATH","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),lr=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function ur(){return Object.entries(process.env).filter(([e,t])=>!(!t||ar.has(e)||lr.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function q(e){if(typeof e!="string")return e;let t=ur();if(t.length===0)return e;let r=e;return t.forEach(o=>{let n=new RegExp(cr(o),"g");r=r.replace(n,"******")}),r}function cr(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var re=class extends sr{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,o){let n=t.toString(),i=q(n);o(null,i)}};function pt(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(o,n,i){let a=typeof o=="string"?q(o):o;return typeof n=="function"?t(a,n):t(a,n,i)},process.stderr.write=function(o,n,i){let a=typeof o=="string"?q(o):o;return typeof n=="function"?r(a,n):r(a,n,i)}}var pe=null,ft=e=>(pe&&pe.destroy(),pe=new z({totalAllowedTime:e}),pe),gt=()=>pe;var z=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,o)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let n=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),i=null,a=null;o!==void 0&&(a=new Promise((l,s)=>{i=setTimeout(()=>{s(new Error(`${t} stage exceeded its maximum duration of ${o}ms`))},o)}));try{return a?await Promise.race([r(),a]):await r()}finally{n(),i&&clearTimeout(i)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var fr=dr(import.meta.url),gr=Ee.dirname(fr),_e=_("shell"),De=new Set,mr={preferLocal:!0},C=(e,t,r)=>{let[o,n]=hr(t,r),i={...mr,...n},a=pr(e,o,i);return yr(a,i),_r(a),a};var hr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},yr=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(K.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new re).pipe(K.stdout),e.stdout?.pipe(new re).pipe(K.stdout),e.stderr?.pipe(new re).pipe(K.stderr);return}e.stdout?.pipe(K.stdout),e.stderr?.pipe(K.stderr)},mt=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(K.kill(-e.pid,t),_e.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return _e.error("Error killing process:",r),!1}},Er=e=>mt(e,"SIGKILL"),_r=e=>{De.add(e);let t=gt();if(t){let r=t.onTimesUp(()=>{_e.log(`Global timer expired, killing process ${e.pid}`),mt(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(_e.log(`Force killing process ${e.pid} after timeout`),Er(e))},5e3)});e.on("exit",()=>{De.delete(e),r()}),e.on("error",()=>{De.delete(e),r()})}};function we(e,t){return!!J(e,t)}function J(e,t){if(K.env.NODE_PATH){let n=Ee.join(K.env.NODE_PATH,".bin",t);if($e.existsSync(n))return n}let r=Ee.join(e,"node_modules",".bin",t);if($e.existsSync(r))return r;let o=Ee.join(gr,"..","node_modules",".bin",t);if($e.existsSync(o))return o}var ht="netlify-agent-runner-context.md",ke="task-history",Le="netlify-context",U=".netlify",ne="results.md",Ue="assets";var wr=_("utils"),Ir=e=>new Promise(t=>{setTimeout(t,e)}),yt=(e,t=3e3)=>{let r=!1,o=null,n=[],i=null,a=(...l)=>{if(r)return o=l,new Promise(d=>{n.push(d)});r=!0;let s,h=new Promise(d=>{s=d});return i=(async()=>{await Promise.resolve();let d=await e(...l);for(s(d);;){if(await Ir(t),!o)return r=!1,i=null,d;let c=o,m=n;o=null,n=[],d=await e(...c),m.forEach(y=>{y(d)})}})(),h};return a.flush=async()=>{if((r||o)&&i)return await 
i,a.flush()},a},Ie=(e,t,r=!1)=>{let o=null,n=null,i=null,a=function(...l){n=l,i=this;let s=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(i,n),n=null,i=null)},t),s&&(e.apply(i,n),n=null,i=null)};return a.cancel=()=>{clearTimeout(o),o=null,n=null,i=null},a.flush=()=>{if(o){clearTimeout(o);let l=n,s=i;o=null,n=null,i=null,e.apply(s,l)}},a},Et=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(o){t&&(r?.error?r.error("Could not parse JSON",o):wr.error("Could not parse JSON",o))}},_t=(e,t)=>{let n=".netlify.app",i="agent-";if(!t)return`${i}${e.slice(0,6)}`;let l=`--${t}${n}`;if(l.length>55)return"";let s=60-l.length;if(s<=0)return"";if(s>=i.length+6){let h=Math.min(s-i.length,e.length);return`${i}${e.slice(0,h)}`}return e.slice(0,s)};import{Buffer as wt}from"buffer";import Tr from"path";var It=async({config:e})=>{let t=await vr(),{hasChanges:r}=t,{status:o}=t;if(!r)return{hasChanges:!1};let n=Rr(o);await Sr(n);let i=await Ge(o);await Me(i);let a={stdio:["ignore","pipe","pipe"]},s=(await C("git",["diff","--staged"],a)).stdout;if(r=!!s,!r)return{hasChanges:!1,ignored:i};let d=(await C("git",["diff","--staged","--binary"],a)).stdout,c,m;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await C("git",["commit","-m","Agent runner"]),c=(await C("git",["diff",e.sha,"HEAD"],a)).stdout;let g=(await C("git",["diff",e.sha,"HEAD","--binary"],a)).stdout;c!==g&&(m=wt.from(g).toString("base64"))}let y={hasChanges:!0,diff:s,resultDiff:c,ignored:i};return s!==d&&(y.diffBinary=wt.from(d).toString("base64")),m&&(y.resultDiffBinary=m),y},Me=async(e=[])=>{await C("git",["add",".",...e])},Tt=async()=>(await C("git",["status","-s"])).stdout,xt=/.. (.+)?\.log$/,xr=[xt],vr=async()=>{let e=await Tt();return{hasChanges:(e.trim().length===0?[]:e.split(`
- `).filter(o=>xr.some(i=>i instanceof RegExp?i.test(o):o===i)?!1:o[1]?.trim()!=="")).length!==0,status:e}},vt=async()=>{let{stdout:e}=await C("git",["rev-parse","HEAD"]);return e.trim()},Rt=async()=>{let{stdout:e}=await C("git",["rev-list","--max-parents=0","HEAD"]);return e.trim()},Ge=async e=>{e||=await Tt();let t=[".netlify","node_modules"],r=[];return e.split(`
- `).forEach(o=>{t.forEach(i=>{[`?? ${i}`,`?? ${i}${Tr.sep}`].some(l=>o.startsWith(l))&&r.push(`:!${i}`)});let n=o.match(xt)?.[1];n&&r.push(`:!${n}.log`)}),r},St=async()=>{await C("git",["reset","--hard","HEAD"])},Rr=e=>{let t=e.split(`
- `).reduce((r,o)=>{if(!o)return r;let[n,i,,...a]=o,l=a.join(""),s=n.trim(),h=i.trim();return r[l]?r[l].change=h:r[l]={filePath:l,stage:s,change:h},r},{});return Object.values(t)},Sr=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push(C("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import Nr from"fs/promises";import br from"os";import bt from"path";import Z from"process";import Cr from"readline";import je from"path";import Ar from"fs/promises";var Ye=_("agent-output-utils");async function oe({initialResult:e,agentName:t,hasError:r}){let o="",n=je.join(process.cwd(),U,ne);try{let i=await Ar.readFile(n,"utf-8");i&&(o=i,Ye.log(`Pulled result from ${je.relative(process.cwd(),n)}`))}catch{Ye.log(`No results file found at ${je.relative(process.cwd(),n)}`)}return o||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function ie({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,o=r?.replace(/\s+/g," ").trim().toLowerCase()||"",n="";return o?.includes("ai gateway is not available for your account")||o?.includes("ai gateway is not enabled for your account")?n="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":o?.includes("error when talking to gemini api")?n="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(o?.includes("connection closed prematurely")||o?.includes("499")&&t.toLowerCase().includes("gemini"))&&(n=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),o?.includes("request timed out")&&(n=`The ${t} API request's have timed out. Please try again or use a different available agent.`),o?.includes("network error")&&(n=`The ${t} agent is having network issues. 
Please try again or use a different available agent.`),n&&Ye.log(`Providing updated error messsage: ${n}, replacing original error: ${r}`),n||r||void 0}function se(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error"))}var M=_("runner_claude"),At="Claude Code",Te="claude-sonnet-4-5-20250929",Nt=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,Pr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(M.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(M.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(M.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function Be({config:e,netlify:t,persistSteps:r,aiGateway:o,continueSession:n,priorAgentSessionId:i}){let a=e,{accountType:l,prompt:s,modelVersionOverrides:h}=a,{model:d}=a,c="";if(o){let{token:u,url:E}=o;if(!u||!E)throw new Error("No token or url provided from AI Gateway");if(h?.claude){let p=h?.claude?.[l];if(p){if(!await o.isModelAvailableForProvider("anthropic",p))throw new Error(`Model override '${p}' is not available for anthropic provider`);d=p}}else if(d){if(!await o.isModelAvailableForProvider("anthropic",d))throw new Error(`Model '${d}' is not available for anthropic provider`)}else await o.isModelAvailableForProvider("anthropic",Te)?(d=Te,M.log(`Using default model: ${Te}`)):M.log(`Default model ${Te} is not available, proceeding without model specification`);Z.env.ANTHROPIC_API_KEY=u,Z.env.ANTHROPIC_BASE_URL=E}else if(!Z.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let m=[],y=[],x={},I=0,g=0,v,R,N=[J(Z.cwd(),"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose",...d?["--model",d]:[],...n?["--continue"]:[],...n&&i?["--resume",i]:[],"-p",s],O=`${Z.env.NVM_BIN}/node`;M.log(`Running ${O} ${N.join(" ")}`);let b=t.utils.run(O,N,{all:!0,env:Z.env});b.stdin?.end();let S=Ie(()=>{r?.({steps:m,duration:g})},250),w=(u,E)=>{let p={...u,id:I};I+=1,y.push(p),m.push(p),E||S.flush(),S(),E&&S.flush()},f=Cr.createInterface({input:b.all});return f.on("error",u=>{M.error("Readline interface error",{error:u.message,stack:u.stack})}),f.on("line",u=>{let E=null;try{E=JSON.parse(u)}catch{M.log("Could not parse line",u)}E?.session_id&&E.session_id!==c&&(c=E.session_id),Array.isArray(E?.message?.content)?E.message.content.forEach(p=>{switch(p.type){case"text":{p.text&&w({message:p.text});break}case"image":{typeof p.source=="object"&&p.source&&p.source.type==="base64"&&p.source.media_type?w({message:`![](data:${p.source.media_type};base64,${p.source.data})`}):M.log(`Unsupported image type 
${p.source?.type}`,p.source);break}case"tool_use":{if(p.name==="Task"){let T=p.input?.description&&`\`${p.input.description}\``;w({title:[Nt(p.name),T].filter(Boolean).join(" ")})}else p.id&&(x[p.id]=p);S.flush();break}case"tool_result":{let T=p.tool_use_id?x[p.tool_use_id]:void 0,le;if(T){let ee=T.input?.file_path&&bt.relative(Z.cwd(),T.input.file_path),F=ee&&`\`${ee}\``;le=[Nt(T.name||""),F].filter(Boolean).join(" ")}let Qe=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(T?.name||""),V;if(typeof p.content=="string")V=p.content;else if(Array.isArray(p.content)){let ee=[];p.content.forEach(F=>{F?.type==="text"&&typeof F.text=="string"?ee.push(F.text):F?.type==="image"&&typeof F.source=="object"&&F.source?F.source.type==="base64"&&F.source.media_type?ee.push(`![](data:${F.source.media_type};base64,${F.source.data})`):M.log(`Unsupported image type ${F.source.type}`,F.source):M.log(`Unsupported block type ${F?.type}`)}),V=ee.join(`
+ `);return i.length>e.length*.8?e:i}import ve from"process";import{getTracer as Br}from"@netlify/otel";import ue from"process";var me=ue.env.NETLIFY_API_URL,he=ue.env.NETLIFY_API_TOKEN,Y=_("api"),ye=()=>ue.env.NETLIFY_LOCAL_MODE==="true",ce=async(e,t={})=>{if(!me||!he)throw new Error("No API URL or token");let r=new URL(e,me),o={...t,headers:{...t.headers,Authorization:`Bearer ${he}`}};ue.env.AGENT_RUNNERS_DEBUG==="true"&&(o.headers["x-nf-debug-logging"]="true"),t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),i=n.ok&&n.status<=299;if(ue.env.AGENT_RUNNERS_DEBUG==="true")Y.log(`Response headers for ${r}:`),n.headers.forEach((l,s)=>{Y.log(` ${s}: ${l}`)});else{let l=n.headers.get("x-request-id")||n.headers.get("x-nf-request-id");Y.log(`Request ID for ${r}: ${l||"N/A"}`)}if(i||Y.error(`Got status ${n.status} for request ${r}`),t.raw){if(!i)throw n;return n}let a=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!i)throw a;return a},st=e=>{Y.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(me=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(he=e.constants.NETLIFY_API_TOKEN)},at=()=>({apiUrl:me,token:he}),de=async(e,t)=>ye()?(Y.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):ce(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),H=async(e,t,r)=>ye()?(Y.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):ce(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var lt=async(e,t)=>ye()?(Y.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):ce(`/api/v1/agent_runners/${e}/sessions/${t}`),ut=(e,t,r)=>ce(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r}}),ct=async(e,t)=>ye()?(Y.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):ce(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),Oe=async(e,t)=>{Y.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var te=_("ai_gateway"),Fe=null;var dt=async()=>{if(Fe)return Fe;te.log("Fetching available AI gateway providers");let e=await fetch(`${at().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return Fe=t,te.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},sr=async(e,t)=>{let o=(await dt()).providers[e];if(!o)return te.log(`Provider '${e}' not found`),!1;let n=o.models.includes(t);return te.log(`Model validation for ${e}/${t}`,{isAvailable:n}),n},pt=async({netlify:e,config:t})=>{let r,o,n,i,a=e.constants?.SITE_ID;if(!a)throw new Error("No site id");let l=async()=>{clearTimeout(n),te.log("Requesting AI gateway information");let s=await ut(a,t.id,t.sessionId);if({token:r,url:i}=s,o=s.expires_at?s.expires_at*1e3:void 0,te.log("Got AI gateway information",{token:!!r,expiresAt:o,url:i}),o){let 
h=o-Date.now()-6e4;h>0&&(n=setTimeout(()=>{l()},h))}};return await Promise.all([l(),dt()]),{get url(){return i},get token(){return r},isModelAvailableForProvider:sr}};import K from"process";import Ee from"path";import $e from"fs";import{fileURLToPath as pr}from"url";import{execa as fr,execaCommand as kn}from"execa";import{Transform as ar}from"stream";var lr=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","ERROR_LOGS_PATH","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),ur=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function cr(){return Object.entries(process.env).filter(([e,t])=>!(!t||lr.has(e)||ur.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function q(e){if(typeof e!="string")return e;let t=cr();if(t.length===0)return e;let r=e;return t.forEach(o=>{let n=new RegExp(dr(o),"g");r=r.replace(n,"******")}),r}function dr(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var re=class extends ar{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,o){let n=t.toString(),i=q(n);o(null,i)}};function ft(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(o,n,i){let a=typeof o=="string"?q(o):o;return typeof n=="function"?t(a,n):t(a,n,i)},process.stderr.write=function(o,n,i){let a=typeof o=="string"?q(o):o;return typeof n=="function"?r(a,n):r(a,n,i)}}var pe=null,gt=e=>(pe&&pe.destroy(),pe=new z({totalAllowedTime:e}),pe),mt=()=>pe;var z=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,o)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let n=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),i=null,a=null;o!==void 0&&(a=new Promise((l,s)=>{i=setTimeout(()=>{s(new Error(`${t} stage exceeded its maximum duration of ${o}ms`))},o)}));try{return a?await Promise.race([r(),a]):await r()}finally{n(),i&&clearTimeout(i)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var gr=pr(import.meta.url),mr=Ee.dirname(gr),_e=_("shell"),De=new Set,hr={preferLocal:!0},C=(e,t,r)=>{let[o,n]=yr(t,r),i={...hr,...n},a=fr(e,o,i);return Er(a,i),wr(a),a};var yr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},Er=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(K.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new re).pipe(K.stdout),e.stdout?.pipe(new re).pipe(K.stdout),e.stderr?.pipe(new re).pipe(K.stderr);return}e.stdout?.pipe(K.stdout),e.stderr?.pipe(K.stderr)},ht=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(K.kill(-e.pid,t),_e.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return _e.error("Error killing process:",r),!1}},_r=e=>ht(e,"SIGKILL"),wr=e=>{De.add(e);let t=mt();if(t){let r=t.onTimesUp(()=>{_e.log(`Global timer expired, killing process ${e.pid}`),ht(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(_e.log(`Force killing process ${e.pid} after timeout`),_r(e))},5e3)});e.on("exit",()=>{De.delete(e),r()}),e.on("error",()=>{De.delete(e),r()})}};function we(e,t){return!!J(e,t)}function J(e,t){if(K.env.NODE_PATH){let n=Ee.join(K.env.NODE_PATH,".bin",t);if($e.existsSync(n))return n}let r=Ee.join(e,"node_modules",".bin",t);if($e.existsSync(r))return r;let o=Ee.join(mr,"..","node_modules",".bin",t);if($e.existsSync(o))return o}var yt="netlify-agent-runner-context.md",Le="task-history",ke="netlify-context",U=".netlify",ne="results.md",Ue="assets";var Ir=_("utils"),Tr=e=>new Promise(t=>{setTimeout(t,e)}),Et=(e,t=3e3)=>{let r=!1,o=null,n=[],i=null,a=(...l)=>{if(r)return o=l,new Promise(d=>{n.push(d)});r=!0;let s,h=new Promise(d=>{s=d});return i=(async()=>{await Promise.resolve();let d=await e(...l);for(s(d);;){if(await Tr(t),!o)return r=!1,i=null,d;let c=o,m=n;o=null,n=[],d=await e(...c),m.forEach(y=>{y(d)})}})(),h};return a.flush=async()=>{if((r||o)&&i)return await 
i,a.flush()},a},Ie=(e,t,r=!1)=>{let o=null,n=null,i=null,a=function(...l){n=l,i=this;let s=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(i,n),n=null,i=null)},t),s&&(e.apply(i,n),n=null,i=null)};return a.cancel=()=>{clearTimeout(o),o=null,n=null,i=null},a.flush=()=>{if(o){clearTimeout(o);let l=n,s=i;o=null,n=null,i=null,e.apply(s,l)}},a},_t=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(o){t&&(r?.error?r.error("Could not parse JSON",o):Ir.error("Could not parse JSON",o))}},wt=(e,t)=>{let n=".netlify.app",i="agent-";if(!t)return`${i}${e.slice(0,6)}`;let l=`--${t}${n}`;if(l.length>55)return"";let s=60-l.length;if(s<=0)return"";if(s>=i.length+6){let h=Math.min(s-i.length,e.length);return`${i}${e.slice(0,h)}`}return e.slice(0,s)};import{Buffer as It}from"buffer";import xr from"path";var Tt=_("repo"),xt=async({config:e})=>{Tt.info("Getting runner diffs");let t=await Rr(),{hasChanges:r}=t,{status:o}=t;if(!r)return{hasChanges:!1};let n=Sr(o);await Ar(n),Tt.info("Changes after processing"),await Ge();let i=await je(o);await Me(i);let a={stdio:["ignore","pipe","pipe"]},s=(await C("git",["diff","--staged"],a)).stdout;if(r=!!s,!r)return{hasChanges:!1,ignored:i};let d=(await C("git",["diff","--staged","--binary"],a)).stdout,c,m;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await C("git",["commit","-m","Agent runner"]),c=(await C("git",["diff",e.sha,"HEAD"],a)).stdout;let g=(await C("git",["diff",e.sha,"HEAD","--binary"],a)).stdout;c!==g&&(m=It.from(g).toString("base64"))}let y={hasChanges:!0,diff:s,resultDiff:c,ignored:i};return s!==d&&(y.diffBinary=It.from(d).toString("base64")),m&&(y.resultDiffBinary=m),y},Me=async(e=[])=>{await C("git",["add",".",...e])},Ge=async()=>(await C("git",["status","-s"])).stdout,vt=/.. (.+)?\.log$/,vr=[vt],Rr=async()=>{let e=await Ge();return{hasChanges:(e.trim().length===0?[]:e.split(`
+ `).filter(o=>vr.some(i=>i instanceof RegExp?i.test(o):o===i)?!1:o[1]?.trim()!=="")).length!==0,status:e}},Rt=async()=>{let{stdout:e}=await C("git",["rev-parse","HEAD"]);return e.trim()},St=async()=>{let{stdout:e}=await C("git",["rev-list","--max-parents=0","HEAD"]);return e.trim()},je=async e=>{e||=await Ge();let t=[".netlify","node_modules"],r=[];return e.split(`
+ `).forEach(o=>{t.forEach(i=>{[`?? ${i}`,`?? ${i}${xr.sep}`].some(l=>o.startsWith(l))&&r.push(`:!${i}`)});let n=o.match(vt)?.[1];n&&r.push(`:!${n}.log`)}),r},At=async()=>{await C("git",["reset","--hard","HEAD"])},Sr=e=>{let t=e.split(`
+ `).reduce((r,o)=>{if(!o)return r;let[n,i,,...a]=o,l=a.join(""),s=n.trim(),h=i.trim();return r[l]?r[l].change=h:r[l]={filePath:l,stage:s,change:h},r},{});return Object.values(t)},Ar=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push(C("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import br from"fs/promises";import Cr from"os";import Ct from"path";import Z from"process";import Pr from"readline";import Ye from"path";import Nr from"fs/promises";var Be=_("agent-output-utils");async function oe({initialResult:e,agentName:t,hasError:r}){let o="",n=Ye.join(process.cwd(),U,ne);try{let i=await Nr.readFile(n,"utf-8");i&&(o=i,Be.log(`Pulled result from ${Ye.relative(process.cwd(),n)}`))}catch{Be.log(`No results file found at ${Ye.relative(process.cwd(),n)}`)}return o||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function ie({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,o=r?.replace(/\s+/g," ").trim().toLowerCase()||"",n="";return o?.includes("ai gateway is not available for your account")||o?.includes("ai gateway is not enabled for your account")?n="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":o?.includes("error when talking to gemini api")?n="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(o?.includes("connection closed prematurely")||o?.includes("499")&&t.toLowerCase().includes("gemini"))&&(n=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),o?.includes("request timed out")&&(n=`The ${t} API request's have timed out. Please try again or use a different available agent.`),o?.includes("network error")&&(n=`The ${t} agent is having network issues. 
Please try again or use a different available agent.`),n&&Be.log(`Providing updated error messsage: ${n}, replacing original error: ${r}`),n||r||void 0}function se(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error"))}var M=_("runner_claude"),Nt="Claude Code",Te="claude-sonnet-4-5-20250929",bt=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,Or=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(M.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(M.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(M.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function He({config:e,netlify:t,persistSteps:r,aiGateway:o,continueSession:n,priorAgentSessionId:i}){let a=e,{accountType:l,prompt:s,modelVersionOverrides:h}=a,{model:d}=a,c="";if(o){let{token:u,url:E}=o;if(!u||!E)throw new Error("No token or url provided from AI Gateway");if(h?.claude){let p=h?.claude?.[l];if(p){if(!await o.isModelAvailableForProvider("anthropic",p))throw new Error(`Model override '${p}' is not available for anthropic provider`);d=p}}else if(d){if(!await o.isModelAvailableForProvider("anthropic",d))throw new Error(`Model '${d}' is not available for anthropic provider`)}else await o.isModelAvailableForProvider("anthropic",Te)?(d=Te,M.log(`Using default model: ${Te}`)):M.log(`Default model ${Te} is not available, proceeding without model specification`);Z.env.ANTHROPIC_API_KEY=u,Z.env.ANTHROPIC_BASE_URL=E}else if(!Z.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let m=[],y=[],x={},I=0,g=0,v,R,N=[J(Z.cwd(),"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose",...d?["--model",d]:[],...n?["--continue"]:[],...n&&i?["--resume",i]:[],"-p",s],O=`${Z.env.NVM_BIN}/node`;M.log(`Running ${O} ${N.join(" ")}`);let b=t.utils.run(O,N,{all:!0,env:Z.env});b.stdin?.end();let S=Ie(()=>{r?.({steps:m,duration:g})},250),w=(u,E)=>{let p={...u,id:I};I+=1,y.push(p),m.push(p),E||S.flush(),S(),E&&S.flush()},f=Pr.createInterface({input:b.all});return f.on("error",u=>{M.error("Readline interface error",{error:u.message,stack:u.stack})}),f.on("line",u=>{let E=null;try{E=JSON.parse(u)}catch{M.log("Could not parse line",u)}E?.session_id&&E.session_id!==c&&(c=E.session_id),Array.isArray(E?.message?.content)?E.message.content.forEach(p=>{switch(p.type){case"text":{p.text&&w({message:p.text});break}case"image":{typeof p.source=="object"&&p.source&&p.source.type==="base64"&&p.source.media_type?w({message:`![](data:${p.source.media_type};base64,${p.source.data})`}):M.log(`Unsupported image type 
${p.source?.type}`,p.source);break}case"tool_use":{if(p.name==="Task"){let T=p.input?.description&&`\`${p.input.description}\``;w({title:[bt(p.name),T].filter(Boolean).join(" ")})}else p.id&&(x[p.id]=p);S.flush();break}case"tool_result":{let T=p.tool_use_id?x[p.tool_use_id]:void 0,le;if(T){let ee=T.input?.file_path&&Ct.relative(Z.cwd(),T.input.file_path),F=ee&&`\`${ee}\``;le=[bt(T.name||""),F].filter(Boolean).join(" ")}let et=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(T?.name||""),V;if(typeof p.content=="string")V=p.content;else if(Array.isArray(p.content)){let ee=[];p.content.forEach(F=>{F?.type==="text"&&typeof F.text=="string"?ee.push(F.text):F?.type==="image"&&typeof F.source=="object"&&F.source?F.source.type==="base64"&&F.source.media_type?ee.push(`![](data:${F.source.media_type};base64,${F.source.data})`):M.log(`Unsupported image type ${F.source.type}`,F.source):M.log(`Unsupported block type ${F?.type}`)}),V=ee.join(`
 
- `)}Qe&&V&&(V=`\`\`\`
+ `)}et&&V&&(V=`\`\`\`
  ${V.trim()}
- \`\`\``),w({title:le,message:V},!0);break}case"thinking":{p.thinking&&w({title:"Thinking",message:p.thinking},!0);break}default:M.log(`Message content type is not supported ${p.type}`,p)}}):E?.type==="result"&&(g=E.duration_ms||0,E.is_error?R=E.result:v=E.result,[y,m].forEach(p=>{p[p.length-1]?.message===v&&p.pop()}))}),await b.catch(u=>{({error:R,result:v}=Pr({catchError:u,runCmd:b,error:R,result:v,runnerName:"Claude"}))}),f.close(),S.flush(),{steps:y,duration:g,result:await oe({initialResult:v,agentName:At,hasError:!!R}),error:ie({error:R,agentName:At}),isRetryableError:se(R),agentSessionId:c}}var Ct=async()=>{let e=bt.join(br.homedir(),".claude");await Nr.rm(e,{recursive:!0,force:!0})};import xe from"fs/promises";import Ot from"os";import He from"path";import W from"process";import Or from"readline";var G=_("runner_codex"),Pt="Codex CLI",Fr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(G.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(G.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(G.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function qe({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:i,prompt:a,modelVersionOverrides:l}=e,{model:s}=e;if(n){let{token:f,url:u}=n;if(!f||!u)throw new Error("No token or url provided from AI Gateway");if(l?.codex){let E=l?.codex?.[i];if(E){if(!await n.isModelAvailableForProvider("openai",E))throw new Error(`Model override '${E}' is not available for openai provider`);s=E}}else if(s&&!await n.isModelAvailableForProvider("openai",s))throw new Error(`Model '${s}' is not available for openai provider`);W.env.OPENAI_API_KEY=f,W.env.OPENAI_BASE_URL=u}else if(!W.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let h=[],d=[],c={},m=0,y=0,x,I,g=`${W.env.NVM_BIN}/node`,v=He.join(Ot.homedir(),".codex"),R=He.join(v,"config.toml");try{await xe.mkdir(v,{recursive:!0});let f="";try{f=await xe.readFile(R,"utf-8")}catch{}f.includes("web_search")||(f.includes("[tools]")?f=f.replace(/\[tools\]/,`[tools]
+ \`\`\``),w({title:le,message:V},!0);break}case"thinking":{p.thinking&&w({title:"Thinking",message:p.thinking},!0);break}default:M.log(`Message content type is not supported ${p.type}`,p)}}):E?.type==="result"&&(g=E.duration_ms||0,E.is_error?R=E.result:v=E.result,[y,m].forEach(p=>{p[p.length-1]?.message===v&&p.pop()}))}),await b.catch(u=>{({error:R,result:v}=Or({catchError:u,runCmd:b,error:R,result:v,runnerName:"Claude"}))}),f.close(),S.flush(),{steps:y,duration:g,result:await oe({initialResult:v,agentName:Nt,hasError:!!R}),error:ie({error:R,agentName:Nt}),isRetryableError:se(R),agentSessionId:c}}var Pt=async()=>{let e=Ct.join(Cr.homedir(),".claude");await br.rm(e,{recursive:!0,force:!0})};import xe from"fs/promises";import Ft from"os";import qe from"path";import W from"process";import Fr from"readline";var G=_("runner_codex"),Ot="Codex CLI",$r=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(G.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(G.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(G.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function Ke({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:i,prompt:a,modelVersionOverrides:l}=e,{model:s}=e;if(n){let{token:f,url:u}=n;if(!f||!u)throw new Error("No token or url provided from AI Gateway");if(l?.codex){let E=l?.codex?.[i];if(E){if(!await n.isModelAvailableForProvider("openai",E))throw new Error(`Model override '${E}' is not available for openai provider`);s=E}}else if(s&&!await n.isModelAvailableForProvider("openai",s))throw new Error(`Model '${s}' is not available for openai provider`);W.env.OPENAI_API_KEY=f,W.env.OPENAI_BASE_URL=u}else if(!W.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let h=[],d=[],c={},m=0,y=0,x,I,g=`${W.env.NVM_BIN}/node`,v=qe.join(Ft.homedir(),".codex"),R=qe.join(v,"config.toml");try{await xe.mkdir(v,{recursive:!0});let f="";try{f=await xe.readFile(R,"utf-8")}catch{}f.includes("web_search")||(f.includes("[tools]")?f=f.replace(/\[tools\]/,`[tools]
  web_search = true`):f+=`
  [tools]
  web_search = true
- `,await xe.writeFile(R,f,"utf-8"),G.log("Updated Codex config with web_search enabled"))}catch(f){G.warn("Failed to update Codex config",{error:f.message})}let N=[J(W.cwd(),"codex"),"login","--with-api-key"];G.log(`Running ${g} ${N.join(" ")}`);let O=t.utils.run(g,N,{input:W.env.OPENAI_API_KEY,env:{...W.env}});try{await O,G.log("Successfully logged in to Codex")}catch(f){throw G.error("Failed to login to Codex",{error:f.message}),new Error(`Codex login failed: ${f.message}`)}let b=[J(W.cwd(),"codex"),"exec","--yolo","--json","--config","web_search=true",...s?["--model",s]:[],a].filter(Boolean);G.log(`Running ${g} ${b.join(" ")}`);let S=t.utils.run(g,b,{all:!0,env:{...W.env}}),w=Or.createInterface({input:S.all});return w.on("error",f=>{G.error("Readline interface error",{error:f.message,stack:f.stack})}),w.on("line",f=>{let u=null;try{u=JSON.parse(f)}catch{G.log("Could not parse line",f);return}let E=[],p=!1;if(u?.duration_ms&&(y=u.duration_ms,p=!0),u?.type==="local_shell_call")c[u.call_id]=u;else if(u?.type==="local_shell_call_output"){let T=Dr(c[u.call_id],u);T&&(T.id=m,m+=1,d.push(T),h.push(T),E.push(T),p=!0)}else u?.type==="message"&&u.role==="assistant"?x=u.content.map(T=>T.text).join(`
+ `,await xe.writeFile(R,f,"utf-8"),G.log("Updated Codex config with web_search enabled"))}catch(f){G.warn("Failed to update Codex config",{error:f.message})}let N=[J(W.cwd(),"codex"),"login","--with-api-key"];G.log(`Running ${g} ${N.join(" ")}`);let O=t.utils.run(g,N,{input:W.env.OPENAI_API_KEY,env:{...W.env}});try{await O,G.log("Successfully logged in to Codex")}catch(f){throw G.error("Failed to login to Codex",{error:f.message}),new Error(`Codex login failed: ${f.message}`)}let b=[J(W.cwd(),"codex"),"exec","--yolo","--json","--config","web_search=true",...s?["--model",s]:[],a].filter(Boolean);G.log(`Running ${g} ${b.join(" ")}`);let S=t.utils.run(g,b,{all:!0,env:{...W.env}}),w=Fr.createInterface({input:S.all});return w.on("error",f=>{G.error("Readline interface error",{error:f.message,stack:f.stack})}),w.on("line",f=>{let u=null;try{u=JSON.parse(f)}catch{G.log("Could not parse line",f);return}let E=[],p=!1;if(u?.duration_ms&&(y=u.duration_ms,p=!0),u?.type==="local_shell_call")c[u.call_id]=u;else if(u?.type==="local_shell_call_output"){let T=Lr(c[u.call_id],u);T&&(T.id=m,m+=1,d.push(T),h.push(T),E.push(T),p=!0)}else u?.type==="message"&&u.role==="assistant"?x=u.content.map(T=>T.text).join(`
  `):u?.type==="message"&&u.role==="system"&&(I=u.content.map(T=>T.text).join(`
- `));p&&(r?.({steps:h,duration:y}),o?.({steps:E,duration:y}))}),await S.catch(f=>{let u=Fr({catchError:f,runCmd:S,error:I,result:x,runnerName:"Codex"});I=u.error,x=u.result}),w.close(),{steps:d,duration:y,result:await oe({initialResult:x,agentName:Pt,hasError:!!I}),error:ie({error:I,agentName:Pt}),isRetryableError:se(I)}}var Ft=async()=>{let e=He.join(Ot.homedir(),".codex");await xe.rm(e,{recursive:!0,force:!0})},$r=new Set(["bash","-lc"]),Dr=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(i=>!$r.has(i)),o=r?`Running \`${r.join(" ")}\``:void 0,n;try{n=JSON.parse(t.output).output?.trim(),n&&(n=`\`\`\`
+ `));p&&(r?.({steps:h,duration:y}),o?.({steps:E,duration:y}))}),await S.catch(f=>{let u=$r({catchError:f,runCmd:S,error:I,result:x,runnerName:"Codex"});I=u.error,x=u.result}),w.close(),{steps:d,duration:y,result:await oe({initialResult:x,agentName:Ot,hasError:!!I}),error:ie({error:I,agentName:Ot}),isRetryableError:se(I)}}var $t=async()=>{let e=qe.join(Ft.homedir(),".codex");await xe.rm(e,{recursive:!0,force:!0})},Dr=new Set(["bash","-lc"]),Lr=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(i=>!Dr.has(i)),o=r?`Running \`${r.join(" ")}\``:void 0,n;try{n=JSON.parse(t.output).output?.trim(),n&&(n=`\`\`\`
  ${n.trim()}
- \`\`\``)}catch(i){G.error("Could not decode outputMsg",i,t.output)}return{title:o,message:n}};import kr from"fs/promises";import Lr from"os";import Dt from"path";import Q from"process";import Ur from"readline";var ae=_("runner_gemini"),$t="Gemini CLI",Mr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(ae.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(ae.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(ae.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),Gr={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"};async function Ke({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:i,prompt:a,modelVersionOverrides:l}=e,{model:s}=e;if(n){let{token:w,url:f}=n;if(!w||!f)throw new Error("No token or url provided from AI Gateway");if(l?.gemini){let u=l?.gemini?.[i];if(u){if(!await n.isModelAvailableForProvider("gemini",u))throw new Error(`Model override '${u}' is not available for gemini provider`);s=u}}else if(s&&!await n.isModelAvailableForProvider("gemini",s))throw new Error(`Model '${s}' is not available for gemini provider`);Q.env.GEMINI_API_KEY=w,Q.env.GOOGLE_GEMINI_BASE_URL=f}else if(!Q.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let h=[],d=[],c=[],m={},y=0,x=0,I,g,v=[J(Q.cwd(),"gemini"),...s?["--model",s]:[],"--yolo","-p",a],R=`${Q.env.NVM_BIN}/node`;ae.log(`Running ${R} ${v.join(" ")}`);let N=t.utils.run(R,v,{all:!0,env:Q.env});N.stdin?.end();let O=Ie(()=>{r?.({steps:h,duration:x}),o?.({steps:d,duration:x}),d=[]},250),b=(w,f)=>{w.id=y,y+=1,c.push(w),h.push(w),d.push(w),f||O.flush(),O(),f&&O.flush()},S=Ur.createInterface({input:N.all});return S.on("error",w=>{ae.error("Readline interface error",{error:w.message,stack:w.stack})}),S.on("line",w=>{let f=null;try{if(w.startsWith("[API Error")){let u=w.match(/\[api error: (.+?)]$/i)?.[1];f={type:"error",value:Et(u,!1)?.error?.message||u||"Gemini encountered error"}}else f=JSON.parse(w)}catch{return}if(f)switch(f.type){case"thought":{let u=f.value;b({title:u?.subject??"Thinking...",message:u?.description},!0);break}case"content":{f.value&&b({message:f.value});break}case"tool_call_request":{let u=f.value,E=Gr[u.name]??u.name,p=u.args?.path||u.args?.absolute_path,T=p&&Dt.relative(Q.cwd(),p),le=u.args?.command,V={title:[E,T&&`\`${T}\``,le&&`\`${le}\``].filter(Boolean).join(" ")};m[u.callId]=V,O.flush();break}case"tool_result":{let u=f.value,E=m[u.callId];if(E){let p=[u.resultDisplay,u.responseParts?.functionResponse?.response?.output].find(T=>typeof T=="string"&&T);p&&(E.message=`\`\`\`
+ \`\`\``)}catch(i){G.error("Could not decode outputMsg",i,t.output)}return{title:o,message:n}};import kr from"fs/promises";import Ur from"os";import Lt from"path";import Q from"process";import Mr from"readline";var ae=_("runner_gemini"),Dt="Gemini CLI",Gr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(ae.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(ae.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(ae.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),jr={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"};async function We({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:i,prompt:a,modelVersionOverrides:l}=e,{model:s}=e;if(n){let{token:w,url:f}=n;if(!w||!f)throw new Error("No token or url provided from AI Gateway");if(l?.gemini){let u=l?.gemini?.[i];if(u){if(!await n.isModelAvailableForProvider("gemini",u))throw new Error(`Model override '${u}' is not available for gemini provider`);s=u}}else if(s&&!await n.isModelAvailableForProvider("gemini",s))throw new Error(`Model '${s}' is not available for gemini provider`);Q.env.GEMINI_API_KEY=w,Q.env.GOOGLE_GEMINI_BASE_URL=f}else if(!Q.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let h=[],d=[],c=[],m={},y=0,x=0,I,g,v=[J(Q.cwd(),"gemini"),...s?["--model",s]:[],"--yolo","-p",a],R=`${Q.env.NVM_BIN}/node`;ae.log(`Running ${R} ${v.join(" ")}`);let N=t.utils.run(R,v,{all:!0,env:Q.env});N.stdin?.end();let O=Ie(()=>{r?.({steps:h,duration:x}),o?.({steps:d,duration:x}),d=[]},250),b=(w,f)=>{w.id=y,y+=1,c.push(w),h.push(w),d.push(w),f||O.flush(),O(),f&&O.flush()},S=Mr.createInterface({input:N.all});return S.on("error",w=>{ae.error("Readline interface error",{error:w.message,stack:w.stack})}),S.on("line",w=>{let f=null;try{if(w.startsWith("[API Error")){let u=w.match(/\[api error: (.+?)]$/i)?.[1];f={type:"error",value:_t(u,!1)?.error?.message||u||"Gemini encountered error"}}else f=JSON.parse(w)}catch{return}if(f)switch(f.type){case"thought":{let u=f.value;b({title:u?.subject??"Thinking...",message:u?.description},!0);break}case"content":{f.value&&b({message:f.value});break}case"tool_call_request":{let u=f.value,E=jr[u.name]??u.name,p=u.args?.path||u.args?.absolute_path,T=p&&Lt.relative(Q.cwd(),p),le=u.args?.command,V={title:[E,T&&`\`${T}\``,le&&`\`${le}\``].filter(Boolean).join(" ")};m[u.callId]=V,O.flush();break}case"tool_result":{let u=f.value,E=m[u.callId];if(E){let p=[u.resultDisplay,u.responseParts?.functionResponse?.response?.output].find(T=>typeof T=="string"&&T);p&&(E.message=`\`\`\`
24
24
  ${p.trim()}
25
- \`\`\``),b(E,!0)}break}case"result":{x=f.duration_ms,I=f.value,[c,h,d].forEach(u=>{u[u.length-1]?.message===I&&u.pop()});break}case"error":{g=f.value;break}case"finished":break;default:{ae.warn("Unhandled message type:",f.type);break}}}),await N.catch(w=>{({error:g,result:I}=Mr({catchError:w,runCmd:N,error:g,result:I,runnerName:"Gemini"}))}),S.close(),O.flush(),{steps:c,duration:x,result:await oe({initialResult:I,agentName:$t,hasError:!!g}),error:ie({error:g,agentName:$t}),isRetryableError:se(g)}}var kt=async()=>{let e=Dt.join(Lr.homedir(),".gemini");await kr.rm(e,{recursive:!0,force:!0})};var jr={codex:{runner:qe,clean:Ft},claude:{runner:Be,clean:Ct},gemini:{runner:Ke,clean:kt}},Lt=jr;var Ut=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:o})=>await A(Yr(),"init-stage",async n=>{let i=performance.now();n?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.validateAgent":e.validateAgent,"init.runnerVersion":o||"unknown"});let a=Lt[e.runner];if(!a)throw n?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let l=Br({apiToken:r});it(l);let s=e.useGateway?await dt({netlify:l,config:e}):void 0;n?.setAttributes({"init.aiGateway.created":!!s}),e.validateAgent&&e.errorLogsPath&&n?.setAttributes({"init.errorLogsPath":e.errorLogsPath});let h=yt(({steps:y=[],duration:x})=>{let I=y.map(g=>({...g,title:g.title?q(g.title):void 0,message:g.message?q(g.message):void 0}));return y.length=0,H(e.id,e.sessionId,{steps:I,duration:x})},t),d=await Ge();await Me(d);let c;e.hasRepo?e.sha?(c=e.sha,n?.setAttributes({"init.sha.source":"provided"})):(c=await vt(),await de(e.id,{sha:c}),n?.setAttributes({"init.sha.source":"current_commit"})):(c=await Rt(),n?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let m=performance.now()-i;return n?.setAttributes({"init.sha":c||"unknown","init.duration.ms":m,"init.status":"success"}),{aiGateway:s,context:l,persistSteps:h,runner:a,sha:c}}),Br=({apiToken:e})=>({constants:{NETLIFY_API_HOST:ve.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||ve.env.NETLIFY_API_TOKEN,SITE_ID:ve.env.SITE_ID,FUNCTIONS_DIST:ve.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:C}});import{getTracer as We}from"@netlify/otel";import Hr from"crypto";import X from"fs/promises";import k from"path";import j from"process";var D=_("context"),qr=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:j.env.NETLIFY_TEAM_ID,userId:j.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:j.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},Kr=10,Wr=async e=>{let{name:t,ext:r}=k.parse(e),o=e,n=k.join(j.cwd(),U,o),i=0;for(;await Vr(n);){if(i>=Kr)throw new Error("Failed to generate context file");o=`${t}-${Hr.randomUUID().slice(0,5)}${r}`,n=k.join(j.cwd(),U,o),i+=1}return o},Vr=async e=>{try{return await X.access(e),!0}catch{return!1}},Jr=async()=>{try{D.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return D.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(o=>o&&typeof o=="object"&&o.key==="catchall-consumer");return r?!r.contextScopes||typeof r.contextScopes!="object"?(D.warn("Catchall consumer missing or 
invalid contextScopes"),null):r:(D.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?D.warn("Netlify features context request timed out"):D.warn("Failed to fetch Netlify features context:",e.message),null}},Xr=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let o=await r.text();return await X.writeFile(t,o,"utf-8"),!0}catch(r){return r.name==="AbortError"?D.warn(`Download timeout for ${e}`):D.warn(`Failed to download context file ${e}:`,r.message),!1}},Re=null,zr=async()=>{if(Re)return Re;let e=await Jr();if(!e)return[];let t=k.join(j.cwd(),U,Le);await X.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([n,i])=>{if(!i||typeof i!="object"||!i.endpoint||!i.scope)return D.warn(`Invalid scope data for ${n}, skipping...`),null;let a=`${n}.md`,l=k.join(t,a),s=k.join(U,Le,a);return D.log(`Downloading ${i.scope} context...`),await Xr(i.endpoint,l)?(D.log(`Downloaded: ${s}`),{scope:i.scope,path:s,key:n}):null});return Re=(await Promise.all(r)).filter(n=>n!==null),Re},Mt=async({cliPath:e,netlify:t,config:r,buildErrorContext:o})=>{let n=qr(t),i=await Wr(ht),a=k.join(j.cwd(),U);await X.mkdir(a,{recursive:!0});let l=k.join(U,i),s=k.join(j.cwd(),l),h=k.join(j.cwd(),U,ne);try{await X.unlink(h),D.log(`Deleted old results file: ${h}`)}catch{}let d=o?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
25
+ \`\`\``),b(E,!0)}break}case"result":{x=f.duration_ms,I=f.value,[c,h,d].forEach(u=>{u[u.length-1]?.message===I&&u.pop()});break}case"error":{g=f.value;break}case"finished":break;default:{ae.warn("Unhandled message type:",f.type);break}}}),await N.catch(w=>{({error:g,result:I}=Gr({catchError:w,runCmd:N,error:g,result:I,runnerName:"Gemini"}))}),S.close(),O.flush(),{steps:c,duration:x,result:await oe({initialResult:I,agentName:Dt,hasError:!!g}),error:ie({error:g,agentName:Dt}),isRetryableError:se(g)}}var kt=async()=>{let e=Lt.join(Ur.homedir(),".gemini");await kr.rm(e,{recursive:!0,force:!0})};var Yr={codex:{runner:Ke,clean:$t},claude:{runner:He,clean:Pt},gemini:{runner:We,clean:kt}},Ut=Yr;var Mt=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:o})=>await A(Br(),"init-stage",async n=>{let i=performance.now();n?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.validateAgent":e.validateAgent,"init.runnerVersion":o||"unknown"});let a=Ut[e.runner];if(!a)throw n?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let l=Hr({apiToken:r});st(l);let s=e.useGateway?await pt({netlify:l,config:e}):void 0;n?.setAttributes({"init.aiGateway.created":!!s}),e.validateAgent&&e.errorLogsPath&&n?.setAttributes({"init.errorLogsPath":e.errorLogsPath});let h=Et(({steps:y=[],duration:x})=>{let I=y.map(g=>({...g,title:g.title?q(g.title):void 0,message:g.message?q(g.message):void 0}));return y.length=0,H(e.id,e.sessionId,{steps:I,duration:x})},t),d=await je();await Me(d);let c;e.hasRepo?e.sha?(c=e.sha,n?.setAttributes({"init.sha.source":"provided"})):(c=await Rt(),await de(e.id,{sha:c}),n?.setAttributes({"init.sha.source":"current_commit"})):(c=await St(),n?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let m=performance.now()-i;return n?.setAttributes({"init.sha":c||"unknown","init.duration.ms":m,"init.status":"success"}),{aiGateway:s,context:l,persistSteps:h,runner:a,sha:c}}),Hr=({apiToken:e})=>({constants:{NETLIFY_API_HOST:ve.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||ve.env.NETLIFY_API_TOKEN,SITE_ID:ve.env.SITE_ID,FUNCTIONS_DIST:ve.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:C}});import{getTracer as Ve}from"@netlify/otel";import qr from"crypto";import X from"fs/promises";import L from"path";import j from"process";var D=_("context"),Kr=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:j.env.NETLIFY_TEAM_ID,userId:j.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:j.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},Wr=10,Vr=async e=>{let{name:t,ext:r}=L.parse(e),o=e,n=L.join(j.cwd(),U,o),i=0;for(;await Jr(n);){if(i>=Wr)throw new Error("Failed to generate context file");o=`${t}-${qr.randomUUID().slice(0,5)}${r}`,n=L.join(j.cwd(),U,o),i+=1}return o},Jr=async e=>{try{return await X.access(e),!0}catch{return!1}},Xr=async()=>{try{D.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return D.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(o=>o&&typeof o=="object"&&o.key==="catchall-consumer");return r?!r.contextScopes||typeof r.contextScopes!="object"?(D.warn("Catchall consumer missing or 
invalid contextScopes"),null):r:(D.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?D.warn("Netlify features context request timed out"):D.warn("Failed to fetch Netlify features context:",e.message),null}},zr=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let o=await r.text();return await X.writeFile(t,o,"utf-8"),!0}catch(r){return r.name==="AbortError"?D.warn(`Download timeout for ${e}`):D.warn(`Failed to download context file ${e}:`,r.message),!1}},Re=null,Zr=async()=>{if(Re)return Re;let e=await Xr();if(!e)return[];let t=L.join(j.cwd(),U,ke);await X.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([n,i])=>{if(!i||typeof i!="object"||!i.endpoint||!i.scope)return D.warn(`Invalid scope data for ${n}, skipping...`),null;let a=`${n}.md`,l=L.join(t,a),s=L.join(U,ke,a);return D.log(`Downloading ${i.scope} context...`),await zr(i.endpoint,l)?(D.log(`Downloaded: ${s}`),{scope:i.scope,path:s,key:n}):null});return Re=(await Promise.all(r)).filter(n=>n!==null),Re},Gt=async({cliPath:e,netlify:t,config:r,buildErrorContext:o})=>{let n=Kr(t),i=await Vr(yt),a=L.join(j.cwd(),U);await X.mkdir(a,{recursive:!0});let l=L.join(U,i),s=L.join(j.cwd(),l),h=L.join(j.cwd(),U,ne);try{await X.unlink(h),D.log(`Deleted old results file: ${h}`)}catch{}let d=o?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
26
26
  Your task is to analyze and fix the build errors.
27
27
  Don't revert changes to resolve the errors. Apply fixes that address the errors.
28
28
  Don't try to run the build yourself. Just fix the errors.
@@ -35,7 +35,7 @@ ${r.siteContext.filter(g=>g.site_context).map(g=>typeof g.site_context=="string"
35
35
 
36
36
  `)}
37
37
  </project_rules>
38
- `);let m="";if(r.sessionHistoryContext?.length){let g=k.join(j.cwd(),U,ke);await X.mkdir(g,{recursive:!0});let v=await Promise.all(r.sessionHistoryContext.map(async(R,N)=>{let O=N+1,b=`attempt-${O}.md`,S=k.join(g,b),w=k.join(U,ke,b),f=`# Task History - Attempt ${O}
38
+ `);let m="";if(r.sessionHistoryContext?.length){let g=L.join(j.cwd(),U,Le);await X.mkdir(g,{recursive:!0});let v=await Promise.all(r.sessionHistoryContext.map(async(R,N)=>{let O=N+1,b=`attempt-${O}.md`,S=L.join(g,b),w=L.join(U,Le,b),f=`# Task History - Attempt ${O}
39
39
 
40
40
  ## Request - what the user asked for
41
41
  ${R.request}
@@ -54,7 +54,7 @@ ${R.response}
54
54
  `)}
55
55
 
56
56
  </session_history_context>
57
- `}let y=await zr(),x="";y.length>0&&(x=`
57
+ `}let y=await Zr(),x="";y.length>0&&(x=`
58
58
  <netlify_features_context>
59
59
  If the user request is explicitly related to a specific Netlify feature (e.g., Netlify Forms, Netlify Functions, etc.), you MUST review the relevant documentation below in addition to reviewing the project files.
60
60
  DO NOT force the use of any Netlify feature if the user request does not explicitly require it or if the project has alternative implementations in place already.
@@ -123,17 +123,17 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
123
123
  </request>
124
124
 
125
125
  Use the following file for the complete context of the ask, the environment, and what's available. ${s} You MUST READ ALL OF IT. Make sure to read it first. Never cite or paraphrase private context.
126
- `),I};var Zr=_("prompt"),Gt=async({cliPath:e,config:t,netlify:r,buildErrorContext:o})=>{let n=await Mt({cliPath:e,config:t,netlify:r,buildErrorContext:o});return process.env.AGENT_RUNNER_DEBUG&&Zr.log("Contextful Prompt:",n),{prompt:n}};var Se=_("inference_stage"),jt=5,Ae=async e=>{let{cliPath:t,config:r,context:o,buildErrors:n,runner:i,persistSteps:a,aiGateway:l,attempt:s,contextPrefix:h,priorAgentSessionId:d}=e;Se.log(`Running inference stage, attempt ${s} of ${jt}`);let c=await A(We(),"inference-stage",async m=>{m?.setAttributes({"inference.attempt":s||1}),pt();let{prompt:y}=await A(We(),"compose-prompt",async()=>await Gt({cliPath:t,config:r,buildErrorContext:Qr(n),netlify:o})),x=`
126
+ `),I};var Qr=_("prompt"),jt=async({cliPath:e,config:t,netlify:r,buildErrorContext:o})=>{let n=await Gt({cliPath:e,config:t,netlify:r,buildErrorContext:o});return process.env.AGENT_RUNNER_DEBUG&&Qr.log("Contextful Prompt:",n),{prompt:n}};var Se=_("inference_stage"),Yt=5,Ae=async e=>{let{cliPath:t,config:r,context:o,buildErrors:n,runner:i,persistSteps:a,aiGateway:l,attempt:s,contextPrefix:h,priorAgentSessionId:d}=e;Se.log(`Running inference stage, attempt ${s} of ${Yt}`);let c=await A(Ve(),"inference-stage",async m=>{m?.setAttributes({"inference.attempt":s||1}),ft();let{prompt:y}=await A(Ve(),"compose-prompt",async()=>await jt({cliPath:t,config:r,buildErrorContext:en(n),netlify:o})),x=`
127
127
  ${h||""}
128
128
  ${y}
129
- `.trim(),I={...r,prompt:x},g=await A(We(),`run-${r.runner}`,async()=>await i({aiGateway:l,config:I,netlify:o,persistSteps:a,continueSession:!!(s&&s>1),priorAgentSessionId:d}));return g.result&&(g.result=q(g.result)),g.error&&(g.error=q(g.error)),await a.flush(),g});if(c.error){if(Se.error("Runner failed",{stepsCount:c.steps.length,duration:c.duration,error:c.error,isRetryableError:c.isRetryableError,attempt:s||1,agentSessionId:c.agentSessionId}),c.isRetryableError&&(!s||s<jt))return Se.log("Retrying inference stage"),await new Promise(y=>setTimeout(y,5e3)),{runnerResult:(await Ae({...e,attempt:(s||1)+1,priorAgentSessionId:c.agentSessionId,contextPrefix:c.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw Se.log("Do not retry inference stage"),new Error(c.error)}return{runnerResult:c}},Qr=e=>!e||e.length===0?"":`
129
+ `.trim(),I={...r,prompt:x},g=await A(Ve(),`run-${r.runner}`,async()=>await i({aiGateway:l,config:I,netlify:o,persistSteps:a,continueSession:!!(s&&s>1),priorAgentSessionId:d}));return g.result&&(g.result=q(g.result)),g.error&&(g.error=q(g.error)),await a.flush(),g});if(c.error){if(Se.error("Runner failed",{stepsCount:c.steps.length,duration:c.duration,error:c.error,isRetryableError:c.isRetryableError,attempt:s||1,agentSessionId:c.agentSessionId}),c.isRetryableError&&(!s||s<Yt))return Se.log("Retrying inference stage"),await new Promise(y=>setTimeout(y,5e3)),{runnerResult:(await Ae({...e,attempt:(s||1)+1,priorAgentSessionId:c.agentSessionId,contextPrefix:c.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw Se.log("Do not retry inference stage"),new Error(c.error)}return{runnerResult:c}},en=e=>!e||e.length===0?"":`
130
130
  Deploy failed. Here are the errors to review on the latest build:
131
131
 
132
132
  Below are all of the logs with potential issues that we extracted. Some of them may be false positives; discern them carefully and ensure the fixes are relevant.
133
133
 
134
134
  ${e.pop()}
135
- `;import rn from"process";import{getTracer as Ve}from"@netlify/otel";import{getTracer as en}from"@netlify/otel";var fe=_("deploy"),Yt=async e=>await A(en(),"create-preview-deploy",async t=>tn(e,t)),tn=async({netlify:e,hasRepo:t,skipBuild:r,message:o="Agent Preview",deploySubdomain:n,cliPath:i,filter:a},l)=>{try{let s=["deploy","--message",`"${o}"`,"--json","--draft","--verbose"];t||(fe.log("Deploy: Uploading source zip"),s.push("--upload-source-zip")),n&&s.push("--alias",n),a&&s.push("--filter",a),r?(fe.log("Deploy: Skipping build"),s.push("--no-build")):s.push("--context","deploy-preview");let h=i||"netlify";fe.log(`Running: ${h} ${s.join(" ")}`),l?.setAttributes({cmd:h,args:s});let d=await e.utils.run(h,s,{stdio:["ignore","pipe","pipe"]}),c=JSON.parse(d.stdout.trim());l?.setAttributes({success:!0,deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id}),fe.log(`
136
- Preview deploy created successfully:`,{deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id});let m={deployId:c.deploy_id,previewUrl:c.deploy_url,logsUrl:c.logs,siteId:c.site_id};return t||(m.sourceZipFilename=c.source_zip_filename),m}catch(s){throw fe.error("Failed to create preview deploy via CLI:",s),l?.setAttributes({success:!1,error:s.message}),s}};var ge=_("deploy_stage"),Je=async e=>await A(Ve(),"run-deploy-stage",async()=>nn(e)),nn=async({cliPath:e,config:t,context:r,result:o,filter:n})=>{let i=await A(Ve(),"get-runner-diffs",async()=>await It({config:t,netlify:r}));if(ge.info("Resolved git",{hasChanges:i.hasChanges,ignored:i.ignored??[]}),!i.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:a,resultDiff:l,diffBinary:s,resultDiffBinary:h}=i,d=!0;ge.log("Preview deploy condition check:",{resultUndefined:o===void 0,resultType:typeof o,hasChanges:d,wouldCreatePreview:o!==void 0&&d});let c=null;if(o!==void 0&&d)try{let m;try{let y=await A(Ve(),"get-runner-session",async()=>await at(t.id,t.sessionId));y?.title&&(m=y.title)}catch(y){ge.warn("Failed to fetch session title, using fallback message:",y.message)}await H(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),c=await Yt({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:m,skipBuild:!1,deploySubdomain:_t(t.id,rn.env.SITE_NAME),filter:n})}catch(m){return ge.warn("Failed to create preview deploy (continuing with agent run):",m),{diff:a,resultDiff:l,hasChanges:d,previewInfo:null,diffBinary:s,resultDiffBinary:h,deployError:m instanceof Error?m.message:String(m)}}return ge.log("Git status",{hasDiff:!!a,hasChanges:d}),{diff:a,resultDiff:l,hasChanges:d,previewInfo:c,diffBinary:s,resultDiffBinary:h}};import{getTracer as Ne}from"@netlify/otel";async function Bt(e,t){let{maxRetries:r,baseDelay:o,onRetry:n}=t,i;for(let a=1;a<=r;a++)try{return await e()}catch(l){if(i=l,a===r)throw i;n&&n(a,i),await new Promise(s=>setTimeout(s,o*a))}throw i}var L=_("cleanup_stage"),Ht=async e=>await A(Ne(),"cleanup-stage",async()=>on(e)),Xe=1024*1024*10,on=async({config:e,diff:t,result:r,duration:o,resultDiff:n,diffBinary:i,resultDiffBinary:a,previewInfo:l})=>{let s={result:r||"Done",duration:o};if(l&&l.deployId&&(s.deploy_id=l.deployId),l&&l.sourceZipFilename&&(s.result_zip_file_name=l.sourceZipFilename),t||i||n||a)try{L.log("Getting pre-signed URLs for diff upload");let d=await ut(e.id,e.sessionId),c=[];(t||i)&&c.push(Oe(d.result.upload_url,i||t).then(()=>{s.result_diff_s3_key=d.result.s3_key,L.log("Successfully uploaded result_diff to S3")})),(n||a)&&c.push(Oe(d.cumulative.upload_url,a||n).then(()=>{s.cumulative_diff_s3_key=d.cumulative.s3_key,L.log("Successfully uploaded cumulative_diff to S3")})),L.log(`Uploading ${c.length} diff(s) to S3 in parallel`),await Promise.all(c),(n||a)&&(L.log("Updating agent runner with cumulative diff S3 key"),await A(Ne(),"update-runner",async()=>{await de(e.id,{result_diff_s3_key:d.cumulative.s3_key})}))}catch(d){L.error("S3 upload failed, falling back to inline diffs:",d);let c=Buffer.byteLength(t||i||""),m=Buffer.byteLength(a||n||"");if(c>Xe||m>Xe){let y=`Diffs exceed maximum inline size of ${Xe} bytes.`;throw L.error(y),new Error(y)}s.result_diff=t,s.result_diff_binary=i,(n||a)&&(s.cumulative_diff=n,s.cumulative_diff_binary=a,L.log("Updating agent runner with inline diffs (fallback)"),await A(Ne(),"update-runner",async()=>{await de(e.id,{result_diff:n,result_diff_binary:a})}))}else L.log("No diffs to upload");return L.log("Updated agent runner with result"),await Bt(async()=>await 
A(Ne(),"update-runner-session",()=>H(e.id,e.sessionId,s)),{maxRetries:3,baseDelay:1e3,onRetry:(d,c)=>{L.error(`Error updating agent runner session (attempt ${d}):`,c),L.log("Retrying...")}}),L.log("Finished updating agent runner with result"),{sessionUpdate:s}};import{getTracer as qt,shutdownTracers as an,withActiveSpan as Kt}from"@netlify/otel";var ln=sn(import.meta.url),Wt=ln("../package.json"),Vt=_("pipeline_index"),be=3,Jt=async({config:e,apiToken:t,cliPath:r="netlify",cwd:o,errorLogsPath:n,filter:i,tracing:a={}})=>{let l,{withStageTimer:s}=ft(z.timeUnits.hours(4)),h=await nt(Wt.version,e.id,a);try{await Kt(qt(),"run-pipeline",{},h,async()=>{let d,{aiGateway:c,context:m,persistSteps:y,runner:x,sha:I}=await s("init",()=>Ut({config:e,apiToken:t,cliPath:r,cwd:o,errorLogsPath:n,filter:i,runnerVersion:Wt.version}),z.timeUnits.minutes(10));l=x.clean,e.sha=I;let{runnerResult:g}=await s("inference",()=>Ae({cliPath:r,config:e,context:m,runner:x.runner,persistSteps:y,aiGateway:c}));await H(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let v=await s("deploy",()=>Je({cliPath:r,config:e,context:m,result:g.result,filter:i})),R=g,N=[];if(v.hasChanges&&v.deployError){N.push(ot(v.deployError));let u=1;for(;u<=be&&!v.previewInfo;)Vt.log(`Deploy attempt had errors. Retrying. ${u}/${be}`),await Kt(qt(),"deploy-stage",async E=>{E?.setAttributes({"stage.attempt":u});let{runnerResult:p}=await s(`inference-retry-${u}`,()=>Ae({cliPath:r,config:e,context:m,runner:x.runner,persistSteps:y,aiGateway:c,buildErrors:N,priorAgentSessionId:g.agentSessionId}));R={...p,steps:[...R.steps||[],...p.steps||[]],duration:(R.duration||0)+(p.duration||0)},v=await s(`deploy-retry-${u}`,()=>Je({cliPath:r,config:e,context:m,result:p.result,filter:i})),v.deployError&&N.push(v.deployError),u++});u>be&&!v.previewInfo&&(d=new Error(`Deploy validation failed after ${be} attempts`))}let{diff:O,resultDiff:b,previewInfo:S,diffBinary:w,resultDiffBinary:f}=v;if(await s("cleanup",()=>Ht({config:e,diff:O,result:R.result,duration:R.duration,resultDiff:b,diffBinary:w,resultDiffBinary:f,previewInfo:S}),z.timeUnits.minutes(10)),d)throw d;process.env.NETLIFY_LOCAL_MODE||(await l?.(),await St())})}catch(d){Vt.error("Got error while running pipeline",d),await l?.();let c=d instanceof Error&&d.message;throw await H(e.id,e.sessionId,{result:c||"Encountered error when running agent",state:"error"}),d}finally{await an()}};import Xt from"crypto";var $=_("bin_local"),B=un(P.argv.slice(2),{string:["cwd","cli-path","filter","prompt","runner","model","netlify-api-token"],boolean:["verbose","help"],alias:{h:"help",v:"verbose"}}),Ze=()=>{console.log(`
135
+ `;import nn from"process";import{getTracer as Je}from"@netlify/otel";import{getTracer as tn}from"@netlify/otel";var fe=_("deploy"),Bt=async e=>await A(tn(),"create-preview-deploy",async t=>rn(e,t)),rn=async({netlify:e,hasRepo:t,skipBuild:r,message:o="Agent Preview",deploySubdomain:n,cliPath:i,filter:a},l)=>{try{let s=["deploy","--message",`"${o}"`,"--json","--draft","--verbose"];t||(fe.log("Deploy: Uploading source zip"),s.push("--upload-source-zip")),n&&s.push("--alias",n),a&&s.push("--filter",a),r?(fe.log("Deploy: Skipping build"),s.push("--no-build")):s.push("--context","deploy-preview");let h=i||"netlify";fe.log(`Running: ${h} ${s.join(" ")}`),l?.setAttributes({cmd:h,args:s});let d=await e.utils.run(h,s,{stdio:["ignore","pipe","pipe"]}),c=JSON.parse(d.stdout.trim());l?.setAttributes({success:!0,deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id}),fe.log(`
136
+ Preview deploy created successfully:`,{deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id});let m={deployId:c.deploy_id,previewUrl:c.deploy_url,logsUrl:c.logs,siteId:c.site_id};return t||(m.sourceZipFilename=c.source_zip_filename),m}catch(s){throw fe.error("Failed to create preview deploy via CLI:",s),l?.setAttributes({success:!1,error:s.message}),s}};var ge=_("deploy_stage"),Xe=async e=>await A(Je(),"run-deploy-stage",async()=>on(e)),on=async({cliPath:e,config:t,context:r,result:o,filter:n})=>{let i=await A(Je(),"get-runner-diffs",async()=>await xt({config:t,netlify:r}));if(ge.info("Resolved git",{hasChanges:i.hasChanges,ignored:i.ignored??[]}),!i.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:a,resultDiff:l,diffBinary:s,resultDiffBinary:h}=i,d=!0;ge.log("Preview deploy condition check:",{resultUndefined:o===void 0,resultType:typeof o,hasChanges:d,wouldCreatePreview:o!==void 0&&d});let c=null;if(o!==void 0&&d)try{let m;try{let y=await A(Je(),"get-runner-session",async()=>await lt(t.id,t.sessionId));y?.title&&(m=y.title)}catch(y){ge.warn("Failed to fetch session title, using fallback message:",y.message)}await H(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),c=await Bt({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:m,skipBuild:!1,deploySubdomain:wt(t.id,nn.env.SITE_NAME),filter:n})}catch(m){return ge.warn("Failed to create preview deploy (continuing with agent run):",m),{diff:a,resultDiff:l,hasChanges:d,previewInfo:null,diffBinary:s,resultDiffBinary:h,deployError:m instanceof Error?m.message:String(m)}}return ge.log("Git status",{hasDiff:!!a,hasChanges:d}),{diff:a,resultDiff:l,hasChanges:d,previewInfo:c,diffBinary:s,resultDiffBinary:h}};import{getTracer as Ne}from"@netlify/otel";async function Ht(e,t){let{maxRetries:r,baseDelay:o,onRetry:n}=t,i;for(let a=1;a<=r;a++)try{return await e()}catch(l){if(i=l,a===r)throw i;n&&n(a,i),await new Promise(s=>setTimeout(s,o*a))}throw i}var k=_("cleanup_stage"),qt=async e=>await A(Ne(),"cleanup-stage",async()=>sn(e)),ze=1024*1024*10,sn=async({config:e,diff:t,result:r,duration:o,resultDiff:n,diffBinary:i,resultDiffBinary:a,previewInfo:l})=>{let s={result:r||"Done",duration:o};if(l&&l.deployId&&(s.deploy_id=l.deployId),l&&l.sourceZipFilename&&(s.result_zip_file_name=l.sourceZipFilename),t||i||n||a)try{k.log("Getting pre-signed URLs for diff upload");let d=await ct(e.id,e.sessionId),c=[];(t||i)&&c.push(Oe(d.result.upload_url,i||t).then(()=>{s.result_diff_s3_key=d.result.s3_key,k.log("Successfully uploaded result_diff to S3")})),(n||a)&&c.push(Oe(d.cumulative.upload_url,a||n).then(()=>{s.cumulative_diff_s3_key=d.cumulative.s3_key,k.log("Successfully uploaded cumulative_diff to S3")})),k.log(`Uploading ${c.length} diff(s) to S3 in parallel`),await Promise.all(c),(n||a)&&(k.log("Updating agent runner with cumulative diff S3 key"),await A(Ne(),"update-runner",async()=>{await de(e.id,{result_diff_s3_key:d.cumulative.s3_key})}))}catch(d){k.error("S3 upload failed, falling back to inline diffs:",d);let c=Buffer.byteLength(t||i||""),m=Buffer.byteLength(a||n||"");if(c>ze||m>ze){let y=`Diffs exceed maximum inline size of ${ze} bytes.`;throw k.error(y),new Error(y)}s.result_diff=t,s.result_diff_binary=i,(n||a)&&(s.cumulative_diff=n,s.cumulative_diff_binary=a,k.log("Updating agent runner with inline diffs (fallback)"),await A(Ne(),"update-runner",async()=>{await de(e.id,{result_diff:n,result_diff_binary:a})}))}else k.log("No diffs to upload");return k.log("Updated agent runner with result"),await Ht(async()=>await 
A(Ne(),"update-runner-session",()=>H(e.id,e.sessionId,s)),{maxRetries:3,baseDelay:1e3,onRetry:(d,c)=>{k.error(`Error updating agent runner session (attempt ${d}):`,c),k.log("Retrying...")}}),k.log("Finished updating agent runner with result"),{sessionUpdate:s}};import{getTracer as Kt,shutdownTracers as ln,withActiveSpan as Wt}from"@netlify/otel";var un=an(import.meta.url),Vt=un("../package.json"),Jt=_("pipeline_index"),be=3,Xt=async({config:e,apiToken:t,cliPath:r="netlify",cwd:o,errorLogsPath:n,filter:i,tracing:a={}})=>{let l,{withStageTimer:s}=gt(z.timeUnits.hours(4)),h=await ot(Vt.version,e.id,a);try{await Wt(Kt(),"run-pipeline",{},h,async()=>{let d,{aiGateway:c,context:m,persistSteps:y,runner:x,sha:I}=await s("init",()=>Mt({config:e,apiToken:t,cliPath:r,cwd:o,errorLogsPath:n,filter:i,runnerVersion:Vt.version}),z.timeUnits.minutes(10));l=x.clean,e.sha=I;let{runnerResult:g}=await s("inference",()=>Ae({cliPath:r,config:e,context:m,runner:x.runner,persistSteps:y,aiGateway:c}));await H(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let v=await s("deploy",()=>Xe({cliPath:r,config:e,context:m,result:g.result,filter:i})),R=g,N=[];if(v.hasChanges&&v.deployError){N.push(it(v.deployError));let u=1;for(;u<=be&&!v.previewInfo;)Jt.log(`Deploy attempt had errors. Retrying. ${u}/${be}`),await Wt(Kt(),"deploy-stage",async E=>{E?.setAttributes({"stage.attempt":u});let{runnerResult:p}=await s(`inference-retry-${u}`,()=>Ae({cliPath:r,config:e,context:m,runner:x.runner,persistSteps:y,aiGateway:c,buildErrors:N,priorAgentSessionId:g.agentSessionId}));R={...p,steps:[...R.steps||[],...p.steps||[]],duration:(R.duration||0)+(p.duration||0)},v=await s(`deploy-retry-${u}`,()=>Xe({cliPath:r,config:e,context:m,result:p.result,filter:i})),v.deployError&&N.push(v.deployError),u++});u>be&&!v.previewInfo&&(d=new Error(`Deploy validation failed after ${be} attempts`))}let{diff:O,resultDiff:b,previewInfo:S,diffBinary:w,resultDiffBinary:f}=v;if(await s("cleanup",()=>qt({config:e,diff:O,result:R.result,duration:R.duration,resultDiff:b,diffBinary:w,resultDiffBinary:f,previewInfo:S}),z.timeUnits.minutes(10)),d)throw d;process.env.NETLIFY_LOCAL_MODE||(await l?.(),await At())})}catch(d){Jt.error("Got error while running pipeline",d),await l?.();let c=d instanceof Error&&d.message;throw await H(e.id,e.sessionId,{result:c||"Encountered error when running agent",state:"error"}),d}finally{await ln()}};import zt from"crypto";var $=_("bin_local"),B=cn(P.argv.slice(2),{string:["cwd","cli-path","filter","prompt","runner","model","netlify-api-token"],boolean:["verbose","help"],alias:{h:"help",v:"verbose"}}),Qe=()=>{console.log(`
137
137
  agent-runner-cli-local - Run Netlify agent runner locally without API connections
138
138
 
139
139
  USAGE:
@@ -165,6 +165,6 @@ NOTE:
165
165
  This local mode mocks all Netlify API calls. The agent will run through
166
166
  the full pipeline including inference and deployment, but API calls will
167
167
  be logged instead of executed.
168
- `)};B.help&&(Ze(),P.exit(0));B.prompt||($.error("Error: --prompt is required"),Ze(),P.exit(1));B["netlify-api-token"]||($.error("Error: --netlify-api-token is required - generate a PAT from your Netlify user settings"),Ze(),P.exit(1));try{let e=B.cwd||P.cwd(),t=zt.join(e,".netlify","netlify-agent-runner-context*");Zt.rmSync(t,{recursive:!0,force:!0});let r;try{r=await cn(e)}catch(l){$.error(l.message),$.error(`
169
- To link this directory to a Netlify site, run:`),$.error(" netlify link"),P.exit(1)}let o=`local-${Xt.randomBytes(8).toString("hex")}`,n=`session-${Xt.randomBytes(8).toString("hex")}`,i=B.runner||"claude";$.log("Starting agent runner in local mode",{runnerId:o,sessionId:n,siteId:r,cwd:e,runner:i});let a={id:o,sessionId:n,prompt:B.prompt,runner:i,model:B.model,accountType:"local",validateAgent:!1,validateAgentWithBuild:!1,sessionHistoryContext:[],siteContext:[],hasRepo:!0,useGateway:!0,sha:void 0,modelVersionOverrides:{}};P.env.NETLIFY_LOCAL_MODE="true",P.env.NETLIFY_API_HOST="api.netlify.com",P.env.NETLIFY_API_TOKEN=B["netlify-api-token"],P.env.SITE_ID=r,P.env.NETLIFY_TEAM_ID="local-team-id",P.env.NETLIFY_AGENT_RUNNER_USER_ID="local-user-id",P.env.SITE_NAME="local-site",i==="claude"?we(e,"claude")||($.log("Claude CLI not found, installing..."),await ze(e,"@anthropic-ai/claude-code")):i==="gemini"?we(e,"gemini")||($.log("Gemini CLI not found, installing..."),await ze(e,"@google/gemini-cli@0.1.17")):i==="codex"?we(e,"codex")||($.log("Codex CLI not found, installing..."),await ze(e,"@openai/codex")):($.error(`Unknown runner: ${i}`),P.exit(1)),await Jt({config:a,cwd:e,cliPath:B["cli-path"],filter:B.filter,tracing:{exporterUrl:void 0,traceparent:void 0}}),$.info("Finished agent (local mode)"),P.exit(0)}catch(e){$.error("Error running agent pipeline (local mode):",e),P.exit(1)}function ze(e,t){return new Promise((r,o)=>{C("npm",["install",t,"--no-save"],{cwd:e}).then(({stdout:n})=>{$.log(`${t} installed: ${n}`),r()}).catch(n=>{$.error(`Error installing ${t}: ${n.stderr||n.message}`),o(n)})})}async function cn(e){let t=zt.join(e,".netlify","state.json");try{let r=await Zt.readFileSync(t,"utf-8"),o=JSON.parse(r);if(!o.siteId)throw new Error(`No siteId found in ${t}. Please link this directory to a Netlify site using 'netlify link'.`);return $.log(`Found site ID from state file: ${o.siteId}`),o.siteId}catch(r){throw r.code==="ENOENT"?new Error(`No .netlify/state.json found in ${e}. Please link this directory to a Netlify site using 'netlify link'.`):r}}
168
+ `)};B.help&&(Qe(),P.exit(0));B.prompt||($.error("Error: --prompt is required"),Qe(),P.exit(1));B["netlify-api-token"]||($.error("Error: --netlify-api-token is required - generate a PAT from your Netlify user settings"),Qe(),P.exit(1));try{let e=B.cwd||P.cwd(),t=Zt.join(e,".netlify","netlify-agent-runner-context*");Qt.rmSync(t,{recursive:!0,force:!0});let r;try{r=await dn(e)}catch(l){$.error(l.message),$.error(`
169
+ To link this directory to a Netlify site, run:`),$.error(" netlify link"),P.exit(1)}let o=`local-${zt.randomBytes(8).toString("hex")}`,n=`session-${zt.randomBytes(8).toString("hex")}`,i=B.runner||"claude";$.log("Starting agent runner in local mode",{runnerId:o,sessionId:n,siteId:r,cwd:e,runner:i});let a={id:o,sessionId:n,prompt:B.prompt,runner:i,model:B.model,accountType:"local",validateAgent:!1,validateAgentWithBuild:!1,sessionHistoryContext:[],siteContext:[],hasRepo:!0,useGateway:!0,sha:void 0,modelVersionOverrides:{}};P.env.NETLIFY_LOCAL_MODE="true",P.env.NETLIFY_API_HOST="api.netlify.com",P.env.NETLIFY_API_TOKEN=B["netlify-api-token"],P.env.SITE_ID=r,P.env.NETLIFY_TEAM_ID="local-team-id",P.env.NETLIFY_AGENT_RUNNER_USER_ID="local-user-id",P.env.SITE_NAME="local-site",i==="claude"?we(e,"claude")||($.log("Claude CLI not found, installing..."),await Ze(e,"@anthropic-ai/claude-code")):i==="gemini"?we(e,"gemini")||($.log("Gemini CLI not found, installing..."),await Ze(e,"@google/gemini-cli@0.1.17")):i==="codex"?we(e,"codex")||($.log("Codex CLI not found, installing..."),await Ze(e,"@openai/codex")):($.error(`Unknown runner: ${i}`),P.exit(1)),await Xt({config:a,cwd:e,cliPath:B["cli-path"],filter:B.filter,tracing:{exporterUrl:void 0,traceparent:void 0}}),$.info("Finished agent (local mode)"),P.exit(0)}catch(e){$.error("Error running agent pipeline (local mode):",e),P.exit(1)}function Ze(e,t){return new Promise((r,o)=>{C("npm",["install",t,"--no-save"],{cwd:e}).then(({stdout:n})=>{$.log(`${t} installed: ${n}`),r()}).catch(n=>{$.error(`Error installing ${t}: ${n.stderr||n.message}`),o(n)})})}async function dn(e){let t=Zt.join(e,".netlify","state.json");try{let r=await Qt.readFileSync(t,"utf-8"),o=JSON.parse(r);if(!o.siteId)throw new Error(`No siteId found in ${t}. Please link this directory to a Netlify site using 'netlify link'.`);return $.log(`Found site ID from state file: ${o.siteId}`),o.siteId}catch(r){throw r.code==="ENOENT"?new Error(`No .netlify/state.json found in ${e}. Please link this directory to a Netlify site using 'netlify link'.`):r}}
170
170
  //# sourceMappingURL=bin-local.js.map
package/dist/bin.js CHANGED
@@ -1,28 +1,28 @@
1
1
  #!/usr/bin/env node
2
- import et from"process";import yn from"minimist";import{createRequire as cn}from"module";import{createTracerProvider as nr}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as rt}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as or}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as sr}from"@netlify/otel";import{propagation as nt,context as ot,W3CTraceContextPropagator as ir}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as ar}from"@opentelemetry/exporter-trace-otlp-grpc";function _(e){let t=!process.env.VITEST;return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var Ce=_("tracing"),st=async(e,t,r)=>(await nr({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new rt(new be),new rt(new ar({url:r.exporterUrl}))],instrumentations:[new or({skipHeaders:!0})]}),r.traceparent?(nt.setGlobalPropagator(new ir),nt.extract(ot.active(),{traceparent:r.traceparent,isRemote:!0})):ot.active());function A(e,t,r){return Ce.log(`\u23F3 TRACE: ${t} starting...`),sr(e,t,r)}var be=class{export(t,r){for(let o of t)this.logSpan(o);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,o=t.attributes,n=[];for(let[l,i]of Object.entries(o))l.includes("duration")&&typeof i=="number"?n.push(`${l}=${i.toFixed(2)}ms`):n.push(`${l}=${i}`);let s=t.status?.code===2?"\u274C":"\u2705",a=n.length>0?` [${n.join(", ")}]`:"";Ce.log(`${s} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${a}`),t.status?.code===2&&t.status.message&&Ce.log(` \u274C Error: ${t.status.message}`)}};var lr=["error","failed","exception","fatal","panic","abort","crash"];function it(e){let t=e.split(`
3
- `),r=[],o=-1,n=0;for(;n<t.length;){let l=t[n].slice(0,500).toLowerCase();if(lr.some(h=>l.includes(h))){let h=Math.max(0,n-10,o+1),d=Math.min(t.length-1,n+20),u=[];for(let m=h;m<=d;m++)u.push(t[m]);r.push(u.join(`
2
+ import tt from"process";import _n from"minimist";import{createRequire as dn}from"module";import{createTracerProvider as or}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as nt}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as sr}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as ir}from"@netlify/otel";import{propagation as ot,context as st,W3CTraceContextPropagator as ar}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as lr}from"@opentelemetry/exporter-trace-otlp-grpc";function _(e){let t=!process.env.VITEST;return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var Ce=_("tracing"),it=async(e,t,r)=>(await or({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new nt(new be),new nt(new lr({url:r.exporterUrl}))],instrumentations:[new sr({skipHeaders:!0})]}),r.traceparent?(ot.setGlobalPropagator(new ar),ot.extract(st.active(),{traceparent:r.traceparent,isRemote:!0})):st.active());function A(e,t,r){return Ce.log(`\u23F3 TRACE: ${t} starting...`),ir(e,t,r)}var be=class{export(t,r){for(let o of t)this.logSpan(o);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,o=t.attributes,n=[];for(let[l,i]of Object.entries(o))l.includes("duration")&&typeof i=="number"?n.push(`${l}=${i.toFixed(2)}ms`):n.push(`${l}=${i}`);let s=t.status?.code===2?"\u274C":"\u2705",a=n.length>0?` [${n.join(", ")}]`:"";Ce.log(`${s} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${a}`),t.status?.code===2&&t.status.message&&Ce.log(` \u274C Error: ${t.status.message}`)}};var ur=["error","failed","exception","fatal","panic","abort","crash"];function at(e){let t=e.split(`
3
+ `),r=[],o=-1,n=0;for(;n<t.length;){let l=t[n].slice(0,500).toLowerCase();if(ur.some(h=>l.includes(h))){let h=Math.max(0,n-10,o+1),d=Math.min(t.length-1,n+20),u=[];for(let m=h;m<=d;m++)u.push(t[m]);r.push(u.join(`
4
4
  `)),o=d,n=d+1}else n++}if(r.length===0)return e;let s=r.map((a,l)=>`<extracted_error_chunk order="${l+1}">
5
5
  ${a}
6
6
  </extracted_error_chunk>`).join(`
7
7
 
8
- `);return s.length>e.length*.8?e:s}import Re from"process";import{getTracer as qr}from"@netlify/otel";import le from"process";var ge=le.env.NETLIFY_API_URL,me=le.env.NETLIFY_API_TOKEN,j=_("api"),he=()=>le.env.NETLIFY_LOCAL_MODE==="true",ue=async(e,t={})=>{if(!ge||!me)throw new Error("No API URL or token");let r=new URL(e,ge),o={...t,headers:{...t.headers,Authorization:`Bearer ${me}`}};le.env.AGENT_RUNNERS_DEBUG==="true"&&(o.headers["x-nf-debug-logging"]="true"),t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),s=n.ok&&n.status<=299;if(le.env.AGENT_RUNNERS_DEBUG==="true")j.log(`Response headers for ${r}:`),n.headers.forEach((l,i)=>{j.log(` ${i}: ${l}`)});else{let l=n.headers.get("x-request-id")||n.headers.get("x-nf-request-id");j.log(`Request ID for ${r}: ${l||"N/A"}`)}if(s||j.error(`Got status ${n.status} for request ${r}`),t.raw){if(!s)throw n;return n}let a=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!s)throw a;return a},at=e=>{j.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(ge=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(me=e.constants.NETLIFY_API_TOKEN)},lt=()=>({apiUrl:ge,token:me}),ce=async(e,t)=>he()?(j.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):ue(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),Y=async(e,t,r)=>he()?(j.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):ue(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var ut=async(e,t)=>he()?(j.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):ue(`/api/v1/agent_runners/${e}/sessions/${t}`),ct=(e,t,r)=>ue(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r}}),dt=async(e,t)=>he()?(j.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):ue(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),Pe=async(e,t)=>{j.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var ee=_("ai_gateway"),Oe=null;var pt=async()=>{if(Oe)return Oe;ee.log("Fetching available AI gateway providers");let e=await fetch(`${lt().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return Oe=t,ee.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},ur=async(e,t)=>{let o=(await pt()).providers[e];if(!o)return ee.log(`Provider '${e}' not found`),!1;let n=o.models.includes(t);return ee.log(`Model validation for ${e}/${t}`,{isAvailable:n}),n},ft=async({netlify:e,config:t})=>{let r,o,n,s,a=e.constants?.SITE_ID;if(!a)throw new Error("No site id");let l=async()=>{clearTimeout(n),ee.log("Requesting AI gateway information");let i=await ct(a,t.id,t.sessionId);if({token:r,url:s}=i,o=i.expires_at?i.expires_at*1e3:void 0,ee.log("Got AI gateway information",{token:!!r,expiresAt:o,url:s}),o){let 
h=o-Date.now()-6e4;h>0&&(n=setTimeout(()=>{l()},h))}};return await Promise.all([l(),pt()]),{get url(){return s},get token(){return r},isModelAvailableForProvider:ur}};import H from"process";import Ee from"path";import Fe from"fs";import{fileURLToPath as mr}from"url";import{execa as hr,execaCommand as Hn}from"execa";import{Transform as cr}from"stream";var dr=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","ERROR_LOGS_PATH","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),pr=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function fr(){return Object.entries(process.env).filter(([e,t])=>!(!t||dr.has(e)||pr.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function B(e){if(typeof e!="string")return e;let t=fr();if(t.length===0)return e;let r=e;return t.forEach(o=>{let n=new RegExp(gr(o),"g");r=r.replace(n,"******")}),r}function gr(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var te=class extends cr{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,o){let n=t.toString(),s=B(n);o(null,s)}};function gt(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(o,n,s){let a=typeof o=="string"?B(o):o;return typeof n=="function"?t(a,n):t(a,n,s)},process.stderr.write=function(o,n,s){let a=typeof o=="string"?B(o):o;return typeof n=="function"?r(a,n):r(a,n,s)}}var de=null,mt=e=>(de&&de.destroy(),de=new V({totalAllowedTime:e}),de),ht=()=>de;var V=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,o)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let n=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),s=null,a=null;o!==void 0&&(a=new Promise((l,i)=>{s=setTimeout(()=>{i(new Error(`${t} stage exceeded its maximum duration of ${o}ms`))},o)}));try{return a?await Promise.race([r(),a]):await r()}finally{n(),s&&clearTimeout(s)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var Er=mr(import.meta.url),yr=Ee.dirname(Er),ye=_("shell"),Le=new Set,_r={preferLocal:!0},O=(e,t,r)=>{let[o,n]=Tr(t,r),s={..._r,...n},a=hr(e,o,s);return wr(a,s),Rr(a),a};var Tr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},wr=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(H.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new te).pipe(H.stdout),e.stdout?.pipe(new te).pipe(H.stdout),e.stderr?.pipe(new te).pipe(H.stderr);return}e.stdout?.pipe(H.stdout),e.stderr?.pipe(H.stderr)},Et=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(H.kill(-e.pid,t),ye.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return ye.error("Error killing process:",r),!1}},Ir=e=>Et(e,"SIGKILL"),Rr=e=>{Le.add(e);let t=ht();if(t){let r=t.onTimesUp(()=>{ye.log(`Global timer expired, killing process ${e.pid}`),Et(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(ye.log(`Force killing process ${e.pid} after timeout`),Ir(e))},5e3)});e.on("exit",()=>{Le.delete(e),r()}),e.on("error",()=>{Le.delete(e),r()})}};function J(e,t){if(H.env.NODE_PATH){let n=Ee.join(H.env.NODE_PATH,".bin",t);if(Fe.existsSync(n))return n}let r=Ee.join(e,"node_modules",".bin",t);if(Fe.existsSync(r))return r;let o=Ee.join(yr,"..","node_modules",".bin",t);if(Fe.existsSync(o))return o}var yt="netlify-agent-runner-context.md",De="task-history",$e="netlify-context",k=".netlify",re="results.md",ke="assets",Ue="other",Ge="personal";var Me="enterprise",je="free",_t=[Ge,"pro",Me,je];var Tt=_("utils"),xr=e=>new Promise(t=>{setTimeout(t,e)}),wt=(e,t=3e3)=>{let r=!1,o=null,n=[],s=null,a=(...l)=>{if(r)return o=l,new Promise(d=>{n.push(d)});r=!0;let i,h=new Promise(d=>{i=d});return s=(async()=>{await Promise.resolve();let d=await e(...l);for(i(d);;){if(await xr(t),!o)return r=!1,s=null,d;let u=o,m=n;o=null,n=[],d=await e(...u),m.forEach(E=>{E(d)})}})(),h};return 
a.flush=async()=>{if((r||o)&&s)return await s,a.flush()},a},_e=(e,t,r=!1)=>{let o=null,n=null,s=null,a=function(...l){n=l,s=this;let i=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(s,n),n=null,s=null)},t),i&&(e.apply(s,n),n=null,s=null)};return a.cancel=()=>{clearTimeout(o),o=null,n=null,s=null},a.flush=()=>{if(o){clearTimeout(o);let l=n,i=s;o=null,n=null,s=null,e.apply(i,l)}},a},Te=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(o){t&&(r?.error?r.error("Could not parse JSON",o):Tt.error("Could not parse JSON",o))}},It=(e,t)=>{let n=".netlify.app",s="agent-";if(!t)return`${s}${e.slice(0,6)}`;let l=`--${t}${n}`;if(l.length>55)return"";let i=60-l.length;if(i<=0)return"";if(i>=s.length+6){let h=Math.min(i-s.length,e.length);return`${s}${e.slice(0,h)}`}return e.slice(0,i)},Nr=e=>!e||typeof e!="object"||Array.isArray(e)||Object.keys(e).length===0?!1:!!_t.some(t=>t in e),Rt=()=>{let e={},t={codex:process.env.NETLIFY_FF_AGENT_RUNNER_CODEX_VERSION,claude:process.env.NETLIFY_FF_AGENT_RUNNER_CLAUDE_VERSION,gemini:process.env.NETLIFY_FF_AGENT_RUNNER_GEMINI_VERSION};return Object.entries(t).forEach(([r,o])=>{if(o){let n=`NETLIFY_FF_AGENT_RUNNER_${r.toUpperCase()}_VERSION`;try{let s=JSON.parse(o);Nr(s)&&(e[r]=s)}catch(s){let l=s instanceof SyntaxError?"Invalid JSON":s.message;Tt.error(`Could not parse ${r} model version override from ${n}: ${l}`)}}}),e};import{Buffer as xt}from"buffer";import vr from"path";var Nt=async({config:e})=>{let t=await Sr(),{hasChanges:r}=t,{status:o}=t;if(!r)return{hasChanges:!1};let n=Cr(o);await br(n);let s=await Be(o);await Ye(s);let a={stdio:["ignore","pipe","pipe"]},i=(await O("git",["diff","--staged"],a)).stdout;if(r=!!i,!r)return{hasChanges:!1,ignored:s};let d=(await O("git",["diff","--staged","--binary"],a)).stdout,u,m;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await O("git",["commit","-m","Agent runner"]),u=(await O("git",["diff",e.sha,"HEAD"],a)).stdout;let g=(await O("git",["diff",e.sha,"HEAD","--binary"],a)).stdout;u!==g&&(m=xt.from(g).toString("base64"))}let E={hasChanges:!0,diff:i,resultDiff:u,ignored:s};return i!==d&&(E.diffBinary=xt.from(d).toString("base64")),m&&(E.resultDiffBinary=m),E},Ye=async(e=[])=>{await O("git",["add",".",...e])},vt=async()=>(await O("git",["status","-s"])).stdout,At=/.. (.+)?\.log$/,Ar=[At],Sr=async()=>{let e=await vt();return{hasChanges:(e.trim().length===0?[]:e.split(`
9
- `).filter(o=>Ar.some(s=>s instanceof RegExp?s.test(o):o===s)?!1:o[1]?.trim()!=="")).length!==0,status:e}},St=async()=>{let{stdout:e}=await O("git",["rev-parse","HEAD"]);return e.trim()},Ct=async()=>{let{stdout:e}=await O("git",["rev-list","--max-parents=0","HEAD"]);return e.trim()},Be=async e=>{e||=await vt();let t=[".netlify","node_modules"],r=[];return e.split(`
10
- `).forEach(o=>{t.forEach(s=>{[`?? ${s}`,`?? ${s}${vr.sep}`].some(l=>o.startsWith(l))&&r.push(`:!${s}`)});let n=o.match(At)?.[1];n&&r.push(`:!${n}.log`)}),r},bt=async()=>{await O("git",["reset","--hard","HEAD"])},Cr=e=>{let t=e.split(`
11
- `).reduce((r,o)=>{if(!o)return r;let[n,s,,...a]=o,l=a.join(""),i=n.trim(),h=s.trim();return r[l]?r[l].change=h:r[l]={filePath:l,stage:i,change:h},r},{});return Object.values(t)},br=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push(O("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import Or from"fs/promises";import Fr from"os";import Ft from"path";import X from"process";import Lr from"readline";import He from"path";import Pr from"fs/promises";var Ke=_("agent-output-utils");async function ne({initialResult:e,agentName:t,hasError:r}){let o="",n=He.join(process.cwd(),k,re);try{let s=await Pr.readFile(n,"utf-8");s&&(o=s,Ke.log(`Pulled result from ${He.relative(process.cwd(),n)}`))}catch{Ke.log(`No results file found at ${He.relative(process.cwd(),n)}`)}return o||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function oe({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,o=r?.replace(/\s+/g," ").trim().toLowerCase()||"",n="";return o?.includes("ai gateway is not available for your account")||o?.includes("ai gateway is not enabled for your account")?n="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":o?.includes("error when talking to gemini api")?n="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(o?.includes("connection closed prematurely")||o?.includes("499")&&t.toLowerCase().includes("gemini"))&&(n=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),o?.includes("request timed out")&&(n=`The ${t} API request's have timed out. Please try again or use a different available agent.`),o?.includes("network error")&&(n=`The ${t} agent is having network issues. 
Please try again or use a different available agent.`),n&&Ke.log(`Providing updated error messsage: ${n}, replacing original error: ${r}`),n||r||void 0}function se(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error"))}var U=_("runner_claude"),Pt="Claude Code",we="claude-sonnet-4-5-20250929",Ot=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,Dr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(U.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(U.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(U.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function qe({config:e,netlify:t,persistSteps:r,aiGateway:o,continueSession:n,priorAgentSessionId:s}){let a=e,{accountType:l,prompt:i,modelVersionOverrides:h}=a,{model:d}=a,u="";if(o){let{token:c,url:y}=o;if(!c||!y)throw new Error("No token or url provided from AI Gateway");if(h?.claude){let p=h?.claude?.[l];if(p){if(!await o.isModelAvailableForProvider("anthropic",p))throw new Error(`Model override '${p}' is not available for anthropic provider`);d=p}}else if(d){if(!await o.isModelAvailableForProvider("anthropic",d))throw new Error(`Model '${d}' is not available for anthropic provider`)}else await o.isModelAvailableForProvider("anthropic",we)?(d=we,U.log(`Using default model: ${we}`)):U.log(`Default model ${we} is not available, proceeding without model specification`);X.env.ANTHROPIC_API_KEY=c,X.env.ANTHROPIC_BASE_URL=y}else if(!X.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let m=[],E=[],I={},w=0,g=0,R,N,S=[J(X.cwd(),"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose",...d?["--model",d]:[],...n?["--continue"]:[],...n&&s?["--resume",s]:[],"-p",i],b=`${X.env.NVM_BIN}/node`;U.log(`Running ${b} ${S.join(" ")}`);let C=t.utils.run(b,S,{all:!0,env:X.env});C.stdin?.end();let v=_e(()=>{r?.({steps:m,duration:g})},250),T=(c,y)=>{let p={...c,id:w};w+=1,E.push(p),m.push(p),y||v.flush(),v(),y&&v.flush()},f=Lr.createInterface({input:C.all});return f.on("error",c=>{U.error("Readline interface error",{error:c.message,stack:c.stack})}),f.on("line",c=>{let y=null;try{y=JSON.parse(c)}catch{U.log("Could not parse line",c)}y?.session_id&&y.session_id!==u&&(u=y.session_id),Array.isArray(y?.message?.content)?y.message.content.forEach(p=>{switch(p.type){case"text":{p.text&&T({message:p.text});break}case"image":{typeof p.source=="object"&&p.source&&p.source.type==="base64"&&p.source.media_type?T({message:`![](data:${p.source.media_type};base64,${p.source.data})`}):U.log(`Unsupported image type 
${p.source?.type}`,p.source);break}case"tool_use":{if(p.name==="Task"){let x=p.input?.description&&`\`${p.input.description}\``;T({title:[Ot(p.name),x].filter(Boolean).join(" ")})}else p.id&&(I[p.id]=p);v.flush();break}case"tool_result":{let x=p.tool_use_id?I[p.tool_use_id]:void 0,ae;if(x){let Q=x.input?.file_path&&Ft.relative(X.cwd(),x.input.file_path),P=Q&&`\`${Q}\``;ae=[Ot(x.name||""),P].filter(Boolean).join(" ")}let tt=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(x?.name||""),q;if(typeof p.content=="string")q=p.content;else if(Array.isArray(p.content)){let Q=[];p.content.forEach(P=>{P?.type==="text"&&typeof P.text=="string"?Q.push(P.text):P?.type==="image"&&typeof P.source=="object"&&P.source?P.source.type==="base64"&&P.source.media_type?Q.push(`![](data:${P.source.media_type};base64,${P.source.data})`):U.log(`Unsupported image type ${P.source.type}`,P.source):U.log(`Unsupported block type ${P?.type}`)}),q=Q.join(`
8
+ `);return s.length>e.length*.8?e:s}import Re from"process";import{getTracer as Wr}from"@netlify/otel";import le from"process";var ge=le.env.NETLIFY_API_URL,me=le.env.NETLIFY_API_TOKEN,j=_("api"),he=()=>le.env.NETLIFY_LOCAL_MODE==="true",ue=async(e,t={})=>{if(!ge||!me)throw new Error("No API URL or token");let r=new URL(e,ge),o={...t,headers:{...t.headers,Authorization:`Bearer ${me}`}};le.env.AGENT_RUNNERS_DEBUG==="true"&&(o.headers["x-nf-debug-logging"]="true"),t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),s=n.ok&&n.status<=299;if(le.env.AGENT_RUNNERS_DEBUG==="true")j.log(`Response headers for ${r}:`),n.headers.forEach((l,i)=>{j.log(` ${i}: ${l}`)});else{let l=n.headers.get("x-request-id")||n.headers.get("x-nf-request-id");j.log(`Request ID for ${r}: ${l||"N/A"}`)}if(s||j.error(`Got status ${n.status} for request ${r}`),t.raw){if(!s)throw n;return n}let a=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!s)throw a;return a},lt=e=>{j.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(ge=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(me=e.constants.NETLIFY_API_TOKEN)},ut=()=>({apiUrl:ge,token:me}),ce=async(e,t)=>he()?(j.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):ue(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),Y=async(e,t,r)=>he()?(j.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):ue(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var ct=async(e,t)=>he()?(j.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):ue(`/api/v1/agent_runners/${e}/sessions/${t}`),dt=(e,t,r)=>ue(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r}}),pt=async(e,t)=>he()?(j.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):ue(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),Pe=async(e,t)=>{j.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var ee=_("ai_gateway"),Oe=null;var ft=async()=>{if(Oe)return Oe;ee.log("Fetching available AI gateway providers");let e=await fetch(`${ut().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return Oe=t,ee.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},cr=async(e,t)=>{let o=(await ft()).providers[e];if(!o)return ee.log(`Provider '${e}' not found`),!1;let n=o.models.includes(t);return ee.log(`Model validation for ${e}/${t}`,{isAvailable:n}),n},gt=async({netlify:e,config:t})=>{let r,o,n,s,a=e.constants?.SITE_ID;if(!a)throw new Error("No site id");let l=async()=>{clearTimeout(n),ee.log("Requesting AI gateway information");let i=await dt(a,t.id,t.sessionId);if({token:r,url:s}=i,o=i.expires_at?i.expires_at*1e3:void 0,ee.log("Got AI gateway information",{token:!!r,expiresAt:o,url:s}),o){let 
h=o-Date.now()-6e4;h>0&&(n=setTimeout(()=>{l()},h))}};return await Promise.all([l(),ft()]),{get url(){return s},get token(){return r},isModelAvailableForProvider:cr}};import H from"process";import Ee from"path";import Fe from"fs";import{fileURLToPath as hr}from"url";import{execa as Er,execaCommand as Kn}from"execa";import{Transform as dr}from"stream";var pr=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","ERROR_LOGS_PATH","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),fr=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function gr(){return Object.entries(process.env).filter(([e,t])=>!(!t||pr.has(e)||fr.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function B(e){if(typeof e!="string")return e;let t=gr();if(t.length===0)return e;let r=e;return t.forEach(o=>{let n=new RegExp(mr(o),"g");r=r.replace(n,"******")}),r}function mr(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var te=class extends dr{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,o){let n=t.toString(),s=B(n);o(null,s)}};function mt(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(o,n,s){let a=typeof o=="string"?B(o):o;return typeof n=="function"?t(a,n):t(a,n,s)},process.stderr.write=function(o,n,s){let a=typeof o=="string"?B(o):o;return typeof n=="function"?r(a,n):r(a,n,s)}}var de=null,ht=e=>(de&&de.destroy(),de=new V({totalAllowedTime:e}),de),Et=()=>de;var V=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,o)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let n=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),s=null,a=null;o!==void 0&&(a=new Promise((l,i)=>{s=setTimeout(()=>{i(new Error(`${t} stage exceeded its maximum duration of ${o}ms`))},o)}));try{return a?await Promise.race([r(),a]):await r()}finally{n(),s&&clearTimeout(s)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var yr=hr(import.meta.url),_r=Ee.dirname(yr),ye=_("shell"),Le=new Set,Tr={preferLocal:!0},O=(e,t,r)=>{let[o,n]=wr(t,r),s={...Tr,...n},a=Er(e,o,s);return Ir(a,s),xr(a),a};var wr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},Ir=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(H.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new te).pipe(H.stdout),e.stdout?.pipe(new te).pipe(H.stdout),e.stderr?.pipe(new te).pipe(H.stderr);return}e.stdout?.pipe(H.stdout),e.stderr?.pipe(H.stderr)},yt=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(H.kill(-e.pid,t),ye.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return ye.error("Error killing process:",r),!1}},Rr=e=>yt(e,"SIGKILL"),xr=e=>{Le.add(e);let t=Et();if(t){let r=t.onTimesUp(()=>{ye.log(`Global timer expired, killing process ${e.pid}`),yt(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(ye.log(`Force killing process ${e.pid} after timeout`),Rr(e))},5e3)});e.on("exit",()=>{Le.delete(e),r()}),e.on("error",()=>{Le.delete(e),r()})}};function J(e,t){if(H.env.NODE_PATH){let n=Ee.join(H.env.NODE_PATH,".bin",t);if(Fe.existsSync(n))return n}let r=Ee.join(e,"node_modules",".bin",t);if(Fe.existsSync(r))return r;let o=Ee.join(_r,"..","node_modules",".bin",t);if(Fe.existsSync(o))return o}var _t="netlify-agent-runner-context.md",De="task-history",$e="netlify-context",k=".netlify",re="results.md",ke="assets",Ue="other",Ge="personal";var Me="enterprise",je="free",Tt=[Ge,"pro",Me,je];var wt=_("utils"),Nr=e=>new Promise(t=>{setTimeout(t,e)}),It=(e,t=3e3)=>{let r=!1,o=null,n=[],s=null,a=(...l)=>{if(r)return o=l,new Promise(d=>{n.push(d)});r=!0;let i,h=new Promise(d=>{i=d});return s=(async()=>{await Promise.resolve();let d=await e(...l);for(i(d);;){if(await Nr(t),!o)return r=!1,s=null,d;let u=o,m=n;o=null,n=[],d=await e(...u),m.forEach(E=>{E(d)})}})(),h};return 
a.flush=async()=>{if((r||o)&&s)return await s,a.flush()},a},_e=(e,t,r=!1)=>{let o=null,n=null,s=null,a=function(...l){n=l,s=this;let i=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(s,n),n=null,s=null)},t),i&&(e.apply(s,n),n=null,s=null)};return a.cancel=()=>{clearTimeout(o),o=null,n=null,s=null},a.flush=()=>{if(o){clearTimeout(o);let l=n,i=s;o=null,n=null,s=null,e.apply(i,l)}},a},Te=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(o){t&&(r?.error?r.error("Could not parse JSON",o):wt.error("Could not parse JSON",o))}},Rt=(e,t)=>{let n=".netlify.app",s="agent-";if(!t)return`${s}${e.slice(0,6)}`;let l=`--${t}${n}`;if(l.length>55)return"";let i=60-l.length;if(i<=0)return"";if(i>=s.length+6){let h=Math.min(i-s.length,e.length);return`${s}${e.slice(0,h)}`}return e.slice(0,i)},vr=e=>!e||typeof e!="object"||Array.isArray(e)||Object.keys(e).length===0?!1:!!Tt.some(t=>t in e),xt=()=>{let e={},t={codex:process.env.NETLIFY_FF_AGENT_RUNNER_CODEX_VERSION,claude:process.env.NETLIFY_FF_AGENT_RUNNER_CLAUDE_VERSION,gemini:process.env.NETLIFY_FF_AGENT_RUNNER_GEMINI_VERSION};return Object.entries(t).forEach(([r,o])=>{if(o){let n=`NETLIFY_FF_AGENT_RUNNER_${r.toUpperCase()}_VERSION`;try{let s=JSON.parse(o);vr(s)&&(e[r]=s)}catch(s){let l=s instanceof SyntaxError?"Invalid JSON":s.message;wt.error(`Could not parse ${r} model version override from ${n}: ${l}`)}}}),e};import{Buffer as Nt}from"buffer";import Ar from"path";var vt=_("repo"),At=async({config:e})=>{vt.info("Getting runner diffs");let t=await Cr(),{hasChanges:r}=t,{status:o}=t;if(!r)return{hasChanges:!1};let n=br(o);await Pr(n),vt.info("Changes after processing"),await Be();let s=await He(o);await Ye(s);let a={stdio:["ignore","pipe","pipe"]},i=(await O("git",["diff","--staged"],a)).stdout;if(r=!!i,!r)return{hasChanges:!1,ignored:s};let d=(await O("git",["diff","--staged","--binary"],a)).stdout,u,m;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await O("git",["commit","-m","Agent runner"]),u=(await O("git",["diff",e.sha,"HEAD"],a)).stdout;let g=(await O("git",["diff",e.sha,"HEAD","--binary"],a)).stdout;u!==g&&(m=Nt.from(g).toString("base64"))}let E={hasChanges:!0,diff:i,resultDiff:u,ignored:s};return i!==d&&(E.diffBinary=Nt.from(d).toString("base64")),m&&(E.resultDiffBinary=m),E},Ye=async(e=[])=>{await O("git",["add",".",...e])},Be=async()=>(await O("git",["status","-s"])).stdout,St=/.. (.+)?\.log$/,Sr=[St],Cr=async()=>{let e=await Be();return{hasChanges:(e.trim().length===0?[]:e.split(`
9
+ `).filter(o=>Sr.some(s=>s instanceof RegExp?s.test(o):o===s)?!1:o[1]?.trim()!=="")).length!==0,status:e}},Ct=async()=>{let{stdout:e}=await O("git",["rev-parse","HEAD"]);return e.trim()},bt=async()=>{let{stdout:e}=await O("git",["rev-list","--max-parents=0","HEAD"]);return e.trim()},He=async e=>{e||=await Be();let t=[".netlify","node_modules"],r=[];return e.split(`
10
+ `).forEach(o=>{t.forEach(s=>{[`?? ${s}`,`?? ${s}${Ar.sep}`].some(l=>o.startsWith(l))&&r.push(`:!${s}`)});let n=o.match(St)?.[1];n&&r.push(`:!${n}.log`)}),r},Pt=async()=>{await O("git",["reset","--hard","HEAD"])},br=e=>{let t=e.split(`
11
+ `).reduce((r,o)=>{if(!o)return r;let[n,s,,...a]=o,l=a.join(""),i=n.trim(),h=s.trim();return r[l]?r[l].change=h:r[l]={filePath:l,stage:i,change:h},r},{});return Object.values(t)},Pr=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push(O("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import Fr from"fs/promises";import Lr from"os";import Lt from"path";import X from"process";import Dr from"readline";import Ke from"path";import Or from"fs/promises";var qe=_("agent-output-utils");async function ne({initialResult:e,agentName:t,hasError:r}){let o="",n=Ke.join(process.cwd(),k,re);try{let s=await Or.readFile(n,"utf-8");s&&(o=s,qe.log(`Pulled result from ${Ke.relative(process.cwd(),n)}`))}catch{qe.log(`No results file found at ${Ke.relative(process.cwd(),n)}`)}return o||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function oe({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,o=r?.replace(/\s+/g," ").trim().toLowerCase()||"",n="";return o?.includes("ai gateway is not available for your account")||o?.includes("ai gateway is not enabled for your account")?n="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":o?.includes("error when talking to gemini api")?n="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(o?.includes("connection closed prematurely")||o?.includes("499")&&t.toLowerCase().includes("gemini"))&&(n=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),o?.includes("request timed out")&&(n=`The ${t} API request's have timed out. Please try again or use a different available agent.`),o?.includes("network error")&&(n=`The ${t} agent is having network issues. 
Please try again or use a different available agent.`),n&&qe.log(`Providing updated error messsage: ${n}, replacing original error: ${r}`),n||r||void 0}function se(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error"))}var U=_("runner_claude"),Ot="Claude Code",we="claude-sonnet-4-5-20250929",Ft=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,$r=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(U.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(U.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(U.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function We({config:e,netlify:t,persistSteps:r,aiGateway:o,continueSession:n,priorAgentSessionId:s}){let a=e,{accountType:l,prompt:i,modelVersionOverrides:h}=a,{model:d}=a,u="";if(o){let{token:c,url:y}=o;if(!c||!y)throw new Error("No token or url provided from AI Gateway");if(h?.claude){let p=h?.claude?.[l];if(p){if(!await o.isModelAvailableForProvider("anthropic",p))throw new Error(`Model override '${p}' is not available for anthropic provider`);d=p}}else if(d){if(!await o.isModelAvailableForProvider("anthropic",d))throw new Error(`Model '${d}' is not available for anthropic provider`)}else await o.isModelAvailableForProvider("anthropic",we)?(d=we,U.log(`Using default model: ${we}`)):U.log(`Default model ${we} is not available, proceeding without model specification`);X.env.ANTHROPIC_API_KEY=c,X.env.ANTHROPIC_BASE_URL=y}else if(!X.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let m=[],E=[],I={},w=0,g=0,R,N,S=[J(X.cwd(),"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose",...d?["--model",d]:[],...n?["--continue"]:[],...n&&s?["--resume",s]:[],"-p",i],b=`${X.env.NVM_BIN}/node`;U.log(`Running ${b} ${S.join(" ")}`);let C=t.utils.run(b,S,{all:!0,env:X.env});C.stdin?.end();let v=_e(()=>{r?.({steps:m,duration:g})},250),T=(c,y)=>{let p={...c,id:w};w+=1,E.push(p),m.push(p),y||v.flush(),v(),y&&v.flush()},f=Dr.createInterface({input:C.all});return f.on("error",c=>{U.error("Readline interface error",{error:c.message,stack:c.stack})}),f.on("line",c=>{let y=null;try{y=JSON.parse(c)}catch{U.log("Could not parse line",c)}y?.session_id&&y.session_id!==u&&(u=y.session_id),Array.isArray(y?.message?.content)?y.message.content.forEach(p=>{switch(p.type){case"text":{p.text&&T({message:p.text});break}case"image":{typeof p.source=="object"&&p.source&&p.source.type==="base64"&&p.source.media_type?T({message:`![](data:${p.source.media_type};base64,${p.source.data})`}):U.log(`Unsupported image type 
${p.source?.type}`,p.source);break}case"tool_use":{if(p.name==="Task"){let x=p.input?.description&&`\`${p.input.description}\``;T({title:[Ft(p.name),x].filter(Boolean).join(" ")})}else p.id&&(I[p.id]=p);v.flush();break}case"tool_result":{let x=p.tool_use_id?I[p.tool_use_id]:void 0,ae;if(x){let Q=x.input?.file_path&&Lt.relative(X.cwd(),x.input.file_path),P=Q&&`\`${Q}\``;ae=[Ft(x.name||""),P].filter(Boolean).join(" ")}let rt=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(x?.name||""),q;if(typeof p.content=="string")q=p.content;else if(Array.isArray(p.content)){let Q=[];p.content.forEach(P=>{P?.type==="text"&&typeof P.text=="string"?Q.push(P.text):P?.type==="image"&&typeof P.source=="object"&&P.source?P.source.type==="base64"&&P.source.media_type?Q.push(`![](data:${P.source.media_type};base64,${P.source.data})`):U.log(`Unsupported image type ${P.source.type}`,P.source):U.log(`Unsupported block type ${P?.type}`)}),q=Q.join(`
12
12
 
13
- `)}tt&&q&&(q=`\`\`\`
13
+ `)}rt&&q&&(q=`\`\`\`
14
14
  ${q.trim()}
15
- \`\`\``),T({title:ae,message:q},!0);break}case"thinking":{p.thinking&&T({title:"Thinking",message:p.thinking},!0);break}default:U.log(`Message content type is not supported ${p.type}`,p)}}):y?.type==="result"&&(g=y.duration_ms||0,y.is_error?N=y.result:R=y.result,[E,m].forEach(p=>{p[p.length-1]?.message===R&&p.pop()}))}),await C.catch(c=>{({error:N,result:R}=Dr({catchError:c,runCmd:C,error:N,result:R,runnerName:"Claude"}))}),f.close(),v.flush(),{steps:E,duration:g,result:await ne({initialResult:R,agentName:Pt,hasError:!!N}),error:oe({error:N,agentName:Pt}),isRetryableError:se(N),agentSessionId:u}}var Lt=async()=>{let e=Ft.join(Fr.homedir(),".claude");await Or.rm(e,{recursive:!0,force:!0})};import Ie from"fs/promises";import $t from"os";import We from"path";import K from"process";import $r from"readline";var G=_("runner_codex"),Dt="Codex CLI",kr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(G.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(G.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(G.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function Ve({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:s,prompt:a,modelVersionOverrides:l}=e,{model:i}=e;if(n){let{token:f,url:c}=n;if(!f||!c)throw new Error("No token or url provided from AI Gateway");if(l?.codex){let y=l?.codex?.[s];if(y){if(!await n.isModelAvailableForProvider("openai",y))throw new Error(`Model override '${y}' is not available for openai provider`);i=y}}else if(i&&!await n.isModelAvailableForProvider("openai",i))throw new Error(`Model '${i}' is not available for openai provider`);K.env.OPENAI_API_KEY=f,K.env.OPENAI_BASE_URL=c}else if(!K.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let h=[],d=[],u={},m=0,E=0,I,w,g=`${K.env.NVM_BIN}/node`,R=We.join($t.homedir(),".codex"),N=We.join(R,"config.toml");try{await Ie.mkdir(R,{recursive:!0});let f="";try{f=await Ie.readFile(N,"utf-8")}catch{}f.includes("web_search")||(f.includes("[tools]")?f=f.replace(/\[tools\]/,`[tools]
15
+ \`\`\``),T({title:ae,message:q},!0);break}case"thinking":{p.thinking&&T({title:"Thinking",message:p.thinking},!0);break}default:U.log(`Message content type is not supported ${p.type}`,p)}}):y?.type==="result"&&(g=y.duration_ms||0,y.is_error?N=y.result:R=y.result,[E,m].forEach(p=>{p[p.length-1]?.message===R&&p.pop()}))}),await C.catch(c=>{({error:N,result:R}=$r({catchError:c,runCmd:C,error:N,result:R,runnerName:"Claude"}))}),f.close(),v.flush(),{steps:E,duration:g,result:await ne({initialResult:R,agentName:Ot,hasError:!!N}),error:oe({error:N,agentName:Ot}),isRetryableError:se(N),agentSessionId:u}}var Dt=async()=>{let e=Lt.join(Lr.homedir(),".claude");await Fr.rm(e,{recursive:!0,force:!0})};import Ie from"fs/promises";import kt from"os";import Ve from"path";import K from"process";import kr from"readline";var G=_("runner_codex"),$t="Codex CLI",Ur=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(G.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(G.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(G.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function Je({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:s,prompt:a,modelVersionOverrides:l}=e,{model:i}=e;if(n){let{token:f,url:c}=n;if(!f||!c)throw new Error("No token or url provided from AI Gateway");if(l?.codex){let y=l?.codex?.[s];if(y){if(!await n.isModelAvailableForProvider("openai",y))throw new Error(`Model override '${y}' is not available for openai provider`);i=y}}else if(i&&!await n.isModelAvailableForProvider("openai",i))throw new Error(`Model '${i}' is not available for openai provider`);K.env.OPENAI_API_KEY=f,K.env.OPENAI_BASE_URL=c}else if(!K.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let h=[],d=[],u={},m=0,E=0,I,w,g=`${K.env.NVM_BIN}/node`,R=Ve.join(kt.homedir(),".codex"),N=Ve.join(R,"config.toml");try{await Ie.mkdir(R,{recursive:!0});let f="";try{f=await Ie.readFile(N,"utf-8")}catch{}f.includes("web_search")||(f.includes("[tools]")?f=f.replace(/\[tools\]/,`[tools]
16
16
  web_search = true`):f+=`
17
17
  [tools]
18
18
  web_search = true
19
- `,await Ie.writeFile(N,f,"utf-8"),G.log("Updated Codex config with web_search enabled"))}catch(f){G.warn("Failed to update Codex config",{error:f.message})}let S=[J(K.cwd(),"codex"),"login","--with-api-key"];G.log(`Running ${g} ${S.join(" ")}`);let b=t.utils.run(g,S,{input:K.env.OPENAI_API_KEY,env:{...K.env}});try{await b,G.log("Successfully logged in to Codex")}catch(f){throw G.error("Failed to login to Codex",{error:f.message}),new Error(`Codex login failed: ${f.message}`)}let C=[J(K.cwd(),"codex"),"exec","--yolo","--json","--config","web_search=true",...i?["--model",i]:[],a].filter(Boolean);G.log(`Running ${g} ${C.join(" ")}`);let v=t.utils.run(g,C,{all:!0,env:{...K.env}}),T=$r.createInterface({input:v.all});return T.on("error",f=>{G.error("Readline interface error",{error:f.message,stack:f.stack})}),T.on("line",f=>{let c=null;try{c=JSON.parse(f)}catch{G.log("Could not parse line",f);return}let y=[],p=!1;if(c?.duration_ms&&(E=c.duration_ms,p=!0),c?.type==="local_shell_call")u[c.call_id]=c;else if(c?.type==="local_shell_call_output"){let x=Gr(u[c.call_id],c);x&&(x.id=m,m+=1,d.push(x),h.push(x),y.push(x),p=!0)}else c?.type==="message"&&c.role==="assistant"?I=c.content.map(x=>x.text).join(`
19
+ `,await Ie.writeFile(N,f,"utf-8"),G.log("Updated Codex config with web_search enabled"))}catch(f){G.warn("Failed to update Codex config",{error:f.message})}let S=[J(K.cwd(),"codex"),"login","--with-api-key"];G.log(`Running ${g} ${S.join(" ")}`);let b=t.utils.run(g,S,{input:K.env.OPENAI_API_KEY,env:{...K.env}});try{await b,G.log("Successfully logged in to Codex")}catch(f){throw G.error("Failed to login to Codex",{error:f.message}),new Error(`Codex login failed: ${f.message}`)}let C=[J(K.cwd(),"codex"),"exec","--yolo","--json","--config","web_search=true",...i?["--model",i]:[],a].filter(Boolean);G.log(`Running ${g} ${C.join(" ")}`);let v=t.utils.run(g,C,{all:!0,env:{...K.env}}),T=kr.createInterface({input:v.all});return T.on("error",f=>{G.error("Readline interface error",{error:f.message,stack:f.stack})}),T.on("line",f=>{let c=null;try{c=JSON.parse(f)}catch{G.log("Could not parse line",f);return}let y=[],p=!1;if(c?.duration_ms&&(E=c.duration_ms,p=!0),c?.type==="local_shell_call")u[c.call_id]=c;else if(c?.type==="local_shell_call_output"){let x=Mr(u[c.call_id],c);x&&(x.id=m,m+=1,d.push(x),h.push(x),y.push(x),p=!0)}else c?.type==="message"&&c.role==="assistant"?I=c.content.map(x=>x.text).join(`
20
20
  `):c?.type==="message"&&c.role==="system"&&(w=c.content.map(x=>x.text).join(`
21
- `));p&&(r?.({steps:h,duration:E}),o?.({steps:y,duration:E}))}),await v.catch(f=>{let c=kr({catchError:f,runCmd:v,error:w,result:I,runnerName:"Codex"});w=c.error,I=c.result}),T.close(),{steps:d,duration:E,result:await ne({initialResult:I,agentName:Dt,hasError:!!w}),error:oe({error:w,agentName:Dt}),isRetryableError:se(w)}}var kt=async()=>{let e=We.join($t.homedir(),".codex");await Ie.rm(e,{recursive:!0,force:!0})},Ur=new Set(["bash","-lc"]),Gr=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(s=>!Ur.has(s)),o=r?`Running \`${r.join(" ")}\``:void 0,n;try{n=JSON.parse(t.output).output?.trim(),n&&(n=`\`\`\`
21
+ `));p&&(r?.({steps:h,duration:E}),o?.({steps:y,duration:E}))}),await v.catch(f=>{let c=Ur({catchError:f,runCmd:v,error:w,result:I,runnerName:"Codex"});w=c.error,I=c.result}),T.close(),{steps:d,duration:E,result:await ne({initialResult:I,agentName:$t,hasError:!!w}),error:oe({error:w,agentName:$t}),isRetryableError:se(w)}}var Ut=async()=>{let e=Ve.join(kt.homedir(),".codex");await Ie.rm(e,{recursive:!0,force:!0})},Gr=new Set(["bash","-lc"]),Mr=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(s=>!Gr.has(s)),o=r?`Running \`${r.join(" ")}\``:void 0,n;try{n=JSON.parse(t.output).output?.trim(),n&&(n=`\`\`\`
22
22
  ${n.trim()}
23
- \`\`\``)}catch(s){G.error("Could not decode outputMsg",s,t.output)}return{title:o,message:n}};import Mr from"fs/promises";import jr from"os";import Gt from"path";import z from"process";import Yr from"readline";var ie=_("runner_gemini"),Ut="Gemini CLI",Br=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(ie.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(ie.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(ie.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),Hr={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"};async function Je({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:s,prompt:a,modelVersionOverrides:l}=e,{model:i}=e;if(n){let{token:T,url:f}=n;if(!T||!f)throw new Error("No token or url provided from AI Gateway");if(l?.gemini){let c=l?.gemini?.[s];if(c){if(!await n.isModelAvailableForProvider("gemini",c))throw new Error(`Model override '${c}' is not available for gemini provider`);i=c}}else if(i&&!await n.isModelAvailableForProvider("gemini",i))throw new Error(`Model '${i}' is not available for gemini provider`);z.env.GEMINI_API_KEY=T,z.env.GOOGLE_GEMINI_BASE_URL=f}else if(!z.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let h=[],d=[],u=[],m={},E=0,I=0,w,g,R=[J(z.cwd(),"gemini"),...i?["--model",i]:[],"--yolo","-p",a],N=`${z.env.NVM_BIN}/node`;ie.log(`Running ${N} ${R.join(" ")}`);let S=t.utils.run(N,R,{all:!0,env:z.env});S.stdin?.end();let b=_e(()=>{r?.({steps:h,duration:I}),o?.({steps:d,duration:I}),d=[]},250),C=(T,f)=>{T.id=E,E+=1,u.push(T),h.push(T),d.push(T),f||b.flush(),b(),f&&b.flush()},v=Yr.createInterface({input:S.all});return v.on("error",T=>{ie.error("Readline interface error",{error:T.message,stack:T.stack})}),v.on("line",T=>{let f=null;try{if(T.startsWith("[API Error")){let c=T.match(/\[api error: (.+?)]$/i)?.[1];f={type:"error",value:Te(c,!1)?.error?.message||c||"Gemini encountered error"}}else f=JSON.parse(T)}catch{return}if(f)switch(f.type){case"thought":{let c=f.value;C({title:c?.subject??"Thinking...",message:c?.description},!0);break}case"content":{f.value&&C({message:f.value});break}case"tool_call_request":{let c=f.value,y=Hr[c.name]??c.name,p=c.args?.path||c.args?.absolute_path,x=p&&Gt.relative(z.cwd(),p),ae=c.args?.command,q={title:[y,x&&`\`${x}\``,ae&&`\`${ae}\``].filter(Boolean).join(" ")};m[c.callId]=q,b.flush();break}case"tool_result":{let c=f.value,y=m[c.callId];if(y){let p=[c.resultDisplay,c.responseParts?.functionResponse?.response?.output].find(x=>typeof x=="string"&&x);p&&(y.message=`\`\`\`
23
+ \`\`\``)}catch(s){G.error("Could not decode outputMsg",s,t.output)}return{title:o,message:n}};import jr from"fs/promises";import Yr from"os";import Mt from"path";import z from"process";import Br from"readline";var ie=_("runner_gemini"),Gt="Gemini CLI",Hr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(ie.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(ie.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(ie.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),Kr={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"};async function Xe({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:s,prompt:a,modelVersionOverrides:l}=e,{model:i}=e;if(n){let{token:T,url:f}=n;if(!T||!f)throw new Error("No token or url provided from AI Gateway");if(l?.gemini){let c=l?.gemini?.[s];if(c){if(!await n.isModelAvailableForProvider("gemini",c))throw new Error(`Model override '${c}' is not available for gemini provider`);i=c}}else if(i&&!await n.isModelAvailableForProvider("gemini",i))throw new Error(`Model '${i}' is not available for gemini provider`);z.env.GEMINI_API_KEY=T,z.env.GOOGLE_GEMINI_BASE_URL=f}else if(!z.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let h=[],d=[],u=[],m={},E=0,I=0,w,g,R=[J(z.cwd(),"gemini"),...i?["--model",i]:[],"--yolo","-p",a],N=`${z.env.NVM_BIN}/node`;ie.log(`Running ${N} ${R.join(" ")}`);let S=t.utils.run(N,R,{all:!0,env:z.env});S.stdin?.end();let b=_e(()=>{r?.({steps:h,duration:I}),o?.({steps:d,duration:I}),d=[]},250),C=(T,f)=>{T.id=E,E+=1,u.push(T),h.push(T),d.push(T),f||b.flush(),b(),f&&b.flush()},v=Br.createInterface({input:S.all});return v.on("error",T=>{ie.error("Readline interface error",{error:T.message,stack:T.stack})}),v.on("line",T=>{let f=null;try{if(T.startsWith("[API Error")){let c=T.match(/\[api error: (.+?)]$/i)?.[1];f={type:"error",value:Te(c,!1)?.error?.message||c||"Gemini encountered error"}}else f=JSON.parse(T)}catch{return}if(f)switch(f.type){case"thought":{let c=f.value;C({title:c?.subject??"Thinking...",message:c?.description},!0);break}case"content":{f.value&&C({message:f.value});break}case"tool_call_request":{let c=f.value,y=Kr[c.name]??c.name,p=c.args?.path||c.args?.absolute_path,x=p&&Mt.relative(z.cwd(),p),ae=c.args?.command,q={title:[y,x&&`\`${x}\``,ae&&`\`${ae}\``].filter(Boolean).join(" ")};m[c.callId]=q,b.flush();break}case"tool_result":{let c=f.value,y=m[c.callId];if(y){let p=[c.resultDisplay,c.responseParts?.functionResponse?.response?.output].find(x=>typeof x=="string"&&x);p&&(y.message=`\`\`\`
24
24
  ${p.trim()}
25
- \`\`\``),C(y,!0)}break}case"result":{I=f.duration_ms,w=f.value,[u,h,d].forEach(c=>{c[c.length-1]?.message===w&&c.pop()});break}case"error":{g=f.value;break}case"finished":break;default:{ie.warn("Unhandled message type:",f.type);break}}}),await S.catch(T=>{({error:g,result:w}=Br({catchError:T,runCmd:S,error:g,result:w,runnerName:"Gemini"}))}),v.close(),b.flush(),{steps:u,duration:I,result:await ne({initialResult:w,agentName:Ut,hasError:!!g}),error:oe({error:g,agentName:Ut}),isRetryableError:se(g)}}var Mt=async()=>{let e=Gt.join(jr.homedir(),".gemini");await Mr.rm(e,{recursive:!0,force:!0})};var Kr={codex:{runner:Ve,clean:kt},claude:{runner:qe,clean:Lt},gemini:{runner:Je,clean:Mt}},jt=Kr;var Yt=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:o})=>await A(qr(),"init-stage",async n=>{let s=performance.now();n?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.validateAgent":e.validateAgent,"init.runnerVersion":o||"unknown"});let a=jt[e.runner];if(!a)throw n?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let l=Wr({apiToken:r});at(l);let i=e.useGateway?await ft({netlify:l,config:e}):void 0;n?.setAttributes({"init.aiGateway.created":!!i}),e.validateAgent&&e.errorLogsPath&&n?.setAttributes({"init.errorLogsPath":e.errorLogsPath});let h=wt(({steps:E=[],duration:I})=>{let w=E.map(g=>({...g,title:g.title?B(g.title):void 0,message:g.message?B(g.message):void 0}));return E.length=0,Y(e.id,e.sessionId,{steps:w,duration:I})},t),d=await Be();await Ye(d);let u;e.hasRepo?e.sha?(u=e.sha,n?.setAttributes({"init.sha.source":"provided"})):(u=await St(),await ce(e.id,{sha:u}),n?.setAttributes({"init.sha.source":"current_commit"})):(u=await Ct(),n?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let m=performance.now()-s;return n?.setAttributes({"init.sha":u||"unknown","init.duration.ms":m,"init.status":"success"}),{aiGateway:i,context:l,persistSteps:h,runner:a,sha:u}}),Wr=({apiToken:e})=>({constants:{NETLIFY_API_HOST:Re.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||Re.env.NETLIFY_API_TOKEN,SITE_ID:Re.env.SITE_ID,FUNCTIONS_DIST:Re.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:O}});import{getTracer as Xe}from"@netlify/otel";import Vr from"crypto";import W from"fs/promises";import D from"path";import M from"process";var F=_("context"),Jr=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:M.env.NETLIFY_TEAM_ID,userId:M.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:M.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},Xr=10,zr=async e=>{let{name:t,ext:r}=D.parse(e),o=e,n=D.join(M.cwd(),k,o),s=0;for(;await Zr(n);){if(s>=Xr)throw new Error("Failed to generate context file");o=`${t}-${Vr.randomUUID().slice(0,5)}${r}`,n=D.join(M.cwd(),k,o),s+=1}return o},Zr=async e=>{try{return await W.access(e),!0}catch{return!1}},Qr=async()=>{try{F.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return F.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(o=>o&&typeof o=="object"&&o.key==="catchall-consumer");return r?!r.contextScopes||typeof r.contextScopes!="object"?(F.warn("Catchall consumer missing or 
invalid contextScopes"),null):r:(F.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?F.warn("Netlify features context request timed out"):F.warn("Failed to fetch Netlify features context:",e.message),null}},en=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let o=await r.text();return await W.writeFile(t,o,"utf-8"),!0}catch(r){return r.name==="AbortError"?F.warn(`Download timeout for ${e}`):F.warn(`Failed to download context file ${e}:`,r.message),!1}},xe=null,tn=async()=>{if(xe)return xe;let e=await Qr();if(!e)return[];let t=D.join(M.cwd(),k,$e);await W.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([n,s])=>{if(!s||typeof s!="object"||!s.endpoint||!s.scope)return F.warn(`Invalid scope data for ${n}, skipping...`),null;let a=`${n}.md`,l=D.join(t,a),i=D.join(k,$e,a);return F.log(`Downloading ${s.scope} context...`),await en(s.endpoint,l)?(F.log(`Downloaded: ${i}`),{scope:s.scope,path:i,key:n}):null});return xe=(await Promise.all(r)).filter(n=>n!==null),xe},Bt=async({cliPath:e,netlify:t,config:r,buildErrorContext:o})=>{let n=Jr(t),s=await zr(yt),a=D.join(M.cwd(),k);await W.mkdir(a,{recursive:!0});let l=D.join(k,s),i=D.join(M.cwd(),l),h=D.join(M.cwd(),k,re);try{await W.unlink(h),F.log(`Deleted old results file: ${h}`)}catch{}let d=o?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
25
+ \`\`\``),C(y,!0)}break}case"result":{I=f.duration_ms,w=f.value,[u,h,d].forEach(c=>{c[c.length-1]?.message===w&&c.pop()});break}case"error":{g=f.value;break}case"finished":break;default:{ie.warn("Unhandled message type:",f.type);break}}}),await S.catch(T=>{({error:g,result:w}=Hr({catchError:T,runCmd:S,error:g,result:w,runnerName:"Gemini"}))}),v.close(),b.flush(),{steps:u,duration:I,result:await ne({initialResult:w,agentName:Gt,hasError:!!g}),error:oe({error:g,agentName:Gt}),isRetryableError:se(g)}}var jt=async()=>{let e=Mt.join(Yr.homedir(),".gemini");await jr.rm(e,{recursive:!0,force:!0})};var qr={codex:{runner:Je,clean:Ut},claude:{runner:We,clean:Dt},gemini:{runner:Xe,clean:jt}},Yt=qr;var Bt=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:o})=>await A(Wr(),"init-stage",async n=>{let s=performance.now();n?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.validateAgent":e.validateAgent,"init.runnerVersion":o||"unknown"});let a=Yt[e.runner];if(!a)throw n?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let l=Vr({apiToken:r});lt(l);let i=e.useGateway?await gt({netlify:l,config:e}):void 0;n?.setAttributes({"init.aiGateway.created":!!i}),e.validateAgent&&e.errorLogsPath&&n?.setAttributes({"init.errorLogsPath":e.errorLogsPath});let h=It(({steps:E=[],duration:I})=>{let w=E.map(g=>({...g,title:g.title?B(g.title):void 0,message:g.message?B(g.message):void 0}));return E.length=0,Y(e.id,e.sessionId,{steps:w,duration:I})},t),d=await He();await Ye(d);let u;e.hasRepo?e.sha?(u=e.sha,n?.setAttributes({"init.sha.source":"provided"})):(u=await Ct(),await ce(e.id,{sha:u}),n?.setAttributes({"init.sha.source":"current_commit"})):(u=await bt(),n?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let m=performance.now()-s;return n?.setAttributes({"init.sha":u||"unknown","init.duration.ms":m,"init.status":"success"}),{aiGateway:i,context:l,persistSteps:h,runner:a,sha:u}}),Vr=({apiToken:e})=>({constants:{NETLIFY_API_HOST:Re.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||Re.env.NETLIFY_API_TOKEN,SITE_ID:Re.env.SITE_ID,FUNCTIONS_DIST:Re.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:O}});import{getTracer as ze}from"@netlify/otel";import Jr from"crypto";import W from"fs/promises";import D from"path";import M from"process";var F=_("context"),Xr=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:M.env.NETLIFY_TEAM_ID,userId:M.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:M.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},zr=10,Zr=async e=>{let{name:t,ext:r}=D.parse(e),o=e,n=D.join(M.cwd(),k,o),s=0;for(;await Qr(n);){if(s>=zr)throw new Error("Failed to generate context file");o=`${t}-${Jr.randomUUID().slice(0,5)}${r}`,n=D.join(M.cwd(),k,o),s+=1}return o},Qr=async e=>{try{return await W.access(e),!0}catch{return!1}},en=async()=>{try{F.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return F.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(o=>o&&typeof o=="object"&&o.key==="catchall-consumer");return r?!r.contextScopes||typeof r.contextScopes!="object"?(F.warn("Catchall consumer missing or 
invalid contextScopes"),null):r:(F.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?F.warn("Netlify features context request timed out"):F.warn("Failed to fetch Netlify features context:",e.message),null}},tn=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let o=await r.text();return await W.writeFile(t,o,"utf-8"),!0}catch(r){return r.name==="AbortError"?F.warn(`Download timeout for ${e}`):F.warn(`Failed to download context file ${e}:`,r.message),!1}},xe=null,rn=async()=>{if(xe)return xe;let e=await en();if(!e)return[];let t=D.join(M.cwd(),k,$e);await W.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([n,s])=>{if(!s||typeof s!="object"||!s.endpoint||!s.scope)return F.warn(`Invalid scope data for ${n}, skipping...`),null;let a=`${n}.md`,l=D.join(t,a),i=D.join(k,$e,a);return F.log(`Downloading ${s.scope} context...`),await tn(s.endpoint,l)?(F.log(`Downloaded: ${i}`),{scope:s.scope,path:i,key:n}):null});return xe=(await Promise.all(r)).filter(n=>n!==null),xe},Ht=async({cliPath:e,netlify:t,config:r,buildErrorContext:o})=>{let n=Xr(t),s=await Zr(_t),a=D.join(M.cwd(),k);await W.mkdir(a,{recursive:!0});let l=D.join(k,s),i=D.join(M.cwd(),l),h=D.join(M.cwd(),k,re);try{await W.unlink(h),F.log(`Deleted old results file: ${h}`)}catch{}let d=o?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
26
26
  Your task is to analyze and fix the build errors.
27
27
  Don't apply techniques of reverting changes. Apply fixes related to errors.
28
28
  Don't try to run build by yourself. Just fix the errors.
@@ -54,7 +54,7 @@ ${N.response}
54
54
  `)}
55
55
 
56
56
  </session_history_context>
57
- `}let E=await tn(),I="";E.length>0&&(I=`
57
+ `}let E=await rn(),I="";E.length>0&&(I=`
58
58
  <netlify_features_context>
59
59
  If the user request is explicitly related to a specific Netlify feature (e.g., Netlify Forms, Netlify Functions, etc.), you MUST review the relevant documentation below in addition to reviewing the project files.
60
60
  DO NOT force the use of any Netlify feature if the user request does not explicitly require it or if the project has alternative implementations in place already.
@@ -123,15 +123,15 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
123
123
  </request>
124
124
 
125
125
  Use the following file for the complete context of the ask, the environment, and what's available. ${i} You MUST READ ALL OF IT. Make sure to read it first. Never cite or paraphrase private context.
126
- `),w};var rn=_("prompt"),Ht=async({cliPath:e,config:t,netlify:r,buildErrorContext:o})=>{let n=await Bt({cliPath:e,config:t,netlify:r,buildErrorContext:o});return process.env.AGENT_RUNNER_DEBUG&&rn.log("Contextful Prompt:",n),{prompt:n}};var Ne=_("inference_stage"),Kt=5,ve=async e=>{let{cliPath:t,config:r,context:o,buildErrors:n,runner:s,persistSteps:a,aiGateway:l,attempt:i,contextPrefix:h,priorAgentSessionId:d}=e;Ne.log(`Running inference stage, attempt ${i} of ${Kt}`);let u=await A(Xe(),"inference-stage",async m=>{m?.setAttributes({"inference.attempt":i||1}),gt();let{prompt:E}=await A(Xe(),"compose-prompt",async()=>await Ht({cliPath:t,config:r,buildErrorContext:nn(n),netlify:o})),I=`
126
+ `),w};var nn=_("prompt"),Kt=async({cliPath:e,config:t,netlify:r,buildErrorContext:o})=>{let n=await Ht({cliPath:e,config:t,netlify:r,buildErrorContext:o});return process.env.AGENT_RUNNER_DEBUG&&nn.log("Contextful Prompt:",n),{prompt:n}};var Ne=_("inference_stage"),qt=5,ve=async e=>{let{cliPath:t,config:r,context:o,buildErrors:n,runner:s,persistSteps:a,aiGateway:l,attempt:i,contextPrefix:h,priorAgentSessionId:d}=e;Ne.log(`Running inference stage, attempt ${i} of ${qt}`);let u=await A(ze(),"inference-stage",async m=>{m?.setAttributes({"inference.attempt":i||1}),mt();let{prompt:E}=await A(ze(),"compose-prompt",async()=>await Kt({cliPath:t,config:r,buildErrorContext:on(n),netlify:o})),I=`
127
127
  ${h||""}
128
128
  ${E}
129
- `.trim(),w={...r,prompt:I},g=await A(Xe(),`run-${r.runner}`,async()=>await s({aiGateway:l,config:w,netlify:o,persistSteps:a,continueSession:!!(i&&i>1),priorAgentSessionId:d}));return g.result&&(g.result=B(g.result)),g.error&&(g.error=B(g.error)),await a.flush(),g});if(u.error){if(Ne.error("Runner failed",{stepsCount:u.steps.length,duration:u.duration,error:u.error,isRetryableError:u.isRetryableError,attempt:i||1,agentSessionId:u.agentSessionId}),u.isRetryableError&&(!i||i<Kt))return Ne.log("Retrying inference stage"),await new Promise(E=>setTimeout(E,5e3)),{runnerResult:(await ve({...e,attempt:(i||1)+1,priorAgentSessionId:u.agentSessionId,contextPrefix:u.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw Ne.log("Do not retry inference stage"),new Error(u.error)}return{runnerResult:u}},nn=e=>!e||e.length===0?"":`
129
+ `.trim(),w={...r,prompt:I},g=await A(ze(),`run-${r.runner}`,async()=>await s({aiGateway:l,config:w,netlify:o,persistSteps:a,continueSession:!!(i&&i>1),priorAgentSessionId:d}));return g.result&&(g.result=B(g.result)),g.error&&(g.error=B(g.error)),await a.flush(),g});if(u.error){if(Ne.error("Runner failed",{stepsCount:u.steps.length,duration:u.duration,error:u.error,isRetryableError:u.isRetryableError,attempt:i||1,agentSessionId:u.agentSessionId}),u.isRetryableError&&(!i||i<qt))return Ne.log("Retrying inference stage"),await new Promise(E=>setTimeout(E,5e3)),{runnerResult:(await ve({...e,attempt:(i||1)+1,priorAgentSessionId:u.agentSessionId,contextPrefix:u.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw Ne.log("Do not retry inference stage"),new Error(u.error)}return{runnerResult:u}},on=e=>!e||e.length===0?"":`
130
130
  Deploy failed failed. Here are the errors to review on the latest build:
131
131
 
132
132
  Below are all of the logs with potential issues that we extracted. Some of them may be false positives, discern them carefully and ensure fixes are relevant.
133
133
 
134
134
  ${e.pop()}
135
- `;import an from"process";import{getTracer as ze}from"@netlify/otel";import{getTracer as on}from"@netlify/otel";var pe=_("deploy"),qt=async e=>await A(on(),"create-preview-deploy",async t=>sn(e,t)),sn=async({netlify:e,hasRepo:t,skipBuild:r,message:o="Agent Preview",deploySubdomain:n,cliPath:s,filter:a},l)=>{try{let i=["deploy","--message",`"${o}"`,"--json","--draft","--verbose"];t||(pe.log("Deploy: Uploading source zip"),i.push("--upload-source-zip")),n&&i.push("--alias",n),a&&i.push("--filter",a),r?(pe.log("Deploy: Skipping build"),i.push("--no-build")):i.push("--context","deploy-preview");let h=s||"netlify";pe.log(`Running: ${h} ${i.join(" ")}`),l?.setAttributes({cmd:h,args:i});let d=await e.utils.run(h,i,{stdio:["ignore","pipe","pipe"]}),u=JSON.parse(d.stdout.trim());l?.setAttributes({success:!0,deployId:u.deploy_id,deployUrl:u.deploy_url,siteId:u.site_id}),pe.log(`
136
- Preview deploy created successfully:`,{deployId:u.deploy_id,deployUrl:u.deploy_url,siteId:u.site_id});let m={deployId:u.deploy_id,previewUrl:u.deploy_url,logsUrl:u.logs,siteId:u.site_id};return t||(m.sourceZipFilename=u.source_zip_filename),m}catch(i){throw pe.error("Failed to create preview deploy via CLI:",i),l?.setAttributes({success:!1,error:i.message}),i}};var fe=_("deploy_stage"),Ze=async e=>await A(ze(),"run-deploy-stage",async()=>ln(e)),ln=async({cliPath:e,config:t,context:r,result:o,filter:n})=>{let s=await A(ze(),"get-runner-diffs",async()=>await Nt({config:t,netlify:r}));if(fe.info("Resolved git",{hasChanges:s.hasChanges,ignored:s.ignored??[]}),!s.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:a,resultDiff:l,diffBinary:i,resultDiffBinary:h}=s,d=!0;fe.log("Preview deploy condition check:",{resultUndefined:o===void 0,resultType:typeof o,hasChanges:d,wouldCreatePreview:o!==void 0&&d});let u=null;if(o!==void 0&&d)try{let m;try{let E=await A(ze(),"get-runner-session",async()=>await ut(t.id,t.sessionId));E?.title&&(m=E.title)}catch(E){fe.warn("Failed to fetch session title, using fallback message:",E.message)}await Y(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),u=await qt({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:m,skipBuild:!1,deploySubdomain:It(t.id,an.env.SITE_NAME),filter:n})}catch(m){return fe.warn("Failed to create preview deploy (continuing with agent run):",m),{diff:a,resultDiff:l,hasChanges:d,previewInfo:null,diffBinary:i,resultDiffBinary:h,deployError:m instanceof Error?m.message:String(m)}}return fe.log("Git status",{hasDiff:!!a,hasChanges:d}),{diff:a,resultDiff:l,hasChanges:d,previewInfo:u,diffBinary:i,resultDiffBinary:h}};import{getTracer as Ae}from"@netlify/otel";async function Wt(e,t){let{maxRetries:r,baseDelay:o,onRetry:n}=t,s;for(let a=1;a<=r;a++)try{return await e()}catch(l){if(s=l,a===r)throw s;n&&n(a,s),await new Promise(i=>setTimeout(i,o*a))}throw s}var $=_("cleanup_stage"),Vt=async e=>await A(Ae(),"cleanup-stage",async()=>un(e)),Qe=1024*1024*10,un=async({config:e,diff:t,result:r,duration:o,resultDiff:n,diffBinary:s,resultDiffBinary:a,previewInfo:l})=>{let i={result:r||"Done",duration:o};if(l&&l.deployId&&(i.deploy_id=l.deployId),l&&l.sourceZipFilename&&(i.result_zip_file_name=l.sourceZipFilename),t||s||n||a)try{$.log("Getting pre-signed URLs for diff upload");let d=await dt(e.id,e.sessionId),u=[];(t||s)&&u.push(Pe(d.result.upload_url,s||t).then(()=>{i.result_diff_s3_key=d.result.s3_key,$.log("Successfully uploaded result_diff to S3")})),(n||a)&&u.push(Pe(d.cumulative.upload_url,a||n).then(()=>{i.cumulative_diff_s3_key=d.cumulative.s3_key,$.log("Successfully uploaded cumulative_diff to S3")})),$.log(`Uploading ${u.length} diff(s) to S3 in parallel`),await Promise.all(u),(n||a)&&($.log("Updating agent runner with cumulative diff S3 key"),await A(Ae(),"update-runner",async()=>{await ce(e.id,{result_diff_s3_key:d.cumulative.s3_key})}))}catch(d){$.error("S3 upload failed, falling back to inline diffs:",d);let u=Buffer.byteLength(t||s||""),m=Buffer.byteLength(a||n||"");if(u>Qe||m>Qe){let E=`Diffs exceed maximum inline size of ${Qe} bytes.`;throw $.error(E),new Error(E)}i.result_diff=t,i.result_diff_binary=s,(n||a)&&(i.cumulative_diff=n,i.cumulative_diff_binary=a,$.log("Updating agent runner with inline diffs (fallback)"),await A(Ae(),"update-runner",async()=>{await ce(e.id,{result_diff:n,result_diff_binary:a})}))}else $.log("No diffs to upload");return $.log("Updated agent runner with result"),await Wt(async()=>await 
A(Ae(),"update-runner-session",()=>Y(e.id,e.sessionId,i)),{maxRetries:3,baseDelay:1e3,onRetry:(d,u)=>{$.error(`Error updating agent runner session (attempt ${d}):`,u),$.log("Retrying...")}}),$.log("Finished updating agent runner with result"),{sessionUpdate:i}};import{getTracer as Jt,shutdownTracers as dn,withActiveSpan as Xt}from"@netlify/otel";var pn=cn(import.meta.url),zt=pn("../package.json"),Zt=_("pipeline_index"),Se=3,Qt=async({config:e,apiToken:t,cliPath:r="netlify",cwd:o,errorLogsPath:n,filter:s,tracing:a={}})=>{let l,{withStageTimer:i}=mt(V.timeUnits.hours(4)),h=await st(zt.version,e.id,a);try{await Xt(Jt(),"run-pipeline",{},h,async()=>{let d,{aiGateway:u,context:m,persistSteps:E,runner:I,sha:w}=await i("init",()=>Yt({config:e,apiToken:t,cliPath:r,cwd:o,errorLogsPath:n,filter:s,runnerVersion:zt.version}),V.timeUnits.minutes(10));l=I.clean,e.sha=w;let{runnerResult:g}=await i("inference",()=>ve({cliPath:r,config:e,context:m,runner:I.runner,persistSteps:E,aiGateway:u}));await Y(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let R=await i("deploy",()=>Ze({cliPath:r,config:e,context:m,result:g.result,filter:s})),N=g,S=[];if(R.hasChanges&&R.deployError){S.push(it(R.deployError));let c=1;for(;c<=Se&&!R.previewInfo;)Zt.log(`Deploy attempt had errors. Retrying. ${c}/${Se}`),await Xt(Jt(),"deploy-stage",async y=>{y?.setAttributes({"stage.attempt":c});let{runnerResult:p}=await i(`inference-retry-${c}`,()=>ve({cliPath:r,config:e,context:m,runner:I.runner,persistSteps:E,aiGateway:u,buildErrors:S,priorAgentSessionId:g.agentSessionId}));N={...p,steps:[...N.steps||[],...p.steps||[]],duration:(N.duration||0)+(p.duration||0)},R=await i(`deploy-retry-${c}`,()=>Ze({cliPath:r,config:e,context:m,result:p.result,filter:s})),R.deployError&&S.push(R.deployError),c++});c>Se&&!R.previewInfo&&(d=new Error(`Deploy validation failed after ${Se} attempts`))}let{diff:b,resultDiff:C,previewInfo:v,diffBinary:T,resultDiffBinary:f}=R;if(await i("cleanup",()=>Vt({config:e,diff:b,result:N.result,duration:N.duration,resultDiff:C,diffBinary:T,resultDiffBinary:f,previewInfo:v}),V.timeUnits.minutes(10)),d)throw d;process.env.NETLIFY_LOCAL_MODE||(await l?.(),await bt())})}catch(d){Zt.error("Got error while running pipeline",d),await l?.();let u=d instanceof Error&&d.message;throw await Y(e.id,e.sessionId,{result:u||"Encountered error when running agent",state:"error"}),d}finally{await dn()}};import L from"process";var gn="claude",mn=e=>(e??[]).filter(t=>t.request&&t.response),hn=e=>(e??[]).filter(t=>t.site_context),er=_("config"),tr=()=>{let e=L.env.NETLIFY_AGENT_RUNNER_ID,t=L.env.NETLIFY_AGENT_RUNNER_SESSION_ID;if(!e||!t)throw new Error("ID of agent runner is not provided");let r=L.env.NETLIFY_AGENT_RUNNER_RESULT_BRANCH,o=L.env.NETLIFY_AGENT_RUNNER_PROMPT;if(!o)throw new Error("Prompt is not provided");let n=L.env.NETLIFY_AGENT_RUNNER_AGENT||gn,s=L.env.NETLIFY_AGENT_RUNNER_MODEL,a=L.env.NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED==="1",l=L.env.NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED==="1",i=L.env.ERROR_LOGS_PATH,h=Te(L.env.NETLIFY_AGENT_RUNNER_CONTEXT,!0,er),d=mn(h),u=hn(h),m=L.env.NETLIFY_AGENT_RUNNER_HAS_REPO!=="0",E=!L.env.NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED,I=L.env.NETLIFY_AGENT_RUNNER_SHA,w=En(),g=Rt(),R={id:e,sessionId:t,resultBranch:r,prompt:o,runner:n,model:s,validateAgent:a,errorLogsPath:i,sessionHistoryContext:d,siteContext:u,hasRepo:m,useGateway:E,sha:I,accountType:w,validateAgentWithBuild:l,modelVersionOverrides:g};return 
er.log({fullConfig:R}),R},En=()=>{let e=L.env.NETLIFY_TEAM_TYPE;return e?e.includes("personal")?Ge:e.includes("pro")?"pro":e.startsWith("enterprise")?Me:e.startsWith("free")?je:Ue:Ue};var rr=_("bin_cmd"),Z=yn(et.argv.slice(2),{string:["auth","cwd","cli-path","error-logs-path","filter","trace-exporter-url","traceparent"]});try{let e=tr();await Qt({config:e,apiToken:Z.auth,cwd:Z.cwd,cliPath:Z["cli-path"],errorLogsPath:Z["error-logs-path"],filter:Z.filter,tracing:{exporterUrl:Z["trace-exporter-url"],traceparent:Z.traceparent}}),rr.info("Finished agent"),et.exit(0)}catch(e){rr.error("Error running agent pipeline:",e),et.exit(1)}
+ `;import ln from"process";import{getTracer as Ze}from"@netlify/otel";import{getTracer as sn}from"@netlify/otel";var pe=_("deploy"),Wt=async e=>await A(sn(),"create-preview-deploy",async t=>an(e,t)),an=async({netlify:e,hasRepo:t,skipBuild:r,message:o="Agent Preview",deploySubdomain:n,cliPath:s,filter:a},l)=>{try{let i=["deploy","--message",`"${o}"`,"--json","--draft","--verbose"];t||(pe.log("Deploy: Uploading source zip"),i.push("--upload-source-zip")),n&&i.push("--alias",n),a&&i.push("--filter",a),r?(pe.log("Deploy: Skipping build"),i.push("--no-build")):i.push("--context","deploy-preview");let h=s||"netlify";pe.log(`Running: ${h} ${i.join(" ")}`),l?.setAttributes({cmd:h,args:i});let d=await e.utils.run(h,i,{stdio:["ignore","pipe","pipe"]}),u=JSON.parse(d.stdout.trim());l?.setAttributes({success:!0,deployId:u.deploy_id,deployUrl:u.deploy_url,siteId:u.site_id}),pe.log(`
+ Preview deploy created successfully:`,{deployId:u.deploy_id,deployUrl:u.deploy_url,siteId:u.site_id});let m={deployId:u.deploy_id,previewUrl:u.deploy_url,logsUrl:u.logs,siteId:u.site_id};return t||(m.sourceZipFilename=u.source_zip_filename),m}catch(i){throw pe.error("Failed to create preview deploy via CLI:",i),l?.setAttributes({success:!1,error:i.message}),i}};var fe=_("deploy_stage"),Qe=async e=>await A(Ze(),"run-deploy-stage",async()=>un(e)),un=async({cliPath:e,config:t,context:r,result:o,filter:n})=>{let s=await A(Ze(),"get-runner-diffs",async()=>await At({config:t,netlify:r}));if(fe.info("Resolved git",{hasChanges:s.hasChanges,ignored:s.ignored??[]}),!s.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:a,resultDiff:l,diffBinary:i,resultDiffBinary:h}=s,d=!0;fe.log("Preview deploy condition check:",{resultUndefined:o===void 0,resultType:typeof o,hasChanges:d,wouldCreatePreview:o!==void 0&&d});let u=null;if(o!==void 0&&d)try{let m;try{let E=await A(Ze(),"get-runner-session",async()=>await ct(t.id,t.sessionId));E?.title&&(m=E.title)}catch(E){fe.warn("Failed to fetch session title, using fallback message:",E.message)}await Y(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),u=await Wt({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:m,skipBuild:!1,deploySubdomain:Rt(t.id,ln.env.SITE_NAME),filter:n})}catch(m){return fe.warn("Failed to create preview deploy (continuing with agent run):",m),{diff:a,resultDiff:l,hasChanges:d,previewInfo:null,diffBinary:i,resultDiffBinary:h,deployError:m instanceof Error?m.message:String(m)}}return fe.log("Git status",{hasDiff:!!a,hasChanges:d}),{diff:a,resultDiff:l,hasChanges:d,previewInfo:u,diffBinary:i,resultDiffBinary:h}};import{getTracer as Ae}from"@netlify/otel";async function Vt(e,t){let{maxRetries:r,baseDelay:o,onRetry:n}=t,s;for(let a=1;a<=r;a++)try{return await e()}catch(l){if(s=l,a===r)throw s;n&&n(a,s),await new Promise(i=>setTimeout(i,o*a))}throw s}var $=_("cleanup_stage"),Jt=async e=>await A(Ae(),"cleanup-stage",async()=>cn(e)),et=1024*1024*10,cn=async({config:e,diff:t,result:r,duration:o,resultDiff:n,diffBinary:s,resultDiffBinary:a,previewInfo:l})=>{let i={result:r||"Done",duration:o};if(l&&l.deployId&&(i.deploy_id=l.deployId),l&&l.sourceZipFilename&&(i.result_zip_file_name=l.sourceZipFilename),t||s||n||a)try{$.log("Getting pre-signed URLs for diff upload");let d=await pt(e.id,e.sessionId),u=[];(t||s)&&u.push(Pe(d.result.upload_url,s||t).then(()=>{i.result_diff_s3_key=d.result.s3_key,$.log("Successfully uploaded result_diff to S3")})),(n||a)&&u.push(Pe(d.cumulative.upload_url,a||n).then(()=>{i.cumulative_diff_s3_key=d.cumulative.s3_key,$.log("Successfully uploaded cumulative_diff to S3")})),$.log(`Uploading ${u.length} diff(s) to S3 in parallel`),await Promise.all(u),(n||a)&&($.log("Updating agent runner with cumulative diff S3 key"),await A(Ae(),"update-runner",async()=>{await ce(e.id,{result_diff_s3_key:d.cumulative.s3_key})}))}catch(d){$.error("S3 upload failed, falling back to inline diffs:",d);let u=Buffer.byteLength(t||s||""),m=Buffer.byteLength(a||n||"");if(u>et||m>et){let E=`Diffs exceed maximum inline size of ${et} bytes.`;throw $.error(E),new Error(E)}i.result_diff=t,i.result_diff_binary=s,(n||a)&&(i.cumulative_diff=n,i.cumulative_diff_binary=a,$.log("Updating agent runner with inline diffs (fallback)"),await A(Ae(),"update-runner",async()=>{await ce(e.id,{result_diff:n,result_diff_binary:a})}))}else $.log("No diffs to upload");return $.log("Updated agent runner with result"),await Vt(async()=>await 
A(Ae(),"update-runner-session",()=>Y(e.id,e.sessionId,i)),{maxRetries:3,baseDelay:1e3,onRetry:(d,u)=>{$.error(`Error updating agent runner session (attempt ${d}):`,u),$.log("Retrying...")}}),$.log("Finished updating agent runner with result"),{sessionUpdate:i}};import{getTracer as Xt,shutdownTracers as pn,withActiveSpan as zt}from"@netlify/otel";var fn=dn(import.meta.url),Zt=fn("../package.json"),Qt=_("pipeline_index"),Se=3,er=async({config:e,apiToken:t,cliPath:r="netlify",cwd:o,errorLogsPath:n,filter:s,tracing:a={}})=>{let l,{withStageTimer:i}=ht(V.timeUnits.hours(4)),h=await it(Zt.version,e.id,a);try{await zt(Xt(),"run-pipeline",{},h,async()=>{let d,{aiGateway:u,context:m,persistSteps:E,runner:I,sha:w}=await i("init",()=>Bt({config:e,apiToken:t,cliPath:r,cwd:o,errorLogsPath:n,filter:s,runnerVersion:Zt.version}),V.timeUnits.minutes(10));l=I.clean,e.sha=w;let{runnerResult:g}=await i("inference",()=>ve({cliPath:r,config:e,context:m,runner:I.runner,persistSteps:E,aiGateway:u}));await Y(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let R=await i("deploy",()=>Qe({cliPath:r,config:e,context:m,result:g.result,filter:s})),N=g,S=[];if(R.hasChanges&&R.deployError){S.push(at(R.deployError));let c=1;for(;c<=Se&&!R.previewInfo;)Qt.log(`Deploy attempt had errors. Retrying. ${c}/${Se}`),await zt(Xt(),"deploy-stage",async y=>{y?.setAttributes({"stage.attempt":c});let{runnerResult:p}=await i(`inference-retry-${c}`,()=>ve({cliPath:r,config:e,context:m,runner:I.runner,persistSteps:E,aiGateway:u,buildErrors:S,priorAgentSessionId:g.agentSessionId}));N={...p,steps:[...N.steps||[],...p.steps||[]],duration:(N.duration||0)+(p.duration||0)},R=await i(`deploy-retry-${c}`,()=>Qe({cliPath:r,config:e,context:m,result:p.result,filter:s})),R.deployError&&S.push(R.deployError),c++});c>Se&&!R.previewInfo&&(d=new Error(`Deploy validation failed after ${Se} attempts`))}let{diff:b,resultDiff:C,previewInfo:v,diffBinary:T,resultDiffBinary:f}=R;if(await i("cleanup",()=>Jt({config:e,diff:b,result:N.result,duration:N.duration,resultDiff:C,diffBinary:T,resultDiffBinary:f,previewInfo:v}),V.timeUnits.minutes(10)),d)throw d;process.env.NETLIFY_LOCAL_MODE||(await l?.(),await Pt())})}catch(d){Qt.error("Got error while running pipeline",d),await l?.();let u=d instanceof Error&&d.message;throw await Y(e.id,e.sessionId,{result:u||"Encountered error when running agent",state:"error"}),d}finally{await pn()}};import L from"process";var mn="claude",hn=e=>(e??[]).filter(t=>t.request&&t.response),En=e=>(e??[]).filter(t=>t.site_context),tr=_("config"),rr=()=>{let e=L.env.NETLIFY_AGENT_RUNNER_ID,t=L.env.NETLIFY_AGENT_RUNNER_SESSION_ID;if(!e||!t)throw new Error("ID of agent runner is not provided");let r=L.env.NETLIFY_AGENT_RUNNER_RESULT_BRANCH,o=L.env.NETLIFY_AGENT_RUNNER_PROMPT;if(!o)throw new Error("Prompt is not provided");let n=L.env.NETLIFY_AGENT_RUNNER_AGENT||mn,s=L.env.NETLIFY_AGENT_RUNNER_MODEL,a=L.env.NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED==="1",l=L.env.NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED==="1",i=L.env.ERROR_LOGS_PATH,h=Te(L.env.NETLIFY_AGENT_RUNNER_CONTEXT,!0,tr),d=hn(h),u=En(h),m=L.env.NETLIFY_AGENT_RUNNER_HAS_REPO!=="0",E=!L.env.NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED,I=L.env.NETLIFY_AGENT_RUNNER_SHA,w=yn(),g=xt(),R={id:e,sessionId:t,resultBranch:r,prompt:o,runner:n,model:s,validateAgent:a,errorLogsPath:i,sessionHistoryContext:d,siteContext:u,hasRepo:m,useGateway:E,sha:I,accountType:w,validateAgentWithBuild:l,modelVersionOverrides:g};return 
tr.log({fullConfig:R}),R},yn=()=>{let e=L.env.NETLIFY_TEAM_TYPE;return e?e.includes("personal")?Ge:e.includes("pro")?"pro":e.startsWith("enterprise")?Me:e.startsWith("free")?je:Ue:Ue};var nr=_("bin_cmd"),Z=_n(tt.argv.slice(2),{string:["auth","cwd","cli-path","error-logs-path","filter","trace-exporter-url","traceparent"]});try{let e=rr();await er({config:e,apiToken:Z.auth,cwd:Z.cwd,cliPath:Z["cli-path"],errorLogsPath:Z["error-logs-path"],filter:Z.filter,tracing:{exporterUrl:Z["trace-exporter-url"],traceparent:Z.traceparent}}),nr.info("Finished agent"),tt.exit(0)}catch(e){nr.error("Error running agent pipeline:",e),tt.exit(1)}
  //# sourceMappingURL=bin.js.map
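For readability, here is an editorial, un-minified sketch of the retry helper that appears in the bundle above (named Wt in 1.48.1-alpha and Vt in 1.48.2-alpha); only the mangled identifier differs between versions. The descriptive names and comments below are illustrative and are not part of the published code.

// Editorial sketch of the bundle's retry helper; names are illustrative.
async function retryWithBackoff(operation, { maxRetries, baseDelay, onRetry }) {
  let lastError;
  for (let attempt = 1; attempt <= maxRetries; attempt++) {
    try {
      return await operation();
    } catch (error) {
      lastError = error;
      if (attempt === maxRetries) throw lastError;
      onRetry?.(attempt, lastError);
      // Linear backoff: wait baseDelay * attempt before the next try.
      await new Promise((resolve) => setTimeout(resolve, baseDelay * attempt));
    }
  }
  throw lastError;
}

In the cleanup stage shown above, the bundle invokes this helper with maxRetries: 3 and baseDelay: 1e3 when updating the agent runner session, logging each failed attempt via onRetry before retrying.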
package/dist/index.js CHANGED
@@ -1,27 +1,27 @@
- import{createRequire as Jr}from"module";import{createTracerProvider as Bt}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as Ve}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as Ht}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as Kt}from"@netlify/otel";import{propagation as Je,context as Xe,W3CTraceContextPropagator as qt}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as Wt}from"@opentelemetry/exporter-trace-otlp-grpc";function w(e){let t=!process.env.VITEST;return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var Se=w("tracing"),ze=async(e,t,r)=>(await Bt({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new Ve(new Ae),new Ve(new Wt({url:r.exporterUrl}))],instrumentations:[new Ht({skipHeaders:!0})]}),r.traceparent?(Je.setGlobalPropagator(new qt),Je.extract(Xe.active(),{traceparent:r.traceparent,isRemote:!0})):Xe.active());function A(e,t,r){return Se.log(`\u23F3 TRACE: ${t} starting...`),Kt(e,t,r)}var Ae=class{export(t,r){for(let o of t)this.logSpan(o);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,o=t.attributes,n=[];for(let[l,i]of Object.entries(o))l.includes("duration")&&typeof i=="number"?n.push(`${l}=${i.toFixed(2)}ms`):n.push(`${l}=${i}`);let s=t.status?.code===2?"\u274C":"\u2705",a=n.length>0?` [${n.join(", ")}]`:"";Se.log(`${s} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${a}`),t.status?.code===2&&t.status.message&&Se.log(` \u274C Error: ${t.status.message}`)}};var Vt=["error","failed","exception","fatal","panic","abort","crash"];function Ze(e){let t=e.split(`
- `),r=[],o=-1,n=0;for(;n<t.length;){let l=t[n].slice(0,500).toLowerCase();if(Vt.some(h=>l.includes(h))){let h=Math.max(0,n-10,o+1),d=Math.min(t.length-1,n+20),c=[];for(let m=h;m<=d;m++)c.push(t[m]);r.push(c.join(`
+ import{createRequire as Xr}from"module";import{createTracerProvider as Ht}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as Je}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as Kt}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as qt}from"@netlify/otel";import{propagation as Xe,context as ze,W3CTraceContextPropagator as Wt}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as Vt}from"@opentelemetry/exporter-trace-otlp-grpc";function _(e){let t=!process.env.VITEST;return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var Se=_("tracing"),Ze=async(e,t,r)=>(await Ht({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new Je(new Ae),new Je(new Vt({url:r.exporterUrl}))],instrumentations:[new Kt({skipHeaders:!0})]}),r.traceparent?(Xe.setGlobalPropagator(new Wt),Xe.extract(ze.active(),{traceparent:r.traceparent,isRemote:!0})):ze.active());function A(e,t,r){return Se.log(`\u23F3 TRACE: ${t} starting...`),qt(e,t,r)}var Ae=class{export(t,r){for(let o of t)this.logSpan(o);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,o=t.attributes,n=[];for(let[l,i]of Object.entries(o))l.includes("duration")&&typeof i=="number"?n.push(`${l}=${i.toFixed(2)}ms`):n.push(`${l}=${i}`);let s=t.status?.code===2?"\u274C":"\u2705",a=n.length>0?` [${n.join(", ")}]`:"";Se.log(`${s} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${a}`),t.status?.code===2&&t.status.message&&Se.log(` \u274C Error: ${t.status.message}`)}};var Jt=["error","failed","exception","fatal","panic","abort","crash"];function Qe(e){let t=e.split(`
+ `),r=[],o=-1,n=0;for(;n<t.length;){let l=t[n].slice(0,500).toLowerCase();if(Jt.some(h=>l.includes(h))){let h=Math.max(0,n-10,o+1),d=Math.min(t.length-1,n+20),c=[];for(let m=h;m<=d;m++)c.push(t[m]);r.push(c.join(`
  `)),o=d,n=d+1}else n++}if(r.length===0)return e;let s=r.map((a,l)=>`<extracted_error_chunk order="${l+1}">
  ${a}
  </extracted_error_chunk>`).join(`
 
- `);return s.length>e.length*.8?e:s}import we from"process";import{getTracer as Or}from"@netlify/otel";import ie from"process";var pe=ie.env.NETLIFY_API_URL,fe=ie.env.NETLIFY_API_TOKEN,G=w("api"),ge=()=>ie.env.NETLIFY_LOCAL_MODE==="true",ae=async(e,t={})=>{if(!pe||!fe)throw new Error("No API URL or token");let r=new URL(e,pe),o={...t,headers:{...t.headers,Authorization:`Bearer ${fe}`}};ie.env.AGENT_RUNNERS_DEBUG==="true"&&(o.headers["x-nf-debug-logging"]="true"),t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),s=n.ok&&n.status<=299;if(ie.env.AGENT_RUNNERS_DEBUG==="true")G.log(`Response headers for ${r}:`),n.headers.forEach((l,i)=>{G.log(` ${i}: ${l}`)});else{let l=n.headers.get("x-request-id")||n.headers.get("x-nf-request-id");G.log(`Request ID for ${r}: ${l||"N/A"}`)}if(s||G.error(`Got status ${n.status} for request ${r}`),t.raw){if(!s)throw n;return n}let a=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!s)throw a;return a},Qe=e=>{G.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(pe=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(fe=e.constants.NETLIFY_API_TOKEN)},et=()=>({apiUrl:pe,token:fe}),le=async(e,t)=>ge()?(G.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):ae(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),j=async(e,t,r)=>ge()?(G.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):ae(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var tt=async(e,t)=>ge()?(G.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):ae(`/api/v1/agent_runners/${e}/sessions/${t}`),rt=(e,t,r)=>ae(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r}}),nt=async(e,t)=>ge()?(G.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):ae(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),Ne=async(e,t)=>{G.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var Z=w("ai_gateway"),be=null;var ot=async()=>{if(be)return be;Z.log("Fetching available AI gateway providers");let e=await fetch(`${et().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return be=t,Z.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},Jt=async(e,t)=>{let o=(await ot()).providers[e];if(!o)return Z.log(`Provider '${e}' not found`),!1;let n=o.models.includes(t);return Z.log(`Model validation for ${e}/${t}`,{isAvailable:n}),n},st=async({netlify:e,config:t})=>{let r,o,n,s,a=e.constants?.SITE_ID;if(!a)throw new Error("No site id");let l=async()=>{clearTimeout(n),Z.log("Requesting AI gateway information");let i=await rt(a,t.id,t.sessionId);if({token:r,url:s}=i,o=i.expires_at?i.expires_at*1e3:void 0,Z.log("Got AI gateway information",{token:!!r,expiresAt:o,url:s}),o){let 
h=o-Date.now()-6e4;h>0&&(n=setTimeout(()=>{l()},h))}};return await Promise.all([l(),ot()]),{get url(){return s},get token(){return r},isModelAvailableForProvider:Jt}};import B from"process";import me from"path";import Ce from"fs";import{fileURLToPath as tr}from"url";import{execa as rr,execaCommand as Rn}from"execa";import{Transform as Xt}from"stream";var zt=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","ERROR_LOGS_PATH","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),Zt=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function Qt(){return Object.entries(process.env).filter(([e,t])=>!(!t||zt.has(e)||Zt.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function Y(e){if(typeof e!="string")return e;let t=Qt();if(t.length===0)return e;let r=e;return t.forEach(o=>{let n=new RegExp(er(o),"g");r=r.replace(n,"******")}),r}function er(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var Q=class extends Xt{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,o){let n=t.toString(),s=Y(n);o(null,s)}};function it(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(o,n,s){let a=typeof o=="string"?Y(o):o;return typeof n=="function"?t(a,n):t(a,n,s)},process.stderr.write=function(o,n,s){let a=typeof o=="string"?Y(o):o;return typeof n=="function"?r(a,n):r(a,n,s)}}var ue=null,at=e=>(ue&&ue.destroy(),ue=new W({totalAllowedTime:e}),ue),lt=()=>ue;var W=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,o)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let n=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),s=null,a=null;o!==void 0&&(a=new Promise((l,i)=>{s=setTimeout(()=>{i(new Error(`${t} stage exceeded its maximum duration of ${o}ms`))},o)}));try{return a?await Promise.race([r(),a]):await r()}finally{n(),s&&clearTimeout(s)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var nr=tr(import.meta.url),or=me.dirname(nr),he=w("shell"),Pe=new Set,sr={preferLocal:!0},O=(e,t,r)=>{let[o,n]=ir(t,r),s={...sr,...n},a=rr(e,o,s);return ar(a,s),ur(a),a};var ir=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},ar=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(B.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new Q).pipe(B.stdout),e.stdout?.pipe(new Q).pipe(B.stdout),e.stderr?.pipe(new Q).pipe(B.stderr);return}e.stdout?.pipe(B.stdout),e.stderr?.pipe(B.stderr)},ut=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(B.kill(-e.pid,t),he.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return he.error("Error killing process:",r),!1}},lr=e=>ut(e,"SIGKILL"),ur=e=>{Pe.add(e);let t=lt();if(t){let r=t.onTimesUp(()=>{he.log(`Global timer expired, killing process ${e.pid}`),ut(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(he.log(`Force killing process ${e.pid} after timeout`),lr(e))},5e3)});e.on("exit",()=>{Pe.delete(e),r()}),e.on("error",()=>{Pe.delete(e),r()})}};function V(e,t){if(B.env.NODE_PATH){let n=me.join(B.env.NODE_PATH,".bin",t);if(Ce.existsSync(n))return n}let r=me.join(e,"node_modules",".bin",t);if(Ce.existsSync(r))return r;let o=me.join(or,"..","node_modules",".bin",t);if(Ce.existsSync(o))return o}var ct="netlify-agent-runner-context.md",Oe="task-history",Fe="netlify-context",L=".netlify",ee="results.md",$e="assets";var cr=w("utils"),dr=e=>new Promise(t=>{setTimeout(t,e)}),dt=(e,t=3e3)=>{let r=!1,o=null,n=[],s=null,a=(...l)=>{if(r)return o=l,new Promise(d=>{n.push(d)});r=!0;let i,h=new Promise(d=>{i=d});return s=(async()=>{await Promise.resolve();let d=await e(...l);for(i(d);;){if(await dr(t),!o)return r=!1,s=null,d;let c=o,m=n;o=null,n=[],d=await e(...c),m.forEach(y=>{y(d)})}})(),h};return a.flush=async()=>{if((r||o)&&s)return await s,a.flush()},a},ye=(e,t,r=!1)=>{let 
o=null,n=null,s=null,a=function(...l){n=l,s=this;let i=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(s,n),n=null,s=null)},t),i&&(e.apply(s,n),n=null,s=null)};return a.cancel=()=>{clearTimeout(o),o=null,n=null,s=null},a.flush=()=>{if(o){clearTimeout(o);let l=n,i=s;o=null,n=null,s=null,e.apply(i,l)}},a},pt=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(o){t&&(r?.error?r.error("Could not parse JSON",o):cr.error("Could not parse JSON",o))}},ft=(e,t)=>{let n=".netlify.app",s="agent-";if(!t)return`${s}${e.slice(0,6)}`;let l=`--${t}${n}`;if(l.length>55)return"";let i=60-l.length;if(i<=0)return"";if(i>=s.length+6){let h=Math.min(i-s.length,e.length);return`${s}${e.slice(0,h)}`}return e.slice(0,i)};import{Buffer as gt}from"buffer";import pr from"path";var mt=async({config:e})=>{let t=await gr(),{hasChanges:r}=t,{status:o}=t;if(!r)return{hasChanges:!1};let n=mr(o);await hr(n);let s=await Le(o);await De(s);let a={stdio:["ignore","pipe","pipe"]},i=(await O("git",["diff","--staged"],a)).stdout;if(r=!!i,!r)return{hasChanges:!1,ignored:s};let d=(await O("git",["diff","--staged","--binary"],a)).stdout,c,m;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await O("git",["commit","-m","Agent runner"]),c=(await O("git",["diff",e.sha,"HEAD"],a)).stdout;let g=(await O("git",["diff",e.sha,"HEAD","--binary"],a)).stdout;c!==g&&(m=gt.from(g).toString("base64"))}let y={hasChanges:!0,diff:i,resultDiff:c,ignored:s};return i!==d&&(y.diffBinary=gt.from(d).toString("base64")),m&&(y.resultDiffBinary=m),y},De=async(e=[])=>{await O("git",["add",".",...e])},ht=async()=>(await O("git",["status","-s"])).stdout,yt=/.. (.+)?\.log$/,fr=[yt],gr=async()=>{let e=await ht();return{hasChanges:(e.trim().length===0?[]:e.split(`
- `).filter(o=>fr.some(s=>s instanceof RegExp?s.test(o):o===s)?!1:o[1]?.trim()!=="")).length!==0,status:e}},Et=async()=>{let{stdout:e}=await O("git",["rev-parse","HEAD"]);return e.trim()},_t=async()=>{let{stdout:e}=await O("git",["rev-list","--max-parents=0","HEAD"]);return e.trim()},Le=async e=>{e||=await ht();let t=[".netlify","node_modules"],r=[];return e.split(`
- `).forEach(o=>{t.forEach(s=>{[`?? ${s}`,`?? ${s}${pr.sep}`].some(l=>o.startsWith(l))&&r.push(`:!${s}`)});let n=o.match(yt)?.[1];n&&r.push(`:!${n}.log`)}),r},wt=async()=>{await O("git",["reset","--hard","HEAD"])},mr=e=>{let t=e.split(`
- `).reduce((r,o)=>{if(!o)return r;let[n,s,,...a]=o,l=a.join(""),i=n.trim(),h=s.trim();return r[l]?r[l].change=h:r[l]={filePath:l,stage:i,change:h},r},{});return Object.values(t)},hr=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push(O("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import Er from"fs/promises";import _r from"os";import xt from"path";import J from"process";import wr from"readline";import ke from"path";import yr from"fs/promises";var Ue=w("agent-output-utils");async function te({initialResult:e,agentName:t,hasError:r}){let o="",n=ke.join(process.cwd(),L,ee);try{let s=await yr.readFile(n,"utf-8");s&&(o=s,Ue.log(`Pulled result from ${ke.relative(process.cwd(),n)}`))}catch{Ue.log(`No results file found at ${ke.relative(process.cwd(),n)}`)}return o||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function re({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,o=r?.replace(/\s+/g," ").trim().toLowerCase()||"",n="";return o?.includes("ai gateway is not available for your account")||o?.includes("ai gateway is not enabled for your account")?n="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":o?.includes("error when talking to gemini api")?n="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(o?.includes("connection closed prematurely")||o?.includes("499")&&t.toLowerCase().includes("gemini"))&&(n=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),o?.includes("request timed out")&&(n=`The ${t} API request's have timed out. Please try again or use a different available agent.`),o?.includes("network error")&&(n=`The ${t} agent is having network issues. 
Please try again or use a different available agent.`),n&&Ue.log(`Providing updated error messsage: ${n}, replacing original error: ${r}`),n||r||void 0}function ne(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error"))}var k=w("runner_claude"),Tt="Claude Code",Ee="claude-sonnet-4-5-20250929",It=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,Tr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(k.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(k.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(k.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function Me({config:e,netlify:t,persistSteps:r,aiGateway:o,continueSession:n,priorAgentSessionId:s}){let a=e,{accountType:l,prompt:i,modelVersionOverrides:h}=a,{model:d}=a,c="";if(o){let{token:u,url:E}=o;if(!u||!E)throw new Error("No token or url provided from AI Gateway");if(h?.claude){let p=h?.claude?.[l];if(p){if(!await o.isModelAvailableForProvider("anthropic",p))throw new Error(`Model override '${p}' is not available for anthropic provider`);d=p}}else if(d){if(!await o.isModelAvailableForProvider("anthropic",d))throw new Error(`Model '${d}' is not available for anthropic provider`)}else await o.isModelAvailableForProvider("anthropic",Ee)?(d=Ee,k.log(`Using default model: ${Ee}`)):k.log(`Default model ${Ee} is not available, proceeding without model specification`);J.env.ANTHROPIC_API_KEY=u,J.env.ANTHROPIC_BASE_URL=E}else if(!J.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let m=[],y=[],x={},T=0,g=0,R,v,N=[V(J.cwd(),"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose",...d?["--model",d]:[],...n?["--continue"]:[],...n&&s?["--resume",s]:[],"-p",i],C=`${J.env.NVM_BIN}/node`;k.log(`Running ${C} ${N.join(" ")}`);let b=t.utils.run(C,N,{all:!0,env:J.env});b.stdin?.end();let S=ye(()=>{r?.({steps:m,duration:g})},250),_=(u,E)=>{let p={...u,id:T};T+=1,y.push(p),m.push(p),E||S.flush(),S(),E&&S.flush()},f=wr.createInterface({input:b.all});return f.on("error",u=>{k.error("Readline interface error",{error:u.message,stack:u.stack})}),f.on("line",u=>{let E=null;try{E=JSON.parse(u)}catch{k.log("Could not parse line",u)}E?.session_id&&E.session_id!==c&&(c=E.session_id),Array.isArray(E?.message?.content)?E.message.content.forEach(p=>{switch(p.type){case"text":{p.text&&_({message:p.text});break}case"image":{typeof p.source=="object"&&p.source&&p.source.type==="base64"&&p.source.media_type?_({message:`![](data:${p.source.media_type};base64,${p.source.data})`}):k.log(`Unsupported image type 
${p.source?.type}`,p.source);break}case"tool_use":{if(p.name==="Task"){let I=p.input?.description&&`\`${p.input.description}\``;_({title:[It(p.name),I].filter(Boolean).join(" ")})}else p.id&&(x[p.id]=p);S.flush();break}case"tool_result":{let I=p.tool_use_id?x[p.tool_use_id]:void 0,se;if(I){let z=I.input?.file_path&&xt.relative(J.cwd(),I.input.file_path),P=z&&`\`${z}\``;se=[It(I.name||""),P].filter(Boolean).join(" ")}let We=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(I?.name||""),K;if(typeof p.content=="string")K=p.content;else if(Array.isArray(p.content)){let z=[];p.content.forEach(P=>{P?.type==="text"&&typeof P.text=="string"?z.push(P.text):P?.type==="image"&&typeof P.source=="object"&&P.source?P.source.type==="base64"&&P.source.media_type?z.push(`![](data:${P.source.media_type};base64,${P.source.data})`):k.log(`Unsupported image type ${P.source.type}`,P.source):k.log(`Unsupported block type ${P?.type}`)}),K=z.join(`
+ `);return s.length>e.length*.8?e:s}import we from"process";import{getTracer as Fr}from"@netlify/otel";import ie from"process";var pe=ie.env.NETLIFY_API_URL,fe=ie.env.NETLIFY_API_TOKEN,G=_("api"),ge=()=>ie.env.NETLIFY_LOCAL_MODE==="true",ae=async(e,t={})=>{if(!pe||!fe)throw new Error("No API URL or token");let r=new URL(e,pe),o={...t,headers:{...t.headers,Authorization:`Bearer ${fe}`}};ie.env.AGENT_RUNNERS_DEBUG==="true"&&(o.headers["x-nf-debug-logging"]="true"),t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),s=n.ok&&n.status<=299;if(ie.env.AGENT_RUNNERS_DEBUG==="true")G.log(`Response headers for ${r}:`),n.headers.forEach((l,i)=>{G.log(` ${i}: ${l}`)});else{let l=n.headers.get("x-request-id")||n.headers.get("x-nf-request-id");G.log(`Request ID for ${r}: ${l||"N/A"}`)}if(s||G.error(`Got status ${n.status} for request ${r}`),t.raw){if(!s)throw n;return n}let a=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!s)throw a;return a},et=e=>{G.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(pe=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(fe=e.constants.NETLIFY_API_TOKEN)},tt=()=>({apiUrl:pe,token:fe}),le=async(e,t)=>ge()?(G.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):ae(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),j=async(e,t,r)=>ge()?(G.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):ae(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var rt=async(e,t)=>ge()?(G.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):ae(`/api/v1/agent_runners/${e}/sessions/${t}`),nt=(e,t,r)=>ae(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r}}),ot=async(e,t)=>ge()?(G.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):ae(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),Ne=async(e,t)=>{G.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var Z=_("ai_gateway"),be=null;var st=async()=>{if(be)return be;Z.log("Fetching available AI gateway providers");let e=await fetch(`${tt().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return be=t,Z.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},Xt=async(e,t)=>{let o=(await st()).providers[e];if(!o)return Z.log(`Provider '${e}' not found`),!1;let n=o.models.includes(t);return Z.log(`Model validation for ${e}/${t}`,{isAvailable:n}),n},it=async({netlify:e,config:t})=>{let r,o,n,s,a=e.constants?.SITE_ID;if(!a)throw new Error("No site id");let l=async()=>{clearTimeout(n),Z.log("Requesting AI gateway information");let i=await nt(a,t.id,t.sessionId);if({token:r,url:s}=i,o=i.expires_at?i.expires_at*1e3:void 0,Z.log("Got AI gateway information",{token:!!r,expiresAt:o,url:s}),o){let 
h=o-Date.now()-6e4;h>0&&(n=setTimeout(()=>{l()},h))}};return await Promise.all([l(),st()]),{get url(){return s},get token(){return r},isModelAvailableForProvider:Xt}};import B from"process";import me from"path";import Ce from"fs";import{fileURLToPath as rr}from"url";import{execa as nr,execaCommand as vn}from"execa";import{Transform as zt}from"stream";var Zt=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","ERROR_LOGS_PATH","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),Qt=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function er(){return Object.entries(process.env).filter(([e,t])=>!(!t||Zt.has(e)||Qt.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function Y(e){if(typeof e!="string")return e;let t=er();if(t.length===0)return e;let r=e;return t.forEach(o=>{let n=new RegExp(tr(o),"g");r=r.replace(n,"******")}),r}function tr(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var Q=class extends zt{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,o){let n=t.toString(),s=Y(n);o(null,s)}};function at(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(o,n,s){let a=typeof o=="string"?Y(o):o;return typeof n=="function"?t(a,n):t(a,n,s)},process.stderr.write=function(o,n,s){let a=typeof o=="string"?Y(o):o;return typeof n=="function"?r(a,n):r(a,n,s)}}var ue=null,lt=e=>(ue&&ue.destroy(),ue=new W({totalAllowedTime:e}),ue),ut=()=>ue;var W=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,o)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let n=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),s=null,a=null;o!==void 0&&(a=new Promise((l,i)=>{s=setTimeout(()=>{i(new Error(`${t} stage exceeded its maximum duration of ${o}ms`))},o)}));try{return a?await Promise.race([r(),a]):await r()}finally{n(),s&&clearTimeout(s)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var or=rr(import.meta.url),sr=me.dirname(or),he=_("shell"),Pe=new Set,ir={preferLocal:!0},O=(e,t,r)=>{let[o,n]=ar(t,r),s={...ir,...n},a=nr(e,o,s);return lr(a,s),cr(a),a};var ar=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},lr=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(B.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new Q).pipe(B.stdout),e.stdout?.pipe(new Q).pipe(B.stdout),e.stderr?.pipe(new Q).pipe(B.stderr);return}e.stdout?.pipe(B.stdout),e.stderr?.pipe(B.stderr)},ct=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(B.kill(-e.pid,t),he.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return he.error("Error killing process:",r),!1}},ur=e=>ct(e,"SIGKILL"),cr=e=>{Pe.add(e);let t=ut();if(t){let r=t.onTimesUp(()=>{he.log(`Global timer expired, killing process ${e.pid}`),ct(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(he.log(`Force killing process ${e.pid} after timeout`),ur(e))},5e3)});e.on("exit",()=>{Pe.delete(e),r()}),e.on("error",()=>{Pe.delete(e),r()})}};function V(e,t){if(B.env.NODE_PATH){let n=me.join(B.env.NODE_PATH,".bin",t);if(Ce.existsSync(n))return n}let r=me.join(e,"node_modules",".bin",t);if(Ce.existsSync(r))return r;let o=me.join(sr,"..","node_modules",".bin",t);if(Ce.existsSync(o))return o}var dt="netlify-agent-runner-context.md",Oe="task-history",Fe="netlify-context",L=".netlify",ee="results.md",$e="assets";var dr=_("utils"),pr=e=>new Promise(t=>{setTimeout(t,e)}),pt=(e,t=3e3)=>{let r=!1,o=null,n=[],s=null,a=(...l)=>{if(r)return o=l,new Promise(d=>{n.push(d)});r=!0;let i,h=new Promise(d=>{i=d});return s=(async()=>{await Promise.resolve();let d=await e(...l);for(i(d);;){if(await pr(t),!o)return r=!1,s=null,d;let c=o,m=n;o=null,n=[],d=await e(...c),m.forEach(y=>{y(d)})}})(),h};return a.flush=async()=>{if((r||o)&&s)return await s,a.flush()},a},ye=(e,t,r=!1)=>{let 
o=null,n=null,s=null,a=function(...l){n=l,s=this;let i=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(s,n),n=null,s=null)},t),i&&(e.apply(s,n),n=null,s=null)};return a.cancel=()=>{clearTimeout(o),o=null,n=null,s=null},a.flush=()=>{if(o){clearTimeout(o);let l=n,i=s;o=null,n=null,s=null,e.apply(i,l)}},a},ft=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(o){t&&(r?.error?r.error("Could not parse JSON",o):dr.error("Could not parse JSON",o))}},gt=(e,t)=>{let n=".netlify.app",s="agent-";if(!t)return`${s}${e.slice(0,6)}`;let l=`--${t}${n}`;if(l.length>55)return"";let i=60-l.length;if(i<=0)return"";if(i>=s.length+6){let h=Math.min(i-s.length,e.length);return`${s}${e.slice(0,h)}`}return e.slice(0,i)};import{Buffer as mt}from"buffer";import fr from"path";var ht=_("repo"),yt=async({config:e})=>{ht.info("Getting runner diffs");let t=await mr(),{hasChanges:r}=t,{status:o}=t;if(!r)return{hasChanges:!1};let n=hr(o);await yr(n),ht.info("Changes after processing"),await Le();let s=await ke(o);await De(s);let a={stdio:["ignore","pipe","pipe"]},i=(await O("git",["diff","--staged"],a)).stdout;if(r=!!i,!r)return{hasChanges:!1,ignored:s};let d=(await O("git",["diff","--staged","--binary"],a)).stdout,c,m;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await O("git",["commit","-m","Agent runner"]),c=(await O("git",["diff",e.sha,"HEAD"],a)).stdout;let g=(await O("git",["diff",e.sha,"HEAD","--binary"],a)).stdout;c!==g&&(m=mt.from(g).toString("base64"))}let y={hasChanges:!0,diff:i,resultDiff:c,ignored:s};return i!==d&&(y.diffBinary=mt.from(d).toString("base64")),m&&(y.resultDiffBinary=m),y},De=async(e=[])=>{await O("git",["add",".",...e])},Le=async()=>(await O("git",["status","-s"])).stdout,Et=/.. (.+)?\.log$/,gr=[Et],mr=async()=>{let e=await Le();return{hasChanges:(e.trim().length===0?[]:e.split(`
+ `).filter(o=>gr.some(s=>s instanceof RegExp?s.test(o):o===s)?!1:o[1]?.trim()!=="")).length!==0,status:e}},_t=async()=>{let{stdout:e}=await O("git",["rev-parse","HEAD"]);return e.trim()},wt=async()=>{let{stdout:e}=await O("git",["rev-list","--max-parents=0","HEAD"]);return e.trim()},ke=async e=>{e||=await Le();let t=[".netlify","node_modules"],r=[];return e.split(`
+ `).forEach(o=>{t.forEach(s=>{[`?? ${s}`,`?? ${s}${fr.sep}`].some(l=>o.startsWith(l))&&r.push(`:!${s}`)});let n=o.match(Et)?.[1];n&&r.push(`:!${n}.log`)}),r},Tt=async()=>{await O("git",["reset","--hard","HEAD"])},hr=e=>{let t=e.split(`
+ `).reduce((r,o)=>{if(!o)return r;let[n,s,,...a]=o,l=a.join(""),i=n.trim(),h=s.trim();return r[l]?r[l].change=h:r[l]={filePath:l,stage:i,change:h},r},{});return Object.values(t)},yr=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push(O("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import _r from"fs/promises";import wr from"os";import Rt from"path";import J from"process";import Tr from"readline";import Ue from"path";import Er from"fs/promises";var Me=_("agent-output-utils");async function te({initialResult:e,agentName:t,hasError:r}){let o="",n=Ue.join(process.cwd(),L,ee);try{let s=await Er.readFile(n,"utf-8");s&&(o=s,Me.log(`Pulled result from ${Ue.relative(process.cwd(),n)}`))}catch{Me.log(`No results file found at ${Ue.relative(process.cwd(),n)}`)}return o||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function re({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,o=r?.replace(/\s+/g," ").trim().toLowerCase()||"",n="";return o?.includes("ai gateway is not available for your account")||o?.includes("ai gateway is not enabled for your account")?n="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":o?.includes("error when talking to gemini api")?n="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(o?.includes("connection closed prematurely")||o?.includes("499")&&t.toLowerCase().includes("gemini"))&&(n=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),o?.includes("request timed out")&&(n=`The ${t} API request's have timed out. Please try again or use a different available agent.`),o?.includes("network error")&&(n=`The ${t} agent is having network issues. 
Please try again or use a different available agent.`),n&&Me.log(`Providing updated error messsage: ${n}, replacing original error: ${r}`),n||r||void 0}function ne(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error"))}var k=_("runner_claude"),It="Claude Code",Ee="claude-sonnet-4-5-20250929",xt=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,Ir=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(k.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(k.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(k.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function Ge({config:e,netlify:t,persistSteps:r,aiGateway:o,continueSession:n,priorAgentSessionId:s}){let a=e,{accountType:l,prompt:i,modelVersionOverrides:h}=a,{model:d}=a,c="";if(o){let{token:u,url:E}=o;if(!u||!E)throw new Error("No token or url provided from AI Gateway");if(h?.claude){let p=h?.claude?.[l];if(p){if(!await o.isModelAvailableForProvider("anthropic",p))throw new Error(`Model override '${p}' is not available for anthropic provider`);d=p}}else if(d){if(!await o.isModelAvailableForProvider("anthropic",d))throw new Error(`Model '${d}' is not available for anthropic provider`)}else await o.isModelAvailableForProvider("anthropic",Ee)?(d=Ee,k.log(`Using default model: ${Ee}`)):k.log(`Default model ${Ee} is not available, proceeding without model specification`);J.env.ANTHROPIC_API_KEY=u,J.env.ANTHROPIC_BASE_URL=E}else if(!J.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let m=[],y=[],x={},T=0,g=0,R,v,N=[V(J.cwd(),"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose",...d?["--model",d]:[],...n?["--continue"]:[],...n&&s?["--resume",s]:[],"-p",i],C=`${J.env.NVM_BIN}/node`;k.log(`Running ${C} ${N.join(" ")}`);let b=t.utils.run(C,N,{all:!0,env:J.env});b.stdin?.end();let S=ye(()=>{r?.({steps:m,duration:g})},250),w=(u,E)=>{let p={...u,id:T};T+=1,y.push(p),m.push(p),E||S.flush(),S(),E&&S.flush()},f=Tr.createInterface({input:b.all});return f.on("error",u=>{k.error("Readline interface error",{error:u.message,stack:u.stack})}),f.on("line",u=>{let E=null;try{E=JSON.parse(u)}catch{k.log("Could not parse line",u)}E?.session_id&&E.session_id!==c&&(c=E.session_id),Array.isArray(E?.message?.content)?E.message.content.forEach(p=>{switch(p.type){case"text":{p.text&&w({message:p.text});break}case"image":{typeof p.source=="object"&&p.source&&p.source.type==="base64"&&p.source.media_type?w({message:`![](data:${p.source.media_type};base64,${p.source.data})`}):k.log(`Unsupported image type 
${p.source?.type}`,p.source);break}case"tool_use":{if(p.name==="Task"){let I=p.input?.description&&`\`${p.input.description}\``;w({title:[xt(p.name),I].filter(Boolean).join(" ")})}else p.id&&(x[p.id]=p);S.flush();break}case"tool_result":{let I=p.tool_use_id?x[p.tool_use_id]:void 0,se;if(I){let z=I.input?.file_path&&Rt.relative(J.cwd(),I.input.file_path),P=z&&`\`${z}\``;se=[xt(I.name||""),P].filter(Boolean).join(" ")}let Ve=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(I?.name||""),K;if(typeof p.content=="string")K=p.content;else if(Array.isArray(p.content)){let z=[];p.content.forEach(P=>{P?.type==="text"&&typeof P.text=="string"?z.push(P.text):P?.type==="image"&&typeof P.source=="object"&&P.source?P.source.type==="base64"&&P.source.media_type?z.push(`![](data:${P.source.media_type};base64,${P.source.data})`):k.log(`Unsupported image type ${P.source.type}`,P.source):k.log(`Unsupported block type ${P?.type}`)}),K=z.join(`
 
- `)}We&&K&&(K=`\`\`\`
+ `)}Ve&&K&&(K=`\`\`\`
  ${K.trim()}
- \`\`\``),_({title:se,message:K},!0);break}case"thinking":{p.thinking&&_({title:"Thinking",message:p.thinking},!0);break}default:k.log(`Message content type is not supported ${p.type}`,p)}}):E?.type==="result"&&(g=E.duration_ms||0,E.is_error?v=E.result:R=E.result,[y,m].forEach(p=>{p[p.length-1]?.message===R&&p.pop()}))}),await b.catch(u=>{({error:v,result:R}=Tr({catchError:u,runCmd:b,error:v,result:R,runnerName:"Claude"}))}),f.close(),S.flush(),{steps:y,duration:g,result:await te({initialResult:R,agentName:Tt,hasError:!!v}),error:re({error:v,agentName:Tt}),isRetryableError:ne(v),agentSessionId:c}}var Rt=async()=>{let e=xt.join(_r.homedir(),".claude");await Er.rm(e,{recursive:!0,force:!0})};import _e from"fs/promises";import St from"os";import Ge from"path";import H from"process";import Ir from"readline";var U=w("runner_codex"),vt="Codex CLI",xr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(U.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(U.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(U.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function je({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:s,prompt:a,modelVersionOverrides:l}=e,{model:i}=e;if(n){let{token:f,url:u}=n;if(!f||!u)throw new Error("No token or url provided from AI Gateway");if(l?.codex){let E=l?.codex?.[s];if(E){if(!await n.isModelAvailableForProvider("openai",E))throw new Error(`Model override '${E}' is not available for openai provider`);i=E}}else if(i&&!await n.isModelAvailableForProvider("openai",i))throw new Error(`Model '${i}' is not available for openai provider`);H.env.OPENAI_API_KEY=f,H.env.OPENAI_BASE_URL=u}else if(!H.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let h=[],d=[],c={},m=0,y=0,x,T,g=`${H.env.NVM_BIN}/node`,R=Ge.join(St.homedir(),".codex"),v=Ge.join(R,"config.toml");try{await _e.mkdir(R,{recursive:!0});let f="";try{f=await _e.readFile(v,"utf-8")}catch{}f.includes("web_search")||(f.includes("[tools]")?f=f.replace(/\[tools\]/,`[tools]
+ \`\`\``),w({title:se,message:K},!0);break}case"thinking":{p.thinking&&w({title:"Thinking",message:p.thinking},!0);break}default:k.log(`Message content type is not supported ${p.type}`,p)}}):E?.type==="result"&&(g=E.duration_ms||0,E.is_error?v=E.result:R=E.result,[y,m].forEach(p=>{p[p.length-1]?.message===R&&p.pop()}))}),await b.catch(u=>{({error:v,result:R}=Ir({catchError:u,runCmd:b,error:v,result:R,runnerName:"Claude"}))}),f.close(),S.flush(),{steps:y,duration:g,result:await te({initialResult:R,agentName:It,hasError:!!v}),error:re({error:v,agentName:It}),isRetryableError:ne(v),agentSessionId:c}}var vt=async()=>{let e=Rt.join(wr.homedir(),".claude");await _r.rm(e,{recursive:!0,force:!0})};import _e from"fs/promises";import At from"os";import je from"path";import H from"process";import xr from"readline";var U=_("runner_codex"),St="Codex CLI",Rr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(U.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(U.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(U.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function Ye({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:s,prompt:a,modelVersionOverrides:l}=e,{model:i}=e;if(n){let{token:f,url:u}=n;if(!f||!u)throw new Error("No token or url provided from AI Gateway");if(l?.codex){let E=l?.codex?.[s];if(E){if(!await n.isModelAvailableForProvider("openai",E))throw new Error(`Model override '${E}' is not available for openai provider`);i=E}}else if(i&&!await n.isModelAvailableForProvider("openai",i))throw new Error(`Model '${i}' is not available for openai provider`);H.env.OPENAI_API_KEY=f,H.env.OPENAI_BASE_URL=u}else if(!H.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let h=[],d=[],c={},m=0,y=0,x,T,g=`${H.env.NVM_BIN}/node`,R=je.join(At.homedir(),".codex"),v=je.join(R,"config.toml");try{await _e.mkdir(R,{recursive:!0});let f="";try{f=await _e.readFile(v,"utf-8")}catch{}f.includes("web_search")||(f.includes("[tools]")?f=f.replace(/\[tools\]/,`[tools]
  web_search = true`):f+=`
  [tools]
  web_search = true
- `,await _e.writeFile(v,f,"utf-8"),U.log("Updated Codex config with web_search enabled"))}catch(f){U.warn("Failed to update Codex config",{error:f.message})}let N=[V(H.cwd(),"codex"),"login","--with-api-key"];U.log(`Running ${g} ${N.join(" ")}`);let C=t.utils.run(g,N,{input:H.env.OPENAI_API_KEY,env:{...H.env}});try{await C,U.log("Successfully logged in to Codex")}catch(f){throw U.error("Failed to login to Codex",{error:f.message}),new Error(`Codex login failed: ${f.message}`)}let b=[V(H.cwd(),"codex"),"exec","--yolo","--json","--config","web_search=true",...i?["--model",i]:[],a].filter(Boolean);U.log(`Running ${g} ${b.join(" ")}`);let S=t.utils.run(g,b,{all:!0,env:{...H.env}}),_=Ir.createInterface({input:S.all});return _.on("error",f=>{U.error("Readline interface error",{error:f.message,stack:f.stack})}),_.on("line",f=>{let u=null;try{u=JSON.parse(f)}catch{U.log("Could not parse line",f);return}let E=[],p=!1;if(u?.duration_ms&&(y=u.duration_ms,p=!0),u?.type==="local_shell_call")c[u.call_id]=u;else if(u?.type==="local_shell_call_output"){let I=vr(c[u.call_id],u);I&&(I.id=m,m+=1,d.push(I),h.push(I),E.push(I),p=!0)}else u?.type==="message"&&u.role==="assistant"?x=u.content.map(I=>I.text).join(`
18
+ `,await _e.writeFile(v,f,"utf-8"),U.log("Updated Codex config with web_search enabled"))}catch(f){U.warn("Failed to update Codex config",{error:f.message})}let N=[V(H.cwd(),"codex"),"login","--with-api-key"];U.log(`Running ${g} ${N.join(" ")}`);let C=t.utils.run(g,N,{input:H.env.OPENAI_API_KEY,env:{...H.env}});try{await C,U.log("Successfully logged in to Codex")}catch(f){throw U.error("Failed to login to Codex",{error:f.message}),new Error(`Codex login failed: ${f.message}`)}let b=[V(H.cwd(),"codex"),"exec","--yolo","--json","--config","web_search=true",...i?["--model",i]:[],a].filter(Boolean);U.log(`Running ${g} ${b.join(" ")}`);let S=t.utils.run(g,b,{all:!0,env:{...H.env}}),w=xr.createInterface({input:S.all});return w.on("error",f=>{U.error("Readline interface error",{error:f.message,stack:f.stack})}),w.on("line",f=>{let u=null;try{u=JSON.parse(f)}catch{U.log("Could not parse line",f);return}let E=[],p=!1;if(u?.duration_ms&&(y=u.duration_ms,p=!0),u?.type==="local_shell_call")c[u.call_id]=u;else if(u?.type==="local_shell_call_output"){let I=Sr(c[u.call_id],u);I&&(I.id=m,m+=1,d.push(I),h.push(I),E.push(I),p=!0)}else u?.type==="message"&&u.role==="assistant"?x=u.content.map(I=>I.text).join(`
19
19
  `):u?.type==="message"&&u.role==="system"&&(T=u.content.map(I=>I.text).join(`
20
- `));p&&(r?.({steps:h,duration:y}),o?.({steps:E,duration:y}))}),await S.catch(f=>{let u=xr({catchError:f,runCmd:S,error:T,result:x,runnerName:"Codex"});T=u.error,x=u.result}),_.close(),{steps:d,duration:y,result:await te({initialResult:x,agentName:vt,hasError:!!T}),error:re({error:T,agentName:vt}),isRetryableError:ne(T)}}var At=async()=>{let e=Ge.join(St.homedir(),".codex");await _e.rm(e,{recursive:!0,force:!0})},Rr=new Set(["bash","-lc"]),vr=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(s=>!Rr.has(s)),o=r?`Running \`${r.join(" ")}\``:void 0,n;try{n=JSON.parse(t.output).output?.trim(),n&&(n=`\`\`\`
20
+ `));p&&(r?.({steps:h,duration:y}),o?.({steps:E,duration:y}))}),await S.catch(f=>{let u=Rr({catchError:f,runCmd:S,error:T,result:x,runnerName:"Codex"});T=u.error,x=u.result}),w.close(),{steps:d,duration:y,result:await te({initialResult:x,agentName:St,hasError:!!T}),error:re({error:T,agentName:St}),isRetryableError:ne(T)}}var Nt=async()=>{let e=je.join(At.homedir(),".codex");await _e.rm(e,{recursive:!0,force:!0})},vr=new Set(["bash","-lc"]),Sr=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(s=>!vr.has(s)),o=r?`Running \`${r.join(" ")}\``:void 0,n;try{n=JSON.parse(t.output).output?.trim(),n&&(n=`\`\`\`
21
21
  ${n.trim()}
22
- \`\`\``)}catch(s){U.error("Could not decode outputMsg",s,t.output)}return{title:o,message:n}};import Sr from"fs/promises";import Ar from"os";import bt from"path";import X from"process";import Nr from"readline";var oe=w("runner_gemini"),Nt="Gemini CLI",br=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(oe.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(oe.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(oe.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),Cr={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"};async function Ye({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:s,prompt:a,modelVersionOverrides:l}=e,{model:i}=e;if(n){let{token:_,url:f}=n;if(!_||!f)throw new Error("No token or url provided from AI Gateway");if(l?.gemini){let u=l?.gemini?.[s];if(u){if(!await n.isModelAvailableForProvider("gemini",u))throw new Error(`Model override '${u}' is not available for gemini provider`);i=u}}else if(i&&!await n.isModelAvailableForProvider("gemini",i))throw new Error(`Model '${i}' is not available for gemini provider`);X.env.GEMINI_API_KEY=_,X.env.GOOGLE_GEMINI_BASE_URL=f}else if(!X.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let h=[],d=[],c=[],m={},y=0,x=0,T,g,R=[V(X.cwd(),"gemini"),...i?["--model",i]:[],"--yolo","-p",a],v=`${X.env.NVM_BIN}/node`;oe.log(`Running ${v} ${R.join(" ")}`);let N=t.utils.run(v,R,{all:!0,env:X.env});N.stdin?.end();let C=ye(()=>{r?.({steps:h,duration:x}),o?.({steps:d,duration:x}),d=[]},250),b=(_,f)=>{_.id=y,y+=1,c.push(_),h.push(_),d.push(_),f||C.flush(),C(),f&&C.flush()},S=Nr.createInterface({input:N.all});return S.on("error",_=>{oe.error("Readline interface error",{error:_.message,stack:_.stack})}),S.on("line",_=>{let f=null;try{if(_.startsWith("[API Error")){let u=_.match(/\[api error: (.+?)]$/i)?.[1];f={type:"error",value:pt(u,!1)?.error?.message||u||"Gemini encountered error"}}else f=JSON.parse(_)}catch{return}if(f)switch(f.type){case"thought":{let u=f.value;b({title:u?.subject??"Thinking...",message:u?.description},!0);break}case"content":{f.value&&b({message:f.value});break}case"tool_call_request":{let u=f.value,E=Cr[u.name]??u.name,p=u.args?.path||u.args?.absolute_path,I=p&&bt.relative(X.cwd(),p),se=u.args?.command,K={title:[E,I&&`\`${I}\``,se&&`\`${se}\``].filter(Boolean).join(" ")};m[u.callId]=K,C.flush();break}case"tool_result":{let u=f.value,E=m[u.callId];if(E){let p=[u.resultDisplay,u.responseParts?.functionResponse?.response?.output].find(I=>typeof I=="string"&&I);p&&(E.message=`\`\`\`
22
+ \`\`\``)}catch(s){U.error("Could not decode outputMsg",s,t.output)}return{title:o,message:n}};import Ar from"fs/promises";import Nr from"os";import Ct from"path";import X from"process";import br from"readline";var oe=_("runner_gemini"),bt="Gemini CLI",Cr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(oe.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(oe.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(oe.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),Pr={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"};async function Be({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:s,prompt:a,modelVersionOverrides:l}=e,{model:i}=e;if(n){let{token:w,url:f}=n;if(!w||!f)throw new Error("No token or url provided from AI Gateway");if(l?.gemini){let u=l?.gemini?.[s];if(u){if(!await n.isModelAvailableForProvider("gemini",u))throw new Error(`Model override '${u}' is not available for gemini provider`);i=u}}else if(i&&!await n.isModelAvailableForProvider("gemini",i))throw new Error(`Model '${i}' is not available for gemini provider`);X.env.GEMINI_API_KEY=w,X.env.GOOGLE_GEMINI_BASE_URL=f}else if(!X.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let h=[],d=[],c=[],m={},y=0,x=0,T,g,R=[V(X.cwd(),"gemini"),...i?["--model",i]:[],"--yolo","-p",a],v=`${X.env.NVM_BIN}/node`;oe.log(`Running ${v} ${R.join(" ")}`);let N=t.utils.run(v,R,{all:!0,env:X.env});N.stdin?.end();let C=ye(()=>{r?.({steps:h,duration:x}),o?.({steps:d,duration:x}),d=[]},250),b=(w,f)=>{w.id=y,y+=1,c.push(w),h.push(w),d.push(w),f||C.flush(),C(),f&&C.flush()},S=br.createInterface({input:N.all});return S.on("error",w=>{oe.error("Readline interface error",{error:w.message,stack:w.stack})}),S.on("line",w=>{let f=null;try{if(w.startsWith("[API Error")){let u=w.match(/\[api error: (.+?)]$/i)?.[1];f={type:"error",value:ft(u,!1)?.error?.message||u||"Gemini encountered error"}}else f=JSON.parse(w)}catch{return}if(f)switch(f.type){case"thought":{let u=f.value;b({title:u?.subject??"Thinking...",message:u?.description},!0);break}case"content":{f.value&&b({message:f.value});break}case"tool_call_request":{let u=f.value,E=Pr[u.name]??u.name,p=u.args?.path||u.args?.absolute_path,I=p&&Ct.relative(X.cwd(),p),se=u.args?.command,K={title:[E,I&&`\`${I}\``,se&&`\`${se}\``].filter(Boolean).join(" ")};m[u.callId]=K,C.flush();break}case"tool_result":{let u=f.value,E=m[u.callId];if(E){let p=[u.resultDisplay,u.responseParts?.functionResponse?.response?.output].find(I=>typeof I=="string"&&I);p&&(E.message=`\`\`\`
23
23
  ${p.trim()}
24
- \`\`\``),b(E,!0)}break}case"result":{x=f.duration_ms,T=f.value,[c,h,d].forEach(u=>{u[u.length-1]?.message===T&&u.pop()});break}case"error":{g=f.value;break}case"finished":break;default:{oe.warn("Unhandled message type:",f.type);break}}}),await N.catch(_=>{({error:g,result:T}=br({catchError:_,runCmd:N,error:g,result:T,runnerName:"Gemini"}))}),S.close(),C.flush(),{steps:c,duration:x,result:await te({initialResult:T,agentName:Nt,hasError:!!g}),error:re({error:g,agentName:Nt}),isRetryableError:ne(g)}}var Ct=async()=>{let e=bt.join(Ar.homedir(),".gemini");await Sr.rm(e,{recursive:!0,force:!0})};var Pr={codex:{runner:je,clean:At},claude:{runner:Me,clean:Rt},gemini:{runner:Ye,clean:Ct}},Pt=Pr;var Ot=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:o})=>await A(Or(),"init-stage",async n=>{let s=performance.now();n?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.validateAgent":e.validateAgent,"init.runnerVersion":o||"unknown"});let a=Pt[e.runner];if(!a)throw n?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let l=Fr({apiToken:r});Qe(l);let i=e.useGateway?await st({netlify:l,config:e}):void 0;n?.setAttributes({"init.aiGateway.created":!!i}),e.validateAgent&&e.errorLogsPath&&n?.setAttributes({"init.errorLogsPath":e.errorLogsPath});let h=dt(({steps:y=[],duration:x})=>{let T=y.map(g=>({...g,title:g.title?Y(g.title):void 0,message:g.message?Y(g.message):void 0}));return y.length=0,j(e.id,e.sessionId,{steps:T,duration:x})},t),d=await Le();await De(d);let c;e.hasRepo?e.sha?(c=e.sha,n?.setAttributes({"init.sha.source":"provided"})):(c=await Et(),await le(e.id,{sha:c}),n?.setAttributes({"init.sha.source":"current_commit"})):(c=await _t(),n?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let m=performance.now()-s;return n?.setAttributes({"init.sha":c||"unknown","init.duration.ms":m,"init.status":"success"}),{aiGateway:i,context:l,persistSteps:h,runner:a,sha:c}}),Fr=({apiToken:e})=>({constants:{NETLIFY_API_HOST:we.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||we.env.NETLIFY_API_TOKEN,SITE_ID:we.env.SITE_ID,FUNCTIONS_DIST:we.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:O}});import{getTracer as Be}from"@netlify/otel";import $r from"crypto";import q from"fs/promises";import $ from"path";import M from"process";var F=w("context"),Dr=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:M.env.NETLIFY_TEAM_ID,userId:M.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:M.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},Lr=10,kr=async e=>{let{name:t,ext:r}=$.parse(e),o=e,n=$.join(M.cwd(),L,o),s=0;for(;await Ur(n);){if(s>=Lr)throw new Error("Failed to generate context file");o=`${t}-${$r.randomUUID().slice(0,5)}${r}`,n=$.join(M.cwd(),L,o),s+=1}return o},Ur=async e=>{try{return await q.access(e),!0}catch{return!1}},Mr=async()=>{try{F.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return F.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(o=>o&&typeof o=="object"&&o.key==="catchall-consumer");return r?!r.contextScopes||typeof r.contextScopes!="object"?(F.warn("Catchall consumer missing or invalid contextScopes"),null):r:(F.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?F.warn("Netlify features context request timed out"):F.warn("Failed to fetch Netlify features context:",e.message),null}},Gr=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let o=await r.text();return await q.writeFile(t,o,"utf-8"),!0}catch(r){return r.name==="AbortError"?F.warn(`Download timeout for ${e}`):F.warn(`Failed to download context file ${e}:`,r.message),!1}},Te=null,jr=async()=>{if(Te)return Te;let e=await Mr();if(!e)return[];let t=$.join(M.cwd(),L,Fe);await q.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([n,s])=>{if(!s||typeof s!="object"||!s.endpoint||!s.scope)return F.warn(`Invalid scope data for ${n}, skipping...`),null;let a=`${n}.md`,l=$.join(t,a),i=$.join(L,Fe,a);return F.log(`Downloading ${s.scope} context...`),await Gr(s.endpoint,l)?(F.log(`Downloaded: ${i}`),{scope:s.scope,path:i,key:n}):null});return Te=(await Promise.all(r)).filter(n=>n!==null),Te},Ft=async({cliPath:e,netlify:t,config:r,buildErrorContext:o})=>{let n=Dr(t),s=await kr(ct),a=$.join(M.cwd(),L);await q.mkdir(a,{recursive:!0});let l=$.join(L,s),i=$.join(M.cwd(),l),h=$.join(M.cwd(),L,ee);try{await q.unlink(h),F.log(`Deleted old results file: ${h}`)}catch{}let d=o?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
24
+ \`\`\``),b(E,!0)}break}case"result":{x=f.duration_ms,T=f.value,[c,h,d].forEach(u=>{u[u.length-1]?.message===T&&u.pop()});break}case"error":{g=f.value;break}case"finished":break;default:{oe.warn("Unhandled message type:",f.type);break}}}),await N.catch(w=>{({error:g,result:T}=Cr({catchError:w,runCmd:N,error:g,result:T,runnerName:"Gemini"}))}),S.close(),C.flush(),{steps:c,duration:x,result:await te({initialResult:T,agentName:bt,hasError:!!g}),error:re({error:g,agentName:bt}),isRetryableError:ne(g)}}var Pt=async()=>{let e=Ct.join(Nr.homedir(),".gemini");await Ar.rm(e,{recursive:!0,force:!0})};var Or={codex:{runner:Ye,clean:Nt},claude:{runner:Ge,clean:vt},gemini:{runner:Be,clean:Pt}},Ot=Or;var Ft=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:o})=>await A(Fr(),"init-stage",async n=>{let s=performance.now();n?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.validateAgent":e.validateAgent,"init.runnerVersion":o||"unknown"});let a=Ot[e.runner];if(!a)throw n?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let l=$r({apiToken:r});et(l);let i=e.useGateway?await it({netlify:l,config:e}):void 0;n?.setAttributes({"init.aiGateway.created":!!i}),e.validateAgent&&e.errorLogsPath&&n?.setAttributes({"init.errorLogsPath":e.errorLogsPath});let h=pt(({steps:y=[],duration:x})=>{let T=y.map(g=>({...g,title:g.title?Y(g.title):void 0,message:g.message?Y(g.message):void 0}));return y.length=0,j(e.id,e.sessionId,{steps:T,duration:x})},t),d=await ke();await De(d);let c;e.hasRepo?e.sha?(c=e.sha,n?.setAttributes({"init.sha.source":"provided"})):(c=await _t(),await le(e.id,{sha:c}),n?.setAttributes({"init.sha.source":"current_commit"})):(c=await wt(),n?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let m=performance.now()-s;return n?.setAttributes({"init.sha":c||"unknown","init.duration.ms":m,"init.status":"success"}),{aiGateway:i,context:l,persistSteps:h,runner:a,sha:c}}),$r=({apiToken:e})=>({constants:{NETLIFY_API_HOST:we.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||we.env.NETLIFY_API_TOKEN,SITE_ID:we.env.SITE_ID,FUNCTIONS_DIST:we.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:O}});import{getTracer as He}from"@netlify/otel";import Dr from"crypto";import q from"fs/promises";import $ from"path";import M from"process";var F=_("context"),Lr=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:M.env.NETLIFY_TEAM_ID,userId:M.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:M.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},kr=10,Ur=async e=>{let{name:t,ext:r}=$.parse(e),o=e,n=$.join(M.cwd(),L,o),s=0;for(;await Mr(n);){if(s>=kr)throw new Error("Failed to generate context file");o=`${t}-${Dr.randomUUID().slice(0,5)}${r}`,n=$.join(M.cwd(),L,o),s+=1}return o},Mr=async e=>{try{return await q.access(e),!0}catch{return!1}},Gr=async()=>{try{F.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return F.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(o=>o&&typeof o=="object"&&o.key==="catchall-consumer");return r?!r.contextScopes||typeof r.contextScopes!="object"?(F.warn("Catchall consumer missing or invalid contextScopes"),null):r:(F.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?F.warn("Netlify features context request timed out"):F.warn("Failed to fetch Netlify features context:",e.message),null}},jr=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let o=await r.text();return await q.writeFile(t,o,"utf-8"),!0}catch(r){return r.name==="AbortError"?F.warn(`Download timeout for ${e}`):F.warn(`Failed to download context file ${e}:`,r.message),!1}},Te=null,Yr=async()=>{if(Te)return Te;let e=await Gr();if(!e)return[];let t=$.join(M.cwd(),L,Fe);await q.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([n,s])=>{if(!s||typeof s!="object"||!s.endpoint||!s.scope)return F.warn(`Invalid scope data for ${n}, skipping...`),null;let a=`${n}.md`,l=$.join(t,a),i=$.join(L,Fe,a);return F.log(`Downloading ${s.scope} context...`),await jr(s.endpoint,l)?(F.log(`Downloaded: ${i}`),{scope:s.scope,path:i,key:n}):null});return Te=(await Promise.all(r)).filter(n=>n!==null),Te},$t=async({cliPath:e,netlify:t,config:r,buildErrorContext:o})=>{let n=Lr(t),s=await Ur(dt),a=$.join(M.cwd(),L);await q.mkdir(a,{recursive:!0});let l=$.join(L,s),i=$.join(M.cwd(),l),h=$.join(M.cwd(),L,ee);try{await q.unlink(h),F.log(`Deleted old results file: ${h}`)}catch{}let d=o?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
25
25
  Your task is to analyze and fix the build errors.
26
26
  Don't apply techniques of reverting changes. Apply fixes related to errors.
27
27
  Don't try to run build by yourself. Just fix the errors.
@@ -34,7 +34,7 @@ ${r.siteContext.filter(g=>g.site_context).map(g=>typeof g.site_context=="string"
34
34
 
35
35
  `)}
36
36
  </project_rules>
37
- `);let m="";if(r.sessionHistoryContext?.length){let g=$.join(M.cwd(),L,Oe);await q.mkdir(g,{recursive:!0});let R=await Promise.all(r.sessionHistoryContext.map(async(v,N)=>{let C=N+1,b=`attempt-${C}.md`,S=$.join(g,b),_=$.join(L,Oe,b),f=`# Task History - Attempt ${C}
37
+ `);let m="";if(r.sessionHistoryContext?.length){let g=$.join(M.cwd(),L,Oe);await q.mkdir(g,{recursive:!0});let R=await Promise.all(r.sessionHistoryContext.map(async(v,N)=>{let C=N+1,b=`attempt-${C}.md`,S=$.join(g,b),w=$.join(L,Oe,b),f=`# Task History - Attempt ${C}
38
38
 
39
39
  ## Request - what the user asked for
40
40
  ${v.request}
@@ -44,7 +44,7 @@ ${v.request}
44
44
  ## Response - what the agent replied with after its work
45
45
 
46
46
  ${v.response}
47
- `;return await q.writeFile(S,f,"utf-8"),F.log(`Created history file: ${_}`),_}));m+=`
47
+ `;return await q.writeFile(S,f,"utf-8"),F.log(`Created history file: ${w}`),w}));m+=`
48
48
  <session_history_context>
49
49
  History of prior work on this task.
50
50
  You MUST review ALL of the files below as context to understand the context of previous attempts. Use this information to continue the discussion appropriately.
@@ -53,7 +53,7 @@ ${v.response}
53
53
  `)}
54
54
 
55
55
  </session_history_context>
56
- `}let y=await jr(),x="";y.length>0&&(x=`
56
+ `}let y=await Yr(),x="";y.length>0&&(x=`
57
57
  <netlify_features_context>
58
58
  If the user request is explicitly related to a specific Netlify feature (e.g., Netlify Forms, Netlify Functions, etc.), you MUST review the relevant documentation below in addition to reviewing the project files.
59
59
  DO NOT force the use of any Netlify feature if the user request does not explicitly require it or if the project has alternative implementations in place already.
@@ -122,15 +122,15 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
122
122
  </request>
123
123
 
124
124
  Use the following file for the complete context of the ask, the environment, and what's available. ${i} You MUST READ ALL OF IT. Make sure to read it first. Never cite or paraphrase private context.
125
- `),T};var Yr=w("prompt"),$t=async({cliPath:e,config:t,netlify:r,buildErrorContext:o})=>{let n=await Ft({cliPath:e,config:t,netlify:r,buildErrorContext:o});return process.env.AGENT_RUNNER_DEBUG&&Yr.log("Contextful Prompt:",n),{prompt:n}};var Ie=w("inference_stage"),Dt=5,xe=async e=>{let{cliPath:t,config:r,context:o,buildErrors:n,runner:s,persistSteps:a,aiGateway:l,attempt:i,contextPrefix:h,priorAgentSessionId:d}=e;Ie.log(`Running inference stage, attempt ${i} of ${Dt}`);let c=await A(Be(),"inference-stage",async m=>{m?.setAttributes({"inference.attempt":i||1}),it();let{prompt:y}=await A(Be(),"compose-prompt",async()=>await $t({cliPath:t,config:r,buildErrorContext:Br(n),netlify:o})),x=`
125
+ `),T};var Br=_("prompt"),Dt=async({cliPath:e,config:t,netlify:r,buildErrorContext:o})=>{let n=await $t({cliPath:e,config:t,netlify:r,buildErrorContext:o});return process.env.AGENT_RUNNER_DEBUG&&Br.log("Contextful Prompt:",n),{prompt:n}};var Ie=_("inference_stage"),Lt=5,xe=async e=>{let{cliPath:t,config:r,context:o,buildErrors:n,runner:s,persistSteps:a,aiGateway:l,attempt:i,contextPrefix:h,priorAgentSessionId:d}=e;Ie.log(`Running inference stage, attempt ${i} of ${Lt}`);let c=await A(He(),"inference-stage",async m=>{m?.setAttributes({"inference.attempt":i||1}),at();let{prompt:y}=await A(He(),"compose-prompt",async()=>await Dt({cliPath:t,config:r,buildErrorContext:Hr(n),netlify:o})),x=`
126
126
  ${h||""}
127
127
  ${y}
128
- `.trim(),T={...r,prompt:x},g=await A(Be(),`run-${r.runner}`,async()=>await s({aiGateway:l,config:T,netlify:o,persistSteps:a,continueSession:!!(i&&i>1),priorAgentSessionId:d}));return g.result&&(g.result=Y(g.result)),g.error&&(g.error=Y(g.error)),await a.flush(),g});if(c.error){if(Ie.error("Runner failed",{stepsCount:c.steps.length,duration:c.duration,error:c.error,isRetryableError:c.isRetryableError,attempt:i||1,agentSessionId:c.agentSessionId}),c.isRetryableError&&(!i||i<Dt))return Ie.log("Retrying inference stage"),await new Promise(y=>setTimeout(y,5e3)),{runnerResult:(await xe({...e,attempt:(i||1)+1,priorAgentSessionId:c.agentSessionId,contextPrefix:c.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw Ie.log("Do not retry inference stage"),new Error(c.error)}return{runnerResult:c}},Br=e=>!e||e.length===0?"":`
128
+ `.trim(),T={...r,prompt:x},g=await A(He(),`run-${r.runner}`,async()=>await s({aiGateway:l,config:T,netlify:o,persistSteps:a,continueSession:!!(i&&i>1),priorAgentSessionId:d}));return g.result&&(g.result=Y(g.result)),g.error&&(g.error=Y(g.error)),await a.flush(),g});if(c.error){if(Ie.error("Runner failed",{stepsCount:c.steps.length,duration:c.duration,error:c.error,isRetryableError:c.isRetryableError,attempt:i||1,agentSessionId:c.agentSessionId}),c.isRetryableError&&(!i||i<Lt))return Ie.log("Retrying inference stage"),await new Promise(y=>setTimeout(y,5e3)),{runnerResult:(await xe({...e,attempt:(i||1)+1,priorAgentSessionId:c.agentSessionId,contextPrefix:c.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw Ie.log("Do not retry inference stage"),new Error(c.error)}return{runnerResult:c}},Hr=e=>!e||e.length===0?"":`
129
129
  Deploy failed failed. Here are the errors to review on the latest build:
130
130
 
131
131
  Below are all of the logs with potential issues that we extracted. Some of them may be false positives, discern them carefully and ensure fixes are relevant.
132
132
 
133
133
  ${e.pop()}
134
- `;import qr from"process";import{getTracer as He}from"@netlify/otel";import{getTracer as Hr}from"@netlify/otel";var ce=w("deploy"),Lt=async e=>await A(Hr(),"create-preview-deploy",async t=>Kr(e,t)),Kr=async({netlify:e,hasRepo:t,skipBuild:r,message:o="Agent Preview",deploySubdomain:n,cliPath:s,filter:a},l)=>{try{let i=["deploy","--message",`"${o}"`,"--json","--draft","--verbose"];t||(ce.log("Deploy: Uploading source zip"),i.push("--upload-source-zip")),n&&i.push("--alias",n),a&&i.push("--filter",a),r?(ce.log("Deploy: Skipping build"),i.push("--no-build")):i.push("--context","deploy-preview");let h=s||"netlify";ce.log(`Running: ${h} ${i.join(" ")}`),l?.setAttributes({cmd:h,args:i});let d=await e.utils.run(h,i,{stdio:["ignore","pipe","pipe"]}),c=JSON.parse(d.stdout.trim());l?.setAttributes({success:!0,deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id}),ce.log(`
135
- Preview deploy created successfully:`,{deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id});let m={deployId:c.deploy_id,previewUrl:c.deploy_url,logsUrl:c.logs,siteId:c.site_id};return t||(m.sourceZipFilename=c.source_zip_filename),m}catch(i){throw ce.error("Failed to create preview deploy via CLI:",i),l?.setAttributes({success:!1,error:i.message}),i}};var de=w("deploy_stage"),Ke=async e=>await A(He(),"run-deploy-stage",async()=>Wr(e)),Wr=async({cliPath:e,config:t,context:r,result:o,filter:n})=>{let s=await A(He(),"get-runner-diffs",async()=>await mt({config:t,netlify:r}));if(de.info("Resolved git",{hasChanges:s.hasChanges,ignored:s.ignored??[]}),!s.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:a,resultDiff:l,diffBinary:i,resultDiffBinary:h}=s,d=!0;de.log("Preview deploy condition check:",{resultUndefined:o===void 0,resultType:typeof o,hasChanges:d,wouldCreatePreview:o!==void 0&&d});let c=null;if(o!==void 0&&d)try{let m;try{let y=await A(He(),"get-runner-session",async()=>await tt(t.id,t.sessionId));y?.title&&(m=y.title)}catch(y){de.warn("Failed to fetch session title, using fallback message:",y.message)}await j(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),c=await Lt({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:m,skipBuild:!1,deploySubdomain:ft(t.id,qr.env.SITE_NAME),filter:n})}catch(m){return de.warn("Failed to create preview deploy (continuing with agent run):",m),{diff:a,resultDiff:l,hasChanges:d,previewInfo:null,diffBinary:i,resultDiffBinary:h,deployError:m instanceof Error?m.message:String(m)}}return de.log("Git status",{hasDiff:!!a,hasChanges:d}),{diff:a,resultDiff:l,hasChanges:d,previewInfo:c,diffBinary:i,resultDiffBinary:h}};import{getTracer as Re}from"@netlify/otel";async function kt(e,t){let{maxRetries:r,baseDelay:o,onRetry:n}=t,s;for(let a=1;a<=r;a++)try{return await e()}catch(l){if(s=l,a===r)throw s;n&&n(a,s),await new Promise(i=>setTimeout(i,o*a))}throw s}var D=w("cleanup_stage"),Ut=async e=>await A(Re(),"cleanup-stage",async()=>Vr(e)),qe=1024*1024*10,Vr=async({config:e,diff:t,result:r,duration:o,resultDiff:n,diffBinary:s,resultDiffBinary:a,previewInfo:l})=>{let i={result:r||"Done",duration:o};if(l&&l.deployId&&(i.deploy_id=l.deployId),l&&l.sourceZipFilename&&(i.result_zip_file_name=l.sourceZipFilename),t||s||n||a)try{D.log("Getting pre-signed URLs for diff upload");let d=await nt(e.id,e.sessionId),c=[];(t||s)&&c.push(Ne(d.result.upload_url,s||t).then(()=>{i.result_diff_s3_key=d.result.s3_key,D.log("Successfully uploaded result_diff to S3")})),(n||a)&&c.push(Ne(d.cumulative.upload_url,a||n).then(()=>{i.cumulative_diff_s3_key=d.cumulative.s3_key,D.log("Successfully uploaded cumulative_diff to S3")})),D.log(`Uploading ${c.length} diff(s) to S3 in parallel`),await Promise.all(c),(n||a)&&(D.log("Updating agent runner with cumulative diff S3 key"),await A(Re(),"update-runner",async()=>{await le(e.id,{result_diff_s3_key:d.cumulative.s3_key})}))}catch(d){D.error("S3 upload failed, falling back to inline diffs:",d);let c=Buffer.byteLength(t||s||""),m=Buffer.byteLength(a||n||"");if(c>qe||m>qe){let y=`Diffs exceed maximum inline size of ${qe} bytes.`;throw D.error(y),new Error(y)}i.result_diff=t,i.result_diff_binary=s,(n||a)&&(i.cumulative_diff=n,i.cumulative_diff_binary=a,D.log("Updating agent runner with inline diffs (fallback)"),await A(Re(),"update-runner",async()=>{await le(e.id,{result_diff:n,result_diff_binary:a})}))}else D.log("No diffs to upload");return D.log("Updated agent runner with result"),await kt(async()=>await A(Re(),"update-runner-session",()=>j(e.id,e.sessionId,i)),{maxRetries:3,baseDelay:1e3,onRetry:(d,c)=>{D.error(`Error updating agent runner session (attempt ${d}):`,c),D.log("Retrying...")}}),D.log("Finished updating agent runner with result"),{sessionUpdate:i}};import{getTracer as Mt,shutdownTracers as Xr,withActiveSpan as Gt}from"@netlify/otel";var zr=Jr(import.meta.url),jt=zr("../package.json"),Yt=w("pipeline_index"),ve=3,Ns=async({config:e,apiToken:t,cliPath:r="netlify",cwd:o,errorLogsPath:n,filter:s,tracing:a={}})=>{let l,{withStageTimer:i}=at(W.timeUnits.hours(4)),h=await ze(jt.version,e.id,a);try{await Gt(Mt(),"run-pipeline",{},h,async()=>{let d,{aiGateway:c,context:m,persistSteps:y,runner:x,sha:T}=await i("init",()=>Ot({config:e,apiToken:t,cliPath:r,cwd:o,errorLogsPath:n,filter:s,runnerVersion:jt.version}),W.timeUnits.minutes(10));l=x.clean,e.sha=T;let{runnerResult:g}=await i("inference",()=>xe({cliPath:r,config:e,context:m,runner:x.runner,persistSteps:y,aiGateway:c}));await j(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let R=await i("deploy",()=>Ke({cliPath:r,config:e,context:m,result:g.result,filter:s})),v=g,N=[];if(R.hasChanges&&R.deployError){N.push(Ze(R.deployError));let u=1;for(;u<=ve&&!R.previewInfo;)Yt.log(`Deploy attempt had errors. Retrying. ${u}/${ve}`),await Gt(Mt(),"deploy-stage",async E=>{E?.setAttributes({"stage.attempt":u});let{runnerResult:p}=await i(`inference-retry-${u}`,()=>xe({cliPath:r,config:e,context:m,runner:x.runner,persistSteps:y,aiGateway:c,buildErrors:N,priorAgentSessionId:g.agentSessionId}));v={...p,steps:[...v.steps||[],...p.steps||[]],duration:(v.duration||0)+(p.duration||0)},R=await i(`deploy-retry-${u}`,()=>Ke({cliPath:r,config:e,context:m,result:p.result,filter:s})),R.deployError&&N.push(R.deployError),u++});u>ve&&!R.previewInfo&&(d=new Error(`Deploy validation failed after ${ve} attempts`))}let{diff:C,resultDiff:b,previewInfo:S,diffBinary:_,resultDiffBinary:f}=R;if(await i("cleanup",()=>Ut({config:e,diff:C,result:v.result,duration:v.duration,resultDiff:b,diffBinary:_,resultDiffBinary:f,previewInfo:S}),W.timeUnits.minutes(10)),d)throw d;process.env.NETLIFY_LOCAL_MODE||(await l?.(),await wt())})}catch(d){Yt.error("Got error while running pipeline",d),await l?.();let c=d instanceof Error&&d.message;throw await j(e.id,e.sessionId,{result:c||"Encountered error when running agent",state:"error"}),d}finally{await Xr()}};export{Ns as runPipeline};
134
+ `;import Wr from"process";import{getTracer as Ke}from"@netlify/otel";import{getTracer as Kr}from"@netlify/otel";var ce=_("deploy"),kt=async e=>await A(Kr(),"create-preview-deploy",async t=>qr(e,t)),qr=async({netlify:e,hasRepo:t,skipBuild:r,message:o="Agent Preview",deploySubdomain:n,cliPath:s,filter:a},l)=>{try{let i=["deploy","--message",`"${o}"`,"--json","--draft","--verbose"];t||(ce.log("Deploy: Uploading source zip"),i.push("--upload-source-zip")),n&&i.push("--alias",n),a&&i.push("--filter",a),r?(ce.log("Deploy: Skipping build"),i.push("--no-build")):i.push("--context","deploy-preview");let h=s||"netlify";ce.log(`Running: ${h} ${i.join(" ")}`),l?.setAttributes({cmd:h,args:i});let d=await e.utils.run(h,i,{stdio:["ignore","pipe","pipe"]}),c=JSON.parse(d.stdout.trim());l?.setAttributes({success:!0,deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id}),ce.log(`
135
+ Preview deploy created successfully:`,{deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id});let m={deployId:c.deploy_id,previewUrl:c.deploy_url,logsUrl:c.logs,siteId:c.site_id};return t||(m.sourceZipFilename=c.source_zip_filename),m}catch(i){throw ce.error("Failed to create preview deploy via CLI:",i),l?.setAttributes({success:!1,error:i.message}),i}};var de=_("deploy_stage"),qe=async e=>await A(Ke(),"run-deploy-stage",async()=>Vr(e)),Vr=async({cliPath:e,config:t,context:r,result:o,filter:n})=>{let s=await A(Ke(),"get-runner-diffs",async()=>await yt({config:t,netlify:r}));if(de.info("Resolved git",{hasChanges:s.hasChanges,ignored:s.ignored??[]}),!s.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:a,resultDiff:l,diffBinary:i,resultDiffBinary:h}=s,d=!0;de.log("Preview deploy condition check:",{resultUndefined:o===void 0,resultType:typeof o,hasChanges:d,wouldCreatePreview:o!==void 0&&d});let c=null;if(o!==void 0&&d)try{let m;try{let y=await A(Ke(),"get-runner-session",async()=>await rt(t.id,t.sessionId));y?.title&&(m=y.title)}catch(y){de.warn("Failed to fetch session title, using fallback message:",y.message)}await j(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),c=await kt({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:m,skipBuild:!1,deploySubdomain:gt(t.id,Wr.env.SITE_NAME),filter:n})}catch(m){return de.warn("Failed to create preview deploy (continuing with agent run):",m),{diff:a,resultDiff:l,hasChanges:d,previewInfo:null,diffBinary:i,resultDiffBinary:h,deployError:m instanceof Error?m.message:String(m)}}return de.log("Git status",{hasDiff:!!a,hasChanges:d}),{diff:a,resultDiff:l,hasChanges:d,previewInfo:c,diffBinary:i,resultDiffBinary:h}};import{getTracer as Re}from"@netlify/otel";async function Ut(e,t){let{maxRetries:r,baseDelay:o,onRetry:n}=t,s;for(let a=1;a<=r;a++)try{return await e()}catch(l){if(s=l,a===r)throw s;n&&n(a,s),await new Promise(i=>setTimeout(i,o*a))}throw s}var D=_("cleanup_stage"),Mt=async e=>await A(Re(),"cleanup-stage",async()=>Jr(e)),We=1024*1024*10,Jr=async({config:e,diff:t,result:r,duration:o,resultDiff:n,diffBinary:s,resultDiffBinary:a,previewInfo:l})=>{let i={result:r||"Done",duration:o};if(l&&l.deployId&&(i.deploy_id=l.deployId),l&&l.sourceZipFilename&&(i.result_zip_file_name=l.sourceZipFilename),t||s||n||a)try{D.log("Getting pre-signed URLs for diff upload");let d=await ot(e.id,e.sessionId),c=[];(t||s)&&c.push(Ne(d.result.upload_url,s||t).then(()=>{i.result_diff_s3_key=d.result.s3_key,D.log("Successfully uploaded result_diff to S3")})),(n||a)&&c.push(Ne(d.cumulative.upload_url,a||n).then(()=>{i.cumulative_diff_s3_key=d.cumulative.s3_key,D.log("Successfully uploaded cumulative_diff to S3")})),D.log(`Uploading ${c.length} diff(s) to S3 in parallel`),await Promise.all(c),(n||a)&&(D.log("Updating agent runner with cumulative diff S3 key"),await A(Re(),"update-runner",async()=>{await le(e.id,{result_diff_s3_key:d.cumulative.s3_key})}))}catch(d){D.error("S3 upload failed, falling back to inline diffs:",d);let c=Buffer.byteLength(t||s||""),m=Buffer.byteLength(a||n||"");if(c>We||m>We){let y=`Diffs exceed maximum inline size of ${We} bytes.`;throw D.error(y),new Error(y)}i.result_diff=t,i.result_diff_binary=s,(n||a)&&(i.cumulative_diff=n,i.cumulative_diff_binary=a,D.log("Updating agent runner with inline diffs (fallback)"),await A(Re(),"update-runner",async()=>{await le(e.id,{result_diff:n,result_diff_binary:a})}))}else D.log("No diffs to upload");return D.log("Updated agent runner with result"),await Ut(async()=>await A(Re(),"update-runner-session",()=>j(e.id,e.sessionId,i)),{maxRetries:3,baseDelay:1e3,onRetry:(d,c)=>{D.error(`Error updating agent runner session (attempt ${d}):`,c),D.log("Retrying...")}}),D.log("Finished updating agent runner with result"),{sessionUpdate:i}};import{getTracer as Gt,shutdownTracers as zr,withActiveSpan as jt}from"@netlify/otel";var Zr=Xr(import.meta.url),Yt=Zr("../package.json"),Bt=_("pipeline_index"),ve=3,Cs=async({config:e,apiToken:t,cliPath:r="netlify",cwd:o,errorLogsPath:n,filter:s,tracing:a={}})=>{let l,{withStageTimer:i}=lt(W.timeUnits.hours(4)),h=await Ze(Yt.version,e.id,a);try{await jt(Gt(),"run-pipeline",{},h,async()=>{let d,{aiGateway:c,context:m,persistSteps:y,runner:x,sha:T}=await i("init",()=>Ft({config:e,apiToken:t,cliPath:r,cwd:o,errorLogsPath:n,filter:s,runnerVersion:Yt.version}),W.timeUnits.minutes(10));l=x.clean,e.sha=T;let{runnerResult:g}=await i("inference",()=>xe({cliPath:r,config:e,context:m,runner:x.runner,persistSteps:y,aiGateway:c}));await j(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let R=await i("deploy",()=>qe({cliPath:r,config:e,context:m,result:g.result,filter:s})),v=g,N=[];if(R.hasChanges&&R.deployError){N.push(Qe(R.deployError));let u=1;for(;u<=ve&&!R.previewInfo;)Bt.log(`Deploy attempt had errors. Retrying. ${u}/${ve}`),await jt(Gt(),"deploy-stage",async E=>{E?.setAttributes({"stage.attempt":u});let{runnerResult:p}=await i(`inference-retry-${u}`,()=>xe({cliPath:r,config:e,context:m,runner:x.runner,persistSteps:y,aiGateway:c,buildErrors:N,priorAgentSessionId:g.agentSessionId}));v={...p,steps:[...v.steps||[],...p.steps||[]],duration:(v.duration||0)+(p.duration||0)},R=await i(`deploy-retry-${u}`,()=>qe({cliPath:r,config:e,context:m,result:p.result,filter:s})),R.deployError&&N.push(R.deployError),u++});u>ve&&!R.previewInfo&&(d=new Error(`Deploy validation failed after ${ve} attempts`))}let{diff:C,resultDiff:b,previewInfo:S,diffBinary:w,resultDiffBinary:f}=R;if(await i("cleanup",()=>Mt({config:e,diff:C,result:v.result,duration:v.duration,resultDiff:b,diffBinary:w,resultDiffBinary:f,previewInfo:S}),W.timeUnits.minutes(10)),d)throw d;process.env.NETLIFY_LOCAL_MODE||(await l?.(),await Tt())})}catch(d){Bt.error("Got error while running pipeline",d),await l?.();let c=d instanceof Error&&d.message;throw await j(e.id,e.sessionId,{result:c||"Encountered error when running agent",state:"error"}),d}finally{await zr()}};export{Cs as runPipeline};
136
136
  //# sourceMappingURL=index.js.map
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@netlify/agent-runner-cli",
3
3
  "type": "module",
4
- "version": "1.48.1-alpha",
4
+ "version": "1.48.2-alpha",
5
5
  "description": "CLI tool for running Netlify agents",
6
6
  "main": "./dist/index.js",
7
7
  "types": "./dist/index.d.ts",