@netlify/agent-runner-cli 1.46.0 → 1.47.0

This diff represents the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
package/dist/bin-local.js CHANGED
@@ -1,36 +1,40 @@
  #!/usr/bin/env node
- import C from"process";import Mt from"path";import Gt from"fs";import Zr from"minimist";import{createRequire as Jr}from"module";import{createTracerProvider as jt}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as We}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as Yt}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as Bt}from"@netlify/otel";import{propagation as Je,context as Xe,W3CTraceContextPropagator as Ht}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as qt}from"@opentelemetry/exporter-trace-otlp-grpc";function _(e){let t=!process.env.VITEST;return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var Ne=_("tracing"),ze=async(e,t,r)=>(await jt({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new We(new Se),new We(new qt({url:r.exporterUrl}))],instrumentations:[new Yt({skipHeaders:!0})]}),r.traceparent?(Je.setGlobalPropagator(new Ht),Je.extract(Xe.active(),{traceparent:r.traceparent,isRemote:!0})):Xe.active());function b(e,t,r){return Ne.log(`\u23F3 TRACE: ${t} starting...`),Bt(e,t,r)}var Se=class{export(t,r){for(let o of t)this.logSpan(o);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,o=t.attributes,n=[];for(let[l,s]of Object.entries(o))l.includes("duration")&&typeof s=="number"?n.push(`${l}=${s.toFixed(2)}ms`):n.push(`${l}=${s}`);let i=t.status?.code===2?"\u274C":"\u2705",a=n.length>0?` [${n.join(", ")}]`:"";Ne.log(`${i} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${a}`),t.status?.code===2&&t.status.message&&Ne.log(` \u274C Error: ${t.status.message}`)}};var Kt=["error","failed","exception","fatal","panic","abort","crash"];function Ze(e){let t=e.split(`
- `),r=[],o=-1,n=0;for(;n<t.length;){let l=t[n].slice(0,500).toLowerCase();if(Kt.some(m=>l.includes(m))){let m=Math.max(0,n-10,o+1),u=Math.min(t.length-1,n+20),c=[];for(let f=m;f<=u;f++)c.push(t[f]);r.push(c.join(`
- `)),o=u,n=u+1}else n++}if(r.length===0)return e;let i=r.map((a,l)=>`<extracted_error_chunk order="${l+1}">
+ import C from"process";import Kt from"path";import Wt from"fs";import rn from"minimist";import{createRequire as Qr}from"module";import{createTracerProvider as Vt}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as Ze}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as Jt}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as Xt}from"@netlify/otel";import{propagation as Qe,context as et,W3CTraceContextPropagator as zt}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as Zt}from"@opentelemetry/exporter-trace-otlp-grpc";function _(e){let t=!process.env.VITEST;return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var Ce=_("tracing"),tt=async(e,t,r)=>(await Vt({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new Ze(new Pe),new Ze(new Zt({url:r.exporterUrl}))],instrumentations:[new Jt({skipHeaders:!0})]}),r.traceparent?(Qe.setGlobalPropagator(new zt),Qe.extract(et.active(),{traceparent:r.traceparent,isRemote:!0})):et.active());function A(e,t,r){return Ce.log(`\u23F3 TRACE: ${t} starting...`),Xt(e,t,r)}var Pe=class{export(t,r){for(let o of t)this.logSpan(o);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,o=t.attributes,n=[];for(let[c,s]of Object.entries(o))c.includes("duration")&&typeof s=="number"?n.push(`${c}=${s.toFixed(2)}ms`):n.push(`${c}=${s}`);let i=t.status?.code===2?"\u274C":"\u2705",a=n.length>0?` [${n.join(", ")}]`:"";Ce.log(`${i} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${a}`),t.status?.code===2&&t.status.message&&Ce.log(` \u274C Error: ${t.status.message}`)}};var Qt=["error","failed","exception","fatal","panic","abort","crash"];function rt(e){let t=e.split(`
+ `),r=[],o=-1,n=0;for(;n<t.length;){let c=t[n].slice(0,500).toLowerCase();if(Qt.some(h=>c.includes(h))){let h=Math.max(0,n-10,o+1),l=Math.min(t.length-1,n+20),d=[];for(let g=h;g<=l;g++)d.push(t[g]);r.push(d.join(`
+ `)),o=l,n=l+1}else n++}if(r.length===0)return e;let i=r.map((a,c)=>`<extracted_error_chunk order="${c+1}">
  ${a}
  </extracted_error_chunk>`).join(`
 
- `);return i.length>e.length*.8?e:i}import Te from"process";import{getTracer as Or}from"@netlify/otel";import le from"process";var pe=le.env.NETLIFY_API_URL,fe=le.env.NETLIFY_API_TOKEN,H=_("api"),be=()=>le.env.NETLIFY_LOCAL_MODE==="true",ge=async(e,t={})=>{if(!pe||!fe)throw new Error("No API URL or token");let r=new URL(e,pe),o={...t,headers:{...t.headers,Authorization:`Bearer ${fe}`}};le.env.AGENT_RUNNERS_DEBUG==="true"&&(o.headers["x-nf-debug-logging"]="true"),t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),i=n.ok&&n.status<=299;if(le.env.AGENT_RUNNERS_DEBUG==="true")H.log(`Response headers for ${r}:`),n.headers.forEach((l,s)=>{H.log(` ${s}: ${l}`)});else{let l=n.headers.get("x-request-id")||n.headers.get("x-nf-request-id");H.log(`Request ID for ${r}: ${l||"N/A"}`)}if(i||H.error(`Got status ${n.status} for request ${r}`),t.raw){if(!i)throw n;return n}let a=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!i)throw a;return a},Qe=e=>{H.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(pe=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(fe=e.constants.NETLIFY_API_TOKEN)},et=()=>({apiUrl:pe,token:fe}),me=async(e,t)=>be()?(H.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):ge(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),G=async(e,t,r)=>be()?(H.log("Mock API: updateRunnerSession called",{runnerId:e,sessionId:t,data:r}),{id:e,sessionId:t,...r}):ge(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var tt=async(e,t)=>be()?(H.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):ge(`/api/v1/agent_runners/${e}/sessions/${t}`),rt=(e,t,r)=>ge(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r}});var Z=_("ai_gateway"),Ce=null;var nt=async()=>{if(Ce)return Ce;Z.log("Fetching available AI gateway providers");let e=await fetch(`${et().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return Ce=t,Z.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},Vt=async(e,t)=>{let o=(await nt()).providers[e];if(!o)return Z.log(`Provider '${e}' not found`),!1;let n=o.models.includes(t);return Z.log(`Model validation for ${e}/${t}`,{isAvailable:n}),n},ot=async({netlify:e,config:t})=>{let r,o,n,i,a=e.constants?.SITE_ID;if(!a)throw new Error("No site id");let l=async()=>{clearTimeout(n),Z.log("Requesting AI gateway information");let s=await rt(a,t.id,t.sessionId);if({token:r,url:i}=s,o=s.expires_at?s.expires_at*1e3:void 0,Z.log("Got AI gateway information",{token:!!r,expiresAt:o,url:i}),o){let m=o-Date.now()-6e4;m>0&&(n=setTimeout(()=>{l()},m))}};return await Promise.all([l(),nt()]),{get url(){return i},get token(){return r},isModelAvailableForProvider:Vt}};import Y from"process";import he from"path";import Pe from"fs";import{fileURLToPath as Qt}from"url";import{execa as er,execaCommand as An}from"execa";import{Transform as Wt}from"stream";var Jt=new 
Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","ERROR_LOGS_PATH","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),Xt=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function zt(){return Object.entries(process.env).filter(([e,t])=>!(!t||Jt.has(e)||Xt.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function j(e){if(typeof e!="string")return e;let t=zt();if(t.length===0)return e;let r=e;return t.forEach(o=>{let n=new RegExp(Zt(o),"g");r=r.replace(n,"******")}),r}function Zt(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var Q=class extends Wt{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,o){let n=t.toString(),i=j(n);o(null,i)}};function it(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(o,n,i){let a=typeof o=="string"?j(o):o;return typeof n=="function"?t(a,n):t(a,n,i)},process.stderr.write=function(o,n,i){let a=typeof o=="string"?j(o):o;return typeof n=="function"?r(a,n):r(a,n,i)}}var ue=null,st=e=>(ue&&ue.destroy(),ue=new K({totalAllowedTime:e}),ue),at=()=>ue;var K=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,o)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let n=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),i=null,a=null;o!==void 0&&(a=new Promise((l,s)=>{i=setTimeout(()=>{s(new Error(`${t} stage exceeded its maximum duration of ${o}ms`))},o)}));try{return a?await Promise.race([r(),a]):await r()}finally{n(),i&&clearTimeout(i)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var tr=Qt(import.meta.url),rr=he.dirname(tr),ye=_("shell"),Oe=new Set,nr={preferLocal:!0},Ee=(e,t,r)=>{let[o,n]=or(t,r),i={...nr,...n},a=er(e,o,i);return ir(a,i),ar(a),a};var or=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},ir=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(Y.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new Q).pipe(Y.stdout),e.stdout?.pipe(new Q).pipe(Y.stdout),e.stderr?.pipe(new Q).pipe(Y.stderr);return}e.stdout?.pipe(Y.stdout),e.stderr?.pipe(Y.stderr)},lt=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(Y.kill(-e.pid,t),ye.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return ye.error("Error killing process:",r),!1}},sr=e=>lt(e,"SIGKILL"),ar=e=>{Oe.add(e);let t=at();if(t){let r=t.onTimesUp(()=>{ye.log(`Global timer expired, killing process ${e.pid}`),lt(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(ye.log(`Force killing process ${e.pid} after timeout`),sr(e))},5e3)});e.on("exit",()=>{Oe.delete(e),r()}),e.on("error",()=>{Oe.delete(e),r()})}};function _e(e,t){return!!V(e,t)}function V(e,t){if(Y.env.NODE_PATH){let n=he.join(Y.env.NODE_PATH,".bin",t);if(Pe.existsSync(n))return n}let r=he.join(e,"node_modules",".bin",t);if(Pe.existsSync(r))return r;let o=he.join(rr,"..","node_modules",".bin",t);if(Pe.existsSync(o))return o}var ut="netlify-agent-runner-context.md",$e="task-history",De="netlify-context",F=".netlify",ee="results.md",Le="assets";var lr=_("utils"),ur=e=>new Promise(t=>{setTimeout(t,e)}),ct=(e,t=3e3)=>{let r=!1,o=null,n=[],i=null,a=(...l)=>{if(r)return o=l,new Promise(u=>{n.push(u)});r=!0;let s,m=new Promise(u=>{s=u});return i=(async()=>{await Promise.resolve();let u=await e(...l);for(s(u);;){if(await ur(t),!o)return r=!1,i=null,u;let c=o,f=n;o=null,n=[],u=await e(...c),f.forEach(E=>{E(u)})}})(),m};return a.flush=async()=>{if((r||o)&&i)return await 
i,a.flush()},a},we=(e,t,r=!1)=>{let o=null,n=null,i=null,a=function(...l){n=l,i=this;let s=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(i,n),n=null,i=null)},t),s&&(e.apply(i,n),n=null,i=null)};return a.cancel=()=>{clearTimeout(o),o=null,n=null,i=null},a.flush=()=>{if(o){clearTimeout(o);let l=n,s=i;o=null,n=null,i=null,e.apply(s,l)}},a},dt=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(o){t&&(r?.error?r.error("Could not parse JSON",o):lr.error("Could not parse JSON",o))}},pt=(e,t)=>{let n=".netlify.app",i="agent-";if(!t)return`${i}${e.slice(0,6)}`;let l=`--${t}${n}`;if(l.length>55)return"";let s=60-l.length;if(s<=0)return"";if(s>=i.length+6){let m=Math.min(s-i.length,e.length);return`${i}${e.slice(0,m)}`}return e.slice(0,s)};import{Buffer as ft}from"buffer";import cr from"path";var gt=async({config:e,netlify:t})=>{let r=await pr(t),{hasChanges:o}=r,{status:n}=r;if(!o)return{hasChanges:!1};let i=await fr(n);await t.utils.run("git",["add",".",...i]);let a={stdio:["ignore","pipe","pipe"]},s=(await t.utils.run("git",["diff","--staged"],a)).stdout;if(o=!!s,!o)return{hasChanges:!1,ignored:i};let u=(await t.utils.run("git",["diff","--staged","--binary"],a)).stdout,c,f;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await t.utils.run("git",["commit","-m","Agent runner"]),c=(await t.utils.run("git",["diff",e.sha,"HEAD"],a)).stdout;let g=(await t.utils.run("git",["diff",e.sha,"HEAD","--binary"],a)).stdout;c!==g&&(f=ft.from(g).toString("base64"))}let E={hasChanges:!0,diff:s,resultDiff:c,ignored:i};return s!==u&&(E.diffBinary=ft.from(u).toString("base64")),f&&(E.resultDiffBinary=f),E},dr=["?? mise.toml","?? deno.lock",/\?\? .+?\.log/],pr=async e=>{let t=await e.utils.run("git",["status","-s"]);return{hasChanges:(t.stdout.trim().length===0?[]:t.stdout.split(`
- `).filter(n=>!dr.some(i=>i instanceof RegExp?i.test(n):n===i))).length!==0,status:t.stdout}};var mt=async e=>{let{stdout:t}=await e.utils.run("git",["rev-parse","HEAD"]);return t.trim()},ht=async e=>{let{stdout:t}=await e.utils.run("git",["rev-list","--max-parents=0","HEAD"]);return t.trim()},fr=async e=>{let t=[".netlify","mise.toml","deno.lock","node_modules"],r=[];return e.split(`
- `).forEach(o=>{t.forEach(i=>{[`?? ${i}`,`?? ${i}${cr.sep}`].some(l=>o.startsWith(l))&&r.push(`:!${i}`)});let n=o.match(/\?\? (.+?)\.log$/)?.[1];n&&r.push(`:!${n}.log`)}),r};import mr from"fs/promises";import hr from"os";import Et from"path";import W from"process";import yr from"readline";import Fe from"path";import gr from"fs/promises";var ke=_("agent-output-utils");async function te({initialResult:e,agentName:t,hasError:r}){let o="",n=Fe.join(process.cwd(),F,ee);try{let i=await gr.readFile(n,"utf-8");i&&(o=i,ke.log(`Pulled result from ${Fe.relative(process.cwd(),n)}`))}catch{ke.log(`No results file found at ${Fe.relative(process.cwd(),n)}`)}return o||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function re({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,o=r?.replace(/\s+/g," ").trim().toLowerCase()||"",n="";return o?.includes("ai gateway is not available for your account")||o?.includes("ai gateway is not enabled for your account")?n="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":o?.includes("error when talking to gemini api")?n="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(o?.includes("connection closed prematurely")||o?.includes("499")&&t.toLowerCase().includes("gemini"))&&(n=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),o?.includes("request timed out")&&(n=`The ${t} API request's have timed out. Please try again or use a different available agent.`),o?.includes("network error")&&(n=`The ${t} agent is having network issues. 
Please try again or use a different available agent.`),n&&ke.log(`Providing updated error messsage: ${n}, replacing original error: ${r}`),n||r||void 0}function ne(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error"))}var k=_("runner_claude"),yt="Claude Code",Ie="claude-sonnet-4-5-20250929",Er=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(k.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(k.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(k.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function Ue({config:e,netlify:t,persistSteps:r,aiGateway:o,continueSession:n,priorAgentSessionId:i}){let a=e,{accountType:l,prompt:s,modelVersionOverrides:m}=a,{model:u}=a,c="";if(o){let{token:p,url:T}=o;if(!p||!T)throw new Error("No token or url provided from AI Gateway");if(m?.claude){let d=m?.claude?.[l];if(d){if(!await o.isModelAvailableForProvider("anthropic",d))throw new Error(`Model override '${d}' is not available for anthropic provider`);u=d}}else if(u){if(!await o.isModelAvailableForProvider("anthropic",u))throw new Error(`Model '${u}' is not available for anthropic provider`)}else await o.isModelAvailableForProvider("anthropic",Ie)?(u=Ie,k.log(`Using default model: ${Ie}`)):k.log(`Default model ${Ie} is not available, proceeding without model specification`);W.env.ANTHROPIC_API_KEY=p,W.env.ANTHROPIC_BASE_URL=T}else if(!W.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let f=[],E=[],x={},I=0,g=0,A,R,S=[V(W.cwd(),"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose",...u?["--model",u]:[],...n?["--continue"]:[],...n&&i?["--resume",i]:[],"-p",s],v=`${W.env.NVM_BIN}/node`;k.log(`Running ${v} ${S.join(" ")}`);let y=t.utils.run(v,S,{all:!0,env:W.env});y.stdin?.end();let N=we(()=>{r?.({steps:f,duration:g})},250),w=(p,T)=>{let d={...p,id:I};I+=1,E.push(d),f.push(d),T||N.flush(),N(),T&&N.flush()},h=yr.createInterface({input:y.all});return h.on("error",p=>{k.error("Readline interface error",{error:p.message,stack:p.stack})}),h.on("line",p=>{let T=null;try{T=JSON.parse(p)}catch{k.log("Could not parse line",p)}T?.session_id&&T.session_id!==c&&(c=T.session_id),Array.isArray(T?.message?.content)?T.message.content.forEach(d=>{switch(d.type){case"text":{d.text&&w({message:d.text});break}case"image":{typeof d.source=="object"&&d.source&&d.source.type==="base64"&&d.source.media_type?w({message:`![](data:${d.source.media_type};base64,${d.source.data})`}):k.log(`Unsupported image type ${d.source?.type}`,d.source);break}case"tool_use":{if(d.name==="Task"){let P=d.input?.description&&`\`${d.input.description}\``;w({title:[d.name,P].filter(Boolean).join(" ")})}else d.id&&(x[d.id]=d);N.flush();break}case"tool_result":{let P=d.tool_use_id?x[d.tool_use_id]:void 0,ae;if(P){let z=P.input?.file_path&&Et.relative(W.cwd(),P.input.file_path),O=z&&`\`${z}\``;ae=[P.name,O].filter(Boolean).join(" ")}let 
Ve=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(P?.name||""),B;if(typeof d.content=="string")B=d.content;else if(Array.isArray(d.content)){let z=[];d.content.forEach(O=>{O?.type==="text"&&typeof O.text=="string"?z.push(O.text):O?.type==="image"&&typeof O.source=="object"&&O.source?O.source.type==="base64"&&O.source.media_type?z.push(`![](data:${O.source.media_type};base64,${O.source.data})`):k.log(`Unsupported image type ${O.source.type}`,O.source):k.log(`Unsupported block type ${O?.type}`)}),B=z.join(`
-
- `)}Ve&&B&&(B=`\`\`\`
- ${B.trim()}
- \`\`\``),w({title:ae,message:B},!0);break}case"thinking":{d.thinking&&w({title:"Thinking",message:d.thinking},!0);break}default:k.log(`Message content type is not supported ${d.type}`,d)}}):T?.type==="result"&&(g=T.duration_ms||0,T.is_error?R=T.result:A=T.result,[E,f].forEach(d=>{d[d.length-1]?.message===A&&d.pop()}))}),await y.catch(p=>{({error:R,result:A}=Er({catchError:p,runCmd:y,error:R,result:A,runnerName:"Claude"}))}),h.close(),N.flush(),{steps:E,duration:g,result:await te({initialResult:A,agentName:yt,hasError:!!R}),error:re({error:R,agentName:yt}),isRetryableError:ne(R),agentSessionId:c}}var _t=async()=>{let e=Et.join(hr.homedir(),".claude");await mr.rm(e,{recursive:!0,force:!0})};import _r from"fs/promises";import wr from"os";import Ir from"path";import oe from"process";import Tr from"readline";var J=_("runner_codex"),wt="Codex CLI",xr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(J.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(J.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(J.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function Me({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:i,prompt:a,modelVersionOverrides:l}=e,{model:s}=e;if(n){let{token:v,url:y}=n;if(!v||!y)throw new Error("No token or url provided from AI Gateway");if(l?.codex){let N=l?.codex?.[i];if(N){if(!await n.isModelAvailableForProvider("openai",N))throw new Error(`Model override '${N}' is not available for openai provider`);s=N}}else if(s&&!await n.isModelAvailableForProvider("openai",s))throw new Error(`Model '${s}' is not available for openai provider`);oe.env.OPENAI_API_KEY=v,oe.env.OPENAI_BASE_URL=y}else if(!oe.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let m=[],u=[],c={},f=0,E=0,x,I,g=[V(oe.cwd(),"codex"),"--approval-mode","full-auto",n?"--disable-response-storage":void 0,"--dangerously-auto-approve-everything",...s?["--model",s]:[],"-q",a].filter(Boolean),A=`${oe.env.NVM_BIN}/node`;J.log(`Running ${A} ${g.join(" ")}`);let R=t.utils.run(A,g,{all:!0,env:{...oe.env,CODEX_UNSAFE_ALLOW_NO_SANDBOX:"1"}}),S=Tr.createInterface({input:R.all});return S.on("error",v=>{J.error("Readline interface error",{error:v.message,stack:v.stack})}),S.on("line",v=>{let y=null;try{y=JSON.parse(v)}catch{J.log("Could not parse line",v);return}let N=[],w=!1;if(y?.duration_ms&&(E=y.duration_ms,w=!0),y?.type==="local_shell_call")c[y.call_id]=y;else if(y?.type==="local_shell_call_output"){let h=Rr(c[y.call_id],y);h&&(h.id=f,f+=1,u.push(h),m.push(h),N.push(h),w=!0)}else y?.type==="message"&&y.role==="assistant"?x=y.content.map(h=>h.text).join(`
- `):y?.type==="message"&&y.role==="system"&&(I=y.content.map(h=>h.text).join(`
- `));w&&(r?.({steps:m,duration:E}),o?.({steps:N,duration:E}))}),await R.catch(v=>{let y=xr({catchError:v,runCmd:R,error:I,result:x,runnerName:"Codex"});I=y.error,x=y.result}),S.close(),{steps:u,duration:E,result:await te({initialResult:x,agentName:wt,hasError:!!I}),error:re({error:I,agentName:wt}),isRetryableError:ne(I)}}var It=async()=>{let e=Ir.join(wr.homedir(),".codex");await _r.rm(e,{recursive:!0,force:!0})},vr=new Set(["bash","-lc"]),Rr=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(i=>!vr.has(i)),o=r?`Running \`${r.join(" ")}\``:void 0,n;try{n=JSON.parse(t.output).output?.trim(),n&&(n=`\`\`\`
+ `);return i.length>e.length*.8?e:i}import ve from"process";import{getTracer as Lr}from"@netlify/otel";import le from"process";var ge=le.env.NETLIFY_API_URL,me=le.env.NETLIFY_API_TOKEN,j=_("api"),he=()=>le.env.NETLIFY_LOCAL_MODE==="true",ue=async(e,t={})=>{if(!ge||!me)throw new Error("No API URL or token");let r=new URL(e,ge),o={...t,headers:{...t.headers,Authorization:`Bearer ${me}`}};le.env.AGENT_RUNNERS_DEBUG==="true"&&(o.headers["x-nf-debug-logging"]="true"),t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),i=n.ok&&n.status<=299;if(le.env.AGENT_RUNNERS_DEBUG==="true")j.log(`Response headers for ${r}:`),n.headers.forEach((c,s)=>{j.log(` ${s}: ${c}`)});else{let c=n.headers.get("x-request-id")||n.headers.get("x-nf-request-id");j.log(`Request ID for ${r}: ${c||"N/A"}`)}if(i||j.error(`Got status ${n.status} for request ${r}`),t.raw){if(!i)throw n;return n}let a=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!i)throw a;return a},nt=e=>{j.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(ge=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(me=e.constants.NETLIFY_API_TOKEN)},ot=()=>({apiUrl:ge,token:me}),ce=async(e,t)=>he()?(j.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):ue(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),B=async(e,t,r)=>he()?(j.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):ue(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var it=async(e,t)=>he()?(j.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):ue(`/api/v1/agent_runners/${e}/sessions/${t}`),st=(e,t,r)=>ue(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r}}),at=async(e,t)=>he()?(j.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):ue(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),Oe=async(e,t)=>{j.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var ee=_("ai_gateway"),$e=null;var lt=async()=>{if($e)return $e;ee.log("Fetching available AI gateway providers");let e=await fetch(`${ot().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return $e=t,ee.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},er=async(e,t)=>{let o=(await lt()).providers[e];if(!o)return ee.log(`Provider '${e}' not found`),!1;let n=o.models.includes(t);return ee.log(`Model validation for ${e}/${t}`,{isAvailable:n}),n},ut=async({netlify:e,config:t})=>{let r,o,n,i,a=e.constants?.SITE_ID;if(!a)throw new Error("No site id");let c=async()=>{clearTimeout(n),ee.log("Requesting AI gateway information");let s=await st(a,t.id,t.sessionId);if({token:r,url:i}=s,o=s.expires_at?s.expires_at*1e3:void 0,ee.log("Got AI gateway information",{token:!!r,expiresAt:o,url:i}),o){let 
h=o-Date.now()-6e4;h>0&&(n=setTimeout(()=>{c()},h))}};return await Promise.all([c(),lt()]),{get url(){return i},get token(){return r},isModelAvailableForProvider:er}};import q from"process";import ye from"path";import ke from"fs";import{fileURLToPath as sr}from"url";import{execa as ar,execaCommand as Cn}from"execa";import{Transform as tr}from"stream";var rr=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","ERROR_LOGS_PATH","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),nr=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function or(){return Object.entries(process.env).filter(([e,t])=>!(!t||rr.has(e)||nr.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function H(e){if(typeof e!="string")return e;let t=or();if(t.length===0)return e;let r=e;return t.forEach(o=>{let n=new RegExp(ir(o),"g");r=r.replace(n,"******")}),r}function ir(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var te=class extends tr{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,o){let n=t.toString(),i=H(n);o(null,i)}};function ct(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(o,n,i){let a=typeof o=="string"?H(o):o;return typeof n=="function"?t(a,n):t(a,n,i)},process.stderr.write=function(o,n,i){let a=typeof o=="string"?H(o):o;return typeof n=="function"?r(a,n):r(a,n,i)}}var de=null,dt=e=>(de&&de.destroy(),de=new X({totalAllowedTime:e}),de),pt=()=>de;var X=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,o)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let n=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),i=null,a=null;o!==void 0&&(a=new Promise((c,s)=>{i=setTimeout(()=>{s(new Error(`${t} stage exceeded its maximum duration of ${o}ms`))},o)}));try{return a?await Promise.race([r(),a]):await r()}finally{n(),i&&clearTimeout(i)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var lr=sr(import.meta.url),ur=ye.dirname(lr),Ee=_("shell"),De=new Set,cr={preferLocal:!0},_e=(e,t,r)=>{let[o,n]=dr(t,r),i={...cr,...n},a=ar(e,o,i);return pr(a,i),gr(a),a};var dr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},pr=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(q.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new te).pipe(q.stdout),e.stdout?.pipe(new te).pipe(q.stdout),e.stderr?.pipe(new te).pipe(q.stderr);return}e.stdout?.pipe(q.stdout),e.stderr?.pipe(q.stderr)},ft=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(q.kill(-e.pid,t),Ee.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return Ee.error("Error killing process:",r),!1}},fr=e=>ft(e,"SIGKILL"),gr=e=>{De.add(e);let t=pt();if(t){let r=t.onTimesUp(()=>{Ee.log(`Global timer expired, killing process ${e.pid}`),ft(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(Ee.log(`Force killing process ${e.pid} after timeout`),fr(e))},5e3)});e.on("exit",()=>{De.delete(e),r()}),e.on("error",()=>{De.delete(e),r()})}};function we(e,t){return!!V(e,t)}function V(e,t){if(q.env.NODE_PATH){let n=ye.join(q.env.NODE_PATH,".bin",t);if(ke.existsSync(n))return n}let r=ye.join(e,"node_modules",".bin",t);if(ke.existsSync(r))return r;let o=ye.join(ur,"..","node_modules",".bin",t);if(ke.existsSync(o))return o}var gt="netlify-agent-runner-context.md",Le="task-history",Fe="netlify-context",F=".netlify",re="results.md",Ue="assets";var mr=_("utils"),hr=e=>new Promise(t=>{setTimeout(t,e)}),mt=(e,t=3e3)=>{let r=!1,o=null,n=[],i=null,a=(...c)=>{if(r)return o=c,new Promise(l=>{n.push(l)});r=!0;let s,h=new Promise(l=>{s=l});return i=(async()=>{await Promise.resolve();let l=await e(...c);for(s(l);;){if(await hr(t),!o)return r=!1,i=null,l;let d=o,g=n;o=null,n=[],l=await e(...d),g.forEach(E=>{E(l)})}})(),h};return a.flush=async()=>{if((r||o)&&i)return await 
i,a.flush()},a},Ie=(e,t,r=!1)=>{let o=null,n=null,i=null,a=function(...c){n=c,i=this;let s=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(i,n),n=null,i=null)},t),s&&(e.apply(i,n),n=null,i=null)};return a.cancel=()=>{clearTimeout(o),o=null,n=null,i=null},a.flush=()=>{if(o){clearTimeout(o);let c=n,s=i;o=null,n=null,i=null,e.apply(s,c)}},a},ht=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(o){t&&(r?.error?r.error("Could not parse JSON",o):mr.error("Could not parse JSON",o))}},yt=(e,t)=>{let n=".netlify.app",i="agent-";if(!t)return`${i}${e.slice(0,6)}`;let c=`--${t}${n}`;if(c.length>55)return"";let s=60-c.length;if(s<=0)return"";if(s>=i.length+6){let h=Math.min(s-i.length,e.length);return`${i}${e.slice(0,h)}`}return e.slice(0,s)};import{Buffer as Et}from"buffer";import yr from"path";var _t=async({config:e,netlify:t})=>{let r=await _r(t),{hasChanges:o}=r,{status:n}=r;if(!o)return{hasChanges:!1};let i=await wr(n);await t.utils.run("git",["add",".",...i]);let a={stdio:["ignore","pipe","pipe"]},s=(await t.utils.run("git",["diff","--staged"],a)).stdout;if(o=!!s,!o)return{hasChanges:!1,ignored:i};let l=(await t.utils.run("git",["diff","--staged","--binary"],a)).stdout,d,g;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await t.utils.run("git",["commit","-m","Agent runner"]),d=(await t.utils.run("git",["diff",e.sha,"HEAD"],a)).stdout;let m=(await t.utils.run("git",["diff",e.sha,"HEAD","--binary"],a)).stdout;d!==m&&(g=Et.from(m).toString("base64"))}let E={hasChanges:!0,diff:s,resultDiff:d,ignored:i};return s!==l&&(E.diffBinary=Et.from(l).toString("base64")),g&&(E.resultDiffBinary=g),E},Er=["?? mise.toml","?? deno.lock","?? .npmrc",/\?\? .+?\.log/],_r=async e=>{let t=await e.utils.run("git",["status","-s"]);return{hasChanges:(t.stdout.trim().length===0?[]:t.stdout.split(`
+ `).filter(n=>!Er.some(i=>i instanceof RegExp?i.test(n):n===i))).length!==0,status:t.stdout}};var wt=async e=>{let{stdout:t}=await e.utils.run("git",["rev-parse","HEAD"]);return t.trim()},It=async e=>{let{stdout:t}=await e.utils.run("git",["rev-list","--max-parents=0","HEAD"]);return t.trim()},wr=async e=>{let t=[".netlify","mise.toml","deno.lock",".npmrc","node_modules"],r=[];return e.split(`
+ `).forEach(o=>{t.forEach(i=>{[`?? ${i}`,`?? ${i}${yr.sep}`].some(c=>o.startsWith(c))&&r.push(`:!${i}`)});let n=o.match(/\?\? (.+?)\.log$/)?.[1];n&&r.push(`:!${n}.log`)}),r};import Tr from"fs/promises";import xr from"os";import vt from"path";import z from"process";import vr from"readline";import Me from"path";import Ir from"fs/promises";var Ge=_("agent-output-utils");async function ne({initialResult:e,agentName:t,hasError:r}){let o="",n=Me.join(process.cwd(),F,re);try{let i=await Ir.readFile(n,"utf-8");i&&(o=i,Ge.log(`Pulled result from ${Me.relative(process.cwd(),n)}`))}catch{Ge.log(`No results file found at ${Me.relative(process.cwd(),n)}`)}return o||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function oe({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,o=r?.replace(/\s+/g," ").trim().toLowerCase()||"",n="";return o?.includes("ai gateway is not available for your account")||o?.includes("ai gateway is not enabled for your account")?n="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":o?.includes("error when talking to gemini api")?n="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(o?.includes("connection closed prematurely")||o?.includes("499")&&t.toLowerCase().includes("gemini"))&&(n=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),o?.includes("request timed out")&&(n=`The ${t} API request's have timed out. Please try again or use a different available agent.`),o?.includes("network error")&&(n=`The ${t} agent is having network issues. 
Please try again or use a different available agent.`),n&&Ge.log(`Providing updated error messsage: ${n}, replacing original error: ${r}`),n||r||void 0}function ie(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error"))}var U=_("runner_claude"),Tt="Claude Code",Te="claude-sonnet-4-5-20250929",xt=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,Rr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(U.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(U.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(U.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function je({config:e,netlify:t,persistSteps:r,aiGateway:o,continueSession:n,priorAgentSessionId:i}){let a=e,{accountType:c,prompt:s,modelVersionOverrides:h}=a,{model:l}=a,d="";if(o){let{token:u,url:y}=o;if(!u||!y)throw new Error("No token or url provided from AI Gateway");if(h?.claude){let p=h?.claude?.[c];if(p){if(!await o.isModelAvailableForProvider("anthropic",p))throw new Error(`Model override '${p}' is not available for anthropic provider`);l=p}}else if(l){if(!await o.isModelAvailableForProvider("anthropic",l))throw new Error(`Model '${l}' is not available for anthropic provider`)}else await o.isModelAvailableForProvider("anthropic",Te)?(l=Te,U.log(`Using default model: ${Te}`)):U.log(`Default model ${Te} is not available, proceeding without model specification`);z.env.ANTHROPIC_API_KEY=u,z.env.ANTHROPIC_BASE_URL=y}else if(!z.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let g=[],E=[],T={},I=0,m=0,v,R,N=[V(z.cwd(),"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose",...l?["--model",l]:[],...n?["--continue"]:[],...n&&i?["--resume",i]:[],"-p",s],P=`${z.env.NVM_BIN}/node`;U.log(`Running ${P} ${N.join(" ")}`);let b=t.utils.run(P,N,{all:!0,env:z.env});b.stdin?.end();let S=Ie(()=>{r?.({steps:g,duration:m})},250),w=(u,y)=>{let p={...u,id:I};I+=1,E.push(p),g.push(p),y||S.flush(),S(),y&&S.flush()},f=vr.createInterface({input:b.all});return f.on("error",u=>{U.error("Readline interface error",{error:u.message,stack:u.stack})}),f.on("line",u=>{let y=null;try{y=JSON.parse(u)}catch{U.log("Could not parse line",u)}y?.session_id&&y.session_id!==d&&(d=y.session_id),Array.isArray(y?.message?.content)?y.message.content.forEach(p=>{switch(p.type){case"text":{p.text&&w({message:p.text});break}case"image":{typeof p.source=="object"&&p.source&&p.source.type==="base64"&&p.source.media_type?w({message:`![](data:${p.source.media_type};base64,${p.source.data})`}):U.log(`Unsupported image type 
${p.source?.type}`,p.source);break}case"tool_use":{if(p.name==="Task"){let x=p.input?.description&&`\`${p.input.description}\``;w({title:[xt(p.name),x].filter(Boolean).join(" ")})}else p.id&&(T[p.id]=p);S.flush();break}case"tool_result":{let x=p.tool_use_id?T[p.tool_use_id]:void 0,ae;if(x){let Q=x.input?.file_path&&vt.relative(z.cwd(),x.input.file_path),O=Q&&`\`${Q}\``;ae=[xt(x.name||""),O].filter(Boolean).join(" ")}let ze=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(x?.name||""),W;if(typeof p.content=="string")W=p.content;else if(Array.isArray(p.content)){let Q=[];p.content.forEach(O=>{O?.type==="text"&&typeof O.text=="string"?Q.push(O.text):O?.type==="image"&&typeof O.source=="object"&&O.source?O.source.type==="base64"&&O.source.media_type?Q.push(`![](data:${O.source.media_type};base64,${O.source.data})`):U.log(`Unsupported image type ${O.source.type}`,O.source):U.log(`Unsupported block type ${O?.type}`)}),W=Q.join(`
+
+ `)}ze&&W&&(W=`\`\`\`
+ ${W.trim()}
+ \`\`\``),w({title:ae,message:W},!0);break}case"thinking":{p.thinking&&w({title:"Thinking",message:p.thinking},!0);break}default:U.log(`Message content type is not supported ${p.type}`,p)}}):y?.type==="result"&&(m=y.duration_ms||0,y.is_error?R=y.result:v=y.result,[E,g].forEach(p=>{p[p.length-1]?.message===v&&p.pop()}))}),await b.catch(u=>{({error:R,result:v}=Rr({catchError:u,runCmd:b,error:R,result:v,runnerName:"Claude"}))}),f.close(),S.flush(),{steps:E,duration:m,result:await ne({initialResult:v,agentName:Tt,hasError:!!R}),error:oe({error:R,agentName:Tt}),isRetryableError:ie(R),agentSessionId:d}}var Rt=async()=>{let e=vt.join(xr.homedir(),".claude");await Tr.rm(e,{recursive:!0,force:!0})};import xe from"fs/promises";import At from"os";import Ye from"path";import K from"process";import Sr from"readline";var M=_("runner_codex"),St="Codex CLI",Ar=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(M.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(M.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(M.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function Be({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:i,prompt:a,modelVersionOverrides:c}=e,{model:s}=e;if(n){let{token:f,url:u}=n;if(!f||!u)throw new Error("No token or url provided from AI Gateway");if(c?.codex){let y=c?.codex?.[i];if(y){if(!await n.isModelAvailableForProvider("openai",y))throw new Error(`Model override '${y}' is not available for openai provider`);s=y}}else if(s&&!await n.isModelAvailableForProvider("openai",s))throw new Error(`Model '${s}' is not available for openai provider`);K.env.OPENAI_API_KEY=f,K.env.OPENAI_BASE_URL=u}else if(!K.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let h=[],l=[],d={},g=0,E=0,T,I,m=`${K.env.NVM_BIN}/node`,v=Ye.join(At.homedir(),".codex"),R=Ye.join(v,"config.toml");try{await xe.mkdir(v,{recursive:!0});let f="";try{f=await xe.readFile(R,"utf-8")}catch{}f.includes("web_search")||(f.includes("[tools]")?f=f.replace(/\[tools\]/,`[tools]
+ web_search = true`):f+=`
+ [tools]
+ web_search = true
+ `,await xe.writeFile(R,f,"utf-8"),M.log("Updated Codex config with web_search enabled"))}catch(f){M.warn("Failed to update Codex config",{error:f.message})}let N=[V(K.cwd(),"codex"),"login","--with-api-key"];M.log(`Running ${m} ${N.join(" ")}`);let P=t.utils.run(m,N,{input:K.env.OPENAI_API_KEY,env:{...K.env}});try{await P,M.log("Successfully logged in to Codex")}catch(f){throw M.error("Failed to login to Codex",{error:f.message}),new Error(`Codex login failed: ${f.message}`)}let b=[V(K.cwd(),"codex"),"exec","--yolo","--json","--config","web_search=true",...s?["--model",s]:[],a].filter(Boolean);M.log(`Running ${m} ${b.join(" ")}`);let S=t.utils.run(m,b,{all:!0,env:{...K.env}}),w=Sr.createInterface({input:S.all});return w.on("error",f=>{M.error("Readline interface error",{error:f.message,stack:f.stack})}),w.on("line",f=>{let u=null;try{u=JSON.parse(f)}catch{M.log("Could not parse line",f);return}let y=[],p=!1;if(u?.duration_ms&&(E=u.duration_ms,p=!0),u?.type==="local_shell_call")d[u.call_id]=u;else if(u?.type==="local_shell_call_output"){let x=br(d[u.call_id],u);x&&(x.id=g,g+=1,l.push(x),h.push(x),y.push(x),p=!0)}else u?.type==="message"&&u.role==="assistant"?T=u.content.map(x=>x.text).join(`
+ `):u?.type==="message"&&u.role==="system"&&(I=u.content.map(x=>x.text).join(`
+ `));p&&(r?.({steps:h,duration:E}),o?.({steps:y,duration:E}))}),await S.catch(f=>{let u=Ar({catchError:f,runCmd:S,error:I,result:T,runnerName:"Codex"});I=u.error,T=u.result}),w.close(),{steps:l,duration:E,result:await ne({initialResult:T,agentName:St,hasError:!!I}),error:oe({error:I,agentName:St}),isRetryableError:ie(I)}}var Nt=async()=>{let e=Ye.join(At.homedir(),".codex");await xe.rm(e,{recursive:!0,force:!0})},Nr=new Set(["bash","-lc"]),br=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(i=>!Nr.has(i)),o=r?`Running \`${r.join(" ")}\``:void 0,n;try{n=JSON.parse(t.output).output?.trim(),n&&(n=`\`\`\`
  ${n.trim()}
- \`\`\``)}catch(i){J.error("Could not decode outputMsg",i,t.output)}return{title:o,message:n}};import Ar from"fs/promises";import Nr from"os";import xt from"path";import X from"process";import Sr from"readline";var ie=_("runner_gemini"),Tt="Gemini CLI",br=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(ie.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(ie.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(ie.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),Cr={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"};async function Ge({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:i,prompt:a,modelVersionOverrides:l}=e,{model:s}=e;if(n){let{token:w,url:h}=n;if(!w||!h)throw new Error("No token or url provided from AI Gateway");if(l?.gemini){let p=l?.gemini?.[i];if(p){if(!await n.isModelAvailableForProvider("gemini",p))throw new Error(`Model override '${p}' is not available for gemini provider`);s=p}}else if(s&&!await n.isModelAvailableForProvider("gemini",s))throw new Error(`Model '${s}' is not available for gemini provider`);X.env.GEMINI_API_KEY=w,X.env.GOOGLE_GEMINI_BASE_URL=h}else if(!X.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let m=[],u=[],c=[],f={},E=0,x=0,I,g,A=[V(X.cwd(),"gemini"),...s?["--model",s]:[],"--yolo","-p",a],R=`${X.env.NVM_BIN}/node`;ie.log(`Running ${R} ${A.join(" ")}`);let S=t.utils.run(R,A,{all:!0,env:X.env});S.stdin?.end();let v=we(()=>{r?.({steps:m,duration:x}),o?.({steps:u,duration:x}),u=[]},250),y=(w,h)=>{w.id=E,E+=1,c.push(w),m.push(w),u.push(w),h||v.flush(),v(),h&&v.flush()},N=Sr.createInterface({input:S.all});return N.on("error",w=>{ie.error("Readline interface error",{error:w.message,stack:w.stack})}),N.on("line",w=>{let h=null;try{if(w.startsWith("[API Error")){let p=w.match(/\[api error: (.+?)]$/i)?.[1];h={type:"error",value:dt(p,!1)?.error?.message||p||"Gemini encountered error"}}else h=JSON.parse(w)}catch{return}if(h)switch(h.type){case"thought":{let p=h.value;y({title:p?.subject??"Thinking...",message:p?.description},!0);break}case"content":{h.value&&y({message:h.value});break}case"tool_call_request":{let p=h.value,T=Cr[p.name]??p.name,d=p.args?.path||p.args?.absolute_path,P=d&&xt.relative(X.cwd(),d),ae=p.args?.command,B={title:[T,P&&`\`${P}\``,ae&&`\`${ae}\``].filter(Boolean).join(" ")};f[p.callId]=B,v.flush();break}case"tool_result":{let p=h.value,T=f[p.callId];if(T){let d=[p.resultDisplay,p.responseParts?.functionResponse?.response?.output].find(P=>typeof P=="string"&&P);d&&(T.message=`\`\`\`
- ${d.trim()}
- \`\`\``),y(T,!0)}break}case"result":{x=h.duration_ms,I=h.value,[c,m,u].forEach(p=>{p[p.length-1]?.message===I&&p.pop()});break}case"error":{g=h.value;break}case"finished":break;default:{ie.warn("Unhandled message type:",h.type);break}}}),await S.catch(w=>{({error:g,result:I}=br({catchError:w,runCmd:S,error:g,result:I,runnerName:"Gemini"}))}),N.close(),v.flush(),{steps:c,duration:x,result:await te({initialResult:I,agentName:Tt,hasError:!!g}),error:re({error:g,agentName:Tt}),isRetryableError:ne(g)}}var vt=async()=>{let e=xt.join(Nr.homedir(),".gemini");await Ar.rm(e,{recursive:!0,force:!0})};var Pr={codex:{runner:Me,clean:It},claude:{runner:Ue,clean:_t},gemini:{runner:Ge,clean:vt}},Rt=Pr;var At=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:o})=>await b(Or(),"init-stage",async n=>{let i=performance.now();n?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.validateAgent":e.validateAgent,"init.runnerVersion":o||"unknown"});let a=Rt[e.runner];if(!a)throw n?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let l=$r({apiToken:r});Qe(l);let s=e.useGateway?await ot({netlify:l,config:e}):void 0;n?.setAttributes({"init.aiGateway.created":!!s}),e.validateAgent&&e.errorLogsPath&&n?.setAttributes({"init.errorLogsPath":e.errorLogsPath});let m=ct(({steps:f=[],duration:E})=>{let x=f.map(I=>({...I,title:I.title?j(I.title):void 0,message:I.message?j(I.message):void 0}));return f.length=0,G(e.id,e.sessionId,{steps:x,duration:E})},t),u;e.hasRepo?e.sha?(u=e.sha,n?.setAttributes({"init.sha.source":"provided"})):(u=await mt(l),await me(e.id,{sha:u}),n?.setAttributes({"init.sha.source":"current_commit"})):(u=await ht(l),n?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let c=performance.now()-i;return n?.setAttributes({"init.sha":u||"unknown","init.duration.ms":c,"init.status":"success"}),{aiGateway:s,context:l,persistSteps:m,runner:a,sha:u}}),$r=({apiToken:e})=>({constants:{NETLIFY_API_HOST:Te.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||Te.env.NETLIFY_API_TOKEN,SITE_ID:Te.env.SITE_ID,FUNCTIONS_DIST:Te.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:Ee}});import{getTracer as je}from"@netlify/otel";import Dr from"crypto";import q from"fs/promises";import L from"path";import U from"process";var D=_("context"),Lr=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:U.env.NETLIFY_TEAM_ID,userId:U.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:U.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},Fr=10,kr=async e=>{let{name:t,ext:r}=L.parse(e),o=e,n=L.join(U.cwd(),F,o),i=0;for(;await Ur(n);){if(i>=Fr)throw new Error("Failed to generate context file");o=`${t}-${Dr.randomUUID().slice(0,5)}${r}`,n=L.join(U.cwd(),F,o),i+=1}return o},Ur=async e=>{try{return await q.access(e),!0}catch{return!1}},Mr=async()=>{try{D.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return D.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(o=>o&&typeof o=="object"&&o.key==="catchall-consumer");return r?!r.contextScopes||typeof r.contextScopes!="object"?(D.warn("Catchall consumer missing or invalid 
contextScopes"),null):r:(D.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?D.warn("Netlify features context request timed out"):D.warn("Failed to fetch Netlify features context:",e.message),null}},Gr=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let o=await r.text();return await q.writeFile(t,o,"utf-8"),!0}catch(r){return r.name==="AbortError"?D.warn(`Download timeout for ${e}`):D.warn(`Failed to download context file ${e}:`,r.message),!1}},xe=null,jr=async()=>{if(xe)return xe;let e=await Mr();if(!e)return[];let t=L.join(U.cwd(),F,De);await q.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([n,i])=>{if(!i||typeof i!="object"||!i.endpoint||!i.scope)return D.warn(`Invalid scope data for ${n}, skipping...`),null;let a=`${n}.md`,l=L.join(t,a),s=L.join(F,De,a);return D.log(`Downloading ${i.scope} context...`),await Gr(i.endpoint,l)?(D.log(`Downloaded: ${s}`),{scope:i.scope,path:s,key:n}):null});return xe=(await Promise.all(r)).filter(n=>n!==null),xe},Nt=async({cliPath:e,netlify:t,config:r,buildErrorContext:o})=>{let n=Lr(t),i=await kr(ut),a=L.join(U.cwd(),F);await q.mkdir(a,{recursive:!0});let l=L.join(F,i),s=L.join(U.cwd(),l),m=L.join(U.cwd(),F,ee);try{await q.unlink(m),D.log(`Deleted old results file: ${m}`)}catch{}let u=o?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
+ \`\`\``)}catch(i){M.error("Could not decode outputMsg",i,t.output)}return{title:o,message:n}};import Cr from"fs/promises";import Pr from"os";import Ct from"path";import Z from"process";import Or from"readline";var se=_("runner_gemini"),bt="Gemini CLI",$r=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(se.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(se.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(se.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),kr={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"};async function He({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:i,prompt:a,modelVersionOverrides:c}=e,{model:s}=e;if(n){let{token:w,url:f}=n;if(!w||!f)throw new Error("No token or url provided from AI Gateway");if(c?.gemini){let u=c?.gemini?.[i];if(u){if(!await n.isModelAvailableForProvider("gemini",u))throw new Error(`Model override '${u}' is not available for gemini provider`);s=u}}else if(s&&!await n.isModelAvailableForProvider("gemini",s))throw new Error(`Model '${s}' is not available for gemini provider`);Z.env.GEMINI_API_KEY=w,Z.env.GOOGLE_GEMINI_BASE_URL=f}else if(!Z.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let h=[],l=[],d=[],g={},E=0,T=0,I,m,v=[V(Z.cwd(),"gemini"),...s?["--model",s]:[],"--yolo","-p",a],R=`${Z.env.NVM_BIN}/node`;se.log(`Running ${R} ${v.join(" ")}`);let N=t.utils.run(R,v,{all:!0,env:Z.env});N.stdin?.end();let P=Ie(()=>{r?.({steps:h,duration:T}),o?.({steps:l,duration:T}),l=[]},250),b=(w,f)=>{w.id=E,E+=1,d.push(w),h.push(w),l.push(w),f||P.flush(),P(),f&&P.flush()},S=Or.createInterface({input:N.all});return S.on("error",w=>{se.error("Readline interface error",{error:w.message,stack:w.stack})}),S.on("line",w=>{let f=null;try{if(w.startsWith("[API Error")){let u=w.match(/\[api error: (.+?)]$/i)?.[1];f={type:"error",value:ht(u,!1)?.error?.message||u||"Gemini encountered error"}}else f=JSON.parse(w)}catch{return}if(f)switch(f.type){case"thought":{let u=f.value;b({title:u?.subject??"Thinking...",message:u?.description},!0);break}case"content":{f.value&&b({message:f.value});break}case"tool_call_request":{let u=f.value,y=kr[u.name]??u.name,p=u.args?.path||u.args?.absolute_path,x=p&&Ct.relative(Z.cwd(),p),ae=u.args?.command,W={title:[y,x&&`\`${x}\``,ae&&`\`${ae}\``].filter(Boolean).join(" ")};g[u.callId]=W,P.flush();break}case"tool_result":{let u=f.value,y=g[u.callId];if(y){let p=[u.resultDisplay,u.responseParts?.functionResponse?.response?.output].find(x=>typeof x=="string"&&x);p&&(y.message=`\`\`\`
+ ${p.trim()}
+ \`\`\``),b(y,!0)}break}case"result":{T=f.duration_ms,I=f.value,[d,h,l].forEach(u=>{u[u.length-1]?.message===I&&u.pop()});break}case"error":{m=f.value;break}case"finished":break;default:{se.warn("Unhandled message type:",f.type);break}}}),await N.catch(w=>{({error:m,result:I}=$r({catchError:w,runCmd:N,error:m,result:I,runnerName:"Gemini"}))}),S.close(),P.flush(),{steps:d,duration:T,result:await ne({initialResult:I,agentName:bt,hasError:!!m}),error:oe({error:m,agentName:bt}),isRetryableError:ie(m)}}var Pt=async()=>{let e=Ct.join(Pr.homedir(),".gemini");await Cr.rm(e,{recursive:!0,force:!0})};var Dr={codex:{runner:Be,clean:Nt},claude:{runner:je,clean:Rt},gemini:{runner:He,clean:Pt}},Ot=Dr;var $t=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:o})=>await A(Lr(),"init-stage",async n=>{let i=performance.now();n?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.validateAgent":e.validateAgent,"init.runnerVersion":o||"unknown"});let a=Ot[e.runner];if(!a)throw n?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let c=Fr({apiToken:r});nt(c);let s=e.useGateway?await ut({netlify:c,config:e}):void 0;n?.setAttributes({"init.aiGateway.created":!!s}),e.validateAgent&&e.errorLogsPath&&n?.setAttributes({"init.errorLogsPath":e.errorLogsPath});let h=mt(({steps:g=[],duration:E})=>{let T=g.map(I=>({...I,title:I.title?H(I.title):void 0,message:I.message?H(I.message):void 0}));return g.length=0,B(e.id,e.sessionId,{steps:T,duration:E})},t),l;e.hasRepo?e.sha?(l=e.sha,n?.setAttributes({"init.sha.source":"provided"})):(l=await wt(c),await ce(e.id,{sha:l}),n?.setAttributes({"init.sha.source":"current_commit"})):(l=await It(c),n?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let d=performance.now()-i;return n?.setAttributes({"init.sha":l||"unknown","init.duration.ms":d,"init.status":"success"}),{aiGateway:s,context:c,persistSteps:h,runner:a,sha:l}}),Fr=({apiToken:e})=>({constants:{NETLIFY_API_HOST:ve.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||ve.env.NETLIFY_API_TOKEN,SITE_ID:ve.env.SITE_ID,FUNCTIONS_DIST:ve.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:_e}});import{getTracer as qe}from"@netlify/otel";import Ur from"crypto";import J from"fs/promises";import D from"path";import G from"process";var k=_("context"),Mr=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:G.env.NETLIFY_TEAM_ID,userId:G.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:G.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},Gr=10,jr=async e=>{let{name:t,ext:r}=D.parse(e),o=e,n=D.join(G.cwd(),F,o),i=0;for(;await Yr(n);){if(i>=Gr)throw new Error("Failed to generate context file");o=`${t}-${Ur.randomUUID().slice(0,5)}${r}`,n=D.join(G.cwd(),F,o),i+=1}return o},Yr=async e=>{try{return await J.access(e),!0}catch{return!1}},Br=async()=>{try{k.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return k.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(o=>o&&typeof o=="object"&&o.key==="catchall-consumer");return r?!r.contextScopes||typeof r.contextScopes!="object"?(k.warn("Catchall consumer missing or invalid 
contextScopes"),null):r:(k.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?k.warn("Netlify features context request timed out"):k.warn("Failed to fetch Netlify features context:",e.message),null}},Hr=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let o=await r.text();return await J.writeFile(t,o,"utf-8"),!0}catch(r){return r.name==="AbortError"?k.warn(`Download timeout for ${e}`):k.warn(`Failed to download context file ${e}:`,r.message),!1}},Re=null,qr=async()=>{if(Re)return Re;let e=await Br();if(!e)return[];let t=D.join(G.cwd(),F,Fe);await J.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([n,i])=>{if(!i||typeof i!="object"||!i.endpoint||!i.scope)return k.warn(`Invalid scope data for ${n}, skipping...`),null;let a=`${n}.md`,c=D.join(t,a),s=D.join(F,Fe,a);return k.log(`Downloading ${i.scope} context...`),await Hr(i.endpoint,c)?(k.log(`Downloaded: ${s}`),{scope:i.scope,path:s,key:n}):null});return Re=(await Promise.all(r)).filter(n=>n!==null),Re},kt=async({cliPath:e,netlify:t,config:r,buildErrorContext:o})=>{let n=Mr(t),i=await jr(gt),a=D.join(G.cwd(),F);await J.mkdir(a,{recursive:!0});let c=D.join(F,i),s=D.join(G.cwd(),c),h=D.join(G.cwd(),F,re);try{await J.unlink(h),k.log(`Deleted old results file: ${h}`)}catch{}let l=o?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
  Your task is to analyze and fix the build errors.
  Don't apply techniques of reverting changes. Apply fixes related to errors.
  Don't try to run build by yourself. Just fix the errors.
 
  <build_error_context>
  ${o}
- </build_error_context>`:"",c="";r.siteContext&&r.siteContext.length!==0&&(c=`
+ </build_error_context>`:"",d="";r.siteContext&&r.siteContext.length!==0&&(d=`
  <project_rules>
- ${r.siteContext.filter(g=>g.site_context).map(g=>typeof g.site_context=="string"?g.site_context:typeof g.site_context=="object"?JSON.stringify(g.site_context):"").join(`
+ ${r.siteContext.filter(m=>m.site_context).map(m=>typeof m.site_context=="string"?m.site_context:typeof m.site_context=="object"?JSON.stringify(m.site_context):"").join(`
 
  `)}
  </project_rules>
- `);let f="";if(r.sessionHistoryContext?.length){let g=L.join(U.cwd(),F,$e);await q.mkdir(g,{recursive:!0});let A=await Promise.all(r.sessionHistoryContext.map(async(R,S)=>{let v=S+1,y=`attempt-${v}.md`,N=L.join(g,y),w=L.join(F,$e,y),h=`# Task History - Attempt ${v}
+ `);let g="";if(r.sessionHistoryContext?.length){let m=D.join(G.cwd(),F,Le);await J.mkdir(m,{recursive:!0});let v=await Promise.all(r.sessionHistoryContext.map(async(R,N)=>{let P=N+1,b=`attempt-${P}.md`,S=D.join(m,b),w=D.join(F,Le,b),f=`# Task History - Attempt ${P}
 
  ## Request - what the user asked for
  ${R.request}
@@ -40,21 +44,21 @@ ${R.request}
  ## Response - what the agent replied with after its work
 
  ${R.response}
- `;return await q.writeFile(N,h,"utf-8"),D.log(`Created history file: ${w}`),w}));f+=`
+ `;return await J.writeFile(S,f,"utf-8"),k.log(`Created history file: ${w}`),w}));g+=`
  <session_history_context>
  History of prior work on this task.
  You MUST review ALL of the files below as context to understand the context of previous attempts. Use this information to continue the discussion appropriately.
 
- ${A.slice(-5).map(R=>`- ${R}`).join(`
+ ${v.slice(-5).map(R=>`- ${R}`).join(`
  `)}
 
  </session_history_context>
- `}let E=await jr(),x="";E.length>0&&(x=`
+ `}let E=await qr(),T="";E.length>0&&(T=`
  <netlify_features_context>
  If the user request is explicitly related to a specific Netlify feature (e.g., Netlify Forms, Netlify Functions, etc.), you MUST review the relevant documentation below in addition to reviewing the project files.
  DO NOT force the use of any Netlify feature if the user request does not explicitly require it or if the project has alternative implementations in place already.
 
- ${E.map(g=>`- **${g.scope}**: ${g.path}`).join(`
+ ${E.map(m=>`- **${m.scope}**: ${m.path}`).join(`
  `)}
 
  Refer to these files when working with specific Netlify features.
@@ -66,23 +70,23 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
  <user_request>
  ${r.prompt}
  </user_request>
- ${u}
+ ${l}
  </request>
 
  <requirements>
  <responses>
  - Do not speak in first person. You may speak as "the agent".
- - When work is complete, write a changes summary in ${a}/${ee} as a standalone PR description. Explain what was accomplished and why (avoid too many implementation details), assuming the reader has no prior context. Use past tense and write in prose without calling it a "PR", "Changelog", etc. This is the core of a PR message or summary page that already has a heading.
- - If the user's request is informational in nature (asking for output, status, information, or analysis rather than asking you to make changes), write the requested information directly to the ${a}/${ee} file.
+ - When work is complete, write a changes summary in ${a}/${re} as a standalone PR description. Explain what was accomplished and why (avoid too many implementation details), assuming the reader has no prior context. Use past tense and write in prose without calling it a "PR", "Changelog", etc. This is the core of a PR message or summary page that already has a heading.
+ - If the user's request is informational in nature (asking for output, status, information, or analysis rather than asking you to make changes), write the requested information directly to the ${a}/${re} file.
  - Do not attempt to create git commits, PRs, etc. directly. You can use git to review information if required but the system that runs this agent will handle creating PRs or commits of the changes it performs.
  - NEVER look into the \`.git\` folder
  - NEVER print potentially sensitive values (like secrets) in the planning output or results
  </responses>
  <attachements>
- - for requests that require work with attachments or assets, take into account that uploaded attachments are stored in ${a}/${Le} folder
- - move assets from ${a}/${Le} folder to the project assets folder if they are referenced in a code or applied changes
+ - for requests that require work with attachments or assets, take into account that uploaded attachments are stored in ${a}/${Ue} folder
+ - move assets from ${a}/${Ue} folder to the project assets folder if they are referenced in a code or applied changes
  </attachements>
- ${c}
+ ${d}
  </requirements>
 
  <extra_context>
@@ -94,41 +98,41 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
  - Netlify Functions directory: ${n.functionsDir}
  </metadata>
  <environment>
- - Node Version: ${U.version||"unknown"}
+ - Node Version: ${G.version||"unknown"}
  - Environment variables are set globally (e.g. \`echo $VARIABLE_NAME\` can be used to check if a var is set).
  - 'netlify-cli' npm package is already available as a global package. Don't try to install it again
  - If you need to start a local development server in order to fulfill the request, try using the Netlify CLI over by running the shell command '${e} dev'. This will start a local HTTP server on port 8888, including live-reloading of any changes and, most critically, it offers local emulation for all Netlify features.
  </environment>
- ${x}
+ ${T}
  <docs>
  - Netlify Docs: https://docs.netlify.com
  - LLM Resources Index: https://docs.netlify.com/llms.txt
  </docs>
  </extra_context>
 
- ${f}
- `;return await q.writeFile(s,I,"utf-8"),D.log(`Generated agent context document at: ${s}`),I.length>5e5&&(I=`
+ ${g}
+ `;return await J.writeFile(s,I,"utf-8"),k.log(`Generated agent context document at: ${s}`),I.length>5e5&&(I=`
  You're an AI agent designed to assist with tasks related to a Netlify project. Please review, understand, and use the context provided to complete the user's request as needed.
 
  <request>
  <user_request>
  ${r.prompt}
  </user_request>
- ${u}
+ ${l}
  </request>
 
  Use the following file for the complete context of the ask, the environment, and what's available. ${s} You MUST READ ALL OF IT. Make sure to read it first. Never cite or paraphrase private context.
- `),I};var Yr=_("prompt"),St=async({cliPath:e,config:t,netlify:r,buildErrorContext:o})=>{let n=await Nt({cliPath:e,config:t,netlify:r,buildErrorContext:o});return process.env.AGENT_RUNNER_DEBUG&&Yr.log("Contextful Prompt:",n),{prompt:n}};var ve=_("inference_stage"),bt=5,Re=async e=>{let{cliPath:t,config:r,context:o,buildErrors:n,runner:i,persistSteps:a,aiGateway:l,attempt:s,contextPrefix:m,priorAgentSessionId:u}=e;ve.log(`Running inference stage, attempt ${s} of ${bt}`);let c=await b(je(),"inference-stage",async f=>{f?.setAttributes({"inference.attempt":s||1}),it();let{prompt:E}=await b(je(),"compose-prompt",async()=>await St({cliPath:t,config:r,buildErrorContext:Br(n),netlify:o})),x=`
- ${m||""}
+ `),I};var Kr=_("prompt"),Dt=async({cliPath:e,config:t,netlify:r,buildErrorContext:o})=>{let n=await kt({cliPath:e,config:t,netlify:r,buildErrorContext:o});return process.env.AGENT_RUNNER_DEBUG&&Kr.log("Contextful Prompt:",n),{prompt:n}};var Se=_("inference_stage"),Lt=5,Ae=async e=>{let{cliPath:t,config:r,context:o,buildErrors:n,runner:i,persistSteps:a,aiGateway:c,attempt:s,contextPrefix:h,priorAgentSessionId:l}=e;Se.log(`Running inference stage, attempt ${s} of ${Lt}`);let d=await A(qe(),"inference-stage",async g=>{g?.setAttributes({"inference.attempt":s||1}),ct();let{prompt:E}=await A(qe(),"compose-prompt",async()=>await Dt({cliPath:t,config:r,buildErrorContext:Wr(n),netlify:o})),T=`
+ ${h||""}
  ${E}
- `.trim(),I={...r,prompt:x},g=await b(je(),`run-${r.runner}`,async()=>await i({aiGateway:l,config:I,netlify:o,persistSteps:a,continueSession:!!(s&&s>1),priorAgentSessionId:u}));return g.result&&(g.result=j(g.result)),g.error&&(g.error=j(g.error)),await a.flush(),g});if(c.error){if(ve.error("Runner failed",{stepsCount:c.steps.length,duration:c.duration,error:c.error,isRetryableError:c.isRetryableError,attempt:s||1,agentSessionId:c.agentSessionId}),c.isRetryableError&&(!s||s<bt))return ve.log("Retrying inference stage"),await new Promise(E=>setTimeout(E,5e3)),{runnerResult:(await Re({...e,attempt:(s||1)+1,priorAgentSessionId:c.agentSessionId,contextPrefix:c.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw ve.log("Do not retry inference stage"),new Error(c.error)}return{runnerResult:c}},Br=e=>!e||e.length===0?"":`
+ `.trim(),I={...r,prompt:T},m=await A(qe(),`run-${r.runner}`,async()=>await i({aiGateway:c,config:I,netlify:o,persistSteps:a,continueSession:!!(s&&s>1),priorAgentSessionId:l}));return m.result&&(m.result=H(m.result)),m.error&&(m.error=H(m.error)),await a.flush(),m});if(d.error){if(Se.error("Runner failed",{stepsCount:d.steps.length,duration:d.duration,error:d.error,isRetryableError:d.isRetryableError,attempt:s||1,agentSessionId:d.agentSessionId}),d.isRetryableError&&(!s||s<Lt))return Se.log("Retrying inference stage"),await new Promise(E=>setTimeout(E,5e3)),{runnerResult:(await Ae({...e,attempt:(s||1)+1,priorAgentSessionId:d.agentSessionId,contextPrefix:d.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw Se.log("Do not retry inference stage"),new Error(d.error)}return{runnerResult:d}},Wr=e=>!e||e.length===0?"":`
  Deploy failed failed. Here are the errors to review on the latest build:
 
  Below are all of the logs with potential issues that we extracted. Some of them may be false positives, discern them carefully and ensure fixes are relevant.
 
  ${e.pop()}
- `;import Kr from"process";import{getTracer as Ye}from"@netlify/otel";import{getTracer as Hr}from"@netlify/otel";var ce=_("deploy"),Ct=async e=>await b(Hr(),"create-preview-deploy",async t=>qr(e,t)),qr=async({netlify:e,hasRepo:t,skipBuild:r,message:o="Agent Preview",deploySubdomain:n,cliPath:i,filter:a},l)=>{try{let s=["deploy","--message",`"${o}"`,"--json","--draft","--verbose"];t||(ce.log("Deploy: Uploading source zip"),s.push("--upload-source-zip")),n&&s.push("--alias",n),a&&s.push("--filter",a),r?(ce.log("Deploy: Skipping build"),s.push("--no-build")):s.push("--context","deploy-preview");let m=i||"netlify";ce.log(`Running: ${m} ${s.join(" ")}`),l?.setAttributes({cmd:m,args:s});let u=await e.utils.run(m,s,{stdio:["ignore","pipe","pipe"]}),c=JSON.parse(u.stdout.trim());l?.setAttributes({success:!0,deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id}),ce.log(`
- Preview deploy created successfully:`,{deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id});let f={deployId:c.deploy_id,previewUrl:c.deploy_url,logsUrl:c.logs,siteId:c.site_id};return t||(f.sourceZipFilename=c.source_zip_filename),f}catch(s){throw ce.error("Failed to create preview deploy via CLI:",s),l?.setAttributes({success:!1,error:s.message}),s}};var de=_("deploy_stage"),Be=async e=>await b(Ye(),"run-deploy-stage",async()=>Vr(e)),Vr=async({cliPath:e,config:t,context:r,result:o,filter:n})=>{let i=await b(Ye(),"get-runner-diffs",async()=>await gt({config:t,netlify:r}));if(de.info("Resolved git",{hasChanges:i.hasChanges,ignored:i.ignored??[]}),!i.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:a,resultDiff:l,diffBinary:s,resultDiffBinary:m}=i,u=!0;de.log("Preview deploy condition check:",{resultUndefined:o===void 0,resultType:typeof o,hasChanges:u,wouldCreatePreview:o!==void 0&&u});let c=null;if(o!==void 0&&u)try{let f;try{let E=await b(Ye(),"get-runner-session",async()=>await tt(t.id,t.sessionId));E?.title&&(f=E.title)}catch(E){de.warn("Failed to fetch session title, using fallback message:",E.message)}await G(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),c=await Ct({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:f,skipBuild:!1,deploySubdomain:pt(t.id,Kr.env.SITE_NAME),filter:n})}catch(f){return de.warn("Failed to create preview deploy (continuing with agent run):",f),{diff:a,resultDiff:l,hasChanges:u,previewInfo:null,diffBinary:s,resultDiffBinary:m,deployError:f instanceof Error?f.message:String(f)}}return de.log("Git status",{hasDiff:!!a,hasChanges:u}),{diff:a,resultDiff:l,hasChanges:u,previewInfo:c,diffBinary:s,resultDiffBinary:m}};import{getTracer as He}from"@netlify/otel";async function Pt(e,t){let{maxRetries:r,baseDelay:o,onRetry:n}=t,i;for(let a=1;a<=r;a++)try{return await e()}catch(l){if(i=l,a===r)throw i;n&&n(a,i),await new Promise(s=>setTimeout(s,o*a))}throw i}var se=_("cleanup_stage"),Ot=async e=>await b(He(),"cleanup-stage",async()=>Wr(e)),Wr=async({config:e,diff:t,result:r,duration:o,resultDiff:n,diffBinary:i,resultDiffBinary:a,previewInfo:l})=>{let s={result_diff:t,result:r||"Done",duration:o,result_diff_binary:i};return l&&l.deployId&&(s.deploy_id=l.deployId),l&&l.sourceZipFilename&&(s.result_zip_file_name=l.sourceZipFilename),n||a?(s.cumulative_diff=n,s.cumulative_diff_binary=a,se.log("Updating total agent result diff"),await b(He(),"update-runner",async()=>{await me(e.id,{result_diff:n,result_diff_binary:a})})):se.log("No total result diff, not updating"),se.log("Updated agent runner with result"),await Pt(async()=>await b(He(),"update-runner-session",()=>G(e.id,e.sessionId,s)),{maxRetries:3,baseDelay:1e3,onRetry:(m,u)=>{se.error(`Error updating agent runner session (attempt ${m}):`,u),se.log("Retrying...")}}),se.log("Finished updating agent runner with result"),{sessionUpdate:s}};import{getTracer as $t,shutdownTracers as Xr,withActiveSpan as Dt}from"@netlify/otel";var zr=Jr(import.meta.url),Lt=zr("../package.json"),Ft=_("pipeline_index"),Ae=3,kt=async({config:e,apiToken:t,cliPath:r="netlify",cwd:o,errorLogsPath:n,filter:i,tracing:a={}})=>{let l,{withStageTimer:s}=st(K.timeUnits.hours(4)),m=await ze(Lt.version,e.id,a);try{await Dt($t(),"run-pipeline",{},m,async()=>{let u,{aiGateway:c,context:f,persistSteps:E,runner:x,sha:I}=await s("init",()=>At({config:e,apiToken:t,cliPath:r,cwd:o,errorLogsPath:n,filter:i,runnerVersion:Lt.version}),K.timeUnits.minutes(10));l=x.clean,e.sha=I;let{runnerResult:g}=await 
s("inference",()=>Re({cliPath:r,config:e,context:f,runner:x.runner,persistSteps:E,aiGateway:c}));await G(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let A=await s("deploy",()=>Be({cliPath:r,config:e,context:f,result:g.result,filter:i})),R=g,S=[];if(A.hasChanges&&A.deployError){S.push(Ze(A.deployError));let p=1;for(;p<=Ae&&!A.previewInfo;)Ft.log(`Deploy attempt had errors. Retrying. ${p}/${Ae}`),await Dt($t(),"deploy-stage",async T=>{T?.setAttributes({"stage.attempt":p});let{runnerResult:d}=await s(`inference-retry-${p}`,()=>Re({cliPath:r,config:e,context:f,runner:x.runner,persistSteps:E,aiGateway:c,buildErrors:S,priorAgentSessionId:g.agentSessionId}));R={...d,steps:[...R.steps||[],...d.steps||[]],duration:(R.duration||0)+(d.duration||0)},A=await s(`deploy-retry-${p}`,()=>Be({cliPath:r,config:e,context:f,result:d.result,filter:i})),A.deployError&&S.push(A.deployError),p++});p>Ae&&!A.previewInfo&&(u=new Error(`Deploy validation failed after ${Ae} attempts`))}let{diff:v,resultDiff:y,previewInfo:N,diffBinary:w,resultDiffBinary:h}=A;if(await s("cleanup",()=>Ot({config:e,diff:v,result:R.result,duration:R.duration,resultDiff:y,diffBinary:w,resultDiffBinary:h,previewInfo:N}),K.timeUnits.minutes(10)),u)throw u;await x.clean?.()})}catch(u){Ft.error("Got error while running pipeline",u),await l?.();let c=u instanceof Error&&u.message;throw await G(e.id,e.sessionId,{result:c||"Encountered error when running agent",state:"error"}),u}finally{await Xr()}};import Ut from"crypto";var $=_("bin_local"),M=Zr(C.argv.slice(2),{string:["cwd","cli-path","filter","prompt","runner","model","netlify-api-token"],boolean:["verbose","help"],alias:{h:"help",v:"verbose"}}),Ke=()=>{console.log(`
+ `;import Xr from"process";import{getTracer as Ke}from"@netlify/otel";import{getTracer as Vr}from"@netlify/otel";var pe=_("deploy"),Ft=async e=>await A(Vr(),"create-preview-deploy",async t=>Jr(e,t)),Jr=async({netlify:e,hasRepo:t,skipBuild:r,message:o="Agent Preview",deploySubdomain:n,cliPath:i,filter:a},c)=>{try{let s=["deploy","--message",`"${o}"`,"--json","--draft","--verbose"];t||(pe.log("Deploy: Uploading source zip"),s.push("--upload-source-zip")),n&&s.push("--alias",n),a&&s.push("--filter",a),r?(pe.log("Deploy: Skipping build"),s.push("--no-build")):s.push("--context","deploy-preview");let h=i||"netlify";pe.log(`Running: ${h} ${s.join(" ")}`),c?.setAttributes({cmd:h,args:s});let l=await e.utils.run(h,s,{stdio:["ignore","pipe","pipe"]}),d=JSON.parse(l.stdout.trim());c?.setAttributes({success:!0,deployId:d.deploy_id,deployUrl:d.deploy_url,siteId:d.site_id}),pe.log(`
+ Preview deploy created successfully:`,{deployId:d.deploy_id,deployUrl:d.deploy_url,siteId:d.site_id});let g={deployId:d.deploy_id,previewUrl:d.deploy_url,logsUrl:d.logs,siteId:d.site_id};return t||(g.sourceZipFilename=d.source_zip_filename),g}catch(s){throw pe.error("Failed to create preview deploy via CLI:",s),c?.setAttributes({success:!1,error:s.message}),s}};var fe=_("deploy_stage"),We=async e=>await A(Ke(),"run-deploy-stage",async()=>zr(e)),zr=async({cliPath:e,config:t,context:r,result:o,filter:n})=>{let i=await A(Ke(),"get-runner-diffs",async()=>await _t({config:t,netlify:r}));if(fe.info("Resolved git",{hasChanges:i.hasChanges,ignored:i.ignored??[]}),!i.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:a,resultDiff:c,diffBinary:s,resultDiffBinary:h}=i,l=!0;fe.log("Preview deploy condition check:",{resultUndefined:o===void 0,resultType:typeof o,hasChanges:l,wouldCreatePreview:o!==void 0&&l});let d=null;if(o!==void 0&&l)try{let g;try{let E=await A(Ke(),"get-runner-session",async()=>await it(t.id,t.sessionId));E?.title&&(g=E.title)}catch(E){fe.warn("Failed to fetch session title, using fallback message:",E.message)}await B(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),d=await Ft({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:g,skipBuild:!1,deploySubdomain:yt(t.id,Xr.env.SITE_NAME),filter:n})}catch(g){return fe.warn("Failed to create preview deploy (continuing with agent run):",g),{diff:a,resultDiff:c,hasChanges:l,previewInfo:null,diffBinary:s,resultDiffBinary:h,deployError:g instanceof Error?g.message:String(g)}}return fe.log("Git status",{hasDiff:!!a,hasChanges:l}),{diff:a,resultDiff:c,hasChanges:l,previewInfo:d,diffBinary:s,resultDiffBinary:h}};import{getTracer as Ne}from"@netlify/otel";async function Ut(e,t){let{maxRetries:r,baseDelay:o,onRetry:n}=t,i;for(let a=1;a<=r;a++)try{return await e()}catch(c){if(i=c,a===r)throw i;n&&n(a,i),await new Promise(s=>setTimeout(s,o*a))}throw i}var L=_("cleanup_stage"),Mt=async e=>await A(Ne(),"cleanup-stage",async()=>Zr(e)),Ve=1024*1024*10,Zr=async({config:e,diff:t,result:r,duration:o,resultDiff:n,diffBinary:i,resultDiffBinary:a,previewInfo:c})=>{let s={result:r||"Done",duration:o};if(c&&c.deployId&&(s.deploy_id=c.deployId),c&&c.sourceZipFilename&&(s.result_zip_file_name=c.sourceZipFilename),t||i||n||a)try{L.log("Getting pre-signed URLs for diff upload");let l=await at(e.id,e.sessionId),d=[];(t||i)&&d.push(Oe(l.result.upload_url,i||t).then(()=>{s.result_diff_s3_key=l.result.s3_key,L.log("Successfully uploaded result_diff to S3")})),(n||a)&&d.push(Oe(l.cumulative.upload_url,a||n).then(()=>{s.cumulative_diff_s3_key=l.cumulative.s3_key,L.log("Successfully uploaded cumulative_diff to S3")})),L.log(`Uploading ${d.length} diff(s) to S3 in parallel`),await Promise.all(d),(n||a)&&(L.log("Updating agent runner with cumulative diff S3 key"),await A(Ne(),"update-runner",async()=>{await ce(e.id,{result_diff_s3_key:l.cumulative.s3_key})}))}catch(l){L.error("S3 upload failed, falling back to inline diffs:",l);let d=Buffer.byteLength(t||i||""),g=Buffer.byteLength(a||n||"");if(d>Ve||g>Ve){let E=`Diffs exceed maximum inline size of ${Ve} bytes.`;throw L.error(E),new Error(E)}s.result_diff=t,s.result_diff_binary=i,(n||a)&&(s.cumulative_diff=n,s.cumulative_diff_binary=a,L.log("Updating agent runner with inline diffs (fallback)"),await A(Ne(),"update-runner",async()=>{await ce(e.id,{result_diff:n,result_diff_binary:a})}))}else L.log("No diffs to upload");return L.log("Updated agent runner with result"),await Ut(async()=>await 
A(Ne(),"update-runner-session",()=>B(e.id,e.sessionId,s)),{maxRetries:3,baseDelay:1e3,onRetry:(l,d)=>{L.error(`Error updating agent runner session (attempt ${l}):`,d),L.log("Retrying...")}}),L.log("Finished updating agent runner with result"),{sessionUpdate:s}};import{getTracer as Gt,shutdownTracers as en,withActiveSpan as jt}from"@netlify/otel";var tn=Qr(import.meta.url),Yt=tn("../package.json"),Bt=_("pipeline_index"),be=3,Ht=async({config:e,apiToken:t,cliPath:r="netlify",cwd:o,errorLogsPath:n,filter:i,tracing:a={}})=>{let c,{withStageTimer:s}=dt(X.timeUnits.hours(4)),h=await tt(Yt.version,e.id,a);try{await jt(Gt(),"run-pipeline",{},h,async()=>{let l,{aiGateway:d,context:g,persistSteps:E,runner:T,sha:I}=await s("init",()=>$t({config:e,apiToken:t,cliPath:r,cwd:o,errorLogsPath:n,filter:i,runnerVersion:Yt.version}),X.timeUnits.minutes(10));c=T.clean,e.sha=I;let{runnerResult:m}=await s("inference",()=>Ae({cliPath:r,config:e,context:g,runner:T.runner,persistSteps:E,aiGateway:d}));await B(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let v=await s("deploy",()=>We({cliPath:r,config:e,context:g,result:m.result,filter:i})),R=m,N=[];if(v.hasChanges&&v.deployError){N.push(rt(v.deployError));let u=1;for(;u<=be&&!v.previewInfo;)Bt.log(`Deploy attempt had errors. Retrying. ${u}/${be}`),await jt(Gt(),"deploy-stage",async y=>{y?.setAttributes({"stage.attempt":u});let{runnerResult:p}=await s(`inference-retry-${u}`,()=>Ae({cliPath:r,config:e,context:g,runner:T.runner,persistSteps:E,aiGateway:d,buildErrors:N,priorAgentSessionId:m.agentSessionId}));R={...p,steps:[...R.steps||[],...p.steps||[]],duration:(R.duration||0)+(p.duration||0)},v=await s(`deploy-retry-${u}`,()=>We({cliPath:r,config:e,context:g,result:p.result,filter:i})),v.deployError&&N.push(v.deployError),u++});u>be&&!v.previewInfo&&(l=new Error(`Deploy validation failed after ${be} attempts`))}let{diff:P,resultDiff:b,previewInfo:S,diffBinary:w,resultDiffBinary:f}=v;if(await s("cleanup",()=>Mt({config:e,diff:P,result:R.result,duration:R.duration,resultDiff:b,diffBinary:w,resultDiffBinary:f,previewInfo:S}),X.timeUnits.minutes(10)),l)throw l;process.env.NETLIFY_LOCAL_MODE||await T.clean?.()})}catch(l){Bt.error("Got error while running pipeline",l),await c?.();let d=l instanceof Error&&l.message;throw await B(e.id,e.sessionId,{result:d||"Encountered error when running agent",state:"error"}),l}finally{await en()}};import qt from"crypto";var $=_("bin_local"),Y=rn(C.argv.slice(2),{string:["cwd","cli-path","filter","prompt","runner","model","netlify-api-token"],boolean:["verbose","help"],alias:{h:"help",v:"verbose"}}),Xe=()=>{console.log(`
  agent-runner-cli-local - Run Netlify agent runner locally without API connections
 
  USAGE:
@@ -160,6 +164,6 @@ NOTE:
  This local mode mocks all Netlify API calls. The agent will run through
  the full pipeline including inference and deployment, but API calls will
  be logged instead of executed.
- `)};M.help&&(Ke(),C.exit(0));M.prompt||($.error("Error: --prompt is required"),Ke(),C.exit(1));M["netlify-api-token"]||($.error("Error: --netlify-api-token is required - generate a PAT from your Netlify user settings"),Ke(),C.exit(1));try{let e=M.cwd||C.cwd(),t=Mt.join(e,".netlify","netlify-agent-runner-context*");Gt.rmSync(t,{recursive:!0,force:!0});let r;try{r=await Qr(e)}catch(l){$.error(l.message),$.error(`
- To link this directory to a Netlify site, run:`),$.error(" netlify link"),C.exit(1)}let o=`local-${Ut.randomBytes(8).toString("hex")}`,n=`session-${Ut.randomBytes(8).toString("hex")}`,i=M.runner||"claude";$.log("Starting agent runner in local mode",{runnerId:o,sessionId:n,siteId:r,cwd:e,runner:i});let a={id:o,sessionId:n,prompt:M.prompt,runner:i,model:M.model,accountType:"local",validateAgent:!1,validateAgentWithBuild:!1,sessionHistoryContext:[],siteContext:[],hasRepo:!0,useGateway:!0,sha:void 0,modelVersionOverrides:{}};C.env.NETLIFY_LOCAL_MODE="true",C.env.NETLIFY_API_HOST="api.netlify.com",C.env.NETLIFY_API_TOKEN=M["netlify-api-token"],C.env.SITE_ID=r,C.env.NETLIFY_TEAM_ID="local-team-id",C.env.NETLIFY_AGENT_RUNNER_USER_ID="local-user-id",C.env.SITE_NAME="local-site",i==="claude"?_e(e,"claude")||($.log("Claude CLI not found, installing..."),await qe(e,"@anthropic-ai/claude-code")):i==="gemini"?_e(e,"gemini")||($.log("Gemini CLI not found, installing..."),await qe(e,"@google/gemini-cli@0.1.17")):i==="codex"?_e(e,"codex")||($.log("Codex CLI not found, installing..."),await qe(e,"my-codex-no-sandbox")):($.error(`Unknown runner: ${i}`),C.exit(1)),await kt({config:a,cwd:e,cliPath:M["cli-path"],filter:M.filter,tracing:{exporterUrl:void 0,traceparent:void 0}}),$.info("Finished agent (local mode)"),C.exit(0)}catch(e){$.error("Error running agent pipeline (local mode):",e),C.exit(1)}function qe(e,t){return new Promise((r,o)=>{Ee("npm",["install",t,"--no-save"],{cwd:e}).then(({stdout:n})=>{$.log(`${t} installed: ${n}`),r()}).catch(n=>{$.error(`Error installing ${t}: ${n.stderr||n.message}`),o(n)})})}async function Qr(e){let t=Mt.join(e,".netlify","state.json");try{let r=await Gt.readFileSync(t,"utf-8"),o=JSON.parse(r);if(!o.siteId)throw new Error(`No siteId found in ${t}. Please link this directory to a Netlify site using 'netlify link'.`);return $.log(`Found site ID from state file: ${o.siteId}`),o.siteId}catch(r){throw r.code==="ENOENT"?new Error(`No .netlify/state.json found in ${e}. Please link this directory to a Netlify site using 'netlify link'.`):r}}
+ `)};Y.help&&(Xe(),C.exit(0));Y.prompt||($.error("Error: --prompt is required"),Xe(),C.exit(1));Y["netlify-api-token"]||($.error("Error: --netlify-api-token is required - generate a PAT from your Netlify user settings"),Xe(),C.exit(1));try{let e=Y.cwd||C.cwd(),t=Kt.join(e,".netlify","netlify-agent-runner-context*");Wt.rmSync(t,{recursive:!0,force:!0});let r;try{r=await nn(e)}catch(c){$.error(c.message),$.error(`
+ To link this directory to a Netlify site, run:`),$.error(" netlify link"),C.exit(1)}let o=`local-${qt.randomBytes(8).toString("hex")}`,n=`session-${qt.randomBytes(8).toString("hex")}`,i=Y.runner||"claude";$.log("Starting agent runner in local mode",{runnerId:o,sessionId:n,siteId:r,cwd:e,runner:i});let a={id:o,sessionId:n,prompt:Y.prompt,runner:i,model:Y.model,accountType:"local",validateAgent:!1,validateAgentWithBuild:!1,sessionHistoryContext:[],siteContext:[],hasRepo:!0,useGateway:!0,sha:void 0,modelVersionOverrides:{}};C.env.NETLIFY_LOCAL_MODE="true",C.env.NETLIFY_API_HOST="api.netlify.com",C.env.NETLIFY_API_TOKEN=Y["netlify-api-token"],C.env.SITE_ID=r,C.env.NETLIFY_TEAM_ID="local-team-id",C.env.NETLIFY_AGENT_RUNNER_USER_ID="local-user-id",C.env.SITE_NAME="local-site",i==="claude"?we(e,"claude")||($.log("Claude CLI not found, installing..."),await Je(e,"@anthropic-ai/claude-code")):i==="gemini"?we(e,"gemini")||($.log("Gemini CLI not found, installing..."),await Je(e,"@google/gemini-cli@0.1.17")):i==="codex"?we(e,"codex")||($.log("Codex CLI not found, installing..."),await Je(e,"@openai/codex")):($.error(`Unknown runner: ${i}`),C.exit(1)),await Ht({config:a,cwd:e,cliPath:Y["cli-path"],filter:Y.filter,tracing:{exporterUrl:void 0,traceparent:void 0}}),$.info("Finished agent (local mode)"),C.exit(0)}catch(e){$.error("Error running agent pipeline (local mode):",e),C.exit(1)}function Je(e,t){return new Promise((r,o)=>{_e("npm",["install",t,"--no-save"],{cwd:e}).then(({stdout:n})=>{$.log(`${t} installed: ${n}`),r()}).catch(n=>{$.error(`Error installing ${t}: ${n.stderr||n.message}`),o(n)})})}async function nn(e){let t=Kt.join(e,".netlify","state.json");try{let r=await Wt.readFileSync(t,"utf-8"),o=JSON.parse(r);if(!o.siteId)throw new Error(`No siteId found in ${t}. Please link this directory to a Netlify site using 'netlify link'.`);return $.log(`Found site ID from state file: ${o.siteId}`),o.siteId}catch(r){throw r.code==="ENOENT"?new Error(`No .netlify/state.json found in ${e}. Please link this directory to a Netlify site using 'netlify link'.`):r}}
  //# sourceMappingURL=bin-local.js.map