@netlify/agent-runner-cli 1.60.0 → 1.61.0-alpha

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/bin-local.js CHANGED
@@ -1,45 +1,49 @@
  #!/usr/bin/env node
- import C from"process";import dr from"path";import pr from"fs";import An from"minimist";import{createRequire as Rn}from"module";import{createTracerProvider as mr}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as pt}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as gr}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as hr}from"@netlify/otel";import{propagation as ft,context as mt,W3CTraceContextPropagator as yr}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as _r}from"@opentelemetry/exporter-trace-otlp-grpc";import fr from"process";function _(e){let t=fr.env.LOG!=="0";return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var Ye=_("tracing"),gt=async(e,t,r)=>(await mr({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new pt(new Be),new pt(new _r({url:r.exporterUrl}))],instrumentations:[new gr({skipHeaders:!0})]}),r.traceparent?(ft.setGlobalPropagator(new yr),ft.extract(mt.active(),{traceparent:r.traceparent,isRemote:!0})):mt.active());function b(e,t,r){return Ye.log(`\u23F3 TRACE: ${t} starting...`),hr(e,t,r)}var Be=class{export(t,r){for(let o of t)this.logSpan(o);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,o=t.attributes,n=[];for(let[u,l]of Object.entries(o))u.includes("duration")&&typeof l=="number"?n.push(`${u}=${l.toFixed(2)}ms`):n.push(`${u}=${l}`);let i=t.status?.code===2?"\u274C":"\u2705",s=n.length>0?` [${n.join(", ")}]`:"";Ye.log(`${i} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${s}`),t.status?.code===2&&t.status.message&&Ye.log(` \u274C Error: ${t.status.message}`)}};var Er=["error","failed","exception","fatal","panic","abort","crash"];function ht(e){let t=e.split(`
- `),r=[],o=-1,n=0;for(;n<t.length;){let u=t[n].slice(0,500).toLowerCase();if(Er.some(p=>u.includes(p))){let p=Math.max(0,n-10,o+1),f=Math.min(t.length-1,n+20),c=[];for(let m=p;m<=f;m++)c.push(t[m]);r.push(c.join(`
- `)),o=f,n=f+1}else n++}if(r.length===0)return e;let i=r.map((s,u)=>`<extracted_error_chunk order="${u+1}">
+ import C from"process";import fr from"path";import mr from"fs";import Pn from"minimist";import{createRequire as An}from"module";import{createTracerProvider as hr}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as ft}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as yr}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as _r}from"@netlify/otel";import{propagation as mt,context as gt,W3CTraceContextPropagator as Er}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as wr}from"@opentelemetry/exporter-trace-otlp-grpc";import gr from"process";function _(e){let t=gr.env.LOG!=="0";return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var Be=_("tracing"),ht=async(e,t,r)=>(await hr({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new ft(new He),new ft(new wr({url:r.exporterUrl}))],instrumentations:[new yr({skipHeaders:!0})]}),r.traceparent?(mt.setGlobalPropagator(new Er),mt.extract(gt.active(),{traceparent:r.traceparent,isRemote:!0})):gt.active());function b(e,t,r){return Be.log(`\u23F3 TRACE: ${t} starting...`),_r(e,t,r)}var He=class{export(t,r){for(let o of t)this.logSpan(o);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,o=t.attributes,n=[];for(let[c,l]of Object.entries(o))c.includes("duration")&&typeof l=="number"?n.push(`${c}=${l.toFixed(2)}ms`):n.push(`${c}=${l}`);let i=t.status?.code===2?"\u274C":"\u2705",s=n.length>0?` [${n.join(", ")}]`:"";Be.log(`${i} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${s}`),t.status?.code===2&&t.status.message&&Be.log(` \u274C Error: ${t.status.message}`)}};var Tr=["error","failed","exception","fatal","panic","abort","crash"];function yt(e){let t=e.split(`
+ `),r=[],o=-1,n=0;for(;n<t.length;){let c=t[n].slice(0,500).toLowerCase();if(Tr.some(p=>c.includes(p))){let p=Math.max(0,n-10,o+1),f=Math.min(t.length-1,n+20),u=[];for(let m=p;m<=f;m++)u.push(t[m]);r.push(u.join(`
+ `)),o=f,n=f+1}else n++}if(r.length===0)return e;let i=r.map((s,c)=>`<extracted_error_chunk order="${c+1}">
  ${s}
  </extracted_error_chunk>`).join(`
 
- `);return i.length>e.length*.8?e:i}import ke from"process";import{getTracer as ln}from"@netlify/otel";import _e from"process";var oe=class extends Error{constructor(r,o,n){super(r);this.statusCode=o;this.userMessage=n;this.name="GracefulShutdownError"}},yt=e=>e instanceof oe;var be=_e.env.NETLIFY_API_URL,Ae=_e.env.NETLIFY_API_TOKEN,H=_("api"),Ne=()=>_e.env.NETLIFY_LOCAL_MODE==="true",Ee=async(e,t={})=>{if(!be||!Ae)throw new Error("No API URL or token");let r=new URL(e,be),o={...t,headers:{...t.headers,Authorization:`Bearer ${Ae}`}};_e.env.AGENT_RUNNERS_DEBUG==="true"&&(o.headers["x-nf-debug-logging"]="true"),t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),i=n.ok&&n.status<=299;if(_e.env.AGENT_RUNNERS_DEBUG==="true")H.log(`Response headers for ${r}:`),n.headers.forEach((u,l)=>{H.log(` ${l}: ${u}`)});else{let u=n.headers.get("x-request-id")||n.headers.get("x-nf-request-id");H.log(`Request ID for ${r}: ${u||"N/A"}`)}if(i||H.error(`Got status ${n.status} for request ${r}`),t.raw){if(!i)throw new Error(`API request failed: ${n.status} ${n.statusText}`);return n}let s=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!i){let u=typeof s=="string"?s:JSON.stringify(s);throw n.status===404?new oe(`API request failed: 404 - ${u}`,404,"The site associated with this agent run no longer exists."):n.status===403&&t.gracefulOn403?new oe(`API request failed: 403 - ${u}`,403,"Credit limit reached. Please add more credits to continue using Agent Runners."):new Error(`API request failed: ${n.status} - ${u}`)}return s},_t=e=>{H.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(be=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(Ae=e.constants.NETLIFY_API_TOKEN)},Et=()=>({apiUrl:be,token:Ae}),we=async(e,t)=>Ne()?(H.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):Ee(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),q=async(e,t,r)=>Ne()?(H.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):Ee(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var wt=async(e,t)=>Ne()?(H.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):Ee(`/api/v1/agent_runners/${e}/sessions/${t}`),Tt=(e,t,r)=>Ee(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r},gracefulOn403:!0}),It=async(e,t)=>Ne()?(H.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):Ee(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),He=async(e,t)=>{H.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var ie=_("ai_gateway"),qe=null;var xt=async()=>{if(qe)return qe;ie.log("Fetching available AI gateway providers");let e=await fetch(`${Et().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return qe=t,ie.log("Cached AI gateway 
providers",{providerCount:Object.keys(t.providers).length}),t},wr=async(e,t)=>{let o=(await xt()).providers[e];if(!o)return ie.log(`Provider '${e}' not found`),!1;let n=o.models.includes(t);return ie.log(`Model validation for ${e}/${t}`,{isAvailable:n}),n},vt=async({netlify:e,config:t})=>{let r,o,n,i,s=e.constants?.SITE_ID;if(!s)throw new Error("No site id");let u=async()=>{clearTimeout(n),ie.log("Requesting AI gateway information");let l=await Tt(s,t.id,t.sessionId);if({token:r,url:i}=l,o=l.expires_at?l.expires_at*1e3:void 0,ie.log("Got AI gateway information",{token:!!r,expiresAt:o,url:i}),o){let p=o-Date.now()-6e4;p>0&&(n=setTimeout(()=>{u()},p))}};return await Promise.all([u(),xt()]),{get url(){return i},get token(){return r},isModelAvailableForProvider:wr}};import K from"process";import V from"path";import Ce from"fs";import{fileURLToPath as br}from"url";import{createRequire as Ar}from"module";import{execa as Nr,execaCommand as ao}from"execa";import{Transform as Tr}from"stream";var Ir=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),xr=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function vr(){return Object.entries(process.env).filter(([e,t])=>!(!t||Ir.has(e)||xr.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function J(e){if(typeof e!="string")return e;let t=vr();if(t.length===0)return e;let r=e;return t.forEach(o=>{let n=new RegExp(Rr(o),"g");r=r.replace(n,"******")}),r}function Rr(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var se=class extends Tr{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,o){let n=t.toString(),i=J(n);o(null,i)}};function Rt(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(o,n,i){let s=typeof o=="string"?J(o):o;return typeof n=="function"?t(s,n):t(s,n,i)},process.stderr.write=function(o,n,i){let s=typeof o=="string"?J(o):o;return typeof n=="function"?r(s,n):r(s,n,i)}}var Te=null,St=e=>(Te&&Te.destroy(),Te=new ee({totalAllowedTime:e}),Te),bt=()=>Te;var ee=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,o)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let n=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),i=null,s=null;o!==void 0&&(s=new Promise((u,l)=>{i=setTimeout(()=>{l(new Error(`${t} stage exceeded its maximum duration of ${o}ms`))},o)}));try{return s?await Promise.race([r(),s]):await r()}finally{n(),i&&clearTimeout(i)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var At="netlify-agent-runner-context.md",Ke="task-history",We="netlify-context",U=".netlify",ae="results.md",Je="assets";var te=1800*1e3;var Nt={name:"@netlify/agent-runner-cli",type:"module",version:"1.60.0",description:"CLI tool for running Netlify agents",main:"./dist/index.js",types:"./dist/index.d.ts",exports:"./dist/index.js",bin:{"agent-runner-cli":"./dist/bin.js","agent-runner-cli-local":"./dist/bin-local.js"},files:["dist/**/*.js","dist/**/*.d.ts","patches","scripts"],scripts:{build:"tsup",dev:"tsup --watch",prepare:"husky install node_modules/@netlify/eslint-config-node/.husky/",prepublishOnly:"npm ci && npm test",prepack:"npm run build",test:"run-s build format test:dev",format:"run-s build format:check-fix:*","format:ci":"run-s build format:check:*","format:check-fix:lint":"run-e format:check:lint format:fix:lint","format:check:lint":"cross-env-shell eslint $npm_package_config_eslint","format:fix:lint":"cross-env-shell eslint --fix $npm_package_config_eslint","format:check-fix:prettier":"run-e format:check:prettier format:fix:prettier","format:check:prettier":"cross-env-shell prettier --check $npm_package_config_prettier","format:fix:prettier":"cross-env-shell prettier --write $npm_package_config_prettier","test:dev":"run-s build test:dev:*","test:ci":"run-s build test:ci:*","test:dev:vitest":"LOG=0 vitest --exclude '**/integration/**'","test:ci:vitest":"LOG=0 c8 -r lcovonly -r text -r json vitest --exclude '**/integration/**'","test:integration":"vitest run test/integration/","test:integration:codex":"vitest run test/integration/codex.test.ts","test:integration:claude":"vitest run test/integration/claude.test.ts","test:integration:gemini":"vitest run test/integration/gemini.test.ts",postinstall:"node scripts/postinstall.js"},config:{eslint:'--cache --format=codeframe --max-warnings=0 "{src,scripts,test,.github}/**/*.{js,ts,md,html}"',prettier:'--ignore-path .gitignore 
--loglevel=warn "{src,scripts,test,.github}/**/*.{js,ts,md,yml,json,html}" "*.{js,ts,yml,json,html}" ".*.{js,ts,yml,json,html}" "!**/package-lock.json" "!package-lock.json"'},keywords:[],license:"MIT",repository:"netlify/agent-runner-cli",bugs:{url:"https://github.com/netlify/agent-runner-cli/issues"},author:"Netlify Inc.",directories:{test:"test"},devDependencies:{"@commitlint/cli":"^20.0.0","@commitlint/config-conventional":"^20.0.0","@eslint/compat":"^2.0.0","@eslint/js":"^9.35.0","@netlify/eslint-config-node":"^7.0.1","@types/node":"^24.5.0","@typescript-eslint/eslint-plugin":"^8.0.0","@typescript-eslint/parser":"^8.0.0","@vitest/eslint-plugin":"^1.6.6",c8:"^10.0.0","eslint-config-prettier":"^10.1.8","eslint-plugin-n":"^17.0.0",husky:"^9.0.0","patch-package":"^8.0.0",tsup:"^8.5.0",typescript:"^5.0.0","typescript-eslint":"^8.44.0",vitest:"^4.0.16"},dependencies:{"@anthropic-ai/claude-code":"2.0.76","@google/gemini-cli":"0.23.0","@netlify/otel":"^5.1.1","@openai/codex":"0.79.0","@opentelemetry/exporter-trace-otlp-grpc":"^0.208.0",execa:"^9.6.1",minimist:"^1.2.8"}};var Cr=br(import.meta.url),Pr=V.dirname(Cr),Or=Ar(import.meta.url),le=_("shell"),Ve=new Set,$r={preferLocal:!0},N=(e,t,r)=>{let[o,n]=Fr(t,r),i={...$r,...n},s=Nr(e,o,i);kr(s,i),Lr(s);let u=r?.idleTimeout;return u&&u>0&&Dr(s,u),s};var Fr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},kr=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(K.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new se).pipe(K.stdout),e.stdout?.pipe(new se).pipe(K.stdout),e.stderr?.pipe(new se).pipe(K.stderr);return}e.stdout?.pipe(K.stdout),e.stderr?.pipe(K.stderr)},Xe=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(K.kill(-e.pid,t),le.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return le.error("Error killing process:",r),!1}},Ct=e=>Xe(e,"SIGKILL"),Dr=(e,t)=>{let r=null,o=()=>{le.log(`Process ${e.pid} killed due to idle timeout (no output for ${t}ms)`),Xe(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(le.log(`Force killing idle process ${e.pid}`),Ct(e))},5e3)},n=()=>{r&&clearTimeout(r),r=setTimeout(o,t)};n(),e.stdout?.on("data",n),e.stderr?.on("data",n);let i=()=>{r&&(clearTimeout(r),r=null)};e.on("exit",i),e.on("error",i)},Lr=e=>{Ve.add(e);let t=bt();if(t){let r=t.onTimesUp(()=>{le.log(`Global timer expired, killing process ${e.pid}`),Xe(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(le.log(`Force killing process ${e.pid} after timeout`),Ct(e))},5e3)});e.on("exit",()=>{Ve.delete(e),r()}),e.on("error",()=>{Ve.delete(e),r()})}};function Pe(e,t){return!!re(e,t)}function re(e,t){if(!K.env.NETLIFY_LOCAL_MODE)try{let n=Or.resolve(Nt.name),i=V.dirname(n);for(;i!==V.dirname(i);){let s=V.dirname(i);if(V.basename(s)==="node_modules"){let u=V.join(s,".bin",t);if(Ce.existsSync(u))return u;break}i=s}}catch(n){console.error("Could not resolve package.json",n)}if(K.env.NODE_PATH){let n=V.join(K.env.NODE_PATH,".bin",t);if(Ce.existsSync(n))return n}let r=V.join(e,"node_modules",".bin",t);if(Ce.existsSync(r))return r;let o=V.join(Pr,"..","node_modules",".bin",t);if(Ce.existsSync(o))return o}var Ur=_("utils"),Mr=e=>new Promise(t=>{setTimeout(t,e)}),Pt=(e,t=3e3)=>{let r=!1,o=null,n=[],i=null,s=(...u)=>{if(r)return o=u,new Promise(f=>{n.push(f)});r=!0;let l,p=new Promise(f=>{l=f});return i=(async()=>{await Promise.resolve();let f=await e(...u);for(l(f);;){if(await Mr(t),!o)return r=!1,i=null,f;let c=o,m=n;o=null,n=[],f=await e(...c),m.forEach(y=>{y(f)})}})(),p};return 
s.flush=async()=>{if((r||o)&&i)return await i,s.flush()},s},ue=(e,t,r=!1)=>{let o=null,n=null,i=null,s=function(...u){n=u,i=this;let l=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(i,n),n=null,i=null)},t),l&&(e.apply(i,n),n=null,i=null)};return s.cancel=()=>{clearTimeout(o),o=null,n=null,i=null},s.flush=()=>{if(o){clearTimeout(o);let u=n,l=i;o=null,n=null,i=null,e.apply(l,u)}},s},Ot=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(o){t&&(r?.error?r.error("Could not parse JSON",o):Ur.error("Could not parse JSON",o))}},$t=(e,t)=>{let n=".netlify.app",i="agent-";if(!t)return`${i}${e.slice(0,6)}`;let u=`--${t}${n}`;if(u.length>55)return"";let l=60-u.length;if(l<=0)return"";if(l>=i.length+6){let p=Math.min(l-i.length,e.length);return`${i}${e.slice(0,p)}`}return e.slice(0,l)};var Gr=50*1024,ze=(e,t=Gr)=>{if(!e||typeof e!="string"||e.length<=t)return e;let o=e.startsWith("```")?"\n... [truncated]\n```":"... [truncated]";return e.slice(0,t)+o};import{Buffer as Ft}from"buffer";import jr from"path";var kt=_("repo"),Dt=async({config:e,isRetry:t})=>{kt.info("Getting runner diffs");let r=await Br(),{hasChanges:o}=r,{status:n}=r;if(!o)return{hasChanges:!1};if(!t){let I=Hr(n);await qr(I)}kt.info("Changes after processing"),await Qe();let i=await et(n);await Ze(i);let s={stdio:["ignore","pipe","pipe"]},u=await N("git",["diff","--staged"],s),l=String(u.stdout??"");if(o=!!l,!o)return{hasChanges:!1,ignored:i};let p=await N("git",["diff","--staged","--binary"],s),f=String(p.stdout??""),c,m;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await N("git",["commit","-m","Agent runner"]);let I=await N("git",["diff",e.sha,"HEAD"],s);c=String(I.stdout??"");let w=await N("git",["diff",e.sha,"HEAD","--binary"],s),g=String(w.stdout??"");c!==g&&(m=Ft.from(g).toString("base64"))}let y={hasChanges:!0,diff:l,resultDiff:c,ignored:i};return l!==f&&(y.diffBinary=Ft.from(f).toString("base64")),m&&(y.resultDiffBinary=m),y},Ze=async(e=[])=>{await N("git",["add",".",...e])},Qe=async()=>{let e=await N("git",["status","-s"]);return String(e.stdout??"")},Lt=/.. (.+)?\.log$/,Yr=[Lt],Br=async()=>{let e=await Qe();return{hasChanges:(e.trim().length===0?[]:e.split(`
- `).filter(o=>Yr.some(i=>i instanceof RegExp?i.test(o):o===i)?!1:o[1]?.trim()!=="")).length!==0,status:e}},Ut=async()=>{let{stdout:e}=await N("git",["rev-parse","HEAD"]);return String(e??"").trim()},Mt=async()=>{let{stdout:e}=await N("git",["rev-list","--max-parents=0","HEAD"]);return String(e??"").trim()},et=async e=>{e||=await Qe();let t=[".netlify","node_modules","dist"],r=[];return e.split(`
- `).forEach(o=>{t.forEach(i=>{[`?? ${i}`,`?? ${i}${jr.sep}`].some(u=>o.startsWith(u))&&r.push(`:!${i}`)});let n=o.match(Lt)?.[1];n&&r.push(`:!${n}.log`)}),r},Gt=async()=>{await N("git",["reset","--hard","HEAD"])},Hr=e=>{let t=e.split(`
- `).reduce((r,o)=>{if(!o)return r;let[n,i,,...s]=o,u=s.join(""),l=n.trim(),p=i.trim();return r[u]?r[u].change=p:r[u]={filePath:u,stage:l,change:p},r},{});return Object.values(t)},qr=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push(N("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import Wr from"fs/promises";import Jr from"os";import Bt from"path";import fe from"process";import Vr from"readline";import tt from"path";import Kr from"fs/promises";var rt=_("agent-output-utils");async function ce({initialResult:e,agentName:t,hasError:r}){let o="",n=tt.join(process.cwd(),U,ae);try{let i=await Kr.readFile(n,"utf-8");i&&(o=i,rt.log(`Pulled result from ${tt.relative(process.cwd(),n)}`))}catch{rt.log(`No results file found at ${tt.relative(process.cwd(),n)}`)}return o||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function de({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,o=r?.replace(/\s+/g," ").trim().toLowerCase()||"",n="";return o?.includes("ai gateway is not available for your account")||o?.includes("ai gateway is not enabled for your account")?n="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":o?.includes("error when talking to gemini api")?n="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(o?.includes("connection closed prematurely")||o?.includes("499")&&t.toLowerCase().includes("gemini"))&&(n=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),o?.includes("request timed out")&&(n=`The ${t} API request's have timed out. Please try again or use a different available agent.`),o?.includes("network error")&&(n=`The ${t} agent is having network issues. 
Please try again or use a different available agent.`),n&&rt.log(`Providing updated error messsage: ${n}, replacing original error: ${r}`),n||r||void 0}function pe(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error"))}var M=_("runner_claude"),jt="Claude Code",me="claude-opus-4-5-20251101",Yt=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,Xr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(M.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(M.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(M.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function nt({config:e,netlify:t,persistSteps:r,aiGateway:o,continueSession:n,priorAgentSessionId:i,cwd:s=fe.cwd()}){let u=e,{accountType:l,prompt:p,modelVersionOverrides:f}=u,{model:c}=u,m="";if(o){let{token:h,url:d}=o;if(!h||!d)throw new Error("No token or url provided from AI Gateway");if(f?.claude){let a=f?.claude?.[l];if(a){if(!await o.isModelAvailableForProvider("anthropic",a))throw new Error(`Model override '${a}' is not available for anthropic provider`);c=a}}else if(c){if(!await o.isModelAvailableForProvider("anthropic",c))throw new Error(`Model '${c}' is not available for anthropic provider`)}else!!me&&await o.isModelAvailableForProvider("anthropic",me)?(c=me,M.log(`Using default model: ${me}`)):me&&M.log(`Default model ${me} is not available, proceeding without model specification`);fe.env.ANTHROPIC_API_KEY=h,fe.env.ANTHROPIC_BASE_URL=d}else if(!fe.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let y=[],I=[],w={},g=0,T=0,R,S,P=[re(s,"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose",...c?["--model",c]:[],...n?["--continue"]:[],...n&&i?["--resume",i]:[],"-p",p],O=`${fe.env.NVM_BIN}/node`;M.log(`Running ${O} ${P.join(" ")}`);let k=t.utils.run(O,P,{all:!0,env:fe.env,cwd:s,idleTimeout:te});k.stdin?.end();let x=ue(()=>{r?.({steps:y,duration:T})},250),v=(h,d)=>{let a={...h,id:g};g+=1,I.push(a),y.push(a),d||x.flush(),x(),d&&x.flush()},j=Vr.createInterface({input:k.all});return j.on("error",h=>{M.error("Readline interface error",{error:h.message,stack:h.stack})}),j.on("line",h=>{let d=null;try{d=JSON.parse(h)}catch{M.log("Could not parse line",h)}d?.session_id&&d.session_id!==m&&(m=d.session_id),Array.isArray(d?.message?.content)?d.message.content.forEach(a=>{switch(a.type){case"text":{a.text&&v({message:a.text});break}case"image":{typeof a.source=="object"&&a.source&&a.source.type==="base64"&&a.source.media_type?v({message:`![](data:${a.source.media_type};base64,${a.source.data})`}):M.log(`Unsupported image type 
${a.source?.type}`,a.source);break}case"tool_use":{if(a.name==="Task"){let E=a.input?.description&&`\`${a.input.description}\``;v({title:[Yt(a.name),E].filter(Boolean).join(" ")})}else a.id&&(w[a.id]=a);x.flush();break}case"tool_result":{let E=a.tool_use_id?w[a.tool_use_id]:void 0,Z;if(E){let X=E.input?.file_path&&Bt.relative(s,E.input.file_path),$=X&&`\`${X}\``;Z=[Yt(E.name||""),$].filter(Boolean).join(" ")}let Se=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(E?.name||""),Q;if(typeof a.content=="string")Q=a.content;else if(Array.isArray(a.content)){let X=[];a.content.forEach($=>{$?.type==="text"&&typeof $.text=="string"?X.push($.text):$?.type==="image"&&typeof $.source=="object"&&$.source?$.source.type==="base64"&&$.source.media_type?X.push(`![](data:${$.source.media_type};base64,${$.source.data})`):M.log(`Unsupported image type ${$.source.type}`,$.source):M.log(`Unsupported block type ${$?.type}`)}),Q=X.join(`
+ `);return i.length>e.length*.8?e:i}import Qt from"fs/promises";import er from"path";import xe from"process";import{getTracer as un}from"@netlify/otel";import _e from"process";var oe=class extends Error{constructor(r,o,n){super(r);this.statusCode=o;this.userMessage=n;this.name="GracefulShutdownError"}},_t=e=>e instanceof oe;var Ae=_e.env.NETLIFY_API_URL,Ne=_e.env.NETLIFY_API_TOKEN,H=_("api"),Ce=()=>_e.env.NETLIFY_LOCAL_MODE==="true",Ee=async(e,t={})=>{if(!Ae||!Ne)throw new Error("No API URL or token");let r=new URL(e,Ae),o={...t,headers:{...t.headers,Authorization:`Bearer ${Ne}`}};_e.env.AGENT_RUNNERS_DEBUG==="true"&&(o.headers["x-nf-debug-logging"]="true"),t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),i=n.ok&&n.status<=299;if(_e.env.AGENT_RUNNERS_DEBUG==="true")H.log(`Response headers for ${r}:`),n.headers.forEach((c,l)=>{H.log(` ${l}: ${c}`)});else{let c=n.headers.get("x-request-id")||n.headers.get("x-nf-request-id");H.log(`Request ID for ${r}: ${c||"N/A"}`)}if(i||H.error(`Got status ${n.status} for request ${r}`),t.raw){if(!i)throw new Error(`API request failed: ${n.status} ${n.statusText}`);return n}let s=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!i){let c=typeof s=="string"?s:JSON.stringify(s);throw n.status===404?new oe(`API request failed: 404 - ${c}`,404,"The site associated with this agent run no longer exists."):n.status===403&&t.gracefulOn403?new oe(`API request failed: 403 - ${c}`,403,"Credit limit reached. Please add more credits to continue using Agent Runners."):new Error(`API request failed: ${n.status} - ${c}`)}return s},Et=e=>{H.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(Ae=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(Ne=e.constants.NETLIFY_API_TOKEN)},wt=()=>({apiUrl:Ae,token:Ne}),we=async(e,t)=>Ce()?(H.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):Ee(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),q=async(e,t,r)=>Ce()?(H.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):Ee(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var Tt=async(e,t)=>Ce()?(H.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):Ee(`/api/v1/agent_runners/${e}/sessions/${t}`),It=(e,t,r)=>Ee(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r},gracefulOn403:!0}),xt=async(e,t)=>Ce()?(H.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):Ee(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),qe=async(e,t)=>{H.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var ie=_("ai_gateway"),Ke=null;var vt=async()=>{if(Ke)return Ke;ie.log("Fetching available AI gateway providers");let e=await fetch(`${wt().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await 
e.json();return Ke=t,ie.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},Ir=async(e,t)=>{let o=(await vt()).providers[e];if(!o)return ie.log(`Provider '${e}' not found`),!1;let n=o.models.includes(t);return ie.log(`Model validation for ${e}/${t}`,{isAvailable:n}),n},Rt=async({netlify:e,config:t})=>{let r,o,n,i,s=e.constants?.SITE_ID;if(!s)throw new Error("No site id");let c=async()=>{clearTimeout(n),ie.log("Requesting AI gateway information");let l=await It(s,t.id,t.sessionId);if({token:r,url:i}=l,o=l.expires_at?l.expires_at*1e3:void 0,ie.log("Got AI gateway information",{token:!!r,expiresAt:o,url:i}),o){let p=o-Date.now()-6e4;p>0&&(n=setTimeout(()=>{c()},p))}};return await Promise.all([c(),vt()]),{get url(){return i},get token(){return r},isModelAvailableForProvider:Ir}};import K from"process";import V from"path";import Pe from"fs";import{fileURLToPath as Nr}from"url";import{createRequire as Cr}from"module";import{execa as Pr,execaCommand as uo}from"execa";import{Transform as xr}from"stream";var vr=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),Rr=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function Sr(){return Object.entries(process.env).filter(([e,t])=>!(!t||vr.has(e)||Rr.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function J(e){if(typeof e!="string")return e;let t=Sr();if(t.length===0)return e;let r=e;return t.forEach(o=>{let n=new RegExp(br(o),"g");r=r.replace(n,"******")}),r}function br(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var se=class extends xr{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,o){let n=t.toString(),i=J(n);o(null,i)}};function St(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(o,n,i){let s=typeof o=="string"?J(o):o;return typeof n=="function"?t(s,n):t(s,n,i)},process.stderr.write=function(o,n,i){let s=typeof o=="string"?J(o):o;return typeof n=="function"?r(s,n):r(s,n,i)}}var Te=null,bt=e=>(Te&&Te.destroy(),Te=new ee({totalAllowedTime:e}),Te),At=()=>Te;var ee=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,o)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let n=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),i=null,s=null;o!==void 0&&(s=new Promise((c,l)=>{i=setTimeout(()=>{l(new Error(`${t} stage exceeded its maximum duration of ${o}ms`))},o)}));try{return s?await Promise.race([r(),s]):await r()}finally{n(),i&&clearTimeout(i)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var Nt="netlify-agent-runner-context.md",We="task-history",Je="netlify-context",U=".netlify",ae="results.md",Ve="assets";var te=1800*1e3;var Ct={name:"@netlify/agent-runner-cli",type:"module",version:"1.61.0-alpha",description:"CLI tool for running Netlify agents",main:"./dist/index.js",types:"./dist/index.d.ts",exports:"./dist/index.js",bin:{"agent-runner-cli":"./dist/bin.js","agent-runner-cli-local":"./dist/bin-local.js"},files:["dist/**/*.js","dist/**/*.d.ts","patches","scripts"],scripts:{build:"tsup",dev:"tsup --watch",prepare:"husky install node_modules/@netlify/eslint-config-node/.husky/",prepublishOnly:"npm ci && npm test",prepack:"npm run build",test:"run-s build format test:dev",format:"run-s build format:check-fix:*","format:ci":"run-s build format:check:*","format:check-fix:lint":"run-e format:check:lint format:fix:lint","format:check:lint":"cross-env-shell eslint $npm_package_config_eslint","format:fix:lint":"cross-env-shell eslint --fix $npm_package_config_eslint","format:check-fix:prettier":"run-e format:check:prettier format:fix:prettier","format:check:prettier":"cross-env-shell prettier --check $npm_package_config_prettier","format:fix:prettier":"cross-env-shell prettier --write $npm_package_config_prettier","test:dev":"run-s build test:dev:*","test:ci":"run-s build test:ci:*","test:dev:vitest":"LOG=0 vitest --exclude '**/integration/**'","test:ci:vitest":"LOG=0 c8 -r lcovonly -r text -r json vitest --exclude '**/integration/**'","test:integration":"vitest run test/integration/","test:integration:codex":"vitest run test/integration/codex.test.ts","test:integration:claude":"vitest run test/integration/claude.test.ts","test:integration:gemini":"vitest run test/integration/gemini.test.ts",postinstall:"node scripts/postinstall.js"},config:{eslint:'--cache --format=codeframe --max-warnings=0 "{src,scripts,test,.github}/**/*.{js,ts,md,html}"',prettier:'--ignore-path .gitignore 
--loglevel=warn "{src,scripts,test,.github}/**/*.{js,ts,md,yml,json,html}" "*.{js,ts,yml,json,html}" ".*.{js,ts,yml,json,html}" "!**/package-lock.json" "!package-lock.json"'},keywords:[],license:"MIT",repository:"netlify/agent-runner-cli",bugs:{url:"https://github.com/netlify/agent-runner-cli/issues"},author:"Netlify Inc.",directories:{test:"test"},devDependencies:{"@commitlint/cli":"^20.0.0","@commitlint/config-conventional":"^20.0.0","@eslint/compat":"^2.0.0","@eslint/js":"^9.35.0","@netlify/eslint-config-node":"^7.0.1","@types/node":"^24.5.0","@typescript-eslint/eslint-plugin":"^8.0.0","@typescript-eslint/parser":"^8.0.0","@vitest/eslint-plugin":"^1.6.6",c8:"^10.0.0","eslint-config-prettier":"^10.1.8","eslint-plugin-n":"^17.0.0",husky:"^9.0.0","patch-package":"^8.0.0",tsup:"^8.5.0",typescript:"^5.0.0","typescript-eslint":"^8.44.0",vitest:"^4.0.16"},dependencies:{"@anthropic-ai/claude-code":"2.0.76","@google/gemini-cli":"0.23.0","@netlify/otel":"^5.1.1","@openai/codex":"0.81.0","@opentelemetry/exporter-trace-otlp-grpc":"^0.208.0",execa:"^9.6.1",minimist:"^1.2.8"}};var Or=Nr(import.meta.url),Fr=V.dirname(Or),$r=Cr(import.meta.url),le=_("shell"),Xe=new Set,kr={preferLocal:!0},N=(e,t,r)=>{let[o,n]=Dr(t,r),i={...kr,...n},s=Pr(e,o,i);Lr(s,i),Mr(s);let c=r?.idleTimeout;return c&&c>0&&Ur(s,c),s};var Dr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},Lr=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(K.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new se).pipe(K.stdout),e.stdout?.pipe(new se).pipe(K.stdout),e.stderr?.pipe(new se).pipe(K.stderr);return}e.stdout?.pipe(K.stdout),e.stderr?.pipe(K.stderr)},ze=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(K.kill(-e.pid,t),le.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return le.error("Error killing process:",r),!1}},Pt=e=>ze(e,"SIGKILL"),Ur=(e,t)=>{let r=null,o=()=>{le.log(`Process ${e.pid} killed due to idle timeout (no output for ${t}ms)`),ze(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(le.log(`Force killing idle process ${e.pid}`),Pt(e))},5e3)},n=()=>{r&&clearTimeout(r),r=setTimeout(o,t)};n(),e.stdout?.on("data",n),e.stderr?.on("data",n);let i=()=>{r&&(clearTimeout(r),r=null)};e.on("exit",i),e.on("error",i)},Mr=e=>{Xe.add(e);let t=At();if(t){let r=t.onTimesUp(()=>{le.log(`Global timer expired, killing process ${e.pid}`),ze(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(le.log(`Force killing process ${e.pid} after timeout`),Pt(e))},5e3)});e.on("exit",()=>{Xe.delete(e),r()}),e.on("error",()=>{Xe.delete(e),r()})}};function Oe(e,t){return!!re(e,t)}function re(e,t){if(!K.env.NETLIFY_LOCAL_MODE)try{let n=$r.resolve(Ct.name),i=V.dirname(n);for(;i!==V.dirname(i);){let s=V.dirname(i);if(V.basename(s)==="node_modules"){let c=V.join(s,".bin",t);if(Pe.existsSync(c))return c;break}i=s}}catch(n){console.error("Could not resolve package.json",n)}if(K.env.NODE_PATH){let n=V.join(K.env.NODE_PATH,".bin",t);if(Pe.existsSync(n))return n}let r=V.join(e,"node_modules",".bin",t);if(Pe.existsSync(r))return r;let o=V.join(Fr,"..","node_modules",".bin",t);if(Pe.existsSync(o))return o}var jr=_("utils"),Gr=e=>new Promise(t=>{setTimeout(t,e)}),Ot=(e,t=3e3)=>{let r=!1,o=null,n=[],i=null,s=(...c)=>{if(r)return o=c,new Promise(f=>{n.push(f)});r=!0;let l,p=new Promise(f=>{l=f});return i=(async()=>{await Promise.resolve();let f=await e(...c);for(l(f);;){if(await Gr(t),!o)return r=!1,i=null,f;let u=o,m=n;o=null,n=[],f=await e(...u),m.forEach(y=>{y(f)})}})(),p};return 
s.flush=async()=>{if((r||o)&&i)return await i,s.flush()},s},ce=(e,t,r=!1)=>{let o=null,n=null,i=null,s=function(...c){n=c,i=this;let l=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(i,n),n=null,i=null)},t),l&&(e.apply(i,n),n=null,i=null)};return s.cancel=()=>{clearTimeout(o),o=null,n=null,i=null},s.flush=()=>{if(o){clearTimeout(o);let c=n,l=i;o=null,n=null,i=null,e.apply(l,c)}},s},Ft=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(o){t&&(r?.error?r.error("Could not parse JSON",o):jr.error("Could not parse JSON",o))}},$t=(e,t)=>{let n=".netlify.app",i="agent-";if(!t)return`${i}${e.slice(0,6)}`;let c=`--${t}${n}`;if(c.length>55)return"";let l=60-c.length;if(l<=0)return"";if(l>=i.length+6){let p=Math.min(l-i.length,e.length);return`${i}${e.slice(0,p)}`}return e.slice(0,l)};var Yr=50*1024,Ze=(e,t=Yr)=>{if(!e||typeof e!="string"||e.length<=t)return e;let o=e.startsWith("```")?"\n... [truncated]\n```":"... [truncated]";return e.slice(0,t)+o};import{Buffer as kt}from"buffer";import Br from"path";var Dt=_("repo"),Lt=async({config:e,isRetry:t})=>{Dt.info("Getting runner diffs");let r=await qr(),{hasChanges:o}=r,{status:n}=r;if(!o)return{hasChanges:!1};if(!t){let I=Kr(n);await Wr(I)}Dt.info("Changes after processing"),await et();let i=await tt(n);await Qe(i);let s={stdio:["ignore","pipe","pipe"]},c=await N("git",["diff","--staged"],s),l=String(c.stdout??"");if(o=!!l,!o)return{hasChanges:!1,ignored:i};let p=await N("git",["diff","--staged","--binary"],s),f=String(p.stdout??""),u,m;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await N("git",["commit","-m","Agent runner"]);let I=await N("git",["diff",e.sha,"HEAD"],s);u=String(I.stdout??"");let w=await N("git",["diff",e.sha,"HEAD","--binary"],s),g=String(w.stdout??"");u!==g&&(m=kt.from(g).toString("base64"))}let y={hasChanges:!0,diff:l,resultDiff:u,ignored:i};return l!==f&&(y.diffBinary=kt.from(f).toString("base64")),m&&(y.resultDiffBinary=m),y},Qe=async(e=[])=>{await N("git",["add",".",...e])},et=async()=>{let e=await N("git",["status","-s"]);return String(e.stdout??"")},Ut=/.. (.+)?\.log$/,Hr=[Ut],qr=async()=>{let e=await et();return{hasChanges:(e.trim().length===0?[]:e.split(`
+ `).filter(o=>Hr.some(i=>i instanceof RegExp?i.test(o):o===i)?!1:o[1]?.trim()!=="")).length!==0,status:e}},Mt=async()=>{let{stdout:e}=await N("git",["rev-parse","HEAD"]);return String(e??"").trim()},jt=async()=>{let{stdout:e}=await N("git",["rev-list","--max-parents=0","HEAD"]);return String(e??"").trim()},tt=async e=>{e||=await et();let t=[".netlify","node_modules","dist"],r=[];return e.split(`
+ `).forEach(o=>{t.forEach(i=>{[`?? ${i}`,`?? ${i}${Br.sep}`].some(c=>o.startsWith(c))&&r.push(`:!${i}`)});let n=o.match(Ut)?.[1];n&&r.push(`:!${n}.log`)}),r},Gt=async()=>{await N("git",["reset","--hard","HEAD"])},Kr=e=>{let t=e.split(`
+ `).reduce((r,o)=>{if(!o)return r;let[n,i,,...s]=o,c=s.join(""),l=n.trim(),p=i.trim();return r[c]?r[c].change=p:r[c]={filePath:c,stage:l,change:p},r},{});return Object.values(t)},Wr=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push(N("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import Vr from"fs/promises";import Xr from"os";import Ht from"path";import fe from"process";import zr from"readline";import rt from"path";import Jr from"fs/promises";var nt=_("agent-output-utils");async function ue({initialResult:e,agentName:t,hasError:r}){let o="",n=rt.join(process.cwd(),U,ae);try{let i=await Jr.readFile(n,"utf-8");i&&(o=i,nt.log(`Pulled result from ${rt.relative(process.cwd(),n)}`))}catch{nt.log(`No results file found at ${rt.relative(process.cwd(),n)}`)}return o||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function de({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,o=r?.replace(/\s+/g," ").trim().toLowerCase()||"",n="";return o?.includes("ai gateway is not available for your account")||o?.includes("ai gateway is not enabled for your account")?n="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":o?.includes("error when talking to gemini api")?n="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(o?.includes("connection closed prematurely")||o?.includes("499")&&t.toLowerCase().includes("gemini"))&&(n=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),o?.includes("request timed out")&&(n=`The ${t} API request's have timed out. Please try again or use a different available agent.`),o?.includes("network error")&&(n=`The ${t} agent is having network issues. 
Please try again or use a different available agent.`),n&&nt.log(`Providing updated error messsage: ${n}, replacing original error: ${r}`),n||r||void 0}function pe(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error"))}var M=_("runner_claude"),Yt="Claude Code",me="claude-opus-4-5-20251101",Bt=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,Zr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(M.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(M.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(M.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function ot({config:e,netlify:t,persistSteps:r,aiGateway:o,continueSession:n,priorAgentSessionId:i,cwd:s=fe.cwd()}){let c=e,{accountType:l,prompt:p,modelVersionOverrides:f}=c,{model:u}=c,m="";if(o){let{token:h,url:d}=o;if(!h||!d)throw new Error("No token or url provided from AI Gateway");if(f?.claude){let a=f?.claude?.[l];if(a){if(!await o.isModelAvailableForProvider("anthropic",a))throw new Error(`Model override '${a}' is not available for anthropic provider`);u=a}}else if(u){if(!await o.isModelAvailableForProvider("anthropic",u))throw new Error(`Model '${u}' is not available for anthropic provider`)}else!!me&&await o.isModelAvailableForProvider("anthropic",me)?(u=me,M.log(`Using default model: ${me}`)):me&&M.log(`Default model ${me} is not available, proceeding without model specification`);fe.env.ANTHROPIC_API_KEY=h,fe.env.ANTHROPIC_BASE_URL=d}else if(!fe.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let y=[],I=[],w={},g=0,T=0,R,S,P=[re(s,"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose",...u?["--model",u]:[],...n?["--continue"]:[],...n&&i?["--resume",i]:[],"-p",p],O=`${fe.env.NVM_BIN}/node`;M.log(`Running ${O} ${P.join(" ")}`);let k=t.utils.run(O,P,{all:!0,env:fe.env,cwd:s,idleTimeout:te});k.stdin?.end();let x=ce(()=>{r?.({steps:y,duration:T})},250),v=(h,d)=>{let a={...h,id:g};g+=1,I.push(a),y.push(a),d||x.flush(),x(),d&&x.flush()},G=zr.createInterface({input:k.all});return G.on("error",h=>{M.error("Readline interface error",{error:h.message,stack:h.stack})}),G.on("line",h=>{let d=null;try{d=JSON.parse(h)}catch{M.log("Could not parse line",h)}d?.session_id&&d.session_id!==m&&(m=d.session_id),Array.isArray(d?.message?.content)?d.message.content.forEach(a=>{switch(a.type){case"text":{a.text&&v({message:a.text});break}case"image":{typeof a.source=="object"&&a.source&&a.source.type==="base64"&&a.source.media_type?v({message:`![](data:${a.source.media_type};base64,${a.source.data})`}):M.log(`Unsupported image type 
${a.source?.type}`,a.source);break}case"tool_use":{if(a.name==="Task"){let E=a.input?.description&&`\`${a.input.description}\``;v({title:[Bt(a.name),E].filter(Boolean).join(" ")})}else a.id&&(w[a.id]=a);x.flush();break}case"tool_result":{let E=a.tool_use_id?w[a.tool_use_id]:void 0,Z;if(E){let X=E.input?.file_path&&Ht.relative(s,E.input.file_path),F=X&&`\`${X}\``;Z=[Bt(E.name||""),F].filter(Boolean).join(" ")}let be=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(E?.name||""),Q;if(typeof a.content=="string")Q=a.content;else if(Array.isArray(a.content)){let X=[];a.content.forEach(F=>{F?.type==="text"&&typeof F.text=="string"?X.push(F.text):F?.type==="image"&&typeof F.source=="object"&&F.source?F.source.type==="base64"&&F.source.media_type?X.push(`![](data:${F.source.media_type};base64,${F.source.data})`):M.log(`Unsupported image type ${F.source.type}`,F.source):M.log(`Unsupported block type ${F?.type}`)}),Q=X.join(`
 
- `)}Se&&Q&&(Q=`\`\`\`
+ `)}be&&Q&&(Q=`\`\`\`
  ${Q.trim()}
- \`\`\``),v({title:Z,message:Q},!0);break}case"thinking":{a.thinking&&v({title:"Thinking",message:a.thinking},!0);break}default:M.log(`Message content type is not supported ${a.type}`,a)}}):d?.type==="result"&&(T=d.duration_ms||0,d.is_error?S=d.result:R=d.result,[I,y].forEach(a=>{a[a.length-1]?.message===R&&a.pop()}))}),await k.catch(h=>{({error:S,result:R}=Xr({catchError:h,runCmd:k,error:S,result:R,runnerName:"Claude"}))}),j.close(),x.flush(),{steps:I,duration:T,result:await ce({initialResult:R,agentName:jt,hasError:!!S}),error:de({error:S,agentName:jt}),isRetryableError:pe(S),agentSessionId:m}}var Ht=async()=>{let e=Bt.join(Jr.homedir(),".claude");await Wr.rm(e,{recursive:!0,force:!0})};import Ie from"fs/promises";import Kt from"os";import Oe from"path";import ne from"process";import zr from"readline";var G=_("runner_codex"),qt="Codex CLI",ge="gpt-5.2",Zr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(G.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(G.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(G.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function ot({config:e,netlify:t,persistSteps:r=void 0,sendSteps:o=void 0,aiGateway:n,cwd:i=ne.cwd()}){let{accountType:s,prompt:u,modelVersionOverrides:l}=e,{model:p}=e;if(n){let{token:d,url:a}=n;if(!d||!a)throw new Error("No token or url provided from AI Gateway");if(l?.codex){let E=l?.codex?.[s];if(E){if(!await n.isModelAvailableForProvider("openai",E))throw new Error(`Model override '${E}' is not available for openai provider`);p=E}}else if(p){if(!await n.isModelAvailableForProvider("openai",p))throw new Error(`Model '${p}' is not available for openai provider`)}else!!ge&&await n.isModelAvailableForProvider("openai",ge)?(p=ge,G.log(`Using default model: ${ge}`)):ge&&G.log(`Default model ${ge} is not available, proceeding without model specification`);ne.env.OPENAI_API_KEY=d,ne.env.OPENAI_BASE_URL=a}else if(!ne.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let f=[],c=[],m=[],y={},I=0,w=0,g,T,R=`${ne.env.NVM_BIN}/node`,S=Oe.join(Kt.homedir(),".codex"),P=Oe.join(S,"config.toml"),O=Oe.join(S,"auth.json");try{await Ie.mkdir(S,{recursive:!0});let d={OPENAI_API_KEY:ne.env.OPENAI_API_KEY};await Ie.writeFile(O,JSON.stringify(d,null,2),"utf-8"),G.log("Created Codex auth.json file");let a="";try{a=await Ie.readFile(P,"utf-8")}catch{}a.includes("web_search_request")||(a.includes("[features]")?a=a.replace(/\[features\]/,`[features]
+ \`\`\``),v({title:Z,message:Q},!0);break}case"thinking":{a.thinking&&v({title:"Thinking",message:a.thinking},!0);break}default:M.log(`Message content type is not supported ${a.type}`,a)}}):d?.type==="result"&&(T=d.duration_ms||0,d.is_error?S=d.result:R=d.result,[I,y].forEach(a=>{a[a.length-1]?.message===R&&a.pop()}))}),await k.catch(h=>{({error:S,result:R}=Zr({catchError:h,runCmd:k,error:S,result:R,runnerName:"Claude"}))}),G.close(),x.flush(),{steps:I,duration:T,result:await ue({initialResult:R,agentName:Yt,hasError:!!S}),error:de({error:S,agentName:Yt}),isRetryableError:pe(S),agentSessionId:m}}var qt=async()=>{let e=Ht.join(Xr.homedir(),".claude");await Vr.rm(e,{recursive:!0,force:!0})};import Ie from"fs/promises";import Wt from"os";import Fe from"path";import ne from"process";import Qr from"readline";var j=_("runner_codex"),Kt="Codex CLI",ge="",en=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(j.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(j.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(j.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function it({config:e,netlify:t,persistSteps:r=void 0,sendSteps:o=void 0,aiGateway:n,cwd:i=ne.cwd()}){let{accountType:s,prompt:c,modelVersionOverrides:l}=e,{model:p}=e;if(n){let{token:d,url:a}=n;if(!d||!a)throw new Error("No token or url provided from AI Gateway");if(l?.codex){let E=l?.codex?.[s];if(E){if(!await n.isModelAvailableForProvider("openai",E))throw new Error(`Model override '${E}' is not available for openai provider`);p=E}}else if(p){if(!await n.isModelAvailableForProvider("openai",p))throw new Error(`Model '${p}' is not available for openai provider`)}else!!ge&&await n.isModelAvailableForProvider("openai",ge)?(p=ge,j.log(`Using default model: ${ge}`)):ge&&j.log(`Default model ${ge} is not available, proceeding without model specification`);ne.env.OPENAI_API_KEY=d,ne.env.OPENAI_BASE_URL=a}else if(!ne.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let f=[],u=[],m=[],y={},I=0,w=0,g,T,R=`${ne.env.NVM_BIN}/node`,S=Fe.join(Wt.homedir(),".codex"),P=Fe.join(S,"config.toml"),O=Fe.join(S,"auth.json");try{await Ie.mkdir(S,{recursive:!0});let d={OPENAI_API_KEY:ne.env.OPENAI_API_KEY};await Ie.writeFile(O,JSON.stringify(d,null,2),"utf-8"),j.log("Created Codex auth.json file");let a="";try{a=await Ie.readFile(P,"utf-8")}catch{}a.includes("web_search_request")||(a.includes("[features]")?a=a.replace(/\[features\]/,`[features]
16
16
  web_search_request = true`):a+=`
17
17
  [features]
18
18
  web_search_request = true
19
- `,await Ie.writeFile(P,a,"utf-8"),G.log("Updated Codex config with web_search_request enabled"))}catch(d){throw G.warn("Failed to setup Codex config and credentials",{error:d.message}),new Error(`Codex setup failed: ${d.message}`)}let k=[re(i,"codex"),"exec","--yolo","--json","--enable","web_search_request",...p?["--model",p]:[],u].filter(Boolean);G.log(`Running ${R} ${k.join(" ")}`);let x=t.utils.run(R,k,{all:!0,cwd:i,env:{...ne.env},idleTimeout:te}),v=ue(()=>{r?.({steps:f,duration:w}),o?.({steps:c,duration:w}),c=[]},250),j=(d,a)=>{d.id=I,I+=1,m.push(d),f.push(d),c.push(d),a||v.flush(),v(),a&&v.flush()},h=zr.createInterface({input:x.all});return h.on("error",d=>{G.error("Readline interface error",{error:d.message,stack:d.stack})}),h.on("line",d=>{let a=null;try{a=JSON.parse(d)}catch{G.log("Could not parse line",d);return}if(a?.duration_ms&&(w=a.duration_ms),a?.type==="item.started"&&a?.item?.type==="command_execution")y[a.item.id]=a.item;else if(a?.type==="item.completed"&&a?.item?.type==="command_execution"){let E=en(a.item);E&&j(E,!0)}else if(a?.type==="item.completed"&&a?.item?.type==="reasoning"){let E={title:"Reasoning",message:a.item.text};j(E,!0)}else if(a?.type==="local_shell_call")y[a.call_id]=a;else if(a?.type==="local_shell_call_output"){let E=tn(y[a.call_id],a);E&&j(E,!0)}else a?.type==="message"&&a.role==="assistant"?g=a.content.map(E=>E.text).join(`
19
+ `,await Ie.writeFile(P,a,"utf-8"),j.log("Updated Codex config with web_search_request enabled"))}catch(d){throw j.warn("Failed to setup Codex config and credentials",{error:d.message}),new Error(`Codex setup failed: ${d.message}`)}let k=[re(i,"codex"),"exec","--yolo","--json","--enable","web_search_request",...p?["--model",p]:[],c].filter(Boolean);j.log(`Running ${R} ${k.join(" ")}`);let x=t.utils.run(R,k,{all:!0,cwd:i,env:{...ne.env},idleTimeout:te}),v=ce(()=>{r?.({steps:f,duration:w}),o?.({steps:u,duration:w}),u=[]},250),G=(d,a)=>{d.id=I,I+=1,m.push(d),f.push(d),u.push(d),a||v.flush(),v(),a&&v.flush()},h=Qr.createInterface({input:x.all});return h.on("error",d=>{j.error("Readline interface error",{error:d.message,stack:d.stack})}),h.on("line",d=>{let a=null;try{a=JSON.parse(d)}catch{j.log("Could not parse line",d);return}if(a?.duration_ms&&(w=a.duration_ms),a?.type==="item.started"&&a?.item?.type==="command_execution")y[a.item.id]=a.item;else if(a?.type==="item.completed"&&a?.item?.type==="command_execution"){let E=rn(a.item);E&&G(E,!0)}else if(a?.type==="item.completed"&&a?.item?.type==="reasoning"){let E={title:"Reasoning",message:a.item.text};G(E,!0)}else if(a?.type==="local_shell_call")y[a.call_id]=a;else if(a?.type==="local_shell_call_output"){let E=nn(y[a.call_id],a);E&&G(E,!0)}else a?.type==="message"&&a.role==="assistant"?g=a.content.map(E=>E.text).join(`
20
20
  `):a?.type==="message"&&a.role==="system"&&(T=a.content.map(E=>E.text).join(`
21
- `))}),await x.catch(d=>{let a=Zr({catchError:d,runCmd:x,error:T,result:g,runnerName:"Codex"});T=a.error,g=a.result}),h.close(),v.flush(),{steps:m,duration:w,result:await ce({initialResult:g,agentName:qt,hasError:!!T}),error:de({error:T,agentName:qt}),isRetryableError:pe(T)}}var Wt=async()=>{let e=Oe.join(Kt.homedir(),".codex");await Ie.rm(e,{recursive:!0,force:!0})},Qr=new Set(["bash","-lc"]),en=e=>{if(!e||e.type!=="command_execution")return null;let t=e.command;t.startsWith("bash -lc ")&&(t=t.replace(/^bash -lc ['"]/,"").replace(/['"]$/,""));let r=`Running \`${t}\``,o=e.aggregated_output?.trim();return o&&(o=`\`\`\`
21
+ `))}),await x.catch(d=>{let a=en({catchError:d,runCmd:x,error:T,result:g,runnerName:"Codex"});T=a.error,g=a.result}),h.close(),v.flush(),{steps:m,duration:w,result:await ue({initialResult:g,agentName:Kt,hasError:!!T}),error:de({error:T,agentName:Kt}),isRetryableError:pe(T)}}var Jt=async()=>{let e=Fe.join(Wt.homedir(),".codex");await Ie.rm(e,{recursive:!0,force:!0})},tn=new Set(["bash","-lc"]),rn=e=>{if(!e||e.type!=="command_execution")return null;let t=e.command;t.startsWith("bash -lc ")&&(t=t.replace(/^bash -lc ['"]/,"").replace(/['"]$/,""));let r=`Running \`${t}\``,o=e.aggregated_output?.trim();return o&&(o=`\`\`\`
22
22
  ${o}
23
23
  \`\`\``),e.status==="failed"&&e.exit_code!==0&&(o=o?`${o}
24
24
 
25
- *Exit code: ${e.exit_code}*`:`*Command failed with exit code: ${e.exit_code}*`),{title:r,message:o}},tn=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(i=>!Qr.has(i)),o=r?`Running \`${r.join(" ")}\``:void 0,n;try{n=JSON.parse(t.output).output?.trim(),n&&(n=`\`\`\`
25
+ *Exit code: ${e.exit_code}*`:`*Command failed with exit code: ${e.exit_code}*`),{title:r,message:o}},nn=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(i=>!tn.has(i)),o=r?`Running \`${r.join(" ")}\``:void 0,n;try{n=JSON.parse(t.output).output?.trim(),n&&(n=`\`\`\`
26
26
  ${n.trim()}
27
- \`\`\``)}catch(i){G.error("Could not decode outputMsg",i,t.output)}return{title:o,message:n}};import $e from"fs/promises";import Vt from"os";import Fe from"path";import he from"process";import rn from"readline";var Y=_("runner_gemini"),Jt="Gemini CLI",ye="",nn=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(Y.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(Y.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(Y.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),on={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"},sn=async()=>{let e=Fe.join(Vt.homedir(),".gemini"),t=Fe.join(e,"settings.json");try{await $e.mkdir(e,{recursive:!0});let r={};try{let o=await $e.readFile(t,"utf-8");r=JSON.parse(o)}catch{Y.log("Creating new Gemini CLI settings file")}r.general||(r.general={}),r.general.previewFeatures||(r.general.previewFeatures=!0),r.model||(r.model={}),r.model.compressionThreshold!==.3&&(r.model.compressionThreshold=.3),await $e.writeFile(t,JSON.stringify(r,null,2),"utf-8"),Y.log("Configured Gemini CLI settings (preview features and compression threshold)")}catch(r){Y.error("Failed to ensure Gemini CLI settings",{error:r.message})}};async function it({config:e,netlify:t,persistSteps:r=void 0,sendSteps:o=void 0,aiGateway:n,cwd:i=he.cwd()}){let{accountType:s,prompt:u,modelVersionOverrides:l}=e,{model:p}=e;if(await sn(),n){let{token:h,url:d}=n;if(!h||!d)throw new Error("No token or url provided from AI Gateway");if(l?.gemini){let a=l?.gemini?.[s];if(a){if(!await n.isModelAvailableForProvider("gemini",a))throw new Error(`Model override '${a}' is not available for gemini provider`);p=a}}if(!p)!!ye&&await n.isModelAvailableForProvider("gemini",ye)?(p=ye,Y.log(`Using default model: ${ye}`)):ye&&Y.log(`Default model ${ye} is not available, proceeding without model specification`);else if(p&&!l?.gemini?.[s]&&!await n.isModelAvailableForProvider("gemini",p))throw new Error(`Model '${p}' is not available for gemini provider`);he.env.GEMINI_API_KEY=h,he.env.GOOGLE_GEMINI_BASE_URL=d}else if(!he.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let f=[],c=[],m=[],y={},I=0,w=0,g,T,R=[re(i,"gemini"),...p?["--model",p]:[],"--yolo","--output-format","stream-json","-p",u],S=`${he.env.NVM_BIN}/node`;Y.log(`Running ${S} ${R.join(" ")}`);let P=t.utils.run(S,R,{all:!0,env:he.env,cwd:i,idleTimeout:te});P.stdin?.end();let O=ue(()=>{r?.({steps:f,duration:w}),o?.({steps:c,duration:w}),c=[]},250),k=(h,d)=>{h.id=I,I+=1,m.push(h),f.push(h),c.push(h),d||O.flush(),O(),d&&O.flush()},x=rn.createInterface({input:P.all});x.on("error",h=>{Y.error("Readline interface error",{error:h.message,stack:h.stack})});let v="",j=()=>{v&&k({message:v.trim()}),v=""};return x.on("line",h=>{let d=null;try{if(h.startsWith("[API Error")){let a=h.match(/\[api error: (.+?)]$/i)?.[1];d={type:"error",value:Ot(a,!1)?.error?.message||a||"Gemini encountered error"}}else 
d=JSON.parse(h)}catch{return}if(d)switch(["message","result"].includes(d.type)||j(),d.type){case"message":{d.role!=="user"&&d.content&&(v+=d.content);break}case"tool_use":{let a=on[d.tool_name]??d.tool_name,E=d.parameters?.file_path,Z=E&&Fe.relative(i,E),Se=d.parameters?.command,X={title:[a,Z&&`\`${Z}\``,Se&&`\`${Se}\``].filter(Boolean).join(" ")};y[d.tool_id]=X,O.flush();break}case"tool_result":{let a=y[d.tool_id];a&&(d.output&&(a.message=`\`\`\`
27
+ \`\`\``)}catch(i){j.error("Could not decode outputMsg",i,t.output)}return{title:o,message:n}};import $e from"fs/promises";import Xt from"os";import ke from"path";import he from"process";import on from"readline";var Y=_("runner_gemini"),Vt="Gemini CLI",ye="",sn=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(Y.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(Y.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(Y.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),an={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"},ln=async()=>{let e=ke.join(Xt.homedir(),".gemini"),t=ke.join(e,"settings.json");try{await $e.mkdir(e,{recursive:!0});let r={};try{let o=await $e.readFile(t,"utf-8");r=JSON.parse(o)}catch{Y.log("Creating new Gemini CLI settings file")}r.general||(r.general={}),r.general.previewFeatures||(r.general.previewFeatures=!0),r.model||(r.model={}),r.model.compressionThreshold!==.3&&(r.model.compressionThreshold=.3),await $e.writeFile(t,JSON.stringify(r,null,2),"utf-8"),Y.log("Configured Gemini CLI settings (preview features and compression threshold)")}catch(r){Y.error("Failed to ensure Gemini CLI settings",{error:r.message})}};async function st({config:e,netlify:t,persistSteps:r=void 0,sendSteps:o=void 0,aiGateway:n,cwd:i=he.cwd()}){let{accountType:s,prompt:c,modelVersionOverrides:l}=e,{model:p}=e;if(await ln(),n){let{token:h,url:d}=n;if(!h||!d)throw new Error("No token or url provided from AI Gateway");if(l?.gemini){let a=l?.gemini?.[s];if(a){if(!await n.isModelAvailableForProvider("gemini",a))throw new Error(`Model override '${a}' is not available for gemini provider`);p=a}}if(!p)!!ye&&await n.isModelAvailableForProvider("gemini",ye)?(p=ye,Y.log(`Using default model: ${ye}`)):ye&&Y.log(`Default model ${ye} is not available, proceeding without model specification`);else if(p&&!l?.gemini?.[s]&&!await n.isModelAvailableForProvider("gemini",p))throw new Error(`Model '${p}' is not available for gemini provider`);he.env.GEMINI_API_KEY=h,he.env.GOOGLE_GEMINI_BASE_URL=d}else if(!he.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let f=[],u=[],m=[],y={},I=0,w=0,g,T,R=[re(i,"gemini"),...p?["--model",p]:[],"--yolo","--output-format","stream-json","-p",c],S=`${he.env.NVM_BIN}/node`;Y.log(`Running ${S} ${R.join(" ")}`);let P=t.utils.run(S,R,{all:!0,env:he.env,cwd:i,idleTimeout:te});P.stdin?.end();let O=ce(()=>{r?.({steps:f,duration:w}),o?.({steps:u,duration:w}),u=[]},250),k=(h,d)=>{h.id=I,I+=1,m.push(h),f.push(h),u.push(h),d||O.flush(),O(),d&&O.flush()},x=on.createInterface({input:P.all});x.on("error",h=>{Y.error("Readline interface error",{error:h.message,stack:h.stack})});let v="",G=()=>{v&&k({message:v.trim()}),v=""};return x.on("line",h=>{let d=null;try{if(h.startsWith("[API Error")){let a=h.match(/\[api error: (.+?)]$/i)?.[1];d={type:"error",value:Ft(a,!1)?.error?.message||a||"Gemini encountered error"}}else 
d=JSON.parse(h)}catch{return}if(d)switch(["message","result"].includes(d.type)||G(),d.type){case"message":{d.role!=="user"&&d.content&&(v+=d.content);break}case"tool_use":{let a=an[d.tool_name]??d.tool_name,E=d.parameters?.file_path,Z=E&&ke.relative(i,E),be=d.parameters?.command,X={title:[a,Z&&`\`${Z}\``,be&&`\`${be}\``].filter(Boolean).join(" ")};y[d.tool_id]=X,O.flush();break}case"tool_result":{let a=y[d.tool_id];a&&(d.output&&(a.message=`\`\`\`
28
28
  ${d.output.trim()}
29
- \`\`\``),k(a,!0));break}case"result":{w=d.stats?.duration_ms,d.status==="error"?T=d.error?.message:g=v.trim();break}case"error":{T=d.error;break}case"finished":break;default:{Y.warn("Unhandled message type:",d.type);break}}}),await P.catch(h=>{({error:T,result:g}=nn({catchError:h,runCmd:P,error:T,result:g,runnerName:"Gemini"}))}),x.close(),O.flush(),{steps:m,duration:w,result:await ce({initialResult:g,agentName:Jt,hasError:!!T}),error:de({error:T,agentName:Jt}),isRetryableError:pe(T)}}var Xt=async()=>{let e=Fe.join(Vt.homedir(),".gemini");await $e.rm(e,{recursive:!0,force:!0})};var an={codex:{runner:ot,clean:Wt},claude:{runner:nt,clean:Ht},gemini:{runner:it,clean:Xt}},zt=an;var Zt=_("init_stage"),Qt=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:o})=>await b(ln(),"init-stage",async n=>{let i=performance.now();n?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.runnerVersion":o||"unknown"});let s=zt[e.runner];if(!s)throw n?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let u=un({apiToken:r});_t(u);let l=e.useGateway?await vt({netlify:u,config:e}):void 0;n?.setAttributes({"init.aiGateway.created":!!l});let p=5*1024,f=Pt(async({steps:I=[],duration:w})=>{let g=I.map(T=>{let R=T.title?ze(J(T.title),p):void 0,S=T.message?ze(J(T.message)):void 0;return{...T,title:R,message:S}});I.length=0;try{return await q(e.id,e.sessionId,{steps:g,duration:w})}catch(T){Zt.error("persistSteps failed",{error:T?.message||T})}},t);Zt.info("Adding build files to stage");let c=await et();await Ze(c);let m;e.hasRepo?e.sha?(m=e.sha,n?.setAttributes({"init.sha.source":"provided"})):(m=await Ut(),await we(e.id,{sha:m}),n?.setAttributes({"init.sha.source":"current_commit"})):(m=await Mt(),n?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let y=performance.now()-i;return n?.setAttributes({"init.sha":m||"unknown","init.duration.ms":y,"init.status":"success"}),{aiGateway:l,context:u,persistSteps:f,runner:s,sha:m}}),un=({apiToken:e})=>({constants:{NETLIFY_API_HOST:ke.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||ke.env.NETLIFY_API_TOKEN,SITE_ID:ke.env.SITE_ID,FUNCTIONS_DIST:ke.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:N}});import{getTracer as st}from"@netlify/otel";import cn from"crypto";import z from"fs/promises";import L from"path";import B from"process";var D=_("context"),dn=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:B.env.NETLIFY_TEAM_ID,userId:B.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:B.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},pn=10,fn=async e=>{let{name:t,ext:r}=L.parse(e),o=e,n=L.join(B.cwd(),U,o),i=0;for(;await mn(n);){if(i>=pn)throw new Error("Failed to generate context file");o=`${t}-${cn.randomUUID().slice(0,5)}${r}`,n=L.join(B.cwd(),U,o),i+=1}return o},mn=async e=>{try{return await z.access(e),!0}catch{return!1}},gn=async()=>{try{D.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return D.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(o=>o&&typeof o=="object"&&o.key==="catchall-consumer");return r?!r.contextScopes||typeof 
r.contextScopes!="object"?(D.warn("Catchall consumer missing or invalid contextScopes"),null):r:(D.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?D.warn("Netlify features context request timed out"):D.warn("Failed to fetch Netlify features context:",e.message),null}},hn=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let o=await r.text();return await z.writeFile(t,o,"utf-8"),!0}catch(r){return r.name==="AbortError"?D.warn(`Download timeout for ${e}`):D.warn(`Failed to download context file ${e}:`,r.message),!1}},De=null,yn=async()=>{if(De)return De;let e=await gn();if(!e)return[];let t=L.join(B.cwd(),U,We);await z.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([n,i])=>{if(!i||typeof i!="object"||!i.endpoint||!i.scope)return D.warn(`Invalid scope data for ${n}, skipping...`),null;let s=`${n}.md`,u=L.join(t,s),l=L.join(U,We,s);return D.log(`Downloading ${i.scope} context...`),await hn(i.endpoint,u)?(D.log(`Downloaded: ${l}`),{scope:i.scope,path:l,key:n}):null});return De=(await Promise.all(r)).filter(n=>n!==null),De},er=async({cliPath:e,netlify:t,config:r,buildErrorContext:o})=>{let n=dn(t),i=await fn(At),s=L.join(B.cwd(),U);await z.mkdir(s,{recursive:!0});let u=L.join(U,i),l=L.join(B.cwd(),u),p=L.join(B.cwd(),U,ae);try{await z.unlink(p),D.log(`Deleted old results file: ${p}`)}catch{}let f=o?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
29
+ \`\`\``),k(a,!0));break}case"result":{w=d.stats?.duration_ms,d.status==="error"?T=d.error?.message:g=v.trim();break}case"error":{T=d.error;break}case"finished":break;default:{Y.warn("Unhandled message type:",d.type);break}}}),await P.catch(h=>{({error:T,result:g}=sn({catchError:h,runCmd:P,error:T,result:g,runnerName:"Gemini"}))}),x.close(),O.flush(),{steps:m,duration:w,result:await ue({initialResult:g,agentName:Vt,hasError:!!T}),error:de({error:T,agentName:Vt}),isRetryableError:pe(T)}}var zt=async()=>{let e=ke.join(Xt.homedir(),".gemini");await $e.rm(e,{recursive:!0,force:!0})};var cn={codex:{runner:it,clean:Jt},claude:{runner:ot,clean:qt},gemini:{runner:st,clean:zt}},Zt=cn;var De=_("init_stage"),tr=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:o})=>await b(un(),"init-stage",async n=>{let i=performance.now();n?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.runnerVersion":o||"unknown"});let s=Zt[e.runner];if(!s)throw n?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let c=pn({apiToken:r});Et(c);let l=e.useGateway?await Rt({netlify:c,config:e}):void 0;n?.setAttributes({"init.aiGateway.created":!!l});let p=5*1024,f=Ot(async({steps:I=[],duration:w})=>{let g=I.map(T=>{let R=T.title?Ze(J(T.title),p):void 0,S=T.message?Ze(J(T.message)):void 0;return{...T,title:R,message:S}});I.length=0;try{return await q(e.id,e.sessionId,{steps:g,duration:w})}catch(T){De.error("persistSteps failed",{error:T?.message||T})}},t);De.info("Adding build files to stage");let u=await tt();await Qe(u),await dn();let m;e.hasRepo?e.sha?(m=e.sha,n?.setAttributes({"init.sha.source":"provided"})):(m=await Mt(),await we(e.id,{sha:m}),n?.setAttributes({"init.sha.source":"current_commit"})):(m=await jt(),n?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let y=performance.now()-i;return n?.setAttributes({"init.sha":m||"unknown","init.duration.ms":y,"init.status":"success"}),{aiGateway:l,context:c,persistSteps:f,runner:s,sha:m}}),dn=async()=>{let e=er.join(xe.cwd(),".git","hooks","pre-commit"),t=`#!/bin/bash
30
+ echo "Error: Commits are disabled. The deployment system handles commits automatically."
31
+ echo "Leave your changes uncommitted."
32
+ exit 1
33
+ `;try{await Qt.mkdir(er.dirname(e),{recursive:!0}),await Qt.writeFile(e,t,{mode:493}),De.info("Installed pre-commit hook to prevent agent commits")}catch(r){De.warn("Failed to install pre-commit hook",{error:r?.message||r})}},pn=({apiToken:e})=>({constants:{NETLIFY_API_HOST:xe.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||xe.env.NETLIFY_API_TOKEN,SITE_ID:xe.env.SITE_ID,FUNCTIONS_DIST:xe.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:N}});import{getTracer as at}from"@netlify/otel";import fn from"crypto";import z from"fs/promises";import L from"path";import B from"process";var D=_("context"),mn=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:B.env.NETLIFY_TEAM_ID,userId:B.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:B.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},gn=10,hn=async e=>{let{name:t,ext:r}=L.parse(e),o=e,n=L.join(B.cwd(),U,o),i=0;for(;await yn(n);){if(i>=gn)throw new Error("Failed to generate context file");o=`${t}-${fn.randomUUID().slice(0,5)}${r}`,n=L.join(B.cwd(),U,o),i+=1}return o},yn=async e=>{try{return await z.access(e),!0}catch{return!1}},_n=async()=>{try{D.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return D.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(o=>o&&typeof o=="object"&&o.key==="catchall-consumer");return r?!r.contextScopes||typeof r.contextScopes!="object"?(D.warn("Catchall consumer missing or invalid contextScopes"),null):r:(D.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?D.warn("Netlify features context request timed out"):D.warn("Failed to fetch Netlify features context:",e.message),null}},En=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let o=await r.text();return await z.writeFile(t,o,"utf-8"),!0}catch(r){return r.name==="AbortError"?D.warn(`Download timeout for ${e}`):D.warn(`Failed to download context file ${e}:`,r.message),!1}},Le=null,wn=async()=>{if(Le)return Le;let e=await _n();if(!e)return[];let t=L.join(B.cwd(),U,Je);await z.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([n,i])=>{if(!i||typeof i!="object"||!i.endpoint||!i.scope)return D.warn(`Invalid scope data for ${n}, skipping...`),null;let s=`${n}.md`,c=L.join(t,s),l=L.join(U,Je,s);return D.log(`Downloading ${i.scope} context...`),await En(i.endpoint,c)?(D.log(`Downloaded: ${l}`),{scope:i.scope,path:l,key:n}):null});return Le=(await Promise.all(r)).filter(n=>n!==null),Le},rr=async({cliPath:e,netlify:t,config:r,buildErrorContext:o})=>{let n=mn(t),i=await hn(Nt),s=L.join(B.cwd(),U);await z.mkdir(s,{recursive:!0});let c=L.join(U,i),l=L.join(B.cwd(),c),p=L.join(B.cwd(),U,ae);try{await z.unlink(p),D.log(`Deleted old results file: ${p}`)}catch{}let f=o?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
30
34
  Your task is to analyze and fix the build errors.
31
35
  Don't fix the errors by reverting changes. Apply fixes that address the errors.
32
36
  Don't try to run the build yourself. Just fix the errors.
33
37
 
34
38
  <build_error_context>
35
39
  ${o}
36
- </build_error_context>`:"",c="";r.siteContext&&r.siteContext.length!==0&&(c=`
40
+ </build_error_context>`:"",u="";r.siteContext&&r.siteContext.length!==0&&(u=`
37
41
  <project_rules>
38
42
  ${r.siteContext.filter(g=>g.site_context).map(g=>typeof g.site_context=="string"?g.site_context:typeof g.site_context=="object"?JSON.stringify(g.site_context):"").join(`
39
43
 
40
44
  `)}
41
45
  </project_rules>
42
- `);let m="";if(r.sessionHistoryContext?.length){let g=L.join(B.cwd(),U,Ke);await z.mkdir(g,{recursive:!0});let T=await Promise.all(r.sessionHistoryContext.map(async(R,S)=>{let P=S+1,O=`attempt-${P}.md`,k=L.join(g,O),x=L.join(U,Ke,O),v=`# Task History - Attempt ${P}
46
+ `);let m="";if(r.sessionHistoryContext?.length){let g=L.join(B.cwd(),U,We);await z.mkdir(g,{recursive:!0});let T=await Promise.all(r.sessionHistoryContext.map(async(R,S)=>{let P=S+1,O=`attempt-${P}.md`,k=L.join(g,O),x=L.join(U,We,O),v=`# Task History - Attempt ${P}
43
47
 
44
48
  ## Request - what the user asked for
45
49
  ${R.request}
@@ -58,7 +62,7 @@ ${R.response}
58
62
  `)}
59
63
 
60
64
  </session_history_context>
61
- `}let y=await yn(),I="";y.length>0&&(I=`
65
+ `}let y=await wn(),I="";y.length>0&&(I=`
62
66
  <netlify_features_context>
63
67
  If the user request is explicitly related to a specific Netlify feature (e.g., Netlify Forms, Netlify Functions, etc.), you MUST review the relevant documentation below in addition to reviewing the project files.
64
68
  DO NOT force the use of any Netlify feature if the user request does not explicitly require it or if the project has alternative implementations in place already.
@@ -88,10 +92,10 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
88
92
  - NEVER print potentially sensitive values (like secrets) in the planning output or results
89
93
  </responses>
90
94
  <attachments>
91
- - for requests that require work with attachments or assets, take into account that uploaded attachments are stored in ${s}/${Je} folder
92
- - move assets from ${s}/${Je} folder to the project assets folder if they are referenced in a code or applied changes
95
+ - for requests that require work with attachments or assets, take into account that uploaded attachments are stored in ${s}/${Ve} folder
96
+ - move assets from ${s}/${Ve} folder to the project assets folder if they are referenced in a code or applied changes
93
97
  </attachments>
94
- ${c}
98
+ ${u}
95
99
  </requirements>
96
100
 
97
101
  <extra_context>
@@ -127,19 +131,19 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
127
131
  </request>
128
132
 
129
133
  Use the following file for the complete context of the ask, the environment, and what's available. ${l} You MUST READ ALL OF IT. Make sure to read it first. Never cite or paraphrase private context.
130
- `),w};var _n=_("prompt"),tr=async({cliPath:e,config:t,netlify:r,buildErrorContext:o})=>{let n=await er({cliPath:e,config:t,netlify:r,buildErrorContext:o});return process.env.AGENT_RUNNER_DEBUG&&_n.log("Contextful Prompt:",n),{prompt:n}};var Le=_("inference_stage"),rr=5,Ue=async e=>{let{cliPath:t,config:r,context:o,buildErrors:n,runner:i,persistSteps:s,aiGateway:u,attempt:l,contextPrefix:p,priorAgentSessionId:f}=e;Le.log(`Running inference stage, attempt ${l} of ${rr}`);let c=await b(st(),"inference-stage",async m=>{m?.setAttributes({"inference.attempt":l||1}),Rt();let{prompt:y}=await b(st(),"compose-prompt",async()=>await tr({cliPath:t,config:r,buildErrorContext:En(n),netlify:o})),I=`
134
+ `),w};var Tn=_("prompt"),nr=async({cliPath:e,config:t,netlify:r,buildErrorContext:o})=>{let n=await rr({cliPath:e,config:t,netlify:r,buildErrorContext:o});return process.env.AGENT_RUNNER_DEBUG&&Tn.log("Contextful Prompt:",n),{prompt:n}};var Ue=_("inference_stage"),or=5,Me=async e=>{let{cliPath:t,config:r,context:o,buildErrors:n,runner:i,persistSteps:s,aiGateway:c,attempt:l,contextPrefix:p,priorAgentSessionId:f}=e;Ue.log(`Running inference stage, attempt ${l} of ${or}`);let u=await b(at(),"inference-stage",async m=>{m?.setAttributes({"inference.attempt":l||1}),St();let{prompt:y}=await b(at(),"compose-prompt",async()=>await nr({cliPath:t,config:r,buildErrorContext:In(n),netlify:o})),I=`
131
135
  ${p||""}
132
136
  ${y}
133
- `.trim(),w={...r,prompt:I},g=await b(st(),`run-${r.runner}`,async()=>await i({aiGateway:u,config:w,netlify:o,persistSteps:s,continueSession:!!(l&&l>1),priorAgentSessionId:f}));return g.result&&(g.result=J(g.result)),g.error&&(g.error=J(g.error)),await s.flush(),g});if(c.error){if(Le.error("Runner failed",{stepsCount:c.steps.length,duration:c.duration,error:c.error,isRetryableError:c.isRetryableError,attempt:l||1,agentSessionId:c.agentSessionId}),c.isRetryableError&&(!l||l<rr))return Le.log("Retrying inference stage"),await new Promise(y=>setTimeout(y,5e3)),{runnerResult:(await Ue({...e,attempt:(l||1)+1,priorAgentSessionId:c.agentSessionId,contextPrefix:c.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw Le.log("Do not retry inference stage"),new Error(c.error)}return{runnerResult:c}},En=e=>!e||e.length===0?"":`
137
+ `.trim(),w={...r,prompt:I},g=await b(at(),`run-${r.runner}`,async()=>await i({aiGateway:c,config:w,netlify:o,persistSteps:s,continueSession:!!(l&&l>1),priorAgentSessionId:f}));return g.result&&(g.result=J(g.result)),g.error&&(g.error=J(g.error)),await s.flush(),g});if(u.error){if(Ue.error("Runner failed",{stepsCount:u.steps.length,duration:u.duration,error:u.error,isRetryableError:u.isRetryableError,attempt:l||1,agentSessionId:u.agentSessionId}),u.isRetryableError&&(!l||l<or))return Ue.log("Retrying inference stage"),await new Promise(y=>setTimeout(y,5e3)),{runnerResult:(await Me({...e,attempt:(l||1)+1,priorAgentSessionId:u.agentSessionId,contextPrefix:u.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw Ue.log("Do not retry inference stage"),new Error(u.error)}return{runnerResult:u}},In=e=>!e||e.length===0?"":`
134
138
  Deploy failed. Here are the errors to review on the latest build:
135
139
 
136
140
  Below are all of the logs with potential issues that we extracted. Some of them may be false positives; discern them carefully and ensure fixes are relevant.
137
141
 
138
142
  ${e.pop()}
139
- `;import In from"process";import{getTracer as at}from"@netlify/otel";import{getTracer as wn}from"@netlify/otel";var xe=_("deploy"),nr=async e=>await b(wn(),"create-preview-deploy",async t=>Tn(e,t)),Tn=async({netlify:e,hasRepo:t,skipBuild:r,message:o="Agent Preview",deploySubdomain:n,cliPath:i,filter:s},u)=>{try{let l=["deploy","--message",`"${o}"`,"--json","--draft","--verbose"];t||(xe.log("Deploy: Uploading source zip"),l.push("--upload-source-zip")),n&&l.push("--alias",n),s&&l.push("--filter",s),r?(xe.log("Deploy: Skipping build"),l.push("--no-build")):l.push("--context","deploy-preview");let p=i||"netlify";xe.log(`Running: ${p} ${l.join(" ")}`),u?.setAttributes({cmd:p,args:l});let f=await e.utils.run(p,l,{stdio:["ignore","pipe","pipe"]}),c=JSON.parse(String(f.stdout??"").trim());u?.setAttributes({success:!0,deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id}),xe.log(`
140
- Preview deploy created successfully:`,{deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id});let m={deployId:c.deploy_id,previewUrl:c.deploy_url,logsUrl:c.logs,siteId:c.site_id};return t||(m.sourceZipFilename=c.source_zip_filename),m}catch(l){throw xe.error("Failed to create preview deploy via CLI:",l),u?.setAttributes({success:!1,error:l.message}),l}};var ve=_("deploy_stage"),lt=async e=>await b(at(),"run-deploy-stage",async()=>xn(e)),xn=async({cliPath:e,config:t,context:r,result:o,filter:n,isRetry:i})=>{let s=await b(at(),"get-runner-diffs",async()=>await Dt({config:t,isRetry:i}));if(ve.info("Resolved git",{hasChanges:s.hasChanges,ignored:s.ignored??[]}),!s.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:u,resultDiff:l,diffBinary:p,resultDiffBinary:f}=s,c=!0;ve.log("Preview deploy condition check:",{resultUndefined:o===void 0,resultType:typeof o,hasChanges:c,wouldCreatePreview:o!==void 0&&c});let m=null;if(o!==void 0&&c)try{let y;try{let I=await b(at(),"get-runner-session",async()=>await wt(t.id,t.sessionId));I?.title&&(y=I.title)}catch(I){ve.warn("Failed to fetch session title, using fallback message:",I.message)}await q(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),m=await nr({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:y,skipBuild:!1,deploySubdomain:$t(t.id,In.env.SITE_NAME),filter:n})}catch(y){return ve.warn("Failed to create preview deploy (continuing with agent run):",y),{diff:u,resultDiff:l,hasChanges:c,previewInfo:null,diffBinary:p,resultDiffBinary:f,deployError:y instanceof Error?y.message:String(y)}}return ve.log("Git status",{hasDiff:!!u,hasChanges:c}),{diff:u,resultDiff:l,hasChanges:c,previewInfo:m,diffBinary:p,resultDiffBinary:f}};import{getTracer as Ge}from"@netlify/otel";async function or(e,t){let{maxRetries:r,baseDelay:o,onRetry:n}=t,i;for(let s=1;s<=r;s++)try{return await e()}catch(u){if(i=u,s===r)throw i;n&&n(s,i),await new Promise(l=>setTimeout(l,o*s))}throw i}var Me=class{scanDiffForForms(t){let r=[],o=null,n=[],i=t.split(`
141
- `);for(let s of i)if(s.startsWith("diff --git")){if(o&&n.length>0){let l=this.containsNetlifyForm(n,o);l&&r.push(l)}let u=s.split(" ");o=u[u.length-1].replace(/^b\//,""),n=[]}else s.startsWith("+")&&!s.startsWith("+++")&&n.push(s.slice(1));if(o&&n.length>0){let s=this.containsNetlifyForm(n,o);s&&r.push(s)}return{detected:r.length>0,matches:r}}containsNetlifyForm(t,r){let o=t.join(`
142
- `),n=[{pattern:/<[\w-]*form[\s\S]*?(data-)?netlify/i,name:"standard form element"},{pattern:/<[A-Z][\w]*[\s\S]*?component\s*=\s*["']form["'][\s\S]*?(data-)?netlify/i,name:"component with form prop"}];for(let{pattern:i,name:s}of n){let u=o.match(i);if(u){let l=u.index||0,p=Math.max(0,l-20),f=Math.min(o.length,l+u[0].length+20),c=o.slice(p,f).trim();return c=c.replace(/\s+/g," "),c.length>100&&(c=c.slice(0,97)+"..."),{file:r,snippet:`[${s}] ${c}`}}}return null}};var A=_("cleanup_stage"),ir=async e=>await b(Ge(),"cleanup-stage",async()=>vn(e)),ut=1024*1024*10,vn=async({config:e,diff:t,result:r,duration:o,resultDiff:n,diffBinary:i,resultDiffBinary:s,previewInfo:u})=>{let l={result:r||"Done",duration:o};u&&u.deployId&&(l.deploy_id=u.deployId),u&&u.sourceZipFilename&&(l.result_zip_file_name=u.sourceZipFilename);let p=t||i||n||s;if(p&&(l.diff_produced=!0),p){let f=new Me,c=t||i||"",m=f.scanDiffForForms(c);m.detected?(A.log("Detected Netlify form(s) in diff:"),m.matches.forEach(({file:y,snippet:I})=>{A.log(` - ${y}: ${I}`)}),l.has_netlify_form=!0):A.log("Did not detect Netlify form(s) in diff"),A.log("Did not detect Netlify form(s) in diff")}if(p)try{A.log("Getting pre-signed URLs for diff upload");let f=await It(e.id,e.sessionId),c=[];(t||i)&&c.push(He(f.result.upload_url,i||t).then(()=>{l.result_diff_s3_key=f.result.s3_key,A.log("Successfully uploaded result_diff to S3")})),(n||s)&&c.push(He(f.cumulative.upload_url,s||n).then(()=>{l.cumulative_diff_s3_key=f.cumulative.s3_key,A.log("Successfully uploaded cumulative_diff to S3")})),A.log(`Uploading ${c.length} diff(s) to S3 in parallel`),await Promise.all(c),(n||s)&&(A.log("Updating agent runner with cumulative diff S3 key"),await b(Ge(),"update-runner",async()=>{await we(e.id,{result_diff_s3_key:f.cumulative.s3_key})}))}catch(f){A.error("S3 upload failed, falling back to inline diffs:",f);let c=Buffer.byteLength(t||i||""),m=Buffer.byteLength(s||n||"");if(c>ut||m>ut){let y=`Diffs exceed maximum inline size of ${ut} bytes.`;throw A.error(y),new Error(y)}l.result_diff=t,l.result_diff_binary=i,(n||s)&&(l.cumulative_diff=n,l.cumulative_diff_binary=s,A.log("Updating agent runner with inline diffs (fallback)"),await b(Ge(),"update-runner",async()=>{await we(e.id,{result_diff:n,result_diff_binary:s})}))}else A.log("No diffs to upload");return A.log("Updated agent runner with result"),await or(async()=>await b(Ge(),"update-runner-session",()=>q(e.id,e.sessionId,l)),{maxRetries:3,baseDelay:1e3,onRetry:(f,c)=>{A.error(`Error updating agent runner session (attempt ${f}):`,c),A.log("Retrying...")}}),A.log("Finished updating agent runner with result"),{sessionUpdate:l}};import{getTracer as sr,shutdownTracers as Sn,withActiveSpan as ar}from"@netlify/otel";var bn=Rn(import.meta.url),lr=bn("../package.json"),Re=_("pipeline_index"),je=3,ur=async({config:e,apiToken:t,cliPath:r="netlify",cwd:o,filter:n,tracing:i={}})=>{let s,{withStageTimer:u}=St(ee.timeUnits.hours(4)),l=await gt(lr.version,e.id,i);try{await ar(sr(),"run-pipeline",{},l,async()=>{let{aiGateway:p,context:f,persistSteps:c,runner:m,sha:y}=await u("init",()=>Qt({config:e,apiToken:t,cliPath:r,cwd:o,filter:n,runnerVersion:lr.version}),ee.timeUnits.minutes(10));s=m.clean,e.sha=y;let{runnerResult:I}=await u("inference",()=>Ue({cliPath:r,config:e,context:f,runner:m.runner,persistSteps:c,aiGateway:p}));await q(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let w=await 
u("deploy",()=>lt({cliPath:r,config:e,context:f,result:I.result,filter:n,isRetry:!1})),g=I,T=[];if(w.hasChanges&&w.deployError){T.push(ht(w.deployError));let x=1,v=!1;for(;x<=je&&!w.previewInfo&&!v;)Re.log(`Deploy attempt had errors. Retrying. ${x}/${je}`),await ar(sr(),"deploy-stage",async j=>{j?.setAttributes({"stage.attempt":x});let h;try{h=(await u(`inference-retry-${x}`,()=>Ue({cliPath:r,config:e,context:f,runner:m.runner,persistSteps:c,aiGateway:p,buildErrors:T,priorAgentSessionId:I.agentSessionId}))).runnerResult}catch(d){Re.warn(`Inference retry ${x} failed, stopping deploy retries:`,d),v=!0;return}g={...h,steps:[...g.steps||[],...h.steps||[]],duration:(g.duration||0)+(h.duration||0)},w=await u(`deploy-retry-${x}`,()=>lt({cliPath:r,config:e,context:f,result:h.result,filter:n,isRetry:!0})),w.deployError&&T.push(w.deployError),x++});x>je&&!w.previewInfo&&console.warn(`Deploy validation failed after ${je} attempts`)}let{diff:R,resultDiff:S,previewInfo:P,diffBinary:O,resultDiffBinary:k}=w;await u("cleanup",()=>ir({config:e,diff:R,result:g.result,duration:g.duration,resultDiff:S,diffBinary:O,resultDiffBinary:k,previewInfo:P}),ee.timeUnits.minutes(10)),process.env.NETLIFY_LOCAL_MODE||(await s?.(),await Gt())})}catch(p){if(yt(p)){Re.info("Agent run terminated gracefully",{statusCode:p.statusCode,reason:p.message}),await s?.();try{await q(e.id,e.sessionId,{result:p.userMessage,state:"error"})}catch{Re.info("Could not update session (site may have been deleted)")}return}Re.error("Got error while running pipeline",p),await s?.();let f=p instanceof Error&&p.message;throw await q(e.id,e.sessionId,{result:f||"Encountered error when running agent",state:"error"}),p}finally{await Sn()}};import cr from"crypto";var F=_("bin_local"),W=An(C.argv.slice(2),{string:["cwd","cli-path","filter","prompt","runner","model","netlify-api-token"],boolean:["verbose","help"],alias:{h:"help",v:"verbose"}}),dt=()=>{console.log(`
143
+ `;import Rn from"process";import{getTracer as lt}from"@netlify/otel";import{getTracer as xn}from"@netlify/otel";var ve=_("deploy"),ir=async e=>await b(xn(),"create-preview-deploy",async t=>vn(e,t)),vn=async({netlify:e,hasRepo:t,skipBuild:r,message:o="Agent Preview",deploySubdomain:n,cliPath:i,filter:s},c)=>{try{let l=["deploy","--message",`"${o}"`,"--json","--draft","--verbose"];t||(ve.log("Deploy: Uploading source zip"),l.push("--upload-source-zip")),n&&l.push("--alias",n),s&&l.push("--filter",s),r?(ve.log("Deploy: Skipping build"),l.push("--no-build")):l.push("--context","deploy-preview");let p=i||"netlify";ve.log(`Running: ${p} ${l.join(" ")}`),c?.setAttributes({cmd:p,args:l});let f=await e.utils.run(p,l,{stdio:["ignore","pipe","pipe"]}),u=JSON.parse(String(f.stdout??"").trim());c?.setAttributes({success:!0,deployId:u.deploy_id,deployUrl:u.deploy_url,siteId:u.site_id}),ve.log(`
144
+ Preview deploy created successfully:`,{deployId:u.deploy_id,deployUrl:u.deploy_url,siteId:u.site_id});let m={deployId:u.deploy_id,previewUrl:u.deploy_url,logsUrl:u.logs,siteId:u.site_id};return t||(m.sourceZipFilename=u.source_zip_filename),m}catch(l){throw ve.error("Failed to create preview deploy via CLI:",l),c?.setAttributes({success:!1,error:l.message}),l}};var Re=_("deploy_stage"),ct=async e=>await b(lt(),"run-deploy-stage",async()=>Sn(e)),Sn=async({cliPath:e,config:t,context:r,result:o,filter:n,isRetry:i})=>{let s=await b(lt(),"get-runner-diffs",async()=>await Lt({config:t,isRetry:i}));if(Re.info("Resolved git",{hasChanges:s.hasChanges,ignored:s.ignored??[]}),!s.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:c,resultDiff:l,diffBinary:p,resultDiffBinary:f}=s,u=!0;Re.log("Preview deploy condition check:",{resultUndefined:o===void 0,resultType:typeof o,hasChanges:u,wouldCreatePreview:o!==void 0&&u});let m=null;if(o!==void 0&&u)try{let y;try{let I=await b(lt(),"get-runner-session",async()=>await Tt(t.id,t.sessionId));I?.title&&(y=I.title)}catch(I){Re.warn("Failed to fetch session title, using fallback message:",I.message)}await q(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),m=await ir({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:y,skipBuild:!1,deploySubdomain:$t(t.id,Rn.env.SITE_NAME),filter:n})}catch(y){return Re.warn("Failed to create preview deploy (continuing with agent run):",y),{diff:c,resultDiff:l,hasChanges:u,previewInfo:null,diffBinary:p,resultDiffBinary:f,deployError:y instanceof Error?y.message:String(y)}}return Re.log("Git status",{hasDiff:!!c,hasChanges:u}),{diff:c,resultDiff:l,hasChanges:u,previewInfo:m,diffBinary:p,resultDiffBinary:f}};import{getTracer as Ge}from"@netlify/otel";async function sr(e,t){let{maxRetries:r,baseDelay:o,onRetry:n}=t,i;for(let s=1;s<=r;s++)try{return await e()}catch(c){if(i=c,s===r)throw i;n&&n(s,i),await new Promise(l=>setTimeout(l,o*s))}throw i}var je=class{scanDiffForForms(t){let r=[],o=null,n=[],i=t.split(`
145
+ `);for(let s of i)if(s.startsWith("diff --git")){if(o&&n.length>0){let l=this.containsNetlifyForm(n,o);l&&r.push(l)}let c=s.split(" ");o=c[c.length-1].replace(/^b\//,""),n=[]}else s.startsWith("+")&&!s.startsWith("+++")&&n.push(s.slice(1));if(o&&n.length>0){let s=this.containsNetlifyForm(n,o);s&&r.push(s)}return{detected:r.length>0,matches:r}}containsNetlifyForm(t,r){let o=t.join(`
146
+ `),n=[{pattern:/<[\w-]*form[\s\S]*?(data-)?netlify/i,name:"standard form element"},{pattern:/<[A-Z][\w]*[\s\S]*?component\s*=\s*["']form["'][\s\S]*?(data-)?netlify/i,name:"component with form prop"}];for(let{pattern:i,name:s}of n){let c=o.match(i);if(c){let l=c.index||0,p=Math.max(0,l-20),f=Math.min(o.length,l+c[0].length+20),u=o.slice(p,f).trim();return u=u.replace(/\s+/g," "),u.length>100&&(u=u.slice(0,97)+"..."),{file:r,snippet:`[${s}] ${u}`}}}return null}};var A=_("cleanup_stage"),ar=async e=>await b(Ge(),"cleanup-stage",async()=>bn(e)),ut=1024*1024*10,bn=async({config:e,diff:t,result:r,duration:o,resultDiff:n,diffBinary:i,resultDiffBinary:s,previewInfo:c})=>{let l={result:r||"Done",duration:o};c&&c.deployId&&(l.deploy_id=c.deployId),c&&c.sourceZipFilename&&(l.result_zip_file_name=c.sourceZipFilename);let p=t||i||n||s;if(p&&(l.diff_produced=!0),p){let f=new je,u=t||i||"",m=f.scanDiffForForms(u);m.detected?(A.log("Detected Netlify form(s) in diff:"),m.matches.forEach(({file:y,snippet:I})=>{A.log(` - ${y}: ${I}`)}),l.has_netlify_form=!0):A.log("Did not detect Netlify form(s) in diff"),A.log("Did not detect Netlify form(s) in diff")}if(p)try{A.log("Getting pre-signed URLs for diff upload");let f=await xt(e.id,e.sessionId),u=[];(t||i)&&u.push(qe(f.result.upload_url,i||t).then(()=>{l.result_diff_s3_key=f.result.s3_key,A.log("Successfully uploaded result_diff to S3")})),(n||s)&&u.push(qe(f.cumulative.upload_url,s||n).then(()=>{l.cumulative_diff_s3_key=f.cumulative.s3_key,A.log("Successfully uploaded cumulative_diff to S3")})),A.log(`Uploading ${u.length} diff(s) to S3 in parallel`),await Promise.all(u),(n||s)&&(A.log("Updating agent runner with cumulative diff S3 key"),await b(Ge(),"update-runner",async()=>{await we(e.id,{result_diff_s3_key:f.cumulative.s3_key})}))}catch(f){A.error("S3 upload failed, falling back to inline diffs:",f);let u=Buffer.byteLength(t||i||""),m=Buffer.byteLength(s||n||"");if(u>ut||m>ut){let y=`Diffs exceed maximum inline size of ${ut} bytes.`;throw A.error(y),new Error(y)}l.result_diff=t,l.result_diff_binary=i,(n||s)&&(l.cumulative_diff=n,l.cumulative_diff_binary=s,A.log("Updating agent runner with inline diffs (fallback)"),await b(Ge(),"update-runner",async()=>{await we(e.id,{result_diff:n,result_diff_binary:s})}))}else A.log("No diffs to upload");return A.log("Updated agent runner with result"),await sr(async()=>await b(Ge(),"update-runner-session",()=>q(e.id,e.sessionId,l)),{maxRetries:3,baseDelay:1e3,onRetry:(f,u)=>{A.error(`Error updating agent runner session (attempt ${f}):`,u),A.log("Retrying...")}}),A.log("Finished updating agent runner with result"),{sessionUpdate:l}};import{getTracer as lr,shutdownTracers as Nn,withActiveSpan as cr}from"@netlify/otel";var Cn=An(import.meta.url),ur=Cn("../package.json"),Se=_("pipeline_index"),Ye=3,dr=async({config:e,apiToken:t,cliPath:r="netlify",cwd:o,filter:n,tracing:i={}})=>{let s,{withStageTimer:c}=bt(ee.timeUnits.hours(4)),l=await ht(ur.version,e.id,i);try{await cr(lr(),"run-pipeline",{},l,async()=>{let{aiGateway:p,context:f,persistSteps:u,runner:m,sha:y}=await c("init",()=>tr({config:e,apiToken:t,cliPath:r,cwd:o,filter:n,runnerVersion:ur.version}),ee.timeUnits.minutes(10));s=m.clean,e.sha=y;let{runnerResult:I}=await c("inference",()=>Me({cliPath:r,config:e,context:f,runner:m.runner,persistSteps:u,aiGateway:p}));await q(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let w=await 
c("deploy",()=>ct({cliPath:r,config:e,context:f,result:I.result,filter:n,isRetry:!1})),g=I,T=[];if(w.hasChanges&&w.deployError){T.push(yt(w.deployError));let x=1,v=!1;for(;x<=Ye&&!w.previewInfo&&!v;)Se.log(`Deploy attempt had errors. Retrying. ${x}/${Ye}`),await cr(lr(),"deploy-stage",async G=>{G?.setAttributes({"stage.attempt":x});let h;try{h=(await c(`inference-retry-${x}`,()=>Me({cliPath:r,config:e,context:f,runner:m.runner,persistSteps:u,aiGateway:p,buildErrors:T,priorAgentSessionId:I.agentSessionId}))).runnerResult}catch(d){Se.warn(`Inference retry ${x} failed, stopping deploy retries:`,d),v=!0;return}g={...h,steps:[...g.steps||[],...h.steps||[]],duration:(g.duration||0)+(h.duration||0)},w=await c(`deploy-retry-${x}`,()=>ct({cliPath:r,config:e,context:f,result:h.result,filter:n,isRetry:!0})),w.deployError&&T.push(w.deployError),x++});x>Ye&&!w.previewInfo&&console.warn(`Deploy validation failed after ${Ye} attempts`)}let{diff:R,resultDiff:S,previewInfo:P,diffBinary:O,resultDiffBinary:k}=w;await c("cleanup",()=>ar({config:e,diff:R,result:g.result,duration:g.duration,resultDiff:S,diffBinary:O,resultDiffBinary:k,previewInfo:P}),ee.timeUnits.minutes(10)),process.env.NETLIFY_LOCAL_MODE||(await s?.(),await Gt())})}catch(p){if(_t(p)){Se.info("Agent run terminated gracefully",{statusCode:p.statusCode,reason:p.message}),await s?.();try{await q(e.id,e.sessionId,{result:p.userMessage,state:"error"})}catch{Se.info("Could not update session (site may have been deleted)")}return}Se.error("Got error while running pipeline",p),await s?.();let f=p instanceof Error&&p.message;throw await q(e.id,e.sessionId,{result:f||"Encountered error when running agent",state:"error"}),p}finally{await Nn()}};import pr from"crypto";var $=_("bin_local"),W=Pn(C.argv.slice(2),{string:["cwd","cli-path","filter","prompt","runner","model","netlify-api-token"],boolean:["verbose","help"],alias:{h:"help",v:"verbose"}}),pt=()=>{console.log(`
143
147
  agent-runner-cli-local - Run Netlify agent runner locally without API connections
144
148
 
145
149
  USAGE:
@@ -171,6 +175,6 @@ NOTE:
171
175
  This local mode mocks all Netlify API calls. The agent will run through
172
176
  the full pipeline including inference and deployment, but API calls will
173
177
  be logged instead of executed.
174
- `)};W.help&&(dt(),C.exit(0));W.prompt||(F.error("Error: --prompt is required"),dt(),C.exit(1));W["netlify-api-token"]||(F.error("Error: --netlify-api-token is required - generate a PAT from your Netlify user settings"),dt(),C.exit(1));try{let e=W.cwd||C.cwd(),t=dr.join(e,".netlify","netlify-agent-runner-context*");pr.rmSync(t,{recursive:!0,force:!0});let r;try{r=await Nn(e)}catch(u){F.error(u.message),F.error(`
175
- To link this directory to a Netlify site, run:`),F.error(" netlify link"),C.exit(1)}let o=`local-${cr.randomBytes(8).toString("hex")}`,n=`session-${cr.randomBytes(8).toString("hex")}`,i=W.runner||"claude";F.log("Starting agent runner in local mode",{runnerId:o,sessionId:n,siteId:r,cwd:e,runner:i});let s={id:o,sessionId:n,prompt:W.prompt,runner:i,model:W.model,accountType:"local",validateAgentWithBuild:!1,sessionHistoryContext:[],siteContext:[],hasRepo:!0,useGateway:!0,sha:void 0,modelVersionOverrides:{}};C.env.NETLIFY_LOCAL_MODE="true",C.env.NETLIFY_API_HOST="api.netlify.com",C.env.NETLIFY_API_TOKEN=W["netlify-api-token"],C.env.SITE_ID=r,C.env.NETLIFY_TEAM_ID="local-team-id",C.env.NETLIFY_AGENT_RUNNER_USER_ID="local-user-id",C.env.SITE_NAME="local-site",i==="claude"?Pe(e,"claude")||(F.log("Claude CLI not found, installing..."),await ct(e,"@anthropic-ai/claude-code")):i==="gemini"?Pe(e,"gemini")||(F.log("Gemini CLI not found, installing..."),await ct(e,"@google/gemini-cli")):i==="codex"?Pe(e,"codex")||(F.log("Codex CLI not found, installing..."),await ct(e,"@openai/codex")):(F.error(`Unknown runner: ${i}`),C.exit(1)),await ur({config:s,cwd:e,cliPath:W["cli-path"],filter:W.filter,tracing:{exporterUrl:void 0,traceparent:void 0}}),F.info("Finished agent (local mode)"),C.exit(0)}catch(e){F.error("Error running agent pipeline (local mode):",e),C.exit(1)}function ct(e,t){return new Promise((r,o)=>{N("npm",["install",t,"--no-save"],{cwd:e}).then(({stdout:n})=>{F.log(`${t} installed: ${n}`),r()}).catch(n=>{F.error(`Error installing ${t}: ${n.stderr||n.message}`),o(n)})})}async function Nn(e){let t=dr.join(e,".netlify","state.json");try{let r=await pr.readFileSync(t,"utf-8"),o=JSON.parse(r);if(!o.siteId)throw new Error(`No siteId found in ${t}. Please link this directory to a Netlify site using 'netlify link'.`);return F.log(`Found site ID from state file: ${o.siteId}`),o.siteId}catch(r){throw r.code==="ENOENT"?new Error(`No .netlify/state.json found in ${e}. Please link this directory to a Netlify site using 'netlify link'.`):r}}
178
+ `)};W.help&&(pt(),C.exit(0));W.prompt||($.error("Error: --prompt is required"),pt(),C.exit(1));W["netlify-api-token"]||($.error("Error: --netlify-api-token is required - generate a PAT from your Netlify user settings"),pt(),C.exit(1));try{let e=W.cwd||C.cwd(),t=fr.join(e,".netlify","netlify-agent-runner-context*");mr.rmSync(t,{recursive:!0,force:!0});let r;try{r=await On(e)}catch(c){$.error(c.message),$.error(`
179
+ To link this directory to a Netlify site, run:`),$.error(" netlify link"),C.exit(1)}let o=`local-${pr.randomBytes(8).toString("hex")}`,n=`session-${pr.randomBytes(8).toString("hex")}`,i=W.runner||"claude";$.log("Starting agent runner in local mode",{runnerId:o,sessionId:n,siteId:r,cwd:e,runner:i});let s={id:o,sessionId:n,prompt:W.prompt,runner:i,model:W.model,accountType:"local",validateAgentWithBuild:!1,sessionHistoryContext:[],siteContext:[],hasRepo:!0,useGateway:!0,sha:void 0,modelVersionOverrides:{}};C.env.NETLIFY_LOCAL_MODE="true",C.env.NETLIFY_API_HOST="api.netlify.com",C.env.NETLIFY_API_TOKEN=W["netlify-api-token"],C.env.SITE_ID=r,C.env.NETLIFY_TEAM_ID="local-team-id",C.env.NETLIFY_AGENT_RUNNER_USER_ID="local-user-id",C.env.SITE_NAME="local-site",i==="claude"?Oe(e,"claude")||($.log("Claude CLI not found, installing..."),await dt(e,"@anthropic-ai/claude-code")):i==="gemini"?Oe(e,"gemini")||($.log("Gemini CLI not found, installing..."),await dt(e,"@google/gemini-cli")):i==="codex"?Oe(e,"codex")||($.log("Codex CLI not found, installing..."),await dt(e,"@openai/codex")):($.error(`Unknown runner: ${i}`),C.exit(1)),await dr({config:s,cwd:e,cliPath:W["cli-path"],filter:W.filter,tracing:{exporterUrl:void 0,traceparent:void 0}}),$.info("Finished agent (local mode)"),C.exit(0)}catch(e){$.error("Error running agent pipeline (local mode):",e),C.exit(1)}function dt(e,t){return new Promise((r,o)=>{N("npm",["install",t,"--no-save"],{cwd:e}).then(({stdout:n})=>{$.log(`${t} installed: ${n}`),r()}).catch(n=>{$.error(`Error installing ${t}: ${n.stderr||n.message}`),o(n)})})}async function On(e){let t=fr.join(e,".netlify","state.json");try{let r=await mr.readFileSync(t,"utf-8"),o=JSON.parse(r);if(!o.siteId)throw new Error(`No siteId found in ${t}. Please link this directory to a Netlify site using 'netlify link'.`);return $.log(`Found site ID from state file: ${o.siteId}`),o.siteId}catch(r){throw r.code==="ENOENT"?new Error(`No .netlify/state.json found in ${e}. Please link this directory to a Netlify site using 'netlify link'.`):r}}
176
180
  //# sourceMappingURL=bin-local.js.map