@netlify/agent-runner-cli 1.60.3 → 1.60.4

This diff shows the content of publicly available package versions as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
package/dist/bin-local.js CHANGED
@@ -1,32 +1,32 @@
  #!/usr/bin/env node
- import C from"process";import dr from"path";import pr from"fs";import bn from"minimist";import{createRequire as Rn}from"module";import{createTracerProvider as mr}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as pt}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as gr}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as hr}from"@netlify/otel";import{propagation as ft,context as mt,W3CTraceContextPropagator as yr}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as _r}from"@opentelemetry/exporter-trace-otlp-grpc";import fr from"process";function _(e){let t=fr.env.LOG!=="0";return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var Ye=_("tracing"),gt=async(e,t,r)=>(await mr({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new pt(new Be),new pt(new _r({url:r.exporterUrl}))],instrumentations:[new gr({skipHeaders:!0})]}),r.traceparent?(ft.setGlobalPropagator(new yr),ft.extract(mt.active(),{traceparent:r.traceparent,isRemote:!0})):mt.active());function A(e,t,r){return Ye.log(`\u23F3 TRACE: ${t} starting...`),hr(e,t,r)}var Be=class{export(t,r){for(let o of t)this.logSpan(o);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,o=t.attributes,n=[];for(let[u,l]of Object.entries(o))u.includes("duration")&&typeof l=="number"?n.push(`${u}=${l.toFixed(2)}ms`):n.push(`${u}=${l}`);let i=t.status?.code===2?"\u274C":"\u2705",s=n.length>0?` [${n.join(", ")}]`:"";Ye.log(`${i} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${s}`),t.status?.code===2&&t.status.message&&Ye.log(` \u274C Error: ${t.status.message}`)}};var Er=["error","failed","exception","fatal","panic","abort","crash"];function ht(e){let t=e.split(`
+ import C from"process";import dr from"path";import pr from"fs";import An from"minimist";import{createRequire as Rn}from"module";import{createTracerProvider as mr}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as pt}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as gr}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as hr}from"@netlify/otel";import{propagation as ft,context as mt,W3CTraceContextPropagator as yr}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as _r}from"@opentelemetry/exporter-trace-otlp-grpc";import fr from"process";function _(e){let t=fr.env.LOG!=="0";return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var Ye=_("tracing"),gt=async(e,t,r)=>(await mr({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new pt(new Be),new pt(new _r({url:r.exporterUrl}))],instrumentations:[new gr({skipHeaders:!0})]}),r.traceparent?(ft.setGlobalPropagator(new yr),ft.extract(mt.active(),{traceparent:r.traceparent,isRemote:!0})):mt.active());function b(e,t,r){return Ye.log(`\u23F3 TRACE: ${t} starting...`),hr(e,t,r)}var Be=class{export(t,r){for(let o of t)this.logSpan(o);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,o=t.attributes,n=[];for(let[u,l]of Object.entries(o))u.includes("duration")&&typeof l=="number"?n.push(`${u}=${l.toFixed(2)}ms`):n.push(`${u}=${l}`);let i=t.status?.code===2?"\u274C":"\u2705",s=n.length>0?` [${n.join(", ")}]`:"";Ye.log(`${i} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${s}`),t.status?.code===2&&t.status.message&&Ye.log(` \u274C Error: ${t.status.message}`)}};var Er=["error","failed","exception","fatal","panic","abort","crash"];function ht(e){let t=e.split(`
  `),r=[],o=-1,n=0;for(;n<t.length;){let u=t[n].slice(0,500).toLowerCase();if(Er.some(p=>u.includes(p))){let p=Math.max(0,n-10,o+1),f=Math.min(t.length-1,n+20),c=[];for(let m=p;m<=f;m++)c.push(t[m]);r.push(c.join(`
  `)),o=f,n=f+1}else n++}if(r.length===0)return e;let i=r.map((s,u)=>`<extracted_error_chunk order="${u+1}">
  ${s}
  </extracted_error_chunk>`).join(`
 
- `);return i.length>e.length*.8?e:i}import ke from"process";import{getTracer as ln}from"@netlify/otel";import _e from"process";var ie=class extends Error{constructor(r,o,n){super(r);this.statusCode=o;this.userMessage=n;this.name="GracefulShutdownError"}},yt=e=>e instanceof ie;var Ae=_e.env.NETLIFY_API_URL,be=_e.env.NETLIFY_API_TOKEN,H=_("api"),Ne=()=>_e.env.NETLIFY_LOCAL_MODE==="true",Ee=async(e,t={})=>{if(!Ae||!be)throw new Error("No API URL or token");let r=new URL(e,Ae),o={...t,headers:{...t.headers,Authorization:`Bearer ${be}`}};_e.env.AGENT_RUNNERS_DEBUG==="true"&&(o.headers["x-nf-debug-logging"]="true"),t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),i=n.ok&&n.status<=299;if(_e.env.AGENT_RUNNERS_DEBUG==="true")H.log(`Response headers for ${r}:`),n.headers.forEach((u,l)=>{H.log(` ${l}: ${u}`)});else{let u=n.headers.get("x-request-id")||n.headers.get("x-nf-request-id");H.log(`Request ID for ${r}: ${u||"N/A"}`)}if(i||H.error(`Got status ${n.status} for request ${r}`),t.raw){if(!i)throw new Error(`API request failed: ${n.status} ${n.statusText}`);return n}let s=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!i){let u=typeof s=="string"?s:JSON.stringify(s);throw n.status===404?new ie(`API request failed: 404 - ${u}`,404,"The site associated with this agent run no longer exists."):n.status===403&&t.gracefulOn403?new ie(`API request failed: 403 - ${u}`,403,"Credit limit reached. Please add more credits to continue using Agent Runners."):new Error(`API request failed: ${n.status} - ${u}`)}return s},_t=e=>{H.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(Ae=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(be=e.constants.NETLIFY_API_TOKEN)},Et=()=>({apiUrl:Ae,token:be}),we=async(e,t)=>Ne()?(H.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):Ee(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),q=async(e,t,r)=>Ne()?(H.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):Ee(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var wt=async(e,t)=>Ne()?(H.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):Ee(`/api/v1/agent_runners/${e}/sessions/${t}`),Tt=(e,t,r)=>Ee(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r},gracefulOn403:!0}),It=async(e,t)=>Ne()?(H.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):Ee(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),He=async(e,t)=>{H.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var se=_("ai_gateway"),qe=null;var xt=async()=>{if(qe)return qe;se.log("Fetching available AI gateway providers");let e=await fetch(`${Et().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return qe=t,se.log("Cached AI gateway 
providers",{providerCount:Object.keys(t.providers).length}),t},wr=async(e,t)=>{let o=(await xt()).providers[e];if(!o)return se.log(`Provider '${e}' not found`),!1;let n=o.models.includes(t);return se.log(`Model validation for ${e}/${t}`,{isAvailable:n}),n},vt=async({netlify:e,config:t})=>{let r,o,n,i,s=e.constants?.SITE_ID;if(!s)throw new Error("No site id");let u=async()=>{clearTimeout(n),se.log("Requesting AI gateway information");let l=await Tt(s,t.id,t.sessionId);if({token:r,url:i}=l,o=l.expires_at?l.expires_at*1e3:void 0,se.log("Got AI gateway information",{token:!!r,expiresAt:o,url:i}),o){let p=o-Date.now()-6e4;p>0&&(n=setTimeout(()=>{u()},p))}};return await Promise.all([u(),xt()]),{get url(){return i},get token(){return r},isModelAvailableForProvider:wr}};import K from"process";import V from"path";import Ce from"fs";import{fileURLToPath as Ar}from"url";import{createRequire as br}from"module";import{execa as Nr,execaCommand as ao}from"execa";import{Transform as Tr}from"stream";var Ir=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),xr=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function vr(){return Object.entries(process.env).filter(([e,t])=>!(!t||Ir.has(e)||xr.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function J(e){if(typeof e!="string")return e;let t=vr();if(t.length===0)return e;let r=e;return t.forEach(o=>{let n=new RegExp(Rr(o),"g");r=r.replace(n,"******")}),r}function Rr(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var ae=class extends Tr{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,o){let n=t.toString(),i=J(n);o(null,i)}};function Rt(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(o,n,i){let s=typeof o=="string"?J(o):o;return typeof n=="function"?t(s,n):t(s,n,i)},process.stderr.write=function(o,n,i){let s=typeof o=="string"?J(o):o;return typeof n=="function"?r(s,n):r(s,n,i)}}var Te=null,St=e=>(Te&&Te.destroy(),Te=new ee({totalAllowedTime:e}),Te),At=()=>Te;var ee=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,o)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let n=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),i=null,s=null;o!==void 0&&(s=new Promise((u,l)=>{i=setTimeout(()=>{l(new Error(`${t} stage exceeded its maximum duration of ${o}ms`))},o)}));try{return s?await Promise.race([r(),s]):await r()}finally{n(),i&&clearTimeout(i)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var bt="netlify-agent-runner-context.md",Ke="task-history",We="netlify-context",U=".netlify",te="results.md",Je="assets";var re=1800*1e3;var Nt={name:"@netlify/agent-runner-cli",type:"module",version:"1.60.3",description:"CLI tool for running Netlify agents",main:"./dist/index.js",types:"./dist/index.d.ts",exports:"./dist/index.js",bin:{"agent-runner-cli":"./dist/bin.js","agent-runner-cli-local":"./dist/bin-local.js"},files:["dist/**/*.js","dist/**/*.d.ts","patches","scripts"],scripts:{build:"tsup",dev:"tsup --watch",prepare:"husky install node_modules/@netlify/eslint-config-node/.husky/",prepublishOnly:"npm ci && npm test",prepack:"npm run build",test:"run-s build format test:dev",format:"run-s build format:check-fix:*","format:ci":"run-s build format:check:*","format:check-fix:lint":"run-e format:check:lint format:fix:lint","format:check:lint":"cross-env-shell eslint $npm_package_config_eslint","format:fix:lint":"cross-env-shell eslint --fix $npm_package_config_eslint","format:check-fix:prettier":"run-e format:check:prettier format:fix:prettier","format:check:prettier":"cross-env-shell prettier --check $npm_package_config_prettier","format:fix:prettier":"cross-env-shell prettier --write $npm_package_config_prettier","test:dev":"run-s build test:dev:*","test:ci":"run-s build test:ci:*","test:dev:vitest":"LOG=0 vitest --exclude '**/integration/**'","test:ci:vitest":"LOG=0 c8 -r lcovonly -r text -r json vitest --exclude '**/integration/**'","test:integration":"vitest run test/integration/","test:integration:codex":"vitest run test/integration/codex.test.ts","test:integration:claude":"vitest run test/integration/claude.test.ts","test:integration:gemini":"vitest run test/integration/gemini.test.ts",postinstall:"node scripts/postinstall.js"},config:{eslint:'--cache --format=codeframe --max-warnings=0 "{src,scripts,test,.github}/**/*.{js,ts,md,html}"',prettier:'--ignore-path .gitignore 
--loglevel=warn "{src,scripts,test,.github}/**/*.{js,ts,md,yml,json,html}" "*.{js,ts,yml,json,html}" ".*.{js,ts,yml,json,html}" "!**/package-lock.json" "!package-lock.json"'},keywords:[],license:"MIT",repository:"netlify/agent-runner-cli",bugs:{url:"https://github.com/netlify/agent-runner-cli/issues"},author:"Netlify Inc.",directories:{test:"test"},devDependencies:{"@commitlint/cli":"^20.0.0","@commitlint/config-conventional":"^20.0.0","@eslint/compat":"^2.0.0","@eslint/js":"^9.35.0","@netlify/eslint-config-node":"^7.0.1","@types/node":"^24.5.0","@typescript-eslint/eslint-plugin":"^8.0.0","@typescript-eslint/parser":"^8.0.0","@vitest/eslint-plugin":"^1.6.6",c8:"^10.0.0","eslint-config-prettier":"^10.1.8","eslint-plugin-n":"^17.0.0",husky:"^9.0.0","patch-package":"^8.0.0",tsup:"^8.5.0",typescript:"^5.0.0","typescript-eslint":"^8.44.0",vitest:"^4.0.16"},dependencies:{"@anthropic-ai/claude-code":"2.1.14","@google/gemini-cli":"0.25.0","@netlify/otel":"^5.1.1","@openai/codex":"0.88.0","@opentelemetry/exporter-trace-otlp-grpc":"^0.208.0",execa:"^9.6.1",minimist:"^1.2.8"}};var Cr=Ar(import.meta.url),Pr=V.dirname(Cr),Or=br(import.meta.url),le=_("shell"),Ve=new Set,$r={preferLocal:!0},N=(e,t,r)=>{let[o,n]=Fr(t,r),i={...$r,...n},s=Nr(e,o,i);kr(s,i),Lr(s);let u=r?.idleTimeout;return u&&u>0&&Dr(s,u),s};var Fr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},kr=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(K.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new ae).pipe(K.stdout),e.stdout?.pipe(new ae).pipe(K.stdout),e.stderr?.pipe(new ae).pipe(K.stderr);return}e.stdout?.pipe(K.stdout),e.stderr?.pipe(K.stderr)},Xe=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(K.kill(-e.pid,t),le.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return le.error("Error killing process:",r),!1}},Ct=e=>Xe(e,"SIGKILL"),Dr=(e,t)=>{let r=null,o=()=>{le.log(`Process ${e.pid} killed due to idle timeout (no output for ${t}ms)`),Xe(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(le.log(`Force killing idle process ${e.pid}`),Ct(e))},5e3)},n=()=>{r&&clearTimeout(r),r=setTimeout(o,t)};n(),e.stdout?.on("data",n),e.stderr?.on("data",n);let i=()=>{r&&(clearTimeout(r),r=null)};e.on("exit",i),e.on("error",i)},Lr=e=>{Ve.add(e);let t=At();if(t){let r=t.onTimesUp(()=>{le.log(`Global timer expired, killing process ${e.pid}`),Xe(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(le.log(`Force killing process ${e.pid} after timeout`),Ct(e))},5e3)});e.on("exit",()=>{Ve.delete(e),r()}),e.on("error",()=>{Ve.delete(e),r()})}};function Pe(e,t){return!!ne(e,t)}function ne(e,t){if(!K.env.NETLIFY_LOCAL_MODE)try{let n=Or.resolve(Nt.name),i=V.dirname(n);for(;i!==V.dirname(i);){let s=V.dirname(i);if(V.basename(s)==="node_modules"){let u=V.join(s,".bin",t);if(Ce.existsSync(u))return u;break}i=s}}catch(n){console.error("Could not resolve package.json",n)}if(K.env.NODE_PATH){let n=V.join(K.env.NODE_PATH,".bin",t);if(Ce.existsSync(n))return n}let r=V.join(e,"node_modules",".bin",t);if(Ce.existsSync(r))return r;let o=V.join(Pr,"..","node_modules",".bin",t);if(Ce.existsSync(o))return o}var Ur=_("utils"),Mr=e=>new Promise(t=>{setTimeout(t,e)}),Pt=(e,t=3e3)=>{let r=!1,o=null,n=[],i=null,s=(...u)=>{if(r)return o=u,new Promise(f=>{n.push(f)});r=!0;let l,p=new Promise(f=>{l=f});return i=(async()=>{await Promise.resolve();let f=await e(...u);for(l(f);;){if(await Mr(t),!o)return r=!1,i=null,f;let c=o,m=n;o=null,n=[],f=await e(...c),m.forEach(y=>{y(f)})}})(),p};return 
s.flush=async()=>{if((r||o)&&i)return await i,s.flush()},s},ue=(e,t,r=!1)=>{let o=null,n=null,i=null,s=function(...u){n=u,i=this;let l=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(i,n),n=null,i=null)},t),l&&(e.apply(i,n),n=null,i=null)};return s.cancel=()=>{clearTimeout(o),o=null,n=null,i=null},s.flush=()=>{if(o){clearTimeout(o);let u=n,l=i;o=null,n=null,i=null,e.apply(l,u)}},s},Ot=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(o){t&&(r?.error?r.error("Could not parse JSON",o):Ur.error("Could not parse JSON",o))}},$t=(e,t)=>{let n=".netlify.app",i="agent-";if(!t)return`${i}${e.slice(0,6)}`;let u=`--${t}${n}`;if(u.length>55)return"";let l=60-u.length;if(l<=0)return"";if(l>=i.length+6){let p=Math.min(l-i.length,e.length);return`${i}${e.slice(0,p)}`}return e.slice(0,l)};var jr=50*1024,ze=(e,t=jr)=>{if(!e||typeof e!="string"||e.length<=t)return e;let o=e.startsWith("```")?"\n... [truncated]\n```":"... [truncated]";return e.slice(0,t)+o};import{Buffer as Ft}from"buffer";import Gr from"path";var kt=_("repo"),Dt=async({config:e,isRetry:t})=>{kt.info("Getting runner diffs");let r=await Br(),{hasChanges:o}=r,{status:n}=r;if(!o)return{hasChanges:!1};if(!t){let I=Hr(n);await qr(I)}kt.info("Changes after processing"),await Qe();let i=await et(n);await Ze(i);let s={stdio:["ignore","pipe","pipe"]},u=await N("git",["diff","--staged"],s),l=String(u.stdout??"");if(o=!!l,!o)return{hasChanges:!1,ignored:i};let p=await N("git",["diff","--staged","--binary"],s),f=String(p.stdout??""),c,m;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await N("git",["commit","-m","Agent runner"]);let I=await N("git",["diff",e.sha,"HEAD"],s);c=String(I.stdout??"");let w=await N("git",["diff",e.sha,"HEAD","--binary"],s),g=String(w.stdout??"");c!==g&&(m=Ft.from(g).toString("base64"))}let y={hasChanges:!0,diff:l,resultDiff:c,ignored:i};return l!==f&&(y.diffBinary=Ft.from(f).toString("base64")),m&&(y.resultDiffBinary=m),y},Ze=async(e=[])=>{await N("git",["add",".",...e])},Qe=async()=>{let e=await N("git",["status","-s"]);return String(e.stdout??"")},Lt=/.. (.+)?\.log$/,Yr=[Lt],Br=async()=>{let e=await Qe();return{hasChanges:(e.trim().length===0?[]:e.split(`
+ `);return i.length>e.length*.8?e:i}import ke from"process";import{getTracer as ln}from"@netlify/otel";import _e from"process";var ie=class extends Error{constructor(r,o,n){super(r);this.statusCode=o;this.userMessage=n;this.name="GracefulShutdownError"}},yt=e=>e instanceof ie;var be=_e.env.NETLIFY_API_URL,Ae=_e.env.NETLIFY_API_TOKEN,H=_("api"),Ne=()=>_e.env.NETLIFY_LOCAL_MODE==="true",Ee=async(e,t={})=>{if(!be||!Ae)throw new Error("No API URL or token");let r=new URL(e,be),o={...t,headers:{...t.headers,Authorization:`Bearer ${Ae}`}};_e.env.AGENT_RUNNERS_DEBUG==="true"&&(o.headers["x-nf-debug-logging"]="true"),t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),i=n.ok&&n.status<=299;if(_e.env.AGENT_RUNNERS_DEBUG==="true")H.log(`Response headers for ${r}:`),n.headers.forEach((u,l)=>{H.log(` ${l}: ${u}`)});else{let u=n.headers.get("x-request-id")||n.headers.get("x-nf-request-id");H.log(`Request ID for ${r}: ${u||"N/A"}`)}if(i||H.error(`Got status ${n.status} for request ${r}`),t.raw){if(!i)throw new Error(`API request failed: ${n.status} ${n.statusText}`);return n}let s=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!i){let u=typeof s=="string"?s:JSON.stringify(s);throw n.status===404?new ie(`API request failed: 404 - ${u}`,404,"The site associated with this agent run no longer exists."):n.status===403&&t.gracefulOn403?new ie(`API request failed: 403 - ${u}`,403,"Credit limit reached. Please add more credits to continue using Agent Runners."):new Error(`API request failed: ${n.status} - ${u}`)}return s},_t=e=>{H.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(be=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(Ae=e.constants.NETLIFY_API_TOKEN)},Et=()=>({apiUrl:be,token:Ae}),we=async(e,t)=>Ne()?(H.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):Ee(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),q=async(e,t,r)=>Ne()?(H.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):Ee(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var wt=async(e,t)=>Ne()?(H.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):Ee(`/api/v1/agent_runners/${e}/sessions/${t}`),Tt=(e,t,r)=>Ee(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r},gracefulOn403:!0}),It=async(e,t)=>Ne()?(H.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):Ee(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),He=async(e,t)=>{H.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var se=_("ai_gateway"),qe=null;var xt=async()=>{if(qe)return qe;se.log("Fetching available AI gateway providers");let e=await fetch(`${Et().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return qe=t,se.log("Cached AI gateway 
providers",{providerCount:Object.keys(t.providers).length}),t},wr=async(e,t)=>{let o=(await xt()).providers[e];if(!o)return se.log(`Provider '${e}' not found`),!1;let n=o.models.includes(t);return se.log(`Model validation for ${e}/${t}`,{isAvailable:n}),n},vt=async({netlify:e,config:t})=>{let r,o,n,i,s=e.constants?.SITE_ID;if(!s)throw new Error("No site id");let u=async()=>{clearTimeout(n),se.log("Requesting AI gateway information");let l=await Tt(s,t.id,t.sessionId);if({token:r,url:i}=l,o=l.expires_at?l.expires_at*1e3:void 0,se.log("Got AI gateway information",{token:!!r,expiresAt:o,url:i}),o){let p=o-Date.now()-6e4;p>0&&(n=setTimeout(()=>{u()},p))}};return await Promise.all([u(),xt()]),{get url(){return i},get token(){return r},isModelAvailableForProvider:wr}};import K from"process";import V from"path";import Ce from"fs";import{fileURLToPath as br}from"url";import{createRequire as Ar}from"module";import{execa as Nr,execaCommand as ao}from"execa";import{Transform as Tr}from"stream";var Ir=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),xr=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function vr(){return Object.entries(process.env).filter(([e,t])=>!(!t||Ir.has(e)||xr.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function J(e){if(typeof e!="string")return e;let t=vr();if(t.length===0)return e;let r=e;return t.forEach(o=>{let n=new RegExp(Rr(o),"g");r=r.replace(n,"******")}),r}function Rr(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var ae=class extends Tr{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,o){let n=t.toString(),i=J(n);o(null,i)}};function Rt(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(o,n,i){let s=typeof o=="string"?J(o):o;return typeof n=="function"?t(s,n):t(s,n,i)},process.stderr.write=function(o,n,i){let s=typeof o=="string"?J(o):o;return typeof n=="function"?r(s,n):r(s,n,i)}}var Te=null,St=e=>(Te&&Te.destroy(),Te=new ee({totalAllowedTime:e}),Te),bt=()=>Te;var ee=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,o)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let n=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),i=null,s=null;o!==void 0&&(s=new Promise((u,l)=>{i=setTimeout(()=>{l(new Error(`${t} stage exceeded its maximum duration of ${o}ms`))},o)}));try{return s?await Promise.race([r(),s]):await r()}finally{n(),i&&clearTimeout(i)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var At="netlify-agent-runner-context.md",Ke="task-history",We="netlify-context",U=".netlify",te="results.md",Je="assets";var re=1800*1e3;var Nt={name:"@netlify/agent-runner-cli",type:"module",version:"1.60.4",description:"CLI tool for running Netlify agents",main:"./dist/index.js",types:"./dist/index.d.ts",exports:"./dist/index.js",bin:{"agent-runner-cli":"./dist/bin.js","agent-runner-cli-local":"./dist/bin-local.js"},files:["dist/**/*.js","dist/**/*.d.ts","patches","scripts"],scripts:{build:"tsup",dev:"tsup --watch",prepare:"husky install node_modules/@netlify/eslint-config-node/.husky/",prepublishOnly:"npm ci && npm test",prepack:"npm run build",test:"run-s build format test:dev",format:"run-s build format:check-fix:*","format:ci":"run-s build format:check:*","format:check-fix:lint":"run-e format:check:lint format:fix:lint","format:check:lint":"cross-env-shell eslint $npm_package_config_eslint","format:fix:lint":"cross-env-shell eslint --fix $npm_package_config_eslint","format:check-fix:prettier":"run-e format:check:prettier format:fix:prettier","format:check:prettier":"cross-env-shell prettier --check $npm_package_config_prettier","format:fix:prettier":"cross-env-shell prettier --write $npm_package_config_prettier","test:dev":"run-s build test:dev:*","test:ci":"run-s build test:ci:*","test:dev:vitest":"LOG=0 vitest --exclude '**/integration/**'","test:ci:vitest":"LOG=0 c8 -r lcovonly -r text -r json vitest --exclude '**/integration/**'","test:integration":"vitest run test/integration/","test:integration:codex":"vitest run test/integration/codex.test.ts","test:integration:claude":"vitest run test/integration/claude.test.ts","test:integration:gemini":"vitest run test/integration/gemini.test.ts",postinstall:"node scripts/postinstall.js"},config:{eslint:'--cache --format=codeframe --max-warnings=0 "{src,scripts,test,.github}/**/*.{js,ts,md,html}"',prettier:'--ignore-path .gitignore 
--loglevel=warn "{src,scripts,test,.github}/**/*.{js,ts,md,yml,json,html}" "*.{js,ts,yml,json,html}" ".*.{js,ts,yml,json,html}" "!**/package-lock.json" "!package-lock.json"'},keywords:[],license:"MIT",repository:"netlify/agent-runner-cli",bugs:{url:"https://github.com/netlify/agent-runner-cli/issues"},author:"Netlify Inc.",directories:{test:"test"},devDependencies:{"@commitlint/cli":"^20.0.0","@commitlint/config-conventional":"^20.0.0","@eslint/compat":"^2.0.0","@eslint/js":"^9.35.0","@netlify/eslint-config-node":"^7.0.1","@types/node":"^24.5.0","@typescript-eslint/eslint-plugin":"^8.0.0","@typescript-eslint/parser":"^8.0.0","@vitest/eslint-plugin":"^1.6.6",c8:"^10.0.0","eslint-config-prettier":"^10.1.8","eslint-plugin-n":"^17.0.0",husky:"^9.0.0","patch-package":"^8.0.0",tsup:"^8.5.0",typescript:"^5.0.0","typescript-eslint":"^8.44.0",vitest:"^4.0.16"},dependencies:{"@anthropic-ai/claude-code":"2.1.14","@google/gemini-cli":"0.25.0","@netlify/otel":"^5.1.1","@openai/codex":"0.88.0","@opentelemetry/exporter-trace-otlp-grpc":"^0.208.0",execa:"^9.6.1",minimist:"^1.2.8"}};var Cr=br(import.meta.url),Pr=V.dirname(Cr),$r=Ar(import.meta.url),le=_("shell"),Ve=new Set,Or={preferLocal:!0},N=(e,t,r)=>{let[o,n]=Fr(t,r),i={...Or,...n},s=Nr(e,o,i);kr(s,i),Lr(s);let u=r?.idleTimeout;return u&&u>0&&Dr(s,u),s};var Fr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},kr=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(K.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new ae).pipe(K.stdout),e.stdout?.pipe(new ae).pipe(K.stdout),e.stderr?.pipe(new ae).pipe(K.stderr);return}e.stdout?.pipe(K.stdout),e.stderr?.pipe(K.stderr)},Xe=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(K.kill(-e.pid,t),le.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return le.error("Error killing process:",r),!1}},Ct=e=>Xe(e,"SIGKILL"),Dr=(e,t)=>{let r=null,o=()=>{le.log(`Process ${e.pid} killed due to idle timeout (no output for ${t}ms)`),Xe(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(le.log(`Force killing idle process ${e.pid}`),Ct(e))},5e3)},n=()=>{r&&clearTimeout(r),r=setTimeout(o,t)};n(),e.stdout?.on("data",n),e.stderr?.on("data",n);let i=()=>{r&&(clearTimeout(r),r=null)};e.on("exit",i),e.on("error",i)},Lr=e=>{Ve.add(e);let t=bt();if(t){let r=t.onTimesUp(()=>{le.log(`Global timer expired, killing process ${e.pid}`),Xe(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(le.log(`Force killing process ${e.pid} after timeout`),Ct(e))},5e3)});e.on("exit",()=>{Ve.delete(e),r()}),e.on("error",()=>{Ve.delete(e),r()})}};function Pe(e,t){return!!ne(e,t)}function ne(e,t){if(!K.env.NETLIFY_LOCAL_MODE)try{let n=$r.resolve(Nt.name),i=V.dirname(n);for(;i!==V.dirname(i);){let s=V.dirname(i);if(V.basename(s)==="node_modules"){let u=V.join(s,".bin",t);if(Ce.existsSync(u))return u;break}i=s}}catch(n){console.error("Could not resolve package.json",n)}if(K.env.NODE_PATH){let n=V.join(K.env.NODE_PATH,".bin",t);if(Ce.existsSync(n))return n}let r=V.join(e,"node_modules",".bin",t);if(Ce.existsSync(r))return r;let o=V.join(Pr,"..","node_modules",".bin",t);if(Ce.existsSync(o))return o}var Ur=_("utils"),Mr=e=>new Promise(t=>{setTimeout(t,e)}),Pt=(e,t=3e3)=>{let r=!1,o=null,n=[],i=null,s=(...u)=>{if(r)return o=u,new Promise(f=>{n.push(f)});r=!0;let l,p=new Promise(f=>{l=f});return i=(async()=>{await Promise.resolve();let f=await e(...u);for(l(f);;){if(await Mr(t),!o)return r=!1,i=null,f;let c=o,m=n;o=null,n=[],f=await e(...c),m.forEach(y=>{y(f)})}})(),p};return 
s.flush=async()=>{if((r||o)&&i)return await i,s.flush()},s},ue=(e,t,r=!1)=>{let o=null,n=null,i=null,s=function(...u){n=u,i=this;let l=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(i,n),n=null,i=null)},t),l&&(e.apply(i,n),n=null,i=null)};return s.cancel=()=>{clearTimeout(o),o=null,n=null,i=null},s.flush=()=>{if(o){clearTimeout(o);let u=n,l=i;o=null,n=null,i=null,e.apply(l,u)}},s},$t=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(o){t&&(r?.error?r.error("Could not parse JSON",o):Ur.error("Could not parse JSON",o))}},Ot=(e,t)=>{let n=".netlify.app",i="agent-";if(!t)return`${i}${e.slice(0,6)}`;let u=`--${t}${n}`;if(u.length>55)return"";let l=60-u.length;if(l<=0)return"";if(l>=i.length+6){let p=Math.min(l-i.length,e.length);return`${i}${e.slice(0,p)}`}return e.slice(0,l)};var jr=50*1024,ze=(e,t=jr)=>{if(!e||typeof e!="string"||e.length<=t)return e;let o=e.startsWith("```")?"\n... [truncated]\n```":"... [truncated]";return e.slice(0,t)+o};import{Buffer as Ft}from"buffer";import Gr from"path";var kt=_("repo"),Dt=async({config:e,isRetry:t})=>{kt.info("Getting runner diffs");let r=await Br(),{hasChanges:o}=r,{status:n}=r;if(!o)return{hasChanges:!1};if(!t){let I=Hr(n);await qr(I)}kt.info("Changes after processing"),await Qe();let i=await et(n);await Ze(i);let s={stdio:["ignore","pipe","pipe"]},u=await N("git",["diff","--staged"],s),l=String(u.stdout??"");if(o=!!l,!o)return{hasChanges:!1,ignored:i};let p=await N("git",["diff","--staged","--binary"],s),f=String(p.stdout??""),c,m;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await N("git",["commit","-m","Agent runner"]);let I=await N("git",["diff",e.sha,"HEAD"],s);c=String(I.stdout??"");let w=await N("git",["diff",e.sha,"HEAD","--binary"],s),g=String(w.stdout??"");c!==g&&(m=Ft.from(g).toString("base64"))}let y={hasChanges:!0,diff:l,resultDiff:c,ignored:i};return l!==f&&(y.diffBinary=Ft.from(f).toString("base64")),m&&(y.resultDiffBinary=m),y},Ze=async(e=[])=>{await N("git",["add",".",...e])},Qe=async()=>{let e=await N("git",["status","-s"]);return String(e.stdout??"")},Lt=/.. (.+)?\.log$/,Yr=[Lt],Br=async()=>{let e=await Qe();return{hasChanges:(e.trim().length===0?[]:e.split(`
  `).filter(o=>Yr.some(i=>i instanceof RegExp?i.test(o):o===i)?!1:o[1]?.trim()!=="")).length!==0,status:e}},Ut=async()=>{let{stdout:e}=await N("git",["rev-parse","HEAD"]);return String(e??"").trim()},Mt=async()=>{let{stdout:e}=await N("git",["rev-list","--max-parents=0","HEAD"]);return String(e??"").trim()},et=async e=>{e||=await Qe();let t=[".netlify","node_modules","dist"],r=[];return e.split(`
  `).forEach(o=>{t.forEach(i=>{[`?? ${i}`,`?? ${i}${Gr.sep}`].some(u=>o.startsWith(u))&&r.push(`:!${i}`)});let n=o.match(Lt)?.[1];n&&r.push(`:!${n}.log`)}),r},jt=async()=>{await N("git",["reset","--hard","HEAD"])},Hr=e=>{let t=e.split(`
- `).reduce((r,o)=>{if(!o)return r;let[n,i,,...s]=o,u=s.join(""),l=n.trim(),p=i.trim();return r[u]?r[u].change=p:r[u]={filePath:u,stage:l,change:p},r},{});return Object.values(t)},qr=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push(N("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import Wr from"fs/promises";import Jr from"os";import Bt from"path";import fe from"process";import Vr from"readline";import tt from"path";import Kr from"fs/promises";var rt=_("agent-output-utils");async function ce({initialResult:e,agentName:t,hasError:r}){let o="",n=tt.join(process.cwd(),U,te);try{let i=await Kr.readFile(n,"utf-8");i&&(o=i,rt.log(`Pulled result from ${tt.relative(process.cwd(),n)}`))}catch{rt.log(`No results file found at ${tt.relative(process.cwd(),n)}`)}return o||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function de({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,o=r?.replace(/\s+/g," ").trim().toLowerCase()||"",n="";return o?.includes("ai gateway is not available for your account")||o?.includes("ai gateway is not enabled for your account")?n="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":o?.includes("error when talking to gemini api")?n="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(o?.includes("connection closed prematurely")||o?.includes("499")&&t.toLowerCase().includes("gemini"))&&(n=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),o?.includes("request timed out")&&(n=`The ${t} API request's have timed out. Please try again or use a different available agent.`),o?.includes("network error")&&(n=`The ${t} agent is having network issues. Please try again or use a different available agent.`),o?.includes("503")&&(n=`The ${t} API is currently experiencing high load. 
Retrying automatically...`),n&&rt.log(`Providing updated error messsage: ${n}, replacing original error: ${r}`),n||r||void 0}function pe(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error")||r?.includes("503"))}var M=_("runner_claude"),Gt="Claude Code",me="claude-opus-4-5-20251101",Yt=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,Xr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(M.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(M.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(M.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function nt({config:e,netlify:t,persistSteps:r,aiGateway:o,continueSession:n,priorAgentSessionId:i,cwd:s=fe.cwd()}){let u=e,{accountType:l,prompt:p,modelVersionOverrides:f}=u,{model:c}=u,m="";if(o){let{token:h,url:d}=o;if(!h||!d)throw new Error("No token or url provided from AI Gateway");if(f?.claude){let a=f?.claude?.[l];if(a){if(!await o.isModelAvailableForProvider("anthropic",a))throw new Error(`Model override '${a}' is not available for anthropic provider`);c=a}}else if(c){if(!await o.isModelAvailableForProvider("anthropic",c))throw new Error(`Model '${c}' is not available for anthropic provider`)}else!!me&&await o.isModelAvailableForProvider("anthropic",me)?(c=me,M.log(`Using default model: ${me}`)):me&&M.log(`Default model ${me} is not available, proceeding without model specification`);fe.env.ANTHROPIC_API_KEY=h,fe.env.ANTHROPIC_BASE_URL=d}else if(!fe.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let y=[],I=[],w={},g=0,T=0,R,S,P=[ne(s,"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose","--disallowed-tools","ExitPlanMode,AskUserQuestion",...c?["--model",c]:[],...n?["--continue"]:[],...n&&i?["--resume",i]:[],"-p",p],O=`${fe.env.NVM_BIN}/node`;M.log(`Running ${O} ${P.join(" ")}`);let k=t.utils.run(O,P,{all:!0,env:fe.env,cwd:s,idleTimeout:re});k.stdin?.end();let x=ue(()=>{r?.({steps:y,duration:T})},250),v=(h,d)=>{let a={...h,id:g};g+=1,I.push(a),y.push(a),d||x.flush(),x(),d&&x.flush()},G=Vr.createInterface({input:k.all});return G.on("error",h=>{M.error("Readline interface error",{error:h.message,stack:h.stack})}),G.on("line",h=>{let d=null;try{d=JSON.parse(h)}catch{M.log("Could not parse line",h)}d?.session_id&&d.session_id!==m&&(m=d.session_id),Array.isArray(d?.message?.content)?d.message.content.forEach(a=>{switch(a.type){case"text":{a.text&&v({message:a.text});break}case"image":{typeof 
a.source=="object"&&a.source&&a.source.type==="base64"&&a.source.media_type?v({message:`![](data:${a.source.media_type};base64,${a.source.data})`}):M.log(`Unsupported image type ${a.source?.type}`,a.source);break}case"tool_use":{if(a.name==="Task"){let E=a.input?.description&&`\`${a.input.description}\``;v({title:[Yt(a.name),E].filter(Boolean).join(" ")})}else a.id&&(w[a.id]=a);x.flush();break}case"tool_result":{let E=a.tool_use_id?w[a.tool_use_id]:void 0,Z;if(E){let X=E.input?.file_path&&Bt.relative(s,E.input.file_path),$=X&&`\`${X}\``;Z=[Yt(E.name||""),$].filter(Boolean).join(" ")}let Se=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(E?.name||""),Q;if(typeof a.content=="string")Q=a.content;else if(Array.isArray(a.content)){let X=[];a.content.forEach($=>{$?.type==="text"&&typeof $.text=="string"?X.push($.text):$?.type==="image"&&typeof $.source=="object"&&$.source?$.source.type==="base64"&&$.source.media_type?X.push(`![](data:${$.source.media_type};base64,${$.source.data})`):M.log(`Unsupported image type ${$.source.type}`,$.source):M.log(`Unsupported block type ${$?.type}`)}),Q=X.join(`
+ `).reduce((r,o)=>{if(!o)return r;let[n,i,,...s]=o,u=s.join(""),l=n.trim(),p=i.trim();return r[u]?r[u].change=p:r[u]={filePath:u,stage:l,change:p},r},{});return Object.values(t)},qr=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push(N("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import Wr from"fs/promises";import Jr from"os";import Bt from"path";import fe from"process";import Vr from"readline";import tt from"path";import Kr from"fs/promises";var rt=_("agent-output-utils");async function ce({initialResult:e,agentName:t,hasError:r}){let o="",n=tt.join(process.cwd(),U,te);try{let i=await Kr.readFile(n,"utf-8");i&&(o=i,rt.log(`Pulled result from ${tt.relative(process.cwd(),n)}`))}catch{rt.log(`No results file found at ${tt.relative(process.cwd(),n)}`)}return o||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function de({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,o=r?.replace(/\s+/g," ").trim().toLowerCase()||"",n="";return o?.includes("ai gateway is not available for your account")||o?.includes("ai gateway is not enabled for your account")?n="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":o?.includes("error when talking to gemini api")?n="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(o?.includes("connection closed prematurely")||o?.includes("499")&&t.toLowerCase().includes("gemini"))&&(n=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),o?.includes("request timed out")&&(n=`The ${t} API request's have timed out. Please try again or use a different available agent.`),o?.includes("network error")&&(n=`The ${t} agent is having network issues. Please try again or use a different available agent.`),o?.includes("503")&&(n=`The ${t} API is currently experiencing high load. 
Retrying automatically...`),n&&rt.log(`Providing updated error messsage: ${n}, replacing original error: ${r}`),n||r||void 0}function pe(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error")||r?.includes("503"))}var M=_("runner_claude"),Gt="Claude Code",me="claude-opus-4-5-20251101",Yt=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,Xr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(M.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(M.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(M.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function nt({config:e,netlify:t,persistSteps:r,aiGateway:o,continueSession:n,priorAgentSessionId:i,cwd:s=fe.cwd()}){let u=e,{accountType:l,prompt:p,modelVersionOverrides:f}=u,{model:c}=u,m="";if(o){let{token:h,url:d}=o;if(!h||!d)throw new Error("No token or url provided from AI Gateway");if(f?.claude){let a=f?.claude?.[l];if(a){if(!await o.isModelAvailableForProvider("anthropic",a))throw new Error(`Model override '${a}' is not available for anthropic provider`);c=a}}else if(c){if(!await o.isModelAvailableForProvider("anthropic",c))throw new Error(`Model '${c}' is not available for anthropic provider`)}else!!me&&await o.isModelAvailableForProvider("anthropic",me)?(c=me,M.log(`Using default model: ${me}`)):me&&M.log(`Default model ${me} is not available, proceeding without model specification`);fe.env.ANTHROPIC_API_KEY=h,fe.env.ANTHROPIC_BASE_URL=d}else if(!fe.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let y=[],I=[],w={},g=0,T=0,R,S,P=[ne(s,"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose","--disallowed-tools","ExitPlanMode,AskUserQuestion",...c?["--model",c]:[],...n?["--continue"]:[],...n&&i?["--resume",i]:[],"-p",p],$=`${fe.env.NVM_BIN}/node`;M.log(`Running ${$} ${P.join(" ")}`);let k=t.utils.run($,P,{all:!0,env:fe.env,cwd:s,idleTimeout:re});k.stdin?.end();let x=ue(()=>{r?.({steps:y,duration:T})},250),v=(h,d)=>{let a={...h,id:g};g+=1,I.push(a),y.push(a),d||x.flush(),x(),d&&x.flush()},G=Vr.createInterface({input:k.all});return G.on("error",h=>{M.error("Readline interface error",{error:h.message,stack:h.stack})}),G.on("line",h=>{let d=null;try{d=JSON.parse(h)}catch{M.log("Could not parse line",h)}d?.session_id&&d.session_id!==m&&(m=d.session_id),Array.isArray(d?.message?.content)?d.message.content.forEach(a=>{switch(a.type){case"text":{a.text&&v({message:a.text});break}case"image":{typeof 
a.source=="object"&&a.source&&a.source.type==="base64"&&a.source.media_type?v({message:`![](data:${a.source.media_type};base64,${a.source.data})`}):M.log(`Unsupported image type ${a.source?.type}`,a.source);break}case"tool_use":{if(a.name==="Task"){let E=a.input?.description&&`\`${a.input.description}\``;v({title:[Yt(a.name),E].filter(Boolean).join(" ")})}else a.id&&(w[a.id]=a);x.flush();break}case"tool_result":{let E=a.tool_use_id?w[a.tool_use_id]:void 0,Z;if(E){let X=E.input?.file_path&&Bt.relative(s,E.input.file_path),O=X&&`\`${X}\``;Z=[Yt(E.name||""),O].filter(Boolean).join(" ")}let Se=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(E?.name||""),Q;if(typeof a.content=="string")Q=a.content;else if(Array.isArray(a.content)){let X=[];a.content.forEach(O=>{O?.type==="text"&&typeof O.text=="string"?X.push(O.text):O?.type==="image"&&typeof O.source=="object"&&O.source?O.source.type==="base64"&&O.source.media_type?X.push(`![](data:${O.source.media_type};base64,${O.source.data})`):M.log(`Unsupported image type ${O.source.type}`,O.source):M.log(`Unsupported block type ${O?.type}`)}),Q=X.join(`
 
  `)}Se&&Q&&(Q=`\`\`\`
  ${Q.trim()}
- \`\`\``),v({title:Z,message:Q},!0);break}case"thinking":{a.thinking&&v({title:"Thinking",message:a.thinking},!0);break}default:M.log(`Message content type is not supported ${a.type}`,a)}}):d?.type==="result"&&(T=d.duration_ms||0,d.is_error?S=d.result:R=d.result,[I,y].forEach(a=>{a[a.length-1]?.message===R&&a.pop()}))}),await k.catch(h=>{({error:S,result:R}=Xr({catchError:h,runCmd:k,error:S,result:R,runnerName:"Claude"}))}),G.close(),x.flush(),{steps:I,duration:T,result:await ce({initialResult:R,agentName:Gt,hasError:!!S}),error:de({error:S,agentName:Gt}),isRetryableError:pe(S),agentSessionId:m}}var Ht=async()=>{let e=Bt.join(Jr.homedir(),".claude");await Wr.rm(e,{recursive:!0,force:!0})};import Ie from"fs/promises";import Kt from"os";import Oe from"path";import oe from"process";import zr from"readline";var j=_("runner_codex"),qt="Codex CLI",ge="",Zr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(j.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(j.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(j.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function ot({config:e,netlify:t,persistSteps:r=void 0,sendSteps:o=void 0,aiGateway:n,cwd:i=oe.cwd()}){let{accountType:s,prompt:u,modelVersionOverrides:l}=e,{model:p}=e;if(n){let{token:d,url:a}=n;if(!d||!a)throw new Error("No token or url provided from AI Gateway");if(l?.codex){let E=l?.codex?.[s];if(E){if(!await n.isModelAvailableForProvider("openai",E))throw new Error(`Model override '${E}' is not available for openai provider`);p=E}}else if(p){if(!await n.isModelAvailableForProvider("openai",p))throw new Error(`Model '${p}' is not available for openai provider`)}else!!ge&&await n.isModelAvailableForProvider("openai",ge)?(p=ge,j.log(`Using default model: ${ge}`)):ge&&j.log(`Default model ${ge} is not available, proceeding without model specification`);oe.env.OPENAI_API_KEY=d,oe.env.OPENAI_BASE_URL=a}else if(!oe.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let f=[],c=[],m=[],y={},I=0,w=0,g,T,R=`${oe.env.NVM_BIN}/node`,S=Oe.join(Kt.homedir(),".codex"),P=Oe.join(S,"config.toml"),O=Oe.join(S,"auth.json");try{await Ie.mkdir(S,{recursive:!0});let d={OPENAI_API_KEY:oe.env.OPENAI_API_KEY};await Ie.writeFile(O,JSON.stringify(d,null,2),"utf-8"),j.log("Created Codex auth.json file");let a="";try{a=await Ie.readFile(P,"utf-8")}catch{}a.includes("web_search_request")||(a.includes("[features]")?a=a.replace(/\[features\]/,`[features]
+ \`\`\``),v({title:Z,message:Q},!0);break}case"thinking":{a.thinking&&v({title:"Thinking",message:a.thinking},!0);break}default:M.log(`Message content type is not supported ${a.type}`,a)}}):d?.type==="result"&&(T=d.duration_ms||0,d.is_error?S=d.result:R=d.result,[I,y].forEach(a=>{a[a.length-1]?.message===R&&a.pop()}))}),await k.catch(h=>{({error:S,result:R}=Xr({catchError:h,runCmd:k,error:S,result:R,runnerName:"Claude"}))}),G.close(),x.flush(),{steps:I,duration:T,result:await ce({initialResult:R,agentName:Gt,hasError:!!S}),error:de({error:S,agentName:Gt}),isRetryableError:pe(S),agentSessionId:m}}var Ht=async()=>{let e=Bt.join(Jr.homedir(),".claude");await Wr.rm(e,{recursive:!0,force:!0})};import Ie from"fs/promises";import Kt from"os";import $e from"path";import oe from"process";import zr from"readline";var j=_("runner_codex"),qt="Codex CLI",ge="",Zr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(j.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(j.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(j.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function ot({config:e,netlify:t,persistSteps:r=void 0,sendSteps:o=void 0,aiGateway:n,cwd:i=oe.cwd()}){let{accountType:s,prompt:u,modelVersionOverrides:l}=e,{model:p}=e;if(n){let{token:d,url:a}=n;if(!d||!a)throw new Error("No token or url provided from AI Gateway");if(l?.codex){let E=l?.codex?.[s];if(E){if(!await n.isModelAvailableForProvider("openai",E))throw new Error(`Model override '${E}' is not available for openai provider`);p=E}}else if(p){if(!await n.isModelAvailableForProvider("openai",p))throw new Error(`Model '${p}' is not available for openai provider`)}else!!ge&&await n.isModelAvailableForProvider("openai",ge)?(p=ge,j.log(`Using default model: ${ge}`)):ge&&j.log(`Default model ${ge} is not available, proceeding without model specification`);oe.env.OPENAI_API_KEY=d,oe.env.OPENAI_BASE_URL=a}else if(!oe.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let f=[],c=[],m=[],y={},I=0,w=0,g,T,R=`${oe.env.NVM_BIN}/node`,S=$e.join(Kt.homedir(),".codex"),P=$e.join(S,"config.toml"),$=$e.join(S,"auth.json");try{await Ie.mkdir(S,{recursive:!0});let d={OPENAI_API_KEY:oe.env.OPENAI_API_KEY};await Ie.writeFile($,JSON.stringify(d,null,2),"utf-8"),j.log("Created Codex auth.json file");let a="";try{a=await Ie.readFile(P,"utf-8")}catch{}a.includes("web_search_request")||(a.includes("[features]")?a=a.replace(/\[features\]/,`[features]
16
16
  web_search_request = true`):a+=`
17
17
  [features]
18
18
  web_search_request = true
19
19
  `,await Ie.writeFile(P,a,"utf-8"),j.log("Updated Codex config with web_search_request enabled"))}catch(d){throw j.warn("Failed to setup Codex config and credentials",{error:d.message}),new Error(`Codex setup failed: ${d.message}`)}let k=[ne(i,"codex"),"exec","--yolo","--json","--enable","web_search_request",...p?["--model",p]:[],u].filter(Boolean);j.log(`Running ${R} ${k.join(" ")}`);let x=t.utils.run(R,k,{all:!0,cwd:i,env:{...oe.env},idleTimeout:re}),v=ue(()=>{r?.({steps:f,duration:w}),o?.({steps:c,duration:w}),c=[]},250),G=(d,a)=>{d.id=I,I+=1,m.push(d),f.push(d),c.push(d),a||v.flush(),v(),a&&v.flush()},h=zr.createInterface({input:x.all});return h.on("error",d=>{j.error("Readline interface error",{error:d.message,stack:d.stack})}),h.on("line",d=>{let a=null;try{a=JSON.parse(d)}catch{j.log("Could not parse line",d);return}if(a?.duration_ms&&(w=a.duration_ms),a?.type==="item.started"&&a?.item?.type==="command_execution")y[a.item.id]=a.item;else if(a?.type==="item.completed"&&a?.item?.type==="command_execution"){let E=en(a.item);E&&G(E,!0)}else if(a?.type==="item.completed"&&a?.item?.type==="reasoning"){let E={title:"Reasoning",message:a.item.text};G(E,!0)}else if(a?.type==="local_shell_call")y[a.call_id]=a;else if(a?.type==="local_shell_call_output"){let E=tn(y[a.call_id],a);E&&G(E,!0)}else a?.type==="message"&&a.role==="assistant"?g=a.content.map(E=>E.text).join(`
20
20
  `):a?.type==="message"&&a.role==="system"&&(T=a.content.map(E=>E.text).join(`
21
- `))}),await x.catch(d=>{let a=Zr({catchError:d,runCmd:x,error:T,result:g,runnerName:"Codex"});T=a.error,g=a.result}),h.close(),v.flush(),{steps:m,duration:w,result:await ce({initialResult:g,agentName:qt,hasError:!!T}),error:de({error:T,agentName:qt}),isRetryableError:pe(T)}}var Wt=async()=>{let e=Oe.join(Kt.homedir(),".codex");await Ie.rm(e,{recursive:!0,force:!0})},Qr=new Set(["bash","-lc"]),en=e=>{if(!e||e.type!=="command_execution")return null;let t=e.command;t.startsWith("bash -lc ")&&(t=t.replace(/^bash -lc ['"]/,"").replace(/['"]$/,""));let r=`Running \`${t}\``,o=e.aggregated_output?.trim();return o&&(o=`\`\`\`
21
+ `))}),await x.catch(d=>{let a=Zr({catchError:d,runCmd:x,error:T,result:g,runnerName:"Codex"});T=a.error,g=a.result}),h.close(),v.flush(),{steps:m,duration:w,result:await ce({initialResult:g,agentName:qt,hasError:!!T}),error:de({error:T,agentName:qt}),isRetryableError:pe(T)}}var Wt=async()=>{let e=$e.join(Kt.homedir(),".codex");await Ie.rm(e,{recursive:!0,force:!0})},Qr=new Set(["bash","-lc"]),en=e=>{if(!e||e.type!=="command_execution")return null;let t=e.command;t.startsWith("bash -lc ")&&(t=t.replace(/^bash -lc ['"]/,"").replace(/['"]$/,""));let r=`Running \`${t}\``,o=e.aggregated_output?.trim();return o&&(o=`\`\`\`
22
22
  ${o}
23
23
  \`\`\``),e.status==="failed"&&e.exit_code!==0&&(o=o?`${o}
24
24
 
25
25
  *Exit code: ${e.exit_code}*`:`*Command failed with exit code: ${e.exit_code}*`),{title:r,message:o}},tn=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(i=>!Qr.has(i)),o=r?`Running \`${r.join(" ")}\``:void 0,n;try{n=JSON.parse(t.output).output?.trim(),n&&(n=`\`\`\`
26
26
  ${n.trim()}
27
- \`\`\``)}catch(i){j.error("Could not decode outputMsg",i,t.output)}return{title:o,message:n}};import $e from"fs/promises";import Vt from"os";import Fe from"path";import he from"process";import rn from"readline";var Y=_("runner_gemini"),Jt="Gemini CLI",ye="",nn=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(Y.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(Y.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(Y.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),on={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"},sn=async()=>{let e=Fe.join(Vt.homedir(),".gemini"),t=Fe.join(e,"settings.json");try{await $e.mkdir(e,{recursive:!0});let r={};try{let o=await $e.readFile(t,"utf-8");r=JSON.parse(o)}catch{Y.log("Creating new Gemini CLI settings file")}r.general||(r.general={}),r.general.previewFeatures||(r.general.previewFeatures=!0),r.model||(r.model={}),r.model.compressionThreshold!==.3&&(r.model.compressionThreshold=.3),await $e.writeFile(t,JSON.stringify(r,null,2),"utf-8"),Y.log("Configured Gemini CLI settings (preview features and compression threshold)")}catch(r){Y.error("Failed to ensure Gemini CLI settings",{error:r.message})}};async function it({config:e,netlify:t,persistSteps:r=void 0,sendSteps:o=void 0,aiGateway:n,cwd:i=he.cwd()}){let{accountType:s,prompt:u,modelVersionOverrides:l}=e,{model:p}=e;if(await sn(),n){let{token:h,url:d}=n;if(!h||!d)throw new Error("No token or url provided from AI Gateway");if(l?.gemini){let a=l?.gemini?.[s];if(a){if(!await n.isModelAvailableForProvider("gemini",a))throw new Error(`Model override '${a}' is not available for gemini provider`);p=a}}if(!p)!!ye&&await n.isModelAvailableForProvider("gemini",ye)?(p=ye,Y.log(`Using default model: ${ye}`)):ye&&Y.log(`Default model ${ye} is not available, proceeding without model specification`);else if(p&&!l?.gemini?.[s]&&!await n.isModelAvailableForProvider("gemini",p))throw new Error(`Model '${p}' is not available for gemini provider`);he.env.GEMINI_API_KEY=h,he.env.GOOGLE_GEMINI_BASE_URL=d}else if(!he.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let f=[],c=[],m=[],y={},I=0,w=0,g,T,R=[ne(i,"gemini"),...p?["--model",p]:[],"--yolo","--output-format","stream-json","-p",u],S=`${he.env.NVM_BIN}/node`;Y.log(`Running ${S} ${R.join(" ")}`);let P=t.utils.run(S,R,{all:!0,env:he.env,cwd:i,idleTimeout:re});P.stdin?.end();let O=ue(()=>{r?.({steps:f,duration:w}),o?.({steps:c,duration:w}),c=[]},250),k=(h,d)=>{h.id=I,I+=1,m.push(h),f.push(h),c.push(h),d||O.flush(),O(),d&&O.flush()},x=rn.createInterface({input:P.all});x.on("error",h=>{Y.error("Readline interface error",{error:h.message,stack:h.stack})});let v="",G=()=>{v&&k({message:v.trim()}),v=""};return x.on("line",h=>{let d=null;try{if(h.startsWith("[API Error")){let a=h.match(/\[api error: (.+?)]$/i)?.[1];d={type:"error",value:Ot(a,!1)?.error?.message||a||"Gemini encountered error"}}else 
d=JSON.parse(h)}catch{return}if(d)switch(["message","result"].includes(d.type)||G(),d.type){case"message":{d.role!=="user"&&d.content&&(v+=d.content);break}case"tool_use":{let a=on[d.tool_name]??d.tool_name,E=d.parameters?.file_path,Z=E&&Fe.relative(i,E),Se=d.parameters?.command,X={title:[a,Z&&`\`${Z}\``,Se&&`\`${Se}\``].filter(Boolean).join(" ")};y[d.tool_id]=X,O.flush();break}case"tool_result":{let a=y[d.tool_id];a&&(d.output&&(a.message=`\`\`\`
27
+ \`\`\``)}catch(i){j.error("Could not decode outputMsg",i,t.output)}return{title:o,message:n}};import Oe from"fs/promises";import Vt from"os";import Fe from"path";import he from"process";import rn from"readline";var Y=_("runner_gemini"),Jt="Gemini CLI",ye="",nn=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(Y.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(Y.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(Y.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),on={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"},sn=async()=>{let e=Fe.join(Vt.homedir(),".gemini"),t=Fe.join(e,"settings.json");try{await Oe.mkdir(e,{recursive:!0});let r={};try{let o=await Oe.readFile(t,"utf-8");r=JSON.parse(o)}catch{Y.log("Creating new Gemini CLI settings file")}r.general||(r.general={}),r.general.previewFeatures||(r.general.previewFeatures=!0),r.model||(r.model={}),r.model.compressionThreshold!==.3&&(r.model.compressionThreshold=.3),await Oe.writeFile(t,JSON.stringify(r,null,2),"utf-8"),Y.log("Configured Gemini CLI settings (preview features and compression threshold)")}catch(r){Y.error("Failed to ensure Gemini CLI settings",{error:r.message})}};async function it({config:e,netlify:t,persistSteps:r=void 0,sendSteps:o=void 0,aiGateway:n,cwd:i=he.cwd()}){let{accountType:s,prompt:u,modelVersionOverrides:l}=e,{model:p}=e;if(await sn(),n){let{token:h,url:d}=n;if(!h||!d)throw new Error("No token or url provided from AI Gateway");if(l?.gemini){let a=l?.gemini?.[s];if(a){if(!await n.isModelAvailableForProvider("gemini",a))throw new Error(`Model override '${a}' is not available for gemini provider`);p=a}}if(!p)!!ye&&await n.isModelAvailableForProvider("gemini",ye)?(p=ye,Y.log(`Using default model: ${ye}`)):ye&&Y.log(`Default model ${ye} is not available, proceeding without model specification`);else if(p&&!l?.gemini?.[s]&&!await n.isModelAvailableForProvider("gemini",p))throw new Error(`Model '${p}' is not available for gemini provider`);he.env.GEMINI_API_KEY=h,he.env.GOOGLE_GEMINI_BASE_URL=d}else if(!he.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let f=[],c=[],m=[],y={},I=0,w=0,g,T,R=[ne(i,"gemini"),...p?["--model",p]:[],"--yolo","--output-format","stream-json","-p",u],S=`${he.env.NVM_BIN}/node`;Y.log(`Running ${S} ${R.join(" ")}`);let P=t.utils.run(S,R,{all:!0,env:he.env,cwd:i,idleTimeout:re});P.stdin?.end();let $=ue(()=>{r?.({steps:f,duration:w}),o?.({steps:c,duration:w}),c=[]},250),k=(h,d)=>{h.id=I,I+=1,m.push(h),f.push(h),c.push(h),d||$.flush(),$(),d&&$.flush()},x=rn.createInterface({input:P.all});x.on("error",h=>{Y.error("Readline interface error",{error:h.message,stack:h.stack})});let v="",G=()=>{v&&k({message:v.trim()}),v=""};return x.on("line",h=>{let d=null;try{if(h.startsWith("[API Error")){let a=h.match(/\[api error: (.+?)]$/i)?.[1];d={type:"error",value:$t(a,!1)?.error?.message||a||"Gemini encountered error"}}else 
d=JSON.parse(h)}catch{return}if(d)switch(["message","result"].includes(d.type)||G(),d.type){case"message":{d.role!=="user"&&d.content&&(v+=d.content);break}case"tool_use":{let a=on[d.tool_name]??d.tool_name,E=d.parameters?.file_path,Z=E&&Fe.relative(i,E),Se=d.parameters?.command,X={title:[a,Z&&`\`${Z}\``,Se&&`\`${Se}\``].filter(Boolean).join(" ")};y[d.tool_id]=X,$.flush();break}case"tool_result":{let a=y[d.tool_id];a&&(d.output&&(a.message=`\`\`\`
28
28
  ${d.output.trim()}
29
- \`\`\``),k(a,!0));break}case"result":{w=d.stats?.duration_ms,d.status==="error"?T=d.error?.message:g=v.trim();break}case"error":{T=d.error;break}case"finished":break;default:{Y.warn("Unhandled message type:",d.type);break}}}),await P.catch(h=>{({error:T,result:g}=nn({catchError:h,runCmd:P,error:T,result:g,runnerName:"Gemini"}))}),x.close(),O.flush(),{steps:m,duration:w,result:await ce({initialResult:g,agentName:Jt,hasError:!!T}),error:de({error:T,agentName:Jt}),isRetryableError:pe(T)}}var Xt=async()=>{let e=Fe.join(Vt.homedir(),".gemini");await $e.rm(e,{recursive:!0,force:!0})};var an={codex:{runner:ot,clean:Wt},claude:{runner:nt,clean:Ht},gemini:{runner:it,clean:Xt}},zt=an;var Zt=_("init_stage"),Qt=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:o})=>await A(ln(),"init-stage",async n=>{let i=performance.now();n?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.runnerVersion":o||"unknown"});let s=zt[e.runner];if(!s)throw n?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let u=un({apiToken:r});_t(u);let l=e.useGateway?await vt({netlify:u,config:e}):void 0;n?.setAttributes({"init.aiGateway.created":!!l});let p=5*1024,f=Pt(async({steps:I=[],duration:w})=>{let g=I.map(T=>{let R=T.title?ze(J(T.title),p):void 0,S=T.message?ze(J(T.message)):void 0;return{...T,title:R,message:S}});I.length=0;try{return await q(e.id,e.sessionId,{steps:g,duration:w})}catch(T){Zt.error("persistSteps failed",{error:T?.message||T})}},t);Zt.info("Adding build files to stage");let c=await et();await Ze(c);let m;e.hasRepo?e.sha?(m=e.sha,n?.setAttributes({"init.sha.source":"provided"})):(m=await Ut(),await we(e.id,{sha:m}),n?.setAttributes({"init.sha.source":"current_commit"})):(m=await Mt(),n?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let y=performance.now()-i;return n?.setAttributes({"init.sha":m||"unknown","init.duration.ms":y,"init.status":"success"}),{aiGateway:l,context:u,persistSteps:f,runner:s,sha:m}}),un=({apiToken:e})=>({constants:{NETLIFY_API_HOST:ke.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||ke.env.NETLIFY_API_TOKEN,SITE_ID:ke.env.SITE_ID,FUNCTIONS_DIST:ke.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:N}});import{getTracer as st}from"@netlify/otel";import cn from"crypto";import z from"fs/promises";import L from"path";import B from"process";var D=_("context"),dn=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:B.env.NETLIFY_TEAM_ID,userId:B.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:B.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},pn=10,fn=async e=>{let{name:t,ext:r}=L.parse(e),o=e,n=L.join(B.cwd(),U,o),i=0;for(;await mn(n);){if(i>=pn)throw new Error("Failed to generate context file");o=`${t}-${cn.randomUUID().slice(0,5)}${r}`,n=L.join(B.cwd(),U,o),i+=1}return o},mn=async e=>{try{return await z.access(e),!0}catch{return!1}},gn=async()=>{try{D.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return D.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(o=>o&&typeof o=="object"&&o.key==="catchall-consumer");return r?!r.contextScopes||typeof 
r.contextScopes!="object"?(D.warn("Catchall consumer missing or invalid contextScopes"),null):r:(D.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?D.warn("Netlify features context request timed out"):D.warn("Failed to fetch Netlify features context:",e.message),null}},hn=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let o=await r.text();return await z.writeFile(t,o,"utf-8"),!0}catch(r){return r.name==="AbortError"?D.warn(`Download timeout for ${e}`):D.warn(`Failed to download context file ${e}:`,r.message),!1}},De=null,yn=async()=>{if(De)return De;let e=await gn();if(!e)return[];let t=L.join(B.cwd(),U,We);await z.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([n,i])=>{if(!i||typeof i!="object"||!i.endpoint||!i.scope)return D.warn(`Invalid scope data for ${n}, skipping...`),null;let s=`${n}.md`,u=L.join(t,s),l=L.join(U,We,s);return D.log(`Downloading ${i.scope} context...`),await hn(i.endpoint,u)?(D.log(`Downloaded: ${l}`),{scope:i.scope,path:l,key:n}):null});return De=(await Promise.all(r)).filter(n=>n!==null),De},er=async({cliPath:e,netlify:t,config:r,buildErrorContext:o})=>{let n=dn(t),i=await fn(bt),s=L.join(B.cwd(),U);await z.mkdir(s,{recursive:!0});let u=L.join(U,i),l=L.join(B.cwd(),u),p=L.join(B.cwd(),U,te);try{await z.unlink(p),D.log(`Deleted old results file: ${p}`)}catch{}let f=o?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
29
+ \`\`\``),k(a,!0));break}case"result":{w=d.stats?.duration_ms,d.status==="error"?T=d.error?.message:g=v.trim();break}case"error":{T=d.error;break}case"finished":break;default:{Y.warn("Unhandled message type:",d.type);break}}}),await P.catch(h=>{({error:T,result:g}=nn({catchError:h,runCmd:P,error:T,result:g,runnerName:"Gemini"}))}),x.close(),$.flush(),{steps:m,duration:w,result:await ce({initialResult:g,agentName:Jt,hasError:!!T}),error:de({error:T,agentName:Jt}),isRetryableError:pe(T)}}var Xt=async()=>{let e=Fe.join(Vt.homedir(),".gemini");await Oe.rm(e,{recursive:!0,force:!0})};var an={codex:{runner:ot,clean:Wt},claude:{runner:nt,clean:Ht},gemini:{runner:it,clean:Xt}},zt=an;var Zt=_("init_stage"),Qt=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:o})=>await b(ln(),"init-stage",async n=>{let i=performance.now();n?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.runnerVersion":o||"unknown"});let s=zt[e.runner];if(!s)throw n?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let u=un({apiToken:r});_t(u);let l=e.useGateway?await vt({netlify:u,config:e}):void 0;n?.setAttributes({"init.aiGateway.created":!!l});let p=5*1024,f=Pt(async({steps:I=[],duration:w})=>{let g=I.map(T=>{let R=T.title?ze(J(T.title),p):void 0,S=T.message?ze(J(T.message)):void 0;return{...T,title:R,message:S}});I.length=0;try{return await q(e.id,e.sessionId,{steps:g,duration:w})}catch(T){Zt.error("persistSteps failed",{error:T?.message||T})}},t);Zt.info("Adding build files to stage");let c=await et();await Ze(c);let m;e.hasRepo?e.sha?(m=e.sha,n?.setAttributes({"init.sha.source":"provided"})):(m=await Ut(),await we(e.id,{sha:m}),n?.setAttributes({"init.sha.source":"current_commit"})):(m=await Mt(),n?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let y=performance.now()-i;return n?.setAttributes({"init.sha":m||"unknown","init.duration.ms":y,"init.status":"success"}),{aiGateway:l,context:u,persistSteps:f,runner:s,sha:m}}),un=({apiToken:e})=>({constants:{NETLIFY_API_HOST:ke.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||ke.env.NETLIFY_API_TOKEN,SITE_ID:ke.env.SITE_ID,FUNCTIONS_DIST:ke.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:N}});import{getTracer as st}from"@netlify/otel";import cn from"crypto";import z from"fs/promises";import L from"path";import B from"process";var D=_("context"),dn=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:B.env.NETLIFY_TEAM_ID,userId:B.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:B.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},pn=10,fn=async e=>{let{name:t,ext:r}=L.parse(e),o=e,n=L.join(B.cwd(),U,o),i=0;for(;await mn(n);){if(i>=pn)throw new Error("Failed to generate context file");o=`${t}-${cn.randomUUID().slice(0,5)}${r}`,n=L.join(B.cwd(),U,o),i+=1}return o},mn=async e=>{try{return await z.access(e),!0}catch{return!1}},gn=async()=>{try{D.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return D.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(o=>o&&typeof o=="object"&&o.key==="catchall-consumer");return r?!r.contextScopes||typeof 
r.contextScopes!="object"?(D.warn("Catchall consumer missing or invalid contextScopes"),null):r:(D.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?D.warn("Netlify features context request timed out"):D.warn("Failed to fetch Netlify features context:",e.message),null}},hn=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let o=await r.text();return await z.writeFile(t,o,"utf-8"),!0}catch(r){return r.name==="AbortError"?D.warn(`Download timeout for ${e}`):D.warn(`Failed to download context file ${e}:`,r.message),!1}},De=null,yn=async()=>{if(De)return De;let e=await gn();if(!e)return[];let t=L.join(B.cwd(),U,We);await z.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([n,i])=>{if(!i||typeof i!="object"||!i.endpoint||!i.scope)return D.warn(`Invalid scope data for ${n}, skipping...`),null;let s=`${n}.md`,u=L.join(t,s),l=L.join(U,We,s);return D.log(`Downloading ${i.scope} context...`),await hn(i.endpoint,u)?(D.log(`Downloaded: ${l}`),{scope:i.scope,path:l,key:n}):null});return De=(await Promise.all(r)).filter(n=>n!==null),De},er=async({cliPath:e,netlify:t,config:r,buildErrorContext:o})=>{let n=dn(t),i=await fn(At),s=L.join(B.cwd(),U);await z.mkdir(s,{recursive:!0});let u=L.join(U,i),l=L.join(B.cwd(),u),p=L.join(B.cwd(),U,te);try{await z.unlink(p),D.log(`Deleted old results file: ${p}`)}catch{}let f=o?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
30
30
  Your task is to analyze and fix the build errors.
31
31
  Don't apply techniques of reverting changes. Apply fixes related to errors.
32
32
  Don't try to run build by yourself. Just fix the errors.
@@ -39,7 +39,7 @@ ${r.siteContext.filter(g=>g.site_context).map(g=>typeof g.site_context=="string"
39
39
 
40
40
  `)}
41
41
  </project_rules>
42
- `);let m="";if(r.sessionHistoryContext?.length){let g=L.join(B.cwd(),U,Ke);await z.mkdir(g,{recursive:!0});let T=await Promise.all(r.sessionHistoryContext.map(async(R,S)=>{let P=S+1,O=`attempt-${P}.md`,k=L.join(g,O),x=L.join(U,Ke,O),v=`# Task History - Attempt ${P}
42
+ `);let m="";if(r.sessionHistoryContext?.length){let g=L.join(B.cwd(),U,Ke);await z.mkdir(g,{recursive:!0});let T=await Promise.all(r.sessionHistoryContext.map(async(R,S)=>{let P=S+1,$=`attempt-${P}.md`,k=L.join(g,$),x=L.join(U,Ke,$),v=`# Task History - Attempt ${P}
43
43
 
44
44
  ## Request - what the user asked for
45
45
  ${R.request}
@@ -128,19 +128,19 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
128
128
  </request>
129
129
 
130
130
  Use the following file for the complete context of the ask, the environment, and what's available. ${l} You MUST READ ALL OF IT. Make sure to read it first. Never cite or paraphrase private context.
131
- `),w};var _n=_("prompt"),tr=async({cliPath:e,config:t,netlify:r,buildErrorContext:o})=>{let n=await er({cliPath:e,config:t,netlify:r,buildErrorContext:o});return process.env.AGENT_RUNNER_DEBUG&&_n.log("Contextful Prompt:",n),{prompt:n}};var Le=_("inference_stage"),rr=5,Ue=async e=>{let{cliPath:t,config:r,context:o,buildErrors:n,runner:i,persistSteps:s,aiGateway:u,attempt:l,contextPrefix:p,priorAgentSessionId:f}=e;Le.log(`Running inference stage, attempt ${l} of ${rr}`);let c=await A(st(),"inference-stage",async m=>{m?.setAttributes({"inference.attempt":l||1}),Rt();let{prompt:y}=await A(st(),"compose-prompt",async()=>await tr({cliPath:t,config:r,buildErrorContext:En(n),netlify:o})),I=`
131
+ `),w};var _n=_("prompt"),tr=async({cliPath:e,config:t,netlify:r,buildErrorContext:o})=>{let n=await er({cliPath:e,config:t,netlify:r,buildErrorContext:o});return process.env.AGENT_RUNNER_DEBUG&&_n.log("Contextful Prompt:",n),{prompt:n}};var Le=_("inference_stage"),rr=5,Ue=async e=>{let{cliPath:t,config:r,context:o,buildErrors:n,runner:i,persistSteps:s,aiGateway:u,attempt:l,contextPrefix:p,priorAgentSessionId:f}=e;Le.log(`Running inference stage, attempt ${l} of ${rr}`);let c=await b(st(),"inference-stage",async m=>{m?.setAttributes({"inference.attempt":l||1}),Rt();let{prompt:y}=await b(st(),"compose-prompt",async()=>await tr({cliPath:t,config:r,buildErrorContext:En(n),netlify:o})),I=`
132
132
  ${p||""}
133
133
  ${y}
134
- `.trim(),w={...r,prompt:I},g=await A(st(),`run-${r.runner}`,async()=>await i({aiGateway:u,config:w,netlify:o,persistSteps:s,continueSession:!!(l&&l>1),priorAgentSessionId:f}));return g.result&&(g.result=J(g.result)),g.error&&(g.error=J(g.error)),await s.flush(),g});if(c.error){if(Le.error("Runner failed",{stepsCount:c.steps.length,duration:c.duration,error:c.error,isRetryableError:c.isRetryableError,attempt:l||1,agentSessionId:c.agentSessionId}),c.isRetryableError&&(!l||l<rr))return Le.log("Retrying inference stage"),await new Promise(y=>setTimeout(y,5e3)),{runnerResult:(await Ue({...e,attempt:(l||1)+1,priorAgentSessionId:c.agentSessionId,contextPrefix:c.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw Le.log("Do not retry inference stage"),new Error(c.error)}return{runnerResult:c}},En=e=>!e||e.length===0?"":`
134
+ `.trim(),w={...r,prompt:I},g=await b(st(),`run-${r.runner}`,async()=>await i({aiGateway:u,config:w,netlify:o,persistSteps:s,continueSession:!!(l&&l>1),priorAgentSessionId:f}));return g.result&&(g.result=J(g.result)),g.error&&(g.error=J(g.error)),await s.flush(),g});if(c.error){if(Le.error("Runner failed",{stepsCount:c.steps.length,duration:c.duration,error:c.error,isRetryableError:c.isRetryableError,attempt:l||1,agentSessionId:c.agentSessionId}),c.isRetryableError&&(!l||l<rr))return Le.log("Retrying inference stage"),await new Promise(y=>setTimeout(y,5e3)),{runnerResult:(await Ue({...e,attempt:(l||1)+1,priorAgentSessionId:c.agentSessionId,contextPrefix:c.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw Le.log("Do not retry inference stage"),new Error(c.error)}return{runnerResult:c}},En=e=>!e||e.length===0?"":`
135
135
  Deploy failed failed. Here are the errors to review on the latest build:
136
136
 
137
137
  Below are all of the logs with potential issues that we extracted. Some of them may be false positives, discern them carefully and ensure fixes are relevant.
138
138
 
139
139
  ${e.pop()}
140
- `;import In from"process";import{getTracer as at}from"@netlify/otel";import{getTracer as wn}from"@netlify/otel";var xe=_("deploy"),nr=async e=>await A(wn(),"create-preview-deploy",async t=>Tn(e,t)),Tn=async({netlify:e,hasRepo:t,skipBuild:r,message:o="Agent Preview",deploySubdomain:n,cliPath:i,filter:s},u)=>{try{let l=["deploy","--message",`"${o}"`,"--json","--draft","--verbose"];t||(xe.log("Deploy: Uploading source zip"),l.push("--upload-source-zip")),n&&l.push("--alias",n),s&&l.push("--filter",s),r?(xe.log("Deploy: Skipping build"),l.push("--no-build")):l.push("--context","deploy-preview");let p=i||"netlify";xe.log(`Running: ${p} ${l.join(" ")}`),u?.setAttributes({cmd:p,args:l});let f=await e.utils.run(p,l,{stdio:["ignore","pipe","pipe"]}),c=JSON.parse(String(f.stdout??"").trim());u?.setAttributes({success:!0,deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id}),xe.log(`
141
- Preview deploy created successfully:`,{deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id});let m={deployId:c.deploy_id,previewUrl:c.deploy_url,logsUrl:c.logs,siteId:c.site_id};return t||(m.sourceZipFilename=c.source_zip_filename),m}catch(l){throw xe.error("Failed to create preview deploy via CLI:",l),u?.setAttributes({success:!1,error:l.message}),l}};var ve=_("deploy_stage"),lt=async e=>await A(at(),"run-deploy-stage",async()=>xn(e)),xn=async({cliPath:e,config:t,context:r,result:o,filter:n,isRetry:i})=>{let s=await A(at(),"get-runner-diffs",async()=>await Dt({config:t,isRetry:i}));if(ve.info("Resolved git",{hasChanges:s.hasChanges,ignored:s.ignored??[]}),!s.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:u,resultDiff:l,diffBinary:p,resultDiffBinary:f}=s,c=!0;ve.log("Preview deploy condition check:",{resultUndefined:o===void 0,resultType:typeof o,hasChanges:c,wouldCreatePreview:o!==void 0&&c});let m=null;if(o!==void 0&&c)try{let y;try{let I=await A(at(),"get-runner-session",async()=>await wt(t.id,t.sessionId));I?.title&&(y=I.title)}catch(I){ve.warn("Failed to fetch session title, using fallback message:",I.message)}await q(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),m=await nr({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:y,skipBuild:!1,deploySubdomain:$t(t.id,In.env.SITE_NAME),filter:n})}catch(y){return ve.warn("Failed to create preview deploy (continuing with agent run):",y),{diff:u,resultDiff:l,hasChanges:c,previewInfo:null,diffBinary:p,resultDiffBinary:f,deployError:y instanceof Error?y.message:String(y)}}return ve.log("Git status",{hasDiff:!!u,hasChanges:c}),{diff:u,resultDiff:l,hasChanges:c,previewInfo:m,diffBinary:p,resultDiffBinary:f}};import{getTracer as je}from"@netlify/otel";async function or(e,t){let{maxRetries:r,baseDelay:o,onRetry:n}=t,i;for(let s=1;s<=r;s++)try{return await e()}catch(u){if(i=u,s===r)throw i;n&&n(s,i),await new Promise(l=>setTimeout(l,o*s))}throw i}var Me=class{scanDiffForForms(t){let r=[],o=null,n=[],i=t.split(`
140
+ `;import In from"process";import{getTracer as at}from"@netlify/otel";import{getTracer as wn}from"@netlify/otel";var xe=_("deploy"),nr=async e=>await b(wn(),"create-preview-deploy",async t=>Tn(e,t)),Tn=async({netlify:e,hasRepo:t,skipBuild:r,message:o="Agent Preview",deploySubdomain:n,cliPath:i,filter:s},u)=>{try{let l=["deploy","--message",`"${o}"`,"--json","--draft","--verbose"];t||(xe.log("Deploy: Uploading source zip"),l.push("--upload-source-zip")),n&&l.push("--alias",n),s&&l.push("--filter",s),r?(xe.log("Deploy: Skipping build"),l.push("--no-build")):l.push("--context","deploy-preview");let p=i||"netlify";xe.log(`Running: ${p} ${l.join(" ")}`),u?.setAttributes({cmd:p,args:l});let f=await e.utils.run(p,l,{stdio:["ignore","pipe","pipe"]}),c=JSON.parse(String(f.stdout??"").trim());u?.setAttributes({success:!0,deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id}),xe.log(`
141
+ Preview deploy created successfully:`,{deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id});let m={deployId:c.deploy_id,previewUrl:c.deploy_url,logsUrl:c.logs,siteId:c.site_id};return t||(m.sourceZipFilename=c.source_zip_filename),m}catch(l){throw xe.error("Failed to create preview deploy via CLI:",l),u?.setAttributes({success:!1,error:l.message}),l}};var ve=_("deploy_stage"),lt=async e=>await b(at(),"run-deploy-stage",async()=>xn(e)),xn=async({cliPath:e,config:t,context:r,result:o,filter:n,isRetry:i})=>{let s=await b(at(),"get-runner-diffs",async()=>await Dt({config:t,isRetry:i}));if(ve.info("Resolved git",{hasChanges:s.hasChanges,ignored:s.ignored??[]}),!s.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:u,resultDiff:l,diffBinary:p,resultDiffBinary:f}=s,c=!0;ve.log("Preview deploy condition check:",{resultUndefined:o===void 0,resultType:typeof o,hasChanges:c,wouldCreatePreview:o!==void 0&&c});let m=null;if(o!==void 0&&c)try{let y;try{let I=await b(at(),"get-runner-session",async()=>await wt(t.id,t.sessionId));I?.title&&(y=I.title)}catch(I){ve.warn("Failed to fetch session title, using fallback message:",I.message)}await q(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),m=await nr({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:y,skipBuild:!1,deploySubdomain:Ot(t.id,In.env.SITE_NAME),filter:n})}catch(y){return ve.warn("Failed to create preview deploy (continuing with agent run):",y),{diff:u,resultDiff:l,hasChanges:c,previewInfo:null,diffBinary:p,resultDiffBinary:f,deployError:y instanceof Error?y.message:String(y)}}return ve.log("Git status",{hasDiff:!!u,hasChanges:c}),{diff:u,resultDiff:l,hasChanges:c,previewInfo:m,diffBinary:p,resultDiffBinary:f}};import{getTracer as je}from"@netlify/otel";async function or(e,t){let{maxRetries:r,baseDelay:o,onRetry:n}=t,i;for(let s=1;s<=r;s++)try{return await e()}catch(u){if(i=u,s===r)throw i;n&&n(s,i),await new Promise(l=>setTimeout(l,o*s))}throw i}var Me=class{scanDiffForForms(t){let r=[],o=null,n=[],i=t.split(`
142
142
  `);for(let s of i)if(s.startsWith("diff --git")){if(o&&n.length>0){let l=this.containsNetlifyForm(n,o);l&&r.push(l)}let u=s.split(" ");o=u[u.length-1].replace(/^b\//,""),n=[]}else s.startsWith("+")&&!s.startsWith("+++")&&n.push(s.slice(1));if(o&&n.length>0){let s=this.containsNetlifyForm(n,o);s&&r.push(s)}return{detected:r.length>0,matches:r}}containsNetlifyForm(t,r){let o=t.join(`
143
- `),n=[{pattern:/<[\w-]*form[\s\S]*?(data-)?netlify/i,name:"standard form element"},{pattern:/<[A-Z][\w]*[\s\S]*?component\s*=\s*["']form["'][\s\S]*?(data-)?netlify/i,name:"component with form prop"}];for(let{pattern:i,name:s}of n){let u=o.match(i);if(u){let l=u.index||0,p=Math.max(0,l-20),f=Math.min(o.length,l+u[0].length+20),c=o.slice(p,f).trim();return c=c.replace(/\s+/g," "),c.length>100&&(c=c.slice(0,97)+"..."),{file:r,snippet:`[${s}] ${c}`}}}return null}};var b=_("cleanup_stage"),ir=async e=>await A(je(),"cleanup-stage",async()=>vn(e)),ut=1024*1024*10,vn=async({config:e,diff:t,result:r,duration:o,resultDiff:n,diffBinary:i,resultDiffBinary:s,previewInfo:u})=>{let l={result:r||"Done",duration:o};u&&u.deployId&&(l.deploy_id=u.deployId),u&&u.sourceZipFilename&&(l.result_zip_file_name=u.sourceZipFilename);let p=t||i||n||s;if(p&&(l.diff_produced=!0),p){let f=new Me,c=t||i||"",m=f.scanDiffForForms(c);m.detected?(b.log("Detected Netlify form(s) in diff:"),m.matches.forEach(({file:y,snippet:I})=>{b.log(` - ${y}: ${I}`)}),l.has_netlify_form=!0):b.log("Did not detect Netlify form(s) in diff"),b.log("Did not detect Netlify form(s) in diff")}if(p)try{b.log("Getting pre-signed URLs for diff upload");let f=await It(e.id,e.sessionId),c=[];(t||i)&&c.push(He(f.result.upload_url,i||t).then(()=>{l.result_diff_s3_key=f.result.s3_key,b.log("Successfully uploaded result_diff to S3")})),(n||s)&&c.push(He(f.cumulative.upload_url,s||n).then(()=>{l.cumulative_diff_s3_key=f.cumulative.s3_key,b.log("Successfully uploaded cumulative_diff to S3")})),b.log(`Uploading ${c.length} diff(s) to S3 in parallel`),await Promise.all(c),(n||s)&&(b.log("Updating agent runner with cumulative diff S3 key"),await A(je(),"update-runner",async()=>{await we(e.id,{result_diff_s3_key:f.cumulative.s3_key})}))}catch(f){b.error("S3 upload failed, falling back to inline diffs:",f);let c=Buffer.byteLength(t||i||""),m=Buffer.byteLength(s||n||"");if(c>ut||m>ut){let y=`Diffs exceed maximum inline size of ${ut} bytes.`;throw b.error(y),new Error(y)}l.result_diff=t,l.result_diff_binary=i,(n||s)&&(l.cumulative_diff=n,l.cumulative_diff_binary=s,b.log("Updating agent runner with inline diffs (fallback)"),await A(je(),"update-runner",async()=>{await we(e.id,{result_diff:n,result_diff_binary:s})}))}else b.log("No diffs to upload");return b.log("Updated agent runner with result"),await or(async()=>await A(je(),"update-runner-session",()=>q(e.id,e.sessionId,l)),{maxRetries:3,baseDelay:1e3,onRetry:(f,c)=>{b.error(`Error updating agent runner session (attempt ${f}):`,c),b.log("Retrying...")}}),b.log("Finished updating agent runner with result"),{sessionUpdate:l}};import{getTracer as sr,shutdownTracers as Sn,withActiveSpan as ar}from"@netlify/otel";var An=Rn(import.meta.url),lr=An("../package.json"),Re=_("pipeline_index"),Ge=3,ur=async({config:e,apiToken:t,cliPath:r="netlify",cwd:o,filter:n,tracing:i={}})=>{let s,{withStageTimer:u}=St(ee.timeUnits.hours(4)),l=await gt(lr.version,e.id,i);try{await ar(sr(),"run-pipeline",{},l,async()=>{let{aiGateway:p,context:f,persistSteps:c,runner:m,sha:y}=await u("init",()=>Qt({config:e,apiToken:t,cliPath:r,cwd:o,filter:n,runnerVersion:lr.version}),ee.timeUnits.minutes(10));s=m.clean,e.sha=y;let{runnerResult:I}=await u("inference",()=>Ue({cliPath:r,config:e,context:f,runner:m.runner,persistSteps:c,aiGateway:p}));await q(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let w=await 
u("deploy",()=>lt({cliPath:r,config:e,context:f,result:I.result,filter:n,isRetry:!1})),g=I,T=[];if(w.hasChanges&&w.deployError){T.push(ht(w.deployError));let x=1,v=!1;for(;x<=Ge&&!w.previewInfo&&!v;)Re.log(`Deploy attempt had errors. Retrying. ${x}/${Ge}`),await ar(sr(),"deploy-stage",async G=>{G?.setAttributes({"stage.attempt":x});let h;try{h=(await u(`inference-retry-${x}`,()=>Ue({cliPath:r,config:e,context:f,runner:m.runner,persistSteps:c,aiGateway:p,buildErrors:T,priorAgentSessionId:I.agentSessionId}))).runnerResult}catch(d){Re.warn(`Inference retry ${x} failed, stopping deploy retries:`,d),v=!0;return}g={...h,steps:[...g.steps||[],...h.steps||[]],duration:(g.duration||0)+(h.duration||0)},w=await u(`deploy-retry-${x}`,()=>lt({cliPath:r,config:e,context:f,result:h.result,filter:n,isRetry:!0})),w.deployError&&T.push(w.deployError),x++});x>Ge&&!w.previewInfo&&console.warn(`Deploy validation failed after ${Ge} attempts`)}let{diff:R,resultDiff:S,previewInfo:P,diffBinary:O,resultDiffBinary:k}=w;await u("cleanup",()=>ir({config:e,diff:R,result:g.result,duration:g.duration,resultDiff:S,diffBinary:O,resultDiffBinary:k,previewInfo:P}),ee.timeUnits.minutes(10)),process.env.NETLIFY_LOCAL_MODE||(await s?.(),await jt())})}catch(p){if(yt(p)){Re.info("Agent run terminated gracefully",{statusCode:p.statusCode,reason:p.message}),await s?.();try{await q(e.id,e.sessionId,{result:p.userMessage,state:"error"})}catch{Re.info("Could not update session (site may have been deleted)")}return}Re.error("Got error while running pipeline",p),await s?.();let f=p instanceof Error&&p.message;throw await q(e.id,e.sessionId,{result:f||"Encountered error when running agent",state:"error"}),p}finally{await Sn()}};import cr from"crypto";var F=_("bin_local"),W=bn(C.argv.slice(2),{string:["cwd","cli-path","filter","prompt","runner","model","netlify-api-token"],boolean:["verbose","help"],alias:{h:"help",v:"verbose"}}),dt=()=>{console.log(`
143
+ `),n=[{pattern:/<[\w-]*form[\s\S]*?(data-)?netlify/i,name:"standard form element"},{pattern:/<[A-Z][\w]*[\s\S]*?component\s*=\s*["']form["'][\s\S]*?(data-)?netlify/i,name:"component with form prop"}];for(let{pattern:i,name:s}of n){let u=o.match(i);if(u){let l=u.index||0,p=Math.max(0,l-20),f=Math.min(o.length,l+u[0].length+20),c=o.slice(p,f).trim();return c=c.replace(/\s+/g," "),c.length>100&&(c=c.slice(0,97)+"..."),{file:r,snippet:`[${s}] ${c}`}}}return null}};var A=_("cleanup_stage"),ir=async e=>await b(je(),"cleanup-stage",async()=>vn(e)),ut=1024*1024*10,vn=async({config:e,diff:t,result:r,duration:o,resultDiff:n,diffBinary:i,resultDiffBinary:s,previewInfo:u})=>{let l={result:r||"Done",duration:o};u&&u.deployId&&(l.deploy_id=u.deployId),u&&u.sourceZipFilename&&(l.result_zip_file_name=u.sourceZipFilename);let p=t||i||n||s;if(p&&(l.diff_produced=!0),p){let f=new Me,c=t||i||"",m=f.scanDiffForForms(c);m.detected?(A.log("Detected Netlify form(s) in diff:"),m.matches.forEach(({file:y,snippet:I})=>{A.log(` - ${y}: ${I}`)}),l.has_netlify_form=!0):A.log("Did not detect Netlify form(s) in diff"),A.log("Did not detect Netlify form(s) in diff")}if(p)try{A.log("Getting pre-signed URLs for diff upload");let f=await It(e.id,e.sessionId),c=[];(t||i)&&c.push(He(f.result.upload_url,i||t).then(()=>{l.result_diff_s3_key=f.result.s3_key,A.log("Successfully uploaded result_diff to S3")})),(n||s)&&c.push(He(f.cumulative.upload_url,s||n).then(()=>{l.cumulative_diff_s3_key=f.cumulative.s3_key,A.log("Successfully uploaded cumulative_diff to S3")})),A.log(`Uploading ${c.length} diff(s) to S3 in parallel`),await Promise.all(c),(n||s)&&(A.log("Updating agent runner with cumulative diff S3 key"),await b(je(),"update-runner",async()=>{await we(e.id,{result_diff_s3_key:f.cumulative.s3_key})}))}catch(f){A.error("S3 upload failed, falling back to inline diffs:",f);let c=Buffer.byteLength(t||i||""),m=Buffer.byteLength(s||n||"");if(c>ut||m>ut){let y=`Diffs exceed maximum inline size of ${ut} bytes.`;throw A.error(y),new Error(y)}l.result_diff=t,l.result_diff_binary=i,(n||s)&&(l.cumulative_diff=n,l.cumulative_diff_binary=s,A.log("Updating agent runner with inline diffs (fallback)"),await b(je(),"update-runner",async()=>{await we(e.id,{result_diff:n,result_diff_binary:s})}))}else A.log("No diffs to upload");return A.log("Updated agent runner with result"),await or(async()=>await b(je(),"update-runner-session",()=>q(e.id,e.sessionId,l)),{maxRetries:3,baseDelay:1e3,onRetry:(f,c)=>{A.error(`Error updating agent runner session (attempt ${f}):`,c),A.log("Retrying...")}}),A.log("Finished updating agent runner with result"),{sessionUpdate:l}};import{getTracer as sr,shutdownTracers as Sn,withActiveSpan as ar}from"@netlify/otel";var bn=Rn(import.meta.url),lr=bn("../package.json"),Re=_("pipeline_index"),Ge=3,ur=async({config:e,apiToken:t,cliPath:r="netlify",cwd:o,filter:n,tracing:i={}})=>{let s,{withStageTimer:u}=St(ee.timeUnits.hours(4)),l=await gt(lr.version,e.id,i);try{await ar(sr(),"run-pipeline",{},l,async()=>{let{aiGateway:p,context:f,persistSteps:c,runner:m,sha:y}=await u("init",()=>Qt({config:e,apiToken:t,cliPath:r,cwd:o,filter:n,runnerVersion:lr.version}),ee.timeUnits.minutes(10));s=m.clean,e.sha=y;let{runnerResult:I}=await u("inference",()=>Ue({cliPath:r,config:e,context:f,runner:m.runner,persistSteps:c,aiGateway:p}));await q(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let w=await 
u("deploy",()=>lt({cliPath:r,config:e,context:f,result:I.result,filter:n,isRetry:!1})),g=I,T=[];if(w.hasChanges&&w.deployError){T.push(ht(w.deployError));let x=1,v=!1;for(;x<=Ge&&!w.previewInfo&&!v;)Re.log(`Deploy attempt had errors. Retrying. ${x}/${Ge}`),await ar(sr(),"deploy-stage",async G=>{G?.setAttributes({"stage.attempt":x});let h;try{h=(await u(`inference-retry-${x}`,()=>Ue({cliPath:r,config:e,context:f,runner:m.runner,persistSteps:c,aiGateway:p,buildErrors:T,priorAgentSessionId:I.agentSessionId}))).runnerResult}catch(d){Re.warn(`Inference retry ${x} failed, stopping deploy retries:`,d),v=!0;return}g={...h,steps:[...g.steps||[],...h.steps||[]],duration:(g.duration||0)+(h.duration||0)},w=await u(`deploy-retry-${x}`,()=>lt({cliPath:r,config:e,context:f,result:h.result,filter:n,isRetry:!0})),w.deployError&&T.push(w.deployError),x++});x>Ge&&!w.previewInfo&&console.warn(`Deploy validation failed after ${Ge} attempts`)}let{diff:R,resultDiff:S,previewInfo:P,diffBinary:$,resultDiffBinary:k}=w;await u("cleanup",()=>ir({config:e,diff:R,result:g.result,duration:g.duration,resultDiff:S,diffBinary:$,resultDiffBinary:k,previewInfo:P}),ee.timeUnits.minutes(10)),process.env.NETLIFY_LOCAL_MODE||(await s?.(),await jt())})}catch(p){if(yt(p)){Re.info("Agent run terminated gracefully",{statusCode:p.statusCode,reason:p.message}),await s?.();try{await q(e.id,e.sessionId,{result:p.userMessage,state:"error"})}catch{Re.info("Could not update session (site may have been deleted)")}return}Re.error("Got error while running pipeline",p),await s?.();let f=p instanceof Error&&p.message;throw await q(e.id,e.sessionId,{result:f||"Encountered error when running agent",state:"error"}),p}finally{await Sn()}};import cr from"crypto";var F=_("bin_local"),W=An(C.argv.slice(2),{string:["cwd","cli-path","filter","prompt","runner","model","netlify-api-token"],boolean:["verbose","help"],alias:{h:"help",v:"verbose"}}),dt=()=>{console.log(`
144
144
  agent-runner-cli-local - Run Netlify agent runner locally without API connections
145
145
 
146
146
  USAGE:
@@ -173,5 +173,5 @@ NOTE:
173
173
  the full pipeline including inference and deployment, but API calls will
174
174
  be logged instead of executed.
175
175
  `)};W.help&&(dt(),C.exit(0));W.prompt||(F.error("Error: --prompt is required"),dt(),C.exit(1));W["netlify-api-token"]||(F.error("Error: --netlify-api-token is required - generate a PAT from your Netlify user settings"),dt(),C.exit(1));try{let e=W.cwd||C.cwd(),t=dr.join(e,".netlify","netlify-agent-runner-context*");pr.rmSync(t,{recursive:!0,force:!0});let r;try{r=await Nn(e)}catch(u){F.error(u.message),F.error(`
176
- To link this directory to a Netlify site, run:`),F.error(" netlify link"),C.exit(1)}let o=`local-${cr.randomBytes(8).toString("hex")}`,n=`session-${cr.randomBytes(8).toString("hex")}`,i=W.runner||"claude";F.log("Starting agent runner in local mode",{runnerId:o,sessionId:n,siteId:r,cwd:e,runner:i});let s={id:o,sessionId:n,prompt:W.prompt,runner:i,model:W.model,accountType:"local",validateAgentWithBuild:!1,sessionHistoryContext:[],siteContext:[],hasRepo:!0,useGateway:!0,sha:void 0,modelVersionOverrides:{}};C.env.NETLIFY_LOCAL_MODE="true",C.env.NETLIFY_API_HOST="api.netlify.com",C.env.NETLIFY_API_TOKEN=W["netlify-api-token"],C.env.SITE_ID=r,C.env.NETLIFY_TEAM_ID="local-team-id",C.env.NETLIFY_AGENT_RUNNER_USER_ID="local-user-id",C.env.SITE_NAME="local-site",i==="claude"?Pe(e,"claude")||(F.log("Claude CLI not found, installing..."),await ct(e,"@anthropic-ai/claude-code")):i==="gemini"?Pe(e,"gemini")||(F.log("Gemini CLI not found, installing..."),await ct(e,"@google/gemini-cli")):i==="codex"?Pe(e,"codex")||(F.log("Codex CLI not found, installing..."),await ct(e,"@openai/codex")):(F.error(`Unknown runner: ${i}`),C.exit(1)),await ur({config:s,cwd:e,cliPath:W["cli-path"],filter:W.filter,tracing:{exporterUrl:void 0,traceparent:void 0}}),F.info("Finished agent (local mode)"),C.exit(0)}catch(e){F.error("Error running agent pipeline (local mode):",e),C.exit(1)}function ct(e,t){return new Promise((r,o)=>{N("npm",["install",t,"--no-save"],{cwd:e}).then(({stdout:n})=>{F.log(`${t} installed: ${n}`),r()}).catch(n=>{F.error(`Error installing ${t}: ${n.stderr||n.message}`),o(n)})})}async function Nn(e){let t=dr.join(e,".netlify","state.json");try{let r=await pr.readFileSync(t,"utf-8"),o=JSON.parse(r);if(!o.siteId)throw new Error(`No siteId found in ${t}. Please link this directory to a Netlify site using 'netlify link'.`);return F.log(`Found site ID from state file: ${o.siteId}`),o.siteId}catch(r){throw r.code==="ENOENT"?new Error(`No .netlify/state.json found in ${e}. Please link this directory to a Netlify site using 'netlify link'.`):r}}
176
+ To link this directory to a Netlify site, run:`),F.error(" netlify link"),C.exit(1)}let o=`local-${cr.randomBytes(8).toString("hex")}`,n=`session-${cr.randomBytes(8).toString("hex")}`,i=W.runner||"claude";F.log("Starting agent runner in local mode",{runnerId:o,sessionId:n,siteId:r,cwd:e,runner:i});let s={id:o,sessionId:n,prompt:W.prompt,runner:i,model:W.model,accountType:"local",sessionHistoryContext:[],siteContext:[],hasRepo:!0,useGateway:!0,sha:void 0,modelVersionOverrides:{}};C.env.NETLIFY_LOCAL_MODE="true",C.env.NETLIFY_API_HOST="api.netlify.com",C.env.NETLIFY_API_TOKEN=W["netlify-api-token"],C.env.SITE_ID=r,C.env.NETLIFY_TEAM_ID="local-team-id",C.env.NETLIFY_AGENT_RUNNER_USER_ID="local-user-id",C.env.SITE_NAME="local-site",i==="claude"?Pe(e,"claude")||(F.log("Claude CLI not found, installing..."),await ct(e,"@anthropic-ai/claude-code")):i==="gemini"?Pe(e,"gemini")||(F.log("Gemini CLI not found, installing..."),await ct(e,"@google/gemini-cli")):i==="codex"?Pe(e,"codex")||(F.log("Codex CLI not found, installing..."),await ct(e,"@openai/codex")):(F.error(`Unknown runner: ${i}`),C.exit(1)),await ur({config:s,cwd:e,cliPath:W["cli-path"],filter:W.filter,tracing:{exporterUrl:void 0,traceparent:void 0}}),F.info("Finished agent (local mode)"),C.exit(0)}catch(e){F.error("Error running agent pipeline (local mode):",e),C.exit(1)}function ct(e,t){return new Promise((r,o)=>{N("npm",["install",t,"--no-save"],{cwd:e}).then(({stdout:n})=>{F.log(`${t} installed: ${n}`),r()}).catch(n=>{F.error(`Error installing ${t}: ${n.stderr||n.message}`),o(n)})})}async function Nn(e){let t=dr.join(e,".netlify","state.json");try{let r=await pr.readFileSync(t,"utf-8"),o=JSON.parse(r);if(!o.siteId)throw new Error(`No siteId found in ${t}. Please link this directory to a Netlify site using 'netlify link'.`);return F.log(`Found site ID from state file: ${o.siteId}`),o.siteId}catch(r){throw r.code==="ENOENT"?new Error(`No .netlify/state.json found in ${e}. Please link this directory to a Netlify site using 'netlify link'.`):r}}
177
177
  //# sourceMappingURL=bin-local.js.map
package/dist/bin.js CHANGED
@@ -1,32 +1,32 @@
1
1
  #!/usr/bin/env node
2
- import ft from"process";import kn from"minimist";import{createRequire as bn}from"module";import{createTracerProvider as _r}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as mt}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as Er}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as Tr}from"@netlify/otel";import{propagation as gt,context as ht,W3CTraceContextPropagator as wr}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as Ir}from"@opentelemetry/exporter-trace-otlp-grpc";import yr from"process";function _(e){let t=yr.env.LOG!=="0";return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var je=_("tracing"),yt=async(e,t,r)=>(await _r({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new mt(new Ye),new mt(new Ir({url:r.exporterUrl}))],instrumentations:[new Er({skipHeaders:!0})]}),r.traceparent?(gt.setGlobalPropagator(new wr),gt.extract(ht.active(),{traceparent:r.traceparent,isRemote:!0})):ht.active());function A(e,t,r){return je.log(`\u23F3 TRACE: ${t} starting...`),Tr(e,t,r)}var Ye=class{export(t,r){for(let o of t)this.logSpan(o);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,o=t.attributes,n=[];for(let[u,l]of Object.entries(o))u.includes("duration")&&typeof l=="number"?n.push(`${u}=${l.toFixed(2)}ms`):n.push(`${u}=${l}`);let i=t.status?.code===2?"\u274C":"\u2705",s=n.length>0?` [${n.join(", ")}]`:"";je.log(`${i} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${s}`),t.status?.code===2&&t.status.message&&je.log(` \u274C Error: ${t.status.message}`)}};var xr=["error","failed","exception","fatal","panic","abort","crash"];function _t(e){let t=e.split(`
3
- `),r=[],o=-1,n=0;for(;n<t.length;){let u=t[n].slice(0,500).toLowerCase();if(xr.some(p=>u.includes(p))){let p=Math.max(0,n-10,o+1),f=Math.min(t.length-1,n+20),c=[];for(let m=p;m<=f;m++)c.push(t[m]);r.push(c.join(`
2
+ import ft from"process";import kn from"minimist";import{createRequire as bn}from"module";import{createTracerProvider as _r}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as mt}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as Er}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as wr}from"@netlify/otel";import{propagation as gt,context as ht,W3CTraceContextPropagator as Tr}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as xr}from"@opentelemetry/exporter-trace-otlp-grpc";import yr from"process";function _(e){let t=yr.env.LOG!=="0";return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var je=_("tracing"),yt=async(e,t,r)=>(await _r({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new mt(new Ye),new mt(new xr({url:r.exporterUrl}))],instrumentations:[new Er({skipHeaders:!0})]}),r.traceparent?(gt.setGlobalPropagator(new Tr),gt.extract(ht.active(),{traceparent:r.traceparent,isRemote:!0})):ht.active());function N(e,t,r){return je.log(`\u23F3 TRACE: ${t} starting...`),wr(e,t,r)}var Ye=class{export(t,r){for(let o of t)this.logSpan(o);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,o=t.attributes,n=[];for(let[u,l]of Object.entries(o))u.includes("duration")&&typeof l=="number"?n.push(`${u}=${l.toFixed(2)}ms`):n.push(`${u}=${l}`);let i=t.status?.code===2?"\u274C":"\u2705",s=n.length>0?` [${n.join(", ")}]`:"";je.log(`${i} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${s}`),t.status?.code===2&&t.status.message&&je.log(` \u274C Error: ${t.status.message}`)}};var Ir=["error","failed","exception","fatal","panic","abort","crash"];function _t(e){let t=e.split(`
3
+ `),r=[],o=-1,n=0;for(;n<t.length;){let u=t[n].slice(0,500).toLowerCase();if(Ir.some(p=>u.includes(p))){let p=Math.max(0,n-10,o+1),f=Math.min(t.length-1,n+20),c=[];for(let m=p;m<=f;m++)c.push(t[m]);r.push(c.join(`
4
4
  `)),o=f,n=f+1}else n++}if(r.length===0)return e;let i=r.map((s,u)=>`<extracted_error_chunk order="${u+1}">
5
5
  ${s}
6
6
  </extracted_error_chunk>`).join(`
7
7
 
8
- `);return i.length>e.length*.8?e:i}import $e from"process";import{getTracer as pn}from"@netlify/otel";import ye from"process";var re=class extends Error{constructor(r,o,n){super(r);this.statusCode=o;this.userMessage=n;this.name="GracefulShutdownError"}},Et=e=>e instanceof re;var Ne=ye.env.NETLIFY_API_URL,Ae=ye.env.NETLIFY_API_TOKEN,B=_("api"),Se=()=>ye.env.NETLIFY_LOCAL_MODE==="true",_e=async(e,t={})=>{if(!Ne||!Ae)throw new Error("No API URL or token");let r=new URL(e,Ne),o={...t,headers:{...t.headers,Authorization:`Bearer ${Ae}`}};ye.env.AGENT_RUNNERS_DEBUG==="true"&&(o.headers["x-nf-debug-logging"]="true"),t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),i=n.ok&&n.status<=299;if(ye.env.AGENT_RUNNERS_DEBUG==="true")B.log(`Response headers for ${r}:`),n.headers.forEach((u,l)=>{B.log(` ${l}: ${u}`)});else{let u=n.headers.get("x-request-id")||n.headers.get("x-nf-request-id");B.log(`Request ID for ${r}: ${u||"N/A"}`)}if(i||B.error(`Got status ${n.status} for request ${r}`),t.raw){if(!i)throw new Error(`API request failed: ${n.status} ${n.statusText}`);return n}let s=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!i){let u=typeof s=="string"?s:JSON.stringify(s);throw n.status===404?new re(`API request failed: 404 - ${u}`,404,"The site associated with this agent run no longer exists."):n.status===403&&t.gracefulOn403?new re(`API request failed: 403 - ${u}`,403,"Credit limit reached. Please add more credits to continue using Agent Runners."):new Error(`API request failed: ${n.status} - ${u}`)}return s},Tt=e=>{B.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(Ne=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(Ae=e.constants.NETLIFY_API_TOKEN)},wt=()=>({apiUrl:Ne,token:Ae}),Ee=async(e,t)=>Se()?(B.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):_e(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),H=async(e,t,r)=>Se()?(B.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):_e(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var It=async(e,t)=>Se()?(B.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):_e(`/api/v1/agent_runners/${e}/sessions/${t}`),xt=(e,t,r)=>_e(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r},gracefulOn403:!0}),Rt=async(e,t)=>Se()?(B.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):_e(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),Be=async(e,t)=>{B.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var ne=_("ai_gateway"),He=null;var vt=async()=>{if(He)return He;ne.log("Fetching available AI gateway providers");let e=await fetch(`${wt().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return He=t,ne.log("Cached AI gateway 
providers",{providerCount:Object.keys(t.providers).length}),t},Rr=async(e,t)=>{let o=(await vt()).providers[e];if(!o)return ne.log(`Provider '${e}' not found`),!1;let n=o.models.includes(t);return ne.log(`Model validation for ${e}/${t}`,{isAvailable:n}),n},Nt=async({netlify:e,config:t})=>{let r,o,n,i,s=e.constants?.SITE_ID;if(!s)throw new Error("No site id");let u=async()=>{clearTimeout(n),ne.log("Requesting AI gateway information");let l=await xt(s,t.id,t.sessionId);if({token:r,url:i}=l,o=l.expires_at?l.expires_at*1e3:void 0,ne.log("Got AI gateway information",{token:!!r,expiresAt:o,url:i}),o){let p=o-Date.now()-6e4;p>0&&(n=setTimeout(()=>{u()},p))}};return await Promise.all([u(),vt()]),{get url(){return i},get token(){return r},isModelAvailableForProvider:Rr}};import q from"process";import K from"path";import be from"fs";import{fileURLToPath as Pr}from"url";import{createRequire as Or}from"module";import{execa as Fr,execaCommand as ho}from"execa";import{Transform as vr}from"stream";var Nr=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),Ar=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function Sr(){return Object.entries(process.env).filter(([e,t])=>!(!t||Nr.has(e)||Ar.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function W(e){if(typeof e!="string")return e;let t=Sr();if(t.length===0)return e;let r=e;return t.forEach(o=>{let n=new RegExp(br(o),"g");r=r.replace(n,"******")}),r}function br(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var oe=class extends vr{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,o){let n=t.toString(),i=W(n);o(null,i)}};function At(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(o,n,i){let s=typeof o=="string"?W(o):o;return typeof n=="function"?t(s,n):t(s,n,i)},process.stderr.write=function(o,n,i){let s=typeof o=="string"?W(o):o;return typeof n=="function"?r(s,n):r(s,n,i)}}var Te=null,St=e=>(Te&&Te.destroy(),Te=new Z({totalAllowedTime:e}),Te),bt=()=>Te;var Z=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,o)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let n=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),i=null,s=null;o!==void 0&&(s=new Promise((u,l)=>{i=setTimeout(()=>{l(new Error(`${t} stage exceeded its maximum duration of ${o}ms`))},o)}));try{return s?await Promise.race([r(),s]):await r()}finally{n(),i&&clearTimeout(i)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var Ct="netlify-agent-runner-context.md",qe="task-history",We="netlify-context",D=".netlify",Q="results.md",Ke="assets",Ve="other",Je="personal";var Xe="enterprise",ze="free",Pt=[Je,"pro",Xe,ze],ee=1800*1e3;var Ot={name:"@netlify/agent-runner-cli",type:"module",version:"1.60.3",description:"CLI tool for running Netlify agents",main:"./dist/index.js",types:"./dist/index.d.ts",exports:"./dist/index.js",bin:{"agent-runner-cli":"./dist/bin.js","agent-runner-cli-local":"./dist/bin-local.js"},files:["dist/**/*.js","dist/**/*.d.ts","patches","scripts"],scripts:{build:"tsup",dev:"tsup --watch",prepare:"husky install node_modules/@netlify/eslint-config-node/.husky/",prepublishOnly:"npm ci && npm test",prepack:"npm run build",test:"run-s build format test:dev",format:"run-s build format:check-fix:*","format:ci":"run-s build format:check:*","format:check-fix:lint":"run-e format:check:lint format:fix:lint","format:check:lint":"cross-env-shell eslint $npm_package_config_eslint","format:fix:lint":"cross-env-shell eslint --fix $npm_package_config_eslint","format:check-fix:prettier":"run-e format:check:prettier format:fix:prettier","format:check:prettier":"cross-env-shell prettier --check $npm_package_config_prettier","format:fix:prettier":"cross-env-shell prettier --write $npm_package_config_prettier","test:dev":"run-s build test:dev:*","test:ci":"run-s build test:ci:*","test:dev:vitest":"LOG=0 vitest --exclude '**/integration/**'","test:ci:vitest":"LOG=0 c8 -r lcovonly -r text -r json vitest --exclude '**/integration/**'","test:integration":"vitest run test/integration/","test:integration:codex":"vitest run test/integration/codex.test.ts","test:integration:claude":"vitest run test/integration/claude.test.ts","test:integration:gemini":"vitest run test/integration/gemini.test.ts",postinstall:"node scripts/postinstall.js"},config:{eslint:'--cache --format=codeframe --max-warnings=0 
"{src,scripts,test,.github}/**/*.{js,ts,md,html}"',prettier:'--ignore-path .gitignore --loglevel=warn "{src,scripts,test,.github}/**/*.{js,ts,md,yml,json,html}" "*.{js,ts,yml,json,html}" ".*.{js,ts,yml,json,html}" "!**/package-lock.json" "!package-lock.json"'},keywords:[],license:"MIT",repository:"netlify/agent-runner-cli",bugs:{url:"https://github.com/netlify/agent-runner-cli/issues"},author:"Netlify Inc.",directories:{test:"test"},devDependencies:{"@commitlint/cli":"^20.0.0","@commitlint/config-conventional":"^20.0.0","@eslint/compat":"^2.0.0","@eslint/js":"^9.35.0","@netlify/eslint-config-node":"^7.0.1","@types/node":"^24.5.0","@typescript-eslint/eslint-plugin":"^8.0.0","@typescript-eslint/parser":"^8.0.0","@vitest/eslint-plugin":"^1.6.6",c8:"^10.0.0","eslint-config-prettier":"^10.1.8","eslint-plugin-n":"^17.0.0",husky:"^9.0.0","patch-package":"^8.0.0",tsup:"^8.5.0",typescript:"^5.0.0","typescript-eslint":"^8.44.0",vitest:"^4.0.16"},dependencies:{"@anthropic-ai/claude-code":"2.1.14","@google/gemini-cli":"0.25.0","@netlify/otel":"^5.1.1","@openai/codex":"0.88.0","@opentelemetry/exporter-trace-otlp-grpc":"^0.208.0",execa:"^9.6.1",minimist:"^1.2.8"}};var $r=Pr(import.meta.url),Lr=K.dirname($r),Dr=Or(import.meta.url),ie=_("shell"),Ze=new Set,kr={preferLocal:!0},F=(e,t,r)=>{let[o,n]=Ur(t,r),i={...kr,...n},s=Fr(e,o,i);Mr(s,i),jr(s);let u=r?.idleTimeout;return u&&u>0&&Gr(s,u),s};var Ur=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},Mr=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(q.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new oe).pipe(q.stdout),e.stdout?.pipe(new oe).pipe(q.stdout),e.stderr?.pipe(new oe).pipe(q.stderr);return}e.stdout?.pipe(q.stdout),e.stderr?.pipe(q.stderr)},Qe=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(q.kill(-e.pid,t),ie.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return ie.error("Error killing process:",r),!1}},Ft=e=>Qe(e,"SIGKILL"),Gr=(e,t)=>{let r=null,o=()=>{ie.log(`Process ${e.pid} killed due to idle timeout (no output for ${t}ms)`),Qe(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(ie.log(`Force killing idle process ${e.pid}`),Ft(e))},5e3)},n=()=>{r&&clearTimeout(r),r=setTimeout(o,t)};n(),e.stdout?.on("data",n),e.stderr?.on("data",n);let i=()=>{r&&(clearTimeout(r),r=null)};e.on("exit",i),e.on("error",i)},jr=e=>{Ze.add(e);let t=bt();if(t){let r=t.onTimesUp(()=>{ie.log(`Global timer expired, killing process ${e.pid}`),Qe(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(ie.log(`Force killing process ${e.pid} after timeout`),Ft(e))},5e3)});e.on("exit",()=>{Ze.delete(e),r()}),e.on("error",()=>{Ze.delete(e),r()})}};function se(e,t){if(!q.env.NETLIFY_LOCAL_MODE)try{let n=Dr.resolve(Ot.name),i=K.dirname(n);for(;i!==K.dirname(i);){let s=K.dirname(i);if(K.basename(s)==="node_modules"){let u=K.join(s,".bin",t);if(be.existsSync(u))return u;break}i=s}}catch(n){console.error("Could not resolve package.json",n)}if(q.env.NODE_PATH){let n=K.join(q.env.NODE_PATH,".bin",t);if(be.existsSync(n))return n}let r=K.join(e,"node_modules",".bin",t);if(be.existsSync(r))return r;let o=K.join(Lr,"..","node_modules",".bin",t);if(be.existsSync(o))return o}var $t=_("utils"),Yr=e=>new Promise(t=>{setTimeout(t,e)}),Lt=(e,t=3e3)=>{let r=!1,o=null,n=[],i=null,s=(...u)=>{if(r)return o=u,new Promise(f=>{n.push(f)});r=!0;let l,p=new Promise(f=>{l=f});return i=(async()=>{await Promise.resolve();let f=await e(...u);for(l(f);;){if(await Yr(t),!o)return r=!1,i=null,f;let c=o,m=n;o=null,n=[],f=await 
e(...c),m.forEach(h=>{h(f)})}})(),p};return s.flush=async()=>{if((r||o)&&i)return await i,s.flush()},s},ae=(e,t,r=!1)=>{let o=null,n=null,i=null,s=function(...u){n=u,i=this;let l=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(i,n),n=null,i=null)},t),l&&(e.apply(i,n),n=null,i=null)};return s.cancel=()=>{clearTimeout(o),o=null,n=null,i=null},s.flush=()=>{if(o){clearTimeout(o);let u=n,l=i;o=null,n=null,i=null,e.apply(l,u)}},s},Ce=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(o){t&&(r?.error?r.error("Could not parse JSON",o):$t.error("Could not parse JSON",o))}},Dt=(e,t)=>{let n=".netlify.app",i="agent-";if(!t)return`${i}${e.slice(0,6)}`;let u=`--${t}${n}`;if(u.length>55)return"";let l=60-u.length;if(l<=0)return"";if(l>=i.length+6){let p=Math.min(l-i.length,e.length);return`${i}${e.slice(0,p)}`}return e.slice(0,l)},Br=e=>!e||typeof e!="object"||Array.isArray(e)||Object.keys(e).length===0?!1:!!Pt.some(t=>t in e),kt=()=>{let e={},t={codex:process.env.NETLIFY_FF_AGENT_RUNNER_CODEX_VERSION,claude:process.env.NETLIFY_FF_AGENT_RUNNER_CLAUDE_VERSION,gemini:process.env.NETLIFY_FF_AGENT_RUNNER_GEMINI_VERSION};return Object.entries(t).forEach(([r,o])=>{if(o){let n=`NETLIFY_FF_AGENT_RUNNER_${r.toUpperCase()}_VERSION`;try{let i=JSON.parse(o);Br(i)&&(e[r]=i)}catch(i){let u=i instanceof SyntaxError?"Invalid JSON":i.message;$t.error(`Could not parse ${r} model version override from ${n}: ${u}`)}}}),e},Hr=50*1024,et=(e,t=Hr)=>{if(!e||typeof e!="string"||e.length<=t)return e;let o=e.startsWith("```")?"\n... [truncated]\n```":"... [truncated]";return e.slice(0,t)+o};import{Buffer as Ut}from"buffer";import qr from"path";var Mt=_("repo"),Gt=async({config:e,isRetry:t})=>{Mt.info("Getting runner diffs");let r=await Kr(),{hasChanges:o}=r,{status:n}=r;if(!o)return{hasChanges:!1};if(!t){let T=Vr(n);await Jr(T)}Mt.info("Changes after processing"),await rt();let i=await nt(n);await tt(i);let s={stdio:["ignore","pipe","pipe"]},u=await F("git",["diff","--staged"],s),l=String(u.stdout??"");if(o=!!l,!o)return{hasChanges:!1,ignored:i};let p=await F("git",["diff","--staged","--binary"],s),f=String(p.stdout??""),c,m;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await F("git",["commit","-m","Agent runner"]);let T=await F("git",["diff",e.sha,"HEAD"],s);c=String(T.stdout??"");let E=await F("git",["diff",e.sha,"HEAD","--binary"],s),g=String(E.stdout??"");c!==g&&(m=Ut.from(g).toString("base64"))}let h={hasChanges:!0,diff:l,resultDiff:c,ignored:i};return l!==f&&(h.diffBinary=Ut.from(f).toString("base64")),m&&(h.resultDiffBinary=m),h},tt=async(e=[])=>{await F("git",["add",".",...e])},rt=async()=>{let e=await F("git",["status","-s"]);return String(e.stdout??"")},jt=/.. (.+)?\.log$/,Wr=[jt],Kr=async()=>{let e=await rt();return{hasChanges:(e.trim().length===0?[]:e.split(`
9
- `).filter(o=>Wr.some(i=>i instanceof RegExp?i.test(o):o===i)?!1:o[1]?.trim()!=="")).length!==0,status:e}},Yt=async()=>{let{stdout:e}=await F("git",["rev-parse","HEAD"]);return String(e??"").trim()},Bt=async()=>{let{stdout:e}=await F("git",["rev-list","--max-parents=0","HEAD"]);return String(e??"").trim()},nt=async e=>{e||=await rt();let t=[".netlify","node_modules","dist"],r=[];return e.split(`
10
- `).forEach(o=>{t.forEach(i=>{[`?? ${i}`,`?? ${i}${qr.sep}`].some(u=>o.startsWith(u))&&r.push(`:!${i}`)});let n=o.match(jt)?.[1];n&&r.push(`:!${n}.log`)}),r},Ht=async()=>{await F("git",["reset","--hard","HEAD"])},Vr=e=>{let t=e.split(`
11
- `).reduce((r,o)=>{if(!o)return r;let[n,i,,...s]=o,u=s.join(""),l=n.trim(),p=i.trim();return r[u]?r[u].change=p:r[u]={filePath:u,stage:l,change:p},r},{});return Object.values(t)},Jr=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push(F("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import zr from"fs/promises";import Zr from"os";import Kt from"path";import de from"process";import Qr from"readline";import ot from"path";import Xr from"fs/promises";var it=_("agent-output-utils");async function le({initialResult:e,agentName:t,hasError:r}){let o="",n=ot.join(process.cwd(),D,Q);try{let i=await Xr.readFile(n,"utf-8");i&&(o=i,it.log(`Pulled result from ${ot.relative(process.cwd(),n)}`))}catch{it.log(`No results file found at ${ot.relative(process.cwd(),n)}`)}return o||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function ue({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,o=r?.replace(/\s+/g," ").trim().toLowerCase()||"",n="";return o?.includes("ai gateway is not available for your account")||o?.includes("ai gateway is not enabled for your account")?n="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":o?.includes("error when talking to gemini api")?n="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(o?.includes("connection closed prematurely")||o?.includes("499")&&t.toLowerCase().includes("gemini"))&&(n=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),o?.includes("request timed out")&&(n=`The ${t} API request's have timed out. Please try again or use a different available agent.`),o?.includes("network error")&&(n=`The ${t} agent is having network issues. Please try again or use a different available agent.`),o?.includes("503")&&(n=`The ${t} API is currently experiencing high load. 
Retrying automatically...`),n&&it.log(`Providing updated error messsage: ${n}, replacing original error: ${r}`),n||r||void 0}function ce(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error")||r?.includes("503"))}var k=_("runner_claude"),qt="Claude Code",pe="claude-opus-4-5-20251101",Wt=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,en=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(k.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(k.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(k.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function st({config:e,netlify:t,persistSteps:r,aiGateway:o,continueSession:n,priorAgentSessionId:i,cwd:s=de.cwd()}){let u=e,{accountType:l,prompt:p,modelVersionOverrides:f}=u,{model:c}=u,m="";if(o){let{token:y,url:d}=o;if(!y||!d)throw new Error("No token or url provided from AI Gateway");if(f?.claude){let a=f?.claude?.[l];if(a){if(!await o.isModelAvailableForProvider("anthropic",a))throw new Error(`Model override '${a}' is not available for anthropic provider`);c=a}}else if(c){if(!await o.isModelAvailableForProvider("anthropic",c))throw new Error(`Model '${c}' is not available for anthropic provider`)}else!!pe&&await o.isModelAvailableForProvider("anthropic",pe)?(c=pe,k.log(`Using default model: ${pe}`)):pe&&k.log(`Default model ${pe} is not available, proceeding without model specification`);de.env.ANTHROPIC_API_KEY=y,de.env.ANTHROPIC_BASE_URL=d}else if(!de.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let h=[],T=[],E={},g=0,I=0,v,N,b=[se(s,"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose","--disallowed-tools","ExitPlanMode,AskUserQuestion",...c?["--model",c]:[],...n?["--continue"]:[],...n&&i?["--resume",i]:[],"-p",p],C=`${de.env.NVM_BIN}/node`;k.log(`Running ${C} ${b.join(" ")}`);let O=t.utils.run(C,b,{all:!0,env:de.env,cwd:s,idleTimeout:ee});O.stdin?.end();let x=ae(()=>{r?.({steps:h,duration:I})},250),R=(y,d)=>{let a={...y,id:g};g+=1,T.push(a),h.push(a),d||x.flush(),x(),d&&x.flush()},G=Qr.createInterface({input:O.all});return G.on("error",y=>{k.error("Readline interface error",{error:y.message,stack:y.stack})}),G.on("line",y=>{let d=null;try{d=JSON.parse(y)}catch{k.log("Could not parse line",y)}d?.session_id&&d.session_id!==m&&(m=d.session_id),Array.isArray(d?.message?.content)?d.message.content.forEach(a=>{switch(a.type){case"text":{a.text&&R({message:a.text});break}case"image":{typeof 
a.source=="object"&&a.source&&a.source.type==="base64"&&a.source.media_type?R({message:`![](data:${a.source.media_type};base64,${a.source.data})`}):k.log(`Unsupported image type ${a.source?.type}`,a.source);break}case"tool_use":{if(a.name==="Task"){let w=a.input?.description&&`\`${a.input.description}\``;R({title:[Wt(a.name),w].filter(Boolean).join(" ")})}else a.id&&(E[a.id]=a);x.flush();break}case"tool_result":{let w=a.tool_use_id?E[a.tool_use_id]:void 0,X;if(w){let V=w.input?.file_path&&Kt.relative(s,w.input.file_path),P=V&&`\`${V}\``;X=[Wt(w.name||""),P].filter(Boolean).join(" ")}let ve=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(w?.name||""),z;if(typeof a.content=="string")z=a.content;else if(Array.isArray(a.content)){let V=[];a.content.forEach(P=>{P?.type==="text"&&typeof P.text=="string"?V.push(P.text):P?.type==="image"&&typeof P.source=="object"&&P.source?P.source.type==="base64"&&P.source.media_type?V.push(`![](data:${P.source.media_type};base64,${P.source.data})`):k.log(`Unsupported image type ${P.source.type}`,P.source):k.log(`Unsupported block type ${P?.type}`)}),z=V.join(`
8
+ `);return i.length>e.length*.8?e:i}import $e from"process";import{getTracer as pn}from"@netlify/otel";import ye from"process";var re=class extends Error{constructor(r,o,n){super(r);this.statusCode=o;this.userMessage=n;this.name="GracefulShutdownError"}},Et=e=>e instanceof re;var Se=ye.env.NETLIFY_API_URL,Ne=ye.env.NETLIFY_API_TOKEN,B=_("api"),Ae=()=>ye.env.NETLIFY_LOCAL_MODE==="true",_e=async(e,t={})=>{if(!Se||!Ne)throw new Error("No API URL or token");let r=new URL(e,Se),o={...t,headers:{...t.headers,Authorization:`Bearer ${Ne}`}};ye.env.AGENT_RUNNERS_DEBUG==="true"&&(o.headers["x-nf-debug-logging"]="true"),t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),i=n.ok&&n.status<=299;if(ye.env.AGENT_RUNNERS_DEBUG==="true")B.log(`Response headers for ${r}:`),n.headers.forEach((u,l)=>{B.log(` ${l}: ${u}`)});else{let u=n.headers.get("x-request-id")||n.headers.get("x-nf-request-id");B.log(`Request ID for ${r}: ${u||"N/A"}`)}if(i||B.error(`Got status ${n.status} for request ${r}`),t.raw){if(!i)throw new Error(`API request failed: ${n.status} ${n.statusText}`);return n}let s=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!i){let u=typeof s=="string"?s:JSON.stringify(s);throw n.status===404?new re(`API request failed: 404 - ${u}`,404,"The site associated with this agent run no longer exists."):n.status===403&&t.gracefulOn403?new re(`API request failed: 403 - ${u}`,403,"Credit limit reached. Please add more credits to continue using Agent Runners."):new Error(`API request failed: ${n.status} - ${u}`)}return s},wt=e=>{B.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(Se=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(Ne=e.constants.NETLIFY_API_TOKEN)},Tt=()=>({apiUrl:Se,token:Ne}),Ee=async(e,t)=>Ae()?(B.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):_e(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),H=async(e,t,r)=>Ae()?(B.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):_e(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var xt=async(e,t)=>Ae()?(B.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):_e(`/api/v1/agent_runners/${e}/sessions/${t}`),It=(e,t,r)=>_e(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r},gracefulOn403:!0}),vt=async(e,t)=>Ae()?(B.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):_e(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),Be=async(e,t)=>{B.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var ne=_("ai_gateway"),He=null;var Rt=async()=>{if(He)return He;ne.log("Fetching available AI gateway providers");let e=await fetch(`${Tt().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return He=t,ne.log("Cached AI gateway 
providers",{providerCount:Object.keys(t.providers).length}),t},vr=async(e,t)=>{let o=(await Rt()).providers[e];if(!o)return ne.log(`Provider '${e}' not found`),!1;let n=o.models.includes(t);return ne.log(`Model validation for ${e}/${t}`,{isAvailable:n}),n},St=async({netlify:e,config:t})=>{let r,o,n,i,s=e.constants?.SITE_ID;if(!s)throw new Error("No site id");let u=async()=>{clearTimeout(n),ne.log("Requesting AI gateway information");let l=await It(s,t.id,t.sessionId);if({token:r,url:i}=l,o=l.expires_at?l.expires_at*1e3:void 0,ne.log("Got AI gateway information",{token:!!r,expiresAt:o,url:i}),o){let p=o-Date.now()-6e4;p>0&&(n=setTimeout(()=>{u()},p))}};return await Promise.all([u(),Rt()]),{get url(){return i},get token(){return r},isModelAvailableForProvider:vr}};import q from"process";import W from"path";import be from"fs";import{fileURLToPath as Pr}from"url";import{createRequire as Or}from"module";import{execa as Fr,execaCommand as ho}from"execa";import{Transform as Rr}from"stream";var Sr=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),Nr=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function Ar(){return Object.entries(process.env).filter(([e,t])=>!(!t||Sr.has(e)||Nr.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function K(e){if(typeof e!="string")return e;let t=Ar();if(t.length===0)return e;let r=e;return t.forEach(o=>{let n=new RegExp(br(o),"g");r=r.replace(n,"******")}),r}function br(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var oe=class extends Rr{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,o){let n=t.toString(),i=K(n);o(null,i)}};function Nt(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(o,n,i){let s=typeof o=="string"?K(o):o;return typeof n=="function"?t(s,n):t(s,n,i)},process.stderr.write=function(o,n,i){let s=typeof o=="string"?K(o):o;return typeof n=="function"?r(s,n):r(s,n,i)}}var we=null,At=e=>(we&&we.destroy(),we=new Z({totalAllowedTime:e}),we),bt=()=>we;var Z=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,o)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let n=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),i=null,s=null;o!==void 0&&(s=new Promise((u,l)=>{i=setTimeout(()=>{l(new Error(`${t} stage exceeded its maximum duration of ${o}ms`))},o)}));try{return s?await Promise.race([r(),s]):await r()}finally{n(),i&&clearTimeout(i)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var Ct="netlify-agent-runner-context.md",qe="task-history",Ke="netlify-context",D=".netlify",Q="results.md",We="assets",Je="other",Ve="personal";var Xe="enterprise",ze="free",Pt=[Ve,"pro",Xe,ze],ee=1800*1e3;var Ot={name:"@netlify/agent-runner-cli",type:"module",version:"1.60.4",description:"CLI tool for running Netlify agents",main:"./dist/index.js",types:"./dist/index.d.ts",exports:"./dist/index.js",bin:{"agent-runner-cli":"./dist/bin.js","agent-runner-cli-local":"./dist/bin-local.js"},files:["dist/**/*.js","dist/**/*.d.ts","patches","scripts"],scripts:{build:"tsup",dev:"tsup --watch",prepare:"husky install node_modules/@netlify/eslint-config-node/.husky/",prepublishOnly:"npm ci && npm test",prepack:"npm run build",test:"run-s build format test:dev",format:"run-s build format:check-fix:*","format:ci":"run-s build format:check:*","format:check-fix:lint":"run-e format:check:lint format:fix:lint","format:check:lint":"cross-env-shell eslint $npm_package_config_eslint","format:fix:lint":"cross-env-shell eslint --fix $npm_package_config_eslint","format:check-fix:prettier":"run-e format:check:prettier format:fix:prettier","format:check:prettier":"cross-env-shell prettier --check $npm_package_config_prettier","format:fix:prettier":"cross-env-shell prettier --write $npm_package_config_prettier","test:dev":"run-s build test:dev:*","test:ci":"run-s build test:ci:*","test:dev:vitest":"LOG=0 vitest --exclude '**/integration/**'","test:ci:vitest":"LOG=0 c8 -r lcovonly -r text -r json vitest --exclude '**/integration/**'","test:integration":"vitest run test/integration/","test:integration:codex":"vitest run test/integration/codex.test.ts","test:integration:claude":"vitest run test/integration/claude.test.ts","test:integration:gemini":"vitest run test/integration/gemini.test.ts",postinstall:"node scripts/postinstall.js"},config:{eslint:'--cache --format=codeframe --max-warnings=0 
"{src,scripts,test,.github}/**/*.{js,ts,md,html}"',prettier:'--ignore-path .gitignore --loglevel=warn "{src,scripts,test,.github}/**/*.{js,ts,md,yml,json,html}" "*.{js,ts,yml,json,html}" ".*.{js,ts,yml,json,html}" "!**/package-lock.json" "!package-lock.json"'},keywords:[],license:"MIT",repository:"netlify/agent-runner-cli",bugs:{url:"https://github.com/netlify/agent-runner-cli/issues"},author:"Netlify Inc.",directories:{test:"test"},devDependencies:{"@commitlint/cli":"^20.0.0","@commitlint/config-conventional":"^20.0.0","@eslint/compat":"^2.0.0","@eslint/js":"^9.35.0","@netlify/eslint-config-node":"^7.0.1","@types/node":"^24.5.0","@typescript-eslint/eslint-plugin":"^8.0.0","@typescript-eslint/parser":"^8.0.0","@vitest/eslint-plugin":"^1.6.6",c8:"^10.0.0","eslint-config-prettier":"^10.1.8","eslint-plugin-n":"^17.0.0",husky:"^9.0.0","patch-package":"^8.0.0",tsup:"^8.5.0",typescript:"^5.0.0","typescript-eslint":"^8.44.0",vitest:"^4.0.16"},dependencies:{"@anthropic-ai/claude-code":"2.1.14","@google/gemini-cli":"0.25.0","@netlify/otel":"^5.1.1","@openai/codex":"0.88.0","@opentelemetry/exporter-trace-otlp-grpc":"^0.208.0",execa:"^9.6.1",minimist:"^1.2.8"}};var $r=Pr(import.meta.url),Lr=W.dirname($r),Dr=Or(import.meta.url),ie=_("shell"),Ze=new Set,kr={preferLocal:!0},F=(e,t,r)=>{let[o,n]=Ur(t,r),i={...kr,...n},s=Fr(e,o,i);Mr(s,i),jr(s);let u=r?.idleTimeout;return u&&u>0&&Gr(s,u),s};var Ur=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},Mr=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(q.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new oe).pipe(q.stdout),e.stdout?.pipe(new oe).pipe(q.stdout),e.stderr?.pipe(new oe).pipe(q.stderr);return}e.stdout?.pipe(q.stdout),e.stderr?.pipe(q.stderr)},Qe=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(q.kill(-e.pid,t),ie.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return ie.error("Error killing process:",r),!1}},Ft=e=>Qe(e,"SIGKILL"),Gr=(e,t)=>{let r=null,o=()=>{ie.log(`Process ${e.pid} killed due to idle timeout (no output for ${t}ms)`),Qe(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(ie.log(`Force killing idle process ${e.pid}`),Ft(e))},5e3)},n=()=>{r&&clearTimeout(r),r=setTimeout(o,t)};n(),e.stdout?.on("data",n),e.stderr?.on("data",n);let i=()=>{r&&(clearTimeout(r),r=null)};e.on("exit",i),e.on("error",i)},jr=e=>{Ze.add(e);let t=bt();if(t){let r=t.onTimesUp(()=>{ie.log(`Global timer expired, killing process ${e.pid}`),Qe(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(ie.log(`Force killing process ${e.pid} after timeout`),Ft(e))},5e3)});e.on("exit",()=>{Ze.delete(e),r()}),e.on("error",()=>{Ze.delete(e),r()})}};function se(e,t){if(!q.env.NETLIFY_LOCAL_MODE)try{let n=Dr.resolve(Ot.name),i=W.dirname(n);for(;i!==W.dirname(i);){let s=W.dirname(i);if(W.basename(s)==="node_modules"){let u=W.join(s,".bin",t);if(be.existsSync(u))return u;break}i=s}}catch(n){console.error("Could not resolve package.json",n)}if(q.env.NODE_PATH){let n=W.join(q.env.NODE_PATH,".bin",t);if(be.existsSync(n))return n}let r=W.join(e,"node_modules",".bin",t);if(be.existsSync(r))return r;let o=W.join(Lr,"..","node_modules",".bin",t);if(be.existsSync(o))return o}var $t=_("utils"),Yr=e=>new Promise(t=>{setTimeout(t,e)}),Lt=(e,t=3e3)=>{let r=!1,o=null,n=[],i=null,s=(...u)=>{if(r)return o=u,new Promise(f=>{n.push(f)});r=!0;let l,p=new Promise(f=>{l=f});return i=(async()=>{await Promise.resolve();let f=await e(...u);for(l(f);;){if(await Yr(t),!o)return r=!1,i=null,f;let c=o,m=n;o=null,n=[],f=await 
e(...c),m.forEach(h=>{h(f)})}})(),p};return s.flush=async()=>{if((r||o)&&i)return await i,s.flush()},s},ae=(e,t,r=!1)=>{let o=null,n=null,i=null,s=function(...u){n=u,i=this;let l=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(i,n),n=null,i=null)},t),l&&(e.apply(i,n),n=null,i=null)};return s.cancel=()=>{clearTimeout(o),o=null,n=null,i=null},s.flush=()=>{if(o){clearTimeout(o);let u=n,l=i;o=null,n=null,i=null,e.apply(l,u)}},s},Ce=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(o){t&&(r?.error?r.error("Could not parse JSON",o):$t.error("Could not parse JSON",o))}},Dt=(e,t)=>{let n=".netlify.app",i="agent-";if(!t)return`${i}${e.slice(0,6)}`;let u=`--${t}${n}`;if(u.length>55)return"";let l=60-u.length;if(l<=0)return"";if(l>=i.length+6){let p=Math.min(l-i.length,e.length);return`${i}${e.slice(0,p)}`}return e.slice(0,l)},Br=e=>!e||typeof e!="object"||Array.isArray(e)||Object.keys(e).length===0?!1:!!Pt.some(t=>t in e),kt=()=>{let e={},t={codex:process.env.NETLIFY_FF_AGENT_RUNNER_CODEX_VERSION,claude:process.env.NETLIFY_FF_AGENT_RUNNER_CLAUDE_VERSION,gemini:process.env.NETLIFY_FF_AGENT_RUNNER_GEMINI_VERSION};return Object.entries(t).forEach(([r,o])=>{if(o){let n=`NETLIFY_FF_AGENT_RUNNER_${r.toUpperCase()}_VERSION`;try{let i=JSON.parse(o);Br(i)&&(e[r]=i)}catch(i){let u=i instanceof SyntaxError?"Invalid JSON":i.message;$t.error(`Could not parse ${r} model version override from ${n}: ${u}`)}}}),e},Hr=50*1024,et=(e,t=Hr)=>{if(!e||typeof e!="string"||e.length<=t)return e;let o=e.startsWith("```")?"\n... [truncated]\n```":"... [truncated]";return e.slice(0,t)+o};import{Buffer as Ut}from"buffer";import qr from"path";var Mt=_("repo"),Gt=async({config:e,isRetry:t})=>{Mt.info("Getting runner diffs");let r=await Wr(),{hasChanges:o}=r,{status:n}=r;if(!o)return{hasChanges:!1};if(!t){let E=Jr(n);await Vr(E)}Mt.info("Changes after processing"),await rt();let i=await nt(n);await tt(i);let s={stdio:["ignore","pipe","pipe"]},u=await F("git",["diff","--staged"],s),l=String(u.stdout??"");if(o=!!l,!o)return{hasChanges:!1,ignored:i};let p=await F("git",["diff","--staged","--binary"],s),f=String(p.stdout??""),c,m;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await F("git",["commit","-m","Agent runner"]);let E=await F("git",["diff",e.sha,"HEAD"],s);c=String(E.stdout??"");let T=await F("git",["diff",e.sha,"HEAD","--binary"],s),g=String(T.stdout??"");c!==g&&(m=Ut.from(g).toString("base64"))}let h={hasChanges:!0,diff:l,resultDiff:c,ignored:i};return l!==f&&(h.diffBinary=Ut.from(f).toString("base64")),m&&(h.resultDiffBinary=m),h},tt=async(e=[])=>{await F("git",["add",".",...e])},rt=async()=>{let e=await F("git",["status","-s"]);return String(e.stdout??"")},jt=/.. (.+)?\.log$/,Kr=[jt],Wr=async()=>{let e=await rt();return{hasChanges:(e.trim().length===0?[]:e.split(`
9
+ `).filter(o=>Kr.some(i=>i instanceof RegExp?i.test(o):o===i)?!1:o[1]?.trim()!=="")).length!==0,status:e}},Yt=async()=>{let{stdout:e}=await F("git",["rev-parse","HEAD"]);return String(e??"").trim()},Bt=async()=>{let{stdout:e}=await F("git",["rev-list","--max-parents=0","HEAD"]);return String(e??"").trim()},nt=async e=>{e||=await rt();let t=[".netlify","node_modules","dist"],r=[];return e.split(`
10
+ `).forEach(o=>{t.forEach(i=>{[`?? ${i}`,`?? ${i}${qr.sep}`].some(u=>o.startsWith(u))&&r.push(`:!${i}`)});let n=o.match(jt)?.[1];n&&r.push(`:!${n}.log`)}),r},Ht=async()=>{await F("git",["reset","--hard","HEAD"])},Jr=e=>{let t=e.split(`
11
+ `).reduce((r,o)=>{if(!o)return r;let[n,i,,...s]=o,u=s.join(""),l=n.trim(),p=i.trim();return r[u]?r[u].change=p:r[u]={filePath:u,stage:l,change:p},r},{});return Object.values(t)},Vr=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push(F("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import zr from"fs/promises";import Zr from"os";import Wt from"path";import de from"process";import Qr from"readline";import ot from"path";import Xr from"fs/promises";var it=_("agent-output-utils");async function le({initialResult:e,agentName:t,hasError:r}){let o="",n=ot.join(process.cwd(),D,Q);try{let i=await Xr.readFile(n,"utf-8");i&&(o=i,it.log(`Pulled result from ${ot.relative(process.cwd(),n)}`))}catch{it.log(`No results file found at ${ot.relative(process.cwd(),n)}`)}return o||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function ue({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,o=r?.replace(/\s+/g," ").trim().toLowerCase()||"",n="";return o?.includes("ai gateway is not available for your account")||o?.includes("ai gateway is not enabled for your account")?n="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":o?.includes("error when talking to gemini api")?n="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(o?.includes("connection closed prematurely")||o?.includes("499")&&t.toLowerCase().includes("gemini"))&&(n=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),o?.includes("request timed out")&&(n=`The ${t} API request's have timed out. Please try again or use a different available agent.`),o?.includes("network error")&&(n=`The ${t} agent is having network issues. Please try again or use a different available agent.`),o?.includes("503")&&(n=`The ${t} API is currently experiencing high load. 
Retrying automatically...`),n&&it.log(`Providing updated error messsage: ${n}, replacing original error: ${r}`),n||r||void 0}function ce(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error")||r?.includes("503"))}var k=_("runner_claude"),qt="Claude Code",pe="claude-opus-4-5-20251101",Kt=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,en=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(k.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(k.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(k.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function st({config:e,netlify:t,persistSteps:r,aiGateway:o,continueSession:n,priorAgentSessionId:i,cwd:s=de.cwd()}){let u=e,{accountType:l,prompt:p,modelVersionOverrides:f}=u,{model:c}=u,m="";if(o){let{token:y,url:d}=o;if(!y||!d)throw new Error("No token or url provided from AI Gateway");if(f?.claude){let a=f?.claude?.[l];if(a){if(!await o.isModelAvailableForProvider("anthropic",a))throw new Error(`Model override '${a}' is not available for anthropic provider`);c=a}}else if(c){if(!await o.isModelAvailableForProvider("anthropic",c))throw new Error(`Model '${c}' is not available for anthropic provider`)}else!!pe&&await o.isModelAvailableForProvider("anthropic",pe)?(c=pe,k.log(`Using default model: ${pe}`)):pe&&k.log(`Default model ${pe} is not available, proceeding without model specification`);de.env.ANTHROPIC_API_KEY=y,de.env.ANTHROPIC_BASE_URL=d}else if(!de.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let h=[],E=[],T={},g=0,x=0,R,S,b=[se(s,"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose","--disallowed-tools","ExitPlanMode,AskUserQuestion",...c?["--model",c]:[],...n?["--continue"]:[],...n&&i?["--resume",i]:[],"-p",p],C=`${de.env.NVM_BIN}/node`;k.log(`Running ${C} ${b.join(" ")}`);let O=t.utils.run(C,b,{all:!0,env:de.env,cwd:s,idleTimeout:ee});O.stdin?.end();let I=ae(()=>{r?.({steps:h,duration:x})},250),v=(y,d)=>{let a={...y,id:g};g+=1,E.push(a),h.push(a),d||I.flush(),I(),d&&I.flush()},M=Qr.createInterface({input:O.all});return M.on("error",y=>{k.error("Readline interface error",{error:y.message,stack:y.stack})}),M.on("line",y=>{let d=null;try{d=JSON.parse(y)}catch{k.log("Could not parse line",y)}d?.session_id&&d.session_id!==m&&(m=d.session_id),Array.isArray(d?.message?.content)?d.message.content.forEach(a=>{switch(a.type){case"text":{a.text&&v({message:a.text});break}case"image":{typeof 
a.source=="object"&&a.source&&a.source.type==="base64"&&a.source.media_type?v({message:`![](data:${a.source.media_type};base64,${a.source.data})`}):k.log(`Unsupported image type ${a.source?.type}`,a.source);break}case"tool_use":{if(a.name==="Task"){let w=a.input?.description&&`\`${a.input.description}\``;v({title:[Kt(a.name),w].filter(Boolean).join(" ")})}else a.id&&(T[a.id]=a);I.flush();break}case"tool_result":{let w=a.tool_use_id?T[a.tool_use_id]:void 0,X;if(w){let J=w.input?.file_path&&Wt.relative(s,w.input.file_path),P=J&&`\`${J}\``;X=[Kt(w.name||""),P].filter(Boolean).join(" ")}let Re=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(w?.name||""),z;if(typeof a.content=="string")z=a.content;else if(Array.isArray(a.content)){let J=[];a.content.forEach(P=>{P?.type==="text"&&typeof P.text=="string"?J.push(P.text):P?.type==="image"&&typeof P.source=="object"&&P.source?P.source.type==="base64"&&P.source.media_type?J.push(`![](data:${P.source.media_type};base64,${P.source.data})`):k.log(`Unsupported image type ${P.source.type}`,P.source):k.log(`Unsupported block type ${P?.type}`)}),z=J.join(`
12
12
 
13
- `)}ve&&z&&(z=`\`\`\`
13
+ `)}Re&&z&&(z=`\`\`\`
14
14
  ${z.trim()}
15
- \`\`\``),R({title:X,message:z},!0);break}case"thinking":{a.thinking&&R({title:"Thinking",message:a.thinking},!0);break}default:k.log(`Message content type is not supported ${a.type}`,a)}}):d?.type==="result"&&(I=d.duration_ms||0,d.is_error?N=d.result:v=d.result,[T,h].forEach(a=>{a[a.length-1]?.message===v&&a.pop()}))}),await O.catch(y=>{({error:N,result:v}=en({catchError:y,runCmd:O,error:N,result:v,runnerName:"Claude"}))}),G.close(),x.flush(),{steps:T,duration:I,result:await le({initialResult:v,agentName:qt,hasError:!!N}),error:ue({error:N,agentName:qt}),isRetryableError:ce(N),agentSessionId:m}}var Vt=async()=>{let e=Kt.join(Zr.homedir(),".claude");await zr.rm(e,{recursive:!0,force:!0})};import we from"fs/promises";import Xt from"os";import Pe from"path";import te from"process";import tn from"readline";var U=_("runner_codex"),Jt="Codex CLI",fe="",rn=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(U.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(U.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(U.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function at({config:e,netlify:t,persistSteps:r=void 0,sendSteps:o=void 0,aiGateway:n,cwd:i=te.cwd()}){let{accountType:s,prompt:u,modelVersionOverrides:l}=e,{model:p}=e;if(n){let{token:d,url:a}=n;if(!d||!a)throw new Error("No token or url provided from AI Gateway");if(l?.codex){let w=l?.codex?.[s];if(w){if(!await n.isModelAvailableForProvider("openai",w))throw new Error(`Model override '${w}' is not available for openai provider`);p=w}}else if(p){if(!await n.isModelAvailableForProvider("openai",p))throw new Error(`Model '${p}' is not available for openai provider`)}else!!fe&&await n.isModelAvailableForProvider("openai",fe)?(p=fe,U.log(`Using default model: ${fe}`)):fe&&U.log(`Default model ${fe} is not available, proceeding without model specification`);te.env.OPENAI_API_KEY=d,te.env.OPENAI_BASE_URL=a}else if(!te.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let f=[],c=[],m=[],h={},T=0,E=0,g,I,v=`${te.env.NVM_BIN}/node`,N=Pe.join(Xt.homedir(),".codex"),b=Pe.join(N,"config.toml"),C=Pe.join(N,"auth.json");try{await we.mkdir(N,{recursive:!0});let d={OPENAI_API_KEY:te.env.OPENAI_API_KEY};await we.writeFile(C,JSON.stringify(d,null,2),"utf-8"),U.log("Created Codex auth.json file");let a="";try{a=await we.readFile(b,"utf-8")}catch{}a.includes("web_search_request")||(a.includes("[features]")?a=a.replace(/\[features\]/,`[features]
15
+ \`\`\``),v({title:X,message:z},!0);break}case"thinking":{a.thinking&&v({title:"Thinking",message:a.thinking},!0);break}default:k.log(`Message content type is not supported ${a.type}`,a)}}):d?.type==="result"&&(x=d.duration_ms||0,d.is_error?S=d.result:R=d.result,[E,h].forEach(a=>{a[a.length-1]?.message===R&&a.pop()}))}),await O.catch(y=>{({error:S,result:R}=en({catchError:y,runCmd:O,error:S,result:R,runnerName:"Claude"}))}),M.close(),I.flush(),{steps:E,duration:x,result:await le({initialResult:R,agentName:qt,hasError:!!S}),error:ue({error:S,agentName:qt}),isRetryableError:ce(S),agentSessionId:m}}var Jt=async()=>{let e=Wt.join(Zr.homedir(),".claude");await zr.rm(e,{recursive:!0,force:!0})};import Te from"fs/promises";import Xt from"os";import Pe from"path";import te from"process";import tn from"readline";var U=_("runner_codex"),Vt="Codex CLI",fe="",rn=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(U.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(U.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(U.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function at({config:e,netlify:t,persistSteps:r=void 0,sendSteps:o=void 0,aiGateway:n,cwd:i=te.cwd()}){let{accountType:s,prompt:u,modelVersionOverrides:l}=e,{model:p}=e;if(n){let{token:d,url:a}=n;if(!d||!a)throw new Error("No token or url provided from AI Gateway");if(l?.codex){let w=l?.codex?.[s];if(w){if(!await n.isModelAvailableForProvider("openai",w))throw new Error(`Model override '${w}' is not available for openai provider`);p=w}}else if(p){if(!await n.isModelAvailableForProvider("openai",p))throw new Error(`Model '${p}' is not available for openai provider`)}else!!fe&&await n.isModelAvailableForProvider("openai",fe)?(p=fe,U.log(`Using default model: ${fe}`)):fe&&U.log(`Default model ${fe} is not available, proceeding without model specification`);te.env.OPENAI_API_KEY=d,te.env.OPENAI_BASE_URL=a}else if(!te.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let f=[],c=[],m=[],h={},E=0,T=0,g,x,R=`${te.env.NVM_BIN}/node`,S=Pe.join(Xt.homedir(),".codex"),b=Pe.join(S,"config.toml"),C=Pe.join(S,"auth.json");try{await Te.mkdir(S,{recursive:!0});let d={OPENAI_API_KEY:te.env.OPENAI_API_KEY};await Te.writeFile(C,JSON.stringify(d,null,2),"utf-8"),U.log("Created Codex auth.json file");let a="";try{a=await Te.readFile(b,"utf-8")}catch{}a.includes("web_search_request")||(a.includes("[features]")?a=a.replace(/\[features\]/,`[features]
16
16
  web_search_request = true`):a+=`
17
17
  [features]
18
18
  web_search_request = true
19
- `,await we.writeFile(b,a,"utf-8"),U.log("Updated Codex config with web_search_request enabled"))}catch(d){throw U.warn("Failed to setup Codex config and credentials",{error:d.message}),new Error(`Codex setup failed: ${d.message}`)}let O=[se(i,"codex"),"exec","--yolo","--json","--enable","web_search_request",...p?["--model",p]:[],u].filter(Boolean);U.log(`Running ${v} ${O.join(" ")}`);let x=t.utils.run(v,O,{all:!0,cwd:i,env:{...te.env},idleTimeout:ee}),R=ae(()=>{r?.({steps:f,duration:E}),o?.({steps:c,duration:E}),c=[]},250),G=(d,a)=>{d.id=T,T+=1,m.push(d),f.push(d),c.push(d),a||R.flush(),R(),a&&R.flush()},y=tn.createInterface({input:x.all});return y.on("error",d=>{U.error("Readline interface error",{error:d.message,stack:d.stack})}),y.on("line",d=>{let a=null;try{a=JSON.parse(d)}catch{U.log("Could not parse line",d);return}if(a?.duration_ms&&(E=a.duration_ms),a?.type==="item.started"&&a?.item?.type==="command_execution")h[a.item.id]=a.item;else if(a?.type==="item.completed"&&a?.item?.type==="command_execution"){let w=on(a.item);w&&G(w,!0)}else if(a?.type==="item.completed"&&a?.item?.type==="reasoning"){let w={title:"Reasoning",message:a.item.text};G(w,!0)}else if(a?.type==="local_shell_call")h[a.call_id]=a;else if(a?.type==="local_shell_call_output"){let w=sn(h[a.call_id],a);w&&G(w,!0)}else a?.type==="message"&&a.role==="assistant"?g=a.content.map(w=>w.text).join(`
20
- `):a?.type==="message"&&a.role==="system"&&(I=a.content.map(w=>w.text).join(`
21
- `))}),await x.catch(d=>{let a=rn({catchError:d,runCmd:x,error:I,result:g,runnerName:"Codex"});I=a.error,g=a.result}),y.close(),R.flush(),{steps:m,duration:E,result:await le({initialResult:g,agentName:Jt,hasError:!!I}),error:ue({error:I,agentName:Jt}),isRetryableError:ce(I)}}var zt=async()=>{let e=Pe.join(Xt.homedir(),".codex");await we.rm(e,{recursive:!0,force:!0})},nn=new Set(["bash","-lc"]),on=e=>{if(!e||e.type!=="command_execution")return null;let t=e.command;t.startsWith("bash -lc ")&&(t=t.replace(/^bash -lc ['"]/,"").replace(/['"]$/,""));let r=`Running \`${t}\``,o=e.aggregated_output?.trim();return o&&(o=`\`\`\`
19
+ `,await Te.writeFile(b,a,"utf-8"),U.log("Updated Codex config with web_search_request enabled"))}catch(d){throw U.warn("Failed to setup Codex config and credentials",{error:d.message}),new Error(`Codex setup failed: ${d.message}`)}let O=[se(i,"codex"),"exec","--yolo","--json","--enable","web_search_request",...p?["--model",p]:[],u].filter(Boolean);U.log(`Running ${R} ${O.join(" ")}`);let I=t.utils.run(R,O,{all:!0,cwd:i,env:{...te.env},idleTimeout:ee}),v=ae(()=>{r?.({steps:f,duration:T}),o?.({steps:c,duration:T}),c=[]},250),M=(d,a)=>{d.id=E,E+=1,m.push(d),f.push(d),c.push(d),a||v.flush(),v(),a&&v.flush()},y=tn.createInterface({input:I.all});return y.on("error",d=>{U.error("Readline interface error",{error:d.message,stack:d.stack})}),y.on("line",d=>{let a=null;try{a=JSON.parse(d)}catch{U.log("Could not parse line",d);return}if(a?.duration_ms&&(T=a.duration_ms),a?.type==="item.started"&&a?.item?.type==="command_execution")h[a.item.id]=a.item;else if(a?.type==="item.completed"&&a?.item?.type==="command_execution"){let w=on(a.item);w&&M(w,!0)}else if(a?.type==="item.completed"&&a?.item?.type==="reasoning"){let w={title:"Reasoning",message:a.item.text};M(w,!0)}else if(a?.type==="local_shell_call")h[a.call_id]=a;else if(a?.type==="local_shell_call_output"){let w=sn(h[a.call_id],a);w&&M(w,!0)}else a?.type==="message"&&a.role==="assistant"?g=a.content.map(w=>w.text).join(`
20
+ `):a?.type==="message"&&a.role==="system"&&(x=a.content.map(w=>w.text).join(`
21
+ `))}),await I.catch(d=>{let a=rn({catchError:d,runCmd:I,error:x,result:g,runnerName:"Codex"});x=a.error,g=a.result}),y.close(),v.flush(),{steps:m,duration:T,result:await le({initialResult:g,agentName:Vt,hasError:!!x}),error:ue({error:x,agentName:Vt}),isRetryableError:ce(x)}}var zt=async()=>{let e=Pe.join(Xt.homedir(),".codex");await Te.rm(e,{recursive:!0,force:!0})},nn=new Set(["bash","-lc"]),on=e=>{if(!e||e.type!=="command_execution")return null;let t=e.command;t.startsWith("bash -lc ")&&(t=t.replace(/^bash -lc ['"]/,"").replace(/['"]$/,""));let r=`Running \`${t}\``,o=e.aggregated_output?.trim();return o&&(o=`\`\`\`
22
22
  ${o}
23
23
  \`\`\``),e.status==="failed"&&e.exit_code!==0&&(o=o?`${o}
24
24
 
25
25
  *Exit code: ${e.exit_code}*`:`*Command failed with exit code: ${e.exit_code}*`),{title:r,message:o}},sn=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(i=>!nn.has(i)),o=r?`Running \`${r.join(" ")}\``:void 0,n;try{n=JSON.parse(t.output).output?.trim(),n&&(n=`\`\`\`
26
26
  ${n.trim()}
27
- \`\`\``)}catch(i){U.error("Could not decode outputMsg",i,t.output)}return{title:o,message:n}};import Oe from"fs/promises";import Qt from"os";import Fe from"path";import me from"process";import an from"readline";var j=_("runner_gemini"),Zt="Gemini CLI",ge="",ln=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(j.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(j.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(j.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),un={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"},cn=async()=>{let e=Fe.join(Qt.homedir(),".gemini"),t=Fe.join(e,"settings.json");try{await Oe.mkdir(e,{recursive:!0});let r={};try{let o=await Oe.readFile(t,"utf-8");r=JSON.parse(o)}catch{j.log("Creating new Gemini CLI settings file")}r.general||(r.general={}),r.general.previewFeatures||(r.general.previewFeatures=!0),r.model||(r.model={}),r.model.compressionThreshold!==.3&&(r.model.compressionThreshold=.3),await Oe.writeFile(t,JSON.stringify(r,null,2),"utf-8"),j.log("Configured Gemini CLI settings (preview features and compression threshold)")}catch(r){j.error("Failed to ensure Gemini CLI settings",{error:r.message})}};async function lt({config:e,netlify:t,persistSteps:r=void 0,sendSteps:o=void 0,aiGateway:n,cwd:i=me.cwd()}){let{accountType:s,prompt:u,modelVersionOverrides:l}=e,{model:p}=e;if(await cn(),n){let{token:y,url:d}=n;if(!y||!d)throw new Error("No token or url provided from AI Gateway");if(l?.gemini){let a=l?.gemini?.[s];if(a){if(!await n.isModelAvailableForProvider("gemini",a))throw new Error(`Model override '${a}' is not available for gemini provider`);p=a}}if(!p)!!ge&&await n.isModelAvailableForProvider("gemini",ge)?(p=ge,j.log(`Using default model: ${ge}`)):ge&&j.log(`Default model ${ge} is not available, proceeding without model specification`);else if(p&&!l?.gemini?.[s]&&!await n.isModelAvailableForProvider("gemini",p))throw new Error(`Model '${p}' is not available for gemini provider`);me.env.GEMINI_API_KEY=y,me.env.GOOGLE_GEMINI_BASE_URL=d}else if(!me.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let f=[],c=[],m=[],h={},T=0,E=0,g,I,v=[se(i,"gemini"),...p?["--model",p]:[],"--yolo","--output-format","stream-json","-p",u],N=`${me.env.NVM_BIN}/node`;j.log(`Running ${N} ${v.join(" ")}`);let b=t.utils.run(N,v,{all:!0,env:me.env,cwd:i,idleTimeout:ee});b.stdin?.end();let C=ae(()=>{r?.({steps:f,duration:E}),o?.({steps:c,duration:E}),c=[]},250),O=(y,d)=>{y.id=T,T+=1,m.push(y),f.push(y),c.push(y),d||C.flush(),C(),d&&C.flush()},x=an.createInterface({input:b.all});x.on("error",y=>{j.error("Readline interface error",{error:y.message,stack:y.stack})});let R="",G=()=>{R&&O({message:R.trim()}),R=""};return x.on("line",y=>{let d=null;try{if(y.startsWith("[API Error")){let a=y.match(/\[api error: (.+?)]$/i)?.[1];d={type:"error",value:Ce(a,!1)?.error?.message||a||"Gemini encountered error"}}else 
d=JSON.parse(y)}catch{return}if(d)switch(["message","result"].includes(d.type)||G(),d.type){case"message":{d.role!=="user"&&d.content&&(R+=d.content);break}case"tool_use":{let a=un[d.tool_name]??d.tool_name,w=d.parameters?.file_path,X=w&&Fe.relative(i,w),ve=d.parameters?.command,V={title:[a,X&&`\`${X}\``,ve&&`\`${ve}\``].filter(Boolean).join(" ")};h[d.tool_id]=V,C.flush();break}case"tool_result":{let a=h[d.tool_id];a&&(d.output&&(a.message=`\`\`\`
27
+ \`\`\``)}catch(i){U.error("Could not decode outputMsg",i,t.output)}return{title:o,message:n}};import Oe from"fs/promises";import Qt from"os";import Fe from"path";import me from"process";import an from"readline";var G=_("runner_gemini"),Zt="Gemini CLI",ge="",ln=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(G.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(G.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(G.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),un={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"},cn=async()=>{let e=Fe.join(Qt.homedir(),".gemini"),t=Fe.join(e,"settings.json");try{await Oe.mkdir(e,{recursive:!0});let r={};try{let o=await Oe.readFile(t,"utf-8");r=JSON.parse(o)}catch{G.log("Creating new Gemini CLI settings file")}r.general||(r.general={}),r.general.previewFeatures||(r.general.previewFeatures=!0),r.model||(r.model={}),r.model.compressionThreshold!==.3&&(r.model.compressionThreshold=.3),await Oe.writeFile(t,JSON.stringify(r,null,2),"utf-8"),G.log("Configured Gemini CLI settings (preview features and compression threshold)")}catch(r){G.error("Failed to ensure Gemini CLI settings",{error:r.message})}};async function lt({config:e,netlify:t,persistSteps:r=void 0,sendSteps:o=void 0,aiGateway:n,cwd:i=me.cwd()}){let{accountType:s,prompt:u,modelVersionOverrides:l}=e,{model:p}=e;if(await cn(),n){let{token:y,url:d}=n;if(!y||!d)throw new Error("No token or url provided from AI Gateway");if(l?.gemini){let a=l?.gemini?.[s];if(a){if(!await n.isModelAvailableForProvider("gemini",a))throw new Error(`Model override '${a}' is not available for gemini provider`);p=a}}if(!p)!!ge&&await n.isModelAvailableForProvider("gemini",ge)?(p=ge,G.log(`Using default model: ${ge}`)):ge&&G.log(`Default model ${ge} is not available, proceeding without model specification`);else if(p&&!l?.gemini?.[s]&&!await n.isModelAvailableForProvider("gemini",p))throw new Error(`Model '${p}' is not available for gemini provider`);me.env.GEMINI_API_KEY=y,me.env.GOOGLE_GEMINI_BASE_URL=d}else if(!me.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let f=[],c=[],m=[],h={},E=0,T=0,g,x,R=[se(i,"gemini"),...p?["--model",p]:[],"--yolo","--output-format","stream-json","-p",u],S=`${me.env.NVM_BIN}/node`;G.log(`Running ${S} ${R.join(" ")}`);let b=t.utils.run(S,R,{all:!0,env:me.env,cwd:i,idleTimeout:ee});b.stdin?.end();let C=ae(()=>{r?.({steps:f,duration:T}),o?.({steps:c,duration:T}),c=[]},250),O=(y,d)=>{y.id=E,E+=1,m.push(y),f.push(y),c.push(y),d||C.flush(),C(),d&&C.flush()},I=an.createInterface({input:b.all});I.on("error",y=>{G.error("Readline interface error",{error:y.message,stack:y.stack})});let v="",M=()=>{v&&O({message:v.trim()}),v=""};return I.on("line",y=>{let d=null;try{if(y.startsWith("[API Error")){let a=y.match(/\[api error: (.+?)]$/i)?.[1];d={type:"error",value:Ce(a,!1)?.error?.message||a||"Gemini encountered error"}}else 
d=JSON.parse(y)}catch{return}if(d)switch(["message","result"].includes(d.type)||M(),d.type){case"message":{d.role!=="user"&&d.content&&(v+=d.content);break}case"tool_use":{let a=un[d.tool_name]??d.tool_name,w=d.parameters?.file_path,X=w&&Fe.relative(i,w),Re=d.parameters?.command,J={title:[a,X&&`\`${X}\``,Re&&`\`${Re}\``].filter(Boolean).join(" ")};h[d.tool_id]=J,C.flush();break}case"tool_result":{let a=h[d.tool_id];a&&(d.output&&(a.message=`\`\`\`
28
28
  ${d.output.trim()}
29
- \`\`\``),O(a,!0));break}case"result":{E=d.stats?.duration_ms,d.status==="error"?I=d.error?.message:g=R.trim();break}case"error":{I=d.error;break}case"finished":break;default:{j.warn("Unhandled message type:",d.type);break}}}),await b.catch(y=>{({error:I,result:g}=ln({catchError:y,runCmd:b,error:I,result:g,runnerName:"Gemini"}))}),x.close(),C.flush(),{steps:m,duration:E,result:await le({initialResult:g,agentName:Zt,hasError:!!I}),error:ue({error:I,agentName:Zt}),isRetryableError:ce(I)}}var er=async()=>{let e=Fe.join(Qt.homedir(),".gemini");await Oe.rm(e,{recursive:!0,force:!0})};var dn={codex:{runner:at,clean:zt},claude:{runner:st,clean:Vt},gemini:{runner:lt,clean:er}},tr=dn;var rr=_("init_stage"),nr=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:o})=>await A(pn(),"init-stage",async n=>{let i=performance.now();n?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.runnerVersion":o||"unknown"});let s=tr[e.runner];if(!s)throw n?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let u=fn({apiToken:r});Tt(u);let l=e.useGateway?await Nt({netlify:u,config:e}):void 0;n?.setAttributes({"init.aiGateway.created":!!l});let p=5*1024,f=Lt(async({steps:T=[],duration:E})=>{let g=T.map(I=>{let v=I.title?et(W(I.title),p):void 0,N=I.message?et(W(I.message)):void 0;return{...I,title:v,message:N}});T.length=0;try{return await H(e.id,e.sessionId,{steps:g,duration:E})}catch(I){rr.error("persistSteps failed",{error:I?.message||I})}},t);rr.info("Adding build files to stage");let c=await nt();await tt(c);let m;e.hasRepo?e.sha?(m=e.sha,n?.setAttributes({"init.sha.source":"provided"})):(m=await Yt(),await Ee(e.id,{sha:m}),n?.setAttributes({"init.sha.source":"current_commit"})):(m=await Bt(),n?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let h=performance.now()-i;return n?.setAttributes({"init.sha":m||"unknown","init.duration.ms":h,"init.status":"success"}),{aiGateway:l,context:u,persistSteps:f,runner:s,sha:m}}),fn=({apiToken:e})=>({constants:{NETLIFY_API_HOST:$e.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||$e.env.NETLIFY_API_TOKEN,SITE_ID:$e.env.SITE_ID,FUNCTIONS_DIST:$e.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:F}});import{getTracer as ut}from"@netlify/otel";import mn from"crypto";import J from"fs/promises";import L from"path";import Y from"process";var $=_("context"),gn=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:Y.env.NETLIFY_TEAM_ID,userId:Y.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:Y.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},hn=10,yn=async e=>{let{name:t,ext:r}=L.parse(e),o=e,n=L.join(Y.cwd(),D,o),i=0;for(;await _n(n);){if(i>=hn)throw new Error("Failed to generate context file");o=`${t}-${mn.randomUUID().slice(0,5)}${r}`,n=L.join(Y.cwd(),D,o),i+=1}return o},_n=async e=>{try{return await J.access(e),!0}catch{return!1}},En=async()=>{try{$.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return $.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(o=>o&&typeof o=="object"&&o.key==="catchall-consumer");return r?!r.contextScopes||typeof 
r.contextScopes!="object"?($.warn("Catchall consumer missing or invalid contextScopes"),null):r:($.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?$.warn("Netlify features context request timed out"):$.warn("Failed to fetch Netlify features context:",e.message),null}},Tn=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let o=await r.text();return await J.writeFile(t,o,"utf-8"),!0}catch(r){return r.name==="AbortError"?$.warn(`Download timeout for ${e}`):$.warn(`Failed to download context file ${e}:`,r.message),!1}},Le=null,wn=async()=>{if(Le)return Le;let e=await En();if(!e)return[];let t=L.join(Y.cwd(),D,We);await J.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([n,i])=>{if(!i||typeof i!="object"||!i.endpoint||!i.scope)return $.warn(`Invalid scope data for ${n}, skipping...`),null;let s=`${n}.md`,u=L.join(t,s),l=L.join(D,We,s);return $.log(`Downloading ${i.scope} context...`),await Tn(i.endpoint,u)?($.log(`Downloaded: ${l}`),{scope:i.scope,path:l,key:n}):null});return Le=(await Promise.all(r)).filter(n=>n!==null),Le},or=async({cliPath:e,netlify:t,config:r,buildErrorContext:o})=>{let n=gn(t),i=await yn(Ct),s=L.join(Y.cwd(),D);await J.mkdir(s,{recursive:!0});let u=L.join(D,i),l=L.join(Y.cwd(),u),p=L.join(Y.cwd(),D,Q);try{await J.unlink(p),$.log(`Deleted old results file: ${p}`)}catch{}let f=o?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
29
+ \`\`\``),O(a,!0));break}case"result":{T=d.stats?.duration_ms,d.status==="error"?x=d.error?.message:g=v.trim();break}case"error":{x=d.error;break}case"finished":break;default:{G.warn("Unhandled message type:",d.type);break}}}),await b.catch(y=>{({error:x,result:g}=ln({catchError:y,runCmd:b,error:x,result:g,runnerName:"Gemini"}))}),I.close(),C.flush(),{steps:m,duration:T,result:await le({initialResult:g,agentName:Zt,hasError:!!x}),error:ue({error:x,agentName:Zt}),isRetryableError:ce(x)}}var er=async()=>{let e=Fe.join(Qt.homedir(),".gemini");await Oe.rm(e,{recursive:!0,force:!0})};var dn={codex:{runner:at,clean:zt},claude:{runner:st,clean:Jt},gemini:{runner:lt,clean:er}},tr=dn;var rr=_("init_stage"),nr=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:o})=>await N(pn(),"init-stage",async n=>{let i=performance.now();n?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.runnerVersion":o||"unknown"});let s=tr[e.runner];if(!s)throw n?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let u=fn({apiToken:r});wt(u);let l=e.useGateway?await St({netlify:u,config:e}):void 0;n?.setAttributes({"init.aiGateway.created":!!l});let p=5*1024,f=Lt(async({steps:E=[],duration:T})=>{let g=E.map(x=>{let R=x.title?et(K(x.title),p):void 0,S=x.message?et(K(x.message)):void 0;return{...x,title:R,message:S}});E.length=0;try{return await H(e.id,e.sessionId,{steps:g,duration:T})}catch(x){rr.error("persistSteps failed",{error:x?.message||x})}},t);rr.info("Adding build files to stage");let c=await nt();await tt(c);let m;e.hasRepo?e.sha?(m=e.sha,n?.setAttributes({"init.sha.source":"provided"})):(m=await Yt(),await Ee(e.id,{sha:m}),n?.setAttributes({"init.sha.source":"current_commit"})):(m=await Bt(),n?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let h=performance.now()-i;return n?.setAttributes({"init.sha":m||"unknown","init.duration.ms":h,"init.status":"success"}),{aiGateway:l,context:u,persistSteps:f,runner:s,sha:m}}),fn=({apiToken:e})=>({constants:{NETLIFY_API_HOST:$e.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||$e.env.NETLIFY_API_TOKEN,SITE_ID:$e.env.SITE_ID,FUNCTIONS_DIST:$e.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:F}});import{getTracer as ut}from"@netlify/otel";import mn from"crypto";import V from"fs/promises";import L from"path";import j from"process";var $=_("context"),gn=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:j.env.NETLIFY_TEAM_ID,userId:j.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:j.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},hn=10,yn=async e=>{let{name:t,ext:r}=L.parse(e),o=e,n=L.join(j.cwd(),D,o),i=0;for(;await _n(n);){if(i>=hn)throw new Error("Failed to generate context file");o=`${t}-${mn.randomUUID().slice(0,5)}${r}`,n=L.join(j.cwd(),D,o),i+=1}return o},_n=async e=>{try{return await V.access(e),!0}catch{return!1}},En=async()=>{try{$.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return $.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(o=>o&&typeof o=="object"&&o.key==="catchall-consumer");return r?!r.contextScopes||typeof 
r.contextScopes!="object"?($.warn("Catchall consumer missing or invalid contextScopes"),null):r:($.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?$.warn("Netlify features context request timed out"):$.warn("Failed to fetch Netlify features context:",e.message),null}},wn=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let o=await r.text();return await V.writeFile(t,o,"utf-8"),!0}catch(r){return r.name==="AbortError"?$.warn(`Download timeout for ${e}`):$.warn(`Failed to download context file ${e}:`,r.message),!1}},Le=null,Tn=async()=>{if(Le)return Le;let e=await En();if(!e)return[];let t=L.join(j.cwd(),D,Ke);await V.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([n,i])=>{if(!i||typeof i!="object"||!i.endpoint||!i.scope)return $.warn(`Invalid scope data for ${n}, skipping...`),null;let s=`${n}.md`,u=L.join(t,s),l=L.join(D,Ke,s);return $.log(`Downloading ${i.scope} context...`),await wn(i.endpoint,u)?($.log(`Downloaded: ${l}`),{scope:i.scope,path:l,key:n}):null});return Le=(await Promise.all(r)).filter(n=>n!==null),Le},or=async({cliPath:e,netlify:t,config:r,buildErrorContext:o})=>{let n=gn(t),i=await yn(Ct),s=L.join(j.cwd(),D);await V.mkdir(s,{recursive:!0});let u=L.join(D,i),l=L.join(j.cwd(),u),p=L.join(j.cwd(),D,Q);try{await V.unlink(p),$.log(`Deleted old results file: ${p}`)}catch{}let f=o?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
30
30
  Your task is to analyze and fix the build errors.
31
31
  Don't revert changes; apply fixes that address the errors.
32
32
  Don't try to run the build yourself. Just fix the errors.
@@ -39,26 +39,26 @@ ${r.siteContext.filter(g=>g.site_context).map(g=>typeof g.site_context=="string"
39
39
 
40
40
  `)}
41
41
  </project_rules>
42
- `);let m="";if(r.sessionHistoryContext?.length){let g=L.join(Y.cwd(),D,qe);await J.mkdir(g,{recursive:!0});let I=await Promise.all(r.sessionHistoryContext.map(async(v,N)=>{let b=N+1,C=`attempt-${b}.md`,O=L.join(g,C),x=L.join(D,qe,C),R=`# Task History - Attempt ${b}
42
+ `);let m="";if(r.sessionHistoryContext?.length){let g=L.join(j.cwd(),D,qe);await V.mkdir(g,{recursive:!0});let x=await Promise.all(r.sessionHistoryContext.map(async(R,S)=>{let b=S+1,C=`attempt-${b}.md`,O=L.join(g,C),I=L.join(D,qe,C),v=`# Task History - Attempt ${b}
43
43
 
44
44
  ## Request - what the user asked for
45
- ${v.request}
45
+ ${R.request}
46
46
 
47
47
  ---
48
48
 
49
49
  ## Response - what the agent replied with after its work
50
50
 
51
- ${v.response}
52
- `;return await J.writeFile(O,R,"utf-8"),$.log(`Created history file: ${x}`),x}));m+=`
51
+ ${R.response}
52
+ `;return await V.writeFile(O,v,"utf-8"),$.log(`Created history file: ${I}`),I}));m+=`
53
53
  <session_history_context>
54
54
  History of prior work on this task.
55
55
  You MUST review ALL of the files below to understand the context of previous attempts. Use this information to continue the discussion appropriately.
56
56
 
57
- ${I.slice(-5).map(v=>`- ${v}`).join(`
57
+ ${x.slice(-5).map(R=>`- ${R}`).join(`
58
58
  `)}
59
59
 
60
60
  </session_history_context>
61
- `}let h=await wn(),T="";h.length>0&&(T=`
61
+ `}let h=await Tn(),E="";h.length>0&&(E=`
62
62
  <netlify_features_context>
63
63
  If the user request is explicitly related to a specific Netlify feature (e.g., Netlify Forms, Netlify Functions, etc.), you MUST review the relevant documentation below in addition to reviewing the project files.
64
64
  DO NOT force the use of any Netlify feature if the user request does not explicitly require it or if the project has alternative implementations in place already.
@@ -68,7 +68,7 @@ ${v.response}
68
68
 
69
69
  Refer to these files when working with specific Netlify features.
70
70
  </netlify_features_context>
71
- `);let E=`
71
+ `);let T=`
72
72
  You're an AI agent designed to assist with tasks related to a Netlify project. Please review, understand, and use the context provided to complete the user's request as needed.
73
73
 
74
74
  <request>
@@ -89,8 +89,8 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
89
89
  - If the user asks for "a plan", "just planning", or similar (without asking for implementation) you may use plan mode to explore the codebase in read-only mode, design your implementation approach and write the complete plan to ${s}/${Q}. Stop there, do not wait for approval and do not implement unless explicitly asked.
90
90
  </responses>
91
91
  <attachements>
92
- - for requests that require work with attachments or assets, take into account that uploaded attachments are stored in ${s}/${Ke} folder
93
- - move assets from ${s}/${Ke} folder to the project assets folder if they are referenced in a code or applied changes
92
+ - for requests that require work with attachments or assets, take into account that uploaded attachments are stored in ${s}/${We} folder
93
+ - move assets from ${s}/${We} folder to the project assets folder if they are referenced in a code or applied changes
94
94
  </attachements>
95
95
  ${c}
96
96
  </requirements>
@@ -104,12 +104,12 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
104
104
  - Netlify Functions directory: ${n.functionsDir}
105
105
  </metadata>
106
106
  <environment>
107
- - Node Version: ${Y.version||"unknown"}
107
+ - Node Version: ${j.version||"unknown"}
108
108
  - Environment variables are set globally (e.g. \`echo $VARIABLE_NAME\` can be used to check if a var is set).
109
109
  - 'netlify-cli' npm package is already available as a global package. Don't try to install it again
110
110
  - If you need to start a local development server in order to fulfill the request, try using the Netlify CLI by running the shell command '${e} dev'. This will start a local HTTP server on port 8888, including live-reloading of any changes and, most critically, it offers local emulation for all Netlify features.
111
111
  </environment>
112
- ${T}
112
+ ${E}
113
113
  <docs>
114
114
  - Netlify Docs: https://docs.netlify.com
115
115
  - LLM Resources Index: https://docs.netlify.com/llms.txt
@@ -117,7 +117,7 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
117
117
  </extra_context>
118
118
 
119
119
  ${m}
120
- `;return await J.writeFile(l,E,"utf-8"),$.log(`Generated agent context document at: ${l}`),E.length>5e5&&(E=`
120
+ `;return await V.writeFile(l,T,"utf-8"),$.log(`Generated agent context document at: ${l}`),T.length>5e5&&(T=`
121
121
  You're an AI agent designed to assist with tasks related to a Netlify project. Please review, understand, and use the context provided to complete the user's request as needed.
122
122
 
123
123
  <request>
@@ -128,17 +128,17 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
128
128
  </request>
129
129
 
130
130
  Use the following file for the complete context of the ask, the environment, and what's available. ${l} You MUST READ ALL OF IT. Make sure to read it first. Never cite or paraphrase private context.
131
- `),E};var In=_("prompt"),ir=async({cliPath:e,config:t,netlify:r,buildErrorContext:o})=>{let n=await or({cliPath:e,config:t,netlify:r,buildErrorContext:o});return process.env.AGENT_RUNNER_DEBUG&&In.log("Contextful Prompt:",n),{prompt:n}};var De=_("inference_stage"),sr=5,ke=async e=>{let{cliPath:t,config:r,context:o,buildErrors:n,runner:i,persistSteps:s,aiGateway:u,attempt:l,contextPrefix:p,priorAgentSessionId:f}=e;De.log(`Running inference stage, attempt ${l} of ${sr}`);let c=await A(ut(),"inference-stage",async m=>{m?.setAttributes({"inference.attempt":l||1}),At();let{prompt:h}=await A(ut(),"compose-prompt",async()=>await ir({cliPath:t,config:r,buildErrorContext:xn(n),netlify:o})),T=`
131
+ `),T};var xn=_("prompt"),ir=async({cliPath:e,config:t,netlify:r,buildErrorContext:o})=>{let n=await or({cliPath:e,config:t,netlify:r,buildErrorContext:o});return process.env.AGENT_RUNNER_DEBUG&&xn.log("Contextful Prompt:",n),{prompt:n}};var De=_("inference_stage"),sr=5,ke=async e=>{let{cliPath:t,config:r,context:o,buildErrors:n,runner:i,persistSteps:s,aiGateway:u,attempt:l,contextPrefix:p,priorAgentSessionId:f}=e;De.log(`Running inference stage, attempt ${l} of ${sr}`);let c=await N(ut(),"inference-stage",async m=>{m?.setAttributes({"inference.attempt":l||1}),Nt();let{prompt:h}=await N(ut(),"compose-prompt",async()=>await ir({cliPath:t,config:r,buildErrorContext:In(n),netlify:o})),E=`
132
132
  ${p||""}
133
133
  ${h}
134
- `.trim(),E={...r,prompt:T},g=await A(ut(),`run-${r.runner}`,async()=>await i({aiGateway:u,config:E,netlify:o,persistSteps:s,continueSession:!!(l&&l>1),priorAgentSessionId:f}));return g.result&&(g.result=W(g.result)),g.error&&(g.error=W(g.error)),await s.flush(),g});if(c.error){if(De.error("Runner failed",{stepsCount:c.steps.length,duration:c.duration,error:c.error,isRetryableError:c.isRetryableError,attempt:l||1,agentSessionId:c.agentSessionId}),c.isRetryableError&&(!l||l<sr))return De.log("Retrying inference stage"),await new Promise(h=>setTimeout(h,5e3)),{runnerResult:(await ke({...e,attempt:(l||1)+1,priorAgentSessionId:c.agentSessionId,contextPrefix:c.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw De.log("Do not retry inference stage"),new Error(c.error)}return{runnerResult:c}},xn=e=>!e||e.length===0?"":`
134
+ `.trim(),T={...r,prompt:E},g=await N(ut(),`run-${r.runner}`,async()=>await i({aiGateway:u,config:T,netlify:o,persistSteps:s,continueSession:!!(l&&l>1),priorAgentSessionId:f}));return g.result&&(g.result=K(g.result)),g.error&&(g.error=K(g.error)),await s.flush(),g});if(c.error){if(De.error("Runner failed",{stepsCount:c.steps.length,duration:c.duration,error:c.error,isRetryableError:c.isRetryableError,attempt:l||1,agentSessionId:c.agentSessionId}),c.isRetryableError&&(!l||l<sr))return De.log("Retrying inference stage"),await new Promise(h=>setTimeout(h,5e3)),{runnerResult:(await ke({...e,attempt:(l||1)+1,priorAgentSessionId:c.agentSessionId,contextPrefix:c.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw De.log("Do not retry inference stage"),new Error(c.error)}return{runnerResult:c}},In=e=>!e||e.length===0?"":`
135
135
  Deploy failed. Here are the errors to review on the latest build:
136
136
 
137
137
  Below are all of the logs with potential issues that we extracted. Some of them may be false positives, discern them carefully and ensure fixes are relevant.
138
138
 
139
139
  ${e.pop()}
140
- `;import Nn from"process";import{getTracer as ct}from"@netlify/otel";import{getTracer as Rn}from"@netlify/otel";var Ie=_("deploy"),ar=async e=>await A(Rn(),"create-preview-deploy",async t=>vn(e,t)),vn=async({netlify:e,hasRepo:t,skipBuild:r,message:o="Agent Preview",deploySubdomain:n,cliPath:i,filter:s},u)=>{try{let l=["deploy","--message",`"${o}"`,"--json","--draft","--verbose"];t||(Ie.log("Deploy: Uploading source zip"),l.push("--upload-source-zip")),n&&l.push("--alias",n),s&&l.push("--filter",s),r?(Ie.log("Deploy: Skipping build"),l.push("--no-build")):l.push("--context","deploy-preview");let p=i||"netlify";Ie.log(`Running: ${p} ${l.join(" ")}`),u?.setAttributes({cmd:p,args:l});let f=await e.utils.run(p,l,{stdio:["ignore","pipe","pipe"]}),c=JSON.parse(String(f.stdout??"").trim());u?.setAttributes({success:!0,deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id}),Ie.log(`
141
- Preview deploy created successfully:`,{deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id});let m={deployId:c.deploy_id,previewUrl:c.deploy_url,logsUrl:c.logs,siteId:c.site_id};return t||(m.sourceZipFilename=c.source_zip_filename),m}catch(l){throw Ie.error("Failed to create preview deploy via CLI:",l),u?.setAttributes({success:!1,error:l.message}),l}};var xe=_("deploy_stage"),dt=async e=>await A(ct(),"run-deploy-stage",async()=>An(e)),An=async({cliPath:e,config:t,context:r,result:o,filter:n,isRetry:i})=>{let s=await A(ct(),"get-runner-diffs",async()=>await Gt({config:t,isRetry:i}));if(xe.info("Resolved git",{hasChanges:s.hasChanges,ignored:s.ignored??[]}),!s.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:u,resultDiff:l,diffBinary:p,resultDiffBinary:f}=s,c=!0;xe.log("Preview deploy condition check:",{resultUndefined:o===void 0,resultType:typeof o,hasChanges:c,wouldCreatePreview:o!==void 0&&c});let m=null;if(o!==void 0&&c)try{let h;try{let T=await A(ct(),"get-runner-session",async()=>await It(t.id,t.sessionId));T?.title&&(h=T.title)}catch(T){xe.warn("Failed to fetch session title, using fallback message:",T.message)}await H(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),m=await ar({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:h,skipBuild:!1,deploySubdomain:Dt(t.id,Nn.env.SITE_NAME),filter:n})}catch(h){return xe.warn("Failed to create preview deploy (continuing with agent run):",h),{diff:u,resultDiff:l,hasChanges:c,previewInfo:null,diffBinary:p,resultDiffBinary:f,deployError:h instanceof Error?h.message:String(h)}}return xe.log("Git status",{hasDiff:!!u,hasChanges:c}),{diff:u,resultDiff:l,hasChanges:c,previewInfo:m,diffBinary:p,resultDiffBinary:f}};import{getTracer as Me}from"@netlify/otel";async function lr(e,t){let{maxRetries:r,baseDelay:o,onRetry:n}=t,i;for(let s=1;s<=r;s++)try{return await e()}catch(u){if(i=u,s===r)throw i;n&&n(s,i),await new Promise(l=>setTimeout(l,o*s))}throw i}var Ue=class{scanDiffForForms(t){let r=[],o=null,n=[],i=t.split(`
140
+ `;import Sn from"process";import{getTracer as ct}from"@netlify/otel";import{getTracer as vn}from"@netlify/otel";var xe=_("deploy"),ar=async e=>await N(vn(),"create-preview-deploy",async t=>Rn(e,t)),Rn=async({netlify:e,hasRepo:t,skipBuild:r,message:o="Agent Preview",deploySubdomain:n,cliPath:i,filter:s},u)=>{try{let l=["deploy","--message",`"${o}"`,"--json","--draft","--verbose"];t||(xe.log("Deploy: Uploading source zip"),l.push("--upload-source-zip")),n&&l.push("--alias",n),s&&l.push("--filter",s),r?(xe.log("Deploy: Skipping build"),l.push("--no-build")):l.push("--context","deploy-preview");let p=i||"netlify";xe.log(`Running: ${p} ${l.join(" ")}`),u?.setAttributes({cmd:p,args:l});let f=await e.utils.run(p,l,{stdio:["ignore","pipe","pipe"]}),c=JSON.parse(String(f.stdout??"").trim());u?.setAttributes({success:!0,deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id}),xe.log(`
141
+ Preview deploy created successfully:`,{deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id});let m={deployId:c.deploy_id,previewUrl:c.deploy_url,logsUrl:c.logs,siteId:c.site_id};return t||(m.sourceZipFilename=c.source_zip_filename),m}catch(l){throw xe.error("Failed to create preview deploy via CLI:",l),u?.setAttributes({success:!1,error:l.message}),l}};var Ie=_("deploy_stage"),dt=async e=>await N(ct(),"run-deploy-stage",async()=>Nn(e)),Nn=async({cliPath:e,config:t,context:r,result:o,filter:n,isRetry:i})=>{let s=await N(ct(),"get-runner-diffs",async()=>await Gt({config:t,isRetry:i}));if(Ie.info("Resolved git",{hasChanges:s.hasChanges,ignored:s.ignored??[]}),!s.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:u,resultDiff:l,diffBinary:p,resultDiffBinary:f}=s,c=!0;Ie.log("Preview deploy condition check:",{resultUndefined:o===void 0,resultType:typeof o,hasChanges:c,wouldCreatePreview:o!==void 0&&c});let m=null;if(o!==void 0&&c)try{let h;try{let E=await N(ct(),"get-runner-session",async()=>await xt(t.id,t.sessionId));E?.title&&(h=E.title)}catch(E){Ie.warn("Failed to fetch session title, using fallback message:",E.message)}await H(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),m=await ar({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:h,skipBuild:!1,deploySubdomain:Dt(t.id,Sn.env.SITE_NAME),filter:n})}catch(h){return Ie.warn("Failed to create preview deploy (continuing with agent run):",h),{diff:u,resultDiff:l,hasChanges:c,previewInfo:null,diffBinary:p,resultDiffBinary:f,deployError:h instanceof Error?h.message:String(h)}}return Ie.log("Git status",{hasDiff:!!u,hasChanges:c}),{diff:u,resultDiff:l,hasChanges:c,previewInfo:m,diffBinary:p,resultDiffBinary:f}};import{getTracer as Me}from"@netlify/otel";async function lr(e,t){let{maxRetries:r,baseDelay:o,onRetry:n}=t,i;for(let s=1;s<=r;s++)try{return await e()}catch(u){if(i=u,s===r)throw i;n&&n(s,i),await new Promise(l=>setTimeout(l,o*s))}throw i}var Ue=class{scanDiffForForms(t){let r=[],o=null,n=[],i=t.split(`
142
142
  `);for(let s of i)if(s.startsWith("diff --git")){if(o&&n.length>0){let l=this.containsNetlifyForm(n,o);l&&r.push(l)}let u=s.split(" ");o=u[u.length-1].replace(/^b\//,""),n=[]}else s.startsWith("+")&&!s.startsWith("+++")&&n.push(s.slice(1));if(o&&n.length>0){let s=this.containsNetlifyForm(n,o);s&&r.push(s)}return{detected:r.length>0,matches:r}}containsNetlifyForm(t,r){let o=t.join(`
143
- `),n=[{pattern:/<[\w-]*form[\s\S]*?(data-)?netlify/i,name:"standard form element"},{pattern:/<[A-Z][\w]*[\s\S]*?component\s*=\s*["']form["'][\s\S]*?(data-)?netlify/i,name:"component with form prop"}];for(let{pattern:i,name:s}of n){let u=o.match(i);if(u){let l=u.index||0,p=Math.max(0,l-20),f=Math.min(o.length,l+u[0].length+20),c=o.slice(p,f).trim();return c=c.replace(/\s+/g," "),c.length>100&&(c=c.slice(0,97)+"..."),{file:r,snippet:`[${s}] ${c}`}}}return null}};var S=_("cleanup_stage"),ur=async e=>await A(Me(),"cleanup-stage",async()=>Sn(e)),pt=1024*1024*10,Sn=async({config:e,diff:t,result:r,duration:o,resultDiff:n,diffBinary:i,resultDiffBinary:s,previewInfo:u})=>{let l={result:r||"Done",duration:o};u&&u.deployId&&(l.deploy_id=u.deployId),u&&u.sourceZipFilename&&(l.result_zip_file_name=u.sourceZipFilename);let p=t||i||n||s;if(p&&(l.diff_produced=!0),p){let f=new Ue,c=t||i||"",m=f.scanDiffForForms(c);m.detected?(S.log("Detected Netlify form(s) in diff:"),m.matches.forEach(({file:h,snippet:T})=>{S.log(` - ${h}: ${T}`)}),l.has_netlify_form=!0):S.log("Did not detect Netlify form(s) in diff"),S.log("Did not detect Netlify form(s) in diff")}if(p)try{S.log("Getting pre-signed URLs for diff upload");let f=await Rt(e.id,e.sessionId),c=[];(t||i)&&c.push(Be(f.result.upload_url,i||t).then(()=>{l.result_diff_s3_key=f.result.s3_key,S.log("Successfully uploaded result_diff to S3")})),(n||s)&&c.push(Be(f.cumulative.upload_url,s||n).then(()=>{l.cumulative_diff_s3_key=f.cumulative.s3_key,S.log("Successfully uploaded cumulative_diff to S3")})),S.log(`Uploading ${c.length} diff(s) to S3 in parallel`),await Promise.all(c),(n||s)&&(S.log("Updating agent runner with cumulative diff S3 key"),await A(Me(),"update-runner",async()=>{await Ee(e.id,{result_diff_s3_key:f.cumulative.s3_key})}))}catch(f){S.error("S3 upload failed, falling back to inline diffs:",f);let c=Buffer.byteLength(t||i||""),m=Buffer.byteLength(s||n||"");if(c>pt||m>pt){let h=`Diffs exceed maximum inline size of ${pt} bytes.`;throw S.error(h),new Error(h)}l.result_diff=t,l.result_diff_binary=i,(n||s)&&(l.cumulative_diff=n,l.cumulative_diff_binary=s,S.log("Updating agent runner with inline diffs (fallback)"),await A(Me(),"update-runner",async()=>{await Ee(e.id,{result_diff:n,result_diff_binary:s})}))}else S.log("No diffs to upload");return S.log("Updated agent runner with result"),await lr(async()=>await A(Me(),"update-runner-session",()=>H(e.id,e.sessionId,l)),{maxRetries:3,baseDelay:1e3,onRetry:(f,c)=>{S.error(`Error updating agent runner session (attempt ${f}):`,c),S.log("Retrying...")}}),S.log("Finished updating agent runner with result"),{sessionUpdate:l}};import{getTracer as cr,shutdownTracers as Cn,withActiveSpan as dr}from"@netlify/otel";var Pn=bn(import.meta.url),pr=Pn("../package.json"),Re=_("pipeline_index"),Ge=3,fr=async({config:e,apiToken:t,cliPath:r="netlify",cwd:o,filter:n,tracing:i={}})=>{let s,{withStageTimer:u}=St(Z.timeUnits.hours(4)),l=await yt(pr.version,e.id,i);try{await dr(cr(),"run-pipeline",{},l,async()=>{let{aiGateway:p,context:f,persistSteps:c,runner:m,sha:h}=await u("init",()=>nr({config:e,apiToken:t,cliPath:r,cwd:o,filter:n,runnerVersion:pr.version}),Z.timeUnits.minutes(10));s=m.clean,e.sha=h;let{runnerResult:T}=await u("inference",()=>ke({cliPath:r,config:e,context:f,runner:m.runner,persistSteps:c,aiGateway:p}));await H(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let E=await 
u("deploy",()=>dt({cliPath:r,config:e,context:f,result:T.result,filter:n,isRetry:!1})),g=T,I=[];if(E.hasChanges&&E.deployError){I.push(_t(E.deployError));let x=1,R=!1;for(;x<=Ge&&!E.previewInfo&&!R;)Re.log(`Deploy attempt had errors. Retrying. ${x}/${Ge}`),await dr(cr(),"deploy-stage",async G=>{G?.setAttributes({"stage.attempt":x});let y;try{y=(await u(`inference-retry-${x}`,()=>ke({cliPath:r,config:e,context:f,runner:m.runner,persistSteps:c,aiGateway:p,buildErrors:I,priorAgentSessionId:T.agentSessionId}))).runnerResult}catch(d){Re.warn(`Inference retry ${x} failed, stopping deploy retries:`,d),R=!0;return}g={...y,steps:[...g.steps||[],...y.steps||[]],duration:(g.duration||0)+(y.duration||0)},E=await u(`deploy-retry-${x}`,()=>dt({cliPath:r,config:e,context:f,result:y.result,filter:n,isRetry:!0})),E.deployError&&I.push(E.deployError),x++});x>Ge&&!E.previewInfo&&console.warn(`Deploy validation failed after ${Ge} attempts`)}let{diff:v,resultDiff:N,previewInfo:b,diffBinary:C,resultDiffBinary:O}=E;await u("cleanup",()=>ur({config:e,diff:v,result:g.result,duration:g.duration,resultDiff:N,diffBinary:C,resultDiffBinary:O,previewInfo:b}),Z.timeUnits.minutes(10)),process.env.NETLIFY_LOCAL_MODE||(await s?.(),await Ht())})}catch(p){if(Et(p)){Re.info("Agent run terminated gracefully",{statusCode:p.statusCode,reason:p.message}),await s?.();try{await H(e.id,e.sessionId,{result:p.userMessage,state:"error"})}catch{Re.info("Could not update session (site may have been deleted)")}return}Re.error("Got error while running pipeline",p),await s?.();let f=p instanceof Error&&p.message;throw await H(e.id,e.sessionId,{result:f||"Encountered error when running agent",state:"error"}),p}finally{await Cn()}};import M from"process";var Fn="claude",$n=e=>(e??[]).filter(t=>t.request&&t.response),Ln=e=>(e??[]).filter(t=>t.site_context),mr=_("config"),gr=()=>{let e=M.env.NETLIFY_AGENT_RUNNER_ID,t=M.env.NETLIFY_AGENT_RUNNER_SESSION_ID;if(!e||!t)throw new Error("ID of agent runner is not provided");let r=M.env.NETLIFY_AGENT_RUNNER_RESULT_BRANCH,o=M.env.NETLIFY_AGENT_RUNNER_PROMPT;if(!o)throw new Error("Prompt is not provided");let n=M.env.NETLIFY_AGENT_RUNNER_AGENT||Fn,i=M.env.NETLIFY_AGENT_RUNNER_MODEL,s=M.env.NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED==="1",u=Ce(M.env.NETLIFY_AGENT_RUNNER_CONTEXT,!0,mr),l=$n(u),p=Ln(u),f=M.env.NETLIFY_AGENT_RUNNER_HAS_REPO!=="0",c=!M.env.NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED,m=M.env.NETLIFY_AGENT_RUNNER_SHA,h=Dn(),T=kt(),E={id:e,sessionId:t,resultBranch:r,prompt:o,runner:n,model:i,sessionHistoryContext:l,siteContext:p,hasRepo:f,useGateway:c,sha:m,accountType:h,validateAgentWithBuild:s,modelVersionOverrides:T};return mr.log({fullConfig:E}),E},Dn=()=>{let e=M.env.NETLIFY_TEAM_TYPE;return e?e.includes("personal")?Je:e.includes("pro")?"pro":e.startsWith("enterprise")?Xe:e.startsWith("free")?ze:Ve:Ve};var hr=_("bin_cmd"),he=kn(ft.argv.slice(2),{string:["auth","cwd","cli-path","filter","trace-exporter-url","traceparent"]});try{let e=gr();await fr({config:e,apiToken:he.auth,cwd:he.cwd,cliPath:he["cli-path"],filter:he.filter,tracing:{exporterUrl:he["trace-exporter-url"],traceparent:he.traceparent}}),hr.info("Finished agent"),ft.exit(0)}catch(e){hr.error("Error running agent pipeline:",e),ft.exit(1)}
143
+ `),n=[{pattern:/<[\w-]*form[\s\S]*?(data-)?netlify/i,name:"standard form element"},{pattern:/<[A-Z][\w]*[\s\S]*?component\s*=\s*["']form["'][\s\S]*?(data-)?netlify/i,name:"component with form prop"}];for(let{pattern:i,name:s}of n){let u=o.match(i);if(u){let l=u.index||0,p=Math.max(0,l-20),f=Math.min(o.length,l+u[0].length+20),c=o.slice(p,f).trim();return c=c.replace(/\s+/g," "),c.length>100&&(c=c.slice(0,97)+"..."),{file:r,snippet:`[${s}] ${c}`}}}return null}};var A=_("cleanup_stage"),ur=async e=>await N(Me(),"cleanup-stage",async()=>An(e)),pt=1024*1024*10,An=async({config:e,diff:t,result:r,duration:o,resultDiff:n,diffBinary:i,resultDiffBinary:s,previewInfo:u})=>{let l={result:r||"Done",duration:o};u&&u.deployId&&(l.deploy_id=u.deployId),u&&u.sourceZipFilename&&(l.result_zip_file_name=u.sourceZipFilename);let p=t||i||n||s;if(p&&(l.diff_produced=!0),p){let f=new Ue,c=t||i||"",m=f.scanDiffForForms(c);m.detected?(A.log("Detected Netlify form(s) in diff:"),m.matches.forEach(({file:h,snippet:E})=>{A.log(` - ${h}: ${E}`)}),l.has_netlify_form=!0):A.log("Did not detect Netlify form(s) in diff"),A.log("Did not detect Netlify form(s) in diff")}if(p)try{A.log("Getting pre-signed URLs for diff upload");let f=await vt(e.id,e.sessionId),c=[];(t||i)&&c.push(Be(f.result.upload_url,i||t).then(()=>{l.result_diff_s3_key=f.result.s3_key,A.log("Successfully uploaded result_diff to S3")})),(n||s)&&c.push(Be(f.cumulative.upload_url,s||n).then(()=>{l.cumulative_diff_s3_key=f.cumulative.s3_key,A.log("Successfully uploaded cumulative_diff to S3")})),A.log(`Uploading ${c.length} diff(s) to S3 in parallel`),await Promise.all(c),(n||s)&&(A.log("Updating agent runner with cumulative diff S3 key"),await N(Me(),"update-runner",async()=>{await Ee(e.id,{result_diff_s3_key:f.cumulative.s3_key})}))}catch(f){A.error("S3 upload failed, falling back to inline diffs:",f);let c=Buffer.byteLength(t||i||""),m=Buffer.byteLength(s||n||"");if(c>pt||m>pt){let h=`Diffs exceed maximum inline size of ${pt} bytes.`;throw A.error(h),new Error(h)}l.result_diff=t,l.result_diff_binary=i,(n||s)&&(l.cumulative_diff=n,l.cumulative_diff_binary=s,A.log("Updating agent runner with inline diffs (fallback)"),await N(Me(),"update-runner",async()=>{await Ee(e.id,{result_diff:n,result_diff_binary:s})}))}else A.log("No diffs to upload");return A.log("Updated agent runner with result"),await lr(async()=>await N(Me(),"update-runner-session",()=>H(e.id,e.sessionId,l)),{maxRetries:3,baseDelay:1e3,onRetry:(f,c)=>{A.error(`Error updating agent runner session (attempt ${f}):`,c),A.log("Retrying...")}}),A.log("Finished updating agent runner with result"),{sessionUpdate:l}};import{getTracer as cr,shutdownTracers as Cn,withActiveSpan as dr}from"@netlify/otel";var Pn=bn(import.meta.url),pr=Pn("../package.json"),ve=_("pipeline_index"),Ge=3,fr=async({config:e,apiToken:t,cliPath:r="netlify",cwd:o,filter:n,tracing:i={}})=>{let s,{withStageTimer:u}=At(Z.timeUnits.hours(4)),l=await yt(pr.version,e.id,i);try{await dr(cr(),"run-pipeline",{},l,async()=>{let{aiGateway:p,context:f,persistSteps:c,runner:m,sha:h}=await u("init",()=>nr({config:e,apiToken:t,cliPath:r,cwd:o,filter:n,runnerVersion:pr.version}),Z.timeUnits.minutes(10));s=m.clean,e.sha=h;let{runnerResult:E}=await u("inference",()=>ke({cliPath:r,config:e,context:f,runner:m.runner,persistSteps:c,aiGateway:p}));await H(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let T=await 
u("deploy",()=>dt({cliPath:r,config:e,context:f,result:E.result,filter:n,isRetry:!1})),g=E,x=[];if(T.hasChanges&&T.deployError){x.push(_t(T.deployError));let I=1,v=!1;for(;I<=Ge&&!T.previewInfo&&!v;)ve.log(`Deploy attempt had errors. Retrying. ${I}/${Ge}`),await dr(cr(),"deploy-stage",async M=>{M?.setAttributes({"stage.attempt":I});let y;try{y=(await u(`inference-retry-${I}`,()=>ke({cliPath:r,config:e,context:f,runner:m.runner,persistSteps:c,aiGateway:p,buildErrors:x,priorAgentSessionId:E.agentSessionId}))).runnerResult}catch(d){ve.warn(`Inference retry ${I} failed, stopping deploy retries:`,d),v=!0;return}g={...y,steps:[...g.steps||[],...y.steps||[]],duration:(g.duration||0)+(y.duration||0)},T=await u(`deploy-retry-${I}`,()=>dt({cliPath:r,config:e,context:f,result:y.result,filter:n,isRetry:!0})),T.deployError&&x.push(T.deployError),I++});I>Ge&&!T.previewInfo&&console.warn(`Deploy validation failed after ${Ge} attempts`)}let{diff:R,resultDiff:S,previewInfo:b,diffBinary:C,resultDiffBinary:O}=T;await u("cleanup",()=>ur({config:e,diff:R,result:g.result,duration:g.duration,resultDiff:S,diffBinary:C,resultDiffBinary:O,previewInfo:b}),Z.timeUnits.minutes(10)),process.env.NETLIFY_LOCAL_MODE||(await s?.(),await Ht())})}catch(p){if(Et(p)){ve.info("Agent run terminated gracefully",{statusCode:p.statusCode,reason:p.message}),await s?.();try{await H(e.id,e.sessionId,{result:p.userMessage,state:"error"})}catch{ve.info("Could not update session (site may have been deleted)")}return}ve.error("Got error while running pipeline",p),await s?.();let f=p instanceof Error&&p.message;throw await H(e.id,e.sessionId,{result:f||"Encountered error when running agent",state:"error"}),p}finally{await Cn()}};import Y from"process";var Fn="claude",$n=e=>(e??[]).filter(t=>t.request&&t.response),Ln=e=>(e??[]).filter(t=>t.site_context),mr=_("config"),gr=()=>{let e=Y.env.NETLIFY_AGENT_RUNNER_ID,t=Y.env.NETLIFY_AGENT_RUNNER_SESSION_ID;if(!e||!t)throw new Error("ID of agent runner is not provided");let r=Y.env.NETLIFY_AGENT_RUNNER_RESULT_BRANCH,o=Y.env.NETLIFY_AGENT_RUNNER_PROMPT;if(!o)throw new Error("Prompt is not provided");let n=Y.env.NETLIFY_AGENT_RUNNER_AGENT||Fn,i=Y.env.NETLIFY_AGENT_RUNNER_MODEL,s=Ce(Y.env.NETLIFY_AGENT_RUNNER_CONTEXT,!0,mr),u=$n(s),l=Ln(s),p=Y.env.NETLIFY_AGENT_RUNNER_HAS_REPO!=="0",f=!Y.env.NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED,c=Y.env.NETLIFY_AGENT_RUNNER_SHA,m=Dn(),h=kt(),E={id:e,sessionId:t,resultBranch:r,prompt:o,runner:n,model:i,sessionHistoryContext:u,siteContext:l,hasRepo:p,useGateway:f,sha:c,accountType:m,modelVersionOverrides:h};return mr.log({fullConfig:E}),E},Dn=()=>{let e=Y.env.NETLIFY_TEAM_TYPE;return e?e.includes("personal")?Ve:e.includes("pro")?"pro":e.startsWith("enterprise")?Xe:e.startsWith("free")?ze:Je:Je};var hr=_("bin_cmd"),he=kn(ft.argv.slice(2),{string:["auth","cwd","cli-path","filter","trace-exporter-url","traceparent"]});try{let e=gr();await fr({config:e,apiToken:he.auth,cwd:he.cwd,cliPath:he["cli-path"],filter:he.filter,tracing:{exporterUrl:he["trace-exporter-url"],traceparent:he.traceparent}}),hr.info("Finished agent"),ft.exit(0)}catch(e){hr.error("Error running agent pipeline:",e),ft.exit(1)}
144
144
  //# sourceMappingURL=bin.js.map
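
For orientation, the following is a rough, de-minified TypeScript sketch of the CLI flag parsing visible in the dist/bin-local.js bundle above. The flag names are taken from the minified minimist call; the pipeline helper named in the comments and all identifiers are illustrative assumptions, not the original (mangled) names.

    import minimist from "minimist";
    import process from "process";

    // Flag names mirrored from the minified minimist call in bin-local.js.
    const args = minimist(process.argv.slice(2), {
      string: ["auth", "cwd", "cli-path", "filter", "trace-exporter-url", "traceparent"],
    });

    // The entry point forwards these values to its run-pipeline function roughly as:
    //   runPipeline({ config, apiToken: args.auth, cwd: args.cwd, cliPath: args["cli-path"],
    //                 filter: args.filter, tracing: { exporterUrl: args["trace-exporter-url"],
    //                 traceparent: args.traceparent } })
    // where `config` is assembled from NETLIFY_AGENT_RUNNER_* environment variables.
    console.log(args); // inspect the parsed flags
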
package/dist/index.d.ts CHANGED
@@ -265,7 +265,6 @@ interface RunnerConfig {
265
265
  useGateway: boolean;
266
266
  hasRepo: boolean;
267
267
  sha?: string;
268
- validateAgentWithBuild?: boolean;
269
268
  }
270
269
 
271
270
  interface Context {
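
For reference, a minimal TypeScript sketch of the RunnerConfig fields visible in this hunk after the change; fields outside the shown context are omitted, and the removed property is noted in a comment.

    interface RunnerConfig {
      // ...fields above this hunk omitted...
      useGateway: boolean;
      hasRepo: boolean;
      sha?: string;
      // validateAgentWithBuild?: boolean; // removed in this version
    }
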
package/dist/index.js CHANGED
@@ -4,18 +4,18 @@ import{createRequire as gn}from"module";import{createTracerProvider as or}from"@
4
4
  ${s}
5
5
  </extracted_error_chunk>`).join(`
6
6
 
7
- `);return i.length>e.length*.8?e:i}import Pe from"process";import{getTracer as zr}from"@netlify/otel";import ge from"process";var te=class extends Error{constructor(r,o,n){super(r);this.statusCode=o;this.userMessage=n;this.name="GracefulShutdownError"}},dt=e=>e instanceof te;var ve=ge.env.NETLIFY_API_URL,Re=ge.env.NETLIFY_API_TOKEN,Y=_("api"),Se=()=>ge.env.NETLIFY_LOCAL_MODE==="true",he=async(e,t={})=>{if(!ve||!Re)throw new Error("No API URL or token");let r=new URL(e,ve),o={...t,headers:{...t.headers,Authorization:`Bearer ${Re}`}};ge.env.AGENT_RUNNERS_DEBUG==="true"&&(o.headers["x-nf-debug-logging"]="true"),t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),i=n.ok&&n.status<=299;if(ge.env.AGENT_RUNNERS_DEBUG==="true")Y.log(`Response headers for ${r}:`),n.headers.forEach((u,l)=>{Y.log(` ${l}: ${u}`)});else{let u=n.headers.get("x-request-id")||n.headers.get("x-nf-request-id");Y.log(`Request ID for ${r}: ${u||"N/A"}`)}if(i||Y.error(`Got status ${n.status} for request ${r}`),t.raw){if(!i)throw new Error(`API request failed: ${n.status} ${n.statusText}`);return n}let s=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!i){let u=typeof s=="string"?s:JSON.stringify(s);throw n.status===404?new te(`API request failed: 404 - ${u}`,404,"The site associated with this agent run no longer exists."):n.status===403&&t.gracefulOn403?new te(`API request failed: 403 - ${u}`,403,"Credit limit reached. Please add more credits to continue using Agent Runners."):new Error(`API request failed: ${n.status} - ${u}`)}return s},pt=e=>{Y.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(ve=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(Re=e.constants.NETLIFY_API_TOKEN)},ft=()=>({apiUrl:ve,token:Re}),ye=async(e,t)=>Se()?(Y.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):he(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),B=async(e,t,r)=>Se()?(Y.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):he(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var mt=async(e,t)=>Se()?(Y.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):he(`/api/v1/agent_runners/${e}/sessions/${t}`),gt=(e,t,r)=>he(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r},gracefulOn403:!0}),ht=async(e,t)=>Se()?(Y.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):he(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),je=async(e,t)=>{Y.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var re=_("ai_gateway"),Ge=null;var yt=async()=>{if(Ge)return Ge;re.log("Fetching available AI gateway providers");let e=await fetch(`${ft().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return Ge=t,re.log("Cached AI gateway 
providers",{providerCount:Object.keys(t.providers).length}),t},cr=async(e,t)=>{let o=(await yt()).providers[e];if(!o)return re.log(`Provider '${e}' not found`),!1;let n=o.models.includes(t);return re.log(`Model validation for ${e}/${t}`,{isAvailable:n}),n},_t=async({netlify:e,config:t})=>{let r,o,n,i,s=e.constants?.SITE_ID;if(!s)throw new Error("No site id");let u=async()=>{clearTimeout(n),re.log("Requesting AI gateway information");let l=await gt(s,t.id,t.sessionId);if({token:r,url:i}=l,o=l.expires_at?l.expires_at*1e3:void 0,re.log("Got AI gateway information",{token:!!r,expiresAt:o,url:i}),o){let p=o-Date.now()-6e4;p>0&&(n=setTimeout(()=>{u()},p))}};return await Promise.all([u(),yt()]),{get url(){return i},get token(){return r},isModelAvailableForProvider:cr}};import H from"process";import K from"path";import be from"fs";import{fileURLToPath as yr}from"url";import{createRequire as _r}from"module";import{execa as Er,execaCommand as Vn}from"execa";import{Transform as dr}from"stream";var pr=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),fr=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function mr(){return Object.entries(process.env).filter(([e,t])=>!(!t||pr.has(e)||fr.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function q(e){if(typeof e!="string")return e;let t=mr();if(t.length===0)return e;let r=e;return t.forEach(o=>{let n=new RegExp(gr(o),"g");r=r.replace(n,"******")}),r}function gr(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var ne=class extends dr{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,o){let n=t.toString(),i=q(n);o(null,i)}};function Et(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(o,n,i){let s=typeof o=="string"?q(o):o;return typeof n=="function"?t(s,n):t(s,n,i)},process.stderr.write=function(o,n,i){let s=typeof o=="string"?q(o):o;return typeof n=="function"?r(s,n):r(s,n,i)}}var _e=null,wt=e=>(_e&&_e.destroy(),_e=new z({totalAllowedTime:e}),_e),Tt=()=>_e;var z=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,o)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let n=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),i=null,s=null;o!==void 0&&(s=new Promise((u,l)=>{i=setTimeout(()=>{l(new Error(`${t} stage exceeded its maximum duration of ${o}ms`))},o)}));try{return s?await Promise.race([r(),s]):await r()}finally{n(),i&&clearTimeout(i)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var xt="netlify-agent-runner-context.md",Ye="task-history",Be="netlify-context",D=".netlify",Z="results.md",He="assets";var Q=1800*1e3;var It={name:"@netlify/agent-runner-cli",type:"module",version:"1.60.3",description:"CLI tool for running Netlify agents",main:"./dist/index.js",types:"./dist/index.d.ts",exports:"./dist/index.js",bin:{"agent-runner-cli":"./dist/bin.js","agent-runner-cli-local":"./dist/bin-local.js"},files:["dist/**/*.js","dist/**/*.d.ts","patches","scripts"],scripts:{build:"tsup",dev:"tsup --watch",prepare:"husky install node_modules/@netlify/eslint-config-node/.husky/",prepublishOnly:"npm ci && npm test",prepack:"npm run build",test:"run-s build format test:dev",format:"run-s build format:check-fix:*","format:ci":"run-s build format:check:*","format:check-fix:lint":"run-e format:check:lint format:fix:lint","format:check:lint":"cross-env-shell eslint $npm_package_config_eslint","format:fix:lint":"cross-env-shell eslint --fix $npm_package_config_eslint","format:check-fix:prettier":"run-e format:check:prettier format:fix:prettier","format:check:prettier":"cross-env-shell prettier --check $npm_package_config_prettier","format:fix:prettier":"cross-env-shell prettier --write $npm_package_config_prettier","test:dev":"run-s build test:dev:*","test:ci":"run-s build test:ci:*","test:dev:vitest":"LOG=0 vitest --exclude '**/integration/**'","test:ci:vitest":"LOG=0 c8 -r lcovonly -r text -r json vitest --exclude '**/integration/**'","test:integration":"vitest run test/integration/","test:integration:codex":"vitest run test/integration/codex.test.ts","test:integration:claude":"vitest run test/integration/claude.test.ts","test:integration:gemini":"vitest run test/integration/gemini.test.ts",postinstall:"node scripts/postinstall.js"},config:{eslint:'--cache --format=codeframe --max-warnings=0 "{src,scripts,test,.github}/**/*.{js,ts,md,html}"',prettier:'--ignore-path .gitignore --loglevel=warn 
"{src,scripts,test,.github}/**/*.{js,ts,md,yml,json,html}" "*.{js,ts,yml,json,html}" ".*.{js,ts,yml,json,html}" "!**/package-lock.json" "!package-lock.json"'},keywords:[],license:"MIT",repository:"netlify/agent-runner-cli",bugs:{url:"https://github.com/netlify/agent-runner-cli/issues"},author:"Netlify Inc.",directories:{test:"test"},devDependencies:{"@commitlint/cli":"^20.0.0","@commitlint/config-conventional":"^20.0.0","@eslint/compat":"^2.0.0","@eslint/js":"^9.35.0","@netlify/eslint-config-node":"^7.0.1","@types/node":"^24.5.0","@typescript-eslint/eslint-plugin":"^8.0.0","@typescript-eslint/parser":"^8.0.0","@vitest/eslint-plugin":"^1.6.6",c8:"^10.0.0","eslint-config-prettier":"^10.1.8","eslint-plugin-n":"^17.0.0",husky:"^9.0.0","patch-package":"^8.0.0",tsup:"^8.5.0",typescript:"^5.0.0","typescript-eslint":"^8.44.0",vitest:"^4.0.16"},dependencies:{"@anthropic-ai/claude-code":"2.1.14","@google/gemini-cli":"0.25.0","@netlify/otel":"^5.1.1","@openai/codex":"0.88.0","@opentelemetry/exporter-trace-otlp-grpc":"^0.208.0",execa:"^9.6.1",minimist:"^1.2.8"}};var wr=yr(import.meta.url),Tr=K.dirname(wr),xr=_r(import.meta.url),oe=_("shell"),qe=new Set,Ir={preferLocal:!0},$=(e,t,r)=>{let[o,n]=vr(t,r),i={...Ir,...n},s=Er(e,o,i);Rr(s,i),br(s);let u=r?.idleTimeout;return u&&u>0&&Sr(s,u),s};var vr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},Rr=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(H.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new ne).pipe(H.stdout),e.stdout?.pipe(new ne).pipe(H.stdout),e.stderr?.pipe(new ne).pipe(H.stderr);return}e.stdout?.pipe(H.stdout),e.stderr?.pipe(H.stderr)},Ke=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(H.kill(-e.pid,t),oe.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return oe.error("Error killing process:",r),!1}},vt=e=>Ke(e,"SIGKILL"),Sr=(e,t)=>{let r=null,o=()=>{oe.log(`Process ${e.pid} killed due to idle timeout (no output for ${t}ms)`),Ke(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(oe.log(`Force killing idle process ${e.pid}`),vt(e))},5e3)},n=()=>{r&&clearTimeout(r),r=setTimeout(o,t)};n(),e.stdout?.on("data",n),e.stderr?.on("data",n);let i=()=>{r&&(clearTimeout(r),r=null)};e.on("exit",i),e.on("error",i)},br=e=>{qe.add(e);let t=Tt();if(t){let r=t.onTimesUp(()=>{oe.log(`Global timer expired, killing process ${e.pid}`),Ke(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(oe.log(`Force killing process ${e.pid} after timeout`),vt(e))},5e3)});e.on("exit",()=>{qe.delete(e),r()}),e.on("error",()=>{qe.delete(e),r()})}};function ie(e,t){if(!H.env.NETLIFY_LOCAL_MODE)try{let n=xr.resolve(It.name),i=K.dirname(n);for(;i!==K.dirname(i);){let s=K.dirname(i);if(K.basename(s)==="node_modules"){let u=K.join(s,".bin",t);if(be.existsSync(u))return u;break}i=s}}catch(n){console.error("Could not resolve package.json",n)}if(H.env.NODE_PATH){let n=K.join(H.env.NODE_PATH,".bin",t);if(be.existsSync(n))return n}let r=K.join(e,"node_modules",".bin",t);if(be.existsSync(r))return r;let o=K.join(Tr,"..","node_modules",".bin",t);if(be.existsSync(o))return o}var Ar=_("utils"),Nr=e=>new Promise(t=>{setTimeout(t,e)}),Rt=(e,t=3e3)=>{let r=!1,o=null,n=[],i=null,s=(...u)=>{if(r)return o=u,new Promise(f=>{n.push(f)});r=!0;let l,p=new Promise(f=>{l=f});return i=(async()=>{await Promise.resolve();let f=await e(...u);for(l(f);;){if(await Nr(t),!o)return r=!1,i=null,f;let c=o,m=n;o=null,n=[],f=await e(...c),m.forEach(y=>{y(f)})}})(),p};return s.flush=async()=>{if((r||o)&&i)return await 
i,s.flush()},s},se=(e,t,r=!1)=>{let o=null,n=null,i=null,s=function(...u){n=u,i=this;let l=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(i,n),n=null,i=null)},t),l&&(e.apply(i,n),n=null,i=null)};return s.cancel=()=>{clearTimeout(o),o=null,n=null,i=null},s.flush=()=>{if(o){clearTimeout(o);let u=n,l=i;o=null,n=null,i=null,e.apply(l,u)}},s},St=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(o){t&&(r?.error?r.error("Could not parse JSON",o):Ar.error("Could not parse JSON",o))}},bt=(e,t)=>{let n=".netlify.app",i="agent-";if(!t)return`${i}${e.slice(0,6)}`;let u=`--${t}${n}`;if(u.length>55)return"";let l=60-u.length;if(l<=0)return"";if(l>=i.length+6){let p=Math.min(l-i.length,e.length);return`${i}${e.slice(0,p)}`}return e.slice(0,l)};var Cr=50*1024,We=(e,t=Cr)=>{if(!e||typeof e!="string"||e.length<=t)return e;let o=e.startsWith("```")?"\n... [truncated]\n```":"... [truncated]";return e.slice(0,t)+o};import{Buffer as At}from"buffer";import Pr from"path";var Nt=_("repo"),Ct=async({config:e,isRetry:t})=>{Nt.info("Getting runner diffs");let r=await $r(),{hasChanges:o}=r,{status:n}=r;if(!o)return{hasChanges:!1};if(!t){let x=Fr(n);await kr(x)}Nt.info("Changes after processing"),await Ve();let i=await Xe(n);await Je(i);let s={stdio:["ignore","pipe","pipe"]},u=await $("git",["diff","--staged"],s),l=String(u.stdout??"");if(o=!!l,!o)return{hasChanges:!1,ignored:i};let p=await $("git",["diff","--staged","--binary"],s),f=String(p.stdout??""),c,m;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await $("git",["commit","-m","Agent runner"]);let x=await $("git",["diff",e.sha,"HEAD"],s);c=String(x.stdout??"");let w=await $("git",["diff",e.sha,"HEAD","--binary"],s),g=String(w.stdout??"");c!==g&&(m=At.from(g).toString("base64"))}let y={hasChanges:!0,diff:l,resultDiff:c,ignored:i};return l!==f&&(y.diffBinary=At.from(f).toString("base64")),m&&(y.resultDiffBinary=m),y},Je=async(e=[])=>{await $("git",["add",".",...e])},Ve=async()=>{let e=await $("git",["status","-s"]);return String(e.stdout??"")},Pt=/.. (.+)?\.log$/,Or=[Pt],$r=async()=>{let e=await Ve();return{hasChanges:(e.trim().length===0?[]:e.split(`
8
- `).filter(o=>Or.some(i=>i instanceof RegExp?i.test(o):o===i)?!1:o[1]?.trim()!=="")).length!==0,status:e}},Ot=async()=>{let{stdout:e}=await $("git",["rev-parse","HEAD"]);return String(e??"").trim()},$t=async()=>{let{stdout:e}=await $("git",["rev-list","--max-parents=0","HEAD"]);return String(e??"").trim()},Xe=async e=>{e||=await Ve();let t=[".netlify","node_modules","dist"],r=[];return e.split(`
9
- `).forEach(o=>{t.forEach(i=>{[`?? ${i}`,`?? ${i}${Pr.sep}`].some(u=>o.startsWith(u))&&r.push(`:!${i}`)});let n=o.match(Pt)?.[1];n&&r.push(`:!${n}.log`)}),r},Ft=async()=>{await $("git",["reset","--hard","HEAD"])},Fr=e=>{let t=e.split(`
10
- `).reduce((r,o)=>{if(!o)return r;let[n,i,,...s]=o,u=s.join(""),l=n.trim(),p=i.trim();return r[u]?r[u].change=p:r[u]={filePath:u,stage:l,change:p},r},{});return Object.values(t)},kr=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push($("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import Lr from"fs/promises";import Ur from"os";import Lt from"path";import ce from"process";import Mr from"readline";import ze from"path";import Dr from"fs/promises";var Ze=_("agent-output-utils");async function ae({initialResult:e,agentName:t,hasError:r}){let o="",n=ze.join(process.cwd(),D,Z);try{let i=await Dr.readFile(n,"utf-8");i&&(o=i,Ze.log(`Pulled result from ${ze.relative(process.cwd(),n)}`))}catch{Ze.log(`No results file found at ${ze.relative(process.cwd(),n)}`)}return o||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function le({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,o=r?.replace(/\s+/g," ").trim().toLowerCase()||"",n="";return o?.includes("ai gateway is not available for your account")||o?.includes("ai gateway is not enabled for your account")?n="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":o?.includes("error when talking to gemini api")?n="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(o?.includes("connection closed prematurely")||o?.includes("499")&&t.toLowerCase().includes("gemini"))&&(n=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),o?.includes("request timed out")&&(n=`The ${t} API request's have timed out. Please try again or use a different available agent.`),o?.includes("network error")&&(n=`The ${t} agent is having network issues. Please try again or use a different available agent.`),o?.includes("503")&&(n=`The ${t} API is currently experiencing high load. 
Retrying automatically...`),n&&Ze.log(`Providing updated error messsage: ${n}, replacing original error: ${r}`),n||r||void 0}function ue(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error")||r?.includes("503"))}var L=_("runner_claude"),kt="Claude Code",de="claude-opus-4-5-20251101",Dt=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,jr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(L.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(L.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(L.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function Qe({config:e,netlify:t,persistSteps:r,aiGateway:o,continueSession:n,priorAgentSessionId:i,cwd:s=ce.cwd()}){let u=e,{accountType:l,prompt:p,modelVersionOverrides:f}=u,{model:c}=u,m="";if(o){let{token:h,url:d}=o;if(!h||!d)throw new Error("No token or url provided from AI Gateway");if(f?.claude){let a=f?.claude?.[l];if(a){if(!await o.isModelAvailableForProvider("anthropic",a))throw new Error(`Model override '${a}' is not available for anthropic provider`);c=a}}else if(c){if(!await o.isModelAvailableForProvider("anthropic",c))throw new Error(`Model '${c}' is not available for anthropic provider`)}else!!de&&await o.isModelAvailableForProvider("anthropic",de)?(c=de,L.log(`Using default model: ${de}`)):de&&L.log(`Default model ${de} is not available, proceeding without model specification`);ce.env.ANTHROPIC_API_KEY=h,ce.env.ANTHROPIC_BASE_URL=d}else if(!ce.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let y=[],x=[],w={},g=0,T=0,R,S,N=[ie(s,"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose","--disallowed-tools","ExitPlanMode,AskUserQuestion",...c?["--model",c]:[],...n?["--continue"]:[],...n&&i?["--resume",i]:[],"-p",p],C=`${ce.env.NVM_BIN}/node`;L.log(`Running ${C} ${N.join(" ")}`);let O=t.utils.run(C,N,{all:!0,env:ce.env,cwd:s,idleTimeout:Q});O.stdin?.end();let I=se(()=>{r?.({steps:y,duration:T})},250),v=(h,d)=>{let a={...h,id:g};g+=1,x.push(a),y.push(a),d||I.flush(),I(),d&&I.flush()},M=Mr.createInterface({input:O.all});return M.on("error",h=>{L.error("Readline interface error",{error:h.message,stack:h.stack})}),M.on("line",h=>{let d=null;try{d=JSON.parse(h)}catch{L.log("Could not parse line",h)}d?.session_id&&d.session_id!==m&&(m=d.session_id),Array.isArray(d?.message?.content)?d.message.content.forEach(a=>{switch(a.type){case"text":{a.text&&v({message:a.text});break}case"image":{typeof 
a.source=="object"&&a.source&&a.source.type==="base64"&&a.source.media_type?v({message:`![](data:${a.source.media_type};base64,${a.source.data})`}):L.log(`Unsupported image type ${a.source?.type}`,a.source);break}case"tool_use":{if(a.name==="Task"){let E=a.input?.description&&`\`${a.input.description}\``;v({title:[Dt(a.name),E].filter(Boolean).join(" ")})}else a.id&&(w[a.id]=a);I.flush();break}case"tool_result":{let E=a.tool_use_id?w[a.tool_use_id]:void 0,V;if(E){let W=E.input?.file_path&&Lt.relative(s,E.input.file_path),P=W&&`\`${W}\``;V=[Dt(E.name||""),P].filter(Boolean).join(" ")}let Ie=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(E?.name||""),X;if(typeof a.content=="string")X=a.content;else if(Array.isArray(a.content)){let W=[];a.content.forEach(P=>{P?.type==="text"&&typeof P.text=="string"?W.push(P.text):P?.type==="image"&&typeof P.source=="object"&&P.source?P.source.type==="base64"&&P.source.media_type?W.push(`![](data:${P.source.media_type};base64,${P.source.data})`):L.log(`Unsupported image type ${P.source.type}`,P.source):L.log(`Unsupported block type ${P?.type}`)}),X=W.join(`
7
+ `);return i.length>e.length*.8?e:i}import Pe from"process";import{getTracer as zr}from"@netlify/otel";import ge from"process";var te=class extends Error{constructor(r,o,n){super(r);this.statusCode=o;this.userMessage=n;this.name="GracefulShutdownError"}},dt=e=>e instanceof te;var ve=ge.env.NETLIFY_API_URL,Re=ge.env.NETLIFY_API_TOKEN,Y=_("api"),Se=()=>ge.env.NETLIFY_LOCAL_MODE==="true",he=async(e,t={})=>{if(!ve||!Re)throw new Error("No API URL or token");let r=new URL(e,ve),o={...t,headers:{...t.headers,Authorization:`Bearer ${Re}`}};ge.env.AGENT_RUNNERS_DEBUG==="true"&&(o.headers["x-nf-debug-logging"]="true"),t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),i=n.ok&&n.status<=299;if(ge.env.AGENT_RUNNERS_DEBUG==="true")Y.log(`Response headers for ${r}:`),n.headers.forEach((u,l)=>{Y.log(` ${l}: ${u}`)});else{let u=n.headers.get("x-request-id")||n.headers.get("x-nf-request-id");Y.log(`Request ID for ${r}: ${u||"N/A"}`)}if(i||Y.error(`Got status ${n.status} for request ${r}`),t.raw){if(!i)throw new Error(`API request failed: ${n.status} ${n.statusText}`);return n}let s=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!i){let u=typeof s=="string"?s:JSON.stringify(s);throw n.status===404?new te(`API request failed: 404 - ${u}`,404,"The site associated with this agent run no longer exists."):n.status===403&&t.gracefulOn403?new te(`API request failed: 403 - ${u}`,403,"Credit limit reached. Please add more credits to continue using Agent Runners."):new Error(`API request failed: ${n.status} - ${u}`)}return s},pt=e=>{Y.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(ve=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(Re=e.constants.NETLIFY_API_TOKEN)},ft=()=>({apiUrl:ve,token:Re}),ye=async(e,t)=>Se()?(Y.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):he(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),B=async(e,t,r)=>Se()?(Y.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):he(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var mt=async(e,t)=>Se()?(Y.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):he(`/api/v1/agent_runners/${e}/sessions/${t}`),gt=(e,t,r)=>he(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r},gracefulOn403:!0}),ht=async(e,t)=>Se()?(Y.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):he(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),je=async(e,t)=>{Y.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var re=_("ai_gateway"),Ge=null;var yt=async()=>{if(Ge)return Ge;re.log("Fetching available AI gateway providers");let e=await fetch(`${ft().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return Ge=t,re.log("Cached AI gateway 
providers",{providerCount:Object.keys(t.providers).length}),t},cr=async(e,t)=>{let o=(await yt()).providers[e];if(!o)return re.log(`Provider '${e}' not found`),!1;let n=o.models.includes(t);return re.log(`Model validation for ${e}/${t}`,{isAvailable:n}),n},_t=async({netlify:e,config:t})=>{let r,o,n,i,s=e.constants?.SITE_ID;if(!s)throw new Error("No site id");let u=async()=>{clearTimeout(n),re.log("Requesting AI gateway information");let l=await gt(s,t.id,t.sessionId);if({token:r,url:i}=l,o=l.expires_at?l.expires_at*1e3:void 0,re.log("Got AI gateway information",{token:!!r,expiresAt:o,url:i}),o){let p=o-Date.now()-6e4;p>0&&(n=setTimeout(()=>{u()},p))}};return await Promise.all([u(),yt()]),{get url(){return i},get token(){return r},isModelAvailableForProvider:cr}};import H from"process";import K from"path";import be from"fs";import{fileURLToPath as yr}from"url";import{createRequire as _r}from"module";import{execa as Er,execaCommand as Vn}from"execa";import{Transform as dr}from"stream";var pr=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),fr=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function mr(){return Object.entries(process.env).filter(([e,t])=>!(!t||pr.has(e)||fr.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function q(e){if(typeof e!="string")return e;let t=mr();if(t.length===0)return e;let r=e;return t.forEach(o=>{let n=new RegExp(gr(o),"g");r=r.replace(n,"******")}),r}function gr(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var ne=class extends dr{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,o){let n=t.toString(),i=q(n);o(null,i)}};function Et(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(o,n,i){let s=typeof o=="string"?q(o):o;return typeof n=="function"?t(s,n):t(s,n,i)},process.stderr.write=function(o,n,i){let s=typeof o=="string"?q(o):o;return typeof n=="function"?r(s,n):r(s,n,i)}}var _e=null,wt=e=>(_e&&_e.destroy(),_e=new z({totalAllowedTime:e}),_e),Tt=()=>_e;var z=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,o)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let n=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),i=null,s=null;o!==void 0&&(s=new Promise((u,l)=>{i=setTimeout(()=>{l(new Error(`${t} stage exceeded its maximum duration of ${o}ms`))},o)}));try{return s?await Promise.race([r(),s]):await r()}finally{n(),i&&clearTimeout(i)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var xt="netlify-agent-runner-context.md",Ye="task-history",Be="netlify-context",D=".netlify",Z="results.md",He="assets";var Q=1800*1e3;var It={name:"@netlify/agent-runner-cli",type:"module",version:"1.60.4",description:"CLI tool for running Netlify agents",main:"./dist/index.js",types:"./dist/index.d.ts",exports:"./dist/index.js",bin:{"agent-runner-cli":"./dist/bin.js","agent-runner-cli-local":"./dist/bin-local.js"},files:["dist/**/*.js","dist/**/*.d.ts","patches","scripts"],scripts:{build:"tsup",dev:"tsup --watch",prepare:"husky install node_modules/@netlify/eslint-config-node/.husky/",prepublishOnly:"npm ci && npm test",prepack:"npm run build",test:"run-s build format test:dev",format:"run-s build format:check-fix:*","format:ci":"run-s build format:check:*","format:check-fix:lint":"run-e format:check:lint format:fix:lint","format:check:lint":"cross-env-shell eslint $npm_package_config_eslint","format:fix:lint":"cross-env-shell eslint --fix $npm_package_config_eslint","format:check-fix:prettier":"run-e format:check:prettier format:fix:prettier","format:check:prettier":"cross-env-shell prettier --check $npm_package_config_prettier","format:fix:prettier":"cross-env-shell prettier --write $npm_package_config_prettier","test:dev":"run-s build test:dev:*","test:ci":"run-s build test:ci:*","test:dev:vitest":"LOG=0 vitest --exclude '**/integration/**'","test:ci:vitest":"LOG=0 c8 -r lcovonly -r text -r json vitest --exclude '**/integration/**'","test:integration":"vitest run test/integration/","test:integration:codex":"vitest run test/integration/codex.test.ts","test:integration:claude":"vitest run test/integration/claude.test.ts","test:integration:gemini":"vitest run test/integration/gemini.test.ts",postinstall:"node scripts/postinstall.js"},config:{eslint:'--cache --format=codeframe --max-warnings=0 "{src,scripts,test,.github}/**/*.{js,ts,md,html}"',prettier:'--ignore-path .gitignore --loglevel=warn 
"{src,scripts,test,.github}/**/*.{js,ts,md,yml,json,html}" "*.{js,ts,yml,json,html}" ".*.{js,ts,yml,json,html}" "!**/package-lock.json" "!package-lock.json"'},keywords:[],license:"MIT",repository:"netlify/agent-runner-cli",bugs:{url:"https://github.com/netlify/agent-runner-cli/issues"},author:"Netlify Inc.",directories:{test:"test"},devDependencies:{"@commitlint/cli":"^20.0.0","@commitlint/config-conventional":"^20.0.0","@eslint/compat":"^2.0.0","@eslint/js":"^9.35.0","@netlify/eslint-config-node":"^7.0.1","@types/node":"^24.5.0","@typescript-eslint/eslint-plugin":"^8.0.0","@typescript-eslint/parser":"^8.0.0","@vitest/eslint-plugin":"^1.6.6",c8:"^10.0.0","eslint-config-prettier":"^10.1.8","eslint-plugin-n":"^17.0.0",husky:"^9.0.0","patch-package":"^8.0.0",tsup:"^8.5.0",typescript:"^5.0.0","typescript-eslint":"^8.44.0",vitest:"^4.0.16"},dependencies:{"@anthropic-ai/claude-code":"2.1.14","@google/gemini-cli":"0.25.0","@netlify/otel":"^5.1.1","@openai/codex":"0.88.0","@opentelemetry/exporter-trace-otlp-grpc":"^0.208.0",execa:"^9.6.1",minimist:"^1.2.8"}};var wr=yr(import.meta.url),Tr=K.dirname(wr),xr=_r(import.meta.url),oe=_("shell"),qe=new Set,Ir={preferLocal:!0},O=(e,t,r)=>{let[o,n]=vr(t,r),i={...Ir,...n},s=Er(e,o,i);Rr(s,i),br(s);let u=r?.idleTimeout;return u&&u>0&&Sr(s,u),s};var vr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},Rr=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(H.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new ne).pipe(H.stdout),e.stdout?.pipe(new ne).pipe(H.stdout),e.stderr?.pipe(new ne).pipe(H.stderr);return}e.stdout?.pipe(H.stdout),e.stderr?.pipe(H.stderr)},Ke=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(H.kill(-e.pid,t),oe.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return oe.error("Error killing process:",r),!1}},vt=e=>Ke(e,"SIGKILL"),Sr=(e,t)=>{let r=null,o=()=>{oe.log(`Process ${e.pid} killed due to idle timeout (no output for ${t}ms)`),Ke(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(oe.log(`Force killing idle process ${e.pid}`),vt(e))},5e3)},n=()=>{r&&clearTimeout(r),r=setTimeout(o,t)};n(),e.stdout?.on("data",n),e.stderr?.on("data",n);let i=()=>{r&&(clearTimeout(r),r=null)};e.on("exit",i),e.on("error",i)},br=e=>{qe.add(e);let t=Tt();if(t){let r=t.onTimesUp(()=>{oe.log(`Global timer expired, killing process ${e.pid}`),Ke(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(oe.log(`Force killing process ${e.pid} after timeout`),vt(e))},5e3)});e.on("exit",()=>{qe.delete(e),r()}),e.on("error",()=>{qe.delete(e),r()})}};function ie(e,t){if(!H.env.NETLIFY_LOCAL_MODE)try{let n=xr.resolve(It.name),i=K.dirname(n);for(;i!==K.dirname(i);){let s=K.dirname(i);if(K.basename(s)==="node_modules"){let u=K.join(s,".bin",t);if(be.existsSync(u))return u;break}i=s}}catch(n){console.error("Could not resolve package.json",n)}if(H.env.NODE_PATH){let n=K.join(H.env.NODE_PATH,".bin",t);if(be.existsSync(n))return n}let r=K.join(e,"node_modules",".bin",t);if(be.existsSync(r))return r;let o=K.join(Tr,"..","node_modules",".bin",t);if(be.existsSync(o))return o}var Ar=_("utils"),Nr=e=>new Promise(t=>{setTimeout(t,e)}),Rt=(e,t=3e3)=>{let r=!1,o=null,n=[],i=null,s=(...u)=>{if(r)return o=u,new Promise(f=>{n.push(f)});r=!0;let l,p=new Promise(f=>{l=f});return i=(async()=>{await Promise.resolve();let f=await e(...u);for(l(f);;){if(await Nr(t),!o)return r=!1,i=null,f;let c=o,m=n;o=null,n=[],f=await e(...c),m.forEach(y=>{y(f)})}})(),p};return s.flush=async()=>{if((r||o)&&i)return await 
i,s.flush()},s},se=(e,t,r=!1)=>{let o=null,n=null,i=null,s=function(...u){n=u,i=this;let l=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(i,n),n=null,i=null)},t),l&&(e.apply(i,n),n=null,i=null)};return s.cancel=()=>{clearTimeout(o),o=null,n=null,i=null},s.flush=()=>{if(o){clearTimeout(o);let u=n,l=i;o=null,n=null,i=null,e.apply(l,u)}},s},St=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(o){t&&(r?.error?r.error("Could not parse JSON",o):Ar.error("Could not parse JSON",o))}},bt=(e,t)=>{let n=".netlify.app",i="agent-";if(!t)return`${i}${e.slice(0,6)}`;let u=`--${t}${n}`;if(u.length>55)return"";let l=60-u.length;if(l<=0)return"";if(l>=i.length+6){let p=Math.min(l-i.length,e.length);return`${i}${e.slice(0,p)}`}return e.slice(0,l)};var Cr=50*1024,We=(e,t=Cr)=>{if(!e||typeof e!="string"||e.length<=t)return e;let o=e.startsWith("```")?"\n... [truncated]\n```":"... [truncated]";return e.slice(0,t)+o};import{Buffer as At}from"buffer";import Pr from"path";var Nt=_("repo"),Ct=async({config:e,isRetry:t})=>{Nt.info("Getting runner diffs");let r=await Or(),{hasChanges:o}=r,{status:n}=r;if(!o)return{hasChanges:!1};if(!t){let x=Fr(n);await kr(x)}Nt.info("Changes after processing"),await Ve();let i=await Xe(n);await Je(i);let s={stdio:["ignore","pipe","pipe"]},u=await O("git",["diff","--staged"],s),l=String(u.stdout??"");if(o=!!l,!o)return{hasChanges:!1,ignored:i};let p=await O("git",["diff","--staged","--binary"],s),f=String(p.stdout??""),c,m;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await O("git",["commit","-m","Agent runner"]);let x=await O("git",["diff",e.sha,"HEAD"],s);c=String(x.stdout??"");let w=await O("git",["diff",e.sha,"HEAD","--binary"],s),g=String(w.stdout??"");c!==g&&(m=At.from(g).toString("base64"))}let y={hasChanges:!0,diff:l,resultDiff:c,ignored:i};return l!==f&&(y.diffBinary=At.from(f).toString("base64")),m&&(y.resultDiffBinary=m),y},Je=async(e=[])=>{await O("git",["add",".",...e])},Ve=async()=>{let e=await O("git",["status","-s"]);return String(e.stdout??"")},Pt=/.. (.+)?\.log$/,$r=[Pt],Or=async()=>{let e=await Ve();return{hasChanges:(e.trim().length===0?[]:e.split(`
8
+ `).filter(o=>$r.some(i=>i instanceof RegExp?i.test(o):o===i)?!1:o[1]?.trim()!=="")).length!==0,status:e}},$t=async()=>{let{stdout:e}=await O("git",["rev-parse","HEAD"]);return String(e??"").trim()},Ot=async()=>{let{stdout:e}=await O("git",["rev-list","--max-parents=0","HEAD"]);return String(e??"").trim()},Xe=async e=>{e||=await Ve();let t=[".netlify","node_modules","dist"],r=[];return e.split(`
9
+ `).forEach(o=>{t.forEach(i=>{[`?? ${i}`,`?? ${i}${Pr.sep}`].some(u=>o.startsWith(u))&&r.push(`:!${i}`)});let n=o.match(Pt)?.[1];n&&r.push(`:!${n}.log`)}),r},Ft=async()=>{await O("git",["reset","--hard","HEAD"])},Fr=e=>{let t=e.split(`
10
+ `).reduce((r,o)=>{if(!o)return r;let[n,i,,...s]=o,u=s.join(""),l=n.trim(),p=i.trim();return r[u]?r[u].change=p:r[u]={filePath:u,stage:l,change:p},r},{});return Object.values(t)},kr=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push(O("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import Lr from"fs/promises";import Ur from"os";import Lt from"path";import ce from"process";import Mr from"readline";import ze from"path";import Dr from"fs/promises";var Ze=_("agent-output-utils");async function ae({initialResult:e,agentName:t,hasError:r}){let o="",n=ze.join(process.cwd(),D,Z);try{let i=await Dr.readFile(n,"utf-8");i&&(o=i,Ze.log(`Pulled result from ${ze.relative(process.cwd(),n)}`))}catch{Ze.log(`No results file found at ${ze.relative(process.cwd(),n)}`)}return o||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function le({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,o=r?.replace(/\s+/g," ").trim().toLowerCase()||"",n="";return o?.includes("ai gateway is not available for your account")||o?.includes("ai gateway is not enabled for your account")?n="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":o?.includes("error when talking to gemini api")?n="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(o?.includes("connection closed prematurely")||o?.includes("499")&&t.toLowerCase().includes("gemini"))&&(n=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),o?.includes("request timed out")&&(n=`The ${t} API request's have timed out. Please try again or use a different available agent.`),o?.includes("network error")&&(n=`The ${t} agent is having network issues. Please try again or use a different available agent.`),o?.includes("503")&&(n=`The ${t} API is currently experiencing high load. 
Retrying automatically...`),n&&Ze.log(`Providing updated error messsage: ${n}, replacing original error: ${r}`),n||r||void 0}function ue(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error")||r?.includes("503"))}var L=_("runner_claude"),kt="Claude Code",de="claude-opus-4-5-20251101",Dt=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,jr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(L.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(L.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(L.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function Qe({config:e,netlify:t,persistSteps:r,aiGateway:o,continueSession:n,priorAgentSessionId:i,cwd:s=ce.cwd()}){let u=e,{accountType:l,prompt:p,modelVersionOverrides:f}=u,{model:c}=u,m="";if(o){let{token:h,url:d}=o;if(!h||!d)throw new Error("No token or url provided from AI Gateway");if(f?.claude){let a=f?.claude?.[l];if(a){if(!await o.isModelAvailableForProvider("anthropic",a))throw new Error(`Model override '${a}' is not available for anthropic provider`);c=a}}else if(c){if(!await o.isModelAvailableForProvider("anthropic",c))throw new Error(`Model '${c}' is not available for anthropic provider`)}else!!de&&await o.isModelAvailableForProvider("anthropic",de)?(c=de,L.log(`Using default model: ${de}`)):de&&L.log(`Default model ${de} is not available, proceeding without model specification`);ce.env.ANTHROPIC_API_KEY=h,ce.env.ANTHROPIC_BASE_URL=d}else if(!ce.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let y=[],x=[],w={},g=0,T=0,R,S,N=[ie(s,"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose","--disallowed-tools","ExitPlanMode,AskUserQuestion",...c?["--model",c]:[],...n?["--continue"]:[],...n&&i?["--resume",i]:[],"-p",p],C=`${ce.env.NVM_BIN}/node`;L.log(`Running ${C} ${N.join(" ")}`);let $=t.utils.run(C,N,{all:!0,env:ce.env,cwd:s,idleTimeout:Q});$.stdin?.end();let I=se(()=>{r?.({steps:y,duration:T})},250),v=(h,d)=>{let a={...h,id:g};g+=1,x.push(a),y.push(a),d||I.flush(),I(),d&&I.flush()},M=Mr.createInterface({input:$.all});return M.on("error",h=>{L.error("Readline interface error",{error:h.message,stack:h.stack})}),M.on("line",h=>{let d=null;try{d=JSON.parse(h)}catch{L.log("Could not parse line",h)}d?.session_id&&d.session_id!==m&&(m=d.session_id),Array.isArray(d?.message?.content)?d.message.content.forEach(a=>{switch(a.type){case"text":{a.text&&v({message:a.text});break}case"image":{typeof 
a.source=="object"&&a.source&&a.source.type==="base64"&&a.source.media_type?v({message:`![](data:${a.source.media_type};base64,${a.source.data})`}):L.log(`Unsupported image type ${a.source?.type}`,a.source);break}case"tool_use":{if(a.name==="Task"){let E=a.input?.description&&`\`${a.input.description}\``;v({title:[Dt(a.name),E].filter(Boolean).join(" ")})}else a.id&&(w[a.id]=a);I.flush();break}case"tool_result":{let E=a.tool_use_id?w[a.tool_use_id]:void 0,V;if(E){let W=E.input?.file_path&&Lt.relative(s,E.input.file_path),P=W&&`\`${W}\``;V=[Dt(E.name||""),P].filter(Boolean).join(" ")}let Ie=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(E?.name||""),X;if(typeof a.content=="string")X=a.content;else if(Array.isArray(a.content)){let W=[];a.content.forEach(P=>{P?.type==="text"&&typeof P.text=="string"?W.push(P.text):P?.type==="image"&&typeof P.source=="object"&&P.source?P.source.type==="base64"&&P.source.media_type?W.push(`![](data:${P.source.media_type};base64,${P.source.data})`):L.log(`Unsupported image type ${P.source.type}`,P.source):L.log(`Unsupported block type ${P?.type}`)}),X=W.join(`
11
11
 
12
12
  `)}Ie&&X&&(X=`\`\`\`
13
13
  ${X.trim()}
14
- \`\`\``),v({title:V,message:X},!0);break}case"thinking":{a.thinking&&v({title:"Thinking",message:a.thinking},!0);break}default:L.log(`Message content type is not supported ${a.type}`,a)}}):d?.type==="result"&&(T=d.duration_ms||0,d.is_error?S=d.result:R=d.result,[x,y].forEach(a=>{a[a.length-1]?.message===R&&a.pop()}))}),await O.catch(h=>{({error:S,result:R}=jr({catchError:h,runCmd:O,error:S,result:R,runnerName:"Claude"}))}),M.close(),I.flush(),{steps:x,duration:T,result:await ae({initialResult:R,agentName:kt,hasError:!!S}),error:le({error:S,agentName:kt}),isRetryableError:ue(S),agentSessionId:m}}var Ut=async()=>{let e=Lt.join(Ur.homedir(),".claude");await Lr.rm(e,{recursive:!0,force:!0})};import Ee from"fs/promises";import jt from"os";import Ae from"path";import ee from"process";import Gr from"readline";var U=_("runner_codex"),Mt="Codex CLI",pe="",Yr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(U.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(U.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(U.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function et({config:e,netlify:t,persistSteps:r=void 0,sendSteps:o=void 0,aiGateway:n,cwd:i=ee.cwd()}){let{accountType:s,prompt:u,modelVersionOverrides:l}=e,{model:p}=e;if(n){let{token:d,url:a}=n;if(!d||!a)throw new Error("No token or url provided from AI Gateway");if(l?.codex){let E=l?.codex?.[s];if(E){if(!await n.isModelAvailableForProvider("openai",E))throw new Error(`Model override '${E}' is not available for openai provider`);p=E}}else if(p){if(!await n.isModelAvailableForProvider("openai",p))throw new Error(`Model '${p}' is not available for openai provider`)}else!!pe&&await n.isModelAvailableForProvider("openai",pe)?(p=pe,U.log(`Using default model: ${pe}`)):pe&&U.log(`Default model ${pe} is not available, proceeding without model specification`);ee.env.OPENAI_API_KEY=d,ee.env.OPENAI_BASE_URL=a}else if(!ee.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let f=[],c=[],m=[],y={},x=0,w=0,g,T,R=`${ee.env.NVM_BIN}/node`,S=Ae.join(jt.homedir(),".codex"),N=Ae.join(S,"config.toml"),C=Ae.join(S,"auth.json");try{await Ee.mkdir(S,{recursive:!0});let d={OPENAI_API_KEY:ee.env.OPENAI_API_KEY};await Ee.writeFile(C,JSON.stringify(d,null,2),"utf-8"),U.log("Created Codex auth.json file");let a="";try{a=await Ee.readFile(N,"utf-8")}catch{}a.includes("web_search_request")||(a.includes("[features]")?a=a.replace(/\[features\]/,`[features]
14
+ \`\`\``),v({title:V,message:X},!0);break}case"thinking":{a.thinking&&v({title:"Thinking",message:a.thinking},!0);break}default:L.log(`Message content type is not supported ${a.type}`,a)}}):d?.type==="result"&&(T=d.duration_ms||0,d.is_error?S=d.result:R=d.result,[x,y].forEach(a=>{a[a.length-1]?.message===R&&a.pop()}))}),await $.catch(h=>{({error:S,result:R}=jr({catchError:h,runCmd:$,error:S,result:R,runnerName:"Claude"}))}),M.close(),I.flush(),{steps:x,duration:T,result:await ae({initialResult:R,agentName:kt,hasError:!!S}),error:le({error:S,agentName:kt}),isRetryableError:ue(S),agentSessionId:m}}var Ut=async()=>{let e=Lt.join(Ur.homedir(),".claude");await Lr.rm(e,{recursive:!0,force:!0})};import Ee from"fs/promises";import jt from"os";import Ae from"path";import ee from"process";import Gr from"readline";var U=_("runner_codex"),Mt="Codex CLI",pe="",Yr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(U.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(U.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(U.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function et({config:e,netlify:t,persistSteps:r=void 0,sendSteps:o=void 0,aiGateway:n,cwd:i=ee.cwd()}){let{accountType:s,prompt:u,modelVersionOverrides:l}=e,{model:p}=e;if(n){let{token:d,url:a}=n;if(!d||!a)throw new Error("No token or url provided from AI Gateway");if(l?.codex){let E=l?.codex?.[s];if(E){if(!await n.isModelAvailableForProvider("openai",E))throw new Error(`Model override '${E}' is not available for openai provider`);p=E}}else if(p){if(!await n.isModelAvailableForProvider("openai",p))throw new Error(`Model '${p}' is not available for openai provider`)}else!!pe&&await n.isModelAvailableForProvider("openai",pe)?(p=pe,U.log(`Using default model: ${pe}`)):pe&&U.log(`Default model ${pe} is not available, proceeding without model specification`);ee.env.OPENAI_API_KEY=d,ee.env.OPENAI_BASE_URL=a}else if(!ee.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let f=[],c=[],m=[],y={},x=0,w=0,g,T,R=`${ee.env.NVM_BIN}/node`,S=Ae.join(jt.homedir(),".codex"),N=Ae.join(S,"config.toml"),C=Ae.join(S,"auth.json");try{await Ee.mkdir(S,{recursive:!0});let d={OPENAI_API_KEY:ee.env.OPENAI_API_KEY};await Ee.writeFile(C,JSON.stringify(d,null,2),"utf-8"),U.log("Created Codex auth.json file");let a="";try{a=await Ee.readFile(N,"utf-8")}catch{}a.includes("web_search_request")||(a.includes("[features]")?a=a.replace(/\[features\]/,`[features]
15
15
  web_search_request = true`):a+=`
16
16
  [features]
17
17
  web_search_request = true
18
- `,await Ee.writeFile(N,a,"utf-8"),U.log("Updated Codex config with web_search_request enabled"))}catch(d){throw U.warn("Failed to setup Codex config and credentials",{error:d.message}),new Error(`Codex setup failed: ${d.message}`)}let O=[ie(i,"codex"),"exec","--yolo","--json","--enable","web_search_request",...p?["--model",p]:[],u].filter(Boolean);U.log(`Running ${R} ${O.join(" ")}`);let I=t.utils.run(R,O,{all:!0,cwd:i,env:{...ee.env},idleTimeout:Q}),v=se(()=>{r?.({steps:f,duration:w}),o?.({steps:c,duration:w}),c=[]},250),M=(d,a)=>{d.id=x,x+=1,m.push(d),f.push(d),c.push(d),a||v.flush(),v(),a&&v.flush()},h=Gr.createInterface({input:I.all});return h.on("error",d=>{U.error("Readline interface error",{error:d.message,stack:d.stack})}),h.on("line",d=>{let a=null;try{a=JSON.parse(d)}catch{U.log("Could not parse line",d);return}if(a?.duration_ms&&(w=a.duration_ms),a?.type==="item.started"&&a?.item?.type==="command_execution")y[a.item.id]=a.item;else if(a?.type==="item.completed"&&a?.item?.type==="command_execution"){let E=Hr(a.item);E&&M(E,!0)}else if(a?.type==="item.completed"&&a?.item?.type==="reasoning"){let E={title:"Reasoning",message:a.item.text};M(E,!0)}else if(a?.type==="local_shell_call")y[a.call_id]=a;else if(a?.type==="local_shell_call_output"){let E=qr(y[a.call_id],a);E&&M(E,!0)}else a?.type==="message"&&a.role==="assistant"?g=a.content.map(E=>E.text).join(`
18
+ `,await Ee.writeFile(N,a,"utf-8"),U.log("Updated Codex config with web_search_request enabled"))}catch(d){throw U.warn("Failed to setup Codex config and credentials",{error:d.message}),new Error(`Codex setup failed: ${d.message}`)}let $=[ie(i,"codex"),"exec","--yolo","--json","--enable","web_search_request",...p?["--model",p]:[],u].filter(Boolean);U.log(`Running ${R} ${$.join(" ")}`);let I=t.utils.run(R,$,{all:!0,cwd:i,env:{...ee.env},idleTimeout:Q}),v=se(()=>{r?.({steps:f,duration:w}),o?.({steps:c,duration:w}),c=[]},250),M=(d,a)=>{d.id=x,x+=1,m.push(d),f.push(d),c.push(d),a||v.flush(),v(),a&&v.flush()},h=Gr.createInterface({input:I.all});return h.on("error",d=>{U.error("Readline interface error",{error:d.message,stack:d.stack})}),h.on("line",d=>{let a=null;try{a=JSON.parse(d)}catch{U.log("Could not parse line",d);return}if(a?.duration_ms&&(w=a.duration_ms),a?.type==="item.started"&&a?.item?.type==="command_execution")y[a.item.id]=a.item;else if(a?.type==="item.completed"&&a?.item?.type==="command_execution"){let E=Hr(a.item);E&&M(E,!0)}else if(a?.type==="item.completed"&&a?.item?.type==="reasoning"){let E={title:"Reasoning",message:a.item.text};M(E,!0)}else if(a?.type==="local_shell_call")y[a.call_id]=a;else if(a?.type==="local_shell_call_output"){let E=qr(y[a.call_id],a);E&&M(E,!0)}else a?.type==="message"&&a.role==="assistant"?g=a.content.map(E=>E.text).join(`
19
19
  `):a?.type==="message"&&a.role==="system"&&(T=a.content.map(E=>E.text).join(`
20
20
  `))}),await I.catch(d=>{let a=Yr({catchError:d,runCmd:I,error:T,result:g,runnerName:"Codex"});T=a.error,g=a.result}),h.close(),v.flush(),{steps:m,duration:w,result:await ae({initialResult:g,agentName:Mt,hasError:!!T}),error:le({error:T,agentName:Mt}),isRetryableError:ue(T)}}var Gt=async()=>{let e=Ae.join(jt.homedir(),".codex");await Ee.rm(e,{recursive:!0,force:!0})},Br=new Set(["bash","-lc"]),Hr=e=>{if(!e||e.type!=="command_execution")return null;let t=e.command;t.startsWith("bash -lc ")&&(t=t.replace(/^bash -lc ['"]/,"").replace(/['"]$/,""));let r=`Running \`${t}\``,o=e.aggregated_output?.trim();return o&&(o=`\`\`\`
21
21
  ${o}
@@ -23,9 +23,9 @@ ${o}
23
23
 
24
24
  *Exit code: ${e.exit_code}*`:`*Command failed with exit code: ${e.exit_code}*`),{title:r,message:o}},qr=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(i=>!Br.has(i)),o=r?`Running \`${r.join(" ")}\``:void 0,n;try{n=JSON.parse(t.output).output?.trim(),n&&(n=`\`\`\`
25
25
  ${n.trim()}
26
- \`\`\``)}catch(i){U.error("Could not decode outputMsg",i,t.output)}return{title:o,message:n}};import Ne from"fs/promises";import Bt from"os";import Ce from"path";import fe from"process";import Kr from"readline";var j=_("runner_gemini"),Yt="Gemini CLI",me="",Wr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(j.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(j.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(j.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),Jr={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"},Vr=async()=>{let e=Ce.join(Bt.homedir(),".gemini"),t=Ce.join(e,"settings.json");try{await Ne.mkdir(e,{recursive:!0});let r={};try{let o=await Ne.readFile(t,"utf-8");r=JSON.parse(o)}catch{j.log("Creating new Gemini CLI settings file")}r.general||(r.general={}),r.general.previewFeatures||(r.general.previewFeatures=!0),r.model||(r.model={}),r.model.compressionThreshold!==.3&&(r.model.compressionThreshold=.3),await Ne.writeFile(t,JSON.stringify(r,null,2),"utf-8"),j.log("Configured Gemini CLI settings (preview features and compression threshold)")}catch(r){j.error("Failed to ensure Gemini CLI settings",{error:r.message})}};async function tt({config:e,netlify:t,persistSteps:r=void 0,sendSteps:o=void 0,aiGateway:n,cwd:i=fe.cwd()}){let{accountType:s,prompt:u,modelVersionOverrides:l}=e,{model:p}=e;if(await Vr(),n){let{token:h,url:d}=n;if(!h||!d)throw new Error("No token or url provided from AI Gateway");if(l?.gemini){let a=l?.gemini?.[s];if(a){if(!await n.isModelAvailableForProvider("gemini",a))throw new Error(`Model override '${a}' is not available for gemini provider`);p=a}}if(!p)!!me&&await n.isModelAvailableForProvider("gemini",me)?(p=me,j.log(`Using default model: ${me}`)):me&&j.log(`Default model ${me} is not available, proceeding without model specification`);else if(p&&!l?.gemini?.[s]&&!await n.isModelAvailableForProvider("gemini",p))throw new Error(`Model '${p}' is not available for gemini provider`);fe.env.GEMINI_API_KEY=h,fe.env.GOOGLE_GEMINI_BASE_URL=d}else if(!fe.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let f=[],c=[],m=[],y={},x=0,w=0,g,T,R=[ie(i,"gemini"),...p?["--model",p]:[],"--yolo","--output-format","stream-json","-p",u],S=`${fe.env.NVM_BIN}/node`;j.log(`Running ${S} ${R.join(" ")}`);let N=t.utils.run(S,R,{all:!0,env:fe.env,cwd:i,idleTimeout:Q});N.stdin?.end();let C=se(()=>{r?.({steps:f,duration:w}),o?.({steps:c,duration:w}),c=[]},250),O=(h,d)=>{h.id=x,x+=1,m.push(h),f.push(h),c.push(h),d||C.flush(),C(),d&&C.flush()},I=Kr.createInterface({input:N.all});I.on("error",h=>{j.error("Readline interface error",{error:h.message,stack:h.stack})});let v="",M=()=>{v&&O({message:v.trim()}),v=""};return I.on("line",h=>{let d=null;try{if(h.startsWith("[API Error")){let a=h.match(/\[api error: (.+?)]$/i)?.[1];d={type:"error",value:St(a,!1)?.error?.message||a||"Gemini encountered error"}}else 
d=JSON.parse(h)}catch{return}if(d)switch(["message","result"].includes(d.type)||M(),d.type){case"message":{d.role!=="user"&&d.content&&(v+=d.content);break}case"tool_use":{let a=Jr[d.tool_name]??d.tool_name,E=d.parameters?.file_path,V=E&&Ce.relative(i,E),Ie=d.parameters?.command,W={title:[a,V&&`\`${V}\``,Ie&&`\`${Ie}\``].filter(Boolean).join(" ")};y[d.tool_id]=W,C.flush();break}case"tool_result":{let a=y[d.tool_id];a&&(d.output&&(a.message=`\`\`\`
26
+ \`\`\``)}catch(i){U.error("Could not decode outputMsg",i,t.output)}return{title:o,message:n}};import Ne from"fs/promises";import Bt from"os";import Ce from"path";import fe from"process";import Kr from"readline";var j=_("runner_gemini"),Yt="Gemini CLI",me="",Wr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(j.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(j.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(j.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),Jr={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"},Vr=async()=>{let e=Ce.join(Bt.homedir(),".gemini"),t=Ce.join(e,"settings.json");try{await Ne.mkdir(e,{recursive:!0});let r={};try{let o=await Ne.readFile(t,"utf-8");r=JSON.parse(o)}catch{j.log("Creating new Gemini CLI settings file")}r.general||(r.general={}),r.general.previewFeatures||(r.general.previewFeatures=!0),r.model||(r.model={}),r.model.compressionThreshold!==.3&&(r.model.compressionThreshold=.3),await Ne.writeFile(t,JSON.stringify(r,null,2),"utf-8"),j.log("Configured Gemini CLI settings (preview features and compression threshold)")}catch(r){j.error("Failed to ensure Gemini CLI settings",{error:r.message})}};async function tt({config:e,netlify:t,persistSteps:r=void 0,sendSteps:o=void 0,aiGateway:n,cwd:i=fe.cwd()}){let{accountType:s,prompt:u,modelVersionOverrides:l}=e,{model:p}=e;if(await Vr(),n){let{token:h,url:d}=n;if(!h||!d)throw new Error("No token or url provided from AI Gateway");if(l?.gemini){let a=l?.gemini?.[s];if(a){if(!await n.isModelAvailableForProvider("gemini",a))throw new Error(`Model override '${a}' is not available for gemini provider`);p=a}}if(!p)!!me&&await n.isModelAvailableForProvider("gemini",me)?(p=me,j.log(`Using default model: ${me}`)):me&&j.log(`Default model ${me} is not available, proceeding without model specification`);else if(p&&!l?.gemini?.[s]&&!await n.isModelAvailableForProvider("gemini",p))throw new Error(`Model '${p}' is not available for gemini provider`);fe.env.GEMINI_API_KEY=h,fe.env.GOOGLE_GEMINI_BASE_URL=d}else if(!fe.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let f=[],c=[],m=[],y={},x=0,w=0,g,T,R=[ie(i,"gemini"),...p?["--model",p]:[],"--yolo","--output-format","stream-json","-p",u],S=`${fe.env.NVM_BIN}/node`;j.log(`Running ${S} ${R.join(" ")}`);let N=t.utils.run(S,R,{all:!0,env:fe.env,cwd:i,idleTimeout:Q});N.stdin?.end();let C=se(()=>{r?.({steps:f,duration:w}),o?.({steps:c,duration:w}),c=[]},250),$=(h,d)=>{h.id=x,x+=1,m.push(h),f.push(h),c.push(h),d||C.flush(),C(),d&&C.flush()},I=Kr.createInterface({input:N.all});I.on("error",h=>{j.error("Readline interface error",{error:h.message,stack:h.stack})});let v="",M=()=>{v&&$({message:v.trim()}),v=""};return I.on("line",h=>{let d=null;try{if(h.startsWith("[API Error")){let a=h.match(/\[api error: (.+?)]$/i)?.[1];d={type:"error",value:St(a,!1)?.error?.message||a||"Gemini encountered error"}}else 
d=JSON.parse(h)}catch{return}if(d)switch(["message","result"].includes(d.type)||M(),d.type){case"message":{d.role!=="user"&&d.content&&(v+=d.content);break}case"tool_use":{let a=Jr[d.tool_name]??d.tool_name,E=d.parameters?.file_path,V=E&&Ce.relative(i,E),Ie=d.parameters?.command,W={title:[a,V&&`\`${V}\``,Ie&&`\`${Ie}\``].filter(Boolean).join(" ")};y[d.tool_id]=W,C.flush();break}case"tool_result":{let a=y[d.tool_id];a&&(d.output&&(a.message=`\`\`\`
27
27
  ${d.output.trim()}
28
- \`\`\``),O(a,!0));break}case"result":{w=d.stats?.duration_ms,d.status==="error"?T=d.error?.message:g=v.trim();break}case"error":{T=d.error;break}case"finished":break;default:{j.warn("Unhandled message type:",d.type);break}}}),await N.catch(h=>{({error:T,result:g}=Wr({catchError:h,runCmd:N,error:T,result:g,runnerName:"Gemini"}))}),I.close(),C.flush(),{steps:m,duration:w,result:await ae({initialResult:g,agentName:Yt,hasError:!!T}),error:le({error:T,agentName:Yt}),isRetryableError:ue(T)}}var Ht=async()=>{let e=Ce.join(Bt.homedir(),".gemini");await Ne.rm(e,{recursive:!0,force:!0})};var Xr={codex:{runner:et,clean:Gt},claude:{runner:Qe,clean:Ut},gemini:{runner:tt,clean:Ht}},qt=Xr;var Kt=_("init_stage"),Wt=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:o})=>await b(zr(),"init-stage",async n=>{let i=performance.now();n?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.runnerVersion":o||"unknown"});let s=qt[e.runner];if(!s)throw n?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let u=Zr({apiToken:r});pt(u);let l=e.useGateway?await _t({netlify:u,config:e}):void 0;n?.setAttributes({"init.aiGateway.created":!!l});let p=5*1024,f=Rt(async({steps:x=[],duration:w})=>{let g=x.map(T=>{let R=T.title?We(q(T.title),p):void 0,S=T.message?We(q(T.message)):void 0;return{...T,title:R,message:S}});x.length=0;try{return await B(e.id,e.sessionId,{steps:g,duration:w})}catch(T){Kt.error("persistSteps failed",{error:T?.message||T})}},t);Kt.info("Adding build files to stage");let c=await Xe();await Je(c);let m;e.hasRepo?e.sha?(m=e.sha,n?.setAttributes({"init.sha.source":"provided"})):(m=await Ot(),await ye(e.id,{sha:m}),n?.setAttributes({"init.sha.source":"current_commit"})):(m=await $t(),n?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let y=performance.now()-i;return n?.setAttributes({"init.sha":m||"unknown","init.duration.ms":y,"init.status":"success"}),{aiGateway:l,context:u,persistSteps:f,runner:s,sha:m}}),Zr=({apiToken:e})=>({constants:{NETLIFY_API_HOST:Pe.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||Pe.env.NETLIFY_API_TOKEN,SITE_ID:Pe.env.SITE_ID,FUNCTIONS_DIST:Pe.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:$}});import{getTracer as rt}from"@netlify/otel";import Qr from"crypto";import J from"fs/promises";import k from"path";import G from"process";var F=_("context"),en=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:G.env.NETLIFY_TEAM_ID,userId:G.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:G.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},tn=10,rn=async e=>{let{name:t,ext:r}=k.parse(e),o=e,n=k.join(G.cwd(),D,o),i=0;for(;await nn(n);){if(i>=tn)throw new Error("Failed to generate context file");o=`${t}-${Qr.randomUUID().slice(0,5)}${r}`,n=k.join(G.cwd(),D,o),i+=1}return o},nn=async e=>{try{return await J.access(e),!0}catch{return!1}},on=async()=>{try{F.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return F.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(o=>o&&typeof o=="object"&&o.key==="catchall-consumer");return r?!r.contextScopes||typeof 
r.contextScopes!="object"?(F.warn("Catchall consumer missing or invalid contextScopes"),null):r:(F.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?F.warn("Netlify features context request timed out"):F.warn("Failed to fetch Netlify features context:",e.message),null}},sn=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let o=await r.text();return await J.writeFile(t,o,"utf-8"),!0}catch(r){return r.name==="AbortError"?F.warn(`Download timeout for ${e}`):F.warn(`Failed to download context file ${e}:`,r.message),!1}},Oe=null,an=async()=>{if(Oe)return Oe;let e=await on();if(!e)return[];let t=k.join(G.cwd(),D,Be);await J.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([n,i])=>{if(!i||typeof i!="object"||!i.endpoint||!i.scope)return F.warn(`Invalid scope data for ${n}, skipping...`),null;let s=`${n}.md`,u=k.join(t,s),l=k.join(D,Be,s);return F.log(`Downloading ${i.scope} context...`),await sn(i.endpoint,u)?(F.log(`Downloaded: ${l}`),{scope:i.scope,path:l,key:n}):null});return Oe=(await Promise.all(r)).filter(n=>n!==null),Oe},Jt=async({cliPath:e,netlify:t,config:r,buildErrorContext:o})=>{let n=en(t),i=await rn(xt),s=k.join(G.cwd(),D);await J.mkdir(s,{recursive:!0});let u=k.join(D,i),l=k.join(G.cwd(),u),p=k.join(G.cwd(),D,Z);try{await J.unlink(p),F.log(`Deleted old results file: ${p}`)}catch{}let f=o?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
28
+ \`\`\``),$(a,!0));break}case"result":{w=d.stats?.duration_ms,d.status==="error"?T=d.error?.message:g=v.trim();break}case"error":{T=d.error;break}case"finished":break;default:{j.warn("Unhandled message type:",d.type);break}}}),await N.catch(h=>{({error:T,result:g}=Wr({catchError:h,runCmd:N,error:T,result:g,runnerName:"Gemini"}))}),I.close(),C.flush(),{steps:m,duration:w,result:await ae({initialResult:g,agentName:Yt,hasError:!!T}),error:le({error:T,agentName:Yt}),isRetryableError:ue(T)}}var Ht=async()=>{let e=Ce.join(Bt.homedir(),".gemini");await Ne.rm(e,{recursive:!0,force:!0})};var Xr={codex:{runner:et,clean:Gt},claude:{runner:Qe,clean:Ut},gemini:{runner:tt,clean:Ht}},qt=Xr;var Kt=_("init_stage"),Wt=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:o})=>await b(zr(),"init-stage",async n=>{let i=performance.now();n?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.runnerVersion":o||"unknown"});let s=qt[e.runner];if(!s)throw n?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let u=Zr({apiToken:r});pt(u);let l=e.useGateway?await _t({netlify:u,config:e}):void 0;n?.setAttributes({"init.aiGateway.created":!!l});let p=5*1024,f=Rt(async({steps:x=[],duration:w})=>{let g=x.map(T=>{let R=T.title?We(q(T.title),p):void 0,S=T.message?We(q(T.message)):void 0;return{...T,title:R,message:S}});x.length=0;try{return await B(e.id,e.sessionId,{steps:g,duration:w})}catch(T){Kt.error("persistSteps failed",{error:T?.message||T})}},t);Kt.info("Adding build files to stage");let c=await Xe();await Je(c);let m;e.hasRepo?e.sha?(m=e.sha,n?.setAttributes({"init.sha.source":"provided"})):(m=await $t(),await ye(e.id,{sha:m}),n?.setAttributes({"init.sha.source":"current_commit"})):(m=await Ot(),n?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let y=performance.now()-i;return n?.setAttributes({"init.sha":m||"unknown","init.duration.ms":y,"init.status":"success"}),{aiGateway:l,context:u,persistSteps:f,runner:s,sha:m}}),Zr=({apiToken:e})=>({constants:{NETLIFY_API_HOST:Pe.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||Pe.env.NETLIFY_API_TOKEN,SITE_ID:Pe.env.SITE_ID,FUNCTIONS_DIST:Pe.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:O}});import{getTracer as rt}from"@netlify/otel";import Qr from"crypto";import J from"fs/promises";import k from"path";import G from"process";var F=_("context"),en=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:G.env.NETLIFY_TEAM_ID,userId:G.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:G.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},tn=10,rn=async e=>{let{name:t,ext:r}=k.parse(e),o=e,n=k.join(G.cwd(),D,o),i=0;for(;await nn(n);){if(i>=tn)throw new Error("Failed to generate context file");o=`${t}-${Qr.randomUUID().slice(0,5)}${r}`,n=k.join(G.cwd(),D,o),i+=1}return o},nn=async e=>{try{return await J.access(e),!0}catch{return!1}},on=async()=>{try{F.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return F.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(o=>o&&typeof o=="object"&&o.key==="catchall-consumer");return r?!r.contextScopes||typeof 
r.contextScopes!="object"?(F.warn("Catchall consumer missing or invalid contextScopes"),null):r:(F.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?F.warn("Netlify features context request timed out"):F.warn("Failed to fetch Netlify features context:",e.message),null}},sn=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let o=await r.text();return await J.writeFile(t,o,"utf-8"),!0}catch(r){return r.name==="AbortError"?F.warn(`Download timeout for ${e}`):F.warn(`Failed to download context file ${e}:`,r.message),!1}},$e=null,an=async()=>{if($e)return $e;let e=await on();if(!e)return[];let t=k.join(G.cwd(),D,Be);await J.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([n,i])=>{if(!i||typeof i!="object"||!i.endpoint||!i.scope)return F.warn(`Invalid scope data for ${n}, skipping...`),null;let s=`${n}.md`,u=k.join(t,s),l=k.join(D,Be,s);return F.log(`Downloading ${i.scope} context...`),await sn(i.endpoint,u)?(F.log(`Downloaded: ${l}`),{scope:i.scope,path:l,key:n}):null});return $e=(await Promise.all(r)).filter(n=>n!==null),$e},Jt=async({cliPath:e,netlify:t,config:r,buildErrorContext:o})=>{let n=en(t),i=await rn(xt),s=k.join(G.cwd(),D);await J.mkdir(s,{recursive:!0});let u=k.join(D,i),l=k.join(G.cwd(),u),p=k.join(G.cwd(),D,Z);try{await J.unlink(p),F.log(`Deleted old results file: ${p}`)}catch{}let f=o?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
29
29
  Your task is to analyze and fix the build errors.
30
30
  Don't apply techniques of reverting changes. Apply fixes related to errors.
31
31
  Don't try to run build by yourself. Just fix the errors.
@@ -38,7 +38,7 @@ ${r.siteContext.filter(g=>g.site_context).map(g=>typeof g.site_context=="string"
38
38
 
39
39
  `)}
40
40
  </project_rules>
41
- `);let m="";if(r.sessionHistoryContext?.length){let g=k.join(G.cwd(),D,Ye);await J.mkdir(g,{recursive:!0});let T=await Promise.all(r.sessionHistoryContext.map(async(R,S)=>{let N=S+1,C=`attempt-${N}.md`,O=k.join(g,C),I=k.join(D,Ye,C),v=`# Task History - Attempt ${N}
41
+ `);let m="";if(r.sessionHistoryContext?.length){let g=k.join(G.cwd(),D,Ye);await J.mkdir(g,{recursive:!0});let T=await Promise.all(r.sessionHistoryContext.map(async(R,S)=>{let N=S+1,C=`attempt-${N}.md`,$=k.join(g,C),I=k.join(D,Ye,C),v=`# Task History - Attempt ${N}
42
42
 
43
43
  ## Request - what the user asked for
44
44
  ${R.request}
@@ -48,7 +48,7 @@ ${R.request}
48
48
  ## Response - what the agent replied with after its work
49
49
 
50
50
  ${R.response}
51
- `;return await J.writeFile(O,v,"utf-8"),F.log(`Created history file: ${I}`),I}));m+=`
51
+ `;return await J.writeFile($,v,"utf-8"),F.log(`Created history file: ${I}`),I}));m+=`
52
52
  <session_history_context>
53
53
  History of prior work on this task.
54
54
  You MUST review ALL of the files below as context to understand the context of previous attempts. Use this information to continue the discussion appropriately.
@@ -127,10 +127,10 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
127
127
  </request>
128
128
 
129
129
  Use the following file for the complete context of the ask, the environment, and what's available. ${l} You MUST READ ALL OF IT. Make sure to read it first. Never cite or paraphrase private context.
130
- `),w};var ln=_("prompt"),Vt=async({cliPath:e,config:t,netlify:r,buildErrorContext:o})=>{let n=await Jt({cliPath:e,config:t,netlify:r,buildErrorContext:o});return process.env.AGENT_RUNNER_DEBUG&&ln.log("Contextful Prompt:",n),{prompt:n}};var $e=_("inference_stage"),Xt=5,Fe=async e=>{let{cliPath:t,config:r,context:o,buildErrors:n,runner:i,persistSteps:s,aiGateway:u,attempt:l,contextPrefix:p,priorAgentSessionId:f}=e;$e.log(`Running inference stage, attempt ${l} of ${Xt}`);let c=await b(rt(),"inference-stage",async m=>{m?.setAttributes({"inference.attempt":l||1}),Et();let{prompt:y}=await b(rt(),"compose-prompt",async()=>await Vt({cliPath:t,config:r,buildErrorContext:un(n),netlify:o})),x=`
130
+ `),w};var ln=_("prompt"),Vt=async({cliPath:e,config:t,netlify:r,buildErrorContext:o})=>{let n=await Jt({cliPath:e,config:t,netlify:r,buildErrorContext:o});return process.env.AGENT_RUNNER_DEBUG&&ln.log("Contextful Prompt:",n),{prompt:n}};var Oe=_("inference_stage"),Xt=5,Fe=async e=>{let{cliPath:t,config:r,context:o,buildErrors:n,runner:i,persistSteps:s,aiGateway:u,attempt:l,contextPrefix:p,priorAgentSessionId:f}=e;Oe.log(`Running inference stage, attempt ${l} of ${Xt}`);let c=await b(rt(),"inference-stage",async m=>{m?.setAttributes({"inference.attempt":l||1}),Et();let{prompt:y}=await b(rt(),"compose-prompt",async()=>await Vt({cliPath:t,config:r,buildErrorContext:un(n),netlify:o})),x=`
131
131
  ${p||""}
132
132
  ${y}
133
- `.trim(),w={...r,prompt:x},g=await b(rt(),`run-${r.runner}`,async()=>await i({aiGateway:u,config:w,netlify:o,persistSteps:s,continueSession:!!(l&&l>1),priorAgentSessionId:f}));return g.result&&(g.result=q(g.result)),g.error&&(g.error=q(g.error)),await s.flush(),g});if(c.error){if($e.error("Runner failed",{stepsCount:c.steps.length,duration:c.duration,error:c.error,isRetryableError:c.isRetryableError,attempt:l||1,agentSessionId:c.agentSessionId}),c.isRetryableError&&(!l||l<Xt))return $e.log("Retrying inference stage"),await new Promise(y=>setTimeout(y,5e3)),{runnerResult:(await Fe({...e,attempt:(l||1)+1,priorAgentSessionId:c.agentSessionId,contextPrefix:c.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw $e.log("Do not retry inference stage"),new Error(c.error)}return{runnerResult:c}},un=e=>!e||e.length===0?"":`
133
+ `.trim(),w={...r,prompt:x},g=await b(rt(),`run-${r.runner}`,async()=>await i({aiGateway:u,config:w,netlify:o,persistSteps:s,continueSession:!!(l&&l>1),priorAgentSessionId:f}));return g.result&&(g.result=q(g.result)),g.error&&(g.error=q(g.error)),await s.flush(),g});if(c.error){if(Oe.error("Runner failed",{stepsCount:c.steps.length,duration:c.duration,error:c.error,isRetryableError:c.isRetryableError,attempt:l||1,agentSessionId:c.agentSessionId}),c.isRetryableError&&(!l||l<Xt))return Oe.log("Retrying inference stage"),await new Promise(y=>setTimeout(y,5e3)),{runnerResult:(await Fe({...e,attempt:(l||1)+1,priorAgentSessionId:c.agentSessionId,contextPrefix:c.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw Oe.log("Do not retry inference stage"),new Error(c.error)}return{runnerResult:c}},un=e=>!e||e.length===0?"":`
134
134
  Deploy failed failed. Here are the errors to review on the latest build:
135
135
 
136
136
  Below are all of the logs with potential issues that we extracted. Some of them may be false positives, discern them carefully and ensure fixes are relevant.
@@ -139,5 +139,5 @@ ${e.pop()}
139
139
  `;import pn from"process";import{getTracer as nt}from"@netlify/otel";import{getTracer as cn}from"@netlify/otel";var we=_("deploy"),zt=async e=>await b(cn(),"create-preview-deploy",async t=>dn(e,t)),dn=async({netlify:e,hasRepo:t,skipBuild:r,message:o="Agent Preview",deploySubdomain:n,cliPath:i,filter:s},u)=>{try{let l=["deploy","--message",`"${o}"`,"--json","--draft","--verbose"];t||(we.log("Deploy: Uploading source zip"),l.push("--upload-source-zip")),n&&l.push("--alias",n),s&&l.push("--filter",s),r?(we.log("Deploy: Skipping build"),l.push("--no-build")):l.push("--context","deploy-preview");let p=i||"netlify";we.log(`Running: ${p} ${l.join(" ")}`),u?.setAttributes({cmd:p,args:l});let f=await e.utils.run(p,l,{stdio:["ignore","pipe","pipe"]}),c=JSON.parse(String(f.stdout??"").trim());u?.setAttributes({success:!0,deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id}),we.log(`
140
140
  Preview deploy created successfully:`,{deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id});let m={deployId:c.deploy_id,previewUrl:c.deploy_url,logsUrl:c.logs,siteId:c.site_id};return t||(m.sourceZipFilename=c.source_zip_filename),m}catch(l){throw we.error("Failed to create preview deploy via CLI:",l),u?.setAttributes({success:!1,error:l.message}),l}};var Te=_("deploy_stage"),ot=async e=>await b(nt(),"run-deploy-stage",async()=>fn(e)),fn=async({cliPath:e,config:t,context:r,result:o,filter:n,isRetry:i})=>{let s=await b(nt(),"get-runner-diffs",async()=>await Ct({config:t,isRetry:i}));if(Te.info("Resolved git",{hasChanges:s.hasChanges,ignored:s.ignored??[]}),!s.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:u,resultDiff:l,diffBinary:p,resultDiffBinary:f}=s,c=!0;Te.log("Preview deploy condition check:",{resultUndefined:o===void 0,resultType:typeof o,hasChanges:c,wouldCreatePreview:o!==void 0&&c});let m=null;if(o!==void 0&&c)try{let y;try{let x=await b(nt(),"get-runner-session",async()=>await mt(t.id,t.sessionId));x?.title&&(y=x.title)}catch(x){Te.warn("Failed to fetch session title, using fallback message:",x.message)}await B(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),m=await zt({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:y,skipBuild:!1,deploySubdomain:bt(t.id,pn.env.SITE_NAME),filter:n})}catch(y){return Te.warn("Failed to create preview deploy (continuing with agent run):",y),{diff:u,resultDiff:l,hasChanges:c,previewInfo:null,diffBinary:p,resultDiffBinary:f,deployError:y instanceof Error?y.message:String(y)}}return Te.log("Git status",{hasDiff:!!u,hasChanges:c}),{diff:u,resultDiff:l,hasChanges:c,previewInfo:m,diffBinary:p,resultDiffBinary:f}};import{getTracer as De}from"@netlify/otel";async function Zt(e,t){let{maxRetries:r,baseDelay:o,onRetry:n}=t,i;for(let s=1;s<=r;s++)try{return await e()}catch(u){if(i=u,s===r)throw i;n&&n(s,i),await new Promise(l=>setTimeout(l,o*s))}throw i}var ke=class{scanDiffForForms(t){let r=[],o=null,n=[],i=t.split(`
141
141
  `);for(let s of i)if(s.startsWith("diff --git")){if(o&&n.length>0){let l=this.containsNetlifyForm(n,o);l&&r.push(l)}let u=s.split(" ");o=u[u.length-1].replace(/^b\//,""),n=[]}else s.startsWith("+")&&!s.startsWith("+++")&&n.push(s.slice(1));if(o&&n.length>0){let s=this.containsNetlifyForm(n,o);s&&r.push(s)}return{detected:r.length>0,matches:r}}containsNetlifyForm(t,r){let o=t.join(`
142
- `),n=[{pattern:/<[\w-]*form[\s\S]*?(data-)?netlify/i,name:"standard form element"},{pattern:/<[A-Z][\w]*[\s\S]*?component\s*=\s*["']form["'][\s\S]*?(data-)?netlify/i,name:"component with form prop"}];for(let{pattern:i,name:s}of n){let u=o.match(i);if(u){let l=u.index||0,p=Math.max(0,l-20),f=Math.min(o.length,l+u[0].length+20),c=o.slice(p,f).trim();return c=c.replace(/\s+/g," "),c.length>100&&(c=c.slice(0,97)+"..."),{file:r,snippet:`[${s}] ${c}`}}}return null}};var A=_("cleanup_stage"),Qt=async e=>await b(De(),"cleanup-stage",async()=>mn(e)),it=1024*1024*10,mn=async({config:e,diff:t,result:r,duration:o,resultDiff:n,diffBinary:i,resultDiffBinary:s,previewInfo:u})=>{let l={result:r||"Done",duration:o};u&&u.deployId&&(l.deploy_id=u.deployId),u&&u.sourceZipFilename&&(l.result_zip_file_name=u.sourceZipFilename);let p=t||i||n||s;if(p&&(l.diff_produced=!0),p){let f=new ke,c=t||i||"",m=f.scanDiffForForms(c);m.detected?(A.log("Detected Netlify form(s) in diff:"),m.matches.forEach(({file:y,snippet:x})=>{A.log(` - ${y}: ${x}`)}),l.has_netlify_form=!0):A.log("Did not detect Netlify form(s) in diff"),A.log("Did not detect Netlify form(s) in diff")}if(p)try{A.log("Getting pre-signed URLs for diff upload");let f=await ht(e.id,e.sessionId),c=[];(t||i)&&c.push(je(f.result.upload_url,i||t).then(()=>{l.result_diff_s3_key=f.result.s3_key,A.log("Successfully uploaded result_diff to S3")})),(n||s)&&c.push(je(f.cumulative.upload_url,s||n).then(()=>{l.cumulative_diff_s3_key=f.cumulative.s3_key,A.log("Successfully uploaded cumulative_diff to S3")})),A.log(`Uploading ${c.length} diff(s) to S3 in parallel`),await Promise.all(c),(n||s)&&(A.log("Updating agent runner with cumulative diff S3 key"),await b(De(),"update-runner",async()=>{await ye(e.id,{result_diff_s3_key:f.cumulative.s3_key})}))}catch(f){A.error("S3 upload failed, falling back to inline diffs:",f);let c=Buffer.byteLength(t||i||""),m=Buffer.byteLength(s||n||"");if(c>it||m>it){let y=`Diffs exceed maximum inline size of ${it} bytes.`;throw A.error(y),new Error(y)}l.result_diff=t,l.result_diff_binary=i,(n||s)&&(l.cumulative_diff=n,l.cumulative_diff_binary=s,A.log("Updating agent runner with inline diffs (fallback)"),await b(De(),"update-runner",async()=>{await ye(e.id,{result_diff:n,result_diff_binary:s})}))}else A.log("No diffs to upload");return A.log("Updated agent runner with result"),await Zt(async()=>await b(De(),"update-runner-session",()=>B(e.id,e.sessionId,l)),{maxRetries:3,baseDelay:1e3,onRetry:(f,c)=>{A.error(`Error updating agent runner session (attempt ${f}):`,c),A.log("Retrying...")}}),A.log("Finished updating agent runner with result"),{sessionUpdate:l}};import{getTracer as er,shutdownTracers as hn,withActiveSpan as tr}from"@netlify/otel";var yn=gn(import.meta.url),rr=yn("../package.json"),xe=_("pipeline_index"),Le=3,us=async({config:e,apiToken:t,cliPath:r="netlify",cwd:o,filter:n,tracing:i={}})=>{let s,{withStageTimer:u}=wt(z.timeUnits.hours(4)),l=await ut(rr.version,e.id,i);try{await tr(er(),"run-pipeline",{},l,async()=>{let{aiGateway:p,context:f,persistSteps:c,runner:m,sha:y}=await u("init",()=>Wt({config:e,apiToken:t,cliPath:r,cwd:o,filter:n,runnerVersion:rr.version}),z.timeUnits.minutes(10));s=m.clean,e.sha=y;let{runnerResult:x}=await u("inference",()=>Fe({cliPath:r,config:e,context:f,runner:m.runner,persistSteps:c,aiGateway:p}));await B(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let w=await 
u("deploy",()=>ot({cliPath:r,config:e,context:f,result:x.result,filter:n,isRetry:!1})),g=x,T=[];if(w.hasChanges&&w.deployError){T.push(ct(w.deployError));let I=1,v=!1;for(;I<=Le&&!w.previewInfo&&!v;)xe.log(`Deploy attempt had errors. Retrying. ${I}/${Le}`),await tr(er(),"deploy-stage",async M=>{M?.setAttributes({"stage.attempt":I});let h;try{h=(await u(`inference-retry-${I}`,()=>Fe({cliPath:r,config:e,context:f,runner:m.runner,persistSteps:c,aiGateway:p,buildErrors:T,priorAgentSessionId:x.agentSessionId}))).runnerResult}catch(d){xe.warn(`Inference retry ${I} failed, stopping deploy retries:`,d),v=!0;return}g={...h,steps:[...g.steps||[],...h.steps||[]],duration:(g.duration||0)+(h.duration||0)},w=await u(`deploy-retry-${I}`,()=>ot({cliPath:r,config:e,context:f,result:h.result,filter:n,isRetry:!0})),w.deployError&&T.push(w.deployError),I++});I>Le&&!w.previewInfo&&console.warn(`Deploy validation failed after ${Le} attempts`)}let{diff:R,resultDiff:S,previewInfo:N,diffBinary:C,resultDiffBinary:O}=w;await u("cleanup",()=>Qt({config:e,diff:R,result:g.result,duration:g.duration,resultDiff:S,diffBinary:C,resultDiffBinary:O,previewInfo:N}),z.timeUnits.minutes(10)),process.env.NETLIFY_LOCAL_MODE||(await s?.(),await Ft())})}catch(p){if(dt(p)){xe.info("Agent run terminated gracefully",{statusCode:p.statusCode,reason:p.message}),await s?.();try{await B(e.id,e.sessionId,{result:p.userMessage,state:"error"})}catch{xe.info("Could not update session (site may have been deleted)")}return}xe.error("Got error while running pipeline",p),await s?.();let f=p instanceof Error&&p.message;throw await B(e.id,e.sessionId,{result:f||"Encountered error when running agent",state:"error"}),p}finally{await hn()}};export{us as runPipeline};
142
+ `),n=[{pattern:/<[\w-]*form[\s\S]*?(data-)?netlify/i,name:"standard form element"},{pattern:/<[A-Z][\w]*[\s\S]*?component\s*=\s*["']form["'][\s\S]*?(data-)?netlify/i,name:"component with form prop"}];for(let{pattern:i,name:s}of n){let u=o.match(i);if(u){let l=u.index||0,p=Math.max(0,l-20),f=Math.min(o.length,l+u[0].length+20),c=o.slice(p,f).trim();return c=c.replace(/\s+/g," "),c.length>100&&(c=c.slice(0,97)+"..."),{file:r,snippet:`[${s}] ${c}`}}}return null}};var A=_("cleanup_stage"),Qt=async e=>await b(De(),"cleanup-stage",async()=>mn(e)),it=1024*1024*10,mn=async({config:e,diff:t,result:r,duration:o,resultDiff:n,diffBinary:i,resultDiffBinary:s,previewInfo:u})=>{let l={result:r||"Done",duration:o};u&&u.deployId&&(l.deploy_id=u.deployId),u&&u.sourceZipFilename&&(l.result_zip_file_name=u.sourceZipFilename);let p=t||i||n||s;if(p&&(l.diff_produced=!0),p){let f=new ke,c=t||i||"",m=f.scanDiffForForms(c);m.detected?(A.log("Detected Netlify form(s) in diff:"),m.matches.forEach(({file:y,snippet:x})=>{A.log(` - ${y}: ${x}`)}),l.has_netlify_form=!0):A.log("Did not detect Netlify form(s) in diff"),A.log("Did not detect Netlify form(s) in diff")}if(p)try{A.log("Getting pre-signed URLs for diff upload");let f=await ht(e.id,e.sessionId),c=[];(t||i)&&c.push(je(f.result.upload_url,i||t).then(()=>{l.result_diff_s3_key=f.result.s3_key,A.log("Successfully uploaded result_diff to S3")})),(n||s)&&c.push(je(f.cumulative.upload_url,s||n).then(()=>{l.cumulative_diff_s3_key=f.cumulative.s3_key,A.log("Successfully uploaded cumulative_diff to S3")})),A.log(`Uploading ${c.length} diff(s) to S3 in parallel`),await Promise.all(c),(n||s)&&(A.log("Updating agent runner with cumulative diff S3 key"),await b(De(),"update-runner",async()=>{await ye(e.id,{result_diff_s3_key:f.cumulative.s3_key})}))}catch(f){A.error("S3 upload failed, falling back to inline diffs:",f);let c=Buffer.byteLength(t||i||""),m=Buffer.byteLength(s||n||"");if(c>it||m>it){let y=`Diffs exceed maximum inline size of ${it} bytes.`;throw A.error(y),new Error(y)}l.result_diff=t,l.result_diff_binary=i,(n||s)&&(l.cumulative_diff=n,l.cumulative_diff_binary=s,A.log("Updating agent runner with inline diffs (fallback)"),await b(De(),"update-runner",async()=>{await ye(e.id,{result_diff:n,result_diff_binary:s})}))}else A.log("No diffs to upload");return A.log("Updated agent runner with result"),await Zt(async()=>await b(De(),"update-runner-session",()=>B(e.id,e.sessionId,l)),{maxRetries:3,baseDelay:1e3,onRetry:(f,c)=>{A.error(`Error updating agent runner session (attempt ${f}):`,c),A.log("Retrying...")}}),A.log("Finished updating agent runner with result"),{sessionUpdate:l}};import{getTracer as er,shutdownTracers as hn,withActiveSpan as tr}from"@netlify/otel";var yn=gn(import.meta.url),rr=yn("../package.json"),xe=_("pipeline_index"),Le=3,us=async({config:e,apiToken:t,cliPath:r="netlify",cwd:o,filter:n,tracing:i={}})=>{let s,{withStageTimer:u}=wt(z.timeUnits.hours(4)),l=await ut(rr.version,e.id,i);try{await tr(er(),"run-pipeline",{},l,async()=>{let{aiGateway:p,context:f,persistSteps:c,runner:m,sha:y}=await u("init",()=>Wt({config:e,apiToken:t,cliPath:r,cwd:o,filter:n,runnerVersion:rr.version}),z.timeUnits.minutes(10));s=m.clean,e.sha=y;let{runnerResult:x}=await u("inference",()=>Fe({cliPath:r,config:e,context:f,runner:m.runner,persistSteps:c,aiGateway:p}));await B(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let w=await 
u("deploy",()=>ot({cliPath:r,config:e,context:f,result:x.result,filter:n,isRetry:!1})),g=x,T=[];if(w.hasChanges&&w.deployError){T.push(ct(w.deployError));let I=1,v=!1;for(;I<=Le&&!w.previewInfo&&!v;)xe.log(`Deploy attempt had errors. Retrying. ${I}/${Le}`),await tr(er(),"deploy-stage",async M=>{M?.setAttributes({"stage.attempt":I});let h;try{h=(await u(`inference-retry-${I}`,()=>Fe({cliPath:r,config:e,context:f,runner:m.runner,persistSteps:c,aiGateway:p,buildErrors:T,priorAgentSessionId:x.agentSessionId}))).runnerResult}catch(d){xe.warn(`Inference retry ${I} failed, stopping deploy retries:`,d),v=!0;return}g={...h,steps:[...g.steps||[],...h.steps||[]],duration:(g.duration||0)+(h.duration||0)},w=await u(`deploy-retry-${I}`,()=>ot({cliPath:r,config:e,context:f,result:h.result,filter:n,isRetry:!0})),w.deployError&&T.push(w.deployError),I++});I>Le&&!w.previewInfo&&console.warn(`Deploy validation failed after ${Le} attempts`)}let{diff:R,resultDiff:S,previewInfo:N,diffBinary:C,resultDiffBinary:$}=w;await u("cleanup",()=>Qt({config:e,diff:R,result:g.result,duration:g.duration,resultDiff:S,diffBinary:C,resultDiffBinary:$,previewInfo:N}),z.timeUnits.minutes(10)),process.env.NETLIFY_LOCAL_MODE||(await s?.(),await Ft())})}catch(p){if(dt(p)){xe.info("Agent run terminated gracefully",{statusCode:p.statusCode,reason:p.message}),await s?.();try{await B(e.id,e.sessionId,{result:p.userMessage,state:"error"})}catch{xe.info("Could not update session (site may have been deleted)")}return}xe.error("Got error while running pipeline",p),await s?.();let f=p instanceof Error&&p.message;throw await B(e.id,e.sessionId,{result:f||"Encountered error when running agent",state:"error"}),p}finally{await hn()}};export{us as runPipeline};
143
143
  //# sourceMappingURL=index.js.map
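The only differences visible in this bundle are renamed minified identifiers (for example `A`→`b`, `O`→`$`, `$e`→`Oe`, `An` for `bn`), which points to a re-bundle rather than a behavioral change in 1.60.4. One routine that is easy to lose in the minified output is the linear-backoff retry wrapper the cleanup stage uses around its final session update (the bundle calls it `Zt`). The sketch below is a rough, de-minified reading of that helper; the names `retryWithBackoff` and `updateSession` are invented for illustration and are not exports of the package.

    // Rough, de-minified sketch of the retry helper visible above; names are invented.
    // It retries an async task with a linearly growing delay: attempt 1 waits baseDelay
    // milliseconds, attempt 2 waits 2 * baseDelay, and so on.
    async function retryWithBackoff(task, { maxRetries, baseDelay, onRetry }) {
      let lastError;
      for (let attempt = 1; attempt <= maxRetries; attempt++) {
        try {
          return await task();
        } catch (error) {
          lastError = error;
          if (attempt === maxRetries) throw lastError;
          if (onRetry) onRetry(attempt, lastError);
          await new Promise((resolve) => setTimeout(resolve, baseDelay * attempt));
        }
      }
      throw lastError;
    }

    // The cleanup stage wraps its session update with maxRetries: 3 and baseDelay: 1000
    // (values taken from the bundle); updateSession here is a hypothetical placeholder.
    await retryWithBackoff(() => updateSession(), {
      maxRetries: 3,
      baseDelay: 1000,
      onRetry: (attempt, error) => console.error(`Retry attempt ${attempt} failed:`, error),
    });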
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@netlify/agent-runner-cli",
3
3
  "type": "module",
4
- "version": "1.60.3",
4
+ "version": "1.60.4",
5
5
  "description": "CLI tool for running Netlify agents",
6
6
  "main": "./dist/index.js",
7
7
  "types": "./dist/index.d.ts",