@netlify/agent-runner-cli 1.60.6 → 1.61.0

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
package/dist/bin.js CHANGED
@@ -1,32 +1,32 @@
  #!/usr/bin/env node
- import mt from"process";import jn from"minimist";import{createRequire as On}from"module";import{createTracerProvider as Er}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as gt}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as Tr}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as wr}from"@netlify/otel";import{propagation as ht,context as yt,W3CTraceContextPropagator as Ir}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as xr}from"@opentelemetry/exporter-trace-otlp-grpc";import _r from"process";function _(e){let t=_r.env.LOG!=="0";return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var Ye=_("tracing"),_t=async(e,t,r)=>(await Er({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new gt(new Be),new gt(new xr({url:r.exporterUrl}))],instrumentations:[new Tr({skipHeaders:!0})]}),r.traceparent?(ht.setGlobalPropagator(new Ir),ht.extract(yt.active(),{traceparent:r.traceparent,isRemote:!0})):yt.active());function A(e,t,r){return Ye.log(`\u23F3 TRACE: ${t} starting...`),wr(e,t,r)}var Be=class{export(t,r){for(let o of t)this.logSpan(o);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,o=t.attributes,n=[];for(let[c,l]of Object.entries(o))c.includes("duration")&&typeof l=="number"?n.push(`${c}=${l.toFixed(2)}ms`):n.push(`${c}=${l}`);let i=t.status?.code===2?"\u274C":"\u2705",s=n.length>0?` [${n.join(", ")}]`:"";Ye.log(`${i} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${s}`),t.status?.code===2&&t.status.message&&Ye.log(` \u274C Error: ${t.status.message}`)}};var vr=["error","failed","exception","fatal","panic","abort","crash"];function Et(e){let t=e.split(`
- `),r=[],o=-1,n=0;for(;n<t.length;){let c=t[n].slice(0,500).toLowerCase();if(vr.some(p=>c.includes(p))){let p=Math.max(0,n-10,o+1),f=Math.min(t.length-1,n+20),u=[];for(let m=p;m<=f;m++)u.push(t[m]);r.push(u.join(`
- `)),o=f,n=f+1}else n++}if(r.length===0)return e;let i=r.map((s,c)=>`<extracted_error_chunk order="${c+1}">
+ import gt from"process";import qn from"minimist";import{createRequire as Ln}from"module";import{createTracerProvider as wr}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as ht}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as xr}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as Ir}from"@netlify/otel";import{propagation as yt,context as Et,W3CTraceContextPropagator as vr}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as Rr}from"@opentelemetry/exporter-trace-otlp-grpc";import Tr from"process";function _(e){let t=Tr.env.LOG!=="0";return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var Be=_("tracing"),_t=async(e,t,r)=>(await wr({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new ht(new He),new ht(new Rr({url:r.exporterUrl}))],instrumentations:[new xr({skipHeaders:!0})]}),r.traceparent?(yt.setGlobalPropagator(new vr),yt.extract(Et.active(),{traceparent:r.traceparent,isRemote:!0})):Et.active());function A(e,t,r){return Be.log(`\u23F3 TRACE: ${t} starting...`),Ir(e,t,r)}var He=class{export(t,r){for(let o of t)this.logSpan(o);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,o=t.attributes,n=[];for(let[l,a]of Object.entries(o))l.includes("duration")&&typeof a=="number"?n.push(`${l}=${a.toFixed(2)}ms`):n.push(`${l}=${a}`);let i=t.status?.code===2?"\u274C":"\u2705",s=n.length>0?` [${n.join(", ")}]`:"";Be.log(`${i} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${s}`),t.status?.code===2&&t.status.message&&Be.log(` \u274C Error: ${t.status.message}`)}};var Nr=["error","failed","exception","fatal","panic","abort","crash"];function Tt(e){let t=e.split(`
+ `),r=[],o=-1,n=0;for(;n<t.length;){let l=t[n].slice(0,500).toLowerCase();if(Nr.some(p=>l.includes(p))){let p=Math.max(0,n-10,o+1),f=Math.min(t.length-1,n+20),u=[];for(let m=p;m<=f;m++)u.push(t[m]);r.push(u.join(`
+ `)),o=f,n=f+1}else n++}if(r.length===0)return e;let i=r.map((s,l)=>`<extracted_error_chunk order="${l+1}">
  ${s}
  </extracted_error_chunk>`).join(`
 
- `);return i.length>e.length*.8?e:i}import{execSync as fn}from"child_process";import nr from"fs/promises";import mn from"path";import V from"process";import{getTracer as gn}from"@netlify/otel";import _e from"process";var ne=class extends Error{constructor(r,o,n){super(r);this.statusCode=o;this.userMessage=n;this.name="GracefulShutdownError"}},Tt=e=>e instanceof ne;var Ae=_e.env.NETLIFY_API_URL,Se=_e.env.NETLIFY_API_TOKEN,B=_("api"),be=()=>_e.env.NETLIFY_LOCAL_MODE==="true",Ee=async(e,t={})=>{if(!Ae||!Se)throw new Error("No API URL or token");let r=new URL(e,Ae),o={...t,headers:{...t.headers,Authorization:`Bearer ${Se}`}};_e.env.AGENT_RUNNERS_DEBUG==="true"&&(o.headers["x-nf-debug-logging"]="true"),t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),i=n.ok&&n.status<=299;if(_e.env.AGENT_RUNNERS_DEBUG==="true")B.log(`Response headers for ${r}:`),n.headers.forEach((c,l)=>{B.log(` ${l}: ${c}`)});else{let c=n.headers.get("x-request-id")||n.headers.get("x-nf-request-id");B.log(`Request ID for ${r}: ${c||"N/A"}`)}if(i||B.error(`Got status ${n.status} for request ${r}`),t.raw){if(!i)throw new Error(`API request failed: ${n.status} ${n.statusText}`);return n}let s=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!i){let c=typeof s=="string"?s:JSON.stringify(s);throw n.status===404?new ne(`API request failed: 404 - ${c}`,404,"The site associated with this agent run no longer exists."):n.status===403&&t.gracefulOn403?new ne(`API request failed: 403 - ${c}`,403,"Credit limit reached. Please add more credits to continue using Agent Runners."):new Error(`API request failed: ${n.status} - ${c}`)}return s},wt=e=>{B.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(Ae=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(Se=e.constants.NETLIFY_API_TOKEN)},It=()=>({apiUrl:Ae,token:Se}),Te=async(e,t)=>be()?(B.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):Ee(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),H=async(e,t,r)=>be()?(B.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):Ee(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var xt=async(e,t)=>be()?(B.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):Ee(`/api/v1/agent_runners/${e}/sessions/${t}`),vt=(e,t,r)=>Ee(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r},gracefulOn403:!0}),Rt=async(e,t)=>be()?(B.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):Ee(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),He=async(e,t)=>{B.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var oe=_("ai_gateway"),qe=null;var Nt=async()=>{if(qe)return qe;oe.log("Fetching available AI gateway providers");let e=await fetch(`${It().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: 
${e.statusText}`);let t=await e.json();return qe=t,oe.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},Rr=async(e,t)=>{let o=(await Nt()).providers[e];if(!o)return oe.log(`Provider '${e}' not found`),!1;let n=o.models.includes(t);return oe.log(`Model validation for ${e}/${t}`,{isAvailable:n}),n},At=async({netlify:e,config:t})=>{let r,o,n,i,s=e.constants?.SITE_ID;if(!s)throw new Error("No site id");let c=async()=>{clearTimeout(n),oe.log("Requesting AI gateway information");let l=await vt(s,t.id,t.sessionId);if({token:r,url:i}=l,o=l.expires_at?l.expires_at*1e3:void 0,oe.log("Got AI gateway information",{token:!!r,expiresAt:o,url:i}),o){let p=o-Date.now()-6e4;p>0&&(n=setTimeout(()=>{c()},p))}};return await Promise.all([c(),Nt()]),{get url(){return i},get token(){return r},isModelAvailableForProvider:Rr}};import q from"process";import K from"path";import Ce from"fs";import{fileURLToPath as Fr}from"url";import{createRequire as Or}from"module";import{execa as $r,execaCommand as To}from"execa";import{Transform as Nr}from"stream";var Ar=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),Sr=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function br(){return Object.entries(process.env).filter(([e,t])=>!(!t||Ar.has(e)||Sr.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function W(e){if(typeof e!="string")return e;let t=br();if(t.length===0)return e;let r=e;return t.forEach(o=>{let n=new RegExp(Cr(o),"g");r=r.replace(n,"******")}),r}function Cr(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var ie=class extends Nr{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,o){let n=t.toString(),i=W(n);o(null,i)}};function St(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(o,n,i){let s=typeof o=="string"?W(o):o;return typeof n=="function"?t(s,n):t(s,n,i)},process.stderr.write=function(o,n,i){let s=typeof o=="string"?W(o):o;return typeof n=="function"?r(s,n):r(s,n,i)}}var we=null,bt=e=>(we&&we.destroy(),we=new Q({totalAllowedTime:e}),we),Ct=()=>we;var Q=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,o)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let n=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),i=null,s=null;o!==void 0&&(s=new Promise((c,l)=>{i=setTimeout(()=>{l(new Error(`${t} stage exceeded its maximum duration of ${o}ms`))},o)}));try{return s?await Promise.race([r(),s]):await r()}finally{n(),i&&clearTimeout(i)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var Pt="netlify-agent-runner-context.md",We="task-history",Ke="netlify-context",D=".netlify",ee="results.md",Je="assets",Ve="other",Xe="personal";var ze="enterprise",Ze="free",Ft=[Xe,"pro",ze,Ze],te=1800*1e3;var Ot={name:"@netlify/agent-runner-cli",type:"module",version:"1.60.6",description:"CLI tool for running Netlify agents",main:"./dist/index.js",types:"./dist/index.d.ts",exports:"./dist/index.js",bin:{"agent-runner-cli":"./dist/bin.js","agent-runner-cli-local":"./dist/bin-local.js"},files:["dist/**/*.js","dist/**/*.d.ts","patches","scripts"],scripts:{build:"tsup",dev:"tsup --watch",prepare:"husky install node_modules/@netlify/eslint-config-node/.husky/",prepublishOnly:"npm ci && npm test",prepack:"npm run build",test:"run-s build format test:dev",format:"run-s build format:check-fix:*","format:ci":"run-s build format:check:*","format:check-fix:lint":"run-e format:check:lint format:fix:lint","format:check:lint":"cross-env-shell eslint $npm_package_config_eslint","format:fix:lint":"cross-env-shell eslint --fix $npm_package_config_eslint","format:check-fix:prettier":"run-e format:check:prettier format:fix:prettier","format:check:prettier":"cross-env-shell prettier --check $npm_package_config_prettier","format:fix:prettier":"cross-env-shell prettier --write $npm_package_config_prettier","test:dev":"run-s build test:dev:*","test:ci":"run-s build test:ci:*","test:dev:vitest":"LOG=0 vitest --exclude '**/integration/**'","test:ci:vitest":"LOG=0 c8 -r lcovonly -r text -r json vitest --exclude '**/integration/**'","test:integration":"vitest run test/integration/","test:integration:codex":"vitest run test/integration/codex.test.ts","test:integration:claude":"vitest run test/integration/claude.test.ts","test:integration:gemini":"vitest run test/integration/gemini.test.ts",postinstall:"node scripts/postinstall.js"},config:{eslint:'--cache --format=codeframe --max-warnings=0 
"{src,scripts,test,.github}/**/*.{js,ts,md,html}"',prettier:'--ignore-path .gitignore --loglevel=warn "{src,scripts,test,.github}/**/*.{js,ts,md,yml,json,html}" "*.{js,ts,yml,json,html}" ".*.{js,ts,yml,json,html}" "!**/package-lock.json" "!package-lock.json"'},keywords:[],license:"MIT",repository:"netlify/agent-runner-cli",bugs:{url:"https://github.com/netlify/agent-runner-cli/issues"},author:"Netlify Inc.",directories:{test:"test"},devDependencies:{"@commitlint/cli":"^20.0.0","@commitlint/config-conventional":"^20.0.0","@eslint/compat":"^2.0.0","@eslint/js":"^9.35.0","@netlify/eslint-config-node":"^7.0.1","@types/node":"^24.5.0","@typescript-eslint/eslint-plugin":"^8.0.0","@typescript-eslint/parser":"^8.0.0","@vitest/eslint-plugin":"^1.6.6",c8:"^10.0.0","eslint-config-prettier":"^10.1.8","eslint-plugin-n":"^17.0.0",husky:"^9.0.0","patch-package":"^8.0.0",tsup:"^8.5.0",typescript:"^5.0.0","typescript-eslint":"^8.44.0",vitest:"^4.0.16"},dependencies:{"@anthropic-ai/claude-code":"2.1.20","@google/gemini-cli":"0.25.0","@netlify/otel":"^5.1.1","@openai/codex":"0.88.0","@opentelemetry/exporter-trace-otlp-grpc":"^0.208.0",execa:"^9.6.1",minimist:"^1.2.8"}};var Lr=Fr(import.meta.url),Dr=K.dirname(Lr),kr=Or(import.meta.url),se=_("shell"),Qe=new Set,Ur={preferLocal:!0},O=(e,t,r)=>{let[o,n]=Mr(t,r),i={...Ur,...n},s=$r(e,o,i);Gr(s,i),Yr(s);let c=r?.idleTimeout;return c&&c>0&&jr(s,c),s};var Mr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},Gr=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(q.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new ie).pipe(q.stdout),e.stdout?.pipe(new ie).pipe(q.stdout),e.stderr?.pipe(new ie).pipe(q.stderr);return}e.stdout?.pipe(q.stdout),e.stderr?.pipe(q.stderr)},et=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(q.kill(-e.pid,t),se.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return se.error("Error killing process:",r),!1}},$t=e=>et(e,"SIGKILL"),jr=(e,t)=>{let r=null,o=()=>{se.log(`Process ${e.pid} killed due to idle timeout (no output for ${t}ms)`),et(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(se.log(`Force killing idle process ${e.pid}`),$t(e))},5e3)},n=()=>{r&&clearTimeout(r),r=setTimeout(o,t)};n(),e.stdout?.on("data",n),e.stderr?.on("data",n);let i=()=>{r&&(clearTimeout(r),r=null)};e.on("exit",i),e.on("error",i)},Yr=e=>{Qe.add(e);let t=Ct();if(t){let r=t.onTimesUp(()=>{se.log(`Global timer expired, killing process ${e.pid}`),et(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(se.log(`Force killing process ${e.pid} after timeout`),$t(e))},5e3)});e.on("exit",()=>{Qe.delete(e),r()}),e.on("error",()=>{Qe.delete(e),r()})}};function ae(e,t){if(!q.env.NETLIFY_LOCAL_MODE)try{let n=kr.resolve(Ot.name),i=K.dirname(n);for(;i!==K.dirname(i);){let s=K.dirname(i);if(K.basename(s)==="node_modules"){let c=K.join(s,".bin",t);if(Ce.existsSync(c))return c;break}i=s}}catch(n){console.error("Could not resolve package.json",n)}if(q.env.NODE_PATH){let n=K.join(q.env.NODE_PATH,".bin",t);if(Ce.existsSync(n))return n}let r=K.join(e,"node_modules",".bin",t);if(Ce.existsSync(r))return r;let o=K.join(Dr,"..","node_modules",".bin",t);if(Ce.existsSync(o))return o}var Lt=_("utils"),Br=e=>new Promise(t=>{setTimeout(t,e)}),Dt=(e,t=3e3)=>{let r=!1,o=null,n=[],i=null,s=(...c)=>{if(r)return o=c,new Promise(f=>{n.push(f)});r=!0;let l,p=new Promise(f=>{l=f});return i=(async()=>{await Promise.resolve();let f=await e(...c);for(l(f);;){if(await Br(t),!o)return r=!1,i=null,f;let u=o,m=n;o=null,n=[],f=await 
e(...u),m.forEach(h=>{h(f)})}})(),p};return s.flush=async()=>{if((r||o)&&i)return await i,s.flush()},s},le=(e,t,r=!1)=>{let o=null,n=null,i=null,s=function(...c){n=c,i=this;let l=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(i,n),n=null,i=null)},t),l&&(e.apply(i,n),n=null,i=null)};return s.cancel=()=>{clearTimeout(o),o=null,n=null,i=null},s.flush=()=>{if(o){clearTimeout(o);let c=n,l=i;o=null,n=null,i=null,e.apply(l,c)}},s},Pe=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(o){t&&(r?.error?r.error("Could not parse JSON",o):Lt.error("Could not parse JSON",o))}},kt=(e,t)=>{let n=".netlify.app",i="agent-";if(!t)return`${i}${e.slice(0,6)}`;let c=`--${t}${n}`;if(c.length>55)return"";let l=60-c.length;if(l<=0)return"";if(l>=i.length+6){let p=Math.min(l-i.length,e.length);return`${i}${e.slice(0,p)}`}return e.slice(0,l)},Hr=e=>!e||typeof e!="object"||Array.isArray(e)||Object.keys(e).length===0?!1:!!Ft.some(t=>t in e),Ut=()=>{let e={},t={codex:process.env.NETLIFY_FF_AGENT_RUNNER_CODEX_VERSION,claude:process.env.NETLIFY_FF_AGENT_RUNNER_CLAUDE_VERSION,gemini:process.env.NETLIFY_FF_AGENT_RUNNER_GEMINI_VERSION};return Object.entries(t).forEach(([r,o])=>{if(o){let n=`NETLIFY_FF_AGENT_RUNNER_${r.toUpperCase()}_VERSION`;try{let i=JSON.parse(o);Hr(i)&&(e[r]=i)}catch(i){let c=i instanceof SyntaxError?"Invalid JSON":i.message;Lt.error(`Could not parse ${r} model version override from ${n}: ${c}`)}}}),e},qr=50*1024,tt=(e,t=qr)=>{if(!e||typeof e!="string"||e.length<=t)return e;let o=e.startsWith("```")?"\n... [truncated]\n```":"... [truncated]";return e.slice(0,t)+o};import{Buffer as Mt}from"buffer";import Wr from"path";var Gt=_("repo"),jt=async({config:e,isRetry:t})=>{Gt.info("Getting runner diffs");let r=await Jr(),{hasChanges:o}=r,{status:n}=r;if(!o)return{hasChanges:!1};if(!t){let E=Vr(n);await Xr(E)}Gt.info("Changes after processing"),await nt();let i=await ot(n);await rt(i);let s={stdio:["ignore","pipe","pipe"]},c=await O("git",["diff","--staged"],s),l=String(c.stdout??"");if(o=!!l,!o)return{hasChanges:!1,ignored:i};let p=await O("git",["diff","--staged","--binary"],s),f=String(p.stdout??""),u,m;if(e.sha){if(!process.env.NETLIFY_LOCAL_MODE){process.env.NETLIFY_INTERNAL_GIT="1";try{await O("git",["commit","-m","Agent runner"])}finally{process.env.NETLIFY_INTERNAL_GIT="0"}}let E=await O("git",["diff",e.sha,"HEAD"],s);u=String(E.stdout??"");let w=await O("git",["diff",e.sha,"HEAD","--binary"],s),g=String(w.stdout??"");u!==g&&(m=Mt.from(g).toString("base64"))}let h={hasChanges:!0,diff:l,resultDiff:u,ignored:i};return l!==f&&(h.diffBinary=Mt.from(f).toString("base64")),m&&(h.resultDiffBinary=m),h},rt=async(e=[])=>{process.env.NETLIFY_INTERNAL_GIT="1";try{await O("git",["add",".",...e])}finally{process.env.NETLIFY_INTERNAL_GIT="0"}},nt=async()=>{let e=await O("git",["status","-s"]);return String(e.stdout??"")},Yt=/.. (.+)?\.log$/,Kr=[Yt],Jr=async()=>{let e=await nt();return{hasChanges:(e.trim().length===0?[]:e.split(`
- `).filter(o=>Kr.some(i=>i instanceof RegExp?i.test(o):o===i)?!1:o[1]?.trim()!=="")).length!==0,status:e}},Bt=async()=>{let{stdout:e}=await O("git",["rev-parse","HEAD"]);return String(e??"").trim()},Ht=async()=>{let{stdout:e}=await O("git",["rev-list","--max-parents=0","HEAD"]);return String(e??"").trim()},ot=async e=>{e||=await nt();let t=[".netlify","node_modules","dist",".next","out",".nuxt",".output",".cache",".turbo",".parcel-cache","coverage",".nyc_output","storybook-static","public/build"],r=[];return e.split(`
- `).forEach(o=>{t.forEach(i=>{let s=o===`?? ${i}`,c=o.startsWith(`?? ${i}/`)||o.startsWith(`?? ${i}${Wr.sep}`);(s||c)&&r.push(`:!${i}`)});let n=o.match(Yt)?.[1];n&&r.push(`:!${n}.log`)}),r},qt=async()=>{await O("git",["reset","--hard","HEAD"])},Vr=e=>{let t=e.split(`
- `).reduce((r,o)=>{if(!o)return r;let[n,i,,...s]=o,c=s.join(""),l=n.trim(),p=i.trim();return r[c]?r[c].change=p:r[c]={filePath:c,stage:l,change:p},r},{});return Object.values(t)},Xr=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push(O("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import Zr from"fs/promises";import Qr from"os";import Jt from"path";import pe from"process";import en from"readline";import it from"path";import zr from"fs/promises";var st=_("agent-output-utils");async function ce({initialResult:e,agentName:t,hasError:r}){let o="",n=it.join(process.cwd(),D,ee);try{let i=await zr.readFile(n,"utf-8");i&&(o=i,st.log(`Pulled result from ${it.relative(process.cwd(),n)}`))}catch{st.log(`No results file found at ${it.relative(process.cwd(),n)}`)}return o||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function ue({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,o=r?.replace(/\s+/g," ").trim().toLowerCase()||"",n="";return o?.includes("ai gateway is not available for your account")||o?.includes("ai gateway is not enabled for your account")?n="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":o?.includes("error when talking to gemini api")?n="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(o?.includes("connection closed prematurely")||o?.includes("499")&&t.toLowerCase().includes("gemini"))&&(n=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),o?.includes("request timed out")&&(n=`The ${t} API request's have timed out. Please try again or use a different available agent.`),o?.includes("network error")&&(n=`The ${t} agent is having network issues. Please try again or use a different available agent.`),o?.includes("503")&&(n=`The ${t} API is currently experiencing high load. 
Retrying automatically...`),n&&st.log(`Providing updated error messsage: ${n}, replacing original error: ${r}`),n||r||void 0}function de(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error")||r?.includes("503"))}var k=_("runner_claude"),Wt="Claude Code",fe="claude-opus-4-5-20251101",Kt=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,tn=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(k.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(k.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(k.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function at({config:e,netlify:t,persistSteps:r,aiGateway:o,continueSession:n,priorAgentSessionId:i,cwd:s=pe.cwd()}){let c=e,{accountType:l,prompt:p,modelVersionOverrides:f}=c,{model:u}=c,m="";if(o){let{token:y,url:d}=o;if(!y||!d)throw new Error("No token or url provided from AI Gateway");if(f?.claude){let a=f?.claude?.[l];if(a){if(!await o.isModelAvailableForProvider("anthropic",a))throw new Error(`Model override '${a}' is not available for anthropic provider`);u=a}}else if(u){if(!await o.isModelAvailableForProvider("anthropic",u))throw new Error(`Model '${u}' is not available for anthropic provider`)}else!!fe&&await o.isModelAvailableForProvider("anthropic",fe)?(u=fe,k.log(`Using default model: ${fe}`)):fe&&k.log(`Default model ${fe} is not available, proceeding without model specification`);pe.env.ANTHROPIC_API_KEY=y,pe.env.ANTHROPIC_BASE_URL=d}else if(!pe.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let h=[],E=[],w={},g=0,I=0,R,N,b=[ae(s,"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose","--disallowed-tools","ExitPlanMode,AskUserQuestion",...u?["--model",u]:[],...n?["--continue"]:[],...n&&i?["--resume",i]:[],"-p",p],C=`${pe.env.NVM_BIN}/node`;k.log(`Running ${C} ${b.join(" ")}`);let F=t.utils.run(C,b,{all:!0,env:pe.env,cwd:s,idleTimeout:te});F.stdin?.end();let x=le(()=>{r?.({steps:h,duration:I})},250),v=(y,d)=>{let a={...y,id:g};g+=1,E.push(a),h.push(a),d||x.flush(),x(),d&&x.flush()},M=en.createInterface({input:F.all});return M.on("error",y=>{k.error("Readline interface error",{error:y.message,stack:y.stack})}),M.on("line",y=>{let d=null;try{d=JSON.parse(y)}catch{k.log("Could not parse line",y)}d?.session_id&&d.session_id!==m&&(m=d.session_id),Array.isArray(d?.message?.content)?d.message.content.forEach(a=>{switch(a.type){case"text":{a.text&&v({message:a.text});break}case"image":{typeof 
a.source=="object"&&a.source&&a.source.type==="base64"&&a.source.media_type?v({message:`![](data:${a.source.media_type};base64,${a.source.data})`}):k.log(`Unsupported image type ${a.source?.type}`,a.source);break}case"tool_use":{if(a.name==="Task"){let T=a.input?.description&&`\`${a.input.description}\``;v({title:[Kt(a.name),T].filter(Boolean).join(" ")})}else a.id&&(w[a.id]=a);x.flush();break}case"tool_result":{let T=a.tool_use_id?w[a.tool_use_id]:void 0,z;if(T){let J=T.input?.file_path&&Jt.relative(s,T.input.file_path),P=J&&`\`${J}\``;z=[Kt(T.name||""),P].filter(Boolean).join(" ")}let Ne=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(T?.name||""),Z;if(typeof a.content=="string")Z=a.content;else if(Array.isArray(a.content)){let J=[];a.content.forEach(P=>{P?.type==="text"&&typeof P.text=="string"?J.push(P.text):P?.type==="image"&&typeof P.source=="object"&&P.source?P.source.type==="base64"&&P.source.media_type?J.push(`![](data:${P.source.media_type};base64,${P.source.data})`):k.log(`Unsupported image type ${P.source.type}`,P.source):k.log(`Unsupported block type ${P?.type}`)}),Z=J.join(`
+ `);return i.length>e.length*.8?e:i}import{execSync as gn}from"child_process";import ir from"fs/promises";import hn from"path";import V from"process";import{getTracer as yn}from"@netlify/otel";import Ee from"process";var ne=class extends Error{constructor(r,o,n){super(r);this.statusCode=o;this.userMessage=n;this.name="GracefulShutdownError"}},wt=e=>e instanceof ne;var Se=Ee.env.NETLIFY_API_URL,Ae=Ee.env.NETLIFY_API_TOKEN,B=_("api"),be=()=>Ee.env.NETLIFY_LOCAL_MODE==="true",_e=async(e,t={})=>{if(!Se||!Ae)throw new Error("No API URL or token");let r=new URL(e,Se),o={...t,headers:{...t.headers,Authorization:`Bearer ${Ae}`}};Ee.env.AGENT_RUNNERS_DEBUG==="true"&&(o.headers["x-nf-debug-logging"]="true"),t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),i=n.ok&&n.status<=299;if(Ee.env.AGENT_RUNNERS_DEBUG==="true")B.log(`Response headers for ${r}:`),n.headers.forEach((l,a)=>{B.log(` ${a}: ${l}`)});else{let l=n.headers.get("x-request-id")||n.headers.get("x-nf-request-id");B.log(`Request ID for ${r}: ${l||"N/A"}`)}if(i||B.error(`Got status ${n.status} for request ${r}`),t.raw){if(!i)throw new Error(`API request failed: ${n.status} ${n.statusText}`);return n}let s=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!i){let l=typeof s=="string"?s:JSON.stringify(s);throw n.status===404?new ne(`API request failed: 404 - ${l}`,404,"The site associated with this agent run no longer exists."):n.status===403&&t.gracefulOn403?new ne(`API request failed: 403 - ${l}`,403,"Credit limit reached. Please add more credits to continue using Agent Runners."):new Error(`API request failed: ${n.status} - ${l}`)}return s},xt=e=>{B.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(Se=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(Ae=e.constants.NETLIFY_API_TOKEN)},It=()=>({apiUrl:Se,token:Ae}),Te=async(e,t)=>be()?(B.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):_e(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),H=async(e,t,r)=>be()?(B.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):_e(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var vt=async(e,t)=>be()?(B.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):_e(`/api/v1/agent_runners/${e}/sessions/${t}`),Rt=(e,t,r)=>_e(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r},gracefulOn403:!0}),Nt=async(e,t)=>be()?(B.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):_e(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),qe=async(e,t)=>{B.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var oe=_("ai_gateway"),We=null;var St=async()=>{if(We)return We;oe.log("Fetching available AI gateway providers");let e=await fetch(`${It().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: 
${e.statusText}`);let t=await e.json();return We=t,oe.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},Sr=async(e,t)=>{let o=(await St()).providers[e];if(!o)return oe.log(`Provider '${e}' not found`),!1;let n=o.models.includes(t);return oe.log(`Model validation for ${e}/${t}`,{isAvailable:n}),n},At=async({netlify:e,config:t})=>{let r,o,n,i,s=e.constants?.SITE_ID;if(!s)throw new Error("No site id");let l=async()=>{clearTimeout(n),oe.log("Requesting AI gateway information");let a=await Rt(s,t.id,t.sessionId);if({token:r,url:i}=a,o=a.expires_at?a.expires_at*1e3:void 0,oe.log("Got AI gateway information",{token:!!r,expiresAt:o,url:i}),o){let p=o-Date.now()-6e4;p>0&&(n=setTimeout(()=>{l()},p))}};return await Promise.all([l(),St()]),{get url(){return i},get token(){return r},isModelAvailableForProvider:Sr}};import q from"process";import K from"path";import Ce from"fs";import{fileURLToPath as $r}from"url";import{createRequire as Lr}from"module";import{execa as Dr,execaCommand as vo}from"execa";import{Transform as Ar}from"stream";var br=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),Cr=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function Pr(){return Object.entries(process.env).filter(([e,t])=>!(!t||br.has(e)||Cr.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function W(e){if(typeof e!="string")return e;let t=Pr();if(t.length===0)return e;let r=e;return t.forEach(o=>{let n=new RegExp(Or(o),"g");r=r.replace(n,"******")}),r}function Or(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var ie=class extends Ar{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,o){let n=t.toString(),i=W(n);o(null,i)}};function bt(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(o,n,i){let s=typeof o=="string"?W(o):o;return typeof n=="function"?t(s,n):t(s,n,i)},process.stderr.write=function(o,n,i){let s=typeof o=="string"?W(o):o;return typeof n=="function"?r(s,n):r(s,n,i)}}var we=null,Ct=e=>(we&&we.destroy(),we=new Q({totalAllowedTime:e}),we),Pt=()=>we;var Q=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,o)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let n=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),i=null,s=null;o!==void 0&&(s=new Promise((l,a)=>{i=setTimeout(()=>{a(new Error(`${t} stage exceeded its maximum duration of ${o}ms`))},o)}));try{return s?await Promise.race([r(),s]):await r()}finally{n(),i&&clearTimeout(i)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var Ot="netlify-agent-runner-context.md",Ke="task-history",Je="netlify-context",D=".netlify",ee="results.md",Ve="assets",Xe="other",ze="personal";var Ze="enterprise",Qe="free",Ft=[ze,"pro",Ze,Qe],$t=["normal","redeploy","create","ask"],te=1800*1e3;var Lt={name:"@netlify/agent-runner-cli",type:"module",version:"1.61.0",description:"CLI tool for running Netlify agents",main:"./dist/index.js",types:"./dist/index.d.ts",exports:"./dist/index.js",bin:{"agent-runner-cli":"./dist/bin.js","agent-runner-cli-local":"./dist/bin-local.js"},files:["dist/**/*.js","dist/**/*.d.ts","patches","scripts"],scripts:{build:"tsup",dev:"tsup --watch",prepare:"husky install node_modules/@netlify/eslint-config-node/.husky/",prepublishOnly:"npm ci && npm test",prepack:"npm run build",test:"run-s build format test:dev",format:"run-s build format:check-fix:*","format:ci":"run-s build format:check:*","format:check-fix:lint":"run-e format:check:lint format:fix:lint","format:check:lint":"cross-env-shell eslint $npm_package_config_eslint","format:fix:lint":"cross-env-shell eslint --fix $npm_package_config_eslint","format:check-fix:prettier":"run-e format:check:prettier format:fix:prettier","format:check:prettier":"cross-env-shell prettier --check $npm_package_config_prettier","format:fix:prettier":"cross-env-shell prettier --write $npm_package_config_prettier","test:dev":"run-s build test:dev:*","test:ci":"run-s build test:ci:*","test:dev:vitest":"LOG=0 vitest --exclude '**/integration/**'","test:ci:vitest":"LOG=0 c8 -r lcovonly -r text -r json vitest --exclude '**/integration/**'","test:integration":"vitest run test/integration/","test:integration:codex":"vitest run test/integration/codex.test.ts","test:integration:claude":"vitest run test/integration/claude.test.ts","test:integration:gemini":"vitest run test/integration/gemini.test.ts",postinstall:"node scripts/postinstall.js"},config:{eslint:'--cache --format=codeframe 
--max-warnings=0 "{src,scripts,test,.github}/**/*.{js,ts,md,html}"',prettier:'--ignore-path .gitignore --loglevel=warn "{src,scripts,test,.github}/**/*.{js,ts,md,yml,json,html}" "*.{js,ts,yml,json,html}" ".*.{js,ts,yml,json,html}" "!**/package-lock.json" "!package-lock.json"'},keywords:[],license:"MIT",repository:"netlify/agent-runner-cli",bugs:{url:"https://github.com/netlify/agent-runner-cli/issues"},author:"Netlify Inc.",directories:{test:"test"},devDependencies:{"@commitlint/cli":"^20.0.0","@commitlint/config-conventional":"^20.0.0","@eslint/compat":"^2.0.0","@eslint/js":"^9.35.0","@netlify/eslint-config-node":"^7.0.1","@types/node":"^24.5.0","@typescript-eslint/eslint-plugin":"^8.0.0","@typescript-eslint/parser":"^8.0.0","@vitest/eslint-plugin":"^1.6.6",c8:"^10.0.0","eslint-config-prettier":"^10.1.8","eslint-plugin-n":"^17.0.0",husky:"^9.0.0","patch-package":"^8.0.0",tsup:"^8.5.0",typescript:"^5.0.0","typescript-eslint":"^8.44.0",vitest:"^4.0.16"},dependencies:{"@anthropic-ai/claude-code":"2.1.29","@google/gemini-cli":"0.25.2","@netlify/otel":"^5.1.1","@openai/codex":"0.93.0","@opentelemetry/exporter-trace-otlp-grpc":"^0.208.0",execa:"^9.6.1",minimist:"^1.2.8"}};var kr=$r(import.meta.url),Ur=K.dirname(kr),Mr=Lr(import.meta.url),se=_("shell"),et=new Set,Gr={preferLocal:!0},F=(e,t,r)=>{let[o,n]=jr(t,r),i={...Gr,...n},s=Dr(e,o,i);Yr(s,i),Hr(s);let l=r?.idleTimeout;return l&&l>0&&Br(s,l),s};var jr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},Yr=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(q.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new ie).pipe(q.stdout),e.stdout?.pipe(new ie).pipe(q.stdout),e.stderr?.pipe(new ie).pipe(q.stderr);return}e.stdout?.pipe(q.stdout),e.stderr?.pipe(q.stderr)},tt=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(q.kill(-e.pid,t),se.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return se.error("Error killing process:",r),!1}},Dt=e=>tt(e,"SIGKILL"),Br=(e,t)=>{let r=null,o=()=>{se.log(`Process ${e.pid} killed due to idle timeout (no output for ${t}ms)`),tt(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(se.log(`Force killing idle process ${e.pid}`),Dt(e))},5e3)},n=()=>{r&&clearTimeout(r),r=setTimeout(o,t)};n(),e.stdout?.on("data",n),e.stderr?.on("data",n);let i=()=>{r&&(clearTimeout(r),r=null)};e.on("exit",i),e.on("error",i)},Hr=e=>{et.add(e);let t=Pt();if(t){let r=t.onTimesUp(()=>{se.log(`Global timer expired, killing process ${e.pid}`),tt(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(se.log(`Force killing process ${e.pid} after timeout`),Dt(e))},5e3)});e.on("exit",()=>{et.delete(e),r()}),e.on("error",()=>{et.delete(e),r()})}};function ae(e,t){if(!q.env.NETLIFY_LOCAL_MODE)try{let n=Mr.resolve(Lt.name),i=K.dirname(n);for(;i!==K.dirname(i);){let s=K.dirname(i);if(K.basename(s)==="node_modules"){let l=K.join(s,".bin",t);if(Ce.existsSync(l))return l;break}i=s}}catch(n){console.error("Could not resolve package.json",n)}if(q.env.NODE_PATH){let n=K.join(q.env.NODE_PATH,".bin",t);if(Ce.existsSync(n))return n}let r=K.join(e,"node_modules",".bin",t);if(Ce.existsSync(r))return r;let o=K.join(Ur,"..","node_modules",".bin",t);if(Ce.existsSync(o))return o}var kt=_("utils"),qr=e=>new Promise(t=>{setTimeout(t,e)}),Ut=(e,t=3e3)=>{let r=!1,o=null,n=[],i=null,s=(...l)=>{if(r)return o=l,new Promise(f=>{n.push(f)});r=!0;let a,p=new Promise(f=>{a=f});return i=(async()=>{await Promise.resolve();let f=await e(...l);for(a(f);;){if(await qr(t),!o)return r=!1,i=null,f;let 
u=o,m=n;o=null,n=[],f=await e(...u),m.forEach(h=>{h(f)})}})(),p};return s.flush=async()=>{if((r||o)&&i)return await i,s.flush()},s},le=(e,t,r=!1)=>{let o=null,n=null,i=null,s=function(...l){n=l,i=this;let a=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(i,n),n=null,i=null)},t),a&&(e.apply(i,n),n=null,i=null)};return s.cancel=()=>{clearTimeout(o),o=null,n=null,i=null},s.flush=()=>{if(o){clearTimeout(o);let l=n,a=i;o=null,n=null,i=null,e.apply(a,l)}},s},Pe=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(o){t&&(r?.error?r.error("Could not parse JSON",o):kt.error("Could not parse JSON",o))}},Mt=(e,t)=>{let n=".netlify.app",i="agent-";if(!t)return`${i}${e.slice(0,6)}`;let l=`--${t}${n}`;if(l.length>55)return"";let a=60-l.length;if(a<=0)return"";if(a>=i.length+6){let p=Math.min(a-i.length,e.length);return`${i}${e.slice(0,p)}`}return e.slice(0,a)},Wr=e=>!e||typeof e!="object"||Array.isArray(e)||Object.keys(e).length===0?!1:!!Ft.some(t=>t in e),Gt=()=>{let e={},t={codex:process.env.NETLIFY_FF_AGENT_RUNNER_CODEX_VERSION,claude:process.env.NETLIFY_FF_AGENT_RUNNER_CLAUDE_VERSION,gemini:process.env.NETLIFY_FF_AGENT_RUNNER_GEMINI_VERSION};return Object.entries(t).forEach(([r,o])=>{if(o){let n=`NETLIFY_FF_AGENT_RUNNER_${r.toUpperCase()}_VERSION`;try{let i=JSON.parse(o);Wr(i)&&(e[r]=i)}catch(i){let l=i instanceof SyntaxError?"Invalid JSON":i.message;kt.error(`Could not parse ${r} model version override from ${n}: ${l}`)}}}),e},Kr=50*1024,rt=(e,t=Kr)=>{if(!e||typeof e!="string"||e.length<=t)return e;let o=e.startsWith("```")?"\n... [truncated]\n```":"... [truncated]";return e.slice(0,t)+o};import{Buffer as jt}from"buffer";import Jr from"path";var Yt=_("repo"),Bt=async({config:e,isRetry:t})=>{Yt.info("Getting runner diffs");let r=await Xr(),{hasChanges:o}=r,{status:n}=r;if(!o)return{hasChanges:!1};if(!t){let T=zr(n);await Zr(T)}Yt.info("Changes after processing"),await ot();let i=await it(n);await nt(i);let s={stdio:["ignore","pipe","pipe"]},l=await F("git",["diff","--staged"],s),a=String(l.stdout??"");if(o=!!a,!o)return{hasChanges:!1,ignored:i};let p=await F("git",["diff","--staged","--binary"],s),f=String(p.stdout??""),u,m;if(e.sha){if(!process.env.NETLIFY_LOCAL_MODE){process.env.NETLIFY_INTERNAL_GIT="1";try{await F("git",["commit","-m","Agent runner"])}finally{process.env.NETLIFY_INTERNAL_GIT="0"}}let T=await F("git",["diff",e.sha,"HEAD"],s);u=String(T.stdout??"");let E=await F("git",["diff",e.sha,"HEAD","--binary"],s),g=String(E.stdout??"");u!==g&&(m=jt.from(g).toString("base64"))}let h={hasChanges:!0,diff:a,resultDiff:u,ignored:i};return a!==f&&(h.diffBinary=jt.from(f).toString("base64")),m&&(h.resultDiffBinary=m),h},nt=async(e=[])=>{process.env.NETLIFY_INTERNAL_GIT="1";try{await F("git",["add",".",...e])}finally{process.env.NETLIFY_INTERNAL_GIT="0"}},ot=async()=>{let e=await F("git",["status","-s"]);return String(e.stdout??"")},Ht=/.. (.+)?\.log$/,Vr=[Ht],Xr=async()=>{let e=await ot();return{hasChanges:(e.trim().length===0?[]:e.split(`
+ `).filter(o=>Vr.some(i=>i instanceof RegExp?i.test(o):o===i)?!1:o[1]?.trim()!=="")).length!==0,status:e}},qt=async()=>{let{stdout:e}=await F("git",["rev-parse","HEAD"]);return String(e??"").trim()},Wt=async()=>{let{stdout:e}=await F("git",["rev-list","--max-parents=0","HEAD"]);return String(e??"").trim()},it=async e=>{e||=await ot();let t=[".netlify","node_modules","dist",".next","out",".nuxt",".output",".cache",".turbo",".parcel-cache","coverage",".nyc_output","storybook-static","public/build"],r=[];return e.split(`
+ `).forEach(o=>{t.forEach(i=>{let s=o===`?? ${i}`,l=o.startsWith(`?? ${i}/`)||o.startsWith(`?? ${i}${Jr.sep}`);(s||l)&&r.push(`:!${i}`)});let n=o.match(Ht)?.[1];n&&r.push(`:!${n}.log`)}),r},Kt=async()=>{await F("git",["reset","--hard","HEAD"])},zr=e=>{let t=e.split(`
+ `).reduce((r,o)=>{if(!o)return r;let[n,i,,...s]=o,l=s.join(""),a=n.trim(),p=i.trim();return r[l]?r[l].change=p:r[l]={filePath:l,stage:a,change:p},r},{});return Object.values(t)},Zr=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push(F("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import en from"fs/promises";import tn from"os";import Xt from"path";import pe from"process";import rn from"readline";import st from"path";import Qr from"fs/promises";var at=_("agent-output-utils");async function ce({initialResult:e,agentName:t,hasError:r}){let o="",n=st.join(process.cwd(),D,ee);try{let i=await Qr.readFile(n,"utf-8");i&&(o=i,at.log(`Pulled result from ${st.relative(process.cwd(),n)}`))}catch{at.log(`No results file found at ${st.relative(process.cwd(),n)}`)}return o||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function ue({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,o=r?.replace(/\s+/g," ").trim().toLowerCase()||"",n="";return o?.includes("ai gateway is not available for your account")||o?.includes("ai gateway is not enabled for your account")?n="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":o?.includes("error when talking to gemini api")?n="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(o?.includes("connection closed prematurely")||o?.includes("499")&&t.toLowerCase().includes("gemini"))&&(n=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),o?.includes("request timed out")&&(n=`The ${t} API request's have timed out. Please try again or use a different available agent.`),o?.includes("network error")&&(n=`The ${t} agent is having network issues. Please try again or use a different available agent.`),o?.includes("503")&&(n=`The ${t} API is currently experiencing high load. 
Retrying automatically...`),n&&at.log(`Providing updated error messsage: ${n}, replacing original error: ${r}`),n||r||void 0}function de(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error")||r?.includes("503"))}var k=_("runner_claude"),Jt="Claude Code",fe="claude-opus-4-5-20251101",Vt=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,nn=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(k.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(k.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(k.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function lt({config:e,netlify:t,persistSteps:r,aiGateway:o,continueSession:n,priorAgentSessionId:i,cwd:s=pe.cwd()}){let l=e,{accountType:a,prompt:p,modelVersionOverrides:f}=l,{model:u}=l,m="";if(o){let{token:y,url:d}=o;if(!y||!d)throw new Error("No token or url provided from AI Gateway");if(f?.claude){let c=f?.claude?.[a];if(c){if(!await o.isModelAvailableForProvider("anthropic",c))throw new Error(`Model override '${c}' is not available for anthropic provider`);u=c}}else if(u){if(!await o.isModelAvailableForProvider("anthropic",u))throw new Error(`Model '${u}' is not available for anthropic provider`)}else!!fe&&await o.isModelAvailableForProvider("anthropic",fe)?(u=fe,k.log(`Using default model: ${fe}`)):fe&&k.log(`Default model ${fe} is not available, proceeding without model specification`);pe.env.ANTHROPIC_API_KEY=y,pe.env.ANTHROPIC_BASE_URL=d}else if(!pe.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let h=[],T=[],E={},g=0,x=0,R,N,b=[ae(s,"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose","--disallowed-tools","ExitPlanMode,AskUserQuestion",...u?["--model",u]:[],...n?["--continue"]:[],...n&&i?["--resume",i]:[],"-p",p],C=`${pe.env.NVM_BIN}/node`;k.log(`Running ${C} ${b.join(" ")}`);let O=t.utils.run(C,b,{all:!0,env:pe.env,cwd:s,idleTimeout:te});O.stdin?.end();let I=le(()=>{r?.({steps:h,duration:x})},250),v=(y,d)=>{let c={...y,id:g};g+=1,T.push(c),h.push(c),d||I.flush(),I(),d&&I.flush()},M=rn.createInterface({input:O.all});return M.on("error",y=>{k.error("Readline interface error",{error:y.message,stack:y.stack})}),M.on("line",y=>{let d=null;try{d=JSON.parse(y)}catch{k.log("Could not parse line",y)}d?.session_id&&d.session_id!==m&&(m=d.session_id),Array.isArray(d?.message?.content)?d.message.content.forEach(c=>{switch(c.type){case"text":{c.text&&v({message:c.text});break}case"image":{typeof 
c.source=="object"&&c.source&&c.source.type==="base64"&&c.source.media_type?v({message:`![](data:${c.source.media_type};base64,${c.source.data})`}):k.log(`Unsupported image type ${c.source?.type}`,c.source);break}case"tool_use":{if(c.name==="Task"){let w=c.input?.description&&`\`${c.input.description}\``;v({title:[Vt(c.name),w].filter(Boolean).join(" ")})}else c.id&&(E[c.id]=c);I.flush();break}case"tool_result":{let w=c.tool_use_id?E[c.tool_use_id]:void 0,z;if(w){let J=w.input?.file_path&&Xt.relative(s,w.input.file_path),P=J&&`\`${J}\``;z=[Vt(w.name||""),P].filter(Boolean).join(" ")}let Ne=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(w?.name||""),Z;if(typeof c.content=="string")Z=c.content;else if(Array.isArray(c.content)){let J=[];c.content.forEach(P=>{P?.type==="text"&&typeof P.text=="string"?J.push(P.text):P?.type==="image"&&typeof P.source=="object"&&P.source?P.source.type==="base64"&&P.source.media_type?J.push(`![](data:${P.source.media_type};base64,${P.source.data})`):k.log(`Unsupported image type ${P.source.type}`,P.source):k.log(`Unsupported block type ${P?.type}`)}),Z=J.join(`
 
  `)}Ne&&Z&&(Z=`\`\`\`
  ${Z.trim()}
- \`\`\``),v({title:z,message:Z},!0);break}case"thinking":{a.thinking&&v({title:"Thinking",message:a.thinking},!0);break}default:k.log(`Message content type is not supported ${a.type}`,a)}}):d?.type==="result"&&(I=d.duration_ms||0,d.is_error?N=d.result:R=d.result,[E,h].forEach(a=>{a[a.length-1]?.message===R&&a.pop()}))}),await F.catch(y=>{({error:N,result:R}=tn({catchError:y,runCmd:F,error:N,result:R,runnerName:"Claude"}))}),M.close(),x.flush(),{steps:E,duration:I,result:await ce({initialResult:R,agentName:Wt,hasError:!!N}),error:ue({error:N,agentName:Wt}),isRetryableError:de(N),agentSessionId:m}}var Vt=async()=>{let e=Jt.join(Qr.homedir(),".claude");await Zr.rm(e,{recursive:!0,force:!0})};import Ie from"fs/promises";import zt from"os";import Fe from"path";import re from"process";import rn from"readline";var U=_("runner_codex"),Xt="Codex CLI",me="",nn=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(U.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(U.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(U.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function lt({config:e,netlify:t,persistSteps:r=void 0,sendSteps:o=void 0,aiGateway:n,cwd:i=re.cwd()}){let{accountType:s,prompt:c,modelVersionOverrides:l}=e,{model:p}=e;if(n){let{token:d,url:a}=n;if(!d||!a)throw new Error("No token or url provided from AI Gateway");if(l?.codex){let T=l?.codex?.[s];if(T){if(!await n.isModelAvailableForProvider("openai",T))throw new Error(`Model override '${T}' is not available for openai provider`);p=T}}else if(p){if(!await n.isModelAvailableForProvider("openai",p))throw new Error(`Model '${p}' is not available for openai provider`)}else!!me&&await n.isModelAvailableForProvider("openai",me)?(p=me,U.log(`Using default model: ${me}`)):me&&U.log(`Default model ${me} is not available, proceeding without model specification`);re.env.OPENAI_API_KEY=d,re.env.OPENAI_BASE_URL=a}else if(!re.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let f=[],u=[],m=[],h={},E=0,w=0,g,I,R=`${re.env.NVM_BIN}/node`,N=Fe.join(zt.homedir(),".codex"),b=Fe.join(N,"config.toml"),C=Fe.join(N,"auth.json");try{await Ie.mkdir(N,{recursive:!0});let d={OPENAI_API_KEY:re.env.OPENAI_API_KEY};await Ie.writeFile(C,JSON.stringify(d,null,2),"utf-8"),U.log("Created Codex auth.json file");let a="";try{a=await Ie.readFile(b,"utf-8")}catch{}a.includes("web_search_request")||(a.includes("[features]")?a=a.replace(/\[features\]/,`[features]
16
- web_search_request = true`):a+=`
15
+ \`\`\``),v({title:z,message:Z},!0);break}case"thinking":{c.thinking&&v({title:"Thinking",message:c.thinking},!0);break}default:k.log(`Message content type is not supported ${c.type}`,c)}}):d?.type==="result"&&(x=d.duration_ms||0,d.is_error?N=d.result:R=d.result,[T,h].forEach(c=>{c[c.length-1]?.message===R&&c.pop()}))}),await O.catch(y=>{({error:N,result:R}=nn({catchError:y,runCmd:O,error:N,result:R,runnerName:"Claude"}))}),M.close(),I.flush(),{steps:T,duration:x,result:await ce({initialResult:R,agentName:Jt,hasError:!!N}),error:ue({error:N,agentName:Jt}),isRetryableError:de(N),agentSessionId:m}}var zt=async()=>{let e=Xt.join(tn.homedir(),".claude");await en.rm(e,{recursive:!0,force:!0})};import xe from"fs/promises";import Qt from"os";import Oe from"path";import re from"process";import on from"readline";var U=_("runner_codex"),Zt="Codex CLI",me="",sn=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(U.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(U.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(U.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function ct({config:e,netlify:t,persistSteps:r=void 0,sendSteps:o=void 0,aiGateway:n,cwd:i=re.cwd()}){let{accountType:s,prompt:l,modelVersionOverrides:a}=e,{model:p}=e;if(n){let{token:d,url:c}=n;if(!d||!c)throw new Error("No token or url provided from AI Gateway");if(a?.codex){let w=a?.codex?.[s];if(w){if(!await n.isModelAvailableForProvider("openai",w))throw new Error(`Model override '${w}' is not available for openai provider`);p=w}}else if(p){if(!await n.isModelAvailableForProvider("openai",p))throw new Error(`Model '${p}' is not available for openai provider`)}else!!me&&await n.isModelAvailableForProvider("openai",me)?(p=me,U.log(`Using default model: ${me}`)):me&&U.log(`Default model ${me} is not available, proceeding without model specification`);re.env.OPENAI_API_KEY=d,re.env.OPENAI_BASE_URL=c}else if(!re.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let f=[],u=[],m=[],h={},T=0,E=0,g,x,R=`${re.env.NVM_BIN}/node`,N=Oe.join(Qt.homedir(),".codex"),b=Oe.join(N,"config.toml"),C=Oe.join(N,"auth.json");try{await xe.mkdir(N,{recursive:!0});let d={OPENAI_API_KEY:re.env.OPENAI_API_KEY};await xe.writeFile(C,JSON.stringify(d,null,2),"utf-8"),U.log("Created Codex auth.json file");let c="";try{c=await xe.readFile(b,"utf-8")}catch{}c.includes("web_search_request")||(c.includes("[features]")?c=c.replace(/\[features\]/,`[features]
16
+ web_search_request = true`):c+=`
17
17
  [features]
18
18
  web_search_request = true
19
- `,await Ie.writeFile(b,a,"utf-8"),U.log("Updated Codex config with web_search_request enabled"))}catch(d){throw U.warn("Failed to setup Codex config and credentials",{error:d.message}),new Error(`Codex setup failed: ${d.message}`)}let F=[ae(i,"codex"),"exec","--yolo","--json","--enable","web_search_request",...p?["--model",p]:[],c].filter(Boolean);U.log(`Running ${R} ${F.join(" ")}`);let x=t.utils.run(R,F,{all:!0,cwd:i,env:{...re.env},idleTimeout:te}),v=le(()=>{r?.({steps:f,duration:w}),o?.({steps:u,duration:w}),u=[]},250),M=(d,a)=>{d.id=E,E+=1,m.push(d),f.push(d),u.push(d),a||v.flush(),v(),a&&v.flush()},y=rn.createInterface({input:x.all});return y.on("error",d=>{U.error("Readline interface error",{error:d.message,stack:d.stack})}),y.on("line",d=>{let a=null;try{a=JSON.parse(d)}catch{U.log("Could not parse line",d);return}if(a?.duration_ms&&(w=a.duration_ms),a?.type==="item.started"&&a?.item?.type==="command_execution")h[a.item.id]=a.item;else if(a?.type==="item.completed"&&a?.item?.type==="command_execution"){let T=sn(a.item);T&&M(T,!0)}else if(a?.type==="item.completed"&&a?.item?.type==="reasoning"){let T={title:"Reasoning",message:a.item.text};M(T,!0)}else if(a?.type==="local_shell_call")h[a.call_id]=a;else if(a?.type==="local_shell_call_output"){let T=an(h[a.call_id],a);T&&M(T,!0)}else a?.type==="message"&&a.role==="assistant"?g=a.content.map(T=>T.text).join(`
20
- `):a?.type==="message"&&a.role==="system"&&(I=a.content.map(T=>T.text).join(`
21
- `))}),await x.catch(d=>{let a=nn({catchError:d,runCmd:x,error:I,result:g,runnerName:"Codex"});I=a.error,g=a.result}),y.close(),v.flush(),{steps:m,duration:w,result:await ce({initialResult:g,agentName:Xt,hasError:!!I}),error:ue({error:I,agentName:Xt}),isRetryableError:de(I)}}var Zt=async()=>{let e=Fe.join(zt.homedir(),".codex");await Ie.rm(e,{recursive:!0,force:!0})},on=new Set(["bash","-lc"]),sn=e=>{if(!e||e.type!=="command_execution")return null;let t=e.command;t.startsWith("bash -lc ")&&(t=t.replace(/^bash -lc ['"]/,"").replace(/['"]$/,""));let r=`Running \`${t}\``,o=e.aggregated_output?.trim();return o&&(o=`\`\`\`
19
+ `,await xe.writeFile(b,c,"utf-8"),U.log("Updated Codex config with web_search_request enabled"))}catch(d){throw U.warn("Failed to setup Codex config and credentials",{error:d.message}),new Error(`Codex setup failed: ${d.message}`)}let O=[ae(i,"codex"),"exec","--yolo","--json","--enable","web_search_request",...p?["--model",p]:[],l].filter(Boolean);U.log(`Running ${R} ${O.join(" ")}`);let I=t.utils.run(R,O,{all:!0,cwd:i,env:{...re.env},idleTimeout:te}),v=le(()=>{r?.({steps:f,duration:E}),o?.({steps:u,duration:E}),u=[]},250),M=(d,c)=>{d.id=T,T+=1,m.push(d),f.push(d),u.push(d),c||v.flush(),v(),c&&v.flush()},y=on.createInterface({input:I.all});return y.on("error",d=>{U.error("Readline interface error",{error:d.message,stack:d.stack})}),y.on("line",d=>{let c=null;try{c=JSON.parse(d)}catch{U.log("Could not parse line",d);return}if(c?.duration_ms&&(E=c.duration_ms),c?.type==="item.started"&&c?.item?.type==="command_execution")h[c.item.id]=c.item;else if(c?.type==="item.completed"&&c?.item?.type==="command_execution"){let w=ln(c.item);w&&M(w,!0)}else if(c?.type==="item.completed"&&c?.item?.type==="reasoning"){let w={title:"Reasoning",message:c.item.text};M(w,!0)}else if(c?.type==="local_shell_call")h[c.call_id]=c;else if(c?.type==="local_shell_call_output"){let w=cn(h[c.call_id],c);w&&M(w,!0)}else c?.type==="message"&&c.role==="assistant"?g=c.content.map(w=>w.text).join(`
20
+ `):c?.type==="message"&&c.role==="system"&&(x=c.content.map(w=>w.text).join(`
21
+ `))}),await I.catch(d=>{let c=sn({catchError:d,runCmd:I,error:x,result:g,runnerName:"Codex"});x=c.error,g=c.result}),y.close(),v.flush(),{steps:m,duration:E,result:await ce({initialResult:g,agentName:Zt,hasError:!!x}),error:ue({error:x,agentName:Zt}),isRetryableError:de(x)}}var er=async()=>{let e=Oe.join(Qt.homedir(),".codex");await xe.rm(e,{recursive:!0,force:!0})},an=new Set(["bash","-lc"]),ln=e=>{if(!e||e.type!=="command_execution")return null;let t=e.command;t.startsWith("bash -lc ")&&(t=t.replace(/^bash -lc ['"]/,"").replace(/['"]$/,""));let r=`Running \`${t}\``,o=e.aggregated_output?.trim();return o&&(o=`\`\`\`
22
22
  ${o}
23
23
  \`\`\``),e.status==="failed"&&e.exit_code!==0&&(o=o?`${o}
24
24
 
25
- *Exit code: ${e.exit_code}*`:`*Command failed with exit code: ${e.exit_code}*`),{title:r,message:o}},an=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(i=>!on.has(i)),o=r?`Running \`${r.join(" ")}\``:void 0,n;try{n=JSON.parse(t.output).output?.trim(),n&&(n=`\`\`\`
25
+ *Exit code: ${e.exit_code}*`:`*Command failed with exit code: ${e.exit_code}*`),{title:r,message:o}},cn=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(i=>!an.has(i)),o=r?`Running \`${r.join(" ")}\``:void 0,n;try{n=JSON.parse(t.output).output?.trim(),n&&(n=`\`\`\`
26
26
  ${n.trim()}
27
- \`\`\``)}catch(i){U.error("Could not decode outputMsg",i,t.output)}return{title:o,message:n}};import Oe from"fs/promises";import er from"os";import $e from"path";import ge from"process";import ln from"readline";var G=_("runner_gemini"),Qt="Gemini CLI",he="",cn=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(G.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(G.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(G.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),un={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"},dn=async()=>{let e=$e.join(er.homedir(),".gemini"),t=$e.join(e,"settings.json");try{await Oe.mkdir(e,{recursive:!0});let r={};try{let o=await Oe.readFile(t,"utf-8");r=JSON.parse(o)}catch{G.log("Creating new Gemini CLI settings file")}r.general||(r.general={}),r.general.previewFeatures||(r.general.previewFeatures=!0),r.model||(r.model={}),r.model.compressionThreshold!==.3&&(r.model.compressionThreshold=.3),await Oe.writeFile(t,JSON.stringify(r,null,2),"utf-8"),G.log("Configured Gemini CLI settings (preview features and compression threshold)")}catch(r){G.error("Failed to ensure Gemini CLI settings",{error:r.message})}};async function ct({config:e,netlify:t,persistSteps:r=void 0,sendSteps:o=void 0,aiGateway:n,cwd:i=ge.cwd()}){let{accountType:s,prompt:c,modelVersionOverrides:l}=e,{model:p}=e;if(await dn(),n){let{token:y,url:d}=n;if(!y||!d)throw new Error("No token or url provided from AI Gateway");if(l?.gemini){let a=l?.gemini?.[s];if(a){if(!await n.isModelAvailableForProvider("gemini",a))throw new Error(`Model override '${a}' is not available for gemini provider`);p=a}}if(!p)!!he&&await n.isModelAvailableForProvider("gemini",he)?(p=he,G.log(`Using default model: ${he}`)):he&&G.log(`Default model ${he} is not available, proceeding without model specification`);else if(p&&!l?.gemini?.[s]&&!await n.isModelAvailableForProvider("gemini",p))throw new Error(`Model '${p}' is not available for gemini provider`);ge.env.GEMINI_API_KEY=y,ge.env.GOOGLE_GEMINI_BASE_URL=d}else if(!ge.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let f=[],u=[],m=[],h={},E=0,w=0,g,I,R=[ae(i,"gemini"),...p?["--model",p]:[],"--yolo","--output-format","stream-json","-p",c],N=`${ge.env.NVM_BIN}/node`;G.log(`Running ${N} ${R.join(" ")}`);let b=t.utils.run(N,R,{all:!0,env:ge.env,cwd:i,idleTimeout:te});b.stdin?.end();let C=le(()=>{r?.({steps:f,duration:w}),o?.({steps:u,duration:w}),u=[]},250),F=(y,d)=>{y.id=E,E+=1,m.push(y),f.push(y),u.push(y),d||C.flush(),C(),d&&C.flush()},x=ln.createInterface({input:b.all});x.on("error",y=>{G.error("Readline interface error",{error:y.message,stack:y.stack})});let v="",M=()=>{v&&F({message:v.trim()}),v=""};return x.on("line",y=>{let d=null;try{if(y.startsWith("[API Error")){let a=y.match(/\[api error: (.+?)]$/i)?.[1];d={type:"error",value:Pe(a,!1)?.error?.message||a||"Gemini encountered error"}}else 
d=JSON.parse(y)}catch{return}if(d)switch(["message","result"].includes(d.type)||M(),d.type){case"message":{d.role!=="user"&&d.content&&(v+=d.content);break}case"tool_use":{let a=un[d.tool_name]??d.tool_name,T=d.parameters?.file_path,z=T&&$e.relative(i,T),Ne=d.parameters?.command,J={title:[a,z&&`\`${z}\``,Ne&&`\`${Ne}\``].filter(Boolean).join(" ")};h[d.tool_id]=J,C.flush();break}case"tool_result":{let a=h[d.tool_id];a&&(d.output&&(a.message=`\`\`\`
27
+ \`\`\``)}catch(i){U.error("Could not decode outputMsg",i,t.output)}return{title:o,message:n}};import Fe from"fs/promises";import rr from"os";import $e from"path";import ge from"process";import un from"readline";var G=_("runner_gemini"),tr="Gemini CLI",he="",dn=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(G.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(G.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(G.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),pn={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"},fn=async()=>{let e=$e.join(rr.homedir(),".gemini"),t=$e.join(e,"settings.json");try{await Fe.mkdir(e,{recursive:!0});let r={};try{let o=await Fe.readFile(t,"utf-8");r=JSON.parse(o)}catch{G.log("Creating new Gemini CLI settings file")}r.general||(r.general={}),r.general.previewFeatures||(r.general.previewFeatures=!0),r.model||(r.model={}),r.model.compressionThreshold!==.3&&(r.model.compressionThreshold=.3),await Fe.writeFile(t,JSON.stringify(r,null,2),"utf-8"),G.log("Configured Gemini CLI settings (preview features and compression threshold)")}catch(r){G.error("Failed to ensure Gemini CLI settings",{error:r.message})}};async function ut({config:e,netlify:t,persistSteps:r=void 0,sendSteps:o=void 0,aiGateway:n,cwd:i=ge.cwd()}){let{accountType:s,prompt:l,modelVersionOverrides:a}=e,{model:p}=e;if(await fn(),n){let{token:y,url:d}=n;if(!y||!d)throw new Error("No token or url provided from AI Gateway");if(a?.gemini){let c=a?.gemini?.[s];if(c){if(!await n.isModelAvailableForProvider("gemini",c))throw new Error(`Model override '${c}' is not available for gemini provider`);p=c}}if(!p)!!he&&await n.isModelAvailableForProvider("gemini",he)?(p=he,G.log(`Using default model: ${he}`)):he&&G.log(`Default model ${he} is not available, proceeding without model specification`);else if(p&&!a?.gemini?.[s]&&!await n.isModelAvailableForProvider("gemini",p))throw new Error(`Model '${p}' is not available for gemini provider`);ge.env.GEMINI_API_KEY=y,ge.env.GOOGLE_GEMINI_BASE_URL=d}else if(!ge.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let f=[],u=[],m=[],h={},T=0,E=0,g,x,R=[ae(i,"gemini"),...p?["--model",p]:[],"--yolo","--output-format","stream-json","-p",l],N=`${ge.env.NVM_BIN}/node`;G.log(`Running ${N} ${R.join(" ")}`);let b=t.utils.run(N,R,{all:!0,env:ge.env,cwd:i,idleTimeout:te});b.stdin?.end();let C=le(()=>{r?.({steps:f,duration:E}),o?.({steps:u,duration:E}),u=[]},250),O=(y,d)=>{y.id=T,T+=1,m.push(y),f.push(y),u.push(y),d||C.flush(),C(),d&&C.flush()},I=un.createInterface({input:b.all});I.on("error",y=>{G.error("Readline interface error",{error:y.message,stack:y.stack})});let v="",M=()=>{v&&O({message:v.trim()}),v=""};return I.on("line",y=>{let d=null;try{if(y.startsWith("[API Error")){let c=y.match(/\[api error: (.+?)]$/i)?.[1];d={type:"error",value:Pe(c,!1)?.error?.message||c||"Gemini encountered error"}}else 
d=JSON.parse(y)}catch{return}if(d)switch(["message","result"].includes(d.type)||M(),d.type){case"message":{d.role!=="user"&&d.content&&(v+=d.content);break}case"tool_use":{let c=pn[d.tool_name]??d.tool_name,w=d.parameters?.file_path,z=w&&$e.relative(i,w),Ne=d.parameters?.command,J={title:[c,z&&`\`${z}\``,Ne&&`\`${Ne}\``].filter(Boolean).join(" ")};h[d.tool_id]=J,C.flush();break}case"tool_result":{let c=h[d.tool_id];c&&(d.output&&(c.message=`\`\`\`
28
28
  ${d.output.trim()}
29
- \`\`\``),F(a,!0));break}case"result":{w=d.stats?.duration_ms,d.status==="error"?I=d.error?.message:g=v.trim();break}case"error":{I=d.error;break}case"finished":break;default:{G.warn("Unhandled message type:",d.type);break}}}),await b.catch(y=>{({error:I,result:g}=cn({catchError:y,runCmd:b,error:I,result:g,runnerName:"Gemini"}))}),x.close(),C.flush(),{steps:m,duration:w,result:await ce({initialResult:g,agentName:Qt,hasError:!!I}),error:ue({error:I,agentName:Qt}),isRetryableError:de(I)}}var tr=async()=>{let e=$e.join(er.homedir(),".gemini");await Oe.rm(e,{recursive:!0,force:!0})};var pn={codex:{runner:lt,clean:Zt},claude:{runner:at,clean:Vt},gemini:{runner:ct,clean:tr}},rr=pn;var Le=_("init_stage"),or=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:o})=>await A(gn(),"init-stage",async n=>{let i=performance.now();n?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.runnerVersion":o||"unknown"});let s=rr[e.runner];if(!s)throw n?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let c=yn({apiToken:r});wt(c);let l=e.useGateway?await At({netlify:c,config:e}):void 0;n?.setAttributes({"init.aiGateway.created":!!l});let p=5*1024,f=Dt(async({steps:E=[],duration:w})=>{let g=E.map(I=>{let R=I.title?tt(W(I.title),p):void 0,N=I.message?tt(W(I.message)):void 0;return{...I,title:R,message:N}});E.length=0;try{return await H(e.id,e.sessionId,{steps:g,duration:w})}catch(I){Le.error("persistSteps failed",{error:I?.message||I})}},t);Le.info("Adding build files to stage");let u=await ot();await rt(u),V.env.NETLIFY_LOCAL_MODE||await hn();let m;e.hasRepo?e.sha?(m=e.sha,n?.setAttributes({"init.sha.source":"provided"})):(m=await Bt(),await Te(e.id,{sha:m}),n?.setAttributes({"init.sha.source":"current_commit"})):(m=await Ht(),n?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let h=performance.now()-i;return n?.setAttributes({"init.sha":m||"unknown","init.duration.ms":h,"init.status":"success"}),{aiGateway:l,context:c,persistSteps:f,runner:s,sha:m}}),hn=async()=>{let e="/usr/bin/git";try{e=fn("which git").toString().trim()||e}catch{}let t="/tmp/netlify-git-wrapper",r=mn.join(t,"git"),o=`#!/bin/bash
29
+ \`\`\``),O(c,!0));break}case"result":{E=d.stats?.duration_ms,d.status==="error"?x=d.error?.message:g=v.trim();break}case"error":{x=d.error;break}case"finished":break;default:{G.warn("Unhandled message type:",d.type);break}}}),await b.catch(y=>{({error:x,result:g}=dn({catchError:y,runCmd:b,error:x,result:g,runnerName:"Gemini"}))}),I.close(),C.flush(),{steps:m,duration:E,result:await ce({initialResult:g,agentName:tr,hasError:!!x}),error:ue({error:x,agentName:tr}),isRetryableError:de(x)}}var nr=async()=>{let e=$e.join(rr.homedir(),".gemini");await Fe.rm(e,{recursive:!0,force:!0})};var mn={codex:{runner:ct,clean:er},claude:{runner:lt,clean:zt},gemini:{runner:ut,clean:nr}},or=mn;var Le=_("init_stage"),sr=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:o})=>await A(yn(),"init-stage",async n=>{let i=performance.now();n?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.runnerVersion":o||"unknown"});let s=or[e.runner];if(!s)throw n?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let l=_n({apiToken:r});xt(l);let a=e.useGateway?await At({netlify:l,config:e}):void 0;n?.setAttributes({"init.aiGateway.created":!!a});let p=5*1024,f=Ut(async({steps:T=[],duration:E})=>{let g=T.map(x=>{let R=x.title?rt(W(x.title),p):void 0,N=x.message?rt(W(x.message)):void 0;return{...x,title:R,message:N}});T.length=0;try{return await H(e.id,e.sessionId,{steps:g,duration:E})}catch(x){Le.error("persistSteps failed",{error:x?.message||x})}},t);Le.info("Adding build files to stage");let u=await it();await nt(u),V.env.NETLIFY_LOCAL_MODE||await En();let m;e.hasRepo?e.sha?(m=e.sha,n?.setAttributes({"init.sha.source":"provided"})):(m=await qt(),await Te(e.id,{sha:m}),n?.setAttributes({"init.sha.source":"current_commit"})):(m=await Wt(),n?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let h=performance.now()-i;return n?.setAttributes({"init.sha":m||"unknown","init.duration.ms":h,"init.status":"success"}),{aiGateway:a,context:l,persistSteps:f,runner:s,sha:m}}),En=async()=>{let e="/usr/bin/git";try{e=gn("which git").toString().trim()||e}catch{}let t="/tmp/netlify-git-wrapper",r=hn.join(t,"git"),o=`#!/bin/bash
30
30
  # Git wrapper that blocks add and commit commands
31
31
  # The deployment system handles staging and commits automatically
32
32
 
@@ -56,7 +56,7 @@ case "$1" in
56
56
  exec ${e} "$@"
57
57
  ;;
58
58
  esac
59
- `;try{await nr.mkdir(t,{recursive:!0}),await nr.writeFile(r,o,{mode:493}),V.env.PATH=`${t}:${V.env.PATH}`,V.env.NETLIFY_INTERNAL_GIT="0",Le.info("Installed git wrapper to block add/commit commands")}catch(n){Le.warn("Failed to install git wrapper",{error:n?.message||n})}},yn=({apiToken:e})=>({constants:{NETLIFY_API_HOST:V.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||V.env.NETLIFY_API_TOKEN,SITE_ID:V.env.SITE_ID,FUNCTIONS_DIST:V.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:O}});import{getTracer as ut}from"@netlify/otel";import _n from"crypto";import X from"fs/promises";import L from"path";import j from"process";var $=_("context"),En=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:j.env.NETLIFY_TEAM_ID,userId:j.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:j.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},Tn=10,wn=async e=>{let{name:t,ext:r}=L.parse(e),o=e,n=L.join(j.cwd(),D,o),i=0;for(;await In(n);){if(i>=Tn)throw new Error("Failed to generate context file");o=`${t}-${_n.randomUUID().slice(0,5)}${r}`,n=L.join(j.cwd(),D,o),i+=1}return o},In=async e=>{try{return await X.access(e),!0}catch{return!1}},xn=async()=>{try{$.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return $.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(o=>o&&typeof o=="object"&&o.key==="catchall-consumer");return r?!r.contextScopes||typeof r.contextScopes!="object"?($.warn("Catchall consumer missing or invalid contextScopes"),null):r:($.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?$.warn("Netlify features context request timed out"):$.warn("Failed to fetch Netlify features context:",e.message),null}},vn=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let o=await r.text();return await X.writeFile(t,o,"utf-8"),!0}catch(r){return r.name==="AbortError"?$.warn(`Download timeout for ${e}`):$.warn(`Failed to download context file ${e}:`,r.message),!1}},De=null,Rn=async()=>{if(De)return De;let e=await xn();if(!e)return[];let t=L.join(j.cwd(),D,Ke);await X.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([n,i])=>{if(!i||typeof i!="object"||!i.endpoint||!i.scope)return $.warn(`Invalid scope data for ${n}, skipping...`),null;let s=`${n}.md`,c=L.join(t,s),l=L.join(D,Ke,s);return $.log(`Downloading ${i.scope} context...`),await vn(i.endpoint,c)?($.log(`Downloaded: ${l}`),{scope:i.scope,path:l,key:n}):null});return De=(await Promise.all(r)).filter(n=>n!==null),De},ir=async({cliPath:e,netlify:t,config:r,buildErrorContext:o})=>{let n=En(t),i=await wn(Pt),s=L.join(j.cwd(),D);await X.mkdir(s,{recursive:!0});let c=L.join(D,i),l=L.join(j.cwd(),c),p=L.join(j.cwd(),D,ee);try{await X.unlink(p),$.log(`Deleted old results file: ${p}`)}catch{}let f=o?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
59
+ `;try{await ir.mkdir(t,{recursive:!0}),await ir.writeFile(r,o,{mode:493}),V.env.PATH=`${t}:${V.env.PATH}`,V.env.NETLIFY_INTERNAL_GIT="0",Le.info("Installed git wrapper to block add/commit commands")}catch(n){Le.warn("Failed to install git wrapper",{error:n?.message||n})}},_n=({apiToken:e})=>({constants:{NETLIFY_API_HOST:V.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||V.env.NETLIFY_API_TOKEN,SITE_ID:V.env.SITE_ID,FUNCTIONS_DIST:V.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:F}});import{getTracer as dt}from"@netlify/otel";import Tn from"crypto";import X from"fs/promises";import L from"path";import j from"process";var $=_("context"),wn=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:j.env.NETLIFY_TEAM_ID,userId:j.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:j.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},xn=10,In=async e=>{let{name:t,ext:r}=L.parse(e),o=e,n=L.join(j.cwd(),D,o),i=0;for(;await vn(n);){if(i>=xn)throw new Error("Failed to generate context file");o=`${t}-${Tn.randomUUID().slice(0,5)}${r}`,n=L.join(j.cwd(),D,o),i+=1}return o},vn=async e=>{try{return await X.access(e),!0}catch{return!1}},Rn=async()=>{try{$.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return $.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(o=>o&&typeof o=="object"&&o.key==="catchall-consumer");return r?!r.contextScopes||typeof r.contextScopes!="object"?($.warn("Catchall consumer missing or invalid contextScopes"),null):r:($.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?$.warn("Netlify features context request timed out"):$.warn("Failed to fetch Netlify features context:",e.message),null}},Nn=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let o=await r.text();return await X.writeFile(t,o,"utf-8"),!0}catch(r){return r.name==="AbortError"?$.warn(`Download timeout for ${e}`):$.warn(`Failed to download context file ${e}:`,r.message),!1}},De=null,Sn=async()=>{if(De)return De;let e=await Rn();if(!e)return[];let t=L.join(j.cwd(),D,Je);await X.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([n,i])=>{if(!i||typeof i!="object"||!i.endpoint||!i.scope)return $.warn(`Invalid scope data for ${n}, skipping...`),null;let s=`${n}.md`,l=L.join(t,s),a=L.join(D,Je,s);return $.log(`Downloading ${i.scope} context...`),await Nn(i.endpoint,l)?($.log(`Downloaded: ${a}`),{scope:i.scope,path:a,key:n}):null});return De=(await Promise.all(r)).filter(n=>n!==null),De},ar=async({cliPath:e,netlify:t,config:r,buildErrorContext:o})=>{let n=wn(t),i=await In(Ot),s=L.join(j.cwd(),D);await X.mkdir(s,{recursive:!0});let l=L.join(D,i),a=L.join(j.cwd(),l),p=L.join(j.cwd(),D,ee);try{await X.unlink(p),$.log(`Deleted old results file: ${p}`)}catch{}let f=o?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
60
60
  Your task is to analyze and fix the build errors.
61
61
  Don't apply techniques of reverting changes. Apply fixes related to errors.
62
62
  Don't try to run build by yourself. Just fix the errors.
@@ -69,7 +69,7 @@ ${r.siteContext.filter(g=>g.site_context).map(g=>typeof g.site_context=="string"
69
69
 
70
70
  `)}
71
71
  </project_rules>
72
- `);let m="";if(r.sessionHistoryContext?.length){let g=L.join(j.cwd(),D,We);await X.mkdir(g,{recursive:!0});let I=await Promise.all(r.sessionHistoryContext.map(async(R,N)=>{let b=N+1,C=`attempt-${b}.md`,F=L.join(g,C),x=L.join(D,We,C),v=`# Task History - Attempt ${b}
72
+ `);let m="";if(r.sessionHistoryContext?.length){let g=L.join(j.cwd(),D,Ke);await X.mkdir(g,{recursive:!0});let x=await Promise.all(r.sessionHistoryContext.map(async(R,N)=>{let b=N+1,C=`attempt-${b}.md`,O=L.join(g,C),I=L.join(D,Ke,C),v=`# Task History - Attempt ${b}
73
73
 
74
74
  ## Request - what the user asked for
75
75
  ${R.request}
@@ -79,16 +79,16 @@ ${R.request}
79
79
  ## Response - what the agent replied with after its work
80
80
 
81
81
  ${R.response}
82
- `;return await X.writeFile(F,v,"utf-8"),$.log(`Created history file: ${x}`),x}));m+=`
82
+ `;return await X.writeFile(O,v,"utf-8"),$.log(`Created history file: ${I}`),I}));m+=`
83
83
  <session_history_context>
84
84
  History of prior work on this task.
85
85
  You MUST review ALL of the files below as context to understand the context of previous attempts. Use this information to continue the discussion appropriately.
86
86
 
87
- ${I.slice(-5).map(R=>`- ${R}`).join(`
87
+ ${x.slice(-5).map(R=>`- ${R}`).join(`
88
88
  `)}
89
89
 
90
90
  </session_history_context>
91
- `}let h=await Rn(),E="";h.length>0&&(E=`
91
+ `}let h=await Sn(),T="";h.length>0&&(T=`
92
92
  <netlify_features_context>
93
93
  If the user request is explicitly related to a specific Netlify feature (e.g., Netlify Forms, Netlify Functions, etc.), you MUST review the relevant documentation below in addition to reviewing the project files.
94
94
  DO NOT force the use of any Netlify feature if the user request does not explicitly require it or if the project has alternative implementations in place already.
@@ -98,7 +98,7 @@ ${R.response}
98
98
 
99
99
  Refer to these files when working with specific Netlify features.
100
100
  </netlify_features_context>
101
- `);let w=`
101
+ `);let E=`
102
102
  You're an AI agent designed to assist with tasks related to a Netlify project. Please review, understand, and use the context provided to complete the user's request as needed.
103
103
 
104
104
  <request>
@@ -119,8 +119,8 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
119
119
  - If the user asks for "a plan", "just planning", or similar (without asking for implementation) you may use plan mode to explore the codebase in read-only mode, design your implementation approach and write the complete plan to ${s}/${ee}. Stop there, do not wait for approval and do not implement unless explicitly asked.
120
120
  </responses>
121
121
  <attachements>
122
- - for requests that require work with attachments or assets, take into account that uploaded attachments are stored in ${s}/${Je} folder
123
- - move assets from ${s}/${Je} folder to the project assets folder if they are referenced in a code or applied changes
122
+ - for requests that require work with attachments or assets, take into account that uploaded attachments are stored in ${s}/${Ve} folder
123
+ - move assets from ${s}/${Ve} folder to the project assets folder if they are referenced in a code or applied changes
124
124
  </attachements>
125
125
  ${u}
126
126
  </requirements>
@@ -139,7 +139,7 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
139
139
  - 'netlify-cli' npm package is already available as a global package. Don't try to install it again
140
140
  - If you need to start a local development server in order to fulfill the request, try using the Netlify CLI over by running the shell command '${e} dev'. This will start a local HTTP server on port 8888, including live-reloading of any changes and, most critically, it offers local emulation for all Netlify features.
141
141
  </environment>
142
- ${E}
142
+ ${T}
143
143
  <docs>
144
144
  - Netlify Docs: https://docs.netlify.com
145
145
  - LLM Resources Index: https://docs.netlify.com/llms.txt
@@ -147,7 +147,7 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
147
147
  </extra_context>
148
148
 
149
149
  ${m}
150
- `;return await X.writeFile(l,w,"utf-8"),$.log(`Generated agent context document at: ${l}`),w.length>5e5&&(w=`
150
+ `;return await X.writeFile(a,E,"utf-8"),$.log(`Generated agent context document at: ${a}`),E.length>5e5&&(E=`
151
151
  You're an AI agent designed to assist with tasks related to a Netlify project. Please review, understand, and use the context provided to complete the user's request as needed.
152
152
 
153
153
  <request>
@@ -157,18 +157,20 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
157
157
  ${f}
158
158
  </request>
159
159
 
160
- Use the following file for the complete context of the ask, the environment, and what's available. ${l} You MUST READ ALL OF IT. Make sure to read it first. Never cite or paraphrase private context.
161
- `),w};var Nn=_("prompt"),sr=async({cliPath:e,config:t,netlify:r,buildErrorContext:o})=>{let n=await ir({cliPath:e,config:t,netlify:r,buildErrorContext:o});return process.env.AGENT_RUNNER_DEBUG&&Nn.log("Contextful Prompt:",n),{prompt:n}};var ke=_("inference_stage"),ar=5,Ue=async e=>{let{cliPath:t,config:r,context:o,buildErrors:n,runner:i,persistSteps:s,aiGateway:c,attempt:l,contextPrefix:p,priorAgentSessionId:f}=e;ke.log(`Running inference stage, attempt ${l} of ${ar}`);let u=await A(ut(),"inference-stage",async m=>{m?.setAttributes({"inference.attempt":l||1}),St();let{prompt:h}=await A(ut(),"compose-prompt",async()=>await sr({cliPath:t,config:r,buildErrorContext:An(n),netlify:o})),E=`
160
+ Use the following file for the complete context of the ask, the environment, and what's available. ${a} You MUST READ ALL OF IT. Make sure to read it first. Never cite or paraphrase private context.
161
+ `),E};var An=_("prompt"),lr=async({cliPath:e,config:t,netlify:r,buildErrorContext:o})=>{let n=await ar({cliPath:e,config:t,netlify:r,buildErrorContext:o});return process.env.AGENT_RUNNER_DEBUG&&An.log("Contextful Prompt:",n),{prompt:n}};var ke=_("inference_stage"),cr=5,Ue=async e=>{let{cliPath:t,config:r,context:o,buildErrors:n,runner:i,persistSteps:s,aiGateway:l,attempt:a,contextPrefix:p,priorAgentSessionId:f}=e;ke.log(`Running inference stage, attempt ${a} of ${cr}`);let u=await A(dt(),"inference-stage",async m=>{m?.setAttributes({"inference.attempt":a||1}),bt();let{prompt:h}=await A(dt(),"compose-prompt",async()=>await lr({cliPath:t,config:r,buildErrorContext:bn(n),netlify:o})),T=`
162
162
  ${p||""}
163
163
  ${h}
164
- `.trim(),w={...r,prompt:E},g=await A(ut(),`run-${r.runner}`,async()=>await i({aiGateway:c,config:w,netlify:o,persistSteps:s,continueSession:!!(l&&l>1),priorAgentSessionId:f}));return g.result&&(g.result=W(g.result)),g.error&&(g.error=W(g.error)),await s.flush(),g});if(u.error){if(ke.error("Runner failed",{stepsCount:u.steps.length,duration:u.duration,error:u.error,isRetryableError:u.isRetryableError,attempt:l||1,agentSessionId:u.agentSessionId}),u.isRetryableError&&(!l||l<ar))return ke.log("Retrying inference stage"),await new Promise(h=>setTimeout(h,5e3)),{runnerResult:(await Ue({...e,attempt:(l||1)+1,priorAgentSessionId:u.agentSessionId,contextPrefix:u.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw ke.log("Do not retry inference stage"),new Error(u.error)}return{runnerResult:u}},An=e=>!e||e.length===0?"":`
164
+ `.trim(),E={...r,prompt:T},g=await A(dt(),`run-${r.runner}`,async()=>await i({aiGateway:l,config:E,netlify:o,persistSteps:s,continueSession:!!(a&&a>1),priorAgentSessionId:f}));return g.result&&(g.result=W(g.result)),g.error&&(g.error=W(g.error)),await s.flush(),g});if(u.error){if(ke.error("Runner failed",{stepsCount:u.steps.length,duration:u.duration,error:u.error,isRetryableError:u.isRetryableError,attempt:a||1,agentSessionId:u.agentSessionId}),u.isRetryableError&&(!a||a<cr))return ke.log("Retrying inference stage"),await new Promise(h=>setTimeout(h,5e3)),{runnerResult:(await Ue({...e,attempt:(a||1)+1,priorAgentSessionId:u.agentSessionId,contextPrefix:u.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw ke.log("Do not retry inference stage"),new Error(u.error)}return{runnerResult:u}},bn=e=>!e||e.length===0?"":`
165
165
  Deploy failed failed. Here are the errors to review on the latest build:
166
166
 
167
167
  Below are all of the logs with potential issues that we extracted. Some of them may be false positives, discern them carefully and ensure fixes are relevant.
168
168
 
169
169
  ${e.pop()}
170
- `;import Cn from"process";import{getTracer as dt}from"@netlify/otel";import{getTracer as Sn}from"@netlify/otel";var xe=_("deploy"),lr=async e=>await A(Sn(),"create-preview-deploy",async t=>bn(e,t)),bn=async({netlify:e,hasRepo:t,skipBuild:r,message:o="Agent Preview",deploySubdomain:n,cliPath:i,filter:s},c)=>{try{let l=["deploy","--message",`"${o}"`,"--json","--draft","--verbose"];t||(xe.log("Deploy: Uploading source zip"),l.push("--upload-source-zip")),n&&l.push("--alias",n),s&&l.push("--filter",s),r?(xe.log("Deploy: Skipping build"),l.push("--no-build")):l.push("--context","deploy-preview");let p=i||"netlify";xe.log(`Running: ${p} ${l.join(" ")}`),c?.setAttributes({cmd:p,args:l});let f=await e.utils.run(p,l,{stdio:["ignore","pipe","pipe"]}),u=JSON.parse(String(f.stdout??"").trim());c?.setAttributes({success:!0,deployId:u.deploy_id,deployUrl:u.deploy_url,siteId:u.site_id}),xe.log(`
171
- Preview deploy created successfully:`,{deployId:u.deploy_id,deployUrl:u.deploy_url,siteId:u.site_id});let m={deployId:u.deploy_id,previewUrl:u.deploy_url,logsUrl:u.logs,siteId:u.site_id};return t||(m.sourceZipFilename=u.source_zip_filename),m}catch(l){throw xe.error("Failed to create preview deploy via CLI:",l),c?.setAttributes({success:!1,error:l.message}),l}};var ve=_("deploy_stage"),pt=async e=>await A(dt(),"run-deploy-stage",async()=>Pn(e)),Pn=async({cliPath:e,config:t,context:r,result:o,filter:n,isRetry:i})=>{let s=await A(dt(),"get-runner-diffs",async()=>await jt({config:t,isRetry:i}));if(ve.info("Resolved git",{hasChanges:s.hasChanges,ignored:s.ignored??[]}),!s.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:c,resultDiff:l,diffBinary:p,resultDiffBinary:f}=s,u=!0;ve.log("Preview deploy condition check:",{resultUndefined:o===void 0,resultType:typeof o,hasChanges:u,wouldCreatePreview:o!==void 0&&u});let m=null;if(o!==void 0&&u)try{let h;try{let E=await A(dt(),"get-runner-session",async()=>await xt(t.id,t.sessionId));E?.title&&(h=E.title)}catch(E){ve.warn("Failed to fetch session title, using fallback message:",E.message)}await H(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),m=await lr({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:h,skipBuild:!1,deploySubdomain:kt(t.id,Cn.env.SITE_NAME),filter:n})}catch(h){return ve.warn("Failed to create preview deploy (continuing with agent run):",h),{diff:c,resultDiff:l,hasChanges:u,previewInfo:null,diffBinary:p,resultDiffBinary:f,deployError:h instanceof Error?h.message:String(h)}}return ve.log("Git status",{hasDiff:!!c,hasChanges:u}),{diff:c,resultDiff:l,hasChanges:u,previewInfo:m,diffBinary:p,resultDiffBinary:f}};import{getTracer as Ge}from"@netlify/otel";async function cr(e,t){let{maxRetries:r,baseDelay:o,onRetry:n}=t,i;for(let s=1;s<=r;s++)try{return await e()}catch(c){if(i=c,s===r)throw i;n&&n(s,i),await new Promise(l=>setTimeout(l,o*s))}throw i}var Me=class{scanDiffForForms(t){let r=[],o=null,n=[],i=t.split(`
172
- `);for(let s of i)if(s.startsWith("diff --git")){if(o&&n.length>0){let l=this.containsNetlifyForm(n,o);l&&r.push(l)}let c=s.split(" ");o=c[c.length-1].replace(/^b\//,""),n=[]}else s.startsWith("+")&&!s.startsWith("+++")&&n.push(s.slice(1));if(o&&n.length>0){let s=this.containsNetlifyForm(n,o);s&&r.push(s)}return{detected:r.length>0,matches:r}}containsNetlifyForm(t,r){let o=t.join(`
173
- `),n=[{pattern:/<[\w-]*form[\s\S]*?(data-)?netlify/i,name:"standard form element"},{pattern:/<[A-Z][\w]*[\s\S]*?component\s*=\s*["']form["'][\s\S]*?(data-)?netlify/i,name:"component with form prop"}];for(let{pattern:i,name:s}of n){let c=o.match(i);if(c){let l=c.index||0,p=Math.max(0,l-20),f=Math.min(o.length,l+c[0].length+20),u=o.slice(p,f).trim();return u=u.replace(/\s+/g," "),u.length>100&&(u=u.slice(0,97)+"..."),{file:r,snippet:`[${s}] ${u}`}}}return null}};var S=_("cleanup_stage"),ur=async e=>await A(Ge(),"cleanup-stage",async()=>Fn(e)),ft=1024*1024*10,Fn=async({config:e,diff:t,result:r,duration:o,resultDiff:n,diffBinary:i,resultDiffBinary:s,previewInfo:c})=>{let l={result:r||"Done",duration:o};c&&c.deployId&&(l.deploy_id=c.deployId),c&&c.sourceZipFilename&&(l.result_zip_file_name=c.sourceZipFilename);let p=t||i||n||s;if(p&&(l.diff_produced=!0),p){let f=new Me,u=t||i||"",m=f.scanDiffForForms(u);m.detected?(S.log("Detected Netlify form(s) in diff:"),m.matches.forEach(({file:h,snippet:E})=>{S.log(` - ${h}: ${E}`)}),l.has_netlify_form=!0):S.log("Did not detect Netlify form(s) in diff"),S.log("Did not detect Netlify form(s) in diff")}if(p)try{S.log("Getting pre-signed URLs for diff upload");let f=await Rt(e.id,e.sessionId),u=[];(t||i)&&u.push(He(f.result.upload_url,i||t).then(()=>{l.result_diff_s3_key=f.result.s3_key,S.log("Successfully uploaded result_diff to S3")})),(n||s)&&u.push(He(f.cumulative.upload_url,s||n).then(()=>{l.cumulative_diff_s3_key=f.cumulative.s3_key,S.log("Successfully uploaded cumulative_diff to S3")})),S.log(`Uploading ${u.length} diff(s) to S3 in parallel`),await Promise.all(u),(n||s)&&(S.log("Updating agent runner with cumulative diff S3 key"),await A(Ge(),"update-runner",async()=>{await Te(e.id,{result_diff_s3_key:f.cumulative.s3_key})}))}catch(f){S.error("S3 upload failed, falling back to inline diffs:",f);let u=Buffer.byteLength(t||i||""),m=Buffer.byteLength(s||n||"");if(u>ft||m>ft){let h=`Diffs exceed maximum inline size of ${ft} bytes.`;throw S.error(h),new Error(h)}l.result_diff=t,l.result_diff_binary=i,(n||s)&&(l.cumulative_diff=n,l.cumulative_diff_binary=s,S.log("Updating agent runner with inline diffs (fallback)"),await A(Ge(),"update-runner",async()=>{await Te(e.id,{result_diff:n,result_diff_binary:s})}))}else S.log("No diffs to upload");return S.log("Updated agent runner with result"),await cr(async()=>await A(Ge(),"update-runner-session",()=>H(e.id,e.sessionId,l)),{maxRetries:3,baseDelay:1e3,onRetry:(f,u)=>{S.error(`Error updating agent runner session (attempt ${f}):`,u),S.log("Retrying...")}}),S.log("Finished updating agent runner with result"),{sessionUpdate:l}};import{getTracer as dr,shutdownTracers as $n,withActiveSpan as pr}from"@netlify/otel";var Ln=On(import.meta.url),fr=Ln("../package.json"),Re=_("pipeline_index"),je=3,mr=async({config:e,apiToken:t,cliPath:r="netlify",cwd:o,filter:n,tracing:i={}})=>{let s,{withStageTimer:c}=bt(Q.timeUnits.hours(4)),l=await _t(fr.version,e.id,i);try{await pr(dr(),"run-pipeline",{},l,async()=>{let{aiGateway:p,context:f,persistSteps:u,runner:m,sha:h}=await c("init",()=>or({config:e,apiToken:t,cliPath:r,cwd:o,filter:n,runnerVersion:fr.version}),Q.timeUnits.minutes(10));s=m.clean,e.sha=h;let{runnerResult:E}=await c("inference",()=>Ue({cliPath:r,config:e,context:f,runner:m.runner,persistSteps:u,aiGateway:p}));await H(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let w=await 
c("deploy",()=>pt({cliPath:r,config:e,context:f,result:E.result,filter:n,isRetry:!1})),g=E,I=[];if(w.hasChanges&&w.deployError){I.push(Et(w.deployError));let x=1,v=!1;for(;x<=je&&!w.previewInfo&&!v;)Re.log(`Deploy attempt had errors. Retrying. ${x}/${je}`),await pr(dr(),"deploy-stage",async M=>{M?.setAttributes({"stage.attempt":x});let y;try{y=(await c(`inference-retry-${x}`,()=>Ue({cliPath:r,config:e,context:f,runner:m.runner,persistSteps:u,aiGateway:p,buildErrors:I,priorAgentSessionId:E.agentSessionId}))).runnerResult}catch(d){Re.warn(`Inference retry ${x} failed, stopping deploy retries:`,d),v=!0;return}g={...y,steps:[...g.steps||[],...y.steps||[]],duration:(g.duration||0)+(y.duration||0)},w=await c(`deploy-retry-${x}`,()=>pt({cliPath:r,config:e,context:f,result:y.result,filter:n,isRetry:!0})),w.deployError&&I.push(w.deployError),x++});x>je&&!w.previewInfo&&console.warn(`Deploy validation failed after ${je} attempts`)}let{diff:R,resultDiff:N,previewInfo:b,diffBinary:C,resultDiffBinary:F}=w;await c("cleanup",()=>ur({config:e,diff:R,result:g.result,duration:g.duration,resultDiff:N,diffBinary:C,resultDiffBinary:F,previewInfo:b}),Q.timeUnits.minutes(10)),process.env.NETLIFY_LOCAL_MODE||(await s?.(),await qt())})}catch(p){if(Tt(p)){Re.info("Agent run terminated gracefully",{statusCode:p.statusCode,reason:p.message}),await s?.();try{await H(e.id,e.sessionId,{result:p.userMessage,state:"error"})}catch{Re.info("Could not update session (site may have been deleted)")}return}Re.error("Got error while running pipeline",p),await s?.();let f=p instanceof Error&&p.message;throw await H(e.id,e.sessionId,{result:f||"Encountered error when running agent",state:"error"}),p}finally{await $n()}};import Y from"process";var kn="claude",Un=e=>(e??[]).filter(t=>t.request&&t.response),Mn=e=>(e??[]).filter(t=>t.site_context),gr=_("config"),hr=()=>{let e=Y.env.NETLIFY_AGENT_RUNNER_ID,t=Y.env.NETLIFY_AGENT_RUNNER_SESSION_ID;if(!e||!t)throw new Error("ID of agent runner is not provided");let r=Y.env.NETLIFY_AGENT_RUNNER_RESULT_BRANCH,o=Y.env.NETLIFY_AGENT_RUNNER_PROMPT;if(!o)throw new Error("Prompt is not provided");let n=Y.env.NETLIFY_AGENT_RUNNER_AGENT||kn,i=Y.env.NETLIFY_AGENT_RUNNER_MODEL,s=Pe(Y.env.NETLIFY_AGENT_RUNNER_CONTEXT,!0,gr),c=Un(s),l=Mn(s),p=Y.env.NETLIFY_AGENT_RUNNER_HAS_REPO!=="0",f=!Y.env.NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED,u=Y.env.NETLIFY_AGENT_RUNNER_SHA,m=Gn(),h=Ut(),E={id:e,sessionId:t,resultBranch:r,prompt:o,runner:n,model:i,sessionHistoryContext:c,siteContext:l,hasRepo:p,useGateway:f,sha:u,accountType:m,modelVersionOverrides:h};return gr.log({fullConfig:E}),E},Gn=()=>{let e=Y.env.NETLIFY_TEAM_TYPE;return e?e.includes("personal")?Xe:e.includes("pro")?"pro":e.startsWith("enterprise")?ze:e.startsWith("free")?Ze:Ve:Ve};var yr=_("bin_cmd"),ye=jn(mt.argv.slice(2),{string:["auth","cwd","cli-path","filter","trace-exporter-url","traceparent"]});try{let e=hr();await mr({config:e,apiToken:ye.auth,cwd:ye.cwd,cliPath:ye["cli-path"],filter:ye.filter,tracing:{exporterUrl:ye["trace-exporter-url"],traceparent:ye.traceparent}}),yr.info("Finished agent"),mt.exit(0)}catch(e){yr.error("Error running agent pipeline:",e),mt.exit(1)}
170
+ `;import On from"process";import{getTracer as pt}from"@netlify/otel";import{getTracer as Cn}from"@netlify/otel";var Ie=_("deploy"),ur=async e=>await A(Cn(),"create-preview-deploy",async t=>Pn(e,t)),Pn=async({netlify:e,hasRepo:t,skipBuild:r,message:o="Agent Preview",deploySubdomain:n,cliPath:i,filter:s},l)=>{try{let a=["deploy","--message",`"${o}"`,"--json","--draft","--verbose"];t||(Ie.log("Deploy: Uploading source zip"),a.push("--upload-source-zip")),n&&a.push("--alias",n),s&&a.push("--filter",s),r?(Ie.log("Deploy: Skipping build"),a.push("--no-build")):a.push("--context","deploy-preview");let p=i||"netlify";Ie.log(`Running: ${p} ${a.join(" ")}`),l?.setAttributes({cmd:p,args:a});let f=await e.utils.run(p,a,{stdio:["ignore","pipe","pipe"]}),u=JSON.parse(String(f.stdout??"").trim());l?.setAttributes({success:!0,deployId:u.deploy_id,deployUrl:u.deploy_url,siteId:u.site_id}),Ie.log(`
171
+ Preview deploy created successfully:`,{deployId:u.deploy_id,deployUrl:u.deploy_url,siteId:u.site_id});let m={deployId:u.deploy_id,previewUrl:u.deploy_url,logsUrl:u.logs,siteId:u.site_id};return t||(m.sourceZipFilename=u.source_zip_filename),m}catch(a){throw Ie.error("Failed to create preview deploy via CLI:",a),l?.setAttributes({success:!1,error:a.message}),a}};var ve=_("deploy_stage"),ft=async e=>await A(pt(),"run-deploy-stage",async()=>Fn(e)),Fn=async({cliPath:e,config:t,context:r,result:o,filter:n,isRetry:i})=>{let s=await A(pt(),"get-runner-diffs",async()=>await Bt({config:t,isRetry:i}));if(ve.info("Resolved git",{hasChanges:s.hasChanges,ignored:s.ignored??[]}),!s.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:l,resultDiff:a,diffBinary:p,resultDiffBinary:f}=s,u=!0;ve.log("Preview deploy condition check:",{resultUndefined:o===void 0,resultType:typeof o,hasChanges:u,wouldCreatePreview:o!==void 0&&u});let m=null;if(o!==void 0&&u)try{let h;try{let T=await A(pt(),"get-runner-session",async()=>await vt(t.id,t.sessionId));T?.title&&(h=T.title)}catch(T){ve.warn("Failed to fetch session title, using fallback message:",T.message)}await H(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),m=await ur({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:h,skipBuild:!1,deploySubdomain:Mt(t.id,On.env.SITE_NAME),filter:n})}catch(h){return ve.warn("Failed to create preview deploy (continuing with agent run):",h),{diff:l,resultDiff:a,hasChanges:u,previewInfo:null,diffBinary:p,resultDiffBinary:f,deployError:h instanceof Error?h.message:String(h)}}return ve.log("Git status",{hasDiff:!!l,hasChanges:u}),{diff:l,resultDiff:a,hasChanges:u,previewInfo:m,diffBinary:p,resultDiffBinary:f}};import{getTracer as je}from"@netlify/otel";async function dr(e,t){let{maxRetries:r,baseDelay:o,onRetry:n}=t,i;for(let s=1;s<=r;s++)try{return await e()}catch(l){if(i=l,s===r)throw i;n&&n(s,i),await new Promise(a=>setTimeout(a,o*s))}throw i}var Me=class{scanDiffForForms(t){let r=[],o=null,n=[],i=t.split(`
172
+ `);for(let s of i)if(s.startsWith("diff --git")){if(o&&n.length>0){let a=this.containsNetlifyForm(n,o);a&&r.push(a)}let l=s.split(" ");o=l[l.length-1].replace(/^b\//,""),n=[]}else s.startsWith("+")&&!s.startsWith("+++")&&n.push(s.slice(1));if(o&&n.length>0){let s=this.containsNetlifyForm(n,o);s&&r.push(s)}return{detected:r.length>0,matches:r}}containsNetlifyForm(t,r){let o=t.join(`
173
+ `),n=[{pattern:/<[\w-]*form[\s\S]*?(data-)?netlify/i,name:"standard form element"},{pattern:/<[A-Z][\w]*[\s\S]*?component\s*=\s*["']form["'][\s\S]*?(data-)?netlify/i,name:"component with form prop"}];for(let{pattern:i,name:s}of n){let l=o.match(i);if(l){let a=l.index||0,p=Math.max(0,a-20),f=Math.min(o.length,a+l[0].length+20),u=o.slice(p,f).trim();return u=u.replace(/\s+/g," "),u.length>100&&(u=u.slice(0,97)+"..."),{file:r,snippet:`[${s}] ${u}`}}}return null}};var Ge=class{scanDiffForIdentity(t){let r=[],o=null,n=[],i=t.split(`
174
+ `);for(let s of i)if(s.startsWith("diff --git")){if(o&&n.length>0){let a=this.containsNetlifyIdentity(n,o);a&&r.push(a)}let l=s.split(" ");o=l[l.length-1].replace(/^b\//,""),n=[]}else s.startsWith("+")&&!s.startsWith("+++")&&n.push(s.slice(1));if(o&&n.length>0){let s=this.containsNetlifyIdentity(n,o);s&&r.push(s)}return{detected:r.length>0,matches:r}}containsNetlifyIdentity(t,r){let o=t.join(`
175
+ `),n=[{pattern:/data-netlify-identity-(button|menu)/i,name:"identity widget element"},{pattern:/netlify-identity-widget/i,name:"identity widget import"},{pattern:/new\s+GoTrue\s*\(/i,name:"GoTrue client"},{pattern:/(import\s+.*GoTrue|require\s*\(\s*['"]gotrue-js['"]\s*\))/i,name:"GoTrue import"},{pattern:/netlifyIdentity\s*\.\s*(init|on|off|open|close|login|signup|logout|refresh|currentUser)/i,name:"identity widget API"},{pattern:/['"`]\/?\.netlify\/identity/i,name:"identity endpoint"}];for(let{pattern:i,name:s}of n){let l=o.match(i);if(l){let a=l.index||0,p=Math.max(0,a-20),f=Math.min(o.length,a+l[0].length+20),u=o.slice(p,f).trim();return u=u.replace(/\s+/g," "),u.length>100&&(u=u.slice(0,97)+"..."),{file:r,snippet:`[${s}] ${u}`}}}return null}};var S=_("cleanup_stage"),pr=async e=>await A(je(),"cleanup-stage",async()=>$n(e)),mt=1024*1024*10,$n=async({config:e,diff:t,result:r,duration:o,resultDiff:n,diffBinary:i,resultDiffBinary:s,previewInfo:l})=>{let a={result:r||"Done",duration:o};l&&l.deployId&&(a.deploy_id=l.deployId),l&&l.sourceZipFilename&&(a.result_zip_file_name=l.sourceZipFilename);let p=t||i||n||s;if(p&&(a.diff_produced=!0),p){let f=new Me,u=t||i||"",m=f.scanDiffForForms(u);m.detected?(S.log("Detected Netlify form(s) in diff:"),m.matches.forEach(({file:E,snippet:g})=>{S.log(` - ${E}: ${g}`)}),a.has_netlify_form=!0):S.log("Did not detect Netlify form(s) in diff");let T=new Ge().scanDiffForIdentity(u);T.detected?(S.log("Detected Netlify Identity usage in diff:"),T.matches.forEach(({file:E,snippet:g})=>{S.log(` - ${E}: ${g}`)}),a.has_netlify_identity=!0):S.log("Did not detect Netlify Identity usage in diff")}if(p)try{S.log("Getting pre-signed URLs for diff upload");let f=await Nt(e.id,e.sessionId),u=[];(t||i)&&u.push(qe(f.result.upload_url,i||t).then(()=>{a.result_diff_s3_key=f.result.s3_key,S.log("Successfully uploaded result_diff to S3")})),(n||s)&&u.push(qe(f.cumulative.upload_url,s||n).then(()=>{a.cumulative_diff_s3_key=f.cumulative.s3_key,S.log("Successfully uploaded cumulative_diff to S3")})),S.log(`Uploading ${u.length} diff(s) to S3 in parallel`),await Promise.all(u),(n||s)&&(S.log("Updating agent runner with cumulative diff S3 key"),await A(je(),"update-runner",async()=>{await Te(e.id,{result_diff_s3_key:f.cumulative.s3_key})}))}catch(f){S.error("S3 upload failed, falling back to inline diffs:",f);let u=Buffer.byteLength(t||i||""),m=Buffer.byteLength(s||n||"");if(u>mt||m>mt){let h=`Diffs exceed maximum inline size of ${mt} bytes.`;throw S.error(h),new Error(h)}a.result_diff=t,a.result_diff_binary=i,(n||s)&&(a.cumulative_diff=n,a.cumulative_diff_binary=s,S.log("Updating agent runner with inline diffs (fallback)"),await A(je(),"update-runner",async()=>{await Te(e.id,{result_diff:n,result_diff_binary:s})}))}else S.log("No diffs to upload");return S.log("Updated agent runner with result"),await dr(async()=>await A(je(),"update-runner-session",()=>H(e.id,e.sessionId,a)),{maxRetries:3,baseDelay:1e3,onRetry:(f,u)=>{S.error(`Error updating agent runner session (attempt ${f}):`,u),S.log("Retrying...")}}),S.log("Finished updating agent runner with result"),{sessionUpdate:a}};import{getTracer as fr,shutdownTracers as Dn,withActiveSpan as mr}from"@netlify/otel";var kn=Ln(import.meta.url),gr=kn("../package.json"),Re=_("pipeline_index"),Ye=3,hr=async({config:e,apiToken:t,cliPath:r="netlify",cwd:o,filter:n,tracing:i={}})=>{let s,{withStageTimer:l}=Ct(Q.timeUnits.hours(4)),a=await _t(gr.version,e.id,i);try{await 
mr(fr(),"run-pipeline",{},a,async()=>{let{aiGateway:p,context:f,persistSteps:u,runner:m,sha:h}=await l("init",()=>sr({config:e,apiToken:t,cliPath:r,cwd:o,filter:n,runnerVersion:gr.version}),Q.timeUnits.minutes(10));s=m.clean,e.sha=h;let{runnerResult:T}=await l("inference",()=>Ue({cliPath:r,config:e,context:f,runner:m.runner,persistSteps:u,aiGateway:p}));await H(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let E=await l("deploy",()=>ft({cliPath:r,config:e,context:f,result:T.result,filter:n,isRetry:!1})),g=T,x=[];if(E.hasChanges&&E.deployError){x.push(Tt(E.deployError));let I=1,v=!1;for(;I<=Ye&&!E.previewInfo&&!v;)Re.log(`Deploy attempt had errors. Retrying. ${I}/${Ye}`),await mr(fr(),"deploy-stage",async M=>{M?.setAttributes({"stage.attempt":I});let y;try{y=(await l(`inference-retry-${I}`,()=>Ue({cliPath:r,config:e,context:f,runner:m.runner,persistSteps:u,aiGateway:p,buildErrors:x,priorAgentSessionId:T.agentSessionId}))).runnerResult}catch(d){Re.warn(`Inference retry ${I} failed, stopping deploy retries:`,d),v=!0;return}g={...y,steps:[...g.steps||[],...y.steps||[]],duration:(g.duration||0)+(y.duration||0)},E=await l(`deploy-retry-${I}`,()=>ft({cliPath:r,config:e,context:f,result:y.result,filter:n,isRetry:!0})),E.deployError&&x.push(E.deployError),I++});I>Ye&&!E.previewInfo&&console.warn(`Deploy validation failed after ${Ye} attempts`)}let{diff:R,resultDiff:N,previewInfo:b,diffBinary:C,resultDiffBinary:O}=E;await l("cleanup",()=>pr({config:e,diff:R,result:g.result,duration:g.duration,resultDiff:N,diffBinary:C,resultDiffBinary:O,previewInfo:b}),Q.timeUnits.minutes(10)),process.env.NETLIFY_LOCAL_MODE||(await s?.(),await Kt())})}catch(p){if(wt(p)){Re.info("Agent run terminated gracefully",{statusCode:p.statusCode,reason:p.message}),await s?.();try{await H(e.id,e.sessionId,{result:p.userMessage,state:"error"})}catch{Re.info("Could not update session (site may have been deleted)")}return}Re.error("Got error while running pipeline",p),await s?.();let f=p instanceof Error&&p.message;throw await H(e.id,e.sessionId,{result:f||"Encountered error when running agent",state:"error"}),p}finally{await Dn()}};import Y from"process";var Mn="claude",Gn=e=>typeof e.request=="string"&&typeof e.response=="string",jn=e=>typeof e.site_context=="string",Yn=e=>(e??[]).filter(Gn),Bn=e=>(e??[]).filter(jn),yr=_("config"),Er=()=>{let e=Y.env.NETLIFY_AGENT_RUNNER_ID,t=Y.env.NETLIFY_AGENT_RUNNER_SESSION_ID;if(!e||!t)throw new Error("ID of agent runner is not provided");let r=Y.env.NETLIFY_AGENT_RUNNER_MODE||"normal";if(!$t.includes(r))throw new Error(`Mode ${r} is not supported`);let o=Y.env.NETLIFY_AGENT_RUNNER_PROMPT;if(r!=="redeploy"&&!o)throw new Error("Prompt is not provided");let n=Y.env.NETLIFY_AGENT_RUNNER_AGENT||Mn,i=Y.env.NETLIFY_AGENT_RUNNER_MODEL,s=Pe(Y.env.NETLIFY_AGENT_RUNNER_CONTEXT,!0,yr),l=Yn(s),a=Bn(s),p=Y.env.NETLIFY_AGENT_RUNNER_HAS_REPO!=="0",f=!Y.env.NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED,u=Y.env.NETLIFY_AGENT_RUNNER_SHA,m=Hn(),h=Gt(),T={id:e,sessionId:t,runner:n,model:i,sessionHistoryContext:l,siteContext:a,hasRepo:p,useGateway:f,sha:u,accountType:m,modelVersionOverrides:h},E=r==="redeploy"?{...T,mode:r}:{...T,mode:r,prompt:o};return yr.log({fullConfig:E}),E},Hn=()=>{let e=Y.env.NETLIFY_TEAM_TYPE;return e?e.includes("personal")?ze:e.includes("pro")?"pro":e.startsWith("enterprise")?Ze:e.startsWith("free")?Qe:Xe:Xe};var _r=_("bin_cmd"),ye=qn(gt.argv.slice(2),{string:["auth","cwd","cli-path","filter","trace-exporter-url","traceparent"]});try{let e=Er();await 
hr({config:e,apiToken:ye.auth,cwd:ye.cwd,cliPath:ye["cli-path"],filter:ye.filter,tracing:{exporterUrl:ye["trace-exporter-url"],traceparent:ye.traceparent}}),_r.info("Finished agent"),gt.exit(0)}catch(e){_r.error("Error running agent pipeline:",e),gt.exit(1)}
174
176
  //# sourceMappingURL=bin.js.map
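The main behavioural addition to bin.js in 1.61.0 is a second diff scanner in the cleanup stage: alongside the existing Netlify Forms check, the bundle now scans the produced git diff for Netlify Identity usage and, when it finds any, sets a `has_netlify_identity` flag on the session update. The sketch below is a readable reconstruction of that scanner from the minified bundle above; the method names and the regular expressions appear verbatim in the bundle, while the class, type, and constant names used here are illustrative, since the shipped identifiers are minified.

```ts
type IdentityMatch = { file: string; snippet: string };

// Patterns copied from the bundle: widget markup, widget/GoTrue imports, widget API
// calls, and direct references to the /.netlify/identity endpoint.
const IDENTITY_PATTERNS: { pattern: RegExp; name: string }[] = [
  { pattern: /data-netlify-identity-(button|menu)/i, name: "identity widget element" },
  { pattern: /netlify-identity-widget/i, name: "identity widget import" },
  { pattern: /new\s+GoTrue\s*\(/i, name: "GoTrue client" },
  { pattern: /(import\s+.*GoTrue|require\s*\(\s*['"]gotrue-js['"]\s*\))/i, name: "GoTrue import" },
  { pattern: /netlifyIdentity\s*\.\s*(init|on|off|open|close|login|signup|logout|refresh|currentUser)/i, name: "identity widget API" },
  { pattern: /['"`]\/?\.netlify\/identity/i, name: "identity endpoint" },
];

class NetlifyIdentityScanner {
  // Walks a unified git diff, collects the added ("+") lines per file, and reports
  // the first Identity pattern matched in each file's additions.
  scanDiffForIdentity(diff: string): { detected: boolean; matches: IdentityMatch[] } {
    const matches: IdentityMatch[] = [];
    let currentFile: string | null = null;
    let addedLines: string[] = [];

    const flush = () => {
      if (currentFile && addedLines.length > 0) {
        const match = this.containsNetlifyIdentity(addedLines, currentFile);
        if (match) matches.push(match);
      }
    };

    for (const line of diff.split("\n")) {
      if (line.startsWith("diff --git")) {
        flush();
        const parts = line.split(" ");
        currentFile = parts[parts.length - 1].replace(/^b\//, "");
        addedLines = [];
      } else if (line.startsWith("+") && !line.startsWith("+++")) {
        addedLines.push(line.slice(1));
      }
    }
    flush();
    return { detected: matches.length > 0, matches };
  }

  containsNetlifyIdentity(addedLines: string[], file: string): IdentityMatch | null {
    const text = addedLines.join("\n");
    for (const { pattern, name } of IDENTITY_PATTERNS) {
      const m = text.match(pattern);
      if (m) {
        // Keep a short, whitespace-collapsed snippet around the match for logging.
        const start = Math.max(0, (m.index ?? 0) - 20);
        const end = Math.min(text.length, (m.index ?? 0) + m[0].length + 20);
        let snippet = text.slice(start, end).trim().replace(/\s+/g, " ");
        if (snippet.length > 100) snippet = snippet.slice(0, 97) + "...";
        return { file, snippet: `[${name}] ${snippet}` };
      }
    }
    return null;
  }
}
```

In the cleanup stage the scan result only drives logging and the `has_netlify_identity` flag on the session update; it does not change which diffs are uploaded.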
package/dist/index.d.ts CHANGED
@@ -210,6 +210,12 @@ type StdioOptionsProperty<IsSync extends boolean = boolean> =
210
210
  | SimpleStdioOption<IsSync, false, false>
211
211
  | StdioOptionsArray<IsSync>;
212
212
 
213
+ declare const ACCOUNT_TYPE_OTHER: "other";
214
+ declare const ACCOUNT_TYPE_PERSONAL: "personal";
215
+ declare const ACCOUNT_TYPE_PRO: "pro";
216
+ declare const ACCOUNT_TYPE_ENTERPRISE: "enterprise";
217
+ declare const ACCOUNT_TYPE_FREE: "free";
218
+
213
219
  interface IdleTimeoutOptions {
214
220
  idleTimeout?: number;
215
221
  }
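The `ACCOUNT_TYPE_*` constants declared in this hunk back the new `AccountType` union used by `RunnerConfig` further down. In the bundled config code the account type appears to be derived from the `NETLIFY_TEAM_TYPE` environment variable; the sketch below reconstructs that mapping. The order of the checks is visible in the minified bundle, but which constant each minified identifier resolves to is inferred from the check names, and `resolveAccountType` is an illustrative name rather than a published export.

```ts
// Mirrors the five ACCOUNT_TYPE_* constants declared above.
type AccountType = "other" | "personal" | "pro" | "enterprise" | "free";

function resolveAccountType(teamType: string | undefined): AccountType {
  if (!teamType) return "other";
  if (teamType.includes("personal")) return "personal";
  if (teamType.includes("pro")) return "pro";
  if (teamType.startsWith("enterprise")) return "enterprise";
  if (teamType.startsWith("free")) return "free";
  return "other";
}

// e.g. resolveAccountType(process.env.NETLIFY_TEAM_TYPE)
```

The resolved value is also what `ModelVersionOverrides` is keyed on: each `AgentVersionOverride` declared below maps an account type to the model override for that tier.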
@@ -255,17 +261,39 @@ declare const run: (file: string, args?: string[] | object, options?: Options &
255
261
  windowsVerbatimArguments?: boolean;
256
262
  }>;
257
263
 
258
- interface RunnerConfig {
264
+ type RunnerConfig = {
259
265
  id: string;
260
266
  sessionId: string;
261
- prompt: string;
262
- accountType: string;
267
+ accountType: AccountType;
263
268
  model?: string;
264
269
  runner: string;
265
270
  useGateway: boolean;
266
271
  hasRepo: boolean;
272
+ sessionHistoryContext: HistoryContextEntry[];
273
+ siteContext: SiteContextEntry[];
274
+ modelVersionOverrides: ModelVersionOverrides;
267
275
  sha?: string;
268
- }
276
+ } & ({
277
+ mode: Exclude<RunnerMode, 'redeploy'>;
278
+ prompt: string;
279
+ } | {
280
+ mode: 'redeploy';
281
+ });
282
+ type RunnerMode = 'normal' | 'redeploy' | 'create' | 'ask';
283
+ type AccountType = typeof ACCOUNT_TYPE_OTHER | typeof ACCOUNT_TYPE_PERSONAL | typeof ACCOUNT_TYPE_PRO | typeof ACCOUNT_TYPE_ENTERPRISE | typeof ACCOUNT_TYPE_FREE;
284
+ type HistoryContextEntry = {
285
+ request: string;
286
+ response: string;
287
+ };
288
+ type SiteContextEntry = {
289
+ site_context: string;
290
+ };
291
+ type ModelVersionOverrides = {
292
+ codex?: AgentVersionOverride;
293
+ claude?: AgentVersionOverride;
294
+ gemini?: AgentVersionOverride;
295
+ };
296
+ type AgentVersionOverride = Record<AccountType, string>;
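The reworked `RunnerConfig` is now a discriminated union on `mode`: a required `prompt` accompanies every mode except `'redeploy'`, matching the bundled config logic that only skips the `NETLIFY_AGENT_RUNNER_PROMPT` check for redeploy runs. A minimal consumption sketch, assuming the declarations above are in scope; `startRun` is an illustrative helper that exists only to give the literal a contextual `RunnerConfig` type, and the field values are placeholders:

```ts
declare function startRun(config: RunnerConfig): Promise<void>;

// 'redeploy' is the only mode that may omit `prompt`:
startRun({
  id: "runner-id",
  sessionId: "session-id",
  accountType: "pro",
  runner: "claude",
  useGateway: true,
  hasRepo: true,
  sessionHistoryContext: [],
  siteContext: [],
  modelVersionOverrides: {},
  mode: "redeploy",
});

// Switching to any other mode ('normal' | 'create' | 'ask') without also supplying
// `prompt: string` fails to type-check, mirroring the runtime validation in bin.js.
```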
269
297
 
270
298
  interface Context {
271
299
  constants: {