@netlify/agent-runner-cli 1.62.0 → 1.63.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,31 +1,31 @@
- import{createRequire as Tn}from"module";import{createTracerProvider as ar}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as lt}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as lr}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as ur}from"@netlify/otel";import{propagation as ut,context as ct,W3CTraceContextPropagator as cr}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as dr}from"@opentelemetry/exporter-trace-otlp-grpc";import sr from"process";function E(e){let t=sr.env.LOG!=="0";return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var Ge=E("tracing"),dt=async(e,t,r)=>(await ar({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new lt(new je),new lt(new dr({url:r.exporterUrl}))],instrumentations:[new lr({skipHeaders:!0})]}),r.traceparent?(ut.setGlobalPropagator(new cr),ut.extract(ct.active(),{traceparent:r.traceparent,isRemote:!0})):ct.active());function A(e,t,r){return Ge.log(`\u23F3 TRACE: ${t} starting...`),ur(e,t,r)}var je=class{export(t,r){for(let i of t)this.logSpan(i);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,i=t.attributes,n=[];for(let[l,u]of Object.entries(i))l.includes("duration")&&typeof u=="number"?n.push(`${l}=${u.toFixed(2)}ms`):n.push(`${l}=${u}`);let o=t.status?.code===2?"\u274C":"\u2705",s=n.length>0?` [${n.join(", ")}]`:"";Ge.log(`${o} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${s}`),t.status?.code===2&&t.status.message&&Ge.log(` \u274C Error: ${t.status.message}`)}};var pr=["error","failed","exception","fatal","panic","abort","crash"];function pt(e){let t=e.split(`
+ import{createRequire as Tn}from"module";import{createTracerProvider as ar}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as ct}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as lr}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as ur}from"@netlify/otel";import{propagation as dt,context as pt,W3CTraceContextPropagator as cr}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as dr}from"@opentelemetry/exporter-trace-otlp-grpc";import sr from"process";function E(e){let t=sr.env.LOG!=="0";return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var je=E("tracing"),ft=async(e,t,r)=>(await ar({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new ct(new Ye),new ct(new dr({url:r.exporterUrl}))],instrumentations:[new lr({skipHeaders:!0})]}),r.traceparent?(dt.setGlobalPropagator(new cr),dt.extract(pt.active(),{traceparent:r.traceparent,isRemote:!0})):pt.active());function A(e,t,r){return je.log(`\u23F3 TRACE: ${t} starting...`),ur(e,t,r)}var Ye=class{export(t,r){for(let i of t)this.logSpan(i);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,i=t.attributes,n=[];for(let[l,u]of Object.entries(i))l.includes("duration")&&typeof u=="number"?n.push(`${l}=${u.toFixed(2)}ms`):n.push(`${l}=${u}`);let o=t.status?.code===2?"\u274C":"\u2705",s=n.length>0?` [${n.join(", ")}]`:"";je.log(`${o} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${s}`),t.status?.code===2&&t.status.message&&je.log(` \u274C Error: ${t.status.message}`)}};var pr=["error","failed","exception","fatal","panic","abort","crash"];function mt(e){let t=e.split(`
  `),r=[],i=-1,n=0;for(;n<t.length;){let l=t[n].slice(0,500).toLowerCase();if(pr.some(c=>l.includes(c))){let c=Math.max(0,n-10,i+1),f=Math.min(t.length-1,n+20),d=[];for(let m=c;m<=f;m++)d.push(t[m]);r.push(d.join(`
  `)),i=f,n=f+1}else n++}if(r.length===0)return e;let o=r.map((s,l)=>`<extracted_error_chunk order="${l+1}">
  ${s}
  </extracted_error_chunk>`).join(`
 
- `);return o.length>e.length*.8?e:o}import{execSync as en}from"child_process";import Vt from"fs/promises";import tn from"path";import V from"process";import{getTracer as rn}from"@netlify/otel";import he from"process";var re=class extends Error{constructor(r,i,n){super(r);this.statusCode=i;this.userMessage=n;this.name="GracefulShutdownError"}},ft=e=>e instanceof re;var Re=he.env.NETLIFY_API_URL,Se=he.env.NETLIFY_API_TOKEN,Y=E("api"),be=()=>he.env.NETLIFY_LOCAL_MODE==="true",ye=async(e,t={})=>{if(!Re||!Se)throw new Error("No API URL or token");let r=new URL(e,Re),i={...t,headers:{...t.headers,Authorization:`Bearer ${Se}`}};he.env.AGENT_RUNNERS_DEBUG==="true"&&(i.headers["x-nf-debug-logging"]="true"),t.json&&(i.headers||={},i.headers["Content-Type"]="application/json",i.body=JSON.stringify(t.json));let n=await fetch(r,i),o=n.ok&&n.status<=299;if(he.env.AGENT_RUNNERS_DEBUG==="true")Y.log(`Response headers for ${r}:`),n.headers.forEach((l,u)=>{Y.log(` ${u}: ${l}`)});else{let l=n.headers.get("x-request-id")||n.headers.get("x-nf-request-id");Y.log(`Request ID for ${r}: ${l||"N/A"}`)}if(o||Y.error(`Got status ${n.status} for request ${r}`),t.raw){if(!o)throw new Error(`API request failed: ${n.status} ${n.statusText}`);return n}let s=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!o){let l=typeof s=="string"?s:JSON.stringify(s);throw n.status===404?new re(`API request failed: 404 - ${l}`,404,"The site associated with this agent run no longer exists."):n.status===403&&t.gracefulOn403?new re(`API request failed: 403 - ${l}`,403,"Credit limit reached. Please add more credits to continue using Agent Runners."):new Error(`API request failed: ${n.status} - ${l}`)}return s},mt=e=>{Y.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(Re=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(Se=e.constants.NETLIFY_API_TOKEN)},gt=()=>({apiUrl:Re,token:Se}),_e=async(e,t)=>be()?(Y.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):ye(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),B=async(e,t,r)=>be()?(Y.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):ye(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var ht=async(e,t)=>be()?(Y.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):ye(`/api/v1/agent_runners/${e}/sessions/${t}`),yt=(e,t,r)=>ye(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r},gracefulOn403:!0}),_t=async(e,t)=>be()?(Y.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):ye(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),Ye=async(e,t)=>{Y.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var ne=E("ai_gateway"),Be=null;var Et=async()=>{if(Be)return Be;ne.log("Fetching available AI gateway providers");let e=await fetch(`${gt().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: 
${e.statusText}`);let t=await e.json();return Be=t,ne.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},fr=async(e,t)=>{let i=(await Et()).providers[e];if(!i)return ne.log(`Provider '${e}' not found`),!1;let n=i.models.includes(t);return ne.log(`Model validation for ${e}/${t}`,{isAvailable:n}),n},wt=async({netlify:e,config:t})=>{let r,i,n,o,s=e.constants?.SITE_ID;if(!s)throw new Error("No site id");let l=async()=>{clearTimeout(n),ne.log("Requesting AI gateway information");let u=await yt(s,t.id,t.sessionId);if({token:r,url:o}=u,i=u.expires_at?u.expires_at*1e3:void 0,ne.log("Got AI gateway information",{token:!!r,expiresAt:i,url:o}),i){let c=i-Date.now()-6e4;c>0&&(n=setTimeout(()=>{l()},c))}};return await Promise.all([l(),Et()]),{get url(){return o},get token(){return r},isModelAvailableForProvider:fr}};import H from"process";import W from"path";import Ae from"fs";import{fileURLToPath as wr}from"url";import{createRequire as Tr}from"module";import{execa as xr,execaCommand as ti}from"execa";import{Transform as mr}from"stream";var gr=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),hr=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function yr(){return Object.entries(process.env).filter(([e,t])=>!(!t||gr.has(e)||hr.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function q(e){if(typeof e!="string")return e;let t=yr();if(t.length===0)return e;let r=e;return t.forEach(i=>{let n=new RegExp(_r(i),"g");r=r.replace(n,"******")}),r}function _r(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var ie=class extends mr{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,i){let n=t.toString(),o=q(n);i(null,o)}};function Tt(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(i,n,o){let s=typeof i=="string"?q(i):i;return typeof n=="function"?t(s,n):t(s,n,o)},process.stderr.write=function(i,n,o){let s=typeof i=="string"?q(i):i;return typeof n=="function"?r(s,n):r(s,n,o)}}var Ee=null,xt=e=>(Ee&&Ee.destroy(),Ee=new Z({totalAllowedTime:e}),Ee),It=()=>Ee;var Z=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,i)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let n=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),o=null,s=null;i!==void 0&&(s=new Promise((l,u)=>{o=setTimeout(()=>{u(new Error(`${t} stage exceeded its maximum duration of ${i}ms`))},i)}));try{return s?await Promise.race([r(),s]):await r()}finally{n(),o&&clearTimeout(o)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var vt="netlify-agent-runner-context.md",He="task-history",qe="netlify-context",k=".netlify",Q="results.md",We="assets";var ee=1800*1e3;var Rt={name:"@netlify/agent-runner-cli",type:"module",version:"1.62.0",description:"CLI tool for running Netlify agents",main:"./dist/index.js",types:"./dist/index.d.ts",exports:"./dist/index.js",bin:{"agent-runner-cli":"./dist/bin.js","agent-runner-cli-local":"./dist/bin-local.js"},files:["dist/**/*.js","dist/**/*.d.ts","patches","scripts"],scripts:{build:"tsup",dev:"tsup --watch",prepare:"husky install node_modules/@netlify/eslint-config-node/.husky/",prepublishOnly:"npm ci && npm test",prepack:"npm run build",test:"run-s build format test:dev",format:"run-s build format:check-fix:*","format:ci":"run-s build format:check:*","format:check-fix:lint":"run-e format:check:lint format:fix:lint","format:check:lint":"cross-env-shell eslint $npm_package_config_eslint","format:fix:lint":"cross-env-shell eslint --fix $npm_package_config_eslint","format:check-fix:prettier":"run-e format:check:prettier format:fix:prettier","format:check:prettier":"cross-env-shell prettier --check $npm_package_config_prettier","format:fix:prettier":"cross-env-shell prettier --write $npm_package_config_prettier","test:dev":"run-s build test:dev:*","test:ci":"run-s build test:ci:*","test:dev:vitest":"LOG=0 vitest --exclude '**/integration/**'","test:ci:vitest":"LOG=0 c8 -r lcovonly -r text -r json vitest --exclude '**/integration/**'","test:integration":"vitest run test/integration/","test:integration:codex":"vitest run test/integration/codex.test.ts","test:integration:claude":"vitest run test/integration/claude.test.ts","test:integration:gemini":"vitest run test/integration/gemini.test.ts",postinstall:"node scripts/postinstall.js"},config:{eslint:'--cache --format=codeframe --max-warnings=0 "{src,scripts,test,.github}/**/*.{js,ts,md,html}"',prettier:'--ignore-path .gitignore 
--loglevel=warn "{src,scripts,test,.github}/**/*.{js,ts,md,yml,json,html}" "*.{js,ts,yml,json,html}" ".*.{js,ts,yml,json,html}" "!**/package-lock.json" "!package-lock.json"'},keywords:[],license:"MIT",repository:"netlify/agent-runner-cli",bugs:{url:"https://github.com/netlify/agent-runner-cli/issues"},author:"Netlify Inc.",directories:{test:"test"},devDependencies:{"@commitlint/cli":"^20.0.0","@commitlint/config-conventional":"^20.0.0","@eslint/compat":"^2.0.0","@eslint/js":"^9.35.0","@netlify/eslint-config-node":"^7.0.1","@types/node":"^24.5.0","@typescript-eslint/eslint-plugin":"^8.0.0","@typescript-eslint/parser":"^8.0.0","@vitest/eslint-plugin":"^1.6.6",c8:"^10.0.0","eslint-config-prettier":"^10.1.8","eslint-plugin-n":"^17.0.0",husky:"^9.0.0","patch-package":"^8.0.0",tsup:"^8.5.0",typescript:"^5.0.0","typescript-eslint":"^8.44.0",vitest:"^4.0.16"},dependencies:{"@anthropic-ai/claude-code":"2.1.29","@google/gemini-cli":"0.25.2","@netlify/otel":"^5.1.1","@openai/codex":"0.93.0","@opentelemetry/exporter-trace-otlp-grpc":"^0.208.0",execa:"^9.6.1",minimist:"^1.2.8"}};var Ir=wr(import.meta.url),vr=W.dirname(Ir),Rr=Tr(import.meta.url),oe=E("shell"),Ke=new Set,Sr={preferLocal:!0},$=(e,t,r)=>{let[i,n]=br(t,r),o={...Sr,...n},s=xr(e,i,o);Ar(s,o),Cr(s);let l=r?.idleTimeout;return l&&l>0&&Nr(s,l),s};var br=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},Ar=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(H.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new ie).pipe(H.stdout),e.stdout?.pipe(new ie).pipe(H.stdout),e.stderr?.pipe(new ie).pipe(H.stderr);return}e.stdout?.pipe(H.stdout),e.stderr?.pipe(H.stderr)},Ve=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(H.kill(-e.pid,t),oe.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return oe.error("Error killing process:",r),!1}},St=e=>Ve(e,"SIGKILL"),Nr=(e,t)=>{let r=null,i=()=>{oe.log(`Process ${e.pid} killed due to idle timeout (no output for ${t}ms)`),Ve(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(oe.log(`Force killing idle process ${e.pid}`),St(e))},5e3)},n=()=>{r&&clearTimeout(r),r=setTimeout(i,t)};n(),e.stdout?.on("data",n),e.stderr?.on("data",n);let o=()=>{r&&(clearTimeout(r),r=null)};e.on("exit",o),e.on("error",o)},Cr=e=>{Ke.add(e);let t=It();if(t){let r=t.onTimesUp(()=>{oe.log(`Global timer expired, killing process ${e.pid}`),Ve(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(oe.log(`Force killing process ${e.pid} after timeout`),St(e))},5e3)});e.on("exit",()=>{Ke.delete(e),r()}),e.on("error",()=>{Ke.delete(e),r()})}};function se(e,t){if(!H.env.NETLIFY_LOCAL_MODE)try{let n=Rr.resolve(Rt.name),o=W.dirname(n);for(;o!==W.dirname(o);){let s=W.dirname(o);if(W.basename(s)==="node_modules"){let l=W.join(s,".bin",t);if(Ae.existsSync(l))return l;break}o=s}}catch(n){console.error("Could not resolve package.json",n)}if(H.env.NODE_PATH){let n=W.join(H.env.NODE_PATH,".bin",t);if(Ae.existsSync(n))return n}let r=W.join(e,"node_modules",".bin",t);if(Ae.existsSync(r))return r;let i=W.join(vr,"..","node_modules",".bin",t);if(Ae.existsSync(i))return i}var Pr=E("utils"),Or=e=>new Promise(t=>{setTimeout(t,e)}),bt=(e,t=3e3)=>{let r=!1,i=null,n=[],o=null,s=(...l)=>{if(r)return i=l,new Promise(f=>{n.push(f)});r=!0;let u,c=new Promise(f=>{u=f});return o=(async()=>{await Promise.resolve();let f=await e(...l);for(u(f);;){if(await Or(t),!i)return r=!1,o=null,f;let d=i,m=n;i=null,n=[],f=await e(...d),m.forEach(h=>{h(f)})}})(),c};return s.flush=async()=>{if((r||i)&&o)return await 
o,s.flush()},s},ae=(e,t,r=!1)=>{let i=null,n=null,o=null,s=function(...l){n=l,o=this;let u=r&&!i;clearTimeout(i),i=setTimeout(()=>{i=null,r||(e.apply(o,n),n=null,o=null)},t),u&&(e.apply(o,n),n=null,o=null)};return s.cancel=()=>{clearTimeout(i),i=null,n=null,o=null},s.flush=()=>{if(i){clearTimeout(i);let l=n,u=o;i=null,n=null,o=null,e.apply(u,l)}},s},At=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(i){t&&(r?.error?r.error("Could not parse JSON",i):Pr.error("Could not parse JSON",i))}},Nt=(e,t)=>{let n=".netlify.app",o="agent-";if(!t)return`${o}${e.slice(0,6)}`;let l=`--${t}${n}`;if(l.length>55)return"";let u=60-l.length;if(u<=0)return"";if(u>=o.length+6){let c=Math.min(u-o.length,e.length);return`${o}${e.slice(0,c)}`}return e.slice(0,u)};var $r=50*1024,Je=(e,t=$r)=>{if(!e||typeof e!="string"||e.length<=t)return e;let i=e.startsWith("```")?"\n... [truncated]\n```":"... [truncated]";return e.slice(0,t)+i};import{Buffer as Ct}from"buffer";import Fr from"path";var Pt=E("repo"),Ot=async({config:e,isRetry:t})=>{Pt.info("Getting runner diffs");let r=await kr(),{hasChanges:i}=r,{status:n}=r;if(!i)return{hasChanges:!1};if(!t){let T=Lr(n);await Ur(T)}Pt.info("Changes after processing"),await ze();let o=await Ze(n);await Xe(o);let s={stdio:["ignore","pipe","pipe"]},l=await $("git",["diff","--staged"],s),u=String(l.stdout??"");if(i=!!u,!i)return{hasChanges:!1,ignored:o};let c=await $("git",["diff","--staged","--binary"],s),f=String(c.stdout??""),d,m;if(e.sha){if(!process.env.NETLIFY_LOCAL_MODE){process.env.NETLIFY_INTERNAL_GIT="1";try{await $("git",["commit","-m","Agent runner"])}finally{process.env.NETLIFY_INTERNAL_GIT="0"}}let T=await $("git",["diff",e.sha,"HEAD"],s);d=String(T.stdout??"");let _=await $("git",["diff",e.sha,"HEAD","--binary"],s),g=String(_.stdout??"");d!==g&&(m=Ct.from(g).toString("base64"))}let h={hasChanges:!0,diff:u,resultDiff:d,ignored:o};return u!==f&&(h.diffBinary=Ct.from(f).toString("base64")),m&&(h.resultDiffBinary=m),h},Xe=async(e=[])=>{process.env.NETLIFY_INTERNAL_GIT="1";try{await $("git",["add",".",...e])}finally{process.env.NETLIFY_INTERNAL_GIT="0"}},ze=async()=>{let e=await $("git",["status","-s"]);return String(e.stdout??"")},$t=/.. (.+)?\.log$/,Dr=[$t],kr=async()=>{let e=await ze();return{hasChanges:(e.trim().length===0?[]:e.split(`
- `).filter(i=>Dr.some(o=>o instanceof RegExp?o.test(i):i===o)?!1:i[1]?.trim()!=="")).length!==0,status:e}},Ft=async()=>{let{stdout:e}=await $("git",["rev-parse","HEAD"]);return String(e??"").trim()},Dt=async()=>{let{stdout:e}=await $("git",["rev-list","--max-parents=0","HEAD"]);return String(e??"").trim()},Ze=async e=>{e||=await ze();let t=[".netlify","node_modules","dist",".next","out",".nuxt",".output",".cache",".turbo",".parcel-cache","coverage",".nyc_output","storybook-static","public/build"],r=[];return e.split(`
- `).forEach(i=>{t.forEach(o=>{let s=i===`?? ${o}`,l=i.startsWith(`?? ${o}/`)||i.startsWith(`?? ${o}${Fr.sep}`);(s||l)&&r.push(`:!${o}`)});let n=i.match($t)?.[1];n&&r.push(`:!${n}.log`)}),r},kt=async()=>{await $("git",["reset","--hard","HEAD"])},Lr=e=>{let t=e.split(`
- `).reduce((r,i)=>{if(!i)return r;let[n,o,,...s]=i,l=s.join(""),u=n.trim(),c=o.trim();return r[l]?r[l].change=c:r[l]={filePath:l,stage:u,change:c},r},{});return Object.values(t)},Ur=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push($("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import Gr from"fs/promises";import jr from"os";import Mt from"path";import de from"process";import Yr from"readline";import Qe from"path";import Mr from"fs/promises";var et=E("agent-output-utils");async function le({initialResult:e,agentName:t,hasError:r}){let i="",n=Qe.join(process.cwd(),k,Q);try{let o=await Mr.readFile(n,"utf-8");o&&(i=o,et.log(`Pulled result from ${Qe.relative(process.cwd(),n)}`))}catch{et.log(`No results file found at ${Qe.relative(process.cwd(),n)}`)}return i||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function ue({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,i=r?.replace(/\s+/g," ").trim().toLowerCase()||"",n="";return i?.includes("ai gateway is not available for your account")||i?.includes("ai gateway is not enabled for your account")?n="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":i?.includes("error when talking to gemini api")?n="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(i?.includes("connection closed prematurely")||i?.includes("499")&&t.toLowerCase().includes("gemini"))&&(n=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),i?.includes("request timed out")&&(n=`The ${t} API request's have timed out. Please try again or use a different available agent.`),i?.includes("network error")&&(n=`The ${t} agent is having network issues. Please try again or use a different available agent.`),i?.includes("503")&&(n=`The ${t} API is currently experiencing high load. 
Retrying automatically...`),n&&et.log(`Providing updated error messsage: ${n}, replacing original error: ${r}`),n||r||void 0}function ce(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error")||r?.includes("503"))}var L=E("runner_claude"),Lt="Claude Code",pe="claude-opus-4-5-20251101",Ut=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,Br=({catchError:e,runCmd:t,error:r,result:i,runnerName:n})=>(L.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!i,resultLength:i?i.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),i?(L.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:i}:{error:"Process completed with errors but result was captured",result:i}):(L.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function tt({config:e,netlify:t,persistSteps:r,aiGateway:i,continueSession:n,priorAgentSessionId:o,cwd:s=de.cwd()}){let l=e,{accountType:u,prompt:c,modelVersionOverrides:f}=l,{model:d}=l,m="";if(i){let{token:y,url:p}=i;if(!y||!p)throw new Error("No token or url provided from AI Gateway");if(f?.claude){let a=f?.claude?.[u];if(a){if(!await i.isModelAvailableForProvider("anthropic",a))throw new Error(`Model override '${a}' is not available for anthropic provider`);d=a}}else if(d){if(!await i.isModelAvailableForProvider("anthropic",d))throw new Error(`Model '${d}' is not available for anthropic provider`)}else!!pe&&await i.isModelAvailableForProvider("anthropic",pe)?(d=pe,L.log(`Using default model: ${pe}`)):pe&&L.log(`Default model ${pe} is not available, proceeding without model specification`);de.env.ANTHROPIC_API_KEY=y,de.env.ANTHROPIC_BASE_URL=p}else if(!de.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let h=[],T=[],_={},g=0,x=0,R,S,N=[se(s,"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose","--disallowed-tools","ExitPlanMode,AskUserQuestion",...d?["--model",d]:[],...n?["--continue"]:[],...n&&o?["--resume",o]:[],"-p",c],C=`${de.env.NVM_BIN}/node`;L.log(`Running ${C} ${N.join(" ")}`);let O=t.utils.run(C,N,{all:!0,env:de.env,cwd:s,idleTimeout:ee});O.stdin?.end();let I=ae(()=>{r?.({steps:h,duration:x})},250),v=(y,p)=>{let a={...y,id:g};g+=1,T.push(a),h.push(a),p||I.flush(),I(),p&&I.flush()},M=Yr.createInterface({input:O.all});return M.on("error",y=>{L.error("Readline interface error",{error:y.message,stack:y.stack})}),M.on("line",y=>{let p=null;try{p=JSON.parse(y)}catch{L.log("Could not parse line",y)}p?.session_id&&p.session_id!==m&&(m=p.session_id),Array.isArray(p?.message?.content)?p.message.content.forEach(a=>{switch(a.type){case"text":{a.text&&v({message:a.text});break}case"image":{typeof 
a.source=="object"&&a.source&&a.source.type==="base64"&&a.source.media_type?v({message:`![](data:${a.source.media_type};base64,${a.source.data})`}):L.log(`Unsupported image type ${a.source?.type}`,a.source);break}case"tool_use":{if(a.name==="Task"){let w=a.input?.description&&`\`${a.input.description}\``;v({title:[Ut(a.name),w].filter(Boolean).join(" ")})}else a.id&&(_[a.id]=a);I.flush();break}case"tool_result":{let w=a.tool_use_id?_[a.tool_use_id]:void 0,X;if(w){let K=w.input?.file_path&&Mt.relative(s,w.input.file_path),P=K&&`\`${K}\``;X=[Ut(w.name||""),P].filter(Boolean).join(" ")}let ve=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(w?.name||""),z;if(typeof a.content=="string")z=a.content;else if(Array.isArray(a.content)){let K=[];a.content.forEach(P=>{P?.type==="text"&&typeof P.text=="string"?K.push(P.text):P?.type==="image"&&typeof P.source=="object"&&P.source?P.source.type==="base64"&&P.source.media_type?K.push(`![](data:${P.source.media_type};base64,${P.source.data})`):L.log(`Unsupported image type ${P.source.type}`,P.source):L.log(`Unsupported block type ${P?.type}`)}),z=K.join(`
+ `);return o.length>e.length*.8?e:o}import{execSync as en}from"child_process";import Jt from"fs/promises";import tn from"path";import J from"process";import{getTracer as rn}from"@netlify/otel";import ye from"process";var re=class extends Error{constructor(r,i,n){super(r);this.statusCode=i;this.userMessage=n;this.name="GracefulShutdownError"}},gt=e=>e instanceof re;var Re=ye.env.NETLIFY_API_URL,Se=ye.env.NETLIFY_API_TOKEN,B=E("api"),be=()=>ye.env.NETLIFY_LOCAL_MODE==="true",_e=async(e,t={})=>{if(!Re||!Se)throw new Error("No API URL or token");let r=new URL(e,Re),i={...t,headers:{...t.headers,Authorization:`Bearer ${Se}`}};ye.env.AGENT_RUNNERS_DEBUG==="true"&&(i.headers["x-nf-debug-logging"]="true"),t.json&&(i.headers||={},i.headers["Content-Type"]="application/json",i.body=JSON.stringify(t.json));let n=await fetch(r,i),o=n.ok&&n.status<=299;if(ye.env.AGENT_RUNNERS_DEBUG==="true")B.log(`Response headers for ${r}:`),n.headers.forEach((l,u)=>{B.log(` ${u}: ${l}`)});else{let l=n.headers.get("x-request-id")||n.headers.get("x-nf-request-id");B.log(`Request ID for ${r}: ${l||"N/A"}`)}if(o||B.error(`Got status ${n.status} for request ${r}`),t.raw){if(!o)throw new Error(`API request failed: ${n.status} ${n.statusText}`);return n}let s=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!o){let l=typeof s=="string"?s:JSON.stringify(s);throw n.status===404?new re(`API request failed: 404 - ${l}`,404,"The site associated with this agent run no longer exists."):n.status===403&&t.gracefulOn403?new re(`API request failed: 403 - ${l}`,403,"Credit limit reached. Please add more credits to continue using Agent Runners."):new Error(`API request failed: ${n.status} - ${l}`)}return s},ht=e=>{B.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(Re=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(Se=e.constants.NETLIFY_API_TOKEN)},yt=()=>({apiUrl:Re,token:Se}),Ee=async(e,t)=>be()?(B.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):_e(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),G=async(e,t,r)=>be()?(B.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):_e(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var _t=async(e,t)=>be()?(B.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):_e(`/api/v1/agent_runners/${e}/sessions/${t}`),Et=(e,t,r)=>_e(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r},gracefulOn403:!0}),wt=async(e,t)=>be()?(B.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):_e(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),Be=async(e,t)=>{B.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var ne=E("ai_gateway"),He=null;var Tt=async()=>{if(He)return He;ne.log("Fetching available AI gateway providers");let e=await fetch(`${yt().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: 
${e.statusText}`);let t=await e.json();return He=t,ne.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},fr=async(e,t)=>{let i=(await Tt()).providers[e];if(!i)return ne.log(`Provider '${e}' not found`),!1;let n=i.models.includes(t);return ne.log(`Model validation for ${e}/${t}`,{isAvailable:n}),n},xt=async({netlify:e,config:t})=>{let r,i,n,o,s=e.constants?.SITE_ID;if(!s)throw new Error("No site id");let l=async()=>{clearTimeout(n),ne.log("Requesting AI gateway information");let u=await Et(s,t.id,t.sessionId);if({token:r,url:o}=u,i=u.expires_at?u.expires_at*1e3:void 0,ne.log("Got AI gateway information",{token:!!r,expiresAt:i,url:o}),i){let c=i-Date.now()-6e4;c>0&&(n=setTimeout(()=>{l()},c))}};return await Promise.all([l(),Tt()]),{get url(){return o},get token(){return r},isModelAvailableForProvider:fr}};import H from"process";import W from"path";import Ae from"fs";import{fileURLToPath as wr}from"url";import{createRequire as Tr}from"module";import{execa as xr,execaCommand as ti}from"execa";import{Transform as mr}from"stream";var gr=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),hr=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function yr(){return Object.entries(process.env).filter(([e,t])=>!(!t||gr.has(e)||hr.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function q(e){if(typeof e!="string")return e;let t=yr();if(t.length===0)return e;let r=e;return t.forEach(i=>{let n=new RegExp(_r(i),"g");r=r.replace(n,"******")}),r}function _r(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var ie=class extends mr{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,i){let n=t.toString(),o=q(n);i(null,o)}};function It(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(i,n,o){let s=typeof i=="string"?q(i):i;return typeof n=="function"?t(s,n):t(s,n,o)},process.stderr.write=function(i,n,o){let s=typeof i=="string"?q(i):i;return typeof n=="function"?r(s,n):r(s,n,o)}}var we=null,vt=e=>(we&&we.destroy(),we=new V({totalAllowedTime:e}),we),Rt=()=>we;var V=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,i)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let n=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),o=null,s=null;i!==void 0&&(s=new Promise((l,u)=>{o=setTimeout(()=>{u(new Error(`${t} stage exceeded its maximum duration of ${i}ms`))},i)}));try{return s?await Promise.race([r(),s]):await r()}finally{n(),o&&clearTimeout(o)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var St="netlify-agent-runner-context.md",qe="task-history",We="netlify-context",k=".netlify",Q="results.md",Ke="assets";var ee=1800*1e3;var bt={name:"@netlify/agent-runner-cli",type:"module",version:"1.63.0",description:"CLI tool for running Netlify agents",main:"./dist/index.js",types:"./dist/index.d.ts",exports:"./dist/index.js",bin:{"agent-runner-cli":"./dist/bin.js","agent-runner-cli-local":"./dist/bin-local.js"},files:["dist/**/*.js","dist/**/*.d.ts","patches","scripts"],scripts:{build:"tsup",dev:"tsup --watch",prepare:"husky install node_modules/@netlify/eslint-config-node/.husky/",prepublishOnly:"npm ci && npm test",prepack:"npm run build",test:"run-s build format test:dev",format:"run-s build format:check-fix:*","format:ci":"run-s build format:check:*","format:check-fix:lint":"run-e format:check:lint format:fix:lint","format:check:lint":"cross-env-shell eslint $npm_package_config_eslint","format:fix:lint":"cross-env-shell eslint --fix $npm_package_config_eslint","format:check-fix:prettier":"run-e format:check:prettier format:fix:prettier","format:check:prettier":"cross-env-shell prettier --check $npm_package_config_prettier","format:fix:prettier":"cross-env-shell prettier --write $npm_package_config_prettier","test:dev":"run-s build test:dev:*","test:ci":"run-s build test:ci:*","test:dev:vitest":"LOG=0 vitest --exclude '**/integration/**'","test:ci:vitest":"LOG=0 c8 -r lcovonly -r text -r json vitest --exclude '**/integration/**'","test:integration":"vitest run test/integration/","test:integration:codex":"vitest run test/integration/codex.test.ts","test:integration:claude":"vitest run test/integration/claude.test.ts","test:integration:gemini":"vitest run test/integration/gemini.test.ts",postinstall:"node scripts/postinstall.js"},config:{eslint:'--cache --format=codeframe --max-warnings=0 "{src,scripts,test,.github}/**/*.{js,ts,md,html}"',prettier:'--ignore-path .gitignore 
--loglevel=warn "{src,scripts,test,.github}/**/*.{js,ts,md,yml,json,html}" "*.{js,ts,yml,json,html}" ".*.{js,ts,yml,json,html}" "!**/package-lock.json" "!package-lock.json"'},keywords:[],license:"MIT",repository:"netlify/agent-runner-cli",bugs:{url:"https://github.com/netlify/agent-runner-cli/issues"},author:"Netlify Inc.",directories:{test:"test"},devDependencies:{"@commitlint/cli":"^20.0.0","@commitlint/config-conventional":"^20.0.0","@eslint/compat":"^2.0.0","@eslint/js":"^9.35.0","@netlify/eslint-config-node":"^7.0.1","@types/node":"^24.5.0","@typescript-eslint/eslint-plugin":"^8.0.0","@typescript-eslint/parser":"^8.0.0","@vitest/eslint-plugin":"^1.6.6",c8:"^10.0.0","eslint-config-prettier":"^10.1.8","eslint-plugin-n":"^17.0.0",husky:"^9.0.0","patch-package":"^8.0.0",tsup:"^8.5.0",typescript:"^5.0.0","typescript-eslint":"^8.44.0",vitest:"^4.0.16"},dependencies:{"@anthropic-ai/claude-code":"2.1.29","@google/gemini-cli":"0.25.2","@netlify/otel":"^5.1.1","@openai/codex":"0.93.0","@opentelemetry/exporter-trace-otlp-grpc":"^0.208.0",execa:"^9.6.1",minimist:"^1.2.8"}};var Ir=wr(import.meta.url),vr=W.dirname(Ir),Rr=Tr(import.meta.url),oe=E("shell"),Ve=new Set,Sr={preferLocal:!0},$=(e,t,r)=>{let[i,n]=br(t,r),o={...Sr,...n},s=xr(e,i,o);Ar(s,o),Cr(s);let l=r?.idleTimeout;return l&&l>0&&Nr(s,l),s};var br=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},Ar=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(H.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new ie).pipe(H.stdout),e.stdout?.pipe(new ie).pipe(H.stdout),e.stderr?.pipe(new ie).pipe(H.stderr);return}e.stdout?.pipe(H.stdout),e.stderr?.pipe(H.stderr)},Je=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(H.kill(-e.pid,t),oe.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return oe.error("Error killing process:",r),!1}},At=e=>Je(e,"SIGKILL"),Nr=(e,t)=>{let r=null,i=()=>{oe.log(`Process ${e.pid} killed due to idle timeout (no output for ${t}ms)`),Je(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(oe.log(`Force killing idle process ${e.pid}`),At(e))},5e3)},n=()=>{r&&clearTimeout(r),r=setTimeout(i,t)};n(),e.stdout?.on("data",n),e.stderr?.on("data",n);let o=()=>{r&&(clearTimeout(r),r=null)};e.on("exit",o),e.on("error",o)},Cr=e=>{Ve.add(e);let t=Rt();if(t){let r=t.onTimesUp(()=>{oe.log(`Global timer expired, killing process ${e.pid}`),Je(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(oe.log(`Force killing process ${e.pid} after timeout`),At(e))},5e3)});e.on("exit",()=>{Ve.delete(e),r()}),e.on("error",()=>{Ve.delete(e),r()})}};function se(e,t){if(!H.env.NETLIFY_LOCAL_MODE)try{let n=Rr.resolve(bt.name),o=W.dirname(n);for(;o!==W.dirname(o);){let s=W.dirname(o);if(W.basename(s)==="node_modules"){let l=W.join(s,".bin",t);if(Ae.existsSync(l))return l;break}o=s}}catch(n){console.error("Could not resolve package.json",n)}if(H.env.NODE_PATH){let n=W.join(H.env.NODE_PATH,".bin",t);if(Ae.existsSync(n))return n}let r=W.join(e,"node_modules",".bin",t);if(Ae.existsSync(r))return r;let i=W.join(vr,"..","node_modules",".bin",t);if(Ae.existsSync(i))return i}var Pr=E("utils"),Or=e=>new Promise(t=>{setTimeout(t,e)}),Nt=(e,t=3e3)=>{let r=!1,i=null,n=[],o=null,s=(...l)=>{if(r)return i=l,new Promise(f=>{n.push(f)});r=!0;let u,c=new Promise(f=>{u=f});return o=(async()=>{await Promise.resolve();let f=await e(...l);for(u(f);;){if(await Or(t),!i)return r=!1,o=null,f;let d=i,m=n;i=null,n=[],f=await e(...d),m.forEach(y=>{y(f)})}})(),c};return s.flush=async()=>{if((r||i)&&o)return await 
o,s.flush()},s},ae=(e,t,r=!1)=>{let i=null,n=null,o=null,s=function(...l){n=l,o=this;let u=r&&!i;clearTimeout(i),i=setTimeout(()=>{i=null,r||(e.apply(o,n),n=null,o=null)},t),u&&(e.apply(o,n),n=null,o=null)};return s.cancel=()=>{clearTimeout(i),i=null,n=null,o=null},s.flush=()=>{if(i){clearTimeout(i);let l=n,u=o;i=null,n=null,o=null,e.apply(u,l)}},s},Ct=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(i){t&&(r?.error?r.error("Could not parse JSON",i):Pr.error("Could not parse JSON",i))}},Pt=(e,t)=>{let n=".netlify.app",o="agent-";if(!t)return`${o}${e.slice(0,6)}`;let l=`--${t}${n}`;if(l.length>55)return"";let u=60-l.length;if(u<=0)return"";if(u>=o.length+6){let c=Math.min(u-o.length,e.length);return`${o}${e.slice(0,c)}`}return e.slice(0,u)};var $r=50*1024,Xe=(e,t=$r)=>{if(!e||typeof e!="string"||e.length<=t)return e;let i=e.startsWith("```")?"\n... [truncated]\n```":"... [truncated]";return e.slice(0,t)+i};import{Buffer as Ot}from"buffer";import Fr from"path";var $t=E("repo"),Ft=async({config:e,isRetry:t})=>{$t.info("Getting runner diffs");let r=await Lr(),{hasChanges:i}=r,{status:n}=r;if(!i)return{hasChanges:!1};if(!t){let T=kr(n);await Ur(T)}$t.info("Changes after processing"),await Ze();let o=await Qe(n);await ze(o);let s={stdio:["ignore","pipe","pipe"]},l=await $("git",["diff","--staged"],s),u=String(l.stdout??"");if(i=!!u,!i)return{hasChanges:!1,ignored:o};let c=await $("git",["diff","--staged","--binary"],s),f=String(c.stdout??""),d,m;if(e.sha){if(!process.env.NETLIFY_LOCAL_MODE){process.env.NETLIFY_INTERNAL_GIT="1";try{await $("git",["commit","-m","Agent runner"])}finally{process.env.NETLIFY_INTERNAL_GIT="0"}}let T=await $("git",["diff",e.sha,"HEAD"],s);d=String(T.stdout??"");let _=await $("git",["diff",e.sha,"HEAD","--binary"],s),g=String(_.stdout??"");d!==g&&(m=Ot.from(g).toString("base64"))}let y={hasChanges:!0,diff:u,resultDiff:d,ignored:o};return u!==f&&(y.diffBinary=Ot.from(f).toString("base64")),m&&(y.resultDiffBinary=m),y},ze=async(e=[])=>{process.env.NETLIFY_INTERNAL_GIT="1";try{await $("git",["add",".",...e])}finally{process.env.NETLIFY_INTERNAL_GIT="0"}},Ze=async()=>{let e=await $("git",["status","-s"]);return String(e.stdout??"")},Dt=/.. (.+)?\.log$/,Dr=[Dt],Lr=async()=>{let e=await Ze();return{hasChanges:(e.trim().length===0?[]:e.split(`
+ `).filter(i=>Dr.some(o=>o instanceof RegExp?o.test(i):i===o)?!1:i[1]?.trim()!=="")).length!==0,status:e}},Lt=async()=>{let{stdout:e}=await $("git",["rev-parse","HEAD"]);return String(e??"").trim()},kt=async()=>{let{stdout:e}=await $("git",["rev-list","--max-parents=0","HEAD"]);return String(e??"").trim()},Qe=async e=>{e||=await Ze();let t=[".netlify","node_modules","dist",".next","out",".nuxt",".output",".cache",".turbo",".parcel-cache","coverage",".nyc_output","storybook-static","public/build"],r=[];return e.split(`
+ `).forEach(i=>{t.forEach(o=>{let s=i===`?? ${o}`,l=i.startsWith(`?? ${o}/`)||i.startsWith(`?? ${o}${Fr.sep}`);(s||l)&&r.push(`:!${o}`)});let n=i.match(Dt)?.[1];n&&r.push(`:!${n}.log`)}),r},et=async()=>{await $("git",["reset","--hard","HEAD"])},kr=e=>{let t=e.split(`
+ `).reduce((r,i)=>{if(!i)return r;let[n,o,,...s]=i,l=s.join(""),u=n.trim(),c=o.trim();return r[l]?r[l].change=c:r[l]={filePath:l,stage:u,change:c},r},{});return Object.values(t)},Ur=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push($("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import Gr from"fs/promises";import jr from"os";import Gt from"path";import de from"process";import Yr from"readline";import tt from"path";import Mr from"fs/promises";var rt=E("agent-output-utils");async function le({initialResult:e,agentName:t,hasError:r}){let i="",n=tt.join(process.cwd(),k,Q);try{let o=await Mr.readFile(n,"utf-8");o&&(i=o,rt.log(`Pulled result from ${tt.relative(process.cwd(),n)}`))}catch{rt.log(`No results file found at ${tt.relative(process.cwd(),n)}`)}return i||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function ue({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,i=r?.replace(/\s+/g," ").trim().toLowerCase()||"",n="";return i?.includes("ai gateway is not available for your account")||i?.includes("ai gateway is not enabled for your account")?n="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":i?.includes("error when talking to gemini api")?n="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(i?.includes("connection closed prematurely")||i?.includes("499")&&t.toLowerCase().includes("gemini"))&&(n=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),i?.includes("request timed out")&&(n=`The ${t} API request's have timed out. Please try again or use a different available agent.`),i?.includes("network error")&&(n=`The ${t} agent is having network issues. Please try again or use a different available agent.`),i?.includes("503")&&(n=`The ${t} API is currently experiencing high load. 
Retrying automatically...`),n&&rt.log(`Providing updated error messsage: ${n}, replacing original error: ${r}`),n||r||void 0}function ce(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error")||r?.includes("503"))}var U=E("runner_claude"),Ut="Claude Code",pe="claude-opus-4-5-20251101",Mt=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,Br=({catchError:e,runCmd:t,error:r,result:i,runnerName:n})=>(U.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!i,resultLength:i?i.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),i?(U.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:i}:{error:"Process completed with errors but result was captured",result:i}):(U.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function nt({config:e,netlify:t,persistSteps:r,aiGateway:i,continueSession:n,priorAgentSessionId:o,cwd:s=de.cwd()}){let l=e,{accountType:u,prompt:c,modelVersionOverrides:f}=l,{model:d}=l,m="";if(i){let{token:h,url:p}=i;if(!h||!p)throw new Error("No token or url provided from AI Gateway");if(f?.claude){let a=f?.claude?.[u];if(a){if(!await i.isModelAvailableForProvider("anthropic",a))throw new Error(`Model override '${a}' is not available for anthropic provider`);d=a}}else if(d){if(!await i.isModelAvailableForProvider("anthropic",d))throw new Error(`Model '${d}' is not available for anthropic provider`)}else!!pe&&await i.isModelAvailableForProvider("anthropic",pe)?(d=pe,U.log(`Using default model: ${pe}`)):pe&&U.log(`Default model ${pe} is not available, proceeding without model specification`);de.env.ANTHROPIC_API_KEY=h,de.env.ANTHROPIC_BASE_URL=p}else if(!de.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let y=[],T=[],_={},g=0,x=0,R,S,N=[se(s,"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose","--disallowed-tools","ExitPlanMode,AskUserQuestion",...d?["--model",d]:[],...n?["--continue"]:[],...n&&o?["--resume",o]:[],"-p",c],C=`${de.env.NVM_BIN}/node`;U.log(`Running ${C} ${N.join(" ")}`);let O=t.utils.run(C,N,{all:!0,env:de.env,cwd:s,idleTimeout:ee});O.stdin?.end();let I=ae(()=>{r?.({steps:y,duration:x})},250),v=(h,p)=>{let a={...h,id:g};g+=1,T.push(a),y.push(a),p||I.flush(),I(),p&&I.flush()},D=Yr.createInterface({input:O.all});return D.on("error",h=>{U.error("Readline interface error",{error:h.message,stack:h.stack})}),D.on("line",h=>{let p=null;try{p=JSON.parse(h)}catch{U.log("Could not parse line",h)}p?.session_id&&p.session_id!==m&&(m=p.session_id),Array.isArray(p?.message?.content)?p.message.content.forEach(a=>{switch(a.type){case"text":{a.text&&v({message:a.text});break}case"image":{typeof 
a.source=="object"&&a.source&&a.source.type==="base64"&&a.source.media_type?v({message:`![](data:${a.source.media_type};base64,${a.source.data})`}):U.log(`Unsupported image type ${a.source?.type}`,a.source);break}case"tool_use":{if(a.name==="Task"){let w=a.input?.description&&`\`${a.input.description}\``;v({title:[Mt(a.name),w].filter(Boolean).join(" ")})}else a.id&&(_[a.id]=a);I.flush();break}case"tool_result":{let w=a.tool_use_id?_[a.tool_use_id]:void 0,z;if(w){let K=w.input?.file_path&&Gt.relative(s,w.input.file_path),P=K&&`\`${K}\``;z=[Mt(w.name||""),P].filter(Boolean).join(" ")}let ve=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(w?.name||""),Z;if(typeof a.content=="string")Z=a.content;else if(Array.isArray(a.content)){let K=[];a.content.forEach(P=>{P?.type==="text"&&typeof P.text=="string"?K.push(P.text):P?.type==="image"&&typeof P.source=="object"&&P.source?P.source.type==="base64"&&P.source.media_type?K.push(`![](data:${P.source.media_type};base64,${P.source.data})`):U.log(`Unsupported image type ${P.source.type}`,P.source):U.log(`Unsupported block type ${P?.type}`)}),Z=K.join(`
 
- `)}ve&&z&&(z=`\`\`\`
- ${z.trim()}
- \`\`\``),v({title:X,message:z},!0);break}case"thinking":{a.thinking&&v({title:"Thinking",message:a.thinking},!0);break}default:L.log(`Message content type is not supported ${a.type}`,a)}}):p?.type==="result"&&(x=p.duration_ms||0,p.is_error?S=p.result:R=p.result,[T,h].forEach(a=>{a[a.length-1]?.message===R&&a.pop()}))}),await O.catch(y=>{({error:S,result:R}=Br({catchError:y,runCmd:O,error:S,result:R,runnerName:"Claude"}))}),M.close(),I.flush(),{steps:T,duration:x,result:await le({initialResult:R,agentName:Lt,hasError:!!S}),error:ue({error:S,agentName:Lt}),isRetryableError:ce(S),agentSessionId:m}}var Gt=async()=>{let e=Mt.join(jr.homedir(),".claude");await Gr.rm(e,{recursive:!0,force:!0})};import we from"fs/promises";import Yt from"os";import Ne from"path";import te from"process";import Hr from"readline";var U=E("runner_codex"),jt="Codex CLI",fe="",qr=({catchError:e,runCmd:t,error:r,result:i,runnerName:n})=>(U.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!i,resultLength:i?i.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),i?(U.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:i}:{error:"Process completed with errors but result was captured",result:i}):(U.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function rt({config:e,netlify:t,persistSteps:r=void 0,sendSteps:i=void 0,aiGateway:n,cwd:o=te.cwd()}){let{accountType:s,prompt:l,modelVersionOverrides:u}=e,{model:c}=e;if(n){let{token:p,url:a}=n;if(!p||!a)throw new Error("No token or url provided from AI Gateway");if(u?.codex){let w=u?.codex?.[s];if(w){if(!await n.isModelAvailableForProvider("openai",w))throw new Error(`Model override '${w}' is not available for openai provider`);c=w}}else if(c){if(!await n.isModelAvailableForProvider("openai",c))throw new Error(`Model '${c}' is not available for openai provider`)}else!!fe&&await n.isModelAvailableForProvider("openai",fe)?(c=fe,U.log(`Using default model: ${fe}`)):fe&&U.log(`Default model ${fe} is not available, proceeding without model specification`);te.env.OPENAI_API_KEY=p,te.env.OPENAI_BASE_URL=a}else if(!te.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let f=[],d=[],m=[],h={},T=0,_=0,g,x,R=`${te.env.NVM_BIN}/node`,S=Ne.join(Yt.homedir(),".codex"),N=Ne.join(S,"config.toml"),C=Ne.join(S,"auth.json");try{await we.mkdir(S,{recursive:!0});let p={OPENAI_API_KEY:te.env.OPENAI_API_KEY};await we.writeFile(C,JSON.stringify(p,null,2),"utf-8"),U.log("Created Codex auth.json file");let a="";try{a=await we.readFile(N,"utf-8")}catch{}a.includes("web_search_request")||(a.includes("[features]")?a=a.replace(/\[features\]/,`[features]
+ `)}ve&&Z&&(Z=`\`\`\`
+ ${Z.trim()}
+ \`\`\``),v({title:z,message:Z},!0);break}case"thinking":{a.thinking&&v({title:"Thinking",message:a.thinking},!0);break}default:U.log(`Message content type is not supported ${a.type}`,a)}}):p?.type==="result"&&(x=p.duration_ms||0,p.is_error?S=p.result:R=p.result,[T,y].forEach(a=>{a[a.length-1]?.message===R&&a.pop()}))}),await O.catch(h=>{({error:S,result:R}=Br({catchError:h,runCmd:O,error:S,result:R,runnerName:"Claude"}))}),D.close(),I.flush(),{steps:T,duration:x,result:await le({initialResult:R,agentName:Ut,hasError:!!S}),error:ue({error:S,agentName:Ut}),isRetryableError:ce(S),agentSessionId:m}}var jt=async()=>{let e=Gt.join(jr.homedir(),".claude");await Gr.rm(e,{recursive:!0,force:!0})};import Te from"fs/promises";import Bt from"os";import Ne from"path";import te from"process";import Hr from"readline";var M=E("runner_codex"),Yt="Codex CLI",fe="",qr=({catchError:e,runCmd:t,error:r,result:i,runnerName:n})=>(M.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!i,resultLength:i?i.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),i?(M.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:i}:{error:"Process completed with errors but result was captured",result:i}):(M.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function it({config:e,netlify:t,persistSteps:r=void 0,sendSteps:i=void 0,aiGateway:n,cwd:o=te.cwd()}){let{accountType:s,prompt:l,modelVersionOverrides:u}=e,{model:c}=e;if(n){let{token:p,url:a}=n;if(!p||!a)throw new Error("No token or url provided from AI Gateway");if(u?.codex){let w=u?.codex?.[s];if(w){if(!await n.isModelAvailableForProvider("openai",w))throw new Error(`Model override '${w}' is not available for openai provider`);c=w}}else if(c){if(!await n.isModelAvailableForProvider("openai",c))throw new Error(`Model '${c}' is not available for openai provider`)}else!!fe&&await n.isModelAvailableForProvider("openai",fe)?(c=fe,M.log(`Using default model: ${fe}`)):fe&&M.log(`Default model ${fe} is not available, proceeding without model specification`);te.env.OPENAI_API_KEY=p,te.env.OPENAI_BASE_URL=a}else if(!te.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let f=[],d=[],m=[],y={},T=0,_=0,g,x,R=`${te.env.NVM_BIN}/node`,S=Ne.join(Bt.homedir(),".codex"),N=Ne.join(S,"config.toml"),C=Ne.join(S,"auth.json");try{await Te.mkdir(S,{recursive:!0});let p={OPENAI_API_KEY:te.env.OPENAI_API_KEY};await Te.writeFile(C,JSON.stringify(p,null,2),"utf-8"),M.log("Created Codex auth.json file");let a="";try{a=await Te.readFile(N,"utf-8")}catch{}a.includes("web_search_request")||(a.includes("[features]")?a=a.replace(/\[features\]/,`[features]
15
15
  web_search_request = true`):a+=`
16
16
  [features]
17
17
  web_search_request = true
18
- `,await we.writeFile(N,a,"utf-8"),U.log("Updated Codex config with web_search_request enabled"))}catch(p){throw U.warn("Failed to setup Codex config and credentials",{error:p.message}),new Error(`Codex setup failed: ${p.message}`)}let O=[se(o,"codex"),"exec","--yolo","--json","--enable","web_search_request",...c?["--model",c]:[],l].filter(Boolean);U.log(`Running ${R} ${O.join(" ")}`);let I=t.utils.run(R,O,{all:!0,cwd:o,env:{...te.env},idleTimeout:ee}),v=ae(()=>{r?.({steps:f,duration:_}),i?.({steps:d,duration:_}),d=[]},250),M=(p,a)=>{p.id=T,T+=1,m.push(p),f.push(p),d.push(p),a||v.flush(),v(),a&&v.flush()},y=Hr.createInterface({input:I.all});return y.on("error",p=>{U.error("Readline interface error",{error:p.message,stack:p.stack})}),y.on("line",p=>{let a=null;try{a=JSON.parse(p)}catch{U.log("Could not parse line",p);return}if(a?.duration_ms&&(_=a.duration_ms),a?.type==="item.started"&&a?.item?.type==="command_execution")h[a.item.id]=a.item;else if(a?.type==="item.completed"&&a?.item?.type==="command_execution"){let w=Kr(a.item);w&&M(w,!0)}else if(a?.type==="item.completed"&&a?.item?.type==="reasoning"){let w={title:"Reasoning",message:a.item.text};M(w,!0)}else if(a?.type==="local_shell_call")h[a.call_id]=a;else if(a?.type==="local_shell_call_output"){let w=Vr(h[a.call_id],a);w&&M(w,!0)}else a?.type==="message"&&a.role==="assistant"?g=a.content.map(w=>w.text).join(`
18
+ `,await Te.writeFile(N,a,"utf-8"),M.log("Updated Codex config with web_search_request enabled"))}catch(p){throw M.warn("Failed to setup Codex config and credentials",{error:p.message}),new Error(`Codex setup failed: ${p.message}`)}let O=[se(o,"codex"),"exec","--yolo","--json","--enable","web_search_request",...c?["--model",c]:[],l].filter(Boolean);M.log(`Running ${R} ${O.join(" ")}`);let I=t.utils.run(R,O,{all:!0,cwd:o,env:{...te.env},idleTimeout:ee}),v=ae(()=>{r?.({steps:f,duration:_}),i?.({steps:d,duration:_}),d=[]},250),D=(p,a)=>{p.id=T,T+=1,m.push(p),f.push(p),d.push(p),a||v.flush(),v(),a&&v.flush()},h=Hr.createInterface({input:I.all});return h.on("error",p=>{M.error("Readline interface error",{error:p.message,stack:p.stack})}),h.on("line",p=>{let a=null;try{a=JSON.parse(p)}catch{M.log("Could not parse line",p);return}if(a?.duration_ms&&(_=a.duration_ms),a?.type==="item.started"&&a?.item?.type==="command_execution")y[a.item.id]=a.item;else if(a?.type==="item.completed"&&a?.item?.type==="command_execution"){let w=Kr(a.item);w&&D(w,!0)}else if(a?.type==="item.completed"&&a?.item?.type==="reasoning"){let w={title:"Reasoning",message:a.item.text};D(w,!0)}else if(a?.type==="local_shell_call")y[a.call_id]=a;else if(a?.type==="local_shell_call_output"){let w=Vr(y[a.call_id],a);w&&D(w,!0)}else a?.type==="message"&&a.role==="assistant"?g=a.content.map(w=>w.text).join(`
19
19
  `):a?.type==="message"&&a.role==="system"&&(x=a.content.map(w=>w.text).join(`
20
- `))}),await I.catch(p=>{let a=qr({catchError:p,runCmd:I,error:x,result:g,runnerName:"Codex"});x=a.error,g=a.result}),y.close(),v.flush(),{steps:m,duration:_,result:await le({initialResult:g,agentName:jt,hasError:!!x}),error:ue({error:x,agentName:jt}),isRetryableError:ce(x)}}var Bt=async()=>{let e=Ne.join(Yt.homedir(),".codex");await we.rm(e,{recursive:!0,force:!0})},Wr=new Set(["bash","-lc"]),Kr=e=>{if(!e||e.type!=="command_execution")return null;let t=e.command;t.startsWith("bash -lc ")&&(t=t.replace(/^bash -lc ['"]/,"").replace(/['"]$/,""));let r=`Running \`${t}\``,i=e.aggregated_output?.trim();return i&&(i=`\`\`\`
20
+ `))}),await I.catch(p=>{let a=qr({catchError:p,runCmd:I,error:x,result:g,runnerName:"Codex"});x=a.error,g=a.result}),h.close(),v.flush(),{steps:m,duration:_,result:await le({initialResult:g,agentName:Yt,hasError:!!x}),error:ue({error:x,agentName:Yt}),isRetryableError:ce(x)}}var Ht=async()=>{let e=Ne.join(Bt.homedir(),".codex");await Te.rm(e,{recursive:!0,force:!0})},Wr=new Set(["bash","-lc"]),Kr=e=>{if(!e||e.type!=="command_execution")return null;let t=e.command;t.startsWith("bash -lc ")&&(t=t.replace(/^bash -lc ['"]/,"").replace(/['"]$/,""));let r=`Running \`${t}\``,i=e.aggregated_output?.trim();return i&&(i=`\`\`\`
21
21
  ${i}
22
22
  \`\`\``),e.status==="failed"&&e.exit_code!==0&&(i=i?`${i}
23
23
 
24
24
  *Exit code: ${e.exit_code}*`:`*Command failed with exit code: ${e.exit_code}*`),{title:r,message:i}},Vr=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(o=>!Wr.has(o)),i=r?`Running \`${r.join(" ")}\``:void 0,n;try{n=JSON.parse(t.output).output?.trim(),n&&(n=`\`\`\`
25
25
  ${n.trim()}
26
- \`\`\``)}catch(o){U.error("Could not decode outputMsg",o,t.output)}return{title:i,message:n}};import Ce from"fs/promises";import qt from"os";import Pe from"path";import me from"process";import Jr from"readline";var G=E("runner_gemini"),Ht="Gemini CLI",ge="",Xr=({catchError:e,runCmd:t,error:r,result:i,runnerName:n})=>(G.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!i,resultLength:i?i.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),i?(G.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:i}:{error:"Process completed with errors but result was captured",result:i}):(G.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),zr={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"},Zr=async()=>{let e=Pe.join(qt.homedir(),".gemini"),t=Pe.join(e,"settings.json");try{await Ce.mkdir(e,{recursive:!0});let r={};try{let i=await Ce.readFile(t,"utf-8");r=JSON.parse(i)}catch{G.log("Creating new Gemini CLI settings file")}r.general||(r.general={}),r.general.previewFeatures||(r.general.previewFeatures=!0),r.model||(r.model={}),r.model.compressionThreshold!==.3&&(r.model.compressionThreshold=.3),await Ce.writeFile(t,JSON.stringify(r,null,2),"utf-8"),G.log("Configured Gemini CLI settings (preview features and compression threshold)")}catch(r){G.error("Failed to ensure Gemini CLI settings",{error:r.message})}};async function nt({config:e,netlify:t,persistSteps:r=void 0,sendSteps:i=void 0,aiGateway:n,cwd:o=me.cwd()}){let{accountType:s,prompt:l,modelVersionOverrides:u}=e,{model:c}=e;if(await Zr(),n){let{token:y,url:p}=n;if(!y||!p)throw new Error("No token or url provided from AI Gateway");if(u?.gemini){let a=u?.gemini?.[s];if(a){if(!await n.isModelAvailableForProvider("gemini",a))throw new Error(`Model override '${a}' is not available for gemini provider`);c=a}}if(!c)!!ge&&await n.isModelAvailableForProvider("gemini",ge)?(c=ge,G.log(`Using default model: ${ge}`)):ge&&G.log(`Default model ${ge} is not available, proceeding without model specification`);else if(c&&!u?.gemini?.[s]&&!await n.isModelAvailableForProvider("gemini",c))throw new Error(`Model '${c}' is not available for gemini provider`);me.env.GEMINI_API_KEY=y,me.env.GOOGLE_GEMINI_BASE_URL=p}else if(!me.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let f=[],d=[],m=[],h={},T=0,_=0,g,x,R=[se(o,"gemini"),...c?["--model",c]:[],"--yolo","--output-format","stream-json","-p",l],S=`${me.env.NVM_BIN}/node`;G.log(`Running ${S} ${R.join(" ")}`);let N=t.utils.run(S,R,{all:!0,env:me.env,cwd:o,idleTimeout:ee});N.stdin?.end();let C=ae(()=>{r?.({steps:f,duration:_}),i?.({steps:d,duration:_}),d=[]},250),O=(y,p)=>{y.id=T,T+=1,m.push(y),f.push(y),d.push(y),p||C.flush(),C(),p&&C.flush()},I=Jr.createInterface({input:N.all});I.on("error",y=>{G.error("Readline interface error",{error:y.message,stack:y.stack})});let v="",M=()=>{v&&O({message:v.trim()}),v=""};return I.on("line",y=>{let p=null;try{if(y.startsWith("[API Error")){let a=y.match(/\[api error: (.+?)]$/i)?.[1];p={type:"error",value:At(a,!1)?.error?.message||a||"Gemini encountered error"}}else 
p=JSON.parse(y)}catch{return}if(p)switch(["message","result"].includes(p.type)||M(),p.type){case"message":{p.role!=="user"&&p.content&&(v+=p.content);break}case"tool_use":{let a=zr[p.tool_name]??p.tool_name,w=p.parameters?.file_path,X=w&&Pe.relative(o,w),ve=p.parameters?.command,K={title:[a,X&&`\`${X}\``,ve&&`\`${ve}\``].filter(Boolean).join(" ")};h[p.tool_id]=K,C.flush();break}case"tool_result":{let a=h[p.tool_id];a&&(p.output&&(a.message=`\`\`\`
26
+ \`\`\``)}catch(o){M.error("Could not decode outputMsg",o,t.output)}return{title:i,message:n}};import Ce from"fs/promises";import Wt from"os";import Pe from"path";import me from"process";import Jr from"readline";var j=E("runner_gemini"),qt="Gemini CLI",ge="",Xr=({catchError:e,runCmd:t,error:r,result:i,runnerName:n})=>(j.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!i,resultLength:i?i.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),i?(j.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:i}:{error:"Process completed with errors but result was captured",result:i}):(j.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),zr={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"},Zr=async()=>{let e=Pe.join(Wt.homedir(),".gemini"),t=Pe.join(e,"settings.json");try{await Ce.mkdir(e,{recursive:!0});let r={};try{let i=await Ce.readFile(t,"utf-8");r=JSON.parse(i)}catch{j.log("Creating new Gemini CLI settings file")}r.general||(r.general={}),r.general.previewFeatures||(r.general.previewFeatures=!0),r.model||(r.model={}),r.model.compressionThreshold!==.3&&(r.model.compressionThreshold=.3),await Ce.writeFile(t,JSON.stringify(r,null,2),"utf-8"),j.log("Configured Gemini CLI settings (preview features and compression threshold)")}catch(r){j.error("Failed to ensure Gemini CLI settings",{error:r.message})}};async function ot({config:e,netlify:t,persistSteps:r=void 0,sendSteps:i=void 0,aiGateway:n,cwd:o=me.cwd()}){let{accountType:s,prompt:l,modelVersionOverrides:u}=e,{model:c}=e;if(await Zr(),n){let{token:h,url:p}=n;if(!h||!p)throw new Error("No token or url provided from AI Gateway");if(u?.gemini){let a=u?.gemini?.[s];if(a){if(!await n.isModelAvailableForProvider("gemini",a))throw new Error(`Model override '${a}' is not available for gemini provider`);c=a}}if(!c)!!ge&&await n.isModelAvailableForProvider("gemini",ge)?(c=ge,j.log(`Using default model: ${ge}`)):ge&&j.log(`Default model ${ge} is not available, proceeding without model specification`);else if(c&&!u?.gemini?.[s]&&!await n.isModelAvailableForProvider("gemini",c))throw new Error(`Model '${c}' is not available for gemini provider`);me.env.GEMINI_API_KEY=h,me.env.GOOGLE_GEMINI_BASE_URL=p}else if(!me.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let f=[],d=[],m=[],y={},T=0,_=0,g,x,R=[se(o,"gemini"),...c?["--model",c]:[],"--yolo","--output-format","stream-json","-p",l],S=`${me.env.NVM_BIN}/node`;j.log(`Running ${S} ${R.join(" ")}`);let N=t.utils.run(S,R,{all:!0,env:me.env,cwd:o,idleTimeout:ee});N.stdin?.end();let C=ae(()=>{r?.({steps:f,duration:_}),i?.({steps:d,duration:_}),d=[]},250),O=(h,p)=>{h.id=T,T+=1,m.push(h),f.push(h),d.push(h),p||C.flush(),C(),p&&C.flush()},I=Jr.createInterface({input:N.all});I.on("error",h=>{j.error("Readline interface error",{error:h.message,stack:h.stack})});let v="",D=()=>{v&&O({message:v.trim()}),v=""};return I.on("line",h=>{let p=null;try{if(h.startsWith("[API Error")){let a=h.match(/\[api error: (.+?)]$/i)?.[1];p={type:"error",value:Ct(a,!1)?.error?.message||a||"Gemini encountered error"}}else 
p=JSON.parse(h)}catch{return}if(p)switch(["message","result"].includes(p.type)||D(),p.type){case"message":{p.role!=="user"&&p.content&&(v+=p.content);break}case"tool_use":{let a=zr[p.tool_name]??p.tool_name,w=p.parameters?.file_path,z=w&&Pe.relative(o,w),ve=p.parameters?.command,K={title:[a,z&&`\`${z}\``,ve&&`\`${ve}\``].filter(Boolean).join(" ")};y[p.tool_id]=K,C.flush();break}case"tool_result":{let a=y[p.tool_id];a&&(p.output&&(a.message=`\`\`\`
27
27
  ${p.output.trim()}
28
- \`\`\``),O(a,!0));break}case"result":{_=p.stats?.duration_ms,p.status==="error"?x=p.error?.message:g=v.trim();break}case"error":{x=p.error;break}case"finished":break;default:{G.warn("Unhandled message type:",p.type);break}}}),await N.catch(y=>{({error:x,result:g}=Xr({catchError:y,runCmd:N,error:x,result:g,runnerName:"Gemini"}))}),I.close(),C.flush(),{steps:m,duration:_,result:await le({initialResult:g,agentName:Ht,hasError:!!x}),error:ue({error:x,agentName:Ht}),isRetryableError:ce(x)}}var Wt=async()=>{let e=Pe.join(qt.homedir(),".gemini");await Ce.rm(e,{recursive:!0,force:!0})};var Qr={codex:{runner:rt,clean:Bt},claude:{runner:tt,clean:Gt},gemini:{runner:nt,clean:Wt}},Kt=Qr;var Oe=E("init_stage"),Jt=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:i})=>await A(rn(),"init-stage",async n=>{let o=performance.now();n?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.runnerVersion":i||"unknown"});let s=Kt[e.runner];if(!s)throw n?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let l=on({apiToken:r});mt(l);let u=e.useGateway?await wt({netlify:l,config:e}):void 0;n?.setAttributes({"init.aiGateway.created":!!u});let c=5*1024,f=bt(async({steps:T=[],duration:_})=>{let g=T.map(x=>{let R=x.title?Je(q(x.title),c):void 0,S=x.message?Je(q(x.message)):void 0;return{...x,title:R,message:S}});T.length=0;try{return await B(e.id,e.sessionId,{steps:g,duration:_})}catch(x){Oe.error("persistSteps failed",{error:x?.message||x})}},t);Oe.info("Adding build files to stage");let d=await Ze();await Xe(d),V.env.NETLIFY_LOCAL_MODE||await nn();let m;e.hasRepo?e.sha?(m=e.sha,n?.setAttributes({"init.sha.source":"provided"})):(m=await Ft(),await _e(e.id,{sha:m}),n?.setAttributes({"init.sha.source":"current_commit"})):(m=await Dt(),n?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let h=performance.now()-o;return n?.setAttributes({"init.sha":m||"unknown","init.duration.ms":h,"init.status":"success"}),{aiGateway:u,context:l,persistSteps:f,runner:s,sha:m}}),nn=async()=>{let e="/usr/bin/git";try{e=en("which git").toString().trim()||e}catch{}let t="/tmp/netlify-git-wrapper",r=tn.join(t,"git"),i=`#!/bin/bash
28
+ \`\`\``),O(a,!0));break}case"result":{_=p.stats?.duration_ms,p.status==="error"?x=p.error?.message:g=v.trim();break}case"error":{x=p.error;break}case"finished":break;default:{j.warn("Unhandled message type:",p.type);break}}}),await N.catch(h=>{({error:x,result:g}=Xr({catchError:h,runCmd:N,error:x,result:g,runnerName:"Gemini"}))}),I.close(),C.flush(),{steps:m,duration:_,result:await le({initialResult:g,agentName:qt,hasError:!!x}),error:ue({error:x,agentName:qt}),isRetryableError:ce(x)}}var Kt=async()=>{let e=Pe.join(Wt.homedir(),".gemini");await Ce.rm(e,{recursive:!0,force:!0})};var Qr={codex:{runner:it,clean:Ht},claude:{runner:nt,clean:jt},gemini:{runner:ot,clean:Kt}},Vt=Qr;var Oe=E("init_stage"),Xt=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:i})=>await A(rn(),"init-stage",async n=>{let o=performance.now();n?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.runnerVersion":i||"unknown"});let s=Vt[e.runner];if(!s)throw n?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let l=on({apiToken:r});ht(l);let u=e.useGateway?await xt({netlify:l,config:e}):void 0;n?.setAttributes({"init.aiGateway.created":!!u});let c=5*1024,f=Nt(async({steps:T=[],duration:_})=>{let g=T.map(x=>{let R=x.title?Xe(q(x.title),c):void 0,S=x.message?Xe(q(x.message)):void 0;return{...x,title:R,message:S}});T.length=0;try{return await G(e.id,e.sessionId,{steps:g,duration:_})}catch(x){Oe.error("persistSteps failed",{error:x?.message||x})}},t);Oe.info("Adding build files to stage");let d=await Qe();await ze(d),J.env.NETLIFY_LOCAL_MODE||await nn();let m;e.hasRepo?e.sha?(m=e.sha,n?.setAttributes({"init.sha.source":"provided"})):(m=await Lt(),await Ee(e.id,{sha:m}),n?.setAttributes({"init.sha.source":"current_commit"})):(m=await kt(),n?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let y=performance.now()-o;return n?.setAttributes({"init.sha":m||"unknown","init.duration.ms":y,"init.status":"success"}),{aiGateway:u,context:l,persistSteps:f,runner:s,sha:m}}),nn=async()=>{let e="/usr/bin/git";try{e=en("which git").toString().trim()||e}catch{}let t="/tmp/netlify-git-wrapper",r=tn.join(t,"git"),i=`#!/bin/bash
29
29
  # Git wrapper that blocks add and commit commands
30
30
  # The deployment system handles staging and commits automatically
31
31
 
@@ -55,7 +55,7 @@ case "$1" in
55
55
  exec ${e} "$@"
56
56
  ;;
57
57
  esac
58
- `;try{await Vt.mkdir(t,{recursive:!0}),await Vt.writeFile(r,i,{mode:493}),V.env.PATH=`${t}:${V.env.PATH}`,V.env.NETLIFY_INTERNAL_GIT="0",Oe.info("Installed git wrapper to block add/commit commands")}catch(n){Oe.warn("Failed to install git wrapper",{error:n?.message||n})}},on=({apiToken:e})=>({constants:{NETLIFY_API_HOST:V.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||V.env.NETLIFY_API_TOKEN,SITE_ID:V.env.SITE_ID,FUNCTIONS_DIST:V.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:$}});import{getTracer as it}from"@netlify/otel";import sn from"crypto";import J from"fs/promises";import D from"path";import j from"process";var F=E("context"),an=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:j.env.NETLIFY_TEAM_ID,userId:j.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:j.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},ln=10,un=async e=>{let{name:t,ext:r}=D.parse(e),i=e,n=D.join(j.cwd(),k,i),o=0;for(;await cn(n);){if(o>=ln)throw new Error("Failed to generate context file");i=`${t}-${sn.randomUUID().slice(0,5)}${r}`,n=D.join(j.cwd(),k,i),o+=1}return i},cn=async e=>{try{return await J.access(e),!0}catch{return!1}},dn=async()=>{try{F.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return F.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(i=>i&&typeof i=="object"&&i.key==="catchall-consumer");return r?!r.contextScopes||typeof r.contextScopes!="object"?(F.warn("Catchall consumer missing or invalid contextScopes"),null):r:(F.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?F.warn("Netlify features context request timed out"):F.warn("Failed to fetch Netlify features context:",e.message),null}},pn=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let i=await r.text();return await J.writeFile(t,i,"utf-8"),!0}catch(r){return r.name==="AbortError"?F.warn(`Download timeout for ${e}`):F.warn(`Failed to download context file ${e}:`,r.message),!1}},$e=null,fn=async()=>{if($e)return $e;let e=await dn();if(!e)return[];let t=D.join(j.cwd(),k,qe);await J.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([n,o])=>{if(!o||typeof o!="object"||!o.endpoint||!o.scope)return F.warn(`Invalid scope data for ${n}, skipping...`),null;let s=`${n}.md`,l=D.join(t,s),u=D.join(k,qe,s);return F.log(`Downloading ${o.scope} context...`),await pn(o.endpoint,l)?(F.log(`Downloaded: ${u}`),{scope:o.scope,path:u,key:n}):null});return $e=(await Promise.all(r)).filter(n=>n!==null),$e},Xt=async({cliPath:e,netlify:t,config:r,buildErrorContext:i})=>{let n=an(t),o=await un(vt),s=D.join(j.cwd(),k);await J.mkdir(s,{recursive:!0});let l=D.join(k,o),u=D.join(j.cwd(),l),c=D.join(j.cwd(),k,Q);try{await J.unlink(c),F.log(`Deleted old results file: ${c}`)}catch{}let f=i?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
58
+ `;try{await Jt.mkdir(t,{recursive:!0}),await Jt.writeFile(r,i,{mode:493}),J.env.PATH=`${t}:${J.env.PATH}`,J.env.NETLIFY_INTERNAL_GIT="0",Oe.info("Installed git wrapper to block add/commit commands")}catch(n){Oe.warn("Failed to install git wrapper",{error:n?.message||n})}},on=({apiToken:e})=>({constants:{NETLIFY_API_HOST:J.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||J.env.NETLIFY_API_TOKEN,SITE_ID:J.env.SITE_ID,FUNCTIONS_DIST:J.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:$}});import{getTracer as st}from"@netlify/otel";import sn from"crypto";import X from"fs/promises";import L from"path";import Y from"process";var F=E("context"),an=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:Y.env.NETLIFY_TEAM_ID,userId:Y.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:Y.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},ln=10,un=async e=>{let{name:t,ext:r}=L.parse(e),i=e,n=L.join(Y.cwd(),k,i),o=0;for(;await cn(n);){if(o>=ln)throw new Error("Failed to generate context file");i=`${t}-${sn.randomUUID().slice(0,5)}${r}`,n=L.join(Y.cwd(),k,i),o+=1}return i},cn=async e=>{try{return await X.access(e),!0}catch{return!1}},dn=async()=>{try{F.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return F.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(i=>i&&typeof i=="object"&&i.key==="catchall-consumer");return r?!r.contextScopes||typeof r.contextScopes!="object"?(F.warn("Catchall consumer missing or invalid contextScopes"),null):r:(F.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?F.warn("Netlify features context request timed out"):F.warn("Failed to fetch Netlify features context:",e.message),null}},pn=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let i=await r.text();return await X.writeFile(t,i,"utf-8"),!0}catch(r){return r.name==="AbortError"?F.warn(`Download timeout for ${e}`):F.warn(`Failed to download context file ${e}:`,r.message),!1}},$e=null,fn=async()=>{if($e)return $e;let e=await dn();if(!e)return[];let t=L.join(Y.cwd(),k,We);await X.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([n,o])=>{if(!o||typeof o!="object"||!o.endpoint||!o.scope)return F.warn(`Invalid scope data for ${n}, skipping...`),null;let s=`${n}.md`,l=L.join(t,s),u=L.join(k,We,s);return F.log(`Downloading ${o.scope} context...`),await pn(o.endpoint,l)?(F.log(`Downloaded: ${u}`),{scope:o.scope,path:u,key:n}):null});return $e=(await Promise.all(r)).filter(n=>n!==null),$e},zt=async({cliPath:e,netlify:t,config:r,buildErrorContext:i})=>{let n=an(t),o=await un(St),s=L.join(Y.cwd(),k);await X.mkdir(s,{recursive:!0});let l=L.join(k,o),u=L.join(Y.cwd(),l),c=L.join(Y.cwd(),k,Q);try{await X.unlink(c),F.log(`Deleted old results file: ${c}`)}catch{}let f=i?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
59
59
  Your task is to analyze and fix the build errors.
60
60
  Don't apply techniques of reverting changes. Apply fixes related to errors.
61
61
  Don't try to run build by yourself. Just fix the errors.
@@ -68,7 +68,7 @@ ${r.siteContext.filter(g=>g.site_context).map(g=>typeof g.site_context=="string"
68
68
 
69
69
  `)}
70
70
  </project_rules>
71
- `);let m="";if(r.sessionHistoryContext?.length){let g=D.join(j.cwd(),k,He);await J.mkdir(g,{recursive:!0});let x=await Promise.all(r.sessionHistoryContext.map(async(R,S)=>{let N=S+1,C=`attempt-${N}.md`,O=D.join(g,C),I=D.join(k,He,C),v=`# Task History - Attempt ${N}
71
+ `);let m="";if(r.sessionHistoryContext?.length){let g=L.join(Y.cwd(),k,qe);await X.mkdir(g,{recursive:!0});let x=await Promise.all(r.sessionHistoryContext.map(async(R,S)=>{let N=S+1,C=`attempt-${N}.md`,O=L.join(g,C),I=L.join(k,qe,C),v=`# Task History - Attempt ${N}
72
72
 
73
73
  ## Request - what the user asked for
74
74
  ${R.request}
@@ -78,7 +78,7 @@ ${R.request}
78
78
  ## Response - what the agent replied with after its work
79
79
 
80
80
  ${R.response}
81
- `;return await J.writeFile(O,v,"utf-8"),F.log(`Created history file: ${I}`),I}));m+=`
81
+ `;return await X.writeFile(O,v,"utf-8"),F.log(`Created history file: ${I}`),I}));m+=`
82
82
  <session_history_context>
83
83
  History of prior work on this task.
84
84
  You MUST review ALL of the files below as context to understand the context of previous attempts. Use this information to continue the discussion appropriately.
@@ -87,12 +87,12 @@ ${R.response}
87
87
  `)}
88
88
 
89
89
  </session_history_context>
90
- `}let h=await fn(),T="";h.length>0&&(T=`
90
+ `}let y=await fn(),T="";y.length>0&&(T=`
91
91
  <netlify_features_context>
92
92
  If the user request is explicitly related to a specific Netlify feature (e.g., Netlify Forms, Netlify Functions, etc.), you MUST review the relevant documentation below in addition to reviewing the project files.
93
93
  DO NOT force the use of any Netlify feature if the user request does not explicitly require it or if the project has alternative implementations in place already.
94
94
 
95
- ${h.map(g=>`- **${g.scope}**: ${g.path}`).join(`
95
+ ${y.map(g=>`- **${g.scope}**: ${g.path}`).join(`
96
96
  `)}
97
97
 
98
98
  Refer to these files when working with specific Netlify features.
@@ -118,8 +118,8 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
118
118
  - If the user asks for "a plan", "just planning", or similar (without asking for implementation) you may use plan mode to explore the codebase in read-only mode, design your implementation approach and write the complete plan to ${s}/${Q}. Stop there, do not wait for approval and do not implement unless explicitly asked.
119
119
  </responses>
120
120
  <attachements>
121
- - for requests that require work with attachments or assets, take into account that uploaded attachments are stored in ${s}/${We} folder
122
- - move assets from ${s}/${We} folder to the project assets folder if they are referenced in a code or applied changes
121
+ - for requests that require work with attachments or assets, take into account that uploaded attachments are stored in ${s}/${Ke} folder
122
+ - move assets from ${s}/${Ke} folder to the project assets folder if they are referenced in a code or applied changes
123
123
  </attachements>
124
124
  ${d}
125
125
  </requirements>
@@ -133,7 +133,7 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
133
133
  - Netlify Functions directory: ${n.functionsDir}
134
134
  </metadata>
135
135
  <environment>
136
- - Node Version: ${j.version||"unknown"}
136
+ - Node Version: ${Y.version||"unknown"}
137
137
  - Environment variables are set globally (e.g. \`echo $VARIABLE_NAME\` can be used to check if a var is set).
138
138
  - 'netlify-cli' npm package is already available as a global package. Don't try to install it again
139
139
  - If you need to start a local development server in order to fulfill the request, try using the Netlify CLI over by running the shell command '${e} dev'. This will start a local HTTP server on port 8888, including live-reloading of any changes and, most critically, it offers local emulation for all Netlify features.
@@ -146,7 +146,7 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
146
146
  </extra_context>
147
147
 
148
148
  ${m}
149
- `;return await J.writeFile(u,_,"utf-8"),F.log(`Generated agent context document at: ${u}`),_.length>5e5&&(_=`
149
+ `;return await X.writeFile(u,_,"utf-8"),F.log(`Generated agent context document at: ${u}`),_.length>5e5&&(_=`
150
150
  You're an AI agent designed to assist with tasks related to a Netlify project. Please review, understand, and use the context provided to complete the user's request as needed.
151
151
 
152
152
  <request>
@@ -157,19 +157,19 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
157
157
  </request>
158
158
 
159
159
  Use the following file for the complete context of the ask, the environment, and what's available. ${u} You MUST READ ALL OF IT. Make sure to read it first. Never cite or paraphrase private context.
160
- `),_};var mn=E("prompt"),zt=async({cliPath:e,config:t,netlify:r,buildErrorContext:i})=>{let n=await Xt({cliPath:e,config:t,netlify:r,buildErrorContext:i});return process.env.AGENT_RUNNER_DEBUG&&mn.log("Contextful Prompt:",n),{prompt:n}};var Fe=E("inference_stage"),Zt=5,De=async e=>{let{cliPath:t,config:r,context:i,buildErrors:n,runner:o,persistSteps:s,aiGateway:l,attempt:u,contextPrefix:c,priorAgentSessionId:f}=e;Fe.log(`Running inference stage, attempt ${u} of ${Zt}`);let d=await A(it(),"inference-stage",async m=>{m?.setAttributes({"inference.attempt":u||1}),Tt();let{prompt:h}=await A(it(),"compose-prompt",async()=>await zt({cliPath:t,config:r,buildErrorContext:gn(n),netlify:i})),T=`
160
+ `),_};var mn=E("prompt"),Zt=async({cliPath:e,config:t,netlify:r,buildErrorContext:i})=>{let n=await zt({cliPath:e,config:t,netlify:r,buildErrorContext:i});return process.env.AGENT_RUNNER_DEBUG&&mn.log("Contextful Prompt:",n),{prompt:n}};var Fe=E("inference_stage"),Qt=5,De=async e=>{let{cliPath:t,config:r,context:i,buildErrors:n,runner:o,persistSteps:s,aiGateway:l,attempt:u,contextPrefix:c,priorAgentSessionId:f}=e;Fe.log(`Running inference stage, attempt ${u} of ${Qt}`);let d=await A(st(),"inference-stage",async m=>{m?.setAttributes({"inference.attempt":u||1}),It();let{prompt:y}=await A(st(),"compose-prompt",async()=>await Zt({cliPath:t,config:r,buildErrorContext:gn(n),netlify:i})),T=`
161
161
  ${c||""}
162
- ${h}
163
- `.trim(),_={...r,prompt:T},g=await A(it(),`run-${r.runner}`,async()=>await o({aiGateway:l,config:_,netlify:i,persistSteps:s,continueSession:!!(u&&u>1),priorAgentSessionId:f}));return g.result&&(g.result=q(g.result)),g.error&&(g.error=q(g.error)),await s.flush(),g});if(d.error){if(Fe.error("Runner failed",{stepsCount:d.steps.length,duration:d.duration,error:d.error,isRetryableError:d.isRetryableError,attempt:u||1,agentSessionId:d.agentSessionId}),d.isRetryableError&&(!u||u<Zt))return Fe.log("Retrying inference stage"),await new Promise(h=>setTimeout(h,5e3)),{runnerResult:(await De({...e,attempt:(u||1)+1,priorAgentSessionId:d.agentSessionId,contextPrefix:d.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw Fe.log("Do not retry inference stage"),new Error(d.error)}return{runnerResult:d}},gn=e=>!e||e.length===0?"":`
162
+ ${y}
163
+ `.trim(),_={...r,prompt:T},g=await A(st(),`run-${r.runner}`,async()=>await o({aiGateway:l,config:_,netlify:i,persistSteps:s,continueSession:!!(u&&u>1),priorAgentSessionId:f}));return g.result&&(g.result=q(g.result)),g.error&&(g.error=q(g.error)),await s.flush(),g});if(d.error){if(Fe.error("Runner failed",{stepsCount:d.steps.length,duration:d.duration,error:d.error,isRetryableError:d.isRetryableError,attempt:u||1,agentSessionId:d.agentSessionId}),d.isRetryableError&&(!u||u<Qt))return Fe.log("Retrying inference stage"),await new Promise(y=>setTimeout(y,5e3)),{runnerResult:(await De({...e,attempt:(u||1)+1,priorAgentSessionId:d.agentSessionId,contextPrefix:d.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw Fe.log("Do not retry inference stage"),new Error(d.error)}return{runnerResult:d}},gn=e=>!e||e.length===0?"":`
164
164
  Deploy failed failed. Here are the errors to review on the latest build:
165
165
 
166
166
  Below are all of the logs with potential issues that we extracted. Some of them may be false positives, discern them carefully and ensure fixes are relevant.
167
167
 
168
168
  ${e.pop()}
169
- `;import _n from"process";import{getTracer as ot}from"@netlify/otel";import{getTracer as hn}from"@netlify/otel";var Te=E("deploy"),Qt=async e=>await A(hn(),"create-preview-deploy",async t=>yn(e,t)),yn=async({netlify:e,hasRepo:t,skipBuild:r,message:i="Agent Preview",deploySubdomain:n,cliPath:o,filter:s,prodDeploy:l},u)=>{try{let c=["deploy","--message",`"${i}"`,"--json","--verbose"];l?c.push("--prod"):c.push("--draft"),t||(Te.log("Deploy: Uploading source zip"),c.push("--upload-source-zip")),n&&c.push("--alias",n),s&&c.push("--filter",s),r?(Te.log("Deploy: Skipping build"),c.push("--no-build")):c.push("--context","deploy-preview");let f=o||"netlify";Te.log(`Running: ${f} ${c.join(" ")}`),u?.setAttributes({cmd:f,args:c});let d=await e.utils.run(f,c,{stdio:["ignore","pipe","pipe"]}),m=JSON.parse(String(d.stdout??"").trim());u?.setAttributes({success:!0,deployId:m.deploy_id,deployUrl:m.deploy_url,siteId:m.site_id}),Te.log(`
170
- Preview deploy created successfully:`,{deployId:m.deploy_id,deployUrl:m.deploy_url,siteId:m.site_id});let h={deployId:m.deploy_id,previewUrl:m.deploy_url,logsUrl:m.logs,siteId:m.site_id};return t||(h.sourceZipFilename=m.source_zip_filename),h}catch(c){throw Te.error("Failed to create preview deploy via CLI:",c),u?.setAttributes({success:!1,error:c.message}),c}};var er=e=>["dtn-prod-iteration","create"].includes(e);var xe=E("deploy_stage"),st=async e=>await A(ot(),"run-deploy-stage",async()=>En(e)),En=async({cliPath:e,config:t,context:r,result:i,filter:n,isRetry:o})=>{let s=await A(ot(),"get-runner-diffs",async()=>await Ot({config:t,isRetry:o}));if(xe.info("Resolved git",{hasChanges:s.hasChanges,ignored:s.ignored??[]}),!s.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:l,resultDiff:u,diffBinary:c,resultDiffBinary:f}=s,d=!0;xe.log("Preview deploy condition check:",{resultUndefined:i===void 0,resultType:typeof i,hasChanges:d,wouldCreatePreview:i!==void 0&&d});let m=null;if(i!==void 0&&d)try{let h;try{let T=await A(ot(),"get-runner-session",async()=>await ht(t.id,t.sessionId));T?.title&&(h=T.title)}catch(T){xe.warn("Failed to fetch session title, using fallback message:",T.message)}await B(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),m=await Qt({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:h,skipBuild:!1,deploySubdomain:Nt(t.id,_n.env.SITE_NAME),filter:n,prodDeploy:er(t.mode)})}catch(h){return xe.warn("Failed to create preview deploy (continuing with agent run):",h),{diff:l,resultDiff:u,hasChanges:d,previewInfo:null,diffBinary:c,resultDiffBinary:f,deployError:h instanceof Error?h.message:String(h)}}return xe.log("Git status",{hasDiff:!!l,hasChanges:d}),{diff:l,resultDiff:u,hasChanges:d,previewInfo:m,diffBinary:c,resultDiffBinary:f}};import{getTracer as Ue}from"@netlify/otel";async function tr(e,t){let{maxRetries:r,baseDelay:i,onRetry:n}=t,o;for(let s=1;s<=r;s++)try{return await e()}catch(l){if(o=l,s===r)throw o;n&&n(s,o),await new Promise(u=>setTimeout(u,i*s))}throw o}var ke=class{scanDiffForForms(t){let r=[],i=null,n=[],o=t.split(`
169
+ `;import _n from"process";import{getTracer as at}from"@netlify/otel";import{getTracer as hn}from"@netlify/otel";var xe=E("deploy"),er=async e=>await A(hn(),"create-preview-deploy",async t=>yn(e,t)),yn=async({netlify:e,hasRepo:t,skipBuild:r,message:i="Agent Preview",deploySubdomain:n,cliPath:o,filter:s,prodDeploy:l},u)=>{try{let c=["deploy","--message",`"${i}"`,"--json","--verbose"];l?c.push("--prod"):c.push("--draft"),t||(xe.log("Deploy: Uploading source zip"),c.push("--upload-source-zip")),n&&c.push("--alias",n),s&&c.push("--filter",s),r?(xe.log("Deploy: Skipping build"),c.push("--no-build")):c.push("--context","deploy-preview");let f=o||"netlify";xe.log(`Running: ${f} ${c.join(" ")}`),u?.setAttributes({cmd:f,args:c});let d=await e.utils.run(f,c,{stdio:["ignore","pipe","pipe"]}),m=JSON.parse(String(d.stdout??"").trim());u?.setAttributes({success:!0,deployId:m.deploy_id,deployUrl:m.deploy_url,siteId:m.site_id}),xe.log(`
170
+ Preview deploy created successfully:`,{deployId:m.deploy_id,deployUrl:m.deploy_url,siteId:m.site_id});let y={deployId:m.deploy_id,previewUrl:m.deploy_url,logsUrl:m.logs,siteId:m.site_id};return t||(y.sourceZipFilename=m.source_zip_filename),y}catch(c){throw xe.error("Failed to create preview deploy via CLI:",c),u?.setAttributes({success:!1,error:c.message}),c}};var tr=e=>["dtn-prod-iteration","create"].includes(e);var Ie=E("deploy_stage"),Le=async e=>await A(at(),"run-deploy-stage",async()=>En(e)),En=async({cliPath:e,config:t,context:r,result:i,filter:n,isRetry:o})=>{let s=await A(at(),"get-runner-diffs",async()=>await Ft({config:t,isRetry:o}));if(Ie.info("Resolved git",{hasChanges:s.hasChanges,ignored:s.ignored??[]}),!s.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:l,resultDiff:u,diffBinary:c,resultDiffBinary:f}=s,d=!0;Ie.log("Preview deploy condition check:",{resultUndefined:i===void 0,resultType:typeof i,hasChanges:d,wouldCreatePreview:i!==void 0&&d});let m=null;if(i!==void 0&&d)try{let y;try{let T=await A(at(),"get-runner-session",async()=>await _t(t.id,t.sessionId));T?.title&&(y=T.title)}catch(T){Ie.warn("Failed to fetch session title, using fallback message:",T.message)}await G(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),m=await er({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:y,skipBuild:!1,deploySubdomain:Pt(t.id,_n.env.SITE_NAME),filter:n,prodDeploy:tr(t.mode)})}catch(y){return Ie.warn("Failed to create preview deploy (continuing with agent run):",y),{diff:l,resultDiff:u,hasChanges:d,previewInfo:null,diffBinary:c,resultDiffBinary:f,deployError:y instanceof Error?y.message:String(y)}}return Ie.log("Git status",{hasDiff:!!l,hasChanges:d}),{diff:l,resultDiff:u,hasChanges:d,previewInfo:m,diffBinary:c,resultDiffBinary:f}};import{getTracer as Me}from"@netlify/otel";async function rr(e,t){let{maxRetries:r,baseDelay:i,onRetry:n}=t,o;for(let s=1;s<=r;s++)try{return await e()}catch(l){if(o=l,s===r)throw o;n&&n(s,o),await new Promise(u=>setTimeout(u,i*s))}throw o}var ke=class{scanDiffForForms(t){let r=[],i=null,n=[],o=t.split(`
171
171
  `);for(let s of o)if(s.startsWith("diff --git")){if(i&&n.length>0){let u=this.containsNetlifyForm(n,i);u&&r.push(u)}let l=s.split(" ");i=l[l.length-1].replace(/^b\//,""),n=[]}else s.startsWith("+")&&!s.startsWith("+++")&&n.push(s.slice(1));if(i&&n.length>0){let s=this.containsNetlifyForm(n,i);s&&r.push(s)}return{detected:r.length>0,matches:r}}containsNetlifyForm(t,r){let i=t.join(`
172
- `),n=[{pattern:/<[\w-]*form[\s\S]*?(data-)?netlify/i,name:"standard form element"},{pattern:/<[A-Z][\w]*[\s\S]*?component\s*=\s*["']form["'][\s\S]*?(data-)?netlify/i,name:"component with form prop"}];for(let{pattern:o,name:s}of n){let l=i.match(o);if(l){let u=l.index||0,c=Math.max(0,u-20),f=Math.min(i.length,u+l[0].length+20),d=i.slice(c,f).trim();return d=d.replace(/\s+/g," "),d.length>100&&(d=d.slice(0,97)+"..."),{file:r,snippet:`[${s}] ${d}`}}}return null}};var Le=class{scanDiffForIdentity(t){let r=[],i=null,n=[],o=t.split(`
172
+ `),n=[{pattern:/<[\w-]*form[\s\S]*?(data-)?netlify/i,name:"standard form element"},{pattern:/<[A-Z][\w]*[\s\S]*?component\s*=\s*["']form["'][\s\S]*?(data-)?netlify/i,name:"component with form prop"}];for(let{pattern:o,name:s}of n){let l=i.match(o);if(l){let u=l.index||0,c=Math.max(0,u-20),f=Math.min(i.length,u+l[0].length+20),d=i.slice(c,f).trim();return d=d.replace(/\s+/g," "),d.length>100&&(d=d.slice(0,97)+"..."),{file:r,snippet:`[${s}] ${d}`}}}return null}};var Ue=class{scanDiffForIdentity(t){let r=[],i=null,n=[],o=t.split(`
173
173
  `);for(let s of o)if(s.startsWith("diff --git")){if(i&&n.length>0){let u=this.containsNetlifyIdentity(n,i);u&&r.push(u)}let l=s.split(" ");i=l[l.length-1].replace(/^b\//,""),n=[]}else s.startsWith("+")&&!s.startsWith("+++")&&n.push(s.slice(1));if(i&&n.length>0){let s=this.containsNetlifyIdentity(n,i);s&&r.push(s)}return{detected:r.length>0,matches:r}}containsNetlifyIdentity(t,r){let i=t.join(`
174
- `),n=[{pattern:/data-netlify-identity-(button|menu)/i,name:"identity widget element"},{pattern:/netlify-identity-widget/i,name:"identity widget import"},{pattern:/new\s+GoTrue\s*\(/i,name:"GoTrue client"},{pattern:/(import\s+.*GoTrue|require\s*\(\s*['"]gotrue-js['"]\s*\))/i,name:"GoTrue import"},{pattern:/netlifyIdentity\s*\.\s*(init|on|off|open|close|login|signup|logout|refresh|currentUser)/i,name:"identity widget API"},{pattern:/['"`]\/?\.netlify\/identity/i,name:"identity endpoint"}];for(let{pattern:o,name:s}of n){let l=i.match(o);if(l){let u=l.index||0,c=Math.max(0,u-20),f=Math.min(i.length,u+l[0].length+20),d=i.slice(c,f).trim();return d=d.replace(/\s+/g," "),d.length>100&&(d=d.slice(0,97)+"..."),{file:r,snippet:`[${s}] ${d}`}}}return null}};var b=E("cleanup_stage"),rr=async e=>await A(Ue(),"cleanup-stage",async()=>wn(e)),at=1024*1024*10,wn=async({config:e,diff:t,result:r,duration:i,resultDiff:n,diffBinary:o,resultDiffBinary:s,previewInfo:l})=>{let u={result:r||"Done",duration:i};l&&l.deployId&&(u.deploy_id=l.deployId),l&&l.sourceZipFilename&&(u.result_zip_file_name=l.sourceZipFilename);let c=t||o||n||s;if(c&&(u.diff_produced=!0),c){let f=new ke,d=t||o||"",m=f.scanDiffForForms(d);m.detected?(b.log("Detected Netlify form(s) in diff:"),m.matches.forEach(({file:_,snippet:g})=>{b.log(` - ${_}: ${g}`)}),u.has_netlify_form=!0):b.log("Did not detect Netlify form(s) in diff");let T=new Le().scanDiffForIdentity(d);T.detected?(b.log("Detected Netlify Identity usage in diff:"),T.matches.forEach(({file:_,snippet:g})=>{b.log(` - ${_}: ${g}`)}),u.has_netlify_identity=!0):b.log("Did not detect Netlify Identity usage in diff")}if(c)try{b.log("Getting pre-signed URLs for diff upload");let f=await _t(e.id,e.sessionId),d=[];(t||o)&&d.push(Ye(f.result.upload_url,o||t).then(()=>{u.result_diff_s3_key=f.result.s3_key,b.log("Successfully uploaded result_diff to S3")})),(n||s)&&d.push(Ye(f.cumulative.upload_url,s||n).then(()=>{u.cumulative_diff_s3_key=f.cumulative.s3_key,b.log("Successfully uploaded cumulative_diff to S3")})),b.log(`Uploading ${d.length} diff(s) to S3 in parallel`),await Promise.all(d),(n||s)&&(b.log("Updating agent runner with cumulative diff S3 key"),await A(Ue(),"update-runner",async()=>{await _e(e.id,{result_diff_s3_key:f.cumulative.s3_key})}))}catch(f){b.error("S3 upload failed, falling back to inline diffs:",f);let d=Buffer.byteLength(t||o||""),m=Buffer.byteLength(s||n||"");if(d>at||m>at){let h=`Diffs exceed maximum inline size of ${at} bytes.`;throw b.error(h),new Error(h)}u.result_diff=t,u.result_diff_binary=o,(n||s)&&(u.cumulative_diff=n,u.cumulative_diff_binary=s,b.log("Updating agent runner with inline diffs (fallback)"),await A(Ue(),"update-runner",async()=>{await _e(e.id,{result_diff:n,result_diff_binary:s})}))}else b.log("No diffs to upload");return b.log("Updated agent runner with result"),await tr(async()=>await A(Ue(),"update-runner-session",()=>B(e.id,e.sessionId,u)),{maxRetries:3,baseDelay:1e3,onRetry:(f,d)=>{b.error(`Error updating agent runner session (attempt ${f}):`,d),b.log("Retrying...")}}),b.log("Finished updating agent runner with result"),{sessionUpdate:u}};import{getTracer as nr,shutdownTracers as xn,withActiveSpan as ir}from"@netlify/otel";var In=Tn(import.meta.url),or=In("../package.json"),Ie=E("pipeline_index"),Me=3,xs=async({config:e,apiToken:t,cliPath:r="netlify",cwd:i,filter:n,tracing:o={}})=>{let s,{withStageTimer:l}=xt(Z.timeUnits.hours(4)),u=await dt(or.version,e.id,o);try{await 
ir(nr(),"run-pipeline",{},u,async()=>{let{aiGateway:c,context:f,persistSteps:d,runner:m,sha:h}=await l("init",()=>Jt({config:e,apiToken:t,cliPath:r,cwd:i,filter:n,runnerVersion:or.version}),Z.timeUnits.minutes(10));s=m.clean,e.sha=h;let{runnerResult:T}=await l("inference",()=>De({cliPath:r,config:e,context:f,runner:m.runner,persistSteps:d,aiGateway:c}));await B(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let _=await l("deploy",()=>st({cliPath:r,config:e,context:f,result:T.result,filter:n,isRetry:!1})),g=T,x=[];if(_.hasChanges&&_.deployError){x.push(pt(_.deployError));let I=1,v=!1;for(;I<=Me&&!_.previewInfo&&!v;)Ie.log(`Deploy attempt had errors. Retrying. ${I}/${Me}`),await ir(nr(),"deploy-stage",async M=>{M?.setAttributes({"stage.attempt":I});let y;try{y=(await l(`inference-retry-${I}`,()=>De({cliPath:r,config:e,context:f,runner:m.runner,persistSteps:d,aiGateway:c,buildErrors:x,priorAgentSessionId:T.agentSessionId}))).runnerResult}catch(p){Ie.warn(`Inference retry ${I} failed, stopping deploy retries:`,p),v=!0;return}g={...y,steps:[...g.steps||[],...y.steps||[]],duration:(g.duration||0)+(y.duration||0)},_=await l(`deploy-retry-${I}`,()=>st({cliPath:r,config:e,context:f,result:y.result,filter:n,isRetry:!0})),_.deployError&&x.push(_.deployError),I++});I>Me&&!_.previewInfo&&console.warn(`Deploy validation failed after ${Me} attempts`)}let{diff:R,resultDiff:S,previewInfo:N,diffBinary:C,resultDiffBinary:O}=_;await l("cleanup",()=>rr({config:e,diff:R,result:g.result,duration:g.duration,resultDiff:S,diffBinary:C,resultDiffBinary:O,previewInfo:N}),Z.timeUnits.minutes(10)),process.env.NETLIFY_LOCAL_MODE||(await s?.(),await kt())})}catch(c){if(ft(c)){Ie.info("Agent run terminated gracefully",{statusCode:c.statusCode,reason:c.message}),await s?.();try{await B(e.id,e.sessionId,{result:c.userMessage,state:"error"})}catch{Ie.info("Could not update session (site may have been deleted)")}return}Ie.error("Got error while running pipeline",c),await s?.();let f=c instanceof Error&&c.message;throw await B(e.id,e.sessionId,{result:f||"Encountered error when running agent",state:"error"}),c}finally{await xn()}};export{xs as runPipeline};
174
+ `),n=[{pattern:/data-netlify-identity-(button|menu)/i,name:"identity widget element"},{pattern:/netlify-identity-widget/i,name:"identity widget import"},{pattern:/new\s+GoTrue\s*\(/i,name:"GoTrue client"},{pattern:/(import\s+.*GoTrue|require\s*\(\s*['"]gotrue-js['"]\s*\))/i,name:"GoTrue import"},{pattern:/netlifyIdentity\s*\.\s*(init|on|off|open|close|login|signup|logout|refresh|currentUser)/i,name:"identity widget API"},{pattern:/['"`]\/?\.netlify\/identity/i,name:"identity endpoint"}];for(let{pattern:o,name:s}of n){let l=i.match(o);if(l){let u=l.index||0,c=Math.max(0,u-20),f=Math.min(i.length,u+l[0].length+20),d=i.slice(c,f).trim();return d=d.replace(/\s+/g," "),d.length>100&&(d=d.slice(0,97)+"..."),{file:r,snippet:`[${s}] ${d}`}}}return null}};var b=E("cleanup_stage"),ut=async e=>await A(Me(),"cleanup-stage",async()=>wn(e)),lt=1024*1024*10,wn=async({config:e,diff:t,result:r,duration:i,resultDiff:n,diffBinary:o,resultDiffBinary:s,previewInfo:l})=>{let u={result:r||"Done",duration:i};l&&l.deployId&&(u.deploy_id=l.deployId),l&&l.sourceZipFilename&&(u.result_zip_file_name=l.sourceZipFilename);let c=t||o||n||s;if(c&&(u.diff_produced=!0),c){let f=new ke,d=t||o||"",m=f.scanDiffForForms(d);m.detected?(b.log("Detected Netlify form(s) in diff:"),m.matches.forEach(({file:_,snippet:g})=>{b.log(` - ${_}: ${g}`)}),u.has_netlify_form=!0):b.log("Did not detect Netlify form(s) in diff");let T=new Ue().scanDiffForIdentity(d);T.detected?(b.log("Detected Netlify Identity usage in diff:"),T.matches.forEach(({file:_,snippet:g})=>{b.log(` - ${_}: ${g}`)}),u.has_netlify_identity=!0):b.log("Did not detect Netlify Identity usage in diff")}if(c)try{b.log("Getting pre-signed URLs for diff upload");let f=await wt(e.id,e.sessionId),d=[];(t||o)&&d.push(Be(f.result.upload_url,o||t).then(()=>{u.result_diff_s3_key=f.result.s3_key,b.log("Successfully uploaded result_diff to S3")})),(n||s)&&d.push(Be(f.cumulative.upload_url,s||n).then(()=>{u.cumulative_diff_s3_key=f.cumulative.s3_key,b.log("Successfully uploaded cumulative_diff to S3")})),b.log(`Uploading ${d.length} diff(s) to S3 in parallel`),await Promise.all(d),(n||s)&&(b.log("Updating agent runner with cumulative diff S3 key"),await A(Me(),"update-runner",async()=>{await Ee(e.id,{result_diff_s3_key:f.cumulative.s3_key})}))}catch(f){b.error("S3 upload failed, falling back to inline diffs:",f);let d=Buffer.byteLength(t||o||""),m=Buffer.byteLength(s||n||"");if(d>lt||m>lt){let y=`Diffs exceed maximum inline size of ${lt} bytes.`;throw b.error(y),new Error(y)}u.result_diff=t,u.result_diff_binary=o,(n||s)&&(u.cumulative_diff=n,u.cumulative_diff_binary=s,b.log("Updating agent runner with inline diffs (fallback)"),await A(Me(),"update-runner",async()=>{await Ee(e.id,{result_diff:n,result_diff_binary:s})}))}else b.log("No diffs to upload");return b.log("Updated agent runner with result"),await rr(async()=>await A(Me(),"update-runner-session",()=>G(e.id,e.sessionId,u)),{maxRetries:3,baseDelay:1e3,onRetry:(f,d)=>{b.error(`Error updating agent runner session (attempt ${f}):`,d),b.log("Retrying...")}}),b.log("Finished updating agent runner with result"),{sessionUpdate:u}};import{getTracer as nr,shutdownTracers as xn,withActiveSpan as ir}from"@netlify/otel";var In=Tn(import.meta.url),or=In("../package.json"),he=E("pipeline_index"),Ge=3,xs=async({config:e,apiToken:t,cliPath:r="netlify",cwd:i,filter:n,tracing:o={}})=>{let s,{withStageTimer:l}=vt(V.timeUnits.hours(4)),u=await ft(or.version,e.id,o);try{await 
ir(nr(),"run-pipeline",{},u,async()=>{let{aiGateway:c,context:f,persistSteps:d,runner:m,sha:y}=await l("init",()=>Xt({config:e,apiToken:t,cliPath:r,cwd:i,filter:n,runnerVersion:or.version}),V.timeUnits.minutes(10));if(s=m.clean,e.sha=y,e.mode==="redeploy"){await G(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let I=await l("deploy",()=>Le({cliPath:r,config:e,context:f,result:"Redeploy completed",filter:n,isRetry:!1}));I.deployError&&he.warn(`Redeploy deploy failed: ${I.deployError}`);let{diff:v,resultDiff:D,previewInfo:h,diffBinary:p,resultDiffBinary:a}=I;await l("cleanup",()=>ut({config:e,diff:v,result:"Redeploy completed",duration:0,resultDiff:D,diffBinary:p,resultDiffBinary:a,previewInfo:h}),V.timeUnits.minutes(10)),process.env.NETLIFY_LOCAL_MODE||(await s?.(),await et());return}let{runnerResult:T}=await l("inference",()=>De({cliPath:r,config:e,context:f,runner:m.runner,persistSteps:d,aiGateway:c}));await G(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let _=await l("deploy",()=>Le({cliPath:r,config:e,context:f,result:T.result,filter:n,isRetry:!1})),g=T,x=[];if(_.hasChanges&&_.deployError){x.push(mt(_.deployError));let I=1,v=!1;for(;I<=Ge&&!_.previewInfo&&!v;)he.log(`Deploy attempt had errors. Retrying. ${I}/${Ge}`),await ir(nr(),"deploy-stage",async D=>{D?.setAttributes({"stage.attempt":I});let h;try{h=(await l(`inference-retry-${I}`,()=>De({cliPath:r,config:e,context:f,runner:m.runner,persistSteps:d,aiGateway:c,buildErrors:x,priorAgentSessionId:T.agentSessionId}))).runnerResult}catch(p){he.warn(`Inference retry ${I} failed, stopping deploy retries:`,p),v=!0;return}g={...h,steps:[...g.steps||[],...h.steps||[]],duration:(g.duration||0)+(h.duration||0)},_=await l(`deploy-retry-${I}`,()=>Le({cliPath:r,config:e,context:f,result:h.result,filter:n,isRetry:!0})),_.deployError&&x.push(_.deployError),I++});I>Ge&&!_.previewInfo&&console.warn(`Deploy validation failed after ${Ge} attempts`)}let{diff:R,resultDiff:S,previewInfo:N,diffBinary:C,resultDiffBinary:O}=_;await l("cleanup",()=>ut({config:e,diff:R,result:g.result,duration:g.duration,resultDiff:S,diffBinary:C,resultDiffBinary:O,previewInfo:N}),V.timeUnits.minutes(10)),process.env.NETLIFY_LOCAL_MODE||(await s?.(),await et())})}catch(c){if(gt(c)){he.info("Agent run terminated gracefully",{statusCode:c.statusCode,reason:c.message}),await s?.();try{await G(e.id,e.sessionId,{result:c.userMessage,state:"error"})}catch{he.info("Could not update session (site may have been deleted)")}return}he.error("Got error while running pipeline",c),await s?.();let f=c instanceof Error&&c.message;throw await G(e.id,e.sessionId,{result:f||"Encountered error when running agent",state:"error"}),c}finally{await xn()}};export{xs as runPipeline};
175
175
  //# sourceMappingURL=index.js.map
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@netlify/agent-runner-cli",
3
3
  "type": "module",
4
- "version": "1.62.0",
4
+ "version": "1.63.0",
5
5
  "description": "CLI tool for running Netlify agents",
6
6
  "main": "./dist/index.js",
7
7
  "types": "./dist/index.d.ts",