@netlify/agent-runner-cli 1.58.1-alpha → 1.58.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
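A comparable diff can usually be reproduced locally with npm's built-in diff command (npm 7 or later); the invocation below is illustrative:

npm diff --diff=@netlify/agent-runner-cli@1.58.1-alpha --diff=@netlify/agent-runner-cli@1.58.2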
package/dist/bin-local.js CHANGED
@@ -1,74 +1,74 @@
  #!/usr/bin/env node
- import C from"process";import nr from"path";import or from"fs";import En from"minimist";import{createRequire as hn}from"module";import{createTracerProvider as ir}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as it}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as sr}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as ar}from"@netlify/otel";import{propagation as st,context as at,W3CTraceContextPropagator as lr}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as cr}from"@opentelemetry/exporter-trace-otlp-grpc";function y(e){let t=!process.env.VITEST;return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var Le=y("tracing"),lt=async(e,t,r)=>(await ir({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new it(new Ue),new it(new cr({url:r.exporterUrl}))],instrumentations:[new sr({skipHeaders:!0})]}),r.traceparent?(st.setGlobalPropagator(new lr),st.extract(at.active(),{traceparent:r.traceparent,isRemote:!0})):at.active());function A(e,t,r){return Le.log(`\u23F3 TRACE: ${t} starting...`),ar(e,t,r)}var Ue=class{export(t,r){for(let n of t)this.logSpan(n);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,n=t.attributes,o=[];for(let[c,s]of Object.entries(n))c.includes("duration")&&typeof s=="number"?o.push(`${c}=${s.toFixed(2)}ms`):o.push(`${c}=${s}`);let i=t.status?.code===2?"\u274C":"\u2705",a=o.length>0?` [${o.join(", ")}]`:"";Le.log(`${i} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${a}`),t.status?.code===2&&t.status.message&&Le.log(` \u274C Error: ${t.status.message}`)}};var ur=["error","failed","exception","fatal","panic","abort","crash"];function ct(e){let t=e.split(`
- `),r=[],n=-1,o=0;for(;o<t.length;){let c=t[o].slice(0,500).toLowerCase();if(ur.some(m=>c.includes(m))){let m=Math.max(0,o-10,n+1),p=Math.min(t.length-1,o+20),u=[];for(let g=m;g<=p;g++)u.push(t[g]);r.push(u.join(`
- `)),n=p,o=p+1}else o++}if(r.length===0)return e;let i=r.map((a,c)=>`<extracted_error_chunk order="${c+1}">
- ${a}
+ import C from"process";import ar from"path";import lr from"fs";import vn from"minimist";import{createRequire as xn}from"module";import{createTracerProvider as ur}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as lt}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as dr}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as pr}from"@netlify/otel";import{propagation as ct,context as ut,W3CTraceContextPropagator as fr}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as gr}from"@opentelemetry/exporter-trace-otlp-grpc";import cr from"process";function _(e){let t=cr.env.LOG!=="0";return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var Me=_("tracing"),dt=async(e,t,r)=>(await ur({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new lt(new je),new lt(new gr({url:r.exporterUrl}))],instrumentations:[new dr({skipHeaders:!0})]}),r.traceparent?(ct.setGlobalPropagator(new fr),ct.extract(ut.active(),{traceparent:r.traceparent,isRemote:!0})):ut.active());function S(e,t,r){return Me.log(`\u23F3 TRACE: ${t} starting...`),pr(e,t,r)}var je=class{export(t,r){for(let n of t)this.logSpan(n);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,n=t.attributes,o=[];for(let[u,a]of Object.entries(n))u.includes("duration")&&typeof a=="number"?o.push(`${u}=${a.toFixed(2)}ms`):o.push(`${u}=${a}`);let i=t.status?.code===2?"\u274C":"\u2705",s=o.length>0?` [${o.join(", ")}]`:"";Me.log(`${i} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${s}`),t.status?.code===2&&t.status.message&&Me.log(` \u274C Error: ${t.status.message}`)}};var mr=["error","failed","exception","fatal","panic","abort","crash"];function pt(e){let t=e.split(`
+ `),r=[],n=-1,o=0;for(;o<t.length;){let u=t[o].slice(0,500).toLowerCase();if(mr.some(f=>u.includes(f))){let f=Math.max(0,o-10,n+1),p=Math.min(t.length-1,o+20),c=[];for(let m=f;m<=p;m++)c.push(t[m]);r.push(c.join(`
+ `)),n=p,o=p+1}else o++}if(r.length===0)return e;let i=r.map((s,u)=>`<extracted_error_chunk order="${u+1}">
+ ${s}
  </extracted_error_chunk>`).join(`
 
- `);return i.length>e.length*.8?e:i}import Ce from"process";import{getTracer as zr}from"@netlify/otel";import fe from"process";var Ie=fe.env.NETLIFY_API_URL,xe=fe.env.NETLIFY_API_TOKEN,Y=y("api"),Te=()=>fe.env.NETLIFY_LOCAL_MODE==="true",ge=async(e,t={})=>{if(!Ie||!xe)throw new Error("No API URL or token");let r=new URL(e,Ie),n={...t,headers:{...t.headers,Authorization:`Bearer ${xe}`}};fe.env.AGENT_RUNNERS_DEBUG==="true"&&(n.headers["x-nf-debug-logging"]="true"),t.json&&(n.headers||={},n.headers["Content-Type"]="application/json",n.body=JSON.stringify(t.json));let o=await fetch(r,n),i=o.ok&&o.status<=299;if(fe.env.AGENT_RUNNERS_DEBUG==="true")Y.log(`Response headers for ${r}:`),o.headers.forEach((c,s)=>{Y.log(` ${s}: ${c}`)});else{let c=o.headers.get("x-request-id")||o.headers.get("x-nf-request-id");Y.log(`Request ID for ${r}: ${c||"N/A"}`)}if(i||Y.error(`Got status ${o.status} for request ${r}`),t.raw){if(!i)throw o;return o}let a=await(o.headers.get("content-type")?.includes("application/json")?o.json():o.text());if(!i)throw a;return a},ut=e=>{Y.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(Ie=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(xe=e.constants.NETLIFY_API_TOKEN)},dt=()=>({apiUrl:Ie,token:xe}),me=async(e,t)=>Te()?(Y.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):ge(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),q=async(e,t,r)=>Te()?(Y.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):ge(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var pt=async(e,t)=>Te()?(Y.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):ge(`/api/v1/agent_runners/${e}/sessions/${t}`),ft=(e,t,r)=>ge(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r}}),gt=async(e,t)=>Te()?(Y.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):ge(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),Me=async(e,t)=>{Y.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var oe=y("ai_gateway"),je=null;var mt=async()=>{if(je)return je;oe.log("Fetching available AI gateway providers");let e=await fetch(`${dt().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return je=t,oe.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},dr=async(e,t)=>{let n=(await mt()).providers[e];if(!n)return oe.log(`Provider '${e}' not found`),!1;let o=n.models.includes(t);return oe.log(`Model validation for ${e}/${t}`,{isAvailable:o}),o},ht=async({netlify:e,config:t})=>{let r,n,o,i,a=e.constants?.SITE_ID;if(!a)throw new Error("No site id");let c=async()=>{clearTimeout(o),oe.log("Requesting AI gateway information");let s=await ft(a,t.id,t.sessionId);if({token:r,url:i}=s,n=s.expires_at?s.expires_at*1e3:void 0,oe.log("Got AI gateway information",{token:!!r,expiresAt:n,url:i}),n){let 
m=n-Date.now()-6e4;m>0&&(o=setTimeout(()=>{c()},m))}};return await Promise.all([c(),mt()]),{get url(){return i},get token(){return r},isModelAvailableForProvider:dr}};import B from"process";import V from"path";import ve from"fs";import{fileURLToPath as _r}from"url";import{createRequire as Er}from"module";import{execa as wr,execaCommand as Vn}from"execa";import{Transform as pr}from"stream";var fr=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","ERROR_LOGS_PATH","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),gr=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function mr(){return Object.entries(process.env).filter(([e,t])=>!(!t||fr.has(e)||gr.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function W(e){if(typeof e!="string")return e;let t=mr();if(t.length===0)return e;let r=e;return t.forEach(n=>{let o=new RegExp(hr(n),"g");r=r.replace(o,"******")}),r}function hr(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var ie=class extends pr{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,n){let o=t.toString(),i=W(o);n(null,i)}};function yt(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(n,o,i){let a=typeof n=="string"?W(n):n;return typeof o=="function"?t(a,o):t(a,o,i)},process.stderr.write=function(n,o,i){let a=typeof n=="string"?W(n):n;return typeof o=="function"?r(a,o):r(a,o,i)}}var he=null,_t=e=>(he&&he.destroy(),he=new Q({totalAllowedTime:e}),he),Et=()=>he;var Q=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,n)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let o=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),i=null,a=null;n!==void 0&&(a=new Promise((c,s)=>{i=setTimeout(()=>{s(new Error(`${t} stage exceeded its maximum duration of ${n}ms`))},n)}));try{return a?await Promise.race([r(),a]):await r()}finally{o(),i&&clearTimeout(i)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var wt={name:"@netlify/agent-runner-cli",type:"module",version:"1.58.1-alpha",description:"CLI tool for running Netlify agents",main:"./dist/index.js",types:"./dist/index.d.ts",exports:"./dist/index.js",bin:{"agent-runner-cli":"./dist/bin.js","agent-runner-cli-local":"./dist/bin-local.js"},files:["dist/**/*.js","dist/**/*.d.ts","patches","scripts"],scripts:{build:"tsup",dev:"tsup --watch",prepare:"husky install node_modules/@netlify/eslint-config-node/.husky/",prepublishOnly:"npm ci && npm test",prepack:"npm run build",test:"run-s build format test:dev",format:"run-s build format:check-fix:*","format:ci":"run-s build format:check:*","format:check-fix:lint":"run-e format:check:lint format:fix:lint","format:check:lint":"cross-env-shell eslint $npm_package_config_eslint","format:fix:lint":"cross-env-shell eslint --fix $npm_package_config_eslint","format:check-fix:prettier":"run-e format:check:prettier format:fix:prettier","format:check:prettier":"cross-env-shell prettier --check $npm_package_config_prettier","format:fix:prettier":"cross-env-shell prettier --write $npm_package_config_prettier","test:dev":"run-s build test:dev:*","test:ci":"run-s build test:ci:*","test:dev:vitest":"vitest","test:ci:vitest":"c8 -r lcovonly -r text -r json vitest",postinstall:"node scripts/postinstall.js"},config:{eslint:'--cache --format=codeframe --max-warnings=0 "{src,scripts,test,.github}/**/*.{js,ts,md,html}"',prettier:'--ignore-path .gitignore --loglevel=warn "{src,scripts,test,.github}/**/*.{js,ts,md,yml,json,html}" "*.{js,ts,yml,json,html}" ".*.{js,ts,yml,json,html}" "!**/package-lock.json" "!package-lock.json"'},keywords:[],license:"MIT",repository:"netlify/agent-runner-cli",bugs:{url:"https://github.com/netlify/agent-runner-cli/issues"},author:"Netlify 
Inc.",directories:{test:"test"},devDependencies:{"@commitlint/cli":"^19.0.0","@commitlint/config-conventional":"^19.0.0","@eslint/compat":"^1.3.2","@eslint/js":"^9.35.0","@netlify/eslint-config-node":"^7.0.1","@types/node":"^24.5.0","@typescript-eslint/eslint-plugin":"^7.1.0","@typescript-eslint/parser":"^7.1.0","@vitest/eslint-plugin":"^1.3.10",c8:"^9.0.0","eslint-config-prettier":"^10.1.8","eslint-plugin-n":"^17.0.0",husky:"^8.0.0","patch-package":"^8.0.0",tsup:"^8.5.0",typescript:"^5.0.0","typescript-eslint":"^8.44.0",vitest:"^1.5.0"},dependencies:{"@anthropic-ai/claude-code":"2.0.61","@google/gemini-cli":"0.19.4","@netlify/otel":"^5.1.0","@openai/codex":"0.66.0-alpha.11","@opentelemetry/exporter-trace-otlp-grpc":"^0.57.0",execa:"^8.0.0","get-port":"^5.1.1",minimist:"^1.2.8"}};var Ir=_r(import.meta.url),xr=V.dirname(Ir),Tr=Er(import.meta.url),Re=y("shell"),Ge=new Set,vr={preferLocal:!0},N=(e,t,r)=>{let[n,o]=Rr(t,r),i={...vr,...o},a=wr(e,n,i);return Sr(a,i),br(a),a};var Rr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},Sr=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(B.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new ie).pipe(B.stdout),e.stdout?.pipe(new ie).pipe(B.stdout),e.stderr?.pipe(new ie).pipe(B.stderr);return}e.stdout?.pipe(B.stdout),e.stderr?.pipe(B.stderr)},It=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(B.kill(-e.pid,t),Re.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return Re.error("Error killing process:",r),!1}},Ar=e=>It(e,"SIGKILL"),br=e=>{Ge.add(e);let t=Et();if(t){let r=t.onTimesUp(()=>{Re.log(`Global timer expired, killing process ${e.pid}`),It(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(Re.log(`Force killing process ${e.pid} after timeout`),Ar(e))},5e3)});e.on("exit",()=>{Ge.delete(e),r()}),e.on("error",()=>{Ge.delete(e),r()})}};function Se(e,t){return!!ee(e,t)}function ee(e,t){if(!B.env.NETLIFY_LOCAL_MODE)try{let o=Tr.resolve(wt.name),i=V.dirname(o);for(;i!==V.dirname(i);){let a=V.dirname(i);if(V.basename(a)==="node_modules"){let c=V.join(a,".bin",t);if(ve.existsSync(c))return c;break}i=a}}catch(o){console.error("Could not resolve package.json",o)}if(B.env.NODE_PATH){let o=V.join(B.env.NODE_PATH,".bin",t);if(ve.existsSync(o))return o}let r=V.join(e,"node_modules",".bin",t);if(ve.existsSync(r))return r;let n=V.join(xr,"..","node_modules",".bin",t);if(ve.existsSync(n))return n}var xt="netlify-agent-runner-context.md",Ye="task-history",Be="netlify-context",U=".netlify",se="results.md",He="assets";var Nr=y("utils"),Cr=e=>new Promise(t=>{setTimeout(t,e)}),Tt=(e,t=3e3)=>{let r=!1,n=null,o=[],i=null,a=(...c)=>{if(r)return n=c,new Promise(p=>{o.push(p)});r=!0;let s,m=new Promise(p=>{s=p});return i=(async()=>{await Promise.resolve();let p=await e(...c);for(s(p);;){if(await Cr(t),!n)return r=!1,i=null,p;let u=n,g=o;n=null,o=[],p=await e(...u),g.forEach(h=>{h(p)})}})(),m};return a.flush=async()=>{if((r||n)&&i)return await i,a.flush()},a},ae=(e,t,r=!1)=>{let n=null,o=null,i=null,a=function(...c){o=c,i=this;let s=r&&!n;clearTimeout(n),n=setTimeout(()=>{n=null,r||(e.apply(i,o),o=null,i=null)},t),s&&(e.apply(i,o),o=null,i=null)};return a.cancel=()=>{clearTimeout(n),n=null,o=null,i=null},a.flush=()=>{if(n){clearTimeout(n);let c=o,s=i;n=null,o=null,i=null,e.apply(s,c)}},a},vt=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(n){t&&(r?.error?r.error("Could not parse JSON",n):Nr.error("Could not parse JSON",n))}},Rt=(e,t)=>{let 
o=".netlify.app",i="agent-";if(!t)return`${i}${e.slice(0,6)}`;let c=`--${t}${o}`;if(c.length>55)return"";let s=60-c.length;if(s<=0)return"";if(s>=i.length+6){let m=Math.min(s-i.length,e.length);return`${i}${e.slice(0,m)}`}return e.slice(0,s)};import{Buffer as St}from"buffer";import Pr from"path";var At=y("repo"),bt=async({config:e})=>{At.info("Getting runner diffs");let t=await Fr(),{hasChanges:r}=t,{status:n}=t;if(!r)return{hasChanges:!1};let o=$r(n);await kr(o),At.info("Changes after processing"),await qe();let i=await We(n);await Ke(i);let a={stdio:["ignore","pipe","pipe"]},s=(await N("git",["diff","--staged"],a)).stdout;if(r=!!s,!r)return{hasChanges:!1,ignored:i};let p=(await N("git",["diff","--staged","--binary"],a)).stdout,u,g;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await N("git",["commit","-m","Agent runner"]),u=(await N("git",["diff",e.sha,"HEAD"],a)).stdout;let f=(await N("git",["diff",e.sha,"HEAD","--binary"],a)).stdout;u!==f&&(g=St.from(f).toString("base64"))}let h={hasChanges:!0,diff:s,resultDiff:u,ignored:i};return s!==p&&(h.diffBinary=St.from(p).toString("base64")),g&&(h.resultDiffBinary=g),h},Ke=async(e=[])=>{await N("git",["add",".",...e])},qe=async()=>(await N("git",["status","-s"])).stdout,Nt=/.. (.+)?\.log$/,Or=[Nt],Fr=async()=>{let e=await qe();return{hasChanges:(e.trim().length===0?[]:e.split(`
- `).filter(n=>Or.some(i=>i instanceof RegExp?i.test(n):n===i)?!1:n[1]?.trim()!=="")).length!==0,status:e}},Ct=async()=>{let{stdout:e}=await N("git",["rev-parse","HEAD"]);return e.trim()},Pt=async()=>{let{stdout:e}=await N("git",["rev-list","--max-parents=0","HEAD"]);return e.trim()},We=async e=>{e||=await qe();let t=[".netlify","node_modules"],r=[];return e.split(`
- `).forEach(n=>{t.forEach(i=>{[`?? ${i}`,`?? ${i}${Pr.sep}`].some(c=>n.startsWith(c))&&r.push(`:!${i}`)});let o=n.match(Nt)?.[1];o&&r.push(`:!${o}.log`)}),r},Ot=async()=>{await N("git",["reset","--hard","HEAD"])},$r=e=>{let t=e.split(`
- `).reduce((r,n)=>{if(!n)return r;let[o,i,,...a]=n,c=a.join(""),s=o.trim(),m=i.trim();return r[c]?r[c].change=m:r[c]={filePath:c,stage:s,change:m},r},{});return Object.values(t)},kr=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push(N("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import Lr from"fs/promises";import Ur from"os";import kt from"path";import te from"process";import Mr from"readline";import Ve from"path";import Dr from"fs/promises";var Je=y("agent-output-utils");async function le({initialResult:e,agentName:t,hasError:r}){let n="",o=Ve.join(process.cwd(),U,se);try{let i=await Dr.readFile(o,"utf-8");i&&(n=i,Je.log(`Pulled result from ${Ve.relative(process.cwd(),o)}`))}catch{Je.log(`No results file found at ${Ve.relative(process.cwd(),o)}`)}return n||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function ce({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,n=r?.replace(/\s+/g," ").trim().toLowerCase()||"",o="";return n?.includes("ai gateway is not available for your account")||n?.includes("ai gateway is not enabled for your account")?o="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":n?.includes("error when talking to gemini api")?o="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(n?.includes("connection closed prematurely")||n?.includes("499")&&t.toLowerCase().includes("gemini"))&&(o=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),n?.includes("request timed out")&&(o=`The ${t} API request's have timed out. Please try again or use a different available agent.`),n?.includes("network error")&&(o=`The ${t} agent is having network issues. 
Please try again or use a different available agent.`),o&&Je.log(`Providing updated error messsage: ${o}, replacing original error: ${r}`),o||r||void 0}function ue(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error"))}var M=y("runner_claude"),Ft="Claude Code",de="claude-opus-4-5-20251101",$t=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,jr=({catchError:e,runCmd:t,error:r,result:n,runnerName:o})=>(M.log(`${o} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!n,resultLength:n?n.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),n?(M.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:n}:{error:"Process completed with errors but result was captured",result:n}):(M.log("Setting result to undefined because no valid result was captured"),{error:r||`${o} failed`,result:void 0}));async function Xe({config:e,netlify:t,persistSteps:r,aiGateway:n,continueSession:o,priorAgentSessionId:i}){let a=e,{accountType:c,prompt:s,modelVersionOverrides:m}=a,{model:p}=a,u="";if(n){let{token:_,url:d}=n;if(!_||!d)throw new Error("No token or url provided from AI Gateway");if(m?.claude){let l=m?.claude?.[c];if(l){if(!await n.isModelAvailableForProvider("anthropic",l))throw new Error(`Model override '${l}' is not available for anthropic provider`);p=l}}else if(p){if(!await n.isModelAvailableForProvider("anthropic",p))throw new Error(`Model '${p}' is not available for anthropic provider`)}else!!de&&await n.isModelAvailableForProvider("anthropic",de)?(p=de,M.log(`Using default model: ${de}`)):de&&M.log(`Default model ${de} is not available, proceeding without model specification`);te.env.ANTHROPIC_API_KEY=_,te.env.ANTHROPIC_BASE_URL=d}else if(!te.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let g=[],h=[],I={},w=0,f=0,v,x,k=[ee(te.cwd(),"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose",...p?["--model",p]:[],...o?["--continue"]:[],...o&&i?["--resume",i]:[],"-p",s],P=`${te.env.NVM_BIN}/node`;M.log(`Running ${P} ${k.join(" ")}`);let O=t.utils.run(P,k,{all:!0,env:te.env});O.stdin?.end();let S=ae(()=>{r?.({steps:g,duration:f})},250),T=(_,d)=>{let l={..._,id:w};w+=1,h.push(l),g.push(l),d||S.flush(),S(),d&&S.flush()},R=Mr.createInterface({input:O.all});return R.on("error",_=>{M.error("Readline interface error",{error:_.message,stack:_.stack})}),R.on("line",_=>{let d=null;try{d=JSON.parse(_)}catch{M.log("Could not parse line",_)}d?.session_id&&d.session_id!==u&&(u=d.session_id),Array.isArray(d?.message?.content)?d.message.content.forEach(l=>{switch(l.type){case"text":{l.text&&T({message:l.text});break}case"image":{typeof l.source=="object"&&l.source&&l.source.type==="base64"&&l.source.media_type?T({message:`![](data:${l.source.media_type};base64,${l.source.data})`}):M.log(`Unsupported image type 
${l.source?.type}`,l.source);break}case"tool_use":{if(l.name==="Task"){let E=l.input?.description&&`\`${l.input.description}\``;T({title:[$t(l.name),E].filter(Boolean).join(" ")})}else l.id&&(I[l.id]=l);S.flush();break}case"tool_result":{let E=l.tool_use_id?I[l.tool_use_id]:void 0,z;if(E){let J=E.input?.file_path&&kt.relative(te.cwd(),E.input.file_path),F=J&&`\`${J}\``;z=[$t(E.name||""),F].filter(Boolean).join(" ")}let we=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(E?.name||""),Z;if(typeof l.content=="string")Z=l.content;else if(Array.isArray(l.content)){let J=[];l.content.forEach(F=>{F?.type==="text"&&typeof F.text=="string"?J.push(F.text):F?.type==="image"&&typeof F.source=="object"&&F.source?F.source.type==="base64"&&F.source.media_type?J.push(`![](data:${F.source.media_type};base64,${F.source.data})`):M.log(`Unsupported image type ${F.source.type}`,F.source):M.log(`Unsupported block type ${F?.type}`)}),Z=J.join(`
-
- `)}we&&Z&&(Z=`\`\`\`
- ${Z.trim()}
- \`\`\``),T({title:z,message:Z},!0);break}case"thinking":{l.thinking&&T({title:"Thinking",message:l.thinking},!0);break}default:M.log(`Message content type is not supported ${l.type}`,l)}}):d?.type==="result"&&(f=d.duration_ms||0,d.is_error?x=d.result:v=d.result,[h,g].forEach(l=>{l[l.length-1]?.message===v&&l.pop()}))}),await O.catch(_=>{({error:x,result:v}=jr({catchError:_,runCmd:O,error:x,result:v,runnerName:"Claude"}))}),R.close(),S.flush(),{steps:h,duration:f,result:await le({initialResult:v,agentName:Ft,hasError:!!x}),error:ce({error:x,agentName:Ft}),isRetryableError:ue(x),agentSessionId:u}}var Dt=async()=>{let e=kt.join(Ur.homedir(),".claude");await Lr.rm(e,{recursive:!0,force:!0})};import ye from"fs/promises";import Ut from"os";import Ae from"path";import re from"process";import Gr from"readline";var H=y("runner_codex"),Lt="Codex CLI",Yr=({catchError:e,runCmd:t,error:r,result:n,runnerName:o})=>(H.log(`${o} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!n,resultLength:n?n.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),n?(H.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:n}:{error:"Process completed with errors but result was captured",result:n}):(H.log("Setting result to undefined because no valid result was captured"),{error:r||`${o} failed`,result:void 0}));async function ze({config:e,netlify:t,persistSteps:r,sendSteps:n,aiGateway:o}){let{accountType:i,prompt:a,modelVersionOverrides:c}=e,{model:s}=e;if(o){let{token:d,url:l}=o;if(!d||!l)throw new Error("No token or url provided from AI Gateway");if(c?.codex){let E=c?.codex?.[i];if(E){if(!await o.isModelAvailableForProvider("openai",E))throw new Error(`Model override '${E}' is not available for openai provider`);s=E}}else if(s&&!await o.isModelAvailableForProvider("openai",s))throw new Error(`Model '${s}' is not available for openai provider`);re.env.OPENAI_API_KEY=d,re.env.OPENAI_BASE_URL=l}else if(!re.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let m=[],p=[],u=[],g={},h=0,I=0,w,f,v=`${re.env.NVM_BIN}/node`,x=Ae.join(Ut.homedir(),".codex"),k=Ae.join(x,"config.toml"),P=Ae.join(x,"auth.json");try{await ye.mkdir(x,{recursive:!0});let d={OPENAI_API_KEY:re.env.OPENAI_API_KEY};await ye.writeFile(P,JSON.stringify(d,null,2),"utf-8"),H.log("Created Codex auth.json file");let l="";try{l=await ye.readFile(k,"utf-8")}catch{}l.includes("web_search")||(l.includes("[tools]")?l=l.replace(/\[tools\]/,`[tools]
- web_search = true`):l+=`
- [tools]
- web_search = true
- `,await ye.writeFile(k,l,"utf-8"),H.log("Updated Codex config with web_search enabled"))}catch(d){throw H.warn("Failed to setup Codex config and credentials",{error:d.message}),new Error(`Codex setup failed: ${d.message}`)}let O=[ee(re.cwd(),"codex"),"exec","--yolo","--json","--config","web_search=true",...s?["--model",s]:[],a].filter(Boolean);H.log(`Running ${v} ${O.join(" ")}`);let S=t.utils.run(v,O,{all:!0,env:{...re.env}}),T=ae(()=>{r?.({steps:m,duration:I}),n?.({steps:p,duration:I}),p=[]},250),R=(d,l)=>{d.id=h,h+=1,u.push(d),m.push(d),p.push(d),l||T.flush(),T(),l&&T.flush()},_=Gr.createInterface({input:S.all});return _.on("error",d=>{H.error("Readline interface error",{error:d.message,stack:d.stack})}),_.on("line",d=>{let l=null;try{l=JSON.parse(d)}catch{H.log("Could not parse line",d);return}if(l?.duration_ms&&(I=l.duration_ms),l?.type==="item.started"&&l?.item?.type==="command_execution")g[l.item.id]=l.item;else if(l?.type==="item.completed"&&l?.item?.type==="command_execution"){let E=Hr(l.item);E&&R(E,!0)}else if(l?.type==="item.completed"&&l?.item?.type==="reasoning"){let E={title:"Reasoning",message:l.item.text};R(E,!0)}else if(l?.type==="local_shell_call")g[l.call_id]=l;else if(l?.type==="local_shell_call_output"){let E=Kr(g[l.call_id],l);E&&R(E,!0)}else l?.type==="message"&&l.role==="assistant"?w=l.content.map(E=>E.text).join(`
- `):l?.type==="message"&&l.role==="system"&&(f=l.content.map(E=>E.text).join(`
- `))}),await S.catch(d=>{let l=Yr({catchError:d,runCmd:S,error:f,result:w,runnerName:"Codex"});f=l.error,w=l.result}),_.close(),T.flush(),{steps:u,duration:I,result:await le({initialResult:w,agentName:Lt,hasError:!!f}),error:ce({error:f,agentName:Lt}),isRetryableError:ue(f)}}var Mt=async()=>{let e=Ae.join(Ut.homedir(),".codex");await ye.rm(e,{recursive:!0,force:!0})},Br=new Set(["bash","-lc"]),Hr=e=>{if(!e||e.type!=="command_execution")return null;let t=e.command;t.startsWith("bash -lc ")&&(t=t.replace(/^bash -lc ['"]/,"").replace(/['"]$/,""));let r=`Running \`${t}\``,n=e.aggregated_output?.trim();return n&&(n=`\`\`\`
+ `);return i.length>e.length*.8?e:i}import Oe from"process";import{getTracer as nn}from"@netlify/otel";import me from"process";var Te=me.env.NETLIFY_API_URL,ve=me.env.NETLIFY_API_TOKEN,H=_("api"),Re=()=>me.env.NETLIFY_LOCAL_MODE==="true",he=async(e,t={})=>{if(!Te||!ve)throw new Error("No API URL or token");let r=new URL(e,Te),n={...t,headers:{...t.headers,Authorization:`Bearer ${ve}`}};me.env.AGENT_RUNNERS_DEBUG==="true"&&(n.headers["x-nf-debug-logging"]="true"),t.json&&(n.headers||={},n.headers["Content-Type"]="application/json",n.body=JSON.stringify(t.json));let o=await fetch(r,n),i=o.ok&&o.status<=299;if(me.env.AGENT_RUNNERS_DEBUG==="true")H.log(`Response headers for ${r}:`),o.headers.forEach((u,a)=>{H.log(` ${a}: ${u}`)});else{let u=o.headers.get("x-request-id")||o.headers.get("x-nf-request-id");H.log(`Request ID for ${r}: ${u||"N/A"}`)}if(i||H.error(`Got status ${o.status} for request ${r}`),t.raw){if(!i)throw o;return o}let s=await(o.headers.get("content-type")?.includes("application/json")?o.json():o.text());if(!i)throw s;return s},ft=e=>{H.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(Te=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(ve=e.constants.NETLIFY_API_TOKEN)},gt=()=>({apiUrl:Te,token:ve}),ye=async(e,t)=>Re()?(H.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):he(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),W=async(e,t,r)=>Re()?(H.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):he(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var mt=async(e,t)=>Re()?(H.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):he(`/api/v1/agent_runners/${e}/sessions/${t}`),ht=(e,t,r)=>he(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r}}),yt=async(e,t)=>Re()?(H.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):he(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),Ge=async(e,t)=>{H.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var ne=_("ai_gateway"),Ye=null;var _t=async()=>{if(Ye)return Ye;ne.log("Fetching available AI gateway providers");let e=await fetch(`${gt().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return Ye=t,ne.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},hr=async(e,t)=>{let n=(await _t()).providers[e];if(!n)return ne.log(`Provider '${e}' not found`),!1;let o=n.models.includes(t);return ne.log(`Model validation for ${e}/${t}`,{isAvailable:o}),o},Et=async({netlify:e,config:t})=>{let r,n,o,i,s=e.constants?.SITE_ID;if(!s)throw new Error("No site id");let u=async()=>{clearTimeout(o),ne.log("Requesting AI gateway information");let a=await ht(s,t.id,t.sessionId);if({token:r,url:i}=a,n=a.expires_at?a.expires_at*1e3:void 0,ne.log("Got AI gateway information",{token:!!r,expiresAt:n,url:i}),n){let 
f=n-Date.now()-6e4;f>0&&(o=setTimeout(()=>{u()},f))}};return await Promise.all([u(),_t()]),{get url(){return i},get token(){return r},isModelAvailableForProvider:hr}};import q from"process";import J from"path";import Ae from"fs";import{fileURLToPath as Tr}from"url";import{createRequire as vr}from"module";import{execa as Rr,execaCommand as eo}from"execa";import{Transform as yr}from"stream";var _r=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","ERROR_LOGS_PATH","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),Er=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function wr(){return Object.entries(process.env).filter(([e,t])=>!(!t||_r.has(e)||Er.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function V(e){if(typeof e!="string")return e;let t=wr();if(t.length===0)return e;let r=e;return t.forEach(n=>{let o=new RegExp(xr(n),"g");r=r.replace(o,"******")}),r}function xr(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var oe=class extends yr{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,n){let o=t.toString(),i=V(o);n(null,i)}};function wt(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(n,o,i){let s=typeof n=="string"?V(n):n;return typeof o=="function"?t(s,o):t(s,o,i)},process.stderr.write=function(n,o,i){let s=typeof n=="string"?V(n):n;return typeof o=="function"?r(s,o):r(s,o,i)}}var _e=null,xt=e=>(_e&&_e.destroy(),_e=new ee({totalAllowedTime:e}),_e),It=()=>_e;var ee=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,n)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let o=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),i=null,s=null;n!==void 0&&(s=new Promise((u,a)=>{i=setTimeout(()=>{a(new Error(`${t} stage exceeded its maximum duration of ${n}ms`))},n)}));try{return s?await Promise.race([r(),s]):await r()}finally{o(),i&&clearTimeout(i)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var Tt={name:"@netlify/agent-runner-cli",type:"module",version:"1.58.2",description:"CLI tool for running Netlify agents",main:"./dist/index.js",types:"./dist/index.d.ts",exports:"./dist/index.js",bin:{"agent-runner-cli":"./dist/bin.js","agent-runner-cli-local":"./dist/bin-local.js"},files:["dist/**/*.js","dist/**/*.d.ts","patches","scripts"],scripts:{build:"tsup",dev:"tsup --watch",prepare:"husky install node_modules/@netlify/eslint-config-node/.husky/",prepublishOnly:"npm ci && npm test",prepack:"npm run build",test:"run-s build format test:dev",format:"run-s build format:check-fix:*","format:ci":"run-s build format:check:*","format:check-fix:lint":"run-e format:check:lint format:fix:lint","format:check:lint":"cross-env-shell eslint $npm_package_config_eslint","format:fix:lint":"cross-env-shell eslint --fix $npm_package_config_eslint","format:check-fix:prettier":"run-e format:check:prettier format:fix:prettier","format:check:prettier":"cross-env-shell prettier --check $npm_package_config_prettier","format:fix:prettier":"cross-env-shell prettier --write $npm_package_config_prettier","test:dev":"run-s build test:dev:*","test:ci":"run-s build test:ci:*","test:dev:vitest":"LOG=0 vitest --exclude '**/integration/**'","test:ci:vitest":"LOG=0 c8 -r lcovonly -r text -r json vitest --exclude '**/integration/**'","test:integration":"vitest run test/integration/","test:integration:codex":"vitest run test/integration/codex.test.ts","test:integration:claude":"vitest run test/integration/claude.test.ts","test:integration:gemini":"vitest run test/integration/gemini.test.ts",postinstall:"node scripts/postinstall.js"},config:{eslint:'--cache --format=codeframe --max-warnings=0 "{src,scripts,test,.github}/**/*.{js,ts,md,html}"',prettier:'--ignore-path .gitignore --loglevel=warn "{src,scripts,test,.github}/**/*.{js,ts,md,yml,json,html}" "*.{js,ts,yml,json,html}" ".*.{js,ts,yml,json,html}" 
"!**/package-lock.json" "!package-lock.json"'},keywords:[],license:"MIT",repository:"netlify/agent-runner-cli",bugs:{url:"https://github.com/netlify/agent-runner-cli/issues"},author:"Netlify Inc.",directories:{test:"test"},devDependencies:{"@commitlint/cli":"^19.0.0","@commitlint/config-conventional":"^19.0.0","@eslint/compat":"^1.3.2","@eslint/js":"^9.35.0","@netlify/eslint-config-node":"^7.0.1","@types/node":"^24.5.0","@typescript-eslint/eslint-plugin":"^7.1.0","@typescript-eslint/parser":"^7.1.0","@vitest/eslint-plugin":"^1.3.10",c8:"^9.0.0","eslint-config-prettier":"^10.1.8","eslint-plugin-n":"^17.0.0",husky:"^8.0.0","patch-package":"^8.0.0",tsup:"^8.5.0",typescript:"^5.0.0","typescript-eslint":"^8.44.0",vitest:"^1.5.0"},dependencies:{"@anthropic-ai/claude-code":"2.0.69","@google/gemini-cli":"0.20.2","@netlify/otel":"^5.1.1","@openai/codex":"0.72.0","@opentelemetry/exporter-trace-otlp-grpc":"^0.57.0",execa:"^8.0.0","get-port":"^5.1.1",minimist:"^1.2.8"}};var Ar=Tr(import.meta.url),Sr=J.dirname(Ar),br=vr(import.meta.url),Se=_("shell"),Be=new Set,Nr={preferLocal:!0},N=(e,t,r)=>{let[n,o]=Cr(t,r),i={...Nr,...o},s=Rr(e,n,i);return Pr(s,i),Fr(s),s};var Cr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},Pr=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(q.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new oe).pipe(q.stdout),e.stdout?.pipe(new oe).pipe(q.stdout),e.stderr?.pipe(new oe).pipe(q.stderr);return}e.stdout?.pipe(q.stdout),e.stderr?.pipe(q.stderr)},vt=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(q.kill(-e.pid,t),Se.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return Se.error("Error killing process:",r),!1}},Or=e=>vt(e,"SIGKILL"),Fr=e=>{Be.add(e);let t=It();if(t){let r=t.onTimesUp(()=>{Se.log(`Global timer expired, killing process ${e.pid}`),vt(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(Se.log(`Force killing process ${e.pid} after timeout`),Or(e))},5e3)});e.on("exit",()=>{Be.delete(e),r()}),e.on("error",()=>{Be.delete(e),r()})}};function be(e,t){return!!te(e,t)}function te(e,t){if(!q.env.NETLIFY_LOCAL_MODE)try{let o=br.resolve(Tt.name),i=J.dirname(o);for(;i!==J.dirname(i);){let s=J.dirname(i);if(J.basename(s)==="node_modules"){let u=J.join(s,".bin",t);if(Ae.existsSync(u))return u;break}i=s}}catch(o){console.error("Could not resolve package.json",o)}if(q.env.NODE_PATH){let o=J.join(q.env.NODE_PATH,".bin",t);if(Ae.existsSync(o))return o}let r=J.join(e,"node_modules",".bin",t);if(Ae.existsSync(r))return r;let n=J.join(Sr,"..","node_modules",".bin",t);if(Ae.existsSync(n))return n}var Rt="netlify-agent-runner-context.md",He="task-history",qe="netlify-context",U=".netlify",ie="results.md",Ke="assets";var $r=_("utils"),kr=e=>new Promise(t=>{setTimeout(t,e)}),At=(e,t=3e3)=>{let r=!1,n=null,o=[],i=null,s=(...u)=>{if(r)return n=u,new Promise(p=>{o.push(p)});r=!0;let a,f=new Promise(p=>{a=p});return i=(async()=>{await Promise.resolve();let p=await e(...u);for(a(p);;){if(await kr(t),!n)return r=!1,i=null,p;let c=n,m=o;n=null,o=[],p=await e(...c),m.forEach(h=>{h(p)})}})(),f};return s.flush=async()=>{if((r||n)&&i)return await i,s.flush()},s},se=(e,t,r=!1)=>{let n=null,o=null,i=null,s=function(...u){o=u,i=this;let a=r&&!n;clearTimeout(n),n=setTimeout(()=>{n=null,r||(e.apply(i,o),o=null,i=null)},t),a&&(e.apply(i,o),o=null,i=null)};return s.cancel=()=>{clearTimeout(n),n=null,o=null,i=null},s.flush=()=>{if(n){clearTimeout(n);let u=o,a=i;n=null,o=null,i=null,e.apply(a,u)}},s},St=(e,t=!0,r)=>{if(e)try{return 
JSON.parse(e)}catch(n){t&&(r?.error?r.error("Could not parse JSON",n):$r.error("Could not parse JSON",n))}},bt=(e,t)=>{let o=".netlify.app",i="agent-";if(!t)return`${i}${e.slice(0,6)}`;let u=`--${t}${o}`;if(u.length>55)return"";let a=60-u.length;if(a<=0)return"";if(a>=i.length+6){let f=Math.min(a-i.length,e.length);return`${i}${e.slice(0,f)}`}return e.slice(0,a)};var Dr=50*1024,We=(e,t=Dr)=>{if(!e||typeof e!="string"||e.length<=t)return e;let n=e.startsWith("```")?"\n... [truncated]\n```":"... [truncated]";return e.slice(0,t)+n};import{Buffer as Nt}from"buffer";import Lr from"path";var Ct=_("repo"),Pt=async({config:e,isRetry:t})=>{Ct.info("Getting runner diffs");let r=await Mr(),{hasChanges:n}=r,{status:o}=r;if(!n)return{hasChanges:!1};if(!t){let x=jr(o);await Gr(x)}Ct.info("Changes after processing"),await Je();let i=await Xe(o);await Ve(i);let s={stdio:["ignore","pipe","pipe"]},a=(await N("git",["diff","--staged"],s)).stdout;if(n=!!a,!n)return{hasChanges:!1,ignored:i};let p=(await N("git",["diff","--staged","--binary"],s)).stdout,c,m;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await N("git",["commit","-m","Agent runner"]),c=(await N("git",["diff",e.sha,"HEAD"],s)).stdout;let g=(await N("git",["diff",e.sha,"HEAD","--binary"],s)).stdout;c!==g&&(m=Nt.from(g).toString("base64"))}let h={hasChanges:!0,diff:a,resultDiff:c,ignored:i};return a!==p&&(h.diffBinary=Nt.from(p).toString("base64")),m&&(h.resultDiffBinary=m),h},Ve=async(e=[])=>{await N("git",["add",".",...e])},Je=async()=>(await N("git",["status","-s"])).stdout,Ot=/.. (.+)?\.log$/,Ur=[Ot],Mr=async()=>{let e=await Je();return{hasChanges:(e.trim().length===0?[]:e.split(`
+ `).filter(n=>Ur.some(i=>i instanceof RegExp?i.test(n):n===i)?!1:n[1]?.trim()!=="")).length!==0,status:e}},Ft=async()=>{let{stdout:e}=await N("git",["rev-parse","HEAD"]);return e.trim()},$t=async()=>{let{stdout:e}=await N("git",["rev-list","--max-parents=0","HEAD"]);return e.trim()},Xe=async e=>{e||=await Je();let t=[".netlify","node_modules","dist"],r=[];return e.split(`
+ `).forEach(n=>{t.forEach(i=>{[`?? ${i}`,`?? ${i}${Lr.sep}`].some(u=>n.startsWith(u))&&r.push(`:!${i}`)});let o=n.match(Ot)?.[1];o&&r.push(`:!${o}.log`)}),r},kt=async()=>{await N("git",["reset","--hard","HEAD"])},jr=e=>{let t=e.split(`
+ `).reduce((r,n)=>{if(!n)return r;let[o,i,,...s]=n,u=s.join(""),a=o.trim(),f=i.trim();return r[u]?r[u].change=f:r[u]={filePath:u,stage:a,change:f},r},{});return Object.values(t)},Gr=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push(N("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import Br from"fs/promises";import Hr from"os";import Ut from"path";import ue from"process";import qr from"readline";import ze from"path";import Yr from"fs/promises";var Ze=_("agent-output-utils");async function ae({initialResult:e,agentName:t,hasError:r}){let n="",o=ze.join(process.cwd(),U,ie);try{let i=await Yr.readFile(o,"utf-8");i&&(n=i,Ze.log(`Pulled result from ${ze.relative(process.cwd(),o)}`))}catch{Ze.log(`No results file found at ${ze.relative(process.cwd(),o)}`)}return n||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function le({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,n=r?.replace(/\s+/g," ").trim().toLowerCase()||"",o="";return n?.includes("ai gateway is not available for your account")||n?.includes("ai gateway is not enabled for your account")?o="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":n?.includes("error when talking to gemini api")?o="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(n?.includes("connection closed prematurely")||n?.includes("499")&&t.toLowerCase().includes("gemini"))&&(o=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),n?.includes("request timed out")&&(o=`The ${t} API request's have timed out. Please try again or use a different available agent.`),n?.includes("network error")&&(o=`The ${t} agent is having network issues. 
Please try again or use a different available agent.`),o&&Ze.log(`Providing updated error messsage: ${o}, replacing original error: ${r}`),o||r||void 0}function ce(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error"))}var M=_("runner_claude"),Dt="Claude Code",de="claude-opus-4-5-20251101",Lt=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,Kr=({catchError:e,runCmd:t,error:r,result:n,runnerName:o})=>(M.log(`${o} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!n,resultLength:n?n.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),n?(M.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:n}:{error:"Process completed with errors but result was captured",result:n}):(M.log("Setting result to undefined because no valid result was captured"),{error:r||`${o} failed`,result:void 0}));async function Qe({config:e,netlify:t,persistSteps:r,aiGateway:n,continueSession:o,priorAgentSessionId:i,cwd:s=ue.cwd()}){let u=e,{accountType:a,prompt:f,modelVersionOverrides:p}=u,{model:c}=u,m="";if(n){let{token:y,url:d}=n;if(!y||!d)throw new Error("No token or url provided from AI Gateway");if(p?.claude){let l=p?.claude?.[a];if(l){if(!await n.isModelAvailableForProvider("anthropic",l))throw new Error(`Model override '${l}' is not available for anthropic provider`);c=l}}else if(c){if(!await n.isModelAvailableForProvider("anthropic",c))throw new Error(`Model '${c}' is not available for anthropic provider`)}else!!de&&await n.isModelAvailableForProvider("anthropic",de)?(c=de,M.log(`Using default model: ${de}`)):de&&M.log(`Default model ${de} is not available, proceeding without model specification`);ue.env.ANTHROPIC_API_KEY=y,ue.env.ANTHROPIC_BASE_URL=d}else if(!ue.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let h=[],x=[],I={},g=0,E=0,v,R,P=[te(s,"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose",...c?["--model",c]:[],...o?["--continue"]:[],...o&&i?["--resume",i]:[],"-p",f],O=`${ue.env.NVM_BIN}/node`;M.log(`Running ${O} ${P.join(" ")}`);let k=t.utils.run(O,P,{all:!0,env:ue.env,cwd:s});k.stdin?.end();let A=se(()=>{r?.({steps:h,duration:E})},250),T=(y,d)=>{let l={...y,id:g};g+=1,x.push(l),h.push(l),d||A.flush(),A(),d&&A.flush()},G=qr.createInterface({input:k.all});return G.on("error",y=>{M.error("Readline interface error",{error:y.message,stack:y.stack})}),G.on("line",y=>{let d=null;try{d=JSON.parse(y)}catch{M.log("Could not parse line",y)}d?.session_id&&d.session_id!==m&&(m=d.session_id),Array.isArray(d?.message?.content)?d.message.content.forEach(l=>{switch(l.type){case"text":{l.text&&T({message:l.text});break}case"image":{typeof l.source=="object"&&l.source&&l.source.type==="base64"&&l.source.media_type?T({message:`![](data:${l.source.media_type};base64,${l.source.data})`}):M.log(`Unsupported image type 
${l.source?.type}`,l.source);break}case"tool_use":{if(l.name==="Task"){let w=l.input?.description&&`\`${l.input.description}\``;T({title:[Lt(l.name),w].filter(Boolean).join(" ")})}else l.id&&(I[l.id]=l);A.flush();break}case"tool_result":{let w=l.tool_use_id?I[l.tool_use_id]:void 0,Z;if(w){let X=w.input?.file_path&&Ut.relative(s,w.input.file_path),F=X&&`\`${X}\``;Z=[Lt(w.name||""),F].filter(Boolean).join(" ")}let Ie=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(w?.name||""),Q;if(typeof l.content=="string")Q=l.content;else if(Array.isArray(l.content)){let X=[];l.content.forEach(F=>{F?.type==="text"&&typeof F.text=="string"?X.push(F.text):F?.type==="image"&&typeof F.source=="object"&&F.source?F.source.type==="base64"&&F.source.media_type?X.push(`![](data:${F.source.media_type};base64,${F.source.data})`):M.log(`Unsupported image type ${F.source.type}`,F.source):M.log(`Unsupported block type ${F?.type}`)}),Q=X.join(`
+
+ `)}Ie&&Q&&(Q=`\`\`\`
+ ${Q.trim()}
+ \`\`\``),T({title:Z,message:Q},!0);break}case"thinking":{l.thinking&&T({title:"Thinking",message:l.thinking},!0);break}default:M.log(`Message content type is not supported ${l.type}`,l)}}):d?.type==="result"&&(E=d.duration_ms||0,d.is_error?R=d.result:v=d.result,[x,h].forEach(l=>{l[l.length-1]?.message===v&&l.pop()}))}),await k.catch(y=>{({error:R,result:v}=Kr({catchError:y,runCmd:k,error:R,result:v,runnerName:"Claude"}))}),G.close(),A.flush(),{steps:x,duration:E,result:await ae({initialResult:v,agentName:Dt,hasError:!!R}),error:le({error:R,agentName:Dt}),isRetryableError:ce(R),agentSessionId:m}}var Mt=async()=>{let e=Ut.join(Hr.homedir(),".claude");await Br.rm(e,{recursive:!0,force:!0})};import Ee from"fs/promises";import Gt from"os";import Ne from"path";import re from"process";import Wr from"readline";var j=_("runner_codex"),jt="Codex CLI",pe="gpt-5.2",Vr=({catchError:e,runCmd:t,error:r,result:n,runnerName:o})=>(j.log(`${o} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!n,resultLength:n?n.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),n?(j.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:n}:{error:"Process completed with errors but result was captured",result:n}):(j.log("Setting result to undefined because no valid result was captured"),{error:r||`${o} failed`,result:void 0}));async function et({config:e,netlify:t,persistSteps:r=void 0,sendSteps:n=void 0,aiGateway:o,cwd:i=re.cwd()}){let{accountType:s,prompt:u,modelVersionOverrides:a}=e,{model:f}=e;if(o){let{token:d,url:l}=o;if(!d||!l)throw new Error("No token or url provided from AI Gateway");if(a?.codex){let w=a?.codex?.[s];if(w){if(!await o.isModelAvailableForProvider("openai",w))throw new Error(`Model override '${w}' is not available for openai provider`);f=w}}else if(f){if(!await o.isModelAvailableForProvider("openai",f))throw new Error(`Model '${f}' is not available for openai provider`)}else!!pe&&await o.isModelAvailableForProvider("openai",pe)?(f=pe,j.log(`Using default model: ${pe}`)):pe&&j.log(`Default model ${pe} is not available, proceeding without model specification`);re.env.OPENAI_API_KEY=d,re.env.OPENAI_BASE_URL=l}else if(!re.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let p=[],c=[],m=[],h={},x=0,I=0,g,E,v=`${re.env.NVM_BIN}/node`,R=Ne.join(Gt.homedir(),".codex"),P=Ne.join(R,"config.toml"),O=Ne.join(R,"auth.json");try{await Ee.mkdir(R,{recursive:!0});let d={OPENAI_API_KEY:re.env.OPENAI_API_KEY};await Ee.writeFile(O,JSON.stringify(d,null,2),"utf-8"),j.log("Created Codex auth.json file");let l="";try{l=await Ee.readFile(P,"utf-8")}catch{}l.includes("web_search_request")||(l.includes("[features]")?l=l.replace(/\[features\]/,`[features]
16
+ web_search_request = true`):l+=`
17
+ [features]
18
+ web_search_request = true
19
+ `,await Ee.writeFile(P,l,"utf-8"),j.log("Updated Codex config with web_search_request enabled"))}catch(d){throw j.warn("Failed to setup Codex config and credentials",{error:d.message}),new Error(`Codex setup failed: ${d.message}`)}let k=[te(i,"codex"),"exec","--yolo","--json","--enable","web_search_request",...f?["--model",f]:[],u].filter(Boolean);j.log(`Running ${v} ${k.join(" ")}`);let A=t.utils.run(v,k,{all:!0,cwd:i,env:{...re.env}}),T=se(()=>{r?.({steps:p,duration:I}),n?.({steps:c,duration:I}),c=[]},250),G=(d,l)=>{d.id=x,x+=1,m.push(d),p.push(d),c.push(d),l||T.flush(),T(),l&&T.flush()},y=Wr.createInterface({input:A.all});return y.on("error",d=>{j.error("Readline interface error",{error:d.message,stack:d.stack})}),y.on("line",d=>{let l=null;try{l=JSON.parse(d)}catch{j.log("Could not parse line",d);return}if(l?.duration_ms&&(I=l.duration_ms),l?.type==="item.started"&&l?.item?.type==="command_execution")h[l.item.id]=l.item;else if(l?.type==="item.completed"&&l?.item?.type==="command_execution"){let w=Xr(l.item);w&&G(w,!0)}else if(l?.type==="item.completed"&&l?.item?.type==="reasoning"){let w={title:"Reasoning",message:l.item.text};G(w,!0)}else if(l?.type==="local_shell_call")h[l.call_id]=l;else if(l?.type==="local_shell_call_output"){let w=zr(h[l.call_id],l);w&&G(w,!0)}else l?.type==="message"&&l.role==="assistant"?g=l.content.map(w=>w.text).join(`
20
+ `):l?.type==="message"&&l.role==="system"&&(E=l.content.map(w=>w.text).join(`
21
+ `))}),await A.catch(d=>{let l=Vr({catchError:d,runCmd:A,error:E,result:g,runnerName:"Codex"});E=l.error,g=l.result}),y.close(),T.flush(),{steps:m,duration:I,result:await ae({initialResult:g,agentName:jt,hasError:!!E}),error:le({error:E,agentName:jt}),isRetryableError:ce(E)}}var Yt=async()=>{let e=Ne.join(Gt.homedir(),".codex");await Ee.rm(e,{recursive:!0,force:!0})},Jr=new Set(["bash","-lc"]),Xr=e=>{if(!e||e.type!=="command_execution")return null;let t=e.command;t.startsWith("bash -lc ")&&(t=t.replace(/^bash -lc ['"]/,"").replace(/['"]$/,""));let r=`Running \`${t}\``,n=e.aggregated_output?.trim();return n&&(n=`\`\`\`
22
22
  ${n}
23
23
  \`\`\``),e.status==="failed"&&e.exit_code!==0&&(n=n?`${n}
24
24
 
25
- *Exit code: ${e.exit_code}*`:`*Command failed with exit code: ${e.exit_code}*`),{title:r,message:n}},Kr=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(i=>!Br.has(i)),n=r?`Running \`${r.join(" ")}\``:void 0,o;try{o=JSON.parse(t.output).output?.trim(),o&&(o=`\`\`\`
25
+ *Exit code: ${e.exit_code}*`:`*Command failed with exit code: ${e.exit_code}*`),{title:r,message:n}},zr=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(i=>!Jr.has(i)),n=r?`Running \`${r.join(" ")}\``:void 0,o;try{o=JSON.parse(t.output).output?.trim(),o&&(o=`\`\`\`
26
26
  ${o.trim()}
27
- \`\`\``)}catch(i){H.error("Could not decode outputMsg",i,t.output)}return{title:n,message:o}};import be from"fs/promises";import Gt from"os";import Ne from"path";import ne from"process";import qr from"readline";var j=y("runner_gemini"),jt="Gemini CLI",pe="",Wr=({catchError:e,runCmd:t,error:r,result:n,runnerName:o})=>(j.log(`${o} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!n,resultLength:n?n.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),n?(j.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:n}:{error:"Process completed with errors but result was captured",result:n}):(j.log("Setting result to undefined because no valid result was captured"),{error:r||`${o} failed`,result:void 0})),Vr={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"},Jr=async()=>{let e=Ne.join(Gt.homedir(),".gemini"),t=Ne.join(e,"settings.json");try{await be.mkdir(e,{recursive:!0});let r={};try{let n=await be.readFile(t,"utf-8");r=JSON.parse(n)}catch{j.log("Creating new Gemini CLI settings file")}r.general||(r.general={}),r.general.previewFeatures||(r.general.previewFeatures=!0),r.model||(r.model={}),r.model.compressionThreshold!==.3&&(r.model.compressionThreshold=.3),await be.writeFile(t,JSON.stringify(r,null,2),"utf-8"),j.log("Configured Gemini CLI settings (preview features and compression threshold)")}catch(r){j.error("Failed to ensure Gemini CLI settings",{error:r.message})}};async function Ze({config:e,netlify:t,persistSteps:r,sendSteps:n,aiGateway:o}){let{accountType:i,prompt:a,modelVersionOverrides:c}=e,{model:s}=e;if(await Jr(),o){let{token:_,url:d}=o;if(!_||!d)throw new Error("No token or url provided from AI Gateway");if(c?.gemini){let l=c?.gemini?.[i];if(l){if(!await o.isModelAvailableForProvider("gemini",l))throw new Error(`Model override '${l}' is not available for gemini provider`);s=l}}if(!s)!!pe&&await o.isModelAvailableForProvider("gemini",pe)?(s=pe,j.log(`Using default model: ${pe}`)):pe&&j.log(`Default model ${pe} is not available, proceeding without model specification`);else if(s&&!c?.gemini?.[i]&&!await o.isModelAvailableForProvider("gemini",s))throw new Error(`Model '${s}' is not available for gemini provider`);ne.env.GEMINI_API_KEY=_,ne.env.GOOGLE_GEMINI_BASE_URL=d}else if(!ne.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let m=[],p=[],u=[],g={},h=0,I=0,w,f,v=[ee(ne.cwd(),"gemini"),...s?["--model",s]:[],"--yolo","--output-format","stream-json","-p",a],x=`${ne.env.NVM_BIN}/node`;j.log(`Running ${x} ${v.join(" ")}`);let k=t.utils.run(x,v,{all:!0,env:ne.env});k.stdin?.end();let P=ae(()=>{r?.({steps:m,duration:I}),n?.({steps:p,duration:I}),p=[]},250),O=(_,d)=>{_.id=h,h+=1,u.push(_),m.push(_),p.push(_),d||P.flush(),P(),d&&P.flush()},S=qr.createInterface({input:k.all});S.on("error",_=>{j.error("Readline interface error",{error:_.message,stack:_.stack})});let T="",R=()=>{T&&O({message:T.trim()}),T=""};return S.on("line",_=>{let d=null;try{if(_.startsWith("[API Error")){let l=_.match(/\[api error: (.+?)]$/i)?.[1];d={type:"error",value:vt(l,!1)?.error?.message||l||"Gemini encountered error"}}else 
d=JSON.parse(_)}catch{return}if(d)switch(["message","result"].includes(d.type)||R(),d.type){case"message":{d.role!=="user"&&d.content&&(T+=d.content);break}case"tool_use":{let l=Vr[d.tool_name]??d.tool_name,E=d.parameters?.file_path,z=E&&Ne.relative(ne.cwd(),E),we=d.parameters?.command,J={title:[l,z&&`\`${z}\``,we&&`\`${we}\``].filter(Boolean).join(" ")};g[d.tool_id]=J,P.flush();break}case"tool_result":{let l=g[d.tool_id];l&&(d.output&&(l.message=`\`\`\`
27
+ \`\`\``)}catch(i){j.error("Could not decode outputMsg",i,t.output)}return{title:n,message:o}};import Ce from"fs/promises";import Ht from"os";import Pe from"path";import fe from"process";import Zr from"readline";var Y=_("runner_gemini"),Bt="Gemini CLI",ge="",Qr=({catchError:e,runCmd:t,error:r,result:n,runnerName:o})=>(Y.log(`${o} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!n,resultLength:n?n.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),n?(Y.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:n}:{error:"Process completed with errors but result was captured",result:n}):(Y.log("Setting result to undefined because no valid result was captured"),{error:r||`${o} failed`,result:void 0})),en={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"},tn=async()=>{let e=Pe.join(Ht.homedir(),".gemini"),t=Pe.join(e,"settings.json");try{await Ce.mkdir(e,{recursive:!0});let r={};try{let n=await Ce.readFile(t,"utf-8");r=JSON.parse(n)}catch{Y.log("Creating new Gemini CLI settings file")}r.general||(r.general={}),r.general.previewFeatures||(r.general.previewFeatures=!0),r.model||(r.model={}),r.model.compressionThreshold!==.3&&(r.model.compressionThreshold=.3),await Ce.writeFile(t,JSON.stringify(r,null,2),"utf-8"),Y.log("Configured Gemini CLI settings (preview features and compression threshold)")}catch(r){Y.error("Failed to ensure Gemini CLI settings",{error:r.message})}};async function tt({config:e,netlify:t,persistSteps:r=void 0,sendSteps:n=void 0,aiGateway:o,cwd:i=fe.cwd()}){let{accountType:s,prompt:u,modelVersionOverrides:a}=e,{model:f}=e;if(await tn(),o){let{token:y,url:d}=o;if(!y||!d)throw new Error("No token or url provided from AI Gateway");if(a?.gemini){let l=a?.gemini?.[s];if(l){if(!await o.isModelAvailableForProvider("gemini",l))throw new Error(`Model override '${l}' is not available for gemini provider`);f=l}}if(!f)!!ge&&await o.isModelAvailableForProvider("gemini",ge)?(f=ge,Y.log(`Using default model: ${ge}`)):ge&&Y.log(`Default model ${ge} is not available, proceeding without model specification`);else if(f&&!a?.gemini?.[s]&&!await o.isModelAvailableForProvider("gemini",f))throw new Error(`Model '${f}' is not available for gemini provider`);fe.env.GEMINI_API_KEY=y,fe.env.GOOGLE_GEMINI_BASE_URL=d}else if(!fe.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let p=[],c=[],m=[],h={},x=0,I=0,g,E,v=[te(i,"gemini"),...f?["--model",f]:[],"--yolo","--output-format","stream-json","-p",u],R=`${fe.env.NVM_BIN}/node`;Y.log(`Running ${R} ${v.join(" ")}`);let P=t.utils.run(R,v,{all:!0,env:fe.env,cwd:i});P.stdin?.end();let O=se(()=>{r?.({steps:p,duration:I}),n?.({steps:c,duration:I}),c=[]},250),k=(y,d)=>{y.id=x,x+=1,m.push(y),p.push(y),c.push(y),d||O.flush(),O(),d&&O.flush()},A=Zr.createInterface({input:P.all});A.on("error",y=>{Y.error("Readline interface error",{error:y.message,stack:y.stack})});let T="",G=()=>{T&&k({message:T.trim()}),T=""};return A.on("line",y=>{let d=null;try{if(y.startsWith("[API Error")){let l=y.match(/\[api error: (.+?)]$/i)?.[1];d={type:"error",value:St(l,!1)?.error?.message||l||"Gemini encountered error"}}else 
d=JSON.parse(y)}catch{return}if(d)switch(["message","result"].includes(d.type)||G(),d.type){case"message":{d.role!=="user"&&d.content&&(T+=d.content);break}case"tool_use":{let l=en[d.tool_name]??d.tool_name,w=d.parameters?.file_path,Z=w&&Pe.relative(i,w),Ie=d.parameters?.command,X={title:[l,Z&&`\`${Z}\``,Ie&&`\`${Ie}\``].filter(Boolean).join(" ")};h[d.tool_id]=X,O.flush();break}case"tool_result":{let l=h[d.tool_id];l&&(d.output&&(l.message=`\`\`\`
28
28
  ${d.output.trim()}
29
- \`\`\``),O(l,!0));break}case"result":{I=d.stats?.duration_ms,d.status==="error"?f=d.error?.message:w=T.trim();break}case"error":{f=d.error;break}case"finished":break;default:{j.warn("Unhandled message type:",d.type);break}}}),await k.catch(_=>{({error:f,result:w}=Wr({catchError:_,runCmd:k,error:f,result:w,runnerName:"Gemini"}))}),S.close(),P.flush(),{steps:u,duration:I,result:await le({initialResult:w,agentName:jt,hasError:!!f}),error:ce({error:f,agentName:jt}),isRetryableError:ue(f)}}var Yt=async()=>{let e=Ne.join(Gt.homedir(),".gemini");await be.rm(e,{recursive:!0,force:!0})};var Xr={codex:{runner:ze,clean:Mt},claude:{runner:Xe,clean:Dt},gemini:{runner:Ze,clean:Yt}},Bt=Xr;var Zr=y("init_stage"),Ht=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:n})=>await A(zr(),"init-stage",async o=>{let i=performance.now();o?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.validateAgent":e.validateAgent,"init.runnerVersion":n||"unknown"});let a=Bt[e.runner];if(!a)throw o?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let c=Qr({apiToken:r});ut(c);let s=e.useGateway?await ht({netlify:c,config:e}):void 0;o?.setAttributes({"init.aiGateway.created":!!s}),e.validateAgent&&e.errorLogsPath&&o?.setAttributes({"init.errorLogsPath":e.errorLogsPath});let m=Tt(({steps:h=[],duration:I})=>{let w=h.map(f=>({...f,title:f.title?W(f.title):void 0,message:f.message?W(f.message):void 0}));return h.length=0,q(e.id,e.sessionId,{steps:w,duration:I})},t);Zr.info("Adding build files to stage");let p=await We();await Ke(p);let u;e.hasRepo?e.sha?(u=e.sha,o?.setAttributes({"init.sha.source":"provided"})):(u=await Ct(),await me(e.id,{sha:u}),o?.setAttributes({"init.sha.source":"current_commit"})):(u=await Pt(),o?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let g=performance.now()-i;return o?.setAttributes({"init.sha":u||"unknown","init.duration.ms":g,"init.status":"success"}),{aiGateway:s,context:c,persistSteps:m,runner:a,sha:u}}),Qr=({apiToken:e})=>({constants:{NETLIFY_API_HOST:Ce.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||Ce.env.NETLIFY_API_TOKEN,SITE_ID:Ce.env.SITE_ID,FUNCTIONS_DIST:Ce.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:N}});import{getTracer as Qe}from"@netlify/otel";import en from"crypto";import X from"fs/promises";import L from"path";import G from"process";var D=y("context"),tn=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:G.env.NETLIFY_TEAM_ID,userId:G.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:G.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},rn=10,nn=async e=>{let{name:t,ext:r}=L.parse(e),n=e,o=L.join(G.cwd(),U,n),i=0;for(;await on(o);){if(i>=rn)throw new Error("Failed to generate context file");n=`${t}-${en.randomUUID().slice(0,5)}${r}`,o=L.join(G.cwd(),U,n),i+=1}return n},on=async e=>{try{return await X.access(e),!0}catch{return!1}},sn=async()=>{try{D.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return D.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(n=>n&&typeof n=="object"&&n.key==="catchall-consumer");return r?!r.contextScopes||typeof 
r.contextScopes!="object"?(D.warn("Catchall consumer missing or invalid contextScopes"),null):r:(D.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?D.warn("Netlify features context request timed out"):D.warn("Failed to fetch Netlify features context:",e.message),null}},an=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let n=await r.text();return await X.writeFile(t,n,"utf-8"),!0}catch(r){return r.name==="AbortError"?D.warn(`Download timeout for ${e}`):D.warn(`Failed to download context file ${e}:`,r.message),!1}},Pe=null,ln=async()=>{if(Pe)return Pe;let e=await sn();if(!e)return[];let t=L.join(G.cwd(),U,Be);await X.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([o,i])=>{if(!i||typeof i!="object"||!i.endpoint||!i.scope)return D.warn(`Invalid scope data for ${o}, skipping...`),null;let a=`${o}.md`,c=L.join(t,a),s=L.join(U,Be,a);return D.log(`Downloading ${i.scope} context...`),await an(i.endpoint,c)?(D.log(`Downloaded: ${s}`),{scope:i.scope,path:s,key:o}):null});return Pe=(await Promise.all(r)).filter(o=>o!==null),Pe},Kt=async({cliPath:e,netlify:t,config:r,buildErrorContext:n})=>{let o=tn(t),i=await nn(xt),a=L.join(G.cwd(),U);await X.mkdir(a,{recursive:!0});let c=L.join(U,i),s=L.join(G.cwd(),c),m=L.join(G.cwd(),U,se);try{await X.unlink(m),D.log(`Deleted old results file: ${m}`)}catch{}let p=n?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
29
+ \`\`\``),k(l,!0));break}case"result":{I=d.stats?.duration_ms,d.status==="error"?E=d.error?.message:g=T.trim();break}case"error":{E=d.error;break}case"finished":break;default:{Y.warn("Unhandled message type:",d.type);break}}}),await P.catch(y=>{({error:E,result:g}=Qr({catchError:y,runCmd:P,error:E,result:g,runnerName:"Gemini"}))}),A.close(),O.flush(),{steps:m,duration:I,result:await ae({initialResult:g,agentName:Bt,hasError:!!E}),error:le({error:E,agentName:Bt}),isRetryableError:ce(E)}}var qt=async()=>{let e=Pe.join(Ht.homedir(),".gemini");await Ce.rm(e,{recursive:!0,force:!0})};var rn={codex:{runner:et,clean:Yt},claude:{runner:Qe,clean:Mt},gemini:{runner:tt,clean:qt}},Kt=rn;var Wt=_("init_stage"),Vt=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:n})=>await S(nn(),"init-stage",async o=>{let i=performance.now();o?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.validateAgent":e.validateAgent,"init.runnerVersion":n||"unknown"});let s=Kt[e.runner];if(!s)throw o?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let u=on({apiToken:r});ft(u);let a=e.useGateway?await Et({netlify:u,config:e}):void 0;o?.setAttributes({"init.aiGateway.created":!!a}),e.validateAgent&&e.errorLogsPath&&o?.setAttributes({"init.errorLogsPath":e.errorLogsPath});let f=5*1024,p=At(async({steps:x=[],duration:I})=>{let g=x.map(E=>{let v=E.title?We(V(E.title),f):void 0,R=E.message?We(V(E.message)):void 0;return{...E,title:v,message:R}});x.length=0;try{return await W(e.id,e.sessionId,{steps:g,duration:I})}catch(E){Wt.error("persistSteps failed",{error:E?.message||E})}},t);Wt.info("Adding build files to stage");let c=await Xe();await Ve(c);let m;e.hasRepo?e.sha?(m=e.sha,o?.setAttributes({"init.sha.source":"provided"})):(m=await Ft(),await ye(e.id,{sha:m}),o?.setAttributes({"init.sha.source":"current_commit"})):(m=await $t(),o?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let h=performance.now()-i;return o?.setAttributes({"init.sha":m||"unknown","init.duration.ms":h,"init.status":"success"}),{aiGateway:a,context:u,persistSteps:p,runner:s,sha:m}}),on=({apiToken:e})=>({constants:{NETLIFY_API_HOST:Oe.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||Oe.env.NETLIFY_API_TOKEN,SITE_ID:Oe.env.SITE_ID,FUNCTIONS_DIST:Oe.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:N}});import{getTracer as rt}from"@netlify/otel";import sn from"crypto";import z from"fs/promises";import L from"path";import B from"process";var D=_("context"),an=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:B.env.NETLIFY_TEAM_ID,userId:B.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:B.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},ln=10,cn=async e=>{let{name:t,ext:r}=L.parse(e),n=e,o=L.join(B.cwd(),U,n),i=0;for(;await un(o);){if(i>=ln)throw new Error("Failed to generate context file");n=`${t}-${sn.randomUUID().slice(0,5)}${r}`,o=L.join(B.cwd(),U,n),i+=1}return n},un=async e=>{try{return await z.access(e),!0}catch{return!1}},dn=async()=>{try{D.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return D.warn("Invalid response structure: missing or invalid consumers array"),null;let 
r=t.consumers.find(n=>n&&typeof n=="object"&&n.key==="catchall-consumer");return r?!r.contextScopes||typeof r.contextScopes!="object"?(D.warn("Catchall consumer missing or invalid contextScopes"),null):r:(D.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?D.warn("Netlify features context request timed out"):D.warn("Failed to fetch Netlify features context:",e.message),null}},pn=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let n=await r.text();return await z.writeFile(t,n,"utf-8"),!0}catch(r){return r.name==="AbortError"?D.warn(`Download timeout for ${e}`):D.warn(`Failed to download context file ${e}:`,r.message),!1}},Fe=null,fn=async()=>{if(Fe)return Fe;let e=await dn();if(!e)return[];let t=L.join(B.cwd(),U,qe);await z.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([o,i])=>{if(!i||typeof i!="object"||!i.endpoint||!i.scope)return D.warn(`Invalid scope data for ${o}, skipping...`),null;let s=`${o}.md`,u=L.join(t,s),a=L.join(U,qe,s);return D.log(`Downloading ${i.scope} context...`),await pn(i.endpoint,u)?(D.log(`Downloaded: ${a}`),{scope:i.scope,path:a,key:o}):null});return Fe=(await Promise.all(r)).filter(o=>o!==null),Fe},Jt=async({cliPath:e,netlify:t,config:r,buildErrorContext:n})=>{let o=an(t),i=await cn(Rt),s=L.join(B.cwd(),U);await z.mkdir(s,{recursive:!0});let u=L.join(U,i),a=L.join(B.cwd(),u),f=L.join(B.cwd(),U,ie);try{await z.unlink(f),D.log(`Deleted old results file: ${f}`)}catch{}let p=n?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
30
30
  Your task is to analyze and fix the build errors.
31
31
  Don't apply techniques of reverting changes. Apply fixes related to errors.
32
32
  Don't try to run build by yourself. Just fix the errors.
33
33
 
34
34
  <build_error_context>
35
35
  ${n}
36
- </build_error_context>`:"",u="";r.siteContext&&r.siteContext.length!==0&&(u=`
36
+ </build_error_context>`:"",c="";r.siteContext&&r.siteContext.length!==0&&(c=`
37
37
  <project_rules>
38
- ${r.siteContext.filter(f=>f.site_context).map(f=>typeof f.site_context=="string"?f.site_context:typeof f.site_context=="object"?JSON.stringify(f.site_context):"").join(`
38
+ ${r.siteContext.filter(g=>g.site_context).map(g=>typeof g.site_context=="string"?g.site_context:typeof g.site_context=="object"?JSON.stringify(g.site_context):"").join(`
39
39
 
40
40
  `)}
41
41
  </project_rules>
42
- `);let g="";if(r.sessionHistoryContext?.length){let f=L.join(G.cwd(),U,Ye);await X.mkdir(f,{recursive:!0});let v=await Promise.all(r.sessionHistoryContext.map(async(x,k)=>{let P=k+1,O=`attempt-${P}.md`,S=L.join(f,O),T=L.join(U,Ye,O),R=`# Task History - Attempt ${P}
42
+ `);let m="";if(r.sessionHistoryContext?.length){let g=L.join(B.cwd(),U,He);await z.mkdir(g,{recursive:!0});let E=await Promise.all(r.sessionHistoryContext.map(async(v,R)=>{let P=R+1,O=`attempt-${P}.md`,k=L.join(g,O),A=L.join(U,He,O),T=`# Task History - Attempt ${P}
43
43
 
44
44
  ## Request - what the user asked for
45
- ${x.request}
45
+ ${v.request}
46
46
 
47
47
  ---
48
48
 
49
49
  ## Response - what the agent replied with after its work
50
50
 
51
- ${x.response}
52
- `;return await X.writeFile(S,R,"utf-8"),D.log(`Created history file: ${T}`),T}));g+=`
51
+ ${v.response}
52
+ `;return await z.writeFile(k,T,"utf-8"),D.log(`Created history file: ${A}`),A}));m+=`
53
53
  <session_history_context>
54
54
  History of prior work on this task.
55
55
  You MUST review ALL of the files below as context to understand the context of previous attempts. Use this information to continue the discussion appropriately.
56
56
 
57
- ${v.slice(-5).map(x=>`- ${x}`).join(`
57
+ ${E.slice(-5).map(v=>`- ${v}`).join(`
58
58
  `)}
59
59
 
60
60
  </session_history_context>
61
- `}let h=await ln(),I="";h.length>0&&(I=`
61
+ `}let h=await fn(),x="";h.length>0&&(x=`
62
62
  <netlify_features_context>
63
63
  If the user request is explicitly related to a specific Netlify feature (e.g., Netlify Forms, Netlify Functions, etc.), you MUST review the relevant documentation below in addition to reviewing the project files.
64
64
  DO NOT force the use of any Netlify feature if the user request does not explicitly require it or if the project has alternative implementations in place already.
65
65
 
66
- ${h.map(f=>`- **${f.scope}**: ${f.path}`).join(`
66
+ ${h.map(g=>`- **${g.scope}**: ${g.path}`).join(`
67
67
  `)}
68
68
 
69
69
  Refer to these files when working with specific Netlify features.
70
70
  </netlify_features_context>
71
- `);let w=`
71
+ `);let I=`
72
72
  You're an AI agent designed to assist with tasks related to a Netlify project. Please review, understand, and use the context provided to complete the user's request as needed.
73
73
 
74
74
  <request>
@@ -81,17 +81,17 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
81
81
  <requirements>
82
82
  <responses>
83
83
  - Do not speak in first person. You may speak as "the agent".
84
- - When work is complete, write a changes summary in ${a}/${se} as a standalone PR description. Explain what was accomplished and why (avoid too many implementation details), assuming the reader has no prior context. Use past tense and write in prose without calling it a "PR", "Changelog", etc. This is the core of a PR message or summary page that already has a heading.
85
- - If the user's request is informational in nature (asking for output, status, information, or analysis rather than asking you to make changes), write the requested information directly to the ${a}/${se} file.
84
+ - When work is complete, write a changes summary in ${s}/${ie} as a standalone PR description. Explain what was accomplished and why (avoid too many implementation details), assuming the reader has no prior context. Use past tense and write in prose without calling it a "PR", "Changelog", etc. This is the core of a PR message or summary page that already has a heading.
85
+ - If the user's request is informational in nature (asking for output, status, information, or analysis rather than asking you to make changes), write the requested information directly to the ${s}/${ie} file.
86
86
  - Do not attempt to create git commits, PRs, etc. directly. You can use git to review information if required but the system that runs this agent will handle creating PRs or commits of the changes it performs.
87
87
  - NEVER look into the \`.git\` folder
88
88
  - NEVER print potentially sensitive values (like secrets) in the planning output or results
89
89
  </responses>
90
90
  <attachements>
91
- - for requests that require work with attachments or assets, take into account that uploaded attachments are stored in ${a}/${He} folder
92
- - move assets from ${a}/${He} folder to the project assets folder if they are referenced in a code or applied changes
91
+ - for requests that require work with attachments or assets, take into account that uploaded attachments are stored in ${s}/${Ke} folder
92
+ - move assets from ${s}/${Ke} folder to the project assets folder if they are referenced in a code or applied changes
93
93
  </attachements>
94
- ${u}
94
+ ${c}
95
95
  </requirements>
96
96
 
97
97
  <extra_context>
@@ -103,20 +103,20 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
103
103
  - Netlify Functions directory: ${o.functionsDir}
104
104
  </metadata>
105
105
  <environment>
106
- - Node Version: ${G.version||"unknown"}
106
+ - Node Version: ${B.version||"unknown"}
107
107
  - Environment variables are set globally (e.g. \`echo $VARIABLE_NAME\` can be used to check if a var is set).
108
108
  - 'netlify-cli' npm package is already available as a global package. Don't try to install it again
109
109
  - If you need to start a local development server in order to fulfill the request, try using the Netlify CLI over by running the shell command '${e} dev'. This will start a local HTTP server on port 8888, including live-reloading of any changes and, most critically, it offers local emulation for all Netlify features.
110
110
  </environment>
111
- ${I}
111
+ ${x}
112
112
  <docs>
113
113
  - Netlify Docs: https://docs.netlify.com
114
114
  - LLM Resources Index: https://docs.netlify.com/llms.txt
115
115
  </docs>
116
116
  </extra_context>
117
117
 
118
- ${g}
119
- `;return await X.writeFile(s,w,"utf-8"),D.log(`Generated agent context document at: ${s}`),w.length>5e5&&(w=`
118
+ ${m}
119
+ `;return await z.writeFile(a,I,"utf-8"),D.log(`Generated agent context document at: ${a}`),I.length>5e5&&(I=`
120
120
  You're an AI agent designed to assist with tasks related to a Netlify project. Please review, understand, and use the context provided to complete the user's request as needed.
121
121
 
122
122
  <request>
@@ -126,20 +126,20 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
126
126
  ${p}
127
127
  </request>
128
128
 
129
- Use the following file for the complete context of the ask, the environment, and what's available. ${s} You MUST READ ALL OF IT. Make sure to read it first. Never cite or paraphrase private context.
130
- `),w};var cn=y("prompt"),qt=async({cliPath:e,config:t,netlify:r,buildErrorContext:n})=>{let o=await Kt({cliPath:e,config:t,netlify:r,buildErrorContext:n});return process.env.AGENT_RUNNER_DEBUG&&cn.log("Contextful Prompt:",o),{prompt:o}};var Oe=y("inference_stage"),Wt=5,Fe=async e=>{let{cliPath:t,config:r,context:n,buildErrors:o,runner:i,persistSteps:a,aiGateway:c,attempt:s,contextPrefix:m,priorAgentSessionId:p}=e;Oe.log(`Running inference stage, attempt ${s} of ${Wt}`);let u=await A(Qe(),"inference-stage",async g=>{g?.setAttributes({"inference.attempt":s||1}),yt();let{prompt:h}=await A(Qe(),"compose-prompt",async()=>await qt({cliPath:t,config:r,buildErrorContext:un(o),netlify:n})),I=`
131
- ${m||""}
129
+ Use the following file for the complete context of the ask, the environment, and what's available. ${a} You MUST READ ALL OF IT. Make sure to read it first. Never cite or paraphrase private context.
130
+ `),I};var gn=_("prompt"),Xt=async({cliPath:e,config:t,netlify:r,buildErrorContext:n})=>{let o=await Jt({cliPath:e,config:t,netlify:r,buildErrorContext:n});return process.env.AGENT_RUNNER_DEBUG&&gn.log("Contextful Prompt:",o),{prompt:o}};var $e=_("inference_stage"),zt=5,ke=async e=>{let{cliPath:t,config:r,context:n,buildErrors:o,runner:i,persistSteps:s,aiGateway:u,attempt:a,contextPrefix:f,priorAgentSessionId:p}=e;$e.log(`Running inference stage, attempt ${a} of ${zt}`);let c=await S(rt(),"inference-stage",async m=>{m?.setAttributes({"inference.attempt":a||1}),wt();let{prompt:h}=await S(rt(),"compose-prompt",async()=>await Xt({cliPath:t,config:r,buildErrorContext:mn(o),netlify:n})),x=`
131
+ ${f||""}
132
132
  ${h}
133
- `.trim(),w={...r,prompt:I},f=await A(Qe(),`run-${r.runner}`,async()=>await i({aiGateway:c,config:w,netlify:n,persistSteps:a,continueSession:!!(s&&s>1),priorAgentSessionId:p}));return f.result&&(f.result=W(f.result)),f.error&&(f.error=W(f.error)),await a.flush(),f});if(u.error){if(Oe.error("Runner failed",{stepsCount:u.steps.length,duration:u.duration,error:u.error,isRetryableError:u.isRetryableError,attempt:s||1,agentSessionId:u.agentSessionId}),u.isRetryableError&&(!s||s<Wt))return Oe.log("Retrying inference stage"),await new Promise(h=>setTimeout(h,5e3)),{runnerResult:(await Fe({...e,attempt:(s||1)+1,priorAgentSessionId:u.agentSessionId,contextPrefix:u.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw Oe.log("Do not retry inference stage"),new Error(u.error)}return{runnerResult:u}},un=e=>!e||e.length===0?"":`
133
+ `.trim(),I={...r,prompt:x},g=await S(rt(),`run-${r.runner}`,async()=>await i({aiGateway:u,config:I,netlify:n,persistSteps:s,continueSession:!!(a&&a>1),priorAgentSessionId:p}));return g.result&&(g.result=V(g.result)),g.error&&(g.error=V(g.error)),await s.flush(),g});if(c.error){if($e.error("Runner failed",{stepsCount:c.steps.length,duration:c.duration,error:c.error,isRetryableError:c.isRetryableError,attempt:a||1,agentSessionId:c.agentSessionId}),c.isRetryableError&&(!a||a<zt))return $e.log("Retrying inference stage"),await new Promise(h=>setTimeout(h,5e3)),{runnerResult:(await ke({...e,attempt:(a||1)+1,priorAgentSessionId:c.agentSessionId,contextPrefix:c.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw $e.log("Do not retry inference stage"),new Error(c.error)}return{runnerResult:c}},mn=e=>!e||e.length===0?"":`
134
134
  Deploy failed failed. Here are the errors to review on the latest build:
135
135
 
136
136
  Below are all of the logs with potential issues that we extracted. Some of them may be false positives, discern them carefully and ensure fixes are relevant.
137
137
 
138
138
  ${e.pop()}
139
- `;import fn from"process";import{getTracer as et}from"@netlify/otel";import{getTracer as dn}from"@netlify/otel";var _e=y("deploy"),Vt=async e=>await A(dn(),"create-preview-deploy",async t=>pn(e,t)),pn=async({netlify:e,hasRepo:t,skipBuild:r,message:n="Agent Preview",deploySubdomain:o,cliPath:i,filter:a},c)=>{try{let s=["deploy","--message",`"${n}"`,"--json","--draft","--verbose"];t||(_e.log("Deploy: Uploading source zip"),s.push("--upload-source-zip")),o&&s.push("--alias",o),a&&s.push("--filter",a),r?(_e.log("Deploy: Skipping build"),s.push("--no-build")):s.push("--context","deploy-preview");let m=i||"netlify";_e.log(`Running: ${m} ${s.join(" ")}`),c?.setAttributes({cmd:m,args:s});let p=await e.utils.run(m,s,{stdio:["ignore","pipe","pipe"]}),u=JSON.parse(p.stdout.trim());c?.setAttributes({success:!0,deployId:u.deploy_id,deployUrl:u.deploy_url,siteId:u.site_id}),_e.log(`
140
- Preview deploy created successfully:`,{deployId:u.deploy_id,deployUrl:u.deploy_url,siteId:u.site_id});let g={deployId:u.deploy_id,previewUrl:u.deploy_url,logsUrl:u.logs,siteId:u.site_id};return t||(g.sourceZipFilename=u.source_zip_filename),g}catch(s){throw _e.error("Failed to create preview deploy via CLI:",s),c?.setAttributes({success:!1,error:s.message}),s}};var Ee=y("deploy_stage"),tt=async e=>await A(et(),"run-deploy-stage",async()=>gn(e)),gn=async({cliPath:e,config:t,context:r,result:n,filter:o})=>{let i=await A(et(),"get-runner-diffs",async()=>await bt({config:t}));if(Ee.info("Resolved git",{hasChanges:i.hasChanges,ignored:i.ignored??[]}),!i.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:a,resultDiff:c,diffBinary:s,resultDiffBinary:m}=i,p=!0;Ee.log("Preview deploy condition check:",{resultUndefined:n===void 0,resultType:typeof n,hasChanges:p,wouldCreatePreview:n!==void 0&&p});let u=null;if(n!==void 0&&p)try{let g;try{let h=await A(et(),"get-runner-session",async()=>await pt(t.id,t.sessionId));h?.title&&(g=h.title)}catch(h){Ee.warn("Failed to fetch session title, using fallback message:",h.message)}await q(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),u=await Vt({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:g,skipBuild:!1,deploySubdomain:Rt(t.id,fn.env.SITE_NAME),filter:o})}catch(g){return Ee.warn("Failed to create preview deploy (continuing with agent run):",g),{diff:a,resultDiff:c,hasChanges:p,previewInfo:null,diffBinary:s,resultDiffBinary:m,deployError:g instanceof Error?g.message:String(g)}}return Ee.log("Git status",{hasDiff:!!a,hasChanges:p}),{diff:a,resultDiff:c,hasChanges:p,previewInfo:u,diffBinary:s,resultDiffBinary:m}};import{getTracer as ke}from"@netlify/otel";async function Jt(e,t){let{maxRetries:r,baseDelay:n,onRetry:o}=t,i;for(let a=1;a<=r;a++)try{return await e()}catch(c){if(i=c,a===r)throw i;o&&o(a,i),await new Promise(s=>setTimeout(s,n*a))}throw i}var $e=class{scanDiffForForms(t){let r=[],n=null,o=[],i=t.split(`
141
- `);for(let a of i)if(a.startsWith("diff --git")){if(n&&o.length>0){let s=this.containsNetlifyForm(o,n);s&&r.push(s)}let c=a.split(" ");n=c[c.length-1].replace(/^b\//,""),o=[]}else a.startsWith("+")&&!a.startsWith("+++")&&o.push(a.slice(1));if(n&&o.length>0){let a=this.containsNetlifyForm(o,n);a&&r.push(a)}return{detected:r.length>0,matches:r}}containsNetlifyForm(t,r){let n=t.join(`
142
- `),o=[{pattern:/<[\w-]*form[\s\S]*?(data-)?netlify/i,name:"standard form element"},{pattern:/<[A-Z][\w]*[\s\S]*?component\s*=\s*["']form["'][\s\S]*?(data-)?netlify/i,name:"component with form prop"}];for(let{pattern:i,name:a}of o){let c=n.match(i);if(c){let s=c.index||0,m=Math.max(0,s-20),p=Math.min(n.length,s+c[0].length+20),u=n.slice(m,p).trim();return u=u.replace(/\s+/g," "),u.length>100&&(u=u.slice(0,97)+"..."),{file:r,snippet:`[${a}] ${u}`}}}return null}};var b=y("cleanup_stage"),Xt=async e=>await A(ke(),"cleanup-stage",async()=>mn(e)),rt=1024*1024*10,mn=async({config:e,diff:t,result:r,duration:n,resultDiff:o,diffBinary:i,resultDiffBinary:a,previewInfo:c})=>{let s={result:r||"Done",duration:n};c&&c.deployId&&(s.deploy_id=c.deployId),c&&c.sourceZipFilename&&(s.result_zip_file_name=c.sourceZipFilename);let m=t||i||o||a;if(m){let p=new $e,u=t||i||"",g=p.scanDiffForForms(u);g.detected?(b.log("Detected Netlify form(s) in diff:"),g.matches.forEach(({file:h,snippet:I})=>{b.log(` - ${h}: ${I}`)}),s.has_netlify_form=!0):b.log("Did not detect Netlify form(s) in diff"),b.log("Did not detect Netlify form(s) in diff")}if(m)try{b.log("Getting pre-signed URLs for diff upload");let p=await gt(e.id,e.sessionId),u=[];(t||i)&&u.push(Me(p.result.upload_url,i||t).then(()=>{s.result_diff_s3_key=p.result.s3_key,b.log("Successfully uploaded result_diff to S3")})),(o||a)&&u.push(Me(p.cumulative.upload_url,a||o).then(()=>{s.cumulative_diff_s3_key=p.cumulative.s3_key,b.log("Successfully uploaded cumulative_diff to S3")})),b.log(`Uploading ${u.length} diff(s) to S3 in parallel`),await Promise.all(u),(o||a)&&(b.log("Updating agent runner with cumulative diff S3 key"),await A(ke(),"update-runner",async()=>{await me(e.id,{result_diff_s3_key:p.cumulative.s3_key})}))}catch(p){b.error("S3 upload failed, falling back to inline diffs:",p);let u=Buffer.byteLength(t||i||""),g=Buffer.byteLength(a||o||"");if(u>rt||g>rt){let h=`Diffs exceed maximum inline size of ${rt} bytes.`;throw b.error(h),new Error(h)}s.result_diff=t,s.result_diff_binary=i,(o||a)&&(s.cumulative_diff=o,s.cumulative_diff_binary=a,b.log("Updating agent runner with inline diffs (fallback)"),await A(ke(),"update-runner",async()=>{await me(e.id,{result_diff:o,result_diff_binary:a})}))}else b.log("No diffs to upload");return b.log("Updated agent runner with result"),await Jt(async()=>await A(ke(),"update-runner-session",()=>q(e.id,e.sessionId,s)),{maxRetries:3,baseDelay:1e3,onRetry:(p,u)=>{b.error(`Error updating agent runner session (attempt ${p}):`,u),b.log("Retrying...")}}),b.log("Finished updating agent runner with result"),{sessionUpdate:s}};import{getTracer as zt,shutdownTracers as yn,withActiveSpan as Zt}from"@netlify/otel";var _n=hn(import.meta.url),Qt=_n("../package.json"),er=y("pipeline_index"),De=3,tr=async({config:e,apiToken:t,cliPath:r="netlify",cwd:n,errorLogsPath:o,filter:i,tracing:a={}})=>{let c,{withStageTimer:s}=_t(Q.timeUnits.hours(4)),m=await lt(Qt.version,e.id,a);try{await Zt(zt(),"run-pipeline",{},m,async()=>{let{aiGateway:p,context:u,persistSteps:g,runner:h,sha:I}=await s("init",()=>Ht({config:e,apiToken:t,cliPath:r,cwd:n,errorLogsPath:o,filter:i,runnerVersion:Qt.version}),Q.timeUnits.minutes(10));c=h.clean,e.sha=I;let{runnerResult:w}=await s("inference",()=>Fe({cliPath:r,config:e,context:u,runner:h.runner,persistSteps:g,aiGateway:p}));await q(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let f=await 
s("deploy",()=>tt({cliPath:r,config:e,context:u,result:w.result,filter:i})),v=w,x=[];if(f.hasChanges&&f.deployError){x.push(ct(f.deployError));let R=1;for(;R<=De&&!f.previewInfo;)er.log(`Deploy attempt had errors. Retrying. ${R}/${De}`),await Zt(zt(),"deploy-stage",async _=>{_?.setAttributes({"stage.attempt":R});let{runnerResult:d}=await s(`inference-retry-${R}`,()=>Fe({cliPath:r,config:e,context:u,runner:h.runner,persistSteps:g,aiGateway:p,buildErrors:x,priorAgentSessionId:w.agentSessionId}));v={...d,steps:[...v.steps||[],...d.steps||[]],duration:(v.duration||0)+(d.duration||0)},f=await s(`deploy-retry-${R}`,()=>tt({cliPath:r,config:e,context:u,result:d.result,filter:i})),f.deployError&&x.push(f.deployError),R++});R>De&&!f.previewInfo&&console.warn(`Deploy validation failed after ${De} attempts`)}let{diff:k,resultDiff:P,previewInfo:O,diffBinary:S,resultDiffBinary:T}=f;await s("cleanup",()=>Xt({config:e,diff:k,result:v.result,duration:v.duration,resultDiff:P,diffBinary:S,resultDiffBinary:T,previewInfo:O}),Q.timeUnits.minutes(10)),process.env.NETLIFY_LOCAL_MODE||(await c?.(),await Ot())})}catch(p){er.error("Got error while running pipeline",p),await c?.();let u=p instanceof Error&&p.message;throw await q(e.id,e.sessionId,{result:u||"Encountered error when running agent",state:"error"}),p}finally{await yn()}};import rr from"crypto";var $=y("bin_local"),K=En(C.argv.slice(2),{string:["cwd","cli-path","filter","prompt","runner","model","netlify-api-token"],boolean:["verbose","help"],alias:{h:"help",v:"verbose"}}),ot=()=>{console.log(`
139
+ `;import _n from"process";import{getTracer as nt}from"@netlify/otel";import{getTracer as hn}from"@netlify/otel";var we=_("deploy"),Zt=async e=>await S(hn(),"create-preview-deploy",async t=>yn(e,t)),yn=async({netlify:e,hasRepo:t,skipBuild:r,message:n="Agent Preview",deploySubdomain:o,cliPath:i,filter:s},u)=>{try{let a=["deploy","--message",`"${n}"`,"--json","--draft","--verbose"];t||(we.log("Deploy: Uploading source zip"),a.push("--upload-source-zip")),o&&a.push("--alias",o),s&&a.push("--filter",s),r?(we.log("Deploy: Skipping build"),a.push("--no-build")):a.push("--context","deploy-preview");let f=i||"netlify";we.log(`Running: ${f} ${a.join(" ")}`),u?.setAttributes({cmd:f,args:a});let p=await e.utils.run(f,a,{stdio:["ignore","pipe","pipe"]}),c=JSON.parse(p.stdout.trim());u?.setAttributes({success:!0,deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id}),we.log(`
140
+ Preview deploy created successfully:`,{deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id});let m={deployId:c.deploy_id,previewUrl:c.deploy_url,logsUrl:c.logs,siteId:c.site_id};return t||(m.sourceZipFilename=c.source_zip_filename),m}catch(a){throw we.error("Failed to create preview deploy via CLI:",a),u?.setAttributes({success:!1,error:a.message}),a}};var xe=_("deploy_stage"),ot=async e=>await S(nt(),"run-deploy-stage",async()=>En(e)),En=async({cliPath:e,config:t,context:r,result:n,filter:o,isRetry:i})=>{let s=await S(nt(),"get-runner-diffs",async()=>await Pt({config:t,isRetry:i}));if(xe.info("Resolved git",{hasChanges:s.hasChanges,ignored:s.ignored??[]}),!s.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:u,resultDiff:a,diffBinary:f,resultDiffBinary:p}=s,c=!0;xe.log("Preview deploy condition check:",{resultUndefined:n===void 0,resultType:typeof n,hasChanges:c,wouldCreatePreview:n!==void 0&&c});let m=null;if(n!==void 0&&c)try{let h;try{let x=await S(nt(),"get-runner-session",async()=>await mt(t.id,t.sessionId));x?.title&&(h=x.title)}catch(x){xe.warn("Failed to fetch session title, using fallback message:",x.message)}await W(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),m=await Zt({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:h,skipBuild:!1,deploySubdomain:bt(t.id,_n.env.SITE_NAME),filter:o})}catch(h){return xe.warn("Failed to create preview deploy (continuing with agent run):",h),{diff:u,resultDiff:a,hasChanges:c,previewInfo:null,diffBinary:f,resultDiffBinary:p,deployError:h instanceof Error?h.message:String(h)}}return xe.log("Git status",{hasDiff:!!u,hasChanges:c}),{diff:u,resultDiff:a,hasChanges:c,previewInfo:m,diffBinary:f,resultDiffBinary:p}};import{getTracer as Le}from"@netlify/otel";async function Qt(e,t){let{maxRetries:r,baseDelay:n,onRetry:o}=t,i;for(let s=1;s<=r;s++)try{return await e()}catch(u){if(i=u,s===r)throw i;o&&o(s,i),await new Promise(a=>setTimeout(a,n*s))}throw i}var De=class{scanDiffForForms(t){let r=[],n=null,o=[],i=t.split(`
141
+ `);for(let s of i)if(s.startsWith("diff --git")){if(n&&o.length>0){let a=this.containsNetlifyForm(o,n);a&&r.push(a)}let u=s.split(" ");n=u[u.length-1].replace(/^b\//,""),o=[]}else s.startsWith("+")&&!s.startsWith("+++")&&o.push(s.slice(1));if(n&&o.length>0){let s=this.containsNetlifyForm(o,n);s&&r.push(s)}return{detected:r.length>0,matches:r}}containsNetlifyForm(t,r){let n=t.join(`
142
+ `),o=[{pattern:/<[\w-]*form[\s\S]*?(data-)?netlify/i,name:"standard form element"},{pattern:/<[A-Z][\w]*[\s\S]*?component\s*=\s*["']form["'][\s\S]*?(data-)?netlify/i,name:"component with form prop"}];for(let{pattern:i,name:s}of o){let u=n.match(i);if(u){let a=u.index||0,f=Math.max(0,a-20),p=Math.min(n.length,a+u[0].length+20),c=n.slice(f,p).trim();return c=c.replace(/\s+/g," "),c.length>100&&(c=c.slice(0,97)+"..."),{file:r,snippet:`[${s}] ${c}`}}}return null}};var b=_("cleanup_stage"),er=async e=>await S(Le(),"cleanup-stage",async()=>wn(e)),it=1024*1024*10,wn=async({config:e,diff:t,result:r,duration:n,resultDiff:o,diffBinary:i,resultDiffBinary:s,previewInfo:u})=>{let a={result:r||"Done",duration:n};u&&u.deployId&&(a.deploy_id=u.deployId),u&&u.sourceZipFilename&&(a.result_zip_file_name=u.sourceZipFilename);let f=t||i||o||s;if(f&&(a.diff_produced=!0),f){let p=new De,c=t||i||"",m=p.scanDiffForForms(c);m.detected?(b.log("Detected Netlify form(s) in diff:"),m.matches.forEach(({file:h,snippet:x})=>{b.log(` - ${h}: ${x}`)}),a.has_netlify_form=!0):b.log("Did not detect Netlify form(s) in diff"),b.log("Did not detect Netlify form(s) in diff")}if(f)try{b.log("Getting pre-signed URLs for diff upload");let p=await yt(e.id,e.sessionId),c=[];(t||i)&&c.push(Ge(p.result.upload_url,i||t).then(()=>{a.result_diff_s3_key=p.result.s3_key,b.log("Successfully uploaded result_diff to S3")})),(o||s)&&c.push(Ge(p.cumulative.upload_url,s||o).then(()=>{a.cumulative_diff_s3_key=p.cumulative.s3_key,b.log("Successfully uploaded cumulative_diff to S3")})),b.log(`Uploading ${c.length} diff(s) to S3 in parallel`),await Promise.all(c),(o||s)&&(b.log("Updating agent runner with cumulative diff S3 key"),await S(Le(),"update-runner",async()=>{await ye(e.id,{result_diff_s3_key:p.cumulative.s3_key})}))}catch(p){b.error("S3 upload failed, falling back to inline diffs:",p);let c=Buffer.byteLength(t||i||""),m=Buffer.byteLength(s||o||"");if(c>it||m>it){let h=`Diffs exceed maximum inline size of ${it} bytes.`;throw b.error(h),new Error(h)}a.result_diff=t,a.result_diff_binary=i,(o||s)&&(a.cumulative_diff=o,a.cumulative_diff_binary=s,b.log("Updating agent runner with inline diffs (fallback)"),await S(Le(),"update-runner",async()=>{await ye(e.id,{result_diff:o,result_diff_binary:s})}))}else b.log("No diffs to upload");return b.log("Updated agent runner with result"),await Qt(async()=>await S(Le(),"update-runner-session",()=>W(e.id,e.sessionId,a)),{maxRetries:3,baseDelay:1e3,onRetry:(p,c)=>{b.error(`Error updating agent runner session (attempt ${p}):`,c),b.log("Retrying...")}}),b.log("Finished updating agent runner with result"),{sessionUpdate:a}};import{getTracer as tr,shutdownTracers as In,withActiveSpan as rr}from"@netlify/otel";var Tn=xn(import.meta.url),nr=Tn("../package.json"),or=_("pipeline_index"),Ue=3,ir=async({config:e,apiToken:t,cliPath:r="netlify",cwd:n,errorLogsPath:o,filter:i,tracing:s={}})=>{let u,{withStageTimer:a}=xt(ee.timeUnits.hours(4)),f=await dt(nr.version,e.id,s);try{await rr(tr(),"run-pipeline",{},f,async()=>{let{aiGateway:p,context:c,persistSteps:m,runner:h,sha:x}=await a("init",()=>Vt({config:e,apiToken:t,cliPath:r,cwd:n,errorLogsPath:o,filter:i,runnerVersion:nr.version}),ee.timeUnits.minutes(10));u=h.clean,e.sha=x;let{runnerResult:I}=await a("inference",()=>ke({cliPath:r,config:e,context:c,runner:h.runner,persistSteps:m,aiGateway:p}));await W(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let g=await 
a("deploy",()=>ot({cliPath:r,config:e,context:c,result:I.result,filter:i,isRetry:!1})),E=I,v=[];if(g.hasChanges&&g.deployError){v.push(pt(g.deployError));let T=1;for(;T<=Ue&&!g.previewInfo;)or.log(`Deploy attempt had errors. Retrying. ${T}/${Ue}`),await rr(tr(),"deploy-stage",async G=>{G?.setAttributes({"stage.attempt":T});let{runnerResult:y}=await a(`inference-retry-${T}`,()=>ke({cliPath:r,config:e,context:c,runner:h.runner,persistSteps:m,aiGateway:p,buildErrors:v,priorAgentSessionId:I.agentSessionId}));E={...y,steps:[...E.steps||[],...y.steps||[]],duration:(E.duration||0)+(y.duration||0)},g=await a(`deploy-retry-${T}`,()=>ot({cliPath:r,config:e,context:c,result:y.result,filter:i,isRetry:!0})),g.deployError&&v.push(g.deployError),T++});T>Ue&&!g.previewInfo&&console.warn(`Deploy validation failed after ${Ue} attempts`)}let{diff:R,resultDiff:P,previewInfo:O,diffBinary:k,resultDiffBinary:A}=g;await a("cleanup",()=>er({config:e,diff:R,result:E.result,duration:E.duration,resultDiff:P,diffBinary:k,resultDiffBinary:A,previewInfo:O}),ee.timeUnits.minutes(10)),process.env.NETLIFY_LOCAL_MODE||(await u?.(),await kt())})}catch(p){or.error("Got error while running pipeline",p),await u?.();let c=p instanceof Error&&p.message;throw await W(e.id,e.sessionId,{result:c||"Encountered error when running agent",state:"error"}),p}finally{await In()}};import sr from"crypto";var $=_("bin_local"),K=vn(C.argv.slice(2),{string:["cwd","cli-path","filter","prompt","runner","model","netlify-api-token"],boolean:["verbose","help"],alias:{h:"help",v:"verbose"}}),at=()=>{console.log(`
143
143
  agent-runner-cli-local - Run Netlify agent runner locally without API connections
144
144
 
145
145
  USAGE:
@@ -171,6 +171,6 @@ NOTE:
171
171
  This local mode mocks all Netlify API calls. The agent will run through
172
172
  the full pipeline including inference and deployment, but API calls will
173
173
  be logged instead of executed.
174
- `)};K.help&&(ot(),C.exit(0));K.prompt||($.error("Error: --prompt is required"),ot(),C.exit(1));K["netlify-api-token"]||($.error("Error: --netlify-api-token is required - generate a PAT from your Netlify user settings"),ot(),C.exit(1));try{let e=K.cwd||C.cwd(),t=nr.join(e,".netlify","netlify-agent-runner-context*");or.rmSync(t,{recursive:!0,force:!0});let r;try{r=await wn(e)}catch(c){$.error(c.message),$.error(`
175
- To link this directory to a Netlify site, run:`),$.error(" netlify link"),C.exit(1)}let n=`local-${rr.randomBytes(8).toString("hex")}`,o=`session-${rr.randomBytes(8).toString("hex")}`,i=K.runner||"claude";$.log("Starting agent runner in local mode",{runnerId:n,sessionId:o,siteId:r,cwd:e,runner:i});let a={id:n,sessionId:o,prompt:K.prompt,runner:i,model:K.model,accountType:"local",validateAgent:!1,validateAgentWithBuild:!1,sessionHistoryContext:[],siteContext:[],hasRepo:!0,useGateway:!0,sha:void 0,modelVersionOverrides:{}};C.env.NETLIFY_LOCAL_MODE="true",C.env.NETLIFY_API_HOST="api.netlify.com",C.env.NETLIFY_API_TOKEN=K["netlify-api-token"],C.env.SITE_ID=r,C.env.NETLIFY_TEAM_ID="local-team-id",C.env.NETLIFY_AGENT_RUNNER_USER_ID="local-user-id",C.env.SITE_NAME="local-site",i==="claude"?Se(e,"claude")||($.log("Claude CLI not found, installing..."),await nt(e,"@anthropic-ai/claude-code")):i==="gemini"?Se(e,"gemini")||($.log("Gemini CLI not found, installing..."),await nt(e,"@google/gemini-cli")):i==="codex"?Se(e,"codex")||($.log("Codex CLI not found, installing..."),await nt(e,"@openai/codex")):($.error(`Unknown runner: ${i}`),C.exit(1)),await tr({config:a,cwd:e,cliPath:K["cli-path"],filter:K.filter,tracing:{exporterUrl:void 0,traceparent:void 0}}),$.info("Finished agent (local mode)"),C.exit(0)}catch(e){$.error("Error running agent pipeline (local mode):",e),C.exit(1)}function nt(e,t){return new Promise((r,n)=>{N("npm",["install",t,"--no-save"],{cwd:e}).then(({stdout:o})=>{$.log(`${t} installed: ${o}`),r()}).catch(o=>{$.error(`Error installing ${t}: ${o.stderr||o.message}`),n(o)})})}async function wn(e){let t=nr.join(e,".netlify","state.json");try{let r=await or.readFileSync(t,"utf-8"),n=JSON.parse(r);if(!n.siteId)throw new Error(`No siteId found in ${t}. Please link this directory to a Netlify site using 'netlify link'.`);return $.log(`Found site ID from state file: ${n.siteId}`),n.siteId}catch(r){throw r.code==="ENOENT"?new Error(`No .netlify/state.json found in ${e}. Please link this directory to a Netlify site using 'netlify link'.`):r}}
174
+ `)};K.help&&(at(),C.exit(0));K.prompt||($.error("Error: --prompt is required"),at(),C.exit(1));K["netlify-api-token"]||($.error("Error: --netlify-api-token is required - generate a PAT from your Netlify user settings"),at(),C.exit(1));try{let e=K.cwd||C.cwd(),t=ar.join(e,".netlify","netlify-agent-runner-context*");lr.rmSync(t,{recursive:!0,force:!0});let r;try{r=await Rn(e)}catch(u){$.error(u.message),$.error(`
175
+ To link this directory to a Netlify site, run:`),$.error(" netlify link"),C.exit(1)}let n=`local-${sr.randomBytes(8).toString("hex")}`,o=`session-${sr.randomBytes(8).toString("hex")}`,i=K.runner||"claude";$.log("Starting agent runner in local mode",{runnerId:n,sessionId:o,siteId:r,cwd:e,runner:i});let s={id:n,sessionId:o,prompt:K.prompt,runner:i,model:K.model,accountType:"local",validateAgent:!1,validateAgentWithBuild:!1,sessionHistoryContext:[],siteContext:[],hasRepo:!0,useGateway:!0,sha:void 0,modelVersionOverrides:{}};C.env.NETLIFY_LOCAL_MODE="true",C.env.NETLIFY_API_HOST="api.netlify.com",C.env.NETLIFY_API_TOKEN=K["netlify-api-token"],C.env.SITE_ID=r,C.env.NETLIFY_TEAM_ID="local-team-id",C.env.NETLIFY_AGENT_RUNNER_USER_ID="local-user-id",C.env.SITE_NAME="local-site",i==="claude"?be(e,"claude")||($.log("Claude CLI not found, installing..."),await st(e,"@anthropic-ai/claude-code")):i==="gemini"?be(e,"gemini")||($.log("Gemini CLI not found, installing..."),await st(e,"@google/gemini-cli")):i==="codex"?be(e,"codex")||($.log("Codex CLI not found, installing..."),await st(e,"@openai/codex")):($.error(`Unknown runner: ${i}`),C.exit(1)),await ir({config:s,cwd:e,cliPath:K["cli-path"],filter:K.filter,tracing:{exporterUrl:void 0,traceparent:void 0}}),$.info("Finished agent (local mode)"),C.exit(0)}catch(e){$.error("Error running agent pipeline (local mode):",e),C.exit(1)}function st(e,t){return new Promise((r,n)=>{N("npm",["install",t,"--no-save"],{cwd:e}).then(({stdout:o})=>{$.log(`${t} installed: ${o}`),r()}).catch(o=>{$.error(`Error installing ${t}: ${o.stderr||o.message}`),n(o)})})}async function Rn(e){let t=ar.join(e,".netlify","state.json");try{let r=await lr.readFileSync(t,"utf-8"),n=JSON.parse(r);if(!n.siteId)throw new Error(`No siteId found in ${t}. Please link this directory to a Netlify site using 'netlify link'.`);return $.log(`Found site ID from state file: ${n.siteId}`),n.siteId}catch(r){throw r.code==="ENOENT"?new Error(`No .netlify/state.json found in ${e}. Please link this directory to a Netlify site using 'netlify link'.`):r}}
176
176
  //# sourceMappingURL=bin-local.js.map