@netlify/agent-runner-cli 1.52.0 → 1.54.0

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,97 +1,105 @@
- import{createRequire as on}from"module";import{createTracerProvider as Kt}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as ze}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as Wt}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as Vt}from"@netlify/otel";import{propagation as Ze,context as Qe,W3CTraceContextPropagator as Jt}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as Xt}from"@opentelemetry/exporter-trace-otlp-grpc";function _(e){let t=!process.env.VITEST;return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var Pe=_("tracing"),et=async(e,t,r)=>(await Kt({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new ze(new Oe),new ze(new Xt({url:r.exporterUrl}))],instrumentations:[new Wt({skipHeaders:!0})]}),r.traceparent?(Ze.setGlobalPropagator(new Jt),Ze.extract(Qe.active(),{traceparent:r.traceparent,isRemote:!0})):Qe.active());function N(e,t,r){return Pe.log(`\u23F3 TRACE: ${t} starting...`),Vt(e,t,r)}var Oe=class{export(t,r){for(let o of t)this.logSpan(o);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,o=t.attributes,n=[];for(let[l,i]of Object.entries(o))l.includes("duration")&&typeof i=="number"?n.push(`${l}=${i.toFixed(2)}ms`):n.push(`${l}=${i}`);let s=t.status?.code===2?"\u274C":"\u2705",a=n.length>0?` [${n.join(", ")}]`:"";Pe.log(`${s} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${a}`),t.status?.code===2&&t.status.message&&Pe.log(` \u274C Error: ${t.status.message}`)}};var zt=["error","failed","exception","fatal","panic","abort","crash"];function tt(e){let t=e.split(`
- `),r=[],o=-1,n=0;for(;n<t.length;){let l=t[n].slice(0,500).toLowerCase();if(zt.some(m=>l.includes(m))){let m=Math.max(0,n-10,o+1),u=Math.min(t.length-1,n+20),c=[];for(let f=m;f<=u;f++)c.push(t[f]);r.push(c.join(`
- `)),o=u,n=u+1}else n++}if(r.length===0)return e;let s=r.map((a,l)=>`<extracted_error_chunk order="${l+1}">
- ${a}
+ import{createRequire as sn}from"module";import{createTracerProvider as Jt}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as Qe}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as Xt}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as zt}from"@netlify/otel";import{propagation as et,context as tt,W3CTraceContextPropagator as Zt}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as Qt}from"@opentelemetry/exporter-trace-otlp-grpc";function _(e){let t=!process.env.VITEST;return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var Oe=_("tracing"),rt=async(e,t,r)=>(await Jt({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new Qe(new $e),new Qe(new Qt({url:r.exporterUrl}))],instrumentations:[new Xt({skipHeaders:!0})]}),r.traceparent?(et.setGlobalPropagator(new Zt),et.extract(tt.active(),{traceparent:r.traceparent,isRemote:!0})):tt.active());function S(e,t,r){return Oe.log(`\u23F3 TRACE: ${t} starting...`),zt(e,t,r)}var $e=class{export(t,r){for(let n of t)this.logSpan(n);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,n=t.attributes,o=[];for(let[c,i]of Object.entries(n))c.includes("duration")&&typeof i=="number"?o.push(`${c}=${i.toFixed(2)}ms`):o.push(`${c}=${i}`);let s=t.status?.code===2?"\u274C":"\u2705",l=o.length>0?` [${o.join(", ")}]`:"";Oe.log(`${s} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${l}`),t.status?.code===2&&t.status.message&&Oe.log(` \u274C Error: ${t.status.message}`)}};var er=["error","failed","exception","fatal","panic","abort","crash"];function nt(e){let t=e.split(`
+ `),r=[],n=-1,o=0;for(;o<t.length;){let c=t[o].slice(0,500).toLowerCase();if(er.some(m=>c.includes(m))){let m=Math.max(0,o-10,n+1),p=Math.min(t.length-1,o+20),u=[];for(let g=m;g<=p;g++)u.push(t[g]);r.push(u.join(`
+ `)),n=p,o=p+1}else o++}if(r.length===0)return e;let s=r.map((l,c)=>`<extracted_error_chunk order="${c+1}">
+ ${l}
  </extracted_error_chunk>`).join(`
 
- `);return s.length>e.length*.8?e:s}import ve from"process";import{getTracer as jr}from"@netlify/otel";import ae from"process";var ge=ae.env.NETLIFY_API_URL,me=ae.env.NETLIFY_API_TOKEN,j=_("api"),he=()=>ae.env.NETLIFY_LOCAL_MODE==="true",le=async(e,t={})=>{if(!ge||!me)throw new Error("No API URL or token");let r=new URL(e,ge),o={...t,headers:{...t.headers,Authorization:`Bearer ${me}`}};ae.env.AGENT_RUNNERS_DEBUG==="true"&&(o.headers["x-nf-debug-logging"]="true"),t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),s=n.ok&&n.status<=299;if(ae.env.AGENT_RUNNERS_DEBUG==="true")j.log(`Response headers for ${r}:`),n.headers.forEach((l,i)=>{j.log(` ${i}: ${l}`)});else{let l=n.headers.get("x-request-id")||n.headers.get("x-nf-request-id");j.log(`Request ID for ${r}: ${l||"N/A"}`)}if(s||j.error(`Got status ${n.status} for request ${r}`),t.raw){if(!s)throw n;return n}let a=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!s)throw a;return a},rt=e=>{j.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(ge=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(me=e.constants.NETLIFY_API_TOKEN)},nt=()=>({apiUrl:ge,token:me}),ce=async(e,t)=>he()?(j.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):le(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),Y=async(e,t,r)=>he()?(j.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):le(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var ot=async(e,t)=>he()?(j.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):le(`/api/v1/agent_runners/${e}/sessions/${t}`),st=(e,t,r)=>le(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r}}),it=async(e,t)=>he()?(j.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):le(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),Fe=async(e,t)=>{j.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var Q=_("ai_gateway"),$e=null;var at=async()=>{if($e)return $e;Q.log("Fetching available AI gateway providers");let e=await fetch(`${nt().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return $e=t,Q.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},Zt=async(e,t)=>{let o=(await at()).providers[e];if(!o)return Q.log(`Provider '${e}' not found`),!1;let n=o.models.includes(t);return Q.log(`Model validation for ${e}/${t}`,{isAvailable:n}),n},lt=async({netlify:e,config:t})=>{let r,o,n,s,a=e.constants?.SITE_ID;if(!a)throw new Error("No site id");let l=async()=>{clearTimeout(n),Q.log("Requesting AI gateway information");let i=await st(a,t.id,t.sessionId);if({token:r,url:s}=i,o=i.expires_at?i.expires_at*1e3:void 0,Q.log("Got AI gateway information",{token:!!r,expiresAt:o,url:s}),o){let 
m=o-Date.now()-6e4;m>0&&(n=setTimeout(()=>{l()},m))}};return await Promise.all([l(),at()]),{get url(){return s},get token(){return r},isModelAvailableForProvider:Zt}};import G from"process";import H from"path";import ye from"fs";import{fileURLToPath as sr}from"url";import{createRequire as ir}from"module";import{execa as ar,execaCommand as Dn}from"execa";import{Transform as Qt}from"stream";var er=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","ERROR_LOGS_PATH","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),tr=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function rr(){return Object.entries(process.env).filter(([e,t])=>!(!t||er.has(e)||tr.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function B(e){if(typeof e!="string")return e;let t=rr();if(t.length===0)return e;let r=e;return t.forEach(o=>{let n=new RegExp(nr(o),"g");r=r.replace(n,"******")}),r}function nr(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var ee=class extends Qt{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,o){let n=t.toString(),s=B(n);o(null,s)}};function ct(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(o,n,s){let a=typeof o=="string"?B(o):o;return typeof n=="function"?t(a,n):t(a,n,s)},process.stderr.write=function(o,n,s){let a=typeof o=="string"?B(o):o;return typeof n=="function"?r(a,n):r(a,n,s)}}var ue=null,ut=e=>(ue&&ue.destroy(),ue=new V({totalAllowedTime:e}),ue),pt=()=>ue;var V=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,o)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let n=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),s=null,a=null;o!==void 0&&(a=new Promise((l,i)=>{s=setTimeout(()=>{i(new Error(`${t} stage exceeded its maximum duration of ${o}ms`))},o)}));try{return a?await Promise.race([r(),a]):await r()}finally{n(),s&&clearTimeout(s)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var dt={name:"@netlify/agent-runner-cli",type:"module",version:"1.52.0",description:"CLI tool for running Netlify agents",main:"./dist/index.js",types:"./dist/index.d.ts",exports:"./dist/index.js",bin:{"agent-runner-cli":"./dist/bin.js","agent-runner-cli-local":"./dist/bin-local.js"},files:["dist/**/*.js","dist/**/*.d.ts","patches","scripts"],scripts:{build:"tsup",dev:"tsup --watch",prepare:"husky install node_modules/@netlify/eslint-config-node/.husky/",prepublishOnly:"npm ci && npm test",prepack:"npm run build",test:"run-s build format test:dev",format:"run-s build format:check-fix:*","format:ci":"run-s build format:check:*","format:check-fix:lint":"run-e format:check:lint format:fix:lint","format:check:lint":"cross-env-shell eslint $npm_package_config_eslint","format:fix:lint":"cross-env-shell eslint --fix $npm_package_config_eslint","format:check-fix:prettier":"run-e format:check:prettier format:fix:prettier","format:check:prettier":"cross-env-shell prettier --check $npm_package_config_prettier","format:fix:prettier":"cross-env-shell prettier --write $npm_package_config_prettier","test:dev":"run-s build test:dev:*","test:ci":"run-s build test:ci:*","test:dev:vitest":"vitest","test:ci:vitest":"c8 -r lcovonly -r text -r json vitest",postinstall:"node scripts/postinstall.js"},config:{eslint:'--cache --format=codeframe --max-warnings=0 "{src,scripts,test,.github}/**/*.{js,ts,md,html}"',prettier:'--ignore-path .gitignore --loglevel=warn "{src,scripts,test,.github}/**/*.{js,ts,md,yml,json,html}" "*.{js,ts,yml,json,html}" ".*.{js,ts,yml,json,html}" "!**/package-lock.json" "!package-lock.json"'},keywords:[],license:"MIT",repository:"netlify/agent-runner-cli",bugs:{url:"https://github.com/netlify/agent-runner-cli/issues"},author:"Netlify 
Inc.",directories:{test:"test"},devDependencies:{"@commitlint/cli":"^19.0.0","@commitlint/config-conventional":"^19.0.0","@eslint/compat":"^1.3.2","@eslint/js":"^9.35.0","@netlify/eslint-config-node":"^7.0.1","@types/node":"^24.5.0","@typescript-eslint/eslint-plugin":"^7.1.0","@typescript-eslint/parser":"^7.1.0","@vitest/eslint-plugin":"^1.3.10",c8:"^9.0.0","eslint-config-prettier":"^10.1.8","eslint-plugin-n":"^17.0.0",husky:"^8.0.0","patch-package":"^8.0.0",tsup:"^8.5.0",typescript:"^5.0.0","typescript-eslint":"^8.44.0",vitest:"^1.5.0"},dependencies:{"@anthropic-ai/claude-code":"2.0.42","@google/gemini-cli":"0.16.0","@netlify/otel":"^5.0.0","@opentelemetry/exporter-trace-otlp-grpc":"^0.57.0",execa:"^8.0.0","get-port":"^5.1.1",minimist:"^1.2.8","my-codex-no-sandbox":"^0.1.2505290819"}};var lr=sr(import.meta.url),cr=H.dirname(lr),ur=ir(import.meta.url),_e=_("shell"),De=new Set,pr={preferLocal:!0},F=(e,t,r)=>{let[o,n]=dr(t,r),s={...pr,...n},a=ar(e,o,s);return fr(a,s),mr(a),a};var dr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},fr=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(G.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new ee).pipe(G.stdout),e.stdout?.pipe(new ee).pipe(G.stdout),e.stderr?.pipe(new ee).pipe(G.stderr);return}e.stdout?.pipe(G.stdout),e.stderr?.pipe(G.stderr)},ft=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(G.kill(-e.pid,t),_e.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return _e.error("Error killing process:",r),!1}},gr=e=>ft(e,"SIGKILL"),mr=e=>{De.add(e);let t=pt();if(t){let r=t.onTimesUp(()=>{_e.log(`Global timer expired, killing process ${e.pid}`),ft(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(_e.log(`Force killing process ${e.pid} after timeout`),gr(e))},5e3)});e.on("exit",()=>{De.delete(e),r()}),e.on("error",()=>{De.delete(e),r()})}};function te(e,t){if(!G.env.NETLIFY_LOCAL_MODE)try{let n=ur.resolve(dt.name),s=H.dirname(n);for(;s!==H.dirname(s);){let a=H.dirname(s);if(H.basename(a)==="node_modules"){let l=H.join(a,".bin",t);if(ye.existsSync(l))return l;break}s=a}}catch(n){console.error("Could not resolve package.json",n)}if(G.env.NODE_PATH){let n=H.join(G.env.NODE_PATH,".bin",t);if(ye.existsSync(n))return n}let r=H.join(e,"node_modules",".bin",t);if(ye.existsSync(r))return r;let o=H.join(cr,"..","node_modules",".bin",t);if(ye.existsSync(o))return o}var gt="netlify-agent-runner-context.md",ke="task-history",Le="netlify-context",k=".netlify",re="results.md",Ue="assets";var hr=_("utils"),yr=e=>new Promise(t=>{setTimeout(t,e)}),mt=(e,t=3e3)=>{let r=!1,o=null,n=[],s=null,a=(...l)=>{if(r)return o=l,new Promise(u=>{n.push(u)});r=!0;let i,m=new Promise(u=>{i=u});return s=(async()=>{await Promise.resolve();let u=await e(...l);for(i(u);;){if(await yr(t),!o)return r=!1,s=null,u;let c=o,f=n;o=null,n=[],u=await e(...c),f.forEach(h=>{h(u)})}})(),m};return a.flush=async()=>{if((r||o)&&s)return await s,a.flush()},a},Ee=(e,t,r=!1)=>{let o=null,n=null,s=null,a=function(...l){n=l,s=this;let i=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(s,n),n=null,s=null)},t),i&&(e.apply(s,n),n=null,s=null)};return a.cancel=()=>{clearTimeout(o),o=null,n=null,s=null},a.flush=()=>{if(o){clearTimeout(o);let l=n,i=s;o=null,n=null,s=null,e.apply(i,l)}},a},ht=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(o){t&&(r?.error?r.error("Could not parse JSON",o):hr.error("Could not parse JSON",o))}},yt=(e,t)=>{let n=".netlify.app",s="agent-";if(!t)return`${s}${e.slice(0,6)}`;let 
l=`--${t}${n}`;if(l.length>55)return"";let i=60-l.length;if(i<=0)return"";if(i>=s.length+6){let m=Math.min(i-s.length,e.length);return`${s}${e.slice(0,m)}`}return e.slice(0,i)};import{Buffer as _t}from"buffer";import _r from"path";var Et=_("repo"),wt=async({config:e})=>{Et.info("Getting runner diffs");let t=await wr(),{hasChanges:r}=t,{status:o}=t;if(!r)return{hasChanges:!1};let n=Tr(o);await xr(n),Et.info("Changes after processing"),await je();let s=await Ge(o);await Me(s);let a={stdio:["ignore","pipe","pipe"]},i=(await F("git",["diff","--staged"],a)).stdout;if(r=!!i,!r)return{hasChanges:!1,ignored:s};let u=(await F("git",["diff","--staged","--binary"],a)).stdout,c,f;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await F("git",["commit","-m","Agent runner"]),c=(await F("git",["diff",e.sha,"HEAD"],a)).stdout;let g=(await F("git",["diff",e.sha,"HEAD","--binary"],a)).stdout;c!==g&&(f=_t.from(g).toString("base64"))}let h={hasChanges:!0,diff:i,resultDiff:c,ignored:s};return i!==u&&(h.diffBinary=_t.from(u).toString("base64")),f&&(h.resultDiffBinary=f),h},Me=async(e=[])=>{await F("git",["add",".",...e])},je=async()=>(await F("git",["status","-s"])).stdout,Tt=/.. (.+)?\.log$/,Er=[Tt],wr=async()=>{let e=await je();return{hasChanges:(e.trim().length===0?[]:e.split(`
- `).filter(o=>Er.some(s=>s instanceof RegExp?s.test(o):o===s)?!1:o[1]?.trim()!=="")).length!==0,status:e}},xt=async()=>{let{stdout:e}=await F("git",["rev-parse","HEAD"]);return e.trim()},It=async()=>{let{stdout:e}=await F("git",["rev-list","--max-parents=0","HEAD"]);return e.trim()},Ge=async e=>{e||=await je();let t=[".netlify","node_modules"],r=[];return e.split(`
- `).forEach(o=>{t.forEach(s=>{[`?? ${s}`,`?? ${s}${_r.sep}`].some(l=>o.startsWith(l))&&r.push(`:!${s}`)});let n=o.match(Tt)?.[1];n&&r.push(`:!${n}.log`)}),r},vt=async()=>{await F("git",["reset","--hard","HEAD"])},Tr=e=>{let t=e.split(`
- `).reduce((r,o)=>{if(!o)return r;let[n,s,,...a]=o,l=a.join(""),i=n.trim(),m=s.trim();return r[l]?r[l].change=m:r[l]={filePath:l,stage:i,change:m},r},{});return Object.values(t)},xr=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push(F("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import vr from"fs/promises";import Rr from"os";import At from"path";import J from"process";import Sr from"readline";import Ye from"path";import Ir from"fs/promises";var Be=_("agent-output-utils");async function ne({initialResult:e,agentName:t,hasError:r}){let o="",n=Ye.join(process.cwd(),k,re);try{let s=await Ir.readFile(n,"utf-8");s&&(o=s,Be.log(`Pulled result from ${Ye.relative(process.cwd(),n)}`))}catch{Be.log(`No results file found at ${Ye.relative(process.cwd(),n)}`)}return o||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function oe({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,o=r?.replace(/\s+/g," ").trim().toLowerCase()||"",n="";return o?.includes("ai gateway is not available for your account")||o?.includes("ai gateway is not enabled for your account")?n="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":o?.includes("error when talking to gemini api")?n="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(o?.includes("connection closed prematurely")||o?.includes("499")&&t.toLowerCase().includes("gemini"))&&(n=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),o?.includes("request timed out")&&(n=`The ${t} API request's have timed out. Please try again or use a different available agent.`),o?.includes("network error")&&(n=`The ${t} agent is having network issues. 
Please try again or use a different available agent.`),n&&Be.log(`Providing updated error messsage: ${n}, replacing original error: ${r}`),n||r||void 0}function se(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error"))}var L=_("runner_claude"),Rt="Claude Code",we="claude-sonnet-4-5-20250929",St=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,Ar=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(L.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(L.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(L.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function He({config:e,netlify:t,persistSteps:r,aiGateway:o,continueSession:n,priorAgentSessionId:s}){let a=e,{accountType:l,prompt:i,modelVersionOverrides:m}=a,{model:u}=a,c="";if(o){let{token:y,url:d}=o;if(!y||!d)throw new Error("No token or url provided from AI Gateway");if(m?.claude){let p=m?.claude?.[l];if(p){if(!await o.isModelAvailableForProvider("anthropic",p))throw new Error(`Model override '${p}' is not available for anthropic provider`);u=p}}else if(u){if(!await o.isModelAvailableForProvider("anthropic",u))throw new Error(`Model '${u}' is not available for anthropic provider`)}else await o.isModelAvailableForProvider("anthropic",we)?(u=we,L.log(`Using default model: ${we}`)):L.log(`Default model ${we} is not available, proceeding without model specification`);J.env.ANTHROPIC_API_KEY=y,J.env.ANTHROPIC_BASE_URL=d}else if(!J.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let f=[],h=[],w={},T=0,g=0,v,I,b=[te(J.cwd(),"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose",...u?["--model",u]:[],...n?["--continue"]:[],...n&&s?["--resume",s]:[],"-p",i],x=`${J.env.NVM_BIN}/node`;L.log(`Running ${x} ${b.join(" ")}`);let E=t.utils.run(x,b,{all:!0,env:J.env});E.stdin?.end();let R=Ee(()=>{r?.({steps:f,duration:g})},250),S=(y,d)=>{let p={...y,id:T};T+=1,h.push(p),f.push(p),d||R.flush(),R(),d&&R.flush()},A=Sr.createInterface({input:E.all});return A.on("error",y=>{L.error("Readline interface error",{error:y.message,stack:y.stack})}),A.on("line",y=>{let d=null;try{d=JSON.parse(y)}catch{L.log("Could not parse line",y)}d?.session_id&&d.session_id!==c&&(c=d.session_id),Array.isArray(d?.message?.content)?d.message.content.forEach(p=>{switch(p.type){case"text":{p.text&&S({message:p.text});break}case"image":{typeof p.source=="object"&&p.source&&p.source.type==="base64"&&p.source.media_type?S({message:`![](data:${p.source.media_type};base64,${p.source.data})`}):L.log(`Unsupported image type 
${p.source?.type}`,p.source);break}case"tool_use":{if(p.name==="Task"){let O=p.input?.description&&`\`${p.input.description}\``;S({title:[St(p.name),O].filter(Boolean).join(" ")})}else p.id&&(w[p.id]=p);R.flush();break}case"tool_result":{let O=p.tool_use_id?w[p.tool_use_id]:void 0,Z;if(O){let q=O.input?.file_path&&At.relative(J.cwd(),O.input.file_path),P=q&&`\`${q}\``;Z=[St(O.name||""),P].filter(Boolean).join(" ")}let fe=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(O?.name||""),W;if(typeof p.content=="string")W=p.content;else if(Array.isArray(p.content)){let q=[];p.content.forEach(P=>{P?.type==="text"&&typeof P.text=="string"?q.push(P.text):P?.type==="image"&&typeof P.source=="object"&&P.source?P.source.type==="base64"&&P.source.media_type?q.push(`![](data:${P.source.media_type};base64,${P.source.data})`):L.log(`Unsupported image type ${P.source.type}`,P.source):L.log(`Unsupported block type ${P?.type}`)}),W=q.join(`
-
- `)}fe&&W&&(W=`\`\`\`
- ${W.trim()}
- \`\`\``),S({title:Z,message:W},!0);break}case"thinking":{p.thinking&&S({title:"Thinking",message:p.thinking},!0);break}default:L.log(`Message content type is not supported ${p.type}`,p)}}):d?.type==="result"&&(g=d.duration_ms||0,d.is_error?I=d.result:v=d.result,[h,f].forEach(p=>{p[p.length-1]?.message===v&&p.pop()}))}),await E.catch(y=>{({error:I,result:v}=Ar({catchError:y,runCmd:E,error:I,result:v,runnerName:"Claude"}))}),A.close(),R.flush(),{steps:h,duration:g,result:await ne({initialResult:v,agentName:Rt,hasError:!!I}),error:oe({error:I,agentName:Rt}),isRetryableError:se(I),agentSessionId:c}}var bt=async()=>{let e=At.join(Rr.homedir(),".claude");await vr.rm(e,{recursive:!0,force:!0})};import br from"fs/promises";import Nr from"os";import Cr from"path";import ie from"process";import Pr from"readline";var X=_("runner_codex"),Nt="Codex CLI",Or=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(X.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(X.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(X.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function qe({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:s,prompt:a,modelVersionOverrides:l}=e,{model:i}=e;if(n){let{token:x,url:E}=n;if(!x||!E)throw new Error("No token or url provided from AI Gateway");if(l?.codex){let R=l?.codex?.[s];if(R){if(!await n.isModelAvailableForProvider("openai",R))throw new Error(`Model override '${R}' is not available for openai provider`);i=R}}else if(i&&!await n.isModelAvailableForProvider("openai",i))throw new Error(`Model '${i}' is not available for openai provider`);ie.env.OPENAI_API_KEY=x,ie.env.OPENAI_BASE_URL=E}else if(!ie.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let m=[],u=[],c={},f=0,h=0,w,T,g=[te(ie.cwd(),"codex"),"--approval-mode","full-auto",n?"--disable-response-storage":void 0,"--dangerously-auto-approve-everything",...i?["--model",i]:[],"-q",a].filter(Boolean),v=`${ie.env.NVM_BIN}/node`;X.log(`Running ${v} ${g.join(" ")}`);let I=t.utils.run(v,g,{all:!0,env:{...ie.env,CODEX_UNSAFE_ALLOW_NO_SANDBOX:"1"}}),b=Pr.createInterface({input:I.all});return b.on("error",x=>{X.error("Readline interface error",{error:x.message,stack:x.stack})}),b.on("line",x=>{let E=null;try{E=JSON.parse(x)}catch{X.log("Could not parse line",x);return}let R=[],S=!1;if(E?.duration_ms&&(h=E.duration_ms,S=!0),E?.type==="local_shell_call")c[E.call_id]=E;else if(E?.type==="local_shell_call_output"){let A=$r(c[E.call_id],E);A&&(A.id=f,f+=1,u.push(A),m.push(A),R.push(A),S=!0)}else E?.type==="message"&&E.role==="assistant"?w=E.content.map(A=>A.text).join(`
- `):E?.type==="message"&&E.role==="system"&&(T=E.content.map(A=>A.text).join(`
- `));S&&(r?.({steps:m,duration:h}),o?.({steps:R,duration:h}))}),await I.catch(x=>{let E=Or({catchError:x,runCmd:I,error:T,result:w,runnerName:"Codex"});T=E.error,w=E.result}),b.close(),{steps:u,duration:h,result:await ne({initialResult:w,agentName:Nt,hasError:!!T}),error:oe({error:T,agentName:Nt}),isRetryableError:se(T)}}var Ct=async()=>{let e=Cr.join(Nr.homedir(),".codex");await br.rm(e,{recursive:!0,force:!0})},Fr=new Set(["bash","-lc"]),$r=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(s=>!Fr.has(s)),o=r?`Running \`${r.join(" ")}\``:void 0,n;try{n=JSON.parse(t.output).output?.trim(),n&&(n=`\`\`\`
- ${n.trim()}
- \`\`\``)}catch(s){X.error("Could not decode outputMsg",s,t.output)}return{title:o,message:n}};import xe from"fs/promises";import Ot from"os";import Ie from"path";import z from"process";import Dr from"readline";var U=_("runner_gemini"),Pt="Gemini CLI",Te="gemini-2.5-pro",kr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(U.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(U.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(U.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),Lr={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"},Ur=async()=>{let e=Ie.join(Ot.homedir(),".gemini"),t=Ie.join(e,"settings.json");try{await xe.mkdir(e,{recursive:!0});let r={};try{let o=await xe.readFile(t,"utf-8");r=JSON.parse(o)}catch{U.log("Creating new Gemini CLI settings file")}r.general||(r.general={}),r.general.previewFeatures||(r.general.previewFeatures=!0),r.model||(r.model={}),r.model.compressionThreshold!==.3&&(r.model.compressionThreshold=.3),await xe.writeFile(t,JSON.stringify(r,null,2),"utf-8"),U.log("Configured Gemini CLI settings (preview features and compression threshold)")}catch(r){U.error("Failed to ensure Gemini CLI settings",{error:r.message})}};async function Ke({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:s,prompt:a,modelVersionOverrides:l}=e,{model:i}=e;if(await Ur(),n){let{token:y,url:d}=n;if(!y||!d)throw new Error("No token or url provided from AI Gateway");if(l?.gemini){let p=l?.gemini?.[s];if(p){if(!await n.isModelAvailableForProvider("gemini",p))throw new Error(`Model override '${p}' is not available for gemini provider`);i=p}}if(!i)await n.isModelAvailableForProvider("gemini",Te)?(i=Te,U.log(`Using default model: ${Te}`)):U.log(`Default model ${Te} is not available, proceeding without model specification`);else if(i&&!l?.gemini?.[s]&&!await n.isModelAvailableForProvider("gemini",i))throw new Error(`Model '${i}' is not available for gemini provider`);z.env.GEMINI_API_KEY=y,z.env.GOOGLE_GEMINI_BASE_URL=d}else if(!z.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let m=[],u=[],c=[],f={},h=0,w=0,T,g,v=[te(z.cwd(),"gemini"),...i?["--model",i]:[],"--yolo","--output-format","stream-json","-p",a],I=`${z.env.NVM_BIN}/node`;U.log(`Running ${I} ${v.join(" ")}`);let b=t.utils.run(I,v,{all:!0,env:z.env});b.stdin?.end();let x=Ee(()=>{r?.({steps:m,duration:w}),o?.({steps:u,duration:w}),u=[]},250),E=(y,d)=>{y.id=h,h+=1,c.push(y),m.push(y),u.push(y),d||x.flush(),x(),d&&x.flush()},R=Dr.createInterface({input:b.all});R.on("error",y=>{U.error("Readline interface error",{error:y.message,stack:y.stack})});let S="",A=()=>{S&&E({message:S.trim()}),S=""};return R.on("line",y=>{let d=null;try{if(y.startsWith("[API Error")){let p=y.match(/\[api error: (.+?)]$/i)?.[1];d={type:"error",value:ht(p,!1)?.error?.message||p||"Gemini encountered error"}}else 
d=JSON.parse(y)}catch{return}if(d)switch(["message","result"].includes(d.type)||A(),d.type){case"message":{d.role!=="user"&&d.content&&(S+=d.content);break}case"tool_use":{let p=Lr[d.tool_name]??d.tool_name,O=d.parameters?.file_path,Z=O&&Ie.relative(z.cwd(),O),fe=d.parameters?.command,q={title:[p,Z&&`\`${Z}\``,fe&&`\`${fe}\``].filter(Boolean).join(" ")};f[d.tool_id]=q,x.flush();break}case"tool_result":{let p=f[d.tool_id];p&&(d.output&&(p.message=`\`\`\`
+ `);return s.length>e.length*.8?e:s}import Se from"process";import{getTracer as Gr}from"@netlify/otel";import le from"process";var he=le.env.NETLIFY_API_URL,ye=le.env.NETLIFY_API_TOKEN,j=_("api"),_e=()=>le.env.NETLIFY_LOCAL_MODE==="true",ce=async(e,t={})=>{if(!he||!ye)throw new Error("No API URL or token");let r=new URL(e,he),n={...t,headers:{...t.headers,Authorization:`Bearer ${ye}`}};le.env.AGENT_RUNNERS_DEBUG==="true"&&(n.headers["x-nf-debug-logging"]="true"),t.json&&(n.headers||={},n.headers["Content-Type"]="application/json",n.body=JSON.stringify(t.json));let o=await fetch(r,n),s=o.ok&&o.status<=299;if(le.env.AGENT_RUNNERS_DEBUG==="true")j.log(`Response headers for ${r}:`),o.headers.forEach((c,i)=>{j.log(` ${i}: ${c}`)});else{let c=o.headers.get("x-request-id")||o.headers.get("x-nf-request-id");j.log(`Request ID for ${r}: ${c||"N/A"}`)}if(s||j.error(`Got status ${o.status} for request ${r}`),t.raw){if(!s)throw o;return o}let l=await(o.headers.get("content-type")?.includes("application/json")?o.json():o.text());if(!s)throw l;return l},ot=e=>{j.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(he=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(ye=e.constants.NETLIFY_API_TOKEN)},st=()=>({apiUrl:he,token:ye}),ue=async(e,t)=>_e()?(j.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):ce(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),B=async(e,t,r)=>_e()?(j.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):ce(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var it=async(e,t)=>_e()?(j.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):ce(`/api/v1/agent_runners/${e}/sessions/${t}`),at=(e,t,r)=>ce(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r}}),lt=async(e,t)=>_e()?(j.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):ce(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),ke=async(e,t)=>{j.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var ee=_("ai_gateway"),De=null;var ct=async()=>{if(De)return De;ee.log("Fetching available AI gateway providers");let e=await fetch(`${st().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return De=t,ee.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},tr=async(e,t)=>{let n=(await ct()).providers[e];if(!n)return ee.log(`Provider '${e}' not found`),!1;let o=n.models.includes(t);return ee.log(`Model validation for ${e}/${t}`,{isAvailable:o}),o},ut=async({netlify:e,config:t})=>{let r,n,o,s,l=e.constants?.SITE_ID;if(!l)throw new Error("No site id");let c=async()=>{clearTimeout(o),ee.log("Requesting AI gateway information");let i=await at(l,t.id,t.sessionId);if({token:r,url:s}=i,n=i.expires_at?i.expires_at*1e3:void 0,ee.log("Got AI gateway information",{token:!!r,expiresAt:n,url:s}),n){let 
m=n-Date.now()-6e4;m>0&&(o=setTimeout(()=>{c()},m))}};return await Promise.all([c(),ct()]),{get url(){return s},get token(){return r},isModelAvailableForProvider:tr}};import G from"process";import K from"path";import Ee from"fs";import{fileURLToPath as lr}from"url";import{createRequire as cr}from"module";import{execa as ur,execaCommand as Dn}from"execa";import{Transform as rr}from"stream";var nr=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","ERROR_LOGS_PATH","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),or=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function sr(){return Object.entries(process.env).filter(([e,t])=>!(!t||nr.has(e)||or.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function H(e){if(typeof e!="string")return e;let t=sr();if(t.length===0)return e;let r=e;return t.forEach(n=>{let o=new RegExp(ir(n),"g");r=r.replace(o,"******")}),r}function ir(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var te=class extends rr{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,n){let o=t.toString(),s=H(o);n(null,s)}};function dt(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(n,o,s){let l=typeof n=="string"?H(n):n;return typeof o=="function"?t(l,o):t(l,o,s)},process.stderr.write=function(n,o,s){let l=typeof n=="string"?H(n):n;return typeof o=="function"?r(l,o):r(l,o,s)}}var de=null,pt=e=>(de&&de.destroy(),de=new X({totalAllowedTime:e}),de),ft=()=>de;var X=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,n)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let o=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),s=null,l=null;n!==void 0&&(l=new Promise((c,i)=>{s=setTimeout(()=>{i(new Error(`${t} stage exceeded its maximum duration of ${n}ms`))},n)}));try{return l?await Promise.race([r(),l]):await r()}finally{o(),s&&clearTimeout(s)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var gt={name:"@netlify/agent-runner-cli",type:"module",version:"1.54.0",description:"CLI tool for running Netlify agents",main:"./dist/index.js",types:"./dist/index.d.ts",exports:"./dist/index.js",bin:{"agent-runner-cli":"./dist/bin.js","agent-runner-cli-local":"./dist/bin-local.js"},files:["dist/**/*.js","dist/**/*.d.ts","patches","scripts"],scripts:{build:"tsup",dev:"tsup --watch",prepare:"husky install node_modules/@netlify/eslint-config-node/.husky/",prepublishOnly:"npm ci && npm test",prepack:"npm run build",test:"run-s build format test:dev",format:"run-s build format:check-fix:*","format:ci":"run-s build format:check:*","format:check-fix:lint":"run-e format:check:lint format:fix:lint","format:check:lint":"cross-env-shell eslint $npm_package_config_eslint","format:fix:lint":"cross-env-shell eslint --fix $npm_package_config_eslint","format:check-fix:prettier":"run-e format:check:prettier format:fix:prettier","format:check:prettier":"cross-env-shell prettier --check $npm_package_config_prettier","format:fix:prettier":"cross-env-shell prettier --write $npm_package_config_prettier","test:dev":"run-s build test:dev:*","test:ci":"run-s build test:ci:*","test:dev:vitest":"vitest","test:ci:vitest":"c8 -r lcovonly -r text -r json vitest",postinstall:"node scripts/postinstall.js"},config:{eslint:'--cache --format=codeframe --max-warnings=0 "{src,scripts,test,.github}/**/*.{js,ts,md,html}"',prettier:'--ignore-path .gitignore --loglevel=warn "{src,scripts,test,.github}/**/*.{js,ts,md,yml,json,html}" "*.{js,ts,yml,json,html}" ".*.{js,ts,yml,json,html}" "!**/package-lock.json" "!package-lock.json"'},keywords:[],license:"MIT",repository:"netlify/agent-runner-cli",bugs:{url:"https://github.com/netlify/agent-runner-cli/issues"},author:"Netlify 
Inc.",directories:{test:"test"},devDependencies:{"@commitlint/cli":"^19.0.0","@commitlint/config-conventional":"^19.0.0","@eslint/compat":"^1.3.2","@eslint/js":"^9.35.0","@netlify/eslint-config-node":"^7.0.1","@types/node":"^24.5.0","@typescript-eslint/eslint-plugin":"^7.1.0","@typescript-eslint/parser":"^7.1.0","@vitest/eslint-plugin":"^1.3.10",c8:"^9.0.0","eslint-config-prettier":"^10.1.8","eslint-plugin-n":"^17.0.0",husky:"^8.0.0","patch-package":"^8.0.0",tsup:"^8.5.0",typescript:"^5.0.0","typescript-eslint":"^8.44.0",vitest:"^1.5.0"},dependencies:{"@anthropic-ai/claude-code":"2.0.42","@google/gemini-cli":"0.16.0","@netlify/otel":"^5.0.0","@openai/codex":"0.55.0","@opentelemetry/exporter-trace-otlp-grpc":"^0.57.0",execa:"^8.0.0","get-port":"^5.1.1",minimist:"^1.2.8"}};var dr=lr(import.meta.url),pr=K.dirname(dr),fr=cr(import.meta.url),we=_("shell"),Le=new Set,gr={preferLocal:!0},F=(e,t,r)=>{let[n,o]=mr(t,r),s={...gr,...o},l=ur(e,n,s);return hr(l,s),_r(l),l};var mr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},hr=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(G.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new te).pipe(G.stdout),e.stdout?.pipe(new te).pipe(G.stdout),e.stderr?.pipe(new te).pipe(G.stderr);return}e.stdout?.pipe(G.stdout),e.stderr?.pipe(G.stderr)},mt=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(G.kill(-e.pid,t),we.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return we.error("Error killing process:",r),!1}},yr=e=>mt(e,"SIGKILL"),_r=e=>{Le.add(e);let t=ft();if(t){let r=t.onTimesUp(()=>{we.log(`Global timer expired, killing process ${e.pid}`),mt(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(we.log(`Force killing process ${e.pid} after timeout`),yr(e))},5e3)});e.on("exit",()=>{Le.delete(e),r()}),e.on("error",()=>{Le.delete(e),r()})}};function re(e,t){if(!G.env.NETLIFY_LOCAL_MODE)try{let o=fr.resolve(gt.name),s=K.dirname(o);for(;s!==K.dirname(s);){let l=K.dirname(s);if(K.basename(l)==="node_modules"){let c=K.join(l,".bin",t);if(Ee.existsSync(c))return c;break}s=l}}catch(o){console.error("Could not resolve package.json",o)}if(G.env.NODE_PATH){let o=K.join(G.env.NODE_PATH,".bin",t);if(Ee.existsSync(o))return o}let r=K.join(e,"node_modules",".bin",t);if(Ee.existsSync(r))return r;let n=K.join(pr,"..","node_modules",".bin",t);if(Ee.existsSync(n))return n}var ht="netlify-agent-runner-context.md",Ue="task-history",Me="netlify-context",D=".netlify",ne="results.md",je="assets";var Er=_("utils"),wr=e=>new Promise(t=>{setTimeout(t,e)}),yt=(e,t=3e3)=>{let r=!1,n=null,o=[],s=null,l=(...c)=>{if(r)return n=c,new Promise(p=>{o.push(p)});r=!0;let i,m=new Promise(p=>{i=p});return s=(async()=>{await Promise.resolve();let p=await e(...c);for(i(p);;){if(await wr(t),!n)return r=!1,s=null,p;let u=n,g=o;n=null,o=[],p=await e(...u),g.forEach(y=>{y(p)})}})(),m};return l.flush=async()=>{if((r||n)&&s)return await s,l.flush()},l},oe=(e,t,r=!1)=>{let n=null,o=null,s=null,l=function(...c){o=c,s=this;let i=r&&!n;clearTimeout(n),n=setTimeout(()=>{n=null,r||(e.apply(s,o),o=null,s=null)},t),i&&(e.apply(s,o),o=null,s=null)};return l.cancel=()=>{clearTimeout(n),n=null,o=null,s=null},l.flush=()=>{if(n){clearTimeout(n);let c=o,i=s;n=null,o=null,s=null,e.apply(i,c)}},l},_t=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(n){t&&(r?.error?r.error("Could not parse JSON",n):Er.error("Could not parse JSON",n))}},Et=(e,t)=>{let o=".netlify.app",s="agent-";if(!t)return`${s}${e.slice(0,6)}`;let 
c=`--${t}${o}`;if(c.length>55)return"";let i=60-c.length;if(i<=0)return"";if(i>=s.length+6){let m=Math.min(i-s.length,e.length);return`${s}${e.slice(0,m)}`}return e.slice(0,i)};import{Buffer as wt}from"buffer";import xr from"path";var xt=_("repo"),Tt=async({config:e})=>{xt.info("Getting runner diffs");let t=await Ir(),{hasChanges:r}=t,{status:n}=t;if(!r)return{hasChanges:!1};let o=vr(n);await Rr(o),xt.info("Changes after processing"),await Ye();let s=await Be(n);await Ge(s);let l={stdio:["ignore","pipe","pipe"]},i=(await F("git",["diff","--staged"],l)).stdout;if(r=!!i,!r)return{hasChanges:!1,ignored:s};let p=(await F("git",["diff","--staged","--binary"],l)).stdout,u,g;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await F("git",["commit","-m","Agent runner"]),u=(await F("git",["diff",e.sha,"HEAD"],l)).stdout;let f=(await F("git",["diff",e.sha,"HEAD","--binary"],l)).stdout;u!==f&&(g=wt.from(f).toString("base64"))}let y={hasChanges:!0,diff:i,resultDiff:u,ignored:s};return i!==p&&(y.diffBinary=wt.from(p).toString("base64")),g&&(y.resultDiffBinary=g),y},Ge=async(e=[])=>{await F("git",["add",".",...e])},Ye=async()=>(await F("git",["status","-s"])).stdout,It=/.. (.+)?\.log$/,Tr=[It],Ir=async()=>{let e=await Ye();return{hasChanges:(e.trim().length===0?[]:e.split(`
+ `).filter(n=>Tr.some(s=>s instanceof RegExp?s.test(n):n===s)?!1:n[1]?.trim()!=="")).length!==0,status:e}},vt=async()=>{let{stdout:e}=await F("git",["rev-parse","HEAD"]);return e.trim()},Rt=async()=>{let{stdout:e}=await F("git",["rev-list","--max-parents=0","HEAD"]);return e.trim()},Be=async e=>{e||=await Ye();let t=[".netlify","node_modules"],r=[];return e.split(`
+ `).forEach(n=>{t.forEach(s=>{[`?? ${s}`,`?? ${s}${xr.sep}`].some(c=>n.startsWith(c))&&r.push(`:!${s}`)});let o=n.match(It)?.[1];o&&r.push(`:!${o}.log`)}),r},St=async()=>{await F("git",["reset","--hard","HEAD"])},vr=e=>{let t=e.split(`
+ `).reduce((r,n)=>{if(!n)return r;let[o,s,,...l]=n,c=l.join(""),i=o.trim(),m=s.trim();return r[c]?r[c].change=m:r[c]={filePath:c,stage:i,change:m},r},{});return Object.values(t)},Rr=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push(F("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import br from"fs/promises";import Ar from"os";import Nt from"path";import z from"process";import Nr from"readline";import He from"path";import Sr from"fs/promises";var Ke=_("agent-output-utils");async function se({initialResult:e,agentName:t,hasError:r}){let n="",o=He.join(process.cwd(),D,ne);try{let s=await Sr.readFile(o,"utf-8");s&&(n=s,Ke.log(`Pulled result from ${He.relative(process.cwd(),o)}`))}catch{Ke.log(`No results file found at ${He.relative(process.cwd(),o)}`)}return n||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function ie({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,n=r?.replace(/\s+/g," ").trim().toLowerCase()||"",o="";return n?.includes("ai gateway is not available for your account")||n?.includes("ai gateway is not enabled for your account")?o="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":n?.includes("error when talking to gemini api")?o="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(n?.includes("connection closed prematurely")||n?.includes("499")&&t.toLowerCase().includes("gemini"))&&(o=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),n?.includes("request timed out")&&(o=`The ${t} API request's have timed out. Please try again or use a different available agent.`),n?.includes("network error")&&(o=`The ${t} agent is having network issues. 
Please try again or use a different available agent.`),o&&Ke.log(`Providing updated error messsage: ${o}, replacing original error: ${r}`),o||r||void 0}function ae(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error"))}var L=_("runner_claude"),bt="Claude Code",xe="claude-sonnet-4-5-20250929",At=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,Cr=({catchError:e,runCmd:t,error:r,result:n,runnerName:o})=>(L.log(`${o} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!n,resultLength:n?n.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),n?(L.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:n}:{error:"Process completed with errors but result was captured",result:n}):(L.log("Setting result to undefined because no valid result was captured"),{error:r||`${o} failed`,result:void 0}));async function We({config:e,netlify:t,persistSteps:r,aiGateway:n,continueSession:o,priorAgentSessionId:s}){let l=e,{accountType:c,prompt:i,modelVersionOverrides:m}=l,{model:p}=l,u="";if(n){let{token:h,url:d}=n;if(!h||!d)throw new Error("No token or url provided from AI Gateway");if(m?.claude){let a=m?.claude?.[c];if(a){if(!await n.isModelAvailableForProvider("anthropic",a))throw new Error(`Model override '${a}' is not available for anthropic provider`);p=a}}else if(p){if(!await n.isModelAvailableForProvider("anthropic",p))throw new Error(`Model '${p}' is not available for anthropic provider`)}else await n.isModelAvailableForProvider("anthropic",xe)?(p=xe,L.log(`Using default model: ${xe}`)):L.log(`Default model ${xe} is not available, proceeding without model specification`);z.env.ANTHROPIC_API_KEY=h,z.env.ANTHROPIC_BASE_URL=d}else if(!z.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let g=[],y=[],w={},x=0,f=0,I,T,A=[re(z.cwd(),"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose",...p?["--model",p]:[],...o?["--continue"]:[],...o&&s?["--resume",s]:[],"-p",i],N=`${z.env.NVM_BIN}/node`;L.log(`Running ${N} ${A.join(" ")}`);let C=t.utils.run(N,A,{all:!0,env:z.env});C.stdin?.end();let R=oe(()=>{r?.({steps:g,duration:f})},250),v=(h,d)=>{let a={...h,id:x};x+=1,y.push(a),g.push(a),d||R.flush(),R(),d&&R.flush()},$=Nr.createInterface({input:C.all});return $.on("error",h=>{L.error("Readline interface error",{error:h.message,stack:h.stack})}),$.on("line",h=>{let d=null;try{d=JSON.parse(h)}catch{L.log("Could not parse line",h)}d?.session_id&&d.session_id!==u&&(u=d.session_id),Array.isArray(d?.message?.content)?d.message.content.forEach(a=>{switch(a.type){case"text":{a.text&&v({message:a.text});break}case"image":{typeof a.source=="object"&&a.source&&a.source.type==="base64"&&a.source.media_type?v({message:`![](data:${a.source.media_type};base64,${a.source.data})`}):L.log(`Unsupported image type 
${a.source?.type}`,a.source);break}case"tool_use":{if(a.name==="Task"){let E=a.input?.description&&`\`${a.input.description}\``;v({title:[At(a.name),E].filter(Boolean).join(" ")})}else a.id&&(w[a.id]=a);R.flush();break}case"tool_result":{let E=a.tool_use_id?w[a.tool_use_id]:void 0,V;if(E){let W=E.input?.file_path&&Nt.relative(z.cwd(),E.input.file_path),P=W&&`\`${W}\``;V=[At(E.name||""),P].filter(Boolean).join(" ")}let me=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(E?.name||""),J;if(typeof a.content=="string")J=a.content;else if(Array.isArray(a.content)){let W=[];a.content.forEach(P=>{P?.type==="text"&&typeof P.text=="string"?W.push(P.text):P?.type==="image"&&typeof P.source=="object"&&P.source?P.source.type==="base64"&&P.source.media_type?W.push(`![](data:${P.source.media_type};base64,${P.source.data})`):L.log(`Unsupported image type ${P.source.type}`,P.source):L.log(`Unsupported block type ${P?.type}`)}),J=W.join(`
+
+ `)}me&&J&&(J=`\`\`\`
+ ${J.trim()}
+ \`\`\``),v({title:V,message:J},!0);break}case"thinking":{a.thinking&&v({title:"Thinking",message:a.thinking},!0);break}default:L.log(`Message content type is not supported ${a.type}`,a)}}):d?.type==="result"&&(f=d.duration_ms||0,d.is_error?T=d.result:I=d.result,[y,g].forEach(a=>{a[a.length-1]?.message===I&&a.pop()}))}),await C.catch(h=>{({error:T,result:I}=Cr({catchError:h,runCmd:C,error:T,result:I,runnerName:"Claude"}))}),$.close(),R.flush(),{steps:y,duration:f,result:await se({initialResult:I,agentName:bt,hasError:!!T}),error:ie({error:T,agentName:bt}),isRetryableError:ae(T),agentSessionId:u}}var Ct=async()=>{let e=Nt.join(Ar.homedir(),".claude");await br.rm(e,{recursive:!0,force:!0})};import pe from"fs/promises";import Ft from"os";import Te from"path";import Z from"process";import Pr from"readline";var Y=_("runner_codex"),Pt="Codex CLI",Fr=({catchError:e,runCmd:t,error:r,result:n,runnerName:o})=>(Y.log(`${o} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!n,resultLength:n?n.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),n?(Y.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:n}:{error:"Process completed with errors but result was captured",result:n}):(Y.log("Setting result to undefined because no valid result was captured"),{error:r||`${o} failed`,result:void 0}));async function qe({config:e,netlify:t,persistSteps:r,sendSteps:n,aiGateway:o}){let{accountType:s,prompt:l,modelVersionOverrides:c}=e,{model:i}=e;if(o){let{token:d,url:a}=o;if(!d||!a)throw new Error("No token or url provided from AI Gateway");if(c?.codex){let E=c?.codex?.[s];if(E){if(!await o.isModelAvailableForProvider("openai",E))throw new Error(`Model override '${E}' is not available for openai provider`);i=E}}else if(i&&!await o.isModelAvailableForProvider("openai",i))throw new Error(`Model '${i}' is not available for openai provider`);Z.env.OPENAI_API_KEY=d,Z.env.OPENAI_BASE_URL=a}else if(!Z.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let m=[],p=[],u=[],g={},y=0,w=0,x,f,I=`${Z.env.NVM_BIN}/node`,T=Te.join(Ft.homedir(),".codex"),A=Te.join(T,"config.toml"),N=Te.join(T,"auth.json");try{await pe.mkdir(T,{recursive:!0});let d={OPENAI_API_KEY:Z.env.OPENAI_API_KEY};await pe.writeFile(N,JSON.stringify(d,null,2),"utf-8"),Y.log("Created Codex auth.json file");let a="";try{a=await pe.readFile(A,"utf-8")}catch{}a.includes("web_search")||(a.includes("[tools]")?a=a.replace(/\[tools\]/,`[tools]
15
+ web_search = true`):a+=`
16
+ [tools]
17
+ web_search = true
18
+ `,await pe.writeFile(A,a,"utf-8"),Y.log("Updated Codex config with web_search enabled"))}catch(d){throw Y.warn("Failed to setup Codex config and credentials",{error:d.message}),new Error(`Codex setup failed: ${d.message}`)}let C=[re(Z.cwd(),"codex"),"exec","--yolo","--json","--config","web_search=true",...i?["--model",i]:[],l].filter(Boolean);Y.log(`Running ${I} ${C.join(" ")}`);let R=t.utils.run(I,C,{all:!0,env:{...Z.env}}),v=oe(()=>{r?.({steps:m,duration:w}),n?.({steps:p,duration:w}),p=[]},250),$=(d,a)=>{d.id=y,y+=1,u.push(d),m.push(d),p.push(d),a||v.flush(),v(),a&&v.flush()},h=Pr.createInterface({input:R.all});return h.on("error",d=>{Y.error("Readline interface error",{error:d.message,stack:d.stack})}),h.on("line",d=>{let a=null;try{a=JSON.parse(d)}catch{Y.log("Could not parse line",d);return}if(a?.duration_ms&&(w=a.duration_ms),a?.type==="item.started"&&a?.item?.type==="command_execution")g[a.item.id]=a.item;else if(a?.type==="item.completed"&&a?.item?.type==="command_execution"){let E=$r(a.item);E&&$(E,!0)}else if(a?.type==="item.completed"&&a?.item?.type==="reasoning"){let E={title:"Reasoning",message:a.item.text};$(E,!0)}else if(a?.type==="local_shell_call")g[a.call_id]=a;else if(a?.type==="local_shell_call_output"){let E=kr(g[a.call_id],a);E&&$(E,!0)}else a?.type==="message"&&a.role==="assistant"?x=a.content.map(E=>E.text).join(`
19
+ `):a?.type==="message"&&a.role==="system"&&(f=a.content.map(E=>E.text).join(`
20
+ `))}),await R.catch(d=>{let a=Fr({catchError:d,runCmd:R,error:f,result:x,runnerName:"Codex"});f=a.error,x=a.result}),h.close(),v.flush(),{steps:u,duration:w,result:await se({initialResult:x,agentName:Pt,hasError:!!f}),error:ie({error:f,agentName:Pt}),isRetryableError:ae(f)}}var Ot=async()=>{let e=Te.join(Ft.homedir(),".codex");await pe.rm(e,{recursive:!0,force:!0})},Or=new Set(["bash","-lc"]),$r=e=>{if(!e||e.type!=="command_execution")return null;let t=e.command;t.startsWith("bash -lc ")&&(t=t.replace(/^bash -lc ['"]/,"").replace(/['"]$/,""));let r=`Running \`${t}\``,n=e.aggregated_output?.trim();return n&&(n=`\`\`\`
21
+ ${n}
22
+ \`\`\``),e.status==="failed"&&e.exit_code!==0&&(n=n?`${n}
23
+
24
+ *Exit code: ${e.exit_code}*`:`*Command failed with exit code: ${e.exit_code}*`),{title:r,message:n}},kr=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(s=>!Or.has(s)),n=r?`Running \`${r.join(" ")}\``:void 0,o;try{o=JSON.parse(t.output).output?.trim(),o&&(o=`\`\`\`
25
+ ${o.trim()}
26
+ \`\`\``)}catch(s){Y.error("Could not decode outputMsg",s,t.output)}return{title:n,message:o}};import ve from"fs/promises";import kt from"os";import Re from"path";import Q from"process";import Dr from"readline";var U=_("runner_gemini"),$t="Gemini CLI",Ie="gemini-2.5-pro",Lr=({catchError:e,runCmd:t,error:r,result:n,runnerName:o})=>(U.log(`${o} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!n,resultLength:n?n.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),n?(U.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:n}:{error:"Process completed with errors but result was captured",result:n}):(U.log("Setting result to undefined because no valid result was captured"),{error:r||`${o} failed`,result:void 0})),Ur={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"},Mr=async()=>{let e=Re.join(kt.homedir(),".gemini"),t=Re.join(e,"settings.json");try{await ve.mkdir(e,{recursive:!0});let r={};try{let n=await ve.readFile(t,"utf-8");r=JSON.parse(n)}catch{U.log("Creating new Gemini CLI settings file")}r.general||(r.general={}),r.general.previewFeatures||(r.general.previewFeatures=!1),r.model||(r.model={}),r.model.compressionThreshold!==.3&&(r.model.compressionThreshold=.3),await ve.writeFile(t,JSON.stringify(r,null,2),"utf-8"),U.log("Configured Gemini CLI settings (preview features and compression threshold)")}catch(r){U.error("Failed to ensure Gemini CLI settings",{error:r.message})}};async function Ve({config:e,netlify:t,persistSteps:r,sendSteps:n,aiGateway:o}){let{accountType:s,prompt:l,modelVersionOverrides:c}=e,{model:i}=e;if(await Mr(),o){let{token:h,url:d}=o;if(!h||!d)throw new Error("No token or url provided from AI Gateway");if(c?.gemini){let a=c?.gemini?.[s];if(a){if(!await o.isModelAvailableForProvider("gemini",a))throw new Error(`Model override '${a}' is not available for gemini provider`);i=a}}if(!i)await o.isModelAvailableForProvider("gemini",Ie)?(i=Ie,U.log(`Using default model: ${Ie}`)):U.log(`Default model ${Ie} is not available, proceeding without model specification`);else if(i&&!c?.gemini?.[s]&&!await o.isModelAvailableForProvider("gemini",i))throw new Error(`Model '${i}' is not available for gemini provider`);Q.env.GEMINI_API_KEY=h,Q.env.GOOGLE_GEMINI_BASE_URL=d}else if(!Q.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let m=[],p=[],u=[],g={},y=0,w=0,x,f,I=[re(Q.cwd(),"gemini"),...i?["--model",i]:[],"--yolo","--output-format","stream-json","-p",l],T=`${Q.env.NVM_BIN}/node`;U.log(`Running ${T} ${I.join(" ")}`);let A=t.utils.run(T,I,{all:!0,env:Q.env});A.stdin?.end();let N=oe(()=>{r?.({steps:m,duration:w}),n?.({steps:p,duration:w}),p=[]},250),C=(h,d)=>{h.id=y,y+=1,u.push(h),m.push(h),p.push(h),d||N.flush(),N(),d&&N.flush()},R=Dr.createInterface({input:A.all});R.on("error",h=>{U.error("Readline interface error",{error:h.message,stack:h.stack})});let v="",$=()=>{v&&C({message:v.trim()}),v=""};return R.on("line",h=>{let d=null;try{if(h.startsWith("[API Error")){let a=h.match(/\[api error: (.+?)]$/i)?.[1];d={type:"error",value:_t(a,!1)?.error?.message||a||"Gemini encountered error"}}else 
d=JSON.parse(h)}catch{return}if(d)switch(["message","result"].includes(d.type)||$(),d.type){case"message":{d.role!=="user"&&d.content&&(v+=d.content);break}case"tool_use":{let a=Ur[d.tool_name]??d.tool_name,E=d.parameters?.file_path,V=E&&Re.relative(Q.cwd(),E),me=d.parameters?.command,W={title:[a,V&&`\`${V}\``,me&&`\`${me}\``].filter(Boolean).join(" ")};g[d.tool_id]=W,N.flush();break}case"tool_result":{let a=g[d.tool_id];a&&(d.output&&(a.message=`\`\`\`
19
27
  ${d.output.trim()}
20
- \`\`\``),E(p,!0));break}case"result":{w=d.stats?.duration_ms,d.status==="error"?g=d.error?.message:T=S.trim();break}case"error":{g=d.error;break}case"finished":break;default:{U.warn("Unhandled message type:",d.type);break}}}),await b.catch(y=>{({error:g,result:T}=kr({catchError:y,runCmd:b,error:g,result:T,runnerName:"Gemini"}))}),R.close(),x.flush(),{steps:c,duration:w,result:await ne({initialResult:T,agentName:Pt,hasError:!!g}),error:oe({error:g,agentName:Pt}),isRetryableError:se(g)}}var Ft=async()=>{let e=Ie.join(Ot.homedir(),".gemini");await xe.rm(e,{recursive:!0,force:!0})};var Mr={codex:{runner:qe,clean:Ct},claude:{runner:He,clean:bt},gemini:{runner:Ke,clean:Ft}},$t=Mr;var Gr=_("init_stage"),Dt=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:o})=>await N(jr(),"init-stage",async n=>{let s=performance.now();n?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.validateAgent":e.validateAgent,"init.runnerVersion":o||"unknown"});let a=$t[e.runner];if(!a)throw n?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let l=Yr({apiToken:r});rt(l);let i=e.useGateway?await lt({netlify:l,config:e}):void 0;n?.setAttributes({"init.aiGateway.created":!!i}),e.validateAgent&&e.errorLogsPath&&n?.setAttributes({"init.errorLogsPath":e.errorLogsPath});let m=mt(({steps:h=[],duration:w})=>{let T=h.map(g=>({...g,title:g.title?B(g.title):void 0,message:g.message?B(g.message):void 0}));return h.length=0,Y(e.id,e.sessionId,{steps:T,duration:w})},t);Gr.info("Adding build files to stage");let u=await Ge();await Me(u);let c;e.hasRepo?e.sha?(c=e.sha,n?.setAttributes({"init.sha.source":"provided"})):(c=await xt(),await ce(e.id,{sha:c}),n?.setAttributes({"init.sha.source":"current_commit"})):(c=await It(),n?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let f=performance.now()-s;return n?.setAttributes({"init.sha":c||"unknown","init.duration.ms":f,"init.status":"success"}),{aiGateway:i,context:l,persistSteps:m,runner:a,sha:c}}),Yr=({apiToken:e})=>({constants:{NETLIFY_API_HOST:ve.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||ve.env.NETLIFY_API_TOKEN,SITE_ID:ve.env.SITE_ID,FUNCTIONS_DIST:ve.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:F}});import{getTracer as We}from"@netlify/otel";import Br from"crypto";import K from"fs/promises";import D from"path";import M from"process";var $=_("context"),Hr=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:M.env.NETLIFY_TEAM_ID,userId:M.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:M.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},qr=10,Kr=async e=>{let{name:t,ext:r}=D.parse(e),o=e,n=D.join(M.cwd(),k,o),s=0;for(;await Wr(n);){if(s>=qr)throw new Error("Failed to generate context file");o=`${t}-${Br.randomUUID().slice(0,5)}${r}`,n=D.join(M.cwd(),k,o),s+=1}return o},Wr=async e=>{try{return await K.access(e),!0}catch{return!1}},Vr=async()=>{try{$.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return $.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(o=>o&&typeof o=="object"&&o.key==="catchall-consumer");return r?!r.contextScopes||typeof 
r.contextScopes!="object"?($.warn("Catchall consumer missing or invalid contextScopes"),null):r:($.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?$.warn("Netlify features context request timed out"):$.warn("Failed to fetch Netlify features context:",e.message),null}},Jr=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let o=await r.text();return await K.writeFile(t,o,"utf-8"),!0}catch(r){return r.name==="AbortError"?$.warn(`Download timeout for ${e}`):$.warn(`Failed to download context file ${e}:`,r.message),!1}},Re=null,Xr=async()=>{if(Re)return Re;let e=await Vr();if(!e)return[];let t=D.join(M.cwd(),k,Le);await K.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([n,s])=>{if(!s||typeof s!="object"||!s.endpoint||!s.scope)return $.warn(`Invalid scope data for ${n}, skipping...`),null;let a=`${n}.md`,l=D.join(t,a),i=D.join(k,Le,a);return $.log(`Downloading ${s.scope} context...`),await Jr(s.endpoint,l)?($.log(`Downloaded: ${i}`),{scope:s.scope,path:i,key:n}):null});return Re=(await Promise.all(r)).filter(n=>n!==null),Re},kt=async({cliPath:e,netlify:t,config:r,buildErrorContext:o})=>{let n=Hr(t),s=await Kr(gt),a=D.join(M.cwd(),k);await K.mkdir(a,{recursive:!0});let l=D.join(k,s),i=D.join(M.cwd(),l),m=D.join(M.cwd(),k,re);try{await K.unlink(m),$.log(`Deleted old results file: ${m}`)}catch{}let u=o?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
28
+ \`\`\``),C(a,!0));break}case"result":{w=d.stats?.duration_ms,d.status==="error"?f=d.error?.message:x=v.trim();break}case"error":{f=d.error;break}case"finished":break;default:{U.warn("Unhandled message type:",d.type);break}}}),await A.catch(h=>{({error:f,result:x}=Lr({catchError:h,runCmd:A,error:f,result:x,runnerName:"Gemini"}))}),R.close(),N.flush(),{steps:u,duration:w,result:await se({initialResult:x,agentName:$t,hasError:!!f}),error:ie({error:f,agentName:$t}),isRetryableError:ae(f)}}var Dt=async()=>{let e=Re.join(kt.homedir(),".gemini");await ve.rm(e,{recursive:!0,force:!0})};var jr={codex:{runner:qe,clean:Ot},claude:{runner:We,clean:Ct},gemini:{runner:Ve,clean:Dt}},Lt=jr;var Yr=_("init_stage"),Ut=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:n})=>await S(Gr(),"init-stage",async o=>{let s=performance.now();o?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.validateAgent":e.validateAgent,"init.runnerVersion":n||"unknown"});let l=Lt[e.runner];if(!l)throw o?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let c=Br({apiToken:r});ot(c);let i=e.useGateway?await ut({netlify:c,config:e}):void 0;o?.setAttributes({"init.aiGateway.created":!!i}),e.validateAgent&&e.errorLogsPath&&o?.setAttributes({"init.errorLogsPath":e.errorLogsPath});let m=yt(({steps:y=[],duration:w})=>{let x=y.map(f=>({...f,title:f.title?H(f.title):void 0,message:f.message?H(f.message):void 0}));return y.length=0,B(e.id,e.sessionId,{steps:x,duration:w})},t);Yr.info("Adding build files to stage");let p=await Be();await Ge(p);let u;e.hasRepo?e.sha?(u=e.sha,o?.setAttributes({"init.sha.source":"provided"})):(u=await vt(),await ue(e.id,{sha:u}),o?.setAttributes({"init.sha.source":"current_commit"})):(u=await Rt(),o?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let g=performance.now()-s;return o?.setAttributes({"init.sha":u||"unknown","init.duration.ms":g,"init.status":"success"}),{aiGateway:i,context:c,persistSteps:m,runner:l,sha:u}}),Br=({apiToken:e})=>({constants:{NETLIFY_API_HOST:Se.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||Se.env.NETLIFY_API_TOKEN,SITE_ID:Se.env.SITE_ID,FUNCTIONS_DIST:Se.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:F}});import{getTracer as Je}from"@netlify/otel";import Hr from"crypto";import q from"fs/promises";import k from"path";import M from"process";var O=_("context"),Kr=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:M.env.NETLIFY_TEAM_ID,userId:M.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:M.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},Wr=10,qr=async e=>{let{name:t,ext:r}=k.parse(e),n=e,o=k.join(M.cwd(),D,n),s=0;for(;await Vr(o);){if(s>=Wr)throw new Error("Failed to generate context file");n=`${t}-${Hr.randomUUID().slice(0,5)}${r}`,o=k.join(M.cwd(),D,n),s+=1}return n},Vr=async e=>{try{return await q.access(e),!0}catch{return!1}},Jr=async()=>{try{O.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return O.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(n=>n&&typeof n=="object"&&n.key==="catchall-consumer");return r?!r.contextScopes||typeof 
r.contextScopes!="object"?(O.warn("Catchall consumer missing or invalid contextScopes"),null):r:(O.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?O.warn("Netlify features context request timed out"):O.warn("Failed to fetch Netlify features context:",e.message),null}},Xr=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let n=await r.text();return await q.writeFile(t,n,"utf-8"),!0}catch(r){return r.name==="AbortError"?O.warn(`Download timeout for ${e}`):O.warn(`Failed to download context file ${e}:`,r.message),!1}},be=null,zr=async()=>{if(be)return be;let e=await Jr();if(!e)return[];let t=k.join(M.cwd(),D,Me);await q.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([o,s])=>{if(!s||typeof s!="object"||!s.endpoint||!s.scope)return O.warn(`Invalid scope data for ${o}, skipping...`),null;let l=`${o}.md`,c=k.join(t,l),i=k.join(D,Me,l);return O.log(`Downloading ${s.scope} context...`),await Xr(s.endpoint,c)?(O.log(`Downloaded: ${i}`),{scope:s.scope,path:i,key:o}):null});return be=(await Promise.all(r)).filter(o=>o!==null),be},Mt=async({cliPath:e,netlify:t,config:r,buildErrorContext:n})=>{let o=Kr(t),s=await qr(ht),l=k.join(M.cwd(),D);await q.mkdir(l,{recursive:!0});let c=k.join(D,s),i=k.join(M.cwd(),c),m=k.join(M.cwd(),D,ne);try{await q.unlink(m),O.log(`Deleted old results file: ${m}`)}catch{}let p=n?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
21
29
  Your task is to analyze and fix the build errors.
22
30
  Don't apply techniques of reverting changes. Apply fixes related to errors.
23
31
  Don't try to run build by yourself. Just fix the errors.
24
32
 
25
33
  <build_error_context>
26
- ${o}
27
- </build_error_context>`:"",c="";r.siteContext&&r.siteContext.length!==0&&(c=`
34
+ ${n}
35
+ </build_error_context>`:"",u="";r.siteContext&&r.siteContext.length!==0&&(u=`
28
36
  <project_rules>
29
- ${r.siteContext.filter(g=>g.site_context).map(g=>typeof g.site_context=="string"?g.site_context:typeof g.site_context=="object"?JSON.stringify(g.site_context):"").join(`
37
+ ${r.siteContext.filter(f=>f.site_context).map(f=>typeof f.site_context=="string"?f.site_context:typeof f.site_context=="object"?JSON.stringify(f.site_context):"").join(`
30
38
 
31
39
  `)}
32
40
  </project_rules>
33
- `);let f="";if(r.sessionHistoryContext?.length){let g=D.join(M.cwd(),k,ke);await K.mkdir(g,{recursive:!0});let v=await Promise.all(r.sessionHistoryContext.map(async(I,b)=>{let x=b+1,E=`attempt-${x}.md`,R=D.join(g,E),S=D.join(k,ke,E),A=`# Task History - Attempt ${x}
41
+ `);let g="";if(r.sessionHistoryContext?.length){let f=k.join(M.cwd(),D,Ue);await q.mkdir(f,{recursive:!0});let I=await Promise.all(r.sessionHistoryContext.map(async(T,A)=>{let N=A+1,C=`attempt-${N}.md`,R=k.join(f,C),v=k.join(D,Ue,C),$=`# Task History - Attempt ${N}
34
42
 
35
43
  ## Request - what the user asked for
36
- ${I.request}
44
+ ${T.request}
37
45
 
38
46
  ---
39
47
 
40
48
  ## Response - what the agent replied with after its work
41
49
 
42
- ${I.response}
43
- `;return await K.writeFile(R,A,"utf-8"),$.log(`Created history file: ${S}`),S}));f+=`
50
+ ${T.response}
51
+ `;return await q.writeFile(R,$,"utf-8"),O.log(`Created history file: ${v}`),v}));g+=`
44
52
  <session_history_context>
45
53
  History of prior work on this task.
46
54
  You MUST review ALL of the files below to understand the context of previous attempts. Use this information to continue the discussion appropriately.
47
55
 
48
- ${v.slice(-5).map(I=>`- ${I}`).join(`
56
+ ${I.slice(-5).map(T=>`- ${T}`).join(`
49
57
  `)}
50
58
 
51
59
  </session_history_context>
52
- `}let h=await Xr(),w="";h.length>0&&(w=`
60
+ `}let y=await zr(),w="";y.length>0&&(w=`
53
61
  <netlify_features_context>
54
62
  If the user request is explicitly related to a specific Netlify feature (e.g., Netlify Forms, Netlify Functions, etc.), you MUST review the relevant documentation below in addition to reviewing the project files.
55
63
  DO NOT force the use of any Netlify feature if the user request does not explicitly require it or if the project has alternative implementations in place already.
56
64
 
57
- ${h.map(g=>`- **${g.scope}**: ${g.path}`).join(`
65
+ ${y.map(f=>`- **${f.scope}**: ${f.path}`).join(`
58
66
  `)}
59
67
 
60
68
  Refer to these files when working with specific Netlify features.
61
69
  </netlify_features_context>
62
- `);let T=`
70
+ `);let x=`
63
71
  You're an AI agent designed to assist with tasks related to a Netlify project. Please review, understand, and use the context provided to complete the user's request as needed.
64
72
 
65
73
  <request>
66
74
  <user_request>
67
75
  ${r.prompt}
68
76
  </user_request>
69
- ${u}
77
+ ${p}
70
78
  </request>
71
79
 
72
80
  <requirements>
73
81
  <responses>
74
82
  - Do not speak in first person. You may speak as "the agent".
75
- - When work is complete, write a changes summary in ${a}/${re} as a standalone PR description. Explain what was accomplished and why (avoid too many implementation details), assuming the reader has no prior context. Use past tense and write in prose without calling it a "PR", "Changelog", etc. This is the core of a PR message or summary page that already has a heading.
76
- - If the user's request is informational in nature (asking for output, status, information, or analysis rather than asking you to make changes), write the requested information directly to the ${a}/${re} file.
83
+ - When work is complete, write a changes summary in ${l}/${ne} as a standalone PR description. Explain what was accomplished and why (avoid too many implementation details), assuming the reader has no prior context. Use past tense and write in prose without calling it a "PR", "Changelog", etc. This is the core of a PR message or summary page that already has a heading.
84
+ - If the user's request is informational in nature (asking for output, status, information, or analysis rather than asking you to make changes), write the requested information directly to the ${l}/${ne} file.
77
85
  - Do not attempt to create git commits, PRs, etc. directly. You can use git to review information if required but the system that runs this agent will handle creating PRs or commits of the changes it performs.
78
86
  - NEVER look into the \`.git\` folder
79
87
  - NEVER print potentially sensitive values (like secrets) in the planning output or results
80
88
  </responses>
81
89
  <attachements>
82
- - for requests that require work with attachments or assets, take into account that uploaded attachments are stored in ${a}/${Ue} folder
83
- - move assets from ${a}/${Ue} folder to the project assets folder if they are referenced in a code or applied changes
90
+ - for requests that require work with attachments or assets, take into account that uploaded attachments are stored in ${l}/${je} folder
91
+ - move assets from ${l}/${je} folder to the project assets folder if they are referenced in a code or applied changes
84
92
  </attachements>
85
- ${c}
93
+ ${u}
86
94
  </requirements>
87
95
 
88
96
  <extra_context>
89
97
  <metadata>
90
- - Site/Project ID: ${n.siteId}
91
- - Account/Team ID: ${n.accountId}
92
- - User ID: ${n.userId}
93
- - Site/Project Slug: ${n.siteSlug}
94
- - Netlify Functions directory: ${n.functionsDir}
98
+ - Site/Project ID: ${o.siteId}
99
+ - Account/Team ID: ${o.accountId}
100
+ - User ID: ${o.userId}
101
+ - Site/Project Slug: ${o.siteSlug}
102
+ - Netlify Functions directory: ${o.functionsDir}
95
103
  </metadata>
96
104
  <environment>
97
105
  - Node Version: ${M.version||"unknown"}
@@ -106,29 +114,29 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
106
114
  </docs>
107
115
  </extra_context>
108
116
 
109
- ${f}
110
- `;return await K.writeFile(i,T,"utf-8"),$.log(`Generated agent context document at: ${i}`),T.length>5e5&&(T=`
117
+ ${g}
118
+ `;return await q.writeFile(i,x,"utf-8"),O.log(`Generated agent context document at: ${i}`),x.length>5e5&&(x=`
111
119
  You're an AI agent designed to assist with tasks related to a Netlify project. Please review, understand, and use the context provided to complete the user's request as needed.
112
120
 
113
121
  <request>
114
122
  <user_request>
115
123
  ${r.prompt}
116
124
  </user_request>
117
- ${u}
125
+ ${p}
118
126
  </request>
119
127
 
120
128
  Use the following file for the complete context of the ask, the environment, and what's available. ${i} You MUST READ ALL OF IT. Make sure to read it first. Never cite or paraphrase private context.
121
- `),T};var zr=_("prompt"),Lt=async({cliPath:e,config:t,netlify:r,buildErrorContext:o})=>{let n=await kt({cliPath:e,config:t,netlify:r,buildErrorContext:o});return process.env.AGENT_RUNNER_DEBUG&&zr.log("Contextful Prompt:",n),{prompt:n}};var Se=_("inference_stage"),Ut=5,Ae=async e=>{let{cliPath:t,config:r,context:o,buildErrors:n,runner:s,persistSteps:a,aiGateway:l,attempt:i,contextPrefix:m,priorAgentSessionId:u}=e;Se.log(`Running inference stage, attempt ${i} of ${Ut}`);let c=await N(We(),"inference-stage",async f=>{f?.setAttributes({"inference.attempt":i||1}),ct();let{prompt:h}=await N(We(),"compose-prompt",async()=>await Lt({cliPath:t,config:r,buildErrorContext:Zr(n),netlify:o})),w=`
129
+ `),x};var Zr=_("prompt"),jt=async({cliPath:e,config:t,netlify:r,buildErrorContext:n})=>{let o=await Mt({cliPath:e,config:t,netlify:r,buildErrorContext:n});return process.env.AGENT_RUNNER_DEBUG&&Zr.log("Contextful Prompt:",o),{prompt:o}};var Ae=_("inference_stage"),Gt=5,Ne=async e=>{let{cliPath:t,config:r,context:n,buildErrors:o,runner:s,persistSteps:l,aiGateway:c,attempt:i,contextPrefix:m,priorAgentSessionId:p}=e;Ae.log(`Running inference stage, attempt ${i} of ${Gt}`);let u=await S(Je(),"inference-stage",async g=>{g?.setAttributes({"inference.attempt":i||1}),dt();let{prompt:y}=await S(Je(),"compose-prompt",async()=>await jt({cliPath:t,config:r,buildErrorContext:Qr(o),netlify:n})),w=`
122
130
  ${m||""}
123
- ${h}
124
- `.trim(),T={...r,prompt:w},g=await N(We(),`run-${r.runner}`,async()=>await s({aiGateway:l,config:T,netlify:o,persistSteps:a,continueSession:!!(i&&i>1),priorAgentSessionId:u}));return g.result&&(g.result=B(g.result)),g.error&&(g.error=B(g.error)),await a.flush(),g});if(c.error){if(Se.error("Runner failed",{stepsCount:c.steps.length,duration:c.duration,error:c.error,isRetryableError:c.isRetryableError,attempt:i||1,agentSessionId:c.agentSessionId}),c.isRetryableError&&(!i||i<Ut))return Se.log("Retrying inference stage"),await new Promise(h=>setTimeout(h,5e3)),{runnerResult:(await Ae({...e,attempt:(i||1)+1,priorAgentSessionId:c.agentSessionId,contextPrefix:c.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw Se.log("Do not retry inference stage"),new Error(c.error)}return{runnerResult:c}},Zr=e=>!e||e.length===0?"":`
131
+ ${y}
132
+ `.trim(),x={...r,prompt:w},f=await S(Je(),`run-${r.runner}`,async()=>await s({aiGateway:c,config:x,netlify:n,persistSteps:l,continueSession:!!(i&&i>1),priorAgentSessionId:p}));return f.result&&(f.result=H(f.result)),f.error&&(f.error=H(f.error)),await l.flush(),f});if(u.error){if(Ae.error("Runner failed",{stepsCount:u.steps.length,duration:u.duration,error:u.error,isRetryableError:u.isRetryableError,attempt:i||1,agentSessionId:u.agentSessionId}),u.isRetryableError&&(!i||i<Gt))return Ae.log("Retrying inference stage"),await new Promise(y=>setTimeout(y,5e3)),{runnerResult:(await Ne({...e,attempt:(i||1)+1,priorAgentSessionId:u.agentSessionId,contextPrefix:u.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw Ae.log("Do not retry inference stage"),new Error(u.error)}return{runnerResult:u}},Qr=e=>!e||e.length===0?"":`
125
133
  Deploy failed. Here are the errors to review on the latest build:
126
134
 
127
135
  Below are all of the logs with potential issues that we extracted. Some of them may be false positives, discern them carefully and ensure fixes are relevant.
128
136
 
129
137
  ${e.pop()}
130
- `;import tn from"process";import{getTracer as Ve}from"@netlify/otel";import{getTracer as Qr}from"@netlify/otel";var pe=_("deploy"),Mt=async e=>await N(Qr(),"create-preview-deploy",async t=>en(e,t)),en=async({netlify:e,hasRepo:t,skipBuild:r,message:o="Agent Preview",deploySubdomain:n,cliPath:s,filter:a},l)=>{try{let i=["deploy","--message",`"${o}"`,"--json","--draft","--verbose"];t||(pe.log("Deploy: Uploading source zip"),i.push("--upload-source-zip")),n&&i.push("--alias",n),a&&i.push("--filter",a),r?(pe.log("Deploy: Skipping build"),i.push("--no-build")):i.push("--context","deploy-preview");let m=s||"netlify";pe.log(`Running: ${m} ${i.join(" ")}`),l?.setAttributes({cmd:m,args:i});let u=await e.utils.run(m,i,{stdio:["ignore","pipe","pipe"]}),c=JSON.parse(u.stdout.trim());l?.setAttributes({success:!0,deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id}),pe.log(`
131
- Preview deploy created successfully:`,{deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id});let f={deployId:c.deploy_id,previewUrl:c.deploy_url,logsUrl:c.logs,siteId:c.site_id};return t||(f.sourceZipFilename=c.source_zip_filename),f}catch(i){throw pe.error("Failed to create preview deploy via CLI:",i),l?.setAttributes({success:!1,error:i.message}),i}};var de=_("deploy_stage"),Je=async e=>await N(Ve(),"run-deploy-stage",async()=>rn(e)),rn=async({cliPath:e,config:t,context:r,result:o,filter:n})=>{let s=await N(Ve(),"get-runner-diffs",async()=>await wt({config:t,netlify:r}));if(de.info("Resolved git",{hasChanges:s.hasChanges,ignored:s.ignored??[]}),!s.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:a,resultDiff:l,diffBinary:i,resultDiffBinary:m}=s,u=!0;de.log("Preview deploy condition check:",{resultUndefined:o===void 0,resultType:typeof o,hasChanges:u,wouldCreatePreview:o!==void 0&&u});let c=null;if(o!==void 0&&u)try{let f;try{let h=await N(Ve(),"get-runner-session",async()=>await ot(t.id,t.sessionId));h?.title&&(f=h.title)}catch(h){de.warn("Failed to fetch session title, using fallback message:",h.message)}await Y(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),c=await Mt({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:f,skipBuild:!1,deploySubdomain:yt(t.id,tn.env.SITE_NAME),filter:n})}catch(f){return de.warn("Failed to create preview deploy (continuing with agent run):",f),{diff:a,resultDiff:l,hasChanges:u,previewInfo:null,diffBinary:i,resultDiffBinary:m,deployError:f instanceof Error?f.message:String(f)}}return de.log("Git status",{hasDiff:!!a,hasChanges:u}),{diff:a,resultDiff:l,hasChanges:u,previewInfo:c,diffBinary:i,resultDiffBinary:m}};import{getTracer as Ne}from"@netlify/otel";async function jt(e,t){let{maxRetries:r,baseDelay:o,onRetry:n}=t,s;for(let a=1;a<=r;a++)try{return await e()}catch(l){if(s=l,a===r)throw s;n&&n(a,s),await new Promise(i=>setTimeout(i,o*a))}throw s}var be=class{scanDiffForForms(t){let r=[],o=null,n=[],s=t.split(`
132
- `);for(let a of s)if(a.startsWith("diff --git")){if(o&&n.length>0){let i=this.containsNetlifyForm(n,o);i&&r.push(i)}let l=a.split(" ");o=l[l.length-1].replace(/^b\//,""),n=[]}else a.startsWith("+")&&!a.startsWith("+++")&&n.push(a.slice(1));if(o&&n.length>0){let a=this.containsNetlifyForm(n,o);a&&r.push(a)}return{detected:r.length>0,matches:r}}containsNetlifyForm(t,r){let o=t.join(`
133
- `),n=[{pattern:/<[\w-]*form[\s\S]*?(data-)?netlify/i,name:"standard form element"},{pattern:/<[A-Z][\w]*[\s\S]*?component\s*=\s*["']form["'][\s\S]*?(data-)?netlify/i,name:"component with form prop"}];for(let{pattern:s,name:a}of n){let l=o.match(s);if(l){let i=l.index||0,m=Math.max(0,i-20),u=Math.min(o.length,i+l[0].length+20),c=o.slice(m,u).trim();return c=c.replace(/\s+/g," "),c.length>100&&(c=c.slice(0,97)+"..."),{file:r,snippet:`[${a}] ${c}`}}}return null}};var C=_("cleanup_stage"),Gt=async e=>await N(Ne(),"cleanup-stage",async()=>nn(e)),Xe=1024*1024*10,nn=async({config:e,diff:t,result:r,duration:o,resultDiff:n,diffBinary:s,resultDiffBinary:a,previewInfo:l})=>{let i={result:r||"Done",duration:o};l&&l.deployId&&(i.deploy_id=l.deployId),l&&l.sourceZipFilename&&(i.result_zip_file_name=l.sourceZipFilename);let m=t||s||n||a;if(m){let u=new be,c=t||s||"",f=u.scanDiffForForms(c);f.detected?(C.log("Detected Netlify form(s) in diff:"),f.matches.forEach(({file:h,snippet:w})=>{C.log(` - ${h}: ${w}`)}),i.has_netlify_form=!0):C.log("Did not detect Netlify form(s) in diff"),C.log("Did not detect Netlify form(s) in diff")}if(m)try{C.log("Getting pre-signed URLs for diff upload");let u=await it(e.id,e.sessionId),c=[];(t||s)&&c.push(Fe(u.result.upload_url,s||t).then(()=>{i.result_diff_s3_key=u.result.s3_key,C.log("Successfully uploaded result_diff to S3")})),(n||a)&&c.push(Fe(u.cumulative.upload_url,a||n).then(()=>{i.cumulative_diff_s3_key=u.cumulative.s3_key,C.log("Successfully uploaded cumulative_diff to S3")})),C.log(`Uploading ${c.length} diff(s) to S3 in parallel`),await Promise.all(c),(n||a)&&(C.log("Updating agent runner with cumulative diff S3 key"),await N(Ne(),"update-runner",async()=>{await ce(e.id,{result_diff_s3_key:u.cumulative.s3_key})}))}catch(u){C.error("S3 upload failed, falling back to inline diffs:",u);let c=Buffer.byteLength(t||s||""),f=Buffer.byteLength(a||n||"");if(c>Xe||f>Xe){let h=`Diffs exceed maximum inline size of ${Xe} bytes.`;throw C.error(h),new Error(h)}i.result_diff=t,i.result_diff_binary=s,(n||a)&&(i.cumulative_diff=n,i.cumulative_diff_binary=a,C.log("Updating agent runner with inline diffs (fallback)"),await N(Ne(),"update-runner",async()=>{await ce(e.id,{result_diff:n,result_diff_binary:a})}))}else C.log("No diffs to upload");return C.log("Updated agent runner with result"),await jt(async()=>await N(Ne(),"update-runner-session",()=>Y(e.id,e.sessionId,i)),{maxRetries:3,baseDelay:1e3,onRetry:(u,c)=>{C.error(`Error updating agent runner session (attempt ${u}):`,c),C.log("Retrying...")}}),C.log("Finished updating agent runner with result"),{sessionUpdate:i}};import{getTracer as Yt,shutdownTracers as sn,withActiveSpan as Bt}from"@netlify/otel";var an=on(import.meta.url),Ht=an("../package.json"),qt=_("pipeline_index"),Ce=3,Hs=async({config:e,apiToken:t,cliPath:r="netlify",cwd:o,errorLogsPath:n,filter:s,tracing:a={}})=>{let l,{withStageTimer:i}=ut(V.timeUnits.hours(4)),m=await et(Ht.version,e.id,a);try{await Bt(Yt(),"run-pipeline",{},m,async()=>{let u,{aiGateway:c,context:f,persistSteps:h,runner:w,sha:T}=await i("init",()=>Dt({config:e,apiToken:t,cliPath:r,cwd:o,errorLogsPath:n,filter:s,runnerVersion:Ht.version}),V.timeUnits.minutes(10));l=w.clean,e.sha=T;let{runnerResult:g}=await i("inference",()=>Ae({cliPath:r,config:e,context:f,runner:w.runner,persistSteps:h,aiGateway:c}));await Y(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let v=await 
i("deploy",()=>Je({cliPath:r,config:e,context:f,result:g.result,filter:s})),I=g,b=[];if(v.hasChanges&&v.deployError){b.push(tt(v.deployError));let y=1;for(;y<=Ce&&!v.previewInfo;)qt.log(`Deploy attempt had errors. Retrying. ${y}/${Ce}`),await Bt(Yt(),"deploy-stage",async d=>{d?.setAttributes({"stage.attempt":y});let{runnerResult:p}=await i(`inference-retry-${y}`,()=>Ae({cliPath:r,config:e,context:f,runner:w.runner,persistSteps:h,aiGateway:c,buildErrors:b,priorAgentSessionId:g.agentSessionId}));I={...p,steps:[...I.steps||[],...p.steps||[]],duration:(I.duration||0)+(p.duration||0)},v=await i(`deploy-retry-${y}`,()=>Je({cliPath:r,config:e,context:f,result:p.result,filter:s})),v.deployError&&b.push(v.deployError),y++});y>Ce&&!v.previewInfo&&(u=new Error(`Deploy validation failed after ${Ce} attempts`))}let{diff:x,resultDiff:E,previewInfo:R,diffBinary:S,resultDiffBinary:A}=v;if(await i("cleanup",()=>Gt({config:e,diff:x,result:I.result,duration:I.duration,resultDiff:E,diffBinary:S,resultDiffBinary:A,previewInfo:R}),V.timeUnits.minutes(10)),u)throw u;process.env.NETLIFY_LOCAL_MODE||(await l?.(),await vt())})}catch(u){qt.error("Got error while running pipeline",u),await l?.();let c=u instanceof Error&&u.message;throw await Y(e.id,e.sessionId,{result:c||"Encountered error when running agent",state:"error"}),u}finally{await sn()}};export{Hs as runPipeline};
138
+ `;import rn from"process";import{getTracer as Xe}from"@netlify/otel";import{getTracer as en}from"@netlify/otel";var fe=_("deploy"),Yt=async e=>await S(en(),"create-preview-deploy",async t=>tn(e,t)),tn=async({netlify:e,hasRepo:t,skipBuild:r,message:n="Agent Preview",deploySubdomain:o,cliPath:s,filter:l},c)=>{try{let i=["deploy","--message",`"${n}"`,"--json","--draft","--verbose"];t||(fe.log("Deploy: Uploading source zip"),i.push("--upload-source-zip")),o&&i.push("--alias",o),l&&i.push("--filter",l),r?(fe.log("Deploy: Skipping build"),i.push("--no-build")):i.push("--context","deploy-preview");let m=s||"netlify";fe.log(`Running: ${m} ${i.join(" ")}`),c?.setAttributes({cmd:m,args:i});let p=await e.utils.run(m,i,{stdio:["ignore","pipe","pipe"]}),u=JSON.parse(p.stdout.trim());c?.setAttributes({success:!0,deployId:u.deploy_id,deployUrl:u.deploy_url,siteId:u.site_id}),fe.log(`
139
+ Preview deploy created successfully:`,{deployId:u.deploy_id,deployUrl:u.deploy_url,siteId:u.site_id});let g={deployId:u.deploy_id,previewUrl:u.deploy_url,logsUrl:u.logs,siteId:u.site_id};return t||(g.sourceZipFilename=u.source_zip_filename),g}catch(i){throw fe.error("Failed to create preview deploy via CLI:",i),c?.setAttributes({success:!1,error:i.message}),i}};var ge=_("deploy_stage"),ze=async e=>await S(Xe(),"run-deploy-stage",async()=>nn(e)),nn=async({cliPath:e,config:t,context:r,result:n,filter:o})=>{let s=await S(Xe(),"get-runner-diffs",async()=>await Tt({config:t,netlify:r}));if(ge.info("Resolved git",{hasChanges:s.hasChanges,ignored:s.ignored??[]}),!s.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:l,resultDiff:c,diffBinary:i,resultDiffBinary:m}=s,p=!0;ge.log("Preview deploy condition check:",{resultUndefined:n===void 0,resultType:typeof n,hasChanges:p,wouldCreatePreview:n!==void 0&&p});let u=null;if(n!==void 0&&p)try{let g;try{let y=await S(Xe(),"get-runner-session",async()=>await it(t.id,t.sessionId));y?.title&&(g=y.title)}catch(y){ge.warn("Failed to fetch session title, using fallback message:",y.message)}await B(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),u=await Yt({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:g,skipBuild:!1,deploySubdomain:Et(t.id,rn.env.SITE_NAME),filter:o})}catch(g){return ge.warn("Failed to create preview deploy (continuing with agent run):",g),{diff:l,resultDiff:c,hasChanges:p,previewInfo:null,diffBinary:i,resultDiffBinary:m,deployError:g instanceof Error?g.message:String(g)}}return ge.log("Git status",{hasDiff:!!l,hasChanges:p}),{diff:l,resultDiff:c,hasChanges:p,previewInfo:u,diffBinary:i,resultDiffBinary:m}};import{getTracer as Pe}from"@netlify/otel";async function Bt(e,t){let{maxRetries:r,baseDelay:n,onRetry:o}=t,s;for(let l=1;l<=r;l++)try{return await e()}catch(c){if(s=c,l===r)throw s;o&&o(l,s),await new Promise(i=>setTimeout(i,n*l))}throw s}var Ce=class{scanDiffForForms(t){let r=[],n=null,o=[],s=t.split(`
140
+ `);for(let l of s)if(l.startsWith("diff --git")){if(n&&o.length>0){let i=this.containsNetlifyForm(o,n);i&&r.push(i)}let c=l.split(" ");n=c[c.length-1].replace(/^b\//,""),o=[]}else l.startsWith("+")&&!l.startsWith("+++")&&o.push(l.slice(1));if(n&&o.length>0){let l=this.containsNetlifyForm(o,n);l&&r.push(l)}return{detected:r.length>0,matches:r}}containsNetlifyForm(t,r){let n=t.join(`
141
+ `),o=[{pattern:/<[\w-]*form[\s\S]*?(data-)?netlify/i,name:"standard form element"},{pattern:/<[A-Z][\w]*[\s\S]*?component\s*=\s*["']form["'][\s\S]*?(data-)?netlify/i,name:"component with form prop"}];for(let{pattern:s,name:l}of o){let c=n.match(s);if(c){let i=c.index||0,m=Math.max(0,i-20),p=Math.min(n.length,i+c[0].length+20),u=n.slice(m,p).trim();return u=u.replace(/\s+/g," "),u.length>100&&(u=u.slice(0,97)+"..."),{file:r,snippet:`[${l}] ${u}`}}}return null}};var b=_("cleanup_stage"),Ht=async e=>await S(Pe(),"cleanup-stage",async()=>on(e)),Ze=1024*1024*10,on=async({config:e,diff:t,result:r,duration:n,resultDiff:o,diffBinary:s,resultDiffBinary:l,previewInfo:c})=>{let i={result:r||"Done",duration:n};c&&c.deployId&&(i.deploy_id=c.deployId),c&&c.sourceZipFilename&&(i.result_zip_file_name=c.sourceZipFilename);let m=t||s||o||l;if(m){let p=new Ce,u=t||s||"",g=p.scanDiffForForms(u);g.detected?(b.log("Detected Netlify form(s) in diff:"),g.matches.forEach(({file:y,snippet:w})=>{b.log(` - ${y}: ${w}`)}),i.has_netlify_form=!0):b.log("Did not detect Netlify form(s) in diff"),b.log("Did not detect Netlify form(s) in diff")}if(m)try{b.log("Getting pre-signed URLs for diff upload");let p=await lt(e.id,e.sessionId),u=[];(t||s)&&u.push(ke(p.result.upload_url,s||t).then(()=>{i.result_diff_s3_key=p.result.s3_key,b.log("Successfully uploaded result_diff to S3")})),(o||l)&&u.push(ke(p.cumulative.upload_url,l||o).then(()=>{i.cumulative_diff_s3_key=p.cumulative.s3_key,b.log("Successfully uploaded cumulative_diff to S3")})),b.log(`Uploading ${u.length} diff(s) to S3 in parallel`),await Promise.all(u),(o||l)&&(b.log("Updating agent runner with cumulative diff S3 key"),await S(Pe(),"update-runner",async()=>{await ue(e.id,{result_diff_s3_key:p.cumulative.s3_key})}))}catch(p){b.error("S3 upload failed, falling back to inline diffs:",p);let u=Buffer.byteLength(t||s||""),g=Buffer.byteLength(l||o||"");if(u>Ze||g>Ze){let y=`Diffs exceed maximum inline size of ${Ze} bytes.`;throw b.error(y),new Error(y)}i.result_diff=t,i.result_diff_binary=s,(o||l)&&(i.cumulative_diff=o,i.cumulative_diff_binary=l,b.log("Updating agent runner with inline diffs (fallback)"),await S(Pe(),"update-runner",async()=>{await ue(e.id,{result_diff:o,result_diff_binary:l})}))}else b.log("No diffs to upload");return b.log("Updated agent runner with result"),await Bt(async()=>await S(Pe(),"update-runner-session",()=>B(e.id,e.sessionId,i)),{maxRetries:3,baseDelay:1e3,onRetry:(p,u)=>{b.error(`Error updating agent runner session (attempt ${p}):`,u),b.log("Retrying...")}}),b.log("Finished updating agent runner with result"),{sessionUpdate:i}};import{getTracer as Kt,shutdownTracers as an,withActiveSpan as Wt}from"@netlify/otel";var ln=sn(import.meta.url),qt=ln("../package.json"),Vt=_("pipeline_index"),Fe=3,Ws=async({config:e,apiToken:t,cliPath:r="netlify",cwd:n,errorLogsPath:o,filter:s,tracing:l={}})=>{let c,{withStageTimer:i}=pt(X.timeUnits.hours(4)),m=await rt(qt.version,e.id,l);try{await Wt(Kt(),"run-pipeline",{},m,async()=>{let p,{aiGateway:u,context:g,persistSteps:y,runner:w,sha:x}=await i("init",()=>Ut({config:e,apiToken:t,cliPath:r,cwd:n,errorLogsPath:o,filter:s,runnerVersion:qt.version}),X.timeUnits.minutes(10));c=w.clean,e.sha=x;let{runnerResult:f}=await i("inference",()=>Ne({cliPath:r,config:e,context:g,runner:w.runner,persistSteps:y,aiGateway:u}));await B(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let I=await 
i("deploy",()=>ze({cliPath:r,config:e,context:g,result:f.result,filter:s})),T=f,A=[];if(I.hasChanges&&I.deployError){A.push(nt(I.deployError));let h=1;for(;h<=Fe&&!I.previewInfo;)Vt.log(`Deploy attempt had errors. Retrying. ${h}/${Fe}`),await Wt(Kt(),"deploy-stage",async d=>{d?.setAttributes({"stage.attempt":h});let{runnerResult:a}=await i(`inference-retry-${h}`,()=>Ne({cliPath:r,config:e,context:g,runner:w.runner,persistSteps:y,aiGateway:u,buildErrors:A,priorAgentSessionId:f.agentSessionId}));T={...a,steps:[...T.steps||[],...a.steps||[]],duration:(T.duration||0)+(a.duration||0)},I=await i(`deploy-retry-${h}`,()=>ze({cliPath:r,config:e,context:g,result:a.result,filter:s})),I.deployError&&A.push(I.deployError),h++});h>Fe&&!I.previewInfo&&(p=new Error(`Deploy validation failed after ${Fe} attempts`))}let{diff:N,resultDiff:C,previewInfo:R,diffBinary:v,resultDiffBinary:$}=I;if(await i("cleanup",()=>Ht({config:e,diff:N,result:T.result,duration:T.duration,resultDiff:C,diffBinary:v,resultDiffBinary:$,previewInfo:R}),X.timeUnits.minutes(10)),p)throw p;process.env.NETLIFY_LOCAL_MODE||(await c?.(),await St())})}catch(p){Vt.error("Got error while running pipeline",p),await c?.();let u=p instanceof Error&&p.message;throw await B(e.id,e.sessionId,{result:u||"Encountered error when running agent",state:"error"}),p}finally{await an()}};export{Ws as runPipeline};
134
142
  //# sourceMappingURL=index.js.map
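
Reader's note on the dist/index.js hunk above: the bundle is minified and identifiers are renamed between releases, so much of the diff is mechanical churn. One behavior that is visible in the added lines is retry handling for the inference stage: errors whose text mentions the Gemini API, HTTP status 499, a prematurely closed connection, a request timeout, or a generic network error are classified as retryable, and the stage re-runs the agent (up to 5 attempts in total, with a 5-second pause between attempts), carrying the prior agent session id forward so the Claude runner can resume via --continue/--resume. The following is a hedged, de-minified sketch of that flow; every name in it (RETRYABLE_MARKERS, isRetryableError, runInference, options.runner) is illustrative and is not the package's internal identifier.

// Hedged reconstruction of the retry logic visible in the minified bundle.
// Names are illustrative; options.runner stands in for the agent runner function.
const RETRYABLE_MARKERS = [
  'error when talking to gemini api',
  '499',
  'connection closed prematurely',
  'request timed out',
  'network error',
];

function isRetryableError(error) {
  if (!error) return false;
  const text = (typeof error === 'object' ? JSON.stringify(error) : String(error))
    .replace(/\s+/g, ' ')
    .trim()
    .toLowerCase();
  return RETRYABLE_MARKERS.some((marker) => text.includes(marker));
}

const MAX_ATTEMPTS = 5;

async function runInference(options, attempt = 1) {
  const result = await options.runner({
    ...options,
    continueSession: attempt > 1,
    priorAgentSessionId: options.priorAgentSessionId,
  });
  if (result.error && isRetryableError(result.error) && attempt < MAX_ATTEMPTS) {
    // Wait 5 seconds, then retry with the captured agent session id so the
    // runner can resume instead of starting over.
    await new Promise((resolve) => setTimeout(resolve, 5000));
    return runInference(
      { ...options, priorAgentSessionId: result.agentSessionId },
      attempt + 1,
    );
  }
  if (result.error) throw new Error(result.error);
  return result;
}

As in the bundle, the "<important> ... continue from where it left off" context prefix is only prepended to the prompt when no agent session id was captured; when a session id exists, the retry resumes that session instead.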
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@netlify/agent-runner-cli",
3
3
  "type": "module",
4
- "version": "1.52.0",
4
+ "version": "1.54.0",
5
5
  "description": "CLI tool for running Netlify agents",
6
6
  "main": "./dist/index.js",
7
7
  "types": "./dist/index.d.ts",
@@ -75,10 +75,10 @@
75
75
  "@anthropic-ai/claude-code": "2.0.42",
76
76
  "@google/gemini-cli": "0.16.0",
77
77
  "@netlify/otel": "^5.0.0",
78
+ "@openai/codex": "0.55.0",
78
79
  "@opentelemetry/exporter-trace-otlp-grpc": "^0.57.0",
79
80
  "execa": "^8.0.0",
80
81
  "get-port": "^5.1.1",
81
- "minimist": "^1.2.8",
82
- "my-codex-no-sandbox": "^0.1.2505290819"
82
+ "minimist": "^1.2.8"
83
83
  }
84
84
  }
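
Dependency note: the package.json change above replaces the forked my-codex-no-sandbox dependency with the upstream @openai/codex CLI (0.55.0). In the bundled Codex runner earlier in this diff, the CLI is prepared by writing ~/.codex/auth.json with the OPENAI_API_KEY (set from the AI Gateway token when a gateway is used) and enabling web_search in ~/.codex/config.toml before invoking codex exec. Below is a hedged, de-minified sketch of that setup; prepareCodex is an illustrative name, and the sketch only returns the argument list (the bundle spawns the resolved codex entry point through $NVM_BIN/node).

import fs from 'fs/promises';
import os from 'os';
import path from 'path';

// Hedged reconstruction of the Codex setup seen in the minified bundle above.
async function prepareCodex(model, prompt) {
  const codexDir = path.join(os.homedir(), '.codex');
  const configPath = path.join(codexDir, 'config.toml');
  const authPath = path.join(codexDir, 'auth.json');

  await fs.mkdir(codexDir, { recursive: true });
  // auth.json carries the API key the runner placed in the environment.
  await fs.writeFile(
    authPath,
    JSON.stringify({ OPENAI_API_KEY: process.env.OPENAI_API_KEY }, null, 2),
    'utf-8',
  );

  // Ensure web_search is enabled in config.toml, creating or extending the
  // [tools] section as needed.
  let config = '';
  try {
    config = await fs.readFile(configPath, 'utf-8');
  } catch {}
  if (!config.includes('web_search')) {
    config = config.includes('[tools]')
      ? config.replace(/\[tools\]/, '[tools]\nweb_search = true')
      : config + '\n[tools]\nweb_search = true\n';
    await fs.writeFile(configPath, config, 'utf-8');
  }

  // Argument list mirrors the invocation in the bundle.
  return [
    'exec',
    '--yolo',
    '--json',
    '--config',
    'web_search=true',
    ...(model ? ['--model', model] : []),
    prompt,
  ];
}

In the bundle this corresponds to running codex exec --yolo --json --config web_search=true [--model <model>] <prompt>, with the streamed JSON output parsed line by line into agent steps.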