@netlify/agent-runner-cli 0.0.6 → 1.0.0-broken

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/bin.js CHANGED
@@ -1,117 +1,143 @@
1
1
  #!/usr/bin/env node
2
- import Ye from"process";import bt from"minimist";import{createRequire as St}from"module";import wt from"process";import je from"crypto";import ne from"fs/promises";import H from"path";import b from"process";var K="netlify-agent-runner-context.md",V=".netlify",O="other",L="starter";var U="business",$="enterprise",D="free";var Me=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:b.env.NETLIFY_TEAM_ID,userId:b.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:b.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},Ge=()=>{let e=Object.keys(b.env).sort();return{nodeVersion:b.version,envVars:e}},He=10,Ve=async e=>{let{name:t,ext:r}=H.parse(e),o=e,n=H.join(b.cwd(),V,o),s=0;for(;await qe(n);){if(s>=He)throw new Error("Failed to generate context file");o=`${t}-${je.randomUUID().slice(0,5)}${r}`,n=H.join(b.cwd(),V,o),s+=1}return o},qe=async e=>{try{return await ne.access(e),!0}catch{return!1}},ce=async({cliPath:e,netlify:t,config:r})=>{let o=Me(t),n=Ge(),s=await Ve(K),a=H.join(b.cwd(),V);await ne.mkdir(a,{recursive:!0});let i=H.join(V,s),u=H.join(b.cwd(),i),p=`# Agent Context
3
-
4
- In Netlify documentation and interfaces, the terms "site" and "project" refer to the same thing.
5
-
6
- If you need to start a local development server in order to fulfill the request, try using the Netlify CLI over by running the shell command '${e} dev'. This will start a local HTTP server on port 8888, including live-reloading of any changes and, most critically, it offers local emulation for all Netlify features.
7
-
8
- ## Netlify Site
9
-
10
- - Site ID: ${o.siteId}
11
- - Account ID: ${o.accountId}
12
- - User ID: ${o.userId}
13
- - Site Slug: ${o.siteSlug}
14
- - Netlify Functions directory: ${o.functionsDir}
15
-
16
- ## Environment
17
-
18
- - Node Version: ${n.nodeVersion}
19
-
20
- ### Available Environment Variables
21
- ${n.envVars.map(l=>`\`${l}\``).join(", ")}
2
+ import ct from"process";import Fn from"minimist";import{createRequire as Rn}from"module";import{createTracerProvider as gr}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as ut}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as mr}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as hr}from"@netlify/otel";import{propagation as dt,context as pt,W3CTraceContextPropagator as yr}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as _r}from"@opentelemetry/exporter-trace-otlp-grpc";import fr from"process";function _(e){let t=fr.env.LOG!=="0";return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var Ue=_("tracing"),ft=async(e,t,r)=>(await gr({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new ut(new Me),new ut(new _r({url:r.exporterUrl}))],instrumentations:[new mr({skipHeaders:!0})]}),r.traceparent?(dt.setGlobalPropagator(new yr),dt.extract(pt.active(),{traceparent:r.traceparent,isRemote:!0})):pt.active());function S(e,t,r){return Ue.log(`\u23F3 TRACE: ${t} starting...`),hr(e,t,r)}var Me=class{export(t,r){for(let n of t)this.logSpan(n);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,n=t.attributes,o=[];for(let[u,a]of Object.entries(n))u.includes("duration")&&typeof a=="number"?o.push(`${u}=${a.toFixed(2)}ms`):o.push(`${u}=${a}`);let s=t.status?.code===2?"\u274C":"\u2705",i=o.length>0?` [${o.join(", ")}]`:"";Ue.log(`${s} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${i}`),t.status?.code===2&&t.status.message&&Ue.log(` \u274C Error: ${t.status.message}`)}};var Er=["error","failed","exception","fatal","panic","abort","crash"];function gt(e){let t=e.split(`
3
+ `),r=[],n=-1,o=0;for(;o<t.length;){let u=t[o].slice(0,500).toLowerCase();if(Er.some(p=>u.includes(p))){let p=Math.max(0,o-10,n+1),f=Math.min(t.length-1,o+20),c=[];for(let m=p;m<=f;m++)c.push(t[m]);r.push(c.join(`
4
+ `)),n=f,o=f+1}else o++}if(r.length===0)return e;let s=r.map((i,u)=>`<extracted_error_chunk order="${u+1}">
5
+ ${i}
6
+ </extracted_error_chunk>`).join(`
7
+
8
+ `);return s.length>e.length*.8?e:s}import Pe from"process";import{getTracer as ln}from"@netlify/otel";import ge from"process";var Ie=ge.env.NETLIFY_API_URL,xe=ge.env.NETLIFY_API_TOKEN,B=_("api"),ve=()=>ge.env.NETLIFY_LOCAL_MODE==="true",me=async(e,t={})=>{if(!Ie||!xe)throw new Error("No API URL or token");let r=new URL(e,Ie),n={...t,headers:{...t.headers,Authorization:`Bearer ${xe}`}};ge.env.AGENT_RUNNERS_DEBUG==="true"&&(n.headers["x-nf-debug-logging"]="true"),t.json&&(n.headers||={},n.headers["Content-Type"]="application/json",n.body=JSON.stringify(t.json));let o=await fetch(r,n),s=o.ok&&o.status<=299;if(ge.env.AGENT_RUNNERS_DEBUG==="true")B.log(`Response headers for ${r}:`),o.headers.forEach((u,a)=>{B.log(` ${a}: ${u}`)});else{let u=o.headers.get("x-request-id")||o.headers.get("x-nf-request-id");B.log(`Request ID for ${r}: ${u||"N/A"}`)}if(s||B.error(`Got status ${o.status} for request ${r}`),t.raw){if(!s)throw o;return o}let i=await(o.headers.get("content-type")?.includes("application/json")?o.json():o.text());if(!s)throw i;return i},mt=e=>{B.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(Ie=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(xe=e.constants.NETLIFY_API_TOKEN)},ht=()=>({apiUrl:Ie,token:xe}),he=async(e,t)=>ve()?(B.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):me(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),q=async(e,t,r)=>ve()?(B.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):me(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var yt=async(e,t)=>ve()?(B.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):me(`/api/v1/agent_runners/${e}/sessions/${t}`),_t=(e,t,r)=>me(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r}}),Et=async(e,t)=>ve()?(B.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):me(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),Ge=async(e,t)=>{B.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var te=_("ai_gateway"),je=null;var wt=async()=>{if(je)return je;te.log("Fetching available AI gateway providers");let e=await fetch(`${ht().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return je=t,te.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},wr=async(e,t)=>{let n=(await wt()).providers[e];if(!n)return te.log(`Provider '${e}' not found`),!1;let o=n.models.includes(t);return te.log(`Model validation for ${e}/${t}`,{isAvailable:o}),o},Tt=async({netlify:e,config:t})=>{let r,n,o,s,i=e.constants?.SITE_ID;if(!i)throw new Error("No site id");let u=async()=>{clearTimeout(o),te.log("Requesting AI gateway information");let a=await _t(i,t.id,t.sessionId);if({token:r,url:s}=a,n=a.expires_at?a.expires_at*1e3:void 0,te.log("Got AI gateway information",{token:!!r,expiresAt:n,url:s}),n){let 
p=n-Date.now()-6e4;p>0&&(o=setTimeout(()=>{u()},p))}};return await Promise.all([u(),wt()]),{get url(){return s},get token(){return r},isModelAvailableForProvider:wr}};import H from"process";import K from"path";import Re from"fs";import{fileURLToPath as Ar}from"url";import{createRequire as Sr}from"module";import{execa as br,execaCommand as lo}from"execa";import{Transform as Tr}from"stream";var Ir=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","ERROR_LOGS_PATH","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),xr=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function vr(){return Object.entries(process.env).filter(([e,t])=>!(!t||Ir.has(e)||xr.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function W(e){if(typeof e!="string")return e;let t=vr();if(t.length===0)return e;let r=e;return t.forEach(n=>{let o=new RegExp(Rr(n),"g");r=r.replace(o,"******")}),r}function Rr(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var re=class extends Tr{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,n){let o=t.toString(),s=W(o);n(null,s)}};function It(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(n,o,s){let i=typeof n=="string"?W(n):n;return typeof o=="function"?t(i,o):t(i,o,s)},process.stderr.write=function(n,o,s){let i=typeof n=="string"?W(n):n;return typeof o=="function"?r(i,o):r(i,o,s)}}var ye=null,xt=e=>(ye&&ye.destroy(),ye=new Z({totalAllowedTime:e}),ye),vt=()=>ye;var Z=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,n)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let o=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),s=null,i=null;n!==void 0&&(i=new Promise((u,a)=>{s=setTimeout(()=>{a(new Error(`${t} stage exceeded its maximum duration of ${n}ms`))},n)}));try{return i?await Promise.race([r(),i]):await r()}finally{o(),s&&clearTimeout(s)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var Rt={name:"@netlify/agent-runner-cli",type:"module",version:"1.0.0-broken",description:"CLI tool for running Netlify agents",main:"./dist/index.js",types:"./dist/index.d.ts",exports:"./dist/index.js",bin:{"agent-runner-cli":"./dist/bin.js","agent-runner-cli-local":"./dist/bin-local.js"},files:["dist/**/*.js","dist/**/*.d.ts","patches","scripts"],scripts:{build:"tsup",dev:"tsup --watch",prepare:"husky install node_modules/@netlify/eslint-config-node/.husky/",prepublishOnly:"npm ci && npm test",prepack:"npm run build",test:"run-s build format test:dev",format:"run-s build format:check-fix:*","format:ci":"run-s build format:check:*","format:check-fix:lint":"run-e format:check:lint format:fix:lint","format:check:lint":"cross-env-shell eslint $npm_package_config_eslint","format:fix:lint":"cross-env-shell eslint --fix $npm_package_config_eslint","format:check-fix:prettier":"run-e format:check:prettier format:fix:prettier","format:check:prettier":"cross-env-shell prettier --check $npm_package_config_prettier","format:fix:prettier":"cross-env-shell prettier --write $npm_package_config_prettier","test:dev":"run-s build test:dev:*","test:ci":"run-s build test:ci:*","test:dev:vitest":"LOG=0 vitest --exclude '**/integration/**'","test:ci:vitest":"LOG=0 c8 -r lcovonly -r text -r json vitest --exclude '**/integration/**'","test:integration":"vitest run test/integration/","test:integration:codex":"vitest run test/integration/codex.test.ts","test:integration:claude":"vitest run test/integration/claude.test.ts","test:integration:gemini":"vitest run test/integration/gemini.test.ts",postinstall:"node scripts/postinstall.js"},config:{eslint:'--cache --format=codeframe --max-warnings=0 "{src,scripts,test,.github}/**/*.{js,ts,md,html}"',prettier:'--ignore-path .gitignore --loglevel=warn "{src,scripts,test,.github}/**/*.{js,ts,md,yml,json,html}" "*.{js,ts,yml,json,html}" ".*.{js,ts,yml,json,html}" 
"!**/package-lock.json" "!package-lock.json"'},keywords:[],license:"MIT",repository:"netlify/agent-runner-cli",bugs:{url:"https://github.com/netlify/agent-runner-cli/issues"},author:"Netlify Inc.",directories:{test:"test"},devDependencies:{"@commitlint/cli":"^19.0.0","@commitlint/config-conventional":"^19.0.0","@eslint/compat":"^1.3.2","@eslint/js":"^9.35.0","@netlify/eslint-config-node":"^7.0.1","@types/node":"^24.5.0","@typescript-eslint/eslint-plugin":"^7.1.0","@typescript-eslint/parser":"^7.1.0","@vitest/eslint-plugin":"^1.3.10",c8:"^9.0.0","eslint-config-prettier":"^10.1.8","eslint-plugin-n":"^17.0.0",husky:"^8.0.0","patch-package":"^8.0.0",tsup:"^8.5.0",typescript:"^5.0.0","typescript-eslint":"^8.44.0",vitest:"^1.5.0"},dependencies:{"@anthropic-ai/claude-code":"2.0.69","@google/gemini-cli":"0.20.2","@netlify/otel":"^5.1.1","@openai/codex":"0.72.0","@opentelemetry/exporter-trace-otlp-grpc":"^0.57.0",execa:"^8.0.0","get-port":"^5.1.1",minimist:"^1.2.8"}};var Cr=Ar(import.meta.url),Pr=K.dirname(Cr),Or=Sr(import.meta.url),Ne=_("shell"),Ye=new Set,Fr={preferLocal:!0},F=(e,t,r)=>{let[n,o]=Lr(t,r),s={...Fr,...o},i=br(e,n,s);return Dr(i,s),kr(i),i};var Lr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},Dr=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(H.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new re).pipe(H.stdout),e.stdout?.pipe(new re).pipe(H.stdout),e.stderr?.pipe(new re).pipe(H.stderr);return}e.stdout?.pipe(H.stdout),e.stderr?.pipe(H.stderr)},Nt=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(H.kill(-e.pid,t),Ne.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return Ne.error("Error killing process:",r),!1}},$r=e=>Nt(e,"SIGKILL"),kr=e=>{Ye.add(e);let t=vt();if(t){let r=t.onTimesUp(()=>{Ne.log(`Global timer expired, killing process ${e.pid}`),Nt(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(Ne.log(`Force killing process ${e.pid} after timeout`),$r(e))},5e3)});e.on("exit",()=>{Ye.delete(e),r()}),e.on("error",()=>{Ye.delete(e),r()})}};function ne(e,t){if(!H.env.NETLIFY_LOCAL_MODE)try{let o=Or.resolve(Rt.name),s=K.dirname(o);for(;s!==K.dirname(s);){let i=K.dirname(s);if(K.basename(i)==="node_modules"){let u=K.join(i,".bin",t);if(Re.existsSync(u))return u;break}s=i}}catch(o){console.error("Could not resolve package.json",o)}if(H.env.NODE_PATH){let o=K.join(H.env.NODE_PATH,".bin",t);if(Re.existsSync(o))return o}let r=K.join(e,"node_modules",".bin",t);if(Re.existsSync(r))return r;let n=K.join(Pr,"..","node_modules",".bin",t);if(Re.existsSync(n))return n}var At="netlify-agent-runner-context.md",Be="task-history",He="netlify-context",k=".netlify",oe="results.md",qe="assets",We="other",Ke="personal";var Ve="enterprise",Je="free",St=[Ke,"pro",Ve,Je];var bt=_("utils"),Ur=e=>new Promise(t=>{setTimeout(t,e)}),Ct=(e,t=3e3)=>{let r=!1,n=null,o=[],s=null,i=(...u)=>{if(r)return n=u,new Promise(f=>{o.push(f)});r=!0;let a,p=new Promise(f=>{a=f});return s=(async()=>{await Promise.resolve();let f=await e(...u);for(a(f);;){if(await Ur(t),!n)return r=!1,s=null,f;let c=n,m=o;n=null,o=[],f=await e(...c),m.forEach(h=>{h(f)})}})(),p};return i.flush=async()=>{if((r||n)&&s)return await s,i.flush()},i},se=(e,t,r=!1)=>{let n=null,o=null,s=null,i=function(...u){o=u,s=this;let a=r&&!n;clearTimeout(n),n=setTimeout(()=>{n=null,r||(e.apply(s,o),o=null,s=null)},t),a&&(e.apply(s,o),o=null,s=null)};return i.cancel=()=>{clearTimeout(n),n=null,o=null,s=null},i.flush=()=>{if(n){clearTimeout(n);let 
u=o,a=s;n=null,o=null,s=null,e.apply(a,u)}},i},Ae=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(n){t&&(r?.error?r.error("Could not parse JSON",n):bt.error("Could not parse JSON",n))}},Pt=(e,t)=>{let o=".netlify.app",s="agent-";if(!t)return`${s}${e.slice(0,6)}`;let u=`--${t}${o}`;if(u.length>55)return"";let a=60-u.length;if(a<=0)return"";if(a>=s.length+6){let p=Math.min(a-s.length,e.length);return`${s}${e.slice(0,p)}`}return e.slice(0,a)},Mr=e=>!e||typeof e!="object"||Array.isArray(e)||Object.keys(e).length===0?!1:!!St.some(t=>t in e),Ot=()=>{let e={},t={codex:process.env.NETLIFY_FF_AGENT_RUNNER_CODEX_VERSION,claude:process.env.NETLIFY_FF_AGENT_RUNNER_CLAUDE_VERSION,gemini:process.env.NETLIFY_FF_AGENT_RUNNER_GEMINI_VERSION};return Object.entries(t).forEach(([r,n])=>{if(n){let o=`NETLIFY_FF_AGENT_RUNNER_${r.toUpperCase()}_VERSION`;try{let s=JSON.parse(n);Mr(s)&&(e[r]=s)}catch(s){let u=s instanceof SyntaxError?"Invalid JSON":s.message;bt.error(`Could not parse ${r} model version override from ${o}: ${u}`)}}}),e},Gr=50*1024,Xe=(e,t=Gr)=>{if(!e||typeof e!="string"||e.length<=t)return e;let n=e.startsWith("```")?"\n... [truncated]\n```":"... [truncated]";return e.slice(0,t)+n};import{Buffer as Ft}from"buffer";import jr from"path";var Lt=_("repo"),Dt=async({config:e,isRetry:t})=>{Lt.info("Getting runner diffs");let r=await Br(),{hasChanges:n}=r,{status:o}=r;if(!n)return{hasChanges:!1};if(!t){let T=Hr(o);await qr(T)}Lt.info("Changes after processing"),await Ze();let s=await Qe(o);await ze(s);let i={stdio:["ignore","pipe","pipe"]},a=(await F("git",["diff","--staged"],i)).stdout;if(n=!!a,!n)return{hasChanges:!1,ignored:s};let f=(await F("git",["diff","--staged","--binary"],i)).stdout,c,m;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await F("git",["commit","-m","Agent runner"]),c=(await F("git",["diff",e.sha,"HEAD"],i)).stdout;let g=(await F("git",["diff",e.sha,"HEAD","--binary"],i)).stdout;c!==g&&(m=Ft.from(g).toString("base64"))}let h={hasChanges:!0,diff:a,resultDiff:c,ignored:s};return a!==f&&(h.diffBinary=Ft.from(f).toString("base64")),m&&(h.resultDiffBinary=m),h},ze=async(e=[])=>{await F("git",["add",".",...e])},Ze=async()=>(await F("git",["status","-s"])).stdout,$t=/.. (.+)?\.log$/,Yr=[$t],Br=async()=>{let e=await Ze();return{hasChanges:(e.trim().length===0?[]:e.split(`
9
+ `).filter(n=>Yr.some(s=>s instanceof RegExp?s.test(n):n===s)?!1:n[1]?.trim()!=="")).length!==0,status:e}},kt=async()=>{let{stdout:e}=await F("git",["rev-parse","HEAD"]);return e.trim()},Ut=async()=>{let{stdout:e}=await F("git",["rev-list","--max-parents=0","HEAD"]);return e.trim()},Qe=async e=>{e||=await Ze();let t=[".netlify","node_modules","dist"],r=[];return e.split(`
10
+ `).forEach(n=>{t.forEach(s=>{[`?? ${s}`,`?? ${s}${jr.sep}`].some(u=>n.startsWith(u))&&r.push(`:!${s}`)});let o=n.match($t)?.[1];o&&r.push(`:!${o}.log`)}),r},Mt=async()=>{await F("git",["reset","--hard","HEAD"])},Hr=e=>{let t=e.split(`
11
+ `).reduce((r,n)=>{if(!n)return r;let[o,s,,...i]=n,u=i.join(""),a=o.trim(),p=s.trim();return r[u]?r[u].change=p:r[u]={filePath:u,stage:a,change:p},r},{});return Object.values(t)},qr=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push(F("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import Kr from"fs/promises";import Vr from"os";import Yt from"path";import ce from"process";import Jr from"readline";import et from"path";import Wr from"fs/promises";var tt=_("agent-output-utils");async function ie({initialResult:e,agentName:t,hasError:r}){let n="",o=et.join(process.cwd(),k,oe);try{let s=await Wr.readFile(o,"utf-8");s&&(n=s,tt.log(`Pulled result from ${et.relative(process.cwd(),o)}`))}catch{tt.log(`No results file found at ${et.relative(process.cwd(),o)}`)}return n||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function ae({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,n=r?.replace(/\s+/g," ").trim().toLowerCase()||"",o="";return n?.includes("ai gateway is not available for your account")||n?.includes("ai gateway is not enabled for your account")?o="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":n?.includes("error when talking to gemini api")?o="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(n?.includes("connection closed prematurely")||n?.includes("499")&&t.toLowerCase().includes("gemini"))&&(o=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),n?.includes("request timed out")&&(o=`The ${t} API request's have timed out. Please try again or use a different available agent.`),n?.includes("network error")&&(o=`The ${t} agent is having network issues. 
Please try again or use a different available agent.`),o&&tt.log(`Providing updated error messsage: ${o}, replacing original error: ${r}`),o||r||void 0}function le(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error"))}var U=_("runner_claude"),Gt="Claude Code",ue="claude-opus-4-5-20251101",jt=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,Xr=({catchError:e,runCmd:t,error:r,result:n,runnerName:o})=>(U.log(`${o} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!n,resultLength:n?n.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),n?(U.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:n}:{error:"Process completed with errors but result was captured",result:n}):(U.log("Setting result to undefined because no valid result was captured"),{error:r||`${o} failed`,result:void 0}));async function rt({config:e,netlify:t,persistSteps:r,aiGateway:n,continueSession:o,priorAgentSessionId:s,cwd:i=ce.cwd()}){let u=e,{accountType:a,prompt:p,modelVersionOverrides:f}=u,{model:c}=u,m="";if(n){let{token:y,url:d}=n;if(!y||!d)throw new Error("No token or url provided from AI Gateway");if(f?.claude){let l=f?.claude?.[a];if(l){if(!await n.isModelAvailableForProvider("anthropic",l))throw new Error(`Model override '${l}' is not available for anthropic provider`);c=l}}else if(c){if(!await n.isModelAvailableForProvider("anthropic",c))throw new Error(`Model '${c}' is not available for anthropic provider`)}else!!ue&&await n.isModelAvailableForProvider("anthropic",ue)?(c=ue,U.log(`Using default model: ${ue}`)):ue&&U.log(`Default model ${ue} is not available, proceeding without model specification`);ce.env.ANTHROPIC_API_KEY=y,ce.env.ANTHROPIC_BASE_URL=d}else if(!ce.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let h=[],T=[],I={},g=0,E=0,v,R,b=[ne(i,"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose",...c?["--model",c]:[],...o?["--continue"]:[],...o&&s?["--resume",s]:[],"-p",p],C=`${ce.env.NVM_BIN}/node`;U.log(`Running ${C} ${b.join(" ")}`);let O=t.utils.run(C,b,{all:!0,env:ce.env,cwd:i});O.stdin?.end();let N=se(()=>{r?.({steps:h,duration:E})},250),x=(y,d)=>{let l={...y,id:g};g+=1,T.push(l),h.push(l),d||N.flush(),N(),d&&N.flush()},G=Jr.createInterface({input:O.all});return G.on("error",y=>{U.error("Readline interface error",{error:y.message,stack:y.stack})}),G.on("line",y=>{let d=null;try{d=JSON.parse(y)}catch{U.log("Could not parse line",y)}d?.session_id&&d.session_id!==m&&(m=d.session_id),Array.isArray(d?.message?.content)?d.message.content.forEach(l=>{switch(l.type){case"text":{l.text&&x({message:l.text});break}case"image":{typeof l.source=="object"&&l.source&&l.source.type==="base64"&&l.source.media_type?x({message:`![](data:${l.source.media_type};base64,${l.source.data})`}):U.log(`Unsupported image type 
${l.source?.type}`,l.source);break}case"tool_use":{if(l.name==="Task"){let w=l.input?.description&&`\`${l.input.description}\``;x({title:[jt(l.name),w].filter(Boolean).join(" ")})}else l.id&&(I[l.id]=l);N.flush();break}case"tool_result":{let w=l.tool_use_id?I[l.tool_use_id]:void 0,X;if(w){let V=w.input?.file_path&&Yt.relative(i,w.input.file_path),P=V&&`\`${V}\``;X=[jt(w.name||""),P].filter(Boolean).join(" ")}let Te=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(w?.name||""),z;if(typeof l.content=="string")z=l.content;else if(Array.isArray(l.content)){let V=[];l.content.forEach(P=>{P?.type==="text"&&typeof P.text=="string"?V.push(P.text):P?.type==="image"&&typeof P.source=="object"&&P.source?P.source.type==="base64"&&P.source.media_type?V.push(`![](data:${P.source.media_type};base64,${P.source.data})`):U.log(`Unsupported image type ${P.source.type}`,P.source):U.log(`Unsupported block type ${P?.type}`)}),z=V.join(`
12
+
13
+ `)}Te&&z&&(z=`\`\`\`
14
+ ${z.trim()}
15
+ \`\`\``),x({title:X,message:z},!0);break}case"thinking":{l.thinking&&x({title:"Thinking",message:l.thinking},!0);break}default:U.log(`Message content type is not supported ${l.type}`,l)}}):d?.type==="result"&&(E=d.duration_ms||0,d.is_error?R=d.result:v=d.result,[T,h].forEach(l=>{l[l.length-1]?.message===v&&l.pop()}))}),await O.catch(y=>{({error:R,result:v}=Xr({catchError:y,runCmd:O,error:R,result:v,runnerName:"Claude"}))}),G.close(),N.flush(),{steps:T,duration:E,result:await ie({initialResult:v,agentName:Gt,hasError:!!R}),error:ae({error:R,agentName:Gt}),isRetryableError:le(R),agentSessionId:m}}var Bt=async()=>{let e=Yt.join(Vr.homedir(),".claude");await Kr.rm(e,{recursive:!0,force:!0})};import _e from"fs/promises";import qt from"os";import Se from"path";import Q from"process";import zr from"readline";var M=_("runner_codex"),Ht="Codex CLI",de="gpt-5.2",Zr=({catchError:e,runCmd:t,error:r,result:n,runnerName:o})=>(M.log(`${o} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!n,resultLength:n?n.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),n?(M.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:n}:{error:"Process completed with errors but result was captured",result:n}):(M.log("Setting result to undefined because no valid result was captured"),{error:r||`${o} failed`,result:void 0}));async function nt({config:e,netlify:t,persistSteps:r=void 0,sendSteps:n=void 0,aiGateway:o,cwd:s=Q.cwd()}){let{accountType:i,prompt:u,modelVersionOverrides:a}=e,{model:p}=e;if(o){let{token:d,url:l}=o;if(!d||!l)throw new Error("No token or url provided from AI Gateway");if(a?.codex){let w=a?.codex?.[i];if(w){if(!await o.isModelAvailableForProvider("openai",w))throw new Error(`Model override '${w}' is not available for openai provider`);p=w}}else if(p){if(!await o.isModelAvailableForProvider("openai",p))throw new Error(`Model '${p}' is not available for openai provider`)}else!!de&&await o.isModelAvailableForProvider("openai",de)?(p=de,M.log(`Using default model: ${de}`)):de&&M.log(`Default model ${de} is not available, proceeding without model specification`);Q.env.OPENAI_API_KEY=d,Q.env.OPENAI_BASE_URL=l}else if(!Q.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let f=[],c=[],m=[],h={},T=0,I=0,g,E,v=`${Q.env.NVM_BIN}/node`,R=Se.join(qt.homedir(),".codex"),b=Se.join(R,"config.toml"),C=Se.join(R,"auth.json");try{await _e.mkdir(R,{recursive:!0});let d={OPENAI_API_KEY:Q.env.OPENAI_API_KEY};await _e.writeFile(C,JSON.stringify(d,null,2),"utf-8"),M.log("Created Codex auth.json file");let l="";try{l=await _e.readFile(b,"utf-8")}catch{}l.includes("web_search_request")||(l.includes("[features]")?l=l.replace(/\[features\]/,`[features]
16
+ web_search_request = true`):l+=`
17
+ [features]
18
+ web_search_request = true
19
+ `,await _e.writeFile(b,l,"utf-8"),M.log("Updated Codex config with web_search_request enabled"))}catch(d){throw M.warn("Failed to setup Codex config and credentials",{error:d.message}),new Error(`Codex setup failed: ${d.message}`)}let O=[ne(s,"codex"),"exec","--yolo","--json","--enable","web_search_request",...p?["--model",p]:[],u].filter(Boolean);M.log(`Running ${v} ${O.join(" ")}`);let N=t.utils.run(v,O,{all:!0,cwd:s,env:{...Q.env}}),x=se(()=>{r?.({steps:f,duration:I}),n?.({steps:c,duration:I}),c=[]},250),G=(d,l)=>{d.id=T,T+=1,m.push(d),f.push(d),c.push(d),l||x.flush(),x(),l&&x.flush()},y=zr.createInterface({input:N.all});return y.on("error",d=>{M.error("Readline interface error",{error:d.message,stack:d.stack})}),y.on("line",d=>{let l=null;try{l=JSON.parse(d)}catch{M.log("Could not parse line",d);return}if(l?.duration_ms&&(I=l.duration_ms),l?.type==="item.started"&&l?.item?.type==="command_execution")h[l.item.id]=l.item;else if(l?.type==="item.completed"&&l?.item?.type==="command_execution"){let w=en(l.item);w&&G(w,!0)}else if(l?.type==="item.completed"&&l?.item?.type==="reasoning"){let w={title:"Reasoning",message:l.item.text};G(w,!0)}else if(l?.type==="local_shell_call")h[l.call_id]=l;else if(l?.type==="local_shell_call_output"){let w=tn(h[l.call_id],l);w&&G(w,!0)}else l?.type==="message"&&l.role==="assistant"?g=l.content.map(w=>w.text).join(`
20
+ `):l?.type==="message"&&l.role==="system"&&(E=l.content.map(w=>w.text).join(`
21
+ `))}),await N.catch(d=>{let l=Zr({catchError:d,runCmd:N,error:E,result:g,runnerName:"Codex"});E=l.error,g=l.result}),y.close(),x.flush(),{steps:m,duration:I,result:await ie({initialResult:g,agentName:Ht,hasError:!!E}),error:ae({error:E,agentName:Ht}),isRetryableError:le(E)}}var Wt=async()=>{let e=Se.join(qt.homedir(),".codex");await _e.rm(e,{recursive:!0,force:!0})},Qr=new Set(["bash","-lc"]),en=e=>{if(!e||e.type!=="command_execution")return null;let t=e.command;t.startsWith("bash -lc ")&&(t=t.replace(/^bash -lc ['"]/,"").replace(/['"]$/,""));let r=`Running \`${t}\``,n=e.aggregated_output?.trim();return n&&(n=`\`\`\`
22
+ ${n}
23
+ \`\`\``),e.status==="failed"&&e.exit_code!==0&&(n=n?`${n}
24
+
25
+ *Exit code: ${e.exit_code}*`:`*Command failed with exit code: ${e.exit_code}*`),{title:r,message:n}},tn=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(s=>!Qr.has(s)),n=r?`Running \`${r.join(" ")}\``:void 0,o;try{o=JSON.parse(t.output).output?.trim(),o&&(o=`\`\`\`
26
+ ${o.trim()}
27
+ \`\`\``)}catch(s){M.error("Could not decode outputMsg",s,t.output)}return{title:n,message:o}};import be from"fs/promises";import Vt from"os";import Ce from"path";import pe from"process";import rn from"readline";var j=_("runner_gemini"),Kt="Gemini CLI",fe="",nn=({catchError:e,runCmd:t,error:r,result:n,runnerName:o})=>(j.log(`${o} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!n,resultLength:n?n.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),n?(j.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:n}:{error:"Process completed with errors but result was captured",result:n}):(j.log("Setting result to undefined because no valid result was captured"),{error:r||`${o} failed`,result:void 0})),on={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"},sn=async()=>{let e=Ce.join(Vt.homedir(),".gemini"),t=Ce.join(e,"settings.json");try{await be.mkdir(e,{recursive:!0});let r={};try{let n=await be.readFile(t,"utf-8");r=JSON.parse(n)}catch{j.log("Creating new Gemini CLI settings file")}r.general||(r.general={}),r.general.previewFeatures||(r.general.previewFeatures=!0),r.model||(r.model={}),r.model.compressionThreshold!==.3&&(r.model.compressionThreshold=.3),await be.writeFile(t,JSON.stringify(r,null,2),"utf-8"),j.log("Configured Gemini CLI settings (preview features and compression threshold)")}catch(r){j.error("Failed to ensure Gemini CLI settings",{error:r.message})}};async function ot({config:e,netlify:t,persistSteps:r=void 0,sendSteps:n=void 0,aiGateway:o,cwd:s=pe.cwd()}){let{accountType:i,prompt:u,modelVersionOverrides:a}=e,{model:p}=e;if(await sn(),o){let{token:y,url:d}=o;if(!y||!d)throw new Error("No token or url provided from AI Gateway");if(a?.gemini){let l=a?.gemini?.[i];if(l){if(!await o.isModelAvailableForProvider("gemini",l))throw new Error(`Model override '${l}' is not available for gemini provider`);p=l}}if(!p)!!fe&&await o.isModelAvailableForProvider("gemini",fe)?(p=fe,j.log(`Using default model: ${fe}`)):fe&&j.log(`Default model ${fe} is not available, proceeding without model specification`);else if(p&&!a?.gemini?.[i]&&!await o.isModelAvailableForProvider("gemini",p))throw new Error(`Model '${p}' is not available for gemini provider`);pe.env.GEMINI_API_KEY=y,pe.env.GOOGLE_GEMINI_BASE_URL=d}else if(!pe.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let f=[],c=[],m=[],h={},T=0,I=0,g,E,v=[ne(s,"gemini"),...p?["--model",p]:[],"--yolo","--output-format","stream-json","-p",u],R=`${pe.env.NVM_BIN}/node`;j.log(`Running ${R} ${v.join(" ")}`);let b=t.utils.run(R,v,{all:!0,env:pe.env,cwd:s});b.stdin?.end();let C=se(()=>{r?.({steps:f,duration:I}),n?.({steps:c,duration:I}),c=[]},250),O=(y,d)=>{y.id=T,T+=1,m.push(y),f.push(y),c.push(y),d||C.flush(),C(),d&&C.flush()},N=rn.createInterface({input:b.all});N.on("error",y=>{j.error("Readline interface error",{error:y.message,stack:y.stack})});let x="",G=()=>{x&&O({message:x.trim()}),x=""};return N.on("line",y=>{let d=null;try{if(y.startsWith("[API Error")){let l=y.match(/\[api error: (.+?)]$/i)?.[1];d={type:"error",value:Ae(l,!1)?.error?.message||l||"Gemini encountered error"}}else 
d=JSON.parse(y)}catch{return}if(d)switch(["message","result"].includes(d.type)||G(),d.type){case"message":{d.role!=="user"&&d.content&&(x+=d.content);break}case"tool_use":{let l=on[d.tool_name]??d.tool_name,w=d.parameters?.file_path,X=w&&Ce.relative(s,w),Te=d.parameters?.command,V={title:[l,X&&`\`${X}\``,Te&&`\`${Te}\``].filter(Boolean).join(" ")};h[d.tool_id]=V,C.flush();break}case"tool_result":{let l=h[d.tool_id];l&&(d.output&&(l.message=`\`\`\`
28
+ ${d.output.trim()}
29
+ \`\`\``),O(l,!0));break}case"result":{I=d.stats?.duration_ms,d.status==="error"?E=d.error?.message:g=x.trim();break}case"error":{E=d.error;break}case"finished":break;default:{j.warn("Unhandled message type:",d.type);break}}}),await b.catch(y=>{({error:E,result:g}=nn({catchError:y,runCmd:b,error:E,result:g,runnerName:"Gemini"}))}),N.close(),C.flush(),{steps:m,duration:I,result:await ie({initialResult:g,agentName:Kt,hasError:!!E}),error:ae({error:E,agentName:Kt}),isRetryableError:le(E)}}var Jt=async()=>{let e=Ce.join(Vt.homedir(),".gemini");await be.rm(e,{recursive:!0,force:!0})};var an={codex:{runner:nt,clean:Wt},claude:{runner:rt,clean:Bt},gemini:{runner:ot,clean:Jt}},Xt=an;var zt=_("init_stage"),Zt=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:n})=>await S(ln(),"init-stage",async o=>{let s=performance.now();o?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.validateAgent":e.validateAgent,"init.runnerVersion":n||"unknown"});let i=Xt[e.runner];if(!i)throw o?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let u=cn({apiToken:r});mt(u);let a=e.useGateway?await Tt({netlify:u,config:e}):void 0;o?.setAttributes({"init.aiGateway.created":!!a}),e.validateAgent&&e.errorLogsPath&&o?.setAttributes({"init.errorLogsPath":e.errorLogsPath});let p=5*1024,f=Ct(async({steps:T=[],duration:I})=>{let g=T.map(E=>{let v=E.title?Xe(W(E.title),p):void 0,R=E.message?Xe(W(E.message)):void 0;return{...E,title:v,message:R}});T.length=0;try{return await q(e.id,e.sessionId,{steps:g,duration:I})}catch(E){zt.error("persistSteps failed",{error:E?.message||E})}},t);zt.info("Adding build files to stage");let c=await Qe();await ze(c);let m;e.hasRepo?e.sha?(m=e.sha,o?.setAttributes({"init.sha.source":"provided"})):(m=await kt(),await he(e.id,{sha:m}),o?.setAttributes({"init.sha.source":"current_commit"})):(m=await Ut(),o?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let h=performance.now()-s;return o?.setAttributes({"init.sha":m||"unknown","init.duration.ms":h,"init.status":"success"}),{aiGateway:a,context:u,persistSteps:f,runner:i,sha:m}}),cn=({apiToken:e})=>({constants:{NETLIFY_API_HOST:Pe.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||Pe.env.NETLIFY_API_TOKEN,SITE_ID:Pe.env.SITE_ID,FUNCTIONS_DIST:Pe.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:F}});import{getTracer as st}from"@netlify/otel";import un from"crypto";import J from"fs/promises";import $ from"path";import Y from"process";var L=_("context"),dn=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:Y.env.NETLIFY_TEAM_ID,userId:Y.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:Y.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},pn=10,fn=async e=>{let{name:t,ext:r}=$.parse(e),n=e,o=$.join(Y.cwd(),k,n),s=0;for(;await gn(o);){if(s>=pn)throw new Error("Failed to generate context file");n=`${t}-${un.randomUUID().slice(0,5)}${r}`,o=$.join(Y.cwd(),k,n),s+=1}return n},gn=async e=>{try{return await J.access(e),!0}catch{return!1}},mn=async()=>{try{L.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return L.warn("Invalid response structure: missing or invalid consumers array"),null;let 
r=t.consumers.find(n=>n&&typeof n=="object"&&n.key==="catchall-consumer");return r?!r.contextScopes||typeof r.contextScopes!="object"?(L.warn("Catchall consumer missing or invalid contextScopes"),null):r:(L.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?L.warn("Netlify features context request timed out"):L.warn("Failed to fetch Netlify features context:",e.message),null}},hn=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let n=await r.text();return await J.writeFile(t,n,"utf-8"),!0}catch(r){return r.name==="AbortError"?L.warn(`Download timeout for ${e}`):L.warn(`Failed to download context file ${e}:`,r.message),!1}},Oe=null,yn=async()=>{if(Oe)return Oe;let e=await mn();if(!e)return[];let t=$.join(Y.cwd(),k,He);await J.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([o,s])=>{if(!s||typeof s!="object"||!s.endpoint||!s.scope)return L.warn(`Invalid scope data for ${o}, skipping...`),null;let i=`${o}.md`,u=$.join(t,i),a=$.join(k,He,i);return L.log(`Downloading ${s.scope} context...`),await hn(s.endpoint,u)?(L.log(`Downloaded: ${a}`),{scope:s.scope,path:a,key:o}):null});return Oe=(await Promise.all(r)).filter(o=>o!==null),Oe},Qt=async({cliPath:e,netlify:t,config:r,buildErrorContext:n})=>{let o=dn(t),s=await fn(At),i=$.join(Y.cwd(),k);await J.mkdir(i,{recursive:!0});let u=$.join(k,s),a=$.join(Y.cwd(),u),p=$.join(Y.cwd(),k,oe);try{await J.unlink(p),L.log(`Deleted old results file: ${p}`)}catch{}let f=n?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
30
+ Your task is to analyze and fix the build errors.
31
+ Don't apply techniques of reverting changes. Apply fixes related to errors.
32
+ Don't try to run build by yourself. Just fix the errors.
22
33
 
23
- ## Attachments
34
+ <build_error_context>
35
+ ${n}
36
+ </build_error_context>`:"",c="";r.siteContext&&r.siteContext.length!==0&&(c=`
37
+ <project_rules>
38
+ ${r.siteContext.filter(g=>g.site_context).map(g=>typeof g.site_context=="string"?g.site_context:typeof g.site_context=="object"?JSON.stringify(g.site_context):"").join(`
24
39
 
25
- - for requests that require work with attachments or assets, take into account that uploaded attachments are stored in .netlify/assets folder
26
- - move assets from .netlify/assets folder to the project assets folder if they are referenced in a code or applied changes
40
+ `)}
41
+ </project_rules>
42
+ `);let m="";if(r.sessionHistoryContext?.length){let g=$.join(Y.cwd(),k,Be);await J.mkdir(g,{recursive:!0});let E=await Promise.all(r.sessionHistoryContext.map(async(v,R)=>{let b=R+1,C=`attempt-${b}.md`,O=$.join(g,C),N=$.join(k,Be,C),x=`# Task History - Attempt ${b}
27
43
 
28
- ## Netlify CLI
29
- 'netlify-cli' npm package is already available as a global package.
30
- Don't try to install it, in case you want to use it, use the global package.
44
+ ## Request - what the user asked for
45
+ ${v.request}
31
46
 
32
- ## Documentation
47
+ ---
33
48
 
34
- - Netlify Docs: https://docs.netlify.com
35
- - LLM Resources Index: https://docs.netlify.com/llms.txt
36
- `;return r.siteContext&&r.siteContext.length!==0&&(p+=`
37
- # Project Guidelines
49
+ ## Response - what the agent replied with after its work
38
50
 
39
- ${r.siteContext.filter(l=>l.site_context).map(l=>typeof l.site_context=="string"?l.site_context:typeof l.site_context=="object"?JSON.stringify(l.site_context):"").join(`
51
+ ${v.response}
52
+ `;return await J.writeFile(O,x,"utf-8"),L.log(`Created history file: ${N}`),N}));m+=`
53
+ <session_history_context>
54
+ History of prior work on this task.
55
+ You MUST review ALL of the files below as context to understand the context of previous attempts. Use this information to continue the discussion appropriately.
40
56
 
57
+ ${E.slice(-5).map(v=>`- ${v}`).join(`
41
58
  `)}
42
- `),r.sessionHistoryContext&&r.sessionHistoryContext.length!==0&&(p+=`# History of prior calls
43
-
44
- Please continue the discussion based on the previous discussion, provided below as xml:
45
- - <history> contains the full conversation so far.
46
- - <turn> groups a request and its response.
47
- - <request> is the user request.
48
- - <response> is the agent result.
49
59
 
50
- Use the <history> only as context. Do NOT wrap your answer in XML tags. Just return the plain response.
60
+ </session_history_context>
61
+ `}let h=await yn(),T="";h.length>0&&(T=`
62
+ <netlify_features_context>
63
+ If the user request is explicitly related to a specific Netlify feature (e.g., Netlify Forms, Netlify Functions, etc.), you MUST review the relevant documentation below in addition to reviewing the project files.
64
+ DO NOT force the use of any Netlify feature if the user request does not explicitly require it or if the project has alternative implementations in place already.
51
65
 
52
- <history>
53
- ${r.sessionHistoryContext.map((l,h)=>`<turn attempt="${h+1}">
54
- <request>${l.request}</request>
55
- <response>${l.response}</response>
56
- </turn>`).join(`
66
+ ${h.map(g=>`- **${g.scope}**: ${g.path}`).join(`
57
67
  `)}
58
- </history>
59
- `),await ne.writeFile(u,p,"utf-8"),console.log(`Generated agent context document at: ${u}`),i},de=e=>`The build is currently failing after your previous changes.
60
- Your task is to analyze and fix the build errors.
61
- Don't apply techniques of reverting changes. Apply fixes related to errors.
62
- Don't try to run build by yourself. Just fix the errors.
63
68
 
64
- ${e}`;var pe=(e={})=>`
65
- Check for errors and validate the fix
66
-
67
- ${e.errorLogsPath?`Error Check Process:
68
- 1. Read recent errors: \`tail -n 50 ${e.errorLogsPath}\` (single check, no monitoring loop)
69
- 2. If errors are found, fix them by modifying the codebase
70
- 3. After fixes, do ONE final check: \`tail -n 20 ${e.errorLogsPath}\`
71
- 4. Focus only on errors related to your changes
72
- 5. Ignore unrelated operational messages (git, API calls, plugin startup)
73
- 6. NEVER modify ${e.errorLogsPath} - it's read-only
74
- 7. Do NOT use continuous monitoring or repeated tail commands
75
- `:""}
76
-
77
- Fix Strategy:
78
- - Check error logs ONCE, fix issues, then do ONE final verification
79
- - Do NOT revert changes - improve the codebase to make it work
80
- - Do NOT continuously monitor logs or use tail in loops
81
- - Dev server is running in background, no need to run build or dev server yourself
82
- - Complete the validation quickly and decisively
83
-
84
- At the very end, print exactly one line with the verdict:
85
- VALIDATION_RESULT: PASS {"checks": ["error_logs"]}
86
- or
87
- VALIDATION_RESULT: FAIL {"checks": ["error_logs"], "errors": ["<errors>"]}
88
- `,Ke=e=>!(!e||typeof e!="object"||!Array.isArray(e.checks)||e.errors&&!Array.isArray(e.errors)),fe=e=>{if(!e||typeof e!="string")return null;let t=e.match(/VALIDATION_RESULT:\s+(PASS|FAIL)\s+({[\s\S]*?})(?:\s|$)/);if(!t)return null;let[,r,o]=t;try{let n=JSON.parse(o);return Ke(n)?!n.checks||n.checks.length===0?(console.warn("Validation result missing checks array"),null):r==="FAIL"&&(!n.errors||n.errors.length===0)?(console.warn("FAIL validation result missing errors array"),null):{ok:r==="PASS",verdict:r,details:{checks:n.checks,errors:n.errors||[],...n}}:(console.warn("Validation result has invalid schema:",n),null)}catch(n){return console.warn("Failed to parse validation result JSON:",n.message),null}},me=e=>e&&e.replace(/^.*VALIDATION_RESULT:\s+(PASS|FAIL)\s+{[\s\S]*?}.*$/gm,"");var Xe=(e=K)=>`Use ${e} to understand the Netlify project context and resources. It also contains history of previous conversations. Make sure to read it first. Never reveal, cite, or paraphrase private context.`,Je=async({cliPath:e,config:t,netlify:r})=>{let o=await ce({cliPath:e,netlify:r,config:t});return{context:Xe(o)}},We=({config:e})=>{let t=[];return e?.validateAgent&&t.push(pe(e)),t},X=async({cliPath:e,config:t,netlify:r,buildErrorContext:o}={})=>{let{context:n}=await Je({cliPath:e,config:t,netlify:r}),s;s=[...We({config:t})],o&&(s=[...s,de(o)]);let a=[];return n&&a.push(n),t.prompt&&a.push("New user request comes in the <new_request> tag.",`<new_request>${t.prompt}</new_request>`),s?.length&&a.push(s.join(`
89
-
90
- `)),{prompt:a.join(`
91
-
92
- `)}};import oe from"process";import ge from"process";var re=ge.env.NETLIFY_API_URL,se=ge.env.NETLIFY_API_TOKEN,J=async(e,t={})=>{if(!re||!se)throw new Error("No API URL or token");let r=new URL(e,re),o={...t,headers:{...t.headers,Authorization:`Bearer ${se}`}};t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),s=n.ok&&n.status<=299;if(s||console.error(`Got status ${n.status} for request ${r}`),t.raw){if(!s)throw n;return n}let a=await(n.headers.get("content-type").includes("application/json")?n.json():n.text());if(!s)throw a;return a},he=e=>{console.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(re=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(se=e.constants.NETLIFY_API_TOKEN)},W=(e,t)=>J(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),z=(e,t,r)=>J(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var _e=(e,t)=>J(`/api/v1/agent_runners/${e}/sessions/${t}`),Te=e=>J(`/api/v1/sites/${e}/ai-gateway/token`);var Ee=async({netlify:e})=>{let t,r,o,n,s=e.constants?.SITE_ID;if(!s)throw new Error("No site id");let a=async()=>{clearTimeout(o),console.log("Requesting AI gateway information");let i=await Te(s);if({token:t,url:n}=i,r=i.expires_at?i.expires_at*1e3:void 0,console.log("Got AI gateway information",{token:!!t,expiresAt:r,url:n}),r){let u=r-Date.now()-6e4;u>0&&(o=setTimeout(()=>{a()},u))}};return await a(),{get url(){return n},get token(){return t}}};import xe from"process";import{execa as ze,execaCommand as Zt}from"execa";var Ze={preferLocal:!0},ye=(e,t,r)=>{let[o,n]=Qe(t,r),s={...Ze,...n},a=ze(e,o,s);return et(a,s),a};var Qe=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},et=(e,t)=>{t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0||(e.stdout?.pipe(xe.stdout),e.stderr?.pipe(xe.stderr))};var tt=e=>new Promise(t=>{setTimeout(t,e)}),Ie=(e,t=3e3)=>{let r=!1,o=null,n=[],s=null,a=(...i)=>{if(r)return o=i,new Promise(l=>{n.push(l)});r=!0;let u,p=new Promise(l=>{u=l});return s=(async()=>{await Promise.resolve();let l=await e(...i);for(u(l);;){if(await tt(t),!o)return r=!1,s=null,l;let h=o,T=n;o=null,n=[],l=await e(...h),T.forEach(_=>{_(l)})}})(),p};return a.flush=async()=>{if((r||o)&&s)return await s,a.flush()},a},Z=(e,t,r=!1)=>{let o=null,n=null,s=null,a=function(...i){n=i,s=this;let u=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(s,n),n=null,s=null)},t),u&&(e.apply(s,n),n=null,s=null)};return a.cancel=()=>{clearTimeout(o),o=null,n=null,s=null},a.flush=()=>{if(o){clearTimeout(o);let i=n,u=s;o=null,n=null,s=null,e.apply(u,i)}},a},Q=(e,t=!0)=>{if(e)try{return JSON.parse(e)}catch(r){t&&console.error("Could not parse JSON",r)}},Ne=(e,t)=>{let n=".netlify.app",s="agent-";if(!t)return`${s}${e.slice(0,6)}`;let i=`--${t}${n}`;if(i.length>55)return"";let u=60-i.length;if(u<=0)return"";if(u>=s.length+6){let p=Math.min(u-s.length,e.length);return`${s}${e.slice(0,p)}`}return e.slice(0,u)};var Ae=async({config:e,netlify:t})=>{let{hasChanges:r,status:o}=await nt(t);if(!r)return{hasChanges:!1};let n=await rt(t,o);await t.utils.run("git",["add",".",...n]);let a=(await t.utils.run("git",["diff","--staged"])).stdout;if(r=!!a,!r)return{hasChanges:r,diff:a};let i;return e.sha&&(await t.utils.run("git",["commit","-m","Agent runner"]),i=(await t.utils.run("git",["diff",e.sha,"HEAD"])).stdout),{hasChanges:!0,diff:a,resultDiff:i}},ot=["?? 
mise.toml",/\?\? .+?\.log/],nt=async e=>{let t=await e.utils.run("git",["status","-s"]);return{hasChanges:(t.stdout.trim().length===0?[]:t.stdout.split(`
93
- `).filter(n=>!ot.some(s=>s instanceof RegExp?s.test(n):n===s))).length!==0,status:t.stdout}};var Se=async e=>{let{stdout:t}=await e.utils.run("git",["rev-parse","HEAD"]);return t.trim()},we=async e=>{let{stdout:t}=await e.utils.run("git",["rev-list","--max-parents=0","HEAD"]);return t.trim()},rt=async(e,t="")=>{let r=[".netlify","mise.toml"],o=[],n=r.map(async a=>{try{return await e.utils.run("git",["check-ignore","-v",a]),null}catch{return`:!${a}`}});return(await Promise.all(n)).forEach(a=>{a&&o.push(a)}),t.split(`
94
- `).forEach(a=>{let i=a.match(/\?\? (.+?)\.log$/)?.[1];i&&o.push(`:!${i}.log`)}),o};import st from"fs/promises";import it from"os";import ee from"path";import F from"process";var at=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(console.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(console.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(console.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function ie({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:s,prompt:a}=e,{model:i}=e;if(n){let{token:m,url:f}=n;if(!m||!f)throw new Error("No token or url provided from AI Gateway");let d=lt[s];if(!d)throw new Error(`Claude is not supported for the account type ${s}`);if(i){if(!d?.models?.[i])throw new Error(`${i} is not supported for account type ${s}`)}else i=d.default;F.env.ANTHROPIC_API_KEY=m,F.env.ANTHROPIC_BASE_URL=f}else if(!F.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let u=[],p=[],l=[],h={},T=0,_=0,E,x,R=ee.join(F.cwd(),"node_modules"),P=[ee.join(F.env.NODE_PATH||R,".bin/claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose",...i?["--model",i]:[],"-p",a],C=`${F.env.NVM_BIN}/node`;console.log(`Running ${C} ${P.join(" ")}`);let S=t.utils.run(C,P,{all:!0,env:F.env});S.stdin?.end();let g=Z(()=>{r?.({steps:u,duration:_}),o?.({steps:p,duration:_}),p=[]},250),y=(m,f)=>{m.id=T,T+=1,l.push(m),u.push(m),p.push(m),f||g.flush(),g(),f&&g.flush()},I="";return S.all.on("data",m=>{if(I+=m.toString(),!m.includes(`
95
- `))return;let f=I.split(`
96
- `).filter(Boolean).map(d=>{try{return JSON.parse(d)}catch{console.log("Could not parse line",d)}return null}).filter(Boolean);I="",f.forEach(d=>{Array.isArray(d?.message?.content)?d.message.content.forEach(c=>{switch(c.type){case"text":{c.text&&y({message:c.text});break}case"image":{typeof c.source=="object"&&c.source.type==="base64"&&c.source.media_type?y({message:`![](data:${c.source.media_type};base64,${c.source.data})`}):console.log(`Unsupported image type ${c.source?.type}`,c.source);break}case"tool_use":{if(c.name==="Task"){let N=c.input?.description&&`\`${c.input.description}\``;y({title:[c.name,N].filter(Boolean).join(" ")})}else h[c.id]=c;g.flush();break}case"tool_result":{let N=h[c.tool_use_id],v;if(N){let j=N.input?.file_path&&ee.relative(F.cwd(),N.input.file_path),A=j&&`\`${j}\``;v=[N.name,A].filter(Boolean).join(" ")}let Y=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(N?.name),k;if(typeof c.content=="string")k=c.content;else if(Array.isArray(c.content)){let j=[];c.content.forEach(A=>{A?.type==="text"&&typeof A.text=="string"?j.push(A.text):A?.type==="image"&&typeof A.source=="object"?A.source.type==="base64"&&A.source.media_type?j.push(`![](data:${A.source.media_type};base64,${A.source.data})`):console.log(`Unsupported image type ${A.source.type}`,A.source):console.log(`Unsupported block type ${A?.type}`)}),k=j.join(`
97
-
98
- `)}Y&&k&&(k=`\`\`\`
99
- ${k.trim()}
100
- \`\`\``),y({title:v,message:k},!0);break}case"thinking":{c.thinking&&y({title:"Thinking",message:c.thinking},!0);break}default:console.log(`Message content type is not supported ${c.type}`,c)}}):d?.type==="result"&&(_=d.duration_ms,d.is_error?x=d.result:E=d.result,[l,u,p].forEach(c=>{c[c.length-1]?.message===E&&c.pop()}))})}),await S.catch(m=>{({error:x,result:E}=at({catchError:m,runCmd:S,error:x,result:E,runnerName:"Claude"}))}),g.flush(),{steps:l,duration:_,result:E,error:x}}var Ce=async()=>{let e=ee.join(it.homedir(),".claude");await st.rm(e,{recursive:!0,force:!0})},lt={[U]:{default:"claude-3-7-sonnet-20250219",models:{"claude-3-5-haiku-20241022":{maxTokens:8192},"claude-3-7-sonnet-20250219":{maxTokens:64e3}}},[$]:{default:"claude-sonnet-4-20250514",models:{"claude-3-5-haiku-20241022":{maxTokens:8192},"claude-3-7-sonnet-20250219":{maxTokens:64e3},"claude-3-haiku-20240307":{maxTokens:4096},"claude-opus-4-20250514":{maxTokens:32e3},"claude-sonnet-4-20250514":{maxTokens:64e3}}},pro:{default:"claude-3-5-haiku-20241022",models:{"claude-3-5-haiku-20241022":{maxTokens:8192},"claude-3-haiku-20240307":{maxTokens:4096}}},[D]:{default:"claude-3-7-sonnet-20250219",models:{"claude-3-5-haiku-20241022":{maxTokens:8192},"claude-3-7-sonnet-20250219":{maxTokens:16e3}}},[L]:{default:"claude-3-7-sonnet-20250219",models:{"claude-3-5-haiku-20241022":{maxTokens:8192},"claude-3-7-sonnet-20250219":{maxTokens:16e3}}},[O]:{default:"claude-3-7-sonnet-20250219",models:{"claude-3-5-haiku-20241022":{maxTokens:8192},"claude-3-7-sonnet-20250219":{maxTokens:16e3}}}};import ut from"fs/promises";import ct from"os";import ae from"path";import G from"process";var dt=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(console.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(console.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(console.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function le({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:s,prompt:a}=e,{model:i}=e;if(n){let{token:g,url:y}=n;if(!g||!y)throw new Error("No token or url provided from AI Gateway");let I=pt[s];if(!I)throw new Error(`Codex is not supported for the account type ${s}`);if(i){if(!I?.models?.[i])throw new Error(`${i} is not supported for account type ${s}`)}else i=I.default;G.env.OPENAI_API_KEY=g,G.env.OPENAI_BASE_URL=y}else if(!G.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let u=[],p=[],l={},h=0,T=0,_,E,x=ae.join(G.cwd(),"node_modules"),R=[ae.join(G.env.NODE_PATH||x,".bin/codex"),"--approval-mode","full-auto",n?"--disable-response-storage":void 0,"--dangerously-auto-approve-everything",...i?["--model",i]:[],"-q",a].filter(Boolean),P=`${G.env.NVM_BIN}/node`;console.log(`Running ${P} ${R.join(" ")}`);let C=t.utils.run(P,R,{all:!0,env:{...G.env,CODEX_UNSAFE_ALLOW_NO_SANDBOX:"1"}}),S="";return C.all.on("data",g=>{if(S+=g.toString(),!g.includes(`
101
- `))return;let y=S.toString().split(`
102
- `).filter(Boolean).map(f=>{try{return JSON.parse(f)}catch{console.log("Could not parse line",f)}return null}).filter(Boolean);S="";let I=[],m=!1;y.forEach(f=>{if(f?.duration_ms&&(T=f.duration_ms,m=!0),f?.type==="local_shell_call")l[f.call_id]=f;else if(f?.type==="local_shell_call_output"){let d=mt(l[f.call_id],f);d.id=h,h+=1,d&&(p.push(d),u.push(d),I.push(d),m=!0)}else f?.type==="message"&&f.role==="assistant"?_=f.content.map(d=>d.text).join(`
103
- `):f?.type==="message"&&f.role==="system"&&(E=f.content.map(d=>d.text).join(`
104
- `))}),m&&(r?.({steps:u,duration:T}),o?.({steps:I,duration:T}))}),await C.catch(g=>{({error:E,result:_}=dt({catchError:g,runCmd:C,error:E,result:_,runnerName:"Codex"}))}),{steps:p,duration:T,result:_,error:E}}var Re=async()=>{let e=ae.join(ct.homedir(),".codex");await ut.rm(e,{recursive:!0,force:!0})},pt={[U]:{default:"codex-mini-latest",models:{"codex-mini-latest":{maxTokens:75e3},"gpt-3.5-turbo":{maxTokens:4096},"gpt-4-turbo":{maxTokens:4096},"gpt-4o":{maxTokens:16384},"gpt-4o-mini":{maxTokens:32768},"o3-mini":{maxTokens:1e5}}},[$]:{default:"codex-mini-latest",models:{"codex-mini-latest":{maxTokens:75e3},"gpt-3.5-turbo":{maxTokens:4096},"gpt-4-turbo":{maxTokens:4096},"gpt-4o":{maxTokens:16384},"gpt-4o-mini":{maxTokens:32768},o1:{maxTokens:1e5},"o1-mini":{maxTokens:65536},"o3-mini":{maxTokens:1e5},"gpt-image-1":{},"dall-e-2":{},"dall-e-3":{}}},pro:{default:"codex-mini-latest",models:{"codex-mini-latest":{maxTokens:75e3},"gpt-3.5-turbo":{maxTokens:4096},"gpt-4-turbo":{maxTokens:4096},"gpt-4o":{maxTokens:16384},"gpt-4o-mini":{maxTokens:32768}}},[D]:{default:"codex-mini-latest",models:{"codex-mini-latest":{maxTokens:75e3},"gpt-3.5-turbo":{maxTokens:4096},"gpt-4-turbo":{maxTokens:4096},"gpt-4o":{maxTokens:16384},"gpt-4o-mini":{maxTokens:32768},"o3-mini":{maxTokens:16e3}}},[L]:{default:"codex-mini-latest",models:{"codex-mini-latest":{maxTokens:75e3},"gpt-3.5-turbo":{maxTokens:4096},"gpt-4-turbo":{maxTokens:4096},"gpt-4o":{maxTokens:16384},"gpt-4o-mini":{maxTokens:32768},"o3-mini":{maxTokens:16e3}}},[O]:{default:"codex-mini-latest",models:{"codex-mini-latest":{maxTokens:75e3},"gpt-3.5-turbo":{maxTokens:4096},"gpt-4-turbo":{maxTokens:4096},"gpt-4o":{maxTokens:16384},"gpt-4o-mini":{maxTokens:32768},"o3-mini":{maxTokens:16e3}}}},ft=new Set(["bash","-lc"]),mt=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(s=>!ft.has(s)),o=r?`Running \`${r.join(" ")}\``:void 0,n;try{n=JSON.parse(t.output).output?.trim(),n&&(n=`\`\`\`
105
- ${n.trim()}
106
- \`\`\``)}catch(s){console.error("Could not decode outputMsg",s,t.output)}return{title:o,message:n}};import gt from"fs/promises";import ht from"os";import te from"path";import B from"process";var _t=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(console.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(console.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(console.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),Tt={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"};async function ue({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:s,prompt:a}=e,{model:i}=e;if(n){let{token:m,url:f}=n;if(!m||!f)throw new Error("No token or url provided from AI Gateway");let d=Et[s];if(!d)throw new Error(`Gemini is not supported for the account type ${s}`);if(i){if(!d?.models?.[i])throw new Error(`${i} is not supported for account type ${s}`)}else i=d.default;B.env.GEMINI_API_KEY=m,B.env.GOOGLE_GEMINI_BASE_URL=f}else if(!B.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let u=[],p=[],l=[],h={},T=0,_=0,E,x,R=te.join(B.cwd(),"node_modules"),P=[te.join(B.env.NODE_PATH||R,".bin/gemini"),...i?["--model",i]:[],"--yolo","-p",a],C=`${B.env.NVM_BIN}/node`;console.log(`Running ${C} ${P.join(" ")}`);let S=t.utils.run(C,P,{all:!0,env:B.env});S.stdin?.end();let g=Z(()=>{r?.({steps:u,duration:_}),o?.({steps:p,duration:_}),p=[]},250),y=(m,f)=>{m.id=T,T+=1,l.push(m),u.push(m),p.push(m),f||g.flush(),g(),f&&g.flush()},I="";return S.all.on("data",m=>{if(I+=m.toString(),!m.includes(`
107
- `))return;let f=I.toString().split(`
108
- `).filter(Boolean).map(d=>{try{if(d.startsWith("[API Error")){let c=d.match(/\[api error: (.+?)]$/i)?.[1];return{type:"error",value:Q(c,!1)?.error?.message||c||"Gemini encountered error"}}return JSON.parse(d)}catch{}return null}).filter(Boolean);I="",f.forEach(d=>{switch(d.type){case"thought":{let c=d.value;y({title:c?.subject??"Thinking...",message:c?.description},!0);break}case"content":{d.value&&y({message:d.value});break}case"tool_call_request":{let c=d.value,N=Tt[c.name]??c.name,v=c.args?.path||c.args?.absolute_path,Y=v&&te.relative(B.cwd(),v),k=c.args?.command,A={title:[N,Y&&`\`${Y}\``,k&&`\`${k}\``].filter(Boolean).join(" ")};h[c.callId]=A,g.flush();break}case"tool_result":{let c=d.value,N=h[c.callId];if(N){let v=[c.resultDisplay,c.responseParts?.functionResponse?.response?.output].find(Y=>typeof Y=="string"&&Y);v&&(N.message=`\`\`\`
109
- ${v.trim()}
110
- \`\`\``),y(N,!0)}break}case"result":{_=d.duration_ms,E=d.value,[l,u,p].forEach(c=>{c[c.length-1]?.message===E&&c.pop()});break}case"error":{x=d.value;break}case"finished":break;default:{console.warn("Unhandled message type:",d.type);break}}})}),await S.catch(m=>{({error:x,result:E}=_t({catchError:m,runCmd:S,error:x,result:E,runnerName:"Gemini"}))}),g.flush(),{steps:l,duration:_,result:E,error:x}}var Pe=async()=>{let e=te.join(ht.homedir(),".gemini");await gt.rm(e,{recursive:!0,force:!0})},Et={[U]:{default:"gemini-2.5-flash",models:{"gemini-2.0-flash":{maxTokens:8192},"gemini-2.0-flash-lite":{maxTokens:8192},"gemini-2.5-flash":{maxTokens:65536}}},[$]:{default:"gemini-2.5-pro",models:{"gemini-1.5-flash":{maxTokens:8192},"gemini-1.5-flash-8b":{maxTokens:8192},"gemini-1.5-pro":{maxTokens:8192},"gemini-2.0-flash":{maxTokens:8192},"gemini-2.0-flash-lite":{maxTokens:8192},"gemini-2.5-flash":{maxTokens:65536},"gemini-2.5-flash-lite":{maxTokens:65536},"gemini-2.5-pro":{maxTokens:65536},"imagen-4.0-generate-001":{},"veo-3.0-generate-preview":{}}},pro:{default:"gemini-2.0-flash-lite",models:{"gemini-1.5-flash-8b":{maxTokens:8192},"gemini-2.0-flash-lite":{maxTokens:8192}}},[D]:{default:"gemini-2.5-flash",models:{"gemini-2.0-flash":{maxTokens:8192},"gemini-2.0-flash-lite":{maxTokens:8192},"gemini-2.5-flash":{maxTokens:16e3}}},[L]:{default:"gemini-2.5-flash",models:{"gemini-2.0-flash":{maxTokens:8192},"gemini-2.0-flash-lite":{maxTokens:8192},"gemini-2.5-flash":{maxTokens:16e3}}},[O]:{default:"gemini-2.5-flash",models:{"gemini-2.0-flash":{maxTokens:8192},"gemini-2.0-flash-lite":{maxTokens:8192},"gemini-2.5-flash":{maxTokens:16e3}}}};var xt={codex:{runner:le,clean:Re},claude:{runner:ie,clean:Ce},gemini:{runner:ue,clean:Pe}},ve=xt;var ke=async({config:e,apiThrottle:t,apiToken:r})=>{let o=ve[e.runner];if(!o)throw new Error(`${e.runner} is not supported`);let n=yt({apiToken:r});he(n),console.log(`Starting agent runner ${e.runner}`,{id:e.id,sessionId:e.sessionId,prompt:e.prompt,hasRepo:e.hasRepo});let s=e.useGateway?await Ee({netlify:n}):void 0;e.validateAgent&&e.errorLogsPath&&console.log("Validation will use error logs file",{path:e.errorLogsPath});let a=Ie(({steps:u=[],duration:p})=>{let l=[...u];return u.length=0,z(e.id,e.sessionId,{steps:l,duration:p})},t),i;return e.hasRepo?e.sha?i=e.sha:(console.log("No sha in runner, marking the latest as the one"),i=await Se(n),await W(e.id,{sha:i})):(console.log("Zip project"),i=await we(n)),console.log("Resolved sha to",i),{aiGateway:s,context:n,persistSteps:a,runner:o,sha:i}},yt=({apiToken:e})=>({constants:{NETLIFY_API_HOST:oe.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||oe.env.NETLIFY_API_TOKEN,SITE_ID:oe.env.SITE_ID,FUNCTIONS_DIST:oe.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:ye}});var Oe=async({cliPath:e,config:t,context:r,runner:o,persistSteps:n,aiGateway:s})=>{let{prompt:a}=await X({cliPath:e,config:t,netlify:r}),i={...t,prompt:a},u=await o({aiGateway:s,config:i,netlify:r,persistSteps:n});if(u.error)throw console.error("Runner failed",{stepsCount:u.steps.length,duration:u.duration,error:u.error}),new Error(u.error);return{runnerResult:u}};var be=async(e,t,r)=>{try{console.log("Running netlify build...");let o=["build","--context","deploy-preview"];r&&o.push("--filter",r);let n=await t.utils.run(e,o);return console.log("Build completed successfully"),{success:!0,stdout:n?.stdout||"",stderr:n?.stderr||""}}catch(o){return console.log("Build 
failed:",o.message),{success:!1,stdout:o.stdout||"",stderr:o.stderr||"",error:o.message}}},It=e=>{if(e.success)return"";let t=[];return e.error&&t.push(`Build Error: ${e.error}`),e.stderr&&t.push(`Build stderr:
111
- ${e.stderr}`),e.stdout&&t.push(`Build stdout:
112
- ${e.stdout}`),t.join(`
113
-
114
- `)},Nt=e=>{let t=[];return t.push("Build validation failed. Here are the build errors you need to fix:"),e.forEach((r,o)=>{t.push(`Build attempt ${o+1}: ${It(r)}`)}),t.join(`
115
- `)},Le=async({cliPath:e,context:t,initialResult:r,runAgentCallback:o,filter:n})=>{console.log("Starting post-execution build validation");let s=await be(e,t,n);if(s.success)return console.log("Build validation passed"),{...r,buildValidation:{attempts:0,finalBuildSuccess:!0,buildHistory:[s]}};console.log("Build validation failed, starting build-fix iteration process");let a=[s],i=[],u=0,p=r;for(let l=1;l<=3;l++){console.log(`Build fix attempt ${l}/3`);let h=Nt(a);console.log("Running agent to fix build errors"),p=await o({errorContext:h}),i=[...i,...p.steps||[]],u+=p.duration||0;let T=await be(e,t,n);if(a.push(T),T.success)return console.log(`Build fixed after ${l} attempts`),{...p,steps:i,duration:u,buildValidation:{attempts:l,finalBuildSuccess:!0,buildHistory:a}};console.log(`Build still failing after attempt ${l}`)}return console.log("Build validation failed after 3 attempts"),{...p,steps:i,duration:u,buildValidation:{attempts:3,finalBuildSuccess:!1,buildHistory:a,error:"Build validation failed - unable to fix build errors after 3 attempts"}}};import At from"process";var Ue=async({netlify:e,hasRepo:t,skipBuild:r,message:o="Agent Preview",deploySubdomain:n,cliPath:s,filter:a})=>{try{let i=["deploy","--message",`"${o}"`,"--json","--draft"];t||(console.log("Deploy: Uploading source zip"),i.push("--upload-source-zip")),n&&i.push("--alias",n),a&&i.push("--filter",a),r?(console.log("Deploy: Skipping build"),i.push("--no-build")):i.push("--context","deploy-preview");let u=s||"netlify";console.log(`Running: ${u} ${i.join(" ")}`);let p=await e.utils.run(u,i),l=JSON.parse(p.stdout.trim());console.log(`
116
- Preview deploy created successfully:`,{deployId:l.deploy_id,deployUrl:l.deploy_url,siteId:l.site_id});let h={deployId:l.deploy_id,previewUrl:l.deploy_url,logsUrl:l.logs,siteId:l.site_id};return t||(h.sourceZipFilename=l.source_zip_filename),h}catch(i){throw console.error("Failed to create preview deploy via CLI:",i),i}};var $e=async({cliPath:e,config:t,context:r,result:o,buildValidation:n,filter:s})=>{let{diff:a,resultDiff:i,hasChanges:u}=await Ae({config:t,netlify:r});console.log("Preview deploy condition check:",{resultUndefined:o===void 0,resultType:typeof o,hasChanges:u,wouldCreatePreview:o!==void 0&&u});let p=null;if(o!==void 0&&u)try{let l;try{let h=await _e(t.id,t.sessionId);h?.title&&(l=h.title)}catch(h){console.warn("Failed to fetch session title, using fallback message:",h.message)}p=await Ue({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:l,skipBuild:n?.finalBuildSuccess,deploySubdomain:Ne(t.id,At.env.SITE_NAME),filter:s})}catch(l){console.warn("Failed to create preview deploy (continuing with agent run):",l)}return console.log("Git status",{diff:a,hasChanges:u}),{diff:a,resultDiff:i,hasChanges:u,previewInfo:p}};var De=async({config:e,diff:t,result:r,duration:o,resultDiff:n,previewInfo:s,cleanRunner:a})=>{let i={result_diff:t,result:r||"Done",duration:o};return s&&s.deployId&&(i.deploy_id=s.deployId),s&&s.sourceZipFilename&&(i.result_zip_file_name=s.sourceZipFilename),n?(console.log("Updating total agent result diff"),await W(e.id,{result_diff:n})):console.log("No total result diff, not updating"),await a?.(),console.log("Updated agent runner with result"),await z(e.id,e.sessionId,i),{sessionUpdate:i}};var Ct=St(import.meta.url),Rt=Ct("../package.json"),Fe=async({config:e,apiToken:t,cliPath:r="netlify",cwd:o,errorLogsPath:n,filter:s})=>{console.log("Starting agent runner orchestrator",{cliPath:r,cwd:o,processCwd:wt.cwd(),version:Rt.version});let{aiGateway:a,context:i,persistSteps:u,runner:p,stopDev:l,sha:h}=await ke({config:e,apiToken:t,cliPath:r,cwd:o,errorLogsPath:n,filter:s});e.sha=h;let{runnerResult:T}=await Oe({cliPath:r,config:e,context:i,runner:p.runner,persistSteps:u,aiGateway:a});l();let _=T,E;if(e.validateAgentWithBuild){console.log("Build validation enabled, performing post-execution build validation");let g=await Le({cliPath:r,config:e,context:i,initialResult:T,filter:s,runAgentCallback:async({errorContext:y})=>{let{prompt:I}=await X({config:{...e,prompt:T.result},buildErrorContext:y,netlify:i});return p.runner({config:{...e,prompt:I},netlify:i,persistSteps:u,aiGateway:a})}});console.log("Build validation completed:",g.buildValidation),_=g,E=g.buildValidation}let x={ok:!0},R=_.result;if(e.validateAgent&&_.result){let g=fe(_.result);console.log("Validation result",g),g&&(x=g),R=me(_.result)}x.ok||console.log("Validation failed",x);let{diff:P,resultDiff:C,previewInfo:S}=await $e({cliPath:r,config:e,context:i,result:R,buildValidation:E,filter:s});await De({config:e,diff:P,result:R,duration:_.duration,resultDiff:C,previewInfo:S,cleanRunner:p.clean})};import w from"process";var Pt="codex",vt=e=>(e??[]).filter(t=>t.request&&t.response),kt=e=>(e??[]).filter(t=>t.site_context),Be=()=>{let e=w.env.NETLIFY_AGENT_RUNNER_ID,t=w.env.NETLIFY_AGENT_RUNNER_SESSION_ID;if(!e||!t)throw new Error("ID of agent runner is not provided");let r=w.env.NETLIFY_AGENT_RUNNER_RESULT_BRANCH,o=w.env.NETLIFY_AGENT_RUNNER_PROMPT;if(!o)throw new Error("Prompt is not provided");let 
n=w.env.NETLIFY_AGENT_RUNNER_AGENT||Pt,s=w.env.NETLIFY_AGENT_RUNNER_MODEL,a=w.env.NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED==="1",i=w.env.NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED==="1",u=w.env.ERROR_LOGS_PATH,p=Q(w.env.NETLIFY_AGENT_RUNNER_CONTEXT),l=vt(p),h=kt(p),T=w.env.NETLIFY_AGENT_RUNNER_HAS_REPO!=="0",_=!w.env.NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED,E=w.env.NETLIFY_AGENT_RUNNER_SHA,x=Ot();return{id:e,sessionId:t,resultBranch:r,prompt:o,runner:n,model:s,validateAgent:a,errorLogsPath:u,sessionHistoryContext:l,siteContext:h,hasRepo:T,useGateway:_,sha:E,accountType:x,validateAgentWithBuild:i}},Ot=()=>{let e=w.env.NETLIFY_TEAM_TYPE;return e?e.includes("starter")?L:e.includes("pro")?"pro":e.startsWith("business")?U:e.startsWith("enterprise")?$:e.startsWith("free")?D:O:O};var q=bt(Ye.argv.slice(2),{string:["auth","cwd","cli-path","error-logs-path","filter"]});try{let e=Be();await Fe({config:e,apiToken:q.auth,cwd:q.cwd,cliPath:q["cli-path"],errorLogsPath:q["error-logs-path"],filter:q.filter})}catch(e){console.error("Error running agent pipeline:",e),Ye.exit(1)}
69
+ Refer to these files when working with specific Netlify features.
70
+ </netlify_features_context>
71
+ `);let I=`
72
+ You're an AI agent designed to assist with tasks related to a Netlify project. Please review, understand, and use the context provided to complete the user's request as needed.
73
+
74
+ <request>
75
+ <user_request>
76
+ ${r.prompt}
77
+ </user_request>
78
+ ${f}
79
+ </request>
80
+
81
+ <requirements>
82
+ <responses>
83
+ - Do not speak in first person. You may speak as "the agent".
84
+ - When work is complete, write a changes summary in ${i}/${oe} as a standalone PR description. Explain what was accomplished and why (avoid too many implementation details), assuming the reader has no prior context. Use past tense and write in prose without calling it a "PR", "Changelog", etc. This is the core of a PR message or summary page that already has a heading.
85
+ - If the user's request is informational in nature (asking for output, status, information, or analysis rather than asking you to make changes), write the requested information directly to the ${i}/${oe} file.
86
+ - Do not attempt to create git commits, PRs, etc. directly. You can use git to review information if required, but the system that runs this agent will handle creating PRs or commits for the changes it performs.
87
+ - NEVER look into the \`.git\` folder
88
+ - NEVER print potentially sensitive values (like secrets) in the planning output or results
89
+ </responses>
90
+ <attachments>
91
+ - For requests that involve attachments or assets, note that uploaded attachments are stored in the ${i}/${qe} folder.
92
+ - Move assets from the ${i}/${qe} folder to the project's assets folder if they are referenced in code or in the applied changes.
93
+ </attachments>
94
+ ${c}
95
+ </requirements>
96
+
97
+ <extra_context>
98
+ <metadata>
99
+ - Site/Project ID: ${o.siteId}
100
+ - Account/Team ID: ${o.accountId}
101
+ - User ID: ${o.userId}
102
+ - Site/Project Slug: ${o.siteSlug}
103
+ - Netlify Functions directory: ${o.functionsDir}
104
+ </metadata>
105
+ <environment>
106
+ - Node Version: ${Y.version||"unknown"}
107
+ - Environment variables are set globally (e.g. \`echo $VARIABLE_NAME\` can be used to check if a var is set).
108
+ - The 'netlify-cli' npm package is already available globally. Don't try to install it again.
109
+ - If you need to start a local development server in order to fulfill the request, use the Netlify CLI by running the shell command '${e} dev'. This starts a local HTTP server on port 8888 with live reloading of any changes and, most critically, local emulation of all Netlify features.
110
+ </environment>
111
+ ${T}
112
+ <docs>
113
+ - Netlify Docs: https://docs.netlify.com
114
+ - LLM Resources Index: https://docs.netlify.com/llms.txt
115
+ </docs>
116
+ </extra_context>
117
+
118
+ ${m}
119
+ `;return await J.writeFile(a,I,"utf-8"),L.log(`Generated agent context document at: ${a}`),I.length>5e5&&(I=`
120
+ You're an AI agent designed to assist with tasks related to a Netlify project. Please review, understand, and use the context provided to complete the user's request as needed.
121
+
122
+ <request>
123
+ <user_request>
124
+ ${r.prompt}
125
+ </user_request>
126
+ ${f}
127
+ </request>
128
+
129
+ Use the following file for the complete context of the request, the environment, and what's available: ${a}. You MUST READ ALL OF IT, and read it first. Never cite or paraphrase private context.
130
+ `),I};var _n=_("prompt"),er=async({cliPath:e,config:t,netlify:r,buildErrorContext:n})=>{let o=await Qt({cliPath:e,config:t,netlify:r,buildErrorContext:n});return process.env.AGENT_RUNNER_DEBUG&&_n.log("Contextful Prompt:",o),{prompt:o}};var Fe=_("inference_stage"),tr=5,Le=async e=>{let{cliPath:t,config:r,context:n,buildErrors:o,runner:s,persistSteps:i,aiGateway:u,attempt:a,contextPrefix:p,priorAgentSessionId:f}=e;Fe.log(`Running inference stage, attempt ${a} of ${tr}`);let c=await S(st(),"inference-stage",async m=>{m?.setAttributes({"inference.attempt":a||1}),It();let{prompt:h}=await S(st(),"compose-prompt",async()=>await er({cliPath:t,config:r,buildErrorContext:En(o),netlify:n})),T=`
131
+ ${p||""}
132
+ ${h}
133
+ `.trim(),I={...r,prompt:T},g=await S(st(),`run-${r.runner}`,async()=>await s({aiGateway:u,config:I,netlify:n,persistSteps:i,continueSession:!!(a&&a>1),priorAgentSessionId:f}));return g.result&&(g.result=W(g.result)),g.error&&(g.error=W(g.error)),await i.flush(),g});if(c.error){if(Fe.error("Runner failed",{stepsCount:c.steps.length,duration:c.duration,error:c.error,isRetryableError:c.isRetryableError,attempt:a||1,agentSessionId:c.agentSessionId}),c.isRetryableError&&(!a||a<tr))return Fe.log("Retrying inference stage"),await new Promise(h=>setTimeout(h,5e3)),{runnerResult:(await Le({...e,attempt:(a||1)+1,priorAgentSessionId:c.agentSessionId,contextPrefix:c.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw Fe.log("Do not retry inference stage"),new Error(c.error)}return{runnerResult:c}},En=e=>!e||e.length===0?"":`
134
+ Deploy failed. Here are the errors to review from the latest build:
135
+
136
+ Below are all of the logs with potential issues that we extracted. Some may be false positives; assess them carefully and make sure any fixes are relevant.
137
+
138
+ ${e.pop()}
139
+ `;import In from"process";import{getTracer as it}from"@netlify/otel";import{getTracer as wn}from"@netlify/otel";var Ee=_("deploy"),rr=async e=>await S(wn(),"create-preview-deploy",async t=>Tn(e,t)),Tn=async({netlify:e,hasRepo:t,skipBuild:r,message:n="Agent Preview",deploySubdomain:o,cliPath:s,filter:i},u)=>{try{let a=["deploy","--message",`"${n}"`,"--json","--draft","--verbose"];t||(Ee.log("Deploy: Uploading source zip"),a.push("--upload-source-zip")),o&&a.push("--alias",o),i&&a.push("--filter",i),r?(Ee.log("Deploy: Skipping build"),a.push("--no-build")):a.push("--context","deploy-preview");let p=s||"netlify";Ee.log(`Running: ${p} ${a.join(" ")}`),u?.setAttributes({cmd:p,args:a});let f=await e.utils.run(p,a,{stdio:["ignore","pipe","pipe"]}),c=JSON.parse(f.stdout.trim());u?.setAttributes({success:!0,deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id}),Ee.log(`
140
+ Preview deploy created successfully:`,{deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id});let m={deployId:c.deploy_id,previewUrl:c.deploy_url,logsUrl:c.logs,siteId:c.site_id};return t||(m.sourceZipFilename=c.source_zip_filename),m}catch(a){throw Ee.error("Failed to create preview deploy via CLI:",a),u?.setAttributes({success:!1,error:a.message}),a}};var we=_("deploy_stage"),at=async e=>await S(it(),"run-deploy-stage",async()=>xn(e)),xn=async({cliPath:e,config:t,context:r,result:n,filter:o,isRetry:s})=>{let i=await S(it(),"get-runner-diffs",async()=>await Dt({config:t,isRetry:s}));if(we.info("Resolved git",{hasChanges:i.hasChanges,ignored:i.ignored??[]}),!i.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:u,resultDiff:a,diffBinary:p,resultDiffBinary:f}=i,c=!0;we.log("Preview deploy condition check:",{resultUndefined:n===void 0,resultType:typeof n,hasChanges:c,wouldCreatePreview:n!==void 0&&c});let m=null;if(n!==void 0&&c)try{let h;try{let T=await S(it(),"get-runner-session",async()=>await yt(t.id,t.sessionId));T?.title&&(h=T.title)}catch(T){we.warn("Failed to fetch session title, using fallback message:",T.message)}await q(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),m=await rr({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:h,skipBuild:!1,deploySubdomain:Pt(t.id,In.env.SITE_NAME),filter:o})}catch(h){return we.warn("Failed to create preview deploy (continuing with agent run):",h),{diff:u,resultDiff:a,hasChanges:c,previewInfo:null,diffBinary:p,resultDiffBinary:f,deployError:h instanceof Error?h.message:String(h)}}return we.log("Git status",{hasDiff:!!u,hasChanges:c}),{diff:u,resultDiff:a,hasChanges:c,previewInfo:m,diffBinary:p,resultDiffBinary:f}};import{getTracer as $e}from"@netlify/otel";async function nr(e,t){let{maxRetries:r,baseDelay:n,onRetry:o}=t,s;for(let i=1;i<=r;i++)try{return await e()}catch(u){if(s=u,i===r)throw s;o&&o(i,s),await new Promise(a=>setTimeout(a,n*i))}throw s}var De=class{scanDiffForForms(t){let r=[],n=null,o=[],s=t.split(`
141
+ `);for(let i of s)if(i.startsWith("diff --git")){if(n&&o.length>0){let a=this.containsNetlifyForm(o,n);a&&r.push(a)}let u=i.split(" ");n=u[u.length-1].replace(/^b\//,""),o=[]}else i.startsWith("+")&&!i.startsWith("+++")&&o.push(i.slice(1));if(n&&o.length>0){let i=this.containsNetlifyForm(o,n);i&&r.push(i)}return{detected:r.length>0,matches:r}}containsNetlifyForm(t,r){let n=t.join(`
142
+ `),o=[{pattern:/<[\w-]*form[\s\S]*?(data-)?netlify/i,name:"standard form element"},{pattern:/<[A-Z][\w]*[\s\S]*?component\s*=\s*["']form["'][\s\S]*?(data-)?netlify/i,name:"component with form prop"}];for(let{pattern:s,name:i}of o){let u=n.match(s);if(u){let a=u.index||0,p=Math.max(0,a-20),f=Math.min(n.length,a+u[0].length+20),c=n.slice(p,f).trim();return c=c.replace(/\s+/g," "),c.length>100&&(c=c.slice(0,97)+"..."),{file:r,snippet:`[${i}] ${c}`}}}return null}};var A=_("cleanup_stage"),or=async e=>await S($e(),"cleanup-stage",async()=>vn(e)),lt=1024*1024*10,vn=async({config:e,diff:t,result:r,duration:n,resultDiff:o,diffBinary:s,resultDiffBinary:i,previewInfo:u})=>{let a={result:r||"Done",duration:n};u&&u.deployId&&(a.deploy_id=u.deployId),u&&u.sourceZipFilename&&(a.result_zip_file_name=u.sourceZipFilename);let p=t||s||o||i;if(p&&(a.diff_produced=!0),process.env.SITE_ID==="def61649-ad41-4d63-a478-8496a919443a"&&p)return A.log("Test site detected - skipping diff upload to test loss detection"),{sessionUpdate:a};if(p){let f=new De,c=t||s||"",m=f.scanDiffForForms(c);m.detected?(A.log("Detected Netlify form(s) in diff:"),m.matches.forEach(({file:h,snippet:T})=>{A.log(` - ${h}: ${T}`)}),a.has_netlify_form=!0):A.log("Did not detect Netlify form(s) in diff"),A.log("Did not detect Netlify form(s) in diff")}if(p)try{A.log("Getting pre-signed URLs for diff upload");let f=await Et(e.id,e.sessionId),c=[];(t||s)&&c.push(Ge(f.result.upload_url,s||t).then(()=>{a.result_diff_s3_key=f.result.s3_key,A.log("Successfully uploaded result_diff to S3")})),(o||i)&&c.push(Ge(f.cumulative.upload_url,i||o).then(()=>{a.cumulative_diff_s3_key=f.cumulative.s3_key,A.log("Successfully uploaded cumulative_diff to S3")})),A.log(`Uploading ${c.length} diff(s) to S3 in parallel`),await Promise.all(c),(o||i)&&(A.log("Updating agent runner with cumulative diff S3 key"),await S($e(),"update-runner",async()=>{await he(e.id,{result_diff_s3_key:f.cumulative.s3_key})}))}catch(f){A.error("S3 upload failed, falling back to inline diffs:",f);let c=Buffer.byteLength(t||s||""),m=Buffer.byteLength(i||o||"");if(c>lt||m>lt){let h=`Diffs exceed maximum inline size of ${lt} bytes.`;throw A.error(h),new Error(h)}a.result_diff=t,a.result_diff_binary=s,(o||i)&&(a.cumulative_diff=o,a.cumulative_diff_binary=i,A.log("Updating agent runner with inline diffs (fallback)"),await S($e(),"update-runner",async()=>{await he(e.id,{result_diff:o,result_diff_binary:i})}))}else A.log("No diffs to upload");return A.log("Updated agent runner with result"),await nr(async()=>await S($e(),"update-runner-session",()=>q(e.id,e.sessionId,a)),{maxRetries:3,baseDelay:1e3,onRetry:(f,c)=>{A.error(`Error updating agent runner session (attempt ${f}):`,c),A.log("Retrying...")}}),A.log("Finished updating agent runner with result"),{sessionUpdate:a}};import{getTracer as sr,shutdownTracers as Nn,withActiveSpan as ir}from"@netlify/otel";var An=Rn(import.meta.url),ar=An("../package.json"),lr=_("pipeline_index"),ke=3,cr=async({config:e,apiToken:t,cliPath:r="netlify",cwd:n,errorLogsPath:o,filter:s,tracing:i={}})=>{let u,{withStageTimer:a}=xt(Z.timeUnits.hours(4)),p=await ft(ar.version,e.id,i);try{await ir(sr(),"run-pipeline",{},p,async()=>{let{aiGateway:f,context:c,persistSteps:m,runner:h,sha:T}=await a("init",()=>Zt({config:e,apiToken:t,cliPath:r,cwd:n,errorLogsPath:o,filter:s,runnerVersion:ar.version}),Z.timeUnits.minutes(10));u=h.clean,e.sha=T;let{runnerResult:I}=await a("inference",()=>Le({cliPath:r,config:e,context:c,runner:h.runner,persistSteps:m,aiGateway:f}));await 
q(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let g=await a("deploy",()=>at({cliPath:r,config:e,context:c,result:I.result,filter:s,isRetry:!1})),E=I,v=[];if(g.hasChanges&&g.deployError){v.push(gt(g.deployError));let x=1;for(;x<=ke&&!g.previewInfo;)lr.log(`Deploy attempt had errors. Retrying. ${x}/${ke}`),await ir(sr(),"deploy-stage",async G=>{G?.setAttributes({"stage.attempt":x});let{runnerResult:y}=await a(`inference-retry-${x}`,()=>Le({cliPath:r,config:e,context:c,runner:h.runner,persistSteps:m,aiGateway:f,buildErrors:v,priorAgentSessionId:I.agentSessionId}));E={...y,steps:[...E.steps||[],...y.steps||[]],duration:(E.duration||0)+(y.duration||0)},g=await a(`deploy-retry-${x}`,()=>at({cliPath:r,config:e,context:c,result:y.result,filter:s,isRetry:!0})),g.deployError&&v.push(g.deployError),x++});x>ke&&!g.previewInfo&&console.warn(`Deploy validation failed after ${ke} attempts`)}let{diff:R,resultDiff:b,previewInfo:C,diffBinary:O,resultDiffBinary:N}=g;await a("cleanup",()=>or({config:e,diff:R,result:E.result,duration:E.duration,resultDiff:b,diffBinary:O,resultDiffBinary:N,previewInfo:C}),Z.timeUnits.minutes(10)),process.env.NETLIFY_LOCAL_MODE||(await u?.(),await Mt())})}catch(f){lr.error("Got error while running pipeline",f),await u?.();let c=f instanceof Error&&f.message;throw await q(e.id,e.sessionId,{result:c||"Encountered error when running agent",state:"error"}),f}finally{await Nn()}};import D from"process";var bn="claude",Cn=e=>(e??[]).filter(t=>t.request&&t.response),Pn=e=>(e??[]).filter(t=>t.site_context),ur=_("config"),dr=()=>{let e=D.env.NETLIFY_AGENT_RUNNER_ID,t=D.env.NETLIFY_AGENT_RUNNER_SESSION_ID;if(!e||!t)throw new Error("ID of agent runner is not provided");let r=D.env.NETLIFY_AGENT_RUNNER_RESULT_BRANCH,n=D.env.NETLIFY_AGENT_RUNNER_PROMPT;if(!n)throw new Error("Prompt is not provided");let o=D.env.NETLIFY_AGENT_RUNNER_AGENT||bn,s=D.env.NETLIFY_AGENT_RUNNER_MODEL,i=D.env.NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED==="1",u=D.env.NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED==="1",a=D.env.ERROR_LOGS_PATH,p=Ae(D.env.NETLIFY_AGENT_RUNNER_CONTEXT,!0,ur),f=Cn(p),c=Pn(p),m=D.env.NETLIFY_AGENT_RUNNER_HAS_REPO!=="0",h=!D.env.NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED,T=D.env.NETLIFY_AGENT_RUNNER_SHA,I=On(),g=Ot(),E={id:e,sessionId:t,resultBranch:r,prompt:n,runner:o,model:s,validateAgent:i,errorLogsPath:a,sessionHistoryContext:f,siteContext:c,hasRepo:m,useGateway:h,sha:T,accountType:I,validateAgentWithBuild:u,modelVersionOverrides:g};return ur.log({fullConfig:E}),E},On=()=>{let e=D.env.NETLIFY_TEAM_TYPE;return e?e.includes("personal")?Ke:e.includes("pro")?"pro":e.startsWith("enterprise")?Ve:e.startsWith("free")?Je:We:We};var pr=_("bin_cmd"),ee=Fn(ct.argv.slice(2),{string:["auth","cwd","cli-path","error-logs-path","filter","trace-exporter-url","traceparent"]});try{let e=dr();await cr({config:e,apiToken:ee.auth,cwd:ee.cwd,cliPath:ee["cli-path"],errorLogsPath:ee["error-logs-path"],filter:ee.filter,tracing:{exporterUrl:ee["trace-exporter-url"],traceparent:ee.traceparent}}),pr.info("Finished agent"),ct.exit(0)}catch(e){pr.error("Error running agent pipeline:",e),ct.exit(1)}
117
143
  //# sourceMappingURL=bin.js.map
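For illustration only, here is a minimal sketch of how the rebuilt bin.js is typically driven. The flag names and the NETLIFY_AGENT_RUNNER_* environment variables come from the argument and config parsing visible in the bundle above; the executable name, the npx invocation, and all placeholder values are assumptions, not part of this package's documented interface.

```ts
// Sketch only: invoking the agent-runner CLI with the new tracing flags.
// Only the flag names and env var names appear in the bundle above; the
// bin name and placeholder values are assumptions for illustration.
import { spawn } from "node:child_process";
import process from "node:process";

const child = spawn(
  "npx",
  [
    "@netlify/agent-runner-cli",              // assumed bin entry for this package
    "--auth", "<api-token>",                   // string flag parsed by minimist
    "--cli-path", "netlify",
    "--trace-exporter-url", "<exporter-url>",  // forwarded as tracing.exporterUrl
    "--traceparent", "<w3c-traceparent>",      // forwarded as tracing.traceparent
  ],
  {
    env: {
      ...process.env,
      NETLIFY_AGENT_RUNNER_ID: "<runner-id>",
      NETLIFY_AGENT_RUNNER_SESSION_ID: "<session-id>",
      NETLIFY_AGENT_RUNNER_PROMPT: "Add a contact form",
      // NETLIFY_AGENT_RUNNER_AGENT now defaults to "claude" (it was "codex" in 0.0.6)
    },
    stdio: "inherit",
  },
);

child.on("exit", (code) => process.exit(code ?? 1));
```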
package/dist/index.d.ts CHANGED
@@ -24,6 +24,7 @@ interface Context {
24
24
  NETLIFY_API_HOST: string;
25
25
  NETLIFY_API_TOKEN?: string;
26
26
  SITE_ID?: string;
27
+ URL?: string;
27
28
  };
28
29
  utils: {
29
30
  run: typeof run;
@@ -36,7 +37,11 @@ interface PipelineOptions {
36
37
  cwd?: string;
37
38
  errorLogsPath?: string;
38
39
  filter?: string;
40
+ tracing?: {
41
+ exporterUrl?: string;
42
+ traceparent?: string;
43
+ };
39
44
  }
40
- declare const runPipeline: ({ config, apiToken, cliPath, cwd, errorLogsPath, filter, }: PipelineOptions) => Promise<void>;
45
+ declare const runPipeline: ({ config, apiToken, cliPath, cwd, errorLogsPath, filter, tracing, }: PipelineOptions) => Promise<void>;
41
46
 
42
47
  export { type Context, type PipelineOptions, runPipeline };
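Based on the updated PipelineOptions above, a library consumer might wire the new tracing option as in the sketch below. This is a hedged sketch: the import path (assuming the package main resolves to dist/index), the shape of `config`, and the `buildConfigFromEnv` helper are assumptions that are not shown in this diff.

```ts
// Minimal usage sketch for the new `tracing` option on runPipeline.
// Assumptions: the package main resolves to dist/index, and `config` accepts
// the env-derived fields below (its exact type is not part of this diff).
import process from "node:process";
import { runPipeline, type PipelineOptions } from "@netlify/agent-runner-cli";

// Hypothetical helper: the bundled bin.js builds its config from
// NETLIFY_AGENT_RUNNER_* environment variables in a similar way.
const buildConfigFromEnv = () => ({
  id: process.env.NETLIFY_AGENT_RUNNER_ID ?? "",
  sessionId: process.env.NETLIFY_AGENT_RUNNER_SESSION_ID ?? "",
  prompt: process.env.NETLIFY_AGENT_RUNNER_PROMPT ?? "",
  runner: process.env.NETLIFY_AGENT_RUNNER_AGENT ?? "claude",
});

const options: PipelineOptions = {
  config: buildConfigFromEnv() as unknown as PipelineOptions["config"],
  apiToken: process.env.NETLIFY_API_TOKEN, // in the CLI this comes from --auth
  cliPath: "netlify",
  filter: "apps/site",                     // hypothetical monorepo filter
  tracing: {
    exporterUrl: process.env.TRACE_EXPORTER_URL, // maps to --trace-exporter-url
    traceparent: process.env.TRACEPARENT,        // maps to --traceparent
  },
};

await runPipeline(options);
```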