@netlify/agent-runner-cli 1.0.0-broken → 1.0.1-broken
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bin-local.js +18 -18
- package/dist/bin.js +26 -26
- package/dist/index.js +28 -28
- package/package.json +1 -1
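The summary above lists the changed files and their added/removed line counts, but not how to reproduce the comparison. As a minimal sketch (not part of the package, and assuming both versions are still published on the public npm registry and that this runs under Node 18+ as an ES module, so `fetch` and top-level `await` are available), the following hypothetical script resolves the tarball URL for each version so the archives can be downloaded and diffed locally:

```js
// Hypothetical lookup script, for illustration only: the package name and
// version strings come from the diff header above; everything else is assumed.
const pkg = "@netlify/agent-runner-cli";
const versions = ["1.0.0-broken", "1.0.1-broken"];

for (const version of versions) {
  // Scoped names keep the "@" but encode the "/" in npm registry URLs.
  const url = `https://registry.npmjs.org/${pkg.replace("/", "%2f")}/${version}`;
  const res = await fetch(url);
  if (!res.ok) throw new Error(`Registry lookup failed for ${version}: ${res.status}`);
  const manifest = await res.json();
  console.log(version, manifest.dist.tarball);
}
```

Diffing the extracted `package/` contents of the two tarballs is what produces the hunks below; among other changes, the bundle's embedded manifest can be seen moving from `version:"1.0.0-broken"` to `version:"1.0.1-broken"`.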
package/dist/bin-local.js
CHANGED
@@ -1,32 +1,32 @@
#!/usr/bin/env node
-
import C from"process";import ar from"path";import lr from"fs";import vn from"minimist";import{createRequire as xn}from"module";import{createTracerProvider as ur}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as
+
import C from"process";import ar from"path";import lr from"fs";import vn from"minimist";import{createRequire as xn}from"module";import{createTracerProvider as ur}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as ct}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as dr}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as pr}from"@netlify/otel";import{propagation as ut,context as dt,W3CTraceContextPropagator as fr}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as gr}from"@opentelemetry/exporter-trace-otlp-grpc";import cr from"process";function _(e){let t=cr.env.LOG!=="0";return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var Me=_("tracing"),pt=async(e,t,r)=>(await ur({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new ct(new je),new ct(new gr({url:r.exporterUrl}))],instrumentations:[new dr({skipHeaders:!0})]}),r.traceparent?(ut.setGlobalPropagator(new fr),ut.extract(dt.active(),{traceparent:r.traceparent,isRemote:!0})):dt.active());function S(e,t,r){return Me.log(`\u23F3 TRACE: ${t} starting...`),pr(e,t,r)}var je=class{export(t,r){for(let n of t)this.logSpan(n);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,n=t.attributes,o=[];for(let[u,a]of Object.entries(n))u.includes("duration")&&typeof a=="number"?o.push(`${u}=${a.toFixed(2)}ms`):o.push(`${u}=${a}`);let i=t.status?.code===2?"\u274C":"\u2705",s=o.length>0?` [${o.join(", ")}]`:"";Me.log(`${i} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${s}`),t.status?.code===2&&t.status.message&&Me.log(` \u274C Error: ${t.status.message}`)}};var mr=["error","failed","exception","fatal","panic","abort","crash"];function ft(e){let t=e.split(`
`),r=[],n=-1,o=0;for(;o<t.length;){let u=t[o].slice(0,500).toLowerCase();if(mr.some(f=>u.includes(f))){let f=Math.max(0,o-10,n+1),p=Math.min(t.length-1,o+20),c=[];for(let m=f;m<=p;m++)c.push(t[m]);r.push(c.join(`
`)),n=p,o=p+1}else o++}if(r.length===0)return e;let i=r.map((s,u)=>`<extracted_error_chunk order="${u+1}">
${s}
</extracted_error_chunk>`).join(`
-
`);return i.length>e.length*.8?e:i}import Oe from"process";import{getTracer as nn}from"@netlify/otel";import me from"process";var Te=me.env.NETLIFY_API_URL,ve=me.env.NETLIFY_API_TOKEN,H=_("api"),Re=()=>me.env.NETLIFY_LOCAL_MODE==="true",he=async(e,t={})=>{if(!Te||!ve)throw new Error("No API URL or token");let r=new URL(e,Te),n={...t,headers:{...t.headers,Authorization:`Bearer ${ve}`}};me.env.AGENT_RUNNERS_DEBUG==="true"&&(n.headers["x-nf-debug-logging"]="true"),t.json&&(n.headers||={},n.headers["Content-Type"]="application/json",n.body=JSON.stringify(t.json));let o=await fetch(r,n),i=o.ok&&o.status<=299;if(me.env.AGENT_RUNNERS_DEBUG==="true")H.log(`Response headers for ${r}:`),o.headers.forEach((u,a)=>{H.log(` ${a}: ${u}`)});else{let u=o.headers.get("x-request-id")||o.headers.get("x-nf-request-id");H.log(`Request ID for ${r}: ${u||"N/A"}`)}if(i||H.error(`Got status ${o.status} for request ${r}`),t.raw){if(!i)throw o;return o}let s=await(o.headers.get("content-type")?.includes("application/json")?o.json():o.text());if(!i)throw s;return s},ft=e=>{H.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(Te=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(ve=e.constants.NETLIFY_API_TOKEN)},gt=()=>({apiUrl:Te,token:ve}),ye=async(e,t)=>Re()?(H.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):he(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),W=async(e,t,r)=>Re()?(H.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):he(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var mt=async(e,t)=>Re()?(H.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):he(`/api/v1/agent_runners/${e}/sessions/${t}`),ht=(e,t,r)=>he(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r}}),yt=async(e,t)=>Re()?(H.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):he(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),Ge=async(e,t)=>{H.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var ne=_("ai_gateway"),Ye=null;var _t=async()=>{if(Ye)return Ye;ne.log("Fetching available AI gateway providers");let e=await fetch(`${gt().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return Ye=t,ne.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},hr=async(e,t)=>{let n=(await _t()).providers[e];if(!n)return ne.log(`Provider '${e}' not found`),!1;let o=n.models.includes(t);return ne.log(`Model validation for ${e}/${t}`,{isAvailable:o}),o},Et=async({netlify:e,config:t})=>{let r,n,o,i,s=e.constants?.SITE_ID;if(!s)throw new Error("No site id");let u=async()=>{clearTimeout(o),ne.log("Requesting AI gateway information");let a=await ht(s,t.id,t.sessionId);if({token:r,url:i}=a,n=a.expires_at?a.expires_at*1e3:void 0,ne.log("Got AI gateway information",{token:!!r,expiresAt:n,url:i}),n){let 
f=n-Date.now()-6e4;f>0&&(o=setTimeout(()=>{u()},f))}};return await Promise.all([u(),_t()]),{get url(){return i},get token(){return r},isModelAvailableForProvider:hr}};import q from"process";import J from"path";import Ae from"fs";import{fileURLToPath as Tr}from"url";import{createRequire as vr}from"module";import{execa as Rr,execaCommand as eo}from"execa";import{Transform as yr}from"stream";var _r=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","ERROR_LOGS_PATH","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),Er=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function wr(){return Object.entries(process.env).filter(([e,t])=>!(!t||_r.has(e)||Er.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function V(e){if(typeof e!="string")return e;let t=wr();if(t.length===0)return e;let r=e;return t.forEach(n=>{let o=new RegExp(xr(n),"g");r=r.replace(o,"******")}),r}function xr(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var oe=class extends yr{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,n){let o=t.toString(),i=V(o);n(null,i)}};function wt(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(n,o,i){let s=typeof n=="string"?V(n):n;return typeof o=="function"?t(s,o):t(s,o,i)},process.stderr.write=function(n,o,i){let s=typeof n=="string"?V(n):n;return typeof o=="function"?r(s,o):r(s,o,i)}}var _e=null,xt=e=>(_e&&_e.destroy(),_e=new ee({totalAllowedTime:e}),_e),It=()=>_e;var ee=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,n)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let o=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),i=null,s=null;n!==void 0&&(s=new Promise((u,a)=>{i=setTimeout(()=>{a(new Error(`${t} stage exceeded its maximum duration of ${n}ms`))},n)}));try{return s?await Promise.race([r(),s]):await r()}finally{o(),i&&clearTimeout(i)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var Tt={name:"@netlify/agent-runner-cli",type:"module",version:"1.0.0-broken",description:"CLI tool for running Netlify agents",main:"./dist/index.js",types:"./dist/index.d.ts",exports:"./dist/index.js",bin:{"agent-runner-cli":"./dist/bin.js","agent-runner-cli-local":"./dist/bin-local.js"},files:["dist/**/*.js","dist/**/*.d.ts","patches","scripts"],scripts:{build:"tsup",dev:"tsup --watch",prepare:"husky install node_modules/@netlify/eslint-config-node/.husky/",prepublishOnly:"npm ci && npm test",prepack:"npm run build",test:"run-s build format test:dev",format:"run-s build format:check-fix:*","format:ci":"run-s build format:check:*","format:check-fix:lint":"run-e format:check:lint format:fix:lint","format:check:lint":"cross-env-shell eslint $npm_package_config_eslint","format:fix:lint":"cross-env-shell eslint --fix $npm_package_config_eslint","format:check-fix:prettier":"run-e format:check:prettier format:fix:prettier","format:check:prettier":"cross-env-shell prettier --check $npm_package_config_prettier","format:fix:prettier":"cross-env-shell prettier --write $npm_package_config_prettier","test:dev":"run-s build test:dev:*","test:ci":"run-s build test:ci:*","test:dev:vitest":"LOG=0 vitest --exclude '**/integration/**'","test:ci:vitest":"LOG=0 c8 -r lcovonly -r text -r json vitest --exclude '**/integration/**'","test:integration":"vitest run test/integration/","test:integration:codex":"vitest run test/integration/codex.test.ts","test:integration:claude":"vitest run test/integration/claude.test.ts","test:integration:gemini":"vitest run test/integration/gemini.test.ts",postinstall:"node scripts/postinstall.js"},config:{eslint:'--cache --format=codeframe --max-warnings=0 "{src,scripts,test,.github}/**/*.{js,ts,md,html}"',prettier:'--ignore-path .gitignore --loglevel=warn "{src,scripts,test,.github}/**/*.{js,ts,md,yml,json,html}" "*.{js,ts,yml,json,html}" ".*.{js,ts,yml,json,html}" 
"!**/package-lock.json" "!package-lock.json"'},keywords:[],license:"MIT",repository:"netlify/agent-runner-cli",bugs:{url:"https://github.com/netlify/agent-runner-cli/issues"},author:"Netlify Inc.",directories:{test:"test"},devDependencies:{"@commitlint/cli":"^19.0.0","@commitlint/config-conventional":"^19.0.0","@eslint/compat":"^1.3.2","@eslint/js":"^9.35.0","@netlify/eslint-config-node":"^7.0.1","@types/node":"^24.5.0","@typescript-eslint/eslint-plugin":"^7.1.0","@typescript-eslint/parser":"^7.1.0","@vitest/eslint-plugin":"^1.3.10",c8:"^9.0.0","eslint-config-prettier":"^10.1.8","eslint-plugin-n":"^17.0.0",husky:"^8.0.0","patch-package":"^8.0.0",tsup:"^8.5.0",typescript:"^5.0.0","typescript-eslint":"^8.44.0",vitest:"^1.5.0"},dependencies:{"@anthropic-ai/claude-code":"2.0.69","@google/gemini-cli":"0.20.2","@netlify/otel":"^5.1.1","@openai/codex":"0.72.0","@opentelemetry/exporter-trace-otlp-grpc":"^0.57.0",execa:"^8.0.0","get-port":"^5.1.1",minimist:"^1.2.8"}};var Ar=Tr(import.meta.url),Sr=J.dirname(Ar),br=vr(import.meta.url),Se=_("shell"),Be=new Set,Nr={preferLocal:!0},N=(e,t,r)=>{let[n,o]=Cr(t,r),i={...Nr,...o},s=Rr(e,n,i);return Pr(s,i),Fr(s),s};var Cr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},Pr=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(q.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new oe).pipe(q.stdout),e.stdout?.pipe(new oe).pipe(q.stdout),e.stderr?.pipe(new oe).pipe(q.stderr);return}e.stdout?.pipe(q.stdout),e.stderr?.pipe(q.stderr)},vt=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(q.kill(-e.pid,t),Se.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return Se.error("Error killing process:",r),!1}},Or=e=>vt(e,"SIGKILL"),Fr=e=>{Be.add(e);let t=It();if(t){let r=t.onTimesUp(()=>{Se.log(`Global timer expired, killing process ${e.pid}`),vt(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(Se.log(`Force killing process ${e.pid} after timeout`),Or(e))},5e3)});e.on("exit",()=>{Be.delete(e),r()}),e.on("error",()=>{Be.delete(e),r()})}};function be(e,t){return!!te(e,t)}function te(e,t){if(!q.env.NETLIFY_LOCAL_MODE)try{let o=br.resolve(Tt.name),i=J.dirname(o);for(;i!==J.dirname(i);){let s=J.dirname(i);if(J.basename(s)==="node_modules"){let u=J.join(s,".bin",t);if(Ae.existsSync(u))return u;break}i=s}}catch(o){console.error("Could not resolve package.json",o)}if(q.env.NODE_PATH){let o=J.join(q.env.NODE_PATH,".bin",t);if(Ae.existsSync(o))return o}let r=J.join(e,"node_modules",".bin",t);if(Ae.existsSync(r))return r;let n=J.join(Sr,"..","node_modules",".bin",t);if(Ae.existsSync(n))return n}var Rt="netlify-agent-runner-context.md",He="task-history",qe="netlify-context",U=".netlify",ie="results.md",Ke="assets";var $r=_("utils"),kr=e=>new Promise(t=>{setTimeout(t,e)}),At=(e,t=3e3)=>{let r=!1,n=null,o=[],i=null,s=(...u)=>{if(r)return n=u,new Promise(p=>{o.push(p)});r=!0;let a,f=new Promise(p=>{a=p});return i=(async()=>{await Promise.resolve();let p=await e(...u);for(a(p);;){if(await kr(t),!n)return r=!1,i=null,p;let c=n,m=o;n=null,o=[],p=await e(...c),m.forEach(h=>{h(p)})}})(),f};return s.flush=async()=>{if((r||n)&&i)return await i,s.flush()},s},se=(e,t,r=!1)=>{let n=null,o=null,i=null,s=function(...u){o=u,i=this;let a=r&&!n;clearTimeout(n),n=setTimeout(()=>{n=null,r||(e.apply(i,o),o=null,i=null)},t),a&&(e.apply(i,o),o=null,i=null)};return s.cancel=()=>{clearTimeout(n),n=null,o=null,i=null},s.flush=()=>{if(n){clearTimeout(n);let u=o,a=i;n=null,o=null,i=null,e.apply(a,u)}},s},St=(e,t=!0,r)=>{if(e)try{return 
JSON.parse(e)}catch(n){t&&(r?.error?r.error("Could not parse JSON",n):$r.error("Could not parse JSON",n))}},bt=(e,t)=>{let o=".netlify.app",i="agent-";if(!t)return`${i}${e.slice(0,6)}`;let u=`--${t}${o}`;if(u.length>55)return"";let a=60-u.length;if(a<=0)return"";if(a>=i.length+6){let f=Math.min(a-i.length,e.length);return`${i}${e.slice(0,f)}`}return e.slice(0,a)};var Dr=50*1024,We=(e,t=Dr)=>{if(!e||typeof e!="string"||e.length<=t)return e;let n=e.startsWith("```")?"\n... [truncated]\n```":"... [truncated]";return e.slice(0,t)+n};import{Buffer as Nt}from"buffer";import Lr from"path";var Ct=_("repo"),Pt=async({config:e,isRetry:t})=>{Ct.info("Getting runner diffs");let r=await Mr(),{hasChanges:n}=r,{status:o}=r;if(!n)return{hasChanges:!1};if(!t){let x=jr(o);await Gr(x)}Ct.info("Changes after processing"),await Je();let i=await Xe(o);await Ve(i);let s={stdio:["ignore","pipe","pipe"]},a=(await N("git",["diff","--staged"],s)).stdout;if(n=!!a,!n)return{hasChanges:!1,ignored:i};let p=(await N("git",["diff","--staged","--binary"],s)).stdout,c,m;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await N("git",["commit","-m","Agent runner"]),c=(await N("git",["diff",e.sha,"HEAD"],s)).stdout;let g=(await N("git",["diff",e.sha,"HEAD","--binary"],s)).stdout;c!==g&&(m=Nt.from(g).toString("base64"))}let h={hasChanges:!0,diff:a,resultDiff:c,ignored:i};return a!==p&&(h.diffBinary=Nt.from(p).toString("base64")),m&&(h.resultDiffBinary=m),h},Ve=async(e=[])=>{await N("git",["add",".",...e])},Je=async()=>(await N("git",["status","-s"])).stdout,Ot=/.. (.+)?\.log$/,Ur=[Ot],Mr=async()=>{let e=await Je();return{hasChanges:(e.trim().length===0?[]:e.split(`
-
`).filter(n=>Ur.some(i=>i instanceof RegExp?i.test(n):n===i)?!1:n[1]?.trim()!=="")).length!==0,status:e}}
-
`).forEach(n=>{t.forEach(i=>{[`?? ${i}`,`?? ${i}${Lr.sep}`].some(u=>n.startsWith(u))&&r.push(`:!${i}`)});let o=n.match(
-
`).reduce((r,n)=>{if(!n)return r;let[o,i,,...s]=n,u=s.join(""),a=o.trim(),f=i.trim();return r[u]?r[u].change=f:r[u]={filePath:u,stage:a,change:f},r},{});return Object.values(t)},Gr=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push(N("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import Br from"fs/promises";import Hr from"os";import
+
`);return i.length>e.length*.8?e:i}import Fe from"process";import{getTracer as nn}from"@netlify/otel";import me from"process";var ve=me.env.NETLIFY_API_URL,Re=me.env.NETLIFY_API_TOKEN,H=_("api"),be=()=>me.env.NETLIFY_LOCAL_MODE==="true",he=async(e,t={})=>{if(!ve||!Re)throw new Error("No API URL or token");let r=new URL(e,ve),n={...t,headers:{...t.headers,Authorization:`Bearer ${Re}`}};me.env.AGENT_RUNNERS_DEBUG==="true"&&(n.headers["x-nf-debug-logging"]="true"),t.json&&(n.headers||={},n.headers["Content-Type"]="application/json",n.body=JSON.stringify(t.json));let o=await fetch(r,n),i=o.ok&&o.status<=299;if(me.env.AGENT_RUNNERS_DEBUG==="true")H.log(`Response headers for ${r}:`),o.headers.forEach((u,a)=>{H.log(` ${a}: ${u}`)});else{let u=o.headers.get("x-request-id")||o.headers.get("x-nf-request-id");H.log(`Request ID for ${r}: ${u||"N/A"}`)}if(i||H.error(`Got status ${o.status} for request ${r}`),t.raw){if(!i)throw o;return o}let s=await(o.headers.get("content-type")?.includes("application/json")?o.json():o.text());if(!i)throw s;return s},gt=e=>{H.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(ve=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(Re=e.constants.NETLIFY_API_TOKEN)},mt=()=>({apiUrl:ve,token:Re}),ye=async(e,t)=>be()?(H.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):he(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),q=async(e,t,r)=>be()?(H.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):he(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var ht=async(e,t)=>be()?(H.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):he(`/api/v1/agent_runners/${e}/sessions/${t}`),yt=(e,t,r)=>he(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r}}),_t=async(e,t)=>be()?(H.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):he(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),Ge=async(e,t)=>{H.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var ne=_("ai_gateway"),Ye=null;var Et=async()=>{if(Ye)return Ye;ne.log("Fetching available AI gateway providers");let e=await fetch(`${mt().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return Ye=t,ne.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},hr=async(e,t)=>{let n=(await Et()).providers[e];if(!n)return ne.log(`Provider '${e}' not found`),!1;let o=n.models.includes(t);return ne.log(`Model validation for ${e}/${t}`,{isAvailable:o}),o},wt=async({netlify:e,config:t})=>{let r,n,o,i,s=e.constants?.SITE_ID;if(!s)throw new Error("No site id");let u=async()=>{clearTimeout(o),ne.log("Requesting AI gateway information");let a=await yt(s,t.id,t.sessionId);if({token:r,url:i}=a,n=a.expires_at?a.expires_at*1e3:void 0,ne.log("Got AI gateway information",{token:!!r,expiresAt:n,url:i}),n){let 
f=n-Date.now()-6e4;f>0&&(o=setTimeout(()=>{u()},f))}};return await Promise.all([u(),Et()]),{get url(){return i},get token(){return r},isModelAvailableForProvider:hr}};import K from"process";import J from"path";import Ae from"fs";import{fileURLToPath as Tr}from"url";import{createRequire as vr}from"module";import{execa as Rr,execaCommand as eo}from"execa";import{Transform as yr}from"stream";var _r=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","ERROR_LOGS_PATH","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),Er=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function wr(){return Object.entries(process.env).filter(([e,t])=>!(!t||_r.has(e)||Er.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function V(e){if(typeof e!="string")return e;let t=wr();if(t.length===0)return e;let r=e;return t.forEach(n=>{let o=new RegExp(xr(n),"g");r=r.replace(o,"******")}),r}function xr(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var oe=class extends yr{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,n){let o=t.toString(),i=V(o);n(null,i)}};function xt(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(n,o,i){let s=typeof n=="string"?V(n):n;return typeof o=="function"?t(s,o):t(s,o,i)},process.stderr.write=function(n,o,i){let s=typeof n=="string"?V(n):n;return typeof o=="function"?r(s,o):r(s,o,i)}}var _e=null,It=e=>(_e&&_e.destroy(),_e=new ee({totalAllowedTime:e}),_e),Tt=()=>_e;var ee=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,n)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let o=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),i=null,s=null;n!==void 0&&(s=new Promise((u,a)=>{i=setTimeout(()=>{a(new Error(`${t} stage exceeded its maximum duration of ${n}ms`))},n)}));try{return s?await Promise.race([r(),s]):await r()}finally{o(),i&&clearTimeout(i)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var vt={name:"@netlify/agent-runner-cli",type:"module",version:"1.0.1-broken",description:"CLI tool for running Netlify agents",main:"./dist/index.js",types:"./dist/index.d.ts",exports:"./dist/index.js",bin:{"agent-runner-cli":"./dist/bin.js","agent-runner-cli-local":"./dist/bin-local.js"},files:["dist/**/*.js","dist/**/*.d.ts","patches","scripts"],scripts:{build:"tsup",dev:"tsup --watch",prepare:"husky install node_modules/@netlify/eslint-config-node/.husky/",prepublishOnly:"npm ci && npm test",prepack:"npm run build",test:"run-s build format test:dev",format:"run-s build format:check-fix:*","format:ci":"run-s build format:check:*","format:check-fix:lint":"run-e format:check:lint format:fix:lint","format:check:lint":"cross-env-shell eslint $npm_package_config_eslint","format:fix:lint":"cross-env-shell eslint --fix $npm_package_config_eslint","format:check-fix:prettier":"run-e format:check:prettier format:fix:prettier","format:check:prettier":"cross-env-shell prettier --check $npm_package_config_prettier","format:fix:prettier":"cross-env-shell prettier --write $npm_package_config_prettier","test:dev":"run-s build test:dev:*","test:ci":"run-s build test:ci:*","test:dev:vitest":"LOG=0 vitest --exclude '**/integration/**'","test:ci:vitest":"LOG=0 c8 -r lcovonly -r text -r json vitest --exclude '**/integration/**'","test:integration":"vitest run test/integration/","test:integration:codex":"vitest run test/integration/codex.test.ts","test:integration:claude":"vitest run test/integration/claude.test.ts","test:integration:gemini":"vitest run test/integration/gemini.test.ts",postinstall:"node scripts/postinstall.js"},config:{eslint:'--cache --format=codeframe --max-warnings=0 "{src,scripts,test,.github}/**/*.{js,ts,md,html}"',prettier:'--ignore-path .gitignore --loglevel=warn "{src,scripts,test,.github}/**/*.{js,ts,md,yml,json,html}" "*.{js,ts,yml,json,html}" ".*.{js,ts,yml,json,html}" 
"!**/package-lock.json" "!package-lock.json"'},keywords:[],license:"MIT",repository:"netlify/agent-runner-cli",bugs:{url:"https://github.com/netlify/agent-runner-cli/issues"},author:"Netlify Inc.",directories:{test:"test"},devDependencies:{"@commitlint/cli":"^19.0.0","@commitlint/config-conventional":"^19.0.0","@eslint/compat":"^1.3.2","@eslint/js":"^9.35.0","@netlify/eslint-config-node":"^7.0.1","@types/node":"^24.5.0","@typescript-eslint/eslint-plugin":"^7.1.0","@typescript-eslint/parser":"^7.1.0","@vitest/eslint-plugin":"^1.3.10",c8:"^9.0.0","eslint-config-prettier":"^10.1.8","eslint-plugin-n":"^17.0.0",husky:"^8.0.0","patch-package":"^8.0.0",tsup:"^8.5.0",typescript:"^5.0.0","typescript-eslint":"^8.44.0",vitest:"^1.5.0"},dependencies:{"@anthropic-ai/claude-code":"2.0.69","@google/gemini-cli":"0.20.2","@netlify/otel":"^5.1.1","@openai/codex":"0.72.0","@opentelemetry/exporter-trace-otlp-grpc":"^0.57.0",execa:"^8.0.0","get-port":"^5.1.1",minimist:"^1.2.8"}};var br=Tr(import.meta.url),Ar=J.dirname(br),Sr=vr(import.meta.url),Se=_("shell"),Be=new Set,Nr={preferLocal:!0},N=(e,t,r)=>{let[n,o]=Cr(t,r),i={...Nr,...o},s=Rr(e,n,i);return Pr(s,i),Fr(s),s};var Cr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},Pr=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(K.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new oe).pipe(K.stdout),e.stdout?.pipe(new oe).pipe(K.stdout),e.stderr?.pipe(new oe).pipe(K.stderr);return}e.stdout?.pipe(K.stdout),e.stderr?.pipe(K.stderr)},Rt=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(K.kill(-e.pid,t),Se.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return Se.error("Error killing process:",r),!1}},Or=e=>Rt(e,"SIGKILL"),Fr=e=>{Be.add(e);let t=Tt();if(t){let r=t.onTimesUp(()=>{Se.log(`Global timer expired, killing process ${e.pid}`),Rt(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(Se.log(`Force killing process ${e.pid} after timeout`),Or(e))},5e3)});e.on("exit",()=>{Be.delete(e),r()}),e.on("error",()=>{Be.delete(e),r()})}};function Ne(e,t){return!!te(e,t)}function te(e,t){if(!K.env.NETLIFY_LOCAL_MODE)try{let o=Sr.resolve(vt.name),i=J.dirname(o);for(;i!==J.dirname(i);){let s=J.dirname(i);if(J.basename(s)==="node_modules"){let u=J.join(s,".bin",t);if(Ae.existsSync(u))return u;break}i=s}}catch(o){console.error("Could not resolve package.json",o)}if(K.env.NODE_PATH){let o=J.join(K.env.NODE_PATH,".bin",t);if(Ae.existsSync(o))return o}let r=J.join(e,"node_modules",".bin",t);if(Ae.existsSync(r))return r;let n=J.join(Ar,"..","node_modules",".bin",t);if(Ae.existsSync(n))return n}var bt="netlify-agent-runner-context.md",He="task-history",qe="netlify-context",U=".netlify",ie="results.md",Ke="assets";var $r=_("utils"),kr=e=>new Promise(t=>{setTimeout(t,e)}),At=(e,t=3e3)=>{let r=!1,n=null,o=[],i=null,s=(...u)=>{if(r)return n=u,new Promise(p=>{o.push(p)});r=!0;let a,f=new Promise(p=>{a=p});return i=(async()=>{await Promise.resolve();let p=await e(...u);for(a(p);;){if(await kr(t),!n)return r=!1,i=null,p;let c=n,m=o;n=null,o=[],p=await e(...c),m.forEach(h=>{h(p)})}})(),f};return s.flush=async()=>{if((r||n)&&i)return await i,s.flush()},s},se=(e,t,r=!1)=>{let n=null,o=null,i=null,s=function(...u){o=u,i=this;let a=r&&!n;clearTimeout(n),n=setTimeout(()=>{n=null,r||(e.apply(i,o),o=null,i=null)},t),a&&(e.apply(i,o),o=null,i=null)};return s.cancel=()=>{clearTimeout(n),n=null,o=null,i=null},s.flush=()=>{if(n){clearTimeout(n);let u=o,a=i;n=null,o=null,i=null,e.apply(a,u)}},s},St=(e,t=!0,r)=>{if(e)try{return 
JSON.parse(e)}catch(n){t&&(r?.error?r.error("Could not parse JSON",n):$r.error("Could not parse JSON",n))}},Nt=(e,t)=>{let o=".netlify.app",i="agent-";if(!t)return`${i}${e.slice(0,6)}`;let u=`--${t}${o}`;if(u.length>55)return"";let a=60-u.length;if(a<=0)return"";if(a>=i.length+6){let f=Math.min(a-i.length,e.length);return`${i}${e.slice(0,f)}`}return e.slice(0,a)};var Dr=50*1024,We=(e,t=Dr)=>{if(!e||typeof e!="string"||e.length<=t)return e;let n=e.startsWith("```")?"\n... [truncated]\n```":"... [truncated]";return e.slice(0,t)+n};import{Buffer as Ct}from"buffer";import Lr from"path";var Pt=_("repo"),Ot=async({config:e,isRetry:t})=>{Pt.info("Getting runner diffs");let r=await Mr(),{hasChanges:n}=r,{status:o}=r;if(!n)return{hasChanges:!1};if(!t){let x=jr(o);await Gr(x)}Pt.info("Changes after processing"),await Je();let i=await Xe(o);await Ve(i);let s={stdio:["ignore","pipe","pipe"]},a=(await N("git",["diff","--staged"],s)).stdout;if(n=!!a,!n)return{hasChanges:!1,ignored:i};let p=(await N("git",["diff","--staged","--binary"],s)).stdout,c,m;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await N("git",["commit","-m","Agent runner"]),c=(await N("git",["diff",e.sha,"HEAD"],s)).stdout;let g=(await N("git",["diff",e.sha,"HEAD","--binary"],s)).stdout;c!==g&&(m=Ct.from(g).toString("base64"))}let h={hasChanges:!0,diff:a,resultDiff:c,ignored:i};return a!==p&&(h.diffBinary=Ct.from(p).toString("base64")),m&&(h.resultDiffBinary=m),h},Ve=async(e=[])=>{await N("git",["add",".",...e])},Je=async()=>(await N("git",["status","-s"])).stdout,Ft=/.. (.+)?\.log$/,Ur=[Ft],Mr=async()=>{let e=await Je();return{hasChanges:(e.trim().length===0?[]:e.split(`
+
`).filter(n=>Ur.some(i=>i instanceof RegExp?i.test(n):n===i)?!1:n[1]?.trim()!=="")).length!==0,status:e}},$t=async()=>{let{stdout:e}=await N("git",["rev-parse","HEAD"]);return e.trim()},kt=async()=>{let{stdout:e}=await N("git",["rev-list","--max-parents=0","HEAD"]);return e.trim()},Xe=async e=>{e||=await Je();let t=[".netlify","node_modules","dist"],r=[];return e.split(`
+
`).forEach(n=>{t.forEach(i=>{[`?? ${i}`,`?? ${i}${Lr.sep}`].some(u=>n.startsWith(u))&&r.push(`:!${i}`)});let o=n.match(Ft)?.[1];o&&r.push(`:!${o}.log`)}),r},Dt=async()=>{await N("git",["reset","--hard","HEAD"])},jr=e=>{let t=e.split(`
+
`).reduce((r,n)=>{if(!n)return r;let[o,i,,...s]=n,u=s.join(""),a=o.trim(),f=i.trim();return r[u]?r[u].change=f:r[u]={filePath:u,stage:a,change:f},r},{});return Object.values(t)},Gr=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push(N("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import Br from"fs/promises";import Hr from"os";import Mt from"path";import ue from"process";import qr from"readline";import ze from"path";import Yr from"fs/promises";var Ze=_("agent-output-utils");async function ae({initialResult:e,agentName:t,hasError:r}){let n="",o=ze.join(process.cwd(),U,ie);try{let i=await Yr.readFile(o,"utf-8");i&&(n=i,Ze.log(`Pulled result from ${ze.relative(process.cwd(),o)}`))}catch{Ze.log(`No results file found at ${ze.relative(process.cwd(),o)}`)}return n||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function le({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,n=r?.replace(/\s+/g," ").trim().toLowerCase()||"",o="";return n?.includes("ai gateway is not available for your account")||n?.includes("ai gateway is not enabled for your account")?o="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":n?.includes("error when talking to gemini api")?o="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(n?.includes("connection closed prematurely")||n?.includes("499")&&t.toLowerCase().includes("gemini"))&&(o=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),n?.includes("request timed out")&&(o=`The ${t} API request's have timed out. Please try again or use a different available agent.`),n?.includes("network error")&&(o=`The ${t} agent is having network issues. 
Please try again or use a different available agent.`),o&&Ze.log(`Providing updated error messsage: ${o}, replacing original error: ${r}`),o||r||void 0}function ce(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error"))}var M=_("runner_claude"),Lt="Claude Code",de="claude-opus-4-5-20251101",Ut=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,Kr=({catchError:e,runCmd:t,error:r,result:n,runnerName:o})=>(M.log(`${o} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!n,resultLength:n?n.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),n?(M.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:n}:{error:"Process completed with errors but result was captured",result:n}):(M.log("Setting result to undefined because no valid result was captured"),{error:r||`${o} failed`,result:void 0}));async function Qe({config:e,netlify:t,persistSteps:r,aiGateway:n,continueSession:o,priorAgentSessionId:i,cwd:s=ue.cwd()}){let u=e,{accountType:a,prompt:f,modelVersionOverrides:p}=u,{model:c}=u,m="";if(n){let{token:y,url:d}=n;if(!y||!d)throw new Error("No token or url provided from AI Gateway");if(p?.claude){let l=p?.claude?.[a];if(l){if(!await n.isModelAvailableForProvider("anthropic",l))throw new Error(`Model override '${l}' is not available for anthropic provider`);c=l}}else if(c){if(!await n.isModelAvailableForProvider("anthropic",c))throw new Error(`Model '${c}' is not available for anthropic provider`)}else!!de&&await n.isModelAvailableForProvider("anthropic",de)?(c=de,M.log(`Using default model: ${de}`)):de&&M.log(`Default model ${de} is not available, proceeding without model specification`);ue.env.ANTHROPIC_API_KEY=y,ue.env.ANTHROPIC_BASE_URL=d}else if(!ue.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let h=[],x=[],I={},g=0,E=0,v,b,P=[te(s,"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose",...c?["--model",c]:[],...o?["--continue"]:[],...o&&i?["--resume",i]:[],"-p",f],O=`${ue.env.NVM_BIN}/node`;M.log(`Running ${O} ${P.join(" ")}`);let k=t.utils.run(O,P,{all:!0,env:ue.env,cwd:s});k.stdin?.end();let A=se(()=>{r?.({steps:h,duration:E})},250),T=(y,d)=>{let l={...y,id:g};g+=1,x.push(l),h.push(l),d||A.flush(),A(),d&&A.flush()},G=qr.createInterface({input:k.all});return G.on("error",y=>{M.error("Readline interface error",{error:y.message,stack:y.stack})}),G.on("line",y=>{let d=null;try{d=JSON.parse(y)}catch{M.log("Could not parse line",y)}d?.session_id&&d.session_id!==m&&(m=d.session_id),Array.isArray(d?.message?.content)?d.message.content.forEach(l=>{switch(l.type){case"text":{l.text&&T({message:l.text});break}case"image":{typeof l.source=="object"&&l.source&&l.source.type==="base64"&&l.source.media_type?T({message:``}):M.log(`Unsupported image type ${l.source?.type}`,l.source);break}case"tool_use":{if(l.name==="Task"){let 
w=l.input?.description&&`\`${l.input.description}\``;T({title:[Ut(l.name),w].filter(Boolean).join(" ")})}else l.id&&(I[l.id]=l);A.flush();break}case"tool_result":{let w=l.tool_use_id?I[l.tool_use_id]:void 0,Z;if(w){let X=w.input?.file_path&&Mt.relative(s,w.input.file_path),F=X&&`\`${X}\``;Z=[Ut(w.name||""),F].filter(Boolean).join(" ")}let Te=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(w?.name||""),Q;if(typeof l.content=="string")Q=l.content;else if(Array.isArray(l.content)){let X=[];l.content.forEach(F=>{F?.type==="text"&&typeof F.text=="string"?X.push(F.text):F?.type==="image"&&typeof F.source=="object"&&F.source?F.source.type==="base64"&&F.source.media_type?X.push(``):M.log(`Unsupported image type ${F.source.type}`,F.source):M.log(`Unsupported block type ${F?.type}`)}),Q=X.join(`
-
`)}
+
`)}Te&&Q&&(Q=`\`\`\`
${Q.trim()}
-
\`\`\``),T({title:Z,message:Q},!0);break}case"thinking":{l.thinking&&T({title:"Thinking",message:l.thinking},!0);break}default:M.log(`Message content type is not supported ${l.type}`,l)}}):d?.type==="result"&&(E=d.duration_ms||0,d.is_error?
+
\`\`\``),T({title:Z,message:Q},!0);break}case"thinking":{l.thinking&&T({title:"Thinking",message:l.thinking},!0);break}default:M.log(`Message content type is not supported ${l.type}`,l)}}):d?.type==="result"&&(E=d.duration_ms||0,d.is_error?b=d.result:v=d.result,[x,h].forEach(l=>{l[l.length-1]?.message===v&&l.pop()}))}),await k.catch(y=>{({error:b,result:v}=Kr({catchError:y,runCmd:k,error:b,result:v,runnerName:"Claude"}))}),G.close(),A.flush(),{steps:x,duration:E,result:await ae({initialResult:v,agentName:Lt,hasError:!!b}),error:le({error:b,agentName:Lt}),isRetryableError:ce(b),agentSessionId:m}}var jt=async()=>{let e=Mt.join(Hr.homedir(),".claude");await Br.rm(e,{recursive:!0,force:!0})};import Ee from"fs/promises";import Yt from"os";import Ce from"path";import re from"process";import Wr from"readline";var j=_("runner_codex"),Gt="Codex CLI",pe="gpt-5.2",Vr=({catchError:e,runCmd:t,error:r,result:n,runnerName:o})=>(j.log(`${o} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!n,resultLength:n?n.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),n?(j.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:n}:{error:"Process completed with errors but result was captured",result:n}):(j.log("Setting result to undefined because no valid result was captured"),{error:r||`${o} failed`,result:void 0}));async function et({config:e,netlify:t,persistSteps:r=void 0,sendSteps:n=void 0,aiGateway:o,cwd:i=re.cwd()}){let{accountType:s,prompt:u,modelVersionOverrides:a}=e,{model:f}=e;if(o){let{token:d,url:l}=o;if(!d||!l)throw new Error("No token or url provided from AI Gateway");if(a?.codex){let w=a?.codex?.[s];if(w){if(!await o.isModelAvailableForProvider("openai",w))throw new Error(`Model override '${w}' is not available for openai provider`);f=w}}else if(f){if(!await o.isModelAvailableForProvider("openai",f))throw new Error(`Model '${f}' is not available for openai provider`)}else!!pe&&await o.isModelAvailableForProvider("openai",pe)?(f=pe,j.log(`Using default model: ${pe}`)):pe&&j.log(`Default model ${pe} is not available, proceeding without model specification`);re.env.OPENAI_API_KEY=d,re.env.OPENAI_BASE_URL=l}else if(!re.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let p=[],c=[],m=[],h={},x=0,I=0,g,E,v=`${re.env.NVM_BIN}/node`,b=Ce.join(Yt.homedir(),".codex"),P=Ce.join(b,"config.toml"),O=Ce.join(b,"auth.json");try{await Ee.mkdir(b,{recursive:!0});let d={OPENAI_API_KEY:re.env.OPENAI_API_KEY};await Ee.writeFile(O,JSON.stringify(d,null,2),"utf-8"),j.log("Created Codex auth.json file");let l="";try{l=await Ee.readFile(P,"utf-8")}catch{}l.includes("web_search_request")||(l.includes("[features]")?l=l.replace(/\[features\]/,`[features]
web_search_request = true`):l+=`
[features]
web_search_request = true
`,await Ee.writeFile(P,l,"utf-8"),j.log("Updated Codex config with web_search_request enabled"))}catch(d){throw j.warn("Failed to setup Codex config and credentials",{error:d.message}),new Error(`Codex setup failed: ${d.message}`)}let k=[te(i,"codex"),"exec","--yolo","--json","--enable","web_search_request",...f?["--model",f]:[],u].filter(Boolean);j.log(`Running ${v} ${k.join(" ")}`);let A=t.utils.run(v,k,{all:!0,cwd:i,env:{...re.env}}),T=se(()=>{r?.({steps:p,duration:I}),n?.({steps:c,duration:I}),c=[]},250),G=(d,l)=>{d.id=x,x+=1,m.push(d),p.push(d),c.push(d),l||T.flush(),T(),l&&T.flush()},y=Wr.createInterface({input:A.all});return y.on("error",d=>{j.error("Readline interface error",{error:d.message,stack:d.stack})}),y.on("line",d=>{let l=null;try{l=JSON.parse(d)}catch{j.log("Could not parse line",d);return}if(l?.duration_ms&&(I=l.duration_ms),l?.type==="item.started"&&l?.item?.type==="command_execution")h[l.item.id]=l.item;else if(l?.type==="item.completed"&&l?.item?.type==="command_execution"){let w=Xr(l.item);w&&G(w,!0)}else if(l?.type==="item.completed"&&l?.item?.type==="reasoning"){let w={title:"Reasoning",message:l.item.text};G(w,!0)}else if(l?.type==="local_shell_call")h[l.call_id]=l;else if(l?.type==="local_shell_call_output"){let w=zr(h[l.call_id],l);w&&G(w,!0)}else l?.type==="message"&&l.role==="assistant"?g=l.content.map(w=>w.text).join(`
`):l?.type==="message"&&l.role==="system"&&(E=l.content.map(w=>w.text).join(`
-
`))}),await A.catch(d=>{let l=Vr({catchError:d,runCmd:A,error:E,result:g,runnerName:"Codex"});E=l.error,g=l.result}),y.close(),T.flush(),{steps:m,duration:I,result:await ae({initialResult:g,agentName:
+
`))}),await A.catch(d=>{let l=Vr({catchError:d,runCmd:A,error:E,result:g,runnerName:"Codex"});E=l.error,g=l.result}),y.close(),T.flush(),{steps:m,duration:I,result:await ae({initialResult:g,agentName:Gt,hasError:!!E}),error:le({error:E,agentName:Gt}),isRetryableError:ce(E)}}var Bt=async()=>{let e=Ce.join(Yt.homedir(),".codex");await Ee.rm(e,{recursive:!0,force:!0})},Jr=new Set(["bash","-lc"]),Xr=e=>{if(!e||e.type!=="command_execution")return null;let t=e.command;t.startsWith("bash -lc ")&&(t=t.replace(/^bash -lc ['"]/,"").replace(/['"]$/,""));let r=`Running \`${t}\``,n=e.aggregated_output?.trim();return n&&(n=`\`\`\`
${n}
\`\`\``),e.status==="failed"&&e.exit_code!==0&&(n=n?`${n}
*Exit code: ${e.exit_code}*`:`*Command failed with exit code: ${e.exit_code}*`),{title:r,message:n}},zr=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(i=>!Jr.has(i)),n=r?`Running \`${r.join(" ")}\``:void 0,o;try{o=JSON.parse(t.output).output?.trim(),o&&(o=`\`\`\`
${o.trim()}
-
\`\`\``)}catch(i){j.error("Could not decode outputMsg",i,t.output)}return{title:n,message:o}};import
+
\`\`\``)}catch(i){j.error("Could not decode outputMsg",i,t.output)}return{title:n,message:o}};import Pe from"fs/promises";import qt from"os";import Oe from"path";import fe from"process";import Zr from"readline";var Y=_("runner_gemini"),Ht="Gemini CLI",ge="",Qr=({catchError:e,runCmd:t,error:r,result:n,runnerName:o})=>(Y.log(`${o} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!n,resultLength:n?n.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),n?(Y.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:n}:{error:"Process completed with errors but result was captured",result:n}):(Y.log("Setting result to undefined because no valid result was captured"),{error:r||`${o} failed`,result:void 0})),en={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"},tn=async()=>{let e=Oe.join(qt.homedir(),".gemini"),t=Oe.join(e,"settings.json");try{await Pe.mkdir(e,{recursive:!0});let r={};try{let n=await Pe.readFile(t,"utf-8");r=JSON.parse(n)}catch{Y.log("Creating new Gemini CLI settings file")}r.general||(r.general={}),r.general.previewFeatures||(r.general.previewFeatures=!0),r.model||(r.model={}),r.model.compressionThreshold!==.3&&(r.model.compressionThreshold=.3),await Pe.writeFile(t,JSON.stringify(r,null,2),"utf-8"),Y.log("Configured Gemini CLI settings (preview features and compression threshold)")}catch(r){Y.error("Failed to ensure Gemini CLI settings",{error:r.message})}};async function tt({config:e,netlify:t,persistSteps:r=void 0,sendSteps:n=void 0,aiGateway:o,cwd:i=fe.cwd()}){let{accountType:s,prompt:u,modelVersionOverrides:a}=e,{model:f}=e;if(await tn(),o){let{token:y,url:d}=o;if(!y||!d)throw new Error("No token or url provided from AI Gateway");if(a?.gemini){let l=a?.gemini?.[s];if(l){if(!await o.isModelAvailableForProvider("gemini",l))throw new Error(`Model override '${l}' is not available for gemini provider`);f=l}}if(!f)!!ge&&await o.isModelAvailableForProvider("gemini",ge)?(f=ge,Y.log(`Using default model: ${ge}`)):ge&&Y.log(`Default model ${ge} is not available, proceeding without model specification`);else if(f&&!a?.gemini?.[s]&&!await o.isModelAvailableForProvider("gemini",f))throw new Error(`Model '${f}' is not available for gemini provider`);fe.env.GEMINI_API_KEY=y,fe.env.GOOGLE_GEMINI_BASE_URL=d}else if(!fe.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let p=[],c=[],m=[],h={},x=0,I=0,g,E,v=[te(i,"gemini"),...f?["--model",f]:[],"--yolo","--output-format","stream-json","-p",u],b=`${fe.env.NVM_BIN}/node`;Y.log(`Running ${b} ${v.join(" ")}`);let P=t.utils.run(b,v,{all:!0,env:fe.env,cwd:i});P.stdin?.end();let O=se(()=>{r?.({steps:p,duration:I}),n?.({steps:c,duration:I}),c=[]},250),k=(y,d)=>{y.id=x,x+=1,m.push(y),p.push(y),c.push(y),d||O.flush(),O(),d&&O.flush()},A=Zr.createInterface({input:P.all});A.on("error",y=>{Y.error("Readline interface error",{error:y.message,stack:y.stack})});let T="",G=()=>{T&&k({message:T.trim()}),T=""};return A.on("line",y=>{let d=null;try{if(y.startsWith("[API Error")){let l=y.match(/\[api error: (.+?)]$/i)?.[1];d={type:"error",value:St(l,!1)?.error?.message||l||"Gemini encountered error"}}else 
d=JSON.parse(y)}catch{return}if(d)switch(["message","result"].includes(d.type)||G(),d.type){case"message":{d.role!=="user"&&d.content&&(T+=d.content);break}case"tool_use":{let l=en[d.tool_name]??d.tool_name,w=d.parameters?.file_path,Z=w&&Oe.relative(i,w),Te=d.parameters?.command,X={title:[l,Z&&`\`${Z}\``,Te&&`\`${Te}\``].filter(Boolean).join(" ")};h[d.tool_id]=X,O.flush();break}case"tool_result":{let l=h[d.tool_id];l&&(d.output&&(l.message=`\`\`\`
${d.output.trim()}
-
\`\`\``),k(l,!0));break}case"result":{I=d.stats?.duration_ms,d.status==="error"?E=d.error?.message:g=T.trim();break}case"error":{E=d.error;break}case"finished":break;default:{Y.warn("Unhandled message type:",d.type);break}}}),await P.catch(y=>{({error:E,result:g}=Qr({catchError:y,runCmd:P,error:E,result:g,runnerName:"Gemini"}))}),A.close(),O.flush(),{steps:m,duration:I,result:await ae({initialResult:g,agentName:
+
\`\`\``),k(l,!0));break}case"result":{I=d.stats?.duration_ms,d.status==="error"?E=d.error?.message:g=T.trim();break}case"error":{E=d.error;break}case"finished":break;default:{Y.warn("Unhandled message type:",d.type);break}}}),await P.catch(y=>{({error:E,result:g}=Qr({catchError:y,runCmd:P,error:E,result:g,runnerName:"Gemini"}))}),A.close(),O.flush(),{steps:m,duration:I,result:await ae({initialResult:g,agentName:Ht,hasError:!!E}),error:le({error:E,agentName:Ht}),isRetryableError:ce(E)}}var Kt=async()=>{let e=Oe.join(qt.homedir(),".gemini");await Pe.rm(e,{recursive:!0,force:!0})};var rn={codex:{runner:et,clean:Bt},claude:{runner:Qe,clean:jt},gemini:{runner:tt,clean:Kt}},Wt=rn;var Vt=_("init_stage"),Jt=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:n})=>await S(nn(),"init-stage",async o=>{let i=performance.now();o?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.validateAgent":e.validateAgent,"init.runnerVersion":n||"unknown"});let s=Wt[e.runner];if(!s)throw o?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let u=on({apiToken:r});gt(u);let a=e.useGateway?await wt({netlify:u,config:e}):void 0;o?.setAttributes({"init.aiGateway.created":!!a}),e.validateAgent&&e.errorLogsPath&&o?.setAttributes({"init.errorLogsPath":e.errorLogsPath});let f=5*1024,p=At(async({steps:x=[],duration:I})=>{let g=x.map(E=>{let v=E.title?We(V(E.title),f):void 0,b=E.message?We(V(E.message)):void 0;return{...E,title:v,message:b}});x.length=0;try{return await q(e.id,e.sessionId,{steps:g,duration:I})}catch(E){Vt.error("persistSteps failed",{error:E?.message||E})}},t);Vt.info("Adding build files to stage");let c=await Xe();await Ve(c);let m;e.hasRepo?e.sha?(m=e.sha,o?.setAttributes({"init.sha.source":"provided"})):(m=await $t(),await ye(e.id,{sha:m}),o?.setAttributes({"init.sha.source":"current_commit"})):(m=await kt(),o?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let h=performance.now()-i;return o?.setAttributes({"init.sha":m||"unknown","init.duration.ms":h,"init.status":"success"}),{aiGateway:a,context:u,persistSteps:p,runner:s,sha:m}}),on=({apiToken:e})=>({constants:{NETLIFY_API_HOST:Fe.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||Fe.env.NETLIFY_API_TOKEN,SITE_ID:Fe.env.SITE_ID,FUNCTIONS_DIST:Fe.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:N}});import{getTracer as rt}from"@netlify/otel";import sn from"crypto";import z from"fs/promises";import L from"path";import B from"process";var D=_("context"),an=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:B.env.NETLIFY_TEAM_ID,userId:B.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:B.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},ln=10,cn=async e=>{let{name:t,ext:r}=L.parse(e),n=e,o=L.join(B.cwd(),U,n),i=0;for(;await un(o);){if(i>=ln)throw new Error("Failed to generate context file");n=`${t}-${sn.randomUUID().slice(0,5)}${r}`,o=L.join(B.cwd(),U,n),i+=1}return n},un=async e=>{try{return await z.access(e),!0}catch{return!1}},dn=async()=>{try{D.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return D.warn("Invalid response structure: missing or invalid consumers array"),null;let 
r=t.consumers.find(n=>n&&typeof n=="object"&&n.key==="catchall-consumer");return r?!r.contextScopes||typeof r.contextScopes!="object"?(D.warn("Catchall consumer missing or invalid contextScopes"),null):r:(D.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?D.warn("Netlify features context request timed out"):D.warn("Failed to fetch Netlify features context:",e.message),null}},pn=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let n=await r.text();return await z.writeFile(t,n,"utf-8"),!0}catch(r){return r.name==="AbortError"?D.warn(`Download timeout for ${e}`):D.warn(`Failed to download context file ${e}:`,r.message),!1}},$e=null,fn=async()=>{if($e)return $e;let e=await dn();if(!e)return[];let t=L.join(B.cwd(),U,qe);await z.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([o,i])=>{if(!i||typeof i!="object"||!i.endpoint||!i.scope)return D.warn(`Invalid scope data for ${o}, skipping...`),null;let s=`${o}.md`,u=L.join(t,s),a=L.join(U,qe,s);return D.log(`Downloading ${i.scope} context...`),await pn(i.endpoint,u)?(D.log(`Downloaded: ${a}`),{scope:i.scope,path:a,key:o}):null});return $e=(await Promise.all(r)).filter(o=>o!==null),$e},Xt=async({cliPath:e,netlify:t,config:r,buildErrorContext:n})=>{let o=an(t),i=await cn(bt),s=L.join(B.cwd(),U);await z.mkdir(s,{recursive:!0});let u=L.join(U,i),a=L.join(B.cwd(),u),f=L.join(B.cwd(),U,ie);try{await z.unlink(f),D.log(`Deleted old results file: ${f}`)}catch{}let p=n?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
Your task is to analyze and fix the build errors.
Don't apply techniques of reverting changes. Apply fixes related to errors.
Don't try to run build by yourself. Just fix the errors.
@@ -39,7 +39,7 @@ ${r.siteContext.filter(g=>g.site_context).map(g=>typeof g.site_context=="string"
`)}
</project_rules>
-
`);let m="";if(r.sessionHistoryContext?.length){let g=L.join(B.cwd(),U,He);await z.mkdir(g,{recursive:!0});let E=await Promise.all(r.sessionHistoryContext.map(async(v,
+
`);let m="";if(r.sessionHistoryContext?.length){let g=L.join(B.cwd(),U,He);await z.mkdir(g,{recursive:!0});let E=await Promise.all(r.sessionHistoryContext.map(async(v,b)=>{let P=b+1,O=`attempt-${P}.md`,k=L.join(g,O),A=L.join(U,He,O),T=`# Task History - Attempt ${P}
## Request - what the user asked for
${v.request}
@@ -127,19 +127,19 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
</request>
Use the following file for the complete context of the ask, the environment, and what's available. ${a} You MUST READ ALL OF IT. Make sure to read it first. Never cite or paraphrase private context.
-
`),I};var gn=_("prompt"),
+
`),I};var gn=_("prompt"),zt=async({cliPath:e,config:t,netlify:r,buildErrorContext:n})=>{let o=await Xt({cliPath:e,config:t,netlify:r,buildErrorContext:n});return process.env.AGENT_RUNNER_DEBUG&&gn.log("Contextful Prompt:",o),{prompt:o}};var ke=_("inference_stage"),Zt=5,De=async e=>{let{cliPath:t,config:r,context:n,buildErrors:o,runner:i,persistSteps:s,aiGateway:u,attempt:a,contextPrefix:f,priorAgentSessionId:p}=e;ke.log(`Running inference stage, attempt ${a} of ${Zt}`);let c=await S(rt(),"inference-stage",async m=>{m?.setAttributes({"inference.attempt":a||1}),xt();let{prompt:h}=await S(rt(),"compose-prompt",async()=>await zt({cliPath:t,config:r,buildErrorContext:mn(o),netlify:n})),x=`
${f||""}
${h}
-
`.trim(),I={...r,prompt:x},g=await
+
`.trim(),I={...r,prompt:x},g=await S(rt(),`run-${r.runner}`,async()=>await i({aiGateway:u,config:I,netlify:n,persistSteps:s,continueSession:!!(a&&a>1),priorAgentSessionId:p}));return g.result&&(g.result=V(g.result)),g.error&&(g.error=V(g.error)),await s.flush(),g});if(c.error){if(ke.error("Runner failed",{stepsCount:c.steps.length,duration:c.duration,error:c.error,isRetryableError:c.isRetryableError,attempt:a||1,agentSessionId:c.agentSessionId}),c.isRetryableError&&(!a||a<Zt))return ke.log("Retrying inference stage"),await new Promise(h=>setTimeout(h,5e3)),{runnerResult:(await De({...e,attempt:(a||1)+1,priorAgentSessionId:c.agentSessionId,contextPrefix:c.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw ke.log("Do not retry inference stage"),new Error(c.error)}return{runnerResult:c}},mn=e=>!e||e.length===0?"":`
Deploy failed failed. Here are the errors to review on the latest build:
Below are all of the logs with potential issues that we extracted. Some of them may be false positives, discern them carefully and ensure fixes are relevant.
${e.pop()}
-
`;import _n from"process";import{getTracer as nt}from"@netlify/otel";import{getTracer as hn}from"@netlify/otel";var we=_("deploy"),
-
Preview deploy created successfully:`,{deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id});let m={deployId:c.deploy_id,previewUrl:c.deploy_url,logsUrl:c.logs,siteId:c.site_id};return t||(m.sourceZipFilename=c.source_zip_filename),m}catch(a){throw we.error("Failed to create preview deploy via CLI:",a),u?.setAttributes({success:!1,error:a.message}),a}};var xe=_("deploy_stage"),ot=async e=>await
+
`;import _n from"process";import{getTracer as nt}from"@netlify/otel";import{getTracer as hn}from"@netlify/otel";var we=_("deploy"),Qt=async e=>await S(hn(),"create-preview-deploy",async t=>yn(e,t)),yn=async({netlify:e,hasRepo:t,skipBuild:r,message:n="Agent Preview",deploySubdomain:o,cliPath:i,filter:s},u)=>{try{let a=["deploy","--message",`"${n}"`,"--json","--draft","--verbose"];t||(we.log("Deploy: Uploading source zip"),a.push("--upload-source-zip")),o&&a.push("--alias",o),s&&a.push("--filter",s),r?(we.log("Deploy: Skipping build"),a.push("--no-build")):a.push("--context","deploy-preview");let f=i||"netlify";we.log(`Running: ${f} ${a.join(" ")}`),u?.setAttributes({cmd:f,args:a});let p=await e.utils.run(f,a,{stdio:["ignore","pipe","pipe"]}),c=JSON.parse(p.stdout.trim());u?.setAttributes({success:!0,deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id}),we.log(`
+
Preview deploy created successfully:`,{deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id});let m={deployId:c.deploy_id,previewUrl:c.deploy_url,logsUrl:c.logs,siteId:c.site_id};return t||(m.sourceZipFilename=c.source_zip_filename),m}catch(a){throw we.error("Failed to create preview deploy via CLI:",a),u?.setAttributes({success:!1,error:a.message}),a}};var xe=_("deploy_stage"),ot=async e=>await S(nt(),"run-deploy-stage",async()=>En(e)),En=async({cliPath:e,config:t,context:r,result:n,filter:o,isRetry:i})=>{let s=await S(nt(),"get-runner-diffs",async()=>await Ot({config:t,isRetry:i}));if(xe.info("Resolved git",{hasChanges:s.hasChanges,ignored:s.ignored??[]}),!s.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:u,resultDiff:a,diffBinary:f,resultDiffBinary:p}=s,c=!0;xe.log("Preview deploy condition check:",{resultUndefined:n===void 0,resultType:typeof n,hasChanges:c,wouldCreatePreview:n!==void 0&&c});let m=null;if(n!==void 0&&c)try{let h;try{let x=await S(nt(),"get-runner-session",async()=>await ht(t.id,t.sessionId));x?.title&&(h=x.title)}catch(x){xe.warn("Failed to fetch session title, using fallback message:",x.message)}await q(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),m=await Qt({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:h,skipBuild:!1,deploySubdomain:Nt(t.id,_n.env.SITE_NAME),filter:o})}catch(h){return xe.warn("Failed to create preview deploy (continuing with agent run):",h),{diff:u,resultDiff:a,hasChanges:c,previewInfo:null,diffBinary:f,resultDiffBinary:p,deployError:h instanceof Error?h.message:String(h)}}return xe.log("Git status",{hasDiff:!!u,hasChanges:c}),{diff:u,resultDiff:a,hasChanges:c,previewInfo:m,diffBinary:f,resultDiffBinary:p}};import{getTracer as Ie}from"@netlify/otel";async function it(e,t){let{maxRetries:r,baseDelay:n,onRetry:o}=t,i;for(let s=1;s<=r;s++)try{return await e()}catch(u){if(i=u,s===r)throw i;o&&o(s,i),await new Promise(a=>setTimeout(a,n*s))}throw i}var Le=class{scanDiffForForms(t){let r=[],n=null,o=[],i=t.split(`
`);for(let s of i)if(s.startsWith("diff --git")){if(n&&o.length>0){let a=this.containsNetlifyForm(o,n);a&&r.push(a)}let u=s.split(" ");n=u[u.length-1].replace(/^b\//,""),o=[]}else s.startsWith("+")&&!s.startsWith("+++")&&o.push(s.slice(1));if(n&&o.length>0){let s=this.containsNetlifyForm(o,n);s&&r.push(s)}return{detected:r.length>0,matches:r}}containsNetlifyForm(t,r){let n=t.join(`
-
`),o=[{pattern:/<[\w-]*form[\s\S]*?(data-)?netlify/i,name:"standard form element"},{pattern:/<[A-Z][\w]*[\s\S]*?component\s*=\s*["']form["'][\s\S]*?(data-)?netlify/i,name:"component with form prop"}];for(let{pattern:i,name:s}of o){let u=n.match(i);if(u){let a=u.index||0,f=Math.max(0,a-20),p=Math.min(n.length,a+u[0].length+20),c=n.slice(f,p).trim();return c=c.replace(/\s+/g," "),c.length>100&&(c=c.slice(0,97)+"..."),{file:r,snippet:`[${s}] ${c}`}}}return null}};var
+
`),o=[{pattern:/<[\w-]*form[\s\S]*?(data-)?netlify/i,name:"standard form element"},{pattern:/<[A-Z][\w]*[\s\S]*?component\s*=\s*["']form["'][\s\S]*?(data-)?netlify/i,name:"component with form prop"}];for(let{pattern:i,name:s}of o){let u=n.match(i);if(u){let a=u.index||0,f=Math.max(0,a-20),p=Math.min(n.length,a+u[0].length+20),c=n.slice(f,p).trim();return c=c.replace(/\s+/g," "),c.length>100&&(c=c.slice(0,97)+"..."),{file:r,snippet:`[${s}] ${c}`}}}return null}};var R=_("cleanup_stage"),er=async e=>await S(Ie(),"cleanup-stage",async()=>wn(e)),st=1024*1024*10,wn=async({config:e,diff:t,result:r,duration:n,resultDiff:o,diffBinary:i,resultDiffBinary:s,previewInfo:u})=>{let a={result:r||"Done",duration:n};u&&u.deployId&&(a.deploy_id=u.deployId),u&&u.sourceZipFilename&&(a.result_zip_file_name=u.sourceZipFilename);let f=t||i||o||s;if(f&&(a.diff_produced=!0),process.env.SITE_ID==="def61649-ad41-4d63-a478-8496a919443a"&&f)return R.log("Test site detected - skipping diff upload to test loss detection"),await it(async()=>await S(Ie(),"update-runner-session",()=>q(e.id,e.sessionId,a)),{maxRetries:3,baseDelay:1e3,onRetry:(p,c)=>{R.error(`Error updating agent runner session (attempt ${p}):`,c),R.log("Retrying...")}}),{sessionUpdate:a};if(f){let p=new Le,c=t||i||"",m=p.scanDiffForForms(c);m.detected?(R.log("Detected Netlify form(s) in diff:"),m.matches.forEach(({file:h,snippet:x})=>{R.log(` - ${h}: ${x}`)}),a.has_netlify_form=!0):R.log("Did not detect Netlify form(s) in diff"),R.log("Did not detect Netlify form(s) in diff")}if(f)try{R.log("Getting pre-signed URLs for diff upload");let p=await _t(e.id,e.sessionId),c=[];(t||i)&&c.push(Ge(p.result.upload_url,i||t).then(()=>{a.result_diff_s3_key=p.result.s3_key,R.log("Successfully uploaded result_diff to S3")})),(o||s)&&c.push(Ge(p.cumulative.upload_url,s||o).then(()=>{a.cumulative_diff_s3_key=p.cumulative.s3_key,R.log("Successfully uploaded cumulative_diff to S3")})),R.log(`Uploading ${c.length} diff(s) to S3 in parallel`),await Promise.all(c),(o||s)&&(R.log("Updating agent runner with cumulative diff S3 key"),await S(Ie(),"update-runner",async()=>{await ye(e.id,{result_diff_s3_key:p.cumulative.s3_key})}))}catch(p){R.error("S3 upload failed, falling back to inline diffs:",p);let c=Buffer.byteLength(t||i||""),m=Buffer.byteLength(s||o||"");if(c>st||m>st){let h=`Diffs exceed maximum inline size of ${st} bytes.`;throw R.error(h),new Error(h)}a.result_diff=t,a.result_diff_binary=i,(o||s)&&(a.cumulative_diff=o,a.cumulative_diff_binary=s,R.log("Updating agent runner with inline diffs (fallback)"),await S(Ie(),"update-runner",async()=>{await ye(e.id,{result_diff:o,result_diff_binary:s})}))}else R.log("No diffs to upload");return R.log("Updated agent runner with result"),await it(async()=>await S(Ie(),"update-runner-session",()=>q(e.id,e.sessionId,a)),{maxRetries:3,baseDelay:1e3,onRetry:(p,c)=>{R.error(`Error updating agent runner session (attempt ${p}):`,c),R.log("Retrying...")}}),R.log("Finished updating agent runner with result"),{sessionUpdate:a}};import{getTracer as tr,shutdownTracers as In,withActiveSpan as rr}from"@netlify/otel";var Tn=xn(import.meta.url),nr=Tn("../package.json"),or=_("pipeline_index"),Ue=3,ir=async({config:e,apiToken:t,cliPath:r="netlify",cwd:n,errorLogsPath:o,filter:i,tracing:s={}})=>{let u,{withStageTimer:a}=It(ee.timeUnits.hours(4)),f=await pt(nr.version,e.id,s);try{await rr(tr(),"run-pipeline",{},f,async()=>{let{aiGateway:p,context:c,persistSteps:m,runner:h,sha:x}=await 
a("init",()=>Jt({config:e,apiToken:t,cliPath:r,cwd:n,errorLogsPath:o,filter:i,runnerVersion:nr.version}),ee.timeUnits.minutes(10));u=h.clean,e.sha=x;let{runnerResult:I}=await a("inference",()=>De({cliPath:r,config:e,context:c,runner:h.runner,persistSteps:m,aiGateway:p}));await q(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let g=await a("deploy",()=>ot({cliPath:r,config:e,context:c,result:I.result,filter:i,isRetry:!1})),E=I,v=[];if(g.hasChanges&&g.deployError){v.push(ft(g.deployError));let T=1;for(;T<=Ue&&!g.previewInfo;)or.log(`Deploy attempt had errors. Retrying. ${T}/${Ue}`),await rr(tr(),"deploy-stage",async G=>{G?.setAttributes({"stage.attempt":T});let{runnerResult:y}=await a(`inference-retry-${T}`,()=>De({cliPath:r,config:e,context:c,runner:h.runner,persistSteps:m,aiGateway:p,buildErrors:v,priorAgentSessionId:I.agentSessionId}));E={...y,steps:[...E.steps||[],...y.steps||[]],duration:(E.duration||0)+(y.duration||0)},g=await a(`deploy-retry-${T}`,()=>ot({cliPath:r,config:e,context:c,result:y.result,filter:i,isRetry:!0})),g.deployError&&v.push(g.deployError),T++});T>Ue&&!g.previewInfo&&console.warn(`Deploy validation failed after ${Ue} attempts`)}let{diff:b,resultDiff:P,previewInfo:O,diffBinary:k,resultDiffBinary:A}=g;await a("cleanup",()=>er({config:e,diff:b,result:E.result,duration:E.duration,resultDiff:P,diffBinary:k,resultDiffBinary:A,previewInfo:O}),ee.timeUnits.minutes(10)),process.env.NETLIFY_LOCAL_MODE||(await u?.(),await Dt())})}catch(p){or.error("Got error while running pipeline",p),await u?.();let c=p instanceof Error&&p.message;throw await q(e.id,e.sessionId,{result:c||"Encountered error when running agent",state:"error"}),p}finally{await In()}};import sr from"crypto";var $=_("bin_local"),W=vn(C.argv.slice(2),{string:["cwd","cli-path","filter","prompt","runner","model","netlify-api-token"],boolean:["verbose","help"],alias:{h:"help",v:"verbose"}}),lt=()=>{console.log(`
agent-runner-cli-local - Run Netlify agent runner locally without API connections
USAGE:
@@ -171,6 +171,6 @@ NOTE:
This local mode mocks all Netlify API calls. The agent will run through
the full pipeline including inference and deployment, but API calls will
be logged instead of executed.
-
`)};
-
To link this directory to a Netlify site, run:`),$.error(" netlify link"),C.exit(1)}let n=`local-${sr.randomBytes(8).toString("hex")}`,o=`session-${sr.randomBytes(8).toString("hex")}`,i=
+
`)};W.help&&(lt(),C.exit(0));W.prompt||($.error("Error: --prompt is required"),lt(),C.exit(1));W["netlify-api-token"]||($.error("Error: --netlify-api-token is required - generate a PAT from your Netlify user settings"),lt(),C.exit(1));try{let e=W.cwd||C.cwd(),t=ar.join(e,".netlify","netlify-agent-runner-context*");lr.rmSync(t,{recursive:!0,force:!0});let r;try{r=await Rn(e)}catch(u){$.error(u.message),$.error(`
+
To link this directory to a Netlify site, run:`),$.error(" netlify link"),C.exit(1)}let n=`local-${sr.randomBytes(8).toString("hex")}`,o=`session-${sr.randomBytes(8).toString("hex")}`,i=W.runner||"claude";$.log("Starting agent runner in local mode",{runnerId:n,sessionId:o,siteId:r,cwd:e,runner:i});let s={id:n,sessionId:o,prompt:W.prompt,runner:i,model:W.model,accountType:"local",validateAgent:!1,validateAgentWithBuild:!1,sessionHistoryContext:[],siteContext:[],hasRepo:!0,useGateway:!0,sha:void 0,modelVersionOverrides:{}};C.env.NETLIFY_LOCAL_MODE="true",C.env.NETLIFY_API_HOST="api.netlify.com",C.env.NETLIFY_API_TOKEN=W["netlify-api-token"],C.env.SITE_ID=r,C.env.NETLIFY_TEAM_ID="local-team-id",C.env.NETLIFY_AGENT_RUNNER_USER_ID="local-user-id",C.env.SITE_NAME="local-site",i==="claude"?Ne(e,"claude")||($.log("Claude CLI not found, installing..."),await at(e,"@anthropic-ai/claude-code")):i==="gemini"?Ne(e,"gemini")||($.log("Gemini CLI not found, installing..."),await at(e,"@google/gemini-cli")):i==="codex"?Ne(e,"codex")||($.log("Codex CLI not found, installing..."),await at(e,"@openai/codex")):($.error(`Unknown runner: ${i}`),C.exit(1)),await ir({config:s,cwd:e,cliPath:W["cli-path"],filter:W.filter,tracing:{exporterUrl:void 0,traceparent:void 0}}),$.info("Finished agent (local mode)"),C.exit(0)}catch(e){$.error("Error running agent pipeline (local mode):",e),C.exit(1)}function at(e,t){return new Promise((r,n)=>{N("npm",["install",t,"--no-save"],{cwd:e}).then(({stdout:o})=>{$.log(`${t} installed: ${o}`),r()}).catch(o=>{$.error(`Error installing ${t}: ${o.stderr||o.message}`),n(o)})})}async function Rn(e){let t=ar.join(e,".netlify","state.json");try{let r=await lr.readFileSync(t,"utf-8"),n=JSON.parse(r);if(!n.siteId)throw new Error(`No siteId found in ${t}. Please link this directory to a Netlify site using 'netlify link'.`);return $.log(`Found site ID from state file: ${n.siteId}`),n.siteId}catch(r){throw r.code==="ENOENT"?new Error(`No .netlify/state.json found in ${e}. Please link this directory to a Netlify site using 'netlify link'.`):r}}
//# sourceMappingURL=bin-local.js.map
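For orientation only: the bin-local entry point above parses its flags with minimist, requires --prompt and --netlify-api-token, defaults the runner to claude (gemini and codex are also accepted), and expects the working directory to have been linked with `netlify link`. A hypothetical invocation, assuming the agent-runner-cli-local binary is on PATH and $NETLIFY_PAT is a placeholder for a personal access token, could look like:

    agent-runner-cli-local \
      --prompt "Fix the failing build" \
      --netlify-api-token "$NETLIFY_PAT" \
      --runner claude --verbose

This is a sketch of the parsed options, not the package's documented usage; the authoritative usage text is what the bundle prints for --help.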
package/dist/bin.js
CHANGED
@@ -1,32 +1,32 @@
#!/usr/bin/env node
-
import
-
`),r=[],n=-1,o=0;for(;o<t.length;){let u=t[o].slice(0,500).toLowerCase();if(Er.some(
-
`)),n=
+
import ut from"process";import Fn from"minimist";import{createRequire as vn}from"module";import{createTracerProvider as gr}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as dt}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as mr}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as hr}from"@netlify/otel";import{propagation as pt,context as ft,W3CTraceContextPropagator as yr}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as _r}from"@opentelemetry/exporter-trace-otlp-grpc";import fr from"process";function _(e){let t=fr.env.LOG!=="0";return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var Ue=_("tracing"),gt=async(e,t,r)=>(await gr({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new dt(new Me),new dt(new _r({url:r.exporterUrl}))],instrumentations:[new mr({skipHeaders:!0})]}),r.traceparent?(pt.setGlobalPropagator(new yr),pt.extract(ft.active(),{traceparent:r.traceparent,isRemote:!0})):ft.active());function S(e,t,r){return Ue.log(`\u23F3 TRACE: ${t} starting...`),hr(e,t,r)}var Me=class{export(t,r){for(let n of t)this.logSpan(n);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,n=t.attributes,o=[];for(let[u,a]of Object.entries(n))u.includes("duration")&&typeof a=="number"?o.push(`${u}=${a.toFixed(2)}ms`):o.push(`${u}=${a}`);let s=t.status?.code===2?"\u274C":"\u2705",i=o.length>0?` [${o.join(", ")}]`:"";Ue.log(`${s} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${i}`),t.status?.code===2&&t.status.message&&Ue.log(` \u274C Error: ${t.status.message}`)}};var Er=["error","failed","exception","fatal","panic","abort","crash"];function mt(e){let t=e.split(`
+
`),r=[],n=-1,o=0;for(;o<t.length;){let u=t[o].slice(0,500).toLowerCase();if(Er.some(f=>u.includes(f))){let f=Math.max(0,o-10,n+1),p=Math.min(t.length-1,o+20),c=[];for(let m=f;m<=p;m++)c.push(t[m]);r.push(c.join(`
+
`)),n=p,o=p+1}else o++}if(r.length===0)return e;let s=r.map((i,u)=>`<extracted_error_chunk order="${u+1}">
${i}
</extracted_error_chunk>`).join(`
-
`);return s.length>e.length*.8?e:s}import Pe from"process";import{getTracer as ln}from"@netlify/otel";import ge from"process";var Ie=ge.env.NETLIFY_API_URL,xe=ge.env.NETLIFY_API_TOKEN,B=_("api"),ve=()=>ge.env.NETLIFY_LOCAL_MODE==="true",me=async(e,t={})=>{if(!Ie||!xe)throw new Error("No API URL or token");let r=new URL(e,Ie),n={...t,headers:{...t.headers,Authorization:`Bearer ${xe}`}};ge.env.AGENT_RUNNERS_DEBUG==="true"&&(n.headers["x-nf-debug-logging"]="true"),t.json&&(n.headers||={},n.headers["Content-Type"]="application/json",n.body=JSON.stringify(t.json));let o=await fetch(r,n),s=o.ok&&o.status<=299;if(ge.env.AGENT_RUNNERS_DEBUG==="true")B.log(`Response headers for ${r}:`),o.headers.forEach((u,a)=>{B.log(` ${a}: ${u}`)});else{let u=o.headers.get("x-request-id")||o.headers.get("x-nf-request-id");B.log(`Request ID for ${r}: ${u||"N/A"}`)}if(s||B.error(`Got status ${o.status} for request ${r}`),t.raw){if(!s)throw o;return o}let i=await(o.headers.get("content-type")?.includes("application/json")?o.json():o.text());if(!s)throw i;return i},mt=e=>{B.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(Ie=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(xe=e.constants.NETLIFY_API_TOKEN)},ht=()=>({apiUrl:Ie,token:xe}),he=async(e,t)=>ve()?(B.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):me(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),q=async(e,t,r)=>ve()?(B.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):me(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var yt=async(e,t)=>ve()?(B.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):me(`/api/v1/agent_runners/${e}/sessions/${t}`),_t=(e,t,r)=>me(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r}}),Et=async(e,t)=>ve()?(B.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):me(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),Ge=async(e,t)=>{B.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var te=_("ai_gateway"),je=null;var wt=async()=>{if(je)return je;te.log("Fetching available AI gateway providers");let e=await fetch(`${ht().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return je=t,te.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},wr=async(e,t)=>{let n=(await wt()).providers[e];if(!n)return te.log(`Provider '${e}' not found`),!1;let o=n.models.includes(t);return te.log(`Model validation for ${e}/${t}`,{isAvailable:o}),o},Tt=async({netlify:e,config:t})=>{let r,n,o,s,i=e.constants?.SITE_ID;if(!i)throw new Error("No site id");let u=async()=>{clearTimeout(o),te.log("Requesting AI gateway information");let a=await _t(i,t.id,t.sessionId);if({token:r,url:s}=a,n=a.expires_at?a.expires_at*1e3:void 0,te.log("Got AI gateway information",{token:!!r,expiresAt:n,url:s}),n){let 
p=n-Date.now()-6e4;p>0&&(o=setTimeout(()=>{u()},p))}};return await Promise.all([u(),wt()]),{get url(){return s},get token(){return r},isModelAvailableForProvider:wr}};import H from"process";import K from"path";import Re from"fs";import{fileURLToPath as Ar}from"url";import{createRequire as Sr}from"module";import{execa as br,execaCommand as lo}from"execa";import{Transform as Tr}from"stream";var Ir=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","ERROR_LOGS_PATH","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),xr=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function vr(){return Object.entries(process.env).filter(([e,t])=>!(!t||Ir.has(e)||xr.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function W(e){if(typeof e!="string")return e;let t=vr();if(t.length===0)return e;let r=e;return t.forEach(n=>{let o=new RegExp(Rr(n),"g");r=r.replace(o,"******")}),r}function Rr(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var re=class extends Tr{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,n){let o=t.toString(),s=W(o);n(null,s)}};function It(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(n,o,s){let i=typeof n=="string"?W(n):n;return typeof o=="function"?t(i,o):t(i,o,s)},process.stderr.write=function(n,o,s){let i=typeof n=="string"?W(n):n;return typeof o=="function"?r(i,o):r(i,o,s)}}var ye=null,xt=e=>(ye&&ye.destroy(),ye=new Z({totalAllowedTime:e}),ye),vt=()=>ye;var Z=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,n)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let o=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),s=null,i=null;n!==void 0&&(i=new Promise((u,a)=>{s=setTimeout(()=>{a(new Error(`${t} stage exceeded its maximum duration of ${n}ms`))},n)}));try{return i?await Promise.race([r(),i]):await r()}finally{o(),s&&clearTimeout(s)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var Rt={name:"@netlify/agent-runner-cli",type:"module",version:"1.0.0-broken",description:"CLI tool for running Netlify agents",main:"./dist/index.js",types:"./dist/index.d.ts",exports:"./dist/index.js",bin:{"agent-runner-cli":"./dist/bin.js","agent-runner-cli-local":"./dist/bin-local.js"},files:["dist/**/*.js","dist/**/*.d.ts","patches","scripts"],scripts:{build:"tsup",dev:"tsup --watch",prepare:"husky install node_modules/@netlify/eslint-config-node/.husky/",prepublishOnly:"npm ci && npm test",prepack:"npm run build",test:"run-s build format test:dev",format:"run-s build format:check-fix:*","format:ci":"run-s build format:check:*","format:check-fix:lint":"run-e format:check:lint format:fix:lint","format:check:lint":"cross-env-shell eslint $npm_package_config_eslint","format:fix:lint":"cross-env-shell eslint --fix $npm_package_config_eslint","format:check-fix:prettier":"run-e format:check:prettier format:fix:prettier","format:check:prettier":"cross-env-shell prettier --check $npm_package_config_prettier","format:fix:prettier":"cross-env-shell prettier --write $npm_package_config_prettier","test:dev":"run-s build test:dev:*","test:ci":"run-s build test:ci:*","test:dev:vitest":"LOG=0 vitest --exclude '**/integration/**'","test:ci:vitest":"LOG=0 c8 -r lcovonly -r text -r json vitest --exclude '**/integration/**'","test:integration":"vitest run test/integration/","test:integration:codex":"vitest run test/integration/codex.test.ts","test:integration:claude":"vitest run test/integration/claude.test.ts","test:integration:gemini":"vitest run test/integration/gemini.test.ts",postinstall:"node scripts/postinstall.js"},config:{eslint:'--cache --format=codeframe --max-warnings=0 "{src,scripts,test,.github}/**/*.{js,ts,md,html}"',prettier:'--ignore-path .gitignore --loglevel=warn "{src,scripts,test,.github}/**/*.{js,ts,md,yml,json,html}" "*.{js,ts,yml,json,html}" ".*.{js,ts,yml,json,html}" 
"!**/package-lock.json" "!package-lock.json"'},keywords:[],license:"MIT",repository:"netlify/agent-runner-cli",bugs:{url:"https://github.com/netlify/agent-runner-cli/issues"},author:"Netlify Inc.",directories:{test:"test"},devDependencies:{"@commitlint/cli":"^19.0.0","@commitlint/config-conventional":"^19.0.0","@eslint/compat":"^1.3.2","@eslint/js":"^9.35.0","@netlify/eslint-config-node":"^7.0.1","@types/node":"^24.5.0","@typescript-eslint/eslint-plugin":"^7.1.0","@typescript-eslint/parser":"^7.1.0","@vitest/eslint-plugin":"^1.3.10",c8:"^9.0.0","eslint-config-prettier":"^10.1.8","eslint-plugin-n":"^17.0.0",husky:"^8.0.0","patch-package":"^8.0.0",tsup:"^8.5.0",typescript:"^5.0.0","typescript-eslint":"^8.44.0",vitest:"^1.5.0"},dependencies:{"@anthropic-ai/claude-code":"2.0.69","@google/gemini-cli":"0.20.2","@netlify/otel":"^5.1.1","@openai/codex":"0.72.0","@opentelemetry/exporter-trace-otlp-grpc":"^0.57.0",execa:"^8.0.0","get-port":"^5.1.1",minimist:"^1.2.8"}};var Cr=Ar(import.meta.url),Pr=K.dirname(Cr),Or=Sr(import.meta.url),Ne=_("shell"),Ye=new Set,Fr={preferLocal:!0},F=(e,t,r)=>{let[n,o]=Lr(t,r),s={...Fr,...o},i=br(e,n,s);return Dr(i,s),kr(i),i};var Lr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},Dr=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(H.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new re).pipe(H.stdout),e.stdout?.pipe(new re).pipe(H.stdout),e.stderr?.pipe(new re).pipe(H.stderr);return}e.stdout?.pipe(H.stdout),e.stderr?.pipe(H.stderr)},Nt=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(H.kill(-e.pid,t),Ne.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return Ne.error("Error killing process:",r),!1}},$r=e=>Nt(e,"SIGKILL"),kr=e=>{Ye.add(e);let t=vt();if(t){let r=t.onTimesUp(()=>{Ne.log(`Global timer expired, killing process ${e.pid}`),Nt(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(Ne.log(`Force killing process ${e.pid} after timeout`),$r(e))},5e3)});e.on("exit",()=>{Ye.delete(e),r()}),e.on("error",()=>{Ye.delete(e),r()})}};function ne(e,t){if(!H.env.NETLIFY_LOCAL_MODE)try{let o=Or.resolve(Rt.name),s=K.dirname(o);for(;s!==K.dirname(s);){let i=K.dirname(s);if(K.basename(i)==="node_modules"){let u=K.join(i,".bin",t);if(Re.existsSync(u))return u;break}s=i}}catch(o){console.error("Could not resolve package.json",o)}if(H.env.NODE_PATH){let o=K.join(H.env.NODE_PATH,".bin",t);if(Re.existsSync(o))return o}let r=K.join(e,"node_modules",".bin",t);if(Re.existsSync(r))return r;let n=K.join(Pr,"..","node_modules",".bin",t);if(Re.existsSync(n))return n}var At="netlify-agent-runner-context.md",Be="task-history",He="netlify-context",k=".netlify",oe="results.md",qe="assets",We="other",Ke="personal";var Ve="enterprise",Je="free",St=[Ke,"pro",Ve,Je];var bt=_("utils"),Ur=e=>new Promise(t=>{setTimeout(t,e)}),Ct=(e,t=3e3)=>{let r=!1,n=null,o=[],s=null,i=(...u)=>{if(r)return n=u,new Promise(f=>{o.push(f)});r=!0;let a,p=new Promise(f=>{a=f});return s=(async()=>{await Promise.resolve();let f=await e(...u);for(a(f);;){if(await Ur(t),!n)return r=!1,s=null,f;let c=n,m=o;n=null,o=[],f=await e(...c),m.forEach(h=>{h(f)})}})(),p};return i.flush=async()=>{if((r||n)&&s)return await s,i.flush()},i},se=(e,t,r=!1)=>{let n=null,o=null,s=null,i=function(...u){o=u,s=this;let a=r&&!n;clearTimeout(n),n=setTimeout(()=>{n=null,r||(e.apply(s,o),o=null,s=null)},t),a&&(e.apply(s,o),o=null,s=null)};return i.cancel=()=>{clearTimeout(n),n=null,o=null,s=null},i.flush=()=>{if(n){clearTimeout(n);let 
u=o,a=s;n=null,o=null,s=null,e.apply(a,u)}},i},Ae=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(n){t&&(r?.error?r.error("Could not parse JSON",n):bt.error("Could not parse JSON",n))}},Pt=(e,t)=>{let o=".netlify.app",s="agent-";if(!t)return`${s}${e.slice(0,6)}`;let u=`--${t}${o}`;if(u.length>55)return"";let a=60-u.length;if(a<=0)return"";if(a>=s.length+6){let p=Math.min(a-s.length,e.length);return`${s}${e.slice(0,p)}`}return e.slice(0,a)},Mr=e=>!e||typeof e!="object"||Array.isArray(e)||Object.keys(e).length===0?!1:!!St.some(t=>t in e),Ot=()=>{let e={},t={codex:process.env.NETLIFY_FF_AGENT_RUNNER_CODEX_VERSION,claude:process.env.NETLIFY_FF_AGENT_RUNNER_CLAUDE_VERSION,gemini:process.env.NETLIFY_FF_AGENT_RUNNER_GEMINI_VERSION};return Object.entries(t).forEach(([r,n])=>{if(n){let o=`NETLIFY_FF_AGENT_RUNNER_${r.toUpperCase()}_VERSION`;try{let s=JSON.parse(n);Mr(s)&&(e[r]=s)}catch(s){let u=s instanceof SyntaxError?"Invalid JSON":s.message;bt.error(`Could not parse ${r} model version override from ${o}: ${u}`)}}}),e},Gr=50*1024,Xe=(e,t=Gr)=>{if(!e||typeof e!="string"||e.length<=t)return e;let n=e.startsWith("```")?"\n... [truncated]\n```":"... [truncated]";return e.slice(0,t)+n};import{Buffer as Ft}from"buffer";import jr from"path";var Lt=_("repo"),Dt=async({config:e,isRetry:t})=>{Lt.info("Getting runner diffs");let r=await Br(),{hasChanges:n}=r,{status:o}=r;if(!n)return{hasChanges:!1};if(!t){let T=Hr(o);await qr(T)}Lt.info("Changes after processing"),await Ze();let s=await Qe(o);await ze(s);let i={stdio:["ignore","pipe","pipe"]},a=(await F("git",["diff","--staged"],i)).stdout;if(n=!!a,!n)return{hasChanges:!1,ignored:s};let f=(await F("git",["diff","--staged","--binary"],i)).stdout,c,m;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await F("git",["commit","-m","Agent runner"]),c=(await F("git",["diff",e.sha,"HEAD"],i)).stdout;let g=(await F("git",["diff",e.sha,"HEAD","--binary"],i)).stdout;c!==g&&(m=Ft.from(g).toString("base64"))}let h={hasChanges:!0,diff:a,resultDiff:c,ignored:s};return a!==f&&(h.diffBinary=Ft.from(f).toString("base64")),m&&(h.resultDiffBinary=m),h},ze=async(e=[])=>{await F("git",["add",".",...e])},Ze=async()=>(await F("git",["status","-s"])).stdout,$t=/.. (.+)?\.log$/,Yr=[$t],Br=async()=>{let e=await Ze();return{hasChanges:(e.trim().length===0?[]:e.split(`
-
`).filter(n=>Yr.some(s=>s instanceof RegExp?s.test(n):n===s)?!1:n[1]?.trim()!=="")).length!==0,status:e}},
-
`).forEach(n=>{t.forEach(s=>{[`?? ${s}`,`?? ${s}${jr.sep}`].some(u=>n.startsWith(u))&&r.push(`:!${s}`)});let o=n.match(
-
`).reduce((r,n)=>{if(!n)return r;let[o,s,,...i]=n,u=i.join(""),a=o.trim(),
+
`);return s.length>e.length*.8?e:s}import Oe from"process";import{getTracer as ln}from"@netlify/otel";import ge from"process";var xe=ge.env.NETLIFY_API_URL,Re=ge.env.NETLIFY_API_TOKEN,B=_("api"),ve=()=>ge.env.NETLIFY_LOCAL_MODE==="true",me=async(e,t={})=>{if(!xe||!Re)throw new Error("No API URL or token");let r=new URL(e,xe),n={...t,headers:{...t.headers,Authorization:`Bearer ${Re}`}};ge.env.AGENT_RUNNERS_DEBUG==="true"&&(n.headers["x-nf-debug-logging"]="true"),t.json&&(n.headers||={},n.headers["Content-Type"]="application/json",n.body=JSON.stringify(t.json));let o=await fetch(r,n),s=o.ok&&o.status<=299;if(ge.env.AGENT_RUNNERS_DEBUG==="true")B.log(`Response headers for ${r}:`),o.headers.forEach((u,a)=>{B.log(` ${a}: ${u}`)});else{let u=o.headers.get("x-request-id")||o.headers.get("x-nf-request-id");B.log(`Request ID for ${r}: ${u||"N/A"}`)}if(s||B.error(`Got status ${o.status} for request ${r}`),t.raw){if(!s)throw o;return o}let i=await(o.headers.get("content-type")?.includes("application/json")?o.json():o.text());if(!s)throw i;return i},ht=e=>{B.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(xe=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(Re=e.constants.NETLIFY_API_TOKEN)},yt=()=>({apiUrl:xe,token:Re}),he=async(e,t)=>ve()?(B.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):me(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),H=async(e,t,r)=>ve()?(B.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):me(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var _t=async(e,t)=>ve()?(B.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):me(`/api/v1/agent_runners/${e}/sessions/${t}`),Et=(e,t,r)=>me(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r}}),wt=async(e,t)=>ve()?(B.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):me(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),Ge=async(e,t)=>{B.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var te=_("ai_gateway"),je=null;var Tt=async()=>{if(je)return je;te.log("Fetching available AI gateway providers");let e=await fetch(`${yt().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return je=t,te.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},wr=async(e,t)=>{let n=(await Tt()).providers[e];if(!n)return te.log(`Provider '${e}' not found`),!1;let o=n.models.includes(t);return te.log(`Model validation for ${e}/${t}`,{isAvailable:o}),o},It=async({netlify:e,config:t})=>{let r,n,o,s,i=e.constants?.SITE_ID;if(!i)throw new Error("No site id");let u=async()=>{clearTimeout(o),te.log("Requesting AI gateway information");let a=await Et(i,t.id,t.sessionId);if({token:r,url:s}=a,n=a.expires_at?a.expires_at*1e3:void 0,te.log("Got AI gateway information",{token:!!r,expiresAt:n,url:s}),n){let 
f=n-Date.now()-6e4;f>0&&(o=setTimeout(()=>{u()},f))}};return await Promise.all([u(),Tt()]),{get url(){return s},get token(){return r},isModelAvailableForProvider:wr}};import q from"process";import K from"path";import Ne from"fs";import{fileURLToPath as Ar}from"url";import{createRequire as Sr}from"module";import{execa as br,execaCommand as lo}from"execa";import{Transform as Tr}from"stream";var Ir=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","ERROR_LOGS_PATH","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),xr=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function Rr(){return Object.entries(process.env).filter(([e,t])=>!(!t||Ir.has(e)||xr.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function W(e){if(typeof e!="string")return e;let t=Rr();if(t.length===0)return e;let r=e;return t.forEach(n=>{let o=new RegExp(vr(n),"g");r=r.replace(o,"******")}),r}function vr(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var re=class extends Tr{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,n){let o=t.toString(),s=W(o);n(null,s)}};function xt(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(n,o,s){let i=typeof n=="string"?W(n):n;return typeof o=="function"?t(i,o):t(i,o,s)},process.stderr.write=function(n,o,s){let i=typeof n=="string"?W(n):n;return typeof o=="function"?r(i,o):r(i,o,s)}}var ye=null,Rt=e=>(ye&&ye.destroy(),ye=new Z({totalAllowedTime:e}),ye),vt=()=>ye;var Z=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,n)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let o=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),s=null,i=null;n!==void 0&&(i=new Promise((u,a)=>{s=setTimeout(()=>{a(new Error(`${t} stage exceeded its maximum duration of ${n}ms`))},n)}));try{return i?await Promise.race([r(),i]):await r()}finally{o(),s&&clearTimeout(s)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var Nt={name:"@netlify/agent-runner-cli",type:"module",version:"1.0.1-broken",description:"CLI tool for running Netlify agents",main:"./dist/index.js",types:"./dist/index.d.ts",exports:"./dist/index.js",bin:{"agent-runner-cli":"./dist/bin.js","agent-runner-cli-local":"./dist/bin-local.js"},files:["dist/**/*.js","dist/**/*.d.ts","patches","scripts"],scripts:{build:"tsup",dev:"tsup --watch",prepare:"husky install node_modules/@netlify/eslint-config-node/.husky/",prepublishOnly:"npm ci && npm test",prepack:"npm run build",test:"run-s build format test:dev",format:"run-s build format:check-fix:*","format:ci":"run-s build format:check:*","format:check-fix:lint":"run-e format:check:lint format:fix:lint","format:check:lint":"cross-env-shell eslint $npm_package_config_eslint","format:fix:lint":"cross-env-shell eslint --fix $npm_package_config_eslint","format:check-fix:prettier":"run-e format:check:prettier format:fix:prettier","format:check:prettier":"cross-env-shell prettier --check $npm_package_config_prettier","format:fix:prettier":"cross-env-shell prettier --write $npm_package_config_prettier","test:dev":"run-s build test:dev:*","test:ci":"run-s build test:ci:*","test:dev:vitest":"LOG=0 vitest --exclude '**/integration/**'","test:ci:vitest":"LOG=0 c8 -r lcovonly -r text -r json vitest --exclude '**/integration/**'","test:integration":"vitest run test/integration/","test:integration:codex":"vitest run test/integration/codex.test.ts","test:integration:claude":"vitest run test/integration/claude.test.ts","test:integration:gemini":"vitest run test/integration/gemini.test.ts",postinstall:"node scripts/postinstall.js"},config:{eslint:'--cache --format=codeframe --max-warnings=0 "{src,scripts,test,.github}/**/*.{js,ts,md,html}"',prettier:'--ignore-path .gitignore --loglevel=warn "{src,scripts,test,.github}/**/*.{js,ts,md,yml,json,html}" "*.{js,ts,yml,json,html}" ".*.{js,ts,yml,json,html}" 
"!**/package-lock.json" "!package-lock.json"'},keywords:[],license:"MIT",repository:"netlify/agent-runner-cli",bugs:{url:"https://github.com/netlify/agent-runner-cli/issues"},author:"Netlify Inc.",directories:{test:"test"},devDependencies:{"@commitlint/cli":"^19.0.0","@commitlint/config-conventional":"^19.0.0","@eslint/compat":"^1.3.2","@eslint/js":"^9.35.0","@netlify/eslint-config-node":"^7.0.1","@types/node":"^24.5.0","@typescript-eslint/eslint-plugin":"^7.1.0","@typescript-eslint/parser":"^7.1.0","@vitest/eslint-plugin":"^1.3.10",c8:"^9.0.0","eslint-config-prettier":"^10.1.8","eslint-plugin-n":"^17.0.0",husky:"^8.0.0","patch-package":"^8.0.0",tsup:"^8.5.0",typescript:"^5.0.0","typescript-eslint":"^8.44.0",vitest:"^1.5.0"},dependencies:{"@anthropic-ai/claude-code":"2.0.69","@google/gemini-cli":"0.20.2","@netlify/otel":"^5.1.1","@openai/codex":"0.72.0","@opentelemetry/exporter-trace-otlp-grpc":"^0.57.0",execa:"^8.0.0","get-port":"^5.1.1",minimist:"^1.2.8"}};var Cr=Ar(import.meta.url),Pr=K.dirname(Cr),Or=Sr(import.meta.url),Ae=_("shell"),Ye=new Set,Fr={preferLocal:!0},F=(e,t,r)=>{let[n,o]=Lr(t,r),s={...Fr,...o},i=br(e,n,s);return Dr(i,s),kr(i),i};var Lr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},Dr=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(q.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new re).pipe(q.stdout),e.stdout?.pipe(new re).pipe(q.stdout),e.stderr?.pipe(new re).pipe(q.stderr);return}e.stdout?.pipe(q.stdout),e.stderr?.pipe(q.stderr)},At=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(q.kill(-e.pid,t),Ae.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return Ae.error("Error killing process:",r),!1}},$r=e=>At(e,"SIGKILL"),kr=e=>{Ye.add(e);let t=vt();if(t){let r=t.onTimesUp(()=>{Ae.log(`Global timer expired, killing process ${e.pid}`),At(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(Ae.log(`Force killing process ${e.pid} after timeout`),$r(e))},5e3)});e.on("exit",()=>{Ye.delete(e),r()}),e.on("error",()=>{Ye.delete(e),r()})}};function ne(e,t){if(!q.env.NETLIFY_LOCAL_MODE)try{let o=Or.resolve(Nt.name),s=K.dirname(o);for(;s!==K.dirname(s);){let i=K.dirname(s);if(K.basename(i)==="node_modules"){let u=K.join(i,".bin",t);if(Ne.existsSync(u))return u;break}s=i}}catch(o){console.error("Could not resolve package.json",o)}if(q.env.NODE_PATH){let o=K.join(q.env.NODE_PATH,".bin",t);if(Ne.existsSync(o))return o}let r=K.join(e,"node_modules",".bin",t);if(Ne.existsSync(r))return r;let n=K.join(Pr,"..","node_modules",".bin",t);if(Ne.existsSync(n))return n}var St="netlify-agent-runner-context.md",Be="task-history",He="netlify-context",k=".netlify",oe="results.md",qe="assets",We="other",Ke="personal";var Ve="enterprise",Je="free",bt=[Ke,"pro",Ve,Je];var Ct=_("utils"),Ur=e=>new Promise(t=>{setTimeout(t,e)}),Pt=(e,t=3e3)=>{let r=!1,n=null,o=[],s=null,i=(...u)=>{if(r)return n=u,new Promise(p=>{o.push(p)});r=!0;let a,f=new Promise(p=>{a=p});return s=(async()=>{await Promise.resolve();let p=await e(...u);for(a(p);;){if(await Ur(t),!n)return r=!1,s=null,p;let c=n,m=o;n=null,o=[],p=await e(...c),m.forEach(h=>{h(p)})}})(),f};return i.flush=async()=>{if((r||n)&&s)return await s,i.flush()},i},se=(e,t,r=!1)=>{let n=null,o=null,s=null,i=function(...u){o=u,s=this;let a=r&&!n;clearTimeout(n),n=setTimeout(()=>{n=null,r||(e.apply(s,o),o=null,s=null)},t),a&&(e.apply(s,o),o=null,s=null)};return i.cancel=()=>{clearTimeout(n),n=null,o=null,s=null},i.flush=()=>{if(n){clearTimeout(n);let 
u=o,a=s;n=null,o=null,s=null,e.apply(a,u)}},i},Se=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(n){t&&(r?.error?r.error("Could not parse JSON",n):Ct.error("Could not parse JSON",n))}},Ot=(e,t)=>{let o=".netlify.app",s="agent-";if(!t)return`${s}${e.slice(0,6)}`;let u=`--${t}${o}`;if(u.length>55)return"";let a=60-u.length;if(a<=0)return"";if(a>=s.length+6){let f=Math.min(a-s.length,e.length);return`${s}${e.slice(0,f)}`}return e.slice(0,a)},Mr=e=>!e||typeof e!="object"||Array.isArray(e)||Object.keys(e).length===0?!1:!!bt.some(t=>t in e),Ft=()=>{let e={},t={codex:process.env.NETLIFY_FF_AGENT_RUNNER_CODEX_VERSION,claude:process.env.NETLIFY_FF_AGENT_RUNNER_CLAUDE_VERSION,gemini:process.env.NETLIFY_FF_AGENT_RUNNER_GEMINI_VERSION};return Object.entries(t).forEach(([r,n])=>{if(n){let o=`NETLIFY_FF_AGENT_RUNNER_${r.toUpperCase()}_VERSION`;try{let s=JSON.parse(n);Mr(s)&&(e[r]=s)}catch(s){let u=s instanceof SyntaxError?"Invalid JSON":s.message;Ct.error(`Could not parse ${r} model version override from ${o}: ${u}`)}}}),e},Gr=50*1024,Xe=(e,t=Gr)=>{if(!e||typeof e!="string"||e.length<=t)return e;let n=e.startsWith("```")?"\n... [truncated]\n```":"... [truncated]";return e.slice(0,t)+n};import{Buffer as Lt}from"buffer";import jr from"path";var Dt=_("repo"),$t=async({config:e,isRetry:t})=>{Dt.info("Getting runner diffs");let r=await Br(),{hasChanges:n}=r,{status:o}=r;if(!n)return{hasChanges:!1};if(!t){let T=Hr(o);await qr(T)}Dt.info("Changes after processing"),await Ze();let s=await Qe(o);await ze(s);let i={stdio:["ignore","pipe","pipe"]},a=(await F("git",["diff","--staged"],i)).stdout;if(n=!!a,!n)return{hasChanges:!1,ignored:s};let p=(await F("git",["diff","--staged","--binary"],i)).stdout,c,m;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await F("git",["commit","-m","Agent runner"]),c=(await F("git",["diff",e.sha,"HEAD"],i)).stdout;let g=(await F("git",["diff",e.sha,"HEAD","--binary"],i)).stdout;c!==g&&(m=Lt.from(g).toString("base64"))}let h={hasChanges:!0,diff:a,resultDiff:c,ignored:s};return a!==p&&(h.diffBinary=Lt.from(p).toString("base64")),m&&(h.resultDiffBinary=m),h},ze=async(e=[])=>{await F("git",["add",".",...e])},Ze=async()=>(await F("git",["status","-s"])).stdout,kt=/.. (.+)?\.log$/,Yr=[kt],Br=async()=>{let e=await Ze();return{hasChanges:(e.trim().length===0?[]:e.split(`
+
`).filter(n=>Yr.some(s=>s instanceof RegExp?s.test(n):n===s)?!1:n[1]?.trim()!=="")).length!==0,status:e}},Ut=async()=>{let{stdout:e}=await F("git",["rev-parse","HEAD"]);return e.trim()},Mt=async()=>{let{stdout:e}=await F("git",["rev-list","--max-parents=0","HEAD"]);return e.trim()},Qe=async e=>{e||=await Ze();let t=[".netlify","node_modules","dist"],r=[];return e.split(`
+
`).forEach(n=>{t.forEach(s=>{[`?? ${s}`,`?? ${s}${jr.sep}`].some(u=>n.startsWith(u))&&r.push(`:!${s}`)});let o=n.match(kt)?.[1];o&&r.push(`:!${o}.log`)}),r},Gt=async()=>{await F("git",["reset","--hard","HEAD"])},Hr=e=>{let t=e.split(`
+
`).reduce((r,n)=>{if(!n)return r;let[o,s,,...i]=n,u=i.join(""),a=o.trim(),f=s.trim();return r[u]?r[u].change=f:r[u]={filePath:u,stage:a,change:f},r},{});return Object.values(t)},qr=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push(F("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import Kr from"fs/promises";import Vr from"os";import Bt from"path";import ce from"process";import Jr from"readline";import et from"path";import Wr from"fs/promises";var tt=_("agent-output-utils");async function ie({initialResult:e,agentName:t,hasError:r}){let n="",o=et.join(process.cwd(),k,oe);try{let s=await Wr.readFile(o,"utf-8");s&&(n=s,tt.log(`Pulled result from ${et.relative(process.cwd(),o)}`))}catch{tt.log(`No results file found at ${et.relative(process.cwd(),o)}`)}return n||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function ae({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,n=r?.replace(/\s+/g," ").trim().toLowerCase()||"",o="";return n?.includes("ai gateway is not available for your account")||n?.includes("ai gateway is not enabled for your account")?o="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":n?.includes("error when talking to gemini api")?o="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(n?.includes("connection closed prematurely")||n?.includes("499")&&t.toLowerCase().includes("gemini"))&&(o=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),n?.includes("request timed out")&&(o=`The ${t} API request's have timed out. Please try again or use a different available agent.`),n?.includes("network error")&&(o=`The ${t} agent is having network issues. 
Please try again or use a different available agent.`),o&&tt.log(`Providing updated error messsage: ${o}, replacing original error: ${r}`),o||r||void 0}function le(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error"))}var U=_("runner_claude"),jt="Claude Code",ue="claude-opus-4-5-20251101",Yt=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,Xr=({catchError:e,runCmd:t,error:r,result:n,runnerName:o})=>(U.log(`${o} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!n,resultLength:n?n.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),n?(U.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:n}:{error:"Process completed with errors but result was captured",result:n}):(U.log("Setting result to undefined because no valid result was captured"),{error:r||`${o} failed`,result:void 0}));async function rt({config:e,netlify:t,persistSteps:r,aiGateway:n,continueSession:o,priorAgentSessionId:s,cwd:i=ce.cwd()}){let u=e,{accountType:a,prompt:f,modelVersionOverrides:p}=u,{model:c}=u,m="";if(n){let{token:y,url:d}=n;if(!y||!d)throw new Error("No token or url provided from AI Gateway");if(p?.claude){let l=p?.claude?.[a];if(l){if(!await n.isModelAvailableForProvider("anthropic",l))throw new Error(`Model override '${l}' is not available for anthropic provider`);c=l}}else if(c){if(!await n.isModelAvailableForProvider("anthropic",c))throw new Error(`Model '${c}' is not available for anthropic provider`)}else!!ue&&await n.isModelAvailableForProvider("anthropic",ue)?(c=ue,U.log(`Using default model: ${ue}`)):ue&&U.log(`Default model ${ue} is not available, proceeding without model specification`);ce.env.ANTHROPIC_API_KEY=y,ce.env.ANTHROPIC_BASE_URL=d}else if(!ce.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let h=[],T=[],I={},g=0,E=0,R,N,b=[ne(i,"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose",...c?["--model",c]:[],...o?["--continue"]:[],...o&&s?["--resume",s]:[],"-p",f],C=`${ce.env.NVM_BIN}/node`;U.log(`Running ${C} ${b.join(" ")}`);let O=t.utils.run(C,b,{all:!0,env:ce.env,cwd:i});O.stdin?.end();let A=se(()=>{r?.({steps:h,duration:E})},250),x=(y,d)=>{let l={...y,id:g};g+=1,T.push(l),h.push(l),d||A.flush(),A(),d&&A.flush()},G=Jr.createInterface({input:O.all});return G.on("error",y=>{U.error("Readline interface error",{error:y.message,stack:y.stack})}),G.on("line",y=>{let d=null;try{d=JSON.parse(y)}catch{U.log("Could not parse line",y)}d?.session_id&&d.session_id!==m&&(m=d.session_id),Array.isArray(d?.message?.content)?d.message.content.forEach(l=>{switch(l.type){case"text":{l.text&&x({message:l.text});break}case"image":{typeof l.source=="object"&&l.source&&l.source.type==="base64"&&l.source.media_type?x({message:``}):U.log(`Unsupported image type ${l.source?.type}`,l.source);break}case"tool_use":{if(l.name==="Task"){let 
w=l.input?.description&&`\`${l.input.description}\``;x({title:[Yt(l.name),w].filter(Boolean).join(" ")})}else l.id&&(I[l.id]=l);A.flush();break}case"tool_result":{let w=l.tool_use_id?I[l.tool_use_id]:void 0,X;if(w){let V=w.input?.file_path&&Bt.relative(i,w.input.file_path),P=V&&`\`${V}\``;X=[Yt(w.name||""),P].filter(Boolean).join(" ")}let Ie=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(w?.name||""),z;if(typeof l.content=="string")z=l.content;else if(Array.isArray(l.content)){let V=[];l.content.forEach(P=>{P?.type==="text"&&typeof P.text=="string"?V.push(P.text):P?.type==="image"&&typeof P.source=="object"&&P.source?P.source.type==="base64"&&P.source.media_type?V.push(``):U.log(`Unsupported image type ${P.source.type}`,P.source):U.log(`Unsupported block type ${P?.type}`)}),z=V.join(`
|
|
12
12
|
|
|
13
|
-
`)}
|
|
13
|
+
`)}Ie&&z&&(z=`\`\`\`
|
|
14
14
|
${z.trim()}
|
|
15
|
-
\`\`\``),x({title:X,message:z},!0);break}case"thinking":{l.thinking&&x({title:"Thinking",message:l.thinking},!0);break}default:U.log(`Message content type is not supported ${l.type}`,l)}}):d?.type==="result"&&(E=d.duration_ms||0,d.is_error?
|
|
15
|
+
\`\`\``),x({title:X,message:z},!0);break}case"thinking":{l.thinking&&x({title:"Thinking",message:l.thinking},!0);break}default:U.log(`Message content type is not supported ${l.type}`,l)}}):d?.type==="result"&&(E=d.duration_ms||0,d.is_error?N=d.result:R=d.result,[T,h].forEach(l=>{l[l.length-1]?.message===R&&l.pop()}))}),await O.catch(y=>{({error:N,result:R}=Xr({catchError:y,runCmd:O,error:N,result:R,runnerName:"Claude"}))}),G.close(),A.flush(),{steps:T,duration:E,result:await ie({initialResult:R,agentName:jt,hasError:!!N}),error:ae({error:N,agentName:jt}),isRetryableError:le(N),agentSessionId:m}}var Ht=async()=>{let e=Bt.join(Vr.homedir(),".claude");await Kr.rm(e,{recursive:!0,force:!0})};import _e from"fs/promises";import Wt from"os";import be from"path";import Q from"process";import zr from"readline";var M=_("runner_codex"),qt="Codex CLI",de="gpt-5.2",Zr=({catchError:e,runCmd:t,error:r,result:n,runnerName:o})=>(M.log(`${o} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!n,resultLength:n?n.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),n?(M.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:n}:{error:"Process completed with errors but result was captured",result:n}):(M.log("Setting result to undefined because no valid result was captured"),{error:r||`${o} failed`,result:void 0}));async function nt({config:e,netlify:t,persistSteps:r=void 0,sendSteps:n=void 0,aiGateway:o,cwd:s=Q.cwd()}){let{accountType:i,prompt:u,modelVersionOverrides:a}=e,{model:f}=e;if(o){let{token:d,url:l}=o;if(!d||!l)throw new Error("No token or url provided from AI Gateway");if(a?.codex){let w=a?.codex?.[i];if(w){if(!await o.isModelAvailableForProvider("openai",w))throw new Error(`Model override '${w}' is not available for openai provider`);f=w}}else if(f){if(!await o.isModelAvailableForProvider("openai",f))throw new Error(`Model '${f}' is not available for openai provider`)}else!!de&&await o.isModelAvailableForProvider("openai",de)?(f=de,M.log(`Using default model: ${de}`)):de&&M.log(`Default model ${de} is not available, proceeding without model specification`);Q.env.OPENAI_API_KEY=d,Q.env.OPENAI_BASE_URL=l}else if(!Q.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let p=[],c=[],m=[],h={},T=0,I=0,g,E,R=`${Q.env.NVM_BIN}/node`,N=be.join(Wt.homedir(),".codex"),b=be.join(N,"config.toml"),C=be.join(N,"auth.json");try{await _e.mkdir(N,{recursive:!0});let d={OPENAI_API_KEY:Q.env.OPENAI_API_KEY};await _e.writeFile(C,JSON.stringify(d,null,2),"utf-8"),M.log("Created Codex auth.json file");let l="";try{l=await _e.readFile(b,"utf-8")}catch{}l.includes("web_search_request")||(l.includes("[features]")?l=l.replace(/\[features\]/,`[features]
|
|
16
16
|
web_search_request = true`):l+=`
|
|
17
17
|
[features]
|
|
18
18
|
web_search_request = true
|
|
19
|
-
`,await _e.writeFile(b,l,"utf-8"),M.log("Updated Codex config with web_search_request enabled"))}catch(d){throw M.warn("Failed to setup Codex config and credentials",{error:d.message}),new Error(`Codex setup failed: ${d.message}`)}let O=[ne(s,"codex"),"exec","--yolo","--json","--enable","web_search_request",...
|
|
19
|
+
`,await _e.writeFile(b,l,"utf-8"),M.log("Updated Codex config with web_search_request enabled"))}catch(d){throw M.warn("Failed to setup Codex config and credentials",{error:d.message}),new Error(`Codex setup failed: ${d.message}`)}let O=[ne(s,"codex"),"exec","--yolo","--json","--enable","web_search_request",...f?["--model",f]:[],u].filter(Boolean);M.log(`Running ${R} ${O.join(" ")}`);let A=t.utils.run(R,O,{all:!0,cwd:s,env:{...Q.env}}),x=se(()=>{r?.({steps:p,duration:I}),n?.({steps:c,duration:I}),c=[]},250),G=(d,l)=>{d.id=T,T+=1,m.push(d),p.push(d),c.push(d),l||x.flush(),x(),l&&x.flush()},y=zr.createInterface({input:A.all});return y.on("error",d=>{M.error("Readline interface error",{error:d.message,stack:d.stack})}),y.on("line",d=>{let l=null;try{l=JSON.parse(d)}catch{M.log("Could not parse line",d);return}if(l?.duration_ms&&(I=l.duration_ms),l?.type==="item.started"&&l?.item?.type==="command_execution")h[l.item.id]=l.item;else if(l?.type==="item.completed"&&l?.item?.type==="command_execution"){let w=en(l.item);w&&G(w,!0)}else if(l?.type==="item.completed"&&l?.item?.type==="reasoning"){let w={title:"Reasoning",message:l.item.text};G(w,!0)}else if(l?.type==="local_shell_call")h[l.call_id]=l;else if(l?.type==="local_shell_call_output"){let w=tn(h[l.call_id],l);w&&G(w,!0)}else l?.type==="message"&&l.role==="assistant"?g=l.content.map(w=>w.text).join(`
|
|
20
20
|
`):l?.type==="message"&&l.role==="system"&&(E=l.content.map(w=>w.text).join(`
|
|
21
|
-
`))}),await
|
|
21
|
+
`))}),await A.catch(d=>{let l=Zr({catchError:d,runCmd:A,error:E,result:g,runnerName:"Codex"});E=l.error,g=l.result}),y.close(),x.flush(),{steps:m,duration:I,result:await ie({initialResult:g,agentName:qt,hasError:!!E}),error:ae({error:E,agentName:qt}),isRetryableError:le(E)}}var Kt=async()=>{let e=be.join(Wt.homedir(),".codex");await _e.rm(e,{recursive:!0,force:!0})},Qr=new Set(["bash","-lc"]),en=e=>{if(!e||e.type!=="command_execution")return null;let t=e.command;t.startsWith("bash -lc ")&&(t=t.replace(/^bash -lc ['"]/,"").replace(/['"]$/,""));let r=`Running \`${t}\``,n=e.aggregated_output?.trim();return n&&(n=`\`\`\`
|
|
22
22
|
${n}
|
|
23
23
|
\`\`\``),e.status==="failed"&&e.exit_code!==0&&(n=n?`${n}
|
|
24
24
|
|
|
25
25
|
*Exit code: ${e.exit_code}*`:`*Command failed with exit code: ${e.exit_code}*`),{title:r,message:n}},tn=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(s=>!Qr.has(s)),n=r?`Running \`${r.join(" ")}\``:void 0,o;try{o=JSON.parse(t.output).output?.trim(),o&&(o=`\`\`\`
|
|
26
26
|
${o.trim()}
|
|
27
|
-
\`\`\``)}catch(s){M.error("Could not decode outputMsg",s,t.output)}return{title:n,message:o}};import
|
|
27
|
+
\`\`\``)}catch(s){M.error("Could not decode outputMsg",s,t.output)}return{title:n,message:o}};import Ce from"fs/promises";import Jt from"os";import Pe from"path";import pe from"process";import rn from"readline";var j=_("runner_gemini"),Vt="Gemini CLI",fe="",nn=({catchError:e,runCmd:t,error:r,result:n,runnerName:o})=>(j.log(`${o} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!n,resultLength:n?n.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),n?(j.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:n}:{error:"Process completed with errors but result was captured",result:n}):(j.log("Setting result to undefined because no valid result was captured"),{error:r||`${o} failed`,result:void 0})),on={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"},sn=async()=>{let e=Pe.join(Jt.homedir(),".gemini"),t=Pe.join(e,"settings.json");try{await Ce.mkdir(e,{recursive:!0});let r={};try{let n=await Ce.readFile(t,"utf-8");r=JSON.parse(n)}catch{j.log("Creating new Gemini CLI settings file")}r.general||(r.general={}),r.general.previewFeatures||(r.general.previewFeatures=!0),r.model||(r.model={}),r.model.compressionThreshold!==.3&&(r.model.compressionThreshold=.3),await Ce.writeFile(t,JSON.stringify(r,null,2),"utf-8"),j.log("Configured Gemini CLI settings (preview features and compression threshold)")}catch(r){j.error("Failed to ensure Gemini CLI settings",{error:r.message})}};async function ot({config:e,netlify:t,persistSteps:r=void 0,sendSteps:n=void 0,aiGateway:o,cwd:s=pe.cwd()}){let{accountType:i,prompt:u,modelVersionOverrides:a}=e,{model:f}=e;if(await sn(),o){let{token:y,url:d}=o;if(!y||!d)throw new Error("No token or url provided from AI Gateway");if(a?.gemini){let l=a?.gemini?.[i];if(l){if(!await o.isModelAvailableForProvider("gemini",l))throw new Error(`Model override '${l}' is not available for gemini provider`);f=l}}if(!f)!!fe&&await o.isModelAvailableForProvider("gemini",fe)?(f=fe,j.log(`Using default model: ${fe}`)):fe&&j.log(`Default model ${fe} is not available, proceeding without model specification`);else if(f&&!a?.gemini?.[i]&&!await o.isModelAvailableForProvider("gemini",f))throw new Error(`Model '${f}' is not available for gemini provider`);pe.env.GEMINI_API_KEY=y,pe.env.GOOGLE_GEMINI_BASE_URL=d}else if(!pe.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let p=[],c=[],m=[],h={},T=0,I=0,g,E,R=[ne(s,"gemini"),...f?["--model",f]:[],"--yolo","--output-format","stream-json","-p",u],N=`${pe.env.NVM_BIN}/node`;j.log(`Running ${N} ${R.join(" ")}`);let b=t.utils.run(N,R,{all:!0,env:pe.env,cwd:s});b.stdin?.end();let C=se(()=>{r?.({steps:p,duration:I}),n?.({steps:c,duration:I}),c=[]},250),O=(y,d)=>{y.id=T,T+=1,m.push(y),p.push(y),c.push(y),d||C.flush(),C(),d&&C.flush()},A=rn.createInterface({input:b.all});A.on("error",y=>{j.error("Readline interface error",{error:y.message,stack:y.stack})});let x="",G=()=>{x&&O({message:x.trim()}),x=""};return A.on("line",y=>{let d=null;try{if(y.startsWith("[API Error")){let l=y.match(/\[api error: (.+?)]$/i)?.[1];d={type:"error",value:Se(l,!1)?.error?.message||l||"Gemini encountered error"}}else 
d=JSON.parse(y)}catch{return}if(d)switch(["message","result"].includes(d.type)||G(),d.type){case"message":{d.role!=="user"&&d.content&&(x+=d.content);break}case"tool_use":{let l=on[d.tool_name]??d.tool_name,w=d.parameters?.file_path,X=w&&Pe.relative(s,w),Ie=d.parameters?.command,V={title:[l,X&&`\`${X}\``,Ie&&`\`${Ie}\``].filter(Boolean).join(" ")};h[d.tool_id]=V,C.flush();break}case"tool_result":{let l=h[d.tool_id];l&&(d.output&&(l.message=`\`\`\`
|
|
28
28
|
${d.output.trim()}
|
|
29
|
-
\`\`\``),O(l,!0));break}case"result":{I=d.stats?.duration_ms,d.status==="error"?E=d.error?.message:g=x.trim();break}case"error":{E=d.error;break}case"finished":break;default:{j.warn("Unhandled message type:",d.type);break}}}),await b.catch(y=>{({error:E,result:g}=nn({catchError:y,runCmd:b,error:E,result:g,runnerName:"Gemini"}))}),
|
|
29
|
+
\`\`\``),O(l,!0));break}case"result":{I=d.stats?.duration_ms,d.status==="error"?E=d.error?.message:g=x.trim();break}case"error":{E=d.error;break}case"finished":break;default:{j.warn("Unhandled message type:",d.type);break}}}),await b.catch(y=>{({error:E,result:g}=nn({catchError:y,runCmd:b,error:E,result:g,runnerName:"Gemini"}))}),A.close(),C.flush(),{steps:m,duration:I,result:await ie({initialResult:g,agentName:Vt,hasError:!!E}),error:ae({error:E,agentName:Vt}),isRetryableError:le(E)}}var Xt=async()=>{let e=Pe.join(Jt.homedir(),".gemini");await Ce.rm(e,{recursive:!0,force:!0})};var an={codex:{runner:nt,clean:Kt},claude:{runner:rt,clean:Ht},gemini:{runner:ot,clean:Xt}},zt=an;var Zt=_("init_stage"),Qt=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:n})=>await S(ln(),"init-stage",async o=>{let s=performance.now();o?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.validateAgent":e.validateAgent,"init.runnerVersion":n||"unknown"});let i=zt[e.runner];if(!i)throw o?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let u=cn({apiToken:r});ht(u);let a=e.useGateway?await It({netlify:u,config:e}):void 0;o?.setAttributes({"init.aiGateway.created":!!a}),e.validateAgent&&e.errorLogsPath&&o?.setAttributes({"init.errorLogsPath":e.errorLogsPath});let f=5*1024,p=Pt(async({steps:T=[],duration:I})=>{let g=T.map(E=>{let R=E.title?Xe(W(E.title),f):void 0,N=E.message?Xe(W(E.message)):void 0;return{...E,title:R,message:N}});T.length=0;try{return await H(e.id,e.sessionId,{steps:g,duration:I})}catch(E){Zt.error("persistSteps failed",{error:E?.message||E})}},t);Zt.info("Adding build files to stage");let c=await Qe();await ze(c);let m;e.hasRepo?e.sha?(m=e.sha,o?.setAttributes({"init.sha.source":"provided"})):(m=await Ut(),await he(e.id,{sha:m}),o?.setAttributes({"init.sha.source":"current_commit"})):(m=await Mt(),o?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let h=performance.now()-s;return o?.setAttributes({"init.sha":m||"unknown","init.duration.ms":h,"init.status":"success"}),{aiGateway:a,context:u,persistSteps:p,runner:i,sha:m}}),cn=({apiToken:e})=>({constants:{NETLIFY_API_HOST:Oe.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||Oe.env.NETLIFY_API_TOKEN,SITE_ID:Oe.env.SITE_ID,FUNCTIONS_DIST:Oe.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:F}});import{getTracer as st}from"@netlify/otel";import un from"crypto";import J from"fs/promises";import $ from"path";import Y from"process";var L=_("context"),dn=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:Y.env.NETLIFY_TEAM_ID,userId:Y.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:Y.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},pn=10,fn=async e=>{let{name:t,ext:r}=$.parse(e),n=e,o=$.join(Y.cwd(),k,n),s=0;for(;await gn(o);){if(s>=pn)throw new Error("Failed to generate context file");n=`${t}-${un.randomUUID().slice(0,5)}${r}`,o=$.join(Y.cwd(),k,n),s+=1}return n},gn=async e=>{try{return await J.access(e),!0}catch{return!1}},mn=async()=>{try{L.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return L.warn("Invalid response structure: missing or invalid consumers array"),null;let 
r=t.consumers.find(n=>n&&typeof n=="object"&&n.key==="catchall-consumer");return r?!r.contextScopes||typeof r.contextScopes!="object"?(L.warn("Catchall consumer missing or invalid contextScopes"),null):r:(L.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?L.warn("Netlify features context request timed out"):L.warn("Failed to fetch Netlify features context:",e.message),null}},hn=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let n=await r.text();return await J.writeFile(t,n,"utf-8"),!0}catch(r){return r.name==="AbortError"?L.warn(`Download timeout for ${e}`):L.warn(`Failed to download context file ${e}:`,r.message),!1}},Fe=null,yn=async()=>{if(Fe)return Fe;let e=await mn();if(!e)return[];let t=$.join(Y.cwd(),k,He);await J.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([o,s])=>{if(!s||typeof s!="object"||!s.endpoint||!s.scope)return L.warn(`Invalid scope data for ${o}, skipping...`),null;let i=`${o}.md`,u=$.join(t,i),a=$.join(k,He,i);return L.log(`Downloading ${s.scope} context...`),await hn(s.endpoint,u)?(L.log(`Downloaded: ${a}`),{scope:s.scope,path:a,key:o}):null});return Fe=(await Promise.all(r)).filter(o=>o!==null),Fe},er=async({cliPath:e,netlify:t,config:r,buildErrorContext:n})=>{let o=dn(t),s=await fn(St),i=$.join(Y.cwd(),k);await J.mkdir(i,{recursive:!0});let u=$.join(k,s),a=$.join(Y.cwd(),u),f=$.join(Y.cwd(),k,oe);try{await J.unlink(f),L.log(`Deleted old results file: ${f}`)}catch{}let p=n?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
|
|
30
30
|
Your task is to analyze and fix the build errors.
|
|
31
31
|
Don't revert changes. Apply fixes that address the errors.
|
|
32
32
|
Don't try to run the build yourself. Just fix the errors.
|
|
@@ -39,22 +39,22 @@ ${r.siteContext.filter(g=>g.site_context).map(g=>typeof g.site_context=="string"
|
|
|
39
39
|
|
|
40
40
|
`)}
|
|
41
41
|
</project_rules>
|
|
42
|
-
`);let m="";if(r.sessionHistoryContext?.length){let g=$.join(Y.cwd(),k,Be);await J.mkdir(g,{recursive:!0});let E=await Promise.all(r.sessionHistoryContext.map(async(
|
|
42
|
+
`);let m="";if(r.sessionHistoryContext?.length){let g=$.join(Y.cwd(),k,Be);await J.mkdir(g,{recursive:!0});let E=await Promise.all(r.sessionHistoryContext.map(async(R,N)=>{let b=N+1,C=`attempt-${b}.md`,O=$.join(g,C),A=$.join(k,Be,C),x=`# Task History - Attempt ${b}
|
|
43
43
|
|
|
44
44
|
## Request - what the user asked for
|
|
45
|
-
${
|
|
45
|
+
${R.request}
|
|
46
46
|
|
|
47
47
|
---
|
|
48
48
|
|
|
49
49
|
## Response - what the agent replied with after its work
|
|
50
50
|
|
|
51
|
-
${
|
|
52
|
-
`;return await J.writeFile(O,x,"utf-8"),L.log(`Created history file: ${
|
|
51
|
+
${R.response}
|
|
52
|
+
`;return await J.writeFile(O,x,"utf-8"),L.log(`Created history file: ${A}`),A}));m+=`
|
|
53
53
|
<session_history_context>
|
|
54
54
|
History of prior work on this task.
|
|
55
55
|
You MUST review ALL of the files below to understand the context of previous attempts. Use this information to continue the discussion appropriately.
|
|
56
56
|
|
|
57
|
-
${E.slice(-5).map(
|
|
57
|
+
${E.slice(-5).map(R=>`- ${R}`).join(`
|
|
58
58
|
`)}
|
|
59
59
|
|
|
60
60
|
</session_history_context>
|
|
@@ -75,7 +75,7 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
|
|
|
75
75
|
<user_request>
|
|
76
76
|
${r.prompt}
|
|
77
77
|
</user_request>
|
|
78
|
-
${
|
|
78
|
+
${p}
|
|
79
79
|
</request>
|
|
80
80
|
|
|
81
81
|
<requirements>
|
|
@@ -123,21 +123,21 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
|
|
|
123
123
|
<user_request>
|
|
124
124
|
${r.prompt}
|
|
125
125
|
</user_request>
|
|
126
|
-
${
|
|
126
|
+
${p}
|
|
127
127
|
</request>
|
|
128
128
|
|
|
129
129
|
Use the following file for the complete context of the ask, the environment, and what's available. ${a} You MUST READ ALL OF IT. Make sure to read it first. Never cite or paraphrase private context.
|
|
130
|
-
`),I};var _n=_("prompt"),
|
|
131
|
-
${
|
|
130
|
+
`),I};var _n=_("prompt"),tr=async({cliPath:e,config:t,netlify:r,buildErrorContext:n})=>{let o=await er({cliPath:e,config:t,netlify:r,buildErrorContext:n});return process.env.AGENT_RUNNER_DEBUG&&_n.log("Contextful Prompt:",o),{prompt:o}};var Le=_("inference_stage"),rr=5,De=async e=>{let{cliPath:t,config:r,context:n,buildErrors:o,runner:s,persistSteps:i,aiGateway:u,attempt:a,contextPrefix:f,priorAgentSessionId:p}=e;Le.log(`Running inference stage, attempt ${a} of ${rr}`);let c=await S(st(),"inference-stage",async m=>{m?.setAttributes({"inference.attempt":a||1}),xt();let{prompt:h}=await S(st(),"compose-prompt",async()=>await tr({cliPath:t,config:r,buildErrorContext:En(o),netlify:n})),T=`
|
|
131
|
+
${f||""}
|
|
132
132
|
${h}
|
|
133
|
-
`.trim(),I={...r,prompt:T},g=await S(st(),`run-${r.runner}`,async()=>await s({aiGateway:u,config:I,netlify:n,persistSteps:i,continueSession:!!(a&&a>1),priorAgentSessionId:
|
|
133
|
+
`.trim(),I={...r,prompt:T},g=await S(st(),`run-${r.runner}`,async()=>await s({aiGateway:u,config:I,netlify:n,persistSteps:i,continueSession:!!(a&&a>1),priorAgentSessionId:p}));return g.result&&(g.result=W(g.result)),g.error&&(g.error=W(g.error)),await i.flush(),g});if(c.error){if(Le.error("Runner failed",{stepsCount:c.steps.length,duration:c.duration,error:c.error,isRetryableError:c.isRetryableError,attempt:a||1,agentSessionId:c.agentSessionId}),c.isRetryableError&&(!a||a<rr))return Le.log("Retrying inference stage"),await new Promise(h=>setTimeout(h,5e3)),{runnerResult:(await De({...e,attempt:(a||1)+1,priorAgentSessionId:c.agentSessionId,contextPrefix:c.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw Le.log("Do not retry inference stage"),new Error(c.error)}return{runnerResult:c}},En=e=>!e||e.length===0?"":`
|
|
134
134
|
Deploy failed. Here are the errors to review on the latest build:
|
|
135
135
|
|
|
136
136
|
Below are all of the logs with potential issues that we extracted. Some of them may be false positives; discern them carefully and ensure fixes are relevant.
|
|
137
137
|
|
|
138
138
|
${e.pop()}
|
|
139
|
-
`;import In from"process";import{getTracer as it}from"@netlify/otel";import{getTracer as wn}from"@netlify/otel";var Ee=_("deploy"),
|
|
140
|
-
Preview deploy created successfully:`,{deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id});let m={deployId:c.deploy_id,previewUrl:c.deploy_url,logsUrl:c.logs,siteId:c.site_id};return t||(m.sourceZipFilename=c.source_zip_filename),m}catch(a){throw Ee.error("Failed to create preview deploy via CLI:",a),u?.setAttributes({success:!1,error:a.message}),a}};var we=_("deploy_stage"),at=async e=>await S(it(),"run-deploy-stage",async()=>xn(e)),xn=async({cliPath:e,config:t,context:r,result:n,filter:o,isRetry:s})=>{let i=await S(it(),"get-runner-diffs",async()=>await
|
|
139
|
+
`;import In from"process";import{getTracer as it}from"@netlify/otel";import{getTracer as wn}from"@netlify/otel";var Ee=_("deploy"),nr=async e=>await S(wn(),"create-preview-deploy",async t=>Tn(e,t)),Tn=async({netlify:e,hasRepo:t,skipBuild:r,message:n="Agent Preview",deploySubdomain:o,cliPath:s,filter:i},u)=>{try{let a=["deploy","--message",`"${n}"`,"--json","--draft","--verbose"];t||(Ee.log("Deploy: Uploading source zip"),a.push("--upload-source-zip")),o&&a.push("--alias",o),i&&a.push("--filter",i),r?(Ee.log("Deploy: Skipping build"),a.push("--no-build")):a.push("--context","deploy-preview");let f=s||"netlify";Ee.log(`Running: ${f} ${a.join(" ")}`),u?.setAttributes({cmd:f,args:a});let p=await e.utils.run(f,a,{stdio:["ignore","pipe","pipe"]}),c=JSON.parse(p.stdout.trim());u?.setAttributes({success:!0,deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id}),Ee.log(`
|
|
140
|
+
Preview deploy created successfully:`,{deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id});let m={deployId:c.deploy_id,previewUrl:c.deploy_url,logsUrl:c.logs,siteId:c.site_id};return t||(m.sourceZipFilename=c.source_zip_filename),m}catch(a){throw Ee.error("Failed to create preview deploy via CLI:",a),u?.setAttributes({success:!1,error:a.message}),a}};var we=_("deploy_stage"),at=async e=>await S(it(),"run-deploy-stage",async()=>xn(e)),xn=async({cliPath:e,config:t,context:r,result:n,filter:o,isRetry:s})=>{let i=await S(it(),"get-runner-diffs",async()=>await $t({config:t,isRetry:s}));if(we.info("Resolved git",{hasChanges:i.hasChanges,ignored:i.ignored??[]}),!i.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:u,resultDiff:a,diffBinary:f,resultDiffBinary:p}=i,c=!0;we.log("Preview deploy condition check:",{resultUndefined:n===void 0,resultType:typeof n,hasChanges:c,wouldCreatePreview:n!==void 0&&c});let m=null;if(n!==void 0&&c)try{let h;try{let T=await S(it(),"get-runner-session",async()=>await _t(t.id,t.sessionId));T?.title&&(h=T.title)}catch(T){we.warn("Failed to fetch session title, using fallback message:",T.message)}await H(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),m=await nr({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:h,skipBuild:!1,deploySubdomain:Ot(t.id,In.env.SITE_NAME),filter:o})}catch(h){return we.warn("Failed to create preview deploy (continuing with agent run):",h),{diff:u,resultDiff:a,hasChanges:c,previewInfo:null,diffBinary:f,resultDiffBinary:p,deployError:h instanceof Error?h.message:String(h)}}return we.log("Git status",{hasDiff:!!u,hasChanges:c}),{diff:u,resultDiff:a,hasChanges:c,previewInfo:m,diffBinary:f,resultDiffBinary:p}};import{getTracer as Te}from"@netlify/otel";async function lt(e,t){let{maxRetries:r,baseDelay:n,onRetry:o}=t,s;for(let i=1;i<=r;i++)try{return await e()}catch(u){if(s=u,i===r)throw s;o&&o(i,s),await new Promise(a=>setTimeout(a,n*i))}throw s}var $e=class{scanDiffForForms(t){let r=[],n=null,o=[],s=t.split(`
|
|
141
141
|
`);for(let i of s)if(i.startsWith("diff --git")){if(n&&o.length>0){let a=this.containsNetlifyForm(o,n);a&&r.push(a)}let u=i.split(" ");n=u[u.length-1].replace(/^b\//,""),o=[]}else i.startsWith("+")&&!i.startsWith("+++")&&o.push(i.slice(1));if(n&&o.length>0){let i=this.containsNetlifyForm(o,n);i&&r.push(i)}return{detected:r.length>0,matches:r}}containsNetlifyForm(t,r){let n=t.join(`
|
|
142
|
-
`),o=[{pattern:/<[\w-]*form[\s\S]*?(data-)?netlify/i,name:"standard form element"},{pattern:/<[A-Z][\w]*[\s\S]*?component\s*=\s*["']form["'][\s\S]*?(data-)?netlify/i,name:"component with form prop"}];for(let{pattern:s,name:i}of o){let u=n.match(s);if(u){let a=u.index||0,
|
|
142
|
+
`),o=[{pattern:/<[\w-]*form[\s\S]*?(data-)?netlify/i,name:"standard form element"},{pattern:/<[A-Z][\w]*[\s\S]*?component\s*=\s*["']form["'][\s\S]*?(data-)?netlify/i,name:"component with form prop"}];for(let{pattern:s,name:i}of o){let u=n.match(s);if(u){let a=u.index||0,f=Math.max(0,a-20),p=Math.min(n.length,a+u[0].length+20),c=n.slice(f,p).trim();return c=c.replace(/\s+/g," "),c.length>100&&(c=c.slice(0,97)+"..."),{file:r,snippet:`[${i}] ${c}`}}}return null}};var v=_("cleanup_stage"),or=async e=>await S(Te(),"cleanup-stage",async()=>Rn(e)),ct=1024*1024*10,Rn=async({config:e,diff:t,result:r,duration:n,resultDiff:o,diffBinary:s,resultDiffBinary:i,previewInfo:u})=>{let a={result:r||"Done",duration:n};u&&u.deployId&&(a.deploy_id=u.deployId),u&&u.sourceZipFilename&&(a.result_zip_file_name=u.sourceZipFilename);let f=t||s||o||i;if(f&&(a.diff_produced=!0),process.env.SITE_ID==="def61649-ad41-4d63-a478-8496a919443a"&&f)return v.log("Test site detected - skipping diff upload to test loss detection"),await lt(async()=>await S(Te(),"update-runner-session",()=>H(e.id,e.sessionId,a)),{maxRetries:3,baseDelay:1e3,onRetry:(p,c)=>{v.error(`Error updating agent runner session (attempt ${p}):`,c),v.log("Retrying...")}}),{sessionUpdate:a};if(f){let p=new $e,c=t||s||"",m=p.scanDiffForForms(c);m.detected?(v.log("Detected Netlify form(s) in diff:"),m.matches.forEach(({file:h,snippet:T})=>{v.log(` - ${h}: ${T}`)}),a.has_netlify_form=!0):v.log("Did not detect Netlify form(s) in diff"),v.log("Did not detect Netlify form(s) in diff")}if(f)try{v.log("Getting pre-signed URLs for diff upload");let p=await wt(e.id,e.sessionId),c=[];(t||s)&&c.push(Ge(p.result.upload_url,s||t).then(()=>{a.result_diff_s3_key=p.result.s3_key,v.log("Successfully uploaded result_diff to S3")})),(o||i)&&c.push(Ge(p.cumulative.upload_url,i||o).then(()=>{a.cumulative_diff_s3_key=p.cumulative.s3_key,v.log("Successfully uploaded cumulative_diff to S3")})),v.log(`Uploading ${c.length} diff(s) to S3 in parallel`),await Promise.all(c),(o||i)&&(v.log("Updating agent runner with cumulative diff S3 key"),await S(Te(),"update-runner",async()=>{await he(e.id,{result_diff_s3_key:p.cumulative.s3_key})}))}catch(p){v.error("S3 upload failed, falling back to inline diffs:",p);let c=Buffer.byteLength(t||s||""),m=Buffer.byteLength(i||o||"");if(c>ct||m>ct){let h=`Diffs exceed maximum inline size of ${ct} bytes.`;throw v.error(h),new Error(h)}a.result_diff=t,a.result_diff_binary=s,(o||i)&&(a.cumulative_diff=o,a.cumulative_diff_binary=i,v.log("Updating agent runner with inline diffs (fallback)"),await S(Te(),"update-runner",async()=>{await he(e.id,{result_diff:o,result_diff_binary:i})}))}else v.log("No diffs to upload");return v.log("Updated agent runner with result"),await lt(async()=>await S(Te(),"update-runner-session",()=>H(e.id,e.sessionId,a)),{maxRetries:3,baseDelay:1e3,onRetry:(p,c)=>{v.error(`Error updating agent runner session (attempt ${p}):`,c),v.log("Retrying...")}}),v.log("Finished updating agent runner with result"),{sessionUpdate:a}};import{getTracer as sr,shutdownTracers as Nn,withActiveSpan as ir}from"@netlify/otel";var An=vn(import.meta.url),ar=An("../package.json"),lr=_("pipeline_index"),ke=3,cr=async({config:e,apiToken:t,cliPath:r="netlify",cwd:n,errorLogsPath:o,filter:s,tracing:i={}})=>{let u,{withStageTimer:a}=Rt(Z.timeUnits.hours(4)),f=await gt(ar.version,e.id,i);try{await ir(sr(),"run-pipeline",{},f,async()=>{let{aiGateway:p,context:c,persistSteps:m,runner:h,sha:T}=await 
a("init",()=>Qt({config:e,apiToken:t,cliPath:r,cwd:n,errorLogsPath:o,filter:s,runnerVersion:ar.version}),Z.timeUnits.minutes(10));u=h.clean,e.sha=T;let{runnerResult:I}=await a("inference",()=>De({cliPath:r,config:e,context:c,runner:h.runner,persistSteps:m,aiGateway:p}));await H(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let g=await a("deploy",()=>at({cliPath:r,config:e,context:c,result:I.result,filter:s,isRetry:!1})),E=I,R=[];if(g.hasChanges&&g.deployError){R.push(mt(g.deployError));let x=1;for(;x<=ke&&!g.previewInfo;)lr.log(`Deploy attempt had errors. Retrying. ${x}/${ke}`),await ir(sr(),"deploy-stage",async G=>{G?.setAttributes({"stage.attempt":x});let{runnerResult:y}=await a(`inference-retry-${x}`,()=>De({cliPath:r,config:e,context:c,runner:h.runner,persistSteps:m,aiGateway:p,buildErrors:R,priorAgentSessionId:I.agentSessionId}));E={...y,steps:[...E.steps||[],...y.steps||[]],duration:(E.duration||0)+(y.duration||0)},g=await a(`deploy-retry-${x}`,()=>at({cliPath:r,config:e,context:c,result:y.result,filter:s,isRetry:!0})),g.deployError&&R.push(g.deployError),x++});x>ke&&!g.previewInfo&&console.warn(`Deploy validation failed after ${ke} attempts`)}let{diff:N,resultDiff:b,previewInfo:C,diffBinary:O,resultDiffBinary:A}=g;await a("cleanup",()=>or({config:e,diff:N,result:E.result,duration:E.duration,resultDiff:b,diffBinary:O,resultDiffBinary:A,previewInfo:C}),Z.timeUnits.minutes(10)),process.env.NETLIFY_LOCAL_MODE||(await u?.(),await Gt())})}catch(p){lr.error("Got error while running pipeline",p),await u?.();let c=p instanceof Error&&p.message;throw await H(e.id,e.sessionId,{result:c||"Encountered error when running agent",state:"error"}),p}finally{await Nn()}};import D from"process";var bn="claude",Cn=e=>(e??[]).filter(t=>t.request&&t.response),Pn=e=>(e??[]).filter(t=>t.site_context),ur=_("config"),dr=()=>{let e=D.env.NETLIFY_AGENT_RUNNER_ID,t=D.env.NETLIFY_AGENT_RUNNER_SESSION_ID;if(!e||!t)throw new Error("ID of agent runner is not provided");let r=D.env.NETLIFY_AGENT_RUNNER_RESULT_BRANCH,n=D.env.NETLIFY_AGENT_RUNNER_PROMPT;if(!n)throw new Error("Prompt is not provided");let o=D.env.NETLIFY_AGENT_RUNNER_AGENT||bn,s=D.env.NETLIFY_AGENT_RUNNER_MODEL,i=D.env.NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED==="1",u=D.env.NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED==="1",a=D.env.ERROR_LOGS_PATH,f=Se(D.env.NETLIFY_AGENT_RUNNER_CONTEXT,!0,ur),p=Cn(f),c=Pn(f),m=D.env.NETLIFY_AGENT_RUNNER_HAS_REPO!=="0",h=!D.env.NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED,T=D.env.NETLIFY_AGENT_RUNNER_SHA,I=On(),g=Ft(),E={id:e,sessionId:t,resultBranch:r,prompt:n,runner:o,model:s,validateAgent:i,errorLogsPath:a,sessionHistoryContext:p,siteContext:c,hasRepo:m,useGateway:h,sha:T,accountType:I,validateAgentWithBuild:u,modelVersionOverrides:g};return ur.log({fullConfig:E}),E},On=()=>{let e=D.env.NETLIFY_TEAM_TYPE;return e?e.includes("personal")?Ke:e.includes("pro")?"pro":e.startsWith("enterprise")?Ve:e.startsWith("free")?Je:We:We};var pr=_("bin_cmd"),ee=Fn(ut.argv.slice(2),{string:["auth","cwd","cli-path","error-logs-path","filter","trace-exporter-url","traceparent"]});try{let e=dr();await cr({config:e,apiToken:ee.auth,cwd:ee.cwd,cliPath:ee["cli-path"],errorLogsPath:ee["error-logs-path"],filter:ee.filter,tracing:{exporterUrl:ee["trace-exporter-url"],traceparent:ee.traceparent}}),pr.info("Finished agent"),ut.exit(0)}catch(e){pr.error("Error running agent pipeline:",e),ut.exit(1)}
|
|
143
143
|
//# sourceMappingURL=bin.js.map
|
package/dist/index.js
CHANGED
|
@@ -1,31 +1,31 @@
|
|
|
1
|
-
import{createRequire as dn}from"module";import{createTracerProvider as er}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as
|
|
2
|
-
`),r=[],n=-1,
|
|
3
|
-
`)),n=p,
|
|
1
|
+
import{createRequire as dn}from"module";import{createTracerProvider as er}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as nt}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as tr}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as rr}from"@netlify/otel";import{propagation as st,context as ot,W3CTraceContextPropagator as nr}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as sr}from"@opentelemetry/exporter-trace-otlp-grpc";import Qt from"process";function _(e){let t=Qt.env.LOG!=="0";return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var De=_("tracing"),it=async(e,t,r)=>(await er({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new nt(new ke),new nt(new sr({url:r.exporterUrl}))],instrumentations:[new tr({skipHeaders:!0})]}),r.traceparent?(st.setGlobalPropagator(new nr),st.extract(ot.active(),{traceparent:r.traceparent,isRemote:!0})):ot.active());function S(e,t,r){return De.log(`\u23F3 TRACE: ${t} starting...`),rr(e,t,r)}var ke=class{export(t,r){for(let n of t)this.logSpan(n);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,n=t.attributes,s=[];for(let[u,a]of Object.entries(n))u.includes("duration")&&typeof a=="number"?s.push(`${u}=${a.toFixed(2)}ms`):s.push(`${u}=${a}`);let o=t.status?.code===2?"\u274C":"\u2705",i=s.length>0?` [${s.join(", ")}]`:"";De.log(`${o} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${i}`),t.status?.code===2&&t.status.message&&De.log(` \u274C Error: ${t.status.message}`)}};var or=["error","failed","exception","fatal","panic","abort","crash"];function at(e){let t=e.split(`
|
|
2
|
+
`),r=[],n=-1,s=0;for(;s<t.length;){let u=t[s].slice(0,500).toLowerCase();if(or.some(f=>u.includes(f))){let f=Math.max(0,s-10,n+1),p=Math.min(t.length-1,s+20),c=[];for(let m=f;m<=p;m++)c.push(t[m]);r.push(c.join(`
|
|
3
|
+
`)),n=p,s=p+1}else s++}if(r.length===0)return e;let o=r.map((i,u)=>`<extracted_error_chunk order="${u+1}">
|
|
4
4
|
${i}
|
|
5
5
|
</extracted_error_chunk>`).join(`
|
|
6
6
|
|
|
7
|
-
`);return s.length>e.length*.8?e:s}import Se from"process";import{getTracer as Wr}from"@netlify/otel";import pe from"process";var we=pe.env.NETLIFY_API_URL,xe=pe.env.NETLIFY_API_TOKEN,Y=_("api"),Te=()=>pe.env.NETLIFY_LOCAL_MODE==="true",fe=async(e,t={})=>{if(!we||!xe)throw new Error("No API URL or token");let r=new URL(e,we),n={...t,headers:{...t.headers,Authorization:`Bearer ${xe}`}};pe.env.AGENT_RUNNERS_DEBUG==="true"&&(n.headers["x-nf-debug-logging"]="true"),t.json&&(n.headers||={},n.headers["Content-Type"]="application/json",n.body=JSON.stringify(t.json));let o=await fetch(r,n),s=o.ok&&o.status<=299;if(pe.env.AGENT_RUNNERS_DEBUG==="true")Y.log(`Response headers for ${r}:`),o.headers.forEach((u,a)=>{Y.log(` ${a}: ${u}`)});else{let u=o.headers.get("x-request-id")||o.headers.get("x-nf-request-id");Y.log(`Request ID for ${r}: ${u||"N/A"}`)}if(s||Y.error(`Got status ${o.status} for request ${r}`),t.raw){if(!s)throw o;return o}let i=await(o.headers.get("content-type")?.includes("application/json")?o.json():o.text());if(!s)throw i;return i},at=e=>{Y.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(we=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(xe=e.constants.NETLIFY_API_TOKEN)},lt=()=>({apiUrl:we,token:xe}),ge=async(e,t)=>Te()?(Y.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):fe(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),H=async(e,t,r)=>Te()?(Y.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):fe(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var ct=async(e,t)=>Te()?(Y.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):fe(`/api/v1/agent_runners/${e}/sessions/${t}`),ut=(e,t,r)=>fe(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r}}),dt=async(e,t)=>Te()?(Y.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):fe(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),Le=async(e,t)=>{Y.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var Q=_("ai_gateway"),Ue=null;var pt=async()=>{if(Ue)return Ue;Q.log("Fetching available AI gateway providers");let e=await fetch(`${lt().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return Ue=t,Q.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},ir=async(e,t)=>{let n=(await pt()).providers[e];if(!n)return Q.log(`Provider '${e}' not found`),!1;let o=n.models.includes(t);return Q.log(`Model validation for ${e}/${t}`,{isAvailable:o}),o},ft=async({netlify:e,config:t})=>{let r,n,o,s,i=e.constants?.SITE_ID;if(!i)throw new Error("No site id");let u=async()=>{clearTimeout(o),Q.log("Requesting AI gateway information");let a=await ut(i,t.id,t.sessionId);if({token:r,url:s}=a,n=a.expires_at?a.expires_at*1e3:void 0,Q.log("Got AI gateway information",{token:!!r,expiresAt:n,url:s}),n){let 
f=n-Date.now()-6e4;f>0&&(o=setTimeout(()=>{u()},f))}};return await Promise.all([u(),pt()]),{get url(){return s},get token(){return r},isModelAvailableForProvider:ir}};import B from"process";import K from"path";import Ie from"fs";import{fileURLToPath as fr}from"url";import{createRequire as gr}from"module";import{execa as mr,execaCommand as Yn}from"execa";import{Transform as ar}from"stream";var lr=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","ERROR_LOGS_PATH","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),cr=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function ur(){return Object.entries(process.env).filter(([e,t])=>!(!t||lr.has(e)||cr.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function q(e){if(typeof e!="string")return e;let t=ur();if(t.length===0)return e;let r=e;return t.forEach(n=>{let o=new RegExp(dr(n),"g");r=r.replace(o,"******")}),r}function dr(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var ee=class extends ar{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,n){let o=t.toString(),s=q(o);n(null,s)}};function gt(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(n,o,s){let i=typeof n=="string"?q(n):n;return typeof o=="function"?t(i,o):t(i,o,s)},process.stderr.write=function(n,o,s){let i=typeof n=="string"?q(n):n;return typeof o=="function"?r(i,o):r(i,o,s)}}var me=null,mt=e=>(me&&me.destroy(),me=new z({totalAllowedTime:e}),me),ht=()=>me;var z=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,n)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let o=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),s=null,i=null;n!==void 0&&(i=new Promise((u,a)=>{s=setTimeout(()=>{a(new Error(`${t} stage exceeded its maximum duration of ${n}ms`))},n)}));try{return i?await Promise.race([r(),i]):await r()}finally{o(),s&&clearTimeout(s)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var yt={name:"@netlify/agent-runner-cli",type:"module",version:"1.0.0-broken",description:"CLI tool for running Netlify agents",main:"./dist/index.js",types:"./dist/index.d.ts",exports:"./dist/index.js",bin:{"agent-runner-cli":"./dist/bin.js","agent-runner-cli-local":"./dist/bin-local.js"},files:["dist/**/*.js","dist/**/*.d.ts","patches","scripts"],scripts:{build:"tsup",dev:"tsup --watch",prepare:"husky install node_modules/@netlify/eslint-config-node/.husky/",prepublishOnly:"npm ci && npm test",prepack:"npm run build",test:"run-s build format test:dev",format:"run-s build format:check-fix:*","format:ci":"run-s build format:check:*","format:check-fix:lint":"run-e format:check:lint format:fix:lint","format:check:lint":"cross-env-shell eslint $npm_package_config_eslint","format:fix:lint":"cross-env-shell eslint --fix $npm_package_config_eslint","format:check-fix:prettier":"run-e format:check:prettier format:fix:prettier","format:check:prettier":"cross-env-shell prettier --check $npm_package_config_prettier","format:fix:prettier":"cross-env-shell prettier --write $npm_package_config_prettier","test:dev":"run-s build test:dev:*","test:ci":"run-s build test:ci:*","test:dev:vitest":"LOG=0 vitest --exclude '**/integration/**'","test:ci:vitest":"LOG=0 c8 -r lcovonly -r text -r json vitest --exclude '**/integration/**'","test:integration":"vitest run test/integration/","test:integration:codex":"vitest run test/integration/codex.test.ts","test:integration:claude":"vitest run test/integration/claude.test.ts","test:integration:gemini":"vitest run test/integration/gemini.test.ts",postinstall:"node scripts/postinstall.js"},config:{eslint:'--cache --format=codeframe --max-warnings=0 "{src,scripts,test,.github}/**/*.{js,ts,md,html}"',prettier:'--ignore-path .gitignore --loglevel=warn "{src,scripts,test,.github}/**/*.{js,ts,md,yml,json,html}" "*.{js,ts,yml,json,html}" ".*.{js,ts,yml,json,html}" 
"!**/package-lock.json" "!package-lock.json"'},keywords:[],license:"MIT",repository:"netlify/agent-runner-cli",bugs:{url:"https://github.com/netlify/agent-runner-cli/issues"},author:"Netlify Inc.",directories:{test:"test"},devDependencies:{"@commitlint/cli":"^19.0.0","@commitlint/config-conventional":"^19.0.0","@eslint/compat":"^1.3.2","@eslint/js":"^9.35.0","@netlify/eslint-config-node":"^7.0.1","@types/node":"^24.5.0","@typescript-eslint/eslint-plugin":"^7.1.0","@typescript-eslint/parser":"^7.1.0","@vitest/eslint-plugin":"^1.3.10",c8:"^9.0.0","eslint-config-prettier":"^10.1.8","eslint-plugin-n":"^17.0.0",husky:"^8.0.0","patch-package":"^8.0.0",tsup:"^8.5.0",typescript:"^5.0.0","typescript-eslint":"^8.44.0",vitest:"^1.5.0"},dependencies:{"@anthropic-ai/claude-code":"2.0.69","@google/gemini-cli":"0.20.2","@netlify/otel":"^5.1.1","@openai/codex":"0.72.0","@opentelemetry/exporter-trace-otlp-grpc":"^0.57.0",execa:"^8.0.0","get-port":"^5.1.1",minimist:"^1.2.8"}};var hr=fr(import.meta.url),yr=K.dirname(hr),_r=gr(import.meta.url),ve=_("shell"),Me=new Set,Er={preferLocal:!0},F=(e,t,r)=>{let[n,o]=wr(t,r),s={...Er,...o},i=mr(e,n,s);return xr(i,s),Ir(i),i};var wr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},xr=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(B.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new ee).pipe(B.stdout),e.stdout?.pipe(new ee).pipe(B.stdout),e.stderr?.pipe(new ee).pipe(B.stderr);return}e.stdout?.pipe(B.stdout),e.stderr?.pipe(B.stderr)},_t=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(B.kill(-e.pid,t),ve.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return ve.error("Error killing process:",r),!1}},Tr=e=>_t(e,"SIGKILL"),Ir=e=>{Me.add(e);let t=ht();if(t){let r=t.onTimesUp(()=>{ve.log(`Global timer expired, killing process ${e.pid}`),_t(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(ve.log(`Force killing process ${e.pid} after timeout`),Tr(e))},5e3)});e.on("exit",()=>{Me.delete(e),r()}),e.on("error",()=>{Me.delete(e),r()})}};function te(e,t){if(!B.env.NETLIFY_LOCAL_MODE)try{let o=_r.resolve(yt.name),s=K.dirname(o);for(;s!==K.dirname(s);){let i=K.dirname(s);if(K.basename(i)==="node_modules"){let u=K.join(i,".bin",t);if(Ie.existsSync(u))return u;break}s=i}}catch(o){console.error("Could not resolve package.json",o)}if(B.env.NODE_PATH){let o=K.join(B.env.NODE_PATH,".bin",t);if(Ie.existsSync(o))return o}let r=K.join(e,"node_modules",".bin",t);if(Ie.existsSync(r))return r;let n=K.join(yr,"..","node_modules",".bin",t);if(Ie.existsSync(n))return n}var Et="netlify-agent-runner-context.md",je="task-history",Ge="netlify-context",k=".netlify",re="results.md",Ye="assets";var vr=_("utils"),Rr=e=>new Promise(t=>{setTimeout(t,e)}),wt=(e,t=3e3)=>{let r=!1,n=null,o=[],s=null,i=(...u)=>{if(r)return n=u,new Promise(p=>{o.push(p)});r=!0;let a,f=new Promise(p=>{a=p});return s=(async()=>{await Promise.resolve();let p=await e(...u);for(a(p);;){if(await Rr(t),!n)return r=!1,s=null,p;let c=n,m=o;n=null,o=[],p=await e(...c),m.forEach(h=>{h(p)})}})(),f};return i.flush=async()=>{if((r||n)&&s)return await s,i.flush()},i},ne=(e,t,r=!1)=>{let n=null,o=null,s=null,i=function(...u){o=u,s=this;let a=r&&!n;clearTimeout(n),n=setTimeout(()=>{n=null,r||(e.apply(s,o),o=null,s=null)},t),a&&(e.apply(s,o),o=null,s=null)};return i.cancel=()=>{clearTimeout(n),n=null,o=null,s=null},i.flush=()=>{if(n){clearTimeout(n);let u=o,a=s;n=null,o=null,s=null,e.apply(a,u)}},i},xt=(e,t=!0,r)=>{if(e)try{return 
JSON.parse(e)}catch(n){t&&(r?.error?r.error("Could not parse JSON",n):vr.error("Could not parse JSON",n))}},Tt=(e,t)=>{let o=".netlify.app",s="agent-";if(!t)return`${s}${e.slice(0,6)}`;let u=`--${t}${o}`;if(u.length>55)return"";let a=60-u.length;if(a<=0)return"";if(a>=s.length+6){let f=Math.min(a-s.length,e.length);return`${s}${e.slice(0,f)}`}return e.slice(0,a)};var br=50*1024,Be=(e,t=br)=>{if(!e||typeof e!="string"||e.length<=t)return e;let n=e.startsWith("```")?"\n... [truncated]\n```":"... [truncated]";return e.slice(0,t)+n};import{Buffer as It}from"buffer";import Ar from"path";var vt=_("repo"),Rt=async({config:e,isRetry:t})=>{vt.info("Getting runner diffs");let r=await Nr(),{hasChanges:n}=r,{status:o}=r;if(!n)return{hasChanges:!1};if(!t){let x=Cr(o);await Pr(x)}vt.info("Changes after processing"),await qe();let s=await Ke(o);await He(s);let i={stdio:["ignore","pipe","pipe"]},a=(await F("git",["diff","--staged"],i)).stdout;if(n=!!a,!n)return{hasChanges:!1,ignored:s};let p=(await F("git",["diff","--staged","--binary"],i)).stdout,c,m;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await F("git",["commit","-m","Agent runner"]),c=(await F("git",["diff",e.sha,"HEAD"],i)).stdout;let g=(await F("git",["diff",e.sha,"HEAD","--binary"],i)).stdout;c!==g&&(m=It.from(g).toString("base64"))}let h={hasChanges:!0,diff:a,resultDiff:c,ignored:s};return a!==p&&(h.diffBinary=It.from(p).toString("base64")),m&&(h.resultDiffBinary=m),h},He=async(e=[])=>{await F("git",["add",".",...e])},qe=async()=>(await F("git",["status","-s"])).stdout,bt=/.. (.+)?\.log$/,Sr=[bt],Nr=async()=>{let e=await qe();return{hasChanges:(e.trim().length===0?[]:e.split(`
|
|
8
|
-
`).filter(n=>Sr.some(
|
|
9
|
-
`).forEach(n=>{t.forEach(
|
|
10
|
-
`).reduce((r,n)=>{if(!n)return r;let[o
|
|
7
|
+
`);return o.length>e.length*.8?e:o}import Ne from"process";import{getTracer as Wr}from"@netlify/otel";import pe from"process";var xe=pe.env.NETLIFY_API_URL,Te=pe.env.NETLIFY_API_TOKEN,Y=_("api"),Ie=()=>pe.env.NETLIFY_LOCAL_MODE==="true",fe=async(e,t={})=>{if(!xe||!Te)throw new Error("No API URL or token");let r=new URL(e,xe),n={...t,headers:{...t.headers,Authorization:`Bearer ${Te}`}};pe.env.AGENT_RUNNERS_DEBUG==="true"&&(n.headers["x-nf-debug-logging"]="true"),t.json&&(n.headers||={},n.headers["Content-Type"]="application/json",n.body=JSON.stringify(t.json));let s=await fetch(r,n),o=s.ok&&s.status<=299;if(pe.env.AGENT_RUNNERS_DEBUG==="true")Y.log(`Response headers for ${r}:`),s.headers.forEach((u,a)=>{Y.log(` ${a}: ${u}`)});else{let u=s.headers.get("x-request-id")||s.headers.get("x-nf-request-id");Y.log(`Request ID for ${r}: ${u||"N/A"}`)}if(o||Y.error(`Got status ${s.status} for request ${r}`),t.raw){if(!o)throw s;return s}let i=await(s.headers.get("content-type")?.includes("application/json")?s.json():s.text());if(!o)throw i;return i},lt=e=>{Y.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(xe=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(Te=e.constants.NETLIFY_API_TOKEN)},ct=()=>({apiUrl:xe,token:Te}),ge=async(e,t)=>Ie()?(Y.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):fe(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),B=async(e,t,r)=>Ie()?(Y.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):fe(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var ut=async(e,t)=>Ie()?(Y.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):fe(`/api/v1/agent_runners/${e}/sessions/${t}`),dt=(e,t,r)=>fe(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r}}),pt=async(e,t)=>Ie()?(Y.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):fe(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),Le=async(e,t)=>{Y.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var Q=_("ai_gateway"),Ue=null;var ft=async()=>{if(Ue)return Ue;Q.log("Fetching available AI gateway providers");let e=await fetch(`${ct().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return Ue=t,Q.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},ir=async(e,t)=>{let n=(await ft()).providers[e];if(!n)return Q.log(`Provider '${e}' not found`),!1;let s=n.models.includes(t);return Q.log(`Model validation for ${e}/${t}`,{isAvailable:s}),s},gt=async({netlify:e,config:t})=>{let r,n,s,o,i=e.constants?.SITE_ID;if(!i)throw new Error("No site id");let u=async()=>{clearTimeout(s),Q.log("Requesting AI gateway information");let a=await dt(i,t.id,t.sessionId);if({token:r,url:o}=a,n=a.expires_at?a.expires_at*1e3:void 0,Q.log("Got AI gateway information",{token:!!r,expiresAt:n,url:o}),n){let 
f=n-Date.now()-6e4;f>0&&(s=setTimeout(()=>{u()},f))}};return await Promise.all([u(),ft()]),{get url(){return o},get token(){return r},isModelAvailableForProvider:ir}};import H from"process";import K from"path";import ve from"fs";import{fileURLToPath as fr}from"url";import{createRequire as gr}from"module";import{execa as mr,execaCommand as Yn}from"execa";import{Transform as ar}from"stream";var lr=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","ERROR_LOGS_PATH","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),cr=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function ur(){return Object.entries(process.env).filter(([e,t])=>!(!t||lr.has(e)||cr.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function q(e){if(typeof e!="string")return e;let t=ur();if(t.length===0)return e;let r=e;return t.forEach(n=>{let s=new RegExp(dr(n),"g");r=r.replace(s,"******")}),r}function dr(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var ee=class extends ar{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,n){let s=t.toString(),o=q(s);n(null,o)}};function mt(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(n,s,o){let i=typeof n=="string"?q(n):n;return typeof s=="function"?t(i,s):t(i,s,o)},process.stderr.write=function(n,s,o){let i=typeof n=="string"?q(n):n;return typeof s=="function"?r(i,s):r(i,s,o)}}var me=null,ht=e=>(me&&me.destroy(),me=new z({totalAllowedTime:e}),me),yt=()=>me;var z=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,n)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let s=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),o=null,i=null;n!==void 0&&(i=new Promise((u,a)=>{o=setTimeout(()=>{a(new Error(`${t} stage exceeded its maximum duration of ${n}ms`))},n)}));try{return i?await Promise.race([r(),i]):await r()}finally{s(),o&&clearTimeout(o)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var _t={name:"@netlify/agent-runner-cli",type:"module",version:"1.0.1-broken",description:"CLI tool for running Netlify agents",main:"./dist/index.js",types:"./dist/index.d.ts",exports:"./dist/index.js",bin:{"agent-runner-cli":"./dist/bin.js","agent-runner-cli-local":"./dist/bin-local.js"},files:["dist/**/*.js","dist/**/*.d.ts","patches","scripts"],scripts:{build:"tsup",dev:"tsup --watch",prepare:"husky install node_modules/@netlify/eslint-config-node/.husky/",prepublishOnly:"npm ci && npm test",prepack:"npm run build",test:"run-s build format test:dev",format:"run-s build format:check-fix:*","format:ci":"run-s build format:check:*","format:check-fix:lint":"run-e format:check:lint format:fix:lint","format:check:lint":"cross-env-shell eslint $npm_package_config_eslint","format:fix:lint":"cross-env-shell eslint --fix $npm_package_config_eslint","format:check-fix:prettier":"run-e format:check:prettier format:fix:prettier","format:check:prettier":"cross-env-shell prettier --check $npm_package_config_prettier","format:fix:prettier":"cross-env-shell prettier --write $npm_package_config_prettier","test:dev":"run-s build test:dev:*","test:ci":"run-s build test:ci:*","test:dev:vitest":"LOG=0 vitest --exclude '**/integration/**'","test:ci:vitest":"LOG=0 c8 -r lcovonly -r text -r json vitest --exclude '**/integration/**'","test:integration":"vitest run test/integration/","test:integration:codex":"vitest run test/integration/codex.test.ts","test:integration:claude":"vitest run test/integration/claude.test.ts","test:integration:gemini":"vitest run test/integration/gemini.test.ts",postinstall:"node scripts/postinstall.js"},config:{eslint:'--cache --format=codeframe --max-warnings=0 "{src,scripts,test,.github}/**/*.{js,ts,md,html}"',prettier:'--ignore-path .gitignore --loglevel=warn "{src,scripts,test,.github}/**/*.{js,ts,md,yml,json,html}" "*.{js,ts,yml,json,html}" ".*.{js,ts,yml,json,html}" 
"!**/package-lock.json" "!package-lock.json"'},keywords:[],license:"MIT",repository:"netlify/agent-runner-cli",bugs:{url:"https://github.com/netlify/agent-runner-cli/issues"},author:"Netlify Inc.",directories:{test:"test"},devDependencies:{"@commitlint/cli":"^19.0.0","@commitlint/config-conventional":"^19.0.0","@eslint/compat":"^1.3.2","@eslint/js":"^9.35.0","@netlify/eslint-config-node":"^7.0.1","@types/node":"^24.5.0","@typescript-eslint/eslint-plugin":"^7.1.0","@typescript-eslint/parser":"^7.1.0","@vitest/eslint-plugin":"^1.3.10",c8:"^9.0.0","eslint-config-prettier":"^10.1.8","eslint-plugin-n":"^17.0.0",husky:"^8.0.0","patch-package":"^8.0.0",tsup:"^8.5.0",typescript:"^5.0.0","typescript-eslint":"^8.44.0",vitest:"^1.5.0"},dependencies:{"@anthropic-ai/claude-code":"2.0.69","@google/gemini-cli":"0.20.2","@netlify/otel":"^5.1.1","@openai/codex":"0.72.0","@opentelemetry/exporter-trace-otlp-grpc":"^0.57.0",execa:"^8.0.0","get-port":"^5.1.1",minimist:"^1.2.8"}};var hr=fr(import.meta.url),yr=K.dirname(hr),_r=gr(import.meta.url),Re=_("shell"),Me=new Set,Er={preferLocal:!0},F=(e,t,r)=>{let[n,s]=wr(t,r),o={...Er,...s},i=mr(e,n,o);return xr(i,o),Ir(i),i};var wr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},xr=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(H.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new ee).pipe(H.stdout),e.stdout?.pipe(new ee).pipe(H.stdout),e.stderr?.pipe(new ee).pipe(H.stderr);return}e.stdout?.pipe(H.stdout),e.stderr?.pipe(H.stderr)},Et=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(H.kill(-e.pid,t),Re.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return Re.error("Error killing process:",r),!1}},Tr=e=>Et(e,"SIGKILL"),Ir=e=>{Me.add(e);let t=yt();if(t){let r=t.onTimesUp(()=>{Re.log(`Global timer expired, killing process ${e.pid}`),Et(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(Re.log(`Force killing process ${e.pid} after timeout`),Tr(e))},5e3)});e.on("exit",()=>{Me.delete(e),r()}),e.on("error",()=>{Me.delete(e),r()})}};function te(e,t){if(!H.env.NETLIFY_LOCAL_MODE)try{let s=_r.resolve(_t.name),o=K.dirname(s);for(;o!==K.dirname(o);){let i=K.dirname(o);if(K.basename(i)==="node_modules"){let u=K.join(i,".bin",t);if(ve.existsSync(u))return u;break}o=i}}catch(s){console.error("Could not resolve package.json",s)}if(H.env.NODE_PATH){let s=K.join(H.env.NODE_PATH,".bin",t);if(ve.existsSync(s))return s}let r=K.join(e,"node_modules",".bin",t);if(ve.existsSync(r))return r;let n=K.join(yr,"..","node_modules",".bin",t);if(ve.existsSync(n))return n}var wt="netlify-agent-runner-context.md",je="task-history",Ge="netlify-context",k=".netlify",re="results.md",Ye="assets";var vr=_("utils"),Rr=e=>new Promise(t=>{setTimeout(t,e)}),xt=(e,t=3e3)=>{let r=!1,n=null,s=[],o=null,i=(...u)=>{if(r)return n=u,new Promise(p=>{s.push(p)});r=!0;let a,f=new Promise(p=>{a=p});return o=(async()=>{await Promise.resolve();let p=await e(...u);for(a(p);;){if(await Rr(t),!n)return r=!1,o=null,p;let c=n,m=s;n=null,s=[],p=await e(...c),m.forEach(h=>{h(p)})}})(),f};return i.flush=async()=>{if((r||n)&&o)return await o,i.flush()},i},ne=(e,t,r=!1)=>{let n=null,s=null,o=null,i=function(...u){s=u,o=this;let a=r&&!n;clearTimeout(n),n=setTimeout(()=>{n=null,r||(e.apply(o,s),s=null,o=null)},t),a&&(e.apply(o,s),s=null,o=null)};return i.cancel=()=>{clearTimeout(n),n=null,s=null,o=null},i.flush=()=>{if(n){clearTimeout(n);let u=s,a=o;n=null,s=null,o=null,e.apply(a,u)}},i},Tt=(e,t=!0,r)=>{if(e)try{return 
JSON.parse(e)}catch(n){t&&(r?.error?r.error("Could not parse JSON",n):vr.error("Could not parse JSON",n))}},It=(e,t)=>{let s=".netlify.app",o="agent-";if(!t)return`${o}${e.slice(0,6)}`;let u=`--${t}${s}`;if(u.length>55)return"";let a=60-u.length;if(a<=0)return"";if(a>=o.length+6){let f=Math.min(a-o.length,e.length);return`${o}${e.slice(0,f)}`}return e.slice(0,a)};var br=50*1024,Be=(e,t=br)=>{if(!e||typeof e!="string"||e.length<=t)return e;let n=e.startsWith("```")?"\n... [truncated]\n```":"... [truncated]";return e.slice(0,t)+n};import{Buffer as vt}from"buffer";import Ar from"path";var Rt=_("repo"),bt=async({config:e,isRetry:t})=>{Rt.info("Getting runner diffs");let r=await Nr(),{hasChanges:n}=r,{status:s}=r;if(!n)return{hasChanges:!1};if(!t){let x=Cr(s);await Pr(x)}Rt.info("Changes after processing"),await qe();let o=await Ke(s);await He(o);let i={stdio:["ignore","pipe","pipe"]},a=(await F("git",["diff","--staged"],i)).stdout;if(n=!!a,!n)return{hasChanges:!1,ignored:o};let p=(await F("git",["diff","--staged","--binary"],i)).stdout,c,m;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await F("git",["commit","-m","Agent runner"]),c=(await F("git",["diff",e.sha,"HEAD"],i)).stdout;let g=(await F("git",["diff",e.sha,"HEAD","--binary"],i)).stdout;c!==g&&(m=vt.from(g).toString("base64"))}let h={hasChanges:!0,diff:a,resultDiff:c,ignored:o};return a!==p&&(h.diffBinary=vt.from(p).toString("base64")),m&&(h.resultDiffBinary=m),h},He=async(e=[])=>{await F("git",["add",".",...e])},qe=async()=>(await F("git",["status","-s"])).stdout,At=/.. (.+)?\.log$/,Sr=[At],Nr=async()=>{let e=await qe();return{hasChanges:(e.trim().length===0?[]:e.split(`
| | 8 | +
`).filter(n=>Sr.some(o=>o instanceof RegExp?o.test(n):n===o)?!1:n[1]?.trim()!=="")).length!==0,status:e}},St=async()=>{let{stdout:e}=await F("git",["rev-parse","HEAD"]);return e.trim()},Nt=async()=>{let{stdout:e}=await F("git",["rev-list","--max-parents=0","HEAD"]);return e.trim()},Ke=async e=>{e||=await qe();let t=[".netlify","node_modules","dist"],r=[];return e.split(`
| | 9 | +
`).forEach(n=>{t.forEach(o=>{[`?? ${o}`,`?? ${o}${Ar.sep}`].some(u=>n.startsWith(u))&&r.push(`:!${o}`)});let s=n.match(At)?.[1];s&&r.push(`:!${s}.log`)}),r},Ct=async()=>{await F("git",["reset","--hard","HEAD"])},Cr=e=>{let t=e.split(`
| | 10 | +
`).reduce((r,n)=>{if(!n)return r;let[s,o,,...i]=n,u=i.join(""),a=s.trim(),f=o.trim();return r[u]?r[u].change=f:r[u]={filePath:u,stage:a,change:f},r},{});return Object.values(t)},Pr=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push(F("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import Fr from"fs/promises";import $r from"os";import Ft from"path";import ae from"process";import Dr from"readline";import We from"path";import Or from"fs/promises";var Ve=_("agent-output-utils");async function se({initialResult:e,agentName:t,hasError:r}){let n="",s=We.join(process.cwd(),k,re);try{let o=await Or.readFile(s,"utf-8");o&&(n=o,Ve.log(`Pulled result from ${We.relative(process.cwd(),s)}`))}catch{Ve.log(`No results file found at ${We.relative(process.cwd(),s)}`)}return n||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function oe({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,n=r?.replace(/\s+/g," ").trim().toLowerCase()||"",s="";return n?.includes("ai gateway is not available for your account")||n?.includes("ai gateway is not enabled for your account")?s="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":n?.includes("error when talking to gemini api")?s="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(n?.includes("connection closed prematurely")||n?.includes("499")&&t.toLowerCase().includes("gemini"))&&(s=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),n?.includes("request timed out")&&(s=`The ${t} API request's have timed out. Please try again or use a different available agent.`),n?.includes("network error")&&(s=`The ${t} agent is having network issues. 
Please try again or use a different available agent.`),s&&Ve.log(`Providing updated error messsage: ${s}, replacing original error: ${r}`),s||r||void 0}function ie(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error"))}var L=_("runner_claude"),Pt="Claude Code",le="claude-opus-4-5-20251101",Ot=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,kr=({catchError:e,runCmd:t,error:r,result:n,runnerName:s})=>(L.log(`${s} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!n,resultLength:n?n.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),n?(L.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:n}:{error:"Process completed with errors but result was captured",result:n}):(L.log("Setting result to undefined because no valid result was captured"),{error:r||`${s} failed`,result:void 0}));async function Je({config:e,netlify:t,persistSteps:r,aiGateway:n,continueSession:s,priorAgentSessionId:o,cwd:i=ae.cwd()}){let u=e,{accountType:a,prompt:f,modelVersionOverrides:p}=u,{model:c}=u,m="";if(n){let{token:y,url:d}=n;if(!y||!d)throw new Error("No token or url provided from AI Gateway");if(p?.claude){let l=p?.claude?.[a];if(l){if(!await n.isModelAvailableForProvider("anthropic",l))throw new Error(`Model override '${l}' is not available for anthropic provider`);c=l}}else if(c){if(!await n.isModelAvailableForProvider("anthropic",c))throw new Error(`Model '${c}' is not available for anthropic provider`)}else!!le&&await n.isModelAvailableForProvider("anthropic",le)?(c=le,L.log(`Using default model: ${le}`)):le&&L.log(`Default model ${le} is not available, proceeding without model specification`);ae.env.ANTHROPIC_API_KEY=y,ae.env.ANTHROPIC_BASE_URL=d}else if(!ae.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let h=[],x=[],T={},g=0,E=0,v,b,N=[te(i,"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose",...c?["--model",c]:[],...s?["--continue"]:[],...s&&o?["--resume",o]:[],"-p",f],C=`${ae.env.NVM_BIN}/node`;L.log(`Running ${C} ${N.join(" ")}`);let O=t.utils.run(C,N,{all:!0,env:ae.env,cwd:i});O.stdin?.end();let A=ne(()=>{r?.({steps:h,duration:E})},250),I=(y,d)=>{let l={...y,id:g};g+=1,x.push(l),h.push(l),d||A.flush(),A(),d&&A.flush()},M=Dr.createInterface({input:O.all});return M.on("error",y=>{L.error("Readline interface error",{error:y.message,stack:y.stack})}),M.on("line",y=>{let d=null;try{d=JSON.parse(y)}catch{L.log("Could not parse line",y)}d?.session_id&&d.session_id!==m&&(m=d.session_id),Array.isArray(d?.message?.content)?d.message.content.forEach(l=>{switch(l.type){case"text":{l.text&&I({message:l.text});break}case"image":{typeof l.source=="object"&&l.source&&l.source.type==="base64"&&l.source.media_type?I({message:``}):L.log(`Unsupported image type ${l.source?.type}`,l.source);break}case"tool_use":{if(l.name==="Task"){let 
w=l.input?.description&&`\`${l.input.description}\``;I({title:[Ot(l.name),w].filter(Boolean).join(" ")})}else l.id&&(T[l.id]=l);A.flush();break}case"tool_result":{let w=l.tool_use_id?T[l.tool_use_id]:void 0,J;if(w){let W=w.input?.file_path&&Ft.relative(i,w.input.file_path),P=W&&`\`${W}\``;J=[Ot(w.name||""),P].filter(Boolean).join(" ")}let we=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(w?.name||""),X;if(typeof l.content=="string")X=l.content;else if(Array.isArray(l.content)){let W=[];l.content.forEach(P=>{P?.type==="text"&&typeof P.text=="string"?W.push(P.text):P?.type==="image"&&typeof P.source=="object"&&P.source?P.source.type==="base64"&&P.source.media_type?W.push(``):L.log(`Unsupported image type ${P.source.type}`,P.source):L.log(`Unsupported block type ${P?.type}`)}),X=W.join(`
| | 11 11 |
| | 12 | -
`)}
| | 12 | +
`)}we&&X&&(X=`\`\`\`
| | 13 13 |
${X.trim()}
| | 14 | -
\`\`\``),I({title:J,message:X},!0);break}case"thinking":{l.thinking&&I({title:"Thinking",message:l.thinking},!0);break}default:L.log(`Message content type is not supported ${l.type}`,l)}}):d?.type==="result"&&(E=d.duration_ms||0,d.is_error?
| | 14 | +
\`\`\``),I({title:J,message:X},!0);break}case"thinking":{l.thinking&&I({title:"Thinking",message:l.thinking},!0);break}default:L.log(`Message content type is not supported ${l.type}`,l)}}):d?.type==="result"&&(E=d.duration_ms||0,d.is_error?b=d.result:v=d.result,[x,h].forEach(l=>{l[l.length-1]?.message===v&&l.pop()}))}),await O.catch(y=>{({error:b,result:v}=kr({catchError:y,runCmd:O,error:b,result:v,runnerName:"Claude"}))}),M.close(),A.flush(),{steps:x,duration:E,result:await se({initialResult:v,agentName:Pt,hasError:!!b}),error:oe({error:b,agentName:Pt}),isRetryableError:ie(b),agentSessionId:m}}var $t=async()=>{let e=Ft.join($r.homedir(),".claude");await Fr.rm(e,{recursive:!0,force:!0})};import he from"fs/promises";import kt from"os";import be from"path";import Z from"process";import Lr from"readline";var U=_("runner_codex"),Dt="Codex CLI",ce="gpt-5.2",Ur=({catchError:e,runCmd:t,error:r,result:n,runnerName:s})=>(U.log(`${s} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!n,resultLength:n?n.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),n?(U.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:n}:{error:"Process completed with errors but result was captured",result:n}):(U.log("Setting result to undefined because no valid result was captured"),{error:r||`${s} failed`,result:void 0}));async function Xe({config:e,netlify:t,persistSteps:r=void 0,sendSteps:n=void 0,aiGateway:s,cwd:o=Z.cwd()}){let{accountType:i,prompt:u,modelVersionOverrides:a}=e,{model:f}=e;if(s){let{token:d,url:l}=s;if(!d||!l)throw new Error("No token or url provided from AI Gateway");if(a?.codex){let w=a?.codex?.[i];if(w){if(!await s.isModelAvailableForProvider("openai",w))throw new Error(`Model override '${w}' is not available for openai provider`);f=w}}else if(f){if(!await s.isModelAvailableForProvider("openai",f))throw new Error(`Model '${f}' is not available for openai provider`)}else!!ce&&await s.isModelAvailableForProvider("openai",ce)?(f=ce,U.log(`Using default model: ${ce}`)):ce&&U.log(`Default model ${ce} is not available, proceeding without model specification`);Z.env.OPENAI_API_KEY=d,Z.env.OPENAI_BASE_URL=l}else if(!Z.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let p=[],c=[],m=[],h={},x=0,T=0,g,E,v=`${Z.env.NVM_BIN}/node`,b=be.join(kt.homedir(),".codex"),N=be.join(b,"config.toml"),C=be.join(b,"auth.json");try{await he.mkdir(b,{recursive:!0});let d={OPENAI_API_KEY:Z.env.OPENAI_API_KEY};await he.writeFile(C,JSON.stringify(d,null,2),"utf-8"),U.log("Created Codex auth.json file");let l="";try{l=await he.readFile(N,"utf-8")}catch{}l.includes("web_search_request")||(l.includes("[features]")?l=l.replace(/\[features\]/,`[features]
| | 15 15 |
web_search_request = true`):l+=`
| | 16 16 |
[features]
| | 17 17 |
web_search_request = true
| | 18 | -
`,await he.writeFile(N,l,"utf-8"),U.log("Updated Codex config with web_search_request enabled"))}catch(d){throw U.warn("Failed to setup Codex config and credentials",{error:d.message}),new Error(`Codex setup failed: ${d.message}`)}let O=[te(
| | 18 | +
`,await he.writeFile(N,l,"utf-8"),U.log("Updated Codex config with web_search_request enabled"))}catch(d){throw U.warn("Failed to setup Codex config and credentials",{error:d.message}),new Error(`Codex setup failed: ${d.message}`)}let O=[te(o,"codex"),"exec","--yolo","--json","--enable","web_search_request",...f?["--model",f]:[],u].filter(Boolean);U.log(`Running ${v} ${O.join(" ")}`);let A=t.utils.run(v,O,{all:!0,cwd:o,env:{...Z.env}}),I=ne(()=>{r?.({steps:p,duration:T}),n?.({steps:c,duration:T}),c=[]},250),M=(d,l)=>{d.id=x,x+=1,m.push(d),p.push(d),c.push(d),l||I.flush(),I(),l&&I.flush()},y=Lr.createInterface({input:A.all});return y.on("error",d=>{U.error("Readline interface error",{error:d.message,stack:d.stack})}),y.on("line",d=>{let l=null;try{l=JSON.parse(d)}catch{U.log("Could not parse line",d);return}if(l?.duration_ms&&(T=l.duration_ms),l?.type==="item.started"&&l?.item?.type==="command_execution")h[l.item.id]=l.item;else if(l?.type==="item.completed"&&l?.item?.type==="command_execution"){let w=jr(l.item);w&&M(w,!0)}else if(l?.type==="item.completed"&&l?.item?.type==="reasoning"){let w={title:"Reasoning",message:l.item.text};M(w,!0)}else if(l?.type==="local_shell_call")h[l.call_id]=l;else if(l?.type==="local_shell_call_output"){let w=Gr(h[l.call_id],l);w&&M(w,!0)}else l?.type==="message"&&l.role==="assistant"?g=l.content.map(w=>w.text).join(`
| | 19 19 |
`):l?.type==="message"&&l.role==="system"&&(E=l.content.map(w=>w.text).join(`
| | 20 | -
`))}),await
| | 20 | +
`))}),await A.catch(d=>{let l=Ur({catchError:d,runCmd:A,error:E,result:g,runnerName:"Codex"});E=l.error,g=l.result}),y.close(),I.flush(),{steps:m,duration:T,result:await se({initialResult:g,agentName:Dt,hasError:!!E}),error:oe({error:E,agentName:Dt}),isRetryableError:ie(E)}}var Lt=async()=>{let e=be.join(kt.homedir(),".codex");await he.rm(e,{recursive:!0,force:!0})},Mr=new Set(["bash","-lc"]),jr=e=>{if(!e||e.type!=="command_execution")return null;let t=e.command;t.startsWith("bash -lc ")&&(t=t.replace(/^bash -lc ['"]/,"").replace(/['"]$/,""));let r=`Running \`${t}\``,n=e.aggregated_output?.trim();return n&&(n=`\`\`\`
| | 21 21 |
${n}
| | 22 22 |
\`\`\``),e.status==="failed"&&e.exit_code!==0&&(n=n?`${n}
| | 23 23 |
| | 24 | -
*Exit code: ${e.exit_code}*`:`*Command failed with exit code: ${e.exit_code}*`),{title:r,message:n}},Gr=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(
| | 25 | -
${
| | 26 | -
\`\`\``)}catch(
| | 24 | +
*Exit code: ${e.exit_code}*`:`*Command failed with exit code: ${e.exit_code}*`),{title:r,message:n}},Gr=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(o=>!Mr.has(o)),n=r?`Running \`${r.join(" ")}\``:void 0,s;try{s=JSON.parse(t.output).output?.trim(),s&&(s=`\`\`\`
| | 25 | +
${s.trim()}
| | 26 | +
\`\`\``)}catch(o){U.error("Could not decode outputMsg",o,t.output)}return{title:n,message:s}};import Ae from"fs/promises";import Mt from"os";import Se from"path";import ue from"process";import Yr from"readline";var j=_("runner_gemini"),Ut="Gemini CLI",de="",Br=({catchError:e,runCmd:t,error:r,result:n,runnerName:s})=>(j.log(`${s} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!n,resultLength:n?n.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),n?(j.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:n}:{error:"Process completed with errors but result was captured",result:n}):(j.log("Setting result to undefined because no valid result was captured"),{error:r||`${s} failed`,result:void 0})),Hr={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"},qr=async()=>{let e=Se.join(Mt.homedir(),".gemini"),t=Se.join(e,"settings.json");try{await Ae.mkdir(e,{recursive:!0});let r={};try{let n=await Ae.readFile(t,"utf-8");r=JSON.parse(n)}catch{j.log("Creating new Gemini CLI settings file")}r.general||(r.general={}),r.general.previewFeatures||(r.general.previewFeatures=!0),r.model||(r.model={}),r.model.compressionThreshold!==.3&&(r.model.compressionThreshold=.3),await Ae.writeFile(t,JSON.stringify(r,null,2),"utf-8"),j.log("Configured Gemini CLI settings (preview features and compression threshold)")}catch(r){j.error("Failed to ensure Gemini CLI settings",{error:r.message})}};async function ze({config:e,netlify:t,persistSteps:r=void 0,sendSteps:n=void 0,aiGateway:s,cwd:o=ue.cwd()}){let{accountType:i,prompt:u,modelVersionOverrides:a}=e,{model:f}=e;if(await qr(),s){let{token:y,url:d}=s;if(!y||!d)throw new Error("No token or url provided from AI Gateway");if(a?.gemini){let l=a?.gemini?.[i];if(l){if(!await s.isModelAvailableForProvider("gemini",l))throw new Error(`Model override '${l}' is not available for gemini provider`);f=l}}if(!f)!!de&&await s.isModelAvailableForProvider("gemini",de)?(f=de,j.log(`Using default model: ${de}`)):de&&j.log(`Default model ${de} is not available, proceeding without model specification`);else if(f&&!a?.gemini?.[i]&&!await s.isModelAvailableForProvider("gemini",f))throw new Error(`Model '${f}' is not available for gemini provider`);ue.env.GEMINI_API_KEY=y,ue.env.GOOGLE_GEMINI_BASE_URL=d}else if(!ue.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let p=[],c=[],m=[],h={},x=0,T=0,g,E,v=[te(o,"gemini"),...f?["--model",f]:[],"--yolo","--output-format","stream-json","-p",u],b=`${ue.env.NVM_BIN}/node`;j.log(`Running ${b} ${v.join(" ")}`);let N=t.utils.run(b,v,{all:!0,env:ue.env,cwd:o});N.stdin?.end();let C=ne(()=>{r?.({steps:p,duration:T}),n?.({steps:c,duration:T}),c=[]},250),O=(y,d)=>{y.id=x,x+=1,m.push(y),p.push(y),c.push(y),d||C.flush(),C(),d&&C.flush()},A=Yr.createInterface({input:N.all});A.on("error",y=>{j.error("Readline interface error",{error:y.message,stack:y.stack})});let I="",M=()=>{I&&O({message:I.trim()}),I=""};return A.on("line",y=>{let d=null;try{if(y.startsWith("[API Error")){let l=y.match(/\[api error: (.+?)]$/i)?.[1];d={type:"error",value:Tt(l,!1)?.error?.message||l||"Gemini encountered error"}}else 
d=JSON.parse(y)}catch{return}if(d)switch(["message","result"].includes(d.type)||M(),d.type){case"message":{d.role!=="user"&&d.content&&(I+=d.content);break}case"tool_use":{let l=Hr[d.tool_name]??d.tool_name,w=d.parameters?.file_path,J=w&&Se.relative(o,w),we=d.parameters?.command,W={title:[l,J&&`\`${J}\``,we&&`\`${we}\``].filter(Boolean).join(" ")};h[d.tool_id]=W,C.flush();break}case"tool_result":{let l=h[d.tool_id];l&&(d.output&&(l.message=`\`\`\`
| | 27 27 |
${d.output.trim()}
| | 28 | -
\`\`\``),O(l,!0));break}case"result":{T=d.stats?.duration_ms,d.status==="error"?E=d.error?.message:g=I.trim();break}case"error":{E=d.error;break}case"finished":break;default:{j.warn("Unhandled message type:",d.type);break}}}),await N.catch(y=>{({error:E,result:g}=Br({catchError:y,runCmd:N,error:E,result:g,runnerName:"Gemini"}))}),
| | 28 | +
\`\`\``),O(l,!0));break}case"result":{T=d.stats?.duration_ms,d.status==="error"?E=d.error?.message:g=I.trim();break}case"error":{E=d.error;break}case"finished":break;default:{j.warn("Unhandled message type:",d.type);break}}}),await N.catch(y=>{({error:E,result:g}=Br({catchError:y,runCmd:N,error:E,result:g,runnerName:"Gemini"}))}),A.close(),C.flush(),{steps:m,duration:T,result:await se({initialResult:g,agentName:Ut,hasError:!!E}),error:oe({error:E,agentName:Ut}),isRetryableError:ie(E)}}var jt=async()=>{let e=Se.join(Mt.homedir(),".gemini");await Ae.rm(e,{recursive:!0,force:!0})};var Kr={codex:{runner:Xe,clean:Lt},claude:{runner:Je,clean:$t},gemini:{runner:ze,clean:jt}},Gt=Kr;var Yt=_("init_stage"),Bt=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:n})=>await S(Wr(),"init-stage",async s=>{let o=performance.now();s?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.validateAgent":e.validateAgent,"init.runnerVersion":n||"unknown"});let i=Gt[e.runner];if(!i)throw s?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let u=Vr({apiToken:r});lt(u);let a=e.useGateway?await gt({netlify:u,config:e}):void 0;s?.setAttributes({"init.aiGateway.created":!!a}),e.validateAgent&&e.errorLogsPath&&s?.setAttributes({"init.errorLogsPath":e.errorLogsPath});let f=5*1024,p=xt(async({steps:x=[],duration:T})=>{let g=x.map(E=>{let v=E.title?Be(q(E.title),f):void 0,b=E.message?Be(q(E.message)):void 0;return{...E,title:v,message:b}});x.length=0;try{return await B(e.id,e.sessionId,{steps:g,duration:T})}catch(E){Yt.error("persistSteps failed",{error:E?.message||E})}},t);Yt.info("Adding build files to stage");let c=await Ke();await He(c);let m;e.hasRepo?e.sha?(m=e.sha,s?.setAttributes({"init.sha.source":"provided"})):(m=await St(),await ge(e.id,{sha:m}),s?.setAttributes({"init.sha.source":"current_commit"})):(m=await Nt(),s?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let h=performance.now()-o;return s?.setAttributes({"init.sha":m||"unknown","init.duration.ms":h,"init.status":"success"}),{aiGateway:a,context:u,persistSteps:p,runner:i,sha:m}}),Vr=({apiToken:e})=>({constants:{NETLIFY_API_HOST:Ne.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||Ne.env.NETLIFY_API_TOKEN,SITE_ID:Ne.env.SITE_ID,FUNCTIONS_DIST:Ne.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:F}});import{getTracer as Ze}from"@netlify/otel";import Jr from"crypto";import V from"fs/promises";import D from"path";import G from"process";var $=_("context"),Xr=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:G.env.NETLIFY_TEAM_ID,userId:G.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:G.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},zr=10,Zr=async e=>{let{name:t,ext:r}=D.parse(e),n=e,s=D.join(G.cwd(),k,n),o=0;for(;await Qr(s);){if(o>=zr)throw new Error("Failed to generate context file");n=`${t}-${Jr.randomUUID().slice(0,5)}${r}`,s=D.join(G.cwd(),k,n),o+=1}return n},Qr=async e=>{try{return await V.access(e),!0}catch{return!1}},en=async()=>{try{$.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return $.warn("Invalid response structure: missing or invalid consumers array"),null;let 
r=t.consumers.find(n=>n&&typeof n=="object"&&n.key==="catchall-consumer");return r?!r.contextScopes||typeof r.contextScopes!="object"?($.warn("Catchall consumer missing or invalid contextScopes"),null):r:($.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?$.warn("Netlify features context request timed out"):$.warn("Failed to fetch Netlify features context:",e.message),null}},tn=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let n=await r.text();return await V.writeFile(t,n,"utf-8"),!0}catch(r){return r.name==="AbortError"?$.warn(`Download timeout for ${e}`):$.warn(`Failed to download context file ${e}:`,r.message),!1}},Ce=null,rn=async()=>{if(Ce)return Ce;let e=await en();if(!e)return[];let t=D.join(G.cwd(),k,Ge);await V.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([s,o])=>{if(!o||typeof o!="object"||!o.endpoint||!o.scope)return $.warn(`Invalid scope data for ${s}, skipping...`),null;let i=`${s}.md`,u=D.join(t,i),a=D.join(k,Ge,i);return $.log(`Downloading ${o.scope} context...`),await tn(o.endpoint,u)?($.log(`Downloaded: ${a}`),{scope:o.scope,path:a,key:s}):null});return Ce=(await Promise.all(r)).filter(s=>s!==null),Ce},Ht=async({cliPath:e,netlify:t,config:r,buildErrorContext:n})=>{let s=Xr(t),o=await Zr(wt),i=D.join(G.cwd(),k);await V.mkdir(i,{recursive:!0});let u=D.join(k,o),a=D.join(G.cwd(),u),f=D.join(G.cwd(),k,re);try{await V.unlink(f),$.log(`Deleted old results file: ${f}`)}catch{}let p=n?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
| | 29 29 |
Your task is to analyze and fix the build errors.
| | 30 30 |
Don't apply techniques of reverting changes. Apply fixes related to errors.
| | 31 31 |
Don't try to run build by yourself. Just fix the errors.
@@ -38,7 +38,7 @@ ${r.siteContext.filter(g=>g.site_context).map(g=>typeof g.site_context=="string"
| | 38 38 |
| | 39 39 |
`)}
| | 40 40 |
</project_rules>
| | 41 | -
`);let m="";if(r.sessionHistoryContext?.length){let g=D.join(G.cwd(),k,je);await V.mkdir(g,{recursive:!0});let E=await Promise.all(r.sessionHistoryContext.map(async(v,
| | 41 | +
`);let m="";if(r.sessionHistoryContext?.length){let g=D.join(G.cwd(),k,je);await V.mkdir(g,{recursive:!0});let E=await Promise.all(r.sessionHistoryContext.map(async(v,b)=>{let N=b+1,C=`attempt-${N}.md`,O=D.join(g,C),A=D.join(k,je,C),I=`# Task History - Attempt ${N}
| | 42 42 |
| | 43 43 |
## Request - what the user asked for
| | 44 44 |
${v.request}
@@ -48,7 +48,7 @@ ${v.request}
| | 48 48 |
## Response - what the agent replied with after its work
| | 49 49 |
| | 50 50 |
${v.response}
| | 51 | -
`;return await V.writeFile(O,I,"utf-8"),$.log(`Created history file: ${
| | 51 | +
`;return await V.writeFile(O,I,"utf-8"),$.log(`Created history file: ${A}`),A}));m+=`
| | 52 52 |
<session_history_context>
| | 53 53 |
History of prior work on this task.
| | 54 54 |
You MUST review ALL of the files below as context to understand the context of previous attempts. Use this information to continue the discussion appropriately.
@@ -95,11 +95,11 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
| | 95 95 |
| | 96 96 |
<extra_context>
| | 97 97 |
<metadata>
| | 98 | -
- Site/Project ID: ${
| | 99 | -
- Account/Team ID: ${
| | 100 | -
- User ID: ${
| | 101 | -
- Site/Project Slug: ${
| | 102 | -
- Netlify Functions directory: ${
| | 98 | +
- Site/Project ID: ${s.siteId}
| | 99 | +
- Account/Team ID: ${s.accountId}
| | 100 | +
- User ID: ${s.userId}
| | 101 | +
- Site/Project Slug: ${s.siteSlug}
| | 102 | +
- Netlify Functions directory: ${s.functionsDir}
| | 103 103 |
</metadata>
| | 104 104 |
<environment>
| | 105 105 |
- Node Version: ${G.version||"unknown"}
@@ -126,17 +126,17 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
| | 126 126 |
</request>
| | 127 127 |
| | 128 128 |
Use the following file for the complete context of the ask, the environment, and what's available. ${a} You MUST READ ALL OF IT. Make sure to read it first. Never cite or paraphrase private context.
| | 129 | -
`),T};var nn=_("prompt"),
| | 129 | +
`),T};var nn=_("prompt"),qt=async({cliPath:e,config:t,netlify:r,buildErrorContext:n})=>{let s=await Ht({cliPath:e,config:t,netlify:r,buildErrorContext:n});return process.env.AGENT_RUNNER_DEBUG&&nn.log("Contextful Prompt:",s),{prompt:s}};var Pe=_("inference_stage"),Kt=5,Oe=async e=>{let{cliPath:t,config:r,context:n,buildErrors:s,runner:o,persistSteps:i,aiGateway:u,attempt:a,contextPrefix:f,priorAgentSessionId:p}=e;Pe.log(`Running inference stage, attempt ${a} of ${Kt}`);let c=await S(Ze(),"inference-stage",async m=>{m?.setAttributes({"inference.attempt":a||1}),mt();let{prompt:h}=await S(Ze(),"compose-prompt",async()=>await qt({cliPath:t,config:r,buildErrorContext:sn(s),netlify:n})),x=`
| | 130 130 |
${f||""}
| | 131 131 |
${h}
| | 132 | -
`.trim(),T={...r,prompt:x},g=await S(Ze(),`run-${r.runner}`,async()=>await
| | 132 | +
`.trim(),T={...r,prompt:x},g=await S(Ze(),`run-${r.runner}`,async()=>await o({aiGateway:u,config:T,netlify:n,persistSteps:i,continueSession:!!(a&&a>1),priorAgentSessionId:p}));return g.result&&(g.result=q(g.result)),g.error&&(g.error=q(g.error)),await i.flush(),g});if(c.error){if(Pe.error("Runner failed",{stepsCount:c.steps.length,duration:c.duration,error:c.error,isRetryableError:c.isRetryableError,attempt:a||1,agentSessionId:c.agentSessionId}),c.isRetryableError&&(!a||a<Kt))return Pe.log("Retrying inference stage"),await new Promise(h=>setTimeout(h,5e3)),{runnerResult:(await Oe({...e,attempt:(a||1)+1,priorAgentSessionId:c.agentSessionId,contextPrefix:c.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw Pe.log("Do not retry inference stage"),new Error(c.error)}return{runnerResult:c}},sn=e=>!e||e.length===0?"":`
| | 133 133 |
Deploy failed failed. Here are the errors to review on the latest build:
| | 134 134 |
| | 135 135 |
Below are all of the logs with potential issues that we extracted. Some of them may be false positives, discern them carefully and ensure fixes are relevant.
| | 136 136 |
| | 137 137 |
${e.pop()}
| | 138 | -
`;import ln from"process";import{getTracer as Qe}from"@netlify/otel";import{getTracer as
| | 139 | -
Preview deploy created successfully:`,{deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id});let m={deployId:c.deploy_id,previewUrl:c.deploy_url,logsUrl:c.logs,siteId:c.site_id};return t||(m.sourceZipFilename=c.source_zip_filename),m}catch(a){throw ye.error("Failed to create preview deploy via CLI:",a),u?.setAttributes({success:!1,error:a.message}),a}};var _e=_("deploy_stage"),et=async e=>await S(Qe(),"run-deploy-stage",async()=>cn(e)),cn=async({cliPath:e,config:t,context:r,result:n,filter:
| | 140 | -
`);for(let i of
| | 141 | -
`),
| | 138 | +
`;import ln from"process";import{getTracer as Qe}from"@netlify/otel";import{getTracer as on}from"@netlify/otel";var ye=_("deploy"),Wt=async e=>await S(on(),"create-preview-deploy",async t=>an(e,t)),an=async({netlify:e,hasRepo:t,skipBuild:r,message:n="Agent Preview",deploySubdomain:s,cliPath:o,filter:i},u)=>{try{let a=["deploy","--message",`"${n}"`,"--json","--draft","--verbose"];t||(ye.log("Deploy: Uploading source zip"),a.push("--upload-source-zip")),s&&a.push("--alias",s),i&&a.push("--filter",i),r?(ye.log("Deploy: Skipping build"),a.push("--no-build")):a.push("--context","deploy-preview");let f=o||"netlify";ye.log(`Running: ${f} ${a.join(" ")}`),u?.setAttributes({cmd:f,args:a});let p=await e.utils.run(f,a,{stdio:["ignore","pipe","pipe"]}),c=JSON.parse(p.stdout.trim());u?.setAttributes({success:!0,deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id}),ye.log(`
| | 139 | +
Preview deploy created successfully:`,{deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id});let m={deployId:c.deploy_id,previewUrl:c.deploy_url,logsUrl:c.logs,siteId:c.site_id};return t||(m.sourceZipFilename=c.source_zip_filename),m}catch(a){throw ye.error("Failed to create preview deploy via CLI:",a),u?.setAttributes({success:!1,error:a.message}),a}};var _e=_("deploy_stage"),et=async e=>await S(Qe(),"run-deploy-stage",async()=>cn(e)),cn=async({cliPath:e,config:t,context:r,result:n,filter:s,isRetry:o})=>{let i=await S(Qe(),"get-runner-diffs",async()=>await bt({config:t,isRetry:o}));if(_e.info("Resolved git",{hasChanges:i.hasChanges,ignored:i.ignored??[]}),!i.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:u,resultDiff:a,diffBinary:f,resultDiffBinary:p}=i,c=!0;_e.log("Preview deploy condition check:",{resultUndefined:n===void 0,resultType:typeof n,hasChanges:c,wouldCreatePreview:n!==void 0&&c});let m=null;if(n!==void 0&&c)try{let h;try{let x=await S(Qe(),"get-runner-session",async()=>await ut(t.id,t.sessionId));x?.title&&(h=x.title)}catch(x){_e.warn("Failed to fetch session title, using fallback message:",x.message)}await B(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),m=await Wt({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:h,skipBuild:!1,deploySubdomain:It(t.id,ln.env.SITE_NAME),filter:s})}catch(h){return _e.warn("Failed to create preview deploy (continuing with agent run):",h),{diff:u,resultDiff:a,hasChanges:c,previewInfo:null,diffBinary:f,resultDiffBinary:p,deployError:h instanceof Error?h.message:String(h)}}return _e.log("Git status",{hasDiff:!!u,hasChanges:c}),{diff:u,resultDiff:a,hasChanges:c,previewInfo:m,diffBinary:f,resultDiffBinary:p}};import{getTracer as Ee}from"@netlify/otel";async function tt(e,t){let{maxRetries:r,baseDelay:n,onRetry:s}=t,o;for(let i=1;i<=r;i++)try{return await e()}catch(u){if(o=u,i===r)throw o;s&&s(i,o),await new Promise(a=>setTimeout(a,n*i))}throw o}var Fe=class{scanDiffForForms(t){let r=[],n=null,s=[],o=t.split(`
| | 140 | +
`);for(let i of o)if(i.startsWith("diff --git")){if(n&&s.length>0){let a=this.containsNetlifyForm(s,n);a&&r.push(a)}let u=i.split(" ");n=u[u.length-1].replace(/^b\//,""),s=[]}else i.startsWith("+")&&!i.startsWith("+++")&&s.push(i.slice(1));if(n&&s.length>0){let i=this.containsNetlifyForm(s,n);i&&r.push(i)}return{detected:r.length>0,matches:r}}containsNetlifyForm(t,r){let n=t.join(`
| | 141 | +
`),s=[{pattern:/<[\w-]*form[\s\S]*?(data-)?netlify/i,name:"standard form element"},{pattern:/<[A-Z][\w]*[\s\S]*?component\s*=\s*["']form["'][\s\S]*?(data-)?netlify/i,name:"component with form prop"}];for(let{pattern:o,name:i}of s){let u=n.match(o);if(u){let a=u.index||0,f=Math.max(0,a-20),p=Math.min(n.length,a+u[0].length+20),c=n.slice(f,p).trim();return c=c.replace(/\s+/g," "),c.length>100&&(c=c.slice(0,97)+"..."),{file:r,snippet:`[${i}] ${c}`}}}return null}};var R=_("cleanup_stage"),Vt=async e=>await S(Ee(),"cleanup-stage",async()=>un(e)),rt=1024*1024*10,un=async({config:e,diff:t,result:r,duration:n,resultDiff:s,diffBinary:o,resultDiffBinary:i,previewInfo:u})=>{let a={result:r||"Done",duration:n};u&&u.deployId&&(a.deploy_id=u.deployId),u&&u.sourceZipFilename&&(a.result_zip_file_name=u.sourceZipFilename);let f=t||o||s||i;if(f&&(a.diff_produced=!0),process.env.SITE_ID==="def61649-ad41-4d63-a478-8496a919443a"&&f)return R.log("Test site detected - skipping diff upload to test loss detection"),await tt(async()=>await S(Ee(),"update-runner-session",()=>B(e.id,e.sessionId,a)),{maxRetries:3,baseDelay:1e3,onRetry:(p,c)=>{R.error(`Error updating agent runner session (attempt ${p}):`,c),R.log("Retrying...")}}),{sessionUpdate:a};if(f){let p=new Fe,c=t||o||"",m=p.scanDiffForForms(c);m.detected?(R.log("Detected Netlify form(s) in diff:"),m.matches.forEach(({file:h,snippet:x})=>{R.log(` - ${h}: ${x}`)}),a.has_netlify_form=!0):R.log("Did not detect Netlify form(s) in diff"),R.log("Did not detect Netlify form(s) in diff")}if(f)try{R.log("Getting pre-signed URLs for diff upload");let p=await pt(e.id,e.sessionId),c=[];(t||o)&&c.push(Le(p.result.upload_url,o||t).then(()=>{a.result_diff_s3_key=p.result.s3_key,R.log("Successfully uploaded result_diff to S3")})),(s||i)&&c.push(Le(p.cumulative.upload_url,i||s).then(()=>{a.cumulative_diff_s3_key=p.cumulative.s3_key,R.log("Successfully uploaded cumulative_diff to S3")})),R.log(`Uploading ${c.length} diff(s) to S3 in parallel`),await Promise.all(c),(s||i)&&(R.log("Updating agent runner with cumulative diff S3 key"),await S(Ee(),"update-runner",async()=>{await ge(e.id,{result_diff_s3_key:p.cumulative.s3_key})}))}catch(p){R.error("S3 upload failed, falling back to inline diffs:",p);let c=Buffer.byteLength(t||o||""),m=Buffer.byteLength(i||s||"");if(c>rt||m>rt){let h=`Diffs exceed maximum inline size of ${rt} bytes.`;throw R.error(h),new Error(h)}a.result_diff=t,a.result_diff_binary=o,(s||i)&&(a.cumulative_diff=s,a.cumulative_diff_binary=i,R.log("Updating agent runner with inline diffs (fallback)"),await S(Ee(),"update-runner",async()=>{await ge(e.id,{result_diff:s,result_diff_binary:i})}))}else R.log("No diffs to upload");return R.log("Updated agent runner with result"),await tt(async()=>await S(Ee(),"update-runner-session",()=>B(e.id,e.sessionId,a)),{maxRetries:3,baseDelay:1e3,onRetry:(p,c)=>{R.error(`Error updating agent runner session (attempt ${p}):`,c),R.log("Retrying...")}}),R.log("Finished updating agent runner with result"),{sessionUpdate:a}};import{getTracer as Jt,shutdownTracers as pn,withActiveSpan as Xt}from"@netlify/otel";var fn=dn(import.meta.url),zt=fn("../package.json"),Zt=_("pipeline_index"),$e=3,Zo=async({config:e,apiToken:t,cliPath:r="netlify",cwd:n,errorLogsPath:s,filter:o,tracing:i={}})=>{let u,{withStageTimer:a}=ht(z.timeUnits.hours(4)),f=await it(zt.version,e.id,i);try{await Xt(Jt(),"run-pipeline",{},f,async()=>{let{aiGateway:p,context:c,persistSteps:m,runner:h,sha:x}=await 
a("init",()=>Bt({config:e,apiToken:t,cliPath:r,cwd:n,errorLogsPath:s,filter:o,runnerVersion:zt.version}),z.timeUnits.minutes(10));u=h.clean,e.sha=x;let{runnerResult:T}=await a("inference",()=>Oe({cliPath:r,config:e,context:c,runner:h.runner,persistSteps:m,aiGateway:p}));await B(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let g=await a("deploy",()=>et({cliPath:r,config:e,context:c,result:T.result,filter:o,isRetry:!1})),E=T,v=[];if(g.hasChanges&&g.deployError){v.push(at(g.deployError));let I=1;for(;I<=$e&&!g.previewInfo;)Zt.log(`Deploy attempt had errors. Retrying. ${I}/${$e}`),await Xt(Jt(),"deploy-stage",async M=>{M?.setAttributes({"stage.attempt":I});let{runnerResult:y}=await a(`inference-retry-${I}`,()=>Oe({cliPath:r,config:e,context:c,runner:h.runner,persistSteps:m,aiGateway:p,buildErrors:v,priorAgentSessionId:T.agentSessionId}));E={...y,steps:[...E.steps||[],...y.steps||[]],duration:(E.duration||0)+(y.duration||0)},g=await a(`deploy-retry-${I}`,()=>et({cliPath:r,config:e,context:c,result:y.result,filter:o,isRetry:!0})),g.deployError&&v.push(g.deployError),I++});I>$e&&!g.previewInfo&&console.warn(`Deploy validation failed after ${$e} attempts`)}let{diff:b,resultDiff:N,previewInfo:C,diffBinary:O,resultDiffBinary:A}=g;await a("cleanup",()=>Vt({config:e,diff:b,result:E.result,duration:E.duration,resultDiff:N,diffBinary:O,resultDiffBinary:A,previewInfo:C}),z.timeUnits.minutes(10)),process.env.NETLIFY_LOCAL_MODE||(await u?.(),await Ct())})}catch(p){Zt.error("Got error while running pipeline",p),await u?.();let c=p instanceof Error&&p.message;throw await B(e.id,e.sessionId,{result:c||"Encountered error when running agent",state:"error"}),p}finally{await pn()}};export{Zo as runPipeline};
| | 142 142 |
//# sourceMappingURL=index.js.map
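For orientation only: the minified bundle above contains a log-masking layer (the helpers the bundler renamed to `q`, `ee`, and `mt`) that regex-escapes environment-variable values that look like secrets and replaces them with asterisks in anything written to stdout/stderr. The following is a rough, readable sketch of that pattern; every name is invented for illustration and is not one of the package's actual exports.

```js
// Illustrative sketch only; approximates the secret-masking helpers visible in dist/index.js.
import process from "process";

const escapeForRegExp = (value) => value.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");

// Collect env values that plausibly look like secrets (non-empty, 5+ chars, non-numeric).
const secretValues = () =>
  Object.values(process.env).filter(
    (value) => value && value.length >= 5 && Number.isNaN(Number(value)),
  );

// Replace every secret-looking env value with asterisks before it reaches the logs.
export const maskSecrets = (text) => {
  if (typeof text !== "string") return text;
  return secretValues().reduce(
    (masked, secret) =>
      masked.replace(new RegExp(escapeForRegExp(secret), "g"), "******"),
    text,
  );
};

// Patch stdout/stderr so all output is masked (the bundle guards this behind a
// NETLIFY_MASK_LOGS !== "false" check).
export const patchStdStreamsForMasking = () => {
  for (const stream of [process.stdout, process.stderr]) {
    const originalWrite = stream.write.bind(stream);
    stream.write = (chunk, encoding, callback) =>
      typeof chunk === "string"
        ? originalWrite(maskSecrets(chunk), encoding, callback)
        : originalWrite(chunk, encoding, callback);
  }
};
```

In the bundle this patching is applied once at the start of the inference stage, before the agent process begins streaming output.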
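Similarly, the cleanup stage wraps its session update in a small retry helper with linear backoff (the bundle calls it with maxRetries: 3 and baseDelay: 1000). A minimal sketch of that pattern, again under invented names; the `updateRunnerSession` call in the usage comment is hypothetical.

```js
// Illustrative sketch of the retry-with-linear-backoff helper seen in dist/index.js.
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

export async function retryWithBackoff(operation, { maxRetries, baseDelay, onRetry }) {
  let lastError;
  for (let attempt = 1; attempt <= maxRetries; attempt += 1) {
    try {
      return await operation();
    } catch (error) {
      lastError = error;
      if (attempt === maxRetries) throw lastError;
      onRetry?.(attempt, error);
      await sleep(baseDelay * attempt); // linear backoff: 1s, 2s, 3s, ...
    }
  }
  throw lastError;
}

// Example usage mirroring how the cleanup stage retries its session update:
// await retryWithBackoff(() => updateRunnerSession(runnerId, sessionId, payload), {
//   maxRetries: 3,
//   baseDelay: 1000,
//   onRetry: (attempt, error) => console.error(`Update failed (attempt ${attempt})`, error),
// });
```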