@netlify/agent-runner-cli 1.62.1-alpha.0 → 1.63.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/bin-local.js CHANGED
@@ -1,32 +1,32 @@
  #!/usr/bin/env node
- import C from"process";import fr from"path";import mr from"fs";import Fn from"minimist";import{createRequire as Cn}from"module";import{createTracerProvider as hr}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as ht}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as yr}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as _r}from"@netlify/otel";import{propagation as yt,context as _t,W3CTraceContextPropagator as Er}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as wr}from"@opentelemetry/exporter-trace-otlp-grpc";import gr from"process";function _(e){let t=gr.env.LOG!=="0";return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var qe=_("tracing"),Et=async(e,t,r)=>(await hr({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new ht(new We),new ht(new wr({url:r.exporterUrl}))],instrumentations:[new yr({skipHeaders:!0})]}),r.traceparent?(yt.setGlobalPropagator(new Er),yt.extract(_t.active(),{traceparent:r.traceparent,isRemote:!0})):_t.active());function A(e,t,r){return qe.log(`\u23F3 TRACE: ${t} starting...`),_r(e,t,r)}var We=class{export(t,r){for(let i of t)this.logSpan(i);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,i=t.attributes,n=[];for(let[a,l]of Object.entries(i))a.includes("duration")&&typeof l=="number"?n.push(`${a}=${l.toFixed(2)}ms`):n.push(`${a}=${l}`);let o=t.status?.code===2?"\u274C":"\u2705",s=n.length>0?` [${n.join(", ")}]`:"";qe.log(`${o} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${s}`),t.status?.code===2&&t.status.message&&qe.log(` \u274C Error: ${t.status.message}`)}};var Tr=["error","failed","exception","fatal","panic","abort","crash"];function wt(e){let t=e.split(`
- `),r=[],i=-1,n=0;for(;n<t.length;){let a=t[n].slice(0,500).toLowerCase();if(Tr.some(p=>a.includes(p))){let p=Math.max(0,n-10,i+1),f=Math.min(t.length-1,n+20),u=[];for(let g=p;g<=f;g++)u.push(t[g]);r.push(u.join(`
- `)),i=f,n=f+1}else n++}if(r.length===0)return e;let o=r.map((s,a)=>`<extracted_error_chunk order="${a+1}">
+ import C from"process";import mr from"path";import gr from"fs";import Fn from"minimist";import{createRequire as Pn}from"module";import{createTracerProvider as yr}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as ht}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as Er}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as _r}from"@netlify/otel";import{propagation as yt,context as Et,W3CTraceContextPropagator as wr}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as Tr}from"@opentelemetry/exporter-trace-otlp-grpc";import hr from"process";function E(e){let t=hr.env.LOG!=="0";return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var qe=E("tracing"),_t=async(e,t,r)=>(await yr({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new ht(new We),new ht(new Tr({url:r.exporterUrl}))],instrumentations:[new Er({skipHeaders:!0})]}),r.traceparent?(yt.setGlobalPropagator(new wr),yt.extract(Et.active(),{traceparent:r.traceparent,isRemote:!0})):Et.active());function b(e,t,r){return qe.log(`\u23F3 TRACE: ${t} starting...`),_r(e,t,r)}var We=class{export(t,r){for(let i of t)this.logSpan(i);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,i=t.attributes,n=[];for(let[l,c]of Object.entries(i))l.includes("duration")&&typeof c=="number"?n.push(`${l}=${c.toFixed(2)}ms`):n.push(`${l}=${c}`);let o=t.status?.code===2?"\u274C":"\u2705",s=n.length>0?` [${n.join(", ")}]`:"";qe.log(`${o} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${s}`),t.status?.code===2&&t.status.message&&qe.log(` \u274C Error: ${t.status.message}`)}};var Ir=["error","failed","exception","fatal","panic","abort","crash"];function wt(e){let t=e.split(`
+ `),r=[],i=-1,n=0;for(;n<t.length;){let l=t[n].slice(0,500).toLowerCase();if(Ir.some(u=>l.includes(u))){let u=Math.max(0,n-10,i+1),f=Math.min(t.length-1,n+20),d=[];for(let m=u;m<=f;m++)d.push(t[m]);r.push(d.join(`
+ `)),i=f,n=f+1}else n++}if(r.length===0)return e;let o=r.map((s,l)=>`<extracted_error_chunk order="${l+1}">
  ${s}
  </extracted_error_chunk>`).join(`
 
- `);return o.length>e.length*.8?e:o}import{execSync as un}from"child_process";import tr from"fs/promises";import dn from"path";import Z from"process";import{getTracer as pn}from"@netlify/otel";import we from"process";var se=class extends Error{constructor(r,i,n){super(r);this.statusCode=i;this.userMessage=n;this.name="GracefulShutdownError"}},Tt=e=>e instanceof se;var Ae=we.env.NETLIFY_API_URL,Ne=we.env.NETLIFY_API_TOKEN,q=_("api"),Ce=()=>we.env.NETLIFY_LOCAL_MODE==="true",Te=async(e,t={})=>{if(!Ae||!Ne)throw new Error("No API URL or token");let r=new URL(e,Ae),i={...t,headers:{...t.headers,Authorization:`Bearer ${Ne}`}};we.env.AGENT_RUNNERS_DEBUG==="true"&&(i.headers["x-nf-debug-logging"]="true"),t.json&&(i.headers||={},i.headers["Content-Type"]="application/json",i.body=JSON.stringify(t.json));let n=await fetch(r,i),o=n.ok&&n.status<=299;if(we.env.AGENT_RUNNERS_DEBUG==="true")q.log(`Response headers for ${r}:`),n.headers.forEach((a,l)=>{q.log(` ${l}: ${a}`)});else{let a=n.headers.get("x-request-id")||n.headers.get("x-nf-request-id");q.log(`Request ID for ${r}: ${a||"N/A"}`)}if(o||q.error(`Got status ${n.status} for request ${r}`),t.raw){if(!o)throw new Error(`API request failed: ${n.status} ${n.statusText}`);return n}let s=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!o){let a=typeof s=="string"?s:JSON.stringify(s);throw n.status===404?new se(`API request failed: 404 - ${a}`,404,"The site associated with this agent run no longer exists."):n.status===403&&t.gracefulOn403?new se(`API request failed: 403 - ${a}`,403,"Credit limit reached. Please add more credits to continue using Agent Runners."):new Error(`API request failed: ${n.status} - ${a}`)}return s},It=e=>{q.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(Ae=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(Ne=e.constants.NETLIFY_API_TOKEN)},xt=()=>({apiUrl:Ae,token:Ne}),Ie=async(e,t)=>Ce()?(q.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):Te(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),Y=async(e,t,r)=>Ce()?(q.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):Te(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var vt=async(e,t)=>Ce()?(q.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):Te(`/api/v1/agent_runners/${e}/sessions/${t}`),Rt=(e,t,r)=>Te(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r},gracefulOn403:!0}),St=async(e,t)=>Ce()?(q.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):Te(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),Ke=async(e,t)=>{q.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var ae=_("ai_gateway"),Je=null;var bt=async()=>{if(Je)return Je;ae.log("Fetching available AI gateway providers");let e=await fetch(`${xt().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: 
${e.statusText}`);let t=await e.json();return Je=t,ae.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},Ir=async(e,t)=>{let i=(await bt()).providers[e];if(!i)return ae.log(`Provider '${e}' not found`),!1;let n=i.models.includes(t);return ae.log(`Model validation for ${e}/${t}`,{isAvailable:n}),n},At=async({netlify:e,config:t})=>{let r,i,n,o,s=e.constants?.SITE_ID;if(!s)throw new Error("No site id");let a=async()=>{clearTimeout(n),ae.log("Requesting AI gateway information");let l=await Rt(s,t.id,t.sessionId);if({token:r,url:o}=l,i=l.expires_at?l.expires_at*1e3:void 0,ae.log("Got AI gateway information",{token:!!r,expiresAt:i,url:o}),i){let p=i-Date.now()-6e4;p>0&&(n=setTimeout(()=>{a()},p))}};return await Promise.all([a(),bt()]),{get url(){return o},get token(){return r},isModelAvailableForProvider:Ir}};import W from"process";import V from"path";import Pe from"fs";import{fileURLToPath as Nr}from"url";import{createRequire as Cr}from"module";import{execa as Pr,execaCommand as pi}from"execa";import{Transform as xr}from"stream";var vr=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),Rr=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function Sr(){return Object.entries(process.env).filter(([e,t])=>!(!t||vr.has(e)||Rr.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function J(e){if(typeof e!="string")return e;let t=Sr();if(t.length===0)return e;let r=e;return t.forEach(i=>{let n=new RegExp(br(i),"g");r=r.replace(n,"******")}),r}function br(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var le=class extends xr{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,i){let n=t.toString(),o=J(n);i(null,o)}};function Nt(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(i,n,o){let s=typeof i=="string"?J(i):i;return typeof n=="function"?t(s,n):t(s,n,o)},process.stderr.write=function(i,n,o){let s=typeof i=="string"?J(i):i;return typeof n=="function"?r(s,n):r(s,n,o)}}var xe=null,Ct=e=>(xe&&xe.destroy(),xe=new z({totalAllowedTime:e}),xe),Pt=()=>xe;var z=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,i)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let n=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),o=null,s=null;i!==void 0&&(s=new Promise((a,l)=>{o=setTimeout(()=>{l(new Error(`${t} stage exceeded its maximum duration of ${i}ms`))},i)}));try{return s?await Promise.race([r(),s]):await r()}finally{n(),o&&clearTimeout(o)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var $t="netlify-agent-runner-context.md",Ve="task-history",Xe="netlify-context",M=".netlify",re="results.md",ze="assets";var ne=1800*1e3;var Ft={name:"@netlify/agent-runner-cli",type:"module",version:"1.62.1-alpha.0",description:"CLI tool for running Netlify agents",main:"./dist/index.js",types:"./dist/index.d.ts",exports:"./dist/index.js",bin:{"agent-runner-cli":"./dist/bin.js","agent-runner-cli-local":"./dist/bin-local.js"},files:["dist/**/*.js","dist/**/*.d.ts","patches","scripts"],scripts:{build:"tsup",dev:"tsup --watch",prepare:"husky install node_modules/@netlify/eslint-config-node/.husky/",prepublishOnly:"npm ci && npm test",prepack:"npm run build",test:"run-s build format test:dev",format:"run-s build format:check-fix:*","format:ci":"run-s build format:check:*","format:check-fix:lint":"run-e format:check:lint format:fix:lint","format:check:lint":"cross-env-shell eslint $npm_package_config_eslint","format:fix:lint":"cross-env-shell eslint --fix $npm_package_config_eslint","format:check-fix:prettier":"run-e format:check:prettier format:fix:prettier","format:check:prettier":"cross-env-shell prettier --check $npm_package_config_prettier","format:fix:prettier":"cross-env-shell prettier --write $npm_package_config_prettier","test:dev":"run-s build test:dev:*","test:ci":"run-s build test:ci:*","test:dev:vitest":"LOG=0 vitest --exclude '**/integration/**'","test:ci:vitest":"LOG=0 c8 -r lcovonly -r text -r json vitest --exclude '**/integration/**'","test:integration":"vitest run test/integration/","test:integration:codex":"vitest run test/integration/codex.test.ts","test:integration:claude":"vitest run test/integration/claude.test.ts","test:integration:gemini":"vitest run test/integration/gemini.test.ts",postinstall:"node scripts/postinstall.js"},config:{eslint:'--cache --format=codeframe --max-warnings=0 "{src,scripts,test,.github}/**/*.{js,ts,md,html}"',prettier:'--ignore-path .gitignore 
--loglevel=warn "{src,scripts,test,.github}/**/*.{js,ts,md,yml,json,html}" "*.{js,ts,yml,json,html}" ".*.{js,ts,yml,json,html}" "!**/package-lock.json" "!package-lock.json"'},keywords:[],license:"MIT",repository:"netlify/agent-runner-cli",bugs:{url:"https://github.com/netlify/agent-runner-cli/issues"},author:"Netlify Inc.",directories:{test:"test"},devDependencies:{"@commitlint/cli":"^20.0.0","@commitlint/config-conventional":"^20.0.0","@eslint/compat":"^2.0.0","@eslint/js":"^9.35.0","@netlify/eslint-config-node":"^7.0.1","@types/node":"^24.5.0","@typescript-eslint/eslint-plugin":"^8.0.0","@typescript-eslint/parser":"^8.0.0","@vitest/eslint-plugin":"^1.6.6",c8:"^10.0.0","eslint-config-prettier":"^10.1.8","eslint-plugin-n":"^17.0.0",husky:"^9.0.0","patch-package":"^8.0.0",tsup:"^8.5.0",typescript:"^5.0.0","typescript-eslint":"^8.44.0",vitest:"^4.0.16"},dependencies:{"@anthropic-ai/claude-code":"2.1.29","@google/gemini-cli":"0.25.2","@netlify/otel":"^5.1.1","@openai/codex":"0.93.0","@opentelemetry/exporter-trace-otlp-grpc":"^0.208.0",execa:"^9.6.1",minimist:"^1.2.8"}};var $r=Nr(import.meta.url),Fr=V.dirname($r),Or=Cr(import.meta.url),ce=_("shell"),Ze=new Set,kr={preferLocal:!0},N=(e,t,r)=>{let[i,n]=Lr(t,r),o={...kr,...n},s=Pr(e,i,o);Dr(s,o),Mr(s);let a=r?.idleTimeout;return a&&a>0&&Ur(s,a),s};var Lr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},Dr=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(W.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new le).pipe(W.stdout),e.stdout?.pipe(new le).pipe(W.stdout),e.stderr?.pipe(new le).pipe(W.stderr);return}e.stdout?.pipe(W.stdout),e.stderr?.pipe(W.stderr)},Qe=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(W.kill(-e.pid,t),ce.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return ce.error("Error killing process:",r),!1}},Ot=e=>Qe(e,"SIGKILL"),Ur=(e,t)=>{let r=null,i=()=>{ce.log(`Process ${e.pid} killed due to idle timeout (no output for ${t}ms)`),Qe(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(ce.log(`Force killing idle process ${e.pid}`),Ot(e))},5e3)},n=()=>{r&&clearTimeout(r),r=setTimeout(i,t)};n(),e.stdout?.on("data",n),e.stderr?.on("data",n);let o=()=>{r&&(clearTimeout(r),r=null)};e.on("exit",o),e.on("error",o)},Mr=e=>{Ze.add(e);let t=Pt();if(t){let r=t.onTimesUp(()=>{ce.log(`Global timer expired, killing process ${e.pid}`),Qe(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(ce.log(`Force killing process ${e.pid} after timeout`),Ot(e))},5e3)});e.on("exit",()=>{Ze.delete(e),r()}),e.on("error",()=>{Ze.delete(e),r()})}};function $e(e,t){return!!ie(e,t)}function ie(e,t){if(!W.env.NETLIFY_LOCAL_MODE)try{let n=Or.resolve(Ft.name),o=V.dirname(n);for(;o!==V.dirname(o);){let s=V.dirname(o);if(V.basename(s)==="node_modules"){let a=V.join(s,".bin",t);if(Pe.existsSync(a))return a;break}o=s}}catch(n){console.error("Could not resolve package.json",n)}if(W.env.NODE_PATH){let n=V.join(W.env.NODE_PATH,".bin",t);if(Pe.existsSync(n))return n}let r=V.join(e,"node_modules",".bin",t);if(Pe.existsSync(r))return r;let i=V.join(Fr,"..","node_modules",".bin",t);if(Pe.existsSync(i))return i}var Gr=_("utils"),jr=e=>new Promise(t=>{setTimeout(t,e)}),kt=(e,t=3e3)=>{let r=!1,i=null,n=[],o=null,s=(...a)=>{if(r)return i=a,new Promise(f=>{n.push(f)});r=!0;let l,p=new Promise(f=>{l=f});return o=(async()=>{await Promise.resolve();let f=await e(...a);for(l(f);;){if(await jr(t),!i)return r=!1,o=null,f;let u=i,g=n;i=null,n=[],f=await e(...u),g.forEach(y=>{y(f)})}})(),p};return 
s.flush=async()=>{if((r||i)&&o)return await o,s.flush()},s},ue=(e,t,r=!1)=>{let i=null,n=null,o=null,s=function(...a){n=a,o=this;let l=r&&!i;clearTimeout(i),i=setTimeout(()=>{i=null,r||(e.apply(o,n),n=null,o=null)},t),l&&(e.apply(o,n),n=null,o=null)};return s.cancel=()=>{clearTimeout(i),i=null,n=null,o=null},s.flush=()=>{if(i){clearTimeout(i);let a=n,l=o;i=null,n=null,o=null,e.apply(l,a)}},s},Lt=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(i){t&&(r?.error?r.error("Could not parse JSON",i):Gr.error("Could not parse JSON",i))}},Dt=(e,t)=>{let n=".netlify.app",o="agent-";if(!t)return`${o}${e.slice(0,6)}`;let a=`--${t}${n}`;if(a.length>55)return"";let l=60-a.length;if(l<=0)return"";if(l>=o.length+6){let p=Math.min(l-o.length,e.length);return`${o}${e.slice(0,p)}`}return e.slice(0,l)};var Yr=50*1024,et=(e,t=Yr)=>{if(!e||typeof e!="string"||e.length<=t)return e;let i=e.startsWith("```")?"\n... [truncated]\n```":"... [truncated]";return e.slice(0,t)+i};import{Buffer as Ut}from"buffer";import Br from"path";var Mt=_("repo"),Gt=async({config:e,isRetry:t})=>{Mt.info("Getting runner diffs");let r=await qr(),{hasChanges:i}=r,{status:n}=r;if(!i)return{hasChanges:!1};if(!t){let T=Wr(n);await Kr(T)}Mt.info("Changes after processing"),await rt();let o=await nt(n);await tt(o);let s={stdio:["ignore","pipe","pipe"]},a=await N("git",["diff","--staged"],s),l=String(a.stdout??"");if(i=!!l,!i)return{hasChanges:!1,ignored:o};let p=await N("git",["diff","--staged","--binary"],s),f=String(p.stdout??""),u,g;if(e.sha){if(!process.env.NETLIFY_LOCAL_MODE){process.env.NETLIFY_INTERNAL_GIT="1";try{await N("git",["commit","-m","Agent runner"])}finally{process.env.NETLIFY_INTERNAL_GIT="0"}}let T=await N("git",["diff",e.sha,"HEAD"],s);u=String(T.stdout??"");let E=await N("git",["diff",e.sha,"HEAD","--binary"],s),m=String(E.stdout??"");u!==m&&(g=Ut.from(m).toString("base64"))}let y={hasChanges:!0,diff:l,resultDiff:u,ignored:o};return l!==f&&(y.diffBinary=Ut.from(f).toString("base64")),g&&(y.resultDiffBinary=g),y},tt=async(e=[])=>{process.env.NETLIFY_INTERNAL_GIT="1";try{await N("git",["add",".",...e])}finally{process.env.NETLIFY_INTERNAL_GIT="0"}},rt=async()=>{let e=await N("git",["status","-s"]);return String(e.stdout??"")},jt=/.. (.+)?\.log$/,Hr=[jt],qr=async()=>{let e=await rt();return{hasChanges:(e.trim().length===0?[]:e.split(`
- `).filter(i=>Hr.some(o=>o instanceof RegExp?o.test(i):i===o)?!1:i[1]?.trim()!=="")).length!==0,status:e}},Yt=async()=>{let{stdout:e}=await N("git",["rev-parse","HEAD"]);return String(e??"").trim()},Bt=async()=>{let{stdout:e}=await N("git",["rev-list","--max-parents=0","HEAD"]);return String(e??"").trim()},nt=async e=>{e||=await rt();let t=[".netlify","node_modules","dist",".next","out",".nuxt",".output",".cache",".turbo",".parcel-cache","coverage",".nyc_output","storybook-static","public/build"],r=[];return e.split(`
- `).forEach(i=>{t.forEach(o=>{let s=i===`?? ${o}`,a=i.startsWith(`?? ${o}/`)||i.startsWith(`?? ${o}${Br.sep}`);(s||a)&&r.push(`:!${o}`)});let n=i.match(jt)?.[1];n&&r.push(`:!${n}.log`)}),r},it=async()=>{await N("git",["reset","--hard","HEAD"])},Wr=e=>{let t=e.split(`
- `).reduce((r,i)=>{if(!i)return r;let[n,o,,...s]=i,a=s.join(""),l=n.trim(),p=o.trim();return r[a]?r[a].change=p:r[a]={filePath:a,stage:l,change:p},r},{});return Object.values(t)},Kr=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push(N("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import Vr from"fs/promises";import Xr from"os";import Wt from"path";import me from"process";import zr from"readline";import ot from"path";import Jr from"fs/promises";var st=_("agent-output-utils");async function de({initialResult:e,agentName:t,hasError:r}){let i="",n=ot.join(process.cwd(),M,re);try{let o=await Jr.readFile(n,"utf-8");o&&(i=o,st.log(`Pulled result from ${ot.relative(process.cwd(),n)}`))}catch{st.log(`No results file found at ${ot.relative(process.cwd(),n)}`)}return i||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function pe({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,i=r?.replace(/\s+/g," ").trim().toLowerCase()||"",n="";return i?.includes("ai gateway is not available for your account")||i?.includes("ai gateway is not enabled for your account")?n="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":i?.includes("error when talking to gemini api")?n="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(i?.includes("connection closed prematurely")||i?.includes("499")&&t.toLowerCase().includes("gemini"))&&(n=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),i?.includes("request timed out")&&(n=`The ${t} API request's have timed out. Please try again or use a different available agent.`),i?.includes("network error")&&(n=`The ${t} agent is having network issues. Please try again or use a different available agent.`),i?.includes("503")&&(n=`The ${t} API is currently experiencing high load. 
Retrying automatically...`),n&&st.log(`Providing updated error messsage: ${n}, replacing original error: ${r}`),n||r||void 0}function fe(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error")||r?.includes("503"))}var G=_("runner_claude"),Ht="Claude Code",ge="claude-opus-4-5-20251101",qt=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,Zr=({catchError:e,runCmd:t,error:r,result:i,runnerName:n})=>(G.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!i,resultLength:i?i.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),i?(G.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:i}:{error:"Process completed with errors but result was captured",result:i}):(G.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function at({config:e,netlify:t,persistSteps:r,aiGateway:i,continueSession:n,priorAgentSessionId:o,cwd:s=me.cwd()}){let a=e,{accountType:l,prompt:p,modelVersionOverrides:f}=a,{model:u}=a,g="";if(i){let{token:h,url:d}=i;if(!h||!d)throw new Error("No token or url provided from AI Gateway");if(f?.claude){let c=f?.claude?.[l];if(c){if(!await i.isModelAvailableForProvider("anthropic",c))throw new Error(`Model override '${c}' is not available for anthropic provider`);u=c}}else if(u){if(!await i.isModelAvailableForProvider("anthropic",u))throw new Error(`Model '${u}' is not available for anthropic provider`)}else!!ge&&await i.isModelAvailableForProvider("anthropic",ge)?(u=ge,G.log(`Using default model: ${ge}`)):ge&&G.log(`Default model ${ge} is not available, proceeding without model specification`);me.env.ANTHROPIC_API_KEY=h,me.env.ANTHROPIC_BASE_URL=d}else if(!me.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let y=[],T=[],E={},m=0,I=0,R,S,P=[ie(s,"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose","--disallowed-tools","ExitPlanMode,AskUserQuestion",...u?["--model",u]:[],...n?["--continue"]:[],...n&&o?["--resume",o]:[],"-p",p],$=`${me.env.NVM_BIN}/node`;G.log(`Running ${$} ${P.join(" ")}`);let k=t.utils.run($,P,{all:!0,env:me.env,cwd:s,idleTimeout:ne});k.stdin?.end();let x=ue(()=>{r?.({steps:y,duration:I})},250),v=(h,d)=>{let c={...h,id:m};m+=1,T.push(c),y.push(c),d||x.flush(),x(),d&&x.flush()},D=zr.createInterface({input:k.all});return D.on("error",h=>{G.error("Readline interface error",{error:h.message,stack:h.stack})}),D.on("line",h=>{let d=null;try{d=JSON.parse(h)}catch{G.log("Could not parse line",h)}d?.session_id&&d.session_id!==g&&(g=d.session_id),Array.isArray(d?.message?.content)?d.message.content.forEach(c=>{switch(c.type){case"text":{c.text&&v({message:c.text});break}case"image":{typeof 
c.source=="object"&&c.source&&c.source.type==="base64"&&c.source.media_type?v({message:`![](data:${c.source.media_type};base64,${c.source.data})`}):G.log(`Unsupported image type ${c.source?.type}`,c.source);break}case"tool_use":{if(c.name==="Task"){let w=c.input?.description&&`\`${c.input.description}\``;v({title:[qt(c.name),w].filter(Boolean).join(" ")})}else c.id&&(E[c.id]=c);x.flush();break}case"tool_result":{let w=c.tool_use_id?E[c.tool_use_id]:void 0,ee;if(w){let X=w.input?.file_path&&Wt.relative(s,w.input.file_path),F=X&&`\`${X}\``;ee=[qt(w.name||""),F].filter(Boolean).join(" ")}let be=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(w?.name||""),te;if(typeof c.content=="string")te=c.content;else if(Array.isArray(c.content)){let X=[];c.content.forEach(F=>{F?.type==="text"&&typeof F.text=="string"?X.push(F.text):F?.type==="image"&&typeof F.source=="object"&&F.source?F.source.type==="base64"&&F.source.media_type?X.push(`![](data:${F.source.media_type};base64,${F.source.data})`):G.log(`Unsupported image type ${F.source.type}`,F.source):G.log(`Unsupported block type ${F?.type}`)}),te=X.join(`
+ `);return o.length>e.length*.8?e:o}import{execSync as dn}from"child_process";import tr from"fs/promises";import pn from"path";import Z from"process";import{getTracer as fn}from"@netlify/otel";import we from"process";var se=class extends Error{constructor(r,i,n){super(r);this.statusCode=i;this.userMessage=n;this.name="GracefulShutdownError"}},Tt=e=>e instanceof se;var be=we.env.NETLIFY_API_URL,Ne=we.env.NETLIFY_API_TOKEN,q=E("api"),Ce=()=>we.env.NETLIFY_LOCAL_MODE==="true",Te=async(e,t={})=>{if(!be||!Ne)throw new Error("No API URL or token");let r=new URL(e,be),i={...t,headers:{...t.headers,Authorization:`Bearer ${Ne}`}};we.env.AGENT_RUNNERS_DEBUG==="true"&&(i.headers["x-nf-debug-logging"]="true"),t.json&&(i.headers||={},i.headers["Content-Type"]="application/json",i.body=JSON.stringify(t.json));let n=await fetch(r,i),o=n.ok&&n.status<=299;if(we.env.AGENT_RUNNERS_DEBUG==="true")q.log(`Response headers for ${r}:`),n.headers.forEach((l,c)=>{q.log(` ${c}: ${l}`)});else{let l=n.headers.get("x-request-id")||n.headers.get("x-nf-request-id");q.log(`Request ID for ${r}: ${l||"N/A"}`)}if(o||q.error(`Got status ${n.status} for request ${r}`),t.raw){if(!o)throw new Error(`API request failed: ${n.status} ${n.statusText}`);return n}let s=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!o){let l=typeof s=="string"?s:JSON.stringify(s);throw n.status===404?new se(`API request failed: 404 - ${l}`,404,"The site associated with this agent run no longer exists."):n.status===403&&t.gracefulOn403?new se(`API request failed: 403 - ${l}`,403,"Credit limit reached. Please add more credits to continue using Agent Runners."):new Error(`API request failed: ${n.status} - ${l}`)}return s},It=e=>{q.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(be=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(Ne=e.constants.NETLIFY_API_TOKEN)},xt=()=>({apiUrl:be,token:Ne}),Ie=async(e,t)=>Ce()?(q.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):Te(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),Y=async(e,t,r)=>Ce()?(q.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):Te(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var vt=async(e,t)=>Ce()?(q.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):Te(`/api/v1/agent_runners/${e}/sessions/${t}`),Rt=(e,t,r)=>Te(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r},gracefulOn403:!0}),St=async(e,t)=>Ce()?(q.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):Te(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),Ke=async(e,t)=>{q.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var ae=E("ai_gateway"),Ve=null;var At=async()=>{if(Ve)return Ve;ae.log("Fetching available AI gateway providers");let e=await fetch(`${xt().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: 
${e.statusText}`);let t=await e.json();return Ve=t,ae.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},xr=async(e,t)=>{let i=(await At()).providers[e];if(!i)return ae.log(`Provider '${e}' not found`),!1;let n=i.models.includes(t);return ae.log(`Model validation for ${e}/${t}`,{isAvailable:n}),n},bt=async({netlify:e,config:t})=>{let r,i,n,o,s=e.constants?.SITE_ID;if(!s)throw new Error("No site id");let l=async()=>{clearTimeout(n),ae.log("Requesting AI gateway information");let c=await Rt(s,t.id,t.sessionId);if({token:r,url:o}=c,i=c.expires_at?c.expires_at*1e3:void 0,ae.log("Got AI gateway information",{token:!!r,expiresAt:i,url:o}),i){let u=i-Date.now()-6e4;u>0&&(n=setTimeout(()=>{l()},u))}};return await Promise.all([l(),At()]),{get url(){return o},get token(){return r},isModelAvailableForProvider:xr}};import W from"process";import J from"path";import Pe from"fs";import{fileURLToPath as Cr}from"url";import{createRequire as Pr}from"module";import{execa as Or,execaCommand as fi}from"execa";import{Transform as vr}from"stream";var Rr=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),Sr=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function Ar(){return Object.entries(process.env).filter(([e,t])=>!(!t||Rr.has(e)||Sr.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function V(e){if(typeof e!="string")return e;let t=Ar();if(t.length===0)return e;let r=e;return t.forEach(i=>{let n=new RegExp(br(i),"g");r=r.replace(n,"******")}),r}function br(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var le=class extends vr{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,i){let n=t.toString(),o=V(n);i(null,o)}};function Nt(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(i,n,o){let s=typeof i=="string"?V(i):i;return typeof n=="function"?t(s,n):t(s,n,o)},process.stderr.write=function(i,n,o){let s=typeof i=="string"?V(i):i;return typeof n=="function"?r(s,n):r(s,n,o)}}var xe=null,Ct=e=>(xe&&xe.destroy(),xe=new z({totalAllowedTime:e}),xe),Pt=()=>xe;var z=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,i)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let n=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),o=null,s=null;i!==void 0&&(s=new Promise((l,c)=>{o=setTimeout(()=>{c(new Error(`${t} stage exceeded its maximum duration of ${i}ms`))},i)}));try{return s?await Promise.race([r(),s]):await r()}finally{n(),o&&clearTimeout(o)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var Ot="netlify-agent-runner-context.md",Je="task-history",Xe="netlify-context",M=".netlify",re="results.md",ze="assets";var ne=1800*1e3;var $t={name:"@netlify/agent-runner-cli",type:"module",version:"1.63.0",description:"CLI tool for running Netlify agents",main:"./dist/index.js",types:"./dist/index.d.ts",exports:"./dist/index.js",bin:{"agent-runner-cli":"./dist/bin.js","agent-runner-cli-local":"./dist/bin-local.js"},files:["dist/**/*.js","dist/**/*.d.ts","patches","scripts"],scripts:{build:"tsup",dev:"tsup --watch",prepare:"husky install node_modules/@netlify/eslint-config-node/.husky/",prepublishOnly:"npm ci && npm test",prepack:"npm run build",test:"run-s build format test:dev",format:"run-s build format:check-fix:*","format:ci":"run-s build format:check:*","format:check-fix:lint":"run-e format:check:lint format:fix:lint","format:check:lint":"cross-env-shell eslint $npm_package_config_eslint","format:fix:lint":"cross-env-shell eslint --fix $npm_package_config_eslint","format:check-fix:prettier":"run-e format:check:prettier format:fix:prettier","format:check:prettier":"cross-env-shell prettier --check $npm_package_config_prettier","format:fix:prettier":"cross-env-shell prettier --write $npm_package_config_prettier","test:dev":"run-s build test:dev:*","test:ci":"run-s build test:ci:*","test:dev:vitest":"LOG=0 vitest --exclude '**/integration/**'","test:ci:vitest":"LOG=0 c8 -r lcovonly -r text -r json vitest --exclude '**/integration/**'","test:integration":"vitest run test/integration/","test:integration:codex":"vitest run test/integration/codex.test.ts","test:integration:claude":"vitest run test/integration/claude.test.ts","test:integration:gemini":"vitest run test/integration/gemini.test.ts",postinstall:"node scripts/postinstall.js"},config:{eslint:'--cache --format=codeframe --max-warnings=0 "{src,scripts,test,.github}/**/*.{js,ts,md,html}"',prettier:'--ignore-path .gitignore 
--loglevel=warn "{src,scripts,test,.github}/**/*.{js,ts,md,yml,json,html}" "*.{js,ts,yml,json,html}" ".*.{js,ts,yml,json,html}" "!**/package-lock.json" "!package-lock.json"'},keywords:[],license:"MIT",repository:"netlify/agent-runner-cli",bugs:{url:"https://github.com/netlify/agent-runner-cli/issues"},author:"Netlify Inc.",directories:{test:"test"},devDependencies:{"@commitlint/cli":"^20.0.0","@commitlint/config-conventional":"^20.0.0","@eslint/compat":"^2.0.0","@eslint/js":"^9.35.0","@netlify/eslint-config-node":"^7.0.1","@types/node":"^24.5.0","@typescript-eslint/eslint-plugin":"^8.0.0","@typescript-eslint/parser":"^8.0.0","@vitest/eslint-plugin":"^1.6.6",c8:"^10.0.0","eslint-config-prettier":"^10.1.8","eslint-plugin-n":"^17.0.0",husky:"^9.0.0","patch-package":"^8.0.0",tsup:"^8.5.0",typescript:"^5.0.0","typescript-eslint":"^8.44.0",vitest:"^4.0.16"},dependencies:{"@anthropic-ai/claude-code":"2.1.29","@google/gemini-cli":"0.25.2","@netlify/otel":"^5.1.1","@openai/codex":"0.93.0","@opentelemetry/exporter-trace-otlp-grpc":"^0.208.0",execa:"^9.6.1",minimist:"^1.2.8"}};var $r=Cr(import.meta.url),Fr=J.dirname($r),Dr=Pr(import.meta.url),ce=E("shell"),Ze=new Set,kr={preferLocal:!0},N=(e,t,r)=>{let[i,n]=Lr(t,r),o={...kr,...n},s=Or(e,i,o);Ur(s,o),Gr(s);let l=r?.idleTimeout;return l&&l>0&&Mr(s,l),s};var Lr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},Ur=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(W.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new le).pipe(W.stdout),e.stdout?.pipe(new le).pipe(W.stdout),e.stderr?.pipe(new le).pipe(W.stderr);return}e.stdout?.pipe(W.stdout),e.stderr?.pipe(W.stderr)},Qe=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(W.kill(-e.pid,t),ce.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return ce.error("Error killing process:",r),!1}},Ft=e=>Qe(e,"SIGKILL"),Mr=(e,t)=>{let r=null,i=()=>{ce.log(`Process ${e.pid} killed due to idle timeout (no output for ${t}ms)`),Qe(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(ce.log(`Force killing idle process ${e.pid}`),Ft(e))},5e3)},n=()=>{r&&clearTimeout(r),r=setTimeout(i,t)};n(),e.stdout?.on("data",n),e.stderr?.on("data",n);let o=()=>{r&&(clearTimeout(r),r=null)};e.on("exit",o),e.on("error",o)},Gr=e=>{Ze.add(e);let t=Pt();if(t){let r=t.onTimesUp(()=>{ce.log(`Global timer expired, killing process ${e.pid}`),Qe(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(ce.log(`Force killing process ${e.pid} after timeout`),Ft(e))},5e3)});e.on("exit",()=>{Ze.delete(e),r()}),e.on("error",()=>{Ze.delete(e),r()})}};function Oe(e,t){return!!ie(e,t)}function ie(e,t){if(!W.env.NETLIFY_LOCAL_MODE)try{let n=Dr.resolve($t.name),o=J.dirname(n);for(;o!==J.dirname(o);){let s=J.dirname(o);if(J.basename(s)==="node_modules"){let l=J.join(s,".bin",t);if(Pe.existsSync(l))return l;break}o=s}}catch(n){console.error("Could not resolve package.json",n)}if(W.env.NODE_PATH){let n=J.join(W.env.NODE_PATH,".bin",t);if(Pe.existsSync(n))return n}let r=J.join(e,"node_modules",".bin",t);if(Pe.existsSync(r))return r;let i=J.join(Fr,"..","node_modules",".bin",t);if(Pe.existsSync(i))return i}var jr=E("utils"),Yr=e=>new Promise(t=>{setTimeout(t,e)}),Dt=(e,t=3e3)=>{let r=!1,i=null,n=[],o=null,s=(...l)=>{if(r)return i=l,new Promise(f=>{n.push(f)});r=!0;let c,u=new Promise(f=>{c=f});return o=(async()=>{await Promise.resolve();let f=await e(...l);for(c(f);;){if(await Yr(t),!i)return r=!1,o=null,f;let d=i,m=n;i=null,n=[],f=await e(...d),m.forEach(y=>{y(f)})}})(),u};return 
s.flush=async()=>{if((r||i)&&o)return await o,s.flush()},s},ue=(e,t,r=!1)=>{let i=null,n=null,o=null,s=function(...l){n=l,o=this;let c=r&&!i;clearTimeout(i),i=setTimeout(()=>{i=null,r||(e.apply(o,n),n=null,o=null)},t),c&&(e.apply(o,n),n=null,o=null)};return s.cancel=()=>{clearTimeout(i),i=null,n=null,o=null},s.flush=()=>{if(i){clearTimeout(i);let l=n,c=o;i=null,n=null,o=null,e.apply(c,l)}},s},kt=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(i){t&&(r?.error?r.error("Could not parse JSON",i):jr.error("Could not parse JSON",i))}},Lt=(e,t)=>{let n=".netlify.app",o="agent-";if(!t)return`${o}${e.slice(0,6)}`;let l=`--${t}${n}`;if(l.length>55)return"";let c=60-l.length;if(c<=0)return"";if(c>=o.length+6){let u=Math.min(c-o.length,e.length);return`${o}${e.slice(0,u)}`}return e.slice(0,c)};var Br=50*1024,et=(e,t=Br)=>{if(!e||typeof e!="string"||e.length<=t)return e;let i=e.startsWith("```")?"\n... [truncated]\n```":"... [truncated]";return e.slice(0,t)+i};import{Buffer as Ut}from"buffer";import Hr from"path";var Mt=E("repo"),Gt=async({config:e,isRetry:t})=>{Mt.info("Getting runner diffs");let r=await Wr(),{hasChanges:i}=r,{status:n}=r;if(!i)return{hasChanges:!1};if(!t){let T=Kr(n);await Vr(T)}Mt.info("Changes after processing"),await rt();let o=await nt(n);await tt(o);let s={stdio:["ignore","pipe","pipe"]},l=await N("git",["diff","--staged"],s),c=String(l.stdout??"");if(i=!!c,!i)return{hasChanges:!1,ignored:o};let u=await N("git",["diff","--staged","--binary"],s),f=String(u.stdout??""),d,m;if(e.sha){if(!process.env.NETLIFY_LOCAL_MODE){process.env.NETLIFY_INTERNAL_GIT="1";try{await N("git",["commit","-m","Agent runner"])}finally{process.env.NETLIFY_INTERNAL_GIT="0"}}let T=await N("git",["diff",e.sha,"HEAD"],s);d=String(T.stdout??"");let _=await N("git",["diff",e.sha,"HEAD","--binary"],s),g=String(_.stdout??"");d!==g&&(m=Ut.from(g).toString("base64"))}let y={hasChanges:!0,diff:c,resultDiff:d,ignored:o};return c!==f&&(y.diffBinary=Ut.from(f).toString("base64")),m&&(y.resultDiffBinary=m),y},tt=async(e=[])=>{process.env.NETLIFY_INTERNAL_GIT="1";try{await N("git",["add",".",...e])}finally{process.env.NETLIFY_INTERNAL_GIT="0"}},rt=async()=>{let e=await N("git",["status","-s"]);return String(e.stdout??"")},jt=/.. (.+)?\.log$/,qr=[jt],Wr=async()=>{let e=await rt();return{hasChanges:(e.trim().length===0?[]:e.split(`
+ `).filter(i=>qr.some(o=>o instanceof RegExp?o.test(i):i===o)?!1:i[1]?.trim()!=="")).length!==0,status:e}},Yt=async()=>{let{stdout:e}=await N("git",["rev-parse","HEAD"]);return String(e??"").trim()},Bt=async()=>{let{stdout:e}=await N("git",["rev-list","--max-parents=0","HEAD"]);return String(e??"").trim()},nt=async e=>{e||=await rt();let t=[".netlify","node_modules","dist",".next","out",".nuxt",".output",".cache",".turbo",".parcel-cache","coverage",".nyc_output","storybook-static","public/build"],r=[];return e.split(`
+ `).forEach(i=>{t.forEach(o=>{let s=i===`?? ${o}`,l=i.startsWith(`?? ${o}/`)||i.startsWith(`?? ${o}${Hr.sep}`);(s||l)&&r.push(`:!${o}`)});let n=i.match(jt)?.[1];n&&r.push(`:!${n}.log`)}),r},it=async()=>{await N("git",["reset","--hard","HEAD"])},Kr=e=>{let t=e.split(`
+ `).reduce((r,i)=>{if(!i)return r;let[n,o,,...s]=i,l=s.join(""),c=n.trim(),u=o.trim();return r[l]?r[l].change=u:r[l]={filePath:l,stage:c,change:u},r},{});return Object.values(t)},Vr=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push(N("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import Xr from"fs/promises";import zr from"os";import Wt from"path";import me from"process";import Zr from"readline";import ot from"path";import Jr from"fs/promises";var st=E("agent-output-utils");async function de({initialResult:e,agentName:t,hasError:r}){let i="",n=ot.join(process.cwd(),M,re);try{let o=await Jr.readFile(n,"utf-8");o&&(i=o,st.log(`Pulled result from ${ot.relative(process.cwd(),n)}`))}catch{st.log(`No results file found at ${ot.relative(process.cwd(),n)}`)}return i||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function pe({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,i=r?.replace(/\s+/g," ").trim().toLowerCase()||"",n="";return i?.includes("ai gateway is not available for your account")||i?.includes("ai gateway is not enabled for your account")?n="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":i?.includes("error when talking to gemini api")?n="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(i?.includes("connection closed prematurely")||i?.includes("499")&&t.toLowerCase().includes("gemini"))&&(n=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),i?.includes("request timed out")&&(n=`The ${t} API request's have timed out. Please try again or use a different available agent.`),i?.includes("network error")&&(n=`The ${t} agent is having network issues. Please try again or use a different available agent.`),i?.includes("503")&&(n=`The ${t} API is currently experiencing high load. 
Retrying automatically...`),n&&st.log(`Providing updated error messsage: ${n}, replacing original error: ${r}`),n||r||void 0}function fe(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error")||r?.includes("503"))}var G=E("runner_claude"),Ht="Claude Code",ge="claude-opus-4-5-20251101",qt=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,Qr=({catchError:e,runCmd:t,error:r,result:i,runnerName:n})=>(G.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!i,resultLength:i?i.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),i?(G.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:i}:{error:"Process completed with errors but result was captured",result:i}):(G.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function at({config:e,netlify:t,persistSteps:r,aiGateway:i,continueSession:n,priorAgentSessionId:o,cwd:s=me.cwd()}){let l=e,{accountType:c,prompt:u,modelVersionOverrides:f}=l,{model:d}=l,m="";if(i){let{token:h,url:p}=i;if(!h||!p)throw new Error("No token or url provided from AI Gateway");if(f?.claude){let a=f?.claude?.[c];if(a){if(!await i.isModelAvailableForProvider("anthropic",a))throw new Error(`Model override '${a}' is not available for anthropic provider`);d=a}}else if(d){if(!await i.isModelAvailableForProvider("anthropic",d))throw new Error(`Model '${d}' is not available for anthropic provider`)}else!!ge&&await i.isModelAvailableForProvider("anthropic",ge)?(d=ge,G.log(`Using default model: ${ge}`)):ge&&G.log(`Default model ${ge} is not available, proceeding without model specification`);me.env.ANTHROPIC_API_KEY=h,me.env.ANTHROPIC_BASE_URL=p}else if(!me.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let y=[],T=[],_={},g=0,I=0,R,S,P=[ie(s,"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose","--disallowed-tools","ExitPlanMode,AskUserQuestion",...d?["--model",d]:[],...n?["--continue"]:[],...n&&o?["--resume",o]:[],"-p",u],O=`${me.env.NVM_BIN}/node`;G.log(`Running ${O} ${P.join(" ")}`);let D=t.utils.run(O,P,{all:!0,env:me.env,cwd:s,idleTimeout:ne});D.stdin?.end();let x=ue(()=>{r?.({steps:y,duration:I})},250),v=(h,p)=>{let a={...h,id:g};g+=1,T.push(a),y.push(a),p||x.flush(),x(),p&&x.flush()},L=Zr.createInterface({input:D.all});return L.on("error",h=>{G.error("Readline interface error",{error:h.message,stack:h.stack})}),L.on("line",h=>{let p=null;try{p=JSON.parse(h)}catch{G.log("Could not parse line",h)}p?.session_id&&p.session_id!==m&&(m=p.session_id),Array.isArray(p?.message?.content)?p.message.content.forEach(a=>{switch(a.type){case"text":{a.text&&v({message:a.text});break}case"image":{typeof 
a.source=="object"&&a.source&&a.source.type==="base64"&&a.source.media_type?v({message:`![](data:${a.source.media_type};base64,${a.source.data})`}):G.log(`Unsupported image type ${a.source?.type}`,a.source);break}case"tool_use":{if(a.name==="Task"){let w=a.input?.description&&`\`${a.input.description}\``;v({title:[qt(a.name),w].filter(Boolean).join(" ")})}else a.id&&(_[a.id]=a);x.flush();break}case"tool_result":{let w=a.tool_use_id?_[a.tool_use_id]:void 0,ee;if(w){let X=w.input?.file_path&&Wt.relative(s,w.input.file_path),$=X&&`\`${X}\``;ee=[qt(w.name||""),$].filter(Boolean).join(" ")}let Ae=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(w?.name||""),te;if(typeof a.content=="string")te=a.content;else if(Array.isArray(a.content)){let X=[];a.content.forEach($=>{$?.type==="text"&&typeof $.text=="string"?X.push($.text):$?.type==="image"&&typeof $.source=="object"&&$.source?$.source.type==="base64"&&$.source.media_type?X.push(`![](data:${$.source.media_type};base64,${$.source.data})`):G.log(`Unsupported image type ${$.source.type}`,$.source):G.log(`Unsupported block type ${$?.type}`)}),te=X.join(`
 
- `)}be&&te&&(te=`\`\`\`
+ `)}Ae&&te&&(te=`\`\`\`
  ${te.trim()}
- \`\`\``),v({title:ee,message:te},!0);break}case"thinking":{c.thinking&&v({title:"Thinking",message:c.thinking},!0);break}default:G.log(`Message content type is not supported ${c.type}`,c)}}):d?.type==="result"&&(I=d.duration_ms||0,d.is_error?S=d.result:R=d.result,[T,y].forEach(c=>{c[c.length-1]?.message===R&&c.pop()}))}),await k.catch(h=>{({error:S,result:R}=Zr({catchError:h,runCmd:k,error:S,result:R,runnerName:"Claude"}))}),D.close(),x.flush(),{steps:T,duration:I,result:await de({initialResult:R,agentName:Ht,hasError:!!S}),error:pe({error:S,agentName:Ht}),isRetryableError:fe(S),agentSessionId:g}}var Kt=async()=>{let e=Wt.join(Xr.homedir(),".claude");await Vr.rm(e,{recursive:!0,force:!0})};import ve from"fs/promises";import Vt from"os";import Fe from"path";import oe from"process";import Qr from"readline";var j=_("runner_codex"),Jt="Codex CLI",he="",en=({catchError:e,runCmd:t,error:r,result:i,runnerName:n})=>(j.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!i,resultLength:i?i.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),i?(j.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:i}:{error:"Process completed with errors but result was captured",result:i}):(j.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function lt({config:e,netlify:t,persistSteps:r=void 0,sendSteps:i=void 0,aiGateway:n,cwd:o=oe.cwd()}){let{accountType:s,prompt:a,modelVersionOverrides:l}=e,{model:p}=e;if(n){let{token:d,url:c}=n;if(!d||!c)throw new Error("No token or url provided from AI Gateway");if(l?.codex){let w=l?.codex?.[s];if(w){if(!await n.isModelAvailableForProvider("openai",w))throw new Error(`Model override '${w}' is not available for openai provider`);p=w}}else if(p){if(!await n.isModelAvailableForProvider("openai",p))throw new Error(`Model '${p}' is not available for openai provider`)}else!!he&&await n.isModelAvailableForProvider("openai",he)?(p=he,j.log(`Using default model: ${he}`)):he&&j.log(`Default model ${he} is not available, proceeding without model specification`);oe.env.OPENAI_API_KEY=d,oe.env.OPENAI_BASE_URL=c}else if(!oe.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let f=[],u=[],g=[],y={},T=0,E=0,m,I,R=`${oe.env.NVM_BIN}/node`,S=Fe.join(Vt.homedir(),".codex"),P=Fe.join(S,"config.toml"),$=Fe.join(S,"auth.json");try{await ve.mkdir(S,{recursive:!0});let d={OPENAI_API_KEY:oe.env.OPENAI_API_KEY};await ve.writeFile($,JSON.stringify(d,null,2),"utf-8"),j.log("Created Codex auth.json file");let c="";try{c=await ve.readFile(P,"utf-8")}catch{}c.includes("web_search_request")||(c.includes("[features]")?c=c.replace(/\[features\]/,`[features]
- web_search_request = true`):c+=`
+ \`\`\``),v({title:ee,message:te},!0);break}case"thinking":{a.thinking&&v({title:"Thinking",message:a.thinking},!0);break}default:G.log(`Message content type is not supported ${a.type}`,a)}}):p?.type==="result"&&(I=p.duration_ms||0,p.is_error?S=p.result:R=p.result,[T,y].forEach(a=>{a[a.length-1]?.message===R&&a.pop()}))}),await D.catch(h=>{({error:S,result:R}=Qr({catchError:h,runCmd:D,error:S,result:R,runnerName:"Claude"}))}),L.close(),x.flush(),{steps:T,duration:I,result:await de({initialResult:R,agentName:Ht,hasError:!!S}),error:pe({error:S,agentName:Ht}),isRetryableError:fe(S),agentSessionId:m}}var Kt=async()=>{let e=Wt.join(zr.homedir(),".claude");await Xr.rm(e,{recursive:!0,force:!0})};import ve from"fs/promises";import Jt from"os";import $e from"path";import oe from"process";import en from"readline";var j=E("runner_codex"),Vt="Codex CLI",he="",tn=({catchError:e,runCmd:t,error:r,result:i,runnerName:n})=>(j.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!i,resultLength:i?i.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),i?(j.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:i}:{error:"Process completed with errors but result was captured",result:i}):(j.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function lt({config:e,netlify:t,persistSteps:r=void 0,sendSteps:i=void 0,aiGateway:n,cwd:o=oe.cwd()}){let{accountType:s,prompt:l,modelVersionOverrides:c}=e,{model:u}=e;if(n){let{token:p,url:a}=n;if(!p||!a)throw new Error("No token or url provided from AI Gateway");if(c?.codex){let w=c?.codex?.[s];if(w){if(!await n.isModelAvailableForProvider("openai",w))throw new Error(`Model override '${w}' is not available for openai provider`);u=w}}else if(u){if(!await n.isModelAvailableForProvider("openai",u))throw new Error(`Model '${u}' is not available for openai provider`)}else!!he&&await n.isModelAvailableForProvider("openai",he)?(u=he,j.log(`Using default model: ${he}`)):he&&j.log(`Default model ${he} is not available, proceeding without model specification`);oe.env.OPENAI_API_KEY=p,oe.env.OPENAI_BASE_URL=a}else if(!oe.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let f=[],d=[],m=[],y={},T=0,_=0,g,I,R=`${oe.env.NVM_BIN}/node`,S=$e.join(Jt.homedir(),".codex"),P=$e.join(S,"config.toml"),O=$e.join(S,"auth.json");try{await ve.mkdir(S,{recursive:!0});let p={OPENAI_API_KEY:oe.env.OPENAI_API_KEY};await ve.writeFile(O,JSON.stringify(p,null,2),"utf-8"),j.log("Created Codex auth.json file");let a="";try{a=await ve.readFile(P,"utf-8")}catch{}a.includes("web_search_request")||(a.includes("[features]")?a=a.replace(/\[features\]/,`[features]
16
+ web_search_request = true`):a+=`
17
17
  [features]
18
18
  web_search_request = true
19
- `,await ve.writeFile(P,c,"utf-8"),j.log("Updated Codex config with web_search_request enabled"))}catch(d){throw j.warn("Failed to setup Codex config and credentials",{error:d.message}),new Error(`Codex setup failed: ${d.message}`)}let k=[ie(o,"codex"),"exec","--yolo","--json","--enable","web_search_request",...p?["--model",p]:[],a].filter(Boolean);j.log(`Running ${R} ${k.join(" ")}`);let x=t.utils.run(R,k,{all:!0,cwd:o,env:{...oe.env},idleTimeout:ne}),v=ue(()=>{r?.({steps:f,duration:E}),i?.({steps:u,duration:E}),u=[]},250),D=(d,c)=>{d.id=T,T+=1,g.push(d),f.push(d),u.push(d),c||v.flush(),v(),c&&v.flush()},h=Qr.createInterface({input:x.all});return h.on("error",d=>{j.error("Readline interface error",{error:d.message,stack:d.stack})}),h.on("line",d=>{let c=null;try{c=JSON.parse(d)}catch{j.log("Could not parse line",d);return}if(c?.duration_ms&&(E=c.duration_ms),c?.type==="item.started"&&c?.item?.type==="command_execution")y[c.item.id]=c.item;else if(c?.type==="item.completed"&&c?.item?.type==="command_execution"){let w=rn(c.item);w&&D(w,!0)}else if(c?.type==="item.completed"&&c?.item?.type==="reasoning"){let w={title:"Reasoning",message:c.item.text};D(w,!0)}else if(c?.type==="local_shell_call")y[c.call_id]=c;else if(c?.type==="local_shell_call_output"){let w=nn(y[c.call_id],c);w&&D(w,!0)}else c?.type==="message"&&c.role==="assistant"?m=c.content.map(w=>w.text).join(`
20
- `):c?.type==="message"&&c.role==="system"&&(I=c.content.map(w=>w.text).join(`
21
- `))}),await x.catch(d=>{let c=en({catchError:d,runCmd:x,error:I,result:m,runnerName:"Codex"});I=c.error,m=c.result}),h.close(),v.flush(),{steps:g,duration:E,result:await de({initialResult:m,agentName:Jt,hasError:!!I}),error:pe({error:I,agentName:Jt}),isRetryableError:fe(I)}}var Xt=async()=>{let e=Fe.join(Vt.homedir(),".codex");await ve.rm(e,{recursive:!0,force:!0})},tn=new Set(["bash","-lc"]),rn=e=>{if(!e||e.type!=="command_execution")return null;let t=e.command;t.startsWith("bash -lc ")&&(t=t.replace(/^bash -lc ['"]/,"").replace(/['"]$/,""));let r=`Running \`${t}\``,i=e.aggregated_output?.trim();return i&&(i=`\`\`\`
19
+ `,await ve.writeFile(P,a,"utf-8"),j.log("Updated Codex config with web_search_request enabled"))}catch(p){throw j.warn("Failed to setup Codex config and credentials",{error:p.message}),new Error(`Codex setup failed: ${p.message}`)}let D=[ie(o,"codex"),"exec","--yolo","--json","--enable","web_search_request",...u?["--model",u]:[],l].filter(Boolean);j.log(`Running ${R} ${D.join(" ")}`);let x=t.utils.run(R,D,{all:!0,cwd:o,env:{...oe.env},idleTimeout:ne}),v=ue(()=>{r?.({steps:f,duration:_}),i?.({steps:d,duration:_}),d=[]},250),L=(p,a)=>{p.id=T,T+=1,m.push(p),f.push(p),d.push(p),a||v.flush(),v(),a&&v.flush()},h=en.createInterface({input:x.all});return h.on("error",p=>{j.error("Readline interface error",{error:p.message,stack:p.stack})}),h.on("line",p=>{let a=null;try{a=JSON.parse(p)}catch{j.log("Could not parse line",p);return}if(a?.duration_ms&&(_=a.duration_ms),a?.type==="item.started"&&a?.item?.type==="command_execution")y[a.item.id]=a.item;else if(a?.type==="item.completed"&&a?.item?.type==="command_execution"){let w=nn(a.item);w&&L(w,!0)}else if(a?.type==="item.completed"&&a?.item?.type==="reasoning"){let w={title:"Reasoning",message:a.item.text};L(w,!0)}else if(a?.type==="local_shell_call")y[a.call_id]=a;else if(a?.type==="local_shell_call_output"){let w=on(y[a.call_id],a);w&&L(w,!0)}else a?.type==="message"&&a.role==="assistant"?g=a.content.map(w=>w.text).join(`
20
+ `):a?.type==="message"&&a.role==="system"&&(I=a.content.map(w=>w.text).join(`
21
+ `))}),await x.catch(p=>{let a=tn({catchError:p,runCmd:x,error:I,result:g,runnerName:"Codex"});I=a.error,g=a.result}),h.close(),v.flush(),{steps:m,duration:_,result:await de({initialResult:g,agentName:Vt,hasError:!!I}),error:pe({error:I,agentName:Vt}),isRetryableError:fe(I)}}var Xt=async()=>{let e=$e.join(Jt.homedir(),".codex");await ve.rm(e,{recursive:!0,force:!0})},rn=new Set(["bash","-lc"]),nn=e=>{if(!e||e.type!=="command_execution")return null;let t=e.command;t.startsWith("bash -lc ")&&(t=t.replace(/^bash -lc ['"]/,"").replace(/['"]$/,""));let r=`Running \`${t}\``,i=e.aggregated_output?.trim();return i&&(i=`\`\`\`
22
22
  ${i}
23
23
  \`\`\``),e.status==="failed"&&e.exit_code!==0&&(i=i?`${i}
24
24
 
25
- *Exit code: ${e.exit_code}*`:`*Command failed with exit code: ${e.exit_code}*`),{title:r,message:i}},nn=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(o=>!tn.has(o)),i=r?`Running \`${r.join(" ")}\``:void 0,n;try{n=JSON.parse(t.output).output?.trim(),n&&(n=`\`\`\`
25
+ *Exit code: ${e.exit_code}*`:`*Command failed with exit code: ${e.exit_code}*`),{title:r,message:i}},on=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(o=>!rn.has(o)),i=r?`Running \`${r.join(" ")}\``:void 0,n;try{n=JSON.parse(t.output).output?.trim(),n&&(n=`\`\`\`
26
26
  ${n.trim()}
27
- \`\`\``)}catch(o){j.error("Could not decode outputMsg",o,t.output)}return{title:i,message:n}};import Oe from"fs/promises";import Zt from"os";import ke from"path";import ye from"process";import on from"readline";var B=_("runner_gemini"),zt="Gemini CLI",_e="",sn=({catchError:e,runCmd:t,error:r,result:i,runnerName:n})=>(B.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!i,resultLength:i?i.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),i?(B.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:i}:{error:"Process completed with errors but result was captured",result:i}):(B.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),an={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"},ln=async()=>{let e=ke.join(Zt.homedir(),".gemini"),t=ke.join(e,"settings.json");try{await Oe.mkdir(e,{recursive:!0});let r={};try{let i=await Oe.readFile(t,"utf-8");r=JSON.parse(i)}catch{B.log("Creating new Gemini CLI settings file")}r.general||(r.general={}),r.general.previewFeatures||(r.general.previewFeatures=!0),r.model||(r.model={}),r.model.compressionThreshold!==.3&&(r.model.compressionThreshold=.3),await Oe.writeFile(t,JSON.stringify(r,null,2),"utf-8"),B.log("Configured Gemini CLI settings (preview features and compression threshold)")}catch(r){B.error("Failed to ensure Gemini CLI settings",{error:r.message})}};async function ct({config:e,netlify:t,persistSteps:r=void 0,sendSteps:i=void 0,aiGateway:n,cwd:o=ye.cwd()}){let{accountType:s,prompt:a,modelVersionOverrides:l}=e,{model:p}=e;if(await ln(),n){let{token:h,url:d}=n;if(!h||!d)throw new Error("No token or url provided from AI Gateway");if(l?.gemini){let c=l?.gemini?.[s];if(c){if(!await n.isModelAvailableForProvider("gemini",c))throw new Error(`Model override '${c}' is not available for gemini provider`);p=c}}if(!p)!!_e&&await n.isModelAvailableForProvider("gemini",_e)?(p=_e,B.log(`Using default model: ${_e}`)):_e&&B.log(`Default model ${_e} is not available, proceeding without model specification`);else if(p&&!l?.gemini?.[s]&&!await n.isModelAvailableForProvider("gemini",p))throw new Error(`Model '${p}' is not available for gemini provider`);ye.env.GEMINI_API_KEY=h,ye.env.GOOGLE_GEMINI_BASE_URL=d}else if(!ye.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let f=[],u=[],g=[],y={},T=0,E=0,m,I,R=[ie(o,"gemini"),...p?["--model",p]:[],"--yolo","--output-format","stream-json","-p",a],S=`${ye.env.NVM_BIN}/node`;B.log(`Running ${S} ${R.join(" ")}`);let P=t.utils.run(S,R,{all:!0,env:ye.env,cwd:o,idleTimeout:ne});P.stdin?.end();let $=ue(()=>{r?.({steps:f,duration:E}),i?.({steps:u,duration:E}),u=[]},250),k=(h,d)=>{h.id=T,T+=1,g.push(h),f.push(h),u.push(h),d||$.flush(),$(),d&&$.flush()},x=on.createInterface({input:P.all});x.on("error",h=>{B.error("Readline interface error",{error:h.message,stack:h.stack})});let v="",D=()=>{v&&k({message:v.trim()}),v=""};return x.on("line",h=>{let d=null;try{if(h.startsWith("[API Error")){let c=h.match(/\[api error: (.+?)]$/i)?.[1];d={type:"error",value:Lt(c,!1)?.error?.message||c||"Gemini encountered error"}}else d=JSON.parse(h)}catch{return}if(d)switch(["message","result"].includes(d.type)||D(),d.type){case"message":{d.role!=="user"&&d.content&&(v+=d.content);break}case"tool_use":{let c=an[d.tool_name]??d.tool_name,w=d.parameters?.file_path,ee=w&&ke.relative(o,w),be=d.parameters?.command,X={title:[c,ee&&`\`${ee}\``,be&&`\`${be}\``].filter(Boolean).join(" ")};y[d.tool_id]=X,$.flush();break}case"tool_result":{let c=y[d.tool_id];c&&(d.output&&(c.message=`\`\`\`
28
- ${d.output.trim()}
29
- \`\`\``),k(c,!0));break}case"result":{E=d.stats?.duration_ms,d.status==="error"?I=d.error?.message:m=v.trim();break}case"error":{I=d.error;break}case"finished":break;default:{B.warn("Unhandled message type:",d.type);break}}}),await P.catch(h=>{({error:I,result:m}=sn({catchError:h,runCmd:P,error:I,result:m,runnerName:"Gemini"}))}),x.close(),$.flush(),{steps:g,duration:E,result:await de({initialResult:m,agentName:zt,hasError:!!I}),error:pe({error:I,agentName:zt}),isRetryableError:fe(I)}}var Qt=async()=>{let e=ke.join(Zt.homedir(),".gemini");await Oe.rm(e,{recursive:!0,force:!0})};var cn={codex:{runner:lt,clean:Xt},claude:{runner:at,clean:Kt},gemini:{runner:ct,clean:Qt}},er=cn;var Le=_("init_stage"),rr=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:i})=>await A(pn(),"init-stage",async n=>{let o=performance.now();n?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.runnerVersion":i||"unknown"});let s=er[e.runner];if(!s)throw n?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let a=mn({apiToken:r});It(a);let l=e.useGateway?await At({netlify:a,config:e}):void 0;n?.setAttributes({"init.aiGateway.created":!!l});let p=5*1024,f=kt(async({steps:T=[],duration:E})=>{let m=T.map(I=>{let R=I.title?et(J(I.title),p):void 0,S=I.message?et(J(I.message)):void 0;return{...I,title:R,message:S}});T.length=0;try{return await Y(e.id,e.sessionId,{steps:m,duration:E})}catch(I){Le.error("persistSteps failed",{error:I?.message||I})}},t);Le.info("Adding build files to stage");let u=await nt();await tt(u),Z.env.NETLIFY_LOCAL_MODE||await fn();let g;e.hasRepo?e.sha?(g=e.sha,n?.setAttributes({"init.sha.source":"provided"})):(g=await Yt(),await Ie(e.id,{sha:g}),n?.setAttributes({"init.sha.source":"current_commit"})):(g=await Bt(),n?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let y=performance.now()-o;return n?.setAttributes({"init.sha":g||"unknown","init.duration.ms":y,"init.status":"success"}),{aiGateway:l,context:a,persistSteps:f,runner:s,sha:g}}),fn=async()=>{let e="/usr/bin/git";try{e=un("which git").toString().trim()||e}catch{}let t="/tmp/netlify-git-wrapper",r=dn.join(t,"git"),i=`#!/bin/bash
27
+ \`\`\``)}catch(o){j.error("Could not decode outputMsg",o,t.output)}return{title:i,message:n}};import Fe from"fs/promises";import Zt from"os";import De from"path";import ye from"process";import sn from"readline";var B=E("runner_gemini"),zt="Gemini CLI",Ee="",an=({catchError:e,runCmd:t,error:r,result:i,runnerName:n})=>(B.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!i,resultLength:i?i.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),i?(B.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:i}:{error:"Process completed with errors but result was captured",result:i}):(B.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),ln={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"},cn=async()=>{let e=De.join(Zt.homedir(),".gemini"),t=De.join(e,"settings.json");try{await Fe.mkdir(e,{recursive:!0});let r={};try{let i=await Fe.readFile(t,"utf-8");r=JSON.parse(i)}catch{B.log("Creating new Gemini CLI settings file")}r.general||(r.general={}),r.general.previewFeatures||(r.general.previewFeatures=!0),r.model||(r.model={}),r.model.compressionThreshold!==.3&&(r.model.compressionThreshold=.3),await Fe.writeFile(t,JSON.stringify(r,null,2),"utf-8"),B.log("Configured Gemini CLI settings (preview features and compression threshold)")}catch(r){B.error("Failed to ensure Gemini CLI settings",{error:r.message})}};async function ct({config:e,netlify:t,persistSteps:r=void 0,sendSteps:i=void 0,aiGateway:n,cwd:o=ye.cwd()}){let{accountType:s,prompt:l,modelVersionOverrides:c}=e,{model:u}=e;if(await cn(),n){let{token:h,url:p}=n;if(!h||!p)throw new Error("No token or url provided from AI Gateway");if(c?.gemini){let a=c?.gemini?.[s];if(a){if(!await n.isModelAvailableForProvider("gemini",a))throw new Error(`Model override '${a}' is not available for gemini provider`);u=a}}if(!u)!!Ee&&await n.isModelAvailableForProvider("gemini",Ee)?(u=Ee,B.log(`Using default model: ${Ee}`)):Ee&&B.log(`Default model ${Ee} is not available, proceeding without model specification`);else if(u&&!c?.gemini?.[s]&&!await n.isModelAvailableForProvider("gemini",u))throw new Error(`Model '${u}' is not available for gemini provider`);ye.env.GEMINI_API_KEY=h,ye.env.GOOGLE_GEMINI_BASE_URL=p}else if(!ye.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let f=[],d=[],m=[],y={},T=0,_=0,g,I,R=[ie(o,"gemini"),...u?["--model",u]:[],"--yolo","--output-format","stream-json","-p",l],S=`${ye.env.NVM_BIN}/node`;B.log(`Running ${S} ${R.join(" ")}`);let P=t.utils.run(S,R,{all:!0,env:ye.env,cwd:o,idleTimeout:ne});P.stdin?.end();let O=ue(()=>{r?.({steps:f,duration:_}),i?.({steps:d,duration:_}),d=[]},250),D=(h,p)=>{h.id=T,T+=1,m.push(h),f.push(h),d.push(h),p||O.flush(),O(),p&&O.flush()},x=sn.createInterface({input:P.all});x.on("error",h=>{B.error("Readline interface error",{error:h.message,stack:h.stack})});let v="",L=()=>{v&&D({message:v.trim()}),v=""};return x.on("line",h=>{let p=null;try{if(h.startsWith("[API Error")){let a=h.match(/\[api error: (.+?)]$/i)?.[1];p={type:"error",value:kt(a,!1)?.error?.message||a||"Gemini encountered error"}}else p=JSON.parse(h)}catch{return}if(p)switch(["message","result"].includes(p.type)||L(),p.type){case"message":{p.role!=="user"&&p.content&&(v+=p.content);break}case"tool_use":{let a=ln[p.tool_name]??p.tool_name,w=p.parameters?.file_path,ee=w&&De.relative(o,w),Ae=p.parameters?.command,X={title:[a,ee&&`\`${ee}\``,Ae&&`\`${Ae}\``].filter(Boolean).join(" ")};y[p.tool_id]=X,O.flush();break}case"tool_result":{let a=y[p.tool_id];a&&(p.output&&(a.message=`\`\`\`
28
+ ${p.output.trim()}
29
+ \`\`\``),D(a,!0));break}case"result":{_=p.stats?.duration_ms,p.status==="error"?I=p.error?.message:g=v.trim();break}case"error":{I=p.error;break}case"finished":break;default:{B.warn("Unhandled message type:",p.type);break}}}),await P.catch(h=>{({error:I,result:g}=an({catchError:h,runCmd:P,error:I,result:g,runnerName:"Gemini"}))}),x.close(),O.flush(),{steps:m,duration:_,result:await de({initialResult:g,agentName:zt,hasError:!!I}),error:pe({error:I,agentName:zt}),isRetryableError:fe(I)}}var Qt=async()=>{let e=De.join(Zt.homedir(),".gemini");await Fe.rm(e,{recursive:!0,force:!0})};var un={codex:{runner:lt,clean:Xt},claude:{runner:at,clean:Kt},gemini:{runner:ct,clean:Qt}},er=un;var ke=E("init_stage"),rr=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:i})=>await b(fn(),"init-stage",async n=>{let o=performance.now();n?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.runnerVersion":i||"unknown"});let s=er[e.runner];if(!s)throw n?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let l=gn({apiToken:r});It(l);let c=e.useGateway?await bt({netlify:l,config:e}):void 0;n?.setAttributes({"init.aiGateway.created":!!c});let u=5*1024,f=Dt(async({steps:T=[],duration:_})=>{let g=T.map(I=>{let R=I.title?et(V(I.title),u):void 0,S=I.message?et(V(I.message)):void 0;return{...I,title:R,message:S}});T.length=0;try{return await Y(e.id,e.sessionId,{steps:g,duration:_})}catch(I){ke.error("persistSteps failed",{error:I?.message||I})}},t);ke.info("Adding build files to stage");let d=await nt();await tt(d),Z.env.NETLIFY_LOCAL_MODE||await mn();let m;e.hasRepo?e.sha?(m=e.sha,n?.setAttributes({"init.sha.source":"provided"})):(m=await Yt(),await Ie(e.id,{sha:m}),n?.setAttributes({"init.sha.source":"current_commit"})):(m=await Bt(),n?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let y=performance.now()-o;return n?.setAttributes({"init.sha":m||"unknown","init.duration.ms":y,"init.status":"success"}),{aiGateway:c,context:l,persistSteps:f,runner:s,sha:m}}),mn=async()=>{let e="/usr/bin/git";try{e=dn("which git").toString().trim()||e}catch{}let t="/tmp/netlify-git-wrapper",r=pn.join(t,"git"),i=`#!/bin/bash
30
30
  # Git wrapper that blocks add and commit commands
31
31
  # The deployment system handles staging and commits automatically
32
32
 
@@ -56,20 +56,20 @@ case "$1" in
56
56
  exec ${e} "$@"
57
57
  ;;
58
58
  esac
59
- `;try{await tr.mkdir(t,{recursive:!0}),await tr.writeFile(r,i,{mode:493}),Z.env.PATH=`${t}:${Z.env.PATH}`,Z.env.NETLIFY_INTERNAL_GIT="0",Le.info("Installed git wrapper to block add/commit commands")}catch(n){Le.warn("Failed to install git wrapper",{error:n?.message||n})}},mn=({apiToken:e})=>({constants:{NETLIFY_API_HOST:Z.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||Z.env.NETLIFY_API_TOKEN,SITE_ID:Z.env.SITE_ID,FUNCTIONS_DIST:Z.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:N}});import{getTracer as ut}from"@netlify/otel";import gn from"crypto";import Q from"fs/promises";import U from"path";import H from"process";var L=_("context"),hn=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:H.env.NETLIFY_TEAM_ID,userId:H.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:H.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},yn=10,_n=async e=>{let{name:t,ext:r}=U.parse(e),i=e,n=U.join(H.cwd(),M,i),o=0;for(;await En(n);){if(o>=yn)throw new Error("Failed to generate context file");i=`${t}-${gn.randomUUID().slice(0,5)}${r}`,n=U.join(H.cwd(),M,i),o+=1}return i},En=async e=>{try{return await Q.access(e),!0}catch{return!1}},wn=async()=>{try{L.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return L.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(i=>i&&typeof i=="object"&&i.key==="catchall-consumer");return r?!r.contextScopes||typeof r.contextScopes!="object"?(L.warn("Catchall consumer missing or invalid contextScopes"),null):r:(L.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?L.warn("Netlify features context request timed out"):L.warn("Failed to fetch Netlify features context:",e.message),null}},Tn=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let i=await r.text();return await Q.writeFile(t,i,"utf-8"),!0}catch(r){return r.name==="AbortError"?L.warn(`Download timeout for ${e}`):L.warn(`Failed to download context file ${e}:`,r.message),!1}},De=null,In=async()=>{if(De)return De;let e=await wn();if(!e)return[];let t=U.join(H.cwd(),M,Xe);await Q.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([n,o])=>{if(!o||typeof o!="object"||!o.endpoint||!o.scope)return L.warn(`Invalid scope data for ${n}, skipping...`),null;let s=`${n}.md`,a=U.join(t,s),l=U.join(M,Xe,s);return L.log(`Downloading ${o.scope} context...`),await Tn(o.endpoint,a)?(L.log(`Downloaded: ${l}`),{scope:o.scope,path:l,key:n}):null});return De=(await Promise.all(r)).filter(n=>n!==null),De},nr=async({cliPath:e,netlify:t,config:r,buildErrorContext:i})=>{let n=hn(t),o=await _n($t),s=U.join(H.cwd(),M);await Q.mkdir(s,{recursive:!0});let a=U.join(M,o),l=U.join(H.cwd(),a),p=U.join(H.cwd(),M,re);try{await Q.unlink(p),L.log(`Deleted old results file: ${p}`)}catch{}let f=i?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
59
+ `;try{await tr.mkdir(t,{recursive:!0}),await tr.writeFile(r,i,{mode:493}),Z.env.PATH=`${t}:${Z.env.PATH}`,Z.env.NETLIFY_INTERNAL_GIT="0",ke.info("Installed git wrapper to block add/commit commands")}catch(n){ke.warn("Failed to install git wrapper",{error:n?.message||n})}},gn=({apiToken:e})=>({constants:{NETLIFY_API_HOST:Z.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||Z.env.NETLIFY_API_TOKEN,SITE_ID:Z.env.SITE_ID,FUNCTIONS_DIST:Z.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:N}});import{getTracer as ut}from"@netlify/otel";import hn from"crypto";import Q from"fs/promises";import U from"path";import H from"process";var k=E("context"),yn=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:H.env.NETLIFY_TEAM_ID,userId:H.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:H.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},En=10,_n=async e=>{let{name:t,ext:r}=U.parse(e),i=e,n=U.join(H.cwd(),M,i),o=0;for(;await wn(n);){if(o>=En)throw new Error("Failed to generate context file");i=`${t}-${hn.randomUUID().slice(0,5)}${r}`,n=U.join(H.cwd(),M,i),o+=1}return i},wn=async e=>{try{return await Q.access(e),!0}catch{return!1}},Tn=async()=>{try{k.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return k.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(i=>i&&typeof i=="object"&&i.key==="catchall-consumer");return r?!r.contextScopes||typeof r.contextScopes!="object"?(k.warn("Catchall consumer missing or invalid contextScopes"),null):r:(k.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?k.warn("Netlify features context request timed out"):k.warn("Failed to fetch Netlify features context:",e.message),null}},In=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let i=await r.text();return await Q.writeFile(t,i,"utf-8"),!0}catch(r){return r.name==="AbortError"?k.warn(`Download timeout for ${e}`):k.warn(`Failed to download context file ${e}:`,r.message),!1}},Le=null,xn=async()=>{if(Le)return Le;let e=await Tn();if(!e)return[];let t=U.join(H.cwd(),M,Xe);await Q.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([n,o])=>{if(!o||typeof o!="object"||!o.endpoint||!o.scope)return k.warn(`Invalid scope data for ${n}, skipping...`),null;let s=`${n}.md`,l=U.join(t,s),c=U.join(M,Xe,s);return k.log(`Downloading ${o.scope} context...`),await In(o.endpoint,l)?(k.log(`Downloaded: ${c}`),{scope:o.scope,path:c,key:n}):null});return Le=(await Promise.all(r)).filter(n=>n!==null),Le},nr=async({cliPath:e,netlify:t,config:r,buildErrorContext:i})=>{let n=yn(t),o=await _n(Ot),s=U.join(H.cwd(),M);await Q.mkdir(s,{recursive:!0});let l=U.join(M,o),c=U.join(H.cwd(),l),u=U.join(H.cwd(),M,re);try{await Q.unlink(u),k.log(`Deleted old results file: ${u}`)}catch{}let f=i?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
60
60
  Your task is to analyze and fix the build errors.
61
61
  Don't apply techniques of reverting changes. Apply fixes related to errors.
62
62
  Don't try to run build by yourself. Just fix the errors.
63
63
 
64
64
  <build_error_context>
65
65
  ${i}
66
- </build_error_context>`:"",u="";r.siteContext&&r.siteContext.length!==0&&(u=`
66
+ </build_error_context>`:"",d="";r.siteContext&&r.siteContext.length!==0&&(d=`
67
67
  <project_rules>
68
- ${r.siteContext.filter(m=>m.site_context).map(m=>typeof m.site_context=="string"?m.site_context:typeof m.site_context=="object"?JSON.stringify(m.site_context):"").join(`
68
+ ${r.siteContext.filter(g=>g.site_context).map(g=>typeof g.site_context=="string"?g.site_context:typeof g.site_context=="object"?JSON.stringify(g.site_context):"").join(`
69
69
 
70
70
  `)}
71
71
  </project_rules>
72
- `);let g="";if(r.sessionHistoryContext?.length){let m=U.join(H.cwd(),M,Ve);await Q.mkdir(m,{recursive:!0});let I=await Promise.all(r.sessionHistoryContext.map(async(R,S)=>{let P=S+1,$=`attempt-${P}.md`,k=U.join(m,$),x=U.join(M,Ve,$),v=`# Task History - Attempt ${P}
72
+ `);let m="";if(r.sessionHistoryContext?.length){let g=U.join(H.cwd(),M,Je);await Q.mkdir(g,{recursive:!0});let I=await Promise.all(r.sessionHistoryContext.map(async(R,S)=>{let P=S+1,O=`attempt-${P}.md`,D=U.join(g,O),x=U.join(M,Je,O),v=`# Task History - Attempt ${P}
73
73
 
74
74
  ## Request - what the user asked for
75
75
  ${R.request}
@@ -79,7 +79,7 @@ ${R.request}
79
79
  ## Response - what the agent replied with after its work
80
80
 
81
81
  ${R.response}
82
- `;return await Q.writeFile(k,v,"utf-8"),L.log(`Created history file: ${x}`),x}));g+=`
82
+ `;return await Q.writeFile(D,v,"utf-8"),k.log(`Created history file: ${x}`),x}));m+=`
83
83
  <session_history_context>
84
84
  History of prior work on this task.
85
85
  You MUST review ALL of the files below as context to understand the context of previous attempts. Use this information to continue the discussion appropriately.
@@ -88,17 +88,17 @@ ${R.response}
88
88
  `)}
89
89
 
90
90
  </session_history_context>
91
- `}let y=await In(),T="";y.length>0&&(T=`
91
+ `}let y=await xn(),T="";y.length>0&&(T=`
92
92
  <netlify_features_context>
93
93
  If the user request is explicitly related to a specific Netlify feature (e.g., Netlify Forms, Netlify Functions, etc.), you MUST review the relevant documentation below in addition to reviewing the project files.
94
94
  DO NOT force the use of any Netlify feature if the user request does not explicitly require it or if the project has alternative implementations in place already.
95
95
 
96
- ${y.map(m=>`- **${m.scope}**: ${m.path}`).join(`
96
+ ${y.map(g=>`- **${g.scope}**: ${g.path}`).join(`
97
97
  `)}
98
98
 
99
99
  Refer to these files when working with specific Netlify features.
100
100
  </netlify_features_context>
101
- `);let E=`
101
+ `);let _=`
102
102
  You're an AI agent designed to assist with tasks related to a Netlify project. Please review, understand, and use the context provided to complete the user's request as needed.
103
103
 
104
104
  <request>
@@ -122,7 +122,7 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
122
122
  - for requests that require work with attachments or assets, take into account that uploaded attachments are stored in ${s}/${ze} folder
123
123
  - move assets from ${s}/${ze} folder to the project assets folder if they are referenced in a code or applied changes
124
124
  </attachements>
125
- ${u}
125
+ ${d}
126
126
  </requirements>
127
127
 
128
128
  <extra_context>
@@ -146,8 +146,8 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
146
146
  </docs>
147
147
  </extra_context>
148
148
 
149
- ${g}
150
- `;return await Q.writeFile(l,E,"utf-8"),L.log(`Generated agent context document at: ${l}`),E.length>5e5&&(E=`
149
+ ${m}
150
+ `;return await Q.writeFile(c,_,"utf-8"),k.log(`Generated agent context document at: ${c}`),_.length>5e5&&(_=`
151
151
  You're an AI agent designed to assist with tasks related to a Netlify project. Please review, understand, and use the context provided to complete the user's request as needed.
152
152
 
153
153
  <request>
@@ -157,22 +157,22 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
157
157
  ${f}
158
158
  </request>
159
159
 
160
- Use the following file for the complete context of the ask, the environment, and what's available. ${l} You MUST READ ALL OF IT. Make sure to read it first. Never cite or paraphrase private context.
161
- `),E};var xn=_("prompt"),ir=async({cliPath:e,config:t,netlify:r,buildErrorContext:i})=>{let n=await nr({cliPath:e,config:t,netlify:r,buildErrorContext:i});return process.env.AGENT_RUNNER_DEBUG&&xn.log("Contextful Prompt:",n),{prompt:n}};var Ue=_("inference_stage"),or=5,Me=async e=>{let{cliPath:t,config:r,context:i,buildErrors:n,runner:o,persistSteps:s,aiGateway:a,attempt:l,contextPrefix:p,priorAgentSessionId:f}=e;Ue.log(`Running inference stage, attempt ${l} of ${or}`);let u=await A(ut(),"inference-stage",async g=>{g?.setAttributes({"inference.attempt":l||1}),Nt();let{prompt:y}=await A(ut(),"compose-prompt",async()=>await ir({cliPath:t,config:r,buildErrorContext:vn(n),netlify:i})),T=`
162
- ${p||""}
160
+ Use the following file for the complete context of the ask, the environment, and what's available. ${c} You MUST READ ALL OF IT. Make sure to read it first. Never cite or paraphrase private context.
161
+ `),_};var vn=E("prompt"),ir=async({cliPath:e,config:t,netlify:r,buildErrorContext:i})=>{let n=await nr({cliPath:e,config:t,netlify:r,buildErrorContext:i});return process.env.AGENT_RUNNER_DEBUG&&vn.log("Contextful Prompt:",n),{prompt:n}};var Ue=E("inference_stage"),or=5,Me=async e=>{let{cliPath:t,config:r,context:i,buildErrors:n,runner:o,persistSteps:s,aiGateway:l,attempt:c,contextPrefix:u,priorAgentSessionId:f}=e;Ue.log(`Running inference stage, attempt ${c} of ${or}`);let d=await b(ut(),"inference-stage",async m=>{m?.setAttributes({"inference.attempt":c||1}),Nt();let{prompt:y}=await b(ut(),"compose-prompt",async()=>await ir({cliPath:t,config:r,buildErrorContext:Rn(n),netlify:i})),T=`
162
+ ${u||""}
163
163
  ${y}
164
- `.trim(),E={...r,prompt:T},m=await A(ut(),`run-${r.runner}`,async()=>await o({aiGateway:a,config:E,netlify:i,persistSteps:s,continueSession:!!(l&&l>1),priorAgentSessionId:f}));return m.result&&(m.result=J(m.result)),m.error&&(m.error=J(m.error)),await s.flush(),m});if(u.error){if(Ue.error("Runner failed",{stepsCount:u.steps.length,duration:u.duration,error:u.error,isRetryableError:u.isRetryableError,attempt:l||1,agentSessionId:u.agentSessionId}),u.isRetryableError&&(!l||l<or))return Ue.log("Retrying inference stage"),await new Promise(y=>setTimeout(y,5e3)),{runnerResult:(await Me({...e,attempt:(l||1)+1,priorAgentSessionId:u.agentSessionId,contextPrefix:u.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw Ue.log("Do not retry inference stage"),new Error(u.error)}return{runnerResult:u}},vn=e=>!e||e.length===0?"":`
164
+ `.trim(),_={...r,prompt:T},g=await b(ut(),`run-${r.runner}`,async()=>await o({aiGateway:l,config:_,netlify:i,persistSteps:s,continueSession:!!(c&&c>1),priorAgentSessionId:f}));return g.result&&(g.result=V(g.result)),g.error&&(g.error=V(g.error)),await s.flush(),g});if(d.error){if(Ue.error("Runner failed",{stepsCount:d.steps.length,duration:d.duration,error:d.error,isRetryableError:d.isRetryableError,attempt:c||1,agentSessionId:d.agentSessionId}),d.isRetryableError&&(!c||c<or))return Ue.log("Retrying inference stage"),await new Promise(y=>setTimeout(y,5e3)),{runnerResult:(await Me({...e,attempt:(c||1)+1,priorAgentSessionId:d.agentSessionId,contextPrefix:d.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw Ue.log("Do not retry inference stage"),new Error(d.error)}return{runnerResult:d}},Rn=e=>!e||e.length===0?"":`
165
165
  Deploy failed failed. Here are the errors to review on the latest build:
166
166
 
167
167
  Below are all of the logs with potential issues that we extracted. Some of them may be false positives, discern them carefully and ensure fixes are relevant.
168
168
 
169
169
  ${e.pop()}
170
- `;import bn from"process";import{getTracer as dt}from"@netlify/otel";import{getTracer as Rn}from"@netlify/otel";var Re=_("deploy"),sr=async e=>await A(Rn(),"create-preview-deploy",async t=>Sn(e,t)),Sn=async({netlify:e,hasRepo:t,skipBuild:r,message:i="Agent Preview",deploySubdomain:n,cliPath:o,filter:s},a)=>{try{let l=["deploy","--message",`"${i}"`,"--json","--draft","--verbose"];t||(Re.log("Deploy: Uploading source zip"),l.push("--upload-source-zip")),n&&l.push("--alias",n),s&&l.push("--filter",s),r?(Re.log("Deploy: Skipping build"),l.push("--no-build")):l.push("--context","deploy-preview");let p=o||"netlify";Re.log(`Running: ${p} ${l.join(" ")}`),a?.setAttributes({cmd:p,args:l});let f=await e.utils.run(p,l,{stdio:["ignore","pipe","pipe"]}),u=JSON.parse(String(f.stdout??"").trim());a?.setAttributes({success:!0,deployId:u.deploy_id,deployUrl:u.deploy_url,siteId:u.site_id}),Re.log(`
171
- Preview deploy created successfully:`,{deployId:u.deploy_id,deployUrl:u.deploy_url,siteId:u.site_id});let g={deployId:u.deploy_id,previewUrl:u.deploy_url,logsUrl:u.logs,siteId:u.site_id};return t||(g.sourceZipFilename=u.source_zip_filename),g}catch(l){throw Re.error("Failed to create preview deploy via CLI:",l),a?.setAttributes({success:!1,error:l.message}),l}};var Se=_("deploy_stage"),Ge=async e=>await A(dt(),"run-deploy-stage",async()=>An(e)),An=async({cliPath:e,config:t,context:r,result:i,filter:n,isRetry:o})=>{let s=await A(dt(),"get-runner-diffs",async()=>await Gt({config:t,isRetry:o}));if(Se.info("Resolved git",{hasChanges:s.hasChanges,ignored:s.ignored??[]}),!s.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:a,resultDiff:l,diffBinary:p,resultDiffBinary:f}=s,u=!0;Se.log("Preview deploy condition check:",{resultUndefined:i===void 0,resultType:typeof i,hasChanges:u,wouldCreatePreview:i!==void 0&&u});let g=null;if(i!==void 0&&u)try{let y;try{let T=await A(dt(),"get-runner-session",async()=>await vt(t.id,t.sessionId));T?.title&&(y=T.title)}catch(T){Se.warn("Failed to fetch session title, using fallback message:",T.message)}await Y(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),g=await sr({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:y,skipBuild:!1,deploySubdomain:Dt(t.id,bn.env.SITE_NAME),filter:n})}catch(y){return Se.warn("Failed to create preview deploy (continuing with agent run):",y),{diff:a,resultDiff:l,hasChanges:u,previewInfo:null,diffBinary:p,resultDiffBinary:f,deployError:y instanceof Error?y.message:String(y)}}return Se.log("Git status",{hasDiff:!!a,hasChanges:u}),{diff:a,resultDiff:l,hasChanges:u,previewInfo:g,diffBinary:p,resultDiffBinary:f}};import{getTracer as Be}from"@netlify/otel";async function ar(e,t){let{maxRetries:r,baseDelay:i,onRetry:n}=t,o;for(let s=1;s<=r;s++)try{return await e()}catch(a){if(o=a,s===r)throw o;n&&n(s,o),await new Promise(l=>setTimeout(l,i*s))}throw o}var je=class{scanDiffForForms(t){let r=[],i=null,n=[],o=t.split(`
172
- `);for(let s of o)if(s.startsWith("diff --git")){if(i&&n.length>0){let l=this.containsNetlifyForm(n,i);l&&r.push(l)}let a=s.split(" ");i=a[a.length-1].replace(/^b\//,""),n=[]}else s.startsWith("+")&&!s.startsWith("+++")&&n.push(s.slice(1));if(i&&n.length>0){let s=this.containsNetlifyForm(n,i);s&&r.push(s)}return{detected:r.length>0,matches:r}}containsNetlifyForm(t,r){let i=t.join(`
173
- `),n=[{pattern:/<[\w-]*form[\s\S]*?(data-)?netlify/i,name:"standard form element"},{pattern:/<[A-Z][\w]*[\s\S]*?component\s*=\s*["']form["'][\s\S]*?(data-)?netlify/i,name:"component with form prop"}];for(let{pattern:o,name:s}of n){let a=i.match(o);if(a){let l=a.index||0,p=Math.max(0,l-20),f=Math.min(i.length,l+a[0].length+20),u=i.slice(p,f).trim();return u=u.replace(/\s+/g," "),u.length>100&&(u=u.slice(0,97)+"..."),{file:r,snippet:`[${s}] ${u}`}}}return null}};var Ye=class{scanDiffForIdentity(t){let r=[],i=null,n=[],o=t.split(`
174
- `);for(let s of o)if(s.startsWith("diff --git")){if(i&&n.length>0){let l=this.containsNetlifyIdentity(n,i);l&&r.push(l)}let a=s.split(" ");i=a[a.length-1].replace(/^b\//,""),n=[]}else s.startsWith("+")&&!s.startsWith("+++")&&n.push(s.slice(1));if(i&&n.length>0){let s=this.containsNetlifyIdentity(n,i);s&&r.push(s)}return{detected:r.length>0,matches:r}}containsNetlifyIdentity(t,r){let i=t.join(`
175
- `),n=[{pattern:/data-netlify-identity-(button|menu)/i,name:"identity widget element"},{pattern:/netlify-identity-widget/i,name:"identity widget import"},{pattern:/new\s+GoTrue\s*\(/i,name:"GoTrue client"},{pattern:/(import\s+.*GoTrue|require\s*\(\s*['"]gotrue-js['"]\s*\))/i,name:"GoTrue import"},{pattern:/netlifyIdentity\s*\.\s*(init|on|off|open|close|login|signup|logout|refresh|currentUser)/i,name:"identity widget API"},{pattern:/['"`]\/?\.netlify\/identity/i,name:"identity endpoint"}];for(let{pattern:o,name:s}of n){let a=i.match(o);if(a){let l=a.index||0,p=Math.max(0,l-20),f=Math.min(i.length,l+a[0].length+20),u=i.slice(p,f).trim();return u=u.replace(/\s+/g," "),u.length>100&&(u=u.slice(0,97)+"..."),{file:r,snippet:`[${s}] ${u}`}}}return null}};var b=_("cleanup_stage"),ft=async e=>await A(Be(),"cleanup-stage",async()=>Nn(e)),pt=1024*1024*10,Nn=async({config:e,diff:t,result:r,duration:i,resultDiff:n,diffBinary:o,resultDiffBinary:s,previewInfo:a})=>{let l={result:r||"Done",duration:i};a&&a.deployId&&(l.deploy_id=a.deployId),a&&a.sourceZipFilename&&(l.result_zip_file_name=a.sourceZipFilename);let p=t||o||n||s;if(p&&(l.diff_produced=!0),p){let f=new je,u=t||o||"",g=f.scanDiffForForms(u);g.detected?(b.log("Detected Netlify form(s) in diff:"),g.matches.forEach(({file:E,snippet:m})=>{b.log(` - ${E}: ${m}`)}),l.has_netlify_form=!0):b.log("Did not detect Netlify form(s) in diff");let T=new Ye().scanDiffForIdentity(u);T.detected?(b.log("Detected Netlify Identity usage in diff:"),T.matches.forEach(({file:E,snippet:m})=>{b.log(` - ${E}: ${m}`)}),l.has_netlify_identity=!0):b.log("Did not detect Netlify Identity usage in diff")}if(p)try{b.log("Getting pre-signed URLs for diff upload");let f=await St(e.id,e.sessionId),u=[];(t||o)&&u.push(Ke(f.result.upload_url,o||t).then(()=>{l.result_diff_s3_key=f.result.s3_key,b.log("Successfully uploaded result_diff to S3")})),(n||s)&&u.push(Ke(f.cumulative.upload_url,s||n).then(()=>{l.cumulative_diff_s3_key=f.cumulative.s3_key,b.log("Successfully uploaded cumulative_diff to S3")})),b.log(`Uploading ${u.length} diff(s) to S3 in parallel`),await Promise.all(u),(n||s)&&(b.log("Updating agent runner with cumulative diff S3 key"),await A(Be(),"update-runner",async()=>{await Ie(e.id,{result_diff_s3_key:f.cumulative.s3_key})}))}catch(f){b.error("S3 upload failed, falling back to inline diffs:",f);let u=Buffer.byteLength(t||o||""),g=Buffer.byteLength(s||n||"");if(u>pt||g>pt){let y=`Diffs exceed maximum inline size of ${pt} bytes.`;throw b.error(y),new Error(y)}l.result_diff=t,l.result_diff_binary=o,(n||s)&&(l.cumulative_diff=n,l.cumulative_diff_binary=s,b.log("Updating agent runner with inline diffs (fallback)"),await A(Be(),"update-runner",async()=>{await Ie(e.id,{result_diff:n,result_diff_binary:s})}))}else b.log("No diffs to upload");return b.log("Updated agent runner with result"),await ar(async()=>await A(Be(),"update-runner-session",()=>Y(e.id,e.sessionId,l)),{maxRetries:3,baseDelay:1e3,onRetry:(f,u)=>{b.error(`Error updating agent runner session (attempt ${f}):`,u),b.log("Retrying...")}}),b.log("Finished updating agent runner with result"),{sessionUpdate:l}};import{getTracer as lr,shutdownTracers as Pn,withActiveSpan as cr}from"@netlify/otel";var $n=Cn(import.meta.url),ur=$n("../package.json"),Ee=_("pipeline_index"),He=3,dr=async({config:e,apiToken:t,cliPath:r="netlify",cwd:i,filter:n,tracing:o={}})=>{let s,{withStageTimer:a}=Ct(z.timeUnits.hours(4)),l=await Et(ur.version,e.id,o);try{await cr(lr(),"run-pipeline",{},l,async()=>{let{aiGateway:p,context:f,persistSteps:u,runner:g,sha:y}=await a("init",()=>rr({config:e,apiToken:t,cliPath:r,cwd:i,filter:n,runnerVersion:ur.version}),z.timeUnits.minutes(10));if(s=g.clean,e.sha=y,e.mode==="redeploy"){await Y(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let x=await a("deploy",()=>Ge({cliPath:r,config:e,context:f,result:"Redeploy completed",filter:n,isRetry:!1}));x.deployError&&Ee.warn(`Redeploy deploy failed: ${x.deployError}`);let{diff:v,resultDiff:D,previewInfo:h,diffBinary:d,resultDiffBinary:c}=x;await a("cleanup",()=>ft({config:e,diff:v,result:"Redeploy completed",duration:0,resultDiff:D,diffBinary:d,resultDiffBinary:c,previewInfo:h}),z.timeUnits.minutes(10)),process.env.NETLIFY_LOCAL_MODE||(await s?.(),await it());return}let{runnerResult:T}=await a("inference",()=>Me({cliPath:r,config:e,context:f,runner:g.runner,persistSteps:u,aiGateway:p}));await Y(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let E=await a("deploy",()=>Ge({cliPath:r,config:e,context:f,result:T.result,filter:n,isRetry:!1})),m=T,I=[];if(E.hasChanges&&E.deployError){I.push(wt(E.deployError));let x=1,v=!1;for(;x<=He&&!E.previewInfo&&!v;)Ee.log(`Deploy attempt had errors. Retrying. ${x}/${He}`),await cr(lr(),"deploy-stage",async D=>{D?.setAttributes({"stage.attempt":x});let h;try{h=(await a(`inference-retry-${x}`,()=>Me({cliPath:r,config:e,context:f,runner:g.runner,persistSteps:u,aiGateway:p,buildErrors:I,priorAgentSessionId:T.agentSessionId}))).runnerResult}catch(d){Ee.warn(`Inference retry ${x} failed, stopping deploy retries:`,d),v=!0;return}m={...h,steps:[...m.steps||[],...h.steps||[]],duration:(m.duration||0)+(h.duration||0)},E=await a(`deploy-retry-${x}`,()=>Ge({cliPath:r,config:e,context:f,result:h.result,filter:n,isRetry:!0})),E.deployError&&I.push(E.deployError),x++});x>He&&!E.previewInfo&&console.warn(`Deploy validation failed after ${He} attempts`)}let{diff:R,resultDiff:S,previewInfo:P,diffBinary:$,resultDiffBinary:k}=E;await a("cleanup",()=>ft({config:e,diff:R,result:m.result,duration:m.duration,resultDiff:S,diffBinary:$,resultDiffBinary:k,previewInfo:P}),z.timeUnits.minutes(10)),process.env.NETLIFY_LOCAL_MODE||(await s?.(),await it())})}catch(p){if(Tt(p)){Ee.info("Agent run terminated gracefully",{statusCode:p.statusCode,reason:p.message}),await s?.();try{await Y(e.id,e.sessionId,{result:p.userMessage,state:"error"})}catch{Ee.info("Could not update session (site may have been deleted)")}return}Ee.error("Got error while running pipeline",p),await s?.();let f=p instanceof Error&&p.message;throw await Y(e.id,e.sessionId,{result:f||"Encountered error when running agent",state:"error"}),p}finally{await Pn()}};import pr from"crypto";var O=_("bin_local"),K=Fn(C.argv.slice(2),{string:["cwd","cli-path","filter","prompt","runner","model","netlify-api-token"],boolean:["verbose","help"],alias:{h:"help",v:"verbose"}}),gt=()=>{console.log(`
170
+ `;import bn from"process";import{getTracer as dt}from"@netlify/otel";import{getTracer as Sn}from"@netlify/otel";var Re=E("deploy"),sr=async e=>await b(Sn(),"create-preview-deploy",async t=>An(e,t)),An=async({netlify:e,hasRepo:t,skipBuild:r,message:i="Agent Preview",deploySubdomain:n,cliPath:o,filter:s,prodDeploy:l},c)=>{try{let u=["deploy","--message",`"${i}"`,"--json","--verbose"];l?u.push("--prod"):u.push("--draft"),t||(Re.log("Deploy: Uploading source zip"),u.push("--upload-source-zip")),n&&u.push("--alias",n),s&&u.push("--filter",s),r?(Re.log("Deploy: Skipping build"),u.push("--no-build")):u.push("--context","deploy-preview");let f=o||"netlify";Re.log(`Running: ${f} ${u.join(" ")}`),c?.setAttributes({cmd:f,args:u});let d=await e.utils.run(f,u,{stdio:["ignore","pipe","pipe"]}),m=JSON.parse(String(d.stdout??"").trim());c?.setAttributes({success:!0,deployId:m.deploy_id,deployUrl:m.deploy_url,siteId:m.site_id}),Re.log(`
171
+ Preview deploy created successfully:`,{deployId:m.deploy_id,deployUrl:m.deploy_url,siteId:m.site_id});let y={deployId:m.deploy_id,previewUrl:m.deploy_url,logsUrl:m.logs,siteId:m.site_id};return t||(y.sourceZipFilename=m.source_zip_filename),y}catch(u){throw Re.error("Failed to create preview deploy via CLI:",u),c?.setAttributes({success:!1,error:u.message}),u}};var ar=e=>["dtn-prod-iteration","create"].includes(e);var Se=E("deploy_stage"),Ge=async e=>await b(dt(),"run-deploy-stage",async()=>Nn(e)),Nn=async({cliPath:e,config:t,context:r,result:i,filter:n,isRetry:o})=>{let s=await b(dt(),"get-runner-diffs",async()=>await Gt({config:t,isRetry:o}));if(Se.info("Resolved git",{hasChanges:s.hasChanges,ignored:s.ignored??[]}),!s.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:l,resultDiff:c,diffBinary:u,resultDiffBinary:f}=s,d=!0;Se.log("Preview deploy condition check:",{resultUndefined:i===void 0,resultType:typeof i,hasChanges:d,wouldCreatePreview:i!==void 0&&d});let m=null;if(i!==void 0&&d)try{let y;try{let T=await b(dt(),"get-runner-session",async()=>await vt(t.id,t.sessionId));T?.title&&(y=T.title)}catch(T){Se.warn("Failed to fetch session title, using fallback message:",T.message)}await Y(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),m=await sr({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:y,skipBuild:!1,deploySubdomain:Lt(t.id,bn.env.SITE_NAME),filter:n,prodDeploy:ar(t.mode)})}catch(y){return Se.warn("Failed to create preview deploy (continuing with agent run):",y),{diff:l,resultDiff:c,hasChanges:d,previewInfo:null,diffBinary:u,resultDiffBinary:f,deployError:y instanceof Error?y.message:String(y)}}return Se.log("Git status",{hasDiff:!!l,hasChanges:d}),{diff:l,resultDiff:c,hasChanges:d,previewInfo:m,diffBinary:u,resultDiffBinary:f}};import{getTracer as Be}from"@netlify/otel";async function lr(e,t){let{maxRetries:r,baseDelay:i,onRetry:n}=t,o;for(let s=1;s<=r;s++)try{return await e()}catch(l){if(o=l,s===r)throw o;n&&n(s,o),await new Promise(c=>setTimeout(c,i*s))}throw o}var je=class{scanDiffForForms(t){let r=[],i=null,n=[],o=t.split(`
172
+ `);for(let s of o)if(s.startsWith("diff --git")){if(i&&n.length>0){let c=this.containsNetlifyForm(n,i);c&&r.push(c)}let l=s.split(" ");i=l[l.length-1].replace(/^b\//,""),n=[]}else s.startsWith("+")&&!s.startsWith("+++")&&n.push(s.slice(1));if(i&&n.length>0){let s=this.containsNetlifyForm(n,i);s&&r.push(s)}return{detected:r.length>0,matches:r}}containsNetlifyForm(t,r){let i=t.join(`
173
+ `),n=[{pattern:/<[\w-]*form[\s\S]*?(data-)?netlify/i,name:"standard form element"},{pattern:/<[A-Z][\w]*[\s\S]*?component\s*=\s*["']form["'][\s\S]*?(data-)?netlify/i,name:"component with form prop"}];for(let{pattern:o,name:s}of n){let l=i.match(o);if(l){let c=l.index||0,u=Math.max(0,c-20),f=Math.min(i.length,c+l[0].length+20),d=i.slice(u,f).trim();return d=d.replace(/\s+/g," "),d.length>100&&(d=d.slice(0,97)+"..."),{file:r,snippet:`[${s}] ${d}`}}}return null}};var Ye=class{scanDiffForIdentity(t){let r=[],i=null,n=[],o=t.split(`
174
+ `);for(let s of o)if(s.startsWith("diff --git")){if(i&&n.length>0){let c=this.containsNetlifyIdentity(n,i);c&&r.push(c)}let l=s.split(" ");i=l[l.length-1].replace(/^b\//,""),n=[]}else s.startsWith("+")&&!s.startsWith("+++")&&n.push(s.slice(1));if(i&&n.length>0){let s=this.containsNetlifyIdentity(n,i);s&&r.push(s)}return{detected:r.length>0,matches:r}}containsNetlifyIdentity(t,r){let i=t.join(`
175
+ `),n=[{pattern:/data-netlify-identity-(button|menu)/i,name:"identity widget element"},{pattern:/netlify-identity-widget/i,name:"identity widget import"},{pattern:/new\s+GoTrue\s*\(/i,name:"GoTrue client"},{pattern:/(import\s+.*GoTrue|require\s*\(\s*['"]gotrue-js['"]\s*\))/i,name:"GoTrue import"},{pattern:/netlifyIdentity\s*\.\s*(init|on|off|open|close|login|signup|logout|refresh|currentUser)/i,name:"identity widget API"},{pattern:/['"`]\/?\.netlify\/identity/i,name:"identity endpoint"}];for(let{pattern:o,name:s}of n){let l=i.match(o);if(l){let c=l.index||0,u=Math.max(0,c-20),f=Math.min(i.length,c+l[0].length+20),d=i.slice(u,f).trim();return d=d.replace(/\s+/g," "),d.length>100&&(d=d.slice(0,97)+"..."),{file:r,snippet:`[${s}] ${d}`}}}return null}};var A=E("cleanup_stage"),ft=async e=>await b(Be(),"cleanup-stage",async()=>Cn(e)),pt=1024*1024*10,Cn=async({config:e,diff:t,result:r,duration:i,resultDiff:n,diffBinary:o,resultDiffBinary:s,previewInfo:l})=>{let c={result:r||"Done",duration:i};l&&l.deployId&&(c.deploy_id=l.deployId),l&&l.sourceZipFilename&&(c.result_zip_file_name=l.sourceZipFilename);let u=t||o||n||s;if(u&&(c.diff_produced=!0),u){let f=new je,d=t||o||"",m=f.scanDiffForForms(d);m.detected?(A.log("Detected Netlify form(s) in diff:"),m.matches.forEach(({file:_,snippet:g})=>{A.log(` - ${_}: ${g}`)}),c.has_netlify_form=!0):A.log("Did not detect Netlify form(s) in diff");let T=new Ye().scanDiffForIdentity(d);T.detected?(A.log("Detected Netlify Identity usage in diff:"),T.matches.forEach(({file:_,snippet:g})=>{A.log(` - ${_}: ${g}`)}),c.has_netlify_identity=!0):A.log("Did not detect Netlify Identity usage in diff")}if(u)try{A.log("Getting pre-signed URLs for diff upload");let f=await St(e.id,e.sessionId),d=[];(t||o)&&d.push(Ke(f.result.upload_url,o||t).then(()=>{c.result_diff_s3_key=f.result.s3_key,A.log("Successfully uploaded result_diff to S3")})),(n||s)&&d.push(Ke(f.cumulative.upload_url,s||n).then(()=>{c.cumulative_diff_s3_key=f.cumulative.s3_key,A.log("Successfully uploaded cumulative_diff to S3")})),A.log(`Uploading ${d.length} diff(s) to S3 in parallel`),await Promise.all(d),(n||s)&&(A.log("Updating agent runner with cumulative diff S3 key"),await b(Be(),"update-runner",async()=>{await Ie(e.id,{result_diff_s3_key:f.cumulative.s3_key})}))}catch(f){A.error("S3 upload failed, falling back to inline diffs:",f);let d=Buffer.byteLength(t||o||""),m=Buffer.byteLength(s||n||"");if(d>pt||m>pt){let y=`Diffs exceed maximum inline size of ${pt} bytes.`;throw A.error(y),new Error(y)}c.result_diff=t,c.result_diff_binary=o,(n||s)&&(c.cumulative_diff=n,c.cumulative_diff_binary=s,A.log("Updating agent runner with inline diffs (fallback)"),await b(Be(),"update-runner",async()=>{await Ie(e.id,{result_diff:n,result_diff_binary:s})}))}else A.log("No diffs to upload");return A.log("Updated agent runner with result"),await lr(async()=>await b(Be(),"update-runner-session",()=>Y(e.id,e.sessionId,c)),{maxRetries:3,baseDelay:1e3,onRetry:(f,d)=>{A.error(`Error updating agent runner session (attempt ${f}):`,d),A.log("Retrying...")}}),A.log("Finished updating agent runner with result"),{sessionUpdate:c}};import{getTracer as cr,shutdownTracers as On,withActiveSpan as ur}from"@netlify/otel";var $n=Pn(import.meta.url),dr=$n("../package.json"),_e=E("pipeline_index"),He=3,pr=async({config:e,apiToken:t,cliPath:r="netlify",cwd:i,filter:n,tracing:o={}})=>{let s,{withStageTimer:l}=Ct(z.timeUnits.hours(4)),c=await _t(dr.version,e.id,o);try{await ur(cr(),"run-pipeline",{},c,async()=>{let{aiGateway:u,context:f,persistSteps:d,runner:m,sha:y}=await l("init",()=>rr({config:e,apiToken:t,cliPath:r,cwd:i,filter:n,runnerVersion:dr.version}),z.timeUnits.minutes(10));if(s=m.clean,e.sha=y,e.mode==="redeploy"){await Y(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let x=await l("deploy",()=>Ge({cliPath:r,config:e,context:f,result:"Redeploy completed",filter:n,isRetry:!1}));x.deployError&&_e.warn(`Redeploy deploy failed: ${x.deployError}`);let{diff:v,resultDiff:L,previewInfo:h,diffBinary:p,resultDiffBinary:a}=x;await l("cleanup",()=>ft({config:e,diff:v,result:"Redeploy completed",duration:0,resultDiff:L,diffBinary:p,resultDiffBinary:a,previewInfo:h}),z.timeUnits.minutes(10)),process.env.NETLIFY_LOCAL_MODE||(await s?.(),await it());return}let{runnerResult:T}=await l("inference",()=>Me({cliPath:r,config:e,context:f,runner:m.runner,persistSteps:d,aiGateway:u}));await Y(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let _=await l("deploy",()=>Ge({cliPath:r,config:e,context:f,result:T.result,filter:n,isRetry:!1})),g=T,I=[];if(_.hasChanges&&_.deployError){I.push(wt(_.deployError));let x=1,v=!1;for(;x<=He&&!_.previewInfo&&!v;)_e.log(`Deploy attempt had errors. Retrying. ${x}/${He}`),await ur(cr(),"deploy-stage",async L=>{L?.setAttributes({"stage.attempt":x});let h;try{h=(await l(`inference-retry-${x}`,()=>Me({cliPath:r,config:e,context:f,runner:m.runner,persistSteps:d,aiGateway:u,buildErrors:I,priorAgentSessionId:T.agentSessionId}))).runnerResult}catch(p){_e.warn(`Inference retry ${x} failed, stopping deploy retries:`,p),v=!0;return}g={...h,steps:[...g.steps||[],...h.steps||[]],duration:(g.duration||0)+(h.duration||0)},_=await l(`deploy-retry-${x}`,()=>Ge({cliPath:r,config:e,context:f,result:h.result,filter:n,isRetry:!0})),_.deployError&&I.push(_.deployError),x++});x>He&&!_.previewInfo&&console.warn(`Deploy validation failed after ${He} attempts`)}let{diff:R,resultDiff:S,previewInfo:P,diffBinary:O,resultDiffBinary:D}=_;await l("cleanup",()=>ft({config:e,diff:R,result:g.result,duration:g.duration,resultDiff:S,diffBinary:O,resultDiffBinary:D,previewInfo:P}),z.timeUnits.minutes(10)),process.env.NETLIFY_LOCAL_MODE||(await s?.(),await it())})}catch(u){if(Tt(u)){_e.info("Agent run terminated gracefully",{statusCode:u.statusCode,reason:u.message}),await s?.();try{await Y(e.id,e.sessionId,{result:u.userMessage,state:"error"})}catch{_e.info("Could not update session (site may have been deleted)")}return}_e.error("Got error while running pipeline",u),await s?.();let f=u instanceof Error&&u.message;throw await Y(e.id,e.sessionId,{result:f||"Encountered error when running agent",state:"error"}),u}finally{await On()}};import fr from"crypto";var F=E("bin_local"),K=Fn(C.argv.slice(2),{string:["cwd","cli-path","filter","prompt","runner","model","netlify-api-token"],boolean:["verbose","help"],alias:{h:"help",v:"verbose"}}),gt=()=>{console.log(`
176
176
  agent-runner-cli-local - Run Netlify agent runner locally without API connections
177
177
 
178
178
  USAGE:
@@ -204,6 +204,6 @@ NOTE:
204
204
  This local mode mocks all Netlify API calls. The agent will run through
205
205
  the full pipeline including inference and deployment, but API calls will
206
206
  be logged instead of executed.
207
- `)};K.help&&(gt(),C.exit(0));K.prompt||(O.error("Error: --prompt is required"),gt(),C.exit(1));K["netlify-api-token"]||(O.error("Error: --netlify-api-token is required - generate a PAT from your Netlify user settings"),gt(),C.exit(1));try{let e=K.cwd||C.cwd(),t=fr.join(e,".netlify","netlify-agent-runner-context*");mr.rmSync(t,{recursive:!0,force:!0});let r;try{r=await On(e)}catch(a){O.error(a.message),O.error(`
208
- To link this directory to a Netlify site, run:`),O.error(" netlify link"),C.exit(1)}let i=`local-${pr.randomBytes(8).toString("hex")}`,n=`session-${pr.randomBytes(8).toString("hex")}`,o=K.runner||"claude";O.log("Starting agent runner in local mode",{runnerId:i,sessionId:n,siteId:r,cwd:e,runner:o});let s={id:i,sessionId:n,prompt:K.prompt,runner:o,model:K.model,accountType:"free",mode:"normal",sessionHistoryContext:[],siteContext:[],hasRepo:!0,useGateway:!0,sha:void 0,modelVersionOverrides:{}};C.env.NETLIFY_LOCAL_MODE="true",C.env.NETLIFY_API_HOST="api.netlify.com",C.env.NETLIFY_API_TOKEN=K["netlify-api-token"],C.env.SITE_ID=r,C.env.NETLIFY_TEAM_ID="local-team-id",C.env.NETLIFY_AGENT_RUNNER_USER_ID="local-user-id",C.env.SITE_NAME="local-site",o==="claude"?$e(e,"claude")||(O.log("Claude CLI not found, installing..."),await mt(e,"@anthropic-ai/claude-code")):o==="gemini"?$e(e,"gemini")||(O.log("Gemini CLI not found, installing..."),await mt(e,"@google/gemini-cli")):o==="codex"?$e(e,"codex")||(O.log("Codex CLI not found, installing..."),await mt(e,"@openai/codex")):(O.error(`Unknown runner: ${o}`),C.exit(1)),await dr({config:s,cwd:e,cliPath:K["cli-path"],filter:K.filter,tracing:{exporterUrl:void 0,traceparent:void 0}}),O.info("Finished agent (local mode)"),C.exit(0)}catch(e){O.error("Error running agent pipeline (local mode):",e),C.exit(1)}function mt(e,t){return new Promise((r,i)=>{N("npm",["install",t,"--no-save"],{cwd:e}).then(({stdout:n})=>{O.log(`${t} installed: ${n}`),r()}).catch(n=>{O.error(`Error installing ${t}: ${n.stderr||n.message}`),i(n)})})}async function On(e){let t=fr.join(e,".netlify","state.json");try{let r=await mr.readFileSync(t,"utf-8"),i=JSON.parse(r);if(!i.siteId)throw new Error(`No siteId found in ${t}. Please link this directory to a Netlify site using 'netlify link'.`);return O.log(`Found site ID from state file: ${i.siteId}`),i.siteId}catch(r){throw r.code==="ENOENT"?new Error(`No .netlify/state.json found in ${e}. Please link this directory to a Netlify site using 'netlify link'.`):r}}
207
+ `)};K.help&&(gt(),C.exit(0));K.prompt||(F.error("Error: --prompt is required"),gt(),C.exit(1));K["netlify-api-token"]||(F.error("Error: --netlify-api-token is required - generate a PAT from your Netlify user settings"),gt(),C.exit(1));try{let e=K.cwd||C.cwd(),t=mr.join(e,".netlify","netlify-agent-runner-context*");gr.rmSync(t,{recursive:!0,force:!0});let r;try{r=await Dn(e)}catch(l){F.error(l.message),F.error(`
208
+ To link this directory to a Netlify site, run:`),F.error(" netlify link"),C.exit(1)}let i=`local-${fr.randomBytes(8).toString("hex")}`,n=`session-${fr.randomBytes(8).toString("hex")}`,o=K.runner||"claude";F.log("Starting agent runner in local mode",{runnerId:i,sessionId:n,siteId:r,cwd:e,runner:o});let s={id:i,sessionId:n,prompt:K.prompt,runner:o,model:K.model,accountType:"free",mode:"normal",sessionHistoryContext:[],siteContext:[],hasRepo:!0,useGateway:!0,sha:void 0,modelVersionOverrides:{}};C.env.NETLIFY_LOCAL_MODE="true",C.env.NETLIFY_API_HOST="api.netlify.com",C.env.NETLIFY_API_TOKEN=K["netlify-api-token"],C.env.SITE_ID=r,C.env.NETLIFY_TEAM_ID="local-team-id",C.env.NETLIFY_AGENT_RUNNER_USER_ID="local-user-id",C.env.SITE_NAME="local-site",o==="claude"?Oe(e,"claude")||(F.log("Claude CLI not found, installing..."),await mt(e,"@anthropic-ai/claude-code")):o==="gemini"?Oe(e,"gemini")||(F.log("Gemini CLI not found, installing..."),await mt(e,"@google/gemini-cli")):o==="codex"?Oe(e,"codex")||(F.log("Codex CLI not found, installing..."),await mt(e,"@openai/codex")):(F.error(`Unknown runner: ${o}`),C.exit(1)),await pr({config:s,cwd:e,cliPath:K["cli-path"],filter:K.filter,tracing:{exporterUrl:void 0,traceparent:void 0}}),F.info("Finished agent (local mode)"),C.exit(0)}catch(e){F.error("Error running agent pipeline (local mode):",e),C.exit(1)}function mt(e,t){return new Promise((r,i)=>{N("npm",["install",t,"--no-save"],{cwd:e}).then(({stdout:n})=>{F.log(`${t} installed: ${n}`),r()}).catch(n=>{F.error(`Error installing ${t}: ${n.stderr||n.message}`),i(n)})})}async function Dn(e){let t=mr.join(e,".netlify","state.json");try{let r=await gr.readFileSync(t,"utf-8"),i=JSON.parse(r);if(!i.siteId)throw new Error(`No siteId found in ${t}. Please link this directory to a Netlify site using 'netlify link'.`);return F.log(`Found site ID from state file: ${i.siteId}`),i.siteId}catch(r){throw r.code==="ENOENT"?new Error(`No .netlify/state.json found in ${e}. Please link this directory to a Netlify site using 'netlify link'.`):r}}
209
209
  //# sourceMappingURL=bin-local.js.map