@netlify/agent-runner-cli 1.48.1-alpha → 1.48.1

This diff shows the content of publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
package/dist/bin-local.js CHANGED
@@ -1,41 +1,37 @@
  #!/usr/bin/env node
- import P from"process";import zt from"path";import Zt from"fs";import un from"minimist";import{createRequire as sn}from"module";import{createTracerProvider as Qt}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as et}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as er}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as tr}from"@netlify/otel";import{propagation as tt,context as rt,W3CTraceContextPropagator as rr}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as nr}from"@opentelemetry/exporter-trace-otlp-grpc";function _(e){let t=!process.env.VITEST;return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var Ce=_("tracing"),nt=async(e,t,r)=>(await Qt({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new et(new Pe),new et(new nr({url:r.exporterUrl}))],instrumentations:[new er({skipHeaders:!0})]}),r.traceparent?(tt.setGlobalPropagator(new rr),tt.extract(rt.active(),{traceparent:r.traceparent,isRemote:!0})):rt.active());function A(e,t,r){return Ce.log(`\u23F3 TRACE: ${t} starting...`),tr(e,t,r)}var Pe=class{export(t,r){for(let o of t)this.logSpan(o);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,o=t.attributes,n=[];for(let[l,s]of Object.entries(o))l.includes("duration")&&typeof s=="number"?n.push(`${l}=${s.toFixed(2)}ms`):n.push(`${l}=${s}`);let i=t.status?.code===2?"\u274C":"\u2705",a=n.length>0?` [${n.join(", ")}]`:"";Ce.log(`${i} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${a}`),t.status?.code===2&&t.status.message&&Ce.log(` \u274C Error: ${t.status.message}`)}};var or=["error","failed","exception","fatal","panic","abort","crash"];function ot(e){let t=e.split(`
- `),r=[],o=-1,n=0;for(;n<t.length;){let l=t[n].slice(0,500).toLowerCase();if(or.some(h=>l.includes(h))){let h=Math.max(0,n-10,o+1),d=Math.min(t.length-1,n+20),c=[];for(let m=h;m<=d;m++)c.push(t[m]);r.push(c.join(`
- `)),o=d,n=d+1}else n++}if(r.length===0)return e;let i=r.map((a,l)=>`<extracted_error_chunk order="${l+1}">
+ import O from"process";import Xt from"path";import zt from"fs";import pn from"minimist";import{createRequire as un}from"module";import{createTracerProvider as Zt}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as et}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as Qt}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as er}from"@netlify/otel";import{propagation as tt,context as rt,W3CTraceContextPropagator as tr}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as rr}from"@opentelemetry/exporter-trace-otlp-grpc";function E(e){let t=!process.env.VITEST;return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var Ce=E("tracing"),nt=async(e,t,r)=>(await Zt({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new et(new Pe),new et(new rr({url:r.exporterUrl}))],instrumentations:[new Qt({skipHeaders:!0})]}),r.traceparent?(tt.setGlobalPropagator(new tr),tt.extract(rt.active(),{traceparent:r.traceparent,isRemote:!0})):rt.active());function b(e,t,r){return Ce.log(`\u23F3 TRACE: ${t} starting...`),er(e,t,r)}var Pe=class{export(t,r){for(let o of t)this.logSpan(o);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,o=t.attributes,n=[];for(let[l,i]of Object.entries(o))l.includes("duration")&&typeof i=="number"?n.push(`${l}=${i.toFixed(2)}ms`):n.push(`${l}=${i}`);let s=t.status?.code===2?"\u274C":"\u2705",a=n.length>0?` [${n.join(", ")}]`:"";Ce.log(`${s} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${a}`),t.status?.code===2&&t.status.message&&Ce.log(` \u274C Error: ${t.status.message}`)}};var nr=["error","failed","exception","fatal","panic","abort","crash"];function ot(e){let t=e.split(`
+ `),r=[],o=-1,n=0;for(;n<t.length;){let l=t[n].slice(0,500).toLowerCase();if(nr.some(m=>l.includes(m))){let m=Math.max(0,n-10,o+1),c=Math.min(t.length-1,n+20),u=[];for(let p=m;p<=c;p++)u.push(t[p]);r.push(u.join(`
+ `)),o=c,n=c+1}else n++}if(r.length===0)return e;let s=r.map((a,l)=>`<extracted_error_chunk order="${l+1}">
  ${a}
  </extracted_error_chunk>`).join(`
 
- `);return i.length>e.length*.8?e:i}import ve from"process";import{getTracer as Yr}from"@netlify/otel";import ue from"process";var me=ue.env.NETLIFY_API_URL,he=ue.env.NETLIFY_API_TOKEN,Y=_("api"),ye=()=>ue.env.NETLIFY_LOCAL_MODE==="true",ce=async(e,t={})=>{if(!me||!he)throw new Error("No API URL or token");let r=new URL(e,me),o={...t,headers:{...t.headers,Authorization:`Bearer ${he}`}};ue.env.AGENT_RUNNERS_DEBUG==="true"&&(o.headers["x-nf-debug-logging"]="true"),t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),i=n.ok&&n.status<=299;if(ue.env.AGENT_RUNNERS_DEBUG==="true")Y.log(`Response headers for ${r}:`),n.headers.forEach((l,s)=>{Y.log(` ${s}: ${l}`)});else{let l=n.headers.get("x-request-id")||n.headers.get("x-nf-request-id");Y.log(`Request ID for ${r}: ${l||"N/A"}`)}if(i||Y.error(`Got status ${n.status} for request ${r}`),t.raw){if(!i)throw n;return n}let a=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!i)throw a;return a},it=e=>{Y.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(me=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(he=e.constants.NETLIFY_API_TOKEN)},st=()=>({apiUrl:me,token:he}),de=async(e,t)=>ye()?(Y.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):ce(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),H=async(e,t,r)=>ye()?(Y.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):ce(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var at=async(e,t)=>ye()?(Y.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):ce(`/api/v1/agent_runners/${e}/sessions/${t}`),lt=(e,t,r)=>ce(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r}}),ut=async(e,t)=>ye()?(Y.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):ce(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),Oe=async(e,t)=>{Y.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var te=_("ai_gateway"),Fe=null;var ct=async()=>{if(Fe)return Fe;te.log("Fetching available AI gateway providers");let e=await fetch(`${st().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return Fe=t,te.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},ir=async(e,t)=>{let o=(await ct()).providers[e];if(!o)return te.log(`Provider '${e}' not found`),!1;let n=o.models.includes(t);return te.log(`Model validation for ${e}/${t}`,{isAvailable:n}),n},dt=async({netlify:e,config:t})=>{let r,o,n,i,a=e.constants?.SITE_ID;if(!a)throw new Error("No site id");let l=async()=>{clearTimeout(n),te.log("Requesting AI gateway information");let s=await lt(a,t.id,t.sessionId);if({token:r,url:i}=s,o=s.expires_at?s.expires_at*1e3:void 0,te.log("Got AI gateway information",{token:!!r,expiresAt:o,url:i}),o){let 
h=o-Date.now()-6e4;h>0&&(n=setTimeout(()=>{l()},h))}};return await Promise.all([l(),ct()]),{get url(){return i},get token(){return r},isModelAvailableForProvider:ir}};import K from"process";import Ee from"path";import $e from"fs";import{fileURLToPath as dr}from"url";import{execa as pr,execaCommand as kn}from"execa";import{Transform as sr}from"stream";var ar=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","ERROR_LOGS_PATH","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),lr=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function ur(){return Object.entries(process.env).filter(([e,t])=>!(!t||ar.has(e)||lr.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function q(e){if(typeof e!="string")return e;let t=ur();if(t.length===0)return e;let r=e;return t.forEach(o=>{let n=new RegExp(cr(o),"g");r=r.replace(n,"******")}),r}function cr(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var re=class extends sr{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,o){let n=t.toString(),i=q(n);o(null,i)}};function pt(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(o,n,i){let a=typeof o=="string"?q(o):o;return typeof n=="function"?t(a,n):t(a,n,i)},process.stderr.write=function(o,n,i){let a=typeof o=="string"?q(o):o;return typeof n=="function"?r(a,n):r(a,n,i)}}var pe=null,ft=e=>(pe&&pe.destroy(),pe=new z({totalAllowedTime:e}),pe),gt=()=>pe;var z=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,o)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let n=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),i=null,a=null;o!==void 0&&(a=new Promise((l,s)=>{i=setTimeout(()=>{s(new Error(`${t} stage exceeded its maximum duration of ${o}ms`))},o)}));try{return a?await Promise.race([r(),a]):await r()}finally{n(),i&&clearTimeout(i)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var fr=dr(import.meta.url),gr=Ee.dirname(fr),_e=_("shell"),De=new Set,mr={preferLocal:!0},C=(e,t,r)=>{let[o,n]=hr(t,r),i={...mr,...n},a=pr(e,o,i);return yr(a,i),_r(a),a};var hr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},yr=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(K.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new re).pipe(K.stdout),e.stdout?.pipe(new re).pipe(K.stdout),e.stderr?.pipe(new re).pipe(K.stderr);return}e.stdout?.pipe(K.stdout),e.stderr?.pipe(K.stderr)},mt=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(K.kill(-e.pid,t),_e.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return _e.error("Error killing process:",r),!1}},Er=e=>mt(e,"SIGKILL"),_r=e=>{De.add(e);let t=gt();if(t){let r=t.onTimesUp(()=>{_e.log(`Global timer expired, killing process ${e.pid}`),mt(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(_e.log(`Force killing process ${e.pid} after timeout`),Er(e))},5e3)});e.on("exit",()=>{De.delete(e),r()}),e.on("error",()=>{De.delete(e),r()})}};function we(e,t){return!!J(e,t)}function J(e,t){if(K.env.NODE_PATH){let n=Ee.join(K.env.NODE_PATH,".bin",t);if($e.existsSync(n))return n}let r=Ee.join(e,"node_modules",".bin",t);if($e.existsSync(r))return r;let o=Ee.join(gr,"..","node_modules",".bin",t);if($e.existsSync(o))return o}var ht="netlify-agent-runner-context.md",ke="task-history",Le="netlify-context",U=".netlify",ne="results.md",Ue="assets";var wr=_("utils"),Ir=e=>new Promise(t=>{setTimeout(t,e)}),yt=(e,t=3e3)=>{let r=!1,o=null,n=[],i=null,a=(...l)=>{if(r)return o=l,new Promise(d=>{n.push(d)});r=!0;let s,h=new Promise(d=>{s=d});return i=(async()=>{await Promise.resolve();let d=await e(...l);for(s(d);;){if(await Ir(t),!o)return r=!1,i=null,d;let c=o,m=n;o=null,n=[],d=await e(...c),m.forEach(y=>{y(d)})}})(),h};return a.flush=async()=>{if((r||o)&&i)return await 
i,a.flush()},a},Ie=(e,t,r=!1)=>{let o=null,n=null,i=null,a=function(...l){n=l,i=this;let s=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(i,n),n=null,i=null)},t),s&&(e.apply(i,n),n=null,i=null)};return a.cancel=()=>{clearTimeout(o),o=null,n=null,i=null},a.flush=()=>{if(o){clearTimeout(o);let l=n,s=i;o=null,n=null,i=null,e.apply(s,l)}},a},Et=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(o){t&&(r?.error?r.error("Could not parse JSON",o):wr.error("Could not parse JSON",o))}},_t=(e,t)=>{let n=".netlify.app",i="agent-";if(!t)return`${i}${e.slice(0,6)}`;let l=`--${t}${n}`;if(l.length>55)return"";let s=60-l.length;if(s<=0)return"";if(s>=i.length+6){let h=Math.min(s-i.length,e.length);return`${i}${e.slice(0,h)}`}return e.slice(0,s)};import{Buffer as wt}from"buffer";import Tr from"path";var It=async({config:e})=>{let t=await vr(),{hasChanges:r}=t,{status:o}=t;if(!r)return{hasChanges:!1};let n=Rr(o);await Sr(n);let i=await Ge(o);await Me(i);let a={stdio:["ignore","pipe","pipe"]},s=(await C("git",["diff","--staged"],a)).stdout;if(r=!!s,!r)return{hasChanges:!1,ignored:i};let d=(await C("git",["diff","--staged","--binary"],a)).stdout,c,m;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await C("git",["commit","-m","Agent runner"]),c=(await C("git",["diff",e.sha,"HEAD"],a)).stdout;let g=(await C("git",["diff",e.sha,"HEAD","--binary"],a)).stdout;c!==g&&(m=wt.from(g).toString("base64"))}let y={hasChanges:!0,diff:s,resultDiff:c,ignored:i};return s!==d&&(y.diffBinary=wt.from(d).toString("base64")),m&&(y.resultDiffBinary=m),y},Me=async(e=[])=>{await C("git",["add",".",...e])},Tt=async()=>(await C("git",["status","-s"])).stdout,xt=/.. (.+)?\.log$/,xr=[xt],vr=async()=>{let e=await Tt();return{hasChanges:(e.trim().length===0?[]:e.split(`
- `).filter(o=>xr.some(i=>i instanceof RegExp?i.test(o):o===i)?!1:o[1]?.trim()!=="")).length!==0,status:e}},vt=async()=>{let{stdout:e}=await C("git",["rev-parse","HEAD"]);return e.trim()},Rt=async()=>{let{stdout:e}=await C("git",["rev-list","--max-parents=0","HEAD"]);return e.trim()},Ge=async e=>{e||=await Tt();let t=[".netlify","node_modules"],r=[];return e.split(`
- `).forEach(o=>{t.forEach(i=>{[`?? ${i}`,`?? ${i}${Tr.sep}`].some(l=>o.startsWith(l))&&r.push(`:!${i}`)});let n=o.match(xt)?.[1];n&&r.push(`:!${n}.log`)}),r},St=async()=>{await C("git",["reset","--hard","HEAD"])},Rr=e=>{let t=e.split(`
- `).reduce((r,o)=>{if(!o)return r;let[n,i,,...a]=o,l=a.join(""),s=n.trim(),h=i.trim();return r[l]?r[l].change=h:r[l]={filePath:l,stage:s,change:h},r},{});return Object.values(t)},Sr=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push(C("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import Nr from"fs/promises";import br from"os";import bt from"path";import Z from"process";import Cr from"readline";import je from"path";import Ar from"fs/promises";var Ye=_("agent-output-utils");async function oe({initialResult:e,agentName:t,hasError:r}){let o="",n=je.join(process.cwd(),U,ne);try{let i=await Ar.readFile(n,"utf-8");i&&(o=i,Ye.log(`Pulled result from ${je.relative(process.cwd(),n)}`))}catch{Ye.log(`No results file found at ${je.relative(process.cwd(),n)}`)}return o||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function ie({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,o=r?.replace(/\s+/g," ").trim().toLowerCase()||"",n="";return o?.includes("ai gateway is not available for your account")||o?.includes("ai gateway is not enabled for your account")?n="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":o?.includes("error when talking to gemini api")?n="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(o?.includes("connection closed prematurely")||o?.includes("499")&&t.toLowerCase().includes("gemini"))&&(n=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),o?.includes("request timed out")&&(n=`The ${t} API request's have timed out. Please try again or use a different available agent.`),o?.includes("network error")&&(n=`The ${t} agent is having network issues. 
Please try again or use a different available agent.`),n&&Ye.log(`Providing updated error messsage: ${n}, replacing original error: ${r}`),n||r||void 0}function se(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error"))}var M=_("runner_claude"),At="Claude Code",Te="claude-sonnet-4-5-20250929",Nt=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,Pr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(M.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(M.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(M.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function Be({config:e,netlify:t,persistSteps:r,aiGateway:o,continueSession:n,priorAgentSessionId:i}){let a=e,{accountType:l,prompt:s,modelVersionOverrides:h}=a,{model:d}=a,c="";if(o){let{token:u,url:E}=o;if(!u||!E)throw new Error("No token or url provided from AI Gateway");if(h?.claude){let p=h?.claude?.[l];if(p){if(!await o.isModelAvailableForProvider("anthropic",p))throw new Error(`Model override '${p}' is not available for anthropic provider`);d=p}}else if(d){if(!await o.isModelAvailableForProvider("anthropic",d))throw new Error(`Model '${d}' is not available for anthropic provider`)}else await o.isModelAvailableForProvider("anthropic",Te)?(d=Te,M.log(`Using default model: ${Te}`)):M.log(`Default model ${Te} is not available, proceeding without model specification`);Z.env.ANTHROPIC_API_KEY=u,Z.env.ANTHROPIC_BASE_URL=E}else if(!Z.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let m=[],y=[],x={},I=0,g=0,v,R,N=[J(Z.cwd(),"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose",...d?["--model",d]:[],...n?["--continue"]:[],...n&&i?["--resume",i]:[],"-p",s],O=`${Z.env.NVM_BIN}/node`;M.log(`Running ${O} ${N.join(" ")}`);let b=t.utils.run(O,N,{all:!0,env:Z.env});b.stdin?.end();let S=Ie(()=>{r?.({steps:m,duration:g})},250),w=(u,E)=>{let p={...u,id:I};I+=1,y.push(p),m.push(p),E||S.flush(),S(),E&&S.flush()},f=Cr.createInterface({input:b.all});return f.on("error",u=>{M.error("Readline interface error",{error:u.message,stack:u.stack})}),f.on("line",u=>{let E=null;try{E=JSON.parse(u)}catch{M.log("Could not parse line",u)}E?.session_id&&E.session_id!==c&&(c=E.session_id),Array.isArray(E?.message?.content)?E.message.content.forEach(p=>{switch(p.type){case"text":{p.text&&w({message:p.text});break}case"image":{typeof p.source=="object"&&p.source&&p.source.type==="base64"&&p.source.media_type?w({message:`![](data:${p.source.media_type};base64,${p.source.data})`}):M.log(`Unsupported image type 
${p.source?.type}`,p.source);break}case"tool_use":{if(p.name==="Task"){let T=p.input?.description&&`\`${p.input.description}\``;w({title:[Nt(p.name),T].filter(Boolean).join(" ")})}else p.id&&(x[p.id]=p);S.flush();break}case"tool_result":{let T=p.tool_use_id?x[p.tool_use_id]:void 0,le;if(T){let ee=T.input?.file_path&&bt.relative(Z.cwd(),T.input.file_path),F=ee&&`\`${ee}\``;le=[Nt(T.name||""),F].filter(Boolean).join(" ")}let Qe=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(T?.name||""),V;if(typeof p.content=="string")V=p.content;else if(Array.isArray(p.content)){let ee=[];p.content.forEach(F=>{F?.type==="text"&&typeof F.text=="string"?ee.push(F.text):F?.type==="image"&&typeof F.source=="object"&&F.source?F.source.type==="base64"&&F.source.media_type?ee.push(`![](data:${F.source.media_type};base64,${F.source.data})`):M.log(`Unsupported image type ${F.source.type}`,F.source):M.log(`Unsupported block type ${F?.type}`)}),V=ee.join(`
-
- `)}Qe&&V&&(V=`\`\`\`
- ${V.trim()}
- \`\`\``),w({title:le,message:V},!0);break}case"thinking":{p.thinking&&w({title:"Thinking",message:p.thinking},!0);break}default:M.log(`Message content type is not supported ${p.type}`,p)}}):E?.type==="result"&&(g=E.duration_ms||0,E.is_error?R=E.result:v=E.result,[y,m].forEach(p=>{p[p.length-1]?.message===v&&p.pop()}))}),await b.catch(u=>{({error:R,result:v}=Pr({catchError:u,runCmd:b,error:R,result:v,runnerName:"Claude"}))}),f.close(),S.flush(),{steps:y,duration:g,result:await oe({initialResult:v,agentName:At,hasError:!!R}),error:ie({error:R,agentName:At}),isRetryableError:se(R),agentSessionId:c}}var Ct=async()=>{let e=bt.join(br.homedir(),".claude");await Nr.rm(e,{recursive:!0,force:!0})};import xe from"fs/promises";import Ot from"os";import He from"path";import W from"process";import Or from"readline";var G=_("runner_codex"),Pt="Codex CLI",Fr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(G.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(G.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(G.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function qe({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:i,prompt:a,modelVersionOverrides:l}=e,{model:s}=e;if(n){let{token:f,url:u}=n;if(!f||!u)throw new Error("No token or url provided from AI Gateway");if(l?.codex){let E=l?.codex?.[i];if(E){if(!await n.isModelAvailableForProvider("openai",E))throw new Error(`Model override '${E}' is not available for openai provider`);s=E}}else if(s&&!await n.isModelAvailableForProvider("openai",s))throw new Error(`Model '${s}' is not available for openai provider`);W.env.OPENAI_API_KEY=f,W.env.OPENAI_BASE_URL=u}else if(!W.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let h=[],d=[],c={},m=0,y=0,x,I,g=`${W.env.NVM_BIN}/node`,v=He.join(Ot.homedir(),".codex"),R=He.join(v,"config.toml");try{await xe.mkdir(v,{recursive:!0});let f="";try{f=await xe.readFile(R,"utf-8")}catch{}f.includes("web_search")||(f.includes("[tools]")?f=f.replace(/\[tools\]/,`[tools]
- web_search = true`):f+=`
- [tools]
- web_search = true
- `,await xe.writeFile(R,f,"utf-8"),G.log("Updated Codex config with web_search enabled"))}catch(f){G.warn("Failed to update Codex config",{error:f.message})}let N=[J(W.cwd(),"codex"),"login","--with-api-key"];G.log(`Running ${g} ${N.join(" ")}`);let O=t.utils.run(g,N,{input:W.env.OPENAI_API_KEY,env:{...W.env}});try{await O,G.log("Successfully logged in to Codex")}catch(f){throw G.error("Failed to login to Codex",{error:f.message}),new Error(`Codex login failed: ${f.message}`)}let b=[J(W.cwd(),"codex"),"exec","--yolo","--json","--config","web_search=true",...s?["--model",s]:[],a].filter(Boolean);G.log(`Running ${g} ${b.join(" ")}`);let S=t.utils.run(g,b,{all:!0,env:{...W.env}}),w=Or.createInterface({input:S.all});return w.on("error",f=>{G.error("Readline interface error",{error:f.message,stack:f.stack})}),w.on("line",f=>{let u=null;try{u=JSON.parse(f)}catch{G.log("Could not parse line",f);return}let E=[],p=!1;if(u?.duration_ms&&(y=u.duration_ms,p=!0),u?.type==="local_shell_call")c[u.call_id]=u;else if(u?.type==="local_shell_call_output"){let T=Dr(c[u.call_id],u);T&&(T.id=m,m+=1,d.push(T),h.push(T),E.push(T),p=!0)}else u?.type==="message"&&u.role==="assistant"?x=u.content.map(T=>T.text).join(`
- `):u?.type==="message"&&u.role==="system"&&(I=u.content.map(T=>T.text).join(`
- `));p&&(r?.({steps:h,duration:y}),o?.({steps:E,duration:y}))}),await S.catch(f=>{let u=Fr({catchError:f,runCmd:S,error:I,result:x,runnerName:"Codex"});I=u.error,x=u.result}),w.close(),{steps:d,duration:y,result:await oe({initialResult:x,agentName:Pt,hasError:!!I}),error:ie({error:I,agentName:Pt}),isRetryableError:se(I)}}var Ft=async()=>{let e=He.join(Ot.homedir(),".codex");await xe.rm(e,{recursive:!0,force:!0})},$r=new Set(["bash","-lc"]),Dr=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(i=>!$r.has(i)),o=r?`Running \`${r.join(" ")}\``:void 0,n;try{n=JSON.parse(t.output).output?.trim(),n&&(n=`\`\`\`
+ `);return s.length>e.length*.8?e:s}import xe from"process";import{getTracer as Hr}from"@netlify/otel";import ue from"process";var me=ue.env.NETLIFY_API_URL,he=ue.env.NETLIFY_API_TOKEN,j=E("api"),ye=()=>ue.env.NETLIFY_LOCAL_MODE==="true",ce=async(e,t={})=>{if(!me||!he)throw new Error("No API URL or token");let r=new URL(e,me),o={...t,headers:{...t.headers,Authorization:`Bearer ${he}`}};ue.env.AGENT_RUNNERS_DEBUG==="true"&&(o.headers["x-nf-debug-logging"]="true"),t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),s=n.ok&&n.status<=299;if(ue.env.AGENT_RUNNERS_DEBUG==="true")j.log(`Response headers for ${r}:`),n.headers.forEach((l,i)=>{j.log(` ${i}: ${l}`)});else{let l=n.headers.get("x-request-id")||n.headers.get("x-nf-request-id");j.log(`Request ID for ${r}: ${l||"N/A"}`)}if(s||j.error(`Got status ${n.status} for request ${r}`),t.raw){if(!s)throw n;return n}let a=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!s)throw a;return a},st=e=>{j.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(me=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(he=e.constants.NETLIFY_API_TOKEN)},it=()=>({apiUrl:me,token:he}),de=async(e,t)=>ye()?(j.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):ce(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),B=async(e,t,r)=>ye()?(j.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):ce(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var at=async(e,t)=>ye()?(j.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):ce(`/api/v1/agent_runners/${e}/sessions/${t}`),lt=(e,t,r)=>ce(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r}}),ut=async(e,t)=>ye()?(j.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):ce(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),Oe=async(e,t)=>{j.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var ee=E("ai_gateway"),Fe=null;var ct=async()=>{if(Fe)return Fe;ee.log("Fetching available AI gateway providers");let e=await fetch(`${it().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return Fe=t,ee.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},or=async(e,t)=>{let o=(await ct()).providers[e];if(!o)return ee.log(`Provider '${e}' not found`),!1;let n=o.models.includes(t);return ee.log(`Model validation for ${e}/${t}`,{isAvailable:n}),n},dt=async({netlify:e,config:t})=>{let r,o,n,s,a=e.constants?.SITE_ID;if(!a)throw new Error("No site id");let l=async()=>{clearTimeout(n),ee.log("Requesting AI gateway information");let i=await lt(a,t.id,t.sessionId);if({token:r,url:s}=i,o=i.expires_at?i.expires_at*1e3:void 0,ee.log("Got AI gateway information",{token:!!r,expiresAt:o,url:s}),o){let 
m=o-Date.now()-6e4;m>0&&(n=setTimeout(()=>{l()},m))}};return await Promise.all([l(),ct()]),{get url(){return s},get token(){return r},isModelAvailableForProvider:or}};import q from"process";import Ee from"path";import $e from"fs";import{fileURLToPath as cr}from"url";import{execa as dr,execaCommand as Mn}from"execa";import{Transform as sr}from"stream";var ir=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","ERROR_LOGS_PATH","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),ar=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function lr(){return Object.entries(process.env).filter(([e,t])=>!(!t||ir.has(e)||ar.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function H(e){if(typeof e!="string")return e;let t=lr();if(t.length===0)return e;let r=e;return t.forEach(o=>{let n=new RegExp(ur(o),"g");r=r.replace(n,"******")}),r}function ur(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var te=class extends sr{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,o){let n=t.toString(),s=H(n);o(null,s)}};function pt(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(o,n,s){let a=typeof o=="string"?H(o):o;return typeof n=="function"?t(a,n):t(a,n,s)},process.stderr.write=function(o,n,s){let a=typeof o=="string"?H(o):o;return typeof n=="function"?r(a,n):r(a,n,s)}}var pe=null,ft=e=>(pe&&pe.destroy(),pe=new V({totalAllowedTime:e}),pe),gt=()=>pe;var V=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,o)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let n=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),s=null,a=null;o!==void 0&&(a=new Promise((l,i)=>{s=setTimeout(()=>{i(new Error(`${t} stage exceeded its maximum duration of ${o}ms`))},o)}));try{return a?await Promise.race([r(),a]):await r()}finally{n(),s&&clearTimeout(s)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var pr=cr(import.meta.url),fr=Ee.dirname(pr),_e=E("shell"),De=new Set,gr={preferLocal:!0},C=(e,t,r)=>{let[o,n]=mr(t,r),s={...gr,...n},a=dr(e,o,s);return hr(a,s),Er(a),a};var mr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},hr=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(q.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new te).pipe(q.stdout),e.stdout?.pipe(new te).pipe(q.stdout),e.stderr?.pipe(new te).pipe(q.stderr);return}e.stdout?.pipe(q.stdout),e.stderr?.pipe(q.stderr)},mt=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(q.kill(-e.pid,t),_e.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return _e.error("Error killing process:",r),!1}},yr=e=>mt(e,"SIGKILL"),Er=e=>{De.add(e);let t=gt();if(t){let r=t.onTimesUp(()=>{_e.log(`Global timer expired, killing process ${e.pid}`),mt(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(_e.log(`Force killing process ${e.pid} after timeout`),yr(e))},5e3)});e.on("exit",()=>{De.delete(e),r()}),e.on("error",()=>{De.delete(e),r()})}};function we(e,t){return!!J(e,t)}function J(e,t){if(q.env.NODE_PATH){let n=Ee.join(q.env.NODE_PATH,".bin",t);if($e.existsSync(n))return n}let r=Ee.join(e,"node_modules",".bin",t);if($e.existsSync(r))return r;let o=Ee.join(fr,"..","node_modules",".bin",t);if($e.existsSync(o))return o}var ht="netlify-agent-runner-context.md",Le="task-history",ke="netlify-context",U=".netlify",re="results.md",Ue="assets";var _r=E("utils"),wr=e=>new Promise(t=>{setTimeout(t,e)}),yt=(e,t=3e3)=>{let r=!1,o=null,n=[],s=null,a=(...l)=>{if(r)return o=l,new Promise(c=>{n.push(c)});r=!0;let i,m=new Promise(c=>{i=c});return s=(async()=>{await Promise.resolve();let c=await e(...l);for(i(c);;){if(await wr(t),!o)return r=!1,s=null,c;let u=o,p=n;o=null,n=[],c=await e(...u),p.forEach(h=>{h(c)})}})(),m};return a.flush=async()=>{if((r||o)&&s)return await 
s,a.flush()},a},Ie=(e,t,r=!1)=>{let o=null,n=null,s=null,a=function(...l){n=l,s=this;let i=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(s,n),n=null,s=null)},t),i&&(e.apply(s,n),n=null,s=null)};return a.cancel=()=>{clearTimeout(o),o=null,n=null,s=null},a.flush=()=>{if(o){clearTimeout(o);let l=n,i=s;o=null,n=null,s=null,e.apply(i,l)}},a},Et=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(o){t&&(r?.error?r.error("Could not parse JSON",o):_r.error("Could not parse JSON",o))}},_t=(e,t)=>{let n=".netlify.app",s="agent-";if(!t)return`${s}${e.slice(0,6)}`;let l=`--${t}${n}`;if(l.length>55)return"";let i=60-l.length;if(i<=0)return"";if(i>=s.length+6){let m=Math.min(i-s.length,e.length);return`${s}${e.slice(0,m)}`}return e.slice(0,i)};import{Buffer as wt}from"buffer";import Ir from"path";var It=E("repo"),Tt=async({config:e})=>{It.info("Getting runner diffs");let t=await xr(),{hasChanges:r}=t,{status:o}=t;if(!r)return{hasChanges:!1};let n=vr(o);await Rr(n),It.info("Changes after processing"),await Ge();let s=await je(o);await Me(s);let a={stdio:["ignore","pipe","pipe"]},i=(await C("git",["diff","--staged"],a)).stdout;if(r=!!i,!r)return{hasChanges:!1,ignored:s};let c=(await C("git",["diff","--staged","--binary"],a)).stdout,u,p;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await C("git",["commit","-m","Agent runner"]),u=(await C("git",["diff",e.sha,"HEAD"],a)).stdout;let g=(await C("git",["diff",e.sha,"HEAD","--binary"],a)).stdout;u!==g&&(p=wt.from(g).toString("base64"))}let h={hasChanges:!0,diff:i,resultDiff:u,ignored:s};return i!==c&&(h.diffBinary=wt.from(c).toString("base64")),p&&(h.resultDiffBinary=p),h},Me=async(e=[])=>{await C("git",["add",".",...e])},Ge=async()=>(await C("git",["status","-s"])).stdout,xt=/.. (.+)?\.log$/,Tr=[xt],xr=async()=>{let e=await Ge();return{hasChanges:(e.trim().length===0?[]:e.split(`
+ `).filter(o=>Tr.some(s=>s instanceof RegExp?s.test(o):o===s)?!1:o[1]?.trim()!=="")).length!==0,status:e}},vt=async()=>{let{stdout:e}=await C("git",["rev-parse","HEAD"]);return e.trim()},Rt=async()=>{let{stdout:e}=await C("git",["rev-list","--max-parents=0","HEAD"]);return e.trim()},je=async e=>{e||=await Ge();let t=[".netlify","node_modules"],r=[];return e.split(`
+ `).forEach(o=>{t.forEach(s=>{[`?? ${s}`,`?? ${s}${Ir.sep}`].some(l=>o.startsWith(l))&&r.push(`:!${s}`)});let n=o.match(xt)?.[1];n&&r.push(`:!${n}.log`)}),r},St=async()=>{await C("git",["reset","--hard","HEAD"])},vr=e=>{let t=e.split(`
+ `).reduce((r,o)=>{if(!o)return r;let[n,s,,...a]=o,l=a.join(""),i=n.trim(),m=s.trim();return r[l]?r[l].change=m:r[l]={filePath:l,stage:i,change:m},r},{});return Object.values(t)},Rr=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push(C("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import Ar from"fs/promises";import Nr from"os";import bt from"path";import X from"process";import br from"readline";import Ye from"path";import Sr from"fs/promises";var Be=E("agent-output-utils");async function ne({initialResult:e,agentName:t,hasError:r}){let o="",n=Ye.join(process.cwd(),U,re);try{let s=await Sr.readFile(n,"utf-8");s&&(o=s,Be.log(`Pulled result from ${Ye.relative(process.cwd(),n)}`))}catch{Be.log(`No results file found at ${Ye.relative(process.cwd(),n)}`)}return o||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function oe({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,o=r?.replace(/\s+/g," ").trim().toLowerCase()||"",n="";return o?.includes("ai gateway is not available for your account")||o?.includes("ai gateway is not enabled for your account")?n="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":o?.includes("error when talking to gemini api")?n="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(o?.includes("connection closed prematurely")||o?.includes("499")&&t.toLowerCase().includes("gemini"))&&(n=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),o?.includes("request timed out")&&(n=`The ${t} API request's have timed out. Please try again or use a different available agent.`),o?.includes("network error")&&(n=`The ${t} agent is having network issues. 
Please try again or use a different available agent.`),n&&Be.log(`Providing updated error messsage: ${n}, replacing original error: ${r}`),n||r||void 0}function se(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error"))}var M=E("runner_claude"),At="Claude Code",Te="claude-sonnet-4-5-20250929",Nt=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,Cr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(M.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(M.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(M.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function He({config:e,netlify:t,persistSteps:r,aiGateway:o,continueSession:n,priorAgentSessionId:s}){let a=e,{accountType:l,prompt:i,modelVersionOverrides:m}=a,{model:c}=a,u="";if(o){let{token:f,url:I}=o;if(!f||!I)throw new Error("No token or url provided from AI Gateway");if(m?.claude){let d=m?.claude?.[l];if(d){if(!await o.isModelAvailableForProvider("anthropic",d))throw new Error(`Model override '${d}' is not available for anthropic provider`);c=d}}else if(c){if(!await o.isModelAvailableForProvider("anthropic",c))throw new Error(`Model '${c}' is not available for anthropic provider`)}else await o.isModelAvailableForProvider("anthropic",Te)?(c=Te,M.log(`Using default model: ${Te}`)):M.log(`Default model ${Te} is not available, proceeding without model specification`);X.env.ANTHROPIC_API_KEY=f,X.env.ANTHROPIC_BASE_URL=I}else if(!X.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let p=[],h=[],x={},T=0,g=0,S,R,N=[J(X.cwd(),"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose",...c?["--model",c]:[],...n?["--continue"]:[],...n&&s?["--resume",s]:[],"-p",i],v=`${X.env.NVM_BIN}/node`;M.log(`Running ${v} ${N.join(" ")}`);let _=t.utils.run(v,N,{all:!0,env:X.env});_.stdin?.end();let A=Ie(()=>{r?.({steps:p,duration:g})},250),w=(f,I)=>{let d={...f,id:T};T+=1,h.push(d),p.push(d),I||A.flush(),A(),I&&A.flush()},y=br.createInterface({input:_.all});return y.on("error",f=>{M.error("Readline interface error",{error:f.message,stack:f.stack})}),y.on("line",f=>{let I=null;try{I=JSON.parse(f)}catch{M.log("Could not parse line",f)}I?.session_id&&I.session_id!==u&&(u=I.session_id),Array.isArray(I?.message?.content)?I.message.content.forEach(d=>{switch(d.type){case"text":{d.text&&w({message:d.text});break}case"image":{typeof d.source=="object"&&d.source&&d.source.type==="base64"&&d.source.media_type?w({message:`![](data:${d.source.media_type};base64,${d.source.data})`}):M.log(`Unsupported image type 
${d.source?.type}`,d.source);break}case"tool_use":{if(d.name==="Task"){let F=d.input?.description&&`\`${d.input.description}\``;w({title:[Nt(d.name),F].filter(Boolean).join(" ")})}else d.id&&(x[d.id]=d);A.flush();break}case"tool_result":{let F=d.tool_use_id?x[d.tool_use_id]:void 0,le;if(F){let Q=F.input?.file_path&&bt.relative(X.cwd(),F.input.file_path),$=Q&&`\`${Q}\``;le=[Nt(F.name||""),$].filter(Boolean).join(" ")}let Qe=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(F?.name||""),K;if(typeof d.content=="string")K=d.content;else if(Array.isArray(d.content)){let Q=[];d.content.forEach($=>{$?.type==="text"&&typeof $.text=="string"?Q.push($.text):$?.type==="image"&&typeof $.source=="object"&&$.source?$.source.type==="base64"&&$.source.media_type?Q.push(`![](data:${$.source.media_type};base64,${$.source.data})`):M.log(`Unsupported image type ${$.source.type}`,$.source):M.log(`Unsupported block type ${$?.type}`)}),K=Q.join(`
+
+ `)}Qe&&K&&(K=`\`\`\`
+ ${K.trim()}
+ \`\`\``),w({title:le,message:K},!0);break}case"thinking":{d.thinking&&w({title:"Thinking",message:d.thinking},!0);break}default:M.log(`Message content type is not supported ${d.type}`,d)}}):I?.type==="result"&&(g=I.duration_ms||0,I.is_error?R=I.result:S=I.result,[h,p].forEach(d=>{d[d.length-1]?.message===S&&d.pop()}))}),await _.catch(f=>{({error:R,result:S}=Cr({catchError:f,runCmd:_,error:R,result:S,runnerName:"Claude"}))}),y.close(),A.flush(),{steps:h,duration:g,result:await ne({initialResult:S,agentName:At,hasError:!!R}),error:oe({error:R,agentName:At}),isRetryableError:se(R),agentSessionId:u}}var Ct=async()=>{let e=bt.join(Nr.homedir(),".claude");await Ar.rm(e,{recursive:!0,force:!0})};import Pr from"fs/promises";import Or from"os";import Fr from"path";import ie from"process";import $r from"readline";var z=E("runner_codex"),Pt="Codex CLI",Dr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(z.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(z.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(z.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function qe({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:s,prompt:a,modelVersionOverrides:l}=e,{model:i}=e;if(n){let{token:v,url:_}=n;if(!v||!_)throw new Error("No token or url provided from AI Gateway");if(l?.codex){let A=l?.codex?.[s];if(A){if(!await n.isModelAvailableForProvider("openai",A))throw new Error(`Model override '${A}' is not available for openai provider`);i=A}}else if(i&&!await n.isModelAvailableForProvider("openai",i))throw new Error(`Model '${i}' is not available for openai provider`);ie.env.OPENAI_API_KEY=v,ie.env.OPENAI_BASE_URL=_}else if(!ie.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let m=[],c=[],u={},p=0,h=0,x,T,g=[J(ie.cwd(),"codex"),"--approval-mode","full-auto",n?"--disable-response-storage":void 0,"--dangerously-auto-approve-everything",...i?["--model",i]:[],"-q",a].filter(Boolean),S=`${ie.env.NVM_BIN}/node`;z.log(`Running ${S} ${g.join(" ")}`);let R=t.utils.run(S,g,{all:!0,env:{...ie.env,CODEX_UNSAFE_ALLOW_NO_SANDBOX:"1"}}),N=$r.createInterface({input:R.all});return N.on("error",v=>{z.error("Readline interface error",{error:v.message,stack:v.stack})}),N.on("line",v=>{let _=null;try{_=JSON.parse(v)}catch{z.log("Could not parse line",v);return}let A=[],w=!1;if(_?.duration_ms&&(h=_.duration_ms,w=!0),_?.type==="local_shell_call")u[_.call_id]=_;else if(_?.type==="local_shell_call_output"){let y=kr(u[_.call_id],_);y&&(y.id=p,p+=1,c.push(y),m.push(y),A.push(y),w=!0)}else _?.type==="message"&&_.role==="assistant"?x=_.content.map(y=>y.text).join(`
+ `):_?.type==="message"&&_.role==="system"&&(T=_.content.map(y=>y.text).join(`
+ `));w&&(r?.({steps:m,duration:h}),o?.({steps:A,duration:h}))}),await R.catch(v=>{let _=Dr({catchError:v,runCmd:R,error:T,result:x,runnerName:"Codex"});T=_.error,x=_.result}),N.close(),{steps:c,duration:h,result:await ne({initialResult:x,agentName:Pt,hasError:!!T}),error:oe({error:T,agentName:Pt}),isRetryableError:se(T)}}var Ot=async()=>{let e=Fr.join(Or.homedir(),".codex");await Pr.rm(e,{recursive:!0,force:!0})},Lr=new Set(["bash","-lc"]),kr=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(s=>!Lr.has(s)),o=r?`Running \`${r.join(" ")}\``:void 0,n;try{n=JSON.parse(t.output).output?.trim(),n&&(n=`\`\`\`
  ${n.trim()}
- \`\`\``)}catch(i){G.error("Could not decode outputMsg",i,t.output)}return{title:o,message:n}};import kr from"fs/promises";import Lr from"os";import Dt from"path";import Q from"process";import Ur from"readline";var ae=_("runner_gemini"),$t="Gemini CLI",Mr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(ae.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(ae.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(ae.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),Gr={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"};async function Ke({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:i,prompt:a,modelVersionOverrides:l}=e,{model:s}=e;if(n){let{token:w,url:f}=n;if(!w||!f)throw new Error("No token or url provided from AI Gateway");if(l?.gemini){let u=l?.gemini?.[i];if(u){if(!await n.isModelAvailableForProvider("gemini",u))throw new Error(`Model override '${u}' is not available for gemini provider`);s=u}}else if(s&&!await n.isModelAvailableForProvider("gemini",s))throw new Error(`Model '${s}' is not available for gemini provider`);Q.env.GEMINI_API_KEY=w,Q.env.GOOGLE_GEMINI_BASE_URL=f}else if(!Q.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let h=[],d=[],c=[],m={},y=0,x=0,I,g,v=[J(Q.cwd(),"gemini"),...s?["--model",s]:[],"--yolo","-p",a],R=`${Q.env.NVM_BIN}/node`;ae.log(`Running ${R} ${v.join(" ")}`);let N=t.utils.run(R,v,{all:!0,env:Q.env});N.stdin?.end();let O=Ie(()=>{r?.({steps:h,duration:x}),o?.({steps:d,duration:x}),d=[]},250),b=(w,f)=>{w.id=y,y+=1,c.push(w),h.push(w),d.push(w),f||O.flush(),O(),f&&O.flush()},S=Ur.createInterface({input:N.all});return S.on("error",w=>{ae.error("Readline interface error",{error:w.message,stack:w.stack})}),S.on("line",w=>{let f=null;try{if(w.startsWith("[API Error")){let u=w.match(/\[api error: (.+?)]$/i)?.[1];f={type:"error",value:Et(u,!1)?.error?.message||u||"Gemini encountered error"}}else f=JSON.parse(w)}catch{return}if(f)switch(f.type){case"thought":{let u=f.value;b({title:u?.subject??"Thinking...",message:u?.description},!0);break}case"content":{f.value&&b({message:f.value});break}case"tool_call_request":{let u=f.value,E=Gr[u.name]??u.name,p=u.args?.path||u.args?.absolute_path,T=p&&Dt.relative(Q.cwd(),p),le=u.args?.command,V={title:[E,T&&`\`${T}\``,le&&`\`${le}\``].filter(Boolean).join(" ")};m[u.callId]=V,O.flush();break}case"tool_result":{let u=f.value,E=m[u.callId];if(E){let p=[u.resultDisplay,u.responseParts?.functionResponse?.response?.output].find(T=>typeof T=="string"&&T);p&&(E.message=`\`\`\`
- ${p.trim()}
- \`\`\``),b(E,!0)}break}case"result":{x=f.duration_ms,I=f.value,[c,h,d].forEach(u=>{u[u.length-1]?.message===I&&u.pop()});break}case"error":{g=f.value;break}case"finished":break;default:{ae.warn("Unhandled message type:",f.type);break}}}),await N.catch(w=>{({error:g,result:I}=Mr({catchError:w,runCmd:N,error:g,result:I,runnerName:"Gemini"}))}),S.close(),O.flush(),{steps:c,duration:x,result:await oe({initialResult:I,agentName:$t,hasError:!!g}),error:ie({error:g,agentName:$t}),isRetryableError:se(g)}}var kt=async()=>{let e=Dt.join(Lr.homedir(),".gemini");await kr.rm(e,{recursive:!0,force:!0})};var jr={codex:{runner:qe,clean:Ft},claude:{runner:Be,clean:Ct},gemini:{runner:Ke,clean:kt}},Lt=jr;var Ut=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:o})=>await A(Yr(),"init-stage",async n=>{let i=performance.now();n?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.validateAgent":e.validateAgent,"init.runnerVersion":o||"unknown"});let a=Lt[e.runner];if(!a)throw n?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let l=Br({apiToken:r});it(l);let s=e.useGateway?await dt({netlify:l,config:e}):void 0;n?.setAttributes({"init.aiGateway.created":!!s}),e.validateAgent&&e.errorLogsPath&&n?.setAttributes({"init.errorLogsPath":e.errorLogsPath});let h=yt(({steps:y=[],duration:x})=>{let I=y.map(g=>({...g,title:g.title?q(g.title):void 0,message:g.message?q(g.message):void 0}));return y.length=0,H(e.id,e.sessionId,{steps:I,duration:x})},t),d=await Ge();await Me(d);let c;e.hasRepo?e.sha?(c=e.sha,n?.setAttributes({"init.sha.source":"provided"})):(c=await vt(),await de(e.id,{sha:c}),n?.setAttributes({"init.sha.source":"current_commit"})):(c=await Rt(),n?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let m=performance.now()-i;return n?.setAttributes({"init.sha":c||"unknown","init.duration.ms":m,"init.status":"success"}),{aiGateway:s,context:l,persistSteps:h,runner:a,sha:c}}),Br=({apiToken:e})=>({constants:{NETLIFY_API_HOST:ve.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||ve.env.NETLIFY_API_TOKEN,SITE_ID:ve.env.SITE_ID,FUNCTIONS_DIST:ve.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:C}});import{getTracer as We}from"@netlify/otel";import Hr from"crypto";import X from"fs/promises";import k from"path";import j from"process";var D=_("context"),qr=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:j.env.NETLIFY_TEAM_ID,userId:j.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:j.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},Kr=10,Wr=async e=>{let{name:t,ext:r}=k.parse(e),o=e,n=k.join(j.cwd(),U,o),i=0;for(;await Vr(n);){if(i>=Kr)throw new Error("Failed to generate context file");o=`${t}-${Hr.randomUUID().slice(0,5)}${r}`,n=k.join(j.cwd(),U,o),i+=1}return o},Vr=async e=>{try{return await X.access(e),!0}catch{return!1}},Jr=async()=>{try{D.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return D.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(o=>o&&typeof o=="object"&&o.key==="catchall-consumer");return r?!r.contextScopes||typeof r.contextScopes!="object"?(D.warn("Catchall consumer missing or invalid contextScopes"),null):r:(D.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?D.warn("Netlify features context request timed out"):D.warn("Failed to fetch Netlify features context:",e.message),null}},Xr=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let o=await r.text();return await X.writeFile(t,o,"utf-8"),!0}catch(r){return r.name==="AbortError"?D.warn(`Download timeout for ${e}`):D.warn(`Failed to download context file ${e}:`,r.message),!1}},Re=null,zr=async()=>{if(Re)return Re;let e=await Jr();if(!e)return[];let t=k.join(j.cwd(),U,Le);await X.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([n,i])=>{if(!i||typeof i!="object"||!i.endpoint||!i.scope)return D.warn(`Invalid scope data for ${n}, skipping...`),null;let a=`${n}.md`,l=k.join(t,a),s=k.join(U,Le,a);return D.log(`Downloading ${i.scope} context...`),await Xr(i.endpoint,l)?(D.log(`Downloaded: ${s}`),{scope:i.scope,path:s,key:n}):null});return Re=(await Promise.all(r)).filter(n=>n!==null),Re},Mt=async({cliPath:e,netlify:t,config:r,buildErrorContext:o})=>{let n=qr(t),i=await Wr(ht),a=k.join(j.cwd(),U);await X.mkdir(a,{recursive:!0});let l=k.join(U,i),s=k.join(j.cwd(),l),h=k.join(j.cwd(),U,ne);try{await X.unlink(h),D.log(`Deleted old results file: ${h}`)}catch{}let d=o?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
19
+ \`\`\``)}catch(s){z.error("Could not decode outputMsg",s,t.output)}return{title:o,message:n}};import Ur from"fs/promises";import Mr from"os";import $t from"path";import Z from"process";import Gr from"readline";var ae=E("runner_gemini"),Ft="Gemini CLI",jr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(ae.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(ae.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(ae.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),Yr={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"};async function Ke({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:s,prompt:a,modelVersionOverrides:l}=e,{model:i}=e;if(n){let{token:w,url:y}=n;if(!w||!y)throw new Error("No token or url provided from AI Gateway");if(l?.gemini){let f=l?.gemini?.[s];if(f){if(!await n.isModelAvailableForProvider("gemini",f))throw new Error(`Model override '${f}' is not available for gemini provider`);i=f}}else if(i&&!await n.isModelAvailableForProvider("gemini",i))throw new Error(`Model '${i}' is not available for gemini provider`);Z.env.GEMINI_API_KEY=w,Z.env.GOOGLE_GEMINI_BASE_URL=y}else if(!Z.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let m=[],c=[],u=[],p={},h=0,x=0,T,g,S=[J(Z.cwd(),"gemini"),...i?["--model",i]:[],"--yolo","-p",a],R=`${Z.env.NVM_BIN}/node`;ae.log(`Running ${R} ${S.join(" ")}`);let N=t.utils.run(R,S,{all:!0,env:Z.env});N.stdin?.end();let v=Ie(()=>{r?.({steps:m,duration:x}),o?.({steps:c,duration:x}),c=[]},250),_=(w,y)=>{w.id=h,h+=1,u.push(w),m.push(w),c.push(w),y||v.flush(),v(),y&&v.flush()},A=Gr.createInterface({input:N.all});return A.on("error",w=>{ae.error("Readline interface error",{error:w.message,stack:w.stack})}),A.on("line",w=>{let y=null;try{if(w.startsWith("[API Error")){let f=w.match(/\[api error: (.+?)]$/i)?.[1];y={type:"error",value:Et(f,!1)?.error?.message||f||"Gemini encountered error"}}else y=JSON.parse(w)}catch{return}if(y)switch(y.type){case"thought":{let f=y.value;_({title:f?.subject??"Thinking...",message:f?.description},!0);break}case"content":{y.value&&_({message:y.value});break}case"tool_call_request":{let f=y.value,I=Yr[f.name]??f.name,d=f.args?.path||f.args?.absolute_path,F=d&&$t.relative(Z.cwd(),d),le=f.args?.command,K={title:[I,F&&`\`${F}\``,le&&`\`${le}\``].filter(Boolean).join(" ")};p[f.callId]=K,v.flush();break}case"tool_result":{let f=y.value,I=p[f.callId];if(I){let d=[f.resultDisplay,f.responseParts?.functionResponse?.response?.output].find(F=>typeof F=="string"&&F);d&&(I.message=`\`\`\`
20
+ ${d.trim()}
21
+ \`\`\``),_(I,!0)}break}case"result":{x=y.duration_ms,T=y.value,[u,m,c].forEach(f=>{f[f.length-1]?.message===T&&f.pop()});break}case"error":{g=y.value;break}case"finished":break;default:{ae.warn("Unhandled message type:",y.type);break}}}),await N.catch(w=>{({error:g,result:T}=jr({catchError:w,runCmd:N,error:g,result:T,runnerName:"Gemini"}))}),A.close(),v.flush(),{steps:u,duration:x,result:await ne({initialResult:T,agentName:Ft,hasError:!!g}),error:oe({error:g,agentName:Ft}),isRetryableError:se(g)}}var Dt=async()=>{let e=$t.join(Mr.homedir(),".gemini");await Ur.rm(e,{recursive:!0,force:!0})};var Br={codex:{runner:qe,clean:Ot},claude:{runner:He,clean:Ct},gemini:{runner:Ke,clean:Dt}},Lt=Br;var qr=E("init_stage"),kt=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:o})=>await b(Hr(),"init-stage",async n=>{let s=performance.now();n?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.validateAgent":e.validateAgent,"init.runnerVersion":o||"unknown"});let a=Lt[e.runner];if(!a)throw n?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let l=Kr({apiToken:r});st(l);let i=e.useGateway?await dt({netlify:l,config:e}):void 0;n?.setAttributes({"init.aiGateway.created":!!i}),e.validateAgent&&e.errorLogsPath&&n?.setAttributes({"init.errorLogsPath":e.errorLogsPath});let m=yt(({steps:h=[],duration:x})=>{let T=h.map(g=>({...g,title:g.title?H(g.title):void 0,message:g.message?H(g.message):void 0}));return h.length=0,B(e.id,e.sessionId,{steps:T,duration:x})},t);qr.info("Adding build files to stage");let c=await je();await Me(c);let u;e.hasRepo?e.sha?(u=e.sha,n?.setAttributes({"init.sha.source":"provided"})):(u=await vt(),await de(e.id,{sha:u}),n?.setAttributes({"init.sha.source":"current_commit"})):(u=await Rt(),n?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let p=performance.now()-s;return n?.setAttributes({"init.sha":u||"unknown","init.duration.ms":p,"init.status":"success"}),{aiGateway:i,context:l,persistSteps:m,runner:a,sha:u}}),Kr=({apiToken:e})=>({constants:{NETLIFY_API_HOST:xe.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||xe.env.NETLIFY_API_TOKEN,SITE_ID:xe.env.SITE_ID,FUNCTIONS_DIST:xe.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:C}});import{getTracer as We}from"@netlify/otel";import Wr from"crypto";import W from"fs/promises";import k from"path";import G from"process";var L=E("context"),Vr=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:G.env.NETLIFY_TEAM_ID,userId:G.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:G.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},Jr=10,Xr=async e=>{let{name:t,ext:r}=k.parse(e),o=e,n=k.join(G.cwd(),U,o),s=0;for(;await zr(n);){if(s>=Jr)throw new Error("Failed to generate context file");o=`${t}-${Wr.randomUUID().slice(0,5)}${r}`,n=k.join(G.cwd(),U,o),s+=1}return o},zr=async e=>{try{return await W.access(e),!0}catch{return!1}},Zr=async()=>{try{L.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return L.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(o=>o&&typeof o=="object"&&o.key==="catchall-consumer");return r?!r.contextScopes||typeof r.contextScopes!="object"?(L.warn("Catchall consumer missing or invalid contextScopes"),null):r:(L.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?L.warn("Netlify features context request timed out"):L.warn("Failed to fetch Netlify features context:",e.message),null}},Qr=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let o=await r.text();return await W.writeFile(t,o,"utf-8"),!0}catch(r){return r.name==="AbortError"?L.warn(`Download timeout for ${e}`):L.warn(`Failed to download context file ${e}:`,r.message),!1}},ve=null,en=async()=>{if(ve)return ve;let e=await Zr();if(!e)return[];let t=k.join(G.cwd(),U,ke);await W.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([n,s])=>{if(!s||typeof s!="object"||!s.endpoint||!s.scope)return L.warn(`Invalid scope data for ${n}, skipping...`),null;let a=`${n}.md`,l=k.join(t,a),i=k.join(U,ke,a);return L.log(`Downloading ${s.scope} context...`),await Qr(s.endpoint,l)?(L.log(`Downloaded: ${i}`),{scope:s.scope,path:i,key:n}):null});return ve=(await Promise.all(r)).filter(n=>n!==null),ve},Ut=async({cliPath:e,netlify:t,config:r,buildErrorContext:o})=>{let n=Vr(t),s=await Xr(ht),a=k.join(G.cwd(),U);await W.mkdir(a,{recursive:!0});let l=k.join(U,s),i=k.join(G.cwd(),l),m=k.join(G.cwd(),U,re);try{await W.unlink(m),L.log(`Deleted old results file: ${m}`)}catch{}let c=o?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
26
22
  Your task is to analyze and fix the build errors.
27
23
  Don't apply techniques of reverting changes. Apply fixes related to errors.
28
24
  Don't try to run build by yourself. Just fix the errors.
29
25
 
30
26
  <build_error_context>
31
27
  ${o}
32
- </build_error_context>`:"",c="";r.siteContext&&r.siteContext.length!==0&&(c=`
28
+ </build_error_context>`:"",u="";r.siteContext&&r.siteContext.length!==0&&(u=`
33
29
  <project_rules>
34
30
  ${r.siteContext.filter(g=>g.site_context).map(g=>typeof g.site_context=="string"?g.site_context:typeof g.site_context=="object"?JSON.stringify(g.site_context):"").join(`
35
31
 
36
32
  `)}
37
33
  </project_rules>
38
- `);let m="";if(r.sessionHistoryContext?.length){let g=k.join(j.cwd(),U,ke);await X.mkdir(g,{recursive:!0});let v=await Promise.all(r.sessionHistoryContext.map(async(R,N)=>{let O=N+1,b=`attempt-${O}.md`,S=k.join(g,b),w=k.join(U,ke,b),f=`# Task History - Attempt ${O}
34
+ `);let p="";if(r.sessionHistoryContext?.length){let g=k.join(G.cwd(),U,Le);await W.mkdir(g,{recursive:!0});let S=await Promise.all(r.sessionHistoryContext.map(async(R,N)=>{let v=N+1,_=`attempt-${v}.md`,A=k.join(g,_),w=k.join(U,Le,_),y=`# Task History - Attempt ${v}
39
35
 
40
36
  ## Request - what the user asked for
41
37
  ${R.request}
@@ -45,40 +41,40 @@ ${R.request}
45
41
  ## Response - what the agent replied with after its work
46
42
 
47
43
  ${R.response}
48
- `;return await X.writeFile(S,f,"utf-8"),D.log(`Created history file: ${w}`),w}));m+=`
44
+ `;return await W.writeFile(A,y,"utf-8"),L.log(`Created history file: ${w}`),w}));p+=`
49
45
  <session_history_context>
50
46
  History of prior work on this task.
51
47
  You MUST review ALL of the files below as context to understand the context of previous attempts. Use this information to continue the discussion appropriately.
52
48
 
53
- ${v.slice(-5).map(R=>`- ${R}`).join(`
49
+ ${S.slice(-5).map(R=>`- ${R}`).join(`
54
50
  `)}
55
51
 
56
52
  </session_history_context>
57
- `}let y=await zr(),x="";y.length>0&&(x=`
53
+ `}let h=await en(),x="";h.length>0&&(x=`
58
54
  <netlify_features_context>
59
55
  If the user request is explicitly related to a specific Netlify feature (e.g., Netlify Forms, Netlify Functions, etc.), you MUST review the relevant documentation below in addition to reviewing the project files.
60
56
  DO NOT force the use of any Netlify feature if the user request does not explicitly require it or if the project has alternative implementations in place already.
61
57
 
62
- ${y.map(g=>`- **${g.scope}**: ${g.path}`).join(`
58
+ ${h.map(g=>`- **${g.scope}**: ${g.path}`).join(`
63
59
  `)}
64
60
 
65
61
  Refer to these files when working with specific Netlify features.
66
62
  </netlify_features_context>
67
- `);let I=`
63
+ `);let T=`
68
64
  You're an AI agent designed to assist with tasks related to a Netlify project. Please review, understand, and use the context provided to complete the user's request as needed.
69
65
 
70
66
  <request>
71
67
  <user_request>
72
68
  ${r.prompt}
73
69
  </user_request>
74
- ${d}
70
+ ${c}
75
71
  </request>
76
72
 
77
73
  <requirements>
78
74
  <responses>
79
75
  - Do not speak in first person. You may speak as "the agent".
80
- - When work is complete, write a changes summary in ${a}/${ne} as a standalone PR description. Explain what was accomplished and why (avoid too many implementation details), assuming the reader has no prior context. Use past tense and write in prose without calling it a "PR", "Changelog", etc. This is the core of a PR message or summary page that already has a heading.
81
- - If the user's request is informational in nature (asking for output, status, information, or analysis rather than asking you to make changes), write the requested information directly to the ${a}/${ne} file.
76
+ - When work is complete, write a changes summary in ${a}/${re} as a standalone PR description. Explain what was accomplished and why (avoid too many implementation details), assuming the reader has no prior context. Use past tense and write in prose without calling it a "PR", "Changelog", etc. This is the core of a PR message or summary page that already has a heading.
77
+ - If the user's request is informational in nature (asking for output, status, information, or analysis rather than asking you to make changes), write the requested information directly to the ${a}/${re} file.
82
78
  - Do not attempt to create git commits, PRs, etc. directly. You can use git to review information if required but the system that runs this agent will handle creating PRs or commits of the changes it performs.
83
79
  - NEVER look into the \`.git\` folder
84
80
  - NEVER print potentially sensitive values (like secrets) in the planning output or results
@@ -87,7 +83,7 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
87
83
  - for requests that require work with attachments or assets, take into account that uploaded attachments are stored in ${a}/${Ue} folder
88
84
  - move assets from ${a}/${Ue} folder to the project assets folder if they are referenced in a code or applied changes
89
85
  </attachements>
90
- ${c}
86
+ ${u}
91
87
  </requirements>
92
88
 
93
89
  <extra_context>
@@ -99,7 +95,7 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
99
95
  - Netlify Functions directory: ${n.functionsDir}
100
96
  </metadata>
101
97
  <environment>
102
- - Node Version: ${j.version||"unknown"}
98
+ - Node Version: ${G.version||"unknown"}
103
99
  - Environment variables are set globally (e.g. \`echo $VARIABLE_NAME\` can be used to check if a var is set).
104
100
  - 'netlify-cli' npm package is already available as a global package. Don't try to install it again
105
101
  - If you need to start a local development server in order to fulfill the request, try using the Netlify CLI over by running the shell command '${e} dev'. This will start a local HTTP server on port 8888, including live-reloading of any changes and, most critically, it offers local emulation for all Netlify features.
@@ -111,29 +107,31 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
111
107
  </docs>
112
108
  </extra_context>
113
109
 
114
- ${m}
115
- `;return await X.writeFile(s,I,"utf-8"),D.log(`Generated agent context document at: ${s}`),I.length>5e5&&(I=`
110
+ ${p}
111
+ `;return await W.writeFile(i,T,"utf-8"),L.log(`Generated agent context document at: ${i}`),T.length>5e5&&(T=`
116
112
  You're an AI agent designed to assist with tasks related to a Netlify project. Please review, understand, and use the context provided to complete the user's request as needed.
117
113
 
118
114
  <request>
119
115
  <user_request>
120
116
  ${r.prompt}
121
117
  </user_request>
122
- ${d}
118
+ ${c}
123
119
  </request>
124
120
 
125
- Use the following file for the complete context of the ask, the environment, and what's available. ${s} You MUST READ ALL OF IT. Make sure to read it first. Never cite or paraphrase private context.
126
- `),I};var Zr=_("prompt"),Gt=async({cliPath:e,config:t,netlify:r,buildErrorContext:o})=>{let n=await Mt({cliPath:e,config:t,netlify:r,buildErrorContext:o});return process.env.AGENT_RUNNER_DEBUG&&Zr.log("Contextful Prompt:",n),{prompt:n}};var Se=_("inference_stage"),jt=5,Ae=async e=>{let{cliPath:t,config:r,context:o,buildErrors:n,runner:i,persistSteps:a,aiGateway:l,attempt:s,contextPrefix:h,priorAgentSessionId:d}=e;Se.log(`Running inference stage, attempt ${s} of ${jt}`);let c=await A(We(),"inference-stage",async m=>{m?.setAttributes({"inference.attempt":s||1}),pt();let{prompt:y}=await A(We(),"compose-prompt",async()=>await Gt({cliPath:t,config:r,buildErrorContext:Qr(n),netlify:o})),x=`
127
- ${h||""}
128
- ${y}
129
- `.trim(),I={...r,prompt:x},g=await A(We(),`run-${r.runner}`,async()=>await i({aiGateway:l,config:I,netlify:o,persistSteps:a,continueSession:!!(s&&s>1),priorAgentSessionId:d}));return g.result&&(g.result=q(g.result)),g.error&&(g.error=q(g.error)),await a.flush(),g});if(c.error){if(Se.error("Runner failed",{stepsCount:c.steps.length,duration:c.duration,error:c.error,isRetryableError:c.isRetryableError,attempt:s||1,agentSessionId:c.agentSessionId}),c.isRetryableError&&(!s||s<jt))return Se.log("Retrying inference stage"),await new Promise(y=>setTimeout(y,5e3)),{runnerResult:(await Ae({...e,attempt:(s||1)+1,priorAgentSessionId:c.agentSessionId,contextPrefix:c.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw Se.log("Do not retry inference stage"),new Error(c.error)}return{runnerResult:c}},Qr=e=>!e||e.length===0?"":`
121
+ Use the following file for the complete context of the ask, the environment, and what's available. ${i} You MUST READ ALL OF IT. Make sure to read it first. Never cite or paraphrase private context.
122
+ `),T};var tn=E("prompt"),Mt=async({cliPath:e,config:t,netlify:r,buildErrorContext:o})=>{let n=await Ut({cliPath:e,config:t,netlify:r,buildErrorContext:o});return process.env.AGENT_RUNNER_DEBUG&&tn.log("Contextful Prompt:",n),{prompt:n}};var Re=E("inference_stage"),Gt=5,Se=async e=>{let{cliPath:t,config:r,context:o,buildErrors:n,runner:s,persistSteps:a,aiGateway:l,attempt:i,contextPrefix:m,priorAgentSessionId:c}=e;Re.log(`Running inference stage, attempt ${i} of ${Gt}`);let u=await b(We(),"inference-stage",async p=>{p?.setAttributes({"inference.attempt":i||1}),pt();let{prompt:h}=await b(We(),"compose-prompt",async()=>await Mt({cliPath:t,config:r,buildErrorContext:rn(n),netlify:o})),x=`
123
+ ${m||""}
124
+ ${h}
125
+ `.trim(),T={...r,prompt:x},g=await b(We(),`run-${r.runner}`,async()=>await s({aiGateway:l,config:T,netlify:o,persistSteps:a,continueSession:!!(i&&i>1),priorAgentSessionId:c}));return g.result&&(g.result=H(g.result)),g.error&&(g.error=H(g.error)),await a.flush(),g});if(u.error){if(Re.error("Runner failed",{stepsCount:u.steps.length,duration:u.duration,error:u.error,isRetryableError:u.isRetryableError,attempt:i||1,agentSessionId:u.agentSessionId}),u.isRetryableError&&(!i||i<Gt))return Re.log("Retrying inference stage"),await new Promise(h=>setTimeout(h,5e3)),{runnerResult:(await Se({...e,attempt:(i||1)+1,priorAgentSessionId:u.agentSessionId,contextPrefix:u.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw Re.log("Do not retry inference stage"),new Error(u.error)}return{runnerResult:u}},rn=e=>!e||e.length===0?"":`
130
126
  Deploy failed. Here are the errors to review on the latest build:
131
127
 
132
128
  Below are all of the logs with potential issues that we extracted. Some of them may be false positives, discern them carefully and ensure fixes are relevant.
133
129
 
134
130
  ${e.pop()}
135
- `;import rn from"process";import{getTracer as Ve}from"@netlify/otel";import{getTracer as en}from"@netlify/otel";var fe=_("deploy"),Yt=async e=>await A(en(),"create-preview-deploy",async t=>tn(e,t)),tn=async({netlify:e,hasRepo:t,skipBuild:r,message:o="Agent Preview",deploySubdomain:n,cliPath:i,filter:a},l)=>{try{let s=["deploy","--message",`"${o}"`,"--json","--draft","--verbose"];t||(fe.log("Deploy: Uploading source zip"),s.push("--upload-source-zip")),n&&s.push("--alias",n),a&&s.push("--filter",a),r?(fe.log("Deploy: Skipping build"),s.push("--no-build")):s.push("--context","deploy-preview");let h=i||"netlify";fe.log(`Running: ${h} ${s.join(" ")}`),l?.setAttributes({cmd:h,args:s});let d=await e.utils.run(h,s,{stdio:["ignore","pipe","pipe"]}),c=JSON.parse(d.stdout.trim());l?.setAttributes({success:!0,deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id}),fe.log(`
136
- Preview deploy created successfully:`,{deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id});let m={deployId:c.deploy_id,previewUrl:c.deploy_url,logsUrl:c.logs,siteId:c.site_id};return t||(m.sourceZipFilename=c.source_zip_filename),m}catch(s){throw fe.error("Failed to create preview deploy via CLI:",s),l?.setAttributes({success:!1,error:s.message}),s}};var ge=_("deploy_stage"),Je=async e=>await A(Ve(),"run-deploy-stage",async()=>nn(e)),nn=async({cliPath:e,config:t,context:r,result:o,filter:n})=>{let i=await A(Ve(),"get-runner-diffs",async()=>await It({config:t,netlify:r}));if(ge.info("Resolved git",{hasChanges:i.hasChanges,ignored:i.ignored??[]}),!i.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:a,resultDiff:l,diffBinary:s,resultDiffBinary:h}=i,d=!0;ge.log("Preview deploy condition check:",{resultUndefined:o===void 0,resultType:typeof o,hasChanges:d,wouldCreatePreview:o!==void 0&&d});let c=null;if(o!==void 0&&d)try{let m;try{let y=await A(Ve(),"get-runner-session",async()=>await at(t.id,t.sessionId));y?.title&&(m=y.title)}catch(y){ge.warn("Failed to fetch session title, using fallback message:",y.message)}await H(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),c=await Yt({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:m,skipBuild:!1,deploySubdomain:_t(t.id,rn.env.SITE_NAME),filter:n})}catch(m){return ge.warn("Failed to create preview deploy (continuing with agent run):",m),{diff:a,resultDiff:l,hasChanges:d,previewInfo:null,diffBinary:s,resultDiffBinary:h,deployError:m instanceof Error?m.message:String(m)}}return ge.log("Git status",{hasDiff:!!a,hasChanges:d}),{diff:a,resultDiff:l,hasChanges:d,previewInfo:c,diffBinary:s,resultDiffBinary:h}};import{getTracer as Ne}from"@netlify/otel";async function Bt(e,t){let{maxRetries:r,baseDelay:o,onRetry:n}=t,i;for(let a=1;a<=r;a++)try{return await e()}catch(l){if(i=l,a===r)throw i;n&&n(a,i),await new Promise(s=>setTimeout(s,o*a))}throw i}var L=_("cleanup_stage"),Ht=async e=>await A(Ne(),"cleanup-stage",async()=>on(e)),Xe=1024*1024*10,on=async({config:e,diff:t,result:r,duration:o,resultDiff:n,diffBinary:i,resultDiffBinary:a,previewInfo:l})=>{let s={result:r||"Done",duration:o};if(l&&l.deployId&&(s.deploy_id=l.deployId),l&&l.sourceZipFilename&&(s.result_zip_file_name=l.sourceZipFilename),t||i||n||a)try{L.log("Getting pre-signed URLs for diff upload");let d=await ut(e.id,e.sessionId),c=[];(t||i)&&c.push(Oe(d.result.upload_url,i||t).then(()=>{s.result_diff_s3_key=d.result.s3_key,L.log("Successfully uploaded result_diff to S3")})),(n||a)&&c.push(Oe(d.cumulative.upload_url,a||n).then(()=>{s.cumulative_diff_s3_key=d.cumulative.s3_key,L.log("Successfully uploaded cumulative_diff to S3")})),L.log(`Uploading ${c.length} diff(s) to S3 in parallel`),await Promise.all(c),(n||a)&&(L.log("Updating agent runner with cumulative diff S3 key"),await A(Ne(),"update-runner",async()=>{await de(e.id,{result_diff_s3_key:d.cumulative.s3_key})}))}catch(d){L.error("S3 upload failed, falling back to inline diffs:",d);let c=Buffer.byteLength(t||i||""),m=Buffer.byteLength(a||n||"");if(c>Xe||m>Xe){let y=`Diffs exceed maximum inline size of ${Xe} bytes.`;throw L.error(y),new Error(y)}s.result_diff=t,s.result_diff_binary=i,(n||a)&&(s.cumulative_diff=n,s.cumulative_diff_binary=a,L.log("Updating agent runner with inline diffs (fallback)"),await A(Ne(),"update-runner",async()=>{await de(e.id,{result_diff:n,result_diff_binary:a})}))}else L.log("No diffs to upload");return L.log("Updated agent runner with result"),await Bt(async()=>await A(Ne(),"update-runner-session",()=>H(e.id,e.sessionId,s)),{maxRetries:3,baseDelay:1e3,onRetry:(d,c)=>{L.error(`Error updating agent runner session (attempt ${d}):`,c),L.log("Retrying...")}}),L.log("Finished updating agent runner with result"),{sessionUpdate:s}};import{getTracer as qt,shutdownTracers as an,withActiveSpan as Kt}from"@netlify/otel";var ln=sn(import.meta.url),Wt=ln("../package.json"),Vt=_("pipeline_index"),be=3,Jt=async({config:e,apiToken:t,cliPath:r="netlify",cwd:o,errorLogsPath:n,filter:i,tracing:a={}})=>{let l,{withStageTimer:s}=ft(z.timeUnits.hours(4)),h=await nt(Wt.version,e.id,a);try{await Kt(qt(),"run-pipeline",{},h,async()=>{let d,{aiGateway:c,context:m,persistSteps:y,runner:x,sha:I}=await s("init",()=>Ut({config:e,apiToken:t,cliPath:r,cwd:o,errorLogsPath:n,filter:i,runnerVersion:Wt.version}),z.timeUnits.minutes(10));l=x.clean,e.sha=I;let{runnerResult:g}=await s("inference",()=>Ae({cliPath:r,config:e,context:m,runner:x.runner,persistSteps:y,aiGateway:c}));await H(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let v=await s("deploy",()=>Je({cliPath:r,config:e,context:m,result:g.result,filter:i})),R=g,N=[];if(v.hasChanges&&v.deployError){N.push(ot(v.deployError));let u=1;for(;u<=be&&!v.previewInfo;)Vt.log(`Deploy attempt had errors. Retrying. ${u}/${be}`),await Kt(qt(),"deploy-stage",async E=>{E?.setAttributes({"stage.attempt":u});let{runnerResult:p}=await s(`inference-retry-${u}`,()=>Ae({cliPath:r,config:e,context:m,runner:x.runner,persistSteps:y,aiGateway:c,buildErrors:N,priorAgentSessionId:g.agentSessionId}));R={...p,steps:[...R.steps||[],...p.steps||[]],duration:(R.duration||0)+(p.duration||0)},v=await s(`deploy-retry-${u}`,()=>Je({cliPath:r,config:e,context:m,result:p.result,filter:i})),v.deployError&&N.push(v.deployError),u++});u>be&&!v.previewInfo&&(d=new Error(`Deploy validation failed after ${be} attempts`))}let{diff:O,resultDiff:b,previewInfo:S,diffBinary:w,resultDiffBinary:f}=v;if(await s("cleanup",()=>Ht({config:e,diff:O,result:R.result,duration:R.duration,resultDiff:b,diffBinary:w,resultDiffBinary:f,previewInfo:S}),z.timeUnits.minutes(10)),d)throw d;process.env.NETLIFY_LOCAL_MODE||(await l?.(),await St())})}catch(d){Vt.error("Got error while running pipeline",d),await l?.();let c=d instanceof Error&&d.message;throw await H(e.id,e.sessionId,{result:c||"Encountered error when running agent",state:"error"}),d}finally{await an()}};import Xt from"crypto";var $=_("bin_local"),B=un(P.argv.slice(2),{string:["cwd","cli-path","filter","prompt","runner","model","netlify-api-token"],boolean:["verbose","help"],alias:{h:"help",v:"verbose"}}),Ze=()=>{console.log(`
131
+ `;import sn from"process";import{getTracer as Ve}from"@netlify/otel";import{getTracer as nn}from"@netlify/otel";var fe=E("deploy"),jt=async e=>await b(nn(),"create-preview-deploy",async t=>on(e,t)),on=async({netlify:e,hasRepo:t,skipBuild:r,message:o="Agent Preview",deploySubdomain:n,cliPath:s,filter:a},l)=>{try{let i=["deploy","--message",`"${o}"`,"--json","--draft","--verbose"];t||(fe.log("Deploy: Uploading source zip"),i.push("--upload-source-zip")),n&&i.push("--alias",n),a&&i.push("--filter",a),r?(fe.log("Deploy: Skipping build"),i.push("--no-build")):i.push("--context","deploy-preview");let m=s||"netlify";fe.log(`Running: ${m} ${i.join(" ")}`),l?.setAttributes({cmd:m,args:i});let c=await e.utils.run(m,i,{stdio:["ignore","pipe","pipe"]}),u=JSON.parse(c.stdout.trim());l?.setAttributes({success:!0,deployId:u.deploy_id,deployUrl:u.deploy_url,siteId:u.site_id}),fe.log(`
132
+ Preview deploy created successfully:`,{deployId:u.deploy_id,deployUrl:u.deploy_url,siteId:u.site_id});let p={deployId:u.deploy_id,previewUrl:u.deploy_url,logsUrl:u.logs,siteId:u.site_id};return t||(p.sourceZipFilename=u.source_zip_filename),p}catch(i){throw fe.error("Failed to create preview deploy via CLI:",i),l?.setAttributes({success:!1,error:i.message}),i}};var ge=E("deploy_stage"),Je=async e=>await b(Ve(),"run-deploy-stage",async()=>an(e)),an=async({cliPath:e,config:t,context:r,result:o,filter:n})=>{let s=await b(Ve(),"get-runner-diffs",async()=>await Tt({config:t,netlify:r}));if(ge.info("Resolved git",{hasChanges:s.hasChanges,ignored:s.ignored??[]}),!s.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:a,resultDiff:l,diffBinary:i,resultDiffBinary:m}=s,c=!0;ge.log("Preview deploy condition check:",{resultUndefined:o===void 0,resultType:typeof o,hasChanges:c,wouldCreatePreview:o!==void 0&&c});let u=null;if(o!==void 0&&c)try{let p;try{let h=await b(Ve(),"get-runner-session",async()=>await at(t.id,t.sessionId));h?.title&&(p=h.title)}catch(h){ge.warn("Failed to fetch session title, using fallback message:",h.message)}await B(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),u=await jt({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:p,skipBuild:!1,deploySubdomain:_t(t.id,sn.env.SITE_NAME),filter:n})}catch(p){return ge.warn("Failed to create preview deploy (continuing with agent run):",p),{diff:a,resultDiff:l,hasChanges:c,previewInfo:null,diffBinary:i,resultDiffBinary:m,deployError:p instanceof Error?p.message:String(p)}}return ge.log("Git status",{hasDiff:!!a,hasChanges:c}),{diff:a,resultDiff:l,hasChanges:c,previewInfo:u,diffBinary:i,resultDiffBinary:m}};import{getTracer as Ne}from"@netlify/otel";async function Yt(e,t){let{maxRetries:r,baseDelay:o,onRetry:n}=t,s;for(let a=1;a<=r;a++)try{return await e()}catch(l){if(s=l,a===r)throw s;n&&n(a,s),await new Promise(i=>setTimeout(i,o*a))}throw s}var Ae=class{scanDiffForForms(t){let r=[],o=null,n=[],s=t.split(`
133
+ `);for(let a of s)if(a.startsWith("diff --git")){if(o&&n.length>0){let i=this.containsNetlifyForm(n,o);i&&r.push(i)}let l=a.split(" ");o=l[l.length-1].replace(/^b\//,""),n=[]}else a.startsWith("+")&&!a.startsWith("+++")&&n.push(a.slice(1));if(o&&n.length>0){let a=this.containsNetlifyForm(n,o);a&&r.push(a)}return{detected:r.length>0,matches:r}}containsNetlifyForm(t,r){let o=t.join(`
134
+ `),n=[{pattern:/<[\w-]*form[\s\S]*?(data-)?netlify/i,name:"standard form element"},{pattern:/<[A-Z][\w]*[\s\S]*?component\s*=\s*["']form["'][\s\S]*?(data-)?netlify/i,name:"component with form prop"}];for(let{pattern:s,name:a}of n){let l=o.match(s);if(l){let i=l.index||0,m=Math.max(0,i-20),c=Math.min(o.length,i+l[0].length+20),u=o.slice(m,c).trim();return u=u.replace(/\s+/g," "),u.length>100&&(u=u.slice(0,97)+"..."),{file:r,snippet:`[${a}] ${u}`}}}return null}};var P=E("cleanup_stage"),Bt=async e=>await b(Ne(),"cleanup-stage",async()=>ln(e)),Xe=1024*1024*10,ln=async({config:e,diff:t,result:r,duration:o,resultDiff:n,diffBinary:s,resultDiffBinary:a,previewInfo:l})=>{let i={result:r||"Done",duration:o};l&&l.deployId&&(i.deploy_id=l.deployId),l&&l.sourceZipFilename&&(i.result_zip_file_name=l.sourceZipFilename);let m=t||s||n||a;if(m){let c=new Ae,u=t||s||"",p=c.scanDiffForForms(u);p.detected&&(P.log("Detected Netlify form(s) in diff:"),p.matches.forEach(({file:h,snippet:x})=>{P.log(` - ${h}: ${x}`)}),i.has_netlify_form=!0),P.log("Did not detect Netlify form(s) in diff")}if(m)try{P.log("Getting pre-signed URLs for diff upload");let c=await ut(e.id,e.sessionId),u=[];(t||s)&&u.push(Oe(c.result.upload_url,s||t).then(()=>{i.result_diff_s3_key=c.result.s3_key,P.log("Successfully uploaded result_diff to S3")})),(n||a)&&u.push(Oe(c.cumulative.upload_url,a||n).then(()=>{i.cumulative_diff_s3_key=c.cumulative.s3_key,P.log("Successfully uploaded cumulative_diff to S3")})),P.log(`Uploading ${u.length} diff(s) to S3 in parallel`),await Promise.all(u),(n||a)&&(P.log("Updating agent runner with cumulative diff S3 key"),await b(Ne(),"update-runner",async()=>{await de(e.id,{result_diff_s3_key:c.cumulative.s3_key})}))}catch(c){P.error("S3 upload failed, falling back to inline diffs:",c);let u=Buffer.byteLength(t||s||""),p=Buffer.byteLength(a||n||"");if(u>Xe||p>Xe){let h=`Diffs exceed maximum inline size of ${Xe} bytes.`;throw P.error(h),new Error(h)}i.result_diff=t,i.result_diff_binary=s,(n||a)&&(i.cumulative_diff=n,i.cumulative_diff_binary=a,P.log("Updating agent runner with inline diffs (fallback)"),await b(Ne(),"update-runner",async()=>{await de(e.id,{result_diff:n,result_diff_binary:a})}))}else P.log("No diffs to upload");return P.log("Updated agent runner with result"),await Yt(async()=>await b(Ne(),"update-runner-session",()=>B(e.id,e.sessionId,i)),{maxRetries:3,baseDelay:1e3,onRetry:(c,u)=>{P.error(`Error updating agent runner session (attempt ${c}):`,u),P.log("Retrying...")}}),P.log("Finished updating agent runner with result"),{sessionUpdate:i}};import{getTracer as Ht,shutdownTracers as cn,withActiveSpan as qt}from"@netlify/otel";var dn=un(import.meta.url),Kt=dn("../package.json"),Wt=E("pipeline_index"),be=3,Vt=async({config:e,apiToken:t,cliPath:r="netlify",cwd:o,errorLogsPath:n,filter:s,tracing:a={}})=>{let l,{withStageTimer:i}=ft(V.timeUnits.hours(4)),m=await nt(Kt.version,e.id,a);try{await qt(Ht(),"run-pipeline",{},m,async()=>{let c,{aiGateway:u,context:p,persistSteps:h,runner:x,sha:T}=await i("init",()=>kt({config:e,apiToken:t,cliPath:r,cwd:o,errorLogsPath:n,filter:s,runnerVersion:Kt.version}),V.timeUnits.minutes(10));l=x.clean,e.sha=T;let{runnerResult:g}=await i("inference",()=>Se({cliPath:r,config:e,context:p,runner:x.runner,persistSteps:h,aiGateway:u}));await B(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let S=await i("deploy",()=>Je({cliPath:r,config:e,context:p,result:g.result,filter:s})),R=g,N=[];if(S.hasChanges&&S.deployError){N.push(ot(S.deployError));let f=1;for(;f<=be&&!S.previewInfo;)Wt.log(`Deploy attempt had errors. Retrying. ${f}/${be}`),await qt(Ht(),"deploy-stage",async I=>{I?.setAttributes({"stage.attempt":f});let{runnerResult:d}=await i(`inference-retry-${f}`,()=>Se({cliPath:r,config:e,context:p,runner:x.runner,persistSteps:h,aiGateway:u,buildErrors:N,priorAgentSessionId:g.agentSessionId}));R={...d,steps:[...R.steps||[],...d.steps||[]],duration:(R.duration||0)+(d.duration||0)},S=await i(`deploy-retry-${f}`,()=>Je({cliPath:r,config:e,context:p,result:d.result,filter:s})),S.deployError&&N.push(S.deployError),f++});f>be&&!S.previewInfo&&(c=new Error(`Deploy validation failed after ${be} attempts`))}let{diff:v,resultDiff:_,previewInfo:A,diffBinary:w,resultDiffBinary:y}=S;if(await i("cleanup",()=>Bt({config:e,diff:v,result:R.result,duration:R.duration,resultDiff:_,diffBinary:w,resultDiffBinary:y,previewInfo:A}),V.timeUnits.minutes(10)),c)throw c;process.env.NETLIFY_LOCAL_MODE||(await l?.(),await St())})}catch(c){Wt.error("Got error while running pipeline",c),await l?.();let u=c instanceof Error&&c.message;throw await B(e.id,e.sessionId,{result:u||"Encountered error when running agent",state:"error"}),c}finally{await cn()}};import Jt from"crypto";var D=E("bin_local"),Y=pn(O.argv.slice(2),{string:["cwd","cli-path","filter","prompt","runner","model","netlify-api-token"],boolean:["verbose","help"],alias:{h:"help",v:"verbose"}}),Ze=()=>{console.log(`
137
135
  agent-runner-cli-local - Run Netlify agent runner locally without API connections
138
136
 
139
137
  USAGE:
@@ -165,6 +163,6 @@ NOTE:
165
163
  This local mode mocks all Netlify API calls. The agent will run through
166
164
  the full pipeline including inference and deployment, but API calls will
167
165
  be logged instead of executed.
168
- `)};B.help&&(Ze(),P.exit(0));B.prompt||($.error("Error: --prompt is required"),Ze(),P.exit(1));B["netlify-api-token"]||($.error("Error: --netlify-api-token is required - generate a PAT from your Netlify user settings"),Ze(),P.exit(1));try{let e=B.cwd||P.cwd(),t=zt.join(e,".netlify","netlify-agent-runner-context*");Zt.rmSync(t,{recursive:!0,force:!0});let r;try{r=await cn(e)}catch(l){$.error(l.message),$.error(`
169
- To link this directory to a Netlify site, run:`),$.error(" netlify link"),P.exit(1)}let o=`local-${Xt.randomBytes(8).toString("hex")}`,n=`session-${Xt.randomBytes(8).toString("hex")}`,i=B.runner||"claude";$.log("Starting agent runner in local mode",{runnerId:o,sessionId:n,siteId:r,cwd:e,runner:i});let a={id:o,sessionId:n,prompt:B.prompt,runner:i,model:B.model,accountType:"local",validateAgent:!1,validateAgentWithBuild:!1,sessionHistoryContext:[],siteContext:[],hasRepo:!0,useGateway:!0,sha:void 0,modelVersionOverrides:{}};P.env.NETLIFY_LOCAL_MODE="true",P.env.NETLIFY_API_HOST="api.netlify.com",P.env.NETLIFY_API_TOKEN=B["netlify-api-token"],P.env.SITE_ID=r,P.env.NETLIFY_TEAM_ID="local-team-id",P.env.NETLIFY_AGENT_RUNNER_USER_ID="local-user-id",P.env.SITE_NAME="local-site",i==="claude"?we(e,"claude")||($.log("Claude CLI not found, installing..."),await ze(e,"@anthropic-ai/claude-code")):i==="gemini"?we(e,"gemini")||($.log("Gemini CLI not found, installing..."),await ze(e,"@google/gemini-cli@0.1.17")):i==="codex"?we(e,"codex")||($.log("Codex CLI not found, installing..."),await ze(e,"@openai/codex")):($.error(`Unknown runner: ${i}`),P.exit(1)),await Jt({config:a,cwd:e,cliPath:B["cli-path"],filter:B.filter,tracing:{exporterUrl:void 0,traceparent:void 0}}),$.info("Finished agent (local mode)"),P.exit(0)}catch(e){$.error("Error running agent pipeline (local mode):",e),P.exit(1)}function ze(e,t){return new Promise((r,o)=>{C("npm",["install",t,"--no-save"],{cwd:e}).then(({stdout:n})=>{$.log(`${t} installed: ${n}`),r()}).catch(n=>{$.error(`Error installing ${t}: ${n.stderr||n.message}`),o(n)})})}async function cn(e){let t=zt.join(e,".netlify","state.json");try{let r=await Zt.readFileSync(t,"utf-8"),o=JSON.parse(r);if(!o.siteId)throw new Error(`No siteId found in ${t}. Please link this directory to a Netlify site using 'netlify link'.`);return $.log(`Found site ID from state file: ${o.siteId}`),o.siteId}catch(r){throw r.code==="ENOENT"?new Error(`No .netlify/state.json found in ${e}. Please link this directory to a Netlify site using 'netlify link'.`):r}}
166
+ `)};Y.help&&(Ze(),O.exit(0));Y.prompt||(D.error("Error: --prompt is required"),Ze(),O.exit(1));Y["netlify-api-token"]||(D.error("Error: --netlify-api-token is required - generate a PAT from your Netlify user settings"),Ze(),O.exit(1));try{let e=Y.cwd||O.cwd(),t=Xt.join(e,".netlify","netlify-agent-runner-context*");zt.rmSync(t,{recursive:!0,force:!0});let r;try{r=await fn(e)}catch(l){D.error(l.message),D.error(`
167
+ To link this directory to a Netlify site, run:`),D.error(" netlify link"),O.exit(1)}let o=`local-${Jt.randomBytes(8).toString("hex")}`,n=`session-${Jt.randomBytes(8).toString("hex")}`,s=Y.runner||"claude";D.log("Starting agent runner in local mode",{runnerId:o,sessionId:n,siteId:r,cwd:e,runner:s});let a={id:o,sessionId:n,prompt:Y.prompt,runner:s,model:Y.model,accountType:"local",validateAgent:!1,validateAgentWithBuild:!1,sessionHistoryContext:[],siteContext:[],hasRepo:!0,useGateway:!0,sha:void 0,modelVersionOverrides:{}};O.env.NETLIFY_LOCAL_MODE="true",O.env.NETLIFY_API_HOST="api.netlify.com",O.env.NETLIFY_API_TOKEN=Y["netlify-api-token"],O.env.SITE_ID=r,O.env.NETLIFY_TEAM_ID="local-team-id",O.env.NETLIFY_AGENT_RUNNER_USER_ID="local-user-id",O.env.SITE_NAME="local-site",s==="claude"?we(e,"claude")||(D.log("Claude CLI not found, installing..."),await ze(e,"@anthropic-ai/claude-code")):s==="gemini"?we(e,"gemini")||(D.log("Gemini CLI not found, installing..."),await ze(e,"@google/gemini-cli@0.1.17")):s==="codex"?we(e,"codex")||(D.log("Codex CLI not found, installing..."),await ze(e,"my-codex-no-sandbox")):(D.error(`Unknown runner: ${s}`),O.exit(1)),await Vt({config:a,cwd:e,cliPath:Y["cli-path"],filter:Y.filter,tracing:{exporterUrl:void 0,traceparent:void 0}}),D.info("Finished agent (local mode)"),O.exit(0)}catch(e){D.error("Error running agent pipeline (local mode):",e),O.exit(1)}function ze(e,t){return new Promise((r,o)=>{C("npm",["install",t,"--no-save"],{cwd:e}).then(({stdout:n})=>{D.log(`${t} installed: ${n}`),r()}).catch(n=>{D.error(`Error installing ${t}: ${n.stderr||n.message}`),o(n)})})}async function fn(e){let t=Xt.join(e,".netlify","state.json");try{let r=await zt.readFileSync(t,"utf-8"),o=JSON.parse(r);if(!o.siteId)throw new Error(`No siteId found in ${t}. Please link this directory to a Netlify site using 'netlify link'.`);return D.log(`Found site ID from state file: ${o.siteId}`),o.siteId}catch(r){throw r.code==="ENOENT"?new Error(`No .netlify/state.json found in ${e}. Please link this directory to a Netlify site using 'netlify link'.`):r}}
170
168
  //# sourceMappingURL=bin-local.js.map