@netlify/agent-runner-cli 1.48.2 → 1.49.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
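For reference, the same comparison can be reproduced locally with the npm CLI (npm 7 or later), assuming both versions remain published to the public registry:

    npm diff --diff=@netlify/agent-runner-cli@1.48.2 --diff=@netlify/agent-runner-cli@1.49.0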
package/dist/index.js CHANGED
@@ -1,55 +1,55 @@
- import{createRequire as Zr}from"module";import{createTracerProvider as Yt}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as Ve}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as Bt}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as Ht}from"@netlify/otel";import{propagation as Je,context as Xe,W3CTraceContextPropagator as qt}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as Kt}from"@opentelemetry/exporter-trace-otlp-grpc";function E(e){let t=!process.env.VITEST;return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var Se=E("tracing"),ze=async(e,t,r)=>(await Yt({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new Ve(new Ae),new Ve(new Kt({url:r.exporterUrl}))],instrumentations:[new Bt({skipHeaders:!0})]}),r.traceparent?(Je.setGlobalPropagator(new qt),Je.extract(Xe.active(),{traceparent:r.traceparent,isRemote:!0})):Xe.active());function b(e,t,r){return Se.log(`\u23F3 TRACE: ${t} starting...`),Ht(e,t,r)}var Ae=class{export(t,r){for(let o of t)this.logSpan(o);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,o=t.attributes,n=[];for(let[u,i]of Object.entries(o))u.includes("duration")&&typeof i=="number"?n.push(`${u}=${i.toFixed(2)}ms`):n.push(`${u}=${i}`);let s=t.status?.code===2?"\u274C":"\u2705",a=n.length>0?` [${n.join(", ")}]`:"";Se.log(`${s} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${a}`),t.status?.code===2&&t.status.message&&Se.log(` \u274C Error: ${t.status.message}`)}};var Wt=["error","failed","exception","fatal","panic","abort","crash"];function Ze(e){let t=e.split(`
- `),r=[],o=-1,n=0;for(;n<t.length;){let u=t[n].slice(0,500).toLowerCase();if(Wt.some(m=>u.includes(m))){let m=Math.max(0,n-10,o+1),c=Math.min(t.length-1,n+20),l=[];for(let d=m;d<=c;d++)l.push(t[d]);r.push(l.join(`
- `)),o=c,n=c+1}else n++}if(r.length===0)return e;let s=r.map((a,u)=>`<extracted_error_chunk order="${u+1}">
+ import{createRequire as rn}from"module";import{createTracerProvider as Bt}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as Ve}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as Ht}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as qt}from"@netlify/otel";import{propagation as Je,context as Xe,W3CTraceContextPropagator as Kt}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as Wt}from"@opentelemetry/exporter-trace-otlp-grpc";function _(e){let t=!process.env.VITEST;return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var be=_("tracing"),ze=async(e,t,r)=>(await Bt({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new Ve(new Ne),new Ve(new Wt({url:r.exporterUrl}))],instrumentations:[new Ht({skipHeaders:!0})]}),r.traceparent?(Je.setGlobalPropagator(new Kt),Je.extract(Xe.active(),{traceparent:r.traceparent,isRemote:!0})):Xe.active());function N(e,t,r){return be.log(`\u23F3 TRACE: ${t} starting...`),qt(e,t,r)}var Ne=class{export(t,r){for(let o of t)this.logSpan(o);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,o=t.attributes,n=[];for(let[l,i]of Object.entries(o))l.includes("duration")&&typeof i=="number"?n.push(`${l}=${i.toFixed(2)}ms`):n.push(`${l}=${i}`);let s=t.status?.code===2?"\u274C":"\u2705",a=n.length>0?` [${n.join(", ")}]`:"";be.log(`${s} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${a}`),t.status?.code===2&&t.status.message&&be.log(` \u274C Error: ${t.status.message}`)}};var Vt=["error","failed","exception","fatal","panic","abort","crash"];function Ze(e){let t=e.split(`
+ `),r=[],o=-1,n=0;for(;n<t.length;){let l=t[n].slice(0,500).toLowerCase();if(Vt.some(m=>l.includes(m))){let m=Math.max(0,n-10,o+1),u=Math.min(t.length-1,n+20),c=[];for(let f=m;f<=u;f++)c.push(t[f]);r.push(c.join(`
+ `)),o=u,n=u+1}else n++}if(r.length===0)return e;let s=r.map((a,l)=>`<extracted_error_chunk order="${l+1}">
  ${a}
  </extracted_error_chunk>`).join(`
 
- `);return s.length>e.length*.8?e:s}import _e from"process";import{getTracer as $r}from"@netlify/otel";import ie from"process";var de=ie.env.NETLIFY_API_URL,fe=ie.env.NETLIFY_API_TOKEN,M=E("api"),ge=()=>ie.env.NETLIFY_LOCAL_MODE==="true",ae=async(e,t={})=>{if(!de||!fe)throw new Error("No API URL or token");let r=new URL(e,de),o={...t,headers:{...t.headers,Authorization:`Bearer ${fe}`}};ie.env.AGENT_RUNNERS_DEBUG==="true"&&(o.headers["x-nf-debug-logging"]="true"),t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),s=n.ok&&n.status<=299;if(ie.env.AGENT_RUNNERS_DEBUG==="true")M.log(`Response headers for ${r}:`),n.headers.forEach((u,i)=>{M.log(` ${i}: ${u}`)});else{let u=n.headers.get("x-request-id")||n.headers.get("x-nf-request-id");M.log(`Request ID for ${r}: ${u||"N/A"}`)}if(s||M.error(`Got status ${n.status} for request ${r}`),t.raw){if(!s)throw n;return n}let a=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!s)throw a;return a},Qe=e=>{M.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(de=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(fe=e.constants.NETLIFY_API_TOKEN)},et=()=>({apiUrl:de,token:fe}),le=async(e,t)=>ge()?(M.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):ae(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),G=async(e,t,r)=>ge()?(M.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):ae(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var tt=async(e,t)=>ge()?(M.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):ae(`/api/v1/agent_runners/${e}/sessions/${t}`),rt=(e,t,r)=>ae(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r}}),nt=async(e,t)=>ge()?(M.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):ae(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),Ne=async(e,t)=>{M.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var X=E("ai_gateway"),be=null;var ot=async()=>{if(be)return be;X.log("Fetching available AI gateway providers");let e=await fetch(`${et().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return be=t,X.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},Vt=async(e,t)=>{let o=(await ot()).providers[e];if(!o)return X.log(`Provider '${e}' not found`),!1;let n=o.models.includes(t);return X.log(`Model validation for ${e}/${t}`,{isAvailable:n}),n},st=async({netlify:e,config:t})=>{let r,o,n,s,a=e.constants?.SITE_ID;if(!a)throw new Error("No site id");let u=async()=>{clearTimeout(n),X.log("Requesting AI gateway information");let i=await rt(a,t.id,t.sessionId);if({token:r,url:s}=i,o=i.expires_at?i.expires_at*1e3:void 0,X.log("Got AI gateway information",{token:!!r,expiresAt:o,url:s}),o){let 
m=o-Date.now()-6e4;m>0&&(n=setTimeout(()=>{u()},m))}};return await Promise.all([u(),ot()]),{get url(){return s},get token(){return r},isModelAvailableForProvider:Vt}};import Y from"process";import me from"path";import Ce from"fs";import{fileURLToPath as er}from"url";import{execa as tr,execaCommand as An}from"execa";import{Transform as Jt}from"stream";var Xt=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","ERROR_LOGS_PATH","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),zt=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function Zt(){return Object.entries(process.env).filter(([e,t])=>!(!t||Xt.has(e)||zt.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function j(e){if(typeof e!="string")return e;let t=Zt();if(t.length===0)return e;let r=e;return t.forEach(o=>{let n=new RegExp(Qt(o),"g");r=r.replace(n,"******")}),r}function Qt(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var z=class extends Jt{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,o){let n=t.toString(),s=j(n);o(null,s)}};function it(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(o,n,s){let a=typeof o=="string"?j(o):o;return typeof n=="function"?t(a,n):t(a,n,s)},process.stderr.write=function(o,n,s){let a=typeof o=="string"?j(o):o;return typeof n=="function"?r(a,n):r(a,n,s)}}var ue=null,at=e=>(ue&&ue.destroy(),ue=new q({totalAllowedTime:e}),ue),lt=()=>ue;var q=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,o)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let n=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),s=null,a=null;o!==void 0&&(a=new Promise((u,i)=>{s=setTimeout(()=>{i(new Error(`${t} stage exceeded its maximum duration of ${o}ms`))},o)}));try{return a?await Promise.race([r(),a]):await r()}finally{n(),s&&clearTimeout(s)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var rr=er(import.meta.url),nr=me.dirname(rr),he=E("shell"),Pe=new Set,or={preferLocal:!0},F=(e,t,r)=>{let[o,n]=sr(t,r),s={...or,...n},a=tr(e,o,s);return ir(a,s),lr(a),a};var sr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},ir=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(Y.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new z).pipe(Y.stdout),e.stdout?.pipe(new z).pipe(Y.stdout),e.stderr?.pipe(new z).pipe(Y.stderr);return}e.stdout?.pipe(Y.stdout),e.stderr?.pipe(Y.stderr)},ut=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(Y.kill(-e.pid,t),he.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return he.error("Error killing process:",r),!1}},ar=e=>ut(e,"SIGKILL"),lr=e=>{Pe.add(e);let t=lt();if(t){let r=t.onTimesUp(()=>{he.log(`Global timer expired, killing process ${e.pid}`),ut(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(he.log(`Force killing process ${e.pid} after timeout`),ar(e))},5e3)});e.on("exit",()=>{Pe.delete(e),r()}),e.on("error",()=>{Pe.delete(e),r()})}};function Z(e,t){if(Y.env.NODE_PATH){let n=me.join(Y.env.NODE_PATH,".bin",t);if(Ce.existsSync(n))return n}let r=me.join(e,"node_modules",".bin",t);if(Ce.existsSync(r))return r;let o=me.join(nr,"..","node_modules",".bin",t);if(Ce.existsSync(o))return o}var ct="netlify-agent-runner-context.md",Oe="task-history",Fe="netlify-context",L=".netlify",Q="results.md",$e="assets";var ur=E("utils"),cr=e=>new Promise(t=>{setTimeout(t,e)}),pt=(e,t=3e3)=>{let r=!1,o=null,n=[],s=null,a=(...u)=>{if(r)return o=u,new Promise(c=>{n.push(c)});r=!0;let i,m=new Promise(c=>{i=c});return s=(async()=>{await Promise.resolve();let c=await e(...u);for(i(c);;){if(await cr(t),!o)return r=!1,s=null,c;let l=o,d=n;o=null,n=[],c=await e(...l),d.forEach(h=>{h(c)})}})(),m};return a.flush=async()=>{if((r||o)&&s)return await s,a.flush()},a},ye=(e,t,r=!1)=>{let 
o=null,n=null,s=null,a=function(...u){n=u,s=this;let i=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(s,n),n=null,s=null)},t),i&&(e.apply(s,n),n=null,s=null)};return a.cancel=()=>{clearTimeout(o),o=null,n=null,s=null},a.flush=()=>{if(o){clearTimeout(o);let u=n,i=s;o=null,n=null,s=null,e.apply(i,u)}},a},dt=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(o){t&&(r?.error?r.error("Could not parse JSON",o):ur.error("Could not parse JSON",o))}},ft=(e,t)=>{let n=".netlify.app",s="agent-";if(!t)return`${s}${e.slice(0,6)}`;let u=`--${t}${n}`;if(u.length>55)return"";let i=60-u.length;if(i<=0)return"";if(i>=s.length+6){let m=Math.min(i-s.length,e.length);return`${s}${e.slice(0,m)}`}return e.slice(0,i)};import{Buffer as gt}from"buffer";import pr from"path";var mt=E("repo"),ht=async({config:e})=>{mt.info("Getting runner diffs");let t=await fr(),{hasChanges:r}=t,{status:o}=t;if(!r)return{hasChanges:!1};let n=gr(o);await mr(n),mt.info("Changes after processing"),await Le();let s=await ke(o);await De(s);let a={stdio:["ignore","pipe","pipe"]},i=(await F("git",["diff","--staged"],a)).stdout;if(r=!!i,!r)return{hasChanges:!1,ignored:s};let c=(await F("git",["diff","--staged","--binary"],a)).stdout,l,d;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await F("git",["commit","-m","Agent runner"]),l=(await F("git",["diff",e.sha,"HEAD"],a)).stdout;let g=(await F("git",["diff",e.sha,"HEAD","--binary"],a)).stdout;l!==g&&(d=gt.from(g).toString("base64"))}let h={hasChanges:!0,diff:i,resultDiff:l,ignored:s};return i!==c&&(h.diffBinary=gt.from(c).toString("base64")),d&&(h.resultDiffBinary=d),h},De=async(e=[])=>{await F("git",["add",".",...e])},Le=async()=>(await F("git",["status","-s"])).stdout,yt=/.. (.+)?\.log$/,dr=[yt],fr=async()=>{let e=await Le();return{hasChanges:(e.trim().length===0?[]:e.split(`
- `).filter(o=>dr.some(s=>s instanceof RegExp?s.test(o):o===s)?!1:o[1]?.trim()!=="")).length!==0,status:e}},Et=async()=>{let{stdout:e}=await F("git",["rev-parse","HEAD"]);return e.trim()},_t=async()=>{let{stdout:e}=await F("git",["rev-list","--max-parents=0","HEAD"]);return e.trim()},ke=async e=>{e||=await Le();let t=[".netlify","node_modules"],r=[];return e.split(`
- `).forEach(o=>{t.forEach(s=>{[`?? ${s}`,`?? ${s}${pr.sep}`].some(u=>o.startsWith(u))&&r.push(`:!${s}`)});let n=o.match(yt)?.[1];n&&r.push(`:!${n}.log`)}),r},wt=async()=>{await F("git",["reset","--hard","HEAD"])},gr=e=>{let t=e.split(`
- `).reduce((r,o)=>{if(!o)return r;let[n,s,,...a]=o,u=a.join(""),i=n.trim(),m=s.trim();return r[u]?r[u].change=m:r[u]={filePath:u,stage:i,change:m},r},{});return Object.values(t)},mr=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push(F("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import yr from"fs/promises";import Er from"os";import xt from"path";import K from"process";import _r from"readline";import Ue from"path";import hr from"fs/promises";var Me=E("agent-output-utils");async function ee({initialResult:e,agentName:t,hasError:r}){let o="",n=Ue.join(process.cwd(),L,Q);try{let s=await hr.readFile(n,"utf-8");s&&(o=s,Me.log(`Pulled result from ${Ue.relative(process.cwd(),n)}`))}catch{Me.log(`No results file found at ${Ue.relative(process.cwd(),n)}`)}return o||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function te({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,o=r?.replace(/\s+/g," ").trim().toLowerCase()||"",n="";return o?.includes("ai gateway is not available for your account")||o?.includes("ai gateway is not enabled for your account")?n="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":o?.includes("error when talking to gemini api")?n="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(o?.includes("connection closed prematurely")||o?.includes("499")&&t.toLowerCase().includes("gemini"))&&(n=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),o?.includes("request timed out")&&(n=`The ${t} API request's have timed out. Please try again or use a different available agent.`),o?.includes("network error")&&(n=`The ${t} agent is having network issues. 
Please try again or use a different available agent.`),n&&Me.log(`Providing updated error messsage: ${n}, replacing original error: ${r}`),n||r||void 0}function re(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error"))}var k=E("runner_claude"),Tt="Claude Code",Ee="claude-sonnet-4-5-20250929",It=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,wr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(k.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(k.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(k.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function Ge({config:e,netlify:t,persistSteps:r,aiGateway:o,continueSession:n,priorAgentSessionId:s}){let a=e,{accountType:u,prompt:i,modelVersionOverrides:m}=a,{model:c}=a,l="";if(o){let{token:f,url:T}=o;if(!f||!T)throw new Error("No token or url provided from AI Gateway");if(m?.claude){let p=m?.claude?.[u];if(p){if(!await o.isModelAvailableForProvider("anthropic",p))throw new Error(`Model override '${p}' is not available for anthropic provider`);c=p}}else if(c){if(!await o.isModelAvailableForProvider("anthropic",c))throw new Error(`Model '${c}' is not available for anthropic provider`)}else await o.isModelAvailableForProvider("anthropic",Ee)?(c=Ee,k.log(`Using default model: ${Ee}`)):k.log(`Default model ${Ee} is not available, proceeding without model specification`);K.env.ANTHROPIC_API_KEY=f,K.env.ANTHROPIC_BASE_URL=T}else if(!K.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let d=[],h=[],x={},I=0,g=0,S,v,N=[Z(K.cwd(),"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose",...c?["--model",c]:[],...n?["--continue"]:[],...n&&s?["--resume",s]:[],"-p",i],R=`${K.env.NVM_BIN}/node`;k.log(`Running ${R} ${N.join(" ")}`);let _=t.utils.run(R,N,{all:!0,env:K.env});_.stdin?.end();let A=ye(()=>{r?.({steps:d,duration:g})},250),w=(f,T)=>{let p={...f,id:I};I+=1,h.push(p),d.push(p),T||A.flush(),A(),T&&A.flush()},y=_r.createInterface({input:_.all});return y.on("error",f=>{k.error("Readline interface error",{error:f.message,stack:f.stack})}),y.on("line",f=>{let T=null;try{T=JSON.parse(f)}catch{k.log("Could not parse line",f)}T?.session_id&&T.session_id!==l&&(l=T.session_id),Array.isArray(T?.message?.content)?T.message.content.forEach(p=>{switch(p.type){case"text":{p.text&&w({message:p.text});break}case"image":{typeof p.source=="object"&&p.source&&p.source.type==="base64"&&p.source.media_type?w({message:`![](data:${p.source.media_type};base64,${p.source.data})`}):k.log(`Unsupported image type 
${p.source?.type}`,p.source);break}case"tool_use":{if(p.name==="Task"){let P=p.input?.description&&`\`${p.input.description}\``;w({title:[It(p.name),P].filter(Boolean).join(" ")})}else p.id&&(x[p.id]=p);A.flush();break}case"tool_result":{let P=p.tool_use_id?x[p.tool_use_id]:void 0,se;if(P){let J=P.input?.file_path&&xt.relative(K.cwd(),P.input.file_path),O=J&&`\`${J}\``;se=[It(P.name||""),O].filter(Boolean).join(" ")}let We=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(P?.name||""),B;if(typeof p.content=="string")B=p.content;else if(Array.isArray(p.content)){let J=[];p.content.forEach(O=>{O?.type==="text"&&typeof O.text=="string"?J.push(O.text):O?.type==="image"&&typeof O.source=="object"&&O.source?O.source.type==="base64"&&O.source.media_type?J.push(`![](data:${O.source.media_type};base64,${O.source.data})`):k.log(`Unsupported image type ${O.source.type}`,O.source):k.log(`Unsupported block type ${O?.type}`)}),B=J.join(`
+ `);return s.length>e.length*.8?e:s}import Te from"process";import{getTracer as Ur}from"@netlify/otel";import ae from"process";var ge=ae.env.NETLIFY_API_URL,me=ae.env.NETLIFY_API_TOKEN,M=_("api"),he=()=>ae.env.NETLIFY_LOCAL_MODE==="true",le=async(e,t={})=>{if(!ge||!me)throw new Error("No API URL or token");let r=new URL(e,ge),o={...t,headers:{...t.headers,Authorization:`Bearer ${me}`}};ae.env.AGENT_RUNNERS_DEBUG==="true"&&(o.headers["x-nf-debug-logging"]="true"),t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),s=n.ok&&n.status<=299;if(ae.env.AGENT_RUNNERS_DEBUG==="true")M.log(`Response headers for ${r}:`),n.headers.forEach((l,i)=>{M.log(` ${i}: ${l}`)});else{let l=n.headers.get("x-request-id")||n.headers.get("x-nf-request-id");M.log(`Request ID for ${r}: ${l||"N/A"}`)}if(s||M.error(`Got status ${n.status} for request ${r}`),t.raw){if(!s)throw n;return n}let a=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!s)throw a;return a},Qe=e=>{M.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(ge=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(me=e.constants.NETLIFY_API_TOKEN)},et=()=>({apiUrl:ge,token:me}),ce=async(e,t)=>he()?(M.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):le(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),G=async(e,t,r)=>he()?(M.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):le(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var tt=async(e,t)=>he()?(M.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):le(`/api/v1/agent_runners/${e}/sessions/${t}`),rt=(e,t,r)=>le(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r}}),nt=async(e,t)=>he()?(M.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):le(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),Ce=async(e,t)=>{M.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var Z=_("ai_gateway"),Pe=null;var ot=async()=>{if(Pe)return Pe;Z.log("Fetching available AI gateway providers");let e=await fetch(`${et().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return Pe=t,Z.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},Jt=async(e,t)=>{let o=(await ot()).providers[e];if(!o)return Z.log(`Provider '${e}' not found`),!1;let n=o.models.includes(t);return Z.log(`Model validation for ${e}/${t}`,{isAvailable:n}),n},st=async({netlify:e,config:t})=>{let r,o,n,s,a=e.constants?.SITE_ID;if(!a)throw new Error("No site id");let l=async()=>{clearTimeout(n),Z.log("Requesting AI gateway information");let i=await rt(a,t.id,t.sessionId);if({token:r,url:s}=i,o=i.expires_at?i.expires_at*1e3:void 0,Z.log("Got AI gateway information",{token:!!r,expiresAt:o,url:s}),o){let 
m=o-Date.now()-6e4;m>0&&(n=setTimeout(()=>{l()},m))}};return await Promise.all([l(),ot()]),{get url(){return s},get token(){return r},isModelAvailableForProvider:Jt}};import j from"process";import B from"path";import ye from"fs";import{fileURLToPath as rr}from"url";import{createRequire as nr}from"module";import{execa as or,execaCommand as Fn}from"execa";import{Transform as Xt}from"stream";var zt=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","ERROR_LOGS_PATH","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),Zt=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function Qt(){return Object.entries(process.env).filter(([e,t])=>!(!t||zt.has(e)||Zt.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function Y(e){if(typeof e!="string")return e;let t=Qt();if(t.length===0)return e;let r=e;return t.forEach(o=>{let n=new RegExp(er(o),"g");r=r.replace(n,"******")}),r}function er(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var Q=class extends Xt{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,o){let n=t.toString(),s=Y(n);o(null,s)}};function it(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(o,n,s){let a=typeof o=="string"?Y(o):o;return typeof n=="function"?t(a,n):t(a,n,s)},process.stderr.write=function(o,n,s){let a=typeof o=="string"?Y(o):o;return typeof n=="function"?r(a,n):r(a,n,s)}}var ue=null,at=e=>(ue&&ue.destroy(),ue=new W({totalAllowedTime:e}),ue),lt=()=>ue;var W=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,o)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let n=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),s=null,a=null;o!==void 0&&(a=new Promise((l,i)=>{s=setTimeout(()=>{i(new Error(`${t} stage exceeded its maximum duration of ${o}ms`))},o)}));try{return a?await Promise.race([r(),a]):await r()}finally{n(),s&&clearTimeout(s)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var ct={name:"@netlify/agent-runner-cli",type:"module",version:"1.49.0",description:"CLI tool for running Netlify agents",main:"./dist/index.js",types:"./dist/index.d.ts",exports:"./dist/index.js",bin:{"agent-runner-cli":"./dist/bin.js","agent-runner-cli-local":"./dist/bin-local.js"},files:["dist/**/*.js","dist/**/*.d.ts","patches","scripts"],scripts:{build:"tsup",dev:"tsup --watch",prepare:"husky install node_modules/@netlify/eslint-config-node/.husky/",prepublishOnly:"npm ci && npm test",prepack:"npm run build",test:"run-s build format test:dev",format:"run-s build format:check-fix:*","format:ci":"run-s build format:check:*","format:check-fix:lint":"run-e format:check:lint format:fix:lint","format:check:lint":"cross-env-shell eslint $npm_package_config_eslint","format:fix:lint":"cross-env-shell eslint --fix $npm_package_config_eslint","format:check-fix:prettier":"run-e format:check:prettier format:fix:prettier","format:check:prettier":"cross-env-shell prettier --check $npm_package_config_prettier","format:fix:prettier":"cross-env-shell prettier --write $npm_package_config_prettier","test:dev":"run-s build test:dev:*","test:ci":"run-s build test:ci:*","test:dev:vitest":"vitest","test:ci:vitest":"c8 -r lcovonly -r text -r json vitest",postinstall:"node scripts/postinstall.js"},config:{eslint:'--cache --format=codeframe --max-warnings=0 "{src,scripts,test,.github}/**/*.{js,ts,md,html}"',prettier:'--ignore-path .gitignore --loglevel=warn "{src,scripts,test,.github}/**/*.{js,ts,md,yml,json,html}" "*.{js,ts,yml,json,html}" ".*.{js,ts,yml,json,html}" "!**/package-lock.json" "!package-lock.json"'},keywords:[],license:"MIT",repository:"netlify/agent-runner-cli",bugs:{url:"https://github.com/netlify/agent-runner-cli/issues"},author:"Netlify 
Inc.",directories:{test:"test"},devDependencies:{"@commitlint/cli":"^19.0.0","@commitlint/config-conventional":"^19.0.0","@eslint/compat":"^1.3.2","@eslint/js":"^9.35.0","@netlify/eslint-config-node":"^7.0.1","@types/node":"^24.5.0","@typescript-eslint/eslint-plugin":"^7.1.0","@typescript-eslint/parser":"^7.1.0","@vitest/eslint-plugin":"^1.3.10",c8:"^9.0.0","eslint-config-prettier":"^10.1.8","eslint-plugin-n":"^17.0.0",husky:"^8.0.0","patch-package":"^8.0.0",tsup:"^8.5.0",typescript:"^5.0.0","typescript-eslint":"^8.44.0",vitest:"^1.5.0"},dependencies:{"@anthropic-ai/claude-code":"2.0.42","@google/gemini-cli":"0.16.0","@netlify/otel":"^5.0.0","@opentelemetry/exporter-trace-otlp-grpc":"^0.57.0",execa:"^8.0.0","get-port":"^5.1.1",minimist:"^1.2.8","my-codex-no-sandbox":"^0.1.2505290819"}};var sr=rr(import.meta.url),ir=B.dirname(sr),ar=nr(import.meta.url),_e=_("shell"),Oe=new Set,lr={preferLocal:!0},F=(e,t,r)=>{let[o,n]=cr(t,r),s={...lr,...n},a=or(e,o,s);return ur(a,s),dr(a),a};var cr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},ur=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(j.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new Q).pipe(j.stdout),e.stdout?.pipe(new Q).pipe(j.stdout),e.stderr?.pipe(new Q).pipe(j.stderr);return}e.stdout?.pipe(j.stdout),e.stderr?.pipe(j.stderr)},ut=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(j.kill(-e.pid,t),_e.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return _e.error("Error killing process:",r),!1}},pr=e=>ut(e,"SIGKILL"),dr=e=>{Oe.add(e);let t=lt();if(t){let r=t.onTimesUp(()=>{_e.log(`Global timer expired, killing process ${e.pid}`),ut(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(_e.log(`Force killing process ${e.pid} after timeout`),pr(e))},5e3)});e.on("exit",()=>{Oe.delete(e),r()}),e.on("error",()=>{Oe.delete(e),r()})}};function ee(e,t){if(!j.env.NETLIFY_LOCAL_MODE)try{let n=ar.resolve(ct.name),s=B.dirname(n);for(;s!==B.dirname(s);){let a=B.dirname(s);if(B.basename(a)==="node_modules"){let l=B.join(a,".bin",t);if(ye.existsSync(l))return l;break}s=a}}catch(n){console.error("Could not resolve package.json",n)}if(j.env.NODE_PATH){let n=B.join(j.env.NODE_PATH,".bin",t);if(ye.existsSync(n))return n}let r=B.join(e,"node_modules",".bin",t);if(ye.existsSync(r))return r;let o=B.join(ir,"..","node_modules",".bin",t);if(ye.existsSync(o))return o}var pt="netlify-agent-runner-context.md",Fe="task-history",$e="netlify-context",D=".netlify",te="results.md",ke="assets";var fr=_("utils"),gr=e=>new Promise(t=>{setTimeout(t,e)}),dt=(e,t=3e3)=>{let r=!1,o=null,n=[],s=null,a=(...l)=>{if(r)return o=l,new Promise(u=>{n.push(u)});r=!0;let i,m=new Promise(u=>{i=u});return s=(async()=>{await Promise.resolve();let u=await e(...l);for(i(u);;){if(await gr(t),!o)return r=!1,s=null,u;let c=o,f=n;o=null,n=[],u=await e(...c),f.forEach(h=>{h(u)})}})(),m};return a.flush=async()=>{if((r||o)&&s)return await s,a.flush()},a},Ee=(e,t,r=!1)=>{let o=null,n=null,s=null,a=function(...l){n=l,s=this;let i=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(s,n),n=null,s=null)},t),i&&(e.apply(s,n),n=null,s=null)};return a.cancel=()=>{clearTimeout(o),o=null,n=null,s=null},a.flush=()=>{if(o){clearTimeout(o);let l=n,i=s;o=null,n=null,s=null,e.apply(i,l)}},a},ft=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(o){t&&(r?.error?r.error("Could not parse JSON",o):fr.error("Could not parse JSON",o))}},gt=(e,t)=>{let n=".netlify.app",s="agent-";if(!t)return`${s}${e.slice(0,6)}`;let 
l=`--${t}${n}`;if(l.length>55)return"";let i=60-l.length;if(i<=0)return"";if(i>=s.length+6){let m=Math.min(i-s.length,e.length);return`${s}${e.slice(0,m)}`}return e.slice(0,i)};import{Buffer as mt}from"buffer";import mr from"path";var ht=_("repo"),yt=async({config:e})=>{ht.info("Getting runner diffs");let t=await yr(),{hasChanges:r}=t,{status:o}=t;if(!r)return{hasChanges:!1};let n=_r(o);await Er(n),ht.info("Changes after processing"),await Le();let s=await Ue(o);await De(s);let a={stdio:["ignore","pipe","pipe"]},i=(await F("git",["diff","--staged"],a)).stdout;if(r=!!i,!r)return{hasChanges:!1,ignored:s};let u=(await F("git",["diff","--staged","--binary"],a)).stdout,c,f;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await F("git",["commit","-m","Agent runner"]),c=(await F("git",["diff",e.sha,"HEAD"],a)).stdout;let g=(await F("git",["diff",e.sha,"HEAD","--binary"],a)).stdout;c!==g&&(f=mt.from(g).toString("base64"))}let h={hasChanges:!0,diff:i,resultDiff:c,ignored:s};return i!==u&&(h.diffBinary=mt.from(u).toString("base64")),f&&(h.resultDiffBinary=f),h},De=async(e=[])=>{await F("git",["add",".",...e])},Le=async()=>(await F("git",["status","-s"])).stdout,_t=/.. (.+)?\.log$/,hr=[_t],yr=async()=>{let e=await Le();return{hasChanges:(e.trim().length===0?[]:e.split(`
+ `).filter(o=>hr.some(s=>s instanceof RegExp?s.test(o):o===s)?!1:o[1]?.trim()!=="")).length!==0,status:e}},Et=async()=>{let{stdout:e}=await F("git",["rev-parse","HEAD"]);return e.trim()},wt=async()=>{let{stdout:e}=await F("git",["rev-list","--max-parents=0","HEAD"]);return e.trim()},Ue=async e=>{e||=await Le();let t=[".netlify","node_modules"],r=[];return e.split(`
+ `).forEach(o=>{t.forEach(s=>{[`?? ${s}`,`?? ${s}${mr.sep}`].some(l=>o.startsWith(l))&&r.push(`:!${s}`)});let n=o.match(_t)?.[1];n&&r.push(`:!${n}.log`)}),r},Tt=async()=>{await F("git",["reset","--hard","HEAD"])},_r=e=>{let t=e.split(`
+ `).reduce((r,o)=>{if(!o)return r;let[n,s,,...a]=o,l=a.join(""),i=n.trim(),m=s.trim();return r[l]?r[l].change=m:r[l]={filePath:l,stage:i,change:m},r},{});return Object.values(t)},Er=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push(F("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import Tr from"fs/promises";import xr from"os";import vt from"path";import V from"process";import Ir from"readline";import Me from"path";import wr from"fs/promises";var je=_("agent-output-utils");async function re({initialResult:e,agentName:t,hasError:r}){let o="",n=Me.join(process.cwd(),D,te);try{let s=await wr.readFile(n,"utf-8");s&&(o=s,je.log(`Pulled result from ${Me.relative(process.cwd(),n)}`))}catch{je.log(`No results file found at ${Me.relative(process.cwd(),n)}`)}return o||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function ne({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,o=r?.replace(/\s+/g," ").trim().toLowerCase()||"",n="";return o?.includes("ai gateway is not available for your account")||o?.includes("ai gateway is not enabled for your account")?n="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":o?.includes("error when talking to gemini api")?n="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(o?.includes("connection closed prematurely")||o?.includes("499")&&t.toLowerCase().includes("gemini"))&&(n=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),o?.includes("request timed out")&&(n=`The ${t} API request's have timed out. Please try again or use a different available agent.`),o?.includes("network error")&&(n=`The ${t} agent is having network issues. 
Please try again or use a different available agent.`),n&&je.log(`Providing updated error messsage: ${n}, replacing original error: ${r}`),n||r||void 0}function oe(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error"))}var L=_("runner_claude"),xt="Claude Code",we="claude-sonnet-4-5-20250929",It=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,vr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(L.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(L.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(L.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function Ge({config:e,netlify:t,persistSteps:r,aiGateway:o,continueSession:n,priorAgentSessionId:s}){let a=e,{accountType:l,prompt:i,modelVersionOverrides:m}=a,{model:u}=a,c="";if(o){let{token:y,url:d}=o;if(!y||!d)throw new Error("No token or url provided from AI Gateway");if(m?.claude){let p=m?.claude?.[l];if(p){if(!await o.isModelAvailableForProvider("anthropic",p))throw new Error(`Model override '${p}' is not available for anthropic provider`);u=p}}else if(u){if(!await o.isModelAvailableForProvider("anthropic",u))throw new Error(`Model '${u}' is not available for anthropic provider`)}else await o.isModelAvailableForProvider("anthropic",we)?(u=we,L.log(`Using default model: ${we}`)):L.log(`Default model ${we} is not available, proceeding without model specification`);V.env.ANTHROPIC_API_KEY=y,V.env.ANTHROPIC_BASE_URL=d}else if(!V.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let f=[],h=[],w={},T=0,g=0,v,I,b=[ee(V.cwd(),"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose",...u?["--model",u]:[],...n?["--continue"]:[],...n&&s?["--resume",s]:[],"-p",i],x=`${V.env.NVM_BIN}/node`;L.log(`Running ${x} ${b.join(" ")}`);let E=t.utils.run(x,b,{all:!0,env:V.env});E.stdin?.end();let R=Ee(()=>{r?.({steps:f,duration:g})},250),S=(y,d)=>{let p={...y,id:T};T+=1,h.push(p),f.push(p),d||R.flush(),R(),d&&R.flush()},A=Ir.createInterface({input:E.all});return A.on("error",y=>{L.error("Readline interface error",{error:y.message,stack:y.stack})}),A.on("line",y=>{let d=null;try{d=JSON.parse(y)}catch{L.log("Could not parse line",y)}d?.session_id&&d.session_id!==c&&(c=d.session_id),Array.isArray(d?.message?.content)?d.message.content.forEach(p=>{switch(p.type){case"text":{p.text&&S({message:p.text});break}case"image":{typeof p.source=="object"&&p.source&&p.source.type==="base64"&&p.source.media_type?S({message:`![](data:${p.source.media_type};base64,${p.source.data})`}):L.log(`Unsupported image type 
${p.source?.type}`,p.source);break}case"tool_use":{if(p.name==="Task"){let O=p.input?.description&&`\`${p.input.description}\``;S({title:[It(p.name),O].filter(Boolean).join(" ")})}else p.id&&(w[p.id]=p);R.flush();break}case"tool_result":{let O=p.tool_use_id?w[p.tool_use_id]:void 0,z;if(O){let H=O.input?.file_path&&vt.relative(V.cwd(),O.input.file_path),P=H&&`\`${H}\``;z=[It(O.name||""),P].filter(Boolean).join(" ")}let fe=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(O?.name||""),K;if(typeof p.content=="string")K=p.content;else if(Array.isArray(p.content)){let H=[];p.content.forEach(P=>{P?.type==="text"&&typeof P.text=="string"?H.push(P.text):P?.type==="image"&&typeof P.source=="object"&&P.source?P.source.type==="base64"&&P.source.media_type?H.push(`![](data:${P.source.media_type};base64,${P.source.data})`):L.log(`Unsupported image type ${P.source.type}`,P.source):L.log(`Unsupported block type ${P?.type}`)}),K=H.join(`
 
- `)}We&&B&&(B=`\`\`\`
- ${B.trim()}
- \`\`\``),w({title:se,message:B},!0);break}case"thinking":{p.thinking&&w({title:"Thinking",message:p.thinking},!0);break}default:k.log(`Message content type is not supported ${p.type}`,p)}}):T?.type==="result"&&(g=T.duration_ms||0,T.is_error?v=T.result:S=T.result,[h,d].forEach(p=>{p[p.length-1]?.message===S&&p.pop()}))}),await _.catch(f=>{({error:v,result:S}=wr({catchError:f,runCmd:_,error:v,result:S,runnerName:"Claude"}))}),y.close(),A.flush(),{steps:h,duration:g,result:await ee({initialResult:S,agentName:Tt,hasError:!!v}),error:te({error:v,agentName:Tt}),isRetryableError:re(v),agentSessionId:l}}var Rt=async()=>{let e=xt.join(Er.homedir(),".claude");await yr.rm(e,{recursive:!0,force:!0})};import Tr from"fs/promises";import Ir from"os";import xr from"path";import ne from"process";import Rr from"readline";var W=E("runner_codex"),vt="Codex CLI",vr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(W.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(W.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(W.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function je({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:s,prompt:a,modelVersionOverrides:u}=e,{model:i}=e;if(n){let{token:R,url:_}=n;if(!R||!_)throw new Error("No token or url provided from AI Gateway");if(u?.codex){let A=u?.codex?.[s];if(A){if(!await n.isModelAvailableForProvider("openai",A))throw new Error(`Model override '${A}' is not available for openai provider`);i=A}}else if(i&&!await n.isModelAvailableForProvider("openai",i))throw new Error(`Model '${i}' is not available for openai provider`);ne.env.OPENAI_API_KEY=R,ne.env.OPENAI_BASE_URL=_}else if(!ne.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let m=[],c=[],l={},d=0,h=0,x,I,g=[Z(ne.cwd(),"codex"),"--approval-mode","full-auto",n?"--disable-response-storage":void 0,"--dangerously-auto-approve-everything",...i?["--model",i]:[],"-q",a].filter(Boolean),S=`${ne.env.NVM_BIN}/node`;W.log(`Running ${S} ${g.join(" ")}`);let v=t.utils.run(S,g,{all:!0,env:{...ne.env,CODEX_UNSAFE_ALLOW_NO_SANDBOX:"1"}}),N=Rr.createInterface({input:v.all});return N.on("error",R=>{W.error("Readline interface error",{error:R.message,stack:R.stack})}),N.on("line",R=>{let _=null;try{_=JSON.parse(R)}catch{W.log("Could not parse line",R);return}let A=[],w=!1;if(_?.duration_ms&&(h=_.duration_ms,w=!0),_?.type==="local_shell_call")l[_.call_id]=_;else if(_?.type==="local_shell_call_output"){let y=Ar(l[_.call_id],_);y&&(y.id=d,d+=1,c.push(y),m.push(y),A.push(y),w=!0)}else _?.type==="message"&&_.role==="assistant"?x=_.content.map(y=>y.text).join(`
- `):_?.type==="message"&&_.role==="system"&&(I=_.content.map(y=>y.text).join(`
- `));w&&(r?.({steps:m,duration:h}),o?.({steps:A,duration:h}))}),await v.catch(R=>{let _=vr({catchError:R,runCmd:v,error:I,result:x,runnerName:"Codex"});I=_.error,x=_.result}),N.close(),{steps:c,duration:h,result:await ee({initialResult:x,agentName:vt,hasError:!!I}),error:te({error:I,agentName:vt}),isRetryableError:re(I)}}var St=async()=>{let e=xr.join(Ir.homedir(),".codex");await Tr.rm(e,{recursive:!0,force:!0})},Sr=new Set(["bash","-lc"]),Ar=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(s=>!Sr.has(s)),o=r?`Running \`${r.join(" ")}\``:void 0,n;try{n=JSON.parse(t.output).output?.trim(),n&&(n=`\`\`\`
+ `)}fe&&K&&(K=`\`\`\`
+ ${K.trim()}
+ \`\`\``),S({title:z,message:K},!0);break}case"thinking":{p.thinking&&S({title:"Thinking",message:p.thinking},!0);break}default:L.log(`Message content type is not supported ${p.type}`,p)}}):d?.type==="result"&&(g=d.duration_ms||0,d.is_error?I=d.result:v=d.result,[h,f].forEach(p=>{p[p.length-1]?.message===v&&p.pop()}))}),await E.catch(y=>{({error:I,result:v}=vr({catchError:y,runCmd:E,error:I,result:v,runnerName:"Claude"}))}),A.close(),R.flush(),{steps:h,duration:g,result:await re({initialResult:v,agentName:xt,hasError:!!I}),error:ne({error:I,agentName:xt}),isRetryableError:oe(I),agentSessionId:c}}var Rt=async()=>{let e=vt.join(xr.homedir(),".claude");await Tr.rm(e,{recursive:!0,force:!0})};import Rr from"fs/promises";import Sr from"os";import Ar from"path";import se from"process";import br from"readline";var J=_("runner_codex"),St="Codex CLI",Nr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(J.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(J.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(J.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function Ye({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:s,prompt:a,modelVersionOverrides:l}=e,{model:i}=e;if(n){let{token:x,url:E}=n;if(!x||!E)throw new Error("No token or url provided from AI Gateway");if(l?.codex){let R=l?.codex?.[s];if(R){if(!await n.isModelAvailableForProvider("openai",R))throw new Error(`Model override '${R}' is not available for openai provider`);i=R}}else if(i&&!await n.isModelAvailableForProvider("openai",i))throw new Error(`Model '${i}' is not available for openai provider`);se.env.OPENAI_API_KEY=x,se.env.OPENAI_BASE_URL=E}else if(!se.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let m=[],u=[],c={},f=0,h=0,w,T,g=[ee(se.cwd(),"codex"),"--approval-mode","full-auto",n?"--disable-response-storage":void 0,"--dangerously-auto-approve-everything",...i?["--model",i]:[],"-q",a].filter(Boolean),v=`${se.env.NVM_BIN}/node`;J.log(`Running ${v} ${g.join(" ")}`);let I=t.utils.run(v,g,{all:!0,env:{...se.env,CODEX_UNSAFE_ALLOW_NO_SANDBOX:"1"}}),b=br.createInterface({input:I.all});return b.on("error",x=>{J.error("Readline interface error",{error:x.message,stack:x.stack})}),b.on("line",x=>{let E=null;try{E=JSON.parse(x)}catch{J.log("Could not parse line",x);return}let R=[],S=!1;if(E?.duration_ms&&(h=E.duration_ms,S=!0),E?.type==="local_shell_call")c[E.call_id]=E;else if(E?.type==="local_shell_call_output"){let A=Pr(c[E.call_id],E);A&&(A.id=f,f+=1,u.push(A),m.push(A),R.push(A),S=!0)}else E?.type==="message"&&E.role==="assistant"?w=E.content.map(A=>A.text).join(`
+ `):E?.type==="message"&&E.role==="system"&&(T=E.content.map(A=>A.text).join(`
+ `));S&&(r?.({steps:m,duration:h}),o?.({steps:R,duration:h}))}),await I.catch(x=>{let E=Nr({catchError:x,runCmd:I,error:T,result:w,runnerName:"Codex"});T=E.error,w=E.result}),b.close(),{steps:u,duration:h,result:await re({initialResult:w,agentName:St,hasError:!!T}),error:ne({error:T,agentName:St}),isRetryableError:oe(T)}}var At=async()=>{let e=Ar.join(Sr.homedir(),".codex");await Rr.rm(e,{recursive:!0,force:!0})},Cr=new Set(["bash","-lc"]),Pr=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(s=>!Cr.has(s)),o=r?`Running \`${r.join(" ")}\``:void 0,n;try{n=JSON.parse(t.output).output?.trim(),n&&(n=`\`\`\`
  ${n.trim()}
- \`\`\``)}catch(s){W.error("Could not decode outputMsg",s,t.output)}return{title:o,message:n}};import Nr from"fs/promises";import br from"os";import Nt from"path";import V from"process";import Cr from"readline";var oe=E("runner_gemini"),At="Gemini CLI",Pr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(oe.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(oe.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(oe.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),Or={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"};async function Ye({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:s,prompt:a,modelVersionOverrides:u}=e,{model:i}=e;if(n){let{token:w,url:y}=n;if(!w||!y)throw new Error("No token or url provided from AI Gateway");if(u?.gemini){let f=u?.gemini?.[s];if(f){if(!await n.isModelAvailableForProvider("gemini",f))throw new Error(`Model override '${f}' is not available for gemini provider`);i=f}}else if(i&&!await n.isModelAvailableForProvider("gemini",i))throw new Error(`Model '${i}' is not available for gemini provider`);V.env.GEMINI_API_KEY=w,V.env.GOOGLE_GEMINI_BASE_URL=y}else if(!V.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let m=[],c=[],l=[],d={},h=0,x=0,I,g,S=[Z(V.cwd(),"gemini"),...i?["--model",i]:[],"--yolo","-p",a],v=`${V.env.NVM_BIN}/node`;oe.log(`Running ${v} ${S.join(" ")}`);let N=t.utils.run(v,S,{all:!0,env:V.env});N.stdin?.end();let R=ye(()=>{r?.({steps:m,duration:x}),o?.({steps:c,duration:x}),c=[]},250),_=(w,y)=>{w.id=h,h+=1,l.push(w),m.push(w),c.push(w),y||R.flush(),R(),y&&R.flush()},A=Cr.createInterface({input:N.all});return A.on("error",w=>{oe.error("Readline interface error",{error:w.message,stack:w.stack})}),A.on("line",w=>{let y=null;try{if(w.startsWith("[API Error")){let f=w.match(/\[api error: (.+?)]$/i)?.[1];y={type:"error",value:dt(f,!1)?.error?.message||f||"Gemini encountered error"}}else y=JSON.parse(w)}catch{return}if(y)switch(y.type){case"thought":{let f=y.value;_({title:f?.subject??"Thinking...",message:f?.description},!0);break}case"content":{y.value&&_({message:y.value});break}case"tool_call_request":{let f=y.value,T=Or[f.name]??f.name,p=f.args?.path||f.args?.absolute_path,P=p&&Nt.relative(V.cwd(),p),se=f.args?.command,B={title:[T,P&&`\`${P}\``,se&&`\`${se}\``].filter(Boolean).join(" ")};d[f.callId]=B,R.flush();break}case"tool_result":{let f=y.value,T=d[f.callId];if(T){let p=[f.resultDisplay,f.responseParts?.functionResponse?.response?.output].find(P=>typeof P=="string"&&P);p&&(T.message=`\`\`\`
- ${p.trim()}
- \`\`\``),_(T,!0)}break}case"result":{x=y.duration_ms,I=y.value,[l,m,c].forEach(f=>{f[f.length-1]?.message===I&&f.pop()});break}case"error":{g=y.value;break}case"finished":break;default:{oe.warn("Unhandled message type:",y.type);break}}}),await N.catch(w=>{({error:g,result:I}=Pr({catchError:w,runCmd:N,error:g,result:I,runnerName:"Gemini"}))}),A.close(),R.flush(),{steps:l,duration:x,result:await ee({initialResult:I,agentName:At,hasError:!!g}),error:te({error:g,agentName:At}),isRetryableError:re(g)}}var bt=async()=>{let e=Nt.join(br.homedir(),".gemini");await Nr.rm(e,{recursive:!0,force:!0})};var Fr={codex:{runner:je,clean:St},claude:{runner:Ge,clean:Rt},gemini:{runner:Ye,clean:bt}},Ct=Fr;var Dr=E("init_stage"),Pt=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:o})=>await b($r(),"init-stage",async n=>{let s=performance.now();n?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.validateAgent":e.validateAgent,"init.runnerVersion":o||"unknown"});let a=Ct[e.runner];if(!a)throw n?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let u=Lr({apiToken:r});Qe(u);let i=e.useGateway?await st({netlify:u,config:e}):void 0;n?.setAttributes({"init.aiGateway.created":!!i}),e.validateAgent&&e.errorLogsPath&&n?.setAttributes({"init.errorLogsPath":e.errorLogsPath});let m=pt(({steps:h=[],duration:x})=>{let I=h.map(g=>({...g,title:g.title?j(g.title):void 0,message:g.message?j(g.message):void 0}));return h.length=0,G(e.id,e.sessionId,{steps:I,duration:x})},t);Dr.info("Adding build files to stage");let c=await ke();await De(c);let l;e.hasRepo?e.sha?(l=e.sha,n?.setAttributes({"init.sha.source":"provided"})):(l=await Et(),await le(e.id,{sha:l}),n?.setAttributes({"init.sha.source":"current_commit"})):(l=await _t(),n?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let d=performance.now()-s;return n?.setAttributes({"init.sha":l||"unknown","init.duration.ms":d,"init.status":"success"}),{aiGateway:i,context:u,persistSteps:m,runner:a,sha:l}}),Lr=({apiToken:e})=>({constants:{NETLIFY_API_HOST:_e.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||_e.env.NETLIFY_API_TOKEN,SITE_ID:_e.env.SITE_ID,FUNCTIONS_DIST:_e.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:F}});import{getTracer as Be}from"@netlify/otel";import kr from"crypto";import H from"fs/promises";import D from"path";import U from"process";var $=E("context"),Ur=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:U.env.NETLIFY_TEAM_ID,userId:U.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:U.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},Mr=10,Gr=async e=>{let{name:t,ext:r}=D.parse(e),o=e,n=D.join(U.cwd(),L,o),s=0;for(;await jr(n);){if(s>=Mr)throw new Error("Failed to generate context file");o=`${t}-${kr.randomUUID().slice(0,5)}${r}`,n=D.join(U.cwd(),L,o),s+=1}return o},jr=async e=>{try{return await H.access(e),!0}catch{return!1}},Yr=async()=>{try{$.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return $.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(o=>o&&typeof o=="object"&&o.key==="catchall-consumer");return r?!r.contextScopes||typeof 
r.contextScopes!="object"?($.warn("Catchall consumer missing or invalid contextScopes"),null):r:($.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?$.warn("Netlify features context request timed out"):$.warn("Failed to fetch Netlify features context:",e.message),null}},Br=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let o=await r.text();return await H.writeFile(t,o,"utf-8"),!0}catch(r){return r.name==="AbortError"?$.warn(`Download timeout for ${e}`):$.warn(`Failed to download context file ${e}:`,r.message),!1}},we=null,Hr=async()=>{if(we)return we;let e=await Yr();if(!e)return[];let t=D.join(U.cwd(),L,Fe);await H.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([n,s])=>{if(!s||typeof s!="object"||!s.endpoint||!s.scope)return $.warn(`Invalid scope data for ${n}, skipping...`),null;let a=`${n}.md`,u=D.join(t,a),i=D.join(L,Fe,a);return $.log(`Downloading ${s.scope} context...`),await Br(s.endpoint,u)?($.log(`Downloaded: ${i}`),{scope:s.scope,path:i,key:n}):null});return we=(await Promise.all(r)).filter(n=>n!==null),we},Ot=async({cliPath:e,netlify:t,config:r,buildErrorContext:o})=>{let n=Ur(t),s=await Gr(ct),a=D.join(U.cwd(),L);await H.mkdir(a,{recursive:!0});let u=D.join(L,s),i=D.join(U.cwd(),u),m=D.join(U.cwd(),L,Q);try{await H.unlink(m),$.log(`Deleted old results file: ${m}`)}catch{}let c=o?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
18
+ \`\`\``)}catch(s){J.error("Could not decode outputMsg",s,t.output)}return{title:o,message:n}};import Or from"fs/promises";import Fr from"os";import Nt from"path";import X from"process";import $r from"readline";var ie=_("runner_gemini"),bt="Gemini CLI",kr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(ie.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(ie.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(ie.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),Dr={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"};async function Be({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:s,prompt:a,modelVersionOverrides:l}=e,{model:i}=e;if(n){let{token:y,url:d}=n;if(!y||!d)throw new Error("No token or url provided from AI Gateway");if(l?.gemini){let p=l?.gemini?.[s];if(p){if(!await n.isModelAvailableForProvider("gemini",p))throw new Error(`Model override '${p}' is not available for gemini provider`);i=p}}else if(i&&!await n.isModelAvailableForProvider("gemini",i))throw new Error(`Model '${i}' is not available for gemini provider`);X.env.GEMINI_API_KEY=y,X.env.GOOGLE_GEMINI_BASE_URL=d}else if(!X.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let m=[],u=[],c=[],f={},h=0,w=0,T,g,v=[ee(X.cwd(),"gemini"),...i?["--model",i]:[],"--yolo","--output-format","stream-json","-p",a],I=`${X.env.NVM_BIN}/node`;ie.log(`Running ${I} ${v.join(" ")}`);let b=t.utils.run(I,v,{all:!0,env:X.env});b.stdin?.end();let x=Ee(()=>{r?.({steps:m,duration:w}),o?.({steps:u,duration:w}),u=[]},250),E=(y,d)=>{y.id=h,h+=1,c.push(y),m.push(y),u.push(y),d||x.flush(),x(),d&&x.flush()},R=$r.createInterface({input:b.all});R.on("error",y=>{ie.error("Readline interface error",{error:y.message,stack:y.stack})});let S="",A=()=>{S&&E({message:S.trim()}),S=""};return R.on("line",y=>{let d=null;try{if(y.startsWith("[API Error")){let p=y.match(/\[api error: (.+?)]$/i)?.[1];d={type:"error",value:ft(p,!1)?.error?.message||p||"Gemini encountered error"}}else d=JSON.parse(y)}catch{return}if(d)switch(["message","result"].includes(d.type)||A(),d.type){case"message":{d.role!=="user"&&d.content&&(S+=d.content);break}case"tool_use":{let p=Dr[d.tool_name]??d.tool_name,O=d.parameters?.file_path,z=O&&Nt.relative(X.cwd(),O),fe=d.parameters?.command,H={title:[p,z&&`\`${z}\``,fe&&`\`${fe}\``].filter(Boolean).join(" ")};f[d.tool_id]=H,x.flush();break}case"tool_result":{let p=f[d.tool_id];p&&(d.output&&(p.message=`\`\`\`
19
+ ${d.output.trim()}
20
+ \`\`\``),E(p,!0));break}case"result":{w=d.stats?.duration_ms,d.status==="error"?g=d.error?.message:T=S.trim();break}case"error":{g=d.error;break}case"finished":break;default:{ie.warn("Unhandled message type:",d.type);break}}}),await b.catch(y=>{({error:g,result:T}=kr({catchError:y,runCmd:b,error:g,result:T,runnerName:"Gemini"}))}),R.close(),x.flush(),{steps:c,duration:w,result:await re({initialResult:T,agentName:bt,hasError:!!g}),error:ne({error:g,agentName:bt}),isRetryableError:oe(g)}}var Ct=async()=>{let e=Nt.join(Fr.homedir(),".gemini");await Or.rm(e,{recursive:!0,force:!0})};var Lr={codex:{runner:Ye,clean:At},claude:{runner:Ge,clean:Rt},gemini:{runner:Be,clean:Ct}},Pt=Lr;var Mr=_("init_stage"),Ot=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:o})=>await N(Ur(),"init-stage",async n=>{let s=performance.now();n?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.validateAgent":e.validateAgent,"init.runnerVersion":o||"unknown"});let a=Pt[e.runner];if(!a)throw n?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let l=jr({apiToken:r});Qe(l);let i=e.useGateway?await st({netlify:l,config:e}):void 0;n?.setAttributes({"init.aiGateway.created":!!i}),e.validateAgent&&e.errorLogsPath&&n?.setAttributes({"init.errorLogsPath":e.errorLogsPath});let m=dt(({steps:h=[],duration:w})=>{let T=h.map(g=>({...g,title:g.title?Y(g.title):void 0,message:g.message?Y(g.message):void 0}));return h.length=0,G(e.id,e.sessionId,{steps:T,duration:w})},t);Mr.info("Adding build files to stage");let u=await Ue();await De(u);let c;e.hasRepo?e.sha?(c=e.sha,n?.setAttributes({"init.sha.source":"provided"})):(c=await Et(),await ce(e.id,{sha:c}),n?.setAttributes({"init.sha.source":"current_commit"})):(c=await wt(),n?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let f=performance.now()-s;return n?.setAttributes({"init.sha":c||"unknown","init.duration.ms":f,"init.status":"success"}),{aiGateway:i,context:l,persistSteps:m,runner:a,sha:c}}),jr=({apiToken:e})=>({constants:{NETLIFY_API_HOST:Te.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||Te.env.NETLIFY_API_TOKEN,SITE_ID:Te.env.SITE_ID,FUNCTIONS_DIST:Te.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:F}});import{getTracer as He}from"@netlify/otel";import Gr from"crypto";import q from"fs/promises";import k from"path";import U from"process";var $=_("context"),Yr=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:U.env.NETLIFY_TEAM_ID,userId:U.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:U.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},Br=10,Hr=async e=>{let{name:t,ext:r}=k.parse(e),o=e,n=k.join(U.cwd(),D,o),s=0;for(;await qr(n);){if(s>=Br)throw new Error("Failed to generate context file");o=`${t}-${Gr.randomUUID().slice(0,5)}${r}`,n=k.join(U.cwd(),D,o),s+=1}return o},qr=async e=>{try{return await q.access(e),!0}catch{return!1}},Kr=async()=>{try{$.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return $.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(o=>o&&typeof o=="object"&&o.key==="catchall-consumer");return r?!r.contextScopes||typeof 
r.contextScopes!="object"?($.warn("Catchall consumer missing or invalid contextScopes"),null):r:($.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?$.warn("Netlify features context request timed out"):$.warn("Failed to fetch Netlify features context:",e.message),null}},Wr=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let o=await r.text();return await q.writeFile(t,o,"utf-8"),!0}catch(r){return r.name==="AbortError"?$.warn(`Download timeout for ${e}`):$.warn(`Failed to download context file ${e}:`,r.message),!1}},xe=null,Vr=async()=>{if(xe)return xe;let e=await Kr();if(!e)return[];let t=k.join(U.cwd(),D,$e);await q.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([n,s])=>{if(!s||typeof s!="object"||!s.endpoint||!s.scope)return $.warn(`Invalid scope data for ${n}, skipping...`),null;let a=`${n}.md`,l=k.join(t,a),i=k.join(D,$e,a);return $.log(`Downloading ${s.scope} context...`),await Wr(s.endpoint,l)?($.log(`Downloaded: ${i}`),{scope:s.scope,path:i,key:n}):null});return xe=(await Promise.all(r)).filter(n=>n!==null),xe},Ft=async({cliPath:e,netlify:t,config:r,buildErrorContext:o})=>{let n=Yr(t),s=await Hr(pt),a=k.join(U.cwd(),D);await q.mkdir(a,{recursive:!0});let l=k.join(D,s),i=k.join(U.cwd(),l),m=k.join(U.cwd(),D,te);try{await q.unlink(m),$.log(`Deleted old results file: ${m}`)}catch{}let u=o?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
21
21
  Your task is to analyze and fix the build errors.
22
22
  Don't apply techniques of reverting changes. Apply fixes related to errors.
23
23
  Don't try to run build by yourself. Just fix the errors.
24
24
 
25
25
  <build_error_context>
26
26
  ${o}
27
- </build_error_context>`:"",l="";r.siteContext&&r.siteContext.length!==0&&(l=`
27
+ </build_error_context>`:"",c="";r.siteContext&&r.siteContext.length!==0&&(c=`
28
28
  <project_rules>
29
29
  ${r.siteContext.filter(g=>g.site_context).map(g=>typeof g.site_context=="string"?g.site_context:typeof g.site_context=="object"?JSON.stringify(g.site_context):"").join(`
30
30
 
31
31
  `)}
32
32
  </project_rules>
33
- `);let d="";if(r.sessionHistoryContext?.length){let g=D.join(U.cwd(),L,Oe);await H.mkdir(g,{recursive:!0});let S=await Promise.all(r.sessionHistoryContext.map(async(v,N)=>{let R=N+1,_=`attempt-${R}.md`,A=D.join(g,_),w=D.join(L,Oe,_),y=`# Task History - Attempt ${R}
33
+ `);let f="";if(r.sessionHistoryContext?.length){let g=k.join(U.cwd(),D,Fe);await q.mkdir(g,{recursive:!0});let v=await Promise.all(r.sessionHistoryContext.map(async(I,b)=>{let x=b+1,E=`attempt-${x}.md`,R=k.join(g,E),S=k.join(D,Fe,E),A=`# Task History - Attempt ${x}
34
34
 
35
35
  ## Request - what the user asked for
36
- ${v.request}
36
+ ${I.request}
37
37
 
38
38
  ---
39
39
 
40
40
  ## Response - what the agent replied with after its work
41
41
 
42
- ${v.response}
43
- `;return await H.writeFile(A,y,"utf-8"),$.log(`Created history file: ${w}`),w}));d+=`
42
+ ${I.response}
43
+ `;return await q.writeFile(R,A,"utf-8"),$.log(`Created history file: ${S}`),S}));f+=`
44
44
  <session_history_context>
45
45
  History of prior work on this task.
46
46
  You MUST review ALL of the files below as context to understand the context of previous attempts. Use this information to continue the discussion appropriately.
47
47
 
48
- ${S.slice(-5).map(v=>`- ${v}`).join(`
48
+ ${v.slice(-5).map(I=>`- ${I}`).join(`
49
49
  `)}
50
50
 
51
51
  </session_history_context>
52
- `}let h=await Hr(),x="";h.length>0&&(x=`
52
+ `}let h=await Vr(),w="";h.length>0&&(w=`
53
53
  <netlify_features_context>
54
54
  If the user request is explicitly related to a specific Netlify feature (e.g., Netlify Forms, Netlify Functions, etc.), you MUST review the relevant documentation below in addition to reviewing the project files.
55
55
  DO NOT force the use of any Netlify feature if the user request does not explicitly require it or if the project has alternative implementations in place already.
@@ -59,30 +59,30 @@ ${v.response}
59
59
 
60
60
  Refer to these files when working with specific Netlify features.
61
61
  </netlify_features_context>
62
- `);let I=`
62
+ `);let T=`
63
63
  You're an AI agent designed to assist with tasks related to a Netlify project. Please review, understand, and use the context provided to complete the user's request as needed.
64
64
 
65
65
  <request>
66
66
  <user_request>
67
67
  ${r.prompt}
68
68
  </user_request>
69
- ${c}
69
+ ${u}
70
70
  </request>
71
71
 
72
72
  <requirements>
73
73
  <responses>
74
74
  - Do not speak in first person. You may speak as "the agent".
75
- - When work is complete, write a changes summary in ${a}/${Q} as a standalone PR description. Explain what was accomplished and why (avoid too many implementation details), assuming the reader has no prior context. Use past tense and write in prose without calling it a "PR", "Changelog", etc. This is the core of a PR message or summary page that already has a heading.
76
- - If the user's request is informational in nature (asking for output, status, information, or analysis rather than asking you to make changes), write the requested information directly to the ${a}/${Q} file.
75
+ - When work is complete, write a changes summary in ${a}/${te} as a standalone PR description. Explain what was accomplished and why (avoid too many implementation details), assuming the reader has no prior context. Use past tense and write in prose without calling it a "PR", "Changelog", etc. This is the core of a PR message or summary page that already has a heading.
76
+ - If the user's request is informational in nature (asking for output, status, information, or analysis rather than asking you to make changes), write the requested information directly to the ${a}/${te} file.
77
77
  - Do not attempt to create git commits, PRs, etc. directly. You can use git to review information if required but the system that runs this agent will handle creating PRs or commits of the changes it performs.
78
78
  - NEVER look into the \`.git\` folder
79
79
  - NEVER print potentially sensitive values (like secrets) in the planning output or results
80
80
  </responses>
81
81
  <attachements>
82
- - for requests that require work with attachments or assets, take into account that uploaded attachments are stored in ${a}/${$e} folder
83
- - move assets from ${a}/${$e} folder to the project assets folder if they are referenced in a code or applied changes
82
+ - for requests that require work with attachments or assets, take into account that uploaded attachments are stored in ${a}/${ke} folder
83
+ - move assets from ${a}/${ke} folder to the project assets folder if they are referenced in a code or applied changes
84
84
  </attachements>
85
- ${l}
85
+ ${c}
86
86
  </requirements>
87
87
 
88
88
  <extra_context>
@@ -99,36 +99,36 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
99
99
  - 'netlify-cli' npm package is already available as a global package. Don't try to install it again
100
100
  - If you need to start a local development server in order to fulfill the request, try using the Netlify CLI over by running the shell command '${e} dev'. This will start a local HTTP server on port 8888, including live-reloading of any changes and, most critically, it offers local emulation for all Netlify features.
101
101
  </environment>
102
- ${x}
102
+ ${w}
103
103
  <docs>
104
104
  - Netlify Docs: https://docs.netlify.com
105
105
  - LLM Resources Index: https://docs.netlify.com/llms.txt
106
106
  </docs>
107
107
  </extra_context>
108
108
 
109
- ${d}
110
- `;return await H.writeFile(i,I,"utf-8"),$.log(`Generated agent context document at: ${i}`),I.length>5e5&&(I=`
109
+ ${f}
110
+ `;return await q.writeFile(i,T,"utf-8"),$.log(`Generated agent context document at: ${i}`),T.length>5e5&&(T=`
111
111
  You're an AI agent designed to assist with tasks related to a Netlify project. Please review, understand, and use the context provided to complete the user's request as needed.
112
112
 
113
113
  <request>
114
114
  <user_request>
115
115
  ${r.prompt}
116
116
  </user_request>
117
- ${c}
117
+ ${u}
118
118
  </request>
119
119
 
120
120
  Use the following file for the complete context of the ask, the environment, and what's available. ${i} You MUST READ ALL OF IT. Make sure to read it first. Never cite or paraphrase private context.
121
- `),I};var qr=E("prompt"),Ft=async({cliPath:e,config:t,netlify:r,buildErrorContext:o})=>{let n=await Ot({cliPath:e,config:t,netlify:r,buildErrorContext:o});return process.env.AGENT_RUNNER_DEBUG&&qr.log("Contextful Prompt:",n),{prompt:n}};var Te=E("inference_stage"),$t=5,Ie=async e=>{let{cliPath:t,config:r,context:o,buildErrors:n,runner:s,persistSteps:a,aiGateway:u,attempt:i,contextPrefix:m,priorAgentSessionId:c}=e;Te.log(`Running inference stage, attempt ${i} of ${$t}`);let l=await b(Be(),"inference-stage",async d=>{d?.setAttributes({"inference.attempt":i||1}),it();let{prompt:h}=await b(Be(),"compose-prompt",async()=>await Ft({cliPath:t,config:r,buildErrorContext:Kr(n),netlify:o})),x=`
121
+ `),T};var Jr=_("prompt"),$t=async({cliPath:e,config:t,netlify:r,buildErrorContext:o})=>{let n=await Ft({cliPath:e,config:t,netlify:r,buildErrorContext:o});return process.env.AGENT_RUNNER_DEBUG&&Jr.log("Contextful Prompt:",n),{prompt:n}};var Ie=_("inference_stage"),kt=5,ve=async e=>{let{cliPath:t,config:r,context:o,buildErrors:n,runner:s,persistSteps:a,aiGateway:l,attempt:i,contextPrefix:m,priorAgentSessionId:u}=e;Ie.log(`Running inference stage, attempt ${i} of ${kt}`);let c=await N(He(),"inference-stage",async f=>{f?.setAttributes({"inference.attempt":i||1}),it();let{prompt:h}=await N(He(),"compose-prompt",async()=>await $t({cliPath:t,config:r,buildErrorContext:Xr(n),netlify:o})),w=`
122
122
  ${m||""}
123
123
  ${h}
124
- `.trim(),I={...r,prompt:x},g=await b(Be(),`run-${r.runner}`,async()=>await s({aiGateway:u,config:I,netlify:o,persistSteps:a,continueSession:!!(i&&i>1),priorAgentSessionId:c}));return g.result&&(g.result=j(g.result)),g.error&&(g.error=j(g.error)),await a.flush(),g});if(l.error){if(Te.error("Runner failed",{stepsCount:l.steps.length,duration:l.duration,error:l.error,isRetryableError:l.isRetryableError,attempt:i||1,agentSessionId:l.agentSessionId}),l.isRetryableError&&(!i||i<$t))return Te.log("Retrying inference stage"),await new Promise(h=>setTimeout(h,5e3)),{runnerResult:(await Ie({...e,attempt:(i||1)+1,priorAgentSessionId:l.agentSessionId,contextPrefix:l.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw Te.log("Do not retry inference stage"),new Error(l.error)}return{runnerResult:l}},Kr=e=>!e||e.length===0?"":`
124
+ `.trim(),T={...r,prompt:w},g=await N(He(),`run-${r.runner}`,async()=>await s({aiGateway:l,config:T,netlify:o,persistSteps:a,continueSession:!!(i&&i>1),priorAgentSessionId:u}));return g.result&&(g.result=Y(g.result)),g.error&&(g.error=Y(g.error)),await a.flush(),g});if(c.error){if(Ie.error("Runner failed",{stepsCount:c.steps.length,duration:c.duration,error:c.error,isRetryableError:c.isRetryableError,attempt:i||1,agentSessionId:c.agentSessionId}),c.isRetryableError&&(!i||i<kt))return Ie.log("Retrying inference stage"),await new Promise(h=>setTimeout(h,5e3)),{runnerResult:(await ve({...e,attempt:(i||1)+1,priorAgentSessionId:c.agentSessionId,contextPrefix:c.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw Ie.log("Do not retry inference stage"),new Error(c.error)}return{runnerResult:c}},Xr=e=>!e||e.length===0?"":`
125
125
  Deploy failed failed. Here are the errors to review on the latest build:
126
126
 
127
127
  Below are all of the logs with potential issues that we extracted. Some of them may be false positives, discern them carefully and ensure fixes are relevant.
128
128
 
129
129
  ${e.pop()}
130
- `;import Jr from"process";import{getTracer as He}from"@netlify/otel";import{getTracer as Wr}from"@netlify/otel";var ce=E("deploy"),Dt=async e=>await b(Wr(),"create-preview-deploy",async t=>Vr(e,t)),Vr=async({netlify:e,hasRepo:t,skipBuild:r,message:o="Agent Preview",deploySubdomain:n,cliPath:s,filter:a},u)=>{try{let i=["deploy","--message",`"${o}"`,"--json","--draft","--verbose"];t||(ce.log("Deploy: Uploading source zip"),i.push("--upload-source-zip")),n&&i.push("--alias",n),a&&i.push("--filter",a),r?(ce.log("Deploy: Skipping build"),i.push("--no-build")):i.push("--context","deploy-preview");let m=s||"netlify";ce.log(`Running: ${m} ${i.join(" ")}`),u?.setAttributes({cmd:m,args:i});let c=await e.utils.run(m,i,{stdio:["ignore","pipe","pipe"]}),l=JSON.parse(c.stdout.trim());u?.setAttributes({success:!0,deployId:l.deploy_id,deployUrl:l.deploy_url,siteId:l.site_id}),ce.log(`
131
- Preview deploy created successfully:`,{deployId:l.deploy_id,deployUrl:l.deploy_url,siteId:l.site_id});let d={deployId:l.deploy_id,previewUrl:l.deploy_url,logsUrl:l.logs,siteId:l.site_id};return t||(d.sourceZipFilename=l.source_zip_filename),d}catch(i){throw ce.error("Failed to create preview deploy via CLI:",i),u?.setAttributes({success:!1,error:i.message}),i}};var pe=E("deploy_stage"),qe=async e=>await b(He(),"run-deploy-stage",async()=>Xr(e)),Xr=async({cliPath:e,config:t,context:r,result:o,filter:n})=>{let s=await b(He(),"get-runner-diffs",async()=>await ht({config:t,netlify:r}));if(pe.info("Resolved git",{hasChanges:s.hasChanges,ignored:s.ignored??[]}),!s.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:a,resultDiff:u,diffBinary:i,resultDiffBinary:m}=s,c=!0;pe.log("Preview deploy condition check:",{resultUndefined:o===void 0,resultType:typeof o,hasChanges:c,wouldCreatePreview:o!==void 0&&c});let l=null;if(o!==void 0&&c)try{let d;try{let h=await b(He(),"get-runner-session",async()=>await tt(t.id,t.sessionId));h?.title&&(d=h.title)}catch(h){pe.warn("Failed to fetch session title, using fallback message:",h.message)}await G(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),l=await Dt({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:d,skipBuild:!1,deploySubdomain:ft(t.id,Jr.env.SITE_NAME),filter:n})}catch(d){return pe.warn("Failed to create preview deploy (continuing with agent run):",d),{diff:a,resultDiff:u,hasChanges:c,previewInfo:null,diffBinary:i,resultDiffBinary:m,deployError:d instanceof Error?d.message:String(d)}}return pe.log("Git status",{hasDiff:!!a,hasChanges:c}),{diff:a,resultDiff:u,hasChanges:c,previewInfo:l,diffBinary:i,resultDiffBinary:m}};import{getTracer as Re}from"@netlify/otel";async function Lt(e,t){let{maxRetries:r,baseDelay:o,onRetry:n}=t,s;for(let a=1;a<=r;a++)try{return await e()}catch(u){if(s=u,a===r)throw s;n&&n(a,s),await new Promise(i=>setTimeout(i,o*a))}throw s}var xe=class{scanDiffForForms(t){let r=[],o=null,n=[],s=t.split(`
132
- `);for(let a of s)if(a.startsWith("diff --git")){if(o&&n.length>0){let i=this.containsNetlifyForm(n,o);i&&r.push(i)}let u=a.split(" ");o=u[u.length-1].replace(/^b\//,""),n=[]}else a.startsWith("+")&&!a.startsWith("+++")&&n.push(a.slice(1));if(o&&n.length>0){let a=this.containsNetlifyForm(n,o);a&&r.push(a)}return{detected:r.length>0,matches:r}}containsNetlifyForm(t,r){let o=t.join(`
133
- `),n=[{pattern:/<[\w-]*form[\s\S]*?(data-)?netlify/i,name:"standard form element"},{pattern:/<[A-Z][\w]*[\s\S]*?component\s*=\s*["']form["'][\s\S]*?(data-)?netlify/i,name:"component with form prop"}];for(let{pattern:s,name:a}of n){let u=o.match(s);if(u){let i=u.index||0,m=Math.max(0,i-20),c=Math.min(o.length,i+u[0].length+20),l=o.slice(m,c).trim();return l=l.replace(/\s+/g," "),l.length>100&&(l=l.slice(0,97)+"..."),{file:r,snippet:`[${a}] ${l}`}}}return null}};var C=E("cleanup_stage"),kt=async e=>await b(Re(),"cleanup-stage",async()=>zr(e)),Ke=1024*1024*10,zr=async({config:e,diff:t,result:r,duration:o,resultDiff:n,diffBinary:s,resultDiffBinary:a,previewInfo:u})=>{let i={result:r||"Done",duration:o};u&&u.deployId&&(i.deploy_id=u.deployId),u&&u.sourceZipFilename&&(i.result_zip_file_name=u.sourceZipFilename);let m=t||s||n||a;if(m){let c=new xe,l=t||s||"",d=c.scanDiffForForms(l);d.detected?(C.log("Detected Netlify form(s) in diff:"),d.matches.forEach(({file:h,snippet:x})=>{C.log(` - ${h}: ${x}`)}),i.has_netlify_form=!0):C.log("Did not detect Netlify form(s) in diff"),C.log("Did not detect Netlify form(s) in diff")}if(m)try{C.log("Getting pre-signed URLs for diff upload");let c=await nt(e.id,e.sessionId),l=[];(t||s)&&l.push(Ne(c.result.upload_url,s||t).then(()=>{i.result_diff_s3_key=c.result.s3_key,C.log("Successfully uploaded result_diff to S3")})),(n||a)&&l.push(Ne(c.cumulative.upload_url,a||n).then(()=>{i.cumulative_diff_s3_key=c.cumulative.s3_key,C.log("Successfully uploaded cumulative_diff to S3")})),C.log(`Uploading ${l.length} diff(s) to S3 in parallel`),await Promise.all(l),(n||a)&&(C.log("Updating agent runner with cumulative diff S3 key"),await b(Re(),"update-runner",async()=>{await le(e.id,{result_diff_s3_key:c.cumulative.s3_key})}))}catch(c){C.error("S3 upload failed, falling back to inline diffs:",c);let l=Buffer.byteLength(t||s||""),d=Buffer.byteLength(a||n||"");if(l>Ke||d>Ke){let h=`Diffs exceed maximum inline size of ${Ke} bytes.`;throw C.error(h),new Error(h)}i.result_diff=t,i.result_diff_binary=s,(n||a)&&(i.cumulative_diff=n,i.cumulative_diff_binary=a,C.log("Updating agent runner with inline diffs (fallback)"),await b(Re(),"update-runner",async()=>{await le(e.id,{result_diff:n,result_diff_binary:a})}))}else C.log("No diffs to upload");return C.log("Updated agent runner with result"),await Lt(async()=>await b(Re(),"update-runner-session",()=>G(e.id,e.sessionId,i)),{maxRetries:3,baseDelay:1e3,onRetry:(c,l)=>{C.error(`Error updating agent runner session (attempt ${c}):`,l),C.log("Retrying...")}}),C.log("Finished updating agent runner with result"),{sessionUpdate:i}};import{getTracer as Ut,shutdownTracers as Qr,withActiveSpan as Mt}from"@netlify/otel";var en=Zr(import.meta.url),Gt=en("../package.json"),jt=E("pipeline_index"),ve=3,Ds=async({config:e,apiToken:t,cliPath:r="netlify",cwd:o,errorLogsPath:n,filter:s,tracing:a={}})=>{let u,{withStageTimer:i}=at(q.timeUnits.hours(4)),m=await ze(Gt.version,e.id,a);try{await Mt(Ut(),"run-pipeline",{},m,async()=>{let c,{aiGateway:l,context:d,persistSteps:h,runner:x,sha:I}=await i("init",()=>Pt({config:e,apiToken:t,cliPath:r,cwd:o,errorLogsPath:n,filter:s,runnerVersion:Gt.version}),q.timeUnits.minutes(10));u=x.clean,e.sha=I;let{runnerResult:g}=await i("inference",()=>Ie({cliPath:r,config:e,context:d,runner:x.runner,persistSteps:h,aiGateway:l}));await G(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let S=await 
i("deploy",()=>qe({cliPath:r,config:e,context:d,result:g.result,filter:s})),v=g,N=[];if(S.hasChanges&&S.deployError){N.push(Ze(S.deployError));let f=1;for(;f<=ve&&!S.previewInfo;)jt.log(`Deploy attempt had errors. Retrying. ${f}/${ve}`),await Mt(Ut(),"deploy-stage",async T=>{T?.setAttributes({"stage.attempt":f});let{runnerResult:p}=await i(`inference-retry-${f}`,()=>Ie({cliPath:r,config:e,context:d,runner:x.runner,persistSteps:h,aiGateway:l,buildErrors:N,priorAgentSessionId:g.agentSessionId}));v={...p,steps:[...v.steps||[],...p.steps||[]],duration:(v.duration||0)+(p.duration||0)},S=await i(`deploy-retry-${f}`,()=>qe({cliPath:r,config:e,context:d,result:p.result,filter:s})),S.deployError&&N.push(S.deployError),f++});f>ve&&!S.previewInfo&&(c=new Error(`Deploy validation failed after ${ve} attempts`))}let{diff:R,resultDiff:_,previewInfo:A,diffBinary:w,resultDiffBinary:y}=S;if(await i("cleanup",()=>kt({config:e,diff:R,result:v.result,duration:v.duration,resultDiff:_,diffBinary:w,resultDiffBinary:y,previewInfo:A}),q.timeUnits.minutes(10)),c)throw c;process.env.NETLIFY_LOCAL_MODE||(await u?.(),await wt())})}catch(c){jt.error("Got error while running pipeline",c),await u?.();let l=c instanceof Error&&c.message;throw await G(e.id,e.sessionId,{result:l||"Encountered error when running agent",state:"error"}),c}finally{await Qr()}};export{Ds as runPipeline};
130
+ `;import Qr from"process";import{getTracer as qe}from"@netlify/otel";import{getTracer as zr}from"@netlify/otel";var pe=_("deploy"),Dt=async e=>await N(zr(),"create-preview-deploy",async t=>Zr(e,t)),Zr=async({netlify:e,hasRepo:t,skipBuild:r,message:o="Agent Preview",deploySubdomain:n,cliPath:s,filter:a},l)=>{try{let i=["deploy","--message",`"${o}"`,"--json","--draft","--verbose"];t||(pe.log("Deploy: Uploading source zip"),i.push("--upload-source-zip")),n&&i.push("--alias",n),a&&i.push("--filter",a),r?(pe.log("Deploy: Skipping build"),i.push("--no-build")):i.push("--context","deploy-preview");let m=s||"netlify";pe.log(`Running: ${m} ${i.join(" ")}`),l?.setAttributes({cmd:m,args:i});let u=await e.utils.run(m,i,{stdio:["ignore","pipe","pipe"]}),c=JSON.parse(u.stdout.trim());l?.setAttributes({success:!0,deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id}),pe.log(`
131
+ Preview deploy created successfully:`,{deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id});let f={deployId:c.deploy_id,previewUrl:c.deploy_url,logsUrl:c.logs,siteId:c.site_id};return t||(f.sourceZipFilename=c.source_zip_filename),f}catch(i){throw pe.error("Failed to create preview deploy via CLI:",i),l?.setAttributes({success:!1,error:i.message}),i}};var de=_("deploy_stage"),Ke=async e=>await N(qe(),"run-deploy-stage",async()=>en(e)),en=async({cliPath:e,config:t,context:r,result:o,filter:n})=>{let s=await N(qe(),"get-runner-diffs",async()=>await yt({config:t,netlify:r}));if(de.info("Resolved git",{hasChanges:s.hasChanges,ignored:s.ignored??[]}),!s.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:a,resultDiff:l,diffBinary:i,resultDiffBinary:m}=s,u=!0;de.log("Preview deploy condition check:",{resultUndefined:o===void 0,resultType:typeof o,hasChanges:u,wouldCreatePreview:o!==void 0&&u});let c=null;if(o!==void 0&&u)try{let f;try{let h=await N(qe(),"get-runner-session",async()=>await tt(t.id,t.sessionId));h?.title&&(f=h.title)}catch(h){de.warn("Failed to fetch session title, using fallback message:",h.message)}await G(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),c=await Dt({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:f,skipBuild:!1,deploySubdomain:gt(t.id,Qr.env.SITE_NAME),filter:n})}catch(f){return de.warn("Failed to create preview deploy (continuing with agent run):",f),{diff:a,resultDiff:l,hasChanges:u,previewInfo:null,diffBinary:i,resultDiffBinary:m,deployError:f instanceof Error?f.message:String(f)}}return de.log("Git status",{hasDiff:!!a,hasChanges:u}),{diff:a,resultDiff:l,hasChanges:u,previewInfo:c,diffBinary:i,resultDiffBinary:m}};import{getTracer as Se}from"@netlify/otel";async function Lt(e,t){let{maxRetries:r,baseDelay:o,onRetry:n}=t,s;for(let a=1;a<=r;a++)try{return await e()}catch(l){if(s=l,a===r)throw s;n&&n(a,s),await new Promise(i=>setTimeout(i,o*a))}throw s}var Re=class{scanDiffForForms(t){let r=[],o=null,n=[],s=t.split(`
132
+ `);for(let a of s)if(a.startsWith("diff --git")){if(o&&n.length>0){let i=this.containsNetlifyForm(n,o);i&&r.push(i)}let l=a.split(" ");o=l[l.length-1].replace(/^b\//,""),n=[]}else a.startsWith("+")&&!a.startsWith("+++")&&n.push(a.slice(1));if(o&&n.length>0){let a=this.containsNetlifyForm(n,o);a&&r.push(a)}return{detected:r.length>0,matches:r}}containsNetlifyForm(t,r){let o=t.join(`
133
+ `),n=[{pattern:/<[\w-]*form[\s\S]*?(data-)?netlify/i,name:"standard form element"},{pattern:/<[A-Z][\w]*[\s\S]*?component\s*=\s*["']form["'][\s\S]*?(data-)?netlify/i,name:"component with form prop"}];for(let{pattern:s,name:a}of n){let l=o.match(s);if(l){let i=l.index||0,m=Math.max(0,i-20),u=Math.min(o.length,i+l[0].length+20),c=o.slice(m,u).trim();return c=c.replace(/\s+/g," "),c.length>100&&(c=c.slice(0,97)+"..."),{file:r,snippet:`[${a}] ${c}`}}}return null}};var C=_("cleanup_stage"),Ut=async e=>await N(Se(),"cleanup-stage",async()=>tn(e)),We=1024*1024*10,tn=async({config:e,diff:t,result:r,duration:o,resultDiff:n,diffBinary:s,resultDiffBinary:a,previewInfo:l})=>{let i={result:r||"Done",duration:o};l&&l.deployId&&(i.deploy_id=l.deployId),l&&l.sourceZipFilename&&(i.result_zip_file_name=l.sourceZipFilename);let m=t||s||n||a;if(m){let u=new Re,c=t||s||"",f=u.scanDiffForForms(c);f.detected?(C.log("Detected Netlify form(s) in diff:"),f.matches.forEach(({file:h,snippet:w})=>{C.log(` - ${h}: ${w}`)}),i.has_netlify_form=!0):C.log("Did not detect Netlify form(s) in diff"),C.log("Did not detect Netlify form(s) in diff")}if(m)try{C.log("Getting pre-signed URLs for diff upload");let u=await nt(e.id,e.sessionId),c=[];(t||s)&&c.push(Ce(u.result.upload_url,s||t).then(()=>{i.result_diff_s3_key=u.result.s3_key,C.log("Successfully uploaded result_diff to S3")})),(n||a)&&c.push(Ce(u.cumulative.upload_url,a||n).then(()=>{i.cumulative_diff_s3_key=u.cumulative.s3_key,C.log("Successfully uploaded cumulative_diff to S3")})),C.log(`Uploading ${c.length} diff(s) to S3 in parallel`),await Promise.all(c),(n||a)&&(C.log("Updating agent runner with cumulative diff S3 key"),await N(Se(),"update-runner",async()=>{await ce(e.id,{result_diff_s3_key:u.cumulative.s3_key})}))}catch(u){C.error("S3 upload failed, falling back to inline diffs:",u);let c=Buffer.byteLength(t||s||""),f=Buffer.byteLength(a||n||"");if(c>We||f>We){let h=`Diffs exceed maximum inline size of ${We} bytes.`;throw C.error(h),new Error(h)}i.result_diff=t,i.result_diff_binary=s,(n||a)&&(i.cumulative_diff=n,i.cumulative_diff_binary=a,C.log("Updating agent runner with inline diffs (fallback)"),await N(Se(),"update-runner",async()=>{await ce(e.id,{result_diff:n,result_diff_binary:a})}))}else C.log("No diffs to upload");return C.log("Updated agent runner with result"),await Lt(async()=>await N(Se(),"update-runner-session",()=>G(e.id,e.sessionId,i)),{maxRetries:3,baseDelay:1e3,onRetry:(u,c)=>{C.error(`Error updating agent runner session (attempt ${u}):`,c),C.log("Retrying...")}}),C.log("Finished updating agent runner with result"),{sessionUpdate:i}};import{getTracer as Mt,shutdownTracers as nn,withActiveSpan as jt}from"@netlify/otel";var on=rn(import.meta.url),Gt=on("../package.json"),Yt=_("pipeline_index"),Ae=3,Ys=async({config:e,apiToken:t,cliPath:r="netlify",cwd:o,errorLogsPath:n,filter:s,tracing:a={}})=>{let l,{withStageTimer:i}=at(W.timeUnits.hours(4)),m=await ze(Gt.version,e.id,a);try{await jt(Mt(),"run-pipeline",{},m,async()=>{let u,{aiGateway:c,context:f,persistSteps:h,runner:w,sha:T}=await i("init",()=>Ot({config:e,apiToken:t,cliPath:r,cwd:o,errorLogsPath:n,filter:s,runnerVersion:Gt.version}),W.timeUnits.minutes(10));l=w.clean,e.sha=T;let{runnerResult:g}=await i("inference",()=>ve({cliPath:r,config:e,context:f,runner:w.runner,persistSteps:h,aiGateway:c}));await G(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let v=await 
i("deploy",()=>Ke({cliPath:r,config:e,context:f,result:g.result,filter:s})),I=g,b=[];if(v.hasChanges&&v.deployError){b.push(Ze(v.deployError));let y=1;for(;y<=Ae&&!v.previewInfo;)Yt.log(`Deploy attempt had errors. Retrying. ${y}/${Ae}`),await jt(Mt(),"deploy-stage",async d=>{d?.setAttributes({"stage.attempt":y});let{runnerResult:p}=await i(`inference-retry-${y}`,()=>ve({cliPath:r,config:e,context:f,runner:w.runner,persistSteps:h,aiGateway:c,buildErrors:b,priorAgentSessionId:g.agentSessionId}));I={...p,steps:[...I.steps||[],...p.steps||[]],duration:(I.duration||0)+(p.duration||0)},v=await i(`deploy-retry-${y}`,()=>Ke({cliPath:r,config:e,context:f,result:p.result,filter:s})),v.deployError&&b.push(v.deployError),y++});y>Ae&&!v.previewInfo&&(u=new Error(`Deploy validation failed after ${Ae} attempts`))}let{diff:x,resultDiff:E,previewInfo:R,diffBinary:S,resultDiffBinary:A}=v;if(await i("cleanup",()=>Ut({config:e,diff:x,result:I.result,duration:I.duration,resultDiff:E,diffBinary:S,resultDiffBinary:A,previewInfo:R}),W.timeUnits.minutes(10)),u)throw u;process.env.NETLIFY_LOCAL_MODE||(await l?.(),await Tt())})}catch(u){Yt.error("Got error while running pipeline",u),await l?.();let c=u instanceof Error&&u.message;throw await G(e.id,e.sessionId,{result:c||"Encountered error when running agent",state:"error"}),u}finally{await nn()}};export{Ys as runPipeline};
134
134
  //# sourceMappingURL=index.js.map
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@netlify/agent-runner-cli",
3
3
  "type": "module",
4
- "version": "1.48.2",
4
+ "version": "1.49.0",
5
5
  "description": "CLI tool for running Netlify agents",
6
6
  "main": "./dist/index.js",
7
7
  "types": "./dist/index.d.ts",
@@ -73,7 +73,7 @@
73
73
  },
74
74
  "dependencies": {
75
75
  "@anthropic-ai/claude-code": "2.0.42",
76
- "@google/gemini-cli": "0.15.4",
76
+ "@google/gemini-cli": "0.16.0",
77
77
  "@netlify/otel": "^5.0.0",
78
78
  "@opentelemetry/exporter-trace-otlp-grpc": "^0.57.0",
79
79
  "execa": "^8.0.0",
@@ -1,87 +0,0 @@
1
- diff --git a/node_modules/@google/gemini-cli/dist/src/nonInteractiveCli.js b/node_modules/@google/gemini-cli/dist/src/nonInteractiveCli.js
2
- index 874ec89..6eb9a7d 100644
3
- --- a/node_modules/@google/gemini-cli/dist/src/nonInteractiveCli.js
4
- +++ b/node_modules/@google/gemini-cli/dist/src/nonInteractiveCli.js
5
- @@ -5,9 +5,22 @@
6
- */
7
- import { executeToolCall, shutdownTelemetry, isTelemetrySdkInitialized, GeminiEventType, ToolErrorType, } from '@google/gemini-cli-core';
8
- import { parseAndFormatApiError } from './ui/utils/errorParsing.js';
9
- +function writeAsync(data) {
10
- + return new Promise((resolve, reject) => {
11
- + const canWrite = process.stdout.write(data, (err) => {
12
- + if (err) reject(err);
13
- + else resolve();
14
- + });
15
- + if (!canWrite) {
16
- + process.stdout.once('drain', resolve);
17
- + }
18
- + });
19
- +}
20
- export async function runNonInteractive(config, input, prompt_id) {
21
- await config.initialize();
22
- // Handle EPIPE errors when the output is piped to a command that closes early.
23
- + const startTime = Date.now();
24
- + let lastContent = '';
25
- process.stdout.on('error', (err) => {
26
- if (err.code === 'EPIPE') {
27
- // Exit gracefully if the pipe is closed.
28
- @@ -29,15 +42,27 @@ export async function runNonInteractive(config, input, prompt_id) {
29
- }
30
- const functionCalls = [];
31
- const responseStream = geminiClient.sendMessageStream(currentMessages[0]?.parts || [], abortController.signal, prompt_id);
32
- + let contentBuffer = '';
33
- + const flushContentBuffer = async () => {
34
- + if (contentBuffer) {
35
- + await writeAsync(JSON.stringify({ type: 'content', value: contentBuffer }) + '\n');
36
- + lastContent = contentBuffer;
37
- + contentBuffer = '';
38
- + }
39
- + }
40
- for await (const event of responseStream) {
41
- if (abortController.signal.aborted) {
42
- console.error('Operation cancelled.');
43
- return;
44
- }
45
- if (event.type === GeminiEventType.Content) {
46
- - process.stdout.write(event.value);
47
- + contentBuffer += event.value;
48
- + } else {
49
- + await flushContentBuffer();
50
- + await writeAsync(JSON.stringify(event) + '\n');
51
- }
52
- - else if (event.type === GeminiEventType.ToolCallRequest) {
53
- +
54
- + if (event.type === GeminiEventType.ToolCallRequest) {
55
- const toolCallRequest = event.value;
56
- const fc = {
57
- name: toolCallRequest.name,
58
- @@ -47,6 +72,7 @@ export async function runNonInteractive(config, input, prompt_id) {
59
- functionCalls.push(fc);
60
- }
61
- }
62
- + await flushContentBuffer();
63
- if (functionCalls.length > 0) {
64
- const toolResponseParts = [];
65
- for (const fc of functionCalls) {
66
- @@ -59,6 +85,9 @@ export async function runNonInteractive(config, input, prompt_id) {
67
- prompt_id,
68
- };
69
- const toolResponse = await executeToolCall(config, requestInfo, toolRegistry, abortController.signal);
70
- +
71
- + await writeAsync(JSON.stringify({type: 'tool_result', value: toolResponse}) + '\n');
72
- +
73
- if (toolResponse.error) {
74
- console.error(`Error executing tool ${fc.name}: ${toolResponse.resultDisplay || toolResponse.error.message}`);
75
- if (toolResponse.errorType === ToolErrorType.UNHANDLED_EXCEPTION)
76
- @@ -81,7 +110,10 @@ export async function runNonInteractive(config, input, prompt_id) {
77
- currentMessages = [{ role: 'user', parts: toolResponseParts }];
78
- }
79
- else {
80
- - process.stdout.write('\n'); // Ensure a final newline
81
- + const endTime = Date.now();
82
- + const duration = endTime - startTime;
83
- + const result = { type: 'result', duration_ms: duration, value: lastContent };
84
- + await writeAsync(JSON.stringify(result) + '\n');
85
- return;
86
- }
87
- }