@netlify/agent-runner-cli 1.46.0 → 1.47.0
This diff covers publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- package/dist/bin-local.js +44 -40
- package/dist/bin.js +41 -37
- package/dist/index.js +41 -37
- package/package.json +4 -4
package/dist/index.js
CHANGED
@@ -1,35 +1,39 @@
- 1 | import{createRequire as
- 2 | `),r=[],o=-1,n=0;for(;n<t.length;){let
- 3 | `)),o=
+ 1 |
import{createRequire as Br}from"module";import{createTracerProvider as Ut}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as Ke}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as Mt}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as Gt}from"@netlify/otel";import{propagation as qe,context as We,W3CTraceContextPropagator as jt}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as Yt}from"@opentelemetry/exporter-trace-otlp-grpc";function w(e){let t=!process.env.VITEST;return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var ve=w("tracing"),Ve=async(e,t,r)=>(await Ut({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new Ke(new Se),new Ke(new Yt({url:r.exporterUrl}))],instrumentations:[new Mt({skipHeaders:!0})]}),r.traceparent?(qe.setGlobalPropagator(new jt),qe.extract(We.active(),{traceparent:r.traceparent,isRemote:!0})):We.active());function A(e,t,r){return ve.log(`\u23F3 TRACE: ${t} starting...`),Gt(e,t,r)}var Se=class{export(t,r){for(let o of t)this.logSpan(o);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,o=t.attributes,n=[];for(let[c,i]of Object.entries(o))c.includes("duration")&&typeof i=="number"?n.push(`${c}=${i.toFixed(2)}ms`):n.push(`${c}=${i}`);let s=t.status?.code===2?"\u274C":"\u2705",a=n.length>0?` [${n.join(", ")}]`:"";ve.log(`${s} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${a}`),t.status?.code===2&&t.status.message&&ve.log(` \u274C Error: ${t.status.message}`)}};var Bt=["error","failed","exception","fatal","panic","abort","crash"];function Je(e){let t=e.split(`
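For readers who do not want to parse the minified line above, here is a readable sketch of the small logger factory it begins with (minified as `w(e)`). The `[AR]` prefixes and the `VITEST` check are taken from the bundle; the function and variable names are reconstructions, not the package's original source.

```js
// Sketch of the bundle's logger factory: every log line is prefixed with
// "[AR]-[<scope>]" and output is suppressed while running under Vitest.
function createLogger(scope) {
  const enabled = !process.env.VITEST;
  return {
    log: (...args) => enabled && console.log(`[AR]-[${scope}]`, ...args),
    error: (...args) => enabled && console.error(`[AR]-[ERROR]-[${scope}]`, ...args),
    warn: (...args) => enabled && console.warn(`[AR]-[WARN]-[${scope}]`, ...args),
    info: (...args) => enabled && console.info(`[AR]-[${scope}]`, ...args),
    debug: (...args) => enabled && console.debug(`[AR]-[DEBUG]-[${scope}]`, ...args),
  };
}

const log = createLogger('tracing');
log.log('example message'); // prints: [AR]-[tracing] example message
```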
+ 2 |
`),r=[],o=-1,n=0;for(;n<t.length;){let c=t[n].slice(0,500).toLowerCase();if(Bt.some(h=>c.includes(h))){let h=Math.max(0,n-10,o+1),l=Math.min(t.length-1,n+20),d=[];for(let g=h;g<=l;g++)d.push(t[g]);r.push(d.join(`
+ 3 |
`)),o=l,n=l+1}else n++}if(r.length===0)return e;let s=r.map((a,c)=>`<extracted_error_chunk order="${c+1}">
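The added lines above also carry the build-log error extractor (minified as `Je`, with the keyword list `Bt`). A readable sketch of its logic follows; the descriptive names are substituted for the minified identifiers, and the 80% fallback appears a few rows further down in the diff.

```js
const ERROR_KEYWORDS = ['error', 'failed', 'exception', 'fatal', 'panic', 'abort', 'crash'];

// Scan a build log for error-looking lines and keep only those lines plus
// surrounding context, wrapped in <extracted_error_chunk> tags. If the
// extracted chunks end up longer than 80% of the original log, the whole
// log is returned instead.
function extractErrorChunks(logText) {
  const lines = logText.split('\n');
  const chunks = [];
  let lastCapturedIndex = -1;
  let i = 0;
  while (i < lines.length) {
    const probe = lines[i].slice(0, 500).toLowerCase();
    if (ERROR_KEYWORDS.some((keyword) => probe.includes(keyword))) {
      const start = Math.max(0, i - 10, lastCapturedIndex + 1); // up to 10 lines of leading context
      const end = Math.min(lines.length - 1, i + 20);           // up to 20 lines of trailing context
      chunks.push(lines.slice(start, end + 1).join('\n'));
      lastCapturedIndex = end;
      i = end + 1;
    } else {
      i++;
    }
  }
  if (chunks.length === 0) return logText;
  const wrapped = chunks
    .map((chunk, idx) => `<extracted_error_chunk order="${idx + 1}">\n${chunk}\n</extracted_error_chunk>`)
    .join('\n\n');
  return wrapped.length > logText.length * 0.8 ? logText : wrapped;
}
```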
  4 4 | ${a}
  5 5 | </extracted_error_chunk>`).join(`
  6 6 |
- 7 |
`);return s.length>e.length*.8?e:s}import ye from"process";import{getTracer as Ir}from"@netlify/otel";import se from"process";var ue=se.env.NETLIFY_API_URL,ce=se.env.NETLIFY_API_TOKEN,j=w("api"),Re=()=>se.env.NETLIFY_LOCAL_MODE==="true",pe=async(e,t={})=>{if(!ue||!ce)throw new Error("No API URL or token");let r=new URL(e,ue),o={...t,headers:{...t.headers,Authorization:`Bearer ${ce}`}};se.env.AGENT_RUNNERS_DEBUG==="true"&&(o.headers["x-nf-debug-logging"]="true"),t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),s=n.ok&&n.status<=299;if(se.env.AGENT_RUNNERS_DEBUG==="true")j.log(`Response headers for ${r}:`),n.headers.forEach((l,i)=>{j.log(` ${i}: ${l}`)});else{let l=n.headers.get("x-request-id")||n.headers.get("x-nf-request-id");j.log(`Request ID for ${r}: ${l||"N/A"}`)}if(s||j.error(`Got status ${n.status} for request ${r}`),t.raw){if(!s)throw n;return n}let a=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!s)throw a;return a},Ke=e=>{j.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(ue=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(ce=e.constants.NETLIFY_API_TOKEN)},Ve=()=>({apiUrl:ue,token:ce}),de=async(e,t)=>Re()?(j.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):pe(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),k=async(e,t,r)=>Re()?(j.log("Mock API: updateRunnerSession called",{runnerId:e,sessionId:t,data:r}),{id:e,sessionId:t,...r}):pe(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var We=async(e,t)=>Re()?(j.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):pe(`/api/v1/agent_runners/${e}/sessions/${t}`),Je=(e,t,r)=>pe(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r}});var W=w("ai_gateway"),ve=null;var Xe=async()=>{if(ve)return ve;W.log("Fetching available AI gateway providers");let e=await fetch(`${Ve().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return ve=t,W.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},kt=async(e,t)=>{let o=(await Xe()).providers[e];if(!o)return W.log(`Provider '${e}' not found`),!1;let n=o.models.includes(t);return W.log(`Model validation for ${e}/${t}`,{isAvailable:n}),n},ze=async({netlify:e,config:t})=>{let r,o,n,s,a=e.constants?.SITE_ID;if(!a)throw new Error("No site id");let l=async()=>{clearTimeout(n),W.log("Requesting AI gateway information");let i=await Je(a,t.id,t.sessionId);if({token:r,url:s}=i,o=i.expires_at?i.expires_at*1e3:void 0,W.log("Got AI gateway information",{token:!!r,expiresAt:o,url:s}),o){let m=o-Date.now()-6e4;m>0&&(n=setTimeout(()=>{l()},m))}};return await Promise.all([l(),Xe()]),{get url(){return s},get token(){return r},isModelAvailableForProvider:kt}};import M from"process";import fe from"path";import Ae from"fs";import{fileURLToPath as Yt}from"url";import{execa as Ht,execaCommand as gn}from"execa";import{Transform as Ut}from"stream";var Mt=new 
Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","ERROR_LOGS_PATH","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),Gt=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function jt(){return Object.entries(process.env).filter(([e,t])=>!(!t||Mt.has(e)||Gt.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function U(e){if(typeof e!="string")return e;let t=jt();if(t.length===0)return e;let r=e;return t.forEach(o=>{let n=new RegExp(Bt(o),"g");r=r.replace(n,"******")}),r}function Bt(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var J=class extends Ut{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,o){let n=t.toString(),s=U(n);o(null,s)}};function Ze(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(o,n,s){let a=typeof o=="string"?U(o):o;return typeof n=="function"?t(a,n):t(a,n,s)},process.stderr.write=function(o,n,s){let a=typeof o=="string"?U(o):o;return typeof n=="function"?r(a,n):r(a,n,s)}}var ie=null,Qe=e=>(ie&&ie.destroy(),ie=new Y({totalAllowedTime:e}),ie),et=()=>ie;var Y=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,o)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let n=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),s=null,a=null;o!==void 0&&(a=new Promise((l,i)=>{s=setTimeout(()=>{i(new Error(`${t} stage exceeded its maximum duration of ${o}ms`))},o)}));try{return a?await Promise.race([r(),a]):await r()}finally{n(),s&&clearTimeout(s)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var qt=Yt(import.meta.url),Kt=fe.dirname(qt),ge=w("shell"),Ne=new Set,Vt={preferLocal:!0},tt=(e,t,r)=>{let[o,n]=Wt(t,r),s={...Vt,...n},a=Ht(e,o,s);return Jt(a,s),zt(a),a};var Wt=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},Jt=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(M.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new J).pipe(M.stdout),e.stdout?.pipe(new J).pipe(M.stdout),e.stderr?.pipe(new J).pipe(M.stderr);return}e.stdout?.pipe(M.stdout),e.stderr?.pipe(M.stderr)},rt=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(M.kill(-e.pid,t),ge.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return ge.error("Error killing process:",r),!1}},Xt=e=>rt(e,"SIGKILL"),zt=e=>{Ne.add(e);let t=et();if(t){let r=t.onTimesUp(()=>{ge.log(`Global timer expired, killing process ${e.pid}`),rt(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(ge.log(`Force killing process ${e.pid} after timeout`),Xt(e))},5e3)});e.on("exit",()=>{Ne.delete(e),r()}),e.on("error",()=>{Ne.delete(e),r()})}};function X(e,t){if(M.env.NODE_PATH){let n=fe.join(M.env.NODE_PATH,".bin",t);if(Ae.existsSync(n))return n}let r=fe.join(e,"node_modules",".bin",t);if(Ae.existsSync(r))return r;let o=fe.join(Kt,"..","node_modules",".bin",t);if(Ae.existsSync(o))return o}var nt="netlify-agent-runner-context.md",Se="task-history",be="netlify-context",D=".netlify",z="results.md",Ce="assets";var Zt=w("utils"),Qt=e=>new Promise(t=>{setTimeout(t,e)}),ot=(e,t=3e3)=>{let r=!1,o=null,n=[],s=null,a=(...l)=>{if(r)return o=l,new Promise(u=>{n.push(u)});r=!0;let i,m=new Promise(u=>{i=u});return s=(async()=>{await Promise.resolve();let u=await e(...l);for(i(u);;){if(await Qt(t),!o)return r=!1,s=null,u;let c=o,f=n;o=null,n=[],u=await e(...c),f.forEach(E=>{E(u)})}})(),m};return a.flush=async()=>{if((r||o)&&s)return await s,a.flush()},a},me=(e,t,r=!1)=>{let 
o=null,n=null,s=null,a=function(...l){n=l,s=this;let i=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(s,n),n=null,s=null)},t),i&&(e.apply(s,n),n=null,s=null)};return a.cancel=()=>{clearTimeout(o),o=null,n=null,s=null},a.flush=()=>{if(o){clearTimeout(o);let l=n,i=s;o=null,n=null,s=null,e.apply(i,l)}},a},st=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(o){t&&(r?.error?r.error("Could not parse JSON",o):Zt.error("Could not parse JSON",o))}},it=(e,t)=>{let n=".netlify.app",s="agent-";if(!t)return`${s}${e.slice(0,6)}`;let l=`--${t}${n}`;if(l.length>55)return"";let i=60-l.length;if(i<=0)return"";if(i>=s.length+6){let m=Math.min(i-s.length,e.length);return`${s}${e.slice(0,m)}`}return e.slice(0,i)};import{Buffer as at}from"buffer";import er from"path";var lt=async({config:e,netlify:t})=>{let r=await rr(t),{hasChanges:o}=r,{status:n}=r;if(!o)return{hasChanges:!1};let s=await nr(n);await t.utils.run("git",["add",".",...s]);let a={stdio:["ignore","pipe","pipe"]},i=(await t.utils.run("git",["diff","--staged"],a)).stdout;if(o=!!i,!o)return{hasChanges:!1,ignored:s};let u=(await t.utils.run("git",["diff","--staged","--binary"],a)).stdout,c,f;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await t.utils.run("git",["commit","-m","Agent runner"]),c=(await t.utils.run("git",["diff",e.sha,"HEAD"],a)).stdout;let g=(await t.utils.run("git",["diff",e.sha,"HEAD","--binary"],a)).stdout;c!==g&&(f=at.from(g).toString("base64"))}let E={hasChanges:!0,diff:i,resultDiff:c,ignored:s};return i!==u&&(E.diffBinary=at.from(u).toString("base64")),f&&(E.resultDiffBinary=f),E},tr=["?? mise.toml","?? deno.lock",/\?\? .+?\.log/],rr=async e=>{let t=await e.utils.run("git",["status","-s"]);return{hasChanges:(t.stdout.trim().length===0?[]:t.stdout.split(`
- 8 | `).filter(n=>!
- 9 | `).forEach(o=>{t.forEach(s=>{[`?? ${s}`,`?? ${s}${
- 10 |
- 11 | `)}
- 12 | ${
- 13 | \`\`\``),_({title:oe,message:
- 14 | `):
- 15 |
+ 7 |
`);return s.length>e.length*.8?e:s}import _e from"process";import{getTracer as Sr}from"@netlify/otel";import se from"process";var de=se.env.NETLIFY_API_URL,pe=se.env.NETLIFY_API_TOKEN,M=w("api"),fe=()=>se.env.NETLIFY_LOCAL_MODE==="true",ie=async(e,t={})=>{if(!de||!pe)throw new Error("No API URL or token");let r=new URL(e,de),o={...t,headers:{...t.headers,Authorization:`Bearer ${pe}`}};se.env.AGENT_RUNNERS_DEBUG==="true"&&(o.headers["x-nf-debug-logging"]="true"),t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),s=n.ok&&n.status<=299;if(se.env.AGENT_RUNNERS_DEBUG==="true")M.log(`Response headers for ${r}:`),n.headers.forEach((c,i)=>{M.log(` ${i}: ${c}`)});else{let c=n.headers.get("x-request-id")||n.headers.get("x-nf-request-id");M.log(`Request ID for ${r}: ${c||"N/A"}`)}if(s||M.error(`Got status ${n.status} for request ${r}`),t.raw){if(!s)throw n;return n}let a=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!s)throw a;return a},Xe=e=>{M.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(de=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(pe=e.constants.NETLIFY_API_TOKEN)},ze=()=>({apiUrl:de,token:pe}),ae=async(e,t)=>fe()?(M.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):ie(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),G=async(e,t,r)=>fe()?(M.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):ie(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var Ze=async(e,t)=>fe()?(M.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):ie(`/api/v1/agent_runners/${e}/sessions/${t}`),Qe=(e,t,r)=>ie(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r}}),et=async(e,t)=>fe()?(M.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):ie(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),Ae=async(e,t)=>{M.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var z=w("ai_gateway"),Ne=null;var tt=async()=>{if(Ne)return Ne;z.log("Fetching available AI gateway providers");let e=await fetch(`${ze().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return Ne=t,z.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},Ht=async(e,t)=>{let o=(await tt()).providers[e];if(!o)return z.log(`Provider '${e}' not found`),!1;let n=o.models.includes(t);return z.log(`Model validation for ${e}/${t}`,{isAvailable:n}),n},rt=async({netlify:e,config:t})=>{let r,o,n,s,a=e.constants?.SITE_ID;if(!a)throw new Error("No site id");let c=async()=>{clearTimeout(n),z.log("Requesting AI gateway information");let i=await Qe(a,t.id,t.sessionId);if({token:r,url:s}=i,o=i.expires_at?i.expires_at*1e3:void 0,z.log("Got AI gateway information",{token:!!r,expiresAt:o,url:s}),o){let 
h=o-Date.now()-6e4;h>0&&(n=setTimeout(()=>{c()},h))}};return await Promise.all([c(),tt()]),{get url(){return s},get token(){return r},isModelAvailableForProvider:Ht}};import Y from"process";import ge from"path";import be from"fs";import{fileURLToPath as Xt}from"url";import{execa as zt,execaCommand as En}from"execa";import{Transform as Kt}from"stream";var qt=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","ERROR_LOGS_PATH","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),Wt=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function Vt(){return Object.entries(process.env).filter(([e,t])=>!(!t||qt.has(e)||Wt.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function j(e){if(typeof e!="string")return e;let t=Vt();if(t.length===0)return e;let r=e;return t.forEach(o=>{let n=new RegExp(Jt(o),"g");r=r.replace(n,"******")}),r}function Jt(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var Z=class extends Kt{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,o){let n=t.toString(),s=j(n);o(null,s)}};function nt(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(o,n,s){let a=typeof o=="string"?j(o):o;return typeof n=="function"?t(a,n):t(a,n,s)},process.stderr.write=function(o,n,s){let a=typeof o=="string"?j(o):o;return typeof n=="function"?r(a,n):r(a,n,s)}}var le=null,ot=e=>(le&&le.destroy(),le=new q({totalAllowedTime:e}),le),st=()=>le;var q=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,o)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let n=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),s=null,a=null;o!==void 0&&(a=new Promise((c,i)=>{s=setTimeout(()=>{i(new Error(`${t} stage exceeded its maximum duration of ${o}ms`))},o)}));try{return a?await Promise.race([r(),a]):await r()}finally{n(),s&&clearTimeout(s)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var Zt=Xt(import.meta.url),Qt=ge.dirname(Zt),me=w("shell"),Ce=new Set,er={preferLocal:!0},it=(e,t,r)=>{let[o,n]=tr(t,r),s={...er,...n},a=zt(e,o,s);return rr(a,s),or(a),a};var tr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},rr=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(Y.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new Z).pipe(Y.stdout),e.stdout?.pipe(new Z).pipe(Y.stdout),e.stderr?.pipe(new Z).pipe(Y.stderr);return}e.stdout?.pipe(Y.stdout),e.stderr?.pipe(Y.stderr)},at=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(Y.kill(-e.pid,t),me.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return me.error("Error killing process:",r),!1}},nr=e=>at(e,"SIGKILL"),or=e=>{Ce.add(e);let t=st();if(t){let r=t.onTimesUp(()=>{me.log(`Global timer expired, killing process ${e.pid}`),at(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(me.log(`Force killing process ${e.pid} after timeout`),nr(e))},5e3)});e.on("exit",()=>{Ce.delete(e),r()}),e.on("error",()=>{Ce.delete(e),r()})}};function W(e,t){if(Y.env.NODE_PATH){let n=ge.join(Y.env.NODE_PATH,".bin",t);if(be.existsSync(n))return n}let r=ge.join(e,"node_modules",".bin",t);if(be.existsSync(r))return r;let o=ge.join(Qt,"..","node_modules",".bin",t);if(be.existsSync(o))return o}var lt="netlify-agent-runner-context.md",Pe="task-history",Oe="netlify-context",F=".netlify",Q="results.md",$e="assets";var sr=w("utils"),ir=e=>new Promise(t=>{setTimeout(t,e)}),ut=(e,t=3e3)=>{let r=!1,o=null,n=[],s=null,a=(...c)=>{if(r)return o=c,new Promise(l=>{n.push(l)});r=!0;let i,h=new Promise(l=>{i=l});return s=(async()=>{await Promise.resolve();let l=await e(...c);for(i(l);;){if(await ir(t),!o)return r=!1,s=null,l;let d=o,g=n;o=null,n=[],l=await e(...d),g.forEach(E=>{E(l)})}})(),h};return a.flush=async()=>{if((r||o)&&s)return await s,a.flush()},a},he=(e,t,r=!1)=>{let 
o=null,n=null,s=null,a=function(...c){n=c,s=this;let i=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(s,n),n=null,s=null)},t),i&&(e.apply(s,n),n=null,s=null)};return a.cancel=()=>{clearTimeout(o),o=null,n=null,s=null},a.flush=()=>{if(o){clearTimeout(o);let c=n,i=s;o=null,n=null,s=null,e.apply(i,c)}},a},ct=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(o){t&&(r?.error?r.error("Could not parse JSON",o):sr.error("Could not parse JSON",o))}},dt=(e,t)=>{let n=".netlify.app",s="agent-";if(!t)return`${s}${e.slice(0,6)}`;let c=`--${t}${n}`;if(c.length>55)return"";let i=60-c.length;if(i<=0)return"";if(i>=s.length+6){let h=Math.min(i-s.length,e.length);return`${s}${e.slice(0,h)}`}return e.slice(0,i)};import{Buffer as pt}from"buffer";import ar from"path";var ft=async({config:e,netlify:t})=>{let r=await ur(t),{hasChanges:o}=r,{status:n}=r;if(!o)return{hasChanges:!1};let s=await cr(n);await t.utils.run("git",["add",".",...s]);let a={stdio:["ignore","pipe","pipe"]},i=(await t.utils.run("git",["diff","--staged"],a)).stdout;if(o=!!i,!o)return{hasChanges:!1,ignored:s};let l=(await t.utils.run("git",["diff","--staged","--binary"],a)).stdout,d,g;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await t.utils.run("git",["commit","-m","Agent runner"]),d=(await t.utils.run("git",["diff",e.sha,"HEAD"],a)).stdout;let m=(await t.utils.run("git",["diff",e.sha,"HEAD","--binary"],a)).stdout;d!==m&&(g=pt.from(m).toString("base64"))}let E={hasChanges:!0,diff:i,resultDiff:d,ignored:s};return i!==l&&(E.diffBinary=pt.from(l).toString("base64")),g&&(E.resultDiffBinary=g),E},lr=["?? mise.toml","?? deno.lock","?? .npmrc",/\?\? .+?\.log/],ur=async e=>{let t=await e.utils.run("git",["status","-s"]);return{hasChanges:(t.stdout.trim().length===0?[]:t.stdout.split(`
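The added block ending above contains the log-masking layer. A readable sketch of the approach follows, assuming reconstructed names (the bundle only exposes minified identifiers) and an abbreviated allow-list.

```js
// Values of environment variables are treated as potential secrets unless the
// variable name is allow-listed (PATH, HOME, CI, ...), the value is a
// well-known word ("true", "deploy", ...), it is numeric, or it is shorter
// than 5 characters. Matching values are replaced with "******" in output.
const ALLOWED_NAMES = new Set(['NODE_ENV', 'PATH', 'HOME', 'CI' /* ...trimmed... */]);
const ALLOWED_VALUES = new Set(['true', 'false', 'undefined', 'null', 'deploy', '']);

function sensitiveEnvValues() {
  return Object.entries(process.env)
    .filter(([name, value]) =>
      value &&
      !ALLOWED_NAMES.has(name) &&
      !ALLOWED_VALUES.has(value) &&
      Number.isNaN(Number(value)) &&
      value.length >= 5
    )
    .map(([, value]) => value);
}

const escapeRegExp = (s) => s.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');

function maskSecrets(text) {
  if (typeof text !== 'string') return text;
  let masked = text;
  for (const value of sensitiveEnvValues()) {
    masked = masked.replace(new RegExp(escapeRegExp(value), 'g'), '******');
  }
  return masked;
}

// In the bundle, process.stdout.write and process.stderr.write are patched to
// run every string chunk through this masking step unless
// NETLIFY_MASK_LOGS === "false"; a Transform stream does the same for piped
// child-process output.
```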
+ 8 |
`).filter(n=>!lr.some(s=>s instanceof RegExp?s.test(n):n===s))).length!==0,status:t.stdout}};var gt=async e=>{let{stdout:t}=await e.utils.run("git",["rev-parse","HEAD"]);return t.trim()},mt=async e=>{let{stdout:t}=await e.utils.run("git",["rev-list","--max-parents=0","HEAD"]);return t.trim()},cr=async e=>{let t=[".netlify","mise.toml","deno.lock",".npmrc","node_modules"],r=[];return e.split(`
+ 9 |
`).forEach(o=>{t.forEach(s=>{[`?? ${s}`,`?? ${s}${ar.sep}`].some(c=>o.startsWith(c))&&r.push(`:!${s}`)});let n=o.match(/\?\? (.+?)\.log$/)?.[1];n&&r.push(`:!${n}.log`)}),r};import pr from"fs/promises";import fr from"os";import Et from"path";import V from"process";import gr from"readline";import De from"path";import dr from"fs/promises";var Fe=w("agent-output-utils");async function ee({initialResult:e,agentName:t,hasError:r}){let o="",n=De.join(process.cwd(),F,Q);try{let s=await dr.readFile(n,"utf-8");s&&(o=s,Fe.log(`Pulled result from ${De.relative(process.cwd(),n)}`))}catch{Fe.log(`No results file found at ${De.relative(process.cwd(),n)}`)}return o||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function te({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,o=r?.replace(/\s+/g," ").trim().toLowerCase()||"",n="";return o?.includes("ai gateway is not available for your account")||o?.includes("ai gateway is not enabled for your account")?n="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":o?.includes("error when talking to gemini api")?n="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(o?.includes("connection closed prematurely")||o?.includes("499")&&t.toLowerCase().includes("gemini"))&&(n=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),o?.includes("request timed out")&&(n=`The ${t} API request's have timed out. Please try again or use a different available agent.`),o?.includes("network error")&&(n=`The ${t} agent is having network issues. 
Please try again or use a different available agent.`),n&&Fe.log(`Providing updated error messsage: ${n}, replacing original error: ${r}`),n||r||void 0}function re(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error"))}var k=w("runner_claude"),ht="Claude Code",ye="claude-sonnet-4-5-20250929",yt=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,mr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(k.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(k.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(k.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function ke({config:e,netlify:t,persistSteps:r,aiGateway:o,continueSession:n,priorAgentSessionId:s}){let a=e,{accountType:c,prompt:i,modelVersionOverrides:h}=a,{model:l}=a,d="";if(o){let{token:u,url:y}=o;if(!u||!y)throw new Error("No token or url provided from AI Gateway");if(h?.claude){let p=h?.claude?.[c];if(p){if(!await o.isModelAvailableForProvider("anthropic",p))throw new Error(`Model override '${p}' is not available for anthropic provider`);l=p}}else if(l){if(!await o.isModelAvailableForProvider("anthropic",l))throw new Error(`Model '${l}' is not available for anthropic provider`)}else await o.isModelAvailableForProvider("anthropic",ye)?(l=ye,k.log(`Using default model: ${ye}`)):k.log(`Default model ${ye} is not available, proceeding without model specification`);V.env.ANTHROPIC_API_KEY=u,V.env.ANTHROPIC_BASE_URL=y}else if(!V.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let g=[],E=[],I={},T=0,m=0,R,v,N=[W(V.cwd(),"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose",...l?["--model",l]:[],...n?["--continue"]:[],...n&&s?["--resume",s]:[],"-p",i],C=`${V.env.NVM_BIN}/node`;k.log(`Running ${C} ${N.join(" ")}`);let b=t.utils.run(C,N,{all:!0,env:V.env});b.stdin?.end();let S=he(()=>{r?.({steps:g,duration:m})},250),_=(u,y)=>{let p={...u,id:T};T+=1,E.push(p),g.push(p),y||S.flush(),S(),y&&S.flush()},f=gr.createInterface({input:b.all});return f.on("error",u=>{k.error("Readline interface error",{error:u.message,stack:u.stack})}),f.on("line",u=>{let y=null;try{y=JSON.parse(u)}catch{k.log("Could not parse line",u)}y?.session_id&&y.session_id!==d&&(d=y.session_id),Array.isArray(y?.message?.content)?y.message.content.forEach(p=>{switch(p.type){case"text":{p.text&&_({message:p.text});break}case"image":{typeof p.source=="object"&&p.source&&p.source.type==="base64"&&p.source.media_type?_({message:``}):k.log(`Unsupported image type ${p.source?.type}`,p.source);break}case"tool_use":{if(p.name==="Task"){let 
x=p.input?.description&&`\`${p.input.description}\``;_({title:[yt(p.name),x].filter(Boolean).join(" ")})}else p.id&&(I[p.id]=p);S.flush();break}case"tool_result":{let x=p.tool_use_id?I[p.tool_use_id]:void 0,oe;if(x){let X=x.input?.file_path&&Et.relative(V.cwd(),x.input.file_path),P=X&&`\`${X}\``;oe=[yt(x.name||""),P].filter(Boolean).join(" ")}let He=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(x?.name||""),H;if(typeof p.content=="string")H=p.content;else if(Array.isArray(p.content)){let X=[];p.content.forEach(P=>{P?.type==="text"&&typeof P.text=="string"?X.push(P.text):P?.type==="image"&&typeof P.source=="object"&&P.source?P.source.type==="base64"&&P.source.media_type?X.push(``):k.log(`Unsupported image type ${P.source.type}`,P.source):k.log(`Unsupported block type ${P?.type}`)}),H=X.join(`
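The Claude runner in the lines above resolves which model to request through the AI Gateway before exporting ANTHROPIC_API_KEY and ANTHROPIC_BASE_URL and spawning the CLI. A sketch of that resolution order, with reconstructed names; the availability check maps to the gateway's `isModelAvailableForProvider` helper seen in the bundle.

```js
const DEFAULT_CLAUDE_MODEL = 'claude-sonnet-4-5-20250929';

// Resolution order: per-account override > explicitly requested model > default,
// each validated against the AI Gateway's provider/model list before use.
async function resolveClaudeModel({ aiGateway, accountType, requestedModel, modelVersionOverrides }) {
  const override = modelVersionOverrides?.claude?.[accountType];
  if (override) {
    if (!(await aiGateway.isModelAvailableForProvider('anthropic', override))) {
      throw new Error(`Model override '${override}' is not available for anthropic provider`);
    }
    return override;
  }
  if (requestedModel) {
    if (!(await aiGateway.isModelAvailableForProvider('anthropic', requestedModel))) {
      throw new Error(`Model '${requestedModel}' is not available for anthropic provider`);
    }
    return requestedModel;
  }
  if (await aiGateway.isModelAvailableForProvider('anthropic', DEFAULT_CLAUDE_MODEL)) {
    return DEFAULT_CLAUDE_MODEL;
  }
  return undefined; // proceed without a --model flag and let the CLI decide
}
```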
+ 10 |
+ 11 | `)}He&&H&&(H=`\`\`\`
+ 12 | ${H.trim()}
+ 13 |
\`\`\``),_({title:oe,message:H},!0);break}case"thinking":{p.thinking&&_({title:"Thinking",message:p.thinking},!0);break}default:k.log(`Message content type is not supported ${p.type}`,p)}}):y?.type==="result"&&(m=y.duration_ms||0,y.is_error?v=y.result:R=y.result,[E,g].forEach(p=>{p[p.length-1]?.message===R&&p.pop()}))}),await b.catch(u=>{({error:v,result:R}=mr({catchError:u,runCmd:b,error:v,result:R,runnerName:"Claude"}))}),f.close(),S.flush(),{steps:E,duration:m,result:await ee({initialResult:R,agentName:ht,hasError:!!v}),error:te({error:v,agentName:ht}),isRetryableError:re(v),agentSessionId:d}}var _t=async()=>{let e=Et.join(fr.homedir(),".claude");await pr.rm(e,{recursive:!0,force:!0})};import Ee from"fs/promises";import Tt from"os";import Le from"path";import B from"process";import hr from"readline";var L=w("runner_codex"),wt="Codex CLI",yr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(L.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(L.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(L.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function Ue({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:s,prompt:a,modelVersionOverrides:c}=e,{model:i}=e;if(n){let{token:f,url:u}=n;if(!f||!u)throw new Error("No token or url provided from AI Gateway");if(c?.codex){let y=c?.codex?.[s];if(y){if(!await n.isModelAvailableForProvider("openai",y))throw new Error(`Model override '${y}' is not available for openai provider`);i=y}}else if(i&&!await n.isModelAvailableForProvider("openai",i))throw new Error(`Model '${i}' is not available for openai provider`);B.env.OPENAI_API_KEY=f,B.env.OPENAI_BASE_URL=u}else if(!B.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let h=[],l=[],d={},g=0,E=0,I,T,m=`${B.env.NVM_BIN}/node`,R=Le.join(Tt.homedir(),".codex"),v=Le.join(R,"config.toml");try{await Ee.mkdir(R,{recursive:!0});let f="";try{f=await Ee.readFile(v,"utf-8")}catch{}f.includes("web_search")||(f.includes("[tools]")?f=f.replace(/\[tools\]/,`[tools]
+ 14 | web_search = true`):f+=`
+ 15 | [tools]
+ 16 | web_search = true
+ 17 |
`,await Ee.writeFile(v,f,"utf-8"),L.log("Updated Codex config with web_search enabled"))}catch(f){L.warn("Failed to update Codex config",{error:f.message})}let N=[W(B.cwd(),"codex"),"login","--with-api-key"];L.log(`Running ${m} ${N.join(" ")}`);let C=t.utils.run(m,N,{input:B.env.OPENAI_API_KEY,env:{...B.env}});try{await C,L.log("Successfully logged in to Codex")}catch(f){throw L.error("Failed to login to Codex",{error:f.message}),new Error(`Codex login failed: ${f.message}`)}let b=[W(B.cwd(),"codex"),"exec","--yolo","--json","--config","web_search=true",...i?["--model",i]:[],a].filter(Boolean);L.log(`Running ${m} ${b.join(" ")}`);let S=t.utils.run(m,b,{all:!0,env:{...B.env}}),_=hr.createInterface({input:S.all});return _.on("error",f=>{L.error("Readline interface error",{error:f.message,stack:f.stack})}),_.on("line",f=>{let u=null;try{u=JSON.parse(f)}catch{L.log("Could not parse line",f);return}let y=[],p=!1;if(u?.duration_ms&&(E=u.duration_ms,p=!0),u?.type==="local_shell_call")d[u.call_id]=u;else if(u?.type==="local_shell_call_output"){let x=_r(d[u.call_id],u);x&&(x.id=g,g+=1,l.push(x),h.push(x),y.push(x),p=!0)}else u?.type==="message"&&u.role==="assistant"?I=u.content.map(x=>x.text).join(`
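The line above, together with the short added lines just before it, writes a Codex CLI configuration tweak before running the agent. A readable sketch, with the path and behaviour as they appear in the bundle and a reconstructed helper name:

```js
import fs from 'fs/promises';
import os from 'os';
import path from 'path';

// Ensure ~/.codex/config.toml enables web_search under [tools]; existing
// settings are preserved and the file is created if it does not exist.
async function ensureCodexWebSearch() {
  const configDir = path.join(os.homedir(), '.codex');
  const configPath = path.join(configDir, 'config.toml');
  await fs.mkdir(configDir, { recursive: true });

  let config = '';
  try {
    config = await fs.readFile(configPath, 'utf-8');
  } catch {
    // no config yet; start from an empty file
  }
  if (config.includes('web_search')) return;

  config = config.includes('[tools]')
    ? config.replace(/\[tools\]/, '[tools]\nweb_search = true')
    : config + '\n[tools]\nweb_search = true\n';
  await fs.writeFile(configPath, config, 'utf-8');
}
```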
+ 18 | `):u?.type==="message"&&u.role==="system"&&(T=u.content.map(x=>x.text).join(`
+ 19 |
`));p&&(r?.({steps:h,duration:E}),o?.({steps:y,duration:E}))}),await S.catch(f=>{let u=yr({catchError:f,runCmd:S,error:T,result:I,runnerName:"Codex"});T=u.error,I=u.result}),_.close(),{steps:l,duration:E,result:await ee({initialResult:I,agentName:wt,hasError:!!T}),error:te({error:T,agentName:wt}),isRetryableError:re(T)}}var It=async()=>{let e=Le.join(Tt.homedir(),".codex");await Ee.rm(e,{recursive:!0,force:!0})},Er=new Set(["bash","-lc"]),_r=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(s=>!Er.has(s)),o=r?`Running \`${r.join(" ")}\``:void 0,n;try{n=JSON.parse(t.output).output?.trim(),n&&(n=`\`\`\`
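The Codex runner above consumes `codex exec --json` events. Shell invocations arrive as two events that are paired by `call_id`; a sketch of that pairing with reconstructed names:

```js
const SHELL_WRAPPER_ARGS = new Set(['bash', '-lc']);

// `local_shell_call` events are remembered by call_id; when the matching
// `local_shell_call_output` arrives, both are folded into one step with a
// human-readable title and the command output in a fenced block.
function buildShellStep(callEvent, outputEvent) {
  if (!callEvent || !outputEvent || callEvent.call_id !== outputEvent.call_id) return null;

  const command = callEvent.action?.command?.filter((arg) => !SHELL_WRAPPER_ARGS.has(arg));
  const title = command ? `Running \`${command.join(' ')}\`` : undefined;

  let message;
  try {
    const output = JSON.parse(outputEvent.output).output?.trim();
    if (output) message = '```\n' + output + '\n```';
  } catch {
    // output was not valid JSON; leave the message empty
  }
  return { title, message };
}
```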
  16 20 | ${n.trim()}
- 17 | \`\`\``)}catch(s){
+ 21 |
\`\`\``)}catch(s){L.error("Could not decode outputMsg",s,t.output)}return{title:o,message:n}};import wr from"fs/promises";import Tr from"os";import Rt from"path";import J from"process";import Ir from"readline";var ne=w("runner_gemini"),xt="Gemini CLI",xr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(ne.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(ne.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(ne.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),Rr={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"};async function Me({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:s,prompt:a,modelVersionOverrides:c}=e,{model:i}=e;if(n){let{token:_,url:f}=n;if(!_||!f)throw new Error("No token or url provided from AI Gateway");if(c?.gemini){let u=c?.gemini?.[s];if(u){if(!await n.isModelAvailableForProvider("gemini",u))throw new Error(`Model override '${u}' is not available for gemini provider`);i=u}}else if(i&&!await n.isModelAvailableForProvider("gemini",i))throw new Error(`Model '${i}' is not available for gemini provider`);J.env.GEMINI_API_KEY=_,J.env.GOOGLE_GEMINI_BASE_URL=f}else if(!J.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let h=[],l=[],d=[],g={},E=0,I=0,T,m,R=[W(J.cwd(),"gemini"),...i?["--model",i]:[],"--yolo","-p",a],v=`${J.env.NVM_BIN}/node`;ne.log(`Running ${v} ${R.join(" ")}`);let N=t.utils.run(v,R,{all:!0,env:J.env});N.stdin?.end();let C=he(()=>{r?.({steps:h,duration:I}),o?.({steps:l,duration:I}),l=[]},250),b=(_,f)=>{_.id=E,E+=1,d.push(_),h.push(_),l.push(_),f||C.flush(),C(),f&&C.flush()},S=Ir.createInterface({input:N.all});return S.on("error",_=>{ne.error("Readline interface error",{error:_.message,stack:_.stack})}),S.on("line",_=>{let f=null;try{if(_.startsWith("[API Error")){let u=_.match(/\[api error: (.+?)]$/i)?.[1];f={type:"error",value:ct(u,!1)?.error?.message||u||"Gemini encountered error"}}else f=JSON.parse(_)}catch{return}if(f)switch(f.type){case"thought":{let u=f.value;b({title:u?.subject??"Thinking...",message:u?.description},!0);break}case"content":{f.value&&b({message:f.value});break}case"tool_call_request":{let u=f.value,y=Rr[u.name]??u.name,p=u.args?.path||u.args?.absolute_path,x=p&&Rt.relative(J.cwd(),p),oe=u.args?.command,H={title:[y,x&&`\`${x}\``,oe&&`\`${oe}\``].filter(Boolean).join(" ")};g[u.callId]=H,C.flush();break}case"tool_result":{let u=f.value,y=g[u.callId];if(y){let p=[u.resultDisplay,u.responseParts?.functionResponse?.response?.output].find(x=>typeof x=="string"&&x);p&&(y.message=`\`\`\`
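The Gemini runner above reads the CLI's streamed output line by line; lines that are not JSON but start with `[API Error` are converted into synthetic error events. A sketch, with reconstructed names:

```js
// Parse one line of Gemini CLI output. Regular lines are JSON events; API
// failures arrive as plain text like "[API Error: <json or message>]" and are
// normalized into an { type: 'error', value } event instead.
function parseGeminiLine(line) {
  try {
    if (line.startsWith('[API Error')) {
      const raw = line.match(/\[api error: (.+?)]$/i)?.[1];
      let message;
      try {
        message = JSON.parse(raw)?.error?.message;
      } catch {
        // raw was not JSON; fall back to the raw text
      }
      return { type: 'error', value: message || raw || 'Gemini encountered error' };
    }
    return JSON.parse(line);
  } catch {
    return null; // unparseable noise is ignored by the caller
  }
}
```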
  18 22 | ${p.trim()}
- 19 | \`\`\``),y
+ 23 |
\`\`\``),b(y,!0)}break}case"result":{I=f.duration_ms,T=f.value,[d,h,l].forEach(u=>{u[u.length-1]?.message===T&&u.pop()});break}case"error":{m=f.value;break}case"finished":break;default:{ne.warn("Unhandled message type:",f.type);break}}}),await N.catch(_=>{({error:m,result:T}=xr({catchError:_,runCmd:N,error:m,result:T,runnerName:"Gemini"}))}),S.close(),C.flush(),{steps:d,duration:I,result:await ee({initialResult:T,agentName:xt,hasError:!!m}),error:te({error:m,agentName:xt}),isRetryableError:re(m)}}var vt=async()=>{let e=Rt.join(Tr.homedir(),".gemini");await wr.rm(e,{recursive:!0,force:!0})};var vr={codex:{runner:Ue,clean:It},claude:{runner:ke,clean:_t},gemini:{runner:Me,clean:vt}},St=vr;var At=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:o})=>await A(Sr(),"init-stage",async n=>{let s=performance.now();n?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.validateAgent":e.validateAgent,"init.runnerVersion":o||"unknown"});let a=St[e.runner];if(!a)throw n?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let c=Ar({apiToken:r});Xe(c);let i=e.useGateway?await rt({netlify:c,config:e}):void 0;n?.setAttributes({"init.aiGateway.created":!!i}),e.validateAgent&&e.errorLogsPath&&n?.setAttributes({"init.errorLogsPath":e.errorLogsPath});let h=ut(({steps:g=[],duration:E})=>{let I=g.map(T=>({...T,title:T.title?j(T.title):void 0,message:T.message?j(T.message):void 0}));return g.length=0,G(e.id,e.sessionId,{steps:I,duration:E})},t),l;e.hasRepo?e.sha?(l=e.sha,n?.setAttributes({"init.sha.source":"provided"})):(l=await gt(c),await ae(e.id,{sha:l}),n?.setAttributes({"init.sha.source":"current_commit"})):(l=await mt(c),n?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let d=performance.now()-s;return n?.setAttributes({"init.sha":l||"unknown","init.duration.ms":d,"init.status":"success"}),{aiGateway:i,context:c,persistSteps:h,runner:a,sha:l}}),Ar=({apiToken:e})=>({constants:{NETLIFY_API_HOST:_e.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||_e.env.NETLIFY_API_TOKEN,SITE_ID:_e.env.SITE_ID,FUNCTIONS_DIST:_e.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:it}});import{getTracer as Ge}from"@netlify/otel";import Nr from"crypto";import K from"fs/promises";import $ from"path";import U from"process";var O=w("context"),br=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:U.env.NETLIFY_TEAM_ID,userId:U.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:U.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},Cr=10,Pr=async e=>{let{name:t,ext:r}=$.parse(e),o=e,n=$.join(U.cwd(),F,o),s=0;for(;await Or(n);){if(s>=Cr)throw new Error("Failed to generate context file");o=`${t}-${Nr.randomUUID().slice(0,5)}${r}`,n=$.join(U.cwd(),F,o),s+=1}return o},Or=async e=>{try{return await K.access(e),!0}catch{return!1}},$r=async()=>{try{O.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return O.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(o=>o&&typeof o=="object"&&o.key==="catchall-consumer");return r?!r.contextScopes||typeof r.contextScopes!="object"?(O.warn("Catchall consumer missing or invalid 
contextScopes"),null):r:(O.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?O.warn("Netlify features context request timed out"):O.warn("Failed to fetch Netlify features context:",e.message),null}},Dr=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let o=await r.text();return await K.writeFile(t,o,"utf-8"),!0}catch(r){return r.name==="AbortError"?O.warn(`Download timeout for ${e}`):O.warn(`Failed to download context file ${e}:`,r.message),!1}},we=null,Fr=async()=>{if(we)return we;let e=await $r();if(!e)return[];let t=$.join(U.cwd(),F,Oe);await K.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([n,s])=>{if(!s||typeof s!="object"||!s.endpoint||!s.scope)return O.warn(`Invalid scope data for ${n}, skipping...`),null;let a=`${n}.md`,c=$.join(t,a),i=$.join(F,Oe,a);return O.log(`Downloading ${s.scope} context...`),await Dr(s.endpoint,c)?(O.log(`Downloaded: ${i}`),{scope:s.scope,path:i,key:n}):null});return we=(await Promise.all(r)).filter(n=>n!==null),we},Nt=async({cliPath:e,netlify:t,config:r,buildErrorContext:o})=>{let n=br(t),s=await Pr(lt),a=$.join(U.cwd(),F);await K.mkdir(a,{recursive:!0});let c=$.join(F,s),i=$.join(U.cwd(),c),h=$.join(U.cwd(),F,Q);try{await K.unlink(h),O.log(`Deleted old results file: ${h}`)}catch{}let l=o?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
  20 24 | Your task is to analyze and fix the build errors.
  21 25 | Don't apply techniques of reverting changes. Apply fixes related to errors.
  22 26 | Don't try to run build by yourself. Just fix the errors.
  23 27 |
  24 28 | <build_error_context>
  25 29 | ${o}
- 26 | </build_error_context>`:"",
+ 30 | </build_error_context>`:"",d="";r.siteContext&&r.siteContext.length!==0&&(d=`
  27 31 | <project_rules>
- 28 | ${r.siteContext.filter(
+ 32 | ${r.siteContext.filter(m=>m.site_context).map(m=>typeof m.site_context=="string"?m.site_context:typeof m.site_context=="object"?JSON.stringify(m.site_context):"").join(`
  29 33 |
  30 34 | `)}
  31 35 | </project_rules>
- 32 | `);let
+ 36 |
`);let g="";if(r.sessionHistoryContext?.length){let m=$.join(U.cwd(),F,Pe);await K.mkdir(m,{recursive:!0});let R=await Promise.all(r.sessionHistoryContext.map(async(v,N)=>{let C=N+1,b=`attempt-${C}.md`,S=$.join(m,b),_=$.join(F,Pe,b),f=`# Task History - Attempt ${C}
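The added row above writes each prior session exchange to disk so the agent can read it back. A sketch of that step; the file layout follows the template visible in the surrounding rows (exact blank-line placement approximated), and the names are reconstructions.

```js
import fs from 'fs/promises';
import path from 'path';

// Each prior request/response pair becomes .netlify/task-history/attempt-N.md;
// the generated prompt then lists at most the last five file paths inside a
// <session_history_context> block.
async function writeTaskHistory(sessionHistoryContext, cwd = process.cwd()) {
  const dir = path.join(cwd, '.netlify', 'task-history');
  await fs.mkdir(dir, { recursive: true });

  const relativePaths = await Promise.all(
    sessionHistoryContext.map(async (entry, index) => {
      const attempt = index + 1;
      const fileName = `attempt-${attempt}.md`;
      const body =
        `# Task History - Attempt ${attempt}\n\n` +
        `## Request - what the user asked for\n${entry.request}\n\n` +
        `## Response - what the agent replied with after its work\n\n${entry.response}\n`;
      await fs.writeFile(path.join(dir, fileName), body, 'utf-8');
      return path.join('.netlify', 'task-history', fileName);
    })
  );

  return relativePaths.slice(-5).map((p) => `- ${p}`).join('\n');
}
```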
  33 37 |
  34 38 | ## Request - what the user asked for
  35 39 | ${v.request}
@@ -39,21 +43,21 @@ ${v.request}
  39 43 | ## Response - what the agent replied with after its work
  40 44 |
  41 45 | ${v.response}
- 42 | `;return await
+ 46 | `;return await K.writeFile(S,f,"utf-8"),O.log(`Created history file: ${_}`),_}));g+=`
  43 47 | <session_history_context>
  44 48 | History of prior work on this task.
  45 49 | You MUST review ALL of the files below as context to understand the context of previous attempts. Use this information to continue the discussion appropriately.
  46 50 |
- 47 | ${
+ 51 | ${R.slice(-5).map(v=>`- ${v}`).join(`
  48 52 | `)}
  49 53 |
  50 54 | </session_history_context>
- 51 | `}let E=await
+ 55 | `}let E=await Fr(),I="";E.length>0&&(I=`
  52 56 | <netlify_features_context>
  53 57 | If the user request is explicitly related to a specific Netlify feature (e.g., Netlify Forms, Netlify Functions, etc.), you MUST review the relevant documentation below in addition to reviewing the project files.
  54 58 | DO NOT force the use of any Netlify feature if the user request does not explicitly require it or if the project has alternative implementations in place already.
  55 59 |
- 56 | ${E.map(
+ 60 | ${E.map(m=>`- **${m.scope}**: ${m.path}`).join(`
  57 61 | `)}
  58 62 |
  59 63 | Refer to these files when working with specific Netlify features.
@@ -65,23 +69,23 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
  65 69 | <user_request>
  66 70 | ${r.prompt}
  67 71 | </user_request>
- 68 | ${
+ 72 | ${l}
  69 73 | </request>
  70 74 |
  71 75 | <requirements>
  72 76 | <responses>
  73 77 | - Do not speak in first person. You may speak as "the agent".
- 74 | - When work is complete, write a changes summary in ${a}/${
- 75 | - If the user's request is informational in nature (asking for output, status, information, or analysis rather than asking you to make changes), write the requested information directly to the ${a}/${
+ 78 |
- When work is complete, write a changes summary in ${a}/${Q} as a standalone PR description. Explain what was accomplished and why (avoid too many implementation details), assuming the reader has no prior context. Use past tense and write in prose without calling it a "PR", "Changelog", etc. This is the core of a PR message or summary page that already has a heading.
+ 79 | - If the user's request is informational in nature (asking for output, status, information, or analysis rather than asking you to make changes), write the requested information directly to the ${a}/${Q} file.
  76 80 | - Do not attempt to create git commits, PRs, etc. directly. You can use git to review information if required but the system that runs this agent will handle creating PRs or commits of the changes it performs.
  77 81 | - NEVER look into the \`.git\` folder
  78 82 | - NEVER print potentially sensitive values (like secrets) in the planning output or results
  79 83 | </responses>
  80 84 | <attachements>
- 81 | - for requests that require work with attachments or assets, take into account that uploaded attachments are stored in ${a}/${
- 82 | - move assets from ${a}/${
+ 85 | - for requests that require work with attachments or assets, take into account that uploaded attachments are stored in ${a}/${$e} folder
+ 86 | - move assets from ${a}/${$e} folder to the project assets folder if they are referenced in a code or applied changes
  83 87 | </attachements>
- 84 | ${
+ 88 | ${d}
  85 89 | </requirements>
  86 90 |
  87 91 | <extra_context>
@@ -93,39 +97,39 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
  93 97 | - Netlify Functions directory: ${n.functionsDir}
  94 98 | </metadata>
  95 99 | <environment>
- 96 | - Node Version: ${
+ 100 | - Node Version: ${U.version||"unknown"}
  97 101 | - Environment variables are set globally (e.g. \`echo $VARIABLE_NAME\` can be used to check if a var is set).
  98 102 | - 'netlify-cli' npm package is already available as a global package. Don't try to install it again
  99 103 |
- If you need to start a local development server in order to fulfill the request, try using the Netlify CLI over by running the shell command '${e} dev'. This will start a local HTTP server on port 8888, including live-reloading of any changes and, most critically, it offers local emulation for all Netlify features.
  100 104 | </environment>
- 101 | ${
+ 105 | ${I}
  102 106 | <docs>
  103 107 | - Netlify Docs: https://docs.netlify.com
  104 108 | - LLM Resources Index: https://docs.netlify.com/llms.txt
  105 109 | </docs>
  106 110 | </extra_context>
  107 111 |
- 108 | ${
- 109 | `;return await
+ 112 | ${g}
+ 113 | `;return await K.writeFile(i,T,"utf-8"),O.log(`Generated agent context document at: ${i}`),T.length>5e5&&(T=`
  110 114 | You're an AI agent designed to assist with tasks related to a Netlify project. Please review, understand, and use the context provided to complete the user's request as needed.
  111 115 |
  112 116 | <request>
  113 117 | <user_request>
  114 118 | ${r.prompt}
  115 119 | </user_request>
- 116 | ${
+ 120 | ${l}
  117 121 | </request>
  118 122 |
  119 123 | Use the following file for the complete context of the ask, the environment, and what's available. ${i} You MUST READ ALL OF IT. Make sure to read it first. Never cite or paraphrase private context.
- 120 | `),T};var
- 121 | ${
+ 124 |
`),T};var kr=w("prompt"),bt=async({cliPath:e,config:t,netlify:r,buildErrorContext:o})=>{let n=await Nt({cliPath:e,config:t,netlify:r,buildErrorContext:o});return process.env.AGENT_RUNNER_DEBUG&&kr.log("Contextful Prompt:",n),{prompt:n}};var Te=w("inference_stage"),Ct=5,Ie=async e=>{let{cliPath:t,config:r,context:o,buildErrors:n,runner:s,persistSteps:a,aiGateway:c,attempt:i,contextPrefix:h,priorAgentSessionId:l}=e;Te.log(`Running inference stage, attempt ${i} of ${Ct}`);let d=await A(Ge(),"inference-stage",async g=>{g?.setAttributes({"inference.attempt":i||1}),nt();let{prompt:E}=await A(Ge(),"compose-prompt",async()=>await bt({cliPath:t,config:r,buildErrorContext:Lr(n),netlify:o})),I=`
+ 125 | ${h||""}
  122 126 | ${E}
- 123 | `.trim(),T={...r,prompt:
+ 127 |
`.trim(),T={...r,prompt:I},m=await A(Ge(),`run-${r.runner}`,async()=>await s({aiGateway:c,config:T,netlify:o,persistSteps:a,continueSession:!!(i&&i>1),priorAgentSessionId:l}));return m.result&&(m.result=j(m.result)),m.error&&(m.error=j(m.error)),await a.flush(),m});if(d.error){if(Te.error("Runner failed",{stepsCount:d.steps.length,duration:d.duration,error:d.error,isRetryableError:d.isRetryableError,attempt:i||1,agentSessionId:d.agentSessionId}),d.isRetryableError&&(!i||i<Ct))return Te.log("Retrying inference stage"),await new Promise(E=>setTimeout(E,5e3)),{runnerResult:(await Ie({...e,attempt:(i||1)+1,priorAgentSessionId:d.agentSessionId,contextPrefix:d.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw Te.log("Do not retry inference stage"),new Error(d.error)}return{runnerResult:d}},Lr=e=>!e||e.length===0?"":`
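The inference stage in the line above retries transient agent failures before giving up. A sketch of that retry behaviour; the attempt limit, delay, and error substrings are as they appear in the bundle, while the function names are reconstructions.

```js
const MAX_INFERENCE_ATTEMPTS = 5;
const RETRY_DELAY_MS = 5_000;

// Errors that look like transient network/API problems are retried; anything
// else aborts the pipeline. When the agent could not report a session id, the
// retry prompt is prefixed with a note telling it to continue, not start over.
function isRetryableError(error) {
  const text = (typeof error === 'object' ? JSON.stringify(error) : error || '')
    .replace(/\s+/g, ' ')
    .toLowerCase();
  return ['error when talking to gemini api', '499', 'connection closed prematurely', 'request timed out', 'network error']
    .some((needle) => text.includes(needle));
}

async function runInferenceWithRetries(runOnce, attempt = 1) {
  const result = await runOnce(attempt);
  if (!result.error) return result;
  if (isRetryableError(result.error) && attempt < MAX_INFERENCE_ATTEMPTS) {
    await new Promise((resolve) => setTimeout(resolve, RETRY_DELAY_MS));
    return runInferenceWithRetries(runOnce, attempt + 1);
  }
  throw new Error(result.error);
}
```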
  124 128 | Deploy failed failed. Here are the errors to review on the latest build:
  125 129 |
  126 130 | Below are all of the logs with potential issues that we extracted. Some of them may be false positives, discern them carefully and ensure fixes are relevant.
  127 131 |
  128 132 | ${e.pop()}
- 129 | `;import
- 130 | Preview deploy created successfully:`,{deployId:
+ 133 |
`;import Gr from"process";import{getTracer as je}from"@netlify/otel";import{getTracer as Ur}from"@netlify/otel";var ue=w("deploy"),Pt=async e=>await A(Ur(),"create-preview-deploy",async t=>Mr(e,t)),Mr=async({netlify:e,hasRepo:t,skipBuild:r,message:o="Agent Preview",deploySubdomain:n,cliPath:s,filter:a},c)=>{try{let i=["deploy","--message",`"${o}"`,"--json","--draft","--verbose"];t||(ue.log("Deploy: Uploading source zip"),i.push("--upload-source-zip")),n&&i.push("--alias",n),a&&i.push("--filter",a),r?(ue.log("Deploy: Skipping build"),i.push("--no-build")):i.push("--context","deploy-preview");let h=s||"netlify";ue.log(`Running: ${h} ${i.join(" ")}`),c?.setAttributes({cmd:h,args:i});let l=await e.utils.run(h,i,{stdio:["ignore","pipe","pipe"]}),d=JSON.parse(l.stdout.trim());c?.setAttributes({success:!0,deployId:d.deploy_id,deployUrl:d.deploy_url,siteId:d.site_id}),ue.log(`
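The deploy stage in the line above shells out to the Netlify CLI to create a draft (preview) deploy and reads its JSON output. A sketch of the argument assembly; the flag set is the one visible in the bundle, and the function name is a reconstruction.

```js
// Build the `netlify deploy` invocation used for the preview deploy. The JSON
// output is parsed to recover the deploy id, preview URL, logs URL and site id.
function buildPreviewDeployArgs({ message = 'Agent Preview', hasRepo, skipBuild, deploySubdomain, filter }) {
  const args = ['deploy', '--message', `"${message}"`, '--json', '--draft', '--verbose'];
  if (!hasRepo) args.push('--upload-source-zip'); // zip-based runs (no git repo) upload their source
  if (deploySubdomain) args.push('--alias', deploySubdomain);
  if (filter) args.push('--filter', filter);
  if (skipBuild) args.push('--no-build');
  else args.push('--context', 'deploy-preview');
  return args;
}

// Example: buildPreviewDeployArgs({ hasRepo: true, skipBuild: false })
// -> ['deploy', '--message', '"Agent Preview"', '--json', '--draft', '--verbose', '--context', 'deploy-preview']
```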
+ 134 |
Preview deploy created successfully:`,{deployId:d.deploy_id,deployUrl:d.deploy_url,siteId:d.site_id});let g={deployId:d.deploy_id,previewUrl:d.deploy_url,logsUrl:d.logs,siteId:d.site_id};return t||(g.sourceZipFilename=d.source_zip_filename),g}catch(i){throw ue.error("Failed to create preview deploy via CLI:",i),c?.setAttributes({success:!1,error:i.message}),i}};var ce=w("deploy_stage"),Ye=async e=>await A(je(),"run-deploy-stage",async()=>jr(e)),jr=async({cliPath:e,config:t,context:r,result:o,filter:n})=>{let s=await A(je(),"get-runner-diffs",async()=>await ft({config:t,netlify:r}));if(ce.info("Resolved git",{hasChanges:s.hasChanges,ignored:s.ignored??[]}),!s.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:a,resultDiff:c,diffBinary:i,resultDiffBinary:h}=s,l=!0;ce.log("Preview deploy condition check:",{resultUndefined:o===void 0,resultType:typeof o,hasChanges:l,wouldCreatePreview:o!==void 0&&l});let d=null;if(o!==void 0&&l)try{let g;try{let E=await A(je(),"get-runner-session",async()=>await Ze(t.id,t.sessionId));E?.title&&(g=E.title)}catch(E){ce.warn("Failed to fetch session title, using fallback message:",E.message)}await G(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),d=await Pt({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:g,skipBuild:!1,deploySubdomain:dt(t.id,Gr.env.SITE_NAME),filter:n})}catch(g){return ce.warn("Failed to create preview deploy (continuing with agent run):",g),{diff:a,resultDiff:c,hasChanges:l,previewInfo:null,diffBinary:i,resultDiffBinary:h,deployError:g instanceof Error?g.message:String(g)}}return ce.log("Git status",{hasDiff:!!a,hasChanges:l}),{diff:a,resultDiff:c,hasChanges:l,previewInfo:d,diffBinary:i,resultDiffBinary:h}};import{getTracer as xe}from"@netlify/otel";async function Ot(e,t){let{maxRetries:r,baseDelay:o,onRetry:n}=t,s;for(let a=1;a<=r;a++)try{return await e()}catch(c){if(s=c,a===r)throw s;n&&n(a,s),await new Promise(i=>setTimeout(i,o*a))}throw s}var D=w("cleanup_stage"),$t=async e=>await A(xe(),"cleanup-stage",async()=>Yr(e)),Be=1024*1024*10,Yr=async({config:e,diff:t,result:r,duration:o,resultDiff:n,diffBinary:s,resultDiffBinary:a,previewInfo:c})=>{let i={result:r||"Done",duration:o};if(c&&c.deployId&&(i.deploy_id=c.deployId),c&&c.sourceZipFilename&&(i.result_zip_file_name=c.sourceZipFilename),t||s||n||a)try{D.log("Getting pre-signed URLs for diff upload");let l=await et(e.id,e.sessionId),d=[];(t||s)&&d.push(Ae(l.result.upload_url,s||t).then(()=>{i.result_diff_s3_key=l.result.s3_key,D.log("Successfully uploaded result_diff to S3")})),(n||a)&&d.push(Ae(l.cumulative.upload_url,a||n).then(()=>{i.cumulative_diff_s3_key=l.cumulative.s3_key,D.log("Successfully uploaded cumulative_diff to S3")})),D.log(`Uploading ${d.length} diff(s) to S3 in parallel`),await Promise.all(d),(n||a)&&(D.log("Updating agent runner with cumulative diff S3 key"),await A(xe(),"update-runner",async()=>{await ae(e.id,{result_diff_s3_key:l.cumulative.s3_key})}))}catch(l){D.error("S3 upload failed, falling back to inline diffs:",l);let d=Buffer.byteLength(t||s||""),g=Buffer.byteLength(a||n||"");if(d>Be||g>Be){let E=`Diffs exceed maximum inline size of ${Be} bytes.`;throw D.error(E),new Error(E)}i.result_diff=t,i.result_diff_binary=s,(n||a)&&(i.cumulative_diff=n,i.cumulative_diff_binary=a,D.log("Updating agent runner with inline diffs (fallback)"),await A(xe(),"update-runner",async()=>{await ae(e.id,{result_diff:n,result_diff_binary:a})}))}else D.log("No diffs to upload");return D.log("Updated agent runner with result"),await Ot(async()=>await 
A(xe(),"update-runner-session",()=>G(e.id,e.sessionId,i)),{maxRetries:3,baseDelay:1e3,onRetry:(l,d)=>{D.error(`Error updating agent runner session (attempt ${l}):`,d),D.log("Retrying...")}}),D.log("Finished updating agent runner with result"),{sessionUpdate:i}};import{getTracer as Dt,shutdownTracers as Hr,withActiveSpan as Ft}from"@netlify/otel";var Kr=Br(import.meta.url),kt=Kr("../package.json"),Lt=w("pipeline_index"),Re=3,ws=async({config:e,apiToken:t,cliPath:r="netlify",cwd:o,errorLogsPath:n,filter:s,tracing:a={}})=>{let c,{withStageTimer:i}=ot(q.timeUnits.hours(4)),h=await Ve(kt.version,e.id,a);try{await Ft(Dt(),"run-pipeline",{},h,async()=>{let l,{aiGateway:d,context:g,persistSteps:E,runner:I,sha:T}=await i("init",()=>At({config:e,apiToken:t,cliPath:r,cwd:o,errorLogsPath:n,filter:s,runnerVersion:kt.version}),q.timeUnits.minutes(10));c=I.clean,e.sha=T;let{runnerResult:m}=await i("inference",()=>Ie({cliPath:r,config:e,context:g,runner:I.runner,persistSteps:E,aiGateway:d}));await G(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let R=await i("deploy",()=>Ye({cliPath:r,config:e,context:g,result:m.result,filter:s})),v=m,N=[];if(R.hasChanges&&R.deployError){N.push(Je(R.deployError));let u=1;for(;u<=Re&&!R.previewInfo;)Lt.log(`Deploy attempt had errors. Retrying. ${u}/${Re}`),await Ft(Dt(),"deploy-stage",async y=>{y?.setAttributes({"stage.attempt":u});let{runnerResult:p}=await i(`inference-retry-${u}`,()=>Ie({cliPath:r,config:e,context:g,runner:I.runner,persistSteps:E,aiGateway:d,buildErrors:N,priorAgentSessionId:m.agentSessionId}));v={...p,steps:[...v.steps||[],...p.steps||[]],duration:(v.duration||0)+(p.duration||0)},R=await i(`deploy-retry-${u}`,()=>Ye({cliPath:r,config:e,context:g,result:p.result,filter:s})),R.deployError&&N.push(R.deployError),u++});u>Re&&!R.previewInfo&&(l=new Error(`Deploy validation failed after ${Re} attempts`))}let{diff:C,resultDiff:b,previewInfo:S,diffBinary:_,resultDiffBinary:f}=R;if(await i("cleanup",()=>$t({config:e,diff:C,result:v.result,duration:v.duration,resultDiff:b,diffBinary:_,resultDiffBinary:f,previewInfo:S}),q.timeUnits.minutes(10)),l)throw l;process.env.NETLIFY_LOCAL_MODE||await I.clean?.()})}catch(l){Lt.error("Got error while running pipeline",l),await c?.();let d=l instanceof Error&&l.message;throw await G(e.id,e.sessionId,{result:d||"Encountered error when running agent",state:"error"}),l}finally{await Hr()}};export{ws as runPipeline};
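The cleanup stage in the lines above is where 1.47.0 moves diff delivery to S3: presigned upload URLs are requested from the API, the diffs are PUT there, and inline diffs remain only as a fallback capped at 10 MiB. A sketch of that flow; the endpoint path, size limit, and fallback behaviour are taken from the bundle, while the helper names are reconstructions and the runner/binary-diff bookkeeping is simplified.

```js
const MAX_INLINE_DIFF_BYTES = 10 * 1024 * 1024;

// PUT a diff to a presigned S3 URL; the caller records the returned s3_key on
// the runner session instead of sending the diff inline.
async function uploadDiff(presignedUrl, body) {
  const res = await fetch(presignedUrl, {
    method: 'PUT',
    body,
    headers: { 'Content-Type': 'text/plain' },
  });
  if (!res.ok) throw new Error(`S3 upload failed with status ${res.status}`);
  return res;
}

async function persistDiffs({ getUploadUrls, resultDiff, cumulativeDiff, sessionUpdate }) {
  try {
    // getUploadUrls() wraps POST /api/v1/agent_runners/:id/sessions/:sessionId/diff/upload_urls
    const urls = await getUploadUrls();
    await Promise.all([
      resultDiff && uploadDiff(urls.result.upload_url, resultDiff).then(() => {
        sessionUpdate.result_diff_s3_key = urls.result.s3_key;
      }),
      cumulativeDiff && uploadDiff(urls.cumulative.upload_url, cumulativeDiff).then(() => {
        sessionUpdate.cumulative_diff_s3_key = urls.cumulative.s3_key;
      }),
    ]);
  } catch {
    // Fallback: send the diffs inline, but refuse anything over 10 MiB.
    if (Buffer.byteLength(resultDiff || '') > MAX_INLINE_DIFF_BYTES ||
        Buffer.byteLength(cumulativeDiff || '') > MAX_INLINE_DIFF_BYTES) {
      throw new Error(`Diffs exceed maximum inline size of ${MAX_INLINE_DIFF_BYTES} bytes.`);
    }
    sessionUpdate.result_diff = resultDiff;
    sessionUpdate.cumulative_diff = cumulativeDiff;
  }
}
```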
  131 135 | //# sourceMappingURL=index.js.map
package/package.json
CHANGED
@@ -1,7 +1,7 @@
  1 1 | {
  2 2 | "name": "@netlify/agent-runner-cli",
  3 3 | "type": "module",
- 4 | "version": "1.
+ 4 | "version": "1.47.0",
  5 5 | "description": "CLI tool for running Netlify agents",
  6 6 | "main": "./dist/index.js",
  7 7 | "types": "./dist/index.d.ts",
@@ -72,13 +72,13 @@
  72 72 | "vitest": "^1.5.0"
  73 73 | },
  74 74 | "dependencies": {
- 75 | "@anthropic-ai/claude-code": "
+ 75 | "@anthropic-ai/claude-code": "2.0.22",
  76 76 | "@google/gemini-cli": "0.1.17",
  77 77 | "@netlify/otel": "^4.3.2",
+ 78 | "@openai/codex": "0.55.0",
  78 79 | "@opentelemetry/exporter-trace-otlp-grpc": "^0.57.0",
  79 80 | "execa": "^8.0.0",
  80 81 | "get-port": "^5.1.1",
- 81 | "minimist": "^1.2.8"
- 82 | "my-codex-no-sandbox": "^0.1.2505290819"
+ 82 | "minimist": "^1.2.8"
  83 83 | }
  84 84 | }