@netlify/agent-runner-cli 1.58.7-alpha → 1.60.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bin-local.js +30 -30
- package/dist/bin.js +30 -30
- package/dist/index.js +27 -27
- package/package.json +2 -2
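For reference, the same comparison can be reproduced locally with npm's built-in diff command (npm 7 or newer); this is a sketch of an equivalent query against the public registry, and the rendering may differ from the view below:

    npm diff --diff=@netlify/agent-runner-cli@1.58.7-alpha --diff=@netlify/agent-runner-cli@1.60.0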
package/dist/index.js
CHANGED
@@ -1,31 +1,31 @@
-
import{createRequire as
-
`),r=[],o=-1,n=0;for(;n<t.length;){let u=t[n].slice(0,500).toLowerCase();if(
+
import{createRequire as gn}from"module";import{createTracerProvider as or}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as st}from"@netlify/otel/opentelemetry";import{FetchInstrumentation as ir}from"@netlify/otel/instrumentation-fetch";import{withActiveSpan as sr}from"@netlify/otel";import{propagation as at,context as lt,W3CTraceContextPropagator as ar}from"@netlify/otel/opentelemetry";import{OTLPTraceExporter as lr}from"@opentelemetry/exporter-trace-otlp-grpc";import nr from"process";function _(e){let t=nr.env.LOG!=="0";return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[ERROR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[WARN]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[DEBUG]-[${e}]`,...r)}}}var Ue=_("tracing"),ut=async(e,t,r)=>(await or({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new st(new Me),new st(new lr({url:r.exporterUrl}))],instrumentations:[new ir({skipHeaders:!0})]}),r.traceparent?(at.setGlobalPropagator(new ar),at.extract(lt.active(),{traceparent:r.traceparent,isRemote:!0})):lt.active());function b(e,t,r){return Ue.log(`\u23F3 TRACE: ${t} starting...`),sr(e,t,r)}var Me=class{export(t,r){for(let o of t)this.logSpan(o);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,o=t.attributes,n=[];for(let[u,l]of Object.entries(o))u.includes("duration")&&typeof l=="number"?n.push(`${u}=${l.toFixed(2)}ms`):n.push(`${u}=${l}`);let i=t.status?.code===2?"\u274C":"\u2705",s=n.length>0?` [${n.join(", ")}]`:"";Ue.log(`${i} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${s}`),t.status?.code===2&&t.status.message&&Ue.log(` \u274C Error: ${t.status.message}`)}};var ur=["error","failed","exception","fatal","panic","abort","crash"];function ct(e){let t=e.split(`
+
`),r=[],o=-1,n=0;for(;n<t.length;){let u=t[n].slice(0,500).toLowerCase();if(ur.some(p=>u.includes(p))){let p=Math.max(0,n-10,o+1),f=Math.min(t.length-1,n+20),c=[];for(let m=p;m<=f;m++)c.push(t[m]);r.push(c.join(`
`)),o=f,n=f+1}else n++}if(r.length===0)return e;let i=r.map((s,u)=>`<extracted_error_chunk order="${u+1}">
${s}
</extracted_error_chunk>`).join(`
-
`);return i.length>e.length*.8?e:i}import Ne from"process";import{getTracer as Vr}from"@netlify/otel";import me from"process";var Ie=me.env.NETLIFY_API_URL,xe=me.env.NETLIFY_API_TOKEN,Y=_("api"),ve=()=>me.env.NETLIFY_LOCAL_MODE==="true",ge=async(e,t={})=>{if(!Ie||!xe)throw new Error("No API URL or token");let r=new URL(e,Ie),o={...t,headers:{...t.headers,Authorization:`Bearer ${xe}`}};me.env.AGENT_RUNNERS_DEBUG==="true"&&(o.headers["x-nf-debug-logging"]="true"),t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),i=n.ok&&n.status<=299;if(me.env.AGENT_RUNNERS_DEBUG==="true")Y.log(`Response headers for ${r}:`),n.headers.forEach((u,l)=>{Y.log(` ${l}: ${u}`)});else{let u=n.headers.get("x-request-id")||n.headers.get("x-nf-request-id");Y.log(`Request ID for ${r}: ${u||"N/A"}`)}if(i||Y.error(`Got status ${n.status} for request ${r}`),t.raw){if(!i)throw new Error(`API request failed: ${n.status} ${n.statusText}`);return n}let s=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!i){let u=typeof s=="string"?s:JSON.stringify(s);throw new Error(`API request failed: ${n.status} - ${u}`)}return s},ct=e=>{Y.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(Ie=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(xe=e.constants.NETLIFY_API_TOKEN)},dt=()=>({apiUrl:Ie,token:xe}),he=async(e,t)=>ve()?(Y.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):ge(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),H=async(e,t,r)=>ve()?(Y.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):ge(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var pt=async(e,t)=>ve()?(Y.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):ge(`/api/v1/agent_runners/${e}/sessions/${t}`),ft=(e,t,r)=>ge(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r}}),mt=async(e,t)=>ve()?(Y.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):ge(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),Ue=async(e,t)=>{Y.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var ee=_("ai_gateway"),Me=null;var gt=async()=>{if(Me)return Me;ee.log("Fetching available AI gateway providers");let e=await fetch(`${dt().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return Me=t,ee.log("Cached AI gateway providers",{providerCount:Object.keys(t.providers).length}),t},lr=async(e,t)=>{let o=(await gt()).providers[e];if(!o)return ee.log(`Provider '${e}' not found`),!1;let n=o.models.includes(t);return ee.log(`Model validation for ${e}/${t}`,{isAvailable:n}),n},ht=async({netlify:e,config:t})=>{let r,o,n,i,s=e.constants?.SITE_ID;if(!s)throw new Error("No site id");let u=async()=>{clearTimeout(n),ee.log("Requesting AI gateway information");let l=await 
ft(s,t.id,t.sessionId);if({token:r,url:i}=l,o=l.expires_at?l.expires_at*1e3:void 0,ee.log("Got AI gateway information",{token:!!r,expiresAt:o,url:i}),o){let p=o-Date.now()-6e4;p>0&&(n=setTimeout(()=>{u()},p))}};return await Promise.all([u(),gt()]),{get url(){return i},get token(){return r},isModelAvailableForProvider:lr}};import B from"process";import K from"path";import Re from"fs";import{fileURLToPath as gr}from"url";import{createRequire as hr}from"module";import{execa as yr,execaCommand as qn}from"execa";import{Transform as ur}from"stream";var cr=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),dr=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function pr(){return Object.entries(process.env).filter(([e,t])=>!(!t||cr.has(e)||dr.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function q(e){if(typeof e!="string")return e;let t=pr();if(t.length===0)return e;let r=e;return t.forEach(o=>{let n=new RegExp(fr(o),"g");r=r.replace(n,"******")}),r}function fr(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var te=class extends ur{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,o){let n=t.toString(),i=q(n);o(null,i)}};function yt(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(o,n,i){let s=typeof o=="string"?q(o):o;return typeof n=="function"?t(s,n):t(s,n,i)},process.stderr.write=function(o,n,i){let s=typeof o=="string"?q(o):o;return typeof n=="function"?r(s,n):r(s,n,i)}}var ye=null,_t=e=>(ye&&ye.destroy(),ye=new z({totalAllowedTime:e}),ye),Et=()=>ye;var z=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,o)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let n=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),i=null,s=null;o!==void 0&&(s=new Promise((u,l)=>{i=setTimeout(()=>{l(new Error(`${t} stage exceeded its maximum duration of ${o}ms`))},o)}));try{return s?await Promise.race([r(),s]):await r()}finally{n(),i&&clearTimeout(i)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var wt="netlify-agent-runner-context.md",je="task-history",Ge="netlify-context",k=".netlify",re="results.md",Ye="assets";var Z=600*1e3;var Tt={name:"@netlify/agent-runner-cli",type:"module",version:"1.58.7-alpha",description:"CLI tool for running Netlify agents",main:"./dist/index.js",types:"./dist/index.d.ts",exports:"./dist/index.js",bin:{"agent-runner-cli":"./dist/bin.js","agent-runner-cli-local":"./dist/bin-local.js"},files:["dist/**/*.js","dist/**/*.d.ts","patches","scripts"],scripts:{build:"tsup",dev:"tsup --watch",prepare:"husky install node_modules/@netlify/eslint-config-node/.husky/",prepublishOnly:"npm ci && npm test",prepack:"npm run build",test:"run-s build format test:dev",format:"run-s build format:check-fix:*","format:ci":"run-s build format:check:*","format:check-fix:lint":"run-e format:check:lint format:fix:lint","format:check:lint":"cross-env-shell eslint $npm_package_config_eslint","format:fix:lint":"cross-env-shell eslint --fix $npm_package_config_eslint","format:check-fix:prettier":"run-e format:check:prettier format:fix:prettier","format:check:prettier":"cross-env-shell prettier --check $npm_package_config_prettier","format:fix:prettier":"cross-env-shell prettier --write $npm_package_config_prettier","test:dev":"run-s build test:dev:*","test:ci":"run-s build test:ci:*","test:dev:vitest":"LOG=0 vitest --exclude '**/integration/**'","test:ci:vitest":"LOG=0 c8 -r lcovonly -r text -r json vitest --exclude '**/integration/**'","test:integration":"vitest run test/integration/","test:integration:codex":"vitest run test/integration/codex.test.ts","test:integration:claude":"vitest run test/integration/claude.test.ts","test:integration:gemini":"vitest run test/integration/gemini.test.ts",postinstall:"node scripts/postinstall.js"},config:{eslint:'--cache --format=codeframe --max-warnings=0 "{src,scripts,test,.github}/**/*.{js,ts,md,html}"',prettier:'--ignore-path .gitignore 
--loglevel=warn "{src,scripts,test,.github}/**/*.{js,ts,md,yml,json,html}" "*.{js,ts,yml,json,html}" ".*.{js,ts,yml,json,html}" "!**/package-lock.json" "!package-lock.json"'},keywords:[],license:"MIT",repository:"netlify/agent-runner-cli",bugs:{url:"https://github.com/netlify/agent-runner-cli/issues"},author:"Netlify Inc.",directories:{test:"test"},devDependencies:{"@commitlint/cli":"^20.0.0","@commitlint/config-conventional":"^20.0.0","@eslint/compat":"^2.0.0","@eslint/js":"^9.35.0","@netlify/eslint-config-node":"^7.0.1","@types/node":"^24.5.0","@typescript-eslint/eslint-plugin":"^8.0.0","@typescript-eslint/parser":"^8.0.0","@vitest/eslint-plugin":"^1.6.6",c8:"^10.0.0","eslint-config-prettier":"^10.1.8","eslint-plugin-n":"^17.0.0",husky:"^9.0.0","patch-package":"^8.0.0",tsup:"^8.5.0",typescript:"^5.0.0","typescript-eslint":"^8.44.0",vitest:"^4.0.16"},dependencies:{"@anthropic-ai/claude-code":"2.0.76","@google/gemini-cli":"0.23.0","@netlify/otel":"^5.1.1","@openai/codex":"0.77.0","@opentelemetry/exporter-trace-otlp-grpc":"^0.208.0",execa:"^9.6.1",minimist:"^1.2.8"}};var _r=gr(import.meta.url),Er=K.dirname(_r),wr=hr(import.meta.url),ne=_("shell"),Be=new Set,Tr={preferLocal:!0},F=(e,t,r)=>{let[o,n]=Ir(t,r),i={...Tr,...n},s=yr(e,o,i);xr(s,i),Rr(s);let u=r?.idleTimeout;return u&&u>0&&vr(s,u),s};var Ir=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},xr=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(B.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new te).pipe(B.stdout),e.stdout?.pipe(new te).pipe(B.stdout),e.stderr?.pipe(new te).pipe(B.stderr);return}e.stdout?.pipe(B.stdout),e.stderr?.pipe(B.stderr)},He=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(B.kill(-e.pid,t),ne.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return ne.error("Error killing process:",r),!1}},It=e=>He(e,"SIGKILL"),vr=(e,t)=>{let r=null,o=()=>{ne.log(`Process ${e.pid} killed due to idle timeout (no output for ${t}ms)`),He(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(ne.log(`Force killing idle process ${e.pid}`),It(e))},5e3)},n=()=>{r&&clearTimeout(r),r=setTimeout(o,t)};n(),e.stdout?.on("data",n),e.stderr?.on("data",n);let i=()=>{r&&(clearTimeout(r),r=null)};e.on("exit",i),e.on("error",i)},Rr=e=>{Be.add(e);let t=Et();if(t){let r=t.onTimesUp(()=>{ne.log(`Global timer expired, killing process ${e.pid}`),He(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(ne.log(`Force killing process ${e.pid} after timeout`),It(e))},5e3)});e.on("exit",()=>{Be.delete(e),r()}),e.on("error",()=>{Be.delete(e),r()})}};function oe(e,t){if(!B.env.NETLIFY_LOCAL_MODE)try{let n=wr.resolve(Tt.name),i=K.dirname(n);for(;i!==K.dirname(i);){let s=K.dirname(i);if(K.basename(s)==="node_modules"){let u=K.join(s,".bin",t);if(Re.existsSync(u))return u;break}i=s}}catch(n){console.error("Could not resolve package.json",n)}if(B.env.NODE_PATH){let n=K.join(B.env.NODE_PATH,".bin",t);if(Re.existsSync(n))return n}let r=K.join(e,"node_modules",".bin",t);if(Re.existsSync(r))return r;let o=K.join(Er,"..","node_modules",".bin",t);if(Re.existsSync(o))return o}var Sr=_("utils"),br=e=>new Promise(t=>{setTimeout(t,e)}),xt=(e,t=3e3)=>{let r=!1,o=null,n=[],i=null,s=(...u)=>{if(r)return o=u,new Promise(f=>{n.push(f)});r=!0;let l,p=new Promise(f=>{l=f});return i=(async()=>{await Promise.resolve();let f=await e(...u);for(l(f);;){if(await br(t),!o)return r=!1,i=null,f;let c=o,m=n;o=null,n=[],f=await e(...c),m.forEach(y=>{y(f)})}})(),p};return s.flush=async()=>{if((r||o)&&i)return await 
i,s.flush()},s},ie=(e,t,r=!1)=>{let o=null,n=null,i=null,s=function(...u){n=u,i=this;let l=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(i,n),n=null,i=null)},t),l&&(e.apply(i,n),n=null,i=null)};return s.cancel=()=>{clearTimeout(o),o=null,n=null,i=null},s.flush=()=>{if(o){clearTimeout(o);let u=n,l=i;o=null,n=null,i=null,e.apply(l,u)}},s},vt=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(o){t&&(r?.error?r.error("Could not parse JSON",o):Sr.error("Could not parse JSON",o))}},Rt=(e,t)=>{let n=".netlify.app",i="agent-";if(!t)return`${i}${e.slice(0,6)}`;let u=`--${t}${n}`;if(u.length>55)return"";let l=60-u.length;if(l<=0)return"";if(l>=i.length+6){let p=Math.min(l-i.length,e.length);return`${i}${e.slice(0,p)}`}return e.slice(0,l)};var Ar=50*1024,qe=(e,t=Ar)=>{if(!e||typeof e!="string"||e.length<=t)return e;let o=e.startsWith("```")?"\n... [truncated]\n```":"... [truncated]";return e.slice(0,t)+o};import{Buffer as St}from"buffer";import Nr from"path";var bt=_("repo"),At=async({config:e,isRetry:t})=>{bt.info("Getting runner diffs");let r=await Pr(),{hasChanges:o}=r,{status:n}=r;if(!o)return{hasChanges:!1};if(!t){let I=Or(n);await Fr(I)}bt.info("Changes after processing"),await We();let i=await Je(n);await Ke(i);let s={stdio:["ignore","pipe","pipe"]},u=await F("git",["diff","--staged"],s),l=String(u.stdout??"");if(o=!!l,!o)return{hasChanges:!1,ignored:i};let p=await F("git",["diff","--staged","--binary"],s),f=String(p.stdout??""),c,m;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await F("git",["commit","-m","Agent runner"]);let I=await F("git",["diff",e.sha,"HEAD"],s);c=String(I.stdout??"");let w=await F("git",["diff",e.sha,"HEAD","--binary"],s),g=String(w.stdout??"");c!==g&&(m=St.from(g).toString("base64"))}let y={hasChanges:!0,diff:l,resultDiff:c,ignored:i};return l!==f&&(y.diffBinary=St.from(f).toString("base64")),m&&(y.resultDiffBinary=m),y},Ke=async(e=[])=>{await F("git",["add",".",...e])},We=async()=>{let e=await F("git",["status","-s"]);return String(e.stdout??"")},Nt=/.. (.+)?\.log$/,Cr=[Nt],Pr=async()=>{let e=await We();return{hasChanges:(e.trim().length===0?[]:e.split(`
-
`).filter(o=>
-
`).forEach(o=>{t.forEach(i=>{[`?? ${i}`,`?? ${i}${
-
`).reduce((r,o)=>{if(!o)return r;let[n,i,,...s]=o,u=s.join(""),l=n.trim(),p=i.trim();return r[u]?r[u].change=p:r[u]={filePath:u,stage:l,change:p},r},{});return Object.values(t)},
+
`);return i.length>e.length*.8?e:i}import Pe from"process";import{getTracer as zr}from"@netlify/otel";import ge from"process";var ee=class extends Error{constructor(r,o,n){super(r);this.statusCode=o;this.userMessage=n;this.name="GracefulShutdownError"}},dt=e=>e instanceof ee;var ve=ge.env.NETLIFY_API_URL,Re=ge.env.NETLIFY_API_TOKEN,Y=_("api"),Se=()=>ge.env.NETLIFY_LOCAL_MODE==="true",he=async(e,t={})=>{if(!ve||!Re)throw new Error("No API URL or token");let r=new URL(e,ve),o={...t,headers:{...t.headers,Authorization:`Bearer ${Re}`}};ge.env.AGENT_RUNNERS_DEBUG==="true"&&(o.headers["x-nf-debug-logging"]="true"),t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),i=n.ok&&n.status<=299;if(ge.env.AGENT_RUNNERS_DEBUG==="true")Y.log(`Response headers for ${r}:`),n.headers.forEach((u,l)=>{Y.log(` ${l}: ${u}`)});else{let u=n.headers.get("x-request-id")||n.headers.get("x-nf-request-id");Y.log(`Request ID for ${r}: ${u||"N/A"}`)}if(i||Y.error(`Got status ${n.status} for request ${r}`),t.raw){if(!i)throw new Error(`API request failed: ${n.status} ${n.statusText}`);return n}let s=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!i){let u=typeof s=="string"?s:JSON.stringify(s);throw n.status===404?new ee(`API request failed: 404 - ${u}`,404,"The site associated with this agent run no longer exists."):n.status===403&&t.gracefulOn403?new ee(`API request failed: 403 - ${u}`,403,"Credit limit reached. Please add more credits to continue using Agent Runners."):new Error(`API request failed: ${n.status} - ${u}`)}return s},pt=e=>{Y.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(ve=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(Re=e.constants.NETLIFY_API_TOKEN)},ft=()=>({apiUrl:ve,token:Re}),ye=async(e,t)=>Se()?(Y.log("Mock API: updateRunner called",{runnerId:e,data:t}),{id:e,...t}):he(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),B=async(e,t,r)=>Se()?(Y.log("Mock API: updateRunnerSession called",JSON.stringify({runnerId:e,sessionId:t,data:r},null,2)),{id:e,sessionId:t,...r}):he(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var mt=async(e,t)=>Se()?(Y.log("Mock API: getRunnerSession called",{runnerId:e,sessionId:t}),{id:t,runnerId:e,state:"running"}):he(`/api/v1/agent_runners/${e}/sessions/${t}`),gt=(e,t,r)=>he(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r},gracefulOn403:!0}),ht=async(e,t)=>Se()?(Y.log("Mock API: getDiffUploadUrls called",{runnerId:e,sessionId:t}),{result:{upload_url:"https://s3.mock.com/mock-upload-url-result",s3_key:"mock-s3-key-result"},cumulative:{upload_url:"https://s3.mock.com/mock-upload-url-cumulative",s3_key:"mock-s3-key-cumulative"}}):he(`/api/v1/agent_runners/${e}/sessions/${t}/diff/upload_urls`,{method:"POST"}),Ge=async(e,t)=>{Y.log(`Uploading diff to S3: ${e.substring(0,50)}...`);let r=await fetch(e,{method:"PUT",body:t,headers:{"Content-Type":"text/plain"}});if(!r.ok)throw new Error(`S3 upload failed with status ${r.status}`);return r};var te=_("ai_gateway"),je=null;var yt=async()=>{if(je)return je;te.log("Fetching available AI gateway providers");let e=await fetch(`${ft().apiUrl}/api/v1/ai-gateway/providers`);if(!e.ok)throw new Error(`Failed to fetch AI gateway providers: ${e.statusText}`);let t=await e.json();return je=t,te.log("Cached AI gateway 
providers",{providerCount:Object.keys(t.providers).length}),t},cr=async(e,t)=>{let o=(await yt()).providers[e];if(!o)return te.log(`Provider '${e}' not found`),!1;let n=o.models.includes(t);return te.log(`Model validation for ${e}/${t}`,{isAvailable:n}),n},_t=async({netlify:e,config:t})=>{let r,o,n,i,s=e.constants?.SITE_ID;if(!s)throw new Error("No site id");let u=async()=>{clearTimeout(n),te.log("Requesting AI gateway information");let l=await gt(s,t.id,t.sessionId);if({token:r,url:i}=l,o=l.expires_at?l.expires_at*1e3:void 0,te.log("Got AI gateway information",{token:!!r,expiresAt:o,url:i}),o){let p=o-Date.now()-6e4;p>0&&(n=setTimeout(()=>{u()},p))}};return await Promise.all([u(),yt()]),{get url(){return i},get token(){return r},isModelAvailableForProvider:cr}};import H from"process";import K from"path";import be from"fs";import{fileURLToPath as yr}from"url";import{createRequire as _r}from"module";import{execa as Er,execaCommand as Vn}from"execa";import{Transform as dr}from"stream";var pr=new Set(["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE","NETLIFY_AGENT_RUNNER_ID","NETLIFY_AGENT_RUNNER_SESSION_ID","NETLIFY_AGENT_RUNNER_PROMPT","NETLIFY_AGENT_RUNNER_AGENT","NETLIFY_AGENT_RUNNER_MODEL","NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED","NETLIFY_AGENT_RUNNER_CONTEXT","NETLIFY_AGENT_RUNNER_HAS_REPO","NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED","NETLIFY_AGENT_RUNNER_SHA","NETLIFY_TEAM_TYPE","AGENT_RUNNERS_DEBUG","NETLIFY_TEAM_ID","NETLIFY_AGENT_RUNNER_USER_ID","SITE_NAME"]),fr=new Set(["true","false","undefined","null","deploy","project","claude","gemini","codex",""]);function mr(){return Object.entries(process.env).filter(([e,t])=>!(!t||pr.has(e)||fr.has(t)||!isNaN(Number(t))||t.length<5)).map(([,e])=>e).filter(Boolean)}function q(e){if(typeof e!="string")return e;let t=mr();if(t.length===0)return e;let r=e;return t.forEach(o=>{let n=new RegExp(gr(o),"g");r=r.replace(n,"******")}),r}function gr(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var re=class extends dr{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,o){let n=t.toString(),i=q(n);o(null,i)}};function Et(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(o,n,i){let s=typeof o=="string"?q(o):o;return typeof n=="function"?t(s,n):t(s,n,i)},process.stderr.write=function(o,n,i){let s=typeof o=="string"?q(o):o;return typeof n=="function"?r(s,n):r(s,n,i)}}var _e=null,wt=e=>(_e&&_e.destroy(),_e=new z({totalAllowedTime:e}),_e),Tt=()=>_e;var z=class{constructor({totalAllowedTime:t}){this.withStageTimer=async(t,r,o)=>{if(this.isTimeExpired())throw new Error(`${t} stage did not complete in the allowed time. 
Time has already expired.`);let n=this.onTimesUp(()=>{throw new Error(`${t} stage did not complete in the allowed time.`)}),i=null,s=null;o!==void 0&&(s=new Promise((u,l)=>{i=setTimeout(()=>{l(new Error(`${t} stage exceeded its maximum duration of ${o}ms`))},o)}));try{return s?await Promise.race([r(),s]):await r()}finally{n(),i&&clearTimeout(i)}};this.startTime=Date.now(),this.totalAllowedTime=t,this.globalTimeoutId=null,this.subscribers=[],this.hasTimedOut=!1,this.setupGlobalTimeout()}getElapsedTime(){return Date.now()-this.startTime}getRemainingTime(){let t=this.getElapsedTime(),r=this.totalAllowedTime-t;return Math.max(0,r)}isTimeExpired(){return this.getRemainingTime()===0||this.hasTimedOut}setupGlobalTimeout(){this.globalTimeoutId&&clearTimeout(this.globalTimeoutId),this.globalTimeoutId=setTimeout(()=>{this.notifyTimeUp()},this.totalAllowedTime)}notifyTimeUp(){this.hasTimedOut=!0;for(let t=this.subscribers.length-1;t>=0;t--)try{this.subscribers[t]()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}}onTimesUp(t){if(this.subscribers.push(t),this.hasTimedOut)try{t()}catch(r){console.error("TimeKeeper: Error in time up callback:",r)}return()=>{let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}}off(t){let r=this.subscribers.indexOf(t);r>-1&&this.subscribers.splice(r,1)}clearSubscribers(){this.subscribers.length=0}getSubscriberCount(){return this.subscribers.length}destroy(){this.globalTimeoutId&&(clearTimeout(this.globalTimeoutId),this.globalTimeoutId=null),this.clearSubscribers()}static{this.timeUnits={seconds:t=>t*1e3,minutes:t=>t*60*1e3,hours:t=>t*60*60*1e3}}};var It="netlify-agent-runner-context.md",Ye="task-history",Be="netlify-context",k=".netlify",ne="results.md",He="assets";var Z=1800*1e3;var xt={name:"@netlify/agent-runner-cli",type:"module",version:"1.60.0",description:"CLI tool for running Netlify agents",main:"./dist/index.js",types:"./dist/index.d.ts",exports:"./dist/index.js",bin:{"agent-runner-cli":"./dist/bin.js","agent-runner-cli-local":"./dist/bin-local.js"},files:["dist/**/*.js","dist/**/*.d.ts","patches","scripts"],scripts:{build:"tsup",dev:"tsup --watch",prepare:"husky install node_modules/@netlify/eslint-config-node/.husky/",prepublishOnly:"npm ci && npm test",prepack:"npm run build",test:"run-s build format test:dev",format:"run-s build format:check-fix:*","format:ci":"run-s build format:check:*","format:check-fix:lint":"run-e format:check:lint format:fix:lint","format:check:lint":"cross-env-shell eslint $npm_package_config_eslint","format:fix:lint":"cross-env-shell eslint --fix $npm_package_config_eslint","format:check-fix:prettier":"run-e format:check:prettier format:fix:prettier","format:check:prettier":"cross-env-shell prettier --check $npm_package_config_prettier","format:fix:prettier":"cross-env-shell prettier --write $npm_package_config_prettier","test:dev":"run-s build test:dev:*","test:ci":"run-s build test:ci:*","test:dev:vitest":"LOG=0 vitest --exclude '**/integration/**'","test:ci:vitest":"LOG=0 c8 -r lcovonly -r text -r json vitest --exclude '**/integration/**'","test:integration":"vitest run test/integration/","test:integration:codex":"vitest run test/integration/codex.test.ts","test:integration:claude":"vitest run test/integration/claude.test.ts","test:integration:gemini":"vitest run test/integration/gemini.test.ts",postinstall:"node scripts/postinstall.js"},config:{eslint:'--cache --format=codeframe --max-warnings=0 "{src,scripts,test,.github}/**/*.{js,ts,md,html}"',prettier:'--ignore-path .gitignore 
--loglevel=warn "{src,scripts,test,.github}/**/*.{js,ts,md,yml,json,html}" "*.{js,ts,yml,json,html}" ".*.{js,ts,yml,json,html}" "!**/package-lock.json" "!package-lock.json"'},keywords:[],license:"MIT",repository:"netlify/agent-runner-cli",bugs:{url:"https://github.com/netlify/agent-runner-cli/issues"},author:"Netlify Inc.",directories:{test:"test"},devDependencies:{"@commitlint/cli":"^20.0.0","@commitlint/config-conventional":"^20.0.0","@eslint/compat":"^2.0.0","@eslint/js":"^9.35.0","@netlify/eslint-config-node":"^7.0.1","@types/node":"^24.5.0","@typescript-eslint/eslint-plugin":"^8.0.0","@typescript-eslint/parser":"^8.0.0","@vitest/eslint-plugin":"^1.6.6",c8:"^10.0.0","eslint-config-prettier":"^10.1.8","eslint-plugin-n":"^17.0.0",husky:"^9.0.0","patch-package":"^8.0.0",tsup:"^8.5.0",typescript:"^5.0.0","typescript-eslint":"^8.44.0",vitest:"^4.0.16"},dependencies:{"@anthropic-ai/claude-code":"2.0.76","@google/gemini-cli":"0.23.0","@netlify/otel":"^5.1.1","@openai/codex":"0.79.0","@opentelemetry/exporter-trace-otlp-grpc":"^0.208.0",execa:"^9.6.1",minimist:"^1.2.8"}};var wr=yr(import.meta.url),Tr=K.dirname(wr),Ir=_r(import.meta.url),oe=_("shell"),qe=new Set,xr={preferLocal:!0},$=(e,t,r)=>{let[o,n]=vr(t,r),i={...xr,...n},s=Er(e,o,i);Rr(s,i),br(s);let u=r?.idleTimeout;return u&&u>0&&Sr(s,u),s};var vr=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},Rr=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(H.env.NETLIFY_MASK_LOGS!=="false"){e.all?.pipe(new re).pipe(H.stdout),e.stdout?.pipe(new re).pipe(H.stdout),e.stderr?.pipe(new re).pipe(H.stderr);return}e.stdout?.pipe(H.stdout),e.stderr?.pipe(H.stderr)},Ke=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(H.kill(-e.pid,t),oe.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return oe.error("Error killing process:",r),!1}},vt=e=>Ke(e,"SIGKILL"),Sr=(e,t)=>{let r=null,o=()=>{oe.log(`Process ${e.pid} killed due to idle timeout (no output for ${t}ms)`),Ke(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(oe.log(`Force killing idle process ${e.pid}`),vt(e))},5e3)},n=()=>{r&&clearTimeout(r),r=setTimeout(o,t)};n(),e.stdout?.on("data",n),e.stderr?.on("data",n);let i=()=>{r&&(clearTimeout(r),r=null)};e.on("exit",i),e.on("error",i)},br=e=>{qe.add(e);let t=Tt();if(t){let r=t.onTimesUp(()=>{oe.log(`Global timer expired, killing process ${e.pid}`),Ke(e,"SIGTERM"),setTimeout(()=>{e.pid&&!e.killed&&(oe.log(`Force killing process ${e.pid} after timeout`),vt(e))},5e3)});e.on("exit",()=>{qe.delete(e),r()}),e.on("error",()=>{qe.delete(e),r()})}};function ie(e,t){if(!H.env.NETLIFY_LOCAL_MODE)try{let n=Ir.resolve(xt.name),i=K.dirname(n);for(;i!==K.dirname(i);){let s=K.dirname(i);if(K.basename(s)==="node_modules"){let u=K.join(s,".bin",t);if(be.existsSync(u))return u;break}i=s}}catch(n){console.error("Could not resolve package.json",n)}if(H.env.NODE_PATH){let n=K.join(H.env.NODE_PATH,".bin",t);if(be.existsSync(n))return n}let r=K.join(e,"node_modules",".bin",t);if(be.existsSync(r))return r;let o=K.join(Tr,"..","node_modules",".bin",t);if(be.existsSync(o))return o}var Ar=_("utils"),Nr=e=>new Promise(t=>{setTimeout(t,e)}),Rt=(e,t=3e3)=>{let r=!1,o=null,n=[],i=null,s=(...u)=>{if(r)return o=u,new Promise(f=>{n.push(f)});r=!0;let l,p=new Promise(f=>{l=f});return i=(async()=>{await Promise.resolve();let f=await e(...u);for(l(f);;){if(await Nr(t),!o)return r=!1,i=null,f;let c=o,m=n;o=null,n=[],f=await e(...c),m.forEach(y=>{y(f)})}})(),p};return s.flush=async()=>{if((r||o)&&i)return await 
i,s.flush()},s},se=(e,t,r=!1)=>{let o=null,n=null,i=null,s=function(...u){n=u,i=this;let l=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(i,n),n=null,i=null)},t),l&&(e.apply(i,n),n=null,i=null)};return s.cancel=()=>{clearTimeout(o),o=null,n=null,i=null},s.flush=()=>{if(o){clearTimeout(o);let u=n,l=i;o=null,n=null,i=null,e.apply(l,u)}},s},St=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(o){t&&(r?.error?r.error("Could not parse JSON",o):Ar.error("Could not parse JSON",o))}},bt=(e,t)=>{let n=".netlify.app",i="agent-";if(!t)return`${i}${e.slice(0,6)}`;let u=`--${t}${n}`;if(u.length>55)return"";let l=60-u.length;if(l<=0)return"";if(l>=i.length+6){let p=Math.min(l-i.length,e.length);return`${i}${e.slice(0,p)}`}return e.slice(0,l)};var Cr=50*1024,We=(e,t=Cr)=>{if(!e||typeof e!="string"||e.length<=t)return e;let o=e.startsWith("```")?"\n... [truncated]\n```":"... [truncated]";return e.slice(0,t)+o};import{Buffer as At}from"buffer";import Pr from"path";var Nt=_("repo"),Ct=async({config:e,isRetry:t})=>{Nt.info("Getting runner diffs");let r=await $r(),{hasChanges:o}=r,{status:n}=r;if(!o)return{hasChanges:!1};if(!t){let I=Fr(n);await Dr(I)}Nt.info("Changes after processing"),await Ve();let i=await Xe(n);await Je(i);let s={stdio:["ignore","pipe","pipe"]},u=await $("git",["diff","--staged"],s),l=String(u.stdout??"");if(o=!!l,!o)return{hasChanges:!1,ignored:i};let p=await $("git",["diff","--staged","--binary"],s),f=String(p.stdout??""),c,m;if(e.sha){process.env.NETLIFY_LOCAL_MODE||await $("git",["commit","-m","Agent runner"]);let I=await $("git",["diff",e.sha,"HEAD"],s);c=String(I.stdout??"");let w=await $("git",["diff",e.sha,"HEAD","--binary"],s),g=String(w.stdout??"");c!==g&&(m=At.from(g).toString("base64"))}let y={hasChanges:!0,diff:l,resultDiff:c,ignored:i};return l!==f&&(y.diffBinary=At.from(f).toString("base64")),m&&(y.resultDiffBinary=m),y},Je=async(e=[])=>{await $("git",["add",".",...e])},Ve=async()=>{let e=await $("git",["status","-s"]);return String(e.stdout??"")},Pt=/.. (.+)?\.log$/,Or=[Pt],$r=async()=>{let e=await Ve();return{hasChanges:(e.trim().length===0?[]:e.split(`
+
`).filter(o=>Or.some(i=>i instanceof RegExp?i.test(o):o===i)?!1:o[1]?.trim()!=="")).length!==0,status:e}},Ot=async()=>{let{stdout:e}=await $("git",["rev-parse","HEAD"]);return String(e??"").trim()},$t=async()=>{let{stdout:e}=await $("git",["rev-list","--max-parents=0","HEAD"]);return String(e??"").trim()},Xe=async e=>{e||=await Ve();let t=[".netlify","node_modules","dist"],r=[];return e.split(`
+
`).forEach(o=>{t.forEach(i=>{[`?? ${i}`,`?? ${i}${Pr.sep}`].some(u=>o.startsWith(u))&&r.push(`:!${i}`)});let n=o.match(Pt)?.[1];n&&r.push(`:!${n}.log`)}),r},Ft=async()=>{await $("git",["reset","--hard","HEAD"])},Fr=e=>{let t=e.split(`
+
`).reduce((r,o)=>{if(!o)return r;let[n,i,,...s]=o,u=s.join(""),l=n.trim(),p=i.trim();return r[u]?r[u].change=p:r[u]={filePath:u,stage:l,change:p},r},{});return Object.values(t)},Dr=async e=>{let t=[];for(let r of e)r.stage&&!r.change&&t.push($("git",["restore","--staged","--worktree",r.filePath]));await Promise.allSettled(t)};import Lr from"fs/promises";import Ur from"os";import Lt from"path";import ce from"process";import Mr from"readline";import ze from"path";import kr from"fs/promises";var Ze=_("agent-output-utils");async function ae({initialResult:e,agentName:t,hasError:r}){let o="",n=ze.join(process.cwd(),k,ne);try{let i=await kr.readFile(n,"utf-8");i&&(o=i,Ze.log(`Pulled result from ${ze.relative(process.cwd(),n)}`))}catch{Ze.log(`No results file found at ${ze.relative(process.cwd(),n)}`)}return o||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function le({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,o=r?.replace(/\s+/g," ").trim().toLowerCase()||"",n="";return o?.includes("ai gateway is not available for your account")||o?.includes("ai gateway is not enabled for your account")?n="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":o?.includes("error when talking to gemini api")?n="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(o?.includes("connection closed prematurely")||o?.includes("499")&&t.toLowerCase().includes("gemini"))&&(n=`The ${t} models were currently overloaded. Please try again or use a different available agent.`),o?.includes("request timed out")&&(n=`The ${t} API request's have timed out. Please try again or use a different available agent.`),o?.includes("network error")&&(n=`The ${t} agent is having network issues. 
Please try again or use a different available agent.`),n&&Ze.log(`Providing updated error messsage: ${n}, replacing original error: ${r}`),n||r||void 0}function ue(e){if(!e)return!1;let r=(e&&typeof e=="object"?JSON.stringify(e):e)?.replace(/\s+/g," ").trim().toLowerCase()||"";return!!(r?.includes("error when talking to gemini api")||r?.includes("499")||r?.includes("connection closed prematurely")||r?.includes("request timed out")||r?.includes("network error"))}var L=_("runner_claude"),Dt="Claude Code",de="claude-opus-4-5-20251101",kt=e=>({Task:"Task",Bash:"Run command",Glob:"Find files",Grep:"Search files",LS:"List directory",ExitPlanMode:"Exit planning",Read:"Read file",Edit:"Edit file",MultiEdit:"Edit multiple files",Write:"Coding",NotebookEdit:"Edit notebook",WebFetch:"Fetch webpage",TodoWrite:"Update task list",WebSearch:"Search web",BashOutput:"Get command output",KillBash:"Stop command"})[e]||e,Gr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(L.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(L.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(L.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function Qe({config:e,netlify:t,persistSteps:r,aiGateway:o,continueSession:n,priorAgentSessionId:i,cwd:s=ce.cwd()}){let u=e,{accountType:l,prompt:p,modelVersionOverrides:f}=u,{model:c}=u,m="";if(o){let{token:h,url:d}=o;if(!h||!d)throw new Error("No token or url provided from AI Gateway");if(f?.claude){let a=f?.claude?.[l];if(a){if(!await o.isModelAvailableForProvider("anthropic",a))throw new Error(`Model override '${a}' is not available for anthropic provider`);c=a}}else if(c){if(!await o.isModelAvailableForProvider("anthropic",c))throw new Error(`Model '${c}' is not available for anthropic provider`)}else!!de&&await o.isModelAvailableForProvider("anthropic",de)?(c=de,L.log(`Using default model: ${de}`)):de&&L.log(`Default model ${de} is not available, proceeding without model specification`);ce.env.ANTHROPIC_API_KEY=h,ce.env.ANTHROPIC_BASE_URL=d}else if(!ce.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let y=[],I=[],w={},g=0,T=0,R,S,N=[ie(s,"claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose",...c?["--model",c]:[],...n?["--continue"]:[],...n&&i?["--resume",i]:[],"-p",p],C=`${ce.env.NVM_BIN}/node`;L.log(`Running ${C} ${N.join(" ")}`);let O=t.utils.run(C,N,{all:!0,env:ce.env,cwd:s,idleTimeout:Z});O.stdin?.end();let x=se(()=>{r?.({steps:y,duration:T})},250),v=(h,d)=>{let a={...h,id:g};g+=1,I.push(a),y.push(a),d||x.flush(),x(),d&&x.flush()},M=Mr.createInterface({input:O.all});return M.on("error",h=>{L.error("Readline interface error",{error:h.message,stack:h.stack})}),M.on("line",h=>{let d=null;try{d=JSON.parse(h)}catch{L.log("Could not parse line",h)}d?.session_id&&d.session_id!==m&&(m=d.session_id),Array.isArray(d?.message?.content)?d.message.content.forEach(a=>{switch(a.type){case"text":{a.text&&v({message:a.text});break}case"image":{typeof a.source=="object"&&a.source&&a.source.type==="base64"&&a.source.media_type?v({message:``}):L.log(`Unsupported image type 
${a.source?.type}`,a.source);break}case"tool_use":{if(a.name==="Task"){let E=a.input?.description&&`\`${a.input.description}\``;v({title:[kt(a.name),E].filter(Boolean).join(" ")})}else a.id&&(w[a.id]=a);x.flush();break}case"tool_result":{let E=a.tool_use_id?w[a.tool_use_id]:void 0,V;if(E){let W=E.input?.file_path&&Lt.relative(s,E.input.file_path),P=W&&`\`${W}\``;V=[kt(E.name||""),P].filter(Boolean).join(" ")}let xe=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(E?.name||""),X;if(typeof a.content=="string")X=a.content;else if(Array.isArray(a.content)){let W=[];a.content.forEach(P=>{P?.type==="text"&&typeof P.text=="string"?W.push(P.text):P?.type==="image"&&typeof P.source=="object"&&P.source?P.source.type==="base64"&&P.source.media_type?W.push(``):L.log(`Unsupported image type ${P.source.type}`,P.source):L.log(`Unsupported block type ${P?.type}`)}),X=W.join(`
-
`)}
+
`)}xe&&X&&(X=`\`\`\`
${X.trim()}
-
\`\`\``),v({title:V,message:X},!0);break}case"thinking":{a.thinking&&v({title:"Thinking",message:a.thinking},!0);break}default:L.log(`Message content type is not supported ${a.type}`,a)}}):d?.type==="result"&&(T=d.duration_ms||0,d.is_error?S=d.result:R=d.result,[I,y].forEach(a=>{a[a.length-1]?.message===R&&a.pop()}))}),await O.catch(h=>{({error:S,result:R}=
+
\`\`\``),v({title:V,message:X},!0);break}case"thinking":{a.thinking&&v({title:"Thinking",message:a.thinking},!0);break}default:L.log(`Message content type is not supported ${a.type}`,a)}}):d?.type==="result"&&(T=d.duration_ms||0,d.is_error?S=d.result:R=d.result,[I,y].forEach(a=>{a[a.length-1]?.message===R&&a.pop()}))}),await O.catch(h=>{({error:S,result:R}=Gr({catchError:h,runCmd:O,error:S,result:R,runnerName:"Claude"}))}),M.close(),x.flush(),{steps:I,duration:T,result:await ae({initialResult:R,agentName:Dt,hasError:!!S}),error:le({error:S,agentName:Dt}),isRetryableError:ue(S),agentSessionId:m}}var Ut=async()=>{let e=Lt.join(Ur.homedir(),".claude");await Lr.rm(e,{recursive:!0,force:!0})};import Ee from"fs/promises";import Gt from"os";import Ae from"path";import Q from"process";import jr from"readline";var U=_("runner_codex"),Mt="Codex CLI",pe="gpt-5.2",Yr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(U.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(U.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(U.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function et({config:e,netlify:t,persistSteps:r=void 0,sendSteps:o=void 0,aiGateway:n,cwd:i=Q.cwd()}){let{accountType:s,prompt:u,modelVersionOverrides:l}=e,{model:p}=e;if(n){let{token:d,url:a}=n;if(!d||!a)throw new Error("No token or url provided from AI Gateway");if(l?.codex){let E=l?.codex?.[s];if(E){if(!await n.isModelAvailableForProvider("openai",E))throw new Error(`Model override '${E}' is not available for openai provider`);p=E}}else if(p){if(!await n.isModelAvailableForProvider("openai",p))throw new Error(`Model '${p}' is not available for openai provider`)}else!!pe&&await n.isModelAvailableForProvider("openai",pe)?(p=pe,U.log(`Using default model: ${pe}`)):pe&&U.log(`Default model ${pe} is not available, proceeding without model specification`);Q.env.OPENAI_API_KEY=d,Q.env.OPENAI_BASE_URL=a}else if(!Q.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let f=[],c=[],m=[],y={},I=0,w=0,g,T,R=`${Q.env.NVM_BIN}/node`,S=Ae.join(Gt.homedir(),".codex"),N=Ae.join(S,"config.toml"),C=Ae.join(S,"auth.json");try{await Ee.mkdir(S,{recursive:!0});let d={OPENAI_API_KEY:Q.env.OPENAI_API_KEY};await Ee.writeFile(C,JSON.stringify(d,null,2),"utf-8"),U.log("Created Codex auth.json file");let a="";try{a=await Ee.readFile(N,"utf-8")}catch{}a.includes("web_search_request")||(a.includes("[features]")?a=a.replace(/\[features\]/,`[features]
web_search_request = true`):a+=`
[features]
web_search_request = true
-
`,await
+
`,await Ee.writeFile(N,a,"utf-8"),U.log("Updated Codex config with web_search_request enabled"))}catch(d){throw U.warn("Failed to setup Codex config and credentials",{error:d.message}),new Error(`Codex setup failed: ${d.message}`)}let O=[ie(i,"codex"),"exec","--yolo","--json","--enable","web_search_request",...p?["--model",p]:[],u].filter(Boolean);U.log(`Running ${R} ${O.join(" ")}`);let x=t.utils.run(R,O,{all:!0,cwd:i,env:{...Q.env},idleTimeout:Z}),v=se(()=>{r?.({steps:f,duration:w}),o?.({steps:c,duration:w}),c=[]},250),M=(d,a)=>{d.id=I,I+=1,m.push(d),f.push(d),c.push(d),a||v.flush(),v(),a&&v.flush()},h=jr.createInterface({input:x.all});return h.on("error",d=>{U.error("Readline interface error",{error:d.message,stack:d.stack})}),h.on("line",d=>{let a=null;try{a=JSON.parse(d)}catch{U.log("Could not parse line",d);return}if(a?.duration_ms&&(w=a.duration_ms),a?.type==="item.started"&&a?.item?.type==="command_execution")y[a.item.id]=a.item;else if(a?.type==="item.completed"&&a?.item?.type==="command_execution"){let E=Hr(a.item);E&&M(E,!0)}else if(a?.type==="item.completed"&&a?.item?.type==="reasoning"){let E={title:"Reasoning",message:a.item.text};M(E,!0)}else if(a?.type==="local_shell_call")y[a.call_id]=a;else if(a?.type==="local_shell_call_output"){let E=qr(y[a.call_id],a);E&&M(E,!0)}else a?.type==="message"&&a.role==="assistant"?g=a.content.map(E=>E.text).join(`
`):a?.type==="message"&&a.role==="system"&&(T=a.content.map(E=>E.text).join(`
-
`))}),await x.catch(d=>{let a=
+
`))}),await x.catch(d=>{let a=Yr({catchError:d,runCmd:x,error:T,result:g,runnerName:"Codex"});T=a.error,g=a.result}),h.close(),v.flush(),{steps:m,duration:w,result:await ae({initialResult:g,agentName:Mt,hasError:!!T}),error:le({error:T,agentName:Mt}),isRetryableError:ue(T)}}var jt=async()=>{let e=Ae.join(Gt.homedir(),".codex");await Ee.rm(e,{recursive:!0,force:!0})},Br=new Set(["bash","-lc"]),Hr=e=>{if(!e||e.type!=="command_execution")return null;let t=e.command;t.startsWith("bash -lc ")&&(t=t.replace(/^bash -lc ['"]/,"").replace(/['"]$/,""));let r=`Running \`${t}\``,o=e.aggregated_output?.trim();return o&&(o=`\`\`\`
${o}
\`\`\``),e.status==="failed"&&e.exit_code!==0&&(o=o?`${o}
-
*Exit code: ${e.exit_code}*`:`*Command failed with exit code: ${e.exit_code}*`),{title:r,message:o}},
+
*Exit code: ${e.exit_code}*`:`*Command failed with exit code: ${e.exit_code}*`),{title:r,message:o}},qr=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(i=>!Br.has(i)),o=r?`Running \`${r.join(" ")}\``:void 0,n;try{n=JSON.parse(t.output).output?.trim(),n&&(n=`\`\`\`
${n.trim()}
-
\`\`\``)}catch(i){U.error("Could not decode outputMsg",i,t.output)}return{title:o,message:n}};import
+
\`\`\``)}catch(i){U.error("Could not decode outputMsg",i,t.output)}return{title:o,message:n}};import Ne from"fs/promises";import Bt from"os";import Ce from"path";import fe from"process";import Kr from"readline";var G=_("runner_gemini"),Yt="Gemini CLI",me="",Wr=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(G.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(G.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(G.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),Jr={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"},Vr=async()=>{let e=Ce.join(Bt.homedir(),".gemini"),t=Ce.join(e,"settings.json");try{await Ne.mkdir(e,{recursive:!0});let r={};try{let o=await Ne.readFile(t,"utf-8");r=JSON.parse(o)}catch{G.log("Creating new Gemini CLI settings file")}r.general||(r.general={}),r.general.previewFeatures||(r.general.previewFeatures=!0),r.model||(r.model={}),r.model.compressionThreshold!==.3&&(r.model.compressionThreshold=.3),await Ne.writeFile(t,JSON.stringify(r,null,2),"utf-8"),G.log("Configured Gemini CLI settings (preview features and compression threshold)")}catch(r){G.error("Failed to ensure Gemini CLI settings",{error:r.message})}};async function tt({config:e,netlify:t,persistSteps:r=void 0,sendSteps:o=void 0,aiGateway:n,cwd:i=fe.cwd()}){let{accountType:s,prompt:u,modelVersionOverrides:l}=e,{model:p}=e;if(await Vr(),n){let{token:h,url:d}=n;if(!h||!d)throw new Error("No token or url provided from AI Gateway");if(l?.gemini){let a=l?.gemini?.[s];if(a){if(!await n.isModelAvailableForProvider("gemini",a))throw new Error(`Model override '${a}' is not available for gemini provider`);p=a}}if(!p)!!me&&await n.isModelAvailableForProvider("gemini",me)?(p=me,G.log(`Using default model: ${me}`)):me&&G.log(`Default model ${me} is not available, proceeding without model specification`);else if(p&&!l?.gemini?.[s]&&!await n.isModelAvailableForProvider("gemini",p))throw new Error(`Model '${p}' is not available for gemini provider`);fe.env.GEMINI_API_KEY=h,fe.env.GOOGLE_GEMINI_BASE_URL=d}else if(!fe.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let f=[],c=[],m=[],y={},I=0,w=0,g,T,R=[ie(i,"gemini"),...p?["--model",p]:[],"--yolo","--output-format","stream-json","-p",u],S=`${fe.env.NVM_BIN}/node`;G.log(`Running ${S} ${R.join(" ")}`);let N=t.utils.run(S,R,{all:!0,env:fe.env,cwd:i,idleTimeout:Z});N.stdin?.end();let C=se(()=>{r?.({steps:f,duration:w}),o?.({steps:c,duration:w}),c=[]},250),O=(h,d)=>{h.id=I,I+=1,m.push(h),f.push(h),c.push(h),d||C.flush(),C(),d&&C.flush()},x=Kr.createInterface({input:N.all});x.on("error",h=>{G.error("Readline interface error",{error:h.message,stack:h.stack})});let v="",M=()=>{v&&O({message:v.trim()}),v=""};return x.on("line",h=>{let d=null;try{if(h.startsWith("[API Error")){let a=h.match(/\[api error: (.+?)]$/i)?.[1];d={type:"error",value:St(a,!1)?.error?.message||a||"Gemini encountered error"}}else 
d=JSON.parse(h)}catch{return}if(d)switch(["message","result"].includes(d.type)||M(),d.type){case"message":{d.role!=="user"&&d.content&&(v+=d.content);break}case"tool_use":{let a=Jr[d.tool_name]??d.tool_name,E=d.parameters?.file_path,V=E&&Ce.relative(i,E),xe=d.parameters?.command,W={title:[a,V&&`\`${V}\``,xe&&`\`${xe}\``].filter(Boolean).join(" ")};y[d.tool_id]=W,C.flush();break}case"tool_result":{let a=y[d.tool_id];a&&(d.output&&(a.message=`\`\`\`
${d.output.trim()}
-
\`\`\``),O(a,!0));break}case"result":{w=d.stats?.duration_ms,d.status==="error"?T=d.error?.message:g=v.trim();break}case"error":{T=d.error;break}case"finished":break;default:{
+
\`\`\``),O(a,!0));break}case"result":{w=d.stats?.duration_ms,d.status==="error"?T=d.error?.message:g=v.trim();break}case"error":{T=d.error;break}case"finished":break;default:{G.warn("Unhandled message type:",d.type);break}}}),await N.catch(h=>{({error:T,result:g}=Wr({catchError:h,runCmd:N,error:T,result:g,runnerName:"Gemini"}))}),x.close(),C.flush(),{steps:m,duration:w,result:await ae({initialResult:g,agentName:Yt,hasError:!!T}),error:le({error:T,agentName:Yt}),isRetryableError:ue(T)}}var Ht=async()=>{let e=Ce.join(Bt.homedir(),".gemini");await Ne.rm(e,{recursive:!0,force:!0})};var Xr={codex:{runner:et,clean:jt},claude:{runner:Qe,clean:Ut},gemini:{runner:tt,clean:Ht}},qt=Xr;var Kt=_("init_stage"),Wt=async({config:e,apiThrottle:t,apiToken:r,runnerVersion:o})=>await b(zr(),"init-stage",async n=>{let i=performance.now();n?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.runnerVersion":o||"unknown"});let s=qt[e.runner];if(!s)throw n?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let u=Zr({apiToken:r});pt(u);let l=e.useGateway?await _t({netlify:u,config:e}):void 0;n?.setAttributes({"init.aiGateway.created":!!l});let p=5*1024,f=Rt(async({steps:I=[],duration:w})=>{let g=I.map(T=>{let R=T.title?We(q(T.title),p):void 0,S=T.message?We(q(T.message)):void 0;return{...T,title:R,message:S}});I.length=0;try{return await B(e.id,e.sessionId,{steps:g,duration:w})}catch(T){Kt.error("persistSteps failed",{error:T?.message||T})}},t);Kt.info("Adding build files to stage");let c=await Xe();await Je(c);let m;e.hasRepo?e.sha?(m=e.sha,n?.setAttributes({"init.sha.source":"provided"})):(m=await Ot(),await ye(e.id,{sha:m}),n?.setAttributes({"init.sha.source":"current_commit"})):(m=await $t(),n?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let y=performance.now()-i;return n?.setAttributes({"init.sha":m||"unknown","init.duration.ms":y,"init.status":"success"}),{aiGateway:l,context:u,persistSteps:f,runner:s,sha:m}}),Zr=({apiToken:e})=>({constants:{NETLIFY_API_HOST:Pe.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||Pe.env.NETLIFY_API_TOKEN,SITE_ID:Pe.env.SITE_ID,FUNCTIONS_DIST:Pe.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:$}});import{getTracer as rt}from"@netlify/otel";import Qr from"crypto";import J from"fs/promises";import D from"path";import j from"process";var F=_("context"),en=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:j.env.NETLIFY_TEAM_ID,userId:j.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:j.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},tn=10,rn=async e=>{let{name:t,ext:r}=D.parse(e),o=e,n=D.join(j.cwd(),k,o),i=0;for(;await nn(n);){if(i>=tn)throw new Error("Failed to generate context file");o=`${t}-${Qr.randomUUID().slice(0,5)}${r}`,n=D.join(j.cwd(),k,o),i+=1}return o},nn=async e=>{try{return await J.access(e),!0}catch{return!1}},on=async()=>{try{F.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return F.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(o=>o&&typeof o=="object"&&o.key==="catchall-consumer");return r?!r.contextScopes||typeof 
r.contextScopes!="object"?(F.warn("Catchall consumer missing or invalid contextScopes"),null):r:(F.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?F.warn("Netlify features context request timed out"):F.warn("Failed to fetch Netlify features context:",e.message),null}},sn=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let o=await r.text();return await J.writeFile(t,o,"utf-8"),!0}catch(r){return r.name==="AbortError"?F.warn(`Download timeout for ${e}`):F.warn(`Failed to download context file ${e}:`,r.message),!1}},Oe=null,an=async()=>{if(Oe)return Oe;let e=await on();if(!e)return[];let t=D.join(j.cwd(),k,Be);await J.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([n,i])=>{if(!i||typeof i!="object"||!i.endpoint||!i.scope)return F.warn(`Invalid scope data for ${n}, skipping...`),null;let s=`${n}.md`,u=D.join(t,s),l=D.join(k,Be,s);return F.log(`Downloading ${i.scope} context...`),await sn(i.endpoint,u)?(F.log(`Downloaded: ${l}`),{scope:i.scope,path:l,key:n}):null});return Oe=(await Promise.all(r)).filter(n=>n!==null),Oe},Jt=async({cliPath:e,netlify:t,config:r,buildErrorContext:o})=>{let n=en(t),i=await rn(It),s=D.join(j.cwd(),k);await J.mkdir(s,{recursive:!0});let u=D.join(k,i),l=D.join(j.cwd(),u),p=D.join(j.cwd(),k,ne);try{await J.unlink(p),F.log(`Deleted old results file: ${p}`)}catch{}let f=o?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
Your task is to analyze and fix the build errors.
Don't apply techniques of reverting changes. Apply fixes related to errors.
Don't try to run build by yourself. Just fix the errors.
@@ -38,7 +38,7 @@ ${r.siteContext.filter(g=>g.site_context).map(g=>typeof g.site_context=="string"

`)}
</project_rules>
-
`);let m="";if(r.sessionHistoryContext?.length){let g=D.join(
+
`);let m="";if(r.sessionHistoryContext?.length){let g=D.join(j.cwd(),k,Ye);await J.mkdir(g,{recursive:!0});let T=await Promise.all(r.sessionHistoryContext.map(async(R,S)=>{let N=S+1,C=`attempt-${N}.md`,O=D.join(g,C),x=D.join(k,Ye,C),v=`# Task History - Attempt ${N}
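The added line above, together with the context lines that follow, shows the new session-history handling: each prior attempt is written out as its own `attempt-N.md` file containing the earlier request and response, and the generated files are then referenced from the `<session_history_context>` block of the prompt. A rough de-minified sketch of that loop; variable names and the `contextDir`/`historySubdir` parameters are stand-ins for the bundle's minified constants:

```js
import fs from "node:fs/promises";
import path from "node:path";

// Write one markdown file per prior attempt and return their relative paths.
async function writeSessionHistory(sessionHistoryContext, contextDir, historySubdir) {
  const dir = path.join(process.cwd(), contextDir, historySubdir);
  await fs.mkdir(dir, { recursive: true });

  return Promise.all(
    sessionHistoryContext.map(async (entry, index) => {
      const attempt = index + 1;
      const fileName = `attempt-${attempt}.md`;
      const body = [
        `# Task History - Attempt ${attempt}`,
        "",
        "## Request - what the user asked for",
        entry.request,
        "",
        "## Response - what the agent replied with after its work",
        "",
        entry.response,
      ].join("\n");

      await fs.writeFile(path.join(dir, fileName), body, "utf-8");
      // The new code additionally logs "Created history file: <relative path>".
      return path.join(contextDir, historySubdir, fileName);
    })
  );
}
```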

## Request - what the user asked for
${R.request}
@@ -48,7 +48,7 @@ ${R.request}
## Response - what the agent replied with after its work

${R.response}
-
`;return await J.writeFile(O,v,"utf-8")
+
`;return await J.writeFile(O,v,"utf-8"),F.log(`Created history file: ${x}`),x}));m+=`
<session_history_context>
History of prior work on this task.
You MUST review ALL of the files below as context to understand the context of previous attempts. Use this information to continue the discussion appropriately.
@@ -57,7 +57,7 @@ ${R.response}
`)}

</session_history_context>
-
`}let y=await
+
`}let y=await an(),I="";y.length>0&&(I=`
<netlify_features_context>
If the user request is explicitly related to a specific Netlify feature (e.g., Netlify Forms, Netlify Functions, etc.), you MUST review the relevant documentation below in addition to reviewing the project files.
DO NOT force the use of any Netlify feature if the user request does not explicitly require it or if the project has alternative implementations in place already.
@@ -80,15 +80,15 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
<requirements>
<responses>
- Do not speak in first person. You may speak as "the agent".
-
- When work is complete, write a changes summary in ${s}/${
-
- If the user's request is informational in nature (asking for output, status, information, or analysis rather than asking you to make changes), write the requested information directly to the ${s}/${
+
- When work is complete, write a changes summary in ${s}/${ne} as a standalone PR description. Explain what was accomplished and why (avoid too many implementation details), assuming the reader has no prior context. Use past tense and write in prose without calling it a "PR", "Changelog", etc. This is the core of a PR message or summary page that already has a heading.
+
- If the user's request is informational in nature (asking for output, status, information, or analysis rather than asking you to make changes), write the requested information directly to the ${s}/${ne} file.
- Do not attempt to create git commits, PRs, etc. directly. You can use git to review information if required but the system that runs this agent will handle creating PRs or commits of the changes it performs.
- NEVER look into the \`.git\` folder
- NEVER print potentially sensitive values (like secrets) in the planning output or results
</responses>
<attachements>
-
- for requests that require work with attachments or assets, take into account that uploaded attachments are stored in ${s}/${
-
- move assets from ${s}/${
+
- for requests that require work with attachments or assets, take into account that uploaded attachments are stored in ${s}/${He} folder
+
- move assets from ${s}/${He} folder to the project assets folder if they are referenced in a code or applied changes
</attachements>
${c}
</requirements>
@@ -102,7 +102,7 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
- Netlify Functions directory: ${n.functionsDir}
</metadata>
<environment>
-
- Node Version: ${
+
- Node Version: ${j.version||"unknown"}
- Environment variables are set globally (e.g. \`echo $VARIABLE_NAME\` can be used to check if a var is set).
- 'netlify-cli' npm package is already available as a global package. Don't try to install it again
- If you need to start a local development server in order to fulfill the request, try using the Netlify CLI over by running the shell command '${e} dev'. This will start a local HTTP server on port 8888, including live-reloading of any changes and, most critically, it offers local emulation for all Netlify features.
@@ -115,7 +115,7 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
</extra_context>

${m}
-
`;return await J.writeFile(l,w,"utf-8")
+
`;return await J.writeFile(l,w,"utf-8"),F.log(`Generated agent context document at: ${l}`),w.length>5e5&&(w=`
You're an AI agent designed to assist with tasks related to a Netlify project. Please review, understand, and use the context provided to complete the user's request as needed.

<request>
@@ -126,17 +126,17 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
</request>

Use the following file for the complete context of the ask, the environment, and what's available. ${l} You MUST READ ALL OF IT. Make sure to read it first. Never cite or paraphrase private context.
-
`),w};var
+
`),w};var ln=_("prompt"),Vt=async({cliPath:e,config:t,netlify:r,buildErrorContext:o})=>{let n=await Jt({cliPath:e,config:t,netlify:r,buildErrorContext:o});return process.env.AGENT_RUNNER_DEBUG&&ln.log("Contextful Prompt:",n),{prompt:n}};var $e=_("inference_stage"),Xt=5,Fe=async e=>{let{cliPath:t,config:r,context:o,buildErrors:n,runner:i,persistSteps:s,aiGateway:u,attempt:l,contextPrefix:p,priorAgentSessionId:f}=e;$e.log(`Running inference stage, attempt ${l} of ${Xt}`);let c=await b(rt(),"inference-stage",async m=>{m?.setAttributes({"inference.attempt":l||1}),Et();let{prompt:y}=await b(rt(),"compose-prompt",async()=>await Vt({cliPath:t,config:r,buildErrorContext:un(n),netlify:o})),I=`
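The changed lines 118 and 129 add logging of where the context document was written and a size guard on the composed prompt: the full context is always written to a file, and when the assembled prompt exceeds 500,000 characters (`5e5` in the bundle) the inline prompt is swapped for a short pointer telling the agent to read that file. A hedged sketch of the guard; the function name, parameters, and the exact wording of the fallback text are approximations:

```js
import fs from "node:fs/promises";

// `contextFilePath` is where the full context document has just been written.
// The 5e5 character threshold comes straight from the minified source.
async function finalizePrompt(prompt, contextFilePath, log) {
  await fs.writeFile(contextFilePath, prompt, "utf-8");
  log(`Generated agent context document at: ${contextFilePath}`);

  if (prompt.length > 5e5) {
    // Fall back to a short pointer prompt instead of an oversized inline
    // context; the agent is told to read the file before starting work.
    return [
      "You're an AI agent designed to assist with tasks related to a Netlify project.",
      `Use the following file for the complete context of the ask: ${contextFilePath}`,
      "You MUST READ ALL OF IT before starting work.",
    ].join("\n");
  }
  return prompt;
}
```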
${p||""}
${y}
-
`.trim(),w={...r,prompt:I},g=await b(
+
`.trim(),w={...r,prompt:I},g=await b(rt(),`run-${r.runner}`,async()=>await i({aiGateway:u,config:w,netlify:o,persistSteps:s,continueSession:!!(l&&l>1),priorAgentSessionId:f}));return g.result&&(g.result=q(g.result)),g.error&&(g.error=q(g.error)),await s.flush(),g});if(c.error){if($e.error("Runner failed",{stepsCount:c.steps.length,duration:c.duration,error:c.error,isRetryableError:c.isRetryableError,attempt:l||1,agentSessionId:c.agentSessionId}),c.isRetryableError&&(!l||l<Xt))return $e.log("Retrying inference stage"),await new Promise(y=>setTimeout(y,5e3)),{runnerResult:(await Fe({...e,attempt:(l||1)+1,priorAgentSessionId:c.agentSessionId,contextPrefix:c.agentSessionId?"":"<important> The agent has already started on this work but ran into networking errors trying to complete it. Please continue from where it left off (you can use git commands to see what's currently changed thus far) and do not start over. Here is the full prompt for context: </important>"})).runnerResult};throw $e.log("Do not retry inference stage"),new Error(c.error)}return{runnerResult:c}},un=e=>!e||e.length===0?"":`
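The rewritten inference stage visible in the added line 132 retries the agent run when the runner reports a retryable error: up to 5 attempts (`Xt` in the bundle), a 5-second pause between them, and on retry it either resumes the prior agent session or prepends a note telling the agent to continue from where it left off rather than start over. A simplified sketch of that control flow; function and field names approximate the minified originals:

```js
const MAX_ATTEMPTS = 5; // matches Xt in the bundle

async function runInference(runAgent, options, attempt = 1, priorAgentSessionId) {
  const result = await runAgent({ ...options, attempt, priorAgentSessionId });
  if (!result.error) return result;

  if (result.isRetryableError && attempt < MAX_ATTEMPTS) {
    // Give transient failures (e.g. networking errors) a moment to clear.
    await new Promise((resolve) => setTimeout(resolve, 5000));
    // Resume the same agent session when one exists; otherwise the bundle
    // prefixes the prompt with a note that work has already started.
    return runInference(runAgent, options, attempt + 1, result.agentSessionId);
  }
  throw new Error(result.error);
}
```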
Deploy failed failed. Here are the errors to review on the latest build:

Below are all of the logs with potential issues that we extracted. Some of them may be false positives, discern them carefully and ensure fixes are relevant.

${e.pop()}
-
`;import
-
Preview deploy created successfully:`,{deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id});let m={deployId:c.deploy_id,previewUrl:c.deploy_url,logsUrl:c.logs,siteId:c.site_id};return t||(m.sourceZipFilename=c.source_zip_filename),m}catch(l){throw
+
`;import pn from"process";import{getTracer as nt}from"@netlify/otel";import{getTracer as cn}from"@netlify/otel";var we=_("deploy"),zt=async e=>await b(cn(),"create-preview-deploy",async t=>dn(e,t)),dn=async({netlify:e,hasRepo:t,skipBuild:r,message:o="Agent Preview",deploySubdomain:n,cliPath:i,filter:s},u)=>{try{let l=["deploy","--message",`"${o}"`,"--json","--draft","--verbose"];t||(we.log("Deploy: Uploading source zip"),l.push("--upload-source-zip")),n&&l.push("--alias",n),s&&l.push("--filter",s),r?(we.log("Deploy: Skipping build"),l.push("--no-build")):l.push("--context","deploy-preview");let p=i||"netlify";we.log(`Running: ${p} ${l.join(" ")}`),u?.setAttributes({cmd:p,args:l});let f=await e.utils.run(p,l,{stdio:["ignore","pipe","pipe"]}),c=JSON.parse(String(f.stdout??"").trim());u?.setAttributes({success:!0,deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id}),we.log(`
+
Preview deploy created successfully:`,{deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id});let m={deployId:c.deploy_id,previewUrl:c.deploy_url,logsUrl:c.logs,siteId:c.site_id};return t||(m.sourceZipFilename=c.source_zip_filename),m}catch(l){throw we.error("Failed to create preview deploy via CLI:",l),u?.setAttributes({success:!1,error:l.message}),l}};var Te=_("deploy_stage"),ot=async e=>await b(nt(),"run-deploy-stage",async()=>fn(e)),fn=async({cliPath:e,config:t,context:r,result:o,filter:n,isRetry:i})=>{let s=await b(nt(),"get-runner-diffs",async()=>await Ct({config:t,isRetry:i}));if(Te.info("Resolved git",{hasChanges:s.hasChanges,ignored:s.ignored??[]}),!s.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:u,resultDiff:l,diffBinary:p,resultDiffBinary:f}=s,c=!0;Te.log("Preview deploy condition check:",{resultUndefined:o===void 0,resultType:typeof o,hasChanges:c,wouldCreatePreview:o!==void 0&&c});let m=null;if(o!==void 0&&c)try{let y;try{let I=await b(nt(),"get-runner-session",async()=>await mt(t.id,t.sessionId));I?.title&&(y=I.title)}catch(I){Te.warn("Failed to fetch session title, using fallback message:",I.message)}await B(t.id,t.sessionId,{steps:[{title:"Deploying the run preview"}]}),m=await zt({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:y,skipBuild:!1,deploySubdomain:bt(t.id,pn.env.SITE_NAME),filter:n})}catch(y){return Te.warn("Failed to create preview deploy (continuing with agent run):",y),{diff:u,resultDiff:l,hasChanges:c,previewInfo:null,diffBinary:p,resultDiffBinary:f,deployError:y instanceof Error?y.message:String(y)}}return Te.log("Git status",{hasDiff:!!u,hasChanges:c}),{diff:u,resultDiff:l,hasChanges:c,previewInfo:m,diffBinary:p,resultDiffBinary:f}};import{getTracer as ke}from"@netlify/otel";async function Zt(e,t){let{maxRetries:r,baseDelay:o,onRetry:n}=t,i;for(let s=1;s<=r;s++)try{return await e()}catch(u){if(i=u,s===r)throw i;n&&n(s,i),await new Promise(l=>setTimeout(l,o*s))}throw i}var De=class{scanDiffForForms(t){let r=[],o=null,n=[],i=t.split(`
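The new deploy helper in lines 138-139 shells out to the Netlify CLI to create a draft (preview) deploy and reads the result from its `--json` output. A rough sketch of that invocation using `execa` (a declared dependency of this package); the flags below all appear verbatim in the minified code, while the function and option names are mine:

```js
import { execa } from "execa";

async function createPreviewDeploy({ cliPath = "netlify", message = "Agent Preview",
                                     hasRepo, skipBuild, deploySubdomain, filter }) {
  const args = ["deploy", "--message", `"${message}"`, "--json", "--draft", "--verbose"];
  if (!hasRepo) args.push("--upload-source-zip"); // zip-based runs upload their source
  if (deploySubdomain) args.push("--alias", deploySubdomain);
  if (filter) args.push("--filter", filter);
  if (skipBuild) args.push("--no-build");
  else args.push("--context", "deploy-preview");

  const { stdout } = await execa(cliPath, args, { stdio: ["ignore", "pipe", "pipe"] });
  const deploy = JSON.parse(String(stdout ?? "").trim());
  return {
    deployId: deploy.deploy_id,
    previewUrl: deploy.deploy_url,
    logsUrl: deploy.logs,
    siteId: deploy.site_id,
    sourceZipFilename: hasRepo ? undefined : deploy.source_zip_filename,
  };
}
```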
`);for(let s of i)if(s.startsWith("diff --git")){if(o&&n.length>0){let l=this.containsNetlifyForm(n,o);l&&r.push(l)}let u=s.split(" ");o=u[u.length-1].replace(/^b\//,""),n=[]}else s.startsWith("+")&&!s.startsWith("+++")&&n.push(s.slice(1));if(o&&n.length>0){let s=this.containsNetlifyForm(n,o);s&&r.push(s)}return{detected:r.length>0,matches:r}}containsNetlifyForm(t,r){let o=t.join(`
-
`),n=[{pattern:/<[\w-]*form[\s\S]*?(data-)?netlify/i,name:"standard form element"},{pattern:/<[A-Z][\w]*[\s\S]*?component\s*=\s*["']form["'][\s\S]*?(data-)?netlify/i,name:"component with form prop"}];for(let{pattern:i,name:s}of n){let u=o.match(i);if(u){let l=u.index||0,p=Math.max(0,l-20),f=Math.min(o.length,l+u[0].length+20),c=o.slice(p,f).trim();return c=c.replace(/\s+/g," "),c.length>100&&(c=c.slice(0,97)+"..."),{file:r,snippet:`[${s}] ${c}`}}}return null}};var A=_("cleanup_stage"),
+
`),n=[{pattern:/<[\w-]*form[\s\S]*?(data-)?netlify/i,name:"standard form element"},{pattern:/<[A-Z][\w]*[\s\S]*?component\s*=\s*["']form["'][\s\S]*?(data-)?netlify/i,name:"component with form prop"}];for(let{pattern:i,name:s}of n){let u=o.match(i);if(u){let l=u.index||0,p=Math.max(0,l-20),f=Math.min(o.length,l+u[0].length+20),c=o.slice(p,f).trim();return c=c.replace(/\s+/g," "),c.length>100&&(c=c.slice(0,97)+"..."),{file:r,snippet:`[${s}] ${c}`}}}return null}};var A=_("cleanup_stage"),Qt=async e=>await b(ke(),"cleanup-stage",async()=>mn(e)),it=1024*1024*10,mn=async({config:e,diff:t,result:r,duration:o,resultDiff:n,diffBinary:i,resultDiffBinary:s,previewInfo:u})=>{let l={result:r||"Done",duration:o};u&&u.deployId&&(l.deploy_id=u.deployId),u&&u.sourceZipFilename&&(l.result_zip_file_name=u.sourceZipFilename);let p=t||i||n||s;if(p&&(l.diff_produced=!0),p){let f=new De,c=t||i||"",m=f.scanDiffForForms(c);m.detected?(A.log("Detected Netlify form(s) in diff:"),m.matches.forEach(({file:y,snippet:I})=>{A.log(` - ${y}: ${I}`)}),l.has_netlify_form=!0):A.log("Did not detect Netlify form(s) in diff"),A.log("Did not detect Netlify form(s) in diff")}if(p)try{A.log("Getting pre-signed URLs for diff upload");let f=await ht(e.id,e.sessionId),c=[];(t||i)&&c.push(Ge(f.result.upload_url,i||t).then(()=>{l.result_diff_s3_key=f.result.s3_key,A.log("Successfully uploaded result_diff to S3")})),(n||s)&&c.push(Ge(f.cumulative.upload_url,s||n).then(()=>{l.cumulative_diff_s3_key=f.cumulative.s3_key,A.log("Successfully uploaded cumulative_diff to S3")})),A.log(`Uploading ${c.length} diff(s) to S3 in parallel`),await Promise.all(c),(n||s)&&(A.log("Updating agent runner with cumulative diff S3 key"),await b(ke(),"update-runner",async()=>{await ye(e.id,{result_diff_s3_key:f.cumulative.s3_key})}))}catch(f){A.error("S3 upload failed, falling back to inline diffs:",f);let c=Buffer.byteLength(t||i||""),m=Buffer.byteLength(s||n||"");if(c>it||m>it){let y=`Diffs exceed maximum inline size of ${it} bytes.`;throw A.error(y),new Error(y)}l.result_diff=t,l.result_diff_binary=i,(n||s)&&(l.cumulative_diff=n,l.cumulative_diff_binary=s,A.log("Updating agent runner with inline diffs (fallback)"),await b(ke(),"update-runner",async()=>{await ye(e.id,{result_diff:n,result_diff_binary:s})}))}else A.log("No diffs to upload");return A.log("Updated agent runner with result"),await Zt(async()=>await b(ke(),"update-runner-session",()=>B(e.id,e.sessionId,l)),{maxRetries:3,baseDelay:1e3,onRetry:(f,c)=>{A.error(`Error updating agent runner session (attempt ${f}):`,c),A.log("Retrying...")}}),A.log("Finished updating agent runner with result"),{sessionUpdate:l}};import{getTracer as er,shutdownTracers as hn,withActiveSpan as tr}from"@netlify/otel";var yn=gn(import.meta.url),rr=yn("../package.json"),Ie=_("pipeline_index"),Le=3,us=async({config:e,apiToken:t,cliPath:r="netlify",cwd:o,filter:n,tracing:i={}})=>{let s,{withStageTimer:u}=wt(z.timeUnits.hours(4)),l=await ut(rr.version,e.id,i);try{await tr(er(),"run-pipeline",{},l,async()=>{let{aiGateway:p,context:f,persistSteps:c,runner:m,sha:y}=await u("init",()=>Wt({config:e,apiToken:t,cliPath:r,cwd:o,filter:n,runnerVersion:rr.version}),z.timeUnits.minutes(10));s=m.clean,e.sha=y;let{runnerResult:I}=await u("inference",()=>Fe({cliPath:r,config:e,context:f,runner:m.runner,persistSteps:c,aiGateway:p}));await B(e.id,e.sessionId,{steps:[{title:"Building and deploying the preview"}]});let w=await 
u("deploy",()=>ot({cliPath:r,config:e,context:f,result:I.result,filter:n,isRetry:!1})),g=I,T=[];if(w.hasChanges&&w.deployError){T.push(ct(w.deployError));let x=1,v=!1;for(;x<=Le&&!w.previewInfo&&!v;)Ie.log(`Deploy attempt had errors. Retrying. ${x}/${Le}`),await tr(er(),"deploy-stage",async M=>{M?.setAttributes({"stage.attempt":x});let h;try{h=(await u(`inference-retry-${x}`,()=>Fe({cliPath:r,config:e,context:f,runner:m.runner,persistSteps:c,aiGateway:p,buildErrors:T,priorAgentSessionId:I.agentSessionId}))).runnerResult}catch(d){Ie.warn(`Inference retry ${x} failed, stopping deploy retries:`,d),v=!0;return}g={...h,steps:[...g.steps||[],...h.steps||[]],duration:(g.duration||0)+(h.duration||0)},w=await u(`deploy-retry-${x}`,()=>ot({cliPath:r,config:e,context:f,result:h.result,filter:n,isRetry:!0})),w.deployError&&T.push(w.deployError),x++});x>Le&&!w.previewInfo&&console.warn(`Deploy validation failed after ${Le} attempts`)}let{diff:R,resultDiff:S,previewInfo:N,diffBinary:C,resultDiffBinary:O}=w;await u("cleanup",()=>Qt({config:e,diff:R,result:g.result,duration:g.duration,resultDiff:S,diffBinary:C,resultDiffBinary:O,previewInfo:N}),z.timeUnits.minutes(10)),process.env.NETLIFY_LOCAL_MODE||(await s?.(),await Ft())})}catch(p){if(dt(p)){Ie.info("Agent run terminated gracefully",{statusCode:p.statusCode,reason:p.message}),await s?.();try{await B(e.id,e.sessionId,{result:p.userMessage,state:"error"})}catch{Ie.info("Could not update session (site may have been deleted)")}return}Ie.error("Got error while running pipeline",p),await s?.();let f=p instanceof Error&&p.message;throw await B(e.id,e.sessionId,{result:f||"Encountered error when running agent",state:"error"}),p}finally{await hn()}};export{us as runPipeline};
//# sourceMappingURL=index.js.map
package/package.json
CHANGED
@@ -1,7 +1,7 @@
{
"name": "@netlify/agent-runner-cli",
"type": "module",
-
"version": "1.
+
"version": "1.60.0",
"description": "CLI tool for running Netlify agents",
"main": "./dist/index.js",
"types": "./dist/index.d.ts",
@@ -79,7 +79,7 @@
"@anthropic-ai/claude-code": "2.0.76",
"@google/gemini-cli": "0.23.0",
"@netlify/otel": "^5.1.1",
-
"@openai/codex": "0.
+
"@openai/codex": "0.79.0",
"@opentelemetry/exporter-trace-otlp-grpc": "^0.208.0",
"execa": "^9.6.1",
"minimist": "^1.2.8"