@netlify/agent-runner-cli 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,22 @@
1
+ Copyright (c) 2022 Netlify <team@netlify.com>
2
+
3
+ MIT License
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining
6
+ a copy of this software and associated documentation files (the
7
+ "Software"), to deal in the Software without restriction, including
8
+ without limitation the rights to use, copy, modify, merge, publish,
9
+ distribute, sublicense, and/or sell copies of the Software, and to
10
+ permit persons to whom the Software is furnished to do so, subject to
11
+ the following conditions:
12
+
13
+ The above copyright notice and this permission notice shall be
14
+ included in all copies or substantial portions of the Software.
15
+
16
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
17
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
19
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
20
+ LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
21
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
22
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,6 @@
1
+ # Agent Runner Build Plugin
2
+
3
+ The build plugin which runs in agent the provided prompt. Used on Preview Server.
4
+
5
+
6
+
package/dist/bin.d.ts ADDED
@@ -0,0 +1 @@
1
+ #!/usr/bin/env node
package/dist/bin.js ADDED
@@ -0,0 +1,116 @@
1
+ #!/usr/bin/env node
2
+ import Ve from"process";import Ft from"minimist";import{createRequire as Ut}from"module";var ce=e=>!!e.validateAgentWithBuild;import Xe from"crypto";import oe from"fs/promises";import H from"path";import R from"process";var X="netlify-agent-runner-context.md",V=".netlify",O="other",L="starter";var b="business",U="enterprise",$="free";var qe=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:R.env.NETLIFY_TEAM_ID,userId:R.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:R.env.SITE_NAME,localUrl:t.URL||R.env.URL||R.env.NETLIFY_LOCAL_DEV_URL,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},Ke=()=>{let e=Object.keys(R.env).sort();return{nodeVersion:R.version,envVars:e}},Je=10,We=async e=>{let{name:t,ext:r}=H.parse(e),o=e,n=H.join(R.cwd(),V,o),s=0;for(;await ze(n);){if(s>=Je)throw new Error("Failed to generate context file");o=`${t}-${Xe.randomUUID().slice(0,5)}${r}`,n=H.join(R.cwd(),V,o),s+=1}return o},ze=async e=>{try{return await oe.access(e),!0}catch{return!1}},de=async({netlify:e,config:t})=>{let r=qe(e),o=Ke(),n=await We(X),s=H.join(R.cwd(),V);await oe.mkdir(s,{recursive:!0});let a=H.join(V,n),i=H.join(R.cwd(),a),l=`# Agent Context
3
+
4
+ In Netlify documentation and interfaces, the terms "site" and "project" refer to the same thing.
5
+
6
+ ## Netlify Site
7
+
8
+ - Site ID: ${r.siteId}
9
+ - Account ID: ${r.accountId}
10
+ - User ID: ${r.userId}
11
+ - Site Slug: ${r.siteSlug}
12
+ - Local origin when running in development: ${r.localUrl}
13
+ - Netlify Functions directory: ${r.functionsDir}
14
+
15
+ ## Environment
16
+
17
+ - Node Version: ${o.nodeVersion}
18
+
19
+ ### Available Environment Variables
20
+ ${o.envVars.map(c=>`\`${c}\``).join(", ")}
21
+
22
+ ## Attachments
23
+
24
+ - for requests that require work with attachments or assets, take into account that uploaded attachments are stored in .netlify/assets folder
25
+ - move assets from .netlify/assets folder to the project assets folder if they are referenced in a code or applied changes
26
+
27
+ ## Netlify CLI
28
+ 'netlify-cli' npm package is already available as a global package.
29
+ Don't try to install it, in case you want to use it, use the global package.
30
+
31
+ ## Documentation
32
+
33
+ - Netlify Docs: https://docs.netlify.com
34
+ - LLM Resources Index: https://docs.netlify.com/llms.txt
35
+ `;return t.siteContext&&t.siteContext.length!==0&&(l+=`
36
+ # Project Guidelines
37
+
38
+ ${t.siteContext.filter(c=>c.site_context).map(c=>typeof c.site_context=="string"?c.site_context:typeof c.site_context=="object"?JSON.stringify(c.site_context):"").join(`
39
+
40
+ `)}
41
+ `),t.sessionHistoryContext&&t.sessionHistoryContext.length!==0&&(l+=`# History of prior calls
42
+
43
+ Please continue the discussion based on the previous discussion, provided below as xml:
44
+ - <history> contains the full conversation so far.
45
+ - <turn> groups a request and its response.
46
+ - <request> is the user request.
47
+ - <response> is the agent result.
48
+
49
+ Use the <history> only as context. Do NOT wrap your answer in XML tags. Just return the plain response.
50
+
51
+ <history>
52
+ ${t.sessionHistoryContext.map((c,p)=>`<turn attempt="${p+1}">
53
+ <request>${c.request}</request>
54
+ <response>${c.response}</response>
55
+ </turn>`).join(`
56
+ `)}
57
+ </history>
58
+ `),await oe.writeFile(i,l,"utf-8"),console.log(`Generated agent context document at: ${i}`),a},pe=e=>`The build is currently failing after your previous changes.
59
+ Your task is to analyze and fix the build errors.
60
+ Don't apply techniques of reverting changes. Apply fixes related to errors.
61
+ Don't try to run build by yourself. Just fix the errors.
62
+
63
+ ${e}`;var fe=(e={})=>`
64
+ Check for errors and validate the fix
65
+
66
+ ${e.errorLogsPath?`Error Check Process:
67
+ 1. Read recent errors: \`tail -n 50 ${e.errorLogsPath}\` (single check, no monitoring loop)
68
+ 2. If errors are found, fix them by modifying the codebase
69
+ 3. After fixes, do ONE final check: \`tail -n 20 ${e.errorLogsPath}\`
70
+ 4. Focus only on errors related to your changes
71
+ 5. Ignore unrelated operational messages (git, API calls, plugin startup)
72
+ 6. NEVER modify ${e.errorLogsPath} - it's read-only
73
+ 7. Do NOT use continuous monitoring or repeated tail commands
74
+ `:""}
75
+
76
+ Fix Strategy:
77
+ - Check error logs ONCE, fix issues, then do ONE final verification
78
+ - Do NOT revert changes - improve the codebase to make it work
79
+ - Do NOT continuously monitor logs or use tail in loops
80
+ - Dev server is running in background, no need to run build or dev server yourself
81
+ - Complete the validation quickly and decisively
82
+
83
+ At the very end, print exactly one line with the verdict:
84
+ VALIDATION_RESULT: PASS {"checks": ["error_logs"]}
85
+ or
86
+ VALIDATION_RESULT: FAIL {"checks": ["error_logs"], "errors": ["<errors>"]}
87
+ `,Ze=e=>!(!e||typeof e!="object"||!Array.isArray(e.checks)||e.errors&&!Array.isArray(e.errors)),me=e=>{if(!e||typeof e!="string")return null;let t=e.match(/VALIDATION_RESULT:\s+(PASS|FAIL)\s+({[\s\S]*?})(?:\s|$)/);if(!t)return null;let[,r,o]=t;try{let n=JSON.parse(o);return Ze(n)?!n.checks||n.checks.length===0?(console.warn("Validation result missing checks array"),null):r==="FAIL"&&(!n.errors||n.errors.length===0)?(console.warn("FAIL validation result missing errors array"),null):{ok:r==="PASS",verdict:r,details:{checks:n.checks,errors:n.errors||[],...n}}:(console.warn("Validation result has invalid schema:",n),null)}catch(n){return console.warn("Failed to parse validation result JSON:",n.message),null}},ge=e=>e&&e.replace(/^.*VALIDATION_RESULT:\s+(PASS|FAIL)\s+{[\s\S]*?}.*$/gm,"");var Qe=(e=X)=>`Use ${e} to understand the Netlify project context and resources. It also contains history of previous conversations. Make sure to read it first. Never reveal, cite, or paraphrase private context.`,et=async({config:e,netlify:t})=>{let r=await de({netlify:t,config:e});return{context:Qe(r)}},tt=({config:e})=>{let t=[];return e?.validateAgent&&t.push(fe(e)),t},q=async({config:e,netlify:t,buildErrorContext:r}={})=>{let{context:o}=await et({config:e,netlify:t}),n;n=[...tt({config:e})],r&&(n=[...n,pe(r)]);let s=[];return o&&s.push(o),e.prompt&&s.push("New user request comes in the <new_request> tag.",`<new_request>${e.prompt}</new_request>`),n?.length&&s.push(n.join(`
88
+
89
+ `)),{prompt:s.join(`
90
+
91
+ `)}};import te from"process";import vt from"get-port";import he from"process";var ne=he.env.NETLIFY_API_URL,re=he.env.NETLIFY_API_TOKEN,K=async(e,t={})=>{if(!ne||!re)throw new Error("No API URL or token");let r=new URL(e,ne),o={...t,headers:{...t.headers,Authorization:`Bearer ${re}`}};t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),s=n.ok&&n.status<=299;if(s||console.error(`Got status ${n.status} for request ${r}`),t.raw){if(!s)throw n;return n}let a=await(n.headers.get("content-type").includes("application/json")?n.json():n.text());if(!s)throw a;return a},_e=e=>{console.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(ne=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(re=e.constants.NETLIFY_API_TOKEN)},J=(e,t)=>K(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),W=(e,t,r)=>K(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var Te=(e,t)=>K(`/api/v1/agent_runners/${e}/sessions/${t}`),Ee=e=>K(`/api/v1/sites/${e}/ai-gateway/token`);var xe=async({netlify:e})=>{let t,r,o,n,s=e.constants?.SITE_ID;if(!s)throw new Error("No site id");let a=async()=>{clearTimeout(o),console.log("Requesting AI gateway information");let i=await Ee(s);if({token:t,url:n}=i,r=i.expires_at?i.expires_at*1e3:void 0,console.log("Got AI gateway information",{token:!!t,expiresAt:r,url:n}),r){let l=r-Date.now()-6e4;l>0&&(o=setTimeout(()=>{a()},l))}};return await a(),{get url(){return n},get token(){return t}}};import S from"process";var ot=e=>new Promise(t=>{setTimeout(t,e)}),ye=(e,t=3e3)=>{let r=!1,o=null,n=[],s=null,a=(...i)=>{if(r)return o=i,new Promise(p=>{n.push(p)});r=!0;let l,c=new Promise(p=>{l=p});return s=(async()=>{await Promise.resolve();let p=await e(...i);for(l(p);;){if(await ot(t),!o)return r=!1,s=null,p;let E=o,x=n;o=null,n=[],p=await e(...E),x.forEach(h=>{h(p)})}})(),c};return a.flush=async()=>{if((r||o)&&s)return await s,a.flush()},a},z=(e,t,r=!1)=>{let o=null,n=null,s=null,a=function(...i){n=i,s=this;let l=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(s,n),n=null,s=null)},t),l&&(e.apply(s,n),n=null,s=null)};return a.cancel=()=>{clearTimeout(o),o=null,n=null,s=null},a.flush=()=>{if(o){clearTimeout(o);let i=n,l=s;o=null,n=null,s=null,e.apply(l,i)}},a},Z=(e,t=!0)=>{if(e)try{return JSON.parse(e)}catch(r){t&&console.error("Could not parse JSON",r)}},Ie=(e,t)=>{let n=".netlify.app",s="agent-";if(!t)return`${s}${e.slice(0,6)}`;let i=`--${t}${n}`;if(i.length>55)return"";let l=60-i.length;if(l<=0)return"";if(l>=s.length+6){let c=Math.min(l-s.length,e.length);return`${s}${e.slice(0,c)}`}return e.slice(0,l)};var nt="codex",rt=e=>(e??[]).filter(t=>t.request&&t.response),st=e=>(e??[]).filter(t=>t.site_context),Ne=()=>{let e=S.env.NETLIFY_AGENT_RUNNER_ID,t=S.env.NETLIFY_AGENT_RUNNER_SESSION_ID;if(!e||!t)throw new Error("ID of agent runner is not provided");let r=S.env.NETLIFY_AGENT_RUNNER_RESULT_BRANCH,o=S.env.NETLIFY_AGENT_RUNNER_PROMPT;if(!o)throw new Error("Prompt is not provided");let n=S.env.NETLIFY_AGENT_RUNNER_AGENT||nt,s=S.env.NETLIFY_AGENT_RUNNER_MODEL,a=S.env.NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED==="1",i=S.env.NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED==="1",l=S.env.ERROR_LOGS_PATH,c=Z(S.env.NETLIFY_AGENT_RUNNER_CONTEXT),p=rt(c),E=st(c),x=S.env.NETLIFY_AGENT_RUNNER_HAS_REPO!=="0",h=!S.env.NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED,_=S.env.NETLIFY_AGENT_RUNNER_SHA,g=it();return{id:e,sessionId:t,resultBranch:r,prompt:o,runner:n,model:s,validateAgent:a,errorLogsPath:l,sessionHistoryContext:p,siteContext:E,hasRepo:x,useGateway:h,sha:_,accountType:g,validateAgentWithBuild:i}},it=()=>{let e=S.env.NETLIFY_TEAM_TYPE;return e?e.includes("starter")?L:e.includes("pro")?"pro":e.startsWith("business")?b:e.startsWith("enterprise")?U:e.startsWith("free")?$:O:O};import se from"process";import{execa as at,execaCommand as lt}from"execa";var Ae={preferLocal:!0},Se=(e,t,r)=>{let[o,n]=ut(t,r),s={...Ae,...n},a=at(e,o,s);return Re(a,s),a},we=(e,t)=>{let r={...Ae,...t},o=lt(e,r);return Re(o,r),o},ut=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},Re=(e,t)=>{t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0||(e.stdout?.pipe(se.stdout),e.stderr?.pipe(se.stderr))},Ce=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(se.kill(e.pid,t),console.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return console.error("Error killing process:",r),!1}};var ve=async({config:e,netlify:t})=>{let{hasChanges:r,status:o}=await dt(t);if(!r)return{hasChanges:!1};let n=await pt(t,o);await t.utils.run("git",["add",".",...n]);let a=(await t.utils.run("git",["diff","--staged"])).stdout;if(r=!!a,!r)return{hasChanges:r,diff:a};let i;return e.sha&&(await t.utils.run("git",["commit","-m","Agent runner"]),i=(await t.utils.run("git",["diff",e.sha,"HEAD"])).stdout),{hasChanges:!0,diff:a,resultDiff:i}},ct=["?? mise.toml",/\?\? .+?\.log/],dt=async e=>{let t=await e.utils.run("git",["status","-s"]);return{hasChanges:(t.stdout.trim().length===0?[]:t.stdout.split(`
92
+ `).filter(n=>!ct.some(s=>s instanceof RegExp?s.test(n):n===s))).length!==0,status:t.stdout}};var Pe=async e=>{let{stdout:t}=await e.utils.run("git",["rev-parse","HEAD"]);return t.trim()},ke=async e=>{let{stdout:t}=await e.utils.run("git",["rev-list","--max-parents=0","HEAD"]);return t.trim()},pt=async(e,t="")=>{let r=[".netlify","mise.toml"],o=[],n=r.map(async a=>{try{return await e.utils.run("git",["check-ignore","-v",a]),null}catch{return`:!${a}`}});return(await Promise.all(n)).forEach(a=>{a&&o.push(a)}),t.split(`
93
+ `).forEach(a=>{let i=a.match(/\?\? (.+?)\.log$/)?.[1];i&&o.push(`:!${i}.log`)}),o};import ft from"fs/promises";import mt from"os";import Q from"path";import D from"process";var gt=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(console.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(console.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(console.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function ie({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:s,prompt:a}=e,{model:i}=e;if(n){let{token:m,url:f}=n;if(!m||!f)throw new Error("No token or url provided from AI Gateway");let d=ht[s];if(!d)throw new Error(`Claude is not supported for the account type ${s}`);if(i){if(!d?.models?.[i])throw new Error(`${i} is not supported for account type ${s}`)}else i=d.default;D.env.ANTHROPIC_API_KEY=m,D.env.ANTHROPIC_BASE_URL=f}else if(!D.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let l=[],c=[],p=[],E={},x=0,h=0,_,g,B=Q.join(D.cwd(),"node_modules"),C=[Q.join(D.env.NODE_PATH||B,".bin/claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose",...i?["--model",i]:[],"-p",a],v=`${D.env.NVM_BIN}/node`;console.log(`Running ${v} ${C.join(" ")}`);let w=t.utils.run(v,C,{all:!0,env:D.env});w.stdin?.end();let T=z(()=>{r?.({steps:l,duration:h}),o?.({steps:c,duration:h}),c=[]},250),N=(m,f)=>{m.id=x,x+=1,p.push(m),l.push(m),c.push(m),f||T.flush(),T(),f&&T.flush()},A="";return w.all.on("data",m=>{if(A+=m.toString(),!m.includes(`
94
+ `))return;let f=A.split(`
95
+ `).filter(Boolean).map(d=>{try{return JSON.parse(d)}catch{console.log("Could not parse line",d)}return null}).filter(Boolean);A="",f.forEach(d=>{Array.isArray(d?.message?.content)?d.message.content.forEach(u=>{switch(u.type){case"text":{u.text&&N({message:u.text});break}case"image":{typeof u.source=="object"&&u.source.type==="base64"&&u.source.media_type?N({message:`![](data:${u.source.media_type};base64,${u.source.data})`}):console.log(`Unsupported image type ${u.source?.type}`,u.source);break}case"tool_use":{if(u.name==="Task"){let y=u.input?.description&&`\`${u.input.description}\``;N({title:[u.name,y].filter(Boolean).join(" ")})}else E[u.id]=u;T.flush();break}case"tool_result":{let y=E[u.tool_use_id],P;if(y){let M=y.input?.file_path&&Q.relative(D.cwd(),y.input.file_path),I=M&&`\`${M}\``;P=[y.name,I].filter(Boolean).join(" ")}let Y=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(y?.name),k;if(typeof u.content=="string")k=u.content;else if(Array.isArray(u.content)){let M=[];u.content.forEach(I=>{I?.type==="text"&&typeof I.text=="string"?M.push(I.text):I?.type==="image"&&typeof I.source=="object"?I.source.type==="base64"&&I.source.media_type?M.push(`![](data:${I.source.media_type};base64,${I.source.data})`):console.log(`Unsupported image type ${I.source.type}`,I.source):console.log(`Unsupported block type ${I?.type}`)}),k=M.join(`
96
+
97
+ `)}Y&&k&&(k=`\`\`\`
98
+ ${k.trim()}
99
+ \`\`\``),N({title:P,message:k},!0);break}case"thinking":{u.thinking&&N({title:"Thinking",message:u.thinking},!0);break}default:console.log(`Message content type is not supported ${u.type}`,u)}}):d?.type==="result"&&(h=d.duration_ms,d.is_error?g=d.result:_=d.result,[p,l,c].forEach(u=>{u[u.length-1]?.message===_&&u.pop()}))})}),await w.catch(m=>{({error:g,result:_}=gt({catchError:m,runCmd:w,error:g,result:_,runnerName:"Claude"}))}),T.flush(),{steps:p,duration:h,result:_,error:g}}var Oe=async()=>{let e=Q.join(mt.homedir(),".claude");await ft.rm(e,{recursive:!0,force:!0})},ht={[b]:{default:"claude-3-7-sonnet-20250219",models:{"claude-3-5-haiku-20241022":{maxTokens:8192},"claude-3-7-sonnet-20250219":{maxTokens:64e3}}},[U]:{default:"claude-sonnet-4-20250514",models:{"claude-3-5-haiku-20241022":{maxTokens:8192},"claude-3-7-sonnet-20250219":{maxTokens:64e3},"claude-3-haiku-20240307":{maxTokens:4096},"claude-opus-4-20250514":{maxTokens:32e3},"claude-sonnet-4-20250514":{maxTokens:64e3}}},pro:{default:"claude-3-5-haiku-20241022",models:{"claude-3-5-haiku-20241022":{maxTokens:8192},"claude-3-haiku-20240307":{maxTokens:4096}}},[$]:{default:"claude-3-7-sonnet-20250219",models:{"claude-3-5-haiku-20241022":{maxTokens:8192},"claude-3-7-sonnet-20250219":{maxTokens:16e3}}},[L]:{default:"claude-3-7-sonnet-20250219",models:{"claude-3-5-haiku-20241022":{maxTokens:8192},"claude-3-7-sonnet-20250219":{maxTokens:16e3}}},[O]:{default:"claude-3-7-sonnet-20250219",models:{"claude-3-5-haiku-20241022":{maxTokens:8192},"claude-3-7-sonnet-20250219":{maxTokens:16e3}}}};import _t from"fs/promises";import Tt from"os";import ae from"path";import G from"process";var Et=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(console.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(console.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(console.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function le({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:s,prompt:a}=e,{model:i}=e;if(n){let{token:T,url:N}=n;if(!T||!N)throw new Error("No token or url provided from AI Gateway");let A=xt[s];if(!A)throw new Error(`Codex is not supported for the account type ${s}`);if(i){if(!A?.models?.[i])throw new Error(`${i} is not supported for account type ${s}`)}else i=A.default;G.env.OPENAI_API_KEY=T,G.env.OPENAI_BASE_URL=N}else if(!G.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let l=[],c=[],p={},E=0,x=0,h,_,g=ae.join(G.cwd(),"node_modules"),B=[ae.join(G.env.NODE_PATH||g,".bin/codex"),"--approval-mode","full-auto",n?"--disable-response-storage":void 0,"--dangerously-auto-approve-everything",...i?["--model",i]:[],"-q",a].filter(Boolean),C=`${G.env.NVM_BIN}/node`;console.log(`Running ${C} ${B.join(" ")}`);let v=t.utils.run(C,B,{all:!0,env:{...G.env,CODEX_UNSAFE_ALLOW_NO_SANDBOX:"1"}}),w="";return v.all.on("data",T=>{if(w+=T.toString(),!T.includes(`
100
+ `))return;let N=w.toString().split(`
101
+ `).filter(Boolean).map(f=>{try{return JSON.parse(f)}catch{console.log("Could not parse line",f)}return null}).filter(Boolean);w="";let A=[],m=!1;N.forEach(f=>{if(f?.duration_ms&&(x=f.duration_ms,m=!0),f?.type==="local_shell_call")p[f.call_id]=f;else if(f?.type==="local_shell_call_output"){let d=It(p[f.call_id],f);d.id=E,E+=1,d&&(c.push(d),l.push(d),A.push(d),m=!0)}else f?.type==="message"&&f.role==="assistant"?h=f.content.map(d=>d.text).join(`
102
+ `):f?.type==="message"&&f.role==="system"&&(_=f.content.map(d=>d.text).join(`
103
+ `))}),m&&(r?.({steps:l,duration:x}),o?.({steps:A,duration:x}))}),await v.catch(T=>{({error:_,result:h}=Et({catchError:T,runCmd:v,error:_,result:h,runnerName:"Codex"}))}),{steps:c,duration:x,result:h,error:_}}var Be=async()=>{let e=ae.join(Tt.homedir(),".codex");await _t.rm(e,{recursive:!0,force:!0})},xt={[b]:{default:"codex-mini-latest",models:{"codex-mini-latest":{maxTokens:75e3},"gpt-3.5-turbo":{maxTokens:4096},"gpt-4-turbo":{maxTokens:4096},"gpt-4o":{maxTokens:16384},"gpt-4o-mini":{maxTokens:32768},"o3-mini":{maxTokens:1e5}}},[U]:{default:"codex-mini-latest",models:{"codex-mini-latest":{maxTokens:75e3},"gpt-3.5-turbo":{maxTokens:4096},"gpt-4-turbo":{maxTokens:4096},"gpt-4o":{maxTokens:16384},"gpt-4o-mini":{maxTokens:32768},o1:{maxTokens:1e5},"o1-mini":{maxTokens:65536},"o3-mini":{maxTokens:1e5},"gpt-image-1":{},"dall-e-2":{},"dall-e-3":{}}},pro:{default:"codex-mini-latest",models:{"codex-mini-latest":{maxTokens:75e3},"gpt-3.5-turbo":{maxTokens:4096},"gpt-4-turbo":{maxTokens:4096},"gpt-4o":{maxTokens:16384},"gpt-4o-mini":{maxTokens:32768}}},[$]:{default:"codex-mini-latest",models:{"codex-mini-latest":{maxTokens:75e3},"gpt-3.5-turbo":{maxTokens:4096},"gpt-4-turbo":{maxTokens:4096},"gpt-4o":{maxTokens:16384},"gpt-4o-mini":{maxTokens:32768},"o3-mini":{maxTokens:16e3}}},[L]:{default:"codex-mini-latest",models:{"codex-mini-latest":{maxTokens:75e3},"gpt-3.5-turbo":{maxTokens:4096},"gpt-4-turbo":{maxTokens:4096},"gpt-4o":{maxTokens:16384},"gpt-4o-mini":{maxTokens:32768},"o3-mini":{maxTokens:16e3}}},[O]:{default:"codex-mini-latest",models:{"codex-mini-latest":{maxTokens:75e3},"gpt-3.5-turbo":{maxTokens:4096},"gpt-4-turbo":{maxTokens:4096},"gpt-4o":{maxTokens:16384},"gpt-4o-mini":{maxTokens:32768},"o3-mini":{maxTokens:16e3}}}},yt=new Set(["bash","-lc"]),It=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(s=>!yt.has(s)),o=r?`Running \`${r.join(" ")}\``:void 0,n;try{n=JSON.parse(t.output).output?.trim(),n&&(n=`\`\`\`
104
+ ${n.trim()}
105
+ \`\`\``)}catch(s){console.error("Could not decode outputMsg",s,t.output)}return{title:o,message:n}};import Nt from"fs/promises";import At from"os";import ee from"path";import F from"process";var St=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(console.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(console.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(console.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),wt={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"};async function ue({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:s,prompt:a}=e,{model:i}=e;if(n){let{token:m,url:f}=n;if(!m||!f)throw new Error("No token or url provided from AI Gateway");let d=Rt[s];if(!d)throw new Error(`Gemini is not supported for the account type ${s}`);if(i){if(!d?.models?.[i])throw new Error(`${i} is not supported for account type ${s}`)}else i=d.default;F.env.GEMINI_API_KEY=m,F.env.GOOGLE_GEMINI_BASE_URL=f}else if(!F.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let l=[],c=[],p=[],E={},x=0,h=0,_,g,B=ee.join(F.cwd(),"node_modules"),C=[ee.join(F.env.NODE_PATH||B,".bin/gemini"),...i?["--model",i]:[],"--yolo","-p",a],v=`${F.env.NVM_BIN}/node`;console.log(`Running ${v} ${C.join(" ")}`);let w=t.utils.run(v,C,{all:!0,env:F.env});w.stdin?.end();let T=z(()=>{r?.({steps:l,duration:h}),o?.({steps:c,duration:h}),c=[]},250),N=(m,f)=>{m.id=x,x+=1,p.push(m),l.push(m),c.push(m),f||T.flush(),T(),f&&T.flush()},A="";return w.all.on("data",m=>{if(A+=m.toString(),!m.includes(`
106
+ `))return;let f=A.toString().split(`
107
+ `).filter(Boolean).map(d=>{try{if(d.startsWith("[API Error")){let u=d.match(/\[api error: (.+?)]$/i)?.[1];return{type:"error",value:Z(u,!1)?.error?.message||u||"Gemini encountered error"}}return JSON.parse(d)}catch{}return null}).filter(Boolean);A="",f.forEach(d=>{switch(d.type){case"thought":{let u=d.value;N({title:u?.subject??"Thinking...",message:u?.description},!0);break}case"content":{d.value&&N({message:d.value});break}case"tool_call_request":{let u=d.value,y=wt[u.name]??u.name,P=u.args?.path||u.args?.absolute_path,Y=P&&ee.relative(F.cwd(),P),k=u.args?.command,I={title:[y,Y&&`\`${Y}\``,k&&`\`${k}\``].filter(Boolean).join(" ")};E[u.callId]=I,T.flush();break}case"tool_result":{let u=d.value,y=E[u.callId];if(y){let P=[u.resultDisplay,u.responseParts?.functionResponse?.response?.output].find(Y=>typeof Y=="string"&&Y);P&&(y.message=`\`\`\`
108
+ ${P.trim()}
109
+ \`\`\``),N(y,!0)}break}case"result":{h=d.duration_ms,_=d.value,[p,l,c].forEach(u=>{u[u.length-1]?.message===_&&u.pop()});break}case"error":{g=d.value;break}case"finished":break;default:{console.warn("Unhandled message type:",d.type);break}}})}),await w.catch(m=>{({error:g,result:_}=St({catchError:m,runCmd:w,error:g,result:_,runnerName:"Gemini"}))}),T.flush(),{steps:p,duration:h,result:_,error:g}}var Le=async()=>{let e=ee.join(At.homedir(),".gemini");await Nt.rm(e,{recursive:!0,force:!0})},Rt={[b]:{default:"gemini-2.5-flash",models:{"gemini-2.0-flash":{maxTokens:8192},"gemini-2.0-flash-lite":{maxTokens:8192},"gemini-2.5-flash":{maxTokens:65536}}},[U]:{default:"gemini-2.5-pro",models:{"gemini-1.5-flash":{maxTokens:8192},"gemini-1.5-flash-8b":{maxTokens:8192},"gemini-1.5-pro":{maxTokens:8192},"gemini-2.0-flash":{maxTokens:8192},"gemini-2.0-flash-lite":{maxTokens:8192},"gemini-2.5-flash":{maxTokens:65536},"gemini-2.5-flash-lite":{maxTokens:65536},"gemini-2.5-pro":{maxTokens:65536},"imagen-4.0-generate-001":{},"veo-3.0-generate-preview":{}}},pro:{default:"gemini-2.0-flash-lite",models:{"gemini-1.5-flash-8b":{maxTokens:8192},"gemini-2.0-flash-lite":{maxTokens:8192}}},[$]:{default:"gemini-2.5-flash",models:{"gemini-2.0-flash":{maxTokens:8192},"gemini-2.0-flash-lite":{maxTokens:8192},"gemini-2.5-flash":{maxTokens:16e3}}},[L]:{default:"gemini-2.5-flash",models:{"gemini-2.0-flash":{maxTokens:8192},"gemini-2.0-flash-lite":{maxTokens:8192},"gemini-2.5-flash":{maxTokens:16e3}}},[O]:{default:"gemini-2.5-flash",models:{"gemini-2.0-flash":{maxTokens:8192},"gemini-2.0-flash-lite":{maxTokens:8192},"gemini-2.5-flash":{maxTokens:16e3}}}};var Ct={codex:{runner:le,clean:Be},claude:{runner:ie,clean:Oe},gemini:{runner:ue,clean:Le}},be=Ct;var Pt=8888,Ue=async({apiThrottle:e,apiToken:t,cwd:r}={})=>{let o=Ne(),n=be[o.runner];if(!n)throw new Error(`${o.runner} is not supported`);let s=await Ot({apiToken:t,cwd:r}),a=kt({apiToken:t,devPort:s.port});_e(a),console.log(`Starting agent runner ${o.runner}`,{id:o.id,sessionId:o.sessionId,prompt:o.prompt,hasRepo:o.hasRepo});let i=o.useGateway?await xe({netlify:a}):void 0;o.validateAgent&&o.errorLogsPath&&console.log("Validation will use error logs file",{path:o.errorLogsPath});let l=ye(({steps:p=[],duration:E})=>{let x=[...p];return p.length=0,W(o.id,o.sessionId,{steps:x,duration:E})},e),c=o.sha;if(o.hasRepo){if(!o.sha){console.log("No sha in runner, marking the latest as the one");let p=await Pe(a);await J(o.id,{sha:p}),c=p}}else console.log("Zip project"),c=await ke(a);return console.log("Resolved sha to",c),{aiGateway:i,config:o,context:a,persistSteps:l,runner:n,stopDev:s.stop}},kt=({apiToken:e,devPort:t})=>({constants:{NETLIFY_API_HOST:te.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||te.env.NETLIFY_API_TOKEN,SITE_ID:te.env.SITE_ID,URL:`http://localhost:${t}`,FUNCTIONS_DIST:te.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:Se}}),Ot=async({apiToken:e,cwd:t})=>{let r=await vt({port:Pt}),o=[`--port ${r}`,"--no-open","--skip-gitignore","--context dev-server","--offline-env",e?`--auth ${e}`:void 0,t?`--cwd ${t}`:void 0].filter(Boolean),n=we(`netlify dev ${o.join(" ")}`,{detached:!0});return{port:r,stop:()=>Ce(n)}};var $e=async({config:e,context:t,runner:r,persistSteps:o,aiGateway:n})=>{let{prompt:s}=await q({config:e,netlify:t}),a={...e,prompt:s},i=await r({aiGateway:n,config:a,netlify:t,persistSteps:o});if(i.error)throw console.error("Runner failed",{stepsCount:i.steps.length,duration:i.duration,error:i.error}),new Error(i.error);return{runnerResult:i}};var De=async e=>{try{console.log("Running netlify build...");let t=await e.utils.run("netlify",["build","--context","deploy-preview"]);return console.log("Build completed successfully"),{success:!0,stdout:t?.stdout||"",stderr:t?.stderr||""}}catch(t){return console.log("Build failed:",t.message),{success:!1,stdout:t.stdout||"",stderr:t.stderr||"",error:t.message}}},Bt=e=>{if(e.success)return"";let t=[];return e.error&&t.push(`Build Error: ${e.error}`),e.stderr&&t.push(`Build stderr:
110
+ ${e.stderr}`),e.stdout&&t.push(`Build stdout:
111
+ ${e.stdout}`),t.join(`
112
+
113
+ `)},Lt=e=>{let t=[];return t.push("Build validation failed. Here are the build errors you need to fix:"),e.forEach((r,o)=>{t.push(`Build attempt ${o+1}: ${Bt(r)}`)}),t.join(`
114
+ `)},Fe=async({netlify:e,initialResult:t,runAgentCallback:r})=>{console.log("Starting post-execution build validation");let o=await De(e);if(o.success)return console.log("Build validation passed"),{...t,buildValidation:{attempts:0,finalBuildSuccess:!0,buildHistory:[o]}};console.log("Build validation failed, starting build-fix iteration process");let n=[o],s=[],a=0,i=t;for(let l=1;l<=3;l++){console.log(`Build fix attempt ${l}/3`);let c=Lt(n);console.log("Running agent to fix build errors"),i=await r({errorContext:c}),s=[...s,...i.steps||[]],a+=i.duration||0;let p=await De(e);if(n.push(p),p.success)return console.log(`Build fixed after ${l} attempts`),{...i,steps:s,duration:a,buildValidation:{attempts:l,finalBuildSuccess:!0,buildHistory:n}};console.log(`Build still failing after attempt ${l}`)}return console.log("Build validation failed after 3 attempts"),{...i,steps:s,duration:a,buildValidation:{attempts:3,finalBuildSuccess:!1,buildHistory:n,error:"Build validation failed - unable to fix build errors after 3 attempts"}}};import bt from"process";var Ye=async({netlify:e,hasRepo:t,skipBuild:r,message:o="Agent Preview",deploySubdomain:n})=>{try{let s=["deploy","--message",`"${o}"`,"--json"];t||(console.log("Deploy: Uploading source zip"),s.push("--upload-source-zip")),n&&s.push("--alias",n),r?(console.log("Deploy: Skipping build"),s.push("--no-build")):s.push("--context","deploy-preview"),console.log(`Running: netlify ${s.join(" ")}`);let a=await e.utils.run("netlify",s),i=JSON.parse(a.stdout.trim());console.log(`
115
+ Preview deploy created successfully:`,{deployId:i.deploy_id,deployUrl:i.deploy_url,siteId:i.site_id});let l={deployId:i.deploy_id,previewUrl:i.deploy_url,logsUrl:i.logs,siteId:i.site_id};return t||(l.sourceZipFilename=i.source_zip_filename),l}catch(s){throw console.error("Failed to create preview deploy via CLI:",s),s}};var Me=async({config:e,context:t,result:r,buildValidation:o})=>{let{diff:n,resultDiff:s,hasChanges:a}=await ve({config:e,netlify:t});console.log("Preview deploy condition check:",{resultUndefined:r===void 0,resultType:typeof r,hasChanges:a,wouldCreatePreview:r!==void 0&&a});let i=null;if(r!==void 0&&a)try{let l;try{let c=await Te(e.id,e.sessionId);c?.title&&(l=c.title)}catch(c){console.warn("Failed to fetch session title, using fallback message:",c.message)}i=await Ye({netlify:t,hasRepo:e.hasRepo,message:l,skipBuild:o?.finalBuildSuccess,deploySubdomain:Ie(e.id,bt.env.SITE_NAME)})}catch(l){console.warn("Failed to create preview deploy (continuing with agent run):",l)}return console.log("Git status",{diff:n,hasChanges:a}),{diff:n,resultDiff:s,hasChanges:a,previewInfo:i}};var je=async({config:e,diff:t,result:r,duration:o,resultDiff:n,previewInfo:s,cleanRunner:a})=>{let i={result_diff:t,result:r||"Done",duration:o};return s&&s.deployId&&(i.deploy_id=s.deployId),s&&s.sourceZipFilename&&(i.result_zip_file_name=s.sourceZipFilename),n?(console.log("Updating total agent result diff"),await J(e.id,{result_diff:n})):console.log("No total result diff, not updating"),await a?.(),console.log("Updated agent runner with result"),await W(e.id,e.sessionId,i),{sessionUpdate:i}};var $t=Ut(import.meta.url),Dt=$t("../package.json"),Ge=async(e={})=>{console.log("Starting agent runner orchestrator",{version:Dt.version});let{aiGateway:t,config:r,context:o,persistSteps:n,runner:s,stopDev:a}=await Ue({apiToken:e.apiToken}),{runnerResult:i}=await $e({config:r,context:o,runner:s.runner,persistSteps:n,aiGateway:t});a();let l=i,c;if(ce(r)){console.log("Build validation enabled, performing post-execution build validation");let g=await Fe({config:r,netlify:o,initialResult:i,runAgentCallback:async({errorContext:B})=>{let{prompt:C}=await q({config:{...r,prompt:i.result},buildErrorContext:B,netlify:o});return s.runner({config:{...r,prompt:C},netlify:o,persistSteps:n,aiGateway:t})}});console.log("Build validation completed:",g.buildValidation),l=g,c=g.buildValidation}let p={ok:!0},E=l.result;if(r.validateAgent&&l.result){let g=me(l.result);console.log("Validation result",g),g&&(p=g),E=ge(l.result)}p.ok||console.log("Validation failed",p);let{diff:x,resultDiff:h,previewInfo:_}=await Me({config:r,context:o,result:E,buildValidation:c});await je({config:r,diff:x,result:E,duration:l.duration,resultDiff:h,previewInfo:_,cleanRunner:s.clean})};var He=Ft(Ve.argv.slice(2),{string:["auth","cwd"]});try{await Ge({apiToken:He.auth,cwd:He.cwd})}catch(e){console.error("Error running agent pipeline:",e),Ve.exit(1)}
116
+ //# sourceMappingURL=bin.js.map
@@ -0,0 +1,24 @@
1
+ import { Options, ExecaChildProcess } from 'execa';
2
+
3
+ /** Run a command, with arguments being an array */
4
+ declare const run: (file: string, args?: string[] | object, options?: Options) => ExecaChildProcess<string>;
5
+
6
+ interface Context {
7
+ constants: {
8
+ FUNCTIONS_DIST: string;
9
+ NETLIFY_API_HOST: string;
10
+ NETLIFY_API_TOKEN?: string;
11
+ SITE_ID?: string;
12
+ URL: string;
13
+ };
14
+ utils: {
15
+ run: typeof run;
16
+ };
17
+ }
18
+ interface PipelineOptions {
19
+ apiToken?: string;
20
+ cwd?: string;
21
+ }
22
+ declare const runPipeline: (options?: PipelineOptions) => Promise<void>;
23
+
24
+ export { type Context, type PipelineOptions, runPipeline };
package/dist/index.js ADDED
@@ -0,0 +1,115 @@
1
+ import{createRequire as Bt}from"module";var ce=e=>!!e.validateAgentWithBuild;import Ge from"crypto";import oe from"fs/promises";import H from"path";import w from"process";var X="netlify-agent-runner-context.md",V=".netlify",O="other",L="starter";var b="business",U="enterprise",$="free";var He=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:w.env.NETLIFY_TEAM_ID,userId:w.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:w.env.SITE_NAME,localUrl:t.URL||w.env.URL||w.env.NETLIFY_LOCAL_DEV_URL,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},Ve=()=>{let e=Object.keys(w.env).sort();return{nodeVersion:w.version,envVars:e}},Xe=10,qe=async e=>{let{name:t,ext:r}=H.parse(e),o=e,n=H.join(w.cwd(),V,o),s=0;for(;await Ke(n);){if(s>=Xe)throw new Error("Failed to generate context file");o=`${t}-${Ge.randomUUID().slice(0,5)}${r}`,n=H.join(w.cwd(),V,o),s+=1}return o},Ke=async e=>{try{return await oe.access(e),!0}catch{return!1}},de=async({netlify:e,config:t})=>{let r=He(e),o=Ve(),n=await qe(X),s=H.join(w.cwd(),V);await oe.mkdir(s,{recursive:!0});let a=H.join(V,n),i=H.join(w.cwd(),a),l=`# Agent Context
2
+
3
+ In Netlify documentation and interfaces, the terms "site" and "project" refer to the same thing.
4
+
5
+ ## Netlify Site
6
+
7
+ - Site ID: ${r.siteId}
8
+ - Account ID: ${r.accountId}
9
+ - User ID: ${r.userId}
10
+ - Site Slug: ${r.siteSlug}
11
+ - Local origin when running in development: ${r.localUrl}
12
+ - Netlify Functions directory: ${r.functionsDir}
13
+
14
+ ## Environment
15
+
16
+ - Node Version: ${o.nodeVersion}
17
+
18
+ ### Available Environment Variables
19
+ ${o.envVars.map(c=>`\`${c}\``).join(", ")}
20
+
21
+ ## Attachments
22
+
23
+ - for requests that require work with attachments or assets, take into account that uploaded attachments are stored in .netlify/assets folder
24
+ - move assets from .netlify/assets folder to the project assets folder if they are referenced in a code or applied changes
25
+
26
+ ## Netlify CLI
27
+ 'netlify-cli' npm package is already available as a global package.
28
+ Don't try to install it, in case you want to use it, use the global package.
29
+
30
+ ## Documentation
31
+
32
+ - Netlify Docs: https://docs.netlify.com
33
+ - LLM Resources Index: https://docs.netlify.com/llms.txt
34
+ `;return t.siteContext&&t.siteContext.length!==0&&(l+=`
35
+ # Project Guidelines
36
+
37
+ ${t.siteContext.filter(c=>c.site_context).map(c=>typeof c.site_context=="string"?c.site_context:typeof c.site_context=="object"?JSON.stringify(c.site_context):"").join(`
38
+
39
+ `)}
40
+ `),t.sessionHistoryContext&&t.sessionHistoryContext.length!==0&&(l+=`# History of prior calls
41
+
42
+ Please continue the discussion based on the previous discussion, provided below as xml:
43
+ - <history> contains the full conversation so far.
44
+ - <turn> groups a request and its response.
45
+ - <request> is the user request.
46
+ - <response> is the agent result.
47
+
48
+ Use the <history> only as context. Do NOT wrap your answer in XML tags. Just return the plain response.
49
+
50
+ <history>
51
+ ${t.sessionHistoryContext.map((c,p)=>`<turn attempt="${p+1}">
52
+ <request>${c.request}</request>
53
+ <response>${c.response}</response>
54
+ </turn>`).join(`
55
+ `)}
56
+ </history>
57
+ `),await oe.writeFile(i,l,"utf-8"),console.log(`Generated agent context document at: ${i}`),a},pe=e=>`The build is currently failing after your previous changes.
58
+ Your task is to analyze and fix the build errors.
59
+ Don't apply techniques of reverting changes. Apply fixes related to errors.
60
+ Don't try to run build by yourself. Just fix the errors.
61
+
62
+ ${e}`;var fe=(e={})=>`
63
+ Check for errors and validate the fix
64
+
65
+ ${e.errorLogsPath?`Error Check Process:
66
+ 1. Read recent errors: \`tail -n 50 ${e.errorLogsPath}\` (single check, no monitoring loop)
67
+ 2. If errors are found, fix them by modifying the codebase
68
+ 3. After fixes, do ONE final check: \`tail -n 20 ${e.errorLogsPath}\`
69
+ 4. Focus only on errors related to your changes
70
+ 5. Ignore unrelated operational messages (git, API calls, plugin startup)
71
+ 6. NEVER modify ${e.errorLogsPath} - it's read-only
72
+ 7. Do NOT use continuous monitoring or repeated tail commands
73
+ `:""}
74
+
75
+ Fix Strategy:
76
+ - Check error logs ONCE, fix issues, then do ONE final verification
77
+ - Do NOT revert changes - improve the codebase to make it work
78
+ - Do NOT continuously monitor logs or use tail in loops
79
+ - Dev server is running in background, no need to run build or dev server yourself
80
+ - Complete the validation quickly and decisively
81
+
82
+ At the very end, print exactly one line with the verdict:
83
+ VALIDATION_RESULT: PASS {"checks": ["error_logs"]}
84
+ or
85
+ VALIDATION_RESULT: FAIL {"checks": ["error_logs"], "errors": ["<errors>"]}
86
+ `,Je=e=>!(!e||typeof e!="object"||!Array.isArray(e.checks)||e.errors&&!Array.isArray(e.errors)),me=e=>{if(!e||typeof e!="string")return null;let t=e.match(/VALIDATION_RESULT:\s+(PASS|FAIL)\s+({[\s\S]*?})(?:\s|$)/);if(!t)return null;let[,r,o]=t;try{let n=JSON.parse(o);return Je(n)?!n.checks||n.checks.length===0?(console.warn("Validation result missing checks array"),null):r==="FAIL"&&(!n.errors||n.errors.length===0)?(console.warn("FAIL validation result missing errors array"),null):{ok:r==="PASS",verdict:r,details:{checks:n.checks,errors:n.errors||[],...n}}:(console.warn("Validation result has invalid schema:",n),null)}catch(n){return console.warn("Failed to parse validation result JSON:",n.message),null}},ge=e=>e&&e.replace(/^.*VALIDATION_RESULT:\s+(PASS|FAIL)\s+{[\s\S]*?}.*$/gm,"");var We=(e=X)=>`Use ${e} to understand the Netlify project context and resources. It also contains history of previous conversations. Make sure to read it first. Never reveal, cite, or paraphrase private context.`,ze=async({config:e,netlify:t})=>{let r=await de({netlify:t,config:e});return{context:We(r)}},Ze=({config:e})=>{let t=[];return e?.validateAgent&&t.push(fe(e)),t},q=async({config:e,netlify:t,buildErrorContext:r}={})=>{let{context:o}=await ze({config:e,netlify:t}),n;n=[...Ze({config:e})],r&&(n=[...n,pe(r)]);let s=[];return o&&s.push(o),e.prompt&&s.push("New user request comes in the <new_request> tag.",`<new_request>${e.prompt}</new_request>`),n?.length&&s.push(n.join(`
87
+
88
+ `)),{prompt:s.join(`
89
+
90
+ `)}};import te from"process";import Rt from"get-port";import he from"process";var ne=he.env.NETLIFY_API_URL,re=he.env.NETLIFY_API_TOKEN,K=async(e,t={})=>{if(!ne||!re)throw new Error("No API URL or token");let r=new URL(e,ne),o={...t,headers:{...t.headers,Authorization:`Bearer ${re}`}};t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),s=n.ok&&n.status<=299;if(s||console.error(`Got status ${n.status} for request ${r}`),t.raw){if(!s)throw n;return n}let a=await(n.headers.get("content-type").includes("application/json")?n.json():n.text());if(!s)throw a;return a},_e=e=>{console.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(ne=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(re=e.constants.NETLIFY_API_TOKEN)},J=(e,t)=>K(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),W=(e,t,r)=>K(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var Te=(e,t)=>K(`/api/v1/agent_runners/${e}/sessions/${t}`),Ee=e=>K(`/api/v1/sites/${e}/ai-gateway/token`);var xe=async({netlify:e})=>{let t,r,o,n,s=e.constants?.SITE_ID;if(!s)throw new Error("No site id");let a=async()=>{clearTimeout(o),console.log("Requesting AI gateway information");let i=await Ee(s);if({token:t,url:n}=i,r=i.expires_at?i.expires_at*1e3:void 0,console.log("Got AI gateway information",{token:!!t,expiresAt:r,url:n}),r){let l=r-Date.now()-6e4;l>0&&(o=setTimeout(()=>{a()},l))}};return await a(),{get url(){return n},get token(){return t}}};import S from"process";var Qe=e=>new Promise(t=>{setTimeout(t,e)}),ye=(e,t=3e3)=>{let r=!1,o=null,n=[],s=null,a=(...i)=>{if(r)return o=i,new Promise(p=>{n.push(p)});r=!0;let l,c=new Promise(p=>{l=p});return s=(async()=>{await Promise.resolve();let p=await e(...i);for(l(p);;){if(await Qe(t),!o)return r=!1,s=null,p;let E=o,x=n;o=null,n=[],p=await e(...E),x.forEach(h=>{h(p)})}})(),c};return a.flush=async()=>{if((r||o)&&s)return await s,a.flush()},a},z=(e,t,r=!1)=>{let o=null,n=null,s=null,a=function(...i){n=i,s=this;let l=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(s,n),n=null,s=null)},t),l&&(e.apply(s,n),n=null,s=null)};return a.cancel=()=>{clearTimeout(o),o=null,n=null,s=null},a.flush=()=>{if(o){clearTimeout(o);let i=n,l=s;o=null,n=null,s=null,e.apply(l,i)}},a},Z=(e,t=!0)=>{if(e)try{return JSON.parse(e)}catch(r){t&&console.error("Could not parse JSON",r)}},Ie=(e,t)=>{let n=".netlify.app",s="agent-";if(!t)return`${s}${e.slice(0,6)}`;let i=`--${t}${n}`;if(i.length>55)return"";let l=60-i.length;if(l<=0)return"";if(l>=s.length+6){let c=Math.min(l-s.length,e.length);return`${s}${e.slice(0,c)}`}return e.slice(0,l)};var et="codex",tt=e=>(e??[]).filter(t=>t.request&&t.response),ot=e=>(e??[]).filter(t=>t.site_context),Ne=()=>{let e=S.env.NETLIFY_AGENT_RUNNER_ID,t=S.env.NETLIFY_AGENT_RUNNER_SESSION_ID;if(!e||!t)throw new Error("ID of agent runner is not provided");let r=S.env.NETLIFY_AGENT_RUNNER_RESULT_BRANCH,o=S.env.NETLIFY_AGENT_RUNNER_PROMPT;if(!o)throw new Error("Prompt is not provided");let n=S.env.NETLIFY_AGENT_RUNNER_AGENT||et,s=S.env.NETLIFY_AGENT_RUNNER_MODEL,a=S.env.NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED==="1",i=S.env.NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED==="1",l=S.env.ERROR_LOGS_PATH,c=Z(S.env.NETLIFY_AGENT_RUNNER_CONTEXT),p=tt(c),E=ot(c),x=S.env.NETLIFY_AGENT_RUNNER_HAS_REPO!=="0",h=!S.env.NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED,_=S.env.NETLIFY_AGENT_RUNNER_SHA,g=nt();return{id:e,sessionId:t,resultBranch:r,prompt:o,runner:n,model:s,validateAgent:a,errorLogsPath:l,sessionHistoryContext:p,siteContext:E,hasRepo:x,useGateway:h,sha:_,accountType:g,validateAgentWithBuild:i}},nt=()=>{let e=S.env.NETLIFY_TEAM_TYPE;return e?e.includes("starter")?L:e.includes("pro")?"pro":e.startsWith("business")?b:e.startsWith("enterprise")?U:e.startsWith("free")?$:O:O};import se from"process";import{execa as rt,execaCommand as st}from"execa";var Ae={preferLocal:!0},Se=(e,t,r)=>{let[o,n]=it(t,r),s={...Ae,...n},a=rt(e,o,s);return we(a,s),a},Re=(e,t)=>{let r={...Ae,...t},o=st(e,r);return we(o,r),o},it=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},we=(e,t)=>{t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0||(e.stdout?.pipe(se.stdout),e.stderr?.pipe(se.stderr))},Ce=(e,t="SIGTERM")=>{try{return e.pid&&!e.killed?(se.kill(e.pid,t),console.log(`Killed process ${e.pid} with signal ${t}`),!0):!1}catch(r){return console.error("Error killing process:",r),!1}};var ve=async({config:e,netlify:t})=>{let{hasChanges:r,status:o}=await lt(t);if(!r)return{hasChanges:!1};let n=await ut(t,o);await t.utils.run("git",["add",".",...n]);let a=(await t.utils.run("git",["diff","--staged"])).stdout;if(r=!!a,!r)return{hasChanges:r,diff:a};let i;return e.sha&&(await t.utils.run("git",["commit","-m","Agent runner"]),i=(await t.utils.run("git",["diff",e.sha,"HEAD"])).stdout),{hasChanges:!0,diff:a,resultDiff:i}},at=["?? mise.toml",/\?\? .+?\.log/],lt=async e=>{let t=await e.utils.run("git",["status","-s"]);return{hasChanges:(t.stdout.trim().length===0?[]:t.stdout.split(`
91
+ `).filter(n=>!at.some(s=>s instanceof RegExp?s.test(n):n===s))).length!==0,status:t.stdout}};var Pe=async e=>{let{stdout:t}=await e.utils.run("git",["rev-parse","HEAD"]);return t.trim()},ke=async e=>{let{stdout:t}=await e.utils.run("git",["rev-list","--max-parents=0","HEAD"]);return t.trim()},ut=async(e,t="")=>{let r=[".netlify","mise.toml"],o=[],n=r.map(async a=>{try{return await e.utils.run("git",["check-ignore","-v",a]),null}catch{return`:!${a}`}});return(await Promise.all(n)).forEach(a=>{a&&o.push(a)}),t.split(`
92
+ `).forEach(a=>{let i=a.match(/\?\? (.+?)\.log$/)?.[1];i&&o.push(`:!${i}.log`)}),o};import ct from"fs/promises";import dt from"os";import Q from"path";import D from"process";var pt=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(console.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(console.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(console.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function ie({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:s,prompt:a}=e,{model:i}=e;if(n){let{token:m,url:f}=n;if(!m||!f)throw new Error("No token or url provided from AI Gateway");let d=ft[s];if(!d)throw new Error(`Claude is not supported for the account type ${s}`);if(i){if(!d?.models?.[i])throw new Error(`${i} is not supported for account type ${s}`)}else i=d.default;D.env.ANTHROPIC_API_KEY=m,D.env.ANTHROPIC_BASE_URL=f}else if(!D.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let l=[],c=[],p=[],E={},x=0,h=0,_,g,B=Q.join(D.cwd(),"node_modules"),C=[Q.join(D.env.NODE_PATH||B,".bin/claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose",...i?["--model",i]:[],"-p",a],v=`${D.env.NVM_BIN}/node`;console.log(`Running ${v} ${C.join(" ")}`);let R=t.utils.run(v,C,{all:!0,env:D.env});R.stdin?.end();let T=z(()=>{r?.({steps:l,duration:h}),o?.({steps:c,duration:h}),c=[]},250),N=(m,f)=>{m.id=x,x+=1,p.push(m),l.push(m),c.push(m),f||T.flush(),T(),f&&T.flush()},A="";return R.all.on("data",m=>{if(A+=m.toString(),!m.includes(`
93
+ `))return;let f=A.split(`
94
+ `).filter(Boolean).map(d=>{try{return JSON.parse(d)}catch{console.log("Could not parse line",d)}return null}).filter(Boolean);A="",f.forEach(d=>{Array.isArray(d?.message?.content)?d.message.content.forEach(u=>{switch(u.type){case"text":{u.text&&N({message:u.text});break}case"image":{typeof u.source=="object"&&u.source.type==="base64"&&u.source.media_type?N({message:`![](data:${u.source.media_type};base64,${u.source.data})`}):console.log(`Unsupported image type ${u.source?.type}`,u.source);break}case"tool_use":{if(u.name==="Task"){let y=u.input?.description&&`\`${u.input.description}\``;N({title:[u.name,y].filter(Boolean).join(" ")})}else E[u.id]=u;T.flush();break}case"tool_result":{let y=E[u.tool_use_id],P;if(y){let M=y.input?.file_path&&Q.relative(D.cwd(),y.input.file_path),I=M&&`\`${M}\``;P=[y.name,I].filter(Boolean).join(" ")}let Y=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(y?.name),k;if(typeof u.content=="string")k=u.content;else if(Array.isArray(u.content)){let M=[];u.content.forEach(I=>{I?.type==="text"&&typeof I.text=="string"?M.push(I.text):I?.type==="image"&&typeof I.source=="object"?I.source.type==="base64"&&I.source.media_type?M.push(`![](data:${I.source.media_type};base64,${I.source.data})`):console.log(`Unsupported image type ${I.source.type}`,I.source):console.log(`Unsupported block type ${I?.type}`)}),k=M.join(`
95
+
96
+ `)}Y&&k&&(k=`\`\`\`
97
+ ${k.trim()}
98
+ \`\`\``),N({title:P,message:k},!0);break}case"thinking":{u.thinking&&N({title:"Thinking",message:u.thinking},!0);break}default:console.log(`Message content type is not supported ${u.type}`,u)}}):d?.type==="result"&&(h=d.duration_ms,d.is_error?g=d.result:_=d.result,[p,l,c].forEach(u=>{u[u.length-1]?.message===_&&u.pop()}))})}),await R.catch(m=>{({error:g,result:_}=pt({catchError:m,runCmd:R,error:g,result:_,runnerName:"Claude"}))}),T.flush(),{steps:p,duration:h,result:_,error:g}}var Oe=async()=>{let e=Q.join(dt.homedir(),".claude");await ct.rm(e,{recursive:!0,force:!0})},ft={[b]:{default:"claude-3-7-sonnet-20250219",models:{"claude-3-5-haiku-20241022":{maxTokens:8192},"claude-3-7-sonnet-20250219":{maxTokens:64e3}}},[U]:{default:"claude-sonnet-4-20250514",models:{"claude-3-5-haiku-20241022":{maxTokens:8192},"claude-3-7-sonnet-20250219":{maxTokens:64e3},"claude-3-haiku-20240307":{maxTokens:4096},"claude-opus-4-20250514":{maxTokens:32e3},"claude-sonnet-4-20250514":{maxTokens:64e3}}},pro:{default:"claude-3-5-haiku-20241022",models:{"claude-3-5-haiku-20241022":{maxTokens:8192},"claude-3-haiku-20240307":{maxTokens:4096}}},[$]:{default:"claude-3-7-sonnet-20250219",models:{"claude-3-5-haiku-20241022":{maxTokens:8192},"claude-3-7-sonnet-20250219":{maxTokens:16e3}}},[L]:{default:"claude-3-7-sonnet-20250219",models:{"claude-3-5-haiku-20241022":{maxTokens:8192},"claude-3-7-sonnet-20250219":{maxTokens:16e3}}},[O]:{default:"claude-3-7-sonnet-20250219",models:{"claude-3-5-haiku-20241022":{maxTokens:8192},"claude-3-7-sonnet-20250219":{maxTokens:16e3}}}};import mt from"fs/promises";import gt from"os";import ae from"path";import G from"process";var ht=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(console.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(console.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(console.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function le({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:s,prompt:a}=e,{model:i}=e;if(n){let{token:T,url:N}=n;if(!T||!N)throw new Error("No token or url provided from AI Gateway");let A=_t[s];if(!A)throw new Error(`Codex is not supported for the account type ${s}`);if(i){if(!A?.models?.[i])throw new Error(`${i} is not supported for account type ${s}`)}else i=A.default;G.env.OPENAI_API_KEY=T,G.env.OPENAI_BASE_URL=N}else if(!G.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let l=[],c=[],p={},E=0,x=0,h,_,g=ae.join(G.cwd(),"node_modules"),B=[ae.join(G.env.NODE_PATH||g,".bin/codex"),"--approval-mode","full-auto",n?"--disable-response-storage":void 0,"--dangerously-auto-approve-everything",...i?["--model",i]:[],"-q",a].filter(Boolean),C=`${G.env.NVM_BIN}/node`;console.log(`Running ${C} ${B.join(" ")}`);let v=t.utils.run(C,B,{all:!0,env:{...G.env,CODEX_UNSAFE_ALLOW_NO_SANDBOX:"1"}}),R="";return v.all.on("data",T=>{if(R+=T.toString(),!T.includes(`
99
+ `))return;let N=R.toString().split(`
100
+ `).filter(Boolean).map(f=>{try{return JSON.parse(f)}catch{console.log("Could not parse line",f)}return null}).filter(Boolean);R="";let A=[],m=!1;N.forEach(f=>{if(f?.duration_ms&&(x=f.duration_ms,m=!0),f?.type==="local_shell_call")p[f.call_id]=f;else if(f?.type==="local_shell_call_output"){let d=Et(p[f.call_id],f);d.id=E,E+=1,d&&(c.push(d),l.push(d),A.push(d),m=!0)}else f?.type==="message"&&f.role==="assistant"?h=f.content.map(d=>d.text).join(`
101
+ `):f?.type==="message"&&f.role==="system"&&(_=f.content.map(d=>d.text).join(`
102
+ `))}),m&&(r?.({steps:l,duration:x}),o?.({steps:A,duration:x}))}),await v.catch(T=>{({error:_,result:h}=ht({catchError:T,runCmd:v,error:_,result:h,runnerName:"Codex"}))}),{steps:c,duration:x,result:h,error:_}}var Be=async()=>{let e=ae.join(gt.homedir(),".codex");await mt.rm(e,{recursive:!0,force:!0})},_t={[b]:{default:"codex-mini-latest",models:{"codex-mini-latest":{maxTokens:75e3},"gpt-3.5-turbo":{maxTokens:4096},"gpt-4-turbo":{maxTokens:4096},"gpt-4o":{maxTokens:16384},"gpt-4o-mini":{maxTokens:32768},"o3-mini":{maxTokens:1e5}}},[U]:{default:"codex-mini-latest",models:{"codex-mini-latest":{maxTokens:75e3},"gpt-3.5-turbo":{maxTokens:4096},"gpt-4-turbo":{maxTokens:4096},"gpt-4o":{maxTokens:16384},"gpt-4o-mini":{maxTokens:32768},o1:{maxTokens:1e5},"o1-mini":{maxTokens:65536},"o3-mini":{maxTokens:1e5},"gpt-image-1":{},"dall-e-2":{},"dall-e-3":{}}},pro:{default:"codex-mini-latest",models:{"codex-mini-latest":{maxTokens:75e3},"gpt-3.5-turbo":{maxTokens:4096},"gpt-4-turbo":{maxTokens:4096},"gpt-4o":{maxTokens:16384},"gpt-4o-mini":{maxTokens:32768}}},[$]:{default:"codex-mini-latest",models:{"codex-mini-latest":{maxTokens:75e3},"gpt-3.5-turbo":{maxTokens:4096},"gpt-4-turbo":{maxTokens:4096},"gpt-4o":{maxTokens:16384},"gpt-4o-mini":{maxTokens:32768},"o3-mini":{maxTokens:16e3}}},[L]:{default:"codex-mini-latest",models:{"codex-mini-latest":{maxTokens:75e3},"gpt-3.5-turbo":{maxTokens:4096},"gpt-4-turbo":{maxTokens:4096},"gpt-4o":{maxTokens:16384},"gpt-4o-mini":{maxTokens:32768},"o3-mini":{maxTokens:16e3}}},[O]:{default:"codex-mini-latest",models:{"codex-mini-latest":{maxTokens:75e3},"gpt-3.5-turbo":{maxTokens:4096},"gpt-4-turbo":{maxTokens:4096},"gpt-4o":{maxTokens:16384},"gpt-4o-mini":{maxTokens:32768},"o3-mini":{maxTokens:16e3}}}},Tt=new Set(["bash","-lc"]),Et=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(s=>!Tt.has(s)),o=r?`Running \`${r.join(" ")}\``:void 0,n;try{n=JSON.parse(t.output).output?.trim(),n&&(n=`\`\`\`
103
+ ${n.trim()}
104
+ \`\`\``)}catch(s){console.error("Could not decode outputMsg",s,t.output)}return{title:o,message:n}};import xt from"fs/promises";import yt from"os";import ee from"path";import F from"process";var It=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(console.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(console.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(console.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),Nt={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"};async function ue({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:s,prompt:a}=e,{model:i}=e;if(n){let{token:m,url:f}=n;if(!m||!f)throw new Error("No token or url provided from AI Gateway");let d=At[s];if(!d)throw new Error(`Gemini is not supported for the account type ${s}`);if(i){if(!d?.models?.[i])throw new Error(`${i} is not supported for account type ${s}`)}else i=d.default;F.env.GEMINI_API_KEY=m,F.env.GOOGLE_GEMINI_BASE_URL=f}else if(!F.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let l=[],c=[],p=[],E={},x=0,h=0,_,g,B=ee.join(F.cwd(),"node_modules"),C=[ee.join(F.env.NODE_PATH||B,".bin/gemini"),...i?["--model",i]:[],"--yolo","-p",a],v=`${F.env.NVM_BIN}/node`;console.log(`Running ${v} ${C.join(" ")}`);let R=t.utils.run(v,C,{all:!0,env:F.env});R.stdin?.end();let T=z(()=>{r?.({steps:l,duration:h}),o?.({steps:c,duration:h}),c=[]},250),N=(m,f)=>{m.id=x,x+=1,p.push(m),l.push(m),c.push(m),f||T.flush(),T(),f&&T.flush()},A="";return R.all.on("data",m=>{if(A+=m.toString(),!m.includes(`
105
+ `))return;let f=A.toString().split(`
106
+ `).filter(Boolean).map(d=>{try{if(d.startsWith("[API Error")){let u=d.match(/\[api error: (.+?)]$/i)?.[1];return{type:"error",value:Z(u,!1)?.error?.message||u||"Gemini encountered error"}}return JSON.parse(d)}catch{}return null}).filter(Boolean);A="",f.forEach(d=>{switch(d.type){case"thought":{let u=d.value;N({title:u?.subject??"Thinking...",message:u?.description},!0);break}case"content":{d.value&&N({message:d.value});break}case"tool_call_request":{let u=d.value,y=Nt[u.name]??u.name,P=u.args?.path||u.args?.absolute_path,Y=P&&ee.relative(F.cwd(),P),k=u.args?.command,I={title:[y,Y&&`\`${Y}\``,k&&`\`${k}\``].filter(Boolean).join(" ")};E[u.callId]=I,T.flush();break}case"tool_result":{let u=d.value,y=E[u.callId];if(y){let P=[u.resultDisplay,u.responseParts?.functionResponse?.response?.output].find(Y=>typeof Y=="string"&&Y);P&&(y.message=`\`\`\`
107
+ ${P.trim()}
108
+ \`\`\``),N(y,!0)}break}case"result":{h=d.duration_ms,_=d.value,[p,l,c].forEach(u=>{u[u.length-1]?.message===_&&u.pop()});break}case"error":{g=d.value;break}case"finished":break;default:{console.warn("Unhandled message type:",d.type);break}}})}),await R.catch(m=>{({error:g,result:_}=It({catchError:m,runCmd:R,error:g,result:_,runnerName:"Gemini"}))}),T.flush(),{steps:p,duration:h,result:_,error:g}}var Le=async()=>{let e=ee.join(yt.homedir(),".gemini");await xt.rm(e,{recursive:!0,force:!0})},At={[b]:{default:"gemini-2.5-flash",models:{"gemini-2.0-flash":{maxTokens:8192},"gemini-2.0-flash-lite":{maxTokens:8192},"gemini-2.5-flash":{maxTokens:65536}}},[U]:{default:"gemini-2.5-pro",models:{"gemini-1.5-flash":{maxTokens:8192},"gemini-1.5-flash-8b":{maxTokens:8192},"gemini-1.5-pro":{maxTokens:8192},"gemini-2.0-flash":{maxTokens:8192},"gemini-2.0-flash-lite":{maxTokens:8192},"gemini-2.5-flash":{maxTokens:65536},"gemini-2.5-flash-lite":{maxTokens:65536},"gemini-2.5-pro":{maxTokens:65536},"imagen-4.0-generate-001":{},"veo-3.0-generate-preview":{}}},pro:{default:"gemini-2.0-flash-lite",models:{"gemini-1.5-flash-8b":{maxTokens:8192},"gemini-2.0-flash-lite":{maxTokens:8192}}},[$]:{default:"gemini-2.5-flash",models:{"gemini-2.0-flash":{maxTokens:8192},"gemini-2.0-flash-lite":{maxTokens:8192},"gemini-2.5-flash":{maxTokens:16e3}}},[L]:{default:"gemini-2.5-flash",models:{"gemini-2.0-flash":{maxTokens:8192},"gemini-2.0-flash-lite":{maxTokens:8192},"gemini-2.5-flash":{maxTokens:16e3}}},[O]:{default:"gemini-2.5-flash",models:{"gemini-2.0-flash":{maxTokens:8192},"gemini-2.0-flash-lite":{maxTokens:8192},"gemini-2.5-flash":{maxTokens:16e3}}}};var St={codex:{runner:le,clean:Be},claude:{runner:ie,clean:Oe},gemini:{runner:ue,clean:Le}},be=St;var wt=8888,Ue=async({apiThrottle:e,apiToken:t,cwd:r}={})=>{let o=Ne(),n=be[o.runner];if(!n)throw new Error(`${o.runner} is not supported`);let s=await vt({apiToken:t,cwd:r}),a=Ct({apiToken:t,devPort:s.port});_e(a),console.log(`Starting agent runner ${o.runner}`,{id:o.id,sessionId:o.sessionId,prompt:o.prompt,hasRepo:o.hasRepo});let i=o.useGateway?await xe({netlify:a}):void 0;o.validateAgent&&o.errorLogsPath&&console.log("Validation will use error logs file",{path:o.errorLogsPath});let l=ye(({steps:p=[],duration:E})=>{let x=[...p];return p.length=0,W(o.id,o.sessionId,{steps:x,duration:E})},e),c=o.sha;if(o.hasRepo){if(!o.sha){console.log("No sha in runner, marking the latest as the one");let p=await Pe(a);await J(o.id,{sha:p}),c=p}}else console.log("Zip project"),c=await ke(a);return console.log("Resolved sha to",c),{aiGateway:i,config:o,context:a,persistSteps:l,runner:n,stopDev:s.stop}},Ct=({apiToken:e,devPort:t})=>({constants:{NETLIFY_API_HOST:te.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||te.env.NETLIFY_API_TOKEN,SITE_ID:te.env.SITE_ID,URL:`http://localhost:${t}`,FUNCTIONS_DIST:te.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:Se}}),vt=async({apiToken:e,cwd:t})=>{let r=await Rt({port:wt}),o=[`--port ${r}`,"--no-open","--skip-gitignore","--context dev-server","--offline-env",e?`--auth ${e}`:void 0,t?`--cwd ${t}`:void 0].filter(Boolean),n=Re(`netlify dev ${o.join(" ")}`,{detached:!0});return{port:r,stop:()=>Ce(n)}};var $e=async({config:e,context:t,runner:r,persistSteps:o,aiGateway:n})=>{let{prompt:s}=await q({config:e,netlify:t}),a={...e,prompt:s},i=await r({aiGateway:n,config:a,netlify:t,persistSteps:o});if(i.error)throw console.error("Runner failed",{stepsCount:i.steps.length,duration:i.duration,error:i.error}),new Error(i.error);return{runnerResult:i}};var De=async e=>{try{console.log("Running netlify build...");let t=await e.utils.run("netlify",["build","--context","deploy-preview"]);return console.log("Build completed successfully"),{success:!0,stdout:t?.stdout||"",stderr:t?.stderr||""}}catch(t){return console.log("Build failed:",t.message),{success:!1,stdout:t.stdout||"",stderr:t.stderr||"",error:t.message}}},Pt=e=>{if(e.success)return"";let t=[];return e.error&&t.push(`Build Error: ${e.error}`),e.stderr&&t.push(`Build stderr:
109
+ ${e.stderr}`),e.stdout&&t.push(`Build stdout:
110
+ ${e.stdout}`),t.join(`
111
+
112
+ `)},kt=e=>{let t=[];return t.push("Build validation failed. Here are the build errors you need to fix:"),e.forEach((r,o)=>{t.push(`Build attempt ${o+1}: ${Pt(r)}`)}),t.join(`
113
+ `)},Fe=async({netlify:e,initialResult:t,runAgentCallback:r})=>{console.log("Starting post-execution build validation");let o=await De(e);if(o.success)return console.log("Build validation passed"),{...t,buildValidation:{attempts:0,finalBuildSuccess:!0,buildHistory:[o]}};console.log("Build validation failed, starting build-fix iteration process");let n=[o],s=[],a=0,i=t;for(let l=1;l<=3;l++){console.log(`Build fix attempt ${l}/3`);let c=kt(n);console.log("Running agent to fix build errors"),i=await r({errorContext:c}),s=[...s,...i.steps||[]],a+=i.duration||0;let p=await De(e);if(n.push(p),p.success)return console.log(`Build fixed after ${l} attempts`),{...i,steps:s,duration:a,buildValidation:{attempts:l,finalBuildSuccess:!0,buildHistory:n}};console.log(`Build still failing after attempt ${l}`)}return console.log("Build validation failed after 3 attempts"),{...i,steps:s,duration:a,buildValidation:{attempts:3,finalBuildSuccess:!1,buildHistory:n,error:"Build validation failed - unable to fix build errors after 3 attempts"}}};import Ot from"process";var Ye=async({netlify:e,hasRepo:t,skipBuild:r,message:o="Agent Preview",deploySubdomain:n})=>{try{let s=["deploy","--message",`"${o}"`,"--json"];t||(console.log("Deploy: Uploading source zip"),s.push("--upload-source-zip")),n&&s.push("--alias",n),r?(console.log("Deploy: Skipping build"),s.push("--no-build")):s.push("--context","deploy-preview"),console.log(`Running: netlify ${s.join(" ")}`);let a=await e.utils.run("netlify",s),i=JSON.parse(a.stdout.trim());console.log(`
114
+ Preview deploy created successfully:`,{deployId:i.deploy_id,deployUrl:i.deploy_url,siteId:i.site_id});let l={deployId:i.deploy_id,previewUrl:i.deploy_url,logsUrl:i.logs,siteId:i.site_id};return t||(l.sourceZipFilename=i.source_zip_filename),l}catch(s){throw console.error("Failed to create preview deploy via CLI:",s),s}};var Me=async({config:e,context:t,result:r,buildValidation:o})=>{let{diff:n,resultDiff:s,hasChanges:a}=await ve({config:e,netlify:t});console.log("Preview deploy condition check:",{resultUndefined:r===void 0,resultType:typeof r,hasChanges:a,wouldCreatePreview:r!==void 0&&a});let i=null;if(r!==void 0&&a)try{let l;try{let c=await Te(e.id,e.sessionId);c?.title&&(l=c.title)}catch(c){console.warn("Failed to fetch session title, using fallback message:",c.message)}i=await Ye({netlify:t,hasRepo:e.hasRepo,message:l,skipBuild:o?.finalBuildSuccess,deploySubdomain:Ie(e.id,Ot.env.SITE_NAME)})}catch(l){console.warn("Failed to create preview deploy (continuing with agent run):",l)}return console.log("Git status",{diff:n,hasChanges:a}),{diff:n,resultDiff:s,hasChanges:a,previewInfo:i}};var je=async({config:e,diff:t,result:r,duration:o,resultDiff:n,previewInfo:s,cleanRunner:a})=>{let i={result_diff:t,result:r||"Done",duration:o};return s&&s.deployId&&(i.deploy_id=s.deployId),s&&s.sourceZipFilename&&(i.result_zip_file_name=s.sourceZipFilename),n?(console.log("Updating total agent result diff"),await J(e.id,{result_diff:n})):console.log("No total result diff, not updating"),await a?.(),console.log("Updated agent runner with result"),await W(e.id,e.sessionId,i),{sessionUpdate:i}};var Lt=Bt(import.meta.url),bt=Lt("../package.json"),dn=async(e={})=>{console.log("Starting agent runner orchestrator",{version:bt.version});let{aiGateway:t,config:r,context:o,persistSteps:n,runner:s,stopDev:a}=await Ue({apiToken:e.apiToken}),{runnerResult:i}=await $e({config:r,context:o,runner:s.runner,persistSteps:n,aiGateway:t});a();let l=i,c;if(ce(r)){console.log("Build validation enabled, performing post-execution build validation");let g=await Fe({config:r,netlify:o,initialResult:i,runAgentCallback:async({errorContext:B})=>{let{prompt:C}=await q({config:{...r,prompt:i.result},buildErrorContext:B,netlify:o});return s.runner({config:{...r,prompt:C},netlify:o,persistSteps:n,aiGateway:t})}});console.log("Build validation completed:",g.buildValidation),l=g,c=g.buildValidation}let p={ok:!0},E=l.result;if(r.validateAgent&&l.result){let g=me(l.result);console.log("Validation result",g),g&&(p=g),E=ge(l.result)}p.ok||console.log("Validation failed",p);let{diff:x,resultDiff:h,previewInfo:_}=await Me({config:r,context:o,result:E,buildValidation:c});await je({config:r,diff:x,result:E,duration:l.duration,resultDiff:h,previewInfo:_,cleanRunner:s.clean})};export{dn as runPipeline};
115
+ //# sourceMappingURL=index.js.map
package/manifest.yml ADDED
@@ -0,0 +1 @@
1
+ name: '@netlify/plugin-agent-runner'
package/package.json ADDED
@@ -0,0 +1,83 @@
1
+ {
2
+ "name": "@netlify/agent-runner-cli",
3
+ "type": "module",
4
+ "version": "0.0.1",
5
+ "description": "CLI tool for running Netlify agents",
6
+ "main": "./dist/index.js",
7
+ "types": "./dist/index.d.ts",
8
+ "exports": "./dist/index.js",
9
+ "bin": {
10
+ "agent-runner-cli": "./dist/bin.js"
11
+ },
12
+ "files": [
13
+ "dist/**/*.js",
14
+ "dist/**/*.d.ts",
15
+ "manifest.yml",
16
+ "patches",
17
+ "scripts"
18
+ ],
19
+ "scripts": {
20
+ "build": "tsup",
21
+ "dev": "tsup --watch",
22
+ "prepare": "husky install node_modules/@netlify/eslint-config-node/.husky/",
23
+ "prepublishOnly": "npm ci && npm test",
24
+ "prepack": "npm run build",
25
+ "test": "run-s build format test:dev",
26
+ "format": "run-s build format:check-fix:*",
27
+ "format:ci": "run-s build format:check:*",
28
+ "format:check-fix:lint": "run-e format:check:lint format:fix:lint",
29
+ "format:check:lint": "cross-env-shell eslint $npm_package_config_eslint",
30
+ "format:fix:lint": "cross-env-shell eslint --fix $npm_package_config_eslint",
31
+ "format:check-fix:prettier": "run-e format:check:prettier format:fix:prettier",
32
+ "format:check:prettier": "cross-env-shell prettier --check $npm_package_config_prettier",
33
+ "format:fix:prettier": "cross-env-shell prettier --write $npm_package_config_prettier",
34
+ "test:dev": "run-s build test:dev:*",
35
+ "test:ci": "run-s build test:ci:*",
36
+ "test:dev:vitest": "vitest",
37
+ "test:ci:vitest": "c8 -r lcovonly -r text -r json vitest",
38
+ "postinstall": "node scripts/postinstall.js"
39
+ },
40
+ "config": {
41
+ "eslint": "--cache --format=codeframe --max-warnings=0 \"{src,scripts,test,.github}/**/*.{js,ts,md,html}\"",
42
+ "prettier": "--ignore-path .gitignore --loglevel=warn \"{src,scripts,test,.github}/**/*.{js,ts,md,yml,json,html}\" \"*.{js,ts,yml,json,html}\" \".*.{js,ts,yml,json,html}\" \"!**/package-lock.json\" \"!package-lock.json\""
43
+ },
44
+ "keywords": [],
45
+ "license": "MIT",
46
+ "repository": "netlify/agent-runner-cli",
47
+ "bugs": {
48
+ "url": "https://github.com/netlify/agent-runner-cli/issues"
49
+ },
50
+ "author": "Netlify Inc.",
51
+ "directories": {
52
+ "test": "test"
53
+ },
54
+ "devDependencies": {
55
+ "@commitlint/cli": "^19.0.0",
56
+ "@commitlint/config-conventional": "^19.0.0",
57
+ "@eslint/compat": "^1.3.2",
58
+ "@eslint/js": "^9.35.0",
59
+ "@netlify/eslint-config-node": "^7.0.1",
60
+ "@types/node": "^24.5.0",
61
+ "@typescript-eslint/eslint-plugin": "^7.1.0",
62
+ "@typescript-eslint/parser": "^7.1.0",
63
+ "@vitest/eslint-plugin": "^1.3.10",
64
+ "c8": "^9.0.0",
65
+ "eslint-config-prettier": "^10.1.8",
66
+ "eslint-plugin-n": "^17.0.0",
67
+ "husky": "^8.0.0",
68
+ "patch-package": "^8.0.0",
69
+ "tsup": "^8.5.0",
70
+ "typescript": "^5.0.0",
71
+ "typescript-eslint": "^8.44.0",
72
+ "vitest": "^1.5.0"
73
+ },
74
+ "dependencies": {
75
+ "@anthropic-ai/claude-code": "^1.0.51",
76
+ "@google/gemini-cli": "0.1.17",
77
+ "execa": "^8.0.0",
78
+ "get-port": "^5.1.1",
79
+ "minimist": "^1.2.8",
80
+ "my-codex-no-sandbox": "^0.1.2505290819",
81
+ "ws": "^8.18.2"
82
+ }
83
+ }
@@ -0,0 +1,87 @@
1
+ diff --git a/node_modules/@google/gemini-cli/dist/src/nonInteractiveCli.js b/node_modules/@google/gemini-cli/dist/src/nonInteractiveCli.js
2
+ index 874ec89..6eb9a7d 100644
3
+ --- a/node_modules/@google/gemini-cli/dist/src/nonInteractiveCli.js
4
+ +++ b/node_modules/@google/gemini-cli/dist/src/nonInteractiveCli.js
5
+ @@ -5,9 +5,22 @@
6
+ */
7
+ import { executeToolCall, shutdownTelemetry, isTelemetrySdkInitialized, GeminiEventType, ToolErrorType, } from '@google/gemini-cli-core';
8
+ import { parseAndFormatApiError } from './ui/utils/errorParsing.js';
9
+ +function writeAsync(data) {
10
+ + return new Promise((resolve, reject) => {
11
+ + const canWrite = process.stdout.write(data, (err) => {
12
+ + if (err) reject(err);
13
+ + else resolve();
14
+ + });
15
+ + if (!canWrite) {
16
+ + process.stdout.once('drain', resolve);
17
+ + }
18
+ + });
19
+ +}
20
+ export async function runNonInteractive(config, input, prompt_id) {
21
+ await config.initialize();
22
+ // Handle EPIPE errors when the output is piped to a command that closes early.
23
+ + const startTime = Date.now();
24
+ + let lastContent = '';
25
+ process.stdout.on('error', (err) => {
26
+ if (err.code === 'EPIPE') {
27
+ // Exit gracefully if the pipe is closed.
28
+ @@ -29,15 +42,27 @@ export async function runNonInteractive(config, input, prompt_id) {
29
+ }
30
+ const functionCalls = [];
31
+ const responseStream = geminiClient.sendMessageStream(currentMessages[0]?.parts || [], abortController.signal, prompt_id);
32
+ + let contentBuffer = '';
33
+ + const flushContentBuffer = async () => {
34
+ + if (contentBuffer) {
35
+ + await writeAsync(JSON.stringify({ type: 'content', value: contentBuffer }) + '\n');
36
+ + lastContent = contentBuffer;
37
+ + contentBuffer = '';
38
+ + }
39
+ + }
40
+ for await (const event of responseStream) {
41
+ if (abortController.signal.aborted) {
42
+ console.error('Operation cancelled.');
43
+ return;
44
+ }
45
+ if (event.type === GeminiEventType.Content) {
46
+ - process.stdout.write(event.value);
47
+ + contentBuffer += event.value;
48
+ + } else {
49
+ + await flushContentBuffer();
50
+ + await writeAsync(JSON.stringify(event) + '\n');
51
+ }
52
+ - else if (event.type === GeminiEventType.ToolCallRequest) {
53
+ +
54
+ + if (event.type === GeminiEventType.ToolCallRequest) {
55
+ const toolCallRequest = event.value;
56
+ const fc = {
57
+ name: toolCallRequest.name,
58
+ @@ -47,6 +72,7 @@ export async function runNonInteractive(config, input, prompt_id) {
59
+ functionCalls.push(fc);
60
+ }
61
+ }
62
+ + await flushContentBuffer();
63
+ if (functionCalls.length > 0) {
64
+ const toolResponseParts = [];
65
+ for (const fc of functionCalls) {
66
+ @@ -59,6 +85,9 @@ export async function runNonInteractive(config, input, prompt_id) {
67
+ prompt_id,
68
+ };
69
+ const toolResponse = await executeToolCall(config, requestInfo, toolRegistry, abortController.signal);
70
+ +
71
+ + await writeAsync(JSON.stringify({type: 'tool_result', value: toolResponse}) + '\n');
72
+ +
73
+ if (toolResponse.error) {
74
+ console.error(`Error executing tool ${fc.name}: ${toolResponse.resultDisplay || toolResponse.error.message}`);
75
+ if (toolResponse.errorType === ToolErrorType.UNHANDLED_EXCEPTION)
76
+ @@ -81,7 +110,10 @@ export async function runNonInteractive(config, input, prompt_id) {
77
+ currentMessages = [{ role: 'user', parts: toolResponseParts }];
78
+ }
79
+ else {
80
+ - process.stdout.write('\n'); // Ensure a final newline
81
+ + const endTime = Date.now();
82
+ + const duration = endTime - startTime;
83
+ + const result = { type: 'result', duration_ms: duration, value: lastContent };
84
+ + await writeAsync(JSON.stringify(result) + '\n');
85
+ return;
86
+ }
87
+ }
@@ -0,0 +1,38 @@
1
+ import { execSync } from 'node:child_process'
2
+ import fs from 'node:fs'
3
+ import path from 'node:path'
4
+ import process from 'node:process'
5
+
6
+ const cwd = process.cwd()
7
+ const initCwd = process.env.INIT_CWD
8
+ const targetCwd = initCwd || cwd
9
+
10
+ const patchesDir = path.join(cwd, 'patches')
11
+
12
+ console.log(`[postinstall] cwd=${cwd}`)
13
+ console.log(`[postinstall] INIT_CWD=${initCwd}`)
14
+ console.log('[postinstall] using patchesDir =', patchesDir)
15
+
16
+ if (fs.existsSync(patchesDir)) {
17
+ const files = fs.readdirSync(patchesDir).filter((file) => file.endsWith('.patch'))
18
+
19
+ if (files.length === 0) {
20
+ console.log('[postinstall] no .patch files found')
21
+ } else {
22
+ for (const file of files) {
23
+ const filePath = path.join(patchesDir, file)
24
+ try {
25
+ execSync(`patch -p1 --forward -i "${filePath}"`, {
26
+ stdio: 'inherit',
27
+ cwd: targetCwd,
28
+ shell: true,
29
+ })
30
+ console.log(`[postinstall] Applied ${file}`)
31
+ } catch (error) {
32
+ console.warn(`[postinstall] Failed to apply ${file}:`, error.message)
33
+ }
34
+ }
35
+ }
36
+ } else {
37
+ console.log('[postinstall] patchesDir does not exist, skipping')
38
+ }