@netlify/agent-runner-cli 1.28.0 → 1.29.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bin.js +40 -40
- package/dist/index.js +49 -49
- package/package.json +1 -1
package/dist/bin.js
CHANGED
@@ -1,42 +1,42 @@
#!/usr/bin/env node
-import
-`),r=[],
-`)),
-${
+
import xt from"process";import Ir from"minimist";import{createRequire as hr}from"module";import{createTracerProvider as wt}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as De}from"@netlify/otel/opentelemetry";import{withActiveSpan as It}from"@netlify/otel";import{OTLPTraceExporter as Nt}from"@opentelemetry/exporter-trace-otlp-grpc";function _(e){let t=!process.env.VITEST;return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[${e}]`,...r)}}}var xe=_("tracing"),Fe=(e,t,r)=>wt({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new De(new Re),new De(new Nt({url:r}))]});function le(e,t,r){return xe.log(`\u23F3 TRACE: ${t} starting...`),It(e,t,r)}var Re=class{export(t,r){for(let o of t)this.logSpan(o);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,o=t.attributes,n=[];for(let[l,i]of Object.entries(o))l.includes("duration")&&typeof i=="number"?n.push(`${l}=${i.toFixed(2)}ms`):n.push(`${l}=${i}`);let s=t.status?.code===2?"\u274C":"\u2705",a=n.length>0?` [${n.join(", ")}]`:"";xe.log(`${s} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${a}`),t.status?.code===2&&t.status.message&&xe.log(` \u274C Error: ${t.status.message}`)}};var Ct=["error","failed","exception","fatal","panic","abort","crash"];function Ue(e){let t=e.split(`
+
`),r=[],o=-1,n=0;for(;n<t.length;){let l=t[n].slice(0,500).toLowerCase();if(Ct.some(p=>l.includes(p))){let p=Math.max(0,n-10,o+1),c=Math.min(t.length-1,n+20),d=[];for(let m=p;m<=c;m++)d.push(t[m]);r.push(d.join(`
+
`)),o=c,n=c+1}else n++}if(r.length===0)return e;let s=r.map((a,l)=>`<extracted_error_chunk order="${l+1}">
+
${a}
</extracted_error_chunk>`).join(`
-`);return s.length>e.length*.8?e:s}import
-`).filter(
-`).forEach(
-`))return;let
-`).filter(Boolean).map(
+
`);return s.length>e.length*.8?e:s}import he from"process";import{getTracer as nr}from"@netlify/otel";import ue from"process";var we=ue.env.NETLIFY_API_URL,Ie=ue.env.NETLIFY_API_TOKEN,re=_("api"),ce=async(e,t={})=>{if(!we||!Ie)throw new Error("No API URL or token");let r=new URL(e,we),o={...t,headers:{...t.headers,Authorization:`Bearer ${Ie}`}};ue.env.AGENT_RUNNERS_DEBUG==="true"&&(o.headers["x-nf-debug-logging"]="true"),t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),s=n.ok&&n.status<=299;if(ue.env.AGENT_RUNNERS_DEBUG==="true")re.log(`Response headers for ${r}:`),n.headers.forEach((l,i)=>{re.log(` ${i}: ${l}`)});else{let l=n.headers.get("x-nf-request-id");re.log(`Request ID for ${r}: ${l||"N/A"}`)}if(s||re.error(`Got status ${n.status} for request ${r}`),t.raw){if(!s)throw n;return n}let a=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!s)throw a;return a},Me=e=>{re.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(we=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(Ie=e.constants.NETLIFY_API_TOKEN)},pe=(e,t)=>ce(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),X=(e,t,r)=>ce(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var Ge=(e,t)=>ce(`/api/v1/agent_runners/${e}/sessions/${t}`),Ye=(e,t,r)=>ce(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r}});var je=_("ai_gateway"),Be=async({netlify:e,config:t})=>{let r,o,n,s,a=e.constants?.SITE_ID;if(!a)throw new Error("No site id");let l=async()=>{clearTimeout(n),je.log("Requesting AI gateway information");let i=await Ye(a,t.id,t.sessionId);if({token:r,url:s}=i,o=i.expires_at?i.expires_at*1e3:void 0,je.log("Got AI gateway information",{token:!!r,expiresAt:o,url:s}),o){let p=o-Date.now()-6e4;p>0&&(n=setTimeout(()=>{l()},p))}};return await l(),{get url(){return s},get token(){return r}}};import oe from"process";import{execa as Ot,execaCommand as qr}from"execa";import{Transform as At}from"stream";var St=["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE"];function Pt(){return Object.entries(process.env).filter(([e,t])=>!(!t||St.includes(e)||t.length<5)).map(([,e])=>e).filter(Boolean)}function Ne(e){if(typeof e!="string")return e;let t=Pt();if(t.length===0)return e;let r=e;return t.forEach(o=>{let n=new RegExp(vt(o),"g");r=r.replace(n,"******")}),r}function vt(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var ne=class extends At{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,o){let n=t.toString(),s=Ne(n);o(null,s)}};function He(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let 
t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(o,n,s){let a=typeof o=="string"?Ne(o):o;return typeof n=="function"?t(a,n):t(a,n,s)},process.stderr.write=function(o,n,s){let a=typeof o=="string"?Ne(o):o;return typeof n=="function"?r(a,n):r(a,n,s)}}var Wr=_("shell"),kt={preferLocal:!0},qe=(e,t,r)=>{let[o,n]=bt(t,r),s={...kt,...n},a=Ot(e,o,s);return Lt(a,s),a};var bt=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},Lt=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(oe.env.NETLIFY_MASK_LOGS!=="false"){e.stdout?.pipe(new ne).pipe(oe.stdout),e.stderr?.pipe(new ne).pipe(oe.stderr);return}e.stdout?.pipe(oe.stdout),e.stderr?.pipe(oe.stderr)};var Ke="netlify-agent-runner-context.md",Ce="task-history",Ae="netlify-context",L=".netlify",se="results.md",Se="assets",U="other",M="personal";var G="enterprise",Y="free",Ve=[M,"pro",G,Y];var We=_("utils"),$t=e=>new Promise(t=>{setTimeout(t,e)}),Xe=(e,t=3e3)=>{let r=!1,o=null,n=[],s=null,a=(...l)=>{if(r)return o=l,new Promise(c=>{n.push(c)});r=!0;let i,p=new Promise(c=>{i=c});return s=(async()=>{await Promise.resolve();let c=await e(...l);for(i(c);;){if(await $t(t),!o)return r=!1,s=null,c;let d=o,m=n;o=null,n=[],c=await e(...d),m.forEach(E=>{E(c)})}})(),p};return a.flush=async()=>{if((r||o)&&s)return await s,a.flush()},a},de=(e,t,r=!1)=>{let o=null,n=null,s=null,a=function(...l){n=l,s=this;let i=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(s,n),n=null,s=null)},t),i&&(e.apply(s,n),n=null,s=null)};return a.cancel=()=>{clearTimeout(o),o=null,n=null,s=null},a.flush=()=>{if(o){clearTimeout(o);let l=n,i=s;o=null,n=null,s=null,e.apply(i,l)}},a},fe=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(o){t&&(r?.error?r.error("Could not parse JSON",o):We.error("Could not parse JSON",o))}},Je=(e,t)=>{let n=".netlify.app",s="agent-";if(!t)return`${s}${e.slice(0,6)}`;let l=`--${t}${n}`;if(l.length>55)return"";let i=60-l.length;if(i<=0)return"";if(i>=s.length+6){let p=Math.min(i-s.length,e.length);return`${s}${e.slice(0,p)}`}return e.slice(0,i)},Dt=e=>!e||typeof e!="object"||Array.isArray(e)||Object.keys(e).length===0?!1:!!Ve.some(t=>t in e),ze=()=>{let e={},t={codex:process.env.NETLIFY_FF_AGENT_RUNNER_CODEX_VERSION,claude:process.env.NETLIFY_FF_AGENT_RUNNER_CLAUDE_VERSION,gemini:process.env.NETLIFY_FF_AGENT_RUNNER_GEMINI_VERSION};return Object.entries(t).forEach(([r,o])=>{if(o){let n=`NETLIFY_FF_AGENT_RUNNER_${r.toUpperCase()}_VERSION`;try{let s=JSON.parse(o);Dt(s)&&(e[r]=s)}catch(s){let l=s instanceof SyntaxError?"Invalid JSON":s.message;We.error(`Could not parse ${r} model version override from ${n}: ${l}`)}}}),e};import{Buffer as Ze}from"buffer";var Qe=async({config:e,netlify:t})=>{let r=await Ut(t),{hasChanges:o}=r,{status:n}=r;if(!o)return{hasChanges:!1};let s=await Mt(t,n);await t.utils.run("git",["add",".",...s]);let a={stdio:["ignore","pipe","pipe"]},i=(await t.utils.run("git",["diff","--staged"],a)).stdout;if(o=!!i,!o)return{hasChanges:!1};let c=(await t.utils.run("git",["diff","--staged","--binary"],a)).stdout,d,m;if(e.sha){await t.utils.run("git",["commit","-m","Agent runner"]),d=(await t.utils.run("git",["diff",e.sha,"HEAD"],a)).stdout;let g=(await t.utils.run("git",["diff",e.sha,"HEAD","--binary"],a)).stdout;d!==g&&(m=Ze.from(g).toString("base64"))}let E={hasChanges:!0,diff:i,resultDiff:d};return i!==c&&(E.diffBinary=Ze.from(c).toString("base64")),m&&(E.resultDiffBinary=m),E},Ft=["?? mise.toml",/\?\? 
.+?\.log/],Ut=async e=>{let t=await e.utils.run("git",["status","-s"]);return{hasChanges:(t.stdout.trim().length===0?[]:t.stdout.split(`
+
`).filter(n=>!Ft.some(s=>s instanceof RegExp?s.test(n):n===s))).length!==0,status:t.stdout}};var et=async e=>{let{stdout:t}=await e.utils.run("git",["rev-parse","HEAD"]);return t.trim()},tt=async e=>{let{stdout:t}=await e.utils.run("git",["rev-list","--max-parents=0","HEAD"]);return t.trim()},Mt=async(e,t="")=>{let r=[".netlify","mise.toml","node_modules"],o=[],n=r.map(async a=>{try{return await e.utils.run("git",["check-ignore","-v",a]),null}catch{return`:!${a}`}});return(await Promise.all(n)).forEach(a=>{a&&o.push(a)}),t.split(`
+
`).forEach(a=>{let l=a.match(/\?\? (.+?)\.log$/)?.[1];l&&o.push(`:!${l}.log`)}),o};import Yt from"fs/promises";import jt from"os";import me from"path";import H from"process";import Pe from"path";import Gt from"fs/promises";var ve=_("agent-output-utils");async function J({initialResult:e,agentName:t,hasError:r}){let o="",n=Pe.join(process.cwd(),L,se);try{let s=await Gt.readFile(n,"utf-8");s&&(o=s,ve.log(`Pulled result from ${Pe.relative(process.cwd(),n)}`))}catch{ve.log(`No results file found at ${Pe.relative(process.cwd(),n)}`)}return o||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function z({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,o=r?.replace(/\s+/g," ").trim()||"",n="";return o?.includes("ai gateway is not available for your account")?n="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":o?.includes("error when talking to gemini api")?n="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(o?.includes("connection closed prematurely while waiting for the model")||o?.includes("499")&&t.toLowerCase().includes("gemini"))&&(n="The agent's models were currently overloaded. Please try again or use a different available agent."),n&&ve.log(`Providing updated error messsage: ${n}, replacing original error: ${r}`),n||r||void 0}function Z(e){if(!e)return!1;let t=e?.replace(/\s+/g," ").trim()||"";return!!(t?.includes("error when talking to gemini api")||t?.includes("connection closed prematurely while waiting for the model"))}var j=_("runner_claude"),rt="Claude Code",Bt=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(j.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(j.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(j.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function Oe({config:e,netlify:t,persistSteps:r,aiGateway:o}){let n=e,{accountType:s,prompt:a,modelVersionOverrides:l}=n,{model:i}=n;if(o){let{token:w,url:h}=o;if(!w||!h)throw new Error("No token or url provided from AI Gateway");let f=Ht[s];if(!f)throw new Error(`Claude is not supported for the account type ${s}`);if(i&&!f?.models?.[i])throw new Error(`${i} is not supported for account type ${s}`);if(l?.claude){let u=l?.claude?.[s];u&&(i=u)}H.env.ANTHROPIC_API_KEY=w,H.env.ANTHROPIC_BASE_URL=h}else if(!H.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let p=[],c=[],d={},m=0,E=0,x,T,g=me.join(H.cwd(),"node_modules"),N=[me.join(H.env.NODE_PATH||g,".bin/claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose",...i?["--model",i]:[],"-p",a],C=`${H.env.NVM_BIN}/node`;j.log(`Running ${C} ${N.join(" ")}`);let P=t.utils.run(C,N,{all:!0,env:H.env});P.stdin?.end();let I=de(()=>{r?.({steps:p,duration:E})},250),R=(w,h)=>{let f={...w,id:m};m+=1,c.push(f),p.push(f),h||I.flush(),I(),h&&I.flush()},A="";return P.all?.on("data",w=>{if(A+=w.toString(),!w.includes(`
+
`))return;let h=A.split(`
+
`).filter(Boolean).map(f=>{try{return JSON.parse(f)}catch{j.log("Could not parse line",f)}return null}).filter(Boolean);A="",h.forEach(f=>{Array.isArray(f?.message?.content)?f.message.content.forEach(u=>{switch(u.type){case"text":{u.text&&R({message:u.text});break}case"image":{typeof u.source=="object"&&u.source&&u.source.type==="base64"&&u.source.media_type?R({message:``}):j.log(`Unsupported image type ${u.source?.type}`,u.source);break}case"tool_use":{if(u.name==="Task"){let y=u.input?.description&&`\`${u.input.description}\``;R({title:[u.name,y].filter(Boolean).join(" ")})}else u.id&&(d[u.id]=u);I.flush();break}case"tool_result":{let y=u.tool_use_id?d[u.tool_use_id]:void 0,D;if(y){let F=y.input?.file_path&&me.relative(H.cwd(),y.input.file_path),S=F&&`\`${F}\``;D=[y.name,S].filter(Boolean).join(" ")}let B=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(y?.name||""),b;if(typeof u.content=="string")b=u.content;else if(Array.isArray(u.content)){let F=[];u.content.forEach(S=>{S?.type==="text"&&typeof S.text=="string"?F.push(S.text):S?.type==="image"&&typeof S.source=="object"&&S.source?S.source.type==="base64"&&S.source.media_type?F.push(``):j.log(`Unsupported image type ${S.source.type}`,S.source):j.log(`Unsupported block type ${S?.type}`)}),b=F.join(`
`)}B&&b&&(b=`\`\`\`
${b.trim()}
-\`\`\``),
-`))return;let A=
-`).filter(Boolean).map(
-`):
-`))}),
-${
-\`\`\``)}catch(s){
-`))return;let
-`).filter(Boolean).map(u=>{try{if(u.startsWith("[API Error")){let
+
\`\`\``),R({title:D,message:b},!0);break}case"thinking":{u.thinking&&R({title:"Thinking",message:u.thinking},!0);break}default:j.log(`Message content type is not supported ${u.type}`,u)}}):f?.type==="result"&&(E=f.duration_ms||0,f.is_error?T=f.result:x=f.result,[c,p].forEach(u=>{u[u.length-1]?.message===x&&u.pop()}))})}),await P.catch(w=>{({error:T,result:x}=Bt({catchError:w,runCmd:P,error:T,result:x,runnerName:"Claude"}))}),I.flush(),{steps:c,duration:E,result:await J({initialResult:x,agentName:rt,hasError:!!T}),error:z({error:T,agentName:rt}),isRetryableError:Z(T)}}var nt=async()=>{let e=me.join(jt.homedir(),".claude");await Yt.rm(e,{recursive:!0,force:!0})},Ht={[G]:{models:{"claude-3-5-haiku-20241022":{maxTokens:8192},"claude-3-7-sonnet-20250219":{maxTokens:64e3},"claude-3-haiku-20240307":{maxTokens:4096},"claude-opus-4-20250514":{maxTokens:32e3},"claude-sonnet-4-20250514":{maxTokens:64e3}}},pro:{models:{"claude-3-5-haiku-20241022":{maxTokens:8192},"claude-3-haiku-20240307":{maxTokens:4096}}},[M]:{models:{"claude-3-5-haiku-20241022":{maxTokens:8192},"claude-3-haiku-20240307":{maxTokens:4096}}},[Y]:{models:{"claude-3-5-haiku-20241022":{maxTokens:8192},"claude-3-7-sonnet-20250219":{maxTokens:16e3}}},[U]:{models:{"claude-3-5-haiku-20241022":{maxTokens:8192},"claude-3-7-sonnet-20250219":{maxTokens:16e3}}}};import qt from"fs/promises";import Kt from"os";import ke from"path";import W from"process";var Q=_("runner_codex"),ot="Codex CLI",Vt=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(Q.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(Q.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(Q.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function be({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:s,prompt:a,modelVersionOverrides:l}=e,{model:i}=e;if(n){let{token:R,url:A}=n;if(!R||!A)throw new Error("No token or url provided from AI Gateway");let w=Wt[s];if(!w)throw new Error(`Codex is not supported for the account type ${s}`);if(i&&!w?.models?.[i])throw new Error(`${i} is not supported for account type ${s}`);if(l?.codex){let h=l?.codex?.[s];h&&(i=h)}W.env.OPENAI_API_KEY=R,W.env.OPENAI_BASE_URL=A}else if(!W.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let p=[],c=[],d={},m=0,E=0,x,T,g=ke.join(W.cwd(),"node_modules"),N=[ke.join(W.env.NODE_PATH||g,".bin/codex"),"--approval-mode","full-auto",n?"--disable-response-storage":void 0,"--dangerously-auto-approve-everything",...i?["--model",i]:[],"-q",a].filter(Boolean),C=`${W.env.NVM_BIN}/node`;Q.log(`Running ${C} ${N.join(" ")}`);let P=t.utils.run(C,N,{all:!0,env:{...W.env,CODEX_UNSAFE_ALLOW_NO_SANDBOX:"1"}}),I="";return P.all.on("data",R=>{if(I+=R.toString(),!R.includes(`
+
`))return;let A=I.toString().split(`
+
`).filter(Boolean).map(f=>{try{return JSON.parse(f)}catch{Q.log("Could not parse line",f)}return null}).filter(Boolean);I="";let w=[],h=!1;A.forEach(f=>{if(f?.duration_ms&&(E=f.duration_ms,h=!0),f?.type==="local_shell_call")d[f.call_id]=f;else if(f?.type==="local_shell_call_output"){let u=Jt(d[f.call_id],f);u.id=m,m+=1,u&&(c.push(u),p.push(u),w.push(u),h=!0)}else f?.type==="message"&&f.role==="assistant"?x=f.content.map(u=>u.text).join(`
+
`):f?.type==="message"&&f.role==="system"&&(T=f.content.map(u=>u.text).join(`
+
`))}),h&&(r?.({steps:p,duration:E}),o?.({steps:w,duration:E}))}),await P.catch(R=>{({error:T,result:x}=Vt({catchError:R,runCmd:P,error:T,result:x,runnerName:"Codex"}))}),{steps:c,duration:E,result:await J({initialResult:x,agentName:ot,hasError:!!T}),error:z({error:T,agentName:ot}),isRetryableError:Z(T)}}var st=async()=>{let e=ke.join(Kt.homedir(),".codex");await qt.rm(e,{recursive:!0,force:!0})},Wt={[G]:{models:{"codex-mini-latest":{maxTokens:75e3},"gpt-3.5-turbo":{maxTokens:4096},"gpt-4-turbo":{maxTokens:4096},"gpt-4o":{maxTokens:16384},"gpt-4o-mini":{maxTokens:32768},o1:{maxTokens:1e5},"o1-mini":{maxTokens:65536},"o3-mini":{maxTokens:1e5},"gpt-image-1":{},"dall-e-2":{},"dall-e-3":{}}},[M]:{models:{"codex-mini-latest":{maxTokens:75e3},"gpt-3.5-turbo":{maxTokens:4096},"gpt-4-turbo":{maxTokens:4096},"gpt-4o":{maxTokens:16384},"gpt-4o-mini":{maxTokens:32768}}},pro:{models:{"codex-mini-latest":{maxTokens:75e3},"gpt-3.5-turbo":{maxTokens:4096},"gpt-4-turbo":{maxTokens:4096},"gpt-4o":{maxTokens:16384},"gpt-4o-mini":{maxTokens:32768}}},[Y]:{models:{"codex-mini-latest":{maxTokens:75e3},"gpt-3.5-turbo":{maxTokens:4096},"gpt-4-turbo":{maxTokens:4096},"gpt-4o":{maxTokens:16384},"gpt-4o-mini":{maxTokens:32768},"o3-mini":{maxTokens:16e3}}},[U]:{models:{"codex-mini-latest":{maxTokens:75e3},"gpt-3.5-turbo":{maxTokens:4096},"gpt-4-turbo":{maxTokens:4096},"gpt-4o":{maxTokens:16384},"gpt-4o-mini":{maxTokens:32768},"o3-mini":{maxTokens:16e3}}}},Xt=new Set(["bash","-lc"]),Jt=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(s=>!Xt.has(s)),o=r?`Running \`${r.join(" ")}\``:void 0,n;try{n=JSON.parse(t.output).output?.trim(),n&&(n=`\`\`\`
+
${n.trim()}
+
\`\`\``)}catch(s){Q.error("Could not decode outputMsg",s,t.output)}return{title:o,message:n}};import zt from"fs/promises";import Zt from"os";import ge from"path";import q from"process";var ie=_("runner_gemini"),it="Gemini CLI",Qt=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(ie.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(ie.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(ie.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),er={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"};async function Le({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:s,prompt:a,modelVersionOverrides:l}=e,{model:i}=e;if(n){let{token:h,url:f}=n;if(!h||!f)throw new Error("No token or url provided from AI Gateway");let u=tr[s];if(!u)throw new Error(`Gemini is not supported for the account type ${s}`);if(i&&!u?.models?.[i])throw new Error(`${i} is not supported for account type ${s}`);if(l?.gemini){let y=l?.gemini?.[s];y&&(i=y)}q.env.GEMINI_API_KEY=h,q.env.GOOGLE_GEMINI_BASE_URL=f}else if(!q.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let p=[],c=[],d=[],m={},E=0,x=0,T,g,N=ge.join(q.cwd(),"node_modules"),C=[ge.join(q.env.NODE_PATH||N,".bin/gemini"),...i?["--model",i]:[],"--yolo","-p",a],P=`${q.env.NVM_BIN}/node`;ie.log(`Running ${P} ${C.join(" ")}`);let I=t.utils.run(P,C,{all:!0,env:q.env});I.stdin?.end();let R=de(()=>{r?.({steps:p,duration:x}),o?.({steps:c,duration:x}),c=[]},250),A=(h,f)=>{h.id=E,E+=1,d.push(h),p.push(h),c.push(h),f||R.flush(),R(),f&&R.flush()},w="";return I.all.on("data",h=>{if(w+=h.toString(),!h.includes(`
+
`))return;let f=w.toString().split(`
+
`).filter(Boolean).map(u=>{try{if(u.startsWith("[API Error")){let y=u.match(/\[api error: (.+?)]$/i)?.[1];return{type:"error",value:fe(y,!1)?.error?.message||y||"Gemini encountered error"}}return JSON.parse(u)}catch{}return null}).filter(Boolean);w="",f.forEach(u=>{switch(u.type){case"thought":{let y=u.value;A({title:y?.subject??"Thinking...",message:y?.description},!0);break}case"content":{u.value&&A({message:u.value});break}case"tool_call_request":{let y=u.value,D=er[y.name]??y.name,B=y.args?.path||y.args?.absolute_path,b=B&&ge.relative(q.cwd(),B),F=y.args?.command,Rt={title:[D,b&&`\`${b}\``,F&&`\`${F}\``].filter(Boolean).join(" ")};m[y.callId]=Rt,R.flush();break}case"tool_result":{let y=u.value,D=m[y.callId];if(D){let B=[y.resultDisplay,y.responseParts?.functionResponse?.response?.output].find(b=>typeof b=="string"&&b);B&&(D.message=`\`\`\`
${B.trim()}
-\`\`\``),A(
+
\`\`\``),A(D,!0)}break}case"result":{x=u.duration_ms,T=u.value,[d,p,c].forEach(y=>{y[y.length-1]?.message===T&&y.pop()});break}case"error":{g=u.value;break}case"finished":break;default:{ie.warn("Unhandled message type:",u.type);break}}})}),await I.catch(h=>{({error:g,result:T}=Qt({catchError:h,runCmd:I,error:g,result:T,runnerName:"Gemini"}))}),R.flush(),{steps:d,duration:x,result:await J({initialResult:T,agentName:it,hasError:!!g}),error:z({error:g,agentName:it}),isRetryableError:Z(g)}}var at=async()=>{let e=ge.join(Zt.homedir(),".gemini");await zt.rm(e,{recursive:!0,force:!0})},tr={[G]:{models:{"gemini-1.5-flash":{maxTokens:8192},"gemini-1.5-flash-8b":{maxTokens:8192},"gemini-1.5-pro":{maxTokens:8192},"gemini-2.0-flash":{maxTokens:8192},"gemini-2.0-flash-lite":{maxTokens:8192},"gemini-2.5-flash":{maxTokens:65536},"gemini-2.5-flash-lite":{maxTokens:65536},"gemini-2.5-pro":{maxTokens:65536},"imagen-4.0-generate-001":{},"veo-3.0-generate-preview":{}}},pro:{models:{"gemini-1.5-flash-8b":{maxTokens:8192},"gemini-2.0-flash-lite":{maxTokens:8192}}},[M]:{models:{"gemini-1.5-flash-8b":{maxTokens:8192},"gemini-2.0-flash-lite":{maxTokens:8192}}},[Y]:{models:{"gemini-2.0-flash":{maxTokens:8192},"gemini-2.0-flash-lite":{maxTokens:8192},"gemini-2.5-flash":{maxTokens:16e3}}},[U]:{models:{"gemini-2.0-flash":{maxTokens:8192},"gemini-2.0-flash-lite":{maxTokens:8192},"gemini-2.5-flash":{maxTokens:16e3}}}};var rr={codex:{runner:be,clean:st},claude:{runner:Oe,clean:nt},gemini:{runner:Le,clean:at}},lt=rr;var ut=async({config:e,apiThrottle:t,apiToken:r})=>{let o=nr();return le(o,"init-stage",async n=>{let s=performance.now();n?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.validateAgent":e.validateAgent});let a=lt[e.runner];if(!a)throw n?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let l=or({apiToken:r});Me(l);let i=e.useGateway?await Be({netlify:l,config:e}):void 0;n?.setAttributes({"init.aiGateway.created":!!i}),e.validateAgent&&e.errorLogsPath&&n?.setAttributes({"init.errorLogsPath":e.errorLogsPath});let p=Xe(({steps:m=[],duration:E})=>{let x=[...m];return m.length=0,X(e.id,e.sessionId,{steps:x,duration:E})},t),c;e.hasRepo?e.sha?(c=e.sha,n?.setAttributes({"init.sha.source":"provided"})):(c=await et(l),await pe(e.id,{sha:c}),n?.setAttributes({"init.sha.source":"current_commit"})):(c=await tt(l),n?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let d=performance.now()-s;return n?.setAttributes({"init.sha":c||"unknown","init.duration.ms":d,"init.status":"success"}),{aiGateway:i,context:l,persistSteps:p,runner:a,sha:c}})},or=({apiToken:e})=>({constants:{NETLIFY_API_HOST:he.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||he.env.NETLIFY_API_TOKEN,SITE_ID:he.env.SITE_ID,FUNCTIONS_DIST:he.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:qe}});import sr from"crypto";import K from"fs/promises";import k from"path";import $ from"process";var v=_("context"),ir=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:$.env.NETLIFY_TEAM_ID,userId:$.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:$.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},ar=10,lr=async e=>{let{name:t,ext:r}=k.parse(e),o=e,n=k.join($.cwd(),L,o),s=0;for(;await ur(n);){if(s>=ar)throw new Error("Failed to generate context file");o=`${t}-${sr.randomUUID().slice(0,5)}${r}`,n=k.join($.cwd(),L,o),s+=1}return o},ur=async e=>{try{return await 
K.access(e),!0}catch{return!1}},cr=async()=>{try{v.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return v.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(o=>o&&typeof o=="object"&&o.key==="catchall-consumer");return r?!r.contextScopes||typeof r.contextScopes!="object"?(v.warn("Catchall consumer missing or invalid contextScopes"),null):r:(v.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?v.warn("Netlify features context request timed out"):v.warn("Failed to fetch Netlify features context:",e.message),null}},pr=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let o=await r.text();return await K.writeFile(t,o,"utf-8"),!0}catch(r){return r.name==="AbortError"?v.warn(`Download timeout for ${e}`):v.warn(`Failed to download context file ${e}:`,r.message),!1}},Ee=null,dr=async()=>{if(Ee)return Ee;let e=await cr();if(!e)return[];let t=k.join($.cwd(),L,Ae);await K.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([n,s])=>{if(!s||typeof s!="object"||!s.endpoint||!s.scope)return v.warn(`Invalid scope data for ${n}, skipping...`),null;let a=`${n}.md`,l=k.join(t,a),i=k.join(L,Ae,a);return v.log(`Downloading ${s.scope} context...`),await pr(s.endpoint,l)?(v.log(`Downloaded: ${i}`),{scope:s.scope,path:i,key:n}):null});return Ee=(await Promise.all(r)).filter(n=>n!==null),Ee},ct=async({cliPath:e,netlify:t,config:r,buildErrorContext:o})=>{let n=ir(t),s=await lr(Ke),a=k.join($.cwd(),L);await K.mkdir(a,{recursive:!0});let l=k.join(L,s),i=k.join($.cwd(),l),p=k.join($.cwd(),L,se);try{await K.unlink(p),v.log(`Deleted old results file: ${p}`)}catch{}let c=o?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
Your task is to analyze and fix the build errors.
Don't apply techniques of reverting changes. Apply fixes related to errors.
Don't try to run build by yourself. Just fix the errors.
<build_error_context>
-${
-</build_error_context>`:"",
+
${o}
+
</build_error_context>`:"",d="";r.siteContext&&r.siteContext.length!==0&&(d=`
<project_rules>
${r.siteContext.filter(g=>g.site_context).map(g=>typeof g.site_context=="string"?g.site_context:typeof g.site_context=="object"?JSON.stringify(g.site_context):"").join(`
`)}
</project_rules>
-`);let m="";if(r.sessionHistoryContext?.length){let g=k.join(
+
`);let m="";if(r.sessionHistoryContext?.length){let g=k.join($.cwd(),L,Ce);await K.mkdir(g,{recursive:!0});let N=await Promise.all(r.sessionHistoryContext.map(async(C,P)=>{let I=P+1,R=`attempt-${I}.md`,A=k.join(g,R),w=k.join(L,Ce,R),h=`# Task History - Attempt ${I}
## Request - what the user asked for
${C.request}
@@ -46,21 +46,21 @@ ${C.request}
## Response - what the agent replied with after its work
${C.response}
-`;return await
+
`;return await K.writeFile(A,h,"utf-8"),v.log(`Created history file: ${w}`),w}));m+=`
<session_history_context>
History of prior work on this task.
You MUST review ALL of the files below as context to understand the context of previous attempts. Use this information to continue the discussion appropriately.
-${
+
${N.slice(-5).map(C=>`- ${C}`).join(`
`)}
</session_history_context>
-`}let
+
`}let E=await dr(),x="";E.length>0&&(x=`
<netlify_features_context>
If the user request is explicitly related to a specific Netlify feature (e.g., Netlify Forms, Netlify Functions, etc.), you MUST review the relevant documentation below in addition to reviewing the project files.
DO NOT force the use of any Netlify feature if the user request does not explicitly require it or if the project has alternative implementations in place already.
-${
+
${E.map(g=>`- **${g.scope}**: ${g.path}`).join(`
`)}
Refer to these files when working with specific Netlify features.
@@ -78,26 +78,26 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
<requirements>
<responses>
- Do not speak in first person. You may speak as "the agent".
-- When
+
- When work is complete, write a changes summary in ${a}/${se} as a standalone PR description. Explain what was accomplished and why (avoid too many implementation details), assuming the reader has no prior context. Use past tense and write in prose without calling it a "PR", "Changelog", etc. This is the core of a PR message or summary page that already has a heading.
- Do not attempt to create git commits, PRs, etc. directly. You can use git to review information if required but the system that runs this agent will handle creating PRs or commits of the changes it performs.
</responses>
<attachements>
-- for requests that require work with attachments or assets, take into account that uploaded attachments are stored in ${
-- move assets from ${
+
- for requests that require work with attachments or assets, take into account that uploaded attachments are stored in ${a}/${Se} folder
+
- move assets from ${a}/${Se} folder to the project assets folder if they are referenced in a code or applied changes
</attachements>
-${
+
${d}
</requirements>
<extra_context>
<metadata>
-- Site/Project ID: ${
-- Account/Team ID: ${
-- User ID: ${
-- Site/Project Slug: ${
-- Netlify Functions directory: ${
+
- Site/Project ID: ${n.siteId}
+
- Account/Team ID: ${n.accountId}
+
- User ID: ${n.userId}
+
- Site/Project Slug: ${n.siteSlug}
+
- Netlify Functions directory: ${n.functionsDir}
</metadata>
<environment>
-- Node Version: ${
+
- Node Version: ${$.version||"unknown"}
- Environment variables are set globally (e.g. \`echo $VARIABLE_NAME\` can be used to check if a var is set).
- 'netlify-cli' npm package is already available as a global package. Don't try to install it again
- If you need to start a local development server in order to fulfill the request, try using the Netlify CLI over by running the shell command '${e} dev'. This will start a local HTTP server on port 8888, including live-reloading of any changes and, most critically, it offers local emulation for all Netlify features.
@@ -110,7 +110,7 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
</extra_context>
${m}
-`;return await
+
`;return await K.writeFile(i,T,"utf-8"),v.log(`Generated agent context document at: ${i}`),T.length>5e5&&(T=`
You're an AI agent designed to assist with tasks related to a Netlify project. Please review, understand, and use the context provided to complete the user's request as needed.
<request>
@@ -121,12 +121,12 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
</request>
Use the following file for the complete context of the ask, the environment, and what's available. ${i} You MUST READ ALL OF IT. Make sure to read it first. Never cite or paraphrase private context.
-`),T};var
+
`),T};var fr=_("prompt"),pt=async({cliPath:e,config:t,netlify:r,buildErrorContext:o})=>{let n=await ct({cliPath:e,config:t,netlify:r,buildErrorContext:o});return process.env.AGENT_RUNNER_DEBUG&&fr.log("Contextful Prompt:",n),{prompt:n}};var dt=_("inference_stage"),ft=2,_e=async e=>{let{cliPath:t,config:r,context:o,buildErrors:n,runner:s,persistSteps:a,aiGateway:l,attempt:i}=e;He();let{prompt:p}=await pt({cliPath:t,config:r,buildErrorContext:mr(n),netlify:o}),c={...r,prompt:p},d=await s({aiGateway:l,config:c,netlify:o,persistSteps:a});if(await a.flush(),d.error){if(dt.error("Runner failed",{stepsCount:d.steps.length,duration:d.duration,error:d.error,isRetryableError:d.isRetryableError}),d.error&&d.isRetryableError&&(!i||i<ft))return dt.log(`Retrying inference stage, attempt ${i} of ${ft}...`),await new Promise(E=>setTimeout(E,5e3)),{runnerResult:(await _e({...e,attempt:(i||1)+1})).runnerResult};throw new Error(d.error)}return{runnerResult:d}},mr=e=>!e||e.length===0?"":`
Deploy failed failed. Here are the errors to review on the latest build:
Below are all of the logs with potential issues that we extracted. Some of them may be false positives, discern them carefully and ensure fixes are relevant.
${e.pop()}
-`;import
-Preview deploy created successfully:`,{deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id});let
+
`;import gr from"process";var ae=_("deploy"),mt=async({netlify:e,hasRepo:t,skipBuild:r,message:o="Agent Preview",deploySubdomain:n,cliPath:s,filter:a})=>{try{let l=["deploy","--message",`"${o}"`,"--json","--draft","--verbose"];t||(ae.log("Deploy: Uploading source zip"),l.push("--upload-source-zip")),n&&l.push("--alias",n),a&&l.push("--filter",a),r?(ae.log("Deploy: Skipping build"),l.push("--no-build")):l.push("--context","deploy-preview");let i=s||"netlify";ae.log(`Running: ${i} ${l.join(" ")}`);let p=await e.utils.run(i,l,{stdio:["ignore","pipe","pipe"]}),c=JSON.parse(p.stdout.trim());ae.log(`
+
Preview deploy created successfully:`,{deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id});let d={deployId:c.deploy_id,previewUrl:c.deploy_url,logsUrl:c.logs,siteId:c.site_id};return t||(d.sourceZipFilename=c.source_zip_filename),d}catch(l){throw ae.error("Failed to create preview deploy via CLI:",l),l}};var ye=_("deploy_stage"),$e=async({cliPath:e,config:t,context:r,result:o,filter:n})=>{let s=await Qe({config:t,netlify:r});if(!s.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:a,resultDiff:l,diffBinary:i,resultDiffBinary:p}=s,c=!0;ye.log("Preview deploy condition check:",{resultUndefined:o===void 0,resultType:typeof o,hasChanges:c,wouldCreatePreview:o!==void 0&&c});let d=null;if(o!==void 0&&c)try{let m;try{let E=await Ge(t.id,t.sessionId);E?.title&&(m=E.title)}catch(E){ye.warn("Failed to fetch session title, using fallback message:",E.message)}d=await mt({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:m,skipBuild:!1,deploySubdomain:Je(t.id,gr.env.SITE_NAME),filter:n})}catch(m){return ye.warn("Failed to create preview deploy (continuing with agent run):",m),{diff:a,resultDiff:l,hasChanges:c,previewInfo:null,diffBinary:i,resultDiffBinary:p,deployError:m instanceof Error?m.message:String(m)}}return ye.log("Git status",{hasDiff:!!a,hasChanges:c}),{diff:a,resultDiff:l,hasChanges:c,previewInfo:d,diffBinary:i,resultDiffBinary:p}};async function gt(e,t){let{maxRetries:r,baseDelay:o,onRetry:n}=t,s;for(let a=1;a<=r;a++)try{return await e()}catch(l){if(s=l,a===r)throw s;n&&n(a,s),await new Promise(i=>setTimeout(i,o*a))}throw s}var ee=_("cleanup_stage"),ht=async({config:e,diff:t,result:r,duration:o,resultDiff:n,diffBinary:s,resultDiffBinary:a,previewInfo:l})=>{let i={result_diff:t,result:r||"Done",duration:o,result_diff_binary:s};return l&&l.deployId&&(i.deploy_id=l.deployId),l&&l.sourceZipFilename&&(i.result_zip_file_name=l.sourceZipFilename),n||a?(ee.log("Updating total agent result diff"),await pe(e.id,{result_diff:n,result_diff_binary:a})):ee.log("No total result diff, not updating"),ee.log("Updated agent runner with result"),await gt(()=>X(e.id,e.sessionId,i),{maxRetries:3,baseDelay:1e3,onRetry:(p,c)=>{ee.error(`Error updating agent runner session (attempt ${p}):`,c),ee.log("Retrying...")}}),ee.log("Finished updating agent runner with result"),{sessionUpdate:i}};import{getTracer as Er}from"@netlify/otel";var _r=hr(import.meta.url),yr=_r("../package.json"),Et=_("pipeline_index"),Te=3,_t=async({config:e,apiToken:t,cliPath:r="netlify",cwd:o,errorLogsPath:n,filter:s,traceExporterUrl:a})=>{let l;try{await Fe(yr.version,e.id,a);let i=Er(),p,{aiGateway:c,context:d,persistSteps:m,runner:E,sha:x}=await ut({config:e,apiToken:t,cliPath:r,cwd:o,errorLogsPath:n,filter:s});l=E.clean,e.sha=x;let{runnerResult:T}=await _e({cliPath:r,config:e,context:d,runner:E.runner,persistSteps:m,aiGateway:c}),g=await $e({cliPath:r,config:e,context:d,result:T.result,filter:s}),N=T,C=[];if(g.hasChanges&&g.deployError){C.push(Ue(g.deployError));let h=1;for(;h<=Te&&!g.previewInfo;)Et.log(`Deploy attempt had errors. Retrying. 
${h}/${Te}`),await le(i,"stage-deploy",async f=>{f?.setAttributes({"stage.attempt":h});let{runnerResult:u}=await _e({cliPath:r,config:e,context:d,runner:E.runner,persistSteps:m,aiGateway:c,buildErrors:C});N={...u,steps:[...N.steps||[],...u.steps||[]],duration:(N.duration||0)+(u.duration||0)},g=await $e({cliPath:r,config:e,context:d,result:u.result,filter:s}),g.deployError&&C.push(g.deployError),h++});h>Te&&!g.previewInfo&&(p=new Error(`Deploy validation failed after ${Te} attempts`))}let{diff:P,resultDiff:I,previewInfo:R,diffBinary:A,resultDiffBinary:w}=g;if(await ht({config:e,diff:P,result:N.result,duration:N.duration,resultDiff:I,diffBinary:A,resultDiffBinary:w,previewInfo:R}),p)throw p;await E.clean?.()}catch(i){Et.error("Got error while running pipeline",i),await l?.();let p=i instanceof Error&&i.message;throw await X(e.id,e.sessionId,{result:p||"Encountered error when running agent",state:"error"}),i}};import O from"process";var Tr="codex",xr=e=>(e??[]).filter(t=>t.request&&t.response),Rr=e=>(e??[]).filter(t=>t.site_context),yt=_("config"),Tt=()=>{let e=O.env.NETLIFY_AGENT_RUNNER_ID,t=O.env.NETLIFY_AGENT_RUNNER_SESSION_ID;if(!e||!t)throw new Error("ID of agent runner is not provided");let r=O.env.NETLIFY_AGENT_RUNNER_RESULT_BRANCH,o=O.env.NETLIFY_AGENT_RUNNER_PROMPT;if(!o)throw new Error("Prompt is not provided");let n=O.env.NETLIFY_AGENT_RUNNER_AGENT||Tr,s=O.env.NETLIFY_AGENT_RUNNER_MODEL,a=O.env.NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_ENABLED==="1",l=O.env.NETLIFY_FF_AGENT_RUNNER_POST_EXECUTION_VALIDATION_WITH_BUILD_ENABLED==="1",i=O.env.ERROR_LOGS_PATH,p=fe(O.env.NETLIFY_AGENT_RUNNER_CONTEXT,!0,yt),c=xr(p),d=Rr(p),m=O.env.NETLIFY_AGENT_RUNNER_HAS_REPO!=="0",E=!O.env.NETLIFY_FF_AGENT_RUNNER_BYOK_ENABLED,x=O.env.NETLIFY_AGENT_RUNNER_SHA,T=wr(),g=ze(),N={id:e,sessionId:t,resultBranch:r,prompt:o,runner:n,model:s,validateAgent:a,errorLogsPath:i,sessionHistoryContext:c,siteContext:d,hasRepo:m,useGateway:E,sha:x,accountType:T,validateAgentWithBuild:l,modelVersionOverrides:g};return yt.log({fullConfig:N}),N},wr=()=>{let e=O.env.NETLIFY_TEAM_TYPE;return e?e.includes("personal")?M:e.includes("pro")?"pro":e.startsWith("enterprise")?G:e.startsWith("free")?Y:U:U};var Nr=_("bin_cmd"),te=Ir(xt.argv.slice(2),{string:["auth","cwd","cli-path","error-logs-path","filter","trace-exporter-url"]});try{let e=Tt();await _t({config:e,apiToken:te.auth,cwd:te.cwd,cliPath:te["cli-path"],errorLogsPath:te["error-logs-path"],filter:te.filter,traceExporterUrl:te["trace-exporter-url"]})}catch(e){Nr.error("Error running agent pipeline:",e),xt.exit(1)}
//# sourceMappingURL=bin.js.map
package/dist/index.js
CHANGED
@@ -1,70 +1,70 @@
-import{createRequire as
-`),r=[],
-`)),
-${
+
import{createRequire as ar}from"module";import{createTracerProvider as mt}from"@netlify/otel/bootstrap";import{SimpleSpanProcessor as be}from"@netlify/otel/opentelemetry";import{withActiveSpan as gt}from"@netlify/otel";import{OTLPTraceExporter as ht}from"@opentelemetry/exporter-trace-otlp-grpc";function _(e){let t=!process.env.VITEST;return{log:(...r)=>{t&&console.log(`[AR]-[${e}]`,...r)},error:(...r)=>{t&&console.error(`[AR]-[${e}]`,...r)},warn:(...r)=>{t&&console.warn(`[AR]-[${e}]`,...r)},info:(...r)=>{t&&console.info(`[AR]-[${e}]`,...r)},debug:(...r)=>{t&&console.debug(`[AR]-[${e}]`,...r)}}}var Ee=_("tracing"),$e=(e,t,r)=>mt({serviceName:"@netlify/agent-runner-cli",serviceVersion:e,deploymentEnvironment:"production",siteUrl:"",siteId:process.env.SITE_ID??"",siteName:t,spanProcessors:[new be(new _e),new be(new ht({url:r}))]});function ie(e,t,r){return Ee.log(`\u23F3 TRACE: ${t} starting...`),gt(e,t,r)}var _e=class{export(t,r){for(let o of t)this.logSpan(o);r({code:1})}async shutdown(){}forceFlush(){return Promise.resolve()}logSpan(t){let r=(t.endTime[0]-t.startTime[0])*1e3+(t.endTime[1]-t.startTime[1])/1e6,o=t.attributes,n=[];for(let[l,i]of Object.entries(o))l.includes("duration")&&typeof i=="number"?n.push(`${l}=${i.toFixed(2)}ms`):n.push(`${l}=${i}`);let s=t.status?.code===2?"\u274C":"\u2705",a=n.length>0?` [${n.join(", ")}]`:"";Ee.log(`${s} TRACE: ${t.name} completed in ${r.toFixed(2)}ms${a}`),t.status?.code===2&&t.status.message&&Ee.log(` \u274C Error: ${t.status.message}`)}};var yt=["error","failed","exception","fatal","panic","abort","crash"];function De(e){let t=e.split(`
+
`),r=[],o=-1,n=0;for(;n<t.length;){let l=t[n].slice(0,500).toLowerCase();if(yt.some(f=>l.includes(f))){let f=Math.max(0,n-10,o+1),c=Math.min(t.length-1,n+20),p=[];for(let g=f;g<=c;g++)p.push(t[g]);r.push(p.join(`
+
`)),o=c,n=c+1}else n++}if(r.length===0)return e;let s=r.map((a,l)=>`<extracted_error_chunk order="${l+1}">
+
${a}
</extracted_error_chunk>`).join(`
-`);return s.length>e.length*.8?e:s}import
-`).filter(
-`).forEach(
-`))return;let
-`).filter(Boolean).map(
-
-`)}
-${
-\`\`\``),T({title:D,message:
-`))return;let N=
-`).filter(Boolean).map(
-`):
-`))}),
-${
-\`\`\``)}catch(s){
-`))return;let
-`).filter(Boolean).map(u=>{try{if(u.startsWith("[API Error")){let
-${
-\`\`\``),N(D,!0)}break}case"result":{
+
`);return s.length>e.length*.8?e:s}import fe from"process";import{getTracer as Wt}from"@netlify/otel";import ae from"process";var xe=ae.env.NETLIFY_API_URL,Te=ae.env.NETLIFY_API_TOKEN,Q=_("api"),le=async(e,t={})=>{if(!xe||!Te)throw new Error("No API URL or token");let r=new URL(e,xe),o={...t,headers:{...t.headers,Authorization:`Bearer ${Te}`}};ae.env.AGENT_RUNNERS_DEBUG==="true"&&(o.headers["x-nf-debug-logging"]="true"),t.json&&(o.headers||={},o.headers["Content-Type"]="application/json",o.body=JSON.stringify(t.json));let n=await fetch(r,o),s=n.ok&&n.status<=299;if(ae.env.AGENT_RUNNERS_DEBUG==="true")Q.log(`Response headers for ${r}:`),n.headers.forEach((l,i)=>{Q.log(` ${i}: ${l}`)});else{let l=n.headers.get("x-nf-request-id");Q.log(`Request ID for ${r}: ${l||"N/A"}`)}if(s||Q.error(`Got status ${n.status} for request ${r}`),t.raw){if(!s)throw n;return n}let a=await(n.headers.get("content-type")?.includes("application/json")?n.json():n.text());if(!s)throw a;return a},Le=e=>{Q.log("Setting details for api",{apiUrl:e?.constants?.NETLIFY_API_HOST,token:!!e?.constants?.NETLIFY_API_TOKEN}),e?.constants?.NETLIFY_API_HOST&&(xe=`https://${e.constants.NETLIFY_API_HOST}`),e?.constants?.NETLIFY_API_TOKEN&&(Te=e.constants.NETLIFY_API_TOKEN)},ue=(e,t)=>le(`/api/v1/agent_runners/${e}`,{method:"PUT",json:t}),Y=(e,t,r)=>le(`/api/v1/agent_runners/${e}/sessions/${t}`,{method:"PUT",json:r});var Fe=(e,t)=>le(`/api/v1/agent_runners/${e}/sessions/${t}`),Ue=(e,t,r)=>le(`/api/v1/sites/${e}/ai-gateway/token`,{headers:{"X-Nf-Agent-Runner-Id":t,"X-Nf-Agent-Runner-Session-Id":r}});var Me=_("ai_gateway"),je=async({netlify:e,config:t})=>{let r,o,n,s,a=e.constants?.SITE_ID;if(!a)throw new Error("No site id");let l=async()=>{clearTimeout(n),Me.log("Requesting AI gateway information");let i=await Ue(a,t.id,t.sessionId);if({token:r,url:s}=i,o=i.expires_at?i.expires_at*1e3:void 0,Me.log("Got AI gateway information",{token:!!r,expiresAt:o,url:s}),o){let f=o-Date.now()-6e4;f>0&&(n=setTimeout(()=>{l()},f))}};return await l(),{get url(){return s},get token(){return r}}};import te from"process";import{execa as wt,execaCommand as vr}from"execa";import{Transform as Et}from"stream";var _t=["NODE_ENV","PATH","HOME","USER","USERNAME","SHELL","PWD","OLDPWD","TMPDIR","TMP","TEMP","LANG","TERM","EDITOR","PAGER","OS","PROCESSOR_ARCHITECTURE","PROCESSOR_IDENTIFIER","SYSTEMROOT","WINDIR","PROGRAMFILES","PROGRAMFILES(X86)","PROGRAMDATA","APPDATA","LOCALAPPDATA","NODE_OPTIONS","NODE_PATH","NODE_DEBUG","NODE_NO_WARNINGS","npm_config_registry","npm_config_cache","npm_execpath","npm_node_execpath","CI","GITHUB_ACTIONS","GITHUB_WORKSPACE","GITHUB_REPOSITORY","GITHUB_REF","BUILDKITE","BUILDKITE_BRANCH","BUILDKITE_COMMIT","BUILDKITE_BUILD_NUMBER","JENKINS_URL","TRAVIS","CIRCLECI","DISPLAY","COLORTERM","TERM_PROGRAM","TERM_PROGRAM_VERSION","COLUMNS","LINES","HISTSIZE","HISTFILE"];function xt(){return Object.entries(process.env).filter(([e,t])=>!(!t||_t.includes(e)||t.length<5)).map(([,e])=>e).filter(Boolean)}function we(e){if(typeof e!="string")return e;let t=xt();if(t.length===0)return e;let r=e;return t.forEach(o=>{let n=new RegExp(Tt(o),"g");r=r.replace(n,"******")}),r}function Tt(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}var ee=class extends Et{constructor(t={}){super({...t,objectMode:!1})}_transform(t,r,o){let n=t.toString(),s=we(n);o(null,s)}};function Ge(){if(!(process.env.NETLIFY_MASK_LOGS!=="false"))return;let 
t=process.stdout.write.bind(process.stdout),r=process.stderr.write.bind(process.stderr);process.stdout.write=function(o,n,s){let a=typeof o=="string"?we(o):o;return typeof n=="function"?t(a,n):t(a,n,s)},process.stderr.write=function(o,n,s){let a=typeof o=="string"?we(o):o;return typeof n=="function"?r(a,n):r(a,n,s)}}var br=_("shell"),Rt={preferLocal:!0},Be=(e,t,r)=>{let[o,n]=It(t,r),s={...Rt,...n},a=wt(e,o,s);return Ct(a,s),a};var It=function(e,t){return Array.isArray(e)?[e,t]:typeof e=="object"&&e!==null?[[],e]:[[],void 0]},Ct=(e,t)=>{if(t.stdio!==void 0||t.stdout!==void 0||t.stderr!==void 0)return;if(te.env.NETLIFY_MASK_LOGS!=="false"){e.stdout?.pipe(new ee).pipe(te.stdout),e.stderr?.pipe(new ee).pipe(te.stderr);return}e.stdout?.pipe(te.stdout),e.stderr?.pipe(te.stderr)};var Ye="netlify-agent-runner-context.md",Re="task-history",Ie="netlify-context",b=".netlify",re="results.md",Ce="assets",H="other",q="personal";var K="enterprise",V="free";var Nt=_("utils"),St=e=>new Promise(t=>{setTimeout(t,e)}),He=(e,t=3e3)=>{let r=!1,o=null,n=[],s=null,a=(...l)=>{if(r)return o=l,new Promise(c=>{n.push(c)});r=!0;let i,f=new Promise(c=>{i=c});return s=(async()=>{await Promise.resolve();let c=await e(...l);for(i(c);;){if(await St(t),!o)return r=!1,s=null,c;let p=o,g=n;o=null,n=[],c=await e(...p),g.forEach(y=>{y(c)})}})(),f};return a.flush=async()=>{if((r||o)&&s)return await s,a.flush()},a},ce=(e,t,r=!1)=>{let o=null,n=null,s=null,a=function(...l){n=l,s=this;let i=r&&!o;clearTimeout(o),o=setTimeout(()=>{o=null,r||(e.apply(s,n),n=null,s=null)},t),i&&(e.apply(s,n),n=null,s=null)};return a.cancel=()=>{clearTimeout(o),o=null,n=null,s=null},a.flush=()=>{if(o){clearTimeout(o);let l=n,i=s;o=null,n=null,s=null,e.apply(i,l)}},a},qe=(e,t=!0,r)=>{if(e)try{return JSON.parse(e)}catch(o){t&&(r?.error?r.error("Could not parse JSON",o):Nt.error("Could not parse JSON",o))}},Ke=(e,t)=>{let n=".netlify.app",s="agent-";if(!t)return`${s}${e.slice(0,6)}`;let l=`--${t}${n}`;if(l.length>55)return"";let i=60-l.length;if(i<=0)return"";if(i>=s.length+6){let f=Math.min(i-s.length,e.length);return`${s}${e.slice(0,f)}`}return e.slice(0,i)};import{Buffer as Ve}from"buffer";var We=async({config:e,netlify:t})=>{let r=await Pt(t),{hasChanges:o}=r,{status:n}=r;if(!o)return{hasChanges:!1};let s=await vt(t,n);await t.utils.run("git",["add",".",...s]);let a={stdio:["ignore","pipe","pipe"]},i=(await t.utils.run("git",["diff","--staged"],a)).stdout;if(o=!!i,!o)return{hasChanges:!1};let c=(await t.utils.run("git",["diff","--staged","--binary"],a)).stdout,p,g;if(e.sha){await t.utils.run("git",["commit","-m","Agent runner"]),p=(await t.utils.run("git",["diff",e.sha,"HEAD"],a)).stdout;let h=(await t.utils.run("git",["diff",e.sha,"HEAD","--binary"],a)).stdout;p!==h&&(g=Ve.from(h).toString("base64"))}let y={hasChanges:!0,diff:i,resultDiff:p};return i!==c&&(y.diffBinary=Ve.from(c).toString("base64")),g&&(y.resultDiffBinary=g),y},At=["?? mise.toml",/\?\? .+?\.log/],Pt=async e=>{let t=await e.utils.run("git",["status","-s"]);return{hasChanges:(t.stdout.trim().length===0?[]:t.stdout.split(`
+
`).filter(n=>!At.some(s=>s instanceof RegExp?s.test(n):n===s))).length!==0,status:t.stdout}};var Je=async e=>{let{stdout:t}=await e.utils.run("git",["rev-parse","HEAD"]);return t.trim()},Xe=async e=>{let{stdout:t}=await e.utils.run("git",["rev-list","--max-parents=0","HEAD"]);return t.trim()},vt=async(e,t="")=>{let r=[".netlify","mise.toml","node_modules"],o=[],n=r.map(async a=>{try{return await e.utils.run("git",["check-ignore","-v",a]),null}catch{return`:!${a}`}});return(await Promise.all(n)).forEach(a=>{a&&o.push(a)}),t.split(`
+
`).forEach(a=>{let l=a.match(/\?\? (.+?)\.log$/)?.[1];l&&o.push(`:!${l}.log`)}),o};import kt from"fs/promises";import bt from"os";import de from"path";import M from"process";import Ne from"path";import Ot from"fs/promises";var Se=_("agent-output-utils");async function W({initialResult:e,agentName:t,hasError:r}){let o="",n=Ne.join(process.cwd(),b,re);try{let s=await Ot.readFile(n,"utf-8");s&&(o=s,Se.log(`Pulled result from ${Ne.relative(process.cwd(),n)}`))}catch{Se.log(`No results file found at ${Ne.relative(process.cwd(),n)}`)}return o||(!e&&!r?`${t} has finished working on task.`:e||void 0)}function J({error:e,agentName:t}){let r=e&&typeof e=="object"?JSON.stringify(e):e,o=r?.replace(/\s+/g," ").trim()||"",n="";return o?.includes("ai gateway is not available for your account")?n="AI Gateway is currently not available on your account. Please confirm your account meets the criteria for using Agent Runners and AI Gateway and that your account has remaining AI Gateway inference credits available. Reach out to Netlify support if this is unexpected.":o?.includes("error when talking to gemini api")?n="Gemini's API is currently having issues. Please try again or use a different available agent while Google resolves the issue.":(o?.includes("connection closed prematurely while waiting for the model")||o?.includes("499")&&t.toLowerCase().includes("gemini"))&&(n="The agent's models were currently overloaded. Please try again or use a different available agent."),n&&Se.log(`Providing updated error messsage: ${n}, replacing original error: ${r}`),n||r||void 0}function X(e){if(!e)return!1;let t=e?.replace(/\s+/g," ").trim()||"";return!!(t?.includes("error when talking to gemini api")||t?.includes("connection closed prematurely while waiting for the model"))}var F=_("runner_claude"),ze="Claude Code",$t=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(F.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(F.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(F.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function Ae({config:e,netlify:t,persistSteps:r,aiGateway:o}){let n=e,{accountType:s,prompt:a,modelVersionOverrides:l}=n,{model:i}=n;if(o){let{token:w,url:m}=o;if(!w||!m)throw new Error("No token or url provided from AI Gateway");let d=Dt[s];if(!d)throw new Error(`Claude is not supported for the account type ${s}`);if(i&&!d?.models?.[i])throw new Error(`${i} is not supported for account type ${s}`);if(l?.claude){let u=l?.claude?.[s];u&&(i=u)}M.env.ANTHROPIC_API_KEY=w,M.env.ANTHROPIC_BASE_URL=m}else if(!M.env.ANTHROPIC_API_KEY)throw new Error("ANTHROPIC_API_KEY is not provided");let f=[],c=[],p={},g=0,y=0,R,x,h=de.join(M.cwd(),"node_modules"),A=[de.join(M.env.NODE_PATH||h,".bin/claude"),"--permission-mode","bypassPermissions","--dangerously-skip-permissions","--output-format","stream-json","--verbose",...i?["--model",i]:[],"-p",a],C=`${M.env.NVM_BIN}/node`;F.log(`Running ${C} ${A.join(" ")}`);let P=t.utils.run(C,A,{all:!0,env:M.env});P.stdin?.end();let I=ce(()=>{r?.({steps:f,duration:y})},250),T=(w,m)=>{let d={...w,id:g};g+=1,c.push(d),f.push(d),m||I.flush(),I(),m&&I.flush()},N="";return P.all?.on("data",w=>{if(N+=w.toString(),!w.includes(`
|
|
10
|
+
`))return;let m=N.split(`
|
|
11
|
+
`).filter(Boolean).map(d=>{try{return JSON.parse(d)}catch{F.log("Could not parse line",d)}return null}).filter(Boolean);N="",m.forEach(d=>{Array.isArray(d?.message?.content)?d.message.content.forEach(u=>{switch(u.type){case"text":{u.text&&T({message:u.text});break}case"image":{typeof u.source=="object"&&u.source&&u.source.type==="base64"&&u.source.media_type?T({message:``}):F.log(`Unsupported image type ${u.source?.type}`,u.source);break}case"tool_use":{if(u.name==="Task"){let E=u.input?.description&&`\`${u.input.description}\``;T({title:[u.name,E].filter(Boolean).join(" ")})}else u.id&&(p[u.id]=u);I.flush();break}case"tool_result":{let E=u.tool_use_id?p[u.tool_use_id]:void 0,D;if(E){let L=E.input?.file_path&&de.relative(M.cwd(),E.input.file_path),S=L&&`\`${L}\``;D=[E.name,S].filter(Boolean).join(" ")}let U=["Bash","Glob","Grep","LS","Read","Edit","Write"].includes(E?.name||""),k;if(typeof u.content=="string")k=u.content;else if(Array.isArray(u.content)){let L=[];u.content.forEach(S=>{S?.type==="text"&&typeof S.text=="string"?L.push(S.text):S?.type==="image"&&typeof S.source=="object"&&S.source?S.source.type==="base64"&&S.source.media_type?L.push(``):F.log(`Unsupported image type ${S.source.type}`,S.source):F.log(`Unsupported block type ${S?.type}`)}),k=L.join(`
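The data handler above treats the CLI's stream-json output as newline-delimited JSON: chunks are appended to a buffer and parsed once a newline arrives, with unparseable lines logged and skipped. A minimal sketch of that pattern (this sketch keeps a trailing partial line for the next chunk, whereas the minified handler simply resets its buffer):

// Parse newline-delimited JSON from a child process stream, tolerating partial lines.
function createNdjsonReader(onEvent) {
  let buffer = "";
  return (chunk) => {
    buffer += chunk.toString();
    if (!buffer.includes("\n")) return;   // wait for at least one complete line
    const lines = buffer.split("\n");
    buffer = lines.pop() ?? "";           // keep any trailing partial line
    for (const line of lines.filter(Boolean)) {
      try {
        onEvent(JSON.parse(line));
      } catch {
        // Not valid JSON (plain log output, warnings, etc.) - skip it.
      }
    }
  };
}

Each parsed event is then routed on its type (text, image, tool_use, tool_result, thinking, result) into displayable steps.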
|
|
12
|
+
|
|
13
|
+
`)}U&&k&&(k=`\`\`\`
|
|
14
|
+
${k.trim()}
|
|
15
|
+
\`\`\``),T({title:D,message:k},!0);break}case"thinking":{u.thinking&&T({title:"Thinking",message:u.thinking},!0);break}default:F.log(`Message content type is not supported ${u.type}`,u)}}):d?.type==="result"&&(y=d.duration_ms||0,d.is_error?x=d.result:R=d.result,[c,f].forEach(u=>{u[u.length-1]?.message===R&&u.pop()}))})}),await P.catch(w=>{({error:x,result:R}=$t({catchError:w,runCmd:P,error:x,result:R,runnerName:"Claude"}))}),I.flush(),{steps:c,duration:y,result:await W({initialResult:R,agentName:ze,hasError:!!x}),error:J({error:x,agentName:ze}),isRetryableError:X(x)}}var Ze=async()=>{let e=de.join(bt.homedir(),".claude");await kt.rm(e,{recursive:!0,force:!0})},Dt={[K]:{models:{"claude-3-5-haiku-20241022":{maxTokens:8192},"claude-3-7-sonnet-20250219":{maxTokens:64e3},"claude-3-haiku-20240307":{maxTokens:4096},"claude-opus-4-20250514":{maxTokens:32e3},"claude-sonnet-4-20250514":{maxTokens:64e3}}},pro:{models:{"claude-3-5-haiku-20241022":{maxTokens:8192},"claude-3-haiku-20240307":{maxTokens:4096}}},[q]:{models:{"claude-3-5-haiku-20241022":{maxTokens:8192},"claude-3-haiku-20240307":{maxTokens:4096}}},[V]:{models:{"claude-3-5-haiku-20241022":{maxTokens:8192},"claude-3-7-sonnet-20250219":{maxTokens:16e3}}},[H]:{models:{"claude-3-5-haiku-20241022":{maxTokens:8192},"claude-3-7-sonnet-20250219":{maxTokens:16e3}}}};import Lt from"fs/promises";import Ft from"os";import Pe from"path";import B from"process";var z=_("runner_codex"),Qe="Codex CLI",Ut=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(z.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(z.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(z.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0}));async function ve({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:s,prompt:a,modelVersionOverrides:l}=e,{model:i}=e;if(n){let{token:T,url:N}=n;if(!T||!N)throw new Error("No token or url provided from AI Gateway");let w=Mt[s];if(!w)throw new Error(`Codex is not supported for the account type ${s}`);if(i&&!w?.models?.[i])throw new Error(`${i} is not supported for account type ${s}`);if(l?.codex){let m=l?.codex?.[s];m&&(i=m)}B.env.OPENAI_API_KEY=T,B.env.OPENAI_BASE_URL=N}else if(!B.env.OPENAI_API_KEY)throw new Error("OPENAI_API_KEY is not provided");let f=[],c=[],p={},g=0,y=0,R,x,h=Pe.join(B.cwd(),"node_modules"),A=[Pe.join(B.env.NODE_PATH||h,".bin/codex"),"--approval-mode","full-auto",n?"--disable-response-storage":void 0,"--dangerously-auto-approve-everything",...i?["--model",i]:[],"-q",a].filter(Boolean),C=`${B.env.NVM_BIN}/node`;z.log(`Running ${C} ${A.join(" ")}`);let P=t.utils.run(C,A,{all:!0,env:{...B.env,CODEX_UNSAFE_ALLOW_NO_SANDBOX:"1"}}),I="";return P.all.on("data",T=>{if(I+=T.toString(),!T.includes(`
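Both the Claude and Codex runners validate the requested model against a per-account-type allowlist (the maps of model names to maxTokens above) and then apply any per-runner override before exporting the gateway credentials. Roughly, with illustrative names:

// Resolve which model to run, rejecting unsupported account tiers or models.
function resolveModel({ runnerName, accountType, requestedModel, allowlist, overrides }) {
  const tier = allowlist[accountType];
  if (!tier) {
    throw new Error(`${runnerName} is not supported for the account type ${accountType}`);
  }
  if (requestedModel && !tier.models?.[requestedModel]) {
    throw new Error(`${requestedModel} is not supported for account type ${accountType}`);
  }
  // A per-account override (e.g. modelVersionOverrides.claude[accountType]) wins when present.
  return overrides?.[accountType] || requestedModel;
}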
|
|
16
|
+
`))return;let N=I.toString().split(`
|
|
17
|
+
`).filter(Boolean).map(d=>{try{return JSON.parse(d)}catch{z.log("Could not parse line",d)}return null}).filter(Boolean);I="";let w=[],m=!1;N.forEach(d=>{if(d?.duration_ms&&(y=d.duration_ms,m=!0),d?.type==="local_shell_call")p[d.call_id]=d;else if(d?.type==="local_shell_call_output"){let u=Gt(p[d.call_id],d);u.id=g,g+=1,u&&(c.push(u),f.push(u),w.push(u),m=!0)}else d?.type==="message"&&d.role==="assistant"?R=d.content.map(u=>u.text).join(`
|
|
18
|
+
`):d?.type==="message"&&d.role==="system"&&(x=d.content.map(u=>u.text).join(`
|
|
19
|
+
`))}),m&&(r?.({steps:f,duration:y}),o?.({steps:w,duration:y}))}),await P.catch(T=>{({error:x,result:R}=Ut({catchError:T,runCmd:P,error:x,result:R,runnerName:"Codex"}))}),{steps:c,duration:y,result:await W({initialResult:R,agentName:Qe,hasError:!!x}),error:J({error:x,agentName:Qe}),isRetryableError:X(x)}}var et=async()=>{let e=Pe.join(Ft.homedir(),".codex");await Lt.rm(e,{recursive:!0,force:!0})},Mt={[K]:{models:{"codex-mini-latest":{maxTokens:75e3},"gpt-3.5-turbo":{maxTokens:4096},"gpt-4-turbo":{maxTokens:4096},"gpt-4o":{maxTokens:16384},"gpt-4o-mini":{maxTokens:32768},o1:{maxTokens:1e5},"o1-mini":{maxTokens:65536},"o3-mini":{maxTokens:1e5},"gpt-image-1":{},"dall-e-2":{},"dall-e-3":{}}},[q]:{models:{"codex-mini-latest":{maxTokens:75e3},"gpt-3.5-turbo":{maxTokens:4096},"gpt-4-turbo":{maxTokens:4096},"gpt-4o":{maxTokens:16384},"gpt-4o-mini":{maxTokens:32768}}},pro:{models:{"codex-mini-latest":{maxTokens:75e3},"gpt-3.5-turbo":{maxTokens:4096},"gpt-4-turbo":{maxTokens:4096},"gpt-4o":{maxTokens:16384},"gpt-4o-mini":{maxTokens:32768}}},[V]:{models:{"codex-mini-latest":{maxTokens:75e3},"gpt-3.5-turbo":{maxTokens:4096},"gpt-4-turbo":{maxTokens:4096},"gpt-4o":{maxTokens:16384},"gpt-4o-mini":{maxTokens:32768},"o3-mini":{maxTokens:16e3}}},[H]:{models:{"codex-mini-latest":{maxTokens:75e3},"gpt-3.5-turbo":{maxTokens:4096},"gpt-4-turbo":{maxTokens:4096},"gpt-4o":{maxTokens:16384},"gpt-4o-mini":{maxTokens:32768},"o3-mini":{maxTokens:16e3}}}},jt=new Set(["bash","-lc"]),Gt=(e,t)=>{if(!e||!t||e.call_id!==t.call_id)return null;let r=e.action?.command?.filter(s=>!jt.has(s)),o=r?`Running \`${r.join(" ")}\``:void 0,n;try{n=JSON.parse(t.output).output?.trim(),n&&(n=`\`\`\`
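Codex reports shell activity as two separate events; the helper above keeps pending local_shell_call events in a map keyed by call_id and, when the matching local_shell_call_output arrives, renders one step with the command as the title and the decoded output as a fenced block. A simplified sketch:

// Join a queued shell-call event with its output event into one displayable step.
function pairShellCall(pendingCalls, outputEvent) {
  const call = pendingCalls[outputEvent.call_id];
  if (!call) return null;
  const command = call.action?.command?.filter((part) => part !== "bash" && part !== "-lc");
  let output;
  try {
    output = JSON.parse(outputEvent.output)?.output?.trim();
  } catch {
    output = undefined; // the output payload was not valid JSON
  }
  return {
    title: command?.length ? "Running `" + command.join(" ") + "`" : undefined,
    message: output ? "```\n" + output + "\n```" : undefined,
  };
}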
|
|
20
|
+
${n.trim()}
|
|
21
|
+
\`\`\``)}catch(s){z.error("Could not decode outputMsg",s,t.output)}return{title:o,message:n}};import Bt from"fs/promises";import Yt from"os";import pe from"path";import j from"process";var oe=_("runner_gemini"),tt="Gemini CLI",Ht=({catchError:e,runCmd:t,error:r,result:o,runnerName:n})=>(oe.log(`${n} command completed with catch handler triggered`,{hadExistingError:!!r,hadExistingResult:!!o,resultLength:o?o.length:0,catchError:e?.message||"No error object",processExitCode:t.exitCode,processKilled:t.killed}),o?(oe.log("Preserving existing result despite catch handler being triggered"),r?{error:r,result:o}:{error:"Process completed with errors but result was captured",result:o}):(oe.log("Setting result to undefined because no valid result was captured"),{error:r||`${n} failed`,result:void 0})),qt={list_directory:"List",read_file:"Read",write_file:"Write",glob:"Find",search_file_content:"Find",replace:"Edit",run_shell_command:"Run",web_fetch:"Web Fetch",web_search:"Web Search",read_many_files:"Read Many",save_memory:"Memorize"};async function Oe({config:e,netlify:t,persistSteps:r,sendSteps:o,aiGateway:n}){let{accountType:s,prompt:a,modelVersionOverrides:l}=e,{model:i}=e;if(n){let{token:m,url:d}=n;if(!m||!d)throw new Error("No token or url provided from AI Gateway");let u=Kt[s];if(!u)throw new Error(`Gemini is not supported for the account type ${s}`);if(i&&!u?.models?.[i])throw new Error(`${i} is not supported for account type ${s}`);if(l?.gemini){let E=l?.gemini?.[s];E&&(i=E)}j.env.GEMINI_API_KEY=m,j.env.GOOGLE_GEMINI_BASE_URL=d}else if(!j.env.GEMINI_API_KEY)throw new Error("GEMINI_API_KEY is not provided");let f=[],c=[],p=[],g={},y=0,R=0,x,h,A=pe.join(j.cwd(),"node_modules"),C=[pe.join(j.env.NODE_PATH||A,".bin/gemini"),...i?["--model",i]:[],"--yolo","-p",a],P=`${j.env.NVM_BIN}/node`;oe.log(`Running ${P} ${C.join(" ")}`);let I=t.utils.run(P,C,{all:!0,env:j.env});I.stdin?.end();let T=ce(()=>{r?.({steps:f,duration:R}),o?.({steps:c,duration:R}),c=[]},250),N=(m,d)=>{m.id=y,y+=1,p.push(m),f.push(m),c.push(m),d||T.flush(),T(),d&&T.flush()},w="";return I.all.on("data",m=>{if(w+=m.toString(),!m.includes(`
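Steps from each runner are pushed into in-memory arrays and forwarded through a throttled callback (250 ms here) that also exposes an explicit flush for events that must not wait. The throttle helper itself is imported from earlier in the bundle and is not shown in this excerpt; a stand-in with the same call/flush shape could look like:

// Trailing-edge throttle with a flush() escape hatch, mirroring how step updates are batched.
function throttleWithFlush(fn, waitMs) {
  let timer = null;
  const throttled = () => {
    if (timer) return;                 // a call is already scheduled
    timer = setTimeout(() => {
      timer = null;
      fn();
    }, waitMs);
  };
  throttled.flush = () => {
    if (timer) clearTimeout(timer);
    timer = null;
    fn();
  };
  return throttled;
}

The runners queue a step, call the throttled persist, and flush for important events and when the process ends so nothing is left unreported.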
|
|
22
|
+
`))return;let d=w.toString().split(`
|
|
23
|
+
`).filter(Boolean).map(u=>{try{if(u.startsWith("[API Error")){let E=u.match(/\[api error: (.+?)]$/i)?.[1];return{type:"error",value:qe(E,!1)?.error?.message||E||"Gemini encountered error"}}return JSON.parse(u)}catch{}return null}).filter(Boolean);w="",d.forEach(u=>{switch(u.type){case"thought":{let E=u.value;N({title:E?.subject??"Thinking...",message:E?.description},!0);break}case"content":{u.value&&N({message:u.value});break}case"tool_call_request":{let E=u.value,D=qt[E.name]??E.name,U=E.args?.path||E.args?.absolute_path,k=U&&pe.relative(j.cwd(),U),L=E.args?.command,ft={title:[D,k&&`\`${k}\``,L&&`\`${L}\``].filter(Boolean).join(" ")};g[E.callId]=ft,T.flush();break}case"tool_result":{let E=u.value,D=g[E.callId];if(D){let U=[E.resultDisplay,E.responseParts?.functionResponse?.response?.output].find(k=>typeof k=="string"&&k);U&&(D.message=`\`\`\`
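Not every line the Gemini CLI prints is JSON: plain "[API Error ...]" lines are matched with a regex and converted into synthetic error events before the usual JSON parse (the minified code additionally tries to pull a structured message out of the error body with a lenient parser that is not shown here). A sketch of that fallback:

// Convert a plain "[API Error: ...]" log line into an error event; otherwise parse as JSON.
function parseGeminiLine(line) {
  if (line.startsWith("[API Error")) {
    const detail = line.match(/\[api error: (.+?)]$/i)?.[1];
    return { type: "error", value: detail || "Gemini encountered error" };
  }
  try {
    return JSON.parse(line);
  } catch {
    return null; // skip non-JSON noise
  }
}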
|
|
24
|
+
${U.trim()}
|
|
25
|
+
\`\`\``),N(D,!0)}break}case"result":{R=u.duration_ms,x=u.value,[p,f,c].forEach(E=>{E[E.length-1]?.message===x&&E.pop()});break}case"error":{h=u.value;break}case"finished":break;default:{oe.warn("Unhandled message type:",u.type);break}}})}),await I.catch(m=>{({error:h,result:x}=Ht({catchError:m,runCmd:I,error:h,result:x,runnerName:"Gemini"}))}),T.flush(),{steps:p,duration:R,result:await W({initialResult:x,agentName:tt,hasError:!!h}),error:J({error:h,agentName:tt}),isRetryableError:X(h)}}var rt=async()=>{let e=pe.join(Yt.homedir(),".gemini");await Bt.rm(e,{recursive:!0,force:!0})},Kt={[K]:{models:{"gemini-1.5-flash":{maxTokens:8192},"gemini-1.5-flash-8b":{maxTokens:8192},"gemini-1.5-pro":{maxTokens:8192},"gemini-2.0-flash":{maxTokens:8192},"gemini-2.0-flash-lite":{maxTokens:8192},"gemini-2.5-flash":{maxTokens:65536},"gemini-2.5-flash-lite":{maxTokens:65536},"gemini-2.5-pro":{maxTokens:65536},"imagen-4.0-generate-001":{},"veo-3.0-generate-preview":{}}},pro:{models:{"gemini-1.5-flash-8b":{maxTokens:8192},"gemini-2.0-flash-lite":{maxTokens:8192}}},[q]:{models:{"gemini-1.5-flash-8b":{maxTokens:8192},"gemini-2.0-flash-lite":{maxTokens:8192}}},[V]:{models:{"gemini-2.0-flash":{maxTokens:8192},"gemini-2.0-flash-lite":{maxTokens:8192},"gemini-2.5-flash":{maxTokens:16e3}}},[H]:{models:{"gemini-2.0-flash":{maxTokens:8192},"gemini-2.0-flash-lite":{maxTokens:8192},"gemini-2.5-flash":{maxTokens:16e3}}}};var Vt={codex:{runner:ve,clean:et},claude:{runner:Ae,clean:Ze},gemini:{runner:Oe,clean:rt}},nt=Vt;var ot=async({config:e,apiThrottle:t,apiToken:r})=>{let o=Wt();return ie(o,"init-stage",async n=>{let s=performance.now();n?.setAttributes({"init.runner":e.runner,"init.id":e.id,"init.sessionId":e.sessionId,"init.hasRepo":e.hasRepo,"init.useGateway":e.useGateway,"init.validateAgent":e.validateAgent});let a=nt[e.runner];if(!a)throw n?.setAttributes({"init.error":"unsupported_runner"}),new Error(`${e.runner} is not supported`);let l=Jt({apiToken:r});Le(l);let i=e.useGateway?await je({netlify:l,config:e}):void 0;n?.setAttributes({"init.aiGateway.created":!!i}),e.validateAgent&&e.errorLogsPath&&n?.setAttributes({"init.errorLogsPath":e.errorLogsPath});let f=He(({steps:g=[],duration:y})=>{let R=[...g];return g.length=0,Y(e.id,e.sessionId,{steps:R,duration:y})},t),c;e.hasRepo?e.sha?(c=e.sha,n?.setAttributes({"init.sha.source":"provided"})):(c=await Je(l),await ue(e.id,{sha:c}),n?.setAttributes({"init.sha.source":"current_commit"})):(c=await Xe(l),n?.setAttributes({"init.sha.source":"first_commit","init.source":"zip"}));let p=performance.now()-s;return n?.setAttributes({"init.sha":c||"unknown","init.duration.ms":p,"init.status":"success"}),{aiGateway:i,context:l,persistSteps:f,runner:a,sha:c}})},Jt=({apiToken:e})=>({constants:{NETLIFY_API_HOST:fe.env.NETLIFY_API_HOST||"api.netlify.com",NETLIFY_API_TOKEN:e||fe.env.NETLIFY_API_TOKEN,SITE_ID:fe.env.SITE_ID,FUNCTIONS_DIST:fe.env.FUNCTIONS_DIST||"netlify/functions"},utils:{run:Be}});import Xt from"crypto";import G from"fs/promises";import O from"path";import $ from"process";var v=_("context"),zt=e=>{let t=e?.constants||{};return{siteId:t.SITE_ID,accountId:$.env.NETLIFY_TEAM_ID,userId:$.env.NETLIFY_AGENT_RUNNER_USER_ID,siteSlug:$.env.SITE_NAME,apiHost:t.NETLIFY_API_HOST,functionsDir:t.FUNCTIONS_DIST}},Zt=10,Qt=async e=>{let{name:t,ext:r}=O.parse(e),o=e,n=O.join($.cwd(),b,o),s=0;for(;await er(n);){if(s>=Zt)throw new Error("Failed to generate context file");o=`${t}-${Xt.randomUUID().slice(0,5)}${r}`,n=O.join($.cwd(),b,o),s+=1}return o},er=async e=>{try{return await 
G.access(e),!0}catch{return!1}},tr=async()=>{try{v.log("Fetching Netlify features context...");let e=await fetch("https://docs.netlify.com/ai-context/context-consumers",{signal:AbortSignal.timeout(1e4)});if(!e.ok)throw new Error(`Failed to fetch context consumers: ${e.status} ${e.statusText}`);let t=await e.json();if(!t||typeof t!="object"||!Array.isArray(t.consumers))return v.warn("Invalid response structure: missing or invalid consumers array"),null;let r=t.consumers.find(o=>o&&typeof o=="object"&&o.key==="catchall-consumer");return r?!r.contextScopes||typeof r.contextScopes!="object"?(v.warn("Catchall consumer missing or invalid contextScopes"),null):r:(v.warn("Catchall consumer not found in context consumers"),null)}catch(e){return e.name==="AbortError"?v.warn("Netlify features context request timed out"):v.warn("Failed to fetch Netlify features context:",e.message),null}},rr=async(e,t)=>{try{let r=await fetch(e,{signal:AbortSignal.timeout(1e4)});if(!r.ok)throw new Error(`Failed to fetch ${e}: ${r.status} ${r.statusText}`);let o=await r.text();return await G.writeFile(t,o,"utf-8"),!0}catch(r){return r.name==="AbortError"?v.warn(`Download timeout for ${e}`):v.warn(`Failed to download context file ${e}:`,r.message),!1}},me=null,nr=async()=>{if(me)return me;let e=await tr();if(!e)return[];let t=O.join($.cwd(),b,Ie);await G.mkdir(t,{recursive:!0});let r=Object.entries(e.contextScopes).map(async([n,s])=>{if(!s||typeof s!="object"||!s.endpoint||!s.scope)return v.warn(`Invalid scope data for ${n}, skipping...`),null;let a=`${n}.md`,l=O.join(t,a),i=O.join(b,Ie,a);return v.log(`Downloading ${s.scope} context...`),await rr(s.endpoint,l)?(v.log(`Downloaded: ${i}`),{scope:s.scope,path:i,key:n}):null});return me=(await Promise.all(r)).filter(n=>n!==null),me},st=async({cliPath:e,netlify:t,config:r,buildErrorContext:o})=>{let n=zt(t),s=await Qt(Ye),a=O.join($.cwd(),b);await G.mkdir(a,{recursive:!0});let l=O.join(b,s),i=O.join($.cwd(),l),f=O.join($.cwd(),b,re);try{await G.unlink(f),v.log(`Deleted old results file: ${f}`)}catch{}let c=o?`You've already made changes to complete the above request. However, the build is currently failing after your changes.
|
|
26
26
|
Your task is to analyze and fix the build errors.
|
|
27
27
|
Don't apply techniques of reverting changes. Apply fixes related to errors.
|
|
28
28
|
Don't try to run build by yourself. Just fix the errors.
|
|
29
29
|
|
|
30
30
|
<build_error_context>
|
|
31
|
-
${
|
|
32
|
-
</build_error_context>`:"",
|
|
31
|
+
${o}
|
|
32
|
+
</build_error_context>`:"",p="";r.siteContext&&r.siteContext.length!==0&&(p=`
|
|
33
33
|
<project_rules>
|
|
34
|
-
${r.siteContext.filter(
|
|
34
|
+
${r.siteContext.filter(h=>h.site_context).map(h=>typeof h.site_context=="string"?h.site_context:typeof h.site_context=="object"?JSON.stringify(h.site_context):"").join(`
|
|
35
35
|
|
|
36
36
|
`)}
|
|
37
37
|
</project_rules>
|
|
38
|
-
`);let
|
|
38
|
+
`);let g="";if(r.sessionHistoryContext?.length){let h=O.join($.cwd(),b,Re);await G.mkdir(h,{recursive:!0});let A=await Promise.all(r.sessionHistoryContext.map(async(C,P)=>{let I=P+1,T=`attempt-${I}.md`,N=O.join(h,T),w=O.join(b,Re,T),m=`# Task History - Attempt ${I}
|
|
39
39
|
|
|
40
40
|
## Request - what the user asked for
|
|
41
|
-
${
|
|
41
|
+
${C.request}
|
|
42
42
|
|
|
43
43
|
---
|
|
44
44
|
|
|
45
45
|
## Response - what the agent replied with after its work
|
|
46
46
|
|
|
47
|
-
${
|
|
48
|
-
`;return await G.writeFile(N,
|
|
47
|
+
${C.response}
|
|
48
|
+
`;return await G.writeFile(N,m,"utf-8"),v.log(`Created history file: ${w}`),w}));g+=`
|
|
49
49
|
<session_history_context>
|
|
50
50
|
History of prior work on this task.
|
|
51
51
|
You MUST review ALL of the files below to understand the context of previous attempts. Use this information to continue the discussion appropriately.
|
|
52
52
|
|
|
53
|
-
${
|
|
53
|
+
${A.slice(-5).map(C=>`- ${C}`).join(`
|
|
54
54
|
`)}
|
|
55
55
|
|
|
56
56
|
</session_history_context>
|
|
57
|
-
`}let
|
|
57
|
+
`}let y=await nr(),R="";y.length>0&&(R=`
|
|
58
58
|
<netlify_features_context>
|
|
59
59
|
If the user request is explicitly related to a specific Netlify feature (e.g., Netlify Forms, Netlify Functions, etc.), you MUST review the relevant documentation below in addition to reviewing the project files.
|
|
60
60
|
DO NOT force the use of any Netlify feature if the user request does not explicitly require it or if the project has alternative implementations in place already.
|
|
61
61
|
|
|
62
|
-
${
|
|
62
|
+
${y.map(h=>`- **${h.scope}**: ${h.path}`).join(`
|
|
63
63
|
`)}
|
|
64
64
|
|
|
65
65
|
Refer to these files when working with specific Netlify features.
|
|
66
66
|
</netlify_features_context>
|
|
67
|
-
`);let
|
|
67
|
+
`);let x=`
|
|
68
68
|
You're an AI agent designed to assist with tasks related to a Netlify project. Please review, understand, and use the context provided to complete the user's request as needed.
|
|
69
69
|
|
|
70
70
|
<request>
|
|
@@ -77,23 +77,23 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
|
|
|
77
77
|
<requirements>
|
|
78
78
|
<responses>
|
|
79
79
|
- Do not speak in first person. You may speak as "the agent".
|
|
80
|
-
- When
|
|
80
|
+
- When work is complete, write a changes summary in ${a}/${re} as a standalone PR description. Explain what was accomplished and why (avoid too many implementation details), assuming the reader has no prior context. Use past tense and write in prose without calling it a "PR", "Changelog", etc. This is the core of a PR message or summary page that already has a heading.
|
|
81
81
|
- Do not attempt to create git commits, PRs, etc. directly. You can use git to review information if required but the system that runs this agent will handle creating PRs or commits of the changes it performs.
|
|
82
82
|
</responses>
|
|
83
83
|
<attachments>
|
|
84
|
-
- for requests that require work with attachments or assets, take into account that uploaded attachments are stored in ${
|
|
85
|
-
- move assets from ${
|
|
84
|
+
- for requests that require work with attachments or assets, take into account that uploaded attachments are stored in ${a}/${Ce} folder
|
|
85
|
+
- move assets from ${a}/${Ce} folder to the project assets folder if they are referenced in a code or applied changes
|
|
86
86
|
</attachments>
|
|
87
|
-
${
|
|
87
|
+
${p}
|
|
88
88
|
</requirements>
|
|
89
89
|
|
|
90
90
|
<extra_context>
|
|
91
91
|
<metadata>
|
|
92
|
-
- Site/Project ID: ${
|
|
93
|
-
- Account/Team ID: ${
|
|
94
|
-
- User ID: ${
|
|
95
|
-
- Site/Project Slug: ${
|
|
96
|
-
- Netlify Functions directory: ${
|
|
92
|
+
- Site/Project ID: ${n.siteId}
|
|
93
|
+
- Account/Team ID: ${n.accountId}
|
|
94
|
+
- User ID: ${n.userId}
|
|
95
|
+
- Site/Project Slug: ${n.siteSlug}
|
|
96
|
+
- Netlify Functions directory: ${n.functionsDir}
|
|
97
97
|
</metadata>
|
|
98
98
|
<environment>
|
|
99
99
|
- Node Version: ${$.version||"unknown"}
|
|
@@ -101,15 +101,15 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
|
|
|
101
101
|
- 'netlify-cli' npm package is already available as a global package. Don't try to install it again
|
|
102
102
|
- If you need to start a local development server in order to fulfill the request, try using the Netlify CLI by running the shell command '${e} dev'. This will start a local HTTP server on port 8888, including live-reloading of any changes and, most critically, local emulation for all Netlify features.
|
|
103
103
|
</environment>
|
|
104
|
-
${
|
|
104
|
+
${R}
|
|
105
105
|
<docs>
|
|
106
106
|
- Netlify Docs: https://docs.netlify.com
|
|
107
107
|
- LLM Resources Index: https://docs.netlify.com/llms.txt
|
|
108
108
|
</docs>
|
|
109
109
|
</extra_context>
|
|
110
110
|
|
|
111
|
-
${
|
|
112
|
-
`;return await G.writeFile(i,
|
|
111
|
+
${g}
|
|
112
|
+
`;return await G.writeFile(i,x,"utf-8"),v.log(`Generated agent context document at: ${i}`),x.length>5e5&&(x=`
|
|
113
113
|
You're an AI agent designed to assist with tasks related to a Netlify project. Please review, understand, and use the context provided to complete the user's request as needed.
|
|
114
114
|
|
|
115
115
|
<request>
|
|
@@ -120,12 +120,12 @@ You're an AI agent designed to assist with tasks related to a Netlify project. P
|
|
|
120
120
|
</request>
|
|
121
121
|
|
|
122
122
|
Use the following file for the complete context of the ask, the environment, and what's available. ${i} You MUST READ ALL OF IT. Make sure to read it first. Never cite or paraphrase private context.
|
|
123
|
-
`),
|
|
123
|
+
`),x};var or=_("prompt"),it=async({cliPath:e,config:t,netlify:r,buildErrorContext:o})=>{let n=await st({cliPath:e,config:t,netlify:r,buildErrorContext:o});return process.env.AGENT_RUNNER_DEBUG&&or.log("Contextful Prompt:",n),{prompt:n}};var at=_("inference_stage"),lt=2,ge=async e=>{let{cliPath:t,config:r,context:o,buildErrors:n,runner:s,persistSteps:a,aiGateway:l,attempt:i}=e;Ge();let{prompt:f}=await it({cliPath:t,config:r,buildErrorContext:sr(n),netlify:o}),c={...r,prompt:f},p=await s({aiGateway:l,config:c,netlify:o,persistSteps:a});if(await a.flush(),p.error){if(at.error("Runner failed",{stepsCount:p.steps.length,duration:p.duration,error:p.error,isRetryableError:p.isRetryableError}),p.error&&p.isRetryableError&&(!i||i<lt))return at.log(`Retrying inference stage, attempt ${i} of ${lt}...`),await new Promise(y=>setTimeout(y,5e3)),{runnerResult:(await ge({...e,attempt:(i||1)+1})).runnerResult};throw new Error(p.error)}return{runnerResult:p}},sr=e=>!e||e.length===0?"":`
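The inference stage above builds the contextful prompt, runs the selected runner, flushes any queued steps, and retries once when the runner reports a retryable error (the overloaded / connection-closed cases detected earlier), waiting five seconds between attempts. Its control flow, reduced to a sketch with illustrative names:

// Re-run the agent once for transient failures; otherwise surface the error to the pipeline.
const MAX_INFERENCE_ATTEMPTS = 2;

async function runInference(runOnce, attempt = 1) {
  const outcome = await runOnce();   // { result, error, isRetryableError, steps, duration }
  if (!outcome.error) return outcome;
  if (outcome.isRetryableError && attempt < MAX_INFERENCE_ATTEMPTS) {
    await new Promise((resolve) => setTimeout(resolve, 5000));
    return runInference(runOnce, attempt + 1);
  }
  throw new Error(outcome.error);
}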
|
|
124
124
|
Deploy failed. Here are the errors to review on the latest build:
|
|
125
125
|
|
|
126
126
|
Below are all of the logs with potential issues that we extracted. Some of them may be false positives, so assess them carefully and ensure fixes are relevant.
|
|
127
127
|
|
|
128
128
|
${e.pop()}
|
|
129
|
-
`;import
|
|
130
|
-
Preview deploy created successfully:`,{deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id});let
|
|
129
|
+
`;import ir from"process";var se=_("deploy"),ut=async({netlify:e,hasRepo:t,skipBuild:r,message:o="Agent Preview",deploySubdomain:n,cliPath:s,filter:a})=>{try{let l=["deploy","--message",`"${o}"`,"--json","--draft","--verbose"];t||(se.log("Deploy: Uploading source zip"),l.push("--upload-source-zip")),n&&l.push("--alias",n),a&&l.push("--filter",a),r?(se.log("Deploy: Skipping build"),l.push("--no-build")):l.push("--context","deploy-preview");let i=s||"netlify";se.log(`Running: ${i} ${l.join(" ")}`);let f=await e.utils.run(i,l,{stdio:["ignore","pipe","pipe"]}),c=JSON.parse(f.stdout.trim());se.log(`
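The deploy step shells out to the Netlify CLI, asks for JSON output, and parses the deploy id, preview URL, and logs URL from stdout. A condensed sketch of how the argument list is assembled above (flag names are copied from the minified source; the surrounding run helper is the same child-process wrapper used throughout the bundle):

// Build the CLI arguments for a draft (preview) deploy whose result is read from JSON stdout.
function buildDeployArgs({ hasRepo, skipBuild, message = "Agent Preview", deploySubdomain, filter }) {
  const args = ["deploy", "--message", `"${message}"`, "--json", "--draft", "--verbose"];
  if (!hasRepo) args.push("--upload-source-zip");   // zip-based sites ship their source with the deploy
  if (deploySubdomain) args.push("--alias", deploySubdomain);
  if (filter) args.push("--filter", filter);
  if (skipBuild) args.push("--no-build");
  else args.push("--context", "deploy-preview");
  return args;
}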
|
|
130
|
+
Preview deploy created successfully:`,{deployId:c.deploy_id,deployUrl:c.deploy_url,siteId:c.site_id});let p={deployId:c.deploy_id,previewUrl:c.deploy_url,logsUrl:c.logs,siteId:c.site_id};return t||(p.sourceZipFilename=c.source_zip_filename),p}catch(l){throw se.error("Failed to create preview deploy via CLI:",l),l}};var he=_("deploy_stage"),ke=async({cliPath:e,config:t,context:r,result:o,filter:n})=>{let s=await We({config:t,netlify:r});if(!s.hasChanges)return{diff:"",hasChanges:!1,previewInfo:null};let{diff:a,resultDiff:l,diffBinary:i,resultDiffBinary:f}=s,c=!0;he.log("Preview deploy condition check:",{resultUndefined:o===void 0,resultType:typeof o,hasChanges:c,wouldCreatePreview:o!==void 0&&c});let p=null;if(o!==void 0&&c)try{let g;try{let y=await Fe(t.id,t.sessionId);y?.title&&(g=y.title)}catch(y){he.warn("Failed to fetch session title, using fallback message:",y.message)}p=await ut({cliPath:e,netlify:r,hasRepo:t.hasRepo,message:g,skipBuild:!1,deploySubdomain:Ke(t.id,ir.env.SITE_NAME),filter:n})}catch(g){return he.warn("Failed to create preview deploy (continuing with agent run):",g),{diff:a,resultDiff:l,hasChanges:c,previewInfo:null,diffBinary:i,resultDiffBinary:f,deployError:g instanceof Error?g.message:String(g)}}return he.log("Git status",{hasDiff:!!a,hasChanges:c}),{diff:a,resultDiff:l,hasChanges:c,previewInfo:p,diffBinary:i,resultDiffBinary:f}};async function ct(e,t){let{maxRetries:r,baseDelay:o,onRetry:n}=t,s;for(let a=1;a<=r;a++)try{return await e()}catch(l){if(s=l,a===r)throw s;n&&n(a,s),await new Promise(i=>setTimeout(i,o*a))}throw s}var Z=_("cleanup_stage"),dt=async({config:e,diff:t,result:r,duration:o,resultDiff:n,diffBinary:s,resultDiffBinary:a,previewInfo:l})=>{let i={result_diff:t,result:r||"Done",duration:o,result_diff_binary:s};return l&&l.deployId&&(i.deploy_id=l.deployId),l&&l.sourceZipFilename&&(i.result_zip_file_name=l.sourceZipFilename),n||a?(Z.log("Updating total agent result diff"),await ue(e.id,{result_diff:n,result_diff_binary:a})):Z.log("No total result diff, not updating"),Z.log("Updated agent runner with result"),await ct(()=>Y(e.id,e.sessionId,i),{maxRetries:3,baseDelay:1e3,onRetry:(f,c)=>{Z.error(`Error updating agent runner session (attempt ${f}):`,c),Z.log("Retrying...")}}),Z.log("Finished updating agent runner with result"),{sessionUpdate:i}};import{getTracer as lr}from"@netlify/otel";var ur=ar(import.meta.url),cr=ur("../package.json"),pt=_("pipeline_index"),ye=3,_o=async({config:e,apiToken:t,cliPath:r="netlify",cwd:o,errorLogsPath:n,filter:s,traceExporterUrl:a})=>{let l;try{await $e(cr.version,e.id,a);let i=lr(),f,{aiGateway:c,context:p,persistSteps:g,runner:y,sha:R}=await ot({config:e,apiToken:t,cliPath:r,cwd:o,errorLogsPath:n,filter:s});l=y.clean,e.sha=R;let{runnerResult:x}=await ge({cliPath:r,config:e,context:p,runner:y.runner,persistSteps:g,aiGateway:c}),h=await ke({cliPath:r,config:e,context:p,result:x.result,filter:s}),A=x,C=[];if(h.hasChanges&&h.deployError){C.push(De(h.deployError));let m=1;for(;m<=ye&&!h.previewInfo;)pt.log(`Deploy attempt had errors. Retrying. 
${m}/${ye}`),await ie(i,"stage-deploy",async d=>{d?.setAttributes({"stage.attempt":m});let{runnerResult:u}=await ge({cliPath:r,config:e,context:p,runner:y.runner,persistSteps:g,aiGateway:c,buildErrors:C});A={...u,steps:[...A.steps||[],...u.steps||[]],duration:(A.duration||0)+(u.duration||0)},h=await ke({cliPath:r,config:e,context:p,result:u.result,filter:s}),h.deployError&&C.push(h.deployError),m++});m>ye&&!h.previewInfo&&(f=new Error(`Deploy validation failed after ${ye} attempts`))}let{diff:P,resultDiff:I,previewInfo:T,diffBinary:N,resultDiffBinary:w}=h;if(await dt({config:e,diff:P,result:A.result,duration:A.duration,resultDiff:I,diffBinary:N,resultDiffBinary:w,previewInfo:T}),f)throw f;await y.clean?.()}catch(i){pt.error("Got error while running pipeline",i),await l?.();let f=i instanceof Error&&i.message;throw await Y(e.id,e.sessionId,{result:f||"Encountered error when running agent",state:"error"}),i}};export{_o as runPipeline};
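Session updates at the cleanup stage go through a small generic retry helper: up to maxRetries attempts with a linearly growing delay of baseDelay multiplied by the attempt number, plus an onRetry hook for logging. Unminified, it is roughly:

// Retry an async operation, waiting baseDelay * attempt between tries.
async function withRetries(operation, { maxRetries, baseDelay, onRetry }) {
  let lastError;
  for (let attempt = 1; attempt <= maxRetries; attempt++) {
    try {
      return await operation();
    } catch (error) {
      lastError = error;
      if (attempt === maxRetries) throw lastError;
      onRetry?.(attempt, error);
      await new Promise((resolve) => setTimeout(resolve, baseDelay * attempt));
    }
  }
  throw lastError;
}

In the pipeline it is invoked with maxRetries: 3 and baseDelay: 1000, so the three attempts are spaced one and two seconds apart.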
|
|
131
131
|
//# sourceMappingURL=index.js.map
|
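The module's only export is runPipeline. For orientation, a hypothetical invocation is sketched below; the config field names are inferred from how this file reads `config` rather than from a documented public API, and the values are placeholders:

import { runPipeline } from "@netlify/agent-runner-cli";

// All field names below are illustrative, inferred from this file's reads of `config`.
await runPipeline({
  config: {
    id: "runner-id",
    sessionId: "session-id",
    runner: "claude",            // one of the registered runners: claude, codex, gemini
    prompt: "Fix the failing contact form",
    accountType: "pro",
    hasRepo: true,
    useGateway: true,
  },
  cliPath: "netlify",            // defaults to "netlify" when omitted
});

When no apiToken is passed in, the init stage falls back to NETLIFY_API_TOKEN and SITE_ID from the environment.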