@zibby/cli 0.4.21 → 0.4.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
|
@@ -1,18 +1,18 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
|
-
import{dirname as
|
|
3
|
-
`);S=$.pop()||"";for(let y of $){let O=y.trim();O&&
|
|
4
|
-
`);n!==
|
|
5
|
-
`),r(e,"in_progress",n,s).catch(h=>{
|
|
6
|
-
`)}))},500);try{await r(e,"in_progress","",s);let n=await o(),h=((Date.now()-d)/1e3).toFixed(1);R=!0,clearInterval(T),await new Promise(
|
|
7
|
-
`);if(
|
|
8
|
-
`),n.success){await r(e,"success",w||`Completed in ${h}s`,s);let
|
|
2
|
+
import{dirname as b,join as G,resolve as J}from"path";import{fileURLToPath as L}from"url";import{readFileSync as j,existsSync as B}from"fs";import{compileGraph as Q,validateGraphConfig as z,WorkflowGraph as W}from"@zibby/agent-workflow";import{invokeAgent as q}from"@zibby/core";import{buildAnalysisGraph as H}from"@zibby/core/templates/code-analysis/graph.js";import{analysisStateSchema as X}from"@zibby/core/templates/code-analysis/state.js";import"@zibby/core/templates/register-nodes.js";async function U(r,t){let a=process.env.CONTEXT_PRESIGNED_URL;if(!a)throw new Error("CONTEXT_PRESIGNED_URL env var is required");console.log("\u{1F4E6} Fetching execution context via pre-signed URL");let e=await fetch(a);if(!e.ok)throw new Error(`Failed to fetch execution context: ${e.status}`);let o=await e.json();return console.log(` \u2705 Got ticketContext (${JSON.stringify(o.ticketContext||{}).length} chars)`),o.nodeConfigs&&Object.keys(o.nodeConfigs).length>0&&console.log(` \u2705 Got nodeConfigs (${Object.keys(o.nodeConfigs).length} nodes configured)`),{ticketContext:o.ticketContext||{},nodeConfigs:o.nodeConfigs||{},graphConfig:o.graphConfig||null,repos:o.repos||[]}}import{SQSClient as M,SendMessageCommand as D}from"@aws-sdk/client-sqs";var I=null;function F(){return I||(I=new M({region:process.env.AWS_REGION||"ap-southeast-2"})),I}async function A(r,t,a,e){let{EXECUTION_ID:o,SQS_AUTH_TOKEN:s,PROGRESS_API_URL:d,PROGRESS_QUEUE_URL:i,PROJECT_API_TOKEN:_}=e;if(!o)return;let g={executionId:o,...s&&{sqsAuthToken:s},step:{name:r,status:t,logs:a,timestamp:new Date().toISOString(),...t==="success"&&{completedAt:new Date().toISOString()}},status:t==="failed"?"failed":"running"};try{d?await v(d,o,g,_):i&&await k(i,o,g)}catch(u){console.error(`\u26A0\uFE0F Failed to send progress: ${u.message}`)}}async function N(r,t,a){let{EXECUTION_ID:e,SQS_AUTH_TOKEN:o,PROGRESS_API_URL:s,PROGRESS_QUEUE_URL:d,PROJECT_API_TOKEN:i}=r;if(!e||!a)return;let _=JSON.stringify(a).length;console.log(`Sending 
artifact: ${t} (${(_/1024).toFixed(1)}KB)`);let g={executionId:e,...o&&{sqsAuthToken:o},artifacts:{[t]:a},timestamp:new Date().toISOString()},u=s?"HTTP":d?"SQS":"NONE",f=JSON.stringify(g).length;try{if(s)await v(s,e,g,i);else if(d)await k(d,e,g);else{console.warn(`\u26A0\uFE0F No transport configured for artifact ${t} \u2014 neither PROGRESS_API_URL nor PROGRESS_QUEUE_URL set`);return}console.log(`Artifact ${t} sent via ${u} (payload=${(f/1024).toFixed(1)}KB, value=${(_/1024).toFixed(1)}KB)`)}catch(c){console.error(`Failed to send artifact ${t} via ${u}:`),console.error(` Payload size: ${(f/1024).toFixed(1)}KB, Value size: ${(_/1024).toFixed(1)}KB`),console.error(` Error: ${c.message}`),c.name&&console.error(` Error type: ${c.name}`),c.code&&console.error(` Error code: ${c.code}`),f>256*1024&&console.error(" \u26A0\uFE0F Message exceeds SQS 256KB limit! Consider splitting or compressing.")}}async function P(r,{status:t,error:a,finalState:e}){let{EXECUTION_ID:o,SQS_AUTH_TOKEN:s,PROGRESS_API_URL:d,PROGRESS_QUEUE_URL:i,PROJECT_API_TOKEN:_}=r;if(!o)return;let g={executionId:o,...s&&{sqsAuthToken:s},status:t,...a&&{error:a},...e&&typeof e=="object"&&Object.keys(e).length>0?{finalState:e}:{},timestamp:new Date().toISOString()},u=d?"HTTP":i?"SQS":"NONE",f=JSON.stringify(g).length;console.log(`Sending final status: ${t} via ${u} (${(f/1024).toFixed(1)}KB)`);try{if(d)await v(d,o,g,_);else if(i){let c=["completed","failed","insufficient_context","blocked"].includes(t)?"execution_completed":"progress_update";await k(i,o,g,c)}else{console.warn("No transport configured for final status \u2014 neither PROGRESS_API_URL nor PROGRESS_QUEUE_URL set");return}console.log(`Final status ${t} sent via ${u}`)}catch(c){console.error(`Failed to send final status (${t}) via ${u}:`),console.error(` Payload: ${(f/1024).toFixed(1)}KB`),console.error(` Error: ${c.message}`),c.name&&console.error(` Error type: ${c.name}`),c.code&&console.error(` Error code: ${c.code}`)}}async function 
v(r,t,a,e){let o=`${r}/${t}/progress`,s={"Content-Type":"application/json"};e&&(s.Authorization=`Bearer ${e}`);let d=await fetch(o,{method:"POST",headers:s,body:JSON.stringify(a)});if(!d.ok){let i=await d.text();throw new Error(`HTTP ${d.status}: ${i}`)}}async function k(r,t,a,e="progress_update"){let o=JSON.stringify(a),s=(o.length/1024).toFixed(1);o.length>256*1024&&console.error(`\u274C SQS message too large: ${s}KB (limit 256KB) for ${t} [${e}]`),await F().send(new D({QueueUrl:r,MessageBody:o,MessageGroupId:t,MessageAttributes:{executionId:{DataType:"String",StringValue:t},messageType:{DataType:"String",StringValue:e}}}))}import{writeMcpConfig as V}from"@zibby/core/utils/mcp-config-writer.js";var Y=L(import.meta.url),Z=b(Y),ee=JSON.parse(j(G(Z,"../../package.json"),"utf-8")),oe={analyze_ticket:r=>({key:"analysis",value:{raw:r.raw,structured:r.output}}),generate_code:r=>({key:"codeImplementation",value:r.output?.codeImplementation}),generate_test_cases:r=>({key:"tests",value:r.output?.tests}),finalize:r=>({key:"report",value:r.output?.report})};function te(r,t){return async function(e,o,s){let d=Date.now(),i=[],_="",g=console.log,u=process.stdout.write.bind(process.stdout),f=process.stderr.write.bind(process.stderr),c=!1;console.log=(...n)=>{let h=n.map(w=>typeof w=="string"?w:JSON.stringify(w)).join(" ");i.push(h),c=!0,g(...n),c=!1};let S="";process.stdout.write=(n,h,w)=>{if(!c){let p=typeof n=="string"?n:n.toString();S+=p;let $=S.split(`
|
|
3
|
+
`);S=$.pop()||"";for(let y of $){let O=y.trim();O&&i.push(O)}}return u(n,h,w)},g(`[Middleware] Started capturing logs for ${e}`);let R=!1,T=setInterval(()=>{if(R)return;let n=i.join(`
|
|
4
|
+
`);n!==_&&n.length>0&&(_=n,f(`\u{1F4E1} [Middleware] Sending live update for ${e}: ${n.length} chars, ${i.length} lines
|
|
5
|
+
`),r(e,"in_progress",n,s).catch(h=>{f(`\u26A0\uFE0F [Middleware] Failed to send live update: ${h.message}
|
|
6
|
+
`)}))},500);try{await r(e,"in_progress","",s);let n=await o(),h=((Date.now()-d)/1e3).toFixed(1);R=!0,clearInterval(T),await new Promise(p=>setImmediate(p)),console.log=g,process.stdout.write=u,S.trim()&&(i.push(S.trim()),S="");let w=i.join(`
|
|
7
|
+
`);if(f(`\u{1F4E1} [Middleware] Sending final update for ${e}: ${w.length} chars, ${i.length} total lines captured
|
|
8
|
+
`),n.success){await r(e,"success",w||`Completed in ${h}s`,s);let p=oe[e];if(p){let{key:$,value:y}=p(n);y&&await t(s,$,y)}}else await r(e,"failed",`${w}
|
|
9
9
|
|
|
10
|
-
Error: ${n.error}`,s);return n}catch(n){R=!0,clearInterval(T),await new Promise(w=>setImmediate(w)),console.log=
|
|
10
|
+
Error: ${n.error}`,s);return n}catch(n){R=!0,clearInterval(T),await new Promise(w=>setImmediate(w)),console.log=g,process.stdout.write=u;let h=`${i.join(`
|
|
11
11
|
`)}
|
|
12
12
|
|
|
13
|
-
Error: ${n.message}`;throw await r(e,"failed",h,s),n}}}async function re(r){let{EXECUTION_ID:t,TICKET_KEY:a,PROJECT_ID:e,REPOS:o,PROGRESS_QUEUE_URL:s,PROGRESS_API_URL:d,SQS_AUTH_TOKEN:
|
|
14
|
-
\u{1F680} Zibby Analysis (Graph Mode)`),console.log(`@zibby/cli v${ee.version} | Node.js ${process.version}`),console.log("\u2500".repeat(60)),console.log(`Ticket: ${a}`),console.log(`Repositories: ${R.length}`),console.log(`Workspace: ${T}`),console.log(`AI Model: ${
|
|
13
|
+
Error: ${n.message}`;throw await r(e,"failed",h,s),n}}}async function re(r){let{EXECUTION_ID:t,TICKET_KEY:a,PROJECT_ID:e,REPOS:o,PROGRESS_QUEUE_URL:s,PROGRESS_API_URL:d,SQS_AUTH_TOKEN:i,PROJECT_API_TOKEN:_,GITHUB_TOKEN:g,MODEL:u}=process.env;(!t||!a||!e)&&(console.error("\u274C Missing required environment variables"),console.error(" Required: EXECUTION_ID, TICKET_KEY, PROJECT_ID"),process.exit(1));let f=await U(t,e),c=f.ticketContext,S=f.nodeConfigs||{},R=o?JSON.parse(o):f.repos,T=process.env.WORKSPACE||"/workspace",n=L(import.meta.resolve("@zibby/core/package.json")),h=G(b(n),"templates","code-analysis","prompts");console.log(`
|
|
14
|
+
\u{1F680} Zibby Analysis (Graph Mode)`),console.log(`@zibby/cli v${ee.version} | Node.js ${process.version}`),console.log("\u2500".repeat(60)),console.log(`Ticket: ${a}`),console.log(`Repositories: ${R.length}`),console.log(`Workspace: ${T}`),console.log(`AI Model: ${u||"auto"}`),console.log("\u2500".repeat(60));let w=te(A,N),p,$,y=null;if(r?.workflow){let l=J(process.cwd(),r.workflow);if(B(l)||(console.error(`\u274C Workflow file not found: ${l}`),process.exit(1)),l.endsWith(".js")||l.endsWith(".mjs"))try{let{pathToFileURL:m}=await import("url");y=await import(m(l).href),$=`local JS module (${l})`}catch(m){console.error(`\u274C Failed to load workflow JS module: ${m.message}`),process.exit(1)}else{try{let E=JSON.parse(j(l,"utf-8")),{_meta:C,...K}=E;p=K,$=`local file (${l})`}catch(E){console.error(`\u274C Failed to parse workflow file: ${E.message}`),process.exit(1)}let m=z(p);m.valid||(console.error("\u274C Invalid workflow file:"),m.errors.forEach(E=>console.error(` - ${E}`)),process.exit(1))}}else if(f.graphConfig)p=f.graphConfig,$="custom (from project workflow)";else{let l=new W;H(l),p=l.serialize(),$="default"}let O;if(y){let m={...y.nodeConfigs||{},...S};O=y.buildGraph({nodeMiddleware:w}),console.log(`\u{1F4D0} Graph source: ${$}`),console.log(` Nodes: ${O.nodes.size}`),S=m}else{if(S&&Object.keys(S).length>0){let l=p.nodeConfigs||{},m={...l};for(let[E,C]of Object.entries(S))m[E]={...l[E],...C};p.nodeConfigs=m}console.log(`\u{1F4D0} Graph source: ${$}`),console.log(` Nodes: ${p.nodes?.length||0}`),console.log(` Edges: ${p.edges?.length||0}`),O=Q(p,{nodeMiddleware:w,stateSchema:X,invokeAgent:q})}V(S);let x={EXECUTION_ID:t,PROGRESS_QUEUE_URL:s,PROGRESS_API_URL:d,SQS_AUTH_TOKEN:i,PROJECT_API_TOKEN:_,workspace:T,repos:R,ticketContext:c,promptsDir:h,githubToken:g,model:u,nodeConfigs:S};try{let m=(await 
O.run(null,x)).state,E=m.analyze_ticket_output?.validation||m.analyze_ticket_output?.analysis?.structured?.validation,C="completed";E&&!E.canProceed&&(C=E.status==="insufficient_context"?"insufficient_context":"blocked"),console.log(`
|
|
15
15
|
\u{1F4CB} Validation: canProceed=${E?.canProceed}, status=${E?.status}, finalStatus=${C}`),console.log(`
|
|
16
16
|
\u{1F4CA} Sending final status: ${C}`),await P(x,{status:C}),console.log(`
|
|
17
|
-
\u2705 Analysis completed successfully`),process.exit(0)}catch(
|
|
18
|
-
\u274C Analysis failed:`,
|
|
17
|
+
\u2705 Analysis completed successfully`),process.exit(0)}catch(l){if(console.error(`
|
|
18
|
+
\u274C Analysis failed:`,l.message),t)try{console.log("\u{1F4E1} Reporting failure..."),await P(x,{status:"failed",error:l.message})}catch{console.error("\u26A0\uFE0F Failed to report error")}process.exit(1)}}import.meta.url===`file://${process.argv[1]}`&&re();export{re as analyzeCommand};
|
|
@@ -1,9 +1,9 @@
|
|
|
1
|
-
var A=(t=>typeof require<"u"?require:typeof Proxy<"u"?new Proxy(t,{get:(r,n)=>(typeof require<"u"?require:r)[n]}):t)(function(t){if(typeof require<"u")return require.apply(this,arguments);throw Error('Dynamic require of "'+t+'" is not supported')});import{existsSync as
|
|
2
|
-
`);
|
|
3
|
-
`);return await
|
|
4
|
-
`);throw await
|
|
1
|
+
var A=(t=>typeof require<"u"?require:typeof Proxy<"u"?new Proxy(t,{get:(r,n)=>(typeof require<"u"?require:r)[n]}):t)(function(t){if(typeof require<"u")return require.apply(this,arguments);throw Error('Dynamic require of "'+t+'" is not supported')});import{existsSync as y,readFileSync as O,writeFileSync as J}from"fs";import{join as a,dirname as B}from"path";import{fileURLToPath as Q}from"url";import{invokeAgent as X}from"@zibby/core";async function b(t,r){let n=process.env.CONTEXT_PRESIGNED_URL;if(!n)throw new Error("CONTEXT_PRESIGNED_URL env var is required");console.log("\u{1F4E6} Fetching execution context via pre-signed URL");let o=await fetch(n);if(!o.ok)throw new Error(`Failed to fetch execution context: ${o.status}`);let e=await o.json();return console.log(` \u2705 Got ticketContext (${JSON.stringify(e.ticketContext||{}).length} chars)`),e.nodeConfigs&&Object.keys(e.nodeConfigs).length>0&&console.log(` \u2705 Got nodeConfigs (${Object.keys(e.nodeConfigs).length} nodes configured)`),{ticketContext:e.ticketContext||{},nodeConfigs:e.nodeConfigs||{},graphConfig:e.graphConfig||null,repos:e.repos||[]}}import{SQSClient as K,SendMessageCommand as F}from"@aws-sdk/client-sqs";var v=null;function G(){return v||(v=new K({region:process.env.AWS_REGION||"ap-southeast-2"})),v}async function P(t,r,n,o){let{EXECUTION_ID:e,SQS_AUTH_TOKEN:s,PROGRESS_API_URL:c,PROGRESS_QUEUE_URL:g,PROJECT_API_TOKEN:i}=o;if(!e)return;let m={executionId:e,...s&&{sqsAuthToken:s},step:{name:t,status:r,logs:n,timestamp:new Date().toISOString(),...r==="success"&&{completedAt:new Date().toISOString()}},status:r==="failed"?"failed":"running"};try{c?await D(c,e,m,i):g&&await j(g,e,m)}catch(p){console.error(`\u26A0\uFE0F Failed to send progress: ${p.message}`)}}async function U(t,{status:r,error:n,finalState:o}){let{EXECUTION_ID:e,SQS_AUTH_TOKEN:s,PROGRESS_API_URL:c,PROGRESS_QUEUE_URL:g,PROJECT_API_TOKEN:i}=t;if(!e)return;let m={executionId:e,...s&&{sqsAuthToken:s},status:r,...n&&{error:n},...o&&typeof 
o=="object"&&Object.keys(o).length>0?{finalState:o}:{},timestamp:new Date().toISOString()},p=c?"HTTP":g?"SQS":"NONE",S=JSON.stringify(m).length;console.log(`Sending final status: ${r} via ${p} (${(S/1024).toFixed(1)}KB)`);try{if(c)await D(c,e,m,i);else if(g){let d=["completed","failed","insufficient_context","blocked"].includes(r)?"execution_completed":"progress_update";await j(g,e,m,d)}else{console.warn("No transport configured for final status \u2014 neither PROGRESS_API_URL nor PROGRESS_QUEUE_URL set");return}console.log(`Final status ${r} sent via ${p}`)}catch(d){console.error(`Failed to send final status (${r}) via ${p}:`),console.error(` Payload: ${(S/1024).toFixed(1)}KB`),console.error(` Error: ${d.message}`),d.name&&console.error(` Error type: ${d.name}`),d.code&&console.error(` Error code: ${d.code}`)}}async function D(t,r,n,o){let e=`${t}/${r}/progress`,s={"Content-Type":"application/json"};o&&(s.Authorization=`Bearer ${o}`);let c=await fetch(e,{method:"POST",headers:s,body:JSON.stringify(n)});if(!c.ok){let g=await c.text();throw new Error(`HTTP ${c.status}: ${g}`)}}async function j(t,r,n,o="progress_update"){let e=JSON.stringify(n),s=(e.length/1024).toFixed(1);e.length>256*1024&&console.error(`\u274C SQS message too large: ${s}KB (limit 256KB) for ${r} [${o}]`),await G().send(new F({QueueUrl:t,MessageBody:e,MessageGroupId:r,MessageAttributes:{executionId:{DataType:"String",StringValue:r},messageType:{DataType:"String",StringValue:o}}}))}var H=Q(import.meta.url),M=B(H);async function ie(t){let{EXECUTION_ID:r,TICKET_KEY:n,PROJECT_ID:o,REPOS:e,_PRIMARY_REPO:s,_GITHUB_TOKEN:c,MODEL:g}=process.env;(!r||!n||!o)&&(console.error("\u274C Missing required environment variables:"),console.error(" EXECUTION_ID, TICKET_KEY, PROJECT_ID"),process.exit(1));let i=await b(r,o),m=i.ticketContext,p=e?JSON.parse(e):i.repos,S=p.find(w=>w.isPrimary)||p[0],d=process.cwd(),f={status:"running",steps:[]};try{await T("Start Environment",async()=>{}),await T("Clone 
Repositories",async()=>{let l=process.env.GITHUB_TOKEN,u=process.env.GITLAB_TOKEN||"",E=process.env.GITLAB_URL||"";for(let h of p){let R=a(d,h.name),C=h.url,I=h.provider==="gitlab"||E&&h.url.includes(new URL(E).host);if((h.provider==="github"||h.url.includes("github.com"))&&l)C=h.url.replace("https://github.com",`https://${l}@github.com`);else if(I&&u&&E)try{let k=new URL(E).host;C=h.url.replace(`https://${k}`,`https://oauth2:${u}@${k}`)}catch(k){console.warn(`\u26A0\uFE0F Failed to parse GITLAB_URL: ${k.message}`)}if(_(["git","clone",C,R],d),_(["git","checkout",h.branch],R),h.isPrimary){let k=`feature/${n.toLowerCase()}`;_(["git","checkout","-b",k],R)}}f.steps.push({name:"clone",status:"success",repoCount:p.length})});let w=await T("Load Ticket Context",async()=>(f.steps.push({name:"load_ticket",status:"success"}),m));await T("Install Dependencies",async()=>{for(let l of p){let u=a(d,l.name),E=L(u);try{_(E.installCommand,u)}catch{}}f.steps.push({name:"install_deps",status:"success"})});let $=await T("Detect Dev Command",async()=>{let l=a(d,S.name),u=["docker-compose.yml","docker-compose.yaml","compose.yml","compose.yaml"];for(let I of u)if(y(a(l,I)))return f.steps.push({name:"detect_dev",status:"success",command:"docker-compose up",type:"docker-compose"}),{command:"docker-compose up",type:"docker-compose",configFile:I};let E=a(l,"package.json");if(!y(E))return console.log(" \u26A0\uFE0F No package.json or docker-compose found"),f.steps.push({name:"detect_dev",status:"skipped"}),null;let R=JSON.parse(O(E,"utf-8")).scripts||{},C=null;return R.dev?C="npm run dev":R.start?C="npm start":R["dev:local"]&&(C="npm run dev:local"),C?(f.steps.push({name:"detect_dev",status:"success",command:C,type:"npm"}),{command:C,type:"npm"}):(f.steps.push({name:"detect_dev",status:"skipped"}),null)});await T("Start Dev Server",async()=>{let l=a(d,S.name),u="docker-compose.test.yml";return y(a(l,u))?(_(["docker","compose","-f",u,"up","-d"],l),await new 
Promise(E=>setTimeout(E,1e4)),f.steps.push({name:"start_server",status:"success"}),!0):(console.log(` \u26A0\uFE0F No ${u} found, skipping server startup`),f.steps.push({name:"start_server",status:"skipped"}),null)}),await T("Run AI Agent Implementation",async()=>{let l=p.map(h=>{let R=a(d,h.name);return{...h,...L(R)}}),u=q(w,l,$),E=a(d,".cursor-prompt.md");J(E,u),await X(u,{state:{model:g,workspace:d}},{print:!0}),f.steps.push({name:"ai_agent",status:"success"})});let Y=await T("Run E2E Tests",async()=>{let l=a(d,S.name);if(!y(a(l,"playwright.config.js"))&&!y(a(l,"playwright.config.ts")))return f.steps.push({name:"e2e_tests",status:"skipped"}),null;try{return _("npx playwright test --reporter=json",l),f.steps.push({name:"e2e_tests",status:"success"}),{passed:!0}}catch(u){throw _("docker compose -f docker-compose.test.yml down",l,{allowFailure:!0}),new Error(`E2E tests failed: ${u.message}`,{cause:u})}});try{_("docker compose -f docker-compose.test.yml down",a(d,S.name),{allowFailure:!0})}catch{}let x=await T("Create Pull Request",async()=>{let l=a(d,S.name),u=`feature/${n.toLowerCase()}`;return _(["git","add","."],l),_(["git","commit","-m",`feat(${n}): ${w.summary}`],l),_(["git","push","origin",u],l),console.log(" \u26A0\uFE0F PR creation via API removed (using SQS flow)"),f.steps.push({name:"create_pr",status:"skipped"}),null});await T("Report Results",async()=>{let l=a(d,S.name),u=a(l,"test-results"),E=[];y(u),f.status="completed",f.prUrl=x,f.videoUrls=E,await U(N(),{status:"completed",artifacts:{prUrl:x,videoUrls:E}})}),process.exit(0)}catch(w){console.error(""),console.error("\u2554\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2557"),console.error("\u2551 \u274C FAILED! 
\u2551"),console.error("\u255A\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u255D"),console.error(""),console.error("Error:",w.message),console.error("Stack:",w.stack);try{await U(N(),{status:"failed",error:w.message})}catch($){console.error("Failed to report error:",$.message)}process.exit(1)}}function N(){return{EXECUTION_ID:process.env.EXECUTION_ID,PROGRESS_API_URL:process.env.PROGRESS_API_URL,PROGRESS_QUEUE_URL:process.env.PROGRESS_QUEUE_URL,SQS_AUTH_TOKEN:process.env.SQS_AUTH_TOKEN,PROJECT_API_TOKEN:process.env.PROJECT_API_TOKEN}}async function T(t,r){let n=Date.now(),o=[],e="",s=console.log;console.log=(...i)=>{let m=i.join(" ");o.push(m),s(...i)};let c=N(),g=setInterval(()=>{let i=o.join(`
|
|
2
|
+
`);i!==e&&i.length>0&&(e=i,P(t,"running",i,c).catch(()=>{}))},2e3);try{await P(t,"running","",c);let i=await r(),m=`${((Date.now()-n)/1e3).toFixed(1)}s`;clearInterval(g),console.log=s;let p=o.join(`
|
|
3
|
+
`);return await P(t,"success",p||`Completed in ${m}`,c),i}catch(i){clearInterval(g),console.log=s;let m=o.join(`
|
|
4
|
+
`);throw await P(t,"failed",`${m}
|
|
5
5
|
|
|
6
|
-
Error: ${
|
|
6
|
+
Error: ${i.message}`,c),i}}function _(t,r,n={}){try{let{spawnSync:o}=A("child_process"),e;if(Array.isArray(t)){let[s,...c]=t;e=o(s,c,{cwd:r,encoding:"utf-8",stdio:["pipe","pipe","pipe"]})}else e=o(t,{cwd:r,shell:!0,encoding:"utf-8",stdio:["pipe","pipe","pipe"]});if(e.stdout&&console.log(e.stdout),e.stderr&&console.log(e.stderr),e.status!==0&&!n.allowFailure){let s=Array.isArray(t)?t.join(" "):t;throw new Error(`Command failed with exit code ${e.status}: ${s}`)}return e.stdout||e.stderr}catch(o){if(n.allowFailure)return null;throw o}}function L(t){let r=a(t,".zibby.yml");if(y(r))try{let e=A("js-yaml").load(O(r,"utf-8"));return{name:e.name||"Custom Project",framework:e.framework||"Custom",language:e.language||"Custom",testCommand:e.test||"make test",installCommand:e.install||"make install",custom:!0}}catch{console.warn("Invalid .zibby.yml, falling back to auto-detection")}let n=a(t,"package.json");if(y(n)){let o=JSON.parse(O(n,"utf-8")),e={...o.dependencies,...o.devDependencies},s="Node.js";return e.next?s="Next.js":e["react-scripts"]?s="Create React App":e.vite&&e.react?s="React + Vite":e["@angular/core"]?s="Angular":e.vue?s="Vue.js":e.express&&(s="Express.js"),{name:o.name||"Unknown Project",framework:s,language:"JavaScript/TypeScript",testCommand:o.scripts?.test||"npm test",installCommand:"npm install"}}return y(a(t,"requirements.txt"))||y(a(t,"pyproject.toml"))?{name:"Python Project",framework:y(a(t,"manage.py"))?"Django":y(a(t,"app.py"))?"Flask":"Python",language:"Python",testCommand:"pytest",installCommand:"pip install -r requirements.txt"}:y(a(t,"Gemfile"))?{name:"Ruby Project",framework:"Rails",language:"Ruby",testCommand:"bundle exec rspec",installCommand:"bundle install"}:y(a(t,"go.mod"))?{name:"Go Project",framework:"Go",language:"Go",testCommand:"go test ./...",installCommand:"go mod download"}:y(a(t,"pom.xml"))?{name:"Java Project",framework:"Spring Boot",language:"Java",testCommand:"./mvnw test",installCommand:"./mvnw install"}:{name:"Unknown 
Project",framework:"Unknown",language:"Unknown",testCommand:"make test",installCommand:"make install"}}function q(t,r,n){let o=a(M,"../../prompts/implement-ticket.md"),e;try{e=O(o,"utf-8")}catch{e=`
|
|
7
7
|
# Implement Ticket: {{TICKET_KEY}}
|
|
8
8
|
|
|
9
9
|
## Project Context
|
|
@@ -34,7 +34,7 @@ You are implementing this ticket. Follow these steps:
|
|
|
34
34
|
5. Fix any linter errors
|
|
35
35
|
|
|
36
36
|
Now implement this ticket completely!
|
|
37
|
-
`.trim()}let s=r.find(
|
|
37
|
+
`.trim()}let s=r.find(m=>m.isPrimary)||r[0],c;n?.type==="docker-compose"?c=`\`docker-compose up\` (using ${n.configFile})`:n?.command?c=`\`cd ${s.name} && ${n.command}\``:c="`npm run dev` (or check package.json scripts)";let g;if(r.length===1)g=`
|
|
38
38
|
You are working in **${s.name}**, a ${s.framework} project.
|
|
39
39
|
|
|
40
40
|
**Commands:**
|
|
@@ -42,11 +42,11 @@ You are working in **${s.name}**, a ${s.framework} project.
|
|
|
42
42
|
- Run tests: \`cd ${s.name} && ${s.testCommand}\`
|
|
43
43
|
|
|
44
44
|
You have full access to the codebase in the current directory.
|
|
45
|
-
`.trim();else{let
|
|
46
|
-
`);
|
|
45
|
+
`.trim();else{let m=r.map(p=>`- **${p.name}/** (${p.framework})${p.isPrimary?" \u2190 **MAKE CHANGES HERE**":" (reference only)"}`).join(`
|
|
46
|
+
`);g=`
|
|
47
47
|
You are working in a **multi-repository** setup with ${r.length} repositories:
|
|
48
48
|
|
|
49
|
-
${
|
|
49
|
+
${m}
|
|
50
50
|
|
|
51
51
|
**Primary Repository:** ${s.name}
|
|
52
52
|
- This is where you should implement the feature
|
|
@@ -55,11 +55,11 @@ ${p}
|
|
|
55
55
|
- Run tests: \`cd ${s.name} && ${s.testCommand}\`
|
|
56
56
|
|
|
57
57
|
**Other Repositories:**
|
|
58
|
-
${r.filter(
|
|
58
|
+
${r.filter(p=>!p.isPrimary).map(p=>`- **${p.name}**: You can read code from here for reference (shared libraries, services, etc.)`).join(`
|
|
59
59
|
`)||"(none)"}
|
|
60
60
|
|
|
61
61
|
**Important:** Make all code changes in the \`${s.name}/\` directory only.
|
|
62
|
-
`.trim()}let
|
|
62
|
+
`.trim()}let i=e.replace(/\{\{TICKET_KEY\}\}/g,t.ticketKey||t.key||"UNKNOWN").replace(/\{\{PROJECT_CONTEXT\}\}/g,g).replace(/\{\{TICKET_SUMMARY\}\}/g,t.summary||"No summary").replace(/\{\{TICKET_DESCRIPTION\}\}/g,t.description||"No description provided").replace(/\{\{ACCEPTANCE_CRITERIA\}\}/g,t.acceptanceCriteria||"Not specified");if(t.additionalContext){let m=`## Additional Context from User
|
|
63
63
|
${t.additionalContext}
|
|
64
64
|
|
|
65
|
-
`;
|
|
65
|
+
`;i=i.replace(/\{\{#if ADDITIONAL_CONTEXT\}\}[\s\S]*?\{\{\/if\}\}/g,m)}else i=i.replace(/\{\{#if ADDITIONAL_CONTEXT\}\}[\s\S]*?\{\{\/if\}\}/g,"");return i}export{ie as implementCommand};
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
|
-
var wt=Object.defineProperty;var d=(e,t)=>()=>(e&&(t=e(e=0)),t);var V=(e,t)=>{for(var r in t)wt(e,r,{get:t[r],enumerable:!0})};var Re,Oe=d(()=>{Re="ffffffff-ffff-ffff-ffff-ffffffffffff"});var Ue,Pe=d(()=>{Ue="00000000-0000-0000-0000-000000000000"});var Ae,Ie=d(()=>{Ae=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-8][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000|ffffffff-ffff-ffff-ffff-ffffffffffff)$/i});function At(e){return typeof e=="string"&&Ae.test(e)}var P,L=d(()=>{Ie();P=At});function It(e){if(!P(e))throw TypeError("Invalid UUID");let t;return Uint8Array.of((t=parseInt(e.slice(0,8),16))>>>24,t>>>16&255,t>>>8&255,t&255,(t=parseInt(e.slice(9,13),16))>>>8,t&255,(t=parseInt(e.slice(14,18),16))>>>8,t&255,(t=parseInt(e.slice(19,23),16))>>>8,t&255,(t=parseInt(e.slice(24,36),16))/1099511627776&255,t/4294967296&255,t>>>24&255,t>>>16&255,t>>>8&255,t&255)}var T,C=d(()=>{L();T=It});function w(e,t=0){return(m[e[t+0]]+m[e[t+1]]+m[e[t+2]]+m[e[t+3]]+"-"+m[e[t+4]]+m[e[t+5]]+"-"+m[e[t+6]]+m[e[t+7]]+"-"+m[e[t+8]]+m[e[t+9]]+"-"+m[e[t+10]]+m[e[t+11]]+m[e[t+12]]+m[e[t+13]]+m[e[t+14]]+m[e[t+15]]).toLowerCase()}function bt(e,t=0){let r=w(e,t);if(!P(r))throw TypeError("Stringified UUID is invalid");return r}var m,be,R=d(()=>{L();m=[];for(let e=0;e<256;++e)m.push((e+256).toString(16).slice(1));be=bt});import{randomFillSync as vt}from"crypto";function O(){return B>J.length-16&&(vt(J),B=0),J.slice(B,B+=16)}var J,B,H=d(()=>{J=new Uint8Array(256),B=J.length});function kt(e,t,r){let o,n=e?._v6??!1;if(e){let i=Object.keys(e);i.length===1&&i[0]==="_v6"&&(e=void 0)}if(e)o=ve(e.random??e.rng?.()??O(),e.msecs,e.nsecs,e.clockseq,e.node,t,r);else{let i=Date.now(),s=O();Dt(j,i,s),o=ve(s,j.msecs,j.nsecs,n?void 0:j.clockseq,n?void 0:j.node,t,r)}return t??w(o)}function Dt(e,t,r){return e.msecs??=-1/0,e.nsecs??=0,t===e.msecs?(e.nsecs++,e.nsecs>=1e4&&(e.node=void 0,e.nsecs=0)):t>e.msecs?e.nsecs=0:t<e.msecs&&(e.node=void 
0),e.node||(e.node=r.slice(10,16),e.node[0]|=1,e.clockseq=(r[8]<<8|r[9])&16383),e.msecs=t,e}function ve(e,t,r,o,n,i,s=0){if(e.length<16)throw new Error("Random bytes length must be >= 16");if(!i)i=new Uint8Array(16),s=0;else if(s<0||s+16>i.length)throw new RangeError(`UUID byte range ${s}:${s+15} is out of buffer bounds`);t??=Date.now(),r??=0,o??=(e[8]<<8|e[9])&16383,n??=e.slice(10,16),t+=122192928e5;let l=((t&268435455)*1e4+r)%4294967296;i[s++]=l>>>24&255,i[s++]=l>>>16&255,i[s++]=l>>>8&255,i[s++]=l&255;let a=t/4294967296*1e4&268435455;i[s++]=a>>>8&255,i[s++]=a&255,i[s++]=a>>>24&15|16,i[s++]=a>>>16&255,i[s++]=o>>>8|128,i[s++]=o&255;for(let c=0;c<6;++c)i[s++]=n[c];return i}var j,Y,ee=d(()=>{H();R();j={};Y=kt});function F(e){let t=typeof e=="string"?T(e):e,r=Nt(t);return typeof e=="string"?w(r):r}function Nt(e){return Uint8Array.of((e[6]&15)<<4|e[7]>>4&15,(e[7]&15)<<4|(e[4]&240)>>4,(e[4]&15)<<4|(e[5]&240)>>4,(e[5]&15)<<4|(e[0]&240)>>4,(e[0]&15)<<4|(e[1]&240)>>4,(e[1]&15)<<4|(e[2]&240)>>4,96|e[2]&15,e[3],e[8],e[9],e[10],e[11],e[12],e[13],e[14],e[15])}var te=d(()=>{C();R()});import{createHash as Lt}from"crypto";function Ct(e){return Array.isArray(e)?e=Buffer.from(e):typeof e=="string"&&(e=Buffer.from(e,"utf8")),Lt("md5").update(e).digest()}var ke,De=d(()=>{ke=Ct});function jt(e){e=unescape(encodeURIComponent(e));let t=new Uint8Array(e.length);for(let r=0;r<e.length;++r)t[r]=e.charCodeAt(r);return t}function z(e,t,r,o,n,i){let s=typeof r=="string"?jt(r):r,l=typeof o=="string"?T(o):o;if(typeof o=="string"&&(o=T(o)),o?.length!==16)throw TypeError("Namespace must be array-like (16 iterable integer values, 0-255)");let a=new Uint8Array(16+s.length);if(a.set(l),a.set(s,l.length),a=t(a),a[6]=a[6]&15|e,a[8]=a[8]&63|128,n){if(i=i||0,i<0||i+16>n.length)throw new RangeError(`UUID byte range ${i}:${i+15} is out of buffer bounds`);for(let c=0;c<16;++c)n[i+c]=a[c];return n}return w(a)}var 
q,Q,re=d(()=>{C();R();q="6ba7b810-9dad-11d1-80b4-00c04fd430c8",Q="6ba7b811-9dad-11d1-80b4-00c04fd430c8"});function oe(e,t,r,o){return z(48,ke,e,t,r,o)}var Ne,Le=d(()=>{De();re();oe.DNS=q;oe.URL=Q;Ne=oe});import{randomUUID as Ft}from"crypto";var ne,Ce=d(()=>{ne={randomUUID:Ft}});function zt(e,t,r){if(ne.randomUUID&&!t&&!e)return ne.randomUUID();e=e||{};let o=e.random??e.rng?.()??O();if(o.length<16)throw new Error("Random bytes length must be >= 16");if(o[6]=o[6]&15|64,o[8]=o[8]&63|128,t){if(r=r||0,r<0||r+16>t.length)throw new RangeError(`UUID byte range ${r}:${r+15} is out of buffer bounds`);for(let n=0;n<16;++n)t[r+n]=o[n];return t}return w(o)}var je,Fe=d(()=>{Ce();H();R();je=zt});import{createHash as Kt}from"crypto";function Gt(e){return Array.isArray(e)?e=Buffer.from(e):typeof e=="string"&&(e=Buffer.from(e,"utf8")),Kt("sha1").update(e).digest()}var ze,Ke=d(()=>{ze=Gt});function se(e,t,r,o){return z(80,ze,e,t,r,o)}var Ge,We=d(()=>{Ke();re();se.DNS=q;se.URL=Q;Ge=se});function Wt(e,t,r){e??={},r??=0;let o=Y({...e,_v6:!0},new Uint8Array(16));if(o=F(o),t){if(r<0||r+16>t.length)throw new RangeError(`UUID byte range ${r}:${r+15} is out of buffer bounds`);for(let n=0;n<16;n++)t[r+n]=o[n];return t}return w(o)}var Be,Je=d(()=>{R();ee();te();Be=Wt});function ie(e){let t=typeof e=="string"?T(e):e,r=Bt(t);return typeof e=="string"?w(r):r}function Bt(e){return Uint8Array.of((e[3]&15)<<4|e[4]>>4&15,(e[4]&15)<<4|(e[5]&240)>>4,(e[5]&15)<<4|e[6]&15,e[7],(e[1]&15)<<4|(e[2]&240)>>4,(e[2]&15)<<4|(e[3]&240)>>4,16|(e[0]&240)>>4,(e[0]&15)<<4|(e[1]&240)>>4,e[8],e[9],e[10],e[11],e[12],e[13],e[14],e[15])}var He=d(()=>{C();R()});function Jt(e,t,r){let o;if(e)o=Ye(e.random??e.rng?.()??O(),e.msecs,e.seq,t,r);else{let n=Date.now(),i=O();Ht(ae,n,i),o=Ye(i,ae.msecs,ae.seq,t,r)}return t??w(o)}function Ht(e,t,r){return e.msecs??=-1/0,e.seq??=0,t>e.msecs?(e.seq=r[6]<<23|r[7]<<16|r[8]<<8|r[9],e.msecs=t):(e.seq=e.seq+1|0,e.seq===0&&e.msecs++),e}function Ye(e,t,r,o,n=0){if(e.length<16)throw new 
Error("Random bytes length must be >= 16");if(!o)o=new Uint8Array(16),n=0;else if(n<0||n+16>o.length)throw new RangeError(`UUID byte range ${n}:${n+15} is out of buffer bounds`);return t??=Date.now(),r??=e[6]*127<<24|e[7]<<16|e[8]<<8|e[9],o[n++]=t/1099511627776&255,o[n++]=t/4294967296&255,o[n++]=t/16777216&255,o[n++]=t/65536&255,o[n++]=t/256&255,o[n++]=t&255,o[n++]=112|r>>>28&15,o[n++]=r>>>20&255,o[n++]=128|r>>>14&63,o[n++]=r>>>6&255,o[n++]=r<<2&255|e[10]&3,o[n++]=e[11],o[n++]=e[12],o[n++]=e[13],o[n++]=e[14],o[n++]=e[15],o}var ae,qe,Qe=d(()=>{H();R();ae={};qe=Jt});function Yt(e){if(!P(e))throw TypeError("Invalid UUID");return parseInt(e.slice(14,15),16)}var Me,Ve=d(()=>{L();Me=Yt});var Ze={};V(Ze,{MAX:()=>Re,NIL:()=>Ue,parse:()=>T,stringify:()=>be,v1:()=>Y,v1ToV6:()=>F,v3:()=>Ne,v4:()=>je,v5:()=>Ge,v6:()=>Be,v6ToV1:()=>ie,v7:()=>qe,validate:()=>P,version:()=>Me});var Xe=d(()=>{Oe();Pe();C();R();ee();te();Le();Fe();We();Je();He();Qe();L();Ve()});var le={};V(le,{__test:()=>nr,sessionPullFromS3:()=>rr,sessionPushToS3:()=>or});import{existsSync as qt,mkdirSync as Qt,readFileSync as Mt,writeFileSync as Vt,statSync as Zt}from"node:fs";import{homedir as Xt}from"node:os";import{join as er,dirname as tr}from"node:path";function et(e){return e.replace(/\//g,"-")}function ce(e,t){return er(Xt(),".claude","projects",et(e),`${t}.jsonl`)}async function rr({apiUrl:e,apiKey:t,workflowUuid:r,sessionId:o,cwd:n}){if(!e||!t||!r||!o||!n)return{pulled:!1,reason:"missing required arg"};let i;try{let a=await fetch(`${e}/workflows/${r}/sessions/${o}/download-url`,{method:"POST",headers:{Authorization:`Bearer ${t}`,"Content-Type":"application/json"}});if(!a.ok)return{pulled:!1,reason:`mint download-url failed: HTTP ${a.status}`};i=await a.json()}catch(a){return{pulled:!1,reason:`mint download-url threw: ${a.message}`}}let s;try{let a=await fetch(i.url,{method:"GET"});if(a.status===404||a.status===403)return{pulled:!1,reason:"no prior session (first 
turn)"};if(!a.ok)return{pulled:!1,reason:`GET presigned failed: HTTP ${a.status}`};s=Buffer.from(await a.arrayBuffer())}catch(a){return{pulled:!1,reason:`GET presigned threw: ${a.message}`}}let l=ce(n,o);try{return Qt(tr(l),{recursive:!0}),Vt(l,s),{pulled:!0,bytes:s.length}}catch(a){return{pulled:!1,reason:`write local JSONL failed: ${a.message}`}}}async function or({apiUrl:e,apiKey:t,workflowUuid:r,sessionId:o,cwd:n}){if(!e||!t||!r||!o||!n)return{pushed:!1,reason:"missing required arg"};let i=ce(n,o);if(!qt(i))return{pushed:!1,reason:"no local JSONL to push"};let s,l;try{s=Mt(i),l=Zt(i).size}catch(c){return{pushed:!1,reason:`read local JSONL failed: ${c.message}`}}let a;try{let c=await fetch(`${e}/workflows/${r}/sessions/${o}/upload-url`,{method:"POST",headers:{Authorization:`Bearer ${t}`,"Content-Type":"application/json"}});if(!c.ok)return{pushed:!1,reason:`mint upload-url failed: HTTP ${c.status}`};a=await c.json()}catch(c){return{pushed:!1,reason:`mint upload-url threw: ${c.message}`}}try{let c=await fetch(a.url,{method:"PUT",headers:{...a.requiredHeaders||{},"Content-Length":String(l)},body:s});return c.ok?{pushed:!0,bytes:l}:{pushed:!1,reason:`PUT presigned failed: HTTP ${c.status}`}}catch(c){return{pushed:!1,reason:`PUT presigned threw: ${c.message}`}}}var nr,fe=d(()=>{nr={sdkProjectDir:et,sessionJsonlPath:ce}});var st={};V(st,{uploadSessionArtifacts:()=>mr});import{readdirSync as rt,statSync as pe,createReadStream as sr,existsSync as ir}from"node:fs";import{join as ot,relative as ar,sep as cr,extname as lr}from"node:path";function dr(e){let t=lr(e).toLowerCase();return ur[t]||"application/octet-stream"}function nt(e){let t=[],r;try{r=rt(e)}catch{return t}for(let o of r){if(fr.has(o)||o.startsWith(".")||pr.has(o))continue;let n=ot(e,o),i;try{i=pe(n)}catch{continue}i.isDirectory()?t.push(...nt(n)):i.isFile()&&t.push(n)}return t}async function gr({apiUrl:e,apiKey:t,executionId:r,nodeName:o,filename:n,absolutePath:i,sizeBytes:s,contentType:l}){let a;try{let 
c=await fetch(`${e}/${r}/artifacts/upload-url`,{method:"POST",headers:{"Content-Type":"application/json",Authorization:`Bearer ${t}`},body:JSON.stringify({nodeName:o,filename:n,contentType:l,sizeBytes:s})});if(!c.ok){let f=await c.text();return console.warn(`[artifacts] upload-url failed for ${o}/${n}: ${c.status} ${f.slice(0,200)}`),null}a=await c.json()}catch(c){return console.warn(`[artifacts] upload-url request errored for ${o}/${n}: ${c.message}`),null}try{let c=sr(i),f=a.requiredHeaders?{...a.requiredHeaders,"Content-Length":String(s)}:{"Content-Type":l,"Content-Length":String(s)},u=await fetch(a.url,{method:"PUT",headers:f,body:c,duplex:"half"});if(!u.ok)return console.warn(`[artifacts] S3 PUT failed for ${o}/${n}: ${u.status}`),null}catch(c){return console.warn(`[artifacts] S3 PUT errored for ${o}/${n}: ${c.message}`),null}return{nodeName:o,filename:n,s3Key:a.s3Key,contentType:l,sizeBytes:s}}async function mr({sessionPath:e,executionId:t,apiUrl:r,apiKey:o}){let n={uploaded:[],skipped:[]};if(!e||!ir(e))return n;if(!r||!o||!t)return console.warn("[artifacts] uploader missing required input \u2014 skipping"),n;let i;try{i=rt(e)}catch(f){return console.warn(`[artifacts] could not read session folder ${e}: ${f.message}`),n}let s=[];for(let f of i){let u=ot(e,f),S;try{S=pe(u)}catch{continue}if(!S.isDirectory()||f.startsWith(".")||f.startsWith("_"))continue;let K=nt(u);for(let _ of K){let k=ar(u,_).split(cr).join("/"),U;try{U=pe(_).size}catch{continue}if(U>tt){n.skipped.push({nodeName:f,filename:k,reason:`size ${U} > ${tt}`});continue}if(U===0){n.skipped.push({nodeName:f,filename:k,reason:"empty"});continue}s.push({apiUrl:r,apiKey:o,executionId:t,nodeName:f,filename:k,absolutePath:_,sizeBytes:U,contentType:dr(k)})}}if(s.length===0)return n;let l=4,a=s.slice(),c=Array.from({length:Math.min(l,a.length)},async()=>{for(;a.length;){let f=a.shift(),u=await gr(f);u?n.uploaded.push(u):n.skipped.push({nodeName:f.nodeName,filename:f.filename,reason:"upload 
failed"})}});if(await Promise.all(c),n.uploaded.length>0)try{let f=await fetch(`${r}/${t}/artifacts`,{method:"POST",headers:{"Content-Type":"application/json",Authorization:`Bearer ${o}`},body:JSON.stringify({files:n.uploaded})});if(!f.ok){let u=await f.text();console.warn(`[artifacts] record failed: ${f.status} ${u.slice(0,200)}`)}}catch(f){console.warn(`[artifacts] record errored: ${f.message}`)}return n}var tt,fr,pr,ur,it=d(()=>{tt=500*1024*1024,fr=new Set([".DS_Store","Thumbs.db",".zibby-stop"]),pr=new Set(["node_modules",".git","dist",".zibby","__tests__","__mocks__",".cache",".next",".turbo"]),ur={".webm":"video/webm",".mp4":"video/mp4",".mov":"video/quicktime",".png":"image/png",".jpg":"image/jpeg",".jpeg":"image/jpeg",".gif":"image/gif",".txt":"text/plain",".md":"text/markdown",".csv":"text/csv",".log":"text/plain",".json":"application/json",".yaml":"application/yaml",".yml":"application/yaml",".pdf":"application/pdf",".zip":"application/zip",".tar":"application/x-tar",".gz":"application/gzip"}});import{mkdirSync as pt,writeFileSync as hr,existsSync as A,readFileSync as at}from"fs";import{join as $,dirname as wr,resolve as ct}from"path";import{pathToFileURL as de}from"url";import{execSync as yr,spawn as lt}from"node:child_process";import{SQSClient as yt,SendMessageCommand as xt}from"@aws-sdk/client-sqs";var Z=null;function St(){return Z||(Z=new yt({region:process.env.AWS_REGION||"ap-southeast-2"})),Z}async function $e(e,{status:t,error:r}){let{EXECUTION_ID:o,SQS_AUTH_TOKEN:n,PROGRESS_API_URL:i,PROGRESS_QUEUE_URL:s,PROJECT_API_TOKEN:l}=e;if(!o)return;let a={executionId:o,...n&&{sqsAuthToken:n},status:t,...r&&{error:r},timestamp:new Date().toISOString()},c=i?"HTTP":s?"SQS":"NONE",f=JSON.stringify(a).length;console.log(`Sending final status: ${t} via ${c} (${(f/1024).toFixed(1)}KB)`);try{if(i)await $t(i,o,a,l);else if(s){let u=["completed","failed","insufficient_context","blocked"].includes(t)?"execution_completed":"progress_update";await 
_t(s,o,a,u)}else{console.warn("No transport configured for final status \u2014 neither PROGRESS_API_URL nor PROGRESS_QUEUE_URL set");return}console.log(`Final status ${t} sent via ${c}`)}catch(u){console.error(`Failed to send final status (${t}) via ${c}:`),console.error(` Payload: ${(f/1024).toFixed(1)}KB`),console.error(` Error: ${u.message}`),u.name&&console.error(` Error type: ${u.name}`),u.code&&console.error(` Error code: ${u.code}`)}}async function $t(e,t,r,o){let n=`${e}/${t}/progress`,i={"Content-Type":"application/json"};o&&(i.Authorization=`Bearer ${o}`);let s=await fetch(n,{method:"POST",headers:i,body:JSON.stringify(r)});if(!s.ok){let l=await s.text();throw new Error(`HTTP ${s.status}: ${l}`)}}async function _t(e,t,r,o="progress_update"){let n=JSON.stringify(r),i=(n.length/1024).toFixed(1);n.length>256*1024&&console.error(`\u274C SQS message too large: ${i}KB (limit 256KB) for ${t} [${o}]`),await St().send(new xt({QueueUrl:e,MessageBody:n,MessageGroupId:t,MessageAttributes:{executionId:{DataType:"String",StringValue:t},messageType:{DataType:"String",StringValue:o}}}))}function _e({workflowType:e,jobId:t,projectId:r,agentType:o,model:n,egressIp:i,egressKind:s}){let l="\u2500".repeat(60),a=`${o||"default"} (model: ${n||"auto"})`,c=["",l,` Workflow: ${e}`,` Job: ${t||"local"}`,` Project: ${r||"none"}`,` Agent: ${a}`];if(i||s){let f=i||"unknown",u=s||"static";c.push(` Egress: ${f} (${u})`)}return c.push(l),c.join(`
|
|
3
|
-
`)}import{existsSync as
|
|
2
|
+
var pe=Object.defineProperty;var de=(t,e)=>()=>(t&&(e=t(t=0)),e);var fe=(t,e)=>{for(var o in e)pe(t,o,{get:e[o],enumerable:!0})};var V={};fe(V,{uploadSessionArtifacts:()=>Ce});import{readdirSync as M,statSync as x,createReadStream as Oe,existsSync as Re}from"node:fs";import{join as q,relative as Te,sep as Pe,extname as Ae}from"node:path";function xe(t){let e=Ae(t).toLowerCase();return Ie[e]||"application/octet-stream"}function X(t){let e=[],o;try{o=M(t)}catch{return e}for(let s of o){if(ke.has(s)||s.startsWith(".")||ve.has(s))continue;let n=q(t,s),i;try{i=x(n)}catch{continue}i.isDirectory()?e.push(...X(n)):i.isFile()&&e.push(n)}return e}async function Ue({apiUrl:t,apiKey:e,executionId:o,nodeName:s,filename:n,absolutePath:i,sizeBytes:r,contentType:d}){let p;try{let c=await fetch(`${t}/${o}/artifacts/upload-url`,{method:"POST",headers:{"Content-Type":"application/json",Authorization:`Bearer ${e}`},body:JSON.stringify({nodeName:s,filename:n,contentType:d,sizeBytes:r})});if(!c.ok){let a=await c.text();return console.warn(`[artifacts] upload-url failed for ${s}/${n}: ${c.status} ${a.slice(0,200)}`),null}p=await c.json()}catch(c){return console.warn(`[artifacts] upload-url request errored for ${s}/${n}: ${c.message}`),null}try{let c=Oe(i),a=p.requiredHeaders?{...p.requiredHeaders,"Content-Length":String(r)}:{"Content-Type":d,"Content-Length":String(r)},f=await fetch(p.url,{method:"PUT",headers:a,body:c,duplex:"half"});if(!f.ok)return console.warn(`[artifacts] S3 PUT failed for ${s}/${n}: ${f.status}`),null}catch(c){return console.warn(`[artifacts] S3 PUT errored for ${s}/${n}: ${c.message}`),null}return{nodeName:s,filename:n,s3Key:p.s3Key,contentType:d,sizeBytes:r}}async function Ce({sessionPath:t,executionId:e,apiUrl:o,apiKey:s}){let n={uploaded:[],skipped:[]};if(!t||!Re(t))return n;if(!o||!s||!e)return console.warn("[artifacts] uploader missing required input \u2014 skipping"),n;let i;try{i=M(t)}catch(a){return console.warn(`[artifacts] could not read session folder 
${t}: ${a.message}`),n}let r=[];for(let a of i){let f=q(t,a),u;try{u=x(f)}catch{continue}if(!u.isDirectory()||a.startsWith(".")||a.startsWith("_"))continue;let P=X(f);for(let w of P){let b=Te(f,w).split(Pe).join("/"),m;try{m=x(w).size}catch{continue}if(m>Y){n.skipped.push({nodeName:a,filename:b,reason:`size ${m} > ${Y}`});continue}if(m===0){n.skipped.push({nodeName:a,filename:b,reason:"empty"});continue}r.push({apiUrl:o,apiKey:s,executionId:e,nodeName:a,filename:b,absolutePath:w,sizeBytes:m,contentType:xe(b)})}}if(r.length===0)return n;let d=4,p=r.slice(),c=Array.from({length:Math.min(d,p.length)},async()=>{for(;p.length;){let a=p.shift(),f=await Ue(a);f?n.uploaded.push(f):n.skipped.push({nodeName:a.nodeName,filename:a.filename,reason:"upload failed"})}});if(await Promise.all(c),n.uploaded.length>0)try{let a=await fetch(`${o}/${e}/artifacts`,{method:"POST",headers:{"Content-Type":"application/json",Authorization:`Bearer ${s}`},body:JSON.stringify({files:n.uploaded})});if(!a.ok){let f=await a.text();console.warn(`[artifacts] record failed: ${a.status} ${f.slice(0,200)}`)}}catch(a){console.warn(`[artifacts] record errored: ${a.message}`)}return n}var Y,ke,ve,Ie,Z=de(()=>{Y=500*1024*1024,ke=new Set([".DS_Store","Thumbs.db",".zibby-stop"]),ve=new Set(["node_modules",".git","dist",".zibby","__tests__","__mocks__",".cache",".next",".turbo"]),Ie={".webm":"video/webm",".mp4":"video/mp4",".mov":"video/quicktime",".png":"image/png",".jpg":"image/jpeg",".jpeg":"image/jpeg",".gif":"image/gif",".txt":"text/plain",".md":"text/markdown",".csv":"text/csv",".log":"text/plain",".json":"application/json",".yaml":"application/yaml",".yml":"application/yaml",".pdf":"application/pdf",".zip":"application/zip",".tar":"application/x-tar",".gz":"application/gzip"}});import{mkdirSync as se,writeFileSync as De,existsSync as _,readFileSync as ee}from"fs";import{join as h,dirname as Ne,resolve as te}from"path";import{pathToFileURL as C}from"url";import{execSync as je,spawn as 
oe}from"node:child_process";import{SQSClient as ue,SendMessageCommand as ge}from"@aws-sdk/client-sqs";var v=null;function he(){return v||(v=new ue({region:process.env.AWS_REGION||"ap-southeast-2"})),v}async function G(t,{status:e,error:o,finalState:s}){let{EXECUTION_ID:n,SQS_AUTH_TOKEN:i,PROGRESS_API_URL:r,PROGRESS_QUEUE_URL:d,PROJECT_API_TOKEN:p}=t;if(!n)return;let c={executionId:n,...i&&{sqsAuthToken:i},status:e,...o&&{error:o},...s&&typeof s=="object"&&Object.keys(s).length>0?{finalState:s}:{},timestamp:new Date().toISOString()},a=r?"HTTP":d?"SQS":"NONE",f=JSON.stringify(c).length;console.log(`Sending final status: ${e} via ${a} (${(f/1024).toFixed(1)}KB)`);try{if(r)await we(r,n,c,p);else if(d){let u=["completed","failed","insufficient_context","blocked"].includes(e)?"execution_completed":"progress_update";await ye(d,n,c,u)}else{console.warn("No transport configured for final status \u2014 neither PROGRESS_API_URL nor PROGRESS_QUEUE_URL set");return}console.log(`Final status ${e} sent via ${a}`)}catch(u){console.error(`Failed to send final status (${e}) via ${a}:`),console.error(` Payload: ${(f/1024).toFixed(1)}KB`),console.error(` Error: ${u.message}`),u.name&&console.error(` Error type: ${u.name}`),u.code&&console.error(` Error code: ${u.code}`)}}async function we(t,e,o,s){let n=`${t}/${e}/progress`,i={"Content-Type":"application/json"};s&&(i.Authorization=`Bearer ${s}`);let r=await fetch(n,{method:"POST",headers:i,body:JSON.stringify(o)});if(!r.ok){let d=await r.text();throw new Error(`HTTP ${r.status}: ${d}`)}}async function ye(t,e,o,s="progress_update"){let n=JSON.stringify(o),i=(n.length/1024).toFixed(1);n.length>256*1024&&console.error(`\u274C SQS message too large: ${i}KB (limit 256KB) for ${e} [${s}]`),await he().send(new ge({QueueUrl:t,MessageBody:n,MessageGroupId:e,MessageAttributes:{executionId:{DataType:"String",StringValue:e},messageType:{DataType:"String",StringValue:s}}}))}function 
J({workflowType:t,jobId:e,projectId:o,agentType:s,model:n,egressIp:i,egressKind:r}){let d="\u2500".repeat(60),p=`${s||"default"} (model: ${n||"auto"})`,c=["",d,` Workflow: ${t}`,` Job: ${e||"local"}`,` Project: ${o||"none"}`,` Agent: ${p}`];if(i||r){let a=i||"unknown",f=r||"static";c.push(` Egress: ${a} (${f})`)}return c.push(d),c.join(`
|
|
3
|
+
`)}import{existsSync as me,writeFileSync as Se}from"fs";import{join as Q}from"path";var I={width:1280,height:720},_e="on",Ee="tests",$e="test-results/playwright";function be(t={}){let e=t.viewport&&typeof t.viewport=="object"?{width:Number(t.viewport.width)||I.width,height:Number(t.viewport.height)||I.height}:I,o=typeof t.video=="string"?t.video:_e,s=t.paths?.generated||Ee,n=t.playwrightArtifacts!==!1,i=n?"on":"off",r=n?"only-on-failure":"off";return`// AUTO-GENERATED at workflow run start by @zibby/cli's
|
|
4
4
|
// playwright-config-materialize.js. Derived from the bundled
|
|
5
5
|
// zibby.config.json (which @zibby/workflow-deploy serialized from your
|
|
6
6
|
// project's .zibby.config.mjs). Do NOT edit by hand \u2014 re-run a workflow
|
|
@@ -10,23 +10,23 @@ var wt=Object.defineProperty;var d=(e,t)=>()=>(e&&(t=e(e=0)),t);var V=(e,t)=>{fo
|
|
|
10
10
|
import { defineConfig } from '@playwright/test';
|
|
11
11
|
|
|
12
12
|
export default defineConfig({
|
|
13
|
-
testDir: '${
|
|
14
|
-
outputDir: '${
|
|
13
|
+
testDir: '${s.replace(/'/g,"\\'")}',
|
|
14
|
+
outputDir: '${$e}',
|
|
15
15
|
timeout: 60000,
|
|
16
16
|
retries: 0,
|
|
17
17
|
workers: 1,
|
|
18
18
|
|
|
19
19
|
use: {
|
|
20
20
|
headless: process.env.PLAYWRIGHT_HEADLESS === '1',
|
|
21
|
-
viewport: { width: ${
|
|
22
|
-
video: '${
|
|
21
|
+
viewport: { width: ${e.width}, height: ${e.height} },
|
|
22
|
+
video: '${o}',
|
|
23
23
|
trace: '${i}',
|
|
24
|
-
screenshot: '${
|
|
24
|
+
screenshot: '${r}',
|
|
25
25
|
},
|
|
26
26
|
|
|
27
27
|
reporter: [['list']],
|
|
28
28
|
});
|
|
29
|
-
`}function
|
|
30
|
-
Workflow execution failed: ${
|
|
31
|
-
[done] ${
|
|
32
|
-
[done] ${
|
|
29
|
+
`}function H(t,e){if(!t)return{written:!1,path:null,reason:"no workspaceDir"};for(let n of["js","mjs","ts"]){let i=Q(t,`playwright.config.${n}`);if(me(i))return{written:!1,path:i,reason:`existing playwright.config.${n} in workspace`}}let o=Q(t,"playwright.config.js"),s=be(e||{});try{return Se(o,s,"utf-8"),{written:!0,path:o,reason:"derived from zibby.config"}}catch(n){return{written:!1,path:null,reason:`write failed: ${n.message}`}}}import"@zibby/core";var A=process.env.WORKSPACE||"/workspace";async function Fe(t,e){se(e,{recursive:!0});let o=Date.now();console.log("[setup] Fetching bundle...");let s=setInterval(()=>{let i=((Date.now()-o)/1e3).toFixed(1);console.log(`[setup] still fetching (${i}s elapsed)`)},3e3);try{await new Promise((i,r)=>{let d=oe("curl",["-fsSL",t],{stdio:["ignore","pipe","inherit"]}),p=oe("tar",["-xzf","-","-C",e],{stdio:["pipe","inherit","inherit"]});d.stdout.pipe(p.stdin);let c,a,f=()=>{if(c!==void 0&&a!==void 0){if(c!==0)return r(new Error(`curl exited ${c}`));if(a!==0)return r(new Error(`tar exited ${a}`));i()}};d.on("close",u=>{c=u,f()}),p.on("close",u=>{a=u,f()}),d.on("error",r),p.on("error",r)})}finally{clearInterval(s)}let n=((Date.now()-o)/1e3).toFixed(1);return console.log(`[setup] Bundle extracted (${n}s)`),e}async function ne(){let t=process.env.WORKFLOW_SOURCES_URL;if(!t)throw new Error("WORKFLOW_SOURCES_URL env var is required");let e=await fetch(t);if(!e.ok)throw new Error(`Failed to fetch sources: ${e.status} ${e.statusText}`);let o=await e.json();if(!o.sources||typeof o.sources!="object")throw new Error('Invalid sources payload \u2014 missing "sources" map');return o}function Le(t){let e=t?.agent;if(!e)return null;if(typeof e=="string")return e;if(typeof e=="object"){if(typeof e.provider=="string")return e.provider;for(let o of["claude","cursor","codex","gemini"])if(e[o])return o}return null}function Be(t,e){let o=te(e),s=0;for(let[n,i]of Object.entries(t)){let r=te(e,n);if(!r.startsWith(`${o}/`)&&r!==o){console.error(` 
\u26D4 Skipping unsafe path: ${n}`);continue}se(Ne(r),{recursive:!0}),De(r,i,"utf-8"),s++}return s}async function ze(){let t=process.env.ZIBBY_EGRESS_PROXY_URL,e=process.env.ZIBBY_EGRESS_TOKEN;if(!(!t||!e))try{let o=await import("undici"),s=new o.ProxyAgent({uri:t,token:`Bearer ${e}`});o.setGlobalDispatcher(s)}catch(o){console.warn(`[setup] Failed to install egress proxy dispatcher: ${o.message}`)}}async function Ke(){if(process.env.ZIBBY_EGRESS_IP)return{ip:process.env.ZIBBY_EGRESS_IP,kind:"static"};try{let t=new AbortController,e=setTimeout(()=>t.abort(),1500),o=await fetch("https://api.ipify.org?format=json",{signal:t.signal});return clearTimeout(e),o.ok?{ip:(await o.json())?.ip||null,kind:"dynamic"}:{ip:null,kind:"dynamic"}}catch{return{ip:null,kind:"dynamic"}}}async function We(t,e){let o=h(t,"graph.mjs");if(!_(o))throw new Error(`graph.mjs not found at ${o}`);let s=await import(C(o).href),n=e?.entryClass,i=n&&s[n]||s.default||Object.values(s).find(r=>typeof r=="function"&&r.prototype?.buildGraph);if(!i)throw new Error("No WorkflowAgent class found in graph.mjs");return i}async function ct(){if(!process.env.NODE_PATH){process.env.NODE_PATH="/opt/zibby/packages";let l=await import("module");l.default._initPaths&&l.default._initPaths()}await ze();let{WORKFLOW_JOB_ID:t,WORKFLOW_TYPE:e,PROJECT_ID:o,AGENT_TYPE:s,MODEL:n}=process.env;e||(console.error("Missing WORKFLOW_TYPE env var"),process.exit(1));let i=process.env.WORKFLOW_BUNDLE_URL,r,d={},p,c;if(i){p=e,r=h(A,".zibby","workflows",p);try{await Fe(i,r);try{let l=await ne();d=l.input||{},c=l.version}catch{}}catch(l){console.warn(`[setup] Bundle extract failed (${l.message}); falling back to source install`),r=null}}if(!r){let l=await ne(),{sources:g,input:S,workflowType:E,version:y}=l;d=S||{},p=E||e,c=y,console.log(`[setup] Workflow v${c||"?"} (${Object.keys(g).length} files)`),r=h(A,".zibby","workflows",p);let T=Be(g,r);console.log(`[setup] Wrote ${T} files`),console.log("[setup] Installing 
dependencies...");try{je("npm install --silent --no-audit --no-fund",{cwd:r,stdio:"inherit"}),console.log("[setup] Dependencies installed")}catch($){console.warn(`[setup] npm install failed: ${$.message}`)}}let a={},f=h(r,"workflow.json");_(f)&&(a=JSON.parse(ee(f,"utf-8")));let u={},P=h(r,"zibby.config.json");if(_(P))try{u=JSON.parse(ee(P,"utf-8")),console.log("[setup] Loaded user config from zibby.config.json")}catch(l){console.warn(`[setup] Failed to parse zibby.config.json: ${l.message} \u2014 falling back to defaults`)}let w=H(A,u);w.written?console.log(`[setup] Materialized playwright.config.js \u2192 ${w.path} (${w.reason})`):w.path&&console.log(`[setup] Using existing playwright config: ${w.path}`);let b=Le(u)||s,m=await Ke();console.log(J({workflowType:e,jobId:t,projectId:o,agentType:b,model:n,egressIp:m.ip,egressKind:m.kind}));let D=await We(r,a);console.log(`[setup] Loaded ${D.name}`);let O=[],N=h(r,"node_modules","@zibby","agent-workflow"),j=h(r,"node_modules","@zibby","core","node_modules","@zibby","agent-workflow");_(N)&&O.push({kind:"hoisted",path:N}),_(j)&&O.push({kind:"nested",path:j});let k=process.env.ZIBBY_RUN_DIAG==="1";if(k){let{readdirSync:l}=await import("fs");console.log(` [diag] @zibby/agent-workflow copies in bundle: ${O.length}`);for(let g of O)console.log(` [diag] ${g.kind}: ${g.path}`);try{let g=h(r,"node_modules","@zibby");_(g)&&console.log(` [diag] node_modules/@zibby/ contents: [${l(g).join(", ")}]`)}catch{}}let F=h(r,"node_modules","@zibby","core","dist","index.js");if(_(F)&&O.length>0)try{let l=await import(C(F).href),g=[l.AssistantStrategy,l.CursorAgentStrategy,l.ClaudeAgentStrategy,l.CodexAgentStrategy,l.GeminiAgentStrategy].filter(Boolean);for(let S of O){let E=h(S.path,"dist","index.js");if(!_(E))continue;let y=await import(C(E).href),T=k?y.listStrategies():null;for(let $ of g)try{y.registerStrategy(new $)}catch(le){console.warn(` register ${$.name} into ${S.kind} failed: ${le.message}`)}k&&console.log(` [diag] ${S.kind} 
registry: before=[${T.join(",")||"empty"}] after=[${y.listStrategies().join(",")||"empty"}]`)}console.log("[setup] Registered 5 agent strategies (assistant, cursor, claude, codex, gemini)")}catch(l){console.warn(`[setup] Failed to bridge strategies: ${l.message}`)}else console.warn("[setup] No @zibby/core or @zibby/agent-workflow in bundle \u2014 agent strategies may be unavailable");let re=Date.now(),L=new D({workflow:p||e}),ie=L.buildGraph(),B=process.env.WORKFLOW_UUID||null,z=process.env.ZIBBY_CONVERSATION_ID||null,ae={...d||{},cwd:A,runId:t||`run-${Date.now()}`,config:u,input:d||{},...B?{workflowUuid:B}:{},...z?{conversationId:z}:{}};console.log("");let R;try{R=await ie.run(L,ae)}catch(l){console.error(`
|
|
30
|
+
Workflow execution failed: ${l.message}`),console.error(l.stack),await U("failed",l.message),process.exit(1)}let K=((Date.now()-re)/1e3).toFixed(1),ce=R?.success!==!1,W=p||e;if(process.env.UPLOAD_ARTIFACTS!=="0"){let l=R?.state?.sessionPath,g=process.env.PROGRESS_API_URL||process.env.ZIBBY_API_BASE,S=process.env.PROJECT_API_TOKEN,E=process.env.WORKFLOW_JOB_ID;if(l&&g&&S&&E)try{let{uploadSessionArtifacts:y}=await Promise.resolve().then(()=>(Z(),V)),{uploaded:T,skipped:$}=await y({sessionPath:l,executionId:E,apiUrl:g,apiKey:S});console.log(`[artifacts] uploaded ${T.length} file(s)${$.length?`, skipped ${$.length}`:""}`)}catch(y){console.warn(`[artifacts] uploader threw: ${y.message}`)}else console.log("[artifacts] skipping upload \u2014 sessionPath/apiUrl/apiKey/executionId missing")}ce?(console.log(`
|
|
31
|
+
[done] ${W} completed in ${K}s`),await U("completed",null,R?.state)):(console.error(`
|
|
32
|
+
[done] ${W} failed after ${K}s`),await U("failed",R?.error||"Workflow execution failed",R?.state),process.exit(1))}async function U(t,e=null,o=void 0){let s={EXECUTION_ID:process.env.WORKFLOW_JOB_ID,PROGRESS_API_URL:process.env.PROGRESS_API_URL,PROGRESS_QUEUE_URL:process.env.PROGRESS_QUEUE_URL,PROJECT_API_TOKEN:process.env.PROJECT_API_TOKEN,SQS_AUTH_TOKEN:process.env.SQS_AUTH_TOKEN};if(s.EXECUTION_ID)try{await G(s,{status:t,...e&&{error:e},...o?{finalState:o}:{}})}catch(n){console.error(`\u26A0\uFE0F Failed to report status: ${n.message}`)}}export{Le as resolveAgentFromConfig,ct as runWorkflowCommand};
|
package/dist/package.json
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
import{SQSClient as _,SendMessageCommand as u}from"@aws-sdk/client-sqs";var E=null;function O(){return E||(E=new _({region:process.env.AWS_REGION||"ap-southeast-2"})),E}async function $(
|
|
1
|
+
import{SQSClient as _,SendMessageCommand as u}from"@aws-sdk/client-sqs";var E=null;function O(){return E||(E=new _({region:process.env.AWS_REGION||"ap-southeast-2"})),E}async function $(l,e,i,t){let{EXECUTION_ID:o,SQS_AUTH_TOKEN:r,PROGRESS_API_URL:n,PROGRESS_QUEUE_URL:a,PROJECT_API_TOKEN:g}=t;if(!o)return;let c={executionId:o,...r&&{sqsAuthToken:r},step:{name:l,status:e,logs:i,timestamp:new Date().toISOString(),...e==="success"&&{completedAt:new Date().toISOString()}},status:e==="failed"?"failed":"running"};try{n?await f(n,o,c,g):a&&await p(a,o,c)}catch(S){console.error(`\u26A0\uFE0F Failed to send progress: ${S.message}`)}}async function m(l,e,i){let{EXECUTION_ID:t,SQS_AUTH_TOKEN:o,PROGRESS_API_URL:r,PROGRESS_QUEUE_URL:n,PROJECT_API_TOKEN:a}=l;if(!t||!i)return;let g=JSON.stringify(i).length;console.log(`Sending artifact: ${e} (${(g/1024).toFixed(1)}KB)`);let c={executionId:t,...o&&{sqsAuthToken:o},artifacts:{[e]:i},timestamp:new Date().toISOString()},S=r?"HTTP":n?"SQS":"NONE",d=JSON.stringify(c).length;try{if(r)await f(r,t,c,a);else if(n)await p(n,t,c);else{console.warn(`\u26A0\uFE0F No transport configured for artifact ${e} \u2014 neither PROGRESS_API_URL nor PROGRESS_QUEUE_URL set`);return}console.log(`Artifact ${e} sent via ${S} (payload=${(d/1024).toFixed(1)}KB, value=${(g/1024).toFixed(1)}KB)`)}catch(s){console.error(`Failed to send artifact ${e} via ${S}:`),console.error(` Payload size: ${(d/1024).toFixed(1)}KB, Value size: ${(g/1024).toFixed(1)}KB`),console.error(` Error: ${s.message}`),s.name&&console.error(` Error type: ${s.name}`),s.code&&console.error(` Error code: ${s.code}`),d>256*1024&&console.error(" \u26A0\uFE0F Message exceeds SQS 256KB limit! 
Consider splitting or compressing.")}}async function P(l,{status:e,error:i,finalState:t}){let{EXECUTION_ID:o,SQS_AUTH_TOKEN:r,PROGRESS_API_URL:n,PROGRESS_QUEUE_URL:a,PROJECT_API_TOKEN:g}=l;if(!o)return;let c={executionId:o,...r&&{sqsAuthToken:r},status:e,...i&&{error:i},...t&&typeof t=="object"&&Object.keys(t).length>0?{finalState:t}:{},timestamp:new Date().toISOString()},S=n?"HTTP":a?"SQS":"NONE",d=JSON.stringify(c).length;console.log(`Sending final status: ${e} via ${S} (${(d/1024).toFixed(1)}KB)`);try{if(n)await f(n,o,c,g);else if(a){let s=["completed","failed","insufficient_context","blocked"].includes(e)?"execution_completed":"progress_update";await p(a,o,c,s)}else{console.warn("No transport configured for final status \u2014 neither PROGRESS_API_URL nor PROGRESS_QUEUE_URL set");return}console.log(`Final status ${e} sent via ${S}`)}catch(s){console.error(`Failed to send final status (${e}) via ${S}:`),console.error(` Payload: ${(d/1024).toFixed(1)}KB`),console.error(` Error: ${s.message}`),s.name&&console.error(` Error type: ${s.name}`),s.code&&console.error(` Error code: ${s.code}`)}}async function f(l,e,i,t){let o=`${l}/${e}/progress`,r={"Content-Type":"application/json"};t&&(r.Authorization=`Bearer ${t}`);let n=await fetch(o,{method:"POST",headers:r,body:JSON.stringify(i)});if(!n.ok){let a=await n.text();throw new Error(`HTTP ${n.status}: ${a}`)}}async function p(l,e,i,t="progress_update"){let o=JSON.stringify(i),r=(o.length/1024).toFixed(1);o.length>256*1024&&console.error(`\u274C SQS message too large: ${r}KB (limit 256KB) for ${e} [${t}]`),await O().send(new u({QueueUrl:l,MessageBody:o,MessageGroupId:e,MessageAttributes:{executionId:{DataType:"String",StringValue:e},messageType:{DataType:"String",StringValue:t}}}))}export{m as reportArtifact,P as reportFinalStatus,$ as reportProgress};
|
package/package.json
CHANGED
|
@@ -1 +0,0 @@
|
|
|
1
|
-
// Claude SDK session persistence: mirrors the local session transcript
// (~/.claude/projects/<encoded-cwd>/<sessionId>.jsonl) to and from S3 via
// short-lived pre-signed URLs minted by the workflow API. All failures are
// reported as { pulled|pushed: false, reason } result objects — never thrown.
import { existsSync, mkdirSync, readFileSync, writeFileSync, statSync } from "node:fs";
import { homedir } from "node:os";
import { join, dirname } from "node:path";

/** Encode a working directory as the Claude SDK project dir name ("/" -> "-"). */
function sdkProjectDir(cwd) {
  return cwd.replace(/\//g, "-");
}

/** Absolute path of the local session transcript JSONL for a cwd + session id. */
function sessionJsonlPath(cwd, sessionId) {
  return join(homedir(), ".claude", "projects", sdkProjectDir(cwd), `${sessionId}.jsonl`);
}

/**
 * Download a prior session transcript from S3 into the local SDK location.
 * @returns {Promise<{pulled: boolean, bytes?: number, reason?: string}>}
 */
async function sessionPullFromS3({ apiUrl, apiKey, workflowUuid, sessionId, cwd }) {
  if (!apiUrl || !apiKey || !workflowUuid || !sessionId || !cwd) {
    return { pulled: false, reason: "missing required arg" };
  }

  // Step 1: mint a pre-signed GET URL from the workflow API.
  let minted;
  try {
    const response = await fetch(
      `${apiUrl}/workflows/${workflowUuid}/sessions/${sessionId}/download-url`,
      {
        method: "POST",
        headers: { Authorization: `Bearer ${apiKey}`, "Content-Type": "application/json" },
      }
    );
    if (!response.ok) {
      return { pulled: false, reason: `mint download-url failed: HTTP ${response.status}` };
    }
    minted = await response.json();
  } catch (err) {
    return { pulled: false, reason: `mint download-url threw: ${err.message}` };
  }

  // Step 2: fetch the transcript bytes. A 404/403 means no transcript was
  // ever uploaded — i.e. this is the first turn, not an error.
  let content;
  try {
    const response = await fetch(minted.url, { method: "GET" });
    if (response.status === 404 || response.status === 403) {
      return { pulled: false, reason: "no prior session (first turn)" };
    }
    if (!response.ok) {
      return { pulled: false, reason: `GET presigned failed: HTTP ${response.status}` };
    }
    content = Buffer.from(await response.arrayBuffer());
  } catch (err) {
    return { pulled: false, reason: `GET presigned threw: ${err.message}` };
  }

  // Step 3: write the transcript where the SDK expects to resume it.
  const localPath = sessionJsonlPath(cwd, sessionId);
  try {
    mkdirSync(dirname(localPath), { recursive: true });
    writeFileSync(localPath, content);
    return { pulled: true, bytes: content.length };
  } catch (err) {
    return { pulled: false, reason: `write local JSONL failed: ${err.message}` };
  }
}

/**
 * Upload the local session transcript to S3 so the next turn can resume it.
 * @returns {Promise<{pushed: boolean, bytes?: number, reason?: string}>}
 */
async function sessionPushToS3({ apiUrl, apiKey, workflowUuid, sessionId, cwd }) {
  if (!apiUrl || !apiKey || !workflowUuid || !sessionId || !cwd) {
    return { pushed: false, reason: "missing required arg" };
  }

  const localPath = sessionJsonlPath(cwd, sessionId);
  if (!existsSync(localPath)) {
    return { pushed: false, reason: "no local JSONL to push" };
  }

  let content;
  let contentLength;
  try {
    content = readFileSync(localPath);
    contentLength = statSync(localPath).size;
  } catch (err) {
    return { pushed: false, reason: `read local JSONL failed: ${err.message}` };
  }

  // Mint a pre-signed PUT URL from the workflow API.
  let minted;
  try {
    const response = await fetch(
      `${apiUrl}/workflows/${workflowUuid}/sessions/${sessionId}/upload-url`,
      {
        method: "POST",
        headers: { Authorization: `Bearer ${apiKey}`, "Content-Type": "application/json" },
      }
    );
    if (!response.ok) {
      return { pushed: false, reason: `mint upload-url failed: HTTP ${response.status}` };
    }
    minted = await response.json();
  } catch (err) {
    return { pushed: false, reason: `mint upload-url threw: ${err.message}` };
  }

  // PUT the bytes, forwarding any headers the pre-signed URL requires.
  try {
    const response = await fetch(minted.url, {
      method: "PUT",
      headers: { ...(minted.requiredHeaders || {}), "Content-Length": String(contentLength) },
      body: content,
    });
    return response.ok
      ? { pushed: true, bytes: contentLength }
      : { pushed: false, reason: `PUT presigned failed: HTTP ${response.status}` };
  } catch (err) {
    return { pushed: false, reason: `PUT presigned threw: ${err.message}` };
  }
}

// Pure path helpers exposed for unit tests only.
const H = { sdkProjectDir, sessionJsonlPath };
export { H as __test, sessionPullFromS3, sessionPushToS3 };
|