@zibby/cli 0.4.20 → 0.4.22
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/commands/analyze-graph.js +12 -12
- package/dist/commands/implement.js +12 -12
- package/dist/commands/workflows/run.js +5 -5
- package/dist/commands/workflows/trigger-helpers.js +1 -1
- package/dist/commands/workflows/trigger.js +33 -33
- package/dist/package.json +1 -1
- package/dist/utils/progress-reporter.js +1 -1
- package/dist/utils/session-sync.js +1 -1
- package/package.json +1 -1
|
@@ -1,18 +1,18 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
|
-
import{dirname as
|
|
3
|
-
`);S=$.pop()||"";for(let y of $){let O=y.trim();O&&
|
|
4
|
-
`);n!==
|
|
5
|
-
`),r(e,"in_progress",n,s).catch(h=>{
|
|
6
|
-
`)}))},500);try{await r(e,"in_progress","",s);let n=await o(),h=((Date.now()-d)/1e3).toFixed(1);R=!0,clearInterval(T),await new Promise(
|
|
7
|
-
`);if(
|
|
8
|
-
`),n.success){await r(e,"success",w||`Completed in ${h}s`,s);let
|
|
2
|
+
import{dirname as b,join as G,resolve as J}from"path";import{fileURLToPath as L}from"url";import{readFileSync as j,existsSync as B}from"fs";import{compileGraph as Q,validateGraphConfig as z,WorkflowGraph as W}from"@zibby/agent-workflow";import{invokeAgent as q}from"@zibby/core";import{buildAnalysisGraph as H}from"@zibby/core/templates/code-analysis/graph.js";import{analysisStateSchema as X}from"@zibby/core/templates/code-analysis/state.js";import"@zibby/core/templates/register-nodes.js";async function U(r,t){let a=process.env.CONTEXT_PRESIGNED_URL;if(!a)throw new Error("CONTEXT_PRESIGNED_URL env var is required");console.log("\u{1F4E6} Fetching execution context via pre-signed URL");let e=await fetch(a);if(!e.ok)throw new Error(`Failed to fetch execution context: ${e.status}`);let o=await e.json();return console.log(` \u2705 Got ticketContext (${JSON.stringify(o.ticketContext||{}).length} chars)`),o.nodeConfigs&&Object.keys(o.nodeConfigs).length>0&&console.log(` \u2705 Got nodeConfigs (${Object.keys(o.nodeConfigs).length} nodes configured)`),{ticketContext:o.ticketContext||{},nodeConfigs:o.nodeConfigs||{},graphConfig:o.graphConfig||null,repos:o.repos||[]}}import{SQSClient as M,SendMessageCommand as D}from"@aws-sdk/client-sqs";var I=null;function F(){return I||(I=new M({region:process.env.AWS_REGION||"ap-southeast-2"})),I}async function A(r,t,a,e){let{EXECUTION_ID:o,SQS_AUTH_TOKEN:s,PROGRESS_API_URL:d,PROGRESS_QUEUE_URL:i,PROJECT_API_TOKEN:_}=e;if(!o)return;let g={executionId:o,...s&&{sqsAuthToken:s},step:{name:r,status:t,logs:a,timestamp:new Date().toISOString(),...t==="success"&&{completedAt:new Date().toISOString()}},status:t==="failed"?"failed":"running"};try{d?await v(d,o,g,_):i&&await k(i,o,g)}catch(u){console.error(`\u26A0\uFE0F Failed to send progress: ${u.message}`)}}async function N(r,t,a){let{EXECUTION_ID:e,SQS_AUTH_TOKEN:o,PROGRESS_API_URL:s,PROGRESS_QUEUE_URL:d,PROJECT_API_TOKEN:i}=r;if(!e||!a)return;let _=JSON.stringify(a).length;console.log(`Sending 
artifact: ${t} (${(_/1024).toFixed(1)}KB)`);let g={executionId:e,...o&&{sqsAuthToken:o},artifacts:{[t]:a},timestamp:new Date().toISOString()},u=s?"HTTP":d?"SQS":"NONE",f=JSON.stringify(g).length;try{if(s)await v(s,e,g,i);else if(d)await k(d,e,g);else{console.warn(`\u26A0\uFE0F No transport configured for artifact ${t} \u2014 neither PROGRESS_API_URL nor PROGRESS_QUEUE_URL set`);return}console.log(`Artifact ${t} sent via ${u} (payload=${(f/1024).toFixed(1)}KB, value=${(_/1024).toFixed(1)}KB)`)}catch(c){console.error(`Failed to send artifact ${t} via ${u}:`),console.error(` Payload size: ${(f/1024).toFixed(1)}KB, Value size: ${(_/1024).toFixed(1)}KB`),console.error(` Error: ${c.message}`),c.name&&console.error(` Error type: ${c.name}`),c.code&&console.error(` Error code: ${c.code}`),f>256*1024&&console.error(" \u26A0\uFE0F Message exceeds SQS 256KB limit! Consider splitting or compressing.")}}async function P(r,{status:t,error:a,finalState:e}){let{EXECUTION_ID:o,SQS_AUTH_TOKEN:s,PROGRESS_API_URL:d,PROGRESS_QUEUE_URL:i,PROJECT_API_TOKEN:_}=r;if(!o)return;let g={executionId:o,...s&&{sqsAuthToken:s},status:t,...a&&{error:a},...e&&typeof e=="object"&&Object.keys(e).length>0?{finalState:e}:{},timestamp:new Date().toISOString()},u=d?"HTTP":i?"SQS":"NONE",f=JSON.stringify(g).length;console.log(`Sending final status: ${t} via ${u} (${(f/1024).toFixed(1)}KB)`);try{if(d)await v(d,o,g,_);else if(i){let c=["completed","failed","insufficient_context","blocked"].includes(t)?"execution_completed":"progress_update";await k(i,o,g,c)}else{console.warn("No transport configured for final status \u2014 neither PROGRESS_API_URL nor PROGRESS_QUEUE_URL set");return}console.log(`Final status ${t} sent via ${u}`)}catch(c){console.error(`Failed to send final status (${t}) via ${u}:`),console.error(` Payload: ${(f/1024).toFixed(1)}KB`),console.error(` Error: ${c.message}`),c.name&&console.error(` Error type: ${c.name}`),c.code&&console.error(` Error code: ${c.code}`)}}async function 
v(r,t,a,e){let o=`${r}/${t}/progress`,s={"Content-Type":"application/json"};e&&(s.Authorization=`Bearer ${e}`);let d=await fetch(o,{method:"POST",headers:s,body:JSON.stringify(a)});if(!d.ok){let i=await d.text();throw new Error(`HTTP ${d.status}: ${i}`)}}async function k(r,t,a,e="progress_update"){let o=JSON.stringify(a),s=(o.length/1024).toFixed(1);o.length>256*1024&&console.error(`\u274C SQS message too large: ${s}KB (limit 256KB) for ${t} [${e}]`),await F().send(new D({QueueUrl:r,MessageBody:o,MessageGroupId:t,MessageAttributes:{executionId:{DataType:"String",StringValue:t},messageType:{DataType:"String",StringValue:e}}}))}import{writeMcpConfig as V}from"@zibby/core/utils/mcp-config-writer.js";var Y=L(import.meta.url),Z=b(Y),ee=JSON.parse(j(G(Z,"../../package.json"),"utf-8")),oe={analyze_ticket:r=>({key:"analysis",value:{raw:r.raw,structured:r.output}}),generate_code:r=>({key:"codeImplementation",value:r.output?.codeImplementation}),generate_test_cases:r=>({key:"tests",value:r.output?.tests}),finalize:r=>({key:"report",value:r.output?.report})};function te(r,t){return async function(e,o,s){let d=Date.now(),i=[],_="",g=console.log,u=process.stdout.write.bind(process.stdout),f=process.stderr.write.bind(process.stderr),c=!1;console.log=(...n)=>{let h=n.map(w=>typeof w=="string"?w:JSON.stringify(w)).join(" ");i.push(h),c=!0,g(...n),c=!1};let S="";process.stdout.write=(n,h,w)=>{if(!c){let p=typeof n=="string"?n:n.toString();S+=p;let $=S.split(`
|
|
3
|
+
`);S=$.pop()||"";for(let y of $){let O=y.trim();O&&i.push(O)}}return u(n,h,w)},g(`[Middleware] Started capturing logs for ${e}`);let R=!1,T=setInterval(()=>{if(R)return;let n=i.join(`
|
|
4
|
+
`);n!==_&&n.length>0&&(_=n,f(`\u{1F4E1} [Middleware] Sending live update for ${e}: ${n.length} chars, ${i.length} lines
|
|
5
|
+
`),r(e,"in_progress",n,s).catch(h=>{f(`\u26A0\uFE0F [Middleware] Failed to send live update: ${h.message}
|
|
6
|
+
`)}))},500);try{await r(e,"in_progress","",s);let n=await o(),h=((Date.now()-d)/1e3).toFixed(1);R=!0,clearInterval(T),await new Promise(p=>setImmediate(p)),console.log=g,process.stdout.write=u,S.trim()&&(i.push(S.trim()),S="");let w=i.join(`
|
|
7
|
+
`);if(f(`\u{1F4E1} [Middleware] Sending final update for ${e}: ${w.length} chars, ${i.length} total lines captured
|
|
8
|
+
`),n.success){await r(e,"success",w||`Completed in ${h}s`,s);let p=oe[e];if(p){let{key:$,value:y}=p(n);y&&await t(s,$,y)}}else await r(e,"failed",`${w}
|
|
9
9
|
|
|
10
|
-
Error: ${n.error}`,s);return n}catch(n){R=!0,clearInterval(T),await new Promise(w=>setImmediate(w)),console.log=
|
|
10
|
+
Error: ${n.error}`,s);return n}catch(n){R=!0,clearInterval(T),await new Promise(w=>setImmediate(w)),console.log=g,process.stdout.write=u;let h=`${i.join(`
|
|
11
11
|
`)}
|
|
12
12
|
|
|
13
|
-
Error: ${n.message}`;throw await r(e,"failed",h,s),n}}}async function re(r){let{EXECUTION_ID:t,TICKET_KEY:a,PROJECT_ID:e,REPOS:o,PROGRESS_QUEUE_URL:s,PROGRESS_API_URL:d,SQS_AUTH_TOKEN:
|
|
14
|
-
\u{1F680} Zibby Analysis (Graph Mode)`),console.log(`@zibby/cli v${ee.version} | Node.js ${process.version}`),console.log("\u2500".repeat(60)),console.log(`Ticket: ${a}`),console.log(`Repositories: ${R.length}`),console.log(`Workspace: ${T}`),console.log(`AI Model: ${
|
|
13
|
+
Error: ${n.message}`;throw await r(e,"failed",h,s),n}}}async function re(r){let{EXECUTION_ID:t,TICKET_KEY:a,PROJECT_ID:e,REPOS:o,PROGRESS_QUEUE_URL:s,PROGRESS_API_URL:d,SQS_AUTH_TOKEN:i,PROJECT_API_TOKEN:_,GITHUB_TOKEN:g,MODEL:u}=process.env;(!t||!a||!e)&&(console.error("\u274C Missing required environment variables"),console.error(" Required: EXECUTION_ID, TICKET_KEY, PROJECT_ID"),process.exit(1));let f=await U(t,e),c=f.ticketContext,S=f.nodeConfigs||{},R=o?JSON.parse(o):f.repos,T=process.env.WORKSPACE||"/workspace",n=L(import.meta.resolve("@zibby/core/package.json")),h=G(b(n),"templates","code-analysis","prompts");console.log(`
|
|
14
|
+
\u{1F680} Zibby Analysis (Graph Mode)`),console.log(`@zibby/cli v${ee.version} | Node.js ${process.version}`),console.log("\u2500".repeat(60)),console.log(`Ticket: ${a}`),console.log(`Repositories: ${R.length}`),console.log(`Workspace: ${T}`),console.log(`AI Model: ${u||"auto"}`),console.log("\u2500".repeat(60));let w=te(A,N),p,$,y=null;if(r?.workflow){let l=J(process.cwd(),r.workflow);if(B(l)||(console.error(`\u274C Workflow file not found: ${l}`),process.exit(1)),l.endsWith(".js")||l.endsWith(".mjs"))try{let{pathToFileURL:m}=await import("url");y=await import(m(l).href),$=`local JS module (${l})`}catch(m){console.error(`\u274C Failed to load workflow JS module: ${m.message}`),process.exit(1)}else{try{let E=JSON.parse(j(l,"utf-8")),{_meta:C,...K}=E;p=K,$=`local file (${l})`}catch(E){console.error(`\u274C Failed to parse workflow file: ${E.message}`),process.exit(1)}let m=z(p);m.valid||(console.error("\u274C Invalid workflow file:"),m.errors.forEach(E=>console.error(` - ${E}`)),process.exit(1))}}else if(f.graphConfig)p=f.graphConfig,$="custom (from project workflow)";else{let l=new W;H(l),p=l.serialize(),$="default"}let O;if(y){let m={...y.nodeConfigs||{},...S};O=y.buildGraph({nodeMiddleware:w}),console.log(`\u{1F4D0} Graph source: ${$}`),console.log(` Nodes: ${O.nodes.size}`),S=m}else{if(S&&Object.keys(S).length>0){let l=p.nodeConfigs||{},m={...l};for(let[E,C]of Object.entries(S))m[E]={...l[E],...C};p.nodeConfigs=m}console.log(`\u{1F4D0} Graph source: ${$}`),console.log(` Nodes: ${p.nodes?.length||0}`),console.log(` Edges: ${p.edges?.length||0}`),O=Q(p,{nodeMiddleware:w,stateSchema:X,invokeAgent:q})}V(S);let x={EXECUTION_ID:t,PROGRESS_QUEUE_URL:s,PROGRESS_API_URL:d,SQS_AUTH_TOKEN:i,PROJECT_API_TOKEN:_,workspace:T,repos:R,ticketContext:c,promptsDir:h,githubToken:g,model:u,nodeConfigs:S};try{let m=(await 
O.run(null,x)).state,E=m.analyze_ticket_output?.validation||m.analyze_ticket_output?.analysis?.structured?.validation,C="completed";E&&!E.canProceed&&(C=E.status==="insufficient_context"?"insufficient_context":"blocked"),console.log(`
|
|
15
15
|
\u{1F4CB} Validation: canProceed=${E?.canProceed}, status=${E?.status}, finalStatus=${C}`),console.log(`
|
|
16
16
|
\u{1F4CA} Sending final status: ${C}`),await P(x,{status:C}),console.log(`
|
|
17
|
-
\u2705 Analysis completed successfully`),process.exit(0)}catch(
|
|
18
|
-
\u274C Analysis failed:`,
|
|
17
|
+
\u2705 Analysis completed successfully`),process.exit(0)}catch(l){if(console.error(`
|
|
18
|
+
\u274C Analysis failed:`,l.message),t)try{console.log("\u{1F4E1} Reporting failure..."),await P(x,{status:"failed",error:l.message})}catch{console.error("\u26A0\uFE0F Failed to report error")}process.exit(1)}}import.meta.url===`file://${process.argv[1]}`&&re();export{re as analyzeCommand};
|
|
@@ -1,9 +1,9 @@
|
|
|
1
|
-
var A=(t=>typeof require<"u"?require:typeof Proxy<"u"?new Proxy(t,{get:(r,n)=>(typeof require<"u"?require:r)[n]}):t)(function(t){if(typeof require<"u")return require.apply(this,arguments);throw Error('Dynamic require of "'+t+'" is not supported')});import{existsSync as
|
|
2
|
-
`);
|
|
3
|
-
`);return await
|
|
4
|
-
`);throw await
|
|
1
|
+
var A=(t=>typeof require<"u"?require:typeof Proxy<"u"?new Proxy(t,{get:(r,n)=>(typeof require<"u"?require:r)[n]}):t)(function(t){if(typeof require<"u")return require.apply(this,arguments);throw Error('Dynamic require of "'+t+'" is not supported')});import{existsSync as y,readFileSync as O,writeFileSync as J}from"fs";import{join as a,dirname as B}from"path";import{fileURLToPath as Q}from"url";import{invokeAgent as X}from"@zibby/core";async function b(t,r){let n=process.env.CONTEXT_PRESIGNED_URL;if(!n)throw new Error("CONTEXT_PRESIGNED_URL env var is required");console.log("\u{1F4E6} Fetching execution context via pre-signed URL");let o=await fetch(n);if(!o.ok)throw new Error(`Failed to fetch execution context: ${o.status}`);let e=await o.json();return console.log(` \u2705 Got ticketContext (${JSON.stringify(e.ticketContext||{}).length} chars)`),e.nodeConfigs&&Object.keys(e.nodeConfigs).length>0&&console.log(` \u2705 Got nodeConfigs (${Object.keys(e.nodeConfigs).length} nodes configured)`),{ticketContext:e.ticketContext||{},nodeConfigs:e.nodeConfigs||{},graphConfig:e.graphConfig||null,repos:e.repos||[]}}import{SQSClient as K,SendMessageCommand as F}from"@aws-sdk/client-sqs";var v=null;function G(){return v||(v=new K({region:process.env.AWS_REGION||"ap-southeast-2"})),v}async function P(t,r,n,o){let{EXECUTION_ID:e,SQS_AUTH_TOKEN:s,PROGRESS_API_URL:c,PROGRESS_QUEUE_URL:g,PROJECT_API_TOKEN:i}=o;if(!e)return;let m={executionId:e,...s&&{sqsAuthToken:s},step:{name:t,status:r,logs:n,timestamp:new Date().toISOString(),...r==="success"&&{completedAt:new Date().toISOString()}},status:r==="failed"?"failed":"running"};try{c?await D(c,e,m,i):g&&await j(g,e,m)}catch(p){console.error(`\u26A0\uFE0F Failed to send progress: ${p.message}`)}}async function U(t,{status:r,error:n,finalState:o}){let{EXECUTION_ID:e,SQS_AUTH_TOKEN:s,PROGRESS_API_URL:c,PROGRESS_QUEUE_URL:g,PROJECT_API_TOKEN:i}=t;if(!e)return;let m={executionId:e,...s&&{sqsAuthToken:s},status:r,...n&&{error:n},...o&&typeof 
o=="object"&&Object.keys(o).length>0?{finalState:o}:{},timestamp:new Date().toISOString()},p=c?"HTTP":g?"SQS":"NONE",S=JSON.stringify(m).length;console.log(`Sending final status: ${r} via ${p} (${(S/1024).toFixed(1)}KB)`);try{if(c)await D(c,e,m,i);else if(g){let d=["completed","failed","insufficient_context","blocked"].includes(r)?"execution_completed":"progress_update";await j(g,e,m,d)}else{console.warn("No transport configured for final status \u2014 neither PROGRESS_API_URL nor PROGRESS_QUEUE_URL set");return}console.log(`Final status ${r} sent via ${p}`)}catch(d){console.error(`Failed to send final status (${r}) via ${p}:`),console.error(` Payload: ${(S/1024).toFixed(1)}KB`),console.error(` Error: ${d.message}`),d.name&&console.error(` Error type: ${d.name}`),d.code&&console.error(` Error code: ${d.code}`)}}async function D(t,r,n,o){let e=`${t}/${r}/progress`,s={"Content-Type":"application/json"};o&&(s.Authorization=`Bearer ${o}`);let c=await fetch(e,{method:"POST",headers:s,body:JSON.stringify(n)});if(!c.ok){let g=await c.text();throw new Error(`HTTP ${c.status}: ${g}`)}}async function j(t,r,n,o="progress_update"){let e=JSON.stringify(n),s=(e.length/1024).toFixed(1);e.length>256*1024&&console.error(`\u274C SQS message too large: ${s}KB (limit 256KB) for ${r} [${o}]`),await G().send(new F({QueueUrl:t,MessageBody:e,MessageGroupId:r,MessageAttributes:{executionId:{DataType:"String",StringValue:r},messageType:{DataType:"String",StringValue:o}}}))}var H=Q(import.meta.url),M=B(H);async function ie(t){let{EXECUTION_ID:r,TICKET_KEY:n,PROJECT_ID:o,REPOS:e,_PRIMARY_REPO:s,_GITHUB_TOKEN:c,MODEL:g}=process.env;(!r||!n||!o)&&(console.error("\u274C Missing required environment variables:"),console.error(" EXECUTION_ID, TICKET_KEY, PROJECT_ID"),process.exit(1));let i=await b(r,o),m=i.ticketContext,p=e?JSON.parse(e):i.repos,S=p.find(w=>w.isPrimary)||p[0],d=process.cwd(),f={status:"running",steps:[]};try{await T("Start Environment",async()=>{}),await T("Clone 
Repositories",async()=>{let l=process.env.GITHUB_TOKEN,u=process.env.GITLAB_TOKEN||"",E=process.env.GITLAB_URL||"";for(let h of p){let R=a(d,h.name),C=h.url,I=h.provider==="gitlab"||E&&h.url.includes(new URL(E).host);if((h.provider==="github"||h.url.includes("github.com"))&&l)C=h.url.replace("https://github.com",`https://${l}@github.com`);else if(I&&u&&E)try{let k=new URL(E).host;C=h.url.replace(`https://${k}`,`https://oauth2:${u}@${k}`)}catch(k){console.warn(`\u26A0\uFE0F Failed to parse GITLAB_URL: ${k.message}`)}if(_(["git","clone",C,R],d),_(["git","checkout",h.branch],R),h.isPrimary){let k=`feature/${n.toLowerCase()}`;_(["git","checkout","-b",k],R)}}f.steps.push({name:"clone",status:"success",repoCount:p.length})});let w=await T("Load Ticket Context",async()=>(f.steps.push({name:"load_ticket",status:"success"}),m));await T("Install Dependencies",async()=>{for(let l of p){let u=a(d,l.name),E=L(u);try{_(E.installCommand,u)}catch{}}f.steps.push({name:"install_deps",status:"success"})});let $=await T("Detect Dev Command",async()=>{let l=a(d,S.name),u=["docker-compose.yml","docker-compose.yaml","compose.yml","compose.yaml"];for(let I of u)if(y(a(l,I)))return f.steps.push({name:"detect_dev",status:"success",command:"docker-compose up",type:"docker-compose"}),{command:"docker-compose up",type:"docker-compose",configFile:I};let E=a(l,"package.json");if(!y(E))return console.log(" \u26A0\uFE0F No package.json or docker-compose found"),f.steps.push({name:"detect_dev",status:"skipped"}),null;let R=JSON.parse(O(E,"utf-8")).scripts||{},C=null;return R.dev?C="npm run dev":R.start?C="npm start":R["dev:local"]&&(C="npm run dev:local"),C?(f.steps.push({name:"detect_dev",status:"success",command:C,type:"npm"}),{command:C,type:"npm"}):(f.steps.push({name:"detect_dev",status:"skipped"}),null)});await T("Start Dev Server",async()=>{let l=a(d,S.name),u="docker-compose.test.yml";return y(a(l,u))?(_(["docker","compose","-f",u,"up","-d"],l),await new 
Promise(E=>setTimeout(E,1e4)),f.steps.push({name:"start_server",status:"success"}),!0):(console.log(` \u26A0\uFE0F No ${u} found, skipping server startup`),f.steps.push({name:"start_server",status:"skipped"}),null)}),await T("Run AI Agent Implementation",async()=>{let l=p.map(h=>{let R=a(d,h.name);return{...h,...L(R)}}),u=q(w,l,$),E=a(d,".cursor-prompt.md");J(E,u),await X(u,{state:{model:g,workspace:d}},{print:!0}),f.steps.push({name:"ai_agent",status:"success"})});let Y=await T("Run E2E Tests",async()=>{let l=a(d,S.name);if(!y(a(l,"playwright.config.js"))&&!y(a(l,"playwright.config.ts")))return f.steps.push({name:"e2e_tests",status:"skipped"}),null;try{return _("npx playwright test --reporter=json",l),f.steps.push({name:"e2e_tests",status:"success"}),{passed:!0}}catch(u){throw _("docker compose -f docker-compose.test.yml down",l,{allowFailure:!0}),new Error(`E2E tests failed: ${u.message}`,{cause:u})}});try{_("docker compose -f docker-compose.test.yml down",a(d,S.name),{allowFailure:!0})}catch{}let x=await T("Create Pull Request",async()=>{let l=a(d,S.name),u=`feature/${n.toLowerCase()}`;return _(["git","add","."],l),_(["git","commit","-m",`feat(${n}): ${w.summary}`],l),_(["git","push","origin",u],l),console.log(" \u26A0\uFE0F PR creation via API removed (using SQS flow)"),f.steps.push({name:"create_pr",status:"skipped"}),null});await T("Report Results",async()=>{let l=a(d,S.name),u=a(l,"test-results"),E=[];y(u),f.status="completed",f.prUrl=x,f.videoUrls=E,await U(N(),{status:"completed",artifacts:{prUrl:x,videoUrls:E}})}),process.exit(0)}catch(w){console.error(""),console.error("\u2554\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2557"),console.error("\u2551 \u274C FAILED! 
\u2551"),console.error("\u255A\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u255D"),console.error(""),console.error("Error:",w.message),console.error("Stack:",w.stack);try{await U(N(),{status:"failed",error:w.message})}catch($){console.error("Failed to report error:",$.message)}process.exit(1)}}function N(){return{EXECUTION_ID:process.env.EXECUTION_ID,PROGRESS_API_URL:process.env.PROGRESS_API_URL,PROGRESS_QUEUE_URL:process.env.PROGRESS_QUEUE_URL,SQS_AUTH_TOKEN:process.env.SQS_AUTH_TOKEN,PROJECT_API_TOKEN:process.env.PROJECT_API_TOKEN}}async function T(t,r){let n=Date.now(),o=[],e="",s=console.log;console.log=(...i)=>{let m=i.join(" ");o.push(m),s(...i)};let c=N(),g=setInterval(()=>{let i=o.join(`
|
|
2
|
+
`);i!==e&&i.length>0&&(e=i,P(t,"running",i,c).catch(()=>{}))},2e3);try{await P(t,"running","",c);let i=await r(),m=`${((Date.now()-n)/1e3).toFixed(1)}s`;clearInterval(g),console.log=s;let p=o.join(`
|
|
3
|
+
`);return await P(t,"success",p||`Completed in ${m}`,c),i}catch(i){clearInterval(g),console.log=s;let m=o.join(`
|
|
4
|
+
`);throw await P(t,"failed",`${m}
|
|
5
5
|
|
|
6
|
-
Error: ${
|
|
6
|
+
Error: ${i.message}`,c),i}}function _(t,r,n={}){try{let{spawnSync:o}=A("child_process"),e;if(Array.isArray(t)){let[s,...c]=t;e=o(s,c,{cwd:r,encoding:"utf-8",stdio:["pipe","pipe","pipe"]})}else e=o(t,{cwd:r,shell:!0,encoding:"utf-8",stdio:["pipe","pipe","pipe"]});if(e.stdout&&console.log(e.stdout),e.stderr&&console.log(e.stderr),e.status!==0&&!n.allowFailure){let s=Array.isArray(t)?t.join(" "):t;throw new Error(`Command failed with exit code ${e.status}: ${s}`)}return e.stdout||e.stderr}catch(o){if(n.allowFailure)return null;throw o}}function L(t){let r=a(t,".zibby.yml");if(y(r))try{let e=A("js-yaml").load(O(r,"utf-8"));return{name:e.name||"Custom Project",framework:e.framework||"Custom",language:e.language||"Custom",testCommand:e.test||"make test",installCommand:e.install||"make install",custom:!0}}catch{console.warn("Invalid .zibby.yml, falling back to auto-detection")}let n=a(t,"package.json");if(y(n)){let o=JSON.parse(O(n,"utf-8")),e={...o.dependencies,...o.devDependencies},s="Node.js";return e.next?s="Next.js":e["react-scripts"]?s="Create React App":e.vite&&e.react?s="React + Vite":e["@angular/core"]?s="Angular":e.vue?s="Vue.js":e.express&&(s="Express.js"),{name:o.name||"Unknown Project",framework:s,language:"JavaScript/TypeScript",testCommand:o.scripts?.test||"npm test",installCommand:"npm install"}}return y(a(t,"requirements.txt"))||y(a(t,"pyproject.toml"))?{name:"Python Project",framework:y(a(t,"manage.py"))?"Django":y(a(t,"app.py"))?"Flask":"Python",language:"Python",testCommand:"pytest",installCommand:"pip install -r requirements.txt"}:y(a(t,"Gemfile"))?{name:"Ruby Project",framework:"Rails",language:"Ruby",testCommand:"bundle exec rspec",installCommand:"bundle install"}:y(a(t,"go.mod"))?{name:"Go Project",framework:"Go",language:"Go",testCommand:"go test ./...",installCommand:"go mod download"}:y(a(t,"pom.xml"))?{name:"Java Project",framework:"Spring Boot",language:"Java",testCommand:"./mvnw test",installCommand:"./mvnw install"}:{name:"Unknown 
Project",framework:"Unknown",language:"Unknown",testCommand:"make test",installCommand:"make install"}}function q(t,r,n){let o=a(M,"../../prompts/implement-ticket.md"),e;try{e=O(o,"utf-8")}catch{e=`
|
|
7
7
|
# Implement Ticket: {{TICKET_KEY}}
|
|
8
8
|
|
|
9
9
|
## Project Context
|
|
@@ -34,7 +34,7 @@ You are implementing this ticket. Follow these steps:
|
|
|
34
34
|
5. Fix any linter errors
|
|
35
35
|
|
|
36
36
|
Now implement this ticket completely!
|
|
37
|
-
`.trim()}let s=r.find(
|
|
37
|
+
`.trim()}let s=r.find(m=>m.isPrimary)||r[0],c;n?.type==="docker-compose"?c=`\`docker-compose up\` (using ${n.configFile})`:n?.command?c=`\`cd ${s.name} && ${n.command}\``:c="`npm run dev` (or check package.json scripts)";let g;if(r.length===1)g=`
|
|
38
38
|
You are working in **${s.name}**, a ${s.framework} project.
|
|
39
39
|
|
|
40
40
|
**Commands:**
|
|
@@ -42,11 +42,11 @@ You are working in **${s.name}**, a ${s.framework} project.
|
|
|
42
42
|
- Run tests: \`cd ${s.name} && ${s.testCommand}\`
|
|
43
43
|
|
|
44
44
|
You have full access to the codebase in the current directory.
|
|
45
|
-
`.trim();else{let
|
|
46
|
-
`);
|
|
45
|
+
`.trim();else{let m=r.map(p=>`- **${p.name}/** (${p.framework})${p.isPrimary?" \u2190 **MAKE CHANGES HERE**":" (reference only)"}`).join(`
|
|
46
|
+
`);g=`
|
|
47
47
|
You are working in a **multi-repository** setup with ${r.length} repositories:
|
|
48
48
|
|
|
49
|
-
${
|
|
49
|
+
${m}
|
|
50
50
|
|
|
51
51
|
**Primary Repository:** ${s.name}
|
|
52
52
|
- This is where you should implement the feature
|
|
@@ -55,11 +55,11 @@ ${p}
|
|
|
55
55
|
- Run tests: \`cd ${s.name} && ${s.testCommand}\`
|
|
56
56
|
|
|
57
57
|
**Other Repositories:**
|
|
58
|
-
${r.filter(
|
|
58
|
+
${r.filter(p=>!p.isPrimary).map(p=>`- **${p.name}**: You can read code from here for reference (shared libraries, services, etc.)`).join(`
|
|
59
59
|
`)||"(none)"}
|
|
60
60
|
|
|
61
61
|
**Important:** Make all code changes in the \`${s.name}/\` directory only.
|
|
62
|
-
`.trim()}let
|
|
62
|
+
`.trim()}let i=e.replace(/\{\{TICKET_KEY\}\}/g,t.ticketKey||t.key||"UNKNOWN").replace(/\{\{PROJECT_CONTEXT\}\}/g,g).replace(/\{\{TICKET_SUMMARY\}\}/g,t.summary||"No summary").replace(/\{\{TICKET_DESCRIPTION\}\}/g,t.description||"No description provided").replace(/\{\{ACCEPTANCE_CRITERIA\}\}/g,t.acceptanceCriteria||"Not specified");if(t.additionalContext){let m=`## Additional Context from User
|
|
63
63
|
${t.additionalContext}
|
|
64
64
|
|
|
65
|
-
`;
|
|
65
|
+
`;i=i.replace(/\{\{#if ADDITIONAL_CONTEXT\}\}[\s\S]*?\{\{\/if\}\}/g,m)}else i=i.replace(/\{\{#if ADDITIONAL_CONTEXT\}\}[\s\S]*?\{\{\/if\}\}/g,"");return i}export{ie as implementCommand};
|
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
|
-
var wt=Object.defineProperty;var u=(e,t)=>()=>(e&&(t=e(e=0)),t);var V=(e,t)=>{for(var r in t)wt(e,r,{get:t[r],enumerable:!0})};var Re,Oe=u(()=>{Re="ffffffff-ffff-ffff-ffff-ffffffffffff"});var Ue,Pe=u(()=>{Ue="00000000-0000-0000-0000-000000000000"});var Ae,Ie=u(()=>{Ae=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-8][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000|ffffffff-ffff-ffff-ffff-ffffffffffff)$/i});function At(e){return typeof e=="string"&&Ae.test(e)}var P,L=u(()=>{Ie();P=At});function It(e){if(!P(e))throw TypeError("Invalid UUID");let t;return Uint8Array.of((t=parseInt(e.slice(0,8),16))>>>24,t>>>16&255,t>>>8&255,t&255,(t=parseInt(e.slice(9,13),16))>>>8,t&255,(t=parseInt(e.slice(14,18),16))>>>8,t&255,(t=parseInt(e.slice(19,23),16))>>>8,t&255,(t=parseInt(e.slice(24,36),16))/1099511627776&255,t/4294967296&255,t>>>24&255,t>>>16&255,t>>>8&255,t&255)}var T,C=u(()=>{L();T=It});function w(e,t=0){return(m[e[t+0]]+m[e[t+1]]+m[e[t+2]]+m[e[t+3]]+"-"+m[e[t+4]]+m[e[t+5]]+"-"+m[e[t+6]]+m[e[t+7]]+"-"+m[e[t+8]]+m[e[t+9]]+"-"+m[e[t+10]]+m[e[t+11]]+m[e[t+12]]+m[e[t+13]]+m[e[t+14]]+m[e[t+15]]).toLowerCase()}function bt(e,t=0){let r=w(e,t);if(!P(r))throw TypeError("Stringified UUID is invalid");return r}var m,be,R=u(()=>{L();m=[];for(let e=0;e<256;++e)m.push((e+256).toString(16).slice(1));be=bt});import{randomFillSync as vt}from"crypto";function O(){return B>J.length-16&&(vt(J),B=0),J.slice(B,B+=16)}var J,B,H=u(()=>{J=new Uint8Array(256),B=J.length});function kt(e,t,r){let o,n=e?._v6??!1;if(e){let i=Object.keys(e);i.length===1&&i[0]==="_v6"&&(e=void 0)}if(e)o=ve(e.random??e.rng?.()??O(),e.msecs,e.nsecs,e.clockseq,e.node,t,r);else{let i=Date.now(),s=O();Dt(j,i,s),o=ve(s,j.msecs,j.nsecs,n?void 0:j.clockseq,n?void 0:j.node,t,r)}return t??w(o)}function Dt(e,t,r){return e.msecs??=-1/0,e.nsecs??=0,t===e.msecs?(e.nsecs++,e.nsecs>=1e4&&(e.node=void 0,e.nsecs=0)):t>e.msecs?e.nsecs=0:t<e.msecs&&(e.node=void 
0),e.node||(e.node=r.slice(10,16),e.node[0]|=1,e.clockseq=(r[8]<<8|r[9])&16383),e.msecs=t,e}function ve(e,t,r,o,n,i,s=0){if(e.length<16)throw new Error("Random bytes length must be >= 16");if(!i)i=new Uint8Array(16),s=0;else if(s<0||s+16>i.length)throw new RangeError(`UUID byte range ${s}:${s+15} is out of buffer bounds`);t??=Date.now(),r??=0,o??=(e[8]<<8|e[9])&16383,n??=e.slice(10,16),t+=122192928e5;let l=((t&268435455)*1e4+r)%4294967296;i[s++]=l>>>24&255,i[s++]=l>>>16&255,i[s++]=l>>>8&255,i[s++]=l&255;let a=t/4294967296*1e4&268435455;i[s++]=a>>>8&255,i[s++]=a&255,i[s++]=a>>>24&15|16,i[s++]=a>>>16&255,i[s++]=o>>>8|128,i[s++]=o&255;for(let c=0;c<6;++c)i[s++]=n[c];return i}var j,Y,ee=u(()=>{H();R();j={};Y=kt});function F(e){let t=typeof e=="string"?T(e):e,r=Nt(t);return typeof e=="string"?w(r):r}function Nt(e){return Uint8Array.of((e[6]&15)<<4|e[7]>>4&15,(e[7]&15)<<4|(e[4]&240)>>4,(e[4]&15)<<4|(e[5]&240)>>4,(e[5]&15)<<4|(e[0]&240)>>4,(e[0]&15)<<4|(e[1]&240)>>4,(e[1]&15)<<4|(e[2]&240)>>4,96|e[2]&15,e[3],e[8],e[9],e[10],e[11],e[12],e[13],e[14],e[15])}var te=u(()=>{C();R()});import{createHash as Lt}from"crypto";function Ct(e){return Array.isArray(e)?e=Buffer.from(e):typeof e=="string"&&(e=Buffer.from(e,"utf8")),Lt("md5").update(e).digest()}var ke,De=u(()=>{ke=Ct});function jt(e){e=unescape(encodeURIComponent(e));let t=new Uint8Array(e.length);for(let r=0;r<e.length;++r)t[r]=e.charCodeAt(r);return t}function z(e,t,r,o,n,i){let s=typeof r=="string"?jt(r):r,l=typeof o=="string"?T(o):o;if(typeof o=="string"&&(o=T(o)),o?.length!==16)throw TypeError("Namespace must be array-like (16 iterable integer values, 0-255)");let a=new Uint8Array(16+s.length);if(a.set(l),a.set(s,l.length),a=t(a),a[6]=a[6]&15|e,a[8]=a[8]&63|128,n){if(i=i||0,i<0||i+16>n.length)throw new RangeError(`UUID byte range ${i}:${i+15} is out of buffer bounds`);for(let c=0;c<16;++c)n[i+c]=a[c];return n}return w(a)}var 
q,Q,re=u(()=>{C();R();q="6ba7b810-9dad-11d1-80b4-00c04fd430c8",Q="6ba7b811-9dad-11d1-80b4-00c04fd430c8"});function oe(e,t,r,o){return z(48,ke,e,t,r,o)}var Ne,Le=u(()=>{De();re();oe.DNS=q;oe.URL=Q;Ne=oe});import{randomUUID as Ft}from"crypto";var ne,Ce=u(()=>{ne={randomUUID:Ft}});function zt(e,t,r){if(ne.randomUUID&&!t&&!e)return ne.randomUUID();e=e||{};let o=e.random??e.rng?.()??O();if(o.length<16)throw new Error("Random bytes length must be >= 16");if(o[6]=o[6]&15|64,o[8]=o[8]&63|128,t){if(r=r||0,r<0||r+16>t.length)throw new RangeError(`UUID byte range ${r}:${r+15} is out of buffer bounds`);for(let n=0;n<16;++n)t[r+n]=o[n];return t}return w(o)}var je,Fe=u(()=>{Ce();H();R();je=zt});import{createHash as Kt}from"crypto";function Gt(e){return Array.isArray(e)?e=Buffer.from(e):typeof e=="string"&&(e=Buffer.from(e,"utf8")),Kt("sha1").update(e).digest()}var ze,Ke=u(()=>{ze=Gt});function se(e,t,r,o){return z(80,ze,e,t,r,o)}var Ge,We=u(()=>{Ke();re();se.DNS=q;se.URL=Q;Ge=se});function Wt(e,t,r){e??={},r??=0;let o=Y({...e,_v6:!0},new Uint8Array(16));if(o=F(o),t){if(r<0||r+16>t.length)throw new RangeError(`UUID byte range ${r}:${r+15} is out of buffer bounds`);for(let n=0;n<16;n++)t[r+n]=o[n];return t}return w(o)}var Be,Je=u(()=>{R();ee();te();Be=Wt});function ie(e){let t=typeof e=="string"?T(e):e,r=Bt(t);return typeof e=="string"?w(r):r}function Bt(e){return Uint8Array.of((e[3]&15)<<4|e[4]>>4&15,(e[4]&15)<<4|(e[5]&240)>>4,(e[5]&15)<<4|e[6]&15,e[7],(e[1]&15)<<4|(e[2]&240)>>4,(e[2]&15)<<4|(e[3]&240)>>4,16|(e[0]&240)>>4,(e[0]&15)<<4|(e[1]&240)>>4,e[8],e[9],e[10],e[11],e[12],e[13],e[14],e[15])}var He=u(()=>{C();R()});function Jt(e,t,r){let o;if(e)o=Ye(e.random??e.rng?.()??O(),e.msecs,e.seq,t,r);else{let n=Date.now(),i=O();Ht(ae,n,i),o=Ye(i,ae.msecs,ae.seq,t,r)}return t??w(o)}function Ht(e,t,r){return e.msecs??=-1/0,e.seq??=0,t>e.msecs?(e.seq=r[6]<<23|r[7]<<16|r[8]<<8|r[9],e.msecs=t):(e.seq=e.seq+1|0,e.seq===0&&e.msecs++),e}function Ye(e,t,r,o,n=0){if(e.length<16)throw new 
Error("Random bytes length must be >= 16");if(!o)o=new Uint8Array(16),n=0;else if(n<0||n+16>o.length)throw new RangeError(`UUID byte range ${n}:${n+15} is out of buffer bounds`);return t??=Date.now(),r??=e[6]*127<<24|e[7]<<16|e[8]<<8|e[9],o[n++]=t/1099511627776&255,o[n++]=t/4294967296&255,o[n++]=t/16777216&255,o[n++]=t/65536&255,o[n++]=t/256&255,o[n++]=t&255,o[n++]=112|r>>>28&15,o[n++]=r>>>20&255,o[n++]=128|r>>>14&63,o[n++]=r>>>6&255,o[n++]=r<<2&255|e[10]&3,o[n++]=e[11],o[n++]=e[12],o[n++]=e[13],o[n++]=e[14],o[n++]=e[15],o}var ae,qe,Qe=u(()=>{H();R();ae={};qe=Jt});function Yt(e){if(!P(e))throw TypeError("Invalid UUID");return parseInt(e.slice(14,15),16)}var Me,Ve=u(()=>{L();Me=Yt});var Ze={};V(Ze,{MAX:()=>Re,NIL:()=>Ue,parse:()=>T,stringify:()=>be,v1:()=>Y,v1ToV6:()=>F,v3:()=>Ne,v4:()=>je,v5:()=>Ge,v6:()=>Be,v6ToV1:()=>ie,v7:()=>qe,validate:()=>P,version:()=>Me});var Xe=u(()=>{Oe();Pe();C();R();ee();te();Le();Fe();We();Je();He();Qe();L();Ve()});var le={};V(le,{__test:()=>nr,sessionPullFromS3:()=>rr,sessionPushToS3:()=>or});import{existsSync as qt,mkdirSync as Qt,readFileSync as Mt,writeFileSync as Vt,statSync as Zt}from"node:fs";import{homedir as Xt}from"node:os";import{join as er,dirname as tr}from"node:path";function et(e){return e.replace(/\//g,"-")}function ce(e,t){return er(Xt(),".claude","projects",et(e),`${t}.jsonl`)}async function rr({apiUrl:e,apiKey:t,workflowUuid:r,sessionId:o,cwd:n}){if(!e||!t||!r||!o||!n)return{pulled:!1,reason:"missing required arg"};let i;try{let a=await fetch(`${e}/workflows/${r}/sessions/${o}/download-url`,{method:"POST",headers:{Authorization:`Bearer ${t}`,"Content-Type":"application/json"}});if(!a.ok)return{pulled:!1,reason:`mint download-url failed: HTTP ${a.status}`};i=await a.json()}catch(a){return{pulled:!1,reason:`mint download-url threw: ${a.message}`}}let s;try{let a=await fetch(i.url,{method:"GET"});if(a.status===404||a.status===403)return{pulled:!1,reason:"no prior session (first 
turn)"};if(!a.ok)return{pulled:!1,reason:`GET presigned failed: HTTP ${a.status}`};s=Buffer.from(await a.arrayBuffer())}catch(a){return{pulled:!1,reason:`GET presigned threw: ${a.message}`}}let l=ce(n,o);try{return Qt(tr(l),{recursive:!0}),Vt(l,s),{pulled:!0,bytes:s.length}}catch(a){return{pulled:!1,reason:`write local JSONL failed: ${a.message}`}}}async function or({apiUrl:e,apiKey:t,workflowUuid:r,sessionId:o,cwd:n}){if(!e||!t||!r||!o||!n)return{pushed:!1,reason:"missing required arg"};let i=ce(n,o);if(!qt(i))return{pushed:!1,reason:"no local JSONL to push"};let s,l;try{s=Mt(i),l=Zt(i).size}catch(c){return{pushed:!1,reason:`read local JSONL failed: ${c.message}`}}let a;try{let c=await fetch(`${e}/workflows/${r}/sessions/${o}/upload-url`,{method:"POST",headers:{Authorization:`Bearer ${t}`,"Content-Type":"application/json"}});if(!c.ok)return{pushed:!1,reason:`mint upload-url failed: HTTP ${c.status}`};a=await c.json()}catch(c){return{pushed:!1,reason:`mint upload-url threw: ${c.message}`}}try{let c=await fetch(a.url,{method:"PUT",headers:{...a.requiredHeaders||{},"Content-Length":String(l)},body:s});return c.ok?{pushed:!0,bytes:l}:{pushed:!1,reason:`PUT presigned failed: HTTP ${c.status}`}}catch(c){return{pushed:!1,reason:`PUT presigned threw: ${c.message}`}}}var nr,fe=u(()=>{nr={sdkProjectDir:et,sessionJsonlPath:ce}});var st={};V(st,{uploadSessionArtifacts:()=>mr});import{readdirSync as rt,statSync as pe,createReadStream as sr,existsSync as ir}from"node:fs";import{join as ot,relative as ar,sep as cr,extname as lr}from"node:path";function ur(e){let t=lr(e).toLowerCase();return dr[t]||"application/octet-stream"}function nt(e){let t=[],r;try{r=rt(e)}catch{return t}for(let o of r){if(fr.has(o)||o.startsWith(".")||pr.has(o))continue;let n=ot(e,o),i;try{i=pe(n)}catch{continue}i.isDirectory()?t.push(...nt(n)):i.isFile()&&t.push(n)}return t}async function gr({apiUrl:e,apiKey:t,executionId:r,nodeName:o,filename:n,absolutePath:i,sizeBytes:s,contentType:l}){let a;try{let 
c=await fetch(`${e}/${r}/artifacts/upload-url`,{method:"POST",headers:{"Content-Type":"application/json",Authorization:`Bearer ${t}`},body:JSON.stringify({nodeName:o,filename:n,contentType:l,sizeBytes:s})});if(!c.ok){let f=await c.text();return console.warn(`[artifacts] upload-url failed for ${o}/${n}: ${c.status} ${f.slice(0,200)}`),null}a=await c.json()}catch(c){return console.warn(`[artifacts] upload-url request errored for ${o}/${n}: ${c.message}`),null}try{let c=sr(i),f=a.requiredHeaders?{...a.requiredHeaders,"Content-Length":String(s)}:{"Content-Type":l,"Content-Length":String(s)},d=await fetch(a.url,{method:"PUT",headers:f,body:c,duplex:"half"});if(!d.ok)return console.warn(`[artifacts] S3 PUT failed for ${o}/${n}: ${d.status}`),null}catch(c){return console.warn(`[artifacts] S3 PUT errored for ${o}/${n}: ${c.message}`),null}return{nodeName:o,filename:n,s3Key:a.s3Key,contentType:l,sizeBytes:s}}async function mr({sessionPath:e,executionId:t,apiUrl:r,apiKey:o}){let n={uploaded:[],skipped:[]};if(!e||!ir(e))return n;if(!r||!o||!t)return console.warn("[artifacts] uploader missing required input \u2014 skipping"),n;let i;try{i=rt(e)}catch(f){return console.warn(`[artifacts] could not read session folder ${e}: ${f.message}`),n}let s=[];for(let f of i){let d=ot(e,f),S;try{S=pe(d)}catch{continue}if(!S.isDirectory()||f.startsWith(".")||f.startsWith("_"))continue;let K=nt(d);for(let _ of K){let k=ar(d,_).split(cr).join("/"),U;try{U=pe(_).size}catch{continue}if(U>tt){n.skipped.push({nodeName:f,filename:k,reason:`size ${U} > ${tt}`});continue}if(U===0){n.skipped.push({nodeName:f,filename:k,reason:"empty"});continue}s.push({apiUrl:r,apiKey:o,executionId:t,nodeName:f,filename:k,absolutePath:_,sizeBytes:U,contentType:ur(k)})}}if(s.length===0)return n;let l=4,a=s.slice(),c=Array.from({length:Math.min(l,a.length)},async()=>{for(;a.length;){let f=a.shift(),d=await gr(f);d?n.uploaded.push(d):n.skipped.push({nodeName:f.nodeName,filename:f.filename,reason:"upload 
failed"})}});if(await Promise.all(c),n.uploaded.length>0)try{let f=await fetch(`${r}/${t}/artifacts`,{method:"POST",headers:{"Content-Type":"application/json",Authorization:`Bearer ${o}`},body:JSON.stringify({files:n.uploaded})});if(!f.ok){let d=await f.text();console.warn(`[artifacts] record failed: ${f.status} ${d.slice(0,200)}`)}}catch(f){console.warn(`[artifacts] record errored: ${f.message}`)}return n}var tt,fr,pr,dr,it=u(()=>{tt=500*1024*1024,fr=new Set([".DS_Store","Thumbs.db",".zibby-stop"]),pr=new Set(["node_modules",".git","dist",".zibby","__tests__","__mocks__",".cache",".next",".turbo"]),dr={".webm":"video/webm",".mp4":"video/mp4",".mov":"video/quicktime",".png":"image/png",".jpg":"image/jpeg",".jpeg":"image/jpeg",".gif":"image/gif",".txt":"text/plain",".md":"text/markdown",".csv":"text/csv",".log":"text/plain",".json":"application/json",".yaml":"application/yaml",".yml":"application/yaml",".pdf":"application/pdf",".zip":"application/zip",".tar":"application/x-tar",".gz":"application/gzip"}});import{mkdirSync as pt,writeFileSync as hr,existsSync as A,readFileSync as at}from"fs";import{join as $,dirname as wr,resolve as ct}from"path";import{pathToFileURL as ue}from"url";import{execSync as yr,spawn as lt}from"node:child_process";import{SQSClient as yt,SendMessageCommand as xt}from"@aws-sdk/client-sqs";var Z=null;function St(){return Z||(Z=new yt({region:process.env.AWS_REGION||"ap-southeast-2"})),Z}async function $e(e,{status:t,error:r}){let{EXECUTION_ID:o,SQS_AUTH_TOKEN:n,PROGRESS_API_URL:i,PROGRESS_QUEUE_URL:s,PROJECT_API_TOKEN:l}=e;if(!o)return;let a={executionId:o,...n&&{sqsAuthToken:n},status:t,...r&&{error:r},timestamp:new Date().toISOString()},c=i?"HTTP":s?"SQS":"NONE",f=JSON.stringify(a).length;console.log(`Sending final status: ${t} via ${c} (${(f/1024).toFixed(1)}KB)`);try{if(i)await $t(i,o,a,l);else if(s){let d=["completed","failed","insufficient_context","blocked"].includes(t)?"execution_completed":"progress_update";await 
_t(s,o,a,d)}else{console.warn("No transport configured for final status \u2014 neither PROGRESS_API_URL nor PROGRESS_QUEUE_URL set");return}console.log(`Final status ${t} sent via ${c}`)}catch(d){console.error(`Failed to send final status (${t}) via ${c}:`),console.error(` Payload: ${(f/1024).toFixed(1)}KB`),console.error(` Error: ${d.message}`),d.name&&console.error(` Error type: ${d.name}`),d.code&&console.error(` Error code: ${d.code}`)}}async function $t(e,t,r,o){let n=`${e}/${t}/progress`,i={"Content-Type":"application/json"};o&&(i.Authorization=`Bearer ${o}`);let s=await fetch(n,{method:"POST",headers:i,body:JSON.stringify(r)});if(!s.ok){let l=await s.text();throw new Error(`HTTP ${s.status}: ${l}`)}}async function _t(e,t,r,o="progress_update"){let n=JSON.stringify(r),i=(n.length/1024).toFixed(1);n.length>256*1024&&console.error(`\u274C SQS message too large: ${i}KB (limit 256KB) for ${t} [${o}]`),await St().send(new xt({QueueUrl:e,MessageBody:n,MessageGroupId:t,MessageAttributes:{executionId:{DataType:"String",StringValue:t},messageType:{DataType:"String",StringValue:o}}}))}function _e({workflowType:e,jobId:t,projectId:r,agentType:o,model:n,egressIp:i,egressKind:s}){let l="\u2500".repeat(60),a=`${o||"default"} (model: ${n||"auto"})`,c=["",l,` Workflow: ${e}`,` Job: ${t||"local"}`,` Project: ${r||"none"}`,` Agent: ${a}`];if(i||s){let f=i||"unknown",d=s||"static";c.push(` Egress: ${f} (${d})`)}return c.push(l),c.join(`
|
|
2
|
+
var wt=Object.defineProperty;var u=(e,t)=>()=>(e&&(t=e(e=0)),t);var V=(e,t)=>{for(var r in t)wt(e,r,{get:t[r],enumerable:!0})};var Re,Oe=u(()=>{Re="ffffffff-ffff-ffff-ffff-ffffffffffff"});var Ue,Pe=u(()=>{Ue="00000000-0000-0000-0000-000000000000"});var Ae,Ie=u(()=>{Ae=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-8][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000|ffffffff-ffff-ffff-ffff-ffffffffffff)$/i});function At(e){return typeof e=="string"&&Ae.test(e)}var P,C=u(()=>{Ie();P=At});function It(e){if(!P(e))throw TypeError("Invalid UUID");let t;return Uint8Array.of((t=parseInt(e.slice(0,8),16))>>>24,t>>>16&255,t>>>8&255,t&255,(t=parseInt(e.slice(9,13),16))>>>8,t&255,(t=parseInt(e.slice(14,18),16))>>>8,t&255,(t=parseInt(e.slice(19,23),16))>>>8,t&255,(t=parseInt(e.slice(24,36),16))/1099511627776&255,t/4294967296&255,t>>>24&255,t>>>16&255,t>>>8&255,t&255)}var T,j=u(()=>{C();T=It});function y(e,t=0){return(h[e[t+0]]+h[e[t+1]]+h[e[t+2]]+h[e[t+3]]+"-"+h[e[t+4]]+h[e[t+5]]+"-"+h[e[t+6]]+h[e[t+7]]+"-"+h[e[t+8]]+h[e[t+9]]+"-"+h[e[t+10]]+h[e[t+11]]+h[e[t+12]]+h[e[t+13]]+h[e[t+14]]+h[e[t+15]]).toLowerCase()}function bt(e,t=0){let r=y(e,t);if(!P(r))throw TypeError("Stringified UUID is invalid");return r}var h,be,R=u(()=>{C();h=[];for(let e=0;e<256;++e)h.push((e+256).toString(16).slice(1));be=bt});import{randomFillSync as vt}from"crypto";function O(){return B>J.length-16&&(vt(J),B=0),J.slice(B,B+=16)}var J,B,H=u(()=>{J=new Uint8Array(256),B=J.length});function kt(e,t,r){let o,n=e?._v6??!1;if(e){let i=Object.keys(e);i.length===1&&i[0]==="_v6"&&(e=void 0)}if(e)o=ve(e.random??e.rng?.()??O(),e.msecs,e.nsecs,e.clockseq,e.node,t,r);else{let i=Date.now(),s=O();Dt(F,i,s),o=ve(s,F.msecs,F.nsecs,n?void 0:F.clockseq,n?void 0:F.node,t,r)}return t??y(o)}function Dt(e,t,r){return e.msecs??=-1/0,e.nsecs??=0,t===e.msecs?(e.nsecs++,e.nsecs>=1e4&&(e.node=void 0,e.nsecs=0)):t>e.msecs?e.nsecs=0:t<e.msecs&&(e.node=void 
0),e.node||(e.node=r.slice(10,16),e.node[0]|=1,e.clockseq=(r[8]<<8|r[9])&16383),e.msecs=t,e}function ve(e,t,r,o,n,i,s=0){if(e.length<16)throw new Error("Random bytes length must be >= 16");if(!i)i=new Uint8Array(16),s=0;else if(s<0||s+16>i.length)throw new RangeError(`UUID byte range ${s}:${s+15} is out of buffer bounds`);t??=Date.now(),r??=0,o??=(e[8]<<8|e[9])&16383,n??=e.slice(10,16),t+=122192928e5;let l=((t&268435455)*1e4+r)%4294967296;i[s++]=l>>>24&255,i[s++]=l>>>16&255,i[s++]=l>>>8&255,i[s++]=l&255;let a=t/4294967296*1e4&268435455;i[s++]=a>>>8&255,i[s++]=a&255,i[s++]=a>>>24&15|16,i[s++]=a>>>16&255,i[s++]=o>>>8|128,i[s++]=o&255;for(let c=0;c<6;++c)i[s++]=n[c];return i}var F,Y,ee=u(()=>{H();R();F={};Y=kt});function z(e){let t=typeof e=="string"?T(e):e,r=Nt(t);return typeof e=="string"?y(r):r}function Nt(e){return Uint8Array.of((e[6]&15)<<4|e[7]>>4&15,(e[7]&15)<<4|(e[4]&240)>>4,(e[4]&15)<<4|(e[5]&240)>>4,(e[5]&15)<<4|(e[0]&240)>>4,(e[0]&15)<<4|(e[1]&240)>>4,(e[1]&15)<<4|(e[2]&240)>>4,96|e[2]&15,e[3],e[8],e[9],e[10],e[11],e[12],e[13],e[14],e[15])}var te=u(()=>{j();R()});import{createHash as Lt}from"crypto";function Ct(e){return Array.isArray(e)?e=Buffer.from(e):typeof e=="string"&&(e=Buffer.from(e,"utf8")),Lt("md5").update(e).digest()}var ke,De=u(()=>{ke=Ct});function jt(e){e=unescape(encodeURIComponent(e));let t=new Uint8Array(e.length);for(let r=0;r<e.length;++r)t[r]=e.charCodeAt(r);return t}function K(e,t,r,o,n,i){let s=typeof r=="string"?jt(r):r,l=typeof o=="string"?T(o):o;if(typeof o=="string"&&(o=T(o)),o?.length!==16)throw TypeError("Namespace must be array-like (16 iterable integer values, 0-255)");let a=new Uint8Array(16+s.length);if(a.set(l),a.set(s,l.length),a=t(a),a[6]=a[6]&15|e,a[8]=a[8]&63|128,n){if(i=i||0,i<0||i+16>n.length)throw new RangeError(`UUID byte range ${i}:${i+15} is out of buffer bounds`);for(let c=0;c<16;++c)n[i+c]=a[c];return n}return y(a)}var 
q,Q,re=u(()=>{j();R();q="6ba7b810-9dad-11d1-80b4-00c04fd430c8",Q="6ba7b811-9dad-11d1-80b4-00c04fd430c8"});function oe(e,t,r,o){return K(48,ke,e,t,r,o)}var Ne,Le=u(()=>{De();re();oe.DNS=q;oe.URL=Q;Ne=oe});import{randomUUID as Ft}from"crypto";var ne,Ce=u(()=>{ne={randomUUID:Ft}});function zt(e,t,r){if(ne.randomUUID&&!t&&!e)return ne.randomUUID();e=e||{};let o=e.random??e.rng?.()??O();if(o.length<16)throw new Error("Random bytes length must be >= 16");if(o[6]=o[6]&15|64,o[8]=o[8]&63|128,t){if(r=r||0,r<0||r+16>t.length)throw new RangeError(`UUID byte range ${r}:${r+15} is out of buffer bounds`);for(let n=0;n<16;++n)t[r+n]=o[n];return t}return y(o)}var je,Fe=u(()=>{Ce();H();R();je=zt});import{createHash as Kt}from"crypto";function Gt(e){return Array.isArray(e)?e=Buffer.from(e):typeof e=="string"&&(e=Buffer.from(e,"utf8")),Kt("sha1").update(e).digest()}var ze,Ke=u(()=>{ze=Gt});function se(e,t,r,o){return K(80,ze,e,t,r,o)}var Ge,We=u(()=>{Ke();re();se.DNS=q;se.URL=Q;Ge=se});function Wt(e,t,r){e??={},r??=0;let o=Y({...e,_v6:!0},new Uint8Array(16));if(o=z(o),t){if(r<0||r+16>t.length)throw new RangeError(`UUID byte range ${r}:${r+15} is out of buffer bounds`);for(let n=0;n<16;n++)t[r+n]=o[n];return t}return y(o)}var Be,Je=u(()=>{R();ee();te();Be=Wt});function ie(e){let t=typeof e=="string"?T(e):e,r=Bt(t);return typeof e=="string"?y(r):r}function Bt(e){return Uint8Array.of((e[3]&15)<<4|e[4]>>4&15,(e[4]&15)<<4|(e[5]&240)>>4,(e[5]&15)<<4|e[6]&15,e[7],(e[1]&15)<<4|(e[2]&240)>>4,(e[2]&15)<<4|(e[3]&240)>>4,16|(e[0]&240)>>4,(e[0]&15)<<4|(e[1]&240)>>4,e[8],e[9],e[10],e[11],e[12],e[13],e[14],e[15])}var He=u(()=>{j();R()});function Jt(e,t,r){let o;if(e)o=Ye(e.random??e.rng?.()??O(),e.msecs,e.seq,t,r);else{let n=Date.now(),i=O();Ht(ae,n,i),o=Ye(i,ae.msecs,ae.seq,t,r)}return t??y(o)}function Ht(e,t,r){return e.msecs??=-1/0,e.seq??=0,t>e.msecs?(e.seq=r[6]<<23|r[7]<<16|r[8]<<8|r[9],e.msecs=t):(e.seq=e.seq+1|0,e.seq===0&&e.msecs++),e}function Ye(e,t,r,o,n=0){if(e.length<16)throw new 
Error("Random bytes length must be >= 16");if(!o)o=new Uint8Array(16),n=0;else if(n<0||n+16>o.length)throw new RangeError(`UUID byte range ${n}:${n+15} is out of buffer bounds`);return t??=Date.now(),r??=e[6]*127<<24|e[7]<<16|e[8]<<8|e[9],o[n++]=t/1099511627776&255,o[n++]=t/4294967296&255,o[n++]=t/16777216&255,o[n++]=t/65536&255,o[n++]=t/256&255,o[n++]=t&255,o[n++]=112|r>>>28&15,o[n++]=r>>>20&255,o[n++]=128|r>>>14&63,o[n++]=r>>>6&255,o[n++]=r<<2&255|e[10]&3,o[n++]=e[11],o[n++]=e[12],o[n++]=e[13],o[n++]=e[14],o[n++]=e[15],o}var ae,qe,Qe=u(()=>{H();R();ae={};qe=Jt});function Yt(e){if(!P(e))throw TypeError("Invalid UUID");return parseInt(e.slice(14,15),16)}var Me,Ve=u(()=>{C();Me=Yt});var Ze={};V(Ze,{MAX:()=>Re,NIL:()=>Ue,parse:()=>T,stringify:()=>be,v1:()=>Y,v1ToV6:()=>z,v3:()=>Ne,v4:()=>je,v5:()=>Ge,v6:()=>Be,v6ToV1:()=>ie,v7:()=>qe,validate:()=>P,version:()=>Me});var Xe=u(()=>{Oe();Pe();j();R();ee();te();Le();Fe();We();Je();He();Qe();C();Ve()});var le={};V(le,{__test:()=>nr,sessionPullFromS3:()=>rr,sessionPushToS3:()=>or});import{existsSync as qt,mkdirSync as Qt,readFileSync as Mt,writeFileSync as Vt,statSync as Zt}from"node:fs";import{homedir as Xt}from"node:os";import{join as er,dirname as tr}from"node:path";function et(e){return e.replace(/\//g,"-")}function ce(e,t){return er(Xt(),".claude","projects",et(e),`${t}.jsonl`)}async function rr({apiUrl:e,apiKey:t,workflowUuid:r,sessionId:o,cwd:n}){if(!e||!t||!r||!o||!n)return{pulled:!1,reason:"missing required arg"};let i;try{let a=await fetch(`${e}/workflows/${r}/sessions/${o}/download-url`,{method:"POST",headers:{Authorization:`Bearer ${t}`,"Content-Type":"application/json"}});if(!a.ok)return{pulled:!1,reason:`mint download-url failed: HTTP ${a.status}`};i=await a.json()}catch(a){return{pulled:!1,reason:`mint download-url threw: ${a.message}`}}let s;try{let a=await fetch(i.url,{method:"GET"});if(a.status===404||a.status===403)return{pulled:!1,reason:"no prior session (first 
turn)"};if(!a.ok)return{pulled:!1,reason:`GET presigned failed: HTTP ${a.status}`};s=Buffer.from(await a.arrayBuffer())}catch(a){return{pulled:!1,reason:`GET presigned threw: ${a.message}`}}let l=ce(n,o);try{return Qt(tr(l),{recursive:!0}),Vt(l,s),{pulled:!0,bytes:s.length}}catch(a){return{pulled:!1,reason:`write local JSONL failed: ${a.message}`}}}async function or({apiUrl:e,apiKey:t,workflowUuid:r,sessionId:o,cwd:n}){if(!e||!t||!r||!o||!n)return{pushed:!1,reason:"missing required arg"};let i=ce(n,o);if(!qt(i))return{pushed:!1,reason:"no local JSONL to push"};let s,l;try{s=Mt(i),l=Zt(i).size}catch(c){return{pushed:!1,reason:`read local JSONL failed: ${c.message}`}}let a;try{let c=await fetch(`${e}/workflows/${r}/sessions/${o}/upload-url`,{method:"POST",headers:{Authorization:`Bearer ${t}`,"Content-Type":"application/json"}});if(!c.ok)return{pushed:!1,reason:`mint upload-url failed: HTTP ${c.status}`};a=await c.json()}catch(c){return{pushed:!1,reason:`mint upload-url threw: ${c.message}`}}try{let c=await fetch(a.url,{method:"PUT",headers:{...a.requiredHeaders||{},"Content-Length":String(l)},body:s});return c.ok?{pushed:!0,bytes:l}:{pushed:!1,reason:`PUT presigned failed: HTTP ${c.status}`}}catch(c){return{pushed:!1,reason:`PUT presigned threw: ${c.message}`}}}var nr,fe=u(()=>{nr={sdkProjectDir:et,sessionJsonlPath:ce}});var st={};V(st,{uploadSessionArtifacts:()=>mr});import{readdirSync as rt,statSync as pe,createReadStream as sr,existsSync as ir}from"node:fs";import{join as ot,relative as ar,sep as cr,extname as lr}from"node:path";function dr(e){let t=lr(e).toLowerCase();return ur[t]||"application/octet-stream"}function nt(e){let t=[],r;try{r=rt(e)}catch{return t}for(let o of r){if(fr.has(o)||o.startsWith(".")||pr.has(o))continue;let n=ot(e,o),i;try{i=pe(n)}catch{continue}i.isDirectory()?t.push(...nt(n)):i.isFile()&&t.push(n)}return t}async function gr({apiUrl:e,apiKey:t,executionId:r,nodeName:o,filename:n,absolutePath:i,sizeBytes:s,contentType:l}){let a;try{let 
c=await fetch(`${e}/${r}/artifacts/upload-url`,{method:"POST",headers:{"Content-Type":"application/json",Authorization:`Bearer ${t}`},body:JSON.stringify({nodeName:o,filename:n,contentType:l,sizeBytes:s})});if(!c.ok){let f=await c.text();return console.warn(`[artifacts] upload-url failed for ${o}/${n}: ${c.status} ${f.slice(0,200)}`),null}a=await c.json()}catch(c){return console.warn(`[artifacts] upload-url request errored for ${o}/${n}: ${c.message}`),null}try{let c=sr(i),f=a.requiredHeaders?{...a.requiredHeaders,"Content-Length":String(s)}:{"Content-Type":l,"Content-Length":String(s)},d=await fetch(a.url,{method:"PUT",headers:f,body:c,duplex:"half"});if(!d.ok)return console.warn(`[artifacts] S3 PUT failed for ${o}/${n}: ${d.status}`),null}catch(c){return console.warn(`[artifacts] S3 PUT errored for ${o}/${n}: ${c.message}`),null}return{nodeName:o,filename:n,s3Key:a.s3Key,contentType:l,sizeBytes:s}}async function mr({sessionPath:e,executionId:t,apiUrl:r,apiKey:o}){let n={uploaded:[],skipped:[]};if(!e||!ir(e))return n;if(!r||!o||!t)return console.warn("[artifacts] uploader missing required input \u2014 skipping"),n;let i;try{i=rt(e)}catch(f){return console.warn(`[artifacts] could not read session folder ${e}: ${f.message}`),n}let s=[];for(let f of i){let d=ot(e,f),g;try{g=pe(d)}catch{continue}if(!g.isDirectory()||f.startsWith(".")||f.startsWith("_"))continue;let G=nt(d);for(let _ of G){let k=ar(d,_).split(cr).join("/"),U;try{U=pe(_).size}catch{continue}if(U>tt){n.skipped.push({nodeName:f,filename:k,reason:`size ${U} > ${tt}`});continue}if(U===0){n.skipped.push({nodeName:f,filename:k,reason:"empty"});continue}s.push({apiUrl:r,apiKey:o,executionId:t,nodeName:f,filename:k,absolutePath:_,sizeBytes:U,contentType:dr(k)})}}if(s.length===0)return n;let l=4,a=s.slice(),c=Array.from({length:Math.min(l,a.length)},async()=>{for(;a.length;){let f=a.shift(),d=await gr(f);d?n.uploaded.push(d):n.skipped.push({nodeName:f.nodeName,filename:f.filename,reason:"upload 
failed"})}});if(await Promise.all(c),n.uploaded.length>0)try{let f=await fetch(`${r}/${t}/artifacts`,{method:"POST",headers:{"Content-Type":"application/json",Authorization:`Bearer ${o}`},body:JSON.stringify({files:n.uploaded})});if(!f.ok){let d=await f.text();console.warn(`[artifacts] record failed: ${f.status} ${d.slice(0,200)}`)}}catch(f){console.warn(`[artifacts] record errored: ${f.message}`)}return n}var tt,fr,pr,ur,it=u(()=>{tt=500*1024*1024,fr=new Set([".DS_Store","Thumbs.db",".zibby-stop"]),pr=new Set(["node_modules",".git","dist",".zibby","__tests__","__mocks__",".cache",".next",".turbo"]),ur={".webm":"video/webm",".mp4":"video/mp4",".mov":"video/quicktime",".png":"image/png",".jpg":"image/jpeg",".jpeg":"image/jpeg",".gif":"image/gif",".txt":"text/plain",".md":"text/markdown",".csv":"text/csv",".log":"text/plain",".json":"application/json",".yaml":"application/yaml",".yml":"application/yaml",".pdf":"application/pdf",".zip":"application/zip",".tar":"application/x-tar",".gz":"application/gzip"}});import{mkdirSync as pt,writeFileSync as hr,existsSync as A,readFileSync as at}from"fs";import{join as $,dirname as wr,resolve as ct}from"path";import{pathToFileURL as de}from"url";import{execSync as yr,spawn as lt}from"node:child_process";import{SQSClient as yt,SendMessageCommand as xt}from"@aws-sdk/client-sqs";var Z=null;function St(){return Z||(Z=new yt({region:process.env.AWS_REGION||"ap-southeast-2"})),Z}async function $e(e,{status:t,error:r,finalState:o}){let{EXECUTION_ID:n,SQS_AUTH_TOKEN:i,PROGRESS_API_URL:s,PROGRESS_QUEUE_URL:l,PROJECT_API_TOKEN:a}=e;if(!n)return;let c={executionId:n,...i&&{sqsAuthToken:i},status:t,...r&&{error:r},...o&&typeof o=="object"&&Object.keys(o).length>0?{finalState:o}:{},timestamp:new Date().toISOString()},f=s?"HTTP":l?"SQS":"NONE",d=JSON.stringify(c).length;console.log(`Sending final status: ${t} via ${f} (${(d/1024).toFixed(1)}KB)`);try{if(s)await $t(s,n,c,a);else if(l){let 
g=["completed","failed","insufficient_context","blocked"].includes(t)?"execution_completed":"progress_update";await _t(l,n,c,g)}else{console.warn("No transport configured for final status \u2014 neither PROGRESS_API_URL nor PROGRESS_QUEUE_URL set");return}console.log(`Final status ${t} sent via ${f}`)}catch(g){console.error(`Failed to send final status (${t}) via ${f}:`),console.error(` Payload: ${(d/1024).toFixed(1)}KB`),console.error(` Error: ${g.message}`),g.name&&console.error(` Error type: ${g.name}`),g.code&&console.error(` Error code: ${g.code}`)}}async function $t(e,t,r,o){let n=`${e}/${t}/progress`,i={"Content-Type":"application/json"};o&&(i.Authorization=`Bearer ${o}`);let s=await fetch(n,{method:"POST",headers:i,body:JSON.stringify(r)});if(!s.ok){let l=await s.text();throw new Error(`HTTP ${s.status}: ${l}`)}}async function _t(e,t,r,o="progress_update"){let n=JSON.stringify(r),i=(n.length/1024).toFixed(1);n.length>256*1024&&console.error(`\u274C SQS message too large: ${i}KB (limit 256KB) for ${t} [${o}]`),await St().send(new xt({QueueUrl:e,MessageBody:n,MessageGroupId:t,MessageAttributes:{executionId:{DataType:"String",StringValue:t},messageType:{DataType:"String",StringValue:o}}}))}function _e({workflowType:e,jobId:t,projectId:r,agentType:o,model:n,egressIp:i,egressKind:s}){let l="\u2500".repeat(60),a=`${o||"default"} (model: ${n||"auto"})`,c=["",l,` Workflow: ${e}`,` Job: ${t||"local"}`,` Project: ${r||"none"}`,` Agent: ${a}`];if(i||s){let f=i||"unknown",d=s||"static";c.push(` Egress: ${f} (${d})`)}return c.push(l),c.join(`
|
|
3
3
|
`)}import{existsSync as Et,writeFileSync as Tt}from"fs";import{join as Ee}from"path";var X={width:1280,height:720},Rt="on",Ot="tests",Ut="test-results/playwright";function Pt(e={}){let t=e.viewport&&typeof e.viewport=="object"?{width:Number(e.viewport.width)||X.width,height:Number(e.viewport.height)||X.height}:X,r=typeof e.video=="string"?e.video:Rt,o=e.paths?.generated||Ot,n=e.playwrightArtifacts!==!1,i=n?"on":"off",s=n?"only-on-failure":"off";return`// AUTO-GENERATED at workflow run start by @zibby/cli's
|
|
4
4
|
// playwright-config-materialize.js. Derived from the bundled
|
|
5
5
|
// zibby.config.json (which @zibby/workflow-deploy serialized from your
|
|
@@ -26,7 +26,7 @@ export default defineConfig({
|
|
|
26
26
|
|
|
27
27
|
reporter: [['list']],
|
|
28
28
|
});
|
|
29
|
-
`}function Te(e,t){if(!e)return{written:!1,path:null,reason:"no workspaceDir"};for(let n of["js","mjs","ts"]){let i=Ee(e,`playwright.config.${n}`);if(Et(i))return{written:!1,path:i,reason:`existing playwright.config.${n} in workspace`}}let r=Ee(e,"playwright.config.js"),o=Pt(t||{});try{return Tt(r,o,"utf-8"),{written:!0,path:r,reason:"derived from zibby.config"}}catch(n){return{written:!1,path:null,reason:`write failed: ${n.message}`}}}import"@zibby/core";var
|
|
30
|
-
Workflow execution failed: ${p.message}`),console.error(p.stack),await
|
|
31
|
-
[done] ${Se} completed in ${xe}s`),await
|
|
32
|
-
[done] ${Se} failed after ${xe}s`),await
|
|
29
|
+
`}function Te(e,t){if(!e)return{written:!1,path:null,reason:"no workspaceDir"};for(let n of["js","mjs","ts"]){let i=Ee(e,`playwright.config.${n}`);if(Et(i))return{written:!1,path:i,reason:`existing playwright.config.${n} in workspace`}}let r=Ee(e,"playwright.config.js"),o=Pt(t||{});try{return Tt(r,o,"utf-8"),{written:!0,path:r,reason:"derived from zibby.config"}}catch(n){return{written:!1,path:null,reason:`write failed: ${n.message}`}}}import"@zibby/core";var L=process.env.WORKSPACE||"/workspace";async function xr(e,t){pt(t,{recursive:!0});let r=Date.now();console.log("[setup] Fetching bundle...");let o=setInterval(()=>{let i=((Date.now()-r)/1e3).toFixed(1);console.log(`[setup] still fetching (${i}s elapsed)`)},3e3);try{await new Promise((i,s)=>{let l=lt("curl",["-fsSL",e],{stdio:["ignore","pipe","inherit"]}),a=lt("tar",["-xzf","-","-C",t],{stdio:["pipe","inherit","inherit"]});l.stdout.pipe(a.stdin);let c,f,d=()=>{if(c!==void 0&&f!==void 0){if(c!==0)return s(new Error(`curl exited ${c}`));if(f!==0)return s(new Error(`tar exited ${f}`));i()}};l.on("close",g=>{c=g,d()}),a.on("close",g=>{f=g,d()}),l.on("error",s),a.on("error",s)})}finally{clearInterval(o)}let n=((Date.now()-r)/1e3).toFixed(1);return console.log(`[setup] Bundle extracted (${n}s)`),t}async function ft(){let e=process.env.WORKFLOW_SOURCES_URL;if(!e)throw new Error("WORKFLOW_SOURCES_URL env var is required");let t=await fetch(e);if(!t.ok)throw new Error(`Failed to fetch sources: ${t.status} ${t.statusText}`);let r=await t.json();if(!r.sources||typeof r.sources!="object")throw new Error('Invalid sources payload \u2014 missing "sources" map');return r}function Sr(e){let t=e?.agent;if(!t)return null;if(typeof t=="string")return t;if(typeof t=="object"){if(typeof t.provider=="string")return t.provider;for(let r of["claude","cursor","codex","gemini"])if(t[r])return r}return null}function $r(e,t){let r=ct(t),o=0;for(let[n,i]of Object.entries(e)){let s=ct(t,n);if(!s.startsWith(`${r}/`)&&s!==r){console.error(` 
\u26D4 Skipping unsafe path: ${n}`);continue}pt(wr(s),{recursive:!0}),hr(s,i,"utf-8"),o++}return o}async function _r(){let e=process.env.ZIBBY_EGRESS_PROXY_URL,t=process.env.ZIBBY_EGRESS_TOKEN;if(!(!e||!t))try{let r=await import("undici"),o=new r.ProxyAgent({uri:e,token:`Bearer ${t}`});r.setGlobalDispatcher(o)}catch(r){console.warn(`[setup] Failed to install egress proxy dispatcher: ${r.message}`)}}async function Er(){if(process.env.ZIBBY_EGRESS_IP)return{ip:process.env.ZIBBY_EGRESS_IP,kind:"static"};try{let e=new AbortController,t=setTimeout(()=>e.abort(),1500),r=await fetch("https://api.ipify.org?format=json",{signal:e.signal});return clearTimeout(t),r.ok?{ip:(await r.json())?.ip||null,kind:"dynamic"}:{ip:null,kind:"dynamic"}}catch{return{ip:null,kind:"dynamic"}}}async function Tr(e,t){let r=$(e,"graph.mjs");if(!A(r))throw new Error(`graph.mjs not found at ${r}`);let o=await import(de(r).href),n=t?.entryClass,i=n&&o[n]||o.default||Object.values(o).find(s=>typeof s=="function"&&s.prototype?.buildGraph);if(!i)throw new Error("No WorkflowAgent class found in graph.mjs");return i}async function rn(){if(!process.env.NODE_PATH){process.env.NODE_PATH="/opt/zibby/packages";let p=await import("module");p.default._initPaths&&p.default._initPaths()}await _r();let{WORKFLOW_JOB_ID:e,WORKFLOW_TYPE:t,PROJECT_ID:r,AGENT_TYPE:o,MODEL:n}=process.env;t||(console.error("Missing WORKFLOW_TYPE env var"),process.exit(1));let i=process.env.WORKFLOW_BUNDLE_URL,s,l={},a,c;if(i){a=t,s=$(L,".zibby","workflows",a);try{await xr(i,s);try{let p=await ft();l=p.input||{},c=p.version}catch{}}catch(p){console.warn(`[setup] Bundle extract failed (${p.message}); falling back to source install`),s=null}}if(!s){let p=await ft(),{sources:m,input:w,workflowType:x,version:S}=p;l=w||{},a=x||t,c=S,console.log(`[setup] Workflow v${c||"?"} (${Object.keys(m).length} files)`),s=$(L,".zibby","workflows",a);let E=$r(m,s);console.log(`[setup] Wrote ${E} files`),console.log("[setup] Installing 
dependencies...");try{yr("npm install --silent --no-audit --no-fund",{cwd:s,stdio:"inherit"}),console.log("[setup] Dependencies installed")}catch(v){console.warn(`[setup] npm install failed: ${v.message}`)}}let f={},d=$(s,"workflow.json");A(d)&&(f=JSON.parse(at(d,"utf-8")));let g={},G=$(s,"zibby.config.json");if(A(G))try{g=JSON.parse(at(G,"utf-8")),console.log("[setup] Loaded user config from zibby.config.json")}catch(p){console.warn(`[setup] Failed to parse zibby.config.json: ${p.message} \u2014 falling back to defaults`)}let _=Te(L,g);_.written?console.log(`[setup] Materialized playwright.config.js \u2192 ${_.path} (${_.reason})`):_.path&&console.log(`[setup] Using existing playwright config: ${_.path}`);let k=Sr(g)||o,U=await Er();console.log(_e({workflowType:t,jobId:e,projectId:r,agentType:k,model:n,egressIp:U.ip,egressKind:U.kind}));let ge=await Tr(s,f);console.log(`[setup] Loaded ${ge.name}`);let D=[],me=$(s,"node_modules","@zibby","agent-workflow"),he=$(s,"node_modules","@zibby","core","node_modules","@zibby","agent-workflow");A(me)&&D.push({kind:"hoisted",path:me}),A(he)&&D.push({kind:"nested",path:he});let M=process.env.ZIBBY_RUN_DIAG==="1";if(M){let{readdirSync:p}=await import("fs");console.log(` [diag] @zibby/agent-workflow copies in bundle: ${D.length}`);for(let m of D)console.log(` [diag] ${m.kind}: ${m.path}`);try{let m=$(s,"node_modules","@zibby");A(m)&&console.log(` [diag] node_modules/@zibby/ contents: [${p(m).join(", ")}]`)}catch{}}let we=$(s,"node_modules","@zibby","core","dist","index.js");if(A(we)&&D.length>0)try{let p=await import(de(we).href),m=[p.AssistantStrategy,p.CursorAgentStrategy,p.ClaudeAgentStrategy,p.CodexAgentStrategy,p.GeminiAgentStrategy].filter(Boolean);for(let w of D){let x=$(w.path,"dist","index.js");if(!A(x))continue;let S=await import(de(x).href),E=M?S.listStrategies():null;for(let v of m)try{S.registerStrategy(new v)}catch(ht){console.warn(` register ${v.name} into ${w.kind} failed: ${ht.message}`)}M&&console.log(` [diag] 
${w.kind} registry: before=[${E.join(",")||"empty"}] after=[${S.listStrategies().join(",")||"empty"}]`)}console.log("[setup] Registered 5 agent strategies (assistant, cursor, claude, codex, gemini)")}catch(p){console.warn(`[setup] Failed to bridge strategies: ${p.message}`)}else console.warn("[setup] No @zibby/core or @zibby/agent-workflow in bundle \u2014 agent strategies may be unavailable");let ut=Date.now(),ye=new ge({workflow:a||t}),dt=ye.buildGraph(),I=process.env.WORKFLOW_UUID||null,b=process.env.ZIBBY_CONVERSATION_ID||null,gt={...l||{},cwd:L,runId:e||`run-${Date.now()}`,config:g,input:l||{},...I?{workflowUuid:I}:{},...b?{conversationId:b}:{}},W=null;if(I&&b){let{v5:p}=await Promise.resolve().then(()=>(Xe(),Ze));W=p(`${I}:${b}`,"6ba7b810-9dad-11d1-80b4-00c04fd430c8");let w=(process.env.PROGRESS_API_URL||process.env.ZIBBY_API_BASE||"").replace(/\/executions\/?$/,""),x=process.env.PROJECT_API_TOKEN;if(w&&x)try{let{sessionPullFromS3:S}=await Promise.resolve().then(()=>(fe(),le)),E=await S({apiUrl:w,apiKey:x,workflowUuid:I,sessionId:W,cwd:L});E.pulled?console.log(`[session] pulled ${E.bytes} bytes for conversation ${b}`):console.log(`[session] no pull: ${E.reason}`)}catch(S){console.warn(`[session] pull threw: ${S.message}`)}}console.log("");let N;try{N=await dt.run(ye,gt)}catch(p){console.error(`
|
|
30
|
+
Workflow execution failed: ${p.message}`),console.error(p.stack),await ue("failed",p.message),process.exit(1)}let xe=((Date.now()-ut)/1e3).toFixed(1),mt=N?.success!==!1,Se=a||t;if(I&&b&&W){let p=(process.env.PROGRESS_API_URL||process.env.ZIBBY_API_BASE||"").replace(/\/executions\/?$/,""),m=process.env.PROJECT_API_TOKEN;if(p&&m)try{let{sessionPushToS3:w}=await Promise.resolve().then(()=>(fe(),le)),x=await w({apiUrl:p,apiKey:m,workflowUuid:I,sessionId:W,cwd:L});x.pushed?console.log(`[session] pushed ${x.bytes} bytes for conversation ${b}`):console.log(`[session] no push: ${x.reason}`)}catch(w){console.warn(`[session] push threw: ${w.message}`)}}if(process.env.UPLOAD_ARTIFACTS!=="0"){let p=N?.state?.sessionPath,m=process.env.PROGRESS_API_URL||process.env.ZIBBY_API_BASE,w=process.env.PROJECT_API_TOKEN,x=process.env.WORKFLOW_JOB_ID;if(p&&m&&w&&x)try{let{uploadSessionArtifacts:S}=await Promise.resolve().then(()=>(it(),st)),{uploaded:E,skipped:v}=await S({sessionPath:p,executionId:x,apiUrl:m,apiKey:w});console.log(`[artifacts] uploaded ${E.length} file(s)${v.length?`, skipped ${v.length}`:""}`)}catch(S){console.warn(`[artifacts] uploader threw: ${S.message}`)}else console.log("[artifacts] skipping upload \u2014 sessionPath/apiUrl/apiKey/executionId missing")}mt?(console.log(`
|
|
31
|
+
[done] ${Se} completed in ${xe}s`),await ue("completed",null,N?.state)):(console.error(`
|
|
32
|
+
[done] ${Se} failed after ${xe}s`),await ue("failed",N?.error||"Workflow execution failed",N?.state),process.exit(1))}async function ue(e,t=null,r=void 0){let o={EXECUTION_ID:process.env.WORKFLOW_JOB_ID,PROGRESS_API_URL:process.env.PROGRESS_API_URL,PROGRESS_QUEUE_URL:process.env.PROGRESS_QUEUE_URL,PROJECT_API_TOKEN:process.env.PROJECT_API_TOKEN,SQS_AUTH_TOKEN:process.env.SQS_AUTH_TOKEN};if(o.EXECUTION_ID)try{await $e(o,{status:e,...t&&{error:t},...r?{finalState:r}:{}})}catch(n){console.error(`\u26A0\uFE0F Failed to report status: ${n.message}`)}}export{Sr as resolveAgentFromConfig,rn as runWorkflowCommand};
|
|
@@ -1 +1 @@
|
|
|
1
|
-
var
|
|
1
|
+
var o=/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;function t(r){return r?o.test(r)?{ok:!0}:{ok:!1,error:`'${r}' is not a UUID. Cloud workflows are identified by UUID only. Run \`zibby workflow list\` to find yours, or run \`zibby workflow trigger\` with no argument for interactive selection.`}:{ok:!0}}export{o as TRIGGER_UUID_REGEX,t as validateTriggerUuid};
|
|
@@ -1,63 +1,63 @@
|
|
|
1
|
-
var K=Object.defineProperty;var
|
|
1
|
+
var K=Object.defineProperty;var q=(o,e)=>()=>(o&&(e=o(o=0)),e);var H=(o,e)=>{for(var t in e)K(o,t,{get:e[t],enumerable:!0})};var G={};H(G,{logsCommand:()=>me,parseSseChunk:()=>F,runReconnectLoop:()=>J,sseBackoffMs:()=>M});import i from"chalk";import{readFileSync as ne,existsSync as re,writeSync as se}from"fs";import{homedir as ie}from"os";import{join as le}from"path";async function ce(o){return T||(process.env.ZIBBY_SSE_ENDPOINT?(T=process.env.ZIBBY_SSE_ENDPOINT,T):(T=ae,T))}function fe(o){let e=le(ie(),".zibby","config.json");re(e)||(console.log(i.red(`
|
|
2
2
|
Not authenticated`)),console.log(i.gray(` Run: zibby login
|
|
3
|
-
`)),process.exit(1));let t;try{t=JSON.parse(
|
|
3
|
+
`)),process.exit(1));let t;try{t=JSON.parse(ne(e,"utf-8"))}catch{console.log(i.red(`
|
|
4
4
|
Config file corrupt`)),console.log(i.gray(` Run: zibby login
|
|
5
5
|
`)),process.exit(1)}let n=t.sessionToken;n||(console.log(i.red(`
|
|
6
6
|
Not authenticated`)),console.log(i.gray(` Run: zibby login
|
|
7
|
-
`)),process.exit(1));let
|
|
7
|
+
`)),process.exit(1));let s=o.project;return{token:n,projectId:s}}function B(o){let e=new Date(o);if(process.env.ZIBBY_LOGS_UTC==="1")return e.toISOString().replace("T"," ").replace("Z","");let t=(n,s=2)=>String(n).padStart(s,"0");return`${e.getFullYear()}-${t(e.getMonth()+1)}-${t(e.getDate())} ${t(e.getHours())}:${t(e.getMinutes())}:${t(e.getSeconds())}.${t(e.getMilliseconds(),3)}`}async function Y(o,e){let t=await fetch(o,{headers:{Authorization:`Bearer ${e}`}});if(!t.ok){let n=await t.text();throw new Error(`API ${t.status}: ${n}`)}return t.json()}async function pe(o,e,t,n){return o||(console.log(i.red(`
|
|
8
8
|
Workflow UUID is required`)),console.log(i.gray(" Usage: zibby workflow logs <workflow-uuid>")),console.log(i.gray(` zibby workflow logs <workflow-uuid> -t
|
|
9
|
-
`)),process.exit(1)),o}function
|
|
10
|
-
`),
|
|
9
|
+
`)),process.exit(1)),o}function F(o,e){let n=((o||"")+e).split(`
|
|
10
|
+
`),s=n.pop()||"",l=[],a=null,r=null;for(let c=0;c<n.length;c++){let f=n[c];if(f.trim()){if(f.startsWith("id:")){a=f.slice(3).trim();continue}if(f.startsWith("event:")){let d=f.slice(6).trim();if(d==="log")continue;if(d==="status"){let p=n[c+1];if(p&&p.startsWith("data:"))try{let g=JSON.parse(p.slice(5).trim());g.status==="new_execution"?l.push({type:"newExecution",executionId:g.executionId,taskId:g.taskId}):g.status==="waiting"&&l.push({type:"waiting"})}catch{}continue}if(d==="complete"){l.push({type:"complete"});continue}if(d==="error"){let p=n[c+1];if(p&&p.startsWith("data:"))try{if(JSON.parse(p.slice(5).trim()).error==="No executions found for workflow"){r={type:"notFound"};break}}catch{}r={type:"failed"};break}continue}if(f.startsWith("data:")){let d=f.slice(5).trim();if(!d)continue;try{let p=JSON.parse(d);p.timestamp&&p.message&&l.push({type:"log",timestamp:p.timestamp,message:p.message,taskId:p.taskId})}catch{}}}}return{actions:l,remainder:s,lastEventId:a,returnSignal:r}}async function ue({token:o,executionId:e,sseEndpoint:t,stopped:n}){let s=null;try{let h=function(){let $=Array.from(y.entries()).slice(0,p);y.clear();for(let[I,w]of $)y.set(I,w);u=new Set(Array.from(y.values()).map(I=>I.taskId).filter(Boolean));for(let I of[...m.keys()])y.has(I)||m.delete(I)},E=function(b){for(let[$,I]of y)if(I.taskId===b)return $;return null},v=function(b){let $=y.get(b);!$||$.headerPrinted||(console.log(i.cyan(`
|
|
11
11
|
\u250C\u2500 Execution: ${$.shortId} (task: ${$.taskSuffix})`)),console.log(i.cyan(` \u2514\u2500 Streaming logs...
|
|
12
|
-
`)),$.headerPrinted=!0)},_=function(b){let $=i.gray(B(b.timestamp)),I=b.taskId?i.gray(`(${b.taskId.slice(-8)}) `):"";console.log(`${$} ${I}${b.message.replace(/\n$/,"")}`)},R=function(){if(!k)return;k=!1,x&&(clearTimeout(x),x=null);let b=Array.from(y.entries()).reverse();for(let[$]of b){let I=m.get($);if(!(!I||I.length===0)){v($);for(let
|
|
13
|
-
Waiting for next execution...`));break;case"complete":R(),d=!0;break;case"log":{if(
|
|
12
|
+
`)),$.headerPrinted=!0)},_=function(b){let $=i.gray(B(b.timestamp)),I=b.taskId?i.gray(`(${b.taskId.slice(-8)}) `):"";console.log(`${$} ${I}${b.message.replace(/\n$/,"")}`)},R=function(){if(!k)return;k=!1,x&&(clearTimeout(x),x=null);let b=Array.from(y.entries()).reverse();for(let[$]of b){let I=m.get($);if(!(!I||I.length===0)){v($);for(let w of I)_(w)}}m.clear()},P=function(){k&&(x&&clearTimeout(x),x=setTimeout(R,g))},l=new URL(t);l.searchParams.set("jobId",e),s&&l.searchParams.set("lastEventId",s);let a=await fetch(l.toString(),{headers:{Authorization:`Bearer ${o}`,Accept:"text/event-stream"}});if(!a.ok)throw new Error(`SSE connection failed: ${a.status} ${a.statusText}`);let r=a.body.getReader(),c=new TextDecoder,f="",d=!1,p=Number(process.env.ZIBBY_LOGS_HISTORY_LIMIT)||10,g=Number(process.env.ZIBBY_LOGS_REPLAY_TIMEOUT_MS)||3e3,y=new Map,u=new Set,m=new Map,k=!0,x=null;for(P();!n.value;){let{done:b,value:$}=await r.read();if(b)break;let I=F(f,c.decode($,{stream:!0}));f=I.remainder,I.lastEventId&&(s=I.lastEventId);for(let w of I.actions)switch(w.type){case"newExecution":{let S=`${w.executionId.slice(0,8)}...${w.executionId.slice(-4)}`,N=w.taskId?w.taskId.slice(-8):"pending";y.set(w.executionId,{shortId:S,taskSuffix:N,taskId:w.taskId||null,headerPrinted:!1}),m.has(w.executionId)||m.set(w.executionId,[]),h(),P();break}case"waiting":R(),console.log(i.gray(`
|
|
13
|
+
Waiting for next execution...`));break;case"complete":R(),d=!0;break;case"log":{if(w.taskId&&u.size>0&&!u.has(w.taskId))break;if(k){let S=w.taskId?E(w.taskId):null;if(S){let N=m.get(S)||[];N.push(w),m.set(S,N),P()}else _(w)}else{if(w.taskId){let S=E(w.taskId);S&&v(S)}_(w)}break}}if(I.returnSignal)return I.returnSignal.type==="notFound"?{notFound:!0}:{failed:!0}}return R(),{completed:d}}catch(l){if(l.name==="AbortError")return{aborted:!0};throw l}}function M(o,{baseMs:e=500,capMs:t=3e4,rand:n=Math.random}={}){let s=Math.min(t,e*Math.pow(2,Math.max(0,o)));return Math.floor(n()*s)}async function J({attemptStream:o,stopped:e,follow:t,logger:n,sleep:s=c=>new Promise(f=>setTimeout(f,c)),exit:l=c=>{throw new Error(`exit:${c}`)},backoff:a=M,notFoundPollMs:r=5e3}){let c=0,f=!1;for(;!e.value;){let d;try{d=await o(),c=0}catch(p){if(p.name==="AbortError"||e.value)return{reason:"aborted"};if(f||(n.error(` SSE Error: ${p.message}`),t&&n.gray(" Reconnecting..."),f=!0),!t)return l("error")??{reason:"error"};let g=a(c);c++,await s(g);continue}if(d.aborted||e.value)return{reason:"aborted"};if(d.notFound){if(t){f||(n.yellow(" No executions found yet. Waiting for workflow to be triggered..."),n.gray(" Press Ctrl+C to stop."),f=!0),await s(r);continue}return n.yellow(`
|
|
14
14
|
No executions found for this workflow. Trigger the workflow first.
|
|
15
15
|
`),l("notFound")??{reason:"notFound"}}if(f&&(n.gray(` Reconnected.
|
|
16
16
|
`),f=!1),d.failed)return n.red(`
|
|
17
17
|
Execution failed.`),t?{reason:"failed"}:l("failed")??{reason:"failed"};if(d.completed)return l("completed")??{reason:"completed"};if(!t)return{reason:"disconnected"}}return{reason:"stopped"}}async function de({token:o,jobId:e,follow:t,projectId:n}){console.log(i.gray(` Streaming logs for workflow ${i.cyan(e)}...`)),console.log(t?i.gray(` Press Ctrl+C to stop.
|
|
18
|
-
`):"");let
|
|
19
|
-
`)),
|
|
18
|
+
`):"");let s=await ce(o);if(!s)return console.log(i.yellow(` SSE endpoint not configured, using CloudWatch polling...
|
|
19
|
+
`)),Z({token:o,projectId:null,jobId:e,follow:t,limit:1e5});let l={value:!1},a=()=>{l.value=!0;try{se(2,`
|
|
20
20
|
Stopped streaming.
|
|
21
|
-
`)}catch{}process.exit(0)};process.prependListener("SIGINT",a),process.prependListener("SIGTERM",a),await
|
|
21
|
+
`)}catch{}process.exit(0)};process.prependListener("SIGINT",a),process.prependListener("SIGTERM",a),await J({attemptStream:()=>ue({token:o,executionId:e,sseEndpoint:s,stopped:l}),stopped:l,follow:t,logger:{gray:r=>console.log(i.gray(r)),red:r=>console.log(i.red(r)),yellow:r=>console.log(i.yellow(r)),error:r=>console.error(i.red(r))},exit:r=>{r==="completed"&&process.exit(0),(r==="error"||r==="notFound"||r==="failed")&&process.exit(1)}})}async function Z({token:o,projectId:e,jobId:t,follow:n,limit:s}){let l=e?`${A}/logs/${e}/${t}`:`${A}/job/${t}`,a=null,r=0,c=new Set,f=!1,d=0,p=5,g=()=>{f=!0,console.log(i.gray(`
|
|
22
22
|
Stopped tailing.
|
|
23
|
-
`)),process.exit(0)};for(process.on("SIGINT",
|
|
24
|
-
`):"");!f;)try{let y=new URLSearchParams({limit:String(
|
|
25
|
-
Job ${
|
|
26
|
-
Status: ${
|
|
23
|
+
`)),process.exit(0)};for(process.on("SIGINT",g),process.on("SIGTERM",g),console.log(i.gray(` Fetching logs for workflow ${i.cyan(t)}...`)),console.log(n?i.gray(` Press Ctrl+C to stop.
|
|
24
|
+
`):"");!f;)try{let y=new URLSearchParams({limit:String(s)});a&&y.set("nextToken",a);let u=await Y(`${l}?${y}`,o);d=0,u.message&&u.lines?.length===0&&r===0&&console.log(i.gray(` ${u.message}`)),u.status==="starting"&&u.lines?.length===0&&r===0&&console.log(i.gray(" Container starting..."));for(let h of u.lines||[]){let E=`${h.timestamp}:${h.message}`;if(c.has(E))continue;c.add(E);let v=i.gray(B(h.timestamp)),_=u.taskId?i.gray(`(${u.taskId.slice(-8)}) `):"";console.log(`${v} ${_}${h.message.replace(/\n$/,"")}`)}if(r=u.lines?.length>0?0:r+1,a=u.nextForwardToken||null,u.status==="completed"||u.status==="failed"){let h=u.status==="completed"?i.green:i.red;console.log(h(`
|
|
25
|
+
Job ${u.status}.`)),process.exit(u.status==="completed"?0:1)}if(!n){u.status&&console.log(i.gray(`
|
|
26
|
+
Status: ${u.status}`));break}let x=u.lines?.length>0?500:r>5?5e3:2e3;await new Promise(h=>setTimeout(h,x))}catch(y){if(y.name==="AbortError")break;y.message.match(/API (400|401|403|404):/)&&(console.error(i.red(`
|
|
27
27
|
${y.message}
|
|
28
28
|
`)),process.exit(1)),d++,console.error(i.red(` Error: ${y.message}`)),d>=p&&(console.error(i.red(`
|
|
29
29
|
Too many consecutive errors (${p}). Stopping.
|
|
30
|
-
`)),process.exit(1)),n||process.exit(1),await new Promise(m=>setTimeout(m,3e3))}}async function
|
|
30
|
+
`)),process.exit(1)),n||process.exit(1),await new Promise(m=>setTimeout(m,3e3))}}async function ge({token:o,projectId:e,workflow:t,follow:n,limit:s}){let l=`${A}/all/${e}`,a=null,r=0,c=new Set,f=null,d=!1,p=0,g=5,y=()=>{d=!0,console.log(i.gray(`
|
|
31
31
|
Stopped tailing.
|
|
32
32
|
`)),process.exit(0)};for(process.on("SIGINT",y),process.on("SIGTERM",y),console.log(i.gray(`
|
|
33
33
|
Tailing all runs for ${i.cyan(t)}...`)),console.log(n?i.gray(` Press Ctrl+C to stop.
|
|
34
|
-
`):"");!d;)try{let
|
|
35
|
-
... more logs available. Run again or use --follow to stream.`)),m.jobCount&&console.log(i.gray(` ${m.jobCount} job(s) found.`));break}if(!m.hasRunning&&!a&&
|
|
36
|
-
No running jobs. All caught up.`));break}let x=m.lines?.length>0?500:
|
|
37
|
-
${
|
|
38
|
-
`)),process.exit(1)),p++,console.error(i.red(` Error: ${
|
|
39
|
-
Too many consecutive errors (${
|
|
40
|
-
`)),process.exit(1)),n||process.exit(1),await new Promise(k=>setTimeout(k,3e3))}}async function
|
|
34
|
+
`):"");!d;)try{let u=new URLSearchParams({workflow:t,limit:String(s)});a&&u.set("nextToken",a);let m=await Y(`${l}?${u}`,o);p=0,m.message&&m.lines?.length===0&&r===0&&console.log(i.gray(` ${m.message}`));for(let h of m.lines||[]){let E=`${h.timestamp}:${h.jobId}:${h.message}`;if(c.has(E))continue;c.add(E),h.jobId!==f&&(f!==null&&console.log(""),console.log(i.dim(` \u2500\u2500 ${h.jobId} \u2500\u2500`)),f=h.jobId);let v=i.gray(B(h.timestamp));console.log(`${v} ${h.message.replace(/\n$/,"")}`)}if(r=m.lines?.length>0?0:r+1,a=m.nextToken||null,!n){a&&console.log(i.gray(`
|
|
35
|
+
... more logs available. Run again or use --follow to stream.`)),m.jobCount&&console.log(i.gray(` ${m.jobCount} job(s) found.`));break}if(!m.hasRunning&&!a&&r>2){console.log(i.gray(`
|
|
36
|
+
No running jobs. All caught up.`));break}let x=m.lines?.length>0?500:r>5?5e3:2e3;await new Promise(h=>setTimeout(h,x))}catch(u){if(u.name==="AbortError")break;u.message.match(/API (400|401|403|404):/)&&(console.error(i.red(`
|
|
37
|
+
${u.message}
|
|
38
|
+
`)),process.exit(1)),p++,console.error(i.red(` Error: ${u.message}`)),p>=g&&(console.error(i.red(`
|
|
39
|
+
Too many consecutive errors (${g}). Stopping.
|
|
40
|
+
`)),process.exit(1)),n||process.exit(1),await new Promise(k=>setTimeout(k,3e3))}}async function me(o,e){let{token:t,projectId:n}=fe(e),s=e.follow===!0,l=e.lines?parseInt(e.lines,10):1e5;if(e.all){let r=e.workflow;return r||(console.log(i.red(`
|
|
41
41
|
--workflow is required with --all`)),console.log(i.gray(` Example: zibby workflow logs --workflow ticket-triage --all --project <id>
|
|
42
|
-
`)),process.exit(1)),
|
|
42
|
+
`)),process.exit(1)),ge({token:t,projectId:n,workflow:r,follow:s,limit:l})}let a=await pe(o,e,t,n);return s?de({token:t,jobId:a,follow:s,projectId:n}):Z({token:t,projectId:n,jobId:a,follow:!1,limit:l})}var A,ae,T,W=q(()=>{A="https://logs.workflows.zibby.app",ae="https://logs-stream.zibby.app/",T=null});import C from"ora";import{select as V}from"@inquirer/prompts";import{readFileSync as ye,existsSync as we}from"fs";import{homedir as he}from"os";import{join as Ie}from"path";var U={local:{name:"Local Development",apiUrl:"http://localhost:3001",accountApiUrl:"http://localhost:3001",frontendUrl:"http://localhost:3000",description:"Local backend running on port 3001"},prod:{name:"Production",apiUrl:process.env.ZIBBY_PROD_API_URL||"https://api-prod.zibby.app",accountApiUrl:process.env.ZIBBY_PROD_ACCOUNT_API_URL||"https://account-api-prod.zibby.app",frontendUrl:process.env.ZIBBY_PROD_FRONTEND_URL||"https://studio.zibby.dev",description:"Production environment"}};function j(){let o;if(process.env.ZIBBY_API_URL)o=process.env.ZIBBY_API_URL;else{let e=process.env.ZIBBY_ENV||"prod";U[e]?o=U[e].apiUrl:o=U.prod.apiUrl}try{let e=new URL(o);return e.protocol!=="http:"&&e.protocol!=="https:"?(console.error(`\u26A0\uFE0F Invalid API URL protocol: ${e.protocol} (only http/https allowed)`),U.prod.apiUrl):o}catch{return console.error(`\u26A0\uFE0F Invalid API URL: ${o}`),U.prod.apiUrl}}var X=/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;function L(o){return o?X.test(o)?{ok:!0}:{ok:!1,error:`'${o}' is not a UUID. Cloud workflows are identified by UUID only. 
Run \`zibby workflow list\` to find yours, or run \`zibby workflow trigger\` with no argument for interactive selection.`}:{ok:!0}}import{existsSync as Q,readFileSync as ee}from"fs";import{resolve as oe}from"path";function O(o){return o==="true"?!0:o==="false"?!1:o==="null"?null:o!==""&&!isNaN(Number(o))?Number(o):o}function z(o){let e={};for(let t of o||[]){let n=t.indexOf("=");if(n===-1){console.warn(` Warning: ignored param "${t}" \u2014 expected key=value format`);continue}let s=t.slice(0,n).trim(),l=O(t.slice(n+1)),a=s.split("."),r=e;for(let c=0;c<a.length-1;c++)(typeof r[a[c]]!="object"||r[a[c]]===null)&&(r[a[c]]={}),r=r[a[c]];r[a[a.length-1]]=l}return e}function te(o){let e=oe(o);Q(e)||(console.log(`
|
|
43
43
|
Error: --input-file not found: ${o}
|
|
44
|
-
`),process.exit(1));try{return JSON.parse(
|
|
44
|
+
`),process.exit(1));try{return JSON.parse(ee(e,"utf-8"))}catch(t){console.log(`
|
|
45
45
|
Error: --input-file is not valid JSON: ${t.message}
|
|
46
|
-
`),process.exit(1)}}function
|
|
46
|
+
`),process.exit(1)}}function D(o){let e={};if(o.inputFile&&(e={...te(o.inputFile)}),o.input)try{e={...e,...JSON.parse(o.input)}}catch(t){console.log(`
|
|
47
47
|
Error: --input is not valid JSON`),console.log(` ${t.message}
|
|
48
|
-
`),process.exit(1)}return o.param?.length&&(e={...e,...
|
|
48
|
+
`),process.exit(1)}return o.param?.length&&(e={...e,...z(o.param)}),e}function $e(){let o=Ie(he(),".zibby","config.json");if(we(o))try{let t=JSON.parse(ye(o,"utf-8"));if(t.sessionToken)return t.sessionToken}catch{}let e=process.env.ZIBBY_API_KEY;if(e)return e;console.log(`
|
|
49
49
|
Not authenticated`),console.log(" Run: zibby login"),console.log(` OR set ZIBBY_API_KEY env var (for CI/CD)
|
|
50
|
-
`),process.exit(1)}async function
|
|
50
|
+
`),process.exit(1)}async function be(o){let e=j(),t=C("Fetching projects...").start();try{let n=await fetch(`${e}/projects`,{method:"GET",headers:{"Content-Type":"application/json",Authorization:`Bearer ${o}`}});n.ok||(t.fail("Failed to fetch projects"),process.exit(1));let s=await n.json();Array.isArray(s)||(s.projects?s=s.projects:s.data&&(s=s.data)),(!s||s.length===0)&&(t.fail("No projects found"),process.exit(1)),t.succeed(`Found ${s.length} project${s.length===1?"":"s"}`),console.log("");let l=s.map(a=>({name:`${a.name||"Unnamed"} (${a.projectId||a.id})`,value:a.projectId||a.id}));return await V({message:"Select a project:",choices:l})}catch(n){t.fail(`Error: ${n.message}`),process.exit(1)}}async function ke(o,e){let t=j(),n=C("Fetching deployed workflows...").start();try{let s=["analysis","implementation","run_test"],l=[];for(let r of s){let c=await fetch(`${t}/projects/${o}/workflows/${r}`,{method:"GET",headers:{"Content-Type":"application/json",Authorization:`Bearer ${e}`}});if(c.ok){let f=await c.json();f.graph&&l.push({name:r,version:f.version||0,isDefault:f.isDefault!==!1})}}l.length===0&&(n.fail("No deployed workflows found for this project"),process.exit(1)),n.succeed(`Found ${l.length} deployed workflow${l.length===1?"":"s"}`),console.log("");let a=l.map(r=>({name:`${r.name} (v${r.version})${r.isDefault?" [default]":""}`,value:r.name}));return await V({message:"Select a workflow to trigger:",choices:a})}catch(s){n.fail(`Error: ${s.message}`),process.exit(1)}}async function Ye(o,e={}){let t=L(o);t.ok||(console.log(`
|
|
51
51
|
Error: ${t.error}
|
|
52
|
-
`),process.exit(1));let n
|
|
52
|
+
`),process.exit(1));let n=$e(),s=e.project||process.env.ZIBBY_PROJECT_ID,l;if(o){let c=j();try{let f=await fetch(`${c}/projects`,{method:"GET",headers:{"Content-Type":"application/json",Authorization:`Bearer ${n}`}});if(f.ok){let p=(await f.json()).projects||[];for(let g of p){let y=await fetch(`${c}/projects/${g.projectId}/workflows`,{method:"GET",headers:{"Content-Type":"application/json",Authorization:`Bearer ${n}`}});if(y.ok){let m=(await y.json()).find(k=>k.uuid===o);if(m){s=g.projectId,l=m.workflowType||m.name,console.log(`
|
|
53
53
|
\u2713 Found workflow "${l}" (UUID: ${o})
|
|
54
54
|
`);break}}}(!l||l===o)&&(console.log(`
|
|
55
55
|
Error: Workflow with UUID "${o}" not found`),console.log(` Check: zibby workflow list
|
|
56
56
|
`),process.exit(1))}}catch(f){console.log(`
|
|
57
57
|
Error looking up workflow UUID: ${f.message}
|
|
58
|
-
`),process.exit(1)}}
|
|
58
|
+
`),process.exit(1)}}s||(console.log(""),s=await be(n)),l||(console.log(""),l=await ke(s,n));let a=D(e);if(console.log(`
|
|
59
59
|
Triggering Workflow
|
|
60
|
-
`),console.log(" ".padEnd(60,"-")),console.log(` Workflow: ${l}`),console.log(` Project: ${
|
|
61
|
-
Your workflow execution quota has been exceeded`),
|
|
62
|
-
`),process.exit(1)}let p=await d.json();if(
|
|
63
|
-
`),process.exit(1)}}export{
|
|
60
|
+
`),console.log(" ".padEnd(60,"-")),console.log(` Workflow: ${l}`),console.log(` Project: ${s}`),Object.keys(a).length>0){let c=JSON.stringify(a);console.log(` Input: ${c.length>60?`${c.substring(0,57)}...`:c}`)}e.idempotencyKey&&console.log(` Idempotency: ${e.idempotencyKey}`),console.log(" ".padEnd(60,"-")),console.log("");let r=C("Triggering workflow execution...").start();try{let c=j(),f={input:a};e.idempotencyKey&&(f.idempotencyKey=e.idempotencyKey);let d=await fetch(`${c}/projects/${s}/workflows/${l}/trigger`,{method:"POST",headers:{"Content-Type":"application/json",Authorization:`Bearer ${n}`},body:JSON.stringify(f)});if(!d.ok){let g=await d.json().catch(()=>({}));d.status===429&&(r.fail("Quota exceeded"),console.log(`
|
|
61
|
+
Your workflow execution quota has been exceeded`),g.quotaInfo&&(console.log(` Used: ${g.quotaInfo.used}/${g.quotaInfo.limit} executions`),console.log(` Plan: ${g.quotaInfo.planId}`),g.quotaInfo.periodEnd&&console.log(` Resets: ${new Date(g.quotaInfo.periodEnd).toLocaleDateString()}`)),console.log(""),process.exit(1)),r.fail("Trigger failed"),console.log(` Error: ${g.message||d.statusText}
|
|
62
|
+
`),process.exit(1)}let p=await d.json();if(r.succeed("Workflow triggered successfully"),console.log(""),console.log(" Job Details:"),console.log(` Job ID: ${p.jobId}`),console.log(` Status: ${p.status}`),console.log(` Version: ${p.version}`),console.log(` Triggered: ${new Date(p.triggeredAt).toLocaleString()}`),console.log(""),e.follow){console.log(" Streaming logs (Ctrl+C to stop)..."),console.log("");let{logsCommand:g}=await Promise.resolve().then(()=>(W(),G));return g(p.jobId,{follow:!0,project:s,apiKey:e.apiKey})}console.log(" Monitor execution:"),o?(console.log(` zibby workflow logs ${o}`),console.log(` zibby workflow logs ${o} -t`)):(console.log(` zibby workflow logs --workflow ${l} --project ${s}`),console.log(` zibby workflow logs --workflow ${l} --project ${s} -t`)),console.log("")}catch(c){r.fail("Trigger failed"),console.log(` Error: ${c.message}
|
|
63
|
+
`),process.exit(1)}}export{O as coerceValue,z as parseParams,D as resolveInput,Ye as triggerWorkflowCommand};
|
package/dist/package.json
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
import{SQSClient as _,SendMessageCommand as u}from"@aws-sdk/client-sqs";var E=null;function O(){return E||(E=new _({region:process.env.AWS_REGION||"ap-southeast-2"})),E}async function $(
|
|
1
|
+
import{SQSClient as _,SendMessageCommand as u}from"@aws-sdk/client-sqs";var E=null;function O(){return E||(E=new _({region:process.env.AWS_REGION||"ap-southeast-2"})),E}async function $(l,e,i,t){let{EXECUTION_ID:o,SQS_AUTH_TOKEN:r,PROGRESS_API_URL:n,PROGRESS_QUEUE_URL:a,PROJECT_API_TOKEN:g}=t;if(!o)return;let c={executionId:o,...r&&{sqsAuthToken:r},step:{name:l,status:e,logs:i,timestamp:new Date().toISOString(),...e==="success"&&{completedAt:new Date().toISOString()}},status:e==="failed"?"failed":"running"};try{n?await f(n,o,c,g):a&&await p(a,o,c)}catch(S){console.error(`\u26A0\uFE0F Failed to send progress: ${S.message}`)}}async function m(l,e,i){let{EXECUTION_ID:t,SQS_AUTH_TOKEN:o,PROGRESS_API_URL:r,PROGRESS_QUEUE_URL:n,PROJECT_API_TOKEN:a}=l;if(!t||!i)return;let g=JSON.stringify(i).length;console.log(`Sending artifact: ${e} (${(g/1024).toFixed(1)}KB)`);let c={executionId:t,...o&&{sqsAuthToken:o},artifacts:{[e]:i},timestamp:new Date().toISOString()},S=r?"HTTP":n?"SQS":"NONE",d=JSON.stringify(c).length;try{if(r)await f(r,t,c,a);else if(n)await p(n,t,c);else{console.warn(`\u26A0\uFE0F No transport configured for artifact ${e} \u2014 neither PROGRESS_API_URL nor PROGRESS_QUEUE_URL set`);return}console.log(`Artifact ${e} sent via ${S} (payload=${(d/1024).toFixed(1)}KB, value=${(g/1024).toFixed(1)}KB)`)}catch(s){console.error(`Failed to send artifact ${e} via ${S}:`),console.error(` Payload size: ${(d/1024).toFixed(1)}KB, Value size: ${(g/1024).toFixed(1)}KB`),console.error(` Error: ${s.message}`),s.name&&console.error(` Error type: ${s.name}`),s.code&&console.error(` Error code: ${s.code}`),d>256*1024&&console.error(" \u26A0\uFE0F Message exceeds SQS 256KB limit! 
Consider splitting or compressing.")}}async function P(l,{status:e,error:i,finalState:t}){let{EXECUTION_ID:o,SQS_AUTH_TOKEN:r,PROGRESS_API_URL:n,PROGRESS_QUEUE_URL:a,PROJECT_API_TOKEN:g}=l;if(!o)return;let c={executionId:o,...r&&{sqsAuthToken:r},status:e,...i&&{error:i},...t&&typeof t=="object"&&Object.keys(t).length>0?{finalState:t}:{},timestamp:new Date().toISOString()},S=n?"HTTP":a?"SQS":"NONE",d=JSON.stringify(c).length;console.log(`Sending final status: ${e} via ${S} (${(d/1024).toFixed(1)}KB)`);try{if(n)await f(n,o,c,g);else if(a){let s=["completed","failed","insufficient_context","blocked"].includes(e)?"execution_completed":"progress_update";await p(a,o,c,s)}else{console.warn("No transport configured for final status \u2014 neither PROGRESS_API_URL nor PROGRESS_QUEUE_URL set");return}console.log(`Final status ${e} sent via ${S}`)}catch(s){console.error(`Failed to send final status (${e}) via ${S}:`),console.error(` Payload: ${(d/1024).toFixed(1)}KB`),console.error(` Error: ${s.message}`),s.name&&console.error(` Error type: ${s.name}`),s.code&&console.error(` Error code: ${s.code}`)}}async function f(l,e,i,t){let o=`${l}/${e}/progress`,r={"Content-Type":"application/json"};t&&(r.Authorization=`Bearer ${t}`);let n=await fetch(o,{method:"POST",headers:r,body:JSON.stringify(i)});if(!n.ok){let a=await n.text();throw new Error(`HTTP ${n.status}: ${a}`)}}async function p(l,e,i,t="progress_update"){let o=JSON.stringify(i),r=(o.length/1024).toFixed(1);o.length>256*1024&&console.error(`\u274C SQS message too large: ${r}KB (limit 256KB) for ${e} [${t}]`),await O().send(new u({QueueUrl:l,MessageBody:o,MessageGroupId:e,MessageAttributes:{executionId:{DataType:"String",StringValue:e},messageType:{DataType:"String",StringValue:t}}}))}export{m as reportArtifact,P as reportFinalStatus,$ as reportProgress};
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{existsSync as c,mkdirSync as p,readFileSync as h,writeFileSync as m,statSync as g}from"node:fs";import{homedir as y}from"node:os";import{join as T,dirname as $}from"node:path";function f(t){return t.replace(/\//g,"-")}function d(t,s){return T(y(),".claude","projects",f(t),`${s}.jsonl`)}async function
|
|
1
|
+
import {
  existsSync,
  mkdirSync,
  readFileSync,
  writeFileSync,
  statSync,
} from "node:fs";
import { homedir } from "node:os";
import { join, dirname } from "node:path";

/**
 * Map an absolute cwd to the flattened directory name used under
 * ~/.claude/projects (every "/" becomes "-").
 */
function sdkProjectDir(cwd) {
  return cwd.replace(/\//g, "-");
}

/** Absolute path of the local session transcript (JSONL) for cwd + session id. */
function sessionJsonlPath(cwd, sessionId) {
  return join(homedir(), ".claude", "projects", sdkProjectDir(cwd), `${sessionId}.jsonl`);
}

/**
 * Restore a prior session transcript from S3 into the local SDK project dir.
 * Never throws: every failure path returns `{ pulled: false, reason }`.
 *
 * @param {{apiUrl: string, apiKey: string, workflowUuid: string, sessionId: string, cwd: string}} args
 * @returns {Promise<{pulled: boolean, bytes?: number, reason?: string}>}
 */
async function sessionPullFromS3({ apiUrl, apiKey, workflowUuid, sessionId, cwd }) {
  if (!apiUrl || !apiKey || !workflowUuid || !sessionId || !cwd) {
    return { pulled: false, reason: "missing required arg" };
  }

  // 1) Ask the API to mint a pre-signed download URL for this session.
  let minted;
  try {
    const res = await fetch(`${apiUrl}/workflows/${workflowUuid}/sessions/${sessionId}/download-url`, {
      method: "POST",
      headers: { Authorization: `Bearer ${apiKey}`, "Content-Type": "application/json" },
    });
    if (!res.ok) {
      return { pulled: false, reason: `mint download-url failed: HTTP ${res.status}` };
    }
    minted = await res.json();
  } catch (err) {
    return { pulled: false, reason: `mint download-url threw: ${err.message}` };
  }

  // 2) Fetch the transcript bytes. 404/403 means there is no prior session
  //    yet (first turn), which is an expected non-error outcome.
  let content;
  try {
    const res = await fetch(minted.url, { method: "GET" });
    if (res.status === 404 || res.status === 403) {
      return { pulled: false, reason: "no prior session (first turn)" };
    }
    if (!res.ok) {
      return { pulled: false, reason: `GET presigned failed: HTTP ${res.status}` };
    }
    content = Buffer.from(await res.arrayBuffer());
  } catch (err) {
    return { pulled: false, reason: `GET presigned threw: ${err.message}` };
  }

  // 3) Write the transcript where the SDK will look for it.
  const localPath = sessionJsonlPath(cwd, sessionId);
  try {
    mkdirSync(dirname(localPath), { recursive: true });
    writeFileSync(localPath, content);
    return { pulled: true, bytes: content.length };
  } catch (err) {
    return { pulled: false, reason: `write local JSONL failed: ${err.message}` };
  }
}

/**
 * Upload the local session transcript to S3 via a minted pre-signed URL.
 * Never throws: every failure path returns `{ pushed: false, reason }`.
 *
 * @param {{apiUrl: string, apiKey: string, workflowUuid: string, sessionId: string, cwd: string}} args
 * @returns {Promise<{pushed: boolean, bytes?: number, reason?: string}>}
 */
async function sessionPushToS3({ apiUrl, apiKey, workflowUuid, sessionId, cwd }) {
  if (!apiUrl || !apiKey || !workflowUuid || !sessionId || !cwd) {
    return { pushed: false, reason: "missing required arg" };
  }

  const localPath = sessionJsonlPath(cwd, sessionId);
  if (!existsSync(localPath)) {
    return { pushed: false, reason: "no local JSONL to push" };
  }

  // Read the transcript up front so read errors are reported distinctly.
  let content;
  let size;
  try {
    content = readFileSync(localPath);
    size = statSync(localPath).size;
  } catch (err) {
    return { pushed: false, reason: `read local JSONL failed: ${err.message}` };
  }

  // Mint a pre-signed upload URL.
  let minted;
  try {
    const res = await fetch(`${apiUrl}/workflows/${workflowUuid}/sessions/${sessionId}/upload-url`, {
      method: "POST",
      headers: { Authorization: `Bearer ${apiKey}`, "Content-Type": "application/json" },
    });
    if (!res.ok) {
      return { pushed: false, reason: `mint upload-url failed: HTTP ${res.status}` };
    }
    minted = await res.json();
  } catch (err) {
    return { pushed: false, reason: `mint upload-url threw: ${err.message}` };
  }

  // PUT the bytes, forwarding any headers the presigner requires.
  try {
    const res = await fetch(minted.url, {
      method: "PUT",
      headers: { ...(minted.requiredHeaders || {}), "Content-Length": String(size) },
      body: content,
    });
    return res.ok
      ? { pushed: true, bytes: size }
      : { pushed: false, reason: `PUT presigned failed: HTTP ${res.status}` };
  } catch (err) {
    return { pushed: false, reason: `PUT presigned threw: ${err.message}` };
  }
}

// Internal helpers exposed for unit tests only.
var H = { sdkProjectDir, sessionJsonlPath };

export { H as __test, sessionPullFromS3, sessionPushToS3 };
|