@zibby/cli 0.4.18 → 0.4.20
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/commands/workflows/deploy.js +37 -37
- package/dist/commands/workflows/run-local.js +15 -15
- package/dist/commands/workflows/run.js +10 -10
- package/dist/commands/workflows/trigger-helpers.js +1 -1
- package/dist/commands/workflows/trigger.js +33 -33
- package/dist/commands/workflows/validate-helpers.js +1 -1
- package/dist/commands/workflows/validate.js +4 -4
- package/dist/package.json +1 -1
- package/dist/templates/.claude/CLAUDE.md +164 -48
- package/dist/templates/.claude/commands/add-skill.md +19 -7
- package/dist/utils/session-sync.js +1 -0
- package/package.json +1 -1
- package/templates/.claude/CLAUDE.md +164 -48
- package/templates/.claude/commands/add-skill.md +19 -7
|
@@ -1,63 +1,63 @@
|
|
|
1
|
-
var K=Object.defineProperty;var
|
|
1
|
+
var K=Object.defineProperty;var H=(o,e)=>()=>(o&&(e=o(o=0)),e);var X=(o,e)=>{for(var t in e)K(o,t,{get:e[t],enumerable:!0})};var G={};X(G,{logsCommand:()=>ye,parseSseChunk:()=>M,runReconnectLoop:()=>Z,sseBackoffMs:()=>J});import i from"chalk";import{readFileSync as re,existsSync as se,writeSync as ie}from"fs";import{homedir as le}from"os";import{join as ae}from"path";async function fe(o){return T||(process.env.ZIBBY_SSE_ENDPOINT?(T=process.env.ZIBBY_SSE_ENDPOINT,T):(T=ce,T))}function pe(o){let e=ae(le(),".zibby","config.json");se(e)||(console.log(i.red(`
|
|
2
2
|
Not authenticated`)),console.log(i.gray(` Run: zibby login
|
|
3
|
-
`)),process.exit(1));let t;try{t=JSON.parse(
|
|
3
|
+
`)),process.exit(1));let t;try{t=JSON.parse(re(e,"utf-8"))}catch{console.log(i.red(`
|
|
4
4
|
Config file corrupt`)),console.log(i.gray(` Run: zibby login
|
|
5
5
|
`)),process.exit(1)}let n=t.sessionToken;n||(console.log(i.red(`
|
|
6
6
|
Not authenticated`)),console.log(i.gray(` Run: zibby login
|
|
7
|
-
`)),process.exit(1));let
|
|
7
|
+
`)),process.exit(1));let r=o.project;return{token:n,projectId:r}}function B(o){let e=new Date(o);if(process.env.ZIBBY_LOGS_UTC==="1")return e.toISOString().replace("T"," ").replace("Z","");let t=(n,r=2)=>String(n).padStart(r,"0");return`${e.getFullYear()}-${t(e.getMonth()+1)}-${t(e.getDate())} ${t(e.getHours())}:${t(e.getMinutes())}:${t(e.getSeconds())}.${t(e.getMilliseconds(),3)}`}async function F(o,e){let t=await fetch(o,{headers:{Authorization:`Bearer ${e}`}});if(!t.ok){let n=await t.text();throw new Error(`API ${t.status}: ${n}`)}return t.json()}async function ue(o,e,t,n){return o||(console.log(i.red(`
|
|
8
8
|
Workflow UUID is required`)),console.log(i.gray(" Usage: zibby workflow logs <workflow-uuid>")),console.log(i.gray(` zibby workflow logs <workflow-uuid> -t
|
|
9
|
-
`)),process.exit(1)),o}function
|
|
10
|
-
`),
|
|
9
|
+
`)),process.exit(1)),o}function M(o,e){let n=((o||"")+e).split(`
|
|
10
|
+
`),r=n.pop()||"",l=[],a=null,s=null;for(let c=0;c<n.length;c++){let f=n[c];if(f.trim()){if(f.startsWith("id:")){a=f.slice(3).trim();continue}if(f.startsWith("event:")){let d=f.slice(6).trim();if(d==="log")continue;if(d==="status"){let p=n[c+1];if(p&&p.startsWith("data:"))try{let u=JSON.parse(p.slice(5).trim());u.status==="new_execution"?l.push({type:"newExecution",executionId:u.executionId,taskId:u.taskId}):u.status==="waiting"&&l.push({type:"waiting"})}catch{}continue}if(d==="complete"){l.push({type:"complete"});continue}if(d==="error"){let p=n[c+1];if(p&&p.startsWith("data:"))try{if(JSON.parse(p.slice(5).trim()).error==="No executions found for workflow"){s={type:"notFound"};break}}catch{}s={type:"failed"};break}continue}if(f.startsWith("data:")){let d=f.slice(5).trim();if(!d)continue;try{let p=JSON.parse(d);p.timestamp&&p.message&&l.push({type:"log",timestamp:p.timestamp,message:p.message,taskId:p.taskId})}catch{}}}}return{actions:l,remainder:r,lastEventId:a,returnSignal:s}}async function ge({token:o,executionId:e,sseEndpoint:t,stopped:n}){let r=null;try{let w=function(){let $=Array.from(y.entries()).slice(0,p);y.clear();for(let[I,h]of $)y.set(I,h);g=new Set(Array.from(y.values()).map(I=>I.taskId).filter(Boolean));for(let I of[...m.keys()])y.has(I)||m.delete(I)},E=function(b){for(let[$,I]of y)if(I.taskId===b)return $;return null},v=function(b){let $=y.get(b);!$||$.headerPrinted||(console.log(i.cyan(`
|
|
11
11
|
\u250C\u2500 Execution: ${$.shortId} (task: ${$.taskSuffix})`)),console.log(i.cyan(` \u2514\u2500 Streaming logs...
|
|
12
|
-
`)),$.headerPrinted=!0)},_=function(b){let $=i.gray(B(b.timestamp)),I=b.taskId?i.gray(`(${b.taskId.slice(-8)}) `):"";console.log(`${$} ${I}${b.message.replace(/\n$/,"")}`)},R=function(){if(!k)return;k=!1,x&&(clearTimeout(x),x=null);let b=Array.from(y.entries()).reverse();for(let[$]of b){let I=m.get($);if(!(!I||I.length===0)){v($);for(let
|
|
13
|
-
Waiting for next execution...`));break;case"complete":R(),d=!0;break;case"log":{if(
|
|
12
|
+
`)),$.headerPrinted=!0)},_=function(b){let $=i.gray(B(b.timestamp)),I=b.taskId?i.gray(`(${b.taskId.slice(-8)}) `):"";console.log(`${$} ${I}${b.message.replace(/\n$/,"")}`)},R=function(){if(!k)return;k=!1,x&&(clearTimeout(x),x=null);let b=Array.from(y.entries()).reverse();for(let[$]of b){let I=m.get($);if(!(!I||I.length===0)){v($);for(let h of I)_(h)}}m.clear()},P=function(){k&&(x&&clearTimeout(x),x=setTimeout(R,u))},l=new URL(t);l.searchParams.set("jobId",e),r&&l.searchParams.set("lastEventId",r);let a=await fetch(l.toString(),{headers:{Authorization:`Bearer ${o}`,Accept:"text/event-stream"}});if(!a.ok)throw new Error(`SSE connection failed: ${a.status} ${a.statusText}`);let s=a.body.getReader(),c=new TextDecoder,f="",d=!1,p=Number(process.env.ZIBBY_LOGS_HISTORY_LIMIT)||10,u=Number(process.env.ZIBBY_LOGS_REPLAY_TIMEOUT_MS)||3e3,y=new Map,g=new Set,m=new Map,k=!0,x=null;for(P();!n.value;){let{done:b,value:$}=await s.read();if(b)break;let I=M(f,c.decode($,{stream:!0}));f=I.remainder,I.lastEventId&&(r=I.lastEventId);for(let h of I.actions)switch(h.type){case"newExecution":{let S=`${h.executionId.slice(0,8)}...${h.executionId.slice(-4)}`,N=h.taskId?h.taskId.slice(-8):"pending";y.set(h.executionId,{shortId:S,taskSuffix:N,taskId:h.taskId||null,headerPrinted:!1}),m.has(h.executionId)||m.set(h.executionId,[]),w(),P();break}case"waiting":R(),console.log(i.gray(`
|
|
13
|
+
Waiting for next execution...`));break;case"complete":R(),d=!0;break;case"log":{if(h.taskId&&g.size>0&&!g.has(h.taskId))break;if(k){let S=h.taskId?E(h.taskId):null;if(S){let N=m.get(S)||[];N.push(h),m.set(S,N),P()}else _(h)}else{if(h.taskId){let S=E(h.taskId);S&&v(S)}_(h)}break}}if(I.returnSignal)return I.returnSignal.type==="notFound"?{notFound:!0}:{failed:!0}}return R(),{completed:d}}catch(l){if(l.name==="AbortError")return{aborted:!0};throw l}}function J(o,{baseMs:e=500,capMs:t=3e4,rand:n=Math.random}={}){let r=Math.min(t,e*Math.pow(2,Math.max(0,o)));return Math.floor(n()*r)}async function Z({attemptStream:o,stopped:e,follow:t,logger:n,sleep:r=c=>new Promise(f=>setTimeout(f,c)),exit:l=c=>{throw new Error(`exit:${c}`)},backoff:a=J,notFoundPollMs:s=5e3}){let c=0,f=!1;for(;!e.value;){let d;try{d=await o(),c=0}catch(p){if(p.name==="AbortError"||e.value)return{reason:"aborted"};if(f||(n.error(` SSE Error: ${p.message}`),t&&n.gray(" Reconnecting..."),f=!0),!t)return l("error")??{reason:"error"};let u=a(c);c++,await r(u);continue}if(d.aborted||e.value)return{reason:"aborted"};if(d.notFound){if(t){f||(n.yellow(" No executions found yet. Waiting for workflow to be triggered..."),n.gray(" Press Ctrl+C to stop."),f=!0),await r(s);continue}return n.yellow(`
|
|
14
14
|
No executions found for this workflow. Trigger the workflow first.
|
|
15
15
|
`),l("notFound")??{reason:"notFound"}}if(f&&(n.gray(` Reconnected.
|
|
16
16
|
`),f=!1),d.failed)return n.red(`
|
|
17
17
|
Execution failed.`),t?{reason:"failed"}:l("failed")??{reason:"failed"};if(d.completed)return l("completed")??{reason:"completed"};if(!t)return{reason:"disconnected"}}return{reason:"stopped"}}async function de({token:o,jobId:e,follow:t,projectId:n}){console.log(i.gray(` Streaming logs for workflow ${i.cyan(e)}...`)),console.log(t?i.gray(` Press Ctrl+C to stop.
|
|
18
|
-
`):"");let
|
|
19
|
-
`)),
|
|
18
|
+
`):"");let r=await fe(o);if(!r)return console.log(i.yellow(` SSE endpoint not configured, using CloudWatch polling...
|
|
19
|
+
`)),W({token:o,projectId:null,jobId:e,follow:t,limit:1e5});let l={value:!1},a=()=>{l.value=!0;try{ie(2,`
|
|
20
20
|
Stopped streaming.
|
|
21
|
-
`)}catch{}process.exit(0)};process.prependListener("SIGINT",a),process.prependListener("SIGTERM",a),await
|
|
21
|
+
`)}catch{}process.exit(0)};process.prependListener("SIGINT",a),process.prependListener("SIGTERM",a),await Z({attemptStream:()=>ge({token:o,executionId:e,sseEndpoint:r,stopped:l}),stopped:l,follow:t,logger:{gray:s=>console.log(i.gray(s)),red:s=>console.log(i.red(s)),yellow:s=>console.log(i.yellow(s)),error:s=>console.error(i.red(s))},exit:s=>{s==="completed"&&process.exit(0),(s==="error"||s==="notFound"||s==="failed")&&process.exit(1)}})}async function W({token:o,projectId:e,jobId:t,follow:n,limit:r}){let l=e?`${A}/logs/${e}/${t}`:`${A}/job/${t}`,a=null,s=0,c=new Set,f=!1,d=0,p=5,u=()=>{f=!0,console.log(i.gray(`
|
|
22
22
|
Stopped tailing.
|
|
23
|
-
`)),process.exit(0)};for(process.on("SIGINT",
|
|
24
|
-
`):"");!f;)try{let y=new URLSearchParams({limit:String(
|
|
25
|
-
Job ${
|
|
26
|
-
Status: ${
|
|
23
|
+
`)),process.exit(0)};for(process.on("SIGINT",u),process.on("SIGTERM",u),console.log(i.gray(` Fetching logs for workflow ${i.cyan(t)}...`)),console.log(n?i.gray(` Press Ctrl+C to stop.
|
|
24
|
+
`):"");!f;)try{let y=new URLSearchParams({limit:String(r)});a&&y.set("nextToken",a);let g=await F(`${l}?${y}`,o);d=0,g.message&&g.lines?.length===0&&s===0&&console.log(i.gray(` ${g.message}`)),g.status==="starting"&&g.lines?.length===0&&s===0&&console.log(i.gray(" Container starting..."));for(let w of g.lines||[]){let E=`${w.timestamp}:${w.message}`;if(c.has(E))continue;c.add(E);let v=i.gray(B(w.timestamp)),_=g.taskId?i.gray(`(${g.taskId.slice(-8)}) `):"";console.log(`${v} ${_}${w.message.replace(/\n$/,"")}`)}if(s=g.lines?.length>0?0:s+1,a=g.nextForwardToken||null,g.status==="completed"||g.status==="failed"){let w=g.status==="completed"?i.green:i.red;console.log(w(`
|
|
25
|
+
Job ${g.status}.`)),process.exit(g.status==="completed"?0:1)}if(!n){g.status&&console.log(i.gray(`
|
|
26
|
+
Status: ${g.status}`));break}let x=g.lines?.length>0?500:s>5?5e3:2e3;await new Promise(w=>setTimeout(w,x))}catch(y){if(y.name==="AbortError")break;y.message.match(/API (400|401|403|404):/)&&(console.error(i.red(`
|
|
27
27
|
${y.message}
|
|
28
28
|
`)),process.exit(1)),d++,console.error(i.red(` Error: ${y.message}`)),d>=p&&(console.error(i.red(`
|
|
29
29
|
Too many consecutive errors (${p}). Stopping.
|
|
30
|
-
`)),process.exit(1)),n||process.exit(1),await new Promise(m=>setTimeout(m,3e3))}}async function
|
|
30
|
+
`)),process.exit(1)),n||process.exit(1),await new Promise(m=>setTimeout(m,3e3))}}async function me({token:o,projectId:e,workflow:t,follow:n,limit:r}){let l=`${A}/all/${e}`,a=null,s=0,c=new Set,f=null,d=!1,p=0,u=5,y=()=>{d=!0,console.log(i.gray(`
|
|
31
31
|
Stopped tailing.
|
|
32
32
|
`)),process.exit(0)};for(process.on("SIGINT",y),process.on("SIGTERM",y),console.log(i.gray(`
|
|
33
33
|
Tailing all runs for ${i.cyan(t)}...`)),console.log(n?i.gray(` Press Ctrl+C to stop.
|
|
34
|
-
`):"");!d;)try{let
|
|
35
|
-
... more logs available. Run again or use --follow to stream.`)),m.jobCount&&console.log(i.gray(` ${m.jobCount} job(s) found.`));break}if(!m.hasRunning&&!a&&
|
|
36
|
-
No running jobs. All caught up.`));break}let x=m.lines?.length>0?500:
|
|
37
|
-
${
|
|
38
|
-
`)),process.exit(1)),p++,console.error(i.red(` Error: ${
|
|
39
|
-
Too many consecutive errors (${
|
|
40
|
-
`)),process.exit(1)),n||process.exit(1),await new Promise(k=>setTimeout(k,3e3))}}async function
|
|
34
|
+
`):"");!d;)try{let g=new URLSearchParams({workflow:t,limit:String(r)});a&&g.set("nextToken",a);let m=await F(`${l}?${g}`,o);p=0,m.message&&m.lines?.length===0&&s===0&&console.log(i.gray(` ${m.message}`));for(let w of m.lines||[]){let E=`${w.timestamp}:${w.jobId}:${w.message}`;if(c.has(E))continue;c.add(E),w.jobId!==f&&(f!==null&&console.log(""),console.log(i.dim(` \u2500\u2500 ${w.jobId} \u2500\u2500`)),f=w.jobId);let v=i.gray(B(w.timestamp));console.log(`${v} ${w.message.replace(/\n$/,"")}`)}if(s=m.lines?.length>0?0:s+1,a=m.nextToken||null,!n){a&&console.log(i.gray(`
|
|
35
|
+
... more logs available. Run again or use --follow to stream.`)),m.jobCount&&console.log(i.gray(` ${m.jobCount} job(s) found.`));break}if(!m.hasRunning&&!a&&s>2){console.log(i.gray(`
|
|
36
|
+
No running jobs. All caught up.`));break}let x=m.lines?.length>0?500:s>5?5e3:2e3;await new Promise(w=>setTimeout(w,x))}catch(g){if(g.name==="AbortError")break;g.message.match(/API (400|401|403|404):/)&&(console.error(i.red(`
|
|
37
|
+
${g.message}
|
|
38
|
+
`)),process.exit(1)),p++,console.error(i.red(` Error: ${g.message}`)),p>=u&&(console.error(i.red(`
|
|
39
|
+
Too many consecutive errors (${u}). Stopping.
|
|
40
|
+
`)),process.exit(1)),n||process.exit(1),await new Promise(k=>setTimeout(k,3e3))}}async function ye(o,e){let{token:t,projectId:n}=pe(e),r=e.follow===!0,l=e.lines?parseInt(e.lines,10):1e5;if(e.all){let s=e.workflow;return s||(console.log(i.red(`
|
|
41
41
|
--workflow is required with --all`)),console.log(i.gray(` Example: zibby workflow logs --workflow ticket-triage --all --project <id>
|
|
42
|
-
`)),process.exit(1)),
|
|
42
|
+
`)),process.exit(1)),me({token:t,projectId:n,workflow:s,follow:r,limit:l})}let a=await ue(o,e,t,n);return r?de({token:t,jobId:a,follow:r,projectId:n}):W({token:t,projectId:n,jobId:a,follow:!1,limit:l})}var A,ce,T,V=H(()=>{A="https://logs.workflows.zibby.app",ce="https://logs-stream.zibby.app/",T=null});import C from"ora";import{select as q}from"@inquirer/prompts";import{readFileSync as he,existsSync as we}from"fs";import{homedir as Ie}from"os";import{join as $e}from"path";var U={local:{name:"Local Development",apiUrl:"http://localhost:3001",accountApiUrl:"http://localhost:3001",frontendUrl:"http://localhost:3000",description:"Local backend running on port 3001"},prod:{name:"Production",apiUrl:process.env.ZIBBY_PROD_API_URL||"https://api-prod.zibby.app",accountApiUrl:process.env.ZIBBY_PROD_ACCOUNT_API_URL||"https://account-api-prod.zibby.app",frontendUrl:process.env.ZIBBY_PROD_FRONTEND_URL||"https://studio.zibby.dev",description:"Production environment"}};function j(){let o;if(process.env.ZIBBY_API_URL)o=process.env.ZIBBY_API_URL;else{let e=process.env.ZIBBY_ENV||"prod";U[e]?o=U[e].apiUrl:o=U.prod.apiUrl}try{let e=new URL(o);return e.protocol!=="http:"&&e.protocol!=="https:"?(console.error(`\u26A0\uFE0F Invalid API URL protocol: ${e.protocol} (only http/https allowed)`),U.prod.apiUrl):o}catch{return console.error(`\u26A0\uFE0F Invalid API URL: ${o}`),U.prod.apiUrl}}var Q=/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;function L(o){return o?Q.test(o)?{ok:!0}:{ok:!1,error:`'${o}' is not a UUID. Cloud workflows are identified by UUID only. 
Run \`zibby workflow list\` to find yours, or run \`zibby workflow trigger\` with no argument for interactive selection.`}:{ok:!0}}function O(o){if(!o||typeof o!="object")return[" Error: Invalid input"];let e=[],t=Array.isArray(o.missing)?o.missing:[],n=Array.isArray(o.typeMismatches)?o.typeMismatches:[];if(t.length){e.push(` Missing required input${t.length>1?"s":""}:`);for(let r of t)e.push(` - ${r}`)}if(n.length){e.push(` Wrong type for input${n.length>1?"s":""}:`);for(let r of n)e.push(` - ${r.message}`)}return t.length===0&&n.length===0&&e.push(` Error: ${o.error||"Invalid input"}`),o.hint&&(e.push(""),e.push(` ${o.hint}`)),e}import{existsSync as ee,readFileSync as oe}from"fs";import{resolve as te}from"path";function z(o){return o==="true"?!0:o==="false"?!1:o==="null"?null:o!==""&&!isNaN(Number(o))?Number(o):o}function D(o){let e={};for(let t of o||[]){let n=t.indexOf("=");if(n===-1){console.warn(` Warning: ignored param "${t}" \u2014 expected key=value format`);continue}let r=t.slice(0,n).trim(),l=z(t.slice(n+1)),a=r.split("."),s=e;for(let c=0;c<a.length-1;c++)(typeof s[a[c]]!="object"||s[a[c]]===null)&&(s[a[c]]={}),s=s[a[c]];s[a[a.length-1]]=l}return e}function ne(o){let e=te(o);ee(e)||(console.log(`
|
|
43
43
|
Error: --input-file not found: ${o}
|
|
44
|
-
`),process.exit(1));try{return JSON.parse(
|
|
44
|
+
`),process.exit(1));try{return JSON.parse(oe(e,"utf-8"))}catch(t){console.log(`
|
|
45
45
|
Error: --input-file is not valid JSON: ${t.message}
|
|
46
|
-
`),process.exit(1)}}function
|
|
46
|
+
`),process.exit(1)}}function Y(o){let e={};if(o.inputFile&&(e={...ne(o.inputFile)}),o.input)try{e={...e,...JSON.parse(o.input)}}catch(t){console.log(`
|
|
47
47
|
Error: --input is not valid JSON`),console.log(` ${t.message}
|
|
48
|
-
`),process.exit(1)}return o.param?.length&&(e={...e,...
|
|
48
|
+
`),process.exit(1)}return o.param?.length&&(e={...e,...D(o.param)}),e}function be(){let o=$e(Ie(),".zibby","config.json");if(we(o))try{let t=JSON.parse(he(o,"utf-8"));if(t.sessionToken)return t.sessionToken}catch{}let e=process.env.ZIBBY_API_KEY;if(e)return e;console.log(`
|
|
49
49
|
Not authenticated`),console.log(" Run: zibby login"),console.log(` OR set ZIBBY_API_KEY env var (for CI/CD)
|
|
50
|
-
`),process.exit(1)}async function
|
|
50
|
+
`),process.exit(1)}async function ke(o){let e=j(),t=C("Fetching projects...").start();try{let n=await fetch(`${e}/projects`,{method:"GET",headers:{"Content-Type":"application/json",Authorization:`Bearer ${o}`}});n.ok||(t.fail("Failed to fetch projects"),process.exit(1));let r=await n.json();Array.isArray(r)||(r.projects?r=r.projects:r.data&&(r=r.data)),(!r||r.length===0)&&(t.fail("No projects found"),process.exit(1)),t.succeed(`Found ${r.length} project${r.length===1?"":"s"}`),console.log("");let l=r.map(a=>({name:`${a.name||"Unnamed"} (${a.projectId||a.id})`,value:a.projectId||a.id}));return await q({message:"Select a project:",choices:l})}catch(n){t.fail(`Error: ${n.message}`),process.exit(1)}}async function xe(o,e){let t=j(),n=C("Fetching deployed workflows...").start();try{let r=["analysis","implementation","run_test"],l=[];for(let s of r){let c=await fetch(`${t}/projects/${o}/workflows/${s}`,{method:"GET",headers:{"Content-Type":"application/json",Authorization:`Bearer ${e}`}});if(c.ok){let f=await c.json();f.graph&&l.push({name:s,version:f.version||0,isDefault:f.isDefault!==!1})}}l.length===0&&(n.fail("No deployed workflows found for this project"),process.exit(1)),n.succeed(`Found ${l.length} deployed workflow${l.length===1?"":"s"}`),console.log("");let a=l.map(s=>({name:`${s.name} (v${s.version})${s.isDefault?" [default]":""}`,value:s.name}));return await q({message:"Select a workflow to trigger:",choices:a})}catch(r){n.fail(`Error: ${r.message}`),process.exit(1)}}async function Fe(o,e={}){let t=L(o);t.ok||(console.log(`
|
|
51
51
|
Error: ${t.error}
|
|
52
|
-
`),process.exit(1));let n
|
|
52
|
+
`),process.exit(1));let n=be(),r=e.project||process.env.ZIBBY_PROJECT_ID,l;if(o){let c=j();try{let f=await fetch(`${c}/projects`,{method:"GET",headers:{"Content-Type":"application/json",Authorization:`Bearer ${n}`}});if(f.ok){let p=(await f.json()).projects||[];for(let u of p){let y=await fetch(`${c}/projects/${u.projectId}/workflows`,{method:"GET",headers:{"Content-Type":"application/json",Authorization:`Bearer ${n}`}});if(y.ok){let m=(await y.json()).find(k=>k.uuid===o);if(m){r=u.projectId,l=m.workflowType||m.name,console.log(`
|
|
53
53
|
\u2713 Found workflow "${l}" (UUID: ${o})
|
|
54
54
|
`);break}}}(!l||l===o)&&(console.log(`
|
|
55
55
|
Error: Workflow with UUID "${o}" not found`),console.log(` Check: zibby workflow list
|
|
56
56
|
`),process.exit(1))}}catch(f){console.log(`
|
|
57
57
|
Error looking up workflow UUID: ${f.message}
|
|
58
|
-
`),process.exit(1)}}
|
|
58
|
+
`),process.exit(1)}}r||(console.log(""),r=await ke(n)),l||(console.log(""),l=await xe(r,n));let a=Y(e);if(console.log(`
|
|
59
59
|
Triggering Workflow
|
|
60
|
-
`),console.log(" ".padEnd(60,"-")),console.log(` Workflow: ${l}`),console.log(` Project: ${
|
|
61
|
-
Your workflow execution quota has been exceeded`),
|
|
62
|
-
`),process.exit(1)}let p=await d.json();if(
|
|
63
|
-
`),process.exit(1)}}export{
|
|
60
|
+
`),console.log(" ".padEnd(60,"-")),console.log(` Workflow: ${l}`),console.log(` Project: ${r}`),Object.keys(a).length>0){let c=JSON.stringify(a);console.log(` Input: ${c.length>60?`${c.substring(0,57)}...`:c}`)}e.idempotencyKey&&console.log(` Idempotency: ${e.idempotencyKey}`),console.log(" ".padEnd(60,"-")),console.log("");let s=C("Triggering workflow execution...").start();try{let c=j(),f={input:a};e.idempotencyKey&&(f.idempotencyKey=e.idempotencyKey);let d=await fetch(`${c}/projects/${r}/workflows/${l}/trigger`,{method:"POST",headers:{"Content-Type":"application/json",Authorization:`Bearer ${n}`},body:JSON.stringify(f)});if(!d.ok){let u=await d.json().catch(()=>({}));if(d.status===429&&(s.fail("Quota exceeded"),console.log(`
|
|
61
|
+
Your workflow execution quota has been exceeded`),u.quotaInfo&&(console.log(` Used: ${u.quotaInfo.used}/${u.quotaInfo.limit} executions`),console.log(` Plan: ${u.quotaInfo.planId}`),u.quotaInfo.periodEnd&&console.log(` Resets: ${new Date(u.quotaInfo.periodEnd).toLocaleDateString()}`)),console.log(""),process.exit(1)),d.status===400&&Array.isArray(u.validationErrors)){s.fail("Invalid input"),console.log("");for(let y of O(u))console.log(y);console.log(""),process.exit(1)}s.fail("Trigger failed"),console.log(` Error: ${u.message||u.error||d.statusText}
|
|
62
|
+
`),process.exit(1)}let p=await d.json();if(s.succeed("Workflow triggered successfully"),console.log(""),console.log(" Job Details:"),console.log(` Job ID: ${p.jobId}`),console.log(` Status: ${p.status}`),console.log(` Version: ${p.version}`),console.log(` Triggered: ${new Date(p.triggeredAt).toLocaleString()}`),console.log(""),e.follow){console.log(" Streaming logs (Ctrl+C to stop)..."),console.log("");let{logsCommand:u}=await Promise.resolve().then(()=>(V(),G));return u(p.jobId,{follow:!0,project:r,apiKey:e.apiKey})}console.log(" Monitor execution:"),o?(console.log(` zibby workflow logs ${o}`),console.log(` zibby workflow logs ${o} -t`)):(console.log(` zibby workflow logs --workflow ${l} --project ${r}`),console.log(` zibby workflow logs --workflow ${l} --project ${r} -t`)),console.log("")}catch(c){s.fail("Trigger failed"),console.log(` Error: ${c.message}
|
|
63
|
+
`),process.exit(1)}}export{z as coerceValue,D as parseParams,Y as resolveInput,Fe as triggerWorkflowCommand};
|
|
@@ -1 +1 @@
|
|
|
1
|
-
var
|
|
1
|
+
var d=new Set(["claude","cursor","codex","gemini","assistant"]);function u(e,t={}){let r=[],o=t.location||"workflow.json";return!e||typeof e!="object"?(r.push({severity:"error",code:"workflow-json-missing-or-invalid",message:"workflow.json is missing or not a JSON object",location:o}),r):(!e.name||typeof e.name!="string"?r.push({severity:"error",code:"workflow-json-missing-name",message:"workflow.json must have a string `name`",location:o}):/^[a-z][a-z0-9_-]{0,62}[a-z0-9]$/.test(e.name)||r.push({severity:"error",code:"workflow-json-bad-name",message:`workflow.json \`name\` must match [a-z][a-z0-9_-]*[a-z0-9] (got "${e.name}")`,location:o}),(!e.description||typeof e.description!="string")&&r.push({severity:"warning",code:"workflow-json-missing-description",message:"workflow.json should have a `description` (one-line, what the workflow does)",location:o}),e.defaultAgent!=null&&!d.has(e.defaultAgent)&&r.push({severity:"error",code:"workflow-json-bad-agent",message:`workflow.json \`defaultAgent\` must be one of ${[...d].join(", ")} (got "${e.defaultAgent}")`,location:o}),e.entryClass!=null&&typeof e.entryClass!="string"&&r.push({severity:"error",code:"workflow-json-bad-entry-class",message:"workflow.json `entryClass` must be a string (the class name in graph.mjs)",location:o}),e.triggers!=null&&typeof e.triggers!="object"&&r.push({severity:"error",code:"workflow-json-bad-triggers",message:'workflow.json `triggers` must be an object (e.g. {"api": true})',location:o}),r)}function h(e){return!!e&&typeof e=="object"&&typeof e._def<"u"&&typeof e.parse=="function"}function g(e,t,r={}){let o=[],s=r.location||`node "${e}"`;if(!t||typeof t!="object")return o.push({severity:"error",code:"node-missing",message:`Node "${e}" is missing or not an object`,location:s}),o;t.outputSchema?h(t.outputSchema)||o.push({severity:"warning",code:"node-non-zod-output-schema",message:`Node "${e}" \`outputSchema\` is not a recognizable Zod schema (no _def + parse). 
Plain-object schemas will be coerced via OutputParser, which is more permissive than the Zod path.`,location:s}):o.push({severity:"error",code:"node-missing-output-schema",message:`Node "${e}" has no \`outputSchema\` \u2014 every node needs a Zod schema`,location:s});let n=t.prompt!=null||t.config&&t.config.prompt!=null,i=typeof t.customExecute=="function";return!n&&!i&&o.push({severity:"error",code:"node-no-prompt-or-execute",message:`Node "${e}" has neither \`prompt\` nor \`execute\` \u2014 needs one of them (prompt for LLM nodes, execute for custom-code nodes)`,location:s}),n&&i&&o.push({severity:"warning",code:"node-both-prompt-and-execute",message:`Node "${e}" defines both \`prompt\` and \`execute\` \u2014 \`execute\` always wins (LLM is skipped). Probably want to drop one.`,location:s}),o}var c=new Set(["END","__end__","end"]);function p(e){let t=[];if(!e||!(e.nodes instanceof Map)||!(e.edges instanceof Map))return t.push({severity:"error",code:"graph-not-a-workflow-graph",message:"buildGraph() did not return a WorkflowGraph instance (or returned an unrecognizable shape)"}),t;e.entryPoint?e.nodes.has(e.entryPoint)||t.push({severity:"error",code:"graph-entry-point-unknown",message:`Graph entry point "${e.entryPoint}" is not a registered node`}):t.push({severity:"error",code:"graph-no-entry-point",message:'Graph has no entry point \u2014 call `graph.setEntryPoint("nodeName")` before returning'});let r=[];for(let[s,n]of e.edges)if(e.nodes.has(s)||t.push({severity:"error",code:"graph-edge-from-unknown",message:`Edge starts from "${s}" but no such node is registered`,location:`addEdge("${s}", \u2026)`}),typeof n=="string")r.push({from:s,to:n});else if(n&&n.conditional&&typeof n.routes=="function"){let a=n.routes.toString().match(/['"`]([A-Za-z_][\w-]*)['"`]/g)||[];for(let f of a){let l=f.slice(1,-1);r.push({from:s,to:l})}}else t.push({severity:"error",code:"graph-bad-edge-target",message:`Edge from "${s}" has an unrecognizable target shape`,location:`addEdge / 
addConditionalEdges from "${s}"`});for(let{from:s,to:n}of r)c.has(n)||e.nodes.has(n)||t.push({severity:"error",code:"graph-edge-to-unknown",message:`Edge from "${s}" goes to "${n}" but no such node is registered (and it isn't the END sentinel)`,location:`addEdge("${s}", "${n}")`});if(e.entryPoint){let s=new Set([e.entryPoint]),n=!0;for(;n;){n=!1;for(let{from:i,to:a}of r)s.has(i)&&!c.has(a)&&!s.has(a)&&e.nodes.has(a)&&(s.add(a),n=!0)}for(let i of e.nodes.keys())s.has(i)||t.push({severity:"warning",code:"graph-orphan-node",message:`Node "${i}" is registered but unreachable from the entry point "${e.entryPoint}". Add an edge or remove the node.`})}let o=new Set(c);for(let s=0;s<e.nodes.size+1;s++){let n=!1;for(let{from:i,to:a}of r)o.has(a)&&!o.has(i)&&(o.add(i),n=!0);if(!n)break}for(let s of e.nodes.keys())o.has(s)||t.push({severity:"warning",code:"graph-dead-end",message:`Node "${s}" has no path that reaches END \u2014 the workflow will run forever or crash. Add an edge to END (or another terminal-reaching node).`});return t}function m(e,t){let r=[];if(!(e instanceof Map))return r;for(let[o,s]of e){let n=s.config?.skills||[];if(!Array.isArray(n)){r.push({severity:"error",code:"node-skills-not-array",message:`Node "${o}".skills must be an array of strings (got ${typeof n})`,location:`node "${o}"`});continue}for(let i of n){if(typeof i!="string"){r.push({severity:"error",code:"node-skill-not-string",message:`Node "${o}".skills contains a non-string entry: ${JSON.stringify(i)}`,location:`node "${o}"`});continue}t.has(i)||r.push({severity:"error",code:"node-skill-not-registered",message:`Node "${o}" references skill "${i}" which is not registered. Import the skill file in graph.mjs (e.g. 
import "./skills/${i}.mjs") BEFORE creating the WorkflowGraph.`,location:`node "${o}"`})}}return r}function w({workflowJson:e,graph:t,registeredSkills:r}){let o=[];if(o.push(...u(e)),o.push(...p(t)),t&&t.nodes instanceof Map){for(let[s,n]of t.nodes)o.push(...g(s,n));o.push(...m(t.nodes,r||new Set))}return o}export{w as runAllValidators,p as validateGraphTopology,g as validateNode,m as validateSkillReferences,u as validateWorkflowJson};
|
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import
|
|
2
|
-
${e.message}`}async function
|
|
3
|
-
Validating workflow: ${
|
|
4
|
-
`));let
|
|
1
|
+
import a from"chalk";import{readFileSync as j,existsSync as g}from"fs";import{join as u}from"path";import{pathToFileURL as E}from"url";var w=new Set(["claude","cursor","codex","gemini","assistant"]);function m(e,r={}){let s=[],t=r.location||"workflow.json";return!e||typeof e!="object"?(s.push({severity:"error",code:"workflow-json-missing-or-invalid",message:"workflow.json is missing or not a JSON object",location:t}),s):(!e.name||typeof e.name!="string"?s.push({severity:"error",code:"workflow-json-missing-name",message:"workflow.json must have a string `name`",location:t}):/^[a-z][a-z0-9_-]{0,62}[a-z0-9]$/.test(e.name)||s.push({severity:"error",code:"workflow-json-bad-name",message:`workflow.json \`name\` must match [a-z][a-z0-9_-]*[a-z0-9] (got "${e.name}")`,location:t}),(!e.description||typeof e.description!="string")&&s.push({severity:"warning",code:"workflow-json-missing-description",message:"workflow.json should have a `description` (one-line, what the workflow does)",location:t}),e.defaultAgent!=null&&!w.has(e.defaultAgent)&&s.push({severity:"error",code:"workflow-json-bad-agent",message:`workflow.json \`defaultAgent\` must be one of ${[...w].join(", ")} (got "${e.defaultAgent}")`,location:t}),e.entryClass!=null&&typeof e.entryClass!="string"&&s.push({severity:"error",code:"workflow-json-bad-entry-class",message:"workflow.json `entryClass` must be a string (the class name in graph.mjs)",location:t}),e.triggers!=null&&typeof e.triggers!="object"&&s.push({severity:"error",code:"workflow-json-bad-triggers",message:'workflow.json `triggers` must be an object (e.g. 
{"api": true})',location:t}),s)}function k(e){return!!e&&typeof e=="object"&&typeof e._def<"u"&&typeof e.parse=="function"}function b(e,r,s={}){let t=[],o=s.location||`node "${e}"`;if(!r||typeof r!="object")return t.push({severity:"error",code:"node-missing",message:`Node "${e}" is missing or not an object`,location:o}),t;r.outputSchema?k(r.outputSchema)||t.push({severity:"warning",code:"node-non-zod-output-schema",message:`Node "${e}" \`outputSchema\` is not a recognizable Zod schema (no _def + parse). Plain-object schemas will be coerced via OutputParser, which is more permissive than the Zod path.`,location:o}):t.push({severity:"error",code:"node-missing-output-schema",message:`Node "${e}" has no \`outputSchema\` \u2014 every node needs a Zod schema`,location:o});let n=r.prompt!=null||r.config&&r.config.prompt!=null,i=typeof r.customExecute=="function";return!n&&!i&&t.push({severity:"error",code:"node-no-prompt-or-execute",message:`Node "${e}" has neither \`prompt\` nor \`execute\` \u2014 needs one of them (prompt for LLM nodes, execute for custom-code nodes)`,location:o}),n&&i&&t.push({severity:"warning",code:"node-both-prompt-and-execute",message:`Node "${e}" defines both \`prompt\` and \`execute\` \u2014 \`execute\` always wins (LLM is skipped). 
Probably want to drop one.`,location:o}),t}var h=new Set(["END","__end__","end"]);function $(e){let r=[];if(!e||!(e.nodes instanceof Map)||!(e.edges instanceof Map))return r.push({severity:"error",code:"graph-not-a-workflow-graph",message:"buildGraph() did not return a WorkflowGraph instance (or returned an unrecognizable shape)"}),r;e.entryPoint?e.nodes.has(e.entryPoint)||r.push({severity:"error",code:"graph-entry-point-unknown",message:`Graph entry point "${e.entryPoint}" is not a registered node`}):r.push({severity:"error",code:"graph-no-entry-point",message:'Graph has no entry point \u2014 call `graph.setEntryPoint("nodeName")` before returning'});let s=[];for(let[o,n]of e.edges)if(e.nodes.has(o)||r.push({severity:"error",code:"graph-edge-from-unknown",message:`Edge starts from "${o}" but no such node is registered`,location:`addEdge("${o}", \u2026)`}),typeof n=="string")s.push({from:o,to:n});else if(n&&n.conditional&&typeof n.routes=="function"){let l=n.routes.toString().match(/['"`]([A-Za-z_][\w-]*)['"`]/g)||[];for(let f of l){let d=f.slice(1,-1);s.push({from:o,to:d})}}else r.push({severity:"error",code:"graph-bad-edge-target",message:`Edge from "${o}" has an unrecognizable target shape`,location:`addEdge / addConditionalEdges from "${o}"`});for(let{from:o,to:n}of s)h.has(n)||e.nodes.has(n)||r.push({severity:"error",code:"graph-edge-to-unknown",message:`Edge from "${o}" goes to "${n}" but no such node is registered (and it isn't the END sentinel)`,location:`addEdge("${o}", "${n}")`});if(e.entryPoint){let o=new Set([e.entryPoint]),n=!0;for(;n;){n=!1;for(let{from:i,to:l}of s)o.has(i)&&!h.has(l)&&!o.has(l)&&e.nodes.has(l)&&(o.add(l),n=!0)}for(let i of e.nodes.keys())o.has(i)||r.push({severity:"warning",code:"graph-orphan-node",message:`Node "${i}" is registered but unreachable from the entry point "${e.entryPoint}". 
Add an edge or remove the node.`})}let t=new Set(h);for(let o=0;o<e.nodes.size+1;o++){let n=!1;for(let{from:i,to:l}of s)t.has(l)&&!t.has(i)&&(t.add(i),n=!0);if(!n)break}for(let o of e.nodes.keys())t.has(o)||r.push({severity:"warning",code:"graph-dead-end",message:`Node "${o}" has no path that reaches END \u2014 the workflow will run forever or crash. Add an edge to END (or another terminal-reaching node).`});return r}function v(e,r){let s=[];if(!(e instanceof Map))return s;for(let[t,o]of e){let n=o.config?.skills||[];if(!Array.isArray(n)){s.push({severity:"error",code:"node-skills-not-array",message:`Node "${t}".skills must be an array of strings (got ${typeof n})`,location:`node "${t}"`});continue}for(let i of n){if(typeof i!="string"){s.push({severity:"error",code:"node-skill-not-string",message:`Node "${t}".skills contains a non-string entry: ${JSON.stringify(i)}`,location:`node "${t}"`});continue}r.has(i)||s.push({severity:"error",code:"node-skill-not-registered",message:`Node "${t}" references skill "${i}" which is not registered. Import the skill file in graph.mjs (e.g. 
import "./skills/${i}.mjs") BEFORE creating the WorkflowGraph.`,location:`node "${t}"`})}}return s}function y({workflowJson:e,graph:r,registeredSkills:s}){let t=[];if(t.push(...m(e)),t.push(...$(r)),r&&r.nodes instanceof Map){for(let[o,n]of r.nodes)t.push(...b(o,n));t.push(...v(r.nodes,s||new Set))}return t}async function x(e,r){let s=null;try{let o=u(e,".zibby.config.mjs");if(g(o)){let{loadUserConfig:n}=await import("../../utils/user-config.js").catch(()=>({}));if(typeof n=="function"){let i=await n(e);i?.paths?.workflows&&(s=i.paths.workflows)}}}catch{}let t=[...s?[u(e,s,r)]:[],u(e,"workflows",r),u(e,".zibby","workflows",r),u(e,r)];for(let o of t)if(g(u(o,"workflow.json"))&&g(u(o,"graph.mjs")))return o;return null}function N(e){let r=u(e,"workflow.json");if(!g(r))return null;try{return JSON.parse(j(r,"utf-8"))}catch(s){return{__parseError:s.message}}}async function S(e,r){let s=u(e,"graph.mjs");if(!g(s))throw new Error(`graph.mjs not found at ${s}`);let o=await import(`${E(s).href}?ts=${Date.now()}`),n=r?.entryClass,i=n&&o[n]||Object.values(o).find(f=>typeof f=="function"&&f.prototype&&typeof f.prototype.buildGraph=="function");if(i){let d=new i().buildGraph();if(!d)throw new Error(`${i.name}.buildGraph() returned undefined`);return d}let l=o.default||o.buildGraph;if(typeof l=="function")return l();throw new Error("graph.mjs must EITHER export a class extending WorkflowAgent (with `buildGraph()` method, named via workflow.json `entryClass`), OR export a default function that returns a WorkflowGraph.")}async function z(){try{let{listSkillIds:e}=await import("@zibby/agent-workflow");return new Set(e())}catch{return null}}function A(e){let r=e.severity==="error"?a.red("\u2717 ERROR "):a.yellow("\u26A0 WARN "),s=a.gray(`[${e.code}]`),t=e.location?a.gray(` (${e.location})`):"";return` ${r}${s}${t}
|
|
2
|
+
${e.message}`}async function D(e,r={}){let s=r.cwd||process.cwd();e||(console.error(a.red("Workflow name required.")),console.error(a.gray("Usage: zibby workflow validate <name>")),console.error(a.gray(" e.g. zibby workflow validate code-review")),process.exit(1));let t=await x(s,e);t||(console.error(a.red(`Workflow "${e}" not found.`)),console.error(a.gray(" Looked for workflow.json + graph.mjs in:")),console.error(a.gray(` <paths.workflows>/${e}/ (from .zibby.config.mjs)`)),console.error(a.gray(` workflows/${e}/`)),console.error(a.gray(` .zibby/workflows/${e}/`)),console.error(a.gray(" Run `zibby workflow new <name>` to scaffold one, or `zibby workflow list` to see what exists.")),process.exit(1)),console.log(a.bold(`
|
|
3
|
+
Validating workflow: ${a.cyan(e)}`)),console.log(a.gray(` at ${t}
|
|
4
|
+
`));let o=N(t);o?.__parseError&&(console.error(a.red(`\u2717 workflow.json is not valid JSON: ${o.__parseError}`)),process.exit(1));let n;try{n=await S(t,o)}catch(c){console.error(a.red(`\u2717 Failed to load graph.mjs: ${c.message}`)),c.stack&&r.verbose&&console.error(a.gray(c.stack)),process.exit(1)}let i=await z(),l=y({workflowJson:o,graph:n,registeredSkills:i||new Set}),f=i===null?l.filter(c=>c.code!=="node-skill-not-registered"):l,d=f.filter(c=>c.severity==="error"),p=f.filter(c=>c.severity==="warning");for(let c of f)console.log(A(c));console.log(""),d.length===0&&p.length===0?console.log(a.green(`\u2714 ${e} is valid. Ready for \`zibby workflow run\`.`)):console.log(`${d.length>0?a.red(`${d.length} error(s)`):a.gray("0 errors")}, ${p.length>0?a.yellow(`${p.length} warning(s)`):a.gray("0 warnings")}`),i===null&&console.log(a.gray(" (skill-registration checks skipped \u2014 @zibby/agent-workflow not resolvable; run `npm install` in the workflow folder first)")),console.log(""),d.length>0&&process.exit(1)}export{D as validateCommand};
|
package/dist/package.json
CHANGED
|
@@ -13,20 +13,31 @@ the intent.
|
|
|
13
13
|
## 0. The 30-second tour
|
|
14
14
|
|
|
15
15
|
```
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
├──
|
|
16
|
+
workflows/<name>/ # default. Override via paths.workflows
|
|
17
|
+
# in .zibby.config.mjs (legacy projects
|
|
18
|
+
# may have .zibby/workflows/).
|
|
19
|
+
├── workflow.json # name, description, entryClass, triggers, defaultAgent
|
|
20
|
+
├── graph.mjs # WorkflowAgent class — buildGraph() + onComplete()
|
|
21
|
+
├── state.js # Zod schema for caller-provided inputs (-p key=value)
|
|
22
|
+
├── nodes/ # one file per node — prompt|execute + outputSchema
|
|
23
|
+
│ ├── index.mjs # OPTIONAL barrel — re-exports keep graph.mjs imports
|
|
24
|
+
│ │ # tidy ("import { fooNode, barNode } from './nodes/'")
|
|
25
|
+
│ │ # but a graph that imports each file directly is
|
|
26
|
+
│ │ # equally valid. Pick whichever you prefer.
|
|
20
27
|
│ ├── plan.mjs
|
|
21
28
|
│ ├── implement.mjs
|
|
22
29
|
│ └── verify.mjs
|
|
23
|
-
└── package.json # @zibby/
|
|
30
|
+
└── package.json # @zibby/core + zod (core re-exports WorkflowGraph,
|
|
31
|
+
# WorkflowAgent, z, skills, agent strategies).
|
|
24
32
|
```
|
|
25
33
|
|
|
26
34
|
Lifecycle:
|
|
27
35
|
|
|
28
36
|
```bash
|
|
29
|
-
zibby workflow new <name> # scaffold (creates
|
|
37
|
+
zibby workflow new <name> # scaffold (creates workflows/<name>/ by default;
|
|
38
|
+
# also writes a starter nodes/example.mjs that
|
|
39
|
+
# you can replace/delete once your real nodes
|
|
40
|
+
# are in place)
|
|
30
41
|
zibby workflow run <name> -p key=val # run locally — one-shot, no server
|
|
31
42
|
zibby workflow start <name> # run locally with hot-reload (server)
|
|
32
43
|
zibby workflow validate <name> # static check (graph topology, schemas, skills)
|
|
@@ -49,44 +60,76 @@ start; cloud is ~60s. Iterating in cloud is 12× slower.
|
|
|
49
60
|
{
|
|
50
61
|
"name": "code-review",
|
|
51
62
|
"description": "Review a git diff and return structured findings",
|
|
52
|
-
"
|
|
53
|
-
"
|
|
54
|
-
"
|
|
55
|
-
"diff": "string",
|
|
56
|
-
"findings": "array"
|
|
57
|
-
}
|
|
63
|
+
"entryClass": "CodeReviewWorkflow",
|
|
64
|
+
"triggers": { "api": true },
|
|
65
|
+
"defaultAgent": "claude"
|
|
58
66
|
}
|
|
59
67
|
```
|
|
60
68
|
|
|
61
|
-
`
|
|
62
|
-
|
|
69
|
+
- `name` — kebab-case slug, ≤24 chars recommended (the validator itself enforces `[a-z][a-z0-9_-]*[a-z0-9]`, 2–64 chars)
|
|
70
|
+
- `entryClass` — the class exported from `graph.mjs` (CLI uses this to
|
|
71
|
+
pick the right export when there are multiple)
|
|
72
|
+
- `triggers.api` — `true` exposes a webhook URL after `zibby workflow
|
|
73
|
+
deploy`; `false` hides it (cron-only or internal)
|
|
74
|
+
- `defaultAgent` — one of `claude`, `cursor`, `codex`, `gemini` (the validator also accepts `assistant`). Any
|
|
75
|
+
node overrides with its own `agent: 'cursor'` field.
|
|
63
76
|
|
|
64
|
-
### `graph.mjs`
|
|
77
|
+
### `graph.mjs` (class form — what production runtime expects)
|
|
65
78
|
|
|
66
79
|
```js
|
|
67
|
-
import { WorkflowGraph } from '@zibby/
|
|
68
|
-
import { z } from 'zod';
|
|
80
|
+
import { WorkflowAgent, WorkflowGraph } from '@zibby/core';
|
|
69
81
|
import { planNode } from './nodes/plan.mjs';
|
|
70
82
|
import { implementNode } from './nodes/implement.mjs';
|
|
71
83
|
import { verifyNode } from './nodes/verify.mjs';
|
|
84
|
+
import { codeReviewStateSchema } from './state.js';
|
|
72
85
|
|
|
73
|
-
export
|
|
74
|
-
|
|
86
|
+
export class CodeReviewWorkflow extends WorkflowAgent {
|
|
87
|
+
buildGraph() {
|
|
88
|
+
const graph = new WorkflowGraph();
|
|
89
|
+
graph.setStateSchema(codeReviewStateSchema);
|
|
75
90
|
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
91
|
+
graph.addNode('plan', planNode);
|
|
92
|
+
graph.addNode('implement', implementNode);
|
|
93
|
+
graph.addNode('verify', verifyNode);
|
|
79
94
|
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
95
|
+
graph.addEdge('plan', 'implement');
|
|
96
|
+
graph.addEdge('implement', 'verify');
|
|
97
|
+
graph.addEdge('verify', 'END'); // 'END' is the terminal sentinel
|
|
83
98
|
|
|
84
|
-
|
|
99
|
+
graph.setEntryPoint('plan');
|
|
100
|
+
return graph;
|
|
101
|
+
}
|
|
85
102
|
|
|
86
|
-
|
|
103
|
+
async onComplete(result) {
|
|
104
|
+
// Optional — runs after the graph finishes. Useful for posting a
|
|
105
|
+
// summary somewhere or transforming `result` before the runner
|
|
106
|
+
// logs it.
|
|
107
|
+
console.log(`[code-review] done — success=${result.success !== false}`);
|
|
108
|
+
}
|
|
87
109
|
}
|
|
88
110
|
```
|
|
89
111
|
|
|
112
|
+
The class name MUST match `entryClass` in workflow.json. The CLI
|
|
113
|
+
instantiates it (`new CodeReviewWorkflow()`), calls `.buildGraph()`,
|
|
114
|
+
runs the graph, then invokes `.onComplete(result)`.
|
|
115
|
+
|
|
116
|
+
### `state.js`
|
|
117
|
+
|
|
118
|
+
```js
|
|
119
|
+
import { z } from 'zod';
|
|
120
|
+
|
|
121
|
+
export const codeReviewStateSchema = z.object({
|
|
122
|
+
diff: z.string().describe('Staged git diff to review'),
|
|
123
|
+
strict: z.boolean().optional().describe('Treat warnings as errors'),
|
|
124
|
+
});
|
|
125
|
+
```
|
|
126
|
+
|
|
127
|
+
Declares which user-input fields callers can pass via `-p key=value`
|
|
128
|
+
or `--input '{"key":"value"}'`. The runner validates inputs against
|
|
129
|
+
this schema at run time — invalid inputs fail fast with a Zod error.
|
|
130
|
+
Read fields in nodes via `state.diff`, `state.strict`, etc. Optional
|
|
131
|
+
in v1 — graphs without `state.js` work but lose input validation.
|
|
132
|
+
|
|
90
133
|
### A node — `nodes/plan.mjs`
|
|
91
134
|
|
|
92
135
|
```js
|
|
@@ -159,6 +202,13 @@ the first node sees `state.userRequest = "fix login"`.
|
|
|
159
202
|
are internal to the graph runtime. Just `return` a plain object that
|
|
160
203
|
matches your `outputSchema`. The runtime puts it under `state[nodeName]`.
|
|
161
204
|
|
|
205
|
+
**Execute nodes can ONLY write under `state[nodeName]`.** There's no
|
|
206
|
+
mechanism to mutate a top-level state key from inside `execute()`. If
|
|
207
|
+
you need a counter that survives across loop iterations (retries,
|
|
208
|
+
attempts, accumulator), put it inside the node's own output schema —
|
|
209
|
+
read the prior value via `state.<nodeName>?.<field>`, return the new
|
|
210
|
+
value as part of `execute()`'s output. See §3 for the loop pattern.
|
|
211
|
+
|
|
162
212
|
---
|
|
163
213
|
|
|
164
214
|
## 3. Conditional routing
|
|
@@ -173,7 +223,47 @@ graph.addConditionalEdges('verify', (state) => {
|
|
|
173
223
|
|
|
174
224
|
The router function receives the full state, returns the name of the
|
|
175
225
|
next node (or `'END'`). All possible target nodes must also be declared
|
|
176
|
-
elsewhere via `addNode
|
|
226
|
+
elsewhere via `addNode` — a router returning an unregistered name will
|
|
227
|
+
fail at runtime (validate also catches it as `graph-edge-to-unknown`).
|
|
228
|
+
|
|
229
|
+
### Loops with a retry counter
|
|
230
|
+
|
|
231
|
+
Graphs aren't DAGs — you can route back to an earlier node. The
|
|
232
|
+
counter pattern lives **inside the looping node's own output**, since
|
|
233
|
+
`execute()` can only write to `state[nodeName]`. Example: a
|
|
234
|
+
`generate → check` loop that gives up after 2 failed attempts (one retry).
|
|
235
|
+
|
|
236
|
+
```js
|
|
237
|
+
// nodes/check.mjs
|
|
238
|
+
export const checkNode = {
|
|
239
|
+
name: 'check',
|
|
240
|
+
outputSchema: z.object({
|
|
241
|
+
passed: z.boolean(),
|
|
242
|
+
attempts: z.number(),
|
|
243
|
+
}),
|
|
244
|
+
execute: async (state) => {
|
|
245
|
+
const priorAttempts = state.check?.attempts ?? 0;
|
|
246
|
+
const passed = /* … your check logic … */ false;
|
|
247
|
+
return { passed, attempts: priorAttempts + 1 };
|
|
248
|
+
},
|
|
249
|
+
};
|
|
250
|
+
|
|
251
|
+
// graph.mjs (inside buildGraph)
|
|
252
|
+
graph.addNode('generate', generateNode);
|
|
253
|
+
graph.addNode('check', checkNode);
|
|
254
|
+
graph.addEdge('generate', 'check');
|
|
255
|
+
graph.addConditionalEdges('check', (state) => {
|
|
256
|
+
if (state.check.passed) return 'END';
|
|
257
|
+
if (state.check.attempts < 2) return 'generate'; // loop back
|
|
258
|
+
return 'END'; // give up
|
|
259
|
+
});
|
|
260
|
+
graph.setEntryPoint('generate');
|
|
261
|
+
```
|
|
262
|
+
|
|
263
|
+
Each iteration through `check`, `priorAttempts` reads the value the
|
|
264
|
+
previous iteration wrote, and the new value is what the next router
|
|
265
|
+
call sees as `state.check.attempts`. `'END'` is the only sentinel —
|
|
266
|
+
any other return value must be an `addNode`'d name.
|
|
177
267
|
|
|
178
268
|
---
|
|
179
269
|
|
|
@@ -202,7 +292,7 @@ don't list each tool.
|
|
|
202
292
|
|
|
203
293
|
```js
|
|
204
294
|
// .zibby/workflows/<name>/skills/slack.mjs
|
|
205
|
-
import { registerSkill } from '@zibby/
|
|
295
|
+
import { registerSkill } from '@zibby/core';
|
|
206
296
|
|
|
207
297
|
registerSkill({
|
|
208
298
|
id: 'slack', // referenced by `skills: ['slack']`
|
|
@@ -219,7 +309,7 @@ Then import it from your `graph.mjs` BEFORE building the graph:
|
|
|
219
309
|
|
|
220
310
|
```js
|
|
221
311
|
import './skills/slack.mjs'; // side-effect: registers the skill
|
|
222
|
-
import { WorkflowGraph } from '@zibby/
|
|
312
|
+
import { WorkflowAgent, WorkflowGraph } from '@zibby/core';
|
|
223
313
|
// ...
|
|
224
314
|
```
|
|
225
315
|
|
|
@@ -361,6 +451,7 @@ cleaned up too.
|
|
|
361
451
|
|
|
362
452
|
| Symptom | Fix |
|
|
363
453
|
|--------------------------------------------------|------------------------------------------------|
|
|
454
|
+
| `No WorkflowAgent class found in graph.mjs` | Class name in `graph.mjs` must match `entryClass` in `workflow.json`. Or export a class that `extends WorkflowAgent` from `@zibby/core`. |
|
|
364
455
|
| `Node 'X' must define outputSchema` | Add `outputSchema: z.object({...})` to the node config |
|
|
365
456
|
| `Skill 'foo' not registered` | Import the skill file in graph.mjs BEFORE `new WorkflowGraph()` |
|
|
366
457
|
| `state.previousNode is undefined` in a prompt | Wrong order — add `graph.addEdge('previousNode', 'thisNode')` |
|
|
@@ -369,6 +460,9 @@ cleaned up too.
|
|
|
369
460
|
| Zod error: "Expected string, received undefined" | The previous node's outputSchema doesn't match its return — fix the producer, not the consumer |
|
|
370
461
|
| `workflow trigger` works but `run` doesn't | Local run reads env from `.env` / shell; cloud reads from `zibby workflow env`. Set both. |
|
|
371
462
|
| Hangs forever on a node | Add `retries: 0` to fail fast while debugging; check the prompt isn't asking the agent to wait |
|
|
463
|
+
| `Workflow "<name>" not found.` | Check `paths.workflows` in `.zibby.config.mjs` matches where you scaffolded. Default is `workflows/` at repo root. |
|
|
464
|
+
| Router returns a string that's not a registered node name | All possible return values must be either `'END'` or a name passed to `graph.addNode(...)` elsewhere. `validate` flags this as `graph-edge-to-unknown`. |
|
|
465
|
+
| Need a counter / accumulator across loop iterations | Put it in the looping node's own outputSchema. Read prior value via `state.<nodeName>?.<field>` inside `execute()`, return new value. See §3 retry-loop example. |
|
|
372
466
|
|
|
373
467
|
---
|
|
374
468
|
|
|
@@ -399,27 +493,49 @@ Read `.claude/commands/` for slash commands the user can invoke:
|
|
|
399
493
|
## 9. Quick reference
|
|
400
494
|
|
|
401
495
|
```js
|
|
402
|
-
|
|
403
|
-
import {
|
|
404
|
-
|
|
405
|
-
//
|
|
406
|
-
|
|
407
|
-
|
|
408
|
-
|
|
409
|
-
|
|
410
|
-
|
|
411
|
-
|
|
412
|
-
|
|
413
|
-
|
|
414
|
-
|
|
415
|
-
//
|
|
416
|
-
|
|
417
|
-
|
|
496
|
+
// Imports — @zibby/core re-exports everything you need.
|
|
497
|
+
import {
|
|
498
|
+
WorkflowAgent, // base class your workflow extends
|
|
499
|
+
WorkflowGraph, // construct + wire nodes inside buildGraph()
|
|
500
|
+
registerSkill, // for custom MCP tool bundles
|
|
501
|
+
registerStrategy, AgentStrategy, // for custom LLM strategies (rare)
|
|
502
|
+
z, // re-exported Zod for schemas
|
|
503
|
+
} from '@zibby/core';
|
|
504
|
+
|
|
505
|
+
// Workflow shell (production form — what run/start/deploy expect):
|
|
506
|
+
export class MyWorkflow extends WorkflowAgent {
|
|
507
|
+
buildGraph() {
|
|
508
|
+
const graph = new WorkflowGraph();
|
|
509
|
+
graph.setStateSchema(myStateSchema); // from ./state.js (optional)
|
|
510
|
+
graph.addNode(name, { prompt, outputSchema, execute, skills, agent, retries });
|
|
511
|
+
graph.addEdge(from, to);
|
|
512
|
+
graph.addConditionalEdges(from, (state) =>
|
|
513
|
+
state.cond ? 'nextNodeName' : 'END' // 'END' = terminal sentinel
|
|
514
|
+
);
|
|
515
|
+
graph.setEntryPoint(name);
|
|
516
|
+
return graph;
|
|
517
|
+
}
|
|
518
|
+
async onComplete(result) { /* optional post-processing */ }
|
|
519
|
+
}
|
|
418
520
|
|
|
419
521
|
// State (inside execute / prompt fns)
|
|
420
522
|
// READ: state.someKey or state.previousNode.field
|
|
421
523
|
// WRITE: return { ... } from execute() — runtime puts it at state[nodeName]
|
|
422
524
|
```
|
|
423
525
|
|
|
424
|
-
|
|
425
|
-
|
|
526
|
+
**Two API surfaces** — when in doubt, use the class form:
|
|
527
|
+
|
|
528
|
+
- **Class form** (above) — what `zibby workflow new` scaffolds and what
|
|
529
|
+
`zibby workflow run/start/deploy` execute. Required for cloud
|
|
530
|
+
deployment. Use this for anything you might trigger remotely.
|
|
531
|
+
|
|
532
|
+
- **Function form** — `export default function buildGraph() { return new
|
|
533
|
+
WorkflowGraph()... }`. Works for local `validate` + the standalone
|
|
534
|
+
`@zibby/agent-workflow` library (the underlying graph runtime), but
|
|
535
|
+
NOT for `run`/`deploy` (they look for the class). Useful for one-off
|
|
536
|
+
local scripts that import the graph runtime directly. **For Zibby
|
|
537
|
+
workflows, always use the class form** — import from `@zibby/core`,
|
|
538
|
+
which re-exports everything you need.
|
|
539
|
+
|
|
540
|
+
`zibby workflow validate` accepts both shapes. Other commands need the
|
|
541
|
+
class.
|