@cascaide-ts/react 0.1.0 → 0.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE CHANGED
@@ -1,6 +1,6 @@
1
1
  The MIT License
2
2
 
3
- Copyright (c) 2026 Cascaide
3
+ Copyright (c) 2026 Jishnu K Ramakrishnan
4
4
 
5
5
  Permission is hereby granted, free of charge, to any person obtaining a copy
6
6
  of this software and associated documentation files (the "Software"), to deal
package/README.md ADDED
@@ -0,0 +1,24 @@
1
+ # @cascaide-ts/react
2
+
3
+ Cascaide is a fullstack agent runtime and AI orchestration framework in TypeScript designed to run anywhere JS/TS can. It was originally built for web applications but works equally well for headless/CLI AI agents and workflows in JavaScript runtimes.
4
+
5
+ At its core, it is a distributed, observable, durable graph executor. The first split just happens to be client/server, hence full stack.
6
+
7
+ `@cascaide-ts/react` is the React adapter layer and provides:
8
+
9
+ - `WorkflowProvider` : Sets up the client-side state and runtime
10
+ - `WorkflowRenderer` : Renders the right UI nodes as you spawn them
11
+ - `useWorkflow` : Hook to control graph execution
12
+ - `useCascade` : Hook to observe graph execution
13
+
14
+ ## Installation
15
+
16
+ ```bash
17
+ npm i @cascaide-ts/react
18
+ ```
19
+
20
+ ## Additional Resources
21
+
22
+ [Quickstart](https://www.cascaide-ts.com/docs/QuickStart/overview)
23
+ [Provider and Renderer](https://www.cascaide-ts.com/docs/Learn/provider_renderer)
24
+ [Hooks](https://www.cascaide-ts.com/docs/Learn/hooks)
package/dist/index.cjs CHANGED
@@ -1,4 +1,4 @@
1
- var oe=Object.create;var S=Object.defineProperty;var re=Object.getOwnPropertyDescriptor;var ie=Object.getOwnPropertyNames;var se=Object.getPrototypeOf,ce=Object.prototype.hasOwnProperty;var de=(e,n)=>{for(var a in n)S(e,a,{get:n[a],enumerable:!0})},F=(e,n,a,t)=>{if(n&&typeof n=="object"||typeof n=="function")for(let o of ie(n))!ce.call(e,o)&&o!==a&&S(e,o,{get:()=>n[o],enumerable:!(t=re(n,o))||t.enumerable});return e};var le=(e,n,a)=>(a=e!=null?oe(se(e)):{},F(n||!e||!e.__esModule?S(a,"default",{value:e,enumerable:!0}):a,e)),ue=e=>F(S({},"__esModule",{value:!0}),e);var De={};de(De,{ReactWorkflowContext:()=>D,WorkflowProvider:()=>_e,WorkflowRenderer:()=>Re,apiForkCascade:()=>Q,apiHydrateCascadeContext:()=>Z,useAllCascades:()=>xe,useAppDispatch:()=>Ee,useAppSelector:()=>ne,useCascade:()=>Ne,useWorkflow:()=>ve});module.exports=ue(De);var C=require("react-redux"),x=require("react");var _=require("@reduxjs/toolkit"),J=require("@reduxjs/toolkit"),H=require("@reduxjs/toolkit"),Y=require("@reduxjs/toolkit"),E=require("reselect"),fe={context:{},activeNodes:{},history:[],errors:{}},$=(0,J.createSlice)({name:"workflow",initialState:fe,reducers:{updateContext:{reducer:(e,n)=>{Object.entries(n.payload).forEach(([a,t])=>{if(e.context[a]||(e.context[a]=[]),typeof t=="object"&&t!==null&&!Array.isArray(t)&&"index"in t){let o=t.index;e.context[a][o]=t}else e.context[a].push(t)})},prepare:(e,n)=>({payload:e,meta:n||{}})},addActiveNode:{reducer:(e,n)=>{let{nodeId:a,nodeName:t,parentTriggerId:o,contextData:r}=n.payload,{origin:i,functionId:d,cascadeId:c}=n.meta;e.activeNodes[a]={nodeName:t,parentTriggerId:o,processed:!1,initialContext:r,origin:i,functionId:d,cascadeId:c||r?.cascadeId},e.errors[a]&&delete e.errors[a]},prepare:(e,n)=>{let a=n?.cascadeId||e.contextData?.cascadeId;return{payload:e,meta:{...n,cascadeId:a}}}},removeActiveNode:{reducer:(e,n)=>{let{nodeId:a}=n.payload;e.activeNodes[a]&&(e.history.push({nodeId:a,nodeName:e.activeNodes[a].nodeName,timestamp:Date.now()}),delete 
e.activeNodes[a])},prepare:(e,n)=>({payload:e,meta:n||{}})},setError:{reducer:(e,n)=>{let{nodeId:a,error:t}=n.payload;e.errors[a]=t},prepare:(e,n)=>({payload:e,meta:n||{}})},markNodeProcessed:(e,n)=>{let{nodeId:a}=n.payload;e.activeNodes[a]&&(e.activeNodes[a].processed=!0)},streamChunkReceived:(e,n)=>{let{cascadeId:a,contentChunk:t,toolChunk:o,reasoningChunk:r,thoughtSignature:i}=n.payload;if(!t&&!o&&!r&&!i)return;let d=e.context[a];if(!d||d.length===0)return;let c=d[d.length-1].history,l=c[c.length-1];if(!(!l||l.role!=="assistant")&&(r&&(l.reasoning_content===void 0&&(l.reasoning_content=""),l.reasoning_content+=r),i&&(l.thought_signature=i),t&&(l.content+=t),o)){l.tool_calls||(l.tool_calls=[]);let u=o.index??0;l.tool_calls[u]||(l.tool_calls[u]={id:"",type:"function",function:{name:"",arguments:""}});let s=l.tool_calls[u];o.id&&(s.id=o.id),o.thought_signature&&(s.thought_signature=o.thought_signature),o.function?.name&&(s.function.name=o.function.name),o.function?.arguments&&(s.function.arguments+=o.function.arguments)}},hydrateContext:(e,n)=>{Object.entries(n.payload).forEach(([a,t])=>{e.context[a]=t})}}}),{updateContext:y,addActiveNode:I,removeActiveNode:N,setError:V,markNodeProcessed:Ae,streamChunkReceived:pe,hydrateContext:b}=$.actions,Oe=$.reducer,O=process.env.NODE_ENV==="development",he=e=>n=>a=>async t=>{if(I.match(t)){let{origin:o,functionId:r,cascadeId:i}=t.meta;if(i&&r>0&&o==="client"){let d=n.getState().workflow.context[i];if(!d||d.length===0){O&&console.log(`[CLIENT HYDRATION] \u{1F9CA} Cold start for ${i}`);try{let c=await fetch(e.hydrationEndpoint,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({cascadeId:i,functionId:r})});if(!c.ok)throw new Error(`Hydration API failed: ${c.status}`);let l=await c.json();l&&Object.keys(l).length>0&&(n.dispatch(b(l)),O&&console.log("[CLIENT HYDRATION] \u2705 State hydrated via Dispatch"))}catch(c){O&&console.error("[CLIENT HYDRATION] \u274C Hydration failed:",c)}}}}return 
a(t)},w=process.env.NODE_ENV==="development";async function v(e,n=3,a=100){for(let t=1;t<=n;t++)try{return await e()}catch(o){if(t===n)return w&&console.error(`[CLIENT MW] \u274C All ${n} retry attempts failed:`,o),null;let r=a*Math.pow(2,t-1);w&&console.warn(`[CLIENT MW] \u26A0\uFE0F Attempt ${t} failed, retrying in ${r}ms...`),await new Promise(i=>setTimeout(i,r))}return null}var ge=e=>n=>a=>async t=>{if(I.match(t)){let{origin:o,functionId:r,cascadeId:i}=t.meta,{nodeId:d,nodeName:c,contextData:l}=t.payload;if(!i||o)return a(t);let u=r??0,s=await v(async()=>{let h=await fetch(e.persistenceEndpoint,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({action:"claim",nodeInstanceId:d,cascadeId:i,userId:l.userId,nodeName:c,functionId:u,inputContext:l,location:"client"})});if(!h.ok)throw new Error(`Persistence sync failed: ${h.status}`);return await h.json()});if(!s){w&&console.error(`[CLIENT MW] \u274C Failed to claim node ${d} after retries`);return}let f=s.functionId;w&&console.log(`[CLIENT MW] \u2705 Node ${d} bound to FnId: ${f}`);let p={...t,meta:{...t.meta,origin:"client",functionId:f,cascadeId:i}};return a(p)}if(N.match(t)){let{origin:o,functionId:r,cascadeId:i}=t.meta,d=typeof t.payload=="string"?t.payload:t.payload.nodeId,c=t.payload.hasSpawns,l=t.payload.fullOutput,u=n.getState(),s=u.workflow.activeNodes[d],f=i||s?.cascadeId,p=r??s?.functionId??0;if(!f||o)return a(t);let h=l||u.workflow.context[f];if(!await v(async()=>{let g=await fetch(e.persistenceEndpoint,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({action:"finalize",nodeInstanceId:d,cascadeId:f,fullOutput:h,hasSpawns:c})});if(!g.ok)throw new Error(`Persistence sync failed: ${g.status}`);return await g.json()})){w&&console.error(`[CLIENT MW] \u274C Failed to finalize node ${d} after retries`);return}return 
a({...t,meta:{...t.meta,origin:"client",functionId:p,cascadeId:f}})}if(V.match(t)){let{origin:o,functionId:r,cascadeId:i}=t.meta,{nodeId:d,error:c}=t.payload,l=n.getState().workflow.activeNodes[d],u=i||l?.cascadeId;if(!u||o)return a(t);if(!await v(async()=>{let s=await fetch(e.persistenceEndpoint,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({action:"error",nodeInstanceId:d,cascadeId:u,error:c})});if(!s.ok)throw new Error(`Persistence sync failed: ${s.status}`);return await s.json()})){w&&console.error(`[CLIENT MW] \u274C Failed to mark node ${d} as failed after retries`);return}return a({...t,meta:{...t.meta,origin:"client",cascadeId:u}})}if(y.match(t)){let{origin:o,functionId:r,cascadeId:i}=t.meta,d=t.payload,c=i,l=r??0;if(!c||o||(w&&console.log("[CLIENT MW] \u{1F504} Persisting context:",JSON.stringify(d,null,2)),d[c]?.status==="streaming"))return a(t);if(!await v(async()=>{let u=await fetch(e.persistenceEndpoint,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({action:"context",cascadeId:c,functionId:l,updates:d})});if(!u.ok)throw new Error(`Persistence sync failed: ${u.status}`);return await u.json()})){w&&console.error(`[CLIENT MW] \u274C Failed to record context for cascade ${c} after retries`);return}return a({...t,meta:{...t.meta,origin:"client",functionId:l,cascadeId:c}})}return a(t)},U=(0,H.createListenerMiddleware)(),me=U.startListening,j=`
1
+ var re=Object.create;var N=Object.defineProperty;var ie=Object.getOwnPropertyDescriptor;var se=Object.getOwnPropertyNames;var de=Object.getPrototypeOf,ce=Object.prototype.hasOwnProperty;var le=(t,n)=>{for(var a in n)N(t,a,{get:n[a],enumerable:!0})},W=(t,n,a,e)=>{if(n&&typeof n=="object"||typeof n=="function")for(let o of se(n))!ce.call(t,o)&&o!==a&&N(t,o,{get:()=>n[o],enumerable:!(e=ie(n,o))||e.enumerable});return t};var ue=(t,n,a)=>(a=t!=null?re(de(t)):{},W(n||!t||!t.__esModule?N(a,"default",{value:t,enumerable:!0}):a,t)),pe=t=>W(N({},"__esModule",{value:!0}),t);var be={};le(be,{ReactWorkflowContext:()=>R,WorkflowProvider:()=>Ae,WorkflowRenderer:()=>Oe,useAllCascades:()=>Ee,useAppDispatch:()=>Re,useAppSelector:()=>oe,useCascade:()=>ke,useWorkflow:()=>_e});module.exports=pe(be);var v=require("react-redux"),M=require("react");var E=require("@reduxjs/toolkit"),H=require("@reduxjs/toolkit"),J=require("@reduxjs/toolkit"),D=require("uuid"),K=require("@reduxjs/toolkit"),Ce=require("uuid"),_=require("reselect"),Se=require("uuid"),fe={context:{},activeNodes:{},history:[],errors:{}},T=(0,J.createSlice)({name:"workflow",initialState:fe,reducers:{updateContext:{reducer:(t,n)=>{Object.entries(n.payload).forEach(([a,e])=>{if(t.context[a]||(t.context[a]=[]),typeof e=="object"&&e!==null&&!Array.isArray(e)&&"index"in e){let o=e.index;t.context[a][o]=e}else t.context[a].push(e)})},prepare:(t,n)=>({payload:t,meta:n||{}})},addActiveNode:{reducer:(t,n)=>{let{nodeId:a,nodeName:e,parentTriggerId:o,contextData:i}=n.payload,{origin:r,functionId:d,cascadeId:c}=n.meta;t.activeNodes[a]={nodeName:e,parentTriggerId:o,processed:!1,initialContext:i,origin:r,functionId:d,cascadeId:c||i?.cascadeId},t.errors[a]&&delete t.errors[a]},prepare:(t,n)=>{let 
a=n?.cascadeId||t.contextData?.cascadeId;return{payload:t,meta:{...n,cascadeId:a}}}},removeActiveNode:{reducer:(t,n)=>{let{nodeId:a}=n.payload;t.activeNodes[a]&&(t.history.push({nodeId:a,nodeName:t.activeNodes[a].nodeName,timestamp:Date.now()}),delete t.activeNodes[a])},prepare:(t,n)=>({payload:t,meta:n||{}})},setError:{reducer:(t,n)=>{let{nodeId:a,error:e}=n.payload;t.errors[a]=e},prepare:(t,n)=>({payload:t,meta:n||{}})},markNodeProcessed:(t,n)=>{let{nodeId:a}=n.payload;t.activeNodes[a]&&(t.activeNodes[a].processed=!0)},streamChunkReceived:(t,n)=>{let{cascadeId:a,identity:e,value:o}=n.payload;if(!e||o===void 0)return;let i=t.context[a];if(!i||i.length===0)return;let r=i[i.length-1].history,d=r[r.length-1];!d||d.role!=="assistant"||(typeof d[e]=="string"?d[e]+=o:d[e]=o)},hydrateContext:(t,n)=>{Object.entries(n.payload).forEach(([a,e])=>{t.context[a]=e})},forkAndHydrate:{reducer:()=>{},prepare:t=>({payload:t})}}}),{updateContext:I,addActiveNode:w,removeActiveNode:x,setError:G,markNodeProcessed:Te,streamChunkReceived:me,hydrateContext:P,forkAndHydrate:$}=T.actions,$e=T.reducer,Y=(0,H.createListenerMiddleware)(),ye=Y.startListening,U=`
2
2
  __END_STREAM_METADATA__
3
- `,ye=e=>{me({actionCreator:I,effect:async(n,a)=>{let{nodeId:t,nodeName:o,contextData:r}=n.payload,{functionId:i}=n.meta,d=e.workflowGraph[o],c=e.isLite;if(!d||d.isUINode)return;let l=c?r?.history?.slice(-1)??[]:[...r?.history??[]];r?.sentFromClient&&await a.dispatch(y({[r?.cascadeId]:{history:l,status:"completed"}},{functionId:i,cascadeId:r?.cascadeId}));try{let u=await fetch(e.actionRelayEndpoint,{method:"POST",headers:{"Content-Type":"application/json"},signal:a.signal,body:JSON.stringify(n)});if(!u.ok||!u.body)throw new Error(await u.text()||"Action relay failed");let s=r?.cascadeId;await we(u.body,s,t,a,n,c)}catch(u){if(u.name==="AbortError")return;await a.dispatch(V({nodeId:t,error:u.message})),await a.dispatch(N({nodeId:t,hasSpawns:!1}))}}})};async function we(e,n,a,t,o,r){let i=e.getReader(),d=new TextDecoder,c="",l=!1;try{for(;;){let{done:u,value:s}=await i.read();if(u)break;if(c+=d.decode(s,{stream:!0}),!l){let f=c.indexOf(j);if(f!==-1){let p=c.substring(0,f),h=c.substring(f+j.length);if(L(p,n,t),h.trim())try{let g=JSON.parse(h.trim());await Ie(g,a,t,o,r)}catch(g){console.error("Metadata parse error",g)}l=!0;break}c=await L(c,n,t)}}}finally{i.releaseLock()}}async function L(e,n,a){let t=e.split(`
4
- `),o=t.pop()||"";for(let r of t)if(!(!r.trim()||r.startsWith(":")))try{let i=JSON.parse(r);i.type==="init"?await a.dispatch(y({[i.cascadeId]:{history:[{role:"assistant",content:""}],status:"streaming"}})):i.type==="sync"?await a.dispatch(y({[i.cascadeId]:{history:[i.history],status:"completed"}})):a.dispatch(pe({cascadeId:n,contentChunk:i.contentChunk||i.chunk,toolChunk:i.toolChunk||i.tool_call,reasoningChunk:i.reasoningChunk}))}catch(i){console.error("Chunk parse error",i)}return o}async function Ie(e,n,a,t,o){let r=t.meta?.functionId??0,i=t.meta?.origin??"server",d=t.payload.contextData?.cascadeId,c=e.spawns?Object.entries(e.spawns):[],l=c.length>0;l&&c.forEach(async([u,s],f)=>{let p=o?a.getState().workflow.context[d]:s;await a.dispatch(I({nodeId:`${u}_${Date.now()}_${f}`,nodeName:u,parentTriggerId:n,contextData:{...p,userId:p?.userId||t.payload.contextData?.userId}},{functionId:r+1+f,cascadeId:d,origin:i}))}),await a.dispatch(N({nodeId:n,hasSpawns:l,fullOutput:e.updates},{functionId:void 0,cascadeId:d,origin:i}))}var Ce=(0,_.combineReducers)({workflow:$.reducer}),G=(e,n,a,t,o=[])=>{ye({workflowGraph:e,actionRelayEndpoint:n,isLite:!a});let r=[];return a&&r.push(ge({persistenceEndpoint:a}),he({hydrationEndpoint:t})),r.push(...o),r.push(U.middleware),(0,_.configureStore)({reducer:Ce,middleware:i=>i({serializableCheck:!1}).concat(r),devTools:process.env.NODE_ENV!=="production"})},$e=process.env.NODE_ENV==="development";var be=process.env.NODE_ENV==="development";var Me=process.env.NODE_ENV==="development";var ke=e=>e.workflow.activeNodes,z=e=>e.workflow.context,B=(e,n)=>n,X=()=>(0,E.createSelector)([z,B],(e,n)=>{let a=e[n];if(!a||a.length===0)return;let t=a[a.length-1].status,o=a.flatMap(r=>r.history||[]);return{status:t,history:o}}),q=()=>(0,E.createSelector)([ke,B],(e,n)=>(console.log(`Reselect: Re-calculating nodes for cascade 
${n}`),Object.entries(e).filter(([a,t])=>t.initialContext?.cascadeId===n).map(([a,t])=>({nodeId:a,nodeName:t.nodeName,parentTriggerId:t.parentTriggerId,initialContext:t.initialContext,processed:t.processed})))),K=(0,E.createSelector)([z],e=>(console.log("Reselect: Re-calculating all cascade IDs"),Object.keys(e).filter(n=>n.startsWith("cascade_"))));var Ne=e=>{let n=(0,x.useMemo)(X,[]),a=(0,x.useMemo)(q,[]),t=(0,C.useDispatch)(),o=(0,C.useSelector)(s=>n(s,e)),r=(0,C.useSelector)(s=>a(s,e)),i=(0,x.useMemo)(()=>r.length===0?null:[...r].sort((s,f)=>{let p=parseInt(s.nodeId.split("_")[1]||"0");return parseInt(f.nodeId.split("_")[1]||"0")-p})[0],[r]),d=(0,x.useMemo)(()=>{let s=new Map(r.map(p=>[p.nodeId,{...p,children:[]}])),f=[];return r.forEach(p=>{let h=s.get(p.nodeId);p.parentTriggerId&&s.has(p.parentTriggerId)?s.get(p.parentTriggerId).children.push(h):f.push(h)}),f},[r]),c=async(s,f,p)=>{let h=await Q(e,s,f,p),g=await Z(s,f,p);return t(b(g)),h},l=r.length===0&&o!==void 0,u=o!==void 0||r.length>0;return{cascadeState:o,cascadeNodes:r,currentNode:i,cascadeTree:d,isComplete:l,exists:u,forkCascade:c}},xe=()=>(0,C.useSelector)(K);async function Q(e,n,a,t){let o=await fetch(t,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({action:"fork",sourceCascadeId:e,newCascadeId:n,upToFunctionId:a})});if(!o.ok)throw new Error(`forkCascade failed: ${await o.text()}`);return o.json()}async function Z(e,n,a){let t=await fetch(a,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({action:"hydrate",cascadeId:e,functionId:n})});if(!t.ok)throw new Error(`hydrate failed: ${await t.text()}`);return t.json()}var k=require("react-redux"),m=require("react");var Se={},ve=e=>{let n=(0,k.useDispatch)(),a=(0,m.useRef)(void 0);(0,m.useEffect)(()=>{a.current!==e&&(a.current=e)},[e]);let 
t=(0,m.useMemo)(()=>e.split("_")[0],[e]),o=(0,k.useSelector)(s=>s.workflow.context[t]||Se),r=(0,k.useSelector)(s=>s.workflow.activeNodes[e]),i=s=>(0,k.useSelector)(f=>s(f.workflow.context)),d=(0,m.useCallback)(async s=>await n(y({[t]:{...o,...s}})),[n,t,o]),c=(0,m.useCallback)(async s=>await n(y(s)),[n]),l=(0,m.useCallback)(async s=>await n(N(s)),[n]),u=(0,m.useCallback)(async(s,f)=>{let p=Array.isArray(s)?s:[s],h=f?.cascadeId,g=[];for(let W of p){let ae=`${W}_${Date.now()}_${Math.random().toString(36).substring(2,8)}`,P={...f,sentFromClient:!0};h&&(P.cascadeId=h,g.push(h)),await n(I({nodeId:ae,nodeName:W,parentTriggerId:e,contextData:P}))}if(h)return g.length===1?g[0]:g},[n,e]);return(0,m.useMemo)(()=>({context:o,useWorkflowSelector:i,saveContext:d,saveGlobalContext:c,signalCompletion:l,addActiveNode:u,nodeName:t,nodeId:e,nodeData:r}),[o,i,d,c,l,u,t,e,r])};var R=le(require("react"),1),ee=require("react-redux"),M=require("react/jsx-runtime"),D=R.default.createContext(null);function _e({children:e,initialNodeId:n,initialNodeName:a,initialContext:t={},config:o,actionRelayEndpoint:r,persistenceEndpoint:i,hydrationEndpoint:d}){let c=(0,R.useRef)(null);return c.current||(c.current=G(o.clientWorkflowGraph,r,i,d),c.current.dispatch(y(t)),c.current.dispatch(I({nodeId:n,nodeName:a}))),(0,M.jsx)(ee.Provider,{store:c.current,children:(0,M.jsx)(D.Provider,{value:o,children:e})})}var te=require("react");var A=require("react-redux"),T=require("react/jsx-runtime"),Ee=()=>(0,A.useDispatch)(),ne=A.useSelector;function Re(){let e=(0,te.useContext)(D);if(!e)throw new Error("WorkflowRenderer must be used within a WorkflowProvider.");let{activeNodes:n}=ne(r=>r.workflow),{clientWorkflowGraph:a,uiComponentRegistry:t}=e,o=Object.keys(n).filter(r=>{let i=n[r];return a[i.nodeName]?.isUINode});return o.length===0?null:(0,T.jsx)("div",{children:o.map(r=>{let i=n[r],d=t[i.nodeName];return d?(0,T.jsx)("div",{children:(0,T.jsx)(d,{nodeId:r})},r):(console.error(`UI component for node 
'${i.nodeName}' not found.`),null)})})}0&&(module.exports={ReactWorkflowContext,WorkflowProvider,WorkflowRenderer,apiForkCascade,apiHydrateCascadeContext,useAllCascades,useAppDispatch,useAppSelector,useCascade,useWorkflow});
3
+ `,he=t=>{ye({actionCreator:w,effect:async(n,a)=>{let{nodeId:e,nodeName:o,contextData:i}=n.payload,{functionId:r}=n.meta,d=t.workflowGraph[o],c=t.isLite;if(!(!d||d.isUINode)){if(i?.sentFromClient&&!i?.handledTimeout){let{cascadeId:s,userId:u,history:l,sentFromClient:f,...m}=i,p=c?l?.slice(-1)??[]:[...l??[]];await a.dispatch(I({[s]:{...m,history:p,status:"completed"}},{functionId:r,cascadeId:s}))}try{let s=await fetch(t.actionRelayEndpoint,{method:"POST",headers:{"Content-Type":"application/json"},signal:a.signal,body:JSON.stringify(n)});if(!s.ok||!s.body)throw new Error(await s.text()||"Action relay failed");let u=i?.cascadeId;await we(s.body,u,e,a,n,c)}catch(s){if(s.name==="AbortError")return;await a.dispatch(G({nodeId:e,error:s.message})),await a.dispatch(x({nodeId:e,hasSpawns:!1}))}}}})};async function we(t,n,a,e,o,i){let r=t.getReader(),d=new TextDecoder,c="",s=!1;try{for(;;){let{done:u,value:l}=await r.read();if(u)break;if(c+=d.decode(l,{stream:!0}),!s){let f=c.indexOf(U);if(f!==-1){let m=c.substring(0,f),p=c.substring(f+U.length);if(await V(m,n,e),p.trim())try{let y=JSON.parse(p.trim());await ge(y,a,e,o,i)}catch(y){console.error("Metadata parse error",y)}s=!0;break}c=await V(c,n,e)}}}finally{r.releaseLock()}}async function V(t,n,a){let e=t.split(`
4
+ `),o=e.pop()||"";for(let i of e)if(!(!i.trim()||i.startsWith(":")))try{let r=JSON.parse(i);r.type==="init"?await a.dispatch(I({[r.cascadeId]:{history:[{role:"assistant"}],status:"streaming"}})):r.type==="sync"?await a.dispatch(I({[r.cascadeId]:{history:r.history,status:"completed"}})):r.type==="ui_spawn"?await a.dispatch(w({nodeId:`${r.nodeName}_${(0,D.v7)()}`,nodeName:r.nodeName,contextData:r.contextData})):await a.dispatch(me({cascadeId:r.cascadeId,identity:r.identity,value:r.value}))}catch(r){console.error("Chunk parse error",r)}return o}async function ge(t,n,a,e,o){let i=e.meta?.functionId??0,r=e.meta?.origin??"server",d=e.payload.contextData?.cascadeId,c=l=>{if(!o)return l;let f=a.getState().workflow.context[d],m=Object.keys(f||{}),p=Object.fromEntries(Object.entries(l||{}).filter(([F])=>!m.includes(F))),y=f.flatMap(F=>F.history||[]);return{...p,history:y,sentFromClient:!0,handledTimeout:!0}},s=t.spawns?Object.entries(t.spawns):[],u=s.length>0;u&&s.forEach(async([l,f],m)=>{let p=c(f);await a.dispatch(w({nodeId:`${l}_${(0,D.v7)()}_${m}`,nodeName:l,parentTriggerId:n,contextData:{...p,cascadeId:d,userId:p?.userId||e.payload.contextData?.userId}},{functionId:o?0:i+1+m,cascadeId:d,origin:r}))}),await a.dispatch(x({nodeId:n,hasSpawns:u,fullOutput:t.updates},{functionId:void 0,cascadeId:d,origin:r}))}var Ie=(0,E.combineReducers)({workflow:T.reducer}),z=(t,n,a,e=[])=>{he({workflowGraph:t,actionRelayEndpoint:n,isLite:!a});let o=[];return a&&o.push(xe({persistenceEndpoint:a}),Ne({persistenceEndpoint:a})),o.push(...e),o.push(Y.middleware),(0,E.configureStore)({reducer:Ie,middleware:i=>i({serializableCheck:!1}).concat(o),devTools:process.env.NODE_ENV!=="production"})},De=process.env.NODE_ENV==="development";var Me=process.env.NODE_ENV==="development";var ve=t=>t.workflow.activeNodes,Z=t=>t.workflow.context,B=(t,n)=>n,q=()=>(0,_.createSelector)([Z,B],(t,n)=>{let a=t[n];if(!a||a.length===0)return;let e=a[a.length-1].status,o={};return 
a.forEach(i=>{let{status:r,...d}=i;Object.entries(d).forEach(([c,s])=>{s!=null&&(o[c]||(o[c]=[]),Array.isArray(s)?o[c].push(...s):o[c].push(s))})}),{...o,status:e}}),X=()=>(0,_.createSelector)([ve,B],(t,n)=>Object.entries(t).filter(([a,e])=>e.initialContext?.cascadeId===n).map(([a,e])=>({nodeId:a,nodeName:e.nodeName,parentTriggerId:e.parentTriggerId,initialContext:e.initialContext,processed:e.processed}))),Q=(0,_.createSelector)([Z],t=>Object.keys(t));var je=process.env.NODE_ENV==="development";var g=process.env.NODE_ENV==="development";async function k(t,n=3,a=100){for(let e=1;e<=n;e++)try{return await t()}catch(o){if(e===n)return g&&console.error(`[CLIENT MW] \u274C All ${n} retry attempts failed:`,o),null;let i=a*Math.pow(2,e-1);g&&console.warn(`[CLIENT MW] \u26A0\uFE0F Attempt ${e} failed, retrying in ${i}ms...`),await new Promise(r=>setTimeout(r,i))}return null}var xe=t=>n=>a=>async e=>{if(w.match(e)){let{origin:o,functionId:i,cascadeId:r}=e.meta,{nodeId:d,nodeName:c,contextData:s}=e.payload;if(!r||o)return a(e);let u=i??0,l=await k(async()=>{let p=await fetch(t.persistenceEndpoint,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({action:"claim",nodeInstanceId:d,cascadeId:r,userId:s.userId,nodeName:c,functionId:u,inputContext:s,location:"client"})});if(!p.ok)throw new Error(`Persistence sync failed: ${p.status}`);return await p.json()});if(!l){g&&console.error(`[CLIENT MW] \u274C Failed to claim node ${d} after retries`);return}let f=l.functionId;g&&console.log(`[CLIENT MW] \u2705 Node ${d} bound to FnId: ${f}`);let m={...e,meta:{...e.meta,origin:"client",functionId:f,cascadeId:r}};return a(m)}if(x.match(e)){let{origin:o,functionId:i,cascadeId:r}=e.meta,d=typeof e.payload=="string"?e.payload:e.payload.nodeId,c=e.payload.hasSpawns,s=e.payload.fullOutput,u=n.getState(),l=u.workflow.activeNodes[d],f=r||l?.cascadeId,m=i??l?.functionId??0;if(!f||o)return a(e);let p=s||u.workflow.context[f];if(!await k(async()=>{let y=await 
fetch(t.persistenceEndpoint,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({action:"finalize",nodeInstanceId:d,cascadeId:f,fullOutput:p,hasSpawns:c})});if(!y.ok)throw new Error(`Persistence sync failed: ${y.status}`);return await y.json()})){g&&console.error(`[CLIENT MW] \u274C Failed to finalize node ${d} after retries`);return}return a({...e,meta:{...e.meta,origin:"client",functionId:m,cascadeId:f}})}if(G.match(e)){let{origin:o,functionId:i,cascadeId:r}=e.meta,{nodeId:d,error:c}=e.payload,s=n.getState().workflow.activeNodes[d],u=r||s?.cascadeId;if(!u||o)return a(e);if(!await k(async()=>{let l=await fetch(t.persistenceEndpoint,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({action:"error",nodeInstanceId:d,cascadeId:u,error:c})});if(!l.ok)throw new Error(`Persistence sync failed: ${l.status}`);return await l.json()})){g&&console.error(`[CLIENT MW] \u274C Failed to mark node ${d} as failed after retries`);return}return a({...e,meta:{...e.meta,origin:"client",cascadeId:u}})}if(I.match(e)){let{origin:o,functionId:i,cascadeId:r,uiUpdates:d}=e.meta,c=e.payload,s=r,u=i??0;if(!s||o||(g&&console.log("[CLIENT MW] \u{1F504} Persisting context:",JSON.stringify(c,null,2)),c[s]?.status==="streaming"))return a(e);if(!await k(async()=>{let l=await fetch(t.persistenceEndpoint,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({action:"context",cascadeId:s,functionId:u,updates:c,uiUpdates:d})});if(!l.ok)throw new Error(`Persistence sync failed: ${l.status}`);return await l.json()})){g&&console.error(`[CLIENT MW] \u274C Failed to record context for cascade ${s} after retries`);return}return a({...e,meta:{...e.meta,origin:"client",functionId:u,cascadeId:s}})}return a(e)},h=process.env.NODE_ENV==="development";async function L(t,n=3,a=100){for(let e=1;e<=n;e++)try{return await t()}catch(o){if(e===n)return h&&console.error(`[HYDRATION] \u274C All ${n} attempts failed:`,o),null;let 
i=a*Math.pow(2,e-1);h&&console.warn(`[HYDRATION] \u26A0\uFE0F Attempt ${e} failed, retrying in ${i}ms...`),await new Promise(r=>setTimeout(r,i))}return null}var Ne=t=>n=>a=>async e=>{if(w.match(e)){let{origin:o,functionId:i,cascadeId:r}=e.meta;if(r&&i>0&&o==="client"){let d=n.getState().workflow.context[r];if(!d||Object.keys(d).length===0){h&&console.log(`[HYDRATION] \u{1F9CA} Cold start for ${r}`);let c=await L(async()=>{let s=await fetch(t.persistenceEndpoint,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({action:"hydrate",cascadeId:r,functionId:i,ui:!0})});if(!s.ok)throw new Error(`Status ${s.status}`);return await s.json()});if(!c){let s=`[HYDRATION] Critical Failure: Could not hydrate cascade ${r}. Action terminated.`;throw h&&console.error(s),new Error("Hydration failed")}Object.keys(c).length>0&&(n.dispatch(P(c)),h&&console.log("[HYDRATION] \u2705 State restored successfully"))}}}if($.match(e)){let{sourceCascadeId:o,newCascadeId:i,upToFunctionId:r}=e.payload;h&&console.log(`[FORK] \u{1F374} Forking ${o} \u2192 ${i} at fn ${r}`);let d=await L(async()=>{let c=await fetch(t.persistenceEndpoint,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({action:"forkAndHydrate",sourceCascadeId:o,newCascadeId:i,upToFunctionId:r})});if(!c.ok)throw new Error(`Fork failed with status ${c.status}`);return await c.json()});if(!d){let c=`[FORK] Critical Failure: Could not fork cascade ${o}. 
Action terminated.`;throw h&&console.error(c),new Error("Fork failed")}d.context&&Object.keys(d.context).length>0?(n.dispatch(P(d.context)),h&&console.log(`[FORK] \u2705 Fork complete, context hydrated for ${i}`)):h&&console.log(`[FORK] \u2705 Fork complete, no context to hydrate for ${i}`)}return a(e)};var ke=t=>{let n=(0,M.useMemo)(q,[]),a=(0,M.useMemo)(X,[]),e=(0,v.useDispatch)(),o=(0,v.useSelector)(s=>n(s,t)),i=(0,v.useSelector)(s=>a(s,t)),r=i.length===0&&o!==void 0,d=o!==void 0||i.length>0;return{cascadeState:o,cascadeNodes:i,isComplete:r,exists:d,forkCascade:async(s,u,l)=>{try{return await e($({sourceCascadeId:l||t,newCascadeId:s,upToFunctionId:u})),{status:"SUCCESS"}}catch{return{status:"FAILED"}}}}},Ee=()=>(0,v.useSelector)(Q);var S=require("react-redux"),C=require("react");var te=require("uuid"),ee=process.env.NODE_ENV==="development",_e=t=>{let n=(0,S.useDispatch)(),a=(0,S.useStore)(),e=(0,S.useSelector)(s=>s.workflow.activeNodes[t]),o=!!(e?.cascadeId&&e?.origin),i=(0,C.useCallback)(()=>a.getState(),[a]),r=(0,C.useCallback)(async s=>{if(!o){ee&&console.warn(`[useWorkflow] updateContext called on ephemeral node "${t}". This node has no cascadeId or origin \u2014 context will not be persisted. If persistence is required, ensure the node is spawned with a cascadeId.`);return}return await n(I(s,{origin:"client",cascadeId:e.cascadeId,functionId:e.functionId}))},[n,o,t,e]),d=(0,C.useCallback)(async s=>{let u={};for(let[l,f]of Object.entries(s)){let m=`${l}_${(0,te.v7)()}`,{cascadeId:p,...y}=f;await n(w({nodeId:m,nodeName:l,parentTriggerId:t,contextData:{...y,sentFromClient:!0,...p&&{cascadeId:p}}})),p&&(u[l]=p)}return Object.keys(u).length>0?u:void 0},[n,t]),c=(0,C.useCallback)(async(s,u)=>(ee&&o&&!u&&console.warn(`[useWorkflow] signalCompletion called on persisted node "${t}" without fullOutput. The node's output will not be recorded. 
Pass fullOutput to capture the result of this node execution.`),await n(x({nodeId:t,hasSpawns:s,fullOutput:u},{functionId:e?.functionId,cascadeId:e?.cascadeId}))),[n,t,o,e]);return(0,C.useMemo)(()=>({nodeData:e,getState:i,updateContext:r,addActiveNode:d,signalCompletion:c}),[e,i,r,d,c])};var A=ue(require("react"),1),ae=require("react-redux"),j=require("react/jsx-runtime"),R=A.default.createContext(null);function Ae({children:t,initialNodeId:n,initialNodeName:a,initialContext:e={},config:o,actionRelayEndpoint:i,persistenceEndpoint:r,extraMiddlewares:d=[]}){let c=(0,A.useRef)(null);return c.current||(c.current=z(o.clientWorkflowGraph,i,r,d),c.current.dispatch(I(e)),c.current.dispatch(w({nodeId:n,nodeName:a}))),(0,j.jsx)(ae.Provider,{store:c.current,children:(0,j.jsx)(R.Provider,{value:o,children:t})})}var ne=require("react");var b=require("react-redux"),O=require("react/jsx-runtime"),Re=()=>(0,b.useDispatch)(),oe=b.useSelector;function Oe(){let t=(0,ne.useContext)(R);if(!t)throw new Error("WorkflowRenderer must be used within a WorkflowProvider.");let{activeNodes:n}=oe(i=>i.workflow),{clientWorkflowGraph:a,uiComponentRegistry:e}=t,o=Object.keys(n).filter(i=>{let r=n[i];return a[r.nodeName]?.isUINode});return o.length===0?null:(0,O.jsx)("div",{children:o.map(i=>{let r=n[i],d=e[r.nodeName];return d?(0,O.jsx)("div",{children:(0,O.jsx)(d,{nodeId:i})},i):(console.error(`UI component for node '${r.nodeName}' not found.`),null)})})}0&&(module.exports={ReactWorkflowContext,WorkflowProvider,WorkflowRenderer,useAllCascades,useAppDispatch,useAppSelector,useCascade,useWorkflow});
package/dist/index.d.cts CHANGED
@@ -1,6 +1,7 @@
1
- import * as _cascaide_ts_core from '@cascaide-ts/core';
2
- import { WorkflowContext, removeActiveNodePayload, ClientWorkflowGraph, RootState } from '@cascaide-ts/core';
3
1
  import * as _reduxjs_toolkit from '@reduxjs/toolkit';
2
+ import { Middleware } from '@reduxjs/toolkit';
3
+ import * as _cascaide_ts_core from '@cascaide-ts/core';
4
+ import { Updates, Spawns, ClientWorkflowGraph, RootState } from '@cascaide-ts/core';
4
5
  import * as react_jsx_runtime from 'react/jsx-runtime';
5
6
  import React from 'react';
6
7
  import { TypedUseSelectorHook } from 'react-redux';
@@ -12,68 +13,32 @@ type CascadeNode = {
12
13
  initialContext?: any;
13
14
  processed?: boolean;
14
15
  };
15
- type CascadeState = {
16
- task?: string;
17
- status?: string;
18
- currentNode?: string;
19
- history?: any[];
20
- lastUpdate?: number;
16
+ type CascadeStateResult = {
17
+ status: string;
18
+ history: any[];
21
19
  [key: string]: any;
22
20
  };
23
21
  declare const useCascade: (cascadeId: string) => {
24
- cascadeState: {
25
- status: string;
26
- history: any[];
27
- };
28
- cascadeNodes: {
29
- nodeId: string;
30
- nodeName: string;
31
- parentTriggerId: string;
32
- initialContext: any;
33
- processed: boolean;
34
- }[];
35
- currentNode: {
36
- nodeId: string;
37
- nodeName: string;
38
- parentTriggerId: string;
39
- initialContext: any;
40
- processed: boolean;
41
- };
42
- cascadeTree: any[];
22
+ cascadeState: CascadeStateResult;
23
+ cascadeNodes: CascadeNode[];
43
24
  isComplete: boolean;
44
25
  exists: boolean;
45
- forkCascade: (newCascadeId: string, upToFunctionId: number, apiEndpoint: string) => Promise<{
46
- newCascadeId: string;
47
- status: string;
26
+ forkCascade: (newCascadeId: string, upToFunctionId: number, sourceCascadeId?: string) => Promise<{
27
+ status: "SUCCESS" | "FAILED";
48
28
  }>;
49
29
  };
50
30
  declare const useAllCascades: () => string[];
51
- declare function apiForkCascade(sourceCascadeId: string, newCascadeId: string, upToFunctionId: number, apiEndpoint: string): Promise<{
52
- newCascadeId: string;
53
- status: string;
54
- }>;
55
- declare function apiHydrateCascadeContext(cascadeId: string, functionId: number, apiEndpoint: string): Promise<WorkflowContext>;
56
31
 
57
- interface AddActiveNodeSignature {
58
- (nodeNames: string | string[], initialContext?: Omit<any, 'cascadeId'>): Promise<undefined>;
59
- (nodeNames: string | string[], initialContext: {
60
- cascadeId: string;
61
- } & any): Promise<string | string[]>;
62
- }
63
32
  declare const useWorkflow: (nodeId: string) => {
64
- context: {};
65
- useWorkflowSelector: <T>(selector: (context: any) => T) => T;
66
- saveContext: (data: any) => Promise<_reduxjs_toolkit.PayloadAction<{
67
- [key: string]: any;
68
- }, "workflow/updateContext", _cascaide_ts_core.ActionMeta, never>>;
69
- saveGlobalContext: (data: any) => Promise<_reduxjs_toolkit.PayloadAction<{
33
+ nodeData: _cascaide_ts_core.ActiveNode;
34
+ getState: () => {
35
+ workflow: _cascaide_ts_core.WorkflowState;
36
+ };
37
+ updateContext: (updates: Updates) => Promise<_reduxjs_toolkit.PayloadAction<{
70
38
  [key: string]: any;
71
39
  }, "workflow/updateContext", _cascaide_ts_core.ActionMeta, never>>;
72
- signalCompletion: (data: removeActiveNodePayload) => Promise<_reduxjs_toolkit.PayloadAction<removeActiveNodePayload, "workflow/removeActiveNode", _cascaide_ts_core.ActionMeta, never>>;
73
- addActiveNode: AddActiveNodeSignature;
74
- nodeName: string;
75
- nodeId: string;
76
- nodeData: _cascaide_ts_core.ActiveNode;
40
+ addActiveNode: (spawns: Spawns) => Promise<Record<string, string> | undefined>;
41
+ signalCompletion: (hasSpawns: boolean, fullOutput?: any) => Promise<_reduxjs_toolkit.PayloadAction<_cascaide_ts_core.removeActiveNodePayload, "workflow/removeActiveNode", _cascaide_ts_core.ActionMeta, never>>;
77
42
  };
78
43
 
79
44
  type ClientWorkflowConfig = {
@@ -83,8 +48,7 @@ type ClientWorkflowConfig = {
83
48
  };
84
49
  };
85
50
  declare const ReactWorkflowContext: React.Context<ClientWorkflowConfig>;
86
- declare function WorkflowProvider({ children, initialNodeId, initialNodeName, initialContext, config, actionRelayEndpoint, persistenceEndpoint, // Now optional
87
- hydrationEndpoint, }: {
51
+ declare function WorkflowProvider({ children, initialNodeId, initialNodeName, initialContext, config, actionRelayEndpoint, persistenceEndpoint, extraMiddlewares }: {
88
52
  children: React.ReactNode;
89
53
  initialNodeId: string;
90
54
  initialNodeName: string;
@@ -92,11 +56,11 @@ hydrationEndpoint, }: {
92
56
  config: ClientWorkflowConfig;
93
57
  actionRelayEndpoint: string;
94
58
  persistenceEndpoint?: string;
95
- hydrationEndpoint?: string;
59
+ extraMiddlewares?: Middleware[];
96
60
  }): react_jsx_runtime.JSX.Element;
97
61
 
98
62
  declare const useAppDispatch: () => _reduxjs_toolkit.Dispatch<_reduxjs_toolkit.Action>;
99
63
  declare const useAppSelector: TypedUseSelectorHook<RootState>;
100
64
  declare function WorkflowRenderer(): react_jsx_runtime.JSX.Element;
101
65
 
102
- export { type CascadeNode, type CascadeState, type ClientWorkflowConfig, ReactWorkflowContext, WorkflowProvider, WorkflowRenderer, apiForkCascade, apiHydrateCascadeContext, useAllCascades, useAppDispatch, useAppSelector, useCascade, useWorkflow };
66
+ export { type CascadeNode, type CascadeStateResult, type ClientWorkflowConfig, ReactWorkflowContext, WorkflowProvider, WorkflowRenderer, useAllCascades, useAppDispatch, useAppSelector, useCascade, useWorkflow };
package/dist/index.d.ts CHANGED
@@ -1,6 +1,7 @@
1
- import * as _cascaide_ts_core from '@cascaide-ts/core';
2
- import { WorkflowContext, removeActiveNodePayload, ClientWorkflowGraph, RootState } from '@cascaide-ts/core';
3
1
  import * as _reduxjs_toolkit from '@reduxjs/toolkit';
2
+ import { Middleware } from '@reduxjs/toolkit';
3
+ import * as _cascaide_ts_core from '@cascaide-ts/core';
4
+ import { Updates, Spawns, ClientWorkflowGraph, RootState } from '@cascaide-ts/core';
4
5
  import * as react_jsx_runtime from 'react/jsx-runtime';
5
6
  import React from 'react';
6
7
  import { TypedUseSelectorHook } from 'react-redux';
@@ -12,68 +13,32 @@ type CascadeNode = {
12
13
  initialContext?: any;
13
14
  processed?: boolean;
14
15
  };
15
- type CascadeState = {
16
- task?: string;
17
- status?: string;
18
- currentNode?: string;
19
- history?: any[];
20
- lastUpdate?: number;
16
+ type CascadeStateResult = {
17
+ status: string;
18
+ history: any[];
21
19
  [key: string]: any;
22
20
  };
23
21
  declare const useCascade: (cascadeId: string) => {
24
- cascadeState: {
25
- status: string;
26
- history: any[];
27
- };
28
- cascadeNodes: {
29
- nodeId: string;
30
- nodeName: string;
31
- parentTriggerId: string;
32
- initialContext: any;
33
- processed: boolean;
34
- }[];
35
- currentNode: {
36
- nodeId: string;
37
- nodeName: string;
38
- parentTriggerId: string;
39
- initialContext: any;
40
- processed: boolean;
41
- };
42
- cascadeTree: any[];
22
+ cascadeState: CascadeStateResult;
23
+ cascadeNodes: CascadeNode[];
43
24
  isComplete: boolean;
44
25
  exists: boolean;
45
- forkCascade: (newCascadeId: string, upToFunctionId: number, apiEndpoint: string) => Promise<{
46
- newCascadeId: string;
47
- status: string;
26
+ forkCascade: (newCascadeId: string, upToFunctionId: number, sourceCascadeId?: string) => Promise<{
27
+ status: "SUCCESS" | "FAILED";
48
28
  }>;
49
29
  };
50
30
  declare const useAllCascades: () => string[];
51
- declare function apiForkCascade(sourceCascadeId: string, newCascadeId: string, upToFunctionId: number, apiEndpoint: string): Promise<{
52
- newCascadeId: string;
53
- status: string;
54
- }>;
55
- declare function apiHydrateCascadeContext(cascadeId: string, functionId: number, apiEndpoint: string): Promise<WorkflowContext>;
56
31
 
57
- interface AddActiveNodeSignature {
58
- (nodeNames: string | string[], initialContext?: Omit<any, 'cascadeId'>): Promise<undefined>;
59
- (nodeNames: string | string[], initialContext: {
60
- cascadeId: string;
61
- } & any): Promise<string | string[]>;
62
- }
63
32
  declare const useWorkflow: (nodeId: string) => {
64
- context: {};
65
- useWorkflowSelector: <T>(selector: (context: any) => T) => T;
66
- saveContext: (data: any) => Promise<_reduxjs_toolkit.PayloadAction<{
67
- [key: string]: any;
68
- }, "workflow/updateContext", _cascaide_ts_core.ActionMeta, never>>;
69
- saveGlobalContext: (data: any) => Promise<_reduxjs_toolkit.PayloadAction<{
33
+ nodeData: _cascaide_ts_core.ActiveNode;
34
+ getState: () => {
35
+ workflow: _cascaide_ts_core.WorkflowState;
36
+ };
37
+ updateContext: (updates: Updates) => Promise<_reduxjs_toolkit.PayloadAction<{
70
38
  [key: string]: any;
71
39
  }, "workflow/updateContext", _cascaide_ts_core.ActionMeta, never>>;
72
- signalCompletion: (data: removeActiveNodePayload) => Promise<_reduxjs_toolkit.PayloadAction<removeActiveNodePayload, "workflow/removeActiveNode", _cascaide_ts_core.ActionMeta, never>>;
73
- addActiveNode: AddActiveNodeSignature;
74
- nodeName: string;
75
- nodeId: string;
76
- nodeData: _cascaide_ts_core.ActiveNode;
40
+ addActiveNode: (spawns: Spawns) => Promise<Record<string, string> | undefined>;
41
+ signalCompletion: (hasSpawns: boolean, fullOutput?: any) => Promise<_reduxjs_toolkit.PayloadAction<_cascaide_ts_core.removeActiveNodePayload, "workflow/removeActiveNode", _cascaide_ts_core.ActionMeta, never>>;
77
42
  };
78
43
 
79
44
  type ClientWorkflowConfig = {
@@ -83,8 +48,7 @@ type ClientWorkflowConfig = {
83
48
  };
84
49
  };
85
50
  declare const ReactWorkflowContext: React.Context<ClientWorkflowConfig>;
86
- declare function WorkflowProvider({ children, initialNodeId, initialNodeName, initialContext, config, actionRelayEndpoint, persistenceEndpoint, // Now optional
87
- hydrationEndpoint, }: {
51
+ declare function WorkflowProvider({ children, initialNodeId, initialNodeName, initialContext, config, actionRelayEndpoint, persistenceEndpoint, extraMiddlewares }: {
88
52
  children: React.ReactNode;
89
53
  initialNodeId: string;
90
54
  initialNodeName: string;
@@ -92,11 +56,11 @@ hydrationEndpoint, }: {
92
56
  config: ClientWorkflowConfig;
93
57
  actionRelayEndpoint: string;
94
58
  persistenceEndpoint?: string;
95
- hydrationEndpoint?: string;
59
+ extraMiddlewares?: Middleware[];
96
60
  }): react_jsx_runtime.JSX.Element;
97
61
 
98
62
  declare const useAppDispatch: () => _reduxjs_toolkit.Dispatch<_reduxjs_toolkit.Action>;
99
63
  declare const useAppSelector: TypedUseSelectorHook<RootState>;
100
64
  declare function WorkflowRenderer(): react_jsx_runtime.JSX.Element;
101
65
 
102
- export { type CascadeNode, type CascadeState, type ClientWorkflowConfig, ReactWorkflowContext, WorkflowProvider, WorkflowRenderer, apiForkCascade, apiHydrateCascadeContext, useAllCascades, useAppDispatch, useAppSelector, useCascade, useWorkflow };
66
+ export { type CascadeNode, type CascadeStateResult, type ClientWorkflowConfig, ReactWorkflowContext, WorkflowProvider, WorkflowRenderer, useAllCascades, useAppDispatch, useAppSelector, useCascade, useWorkflow };
package/dist/index.js CHANGED
@@ -1,4 +1,4 @@
1
- import{useDispatch as ie,useSelector as E}from"react-redux";import{useMemo as k}from"react";import{configureStore as Y,combineReducers as z}from"@reduxjs/toolkit";import{createSlice as B}from"@reduxjs/toolkit";import{createListenerMiddleware as Z}from"@reduxjs/toolkit";import{configureStore as _e,createListenerMiddleware as Ee}from"@reduxjs/toolkit";import{createSelector as _}from"reselect";var X={context:{},activeNodes:{},history:[],errors:{}},S=B({name:"workflow",initialState:X,reducers:{updateContext:{reducer:(t,a)=>{Object.entries(a.payload).forEach(([n,e])=>{if(t.context[n]||(t.context[n]=[]),typeof e=="object"&&e!==null&&!Array.isArray(e)&&"index"in e){let r=e.index;t.context[n][r]=e}else t.context[n].push(e)})},prepare:(t,a)=>({payload:t,meta:a||{}})},addActiveNode:{reducer:(t,a)=>{let{nodeId:n,nodeName:e,parentTriggerId:r,contextData:o}=a.payload,{origin:i,functionId:d,cascadeId:c}=a.meta;t.activeNodes[n]={nodeName:e,parentTriggerId:r,processed:!1,initialContext:o,origin:i,functionId:d,cascadeId:c||o?.cascadeId},t.errors[n]&&delete t.errors[n]},prepare:(t,a)=>{let n=a?.cascadeId||t.contextData?.cascadeId;return{payload:t,meta:{...a,cascadeId:n}}}},removeActiveNode:{reducer:(t,a)=>{let{nodeId:n}=a.payload;t.activeNodes[n]&&(t.history.push({nodeId:n,nodeName:t.activeNodes[n].nodeName,timestamp:Date.now()}),delete t.activeNodes[n])},prepare:(t,a)=>({payload:t,meta:a||{}})},setError:{reducer:(t,a)=>{let{nodeId:n,error:e}=a.payload;t.errors[n]=e},prepare:(t,a)=>({payload:t,meta:a||{}})},markNodeProcessed:(t,a)=>{let{nodeId:n}=a.payload;t.activeNodes[n]&&(t.activeNodes[n].processed=!0)},streamChunkReceived:(t,a)=>{let{cascadeId:n,contentChunk:e,toolChunk:r,reasoningChunk:o,thoughtSignature:i}=a.payload;if(!e&&!r&&!o&&!i)return;let d=t.context[n];if(!d||d.length===0)return;let c=d[d.length-1].history,l=c[c.length-1];if(!(!l||l.role!=="assistant")&&(o&&(l.reasoning_content===void 
0&&(l.reasoning_content=""),l.reasoning_content+=o),i&&(l.thought_signature=i),e&&(l.content+=e),r)){l.tool_calls||(l.tool_calls=[]);let u=r.index??0;l.tool_calls[u]||(l.tool_calls[u]={id:"",type:"function",function:{name:"",arguments:""}});let s=l.tool_calls[u];r.id&&(s.id=r.id),r.thought_signature&&(s.thought_signature=r.thought_signature),r.function?.name&&(s.function.name=r.function.name),r.function?.arguments&&(s.function.arguments+=r.function.arguments)}},hydrateContext:(t,a)=>{Object.entries(a.payload).forEach(([n,e])=>{t.context[n]=e})}}}),{updateContext:m,addActiveNode:w,removeActiveNode:I,setError:M,markNodeProcessed:Ne,streamChunkReceived:q,hydrateContext:v}=S.actions,xe=S.reducer,x=process.env.NODE_ENV==="development",K=t=>a=>n=>async e=>{if(w.match(e)){let{origin:r,functionId:o,cascadeId:i}=e.meta;if(i&&o>0&&r==="client"){let d=a.getState().workflow.context[i];if(!d||d.length===0){x&&console.log(`[CLIENT HYDRATION] \u{1F9CA} Cold start for ${i}`);try{let c=await fetch(t.hydrationEndpoint,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({cascadeId:i,functionId:o})});if(!c.ok)throw new Error(`Hydration API failed: ${c.status}`);let l=await c.json();l&&Object.keys(l).length>0&&(a.dispatch(v(l)),x&&console.log("[CLIENT HYDRATION] \u2705 State hydrated via Dispatch"))}catch(c){x&&console.error("[CLIENT HYDRATION] \u274C Hydration failed:",c)}}}}return n(e)},y=process.env.NODE_ENV==="development";async function C(t,a=3,n=100){for(let e=1;e<=a;e++)try{return await t()}catch(r){if(e===a)return y&&console.error(`[CLIENT MW] \u274C All ${a} retry attempts failed:`,r),null;let o=n*Math.pow(2,e-1);y&&console.warn(`[CLIENT MW] \u26A0\uFE0F Attempt ${e} failed, retrying in ${o}ms...`),await new Promise(i=>setTimeout(i,o))}return null}var Q=t=>a=>n=>async e=>{if(w.match(e)){let{origin:r,functionId:o,cascadeId:i}=e.meta,{nodeId:d,nodeName:c,contextData:l}=e.payload;if(!i||r)return n(e);let u=o??0,s=await C(async()=>{let h=await 
fetch(t.persistenceEndpoint,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({action:"claim",nodeInstanceId:d,cascadeId:i,userId:l.userId,nodeName:c,functionId:u,inputContext:l,location:"client"})});if(!h.ok)throw new Error(`Persistence sync failed: ${h.status}`);return await h.json()});if(!s){y&&console.error(`[CLIENT MW] \u274C Failed to claim node ${d} after retries`);return}let f=s.functionId;y&&console.log(`[CLIENT MW] \u2705 Node ${d} bound to FnId: ${f}`);let p={...e,meta:{...e.meta,origin:"client",functionId:f,cascadeId:i}};return n(p)}if(I.match(e)){let{origin:r,functionId:o,cascadeId:i}=e.meta,d=typeof e.payload=="string"?e.payload:e.payload.nodeId,c=e.payload.hasSpawns,l=e.payload.fullOutput,u=a.getState(),s=u.workflow.activeNodes[d],f=i||s?.cascadeId,p=o??s?.functionId??0;if(!f||r)return n(e);let h=l||u.workflow.context[f];if(!await C(async()=>{let g=await fetch(t.persistenceEndpoint,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({action:"finalize",nodeInstanceId:d,cascadeId:f,fullOutput:h,hasSpawns:c})});if(!g.ok)throw new Error(`Persistence sync failed: ${g.status}`);return await g.json()})){y&&console.error(`[CLIENT MW] \u274C Failed to finalize node ${d} after retries`);return}return n({...e,meta:{...e.meta,origin:"client",functionId:p,cascadeId:f}})}if(M.match(e)){let{origin:r,functionId:o,cascadeId:i}=e.meta,{nodeId:d,error:c}=e.payload,l=a.getState().workflow.activeNodes[d],u=i||l?.cascadeId;if(!u||r)return n(e);if(!await C(async()=>{let s=await fetch(t.persistenceEndpoint,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({action:"error",nodeInstanceId:d,cascadeId:u,error:c})});if(!s.ok)throw new Error(`Persistence sync failed: ${s.status}`);return await s.json()})){y&&console.error(`[CLIENT MW] \u274C Failed to mark node ${d} as failed after retries`);return}return 
n({...e,meta:{...e.meta,origin:"client",cascadeId:u}})}if(m.match(e)){let{origin:r,functionId:o,cascadeId:i}=e.meta,d=e.payload,c=i,l=o??0;if(!c||r||(y&&console.log("[CLIENT MW] \u{1F504} Persisting context:",JSON.stringify(d,null,2)),d[c]?.status==="streaming"))return n(e);if(!await C(async()=>{let u=await fetch(t.persistenceEndpoint,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({action:"context",cascadeId:c,functionId:l,updates:d})});if(!u.ok)throw new Error(`Persistence sync failed: ${u.status}`);return await u.json()})){y&&console.error(`[CLIENT MW] \u274C Failed to record context for cascade ${c} after retries`);return}return n({...e,meta:{...e.meta,origin:"client",functionId:l,cascadeId:c}})}return n(e)},W=Z(),ee=W.startListening,$=`
1
+ import{useDispatch as ie,useSelector as _}from"react-redux";import{useMemo as H}from"react";import{configureStore as Y,combineReducers as z}from"@reduxjs/toolkit";import{createListenerMiddleware as K}from"@reduxjs/toolkit";import{createSlice as Z}from"@reduxjs/toolkit";import{v7 as D}from"uuid";import{configureStore as Ee,createListenerMiddleware as _e}from"@reduxjs/toolkit";import{v7 as be}from"uuid";import{createSelector as E}from"reselect";import{v7 as $e}from"uuid";var B={context:{},activeNodes:{},history:[],errors:{}},N=Z({name:"workflow",initialState:B,reducers:{updateContext:{reducer:(t,n)=>{Object.entries(n.payload).forEach(([a,e])=>{if(t.context[a]||(t.context[a]=[]),typeof e=="object"&&e!==null&&!Array.isArray(e)&&"index"in e){let i=e.index;t.context[a][i]=e}else t.context[a].push(e)})},prepare:(t,n)=>({payload:t,meta:n||{}})},addActiveNode:{reducer:(t,n)=>{let{nodeId:a,nodeName:e,parentTriggerId:i,contextData:r}=n.payload,{origin:o,functionId:d,cascadeId:c}=n.meta;t.activeNodes[a]={nodeName:e,parentTriggerId:i,processed:!1,initialContext:r,origin:o,functionId:d,cascadeId:c||r?.cascadeId},t.errors[a]&&delete t.errors[a]},prepare:(t,n)=>{let a=n?.cascadeId||t.contextData?.cascadeId;return{payload:t,meta:{...n,cascadeId:a}}}},removeActiveNode:{reducer:(t,n)=>{let{nodeId:a}=n.payload;t.activeNodes[a]&&(t.history.push({nodeId:a,nodeName:t.activeNodes[a].nodeName,timestamp:Date.now()}),delete t.activeNodes[a])},prepare:(t,n)=>({payload:t,meta:n||{}})},setError:{reducer:(t,n)=>{let{nodeId:a,error:e}=n.payload;t.errors[a]=e},prepare:(t,n)=>({payload:t,meta:n||{}})},markNodeProcessed:(t,n)=>{let{nodeId:a}=n.payload;t.activeNodes[a]&&(t.activeNodes[a].processed=!0)},streamChunkReceived:(t,n)=>{let{cascadeId:a,identity:e,value:i}=n.payload;if(!e||i===void 0)return;let r=t.context[a];if(!r||r.length===0)return;let o=r[r.length-1].history,d=o[o.length-1];!d||d.role!=="assistant"||(typeof 
d[e]=="string"?d[e]+=i:d[e]=i)},hydrateContext:(t,n)=>{Object.entries(n.payload).forEach(([a,e])=>{t.context[a]=e})},forkAndHydrate:{reducer:()=>{},prepare:t=>({payload:t})}}}),{updateContext:I,addActiveNode:w,removeActiveNode:C,setError:$,markNodeProcessed:Se,streamChunkReceived:q,hydrateContext:O,forkAndHydrate:k}=N.actions,xe=N.reducer,M=K(),X=M.startListening,b=`
2
2
  __END_STREAM_METADATA__
3
- `,te=t=>{ee({actionCreator:w,effect:async(a,n)=>{let{nodeId:e,nodeName:r,contextData:o}=a.payload,{functionId:i}=a.meta,d=t.workflowGraph[r],c=t.isLite;if(!d||d.isUINode)return;let l=c?o?.history?.slice(-1)??[]:[...o?.history??[]];o?.sentFromClient&&await n.dispatch(m({[o?.cascadeId]:{history:l,status:"completed"}},{functionId:i,cascadeId:o?.cascadeId}));try{let u=await fetch(t.actionRelayEndpoint,{method:"POST",headers:{"Content-Type":"application/json"},signal:n.signal,body:JSON.stringify(a)});if(!u.ok||!u.body)throw new Error(await u.text()||"Action relay failed");let s=o?.cascadeId;await ne(u.body,s,e,n,a,c)}catch(u){if(u.name==="AbortError")return;await n.dispatch(M({nodeId:e,error:u.message})),await n.dispatch(I({nodeId:e,hasSpawns:!1}))}}})};async function ne(t,a,n,e,r,o){let i=t.getReader(),d=new TextDecoder,c="",l=!1;try{for(;;){let{done:u,value:s}=await i.read();if(u)break;if(c+=d.decode(s,{stream:!0}),!l){let f=c.indexOf($);if(f!==-1){let p=c.substring(0,f),h=c.substring(f+$.length);if(b(p,a,e),h.trim())try{let g=JSON.parse(h.trim());await ae(g,n,e,r,o)}catch(g){console.error("Metadata parse error",g)}l=!0;break}c=await b(c,a,e)}}}finally{i.releaseLock()}}async function b(t,a,n){let e=t.split(`
4
- `),r=e.pop()||"";for(let o of e)if(!(!o.trim()||o.startsWith(":")))try{let i=JSON.parse(o);i.type==="init"?await n.dispatch(m({[i.cascadeId]:{history:[{role:"assistant",content:""}],status:"streaming"}})):i.type==="sync"?await n.dispatch(m({[i.cascadeId]:{history:[i.history],status:"completed"}})):n.dispatch(q({cascadeId:a,contentChunk:i.contentChunk||i.chunk,toolChunk:i.toolChunk||i.tool_call,reasoningChunk:i.reasoningChunk}))}catch(i){console.error("Chunk parse error",i)}return r}async function ae(t,a,n,e,r){let o=e.meta?.functionId??0,i=e.meta?.origin??"server",d=e.payload.contextData?.cascadeId,c=t.spawns?Object.entries(t.spawns):[],l=c.length>0;l&&c.forEach(async([u,s],f)=>{let p=r?n.getState().workflow.context[d]:s;await n.dispatch(w({nodeId:`${u}_${Date.now()}_${f}`,nodeName:u,parentTriggerId:a,contextData:{...p,userId:p?.userId||e.payload.contextData?.userId}},{functionId:o+1+f,cascadeId:d,origin:i}))}),await n.dispatch(I({nodeId:a,hasSpawns:l,fullOutput:t.updates},{functionId:void 0,cascadeId:d,origin:i}))}var oe=z({workflow:S.reducer}),P=(t,a,n,e,r=[])=>{te({workflowGraph:t,actionRelayEndpoint:a,isLite:!n});let o=[];return n&&o.push(Q({persistenceEndpoint:n}),K({hydrationEndpoint:e})),o.push(...r),o.push(W.middleware),Y({reducer:oe,middleware:i=>i({serializableCheck:!1}).concat(o),devTools:process.env.NODE_ENV!=="production"})},Re=process.env.NODE_ENV==="development";var De=process.env.NODE_ENV==="development";var Te=process.env.NODE_ENV==="development";var re=t=>t.workflow.activeNodes,F=t=>t.workflow.context,j=(t,a)=>a,L=()=>_([F,j],(t,a)=>{let n=t[a];if(!n||n.length===0)return;let e=n[n.length-1].status,r=n.flatMap(o=>o.history||[]);return{status:e,history:r}}),J=()=>_([re,j],(t,a)=>(console.log(`Reselect: Re-calculating nodes for cascade 
${a}`),Object.entries(t).filter(([n,e])=>e.initialContext?.cascadeId===a).map(([n,e])=>({nodeId:n,nodeName:e.nodeName,parentTriggerId:e.parentTriggerId,initialContext:e.initialContext,processed:e.processed})))),V=_([F],t=>(console.log("Reselect: Re-calculating all cascade IDs"),Object.keys(t).filter(a=>a.startsWith("cascade_"))));var Le=t=>{let a=k(L,[]),n=k(J,[]),e=ie(),r=E(s=>a(s,t)),o=E(s=>n(s,t)),i=k(()=>o.length===0?null:[...o].sort((s,f)=>{let p=parseInt(s.nodeId.split("_")[1]||"0");return parseInt(f.nodeId.split("_")[1]||"0")-p})[0],[o]),d=k(()=>{let s=new Map(o.map(p=>[p.nodeId,{...p,children:[]}])),f=[];return o.forEach(p=>{let h=s.get(p.nodeId);p.parentTriggerId&&s.has(p.parentTriggerId)?s.get(p.parentTriggerId).children.push(h):f.push(h)}),f},[o]),c=async(s,f,p)=>{let h=await se(t,s,f,p),g=await ce(s,f,p);return e(v(g)),h},l=o.length===0&&r!==void 0,u=r!==void 0||o.length>0;return{cascadeState:r,cascadeNodes:o,currentNode:i,cascadeTree:d,isComplete:l,exists:u,forkCascade:c}},Je=()=>E(V);async function se(t,a,n,e){let r=await fetch(e,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({action:"fork",sourceCascadeId:t,newCascadeId:a,upToFunctionId:n})});if(!r.ok)throw new Error(`forkCascade failed: ${await r.text()}`);return r.json()}async function ce(t,a,n){let e=await fetch(n,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({action:"hydrate",cascadeId:t,functionId:a})});if(!e.ok)throw new Error(`hydrate failed: ${await e.text()}`);return e.json()}import{useDispatch as de,useSelector as R}from"react-redux";import{useMemo as H,useCallback as N,useRef as le,useEffect as ue}from"react";var fe={},Be=t=>{let a=de(),n=le(void 0);ue(()=>{n.current!==t&&(n.current=t)},[t]);let e=H(()=>t.split("_")[0],[t]),r=R(s=>s.workflow.context[e]||fe),o=R(s=>s.workflow.activeNodes[t]),i=s=>R(f=>s(f.workflow.context)),d=N(async s=>await a(m({[e]:{...r,...s}})),[a,e,r]),c=N(async s=>await a(m(s)),[a]),l=N(async s=>await 
a(I(s)),[a]),u=N(async(s,f)=>{let p=Array.isArray(s)?s:[s],h=f?.cascadeId,g=[];for(let A of p){let G=`${A}_${Date.now()}_${Math.random().toString(36).substring(2,8)}`,O={...f,sentFromClient:!0};h&&(O.cascadeId=h,g.push(h)),await a(w({nodeId:G,nodeName:A,parentTriggerId:t,contextData:O}))}if(h)return g.length===1?g[0]:g},[a,t]);return H(()=>({context:r,useWorkflowSelector:i,saveContext:d,saveGlobalContext:c,signalCompletion:l,addActiveNode:u,nodeName:e,nodeId:t,nodeData:o}),[r,i,d,c,l,u,e,t,o])};import pe,{useRef as he}from"react";import{Provider as ge}from"react-redux";import{jsx as U}from"react/jsx-runtime";var D=pe.createContext(null);function et({children:t,initialNodeId:a,initialNodeName:n,initialContext:e={},config:r,actionRelayEndpoint:o,persistenceEndpoint:i,hydrationEndpoint:d}){let c=he(null);return c.current||(c.current=P(r.clientWorkflowGraph,o,i,d),c.current.dispatch(m(e)),c.current.dispatch(w({nodeId:a,nodeName:n}))),U(ge,{store:c.current,children:U(D.Provider,{value:r,children:t})})}import{useContext as me}from"react";import{useDispatch as ye,useSelector as we}from"react-redux";import{jsx as T}from"react/jsx-runtime";var ct=()=>ye(),Ie=we;function dt(){let t=me(D);if(!t)throw new Error("WorkflowRenderer must be used within a WorkflowProvider.");let{activeNodes:a}=Ie(o=>o.workflow),{clientWorkflowGraph:n,uiComponentRegistry:e}=t,r=Object.keys(a).filter(o=>{let i=a[o];return n[i.nodeName]?.isUINode});return r.length===0?null:T("div",{children:r.map(o=>{let i=a[o],d=e[i.nodeName];return d?T("div",{children:T(d,{nodeId:o})},o):(console.error(`UI component for node '${i.nodeName}' not found.`),null)})})}export{D as ReactWorkflowContext,et as WorkflowProvider,dt as WorkflowRenderer,se as apiForkCascade,ce as apiHydrateCascadeContext,Je as useAllCascades,ct as useAppDispatch,Ie as useAppSelector,Le as useCascade,Be as useWorkflow};
3
+ `,Q=t=>{X({actionCreator:w,effect:async(n,a)=>{let{nodeId:e,nodeName:i,contextData:r}=n.payload,{functionId:o}=n.meta,d=t.workflowGraph[i],c=t.isLite;if(!(!d||d.isUINode)){if(r?.sentFromClient&&!r?.handledTimeout){let{cascadeId:s,userId:u,history:l,sentFromClient:f,...m}=r,p=c?l?.slice(-1)??[]:[...l??[]];await a.dispatch(I({[s]:{...m,history:p,status:"completed"}},{functionId:o,cascadeId:s}))}try{let s=await fetch(t.actionRelayEndpoint,{method:"POST",headers:{"Content-Type":"application/json"},signal:a.signal,body:JSON.stringify(n)});if(!s.ok||!s.body)throw new Error(await s.text()||"Action relay failed");let u=r?.cascadeId;await ee(s.body,u,e,a,n,c)}catch(s){if(s.name==="AbortError")return;await a.dispatch($({nodeId:e,error:s.message})),await a.dispatch(C({nodeId:e,hasSpawns:!1}))}}}})};async function ee(t,n,a,e,i,r){let o=t.getReader(),d=new TextDecoder,c="",s=!1;try{for(;;){let{done:u,value:l}=await o.read();if(u)break;if(c+=d.decode(l,{stream:!0}),!s){let f=c.indexOf(b);if(f!==-1){let m=c.substring(0,f),p=c.substring(f+b.length);if(await F(m,n,e),p.trim())try{let y=JSON.parse(p.trim());await te(y,a,e,i,r)}catch(y){console.error("Metadata parse error",y)}s=!0;break}c=await F(c,n,e)}}}finally{o.releaseLock()}}async function F(t,n,a){let e=t.split(`
4
+ `),i=e.pop()||"";for(let r of e)if(!(!r.trim()||r.startsWith(":")))try{let o=JSON.parse(r);o.type==="init"?await a.dispatch(I({[o.cascadeId]:{history:[{role:"assistant"}],status:"streaming"}})):o.type==="sync"?await a.dispatch(I({[o.cascadeId]:{history:o.history,status:"completed"}})):o.type==="ui_spawn"?await a.dispatch(w({nodeId:`${o.nodeName}_${D()}`,nodeName:o.nodeName,contextData:o.contextData})):await a.dispatch(q({cascadeId:o.cascadeId,identity:o.identity,value:o.value}))}catch(o){console.error("Chunk parse error",o)}return i}async function te(t,n,a,e,i){let r=e.meta?.functionId??0,o=e.meta?.origin??"server",d=e.payload.contextData?.cascadeId,c=l=>{if(!i)return l;let f=a.getState().workflow.context[d],m=Object.keys(f||{}),p=Object.fromEntries(Object.entries(l||{}).filter(([x])=>!m.includes(x))),y=f.flatMap(x=>x.history||[]);return{...p,history:y,sentFromClient:!0,handledTimeout:!0}},s=t.spawns?Object.entries(t.spawns):[],u=s.length>0;u&&s.forEach(async([l,f],m)=>{let p=c(f);await a.dispatch(w({nodeId:`${l}_${D()}_${m}`,nodeName:l,parentTriggerId:n,contextData:{...p,cascadeId:d,userId:p?.userId||e.payload.contextData?.userId}},{functionId:i?0:r+1+m,cascadeId:d,origin:o}))}),await a.dispatch(C({nodeId:n,hasSpawns:u,fullOutput:t.updates},{functionId:void 0,cascadeId:d,origin:o}))}var ae=z({workflow:N.reducer}),j=(t,n,a,e=[])=>{Q({workflowGraph:t,actionRelayEndpoint:n,isLite:!a});let i=[];return a&&i.push(oe({persistenceEndpoint:a}),re({persistenceEndpoint:a})),i.push(...e),i.push(M.middleware),Y({reducer:ae,middleware:r=>r({serializableCheck:!1}).concat(i),devTools:process.env.NODE_ENV!=="production"})},Ae=process.env.NODE_ENV==="development";var Re=process.env.NODE_ENV==="development";var ne=t=>t.workflow.activeNodes,W=t=>t.workflow.context,P=(t,n)=>n,U=()=>E([W,P],(t,n)=>{let a=t[n];if(!a||a.length===0)return;let e=a[a.length-1].status,i={};return 
// ---------------------------------------------------------------------------
// NOTE(review): minified bundler output (dist build) — identifiers are
// mangled and several source modules are concatenated. Comments below
// annotate only what the visible code shows; bare short names (w, C, $, I,
// k, O, E, _, H, U, P, W, ne, ie, j, ...) are defined earlier in the bundle,
// outside this chunk, so their exact roles are inferred — verify against the
// un-minified sources.
// ---------------------------------------------------------------------------
// Tail of a memoized-selector combiner whose head appears earlier in the
// bundle: folds each entry's non-nullish fields into the accumulator `i`
// (array values are flattened in), then returns the merged map together with
// the overall cascade `status`.
a.forEach(r=>{let{status:o,...d}=r;Object.entries(d).forEach(([c,s])=>{s!=null&&(i[c]||(i[c]=[]),Array.isArray(s)?i[c].push(...s):i[c].push(s))})}),{...i,status:e}}),
// Selector factory: active nodes whose initialContext.cascadeId matches the
// requested cascade, projected onto a plain descriptor object.
V=()=>E([ne,P],(t,n)=>Object.entries(t).filter(([a,e])=>e.initialContext?.cascadeId===n).map(([a,e])=>({nodeId:a,nodeName:e.nodeName,parentTriggerId:e.parentTriggerId,initialContext:e.initialContext,processed:e.processed}))),
// Selector: ids of all known cascades (keys of the context slice `W`).
L=E([W],t=>Object.keys(t));
// Per-module dev flags (the bundler duplicates this constant per source file).
var De=process.env.NODE_ENV==="development";
var g=process.env.NODE_ENV==="development";
// Retry helper for the persistence middleware: runs `t` up to `n` times with
// exponential backoff (a * 2^(attempt-1) ms). Resolves null after the final
// failure — it never rethrows, so callers must null-check the result.
async function v(t,n=3,a=100){for(let e=1;e<=n;e++)try{return await t()}catch(i){if(e===n)return g&&console.error(`[CLIENT MW] \u274C All ${n} retry attempts failed:`,i),null;let r=a*Math.pow(2,e-1);g&&console.warn(`[CLIENT MW] \u26A0\uFE0F Attempt ${e} failed, retrying in ${r}ms...`),await new Promise(o=>setTimeout(o,r))}return null}
// Client-side persistence middleware (curried Redux middleware shape:
// t = options carrying `persistenceEndpoint`, n = store API, a = next,
// e = action). Mirrors node lifecycle actions to the server, then forwards
// them re-stamped with origin:"client". An action that cannot be persisted
// after retries is swallowed (next is never called), halting its propagation.
var oe=t=>n=>a=>async e=>{
// Node spawn (payload nodeId/nodeName/contextData — presumably the
// addActiveNode action): POST action:"claim" to bind the node to a server
// functionId; skipped when there is no cascadeId or the action already
// carries an origin.
if(w.match(e)){let{origin:i,functionId:r,cascadeId:o}=e.meta,{nodeId:d,nodeName:c,contextData:s}=e.payload;if(!o||i)return a(e);let u=r??0,l=await v(async()=>{let p=await fetch(t.persistenceEndpoint,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({action:"claim",nodeInstanceId:d,cascadeId:o,userId:s.userId,nodeName:c,functionId:u,inputContext:s,location:"client"})});if(!p.ok)throw new Error(`Persistence sync failed: ${p.status}`);return await p.json()});if(!l){g&&console.error(`[CLIENT MW] \u274C Failed to claim node ${d} after retries`);return}let f=l.functionId;g&&console.log(`[CLIENT MW] \u2705 Node ${d} bound to FnId: ${f}`);let m={...e,meta:{...e.meta,origin:"client",functionId:f,cascadeId:o}};return a(m)}
// Node completion: POST action:"finalize". cascadeId/functionId fall back to
// the active-node record in the store; fullOutput falls back to the context
// accumulated under this functionId.
if(C.match(e)){let{origin:i,functionId:r,cascadeId:o}=e.meta,d=typeof e.payload=="string"?e.payload:e.payload.nodeId,c=e.payload.hasSpawns,s=e.payload.fullOutput,u=n.getState(),l=u.workflow.activeNodes[d],f=o||l?.cascadeId,m=r??l?.functionId??0;if(!f||i)return a(e);let p=s||u.workflow.context[f];if(!await v(async()=>{let y=await fetch(t.persistenceEndpoint,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({action:"finalize",nodeInstanceId:d,cascadeId:f,fullOutput:p,hasSpawns:c})});if(!y.ok)throw new Error(`Persistence sync failed: ${y.status}`);return await y.json()})){g&&console.error(`[CLIENT MW] \u274C Failed to finalize node ${d} after retries`);return}return a({...e,meta:{...e.meta,origin:"client",functionId:m,cascadeId:f}})}
// Node failure: POST action:"error" so the server records the failed state.
if($.match(e)){let{origin:i,functionId:r,cascadeId:o}=e.meta,{nodeId:d,error:c}=e.payload,s=n.getState().workflow.activeNodes[d],u=o||s?.cascadeId;if(!u||i)return a(e);if(!await v(async()=>{let l=await fetch(t.persistenceEndpoint,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({action:"error",nodeInstanceId:d,cascadeId:u,error:c})});if(!l.ok)throw new Error(`Persistence sync failed: ${l.status}`);return await l.json()})){g&&console.error(`[CLIENT MW] \u274C Failed to mark node ${d} as failed after retries`);return}return a({...e,meta:{...e.meta,origin:"client",cascadeId:u}})}
// Context update: POST action:"context"; deliberately skipped while this
// cascade's payload status is "streaming" (partial chunks are not persisted).
if(I.match(e)){let{origin:i,functionId:r,cascadeId:o,uiUpdates:d}=e.meta,c=e.payload,s=o,u=r??0;if(!s||i||(g&&console.log("[CLIENT MW] \u{1F504} Persisting context:",JSON.stringify(c,null,2)),c[s]?.status==="streaming"))return a(e);if(!await v(async()=>{let l=await fetch(t.persistenceEndpoint,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({action:"context",cascadeId:s,functionId:u,updates:c,uiUpdates:d})});if(!l.ok)throw new Error(`Persistence sync failed: ${l.status}`);return await l.json()})){g&&console.error(`[CLIENT MW] \u274C Failed to record context for cascade ${s} after retries`);return}return a({...e,meta:{...e.meta,origin:"client",functionId:u,cascadeId:s}})}
// All other actions pass through untouched.
return a(e)},
// Dev flag for the hydration middleware below.
h=process.env.NODE_ENV==="development";
// Retry helper for hydration/fork requests — same backoff scheme as `v`.
async function T(t,n=3,a=100){for(let e=1;e<=n;e++)try{return await t()}catch(i){if(e===n)return h&&console.error(`[HYDRATION] \u274C All ${n} attempts failed:`,i),null;let r=a*Math.pow(2,e-1);h&&console.warn(`[HYDRATION] \u26A0\uFE0F Attempt ${e} failed, retrying in ${r}ms...`),await new Promise(o=>setTimeout(o,r))}return null}
// Hydration middleware. Unlike `oe`, an exhausted retry here THROWS, since
// proceeding without the hydrated state would corrupt the cascade.
var re=t=>n=>a=>async e=>{
// Cold start: on a client-origin node spawn with a bound functionId (>0), if
// the store holds no context yet for this cascade, fetch it
// (action:"hydrate") and restore it via dispatch(O(...)).
if(w.match(e)){let{origin:i,functionId:r,cascadeId:o}=e.meta;if(o&&r>0&&i==="client"){let d=n.getState().workflow.context[o];if(!d||Object.keys(d).length===0){h&&console.log(`[HYDRATION] \u{1F9CA} Cold start for ${o}`);let c=await T(async()=>{let s=await fetch(t.persistenceEndpoint,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({action:"hydrate",cascadeId:o,functionId:r,ui:!0})});if(!s.ok)throw new Error(`Status ${s.status}`);return await s.json()});if(!c){let s=`[HYDRATION] Critical Failure: Could not hydrate cascade ${o}. Action terminated.`;throw h&&console.error(s),new Error("Hydration failed")}Object.keys(c).length>0&&(n.dispatch(O(c)),h&&console.log("[HYDRATION] \u2705 State restored successfully"))}}}
// Fork: server-side copy of a cascade up to a functionId
// (action:"forkAndHydrate"), then local hydration of the returned context.
if(k.match(e)){let{sourceCascadeId:i,newCascadeId:r,upToFunctionId:o}=e.payload;h&&console.log(`[FORK] \u{1F374} Forking ${i} \u2192 ${r} at fn ${o}`);let d=await T(async()=>{let c=await fetch(t.persistenceEndpoint,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({action:"forkAndHydrate",sourceCascadeId:i,newCascadeId:r,upToFunctionId:o})});if(!c.ok)throw new Error(`Fork failed with status ${c.status}`);return await c.json()});if(!d){let c=`[FORK] Critical Failure: Could not fork cascade ${i}. 
Action terminated.`;throw h&&console.error(c),new Error("Fork failed")}d.context&&Object.keys(d.context).length>0?(n.dispatch(O(d.context)),h&&console.log(`[FORK] \u2705 Fork complete, context hydrated for ${r}`)):h&&console.log(`[FORK] \u2705 Fork complete, no context to hydrate for ${r}`)}
return a(e)};
// Exported as useCascade(cascadeId): memoized cascade state + node list,
// completion/existence flags, and forkCascade(newId, upToFnId, sourceId?)
// which dispatches the fork action and resolves {status:"SUCCESS"|"FAILED"}.
var He=t=>{let n=H(U,[]),a=H(V,[]),e=ie(),i=_(s=>n(s,t)),r=_(s=>a(s,t)),o=r.length===0&&i!==void 0,d=i!==void 0||r.length>0;return{cascadeState:i,cascadeNodes:r,isComplete:o,exists:d,forkCascade:async(s,u,l)=>{try{return await e(k({sourceCascadeId:l||t,newCascadeId:s,upToFunctionId:u})),{status:"SUCCESS"}}catch{return{status:"FAILED"}}}}},
// Exported as useAllCascades: ids of every known cascade.
Je=()=>_(L);
import{useDispatch as se,useSelector as de,useStore as ce}from"react-redux";
import{useMemo as le,useCallback as S}from"react";
import{v7 as ue}from"uuid";
var J=process.env.NODE_ENV==="development",
// Exported as useWorkflow(nodeId): controls one active node. `i` is true only
// for persisted nodes (the node has both a cascadeId and an origin).
Be=t=>{let n=se(),a=ce(),e=de(s=>s.workflow.activeNodes[t]),i=!!(e?.cascadeId&&e?.origin),r=S(()=>a.getState(),[a]),
// updateContext: dispatches a context update stamped with this node's
// cascade/function ids; on an ephemeral node it dev-warns and no-ops.
o=S(async s=>{if(!i){J&&console.warn(`[useWorkflow] updateContext called on ephemeral node "${t}". This node has no cascadeId or origin \u2014 context will not be persisted. If persistence is required, ensure the node is spawned with a cascadeId.`);return}return await n(I(s,{origin:"client",cascadeId:e.cascadeId,functionId:e.functionId}))},[n,i,t,e]),
// addActiveNode: spawns one child per entry, with a uuid-v7-suffixed node id
// and this node as parentTriggerId; returns a name->cascadeId map for any
// children that carried their own cascadeId, else undefined.
d=S(async s=>{let u={};for(let[l,f]of Object.entries(s)){let m=`${l}_${ue()}`,{cascadeId:p,...y}=f;await n(w({nodeId:m,nodeName:l,parentTriggerId:t,contextData:{...y,sentFromClient:!0,...p&&{cascadeId:p}}})),p&&(u[l]=p)}return Object.keys(u).length>0?u:void 0},[n,t]),
// signalCompletion: dev-warns when a persisted node completes without a
// fullOutput (its result would not be recorded), then dispatches completion.
c=S(async(s,u)=>(J&&i&&!u&&console.warn(`[useWorkflow] signalCompletion called on persisted node "${t}" without fullOutput. The node's output will not be recorded. 
Pass fullOutput to capture the result of this node execution.`),await n(C({nodeId:t,hasSpawns:s,fullOutput:u},{functionId:e?.functionId,cascadeId:e?.cascadeId}))),[n,t,i,e]);return le(()=>({nodeData:e,getState:r,updateContext:o,addActiveNode:d,signalCompletion:c}),[e,r,o,d,c])};
import pe,{useRef as fe}from"react";
import{Provider as me}from"react-redux";
import{jsx as G}from"react/jsx-runtime";
// Config context consumed by WorkflowRenderer (exported ReactWorkflowContext).
var A=pe.createContext(null);
// Exported as WorkflowProvider: builds the store exactly once (useRef guard),
// seeds the initial context and root node, then provides both the Redux store
// and the workflow config to descendants.
function at({children:t,initialNodeId:n,initialNodeName:a,initialContext:e={},config:i,actionRelayEndpoint:r,persistenceEndpoint:o,extraMiddlewares:d=[]}){let c=fe(null);return c.current||(c.current=j(i.clientWorkflowGraph,r,o,d),c.current.dispatch(I(e)),c.current.dispatch(w({nodeId:n,nodeName:a}))),G(me,{store:c.current,children:G(A.Provider,{value:i,children:t})})}
import{useContext as ye}from"react";
import{useDispatch as he,useSelector as we}from"react-redux";
import{jsx as R}from"react/jsx-runtime";
// Typed dispatch/selector re-exports (useAppDispatch / useAppSelector).
var lt=()=>he(),ge=we;
// Exported as WorkflowRenderer: renders the registered UI component for every
// active node whose graph entry is flagged isUINode; logs an error (and
// renders nothing) for a node with no registered component.
function ut(){let t=ye(A);if(!t)throw new Error("WorkflowRenderer must be used within a WorkflowProvider.");let{activeNodes:n}=ge(r=>r.workflow),{clientWorkflowGraph:a,uiComponentRegistry:e}=t,i=Object.keys(n).filter(r=>{let o=n[r];return a[o.nodeName]?.isUINode});return i.length===0?null:R("div",{children:i.map(r=>{let o=n[r],d=e[o.nodeName];return d?R("div",{children:R(d,{nodeId:r})},r):(console.error(`UI component for node '${o.nodeName}' not found.`),null)})})}
export{A as ReactWorkflowContext,at as WorkflowProvider,ut as WorkflowRenderer,Je as useAllCascades,lt as useAppDispatch,ge as useAppSelector,He as useCascade,Be as useWorkflow};
package/package.json CHANGED
@@ -1,8 +1,11 @@
1
1
  {
2
2
  "name": "@cascaide-ts/react",
3
- "version": "0.1.0",
3
+ "version": "0.5.1",
4
+ "license": "MIT",
4
5
  "files": [
5
- "dist"
6
+ "dist",
7
+ "LICENSE",
8
+ "README.md"
6
9
  ],
7
10
  "type": "module",
8
11
  "main": "./dist/index.cjs",
@@ -22,16 +25,19 @@
22
25
  "react": ">=18",
23
26
  "react-dom": ">=18",
24
27
  "react-redux": "^9.0.0",
25
- "@cascaide-ts/core": "0.1.0"
28
+ "uuid": "^13.0.0",
29
+ "@cascaide-ts/core": "0.5.1"
26
30
  },
27
31
  "devDependencies": {
28
32
  "@types/react": "^18.0.0",
29
33
  "react": "^18.0.0",
30
34
  "react-dom": "^18.0.0",
31
35
  "react-redux": "^9.2.0",
36
+ "@reduxjs/toolkit": "^2.0.0",
32
37
  "tsup": "^8.5.1",
33
38
  "typescript": "^5.0.0",
34
- "@cascaide-ts/core": "0.1.0"
39
+ "uuid": "^13.0.0",
40
+ "@cascaide-ts/core": "0.5.1"
35
41
  },
36
42
  "scripts": {
37
43
  "build": "tsup",