@vpxa/aikit 0.1.67 → 0.1.69
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
|
-
import{copyFileSync as e,cpSync as t,existsSync as n,mkdirSync as r,readFileSync as i,readdirSync as a,rmSync as o,rmdirSync as s,symlinkSync as c,unlinkSync as l,writeFileSync as u}from"node:fs";import{basename as d,dirname as f,join as p,relative as m}from"node:path";import{
|
|
1
|
+
import{copyFileSync as e,cpSync as t,existsSync as n,mkdirSync as r,readFileSync as i,readdirSync as a,rmSync as o,rmdirSync as s,symlinkSync as c,unlinkSync as l,writeFileSync as u}from"node:fs";import{basename as d,dirname as f,join as p,relative as m}from"node:path";import{parse as h}from"yaml";import{execSync as g,spawnSync as _}from"node:child_process";import{tmpdir as v}from"node:os";const y=[`spec`,`design`,`plan`,`task`,`execute`,`verify`];function b(e){let t=e.match(/^---\r?\n([\s\S]*?)\r?\n---\r?\n?([\s\S]*)$/);if(!t)return{data:{},body:e};let n=h(t[1]),r={};if(n&&typeof n==`object`&&!Array.isArray(n))for(let[e,t]of Object.entries(n))r[e]=String(t);return{data:r,body:t[2]}}function x(e){return e.split(/[-_]/g).filter(Boolean).map(e=>e.charAt(0).toUpperCase()+e.slice(1)).join(` `)}function S(e){let t=y.indexOf(e);if(t!==-1)return t;let n=e.lastIndexOf(`.`);if(n!==-1){let t=e.slice(n+1),r=y.indexOf(t);if(r!==-1)return r}return 1/0}var C=class{format=`claude-plugin`;detect(e){return n(p(e,`.claude-plugin`,`plugin.json`))}async parse(e,t){let n=this.readPluginJson(e),r=this.discoverSteps(e);await this.materializeNativeSteps(e,r,t);let i=this.discoverAgents(e),a=r.map(({step:e})=>e);return{name:this.readString(n.name)??d(e),version:this.readString(n.version)??`1.0.0`,description:this.readString(n.description)??``,author:this.readString(n.author),steps:a,agents:i,artifacts_dir:`.spec`,install:[]}}readPluginJson(e){let t=p(e,`.claude-plugin`,`plugin.json`);return JSON.parse(i(t,`utf-8`))}readString(e){return typeof e==`string`?e:void 0}discoverSteps(e){let t=p(e,`skills`);if(!n(t))return[];let r=a(t,{withFileTypes:!0}).filter(e=>e.isDirectory()).map(e=>e.name).map(t=>this.readStep(e,t)).filter(e=>e!==null).sort((e,t)=>{let n=Number(e.metadata.order),r=Number(t.metadata.order),i=Number.isFinite(n),a=Number.isFinite(r);if(i&&a)return n-r;if(i)return-1;if(a)return 1;let o=S(e.step.id),s=S(t.step.id);return o===s?e.step.id.localeCompare(t.step.id):o-s});return 
r.map((e,t)=>({...e,step:{...e.step,requires:t>0?[r[t-1].step.id]:[]}}))}readStep(e,t){let r=p(e,`skills`,t,`SKILL.md`);if(!n(r))return null;let a=i(r,`utf-8`),{data:o,body:s}=b(a),c=o.name||x(t),l=o.description||`${c} step`;return{step:{id:t,name:c,instruction:`steps/${t}/README.md`,produces:[`${t}.md`],requires:[],agents:[],description:l},metadata:o,content:a,body:s}}syncStepAssets(e,r,i){let a=[`references`,`assets`,`scripts`];for(let s of r)for(let r of a){let a=p(e,`skills`,s.id,r),c=p(e,`steps`,s.id,r);if(n(a)){if(n(c)){if(!i)continue;o(c,{recursive:!0,force:!0})}t(a,c,{recursive:!0})}}}async materializeNativeSteps(e,t,n){for(let i of t){let t=p(e,`steps`,i.step.id);r(t,{recursive:!0});let a=n?.transform?await n.transform({content:i.content,sourceFormat:`claude-plugin`,stepName:i.step.id,metadata:i.metadata}):this.buildStepReadme(i.step.name,i.step.description,i.body);u(p(t,`README.md`),a,`utf-8`)}this.syncStepAssets(e,t.map(({step:e})=>e),n?.forceAssetSync)}buildStepReadme(e,t,n){return[`# ${e}`,``,t,``,`---`,``,n.trim()].join(`
|
|
2
2
|
`).trimEnd().concat(`
|
|
3
|
-
`)}discoverAgents(e){let t=p(e,`agents`);return n(t)?a(t,{withFileTypes:!0}).filter(e=>e.isFile()&&e.name.endsWith(`.md`)).map(e=>`agents/${e.name}`):[]}},w=class{format=`copilot`;detect(e){return n(p(e,`.github`,`agents`))}async parse(e,t){let n=a(p(e,`.github`,`agents`),{withFileTypes:!0}).filter(e=>e.isFile()&&e.name.endsWith(`.md`)).map(e=>`.github/agents/${e.name}`),r=n.map(e=>this.getStepId(e)),i=n.map((e,t)=>{let n=r[t];return{id:n,name:n.charAt(0).toUpperCase()+n.slice(1),instruction:`steps/${n}/README.md`,produces:[`${n}.md`],requires:t>0?[r[t-1]]:[],agents:[e],description:`${n} agent step`}});return await this.materializeSteps(e,i,t),{name:d(e),version:`1.0.0`,description:`Copilot agents flow from ${d(e)}`,steps:i,agents:n,artifacts_dir:`.spec`,install:[]}}getStepId(e){return d(e,`.md`).toLowerCase().replace(/\.agent$/,``)}async materializeSteps(e,t,n){for(let a of t){let t=a.agents[0];if(!t)continue;let o=i(p(e,t),`utf-8`),s=p(e,`steps`,a.id),c=p(s,`README.md`);r(s,{recursive:!0}),u(c,n?.transform?await n.transform({content:o,sourceFormat:`copilot`,stepName:a.id,metadata:{sourcePath:t,displayName:a.name}}):`# ${a.name}\n\n${o}`,`utf-8`)}}},T=class{format=`native`;detect(e){return n(p(e,`flow.json`))}async parse(e){let t=i(p(e,`flow.json`),`utf-8`);return JSON.parse(t)}},E=class{format=`openspec`;detect(e){return!!(n(p(e,`openspec`,`config.yaml`))||n(p(e,`schemas`))&&this.findSchemaYaml(e))}async parse(e,t){let n=this.readMeta(e),r=this.loadSchema(e),i=this.buildStepsFromSchema(r);return await this.materializeSteps(e,i,r,t),{name:n.name,version:n.version,description:r.description??n.description,author:n.author,steps:i,agents:[],artifacts_dir:`openspec`,install:[]}}readMeta(e){let t=p(e,`package.json`);if(n(t))try{let n=JSON.parse(i(t,`utf-8`));return{name:n.name??d(e),version:n.version??`0.1.0`,description:n.description??`OpenSpec flow`,author:n.author}}catch{}return{name:d(e),version:`0.1.0`,description:`OpenSpec flow`}}readConfigSchema(e){let 
t=p(e,`openspec`,`config.yaml`);if(!n(t))return`spec-driven`;try{let e=_(i(t,`utf-8`));return typeof e?.schema==`string`&&e.schema.trim().length>0?e.schema:`spec-driven`}catch{return`spec-driven`}}resolveSchemaDir(e){let t=this.readConfigSchema(e),r=p(e,`openspec`,`schemas`,t);if(n(p(r,`schema.yaml`)))return r;let i=p(e,`schemas`,t);if(n(p(i,`schema.yaml`)))return i;try{let e=p(h(`npm root -g`,{encoding:`utf-8`,timeout:5e3,stdio:[`ignore`,`pipe`,`ignore`]}).trim(),`@fission-ai`,`openspec`,`schemas`,t);if(n(p(e,`schema.yaml`)))return e}catch{}return null}parseSchemaArtifacts(e){try{let t=_(e);if(!t||!Array.isArray(t.artifacts))return null;let n=t.artifacts.map(e=>this.normalizeArtifact(e)).filter(e=>e!==null);return n.length===0?null:{name:typeof t.name==`string`&&t.name.trim().length>0?t.name:this.readString(t.schema)??`spec-driven`,version:typeof t.version==`number`?t.version:1,description:this.readString(t.description),artifacts:n,apply:this.normalizeApply(t.apply)}}catch{return null}}loadSchema(e){let t=this.resolveSchemaDir(e);if(t){let e=p(t,`schema.yaml`);try{let t=i(e,`utf-8`),n=this.parseSchemaArtifacts(t);if(n)return n}catch{}}return this.discoverSchemaFromDirectory(e)}discoverSchemaFromDirectory(e){let t=[p(e,`openspec`,`schemas`),p(e,`schemas`)];for(let e of t)if(n(e))try{let t=a(e,{withFileTypes:!0}).filter(e=>e.isDirectory());for(let r of t){let t=p(e,r.name,`schema.yaml`);if(n(t))try{let e=this.parseSchemaArtifacts(i(t,`utf-8`));if(e)return e}catch{}}}catch{}let r=[],o=p(e,`openspec`),s=[],c=this.collectMarkdownFiles(o);for(let e of c){let t=m(o,e).replace(/\\/g,`/`),n=this.toStepIdFromMarkdown(t);!n||s.includes(n)||(r.push({id:n,generates:t,description:`${this.humanizeStepName(n)} phase`,template:d(e),requires:s.length>0?[s[s.length-1]]:[]}),s.push(n))}return{name:this.readConfigSchema(e),version:1,artifacts:r}}collectMarkdownFiles(e){if(!n(e))return[];let t=[];try{let n=a(e,{withFileTypes:!0}).sort((e,t)=>e.name.localeCompare(t.name));for(let r of 
n){let n=p(e,r.name);if(r.isFile()&&r.name.toLowerCase().endsWith(`.md`)){t.push(n);continue}r.isDirectory()&&t.push(...this.collectMarkdownFiles(n))}}catch{}return t}hasFileRecursive(e,t){if(!n(e))return!1;try{let n=a(e,{withFileTypes:!0});for(let r of n)if(r.isFile()&&r.name===t||r.isDirectory()&&this.hasFileRecursive(p(e,r.name),t))return!0}catch{}return!1}findSchemaYaml(e){let t=p(e,`schemas`);if(!n(t))return!1;try{return a(t,{withFileTypes:!0}).filter(e=>e.isDirectory()).some(e=>n(p(t,e.name,`schema.yaml`)))}catch{return!1}}toStepIdFromMarkdown(e){let t=e.replace(/\\/g,`/`).split(`/`).filter(Boolean);if(t.length===0)return null;let n=(t[t.length-1]??``).replace(/\.md$/i,``);return(n.toLowerCase()===`readme`&&t.length>1?t[t.length-2]:n).toLowerCase().replace(/[^a-z0-9]+/g,`-`).replace(/^-+|-+$/g,``)||null}buildStepsFromSchema(e){let t=e.artifacts.map(e=>({id:e.id,name:this.humanizeStepName(e.id),instruction:`steps/${e.id}/README.md`,produces:e.generates?[e.generates]:[],requires:e.requires,agents:[],description:e.description}));return e.apply&&t.push({id:`apply`,name:`Apply`,instruction:`steps/apply/README.md`,produces:[],requires:e.apply.requires,agents:[],description:`Implement the tasks, writing code and tests according to the design`}),t}async materializeSteps(e,t,i,a){let o=this.resolveSchemaDir(e),s=new Map(i.artifacts.map(e=>[e.id,e]));for(let c of t){let t=p(e,`steps`,c.id),l=p(t,`README.md`);r(t,{recursive:!0});let d=this.resolveInstructionContent(o,c.id,s.get(c.id),i.apply);if(!d){n(l)||u(l,`# ${c.name}\n\n${c.description}\n`,`utf-8`);continue}u(l,a?.transform?await a.transform({content:d,sourceFormat:`openspec`,stepName:c.id,metadata:{schemaName:i.name,displayName:c.name,description:c.description,produces:c.produces,requires:c.requires}}):`# ${c.name}\n\n${d}`,`utf-8`)}}resolveInstructionContent(e,t,r,a){if(t===`apply`&&a?.instruction)return a.instruction;if(!r)return null;if(r.instruction)return r.instruction;if(e&&r.template){let 
t=p(e,`templates`,r.template);if(n(t))try{return i(t,`utf-8`)}catch{}}return r.description||null}normalizeArtifact(e){if(!e||typeof e!=`object`)return null;let t=e,n=this.readString(t.id),r=this.readString(t.generates);return!n||!r?null:{id:n,generates:r,description:this.readString(t.description)??`${this.humanizeStepName(n)} phase`,template:this.readString(t.template)??``,instruction:this.readString(t.instruction),requires:this.readStringArray(t.requires)}}normalizeApply(e){if(!e||typeof e!=`object`)return;let t=e;return{requires:this.readStringArray(t.requires),tracks:this.readString(t.tracks)??null,instruction:this.readString(t.instruction)}}readString(e){return typeof e==`string`&&e.trim().length>0?e:void 0}readStringArray(e){return Array.isArray(e)?e.filter(e=>typeof e==`string`&&e.length>0):[]}humanizeStepName(e){return e.split(/[-_]/g).filter(Boolean).map(e=>e.charAt(0).toUpperCase()+e.slice(1)).join(` `)}};const D={name:`aikit:basic`,version:`0.1.0`,description:`Quick development flow for bug fixes, small features, and refactoring`,steps:[{id:`design`,name:`Design Gate`,instruction:`steps/design/README.md`,produces:[`design-decisions.md`],requires:[],agents:[`Researcher-Alpha`,`Researcher-Beta`,`Researcher-Gamma`,`Researcher-Delta`],description:`Evaluate task type, run brainstorming for features, FORGE classification. 
Auto-skips for bug fixes and refactors.`},{id:`assess`,name:`Assessment`,instruction:`steps/assess/README.md`,produces:[`assessment.md`],requires:[`design-decisions.md`],agents:[`Explorer`,`Researcher-Alpha`],description:`Understand scope, analyze codebase, identify approach`},{id:`implement`,name:`Implementation`,instruction:`steps/implement/README.md`,produces:[`progress.md`],requires:[`assessment.md`],agents:[`Implementer`,`Frontend`],description:`Write code following the assessment plan`},{id:`verify`,name:`Verification`,instruction:`steps/verify/README.md`,produces:[`verify-report.md`],requires:[`progress.md`],agents:[`Code-Reviewer-Alpha`,`Security`],description:`Review code, run tests, validate changes`}],agents:[],artifacts_dir:`.spec`,install:[]},O={name:`aikit:advanced`,version:`0.1.0`,description:`Full development flow for new features, API design, and architecture changes`,steps:[{id:`design`,name:`Design Gate`,instruction:`steps/design/README.md`,produces:[`design-decisions.md`],requires:[],agents:[`Researcher-Alpha`,`Researcher-Beta`,`Researcher-Gamma`,`Researcher-Delta`],description:`Full brainstorming, FORGE classification, decision protocol with parallel research. 
ADR for critical-tier tasks.`},{id:`spec`,name:`Specification`,instruction:`steps/spec/README.md`,produces:[`spec.md`],requires:[`design-decisions.md`],agents:[`Researcher-Alpha`],description:`Elicit requirements, clarify scope, define acceptance criteria`},{id:`plan`,name:`Planning`,instruction:`steps/plan/README.md`,produces:[`plan.md`],requires:[`spec.md`],agents:[`Planner`,`Explorer`],description:`Analyze codebase, design architecture, create implementation plan`},{id:`task`,name:`Task Breakdown`,instruction:`steps/task/README.md`,produces:[`tasks.md`],requires:[`plan.md`],agents:[`Planner`,`Architect-Reviewer-Alpha`],description:`Break plan into ordered implementation tasks with dependencies`},{id:`execute`,name:`Execution`,instruction:`steps/execute/README.md`,produces:[`progress.md`],requires:[`tasks.md`],agents:[`Orchestrator`,`Implementer`,`Frontend`,`Refactor`],description:`Implement all tasks, write code, write tests`},{id:`verify`,name:`Verification`,instruction:`steps/verify/README.md`,produces:[`verify-report.md`],requires:[`progress.md`],agents:[`Code-Reviewer-Alpha`,`Code-Reviewer-Beta`,`Architect-Reviewer-Alpha`,`Architect-Reviewer-Beta`,`Security`],description:`Dual code review, architecture review, security review, test validation`}],agents:[],artifacts_dir:`.spec`,install:[]};function k(){return[{manifest:D,scaffoldDir:`scaffold/flows/aikit-basic`},{manifest:O,scaffoldDir:`scaffold/flows/aikit-advanced`}]}const A=`<!-- aikit foundation context -->`,j=`<!-- end foundation context -->`;function M(e){let t=e.artifacts_dir||`.spec`;return`${A}
|
|
3
|
+
`)}discoverAgents(e){let t=p(e,`agents`);return n(t)?a(t,{withFileTypes:!0}).filter(e=>e.isFile()&&e.name.endsWith(`.md`)).map(e=>`agents/${e.name}`):[]}},w=class{format=`copilot`;detect(e){return n(p(e,`.github`,`agents`))}async parse(e,t){let n=a(p(e,`.github`,`agents`),{withFileTypes:!0}).filter(e=>e.isFile()&&e.name.endsWith(`.md`)).map(e=>`.github/agents/${e.name}`),r=n.map(e=>this.getStepId(e)),i=n.map((e,t)=>{let n=r[t];return{id:n,name:n.charAt(0).toUpperCase()+n.slice(1),instruction:`steps/${n}/README.md`,produces:[`${n}.md`],requires:t>0?[r[t-1]]:[],agents:[e],description:`${n} agent step`}});return await this.materializeSteps(e,i,t),{name:d(e),version:`1.0.0`,description:`Copilot agents flow from ${d(e)}`,steps:i,agents:n,artifacts_dir:`.spec`,install:[]}}getStepId(e){return d(e,`.md`).toLowerCase().replace(/\.agent$/,``)}async materializeSteps(e,t,n){for(let a of t){let t=a.agents[0];if(!t)continue;let o=i(p(e,t),`utf-8`),s=p(e,`steps`,a.id),c=p(s,`README.md`);r(s,{recursive:!0}),u(c,n?.transform?await n.transform({content:o,sourceFormat:`copilot`,stepName:a.id,metadata:{sourcePath:t,displayName:a.name}}):`# ${a.name}\n\n${o}`,`utf-8`)}}},T=class{format=`native`;detect(e){return n(p(e,`flow.json`))}async parse(e){let t=i(p(e,`flow.json`),`utf-8`);return JSON.parse(t)}},E=class{format=`openspec`;detect(e){return!!(n(p(e,`openspec`,`config.yaml`))||n(p(e,`schemas`))&&this.findSchemaYaml(e))}async parse(e,t){let n=this.readMeta(e),r=this.loadSchema(e),i=this.buildStepsFromSchema(r);return await this.materializeSteps(e,i,r,t),{name:n.name,version:n.version,description:r.description??n.description,author:n.author,steps:i,agents:[],artifacts_dir:`openspec`,install:[]}}readMeta(e){let t=p(e,`package.json`);if(n(t))try{let n=JSON.parse(i(t,`utf-8`));return{name:n.name??d(e),version:n.version??`0.1.0`,description:n.description??`OpenSpec flow`,author:n.author}}catch{}return{name:d(e),version:`0.1.0`,description:`OpenSpec flow`}}readConfigSchema(e){let 
t=p(e,`openspec`,`config.yaml`);if(!n(t))return`spec-driven`;try{let e=h(i(t,`utf-8`));return typeof e?.schema==`string`&&e.schema.trim().length>0?e.schema:`spec-driven`}catch{return`spec-driven`}}resolveSchemaDir(e){let t=this.readConfigSchema(e),r=p(e,`openspec`,`schemas`,t);if(n(p(r,`schema.yaml`)))return r;let i=p(e,`schemas`,t);if(n(p(i,`schema.yaml`)))return i;try{let e=p(g(`npm root -g`,{encoding:`utf-8`,timeout:5e3,stdio:[`ignore`,`pipe`,`ignore`]}).trim(),`@fission-ai`,`openspec`,`schemas`,t);if(n(p(e,`schema.yaml`)))return e}catch{}return null}parseSchemaArtifacts(e){try{let t=h(e);if(!t||!Array.isArray(t.artifacts))return null;let n=t.artifacts.map(e=>this.normalizeArtifact(e)).filter(e=>e!==null);return n.length===0?null:{name:typeof t.name==`string`&&t.name.trim().length>0?t.name:this.readString(t.schema)??`spec-driven`,version:typeof t.version==`number`?t.version:1,description:this.readString(t.description),artifacts:n,apply:this.normalizeApply(t.apply)}}catch{return null}}loadSchema(e){let t=this.resolveSchemaDir(e);if(t){let e=p(t,`schema.yaml`);try{let t=i(e,`utf-8`),n=this.parseSchemaArtifacts(t);if(n)return n}catch{}}return this.discoverSchemaFromDirectory(e)}discoverSchemaFromDirectory(e){let t=[p(e,`openspec`,`schemas`),p(e,`schemas`)];for(let e of t)if(n(e))try{let t=a(e,{withFileTypes:!0}).filter(e=>e.isDirectory());for(let r of t){let t=p(e,r.name,`schema.yaml`);if(n(t))try{let e=this.parseSchemaArtifacts(i(t,`utf-8`));if(e)return e}catch{}}}catch{}let r=[],o=p(e,`openspec`),s=[],c=this.collectMarkdownFiles(o);for(let e of c){let t=m(o,e).replace(/\\/g,`/`),n=this.toStepIdFromMarkdown(t);!n||s.includes(n)||(r.push({id:n,generates:t,description:`${this.humanizeStepName(n)} phase`,template:d(e),requires:s.length>0?[s[s.length-1]]:[]}),s.push(n))}return{name:this.readConfigSchema(e),version:1,artifacts:r}}collectMarkdownFiles(e){if(!n(e))return[];let t=[];try{let n=a(e,{withFileTypes:!0}).sort((e,t)=>e.name.localeCompare(t.name));for(let r of 
n){let n=p(e,r.name);if(r.isFile()&&r.name.toLowerCase().endsWith(`.md`)){t.push(n);continue}r.isDirectory()&&t.push(...this.collectMarkdownFiles(n))}}catch{}return t}hasFileRecursive(e,t){if(!n(e))return!1;try{let n=a(e,{withFileTypes:!0});for(let r of n)if(r.isFile()&&r.name===t||r.isDirectory()&&this.hasFileRecursive(p(e,r.name),t))return!0}catch{}return!1}findSchemaYaml(e){let t=p(e,`schemas`);if(!n(t))return!1;try{return a(t,{withFileTypes:!0}).filter(e=>e.isDirectory()).some(e=>n(p(t,e.name,`schema.yaml`)))}catch{return!1}}toStepIdFromMarkdown(e){let t=e.replace(/\\/g,`/`).split(`/`).filter(Boolean);if(t.length===0)return null;let n=(t[t.length-1]??``).replace(/\.md$/i,``);return(n.toLowerCase()===`readme`&&t.length>1?t[t.length-2]:n).toLowerCase().replace(/[^a-z0-9]+/g,`-`).replace(/^-+|-+$/g,``)||null}buildStepsFromSchema(e){let t=e.artifacts.map(e=>({id:e.id,name:this.humanizeStepName(e.id),instruction:`steps/${e.id}/README.md`,produces:e.generates?[e.generates]:[],requires:e.requires,agents:[],description:e.description}));return e.apply&&t.push({id:`apply`,name:`Apply`,instruction:`steps/apply/README.md`,produces:[],requires:e.apply.requires,agents:[],description:`Implement the tasks, writing code and tests according to the design`}),t}async materializeSteps(e,t,i,a){let o=this.resolveSchemaDir(e),s=new Map(i.artifacts.map(e=>[e.id,e]));for(let c of t){let t=p(e,`steps`,c.id),l=p(t,`README.md`);r(t,{recursive:!0});let d=this.resolveInstructionContent(o,c.id,s.get(c.id),i.apply);if(!d){n(l)||u(l,`# ${c.name}\n\n${c.description}\n`,`utf-8`);continue}u(l,a?.transform?await a.transform({content:d,sourceFormat:`openspec`,stepName:c.id,metadata:{schemaName:i.name,displayName:c.name,description:c.description,produces:c.produces,requires:c.requires}}):`# ${c.name}\n\n${d}`,`utf-8`)}}resolveInstructionContent(e,t,r,a){if(t===`apply`&&a?.instruction)return a.instruction;if(!r)return null;if(r.instruction)return r.instruction;if(e&&r.template){let 
t=p(e,`templates`,r.template);if(n(t))try{return i(t,`utf-8`)}catch{}}return r.description||null}normalizeArtifact(e){if(!e||typeof e!=`object`)return null;let t=e,n=this.readString(t.id),r=this.readString(t.generates);return!n||!r?null:{id:n,generates:r,description:this.readString(t.description)??`${this.humanizeStepName(n)} phase`,template:this.readString(t.template)??``,instruction:this.readString(t.instruction),requires:this.readStringArray(t.requires)}}normalizeApply(e){if(!e||typeof e!=`object`)return;let t=e;return{requires:this.readStringArray(t.requires),tracks:this.readString(t.tracks)??null,instruction:this.readString(t.instruction)}}readString(e){return typeof e==`string`&&e.trim().length>0?e:void 0}readStringArray(e){return Array.isArray(e)?e.filter(e=>typeof e==`string`&&e.length>0):[]}humanizeStepName(e){return e.split(/[-_]/g).filter(Boolean).map(e=>e.charAt(0).toUpperCase()+e.slice(1)).join(` `)}};const D={name:`aikit:basic`,version:`0.1.0`,description:`Quick development flow for bug fixes, small features, and refactoring`,steps:[{id:`design`,name:`Design Gate`,instruction:`steps/design/README.md`,produces:[`design-decisions.md`],requires:[],agents:[`Researcher-Alpha`,`Researcher-Beta`,`Researcher-Gamma`,`Researcher-Delta`],description:`Evaluate task type, run brainstorming for features, FORGE classification. 
Auto-skips for bug fixes and refactors.`},{id:`assess`,name:`Assessment`,instruction:`steps/assess/README.md`,produces:[`assessment.md`],requires:[`design-decisions.md`],agents:[`Explorer`,`Researcher-Alpha`],description:`Understand scope, analyze codebase, identify approach`},{id:`implement`,name:`Implementation`,instruction:`steps/implement/README.md`,produces:[`progress.md`],requires:[`assessment.md`],agents:[`Implementer`,`Frontend`],description:`Write code following the assessment plan`},{id:`verify`,name:`Verification`,instruction:`steps/verify/README.md`,produces:[`verify-report.md`],requires:[`progress.md`],agents:[`Code-Reviewer-Alpha`,`Security`],description:`Review code, run tests, validate changes`}],agents:[],artifacts_dir:`.spec`,install:[]},O={name:`aikit:advanced`,version:`0.1.0`,description:`Full development flow for new features, API design, and architecture changes`,steps:[{id:`design`,name:`Design Gate`,instruction:`steps/design/README.md`,produces:[`design-decisions.md`],requires:[],agents:[`Researcher-Alpha`,`Researcher-Beta`,`Researcher-Gamma`,`Researcher-Delta`],description:`Full brainstorming, FORGE classification, decision protocol with parallel research. 
ADR for critical-tier tasks.`},{id:`spec`,name:`Specification`,instruction:`steps/spec/README.md`,produces:[`spec.md`],requires:[`design-decisions.md`],agents:[`Researcher-Alpha`],description:`Elicit requirements, clarify scope, define acceptance criteria`},{id:`plan`,name:`Planning`,instruction:`steps/plan/README.md`,produces:[`plan.md`],requires:[`spec.md`],agents:[`Planner`,`Explorer`],description:`Analyze codebase, design architecture, create implementation plan`},{id:`task`,name:`Task Breakdown`,instruction:`steps/task/README.md`,produces:[`tasks.md`],requires:[`plan.md`],agents:[`Planner`,`Architect-Reviewer-Alpha`],description:`Break plan into ordered implementation tasks with dependencies`},{id:`execute`,name:`Execution`,instruction:`steps/execute/README.md`,produces:[`progress.md`],requires:[`tasks.md`],agents:[`Orchestrator`,`Implementer`,`Frontend`,`Refactor`],description:`Implement all tasks, write code, write tests`},{id:`verify`,name:`Verification`,instruction:`steps/verify/README.md`,produces:[`verify-report.md`],requires:[`progress.md`],agents:[`Code-Reviewer-Alpha`,`Code-Reviewer-Beta`,`Architect-Reviewer-Alpha`,`Architect-Reviewer-Beta`,`Security`],description:`Dual code review, architecture review, security review, test validation`}],agents:[],artifacts_dir:`.spec`,install:[]};function k(){return[{manifest:D,scaffoldDir:`scaffold/flows/aikit-basic`},{manifest:O,scaffoldDir:`scaffold/flows/aikit-advanced`}]}const A=`<!-- aikit foundation context -->`,j=`<!-- end foundation context -->`;function M(e){let t=e.artifacts_dir||`.spec`;return`${A}
|
|
4
4
|
You are operating within the @vpxa/aikit framework.
|
|
5
5
|
- Use aikit MCP tools for all search, analysis, and memory operations
|
|
6
6
|
- Follow agents defined in AGENTS.md for delegation
|
|
@@ -11,5 +11,5 @@ You are operating within the @vpxa/aikit framework.
|
|
|
11
11
|
${j}`}var N=class{injectPreamble(e,t){let r=M(t);if(!n(e)){u(e,`${r}\n`,`utf-8`);return}let a=i(e,`utf-8`),o=a.indexOf(A),s=a.indexOf(j);if(o!==-1&&s!==-1){u(e,`${a.slice(0,o)}${r}${a.slice(s+31)}`,`utf-8`);return}u(e,`${r}\n\n${a}`,`utf-8`)}removePreamble(e){if(!n(e))return;let t=i(e,`utf-8`),r=t.indexOf(A),a=t.indexOf(j);r===-1||a===-1||u(e,(t.slice(0,r)+t.slice(a+31)).replace(/^\n+/,``).replace(/\n{3,}/g,`
|
|
12
12
|
|
|
13
13
|
`),`utf-8`)}getTargetFiles(e){let t=[],r=p(e,`.github`,`copilot-instructions.md`);n(r)&&t.push(r);let i=p(e,`CLAUDE.md`);n(i)&&t.push(i);let a=p(e,`AGENTS.md`);return n(a)&&t.push(a),t}};function P(e){if(!e||typeof e!=`object`||!(`stderr`in e))return``;let t=e.stderr;return Buffer.isBuffer(t)?t.toString().trim():typeof t==`string`?t.trim():``}function F(e){try{return new URL(e).hostname}catch{}return e.match(/^[^@]+@([^:]+):/)?.[1]??`<host>`}function I(e,t,n){let r=[`Git operation failed for: ${e}`],i=F(e),a=t.includes(`ETIMEDOUT`)||t.includes(`SIGTERM`)||t.toLowerCase().includes(`timed out`),o=n.includes(`Authentication failed`)||n.includes(`could not read Username`)||n.includes(`terminal prompts disabled`)||n.includes(`401`)||n.includes(`403`)||n.includes(`Permission denied`),s=n.includes(`SSL certificate`)||n.includes(`unable to access`)&&n.includes(`SSL`),c=n.includes(`Could not resolve host`)||n.includes(`Name or service not known`),l=n.includes(`SAML`)||n.includes(`single sign-on`);return o||l?(r.push(``),r.push(`Cause: Authentication required or credentials not configured.`),r.push(``),r.push(`To fix this, ensure git can access this repository:`),r.push(``),r.push(` Option 1 - SSH key:`),r.push(` 1. Generate an SSH key: ssh-keygen -t ed25519`),r.push(` 2. Add the public key to your Git hosting account`),r.push(` 3. Use the SSH URL instead: git@<host>:<org>/<repo>.git`),r.push(``),r.push(` Option 2 - Personal Access Token (PAT):`),r.push(` 1. Create a PAT in your Git hosting Settings > Developer settings > Tokens`),r.push(` 2. For GitHub Enterprise with SAML SSO, authorize the token for your org`),r.push(` 3. Configure git credentials:`),r.push(` git config --global credential.helper store`),r.push(` git clone ${e} (enter PAT as password)`),r.push(``),r.push(` Option 3 - Git Credential Manager:`),r.push(` 1. Install: https://github.com/git-ecosystem/git-credential-manager`),r.push(` 2. Run: git clone ${e}`),r.push(` 3. 
Follow the browser-based auth prompt`),r.push(``),r.push(`After configuring credentials, retry: aikit flow add ${e}`)):a?(r.push(``),r.push(`Cause: Connection timed out - the server did not respond within 60 seconds.`),r.push(``),r.push(`Possible reasons:`),r.push(` - The repository requires VPN access. Ensure your VPN is connected`),r.push(` - The host is behind a firewall or corporate proxy`),r.push(` - The URL may be incorrect`),r.push(``),r.push(`Diagnostics:`),r.push(` 1. Verify the URL is correct: ${e}`),r.push(` 2. Test connectivity: git ls-remote ${e}`),r.push(` 3. If behind a proxy, configure git:`),r.push(` git config --global http.proxy http://proxy:port`),r.push(``),r.push(`If this is a corporate/internal host, you may need to:`),r.push(` - Connect to the corporate VPN`),r.push(` - Add the host to your SSH config or git config`),r.push(` - Ask your IT team to allowlist your machine`),r.push(``),r.push(`After resolving, retry: aikit flow add ${e}`)):s?(r.push(``),r.push(`Cause: SSL/TLS certificate verification failed.`),r.push(``),r.push(`This often happens with corporate proxies or self-signed certificates.`),r.push(``),r.push(`To fix:`),r.push(` 1. If your company uses a custom CA, add it:`),r.push(` git config --global http.sslCAInfo /path/to/ca-bundle.crt`),r.push(` 2. As a last resort (not recommended for production):`),r.push(` git config --global http.sslVerify false`),r.push(``),r.push(`After resolving, retry: aikit flow add ${e}`)):c?(r.push(``),r.push(`Cause: Cannot resolve hostname.`),r.push(``),r.push(`Check:`),r.push(` 1. Is the URL correct? ${e}`),r.push(` 2. Are you connected to the internet/VPN?`),r.push(` 3. Can you resolve the host? ping ${i}`),r.push(``),r.push(`After resolving, retry: aikit flow add ${e}`)):(r.push(``),r.push(`Error: ${t}`),n&&r.push(`Details: ${n}`),r.push(``),r.push(`Troubleshooting:`),r.push(` 1. Verify the URL is a valid git repository: git ls-remote ${e}`),r.push(` 2. 
Check your git credentials and network connectivity`),r.push(` 3. If the repo requires auth, configure credentials first (PAT or SSH key)`),r.push(``),r.push(`After resolving, retry: aikit flow add ${e}`)),r.join(`
|
|
14
|
-
`)}function L(){try{return
|
|
14
|
+
`)}
/* L(): true when Git Credential Manager is available (`git credential-manager --version` exits 0). */
function L(){try{return _(`git`,[`credential-manager`,`--version`],{stdio:`pipe`,timeout:5e3}).status===0}catch{return!1}}
/* R(): heuristic — does the combined error text look like a git auth/permission failure? */
function R(e,t){let n=`${e}\n${t}`.toLowerCase();return n.includes(`authentication failed`)||n.includes(`could not read username`)||n.includes(`saml sso`)||n.includes(`terminal prompts disabled`)||n.includes(`host key verification failed`)||n.includes(`permission denied`)||n.includes(`403`)||n.includes(`401`)}
/* GitInstaller (z): installs/updates/removes flow repos under flowsDir, with GCM-assisted interactive retry on auth errors. */
var z=class{constructor(e){this.flowsDir=e}
/* clone(): shallow-clone url e into flowsDir; optional token t is supplied to git via a throwaway GIT_ASKPASS script. */
clone(e,t){let r=this.repoNameFromUrl(e),i=p(this.flowsDir,r);if(n(i))if(!n(p(i,`.git`)))o(i,{recursive:!0,force:!0});else return{success:!1,error:`Flow "${r}" already installed at ${i}. Use update instead.`};try{if(this.ensureFlowsDir(),t){let n=process.platform===`win32`?`bat`:`sh`,r=p(v(),`git-askpass-${Date.now()}.${n}`);process.platform===`win32`?u(r,`@echo ${t}`,{mode:448}):u(r,`#!/bin/sh\necho "${t}"`,{mode:448});try{let t=_(`git`,[`clone`,`--depth`,`1`,e,i],{stdio:`pipe`,timeout:6e4,env:{...process.env,GIT_TERMINAL_PROMPT:`0`,GIT_ASKPASS:r}});if(t.status!==0){let e=t.stderr?.toString().trim()??``;throw Error(e||t.error?.message||`git clone failed`)}}finally{try{l(r)}catch{}}}else{let t=_(`git`,[`clone`,`--depth`,`1`,e,i],{stdio:`pipe`,timeout:6e4,env:{...process.env,GIT_TERMINAL_PROMPT:`0`,GIT_ASKPASS:``}});if(t.status!==0){let e=t.stderr?.toString().trim()??``;throw Error(e||t.error?.message||`git clone failed`)}}return{success:!0,data:i}}catch(r){n(i)&&o(i,{recursive:!0,force:!0});let a=P(r),s=r instanceof Error?r.message:String(r);if(!t&&R(s,a)&&L()){let t=F(e)||e;console.log(`\nAuthentication required for ${t}.\nGit Credential Manager detected - opening browser for login...\n(If a browser window does not open, check your terminal for a device 
code.)\n`);try{if(this.ensureFlowsDir(),_(`git`,[`clone`,`--depth`,`1`,e,i],{stdio:`inherit`,timeout:12e4,env:{...process.env,GIT_TERMINAL_PROMPT:`1`}}).status===0)return{success:!0,data:i};n(i)&&o(i,{recursive:!0,force:!0})}catch{n(i)&&o(i,{recursive:!0,force:!0})}}return{success:!1,error:I(e,s,a)}}}
/* update(): fast-forward pull; on auth failure, retry interactively when GCM is present. */
update(e){if(!n(e))return{success:!1,error:`Install path not found: ${e}`};try{return g(`git pull --ff-only`,{cwd:e,stdio:`pipe`,timeout:6e4,env:{...process.env,GIT_TERMINAL_PROMPT:`0`,GIT_ASKPASS:``}}),{success:!0}}catch(t){let n=e;try{n=g(`git remote get-url origin`,{cwd:e,stdio:`pipe`,timeout:1e4}).toString().trim()}catch{}let r=P(t),i=t instanceof Error?t.message:String(t);if(R(i,r)&&L()){let t=F(n)||n;console.log(`\nAuthentication required for ${t}.\nGit Credential Manager detected - opening browser for login...\n`);try{if(_(`git`,[`pull`,`--ff-only`],{cwd:e,stdio:`inherit`,timeout:12e4,env:{...process.env,GIT_TERMINAL_PROMPT:`1`}}).status===0)return{success:!0}}catch{}}return{success:!1,error:I(n,i,r)}}}
/* copyLocal(): copy a local directory into flowsDir as flow r; refuses to overwrite an existing install. */
copyLocal(e,r){let i=p(this.flowsDir,r);if(n(i))return{success:!1,error:`Flow "${r}" already installed at ${i}`};try{return this.ensureFlowsDir(),t(e,i,{recursive:!0}),{success:!0,data:i}}catch(e){return{success:!1,error:`Copy failed: ${e instanceof Error?e.message:String(e)}`}}}
/* remove(): recursive delete; a missing path counts as success. */
remove(e){if(!n(e))return{success:!0};try{return o(e,{recursive:!0,force:!0}),{success:!0}}catch(e){return{success:!1,error:`Remove failed: ${e instanceof Error?e.message:String(e)}`}}}
/* runInstallDeps(): install `npm:`-prefixed or git-hosted dependencies via `npx skills add`; unknown entry formats abort. */
runInstallDeps(e){for(let t of e)try{if(t.startsWith(`npm:`)){g(`npx skills add ${t.slice(4)} -g`,{stdio:`pipe`,timeout:12e4});continue}if(t.endsWith(`.git`)||t.includes(`github.com`)){g(`npx skills add ${t} -g`,{stdio:`pipe`,timeout:12e4});continue}return{success:!1,error:`Unknown install entry format: ${t}`}}catch(e){return{success:!1,error:`Install dependency failed for "${t}": ${e instanceof Error?e.message:String(e)}`}}return{success:!0}}
/* getLocalCommit(): HEAD commit of the local clone, or null. NOTE(review): the raw newline inside the command template below may be diff-viewer wrapping residue — verify the command is `git rev-parse HEAD` on one line in the published artifact. */
getLocalCommit(e){try{return g(`git rev-parse 
HEAD`,{cwd:e,stdio:`pipe`,timeout:1e4,env:{...process.env,GIT_TERMINAL_PROMPT:`0`}}).toString().trim()||null}catch{return null}}
/* getRemoteCommit(): commit hash of origin HEAD via ls-remote, or null when unreachable. */
getRemoteCommit(e){try{return g(`git ls-remote origin HEAD`,{cwd:e,stdio:`pipe`,timeout:3e4,env:{...process.env,GIT_TERMINAL_PROMPT:`0`}}).toString().trim().split(/\s+/)[0]||null}catch{return null}}
/* hasUpdates(): compare local vs remote HEAD and report whether an update is pending. */
hasUpdates(e){let t=this.getLocalCommit(e);if(!t)return{success:!1,error:`Could not determine local commit (not a git repo?)`};let n=this.getRemoteCommit(e);return n?{success:!0,data:{localCommit:t,remoteCommit:n,hasUpdates:t!==n}}:{success:!1,error:`Could not reach remote to check for updates`}}
/* repoNameFromUrl(): repo basename with a trailing .git stripped. */
repoNameFromUrl(e){return d(e).replace(/\.git$/,``)}
/* ensureFlowsDir(): create flowsDir on demand. */
ensureFlowsDir(){n(this.flowsDir)||r(this.flowsDir,{recursive:!0})}};
/* Format adapters, probed in registration order. */
const B=[new T,new C,new w,new E];
/* V(): adapter by explicit format id; throws when unknown. */
function V(e){let t=B.find(t=>t.format===e);if(!t)throw Error(`No adapter for format: ${e}`);return t}
/* H(): first adapter whose detect() matches the source directory, else null. */
function H(e){for(let t of B)if(t.detect(e))return t;return null}
/* FlowLoader (U): pick an adapter, parse the flow source, and validate the resulting manifest. */
var U=class{async load(e,t){if(!n(e))return{success:!1,error:`Source directory not found: ${e}`};let r=H(e);if(!r)return{success:!1,error:`No format adapter matches source: ${e}`};try{let n=await r.parse(e,t),i=this.validate(n);return i.success?{success:!0,data:{manifest:n,format:r.format}}:i}catch(e){return{success:!1,error:`Failed to parse flow: ${e instanceof Error?e.message:String(e)}`}}}
/* loadWithFormat(): like load(), but with a caller-chosen format id instead of auto-detection. */
async loadWithFormat(e,t,n){let r=V(t);try{let t=await r.parse(e,n),i=this.validate(t);return i.success?{success:!0,data:t}:i}catch(e){return{success:!1,error:`Failed to parse flow: ${e instanceof Error?e.message:String(e)}`}}}
/* validate(): structural manifest checks — name, version, at least one step, step ids, and requires references. */
validate(e){let t=[];e.name?.trim()||t.push(`Missing flow name`),e.version?.trim()||t.push(`Missing flow version`),e.steps?.length||t.push(`Flow must have at least one step`);let n=new Set(e.steps.map(e=>e.id));for(let r of e.steps??[]){r.id?.trim()||t.push(`Step missing id`),r.instruction?.trim()||t.push(`Step "${r.id}" missing instruction path`);for(let e of r.requires??[])n.has(e)||t.push(`Step "${r.id}" requires unknown step 
"${e}"`)}
/* Aggregate validation errors; a non-empty list means failure. NOTE(review): the stray lines inside the t.join( … ) template below appear to be diff-viewer residue injected into the separator string — verify against the published package. */
return n.size!==(e.steps?.length??0)&&t.push(`Duplicate step IDs found`),t.length>0?{success:!1,error:`Validation failed:\n${t.join(`
|
|
15
15
|
`)}`}:{success:!0}}}};
/* W(): builtin flows exposed as synthetic registry entries (epoch timestamps mark them as not user-installed). */
function W(){return k().map(e=>({name:e.manifest.name,version:e.manifest.version,source:`builtin`,sourceType:`builtin`,installPath:e.scaffoldDir,format:`native`,registeredAt:`1970-01-01T00:00:00.000Z`,updatedAt:`1970-01-01T00:00:00.000Z`,manifest:e.manifest}))}
/* FlowRegistryManager (G): JSON-file registry of installed flows, merged with builtins from W() for get/list/has. */
var G=class{constructor(e){this.registryPath=e}load(){if(!n(this.registryPath))return{version:1,flows:{}};try{let e=i(this.registryPath,`utf-8`);return JSON.parse(e)}catch{return{version:1,flows:{}}}}save(e){let t=f(this.registryPath);n(t)||r(t,{recursive:!0}),u(this.registryPath,JSON.stringify(e,null,2),`utf-8`)}register(e){let t=this.load();return t.flows[e.name]=e,this.save(t),{success:!0}}unregister(e){let t=this.load();return t.flows[e]?(delete t.flows[e],this.save(t),{success:!0}):{success:!1,error:`Flow "${e}" not found in registry`}}get(e){return this.load().flows[e]||(W().find(t=>t.name===e)??null)}list(){let e=this.load(),t=new Set(Object.keys(e.flows)),n=Object.values(e.flows);for(let e of W())t.has(e.name)||n.push(e);return n}has(e){return e in this.load().flows?!0:W().some(t=>t.name===e)}},
/* FlowStateMachine (K): at most one active run; state persisted as meta.json under a slugged run directory. */
K=class{constructor(e,t={before:[],after:[]}){this.flowsDir=e,this.epilogueConfig=t}slugify(e){return e.toLowerCase().replace(/[^a-z0-9]+/g,`-`).replace(/-+/g,`-`).replace(/^-|-$/g,``)||`flow`}generateSlug(e){let t=this.slugify(e);if(!n(p(this.flowsDir,t)))return t;for(let e=2;e<=100;e+=1){let r=`${t}-${e}`;if(!n(p(this.flowsDir,r)))return r}throw Error(`Unable to allocate a flow run slug for topic "${e}"`)}getMetaPath(e){return p(this.flowsDir,e,`meta.json`)}buildStepSequence(e){return[...this.epilogueConfig.before.map(e=>({id:e.id,phase:`before`})),...e.steps.map(e=>({id:e.id,phase:`flow`})),...this.epilogueConfig.after.map(e=>({id:e.id,phase:`after`}))]}readMeta(e){let t=this.getMetaPath(e);if(!n(t))return null;try{let e=i(t,`utf-8`);return JSON.parse(e)}catch{return null}}writeMeta(e,t){let 
i=p(this.flowsDir,e);n(i)||r(i,{recursive:!0}),u(this.getMetaPath(e),JSON.stringify(t,null,2),`utf-8`)}
/* findActiveRun(): first run directory whose meta.status is "active", else null. */
findActiveRun(){if(!n(this.flowsDir))return null;for(let e of a(this.flowsDir,{withFileTypes:!0})){if(!e.isDirectory())continue;let t=this.readMeta(e.name);if(t?.status===`active`)return{slug:e.name,meta:t}}return null}metaToState(e,t){let n=t.phase??`flow`;return{flow:t.flow,status:t.status,currentStep:t.currentStep,completedSteps:t.completedSteps,skippedSteps:t.skippedSteps,artifacts:t.artifacts,startedAt:t.startedAt,updatedAt:t.updatedAt,slug:e,runDir:p(this.flowsDir,e),topic:t.topic,phase:n,isEpilogue:n!==`flow`}}
/* start(): refuse when another run is active; otherwise create the run dir + artifacts dir and seed meta. */
start(e,t,n){let i=this.findActiveRun();if(i)return{success:!1,error:`Flow "${i.meta.flow}" is already active. Reset it first.`};let a=this.buildStepSequence(t);if(!a.length)return{success:!1,error:`Flow has no steps`};try{let i=(n??t.description)||e,o=this.generateSlug(i),s=p(this.flowsDir,o),c=new Date().toISOString(),l=a[0],u={id:o,flow:e,flowVersion:t.version,topic:i,status:`active`,currentStep:l.id,completedSteps:[],skippedSteps:[],artifactsDir:t.artifacts_dir,artifacts:{},startedAt:c,updatedAt:c,phase:l.phase,completedEpilogueSteps:[],skippedEpilogueSteps:[]};return r(s,{recursive:!0}),r(p(s,t.artifacts_dir),{recursive:!0}),this.writeMeta(o,u),{success:!0,data:this.metaToState(o,u)}}catch(e){return{success:!1,error:e instanceof Error?e.message:String(e)}}}
/* step(): apply next|skip|redo to the current step, tracking flow vs epilogue (before/after) phases separately. */
step(e,t){let n=this.findActiveRun();if(!n)return{success:!1,error:`No active flow`};if(n.meta.status!==`active`)return{success:!1,error:`Flow is ${n.meta.status}, not active`};if(!n.meta.currentStep)return{success:!1,error:`No current step`};let r=n.meta.currentStep,i=this.buildStepSequence(t),a=i.findIndex(e=>e.id===r);if(a===-1)return{success:!1,error:`Current step "${n.meta.currentStep}" not found in manifest`};let o=i[a],s=new 
Date().toISOString(),c=n.meta;switch(e){case`next`:{o.phase===`flow`?c.completedSteps.includes(r)||c.completedSteps.push(r):c.completedEpilogueSteps.includes(r)||c.completedEpilogueSteps.push(r);let e=a+1;e>=i.length?(c.currentStep=null,c.status=`completed`,c.phase=`after`):(c.currentStep=i[e].id,c.phase=i[e].phase);break}case`skip`:{o.phase===`flow`?c.skippedSteps.includes(r)||c.skippedSteps.push(r):c.skippedEpilogueSteps.includes(r)||c.skippedEpilogueSteps.push(r);let e=a+1;e>=i.length?(c.currentStep=null,c.status=`completed`,c.phase=`after`):(c.currentStep=i[e].id,c.phase=i[e].phase);break}case`redo`:o.phase===`flow`?(c.completedSteps=c.completedSteps.filter(e=>e!==r),c.skippedSteps=c.skippedSteps.filter(e=>e!==r)):(c.completedEpilogueSteps=c.completedEpilogueSteps.filter(e=>e!==r),c.skippedEpilogueSteps=c.skippedEpilogueSteps.filter(e=>e!==r));break}return c.updatedAt=s,this.writeMeta(n.slug,c),{success:!0,data:this.metaToState(n.slug,c)}}getStatus(){let e=this.findActiveRun();return e?{success:!0,data:this.metaToState(e.slug,e.meta)}:{success:!1,error:`No active flow`}}reset(){let e=this.findActiveRun();return e?(e.meta.status=`abandoned`,e.meta.currentStep=null,e.meta.updatedAt=new Date().toISOString(),this.writeMeta(e.slug,e.meta),{success:!0}):{success:!0}}recordArtifact(e,t){let n=this.findActiveRun();return n?(n.meta.artifacts[e]=t,n.meta.updatedAt=new Date().toISOString(),this.writeMeta(n.slug,n.meta),{success:!0}):{success:!1,error:`No active flow`}}listRuns(e){if(!n(this.flowsDir))return[];let t=[];for(let n of a(this.flowsDir,{withFileTypes:!0})){if(!n.isDirectory())continue;let r=this.readMeta(n.name);r&&(e?.flow&&r.flow!==e.flow||e?.status&&r.status!==e.status||t.push({id:n.name,flow:r.flow,topic:r.topic,status:r.status,currentStep:r.currentStep,startedAt:r.startedAt,updatedAt:r.updatedAt}))}return t.sort((e,t)=>t.updatedAt.localeCompare(e.updatedAt))}},
/* SymlinkManager (q): project flow agent files into IDE agent dirs (.github/agents and .claude/agents), copying when symlink creation fails. */
q=class{createSymlinks(t,i,a,o){let s=this.getTargets(t,i);for(let t of 
s){n(t.baseDir)||r(t.baseDir,{recursive:!0});for(let r of o.agents){let i=p(a,r);if(!n(i))continue;let o=this.getAgentStem(r),s=p(t.baseDir,`${o}${t.extension}`);n(s)&&l(s);let u=m(f(s),i);try{c(u,s,`file`)}catch{try{e(i,s)}catch(e){console.warn(`Failed to create symlink or copy fallback for ${i}: ${e instanceof Error?e.message:String(e)}`)}}}}}removeSymlinks(e,t){let r=this.getTargets(e,t);for(let e of r)if(n(e.baseDir))try{let t=a(e.baseDir,{withFileTypes:!0});for(let n of t)!n.isFile()&&!n.isSymbolicLink()||l(p(e.baseDir,n.name));a(e.baseDir).length===0&&s(e.baseDir)}catch{}}getTargets(e,t){return[{ide:`copilot`,baseDir:p(e,`.github`,`agents`,`flows`,t),extension:`.agent.md`},{ide:`claude-code`,baseDir:p(e,`.claude`,`agents`,`flows`,t),extension:`.md`}]}getAgentStem(e){return d(e).replace(/\.agent\.md$/,``).replace(/\.md$/,``)}};
/* Public API of the flows module. */
export{C as ClaudePluginAdapter,w as CopilotAdapter,U as FlowLoader,G as FlowRegistryManager,K as FlowStateMachine,N as FoundationIntegration,z as GitInstaller,T as NativeAdapter,E as OpenSpecAdapter,q as SymlinkManager,k as getBuiltinFlows};
|
|
@@ -484,6 +484,24 @@ Keep diagrams under 15 elements. Split complex architectures into multiple focus
|
|
|
484
484
|
**Element Ordering:**
|
|
485
485
|
Elements appear in the order they are defined. Reorder statements to adjust layout.
|
|
486
486
|
|
|
487
|
+
### Special Characters
|
|
488
|
+
|
|
489
|
+
Node labels and subgraph titles containing `@`, `/`, `#`, or other special characters must be quoted:
|
|
490
|
+
|
|
491
|
+
```mermaid
|
|
492
|
+
%% WRONG — will break parsing
|
|
493
|
+
subgraph @scope/package
|
|
494
|
+
NodeA[@scope/lib]
|
|
495
|
+
end
|
|
496
|
+
|
|
497
|
+
%% CORRECT — quoted labels
|
|
498
|
+
subgraph ScopePkg["@scope/package"]
|
|
499
|
+
NodeA["@scope/lib"]
|
|
500
|
+
end
|
|
501
|
+
```
|
|
502
|
+
|
|
503
|
+
Use plain alphanumeric strings for node IDs (aliases). Put display names with special characters inside `["..."]`.
|
|
504
|
+
|
|
487
505
|
### Alternative Tools
|
|
488
506
|
|
|
489
507
|
For features Mermaid doesn't support, consider:
|
|
@@ -206,6 +206,22 @@ How to confirm it worked.
|
|
|
206
206
|
Common issues and solutions.
|
|
207
207
|
```
|
|
208
208
|
|
|
209
|
+
### Decision Index Template (`docs/decisions/index.md`)
|
|
210
|
+
|
|
211
|
+
```markdown
|
|
212
|
+
# Architecture Decision Record Index
|
|
213
|
+
|
|
214
|
+
This page is the ADR log for the project. IDs are assigned sequentially as decisions are recorded.
|
|
215
|
+
|
|
216
|
+
## Index
|
|
217
|
+
|
|
218
|
+
| ID | Title | Status | Date | Source |
|
|
219
|
+
| --- | --- | --- | --- | --- |
|
|
220
|
+
| ADR-001 | {Decision title} | {Accepted/Proposed/Deprecated/Superseded} | {YYYY-MM-DD} | [{flow-topic}]({relative-path-to-flow-artifact}) |
|
|
221
|
+
```
|
|
222
|
+
|
|
223
|
+
The **Source** column links to the flow artifact (typically `design.md` or `design-decisions.md`) where the decision was originally designed. Use the flow's run directory path relative to the index file location (e.g., `../../.flows/{topic}/{artifacts}/design.md`). If the decision was made outside a flow, use "Manual" or link to the relevant discussion.
|
|
224
|
+
|
|
209
225
|
### Reference Template (`docs/reference/{topic}.md`)
|
|
210
226
|
|
|
211
227
|
```markdown
|
|
@@ -229,6 +245,7 @@ Brief description of what this reference covers.
|
|
|
229
245
|
- All ADRs live in `docs/decisions/`
|
|
230
246
|
- When the docs-sync step detects an architecture decision was made during the flow, delegate to `adr-skill`
|
|
231
247
|
- After `adr-skill` creates/updates an ADR, update `docs/decisions/index.md`
|
|
248
|
+
- Include the Source column linking to the flow artifact where the decision was designed
|
|
232
249
|
- Cross-reference ADRs from component docs where relevant
|
|
233
250
|
|
|
234
251
|
### `c4-architecture` Integration
|
|
@@ -237,6 +254,14 @@ Brief description of what this reference covers.
|
|
|
237
254
|
- Use Mermaid format for docs files (not HTML) — markdown renders in GitHub
|
|
238
255
|
- Reference diagrams from component docs
|
|
239
256
|
|
|
257
|
+
### Mermaid Syntax Rules
|
|
258
|
+
|
|
259
|
+
When generating Mermaid diagrams in documentation:
|
|
260
|
+
|
|
261
|
+
- **Quote node labels containing special characters** — Names with `@`, `/`, or other special characters must be wrapped in double quotes inside square brackets: `subgraph Commons["@scope/package-name"]`, `NodeId["@org/lib"]`
|
|
262
|
+
- **Quote subgraph titles with special characters** — `subgraph Title["@scope/name"]` not `subgraph @scope/name`
|
|
263
|
+
- Node IDs (aliases) must not contain special characters — use plain alphanumeric IDs and put the display name in `["..."]`
|
|
264
|
+
|
|
240
265
|
## Project Knowledge Acquisition
|
|
241
266
|
|
|
242
267
|
Produces seven populated documents in `docs/architecture/` covering everything needed to work effectively on the project — stack, structure, design, conventions, integrations, testing, and concerns.
|
|
@@ -431,7 +456,7 @@ docs/
|
|
|
431
456
|
│ ├── overview.md ← From analyze_structure + analyze_dependencies + analyze_diagram
|
|
432
457
|
│ └── components/ ← From analyze_symbols per major component
|
|
433
458
|
├── decisions/
|
|
434
|
-
│ └── index.md ← ADR log
|
|
459
|
+
│ └── index.md ← ADR log with Source column linking to flow artifacts
|
|
435
460
|
├── guides/
|
|
436
461
|
│ └── testing.md ← From analyze_patterns test info
|
|
437
462
|
└── reference/
|
|
@@ -483,6 +508,25 @@ Follow these rules when generating documentation content. Adapted from *The Elem
|
|
|
483
508
|
- **No summary closers** — Do not end every paragraph by restating what it just said
|
|
484
509
|
- **Consistent terminology** — Pick one term and use it throughout; do not alternate synonyms for variety
|
|
485
510
|
|
|
511
|
+
## Link Rules
|
|
512
|
+
|
|
513
|
+
### Relative Path Correctness
|
|
514
|
+
|
|
515
|
+
All links in generated docs must be correct relative to the file that contains them. Compute the path from the target doc's directory, not from the project root.
|
|
516
|
+
|
|
517
|
+
| Doc location | Link to `src/types/index.ts` | Link to `.flows/topic/artifacts/design.md` |
|
|
518
|
+
|---|---|---|
|
|
519
|
+
| `docs/README.md` | `../src/types/index.ts` | `../.flows/topic/artifacts/design.md` |
|
|
520
|
+
| `docs/architecture/overview.md` | `../../src/types/index.ts` | `../../.flows/topic/artifacts/design.md` |
|
|
521
|
+
| `docs/decisions/index.md` | `../../src/types/index.ts` | `../../.flows/topic/artifacts/design.md` |
|
|
522
|
+
| `docs/reference/api.md` | `../../src/types/index.ts` | `../../.flows/topic/artifacts/design.md` |
|
|
523
|
+
|
|
524
|
+
**Rules:**
|
|
525
|
+
1. Count the `../` segments needed to climb from the doc's own directory up to `docs/`, then add one more `../` to reach the project root
|
|
526
|
+
2. Verify every link target exists before writing it — use `find({ pattern })` if unsure
|
|
527
|
+
3. Never use absolute paths in documentation — always relative
|
|
528
|
+
4. Test links mentally: from `docs/architecture/overview.md`, `../../` reaches the project root
|
|
529
|
+
|
|
486
530
|
## Anti-Patterns
|
|
487
531
|
|
|
488
532
|
### Documentation Maintenance
|