@aurodesignsystem-dev/auro-cli 0.0.0-pr177.0 → 0.0.0-pr177.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/auro-cli.js CHANGED
@@ -1,5 +1,5 @@
  #!/usr/bin/env node
- import{program as ue}from"commander";import Ze from"figlet";import{mind as et}from"gradient-string";var de=()=>et(Ze.textSync("Auro CLI"));import pe from"node:fs";import fe from"node:path";import{fileURLToPath as tt}from"node:url";function S(t){process.env.DEBUG&&console.log(`[DEBUG] ${t}`)}function q(){try{let t=tt(import.meta.url),e=fe.dirname(t);S(`Current module path: ${e}`);let r=fe.resolve(e,"..","package.json");return S(`Checking package.json at: ${r}`),pe.existsSync(r)?(S(`Found package.json at: ${r}`),JSON.parse(pe.readFileSync(r,"utf8")).version):(S("Could not find package.json in the standard installed module location, using default version"),"0.0.0")}catch(t){return console.error("Error retrieving package version:",t),"0.0.0"}}import{program as jt}from"commander";import Re from"ora";function R(t){return t.option("-m, --module-paths [paths...]","Path(s) to node_modules folder").option("-w, --watch","Watches for changes").option("--skip-docs","Skip documentation generation",!1).option("--wca-input [files...]","Source file(s) to analyze for API documentation").option("--wca-output [files...]","Output file(s) for API documentation")}function ge(t){return t.option("-s, --serve","Starts a server").option("-p, --port <number>","Port for the server").option("-o, --open","Open the browser after starting the server")}import Dt from"@rollup/plugin-terser";import{watch as Pt}from"rollup";import{rmSync as mt}from"node:fs";import{join as ut}from"node:path";import V from"ora";import{rollup as J}from"rollup";import he from"ora";import{spawn as rt}from"node:child_process";import nt from"ora";var h=(t,e)=>{let r=`${t} ${e?e.join(" "):""}`,n=nt(),o=t,i=e||[];if(!e&&typeof t=="string"){let a=t.split(" ");o=a[0],i=a.slice(1)}let s=r.includes("--watch")||r.includes(" -w"),c=rt(o,i,{stdio:s?"inherit":["inherit","pipe","pipe"],shell:!0});if(!s){let a=[];c.stdout?.on("data",u=>{let m=u.toString();a.push(m),process.stdout.write(m)}),c.stderr?.on("data",u=>{let m=u.toString();a.push(m),process.stderr.write(m)})}return new Promise((a,u)=>{c.on("close",m=>{m!==0?s?(n.info(`Watch mode terminated with code ${m}`),a()):(n.fail(`${r} failed (code ${m})`),u(new Error(`Command failed with exit code ${m}`))):(n.succeed(`${r} completed successfully`),a())})})};import j from"node:fs";import ot from"node:path";var x=class{static{this.manifest={schemaVersion:"1.0.0",readme:"",modules:[]}}static generate(e={}){let{outDir:r="./docs",outFile:n="api.md",manifestPath:o="./custom-elements.json"}=e;if(o)try{let a=j.readFileSync(o,"utf8");this.manifest=JSON.parse(a)}catch(a){throw console.error(`Error reading manifest file at ${o}:`,a),a}let i=this.getElements(),s=r;j.existsSync(s)||j.mkdirSync(s,{recursive:!0});let l=this.renderAllElements(i),c=ot.join(s,n);j.writeFileSync(c,l),console.log(`Generated combined API documentation at ${c}`)}static getElements(){return this.manifest.modules.reduce((e,r)=>e.concat(r.declarations?.filter(n=>"customElement"in n&&n.customElement===!0&&"tagName"in n&&this.isWcaModule(r))??[]),[])}static isWcaModule(e){let r=e.path;return r?r.startsWith("scripts/wca/auro-")&&r.endsWith(".js"):!1}static renderAllElements(e){return`${e.map(r=>this.renderElement(r,!1)).join(`
+ import{program as ue}from"commander";import Ze from"figlet";import{mind as et}from"gradient-string";var de=()=>et(Ze.textSync("Auro CLI"));import pe from"node:fs";import fe from"node:path";import{fileURLToPath as tt}from"node:url";function S(t){process.env.DEBUG&&console.log(`[DEBUG] ${t}`)}function q(){try{let t=tt(import.meta.url),e=fe.dirname(t);S(`Current module path: ${e}`);let r=fe.resolve(e,"..","package.json");return S(`Checking package.json at: ${r}`),pe.existsSync(r)?(S(`Found package.json at: ${r}`),JSON.parse(pe.readFileSync(r,"utf8")).version):(S("Could not find package.json in the standard installed module location, using default version"),"0.0.0")}catch(t){return console.error("Error retrieving package version:",t),"0.0.0"}}import{program as jt}from"commander";import Re from"ora";function R(t){return t.option("-m, --module-paths [paths...]","Path(s) to node_modules folder").option("-w, --watch","Watches for changes").option("--skip-docs","Skip documentation generation",!1).option("--wca-input [files...]","Source file(s) to analyze for API documentation").option("--wca-output [files...]","Output file(s) for API documentation")}function ge(t){return t.option("-s, --serve","Starts a server").option("-p, --port <number>","Port for the server").option("-o, --open","Open the browser after starting the server")}import Dt from"@rollup/plugin-terser";import{watch as Pt}from"rollup";import{rmSync as mt}from"node:fs";import{join as ut}from"node:path";import V from"ora";import{rollup as J}from"rollup";import he from"ora";import{spawn as rt}from"node:child_process";import nt from"ora";var h=(t,e)=>{let r=`${t} ${e?e.join(" "):""}`,n=nt(),o=t,s=e||[];if(!e&&typeof t=="string"){let a=t.split(" ");o=a[0],s=a.slice(1)}let i=r.includes("--watch")||r.includes(" -w"),c=rt(o,s,{stdio:i?"inherit":["inherit","pipe","pipe"],shell:!0});if(!i){let a=[];c.stdout?.on("data",u=>{let m=u.toString();a.push(m),process.stdout.write(m)}),c.stderr?.on("data",u=>{let m=u.toString();a.push(m),process.stderr.write(m)})}return new Promise((a,u)=>{c.on("close",m=>{m!==0?i?(n.info(`Watch mode terminated with code ${m}`),a()):(n.fail(`${r} failed (code ${m})`),u(new Error(`Command failed with exit code ${m}`))):(n.succeed(`${r} completed successfully`),a())})})};import j from"node:fs";import ot from"node:path";var x=class{static{this.manifest={schemaVersion:"1.0.0",readme:"",modules:[]}}static generate(e={}){let{outDir:r="./docs",outFile:n="api.md",manifestPath:o="./custom-elements.json"}=e;if(o)try{let a=j.readFileSync(o,"utf8");this.manifest=JSON.parse(a)}catch(a){throw console.error(`Error reading manifest file at ${o}:`,a),a}let s=this.getElements(),i=r;j.existsSync(i)||j.mkdirSync(i,{recursive:!0});let l=this.renderAllElements(s),c=ot.join(i,n);j.writeFileSync(c,l),console.log(`Generated combined API documentation at ${c}`)}static getElements(){return this.manifest.modules.reduce((e,r)=>e.concat(r.declarations?.filter(n=>"customElement"in n&&n.customElement===!0&&"tagName"in n&&this.isWcaModule(r))??[]),[])}static isWcaModule(e){let r=e.path;return r?r.startsWith("scripts/wca/auro-")&&r.endsWith(".js"):!1}static renderAllElements(e){return`${e.map(r=>this.renderElement(r,!1)).join(`
 
  ---
 
@@ -10,26 +10,26 @@ import{program as ue}from"commander";import Ze from"figlet";import{mind as et}fr
 
  `}${e.description?`${e.description}
 
- `:""}${this.renderPropertiesAttributesTable(e)}${this.renderTable("Methods",["name","parameters","return.type.text","description"],(e.members||[]).filter(n=>n.kind==="method"&&("privacy"in n?n.privacy!=="private":!0)&&n.name[0]!=="_").map(n=>({...n,parameters:this.renderParameters("parameters"in n?n.parameters:void 0)})))}${this.renderTable("Events",["name","description"],e.events)}${this.renderTable("Slots",[["name","(default)"],"description"],e.slots)}${this.renderTable("CSS Shadow Parts",["name","description"],e.cssParts)}${this.renderTable("CSS Custom Properties",["name","description"],e.cssProperties)}`}static renderPropertiesAttributesTable(e){let r=e.members?.filter(a=>a.kind==="field")||[],n=e.attributes||[],o=[],i=new Set;if(r.forEach(a=>{a.description?.trim()&&o.push({name:a.name,properties:a.name,attributes:("attribute"in a?a.attribute:"")||"",type:this.get(a,"type.text")||"",default:("default"in a?a.default:"")||"",description:a.description||""}),i.add(a.name),"attribute"in a&&a.attribute&&i.add(a.attribute)}),n.forEach(a=>{!i.has(a.name)&&a.description?.trim()&&o.push({name:a.name,properties:"",attributes:a.name,type:this.get(a,"type.text")||"",default:a.default||"",description:a.description||""})}),o.length===0)return"";let s="Properties | Attributes | Type | Default | Description ",l="--- | --- | --- | --- | ---",c=o.map(a=>[a.properties,a.attributes,a.type,a.default,a.description].map(u=>String(u||"").replace(/\|/g,"\\|").replace(/\n/g,"<br>")).join(" | ")).join(`
+ `:""}${this.renderPropertiesAttributesTable(e)}${this.renderTable("Methods",["name","parameters","return.type.text","description"],(e.members||[]).filter(n=>n.kind==="method"&&("privacy"in n?n.privacy!=="private":!0)&&n.name[0]!=="_").map(n=>({...n,parameters:this.renderParameters("parameters"in n?n.parameters:void 0)})))}${this.renderTable("Events",["name","description"],e.events)}${this.renderTable("Slots",[["name","(default)"],"description"],e.slots)}${this.renderTable("CSS Shadow Parts",["name","description"],e.cssParts)}${this.renderTable("CSS Custom Properties",["name","description"],e.cssProperties)}`}static renderPropertiesAttributesTable(e){let r=e.members?.filter(a=>a.kind==="field")||[],n=e.attributes||[],o=[],s=new Set;if(r.forEach(a=>{a.description?.trim()&&o.push({name:a.name,properties:a.name,attributes:("attribute"in a?a.attribute:"")||"",type:this.get(a,"type.text")||"",default:("default"in a?a.default:"")||"",description:a.description||""}),s.add(a.name),"attribute"in a&&a.attribute&&s.add(a.attribute)}),n.forEach(a=>{!s.has(a.name)&&a.description?.trim()&&o.push({name:a.name,properties:"",attributes:a.name,type:this.get(a,"type.text")||"",default:a.default||"",description:a.description||""})}),o.length===0)return"";let i="Properties | Attributes | Type | Default | Description ",l="--- | --- | --- | --- | ---",c=o.map(a=>[a.properties,a.attributes,a.type,a.default,a.description].map(u=>String(u||"").replace(/\|/g,"\\|").replace(/\n/g,"<br>")).join(" | ")).join(`
  `);return`
  ### Properties & Attributes
 
- | ${s} |
+ | ${i} |
  | ${l} |
  ${c}
 
- `}static renderParameters(e){return!e||e.length===0?"None":e.map(r=>`\`${r.name}\` (${this.get(r,"type.text")||"any"})${r.description?` - ${r.description}`:""}`).join("<br>")}static renderTable(e,r,n){if(n===void 0||n.length===0)return"";let o=n.filter(c=>{let a=c.description;return typeof a=="string"&&a.trim()});if(o.length===0)return"";let i=r.map(c=>this.capitalize((Array.isArray(c)?c[0]:c).split(".")[0])).join(" | "),s=r.map(()=>"---").join(" | "),l=o.map(c=>r.map(a=>{let u=this.get(c,a);return String(u||"").replace(/\|/g,"\\|").replace(/\n/g,"<br>")}).join(" | ")).join(`
+ `}static renderParameters(e){return!e||e.length===0?"None":e.map(r=>`\`${r.name}\` (${this.get(r,"type.text")||"any"})${r.description?` - ${r.description}`:""}`).join("<br>")}static renderTable(e,r,n){if(n===void 0||n.length===0)return"";let o=n.filter(c=>{let a=c.description;return typeof a=="string"&&a.trim()});if(o.length===0)return"";let s=r.map(c=>this.capitalize((Array.isArray(c)?c[0]:c).split(".")[0])).join(" | "),i=r.map(()=>"---").join(" | "),l=o.map(c=>r.map(a=>{let u=this.get(c,a);return String(u||"").replace(/\|/g,"\\|").replace(/\n/g,"<br>")}).join(" | ")).join(`
  `);return`
  ### ${e}
 
- | ${i} |
  | ${s} |
+ | ${i} |
  ${l}
 
- `}static get(e,r){let n="",o=r;Array.isArray(r)&&([o,n]=r);let i=o.split("."),s=e;for(;s&&i.length;)s=s[i.shift()];return s==null||s===""?n:String(s)}static capitalize(e){return e[0].toUpperCase()+e.substring(1)}};async function O(){let t=he("Generating Custom Elements Manifest...").start();try{await h("npx --package=@custom-elements-manifest/analyzer -y -- cem analyze --litelement --globs src/*.*js scripts/wca/**/*.*js --packagejson --dependencies"),t.succeed("Custom Elements Manifest generated successfully!")}catch(e){let r=e instanceof Error?e.message:String(e);t.warn("CEM analyzer completed with warnings: "+r)}}async function _(){let t=he("Generating API documentation...").start();try{await x.generate(),t.succeed("API documentation generated successfully!")}catch(e){let r=e instanceof Error?e.message:String(e);throw t.fail("Failed to generate API documentation: "+r),e}}async function B(){await O(),await _()}import{Logger as st}from"@aurodesignsystem/auro-library/scripts/utils/logger.mjs";import{generateReadmeUrl as it,processContentForFile as at,templateFiller as ye}from"@aurodesignsystem/auro-library/scripts/utils/sharedFileProcessorUtils.mjs";var we={overwriteLocalCopies:!0,remoteReadmeVersion:"master",remoteReadmeVariant:"_updated_paths"};function $(t){return`${process.cwd()}/${t}`}var ct=t=>[{identifier:"README.md",input:{remoteUrl:t.remoteReadmeUrl||it(t.remoteReadmeVersion,t.remoteReadmeVariant),fileName:$("/docTemplates/README.md"),overwrite:t.overwriteLocalCopies},output:$("/README.md")},{identifier:"index.md",input:$("/docs/partials/index.md"),output:$("/demo/index.md"),mdMagicConfig:{output:{directory:$("/demo")}}},{identifier:"api.md",input:$("/docs/partials/api.md"),output:$("/demo/api.md"),preProcessors:[ye.formatApiTable]}];async function lt(t=we){await ye.extractNames();for(let e of ct(t))try{await at(e)}catch(r){st.error(`Error processing ${e.identifier}: ${r.message}`)}}async function be(){await lt({...we,remoteReadmeUrl:"https://raw.githubusercontent.com/AlaskaAirlines/auro-templates/main/templates/default/README.md"})}function ke(){let t=ut("./dist"),e=V("Cleaning dist folder...").start();try{return mt(t,{recursive:!0,force:!0}),e.succeed("All clean! Dist folder wiped."),!0}catch(r){return e.fail(`Oops! Couldn't clean dist/ folder: ${r.message}`),console.error(r),!1}}async function Y(t,e,r,n){let o=V(t).start();try{let i=await e();return o.succeed(r),i}catch(i){throw o.fail(n),console.error(`Error: ${i.message}`),i}}async function Ae(t,e){return Y("Creating type definitions...",async()=>{let r=await J(t);await r.write(e),await r.close()},"Types files built.","Darn! Type definitions failed.")}async function $e(t,e){return Y(`Bundling ${t.name||"main"} and ${e.name||"demo"}...`,async()=>{let r=await J(t);await r.write(t.output),await r.close();let n=await J(e);await n.write(e.output),await n.close()},`Bundles ready! ${t.name||"Main"} and ${e.name||"demo"} built.`,"Bundle hiccup! Build failed.")}async function I(t){let{wcaInput:e,wcaOutput:r,skipDocs:n}=t;if(n){let o=V("Skipping docs generation...").start();setTimeout(()=>{o.succeed("Docs generation skipped.")},0);return}return Y("Analyzing components and making docs...",async()=>{await B(e,r),await be()},"Docs ready! 
Looking good.","Doc troubles!")}import{basename as ft,join as xe}from"node:path";import{nodeResolve as gt}from"@rollup/plugin-node-resolve";import{glob as ht}from"glob";import{dts as yt}from"rollup-plugin-dts";import{litScss as wt}from"rollup-plugin-scss-lit";import dt from"node:path";import{glob as pt}from"glob";function Ee(t){return{name:"watch-globs",buildStart(){let e=Array.isArray(t)?t:[t];for(let r of e)try{for(let n of pt.sync(dt.resolve(r)))this.addWatchFile(n)}catch(n){this.error(`Error watching glob pattern "${r}": ${n.message}`)}}}}var K={moduleDirectories:["node_modules"],modulePaths:["../../node_modules","../node_modules","node_modules"],watchPatterns:["./apiExamples/**/*","./docs/**/*"]};function Ce(t=[],e={}){let{watchPatterns:r=K.watchPatterns,dedupe:n=["lit","lit-element","lit-html"]}=e,o=[...K.modulePaths,...t];return[gt({dedupe:n,preferBuiltins:!1,moduleDirectories:K.moduleDirectories}),wt({minify:{fast:!0},options:{loadPaths:[...o,xe(process.cwd(),"src","styles"),xe(process.cwd(),"src")]}}),Ee(r)]}function X(t={}){let{modulePaths:e=[],watch:r=!1,input:n=["./src/index.js","./src/registered.js"],outputDir:o="./dist",format:i="esm"}=t;return{name:"Main",config:{input:n,output:{format:i,dir:o,entryFileNames:"[name].js"},external:bt(),plugins:Ce(e),watch:Te(r)}}}function Q(t={}){let{modulePaths:e=[],watch:r=!1,globPattern:n="./demo/*.js",ignorePattern:o=["./demo/*.min.js"],outputDir:i="./demo"}=t;return{name:"Demo",config:{input:Object.fromEntries(ht.sync(n,{ignore:o}).map(s=>[ft(s,".js"),s])),output:{format:"esm",dir:i,entryFileNames:"[name].min.js",chunkFileNames:"[name].min.js"},plugins:Ce(e),watch:Te(r)}}}function C(t={}){let{input:e=["./dist/index.js"],outputDir:r="./dist"}=t;return{name:"DTS",config:{input:e,output:{format:"esm",dir:r,entryFileNames:"[name].d.ts"},plugins:[yt()]}}}function Te(t){if(!t)return!1;let e=typeof t=="object"?t:{};return{clearScreen:e.clearScreen??!0,buildDelay:e.buildDelay??500,chokidar:{ignoreInitial:!0,ignored:e.ignored??["**/dist/**/*.d.ts","**/custom-elements.json","**/demo/*.md","**/demo/**/*.min.js","**/docs/api.md","**/node_modules/**","**/.git/**"],awaitWriteFinish:e.awaitWriteFinish??{stabilityThreshold:1e3,pollInterval:100}},include:e.include??["./src/**/*.scss","./src/**/*.js","./src/**/*.ts","./demo/**/*.js","./apiExamples/**/*","./docs/**/*.md"],exclude:e.exclude??["./dist/**/*","./node_modules/**/*"]}}function bt(t=[]){return[...[/node_modules\/lit/,/node_modules\/lit-element/,/node_modules\/lit-html/,/node_modules\/@lit/],...t]}import{startDevServer as kt}from"@web/dev-server";import{hmrPlugin as At}from"@web/dev-server-hmr";import $t from"ora";var T={watch:!0,nodeResolve:!0,basePath:"/",rootDir:"./demo",hmrInclude:["src/**/*","demo/**/*","apiExamples/**/*","docs/**/*"]};async function ve(t={}){if(!t.serve)return;let e=$t(`Firing up dev server...
- `).start();try{let r={port:Number(t.port)||void 0,open:t.open?"/":void 0,watch:t.watch??T.watch,nodeResolve:t.nodeResolve??T.nodeResolve,basePath:t.basePath??T.basePath,rootDir:t.rootDir??T.rootDir,middleware:[function(i,s){return!i.url.endsWith("/")&&!i.url.includes(".")&&(i.url+=".html"),s()}],plugins:[At({include:t.hmrInclude??T.hmrInclude})]},n=await kt({config:r,readCliArgs:!1,readFileConfig:!1});return e.stop(),n}catch(r){throw e.fail("Server snag! Couldn't start dev server."),console.error("Error starting development server:",r),new Error(`Development server failed to start: ${r.message}`)}}import De from"node:path";import b from"ora";import{rollup as Et}from"rollup";var N=!1,xt={dts:{active:!1,lastTime:0},analyze:{active:!1,lastTime:0},docs:{active:!1,lastTime:0}},Ct=5e3,F=new Set,Tt=["/dist/index.d.ts","/custom-elements.json","/demo/api.md","/docs/api.md","/demo/index.min.js"];function vt(t){if(!t||typeof t!="string")return!1;try{let e=De.normalize(t);return Tt.some(r=>e.endsWith(r))||e.includes("/dist/")||e.endsWith(".min.js")||e.endsWith(".d.ts")}catch(e){return console.error(`Error checking path (${typeof t}):`,e.message),!1}}async function Z(t,e){let r=xt[t];if(r.active||Date.now()-r.lastTime<Ct)return!1;try{return r.active=!0,r.lastTime=Date.now(),await e()}catch(n){return console.error(`Error in ${t} task:`,n),!1}finally{r.active=!1}}async function Pe(t,e,r){let n=!0,o={dts:!1,analyze:!1,docs:!1},i=null,s,l=b("Activating watch mode...").start(),c={dts:async()=>{let m=b("Crafting type definitions...").start();try{let d=await Et(C().config);return await d.write(C().config.output),await d.close(),m.succeed("Type files built."),!0}catch(d){return m.fail("Types trouble! Build failed."),console.error("TypeScript definition build error:",d),!1}},analyze:async()=>{let{wcaInput:m,wcaOutput:d,skipDocs:y}=e;if(y){let w=b("Skipping component analysis...").start();return setTimeout(()=>{w.succeed("Component analysis skipped.")},0),!0}let A=b("Detective work: analyzing components...").start();try{return await B(m,d),A.succeed("Component analysis complete! API generated."),!0}catch(w){return A.fail("Analysis hiccup! Something went wrong."),console.error("Component analysis error:",w),!1}},docs:async()=>{if(N)return!1;if(e.skipDocs){let d=b("Skipping docs generation...").start();return setTimeout(()=>{d.succeed("Docs generation skipped.")},0),!0}let m=b("Refreshing docs...").start();try{return await I(e),m.succeed("Documentation refreshed!"),!0}catch(d){m.fail("Docs stumble! Couldn't refresh."),console.error("Documentation rebuild error:",d)}}},a=()=>{n&&o.dts&&o.analyze&&o.docs&&typeof r=="function"&&(n=!1,r())};function u(m=1e3){i&&clearTimeout(i),i=setTimeout(async()=>{o.dts=await Z("dts",c.dts),setTimeout(async()=>{o.analyze=await Z("analyze",c.analyze),setTimeout(async()=>{o.docs=await Z("docs",c.docs),a()},1e3)},1e3)},m)}t.on("event",async m=>{switch(m.code){case"START":l.succeed("Watch mode active! Eyes peeled.");break;case"BUNDLE_START":if(F.clear(),m.input)try{let d=Array.isArray(m.input)?m.input:typeof m.input=="string"?[m.input]:typeof m.input=="object"&&m.input!==null?Object.values(m.input):[];for(let y of d)typeof y=="string"&&!vt(y)&&F.add(De.normalize(y))}catch(d){console.error("Error processing input paths:",d)}s=b("Weaving bundles...").start(),N=!0;break;case"BUNDLE_END":s&&s.succeed(`Bundle ${Array.isArray(m.input)?`of ${m.input.join("& ")} `:""}done in ${m.duration}ms! \u{1F680}`),N=!1,F.size>0&&u();break;case"END":break;case"ERROR":N=!1,s?s.fail(`Oops! 
Bundle hit a snag: ${m.error.message}`):b().fail(`Watch mode hiccup: ${m.error.message}`),F.clear();break}})}function Se(t){return process.on("SIGINT",()=>{let e=b("Wrapping up...").start();t.close(),e.succeed("All done! See you next time. \u2728"),process.exit(0)}),t}async function St(t){let e=X(t),r=Q(t),n=C();e.config.plugins.push(Dt()),await I(t),await $e(e.config,r.config),await Ae(n.config,n.config.output)}async function Rt(t){let{dev:e}=t,r=X({...t,watch:!0}),n=Q({...t,watch:!0}),o=Pt([r.config,n.config]);return Pe(o,t,e?async()=>ve(t):void 0),Se(o),o}async function M(t={}){try{let{watch:e}=t;return ke(),e?await Rt(t):await St(t)}catch(e){throw new Error(`Build failed: ${e.message}`)}}var v=jt.command("dev").description("Runs development server for auro components");v=R(v);v=ge(v);var Un=v.action(async t=>{try{let e=Re("Initializing...");t.watch?(e.text="Waiting for changes...",e.spinner="bouncingBar",e.color="green"):e.text=t.docs===!1?"Building component (docs disabled)":"Building component",e.start(),t.watch||e.succeed("Build completed!"),await M({...t,dev:!0,watch:t.watch})}catch(e){Re().fail(`Build failed: ${e.message}`),console.error(e),process.exit(1)}});import{program as Ot}from"commander";import je from"ora";var ee=Ot.command("build").description("Builds auro components");ee=R(ee);var Vn=ee.action(async t=>{try{let e=je("Initializing...");t.watch?(e.text="Waiting for changes...",e.spinner="bouncingBar",e.color="green"):e.text=t.docs===!1?"Building component (docs disabled)":"Building component",e.start(),await M(t),t.watch||e.succeed("Build completed!")}catch(e){je().fail(`Build failed: ${e.message}`),console.error(e),process.exit(1)}});import{exec as _t}from"node:child_process";import Oe from"node:path";import Bt from"node:process";import{fileURLToPath as It}from"node:url";import Nt from"node:util";import{program as Ft}from"commander";import Mt from"inquirer";var oo=Ft.command("migrate").description("Script runner to perform repetitive code change tasks").requiredOption("-i, --id <string>","Select the migration you would like to run by id").option("-m, --multi-gitter","Run the migration on all repositories in the multi-gitter config").action(async t=>{let e=It(import.meta.url),r=Oe.dirname(e),n=Oe.resolve(r,"migrations",t.id);if(t.multiGitter){let o=Nt.promisify(_t);try{await o("command -v multi-gitter")}catch{console.error("multi-gitter is not installed."),Bt.exit(1)}(await Mt.prompt([{type:"confirm",name:"dryRun",message:"Run migration in dry-run mode? 
(no changes will be committed)",default:!0}])).dryRun?h(`multi-gitter run ${n}/script.sh --config "${n}/multi-gitter.yml" --dry-run`):h(`multi-gitter run ${n}/script.sh --config "${n}/multi-gitter.yml"`)}else h(`${n}/script.sh`)});import Vt from"node:process";import{program as Yt}from"commander";import{readFile as Kt,writeFile as Xt}from"node:fs/promises";import{Logger as re}from"@aurodesignsystem/auro-library/scripts/utils/logger.mjs";import Gt from"node:fs/promises";import{Logger as k}from"@aurodesignsystem/auro-library/scripts/utils/logger.mjs";import{processContentForFile as Lt,templateFiller as Ut}from"@aurodesignsystem/auro-library/scripts/utils/sharedFileProcessorUtils.mjs";var te="https://raw.githubusercontent.com/AlaskaAirlines/auro-templates",_e="main",Wt="main",Ht="default",Be={ISSUE_TEMPLATE:["bug_report.yaml","config.yml","feature_request.yaml","general-support.yaml","group.yaml","story.yaml","task.yaml"],workflows:["codeql.yml","publishDemo.yml","testPublish.yml"],_root:["CODEOWNERS","CODE_OF_CONDUCT.md","CONTRIBUTING.md","PULL_REQUEST_TEMPLATE.md","SECURITY.md","settings.yml","stale.yml"]};function zt(t,e){return t.startsWith("v")&&/^\d+\.\d+\.\d+(?<_>-.*)?$/u.test(t.slice(1))?`${te}/refs/tags/${t}/${e}`:t!==_e?`${te}/refs/heads/${t}/${e}`:`${te}/${_e}/${e}`}function qt(t,e,r){let n=zt(e,t);return{identifier:t.split("/").pop(),input:{remoteUrl:n,fileName:r,overwrite:!0},output:r,overwrite:!0}}async function Jt(t){try{await Gt.rm(t,{recursive:!0,force:!0}),k.log(`Successfully removed directory: ${t}`)}catch(e){throw k.error(`Error removing directory ${t}: ${e.message}`),e}}async function Ie(t){t||(k.error("Root directory must be specified"),process.exit(1));let e=".github";try{await Jt(e),k.log(".github directory removed successfully")}catch(o){k.error(`Error removing .github directory: ${o.message}`),process.exit(1)}await Ut.extractNames();let r=[],n=[];for(let o of Object.keys(Be))for(let i of Be[o]){let s=`${o==="_root"?"":`${o}/`}${i}`,l=`${t}/.github/${s}`,c=qt(`templates/${Ht}/.github/${s}`,Wt,l);r.push(c)}if(await Promise.all(r.map(async o=>{try{(await fetch(o.input.remoteUrl,{method:"HEAD"})).ok||n.push(o.input.remoteUrl)}catch{n.push(o.input.remoteUrl)}})),n.length>0){let o=n.map(i=>`File not found: ${i}`).join(`
+ `}static get(e,r){let n="",o=r;Array.isArray(r)&&([o,n]=r);let s=o.split("."),i=e;for(;i&&s.length;)i=i[s.shift()];return i==null||i===""?n:String(i)}static capitalize(e){return e[0].toUpperCase()+e.substring(1)}};async function O(){let t=he("Generating Custom Elements Manifest...").start();try{await h("npx --package=@custom-elements-manifest/analyzer -y -- cem analyze --litelement --globs src/*.*js scripts/wca/**/*.*js --packagejson --dependencies"),t.succeed("Custom Elements Manifest generated successfully!")}catch(e){let r=e instanceof Error?e.message:String(e);t.warn("CEM analyzer completed with warnings: "+r)}}async function _(){let t=he("Generating API documentation...").start();try{await x.generate(),t.succeed("API documentation generated successfully!")}catch(e){let r=e instanceof Error?e.message:String(e);throw t.fail("Failed to generate API documentation: "+r),e}}async function B(){await O(),await _()}import{Logger as it}from"@aurodesignsystem/auro-library/scripts/utils/logger.mjs";import{generateReadmeUrl as st,processContentForFile as at,templateFiller as ye}from"@aurodesignsystem/auro-library/scripts/utils/sharedFileProcessorUtils.mjs";var we={overwriteLocalCopies:!0,remoteReadmeVersion:"master",remoteReadmeVariant:"_updated_paths"};function $(t){return`${process.cwd()}/${t}`}var ct=t=>[{identifier:"README.md",input:{remoteUrl:t.remoteReadmeUrl||st(t.remoteReadmeVersion,t.remoteReadmeVariant),fileName:$("/docTemplates/README.md"),overwrite:t.overwriteLocalCopies},output:$("/README.md")},{identifier:"index.md",input:$("/docs/partials/index.md"),output:$("/demo/index.md"),mdMagicConfig:{output:{directory:$("/demo")}}},{identifier:"api.md",input:$("/docs/partials/api.md"),output:$("/demo/api.md"),preProcessors:[ye.formatApiTable]}];async function lt(t=we){await ye.extractNames();for(let e of ct(t))try{await at(e)}catch(r){it.error(`Error processing ${e.identifier}: ${r.message}`)}}async function be(){await lt({...we,remoteReadmeUrl:"https://raw.githubusercontent.com/AlaskaAirlines/auro-templates/main/templates/default/README.md"})}function ke(){let t=ut("./dist"),e=V("Cleaning dist folder...").start();try{return mt(t,{recursive:!0,force:!0}),e.succeed("All clean! Dist folder wiped."),!0}catch(r){return e.fail(`Oops! Couldn't clean dist/ folder: ${r.message}`),console.error(r),!1}}async function Y(t,e,r,n){let o=V(t).start();try{let s=await e();return o.succeed(r),s}catch(s){throw o.fail(n),console.error(`Error: ${s.message}`),s}}async function Ae(t,e){return Y("Creating type definitions...",async()=>{let r=await J(t);await r.write(e),await r.close()},"Types files built.","Darn! Type definitions failed.")}async function $e(t,e){return Y(`Bundling ${t.name||"main"} and ${e.name||"demo"}...`,async()=>{let r=await J(t);await r.write(t.output),await r.close();let n=await J(e);await n.write(e.output),await n.close()},`Bundles ready! ${t.name||"Main"} and ${e.name||"demo"} built.`,"Bundle hiccup! Build failed.")}async function I(t){let{wcaInput:e,wcaOutput:r,skipDocs:n}=t;if(n){let o=V("Skipping docs generation...").start();setTimeout(()=>{o.succeed("Docs generation skipped.")},0);return}return Y("Analyzing components and making docs...",async()=>{await B(e,r),await be()},"Docs ready! 
Looking good.","Doc troubles!")}import{basename as ft,join as xe}from"node:path";import{nodeResolve as gt}from"@rollup/plugin-node-resolve";import{glob as ht}from"glob";import{dts as yt}from"rollup-plugin-dts";import{litScss as wt}from"rollup-plugin-scss-lit";import dt from"node:path";import{glob as pt}from"glob";function Ee(t){return{name:"watch-globs",buildStart(){let e=Array.isArray(t)?t:[t];for(let r of e)try{for(let n of pt.sync(dt.resolve(r)))this.addWatchFile(n)}catch(n){this.error(`Error watching glob pattern "${r}": ${n.message}`)}}}}var K={moduleDirectories:["node_modules"],modulePaths:["../../node_modules","../node_modules","node_modules"],watchPatterns:["./apiExamples/**/*","./docs/**/*"]};function Ce(t=[],e={}){let{watchPatterns:r=K.watchPatterns,dedupe:n=["lit","lit-element","lit-html"]}=e,o=[...K.modulePaths,...t];return[gt({dedupe:n,preferBuiltins:!1,moduleDirectories:K.moduleDirectories}),wt({minify:{fast:!0},options:{loadPaths:[...o,xe(process.cwd(),"src","styles"),xe(process.cwd(),"src")]}}),Ee(r)]}function X(t={}){let{modulePaths:e=[],watch:r=!1,input:n=["./src/index.js","./src/registered.js"],outputDir:o="./dist",format:s="esm"}=t;return{name:"Main",config:{input:n,output:{format:s,dir:o,entryFileNames:"[name].js"},external:bt(),plugins:Ce(e),watch:Te(r)}}}function Q(t={}){let{modulePaths:e=[],watch:r=!1,globPattern:n="./demo/*.js",ignorePattern:o=["./demo/*.min.js"],outputDir:s="./demo"}=t;return{name:"Demo",config:{input:Object.fromEntries(ht.sync(n,{ignore:o}).map(i=>[ft(i,".js"),i])),output:{format:"esm",dir:s,entryFileNames:"[name].min.js",chunkFileNames:"[name].min.js"},plugins:Ce(e),watch:Te(r)}}}function C(t={}){let{input:e=["./dist/index.js"],outputDir:r="./dist"}=t;return{name:"DTS",config:{input:e,output:{format:"esm",dir:r,entryFileNames:"[name].d.ts"},plugins:[yt()]}}}function Te(t){if(!t)return!1;let e=typeof t=="object"?t:{};return{clearScreen:e.clearScreen??!0,buildDelay:e.buildDelay??500,chokidar:{ignoreInitial:!0,ignored:e.ignored??["**/dist/**/*.d.ts","**/custom-elements.json","**/demo/*.md","**/demo/**/*.min.js","**/docs/api.md","**/node_modules/**","**/.git/**"],awaitWriteFinish:e.awaitWriteFinish??{stabilityThreshold:1e3,pollInterval:100}},include:e.include??["./src/**/*.scss","./src/**/*.js","./src/**/*.ts","./demo/**/*.js","./apiExamples/**/*","./docs/**/*.md"],exclude:e.exclude??["./dist/**/*","./node_modules/**/*"]}}function bt(t=[]){return[...[/node_modules\/lit/,/node_modules\/lit-element/,/node_modules\/lit-html/,/node_modules\/@lit/],...t]}import{startDevServer as kt}from"@web/dev-server";import{hmrPlugin as At}from"@web/dev-server-hmr";import $t from"ora";var T={watch:!0,nodeResolve:!0,basePath:"/",rootDir:"./demo",hmrInclude:["src/**/*","demo/**/*","apiExamples/**/*","docs/**/*"]};async function ve(t={}){if(!t.serve)return;let e=$t(`Firing up dev server...
+ `).start();try{let r={port:Number(t.port)||void 0,open:t.open?"/":void 0,watch:t.watch??T.watch,nodeResolve:t.nodeResolve??T.nodeResolve,basePath:t.basePath??T.basePath,rootDir:t.rootDir??T.rootDir,middleware:[function(s,i){return!s.url.endsWith("/")&&!s.url.includes(".")&&(s.url+=".html"),i()}],plugins:[At({include:t.hmrInclude??T.hmrInclude})]},n=await kt({config:r,readCliArgs:!1,readFileConfig:!1});return e.stop(),n}catch(r){throw e.fail("Server snag! Couldn't start dev server."),console.error("Error starting development server:",r),new Error(`Development server failed to start: ${r.message}`)}}import De from"node:path";import b from"ora";import{rollup as Et}from"rollup";var N=!1,xt={dts:{active:!1,lastTime:0},analyze:{active:!1,lastTime:0},docs:{active:!1,lastTime:0}},Ct=5e3,F=new Set,Tt=["/dist/index.d.ts","/custom-elements.json","/demo/api.md","/docs/api.md","/demo/index.min.js"];function vt(t){if(!t||typeof t!="string")return!1;try{let e=De.normalize(t);return Tt.some(r=>e.endsWith(r))||e.includes("/dist/")||e.endsWith(".min.js")||e.endsWith(".d.ts")}catch(e){return console.error(`Error checking path (${typeof t}):`,e.message),!1}}async function Z(t,e){let r=xt[t];if(r.active||Date.now()-r.lastTime<Ct)return!1;try{return r.active=!0,r.lastTime=Date.now(),await e()}catch(n){return console.error(`Error in ${t} task:`,n),!1}finally{r.active=!1}}async function Pe(t,e,r){let n=!0,o={dts:!1,analyze:!1,docs:!1},s=null,i,l=b("Activating watch mode...").start(),c={dts:async()=>{let m=b("Crafting type definitions...").start();try{let d=await Et(C().config);return await d.write(C().config.output),await d.close(),m.succeed("Type files built."),!0}catch(d){return m.fail("Types trouble! Build failed."),console.error("TypeScript definition build error:",d),!1}},analyze:async()=>{let{wcaInput:m,wcaOutput:d,skipDocs:y}=e;if(y){let w=b("Skipping component analysis...").start();return setTimeout(()=>{w.succeed("Component analysis skipped.")},0),!0}let A=b("Detective work: analyzing components...").start();try{return await B(m,d),A.succeed("Component analysis complete! API generated."),!0}catch(w){return A.fail("Analysis hiccup! Something went wrong."),console.error("Component analysis error:",w),!1}},docs:async()=>{if(N)return!1;if(e.skipDocs){let d=b("Skipping docs generation...").start();return setTimeout(()=>{d.succeed("Docs generation skipped.")},0),!0}let m=b("Refreshing docs...").start();try{return await I(e),m.succeed("Documentation refreshed!"),!0}catch(d){m.fail("Docs stumble! Couldn't refresh."),console.error("Documentation rebuild error:",d)}}},a=()=>{n&&o.dts&&o.analyze&&o.docs&&typeof r=="function"&&(n=!1,r())};function u(m=1e3){s&&clearTimeout(s),s=setTimeout(async()=>{o.dts=await Z("dts",c.dts),setTimeout(async()=>{o.analyze=await Z("analyze",c.analyze),setTimeout(async()=>{o.docs=await Z("docs",c.docs),a()},1e3)},1e3)},m)}t.on("event",async m=>{switch(m.code){case"START":l.succeed("Watch mode active! Eyes peeled.");break;case"BUNDLE_START":if(F.clear(),m.input)try{let d=Array.isArray(m.input)?m.input:typeof m.input=="string"?[m.input]:typeof m.input=="object"&&m.input!==null?Object.values(m.input):[];for(let y of d)typeof y=="string"&&!vt(y)&&F.add(De.normalize(y))}catch(d){console.error("Error processing input paths:",d)}i=b("Weaving bundles...").start(),N=!0;break;case"BUNDLE_END":i&&i.succeed(`Bundle ${Array.isArray(m.input)?`of ${m.input.join("& ")} `:""}done in ${m.duration}ms! \u{1F680}`),N=!1,F.size>0&&u();break;case"END":break;case"ERROR":N=!1,i?i.fail(`Oops! 
Bundle hit a snag: ${m.error.message}`):b().fail(`Watch mode hiccup: ${m.error.message}`),F.clear();break}})}function Se(t){return process.on("SIGINT",()=>{let e=b("Wrapping up...").start();t.close(),e.succeed("All done! See you next time. \u2728"),process.exit(0)}),t}async function St(t){let e=X(t),r=Q(t),n=C();e.config.plugins.push(Dt()),await I(t),await $e(e.config,r.config),await Ae(n.config,n.config.output)}async function Rt(t){let{dev:e}=t,r=X({...t,watch:!0}),n=Q({...t,watch:!0}),o=Pt([r.config,n.config]);return Pe(o,t,e?async()=>ve(t):void 0),Se(o),o}async function M(t={}){try{let{watch:e}=t;return ke(),e?await Rt(t):await St(t)}catch(e){throw new Error(`Build failed: ${e.message}`)}}var v=jt.command("dev").description("Runs development server for auro components");v=R(v);v=ge(v);var Wn=v.action(async t=>{try{let e=Re("Initializing...");t.watch?(e.text="Waiting for changes...",e.spinner="bouncingBar",e.color="green"):e.text=t.docs===!1?"Building component (docs disabled)":"Building component",e.start(),t.watch||e.succeed("Build completed!"),await M({...t,dev:!0,watch:t.watch})}catch(e){Re().fail(`Build failed: ${e.message}`),console.error(e),process.exit(1)}});import{program as Ot}from"commander";import je from"ora";var ee=Ot.command("build").description("Builds auro components");ee=R(ee);var Yn=ee.action(async t=>{try{let e=je("Initializing...");t.watch?(e.text="Waiting for changes...",e.spinner="bouncingBar",e.color="green"):e.text=t.docs===!1?"Building component (docs disabled)":"Building component",e.start(),await M(t),t.watch||e.succeed("Build completed!")}catch(e){je().fail(`Build failed: ${e.message}`),console.error(e),process.exit(1)}});import{exec as _t}from"node:child_process";import Oe from"node:path";import Bt from"node:process";import{fileURLToPath as It}from"node:url";import Nt from"node:util";import{program as Ft}from"commander";import Mt from"inquirer";var io=Ft.command("migrate").description("Script runner to perform repetitive code change tasks").requiredOption("-i, --id <string>","Select the migration you would like to run by id").option("-m, --multi-gitter","Run the migration on all repositories in the multi-gitter config").action(async t=>{let e=It(import.meta.url),r=Oe.dirname(e),n=Oe.resolve(r,"migrations",t.id);if(t.multiGitter){let o=Nt.promisify(_t);try{await o("command -v multi-gitter")}catch{console.error("multi-gitter is not installed."),Bt.exit(1)}(await Mt.prompt([{type:"confirm",name:"dryRun",message:"Run migration in dry-run mode? 
(no changes will be committed)",default:!0}])).dryRun?h(`multi-gitter run ${n}/script.sh --config "${n}/multi-gitter.yml" --dry-run`):h(`multi-gitter run ${n}/script.sh --config "${n}/multi-gitter.yml"`)}else h(`${n}/script.sh`)});import Vt from"node:process";import{program as Yt}from"commander";import{readFile as Kt,writeFile as Xt}from"node:fs/promises";import{Logger as re}from"@aurodesignsystem/auro-library/scripts/utils/logger.mjs";import Gt from"node:fs/promises";import{Logger as k}from"@aurodesignsystem/auro-library/scripts/utils/logger.mjs";import{processContentForFile as Ut,templateFiller as Lt}from"@aurodesignsystem/auro-library/scripts/utils/sharedFileProcessorUtils.mjs";var te="https://raw.githubusercontent.com/AlaskaAirlines/auro-templates",_e="main",Wt="main",Ht="default",Be={ISSUE_TEMPLATE:["bug_report.yaml","config.yml","feature_request.yaml","general-support.yaml","group.yaml","story.yaml","task.yaml"],workflows:["codeql.yml","publishDemo.yml","testPublish.yml"],_root:["CODEOWNERS","CODE_OF_CONDUCT.md","CONTRIBUTING.md","PULL_REQUEST_TEMPLATE.md","SECURITY.md","settings.yml","stale.yml"]};function zt(t,e){return t.startsWith("v")&&/^\d+\.\d+\.\d+(?<_>-.*)?$/u.test(t.slice(1))?`${te}/refs/tags/${t}/${e}`:t!==_e?`${te}/refs/heads/${t}/${e}`:`${te}/${_e}/${e}`}function qt(t,e,r){let n=zt(e,t);return{identifier:t.split("/").pop(),input:{remoteUrl:n,fileName:r,overwrite:!0},output:r,overwrite:!0}}async function Jt(t){try{await Gt.rm(t,{recursive:!0,force:!0}),k.log(`Successfully removed directory: ${t}`)}catch(e){throw k.error(`Error removing directory ${t}: ${e.message}`),e}}async function Ie(t){t||(k.error("Root directory must be specified"),process.exit(1));let e=".github";try{await Jt(e),k.log(".github directory removed successfully")}catch(o){k.error(`Error removing .github directory: ${o.message}`),process.exit(1)}await Lt.extractNames();let r=[],n=[];for(let o of Object.keys(Be))for(let s of Be[o]){let i=`${o==="_root"?"":`${o}/`}${s}`,l=`${t}/.github/${i}`,c=qt(`templates/${Ht}/.github/${i}`,Wt,l);r.push(c)}if(await Promise.all(r.map(async o=>{try{(await fetch(o.input.remoteUrl,{method:"HEAD"})).ok||n.push(o.input.remoteUrl)}catch{n.push(o.input.remoteUrl)}})),n.length>0){let o=n.map(s=>`File not found: ${s}`).join(`
  `);k.error(`Failed to sync .github directory. Confirm githubDirShape object is up to date:
- ${o}`),process.exit(1)}try{await Promise.all(r.map(o=>Lt(o))),k.log("All files processed.")}catch(o){k.error(`Error processing files: ${o.message}`),process.exit(1)}}var ho=Yt.command("sync").description("Script runner to synchronize local repository configuration files").action(async()=>{re.info("Synchronizing repository configuration files..."),re.warn("Note: sync does not create a new git branch. Changes are added to the current branch.");let t=Vt.cwd();await Ie(t);let e=`${t}/.github/CODEOWNERS`,n=(await Kt(e,{encoding:"utf-8"})).replace(/\r\n/gu,`
+ ${o}`),process.exit(1)}try{await Promise.all(r.map(o=>Ut(o))),k.log("All files processed.")}catch(o){k.error(`Error processing files: ${o.message}`),process.exit(1)}}var yo=Yt.command("sync").description("Script runner to synchronize local repository configuration files").action(async()=>{re.info("Synchronizing repository configuration files..."),re.warn("Note: sync does not create a new git branch. Changes are added to the current branch.");let t=Vt.cwd();await Ie(t);let e=`${t}/.github/CODEOWNERS`,n=(await Kt(e,{encoding:"utf-8"})).replace(/\r\n/gu,`
  `).replace(/\n\n/gu,`
  `);await Xt(e,n,{encoding:"utf-8"}),(n.includes("\r")||n.includes(`
 
@@ -42,24 +42,24 @@ class ${n}WCA extends ${n} {}
  if (!customElements.get(${r})) {
  customElements.define(${r}, ${n}WCA);
  }
- `};var L=D.resolve(process.cwd(),"./scripts/wca");async function er(t){try{return(await Promise.all(t.map(r=>Zt(r)))).flat()}catch(e){throw console.error("Error processing glob patterns:",e),e}}async function tr(t){G.existsSync(L)||await G.promises.mkdir(L,{recursive:!0});for(let e of t){let r=D.resolve(process.cwd(),e),n=await G.promises.readFile(r,"utf-8"),o=D.resolve(L,`${D.basename(e)}`),i=Ne(n,D.relative(L,e));await G.promises.writeFile(o,i)}}async function rr(){let t=await er(["./src/auro-*.js"]);await tr(t)}var Co=Qt.command("wca-setup").description("Set up WCA (Web Component Analyzer) for the project").action(()=>{rr().then(()=>{Fe.success("WCA setup completed successfully.")}).catch(t=>{Fe.error(`WCA setup failed: ${t.message}`)})});import{program as mr}from"commander";import cr from"chalk";import Ue from"ora";import{appendFile as nr,readFile as or}from"node:fs/promises";import{Logger as p}from"@aurodesignsystem/auro-library/scripts/utils/logger.mjs";import{simpleGit as sr}from"simple-git";var g;try{g=sr({baseDir:process.cwd(),binary:"git",maxConcurrentProcesses:1})}catch(t){p.error(`Failed to initialize git: ${t}`),g={}}var U=class t{static async checkGitignore(e){if(e==="")return!1;try{return(await or(".gitignore","utf-8")).includes(e)}catch(r){return p.error(`Error reading file: ${r}`),!1}}static async getCommitMessages(){try{let e=await g.branchLocal();p.info(`Current branch: ${e.current}`);let r="main",n="";if(!!process.env.GITHUB_ACTIONS){p.info("Running in GitHub Actions environment"),r=process.env.GITHUB_BASE_REF||"main";try{await g.fetch("origin",r),p.info(`Fetched target branch: origin/${r}`),n=`${(await g.raw(["merge-base",`origin/${r}`,"HEAD"])).trim()}..HEAD`,p.info(`Using commit range: ${n}`)}catch(i){p.warn(`Error setting up commit range in CI: ${i}`),n=`origin/${r}..HEAD`,p.info(`Falling back to commit range: ${n}`)}}else{p.info("Running in local environment");try{try{await g.raw(["rev-parse","--verify",`origin/${r}`])}catch{p.info(`Fetching ${r} from origin`),await g.fetch("origin",r)}n=`${(await g.raw(["merge-base",`origin/${r}`,e.current])).trim()}..HEAD`,p.info(`Using commit range for PR commits: ${n}`)}catch(i){p.warn(`Error determining PR commits locally: ${i}`),p.info("Falling back to analyzing recent commits"),n="HEAD~10..HEAD",p.info(`Using fallback commit range: ${n}`)}}return await t.getFormattedCommits(n)}catch(e){return p.error(`Error getting commit messages: ${e}`),[]}}static async getFormattedCommits(e){let n=(await g.raw(["log","--pretty=format:COMMIT_START%n%H%n%ad%n%an%n%s%n%b%nCOMMIT_END","--date=short",e])).split(`COMMIT_START
- `).filter(i=>i.trim()!==""),o=[];for(let i of n){let s=i.split(`
- `);if(s.length>=4){let l=s[0],c=s[1],a=s[2],u=s[3],m=s.slice(4).filter(z=>z!=="COMMIT_END"),d=m.length>0?m.join(""):"",y=l.substring(0,7),A=u.match(/^(feat|fix|docs|style|refactor|perf|test|build|ci|chore)(\(.+\))?:/),w=A?A[1]:"unknown";d.includes("BREAKING CHANGE")&&(w="breaking"),o.push({type:w,hash:y,date:c,subject:u,body:d,message:`${u}${d?`
+ `};var U=D.resolve(process.cwd(),"./scripts/wca");async function er(t){try{return(await Promise.all(t.map(r=>Zt(r)))).flat()}catch(e){throw console.error("Error processing glob patterns:",e),e}}async function tr(t){G.existsSync(U)||await G.promises.mkdir(U,{recursive:!0});for(let e of t){let r=D.resolve(process.cwd(),e),n=await G.promises.readFile(r,"utf-8"),o=D.resolve(U,`${D.basename(e)}`),s=Ne(n,D.relative(U,e));await G.promises.writeFile(o,s)}}async function rr(){let t=await er(["./src/auro-*.js"]);await tr(t)}var To=Qt.command("wca-setup").description("Set up WCA (Web Component Analyzer) for the project").action(()=>{rr().then(()=>{Fe.success("WCA setup completed successfully.")}).catch(t=>{Fe.error(`WCA setup failed: ${t.message}`)})});import{program as mr}from"commander";import cr from"chalk";import Le from"ora";import{appendFile as nr,readFile as or}from"node:fs/promises";import{Logger as p}from"@aurodesignsystem/auro-library/scripts/utils/logger.mjs";import{simpleGit as ir}from"simple-git";var g;try{g=ir({baseDir:process.cwd(),binary:"git",maxConcurrentProcesses:1})}catch(t){p.error(`Failed to initialize git: ${t}`),g={}}var L=class t{static async checkGitignore(e){if(e==="")return!1;try{return(await or(".gitignore","utf-8")).includes(e)}catch(r){return p.error(`Error reading file: ${r}`),!1}}static async getCommitMessages(){try{let e=await g.branchLocal();p.info(`Current branch: ${e.current}`);let r="main",n="";if(!!process.env.GITHUB_ACTIONS){p.info("Running in GitHub Actions environment"),r=process.env.GITHUB_BASE_REF||"main";try{await g.fetch("origin",r),p.info(`Fetched target branch: origin/${r}`),n=`${(await g.raw(["merge-base",`origin/${r}`,"HEAD"])).trim()}..HEAD`,p.info(`Using commit range: ${n}`)}catch(s){p.warn(`Error setting up commit range in CI: ${s}`),n=`origin/${r}..HEAD`,p.info(`Falling back to commit range: ${n}`)}}else{p.info("Running in local environment");try{try{await g.raw(["rev-parse","--verify",`origin/${r}`])}catch{p.info(`Fetching ${r} from origin`),await g.fetch("origin",r)}n=`${(await g.raw(["merge-base",`origin/${r}`,e.current])).trim()}..HEAD`,p.info(`Using commit range for PR commits: ${n}`)}catch(s){p.warn(`Error determining PR commits locally: ${s}`),p.info("Falling back to analyzing recent commits"),n="HEAD~10..HEAD",p.info(`Using fallback commit range: ${n}`)}}return await t.getFormattedCommits(n)}catch(e){return p.error(`Error getting commit messages: ${e}`),[]}}static async getFormattedCommits(e){let n=(await g.raw(["log","--pretty=format:COMMIT_START%n%H%n%ad%n%an%n%s%n%b%nCOMMIT_END","--date=short",e])).split(`COMMIT_START
+ `).filter(s=>s.trim()!==""),o=[];for(let s of n){let i=s.split(`
+ `);if(i.length>=4){let l=i[0],c=i[1],a=i[2],u=i[3],m=i.slice(4).filter(z=>z!=="COMMIT_END"),d=m.length>0?m.join(""):"",y=l.substring(0,7),A=u.match(/^(feat|fix|docs|style|refactor|perf|test|build|ci|chore)(\(.+\))?:/),w=A?A[1]:"unknown";d.includes("BREAKING CHANGE")&&(w="breaking"),o.push({type:w,hash:y,date:c,subject:u,body:d,message:`${u}${d?`
 
  ${d}`:""}`,author_name:a})}}return o}static async addToGitignore(e,r=!0){await t.checkGitignore(e).then(async n=>{if(n)p.warn(`${e} already exists`);else try{await nr(".gitignore",`
- ${e}`),r&&p.success(`${e} added to .gitignore`)}catch(o){p.error(o)}})}static async removeFromGitCache(e){try{await g.rmKeepLocal(e),p.success(`${e.join(", ")} are removed from git cache`)}catch(r){p.error(r)}}static async createBranch(e){try{await g.checkoutLocalBranch(e),p.success(`Created and switched to ${e} branch`)}catch(r){p.error(r)}}static async commitStagedFiles(e){try{await g.add("."),await g.commit(e),p.success(`Committed with message: ${e}`)}catch(r){p.error(r)}}};import f from"chalk";var ir=60,ar=100;function P(t){switch(t){case"breaking":return f.bold.red(t);case"feat":return f.bold.green(t);case"fix":return f.bold.green(t);case"perf":return f.bold.green(t);case"docs":return f.bold.cyan(t);case"style":return f.bold.cyan(t);case"refactor":return f.bold.cyan(t);case"test":return f.bold.cyan(t);case"build":return f.bold.cyan(t);case"ci":return f.bold.cyan(t);case"chore":return f.bold.cyan(t);default:return f.bold.white(t)}}function Me(t,e){if(!t)return"";if(t.length<=e)return t;let r=t.split(" "),n="",o="";for(let i of r)(o+i).length>e&&o.length>0&&(n+=`${o.trim()}
- `,o=""),o=`${o}${i} `;return o.length>0&&(n+=o.trim()),n}function Ge(t){for(let e of t){console.log("\u2500".repeat(60));let r=Me(e.subject,ir),n=Me(e.body,ar);console.log(f.bold(`${P(e.type)}`)),console.log(f.dim(`${e.hash} | ${e.date} | ${e.author_name}`)),console.log(f.bold(`${f.white(r)}`)),e.body&&console.log(f.dim(n))}console.log("\u2500".repeat(60)),console.log(`
- `)}import W from"@actions/github";async function ne(){try{let t=process.env.GITHUB_TOKEN;if(!t)throw new Error("GITHUB_TOKEN environment variable is not set");if(!process.env.GITHUB_REPOSITORY||!process.env.GITHUB_EVENT_PATH)throw new Error("This function can only be used in a GitHub Actions environment");let e=W.getOctokit(t),{context:r}=W;if(!r.payload.pull_request)throw new Error("No pull request found in the GitHub context");let[n,o]=process.env.GITHUB_REPOSITORY.split("/"),i=r.payload.pull_request.number,{data:s}=await e.rest.issues.listLabelsOnIssue({owner:n,repo:o,issue_number:i});return s.map(l=>l.name)}catch(t){throw t instanceof Error?new Error(`Failed to get existing labels: ${t.message}`):t}}async function Le(t){try{let e=process.env.GITHUB_TOKEN;if(!e)throw new Error("GITHUB_TOKEN environment variable is not set");if(!process.env.GITHUB_REPOSITORY||!process.env.GITHUB_EVENT_PATH)throw new Error("This function can only be used in a GitHub Actions environment");let r=W.getOctokit(e),{context:n}=W;if(!n.payload.pull_request)throw new Error("No pull request found in the GitHub context");let[o,i]=process.env.GITHUB_REPOSITORY.split("/"),s=n.payload.pull_request.number,l=`semantic-status: ${t}`,c=await ne();if(c.includes(l))return;let a=c.filter(u=>u.startsWith("semantic-status:")&&u!==l);for(let u of a)await r.rest.issues.removeLabel({owner:o,repo:i,issue_number:s,name:u});await r.rest.issues.addLabels({owner:o,repo:i,issue_number:s,labels:[l]});return}catch(e){throw e instanceof Error?new Error(`Failed to apply label: ${e.message}`):e}}async function We(t=!1,e=!1){let r=Ue(`Checking commits...
- `).start();try{let n=await U.getCommitMessages();if(t&&Ge(n),r.succeed(`Total commits analyzed: ${n.length}`),n.length!==0){let o=n.map(l=>l.type),s=Array.from(new Set(o)).map(l=>P(l)).join(", ");r.succeed(`Found commit types: ${s}`)}else r.info(`The list of commits is created by comparing the current branch
+ ${e}`),r&&p.success(`${e} added to .gitignore`)}catch(o){p.error(o)}})}static async removeFromGitCache(e){try{await g.rmKeepLocal(e),p.success(`${e.join(", ")} are removed from git cache`)}catch(r){p.error(r)}}static async createBranch(e){try{await g.checkoutLocalBranch(e),p.success(`Created and switched to ${e} branch`)}catch(r){p.error(r)}}static async commitStagedFiles(e){try{await g.add("."),await g.commit(e),p.success(`Committed with message: ${e}`)}catch(r){p.error(r)}}};import f from"chalk";var sr=60,ar=100;function P(t){switch(t){case"breaking":return f.bold.red(t);case"feat":return f.bold.green(t);case"fix":return f.bold.green(t);case"perf":return f.bold.green(t);case"docs":return f.bold.cyan(t);case"style":return f.bold.cyan(t);case"refactor":return f.bold.cyan(t);case"test":return f.bold.cyan(t);case"build":return f.bold.cyan(t);case"ci":return f.bold.cyan(t);case"chore":return f.bold.cyan(t);default:return f.bold.white(t)}}function Me(t,e){if(!t)return"";if(t.length<=e)return t;let r=t.split(" "),n="",o="";for(let s of r)(o+s).length>e&&o.length>0&&(n+=`${o.trim()}
+ `,o=""),o=`${o}${s} `;return o.length>0&&(n+=o.trim()),n}function Ge(t){for(let e of t){console.log("\u2500".repeat(60));let r=Me(e.subject,sr),n=Me(e.body,ar);console.log(f.bold(`${P(e.type)}`)),console.log(f.dim(`${e.hash} | ${e.date} | ${e.author_name}`)),console.log(f.bold(`${f.white(r)}`)),e.body&&console.log(f.dim(n))}console.log("\u2500".repeat(60)),console.log(`
+ `)}import W from"@actions/github";async function ne(){try{let t=process.env.GITHUB_TOKEN;if(!t)throw new Error("GITHUB_TOKEN environment variable is not set");if(!process.env.GITHUB_REPOSITORY||!process.env.GITHUB_EVENT_PATH)throw new Error("This function can only be used in a GitHub Actions environment");let e=W.getOctokit(t),{context:r}=W;if(!r.payload.pull_request)throw new Error("No pull request found in the GitHub context");let[n,o]=process.env.GITHUB_REPOSITORY.split("/"),s=r.payload.pull_request.number,{data:i}=await e.rest.issues.listLabelsOnIssue({owner:n,repo:o,issue_number:s});return i.map(l=>l.name)}catch(t){throw t instanceof Error?new Error(`Failed to get existing labels: ${t.message}`):t}}async function Ue(t){try{let e=process.env.GITHUB_TOKEN;if(!e)throw new Error("GITHUB_TOKEN environment variable is not set");if(!process.env.GITHUB_REPOSITORY||!process.env.GITHUB_EVENT_PATH)throw new Error("This function can only be used in a GitHub Actions environment");let r=W.getOctokit(e),{context:n}=W;if(!n.payload.pull_request)throw new Error("No pull request found in the GitHub context");let[o,s]=process.env.GITHUB_REPOSITORY.split("/"),i=n.payload.pull_request.number,l=`semantic-status: ${t}`,c=await ne();if(c.includes(l))return;let a=c.filter(u=>u.startsWith("semantic-status:")&&u!==l);for(let u of a)await r.rest.issues.removeLabel({owner:o,repo:s,issue_number:i,name:u});await r.rest.issues.addLabels({owner:o,repo:s,issue_number:i,labels:[l]});return}catch(e){throw e instanceof Error?new Error(`Failed to apply label: ${e.message}`):e}}async function We(t=!1,e=!1){let r=Le(`Checking commits...
+ `).start();try{let n=await L.getCommitMessages();if(t&&Ge(n),r.succeed(`Total commits analyzed: ${n.length}`),n.length!==0){let o=n.map(l=>l.type),i=Array.from(new Set(o)).map(l=>P(l)).join(", ");r.succeed(`Found commit types: ${i}`)}else r.info(`The list of commits is created by comparing the current branch
  with the main branch. If you are on a new branch, please
- make sure to commit some changes before running this command.`);e&&await lr(n,r)}catch(n){r.fail("Error getting commit messages"),console.error(n)}}async function lr(t,e){let r=["breaking","feat","fix","perf","docs","style","refactor","test","build","ci","chore"],n=t.map(s=>s.type).filter(s=>r.includes(s)),o=null,i=Number.POSITIVE_INFINITY;for(let s of n){let l=r.indexOf(s);l<i&&(i=l,o=s)}if(o){let s=Ue("Checking existing labels on pull request...").start();try{if((await ne()).includes(`semantic-status: ${o}`)){s.info(`Label "semantic-status: ${P(o)}" already exists on the pull request.`);return}s.text="Applying label to pull request...",await Le(o),s.succeed(`Label "semantic-status: ${P(o)}" applied to the pull request.`)}catch(l){let c=l instanceof Error?l.message:String(l);s.fail(c)}}else e.warn(cr.yellow("No semantic commit type found to apply as label."))}var Wo=mr.command("check-commits").alias("cc").option("-l, --set-label","Set label on the pull request based on the commit message type").option("-d, --debug","Display detailed commit information for debugging").description("Check commits in the local repository for the types of semantic commit messages made and return the results.").action(async t=>{await We(t.debug,t.setLabel)});import He from"node:fs";import{get as ze}from"node:https";import E from"chalk";import{program as ur}from"commander";import dr from"ora";var Ko=ur.command("pr-release").option("-n, --namespace <package-namespace>","Set namespace of the package release","@aurodesignsystem-dev").option("-p, --pr-number <number>","Set pull request number for the release","0").description("Generate the package version based off of PR number then update the package.json file. Note: this does not publish the package.").action(async t=>{await pr(t)}),pr=async t=>{let{namespace:e,prNumber:r}=t,n=dr("Updating package.json").start();try{let o="package.json",i=JSON.parse(He.readFileSync(o,"utf8"));n.text="Checking npm registry for version information...";let s=`0.0.0-pr${r}`,l=i.name.split("/")[1],c=`${e}/${l}`,a=await fr(s,c,n),u=`${s}.${a}`;i.name=c,i.version=u,n.text="Writing updated package.json...",He.writeFileSync(o,`${JSON.stringify(i,null,2)}
56
- `,"utf8"),n.succeed(`Package.json updated to use ${E.green(u)} and ${E.green(c)}`),process.exit(0)}catch(o){n.fail(`Failed to update package.json: ${o}`),process.exit(1)}},fr=(t,e,r)=>new Promise(n=>{try{let l=function(c){if(c.statusCode!==200){r.info(`Package not found. Status code: ${E.red(c.statusCode)}, defaulting to version 0`),n(0);return}r.text="Processing version information...";let a="";c.on("data",u=>{a+=u}),c.on("end",()=>{try{let u=JSON.parse(a),m=u.versions?Object.keys(u.versions):[];r.text="Calculating next version number...";let d=-1,y=new RegExp(`^${t}\\.(\\d+)$`);for(let A of m){let w=A.match(y);if(w){let z=Number.parseInt(w[1],10);d=Math.max(d,z)}}d>=0?r.info(`Found existing version ${E.green(`${t}.${d}`)}. Incrementing to ${E.green(`${t}.${d+1}`)}`):r.info(`No existing version found for ${E.green(t)}. Starting with ${E.green(`${t}.0`)}`),n(d+1)}catch(u){r.warn(`Failed to parse NPM registry response: ${u instanceof Error?u.message:"Unknown error"}, defaulting to version 0`),n(0)}})};var o=l;let i=`https://registry.npmjs.org/${e}`,s=ze(i,{headers:{Accept:"application/json"}},c=>{if((c.statusCode===301||c.statusCode===302)&&c.headers.location){r.info(`Following redirect to ${c.headers.location}...`);try{ze(c.headers.location,{headers:{Accept:"application/json"}},l).on("error",a=>{r.warn(`Error following redirect: ${a.message}, defaulting to version 0`),n(0)}).end()}catch(a){r.warn(`Redirect request failed: ${a instanceof Error?a.message:"Unknown error"}, defaulting to version 0`),n(0)}return}l(c)});s.on("error",c=>{r.warn(`Request error: ${c.message}, defaulting to version 0`),n(0)}),s.end()}catch{r.warn("Error checking version in npm registry, defaulting to version 0"),n(0)}});import oe from"node:path";import{fileURLToPath as gr}from"node:url";import{program as hr}from"commander";import yr from"open";var wr=gr(import.meta.url),br=oe.resolve(oe.dirname(wr),".."),ns=hr.command("test").option("-w, --watch","Set watch number for the test").option("-c, --coverage-report","Generate coverage report").option("-o, --open","Open the coverage report in the browser").option("-f, --files <String|String[]>","Test files glob pattern").description("Run the web test runner to test the component library").action(async t=>{let r=`npx wtr --config "${oe.join(br,"dist","configs","web-test-runner.config.mjs")}"`,n=`${process.cwd()}/coverage/index.html`;if(t.coverageReport&&(r+=" --coverage"),t.watch&&(r+=" --watch"),t.files){let o=Array.isArray(t.files)?t.files.join(" "):t.files;r+=` --files "${o}"`}h(r),t.open&&await yr(n)});import le from"node:fs/promises";import Xe from"node:path";import{program as Dr}from"commander";import Pr from"inquirer";import me from"ora";import kr from"node:fs/promises";import Ar from"node:path";import $r from"ora";var 
Er={"auth-type":"workspace-token","author-email":null,"author-name":null,"base-branch":"main","base-url":null,"clone-dir":".gitter-temp","code-search":null,concurrent:4,"conflict-strategy":"replace",draft:!1,"dry-run":!0,"fetch-depth":1,fork:!1,"fork-owner":null,"git-type":"go",group:null,"include-subgroups":!1,insecure:!1,interactive:!1,labels:null,"log-file":"'-'","log-format":"'text'","log-level":"'error'","max-reviewers":0,"max-team-reviewers":0,org:null,output:"'-'","plain-output":!1,platform:"github",project:null,"push-only":!1,repo:["AlaskaAirlines/auro-accordion","AlaskaAirlines/auro-alert","AlaskaAirlines/auro-avatar","AlaskaAirlines/auro-background","AlaskaAirlines/auro-backtotop","AlaskaAirlines/auro-button","AlaskaAirlines/auro-badge","AlaskaAirlines/auro-banner","AlaskaAirlines/auro-card","AlaskaAirlines/auro-carousel","AlaskaAirlines/auro-datetime","AlaskaAirlines/auro-dialog","AlaskaAirlines/auro-drawer","AlaskaAirlines/auro-flight","AlaskaAirlines/auro-flightline","AlaskaAirlines/auro-header","AlaskaAirlines/auro-hyperlink","AlaskaAirlines/auro-icon","AlaskaAirlines/auro-loader","AlaskaAirlines/auro-lockup","AlaskaAirlines/auro-nav","AlaskaAirlines/auro-pane","AlaskaAirlines/auro-popover","AlaskaAirlines/auro-sidenav","AlaskaAirlines/auro-skeleton","AlaskaAirlines/auro-slideshow","AlaskaAirlines/auro-table","AlaskaAirlines/auro-tabs","AlaskaAirlines/auro-toast"],"repo-exclude":null,"repo-include":null,"repo-search":null,reviewers:null,"skip-forks":!1,"skip-pr":!1,"skip-repo":null,"ssh-auth":!1,"team-reviewers":null};function xr(t){return Object.entries(t).map(([e,r])=>Array.isArray(r)?`${e}:
55
+ make sure to commit some changes before running this command.`);e&&await lr(n,r)}catch(n){r.fail("Error getting commit messages"),console.error(n)}}async function lr(t,e){let r=["breaking","feat","fix","perf","docs","style","refactor","test","build","ci","chore"],n=t.map(i=>i.type).filter(i=>r.includes(i)),o=null,s=Number.POSITIVE_INFINITY;for(let i of n){let l=r.indexOf(i);l<s&&(s=l,o=i)}if(o){let i=Le("Checking existing labels on pull request...").start();try{if((await ne()).includes(`semantic-status: ${o}`)){i.info(`Label "semantic-status: ${P(o)}" already exists on the pull request.`);return}i.text="Applying label to pull request...",await Ue(o),i.succeed(`Label "semantic-status: ${P(o)}" applied to the pull request.`)}catch(l){let c=l instanceof Error?l.message:String(l);i.fail(c)}}else e.warn(cr.yellow("No semantic commit type found to apply as label."))}var Ho=mr.command("check-commits").alias("cc").option("-l, --set-label","Set label on the pull request based on the commit message type").option("-d, --debug","Display detailed commit information for debugging").description("Check commits in the local repository for the types of semantic commit messages made and return the results.").action(async t=>{await We(t.debug,t.setLabel)});import He from"node:fs";import{get as ze}from"node:https";import E from"chalk";import{program as ur}from"commander";import dr from"ora";var Xo=ur.command("pr-release").option("-n, --namespace <package-namespace>","Set namespace of the package release","@aurodesignsystem-dev").option("-p, --pr-number <number>","Set pull request number for the release","0").description("Generate the package version based off of PR number then update the package.json file. Note: this does not publish the package.").action(async t=>{await pr(t)}),pr=async t=>{let{namespace:e,prNumber:r}=t,n=dr("Updating package.json").start();try{let o="package.json",s=JSON.parse(He.readFileSync(o,"utf8"));n.text="Checking npm registry for version information...";let i=`0.0.0-pr${r}`,l=s.name.split("/")[1],c=`${e}/${l}`,a=await fr(i,c,n),u=`${i}.${a}`;s.name=c,s.version=u,n.text="Writing updated package.json...",He.writeFileSync(o,`${JSON.stringify(s,null,2)}
56
+ `,"utf8"),n.succeed(`Package.json updated to use ${E.green(u)} and ${E.green(c)}`),process.exit(0)}catch(o){n.fail(`Failed to update package.json: ${o}`),process.exit(1)}},fr=(t,e,r)=>new Promise(n=>{try{let l=function(c){if(c.statusCode!==200){r.info(`Package not found. Status code: ${E.red(c.statusCode)}, defaulting to version 0`),n(0);return}r.text="Processing version information...";let a="";c.on("data",u=>{a+=u}),c.on("end",()=>{try{let u=JSON.parse(a),m=u.versions?Object.keys(u.versions):[];r.text="Calculating next version number...";let d=-1,y=new RegExp(`^${t}\\.(\\d+)$`);for(let A of m){let w=A.match(y);if(w){let z=Number.parseInt(w[1],10);d=Math.max(d,z)}}d>=0?r.info(`Found existing version ${E.green(`${t}.${d}`)}. Incrementing to ${E.green(`${t}.${d+1}`)}`):r.info(`No existing version found for ${E.green(t)}. Starting with ${E.green(`${t}.0`)}`),n(d+1)}catch(u){r.warn(`Failed to parse NPM registry response: ${u instanceof Error?u.message:"Unknown error"}, defaulting to version 0`),n(0)}})};var o=l;let s=`https://registry.npmjs.org/${e}`,i=ze(s,{headers:{Accept:"application/json"}},c=>{if((c.statusCode===301||c.statusCode===302)&&c.headers.location){r.info(`Following redirect to ${c.headers.location}...`);try{ze(c.headers.location,{headers:{Accept:"application/json"}},l).on("error",a=>{r.warn(`Error following redirect: ${a.message}, defaulting to version 0`),n(0)}).end()}catch(a){r.warn(`Redirect request failed: ${a instanceof Error?a.message:"Unknown error"}, defaulting to version 0`),n(0)}return}l(c)});i.on("error",c=>{r.warn(`Request error: ${c.message}, defaulting to version 0`),n(0)}),i.end()}catch{r.warn("Error checking version in npm registry, defaulting to version 0"),n(0)}});import oe from"node:path";import{fileURLToPath as gr}from"node:url";import{program as hr}from"commander";import yr from"open";var wr=gr(import.meta.url),br=oe.resolve(oe.dirname(wr),".."),oi=hr.command("test").option("-w, --watch","Set watch number for the test").option("-c, --coverage-report","Generate coverage report").option("-o, --open","Open the coverage report in the browser").option("-f, --files <String|String[]>","Test files glob pattern").description("Run the web test runner to test the component library").action(async t=>{let r=`npx wtr --config "${oe.join(br,"dist","configs","web-test-runner.config.mjs")}"`,n=`${process.cwd()}/coverage/index.html`;if(t.coverageReport&&(r+=" --coverage"),t.watch&&(r+=" --watch"),t.files){let o=Array.isArray(t.files)?t.files.join(" "):t.files;r+=` --files "${o}"`}h(r),t.open&&await yr(n)});import le from"node:fs/promises";import Xe from"node:path";import{program as Pr}from"commander";import Sr from"inquirer";import me from"ora";import kr from"node:fs/promises";import Ar from"node:path";import $r from"ora";var 
Er={"auth-type":"workspace-token","author-email":null,"author-name":null,"base-branch":"main","base-url":null,"clone-dir":".gitter-temp","code-search":null,concurrent:4,"conflict-strategy":"replace",draft:!1,"dry-run":!0,"fetch-depth":1,fork:!1,"fork-owner":null,"git-type":"go",group:null,"include-subgroups":!1,insecure:!1,interactive:!1,labels:null,"log-file":"'-'","log-format":"'text'","log-level":"'error'","max-reviewers":0,"max-team-reviewers":0,org:null,output:"'-'","plain-output":!1,platform:"github",project:null,"push-only":!1,repo:["AlaskaAirlines/auro-accordion","AlaskaAirlines/auro-alert","AlaskaAirlines/auro-avatar","AlaskaAirlines/auro-background","AlaskaAirlines/auro-backtotop","AlaskaAirlines/auro-button","AlaskaAirlines/auro-badge","AlaskaAirlines/auro-banner","AlaskaAirlines/auro-card","AlaskaAirlines/auro-carousel","AlaskaAirlines/auro-datetime","AlaskaAirlines/auro-dialog","AlaskaAirlines/auro-drawer","AlaskaAirlines/auro-flight","AlaskaAirlines/auro-flightline","AlaskaAirlines/auro-header","AlaskaAirlines/auro-hyperlink","AlaskaAirlines/auro-icon","AlaskaAirlines/auro-loader","AlaskaAirlines/auro-lockup","AlaskaAirlines/auro-nav","AlaskaAirlines/auro-pane","AlaskaAirlines/auro-popover","AlaskaAirlines/auro-sidenav","AlaskaAirlines/auro-skeleton","AlaskaAirlines/auro-slideshow","AlaskaAirlines/auro-table","AlaskaAirlines/auro-tabs","AlaskaAirlines/auro-toast"],"repo-exclude":null,"repo-include":null,"repo-search":null,reviewers:null,"skip-forks":!1,"skip-pr":!1,"skip-repo":null,"ssh-auth":!1,"team-reviewers":null};function xr(t){return Object.entries(t).map(([e,r])=>Array.isArray(r)?`${e}:
57
57
  - ${r.join(`
58
58
  - `)}`:typeof r=="object"&&r!==null?`${e}:
59
59
  ${Object.entries(r).map(([n,o])=>` ${n}: ${o}`).join(`
60
60
  `)}`:`${e}: ${r}`).join(`
61
- `)}async function qe(t){let e=$r("Writing multi-gitter configuration...").start(),r=xr(Er),n=Ar.join(t,"multi-gitter_DEPENDENCY_TREE.yml");try{await kr.writeFile(n,r,"utf8"),e.succeed(`Multi-gitter configuration written to ${n}`)}catch(o){e.fail("Error writing multi-gitter configuration:"),console.error(o)}}import se from"node:fs";function Je(t){let e={},r=[],n=[],o=[];for(let i in t)e[i]=t[i].dependsOn.length;for(let i in e)e[i]===0&&o.push(i);for(;o.length>0;){n=[];let i=o.length;for(let s=0;s<i;s++){let l=o.shift();n.push(l);for(let c of t[l].dependentPackages)e[c]--,e[c]===0&&o.push(c)}r.push(n)}if(r.flat().length!==Object.keys(t).length)throw new Error("Circular dependency detected!");return r}function Cr(t){return se.readdirSync(t).filter(e=>e.endsWith(".json"))}async function Ve(t,e=[]){console.log(e);let r={},n=Cr(t);for(let o of n){if(o==="dependencyTree.json")continue;let i=se.readFileSync(`${t}/${o}`,"utf-8"),s=JSON.parse(i),l=s.name,c=Object.keys(s.peerDependencies),a=Object.keys(s.devDependencies),u=Object.keys(s.dependencies);r[l]||(r[l]={dependsOn:[],dependentPackages:[]});let m=[...c,...a,...u];r[l].dependsOn=[...new Set(m)];for(let d of m)r[d]||(r[d]={dependsOn:[],dependentPackages:[]}),r[d].dependentPackages.includes(l)||r[d].dependentPackages.push(l)}if(e.length){let o=new Set;for(let[s,l]of Object.entries(r))l.dependsOn.some(c=>e.includes(c))&&o.add(s);for(let s of e)r[s]&&o.add(s);let i={};for(let s of o)i[s]={dependsOn:r[s].dependsOn.filter(l=>o.has(l)),dependentPackages:r[s].dependentPackages.filter(l=>o.has(l))};r=i}else console.log("No target dependencies provided - using all packages.");return se.writeFileSync(`${t}/dependencyTree.json`,JSON.stringify(r,null,2)),r}import H from"node:path";import Ye from"node:process";import{fileURLToPath as Tr}from"node:url";function vr(){let t=Ye.env.HOME||Ye.env.USERPROFILE;return H.join(t,".auro")}function ie(...t){return H.join(vr(),...t)}function Ke(...t){let e=Tr(import.meta.url),r=H.dirname(e);return H.resolve(r,...t)}var ae=ie("run-migrations","config"),ce=ie("run-migrations","outputs");var Qe=["@aurodesignsystem/auro-accordion","@aurodesignsystem/auro-alert","@aurodesignsystem/auro-avatar","@aurodesignsystem/auro-background","@aurodesignsystem/auro-backtotop","@aurodesignsystem/auro-button","@aurodesignsystem/auro-badge","@aurodesignsystem/auro-banner","@aurodesignsystem/auro-card","@aurodesignsystem/auro-carousel","@aurodesignsystem/auro-datetime","@aurodesignsystem/auro-dialog","@aurodesignsystem/auro-drawer","@aurodesignsystem/auro-formkit","@aurodesignsystem/auro-flight","@aurodesignsystem/auro-flightline","@aurodesignsystem/auro-header","@aurodesignsystem/auro-hyperlink","@aurodesignsystem/auro-icon","@aurodesignsystem/auro-loader","@aurodesignsystem/auro-lockup","@aurodesignsystem/auro-nav","@aurodesignsystem/auro-pane","@aurodesignsystem/auro-popover","@aurodesignsystem/auro-sidenav","@aurodesignsystem/auro-skeleton","@aurodesignsystem/auro-slideshow","@aurodesignsystem/auro-table","@aurodesignsystem/auro-tabs","@aurodesignsystem/auro-toast"],xs=[...Qe,"@aurodesignsystem/auro-library","@aurodesignsystem/WebCoreStyleSheets","@aurodesignsystem/AuroDesignTokens","@aurodesignsystem/auro-cli","@alaskaairux/icons"];async function Sr(t){try{await le.mkdir(ce,{recursive:!0}),await le.mkdir(ae,{recursive:!0})}catch(o){console.error("Failed to create output or config directories:",o),process.exit(1)}let e=me("Creating dependency tree...").start();e.text="Creating multi-gitter dependency tree configuration...",await 
qe(ae),e.text="Scraping dependencies from Auro packages...";let n=`multi-gitter run "node ${Ke("static","getAuroDeps.js")}" --config ${Xe.join(ae,"multi-gitter_DEPENDENCY_TREE.yml")}`;try{await h(n)}catch(o){e.fail("Failed to generate dependency tree:"),console.error(o),process.exit(1)}return e.text="Generating dependency tree JSON file using packages...",await Ve(ce,t),e.succeed("Dependency tree generated successfully."),Xe.join(ce,"dependencyTree.json")}var Rr=async t=>{let e=me("Loading dependency tree...").start(),r=JSON.parse(await le.readFile(t,"utf-8"));e.text="Processing dependency tree...";let n=Je(r);return e.succeed("Dependency batches created successfully."),n},Cs=Dr.command("agent").action(async t=>{let e=await Pr.prompt([{type:"select",name:"agentAction",message:"What agent action do you want to perform?",choices:[{name:"Run a migration on auro components",value:"run-migration"}],default:["run-migration"]},{type:"input",name:"migrationId",message:"What migration id do you want to run?",when:r=>r.agentAction==="run-migration",validate:r=>r.trim()!==""||"Migration id cannot be empty."},{type:"confirm",name:"useExisting",message:"Would you like to specify starting packages?",default:!0,transformer:r=>r?"Yes = Packages related to selections":"No = All packages",when:r=>r.agentAction==="run-migration"},{type:"checkbox",name:"startWithComponents",message:"Enter the components to start with (comma-separated, blank for all):",choices:Qe.map(r=>({name:r.replace("@aurodesignsystem/",""),value:r})),when:r=>r.agentAction==="run-migration"&&r.useExisting}]);switch(e.agentAction){case"run-migration":{let r=me("Running migration...").start(),n=await Sr(e.startWithComponents);r.text="Getting dependency batches from tree...";let i=(await Rr(n)).map((s,l)=>`Batch ${l+1}
62
- ${s.map(c=>` - ${c.replace("@aurodesignsystem","AlaskaAirlines").replace("@alaskaairux/icons","AlaskaAirlines/Icons")}`).join(`
61
+ `)}async function qe(t){let e=$r("Writing multi-gitter configuration...").start(),r=xr(Er),n=Ar.join(t,"multi-gitter_DEPENDENCY_TREE.yml");try{await kr.writeFile(n,r,"utf8"),e.succeed(`Multi-gitter configuration written to ${n}`)}catch(o){e.fail("Error writing multi-gitter configuration:"),console.error(o)}}import ie from"node:fs";function Je(t){let e={},r=[],n=[],o=[];for(let s in t)e[s]=t[s].dependsOn.length;for(let s in e)e[s]===0&&o.push(s);for(;o.length>0;){n=[];let s=o.length;for(let i=0;i<s;i++){let l=o.shift();n.push(l);for(let c of t[l].dependentPackages)e[c]--,e[c]===0&&o.push(c)}r.push(n)}if(r.flat().length!==Object.keys(t).length)throw new Error("Circular dependency detected!");return r}function Cr(t){return ie.readdirSync(t).filter(e=>e.endsWith(".json"))}async function Ve(t,e=[]){console.log(e);let r={},n=Cr(t);for(let o of n){if(o==="dependencyTree.json")continue;let s=ie.readFileSync(`${t}/${o}`,"utf-8"),i=JSON.parse(s),l=i.name,c=Object.keys(i.peerDependencies),a=Object.keys(i.devDependencies),u=Object.keys(i.dependencies);r[l]||(r[l]={dependsOn:[],dependentPackages:[]});let m=[...c,...a,...u];r[l].dependsOn=[...new Set(m)];for(let d of m)r[d]||(r[d]={dependsOn:[],dependentPackages:[]}),r[d].dependentPackages.includes(l)||r[d].dependentPackages.push(l)}if(e.length){let o=new Set;for(let[i,l]of Object.entries(r))l.dependsOn.some(c=>e.includes(c))&&o.add(i);for(let i of e)r[i]&&o.add(i);let s={};for(let i of o)s[i]={dependsOn:r[i].dependsOn.filter(l=>o.has(l)),dependentPackages:r[i].dependentPackages.filter(l=>o.has(l))};r=s}else console.log("No target dependencies provided - using all packages.");return ie.writeFileSync(`${t}/dependencyTree.json`,JSON.stringify(r,null,2)),r}import Tr from"node:os";import H from"node:path";import Ye from"node:process";import{fileURLToPath as vr}from"node:url";function Dr(){let t=Tr.homedir()||Ye.env.HOME||Ye.env.USERPROFILE;if(!t)throw new Error("Unable to determine user home directory");return H.join(t,".auro")}function se(...t){return H.join(Dr(),...t)}function Ke(...t){let e=vr(import.meta.url),r=H.dirname(e);return H.resolve(r,...t)}var ae=se("run-migrations","config"),ce=se("run-migrations","outputs");var Qe=["@aurodesignsystem/auro-accordion","@aurodesignsystem/auro-alert","@aurodesignsystem/auro-avatar","@aurodesignsystem/auro-background","@aurodesignsystem/auro-backtotop","@aurodesignsystem/auro-button","@aurodesignsystem/auro-badge","@aurodesignsystem/auro-banner","@aurodesignsystem/auro-card","@aurodesignsystem/auro-carousel","@aurodesignsystem/auro-datetime","@aurodesignsystem/auro-dialog","@aurodesignsystem/auro-drawer","@aurodesignsystem/auro-formkit","@aurodesignsystem/auro-flight","@aurodesignsystem/auro-flightline","@aurodesignsystem/auro-header","@aurodesignsystem/auro-hyperlink","@aurodesignsystem/auro-icon","@aurodesignsystem/auro-loader","@aurodesignsystem/auro-lockup","@aurodesignsystem/auro-nav","@aurodesignsystem/auro-pane","@aurodesignsystem/auro-popover","@aurodesignsystem/auro-sidenav","@aurodesignsystem/auro-skeleton","@aurodesignsystem/auro-slideshow","@aurodesignsystem/auro-table","@aurodesignsystem/auro-tabs","@aurodesignsystem/auro-toast"],Ti=[...Qe,"@aurodesignsystem/auro-library","@aurodesignsystem/WebCoreStyleSheets","@aurodesignsystem/AuroDesignTokens","@aurodesignsystem/auro-cli","@alaskaairux/icons"];async function Rr(t){try{await le.mkdir(ce,{recursive:!0}),await le.mkdir(ae,{recursive:!0})}catch(o){console.error("Failed to create output or config directories:",o),process.exit(1)}let e=me("Creating 
dependency tree...").start();e.text="Creating multi-gitter dependency tree configuration...",await qe(ae),e.text="Scraping dependencies from Auro packages...";let n=`multi-gitter run "node ${Ke("static","getAuroDeps.js")}" --config ${Xe.join(ae,"multi-gitter_DEPENDENCY_TREE.yml")}`;try{await h(n)}catch(o){e.fail("Failed to generate dependency tree:"),console.error(o),process.exit(1)}return e.text="Generating dependency tree JSON file using packages...",await Ve(ce,t),e.succeed("Dependency tree generated successfully."),Xe.join(ce,"dependencyTree.json")}var jr=async t=>{let e=me("Loading dependency tree...").start(),r=JSON.parse(await le.readFile(t,"utf-8"));e.text="Processing dependency tree...";let n=Je(r);return e.succeed("Dependency batches created successfully."),n},vi=Pr.command("agent").action(async t=>{let e=await Sr.prompt([{type:"select",name:"agentAction",message:"What agent action do you want to perform?",choices:[{name:"Run a migration on auro components",value:"run-migration"}],default:["run-migration"]},{type:"input",name:"migrationId",message:"What migration id do you want to run?",when:r=>r.agentAction==="run-migration",validate:r=>r.trim()!==""||"Migration id cannot be empty."},{type:"confirm",name:"useExisting",message:"Would you like to specify starting packages?",default:!0,transformer:r=>r?"Yes = Packages related to selections":"No = All packages",when:r=>r.agentAction==="run-migration"},{type:"checkbox",name:"startWithComponents",message:"Enter the components to start with (comma-separated, blank for all):",choices:Qe.map(r=>({name:r.replace("@aurodesignsystem/",""),value:r})),when:r=>r.agentAction==="run-migration"&&r.useExisting}]);switch(e.agentAction){case"run-migration":{let r=me("Running migration...").start(),n=await Rr(e.startWithComponents);r.text="Getting dependency batches from tree...";let s=(await jr(n)).map((i,l)=>`Batch ${l+1}
62
+ ${i.map(c=>` - ${c.replace("@aurodesignsystem","AlaskaAirlines").replace("@alaskaairux/icons","AlaskaAirlines/Icons")}`).join(`
63
63
  `)}`).join(`
64
64
 
65
- `);console.log(i),r.text="Running migrations on dependency batches...",new Promise(s=>setTimeout(s,2e3)),r.succeed("Migration process completed successfully.");break}default:console.error("Unknown action selected.")}});import{program as jr}from"commander";var Ps=jr.command("docs").description("Generate API documentation").option("-c, --cem","Generate Custom Elements Manifest (CEM) file",!1).action(async t=>{t.cem&&await O(),await _()});ue.name("auro").version(q()).description("A cli tool to support the Auro Design System");ue.addHelpText("beforeAll",de());ue.parse();
65
+ `);console.log(s),r.text="Running migrations on dependency batches...",new Promise(i=>setTimeout(i,2e3)),r.succeed("Migration process completed successfully.");break}default:console.error("Unknown action selected.")}});import{program as Or}from"commander";var Ri=Or.command("docs").description("Generate API documentation").option("-c, --cem","Generate Custom Elements Manifest (CEM) file",!1).action(async t=>{t.cem&&await O(),await _()});ue.name("auro").version(q()).description("A cli tool to support the Auro Design System");ue.addHelpText("beforeAll",de());ue.parse();
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "version": 3,
3
3
  "sources": ["../src/index.ts", "../src/utils/auroSplash.js", "../src/utils/packageVersion.js", "../src/commands/dev.js", "../src/commands/_sharedOptions.js", "../src/scripts/build/index.js", "../src/scripts/build/bundleHandlers.js", "../src/scripts/docs/index.ts", "../src/utils/shell.js", "../src/scripts/docs/docs-generator.ts", "../src/scripts/analyze.js", "../src/scripts/build/defaultDocsBuild.js", "../src/scripts/build/configUtils.js", "../src/scripts/build/plugins.js", "../src/scripts/build/devServerUtils.js", "../src/scripts/build/watchModeHandlers.js", "../src/commands/build.js", "../src/commands/migrate.js", "../src/commands/sync.js", "../src/scripts/syncDotGithubDir.js", "../src/commands/wca-setup.js", "../src/scripts/prepWcaCompatibleCode.mjs", "../src/commands/check-commits.ts", "../src/scripts/check-commits/commit-analyzer.ts", "../src/utils/gitUtils.ts", "../src/scripts/check-commits/display-utils.ts", "../src/scripts/check-commits/github-labels.ts", "../src/commands/pr-release.ts", "../src/commands/test.ts", "../src/commands/agent.ts", "../src/scripts/agent/run-migrations/writeMultiGitterConfig.js", "../src/scripts/formatDependencyTree.ts", "../src/utils/pathUtils.js", "../src/commands/docs.ts"],
4
- "sourcesContent": ["import { program } from \"commander\";\nimport auroSplash from \"#utils/auroSplash.js\";\nimport getPackageVersion from \"#utils/packageVersion.js\";\n\n// Register commands (importing them will register them)\nimport \"#commands/dev.js\";\nimport \"#commands/build.js\";\nimport \"#commands/migrate.js\";\nimport \"#commands/sync.js\";\nimport \"#commands/wca-setup.js\";\nimport \"#commands/check-commits.ts\";\nimport \"#commands/pr-release.ts\";\nimport \"#commands/test.js\";\nimport \"#commands/agent.ts\";\nimport \"#commands/docs.ts\";\n\nprogram\n .name(\"auro\")\n .version(getPackageVersion())\n .description(\"A cli tool to support the Auro Design System\");\n\nprogram.addHelpText(\"beforeAll\", auroSplash());\n\nprogram.parse();\n", "import figlet from \"figlet\";\nimport { mind } from \"gradient-string\";\n\nexport default () => {\n return mind(figlet.textSync(\"Auro CLI\"));\n};\n", "/* eslint-disable no-underscore-dangle, no-undef */\nimport fs from \"node:fs\";\nimport path from \"node:path\";\nimport { fileURLToPath } from \"node:url\";\n\n/**\n * Simple debug logger that only prints when DEBUG environment variable is set.\n * @param {string} message - The message to log.\n */\nfunction debugLog(message) {\n if (process.env.DEBUG) {\n console.log(`[DEBUG] ${message}`);\n }\n}\n\n/**\n * Retrieves the version from the package.json file.\n * @returns {string} The version from package.json.\n */\nexport default function getPackageVersion() {\n try {\n // Get the directory path of the current module\n const __filename = fileURLToPath(import.meta.url);\n const __dirname = path.dirname(__filename);\n debugLog(`Current module path: ${__dirname}`);\n\n // Standard installed module location - current directory\n const packagePath = path.resolve(__dirname, \"..\", \"package.json\");\n\n debugLog(`Checking package.json at: ${packagePath}`);\n if (fs.existsSync(packagePath)) {\n debugLog(`Found package.json at: ${packagePath}`);\n const packageJson = JSON.parse(fs.readFileSync(packagePath, \"utf8\"));\n return packageJson.version;\n }\n\n // Fallback to a default version if we can't find the package.json\n debugLog(\n \"Could not find package.json in the standard installed module location, using default version\",\n );\n return \"0.0.0\";\n } catch (error) {\n console.error(\"Error retrieving package version:\", error);\n return \"0.0.0\";\n }\n}\n", "import { program } from \"commander\";\nimport ora from \"ora\";\nimport {\n withBuildOptions,\n withServerOptions,\n} from \"#commands/_sharedOptions.js\";\nimport { buildWithRollup } from \"#scripts/build/index.js\";\n\nlet devCommand = program\n .command(\"dev\")\n .description(\"Runs development server for auro components\");\n\ndevCommand = withBuildOptions(devCommand);\ndevCommand = withServerOptions(devCommand);\n\nexport default devCommand.action(async (options) => {\n try {\n const build = ora(\"Initializing...\");\n\n if (options.watch) {\n build.text = \"Waiting for changes...\";\n build.spinner = \"bouncingBar\";\n build.color = \"green\";\n } else {\n build.text =\n options.docs === false\n ? 
\"Building component (docs disabled)\"\n : \"Building component\";\n }\n\n build.start();\n\n if (!options.watch) {\n build.succeed(\"Build completed!\");\n }\n\n await buildWithRollup({ ...options, dev: true, watch: options.watch });\n } catch (error) {\n // If there's any active spinner, we need to fail it\n ora().fail(`Build failed: ${error.message}`);\n console.error(error);\n process.exit(1);\n }\n});\n", "/**\n * @param {import('commander').Command} command\n * @returns {import('commander').Command}\n */\nexport function withBuildOptions(command) {\n return command\n .option(\"-m, --module-paths [paths...]\", \"Path(s) to node_modules folder\")\n .option(\"-w, --watch\", \"Watches for changes\")\n .option(\"--skip-docs\", \"Skip documentation generation\", false)\n .option(\n \"--wca-input [files...]\",\n \"Source file(s) to analyze for API documentation\",\n )\n .option(\"--wca-output [files...]\", \"Output file(s) for API documentation\");\n}\n\n/**\n * @param {import('commander').Command} command\n */\nexport function withServerOptions(command) {\n return command\n .option(\"-s, --serve\", \"Starts a server\")\n .option(\"-p, --port <number>\", \"Port for the server\")\n .option(\"-o, --open\", \"Open the browser after starting the server\");\n}\n", "import terser from \"@rollup/plugin-terser\";\nimport { watch } from \"rollup\";\nimport {\n buildCombinedBundle,\n buildTypeDefinitions,\n cleanupDist,\n generateDocs,\n} from \"./bundleHandlers.js\";\nimport {\n getDemoConfig,\n getDtsConfig,\n getMainBundleConfig,\n} from \"./configUtils.js\";\nimport { startDevelopmentServer } from \"./devServerUtils.js\";\nimport {\n handleWatcherEvents,\n setupWatchModeListeners,\n} from \"./watchModeHandlers.js\";\n\n/**\n * Run a production build once\n * @param {object} options - Build options\n * @returns {Promise<void>}\n */\nasync function runProductionBuild(options) {\n const mainBundleConfig = getMainBundleConfig(options);\n const demoConfig = getDemoConfig(options);\n const dtsConfig = getDtsConfig();\n\n // Add terser for minification in production\n mainBundleConfig.config.plugins.push(terser());\n\n // Generate docs if enabled\n await generateDocs(options);\n\n // Build main and demo bundles\n await buildCombinedBundle(mainBundleConfig.config, demoConfig.config);\n\n // Build TypeScript definitions\n await buildTypeDefinitions(dtsConfig.config, dtsConfig.config.output);\n}\n\n/**\n * Set up watch mode for development\n * @param {object} options - Build options\n * @returns {Promise<object>} - Rollup watcher\n */\nasync function setupWatchMode(options) {\n const { dev: isDevMode } = options;\n const mainBundleConfig = getMainBundleConfig({ ...options, watch: true });\n const demoConfig = getDemoConfig({ ...options, watch: true });\n\n // Create and configure the watcher\n const watcher = watch([mainBundleConfig.config, demoConfig.config]);\n\n // Set up watcher event handlers\n handleWatcherEvents(\n watcher,\n options,\n isDevMode ? 
async () => startDevelopmentServer(options) : undefined,\n );\n\n // Set up clean shutdown\n setupWatchModeListeners(watcher);\n\n return watcher;\n}\n\n/**\n * Build the component using Rollup with the provided options\n * @param {object} options - Build configuration options\n * @param {boolean} [options.dev=false] - Whether to run in development mode\n * @param {boolean} [options.watch] - Whether to run in watch mode (defaults to value of dev)\n * @param {boolean} [options.docs=true] - Whether to generate documentation\n * @returns {Promise<object|void>} - Rollup watcher if in watch mode\n */\nexport async function buildWithRollup(options = {}) {\n try {\n const { watch } = options;\n\n // Clean output directory\n cleanupDist();\n\n // Run production build once or set up watch mode\n // Only use watch mode if explicitly enabled\n if (watch) {\n return await setupWatchMode(options);\n }\n\n return await runProductionBuild(options);\n } catch (error) {\n throw new Error(`Build failed: ${error.message}`);\n }\n}\n\n// Re-export utilities for backward compatibility\nexport { cleanupDist };\n", "import { rmSync } from \"node:fs\";\nimport { join } from \"node:path\";\nimport ora from \"ora\";\nimport { rollup } from \"rollup\";\nimport { analyzeComponents } from \"#scripts/analyze.js\";\nimport { runDefaultDocsBuild } from \"#scripts/build/defaultDocsBuild.js\";\n\n/**\n * Clean up the dist folder\n * @returns {boolean} Success status\n */\nexport function cleanupDist() {\n const distPath = join(\"./dist\");\n const spinner = ora(\"Cleaning dist folder...\").start();\n\n try {\n rmSync(distPath, { recursive: true, force: true });\n spinner.succeed(\"All clean! Dist folder wiped.\");\n return true;\n } catch (error) {\n spinner.fail(`Oops! Couldn't clean dist/ folder: ${error.message}`);\n console.error(error);\n return false;\n }\n}\n\n/**\n * Run a build step with spinner feedback\n * @param {string} taskName - Name of the task for spinner text\n * @param {Function} taskFn - Async function to execute the task\n * @param {string} successMsg - Message to show on success\n * @param {string} failMsg - Message to show on failure\n * @returns {Promise<any>} - Result of the task function or throws error\n */\nasync function runBuildStep(taskName, taskFn, successMsg, failMsg) {\n const spinner = ora(taskName).start();\n\n try {\n const result = await taskFn();\n spinner.succeed(successMsg);\n return result;\n } catch (error) {\n spinner.fail(failMsg);\n console.error(`Error: ${error.message}`);\n throw error;\n }\n}\n\n/**\n * Builds the TypeScript definition files\n * @param {object} config - Rollup config for d.ts generation\n * @param {object} outputConfig - Output configuration for d.ts files\n */\nexport async function buildTypeDefinitions(config, outputConfig) {\n return runBuildStep(\n \"Creating type definitions...\",\n async () => {\n const bundle = await rollup(config);\n await bundle.write(outputConfig);\n await bundle.close();\n },\n \"Types files built.\",\n \"Darn! 
Type definitions failed.\",\n );\n}\n\n/**\n * Builds both the main bundle and demo files in one operation\n * @param {object} mainConfig - Rollup config for the main bundle\n * @param {object} demoConfig - Rollup config for the demo files\n */\nexport async function buildCombinedBundle(mainConfig, demoConfig) {\n return runBuildStep(\n `Bundling ${mainConfig.name || \"main\"} and ${demoConfig.name || \"demo\"}...`,\n async () => {\n // Build main bundle\n const mainBundle = await rollup(mainConfig);\n await mainBundle.write(mainConfig.output);\n await mainBundle.close();\n\n // Build demo files\n const demoBundle = await rollup(demoConfig);\n await demoBundle.write(demoConfig.output);\n await demoBundle.close();\n },\n `Bundles ready! ${mainConfig.name || \"Main\"} and ${demoConfig.name || \"demo\"} built.`,\n \"Bundle hiccup! Build failed.\",\n );\n}\n\n/**\n * Analyzes web components and generates API documentation.\n * @param {object} options - Options containing wcaInput and wcaOutput\n */\nexport async function generateDocs(options) {\n const { wcaInput: sourceFiles, wcaOutput: outFile, skipDocs } = options;\n\n if (skipDocs) {\n const skipSpinner = ora(\"Skipping docs generation...\").start();\n\n setTimeout(() => {\n skipSpinner.succeed(\"Docs generation skipped.\");\n }, 0);\n return;\n }\n\n return runBuildStep(\n \"Analyzing components and making docs...\",\n async () => {\n await analyzeComponents(sourceFiles, outFile);\n await runDefaultDocsBuild();\n },\n \"Docs ready! Looking good.\",\n \"Doc troubles!\",\n );\n}\n", "import ora from \"ora\";\nimport { shell } from \"#utils/shell.js\";\nimport Docs from \"./docs-generator.ts\";\n\nexport async function cem() {\n const cemSpinner = ora(\"Generating Custom Elements Manifest...\").start();\n\n try {\n // The shell function returns a promise that resolves when the command completes\n await shell(\n \"npx --package=@custom-elements-manifest/analyzer -y -- cem analyze --litelement --globs src/*.*js scripts/wca/**/*.*js --packagejson --dependencies\",\n );\n cemSpinner.succeed(\"Custom Elements Manifest generated successfully!\");\n } catch (error) {\n // Check if the error is just the plugin issue but the manifest was still created\n const errorMessage = error instanceof Error ? error.message : String(error);\n cemSpinner.warn('CEM analyzer completed with warnings: ' + errorMessage);\n }\n}\n\nexport async function docs() {\n const docsSpinner = ora(\"Generating API documentation...\").start();\n\n try {\n await Docs.generate();\n docsSpinner.succeed(\"API documentation generated successfully!\");\n } catch (error) {\n const errorMessage = error instanceof Error ? error.message : String(error);\n docsSpinner.fail(\"Failed to generate API documentation: \" + errorMessage);\n throw error;\n }\n}\n", "import { spawn } from \"node:child_process\";\nimport ora from \"ora\";\n\nconst shell = (command, _args) => {\n const commandString = `${command} ${_args ? 
_args.join(\" \") : \"\"}`;\n\n // Initialize the spinner but don't start it - we'll just use it for completion status\n const spinner = ora();\n\n // Parse command string if no args are provided\n let finalCommand = command;\n let finalArgs = _args || [];\n\n if (!_args && typeof command === \"string\") {\n const parts = command.split(\" \");\n finalCommand = parts[0];\n finalArgs = parts.slice(1);\n }\n\n // Simple check for watch mode - if the command contains --watch or -w flags\n const isWatchMode =\n commandString.includes(\"--watch\") || commandString.includes(\" -w\");\n\n // Use different stdio configurations based on watch mode\n const stdio = isWatchMode\n ? \"inherit\" // Full TTY support for watch mode\n : [\"inherit\", \"pipe\", \"pipe\"]; // Capture output but allow input for normal mode\n\n const child = spawn(finalCommand, finalArgs, {\n stdio,\n shell: true,\n });\n\n // Only set up output capture if we're not in watch mode (stdio isn't 'inherit')\n if (!isWatchMode) {\n // Store command output to display after completion\n const commandOutput = [];\n\n child.stdout?.on(\"data\", (data) => {\n // Convert buffer to string\n const output = data.toString();\n\n // Store full output\n commandOutput.push(output);\n\n // Output directly to console\n process.stdout.write(output);\n });\n\n child.stderr?.on(\"data\", (data) => {\n const output = data.toString();\n commandOutput.push(output);\n process.stderr.write(output);\n });\n }\n\n // Set up a promise to track command completion\n return new Promise((resolve, reject) => {\n child.on(\"close\", (code) => {\n if (code !== 0) {\n // In watch mode, don't treat exit codes as errors - these are typically user terminations\n if (isWatchMode) {\n spinner.info(`Watch mode terminated with code ${code}`);\n resolve(); // Resolve without an error for watch mode commands\n } else {\n spinner.fail(`${commandString} failed (code ${code})`);\n reject(new Error(`Command failed with exit code ${code}`));\n }\n } else {\n spinner.succeed(`${commandString} completed successfully`);\n resolve();\n }\n });\n });\n};\n\nexport { shell };\n", "/** biome-ignore-all lint/complexity/noThisInStatic: not confusing */\nimport fs from \"node:fs\";\nimport path from \"node:path\";\nimport type {\n Package,\n Module,\n Declaration,\n CustomElementDeclaration,\n ClassMember,\n Parameter,\n Attribute\n} from 'custom-elements-manifest';\n\ninterface GenerateOptions {\n outDir?: string;\n outFile?: string;\n manifestPath?: string;\n}\n\ninterface MergedTableData {\n name: string;\n properties: string;\n attributes: string;\n type: string;\n default: string;\n description: string;\n}\n\nexport default class Docs {\n private static manifest: Package = { schemaVersion: \"1.0.0\", readme: \"\", modules: [] };\n\n /**\n * Generate markdown documentation for all components\n */\n static generate(options: GenerateOptions = {}): void {\n const {\n outDir = \"./docs\",\n outFile = \"api.md\",\n manifestPath = \"./custom-elements.json\",\n } = options;\n\n // Use provided manifest or fallback to default\n if (manifestPath) {\n try {\n const manifestContent = fs.readFileSync(manifestPath, \"utf8\");\n this.manifest = JSON.parse(manifestContent) as Package;\n } catch (error) {\n console.error(`Error reading manifest file at ${manifestPath}:`, error);\n throw error;\n }\n }\n\n const elements = this.getElements();\n\n // Create docs directory if it doesn't exist\n const docsDir = outDir;\n if (!fs.existsSync(docsDir)) {\n fs.mkdirSync(docsDir, { recursive: true });\n }\n\n 
// Generate combined API documentation\n const apiMarkdown = this.renderAllElements(elements);\n const apiFilename = path.join(docsDir, outFile);\n fs.writeFileSync(apiFilename, apiMarkdown);\n console.log(`Generated combined API documentation at ${apiFilename}`);\n }\n\n /**\n * Extract custom elements from the manifest\n */\n static getElements(): CustomElementDeclaration[] {\n return this.manifest.modules.reduce(\n (els: CustomElementDeclaration[], module: Module) =>\n els.concat(\n module.declarations?.filter(\n (dec: Declaration): dec is CustomElementDeclaration => \n 'customElement' in dec && dec.customElement === true && 'tagName' in dec && \n this.isWcaModule(module),\n ) ?? [],\n ),\n [],\n );\n }\n\n /**\n * Check if a module has a path that matches the WCA pattern\n */\n static isWcaModule(module: Module): boolean {\n // Check if the module path matches \"scripts/wca/auro-*.js\"\n const path = module.path;\n if (!path) {\n return false;\n }\n \n // Match the pattern: starts with \"scripts/wca/auro-\" and ends with \".js\"\n return path.startsWith('scripts/wca/auro-') && path.endsWith('.js');\n }\n\n /**\n * Render all elements into a single markdown document\n */\n static renderAllElements(elements: CustomElementDeclaration[]): string {\n return `${elements\n .map((element: CustomElementDeclaration) => this.renderElement(element, false))\n .join(\"\\n\\n---\\n\\n\")}\n `;\n }\n\n /**\n * Render a single element as markdown\n */\n static renderElement(element: CustomElementDeclaration, includeTitle = true): string {\n return `${includeTitle ? `# ${element.tagName}\\n\\n` : `# ${element.tagName}\\n\\n`}${element.description ? `${element.description}\\n\\n` : \"\"}${this.renderPropertiesAttributesTable(element)}${this.renderTable(\n \"Methods\",\n [\"name\", \"parameters\", \"return.type.text\", \"description\"],\n (element.members || [])\n .filter(\n (m: ClassMember) =>\n m.kind === \"method\" && ('privacy' in m ? m.privacy !== \"private\" : true) && m.name[0] !== \"_\",\n )\n .map((m: ClassMember) => ({\n ...m,\n parameters: this.renderParameters('parameters' in m ? m.parameters as Parameter[] : undefined),\n })),\n )}${this.renderTable(\n \"Events\",\n [\"name\", \"description\"],\n element.events as unknown as Record<string, unknown>[],\n )}${this.renderTable(\n \"Slots\",\n [[\"name\", \"(default)\"], \"description\"],\n element.slots as unknown as Record<string, unknown>[],\n )}${this.renderTable(\n \"CSS Shadow Parts\",\n [\"name\", \"description\"],\n element.cssParts as unknown as Record<string, unknown>[],\n )}${this.renderTable(\n \"CSS Custom Properties\",\n [\"name\", \"description\"],\n element.cssProperties as unknown as Record<string, unknown>[],\n )}`;\n }\n\n /**\n * Render combined properties and attributes table\n */\n static renderPropertiesAttributesTable(element: CustomElementDeclaration): string {\n const properties = element.members?.filter((m: ClassMember) => m.kind === \"field\") || [];\n const attributes = element.attributes || [];\n\n // Create a merged dataset\n const mergedData: MergedTableData[] = [];\n const processedNames = new Set<string>();\n\n // Process properties first (only include those with descriptions)\n properties.forEach((prop: ClassMember) => {\n if (prop.description?.trim()) {\n mergedData.push({\n name: prop.name,\n properties: prop.name,\n attributes: ('attribute' in prop ? prop.attribute as string : '') || \"\",\n type: this.get(prop, \"type.text\") || \"\",\n default: ('default' in prop ? 
prop.default as string : '') || \"\",\n description: prop.description || \"\",\n });\n }\n processedNames.add(prop.name);\n if ('attribute' in prop && prop.attribute) {\n processedNames.add(prop.attribute as string);\n }\n });\n\n // Process attributes that don't have corresponding properties (only include those with descriptions)\n attributes.forEach((attr: Attribute) => {\n if (!processedNames.has(attr.name) && attr.description?.trim()) {\n mergedData.push({\n name: attr.name,\n properties: \"\",\n attributes: attr.name,\n type: this.get(attr, \"type.text\") || \"\",\n default: attr.default || \"\",\n description: attr.description || \"\",\n });\n }\n });\n\n if (mergedData.length === 0) {\n return \"\";\n }\n\n const headers = \"Properties | Attributes | Type | Default | Description \";\n const separator = \"--- | --- | --- | --- | ---\";\n\n const rows = mergedData\n .map((item: MergedTableData) =>\n [\n item.properties,\n item.attributes,\n item.type,\n item.default,\n item.description,\n ]\n .map((value: string) =>\n String(value || \"\")\n .replace(/\\|/g, \"\\\\|\")\n .replace(/\\n/g, \"<br>\"),\n )\n .join(\" | \"),\n )\n .join(\"\\n\");\n\n return `\n### Properties & Attributes\n\n| ${headers} |\n| ${separator} |\n${rows}\n\n`;\n }\n\n /**\n * Render method parameters as a formatted string\n */\n static renderParameters(parameters?: Parameter[]): string {\n if (!parameters || parameters.length === 0) {\n return \"None\";\n }\n\n return parameters\n .map(\n (param: Parameter) =>\n `\\`${param.name}\\` (${this.get(param, \"type.text\") || \"any\"})${param.description ? ` - ${param.description}` : \"\"}`,\n )\n .join(\"<br>\");\n }\n\n /**\n * Renders a markdown table of data, plucking the given properties from each item in `data`.\n */\n static renderTable(\n name: string, \n properties: (string | string[])[], \n data?: Array<Record<string, unknown>>\n ): string {\n if (data === undefined || data.length === 0) {\n return \"\";\n }\n\n // Filter out items without descriptions\n const filteredData = data.filter((item: Record<string, unknown>) => {\n const description = item.description;\n return typeof description === 'string' && description.trim();\n });\n\n if (filteredData.length === 0) {\n return \"\";\n }\n\n const headers = properties\n .map((p: string | string[]) => this.capitalize((Array.isArray(p) ? 
p[0] : p).split(\".\")[0]))\n .join(\" | \");\n\n const separator = properties.map(() => \"---\").join(\" | \");\n\n const rows = filteredData\n .map((item: Record<string, unknown>) =>\n properties\n .map((p: string | string[]) => {\n const value = this.get(item, p);\n // Escape pipes in table cells and handle multiline content\n return String(value || \"\")\n .replace(/\\|/g, \"\\\\|\")\n .replace(/\\n/g, \"<br>\");\n })\n .join(\" | \"),\n )\n .join(\"\\n\");\n\n return `\n### ${name}\n\n| ${headers} |\n| ${separator} |\n${rows}\n\n`;\n }\n\n /**\n * Reads a (possibly deep) path off of an object.\n */\n // biome-ignore lint/suspicious/noExplicitAny: utility method needs to work with any object structure\n static get(obj: any, pathInput: string | string[]): string {\n let fallback = \"\";\n let path: string = pathInput as string;\n if (Array.isArray(pathInput)) {\n [path, fallback] = pathInput;\n }\n const parts = path.split(\".\");\n // biome-ignore lint/suspicious/noExplicitAny: utility method needs to work with any object structure\n let current: any = obj;\n while (current && parts.length) {\n current = current[parts.shift() as string];\n }\n return current == null || current === \"\" ? fallback : String(current);\n }\n\n /**\n * Capitalize the first letter of a string\n */\n static capitalize(s: string): string {\n return s[0].toUpperCase() + s.substring(1);\n }\n}", "\nimport { cem, docs } from '#scripts/docs/index.ts';\n\n/**\n * Analyzes web components and generates API documentation.\n */\nexport async function analyzeComponents() {\n \n await cem();\n\n await docs();\n\n}", "import { Logger } from \"@aurodesignsystem/auro-library/scripts/utils/logger.mjs\";\nimport {\n generateReadmeUrl,\n processContentForFile,\n templateFiller,\n} from \"@aurodesignsystem/auro-library/scripts/utils/sharedFileProcessorUtils.mjs\";\n\n/**\n * Processor config object.\n * @typedef {Object} ProcessorConfig\n * @property {boolean} [overwriteLocalCopies=true] - The release version tag to use instead of master.\n * @property {string} [remoteReadmeVersion=\"master\"] - The release version tag to use instead of master.\n * @property {string} [remoteReadmeUrl] - The release version tag to use instead of master.\n * @property {string} [remoteReadmeVariant=\"\"] - The variant string to use for the README source.\n * (like \"_esm\" to make README_esm.md).\n */\n\n/**\n * @param {ProcessorConfig} config - The configuration for this processor.\n */\nexport const defaultDocsProcessorConfig = {\n overwriteLocalCopies: true,\n remoteReadmeVersion: \"master\",\n // eslint-disable-next-line no-warning-comments\n // TODO: remove this variant when all components are updated to use latest auro-library\n // AND the default README.md is updated to use the new paths\n remoteReadmeVariant: \"_updated_paths\",\n};\n\nfunction pathFromCwd(pathLike) {\n const cwd = process.cwd();\n return `${cwd}/${pathLike}`;\n}\n\n/**\n * @param {ProcessorConfig} config - The configuration for this processor.\n * @returns {import('../utils/sharedFileProcessorUtils').FileProcessorConfig[]}\n */\nexport const fileConfigs = (config) => [\n // README.md\n {\n identifier: \"README.md\",\n input: {\n remoteUrl:\n config.remoteReadmeUrl ||\n generateReadmeUrl(\n config.remoteReadmeVersion,\n config.remoteReadmeVariant,\n ),\n fileName: pathFromCwd(\"/docTemplates/README.md\"),\n overwrite: config.overwriteLocalCopies,\n },\n output: pathFromCwd(\"/README.md\"),\n },\n // index.md\n {\n identifier: \"index.md\",\n input: 
pathFromCwd(\"/docs/partials/index.md\"),\n output: pathFromCwd(\"/demo/index.md\"),\n mdMagicConfig: {\n output: {\n directory: pathFromCwd(\"/demo\"),\n },\n },\n },\n // api.md\n {\n identifier: \"api.md\",\n input: pathFromCwd(\"/docs/partials/api.md\"),\n output: pathFromCwd(\"/demo/api.md\"),\n preProcessors: [templateFiller.formatApiTable],\n },\n];\n\n/**\n *\n * @param {ProcessorConfig} config - The configuration for this processor.\n * @return {Promise<void>}\n */\nexport async function processDocFiles(config = defaultDocsProcessorConfig) {\n // setup\n await templateFiller.extractNames();\n\n for (const fileConfig of fileConfigs(config)) {\n try {\n // eslint-disable-next-line no-await-in-loop\n await processContentForFile(fileConfig);\n } catch (err) {\n Logger.error(`Error processing ${fileConfig.identifier}: ${err.message}`);\n }\n }\n}\n\nexport async function runDefaultDocsBuild() {\n await processDocFiles({\n ...defaultDocsProcessorConfig,\n remoteReadmeUrl:\n \"https://raw.githubusercontent.com/AlaskaAirlines/auro-templates/main/templates/default/README.md\",\n });\n}\n", "import { basename, join } from \"node:path\";\nimport { nodeResolve } from \"@rollup/plugin-node-resolve\";\nimport { glob } from \"glob\";\nimport { dts } from \"rollup-plugin-dts\";\nimport { litScss } from \"rollup-plugin-scss-lit\";\nimport { watchGlobs } from \"./plugins.js\";\n\n// Default paths used across configurations\nconst DEFAULTS = {\n moduleDirectories: [\"node_modules\"],\n modulePaths: [\"../../node_modules\", \"../node_modules\", \"node_modules\"],\n watchPatterns: [\"./apiExamples/**/*\", \"./docs/**/*\"],\n};\n\n/**\n * Creates Rollup plugins configuration.\n * @param {string[]} modulePaths - Additional paths to include in litScss.\n * @param {object} options - Additional options for plugins\n * @returns {object[]} - Array of Rollup plugins.\n */\nexport function getPluginsConfig(modulePaths = [], options = {}) {\n const {\n watchPatterns = DEFAULTS.watchPatterns,\n dedupe = [\"lit\", \"lit-element\", \"lit-html\"],\n } = options;\n\n // Combine default paths with any user-provided paths\n const allModulePaths = [...DEFAULTS.modulePaths, ...modulePaths];\n\n return [\n nodeResolve({\n dedupe,\n preferBuiltins: false,\n moduleDirectories: DEFAULTS.moduleDirectories,\n }),\n litScss({\n minify: { fast: true },\n options: {\n loadPaths: [...allModulePaths, join(process.cwd(), \"src\", \"styles\"), join(process.cwd(), \"src\")],\n },\n }),\n watchGlobs(watchPatterns),\n ];\n}\n\n/**\n * Creates Rollup configuration for the main bundle with output options.\n * @param {object} options - Build options.\n * @returns {object} - Complete Rollup configuration object with input and output.\n */\nexport function getMainBundleConfig(options = {}) {\n const {\n modulePaths = [],\n watch = false,\n input = [\"./src/index.js\", \"./src/registered.js\"],\n outputDir = \"./dist\",\n format = \"esm\",\n } = options;\n\n return {\n name: \"Main\",\n config: {\n input,\n output: {\n format,\n dir: outputDir,\n entryFileNames: \"[name].js\",\n },\n external: getExternalConfig(),\n plugins: getPluginsConfig(modulePaths),\n watch: getWatcherConfig(watch),\n },\n };\n}\n\n/**\n * Creates Rollup configuration for demo files.\n * @param {object} options - Build options.\n * @returns {object} - Rollup configuration object.\n */\nexport function getDemoConfig(options = {}) {\n const {\n modulePaths = [],\n watch = false,\n globPattern = \"./demo/*.js\",\n ignorePattern = [\"./demo/*.min.js\"],\n outputDir = 
\"./demo\",\n } = options;\n\n return {\n name: \"Demo\",\n config: {\n input: Object.fromEntries(\n glob.sync(globPattern, { ignore: ignorePattern }).map((file) => {\n const name = basename(file, \".js\");\n return [name, file];\n }),\n ),\n output: {\n format: \"esm\",\n dir: outputDir,\n entryFileNames: \"[name].min.js\",\n chunkFileNames: \"[name].min.js\",\n },\n plugins: getPluginsConfig(modulePaths),\n watch: getWatcherConfig(watch),\n },\n };\n}\n\n/**\n * Creates Rollup configuration for the d.ts files with output options.\n * @param {object} options - Configuration options\n * @returns {object} - Complete Rollup configuration object with input and output.\n */\nexport function getDtsConfig(options = {}) {\n const { input = [\"./dist/index.js\"], outputDir = \"./dist\" } = options;\n\n return {\n name: \"DTS\",\n config: {\n input,\n output: {\n format: \"esm\",\n dir: outputDir,\n entryFileNames: \"[name].d.ts\",\n },\n plugins: [dts()],\n },\n };\n}\n\n/**\n * Creates Rollup configuration for watch mode.\n * @param {boolean|object} watchOptions - Whether to enable watch mode or watch options\n * @returns {object|false} - Watch configuration for Rollup or false if disabled\n */\nexport function getWatcherConfig(watchOptions) {\n // Return false if watch mode is disabled\n if (!watchOptions) {\n return false;\n }\n\n // Allow passing a configuration object or use defaults\n const options = typeof watchOptions === \"object\" ? watchOptions : {};\n\n return {\n clearScreen: options.clearScreen ?? true,\n buildDelay: options.buildDelay ?? 500,\n chokidar: {\n ignoreInitial: true,\n // Ignore common output files that cause feedback loops\n ignored: options.ignored ?? [\n \"**/dist/**/*.d.ts\",\n \"**/custom-elements.json\",\n \"**/demo/*.md\",\n \"**/demo/**/*.min.js\",\n \"**/docs/api.md\",\n \"**/node_modules/**\",\n \"**/.git/**\",\n ],\n // Reduce watcher's sensitivity to prevent loops\n awaitWriteFinish: options.awaitWriteFinish ?? {\n stabilityThreshold: 1000,\n pollInterval: 100,\n },\n },\n include: options.include ?? [\n \"./src/**/*.scss\",\n \"./src/**/*.js\",\n \"./src/**/*.ts\",\n \"./demo/**/*.js\",\n \"./apiExamples/**/*\",\n \"./docs/**/*.md\",\n ],\n exclude: options.exclude ?? [\"./dist/**/*\", \"./node_modules/**/*\"],\n };\n}\n\n/**\n * Creates external configuration for Rollup.\n * @param {string[]} additional - Additional external patterns\n * @returns {(string|RegExp)[]} - Array of external dependencies.\n */\nexport function getExternalConfig(additional = []) {\n const defaults = [\n // externalize all lit dependencies\n /node_modules\\/lit/,\n /node_modules\\/lit-element/,\n /node_modules\\/lit-html/,\n /node_modules\\/@lit/,\n ];\n\n return [...defaults, ...additional];\n}\n", "import path from \"node:path\";\nimport { glob } from \"glob\";\n\n/**\n * Creates a plugin that watches file globs and adds them to Rollup's watch list.\n * @param {string|string[]} globs - Glob pattern(s) to watch\n * @returns {object} - Rollup plugin\n */\nexport function watchGlobs(globs) {\n return {\n name: \"watch-globs\",\n buildStart() {\n const items = Array.isArray(globs) ? 
globs : [globs];\n\n for (const item of items) {\n try {\n for (const filename of glob.sync(path.resolve(item))) {\n this.addWatchFile(filename);\n }\n } catch (error) {\n this.error(`Error watching glob pattern \"${item}\": ${error.message}`);\n }\n }\n },\n };\n}\n", "import { startDevServer } from \"@web/dev-server\";\nimport { hmrPlugin } from \"@web/dev-server-hmr\";\nimport ora from \"ora\";\n/**\n * Default server configuration\n */\nconst DEFAULT_CONFIG = {\n watch: true,\n nodeResolve: true,\n basePath: \"/\",\n rootDir: \"./demo\",\n hmrInclude: [\"src/**/*\", \"demo/**/*\", \"apiExamples/**/*\", \"docs/**/*\"],\n};\n\n/**\n * Starts the development server\n * @param {object} options - Server options\n * @param {boolean} [options.serve] - Whether to start the server\n * @param {number} [options.port] - Port number for the server\n * @param {boolean} [options.open] - Whether to open the browser\n * @param {string} [options.rootDir] - Root directory for serving files\n * @param {string[]} [options.hmrInclude] - Patterns to include for HMR\n * @returns {Promise<object>} - The server instance\n */\nexport async function startDevelopmentServer(options = {}) {\n if (!options.serve) return;\n\n const serverSpinner = ora(\"Firing up dev server...\\n\").start();\n\n try {\n // Merge options with defaults\n const serverConfig = {\n port: Number(options.port) || undefined,\n open: options.open ? \"/\" : undefined,\n watch: options.watch ?? DEFAULT_CONFIG.watch,\n nodeResolve: options.nodeResolve ?? DEFAULT_CONFIG.nodeResolve,\n basePath: options.basePath ?? DEFAULT_CONFIG.basePath,\n rootDir: options.rootDir ?? DEFAULT_CONFIG.rootDir,\n\n // HTML file extension middleware\n middleware: [\n function rewriteIndex(context, next) {\n if (!context.url.endsWith(\"/\") && !context.url.includes(\".\")) {\n context.url += \".html\";\n }\n return next();\n },\n ],\n\n // Hot Module Replacement plugin\n plugins: [\n hmrPlugin({\n include: options.hmrInclude ?? DEFAULT_CONFIG.hmrInclude,\n }),\n ],\n };\n\n // Start the server with our configuration\n const server = await startDevServer({\n config: serverConfig,\n readCliArgs: false,\n readFileConfig: false,\n });\n\n serverSpinner.stop();\n return server;\n } catch (error) {\n serverSpinner.fail(\"Server snag! 
Couldn't start dev server.\");\n console.error(\"Error starting development server:\", error);\n throw new Error(`Development server failed to start: ${error.message}`);\n }\n}\n", "import path from \"node:path\";\nimport ora from \"ora\";\nimport { rollup } from \"rollup\";\nimport { analyzeComponents } from \"#scripts/analyze.js\";\nimport { generateDocs } from \"./bundleHandlers.js\";\nimport { getDtsConfig } from \"./configUtils.js\";\n\n// Track if any build is in progress to prevent overlapping operations\nlet buildInProgress = false;\n\n// Track build states and times in a single object for cleaner management\nconst builds = {\n dts: { active: false, lastTime: 0 },\n analyze: { active: false, lastTime: 0 },\n docs: { active: false, lastTime: 0 },\n};\n\n// Minimum time between builds of the same type (in ms)\nconst MIN_BUILD_INTERVAL = 5000;\n\n// Track source paths of files that triggered a watch event\nconst sourceEventPaths = new Set();\n\n// Known output files that should never trigger a rebuild\nconst OUTPUT_PATHS = [\n \"/dist/index.d.ts\",\n \"/custom-elements.json\",\n \"/demo/api.md\",\n \"/docs/api.md\",\n \"/demo/index.min.js\",\n];\n\n// Path matching checks - handle any non-string input safely\nfunction isOutputFile(filePath) {\n if (!filePath || typeof filePath !== \"string\") return false;\n\n try {\n const normalizedPath = path.normalize(filePath);\n\n // Check if it's in our known output paths\n return (\n OUTPUT_PATHS.some((outputPath) => normalizedPath.endsWith(outputPath)) ||\n normalizedPath.includes(\"/dist/\") ||\n normalizedPath.endsWith(\".min.js\") ||\n normalizedPath.endsWith(\".d.ts\")\n );\n } catch (error) {\n console.error(`Error checking path (${typeof filePath}):`, error.message);\n return false; // If any error occurs, assume it's not an output file\n }\n}\n\n/**\n * Runs a build task with proper tracking of state\n * @param {string} taskName - Type of task (dts, analyze, docs)\n * @param {Function} taskFn - The actual task function to run\n * @returns {Promise<boolean>} - Success status\n */\nasync function runBuildTask(taskName, taskFn) {\n const task = builds[taskName];\n\n // Skip if build is active or within throttle time\n if (task.active || Date.now() - task.lastTime < MIN_BUILD_INTERVAL) {\n return false;\n }\n\n try {\n task.active = true;\n task.lastTime = Date.now();\n return await taskFn();\n } catch (error) {\n console.error(`Error in ${taskName} task:`, error);\n return false;\n } finally {\n task.active = false;\n }\n}\n\n/**\n * Handles the watcher events.\n * @param {object} watcher - Rollup watcher object.\n * @param {object} options - Build options.\n * @param {Function} [onInitialBuildComplete] - Callback to run after initial build completes.\n */\nexport async function handleWatcherEvents(\n watcher,\n options,\n onInitialBuildComplete,\n) {\n // Track if this is the first build\n let isInitialBuild = true;\n // biome-ignore lint/style/useConst: This is an object that is mutated.\n let buildTasksResults = { dts: false, analyze: false, docs: false };\n let scheduledTasksTimer = null;\n let bundleSpinner;\n\n // Create a spinner for watch mode\n const watchSpinner = ora(\"Activating watch mode...\").start();\n\n // The actual task functions\n const buildTasks = {\n // Function to build d.ts files\n dts: async () => {\n const dtsSpinner = ora(\"Crafting type definitions...\").start();\n try {\n const create_dts = await rollup(getDtsConfig().config);\n await create_dts.write(getDtsConfig().config.output);\n await 
create_dts.close();\n dtsSpinner.succeed(\"Type files built.\");\n return true;\n } catch (error) {\n dtsSpinner.fail(\"Types trouble! Build failed.\");\n console.error(\"TypeScript definition build error:\", error);\n return false;\n }\n },\n\n // Function to analyze components\n analyze: async () => {\n const { wcaInput: sourceFiles, wcaOutput: outFile, skipDocs } = options;\n if (skipDocs) {\n const skipSpinner = ora(\"Skipping component analysis...\").start();\n setTimeout(() => {\n skipSpinner.succeed(\"Component analysis skipped.\");\n }, 0);\n return true;\n }\n\n const analyzeSpinner = ora(\n \"Detective work: analyzing components...\",\n ).start();\n try {\n await analyzeComponents(sourceFiles, outFile);\n analyzeSpinner.succeed(\"Component analysis complete! API generated.\");\n return true;\n } catch (error) {\n analyzeSpinner.fail(\"Analysis hiccup! Something went wrong.\");\n console.error(\"Component analysis error:\", error);\n return false;\n }\n },\n\n // Function to rebuild documentation\n docs: async () => {\n // Skip if main bundle is still building\n if (buildInProgress) {\n return false;\n }\n\n // Check if docs generation is skipped\n if (options.skipDocs) {\n const skipSpinner = ora(\"Skipping docs generation...\").start();\n setTimeout(() => {\n skipSpinner.succeed(\"Docs generation skipped.\");\n }, 0);\n return true;\n }\n\n const docsSpinner = ora(\"Refreshing docs...\").start();\n try {\n await generateDocs(options);\n docsSpinner.succeed(\"Documentation refreshed!\");\n return true;\n } catch (error) {\n docsSpinner.fail(\"Docs stumble! Couldn't refresh.\");\n console.error(\"Documentation rebuild error:\", error);\n }\n },\n };\n\n // Check if all initial build tasks completed successfully\n const checkInitialBuildComplete = () => {\n if (\n isInitialBuild &&\n buildTasksResults.dts &&\n buildTasksResults.analyze &&\n buildTasksResults.docs &&\n typeof onInitialBuildComplete === \"function\"\n ) {\n isInitialBuild = false;\n onInitialBuildComplete();\n }\n };\n\n // Schedule the post-bundle tasks with proper sequencing\n function schedulePostBundleTasks(delay = 1000) {\n if (scheduledTasksTimer) {\n clearTimeout(scheduledTasksTimer);\n }\n\n scheduledTasksTimer = setTimeout(async () => {\n // Run tasks with delays between them to avoid race conditions\n buildTasksResults.dts = await runBuildTask(\"dts\", buildTasks.dts);\n\n setTimeout(async () => {\n buildTasksResults.analyze = await runBuildTask(\n \"analyze\",\n buildTasks.analyze,\n );\n\n setTimeout(async () => {\n buildTasksResults.docs = await runBuildTask(\"docs\", buildTasks.docs);\n checkInitialBuildComplete();\n }, 1000);\n }, 1000);\n }, delay);\n }\n\n // Set up event handlers for the watcher\n watcher.on(\"event\", async (event) => {\n switch (event.code) {\n case \"START\":\n watchSpinner.succeed(\"Watch mode active! Eyes peeled.\");\n break;\n\n case \"BUNDLE_START\":\n // Clear source paths from the previous bundle operation\n sourceEventPaths.clear();\n\n // Store source file paths that triggered this build\n if (event.input) {\n try {\n // Handle different input formats safely\n const inputs = Array.isArray(event.input)\n ? event.input\n : typeof event.input === \"string\"\n ? [event.input]\n : typeof event.input === \"object\" && event.input !== null\n ? 
Object.values(event.input)\n : [];\n\n for (const input of inputs) {\n // Only process string inputs and skip non-string values\n if (typeof input === \"string\" && !isOutputFile(input)) {\n sourceEventPaths.add(path.normalize(input));\n }\n }\n } catch (error) {\n console.error(\"Error processing input paths:\", error);\n }\n }\n\n bundleSpinner = ora(\"Weaving bundles...\").start();\n buildInProgress = true;\n break;\n\n case \"BUNDLE_END\":\n if (bundleSpinner) {\n bundleSpinner.succeed(\n `Bundle ${Array.isArray(event.input) ? `of ${event.input.join(\"& \")} ` : \"\"}done in ${event.duration}ms! \uD83D\uDE80`,\n );\n }\n buildInProgress = false;\n\n // Schedule post-bundle tasks if source files triggered this build\n if (sourceEventPaths.size > 0) {\n schedulePostBundleTasks();\n }\n break;\n\n case \"END\":\n // We've already scheduled tasks in BUNDLE_END, nothing to do here\n break;\n\n case \"ERROR\":\n buildInProgress = false;\n if (bundleSpinner) {\n bundleSpinner.fail(`Oops! Bundle hit a snag: ${event.error.message}`);\n } else {\n ora().fail(`Watch mode hiccup: ${event.error.message}`);\n }\n sourceEventPaths.clear();\n break;\n }\n });\n}\n\n/**\n * Setup watch mode for rollup\n * @param {object} watcher - Rollup watcher instance\n */\nexport function setupWatchModeListeners(watcher) {\n process.on(\"SIGINT\", () => {\n const closeSpinner = ora(\"Wrapping up...\").start();\n watcher.close();\n closeSpinner.succeed(\"All done! See you next time. \u2728\");\n process.exit(0);\n });\n\n return watcher;\n}\n", "import { program } from \"commander\";\nimport ora from \"ora\";\nimport { withBuildOptions } from \"#commands/_sharedOptions.js\";\nimport { buildWithRollup } from \"#scripts/build/index.js\";\n\nlet buildCommand = program\n .command(\"build\")\n .description(\"Builds auro components\");\n\nbuildCommand = withBuildOptions(buildCommand);\n\nexport default buildCommand.action(async (options) => {\n try {\n const build = ora(\"Initializing...\");\n\n if (options.watch) {\n build.text = \"Waiting for changes...\";\n build.spinner = \"bouncingBar\";\n build.color = \"green\";\n } else {\n build.text =\n options.docs === false\n ? 
\"Building component (docs disabled)\"\n : \"Building component\";\n }\n\n build.start();\n\n await buildWithRollup(options);\n\n if (!options.watch) {\n build.succeed(\"Build completed!\");\n }\n } catch (error) {\n // If there's any active spinner, we need to fail it\n ora().fail(`Build failed: ${error.message}`);\n console.error(error);\n process.exit(1);\n }\n});\n", "import { exec } from \"node:child_process\";\nimport path from \"node:path\";\nimport process from \"node:process\";\nimport { fileURLToPath } from \"node:url\";\nimport util from \"node:util\";\nimport { program } from \"commander\";\nimport inquirer from \"inquirer\";\nimport { shell } from \"#utils/shell.js\";\n\nexport default program\n .command(\"migrate\")\n .description(\"Script runner to perform repetitive code change tasks\")\n .requiredOption(\n \"-i, --id <string>\",\n \"Select the migration you would like to run by id\",\n )\n .option(\n \"-m, --multi-gitter\",\n \"Run the migration on all repositories in the multi-gitter config\",\n )\n .action(async (options) => {\n const filename = fileURLToPath(import.meta.url);\n const dirname = path.dirname(filename);\n const scriptPath = path.resolve(dirname, \"migrations\", options.id);\n\n if (options.multiGitter) {\n // Check if multi-gitter CLI command is available\n const execPromise = util.promisify(exec);\n\n try {\n await execPromise(\"command -v multi-gitter\");\n } catch {\n console.error(\"multi-gitter is not installed.\");\n process.exit(1);\n }\n\n const answers = await inquirer.prompt([\n {\n type: \"confirm\",\n name: \"dryRun\",\n message:\n \"Run migration in dry-run mode? (no changes will be committed)\",\n default: true,\n },\n ]);\n\n if (answers.dryRun) {\n shell(\n `multi-gitter run ${scriptPath}/script.sh --config \"${scriptPath}/multi-gitter.yml\" --dry-run`,\n );\n } else {\n shell(\n `multi-gitter run ${scriptPath}/script.sh --config \"${scriptPath}/multi-gitter.yml\"`,\n );\n }\n } else {\n shell(`${scriptPath}/script.sh`);\n }\n });\n", "import process from \"node:process\";\nimport { program } from \"commander\";\n\nimport { readFile, writeFile } from \"node:fs/promises\";\nimport { Logger } from \"@aurodesignsystem/auro-library/scripts/utils/logger.mjs\";\nimport { syncDotGithubDir } from \"#scripts/syncDotGithubDir.js\";\n\nexport default program\n .command(\"sync\")\n .description(\n \"Script runner to synchronize local repository configuration files\",\n )\n .action(async () => {\n Logger.info(\"Synchronizing repository configuration files...\");\n\n Logger.warn(\n \"Note: sync does not create a new git branch. Changes are added to the current branch.\",\n );\n\n const cwd = process.cwd();\n\n await syncDotGithubDir(cwd);\n\n // Cleanup for specific files\n // ------------------------------------------------------\n\n // Some files have specific cleanup tasks that need to be run after syncing\n\n // CODEOWNERS - has a bizarre issue with line endings. 
This is a workaround!\n // Maybe it has to do with the file type since there's no ending?\n const codeownersPath = `${cwd}/.github/CODEOWNERS`;\n const codeowners = await readFile(codeownersPath, { encoding: \"utf-8\" });\n\n // Convert line endings to \\n\n const codeownersFixed = codeowners\n .replace(/\\r\\n/gu, \"\\n\")\n .replace(/\\n\\n/gu, \"\\n\");\n await writeFile(codeownersPath, codeownersFixed, { encoding: \"utf-8\" });\n\n if (codeownersFixed.includes(\"\\r\") || codeownersFixed.includes(\"\\n\\n\")) {\n Logger.error(\"CODEOWNERS file still has Windows line endings.\");\n }\n });\n", "import fs from \"node:fs/promises\";\nimport { Logger } from \"@aurodesignsystem/auro-library/scripts/utils/logger.mjs\";\nimport {\n processContentForFile,\n templateFiller,\n} from \"@aurodesignsystem/auro-library/scripts/utils/sharedFileProcessorUtils.mjs\";\n\nconst REMOTE_TEMPLATE_BASE_URL =\n \"https://raw.githubusercontent.com/AlaskaAirlines/auro-templates\";\n\n// Constants for configuring sync branch and template selection\n// ------------------------------------------------------------\nconst BRANCH_BASE = \"main\";\nconst TARGET_BRANCH_TO_COPY = \"main\";\nconst CONFIG_TEMPLATE = \"default\";\n\n/**\n * @typedef {Object} GithubDirectory\n * @property {string[]} ISSUE_TEMPLATE - The issue template directory.\n * @property {string[]} workflows - The workflows directory.\n * @property {string} _root - The root directory (places files in .github directly).\n */\n\n/**\n * @type {GithubDirectory} githubDirectory\n */\nconst githubDirShape = {\n ISSUE_TEMPLATE: [\n \"bug_report.yaml\",\n \"config.yml\",\n \"feature_request.yaml\",\n \"general-support.yaml\",\n \"group.yaml\",\n \"story.yaml\",\n \"task.yaml\",\n ],\n workflows: [\"codeql.yml\", \"publishDemo.yml\", \"testPublish.yml\"],\n _root: [\n \"CODEOWNERS\",\n \"CODE_OF_CONDUCT.md\",\n \"CONTRIBUTING.md\",\n \"PULL_REQUEST_TEMPLATE.md\",\n \"SECURITY.md\",\n \"settings.yml\",\n \"stale.yml\",\n ],\n};\n\n// BELOW TYPES ARE COPIED DIRECTLY FROM THE LIBRARY\n// How can we import JSDoc types from the library?\n\n/**\n * This is the expected object type when passing something other than a string.\n * @typedef {Object} InputFileType\n * @property {string} remoteUrl - The remote template to fetch.\n * @property {string} fileName - Path including file name to store.\n * @property {boolean} [overwrite] - Default is true. 
Choose to overwrite the file if it exists.\n */\n\n/**\n * @typedef {Object} FileProcessorConfig\n * @property {string} identifier - A unique identifier for this file (used for logging).\n * @property {string | InputFileType} input - Path to an input file, including filename.\n * @property {string} output - Path to an output file, including filename.\n * @property {Partial<MarkdownMagicOptions>} [mdMagicConfig] - Extra configuration options for md magic.\n * @property {Array<(contents: string) => string>} [preProcessors] - Extra processor functions to run on content AFTER markdownmagic and BEFORE templateFiller.\n * @property {Array<(contents: string) => string>} [postProcessors] - Extra processor functions to run on content.\n */\n\n// BELOW NEEDS TO BE UPSTREAMED OR REMOVED FROM THE LIBRARY\n/**\n * Take a branch or tag name and return the URL for the README file.\n * @param {string} branchOrTag - The git branch or tag to use for the README source.\n * @param {string} filePath - The path to the file in the remote repository.\n * @returns {string} The complete URL for the remote file.\n */\nfunction branchNameToRemoteUrl(branchOrTag, filePath) {\n // check if tag starts with 'vX' since our tags are `v4.0.0`\n const isTag =\n branchOrTag.startsWith(\"v\") &&\n /^\\d+\\.\\d+\\.\\d+(?<_>-.*)?$/u.test(branchOrTag.slice(1));\n\n if (isTag) {\n return `${REMOTE_TEMPLATE_BASE_URL}/refs/tags/${branchOrTag}/${filePath}`;\n }\n\n if (branchOrTag !== BRANCH_BASE) {\n return `${REMOTE_TEMPLATE_BASE_URL}/refs/heads/${branchOrTag}/${filePath}`;\n }\n\n return `${REMOTE_TEMPLATE_BASE_URL}/${BRANCH_BASE}/${filePath}`;\n}\n\n/**\n * Take a branch or tag name and return the URL for the remote file.\n * @param {string} filePath - The name of the file to fetch.\n * @param {string} branchOrTag - The git branch or tag to use for the README source.\n * @param {string} outputPath - The path to the file in the local repository.\n * @returns {FileProcessorConfig} Configuration object for file processing.\n */\nfunction filePathToRemoteInput(filePath, branchOrTag, outputPath) {\n const remoteUrl = branchNameToRemoteUrl(branchOrTag, filePath);\n\n return {\n // Identifier is only used for logging\n identifier: filePath.split(\"/\").pop(),\n input: {\n remoteUrl,\n fileName: outputPath,\n overwrite: true,\n },\n output: outputPath,\n overwrite: true,\n };\n}\n\n/**\n * Recursively removes a directory and all its contents.\n * @param {string} dirPath - The path to the directory to remove.\n * @returns {Promise<void>} A promise that resolves when the directory is removed or rejects if an error occurs.\n * @throws {Error} If the directory cannot be removed.\n */\nasync function removeDirectory(dirPath) {\n try {\n await fs.rm(dirPath, { recursive: true, force: true });\n Logger.log(`Successfully removed directory: ${dirPath}`);\n } catch (error) {\n Logger.error(`Error removing directory ${dirPath}: ${error.message}`);\n throw error;\n }\n}\n\n/**\n * Sync the .github directory with the remote repository.\n * @param {string} rootDir - The root directory of the local repository.\n * @returns {Promise<void>} A promise that resolves when syncing is complete.\n */\nexport async function syncDotGithubDir(rootDir) {\n if (!rootDir) {\n Logger.error(\"Root directory must be specified\");\n // eslint-disable-next-line no-undef\n process.exit(1);\n }\n\n // Remove .github directory if it exists\n const githubPath = \".github\";\n\n try {\n await removeDirectory(githubPath);\n Logger.log(\".github directory removed 
successfully\");\n } catch (error) {\n Logger.error(`Error removing .github directory: ${error.message}`);\n // eslint-disable-next-line no-undef\n process.exit(1);\n }\n\n // Setup\n await templateFiller.extractNames();\n\n const fileConfigs = [];\n const missingFiles = [];\n\n for (const dir of Object.keys(githubDirShape)) {\n for (const file of githubDirShape[dir]) {\n const inputPath = `${dir === \"_root\" ? \"\" : `${dir}/`}${file}`;\n const outputPath = `${rootDir}/.github/${inputPath}`;\n\n const fileConfig = filePathToRemoteInput(\n `templates/${CONFIG_TEMPLATE}/.github/${inputPath}`,\n TARGET_BRANCH_TO_COPY,\n outputPath,\n );\n fileConfigs.push(fileConfig);\n }\n }\n\n // Check if files exist\n await Promise.all(\n fileConfigs.map(async (config) => {\n try {\n const response = await fetch(config.input.remoteUrl, {\n method: \"HEAD\",\n });\n if (!response.ok) {\n missingFiles.push(config.input.remoteUrl);\n }\n } catch {\n missingFiles.push(config.input.remoteUrl);\n }\n }),\n );\n\n // If missing, log and exit\n if (missingFiles.length > 0) {\n const errorMessage = missingFiles\n .map((file) => `File not found: ${file}`)\n .join(\"\\n\");\n Logger.error(\n `Failed to sync .github directory. Confirm githubDirShape object is up to date:\\n${errorMessage}`,\n );\n // eslint-disable-next-line no-undef\n process.exit(1);\n }\n\n // Process all files\n try {\n await Promise.all(\n fileConfigs.map((config) => processContentForFile(config)),\n );\n Logger.log(\"All files processed.\");\n } catch (error) {\n Logger.error(`Error processing files: ${error.message}`);\n // eslint-disable-next-line no-undef\n process.exit(1);\n }\n}\n", "/* eslint-disable no-await-in-loop, line-comment-position, no-inline-comments, jsdoc/require-jsdoc, no-undef */\n\nimport fs from \"node:fs\";\nimport path from \"node:path\";\nimport { Logger } from \"@aurodesignsystem/auro-library/scripts/utils/logger.mjs\";\nimport { program } from \"commander\";\nimport { glob } from \"glob\";\nimport getTemplatedComponentCode from \"#scripts/prepWcaCompatibleCode.mjs\";\n\n// Use glob directly as it's already promised-based in newer versions\n\nconst WAC_DIR = path.resolve(process.cwd(), \"./scripts/wca\");\n\nasync function globPath(sources) {\n try {\n const fileArrays = await Promise.all(sources.map((source) => glob(source)));\n return fileArrays.flat();\n } catch (err) {\n console.error(\"Error processing glob patterns:\", err);\n throw err; // Re-throw to handle failure at caller\n }\n}\n\nasync function createExtendsFile(filePaths) {\n if (!fs.existsSync(WAC_DIR)) {\n await fs.promises.mkdir(WAC_DIR, { recursive: true });\n }\n\n for (const filePath of filePaths) {\n const resolvedPath = path.resolve(process.cwd(), filePath);\n const fileContent = await fs.promises.readFile(resolvedPath, \"utf-8\");\n const newPath = path.resolve(WAC_DIR, `${path.basename(filePath)}`);\n const newCode = getTemplatedComponentCode(\n fileContent,\n path.relative(WAC_DIR, filePath),\n );\n await fs.promises.writeFile(newPath, newCode);\n }\n}\n\nasync function main() {\n // files to analyze\n const filePaths = await globPath([\"./src/auro-*.js\"]);\n await createExtendsFile(filePaths);\n}\n\nexport default program\n .command(\"wca-setup\")\n .description(\"Set up WCA (Web Component Analyzer) for the project\")\n .action(() => {\n main()\n .then(() => {\n Logger.success(\"WCA setup completed successfully.\");\n })\n .catch((error) => {\n Logger.error(`WCA setup failed: ${error.message}`);\n });\n });\n", "/* eslint-disable 
require-unicode-regexp, prefer-named-capture-group, prefer-destructuring, prettier/prettier */\n\nexport default (code, sourcePath) => {\n const defaultTag = (code.match(/static register\\(name \\= (.+)\\)/) ||\n code.match(/customElements.get\\((.+?)\\)/))[1];\n const className = code.match(/export class (.+) extends/)?.[1];\n const classDesc = code.match(/\\/\\*\\*((.|\\n)*?)(\\*\\n|\\*\\/|[@])/)?.[1] || \"\";\n\n if (!defaultTag || !className) {\n return code;\n }\n return `\nimport { ${className} } from '${sourcePath}';\n\n/**${classDesc}*/\nclass ${className}WCA extends ${className} {}\n\nif (!customElements.get(${defaultTag})) {\n customElements.define(${defaultTag}, ${className}WCA);\n}\n`;\n};\n", "import { program } from \"commander\";\nimport { analyzeCommits } from \"#scripts/check-commits/commit-analyzer.ts\";\n\nexport default program\n .command(\"check-commits\")\n .alias(\"cc\")\n .option(\n \"-l, --set-label\",\n \"Set label on the pull request based on the commit message type\",\n )\n .option(\"-d, --debug\", \"Display detailed commit information for debugging\")\n .description(\n \"Check commits in the local repository for the types of semantic commit messages made and return the results.\",\n )\n .action(async (option) => {\n await analyzeCommits(option.debug, option.setLabel);\n });\n", "import chalk from \"chalk\";\nimport ora from \"ora\";\nimport type { Ora } from \"ora\";\nimport { Git } from \"#utils/gitUtils.ts\";\nimport type { CommitInfo } from \"./display-utils.ts\";\nimport { displayDebugView, getColoredType } from \"./display-utils.ts\";\nimport { applyLabelToPR, getExistingLabels } from \"./github-labels.ts\";\n\n/**\n * Analyze commit messages in the repository\n * @param debug Whether to display detailed debug information\n * @param verbose Whether to display verbose commit messages without truncation\n * @param setLabel Whether to apply a label to the PR based on commit types\n * @returns A promise that resolves when analysis is complete\n */\nexport async function analyzeCommits(\n debug = false,\n setLabel = false,\n): Promise<void> {\n const spinner = ora(\"Checking commits...\\n\").start();\n\n try {\n const commitList = await Git.getCommitMessages();\n\n // Only display commit details if debug mode is enabled\n if (debug) {\n displayDebugView(commitList);\n }\n\n spinner.succeed(`Total commits analyzed: ${commitList.length}`);\n\n if (commitList.length !== 0) {\n const commitTypes = commitList.map((commit) => commit.type);\n const uniqueTypes = Array.from(new Set(commitTypes));\n const formattedTypes = uniqueTypes\n .map((type) => getColoredType(type))\n .join(\", \");\n spinner.succeed(`Found commit types: ${formattedTypes}`);\n } else {\n spinner.info(\n \"The list of commits is created by comparing the current branch\\n\" +\n \"with the main branch. 
If you are on a new branch, please\\n\" +\n \"make sure to commit some changes before running this command.\",\n );\n }\n\n if (setLabel) {\n await handleLabels(commitList, spinner);\n }\n } catch (error) {\n spinner.fail(\"Error getting commit messages\");\n console.error(error);\n }\n}\n\n/**\n * Handle applying labels based on commit types\n * @param commitList The list of commits to analyze\n * @param spinner The ora spinner instance for status updates\n */\nasync function handleLabels(\n commitList: CommitInfo[],\n spinner: Ora,\n): Promise<void> {\n const validCommitTypes = [\n \"breaking\",\n \"feat\",\n \"fix\",\n \"perf\",\n \"docs\",\n \"style\",\n \"refactor\",\n \"test\",\n \"build\",\n \"ci\",\n \"chore\",\n ];\n\n const foundCommitTypes = commitList\n .map((commit) => commit.type)\n .filter((type) => validCommitTypes.includes(type));\n\n let selectedLabel = null;\n let highestPriorityIndex = Number.POSITIVE_INFINITY;\n\n for (const type of foundCommitTypes) {\n const priorityIndex = validCommitTypes.indexOf(type);\n if (priorityIndex < highestPriorityIndex) {\n highestPriorityIndex = priorityIndex;\n selectedLabel = type;\n }\n }\n\n if (selectedLabel) {\n const labelSpinner = ora(\n \"Checking existing labels on pull request...\",\n ).start();\n try {\n const existingLabels = await getExistingLabels();\n\n if (existingLabels.includes(`semantic-status: ${selectedLabel}`)) {\n labelSpinner.info(\n `Label \"semantic-status: ${getColoredType(selectedLabel)}\" already exists on the pull request.`,\n );\n return;\n }\n\n labelSpinner.text = \"Applying label to pull request...\";\n await applyLabelToPR(selectedLabel);\n labelSpinner.succeed(\n `Label \"semantic-status: ${getColoredType(selectedLabel)}\" applied to the pull request.`,\n );\n } catch (error: unknown) {\n const errorMessage =\n error instanceof Error ? 
error.message : String(error);\n labelSpinner.fail(errorMessage);\n }\n } else {\n spinner.warn(\n chalk.yellow(\"No semantic commit type found to apply as label.\"),\n );\n }\n}\n", "import { appendFile, readFile } from \"node:fs/promises\";\nimport { Logger } from \"@aurodesignsystem/auro-library/scripts/utils/logger.mjs\";\nimport { simpleGit } from \"simple-git\";\nimport type { SimpleGit } from \"simple-git\";\n\n// Initialize simple-git with proper typing\nlet git: SimpleGit;\ntry {\n git = simpleGit({\n baseDir: process.cwd(),\n binary: \"git\",\n maxConcurrentProcesses: 1,\n });\n} catch (error) {\n Logger.error(`Failed to initialize git: ${error}`);\n // Provide a minimal implementation to prevent runtime errors\n git = {} as SimpleGit;\n}\n\nexport class Git {\n static async checkGitignore(pattern: string) {\n if (pattern === \"\") {\n return false;\n }\n try {\n const fileContent = await readFile(\".gitignore\", \"utf-8\");\n return fileContent.includes(pattern);\n } catch (err) {\n Logger.error(`Error reading file: ${err}`);\n return false;\n }\n }\n\n static async getCommitMessages(): Promise<\n Array<{\n type: string;\n hash: string;\n date: string;\n subject: string;\n body: string;\n message: string;\n author_name: string;\n }>\n > {\n try {\n interface GitCommitType {\n hash: string;\n date: string;\n subject: string;\n body: string;\n message: string;\n author_name: string;\n type: string;\n }\n\n const currentBranch = await git.branchLocal();\n Logger.info(`Current branch: ${currentBranch.current}`);\n\n // ---- Get target branch (main) and PR commits ----\n let targetBranch = \"main\";\n let commitRange = \"\";\n\n // Check if we're in a GitHub Actions environment\n const isGitHubAction = !!process.env.GITHUB_ACTIONS;\n\n if (isGitHubAction) {\n Logger.info(\"Running in GitHub Actions environment\");\n // In GitHub Actions, we can use environment variables to determine the PR branch and base\n targetBranch = process.env.GITHUB_BASE_REF || \"main\";\n\n try {\n // Ensure target branch is fetched\n await git.fetch(\"origin\", targetBranch);\n Logger.info(`Fetched target branch: origin/${targetBranch}`);\n\n // Use the merge base between target branch and current HEAD to get PR-specific commits\n const mergeBase = await git.raw([\n \"merge-base\",\n `origin/${targetBranch}`,\n \"HEAD\",\n ]);\n\n // Get commits between merge base and HEAD - these are the PR commits\n commitRange = `${mergeBase.trim()}..HEAD`;\n Logger.info(`Using commit range: ${commitRange}`);\n } catch (error) {\n Logger.warn(`Error setting up commit range in CI: ${error}`);\n // Fall back to simpler approach (just compare with origin/targetBranch)\n commitRange = `origin/${targetBranch}..HEAD`;\n Logger.info(`Falling back to commit range: ${commitRange}`);\n }\n } else {\n // Local environment - try to determine PR commits\n Logger.info(\"Running in local environment\");\n\n try {\n // First check if origin/main exists, fetch it if needed\n try {\n await git.raw([\"rev-parse\", \"--verify\", `origin/${targetBranch}`]);\n } catch {\n Logger.info(`Fetching ${targetBranch} from origin`);\n await git.fetch(\"origin\", targetBranch);\n }\n\n // Find merge base between current branch and target branch\n const mergeBase = await git.raw([\n \"merge-base\",\n `origin/${targetBranch}`,\n currentBranch.current,\n ]);\n\n commitRange = `${mergeBase.trim()}..HEAD`;\n Logger.info(`Using commit range for PR commits: ${commitRange}`);\n } catch (error) {\n Logger.warn(`Error determining PR commits locally: 
${error}`);\n\n // Fallback - use last few commits\n Logger.info(\"Falling back to analyzing recent commits\");\n commitRange = \"HEAD~10..HEAD\";\n Logger.info(`Using fallback commit range: ${commitRange}`);\n }\n }\n\n // Get and format the PR commits\n return await Git.getFormattedCommits(commitRange);\n } catch (err) {\n Logger.error(`Error getting commit messages: ${err}`);\n return [];\n }\n }\n\n // Helper function to get formatted commits for a given git range\n static async getFormattedCommits(commitRange: string): Promise<\n Array<{\n type: string;\n hash: string;\n date: string;\n subject: string;\n body: string;\n message: string;\n author_name: string;\n }>\n > {\n interface GitCommitType {\n hash: string;\n date: string;\n subject: string;\n body: string;\n message: string;\n author_name: string;\n type: string;\n }\n\n // Use a format that will let us parse each commit separately\n // %H = hash, %ad = author date, %an = author name, %s = subject, %b = body\n const branchCommitsRaw = await git.raw([\n \"log\",\n \"--pretty=format:COMMIT_START%n%H%n%ad%n%an%n%s%n%b%nCOMMIT_END\",\n \"--date=short\",\n commitRange,\n ]);\n\n // Split by our custom delimiter to get individual commits\n const commitChunks = branchCommitsRaw\n .split(\"COMMIT_START\\n\")\n .filter((chunk: string) => chunk.trim() !== \"\");\n\n const commits: GitCommitType[] = [];\n\n for (const chunk of commitChunks) {\n const parts = chunk.split(\"\\n\");\n if (parts.length >= 4) {\n const hash = parts[0];\n const date = parts[1];\n const author_name = parts[2];\n const subject = parts[3];\n\n // The rest is the body (may contain breaking changes)\n // Filter out the COMMIT_END marker\n const bodyLines = parts\n .slice(4)\n .filter((line: string) => line !== \"COMMIT_END\");\n const body = bodyLines.length > 0 ? bodyLines.join(\"\") : \"\";\n\n // Use a shorter hash format for better readability (7 characters)\n const shortHash = hash.substring(0, 7);\n\n // Determine commit type from subject\n const typeMatch = subject.match(\n /^(feat|fix|docs|style|refactor|perf|test|build|ci|chore)(\\(.+\\))?:/,\n );\n let type = typeMatch ? typeMatch[1] : \"unknown\";\n\n // Check for breaking changes\n if (body.includes(\"BREAKING CHANGE\")) {\n type = \"breaking\";\n }\n\n commits.push({\n type,\n hash: shortHash,\n date,\n subject,\n body,\n message: `${subject}${body ? 
`\\n\\n${body}` : \"\"}`,\n author_name,\n });\n }\n }\n\n return commits;\n }\n\n // Function to add file to .gitignore\n static async addToGitignore(pattern: string, log = true) {\n await Git.checkGitignore(pattern).then(async (result) => {\n if (result) {\n Logger.warn(`${pattern} already exists`);\n } else {\n try {\n await appendFile(\".gitignore\", `\\n${pattern}`);\n if (log) {\n Logger.success(`${pattern} added to .gitignore`);\n }\n } catch (err) {\n Logger.error(err);\n }\n }\n });\n }\n\n // Function to remove file from git cache\n static async removeFromGitCache(files: string[]) {\n try {\n await git.rmKeepLocal(files);\n Logger.success(`${files.join(\", \")} are removed from git cache`);\n } catch (err) {\n Logger.error(err);\n }\n }\n\n static async createBranch(branchName: string) {\n try {\n await git.checkoutLocalBranch(branchName);\n Logger.success(`Created and switched to ${branchName} branch`);\n } catch (err) {\n Logger.error(err);\n }\n }\n\n static async commitStagedFiles(message: string) {\n try {\n await git.add(\".\");\n await git.commit(message);\n Logger.success(`Committed with message: ${message}`);\n } catch (err) {\n Logger.error(err);\n }\n }\n}\n", "import chalk from \"chalk\";\n\n// Configuration constants for display\nexport const MAX_SUBJECT_LENGTH = 60;\nexport const MAX_BODY_LENGTH = 100;\n\nexport interface CommitInfo {\n type: string;\n hash: string;\n date: string;\n subject: string;\n body: string;\n message: string;\n author_name: string;\n}\n\n// Define valid commit types for better type checking\nexport type CommitType =\n | \"breaking\"\n | \"feat\"\n | \"fix\"\n | \"perf\"\n | \"docs\"\n | \"style\"\n | \"refactor\"\n | \"test\"\n | \"build\"\n | \"ci\"\n | \"chore\"\n | \"unknown\";\n\n/**\n * Get colored text for commit type using a more harmonious color scheme\n */\nexport function getColoredType(type: string): string {\n switch (type) {\n case \"breaking\":\n return chalk.bold.red(type);\n case \"feat\":\n return chalk.bold.green(type);\n case \"fix\":\n return chalk.bold.green(type);\n case \"perf\":\n return chalk.bold.green(type);\n case \"docs\":\n return chalk.bold.cyan(type);\n case \"style\":\n return chalk.bold.cyan(type);\n case \"refactor\":\n return chalk.bold.cyan(type);\n case \"test\":\n return chalk.bold.cyan(type);\n case \"build\":\n return chalk.bold.cyan(type);\n case \"ci\":\n return chalk.bold.cyan(type);\n case \"chore\":\n return chalk.bold.cyan(type);\n default:\n return chalk.bold.white(type);\n }\n}\n\n/**\n * Helper function to wrap long strings to new lines\n */\nexport function wrapString(str: string, maxLength: number): string {\n if (!str) {\n return \"\";\n }\n\n // If the string is shorter than maxLength, return it as is\n if (str.length <= maxLength) {\n return str;\n }\n\n // Split the string into words\n const words = str.split(\" \");\n let result = \"\";\n let currentLine = \"\";\n\n // Build wrapped text with line breaks\n for (const word of words) {\n // If adding this word would exceed maxLength, start a new line\n if ((currentLine + word).length > maxLength && currentLine.length > 0) {\n result += `${currentLine.trim()}\\n`;\n currentLine = \"\";\n }\n currentLine = `${currentLine}${word} `;\n }\n\n // Add the last line\n if (currentLine.length > 0) {\n result += currentLine.trim();\n }\n\n return result;\n}\n\n/**\n * Display commits in a debug format with detailed information\n */\nexport function displayDebugView(commitList: CommitInfo[]): void {\n for (const commit of commitList) {\n 
console.log(\"\u2500\".repeat(60));\n\n // Use a consistent color theme for metadata\n const subject = wrapString(commit.subject, MAX_SUBJECT_LENGTH);\n const body = wrapString(commit.body, MAX_BODY_LENGTH);\n\n // Display commit info in a more compact format\n console.log(chalk.bold(`${getColoredType(commit.type)}`));\n console.log(\n chalk.dim(`${commit.hash} | ${commit.date} | ${commit.author_name}`),\n );\n console.log(chalk.bold(`${chalk.white(subject)}`));\n\n // Only add body if it exists and keep it more compact\n if (commit.body) {\n console.log(chalk.dim(body));\n }\n }\n console.log(\"\u2500\".repeat(60));\n console.log(\"\\n\");\n}\n", "import github from \"@actions/github\";\n\n/**\n * Get existing labels from the current pull request in a GitHub Actions environment\n * @returns Promise that resolves with an array of label names\n */\nexport async function getExistingLabels(): Promise<string[]> {\n try {\n // Get the GitHub token from environment\n const token = process.env.GITHUB_TOKEN;\n\n if (!token) {\n throw new Error(\"GITHUB_TOKEN environment variable is not set\");\n }\n\n // Check if we're in a GitHub Actions environment\n if (!process.env.GITHUB_REPOSITORY || !process.env.GITHUB_EVENT_PATH) {\n throw new Error(\n \"This function can only be used in a GitHub Actions environment\",\n );\n }\n\n const octokit = github.getOctokit(token);\n const { context } = github;\n\n // Make sure we're in a pull request context\n if (!context.payload.pull_request) {\n throw new Error(\"No pull request found in the GitHub context\");\n }\n\n const [owner, repo] = process.env.GITHUB_REPOSITORY.split(\"/\");\n const prNumber = context.payload.pull_request.number;\n\n // Get existing labels\n const { data: existingLabels } =\n await octokit.rest.issues.listLabelsOnIssue({\n owner,\n repo,\n issue_number: prNumber,\n });\n\n // Return array of label names\n return existingLabels.map((label) => label.name);\n } catch (error) {\n if (error instanceof Error) {\n throw new Error(`Failed to get existing labels: ${error.message}`);\n }\n throw error;\n }\n}\n\n/**\n * Apply a label to the current pull request in a GitHub Actions environment\n * @param label The label to apply to the pull request\n * @returns Promise that resolves when the label is applied\n */\nexport async function applyLabelToPR(label: string): Promise<void> {\n try {\n // Get the GitHub token from environment\n const token = process.env.GITHUB_TOKEN;\n\n if (!token) {\n throw new Error(\"GITHUB_TOKEN environment variable is not set\");\n }\n\n // Check if we're in a GitHub Actions environment\n if (!process.env.GITHUB_REPOSITORY || !process.env.GITHUB_EVENT_PATH) {\n throw new Error(\n \"This function can only be used in a GitHub Actions environment\",\n );\n }\n\n const octokit = github.getOctokit(token);\n const { context } = github;\n\n // Make sure we're in a pull request context\n if (!context.payload.pull_request) {\n throw new Error(\"No pull request found in the GitHub context\");\n }\n\n const [owner, repo] = process.env.GITHUB_REPOSITORY.split(\"/\");\n const prNumber = context.payload.pull_request.number;\n\n // Add prefix to the label\n const prefixedLabel = `semantic-status: ${label}`;\n\n // Get existing labels\n const existingLabels = await getExistingLabels();\n\n // If the label we want to apply already exists, do nothing\n if (existingLabels.includes(prefixedLabel)) {\n return;\n }\n\n // Find existing semantic status labels that are different from the one we want to apply\n const existingSemanticLabels = 
existingLabels.filter(\n (existingLabel) =>\n existingLabel.startsWith(\"semantic-status:\") &&\n existingLabel !== prefixedLabel,\n );\n\n // Remove existing semantic status labels that don't match the new one\n for (const existingLabel of existingSemanticLabels) {\n await octokit.rest.issues.removeLabel({\n owner,\n repo,\n issue_number: prNumber,\n name: existingLabel,\n });\n }\n\n // Add the new semantic status label\n await octokit.rest.issues.addLabels({\n owner,\n repo,\n issue_number: prNumber,\n labels: [prefixedLabel],\n });\n\n return;\n } catch (error) {\n if (error instanceof Error) {\n throw new Error(`Failed to apply label: ${error.message}`);\n }\n throw error;\n }\n}\n", "import fs from \"node:fs\";\nimport { get } from \"node:https\"; // Change to https\nimport chalk from \"chalk\";\nimport { program } from \"commander\";\nimport ora from \"ora\";\nimport type { Ora } from \"ora\";\n\nexport default program\n .command(\"pr-release\")\n .option(\n \"-n, --namespace <package-namespace>\",\n \"Set namespace of the package release\",\n \"@aurodesignsystem-dev\",\n )\n .option(\n \"-p, --pr-number <number>\",\n \"Set pull request number for the release\",\n \"0\",\n )\n .description(\n \"Generate the package version based off of PR number then update the package.json file. Note: this does not publish the package.\",\n )\n .action(async (option) => {\n await updatePackageJson(option);\n });\n\ninterface ReleaseOptions {\n namespace: string;\n prNumber: number;\n}\n\nconst updatePackageJson = async (option: ReleaseOptions): Promise<void> => {\n const { namespace, prNumber } = option;\n\n const packageSpinner = ora(\"Updating package.json\").start();\n\n try {\n const packageJsonPath = \"package.json\";\n\n // Read package.json\n const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, \"utf8\"));\n\n // Check if release version is on npmjs already\n packageSpinner.text = \"Checking npm registry for version information...\";\n\n const releaseVersion = `0.0.0-pr${prNumber}`;\n const packageComponent = packageJson.name.split(\"/\")[1];\n const packageName = `${namespace}/${packageComponent}`;\n const incrementVersion = await getIncrementVersion(\n releaseVersion,\n packageName,\n packageSpinner,\n );\n const packageVersion = `${releaseVersion}.${incrementVersion}`;\n\n packageJson.name = packageName;\n packageJson.version = packageVersion;\n\n packageSpinner.text = \"Writing updated package.json...\";\n\n // Write the updated package.json back to the file\n fs.writeFileSync(\n packageJsonPath,\n `${JSON.stringify(packageJson, null, 2)}\\n`,\n \"utf8\",\n );\n\n packageSpinner.succeed(\n `Package.json updated to use ${chalk.green(packageVersion)} and ${chalk.green(packageName)}`,\n );\n\n // Explicitly exit with success code to ensure terminal prompt returns\n process.exit(0);\n } catch (error: unknown) {\n packageSpinner.fail(`Failed to update package.json: ${error}`);\n process.exit(1); // Exit with error code\n }\n};\n\n// checks if version exists on npmjs and returns the next available increment version\nconst getIncrementVersion = (\n releaseVersion: string,\n packageName: string,\n spinner: Ora,\n): Promise<number> => {\n return new Promise((resolve) => {\n try {\n // Use the registry URL to get all versions for the package\n const registryUrl = `https://registry.npmjs.org/${packageName}`;\n\n const req = get(\n registryUrl,\n {\n headers: { Accept: \"application/json\" },\n },\n (res) => {\n // Handle redirects\n if (\n (res.statusCode === 301 || res.statusCode === 
302) &&\n res.headers.location\n ) {\n // Persist redirect message\n spinner.info(`Following redirect to ${res.headers.location}...`);\n try {\n get(\n res.headers.location,\n { headers: { Accept: \"application/json\" } },\n handleResponse,\n )\n .on(\"error\", (err) => {\n // On redirect error, default to 0\n spinner.warn(\n `Error following redirect: ${err.message}, defaulting to version 0`,\n );\n resolve(0);\n })\n .end();\n } catch (error) {\n // If redirect request fails, default to 0\n spinner.warn(\n `Redirect request failed: ${error instanceof Error ? error.message : \"Unknown error\"}, defaulting to version 0`,\n );\n resolve(0);\n }\n return;\n }\n\n handleResponse(res);\n },\n );\n\n function handleResponse(res: import(\"http\").IncomingMessage) {\n if (res.statusCode !== 200) {\n // If package not found or other error, we can start with version 0\n spinner.info(\n `Package not found. Status code: ${chalk.red(res.statusCode)}, defaulting to version 0`,\n );\n resolve(0);\n return;\n }\n\n spinner.text = \"Processing version information...\";\n let data = \"\";\n res.on(\"data\", (chunk: Buffer | string) => {\n data += chunk;\n });\n\n res.on(\"end\", () => {\n try {\n const packageData = JSON.parse(data);\n const versions = packageData.versions\n ? Object.keys(packageData.versions)\n : [];\n\n spinner.text = \"Calculating next version number...\";\n\n // Find the highest existing iteration for this release version\n let maxIteration = -1;\n const versionRegex = new RegExp(`^${releaseVersion}\\\\.(\\\\d+)$`);\n\n for (const version of versions) {\n const match = version.match(versionRegex);\n if (match) {\n const iteration = Number.parseInt(match[1], 10);\n maxIteration = Math.max(maxIteration, iteration);\n }\n }\n\n // Return the next iteration number and persist this important info\n if (maxIteration >= 0) {\n spinner.info(\n `Found existing version ${chalk.green(`${releaseVersion}.${maxIteration}`)}. Incrementing to ${chalk.green(`${releaseVersion}.${maxIteration + 1}`)}`,\n );\n } else {\n spinner.info(\n `No existing version found for ${chalk.green(releaseVersion)}. Starting with ${chalk.green(`${releaseVersion}.0`)}`,\n );\n }\n resolve(maxIteration + 1);\n } catch (error) {\n // In case of parsing error, default to 0\n spinner.warn(\n `Failed to parse NPM registry response: ${error instanceof Error ? 
error.message : \"Unknown error\"}, defaulting to version 0`,\n );\n resolve(0);\n }\n });\n }\n\n req.on(\"error\", (err) => {\n // On request error, default to 0\n spinner.warn(`Request error: ${err.message}, defaulting to version 0`);\n resolve(0);\n });\n\n req.end();\n } catch (error) {\n // Catch any other errors and default to 0\n spinner.warn(\n \"Error checking version in npm registry, defaulting to version 0\",\n );\n resolve(0);\n }\n });\n};\n", "import path from \"node:path\";\nimport { fileURLToPath } from \"node:url\";\nimport { program } from \"commander\";\nimport open from \"open\";\nimport { shell } from \"#utils/shell.js\";\n\nconst __filename = fileURLToPath(import.meta.url);\nconst cliRootDir = path.resolve(path.dirname(__filename), \"..\");\n\nexport default program\n .command(\"test\")\n .option(\"-w, --watch\", \"Set watch number for the test\")\n .option(\"-c, --coverage-report\", \"Generate coverage report\")\n .option(\"-o, --open\", \"Open the coverage report in the browser\")\n .option(\"-f, --files <String|String[]>\", \"Test files glob pattern\")\n .description(\"Run the web test runner to test the component library\")\n .action(async (option) => {\n const configPath = path.join(\n cliRootDir,\n \"dist\",\n \"configs\",\n \"web-test-runner.config.mjs\",\n );\n let command = `npx wtr --config \"${configPath}\"`;\n const coveragePath = `${process.cwd()}/coverage/index.html`;\n\n if (option.coverageReport) {\n command += \" --coverage\";\n }\n\n if (option.watch) {\n command += \" --watch\";\n }\n\n if (option.files) {\n const files = Array.isArray(option.files)\n ? option.files.join(\" \")\n : option.files;\n command += ` --files \"${files}\"`;\n }\n\n shell(command);\n\n if (option.open) {\n await open(coveragePath);\n }\n });\n", "import fs from \"node:fs/promises\";\nimport path from \"node:path\";\nimport { program } from \"commander\";\nimport inquirer from \"inquirer\";\nimport ora from \"ora\";\nimport { createMultiGitterDependencyTreeConfig } from \"#scripts/agent/run-migrations/writeMultiGitterConfig.js\";\nimport {\n formatDependencyTree,\n getBatchedUpdateOrder,\n} from \"#scripts/formatDependencyTree.ts\";\nimport { fromCliRoot, withHomeDir } from \"#utils/pathUtils.js\";\nimport { shell } from \"#utils/shell.js\";\n\n// Multi-gitter and other config files live here\nconst CONFIG_DIR = withHomeDir(\"run-migrations\", \"config\");\n// Generated output files live here\nconst OUTPUT_DIR = withHomeDir(\"run-migrations\", \"outputs\");\n\nenum AgentActions {\n RunMigration = \"run-migration\",\n // Add more actions as needed\n}\n\ninterface AgentAnswers {\n agentAction: AgentActions;\n}\n\n// Agent component options\n// =========================================================\n\nconst auroComponents = [\n \"@aurodesignsystem/auro-accordion\",\n \"@aurodesignsystem/auro-alert\",\n \"@aurodesignsystem/auro-avatar\",\n \"@aurodesignsystem/auro-background\",\n \"@aurodesignsystem/auro-backtotop\",\n \"@aurodesignsystem/auro-button\",\n \"@aurodesignsystem/auro-badge\",\n \"@aurodesignsystem/auro-banner\",\n \"@aurodesignsystem/auro-card\",\n \"@aurodesignsystem/auro-carousel\",\n \"@aurodesignsystem/auro-datetime\",\n \"@aurodesignsystem/auro-dialog\",\n \"@aurodesignsystem/auro-drawer\",\n \"@aurodesignsystem/auro-formkit\",\n \"@aurodesignsystem/auro-flight\",\n \"@aurodesignsystem/auro-flightline\",\n \"@aurodesignsystem/auro-header\",\n \"@aurodesignsystem/auro-hyperlink\",\n \"@aurodesignsystem/auro-icon\",\n \"@aurodesignsystem/auro-loader\",\n 
\"@aurodesignsystem/auro-lockup\",\n \"@aurodesignsystem/auro-nav\",\n \"@aurodesignsystem/auro-pane\",\n \"@aurodesignsystem/auro-popover\",\n \"@aurodesignsystem/auro-sidenav\",\n \"@aurodesignsystem/auro-skeleton\",\n \"@aurodesignsystem/auro-slideshow\",\n \"@aurodesignsystem/auro-table\",\n \"@aurodesignsystem/auro-tabs\",\n \"@aurodesignsystem/auro-toast\",\n];\n\nconst auroPackages = [\n ...auroComponents,\n \"@aurodesignsystem/auro-library\",\n \"@aurodesignsystem/WebCoreStyleSheets\",\n \"@aurodesignsystem/AuroDesignTokens\",\n \"@aurodesignsystem/auro-cli\",\n \"@alaskaairux/icons\",\n];\n\n// Agent helpers\n// =========================================================\ninterface DependencyTreeAnswers {\n useExisting: boolean;\n}\n\nasync function getOrCreateDependencyTree(\n relevantPackages: string[],\n): Promise<string> {\n // check if output and config directories exist, if not create them\n try {\n await fs.mkdir(OUTPUT_DIR, { recursive: true });\n await fs.mkdir(CONFIG_DIR, { recursive: true });\n } catch (error) {\n console.error(\"Failed to create output or config directories:\", error);\n process.exit(1);\n }\n\n const spinner = ora(\"Creating dependency tree...\").start();\n\n // Create multi-gitter dependency tree configuration\n spinner.text = \"Creating multi-gitter dependency tree configuration...\";\n await createMultiGitterDependencyTreeConfig(CONFIG_DIR);\n\n spinner.text = \"Scraping dependencies from Auro packages...\";\n\n // Run multi-gitter using the generated config\n const scriptPath = fromCliRoot(\"static\", \"getAuroDeps.js\");\n const multiGitterCommand = `multi-gitter run \"node ${scriptPath}\" --config ${path.join(CONFIG_DIR, \"multi-gitter_DEPENDENCY_TREE.yml\")}`;\n try {\n await shell(multiGitterCommand);\n } catch (error) {\n spinner.fail(\"Failed to generate dependency tree:\");\n console.error(error);\n process.exit(1);\n }\n\n spinner.text = \"Generating dependency tree JSON file using packages...\";\n await formatDependencyTree(OUTPUT_DIR, relevantPackages);\n\n spinner.succeed(\"Dependency tree generated successfully.\");\n\n return path.join(OUTPUT_DIR, \"dependencyTree.json\");\n}\n\nconst getDependencyBatchesFromTree = async (\n dependencyTreePath: string,\n): Promise<string[][]> => {\n const spinner = ora(\"Loading dependency tree...\").start();\n const dependencyTree = JSON.parse(\n await fs.readFile(dependencyTreePath, \"utf-8\"),\n );\n\n spinner.text = \"Processing dependency tree...\";\n const batches = getBatchedUpdateOrder(dependencyTree);\n spinner.succeed(\"Dependency batches created successfully.\");\n\n return batches;\n};\n\n// Agent command\n// =========================================================\nexport default program.command(\"agent\").action(async (option) => {\n const answers = await inquirer.prompt([\n {\n type: \"select\",\n name: \"agentAction\",\n message: \"What agent action do you want to perform?\",\n choices: [\n {\n name: \"Run a migration on auro components\",\n value: AgentActions.RunMigration,\n },\n ],\n default: [AgentActions.RunMigration],\n },\n\n {\n type: \"input\",\n name: \"migrationId\",\n message: \"What migration id do you want to run?\",\n when: (answers) => answers.agentAction === AgentActions.RunMigration,\n validate: (input) =>\n input.trim() !== \"\" || \"Migration id cannot be empty.\",\n },\n\n {\n type: \"confirm\",\n name: \"useExisting\",\n message: \"Would you like to specify starting packages?\",\n default: true,\n transformer: (value) =>\n value ? 
\"Yes = Packages related to selections\" : \"No = All packages\",\n when: (answers) => answers.agentAction === AgentActions.RunMigration,\n },\n\n {\n type: \"checkbox\",\n name: \"startWithComponents\",\n message:\n \"Enter the components to start with (comma-separated, blank for all):\",\n choices: auroComponents.map((component) => ({\n name: component.replace(\"@aurodesignsystem/\", \"\"),\n value: component,\n })),\n when: (answers) =>\n answers.agentAction === AgentActions.RunMigration &&\n answers.useExisting,\n },\n ]);\n\n switch (answers.agentAction) {\n case AgentActions.RunMigration: {\n // Placeholder for actual migration logic\n const spinner = ora(\"Running migration...\").start();\n const dependencyTreePath = await getOrCreateDependencyTree(\n answers.startWithComponents,\n );\n\n spinner.text = \"Getting dependency batches from tree...\";\n const dependencyBatches =\n await getDependencyBatchesFromTree(dependencyTreePath);\n\n const batchedUpdateOrderText = dependencyBatches\n .map(\n (batch, index) =>\n `Batch ${index + 1}\\n${batch.map((pkg) => ` - ${pkg.replace(\"@aurodesignsystem\", \"AlaskaAirlines\").replace(\"@alaskaairux/icons\", \"AlaskaAirlines/Icons\")}`).join(\"\\n\")}`,\n )\n .join(\"\\n\\n\");\n\n console.log(batchedUpdateOrderText);\n\n spinner.text = \"Running migrations on dependency batches...\";\n // DO STUFF HERE :)\n\n new Promise((resolve) => setTimeout(resolve, 2000)); // Simulate async operation\n spinner.succeed(\"Migration process completed successfully.\");\n\n // spinner.succeed(\"Migration process completed.\");\n break;\n }\n // Add more cases for additional actions as needed\n default:\n console.error(\"Unknown action selected.\");\n // spinner.fail(\"Unknown action selected.\");\n }\n});\n", "import fs from \"node:fs/promises\";\nimport path from \"node:path\";\nimport ora from \"ora\";\n\nconst JsonConfig = {\n \"auth-type\": \"workspace-token\",\n \"author-email\": null,\n \"author-name\": null,\n \"base-branch\": \"main\",\n \"base-url\": null,\n \"clone-dir\": \".gitter-temp\",\n \"code-search\": null,\n concurrent: 4,\n \"conflict-strategy\": \"replace\",\n draft: false,\n \"dry-run\": true,\n \"fetch-depth\": 1,\n fork: false,\n \"fork-owner\": null,\n \"git-type\": \"go\",\n group: null,\n \"include-subgroups\": false,\n insecure: false,\n interactive: false,\n labels: null,\n \"log-file\": \"'-'\",\n \"log-format\": \"'text'\",\n \"log-level\": \"'error'\",\n \"max-reviewers\": 0,\n \"max-team-reviewers\": 0,\n org: null,\n output: \"'-'\",\n \"plain-output\": false,\n platform: \"github\",\n project: null,\n \"push-only\": false,\n repo: [\n \"AlaskaAirlines/auro-accordion\",\n \"AlaskaAirlines/auro-alert\",\n \"AlaskaAirlines/auro-avatar\",\n \"AlaskaAirlines/auro-background\",\n \"AlaskaAirlines/auro-backtotop\",\n \"AlaskaAirlines/auro-button\",\n \"AlaskaAirlines/auro-badge\",\n \"AlaskaAirlines/auro-banner\",\n \"AlaskaAirlines/auro-card\",\n \"AlaskaAirlines/auro-carousel\",\n \"AlaskaAirlines/auro-datetime\",\n \"AlaskaAirlines/auro-dialog\",\n \"AlaskaAirlines/auro-drawer\",\n \"AlaskaAirlines/auro-flight\",\n \"AlaskaAirlines/auro-flightline\",\n \"AlaskaAirlines/auro-header\",\n \"AlaskaAirlines/auro-hyperlink\",\n \"AlaskaAirlines/auro-icon\",\n \"AlaskaAirlines/auro-loader\",\n \"AlaskaAirlines/auro-lockup\",\n \"AlaskaAirlines/auro-nav\",\n \"AlaskaAirlines/auro-pane\",\n \"AlaskaAirlines/auro-popover\",\n \"AlaskaAirlines/auro-sidenav\",\n \"AlaskaAirlines/auro-skeleton\",\n \"AlaskaAirlines/auro-slideshow\",\n 
\"AlaskaAirlines/auro-table\",\n \"AlaskaAirlines/auro-tabs\",\n \"AlaskaAirlines/auro-toast\",\n // UNCOMMENT BELOW WHEN MAIN/MASTER BRANCHES ARE READY\n // \"AlaskaAirlines/AuroDocsSite\"\n ],\n \"repo-exclude\": null,\n \"repo-include\": null,\n \"repo-search\": null,\n reviewers: null,\n \"skip-forks\": false,\n \"skip-pr\": false,\n \"skip-repo\": null,\n \"ssh-auth\": false,\n \"team-reviewers\": null,\n};\n\nfunction toYaml(config) {\n return Object.entries(config)\n .map(([key, value]) => {\n if (Array.isArray(value)) {\n return `${key}:\\n - ${value.join(\"\\n - \")}`;\n }\n if (typeof value === \"object\" && value !== null) {\n return `${key}:\\n${Object.entries(value)\n .map(([k, v]) => ` ${k}: ${v}`)\n .join(\"\\n\")}`;\n }\n return `${key}: ${value}`;\n })\n .join(\"\\n\");\n}\n\nexport async function createMultiGitterDependencyTreeConfig(outputPath) {\n const spinner = ora(\"Writing multi-gitter configuration...\").start();\n const configContent = toYaml(JsonConfig);\n const configPath = path.join(outputPath, \"multi-gitter_DEPENDENCY_TREE.yml\");\n\n try {\n await fs.writeFile(configPath, configContent, \"utf8\");\n spinner.succeed(`Multi-gitter configuration written to ${configPath}`);\n } catch (error) {\n spinner.fail(\"Error writing multi-gitter configuration:\");\n console.error(error);\n }\n}\n", "import fs from \"node:fs\";\nimport path from \"node:path\";\n\ninterface PackageJsonExcerpt {\n name: string;\n peerDependencies: Record<string, string>;\n devDependencies: Record<string, string>;\n dependencies: Record<string, string>;\n}\n\ninterface DependencyNode {\n dependsOn: string[];\n dependentPackages: string[];\n}\n\ntype DependencyTree = Record<string, DependencyNode>;\n\nexport function getBatchedUpdateOrder(\n dependencyTree: DependencyTree,\n): Array<string[]> {\n const inDegree: Record<string, number> = {};\n const batches: Array<string[]> = [];\n let currentBatch: string[] = [];\n const queue: string[] = [];\n\n // Initialize in-degree (count of dependencies for each package)\n for (const pkg in dependencyTree) {\n inDegree[pkg] = dependencyTree[pkg].dependsOn.length;\n }\n\n // Find packages with no dependencies (in-degree = 0)\n for (const pkg in inDegree) {\n if (inDegree[pkg] === 0) {\n queue.push(pkg);\n }\n }\n\n while (queue.length > 0) {\n currentBatch = [];\n // Process the queue (topological sorting)\n const queueLength = queue.length;\n for (let i = 0; i < queueLength; i++) {\n const current = queue.shift()!;\n currentBatch.push(current);\n\n // Reduce the in-degree of dependent packages\n for (const dependent of dependencyTree[current].dependentPackages) {\n inDegree[dependent]--;\n\n // If a package now has no dependencies, add it to the queue\n if (inDegree[dependent] === 0) {\n queue.push(dependent);\n }\n }\n }\n batches.push(currentBatch);\n }\n\n // If we couldn't process all packages, there is a circular dependency\n if (batches.flat().length !== Object.keys(dependencyTree).length) {\n throw new Error(\"Circular dependency detected!\");\n }\n\n return batches;\n}\n\nfunction getJsonFilesFromDirectory(directory: string): string[] {\n return fs.readdirSync(directory).filter((file) => file.endsWith(\".json\"));\n}\n\n/**\n * Formats the dependency tree for the specified target dependencies.\n * @param rawTargetDependencies {string[]} - List of target dependencies to format. Expects package names like \"button\", \"hyperlink\", etc. 
without the \"@aurodesignsystem/\" prefix.\n * @returns {Promise<DependencyTree>} - A promise that resolves to the formatted dependency tree.\n */\nexport async function formatDependencyTree(\n jsonFileDirectory: string,\n targetDependencies: string[] = [],\n): Promise<DependencyTree> {\n console.log(targetDependencies);\n let dependencyTree: DependencyTree = {};\n\n const files = getJsonFilesFromDirectory(jsonFileDirectory);\n\n for (const file of files) {\n // Skip the dependency tree file itself if it already exists\n if (file === \"dependencyTree.json\") {\n continue;\n }\n\n const contents = fs.readFileSync(`${jsonFileDirectory}/${file}`, \"utf-8\");\n const data: PackageJsonExcerpt = JSON.parse(contents);\n\n const packageName = data.name;\n const peerDependencies = Object.keys(data.peerDependencies);\n const devDependencies = Object.keys(data.devDependencies);\n const dependencies = Object.keys(data.dependencies);\n\n if (!dependencyTree[packageName]) {\n dependencyTree[packageName] = { dependsOn: [], dependentPackages: [] };\n }\n\n const allDependencies = [\n ...peerDependencies,\n ...devDependencies,\n ...dependencies,\n ];\n\n dependencyTree[packageName].dependsOn = [...new Set(allDependencies)];\n\n for (const dependency of allDependencies) {\n if (!dependencyTree[dependency]) {\n dependencyTree[dependency] = { dependsOn: [], dependentPackages: [] };\n }\n\n if (!dependencyTree[dependency].dependentPackages.includes(packageName)) {\n dependencyTree[dependency].dependentPackages.push(packageName);\n }\n }\n }\n\n // If there are no specified target dependencies, use all packages\n if (targetDependencies.length) {\n // If there ARE target dependencies, filter the dependency tree down to just relevant packages\n // A tree will start only include package that the target dependencies depend on, OR packages that depend on the target dependencies\n const relevantPackages = new Set<string>();\n\n // Include any packages that depend on a target dependency\n for (const [pkg, node] of Object.entries(dependencyTree)) {\n if (node.dependsOn.some((dep) => targetDependencies.includes(dep))) {\n relevantPackages.add(pkg);\n }\n }\n\n // Also include the target dependencies themselves\n for (const target of targetDependencies) {\n if (dependencyTree[target]) {\n relevantPackages.add(target);\n }\n }\n\n // Final filtered dependency tree\n const _filteredDependencyTree: DependencyTree = {};\n for (const pkg of relevantPackages) {\n _filteredDependencyTree[pkg] = {\n dependsOn: dependencyTree[pkg].dependsOn.filter((dep) =>\n relevantPackages.has(dep),\n ),\n dependentPackages: dependencyTree[pkg].dependentPackages.filter((dep) =>\n relevantPackages.has(dep),\n ),\n };\n }\n\n dependencyTree = _filteredDependencyTree;\n } else {\n console.log(\"No target dependencies provided - using all packages.\");\n }\n\n // Write the dependency tree to a file\n fs.writeFileSync(\n `${jsonFileDirectory}/dependencyTree.json`,\n JSON.stringify(dependencyTree, null, 2),\n );\n\n return dependencyTree;\n}\n", "import path from \"node:path\";\nimport process from \"node:process\";\nimport { fileURLToPath } from \"node:url\";\n\nexport function getAuroHomeDir() {\n const homeDir = process.env.HOME || process.env.USERPROFILE;\n\n return path.join(homeDir, \".auro\");\n}\n\nexport function withHomeDir(...args) {\n return path.join(getAuroHomeDir(), ...args);\n}\n\nexport function fromCliRoot(...relativePath) {\n const filename = fileURLToPath(import.meta.url);\n const dirname = path.dirname(filename);\n\n return 
path.resolve(dirname, ...relativePath);\n}\n", "import { program } from \"commander\";\nimport { cem, docs } from \"#scripts/docs/index.ts\";\n\nexport const docsCommand = program\n .command(\"docs\")\n .description(\"Generate API documentation\")\n .option(\"-c, --cem\", \"Generate Custom Elements Manifest (CEM) file\", false)\n .action(async (options) => {\n\n if (options.cem) {\n await cem();\n }\n\n await docs();\n });\n"],
5
- "mappings": ";AAAA,OAAS,WAAAA,OAAe,YCAxB,OAAOC,OAAY,SACnB,OAAS,QAAAC,OAAY,kBAErB,IAAOC,GAAQ,IACND,GAAKD,GAAO,SAAS,UAAU,CAAC,ECHzC,OAAOG,OAAQ,UACf,OAAOC,OAAU,YACjB,OAAS,iBAAAC,OAAqB,WAM9B,SAASC,EAASC,EAAS,CACrB,QAAQ,IAAI,OACd,QAAQ,IAAI,WAAWA,CAAO,EAAE,CAEpC,CAMe,SAARC,GAAqC,CAC1C,GAAI,CAEF,IAAMC,EAAaJ,GAAc,YAAY,GAAG,EAC1CK,EAAYN,GAAK,QAAQK,CAAU,EACzCH,EAAS,wBAAwBI,CAAS,EAAE,EAG5C,IAAMC,EAAcP,GAAK,QAAQM,EAAW,KAAM,cAAc,EAGhE,OADAJ,EAAS,6BAA6BK,CAAW,EAAE,EAC/CR,GAAG,WAAWQ,CAAW,GAC3BL,EAAS,0BAA0BK,CAAW,EAAE,EAC5B,KAAK,MAAMR,GAAG,aAAaQ,EAAa,MAAM,CAAC,EAChD,UAIrBL,EACE,8FACF,EACO,QACT,OAASM,EAAO,CACd,eAAQ,MAAM,oCAAqCA,CAAK,EACjD,OACT,CACF,CC7CA,OAAS,WAAAC,OAAe,YACxB,OAAOC,OAAS,MCGT,SAASC,EAAiBC,EAAS,CACxC,OAAOA,EACJ,OAAO,gCAAiC,gCAAgC,EACxE,OAAO,cAAe,qBAAqB,EAC3C,OAAO,cAAe,gCAAiC,EAAK,EAC5D,OACC,yBACA,iDACF,EACC,OAAO,0BAA2B,sCAAsC,CAC7E,CAKO,SAASC,GAAkBD,EAAS,CACzC,OAAOA,EACJ,OAAO,cAAe,iBAAiB,EACvC,OAAO,sBAAuB,qBAAqB,EACnD,OAAO,aAAc,4CAA4C,CACtE,CCxBA,OAAOE,OAAY,wBACnB,OAAS,SAAAC,OAAa,SCDtB,OAAS,UAAAC,OAAc,UACvB,OAAS,QAAAC,OAAY,YACrB,OAAOC,MAAS,MAChB,OAAS,UAAAC,MAAc,SCHvB,OAAOC,OAAS,MCAhB,OAAS,SAAAC,OAAa,qBACtB,OAAOC,OAAS,MAEhB,IAAMC,EAAQ,CAACC,EAASC,IAAU,CAChC,IAAMC,EAAgB,GAAGF,CAAO,IAAIC,EAAQA,EAAM,KAAK,GAAG,EAAI,EAAE,GAG1DE,EAAUL,GAAI,EAGhBM,EAAeJ,EACfK,EAAYJ,GAAS,CAAC,EAE1B,GAAI,CAACA,GAAS,OAAOD,GAAY,SAAU,CACzC,IAAMM,EAAQN,EAAQ,MAAM,GAAG,EAC/BI,EAAeE,EAAM,CAAC,EACtBD,EAAYC,EAAM,MAAM,CAAC,CAC3B,CAGA,IAAMC,EACJL,EAAc,SAAS,SAAS,GAAKA,EAAc,SAAS,KAAK,EAO7DM,EAAQX,GAAMO,EAAcC,EAAW,CAC3C,MALYE,EACV,UACA,CAAC,UAAW,OAAQ,MAAM,EAI5B,MAAO,EACT,CAAC,EAGD,GAAI,CAACA,EAAa,CAEhB,IAAME,EAAgB,CAAC,EAEvBD,EAAM,QAAQ,GAAG,OAASE,GAAS,CAEjC,IAAMC,EAASD,EAAK,SAAS,EAG7BD,EAAc,KAAKE,CAAM,EAGzB,QAAQ,OAAO,MAAMA,CAAM,CAC7B,CAAC,EAEDH,EAAM,QAAQ,GAAG,OAASE,GAAS,CACjC,IAAMC,EAASD,EAAK,SAAS,EAC7BD,EAAc,KAAKE,CAAM,EACzB,QAAQ,OAAO,MAAMA,CAAM,CAC7B,CAAC,CACH,CAGA,OAAO,IAAI,QAAQ,CAACC,EAASC,IAAW,CACtCL,EAAM,GAAG,QAAUM,GAAS,CACtBA,IAAS,EAEPP,GACFJ,EAAQ,KAAK,mCAAmCW,CAAI,EAAE,EACtDF,EAAQ,IAERT,EAAQ,KAAK,GAAGD,CAAa,iBAAiBY,CAAI,GAAG,EACrDD,EAAO,IAAI,MAAM,iCAAiCC,CAAI,EAAE,CAAC,IAG3DX,EAAQ,QAAQ,GAAGD,CAAa,yBAAyB,EACzDU,EAAQ,EAEZ,CAAC,CACH,CAAC,CACH,ECzEA,OAAOG,MAAQ,UACf,OAAOC,OAAU,YA0BjB,IAAqBC,EAArB,KAA0B,CACxB,YAAe,SAAoB,CAAE,cAAe,QAAS,OAAQ,GAAI,QAAS,CAAC,CAAE,EAKrF,OAAO,SAASC,EAA2B,CAAC,EAAS,CACnD,GAAM,CACJ,OAAAC,EAAS,SACT,QAAAC,EAAU,SACV,aAAAC,EAAe,wBACjB,EAAIH,EAGJ,GAAIG,EACF,GAAI,CACF,IAAMC,EAAkBP,EAAG,aAAaM,EAAc,MAAM,EAC5D,KAAK,SAAW,KAAK,MAAMC,CAAe,CAC5C,OAASC,EAAO,CACd,cAAQ,MAAM,kCAAkCF,CAAY,IAAKE,CAAK,EAChEA,CACR,CAGF,IAAMC,EAAW,KAAK,YAAY,EAG5BC,EAAUN,EACXJ,EAAG,WAAWU,CAAO,GACxBV,EAAG,UAAUU,EAAS,CAAE,UAAW,EAAK,CAAC,EAI3C,IAAMC,EAAc,KAAK,kBAAkBF,CAAQ,EAC7CG,EAAcX,GAAK,KAAKS,EAASL,CAAO,EAC9CL,EAAG,cAAcY,EAAaD,CAAW,EACzC,QAAQ,IAAI,2CAA2CC,CAAW,EAAE,CACtE,CAKA,OAAO,aAA0C,CAC/C,OAAO,KAAK,SAAS,QAAQ,OAC3B,CAACC,EAAiCC,IAChCD,EAAI,OACFC,EAAO,cAAc,OAClBC,GACC,kBAAmBA,GAAOA,EAAI,gBAAkB,IAAQ,YAAaA,GACrE,KAAK,YAAYD,CAAM,CAC3B,GAAK,CAAC,CACR,EACF,CAAC,CACH,CACF,CAKA,OAAO,YAAYA,EAAyB,CAE1C,IAAMb,EAAOa,EAAO,KACpB,OAAKb,EAKEA,EAAK,WAAW,mBAAmB,GAAKA,EAAK,SAAS,KAAK,EAJzD,EAKX,CAKA,OAAO,kBAAkBQ,EAA8C,CACrE,MAAO,GAAGA,EACP,IAAKO,GAAsC,KAAK,cAAcA,EAAS,EAAK,CAAC,EAC7E,KAAK;AAAA;AAAA;AAAA;AAAA,CAAa,CAAC;AAAA,KAExB,CAKA,OAAO,cAAcA,EAAmCC,EAAe,GAAc,CACnF,MAAO,GAAGA,EAAe,KAAKD,EAAQ,OAAO;AAAA;AAAA,EAAS,KAAKA,EAAQ,OAAO;AAAA;AAAA,CAAM,GAAGA,EAAQ,YAAc,GAAGA,EAAQ,WAAW;AAAA;AAAA,EAAS,EAAE,GAAG,KAAK,gCAAgCA,CAAO,CAAC,GAAG,KAAK,YAChM,UACA,CAAC,OAAQ,aAAc,mBAAoB,aAAa,GACvDA,EAAQ,SAAW,CAAC,GAClB,OACEE,GACCA,EAAE,OAAS,WAAa,YAAaA,EAAIA,EAAE,UAAY,UAAY,KAASA,EAAE
,KAAK,CAAC,IAAM,GAC9F,EACC,IAAKA,IAAoB,CACxB,GAAGA,EACH,WAAY,KAAK,iBAAiB,eAAgBA,EAAIA,EAAE,WAA4B,MAAS,CAC/F,EAAE,CACN,CAAC,GAAG,KAAK,YACP,SACA,CAAC,OAAQ,aAAa,EACtBF,EAAQ,MACV,CAAC,GAAG,KAAK,YACP,QACA,CAAC,CAAC,OAAQ,WAAW,EAAG,aAAa,EACrCA,EAAQ,KACV,CAAC,GAAG,KAAK,YACP,mBACA,CAAC,OAAQ,aAAa,EACtBA,EAAQ,QACV,CAAC,GAAG,KAAK,YACP,wBACA,CAAC,OAAQ,aAAa,EACtBA,EAAQ,aACV,CAAC,EACH,CAKA,OAAO,gCAAgCA,EAA2C,CAChF,IAAMG,EAAaH,EAAQ,SAAS,OAAQE,GAAmBA,EAAE,OAAS,OAAO,GAAK,CAAC,EACjFE,EAAaJ,EAAQ,YAAc,CAAC,EAGpCK,EAAgC,CAAC,EACjCC,EAAiB,IAAI,IAkC3B,GA/BAH,EAAW,QAASI,GAAsB,CACpCA,EAAK,aAAa,KAAK,GACzBF,EAAW,KAAK,CACd,KAAME,EAAK,KACX,WAAYA,EAAK,KACjB,YAAa,cAAeA,EAAOA,EAAK,UAAsB,KAAO,GACrE,KAAM,KAAK,IAAIA,EAAM,WAAW,GAAK,GACrC,SAAU,YAAaA,EAAOA,EAAK,QAAoB,KAAO,GAC9D,YAAaA,EAAK,aAAe,EACnC,CAAC,EAEHD,EAAe,IAAIC,EAAK,IAAI,EACxB,cAAeA,GAAQA,EAAK,WAC9BD,EAAe,IAAIC,EAAK,SAAmB,CAE/C,CAAC,EAGDH,EAAW,QAASI,GAAoB,CAClC,CAACF,EAAe,IAAIE,EAAK,IAAI,GAAKA,EAAK,aAAa,KAAK,GAC3DH,EAAW,KAAK,CACd,KAAMG,EAAK,KACX,WAAY,GACZ,WAAYA,EAAK,KACjB,KAAM,KAAK,IAAIA,EAAM,WAAW,GAAK,GACrC,QAASA,EAAK,SAAW,GACzB,YAAaA,EAAK,aAAe,EACnC,CAAC,CAEL,CAAC,EAEGH,EAAW,SAAW,EACxB,MAAO,GAGT,IAAMI,EAAU,0DACVC,EAAY,8BAEZC,EAAON,EACV,IAAKO,GACJ,CACEA,EAAK,WACLA,EAAK,WACLA,EAAK,KACLA,EAAK,QACLA,EAAK,WACP,EACG,IAAKC,GACJ,OAAOA,GAAS,EAAE,EACf,QAAQ,MAAO,KAAK,EACpB,QAAQ,MAAO,MAAM,CAC1B,EACC,KAAK,KAAK,CACf,EACC,KAAK;AAAA,CAAI,EAEZ,MAAO;AAAA;AAAA;AAAA,IAGPJ,CAAO;AAAA,IACPC,CAAS;AAAA,EACXC,CAAI;AAAA;AAAA,CAGJ,CAKA,OAAO,iBAAiBG,EAAkC,CACxD,MAAI,CAACA,GAAcA,EAAW,SAAW,EAChC,OAGFA,EACJ,IACEC,GACC,KAAKA,EAAM,IAAI,OAAO,KAAK,IAAIA,EAAO,WAAW,GAAK,KAAK,IAAIA,EAAM,YAAc,MAAMA,EAAM,WAAW,GAAK,EAAE,EACrH,EACC,KAAK,MAAM,CAChB,CAKA,OAAO,YACLC,EACAb,EACAc,EACQ,CACR,GAAIA,IAAS,QAAaA,EAAK,SAAW,EACxC,MAAO,GAIT,IAAMC,EAAeD,EAAK,OAAQL,GAAkC,CAClE,IAAMO,EAAcP,EAAK,YACzB,OAAO,OAAOO,GAAgB,UAAYA,EAAY,KAAK,CAC7D,CAAC,EAED,GAAID,EAAa,SAAW,EAC1B,MAAO,GAGT,IAAMT,EAAUN,EACb,IAAKiB,GAAyB,KAAK,YAAY,MAAM,QAAQA,CAAC,EAAIA,EAAE,CAAC,EAAIA,GAAG,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAC1F,KAAK,KAAK,EAEPV,EAAYP,EAAW,IAAI,IAAM,KAAK,EAAE,KAAK,KAAK,EAElDQ,EAAOO,EACV,IAAKN,GACJT,EACG,IAAKiB,GAAyB,CAC7B,IAAMP,EAAQ,KAAK,IAAID,EAAMQ,CAAC,EAE9B,OAAO,OAAOP,GAAS,EAAE,EACtB,QAAQ,MAAO,KAAK,EACpB,QAAQ,MAAO,MAAM,CAC1B,CAAC,EACA,KAAK,KAAK,CACf,EACC,KAAK;AAAA,CAAI,EAEZ,MAAO;AAAA,MACLG,CAAI;AAAA;AAAA,IAENP,CAAO;AAAA,IACPC,CAAS;AAAA,EACXC,CAAI;AAAA;AAAA,CAGJ,CAMA,OAAO,IAAIU,EAAUC,EAAsC,CACzD,IAAIC,EAAW,GACXtC,EAAeqC,EACf,MAAM,QAAQA,CAAS,IACzB,CAACrC,EAAMsC,CAAQ,EAAID,GAErB,IAAME,EAAQvC,EAAK,MAAM,GAAG,EAExBwC,EAAeJ,EACnB,KAAOI,GAAWD,EAAM,QACtBC,EAAUA,EAAQD,EAAM,MAAM,CAAW,EAE3C,OAAOC,GAAW,MAAQA,IAAY,GAAKF,EAAW,OAAOE,CAAO,CACtE,CAKA,OAAO,WAAWC,EAAmB,CACnC,OAAOA,EAAE,CAAC,EAAE,YAAY,EAAIA,EAAE,UAAU,CAAC,CAC3C,CACF,EFtTA,eAAsBC,GAAM,CAC1B,IAAMC,EAAaC,GAAI,wCAAwC,EAAE,MAAM,EAEvE,GAAI,CAEF,MAAMC,EACJ,qJACF,EACAF,EAAW,QAAQ,kDAAkD,CACvE,OAASG,EAAO,CAEd,IAAMC,EAAeD,aAAiB,MAAQA,EAAM,QAAU,OAAOA,CAAK,EAC1EH,EAAW,KAAK,yCAA2CI,CAAY,CACzE,CACF,CAEA,eAAsBC,GAAO,CAC3B,IAAMC,EAAcL,GAAI,iCAAiC,EAAE,MAAM,EAEjE,GAAI,CACF,MAAMM,EAAK,SAAS,EACpBD,EAAY,QAAQ,2CAA2C,CACjE,OAASH,EAAO,CACd,IAAMC,EAAeD,aAAiB,MAAQA,EAAM,QAAU,OAAOA,CAAK,EAC1E,MAAAG,EAAY,KAAK,yCAA2CF,CAAY,EAClED,CACR,CACF,CGzBA,eAAsBK,GAAoB,CAExC,MAAMC,EAAI,EAEV,MAAMC,EAAK,CAEb,CCZA,OAAS,UAAAC,OAAc,0DACvB,OACE,qBAAAC,GACA,yBAAAC,GACA,kBAAAC,OACK,4EAeA,IAAMC,GAA6B,CACxC,qBAAsB,GACtB,oBAAqB,SAIrB,oBAAqB,gBACvB,EAEA,SAASC,EAAYC,EAAU,CAE7B,MAAO,GADK,QAAQ,IAAI,CACX,IAAIA,CAAQ,EAC3B,CAMO,IAAMC,GAAeC,GAAW,CAErC,CACE,WAAY,YACZ,MAAO,CACL,UACEA,EAAO,iBACPP,GACEO,EAAO,oBACPA,EAAO,mBACT,EACF,SAAUH,EAAY,yBAAyB,EAC/C,
UAAWG,EAAO,oBACpB,EACA,OAAQH,EAAY,YAAY,CAClC,EAEA,CACE,WAAY,WACZ,MAAOA,EAAY,yBAAyB,EAC5C,OAAQA,EAAY,gBAAgB,EACpC,cAAe,CACb,OAAQ,CACN,UAAWA,EAAY,OAAO,CAChC,CACF,CACF,EAEA,CACE,WAAY,SACZ,MAAOA,EAAY,uBAAuB,EAC1C,OAAQA,EAAY,cAAc,EAClC,cAAe,CAACF,GAAe,cAAc,CAC/C,CACF,EAOA,eAAsBM,GAAgBD,EAASJ,GAA4B,CAEzE,MAAMD,GAAe,aAAa,EAElC,QAAWO,KAAcH,GAAYC,CAAM,EACzC,GAAI,CAEF,MAAMN,GAAsBQ,CAAU,CACxC,OAASC,EAAK,CACZX,GAAO,MAAM,oBAAoBU,EAAW,UAAU,KAAKC,EAAI,OAAO,EAAE,CAC1E,CAEJ,CAEA,eAAsBC,IAAsB,CAC1C,MAAMH,GAAgB,CACpB,GAAGL,GACH,gBACE,kGACJ,CAAC,CACH,CLxFO,SAASS,IAAc,CAC5B,IAAMC,EAAWC,GAAK,QAAQ,EACxBC,EAAUC,EAAI,yBAAyB,EAAE,MAAM,EAErD,GAAI,CACF,OAAAC,GAAOJ,EAAU,CAAE,UAAW,GAAM,MAAO,EAAK,CAAC,EACjDE,EAAQ,QAAQ,+BAA+B,EACxC,EACT,OAASG,EAAO,CACd,OAAAH,EAAQ,KAAK,sCAAsCG,EAAM,OAAO,EAAE,EAClE,QAAQ,MAAMA,CAAK,EACZ,EACT,CACF,CAUA,eAAeC,EAAaC,EAAUC,EAAQC,EAAYC,EAAS,CACjE,IAAMR,EAAUC,EAAII,CAAQ,EAAE,MAAM,EAEpC,GAAI,CACF,IAAMI,EAAS,MAAMH,EAAO,EAC5B,OAAAN,EAAQ,QAAQO,CAAU,EACnBE,CACT,OAASN,EAAO,CACd,MAAAH,EAAQ,KAAKQ,CAAO,EACpB,QAAQ,MAAM,UAAUL,EAAM,OAAO,EAAE,EACjCA,CACR,CACF,CAOA,eAAsBO,GAAqBC,EAAQC,EAAc,CAC/D,OAAOR,EACL,+BACA,SAAY,CACV,IAAMS,EAAS,MAAMC,EAAOH,CAAM,EAClC,MAAME,EAAO,MAAMD,CAAY,EAC/B,MAAMC,EAAO,MAAM,CACrB,EACA,qBACA,gCACF,CACF,CAOA,eAAsBE,GAAoBC,EAAYC,EAAY,CAChE,OAAOb,EACL,YAAYY,EAAW,MAAQ,MAAM,QAAQC,EAAW,MAAQ,MAAM,MACtE,SAAY,CAEV,IAAMC,EAAa,MAAMJ,EAAOE,CAAU,EAC1C,MAAME,EAAW,MAAMF,EAAW,MAAM,EACxC,MAAME,EAAW,MAAM,EAGvB,IAAMC,EAAa,MAAML,EAAOG,CAAU,EAC1C,MAAME,EAAW,MAAMF,EAAW,MAAM,EACxC,MAAME,EAAW,MAAM,CACzB,EACA,kBAAkBH,EAAW,MAAQ,MAAM,QAAQC,EAAW,MAAQ,MAAM,UAC5E,8BACF,CACF,CAMA,eAAsBG,EAAaC,EAAS,CAC1C,GAAM,CAAE,SAAUC,EAAa,UAAWC,EAAS,SAAAC,CAAS,EAAIH,EAEhE,GAAIG,EAAU,CACZ,IAAMC,EAAcxB,EAAI,6BAA6B,EAAE,MAAM,EAE7D,WAAW,IAAM,CACfwB,EAAY,QAAQ,0BAA0B,CAChD,EAAG,CAAC,EACJ,MACF,CAEA,OAAOrB,EACL,0CACA,SAAY,CACV,MAAMsB,EAAkBJ,EAAaC,CAAO,EAC5C,MAAMI,GAAoB,CAC5B,EACA,4BACA,eACF,CACF,CMnHA,OAAS,YAAAC,GAAU,QAAAC,OAAY,YAC/B,OAAS,eAAAC,OAAmB,8BAC5B,OAAS,QAAAC,OAAY,OACrB,OAAS,OAAAC,OAAW,oBACpB,OAAS,WAAAC,OAAe,yBCJxB,OAAOC,OAAU,YACjB,OAAS,QAAAC,OAAY,OAOd,SAASC,GAAWC,EAAO,CAChC,MAAO,CACL,KAAM,cACN,YAAa,CACX,IAAMC,EAAQ,MAAM,QAAQD,CAAK,EAAIA,EAAQ,CAACA,CAAK,EAEnD,QAAWE,KAAQD,EACjB,GAAI,CACF,QAAWE,KAAYL,GAAK,KAAKD,GAAK,QAAQK,CAAI,CAAC,EACjD,KAAK,aAAaC,CAAQ,CAE9B,OAASC,EAAO,CACd,KAAK,MAAM,gCAAgCF,CAAI,MAAME,EAAM,OAAO,EAAE,CACtE,CAEJ,CACF,CACF,CDjBA,IAAMC,EAAW,CACf,kBAAmB,CAAC,cAAc,EAClC,YAAa,CAAC,qBAAsB,kBAAmB,cAAc,EACrE,cAAe,CAAC,qBAAsB,aAAa,CACrD,EAQO,SAASC,GAAiBC,EAAc,CAAC,EAAGC,EAAU,CAAC,EAAG,CAC/D,GAAM,CACJ,cAAAC,EAAgBJ,EAAS,cACzB,OAAAK,EAAS,CAAC,MAAO,cAAe,UAAU,CAC5C,EAAIF,EAGEG,EAAiB,CAAC,GAAGN,EAAS,YAAa,GAAGE,CAAW,EAE/D,MAAO,CACLK,GAAY,CACV,OAAAF,EACA,eAAgB,GAChB,kBAAmBL,EAAS,iBAC9B,CAAC,EACDQ,GAAQ,CACN,OAAQ,CAAE,KAAM,EAAK,EACrB,QAAS,CACP,UAAW,CAAC,GAAGF,EAAgBG,GAAK,QAAQ,IAAI,EAAG,MAAO,QAAQ,EAAGA,GAAK,QAAQ,IAAI,EAAG,KAAK,CAAC,CACjG,CACF,CAAC,EACDC,GAAWN,CAAa,CAC1B,CACF,CAOO,SAASO,EAAoBR,EAAU,CAAC,EAAG,CAChD,GAAM,CACJ,YAAAD,EAAc,CAAC,EACf,MAAAU,EAAQ,GACR,MAAAC,EAAQ,CAAC,iBAAkB,qBAAqB,EAChD,UAAAC,EAAY,SACZ,OAAAC,EAAS,KACX,EAAIZ,EAEJ,MAAO,CACL,KAAM,OACN,OAAQ,CACN,MAAAU,EACA,OAAQ,CACN,OAAAE,EACA,IAAKD,EACL,eAAgB,WAClB,EACA,SAAUE,GAAkB,EAC5B,QAASf,GAAiBC,CAAW,EACrC,MAAOe,GAAiBL,CAAK,CAC/B,CACF,CACF,CAOO,SAASM,EAAcf,EAAU,CAAC,EAAG,CAC1C,GAAM,CACJ,YAAAD,EAAc,CAAC,EACf,MAAAU,EAAQ,GACR,YAAAO,EAAc,cACd,cAAAC,EAAgB,CAAC,iBAAiB,EAClC,UAAAN,EAAY,QACd,EAAIX,EAEJ,MAAO,CACL,KAAM,OACN,OAAQ,CACN,MAAO,OAAO,YACZkB,GAAK,KAAKF,EAAa,CAAE,OAAQC,CAAc,CAAC,EAAE,IAAKE,GAE9C,CADMC,GAASD,EAAM,KAAK,EACnBA,CAAI,CACnB,CACH,EACA,OAAQ,CACN,OAAQ,MACR,IAAKR,EACL,eAAg
B,gBAChB,eAAgB,eAClB,EACA,QAASb,GAAiBC,CAAW,EACrC,MAAOe,GAAiBL,CAAK,CAC/B,CACF,CACF,CAOO,SAASY,EAAarB,EAAU,CAAC,EAAG,CACzC,GAAM,CAAE,MAAAU,EAAQ,CAAC,iBAAiB,EAAG,UAAAC,EAAY,QAAS,EAAIX,EAE9D,MAAO,CACL,KAAM,MACN,OAAQ,CACN,MAAAU,EACA,OAAQ,CACN,OAAQ,MACR,IAAKC,EACL,eAAgB,aAClB,EACA,QAAS,CAACW,GAAI,CAAC,CACjB,CACF,CACF,CAOO,SAASR,GAAiBS,EAAc,CAE7C,GAAI,CAACA,EACH,MAAO,GAIT,IAAMvB,EAAU,OAAOuB,GAAiB,SAAWA,EAAe,CAAC,EAEnE,MAAO,CACL,YAAavB,EAAQ,aAAe,GACpC,WAAYA,EAAQ,YAAc,IAClC,SAAU,CACR,cAAe,GAEf,QAASA,EAAQ,SAAW,CAC1B,oBACA,0BACA,eACA,sBACA,iBACA,qBACA,YACF,EAEA,iBAAkBA,EAAQ,kBAAoB,CAC5C,mBAAoB,IACpB,aAAc,GAChB,CACF,EACA,QAASA,EAAQ,SAAW,CAC1B,kBACA,gBACA,gBACA,iBACA,qBACA,gBACF,EACA,QAASA,EAAQ,SAAW,CAAC,cAAe,qBAAqB,CACnE,CACF,CAOO,SAASa,GAAkBW,EAAa,CAAC,EAAG,CASjD,MAAO,CAAC,GARS,CAEf,oBACA,4BACA,yBACA,oBACF,EAEqB,GAAGA,CAAU,CACpC,CElMA,OAAS,kBAAAC,OAAsB,kBAC/B,OAAS,aAAAC,OAAiB,sBAC1B,OAAOC,OAAS,MAIhB,IAAMC,EAAiB,CACrB,MAAO,GACP,YAAa,GACb,SAAU,IACV,QAAS,SACT,WAAY,CAAC,WAAY,YAAa,mBAAoB,WAAW,CACvE,EAYA,eAAsBC,GAAuBC,EAAU,CAAC,EAAG,CACzD,GAAI,CAACA,EAAQ,MAAO,OAEpB,IAAMC,EAAgBJ,GAAI;AAAA,CAA2B,EAAE,MAAM,EAE7D,GAAI,CAEF,IAAMK,EAAe,CACnB,KAAM,OAAOF,EAAQ,IAAI,GAAK,OAC9B,KAAMA,EAAQ,KAAO,IAAM,OAC3B,MAAOA,EAAQ,OAASF,EAAe,MACvC,YAAaE,EAAQ,aAAeF,EAAe,YACnD,SAAUE,EAAQ,UAAYF,EAAe,SAC7C,QAASE,EAAQ,SAAWF,EAAe,QAG3C,WAAY,CACV,SAAsBK,EAASC,EAAM,CACnC,MAAI,CAACD,EAAQ,IAAI,SAAS,GAAG,GAAK,CAACA,EAAQ,IAAI,SAAS,GAAG,IACzDA,EAAQ,KAAO,SAEVC,EAAK,CACd,CACF,EAGA,QAAS,CACPR,GAAU,CACR,QAASI,EAAQ,YAAcF,EAAe,UAChD,CAAC,CACH,CACF,EAGMO,EAAS,MAAMV,GAAe,CAClC,OAAQO,EACR,YAAa,GACb,eAAgB,EAClB,CAAC,EAED,OAAAD,EAAc,KAAK,EACZI,CACT,OAASC,EAAO,CACd,MAAAL,EAAc,KAAK,yCAAyC,EAC5D,QAAQ,MAAM,qCAAsCK,CAAK,EACnD,IAAI,MAAM,uCAAuCA,EAAM,OAAO,EAAE,CACxE,CACF,CCvEA,OAAOC,OAAU,YACjB,OAAOC,MAAS,MAChB,OAAS,UAAAC,OAAc,SAMvB,IAAIC,EAAkB,GAGhBC,GAAS,CACb,IAAK,CAAE,OAAQ,GAAO,SAAU,CAAE,EAClC,QAAS,CAAE,OAAQ,GAAO,SAAU,CAAE,EACtC,KAAM,CAAE,OAAQ,GAAO,SAAU,CAAE,CACrC,EAGMC,GAAqB,IAGrBC,EAAmB,IAAI,IAGvBC,GAAe,CACnB,mBACA,wBACA,eACA,eACA,oBACF,EAGA,SAASC,GAAaC,EAAU,CAC9B,GAAI,CAACA,GAAY,OAAOA,GAAa,SAAU,MAAO,GAEtD,GAAI,CACF,IAAMC,EAAiBC,GAAK,UAAUF,CAAQ,EAG9C,OACEF,GAAa,KAAMK,GAAeF,EAAe,SAASE,CAAU,CAAC,GACrEF,EAAe,SAAS,QAAQ,GAChCA,EAAe,SAAS,SAAS,GACjCA,EAAe,SAAS,OAAO,CAEnC,OAASG,EAAO,CACd,eAAQ,MAAM,wBAAwB,OAAOJ,CAAQ,KAAMI,EAAM,OAAO,EACjE,EACT,CACF,CAQA,eAAeC,EAAaC,EAAUC,EAAQ,CAC5C,IAAMC,EAAOb,GAAOW,CAAQ,EAG5B,GAAIE,EAAK,QAAU,KAAK,IAAI,EAAIA,EAAK,SAAWZ,GAC9C,MAAO,GAGT,GAAI,CACF,OAAAY,EAAK,OAAS,GACdA,EAAK,SAAW,KAAK,IAAI,EAClB,MAAMD,EAAO,CACtB,OAASH,EAAO,CACd,eAAQ,MAAM,YAAYE,CAAQ,SAAUF,CAAK,EAC1C,EACT,QAAE,CACAI,EAAK,OAAS,EAChB,CACF,CAQA,eAAsBC,GACpBC,EACAC,EACAC,EACA,CAEA,IAAIC,EAAiB,GAEjBC,EAAoB,CAAE,IAAK,GAAO,QAAS,GAAO,KAAM,EAAM,EAC9DC,EAAsB,KACtBC,EAGEC,EAAeC,EAAI,0BAA0B,EAAE,MAAM,EAGrDC,EAAa,CAEjB,IAAK,SAAY,CACf,IAAMC,EAAaF,EAAI,8BAA8B,EAAE,MAAM,EAC7D,GAAI,CACF,IAAMG,EAAa,MAAMC,GAAOC,EAAa,EAAE,MAAM,EACrD,aAAMF,EAAW,MAAME,EAAa,EAAE,OAAO,MAAM,EACnD,MAAMF,EAAW,MAAM,EACvBD,EAAW,QAAQ,mBAAmB,EAC/B,EACT,OAAShB,EAAO,CACd,OAAAgB,EAAW,KAAK,8BAA8B,EAC9C,QAAQ,MAAM,qCAAsChB,CAAK,EAClD,EACT,CACF,EAGA,QAAS,SAAY,CACnB,GAAM,CAAE,SAAUoB,EAAa,UAAWC,EAAS,SAAAC,CAAS,EAAIf,EAChE,GAAIe,EAAU,CACZ,IAAMC,EAAcT,EAAI,gCAAgC,EAAE,MAAM,EAChE,kBAAW,IAAM,CACfS,EAAY,QAAQ,6BAA6B,CACnD,EAAG,CAAC,EACG,EACT,CAEA,IAAMC,EAAiBV,EACrB,yCACF,EAAE,MAAM,EACR,GAAI,CACF,aAAMW,EAAkBL,EAAaC,CAAO,EAC5CG,EAAe,QAAQ,6CAA6C,EAC7D,EACT,OAASxB,EAAO,CACd,OAAAwB,EAAe,KAAK,wCAAwC,EAC5D,QAAQ,MAAM,4BAA6BxB,CAAK,EACzC,EACT,CACF,EAGA,KAAM,SAAY,CAEhB,GAAIV,EACF,MAAO,GAIT,GAAIiB,EAAQ,SAAU,CACpB,IAAMgB,EAAcT,EAAI,6BAA6B,EAAE,MAA
M,EAC7D,kBAAW,IAAM,CACfS,EAAY,QAAQ,0BAA0B,CAChD,EAAG,CAAC,EACG,EACT,CAEA,IAAMG,EAAcZ,EAAI,oBAAoB,EAAE,MAAM,EACpD,GAAI,CACF,aAAMa,EAAapB,CAAO,EAC1BmB,EAAY,QAAQ,0BAA0B,EACvC,EACT,OAAS1B,EAAO,CACd0B,EAAY,KAAK,iCAAiC,EAClD,QAAQ,MAAM,+BAAgC1B,CAAK,CACrD,CACF,CACF,EAGM4B,EAA4B,IAAM,CAEpCnB,GACAC,EAAkB,KAClBA,EAAkB,SAClBA,EAAkB,MAClB,OAAOF,GAA2B,aAElCC,EAAiB,GACjBD,EAAuB,EAE3B,EAGA,SAASqB,EAAwBC,EAAQ,IAAM,CACzCnB,GACF,aAAaA,CAAmB,EAGlCA,EAAsB,WAAW,SAAY,CAE3CD,EAAkB,IAAM,MAAMT,EAAa,MAAOc,EAAW,GAAG,EAEhE,WAAW,SAAY,CACrBL,EAAkB,QAAU,MAAMT,EAChC,UACAc,EAAW,OACb,EAEA,WAAW,SAAY,CACrBL,EAAkB,KAAO,MAAMT,EAAa,OAAQc,EAAW,IAAI,EACnEa,EAA0B,CAC5B,EAAG,GAAI,CACT,EAAG,GAAI,CACT,EAAGE,CAAK,CACV,CAGAxB,EAAQ,GAAG,QAAS,MAAOyB,GAAU,CACnC,OAAQA,EAAM,KAAM,CAClB,IAAK,QACHlB,EAAa,QAAQ,iCAAiC,EACtD,MAEF,IAAK,eAKH,GAHApB,EAAiB,MAAM,EAGnBsC,EAAM,MACR,GAAI,CAEF,IAAMC,EAAS,MAAM,QAAQD,EAAM,KAAK,EACpCA,EAAM,MACN,OAAOA,EAAM,OAAU,SACrB,CAACA,EAAM,KAAK,EACZ,OAAOA,EAAM,OAAU,UAAYA,EAAM,QAAU,KACjD,OAAO,OAAOA,EAAM,KAAK,EACzB,CAAC,EAET,QAAWE,KAASD,EAEd,OAAOC,GAAU,UAAY,CAACtC,GAAasC,CAAK,GAClDxC,EAAiB,IAAIK,GAAK,UAAUmC,CAAK,CAAC,CAGhD,OAASjC,EAAO,CACd,QAAQ,MAAM,gCAAiCA,CAAK,CACtD,CAGFY,EAAgBE,EAAI,oBAAoB,EAAE,MAAM,EAChDxB,EAAkB,GAClB,MAEF,IAAK,aACCsB,GACFA,EAAc,QACZ,UAAU,MAAM,QAAQmB,EAAM,KAAK,EAAI,MAAMA,EAAM,MAAM,KAAK,IAAI,CAAC,IAAM,EAAE,WAAWA,EAAM,QAAQ,eACtG,EAEFzC,EAAkB,GAGdG,EAAiB,KAAO,GAC1BoC,EAAwB,EAE1B,MAEF,IAAK,MAEH,MAEF,IAAK,QACHvC,EAAkB,GACdsB,EACFA,EAAc,KAAK,4BAA4BmB,EAAM,MAAM,OAAO,EAAE,EAEpEjB,EAAI,EAAE,KAAK,sBAAsBiB,EAAM,MAAM,OAAO,EAAE,EAExDtC,EAAiB,MAAM,EACvB,KACJ,CACF,CAAC,CACH,CAMO,SAASyC,GAAwB5B,EAAS,CAC/C,eAAQ,GAAG,SAAU,IAAM,CACzB,IAAM6B,EAAerB,EAAI,gBAAgB,EAAE,MAAM,EACjDR,EAAQ,MAAM,EACd6B,EAAa,QAAQ,qCAAgC,EACrD,QAAQ,KAAK,CAAC,CAChB,CAAC,EAEM7B,CACT,CV1QA,eAAe8B,GAAmBC,EAAS,CACzC,IAAMC,EAAmBC,EAAoBF,CAAO,EAC9CG,EAAaC,EAAcJ,CAAO,EAClCK,EAAYC,EAAa,EAG/BL,EAAiB,OAAO,QAAQ,KAAKM,GAAO,CAAC,EAG7C,MAAMC,EAAaR,CAAO,EAG1B,MAAMS,GAAoBR,EAAiB,OAAQE,EAAW,MAAM,EAGpE,MAAMO,GAAqBL,EAAU,OAAQA,EAAU,OAAO,MAAM,CACtE,CAOA,eAAeM,GAAeX,EAAS,CACrC,GAAM,CAAE,IAAKY,CAAU,EAAIZ,EACrBC,EAAmBC,EAAoB,CAAE,GAAGF,EAAS,MAAO,EAAK,CAAC,EAClEG,EAAaC,EAAc,CAAE,GAAGJ,EAAS,MAAO,EAAK,CAAC,EAGtDa,EAAUC,GAAM,CAACb,EAAiB,OAAQE,EAAW,MAAM,CAAC,EAGlE,OAAAY,GACEF,EACAb,EACAY,EAAY,SAAYI,GAAuBhB,CAAO,EAAI,MAC5D,EAGAiB,GAAwBJ,CAAO,EAExBA,CACT,CAUA,eAAsBK,EAAgBlB,EAAU,CAAC,EAAG,CAClD,GAAI,CACF,GAAM,CAAE,MAAAc,CAAM,EAAId,EAOlB,OAJAmB,GAAY,EAIRL,EACK,MAAMH,GAAeX,CAAO,EAG9B,MAAMD,GAAmBC,CAAO,CACzC,OAASoB,EAAO,CACd,MAAM,IAAI,MAAM,iBAAiBA,EAAM,OAAO,EAAE,CAClD,CACF,CFrFA,IAAIC,EAAaC,GACd,QAAQ,KAAK,EACb,YAAY,6CAA6C,EAE5DD,EAAaE,EAAiBF,CAAU,EACxCA,EAAaG,GAAkBH,CAAU,EAEzC,IAAOI,GAAQJ,EAAW,OAAO,MAAOK,GAAY,CAClD,GAAI,CACF,IAAMC,EAAQC,GAAI,iBAAiB,EAE/BF,EAAQ,OACVC,EAAM,KAAO,yBACbA,EAAM,QAAU,cAChBA,EAAM,MAAQ,SAEdA,EAAM,KACJD,EAAQ,OAAS,GACb,qCACA,qBAGRC,EAAM,MAAM,EAEPD,EAAQ,OACXC,EAAM,QAAQ,kBAAkB,EAGlC,MAAME,EAAgB,CAAE,GAAGH,EAAS,IAAK,GAAM,MAAOA,EAAQ,KAAM,CAAC,CACvE,OAASI,EAAO,CAEdF,GAAI,EAAE,KAAK,iBAAiBE,EAAM,OAAO,EAAE,EAC3C,QAAQ,MAAMA,CAAK,EACnB,QAAQ,KAAK,CAAC,CAChB,CACF,CAAC,Ea3CD,OAAS,WAAAC,OAAe,YACxB,OAAOC,OAAS,MAIhB,IAAIC,GAAeC,GAChB,QAAQ,OAAO,EACf,YAAY,wBAAwB,EAEvCD,GAAeE,EAAiBF,EAAY,EAE5C,IAAOG,GAAQH,GAAa,OAAO,MAAOI,GAAY,CACpD,GAAI,CACF,IAAMC,EAAQC,GAAI,iBAAiB,EAE/BF,EAAQ,OACVC,EAAM,KAAO,yBACbA,EAAM,QAAU,cAChBA,EAAM,MAAQ,SAEdA,EAAM,KACJD,EAAQ,OAAS,GACb,qCACA,qBAGRC,EAAM,MAAM,EAEZ,MAAME,EAAgBH,CAAO,EAExBA,EAAQ,OACXC,EAAM,QAAQ,kBAAkB,CAEpC,OAASG,EAAO,CAEdF,GAAI,EAAE,KAAK,iBAAiBE,EAAM,OAAO,EAAE,EAC3C,QAAQ,MAAMA,CAAK,EACnB,QAAQ,KAAK,CAAC,CAChB,CACF,CAAC,ECvCD,OAAS,QAAAC,OAAY,qBAC
rB,OAAOC,OAAU,YACjB,OAAOC,OAAa,eACpB,OAAS,iBAAAC,OAAqB,WAC9B,OAAOC,OAAU,YACjB,OAAS,WAAAC,OAAe,YACxB,OAAOC,OAAc,WAGrB,IAAOC,GAAQC,GACZ,QAAQ,SAAS,EACjB,YAAY,uDAAuD,EACnE,eACC,oBACA,kDACF,EACC,OACC,qBACA,kEACF,EACC,OAAO,MAAOC,GAAY,CACzB,IAAMC,EAAWC,GAAc,YAAY,GAAG,EACxCC,EAAUC,GAAK,QAAQH,CAAQ,EAC/BI,EAAaD,GAAK,QAAQD,EAAS,aAAcH,EAAQ,EAAE,EAEjE,GAAIA,EAAQ,YAAa,CAEvB,IAAMM,EAAcC,GAAK,UAAUC,EAAI,EAEvC,GAAI,CACF,MAAMF,EAAY,yBAAyB,CAC7C,MAAQ,CACN,QAAQ,MAAM,gCAAgC,EAC9CG,GAAQ,KAAK,CAAC,CAChB,EAEgB,MAAMC,GAAS,OAAO,CACpC,CACE,KAAM,UACN,KAAM,SACN,QACE,gEACF,QAAS,EACX,CACF,CAAC,GAEW,OACVC,EACE,oBAAoBN,CAAU,wBAAwBA,CAAU,8BAClE,EAEAM,EACE,oBAAoBN,CAAU,wBAAwBA,CAAU,oBAClE,CAEJ,MACEM,EAAM,GAAGN,CAAU,YAAY,CAEnC,CAAC,EC1DH,OAAOO,OAAa,eACpB,OAAS,WAAAC,OAAe,YAExB,OAAS,YAAAC,GAAU,aAAAC,OAAiB,mBACpC,OAAS,UAAAC,OAAc,0DCJvB,OAAOC,OAAQ,mBACf,OAAS,UAAAC,MAAc,0DACvB,OACE,yBAAAC,GACA,kBAAAC,OACK,4EAEP,IAAMC,GACJ,kEAIIC,GAAc,OACdC,GAAwB,OACxBC,GAAkB,UAYlBC,GAAiB,CACrB,eAAgB,CACd,kBACA,aACA,uBACA,uBACA,aACA,aACA,WACF,EACA,UAAW,CAAC,aAAc,kBAAmB,iBAAiB,EAC9D,MAAO,CACL,aACA,qBACA,kBACA,2BACA,cACA,eACA,WACF,CACF,EA8BA,SAASC,GAAsBC,EAAaC,EAAU,CAMpD,OAHED,EAAY,WAAW,GAAG,GAC1B,6BAA6B,KAAKA,EAAY,MAAM,CAAC,CAAC,EAG/C,GAAGN,EAAwB,cAAcM,CAAW,IAAIC,CAAQ,GAGrED,IAAgBL,GACX,GAAGD,EAAwB,eAAeM,CAAW,IAAIC,CAAQ,GAGnE,GAAGP,EAAwB,IAAIC,EAAW,IAAIM,CAAQ,EAC/D,CASA,SAASC,GAAsBD,EAAUD,EAAaG,EAAY,CAChE,IAAMC,EAAYL,GAAsBC,EAAaC,CAAQ,EAE7D,MAAO,CAEL,WAAYA,EAAS,MAAM,GAAG,EAAE,IAAI,EACpC,MAAO,CACL,UAAAG,EACA,SAAUD,EACV,UAAW,EACb,EACA,OAAQA,EACR,UAAW,EACb,CACF,CAQA,eAAeE,GAAgBC,EAAS,CACtC,GAAI,CACF,MAAMhB,GAAG,GAAGgB,EAAS,CAAE,UAAW,GAAM,MAAO,EAAK,CAAC,EACrDf,EAAO,IAAI,mCAAmCe,CAAO,EAAE,CACzD,OAASC,EAAO,CACd,MAAAhB,EAAO,MAAM,4BAA4Be,CAAO,KAAKC,EAAM,OAAO,EAAE,EAC9DA,CACR,CACF,CAOA,eAAsBC,GAAiBC,EAAS,CACzCA,IACHlB,EAAO,MAAM,kCAAkC,EAE/C,QAAQ,KAAK,CAAC,GAIhB,IAAMmB,EAAa,UAEnB,GAAI,CACF,MAAML,GAAgBK,CAAU,EAChCnB,EAAO,IAAI,wCAAwC,CACrD,OAASgB,EAAO,CACdhB,EAAO,MAAM,qCAAqCgB,EAAM,OAAO,EAAE,EAEjE,QAAQ,KAAK,CAAC,CAChB,CAGA,MAAMd,GAAe,aAAa,EAElC,IAAMkB,EAAc,CAAC,EACfC,EAAe,CAAC,EAEtB,QAAWC,KAAO,OAAO,KAAKf,EAAc,EAC1C,QAAWgB,KAAQhB,GAAee,CAAG,EAAG,CACtC,IAAME,EAAY,GAAGF,IAAQ,QAAU,GAAK,GAAGA,CAAG,GAAG,GAAGC,CAAI,GACtDX,EAAa,GAAGM,CAAO,YAAYM,CAAS,GAE5CC,EAAad,GACjB,aAAaL,EAAe,YAAYkB,CAAS,GACjDnB,GACAO,CACF,EACAQ,EAAY,KAAKK,CAAU,CAC7B,CAoBF,GAhBA,MAAM,QAAQ,IACZL,EAAY,IAAI,MAAOM,GAAW,CAChC,GAAI,EACe,MAAM,MAAMA,EAAO,MAAM,UAAW,CACnD,OAAQ,MACV,CAAC,GACa,IACZL,EAAa,KAAKK,EAAO,MAAM,SAAS,CAE5C,MAAQ,CACNL,EAAa,KAAKK,EAAO,MAAM,SAAS,CAC1C,CACF,CAAC,CACH,EAGIL,EAAa,OAAS,EAAG,CAC3B,IAAMM,EAAeN,EAClB,IAAKE,GAAS,mBAAmBA,CAAI,EAAE,EACvC,KAAK;AAAA,CAAI,EACZvB,EAAO,MACL;AAAA,EAAmF2B,CAAY,EACjG,EAEA,QAAQ,KAAK,CAAC,CAChB,CAGA,GAAI,CACF,MAAM,QAAQ,IACZP,EAAY,IAAKM,GAAWzB,GAAsByB,CAAM,CAAC,CAC3D,EACA1B,EAAO,IAAI,sBAAsB,CACnC,OAASgB,EAAO,CACdhB,EAAO,MAAM,2BAA2BgB,EAAM,OAAO,EAAE,EAEvD,QAAQ,KAAK,CAAC,CAChB,CACF,CDhNA,IAAOY,GAAQC,GACZ,QAAQ,MAAM,EACd,YACC,mEACF,EACC,OAAO,SAAY,CAClBC,GAAO,KAAK,iDAAiD,EAE7DA,GAAO,KACL,uFACF,EAEA,IAAMC,EAAMC,GAAQ,IAAI,EAExB,MAAMC,GAAiBF,CAAG,EAS1B,IAAMG,EAAiB,GAAGH,CAAG,sBAIvBI,GAHa,MAAMC,GAASF,EAAgB,CAAE,SAAU,OAAQ,CAAC,GAIpE,QAAQ,SAAU;AAAA,CAAI,EACtB,QAAQ,SAAU;AAAA,CAAI,EACzB,MAAMG,GAAUH,EAAgBC,EAAiB,CAAE,SAAU,OAAQ,CAAC,GAElEA,EAAgB,SAAS,IAAI,GAAKA,EAAgB,SAAS;AAAA;AAAA,CAAM,IACnEL,GAAO,MAAM,iDAAiD,CAElE,CAAC,EExCH,OAAOQ,MAAQ,UACf,OAAOC,MAAU,YACjB,OAAS,UAAAC,OAAc,0DACvB,OAAS,WAAAC,OAAe,YACxB,OAAS,QAAAC,OAAY,OCJrB,IAAOC,GAAQ,CAACC,EAAMC,IAAe,CACnC,IAAMC,GAAcF,EAAK,MAAM,iCAAiC,GAC9DA,EAAK,MAAM,6BAA6B,GAAG,CAAC,EACxCG,EAAYH,EAAK,MAAM,2BAA2B,IAAI,CAAC,EACvDI,EAAYJ,EA
AK,MAAM,iCAAiC,IAAI,CAAC,GAAK,GAExE,MAAI,CAACE,GAAc,CAACC,EACXH,EAEF;AAAA,WACEG,CAAS,YAAYF,CAAU;AAAA;AAAA,KAErCG,CAAS;AAAA,QACND,CAAS,eAAeA,CAAS;AAAA;AAAA,0BAEfD,CAAU;AAAA,0BACVA,CAAU,KAAKC,CAAS;AAAA;AAAA,CAGlD,EDVA,IAAME,EAAUC,EAAK,QAAQ,QAAQ,IAAI,EAAG,eAAe,EAE3D,eAAeC,GAASC,EAAS,CAC/B,GAAI,CAEF,OADmB,MAAM,QAAQ,IAAIA,EAAQ,IAAKC,GAAWC,GAAKD,CAAM,CAAC,CAAC,GACxD,KAAK,CACzB,OAASE,EAAK,CACZ,cAAQ,MAAM,kCAAmCA,CAAG,EAC9CA,CACR,CACF,CAEA,eAAeC,GAAkBC,EAAW,CACrCC,EAAG,WAAWT,CAAO,GACxB,MAAMS,EAAG,SAAS,MAAMT,EAAS,CAAE,UAAW,EAAK,CAAC,EAGtD,QAAWU,KAAYF,EAAW,CAChC,IAAMG,EAAeV,EAAK,QAAQ,QAAQ,IAAI,EAAGS,CAAQ,EACnDE,EAAc,MAAMH,EAAG,SAAS,SAASE,EAAc,OAAO,EAC9DE,EAAUZ,EAAK,QAAQD,EAAS,GAAGC,EAAK,SAASS,CAAQ,CAAC,EAAE,EAC5DI,EAAUC,GACdH,EACAX,EAAK,SAASD,EAASU,CAAQ,CACjC,EACA,MAAMD,EAAG,SAAS,UAAUI,EAASC,CAAO,CAC9C,CACF,CAEA,eAAeE,IAAO,CAEpB,IAAMR,EAAY,MAAMN,GAAS,CAAC,iBAAiB,CAAC,EACpD,MAAMK,GAAkBC,CAAS,CACnC,CAEA,IAAOS,GAAQC,GACZ,QAAQ,WAAW,EACnB,YAAY,qDAAqD,EACjE,OAAO,IAAM,CACZF,GAAK,EACF,KAAK,IAAM,CACVG,GAAO,QAAQ,mCAAmC,CACpD,CAAC,EACA,MAAOC,GAAU,CAChBD,GAAO,MAAM,qBAAqBC,EAAM,OAAO,EAAE,CACnD,CAAC,CACL,CAAC,EEzDH,OAAS,WAAAC,OAAe,YCAxB,OAAOC,OAAW,QAClB,OAAOC,OAAS,MCDhB,OAAS,cAAAC,GAAY,YAAAC,OAAgB,mBACrC,OAAS,UAAAC,MAAc,0DACvB,OAAS,aAAAC,OAAiB,aAI1B,IAAIC,EACJ,GAAI,CACFA,EAAMD,GAAU,CACd,QAAS,QAAQ,IAAI,EACrB,OAAQ,MACR,uBAAwB,CAC1B,CAAC,CACH,OAASE,EAAO,CACdH,EAAO,MAAM,6BAA6BG,CAAK,EAAE,EAEjDD,EAAM,CAAC,CACT,CAEO,IAAME,EAAN,MAAMC,CAAI,CACf,aAAa,eAAeC,EAAiB,CAC3C,GAAIA,IAAY,GACd,MAAO,GAET,GAAI,CAEF,OADoB,MAAMP,GAAS,aAAc,OAAO,GACrC,SAASO,CAAO,CACrC,OAASC,EAAK,CACZ,OAAAP,EAAO,MAAM,uBAAuBO,CAAG,EAAE,EAClC,EACT,CACF,CAEA,aAAa,mBAUX,CACA,GAAI,CAWF,IAAMC,EAAgB,MAAMN,EAAI,YAAY,EAC5CF,EAAO,KAAK,mBAAmBQ,EAAc,OAAO,EAAE,EAGtD,IAAIC,EAAe,OACfC,EAAc,GAKlB,GAFuB,CAAC,CAAC,QAAQ,IAAI,eAEjB,CAClBV,EAAO,KAAK,uCAAuC,EAEnDS,EAAe,QAAQ,IAAI,iBAAmB,OAE9C,GAAI,CAEF,MAAMP,EAAI,MAAM,SAAUO,CAAY,EACtCT,EAAO,KAAK,iCAAiCS,CAAY,EAAE,EAU3DC,EAAc,IAPI,MAAMR,EAAI,IAAI,CAC9B,aACA,UAAUO,CAAY,GACtB,MACF,CAAC,GAG0B,KAAK,CAAC,SACjCT,EAAO,KAAK,uBAAuBU,CAAW,EAAE,CAClD,OAASP,EAAO,CACdH,EAAO,KAAK,wCAAwCG,CAAK,EAAE,EAE3DO,EAAc,UAAUD,CAAY,SACpCT,EAAO,KAAK,iCAAiCU,CAAW,EAAE,CAC5D,CACF,KAAO,CAELV,EAAO,KAAK,8BAA8B,EAE1C,GAAI,CAEF,GAAI,CACF,MAAME,EAAI,IAAI,CAAC,YAAa,WAAY,UAAUO,CAAY,EAAE,CAAC,CACnE,MAAQ,CACNT,EAAO,KAAK,YAAYS,CAAY,cAAc,EAClD,MAAMP,EAAI,MAAM,SAAUO,CAAY,CACxC,CASAC,EAAc,IANI,MAAMR,EAAI,IAAI,CAC9B,aACA,UAAUO,CAAY,GACtBD,EAAc,OAChB,CAAC,GAE0B,KAAK,CAAC,SACjCR,EAAO,KAAK,sCAAsCU,CAAW,EAAE,CACjE,OAASP,EAAO,CACdH,EAAO,KAAK,yCAAyCG,CAAK,EAAE,EAG5DH,EAAO,KAAK,0CAA0C,EACtDU,EAAc,gBACdV,EAAO,KAAK,gCAAgCU,CAAW,EAAE,CAC3D,CACF,CAGA,OAAO,MAAML,EAAI,oBAAoBK,CAAW,CAClD,OAASH,EAAK,CACZ,OAAAP,EAAO,MAAM,kCAAkCO,CAAG,EAAE,EAC7C,CAAC,CACV,CACF,CAGA,aAAa,oBAAoBG,EAU/B,CAqBA,IAAMC,GARmB,MAAMT,EAAI,IAAI,CACrC,MACA,iEACA,eACAQ,CACF,CAAC,GAIE,MAAM;AAAA,CAAgB,EACtB,OAAQE,GAAkBA,EAAM,KAAK,IAAM,EAAE,EAE1CC,EAA2B,CAAC,EAElC,QAAWD,KAASD,EAAc,CAChC,IAAMG,EAAQF,EAAM,MAAM;AAAA,CAAI,EAC9B,GAAIE,EAAM,QAAU,EAAG,CACrB,IAAMC,EAAOD,EAAM,CAAC,EACdE,EAAOF,EAAM,CAAC,EACdG,EAAcH,EAAM,CAAC,EACrBI,EAAUJ,EAAM,CAAC,EAIjBK,EAAYL,EACf,MAAM,CAAC,EACP,OAAQM,GAAiBA,IAAS,YAAY,EAC3CC,EAAOF,EAAU,OAAS,EAAIA,EAAU,KAAK,EAAE,EAAI,GAGnDG,EAAYP,EAAK,UAAU,EAAG,CAAC,EAG/BQ,EAAYL,EAAQ,MACxB,oEACF,EACIM,EAAOD,EAAYA,EAAU,CAAC,EAAI,UAGlCF,EAAK,SAAS,iBAAiB,IACjCG,EAAO,YAGTX,EAAQ,KAAK,CACX,KAAAW,EACA,KAAMF,EACN,KAAAN,EACA,QAAAE,EACA,KAAAG,EACA,QAAS,GAAGH,CAAO,GAAGG,EAAO;AAAA;AAAA,EAAOA,CAAI,GAAK,EAAE,GAC/C,YAAAJ,CACF,CAAC,CACH,CACF,CAEA,OAAOJ,CACT,CAGA,aAAa,eAAeP,EAAiBmB,EAAM,GAAM,CACvD,MAAMpB,EAAI,eAAeC,CAAO,EAAE,KAAK,MA
AOoB,GAAW,CACvD,GAAIA,EACF1B,EAAO,KAAK,GAAGM,CAAO,iBAAiB,MAEvC,IAAI,CACF,MAAMR,GAAW,aAAc;AAAA,EAAKQ,CAAO,EAAE,EACzCmB,GACFzB,EAAO,QAAQ,GAAGM,CAAO,sBAAsB,CAEnD,OAASC,EAAK,CACZP,EAAO,MAAMO,CAAG,CAClB,CAEJ,CAAC,CACH,CAGA,aAAa,mBAAmBoB,EAAiB,CAC/C,GAAI,CACF,MAAMzB,EAAI,YAAYyB,CAAK,EAC3B3B,EAAO,QAAQ,GAAG2B,EAAM,KAAK,IAAI,CAAC,6BAA6B,CACjE,OAASpB,EAAK,CACZP,EAAO,MAAMO,CAAG,CAClB,CACF,CAEA,aAAa,aAAaqB,EAAoB,CAC5C,GAAI,CACF,MAAM1B,EAAI,oBAAoB0B,CAAU,EACxC5B,EAAO,QAAQ,2BAA2B4B,CAAU,SAAS,CAC/D,OAASrB,EAAK,CACZP,EAAO,MAAMO,CAAG,CAClB,CACF,CAEA,aAAa,kBAAkBsB,EAAiB,CAC9C,GAAI,CACF,MAAM3B,EAAI,IAAI,GAAG,EACjB,MAAMA,EAAI,OAAO2B,CAAO,EACxB7B,EAAO,QAAQ,2BAA2B6B,CAAO,EAAE,CACrD,OAAStB,EAAK,CACZP,EAAO,MAAMO,CAAG,CAClB,CACF,CACF,ECnQA,OAAOuB,MAAW,QAGX,IAAMC,GAAqB,GACrBC,GAAkB,IA8BxB,SAASC,EAAeC,EAAsB,CACnD,OAAQA,EAAM,CACZ,IAAK,WACH,OAAOJ,EAAM,KAAK,IAAII,CAAI,EAC5B,IAAK,OACH,OAAOJ,EAAM,KAAK,MAAMI,CAAI,EAC9B,IAAK,MACH,OAAOJ,EAAM,KAAK,MAAMI,CAAI,EAC9B,IAAK,OACH,OAAOJ,EAAM,KAAK,MAAMI,CAAI,EAC9B,IAAK,OACH,OAAOJ,EAAM,KAAK,KAAKI,CAAI,EAC7B,IAAK,QACH,OAAOJ,EAAM,KAAK,KAAKI,CAAI,EAC7B,IAAK,WACH,OAAOJ,EAAM,KAAK,KAAKI,CAAI,EAC7B,IAAK,OACH,OAAOJ,EAAM,KAAK,KAAKI,CAAI,EAC7B,IAAK,QACH,OAAOJ,EAAM,KAAK,KAAKI,CAAI,EAC7B,IAAK,KACH,OAAOJ,EAAM,KAAK,KAAKI,CAAI,EAC7B,IAAK,QACH,OAAOJ,EAAM,KAAK,KAAKI,CAAI,EAC7B,QACE,OAAOJ,EAAM,KAAK,MAAMI,CAAI,CAChC,CACF,CAKO,SAASC,GAAWC,EAAaC,EAA2B,CACjE,GAAI,CAACD,EACH,MAAO,GAIT,GAAIA,EAAI,QAAUC,EAChB,OAAOD,EAIT,IAAME,EAAQF,EAAI,MAAM,GAAG,EACvBG,EAAS,GACTC,EAAc,GAGlB,QAAWC,KAAQH,GAEZE,EAAcC,GAAM,OAASJ,GAAaG,EAAY,OAAS,IAClED,GAAU,GAAGC,EAAY,KAAK,CAAC;AAAA,EAC/BA,EAAc,IAEhBA,EAAc,GAAGA,CAAW,GAAGC,CAAI,IAIrC,OAAID,EAAY,OAAS,IACvBD,GAAUC,EAAY,KAAK,GAGtBD,CACT,CAKO,SAASG,GAAiBC,EAAgC,CAC/D,QAAWC,KAAUD,EAAY,CAC/B,QAAQ,IAAI,SAAI,OAAO,EAAE,CAAC,EAG1B,IAAME,EAAUV,GAAWS,EAAO,QAASb,EAAkB,EACvDe,EAAOX,GAAWS,EAAO,KAAMZ,EAAe,EAGpD,QAAQ,IAAIF,EAAM,KAAK,GAAGG,EAAeW,EAAO,IAAI,CAAC,EAAE,CAAC,EACxD,QAAQ,IACNd,EAAM,IAAI,GAAGc,EAAO,IAAI,MAAMA,EAAO,IAAI,MAAMA,EAAO,WAAW,EAAE,CACrE,EACA,QAAQ,IAAId,EAAM,KAAK,GAAGA,EAAM,MAAMe,CAAO,CAAC,EAAE,CAAC,EAG7CD,EAAO,MACT,QAAQ,IAAId,EAAM,IAAIgB,CAAI,CAAC,CAE/B,CACA,QAAQ,IAAI,SAAI,OAAO,EAAE,CAAC,EAC1B,QAAQ,IAAI;AAAA,CAAI,CAClB,CC5HA,OAAOC,MAAY,kBAMnB,eAAsBC,IAAuC,CAC3D,GAAI,CAEF,IAAMC,EAAQ,QAAQ,IAAI,aAE1B,GAAI,CAACA,EACH,MAAM,IAAI,MAAM,8CAA8C,EAIhE,GAAI,CAAC,QAAQ,IAAI,mBAAqB,CAAC,QAAQ,IAAI,kBACjD,MAAM,IAAI,MACR,gEACF,EAGF,IAAMC,EAAUH,EAAO,WAAWE,CAAK,EACjC,CAAE,QAAAE,CAAQ,EAAIJ,EAGpB,GAAI,CAACI,EAAQ,QAAQ,aACnB,MAAM,IAAI,MAAM,6CAA6C,EAG/D,GAAM,CAACC,EAAOC,CAAI,EAAI,QAAQ,IAAI,kBAAkB,MAAM,GAAG,EACvDC,EAAWH,EAAQ,QAAQ,aAAa,OAGxC,CAAE,KAAMI,CAAe,EAC3B,MAAML,EAAQ,KAAK,OAAO,kBAAkB,CAC1C,MAAAE,EACA,KAAAC,EACA,aAAcC,CAChB,CAAC,EAGH,OAAOC,EAAe,IAAKC,GAAUA,EAAM,IAAI,CACjD,OAASC,EAAO,CACd,MAAIA,aAAiB,MACb,IAAI,MAAM,kCAAkCA,EAAM,OAAO,EAAE,EAE7DA,CACR,CACF,CAOA,eAAsBC,GAAeF,EAA8B,CACjE,GAAI,CAEF,IAAMP,EAAQ,QAAQ,IAAI,aAE1B,GAAI,CAACA,EACH,MAAM,IAAI,MAAM,8CAA8C,EAIhE,GAAI,CAAC,QAAQ,IAAI,mBAAqB,CAAC,QAAQ,IAAI,kBACjD,MAAM,IAAI,MACR,gEACF,EAGF,IAAMC,EAAUH,EAAO,WAAWE,CAAK,EACjC,CAAE,QAAAE,CAAQ,EAAIJ,EAGpB,GAAI,CAACI,EAAQ,QAAQ,aACnB,MAAM,IAAI,MAAM,6CAA6C,EAG/D,GAAM,CAACC,EAAOC,CAAI,EAAI,QAAQ,IAAI,kBAAkB,MAAM,GAAG,EACvDC,EAAWH,EAAQ,QAAQ,aAAa,OAGxCQ,EAAgB,oBAAoBH,CAAK,GAGzCD,EAAiB,MAAMP,GAAkB,EAG/C,GAAIO,EAAe,SAASI,CAAa,EACvC,OAIF,IAAMC,EAAyBL,EAAe,OAC3CM,GACCA,EAAc,WAAW,kBAAkB,GAC3CA,IAAkBF,CACtB,EAGA,QAAWE,KAAiBD,EAC1B,MAAMV,EAAQ,KAAK,OAAO,YAAY,CACpC,MAAAE,EACA,KAAAC,EACA,aAAcC,EACd,KAAMO,CACR,CAAC,EAIH,MAAMX,EAAQ,KAAK,OAAO,UAAU,CAClC,MAAAE,EACA,KAAAC,EACA,aAAcC,EACd,OAAQ,CAACK,CAAa,CACxB,CAAC,EAED,MACF,OAASF,
EAAO,CACd,MAAIA,aAAiB,MACb,IAAI,MAAM,0BAA0BA,EAAM,OAAO,EAAE,EAErDA,CACR,CACF,CH/GA,eAAsBK,GACpBC,EAAQ,GACRC,EAAW,GACI,CACf,IAAMC,EAAUC,GAAI;AAAA,CAAuB,EAAE,MAAM,EAEnD,GAAI,CACF,IAAMC,EAAa,MAAMC,EAAI,kBAAkB,EAS/C,GANIL,GACFM,GAAiBF,CAAU,EAG7BF,EAAQ,QAAQ,2BAA2BE,EAAW,MAAM,EAAE,EAE1DA,EAAW,SAAW,EAAG,CAC3B,IAAMG,EAAcH,EAAW,IAAKI,GAAWA,EAAO,IAAI,EAEpDC,EADc,MAAM,KAAK,IAAI,IAAIF,CAAW,CAAC,EAEhD,IAAKG,GAASC,EAAeD,CAAI,CAAC,EAClC,KAAK,IAAI,EACZR,EAAQ,QAAQ,uBAAuBO,CAAc,EAAE,CACzD,MACEP,EAAQ,KACN;AAAA;AAAA,8DAGF,EAGED,GACF,MAAMW,GAAaR,EAAYF,CAAO,CAE1C,OAASW,EAAO,CACdX,EAAQ,KAAK,+BAA+B,EAC5C,QAAQ,MAAMW,CAAK,CACrB,CACF,CAOA,eAAeD,GACbR,EACAF,EACe,CACf,IAAMY,EAAmB,CACvB,WACA,OACA,MACA,OACA,OACA,QACA,WACA,OACA,QACA,KACA,OACF,EAEMC,EAAmBX,EACtB,IAAKI,GAAWA,EAAO,IAAI,EAC3B,OAAQE,GAASI,EAAiB,SAASJ,CAAI,CAAC,EAE/CM,EAAgB,KAChBC,EAAuB,OAAO,kBAElC,QAAWP,KAAQK,EAAkB,CACnC,IAAMG,EAAgBJ,EAAiB,QAAQJ,CAAI,EAC/CQ,EAAgBD,IAClBA,EAAuBC,EACvBF,EAAgBN,EAEpB,CAEA,GAAIM,EAAe,CACjB,IAAMG,EAAehB,GACnB,6CACF,EAAE,MAAM,EACR,GAAI,CAGF,IAFuB,MAAMiB,GAAkB,GAE5B,SAAS,oBAAoBJ,CAAa,EAAE,EAAG,CAChEG,EAAa,KACX,2BAA2BR,EAAeK,CAAa,CAAC,uCAC1D,EACA,MACF,CAEAG,EAAa,KAAO,oCACpB,MAAME,GAAeL,CAAa,EAClCG,EAAa,QACX,2BAA2BR,EAAeK,CAAa,CAAC,gCAC1D,CACF,OAASH,EAAgB,CACvB,IAAMS,EACJT,aAAiB,MAAQA,EAAM,QAAU,OAAOA,CAAK,EACvDM,EAAa,KAAKG,CAAY,CAChC,CACF,MACEpB,EAAQ,KACNqB,GAAM,OAAO,kDAAkD,CACjE,CAEJ,CDvHA,IAAOC,GAAQC,GACZ,QAAQ,eAAe,EACvB,MAAM,IAAI,EACV,OACC,kBACA,gEACF,EACC,OAAO,cAAe,mDAAmD,EACzE,YACC,8GACF,EACC,OAAO,MAAOC,GAAW,CACxB,MAAMC,GAAeD,EAAO,MAAOA,EAAO,QAAQ,CACpD,CAAC,EKhBH,OAAOE,OAAQ,UACf,OAAS,OAAAC,OAAW,aACpB,OAAOC,MAAW,QAClB,OAAS,WAAAC,OAAe,YACxB,OAAOC,OAAS,MAGhB,IAAOC,GAAQF,GACZ,QAAQ,YAAY,EACpB,OACC,sCACA,uCACA,uBACF,EACC,OACC,2BACA,0CACA,GACF,EACC,YACC,iIACF,EACC,OAAO,MAAOG,GAAW,CACxB,MAAMC,GAAkBD,CAAM,CAChC,CAAC,EAOGC,GAAoB,MAAOD,GAA0C,CACzE,GAAM,CAAE,UAAAE,EAAW,SAAAC,CAAS,EAAIH,EAE1BI,EAAiBN,GAAI,uBAAuB,EAAE,MAAM,EAE1D,GAAI,CACF,IAAMO,EAAkB,eAGlBC,EAAc,KAAK,MAAMZ,GAAG,aAAaW,EAAiB,MAAM,CAAC,EAGvED,EAAe,KAAO,mDAEtB,IAAMG,EAAiB,WAAWJ,CAAQ,GACpCK,EAAmBF,EAAY,KAAK,MAAM,GAAG,EAAE,CAAC,EAChDG,EAAc,GAAGP,CAAS,IAAIM,CAAgB,GAC9CE,EAAmB,MAAMC,GAC7BJ,EACAE,EACAL,CACF,EACMQ,EAAiB,GAAGL,CAAc,IAAIG,CAAgB,GAE5DJ,EAAY,KAAOG,EACnBH,EAAY,QAAUM,EAEtBR,EAAe,KAAO,kCAGtBV,GAAG,cACDW,EACA,GAAG,KAAK,UAAUC,EAAa,KAAM,CAAC,CAAC;AAAA,EACvC,MACF,EAEAF,EAAe,QACb,+BAA+BR,EAAM,MAAMgB,CAAc,CAAC,QAAQhB,EAAM,MAAMa,CAAW,CAAC,EAC5F,EAGA,QAAQ,KAAK,CAAC,CAChB,OAASI,EAAgB,CACvBT,EAAe,KAAK,kCAAkCS,CAAK,EAAE,EAC7D,QAAQ,KAAK,CAAC,CAChB,CACF,EAGMF,GAAsB,CAC1BJ,EACAE,EACAK,IAEO,IAAI,QAASC,GAAY,CAC9B,GAAI,CA6CF,IAASC,EAAT,SAAwBC,EAAqC,CAC3D,GAAIA,EAAI,aAAe,IAAK,CAE1BH,EAAQ,KACN,mCAAmClB,EAAM,IAAIqB,EAAI,UAAU,CAAC,2BAC9D,EACAF,EAAQ,CAAC,EACT,MACF,CAEAD,EAAQ,KAAO,oCACf,IAAII,EAAO,GACXD,EAAI,GAAG,OAASE,GAA2B,CACzCD,GAAQC,CACV,CAAC,EAEDF,EAAI,GAAG,MAAO,IAAM,CAClB,GAAI,CACF,IAAMG,EAAc,KAAK,MAAMF,CAAI,EAC7BG,EAAWD,EAAY,SACzB,OAAO,KAAKA,EAAY,QAAQ,EAChC,CAAC,EAELN,EAAQ,KAAO,qCAGf,IAAIQ,EAAe,GACbC,EAAe,IAAI,OAAO,IAAIhB,CAAc,YAAY,EAE9D,QAAWiB,KAAWH,EAAU,CAC9B,IAAMI,EAAQD,EAAQ,MAAMD,CAAY,EACxC,GAAIE,EAAO,CACT,IAAMC,EAAY,OAAO,SAASD,EAAM,CAAC,EAAG,EAAE,EAC9CH,EAAe,KAAK,IAAIA,EAAcI,CAAS,CACjD,CACF,CAGIJ,GAAgB,EAClBR,EAAQ,KACN,0BAA0BlB,EAAM,MAAM,GAAGW,CAAc,IAAIe,CAAY,EAAE,CAAC,qBAAqB1B,EAAM,MAAM,GAAGW,CAAc,IAAIe,EAAe,CAAC,EAAE,CAAC,EACrJ,EAEAR,EAAQ,KACN,iCAAiClB,EAAM,MAAMW,CAAc,CAAC,mBAAmBX,EAAM,MAAM,GAAGW,CAAc,IAAI,CAAC,EACnH,EAEFQ,EAAQO,EAAe,CAAC,CAC1B,OAAST,EAAO,CAEdC,EAAQ,KACN,0CAA0CD,aAAiB,MAAQA,EAAM,QAAU,eAAe,2BACpG,EACAE,EAAQ,CAAC,CACX,CACF,CAAC,CACH,EAxDS,IAAAC,IA3CT,IAAMW,EAAc,8BAA8BlB,
CAAW,GAEvDmB,EAAMjC,GACVgC,EACA,CACE,QAAS,CAAE,OAAQ,kBAAmB,CACxC,EACCV,GAAQ,CAEP,IACGA,EAAI,aAAe,KAAOA,EAAI,aAAe,MAC9CA,EAAI,QAAQ,SACZ,CAEAH,EAAQ,KAAK,yBAAyBG,EAAI,QAAQ,QAAQ,KAAK,EAC/D,GAAI,CACFtB,GACEsB,EAAI,QAAQ,SACZ,CAAE,QAAS,CAAE,OAAQ,kBAAmB,CAAE,EAC1CD,CACF,EACG,GAAG,QAAUa,GAAQ,CAEpBf,EAAQ,KACN,6BAA6Be,EAAI,OAAO,2BAC1C,EACAd,EAAQ,CAAC,CACX,CAAC,EACA,IAAI,CACT,OAASF,EAAO,CAEdC,EAAQ,KACN,4BAA4BD,aAAiB,MAAQA,EAAM,QAAU,eAAe,2BACtF,EACAE,EAAQ,CAAC,CACX,CACA,MACF,CAEAC,EAAeC,CAAG,CACpB,CACF,EA4DAW,EAAI,GAAG,QAAUC,GAAQ,CAEvBf,EAAQ,KAAK,kBAAkBe,EAAI,OAAO,2BAA2B,EACrEd,EAAQ,CAAC,CACX,CAAC,EAEDa,EAAI,IAAI,CACV,MAAgB,CAEdd,EAAQ,KACN,iEACF,EACAC,EAAQ,CAAC,CACX,CACF,CAAC,EC3MH,OAAOe,OAAU,YACjB,OAAS,iBAAAC,OAAqB,WAC9B,OAAS,WAAAC,OAAe,YACxB,OAAOC,OAAU,OAGjB,IAAMC,GAAaC,GAAc,YAAY,GAAG,EAC1CC,GAAaC,GAAK,QAAQA,GAAK,QAAQH,EAAU,EAAG,IAAI,EAEvDI,GAAQC,GACZ,QAAQ,MAAM,EACd,OAAO,cAAe,+BAA+B,EACrD,OAAO,wBAAyB,0BAA0B,EAC1D,OAAO,aAAc,yCAAyC,EAC9D,OAAO,gCAAiC,yBAAyB,EACjE,YAAY,uDAAuD,EACnE,OAAO,MAAOC,GAAW,CAOxB,IAAIC,EAAU,qBANKJ,GAAK,KACtBD,GACA,OACA,UACA,4BACF,CAC6C,IACvCM,EAAe,GAAG,QAAQ,IAAI,CAAC,uBAUrC,GARIF,EAAO,iBACTC,GAAW,eAGTD,EAAO,QACTC,GAAW,YAGTD,EAAO,MAAO,CAChB,IAAMG,EAAQ,MAAM,QAAQH,EAAO,KAAK,EACpCA,EAAO,MAAM,KAAK,GAAG,EACrBA,EAAO,MACXC,GAAW,aAAaE,CAAK,GAC/B,CAEAC,EAAMH,CAAO,EAETD,EAAO,MACT,MAAMK,GAAKH,CAAY,CAE3B,CAAC,EC9CH,OAAOI,OAAQ,mBACf,OAAOC,OAAU,YACjB,OAAS,WAAAC,OAAe,YACxB,OAAOC,OAAc,WACrB,OAAOC,OAAS,MCJhB,OAAOC,OAAQ,mBACf,OAAOC,OAAU,YACjB,OAAOC,OAAS,MAEhB,IAAMC,GAAa,CACjB,YAAa,kBACb,eAAgB,KAChB,cAAe,KACf,cAAe,OACf,WAAY,KACZ,YAAa,eACb,cAAe,KACf,WAAY,EACZ,oBAAqB,UACrB,MAAO,GACP,UAAW,GACX,cAAe,EACf,KAAM,GACN,aAAc,KACd,WAAY,KACZ,MAAO,KACP,oBAAqB,GACrB,SAAU,GACV,YAAa,GACb,OAAQ,KACR,WAAY,MACZ,aAAc,SACd,YAAa,UACb,gBAAiB,EACjB,qBAAsB,EACtB,IAAK,KACL,OAAQ,MACR,eAAgB,GAChB,SAAU,SACV,QAAS,KACT,YAAa,GACb,KAAM,CACJ,gCACA,4BACA,6BACA,iCACA,gCACA,6BACA,4BACA,6BACA,2BACA,+BACA,+BACA,6BACA,6BACA,6BACA,iCACA,6BACA,gCACA,2BACA,6BACA,6BACA,0BACA,2BACA,8BACA,8BACA,+BACA,gCACA,4BACA,2BACA,2BAGF,EACA,eAAgB,KAChB,eAAgB,KAChB,cAAe,KACf,UAAW,KACX,aAAc,GACd,UAAW,GACX,YAAa,KACb,WAAY,GACZ,iBAAkB,IACpB,EAEA,SAASC,GAAOC,EAAQ,CACtB,OAAO,OAAO,QAAQA,CAAM,EACzB,IAAI,CAAC,CAACC,EAAKC,CAAK,IACX,MAAM,QAAQA,CAAK,EACd,GAAGD,CAAG;AAAA,MAAUC,EAAM,KAAK;AAAA,KAAQ,CAAC,GAEzC,OAAOA,GAAU,UAAYA,IAAU,KAClC,GAAGD,CAAG;AAAA,EAAM,OAAO,QAAQC,CAAK,EACpC,IAAI,CAAC,CAACC,EAAGC,CAAC,IAAM,KAAKD,CAAC,KAAKC,CAAC,EAAE,EAC9B,KAAK;AAAA,CAAI,CAAC,GAER,GAAGH,CAAG,KAAKC,CAAK,EACxB,EACA,KAAK;AAAA,CAAI,CACd,CAEA,eAAsBG,GAAsCC,EAAY,CACtE,IAAMC,EAAUV,GAAI,uCAAuC,EAAE,MAAM,EAC7DW,EAAgBT,GAAOD,EAAU,EACjCW,EAAab,GAAK,KAAKU,EAAY,kCAAkC,EAE3E,GAAI,CACF,MAAMX,GAAG,UAAUc,EAAYD,EAAe,MAAM,EACpDD,EAAQ,QAAQ,yCAAyCE,CAAU,EAAE,CACvE,OAASC,EAAO,CACdH,EAAQ,KAAK,2CAA2C,EACxD,QAAQ,MAAMG,CAAK,CACrB,CACF,CC5GA,OAAOC,OAAQ,UAiBR,SAASC,GACdC,EACiB,CACjB,IAAMC,EAAmC,CAAC,EACpCC,EAA2B,CAAC,EAC9BC,EAAyB,CAAC,EACxBC,EAAkB,CAAC,EAGzB,QAAWC,KAAOL,EAChBC,EAASI,CAAG,EAAIL,EAAeK,CAAG,EAAE,UAAU,OAIhD,QAAWA,KAAOJ,EACZA,EAASI,CAAG,IAAM,GACpBD,EAAM,KAAKC,CAAG,EAIlB,KAAOD,EAAM,OAAS,GAAG,CACvBD,EAAe,CAAC,EAEhB,IAAMG,EAAcF,EAAM,OAC1B,QAASG,EAAI,EAAGA,EAAID,EAAaC,IAAK,CACpC,IAAMC,EAAUJ,EAAM,MAAM,EAC5BD,EAAa,KAAKK,CAAO,EAGzB,QAAWC,KAAaT,EAAeQ,CAAO,EAAE,kBAC9CP,EAASQ,CAAS,IAGdR,EAASQ,CAAS,IAAM,GAC1BL,EAAM,KAAKK,CAAS,CAG1B,CACAP,EAAQ,KAAKC,CAAY,CAC3B,CAGA,GAAID,EAAQ,KAAK,EAAE,SAAW,OAAO,KAAKF,CAAc,EAAE,OACxD,MAAM,IAAI,MAAM,+BAA+B,EAGjD,OAAOE,CACT,CAEA,SAASQ,GAA0BC,EAA6B,CAC9D,OAAOb,GAAG,YAAYa,CAAS,EAAE,OAAQC,GAASA,EAAK,SAAS,OAAO,CAAC,CAC1E,CAOA,eAAsBC,GACpBC,EACAC,EAA+B,CAAC,EACP,CACzB,QAAQ,IAAIA,CAAkB,EAC9
B,IAAIf,EAAiC,CAAC,EAEhCgB,EAAQN,GAA0BI,CAAiB,EAEzD,QAAWF,KAAQI,EAAO,CAExB,GAAIJ,IAAS,sBACX,SAGF,IAAMK,EAAWnB,GAAG,aAAa,GAAGgB,CAAiB,IAAIF,CAAI,GAAI,OAAO,EAClEM,EAA2B,KAAK,MAAMD,CAAQ,EAE9CE,EAAcD,EAAK,KACnBE,EAAmB,OAAO,KAAKF,EAAK,gBAAgB,EACpDG,EAAkB,OAAO,KAAKH,EAAK,eAAe,EAClDI,EAAe,OAAO,KAAKJ,EAAK,YAAY,EAE7ClB,EAAemB,CAAW,IAC7BnB,EAAemB,CAAW,EAAI,CAAE,UAAW,CAAC,EAAG,kBAAmB,CAAC,CAAE,GAGvE,IAAMI,EAAkB,CACtB,GAAGH,EACH,GAAGC,EACH,GAAGC,CACL,EAEAtB,EAAemB,CAAW,EAAE,UAAY,CAAC,GAAG,IAAI,IAAII,CAAe,CAAC,EAEpE,QAAWC,KAAcD,EAClBvB,EAAewB,CAAU,IAC5BxB,EAAewB,CAAU,EAAI,CAAE,UAAW,CAAC,EAAG,kBAAmB,CAAC,CAAE,GAGjExB,EAAewB,CAAU,EAAE,kBAAkB,SAASL,CAAW,GACpEnB,EAAewB,CAAU,EAAE,kBAAkB,KAAKL,CAAW,CAGnE,CAGA,GAAIJ,EAAmB,OAAQ,CAG7B,IAAMU,EAAmB,IAAI,IAG7B,OAAW,CAACpB,EAAKqB,CAAI,IAAK,OAAO,QAAQ1B,CAAc,EACjD0B,EAAK,UAAU,KAAMC,GAAQZ,EAAmB,SAASY,CAAG,CAAC,GAC/DF,EAAiB,IAAIpB,CAAG,EAK5B,QAAWuB,KAAUb,EACff,EAAe4B,CAAM,GACvBH,EAAiB,IAAIG,CAAM,EAK/B,IAAMC,EAA0C,CAAC,EACjD,QAAWxB,KAAOoB,EAChBI,EAAwBxB,CAAG,EAAI,CAC7B,UAAWL,EAAeK,CAAG,EAAE,UAAU,OAAQsB,GAC/CF,EAAiB,IAAIE,CAAG,CAC1B,EACA,kBAAmB3B,EAAeK,CAAG,EAAE,kBAAkB,OAAQsB,GAC/DF,EAAiB,IAAIE,CAAG,CAC1B,CACF,EAGF3B,EAAiB6B,CACnB,MACE,QAAQ,IAAI,uDAAuD,EAIrE,OAAA/B,GAAG,cACD,GAAGgB,CAAiB,uBACpB,KAAK,UAAUd,EAAgB,KAAM,CAAC,CACxC,EAEOA,CACT,CCtKA,OAAO8B,MAAU,YACjB,OAAOC,OAAa,eACpB,OAAS,iBAAAC,OAAqB,WAEvB,SAASC,IAAiB,CAC/B,IAAMC,EAAUH,GAAQ,IAAI,MAAQA,GAAQ,IAAI,YAEhD,OAAOD,EAAK,KAAKI,EAAS,OAAO,CACnC,CAEO,SAASC,MAAeC,EAAM,CACnC,OAAON,EAAK,KAAKG,GAAe,EAAG,GAAGG,CAAI,CAC5C,CAEO,SAASC,MAAeC,EAAc,CAC3C,IAAMC,EAAWP,GAAc,YAAY,GAAG,EACxCQ,EAAUV,EAAK,QAAQS,CAAQ,EAErC,OAAOT,EAAK,QAAQU,EAAS,GAAGF,CAAY,CAC9C,CHLA,IAAMG,GAAaC,GAAY,iBAAkB,QAAQ,EAEnDC,GAAaD,GAAY,iBAAkB,SAAS,EAc1D,IAAME,GAAiB,CACrB,mCACA,+BACA,gCACA,oCACA,mCACA,gCACA,+BACA,gCACA,8BACA,kCACA,kCACA,gCACA,gCACA,iCACA,gCACA,oCACA,gCACA,mCACA,8BACA,gCACA,gCACA,6BACA,8BACA,iCACA,iCACA,kCACA,mCACA,+BACA,8BACA,8BACF,EAEMC,GAAe,CACnB,GAAGD,GACH,iCACA,uCACA,qCACA,6BACA,oBACF,EAQA,eAAeE,GACbC,EACiB,CAEjB,GAAI,CACF,MAAMC,GAAG,MAAMC,GAAY,CAAE,UAAW,EAAK,CAAC,EAC9C,MAAMD,GAAG,MAAME,GAAY,CAAE,UAAW,EAAK,CAAC,CAChD,OAASC,EAAO,CACd,QAAQ,MAAM,iDAAkDA,CAAK,EACrE,QAAQ,KAAK,CAAC,CAChB,CAEA,IAAMC,EAAUC,GAAI,6BAA6B,EAAE,MAAM,EAGzDD,EAAQ,KAAO,yDACf,MAAME,GAAsCJ,EAAU,EAEtDE,EAAQ,KAAO,8CAIf,IAAMG,EAAqB,0BADRC,GAAY,SAAU,gBAAgB,CACM,cAAcC,GAAK,KAAKP,GAAY,kCAAkC,CAAC,GACtI,GAAI,CACF,MAAMQ,EAAMH,CAAkB,CAChC,OAASJ,EAAO,CACdC,EAAQ,KAAK,qCAAqC,EAClD,QAAQ,MAAMD,CAAK,EACnB,QAAQ,KAAK,CAAC,CAChB,CAEA,OAAAC,EAAQ,KAAO,yDACf,MAAMO,GAAqBV,GAAYF,CAAgB,EAEvDK,EAAQ,QAAQ,yCAAyC,EAElDK,GAAK,KAAKR,GAAY,qBAAqB,CACpD,CAEA,IAAMW,GAA+B,MACnCC,GACwB,CACxB,IAAMT,EAAUC,GAAI,4BAA4B,EAAE,MAAM,EAClDS,EAAiB,KAAK,MAC1B,MAAMd,GAAG,SAASa,EAAoB,OAAO,CAC/C,EAEAT,EAAQ,KAAO,gCACf,IAAMW,EAAUC,GAAsBF,CAAc,EACpD,OAAAV,EAAQ,QAAQ,0CAA0C,EAEnDW,CACT,EAIOE,GAAQC,GAAQ,QAAQ,OAAO,EAAE,OAAO,MAAOC,GAAW,CAC/D,IAAMC,EAAU,MAAMC,GAAS,OAAO,CACpC,CACE,KAAM,SACN,KAAM,cACN,QAAS,4CACT,QAAS,CACP,CACE,KAAM,qCACN,MAAO,eACT,CACF,EACA,QAAS,CAAC,eAAyB,CACrC,EAEA,CACE,KAAM,QACN,KAAM,cACN,QAAS,wCACT,KAAOD,GAAYA,EAAQ,cAAgB,gBAC3C,SAAWE,GACTA,EAAM,KAAK,IAAM,IAAM,+BAC3B,EAEA,CACE,KAAM,UACN,KAAM,cACN,QAAS,+CACT,QAAS,GACT,YAAcC,GACZA,EAAQ,uCAAyC,oBACnD,KAAOH,GAAYA,EAAQ,cAAgB,eAC7C,EAEA,CACE,KAAM,WACN,KAAM,sBACN,QACE,uEACF,QAASxB,GAAe,IAAK4B,IAAe,CAC1C,KAAMA,EAAU,QAAQ,qBAAsB,EAAE,EAChD,MAAOA,CACT,EAAE,EACF,KAAOJ,GACLA,EAAQ,cAAgB,iBACxBA,EAAQ,WACZ,CACF,CAAC,EAED,OAAQA,EAAQ,YAAa,CAC3B,IAAK,gBAA2B,CAE9B,IAAMhB,EAAUC,GAAI,sBAAsB,EAAE,MAAM,EAC5CQ,EAAqB,MAAMf,GAC/BsB,EAAQ,mBACV,EAEAhB,EAAQ,KAAO,0CAIf,IAAMqB,GAFJ,MAAMb,GAA6BC,CAAkB,
GAGpD,IACC,CAACa,EAAOC,IACN,SAASA,EAAQ,CAAC;AAAA,EAAKD,EAAM,IAAKE,GAAQ,OAAOA,EAAI,QAAQ,oBAAqB,gBAAgB,EAAE,QAAQ,qBAAsB,sBAAsB,CAAC,EAAE,EAAE,KAAK;AAAA,CAAI,CAAC,EAC3K,EACC,KAAK;AAAA;AAAA,CAAM,EAEd,QAAQ,IAAIH,CAAsB,EAElCrB,EAAQ,KAAO,8CAGf,IAAI,QAASyB,GAAY,WAAWA,EAAS,GAAI,CAAC,EAClDzB,EAAQ,QAAQ,2CAA2C,EAG3D,KACF,CAEA,QACE,QAAQ,MAAM,0BAA0B,CAE5C,CACF,CAAC,EI1ND,OAAS,WAAA0B,OAAe,YAGjB,IAAMC,GAAcC,GACxB,QAAQ,MAAM,EACd,YAAY,4BAA4B,EACxC,OAAO,YAAa,+CAAgD,EAAK,EACzE,OAAO,MAAOC,GAAY,CAErBA,EAAQ,KACV,MAAMC,EAAI,EAGZ,MAAMC,EAAK,CACb,CAAC,EjCEHC,GACG,KAAK,MAAM,EACX,QAAQC,EAAkB,CAAC,EAC3B,YAAY,8CAA8C,EAE7DD,GAAQ,YAAY,YAAaE,GAAW,CAAC,EAE7CF,GAAQ,MAAM",
6
- "names": ["program", "figlet", "mind", "auroSplash_default", "fs", "path", "fileURLToPath", "debugLog", "message", "getPackageVersion", "__filename", "__dirname", "packagePath", "error", "program", "ora", "withBuildOptions", "command", "withServerOptions", "terser", "watch", "rmSync", "join", "ora", "rollup", "ora", "spawn", "ora", "shell", "command", "_args", "commandString", "spinner", "finalCommand", "finalArgs", "parts", "isWatchMode", "child", "commandOutput", "data", "output", "resolve", "reject", "code", "fs", "path", "Docs", "options", "outDir", "outFile", "manifestPath", "manifestContent", "error", "elements", "docsDir", "apiMarkdown", "apiFilename", "els", "module", "dec", "element", "includeTitle", "m", "properties", "attributes", "mergedData", "processedNames", "prop", "attr", "headers", "separator", "rows", "item", "value", "parameters", "param", "name", "data", "filteredData", "description", "p", "obj", "pathInput", "fallback", "parts", "current", "s", "cem", "cemSpinner", "ora", "shell", "error", "errorMessage", "docs", "docsSpinner", "Docs", "analyzeComponents", "cem", "docs", "Logger", "generateReadmeUrl", "processContentForFile", "templateFiller", "defaultDocsProcessorConfig", "pathFromCwd", "pathLike", "fileConfigs", "config", "processDocFiles", "fileConfig", "err", "runDefaultDocsBuild", "cleanupDist", "distPath", "join", "spinner", "ora", "rmSync", "error", "runBuildStep", "taskName", "taskFn", "successMsg", "failMsg", "result", "buildTypeDefinitions", "config", "outputConfig", "bundle", "rollup", "buildCombinedBundle", "mainConfig", "demoConfig", "mainBundle", "demoBundle", "generateDocs", "options", "sourceFiles", "outFile", "skipDocs", "skipSpinner", "analyzeComponents", "runDefaultDocsBuild", "basename", "join", "nodeResolve", "glob", "dts", "litScss", "path", "glob", "watchGlobs", "globs", "items", "item", "filename", "error", "DEFAULTS", "getPluginsConfig", "modulePaths", "options", "watchPatterns", "dedupe", "allModulePaths", "nodeResolve", "litScss", "join", "watchGlobs", "getMainBundleConfig", "watch", "input", "outputDir", "format", "getExternalConfig", "getWatcherConfig", "getDemoConfig", "globPattern", "ignorePattern", "glob", "file", "basename", "getDtsConfig", "dts", "watchOptions", "additional", "startDevServer", "hmrPlugin", "ora", "DEFAULT_CONFIG", "startDevelopmentServer", "options", "serverSpinner", "serverConfig", "context", "next", "server", "error", "path", "ora", "rollup", "buildInProgress", "builds", "MIN_BUILD_INTERVAL", "sourceEventPaths", "OUTPUT_PATHS", "isOutputFile", "filePath", "normalizedPath", "path", "outputPath", "error", "runBuildTask", "taskName", "taskFn", "task", "handleWatcherEvents", "watcher", "options", "onInitialBuildComplete", "isInitialBuild", "buildTasksResults", "scheduledTasksTimer", "bundleSpinner", "watchSpinner", "ora", "buildTasks", "dtsSpinner", "create_dts", "rollup", "getDtsConfig", "sourceFiles", "outFile", "skipDocs", "skipSpinner", "analyzeSpinner", "analyzeComponents", "docsSpinner", "generateDocs", "checkInitialBuildComplete", "schedulePostBundleTasks", "delay", "event", "inputs", "input", "setupWatchModeListeners", "closeSpinner", "runProductionBuild", "options", "mainBundleConfig", "getMainBundleConfig", "demoConfig", "getDemoConfig", "dtsConfig", "getDtsConfig", "terser", "generateDocs", "buildCombinedBundle", "buildTypeDefinitions", "setupWatchMode", "isDevMode", "watcher", "watch", "handleWatcherEvents", "startDevelopmentServer", "setupWatchModeListeners", "buildWithRollup", "cleanupDist", "error", 
"devCommand", "program", "withBuildOptions", "withServerOptions", "dev_default", "options", "build", "ora", "buildWithRollup", "error", "program", "ora", "buildCommand", "program", "withBuildOptions", "build_default", "options", "build", "ora", "buildWithRollup", "error", "exec", "path", "process", "fileURLToPath", "util", "program", "inquirer", "migrate_default", "program", "options", "filename", "fileURLToPath", "dirname", "path", "scriptPath", "execPromise", "util", "exec", "process", "inquirer", "shell", "process", "program", "readFile", "writeFile", "Logger", "fs", "Logger", "processContentForFile", "templateFiller", "REMOTE_TEMPLATE_BASE_URL", "BRANCH_BASE", "TARGET_BRANCH_TO_COPY", "CONFIG_TEMPLATE", "githubDirShape", "branchNameToRemoteUrl", "branchOrTag", "filePath", "filePathToRemoteInput", "outputPath", "remoteUrl", "removeDirectory", "dirPath", "error", "syncDotGithubDir", "rootDir", "githubPath", "fileConfigs", "missingFiles", "dir", "file", "inputPath", "fileConfig", "config", "errorMessage", "sync_default", "program", "Logger", "cwd", "process", "syncDotGithubDir", "codeownersPath", "codeownersFixed", "readFile", "writeFile", "fs", "path", "Logger", "program", "glob", "prepWcaCompatibleCode_default", "code", "sourcePath", "defaultTag", "className", "classDesc", "WAC_DIR", "path", "globPath", "sources", "source", "glob", "err", "createExtendsFile", "filePaths", "fs", "filePath", "resolvedPath", "fileContent", "newPath", "newCode", "prepWcaCompatibleCode_default", "main", "wca_setup_default", "program", "Logger", "error", "program", "chalk", "ora", "appendFile", "readFile", "Logger", "simpleGit", "git", "error", "Git", "_Git", "pattern", "err", "currentBranch", "targetBranch", "commitRange", "commitChunks", "chunk", "commits", "parts", "hash", "date", "author_name", "subject", "bodyLines", "line", "body", "shortHash", "typeMatch", "type", "log", "result", "files", "branchName", "message", "chalk", "MAX_SUBJECT_LENGTH", "MAX_BODY_LENGTH", "getColoredType", "type", "wrapString", "str", "maxLength", "words", "result", "currentLine", "word", "displayDebugView", "commitList", "commit", "subject", "body", "github", "getExistingLabels", "token", "octokit", "context", "owner", "repo", "prNumber", "existingLabels", "label", "error", "applyLabelToPR", "prefixedLabel", "existingSemanticLabels", "existingLabel", "analyzeCommits", "debug", "setLabel", "spinner", "ora", "commitList", "Git", "displayDebugView", "commitTypes", "commit", "formattedTypes", "type", "getColoredType", "handleLabels", "error", "validCommitTypes", "foundCommitTypes", "selectedLabel", "highestPriorityIndex", "priorityIndex", "labelSpinner", "getExistingLabels", "applyLabelToPR", "errorMessage", "chalk", "check_commits_default", "program", "option", "analyzeCommits", "fs", "get", "chalk", "program", "ora", "pr_release_default", "option", "updatePackageJson", "namespace", "prNumber", "packageSpinner", "packageJsonPath", "packageJson", "releaseVersion", "packageComponent", "packageName", "incrementVersion", "getIncrementVersion", "packageVersion", "error", "spinner", "resolve", "handleResponse", "res", "data", "chunk", "packageData", "versions", "maxIteration", "versionRegex", "version", "match", "iteration", "registryUrl", "req", "err", "path", "fileURLToPath", "program", "open", "__filename", "fileURLToPath", "cliRootDir", "path", "test_default", "program", "option", "command", "coveragePath", "files", "shell", "open", "fs", "path", "program", "inquirer", "ora", "fs", "path", "ora", "JsonConfig", "toYaml", "config", 
"key", "value", "k", "v", "createMultiGitterDependencyTreeConfig", "outputPath", "spinner", "configContent", "configPath", "error", "fs", "getBatchedUpdateOrder", "dependencyTree", "inDegree", "batches", "currentBatch", "queue", "pkg", "queueLength", "i", "current", "dependent", "getJsonFilesFromDirectory", "directory", "file", "formatDependencyTree", "jsonFileDirectory", "targetDependencies", "files", "contents", "data", "packageName", "peerDependencies", "devDependencies", "dependencies", "allDependencies", "dependency", "relevantPackages", "node", "dep", "target", "_filteredDependencyTree", "path", "process", "fileURLToPath", "getAuroHomeDir", "homeDir", "withHomeDir", "args", "fromCliRoot", "relativePath", "filename", "dirname", "CONFIG_DIR", "withHomeDir", "OUTPUT_DIR", "auroComponents", "auroPackages", "getOrCreateDependencyTree", "relevantPackages", "fs", "OUTPUT_DIR", "CONFIG_DIR", "error", "spinner", "ora", "createMultiGitterDependencyTreeConfig", "multiGitterCommand", "fromCliRoot", "path", "shell", "formatDependencyTree", "getDependencyBatchesFromTree", "dependencyTreePath", "dependencyTree", "batches", "getBatchedUpdateOrder", "agent_default", "program", "option", "answers", "inquirer", "input", "value", "component", "batchedUpdateOrderText", "batch", "index", "pkg", "resolve", "program", "docsCommand", "program", "options", "cem", "docs", "program", "getPackageVersion", "auroSplash_default"]
4
+ "sourcesContent": ["import { program } from \"commander\";\nimport auroSplash from \"#utils/auroSplash.js\";\nimport getPackageVersion from \"#utils/packageVersion.js\";\n\n// Register commands (importing them will register them)\nimport \"#commands/dev.js\";\nimport \"#commands/build.js\";\nimport \"#commands/migrate.js\";\nimport \"#commands/sync.js\";\nimport \"#commands/wca-setup.js\";\nimport \"#commands/check-commits.ts\";\nimport \"#commands/pr-release.ts\";\nimport \"#commands/test.js\";\nimport \"#commands/agent.ts\";\nimport \"#commands/docs.ts\";\n\nprogram\n .name(\"auro\")\n .version(getPackageVersion())\n .description(\"A cli tool to support the Auro Design System\");\n\nprogram.addHelpText(\"beforeAll\", auroSplash());\n\nprogram.parse();\n", "import figlet from \"figlet\";\nimport { mind } from \"gradient-string\";\n\nexport default () => {\n return mind(figlet.textSync(\"Auro CLI\"));\n};\n", "/* eslint-disable no-underscore-dangle, no-undef */\nimport fs from \"node:fs\";\nimport path from \"node:path\";\nimport { fileURLToPath } from \"node:url\";\n\n/**\n * Simple debug logger that only prints when DEBUG environment variable is set.\n * @param {string} message - The message to log.\n */\nfunction debugLog(message) {\n if (process.env.DEBUG) {\n console.log(`[DEBUG] ${message}`);\n }\n}\n\n/**\n * Retrieves the version from the package.json file.\n * @returns {string} The version from package.json.\n */\nexport default function getPackageVersion() {\n try {\n // Get the directory path of the current module\n const __filename = fileURLToPath(import.meta.url);\n const __dirname = path.dirname(__filename);\n debugLog(`Current module path: ${__dirname}`);\n\n // Standard installed module location - current directory\n const packagePath = path.resolve(__dirname, \"..\", \"package.json\");\n\n debugLog(`Checking package.json at: ${packagePath}`);\n if (fs.existsSync(packagePath)) {\n debugLog(`Found package.json at: ${packagePath}`);\n const packageJson = JSON.parse(fs.readFileSync(packagePath, \"utf8\"));\n return packageJson.version;\n }\n\n // Fallback to a default version if we can't find the package.json\n debugLog(\n \"Could not find package.json in the standard installed module location, using default version\",\n );\n return \"0.0.0\";\n } catch (error) {\n console.error(\"Error retrieving package version:\", error);\n return \"0.0.0\";\n }\n}\n", "import { program } from \"commander\";\nimport ora from \"ora\";\nimport {\n withBuildOptions,\n withServerOptions,\n} from \"#commands/_sharedOptions.js\";\nimport { buildWithRollup } from \"#scripts/build/index.js\";\n\nlet devCommand = program\n .command(\"dev\")\n .description(\"Runs development server for auro components\");\n\ndevCommand = withBuildOptions(devCommand);\ndevCommand = withServerOptions(devCommand);\n\nexport default devCommand.action(async (options) => {\n try {\n const build = ora(\"Initializing...\");\n\n if (options.watch) {\n build.text = \"Waiting for changes...\";\n build.spinner = \"bouncingBar\";\n build.color = \"green\";\n } else {\n build.text =\n options.docs === false\n ? 
\"Building component (docs disabled)\"\n : \"Building component\";\n }\n\n build.start();\n\n if (!options.watch) {\n build.succeed(\"Build completed!\");\n }\n\n await buildWithRollup({ ...options, dev: true, watch: options.watch });\n } catch (error) {\n // If there's any active spinner, we need to fail it\n ora().fail(`Build failed: ${error.message}`);\n console.error(error);\n process.exit(1);\n }\n});\n", "/**\n * @param {import('commander').Command} command\n * @returns {import('commander').Command}\n */\nexport function withBuildOptions(command) {\n return command\n .option(\"-m, --module-paths [paths...]\", \"Path(s) to node_modules folder\")\n .option(\"-w, --watch\", \"Watches for changes\")\n .option(\"--skip-docs\", \"Skip documentation generation\", false)\n .option(\n \"--wca-input [files...]\",\n \"Source file(s) to analyze for API documentation\",\n )\n .option(\"--wca-output [files...]\", \"Output file(s) for API documentation\");\n}\n\n/**\n * @param {import('commander').Command} command\n */\nexport function withServerOptions(command) {\n return command\n .option(\"-s, --serve\", \"Starts a server\")\n .option(\"-p, --port <number>\", \"Port for the server\")\n .option(\"-o, --open\", \"Open the browser after starting the server\");\n}\n", "import terser from \"@rollup/plugin-terser\";\nimport { watch } from \"rollup\";\nimport {\n buildCombinedBundle,\n buildTypeDefinitions,\n cleanupDist,\n generateDocs,\n} from \"./bundleHandlers.js\";\nimport {\n getDemoConfig,\n getDtsConfig,\n getMainBundleConfig,\n} from \"./configUtils.js\";\nimport { startDevelopmentServer } from \"./devServerUtils.js\";\nimport {\n handleWatcherEvents,\n setupWatchModeListeners,\n} from \"./watchModeHandlers.js\";\n\n/**\n * Run a production build once\n * @param {object} options - Build options\n * @returns {Promise<void>}\n */\nasync function runProductionBuild(options) {\n const mainBundleConfig = getMainBundleConfig(options);\n const demoConfig = getDemoConfig(options);\n const dtsConfig = getDtsConfig();\n\n // Add terser for minification in production\n mainBundleConfig.config.plugins.push(terser());\n\n // Generate docs if enabled\n await generateDocs(options);\n\n // Build main and demo bundles\n await buildCombinedBundle(mainBundleConfig.config, demoConfig.config);\n\n // Build TypeScript definitions\n await buildTypeDefinitions(dtsConfig.config, dtsConfig.config.output);\n}\n\n/**\n * Set up watch mode for development\n * @param {object} options - Build options\n * @returns {Promise<object>} - Rollup watcher\n */\nasync function setupWatchMode(options) {\n const { dev: isDevMode } = options;\n const mainBundleConfig = getMainBundleConfig({ ...options, watch: true });\n const demoConfig = getDemoConfig({ ...options, watch: true });\n\n // Create and configure the watcher\n const watcher = watch([mainBundleConfig.config, demoConfig.config]);\n\n // Set up watcher event handlers\n handleWatcherEvents(\n watcher,\n options,\n isDevMode ? 
async () => startDevelopmentServer(options) : undefined,\n );\n\n // Set up clean shutdown\n setupWatchModeListeners(watcher);\n\n return watcher;\n}\n\n/**\n * Build the component using Rollup with the provided options\n * @param {object} options - Build configuration options\n * @param {boolean} [options.dev=false] - Whether to run in development mode\n * @param {boolean} [options.watch] - Whether to run in watch mode (defaults to value of dev)\n * @param {boolean} [options.docs=true] - Whether to generate documentation\n * @returns {Promise<object|void>} - Rollup watcher if in watch mode\n */\nexport async function buildWithRollup(options = {}) {\n try {\n const { watch } = options;\n\n // Clean output directory\n cleanupDist();\n\n // Run production build once or set up watch mode\n // Only use watch mode if explicitly enabled\n if (watch) {\n return await setupWatchMode(options);\n }\n\n return await runProductionBuild(options);\n } catch (error) {\n throw new Error(`Build failed: ${error.message}`);\n }\n}\n\n// Re-export utilities for backward compatibility\nexport { cleanupDist };\n", "import { rmSync } from \"node:fs\";\nimport { join } from \"node:path\";\nimport ora from \"ora\";\nimport { rollup } from \"rollup\";\nimport { analyzeComponents } from \"#scripts/analyze.js\";\nimport { runDefaultDocsBuild } from \"#scripts/build/defaultDocsBuild.js\";\n\n/**\n * Clean up the dist folder\n * @returns {boolean} Success status\n */\nexport function cleanupDist() {\n const distPath = join(\"./dist\");\n const spinner = ora(\"Cleaning dist folder...\").start();\n\n try {\n rmSync(distPath, { recursive: true, force: true });\n spinner.succeed(\"All clean! Dist folder wiped.\");\n return true;\n } catch (error) {\n spinner.fail(`Oops! Couldn't clean dist/ folder: ${error.message}`);\n console.error(error);\n return false;\n }\n}\n\n/**\n * Run a build step with spinner feedback\n * @param {string} taskName - Name of the task for spinner text\n * @param {Function} taskFn - Async function to execute the task\n * @param {string} successMsg - Message to show on success\n * @param {string} failMsg - Message to show on failure\n * @returns {Promise<any>} - Result of the task function or throws error\n */\nasync function runBuildStep(taskName, taskFn, successMsg, failMsg) {\n const spinner = ora(taskName).start();\n\n try {\n const result = await taskFn();\n spinner.succeed(successMsg);\n return result;\n } catch (error) {\n spinner.fail(failMsg);\n console.error(`Error: ${error.message}`);\n throw error;\n }\n}\n\n/**\n * Builds the TypeScript definition files\n * @param {object} config - Rollup config for d.ts generation\n * @param {object} outputConfig - Output configuration for d.ts files\n */\nexport async function buildTypeDefinitions(config, outputConfig) {\n return runBuildStep(\n \"Creating type definitions...\",\n async () => {\n const bundle = await rollup(config);\n await bundle.write(outputConfig);\n await bundle.close();\n },\n \"Types files built.\",\n \"Darn! 
Type definitions failed.\",\n );\n}\n\n/**\n * Builds both the main bundle and demo files in one operation\n * @param {object} mainConfig - Rollup config for the main bundle\n * @param {object} demoConfig - Rollup config for the demo files\n */\nexport async function buildCombinedBundle(mainConfig, demoConfig) {\n return runBuildStep(\n `Bundling ${mainConfig.name || \"main\"} and ${demoConfig.name || \"demo\"}...`,\n async () => {\n // Build main bundle\n const mainBundle = await rollup(mainConfig);\n await mainBundle.write(mainConfig.output);\n await mainBundle.close();\n\n // Build demo files\n const demoBundle = await rollup(demoConfig);\n await demoBundle.write(demoConfig.output);\n await demoBundle.close();\n },\n `Bundles ready! ${mainConfig.name || \"Main\"} and ${demoConfig.name || \"demo\"} built.`,\n \"Bundle hiccup! Build failed.\",\n );\n}\n\n/**\n * Analyzes web components and generates API documentation.\n * @param {object} options - Options containing wcaInput and wcaOutput\n */\nexport async function generateDocs(options) {\n const { wcaInput: sourceFiles, wcaOutput: outFile, skipDocs } = options;\n\n if (skipDocs) {\n const skipSpinner = ora(\"Skipping docs generation...\").start();\n\n setTimeout(() => {\n skipSpinner.succeed(\"Docs generation skipped.\");\n }, 0);\n return;\n }\n\n return runBuildStep(\n \"Analyzing components and making docs...\",\n async () => {\n await analyzeComponents(sourceFiles, outFile);\n await runDefaultDocsBuild();\n },\n \"Docs ready! Looking good.\",\n \"Doc troubles!\",\n );\n}\n", "import ora from \"ora\";\nimport { shell } from \"#utils/shell.js\";\nimport Docs from \"./docs-generator.ts\";\n\nexport async function cem() {\n const cemSpinner = ora(\"Generating Custom Elements Manifest...\").start();\n\n try {\n // The shell function returns a promise that resolves when the command completes\n await shell(\n \"npx --package=@custom-elements-manifest/analyzer -y -- cem analyze --litelement --globs src/*.*js scripts/wca/**/*.*js --packagejson --dependencies\",\n );\n cemSpinner.succeed(\"Custom Elements Manifest generated successfully!\");\n } catch (error) {\n // Check if the error is just the plugin issue but the manifest was still created\n const errorMessage = error instanceof Error ? error.message : String(error);\n cemSpinner.warn('CEM analyzer completed with warnings: ' + errorMessage);\n }\n}\n\nexport async function docs() {\n const docsSpinner = ora(\"Generating API documentation...\").start();\n\n try {\n await Docs.generate();\n docsSpinner.succeed(\"API documentation generated successfully!\");\n } catch (error) {\n const errorMessage = error instanceof Error ? error.message : String(error);\n docsSpinner.fail(\"Failed to generate API documentation: \" + errorMessage);\n throw error;\n }\n}\n", "import { spawn } from \"node:child_process\";\nimport ora from \"ora\";\n\nconst shell = (command, _args) => {\n const commandString = `${command} ${_args ? 
_args.join(\" \") : \"\"}`;\n\n // Initialize the spinner but don't start it - we'll just use it for completion status\n const spinner = ora();\n\n // Parse command string if no args are provided\n let finalCommand = command;\n let finalArgs = _args || [];\n\n if (!_args && typeof command === \"string\") {\n const parts = command.split(\" \");\n finalCommand = parts[0];\n finalArgs = parts.slice(1);\n }\n\n // Simple check for watch mode - if the command contains --watch or -w flags\n const isWatchMode =\n commandString.includes(\"--watch\") || commandString.includes(\" -w\");\n\n // Use different stdio configurations based on watch mode\n const stdio = isWatchMode\n ? \"inherit\" // Full TTY support for watch mode\n : [\"inherit\", \"pipe\", \"pipe\"]; // Capture output but allow input for normal mode\n\n const child = spawn(finalCommand, finalArgs, {\n stdio,\n shell: true,\n });\n\n // Only set up output capture if we're not in watch mode (stdio isn't 'inherit')\n if (!isWatchMode) {\n // Store command output to display after completion\n const commandOutput = [];\n\n child.stdout?.on(\"data\", (data) => {\n // Convert buffer to string\n const output = data.toString();\n\n // Store full output\n commandOutput.push(output);\n\n // Output directly to console\n process.stdout.write(output);\n });\n\n child.stderr?.on(\"data\", (data) => {\n const output = data.toString();\n commandOutput.push(output);\n process.stderr.write(output);\n });\n }\n\n // Set up a promise to track command completion\n return new Promise((resolve, reject) => {\n child.on(\"close\", (code) => {\n if (code !== 0) {\n // In watch mode, don't treat exit codes as errors - these are typically user terminations\n if (isWatchMode) {\n spinner.info(`Watch mode terminated with code ${code}`);\n resolve(); // Resolve without an error for watch mode commands\n } else {\n spinner.fail(`${commandString} failed (code ${code})`);\n reject(new Error(`Command failed with exit code ${code}`));\n }\n } else {\n spinner.succeed(`${commandString} completed successfully`);\n resolve();\n }\n });\n });\n};\n\nexport { shell };\n", "/** biome-ignore-all lint/complexity/noThisInStatic: not confusing */\nimport fs from \"node:fs\";\nimport path from \"node:path\";\nimport type {\n Package,\n Module,\n Declaration,\n CustomElementDeclaration,\n ClassMember,\n Parameter,\n Attribute\n} from 'custom-elements-manifest';\n\ninterface GenerateOptions {\n outDir?: string;\n outFile?: string;\n manifestPath?: string;\n}\n\ninterface MergedTableData {\n name: string;\n properties: string;\n attributes: string;\n type: string;\n default: string;\n description: string;\n}\n\nexport default class Docs {\n private static manifest: Package = { schemaVersion: \"1.0.0\", readme: \"\", modules: [] };\n\n /**\n * Generate markdown documentation for all components\n */\n static generate(options: GenerateOptions = {}): void {\n const {\n outDir = \"./docs\",\n outFile = \"api.md\",\n manifestPath = \"./custom-elements.json\",\n } = options;\n\n // Use provided manifest or fallback to default\n if (manifestPath) {\n try {\n const manifestContent = fs.readFileSync(manifestPath, \"utf8\");\n this.manifest = JSON.parse(manifestContent) as Package;\n } catch (error) {\n console.error(`Error reading manifest file at ${manifestPath}:`, error);\n throw error;\n }\n }\n\n const elements = this.getElements();\n\n // Create docs directory if it doesn't exist\n const docsDir = outDir;\n if (!fs.existsSync(docsDir)) {\n fs.mkdirSync(docsDir, { recursive: true });\n }\n\n 
// Generate combined API documentation\n const apiMarkdown = this.renderAllElements(elements);\n const apiFilename = path.join(docsDir, outFile);\n fs.writeFileSync(apiFilename, apiMarkdown);\n console.log(`Generated combined API documentation at ${apiFilename}`);\n }\n\n /**\n * Extract custom elements from the manifest\n */\n static getElements(): CustomElementDeclaration[] {\n return this.manifest.modules.reduce(\n (els: CustomElementDeclaration[], module: Module) =>\n els.concat(\n module.declarations?.filter(\n (dec: Declaration): dec is CustomElementDeclaration => \n 'customElement' in dec && dec.customElement === true && 'tagName' in dec && \n this.isWcaModule(module),\n ) ?? [],\n ),\n [],\n );\n }\n\n /**\n * Check if a module has a path that matches the WCA pattern\n */\n static isWcaModule(module: Module): boolean {\n // Check if the module path matches \"scripts/wca/auro-*.js\"\n const path = module.path;\n if (!path) {\n return false;\n }\n \n // Match the pattern: starts with \"scripts/wca/auro-\" and ends with \".js\"\n return path.startsWith('scripts/wca/auro-') && path.endsWith('.js');\n }\n\n /**\n * Render all elements into a single markdown document\n */\n static renderAllElements(elements: CustomElementDeclaration[]): string {\n return `${elements\n .map((element: CustomElementDeclaration) => this.renderElement(element, false))\n .join(\"\\n\\n---\\n\\n\")}\n `;\n }\n\n /**\n * Render a single element as markdown\n */\n static renderElement(element: CustomElementDeclaration, includeTitle = true): string {\n return `${includeTitle ? `# ${element.tagName}\\n\\n` : `# ${element.tagName}\\n\\n`}${element.description ? `${element.description}\\n\\n` : \"\"}${this.renderPropertiesAttributesTable(element)}${this.renderTable(\n \"Methods\",\n [\"name\", \"parameters\", \"return.type.text\", \"description\"],\n (element.members || [])\n .filter(\n (m: ClassMember) =>\n m.kind === \"method\" && ('privacy' in m ? m.privacy !== \"private\" : true) && m.name[0] !== \"_\",\n )\n .map((m: ClassMember) => ({\n ...m,\n parameters: this.renderParameters('parameters' in m ? m.parameters as Parameter[] : undefined),\n })),\n )}${this.renderTable(\n \"Events\",\n [\"name\", \"description\"],\n element.events as unknown as Record<string, unknown>[],\n )}${this.renderTable(\n \"Slots\",\n [[\"name\", \"(default)\"], \"description\"],\n element.slots as unknown as Record<string, unknown>[],\n )}${this.renderTable(\n \"CSS Shadow Parts\",\n [\"name\", \"description\"],\n element.cssParts as unknown as Record<string, unknown>[],\n )}${this.renderTable(\n \"CSS Custom Properties\",\n [\"name\", \"description\"],\n element.cssProperties as unknown as Record<string, unknown>[],\n )}`;\n }\n\n /**\n * Render combined properties and attributes table\n */\n static renderPropertiesAttributesTable(element: CustomElementDeclaration): string {\n const properties = element.members?.filter((m: ClassMember) => m.kind === \"field\") || [];\n const attributes = element.attributes || [];\n\n // Create a merged dataset\n const mergedData: MergedTableData[] = [];\n const processedNames = new Set<string>();\n\n // Process properties first (only include those with descriptions)\n properties.forEach((prop: ClassMember) => {\n if (prop.description?.trim()) {\n mergedData.push({\n name: prop.name,\n properties: prop.name,\n attributes: ('attribute' in prop ? prop.attribute as string : '') || \"\",\n type: this.get(prop, \"type.text\") || \"\",\n default: ('default' in prop ? 
prop.default as string : '') || \"\",\n description: prop.description || \"\",\n });\n }\n processedNames.add(prop.name);\n if ('attribute' in prop && prop.attribute) {\n processedNames.add(prop.attribute as string);\n }\n });\n\n // Process attributes that don't have corresponding properties (only include those with descriptions)\n attributes.forEach((attr: Attribute) => {\n if (!processedNames.has(attr.name) && attr.description?.trim()) {\n mergedData.push({\n name: attr.name,\n properties: \"\",\n attributes: attr.name,\n type: this.get(attr, \"type.text\") || \"\",\n default: attr.default || \"\",\n description: attr.description || \"\",\n });\n }\n });\n\n if (mergedData.length === 0) {\n return \"\";\n }\n\n const headers = \"Properties | Attributes | Type | Default | Description \";\n const separator = \"--- | --- | --- | --- | ---\";\n\n const rows = mergedData\n .map((item: MergedTableData) =>\n [\n item.properties,\n item.attributes,\n item.type,\n item.default,\n item.description,\n ]\n .map((value: string) =>\n String(value || \"\")\n .replace(/\\|/g, \"\\\\|\")\n .replace(/\\n/g, \"<br>\"),\n )\n .join(\" | \"),\n )\n .join(\"\\n\");\n\n return `\n### Properties & Attributes\n\n| ${headers} |\n| ${separator} |\n${rows}\n\n`;\n }\n\n /**\n * Render method parameters as a formatted string\n */\n static renderParameters(parameters?: Parameter[]): string {\n if (!parameters || parameters.length === 0) {\n return \"None\";\n }\n\n return parameters\n .map(\n (param: Parameter) =>\n `\\`${param.name}\\` (${this.get(param, \"type.text\") || \"any\"})${param.description ? ` - ${param.description}` : \"\"}`,\n )\n .join(\"<br>\");\n }\n\n /**\n * Renders a markdown table of data, plucking the given properties from each item in `data`.\n */\n static renderTable(\n name: string, \n properties: (string | string[])[], \n data?: Array<Record<string, unknown>>\n ): string {\n if (data === undefined || data.length === 0) {\n return \"\";\n }\n\n // Filter out items without descriptions\n const filteredData = data.filter((item: Record<string, unknown>) => {\n const description = item.description;\n return typeof description === 'string' && description.trim();\n });\n\n if (filteredData.length === 0) {\n return \"\";\n }\n\n const headers = properties\n .map((p: string | string[]) => this.capitalize((Array.isArray(p) ? 
p[0] : p).split(\".\")[0]))\n .join(\" | \");\n\n const separator = properties.map(() => \"---\").join(\" | \");\n\n const rows = filteredData\n .map((item: Record<string, unknown>) =>\n properties\n .map((p: string | string[]) => {\n const value = this.get(item, p);\n // Escape pipes in table cells and handle multiline content\n return String(value || \"\")\n .replace(/\\|/g, \"\\\\|\")\n .replace(/\\n/g, \"<br>\");\n })\n .join(\" | \"),\n )\n .join(\"\\n\");\n\n return `\n### ${name}\n\n| ${headers} |\n| ${separator} |\n${rows}\n\n`;\n }\n\n /**\n * Reads a (possibly deep) path off of an object.\n */\n // biome-ignore lint/suspicious/noExplicitAny: utility method needs to work with any object structure\n static get(obj: any, pathInput: string | string[]): string {\n let fallback = \"\";\n let path: string = pathInput as string;\n if (Array.isArray(pathInput)) {\n [path, fallback] = pathInput;\n }\n const parts = path.split(\".\");\n // biome-ignore lint/suspicious/noExplicitAny: utility method needs to work with any object structure\n let current: any = obj;\n while (current && parts.length) {\n current = current[parts.shift() as string];\n }\n return current == null || current === \"\" ? fallback : String(current);\n }\n\n /**\n * Capitalize the first letter of a string\n */\n static capitalize(s: string): string {\n return s[0].toUpperCase() + s.substring(1);\n }\n}", "\nimport { cem, docs } from '#scripts/docs/index.ts';\n\n/**\n * Analyzes web components and generates API documentation.\n */\nexport async function analyzeComponents() {\n \n await cem();\n\n await docs();\n\n}", "import { Logger } from \"@aurodesignsystem/auro-library/scripts/utils/logger.mjs\";\nimport {\n generateReadmeUrl,\n processContentForFile,\n templateFiller,\n} from \"@aurodesignsystem/auro-library/scripts/utils/sharedFileProcessorUtils.mjs\";\n\n/**\n * Processor config object.\n * @typedef {Object} ProcessorConfig\n * @property {boolean} [overwriteLocalCopies=true] - The release version tag to use instead of master.\n * @property {string} [remoteReadmeVersion=\"master\"] - The release version tag to use instead of master.\n * @property {string} [remoteReadmeUrl] - The release version tag to use instead of master.\n * @property {string} [remoteReadmeVariant=\"\"] - The variant string to use for the README source.\n * (like \"_esm\" to make README_esm.md).\n */\n\n/**\n * @param {ProcessorConfig} config - The configuration for this processor.\n */\nexport const defaultDocsProcessorConfig = {\n overwriteLocalCopies: true,\n remoteReadmeVersion: \"master\",\n // eslint-disable-next-line no-warning-comments\n // TODO: remove this variant when all components are updated to use latest auro-library\n // AND the default README.md is updated to use the new paths\n remoteReadmeVariant: \"_updated_paths\",\n};\n\nfunction pathFromCwd(pathLike) {\n const cwd = process.cwd();\n return `${cwd}/${pathLike}`;\n}\n\n/**\n * @param {ProcessorConfig} config - The configuration for this processor.\n * @returns {import('../utils/sharedFileProcessorUtils').FileProcessorConfig[]}\n */\nexport const fileConfigs = (config) => [\n // README.md\n {\n identifier: \"README.md\",\n input: {\n remoteUrl:\n config.remoteReadmeUrl ||\n generateReadmeUrl(\n config.remoteReadmeVersion,\n config.remoteReadmeVariant,\n ),\n fileName: pathFromCwd(\"/docTemplates/README.md\"),\n overwrite: config.overwriteLocalCopies,\n },\n output: pathFromCwd(\"/README.md\"),\n },\n // index.md\n {\n identifier: \"index.md\",\n input: 
pathFromCwd(\"/docs/partials/index.md\"),\n output: pathFromCwd(\"/demo/index.md\"),\n mdMagicConfig: {\n output: {\n directory: pathFromCwd(\"/demo\"),\n },\n },\n },\n // api.md\n {\n identifier: \"api.md\",\n input: pathFromCwd(\"/docs/partials/api.md\"),\n output: pathFromCwd(\"/demo/api.md\"),\n preProcessors: [templateFiller.formatApiTable],\n },\n];\n\n/**\n *\n * @param {ProcessorConfig} config - The configuration for this processor.\n * @return {Promise<void>}\n */\nexport async function processDocFiles(config = defaultDocsProcessorConfig) {\n // setup\n await templateFiller.extractNames();\n\n for (const fileConfig of fileConfigs(config)) {\n try {\n // eslint-disable-next-line no-await-in-loop\n await processContentForFile(fileConfig);\n } catch (err) {\n Logger.error(`Error processing ${fileConfig.identifier}: ${err.message}`);\n }\n }\n}\n\nexport async function runDefaultDocsBuild() {\n await processDocFiles({\n ...defaultDocsProcessorConfig,\n remoteReadmeUrl:\n \"https://raw.githubusercontent.com/AlaskaAirlines/auro-templates/main/templates/default/README.md\",\n });\n}\n", "import { basename, join } from \"node:path\";\nimport { nodeResolve } from \"@rollup/plugin-node-resolve\";\nimport { glob } from \"glob\";\nimport { dts } from \"rollup-plugin-dts\";\nimport { litScss } from \"rollup-plugin-scss-lit\";\nimport { watchGlobs } from \"./plugins.js\";\n\n// Default paths used across configurations\nconst DEFAULTS = {\n moduleDirectories: [\"node_modules\"],\n modulePaths: [\"../../node_modules\", \"../node_modules\", \"node_modules\"],\n watchPatterns: [\"./apiExamples/**/*\", \"./docs/**/*\"],\n};\n\n/**\n * Creates Rollup plugins configuration.\n * @param {string[]} modulePaths - Additional paths to include in litScss.\n * @param {object} options - Additional options for plugins\n * @returns {object[]} - Array of Rollup plugins.\n */\nexport function getPluginsConfig(modulePaths = [], options = {}) {\n const {\n watchPatterns = DEFAULTS.watchPatterns,\n dedupe = [\"lit\", \"lit-element\", \"lit-html\"],\n } = options;\n\n // Combine default paths with any user-provided paths\n const allModulePaths = [...DEFAULTS.modulePaths, ...modulePaths];\n\n return [\n nodeResolve({\n dedupe,\n preferBuiltins: false,\n moduleDirectories: DEFAULTS.moduleDirectories,\n }),\n litScss({\n minify: { fast: true },\n options: {\n loadPaths: [...allModulePaths, join(process.cwd(), \"src\", \"styles\"), join(process.cwd(), \"src\")],\n },\n }),\n watchGlobs(watchPatterns),\n ];\n}\n\n/**\n * Creates Rollup configuration for the main bundle with output options.\n * @param {object} options - Build options.\n * @returns {object} - Complete Rollup configuration object with input and output.\n */\nexport function getMainBundleConfig(options = {}) {\n const {\n modulePaths = [],\n watch = false,\n input = [\"./src/index.js\", \"./src/registered.js\"],\n outputDir = \"./dist\",\n format = \"esm\",\n } = options;\n\n return {\n name: \"Main\",\n config: {\n input,\n output: {\n format,\n dir: outputDir,\n entryFileNames: \"[name].js\",\n },\n external: getExternalConfig(),\n plugins: getPluginsConfig(modulePaths),\n watch: getWatcherConfig(watch),\n },\n };\n}\n\n/**\n * Creates Rollup configuration for demo files.\n * @param {object} options - Build options.\n * @returns {object} - Rollup configuration object.\n */\nexport function getDemoConfig(options = {}) {\n const {\n modulePaths = [],\n watch = false,\n globPattern = \"./demo/*.js\",\n ignorePattern = [\"./demo/*.min.js\"],\n outputDir = 
\"./demo\",\n } = options;\n\n return {\n name: \"Demo\",\n config: {\n input: Object.fromEntries(\n glob.sync(globPattern, { ignore: ignorePattern }).map((file) => {\n const name = basename(file, \".js\");\n return [name, file];\n }),\n ),\n output: {\n format: \"esm\",\n dir: outputDir,\n entryFileNames: \"[name].min.js\",\n chunkFileNames: \"[name].min.js\",\n },\n plugins: getPluginsConfig(modulePaths),\n watch: getWatcherConfig(watch),\n },\n };\n}\n\n/**\n * Creates Rollup configuration for the d.ts files with output options.\n * @param {object} options - Configuration options\n * @returns {object} - Complete Rollup configuration object with input and output.\n */\nexport function getDtsConfig(options = {}) {\n const { input = [\"./dist/index.js\"], outputDir = \"./dist\" } = options;\n\n return {\n name: \"DTS\",\n config: {\n input,\n output: {\n format: \"esm\",\n dir: outputDir,\n entryFileNames: \"[name].d.ts\",\n },\n plugins: [dts()],\n },\n };\n}\n\n/**\n * Creates Rollup configuration for watch mode.\n * @param {boolean|object} watchOptions - Whether to enable watch mode or watch options\n * @returns {object|false} - Watch configuration for Rollup or false if disabled\n */\nexport function getWatcherConfig(watchOptions) {\n // Return false if watch mode is disabled\n if (!watchOptions) {\n return false;\n }\n\n // Allow passing a configuration object or use defaults\n const options = typeof watchOptions === \"object\" ? watchOptions : {};\n\n return {\n clearScreen: options.clearScreen ?? true,\n buildDelay: options.buildDelay ?? 500,\n chokidar: {\n ignoreInitial: true,\n // Ignore common output files that cause feedback loops\n ignored: options.ignored ?? [\n \"**/dist/**/*.d.ts\",\n \"**/custom-elements.json\",\n \"**/demo/*.md\",\n \"**/demo/**/*.min.js\",\n \"**/docs/api.md\",\n \"**/node_modules/**\",\n \"**/.git/**\",\n ],\n // Reduce watcher's sensitivity to prevent loops\n awaitWriteFinish: options.awaitWriteFinish ?? {\n stabilityThreshold: 1000,\n pollInterval: 100,\n },\n },\n include: options.include ?? [\n \"./src/**/*.scss\",\n \"./src/**/*.js\",\n \"./src/**/*.ts\",\n \"./demo/**/*.js\",\n \"./apiExamples/**/*\",\n \"./docs/**/*.md\",\n ],\n exclude: options.exclude ?? [\"./dist/**/*\", \"./node_modules/**/*\"],\n };\n}\n\n/**\n * Creates external configuration for Rollup.\n * @param {string[]} additional - Additional external patterns\n * @returns {(string|RegExp)[]} - Array of external dependencies.\n */\nexport function getExternalConfig(additional = []) {\n const defaults = [\n // externalize all lit dependencies\n /node_modules\\/lit/,\n /node_modules\\/lit-element/,\n /node_modules\\/lit-html/,\n /node_modules\\/@lit/,\n ];\n\n return [...defaults, ...additional];\n}\n", "import path from \"node:path\";\nimport { glob } from \"glob\";\n\n/**\n * Creates a plugin that watches file globs and adds them to Rollup's watch list.\n * @param {string|string[]} globs - Glob pattern(s) to watch\n * @returns {object} - Rollup plugin\n */\nexport function watchGlobs(globs) {\n return {\n name: \"watch-globs\",\n buildStart() {\n const items = Array.isArray(globs) ? 
globs : [globs];\n\n for (const item of items) {\n try {\n for (const filename of glob.sync(path.resolve(item))) {\n this.addWatchFile(filename);\n }\n } catch (error) {\n this.error(`Error watching glob pattern \"${item}\": ${error.message}`);\n }\n }\n },\n };\n}\n", "import { startDevServer } from \"@web/dev-server\";\nimport { hmrPlugin } from \"@web/dev-server-hmr\";\nimport ora from \"ora\";\n/**\n * Default server configuration\n */\nconst DEFAULT_CONFIG = {\n watch: true,\n nodeResolve: true,\n basePath: \"/\",\n rootDir: \"./demo\",\n hmrInclude: [\"src/**/*\", \"demo/**/*\", \"apiExamples/**/*\", \"docs/**/*\"],\n};\n\n/**\n * Starts the development server\n * @param {object} options - Server options\n * @param {boolean} [options.serve] - Whether to start the server\n * @param {number} [options.port] - Port number for the server\n * @param {boolean} [options.open] - Whether to open the browser\n * @param {string} [options.rootDir] - Root directory for serving files\n * @param {string[]} [options.hmrInclude] - Patterns to include for HMR\n * @returns {Promise<object>} - The server instance\n */\nexport async function startDevelopmentServer(options = {}) {\n if (!options.serve) return;\n\n const serverSpinner = ora(\"Firing up dev server...\\n\").start();\n\n try {\n // Merge options with defaults\n const serverConfig = {\n port: Number(options.port) || undefined,\n open: options.open ? \"/\" : undefined,\n watch: options.watch ?? DEFAULT_CONFIG.watch,\n nodeResolve: options.nodeResolve ?? DEFAULT_CONFIG.nodeResolve,\n basePath: options.basePath ?? DEFAULT_CONFIG.basePath,\n rootDir: options.rootDir ?? DEFAULT_CONFIG.rootDir,\n\n // HTML file extension middleware\n middleware: [\n function rewriteIndex(context, next) {\n if (!context.url.endsWith(\"/\") && !context.url.includes(\".\")) {\n context.url += \".html\";\n }\n return next();\n },\n ],\n\n // Hot Module Replacement plugin\n plugins: [\n hmrPlugin({\n include: options.hmrInclude ?? DEFAULT_CONFIG.hmrInclude,\n }),\n ],\n };\n\n // Start the server with our configuration\n const server = await startDevServer({\n config: serverConfig,\n readCliArgs: false,\n readFileConfig: false,\n });\n\n serverSpinner.stop();\n return server;\n } catch (error) {\n serverSpinner.fail(\"Server snag! 
Couldn't start dev server.\");\n console.error(\"Error starting development server:\", error);\n throw new Error(`Development server failed to start: ${error.message}`);\n }\n}\n", "import path from \"node:path\";\nimport ora from \"ora\";\nimport { rollup } from \"rollup\";\nimport { analyzeComponents } from \"#scripts/analyze.js\";\nimport { generateDocs } from \"./bundleHandlers.js\";\nimport { getDtsConfig } from \"./configUtils.js\";\n\n// Track if any build is in progress to prevent overlapping operations\nlet buildInProgress = false;\n\n// Track build states and times in a single object for cleaner management\nconst builds = {\n dts: { active: false, lastTime: 0 },\n analyze: { active: false, lastTime: 0 },\n docs: { active: false, lastTime: 0 },\n};\n\n// Minimum time between builds of the same type (in ms)\nconst MIN_BUILD_INTERVAL = 5000;\n\n// Track source paths of files that triggered a watch event\nconst sourceEventPaths = new Set();\n\n// Known output files that should never trigger a rebuild\nconst OUTPUT_PATHS = [\n \"/dist/index.d.ts\",\n \"/custom-elements.json\",\n \"/demo/api.md\",\n \"/docs/api.md\",\n \"/demo/index.min.js\",\n];\n\n// Path matching checks - handle any non-string input safely\nfunction isOutputFile(filePath) {\n if (!filePath || typeof filePath !== \"string\") return false;\n\n try {\n const normalizedPath = path.normalize(filePath);\n\n // Check if it's in our known output paths\n return (\n OUTPUT_PATHS.some((outputPath) => normalizedPath.endsWith(outputPath)) ||\n normalizedPath.includes(\"/dist/\") ||\n normalizedPath.endsWith(\".min.js\") ||\n normalizedPath.endsWith(\".d.ts\")\n );\n } catch (error) {\n console.error(`Error checking path (${typeof filePath}):`, error.message);\n return false; // If any error occurs, assume it's not an output file\n }\n}\n\n/**\n * Runs a build task with proper tracking of state\n * @param {string} taskName - Type of task (dts, analyze, docs)\n * @param {Function} taskFn - The actual task function to run\n * @returns {Promise<boolean>} - Success status\n */\nasync function runBuildTask(taskName, taskFn) {\n const task = builds[taskName];\n\n // Skip if build is active or within throttle time\n if (task.active || Date.now() - task.lastTime < MIN_BUILD_INTERVAL) {\n return false;\n }\n\n try {\n task.active = true;\n task.lastTime = Date.now();\n return await taskFn();\n } catch (error) {\n console.error(`Error in ${taskName} task:`, error);\n return false;\n } finally {\n task.active = false;\n }\n}\n\n/**\n * Handles the watcher events.\n * @param {object} watcher - Rollup watcher object.\n * @param {object} options - Build options.\n * @param {Function} [onInitialBuildComplete] - Callback to run after initial build completes.\n */\nexport async function handleWatcherEvents(\n watcher,\n options,\n onInitialBuildComplete,\n) {\n // Track if this is the first build\n let isInitialBuild = true;\n // biome-ignore lint/style/useConst: This is an object that is mutated.\n let buildTasksResults = { dts: false, analyze: false, docs: false };\n let scheduledTasksTimer = null;\n let bundleSpinner;\n\n // Create a spinner for watch mode\n const watchSpinner = ora(\"Activating watch mode...\").start();\n\n // The actual task functions\n const buildTasks = {\n // Function to build d.ts files\n dts: async () => {\n const dtsSpinner = ora(\"Crafting type definitions...\").start();\n try {\n const create_dts = await rollup(getDtsConfig().config);\n await create_dts.write(getDtsConfig().config.output);\n await 
create_dts.close();\n dtsSpinner.succeed(\"Type files built.\");\n return true;\n } catch (error) {\n dtsSpinner.fail(\"Types trouble! Build failed.\");\n console.error(\"TypeScript definition build error:\", error);\n return false;\n }\n },\n\n // Function to analyze components\n analyze: async () => {\n const { wcaInput: sourceFiles, wcaOutput: outFile, skipDocs } = options;\n if (skipDocs) {\n const skipSpinner = ora(\"Skipping component analysis...\").start();\n setTimeout(() => {\n skipSpinner.succeed(\"Component analysis skipped.\");\n }, 0);\n return true;\n }\n\n const analyzeSpinner = ora(\n \"Detective work: analyzing components...\",\n ).start();\n try {\n await analyzeComponents(sourceFiles, outFile);\n analyzeSpinner.succeed(\"Component analysis complete! API generated.\");\n return true;\n } catch (error) {\n analyzeSpinner.fail(\"Analysis hiccup! Something went wrong.\");\n console.error(\"Component analysis error:\", error);\n return false;\n }\n },\n\n // Function to rebuild documentation\n docs: async () => {\n // Skip if main bundle is still building\n if (buildInProgress) {\n return false;\n }\n\n // Check if docs generation is skipped\n if (options.skipDocs) {\n const skipSpinner = ora(\"Skipping docs generation...\").start();\n setTimeout(() => {\n skipSpinner.succeed(\"Docs generation skipped.\");\n }, 0);\n return true;\n }\n\n const docsSpinner = ora(\"Refreshing docs...\").start();\n try {\n await generateDocs(options);\n docsSpinner.succeed(\"Documentation refreshed!\");\n return true;\n } catch (error) {\n docsSpinner.fail(\"Docs stumble! Couldn't refresh.\");\n console.error(\"Documentation rebuild error:\", error);\n }\n },\n };\n\n // Check if all initial build tasks completed successfully\n const checkInitialBuildComplete = () => {\n if (\n isInitialBuild &&\n buildTasksResults.dts &&\n buildTasksResults.analyze &&\n buildTasksResults.docs &&\n typeof onInitialBuildComplete === \"function\"\n ) {\n isInitialBuild = false;\n onInitialBuildComplete();\n }\n };\n\n // Schedule the post-bundle tasks with proper sequencing\n function schedulePostBundleTasks(delay = 1000) {\n if (scheduledTasksTimer) {\n clearTimeout(scheduledTasksTimer);\n }\n\n scheduledTasksTimer = setTimeout(async () => {\n // Run tasks with delays between them to avoid race conditions\n buildTasksResults.dts = await runBuildTask(\"dts\", buildTasks.dts);\n\n setTimeout(async () => {\n buildTasksResults.analyze = await runBuildTask(\n \"analyze\",\n buildTasks.analyze,\n );\n\n setTimeout(async () => {\n buildTasksResults.docs = await runBuildTask(\"docs\", buildTasks.docs);\n checkInitialBuildComplete();\n }, 1000);\n }, 1000);\n }, delay);\n }\n\n // Set up event handlers for the watcher\n watcher.on(\"event\", async (event) => {\n switch (event.code) {\n case \"START\":\n watchSpinner.succeed(\"Watch mode active! Eyes peeled.\");\n break;\n\n case \"BUNDLE_START\":\n // Clear source paths from the previous bundle operation\n sourceEventPaths.clear();\n\n // Store source file paths that triggered this build\n if (event.input) {\n try {\n // Handle different input formats safely\n const inputs = Array.isArray(event.input)\n ? event.input\n : typeof event.input === \"string\"\n ? [event.input]\n : typeof event.input === \"object\" && event.input !== null\n ? 
Object.values(event.input)\n : [];\n\n for (const input of inputs) {\n // Only process string inputs and skip non-string values\n if (typeof input === \"string\" && !isOutputFile(input)) {\n sourceEventPaths.add(path.normalize(input));\n }\n }\n } catch (error) {\n console.error(\"Error processing input paths:\", error);\n }\n }\n\n bundleSpinner = ora(\"Weaving bundles...\").start();\n buildInProgress = true;\n break;\n\n case \"BUNDLE_END\":\n if (bundleSpinner) {\n bundleSpinner.succeed(\n `Bundle ${Array.isArray(event.input) ? `of ${event.input.join(\"& \")} ` : \"\"}done in ${event.duration}ms! \uD83D\uDE80`,\n );\n }\n buildInProgress = false;\n\n // Schedule post-bundle tasks if source files triggered this build\n if (sourceEventPaths.size > 0) {\n schedulePostBundleTasks();\n }\n break;\n\n case \"END\":\n // We've already scheduled tasks in BUNDLE_END, nothing to do here\n break;\n\n case \"ERROR\":\n buildInProgress = false;\n if (bundleSpinner) {\n bundleSpinner.fail(`Oops! Bundle hit a snag: ${event.error.message}`);\n } else {\n ora().fail(`Watch mode hiccup: ${event.error.message}`);\n }\n sourceEventPaths.clear();\n break;\n }\n });\n}\n\n/**\n * Setup watch mode for rollup\n * @param {object} watcher - Rollup watcher instance\n */\nexport function setupWatchModeListeners(watcher) {\n process.on(\"SIGINT\", () => {\n const closeSpinner = ora(\"Wrapping up...\").start();\n watcher.close();\n closeSpinner.succeed(\"All done! See you next time. \u2728\");\n process.exit(0);\n });\n\n return watcher;\n}\n", "import { program } from \"commander\";\nimport ora from \"ora\";\nimport { withBuildOptions } from \"#commands/_sharedOptions.js\";\nimport { buildWithRollup } from \"#scripts/build/index.js\";\n\nlet buildCommand = program\n .command(\"build\")\n .description(\"Builds auro components\");\n\nbuildCommand = withBuildOptions(buildCommand);\n\nexport default buildCommand.action(async (options) => {\n try {\n const build = ora(\"Initializing...\");\n\n if (options.watch) {\n build.text = \"Waiting for changes...\";\n build.spinner = \"bouncingBar\";\n build.color = \"green\";\n } else {\n build.text =\n options.docs === false\n ? 
\"Building component (docs disabled)\"\n : \"Building component\";\n }\n\n build.start();\n\n await buildWithRollup(options);\n\n if (!options.watch) {\n build.succeed(\"Build completed!\");\n }\n } catch (error) {\n // If there's any active spinner, we need to fail it\n ora().fail(`Build failed: ${error.message}`);\n console.error(error);\n process.exit(1);\n }\n});\n", "import { exec } from \"node:child_process\";\nimport path from \"node:path\";\nimport process from \"node:process\";\nimport { fileURLToPath } from \"node:url\";\nimport util from \"node:util\";\nimport { program } from \"commander\";\nimport inquirer from \"inquirer\";\nimport { shell } from \"#utils/shell.js\";\n\nexport default program\n .command(\"migrate\")\n .description(\"Script runner to perform repetitive code change tasks\")\n .requiredOption(\n \"-i, --id <string>\",\n \"Select the migration you would like to run by id\",\n )\n .option(\n \"-m, --multi-gitter\",\n \"Run the migration on all repositories in the multi-gitter config\",\n )\n .action(async (options) => {\n const filename = fileURLToPath(import.meta.url);\n const dirname = path.dirname(filename);\n const scriptPath = path.resolve(dirname, \"migrations\", options.id);\n\n if (options.multiGitter) {\n // Check if multi-gitter CLI command is available\n const execPromise = util.promisify(exec);\n\n try {\n await execPromise(\"command -v multi-gitter\");\n } catch {\n console.error(\"multi-gitter is not installed.\");\n process.exit(1);\n }\n\n const answers = await inquirer.prompt([\n {\n type: \"confirm\",\n name: \"dryRun\",\n message:\n \"Run migration in dry-run mode? (no changes will be committed)\",\n default: true,\n },\n ]);\n\n if (answers.dryRun) {\n shell(\n `multi-gitter run ${scriptPath}/script.sh --config \"${scriptPath}/multi-gitter.yml\" --dry-run`,\n );\n } else {\n shell(\n `multi-gitter run ${scriptPath}/script.sh --config \"${scriptPath}/multi-gitter.yml\"`,\n );\n }\n } else {\n shell(`${scriptPath}/script.sh`);\n }\n });\n", "import process from \"node:process\";\nimport { program } from \"commander\";\n\nimport { readFile, writeFile } from \"node:fs/promises\";\nimport { Logger } from \"@aurodesignsystem/auro-library/scripts/utils/logger.mjs\";\nimport { syncDotGithubDir } from \"#scripts/syncDotGithubDir.js\";\n\nexport default program\n .command(\"sync\")\n .description(\n \"Script runner to synchronize local repository configuration files\",\n )\n .action(async () => {\n Logger.info(\"Synchronizing repository configuration files...\");\n\n Logger.warn(\n \"Note: sync does not create a new git branch. Changes are added to the current branch.\",\n );\n\n const cwd = process.cwd();\n\n await syncDotGithubDir(cwd);\n\n // Cleanup for specific files\n // ------------------------------------------------------\n\n // Some files have specific cleanup tasks that need to be run after syncing\n\n // CODEOWNERS - has a bizarre issue with line endings. 
This is a workaround!\n // Maybe it has to do with the file type since there's no ending?\n const codeownersPath = `${cwd}/.github/CODEOWNERS`;\n const codeowners = await readFile(codeownersPath, { encoding: \"utf-8\" });\n\n // Convert line endings to \\n\n const codeownersFixed = codeowners\n .replace(/\\r\\n/gu, \"\\n\")\n .replace(/\\n\\n/gu, \"\\n\");\n await writeFile(codeownersPath, codeownersFixed, { encoding: \"utf-8\" });\n\n if (codeownersFixed.includes(\"\\r\") || codeownersFixed.includes(\"\\n\\n\")) {\n Logger.error(\"CODEOWNERS file still has Windows line endings.\");\n }\n });\n", "import fs from \"node:fs/promises\";\nimport { Logger } from \"@aurodesignsystem/auro-library/scripts/utils/logger.mjs\";\nimport {\n processContentForFile,\n templateFiller,\n} from \"@aurodesignsystem/auro-library/scripts/utils/sharedFileProcessorUtils.mjs\";\n\nconst REMOTE_TEMPLATE_BASE_URL =\n \"https://raw.githubusercontent.com/AlaskaAirlines/auro-templates\";\n\n// Constants for configuring sync branch and template selection\n// ------------------------------------------------------------\nconst BRANCH_BASE = \"main\";\nconst TARGET_BRANCH_TO_COPY = \"main\";\nconst CONFIG_TEMPLATE = \"default\";\n\n/**\n * @typedef {Object} GithubDirectory\n * @property {string[]} ISSUE_TEMPLATE - The issue template directory.\n * @property {string[]} workflows - The workflows directory.\n * @property {string} _root - The root directory (places files in .github directly).\n */\n\n/**\n * @type {GithubDirectory} githubDirectory\n */\nconst githubDirShape = {\n ISSUE_TEMPLATE: [\n \"bug_report.yaml\",\n \"config.yml\",\n \"feature_request.yaml\",\n \"general-support.yaml\",\n \"group.yaml\",\n \"story.yaml\",\n \"task.yaml\",\n ],\n workflows: [\"codeql.yml\", \"publishDemo.yml\", \"testPublish.yml\"],\n _root: [\n \"CODEOWNERS\",\n \"CODE_OF_CONDUCT.md\",\n \"CONTRIBUTING.md\",\n \"PULL_REQUEST_TEMPLATE.md\",\n \"SECURITY.md\",\n \"settings.yml\",\n \"stale.yml\",\n ],\n};\n\n// BELOW TYPES ARE COPIED DIRECTLY FROM THE LIBRARY\n// How can we import JSDoc types from the library?\n\n/**\n * This is the expected object type when passing something other than a string.\n * @typedef {Object} InputFileType\n * @property {string} remoteUrl - The remote template to fetch.\n * @property {string} fileName - Path including file name to store.\n * @property {boolean} [overwrite] - Default is true. 
Choose to overwrite the file if it exists.\n */\n\n/**\n * @typedef {Object} FileProcessorConfig\n * @property {string} identifier - A unique identifier for this file (used for logging).\n * @property {string | InputFileType} input - Path to an input file, including filename.\n * @property {string} output - Path to an output file, including filename.\n * @property {Partial<MarkdownMagicOptions>} [mdMagicConfig] - Extra configuration options for md magic.\n * @property {Array<(contents: string) => string>} [preProcessors] - Extra processor functions to run on content AFTER markdownmagic and BEFORE templateFiller.\n * @property {Array<(contents: string) => string>} [postProcessors] - Extra processor functions to run on content.\n */\n\n// BELOW NEEDS TO BE UPSTREAMED OR REMOVED FROM THE LIBRARY\n/**\n * Take a branch or tag name and return the URL for the README file.\n * @param {string} branchOrTag - The git branch or tag to use for the README source.\n * @param {string} filePath - The path to the file in the remote repository.\n * @returns {string} The complete URL for the remote file.\n */\nfunction branchNameToRemoteUrl(branchOrTag, filePath) {\n // check if tag starts with 'vX' since our tags are `v4.0.0`\n const isTag =\n branchOrTag.startsWith(\"v\") &&\n /^\\d+\\.\\d+\\.\\d+(?<_>-.*)?$/u.test(branchOrTag.slice(1));\n\n if (isTag) {\n return `${REMOTE_TEMPLATE_BASE_URL}/refs/tags/${branchOrTag}/${filePath}`;\n }\n\n if (branchOrTag !== BRANCH_BASE) {\n return `${REMOTE_TEMPLATE_BASE_URL}/refs/heads/${branchOrTag}/${filePath}`;\n }\n\n return `${REMOTE_TEMPLATE_BASE_URL}/${BRANCH_BASE}/${filePath}`;\n}\n\n/**\n * Take a branch or tag name and return the URL for the remote file.\n * @param {string} filePath - The name of the file to fetch.\n * @param {string} branchOrTag - The git branch or tag to use for the README source.\n * @param {string} outputPath - The path to the file in the local repository.\n * @returns {FileProcessorConfig} Configuration object for file processing.\n */\nfunction filePathToRemoteInput(filePath, branchOrTag, outputPath) {\n const remoteUrl = branchNameToRemoteUrl(branchOrTag, filePath);\n\n return {\n // Identifier is only used for logging\n identifier: filePath.split(\"/\").pop(),\n input: {\n remoteUrl,\n fileName: outputPath,\n overwrite: true,\n },\n output: outputPath,\n overwrite: true,\n };\n}\n\n/**\n * Recursively removes a directory and all its contents.\n * @param {string} dirPath - The path to the directory to remove.\n * @returns {Promise<void>} A promise that resolves when the directory is removed or rejects if an error occurs.\n * @throws {Error} If the directory cannot be removed.\n */\nasync function removeDirectory(dirPath) {\n try {\n await fs.rm(dirPath, { recursive: true, force: true });\n Logger.log(`Successfully removed directory: ${dirPath}`);\n } catch (error) {\n Logger.error(`Error removing directory ${dirPath}: ${error.message}`);\n throw error;\n }\n}\n\n/**\n * Sync the .github directory with the remote repository.\n * @param {string} rootDir - The root directory of the local repository.\n * @returns {Promise<void>} A promise that resolves when syncing is complete.\n */\nexport async function syncDotGithubDir(rootDir) {\n if (!rootDir) {\n Logger.error(\"Root directory must be specified\");\n // eslint-disable-next-line no-undef\n process.exit(1);\n }\n\n // Remove .github directory if it exists\n const githubPath = \".github\";\n\n try {\n await removeDirectory(githubPath);\n Logger.log(\".github directory removed 
successfully\");\n } catch (error) {\n Logger.error(`Error removing .github directory: ${error.message}`);\n // eslint-disable-next-line no-undef\n process.exit(1);\n }\n\n // Setup\n await templateFiller.extractNames();\n\n const fileConfigs = [];\n const missingFiles = [];\n\n for (const dir of Object.keys(githubDirShape)) {\n for (const file of githubDirShape[dir]) {\n const inputPath = `${dir === \"_root\" ? \"\" : `${dir}/`}${file}`;\n const outputPath = `${rootDir}/.github/${inputPath}`;\n\n const fileConfig = filePathToRemoteInput(\n `templates/${CONFIG_TEMPLATE}/.github/${inputPath}`,\n TARGET_BRANCH_TO_COPY,\n outputPath,\n );\n fileConfigs.push(fileConfig);\n }\n }\n\n // Check if files exist\n await Promise.all(\n fileConfigs.map(async (config) => {\n try {\n const response = await fetch(config.input.remoteUrl, {\n method: \"HEAD\",\n });\n if (!response.ok) {\n missingFiles.push(config.input.remoteUrl);\n }\n } catch {\n missingFiles.push(config.input.remoteUrl);\n }\n }),\n );\n\n // If missing, log and exit\n if (missingFiles.length > 0) {\n const errorMessage = missingFiles\n .map((file) => `File not found: ${file}`)\n .join(\"\\n\");\n Logger.error(\n `Failed to sync .github directory. Confirm githubDirShape object is up to date:\\n${errorMessage}`,\n );\n // eslint-disable-next-line no-undef\n process.exit(1);\n }\n\n // Process all files\n try {\n await Promise.all(\n fileConfigs.map((config) => processContentForFile(config)),\n );\n Logger.log(\"All files processed.\");\n } catch (error) {\n Logger.error(`Error processing files: ${error.message}`);\n // eslint-disable-next-line no-undef\n process.exit(1);\n }\n}\n", "/* eslint-disable no-await-in-loop, line-comment-position, no-inline-comments, jsdoc/require-jsdoc, no-undef */\n\nimport fs from \"node:fs\";\nimport path from \"node:path\";\nimport { Logger } from \"@aurodesignsystem/auro-library/scripts/utils/logger.mjs\";\nimport { program } from \"commander\";\nimport { glob } from \"glob\";\nimport getTemplatedComponentCode from \"#scripts/prepWcaCompatibleCode.mjs\";\n\n// Use glob directly as it's already promised-based in newer versions\n\nconst WAC_DIR = path.resolve(process.cwd(), \"./scripts/wca\");\n\nasync function globPath(sources) {\n try {\n const fileArrays = await Promise.all(sources.map((source) => glob(source)));\n return fileArrays.flat();\n } catch (err) {\n console.error(\"Error processing glob patterns:\", err);\n throw err; // Re-throw to handle failure at caller\n }\n}\n\nasync function createExtendsFile(filePaths) {\n if (!fs.existsSync(WAC_DIR)) {\n await fs.promises.mkdir(WAC_DIR, { recursive: true });\n }\n\n for (const filePath of filePaths) {\n const resolvedPath = path.resolve(process.cwd(), filePath);\n const fileContent = await fs.promises.readFile(resolvedPath, \"utf-8\");\n const newPath = path.resolve(WAC_DIR, `${path.basename(filePath)}`);\n const newCode = getTemplatedComponentCode(\n fileContent,\n path.relative(WAC_DIR, filePath),\n );\n await fs.promises.writeFile(newPath, newCode);\n }\n}\n\nasync function main() {\n // files to analyze\n const filePaths = await globPath([\"./src/auro-*.js\"]);\n await createExtendsFile(filePaths);\n}\n\nexport default program\n .command(\"wca-setup\")\n .description(\"Set up WCA (Web Component Analyzer) for the project\")\n .action(() => {\n main()\n .then(() => {\n Logger.success(\"WCA setup completed successfully.\");\n })\n .catch((error) => {\n Logger.error(`WCA setup failed: ${error.message}`);\n });\n });\n", "/* eslint-disable 
require-unicode-regexp, prefer-named-capture-group, prefer-destructuring, prettier/prettier */\n\nexport default (code, sourcePath) => {\n const defaultTag = (code.match(/static register\\(name \\= (.+)\\)/) ||\n code.match(/customElements.get\\((.+?)\\)/))[1];\n const className = code.match(/export class (.+) extends/)?.[1];\n const classDesc = code.match(/\\/\\*\\*((.|\\n)*?)(\\*\\n|\\*\\/|[@])/)?.[1] || \"\";\n\n if (!defaultTag || !className) {\n return code;\n }\n return `\nimport { ${className} } from '${sourcePath}';\n\n/**${classDesc}*/\nclass ${className}WCA extends ${className} {}\n\nif (!customElements.get(${defaultTag})) {\n customElements.define(${defaultTag}, ${className}WCA);\n}\n`;\n};\n", "import { program } from \"commander\";\nimport { analyzeCommits } from \"#scripts/check-commits/commit-analyzer.ts\";\n\nexport default program\n .command(\"check-commits\")\n .alias(\"cc\")\n .option(\n \"-l, --set-label\",\n \"Set label on the pull request based on the commit message type\",\n )\n .option(\"-d, --debug\", \"Display detailed commit information for debugging\")\n .description(\n \"Check commits in the local repository for the types of semantic commit messages made and return the results.\",\n )\n .action(async (option) => {\n await analyzeCommits(option.debug, option.setLabel);\n });\n", "import chalk from \"chalk\";\nimport ora from \"ora\";\nimport type { Ora } from \"ora\";\nimport { Git } from \"#utils/gitUtils.ts\";\nimport type { CommitInfo } from \"./display-utils.ts\";\nimport { displayDebugView, getColoredType } from \"./display-utils.ts\";\nimport { applyLabelToPR, getExistingLabels } from \"./github-labels.ts\";\n\n/**\n * Analyze commit messages in the repository\n * @param debug Whether to display detailed debug information\n * @param verbose Whether to display verbose commit messages without truncation\n * @param setLabel Whether to apply a label to the PR based on commit types\n * @returns A promise that resolves when analysis is complete\n */\nexport async function analyzeCommits(\n debug = false,\n setLabel = false,\n): Promise<void> {\n const spinner = ora(\"Checking commits...\\n\").start();\n\n try {\n const commitList = await Git.getCommitMessages();\n\n // Only display commit details if debug mode is enabled\n if (debug) {\n displayDebugView(commitList);\n }\n\n spinner.succeed(`Total commits analyzed: ${commitList.length}`);\n\n if (commitList.length !== 0) {\n const commitTypes = commitList.map((commit) => commit.type);\n const uniqueTypes = Array.from(new Set(commitTypes));\n const formattedTypes = uniqueTypes\n .map((type) => getColoredType(type))\n .join(\", \");\n spinner.succeed(`Found commit types: ${formattedTypes}`);\n } else {\n spinner.info(\n \"The list of commits is created by comparing the current branch\\n\" +\n \"with the main branch. 
If you are on a new branch, please\\n\" +\n \"make sure to commit some changes before running this command.\",\n );\n }\n\n if (setLabel) {\n await handleLabels(commitList, spinner);\n }\n } catch (error) {\n spinner.fail(\"Error getting commit messages\");\n console.error(error);\n }\n}\n\n/**\n * Handle applying labels based on commit types\n * @param commitList The list of commits to analyze\n * @param spinner The ora spinner instance for status updates\n */\nasync function handleLabels(\n commitList: CommitInfo[],\n spinner: Ora,\n): Promise<void> {\n const validCommitTypes = [\n \"breaking\",\n \"feat\",\n \"fix\",\n \"perf\",\n \"docs\",\n \"style\",\n \"refactor\",\n \"test\",\n \"build\",\n \"ci\",\n \"chore\",\n ];\n\n const foundCommitTypes = commitList\n .map((commit) => commit.type)\n .filter((type) => validCommitTypes.includes(type));\n\n let selectedLabel = null;\n let highestPriorityIndex = Number.POSITIVE_INFINITY;\n\n for (const type of foundCommitTypes) {\n const priorityIndex = validCommitTypes.indexOf(type);\n if (priorityIndex < highestPriorityIndex) {\n highestPriorityIndex = priorityIndex;\n selectedLabel = type;\n }\n }\n\n if (selectedLabel) {\n const labelSpinner = ora(\n \"Checking existing labels on pull request...\",\n ).start();\n try {\n const existingLabels = await getExistingLabels();\n\n if (existingLabels.includes(`semantic-status: ${selectedLabel}`)) {\n labelSpinner.info(\n `Label \"semantic-status: ${getColoredType(selectedLabel)}\" already exists on the pull request.`,\n );\n return;\n }\n\n labelSpinner.text = \"Applying label to pull request...\";\n await applyLabelToPR(selectedLabel);\n labelSpinner.succeed(\n `Label \"semantic-status: ${getColoredType(selectedLabel)}\" applied to the pull request.`,\n );\n } catch (error: unknown) {\n const errorMessage =\n error instanceof Error ? 
error.message : String(error);\n labelSpinner.fail(errorMessage);\n }\n } else {\n spinner.warn(\n chalk.yellow(\"No semantic commit type found to apply as label.\"),\n );\n }\n}\n", "import { appendFile, readFile } from \"node:fs/promises\";\nimport { Logger } from \"@aurodesignsystem/auro-library/scripts/utils/logger.mjs\";\nimport { simpleGit } from \"simple-git\";\nimport type { SimpleGit } from \"simple-git\";\n\n// Initialize simple-git with proper typing\nlet git: SimpleGit;\ntry {\n git = simpleGit({\n baseDir: process.cwd(),\n binary: \"git\",\n maxConcurrentProcesses: 1,\n });\n} catch (error) {\n Logger.error(`Failed to initialize git: ${error}`);\n // Provide a minimal implementation to prevent runtime errors\n git = {} as SimpleGit;\n}\n\nexport class Git {\n static async checkGitignore(pattern: string) {\n if (pattern === \"\") {\n return false;\n }\n try {\n const fileContent = await readFile(\".gitignore\", \"utf-8\");\n return fileContent.includes(pattern);\n } catch (err) {\n Logger.error(`Error reading file: ${err}`);\n return false;\n }\n }\n\n static async getCommitMessages(): Promise<\n Array<{\n type: string;\n hash: string;\n date: string;\n subject: string;\n body: string;\n message: string;\n author_name: string;\n }>\n > {\n try {\n interface GitCommitType {\n hash: string;\n date: string;\n subject: string;\n body: string;\n message: string;\n author_name: string;\n type: string;\n }\n\n const currentBranch = await git.branchLocal();\n Logger.info(`Current branch: ${currentBranch.current}`);\n\n // ---- Get target branch (main) and PR commits ----\n let targetBranch = \"main\";\n let commitRange = \"\";\n\n // Check if we're in a GitHub Actions environment\n const isGitHubAction = !!process.env.GITHUB_ACTIONS;\n\n if (isGitHubAction) {\n Logger.info(\"Running in GitHub Actions environment\");\n // In GitHub Actions, we can use environment variables to determine the PR branch and base\n targetBranch = process.env.GITHUB_BASE_REF || \"main\";\n\n try {\n // Ensure target branch is fetched\n await git.fetch(\"origin\", targetBranch);\n Logger.info(`Fetched target branch: origin/${targetBranch}`);\n\n // Use the merge base between target branch and current HEAD to get PR-specific commits\n const mergeBase = await git.raw([\n \"merge-base\",\n `origin/${targetBranch}`,\n \"HEAD\",\n ]);\n\n // Get commits between merge base and HEAD - these are the PR commits\n commitRange = `${mergeBase.trim()}..HEAD`;\n Logger.info(`Using commit range: ${commitRange}`);\n } catch (error) {\n Logger.warn(`Error setting up commit range in CI: ${error}`);\n // Fall back to simpler approach (just compare with origin/targetBranch)\n commitRange = `origin/${targetBranch}..HEAD`;\n Logger.info(`Falling back to commit range: ${commitRange}`);\n }\n } else {\n // Local environment - try to determine PR commits\n Logger.info(\"Running in local environment\");\n\n try {\n // First check if origin/main exists, fetch it if needed\n try {\n await git.raw([\"rev-parse\", \"--verify\", `origin/${targetBranch}`]);\n } catch {\n Logger.info(`Fetching ${targetBranch} from origin`);\n await git.fetch(\"origin\", targetBranch);\n }\n\n // Find merge base between current branch and target branch\n const mergeBase = await git.raw([\n \"merge-base\",\n `origin/${targetBranch}`,\n currentBranch.current,\n ]);\n\n commitRange = `${mergeBase.trim()}..HEAD`;\n Logger.info(`Using commit range for PR commits: ${commitRange}`);\n } catch (error) {\n Logger.warn(`Error determining PR commits locally: 
${error}`);\n\n // Fallback - use last few commits\n Logger.info(\"Falling back to analyzing recent commits\");\n commitRange = \"HEAD~10..HEAD\";\n Logger.info(`Using fallback commit range: ${commitRange}`);\n }\n }\n\n // Get and format the PR commits\n return await Git.getFormattedCommits(commitRange);\n } catch (err) {\n Logger.error(`Error getting commit messages: ${err}`);\n return [];\n }\n }\n\n // Helper function to get formatted commits for a given git range\n static async getFormattedCommits(commitRange: string): Promise<\n Array<{\n type: string;\n hash: string;\n date: string;\n subject: string;\n body: string;\n message: string;\n author_name: string;\n }>\n > {\n interface GitCommitType {\n hash: string;\n date: string;\n subject: string;\n body: string;\n message: string;\n author_name: string;\n type: string;\n }\n\n // Use a format that will let us parse each commit separately\n // %H = hash, %ad = author date, %an = author name, %s = subject, %b = body\n const branchCommitsRaw = await git.raw([\n \"log\",\n \"--pretty=format:COMMIT_START%n%H%n%ad%n%an%n%s%n%b%nCOMMIT_END\",\n \"--date=short\",\n commitRange,\n ]);\n\n // Split by our custom delimiter to get individual commits\n const commitChunks = branchCommitsRaw\n .split(\"COMMIT_START\\n\")\n .filter((chunk: string) => chunk.trim() !== \"\");\n\n const commits: GitCommitType[] = [];\n\n for (const chunk of commitChunks) {\n const parts = chunk.split(\"\\n\");\n if (parts.length >= 4) {\n const hash = parts[0];\n const date = parts[1];\n const author_name = parts[2];\n const subject = parts[3];\n\n // The rest is the body (may contain breaking changes)\n // Filter out the COMMIT_END marker\n const bodyLines = parts\n .slice(4)\n .filter((line: string) => line !== \"COMMIT_END\");\n const body = bodyLines.length > 0 ? bodyLines.join(\"\") : \"\";\n\n // Use a shorter hash format for better readability (7 characters)\n const shortHash = hash.substring(0, 7);\n\n // Determine commit type from subject\n const typeMatch = subject.match(\n /^(feat|fix|docs|style|refactor|perf|test|build|ci|chore)(\\(.+\\))?:/,\n );\n let type = typeMatch ? typeMatch[1] : \"unknown\";\n\n // Check for breaking changes\n if (body.includes(\"BREAKING CHANGE\")) {\n type = \"breaking\";\n }\n\n commits.push({\n type,\n hash: shortHash,\n date,\n subject,\n body,\n message: `${subject}${body ? 
`\\n\\n${body}` : \"\"}`,\n author_name,\n });\n }\n }\n\n return commits;\n }\n\n // Function to add file to .gitignore\n static async addToGitignore(pattern: string, log = true) {\n await Git.checkGitignore(pattern).then(async (result) => {\n if (result) {\n Logger.warn(`${pattern} already exists`);\n } else {\n try {\n await appendFile(\".gitignore\", `\\n${pattern}`);\n if (log) {\n Logger.success(`${pattern} added to .gitignore`);\n }\n } catch (err) {\n Logger.error(err);\n }\n }\n });\n }\n\n // Function to remove file from git cache\n static async removeFromGitCache(files: string[]) {\n try {\n await git.rmKeepLocal(files);\n Logger.success(`${files.join(\", \")} are removed from git cache`);\n } catch (err) {\n Logger.error(err);\n }\n }\n\n static async createBranch(branchName: string) {\n try {\n await git.checkoutLocalBranch(branchName);\n Logger.success(`Created and switched to ${branchName} branch`);\n } catch (err) {\n Logger.error(err);\n }\n }\n\n static async commitStagedFiles(message: string) {\n try {\n await git.add(\".\");\n await git.commit(message);\n Logger.success(`Committed with message: ${message}`);\n } catch (err) {\n Logger.error(err);\n }\n }\n}\n", "import chalk from \"chalk\";\n\n// Configuration constants for display\nexport const MAX_SUBJECT_LENGTH = 60;\nexport const MAX_BODY_LENGTH = 100;\n\nexport interface CommitInfo {\n type: string;\n hash: string;\n date: string;\n subject: string;\n body: string;\n message: string;\n author_name: string;\n}\n\n// Define valid commit types for better type checking\nexport type CommitType =\n | \"breaking\"\n | \"feat\"\n | \"fix\"\n | \"perf\"\n | \"docs\"\n | \"style\"\n | \"refactor\"\n | \"test\"\n | \"build\"\n | \"ci\"\n | \"chore\"\n | \"unknown\";\n\n/**\n * Get colored text for commit type using a more harmonious color scheme\n */\nexport function getColoredType(type: string): string {\n switch (type) {\n case \"breaking\":\n return chalk.bold.red(type);\n case \"feat\":\n return chalk.bold.green(type);\n case \"fix\":\n return chalk.bold.green(type);\n case \"perf\":\n return chalk.bold.green(type);\n case \"docs\":\n return chalk.bold.cyan(type);\n case \"style\":\n return chalk.bold.cyan(type);\n case \"refactor\":\n return chalk.bold.cyan(type);\n case \"test\":\n return chalk.bold.cyan(type);\n case \"build\":\n return chalk.bold.cyan(type);\n case \"ci\":\n return chalk.bold.cyan(type);\n case \"chore\":\n return chalk.bold.cyan(type);\n default:\n return chalk.bold.white(type);\n }\n}\n\n/**\n * Helper function to wrap long strings to new lines\n */\nexport function wrapString(str: string, maxLength: number): string {\n if (!str) {\n return \"\";\n }\n\n // If the string is shorter than maxLength, return it as is\n if (str.length <= maxLength) {\n return str;\n }\n\n // Split the string into words\n const words = str.split(\" \");\n let result = \"\";\n let currentLine = \"\";\n\n // Build wrapped text with line breaks\n for (const word of words) {\n // If adding this word would exceed maxLength, start a new line\n if ((currentLine + word).length > maxLength && currentLine.length > 0) {\n result += `${currentLine.trim()}\\n`;\n currentLine = \"\";\n }\n currentLine = `${currentLine}${word} `;\n }\n\n // Add the last line\n if (currentLine.length > 0) {\n result += currentLine.trim();\n }\n\n return result;\n}\n\n/**\n * Display commits in a debug format with detailed information\n */\nexport function displayDebugView(commitList: CommitInfo[]): void {\n for (const commit of commitList) {\n 
console.log(\"\u2500\".repeat(60));\n\n // Use a consistent color theme for metadata\n const subject = wrapString(commit.subject, MAX_SUBJECT_LENGTH);\n const body = wrapString(commit.body, MAX_BODY_LENGTH);\n\n // Display commit info in a more compact format\n console.log(chalk.bold(`${getColoredType(commit.type)}`));\n console.log(\n chalk.dim(`${commit.hash} | ${commit.date} | ${commit.author_name}`),\n );\n console.log(chalk.bold(`${chalk.white(subject)}`));\n\n // Only add body if it exists and keep it more compact\n if (commit.body) {\n console.log(chalk.dim(body));\n }\n }\n console.log(\"\u2500\".repeat(60));\n console.log(\"\\n\");\n}\n", "import github from \"@actions/github\";\n\n/**\n * Get existing labels from the current pull request in a GitHub Actions environment\n * @returns Promise that resolves with an array of label names\n */\nexport async function getExistingLabels(): Promise<string[]> {\n try {\n // Get the GitHub token from environment\n const token = process.env.GITHUB_TOKEN;\n\n if (!token) {\n throw new Error(\"GITHUB_TOKEN environment variable is not set\");\n }\n\n // Check if we're in a GitHub Actions environment\n if (!process.env.GITHUB_REPOSITORY || !process.env.GITHUB_EVENT_PATH) {\n throw new Error(\n \"This function can only be used in a GitHub Actions environment\",\n );\n }\n\n const octokit = github.getOctokit(token);\n const { context } = github;\n\n // Make sure we're in a pull request context\n if (!context.payload.pull_request) {\n throw new Error(\"No pull request found in the GitHub context\");\n }\n\n const [owner, repo] = process.env.GITHUB_REPOSITORY.split(\"/\");\n const prNumber = context.payload.pull_request.number;\n\n // Get existing labels\n const { data: existingLabels } =\n await octokit.rest.issues.listLabelsOnIssue({\n owner,\n repo,\n issue_number: prNumber,\n });\n\n // Return array of label names\n return existingLabels.map((label) => label.name);\n } catch (error) {\n if (error instanceof Error) {\n throw new Error(`Failed to get existing labels: ${error.message}`);\n }\n throw error;\n }\n}\n\n/**\n * Apply a label to the current pull request in a GitHub Actions environment\n * @param label The label to apply to the pull request\n * @returns Promise that resolves when the label is applied\n */\nexport async function applyLabelToPR(label: string): Promise<void> {\n try {\n // Get the GitHub token from environment\n const token = process.env.GITHUB_TOKEN;\n\n if (!token) {\n throw new Error(\"GITHUB_TOKEN environment variable is not set\");\n }\n\n // Check if we're in a GitHub Actions environment\n if (!process.env.GITHUB_REPOSITORY || !process.env.GITHUB_EVENT_PATH) {\n throw new Error(\n \"This function can only be used in a GitHub Actions environment\",\n );\n }\n\n const octokit = github.getOctokit(token);\n const { context } = github;\n\n // Make sure we're in a pull request context\n if (!context.payload.pull_request) {\n throw new Error(\"No pull request found in the GitHub context\");\n }\n\n const [owner, repo] = process.env.GITHUB_REPOSITORY.split(\"/\");\n const prNumber = context.payload.pull_request.number;\n\n // Add prefix to the label\n const prefixedLabel = `semantic-status: ${label}`;\n\n // Get existing labels\n const existingLabels = await getExistingLabels();\n\n // If the label we want to apply already exists, do nothing\n if (existingLabels.includes(prefixedLabel)) {\n return;\n }\n\n // Find existing semantic status labels that are different from the one we want to apply\n const existingSemanticLabels = 
existingLabels.filter(\n (existingLabel) =>\n existingLabel.startsWith(\"semantic-status:\") &&\n existingLabel !== prefixedLabel,\n );\n\n // Remove existing semantic status labels that don't match the new one\n for (const existingLabel of existingSemanticLabels) {\n await octokit.rest.issues.removeLabel({\n owner,\n repo,\n issue_number: prNumber,\n name: existingLabel,\n });\n }\n\n // Add the new semantic status label\n await octokit.rest.issues.addLabels({\n owner,\n repo,\n issue_number: prNumber,\n labels: [prefixedLabel],\n });\n\n return;\n } catch (error) {\n if (error instanceof Error) {\n throw new Error(`Failed to apply label: ${error.message}`);\n }\n throw error;\n }\n}\n", "import fs from \"node:fs\";\nimport { get } from \"node:https\"; // Change to https\nimport chalk from \"chalk\";\nimport { program } from \"commander\";\nimport ora from \"ora\";\nimport type { Ora } from \"ora\";\n\nexport default program\n .command(\"pr-release\")\n .option(\n \"-n, --namespace <package-namespace>\",\n \"Set namespace of the package release\",\n \"@aurodesignsystem-dev\",\n )\n .option(\n \"-p, --pr-number <number>\",\n \"Set pull request number for the release\",\n \"0\",\n )\n .description(\n \"Generate the package version based off of PR number then update the package.json file. Note: this does not publish the package.\",\n )\n .action(async (option) => {\n await updatePackageJson(option);\n });\n\ninterface ReleaseOptions {\n namespace: string;\n prNumber: number;\n}\n\nconst updatePackageJson = async (option: ReleaseOptions): Promise<void> => {\n const { namespace, prNumber } = option;\n\n const packageSpinner = ora(\"Updating package.json\").start();\n\n try {\n const packageJsonPath = \"package.json\";\n\n // Read package.json\n const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, \"utf8\"));\n\n // Check if release version is on npmjs already\n packageSpinner.text = \"Checking npm registry for version information...\";\n\n const releaseVersion = `0.0.0-pr${prNumber}`;\n const packageComponent = packageJson.name.split(\"/\")[1];\n const packageName = `${namespace}/${packageComponent}`;\n const incrementVersion = await getIncrementVersion(\n releaseVersion,\n packageName,\n packageSpinner,\n );\n const packageVersion = `${releaseVersion}.${incrementVersion}`;\n\n packageJson.name = packageName;\n packageJson.version = packageVersion;\n\n packageSpinner.text = \"Writing updated package.json...\";\n\n // Write the updated package.json back to the file\n fs.writeFileSync(\n packageJsonPath,\n `${JSON.stringify(packageJson, null, 2)}\\n`,\n \"utf8\",\n );\n\n packageSpinner.succeed(\n `Package.json updated to use ${chalk.green(packageVersion)} and ${chalk.green(packageName)}`,\n );\n\n // Explicitly exit with success code to ensure terminal prompt returns\n process.exit(0);\n } catch (error: unknown) {\n packageSpinner.fail(`Failed to update package.json: ${error}`);\n process.exit(1); // Exit with error code\n }\n};\n\n// checks if version exists on npmjs and returns the next available increment version\nconst getIncrementVersion = (\n releaseVersion: string,\n packageName: string,\n spinner: Ora,\n): Promise<number> => {\n return new Promise((resolve) => {\n try {\n // Use the registry URL to get all versions for the package\n const registryUrl = `https://registry.npmjs.org/${packageName}`;\n\n const req = get(\n registryUrl,\n {\n headers: { Accept: \"application/json\" },\n },\n (res) => {\n // Handle redirects\n if (\n (res.statusCode === 301 || res.statusCode === 
302) &&\n res.headers.location\n ) {\n // Persist redirect message\n spinner.info(`Following redirect to ${res.headers.location}...`);\n try {\n get(\n res.headers.location,\n { headers: { Accept: \"application/json\" } },\n handleResponse,\n )\n .on(\"error\", (err) => {\n // On redirect error, default to 0\n spinner.warn(\n `Error following redirect: ${err.message}, defaulting to version 0`,\n );\n resolve(0);\n })\n .end();\n } catch (error) {\n // If redirect request fails, default to 0\n spinner.warn(\n `Redirect request failed: ${error instanceof Error ? error.message : \"Unknown error\"}, defaulting to version 0`,\n );\n resolve(0);\n }\n return;\n }\n\n handleResponse(res);\n },\n );\n\n function handleResponse(res: import(\"http\").IncomingMessage) {\n if (res.statusCode !== 200) {\n // If package not found or other error, we can start with version 0\n spinner.info(\n `Package not found. Status code: ${chalk.red(res.statusCode)}, defaulting to version 0`,\n );\n resolve(0);\n return;\n }\n\n spinner.text = \"Processing version information...\";\n let data = \"\";\n res.on(\"data\", (chunk: Buffer | string) => {\n data += chunk;\n });\n\n res.on(\"end\", () => {\n try {\n const packageData = JSON.parse(data);\n const versions = packageData.versions\n ? Object.keys(packageData.versions)\n : [];\n\n spinner.text = \"Calculating next version number...\";\n\n // Find the highest existing iteration for this release version\n let maxIteration = -1;\n const versionRegex = new RegExp(`^${releaseVersion}\\\\.(\\\\d+)$`);\n\n for (const version of versions) {\n const match = version.match(versionRegex);\n if (match) {\n const iteration = Number.parseInt(match[1], 10);\n maxIteration = Math.max(maxIteration, iteration);\n }\n }\n\n // Return the next iteration number and persist this important info\n if (maxIteration >= 0) {\n spinner.info(\n `Found existing version ${chalk.green(`${releaseVersion}.${maxIteration}`)}. Incrementing to ${chalk.green(`${releaseVersion}.${maxIteration + 1}`)}`,\n );\n } else {\n spinner.info(\n `No existing version found for ${chalk.green(releaseVersion)}. Starting with ${chalk.green(`${releaseVersion}.0`)}`,\n );\n }\n resolve(maxIteration + 1);\n } catch (error) {\n // In case of parsing error, default to 0\n spinner.warn(\n `Failed to parse NPM registry response: ${error instanceof Error ? 
error.message : \"Unknown error\"}, defaulting to version 0`,\n );\n resolve(0);\n }\n });\n }\n\n req.on(\"error\", (err) => {\n // On request error, default to 0\n spinner.warn(`Request error: ${err.message}, defaulting to version 0`);\n resolve(0);\n });\n\n req.end();\n } catch (error) {\n // Catch any other errors and default to 0\n spinner.warn(\n \"Error checking version in npm registry, defaulting to version 0\",\n );\n resolve(0);\n }\n });\n};\n", "import path from \"node:path\";\nimport { fileURLToPath } from \"node:url\";\nimport { program } from \"commander\";\nimport open from \"open\";\nimport { shell } from \"#utils/shell.js\";\n\nconst __filename = fileURLToPath(import.meta.url);\nconst cliRootDir = path.resolve(path.dirname(__filename), \"..\");\n\nexport default program\n .command(\"test\")\n .option(\"-w, --watch\", \"Set watch number for the test\")\n .option(\"-c, --coverage-report\", \"Generate coverage report\")\n .option(\"-o, --open\", \"Open the coverage report in the browser\")\n .option(\"-f, --files <String|String[]>\", \"Test files glob pattern\")\n .description(\"Run the web test runner to test the component library\")\n .action(async (option) => {\n const configPath = path.join(\n cliRootDir,\n \"dist\",\n \"configs\",\n \"web-test-runner.config.mjs\",\n );\n let command = `npx wtr --config \"${configPath}\"`;\n const coveragePath = `${process.cwd()}/coverage/index.html`;\n\n if (option.coverageReport) {\n command += \" --coverage\";\n }\n\n if (option.watch) {\n command += \" --watch\";\n }\n\n if (option.files) {\n const files = Array.isArray(option.files)\n ? option.files.join(\" \")\n : option.files;\n command += ` --files \"${files}\"`;\n }\n\n shell(command);\n\n if (option.open) {\n await open(coveragePath);\n }\n });\n", "import fs from \"node:fs/promises\";\nimport path from \"node:path\";\nimport { program } from \"commander\";\nimport inquirer from \"inquirer\";\nimport ora from \"ora\";\nimport { createMultiGitterDependencyTreeConfig } from \"#scripts/agent/run-migrations/writeMultiGitterConfig.js\";\nimport {\n formatDependencyTree,\n getBatchedUpdateOrder,\n} from \"#scripts/formatDependencyTree.ts\";\nimport { fromCliRoot, withHomeDir } from \"#utils/pathUtils.js\";\nimport { shell } from \"#utils/shell.js\";\n\n// Multi-gitter and other config files live here\nconst CONFIG_DIR = withHomeDir(\"run-migrations\", \"config\");\n// Generated output files live here\nconst OUTPUT_DIR = withHomeDir(\"run-migrations\", \"outputs\");\n\nenum AgentActions {\n RunMigration = \"run-migration\",\n // Add more actions as needed\n}\n\ninterface AgentAnswers {\n agentAction: AgentActions;\n}\n\n// Agent component options\n// =========================================================\n\nconst auroComponents = [\n \"@aurodesignsystem/auro-accordion\",\n \"@aurodesignsystem/auro-alert\",\n \"@aurodesignsystem/auro-avatar\",\n \"@aurodesignsystem/auro-background\",\n \"@aurodesignsystem/auro-backtotop\",\n \"@aurodesignsystem/auro-button\",\n \"@aurodesignsystem/auro-badge\",\n \"@aurodesignsystem/auro-banner\",\n \"@aurodesignsystem/auro-card\",\n \"@aurodesignsystem/auro-carousel\",\n \"@aurodesignsystem/auro-datetime\",\n \"@aurodesignsystem/auro-dialog\",\n \"@aurodesignsystem/auro-drawer\",\n \"@aurodesignsystem/auro-formkit\",\n \"@aurodesignsystem/auro-flight\",\n \"@aurodesignsystem/auro-flightline\",\n \"@aurodesignsystem/auro-header\",\n \"@aurodesignsystem/auro-hyperlink\",\n \"@aurodesignsystem/auro-icon\",\n \"@aurodesignsystem/auro-loader\",\n 
\"@aurodesignsystem/auro-lockup\",\n \"@aurodesignsystem/auro-nav\",\n \"@aurodesignsystem/auro-pane\",\n \"@aurodesignsystem/auro-popover\",\n \"@aurodesignsystem/auro-sidenav\",\n \"@aurodesignsystem/auro-skeleton\",\n \"@aurodesignsystem/auro-slideshow\",\n \"@aurodesignsystem/auro-table\",\n \"@aurodesignsystem/auro-tabs\",\n \"@aurodesignsystem/auro-toast\",\n];\n\nconst auroPackages = [\n ...auroComponents,\n \"@aurodesignsystem/auro-library\",\n \"@aurodesignsystem/WebCoreStyleSheets\",\n \"@aurodesignsystem/AuroDesignTokens\",\n \"@aurodesignsystem/auro-cli\",\n \"@alaskaairux/icons\",\n];\n\n// Agent helpers\n// =========================================================\ninterface DependencyTreeAnswers {\n useExisting: boolean;\n}\n\nasync function getOrCreateDependencyTree(\n relevantPackages: string[],\n): Promise<string> {\n // check if output and config directories exist, if not create them\n try {\n await fs.mkdir(OUTPUT_DIR, { recursive: true });\n await fs.mkdir(CONFIG_DIR, { recursive: true });\n } catch (error) {\n console.error(\"Failed to create output or config directories:\", error);\n process.exit(1);\n }\n\n const spinner = ora(\"Creating dependency tree...\").start();\n\n // Create multi-gitter dependency tree configuration\n spinner.text = \"Creating multi-gitter dependency tree configuration...\";\n await createMultiGitterDependencyTreeConfig(CONFIG_DIR);\n\n spinner.text = \"Scraping dependencies from Auro packages...\";\n\n // Run multi-gitter using the generated config\n const scriptPath = fromCliRoot(\"static\", \"getAuroDeps.js\");\n const multiGitterCommand = `multi-gitter run \"node ${scriptPath}\" --config ${path.join(CONFIG_DIR, \"multi-gitter_DEPENDENCY_TREE.yml\")}`;\n try {\n await shell(multiGitterCommand);\n } catch (error) {\n spinner.fail(\"Failed to generate dependency tree:\");\n console.error(error);\n process.exit(1);\n }\n\n spinner.text = \"Generating dependency tree JSON file using packages...\";\n await formatDependencyTree(OUTPUT_DIR, relevantPackages);\n\n spinner.succeed(\"Dependency tree generated successfully.\");\n\n return path.join(OUTPUT_DIR, \"dependencyTree.json\");\n}\n\nconst getDependencyBatchesFromTree = async (\n dependencyTreePath: string,\n): Promise<string[][]> => {\n const spinner = ora(\"Loading dependency tree...\").start();\n const dependencyTree = JSON.parse(\n await fs.readFile(dependencyTreePath, \"utf-8\"),\n );\n\n spinner.text = \"Processing dependency tree...\";\n const batches = getBatchedUpdateOrder(dependencyTree);\n spinner.succeed(\"Dependency batches created successfully.\");\n\n return batches;\n};\n\n// Agent command\n// =========================================================\nexport default program.command(\"agent\").action(async (option) => {\n const answers = await inquirer.prompt([\n {\n type: \"select\",\n name: \"agentAction\",\n message: \"What agent action do you want to perform?\",\n choices: [\n {\n name: \"Run a migration on auro components\",\n value: AgentActions.RunMigration,\n },\n ],\n default: [AgentActions.RunMigration],\n },\n\n {\n type: \"input\",\n name: \"migrationId\",\n message: \"What migration id do you want to run?\",\n when: (answers) => answers.agentAction === AgentActions.RunMigration,\n validate: (input) =>\n input.trim() !== \"\" || \"Migration id cannot be empty.\",\n },\n\n {\n type: \"confirm\",\n name: \"useExisting\",\n message: \"Would you like to specify starting packages?\",\n default: true,\n transformer: (value) =>\n value ? 
\"Yes = Packages related to selections\" : \"No = All packages\",\n when: (answers) => answers.agentAction === AgentActions.RunMigration,\n },\n\n {\n type: \"checkbox\",\n name: \"startWithComponents\",\n message:\n \"Enter the components to start with (comma-separated, blank for all):\",\n choices: auroComponents.map((component) => ({\n name: component.replace(\"@aurodesignsystem/\", \"\"),\n value: component,\n })),\n when: (answers) =>\n answers.agentAction === AgentActions.RunMigration &&\n answers.useExisting,\n },\n ]);\n\n switch (answers.agentAction) {\n case AgentActions.RunMigration: {\n // Placeholder for actual migration logic\n const spinner = ora(\"Running migration...\").start();\n const dependencyTreePath = await getOrCreateDependencyTree(\n answers.startWithComponents,\n );\n\n spinner.text = \"Getting dependency batches from tree...\";\n const dependencyBatches =\n await getDependencyBatchesFromTree(dependencyTreePath);\n\n const batchedUpdateOrderText = dependencyBatches\n .map(\n (batch, index) =>\n `Batch ${index + 1}\\n${batch.map((pkg) => ` - ${pkg.replace(\"@aurodesignsystem\", \"AlaskaAirlines\").replace(\"@alaskaairux/icons\", \"AlaskaAirlines/Icons\")}`).join(\"\\n\")}`,\n )\n .join(\"\\n\\n\");\n\n console.log(batchedUpdateOrderText);\n\n spinner.text = \"Running migrations on dependency batches...\";\n // DO STUFF HERE :)\n\n new Promise((resolve) => setTimeout(resolve, 2000)); // Simulate async operation\n spinner.succeed(\"Migration process completed successfully.\");\n\n // spinner.succeed(\"Migration process completed.\");\n break;\n }\n // Add more cases for additional actions as needed\n default:\n console.error(\"Unknown action selected.\");\n // spinner.fail(\"Unknown action selected.\");\n }\n});\n", "import fs from \"node:fs/promises\";\nimport path from \"node:path\";\nimport ora from \"ora\";\n\nconst JsonConfig = {\n \"auth-type\": \"workspace-token\",\n \"author-email\": null,\n \"author-name\": null,\n \"base-branch\": \"main\",\n \"base-url\": null,\n \"clone-dir\": \".gitter-temp\",\n \"code-search\": null,\n concurrent: 4,\n \"conflict-strategy\": \"replace\",\n draft: false,\n \"dry-run\": true,\n \"fetch-depth\": 1,\n fork: false,\n \"fork-owner\": null,\n \"git-type\": \"go\",\n group: null,\n \"include-subgroups\": false,\n insecure: false,\n interactive: false,\n labels: null,\n \"log-file\": \"'-'\",\n \"log-format\": \"'text'\",\n \"log-level\": \"'error'\",\n \"max-reviewers\": 0,\n \"max-team-reviewers\": 0,\n org: null,\n output: \"'-'\",\n \"plain-output\": false,\n platform: \"github\",\n project: null,\n \"push-only\": false,\n repo: [\n \"AlaskaAirlines/auro-accordion\",\n \"AlaskaAirlines/auro-alert\",\n \"AlaskaAirlines/auro-avatar\",\n \"AlaskaAirlines/auro-background\",\n \"AlaskaAirlines/auro-backtotop\",\n \"AlaskaAirlines/auro-button\",\n \"AlaskaAirlines/auro-badge\",\n \"AlaskaAirlines/auro-banner\",\n \"AlaskaAirlines/auro-card\",\n \"AlaskaAirlines/auro-carousel\",\n \"AlaskaAirlines/auro-datetime\",\n \"AlaskaAirlines/auro-dialog\",\n \"AlaskaAirlines/auro-drawer\",\n \"AlaskaAirlines/auro-flight\",\n \"AlaskaAirlines/auro-flightline\",\n \"AlaskaAirlines/auro-header\",\n \"AlaskaAirlines/auro-hyperlink\",\n \"AlaskaAirlines/auro-icon\",\n \"AlaskaAirlines/auro-loader\",\n \"AlaskaAirlines/auro-lockup\",\n \"AlaskaAirlines/auro-nav\",\n \"AlaskaAirlines/auro-pane\",\n \"AlaskaAirlines/auro-popover\",\n \"AlaskaAirlines/auro-sidenav\",\n \"AlaskaAirlines/auro-skeleton\",\n \"AlaskaAirlines/auro-slideshow\",\n 
\"AlaskaAirlines/auro-table\",\n \"AlaskaAirlines/auro-tabs\",\n \"AlaskaAirlines/auro-toast\",\n // UNCOMMENT BELOW WHEN MAIN/MASTER BRANCHES ARE READY\n // \"AlaskaAirlines/AuroDocsSite\"\n ],\n \"repo-exclude\": null,\n \"repo-include\": null,\n \"repo-search\": null,\n reviewers: null,\n \"skip-forks\": false,\n \"skip-pr\": false,\n \"skip-repo\": null,\n \"ssh-auth\": false,\n \"team-reviewers\": null,\n};\n\nfunction toYaml(config) {\n return Object.entries(config)\n .map(([key, value]) => {\n if (Array.isArray(value)) {\n return `${key}:\\n - ${value.join(\"\\n - \")}`;\n }\n if (typeof value === \"object\" && value !== null) {\n return `${key}:\\n${Object.entries(value)\n .map(([k, v]) => ` ${k}: ${v}`)\n .join(\"\\n\")}`;\n }\n return `${key}: ${value}`;\n })\n .join(\"\\n\");\n}\n\nexport async function createMultiGitterDependencyTreeConfig(outputPath) {\n const spinner = ora(\"Writing multi-gitter configuration...\").start();\n const configContent = toYaml(JsonConfig);\n const configPath = path.join(outputPath, \"multi-gitter_DEPENDENCY_TREE.yml\");\n\n try {\n await fs.writeFile(configPath, configContent, \"utf8\");\n spinner.succeed(`Multi-gitter configuration written to ${configPath}`);\n } catch (error) {\n spinner.fail(\"Error writing multi-gitter configuration:\");\n console.error(error);\n }\n}\n", "import fs from \"node:fs\";\nimport path from \"node:path\";\n\ninterface PackageJsonExcerpt {\n name: string;\n peerDependencies: Record<string, string>;\n devDependencies: Record<string, string>;\n dependencies: Record<string, string>;\n}\n\ninterface DependencyNode {\n dependsOn: string[];\n dependentPackages: string[];\n}\n\ntype DependencyTree = Record<string, DependencyNode>;\n\nexport function getBatchedUpdateOrder(\n dependencyTree: DependencyTree,\n): Array<string[]> {\n const inDegree: Record<string, number> = {};\n const batches: Array<string[]> = [];\n let currentBatch: string[] = [];\n const queue: string[] = [];\n\n // Initialize in-degree (count of dependencies for each package)\n for (const pkg in dependencyTree) {\n inDegree[pkg] = dependencyTree[pkg].dependsOn.length;\n }\n\n // Find packages with no dependencies (in-degree = 0)\n for (const pkg in inDegree) {\n if (inDegree[pkg] === 0) {\n queue.push(pkg);\n }\n }\n\n while (queue.length > 0) {\n currentBatch = [];\n // Process the queue (topological sorting)\n const queueLength = queue.length;\n for (let i = 0; i < queueLength; i++) {\n const current = queue.shift()!;\n currentBatch.push(current);\n\n // Reduce the in-degree of dependent packages\n for (const dependent of dependencyTree[current].dependentPackages) {\n inDegree[dependent]--;\n\n // If a package now has no dependencies, add it to the queue\n if (inDegree[dependent] === 0) {\n queue.push(dependent);\n }\n }\n }\n batches.push(currentBatch);\n }\n\n // If we couldn't process all packages, there is a circular dependency\n if (batches.flat().length !== Object.keys(dependencyTree).length) {\n throw new Error(\"Circular dependency detected!\");\n }\n\n return batches;\n}\n\nfunction getJsonFilesFromDirectory(directory: string): string[] {\n return fs.readdirSync(directory).filter((file) => file.endsWith(\".json\"));\n}\n\n/**\n * Formats the dependency tree for the specified target dependencies.\n * @param rawTargetDependencies {string[]} - List of target dependencies to format. Expects package names like \"button\", \"hyperlink\", etc. 
without the \"@aurodesignsystem/\" prefix.\n * @returns {Promise<DependencyTree>} - A promise that resolves to the formatted dependency tree.\n */\nexport async function formatDependencyTree(\n jsonFileDirectory: string,\n targetDependencies: string[] = [],\n): Promise<DependencyTree> {\n console.log(targetDependencies);\n let dependencyTree: DependencyTree = {};\n\n const files = getJsonFilesFromDirectory(jsonFileDirectory);\n\n for (const file of files) {\n // Skip the dependency tree file itself if it already exists\n if (file === \"dependencyTree.json\") {\n continue;\n }\n\n const contents = fs.readFileSync(`${jsonFileDirectory}/${file}`, \"utf-8\");\n const data: PackageJsonExcerpt = JSON.parse(contents);\n\n const packageName = data.name;\n const peerDependencies = Object.keys(data.peerDependencies);\n const devDependencies = Object.keys(data.devDependencies);\n const dependencies = Object.keys(data.dependencies);\n\n if (!dependencyTree[packageName]) {\n dependencyTree[packageName] = { dependsOn: [], dependentPackages: [] };\n }\n\n const allDependencies = [\n ...peerDependencies,\n ...devDependencies,\n ...dependencies,\n ];\n\n dependencyTree[packageName].dependsOn = [...new Set(allDependencies)];\n\n for (const dependency of allDependencies) {\n if (!dependencyTree[dependency]) {\n dependencyTree[dependency] = { dependsOn: [], dependentPackages: [] };\n }\n\n if (!dependencyTree[dependency].dependentPackages.includes(packageName)) {\n dependencyTree[dependency].dependentPackages.push(packageName);\n }\n }\n }\n\n // If there are no specified target dependencies, use all packages\n if (targetDependencies.length) {\n // If there ARE target dependencies, filter the dependency tree down to just relevant packages\n // A tree will start only include package that the target dependencies depend on, OR packages that depend on the target dependencies\n const relevantPackages = new Set<string>();\n\n // Include any packages that depend on a target dependency\n for (const [pkg, node] of Object.entries(dependencyTree)) {\n if (node.dependsOn.some((dep) => targetDependencies.includes(dep))) {\n relevantPackages.add(pkg);\n }\n }\n\n // Also include the target dependencies themselves\n for (const target of targetDependencies) {\n if (dependencyTree[target]) {\n relevantPackages.add(target);\n }\n }\n\n // Final filtered dependency tree\n const _filteredDependencyTree: DependencyTree = {};\n for (const pkg of relevantPackages) {\n _filteredDependencyTree[pkg] = {\n dependsOn: dependencyTree[pkg].dependsOn.filter((dep) =>\n relevantPackages.has(dep),\n ),\n dependentPackages: dependencyTree[pkg].dependentPackages.filter((dep) =>\n relevantPackages.has(dep),\n ),\n };\n }\n\n dependencyTree = _filteredDependencyTree;\n } else {\n console.log(\"No target dependencies provided - using all packages.\");\n }\n\n // Write the dependency tree to a file\n fs.writeFileSync(\n `${jsonFileDirectory}/dependencyTree.json`,\n JSON.stringify(dependencyTree, null, 2),\n );\n\n return dependencyTree;\n}\n", "import os from \"node:os\";\nimport path from \"node:path\";\nimport process from \"node:process\";\nimport { fileURLToPath } from \"node:url\";\n\nexport function getAuroHomeDir() {\n const homeDir = os.homedir() || process.env.HOME || process.env.USERPROFILE;\n\n if (!homeDir) {\n throw new Error(\"Unable to determine user home directory\");\n }\n\n return path.join(homeDir, \".auro\");\n}\n\nexport function withHomeDir(...args) {\n return path.join(getAuroHomeDir(), ...args);\n}\n\nexport function 
fromCliRoot(...relativePath) {\n const filename = fileURLToPath(import.meta.url);\n const dirname = path.dirname(filename);\n\n return path.resolve(dirname, ...relativePath);\n}\n", "import { program } from \"commander\";\nimport { cem, docs } from \"#scripts/docs/index.ts\";\n\nexport const docsCommand = program\n .command(\"docs\")\n .description(\"Generate API documentation\")\n .option(\"-c, --cem\", \"Generate Custom Elements Manifest (CEM) file\", false)\n .action(async (options) => {\n\n if (options.cem) {\n await cem();\n }\n\n await docs();\n });\n"],
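Editor's note: the `handleLabels` helper embedded in the sources above picks exactly one `semantic-status` label per pull request by treating the `validCommitTypes` array as a priority order. A minimal standalone sketch of that selection rule follows; the function and variable names here (`selectLabel`, `bestIndex`) are illustrative, not part of the published package.

```ts
// Sketch of the label-priority rule: the earliest entry in validCommitTypes
// that appears among the PR's commit types wins, so "breaking" beats "feat",
// which beats "fix", and so on down the list.
const validCommitTypes = [
  "breaking", "feat", "fix", "perf", "docs", "style",
  "refactor", "test", "build", "ci", "chore",
] as const;

type CommitType = (typeof validCommitTypes)[number];

function selectLabel(commitTypes: string[]): CommitType | null {
  let selected: CommitType | null = null;
  let bestIndex = Number.POSITIVE_INFINITY;

  for (const type of commitTypes) {
    const index = validCommitTypes.indexOf(type as CommitType);
    // Ignore unknown types; keep the match with the lowest (highest-priority) index.
    if (index !== -1 && index < bestIndex) {
      bestIndex = index;
      selected = type as CommitType;
    }
  }
  return selected;
}

// A PR containing a fix and a feat ends up labelled "semantic-status: feat".
console.log(selectLabel(["fix", "feat", "chore"])); // -> "feat"
```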
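The commit type itself comes from `getFormattedCommits` in the embedded `Git` class: a conventional-commit prefix on the subject line sets the type, and a "BREAKING CHANGE" marker in the body promotes it to `breaking`. The regex below is the same one that appears in the source map; the sample inputs are illustrative only.

```ts
// Sketch of the commit classification used when building the commit list.
function classifyCommit(subject: string, body: string): string {
  const typeMatch = subject.match(
    /^(feat|fix|docs|style|refactor|perf|test|build|ci|chore)(\(.+\))?:/,
  );
  let type = typeMatch ? typeMatch[1] : "unknown";
  if (body.includes("BREAKING CHANGE")) {
    type = "breaking";
  }
  return type;
}

console.log(classifyCommit("feat(button): add size prop", ""));            // "feat"
console.log(classifyCommit("fix: typo", "BREAKING CHANGE: renames prop")); // "breaking"
console.log(classifyCommit("update readme", ""));                          // "unknown"
```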
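The `pr-release` command's `getIncrementVersion` decides the trailing `.N` of a `0.0.0-pr<PR>` version by scanning the versions already published to the npm registry. A small sketch of that rule, separated from the HTTP plumbing, is shown below; `nextIncrement` and its arguments are hypothetical names, but the regex mirrors the one in the embedded source.

```ts
// Sketch: next publish uses the highest existing ".N" suffix plus one,
// or ".0" when no matching version has been published yet.
function nextIncrement(releaseVersion: string, publishedVersions: string[]): number {
  const versionRegex = new RegExp(`^${releaseVersion}\\.(\\d+)$`);
  let maxIteration = -1;
  for (const version of publishedVersions) {
    const match = version.match(versionRegex);
    if (match) {
      maxIteration = Math.max(maxIteration, Number.parseInt(match[1], 10));
    }
  }
  return maxIteration + 1;
}

// With 0.0.0-pr177.0 already published, the next release becomes 0.0.0-pr177.1.
console.log(nextIncrement("0.0.0-pr177", ["0.0.0-pr177.0", "0.0.0-pr176.3"])); // 1
```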
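Finally, `getBatchedUpdateOrder` in the embedded `formatDependencyTree` module orders Auro packages for migration using a level-by-level topological sort (Kahn's algorithm): packages with no unresolved dependencies form batch 1, packages that depend only on batch 1 form batch 2, and so on, with leftovers indicating a cycle. The sketch below uses the same `DependencyTree` shape; the sample tree at the bottom is illustrative.

```ts
// Sketch of the batched topological ordering over the dependency tree.
interface DependencyNode {
  dependsOn: string[];
  dependentPackages: string[];
}
type DependencyTree = Record<string, DependencyNode>;

function batchedUpdateOrder(tree: DependencyTree): string[][] {
  // in-degree = number of dependencies not yet placed into an earlier batch
  const inDegree: Record<string, number> = {};
  for (const pkg of Object.keys(tree)) {
    inDegree[pkg] = tree[pkg].dependsOn.length;
  }

  let ready = Object.keys(inDegree).filter((pkg) => inDegree[pkg] === 0);
  const batches: string[][] = [];
  let processed = 0;

  while (ready.length > 0) {
    batches.push(ready);
    processed += ready.length;
    const next: string[] = [];
    for (const pkg of ready) {
      for (const dependent of tree[pkg].dependentPackages) {
        inDegree[dependent] -= 1;
        if (inDegree[dependent] === 0) {
          next.push(dependent);
        }
      }
    }
    ready = next;
  }

  // Anything left unprocessed can only be part of a dependency cycle.
  if (processed !== Object.keys(tree).length) {
    throw new Error("Circular dependency detected!");
  }
  return batches;
}

// Illustrative tree: tokens -> styles -> button updates in three batches.
const sample: DependencyTree = {
  tokens: { dependsOn: [], dependentPackages: ["styles"] },
  styles: { dependsOn: ["tokens"], dependentPackages: ["button"] },
  button: { dependsOn: ["styles"], dependentPackages: [] },
};
console.log(batchedUpdateOrder(sample)); // [["tokens"], ["styles"], ["button"]]
```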
5
+ "mappings": ";AAAA,OAAS,WAAAA,OAAe,YCAxB,OAAOC,OAAY,SACnB,OAAS,QAAAC,OAAY,kBAErB,IAAOC,GAAQ,IACND,GAAKD,GAAO,SAAS,UAAU,CAAC,ECHzC,OAAOG,OAAQ,UACf,OAAOC,OAAU,YACjB,OAAS,iBAAAC,OAAqB,WAM9B,SAASC,EAASC,EAAS,CACrB,QAAQ,IAAI,OACd,QAAQ,IAAI,WAAWA,CAAO,EAAE,CAEpC,CAMe,SAARC,GAAqC,CAC1C,GAAI,CAEF,IAAMC,EAAaJ,GAAc,YAAY,GAAG,EAC1CK,EAAYN,GAAK,QAAQK,CAAU,EACzCH,EAAS,wBAAwBI,CAAS,EAAE,EAG5C,IAAMC,EAAcP,GAAK,QAAQM,EAAW,KAAM,cAAc,EAGhE,OADAJ,EAAS,6BAA6BK,CAAW,EAAE,EAC/CR,GAAG,WAAWQ,CAAW,GAC3BL,EAAS,0BAA0BK,CAAW,EAAE,EAC5B,KAAK,MAAMR,GAAG,aAAaQ,EAAa,MAAM,CAAC,EAChD,UAIrBL,EACE,8FACF,EACO,QACT,OAASM,EAAO,CACd,eAAQ,MAAM,oCAAqCA,CAAK,EACjD,OACT,CACF,CC7CA,OAAS,WAAAC,OAAe,YACxB,OAAOC,OAAS,MCGT,SAASC,EAAiBC,EAAS,CACxC,OAAOA,EACJ,OAAO,gCAAiC,gCAAgC,EACxE,OAAO,cAAe,qBAAqB,EAC3C,OAAO,cAAe,gCAAiC,EAAK,EAC5D,OACC,yBACA,iDACF,EACC,OAAO,0BAA2B,sCAAsC,CAC7E,CAKO,SAASC,GAAkBD,EAAS,CACzC,OAAOA,EACJ,OAAO,cAAe,iBAAiB,EACvC,OAAO,sBAAuB,qBAAqB,EACnD,OAAO,aAAc,4CAA4C,CACtE,CCxBA,OAAOE,OAAY,wBACnB,OAAS,SAAAC,OAAa,SCDtB,OAAS,UAAAC,OAAc,UACvB,OAAS,QAAAC,OAAY,YACrB,OAAOC,MAAS,MAChB,OAAS,UAAAC,MAAc,SCHvB,OAAOC,OAAS,MCAhB,OAAS,SAAAC,OAAa,qBACtB,OAAOC,OAAS,MAEhB,IAAMC,EAAQ,CAACC,EAASC,IAAU,CAChC,IAAMC,EAAgB,GAAGF,CAAO,IAAIC,EAAQA,EAAM,KAAK,GAAG,EAAI,EAAE,GAG1DE,EAAUL,GAAI,EAGhBM,EAAeJ,EACfK,EAAYJ,GAAS,CAAC,EAE1B,GAAI,CAACA,GAAS,OAAOD,GAAY,SAAU,CACzC,IAAMM,EAAQN,EAAQ,MAAM,GAAG,EAC/BI,EAAeE,EAAM,CAAC,EACtBD,EAAYC,EAAM,MAAM,CAAC,CAC3B,CAGA,IAAMC,EACJL,EAAc,SAAS,SAAS,GAAKA,EAAc,SAAS,KAAK,EAO7DM,EAAQX,GAAMO,EAAcC,EAAW,CAC3C,MALYE,EACV,UACA,CAAC,UAAW,OAAQ,MAAM,EAI5B,MAAO,EACT,CAAC,EAGD,GAAI,CAACA,EAAa,CAEhB,IAAME,EAAgB,CAAC,EAEvBD,EAAM,QAAQ,GAAG,OAASE,GAAS,CAEjC,IAAMC,EAASD,EAAK,SAAS,EAG7BD,EAAc,KAAKE,CAAM,EAGzB,QAAQ,OAAO,MAAMA,CAAM,CAC7B,CAAC,EAEDH,EAAM,QAAQ,GAAG,OAASE,GAAS,CACjC,IAAMC,EAASD,EAAK,SAAS,EAC7BD,EAAc,KAAKE,CAAM,EACzB,QAAQ,OAAO,MAAMA,CAAM,CAC7B,CAAC,CACH,CAGA,OAAO,IAAI,QAAQ,CAACC,EAASC,IAAW,CACtCL,EAAM,GAAG,QAAUM,GAAS,CACtBA,IAAS,EAEPP,GACFJ,EAAQ,KAAK,mCAAmCW,CAAI,EAAE,EACtDF,EAAQ,IAERT,EAAQ,KAAK,GAAGD,CAAa,iBAAiBY,CAAI,GAAG,EACrDD,EAAO,IAAI,MAAM,iCAAiCC,CAAI,EAAE,CAAC,IAG3DX,EAAQ,QAAQ,GAAGD,CAAa,yBAAyB,EACzDU,EAAQ,EAEZ,CAAC,CACH,CAAC,CACH,ECzEA,OAAOG,MAAQ,UACf,OAAOC,OAAU,YA0BjB,IAAqBC,EAArB,KAA0B,CACxB,YAAe,SAAoB,CAAE,cAAe,QAAS,OAAQ,GAAI,QAAS,CAAC,CAAE,EAKrF,OAAO,SAASC,EAA2B,CAAC,EAAS,CACnD,GAAM,CACJ,OAAAC,EAAS,SACT,QAAAC,EAAU,SACV,aAAAC,EAAe,wBACjB,EAAIH,EAGJ,GAAIG,EACF,GAAI,CACF,IAAMC,EAAkBP,EAAG,aAAaM,EAAc,MAAM,EAC5D,KAAK,SAAW,KAAK,MAAMC,CAAe,CAC5C,OAASC,EAAO,CACd,cAAQ,MAAM,kCAAkCF,CAAY,IAAKE,CAAK,EAChEA,CACR,CAGF,IAAMC,EAAW,KAAK,YAAY,EAG5BC,EAAUN,EACXJ,EAAG,WAAWU,CAAO,GACxBV,EAAG,UAAUU,EAAS,CAAE,UAAW,EAAK,CAAC,EAI3C,IAAMC,EAAc,KAAK,kBAAkBF,CAAQ,EAC7CG,EAAcX,GAAK,KAAKS,EAASL,CAAO,EAC9CL,EAAG,cAAcY,EAAaD,CAAW,EACzC,QAAQ,IAAI,2CAA2CC,CAAW,EAAE,CACtE,CAKA,OAAO,aAA0C,CAC/C,OAAO,KAAK,SAAS,QAAQ,OAC3B,CAACC,EAAiCC,IAChCD,EAAI,OACFC,EAAO,cAAc,OAClBC,GACC,kBAAmBA,GAAOA,EAAI,gBAAkB,IAAQ,YAAaA,GACrE,KAAK,YAAYD,CAAM,CAC3B,GAAK,CAAC,CACR,EACF,CAAC,CACH,CACF,CAKA,OAAO,YAAYA,EAAyB,CAE1C,IAAMb,EAAOa,EAAO,KACpB,OAAKb,EAKEA,EAAK,WAAW,mBAAmB,GAAKA,EAAK,SAAS,KAAK,EAJzD,EAKX,CAKA,OAAO,kBAAkBQ,EAA8C,CACrE,MAAO,GAAGA,EACP,IAAKO,GAAsC,KAAK,cAAcA,EAAS,EAAK,CAAC,EAC7E,KAAK;AAAA;AAAA;AAAA;AAAA,CAAa,CAAC;AAAA,KAExB,CAKA,OAAO,cAAcA,EAAmCC,EAAe,GAAc,CACnF,MAAO,GAAGA,EAAe,KAAKD,EAAQ,OAAO;AAAA;AAAA,EAAS,KAAKA,EAAQ,OAAO;AAAA;AAAA,CAAM,GAAGA,EAAQ,YAAc,GAAGA,EAAQ,WAAW;AAAA;AAAA,EAAS,EAAE,GAAG,KAAK,gCAAgCA,CAAO,CAAC,GAAG,KAAK,YAChM,UACA,CAAC,OAAQ,aAAc,mBAAoB,aAAa,GACvDA,EAAQ,SAAW,CAAC,GAClB,OACEE,GACCA,EAAE,OAAS,WAAa,YAAaA,EAAIA,EAAE,UAAY,UAAY,KAASA,EAAE
,KAAK,CAAC,IAAM,GAC9F,EACC,IAAKA,IAAoB,CACxB,GAAGA,EACH,WAAY,KAAK,iBAAiB,eAAgBA,EAAIA,EAAE,WAA4B,MAAS,CAC/F,EAAE,CACN,CAAC,GAAG,KAAK,YACP,SACA,CAAC,OAAQ,aAAa,EACtBF,EAAQ,MACV,CAAC,GAAG,KAAK,YACP,QACA,CAAC,CAAC,OAAQ,WAAW,EAAG,aAAa,EACrCA,EAAQ,KACV,CAAC,GAAG,KAAK,YACP,mBACA,CAAC,OAAQ,aAAa,EACtBA,EAAQ,QACV,CAAC,GAAG,KAAK,YACP,wBACA,CAAC,OAAQ,aAAa,EACtBA,EAAQ,aACV,CAAC,EACH,CAKA,OAAO,gCAAgCA,EAA2C,CAChF,IAAMG,EAAaH,EAAQ,SAAS,OAAQE,GAAmBA,EAAE,OAAS,OAAO,GAAK,CAAC,EACjFE,EAAaJ,EAAQ,YAAc,CAAC,EAGpCK,EAAgC,CAAC,EACjCC,EAAiB,IAAI,IAkC3B,GA/BAH,EAAW,QAASI,GAAsB,CACpCA,EAAK,aAAa,KAAK,GACzBF,EAAW,KAAK,CACd,KAAME,EAAK,KACX,WAAYA,EAAK,KACjB,YAAa,cAAeA,EAAOA,EAAK,UAAsB,KAAO,GACrE,KAAM,KAAK,IAAIA,EAAM,WAAW,GAAK,GACrC,SAAU,YAAaA,EAAOA,EAAK,QAAoB,KAAO,GAC9D,YAAaA,EAAK,aAAe,EACnC,CAAC,EAEHD,EAAe,IAAIC,EAAK,IAAI,EACxB,cAAeA,GAAQA,EAAK,WAC9BD,EAAe,IAAIC,EAAK,SAAmB,CAE/C,CAAC,EAGDH,EAAW,QAASI,GAAoB,CAClC,CAACF,EAAe,IAAIE,EAAK,IAAI,GAAKA,EAAK,aAAa,KAAK,GAC3DH,EAAW,KAAK,CACd,KAAMG,EAAK,KACX,WAAY,GACZ,WAAYA,EAAK,KACjB,KAAM,KAAK,IAAIA,EAAM,WAAW,GAAK,GACrC,QAASA,EAAK,SAAW,GACzB,YAAaA,EAAK,aAAe,EACnC,CAAC,CAEL,CAAC,EAEGH,EAAW,SAAW,EACxB,MAAO,GAGT,IAAMI,EAAU,0DACVC,EAAY,8BAEZC,EAAON,EACV,IAAKO,GACJ,CACEA,EAAK,WACLA,EAAK,WACLA,EAAK,KACLA,EAAK,QACLA,EAAK,WACP,EACG,IAAKC,GACJ,OAAOA,GAAS,EAAE,EACf,QAAQ,MAAO,KAAK,EACpB,QAAQ,MAAO,MAAM,CAC1B,EACC,KAAK,KAAK,CACf,EACC,KAAK;AAAA,CAAI,EAEZ,MAAO;AAAA;AAAA;AAAA,IAGPJ,CAAO;AAAA,IACPC,CAAS;AAAA,EACXC,CAAI;AAAA;AAAA,CAGJ,CAKA,OAAO,iBAAiBG,EAAkC,CACxD,MAAI,CAACA,GAAcA,EAAW,SAAW,EAChC,OAGFA,EACJ,IACEC,GACC,KAAKA,EAAM,IAAI,OAAO,KAAK,IAAIA,EAAO,WAAW,GAAK,KAAK,IAAIA,EAAM,YAAc,MAAMA,EAAM,WAAW,GAAK,EAAE,EACrH,EACC,KAAK,MAAM,CAChB,CAKA,OAAO,YACLC,EACAb,EACAc,EACQ,CACR,GAAIA,IAAS,QAAaA,EAAK,SAAW,EACxC,MAAO,GAIT,IAAMC,EAAeD,EAAK,OAAQL,GAAkC,CAClE,IAAMO,EAAcP,EAAK,YACzB,OAAO,OAAOO,GAAgB,UAAYA,EAAY,KAAK,CAC7D,CAAC,EAED,GAAID,EAAa,SAAW,EAC1B,MAAO,GAGT,IAAMT,EAAUN,EACb,IAAKiB,GAAyB,KAAK,YAAY,MAAM,QAAQA,CAAC,EAAIA,EAAE,CAAC,EAAIA,GAAG,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAC1F,KAAK,KAAK,EAEPV,EAAYP,EAAW,IAAI,IAAM,KAAK,EAAE,KAAK,KAAK,EAElDQ,EAAOO,EACV,IAAKN,GACJT,EACG,IAAKiB,GAAyB,CAC7B,IAAMP,EAAQ,KAAK,IAAID,EAAMQ,CAAC,EAE9B,OAAO,OAAOP,GAAS,EAAE,EACtB,QAAQ,MAAO,KAAK,EACpB,QAAQ,MAAO,MAAM,CAC1B,CAAC,EACA,KAAK,KAAK,CACf,EACC,KAAK;AAAA,CAAI,EAEZ,MAAO;AAAA,MACLG,CAAI;AAAA;AAAA,IAENP,CAAO;AAAA,IACPC,CAAS;AAAA,EACXC,CAAI;AAAA;AAAA,CAGJ,CAMA,OAAO,IAAIU,EAAUC,EAAsC,CACzD,IAAIC,EAAW,GACXtC,EAAeqC,EACf,MAAM,QAAQA,CAAS,IACzB,CAACrC,EAAMsC,CAAQ,EAAID,GAErB,IAAME,EAAQvC,EAAK,MAAM,GAAG,EAExBwC,EAAeJ,EACnB,KAAOI,GAAWD,EAAM,QACtBC,EAAUA,EAAQD,EAAM,MAAM,CAAW,EAE3C,OAAOC,GAAW,MAAQA,IAAY,GAAKF,EAAW,OAAOE,CAAO,CACtE,CAKA,OAAO,WAAWC,EAAmB,CACnC,OAAOA,EAAE,CAAC,EAAE,YAAY,EAAIA,EAAE,UAAU,CAAC,CAC3C,CACF,EFtTA,eAAsBC,GAAM,CAC1B,IAAMC,EAAaC,GAAI,wCAAwC,EAAE,MAAM,EAEvE,GAAI,CAEF,MAAMC,EACJ,qJACF,EACAF,EAAW,QAAQ,kDAAkD,CACvE,OAASG,EAAO,CAEd,IAAMC,EAAeD,aAAiB,MAAQA,EAAM,QAAU,OAAOA,CAAK,EAC1EH,EAAW,KAAK,yCAA2CI,CAAY,CACzE,CACF,CAEA,eAAsBC,GAAO,CAC3B,IAAMC,EAAcL,GAAI,iCAAiC,EAAE,MAAM,EAEjE,GAAI,CACF,MAAMM,EAAK,SAAS,EACpBD,EAAY,QAAQ,2CAA2C,CACjE,OAASH,EAAO,CACd,IAAMC,EAAeD,aAAiB,MAAQA,EAAM,QAAU,OAAOA,CAAK,EAC1E,MAAAG,EAAY,KAAK,yCAA2CF,CAAY,EAClED,CACR,CACF,CGzBA,eAAsBK,GAAoB,CAExC,MAAMC,EAAI,EAEV,MAAMC,EAAK,CAEb,CCZA,OAAS,UAAAC,OAAc,0DACvB,OACE,qBAAAC,GACA,yBAAAC,GACA,kBAAAC,OACK,4EAeA,IAAMC,GAA6B,CACxC,qBAAsB,GACtB,oBAAqB,SAIrB,oBAAqB,gBACvB,EAEA,SAASC,EAAYC,EAAU,CAE7B,MAAO,GADK,QAAQ,IAAI,CACX,IAAIA,CAAQ,EAC3B,CAMO,IAAMC,GAAeC,GAAW,CAErC,CACE,WAAY,YACZ,MAAO,CACL,UACEA,EAAO,iBACPP,GACEO,EAAO,oBACPA,EAAO,mBACT,EACF,SAAUH,EAAY,yBAAyB,EAC/C,
UAAWG,EAAO,oBACpB,EACA,OAAQH,EAAY,YAAY,CAClC,EAEA,CACE,WAAY,WACZ,MAAOA,EAAY,yBAAyB,EAC5C,OAAQA,EAAY,gBAAgB,EACpC,cAAe,CACb,OAAQ,CACN,UAAWA,EAAY,OAAO,CAChC,CACF,CACF,EAEA,CACE,WAAY,SACZ,MAAOA,EAAY,uBAAuB,EAC1C,OAAQA,EAAY,cAAc,EAClC,cAAe,CAACF,GAAe,cAAc,CAC/C,CACF,EAOA,eAAsBM,GAAgBD,EAASJ,GAA4B,CAEzE,MAAMD,GAAe,aAAa,EAElC,QAAWO,KAAcH,GAAYC,CAAM,EACzC,GAAI,CAEF,MAAMN,GAAsBQ,CAAU,CACxC,OAASC,EAAK,CACZX,GAAO,MAAM,oBAAoBU,EAAW,UAAU,KAAKC,EAAI,OAAO,EAAE,CAC1E,CAEJ,CAEA,eAAsBC,IAAsB,CAC1C,MAAMH,GAAgB,CACpB,GAAGL,GACH,gBACE,kGACJ,CAAC,CACH,CLxFO,SAASS,IAAc,CAC5B,IAAMC,EAAWC,GAAK,QAAQ,EACxBC,EAAUC,EAAI,yBAAyB,EAAE,MAAM,EAErD,GAAI,CACF,OAAAC,GAAOJ,EAAU,CAAE,UAAW,GAAM,MAAO,EAAK,CAAC,EACjDE,EAAQ,QAAQ,+BAA+B,EACxC,EACT,OAASG,EAAO,CACd,OAAAH,EAAQ,KAAK,sCAAsCG,EAAM,OAAO,EAAE,EAClE,QAAQ,MAAMA,CAAK,EACZ,EACT,CACF,CAUA,eAAeC,EAAaC,EAAUC,EAAQC,EAAYC,EAAS,CACjE,IAAMR,EAAUC,EAAII,CAAQ,EAAE,MAAM,EAEpC,GAAI,CACF,IAAMI,EAAS,MAAMH,EAAO,EAC5B,OAAAN,EAAQ,QAAQO,CAAU,EACnBE,CACT,OAASN,EAAO,CACd,MAAAH,EAAQ,KAAKQ,CAAO,EACpB,QAAQ,MAAM,UAAUL,EAAM,OAAO,EAAE,EACjCA,CACR,CACF,CAOA,eAAsBO,GAAqBC,EAAQC,EAAc,CAC/D,OAAOR,EACL,+BACA,SAAY,CACV,IAAMS,EAAS,MAAMC,EAAOH,CAAM,EAClC,MAAME,EAAO,MAAMD,CAAY,EAC/B,MAAMC,EAAO,MAAM,CACrB,EACA,qBACA,gCACF,CACF,CAOA,eAAsBE,GAAoBC,EAAYC,EAAY,CAChE,OAAOb,EACL,YAAYY,EAAW,MAAQ,MAAM,QAAQC,EAAW,MAAQ,MAAM,MACtE,SAAY,CAEV,IAAMC,EAAa,MAAMJ,EAAOE,CAAU,EAC1C,MAAME,EAAW,MAAMF,EAAW,MAAM,EACxC,MAAME,EAAW,MAAM,EAGvB,IAAMC,EAAa,MAAML,EAAOG,CAAU,EAC1C,MAAME,EAAW,MAAMF,EAAW,MAAM,EACxC,MAAME,EAAW,MAAM,CACzB,EACA,kBAAkBH,EAAW,MAAQ,MAAM,QAAQC,EAAW,MAAQ,MAAM,UAC5E,8BACF,CACF,CAMA,eAAsBG,EAAaC,EAAS,CAC1C,GAAM,CAAE,SAAUC,EAAa,UAAWC,EAAS,SAAAC,CAAS,EAAIH,EAEhE,GAAIG,EAAU,CACZ,IAAMC,EAAcxB,EAAI,6BAA6B,EAAE,MAAM,EAE7D,WAAW,IAAM,CACfwB,EAAY,QAAQ,0BAA0B,CAChD,EAAG,CAAC,EACJ,MACF,CAEA,OAAOrB,EACL,0CACA,SAAY,CACV,MAAMsB,EAAkBJ,EAAaC,CAAO,EAC5C,MAAMI,GAAoB,CAC5B,EACA,4BACA,eACF,CACF,CMnHA,OAAS,YAAAC,GAAU,QAAAC,OAAY,YAC/B,OAAS,eAAAC,OAAmB,8BAC5B,OAAS,QAAAC,OAAY,OACrB,OAAS,OAAAC,OAAW,oBACpB,OAAS,WAAAC,OAAe,yBCJxB,OAAOC,OAAU,YACjB,OAAS,QAAAC,OAAY,OAOd,SAASC,GAAWC,EAAO,CAChC,MAAO,CACL,KAAM,cACN,YAAa,CACX,IAAMC,EAAQ,MAAM,QAAQD,CAAK,EAAIA,EAAQ,CAACA,CAAK,EAEnD,QAAWE,KAAQD,EACjB,GAAI,CACF,QAAWE,KAAYL,GAAK,KAAKD,GAAK,QAAQK,CAAI,CAAC,EACjD,KAAK,aAAaC,CAAQ,CAE9B,OAASC,EAAO,CACd,KAAK,MAAM,gCAAgCF,CAAI,MAAME,EAAM,OAAO,EAAE,CACtE,CAEJ,CACF,CACF,CDjBA,IAAMC,EAAW,CACf,kBAAmB,CAAC,cAAc,EAClC,YAAa,CAAC,qBAAsB,kBAAmB,cAAc,EACrE,cAAe,CAAC,qBAAsB,aAAa,CACrD,EAQO,SAASC,GAAiBC,EAAc,CAAC,EAAGC,EAAU,CAAC,EAAG,CAC/D,GAAM,CACJ,cAAAC,EAAgBJ,EAAS,cACzB,OAAAK,EAAS,CAAC,MAAO,cAAe,UAAU,CAC5C,EAAIF,EAGEG,EAAiB,CAAC,GAAGN,EAAS,YAAa,GAAGE,CAAW,EAE/D,MAAO,CACLK,GAAY,CACV,OAAAF,EACA,eAAgB,GAChB,kBAAmBL,EAAS,iBAC9B,CAAC,EACDQ,GAAQ,CACN,OAAQ,CAAE,KAAM,EAAK,EACrB,QAAS,CACP,UAAW,CAAC,GAAGF,EAAgBG,GAAK,QAAQ,IAAI,EAAG,MAAO,QAAQ,EAAGA,GAAK,QAAQ,IAAI,EAAG,KAAK,CAAC,CACjG,CACF,CAAC,EACDC,GAAWN,CAAa,CAC1B,CACF,CAOO,SAASO,EAAoBR,EAAU,CAAC,EAAG,CAChD,GAAM,CACJ,YAAAD,EAAc,CAAC,EACf,MAAAU,EAAQ,GACR,MAAAC,EAAQ,CAAC,iBAAkB,qBAAqB,EAChD,UAAAC,EAAY,SACZ,OAAAC,EAAS,KACX,EAAIZ,EAEJ,MAAO,CACL,KAAM,OACN,OAAQ,CACN,MAAAU,EACA,OAAQ,CACN,OAAAE,EACA,IAAKD,EACL,eAAgB,WAClB,EACA,SAAUE,GAAkB,EAC5B,QAASf,GAAiBC,CAAW,EACrC,MAAOe,GAAiBL,CAAK,CAC/B,CACF,CACF,CAOO,SAASM,EAAcf,EAAU,CAAC,EAAG,CAC1C,GAAM,CACJ,YAAAD,EAAc,CAAC,EACf,MAAAU,EAAQ,GACR,YAAAO,EAAc,cACd,cAAAC,EAAgB,CAAC,iBAAiB,EAClC,UAAAN,EAAY,QACd,EAAIX,EAEJ,MAAO,CACL,KAAM,OACN,OAAQ,CACN,MAAO,OAAO,YACZkB,GAAK,KAAKF,EAAa,CAAE,OAAQC,CAAc,CAAC,EAAE,IAAKE,GAE9C,CADMC,GAASD,EAAM,KAAK,EACnBA,CAAI,CACnB,CACH,EACA,OAAQ,CACN,OAAQ,MACR,IAAKR,EACL,eAAg
B,gBAChB,eAAgB,eAClB,EACA,QAASb,GAAiBC,CAAW,EACrC,MAAOe,GAAiBL,CAAK,CAC/B,CACF,CACF,CAOO,SAASY,EAAarB,EAAU,CAAC,EAAG,CACzC,GAAM,CAAE,MAAAU,EAAQ,CAAC,iBAAiB,EAAG,UAAAC,EAAY,QAAS,EAAIX,EAE9D,MAAO,CACL,KAAM,MACN,OAAQ,CACN,MAAAU,EACA,OAAQ,CACN,OAAQ,MACR,IAAKC,EACL,eAAgB,aAClB,EACA,QAAS,CAACW,GAAI,CAAC,CACjB,CACF,CACF,CAOO,SAASR,GAAiBS,EAAc,CAE7C,GAAI,CAACA,EACH,MAAO,GAIT,IAAMvB,EAAU,OAAOuB,GAAiB,SAAWA,EAAe,CAAC,EAEnE,MAAO,CACL,YAAavB,EAAQ,aAAe,GACpC,WAAYA,EAAQ,YAAc,IAClC,SAAU,CACR,cAAe,GAEf,QAASA,EAAQ,SAAW,CAC1B,oBACA,0BACA,eACA,sBACA,iBACA,qBACA,YACF,EAEA,iBAAkBA,EAAQ,kBAAoB,CAC5C,mBAAoB,IACpB,aAAc,GAChB,CACF,EACA,QAASA,EAAQ,SAAW,CAC1B,kBACA,gBACA,gBACA,iBACA,qBACA,gBACF,EACA,QAASA,EAAQ,SAAW,CAAC,cAAe,qBAAqB,CACnE,CACF,CAOO,SAASa,GAAkBW,EAAa,CAAC,EAAG,CASjD,MAAO,CAAC,GARS,CAEf,oBACA,4BACA,yBACA,oBACF,EAEqB,GAAGA,CAAU,CACpC,CElMA,OAAS,kBAAAC,OAAsB,kBAC/B,OAAS,aAAAC,OAAiB,sBAC1B,OAAOC,OAAS,MAIhB,IAAMC,EAAiB,CACrB,MAAO,GACP,YAAa,GACb,SAAU,IACV,QAAS,SACT,WAAY,CAAC,WAAY,YAAa,mBAAoB,WAAW,CACvE,EAYA,eAAsBC,GAAuBC,EAAU,CAAC,EAAG,CACzD,GAAI,CAACA,EAAQ,MAAO,OAEpB,IAAMC,EAAgBJ,GAAI;AAAA,CAA2B,EAAE,MAAM,EAE7D,GAAI,CAEF,IAAMK,EAAe,CACnB,KAAM,OAAOF,EAAQ,IAAI,GAAK,OAC9B,KAAMA,EAAQ,KAAO,IAAM,OAC3B,MAAOA,EAAQ,OAASF,EAAe,MACvC,YAAaE,EAAQ,aAAeF,EAAe,YACnD,SAAUE,EAAQ,UAAYF,EAAe,SAC7C,QAASE,EAAQ,SAAWF,EAAe,QAG3C,WAAY,CACV,SAAsBK,EAASC,EAAM,CACnC,MAAI,CAACD,EAAQ,IAAI,SAAS,GAAG,GAAK,CAACA,EAAQ,IAAI,SAAS,GAAG,IACzDA,EAAQ,KAAO,SAEVC,EAAK,CACd,CACF,EAGA,QAAS,CACPR,GAAU,CACR,QAASI,EAAQ,YAAcF,EAAe,UAChD,CAAC,CACH,CACF,EAGMO,EAAS,MAAMV,GAAe,CAClC,OAAQO,EACR,YAAa,GACb,eAAgB,EAClB,CAAC,EAED,OAAAD,EAAc,KAAK,EACZI,CACT,OAASC,EAAO,CACd,MAAAL,EAAc,KAAK,yCAAyC,EAC5D,QAAQ,MAAM,qCAAsCK,CAAK,EACnD,IAAI,MAAM,uCAAuCA,EAAM,OAAO,EAAE,CACxE,CACF,CCvEA,OAAOC,OAAU,YACjB,OAAOC,MAAS,MAChB,OAAS,UAAAC,OAAc,SAMvB,IAAIC,EAAkB,GAGhBC,GAAS,CACb,IAAK,CAAE,OAAQ,GAAO,SAAU,CAAE,EAClC,QAAS,CAAE,OAAQ,GAAO,SAAU,CAAE,EACtC,KAAM,CAAE,OAAQ,GAAO,SAAU,CAAE,CACrC,EAGMC,GAAqB,IAGrBC,EAAmB,IAAI,IAGvBC,GAAe,CACnB,mBACA,wBACA,eACA,eACA,oBACF,EAGA,SAASC,GAAaC,EAAU,CAC9B,GAAI,CAACA,GAAY,OAAOA,GAAa,SAAU,MAAO,GAEtD,GAAI,CACF,IAAMC,EAAiBC,GAAK,UAAUF,CAAQ,EAG9C,OACEF,GAAa,KAAMK,GAAeF,EAAe,SAASE,CAAU,CAAC,GACrEF,EAAe,SAAS,QAAQ,GAChCA,EAAe,SAAS,SAAS,GACjCA,EAAe,SAAS,OAAO,CAEnC,OAASG,EAAO,CACd,eAAQ,MAAM,wBAAwB,OAAOJ,CAAQ,KAAMI,EAAM,OAAO,EACjE,EACT,CACF,CAQA,eAAeC,EAAaC,EAAUC,EAAQ,CAC5C,IAAMC,EAAOb,GAAOW,CAAQ,EAG5B,GAAIE,EAAK,QAAU,KAAK,IAAI,EAAIA,EAAK,SAAWZ,GAC9C,MAAO,GAGT,GAAI,CACF,OAAAY,EAAK,OAAS,GACdA,EAAK,SAAW,KAAK,IAAI,EAClB,MAAMD,EAAO,CACtB,OAASH,EAAO,CACd,eAAQ,MAAM,YAAYE,CAAQ,SAAUF,CAAK,EAC1C,EACT,QAAE,CACAI,EAAK,OAAS,EAChB,CACF,CAQA,eAAsBC,GACpBC,EACAC,EACAC,EACA,CAEA,IAAIC,EAAiB,GAEjBC,EAAoB,CAAE,IAAK,GAAO,QAAS,GAAO,KAAM,EAAM,EAC9DC,EAAsB,KACtBC,EAGEC,EAAeC,EAAI,0BAA0B,EAAE,MAAM,EAGrDC,EAAa,CAEjB,IAAK,SAAY,CACf,IAAMC,EAAaF,EAAI,8BAA8B,EAAE,MAAM,EAC7D,GAAI,CACF,IAAMG,EAAa,MAAMC,GAAOC,EAAa,EAAE,MAAM,EACrD,aAAMF,EAAW,MAAME,EAAa,EAAE,OAAO,MAAM,EACnD,MAAMF,EAAW,MAAM,EACvBD,EAAW,QAAQ,mBAAmB,EAC/B,EACT,OAAShB,EAAO,CACd,OAAAgB,EAAW,KAAK,8BAA8B,EAC9C,QAAQ,MAAM,qCAAsChB,CAAK,EAClD,EACT,CACF,EAGA,QAAS,SAAY,CACnB,GAAM,CAAE,SAAUoB,EAAa,UAAWC,EAAS,SAAAC,CAAS,EAAIf,EAChE,GAAIe,EAAU,CACZ,IAAMC,EAAcT,EAAI,gCAAgC,EAAE,MAAM,EAChE,kBAAW,IAAM,CACfS,EAAY,QAAQ,6BAA6B,CACnD,EAAG,CAAC,EACG,EACT,CAEA,IAAMC,EAAiBV,EACrB,yCACF,EAAE,MAAM,EACR,GAAI,CACF,aAAMW,EAAkBL,EAAaC,CAAO,EAC5CG,EAAe,QAAQ,6CAA6C,EAC7D,EACT,OAASxB,EAAO,CACd,OAAAwB,EAAe,KAAK,wCAAwC,EAC5D,QAAQ,MAAM,4BAA6BxB,CAAK,EACzC,EACT,CACF,EAGA,KAAM,SAAY,CAEhB,GAAIV,EACF,MAAO,GAIT,GAAIiB,EAAQ,SAAU,CACpB,IAAMgB,EAAcT,EAAI,6BAA6B,EAAE,MAA
M,EAC7D,kBAAW,IAAM,CACfS,EAAY,QAAQ,0BAA0B,CAChD,EAAG,CAAC,EACG,EACT,CAEA,IAAMG,EAAcZ,EAAI,oBAAoB,EAAE,MAAM,EACpD,GAAI,CACF,aAAMa,EAAapB,CAAO,EAC1BmB,EAAY,QAAQ,0BAA0B,EACvC,EACT,OAAS1B,EAAO,CACd0B,EAAY,KAAK,iCAAiC,EAClD,QAAQ,MAAM,+BAAgC1B,CAAK,CACrD,CACF,CACF,EAGM4B,EAA4B,IAAM,CAEpCnB,GACAC,EAAkB,KAClBA,EAAkB,SAClBA,EAAkB,MAClB,OAAOF,GAA2B,aAElCC,EAAiB,GACjBD,EAAuB,EAE3B,EAGA,SAASqB,EAAwBC,EAAQ,IAAM,CACzCnB,GACF,aAAaA,CAAmB,EAGlCA,EAAsB,WAAW,SAAY,CAE3CD,EAAkB,IAAM,MAAMT,EAAa,MAAOc,EAAW,GAAG,EAEhE,WAAW,SAAY,CACrBL,EAAkB,QAAU,MAAMT,EAChC,UACAc,EAAW,OACb,EAEA,WAAW,SAAY,CACrBL,EAAkB,KAAO,MAAMT,EAAa,OAAQc,EAAW,IAAI,EACnEa,EAA0B,CAC5B,EAAG,GAAI,CACT,EAAG,GAAI,CACT,EAAGE,CAAK,CACV,CAGAxB,EAAQ,GAAG,QAAS,MAAOyB,GAAU,CACnC,OAAQA,EAAM,KAAM,CAClB,IAAK,QACHlB,EAAa,QAAQ,iCAAiC,EACtD,MAEF,IAAK,eAKH,GAHApB,EAAiB,MAAM,EAGnBsC,EAAM,MACR,GAAI,CAEF,IAAMC,EAAS,MAAM,QAAQD,EAAM,KAAK,EACpCA,EAAM,MACN,OAAOA,EAAM,OAAU,SACrB,CAACA,EAAM,KAAK,EACZ,OAAOA,EAAM,OAAU,UAAYA,EAAM,QAAU,KACjD,OAAO,OAAOA,EAAM,KAAK,EACzB,CAAC,EAET,QAAWE,KAASD,EAEd,OAAOC,GAAU,UAAY,CAACtC,GAAasC,CAAK,GAClDxC,EAAiB,IAAIK,GAAK,UAAUmC,CAAK,CAAC,CAGhD,OAASjC,EAAO,CACd,QAAQ,MAAM,gCAAiCA,CAAK,CACtD,CAGFY,EAAgBE,EAAI,oBAAoB,EAAE,MAAM,EAChDxB,EAAkB,GAClB,MAEF,IAAK,aACCsB,GACFA,EAAc,QACZ,UAAU,MAAM,QAAQmB,EAAM,KAAK,EAAI,MAAMA,EAAM,MAAM,KAAK,IAAI,CAAC,IAAM,EAAE,WAAWA,EAAM,QAAQ,eACtG,EAEFzC,EAAkB,GAGdG,EAAiB,KAAO,GAC1BoC,EAAwB,EAE1B,MAEF,IAAK,MAEH,MAEF,IAAK,QACHvC,EAAkB,GACdsB,EACFA,EAAc,KAAK,4BAA4BmB,EAAM,MAAM,OAAO,EAAE,EAEpEjB,EAAI,EAAE,KAAK,sBAAsBiB,EAAM,MAAM,OAAO,EAAE,EAExDtC,EAAiB,MAAM,EACvB,KACJ,CACF,CAAC,CACH,CAMO,SAASyC,GAAwB5B,EAAS,CAC/C,eAAQ,GAAG,SAAU,IAAM,CACzB,IAAM6B,EAAerB,EAAI,gBAAgB,EAAE,MAAM,EACjDR,EAAQ,MAAM,EACd6B,EAAa,QAAQ,qCAAgC,EACrD,QAAQ,KAAK,CAAC,CAChB,CAAC,EAEM7B,CACT,CV1QA,eAAe8B,GAAmBC,EAAS,CACzC,IAAMC,EAAmBC,EAAoBF,CAAO,EAC9CG,EAAaC,EAAcJ,CAAO,EAClCK,EAAYC,EAAa,EAG/BL,EAAiB,OAAO,QAAQ,KAAKM,GAAO,CAAC,EAG7C,MAAMC,EAAaR,CAAO,EAG1B,MAAMS,GAAoBR,EAAiB,OAAQE,EAAW,MAAM,EAGpE,MAAMO,GAAqBL,EAAU,OAAQA,EAAU,OAAO,MAAM,CACtE,CAOA,eAAeM,GAAeX,EAAS,CACrC,GAAM,CAAE,IAAKY,CAAU,EAAIZ,EACrBC,EAAmBC,EAAoB,CAAE,GAAGF,EAAS,MAAO,EAAK,CAAC,EAClEG,EAAaC,EAAc,CAAE,GAAGJ,EAAS,MAAO,EAAK,CAAC,EAGtDa,EAAUC,GAAM,CAACb,EAAiB,OAAQE,EAAW,MAAM,CAAC,EAGlE,OAAAY,GACEF,EACAb,EACAY,EAAY,SAAYI,GAAuBhB,CAAO,EAAI,MAC5D,EAGAiB,GAAwBJ,CAAO,EAExBA,CACT,CAUA,eAAsBK,EAAgBlB,EAAU,CAAC,EAAG,CAClD,GAAI,CACF,GAAM,CAAE,MAAAc,CAAM,EAAId,EAOlB,OAJAmB,GAAY,EAIRL,EACK,MAAMH,GAAeX,CAAO,EAG9B,MAAMD,GAAmBC,CAAO,CACzC,OAASoB,EAAO,CACd,MAAM,IAAI,MAAM,iBAAiBA,EAAM,OAAO,EAAE,CAClD,CACF,CFrFA,IAAIC,EAAaC,GACd,QAAQ,KAAK,EACb,YAAY,6CAA6C,EAE5DD,EAAaE,EAAiBF,CAAU,EACxCA,EAAaG,GAAkBH,CAAU,EAEzC,IAAOI,GAAQJ,EAAW,OAAO,MAAOK,GAAY,CAClD,GAAI,CACF,IAAMC,EAAQC,GAAI,iBAAiB,EAE/BF,EAAQ,OACVC,EAAM,KAAO,yBACbA,EAAM,QAAU,cAChBA,EAAM,MAAQ,SAEdA,EAAM,KACJD,EAAQ,OAAS,GACb,qCACA,qBAGRC,EAAM,MAAM,EAEPD,EAAQ,OACXC,EAAM,QAAQ,kBAAkB,EAGlC,MAAME,EAAgB,CAAE,GAAGH,EAAS,IAAK,GAAM,MAAOA,EAAQ,KAAM,CAAC,CACvE,OAASI,EAAO,CAEdF,GAAI,EAAE,KAAK,iBAAiBE,EAAM,OAAO,EAAE,EAC3C,QAAQ,MAAMA,CAAK,EACnB,QAAQ,KAAK,CAAC,CAChB,CACF,CAAC,Ea3CD,OAAS,WAAAC,OAAe,YACxB,OAAOC,OAAS,MAIhB,IAAIC,GAAeC,GAChB,QAAQ,OAAO,EACf,YAAY,wBAAwB,EAEvCD,GAAeE,EAAiBF,EAAY,EAE5C,IAAOG,GAAQH,GAAa,OAAO,MAAOI,GAAY,CACpD,GAAI,CACF,IAAMC,EAAQC,GAAI,iBAAiB,EAE/BF,EAAQ,OACVC,EAAM,KAAO,yBACbA,EAAM,QAAU,cAChBA,EAAM,MAAQ,SAEdA,EAAM,KACJD,EAAQ,OAAS,GACb,qCACA,qBAGRC,EAAM,MAAM,EAEZ,MAAME,EAAgBH,CAAO,EAExBA,EAAQ,OACXC,EAAM,QAAQ,kBAAkB,CAEpC,OAASG,EAAO,CAEdF,GAAI,EAAE,KAAK,iBAAiBE,EAAM,OAAO,EAAE,EAC3C,QAAQ,MAAMA,CAAK,EACnB,QAAQ,KAAK,CAAC,CAChB,CACF,CAAC,ECvCD,OAAS,QAAAC,OAAY,qBAC
rB,OAAOC,OAAU,YACjB,OAAOC,OAAa,eACpB,OAAS,iBAAAC,OAAqB,WAC9B,OAAOC,OAAU,YACjB,OAAS,WAAAC,OAAe,YACxB,OAAOC,OAAc,WAGrB,IAAOC,GAAQC,GACZ,QAAQ,SAAS,EACjB,YAAY,uDAAuD,EACnE,eACC,oBACA,kDACF,EACC,OACC,qBACA,kEACF,EACC,OAAO,MAAOC,GAAY,CACzB,IAAMC,EAAWC,GAAc,YAAY,GAAG,EACxCC,EAAUC,GAAK,QAAQH,CAAQ,EAC/BI,EAAaD,GAAK,QAAQD,EAAS,aAAcH,EAAQ,EAAE,EAEjE,GAAIA,EAAQ,YAAa,CAEvB,IAAMM,EAAcC,GAAK,UAAUC,EAAI,EAEvC,GAAI,CACF,MAAMF,EAAY,yBAAyB,CAC7C,MAAQ,CACN,QAAQ,MAAM,gCAAgC,EAC9CG,GAAQ,KAAK,CAAC,CAChB,EAEgB,MAAMC,GAAS,OAAO,CACpC,CACE,KAAM,UACN,KAAM,SACN,QACE,gEACF,QAAS,EACX,CACF,CAAC,GAEW,OACVC,EACE,oBAAoBN,CAAU,wBAAwBA,CAAU,8BAClE,EAEAM,EACE,oBAAoBN,CAAU,wBAAwBA,CAAU,oBAClE,CAEJ,MACEM,EAAM,GAAGN,CAAU,YAAY,CAEnC,CAAC,EC1DH,OAAOO,OAAa,eACpB,OAAS,WAAAC,OAAe,YAExB,OAAS,YAAAC,GAAU,aAAAC,OAAiB,mBACpC,OAAS,UAAAC,OAAc,0DCJvB,OAAOC,OAAQ,mBACf,OAAS,UAAAC,MAAc,0DACvB,OACE,yBAAAC,GACA,kBAAAC,OACK,4EAEP,IAAMC,GACJ,kEAIIC,GAAc,OACdC,GAAwB,OACxBC,GAAkB,UAYlBC,GAAiB,CACrB,eAAgB,CACd,kBACA,aACA,uBACA,uBACA,aACA,aACA,WACF,EACA,UAAW,CAAC,aAAc,kBAAmB,iBAAiB,EAC9D,MAAO,CACL,aACA,qBACA,kBACA,2BACA,cACA,eACA,WACF,CACF,EA8BA,SAASC,GAAsBC,EAAaC,EAAU,CAMpD,OAHED,EAAY,WAAW,GAAG,GAC1B,6BAA6B,KAAKA,EAAY,MAAM,CAAC,CAAC,EAG/C,GAAGN,EAAwB,cAAcM,CAAW,IAAIC,CAAQ,GAGrED,IAAgBL,GACX,GAAGD,EAAwB,eAAeM,CAAW,IAAIC,CAAQ,GAGnE,GAAGP,EAAwB,IAAIC,EAAW,IAAIM,CAAQ,EAC/D,CASA,SAASC,GAAsBD,EAAUD,EAAaG,EAAY,CAChE,IAAMC,EAAYL,GAAsBC,EAAaC,CAAQ,EAE7D,MAAO,CAEL,WAAYA,EAAS,MAAM,GAAG,EAAE,IAAI,EACpC,MAAO,CACL,UAAAG,EACA,SAAUD,EACV,UAAW,EACb,EACA,OAAQA,EACR,UAAW,EACb,CACF,CAQA,eAAeE,GAAgBC,EAAS,CACtC,GAAI,CACF,MAAMhB,GAAG,GAAGgB,EAAS,CAAE,UAAW,GAAM,MAAO,EAAK,CAAC,EACrDf,EAAO,IAAI,mCAAmCe,CAAO,EAAE,CACzD,OAASC,EAAO,CACd,MAAAhB,EAAO,MAAM,4BAA4Be,CAAO,KAAKC,EAAM,OAAO,EAAE,EAC9DA,CACR,CACF,CAOA,eAAsBC,GAAiBC,EAAS,CACzCA,IACHlB,EAAO,MAAM,kCAAkC,EAE/C,QAAQ,KAAK,CAAC,GAIhB,IAAMmB,EAAa,UAEnB,GAAI,CACF,MAAML,GAAgBK,CAAU,EAChCnB,EAAO,IAAI,wCAAwC,CACrD,OAASgB,EAAO,CACdhB,EAAO,MAAM,qCAAqCgB,EAAM,OAAO,EAAE,EAEjE,QAAQ,KAAK,CAAC,CAChB,CAGA,MAAMd,GAAe,aAAa,EAElC,IAAMkB,EAAc,CAAC,EACfC,EAAe,CAAC,EAEtB,QAAWC,KAAO,OAAO,KAAKf,EAAc,EAC1C,QAAWgB,KAAQhB,GAAee,CAAG,EAAG,CACtC,IAAME,EAAY,GAAGF,IAAQ,QAAU,GAAK,GAAGA,CAAG,GAAG,GAAGC,CAAI,GACtDX,EAAa,GAAGM,CAAO,YAAYM,CAAS,GAE5CC,EAAad,GACjB,aAAaL,EAAe,YAAYkB,CAAS,GACjDnB,GACAO,CACF,EACAQ,EAAY,KAAKK,CAAU,CAC7B,CAoBF,GAhBA,MAAM,QAAQ,IACZL,EAAY,IAAI,MAAOM,GAAW,CAChC,GAAI,EACe,MAAM,MAAMA,EAAO,MAAM,UAAW,CACnD,OAAQ,MACV,CAAC,GACa,IACZL,EAAa,KAAKK,EAAO,MAAM,SAAS,CAE5C,MAAQ,CACNL,EAAa,KAAKK,EAAO,MAAM,SAAS,CAC1C,CACF,CAAC,CACH,EAGIL,EAAa,OAAS,EAAG,CAC3B,IAAMM,EAAeN,EAClB,IAAKE,GAAS,mBAAmBA,CAAI,EAAE,EACvC,KAAK;AAAA,CAAI,EACZvB,EAAO,MACL;AAAA,EAAmF2B,CAAY,EACjG,EAEA,QAAQ,KAAK,CAAC,CAChB,CAGA,GAAI,CACF,MAAM,QAAQ,IACZP,EAAY,IAAKM,GAAWzB,GAAsByB,CAAM,CAAC,CAC3D,EACA1B,EAAO,IAAI,sBAAsB,CACnC,OAASgB,EAAO,CACdhB,EAAO,MAAM,2BAA2BgB,EAAM,OAAO,EAAE,EAEvD,QAAQ,KAAK,CAAC,CAChB,CACF,CDhNA,IAAOY,GAAQC,GACZ,QAAQ,MAAM,EACd,YACC,mEACF,EACC,OAAO,SAAY,CAClBC,GAAO,KAAK,iDAAiD,EAE7DA,GAAO,KACL,uFACF,EAEA,IAAMC,EAAMC,GAAQ,IAAI,EAExB,MAAMC,GAAiBF,CAAG,EAS1B,IAAMG,EAAiB,GAAGH,CAAG,sBAIvBI,GAHa,MAAMC,GAASF,EAAgB,CAAE,SAAU,OAAQ,CAAC,GAIpE,QAAQ,SAAU;AAAA,CAAI,EACtB,QAAQ,SAAU;AAAA,CAAI,EACzB,MAAMG,GAAUH,EAAgBC,EAAiB,CAAE,SAAU,OAAQ,CAAC,GAElEA,EAAgB,SAAS,IAAI,GAAKA,EAAgB,SAAS;AAAA;AAAA,CAAM,IACnEL,GAAO,MAAM,iDAAiD,CAElE,CAAC,EExCH,OAAOQ,MAAQ,UACf,OAAOC,MAAU,YACjB,OAAS,UAAAC,OAAc,0DACvB,OAAS,WAAAC,OAAe,YACxB,OAAS,QAAAC,OAAY,OCJrB,IAAOC,GAAQ,CAACC,EAAMC,IAAe,CACnC,IAAMC,GAAcF,EAAK,MAAM,iCAAiC,GAC9DA,EAAK,MAAM,6BAA6B,GAAG,CAAC,EACxCG,EAAYH,EAAK,MAAM,2BAA2B,IAAI,CAAC,EACvDI,EAAYJ,EA
AK,MAAM,iCAAiC,IAAI,CAAC,GAAK,GAExE,MAAI,CAACE,GAAc,CAACC,EACXH,EAEF;AAAA,WACEG,CAAS,YAAYF,CAAU;AAAA;AAAA,KAErCG,CAAS;AAAA,QACND,CAAS,eAAeA,CAAS;AAAA;AAAA,0BAEfD,CAAU;AAAA,0BACVA,CAAU,KAAKC,CAAS;AAAA;AAAA,CAGlD,EDVA,IAAME,EAAUC,EAAK,QAAQ,QAAQ,IAAI,EAAG,eAAe,EAE3D,eAAeC,GAASC,EAAS,CAC/B,GAAI,CAEF,OADmB,MAAM,QAAQ,IAAIA,EAAQ,IAAKC,GAAWC,GAAKD,CAAM,CAAC,CAAC,GACxD,KAAK,CACzB,OAASE,EAAK,CACZ,cAAQ,MAAM,kCAAmCA,CAAG,EAC9CA,CACR,CACF,CAEA,eAAeC,GAAkBC,EAAW,CACrCC,EAAG,WAAWT,CAAO,GACxB,MAAMS,EAAG,SAAS,MAAMT,EAAS,CAAE,UAAW,EAAK,CAAC,EAGtD,QAAWU,KAAYF,EAAW,CAChC,IAAMG,EAAeV,EAAK,QAAQ,QAAQ,IAAI,EAAGS,CAAQ,EACnDE,EAAc,MAAMH,EAAG,SAAS,SAASE,EAAc,OAAO,EAC9DE,EAAUZ,EAAK,QAAQD,EAAS,GAAGC,EAAK,SAASS,CAAQ,CAAC,EAAE,EAC5DI,EAAUC,GACdH,EACAX,EAAK,SAASD,EAASU,CAAQ,CACjC,EACA,MAAMD,EAAG,SAAS,UAAUI,EAASC,CAAO,CAC9C,CACF,CAEA,eAAeE,IAAO,CAEpB,IAAMR,EAAY,MAAMN,GAAS,CAAC,iBAAiB,CAAC,EACpD,MAAMK,GAAkBC,CAAS,CACnC,CAEA,IAAOS,GAAQC,GACZ,QAAQ,WAAW,EACnB,YAAY,qDAAqD,EACjE,OAAO,IAAM,CACZF,GAAK,EACF,KAAK,IAAM,CACVG,GAAO,QAAQ,mCAAmC,CACpD,CAAC,EACA,MAAOC,GAAU,CAChBD,GAAO,MAAM,qBAAqBC,EAAM,OAAO,EAAE,CACnD,CAAC,CACL,CAAC,EEzDH,OAAS,WAAAC,OAAe,YCAxB,OAAOC,OAAW,QAClB,OAAOC,OAAS,MCDhB,OAAS,cAAAC,GAAY,YAAAC,OAAgB,mBACrC,OAAS,UAAAC,MAAc,0DACvB,OAAS,aAAAC,OAAiB,aAI1B,IAAIC,EACJ,GAAI,CACFA,EAAMD,GAAU,CACd,QAAS,QAAQ,IAAI,EACrB,OAAQ,MACR,uBAAwB,CAC1B,CAAC,CACH,OAASE,EAAO,CACdH,EAAO,MAAM,6BAA6BG,CAAK,EAAE,EAEjDD,EAAM,CAAC,CACT,CAEO,IAAME,EAAN,MAAMC,CAAI,CACf,aAAa,eAAeC,EAAiB,CAC3C,GAAIA,IAAY,GACd,MAAO,GAET,GAAI,CAEF,OADoB,MAAMP,GAAS,aAAc,OAAO,GACrC,SAASO,CAAO,CACrC,OAASC,EAAK,CACZ,OAAAP,EAAO,MAAM,uBAAuBO,CAAG,EAAE,EAClC,EACT,CACF,CAEA,aAAa,mBAUX,CACA,GAAI,CAWF,IAAMC,EAAgB,MAAMN,EAAI,YAAY,EAC5CF,EAAO,KAAK,mBAAmBQ,EAAc,OAAO,EAAE,EAGtD,IAAIC,EAAe,OACfC,EAAc,GAKlB,GAFuB,CAAC,CAAC,QAAQ,IAAI,eAEjB,CAClBV,EAAO,KAAK,uCAAuC,EAEnDS,EAAe,QAAQ,IAAI,iBAAmB,OAE9C,GAAI,CAEF,MAAMP,EAAI,MAAM,SAAUO,CAAY,EACtCT,EAAO,KAAK,iCAAiCS,CAAY,EAAE,EAU3DC,EAAc,IAPI,MAAMR,EAAI,IAAI,CAC9B,aACA,UAAUO,CAAY,GACtB,MACF,CAAC,GAG0B,KAAK,CAAC,SACjCT,EAAO,KAAK,uBAAuBU,CAAW,EAAE,CAClD,OAASP,EAAO,CACdH,EAAO,KAAK,wCAAwCG,CAAK,EAAE,EAE3DO,EAAc,UAAUD,CAAY,SACpCT,EAAO,KAAK,iCAAiCU,CAAW,EAAE,CAC5D,CACF,KAAO,CAELV,EAAO,KAAK,8BAA8B,EAE1C,GAAI,CAEF,GAAI,CACF,MAAME,EAAI,IAAI,CAAC,YAAa,WAAY,UAAUO,CAAY,EAAE,CAAC,CACnE,MAAQ,CACNT,EAAO,KAAK,YAAYS,CAAY,cAAc,EAClD,MAAMP,EAAI,MAAM,SAAUO,CAAY,CACxC,CASAC,EAAc,IANI,MAAMR,EAAI,IAAI,CAC9B,aACA,UAAUO,CAAY,GACtBD,EAAc,OAChB,CAAC,GAE0B,KAAK,CAAC,SACjCR,EAAO,KAAK,sCAAsCU,CAAW,EAAE,CACjE,OAASP,EAAO,CACdH,EAAO,KAAK,yCAAyCG,CAAK,EAAE,EAG5DH,EAAO,KAAK,0CAA0C,EACtDU,EAAc,gBACdV,EAAO,KAAK,gCAAgCU,CAAW,EAAE,CAC3D,CACF,CAGA,OAAO,MAAML,EAAI,oBAAoBK,CAAW,CAClD,OAASH,EAAK,CACZ,OAAAP,EAAO,MAAM,kCAAkCO,CAAG,EAAE,EAC7C,CAAC,CACV,CACF,CAGA,aAAa,oBAAoBG,EAU/B,CAqBA,IAAMC,GARmB,MAAMT,EAAI,IAAI,CACrC,MACA,iEACA,eACAQ,CACF,CAAC,GAIE,MAAM;AAAA,CAAgB,EACtB,OAAQE,GAAkBA,EAAM,KAAK,IAAM,EAAE,EAE1CC,EAA2B,CAAC,EAElC,QAAWD,KAASD,EAAc,CAChC,IAAMG,EAAQF,EAAM,MAAM;AAAA,CAAI,EAC9B,GAAIE,EAAM,QAAU,EAAG,CACrB,IAAMC,EAAOD,EAAM,CAAC,EACdE,EAAOF,EAAM,CAAC,EACdG,EAAcH,EAAM,CAAC,EACrBI,EAAUJ,EAAM,CAAC,EAIjBK,EAAYL,EACf,MAAM,CAAC,EACP,OAAQM,GAAiBA,IAAS,YAAY,EAC3CC,EAAOF,EAAU,OAAS,EAAIA,EAAU,KAAK,EAAE,EAAI,GAGnDG,EAAYP,EAAK,UAAU,EAAG,CAAC,EAG/BQ,EAAYL,EAAQ,MACxB,oEACF,EACIM,EAAOD,EAAYA,EAAU,CAAC,EAAI,UAGlCF,EAAK,SAAS,iBAAiB,IACjCG,EAAO,YAGTX,EAAQ,KAAK,CACX,KAAAW,EACA,KAAMF,EACN,KAAAN,EACA,QAAAE,EACA,KAAAG,EACA,QAAS,GAAGH,CAAO,GAAGG,EAAO;AAAA;AAAA,EAAOA,CAAI,GAAK,EAAE,GAC/C,YAAAJ,CACF,CAAC,CACH,CACF,CAEA,OAAOJ,CACT,CAGA,aAAa,eAAeP,EAAiBmB,EAAM,GAAM,CACvD,MAAMpB,EAAI,eAAeC,CAAO,EAAE,KAAK,MA
AOoB,GAAW,CACvD,GAAIA,EACF1B,EAAO,KAAK,GAAGM,CAAO,iBAAiB,MAEvC,IAAI,CACF,MAAMR,GAAW,aAAc;AAAA,EAAKQ,CAAO,EAAE,EACzCmB,GACFzB,EAAO,QAAQ,GAAGM,CAAO,sBAAsB,CAEnD,OAASC,EAAK,CACZP,EAAO,MAAMO,CAAG,CAClB,CAEJ,CAAC,CACH,CAGA,aAAa,mBAAmBoB,EAAiB,CAC/C,GAAI,CACF,MAAMzB,EAAI,YAAYyB,CAAK,EAC3B3B,EAAO,QAAQ,GAAG2B,EAAM,KAAK,IAAI,CAAC,6BAA6B,CACjE,OAASpB,EAAK,CACZP,EAAO,MAAMO,CAAG,CAClB,CACF,CAEA,aAAa,aAAaqB,EAAoB,CAC5C,GAAI,CACF,MAAM1B,EAAI,oBAAoB0B,CAAU,EACxC5B,EAAO,QAAQ,2BAA2B4B,CAAU,SAAS,CAC/D,OAASrB,EAAK,CACZP,EAAO,MAAMO,CAAG,CAClB,CACF,CAEA,aAAa,kBAAkBsB,EAAiB,CAC9C,GAAI,CACF,MAAM3B,EAAI,IAAI,GAAG,EACjB,MAAMA,EAAI,OAAO2B,CAAO,EACxB7B,EAAO,QAAQ,2BAA2B6B,CAAO,EAAE,CACrD,OAAStB,EAAK,CACZP,EAAO,MAAMO,CAAG,CAClB,CACF,CACF,ECnQA,OAAOuB,MAAW,QAGX,IAAMC,GAAqB,GACrBC,GAAkB,IA8BxB,SAASC,EAAeC,EAAsB,CACnD,OAAQA,EAAM,CACZ,IAAK,WACH,OAAOJ,EAAM,KAAK,IAAII,CAAI,EAC5B,IAAK,OACH,OAAOJ,EAAM,KAAK,MAAMI,CAAI,EAC9B,IAAK,MACH,OAAOJ,EAAM,KAAK,MAAMI,CAAI,EAC9B,IAAK,OACH,OAAOJ,EAAM,KAAK,MAAMI,CAAI,EAC9B,IAAK,OACH,OAAOJ,EAAM,KAAK,KAAKI,CAAI,EAC7B,IAAK,QACH,OAAOJ,EAAM,KAAK,KAAKI,CAAI,EAC7B,IAAK,WACH,OAAOJ,EAAM,KAAK,KAAKI,CAAI,EAC7B,IAAK,OACH,OAAOJ,EAAM,KAAK,KAAKI,CAAI,EAC7B,IAAK,QACH,OAAOJ,EAAM,KAAK,KAAKI,CAAI,EAC7B,IAAK,KACH,OAAOJ,EAAM,KAAK,KAAKI,CAAI,EAC7B,IAAK,QACH,OAAOJ,EAAM,KAAK,KAAKI,CAAI,EAC7B,QACE,OAAOJ,EAAM,KAAK,MAAMI,CAAI,CAChC,CACF,CAKO,SAASC,GAAWC,EAAaC,EAA2B,CACjE,GAAI,CAACD,EACH,MAAO,GAIT,GAAIA,EAAI,QAAUC,EAChB,OAAOD,EAIT,IAAME,EAAQF,EAAI,MAAM,GAAG,EACvBG,EAAS,GACTC,EAAc,GAGlB,QAAWC,KAAQH,GAEZE,EAAcC,GAAM,OAASJ,GAAaG,EAAY,OAAS,IAClED,GAAU,GAAGC,EAAY,KAAK,CAAC;AAAA,EAC/BA,EAAc,IAEhBA,EAAc,GAAGA,CAAW,GAAGC,CAAI,IAIrC,OAAID,EAAY,OAAS,IACvBD,GAAUC,EAAY,KAAK,GAGtBD,CACT,CAKO,SAASG,GAAiBC,EAAgC,CAC/D,QAAWC,KAAUD,EAAY,CAC/B,QAAQ,IAAI,SAAI,OAAO,EAAE,CAAC,EAG1B,IAAME,EAAUV,GAAWS,EAAO,QAASb,EAAkB,EACvDe,EAAOX,GAAWS,EAAO,KAAMZ,EAAe,EAGpD,QAAQ,IAAIF,EAAM,KAAK,GAAGG,EAAeW,EAAO,IAAI,CAAC,EAAE,CAAC,EACxD,QAAQ,IACNd,EAAM,IAAI,GAAGc,EAAO,IAAI,MAAMA,EAAO,IAAI,MAAMA,EAAO,WAAW,EAAE,CACrE,EACA,QAAQ,IAAId,EAAM,KAAK,GAAGA,EAAM,MAAMe,CAAO,CAAC,EAAE,CAAC,EAG7CD,EAAO,MACT,QAAQ,IAAId,EAAM,IAAIgB,CAAI,CAAC,CAE/B,CACA,QAAQ,IAAI,SAAI,OAAO,EAAE,CAAC,EAC1B,QAAQ,IAAI;AAAA,CAAI,CAClB,CC5HA,OAAOC,MAAY,kBAMnB,eAAsBC,IAAuC,CAC3D,GAAI,CAEF,IAAMC,EAAQ,QAAQ,IAAI,aAE1B,GAAI,CAACA,EACH,MAAM,IAAI,MAAM,8CAA8C,EAIhE,GAAI,CAAC,QAAQ,IAAI,mBAAqB,CAAC,QAAQ,IAAI,kBACjD,MAAM,IAAI,MACR,gEACF,EAGF,IAAMC,EAAUH,EAAO,WAAWE,CAAK,EACjC,CAAE,QAAAE,CAAQ,EAAIJ,EAGpB,GAAI,CAACI,EAAQ,QAAQ,aACnB,MAAM,IAAI,MAAM,6CAA6C,EAG/D,GAAM,CAACC,EAAOC,CAAI,EAAI,QAAQ,IAAI,kBAAkB,MAAM,GAAG,EACvDC,EAAWH,EAAQ,QAAQ,aAAa,OAGxC,CAAE,KAAMI,CAAe,EAC3B,MAAML,EAAQ,KAAK,OAAO,kBAAkB,CAC1C,MAAAE,EACA,KAAAC,EACA,aAAcC,CAChB,CAAC,EAGH,OAAOC,EAAe,IAAKC,GAAUA,EAAM,IAAI,CACjD,OAASC,EAAO,CACd,MAAIA,aAAiB,MACb,IAAI,MAAM,kCAAkCA,EAAM,OAAO,EAAE,EAE7DA,CACR,CACF,CAOA,eAAsBC,GAAeF,EAA8B,CACjE,GAAI,CAEF,IAAMP,EAAQ,QAAQ,IAAI,aAE1B,GAAI,CAACA,EACH,MAAM,IAAI,MAAM,8CAA8C,EAIhE,GAAI,CAAC,QAAQ,IAAI,mBAAqB,CAAC,QAAQ,IAAI,kBACjD,MAAM,IAAI,MACR,gEACF,EAGF,IAAMC,EAAUH,EAAO,WAAWE,CAAK,EACjC,CAAE,QAAAE,CAAQ,EAAIJ,EAGpB,GAAI,CAACI,EAAQ,QAAQ,aACnB,MAAM,IAAI,MAAM,6CAA6C,EAG/D,GAAM,CAACC,EAAOC,CAAI,EAAI,QAAQ,IAAI,kBAAkB,MAAM,GAAG,EACvDC,EAAWH,EAAQ,QAAQ,aAAa,OAGxCQ,EAAgB,oBAAoBH,CAAK,GAGzCD,EAAiB,MAAMP,GAAkB,EAG/C,GAAIO,EAAe,SAASI,CAAa,EACvC,OAIF,IAAMC,EAAyBL,EAAe,OAC3CM,GACCA,EAAc,WAAW,kBAAkB,GAC3CA,IAAkBF,CACtB,EAGA,QAAWE,KAAiBD,EAC1B,MAAMV,EAAQ,KAAK,OAAO,YAAY,CACpC,MAAAE,EACA,KAAAC,EACA,aAAcC,EACd,KAAMO,CACR,CAAC,EAIH,MAAMX,EAAQ,KAAK,OAAO,UAAU,CAClC,MAAAE,EACA,KAAAC,EACA,aAAcC,EACd,OAAQ,CAACK,CAAa,CACxB,CAAC,EAED,MACF,OAASF,
EAAO,CACd,MAAIA,aAAiB,MACb,IAAI,MAAM,0BAA0BA,EAAM,OAAO,EAAE,EAErDA,CACR,CACF,CH/GA,eAAsBK,GACpBC,EAAQ,GACRC,EAAW,GACI,CACf,IAAMC,EAAUC,GAAI;AAAA,CAAuB,EAAE,MAAM,EAEnD,GAAI,CACF,IAAMC,EAAa,MAAMC,EAAI,kBAAkB,EAS/C,GANIL,GACFM,GAAiBF,CAAU,EAG7BF,EAAQ,QAAQ,2BAA2BE,EAAW,MAAM,EAAE,EAE1DA,EAAW,SAAW,EAAG,CAC3B,IAAMG,EAAcH,EAAW,IAAKI,GAAWA,EAAO,IAAI,EAEpDC,EADc,MAAM,KAAK,IAAI,IAAIF,CAAW,CAAC,EAEhD,IAAKG,GAASC,EAAeD,CAAI,CAAC,EAClC,KAAK,IAAI,EACZR,EAAQ,QAAQ,uBAAuBO,CAAc,EAAE,CACzD,MACEP,EAAQ,KACN;AAAA;AAAA,8DAGF,EAGED,GACF,MAAMW,GAAaR,EAAYF,CAAO,CAE1C,OAASW,EAAO,CACdX,EAAQ,KAAK,+BAA+B,EAC5C,QAAQ,MAAMW,CAAK,CACrB,CACF,CAOA,eAAeD,GACbR,EACAF,EACe,CACf,IAAMY,EAAmB,CACvB,WACA,OACA,MACA,OACA,OACA,QACA,WACA,OACA,QACA,KACA,OACF,EAEMC,EAAmBX,EACtB,IAAKI,GAAWA,EAAO,IAAI,EAC3B,OAAQE,GAASI,EAAiB,SAASJ,CAAI,CAAC,EAE/CM,EAAgB,KAChBC,EAAuB,OAAO,kBAElC,QAAWP,KAAQK,EAAkB,CACnC,IAAMG,EAAgBJ,EAAiB,QAAQJ,CAAI,EAC/CQ,EAAgBD,IAClBA,EAAuBC,EACvBF,EAAgBN,EAEpB,CAEA,GAAIM,EAAe,CACjB,IAAMG,EAAehB,GACnB,6CACF,EAAE,MAAM,EACR,GAAI,CAGF,IAFuB,MAAMiB,GAAkB,GAE5B,SAAS,oBAAoBJ,CAAa,EAAE,EAAG,CAChEG,EAAa,KACX,2BAA2BR,EAAeK,CAAa,CAAC,uCAC1D,EACA,MACF,CAEAG,EAAa,KAAO,oCACpB,MAAME,GAAeL,CAAa,EAClCG,EAAa,QACX,2BAA2BR,EAAeK,CAAa,CAAC,gCAC1D,CACF,OAASH,EAAgB,CACvB,IAAMS,EACJT,aAAiB,MAAQA,EAAM,QAAU,OAAOA,CAAK,EACvDM,EAAa,KAAKG,CAAY,CAChC,CACF,MACEpB,EAAQ,KACNqB,GAAM,OAAO,kDAAkD,CACjE,CAEJ,CDvHA,IAAOC,GAAQC,GACZ,QAAQ,eAAe,EACvB,MAAM,IAAI,EACV,OACC,kBACA,gEACF,EACC,OAAO,cAAe,mDAAmD,EACzE,YACC,8GACF,EACC,OAAO,MAAOC,GAAW,CACxB,MAAMC,GAAeD,EAAO,MAAOA,EAAO,QAAQ,CACpD,CAAC,EKhBH,OAAOE,OAAQ,UACf,OAAS,OAAAC,OAAW,aACpB,OAAOC,MAAW,QAClB,OAAS,WAAAC,OAAe,YACxB,OAAOC,OAAS,MAGhB,IAAOC,GAAQF,GACZ,QAAQ,YAAY,EACpB,OACC,sCACA,uCACA,uBACF,EACC,OACC,2BACA,0CACA,GACF,EACC,YACC,iIACF,EACC,OAAO,MAAOG,GAAW,CACxB,MAAMC,GAAkBD,CAAM,CAChC,CAAC,EAOGC,GAAoB,MAAOD,GAA0C,CACzE,GAAM,CAAE,UAAAE,EAAW,SAAAC,CAAS,EAAIH,EAE1BI,EAAiBN,GAAI,uBAAuB,EAAE,MAAM,EAE1D,GAAI,CACF,IAAMO,EAAkB,eAGlBC,EAAc,KAAK,MAAMZ,GAAG,aAAaW,EAAiB,MAAM,CAAC,EAGvED,EAAe,KAAO,mDAEtB,IAAMG,EAAiB,WAAWJ,CAAQ,GACpCK,EAAmBF,EAAY,KAAK,MAAM,GAAG,EAAE,CAAC,EAChDG,EAAc,GAAGP,CAAS,IAAIM,CAAgB,GAC9CE,EAAmB,MAAMC,GAC7BJ,EACAE,EACAL,CACF,EACMQ,EAAiB,GAAGL,CAAc,IAAIG,CAAgB,GAE5DJ,EAAY,KAAOG,EACnBH,EAAY,QAAUM,EAEtBR,EAAe,KAAO,kCAGtBV,GAAG,cACDW,EACA,GAAG,KAAK,UAAUC,EAAa,KAAM,CAAC,CAAC;AAAA,EACvC,MACF,EAEAF,EAAe,QACb,+BAA+BR,EAAM,MAAMgB,CAAc,CAAC,QAAQhB,EAAM,MAAMa,CAAW,CAAC,EAC5F,EAGA,QAAQ,KAAK,CAAC,CAChB,OAASI,EAAgB,CACvBT,EAAe,KAAK,kCAAkCS,CAAK,EAAE,EAC7D,QAAQ,KAAK,CAAC,CAChB,CACF,EAGMF,GAAsB,CAC1BJ,EACAE,EACAK,IAEO,IAAI,QAASC,GAAY,CAC9B,GAAI,CA6CF,IAASC,EAAT,SAAwBC,EAAqC,CAC3D,GAAIA,EAAI,aAAe,IAAK,CAE1BH,EAAQ,KACN,mCAAmClB,EAAM,IAAIqB,EAAI,UAAU,CAAC,2BAC9D,EACAF,EAAQ,CAAC,EACT,MACF,CAEAD,EAAQ,KAAO,oCACf,IAAII,EAAO,GACXD,EAAI,GAAG,OAASE,GAA2B,CACzCD,GAAQC,CACV,CAAC,EAEDF,EAAI,GAAG,MAAO,IAAM,CAClB,GAAI,CACF,IAAMG,EAAc,KAAK,MAAMF,CAAI,EAC7BG,EAAWD,EAAY,SACzB,OAAO,KAAKA,EAAY,QAAQ,EAChC,CAAC,EAELN,EAAQ,KAAO,qCAGf,IAAIQ,EAAe,GACbC,EAAe,IAAI,OAAO,IAAIhB,CAAc,YAAY,EAE9D,QAAWiB,KAAWH,EAAU,CAC9B,IAAMI,EAAQD,EAAQ,MAAMD,CAAY,EACxC,GAAIE,EAAO,CACT,IAAMC,EAAY,OAAO,SAASD,EAAM,CAAC,EAAG,EAAE,EAC9CH,EAAe,KAAK,IAAIA,EAAcI,CAAS,CACjD,CACF,CAGIJ,GAAgB,EAClBR,EAAQ,KACN,0BAA0BlB,EAAM,MAAM,GAAGW,CAAc,IAAIe,CAAY,EAAE,CAAC,qBAAqB1B,EAAM,MAAM,GAAGW,CAAc,IAAIe,EAAe,CAAC,EAAE,CAAC,EACrJ,EAEAR,EAAQ,KACN,iCAAiClB,EAAM,MAAMW,CAAc,CAAC,mBAAmBX,EAAM,MAAM,GAAGW,CAAc,IAAI,CAAC,EACnH,EAEFQ,EAAQO,EAAe,CAAC,CAC1B,OAAST,EAAO,CAEdC,EAAQ,KACN,0CAA0CD,aAAiB,MAAQA,EAAM,QAAU,eAAe,2BACpG,EACAE,EAAQ,CAAC,CACX,CACF,CAAC,CACH,EAxDS,IAAAC,IA3CT,IAAMW,EAAc,8BAA8BlB,
CAAW,GAEvDmB,EAAMjC,GACVgC,EACA,CACE,QAAS,CAAE,OAAQ,kBAAmB,CACxC,EACCV,GAAQ,CAEP,IACGA,EAAI,aAAe,KAAOA,EAAI,aAAe,MAC9CA,EAAI,QAAQ,SACZ,CAEAH,EAAQ,KAAK,yBAAyBG,EAAI,QAAQ,QAAQ,KAAK,EAC/D,GAAI,CACFtB,GACEsB,EAAI,QAAQ,SACZ,CAAE,QAAS,CAAE,OAAQ,kBAAmB,CAAE,EAC1CD,CACF,EACG,GAAG,QAAUa,GAAQ,CAEpBf,EAAQ,KACN,6BAA6Be,EAAI,OAAO,2BAC1C,EACAd,EAAQ,CAAC,CACX,CAAC,EACA,IAAI,CACT,OAASF,EAAO,CAEdC,EAAQ,KACN,4BAA4BD,aAAiB,MAAQA,EAAM,QAAU,eAAe,2BACtF,EACAE,EAAQ,CAAC,CACX,CACA,MACF,CAEAC,EAAeC,CAAG,CACpB,CACF,EA4DAW,EAAI,GAAG,QAAUC,GAAQ,CAEvBf,EAAQ,KAAK,kBAAkBe,EAAI,OAAO,2BAA2B,EACrEd,EAAQ,CAAC,CACX,CAAC,EAEDa,EAAI,IAAI,CACV,MAAgB,CAEdd,EAAQ,KACN,iEACF,EACAC,EAAQ,CAAC,CACX,CACF,CAAC,EC3MH,OAAOe,OAAU,YACjB,OAAS,iBAAAC,OAAqB,WAC9B,OAAS,WAAAC,OAAe,YACxB,OAAOC,OAAU,OAGjB,IAAMC,GAAaC,GAAc,YAAY,GAAG,EAC1CC,GAAaC,GAAK,QAAQA,GAAK,QAAQH,EAAU,EAAG,IAAI,EAEvDI,GAAQC,GACZ,QAAQ,MAAM,EACd,OAAO,cAAe,+BAA+B,EACrD,OAAO,wBAAyB,0BAA0B,EAC1D,OAAO,aAAc,yCAAyC,EAC9D,OAAO,gCAAiC,yBAAyB,EACjE,YAAY,uDAAuD,EACnE,OAAO,MAAOC,GAAW,CAOxB,IAAIC,EAAU,qBANKJ,GAAK,KACtBD,GACA,OACA,UACA,4BACF,CAC6C,IACvCM,EAAe,GAAG,QAAQ,IAAI,CAAC,uBAUrC,GARIF,EAAO,iBACTC,GAAW,eAGTD,EAAO,QACTC,GAAW,YAGTD,EAAO,MAAO,CAChB,IAAMG,EAAQ,MAAM,QAAQH,EAAO,KAAK,EACpCA,EAAO,MAAM,KAAK,GAAG,EACrBA,EAAO,MACXC,GAAW,aAAaE,CAAK,GAC/B,CAEAC,EAAMH,CAAO,EAETD,EAAO,MACT,MAAMK,GAAKH,CAAY,CAE3B,CAAC,EC9CH,OAAOI,OAAQ,mBACf,OAAOC,OAAU,YACjB,OAAS,WAAAC,OAAe,YACxB,OAAOC,OAAc,WACrB,OAAOC,OAAS,MCJhB,OAAOC,OAAQ,mBACf,OAAOC,OAAU,YACjB,OAAOC,OAAS,MAEhB,IAAMC,GAAa,CACjB,YAAa,kBACb,eAAgB,KAChB,cAAe,KACf,cAAe,OACf,WAAY,KACZ,YAAa,eACb,cAAe,KACf,WAAY,EACZ,oBAAqB,UACrB,MAAO,GACP,UAAW,GACX,cAAe,EACf,KAAM,GACN,aAAc,KACd,WAAY,KACZ,MAAO,KACP,oBAAqB,GACrB,SAAU,GACV,YAAa,GACb,OAAQ,KACR,WAAY,MACZ,aAAc,SACd,YAAa,UACb,gBAAiB,EACjB,qBAAsB,EACtB,IAAK,KACL,OAAQ,MACR,eAAgB,GAChB,SAAU,SACV,QAAS,KACT,YAAa,GACb,KAAM,CACJ,gCACA,4BACA,6BACA,iCACA,gCACA,6BACA,4BACA,6BACA,2BACA,+BACA,+BACA,6BACA,6BACA,6BACA,iCACA,6BACA,gCACA,2BACA,6BACA,6BACA,0BACA,2BACA,8BACA,8BACA,+BACA,gCACA,4BACA,2BACA,2BAGF,EACA,eAAgB,KAChB,eAAgB,KAChB,cAAe,KACf,UAAW,KACX,aAAc,GACd,UAAW,GACX,YAAa,KACb,WAAY,GACZ,iBAAkB,IACpB,EAEA,SAASC,GAAOC,EAAQ,CACtB,OAAO,OAAO,QAAQA,CAAM,EACzB,IAAI,CAAC,CAACC,EAAKC,CAAK,IACX,MAAM,QAAQA,CAAK,EACd,GAAGD,CAAG;AAAA,MAAUC,EAAM,KAAK;AAAA,KAAQ,CAAC,GAEzC,OAAOA,GAAU,UAAYA,IAAU,KAClC,GAAGD,CAAG;AAAA,EAAM,OAAO,QAAQC,CAAK,EACpC,IAAI,CAAC,CAACC,EAAGC,CAAC,IAAM,KAAKD,CAAC,KAAKC,CAAC,EAAE,EAC9B,KAAK;AAAA,CAAI,CAAC,GAER,GAAGH,CAAG,KAAKC,CAAK,EACxB,EACA,KAAK;AAAA,CAAI,CACd,CAEA,eAAsBG,GAAsCC,EAAY,CACtE,IAAMC,EAAUV,GAAI,uCAAuC,EAAE,MAAM,EAC7DW,EAAgBT,GAAOD,EAAU,EACjCW,EAAab,GAAK,KAAKU,EAAY,kCAAkC,EAE3E,GAAI,CACF,MAAMX,GAAG,UAAUc,EAAYD,EAAe,MAAM,EACpDD,EAAQ,QAAQ,yCAAyCE,CAAU,EAAE,CACvE,OAASC,EAAO,CACdH,EAAQ,KAAK,2CAA2C,EACxD,QAAQ,MAAMG,CAAK,CACrB,CACF,CC5GA,OAAOC,OAAQ,UAiBR,SAASC,GACdC,EACiB,CACjB,IAAMC,EAAmC,CAAC,EACpCC,EAA2B,CAAC,EAC9BC,EAAyB,CAAC,EACxBC,EAAkB,CAAC,EAGzB,QAAWC,KAAOL,EAChBC,EAASI,CAAG,EAAIL,EAAeK,CAAG,EAAE,UAAU,OAIhD,QAAWA,KAAOJ,EACZA,EAASI,CAAG,IAAM,GACpBD,EAAM,KAAKC,CAAG,EAIlB,KAAOD,EAAM,OAAS,GAAG,CACvBD,EAAe,CAAC,EAEhB,IAAMG,EAAcF,EAAM,OAC1B,QAAS,EAAI,EAAG,EAAIE,EAAa,IAAK,CACpC,IAAMC,EAAUH,EAAM,MAAM,EAC5BD,EAAa,KAAKI,CAAO,EAGzB,QAAWC,KAAaR,EAAeO,CAAO,EAAE,kBAC9CN,EAASO,CAAS,IAGdP,EAASO,CAAS,IAAM,GAC1BJ,EAAM,KAAKI,CAAS,CAG1B,CACAN,EAAQ,KAAKC,CAAY,CAC3B,CAGA,GAAID,EAAQ,KAAK,EAAE,SAAW,OAAO,KAAKF,CAAc,EAAE,OACxD,MAAM,IAAI,MAAM,+BAA+B,EAGjD,OAAOE,CACT,CAEA,SAASO,GAA0BC,EAA6B,CAC9D,OAAOZ,GAAG,YAAYY,CAAS,EAAE,OAAQC,GAASA,EAAK,SAAS,OAAO,CAAC,CAC1E,CAOA,eAAsBC,GACpBC,EACAC,EAA+B,CAAC,EACP,CACzB,QAAQ,IAAIA,CAAkB,EAC9B,I
AAId,EAAiC,CAAC,EAEhCe,EAAQN,GAA0BI,CAAiB,EAEzD,QAAWF,KAAQI,EAAO,CAExB,GAAIJ,IAAS,sBACX,SAGF,IAAMK,EAAWlB,GAAG,aAAa,GAAGe,CAAiB,IAAIF,CAAI,GAAI,OAAO,EAClEM,EAA2B,KAAK,MAAMD,CAAQ,EAE9CE,EAAcD,EAAK,KACnBE,EAAmB,OAAO,KAAKF,EAAK,gBAAgB,EACpDG,EAAkB,OAAO,KAAKH,EAAK,eAAe,EAClDI,EAAe,OAAO,KAAKJ,EAAK,YAAY,EAE7CjB,EAAekB,CAAW,IAC7BlB,EAAekB,CAAW,EAAI,CAAE,UAAW,CAAC,EAAG,kBAAmB,CAAC,CAAE,GAGvE,IAAMI,EAAkB,CACtB,GAAGH,EACH,GAAGC,EACH,GAAGC,CACL,EAEArB,EAAekB,CAAW,EAAE,UAAY,CAAC,GAAG,IAAI,IAAII,CAAe,CAAC,EAEpE,QAAWC,KAAcD,EAClBtB,EAAeuB,CAAU,IAC5BvB,EAAeuB,CAAU,EAAI,CAAE,UAAW,CAAC,EAAG,kBAAmB,CAAC,CAAE,GAGjEvB,EAAeuB,CAAU,EAAE,kBAAkB,SAASL,CAAW,GACpElB,EAAeuB,CAAU,EAAE,kBAAkB,KAAKL,CAAW,CAGnE,CAGA,GAAIJ,EAAmB,OAAQ,CAG7B,IAAMU,EAAmB,IAAI,IAG7B,OAAW,CAACnB,EAAKoB,CAAI,IAAK,OAAO,QAAQzB,CAAc,EACjDyB,EAAK,UAAU,KAAMC,GAAQZ,EAAmB,SAASY,CAAG,CAAC,GAC/DF,EAAiB,IAAInB,CAAG,EAK5B,QAAWsB,KAAUb,EACfd,EAAe2B,CAAM,GACvBH,EAAiB,IAAIG,CAAM,EAK/B,IAAMC,EAA0C,CAAC,EACjD,QAAWvB,KAAOmB,EAChBI,EAAwBvB,CAAG,EAAI,CAC7B,UAAWL,EAAeK,CAAG,EAAE,UAAU,OAAQqB,GAC/CF,EAAiB,IAAIE,CAAG,CAC1B,EACA,kBAAmB1B,EAAeK,CAAG,EAAE,kBAAkB,OAAQqB,GAC/DF,EAAiB,IAAIE,CAAG,CAC1B,CACF,EAGF1B,EAAiB4B,CACnB,MACE,QAAQ,IAAI,uDAAuD,EAIrE,OAAA9B,GAAG,cACD,GAAGe,CAAiB,uBACpB,KAAK,UAAUb,EAAgB,KAAM,CAAC,CACxC,EAEOA,CACT,CCtKA,OAAO6B,OAAQ,UACf,OAAOC,MAAU,YACjB,OAAOC,OAAa,eACpB,OAAS,iBAAAC,OAAqB,WAEvB,SAASC,IAAiB,CAC/B,IAAMC,EAAUL,GAAG,QAAQ,GAAKE,GAAQ,IAAI,MAAQA,GAAQ,IAAI,YAEhE,GAAI,CAACG,EACH,MAAM,IAAI,MAAM,yCAAyC,EAG3D,OAAOJ,EAAK,KAAKI,EAAS,OAAO,CACnC,CAEO,SAASC,MAAeC,EAAM,CACnC,OAAON,EAAK,KAAKG,GAAe,EAAG,GAAGG,CAAI,CAC5C,CAEO,SAASC,MAAeC,EAAc,CAC3C,IAAMC,EAAWP,GAAc,YAAY,GAAG,EACxCQ,EAAUV,EAAK,QAAQS,CAAQ,EAErC,OAAOT,EAAK,QAAQU,EAAS,GAAGF,CAAY,CAC9C,CHVA,IAAMG,GAAaC,GAAY,iBAAkB,QAAQ,EAEnDC,GAAaD,GAAY,iBAAkB,SAAS,EAc1D,IAAME,GAAiB,CACrB,mCACA,+BACA,gCACA,oCACA,mCACA,gCACA,+BACA,gCACA,8BACA,kCACA,kCACA,gCACA,gCACA,iCACA,gCACA,oCACA,gCACA,mCACA,8BACA,gCACA,gCACA,6BACA,8BACA,iCACA,iCACA,kCACA,mCACA,+BACA,8BACA,8BACF,EAEMC,GAAe,CACnB,GAAGD,GACH,iCACA,uCACA,qCACA,6BACA,oBACF,EAQA,eAAeE,GACbC,EACiB,CAEjB,GAAI,CACF,MAAMC,GAAG,MAAMC,GAAY,CAAE,UAAW,EAAK,CAAC,EAC9C,MAAMD,GAAG,MAAME,GAAY,CAAE,UAAW,EAAK,CAAC,CAChD,OAASC,EAAO,CACd,QAAQ,MAAM,iDAAkDA,CAAK,EACrE,QAAQ,KAAK,CAAC,CAChB,CAEA,IAAMC,EAAUC,GAAI,6BAA6B,EAAE,MAAM,EAGzDD,EAAQ,KAAO,yDACf,MAAME,GAAsCJ,EAAU,EAEtDE,EAAQ,KAAO,8CAIf,IAAMG,EAAqB,0BADRC,GAAY,SAAU,gBAAgB,CACM,cAAcC,GAAK,KAAKP,GAAY,kCAAkC,CAAC,GACtI,GAAI,CACF,MAAMQ,EAAMH,CAAkB,CAChC,OAASJ,EAAO,CACdC,EAAQ,KAAK,qCAAqC,EAClD,QAAQ,MAAMD,CAAK,EACnB,QAAQ,KAAK,CAAC,CAChB,CAEA,OAAAC,EAAQ,KAAO,yDACf,MAAMO,GAAqBV,GAAYF,CAAgB,EAEvDK,EAAQ,QAAQ,yCAAyC,EAElDK,GAAK,KAAKR,GAAY,qBAAqB,CACpD,CAEA,IAAMW,GAA+B,MACnCC,GACwB,CACxB,IAAMT,EAAUC,GAAI,4BAA4B,EAAE,MAAM,EAClDS,EAAiB,KAAK,MAC1B,MAAMd,GAAG,SAASa,EAAoB,OAAO,CAC/C,EAEAT,EAAQ,KAAO,gCACf,IAAMW,EAAUC,GAAsBF,CAAc,EACpD,OAAAV,EAAQ,QAAQ,0CAA0C,EAEnDW,CACT,EAIOE,GAAQC,GAAQ,QAAQ,OAAO,EAAE,OAAO,MAAOC,GAAW,CAC/D,IAAMC,EAAU,MAAMC,GAAS,OAAO,CACpC,CACE,KAAM,SACN,KAAM,cACN,QAAS,4CACT,QAAS,CACP,CACE,KAAM,qCACN,MAAO,eACT,CACF,EACA,QAAS,CAAC,eAAyB,CACrC,EAEA,CACE,KAAM,QACN,KAAM,cACN,QAAS,wCACT,KAAOD,GAAYA,EAAQ,cAAgB,gBAC3C,SAAWE,GACTA,EAAM,KAAK,IAAM,IAAM,+BAC3B,EAEA,CACE,KAAM,UACN,KAAM,cACN,QAAS,+CACT,QAAS,GACT,YAAcC,GACZA,EAAQ,uCAAyC,oBACnD,KAAOH,GAAYA,EAAQ,cAAgB,eAC7C,EAEA,CACE,KAAM,WACN,KAAM,sBACN,QACE,uEACF,QAASxB,GAAe,IAAK4B,IAAe,CAC1C,KAAMA,EAAU,QAAQ,qBAAsB,EAAE,EAChD,MAAOA,CACT,EAAE,EACF,KAAOJ,GACLA,EAAQ,cAAgB,iBACxBA,EAAQ,WACZ,CACF,CAAC,EAED,OAAQA,EAAQ,YAAa,CAC3B,IAAK,gBAA2B,CAE9B,IAAMhB,EAAUC,GAAI,sBAAsB,EAAE,MAAM,EAC5CQ,EAAqB,MAAMf,GAC
/BsB,EAAQ,mBACV,EAEAhB,EAAQ,KAAO,0CAIf,IAAMqB,GAFJ,MAAMb,GAA6BC,CAAkB,GAGpD,IACC,CAACa,EAAOC,IACN,SAASA,EAAQ,CAAC;AAAA,EAAKD,EAAM,IAAKE,GAAQ,OAAOA,EAAI,QAAQ,oBAAqB,gBAAgB,EAAE,QAAQ,qBAAsB,sBAAsB,CAAC,EAAE,EAAE,KAAK;AAAA,CAAI,CAAC,EAC3K,EACC,KAAK;AAAA;AAAA,CAAM,EAEd,QAAQ,IAAIH,CAAsB,EAElCrB,EAAQ,KAAO,8CAGf,IAAI,QAASyB,GAAY,WAAWA,EAAS,GAAI,CAAC,EAClDzB,EAAQ,QAAQ,2CAA2C,EAG3D,KACF,CAEA,QACE,QAAQ,MAAM,0BAA0B,CAE5C,CACF,CAAC,EI1ND,OAAS,WAAA0B,OAAe,YAGjB,IAAMC,GAAcC,GACxB,QAAQ,MAAM,EACd,YAAY,4BAA4B,EACxC,OAAO,YAAa,+CAAgD,EAAK,EACzE,OAAO,MAAOC,GAAY,CAErBA,EAAQ,KACV,MAAMC,EAAI,EAGZ,MAAMC,EAAK,CACb,CAAC,EjCEHC,GACG,KAAK,MAAM,EACX,QAAQC,EAAkB,CAAC,EAC3B,YAAY,8CAA8C,EAE7DD,GAAQ,YAAY,YAAaE,GAAW,CAAC,EAE7CF,GAAQ,MAAM",
6
+ "names": ["program", "figlet", "mind", "auroSplash_default", "fs", "path", "fileURLToPath", "debugLog", "message", "getPackageVersion", "__filename", "__dirname", "packagePath", "error", "program", "ora", "withBuildOptions", "command", "withServerOptions", "terser", "watch", "rmSync", "join", "ora", "rollup", "ora", "spawn", "ora", "shell", "command", "_args", "commandString", "spinner", "finalCommand", "finalArgs", "parts", "isWatchMode", "child", "commandOutput", "data", "output", "resolve", "reject", "code", "fs", "path", "Docs", "options", "outDir", "outFile", "manifestPath", "manifestContent", "error", "elements", "docsDir", "apiMarkdown", "apiFilename", "els", "module", "dec", "element", "includeTitle", "m", "properties", "attributes", "mergedData", "processedNames", "prop", "attr", "headers", "separator", "rows", "item", "value", "parameters", "param", "name", "data", "filteredData", "description", "p", "obj", "pathInput", "fallback", "parts", "current", "s", "cem", "cemSpinner", "ora", "shell", "error", "errorMessage", "docs", "docsSpinner", "Docs", "analyzeComponents", "cem", "docs", "Logger", "generateReadmeUrl", "processContentForFile", "templateFiller", "defaultDocsProcessorConfig", "pathFromCwd", "pathLike", "fileConfigs", "config", "processDocFiles", "fileConfig", "err", "runDefaultDocsBuild", "cleanupDist", "distPath", "join", "spinner", "ora", "rmSync", "error", "runBuildStep", "taskName", "taskFn", "successMsg", "failMsg", "result", "buildTypeDefinitions", "config", "outputConfig", "bundle", "rollup", "buildCombinedBundle", "mainConfig", "demoConfig", "mainBundle", "demoBundle", "generateDocs", "options", "sourceFiles", "outFile", "skipDocs", "skipSpinner", "analyzeComponents", "runDefaultDocsBuild", "basename", "join", "nodeResolve", "glob", "dts", "litScss", "path", "glob", "watchGlobs", "globs", "items", "item", "filename", "error", "DEFAULTS", "getPluginsConfig", "modulePaths", "options", "watchPatterns", "dedupe", "allModulePaths", "nodeResolve", "litScss", "join", "watchGlobs", "getMainBundleConfig", "watch", "input", "outputDir", "format", "getExternalConfig", "getWatcherConfig", "getDemoConfig", "globPattern", "ignorePattern", "glob", "file", "basename", "getDtsConfig", "dts", "watchOptions", "additional", "startDevServer", "hmrPlugin", "ora", "DEFAULT_CONFIG", "startDevelopmentServer", "options", "serverSpinner", "serverConfig", "context", "next", "server", "error", "path", "ora", "rollup", "buildInProgress", "builds", "MIN_BUILD_INTERVAL", "sourceEventPaths", "OUTPUT_PATHS", "isOutputFile", "filePath", "normalizedPath", "path", "outputPath", "error", "runBuildTask", "taskName", "taskFn", "task", "handleWatcherEvents", "watcher", "options", "onInitialBuildComplete", "isInitialBuild", "buildTasksResults", "scheduledTasksTimer", "bundleSpinner", "watchSpinner", "ora", "buildTasks", "dtsSpinner", "create_dts", "rollup", "getDtsConfig", "sourceFiles", "outFile", "skipDocs", "skipSpinner", "analyzeSpinner", "analyzeComponents", "docsSpinner", "generateDocs", "checkInitialBuildComplete", "schedulePostBundleTasks", "delay", "event", "inputs", "input", "setupWatchModeListeners", "closeSpinner", "runProductionBuild", "options", "mainBundleConfig", "getMainBundleConfig", "demoConfig", "getDemoConfig", "dtsConfig", "getDtsConfig", "terser", "generateDocs", "buildCombinedBundle", "buildTypeDefinitions", "setupWatchMode", "isDevMode", "watcher", "watch", "handleWatcherEvents", "startDevelopmentServer", "setupWatchModeListeners", "buildWithRollup", "cleanupDist", "error", 
"devCommand", "program", "withBuildOptions", "withServerOptions", "dev_default", "options", "build", "ora", "buildWithRollup", "error", "program", "ora", "buildCommand", "program", "withBuildOptions", "build_default", "options", "build", "ora", "buildWithRollup", "error", "exec", "path", "process", "fileURLToPath", "util", "program", "inquirer", "migrate_default", "program", "options", "filename", "fileURLToPath", "dirname", "path", "scriptPath", "execPromise", "util", "exec", "process", "inquirer", "shell", "process", "program", "readFile", "writeFile", "Logger", "fs", "Logger", "processContentForFile", "templateFiller", "REMOTE_TEMPLATE_BASE_URL", "BRANCH_BASE", "TARGET_BRANCH_TO_COPY", "CONFIG_TEMPLATE", "githubDirShape", "branchNameToRemoteUrl", "branchOrTag", "filePath", "filePathToRemoteInput", "outputPath", "remoteUrl", "removeDirectory", "dirPath", "error", "syncDotGithubDir", "rootDir", "githubPath", "fileConfigs", "missingFiles", "dir", "file", "inputPath", "fileConfig", "config", "errorMessage", "sync_default", "program", "Logger", "cwd", "process", "syncDotGithubDir", "codeownersPath", "codeownersFixed", "readFile", "writeFile", "fs", "path", "Logger", "program", "glob", "prepWcaCompatibleCode_default", "code", "sourcePath", "defaultTag", "className", "classDesc", "WAC_DIR", "path", "globPath", "sources", "source", "glob", "err", "createExtendsFile", "filePaths", "fs", "filePath", "resolvedPath", "fileContent", "newPath", "newCode", "prepWcaCompatibleCode_default", "main", "wca_setup_default", "program", "Logger", "error", "program", "chalk", "ora", "appendFile", "readFile", "Logger", "simpleGit", "git", "error", "Git", "_Git", "pattern", "err", "currentBranch", "targetBranch", "commitRange", "commitChunks", "chunk", "commits", "parts", "hash", "date", "author_name", "subject", "bodyLines", "line", "body", "shortHash", "typeMatch", "type", "log", "result", "files", "branchName", "message", "chalk", "MAX_SUBJECT_LENGTH", "MAX_BODY_LENGTH", "getColoredType", "type", "wrapString", "str", "maxLength", "words", "result", "currentLine", "word", "displayDebugView", "commitList", "commit", "subject", "body", "github", "getExistingLabels", "token", "octokit", "context", "owner", "repo", "prNumber", "existingLabels", "label", "error", "applyLabelToPR", "prefixedLabel", "existingSemanticLabels", "existingLabel", "analyzeCommits", "debug", "setLabel", "spinner", "ora", "commitList", "Git", "displayDebugView", "commitTypes", "commit", "formattedTypes", "type", "getColoredType", "handleLabels", "error", "validCommitTypes", "foundCommitTypes", "selectedLabel", "highestPriorityIndex", "priorityIndex", "labelSpinner", "getExistingLabels", "applyLabelToPR", "errorMessage", "chalk", "check_commits_default", "program", "option", "analyzeCommits", "fs", "get", "chalk", "program", "ora", "pr_release_default", "option", "updatePackageJson", "namespace", "prNumber", "packageSpinner", "packageJsonPath", "packageJson", "releaseVersion", "packageComponent", "packageName", "incrementVersion", "getIncrementVersion", "packageVersion", "error", "spinner", "resolve", "handleResponse", "res", "data", "chunk", "packageData", "versions", "maxIteration", "versionRegex", "version", "match", "iteration", "registryUrl", "req", "err", "path", "fileURLToPath", "program", "open", "__filename", "fileURLToPath", "cliRootDir", "path", "test_default", "program", "option", "command", "coveragePath", "files", "shell", "open", "fs", "path", "program", "inquirer", "ora", "fs", "path", "ora", "JsonConfig", "toYaml", "config", 
"key", "value", "k", "v", "createMultiGitterDependencyTreeConfig", "outputPath", "spinner", "configContent", "configPath", "error", "fs", "getBatchedUpdateOrder", "dependencyTree", "inDegree", "batches", "currentBatch", "queue", "pkg", "queueLength", "current", "dependent", "getJsonFilesFromDirectory", "directory", "file", "formatDependencyTree", "jsonFileDirectory", "targetDependencies", "files", "contents", "data", "packageName", "peerDependencies", "devDependencies", "dependencies", "allDependencies", "dependency", "relevantPackages", "node", "dep", "target", "_filteredDependencyTree", "os", "path", "process", "fileURLToPath", "getAuroHomeDir", "homeDir", "withHomeDir", "args", "fromCliRoot", "relativePath", "filename", "dirname", "CONFIG_DIR", "withHomeDir", "OUTPUT_DIR", "auroComponents", "auroPackages", "getOrCreateDependencyTree", "relevantPackages", "fs", "OUTPUT_DIR", "CONFIG_DIR", "error", "spinner", "ora", "createMultiGitterDependencyTreeConfig", "multiGitterCommand", "fromCliRoot", "path", "shell", "formatDependencyTree", "getDependencyBatchesFromTree", "dependencyTreePath", "dependencyTree", "batches", "getBatchedUpdateOrder", "agent_default", "program", "option", "answers", "inquirer", "input", "value", "component", "batchedUpdateOrderText", "batch", "index", "pkg", "resolve", "program", "docsCommand", "program", "options", "cem", "docs", "program", "getPackageVersion", "auroSplash_default"]
7
7
  }
@@ -1 +1 @@
1
- import c from"node:fs/promises";import m from"node:path";import v from"ora";import p from"node:path";import a from"node:process";import{fileURLToPath as j}from"node:url";function k(){let n=a.env.HOME||a.env.USERPROFILE;return p.join(n,".auro")}function u(...n){return p.join(k(),...n)}var d=u("run-migrations","outputs");async function w(){let n=await c.readFile(m.resolve(process.cwd(),"package.json"));await c.mkdir(d,{recursive:!0});let r=JSON.parse(n.toString("utf8")),l=["@aurodesignsystem","@alaskaairux"],e={name:r.name,peerDependencies:{},devDependencies:{},dependencies:{}};function t(o,h){let i={};return o&&Object.keys(o).forEach(s=>{s.includes(h)&&(i[s]=o[s])}),i}for(let o of l)e.peerDependencies={...e.peerDependencies,...t(r.peerDependencies,o)},e.devDependencies={...e.devDependencies,...t(r.devDependencies,o)},e.dependencies={...e.dependencies,...t(r.dependencies,o)};console.log(`${r.name} -> ${JSON.stringify(e,null,4)}`);let g=d,D=m.join(g,`${r.name.replace("@aurodesignsystem/","")}_deps.json`);await c.writeFile(D,JSON.stringify(e,null,4))}var f=v("Fetching Auro package dependencies...").start();w().then(()=>{f.succeed("Auro package dependencies have been written to the output file.")}).catch(n=>{f.fail("Error fetching Auro package dependencies:"),console.error(n)});export{w as getAuroPackageDependencies};
1
+ import c from"node:fs/promises";import u from"node:path";import w from"ora";import k from"node:os";import p from"node:path";import a from"node:process";import{fileURLToPath as A}from"node:url";function v(){let e=k.homedir()||a.env.HOME||a.env.USERPROFILE;if(!e)throw new Error("Unable to determine user home directory");return p.join(e,".auro")}function m(...e){return p.join(v(),...e)}var d=m("run-migrations","outputs");async function x(){let e=await c.readFile(u.resolve(process.cwd(),"package.json"));await c.mkdir(d,{recursive:!0});let o=JSON.parse(e.toString("utf8")),l=["@aurodesignsystem","@alaskaairux"],n={name:o.name,peerDependencies:{},devDependencies:{},dependencies:{}};function t(r,D){let i={};return r&&Object.keys(r).forEach(s=>{s.includes(D)&&(i[s]=r[s])}),i}for(let r of l)n.peerDependencies={...n.peerDependencies,...t(o.peerDependencies,r)},n.devDependencies={...n.devDependencies,...t(o.devDependencies,r)},n.dependencies={...n.dependencies,...t(o.dependencies,r)};console.log(`${o.name} -> ${JSON.stringify(n,null,4)}`);let g=d,h=u.join(g,`${o.name.replace("@aurodesignsystem/","")}_deps.json`);await c.writeFile(h,JSON.stringify(n,null,4))}var f=w("Fetching Auro package dependencies...").start();x().then(()=>{f.succeed("Auro package dependencies have been written to the output file.")}).catch(e=>{f.fail("Error fetching Auro package dependencies:"),console.error(e)});export{x as getAuroPackageDependencies};
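The only functional difference between the removed and added minified lines above is the home-directory lookup: the new build imports node:os, prefers os.homedir() over the HOME/USERPROFILE environment variables, and throws if no home directory can be resolved. Below is a readable, de-minified sketch of that change for reference; the identifier names (getAuroHomeDir, withHomeDir) are taken from the bundle's sourcemap and the sketch is illustrative, not the authored source. The surrounding dependency-collection logic is unchanged between the two versions.

// De-minified sketch of the changed helper (assumed names from the sourcemap).
// The commented line shows the 0.0.0-pr177.0 behaviour it replaces.
import os from "node:os";
import path from "node:path";
import process from "node:process";

export function getAuroHomeDir() {
  // Previously: const homeDir = process.env.HOME || process.env.USERPROFILE;
  const homeDir = os.homedir() || process.env.HOME || process.env.USERPROFILE;
  if (!homeDir) {
    throw new Error("Unable to determine user home directory");
  }
  return path.join(homeDir, ".auro");
}

export function withHomeDir(...args) {
  // Resolve paths such as the "run-migrations/outputs" directory under ~/.auro
  return path.join(getAuroHomeDir(), ...args);
}

The "names" array of the main bundle's sourcemap above also lists "os" alongside "getAuroHomeDir" and "withHomeDir", which is consistent with the same lookup change being applied in dist/auro-cli.js.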
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@aurodesignsystem-dev/auro-cli",
3
- "version": "0.0.0-pr177.0",
3
+ "version": "0.0.0-pr177.1",
4
4
  "description": "A cli tool to support the Auro Design System",
5
5
  "scripts": {
6
6
  "lint": "biome check --fix --no-errors-on-unmatched",