@sylphx/flow 0.0.4 → 0.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{chunk-n5gbcebk.js → chunk-02jj9ahn.js} +3 -3
- package/dist/{chunk-n5gbcebk.js.map → chunk-02jj9ahn.js.map} +1 -1
- package/dist/{chunk-t6aqpg7b.js → chunk-1ph5m7yh.js} +3 -3
- package/dist/{chunk-t6aqpg7b.js.map → chunk-1ph5m7yh.js.map} +1 -1
- package/dist/chunk-2cevqakq.js +3 -0
- package/dist/chunk-2cevqakq.js.map +10 -0
- package/dist/{chunk-0fz6p8ka.js → chunk-3nkp0s1j.js} +2 -2
- package/dist/{chunk-0fz6p8ka.js.map → chunk-3nkp0s1j.js.map} +1 -1
- package/dist/chunk-7wd1res1.js +3 -0
- package/dist/{chunk-e07nqvwm.js.map → chunk-7wd1res1.js.map} +2 -2
- package/dist/chunk-7wpv8zs7.js +4 -0
- package/dist/chunk-7wpv8zs7.js.map +11 -0
- package/dist/{chunk-5eyfa6f8.js → chunk-h9kssnyy.js} +2 -2
- package/dist/{chunk-5eyfa6f8.js.map → chunk-h9kssnyy.js.map} +1 -1
- package/dist/{chunk-wj8k6qd1.js → chunk-j08f9mnk.js} +2 -2
- package/dist/{chunk-wj8k6qd1.js.map → chunk-j08f9mnk.js.map} +1 -1
- package/dist/chunk-jgkkyjtr.js +4 -0
- package/dist/chunk-jgkkyjtr.js.map +10 -0
- package/dist/{chunk-6hhhwb03.js → chunk-pcm6twpw.js} +2 -2
- package/dist/{chunk-6hhhwb03.js.map → chunk-pcm6twpw.js.map} +1 -1
- package/dist/{chunk-hpef24m4.js → chunk-qb07mbyd.js} +3 -3
- package/dist/{chunk-hpef24m4.js.map → chunk-qb07mbyd.js.map} +1 -1
- package/dist/{chunk-e80fq5bq.js → chunk-x46tzzn9.js} +2 -2
- package/dist/{chunk-e80fq5bq.js.map → chunk-x46tzzn9.js.map} +1 -1
- package/dist/{chunk-8saardnr.js → chunk-y21bxgfy.js} +3 -3
- package/dist/{chunk-8saardnr.js.map → chunk-y21bxgfy.js.map} +1 -1
- package/dist/{chunk-n9kqfqp9.js → chunk-y6n0153m.js} +2 -2
- package/dist/{chunk-n9kqfqp9.js.map → chunk-y6n0153m.js.map} +1 -1
- package/dist/{chunk-sdjvmsk2.js → chunk-zn6bhyhc.js} +2 -2
- package/dist/{chunk-sdjvmsk2.js.map → chunk-zn6bhyhc.js.map} +1 -1
- package/dist/index.js +427 -296
- package/dist/index.js.map +143 -73
- package/package.json +6 -1
- package/dist/chunk-e07nqvwm.js +0 -3

package/dist/chunk-02jj9ahn.js

```diff
@@ -1,5 +1,5 @@
-import{
+import{B as X,E as M}from"./chunk-j08f9mnk.js";import"./chunk-x46tzzn9.js";import J from"node:fs/promises";import Y from"node:path";var F=".sylphx-flow/settings.json",G="1.0.0",K=(q=process.cwd())=>Y.join(q,F),L=async(q=process.cwd())=>{try{return await J.access(K(q)),!0}catch{return!1}},Q=async(q=process.cwd())=>{let z=K(q);return M(async()=>{let B=await J.readFile(z,"utf8");return JSON.parse(B)},(B)=>{if(B.code==="ENOENT")return Error("EMPTY_SETTINGS");return Error(`Failed to load settings: ${B.message}`)}).then((B)=>{if(B._tag==="Failure"&&B.error.message==="EMPTY_SETTINGS")return X({});return B})},Z=async(q,z=process.cwd())=>{let B=K(z);return M(async()=>{await J.mkdir(Y.dirname(B),{recursive:!0});let H={...q,version:q.version||G};await J.writeFile(B,`${JSON.stringify(H,null,2)}
 `,"utf8")},(H)=>Error(`Failed to save settings: ${H.message}`))},$=async(q,z=process.cwd())=>{let B=await Q(z);if(B._tag==="Failure")return B;let H={...B.value,...q};return Z(H,z)},O=async(q=process.cwd())=>{let z=await Q(q);return z._tag==="Success"?z.value.defaultTarget:void 0},W=async(q,z=process.cwd())=>$({defaultTarget:q},z);class k{cwd;constructor(q=process.cwd()){this.cwd=q;this.settingsPath=K(q)}async load(){let q=await Q(this.cwd);if(q._tag==="Failure")throw q.error;return q.value}async save(q){let z=await Z(q,this.cwd);if(z._tag==="Failure")throw z.error}async update(q){let z=await $(q,this.cwd);if(z._tag==="Failure")throw z.error}async getDefaultTarget(){return O(this.cwd)}async setDefaultTarget(q){let z=await W(q,this.cwd);if(z._tag==="Failure")throw z.error}async exists(){return L(this.cwd)}}var D=new k;export{$ as updateSettings,L as settingsExists,W as setDefaultTarget,Z as saveSettings,D as projectSettings,Q as loadSettings,K as getSettingsPath,O as getDefaultTarget,k as ProjectSettings};
-export{D as
+export{D as y};
 
-//# debugId=
+//# debugId=ADDFE133D120885A64756E2164756E21
```
package/dist/chunk-02jj9ahn.js.map

```diff
@@ -5,6 +5,6 @@
 "/**\n * Project settings manager - functional implementation\n * Pure functions for managing uncommitted project-specific settings\n */\n\nimport fs from 'node:fs/promises';\nimport path from 'node:path';\nimport { type Result, success, tryCatchAsync } from '../core/functional/result.js';\n\nexport interface ProjectSettings {\n  /** Default target for the project */\n  defaultTarget?: string;\n  /** Settings version for migration purposes */\n  version?: string;\n  /** How the CLI was invoked (for generating consistent commands) */\n  invocationMethod?: {\n    type: 'npm' | 'github' | 'bunx' | 'local-dev' | 'global' | 'unknown';\n    package?: string;\n    repo?: string;\n    script?: string;\n    command?: string;\n  };\n}\n\nconst SETTINGS_FILE = '.sylphx-flow/settings.json';\nconst CURRENT_VERSION = '1.0.0';\n\n/**\n * Get settings file path for a given working directory\n */\nexport const getSettingsPath = (cwd: string = process.cwd()): string =>\n  path.join(cwd, SETTINGS_FILE);\n\n/**\n * Check if settings file exists\n */\nexport const settingsExists = async (cwd: string = process.cwd()): Promise<boolean> => {\n  try {\n    await fs.access(getSettingsPath(cwd));\n    return true;\n  } catch {\n    return false;\n  }\n};\n\n/**\n * Load project settings from file\n * Returns Result type for explicit error handling\n */\nexport const loadSettings = async (\n  cwd: string = process.cwd()\n): Promise<Result<ProjectSettings, Error>> => {\n  const settingsPath = getSettingsPath(cwd);\n\n  return tryCatchAsync(\n    async () => {\n      const content = await fs.readFile(settingsPath, 'utf8');\n      return JSON.parse(content) as ProjectSettings;\n    },\n    (error: any) => {\n      // File not found is not an error - return empty settings\n      if (error.code === 'ENOENT') {\n        return new Error('EMPTY_SETTINGS');\n      }\n      return new Error(`Failed to load settings: ${error.message}`);\n    }\n  ).then((result) => {\n    // Convert EMPTY_SETTINGS error to success with empty object\n    if (result._tag === 'Failure' && result.error.message === 'EMPTY_SETTINGS') {\n      return success({});\n    }\n    return result;\n  });\n};\n\n/**\n * Save project settings to file\n * Returns Result type for explicit error handling\n */\nexport const saveSettings = async (\n  settings: ProjectSettings,\n  cwd: string = process.cwd()\n): Promise<Result<void, Error>> => {\n  const settingsPath = getSettingsPath(cwd);\n\n  return tryCatchAsync(\n    async () => {\n      // Ensure the directory exists\n      await fs.mkdir(path.dirname(settingsPath), { recursive: true });\n\n      // Add current version if not present\n      const settingsWithVersion = {\n        ...settings,\n        version: settings.version || CURRENT_VERSION,\n      };\n\n      // Write settings with proper formatting\n      await fs.writeFile(settingsPath, `${JSON.stringify(settingsWithVersion, null, 2)}\\n`, 'utf8');\n    },\n    (error: any) => new Error(`Failed to save settings: ${error.message}`)\n  );\n};\n\n/**\n * Update specific settings properties\n */\nexport const updateSettings = async (\n  updates: Partial<ProjectSettings>,\n  cwd: string = process.cwd()\n): Promise<Result<void, Error>> => {\n  const currentResult = await loadSettings(cwd);\n\n  if (currentResult._tag === 'Failure') {\n    return currentResult;\n  }\n\n  const newSettings = { ...currentResult.value, ...updates };\n  return saveSettings(newSettings, cwd);\n};\n\n/**\n * Get the default target from settings\n */\nexport const getDefaultTarget = async (\n  cwd: string = process.cwd()\n): Promise<string | undefined> => {\n  const result = await loadSettings(cwd);\n  return result._tag === 'Success' ? result.value.defaultTarget : undefined;\n};\n\n/**\n * Set the default target in settings\n */\nexport const setDefaultTarget = async (\n  target: string,\n  cwd: string = process.cwd()\n): Promise<Result<void, Error>> => updateSettings({ defaultTarget: target }, cwd);\n\n/**\n * Legacy class-based interface for backward compatibility\n * @deprecated Use functional exports instead (loadSettings, saveSettings, etc.)\n */\nexport class ProjectSettings {\n  constructor(private cwd: string = process.cwd()) {\n    this.settingsPath = getSettingsPath(cwd);\n  }\n\n  async load(): Promise<ProjectSettings> {\n    const result = await loadSettings(this.cwd);\n    if (result._tag === 'Failure') {\n      throw result.error;\n    }\n    return result.value;\n  }\n\n  async save(settings: ProjectSettings): Promise<void> {\n    const result = await saveSettings(settings, this.cwd);\n    if (result._tag === 'Failure') {\n      throw result.error;\n    }\n  }\n\n  async update(updates: Partial<ProjectSettings>): Promise<void> {\n    const result = await updateSettings(updates, this.cwd);\n    if (result._tag === 'Failure') {\n      throw result.error;\n    }\n  }\n\n  async getDefaultTarget(): Promise<string | undefined> {\n    return getDefaultTarget(this.cwd);\n  }\n\n  async setDefaultTarget(target: string): Promise<void> {\n    const result = await setDefaultTarget(target, this.cwd);\n    if (result._tag === 'Failure') {\n      throw result.error;\n    }\n  }\n\n  async exists(): Promise<boolean> {\n    return settingsExists(this.cwd);\n  }\n}\n\n/**\n * Singleton instance for backward compatibility\n * @deprecated Use functional exports with explicit cwd parameter\n */\nexport const projectSettings = new ProjectSettings();\n"
 ],
 "mappings": "iFAKA,0BACA,yBAkBA,IAAM,EAAgB,6BAChB,EAAkB,QAKX,EAAkB,CAAC,EAAc,QAAQ,IAAI,IACxD,EAAK,KAAK,EAAK,CAAa,EAKjB,EAAiB,MAAO,EAAc,QAAQ,IAAI,IAAwB,CACrF,GAAI,CAEF,OADA,MAAM,EAAG,OAAO,EAAgB,CAAG,CAAC,EAC7B,GACP,KAAM,CACN,MAAO,KAQE,EAAe,MAC1B,EAAc,QAAQ,IAAI,IACkB,CAC5C,IAAM,EAAe,EAAgB,CAAG,EAExC,OAAO,EACL,SAAY,CACV,IAAM,EAAU,MAAM,EAAG,SAAS,EAAc,MAAM,EACtD,OAAO,KAAK,MAAM,CAAO,GAE3B,CAAC,IAAe,CAEd,GAAI,EAAM,OAAS,SACjB,OAAW,MAAM,gBAAgB,EAEnC,OAAW,MAAM,4BAA4B,EAAM,SAAS,EAEhE,EAAE,KAAK,CAAC,IAAW,CAEjB,GAAI,EAAO,OAAS,WAAa,EAAO,MAAM,UAAY,iBACxD,OAAO,EAAQ,CAAC,CAAC,EAEnB,OAAO,EACR,GAOU,EAAe,MAC1B,EACA,EAAc,QAAQ,IAAI,IACO,CACjC,IAAM,EAAe,EAAgB,CAAG,EAExC,OAAO,EACL,SAAY,CAEV,MAAM,EAAG,MAAM,EAAK,QAAQ,CAAY,EAAG,CAAE,UAAW,EAAK,CAAC,EAG9D,IAAM,EAAsB,IACvB,EACH,QAAS,EAAS,SAAW,CAC/B,EAGA,MAAM,EAAG,UAAU,EAAc,GAAG,KAAK,UAAU,EAAqB,KAAM,CAAC;AAAA,EAAO,MAAM,GAE9F,CAAC,IAAmB,MAAM,4BAA4B,EAAM,SAAS,CACvE,GAMW,EAAiB,MAC5B,EACA,EAAc,QAAQ,IAAI,IACO,CACjC,IAAM,EAAgB,MAAM,EAAa,CAAG,EAE5C,GAAI,EAAc,OAAS,UACzB,OAAO,EAGT,IAAM,EAAc,IAAK,EAAc,SAAU,CAAQ,EACzD,OAAO,EAAa,EAAa,CAAG,GAMzB,EAAmB,MAC9B,EAAc,QAAQ,IAAI,IACM,CAChC,IAAM,EAAS,MAAM,EAAa,CAAG,EACrC,OAAO,EAAO,OAAS,UAAY,EAAO,MAAM,cAAgB,QAMrD,EAAmB,MAC9B,EACA,EAAc,QAAQ,IAAI,IACO,EAAe,CAAE,cAAe,CAAO,EAAG,CAAG,EAMzE,MAAM,CAAgB,CACP,IAApB,WAAW,CAAS,EAAc,QAAQ,IAAI,EAAG,CAA7B,WAClB,KAAK,aAAe,EAAgB,CAAG,OAGnC,KAAI,EAA6B,CACrC,IAAM,EAAS,MAAM,EAAa,KAAK,GAAG,EAC1C,GAAI,EAAO,OAAS,UAClB,MAAM,EAAO,MAEf,OAAO,EAAO,WAGV,KAAI,CAAC,EAA0C,CACnD,IAAM,EAAS,MAAM,EAAa,EAAU,KAAK,GAAG,EACpD,GAAI,EAAO,OAAS,UAClB,MAAM,EAAO,WAIX,OAAM,CAAC,EAAkD,CAC7D,IAAM,EAAS,MAAM,EAAe,EAAS,KAAK,GAAG,EACrD,GAAI,EAAO,OAAS,UAClB,MAAM,EAAO,WAIX,iBAAgB,EAAgC,CACpD,OAAO,EAAiB,KAAK,GAAG,OAG5B,iBAAgB,CAAC,EAA+B,CACpD,IAAM,EAAS,MAAM,EAAiB,EAAQ,KAAK,GAAG,EACtD,GAAI,EAAO,OAAS,UAClB,MAAM,EAAO,WAIX,OAAM,EAAqB,CAC/B,OAAO,EAAe,KAAK,GAAG,EAElC,CAMO,IAAM,EAAkB,IAAI",
-"debugId": "
+"debugId": "ADDFE133D120885A64756E2164756E21",
 "names": []
 }
```
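The rebuilt settings chunk replaces thrown exceptions with a `Result` type (`_tag: 'Success' | 'Failure'`), as the embedded `sourcesContent` above shows. A minimal sketch of consuming that API; the import path is hypothetical (these are internal exports of @sylphx/flow) and the `defaultTarget` value is only illustrative:

```ts
// Sketch only: loadSettings/updateSettings are internal exports,
// so this import path is illustrative; the Result shape is taken
// from the sourcesContent above.
import { loadSettings, updateSettings } from './settings.js';

const result = await loadSettings(); // Result<ProjectSettings, Error>
if (result._tag === 'Success') {
  // A missing .sylphx-flow/settings.json resolves to Success({}) rather than a failure.
  console.log(result.value.defaultTarget);
}

// Merges into the current settings and writes .sylphx-flow/settings.json.
await updateSettings({ defaultTarget: 'claude-code' }); // value is a made-up example
```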
package/dist/chunk-1ph5m7yh.js

```diff
@@ -1,4 +1,4 @@
-import"./chunk-
-export{q as
+import"./chunk-x46tzzn9.js";import{AutoTokenizer as j}from"@huggingface/transformers";class R{tokenizer;initialized=!1;modelPath;constructor(B={}){this.modelPath=B.modelPath||"./models/starcoder2"}async initialize(){if(this.initialized)return;try{this.tokenizer=await j.from_pretrained(this.modelPath),this.initialized=!0}catch(B){throw Error(`Tokenizer initialization failed: ${B.message}`)}}async tokenize(B){if(!this.initialized)await this.initialize();let H=Date.now();if(!B||B.trim().length===0)return{tokens:[],metadata:{totalTokens:0,vocabSize:49152,processingTime:Date.now()-H,averageConfidence:0},raw:{inputIds:[],decodedText:""}};try{let J=(await this.tokenizer(B)).input_ids.tolist()[0],O=await this.tokenizer.decode(J),U=await this.createDirectTokens(O,J),K=Date.now()-H;return{tokens:U,metadata:{totalTokens:U.length,vocabSize:49152,processingTime:K,averageConfidence:0.95},raw:{inputIds:J,decodedText:O}}}catch(G){throw Error(`Tokenization failed: ${G.message}`)}}async createDirectTokens(B,H){let G=[];for(let J=0;J<H.length;J++){let O=H[J];try{let K=(await this.tokenizer.decode([O],{skip_special_tokens:!0})).trim().toLowerCase();if(K.length>0)G.push({text:K,id:O,score:1,confidence:1,relevance:"high"})}catch(U){}}return G}async getTopTokens(B,H=20){return(await this.tokenize(B)).tokens.slice(0,H)}async getTechnicalTokens(B){return(await this.tokenize(B)).tokens}async decode(B){if(!this.initialized)throw Error("Tokenizer not initialized. Call initialize() first.");return await this.tokenizer.decode(B)}async encode(B){if(!this.initialized)throw Error("Tokenizer not initialized. Call initialize() first.");return(await this.tokenizer(B)).input_ids.tolist()[0]}}async function k(B,H){try{let G=await B.getAllCodebaseFiles();if(H){if(H.file_extensions&&H.file_extensions.length>0)G=G.filter((K)=>H.file_extensions?.some((W)=>K.path.endsWith(W)));if(H.path_filter)G=G.filter((K)=>K.path.includes(H.path_filter));if(H.exclude_paths&&H.exclude_paths.length>0)G=G.filter((K)=>!H.exclude_paths?.some((W)=>K.path.includes(W)))}if(G.length===0)return null;let J=[];for(let K of G){let W=await B.getTFIDFDocument(K.path);if(W){let _=await B.getTFIDFTerms(K.path),X=new Map,$=new Map;for(let[E,Y]of Object.entries(_))X.set(E,Y);let Z=W.rawTerms||{};for(let[E,Y]of Object.entries(Z))$.set(E,Y);J.push({uri:`file://${K.path}`,terms:X,rawTerms:$,magnitude:W.magnitude})}}if(J.length===0)return null;let O=await B.getIDFValues(),U=new Map;for(let[K,W]of Object.entries(O))U.set(K,W);return{documents:J,idf:U,totalDocuments:J.length,metadata:{generatedAt:new Date().toISOString(),version:"1.0.0"}}}catch(G){return console.error("[ERROR] Failed to build search index from database:",G),null}}function M(B){let H=Array.from(B.values()).reduce((J,O)=>J+O,0),G=new Map;for(let[J,O]of B.entries())G.set(J,O/H);return G}function z(B,H){let G=new Map;for(let O of B){let U=new Set(O.keys());for(let K of U)G.set(K,(G.get(K)||0)+1)}let J=new Map;for(let[O,U]of G.entries())J.set(O,Math.log(H/U));return J}function V(B,H){let G=new Map;for(let[J,O]of B.entries()){let U=H.get(J)||0;G.set(J,O*U)}return G}function w(B){let H=0;for(let G of B.values())H+=G*G;return Math.sqrt(H)}var Q=null,v=!1;async function D(){if(!Q)Q=new R({modelPath:"./models/starcoder2"});if(!v){let{log:B,error:H}=console;console.log=()=>{},console.error=()=>{};try{await Q.initialize(),v=!0}finally{console.log=B,console.error=H}}return Q}async function S(B){let G=await(await D()).tokenize(B),J=new Map;for(let O of G.tokens){let U=O.text.toLowerCase(),K=J.get(U)||0;J.set(U,K+O.score)}return J}async function P(B){let G=await(await D()).tokenize(B),J=new Map;for(let O of G.tokens){let U=O.text.toLowerCase();if(!J.has(U)||O.score>0.8)J.set(U,O.text)}return Array.from(J.values())}async function q(B,H){let J=[];for(let K=0;K<B.length;K+=1){let W=B.slice(K,K+1),_=[];for(let X=0;X<W.length;X++){let $=W[X],Z=$.uri.split("/").pop()||$.uri;H?.({current:K+X+1,total:B.length,fileName:Z,status:"processing"});try{let E=await S($.content);_.push({uri:$.uri,terms:E}),H?.({current:K+X+1,total:B.length,fileName:Z,status:"completed"})}catch(E){_.push({uri:$.uri,terms:new Map}),H?.({current:K+X+1,total:B.length,fileName:Z,status:"skipped"})}}J.push(..._)}let O=z(J.map((K)=>K.terms),B.length);return{documents:J.map((K)=>{let W=M(K.terms),_=V(W,O),X=w(_);return{uri:K.uri,terms:_,rawTerms:K.terms,magnitude:X}}),idf:O,totalDocuments:B.length,metadata:{generatedAt:new Date().toISOString(),version:"5.0.0",tokenizer:"AdvancedCodeTokenizer",features:["Industry-leading code understanding","Advanced technical term recognition","Optimized for code search","Simple and effective approach","No unnecessary complexity"]}}}function h(B,H){let G=0;for(let[O,U]of B.entries()){let K=H.terms.get(O)||0;G+=U*K}let J=w(B);if(J===0||H.magnitude===0)return 0;return G/(J*H.magnitude)}async function I(B,H){let G=await P(B),J=new Map;for(let O of G){let U=O.toLowerCase(),K=H.get(U)||0;if(K>0)J.set(U,K)}return J}async function y(B,H,G={}){let{limit:J=10,minScore:O=0,boostFactors:U={}}=G,{exactMatch:K=1.5,phraseMatch:W=2,technicalMatch:_=1.8,identifierMatch:X=1.3}=U,$=await I(B,H.idf),Z=(await P(B)).map((Y)=>Y.toLowerCase());return H.documents.map((Y)=>{let L=h($,Y),C=[];for(let N of Z)if(Y.rawTerms.has(N)){let A=K;if(b(N))A=Math.max(A,_);if(p(N))A=Math.max(A,X);L*=A,C.push(N)}if(C.length===Z.length&&Z.length>1)L*=W;if(Z.length>3&&C.length>=Z.length*0.7)L*=1.2;return{uri:Y.uri,score:L,matchedTerms:C}}).filter((Y)=>Y.score>=O).sort((Y,L)=>L.score-Y.score).slice(0,J)}function b(B){return[/\b[A-Z]{2,}\b/,/\b[A-Z][a-z]+(?:[A-Z][a-z]+)+\b/,/\b[a-z]+[A-Z][a-z]*\b/,/\b\w+(?:Dir|Config|File|Path|Data|Service|Manager|Handler)\b/,/\b(?:get|set|is|has|can|should|will|do)[A-Z]\w*\b/,/\b(?:http|https|json|xml|yaml|sql|api|url|uri)\b/].some((G)=>G.test(B))}function p(B){return/^[a-zA-Z][a-zA-Z0-9_]*$/.test(B)&&B.length>1}function T(B){let H={documents:B.documents.map((G)=>({uri:G.uri,terms:Array.from(G.terms.entries()),rawTerms:Array.from(G.rawTerms.entries()),magnitude:G.magnitude})),idf:Array.from(B.idf.entries()),totalDocuments:B.totalDocuments,metadata:B.metadata};return JSON.stringify(H,null,2)}function f(B){let H=JSON.parse(B);return{documents:H.documents.map((G)=>({uri:G.uri,terms:new Map(G.terms),rawTerms:new Map(G.rawTerms),magnitude:G.magnitude})),idf:new Map(H.idf),totalDocuments:H.totalDocuments,metadata:H.metadata}}export{T as serializeIndex,y as searchDocuments,I as processQuery,f as deserializeIndex,h as calculateCosineSimilarity,k as buildSearchIndexFromDB,q as buildSearchIndex};
+export{q as w,y as x};
 
-//# debugId=
+//# debugId=9F61091ED12D520B64756E2164756E21
```
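The new chunk bundles an `AdvancedCodeTokenizer` built on transformers.js. Reduced to its core pattern (mirroring the calls visible in the minified code and in the map below; the local `./models/starcoder2` path is the one hard-coded in the source):

```ts
import { AutoTokenizer } from '@huggingface/transformers';

// Load the StarCoder2 tokenizer from the local model directory used by the source.
const tokenizer = await AutoTokenizer.from_pretrained('./models/starcoder2');

// Encode, then decode, exactly as the bundled class does.
const ids: number[] = (await tokenizer('const x = 1;')).input_ids.tolist()[0];
const roundTrip = await tokenizer.decode(ids); // back to (approximately) the input text
```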
package/dist/chunk-1ph5m7yh.js.map

```diff
@@ -6,6 +6,6 @@
 "/**\n * TF-IDF (Term Frequency-Inverse Document Frequency) implementation\n * Used for ranking document relevance in semantic search\n */\n\nimport { AdvancedCodeTokenizer } from '../../utils/advanced-tokenizer.js';\nimport type { SeparatedMemoryStorage } from './separated-storage.js';\n\nexport interface DocumentVector {\n  uri: string;\n  terms: Map<string, number>; // term → TF-IDF score\n  rawTerms: Map<string, number>; // term → raw frequency\n  magnitude: number; // Vector magnitude for cosine similarity\n}\n\nexport interface SearchIndex {\n  documents: DocumentVector[];\n  idf: Map<string, number>; // term → IDF score\n  totalDocuments: number;\n  metadata: {\n    generatedAt: string;\n    version: string;\n  };\n}\n\n/**\n * Build search index from database (shared between CLI and MCP)\n */\nexport async function buildSearchIndexFromDB(\n  memoryStorage: SeparatedMemoryStorage,\n  filters?: {\n    file_extensions?: string[];\n    path_filter?: string;\n    exclude_paths?: string[];\n  }\n): Promise<SearchIndex | null> {\n  try {\n    // Get all files from database\n    let files = await memoryStorage.getAllCodebaseFiles();\n\n    // Apply filters\n    if (filters) {\n      if (filters.file_extensions && filters.file_extensions.length > 0) {\n        files = files.filter((file) =>\n          filters.file_extensions?.some((ext: string) => file.path.endsWith(ext))\n        );\n      }\n\n      if (filters.path_filter) {\n        files = files.filter((file) => file.path.includes(filters.path_filter!));\n      }\n\n      if (filters.exclude_paths && filters.exclude_paths.length > 0) {\n        files = files.filter(\n          (file) => !filters.exclude_paths?.some((exclude: string) => file.path.includes(exclude))\n        );\n      }\n    }\n\n    if (files.length === 0) {\n      return null;\n    }\n\n    // Build search documents - read TF-IDF terms directly from database\n    const documents = [];\n    for (const file of files) {\n      const tfidfDoc = await memoryStorage.getTFIDFDocument(file.path);\n      if (tfidfDoc) {\n        // Get TF-IDF terms from database (already calculated)\n        const tfidfTerms = await memoryStorage.getTFIDFTerms(file.path);\n        const terms = new Map<string, number>();\n        const rawTermsMap = new Map<string, number>();\n\n        // Use TF-IDF terms for search scoring\n        for (const [term, tfidfScore] of Object.entries(tfidfTerms)) {\n          terms.set(term, tfidfScore as number);\n        }\n\n        // Use rawTerms for reference\n        const rawTerms = tfidfDoc.rawTerms || {};\n        for (const [term, freq] of Object.entries(rawTerms)) {\n          rawTermsMap.set(term, freq as number);\n        }\n\n        documents.push({\n          uri: `file://${file.path}`,\n          terms,\n          rawTerms: rawTermsMap,\n          magnitude: tfidfDoc.magnitude,\n        });\n      }\n    }\n\n    if (documents.length === 0) {\n      return null;\n    }\n\n    // Get IDF values from database\n    const idfRecords = await memoryStorage.getIDFValues();\n    const idf = new Map<string, number>();\n    for (const [term, value] of Object.entries(idfRecords)) {\n      idf.set(term, value as number);\n    }\n\n    return {\n      documents,\n      idf,\n      totalDocuments: documents.length,\n      metadata: {\n        generatedAt: new Date().toISOString(),\n        version: '1.0.0',\n      },\n    };\n  } catch (error) {\n    console.error('[ERROR] Failed to build search index from database:', error);\n    return null;\n  }\n}\n\n/**\n * Calculate Term Frequency (TF)\n * TF = (number of times term appears in document) / (total terms in document)\n */\nfunction calculateTF(termFrequency: Map<string, number>): Map<string, number> {\n  const totalTerms = Array.from(termFrequency.values()).reduce((sum, freq) => sum + freq, 0);\n  const tf = new Map<string, number>();\n\n  for (const [term, freq] of termFrequency.entries()) {\n    tf.set(term, freq / totalTerms);\n  }\n\n  return tf;\n}\n\n/**\n * Calculate Inverse Document Frequency (IDF)\n * IDF = log(total documents / documents containing term)\n */\nfunction calculateIDF(\n  documents: Map<string, number>[],\n  totalDocuments: number\n): Map<string, number> {\n  const documentFrequency = new Map<string, number>();\n\n  // Count how many documents contain each term\n  for (const doc of documents) {\n    const uniqueTerms = new Set(doc.keys());\n    for (const term of uniqueTerms) {\n      documentFrequency.set(term, (documentFrequency.get(term) || 0) + 1);\n    }\n  }\n\n  // Calculate IDF for each term\n  const idf = new Map<string, number>();\n  for (const [term, docFreq] of documentFrequency.entries()) {\n    idf.set(term, Math.log(totalDocuments / docFreq));\n  }\n\n  return idf;\n}\n\n/**\n * Calculate TF-IDF scores for a document\n */\nfunction calculateTFIDF(tf: Map<string, number>, idf: Map<string, number>): Map<string, number> {\n  const tfidf = new Map<string, number>();\n\n  for (const [term, tfScore] of tf.entries()) {\n    const idfScore = idf.get(term) || 0;\n    tfidf.set(term, tfScore * idfScore);\n  }\n\n  return tfidf;\n}\n\n/**\n * Calculate vector magnitude for cosine similarity\n */\nfunction calculateMagnitude(vector: Map<string, number>): number {\n  let sum = 0;\n  for (const value of vector.values()) {\n    sum += value * value;\n  }\n  return Math.sqrt(sum);\n}\n\n// Global tokenizer instance for performance\nlet globalTokenizer: AdvancedCodeTokenizer | null = null;\nlet tokenizerInitialized = false;\n\n/**\n * Get or create the global tokenizer\n */\nasync function getTokenizer(): Promise<AdvancedCodeTokenizer> {\n  if (!globalTokenizer) {\n    globalTokenizer = new AdvancedCodeTokenizer({\n      modelPath: './models/starcoder2',\n    });\n  }\n\n  if (!tokenizerInitialized) {\n    // Silently initialize - no console output\n    const originalLog = console.log;\n    const originalError = console.error;\n    console.log = () => {}; // Temporarily silence console.log\n    console.error = () => {}; // Temporarily silence console.error\n    try {\n      await globalTokenizer.initialize();\n      tokenizerInitialized = true;\n    } finally {\n      console.log = originalLog; // Restore console.log\n      console.error = originalError; // Restore console.error\n    }\n  }\n\n  return globalTokenizer;\n}\n\n/**\n * Extract terms using our advanced tokenizer\n */\nasync function extractTerms(content: string): Promise<Map<string, number>> {\n  const tokenizer = await getTokenizer();\n  const result = await tokenizer.tokenize(content);\n  const terms = new Map<string, number>();\n\n  // Use token scores as TF weights\n  for (const token of result.tokens) {\n    const term = token.text.toLowerCase();\n    const currentScore = terms.get(term) || 0;\n    terms.set(term, currentScore + token.score);\n  }\n\n  return terms;\n}\n\n/**\n * Extract simple tokens for query processing\n */\nasync function extractQueryTokens(query: string): Promise<string[]> {\n  const tokenizer = await getTokenizer();\n  const result = await tokenizer.tokenize(query);\n\n  // Return unique tokens, sorted by score (highest first)\n  const uniqueTokens = new Map<string, string>();\n  for (const token of result.tokens) {\n    const lowerText = token.text.toLowerCase();\n    if (!uniqueTokens.has(lowerText) || token.score > 0.8) {\n      uniqueTokens.set(lowerText, token.text);\n    }\n  }\n\n  return Array.from(uniqueTokens.values());\n}\n\nexport interface BuildIndexProgress {\n  current: number;\n  total: number;\n  fileName: string;\n  status: 'processing' | 'completed' | 'skipped';\n}\n\n/**\n * Build TF-IDF search index from documents using our advanced tokenizer\n */\nexport async function buildSearchIndex(\n  documents: Array<{ uri: string; content: string }>,\n  onProgress?: (progress: BuildIndexProgress) => void\n): Promise<SearchIndex> {\n  // Process documents one by one to avoid hanging\n  const batchSize = 1; // Process 1 document at a time to avoid hanging\n  const documentTerms: Array<{ uri: string; terms: Map<string, number> }> = [];\n\n  for (let i = 0; i < documents.length; i += batchSize) {\n    const batch = documents.slice(i, i + batchSize);\n\n    // Process sequentially to avoid hanging\n    const batchResults = [];\n    for (let j = 0; j < batch.length; j++) {\n      const doc = batch[j];\n      const fileName = doc.uri.split('/').pop() || doc.uri;\n\n      // Report progress\n      onProgress?.({\n        current: i + j + 1,\n        total: documents.length,\n        fileName,\n        status: 'processing',\n      });\n\n      try {\n        const result = await extractTerms(doc.content);\n\n        batchResults.push({\n          uri: doc.uri,\n          terms: result,\n        });\n\n        // Report completion\n        onProgress?.({\n          current: i + j + 1,\n          total: documents.length,\n          fileName,\n          status: 'completed',\n        });\n      } catch (_error) {\n        batchResults.push({\n          uri: doc.uri,\n          terms: new Map<string, number>(),\n        });\n\n        // Report skip\n        onProgress?.({\n          current: i + j + 1,\n          total: documents.length,\n          fileName,\n          status: 'skipped',\n        });\n      }\n    }\n\n    documentTerms.push(...batchResults);\n  }\n\n  // Calculate IDF scores\n  const idf = calculateIDF(\n    documentTerms.map((d) => d.terms),\n    documents.length\n  );\n\n  // Calculate TF-IDF for each document\n  const documentVectors: DocumentVector[] = documentTerms.map((doc) => {\n    const tf = calculateTF(doc.terms);\n    const tfidf = calculateTFIDF(tf, idf);\n    const magnitude = calculateMagnitude(tfidf);\n\n    return {\n      uri: doc.uri,\n      terms: tfidf,\n      rawTerms: doc.terms,\n      magnitude,\n    };\n  });\n\n  return {\n    documents: documentVectors,\n    idf,\n    totalDocuments: documents.length,\n    metadata: {\n      generatedAt: new Date().toISOString(),\n      version: '5.0.0',\n      tokenizer: 'AdvancedCodeTokenizer',\n      features: [\n        'Industry-leading code understanding',\n        'Advanced technical term recognition',\n        'Optimized for code search',\n        'Simple and effective approach',\n        'No unnecessary complexity',\n      ],\n    },\n  };\n}\n\n/**\n * Calculate cosine similarity between query and document\n */\nexport function calculateCosineSimilarity(\n  queryVector: Map<string, number>,\n  docVector: DocumentVector\n): number {\n  let dotProduct = 0;\n\n  // Calculate dot product\n  for (const [term, queryScore] of queryVector.entries()) {\n    const docScore = docVector.terms.get(term) || 0;\n    dotProduct += queryScore * docScore;\n  }\n\n  // Calculate query magnitude\n  const queryMagnitude = calculateMagnitude(queryVector);\n\n  if (queryMagnitude === 0 || docVector.magnitude === 0) {\n    return 0;\n  }\n\n  return dotProduct / (queryMagnitude * docVector.magnitude);\n}\n\n/**\n * Process query into TF-IDF vector using database values\n */\nexport async function processQuery(\n  query: string,\n  idf: Map<string, number>\n): Promise<Map<string, number>> {\n  const terms = await extractQueryTokens(query);\n  const queryVector = new Map<string, number>();\n\n  // 為每個查詢詞使用 IDF 值(查詢本身無 TF-IDF,直接用 IDF)\n  for (const term of terms) {\n    const lowerTerm = term.toLowerCase();\n    const idfValue = idf.get(lowerTerm) || 0;\n\n    // 純粹用 IDF 值,完全信任 StarCoder2 嘅 tokenization\n    if (idfValue > 0) {\n      queryVector.set(lowerTerm, idfValue);\n    }\n  }\n\n  return queryVector;\n}\n\n/**\n * Search documents using TF-IDF and cosine similarity with Advanced Code Tokenizer\n */\nexport async function searchDocuments(\n  query: string,\n  index: SearchIndex,\n  options: {\n    limit?: number;\n    minScore?: number;\n    boostFactors?: {\n      exactMatch?: number; // Boost for exact term matches\n      phraseMatch?: number; // Boost for phrase matches\n      technicalMatch?: number; // Boost for technical term matches\n      identifierMatch?: number; // Boost for identifier matches\n    };\n  } = {}\n): Promise<Array<{ uri: string; score: number; matchedTerms: string[] }>> {\n  const { limit = 10, minScore = 0, boostFactors = {} } = options;\n  const {\n    exactMatch = 1.5,\n    phraseMatch = 2.0,\n    technicalMatch = 1.8,\n    identifierMatch = 1.3,\n  } = boostFactors;\n\n  // Process query using Advanced Code Tokenizer\n  const queryVector = await processQuery(query, index.idf);\n  const queryTokens = (await extractQueryTokens(query)).map((t) => t.toLowerCase());\n\n  // Calculate similarity for each document\n  const results = index.documents.map((doc) => {\n    let score = calculateCosineSimilarity(queryVector, doc);\n\n    // Boost for exact term matches with enhanced scoring\n    const matchedTerms: string[] = [];\n    for (const token of queryTokens) {\n      if (doc.rawTerms.has(token)) {\n        // Apply different boost factors based on term characteristics\n        let boostFactor = exactMatch;\n\n        // Additional boost for technical terms\n        if (isTechnicalTerm(token)) {\n          boostFactor = Math.max(boostFactor, technicalMatch);\n        }\n\n        // Additional boost for identifiers\n        if (isIdentifier(token)) {\n          boostFactor = Math.max(boostFactor, identifierMatch);\n        }\n\n        score *= boostFactor;\n        matchedTerms.push(token);\n      }\n    }\n\n    // Enhanced phrase match detection (all query terms appear in document)\n    if (matchedTerms.length === queryTokens.length && queryTokens.length > 1) {\n      score *= phraseMatch;\n    }\n\n    // Contextual relevance boost for longer queries\n    if (queryTokens.length > 3 && matchedTerms.length >= queryTokens.length * 0.7) {\n      score *= 1.2; // Boost for partial matches on complex queries\n    }\n\n    return {\n      uri: doc.uri,\n      score,\n      matchedTerms,\n    };\n  });\n\n  // Filter and sort\n  return results\n    .filter((result) => result.score >= minScore)\n    .sort((a, b) => b.score - a.score)\n    .slice(0, limit);\n}\n\n/**\n * Check if a term is likely a technical term\n */\nfunction isTechnicalTerm(term: string): boolean {\n  const technicalPatterns = [\n    /\\b[A-Z]{2,}\\b/, // Acronyms like HTTP, API, JSON\n    /\\b[A-Z][a-z]+(?:[A-Z][a-z]+)+\\b/, // PascalCase like ComponentName\n    /\\b[a-z]+[A-Z][a-z]*\\b/, // camelCase like functionName\n    /\\b\\w+(?:Dir|Config|File|Path|Data|Service|Manager|Handler)\\b/, // Common suffixes\n    /\\b(?:get|set|is|has|can|should|will|do)[A-Z]\\w*\\b/, // Common prefixes\n    /\\b(?:http|https|json|xml|yaml|sql|api|url|uri)\\b/, // Technical keywords\n  ];\n\n  return technicalPatterns.some((pattern) => pattern.test(term));\n}\n\n/**\n * Check if a term is likely an identifier\n */\nfunction isIdentifier(term: string): boolean {\n  // Identifiers typically contain letters and numbers, maybe underscores\n  return /^[a-zA-Z][a-zA-Z0-9_]*$/.test(term) && term.length > 1;\n}\n\n/**\n * Serialize search index to JSON\n */\nexport function serializeIndex(index: SearchIndex): string {\n  const serializable = {\n    documents: index.documents.map((doc) => ({\n      uri: doc.uri,\n      terms: Array.from(doc.terms.entries()),\n      rawTerms: Array.from(doc.rawTerms.entries()),\n      magnitude: doc.magnitude,\n    })),\n    idf: Array.from(index.idf.entries()),\n    totalDocuments: index.totalDocuments,\n    metadata: index.metadata,\n  };\n\n  return JSON.stringify(serializable, null, 2);\n}\n\n/**\n * Deserialize search index from JSON\n */\nexport function deserializeIndex(json: string): SearchIndex {\n  const data = JSON.parse(json);\n\n  return {\n    documents: data.documents.map(\n      (doc: {\n        uri: string;\n        terms: [string, number][];\n        rawTerms: [string, number][];\n        magnitude: number;\n      }) => ({\n        uri: doc.uri,\n        terms: new Map(doc.terms),\n        rawTerms: new Map(doc.rawTerms),\n        magnitude: doc.magnitude,\n      })\n    ),\n    idf: new Map(data.idf),\n    totalDocuments: data.totalDocuments,\n    metadata: data.metadata,\n  };\n}\n"
 ],
 "mappings": "4BAKA,wBAAS,kCA2BF,MAAM,CAAsB,CACzB,UACA,YAAc,GACd,UAER,WAAW,CACT,EAEI,CAAC,EACL,CACA,KAAK,UAAY,EAAQ,WAAa,2BAMlC,WAAU,EAAkB,CAChC,GAAI,KAAK,YACP,OAGF,GAAI,CACF,KAAK,UAAY,MAAM,EAAc,gBAAgB,KAAK,SAAS,EACnE,KAAK,YAAc,GACnB,MAAO,EAAO,CACd,MAAU,MAAM,oCAAoC,EAAM,SAAS,QAOjE,SAAQ,CAAC,EAAmD,CAChE,GAAI,CAAC,KAAK,YACR,MAAM,KAAK,WAAW,EAGxB,IAAM,EAAY,KAAK,IAAI,EAG3B,GAAI,CAAC,GAAW,EAAQ,KAAK,EAAE,SAAW,EACxC,MAAO,CACL,OAAQ,CAAC,EACT,SAAU,CACR,YAAa,EACb,UAAW,MACX,eAAgB,KAAK,IAAI,EAAI,EAC7B,kBAAmB,CACrB,EACA,IAAK,CACH,SAAU,CAAC,EACX,YAAa,EACf,CACF,EAGF,GAAI,CAGF,IAAM,GADU,MAAM,KAAK,UAAU,CAAO,GACnB,UAAU,OAAO,EAAE,GAGtC,EAAc,MAAM,KAAK,UAAU,OAAO,CAAQ,EAGlD,EAAS,MAAM,KAAK,mBAAmB,EAAa,CAAQ,EAE5D,EAAiB,KAAK,IAAI,EAAI,EAEpC,MAAO,CACL,SACA,SAAU,CACR,YAAa,EAAO,OACpB,UAAW,MACX,iBACA,kBAAmB,IACrB,EACA,IAAK,CACH,WACA,aACF,CACF,EACA,MAAO,EAAO,CACd,MAAU,MAAM,wBAAwB,EAAM,SAAS,QAQ7C,mBAAkB,CAC9B,EACA,EAC0B,CAC1B,IAAM,EAA0B,CAAC,EAIjC,QAAS,EAAI,EAAG,EAAI,EAAS,OAAQ,IAAK,CACxC,IAAM,EAAU,EAAS,GACzB,GAAI,CAGF,IAAM,GADY,MAAM,KAAK,UAAU,OAAO,CAAC,CAAO,EAAG,CAAE,oBAAqB,EAAK,CAAC,GAC5D,KAAK,EAAE,YAAY,EAG7C,GAAI,EAAQ,OAAS,EACnB,EAAO,KAAK,CACV,KAAM,EACN,GAAI,EACJ,MAAO,EACP,WAAY,EACZ,UAAW,MACb,CAAC,EAEH,MAAO,EAAQ,GAGnB,OAAO,OAMH,aAAY,CAAC,EAAiB,EAAQ,GAA8B,CAExE,OADe,MAAM,KAAK,SAAS,CAAO,GAC5B,OAAO,MAAM,EAAG,CAAK,OAM/B,mBAAkB,CAAC,EAA2C,CAElE,OADe,MAAM,KAAK,SAAS,CAAO,GAC5B,YAMV,OAAM,CAAC,EAAqC,CAChD,GAAI,CAAC,KAAK,YACR,MAAU,MAAM,qDAAqD,EAEvE,OAAO,MAAM,KAAK,UAAU,OAAO,CAAQ,OAMvC,OAAM,CAAC,EAAiC,CAC5C,GAAI,CAAC,KAAK,YACR,MAAU,MAAM,qDAAqD,EAGvE,OADe,MAAM,KAAK,UAAU,CAAI,GAC1B,UAAU,OAAO,EAAE,GAErC,CClKA,eAAsB,CAAsB,CAC1C,EACA,EAK6B,CAC7B,GAAI,CAEF,IAAI,EAAQ,MAAM,EAAc,oBAAoB,EAGpD,GAAI,EAAS,CACX,GAAI,EAAQ,iBAAmB,EAAQ,gBAAgB,OAAS,EAC9D,EAAQ,EAAM,OAAO,CAAC,IACpB,EAAQ,iBAAiB,KAAK,CAAC,IAAgB,EAAK,KAAK,SAAS,CAAG,CAAC,CACxE,EAGF,GAAI,EAAQ,YACV,EAAQ,EAAM,OAAO,CAAC,IAAS,EAAK,KAAK,SAAS,EAAQ,WAAY,CAAC,EAGzE,GAAI,EAAQ,eAAiB,EAAQ,cAAc,OAAS,EAC1D,EAAQ,EAAM,OACZ,CAAC,IAAS,CAAC,EAAQ,eAAe,KAAK,CAAC,IAAoB,EAAK,KAAK,SAAS,CAAO,CAAC,CACzF,EAIJ,GAAI,EAAM,SAAW,EACnB,OAAO,KAIT,IAAM,EAAY,CAAC,EACnB,QAAW,KAAQ,EAAO,CACxB,IAAM,EAAW,MAAM,EAAc,iBAAiB,EAAK,IAAI,EAC/D,GAAI,EAAU,CAEZ,IAAM,EAAa,MAAM,EAAc,cAAc,EAAK,IAAI,EACxD,EAAQ,IAAI,IACZ,EAAc,IAAI,IAGxB,QAAY,EAAM,KAAe,OAAO,QAAQ,CAAU,EACxD,EAAM,IAAI,EAAM,CAAoB,EAItC,IAAM,EAAW,EAAS,UAAY,CAAC,EACvC,QAAY,EAAM,KAAS,OAAO,QAAQ,CAAQ,EAChD,EAAY,IAAI,EAAM,CAAc,EAGtC,EAAU,KAAK,CACb,IAAK,UAAU,EAAK,OACpB,QACA,SAAU,EACV,UAAW,EAAS,SACtB,CAAC,GAIL,GAAI,EAAU,SAAW,EACvB,OAAO,KAIT,IAAM,EAAa,MAAM,EAAc,aAAa,EAC9C,EAAM,IAAI,IAChB,QAAY,EAAM,KAAU,OAAO,QAAQ,CAAU,EACnD,EAAI,IAAI,EAAM,CAAe,EAG/B,MAAO,CACL,YACA,MACA,eAAgB,EAAU,OAC1B,SAAU,CACR,YAAa,IAAI,KAAK,EAAE,YAAY,EACpC,QAAS,OACX,CACF,EACA,MAAO,EAAO,CAEd,OADA,QAAQ,MAAM,sDAAuD,CAAK,EACnE,MAQX,SAAS,CAAW,CAAC,EAAyD,CAC5E,IAAM,EAAa,MAAM,KAAK,EAAc,OAAO,CAAC,EAAE,OAAO,CAAC,EAAK,IAAS,EAAM,EAAM,CAAC,EACnF,EAAK,IAAI,IAEf,QAAY,EAAM,KAAS,EAAc,QAAQ,EAC/C,EAAG,IAAI,EAAM,EAAO,CAAU,EAGhC,OAAO,EAOT,SAAS,CAAY,CACnB,EACA,EACqB,CACrB,IAAM,EAAoB,IAAI,IAG9B,QAAW,KAAO,EAAW,CAC3B,IAAM,EAAc,IAAI,IAAI,EAAI,KAAK,CAAC,EACtC,QAAW,KAAQ,EACjB,EAAkB,IAAI,GAAO,EAAkB,IAAI,CAAI,GAAK,GAAK,CAAC,EAKtE,IAAM,EAAM,IAAI,IAChB,QAAY,EAAM,KAAY,EAAkB,QAAQ,EACtD,EAAI,IAAI,EAAM,KAAK,IAAI,EAAiB,CAAO,CAAC,EAGlD,OAAO,EAMT,SAAS,CAAc,CAAC,EAAyB,EAA+C,CAC9F,IAAM,EAAQ,IAAI,IAElB,QAAY,EAAM,KAAY,EAAG,QAAQ,EAAG,CAC1C,IAAM,EAAW,EAAI,IAAI,CAAI,GAAK,EAClC,EAAM,IAAI,EAAM,EAAU,CAAQ,EAGpC,OAAO,EAMT,SAAS,CAAkB,CAAC,EAAqC,CAC/D,IAAI,EAAM,EACV,QAAW,KAAS,EAAO,OAAO,EAChC,GAAO,EAAQ,EAEjB,OAAO,KAAK,KAAK,CAAG,EAItB,IAAI,EAAgD,KAChD,EAAuB,GAK3B,eAAe,CAAY,EAAmC,CAC5D,GAAI,CAAC,EACH,EAAkB,IAAI,EAAsB,CAC1C,UAAW,qBACb,CAAC,EAGH,GAAI,CAAC,EAAsB,CAEzB,IAA4B,IAAtB,EACwB,MAAxB,GAAgB,QACtB,QAAQ,IAAM,IAAM,GACpB,QAAQ,MAAQ,IAAM,GACtB,GAAI,CACF,MAAM,EAAgB,WAAW,EACjC,EAAuB,UACvB,CACA,QAAQ,IAAM,EACd,QAAQ,MAAQ,GAIpB,OAAO,EAMT,eAAe,CAAY,CAAC,EAA+C,CAEzE,IAAM,EAAS,MADG,MAAM,EAAa,GACN,SAAS,CAAO,EACzC,EAAQ,IAAI,IAGlB,QAAW,KAAS,EAAO,OAAQ,CACjC,IAAM,EAAO,EAAM,KAAK,YAAY,EAC9B,EAAe,EAAM,IAAI,CAAI,GAAK,EACxC,EAAM,IAAI,EAAM,EAAe,EAAM,KAAK,EAG5C,OAAO,EAMT,eAAe,CAAkB,CAAC,EAAkC,CAElE,IAAM,EAAS,MADG,MAAM,EAAa,GACN,SAAS,CAAK,EAGvC,EAAe,IAAI,IACzB,QAAW,KAAS,EAAO,OAAQ,CACjC,IAAM,EAAY,EAAM,KAAK,YAAY,EACzC,GAAI,CAAC,EAAa,IAAI,CAAS,GAAK,EAAM,MAAQ,IAChD,EAAa,IAAI,EAAW,EAAM,IAAI,EAI1C,OAAO,MAAM,KAAK,EAAa,OAAO,CAAC,EAazC,eAAsB,CAAgB,CACpC,EACA,EACsB,CAGtB,IAAM,EAAoE,CAAC,EAE3E,QAAS,EAAI,EAAG,EAAI,EAAU,OAAQ,GAHpB,EAGoC,CACpD,IAAM,EAAQ,EAAU,MAAM,EAAG,EAJjB,CAI8B,EAGxC,EAAe,CAAC,EACtB,QAAS,EAAI,EAAG,EAAI,EAAM,OAAQ,IAAK,CACrC,IAAM,EAAM,EAAM,GACZ,EAAW,EAAI,IAAI,MAAM,GAAG,EAAE,IAAI,GAAK,EAAI,IAGjD,IAAa,CACX,QAAS,EAAI,EAAI,EACjB,MAAO,EAAU,OACjB,WACA,OAAQ,YACV,CAAC,EAED,GAAI,CACF,IAAM,EAAS,MAAM,EAAa,EAAI,OAAO,EAE7C,EAAa,KAAK,CAChB,IAAK,EAAI,IACT,MAAO,CACT,CAAC,EAGD,IAAa,CACX,QAAS,EAAI,EAAI,EACjB,MAAO,EAAU,OACjB,WACA,OAAQ,WACV,CAAC,EACD,MAAO,EAAQ,CACf,EAAa,KAAK,CAChB,IAAK,EAAI,IACT,MAAO,IAAI,GACb,CAAC,EAGD,IAAa,CACX,QAAS,EAAI,EAAI,EACjB,MAAO,EAAU,OACjB,WACA,OAAQ,SACV,CAAC,GAIL,EAAc,KAAK,GAAG,CAAY,EAIpC,IAAM,EAAM,EACV,EAAc,IAAI,CAAC,IAAM,EAAE,KAAK,EAChC,EAAU,MACZ,EAgBA,MAAO,CACL,UAdwC,EAAc,IAAI,CAAC,IAAQ,CACnE,IAAM,EAAK,EAAY,EAAI,KAAK,EAC1B,EAAQ,EAAe,EAAI,CAAG,EAC9B,EAAY,EAAmB,CAAK,EAE1C,MAAO,CACL,IAAK,EAAI,IACT,MAAO,EACP,SAAU,EAAI,MACd,WACF,EACD,EAIC,MACA,eAAgB,EAAU,OAC1B,SAAU,CACR,YAAa,IAAI,KAAK,EAAE,YAAY,EACpC,QAAS,QACT,UAAW,wBACX,SAAU,CACR,sCACA,sCACA,4BACA,gCACA,2BACF,CACF,CACF,EAMK,SAAS,CAAyB,CACvC,EACA,EACQ,CACR,IAAI,EAAa,EAGjB,QAAY,EAAM,KAAe,EAAY,QAAQ,EAAG,CACtD,IAAM,EAAW,EAAU,MAAM,IAAI,CAAI,GAAK,EAC9C,GAAc,EAAa,EAI7B,IAAM,EAAiB,EAAmB,CAAW,EAErD,GAAI,IAAmB,GAAK,EAAU,YAAc,EAClD,MAAO,GAGT,OAAO,GAAc,EAAiB,EAAU,WAMlD,eAAsB,CAAY,CAChC,EACA,EAC8B,CAC9B,IAAM,EAAQ,MAAM,EAAmB,CAAK,EACtC,EAAc,IAAI,IAGxB,QAAW,KAAQ,EAAO,CACxB,IAAM,EAAY,EAAK,YAAY,EAC7B,EAAW,EAAI,IAAI,CAAS,GAAK,EAGvC,GAAI,EAAW,EACb,EAAY,IAAI,EAAW,CAAQ,EAIvC,OAAO,EAMT,eAAsB,CAAe,CACnC,EACA,EACA,EASI,CAAC,EACmE,CACxE,IAAQ,QAAQ,GAAI,WAAW,EAAG,eAAe,CAAC,GAAM,GAEtD,aAAa,IACb,cAAc,EACd,iBAAiB,IACjB,kBAAkB,KAChB,EAGE,EAAc,MAAM,EAAa,EAAO,EAAM,GAAG,EACjD,GAAe,MAAM,EAAmB,CAAK,GAAG,IAAI,CAAC,IAAM,EAAE,YAAY,CAAC,EA8ChF,OA3CgB,EAAM,UAAU,IAAI,CAAC,IAAQ,CAC3C,IAAI,EAAQ,EAA0B,EAAa,CAAG,EAGhD,EAAyB,CAAC,EAChC,QAAW,KAAS,EAClB,GAAI,EAAI,SAAS,IAAI,CAAK,EAAG,CAE3B,IAAI,EAAc,EAGlB,GAAI,EAAgB,CAAK,EACvB,EAAc,KAAK,IAAI,EAAa,CAAc,EAIpD,GAAI,EAAa,CAAK,EACpB,EAAc,KAAK,IAAI,EAAa,CAAe,EAGrD,GAAS,EACT,EAAa,KAAK,CAAK,EAK3B,GAAI,EAAa,SAAW,EAAY,QAAU,EAAY,OAAS,EACrE,GAAS,EAIX,GAAI,EAAY,OAAS,GAAK,EAAa,QAAU,EAAY,OAAS,IACxE,GAAS,IAGX,MAAO,CACL,IAAK,EAAI,IACT,QACA,cACF,EACD,EAIE,OAAO,CAAC,IAAW,EAAO,OAAS,CAAQ,EAC3C,KAAK,CAAC,EAAG,IAAM,EAAE,MAAQ,EAAE,KAAK,EAChC,MAAM,EAAG,CAAK,EAMnB,SAAS,CAAe,CAAC,EAAuB,CAU9C,MAT0B,CACxB,gBACA,kCACA,wBACA,+DACA,oDACA,kDACF,EAEyB,KAAK,CAAC,IAAY,EAAQ,KAAK,CAAI,CAAC,EAM/D,SAAS,CAAY,CAAC,EAAuB,CAE3C,MAAO,0BAA0B,KAAK,CAAI,GAAK,EAAK,OAAS,EAMxD,SAAS,CAAc,CAAC,EAA4B,CACzD,IAAM,EAAe,CACnB,UAAW,EAAM,UAAU,IAAI,CAAC,KAAS,CACvC,IAAK,EAAI,IACT,MAAO,MAAM,KAAK,EAAI,MAAM,QAAQ,CAAC,EACrC,SAAU,MAAM,KAAK,EAAI,SAAS,QAAQ,CAAC,EAC3C,UAAW,EAAI,SACjB,EAAE,EACF,IAAK,MAAM,KAAK,EAAM,IAAI,QAAQ,CAAC,EACnC,eAAgB,EAAM,eACtB,SAAU,EAAM,QAClB,EAEA,OAAO,KAAK,UAAU,EAAc,KAAM,CAAC,EAMtC,SAAS,CAAgB,CAAC,EAA2B,CAC1D,IAAM,EAAO,KAAK,MAAM,CAAI,EAE5B,MAAO,CACL,UAAW,EAAK,UAAU,IACxB,CAAC,KAKM,CACL,IAAK,EAAI,IACT,MAAO,IAAI,IAAI,EAAI,KAAK,EACxB,SAAU,IAAI,IAAI,EAAI,QAAQ,EAC9B,UAAW,EAAI,SACjB,EACF,EACA,IAAK,IAAI,IAAI,EAAK,GAAG,EACrB,eAAgB,EAAK,eACrB,SAAU,EAAK,QACjB",
-"debugId": "
+"debugId": "9F61091ED12D520B64756E2164756E21",
 "names": []
 }
```
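The map's `sourcesContent` documents the scoring scheme: TF = term frequency / total terms in the document, IDF = log(total documents / documents containing the term), and cosine similarity over the resulting vectors. A self-contained numeric illustration of that math (toy documents, not the package's tokenizer):

```ts
const docs: string[][] = [
  ['load', 'settings', 'json'],
  ['save', 'settings', 'file'],
  ['tokenize', 'code'],
];
const N = docs.length;

// Document frequency: in how many documents does each term occur?
const df = new Map<string, number>();
for (const doc of docs) {
  for (const term of new Set(doc)) df.set(term, (df.get(term) ?? 0) + 1);
}

// IDF = log(N / df): 'settings' appears in 2 of 3 docs → log(3/2) ≈ 0.405.
const idf = (term: string) => Math.log(N / (df.get(term) ?? N));

// TF-IDF of 'settings' in doc 0: TF = 1/3, so weight ≈ (1/3) × 0.405 ≈ 0.135.
console.log((1 / 3) * idf('settings'));
```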
package/dist/chunk-2cevqakq.js

```diff
@@ -0,0 +1,3 @@
+import{s as V}from"./chunk-7wpv8zs7.js";import{t as U}from"./chunk-jgkkyjtr.js";import{G as R}from"./chunk-x46tzzn9.js";var E=R((M,Q)=>{var{defineProperty:K,getOwnPropertyDescriptor:W,getOwnPropertyNames:X}=Object,Y=Object.prototype.hasOwnProperty,Z=(q,v)=>{for(var z in v)K(q,z,{get:v[z],enumerable:!0})},$=(q,v,z,F)=>{if(v&&typeof v==="object"||typeof v==="function"){for(let G of X(v))if(!Y.call(q,G)&&G!==z)K(q,G,{get:()=>v[G],enumerable:!(F=W(v,G))||F.enumerable})}return q},A=(q)=>$(K({},"__esModule",{value:!0}),q),L={};Z(L,{refreshToken:()=>D});Q.exports=A(L);var H=U(),B=V();async function D(){let{projectId:q,teamId:v}=(0,B.findProjectInfo)(),z=(0,B.loadToken)(q);if(!z||(0,B.isExpired)((0,B.getTokenPayload)(z.token))){let F=(0,B.getVercelCliToken)();if(!F)throw new H.VercelOidcTokenError("Failed to refresh OIDC token: login to vercel cli");if(!q)throw new H.VercelOidcTokenError("Failed to refresh OIDC token: project id not found");if(z=await(0,B.getVercelOidcToken)(F,q,v),!z)throw new H.VercelOidcTokenError("Failed to refresh OIDC token");(0,B.saveToken)(z,q)}process.env.VERCEL_OIDC_TOKEN=z.token;return}});export default E();
+
+//# debugId=0C13E322BB18836164756E2164756E21
```
package/dist/chunk-2cevqakq.js.map

```diff
@@ -0,0 +1,10 @@
+{
+"version": 3,
+"sources": ["../node_modules/@vercel/oidc/dist/token.js"],
+"sourcesContent": [
+"\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n  for (var name in all)\n    __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n  if (from && typeof from === \"object\" || typeof from === \"function\") {\n    for (let key of __getOwnPropNames(from))\n      if (!__hasOwnProp.call(to, key) && key !== except)\n        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n  }\n  return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\nvar token_exports = {};\n__export(token_exports, {\n  refreshToken: () => refreshToken\n});\nmodule.exports = __toCommonJS(token_exports);\nvar import_token_error = require(\"./token-error\");\nvar import_token_util = require(\"./token-util\");\nasync function refreshToken() {\n  const { projectId, teamId } = (0, import_token_util.findProjectInfo)();\n  let maybeToken = (0, import_token_util.loadToken)(projectId);\n  if (!maybeToken || (0, import_token_util.isExpired)((0, import_token_util.getTokenPayload)(maybeToken.token))) {\n    const authToken = (0, import_token_util.getVercelCliToken)();\n    if (!authToken) {\n      throw new import_token_error.VercelOidcTokenError(\n        \"Failed to refresh OIDC token: login to vercel cli\"\n      );\n    }\n    if (!projectId) {\n      throw new import_token_error.VercelOidcTokenError(\n        \"Failed to refresh OIDC token: project id not found\"\n      );\n    }\n    maybeToken = await (0, import_token_util.getVercelOidcToken)(authToken, projectId, teamId);\n    if (!maybeToken) {\n      throw new import_token_error.VercelOidcTokenError(\"Failed to refresh OIDC token\");\n    }\n    (0, import_token_util.saveToken)(maybeToken, projectId);\n  }\n  process.env.VERCEL_OIDC_TOKEN = maybeToken.token;\n  return;\n}\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n  refreshToken\n});\n"
+],
+"mappings": "oJACA,FAAuB,SAAnB,EAC0B,yBAA1B,EAC2B,oBAA3B,GADmB,OAEnB,EAAe,OAAO,UAAU,eAChC,EAAW,CAAC,EAAQ,IAAQ,CAC9B,QAAS,KAAQ,EACf,EAAU,EAAQ,EAAM,CAAE,IAAK,EAAI,GAAO,WAAY,EAAK,CAAC,GAE5D,EAAc,CAAC,EAAI,EAAM,EAAQ,IAAS,CAC5C,GAAI,GAAQ,OAAO,IAAS,UAAY,OAAO,IAAS,YACtD,QAAS,KAAO,EAAkB,CAAI,EACpC,GAAI,CAAC,EAAa,KAAK,EAAI,CAAG,GAAK,IAAQ,EACzC,EAAU,EAAI,EAAK,CAAE,IAAK,IAAM,EAAK,GAAM,WAAY,EAAE,EAAO,EAAiB,EAAM,CAAG,IAAM,EAAK,UAAW,CAAC,EAEvH,OAAO,GAEL,EAAe,CAAC,IAAQ,EAAY,EAAU,CAAC,EAAG,aAAc,CAAE,MAAO,EAAK,CAAC,EAAG,CAAG,EACrF,EAAgB,CAAC,EACrB,EAAS,EAAe,CACtB,aAAc,IAAM,CACtB,CAAC,EACD,EAAO,QAAU,EAAa,CAAa,EAC3C,IAAI,MACA,MACJ,eAAe,CAAY,EAAG,CAC5B,IAAQ,YAAW,WAAY,EAAG,EAAkB,iBAAiB,EACjE,GAAc,EAAG,EAAkB,WAAW,CAAS,EAC3D,GAAI,CAAC,IAAe,EAAG,EAAkB,YAAY,EAAG,EAAkB,iBAAiB,EAAW,KAAK,CAAC,EAAG,CAC7G,IAAM,GAAa,EAAG,EAAkB,mBAAmB,EAC3D,GAAI,CAAC,EACH,MAAM,IAAI,EAAmB,qBAC3B,mDACF,EAEF,GAAI,CAAC,EACH,MAAM,IAAI,EAAmB,qBAC3B,oDACF,EAGF,GADA,EAAa,MAAO,EAAG,EAAkB,oBAAoB,EAAW,EAAW,CAAM,EACrF,CAAC,EACH,MAAM,IAAI,EAAmB,qBAAqB,8BAA8B,GAEjF,EAAG,EAAkB,WAAW,EAAY,CAAS,EAExD,QAAQ,IAAI,kBAAoB,EAAW,MAC3C",
+"debugId": "0C13E322BB18836164756E2164756E21",
+"names": []
+}
```
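chunk-2cevqakq.js vendors `@vercel/oidc`'s `refreshToken`: load a cached OIDC token for the project, and if it is missing or expired, exchange the Vercel CLI auth token for a fresh one, save it, and export it via `process.env.VERCEL_OIDC_TOKEN`. The refresh request itself, extracted for readability from the vendored token-util source bundled later in this diff (an illustrative extraction, not a public API of @sylphx/flow; error handling trimmed):

```ts
// URL, method, and header shapes are taken directly from the vendored source.
async function fetchVercelOidcToken(cliToken: string, projectId: string, teamId?: string) {
  const url =
    `https://api.vercel.com/v1/projects/${projectId}/token?source=vercel-oidc-refresh` +
    (teamId ? `&teamId=${teamId}` : '');
  const res = await fetch(url, {
    method: 'POST',
    headers: { Authorization: `Bearer ${cliToken}` },
  });
  if (!res.ok) throw new Error(`Failed to refresh OIDC token: ${res.statusText}`);
  return (await res.json()) as { token: string };
}
```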
package/dist/chunk-3nkp0s1j.js

```diff
@@ -1,4 +1,4 @@
-import{
+import{j as G,l as H,p as z}from"./chunk-y21bxgfy.js";import"./chunk-x46tzzn9.js";import x from"node:fs/promises";import I from"node:path";async function M(){let k=[],q=await N();if(q)k.push(q);let v=await Q();if(v)k.push(v);return k.join(`
 
 ---
 
@@ -12,4 +12,4 @@ import{s as G,u as H,y as z}from"./chunk-8saardnr.js";import"./chunk-e80fq5bq.js
 
 ${q}`}export{M as loadRulesAndStyles,Y as enhanceAgentContent};
 
-//# debugId=
+//# debugId=5C22F9914C3ACEA764756E2164756E21
```
package/dist/chunk-3nkp0s1j.js.map

```diff
@@ -5,6 +5,6 @@
 "/**\n * Agent Enhancer - Append rules and output styles to agent content\n *\n * This module provides utilities to enhance agent files with:\n * - Rules (from assets/rules/core.md)\n * - Output Styles (from assets/output-styles/*.md)\n *\n * These are appended to agent content to ensure every agent has\n * access to the same rules and output styles without duplicating\n * them in CLAUDE.md or other system prompts.\n */\n\nimport fs from 'node:fs/promises';\nimport path from 'node:path';\nimport { getOutputStylesDir, getRulesDir } from './paths.js';\nimport { yamlUtils } from './target-utils.js';\n\n/**\n * Load and combine rules and output styles\n */\nexport async function loadRulesAndStyles(): Promise<string> {\n  const sections: string[] = [];\n\n  // Load rules\n  const rulesContent = await loadRules();\n  if (rulesContent) {\n    sections.push(rulesContent);\n  }\n\n  // Load output styles\n  const stylesContent = await loadOutputStyles();\n  if (stylesContent) {\n    sections.push(stylesContent);\n  }\n\n  return sections.join('\\n\\n---\\n\\n');\n}\n\n/**\n * Load rules from assets/rules/core.md\n */\nasync function loadRules(): Promise<string> {\n  try {\n    const rulesDir = getRulesDir();\n    const coreRulesPath = path.join(rulesDir, 'core.md');\n\n    const content = await fs.readFile(coreRulesPath, 'utf8');\n\n    // Strip YAML front matter\n    return await yamlUtils.stripFrontMatter(content);\n  } catch (_error) {\n    // If rules file doesn't exist, return empty string\n    return '';\n  }\n}\n\n/**\n * Load output styles from assets/output-styles/\n */\nasync function loadOutputStyles(): Promise<string> {\n  try {\n    const outputStylesDir = getOutputStylesDir();\n    const files = await fs.readdir(outputStylesDir);\n    const mdFiles = files.filter((f) => f.endsWith('.md'));\n\n    if (mdFiles.length === 0) {\n      return '';\n    }\n\n    const sections: string[] = [];\n\n    for (const file of mdFiles) {\n      const filePath = path.join(outputStylesDir, file);\n      const content = await fs.readFile(filePath, 'utf8');\n\n      // Strip YAML front matter\n      const stripped = await yamlUtils.stripFrontMatter(content);\n      sections.push(stripped);\n    }\n\n    return sections.join('\\n\\n');\n  } catch (_error) {\n    // If output styles directory doesn't exist, return empty string\n    return '';\n  }\n}\n\n/**\n * Enhance agent content by appending rules and output styles\n */\nexport async function enhanceAgentContent(agentContent: string): Promise<string> {\n  const rulesAndStyles = await loadRulesAndStyles();\n\n  if (!rulesAndStyles) {\n    return agentContent;\n  }\n\n  return `${agentContent}\\n\\n---\\n\\n# Rules and Output Styles\\n\\n${rulesAndStyles}`;\n}\n"
 ],
 "mappings": "wFAYA,0BACA,yBAOA,eAAsB,CAAkB,EAAoB,CAC1D,IAAM,EAAqB,CAAC,EAGtB,EAAe,MAAM,EAAU,EACrC,GAAI,EACF,EAAS,KAAK,CAAY,EAI5B,IAAM,EAAgB,MAAM,EAAiB,EAC7C,GAAI,EACF,EAAS,KAAK,CAAa,EAG7B,OAAO,EAAS,KAAK;AAAA;AAAA;AAAA;AAAA,CAAa,EAMpC,eAAe,CAAS,EAAoB,CAC1C,GAAI,CACF,IAAM,EAAW,EAAY,EACvB,EAAgB,EAAK,KAAK,EAAU,SAAS,EAE7C,EAAU,MAAM,EAAG,SAAS,EAAe,MAAM,EAGvD,OAAO,MAAM,EAAU,iBAAiB,CAAO,EAC/C,MAAO,EAAQ,CAEf,MAAO,IAOX,eAAe,CAAgB,EAAoB,CACjD,GAAI,CACF,IAAM,EAAkB,EAAmB,EAErC,GADQ,MAAM,EAAG,QAAQ,CAAe,GACxB,OAAO,CAAC,IAAM,EAAE,SAAS,KAAK,CAAC,EAErD,GAAI,EAAQ,SAAW,EACrB,MAAO,GAGT,IAAM,EAAqB,CAAC,EAE5B,QAAW,KAAQ,EAAS,CAC1B,IAAM,EAAW,EAAK,KAAK,EAAiB,CAAI,EAC1C,EAAU,MAAM,EAAG,SAAS,EAAU,MAAM,EAG5C,EAAW,MAAM,EAAU,iBAAiB,CAAO,EACzD,EAAS,KAAK,CAAQ,EAGxB,OAAO,EAAS,KAAK;AAAA;AAAA,CAAM,EAC3B,MAAO,EAAQ,CAEf,MAAO,IAOX,eAAsB,CAAmB,CAAC,EAAuC,CAC/E,IAAM,EAAiB,MAAM,EAAmB,EAEhD,GAAI,CAAC,EACH,OAAO,EAGT,MAAO,GAAG;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAAuD",
-"debugId": "
+"debugId": "5C22F9914C3ACEA764756E2164756E21",
 "names": []
 }
```
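Per the `sourcesContent` above, this chunk appends rules (assets/rules/core.md) and output styles (assets/output-styles/*.md) to agent content under a fixed `---` separator, which is why bare `---` and blank context lines appear in the minified hunk. The resulting shape, with illustrative strings only (the real content comes from the package's asset files):

```ts
const agentContent = '# Agent\nYou are a helpful agent.';
const rulesAndStyles = '## Core rules\n- Stay factual.'; // sections.join('\n\n---\n\n') in the source

const enhanced = `${agentContent}\n\n---\n\n# Rules and Output Styles\n\n${rulesAndStyles}`;
```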
package/dist/chunk-7wd1res1.js

```diff
@@ -0,0 +1,3 @@
+import{F as B,I as D}from"./chunk-x46tzzn9.js";var A=()=>{let q=process.argv,b=q[0],w=q[1];if(w.includes("/_npx/")&&w.includes("github"))return{type:"github",repo:"github:sylphxltd/flow"};if(w.includes("/_npx/")){let z=w.match(/@sylphx\/flow|@sylphxltd\/flow/);return{type:"npm",package:z?z[0]:"@sylphx/flow"}}if(b.includes("bun")&&!w.includes(process.cwd()))return{type:"bunx",package:"@sylphx/flow"};if(w.includes(process.cwd())){if(b.includes("bun"))return{type:"local-dev",script:"bun run dev"};return{type:"local-dev",script:"npm run dev"}}if(w.includes("node_modules/.bin")||w.includes("/bin/sylphx-flow"))return{type:"global",command:"sylphx-flow"};return{type:"unknown"}},E=(q)=>{let b=q||A();switch(b.type){case"npm":return`npx -y ${b.package}`;case"github":return`npx -y ${b.repo}`;case"bunx":return`bunx ${b.package}`;case"local-dev":return b.script;case"global":return b.command;case"unknown":return"npx -y @sylphx/flow"}},F=(q)=>{let b=q||A();switch(b.type){case"npm":return["-y",b.package,"mcp","start"];case"github":return["-y",b.repo,"mcp","start"];case"bunx":return[b.package,"mcp","start"];case"local-dev":return["./dist/index.js","mcp","start"];case"global":return[b.command,"mcp","start"];case"unknown":return["-y","@sylphx/flow","mcp","start"]}},G=(q)=>{switch((q||A()).type){case"npm":case"github":return"npx";case"bunx":return"bunx";case"local-dev":return process.execPath.includes("bun")?"bun":"node";case"global":case"unknown":return"npx"}},H=(q,b,w)=>{return`${E(w)} hook --type ${q} --target ${b}`},J=async(q)=>{let{updateSettings:b}=await import("./chunk-02jj9ahn.js");await b({invocationMethod:q})},K=async()=>{let{loadSettings:q}=await import("./chunk-02jj9ahn.js"),b=await q();if(b._tag==="Success")return b.value.invocationMethod;return};export{J as saveInvocationMethod,K as loadInvocationMethod,G as getMCPServerCommand,F as getMCPServerArgs,E as getCommandPrefix,H as generateHookCommand,A as detectInvocation};
+
+//# debugId=572E9DC3588E6C4B64756E2164756E21
```
|
@@ -4,7 +4,7 @@
|
|
|
4
4
|
"sourcesContent": [
|
|
5
5
|
"/**\n * CLI Invocation Detection\n * Detects how the CLI was invoked and generates appropriate commands\n *\n * Supports common invocation methods:\n * - npm/npx (most common)\n * - GitHub direct (for bleeding edge)\n * - Bun/bunx (emerging)\n * - Local development (bun/npm run dev)\n * - Global installation\n * - Unknown methods fallback to npm (safe default)\n */\n\n/**\n * Invocation method types\n */\nexport type InvocationMethod =\n | { type: 'npm'; package: string }\n | { type: 'github'; repo: string }\n | { type: 'bunx'; package: string }\n | { type: 'local-dev'; script: string }\n | { type: 'global'; command: string }\n | { type: 'unknown' };\n\n/**\n * Detect how the CLI was invoked\n * Examines process.argv to determine invocation method\n */\nexport const detectInvocation = (): InvocationMethod => {\n const argv = process.argv;\n const execPath = argv[0]; // node/bun executable path\n const scriptPath = argv[1]; // script being executed\n\n // Check if running via npx with github\n if (scriptPath.includes('/_npx/') && scriptPath.includes('github')) {\n return { type: 'github', repo: 'github:sylphxltd/flow' };\n }\n\n // Check if running via npx with npm package\n if (scriptPath.includes('/_npx/')) {\n // Extract package name from path\n const match = scriptPath.match(/@sylphx\\/flow|@sylphxltd\\/flow/);\n return { type: 'npm', package: match ? match[0] : '@sylphx/flow' };\n }\n\n // Check if running via bunx\n if (execPath.includes('bun') && !scriptPath.includes(process.cwd())) {\n return { type: 'bunx', package: '@sylphx/flow' };\n }\n\n // Check if running locally in development\n if (scriptPath.includes(process.cwd())) {\n // Check if using bun run dev or npm run dev\n if (execPath.includes('bun')) {\n return { type: 'local-dev', script: 'bun run dev' };\n }\n return { type: 'local-dev', script: 'npm run dev' };\n }\n\n // Check if globally installed\n if (scriptPath.includes('node_modules/.bin') || scriptPath.includes('/bin/sylphx-flow')) {\n return { type: 'global', command: 'sylphx-flow' };\n }\n\n return { type: 'unknown' };\n};\n\n/**\n * Generate command prefix based on invocation method\n * Used for generating hook commands, MCP configs, etc.\n */\nexport const getCommandPrefix = (method?: InvocationMethod): string => {\n const invocation = method || detectInvocation();\n\n switch (invocation.type) {\n case 'npm':\n return `npx -y ${invocation.package}`;\n\n case 'github':\n return `npx -y ${invocation.repo}`;\n\n case 'bunx':\n return `bunx ${invocation.package}`;\n\n case 'local-dev':\n return invocation.script;\n\n case 'global':\n return invocation.command;\n\n case 'unknown':\n // Fallback to npm package (safe default)\n return 'npx -y @sylphx/flow';\n }\n};\n\n/**\n * Generate MCP server args array based on invocation\n */\nexport const getMCPServerArgs = (method?: InvocationMethod): string[] => {\n const invocation = method || detectInvocation();\n\n switch (invocation.type) {\n case 'npm':\n return ['-y', invocation.package, 'mcp', 'start'];\n\n case 'github':\n return ['-y', invocation.repo, 'mcp', 'start'];\n\n case 'bunx':\n return [invocation.package, 'mcp', 'start'];\n\n case 'local-dev':\n // For local dev, use the built dist\n return ['./dist/index.js', 'mcp', 'start'];\n\n case 'global':\n return [invocation.command, 'mcp', 'start'];\n\n case 'unknown':\n // Fallback to npm\n return ['-y', '@sylphx/flow', 'mcp', 'start'];\n }\n};\n\n/**\n * Get MCP server command based on invocation\n */\nexport const getMCPServerCommand = (method?: InvocationMethod): 
string => {\n const invocation = method || detectInvocation();\n\n switch (invocation.type) {\n case 'npm':\n case 'github':\n return 'npx';\n\n case 'bunx':\n return 'bunx';\n\n case 'local-dev':\n // Use node or bun depending on what's running\n return process.execPath.includes('bun') ? 'bun' : 'node';\n\n case 'global':\n case 'unknown':\n return 'npx';\n }\n};\n\n/**\n * Generate hook command for specific hook type\n */\nexport const generateHookCommand = (\n hookType: 'session' | 'message' | 'notification',\n targetId: string,\n method?: InvocationMethod\n): string => {\n const prefix = getCommandPrefix(method);\n return `${prefix} hook --type ${hookType} --target ${targetId}`;\n};\n\n/**\n * Save invocation method to settings for future use\n */\nexport const saveInvocationMethod = async (method: InvocationMethod): Promise<void> => {\n const { updateSettings } = await import('./settings.js');\n await updateSettings({\n invocationMethod: method as any,\n });\n};\n\n/**\n * Load saved invocation method from settings\n */\nexport const loadInvocationMethod = async (): Promise<InvocationMethod | undefined> => {\n const { loadSettings } = await import('./settings.js');\n const result = await loadSettings();\n if (result._tag === 'Success') {\n return (result.value as any).invocationMethod;\n }\n return undefined;\n};\n"
|
|
6
6
|
],
|
|
7
|
-
"mappings": "
|
|
8
|
-
"debugId": "
|
|
7
|
+
"mappings": "+CA4BO,IAAM,EAAmB,IAAwB,CACtD,IAAM,EAAO,QAAQ,KACf,EAAW,EAAK,GAChB,EAAa,EAAK,GAGxB,GAAI,EAAW,SAAS,QAAQ,GAAK,EAAW,SAAS,QAAQ,EAC/D,MAAO,CAAE,KAAM,SAAU,KAAM,uBAAwB,EAIzD,GAAI,EAAW,SAAS,QAAQ,EAAG,CAEjC,IAAM,EAAQ,EAAW,MAAM,gCAAgC,EAC/D,MAAO,CAAE,KAAM,MAAO,QAAS,EAAQ,EAAM,GAAK,cAAe,EAInE,GAAI,EAAS,SAAS,KAAK,GAAK,CAAC,EAAW,SAAS,QAAQ,IAAI,CAAC,EAChE,MAAO,CAAE,KAAM,OAAQ,QAAS,cAAe,EAIjD,GAAI,EAAW,SAAS,QAAQ,IAAI,CAAC,EAAG,CAEtC,GAAI,EAAS,SAAS,KAAK,EACzB,MAAO,CAAE,KAAM,YAAa,OAAQ,aAAc,EAEpD,MAAO,CAAE,KAAM,YAAa,OAAQ,aAAc,EAIpD,GAAI,EAAW,SAAS,mBAAmB,GAAK,EAAW,SAAS,kBAAkB,EACpF,MAAO,CAAE,KAAM,SAAU,QAAS,aAAc,EAGlD,MAAO,CAAE,KAAM,SAAU,GAOd,EAAmB,CAAC,IAAsC,CACrE,IAAM,EAAa,GAAU,EAAiB,EAE9C,OAAQ,EAAW,UACZ,MACH,MAAO,UAAU,EAAW,cAEzB,SACH,MAAO,UAAU,EAAW,WAEzB,OACH,MAAO,QAAQ,EAAW,cAEvB,YACH,OAAO,EAAW,WAEf,SACH,OAAO,EAAW,YAEf,UAEH,MAAO,wBAOA,EAAmB,CAAC,IAAwC,CACvE,IAAM,EAAa,GAAU,EAAiB,EAE9C,OAAQ,EAAW,UACZ,MACH,MAAO,CAAC,KAAM,EAAW,QAAS,MAAO,OAAO,MAE7C,SACH,MAAO,CAAC,KAAM,EAAW,KAAM,MAAO,OAAO,MAE1C,OACH,MAAO,CAAC,EAAW,QAAS,MAAO,OAAO,MAEvC,YAEH,MAAO,CAAC,kBAAmB,MAAO,OAAO,MAEtC,SACH,MAAO,CAAC,EAAW,QAAS,MAAO,OAAO,MAEvC,UAEH,MAAO,CAAC,KAAM,eAAgB,MAAO,OAAO,IAOrC,EAAsB,CAAC,IAAsC,CAGxE,QAFmB,GAAU,EAAiB,GAE3B,UACZ,UACA,SACH,MAAO,UAEJ,OACH,MAAO,WAEJ,YAEH,OAAO,QAAQ,SAAS,SAAS,KAAK,EAAI,MAAQ,WAE/C,aACA,UACH,MAAO,QAOA,EAAsB,CACjC,EACA,EACA,IACW,CAEX,MAAO,GADQ,EAAiB,CAAM,iBACN,cAAqB,KAM1C,EAAuB,MAAO,IAA4C,CACrF,IAAQ,kBAAmB,KAAa,+BACxC,MAAM,EAAe,CACnB,iBAAkB,CACpB,CAAC,GAMU,EAAuB,SAAmD,CACrF,IAAQ,gBAAiB,KAAa,+BAChC,EAAS,MAAM,EAAa,EAClC,GAAI,EAAO,OAAS,UAClB,OAAQ,EAAO,MAAc,iBAE/B",
  8 | + "debugId": "572E9DC3588E6C4B64756E2164756E21",
9 9 | "names": []
10 10 | }
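The cli-invocation source embedded above is the substantive addition in this map. A minimal sketch of how its exports compose; the import path is hypothetical, while the function names, types, and outputs are taken from the source itself:

```ts
// Sketch; './cli-invocation.js' is a hypothetical path to the module above.
import {
  detectInvocation,
  generateHookCommand,
  getCommandPrefix,
  getMCPServerArgs,
  getMCPServerCommand,
} from './cli-invocation.js';

const method = detectInvocation();
// e.g. { type: 'npm', package: '@sylphx/flow' } when launched through npx

getCommandPrefix(method);
// "npx -y @sylphx/flow" for the npm case (also the 'unknown' fallback)

// MCP config entries pair the command with the args array:
const mcpServer = {
  command: getMCPServerCommand(method), // 'npx' | 'bunx' | 'bun' | 'node'
  args: getMCPServerArgs(method),       // e.g. ['-y', '@sylphx/flow', 'mcp', 'start']
};

generateHookCommand('session', 'claude-code', method);
// "npx -y @sylphx/flow hook --type session --target claude-code"
```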
@@ -0,0 +1,4 @@
  1 | +
import{t as J}from"./chunk-jgkkyjtr.js";import{G as C,I as W}from"./chunk-x46tzzn9.js";var V=C((zq,u)=>{var{create:x,defineProperty:X,getOwnPropertyDescriptor:D,getOwnPropertyNames:S,getPrototypeOf:I}=Object,M=Object.prototype.hasOwnProperty,y=(q,z)=>{for(var B in z)X(q,B,{get:z[B],enumerable:!0})},w=(q,z,B,F)=>{if(z&&typeof z==="object"||typeof z==="function"){for(let G of S(z))if(!M.call(q,G)&&G!==B)X(q,G,{get:()=>z[G],enumerable:!(F=D(z,G))||F.enumerable})}return q},v=(q,z,B)=>(B=q!=null?x(I(q)):{},w(z||!q||!q.__esModule?X(B,"default",{value:q,enumerable:!0}):B,q)),g=(q)=>w(X({},"__esModule",{value:!0}),q),R={};y(R,{findRootDir:()=>l,getUserDataDir:()=>P});u.exports=g(R);var Q=v(W("path")),h=v(W("fs")),$=v(W("os")),b=J();function l(){try{let q=process.cwd();while(q!==Q.default.dirname(q)){let z=Q.default.join(q,".vercel");if(h.default.existsSync(z))return q;q=Q.default.dirname(q)}}catch(q){throw new b.VercelOidcTokenError("Token refresh only supported in node server environments")}throw new b.VercelOidcTokenError("Unable to find root directory")}function P(){if(process.env.XDG_DATA_HOME)return process.env.XDG_DATA_HOME;switch($.default.platform()){case"darwin":return Q.default.join($.default.homedir(),"Library/Application Support");case"linux":return Q.default.join($.default.homedir(),".local/share");case"win32":if(process.env.LOCALAPPDATA)return process.env.LOCALAPPDATA;return null;default:return null}}});var qq=C((Bq,j)=>{var{create:c,defineProperty:Y,getOwnPropertyDescriptor:f,getOwnPropertyNames:k,getPrototypeOf:i}=Object,s=Object.prototype.hasOwnProperty,n=(q,z)=>{for(var B in z)Y(q,B,{get:z[B],enumerable:!0})},N=(q,z,B,F)=>{if(z&&typeof z==="object"||typeof z==="function"){for(let G of k(z))if(!s.call(q,G)&&G!==B)Y(q,G,{get:()=>z[G],enumerable:!(F=f(z,G))||F.enumerable})}return q},E=(q,z,B)=>(B=q!=null?c(i(q)):{},N(z||!q||!q.__esModule?Y(B,"default",{value:q,enumerable:!0}):B,q)),p=(q)=>N(Y({},"__esModule",{value:!0}),q),O={};n(O,{assertVercelOidcTokenResponse:()=>A,findProjectInfo:()=>m,getTokenPayload:()=>t,getVercelCliToken:()=>d,getVercelDataDir:()=>T,getVercelOidcToken:()=>a,isExpired:()=>o,loadToken:()=>r,saveToken:()=>_});j.exports=p(O);var L=E(W("path")),H=E(W("fs")),K=J(),Z=V();function T(){let z=(0,Z.getUserDataDir)();if(!z)return null;return L.join(z,"com.vercel.cli")}function d(){let q=T();if(!q)return null;let z=L.join(q,"auth.json");if(!H.existsSync(z))return null;let B=H.readFileSync(z,"utf8");if(!B)return null;return JSON.parse(B).token}async function a(q,z,B){try{let F=`https://api.vercel.com/v1/projects/${z}/token?source=vercel-oidc-refresh${B?`&teamId=${B}`:""}`,G=await fetch(F,{method:"POST",headers:{Authorization:`Bearer ${q}`}});if(!G.ok)throw new K.VercelOidcTokenError(`Failed to refresh OIDC token: ${G.statusText}`);let U=await G.json();return A(U),U}catch(F){throw new K.VercelOidcTokenError("Failed to refresh OIDC token",F)}}function A(q){if(!q||typeof q!=="object")throw TypeError("Expected an object");if(!("token"in q)||typeof q.token!=="string")throw TypeError("Expected a string-valued token property")}function m(){let q=(0,Z.findRootDir)();if(!q)throw new K.VercelOidcTokenError("Unable to find root directory");try{let z=L.join(q,".vercel","project.json");if(!H.existsSync(z))throw new K.VercelOidcTokenError("project.json not found");let B=JSON.parse(H.readFileSync(z,"utf8"));if(typeof B.projectId!=="string"&&typeof B.orgId!=="string")throw TypeError("Expected a string-valued projectId property");return{projectId:B.projectId,teamId:B.orgId}}catch(z){throw 
new K.VercelOidcTokenError("Unable to find project ID",z)}}function _(q,z){try{let B=(0,Z.getUserDataDir)();if(!B)throw new K.VercelOidcTokenError("Unable to find user data directory");let F=L.join(B,"com.vercel.token",`${z}.json`),G=JSON.stringify(q);H.mkdirSync(L.dirname(F),{mode:432,recursive:!0}),H.writeFileSync(F,G),H.chmodSync(F,432);return}catch(B){throw new K.VercelOidcTokenError("Failed to save token",B)}}function r(q){try{let z=(0,Z.getUserDataDir)();if(!z)return null;let B=L.join(z,"com.vercel.token",`${q}.json`);if(!H.existsSync(B))return null;let F=JSON.parse(H.readFileSync(B,"utf8"));return A(F),F}catch(z){throw new K.VercelOidcTokenError("Failed to load token",z)}}function t(q){let z=q.split(".");if(z.length!==3)throw new K.VercelOidcTokenError("Invalid token");let B=z[1].replace(/-/g,"+").replace(/_/g,"/"),F=B.padEnd(B.length+(4-B.length%4)%4,"=");return JSON.parse(Buffer.from(F,"base64").toString("utf8"))}var e=900000;function o(q){return q.exp*1000<Date.now()+e}});export default qq();
  2 | + export{qq as s};
  3 | +
  4 | + //# debugId=EA40A78D9849B6A864756E2164756E21
@@ -0,0 +1,11 @@
  1 | + {
  2 | + "version": 3,
  3 | + "sources": ["../node_modules/@vercel/oidc/dist/token-io.js", "../node_modules/@vercel/oidc/dist/token-util.js"],
  4 | + "sourcesContent": [
  5 | +
"\"use strict\";\nvar __create = Object.create;\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __getProtoOf = Object.getPrototypeOf;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(\n // If the importer is in node compatibility mode or this is not an ESM\n // file that has been converted to a CommonJS file using a Babel-\n // compatible transform (i.e. \"__esModule\" has not been set), then set\n // \"default\" to the CommonJS \"module.exports\" for node compatibility.\n isNodeMode || !mod || !mod.__esModule ? __defProp(target, \"default\", { value: mod, enumerable: true }) : target,\n mod\n));\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\nvar token_io_exports = {};\n__export(token_io_exports, {\n findRootDir: () => findRootDir,\n getUserDataDir: () => getUserDataDir\n});\nmodule.exports = __toCommonJS(token_io_exports);\nvar import_path = __toESM(require(\"path\"));\nvar import_fs = __toESM(require(\"fs\"));\nvar import_os = __toESM(require(\"os\"));\nvar import_token_error = require(\"./token-error\");\nfunction findRootDir() {\n try {\n let dir = process.cwd();\n while (dir !== import_path.default.dirname(dir)) {\n const pkgPath = import_path.default.join(dir, \".vercel\");\n if (import_fs.default.existsSync(pkgPath)) {\n return dir;\n }\n dir = import_path.default.dirname(dir);\n }\n } catch (e) {\n throw new import_token_error.VercelOidcTokenError(\n \"Token refresh only supported in node server environments\"\n );\n }\n throw new import_token_error.VercelOidcTokenError(\"Unable to find root directory\");\n}\nfunction getUserDataDir() {\n if (process.env.XDG_DATA_HOME) {\n return process.env.XDG_DATA_HOME;\n }\n switch (import_os.default.platform()) {\n case \"darwin\":\n return import_path.default.join(import_os.default.homedir(), \"Library/Application Support\");\n case \"linux\":\n return import_path.default.join(import_os.default.homedir(), \".local/share\");\n case \"win32\":\n if (process.env.LOCALAPPDATA) {\n return process.env.LOCALAPPDATA;\n }\n return null;\n default:\n return null;\n }\n}\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n findRootDir,\n getUserDataDir\n});\n",
  6 | +
"\"use strict\";\nvar __create = Object.create;\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __getProtoOf = Object.getPrototypeOf;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(\n // If the importer is in node compatibility mode or this is not an ESM\n // file that has been converted to a CommonJS file using a Babel-\n // compatible transform (i.e. \"__esModule\" has not been set), then set\n // \"default\" to the CommonJS \"module.exports\" for node compatibility.\n isNodeMode || !mod || !mod.__esModule ? __defProp(target, \"default\", { value: mod, enumerable: true }) : target,\n mod\n));\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\nvar token_util_exports = {};\n__export(token_util_exports, {\n assertVercelOidcTokenResponse: () => assertVercelOidcTokenResponse,\n findProjectInfo: () => findProjectInfo,\n getTokenPayload: () => getTokenPayload,\n getVercelCliToken: () => getVercelCliToken,\n getVercelDataDir: () => getVercelDataDir,\n getVercelOidcToken: () => getVercelOidcToken,\n isExpired: () => isExpired,\n loadToken: () => loadToken,\n saveToken: () => saveToken\n});\nmodule.exports = __toCommonJS(token_util_exports);\nvar path = __toESM(require(\"path\"));\nvar fs = __toESM(require(\"fs\"));\nvar import_token_error = require(\"./token-error\");\nvar import_token_io = require(\"./token-io\");\nfunction getVercelDataDir() {\n const vercelFolder = \"com.vercel.cli\";\n const dataDir = (0, import_token_io.getUserDataDir)();\n if (!dataDir) {\n return null;\n }\n return path.join(dataDir, vercelFolder);\n}\nfunction getVercelCliToken() {\n const dataDir = getVercelDataDir();\n if (!dataDir) {\n return null;\n }\n const tokenPath = path.join(dataDir, \"auth.json\");\n if (!fs.existsSync(tokenPath)) {\n return null;\n }\n const token = fs.readFileSync(tokenPath, \"utf8\");\n if (!token) {\n return null;\n }\n return JSON.parse(token).token;\n}\nasync function getVercelOidcToken(authToken, projectId, teamId) {\n try {\n const url = `https://api.vercel.com/v1/projects/${projectId}/token?source=vercel-oidc-refresh${teamId ? 
`&teamId=${teamId}` : \"\"}`;\n const res = await fetch(url, {\n method: \"POST\",\n headers: {\n Authorization: `Bearer ${authToken}`\n }\n });\n if (!res.ok) {\n throw new import_token_error.VercelOidcTokenError(\n `Failed to refresh OIDC token: ${res.statusText}`\n );\n }\n const tokenRes = await res.json();\n assertVercelOidcTokenResponse(tokenRes);\n return tokenRes;\n } catch (e) {\n throw new import_token_error.VercelOidcTokenError(`Failed to refresh OIDC token`, e);\n }\n}\nfunction assertVercelOidcTokenResponse(res) {\n if (!res || typeof res !== \"object\") {\n throw new TypeError(\"Expected an object\");\n }\n if (!(\"token\" in res) || typeof res.token !== \"string\") {\n throw new TypeError(\"Expected a string-valued token property\");\n }\n}\nfunction findProjectInfo() {\n const dir = (0, import_token_io.findRootDir)();\n if (!dir) {\n throw new import_token_error.VercelOidcTokenError(\"Unable to find root directory\");\n }\n try {\n const prjPath = path.join(dir, \".vercel\", \"project.json\");\n if (!fs.existsSync(prjPath)) {\n throw new import_token_error.VercelOidcTokenError(\"project.json not found\");\n }\n const prj = JSON.parse(fs.readFileSync(prjPath, \"utf8\"));\n if (typeof prj.projectId !== \"string\" && typeof prj.orgId !== \"string\") {\n throw new TypeError(\"Expected a string-valued projectId property\");\n }\n return { projectId: prj.projectId, teamId: prj.orgId };\n } catch (e) {\n throw new import_token_error.VercelOidcTokenError(`Unable to find project ID`, e);\n }\n}\nfunction saveToken(token, projectId) {\n try {\n const dir = (0, import_token_io.getUserDataDir)();\n if (!dir) {\n throw new import_token_error.VercelOidcTokenError(\"Unable to find user data directory\");\n }\n const tokenPath = path.join(dir, \"com.vercel.token\", `${projectId}.json`);\n const tokenJson = JSON.stringify(token);\n fs.mkdirSync(path.dirname(tokenPath), { mode: 432, recursive: true });\n fs.writeFileSync(tokenPath, tokenJson);\n fs.chmodSync(tokenPath, 432);\n return;\n } catch (e) {\n throw new import_token_error.VercelOidcTokenError(`Failed to save token`, e);\n }\n}\nfunction loadToken(projectId) {\n try {\n const dir = (0, import_token_io.getUserDataDir)();\n if (!dir) {\n return null;\n }\n const tokenPath = path.join(dir, \"com.vercel.token\", `${projectId}.json`);\n if (!fs.existsSync(tokenPath)) {\n return null;\n }\n const token = JSON.parse(fs.readFileSync(tokenPath, \"utf8\"));\n assertVercelOidcTokenResponse(token);\n return token;\n } catch (e) {\n throw new import_token_error.VercelOidcTokenError(`Failed to load token`, e);\n }\n}\nfunction getTokenPayload(token) {\n const tokenParts = token.split(\".\");\n if (tokenParts.length !== 3) {\n throw new import_token_error.VercelOidcTokenError(\"Invalid token\");\n }\n const base64 = tokenParts[1].replace(/-/g, \"+\").replace(/_/g, \"/\");\n const padded = base64.padEnd(\n base64.length + (4 - base64.length % 4) % 4,\n \"=\"\n );\n return JSON.parse(Buffer.from(padded, \"base64\").toString(\"utf8\"));\n}\nconst TIME_15_MINUTES_IN_MS = 15 * 60 * 1e3;\nfunction isExpired(token) {\n return token.exp * 1e3 < Date.now() + TIME_15_MINUTES_IN_MS;\n}\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n assertVercelOidcTokenResponse,\n findProjectInfo,\n getTokenPayload,\n getVercelCliToken,\n getVercelDataDir,\n getVercelOidcToken,\n isExpired,\n loadToken,\n saveToken\n});\n"
  7 | + ],
  8 | +
"mappings": "8GACA,FAAsB,OAAlB,EACmB,eAAnB,EAC0B,yBAA1B,EAC2B,oBAA3B,EACsB,eAAtB,GAHY,OAIZ,EAAe,OAAO,UAAU,eAChC,EAAW,CAAC,EAAQ,IAAQ,CAC9B,QAAS,KAAQ,EACf,EAAU,EAAQ,EAAM,CAAE,IAAK,EAAI,GAAO,WAAY,EAAK,CAAC,GAE5D,EAAc,CAAC,EAAI,EAAM,EAAQ,IAAS,CAC5C,GAAI,GAAQ,OAAO,IAAS,UAAY,OAAO,IAAS,YACtD,QAAS,KAAO,EAAkB,CAAI,EACpC,GAAI,CAAC,EAAa,KAAK,EAAI,CAAG,GAAK,IAAQ,EACzC,EAAU,EAAI,EAAK,CAAE,IAAK,IAAM,EAAK,GAAM,WAAY,EAAE,EAAO,EAAiB,EAAM,CAAG,IAAM,EAAK,UAAW,CAAC,EAEvH,OAAO,GAEL,EAAU,CAAC,EAAK,EAAY,KAAY,EAAS,GAAO,KAAO,EAAS,EAAa,CAAG,CAAC,EAAI,CAAC,EAAG,EAKnG,GAAc,CAAC,GAAO,CAAC,EAAI,WAAa,EAAU,EAAQ,UAAW,CAAE,MAAO,EAAK,WAAY,EAAK,CAAC,EAAI,EACzG,CACF,GACI,EAAe,CAAC,IAAQ,EAAY,EAAU,CAAC,EAAG,aAAc,CAAE,MAAO,EAAK,CAAC,EAAG,CAAG,EACrF,EAAmB,CAAC,EACxB,EAAS,EAAkB,CACzB,YAAa,IAAM,EACnB,eAAgB,IAAM,CACxB,CAAC,EACD,EAAO,QAAU,EAAa,CAAgB,EAC9C,IAAI,EAAc,WAAuB,EACrC,EAAY,SAAqB,EACjC,EAAY,SAAqB,EACjC,MACJ,SAAS,CAAW,EAAG,CACrB,GAAI,CACF,IAAI,EAAM,QAAQ,IAAI,EACtB,MAAO,IAAQ,EAAY,QAAQ,QAAQ,CAAG,EAAG,CAC/C,IAAM,EAAU,EAAY,QAAQ,KAAK,EAAK,SAAS,EACvD,GAAI,EAAU,QAAQ,WAAW,CAAO,EACtC,OAAO,EAET,EAAM,EAAY,QAAQ,QAAQ,CAAG,GAEvC,MAAO,EAAG,CACV,MAAM,IAAI,EAAmB,qBAC3B,0DACF,EAEF,MAAM,IAAI,EAAmB,qBAAqB,+BAA+B,EAEnF,SAAS,CAAc,EAAG,CACxB,GAAI,QAAQ,IAAI,cACd,OAAO,QAAQ,IAAI,cAErB,OAAQ,EAAU,QAAQ,SAAS,OAC5B,SACH,OAAO,EAAY,QAAQ,KAAK,EAAU,QAAQ,QAAQ,EAAG,6BAA6B,MACvF,QACH,OAAO,EAAY,QAAQ,KAAK,EAAU,QAAQ,QAAQ,EAAG,cAAc,MACxE,QACH,GAAI,QAAQ,IAAI,aACd,OAAO,QAAQ,IAAI,aAErB,OAAO,aAEP,OAAO,2BCrEb,IAAsB,OAAlB,EACmB,eAAnB,EAC0B,yBAA1B,EAC2B,oBAA3B,EACsB,eAAtB,GAHY,OAIZ,EAAe,OAAO,UAAU,eAChC,EAAW,CAAC,EAAQ,IAAQ,CAC9B,QAAS,KAAQ,EACf,EAAU,EAAQ,EAAM,CAAE,IAAK,EAAI,GAAO,WAAY,EAAK,CAAC,GAE5D,EAAc,CAAC,EAAI,EAAM,EAAQ,IAAS,CAC5C,GAAI,GAAQ,OAAO,IAAS,UAAY,OAAO,IAAS,YACtD,QAAS,KAAO,EAAkB,CAAI,EACpC,GAAI,CAAC,EAAa,KAAK,EAAI,CAAG,GAAK,IAAQ,EACzC,EAAU,EAAI,EAAK,CAAE,IAAK,IAAM,EAAK,GAAM,WAAY,EAAE,EAAO,EAAiB,EAAM,CAAG,IAAM,EAAK,UAAW,CAAC,EAEvH,OAAO,GAEL,EAAU,CAAC,EAAK,EAAY,KAAY,EAAS,GAAO,KAAO,EAAS,EAAa,CAAG,CAAC,EAAI,CAAC,EAAG,EAKnG,GAAc,CAAC,GAAO,CAAC,EAAI,WAAa,EAAU,EAAQ,UAAW,CAAE,MAAO,EAAK,WAAY,EAAK,CAAC,EAAI,EACzG,CACF,GACI,EAAe,CAAC,IAAQ,EAAY,EAAU,CAAC,EAAG,aAAc,CAAE,MAAO,EAAK,CAAC,EAAG,CAAG,EACrF,EAAqB,CAAC,EAC1B,EAAS,EAAoB,CAC3B,8BAA+B,IAAM,EACrC,gBAAiB,IAAM,EACvB,gBAAiB,IAAM,EACvB,kBAAmB,IAAM,EACzB,iBAAkB,IAAM,EACxB,mBAAoB,IAAM,EAC1B,UAAW,IAAM,EACjB,UAAW,IAAM,EACjB,UAAW,IAAM,CACnB,CAAC,EACD,EAAO,QAAU,EAAa,CAAkB,EAChD,IAAI,EAAO,WAAuB,EAC9B,EAAK,SAAqB,EAC1B,MACA,MACJ,SAAS,CAAgB,EAAG,CAE1B,IAAM,GAAW,EAAG,EAAgB,gBAAgB,EACpD,GAAI,CAAC,EACH,OAAO,KAET,OAAO,EAAK,KAAK,EALI,gBAKiB,EAExC,SAAS,CAAiB,EAAG,CAC3B,IAAM,EAAU,EAAiB,EACjC,GAAI,CAAC,EACH,OAAO,KAET,IAAM,EAAY,EAAK,KAAK,EAAS,WAAW,EAChD,GAAI,CAAC,EAAG,WAAW,CAAS,EAC1B,OAAO,KAET,IAAM,EAAQ,EAAG,aAAa,EAAW,MAAM,EAC/C,GAAI,CAAC,EACH,OAAO,KAET,OAAO,KAAK,MAAM,CAAK,EAAE,MAE3B,eAAe,CAAkB,CAAC,EAAW,EAAW,EAAQ,CAC9D,GAAI,CACF,IAAM,EAAM,sCAAsC,qCAA6C,EAAS,WAAW,IAAW,KACxH,EAAM,MAAM,MAAM,EAAK,CAC3B,OAAQ,OACR,QAAS,CACP,cAAe,UAAU,GAC3B,CACF,CAAC,EACD,GAAI,CAAC,EAAI,GACP,MAAM,IAAI,EAAmB,qBAC3B,iCAAiC,EAAI,YACvC,EAEF,IAAM,EAAW,MAAM,EAAI,KAAK,EAEhC,OADA,EAA8B,CAAQ,EAC/B,EACP,MAAO,EAAG,CACV,MAAM,IAAI,EAAmB,qBAAqB,+BAAgC,CAAC,GAGvF,SAAS,CAA6B,CAAC,EAAK,CAC1C,GAAI,CAAC,GAAO,OAAO,IAAQ,SACzB,MAAU,UAAU,oBAAoB,EAE1C,GAAI,EAAE,UAAW,IAAQ,OAAO,EAAI,QAAU,SAC5C,MAAU,UAAU,yCAAyC,EAGjE,SAAS,CAAe,EAAG,CACzB,IAAM,GAAO,EAAG,EAAgB,aAAa,EAC7C,GAAI,CAAC,EACH,MAAM,IAAI,EAAmB,qBAAqB,+BAA+B,EAEnF,GAAI,CACF,IAAM,EAAU,EAAK,KAAK,EAAK,UAAW,cAAc,EACxD,GAAI,CAAC,EAAG,WAAW,CAAO,EACxB,MAAM,IAAI,EAAmB,qBAAqB,wBAAwB,EAE5E,IAAM,EAAM,KAAK,MAAM,EAAG,aAAa,EAAS,MAAM,CAAC,EACvD,GAAI,OAAO,
EAAI,YAAc,UAAY,OAAO,EAAI,QAAU,SAC5D,MAAU,UAAU,6CAA6C,EAEnE,MAAO,CAAE,UAAW,EAAI,UAAW,OAAQ,EAAI,KAAM,EACrD,MAAO,EAAG,CACV,MAAM,IAAI,EAAmB,qBAAqB,4BAA6B,CAAC,GAGpF,SAAS,CAAS,CAAC,EAAO,EAAW,CACnC,GAAI,CACF,IAAM,GAAO,EAAG,EAAgB,gBAAgB,EAChD,GAAI,CAAC,EACH,MAAM,IAAI,EAAmB,qBAAqB,oCAAoC,EAExF,IAAM,EAAY,EAAK,KAAK,EAAK,mBAAoB,GAAG,QAAgB,EAClE,EAAY,KAAK,UAAU,CAAK,EACtC,EAAG,UAAU,EAAK,QAAQ,CAAS,EAAG,CAAE,KAAM,IAAK,UAAW,EAAK,CAAC,EACpE,EAAG,cAAc,EAAW,CAAS,EACrC,EAAG,UAAU,EAAW,GAAG,EAC3B,OACA,MAAO,EAAG,CACV,MAAM,IAAI,EAAmB,qBAAqB,uBAAwB,CAAC,GAG/E,SAAS,CAAS,CAAC,EAAW,CAC5B,GAAI,CACF,IAAM,GAAO,EAAG,EAAgB,gBAAgB,EAChD,GAAI,CAAC,EACH,OAAO,KAET,IAAM,EAAY,EAAK,KAAK,EAAK,mBAAoB,GAAG,QAAgB,EACxE,GAAI,CAAC,EAAG,WAAW,CAAS,EAC1B,OAAO,KAET,IAAM,EAAQ,KAAK,MAAM,EAAG,aAAa,EAAW,MAAM,CAAC,EAE3D,OADA,EAA8B,CAAK,EAC5B,EACP,MAAO,EAAG,CACV,MAAM,IAAI,EAAmB,qBAAqB,uBAAwB,CAAC,GAG/E,SAAS,CAAe,CAAC,EAAO,CAC9B,IAAM,EAAa,EAAM,MAAM,GAAG,EAClC,GAAI,EAAW,SAAW,EACxB,MAAM,IAAI,EAAmB,qBAAqB,eAAe,EAEnE,IAAM,EAAS,EAAW,GAAG,QAAQ,KAAM,GAAG,EAAE,QAAQ,KAAM,GAAG,EAC3D,EAAS,EAAO,OACpB,EAAO,QAAU,EAAI,EAAO,OAAS,GAAK,EAC1C,GACF,EACA,OAAO,KAAK,MAAM,OAAO,KAAK,EAAQ,QAAQ,EAAE,SAAS,MAAM,CAAC,EAElE,IAAM,EAAwB,OAC9B,SAAS,CAAS,CAAC,EAAO,CACxB,OAAO,EAAM,IAAM,KAAM,KAAK,IAAI,EAAI",
  9 | + "debugId": "EA40A78D9849B6A864756E2164756E21",
  10 | + "names": []
  11 | + }
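The two vendored @vercel/oidc modules above mostly wrap file-system lookups, but the `getTokenPayload`/`isExpired` pair is self-contained. A standalone restatement of that pair, assuming only Node's `Buffer`; not an import from the package:

```ts
// Split the JWT, base64url-decode the middle segment, and treat a token as
// expired 15 minutes before its actual `exp` (the 900000 constant in the bundle).
const getTokenPayload = (token: string): { exp: number } => {
  const parts = token.split('.');
  if (parts.length !== 3) throw new Error('Invalid token');
  // base64url -> base64, then pad to a multiple of 4 before decoding
  const base64 = parts[1].replace(/-/g, '+').replace(/_/g, '/');
  const padded = base64.padEnd(base64.length + ((4 - (base64.length % 4)) % 4), '=');
  return JSON.parse(Buffer.from(padded, 'base64').toString('utf8'));
};

const FIFTEEN_MINUTES_MS = 15 * 60 * 1000;
const isExpired = (payload: { exp: number }): boolean =>
  payload.exp * 1000 < Date.now() + FIFTEEN_MINUTES_MS;
```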
@@ -1,4 +1,4 @@
1 1 | var z=(j,b)=>({kind:"ConfigError",message:j,configKey:b?.configKey,configPath:b?.configPath,context:b?.context,cause:b?.cause}),B=(j,b,x,v)=>({kind:"FileSystemError",message:j,path:b,operation:x,context:v?.context,cause:v?.cause});
2   | - export{z
  2 | + export{z,B as A};
3 3 |
4   | - //# debugId=
  4 | + //# debugId=3EAFCD053EC31FC264756E2164756E21
@@ -5,6 +5,6 @@
5 5 |
"/**\n * Standard error types for the application\n * Typed errors enable better error handling and recovery\n *\n * DESIGN RATIONALE:\n * - Discriminated union for all error types\n * - Each error type has specific context\n * - Enables type-safe error handling\n * - Clear error categories for recovery strategies\n */\n\n/**\n * Base error type with common fields\n */\nexport interface BaseError {\n readonly kind: string;\n readonly message: string;\n readonly context?: Record<string, unknown>;\n readonly cause?: Error;\n}\n\n/**\n * Configuration error - invalid configuration or missing required config\n */\nexport interface ConfigError extends BaseError {\n readonly kind: 'ConfigError';\n readonly configKey?: string;\n readonly configPath?: string;\n}\n\n/**\n * File system error - file not found, permission denied, etc.\n */\nexport interface FileSystemError extends BaseError {\n readonly kind: 'FileSystemError';\n readonly path: string;\n readonly operation: 'read' | 'write' | 'delete' | 'create' | 'stat';\n}\n\n/**\n * Database error - query failure, connection error, etc.\n */\nexport interface DatabaseError extends BaseError {\n readonly kind: 'DatabaseError';\n readonly operation: string;\n readonly table?: string;\n}\n\n/**\n * Validation error - input validation failure\n */\nexport interface ValidationError extends BaseError {\n readonly kind: 'ValidationError';\n readonly field?: string;\n readonly errors: string[];\n}\n\n/**\n * Network error - HTTP error, timeout, connection refused, etc.\n */\nexport interface NetworkError extends BaseError {\n readonly kind: 'NetworkError';\n readonly url?: string;\n readonly statusCode?: number;\n}\n\n/**\n * CLI error - command line interface specific errors\n */\nexport interface CLIError extends BaseError {\n readonly kind: 'CLIError';\n readonly command?: string;\n readonly exitCode?: number;\n}\n\n/**\n * Not found error - resource not found\n */\nexport interface NotFoundError extends BaseError {\n readonly kind: 'NotFoundError';\n readonly resourceType: string;\n readonly resourceId: string;\n}\n\n/**\n * Permission error - insufficient permissions\n */\nexport interface PermissionError extends BaseError {\n readonly kind: 'PermissionError';\n readonly resource: string;\n readonly requiredPermission: string;\n}\n\n/**\n * Unknown error - catch-all for unexpected errors\n */\nexport interface UnknownError extends BaseError {\n readonly kind: 'UnknownError';\n}\n\n/**\n * Union of all error types\n */\nexport type AppError =\n | ConfigError\n | FileSystemError\n | DatabaseError\n | ValidationError\n | NetworkError\n | CLIError\n | NotFoundError\n | PermissionError\n | UnknownError;\n\n/**\n * Error constructors\n */\n\nexport const configError = (\n message: string,\n options?: {\n configKey?: string;\n configPath?: string;\n context?: Record<string, unknown>;\n cause?: Error;\n }\n): ConfigError => ({\n kind: 'ConfigError',\n message,\n configKey: options?.configKey,\n configPath: options?.configPath,\n context: options?.context,\n cause: options?.cause,\n});\n\nexport const fileSystemError = (\n message: string,\n path: string,\n operation: 'read' | 'write' | 'delete' | 'create' | 'stat',\n options?: { context?: Record<string, unknown>; cause?: Error }\n): FileSystemError => ({\n kind: 'FileSystemError',\n message,\n path,\n operation,\n context: options?.context,\n cause: options?.cause,\n});\n\nexport const databaseError = (\n message: string,\n operation: string,\n options?: { table?: string; context?: Record<string, unknown>; 
cause?: Error }\n): DatabaseError => ({\n kind: 'DatabaseError',\n message,\n operation,\n table: options?.table,\n context: options?.context,\n cause: options?.cause,\n});\n\nexport const validationError = (\n message: string,\n errors: string[],\n options?: { field?: string; context?: Record<string, unknown> }\n): ValidationError => ({\n kind: 'ValidationError',\n message,\n field: options?.field,\n errors,\n context: options?.context,\n});\n\nexport const networkError = (\n message: string,\n options?: { url?: string; statusCode?: number; context?: Record<string, unknown>; cause?: Error }\n): NetworkError => ({\n kind: 'NetworkError',\n message,\n url: options?.url,\n statusCode: options?.statusCode,\n context: options?.context,\n cause: options?.cause,\n});\n\nexport const cliError = (\n message: string,\n options?: {\n command?: string;\n exitCode?: number;\n context?: Record<string, unknown>;\n cause?: Error;\n }\n): CLIError => ({\n kind: 'CLIError',\n message,\n command: options?.command,\n exitCode: options?.exitCode,\n context: options?.context,\n cause: options?.cause,\n});\n\nexport const notFoundError = (\n message: string,\n resourceType: string,\n resourceId: string,\n options?: { context?: Record<string, unknown>; cause?: Error }\n): NotFoundError => ({\n kind: 'NotFoundError',\n message,\n resourceType,\n resourceId,\n context: options?.context,\n cause: options?.cause,\n});\n\nexport const permissionError = (\n message: string,\n resource: string,\n requiredPermission: string,\n options?: { context?: Record<string, unknown>; cause?: Error }\n): PermissionError => ({\n kind: 'PermissionError',\n message,\n resource,\n requiredPermission,\n context: options?.context,\n cause: options?.cause,\n});\n\nexport const unknownError = (\n message: string,\n options?: { context?: Record<string, unknown>; cause?: Error }\n): UnknownError => ({\n kind: 'UnknownError',\n message,\n context: options?.context,\n cause: options?.cause,\n});\n\n/**\n * Convert unknown error to AppError\n */\nexport const toAppError = (error: unknown): AppError => {\n if (isAppError(error)) {\n return error;\n }\n\n if (error instanceof Error) {\n return unknownError(error.message, { cause: error });\n }\n\n return unknownError(String(error));\n};\n\n/**\n * Type guard for AppError\n */\nexport const isAppError = (error: unknown): error is AppError => {\n return typeof error === 'object' && error !== null && 'kind' in error && 'message' in error;\n};\n\n/**\n * Format error for display\n */\nexport const formatError = (error: AppError): string => {\n let formatted = `[${error.kind}] ${error.message}`;\n\n if (error.kind === 'ConfigError' && error.configKey) {\n formatted += `\\n Config key: ${error.configKey}`;\n }\n\n if (error.kind === 'FileSystemError') {\n formatted += `\\n Path: ${error.path}`;\n formatted += `\\n Operation: ${error.operation}`;\n }\n\n if (error.kind === 'DatabaseError') {\n formatted += `\\n Operation: ${error.operation}`;\n if (error.table) {\n formatted += `\\n Table: ${error.table}`;\n }\n }\n\n if (error.kind === 'ValidationError') {\n formatted += `\\n Errors:`;\n for (const err of error.errors) {\n formatted += `\\n - ${err}`;\n }\n }\n\n if (error.kind === 'NetworkError') {\n if (error.url) {\n formatted += `\\n URL: ${error.url}`;\n }\n if (error.statusCode) {\n formatted += `\\n Status: ${error.statusCode}`;\n }\n }\n\n if (error.context) {\n formatted += `\\n Context: ${JSON.stringify(error.context, null, 2)}`;\n }\n\n if (error.cause) {\n formatted += `\\n Caused by: 
${error.cause.message}`;\n }\n\n return formatted;\n};\n"
6 6 | ],
7 7 |
"mappings": "AAsHO,IAAM,EAAc,CACzB,EACA,KAMiB,CACjB,KAAM,cACN,UACA,UAAW,GAAS,UACpB,WAAY,GAAS,WACrB,QAAS,GAAS,QAClB,MAAO,GAAS,KAClB,GAEa,EAAkB,CAC7B,EACA,EACA,EACA,KACqB,CACrB,KAAM,kBACN,UACA,OACA,YACA,QAAS,GAAS,QAClB,MAAO,GAAS,KAClB",
8   | - "debugId": "
  8 | + "debugId": "3EAFCD053EC31FC264756E2164756E21",
9 9 | "names": []
10 10 | }
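The error-types source shown in this map defines the package's discriminated error union. A short sketch of constructing, normalizing, and formatting these errors; the import path is hypothetical, the names are the module's own exports:

```ts
// Sketch; './error-types.js' is a hypothetical path to the module above.
import { fileSystemError, formatError, isAppError, toAppError } from './error-types.js';

const err = fileSystemError('Settings file missing', '.sylphx-flow/settings.json', 'read', {
  context: { cwd: process.cwd() },
});

console.log(formatError(err));
// [FileSystemError] Settings file missing
//   Path: .sylphx-flow/settings.json
//   Operation: read
//   Context: { "cwd": ... }

// toAppError normalizes arbitrary thrown values into the AppError union:
try {
  JSON.parse('not json');
} catch (e) {
  const appErr = toAppError(e);      // UnknownError wrapping the SyntaxError as `cause`
  console.log(isAppError(appErr));   // true
}
```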
@@ -1,4 +1,4 @@
1 1 | var k=(h)=>({_tag:"Success",value:h}),q=(h)=>({_tag:"Failure",error:h});var w=(h,j=(b)=>b)=>{try{return k(h())}catch(b){return q(j(b))}},x=async(h,j=(b)=>b)=>{try{let b=await h();return k(b)}catch(b){return q(j(b))}};
2   | - export{k as
  2 | + export{k as B,q as C,w as D,x as E};
3 3 |
4   | - //# debugId=
  4 | + //# debugId=008AEC80568AD24C64756E2164756E21
@@ -5,6 +5,6 @@
5 5 |
"/**\n * Result type for functional error handling\n * Represents success or failure without exceptions\n *\n * DESIGN RATIONALE:\n * - Explicit error handling at type level\n * - Composable through map/flatMap\n * - Forces caller to handle errors\n * - No hidden control flow (no thrown exceptions)\n */\n\nexport type Result<T, E = Error> = Success<T> | Failure<E>;\n\nexport interface Success<T> {\n readonly _tag: 'Success';\n readonly value: T;\n}\n\nexport interface Failure<E> {\n readonly _tag: 'Failure';\n readonly error: E;\n}\n\n/**\n * Constructors\n */\n\nexport const success = <T>(value: T): Success<T> => ({\n _tag: 'Success',\n value,\n});\n\nexport const failure = <E>(error: E): Failure<E> => ({\n _tag: 'Failure',\n error,\n});\n\n/**\n * Type guards\n */\n\nexport const isSuccess = <T, E>(result: Result<T, E>): result is Success<T> =>\n result._tag === 'Success';\n\nexport const isFailure = <T, E>(result: Result<T, E>): result is Failure<E> =>\n result._tag === 'Failure';\n\n/**\n * Transformations\n */\n\n/**\n * Transform the success value\n * Failure propagates unchanged\n */\nexport const map =\n <T, U, E>(fn: (value: T) => U) =>\n (result: Result<T, E>): Result<U, E> => {\n if (isSuccess(result)) {\n return success(fn(result.value));\n }\n return result;\n };\n\n/**\n * Transform the success value with a function that returns a Result\n * Enables chaining operations that can fail\n * Failure propagates unchanged\n */\nexport const flatMap =\n <T, U, E>(fn: (value: T) => Result<U, E>) =>\n (result: Result<T, E>): Result<U, E> => {\n if (isSuccess(result)) {\n return fn(result.value);\n }\n return result;\n };\n\n/**\n * Transform the error\n * Success propagates unchanged\n */\nexport const mapError =\n <T, E, F>(fn: (error: E) => F) =>\n (result: Result<T, E>): Result<T, F> => {\n if (isFailure(result)) {\n return failure(fn(result.error));\n }\n return result;\n };\n\n/**\n * Extract value or provide default\n */\nexport const getOrElse =\n <T>(defaultValue: T) =>\n <E>(result: Result<T, E>): T => {\n if (isSuccess(result)) {\n return result.value;\n }\n return defaultValue;\n };\n\n/**\n * Extract value or compute default\n */\nexport const getOrElseLazy =\n <T>(fn: () => T) =>\n <E>(result: Result<T, E>): T => {\n if (isSuccess(result)) {\n return result.value;\n }\n return fn();\n };\n\n/**\n * Pattern matching\n */\nexport const match =\n <T, E, U>(onSuccess: (value: T) => U, onFailure: (error: E) => U) =>\n (result: Result<T, E>): U => {\n if (isSuccess(result)) {\n return onSuccess(result.value);\n }\n return onFailure(result.error);\n };\n\n/**\n * Convert thrown exception to Result\n */\nexport const tryCatch = <T, E = Error>(\n fn: () => T,\n onError: (error: unknown) => E = (error: unknown) => error as E\n): Result<T, E> => {\n try {\n return success(fn());\n } catch (error) {\n return failure(onError(error));\n }\n};\n\n/**\n * Convert Promise to Result\n */\nexport const tryCatchAsync = async <T, E = Error>(\n fn: () => Promise<T>,\n onError: (error: unknown) => E = (error: unknown) => error as E\n): Promise<Result<T, E>> => {\n try {\n const value = await fn();\n return success(value);\n } catch (error) {\n return failure(onError(error));\n }\n};\n\n/**\n * Combine multiple Results into a single Result containing an array\n * Fails if any Result is a Failure (short-circuits on first failure)\n */\nexport const all = <T, E>(results: Result<T, E>[]): Result<T[], E> => {\n const values: T[] = [];\n\n for (const result of results) {\n if (isFailure(result)) {\n 
return result;\n }\n values.push(result.value);\n }\n\n return success(values);\n};\n\n/**\n * Run side effect for success case\n */\nexport const tap =\n <T, E>(fn: (value: T) => void) =>\n (result: Result<T, E>): Result<T, E> => {\n if (isSuccess(result)) {\n fn(result.value);\n }\n return result;\n };\n\n/**\n * Run side effect for failure case\n */\nexport const tapError =\n <T, E>(fn: (error: E) => void) =>\n (result: Result<T, E>): Result<T, E> => {\n if (isFailure(result)) {\n fn(result.error);\n }\n return result;\n };\n\n/**\n * Functional pipe for composing Result transformations\n */\nexport const pipe =\n <T, E>(result: Result<T, E>) =>\n <U>(fn: (result: Result<T, E>) => U): U =>\n fn(result);\n"
6 6 | ],
7 7 |
"mappings": "AA2BO,IAAM,EAAU,CAAI,KAA0B,CACnD,KAAM,UACN,OACF,GAEa,EAAU,CAAI,KAA0B,CACnD,KAAM,UACN,OACF,GA+FO,IAAM,EAAW,CACtB,EACA,EAAiC,CAAC,IAAmB,IACpC,CACjB,GAAI,CACF,OAAO,EAAQ,EAAG,CAAC,EACnB,MAAO,EAAO,CACd,OAAO,EAAQ,EAAQ,CAAK,CAAC,IAOpB,EAAgB,MAC3B,EACA,EAAiC,CAAC,IAAmB,IAC3B,CAC1B,GAAI,CACF,IAAM,EAAQ,MAAM,EAAG,EACvB,OAAO,EAAQ,CAAK,EACpB,MAAO,EAAO,CACd,OAAO,EAAQ,EAAQ,CAAK,CAAC",
8   | - "debugId": "
  8 | + "debugId": "008AEC80568AD24C64756E2164756E21",
9 9 | "names": []
10 10 | }
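The result source above is the `Result<T, E>` backbone the rest of this diff builds on (`loadSettings`, `processSettings`, and the `tryCatch` calls in the hook setup all return it). A sketch of a typical chain; the import path is hypothetical:

```ts
// Sketch; './result.js' is a hypothetical path to the module above.
import { isSuccess, map, tryCatch } from './result.js';

const raw = '{"defaultTarget":"claude-code"}';

// tryCatch converts the throwing JSON.parse into a Result<T, Error>:
const parsed = tryCatch(
  () => JSON.parse(raw) as { defaultTarget?: string },
  (e) => (e instanceof Error ? e : new Error(String(e)))
);

// map transforms only the Success branch; Failure propagates unchanged:
const target = map<{ defaultTarget?: string }, string, Error>(
  (s) => s.defaultTarget ?? 'none'
)(parsed);

if (isSuccess(target)) {
  console.log(`default target: ${target.value}`); // "default target: claude-code"
} else {
  console.log(`could not parse settings: ${target.error.message}`);
}
```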
@@ -0,0 +1,4 @@
  1 | +
import{G as I}from"./chunk-x46tzzn9.js";var X=I((Y,H)=>{var{defineProperty:z,getOwnPropertyDescriptor:K,getOwnPropertyNames:L}=Object,Q=Object.prototype.hasOwnProperty,R=(j,b)=>{for(var v in b)z(j,v,{get:b[v],enumerable:!0})},U=(j,b,v,B)=>{if(b&&typeof b==="object"||typeof b==="function"){for(let q of L(b))if(!Q.call(j,q)&&q!==v)z(j,q,{get:()=>b[q],enumerable:!(B=K(b,q))||B.enumerable})}return j},W=(j)=>U(z({},"__esModule",{value:!0}),j),F={};R(F,{VercelOidcTokenError:()=>G});H.exports=W(F);class G extends Error{constructor(j,b){super(j);this.name="VercelOidcTokenError",this.cause=b}toString(){if(this.cause)return`${this.name}: ${this.message}: ${this.cause}`;return`${this.name}: ${this.message}`}}});
  2 | + export{X as t};
  3 | +
  4 | + //# debugId=6C5C6AF10E3A2E8B64756E2164756E21
@@ -0,0 +1,10 @@
  1 | + {
  2 | + "version": 3,
  3 | + "sources": ["../node_modules/@vercel/oidc/dist/token-error.js"],
  4 | + "sourcesContent": [
  5 | +
"\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\nvar token_error_exports = {};\n__export(token_error_exports, {\n VercelOidcTokenError: () => VercelOidcTokenError\n});\nmodule.exports = __toCommonJS(token_error_exports);\nclass VercelOidcTokenError extends Error {\n constructor(message, cause) {\n super(message);\n this.name = \"VercelOidcTokenError\";\n this.cause = cause;\n }\n toString() {\n if (this.cause) {\n return `${this.name}: ${this.message}: ${this.cause}`;\n }\n return `${this.name}: ${this.message}`;\n }\n}\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n VercelOidcTokenError\n});\n"
  6 | + ],
  7 | +
"mappings": "wDACA,IAAuB,eAAnB,EAC0B,yBAA1B,EAC2B,oBAA3B,GADmB,OAEnB,EAAe,OAAO,UAAU,eAChC,EAAW,CAAC,EAAQ,IAAQ,CAC9B,QAAS,KAAQ,EACf,EAAU,EAAQ,EAAM,CAAE,IAAK,EAAI,GAAO,WAAY,EAAK,CAAC,GAE5D,EAAc,CAAC,EAAI,EAAM,EAAQ,IAAS,CAC5C,GAAI,GAAQ,OAAO,IAAS,UAAY,OAAO,IAAS,YACtD,QAAS,KAAO,EAAkB,CAAI,EACpC,GAAI,CAAC,EAAa,KAAK,EAAI,CAAG,GAAK,IAAQ,EACzC,EAAU,EAAI,EAAK,CAAE,IAAK,IAAM,EAAK,GAAM,WAAY,EAAE,EAAO,EAAiB,EAAM,CAAG,IAAM,EAAK,UAAW,CAAC,EAEvH,OAAO,GAEL,EAAe,CAAC,IAAQ,EAAY,EAAU,CAAC,EAAG,aAAc,CAAE,MAAO,EAAK,CAAC,EAAG,CAAG,EACrF,EAAsB,CAAC,EAC3B,EAAS,EAAqB,CAC5B,qBAAsB,IAAM,CAC9B,CAAC,EACD,EAAO,QAAU,EAAa,CAAmB,EACjD,MAAM,UAA6B,KAAM,CACvC,WAAW,CAAC,EAAS,EAAO,CAC1B,MAAM,CAAO,EACb,KAAK,KAAO,uBACZ,KAAK,MAAQ,EAEf,QAAQ,EAAG,CACT,GAAI,KAAK,MACP,MAAO,GAAG,KAAK,SAAS,KAAK,YAAY,KAAK,QAEhD,MAAO,GAAG,KAAK,SAAS,KAAK,UAEjC",
  8 | + "debugId": "6C5C6AF10E3A2E8B64756E2164756E21",
  9 | + "names": []
  10 | + }
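The token-error chunk is a single class. For reference, its behavior restated standalone (not an import from the package): the `cause`, when present, is appended to `toString()`:

```ts
// Standalone restatement of the vendored VercelOidcTokenError class above.
class VercelOidcTokenError extends Error {
  cause?: unknown;
  constructor(message: string, cause?: unknown) {
    super(message);
    this.name = 'VercelOidcTokenError';
    this.cause = cause;
  }
  toString(): string {
    return this.cause
      ? `${this.name}: ${this.message}: ${this.cause}`
      : `${this.name}: ${this.message}`;
  }
}

console.log(String(new VercelOidcTokenError('Failed to refresh OIDC token', new Error('401'))));
// "VercelOidcTokenError: Failed to refresh OIDC token: Error: 401"
```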
@@ -1,3 +1,3 @@
1   | - import{
  1 | +
import{z as V}from"./chunk-h9kssnyy.js";import{B as P,C as Y,D as $}from"./chunk-j08f9mnk.js";import{F as M,I as b}from"./chunk-x46tzzn9.js";var z=async(q)=>{let{detectInvocation:B,generateHookCommand:G,loadInvocationMethod:N}=await import("./chunk-7wd1res1.js"),W=await N()||B();return{sessionCommand:G("session",q,W),messageCommand:G("message",q,W),notificationCommand:G("notification",q,W)}},J={sessionCommand:"npx -y @sylphx/flow hook --type session --target claude-code",messageCommand:"npx -y @sylphx/flow hook --type message --target claude-code",notificationCommand:"npx -y @sylphx/flow hook --type notification --target claude-code"},y=(q)=>{return $(()=>JSON.parse(q),(B)=>V("Failed to parse Claude Code settings",{cause:B instanceof Error?B:void 0}))},w=(q=J)=>{let B=q.sessionCommand||J.sessionCommand,G=q.messageCommand||J.messageCommand,N=q.notificationCommand||J.notificationCommand;return{SessionStart:[{hooks:[{type:"command",command:B}]}],UserPromptSubmit:[{hooks:[{type:"command",command:G}]}],Notification:[{matcher:"",hooks:[{type:"command",command:N}]}]}},R=(q,B=J)=>{let G=w(B);return{...q,hooks:{...q.hooks||{},...G}}},Z=(q=J)=>{return{hooks:w(q)}},X=(q)=>{return JSON.stringify(q,null,2)},p=()=>{return"Claude Code hooks configured: SessionStart (static info) + UserPromptSubmit (dynamic info)"},A=(q,B=J)=>{if(q===null||q.trim()===""){let Q=Z(B);return P(X(Q))}let G=y(q);if(G._tag==="Failure"){let Q=Z(B);return P(X(Q))}let N=R(G.value,B);return P(X(N))},D=(q)=>{if(q.sessionCommand!==void 0&&q.sessionCommand.trim()==="")return Y(V("Session command cannot be empty"));if(q.messageCommand!==void 0&&q.messageCommand.trim()==="")return Y(V("Message command cannot be empty"));return P(q)};export{D as validateHookConfig,X as serializeSettings,A as processSettings,y as parseSettings,R as mergeSettings,p as getSuccessMessage,z as generateHookCommands,Z as createSettings,w as buildHookConfiguration,J as DEFAULT_HOOKS};
2 2 |
3   | - //# debugId=
  3 | + //# debugId=FAAB79DEE5BF095C64756E2164756E21
@@ -5,6 +5,6 @@
5 5 |
"/**\n * Business logic for Claude Code target setup\n * Pure functions separated from I/O\n *\n * DESIGN RATIONALE:\n * - Business logic testable without file system\n * - Pure functions for settings transformation\n * - Side effects isolated\n * - Clear separation of concerns\n */\n\nimport type { ConfigError } from '../../core/functional/error-types.js';\nimport { configError } from '../../core/functional/error-types.js';\nimport type { Result } from '../../core/functional/result.js';\nimport { failure, success, tryCatch } from '../../core/functional/result.js';\n\n/**\n * Claude Code settings structure\n */\nexport interface ClaudeCodeSettings {\n hooks?: Record<\n string,\n Array<{\n hooks: Array<{\n type: string;\n command: string;\n }>;\n }>\n >;\n [key: string]: unknown;\n}\n\nexport interface HookConfig {\n sessionCommand?: string;\n messageCommand?: string;\n notificationCommand?: string;\n}\n\n/**\n * Generate hook commands based on how CLI was invoked\n * Detects invocation method and generates matching commands\n */\nexport const generateHookCommands = async (targetId: string): Promise<HookConfig> => {\n const { detectInvocation, generateHookCommand, loadInvocationMethod } = await import(\n '../../utils/cli-invocation.js'\n );\n\n // Try to load saved invocation method, fall back to detection\n const savedMethod = await loadInvocationMethod();\n const method = savedMethod || detectInvocation();\n\n return {\n sessionCommand: generateHookCommand('session', targetId, method),\n messageCommand: generateHookCommand('message', targetId, method),\n notificationCommand: generateHookCommand('notification', targetId, method),\n };\n};\n\n/**\n * Default hook commands (fallback)\n * Now using unified hook command for all content (rules, output styles, system info)\n */\nexport const DEFAULT_HOOKS: HookConfig = {\n sessionCommand: 'npx -y @sylphx/flow hook --type session --target claude-code',\n messageCommand: 'npx -y @sylphx/flow hook --type message --target claude-code',\n notificationCommand: 'npx -y @sylphx/flow hook --type notification --target claude-code',\n};\n\n/**\n * Parse JSON settings (pure)\n */\nexport const parseSettings = (content: string): Result<ClaudeCodeSettings, ConfigError> => {\n return tryCatch(\n () => JSON.parse(content) as ClaudeCodeSettings,\n (error) =>\n configError('Failed to parse Claude Code settings', {\n cause: error instanceof Error ? 
error : undefined,\n })\n );\n};\n\n/**\n * Build hook configuration (pure)\n */\nexport const buildHookConfiguration = (\n config: HookConfig = DEFAULT_HOOKS\n): ClaudeCodeSettings['hooks'] => {\n const sessionCommand = config.sessionCommand || DEFAULT_HOOKS.sessionCommand!;\n const messageCommand = config.messageCommand || DEFAULT_HOOKS.messageCommand!;\n const notificationCommand = config.notificationCommand || DEFAULT_HOOKS.notificationCommand!;\n\n return {\n SessionStart: [\n {\n hooks: [\n {\n type: 'command',\n command: sessionCommand,\n },\n ],\n },\n ],\n UserPromptSubmit: [\n {\n hooks: [\n {\n type: 'command',\n command: messageCommand,\n },\n ],\n },\n ],\n Notification: [\n {\n matcher: '',\n hooks: [\n {\n type: 'command',\n command: notificationCommand,\n },\n ],\n },\n ],\n };\n};\n\n/**\n * Merge settings with new hooks (pure)\n */\nexport const mergeSettings = (\n existingSettings: ClaudeCodeSettings,\n hookConfig: HookConfig = DEFAULT_HOOKS\n): ClaudeCodeSettings => {\n const newHooks = buildHookConfiguration(hookConfig);\n\n return {\n ...existingSettings,\n hooks: {\n ...(existingSettings.hooks || {}),\n ...newHooks,\n },\n };\n};\n\n/**\n * Create settings with hooks (pure)\n */\nexport const createSettings = (hookConfig: HookConfig = DEFAULT_HOOKS): ClaudeCodeSettings => {\n return {\n hooks: buildHookConfiguration(hookConfig),\n };\n};\n\n/**\n * Serialize settings to JSON (pure)\n */\nexport const serializeSettings = (settings: ClaudeCodeSettings): string => {\n return JSON.stringify(settings, null, 2);\n};\n\n/**\n * Get success message (pure)\n */\nexport const getSuccessMessage = (): string => {\n return 'Claude Code hooks configured: SessionStart (static info) + UserPromptSubmit (dynamic info)';\n};\n\n/**\n * Process settings: parse existing or create new, merge hooks, serialize (pure)\n */\nexport const processSettings = (\n existingContent: string | null,\n hookConfig: HookConfig = DEFAULT_HOOKS\n): Result<string, ConfigError> => {\n if (existingContent === null || existingContent.trim() === '') {\n // No existing settings, create new\n const settings = createSettings(hookConfig);\n return success(serializeSettings(settings));\n }\n\n // Parse existing settings\n const parseResult = parseSettings(existingContent);\n if (parseResult._tag === 'Failure') {\n // If parsing fails, create new settings\n const settings = createSettings(hookConfig);\n return success(serializeSettings(settings));\n }\n\n // Merge with existing\n const merged = mergeSettings(parseResult.value, hookConfig);\n return success(serializeSettings(merged));\n};\n\n/**\n * Validate hook configuration (pure)\n */\nexport const validateHookConfig = (config: HookConfig): Result<HookConfig, ConfigError> => {\n if (config.sessionCommand !== undefined && config.sessionCommand.trim() === '') {\n return failure(configError('Session command cannot be empty'));\n }\n\n if (config.messageCommand !== undefined && config.messageCommand.trim() === '') {\n return failure(configError('Message command cannot be empty'));\n }\n\n return success(config);\n};\n"
6 6 | ],
7 7 |
"mappings": "yJA0CO,FAAM,JAAuB,MAAO,IAA0C,CACnF,IAAQ,mBAAkB,sBAAqB,wBAAyB,KACtE,+BAKI,EADc,MAAM,EAAqB,GACjB,EAAiB,EAE/C,MAAO,CACL,eAAgB,EAAoB,UAAW,EAAU,CAAM,EAC/D,eAAgB,EAAoB,UAAW,EAAU,CAAM,EAC/D,oBAAqB,EAAoB,eAAgB,EAAU,CAAM,CAC3E,GAOW,EAA4B,CACvC,eAAgB,+DAChB,eAAgB,+DAChB,oBAAqB,mEACvB,EAKa,EAAgB,CAAC,IAA6D,CACzF,OAAO,EACL,IAAM,KAAK,MAAM,CAAO,EACxB,CAAC,IACC,EAAY,uCAAwC,CAClD,MAAO,aAAiB,MAAQ,EAAQ,MAC1C,CAAC,CACL,GAMW,EAAyB,CACpC,EAAqB,IACW,CAChC,IAAM,EAAiB,EAAO,gBAAkB,EAAc,eACxD,EAAiB,EAAO,gBAAkB,EAAc,eACxD,EAAsB,EAAO,qBAAuB,EAAc,oBAExE,MAAO,CACL,aAAc,CACZ,CACE,MAAO,CACL,CACE,KAAM,UACN,QAAS,CACX,CACF,CACF,CACF,EACA,iBAAkB,CAChB,CACE,MAAO,CACL,CACE,KAAM,UACN,QAAS,CACX,CACF,CACF,CACF,EACA,aAAc,CACZ,CACE,QAAS,GACT,MAAO,CACL,CACE,KAAM,UACN,QAAS,CACX,CACF,CACF,CACF,CACF,GAMW,EAAgB,CAC3B,EACA,EAAyB,IACF,CACvB,IAAM,EAAW,EAAuB,CAAU,EAElD,MAAO,IACF,EACH,MAAO,IACD,EAAiB,OAAS,CAAC,KAC5B,CACL,CACF,GAMW,EAAiB,CAAC,EAAyB,IAAsC,CAC5F,MAAO,CACL,MAAO,EAAuB,CAAU,CAC1C,GAMW,EAAoB,CAAC,IAAyC,CACzE,OAAO,KAAK,UAAU,EAAU,KAAM,CAAC,GAM5B,EAAoB,IAAc,CAC7C,MAAO,8FAMI,EAAkB,CAC7B,EACA,EAAyB,IACO,CAChC,GAAI,IAAoB,MAAQ,EAAgB,KAAK,IAAM,GAAI,CAE7D,IAAM,EAAW,EAAe,CAAU,EAC1C,OAAO,EAAQ,EAAkB,CAAQ,CAAC,EAI5C,IAAM,EAAc,EAAc,CAAe,EACjD,GAAI,EAAY,OAAS,UAAW,CAElC,IAAM,EAAW,EAAe,CAAU,EAC1C,OAAO,EAAQ,EAAkB,CAAQ,CAAC,EAI5C,IAAM,EAAS,EAAc,EAAY,MAAO,CAAU,EAC1D,OAAO,EAAQ,EAAkB,CAAM,CAAC,GAM7B,EAAqB,CAAC,IAAwD,CACzF,GAAI,EAAO,iBAAmB,QAAa,EAAO,eAAe,KAAK,IAAM,GAC1E,OAAO,EAAQ,EAAY,iCAAiC,CAAC,EAG/D,GAAI,EAAO,iBAAmB,QAAa,EAAO,eAAe,KAAK,IAAM,GAC1E,OAAO,EAAQ,EAAY,iCAAiC,CAAC,EAG/D,OAAO,EAAQ,CAAM",
8   | - "debugId": "
  8 | + "debugId": "FAAB79DEE5BF095C64756E2164756E21",
9 9 | "names": []
10 10 | }
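`processSettings`, defined in the source above, is the pure core of the Claude Code hook setup: parse the existing settings (or create fresh ones), merge the hook configuration, and serialize. A sketch of both paths; the import path is hypothetical, the exports are the module's own:

```ts
// Sketch; './claude-code-setup.js' is a hypothetical path to the module above.
import { DEFAULT_HOOKS, processSettings } from './claude-code-setup.js';

// Existing settings: unrelated keys survive, hooks are merged in.
const existing = JSON.stringify({ model: 'sonnet', hooks: {} });
const merged = processSettings(existing, DEFAULT_HOOKS);
if (merged._tag === 'Success') {
  console.log(merged.value);
  // Pretty-printed JSON keeping "model" and adding SessionStart,
  // UserPromptSubmit, and Notification hook entries.
}

// null, empty, or unparseable input falls back to a fresh settings object:
const fresh = processSettings(null); // Success containing only the hooks block
```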