@sylphx/flow 0.0.3 → 0.0.4

This diff shows the changes between package versions that have been publicly released to one of the supported registries, as they appear in those registries. It is provided for informational purposes only.
Files changed (31)
  1. package/README.md +14 -10
  2. package/dist/{chunk-rtrp1qa5.js → chunk-0fz6p8ka.js} +2 -2
  3. package/dist/{chunk-rtrp1qa5.js.map → chunk-0fz6p8ka.js.map} +1 -1
  4. package/dist/{chunk-j7rqqb2w.js → chunk-5eyfa6f8.js} +2 -2
  5. package/dist/{chunk-j7rqqb2w.js.map → chunk-5eyfa6f8.js.map} +1 -1
  6. package/dist/chunk-6hhhwb03.js +3 -0
  7. package/dist/chunk-6hhhwb03.js.map +10 -0
  8. package/dist/{chunk-z9h5spfk.js → chunk-8saardnr.js} +3 -3
  9. package/dist/{chunk-z9h5spfk.js.map → chunk-8saardnr.js.map} +1 -1
  10. package/dist/chunk-e07nqvwm.js +3 -0
  11. package/dist/chunk-e07nqvwm.js.map +10 -0
  12. package/dist/{chunk-8bvc13yx.js → chunk-e80fq5bq.js} +2 -2
  13. package/dist/{chunk-8bvc13yx.js.map → chunk-e80fq5bq.js.map} +1 -1
  14. package/dist/{chunk-5grx6px4.js → chunk-hpef24m4.js} +2 -2
  15. package/dist/{chunk-5grx6px4.js.map → chunk-hpef24m4.js.map} +1 -1
  16. package/dist/chunk-n5gbcebk.js +5 -0
  17. package/dist/chunk-n5gbcebk.js.map +10 -0
  18. package/dist/{chunk-wv5zpnbp.js → chunk-n9kqfqp9.js} +2 -2
  19. package/dist/{chunk-wv5zpnbp.js.map → chunk-n9kqfqp9.js.map} +1 -1
  20. package/dist/chunk-sdjvmsk2.js +3 -0
  21. package/dist/{chunk-npw0sq1m.js.map → chunk-sdjvmsk2.js.map} +1 -1
  22. package/dist/{chunk-p56r2pqw.js → chunk-t6aqpg7b.js} +1 -1
  23. package/dist/{chunk-e0cdpket.js → chunk-wj8k6qd1.js} +2 -2
  24. package/dist/{chunk-e0cdpket.js.map → chunk-wj8k6qd1.js.map} +1 -1
  25. package/dist/index.js +153 -154
  26. package/dist/index.js.map +6 -7
  27. package/package.json +1 -1
  28. package/dist/chunk-npw0sq1m.js +0 -3
  29. package/dist/chunk-w8epyz4j.js +0 -3
  30. package/dist/chunk-w8epyz4j.js.map +0 -10
  31. package/dist/{chunk-p56r2pqw.js.map → chunk-t6aqpg7b.js.map} +0 -0
@@ -0,0 +1,3 @@
+ import{g as v}from"./chunk-5eyfa6f8.js";import{k as q}from"./chunk-wj8k6qd1.js";import"./chunk-e80fq5bq.js";import w from"node:fs/promises";import B from"node:path";var I=(...b)=>B.join(...b),J=(...b)=>B.resolve(...b),K=(b)=>B.dirname(b),L=(b,k)=>B.basename(b,k),M=(b)=>B.extname(b),N=async(b)=>{return q(async()=>{return await w.readFile(b,"utf-8")},(k)=>v(`Failed to read file: ${b}`,b,"read",{cause:k instanceof Error?k:void 0}))},O=async(b,k)=>{return q(async()=>{await w.writeFile(b,k,"utf-8")},(z)=>v(`Failed to write file: ${b}`,b,"write",{cause:z instanceof Error?z:void 0}))},Q=async(b)=>{return q(async()=>{await w.unlink(b)},(k)=>v(`Failed to delete file: ${b}`,b,"delete",{cause:k instanceof Error?k:void 0}))},R=async(b,k)=>{return q(async()=>{await w.mkdir(b,{recursive:k?.recursive??!0})},(z)=>v(`Failed to create directory: ${b}`,b,"create",{cause:z instanceof Error?z:void 0}))},T=async(b)=>{return q(async()=>{try{return await w.access(b),!0}catch{return!1}},(k)=>v(`Failed to check if path exists: ${b}`,b,"stat",{cause:k instanceof Error?k:void 0}))},U=async(b)=>{return q(async()=>{return await w.readdir(b)},(k)=>v(`Failed to read directory: ${b}`,b,"read",{cause:k instanceof Error?k:void 0}))},V=async(b)=>{return q(async()=>{let k=await w.stat(b);return{isFile:k.isFile(),isDirectory:k.isDirectory(),size:k.size}},(k)=>v(`Failed to get stats for: ${b}`,b,"stat",{cause:k instanceof Error?k:void 0}))};export{O as writeFile,J as resolvePath,N as readFile,U as readDirectory,T as pathExists,I as joinPath,V as getStats,M as extname,K as dirname,Q as deleteFile,R as createDirectory,L as basename};
+
+ //# debugId=9075DAC5AD7C01A364756E2164756E21
@@ -5,6 +5,6 @@
  "/**\n * Functional composable for file system operations\n * Returns Result instead of throwing exceptions\n *\n * DESIGN RATIONALE:\n * - Explicit error handling\n * - Composable file operations\n * - Type-safe path handling\n * - Separation of concerns (pure path operations vs side effects)\n */\n\nimport fsPromises from 'node:fs/promises';\nimport path from 'node:path';\nimport type { FileSystemError } from '../../core/functional/error-types.js';\nimport { fileSystemError } from '../../core/functional/error-types.js';\nimport type { Result } from '../../core/functional/result.js';\nimport { tryCatchAsync } from '../../core/functional/result.js';\n\n/**\n * Pure path operations (no side effects)\n */\n\nexport const joinPath = (...segments: string[]): string => path.join(...segments);\n\nexport const resolvePath = (...segments: string[]): string => path.resolve(...segments);\n\nexport const dirname = (filePath: string): string => path.dirname(filePath);\n\nexport const basename = (filePath: string, ext?: string): string => path.basename(filePath, ext);\n\nexport const extname = (filePath: string): string => path.extname(filePath);\n\n/**\n * File system operations (side effects, return Result)\n */\n\nexport const readFile = async (filePath: string): Promise<Result<string, FileSystemError>> => {\n return tryCatchAsync(\n async () => {\n return await fsPromises.readFile(filePath, 'utf-8');\n },\n (error) =>\n fileSystemError(`Failed to read file: ${filePath}`, filePath, 'read', {\n cause: error instanceof Error ? error : undefined,\n })\n );\n};\n\nexport const writeFile = async (\n filePath: string,\n content: string\n): Promise<Result<void, FileSystemError>> => {\n return tryCatchAsync(\n async () => {\n await fsPromises.writeFile(filePath, content, 'utf-8');\n },\n (error) =>\n fileSystemError(`Failed to write file: ${filePath}`, filePath, 'write', {\n cause: error instanceof Error ? error : undefined,\n })\n );\n};\n\nexport const deleteFile = async (filePath: string): Promise<Result<void, FileSystemError>> => {\n return tryCatchAsync(\n async () => {\n await fsPromises.unlink(filePath);\n },\n (error) =>\n fileSystemError(`Failed to delete file: ${filePath}`, filePath, 'delete', {\n cause: error instanceof Error ? error : undefined,\n })\n );\n};\n\nexport const createDirectory = async (\n dirPath: string,\n options?: { recursive?: boolean }\n): Promise<Result<void, FileSystemError>> => {\n return tryCatchAsync(\n async () => {\n await fsPromises.mkdir(dirPath, { recursive: options?.recursive ?? true });\n },\n (error) =>\n fileSystemError(`Failed to create directory: ${dirPath}`, dirPath, 'create', {\n cause: error instanceof Error ? error : undefined,\n })\n );\n};\n\nexport const pathExists = async (filePath: string): Promise<Result<boolean, FileSystemError>> => {\n return tryCatchAsync(\n async () => {\n try {\n await fsPromises.access(filePath);\n return true;\n } catch {\n return false;\n }\n },\n (error) =>\n fileSystemError(`Failed to check if path exists: ${filePath}`, filePath, 'stat', {\n cause: error instanceof Error ? error : undefined,\n })\n );\n};\n\nexport const readDirectory = async (\n dirPath: string\n): Promise<Result<string[], FileSystemError>> => {\n return tryCatchAsync(\n async () => {\n return await fsPromises.readdir(dirPath);\n },\n (error) =>\n fileSystemError(`Failed to read directory: ${dirPath}`, dirPath, 'read', {\n cause: error instanceof Error ? 
error : undefined,\n })\n );\n};\n\nexport const getStats = async (\n filePath: string\n): Promise<Result<{ isFile: boolean; isDirectory: boolean; size: number }, FileSystemError>> => {\n return tryCatchAsync(\n async () => {\n const stats = await fsPromises.stat(filePath);\n return {\n isFile: stats.isFile(),\n isDirectory: stats.isDirectory(),\n size: stats.size,\n };\n },\n (error) =>\n fileSystemError(`Failed to get stats for: ${filePath}`, filePath, 'stat', {\n cause: error instanceof Error ? error : undefined,\n })\n );\n};\n"
  ],
  "mappings": "wHAWA,0BACA,mBAUO,IAAM,EAAW,IAAI,IAA+B,EAAK,KAAK,GAAG,CAAQ,EAEnE,EAAc,IAAI,IAA+B,EAAK,QAAQ,GAAG,CAAQ,EAEzE,EAAU,CAAC,IAA6B,EAAK,QAAQ,CAAQ,EAE7D,EAAW,CAAC,EAAkB,IAAyB,EAAK,SAAS,EAAU,CAAG,EAElF,EAAU,CAAC,IAA6B,EAAK,QAAQ,CAAQ,EAM7D,EAAW,MAAO,IAA+D,CAC5F,OAAO,EACL,SAAY,CACV,OAAO,MAAM,EAAW,SAAS,EAAU,OAAO,GAEpD,CAAC,IACC,EAAgB,wBAAwB,IAAY,EAAU,OAAQ,CACpE,MAAO,aAAiB,MAAQ,EAAQ,MAC1C,CAAC,CACL,GAGW,EAAY,MACvB,EACA,IAC2C,CAC3C,OAAO,EACL,SAAY,CACV,MAAM,EAAW,UAAU,EAAU,EAAS,OAAO,GAEvD,CAAC,IACC,EAAgB,yBAAyB,IAAY,EAAU,QAAS,CACtE,MAAO,aAAiB,MAAQ,EAAQ,MAC1C,CAAC,CACL,GAGW,EAAa,MAAO,IAA6D,CAC5F,OAAO,EACL,SAAY,CACV,MAAM,EAAW,OAAO,CAAQ,GAElC,CAAC,IACC,EAAgB,0BAA0B,IAAY,EAAU,SAAU,CACxE,MAAO,aAAiB,MAAQ,EAAQ,MAC1C,CAAC,CACL,GAGW,EAAkB,MAC7B,EACA,IAC2C,CAC3C,OAAO,EACL,SAAY,CACV,MAAM,EAAW,MAAM,EAAS,CAAE,UAAW,GAAS,WAAa,EAAK,CAAC,GAE3E,CAAC,IACC,EAAgB,+BAA+B,IAAW,EAAS,SAAU,CAC3E,MAAO,aAAiB,MAAQ,EAAQ,MAC1C,CAAC,CACL,GAGW,EAAa,MAAO,IAAgE,CAC/F,OAAO,EACL,SAAY,CACV,GAAI,CAEF,OADA,MAAM,EAAW,OAAO,CAAQ,EACzB,GACP,KAAM,CACN,MAAO,KAGX,CAAC,IACC,EAAgB,mCAAmC,IAAY,EAAU,OAAQ,CAC/E,MAAO,aAAiB,MAAQ,EAAQ,MAC1C,CAAC,CACL,GAGW,EAAgB,MAC3B,IAC+C,CAC/C,OAAO,EACL,SAAY,CACV,OAAO,MAAM,EAAW,QAAQ,CAAO,GAEzC,CAAC,IACC,EAAgB,6BAA6B,IAAW,EAAS,OAAQ,CACvE,MAAO,aAAiB,MAAQ,EAAQ,MAC1C,CAAC,CACL,GAGW,EAAW,MACtB,IAC8F,CAC9F,OAAO,EACL,SAAY,CACV,IAAM,EAAQ,MAAM,EAAW,KAAK,CAAQ,EAC5C,MAAO,CACL,OAAQ,EAAM,OAAO,EACrB,YAAa,EAAM,YAAY,EAC/B,KAAM,EAAM,IACd,GAEF,CAAC,IACC,EAAgB,4BAA4B,IAAY,EAAU,OAAQ,CACxE,MAAO,aAAiB,MAAQ,EAAQ,MAC1C,CAAC,CACL",
- "debugId": "05B3E02BA3B1178D64756E2164756E21",
+ "debugId": "9075DAC5AD7C01A364756E2164756E21",
  "names": []
  }
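
The source map above embeds the original TypeScript for this module: pure path helpers (joinPath, resolvePath, dirname, basename, extname) with no side effects, and node:fs/promises operations wrapped in tryCatchAsync so they return a Result instead of throwing. A minimal caller sketch; the import path is hypothetical (the compiled module only ships inside the hashed chunks), and the _tag discriminant comes from the Result source embedded further down in this diff:

    // Hypothetical import path, for illustration only.
    import { readFile, writeFile, pathExists } from './file-system.js';

    const exists = await pathExists('./flow.config.json');
    if (exists._tag === 'Success' && exists.value) {
      const content = await readFile('./flow.config.json');
      if (content._tag === 'Failure') {
        // fileSystemError packs the message, the failing path, and the operation ('read')
        console.error(content.error);
      } else {
        await writeFile('./flow.config.copy.json', content.value);
      }
    }
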
@@ -1,4 +1,4 @@
- import"./chunk-8bvc13yx.js";import{AutoTokenizer as j}from"@huggingface/transformers";class R{tokenizer;initialized=!1;modelPath;constructor(B={}){this.modelPath=B.modelPath||"./models/starcoder2"}async initialize(){if(this.initialized)return;try{this.tokenizer=await j.from_pretrained(this.modelPath),this.initialized=!0}catch(B){throw Error(`Tokenizer initialization failed: ${B.message}`)}}async tokenize(B){if(!this.initialized)await this.initialize();let H=Date.now();if(!B||B.trim().length===0)return{tokens:[],metadata:{totalTokens:0,vocabSize:49152,processingTime:Date.now()-H,averageConfidence:0},raw:{inputIds:[],decodedText:""}};try{let J=(await this.tokenizer(B)).input_ids.tolist()[0],O=await this.tokenizer.decode(J),U=await this.createDirectTokens(O,J),K=Date.now()-H;return{tokens:U,metadata:{totalTokens:U.length,vocabSize:49152,processingTime:K,averageConfidence:0.95},raw:{inputIds:J,decodedText:O}}}catch(G){throw Error(`Tokenization failed: ${G.message}`)}}async createDirectTokens(B,H){let G=[];for(let J=0;J<H.length;J++){let O=H[J];try{let K=(await this.tokenizer.decode([O],{skip_special_tokens:!0})).trim().toLowerCase();if(K.length>0)G.push({text:K,id:O,score:1,confidence:1,relevance:"high"})}catch(U){}}return G}async getTopTokens(B,H=20){return(await this.tokenize(B)).tokens.slice(0,H)}async getTechnicalTokens(B){return(await this.tokenize(B)).tokens}async decode(B){if(!this.initialized)throw Error("Tokenizer not initialized. Call initialize() first.");return await this.tokenizer.decode(B)}async encode(B){if(!this.initialized)throw Error("Tokenizer not initialized. Call initialize() first.");return(await this.tokenizer(B)).input_ids.tolist()[0]}}async function k(B,H){try{let G=await B.getAllCodebaseFiles();if(H){if(H.file_extensions&&H.file_extensions.length>0)G=G.filter((K)=>H.file_extensions?.some((W)=>K.path.endsWith(W)));if(H.path_filter)G=G.filter((K)=>K.path.includes(H.path_filter));if(H.exclude_paths&&H.exclude_paths.length>0)G=G.filter((K)=>!H.exclude_paths?.some((W)=>K.path.includes(W)))}if(G.length===0)return null;let J=[];for(let K of G){let W=await B.getTFIDFDocument(K.path);if(W){let _=await B.getTFIDFTerms(K.path),X=new Map,$=new Map;for(let[E,Y]of Object.entries(_))X.set(E,Y);let Z=W.rawTerms||{};for(let[E,Y]of Object.entries(Z))$.set(E,Y);J.push({uri:`file://${K.path}`,terms:X,rawTerms:$,magnitude:W.magnitude})}}if(J.length===0)return null;let O=await B.getIDFValues(),U=new Map;for(let[K,W]of Object.entries(O))U.set(K,W);return{documents:J,idf:U,totalDocuments:J.length,metadata:{generatedAt:new Date().toISOString(),version:"1.0.0"}}}catch(G){return console.error("[ERROR] Failed to build search index from database:",G),null}}function M(B){let H=Array.from(B.values()).reduce((J,O)=>J+O,0),G=new Map;for(let[J,O]of B.entries())G.set(J,O/H);return G}function z(B,H){let G=new Map;for(let O of B){let U=new Set(O.keys());for(let K of U)G.set(K,(G.get(K)||0)+1)}let J=new Map;for(let[O,U]of G.entries())J.set(O,Math.log(H/U));return J}function V(B,H){let G=new Map;for(let[J,O]of B.entries()){let U=H.get(J)||0;G.set(J,O*U)}return G}function w(B){let H=0;for(let G of B.values())H+=G*G;return Math.sqrt(H)}var Q=null,v=!1;async function D(){if(!Q)Q=new R({modelPath:"./models/starcoder2"});if(!v){let{log:B,error:H}=console;console.log=()=>{},console.error=()=>{};try{await Q.initialize(),v=!0}finally{console.log=B,console.error=H}}return Q}async function S(B){let G=await(await D()).tokenize(B),J=new Map;for(let O of G.tokens){let 
U=O.text.toLowerCase(),K=J.get(U)||0;J.set(U,K+O.score)}return J}async function P(B){let G=await(await D()).tokenize(B),J=new Map;for(let O of G.tokens){let U=O.text.toLowerCase();if(!J.has(U)||O.score>0.8)J.set(U,O.text)}return Array.from(J.values())}async function q(B,H){let J=[];for(let K=0;K<B.length;K+=1){let W=B.slice(K,K+1),_=[];for(let X=0;X<W.length;X++){let $=W[X],Z=$.uri.split("/").pop()||$.uri;H?.({current:K+X+1,total:B.length,fileName:Z,status:"processing"});try{let E=await S($.content);_.push({uri:$.uri,terms:E}),H?.({current:K+X+1,total:B.length,fileName:Z,status:"completed"})}catch(E){_.push({uri:$.uri,terms:new Map}),H?.({current:K+X+1,total:B.length,fileName:Z,status:"skipped"})}}J.push(..._)}let O=z(J.map((K)=>K.terms),B.length);return{documents:J.map((K)=>{let W=M(K.terms),_=V(W,O),X=w(_);return{uri:K.uri,terms:_,rawTerms:K.terms,magnitude:X}}),idf:O,totalDocuments:B.length,metadata:{generatedAt:new Date().toISOString(),version:"5.0.0",tokenizer:"AdvancedCodeTokenizer",features:["Industry-leading code understanding","Advanced technical term recognition","Optimized for code search","Simple and effective approach","No unnecessary complexity"]}}}function h(B,H){let G=0;for(let[O,U]of B.entries()){let K=H.terms.get(O)||0;G+=U*K}let J=w(B);if(J===0||H.magnitude===0)return 0;return G/(J*H.magnitude)}async function I(B,H){let G=await P(B),J=new Map;for(let O of G){let U=O.toLowerCase(),K=H.get(U)||0;if(K>0)J.set(U,K)}return J}async function y(B,H,G={}){let{limit:J=10,minScore:O=0,boostFactors:U={}}=G,{exactMatch:K=1.5,phraseMatch:W=2,technicalMatch:_=1.8,identifierMatch:X=1.3}=U,$=await I(B,H.idf),Z=(await P(B)).map((Y)=>Y.toLowerCase());return H.documents.map((Y)=>{let L=h($,Y),C=[];for(let N of Z)if(Y.rawTerms.has(N)){let A=K;if(b(N))A=Math.max(A,_);if(p(N))A=Math.max(A,X);L*=A,C.push(N)}if(C.length===Z.length&&Z.length>1)L*=W;if(Z.length>3&&C.length>=Z.length*0.7)L*=1.2;return{uri:Y.uri,score:L,matchedTerms:C}}).filter((Y)=>Y.score>=O).sort((Y,L)=>L.score-Y.score).slice(0,J)}function b(B){return[/\b[A-Z]{2,}\b/,/\b[A-Z][a-z]+(?:[A-Z][a-z]+)+\b/,/\b[a-z]+[A-Z][a-z]*\b/,/\b\w+(?:Dir|Config|File|Path|Data|Service|Manager|Handler)\b/,/\b(?:get|set|is|has|can|should|will|do)[A-Z]\w*\b/,/\b(?:http|https|json|xml|yaml|sql|api|url|uri)\b/].some((G)=>G.test(B))}function p(B){return/^[a-zA-Z][a-zA-Z0-9_]*$/.test(B)&&B.length>1}function T(B){let H={documents:B.documents.map((G)=>({uri:G.uri,terms:Array.from(G.terms.entries()),rawTerms:Array.from(G.rawTerms.entries()),magnitude:G.magnitude})),idf:Array.from(B.idf.entries()),totalDocuments:B.totalDocuments,metadata:B.metadata};return JSON.stringify(H,null,2)}function f(B){let H=JSON.parse(B);return{documents:H.documents.map((G)=>({uri:G.uri,terms:new Map(G.terms),rawTerms:new Map(G.rawTerms),magnitude:G.magnitude})),idf:new Map(H.idf),totalDocuments:H.totalDocuments,metadata:H.metadata}}export{T as serializeIndex,y as searchDocuments,I as processQuery,f as deserializeIndex,h as calculateCosineSimilarity,k as buildSearchIndexFromDB,q as buildSearchIndex};
+ import"./chunk-e80fq5bq.js";import{AutoTokenizer as j}from"@huggingface/transformers";class R{tokenizer;initialized=!1;modelPath;constructor(B={}){this.modelPath=B.modelPath||"./models/starcoder2"}async initialize(){if(this.initialized)return;try{this.tokenizer=await j.from_pretrained(this.modelPath),this.initialized=!0}catch(B){throw Error(`Tokenizer initialization failed: ${B.message}`)}}async tokenize(B){if(!this.initialized)await this.initialize();let H=Date.now();if(!B||B.trim().length===0)return{tokens:[],metadata:{totalTokens:0,vocabSize:49152,processingTime:Date.now()-H,averageConfidence:0},raw:{inputIds:[],decodedText:""}};try{let J=(await this.tokenizer(B)).input_ids.tolist()[0],O=await this.tokenizer.decode(J),U=await this.createDirectTokens(O,J),K=Date.now()-H;return{tokens:U,metadata:{totalTokens:U.length,vocabSize:49152,processingTime:K,averageConfidence:0.95},raw:{inputIds:J,decodedText:O}}}catch(G){throw Error(`Tokenization failed: ${G.message}`)}}async createDirectTokens(B,H){let G=[];for(let J=0;J<H.length;J++){let O=H[J];try{let K=(await this.tokenizer.decode([O],{skip_special_tokens:!0})).trim().toLowerCase();if(K.length>0)G.push({text:K,id:O,score:1,confidence:1,relevance:"high"})}catch(U){}}return G}async getTopTokens(B,H=20){return(await this.tokenize(B)).tokens.slice(0,H)}async getTechnicalTokens(B){return(await this.tokenize(B)).tokens}async decode(B){if(!this.initialized)throw Error("Tokenizer not initialized. Call initialize() first.");return await this.tokenizer.decode(B)}async encode(B){if(!this.initialized)throw Error("Tokenizer not initialized. Call initialize() first.");return(await this.tokenizer(B)).input_ids.tolist()[0]}}async function k(B,H){try{let G=await B.getAllCodebaseFiles();if(H){if(H.file_extensions&&H.file_extensions.length>0)G=G.filter((K)=>H.file_extensions?.some((W)=>K.path.endsWith(W)));if(H.path_filter)G=G.filter((K)=>K.path.includes(H.path_filter));if(H.exclude_paths&&H.exclude_paths.length>0)G=G.filter((K)=>!H.exclude_paths?.some((W)=>K.path.includes(W)))}if(G.length===0)return null;let J=[];for(let K of G){let W=await B.getTFIDFDocument(K.path);if(W){let _=await B.getTFIDFTerms(K.path),X=new Map,$=new Map;for(let[E,Y]of Object.entries(_))X.set(E,Y);let Z=W.rawTerms||{};for(let[E,Y]of Object.entries(Z))$.set(E,Y);J.push({uri:`file://${K.path}`,terms:X,rawTerms:$,magnitude:W.magnitude})}}if(J.length===0)return null;let O=await B.getIDFValues(),U=new Map;for(let[K,W]of Object.entries(O))U.set(K,W);return{documents:J,idf:U,totalDocuments:J.length,metadata:{generatedAt:new Date().toISOString(),version:"1.0.0"}}}catch(G){return console.error("[ERROR] Failed to build search index from database:",G),null}}function M(B){let H=Array.from(B.values()).reduce((J,O)=>J+O,0),G=new Map;for(let[J,O]of B.entries())G.set(J,O/H);return G}function z(B,H){let G=new Map;for(let O of B){let U=new Set(O.keys());for(let K of U)G.set(K,(G.get(K)||0)+1)}let J=new Map;for(let[O,U]of G.entries())J.set(O,Math.log(H/U));return J}function V(B,H){let G=new Map;for(let[J,O]of B.entries()){let U=H.get(J)||0;G.set(J,O*U)}return G}function w(B){let H=0;for(let G of B.values())H+=G*G;return Math.sqrt(H)}var Q=null,v=!1;async function D(){if(!Q)Q=new R({modelPath:"./models/starcoder2"});if(!v){let{log:B,error:H}=console;console.log=()=>{},console.error=()=>{};try{await Q.initialize(),v=!0}finally{console.log=B,console.error=H}}return Q}async function S(B){let G=await(await D()).tokenize(B),J=new Map;for(let O of G.tokens){let 
U=O.text.toLowerCase(),K=J.get(U)||0;J.set(U,K+O.score)}return J}async function P(B){let G=await(await D()).tokenize(B),J=new Map;for(let O of G.tokens){let U=O.text.toLowerCase();if(!J.has(U)||O.score>0.8)J.set(U,O.text)}return Array.from(J.values())}async function q(B,H){let J=[];for(let K=0;K<B.length;K+=1){let W=B.slice(K,K+1),_=[];for(let X=0;X<W.length;X++){let $=W[X],Z=$.uri.split("/").pop()||$.uri;H?.({current:K+X+1,total:B.length,fileName:Z,status:"processing"});try{let E=await S($.content);_.push({uri:$.uri,terms:E}),H?.({current:K+X+1,total:B.length,fileName:Z,status:"completed"})}catch(E){_.push({uri:$.uri,terms:new Map}),H?.({current:K+X+1,total:B.length,fileName:Z,status:"skipped"})}}J.push(..._)}let O=z(J.map((K)=>K.terms),B.length);return{documents:J.map((K)=>{let W=M(K.terms),_=V(W,O),X=w(_);return{uri:K.uri,terms:_,rawTerms:K.terms,magnitude:X}}),idf:O,totalDocuments:B.length,metadata:{generatedAt:new Date().toISOString(),version:"5.0.0",tokenizer:"AdvancedCodeTokenizer",features:["Industry-leading code understanding","Advanced technical term recognition","Optimized for code search","Simple and effective approach","No unnecessary complexity"]}}}function h(B,H){let G=0;for(let[O,U]of B.entries()){let K=H.terms.get(O)||0;G+=U*K}let J=w(B);if(J===0||H.magnitude===0)return 0;return G/(J*H.magnitude)}async function I(B,H){let G=await P(B),J=new Map;for(let O of G){let U=O.toLowerCase(),K=H.get(U)||0;if(K>0)J.set(U,K)}return J}async function y(B,H,G={}){let{limit:J=10,minScore:O=0,boostFactors:U={}}=G,{exactMatch:K=1.5,phraseMatch:W=2,technicalMatch:_=1.8,identifierMatch:X=1.3}=U,$=await I(B,H.idf),Z=(await P(B)).map((Y)=>Y.toLowerCase());return H.documents.map((Y)=>{let L=h($,Y),C=[];for(let N of Z)if(Y.rawTerms.has(N)){let A=K;if(b(N))A=Math.max(A,_);if(p(N))A=Math.max(A,X);L*=A,C.push(N)}if(C.length===Z.length&&Z.length>1)L*=W;if(Z.length>3&&C.length>=Z.length*0.7)L*=1.2;return{uri:Y.uri,score:L,matchedTerms:C}}).filter((Y)=>Y.score>=O).sort((Y,L)=>L.score-Y.score).slice(0,J)}function b(B){return[/\b[A-Z]{2,}\b/,/\b[A-Z][a-z]+(?:[A-Z][a-z]+)+\b/,/\b[a-z]+[A-Z][a-z]*\b/,/\b\w+(?:Dir|Config|File|Path|Data|Service|Manager|Handler)\b/,/\b(?:get|set|is|has|can|should|will|do)[A-Z]\w*\b/,/\b(?:http|https|json|xml|yaml|sql|api|url|uri)\b/].some((G)=>G.test(B))}function p(B){return/^[a-zA-Z][a-zA-Z0-9_]*$/.test(B)&&B.length>1}function T(B){let H={documents:B.documents.map((G)=>({uri:G.uri,terms:Array.from(G.terms.entries()),rawTerms:Array.from(G.rawTerms.entries()),magnitude:G.magnitude})),idf:Array.from(B.idf.entries()),totalDocuments:B.totalDocuments,metadata:B.metadata};return JSON.stringify(H,null,2)}function f(B){let H=JSON.parse(B);return{documents:H.documents.map((G)=>({uri:G.uri,terms:new Map(G.terms),rawTerms:new Map(G.rawTerms),magnitude:G.magnitude})),idf:new Map(H.idf),totalDocuments:H.totalDocuments,metadata:H.metadata}}export{T as serializeIndex,y as searchDocuments,I as processQuery,f as deserializeIndex,h as calculateCosineSimilarity,k as buildSearchIndexFromDB,q as buildSearchIndex};
  export{q as c,y as d};
 
  //# debugId=A2D767B1A03BB0A664756E2164756E21
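
De-minified, the index math in this chunk is textbook TF-IDF with cosine ranking: M normalizes raw counts into term frequencies, z computes inverse document frequency, V multiplies the two into weights, w takes the Euclidean magnitude, and h is the cosine score. A sketch with readable names (the names are mine; this reconstructs the minified functions above, it is not the package's source):

    // M: normalize raw term counts to term frequencies
    const toTf = (counts: Map<string, number>): Map<string, number> => {
      const total = [...counts.values()].reduce((a, b) => a + b, 0);
      const tf = new Map<string, number>();
      for (const [term, n] of counts) tf.set(term, n / total);
      return tf;
    };

    // z: idf(term) = ln(totalDocs / docsContaining(term))
    const toIdf = (docs: Map<string, number>[], totalDocs: number): Map<string, number> => {
      const df = new Map<string, number>();
      for (const doc of docs)
        for (const term of new Set(doc.keys())) df.set(term, (df.get(term) ?? 0) + 1);
      const idf = new Map<string, number>();
      for (const [term, n] of df) idf.set(term, Math.log(totalDocs / n));
      return idf;
    };

    // w, used by h: cosine(query, doc) = dot(query, doc) / (|query| * |doc|), 0 if either is empty
    const magnitude = (v: Map<string, number>): number =>
      Math.sqrt([...v.values()].reduce((s, x) => s + x * x, 0));

Note that searchDocuments (y) then multiplies the raw cosine score by the boost factors visible in the code (exact match ×1.5, technical term ×1.8, identifier ×1.3, all-terms phrase match ×2), so the reported scores are not pure cosine values.
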
@@ -1,4 +1,4 @@
  var k=(h)=>({_tag:"Success",value:h}),q=(h)=>({_tag:"Failure",error:h});var w=(h,j=(b)=>b)=>{try{return k(h())}catch(b){return q(j(b))}},x=async(h,j=(b)=>b)=>{try{let b=await h();return k(b)}catch(b){return q(j(b))}};
- export{k as g,q as h,w as i,x as j};
+ export{k as h,q as i,w as j,x as k};
 
- //# debugId=470D6E2D430E28E264756E2164756E21
+ //# debugId=89CEB9962555A2F864756E2164756E21
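
The substantive change in this chunk, which the export shape and entry 23 in the file list suggest is the Result module (chunk-e0cdpket.js renamed to chunk-wj8k6qd1.js), is a one-letter re-lettering of its minified exports: g→h, h→i, i→j, j→k. Because the bundler content-hashes chunk filenames, that single change cascades through the file list above: every chunk that imports these bindings changes by one specifier, gets a new hash, and is renamed in turn. Assuming x is tryCatchAsync (it is the async variant in the minified line above), a consumer's import moves like this:

    // 0.0.3: tryCatchAsync was re-exported under the letter "j"
    import { j as tryCatchAsync } from "./chunk-e0cdpket.js";
    // 0.0.4: same function, new letter, new content-hashed filename
    import { k as tryCatchAsync } from "./chunk-wj8k6qd1.js";
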
@@ -5,6 +5,6 @@
  "/**\n * Result type for functional error handling\n * Represents success or failure without exceptions\n *\n * DESIGN RATIONALE:\n * - Explicit error handling at type level\n * - Composable through map/flatMap\n * - Forces caller to handle errors\n * - No hidden control flow (no thrown exceptions)\n */\n\nexport type Result<T, E = Error> = Success<T> | Failure<E>;\n\nexport interface Success<T> {\n readonly _tag: 'Success';\n readonly value: T;\n}\n\nexport interface Failure<E> {\n readonly _tag: 'Failure';\n readonly error: E;\n}\n\n/**\n * Constructors\n */\n\nexport const success = <T>(value: T): Success<T> => ({\n _tag: 'Success',\n value,\n});\n\nexport const failure = <E>(error: E): Failure<E> => ({\n _tag: 'Failure',\n error,\n});\n\n/**\n * Type guards\n */\n\nexport const isSuccess = <T, E>(result: Result<T, E>): result is Success<T> =>\n result._tag === 'Success';\n\nexport const isFailure = <T, E>(result: Result<T, E>): result is Failure<E> =>\n result._tag === 'Failure';\n\n/**\n * Transformations\n */\n\n/**\n * Transform the success value\n * Failure propagates unchanged\n */\nexport const map =\n <T, U, E>(fn: (value: T) => U) =>\n (result: Result<T, E>): Result<U, E> => {\n if (isSuccess(result)) {\n return success(fn(result.value));\n }\n return result;\n };\n\n/**\n * Transform the success value with a function that returns a Result\n * Enables chaining operations that can fail\n * Failure propagates unchanged\n */\nexport const flatMap =\n <T, U, E>(fn: (value: T) => Result<U, E>) =>\n (result: Result<T, E>): Result<U, E> => {\n if (isSuccess(result)) {\n return fn(result.value);\n }\n return result;\n };\n\n/**\n * Transform the error\n * Success propagates unchanged\n */\nexport const mapError =\n <T, E, F>(fn: (error: E) => F) =>\n (result: Result<T, E>): Result<T, F> => {\n if (isFailure(result)) {\n return failure(fn(result.error));\n }\n return result;\n };\n\n/**\n * Extract value or provide default\n */\nexport const getOrElse =\n <T>(defaultValue: T) =>\n <E>(result: Result<T, E>): T => {\n if (isSuccess(result)) {\n return result.value;\n }\n return defaultValue;\n };\n\n/**\n * Extract value or compute default\n */\nexport const getOrElseLazy =\n <T>(fn: () => T) =>\n <E>(result: Result<T, E>): T => {\n if (isSuccess(result)) {\n return result.value;\n }\n return fn();\n };\n\n/**\n * Pattern matching\n */\nexport const match =\n <T, E, U>(onSuccess: (value: T) => U, onFailure: (error: E) => U) =>\n (result: Result<T, E>): U => {\n if (isSuccess(result)) {\n return onSuccess(result.value);\n }\n return onFailure(result.error);\n };\n\n/**\n * Convert thrown exception to Result\n */\nexport const tryCatch = <T, E = Error>(\n fn: () => T,\n onError: (error: unknown) => E = (error: unknown) => error as E\n): Result<T, E> => {\n try {\n return success(fn());\n } catch (error) {\n return failure(onError(error));\n }\n};\n\n/**\n * Convert Promise to Result\n */\nexport const tryCatchAsync = async <T, E = Error>(\n fn: () => Promise<T>,\n onError: (error: unknown) => E = (error: unknown) => error as E\n): Promise<Result<T, E>> => {\n try {\n const value = await fn();\n return success(value);\n } catch (error) {\n return failure(onError(error));\n }\n};\n\n/**\n * Combine multiple Results into a single Result containing an array\n * Fails if any Result is a Failure (short-circuits on first failure)\n */\nexport const all = <T, E>(results: Result<T, E>[]): Result<T[], E> => {\n const values: T[] = [];\n\n for (const result of results) {\n if (isFailure(result)) {\n 
return result;\n }\n values.push(result.value);\n }\n\n return success(values);\n};\n\n/**\n * Run side effect for success case\n */\nexport const tap =\n <T, E>(fn: (value: T) => void) =>\n (result: Result<T, E>): Result<T, E> => {\n if (isSuccess(result)) {\n fn(result.value);\n }\n return result;\n };\n\n/**\n * Run side effect for failure case\n */\nexport const tapError =\n <T, E>(fn: (error: E) => void) =>\n (result: Result<T, E>): Result<T, E> => {\n if (isFailure(result)) {\n fn(result.error);\n }\n return result;\n };\n\n/**\n * Functional pipe for composing Result transformations\n */\nexport const pipe =\n <T, E>(result: Result<T, E>) =>\n <U>(fn: (result: Result<T, E>) => U): U =>\n fn(result);\n"
  ],
  "mappings": "AA2BO,IAAM,EAAU,CAAI,KAA0B,CACnD,KAAM,UACN,OACF,GAEa,EAAU,CAAI,KAA0B,CACnD,KAAM,UACN,OACF,GA+FO,IAAM,EAAW,CACtB,EACA,EAAiC,CAAC,IAAmB,IACpC,CACjB,GAAI,CACF,OAAO,EAAQ,EAAG,CAAC,EACnB,MAAO,EAAO,CACd,OAAO,EAAQ,EAAQ,CAAK,CAAC,IAOpB,EAAgB,MAC3B,EACA,EAAiC,CAAC,IAAmB,IAC3B,CAC1B,GAAI,CACF,IAAM,EAAQ,MAAM,EAAG,EACvB,OAAO,EAAQ,CAAK,EACpB,MAAO,EAAO,CACd,OAAO,EAAQ,EAAQ,CAAK,CAAC",
- "debugId": "470D6E2D430E28E264756E2164756E21",
+ "debugId": "89CEB9962555A2F864756E2164756E21",
  "names": []
  }
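
The embedded source above also shows the Result module's full combinator set: success/failure constructors, isSuccess/isFailure guards, map/flatMap/mapError, match, getOrElse, and tryCatch/tryCatchAsync for converting exceptions at the boundary. A small composition sketch using the signatures from that source (the import path is hypothetical):

    import { tryCatch, map, match } from './result.js'; // hypothetical path

    // tryCatch turns a throwing call into a Result<T, Error>
    const parsed = tryCatch(() => JSON.parse('{"port": 8080}') as { port: number });

    // map transforms only the Success branch; a Failure passes through unchanged
    const port = map((cfg: { port: number }) => cfg.port)(parsed);

    // match forces both branches to be handled
    const message = match(
      (p: number) => `listening on ${p}`,
      (err: unknown) => `invalid config: ${String(err)}`
    )(port);
    console.log(message); // "listening on 8080"
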