@sylphx/flow 0.2.12 → 1.0.0
This diff compares the contents of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between versions as they appear in their respective public registries.
- package/CHANGELOG.md +185 -0
- package/LOOP_MODE.md +446 -0
- package/package.json +44 -93
- package/README.md +0 -625
- package/assets/agents/coder.md +0 -32
- package/assets/agents/orchestrator.md +0 -36
- package/assets/agents/reviewer.md +0 -30
- package/assets/agents/writer.md +0 -30
- package/assets/knowledge/data/sql.md +0 -216
- package/assets/knowledge/guides/saas-template.md +0 -85
- package/assets/knowledge/guides/system-prompt.md +0 -344
- package/assets/knowledge/guides/tech-stack.md +0 -92
- package/assets/knowledge/guides/ui-ux.md +0 -44
- package/assets/knowledge/stacks/nextjs-app.md +0 -165
- package/assets/knowledge/stacks/node-api.md +0 -220
- package/assets/knowledge/stacks/react-app.md +0 -232
- package/assets/knowledge/universal/deployment.md +0 -109
- package/assets/knowledge/universal/performance.md +0 -121
- package/assets/knowledge/universal/security.md +0 -79
- package/assets/knowledge/universal/testing.md +0 -111
- package/assets/output-styles/silent.md +0 -23
- package/assets/rules/core.md +0 -144
- package/assets/slash-commands/commit.md +0 -23
- package/assets/slash-commands/context.md +0 -112
- package/assets/slash-commands/explain.md +0 -35
- package/assets/slash-commands/mep.md +0 -63
- package/assets/slash-commands/review.md +0 -39
- package/assets/slash-commands/test.md +0 -30
- package/dist/assets/agents/coder.md +0 -32
- package/dist/assets/agents/orchestrator.md +0 -36
- package/dist/assets/agents/reviewer.md +0 -30
- package/dist/assets/agents/writer.md +0 -30
- package/dist/assets/knowledge/data/sql.md +0 -216
- package/dist/assets/knowledge/guides/saas-template.md +0 -85
- package/dist/assets/knowledge/guides/system-prompt.md +0 -344
- package/dist/assets/knowledge/guides/tech-stack.md +0 -92
- package/dist/assets/knowledge/guides/ui-ux.md +0 -44
- package/dist/assets/knowledge/stacks/nextjs-app.md +0 -165
- package/dist/assets/knowledge/stacks/node-api.md +0 -220
- package/dist/assets/knowledge/stacks/react-app.md +0 -232
- package/dist/assets/knowledge/universal/deployment.md +0 -109
- package/dist/assets/knowledge/universal/performance.md +0 -121
- package/dist/assets/knowledge/universal/security.md +0 -79
- package/dist/assets/knowledge/universal/testing.md +0 -111
- package/dist/assets/output-styles/silent.md +0 -23
- package/dist/assets/rules/core.md +0 -144
- package/dist/assets/slash-commands/commit.md +0 -23
- package/dist/assets/slash-commands/context.md +0 -112
- package/dist/assets/slash-commands/explain.md +0 -35
- package/dist/assets/slash-commands/mep.md +0 -63
- package/dist/assets/slash-commands/review.md +0 -39
- package/dist/assets/slash-commands/test.md +0 -30
- package/dist/chunk-01gv4qey.js +0 -4
- package/dist/chunk-01gv4qey.js.map +0 -11
- package/dist/chunk-3m9whg4q.js +0 -4
- package/dist/chunk-3m9whg4q.js.map +0 -9
- package/dist/chunk-3w6pd43t.js +0 -25
- package/dist/chunk-3w6pd43t.js.map +0 -61
- package/dist/chunk-4nm4ere4.js +0 -4
- package/dist/chunk-4nm4ere4.js.map +0 -11
- package/dist/chunk-4vrj3f8r.js +0 -26
- package/dist/chunk-4vrj3f8r.js.map +0 -75
- package/dist/chunk-5njgv5k5.js +0 -161
- package/dist/chunk-5njgv5k5.js.map +0 -83
- package/dist/chunk-67n29s4q.js +0 -7
- package/dist/chunk-67n29s4q.js.map +0 -10
- package/dist/chunk-86ce45n6.js +0 -3
- package/dist/chunk-86ce45n6.js.map +0 -10
- package/dist/chunk-99pz5wm0.js +0 -75
- package/dist/chunk-99pz5wm0.js.map +0 -12
- package/dist/chunk-cv1nhr27.js +0 -2
- package/dist/chunk-cv1nhr27.js.map +0 -9
- package/dist/chunk-d409xn8f.js +0 -6
- package/dist/chunk-d409xn8f.js.map +0 -11
- package/dist/chunk-g0qpndpd.js +0 -23
- package/dist/chunk-g0qpndpd.js.map +0 -132
- package/dist/chunk-g4baca7p.js +0 -10
- package/dist/chunk-g4baca7p.js.map +0 -23
- package/dist/chunk-gc66xe7z.js +0 -4
- package/dist/chunk-gc66xe7z.js.map +0 -11
- package/dist/chunk-hj6qtsqp.js +0 -15
- package/dist/chunk-hj6qtsqp.js.map +0 -10
- package/dist/chunk-jbd95k1f.js +0 -14
- package/dist/chunk-jbd95k1f.js.map +0 -20
- package/dist/chunk-kn908zkk.js +0 -4
- package/dist/chunk-kn908zkk.js.map +0 -10
- package/dist/chunk-mw13a082.js +0 -4
- package/dist/chunk-mw13a082.js.map +0 -10
- package/dist/chunk-nke51f3c.js +0 -4
- package/dist/chunk-nke51f3c.js.map +0 -10
- package/dist/chunk-ns5atzyz.js +0 -3
- package/dist/chunk-ns5atzyz.js.map +0 -10
- package/dist/chunk-pp4r3hp4.js +0 -105
- package/dist/chunk-pp4r3hp4.js.map +0 -27
- package/dist/chunk-q4nh3vst.js +0 -54
- package/dist/chunk-q4nh3vst.js.map +0 -53
- package/dist/chunk-q5gqgs0p.js +0 -4
- package/dist/chunk-q5gqgs0p.js.map +0 -10
- package/dist/chunk-s9bsh0gp.js +0 -4
- package/dist/chunk-s9bsh0gp.js.map +0 -10
- package/dist/chunk-ss51dw5h.js +0 -27
- package/dist/chunk-ss51dw5h.js.map +0 -23
- package/dist/chunk-waemzsf4.js +0 -4
- package/dist/chunk-waemzsf4.js.map +0 -10
- package/dist/chunk-xs370t8p.js +0 -119
- package/dist/chunk-xs370t8p.js.map +0 -26
- package/dist/chunk-xtrn4wn0.js +0 -3
- package/dist/chunk-xtrn4wn0.js.map +0 -10
- package/dist/chunk-xvfz960r.js +0 -4
- package/dist/chunk-xvfz960r.js.map +0 -12
- package/dist/chunk-xytc0fks.js +0 -27
- package/dist/chunk-xytc0fks.js.map +0 -14
- package/dist/chunk-yxv7hqse.js +0 -23
- package/dist/chunk-yxv7hqse.js.map +0 -11
- package/dist/chunk-zv5y8yfq.js +0 -19
- package/dist/chunk-zv5y8yfq.js.map +0 -11
- package/dist/index.js +0 -854
- package/dist/index.js.map +0 -903
- package/drizzle/0000_wooden_lady_bullseye.sql +0 -52
- package/drizzle/0001_material_pyro.sql +0 -85
- package/drizzle/0002_lyrical_random.sql +0 -2
- package/drizzle/0003_romantic_lockjaw.sql +0 -4
- package/drizzle/0004_blushing_meteorite.sql +0 -6
- package/drizzle/meta/0000_snapshot.json +0 -310
- package/drizzle/meta/0001_snapshot.json +0 -906
- package/drizzle/meta/0002_snapshot.json +0 -920
- package/drizzle/meta/0003_snapshot.json +0 -920
- package/drizzle/meta/0004_snapshot.json +0 -921
- package/drizzle/meta/_journal.json +0 -41
package/dist/chunk-4nm4ere4.js
DELETED
@@ -1,4 +0,0 @@
import"./chunk-3m9whg4q.js";import{AutoTokenizer as j}from"@huggingface/transformers";class R{tokenizer;initialized=!1;modelPath;constructor(B={}){this.modelPath=B.modelPath||"./models/starcoder2"}async initialize(){if(this.initialized)return;try{this.tokenizer=await j.from_pretrained(this.modelPath),this.initialized=!0}catch(B){throw Error(`Tokenizer initialization failed: ${B.message}`)}}async tokenize(B){if(!this.initialized)await this.initialize();let H=Date.now();if(!B||B.trim().length===0)return{tokens:[],metadata:{totalTokens:0,vocabSize:49152,processingTime:Date.now()-H,averageConfidence:0},raw:{inputIds:[],decodedText:""}};try{let J=(await this.tokenizer(B)).input_ids.tolist()[0],O=await this.tokenizer.decode(J),U=await this.createDirectTokens(O,J),K=Date.now()-H;return{tokens:U,metadata:{totalTokens:U.length,vocabSize:49152,processingTime:K,averageConfidence:0.95},raw:{inputIds:J,decodedText:O}}}catch(G){throw Error(`Tokenization failed: ${G.message}`)}}async createDirectTokens(B,H){let G=[];for(let J=0;J<H.length;J++){let O=H[J];try{let K=(await this.tokenizer.decode([O],{skip_special_tokens:!0})).trim().toLowerCase();if(K.length>0)G.push({text:K,id:O,score:1,confidence:1,relevance:"high"})}catch(U){}}return G}async getTopTokens(B,H=20){return(await this.tokenize(B)).tokens.slice(0,H)}async getTechnicalTokens(B){return(await this.tokenize(B)).tokens}async decode(B){if(!this.initialized)throw Error("Tokenizer not initialized. Call initialize() first.");return await this.tokenizer.decode(B)}async encode(B){if(!this.initialized)throw Error("Tokenizer not initialized. Call initialize() first.");return(await this.tokenizer(B)).input_ids.tolist()[0]}}async function k(B,H){try{let G=await B.getAllCodebaseFiles();if(H){if(H.file_extensions&&H.file_extensions.length>0)G=G.filter((K)=>H.file_extensions?.some((W)=>K.path.endsWith(W)));if(H.path_filter)G=G.filter((K)=>K.path.includes(H.path_filter));if(H.exclude_paths&&H.exclude_paths.length>0)G=G.filter((K)=>!H.exclude_paths?.some((W)=>K.path.includes(W)))}if(G.length===0)return null;let J=[];for(let K of G){let W=await B.getTFIDFDocument(K.path);if(W){let _=await B.getTFIDFTerms(K.path),X=new Map,$=new Map;for(let[E,Y]of Object.entries(_))X.set(E,Y);let Z=W.rawTerms||{};for(let[E,Y]of Object.entries(Z))$.set(E,Y);J.push({uri:`file://${K.path}`,terms:X,rawTerms:$,magnitude:W.magnitude})}}if(J.length===0)return null;let O=await B.getIDFValues(),U=new Map;for(let[K,W]of Object.entries(O))U.set(K,W);return{documents:J,idf:U,totalDocuments:J.length,metadata:{generatedAt:new Date().toISOString(),version:"1.0.0"}}}catch(G){return console.error("[ERROR] Failed to build search index from database:",G),null}}function M(B){let H=Array.from(B.values()).reduce((J,O)=>J+O,0),G=new Map;for(let[J,O]of B.entries())G.set(J,O/H);return G}function z(B,H){let G=new Map;for(let O of B){let U=new Set(O.keys());for(let K of U)G.set(K,(G.get(K)||0)+1)}let J=new Map;for(let[O,U]of G.entries())J.set(O,Math.log(H/U));return J}function V(B,H){let G=new Map;for(let[J,O]of B.entries()){let U=H.get(J)||0;G.set(J,O*U)}return G}function w(B){let H=0;for(let G of B.values())H+=G*G;return Math.sqrt(H)}var Q=null,v=!1;async function D(){if(!Q)Q=new R({modelPath:"./models/starcoder2"});if(!v){let{log:B,error:H}=console;console.log=()=>{},console.error=()=>{};try{await Q.initialize(),v=!0}finally{console.log=B,console.error=H}}return Q}async function S(B){let G=await(await D()).tokenize(B),J=new Map;for(let O of G.tokens){let 
U=O.text.toLowerCase(),K=J.get(U)||0;J.set(U,K+O.score)}return J}async function P(B){let G=await(await D()).tokenize(B),J=new Map;for(let O of G.tokens){let U=O.text.toLowerCase();if(!J.has(U)||O.score>0.8)J.set(U,O.text)}return Array.from(J.values())}async function q(B,H){let J=[];for(let K=0;K<B.length;K+=1){let W=B.slice(K,K+1),_=[];for(let X=0;X<W.length;X++){let $=W[X],Z=$.uri.split("/").pop()||$.uri;H?.({current:K+X+1,total:B.length,fileName:Z,status:"processing"});try{let E=await S($.content);_.push({uri:$.uri,terms:E}),H?.({current:K+X+1,total:B.length,fileName:Z,status:"completed"})}catch(E){_.push({uri:$.uri,terms:new Map}),H?.({current:K+X+1,total:B.length,fileName:Z,status:"skipped"})}}J.push(..._)}let O=z(J.map((K)=>K.terms),B.length);return{documents:J.map((K)=>{let W=M(K.terms),_=V(W,O),X=w(_);return{uri:K.uri,terms:_,rawTerms:K.terms,magnitude:X}}),idf:O,totalDocuments:B.length,metadata:{generatedAt:new Date().toISOString(),version:"5.0.0",tokenizer:"AdvancedCodeTokenizer",features:["Industry-leading code understanding","Advanced technical term recognition","Optimized for code search","Simple and effective approach","No unnecessary complexity"]}}}function h(B,H){let G=0;for(let[O,U]of B.entries()){let K=H.terms.get(O)||0;G+=U*K}let J=w(B);if(J===0||H.magnitude===0)return 0;return G/(J*H.magnitude)}async function I(B,H){let G=await P(B),J=new Map;for(let O of G){let U=O.toLowerCase(),K=H.get(U)||0;if(K>0)J.set(U,K)}return J}async function y(B,H,G={}){let{limit:J=10,minScore:O=0,boostFactors:U={}}=G,{exactMatch:K=1.5,phraseMatch:W=2,technicalMatch:_=1.8,identifierMatch:X=1.3}=U,$=await I(B,H.idf),Z=(await P(B)).map((Y)=>Y.toLowerCase());return H.documents.map((Y)=>{let L=h($,Y),C=[];for(let N of Z)if(Y.rawTerms.has(N)){let A=K;if(b(N))A=Math.max(A,_);if(p(N))A=Math.max(A,X);L*=A,C.push(N)}if(C.length===Z.length&&Z.length>1)L*=W;if(Z.length>3&&C.length>=Z.length*0.7)L*=1.2;return{uri:Y.uri,score:L,matchedTerms:C}}).filter((Y)=>Y.score>=O).sort((Y,L)=>L.score-Y.score).slice(0,J)}function b(B){return[/\b[A-Z]{2,}\b/,/\b[A-Z][a-z]+(?:[A-Z][a-z]+)+\b/,/\b[a-z]+[A-Z][a-z]*\b/,/\b\w+(?:Dir|Config|File|Path|Data|Service|Manager|Handler)\b/,/\b(?:get|set|is|has|can|should|will|do)[A-Z]\w*\b/,/\b(?:http|https|json|xml|yaml|sql|api|url|uri)\b/].some((G)=>G.test(B))}function p(B){return/^[a-zA-Z][a-zA-Z0-9_]*$/.test(B)&&B.length>1}function T(B){let H={documents:B.documents.map((G)=>({uri:G.uri,terms:Array.from(G.terms.entries()),rawTerms:Array.from(G.rawTerms.entries()),magnitude:G.magnitude})),idf:Array.from(B.idf.entries()),totalDocuments:B.totalDocuments,metadata:B.metadata};return JSON.stringify(H,null,2)}function f(B){let H=JSON.parse(B);return{documents:H.documents.map((G)=>({uri:G.uri,terms:new Map(G.terms),rawTerms:new Map(G.rawTerms),magnitude:G.magnitude})),idf:new Map(H.idf),totalDocuments:H.totalDocuments,metadata:H.metadata}}export{T as serializeIndex,y as searchDocuments,I as processQuery,f as deserializeIndex,h as calculateCosineSimilarity,k as buildSearchIndexFromDB,q as buildSearchIndex};
export{q as d,y as e};
//# debugId=F6CC4F566F5A987064756E2164756E21
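The deleted chunk above is the minified build of `src/utils/advanced-tokenizer.ts` and `src/services/search/tfidf.ts` (per the source map below): a TF-IDF search index with cosine-similarity ranking. As a minimal sketch of the scoring this chunk shipped — reconstructed from the `sourcesContent` strings below, not the shipped code itself — the core math looks like this:

```ts
// Reconstruction sketch of the TF-IDF scoring core removed in 1.0.0;
// names mirror the deleted src/services/search/tfidf.ts.

interface DocumentVector {
  uri: string;
  terms: Map<string, number>; // term → TF-IDF score
  magnitude: number;          // precomputed vector magnitude
}

// TF = (times term appears in document) / (total terms in document)
function calculateTF(termFrequency: Map<string, number>): Map<string, number> {
  let total = 0;
  for (const freq of termFrequency.values()) total += freq;
  const tf = new Map<string, number>();
  for (const [term, freq] of termFrequency) tf.set(term, freq / total);
  return tf;
}

// IDF = log(total documents / documents containing the term)
function calculateIDF(
  docs: Map<string, number>[],
  totalDocuments: number
): Map<string, number> {
  const docFrequency = new Map<string, number>();
  for (const doc of docs) {
    for (const term of new Set(doc.keys())) {
      docFrequency.set(term, (docFrequency.get(term) ?? 0) + 1);
    }
  }
  const idf = new Map<string, number>();
  for (const [term, df] of docFrequency) idf.set(term, Math.log(totalDocuments / df));
  return idf;
}

// cosine(query, doc) = (query · doc) / (|query| · |doc|); 0 for empty vectors
function calculateCosineSimilarity(query: Map<string, number>, doc: DocumentVector): number {
  let dot = 0;
  for (const [term, score] of query) dot += score * (doc.terms.get(term) ?? 0);
  let sumSquares = 0;
  for (const score of query.values()) sumSquares += score * score;
  const queryMagnitude = Math.sqrt(sumSquares);
  if (queryMagnitude === 0 || doc.magnitude === 0) return 0;
  return dot / (queryMagnitude * doc.magnitude);
}
```

On top of this base score, the deleted `searchDocuments` applied multiplicative boosts for exact (1.5×), technical-term (1.8×), identifier (1.3×), and whole-phrase (2.0×) matches before filtering by `minScore` and sorting by score.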
package/dist/chunk-4nm4ere4.js.map
DELETED
@@ -1,11 +0,0 @@
{
"version": 3,
"sources": ["../src/utils/advanced-tokenizer.ts", "../src/services/search/tfidf.ts"],
"sourcesContent": [
"/**\n * Advanced Code Tokenizer - 純粹 StarCoder2,無任何多餘處理\n * 完全信任 StarCoder2 嘅世界級 tokenization 能力\n */\n\nimport { AutoTokenizer } from '@huggingface/transformers';\n\nexport interface AdvancedToken {\n text: string;\n id: number;\n score: number;\n confidence: number;\n relevance: 'high' | 'medium' | 'low';\n}\n\nexport interface AdvancedTokenizerResult {\n tokens: AdvancedToken[];\n metadata: {\n totalTokens: number;\n vocabSize: number;\n processingTime: number;\n averageConfidence: number;\n };\n raw: {\n inputIds: number[];\n decodedText: string;\n };\n}\n\n/**\n * Advanced Code Tokenizer - 純粹 StarCoder2\n */\nexport class AdvancedCodeTokenizer {\n private tokenizer: any;\n private initialized = false;\n private modelPath: string;\n\n constructor(\n options: {\n modelPath?: string;\n } = {}\n ) {\n this.modelPath = options.modelPath || './models/starcoder2';\n }\n\n /**\n * 初始化 tokenizer\n */\n async initialize(): Promise<void> {\n if (this.initialized) {\n return;\n }\n\n try {\n this.tokenizer = await AutoTokenizer.from_pretrained(this.modelPath);\n this.initialized = true;\n } catch (error) {\n throw new Error(`Tokenizer initialization failed: ${error.message}`);\n }\n }\n\n /**\n * 純粹 StarCoder2 tokenization - 無任何安全限制或多餘分析\n */\n async tokenize(content: string): Promise<AdvancedTokenizerResult> {\n if (!this.initialized) {\n await this.initialize();\n }\n\n const startTime = Date.now();\n\n // Handle empty content\n if (!content || content.trim().length === 0) {\n return {\n tokens: [],\n metadata: {\n totalTokens: 0,\n vocabSize: 49152,\n processingTime: Date.now() - startTime,\n averageConfidence: 0,\n },\n raw: {\n inputIds: [],\n decodedText: '',\n },\n };\n }\n\n try {\n // 完全信任 StarCoder2,直接處理所有內容\n const encoded = await this.tokenizer(content);\n const inputIds = encoded.input_ids.tolist()[0];\n\n // 解碼獲得原文\n const decodedText = await this.tokenizer.decode(inputIds);\n\n // 直接用 StarCoder2 嘅輸出,唔做多餘分析\n const tokens = await this.createDirectTokens(decodedText, inputIds);\n\n const processingTime = Date.now() - startTime;\n\n return {\n tokens,\n metadata: {\n totalTokens: tokens.length,\n vocabSize: 49152,\n processingTime,\n averageConfidence: 0.95, // StarCoder2 本身就係高質量\n },\n raw: {\n inputIds,\n decodedText,\n },\n };\n } catch (error) {\n throw new Error(`Tokenization failed: ${error.message}`);\n }\n }\n\n /**\n * 純粹 StarCoder2 tokenization - 完全信任,無任何額外處理\n * 直接使用 StarCoder2 嘅 token IDs,逐個解碼成文字\n */\n private async createDirectTokens(\n _decodedText: string,\n inputIds: number[]\n ): Promise<AdvancedToken[]> {\n const tokens: AdvancedToken[] = [];\n\n // 完全信任 StarCoder2,直接使用佢嘅 tokenization\n // 逐個 token ID 解碼,得到 StarCoder2 認為嘅最佳分割\n for (let i = 0; i < inputIds.length; i++) {\n const tokenId = inputIds[i];\n try {\n // 直接使用 StarCoder2 嘅解碼結果\n const tokenText = await this.tokenizer.decode([tokenId], { skip_special_tokens: true });\n const cleaned = tokenText.trim().toLowerCase();\n\n // 只過濾空白 token,其他全部保留(完全信任 StarCoder2)\n if (cleaned.length > 0) {\n tokens.push({\n text: cleaned,\n id: tokenId,\n score: 1.0, // StarCoder2 嘅所有 token 都係高質量\n confidence: 1.0, // 完全信任 StarCoder2\n relevance: 'high' as const,\n });\n }\n } catch (_error) {}\n }\n\n return tokens;\n }\n\n /**\n * 便利方法:只返回高質量 tokens\n */\n async getTopTokens(content: string, limit = 20): Promise<AdvancedToken[]> {\n const result = await this.tokenize(content);\n return result.tokens.slice(0, limit);\n }\n\n /**\n * 便利方法:返回所有 tokens (StarCoder2 嘅輸出全部都係高質量)\n */\n async getTechnicalTokens(content: string): 
Promise<AdvancedToken[]> {\n const result = await this.tokenize(content);\n return result.tokens; // StarCoder2 嘅所有輸出都係技術相關\n }\n\n /**\n * 解碼 token IDs 回文本\n */\n async decode(tokenIds: number[]): Promise<string> {\n if (!this.initialized) {\n throw new Error('Tokenizer not initialized. Call initialize() first.');\n }\n return await this.tokenizer.decode(tokenIds);\n }\n\n /**\n * 編碼文本為 token IDs\n */\n async encode(text: string): Promise<number[]> {\n if (!this.initialized) {\n throw new Error('Tokenizer not initialized. Call initialize() first.');\n }\n const result = await this.tokenizer(text);\n return result.input_ids.tolist()[0];\n }\n}\n",
"/**\n * TF-IDF (Term Frequency-Inverse Document Frequency) implementation\n * Used for ranking document relevance in semantic search\n */\n\nimport { AdvancedCodeTokenizer } from '../../utils/advanced-tokenizer.js';\nimport type { SeparatedMemoryStorage } from './separated-storage.js';\n\nexport interface DocumentVector {\n uri: string;\n terms: Map<string, number>; // term → TF-IDF score\n rawTerms: Map<string, number>; // term → raw frequency\n magnitude: number; // Vector magnitude for cosine similarity\n}\n\nexport interface SearchIndex {\n documents: DocumentVector[];\n idf: Map<string, number>; // term → IDF score\n totalDocuments: number;\n metadata: {\n generatedAt: string;\n version: string;\n };\n}\n\n/**\n * Build search index from database (shared between CLI and MCP)\n */\nexport async function buildSearchIndexFromDB(\n memoryStorage: SeparatedMemoryStorage,\n filters?: {\n file_extensions?: string[];\n path_filter?: string;\n exclude_paths?: string[];\n }\n): Promise<SearchIndex | null> {\n try {\n // Get all files from database\n let files = await memoryStorage.getAllCodebaseFiles();\n\n // Apply filters\n if (filters) {\n if (filters.file_extensions && filters.file_extensions.length > 0) {\n files = files.filter((file) =>\n filters.file_extensions?.some((ext: string) => file.path.endsWith(ext))\n );\n }\n\n if (filters.path_filter) {\n files = files.filter((file) => file.path.includes(filters.path_filter!));\n }\n\n if (filters.exclude_paths && filters.exclude_paths.length > 0) {\n files = files.filter(\n (file) => !filters.exclude_paths?.some((exclude: string) => file.path.includes(exclude))\n );\n }\n }\n\n if (files.length === 0) {\n return null;\n }\n\n // Build search documents - read TF-IDF terms directly from database\n const documents = [];\n for (const file of files) {\n const tfidfDoc = await memoryStorage.getTFIDFDocument(file.path);\n if (tfidfDoc) {\n // Get TF-IDF terms from database (already calculated)\n const tfidfTerms = await memoryStorage.getTFIDFTerms(file.path);\n const terms = new Map<string, number>();\n const rawTermsMap = new Map<string, number>();\n\n // Use TF-IDF terms for search scoring\n for (const [term, tfidfScore] of Object.entries(tfidfTerms)) {\n terms.set(term, tfidfScore as number);\n }\n\n // Use rawTerms for reference\n const rawTerms = tfidfDoc.rawTerms || {};\n for (const [term, freq] of Object.entries(rawTerms)) {\n rawTermsMap.set(term, freq as number);\n }\n\n documents.push({\n uri: `file://${file.path}`,\n terms,\n rawTerms: rawTermsMap,\n magnitude: tfidfDoc.magnitude,\n });\n }\n }\n\n if (documents.length === 0) {\n return null;\n }\n\n // Get IDF values from database\n const idfRecords = await memoryStorage.getIDFValues();\n const idf = new Map<string, number>();\n for (const [term, value] of Object.entries(idfRecords)) {\n idf.set(term, value as number);\n }\n\n return {\n documents,\n idf,\n totalDocuments: documents.length,\n metadata: {\n generatedAt: new Date().toISOString(),\n version: '1.0.0',\n },\n };\n } catch (error) {\n console.error('[ERROR] Failed to build search index from database:', error);\n return null;\n }\n}\n\n/**\n * Calculate Term Frequency (TF)\n * TF = (number of times term appears in document) / (total terms in document)\n */\nfunction calculateTF(termFrequency: Map<string, number>): Map<string, number> {\n const totalTerms = Array.from(termFrequency.values()).reduce((sum, freq) => sum + freq, 0);\n const tf = new Map<string, number>();\n\n for (const [term, freq] of termFrequency.entries()) {\n 
tf.set(term, freq / totalTerms);\n }\n\n return tf;\n}\n\n/**\n * Calculate Inverse Document Frequency (IDF)\n * IDF = log(total documents / documents containing term)\n */\nfunction calculateIDF(\n documents: Map<string, number>[],\n totalDocuments: number\n): Map<string, number> {\n const documentFrequency = new Map<string, number>();\n\n // Count how many documents contain each term\n for (const doc of documents) {\n const uniqueTerms = new Set(doc.keys());\n for (const term of uniqueTerms) {\n documentFrequency.set(term, (documentFrequency.get(term) || 0) + 1);\n }\n }\n\n // Calculate IDF for each term\n const idf = new Map<string, number>();\n for (const [term, docFreq] of documentFrequency.entries()) {\n idf.set(term, Math.log(totalDocuments / docFreq));\n }\n\n return idf;\n}\n\n/**\n * Calculate TF-IDF scores for a document\n */\nfunction calculateTFIDF(tf: Map<string, number>, idf: Map<string, number>): Map<string, number> {\n const tfidf = new Map<string, number>();\n\n for (const [term, tfScore] of tf.entries()) {\n const idfScore = idf.get(term) || 0;\n tfidf.set(term, tfScore * idfScore);\n }\n\n return tfidf;\n}\n\n/**\n * Calculate vector magnitude for cosine similarity\n */\nfunction calculateMagnitude(vector: Map<string, number>): number {\n let sum = 0;\n for (const value of vector.values()) {\n sum += value * value;\n }\n return Math.sqrt(sum);\n}\n\n// Global tokenizer instance for performance\nlet globalTokenizer: AdvancedCodeTokenizer | null = null;\nlet tokenizerInitialized = false;\n\n/**\n * Get or create the global tokenizer\n */\nasync function getTokenizer(): Promise<AdvancedCodeTokenizer> {\n if (!globalTokenizer) {\n globalTokenizer = new AdvancedCodeTokenizer({\n modelPath: './models/starcoder2',\n });\n }\n\n if (!tokenizerInitialized) {\n // Silently initialize - no console output\n const originalLog = console.log;\n const originalError = console.error;\n console.log = () => {}; // Temporarily silence console.log\n console.error = () => {}; // Temporarily silence console.error\n try {\n await globalTokenizer.initialize();\n tokenizerInitialized = true;\n } finally {\n console.log = originalLog; // Restore console.log\n console.error = originalError; // Restore console.error\n }\n }\n\n return globalTokenizer;\n}\n\n/**\n * Extract terms using our advanced tokenizer\n */\nasync function extractTerms(content: string): Promise<Map<string, number>> {\n const tokenizer = await getTokenizer();\n const result = await tokenizer.tokenize(content);\n const terms = new Map<string, number>();\n\n // Use token scores as TF weights\n for (const token of result.tokens) {\n const term = token.text.toLowerCase();\n const currentScore = terms.get(term) || 0;\n terms.set(term, currentScore + token.score);\n }\n\n return terms;\n}\n\n/**\n * Extract simple tokens for query processing\n */\nasync function extractQueryTokens(query: string): Promise<string[]> {\n const tokenizer = await getTokenizer();\n const result = await tokenizer.tokenize(query);\n\n // Return unique tokens, sorted by score (highest first)\n const uniqueTokens = new Map<string, string>();\n for (const token of result.tokens) {\n const lowerText = token.text.toLowerCase();\n if (!uniqueTokens.has(lowerText) || token.score > 0.8) {\n uniqueTokens.set(lowerText, token.text);\n }\n }\n\n return Array.from(uniqueTokens.values());\n}\n\nexport interface BuildIndexProgress {\n current: number;\n total: number;\n fileName: string;\n status: 'processing' | 'completed' | 'skipped';\n}\n\n/**\n * Build TF-IDF search 
index from documents using our advanced tokenizer\n */\nexport async function buildSearchIndex(\n documents: Array<{ uri: string; content: string }>,\n onProgress?: (progress: BuildIndexProgress) => void\n): Promise<SearchIndex> {\n // Process documents one by one to avoid hanging\n const batchSize = 1; // Process 1 document at a time to avoid hanging\n const documentTerms: Array<{ uri: string; terms: Map<string, number> }> = [];\n\n for (let i = 0; i < documents.length; i += batchSize) {\n const batch = documents.slice(i, i + batchSize);\n\n // Process sequentially to avoid hanging\n const batchResults = [];\n for (let j = 0; j < batch.length; j++) {\n const doc = batch[j];\n const fileName = doc.uri.split('/').pop() || doc.uri;\n\n // Report progress\n onProgress?.({\n current: i + j + 1,\n total: documents.length,\n fileName,\n status: 'processing',\n });\n\n try {\n const result = await extractTerms(doc.content);\n\n batchResults.push({\n uri: doc.uri,\n terms: result,\n });\n\n // Report completion\n onProgress?.({\n current: i + j + 1,\n total: documents.length,\n fileName,\n status: 'completed',\n });\n } catch (_error) {\n batchResults.push({\n uri: doc.uri,\n terms: new Map<string, number>(),\n });\n\n // Report skip\n onProgress?.({\n current: i + j + 1,\n total: documents.length,\n fileName,\n status: 'skipped',\n });\n }\n }\n\n documentTerms.push(...batchResults);\n }\n\n // Calculate IDF scores\n const idf = calculateIDF(\n documentTerms.map((d) => d.terms),\n documents.length\n );\n\n // Calculate TF-IDF for each document\n const documentVectors: DocumentVector[] = documentTerms.map((doc) => {\n const tf = calculateTF(doc.terms);\n const tfidf = calculateTFIDF(tf, idf);\n const magnitude = calculateMagnitude(tfidf);\n\n return {\n uri: doc.uri,\n terms: tfidf,\n rawTerms: doc.terms,\n magnitude,\n };\n });\n\n return {\n documents: documentVectors,\n idf,\n totalDocuments: documents.length,\n metadata: {\n generatedAt: new Date().toISOString(),\n version: '5.0.0',\n tokenizer: 'AdvancedCodeTokenizer',\n features: [\n 'Industry-leading code understanding',\n 'Advanced technical term recognition',\n 'Optimized for code search',\n 'Simple and effective approach',\n 'No unnecessary complexity',\n ],\n },\n };\n}\n\n/**\n * Calculate cosine similarity between query and document\n */\nexport function calculateCosineSimilarity(\n queryVector: Map<string, number>,\n docVector: DocumentVector\n): number {\n let dotProduct = 0;\n\n // Calculate dot product\n for (const [term, queryScore] of queryVector.entries()) {\n const docScore = docVector.terms.get(term) || 0;\n dotProduct += queryScore * docScore;\n }\n\n // Calculate query magnitude\n const queryMagnitude = calculateMagnitude(queryVector);\n\n if (queryMagnitude === 0 || docVector.magnitude === 0) {\n return 0;\n }\n\n return dotProduct / (queryMagnitude * docVector.magnitude);\n}\n\n/**\n * Process query into TF-IDF vector using database values\n */\nexport async function processQuery(\n query: string,\n idf: Map<string, number>\n): Promise<Map<string, number>> {\n const terms = await extractQueryTokens(query);\n const queryVector = new Map<string, number>();\n\n // 為每個查詢詞使用 IDF 值(查詢本身無 TF-IDF,直接用 IDF)\n for (const term of terms) {\n const lowerTerm = term.toLowerCase();\n const idfValue = idf.get(lowerTerm) || 0;\n\n // 純粹用 IDF 值,完全信任 StarCoder2 嘅 tokenization\n if (idfValue > 0) {\n queryVector.set(lowerTerm, idfValue);\n }\n }\n\n return queryVector;\n}\n\n/**\n * Search documents using TF-IDF and cosine similarity with 
Advanced Code Tokenizer\n */\nexport async function searchDocuments(\n query: string,\n index: SearchIndex,\n options: {\n limit?: number;\n minScore?: number;\n boostFactors?: {\n exactMatch?: number; // Boost for exact term matches\n phraseMatch?: number; // Boost for phrase matches\n technicalMatch?: number; // Boost for technical term matches\n identifierMatch?: number; // Boost for identifier matches\n };\n } = {}\n): Promise<Array<{ uri: string; score: number; matchedTerms: string[] }>> {\n const { limit = 10, minScore = 0, boostFactors = {} } = options;\n const {\n exactMatch = 1.5,\n phraseMatch = 2.0,\n technicalMatch = 1.8,\n identifierMatch = 1.3,\n } = boostFactors;\n\n // Process query using Advanced Code Tokenizer\n const queryVector = await processQuery(query, index.idf);\n const queryTokens = (await extractQueryTokens(query)).map((t) => t.toLowerCase());\n\n // Calculate similarity for each document\n const results = index.documents.map((doc) => {\n let score = calculateCosineSimilarity(queryVector, doc);\n\n // Boost for exact term matches with enhanced scoring\n const matchedTerms: string[] = [];\n for (const token of queryTokens) {\n if (doc.rawTerms.has(token)) {\n // Apply different boost factors based on term characteristics\n let boostFactor = exactMatch;\n\n // Additional boost for technical terms\n if (isTechnicalTerm(token)) {\n boostFactor = Math.max(boostFactor, technicalMatch);\n }\n\n // Additional boost for identifiers\n if (isIdentifier(token)) {\n boostFactor = Math.max(boostFactor, identifierMatch);\n }\n\n score *= boostFactor;\n matchedTerms.push(token);\n }\n }\n\n // Enhanced phrase match detection (all query terms appear in document)\n if (matchedTerms.length === queryTokens.length && queryTokens.length > 1) {\n score *= phraseMatch;\n }\n\n // Contextual relevance boost for longer queries\n if (queryTokens.length > 3 && matchedTerms.length >= queryTokens.length * 0.7) {\n score *= 1.2; // Boost for partial matches on complex queries\n }\n\n return {\n uri: doc.uri,\n score,\n matchedTerms,\n };\n });\n\n // Filter and sort\n return results\n .filter((result) => result.score >= minScore)\n .sort((a, b) => b.score - a.score)\n .slice(0, limit);\n}\n\n/**\n * Check if a term is likely a technical term\n */\nfunction isTechnicalTerm(term: string): boolean {\n const technicalPatterns = [\n /\\b[A-Z]{2,}\\b/, // Acronyms like HTTP, API, JSON\n /\\b[A-Z][a-z]+(?:[A-Z][a-z]+)+\\b/, // PascalCase like ComponentName\n /\\b[a-z]+[A-Z][a-z]*\\b/, // camelCase like functionName\n /\\b\\w+(?:Dir|Config|File|Path|Data|Service|Manager|Handler)\\b/, // Common suffixes\n /\\b(?:get|set|is|has|can|should|will|do)[A-Z]\\w*\\b/, // Common prefixes\n /\\b(?:http|https|json|xml|yaml|sql|api|url|uri)\\b/, // Technical keywords\n ];\n\n return technicalPatterns.some((pattern) => pattern.test(term));\n}\n\n/**\n * Check if a term is likely an identifier\n */\nfunction isIdentifier(term: string): boolean {\n // Identifiers typically contain letters and numbers, maybe underscores\n return /^[a-zA-Z][a-zA-Z0-9_]*$/.test(term) && term.length > 1;\n}\n\n/**\n * Serialize search index to JSON\n */\nexport function serializeIndex(index: SearchIndex): string {\n const serializable = {\n documents: index.documents.map((doc) => ({\n uri: doc.uri,\n terms: Array.from(doc.terms.entries()),\n rawTerms: Array.from(doc.rawTerms.entries()),\n magnitude: doc.magnitude,\n })),\n idf: Array.from(index.idf.entries()),\n totalDocuments: index.totalDocuments,\n metadata: index.metadata,\n };\n\n 
return JSON.stringify(serializable, null, 2);\n}\n\n/**\n * Deserialize search index from JSON\n */\nexport function deserializeIndex(json: string): SearchIndex {\n const data = JSON.parse(json);\n\n return {\n documents: data.documents.map(\n (doc: {\n uri: string;\n terms: [string, number][];\n rawTerms: [string, number][];\n magnitude: number;\n }) => ({\n uri: doc.uri,\n terms: new Map(doc.terms),\n rawTerms: new Map(doc.rawTerms),\n magnitude: doc.magnitude,\n })\n ),\n idf: new Map(data.idf),\n totalDocuments: data.totalDocuments,\n metadata: data.metadata,\n };\n}\n"
],
"mappings": "4BAKA,wBAAS,kCA2BF,MAAM,CAAsB,CACzB,UACA,YAAc,GACd,UAER,WAAW,CACT,EAEI,CAAC,EACL,CACA,KAAK,UAAY,EAAQ,WAAa,2BAMlC,WAAU,EAAkB,CAChC,GAAI,KAAK,YACP,OAGF,GAAI,CACF,KAAK,UAAY,MAAM,EAAc,gBAAgB,KAAK,SAAS,EACnE,KAAK,YAAc,GACnB,MAAO,EAAO,CACd,MAAU,MAAM,oCAAoC,EAAM,SAAS,QAOjE,SAAQ,CAAC,EAAmD,CAChE,GAAI,CAAC,KAAK,YACR,MAAM,KAAK,WAAW,EAGxB,IAAM,EAAY,KAAK,IAAI,EAG3B,GAAI,CAAC,GAAW,EAAQ,KAAK,EAAE,SAAW,EACxC,MAAO,CACL,OAAQ,CAAC,EACT,SAAU,CACR,YAAa,EACb,UAAW,MACX,eAAgB,KAAK,IAAI,EAAI,EAC7B,kBAAmB,CACrB,EACA,IAAK,CACH,SAAU,CAAC,EACX,YAAa,EACf,CACF,EAGF,GAAI,CAGF,IAAM,GADU,MAAM,KAAK,UAAU,CAAO,GACnB,UAAU,OAAO,EAAE,GAGtC,EAAc,MAAM,KAAK,UAAU,OAAO,CAAQ,EAGlD,EAAS,MAAM,KAAK,mBAAmB,EAAa,CAAQ,EAE5D,EAAiB,KAAK,IAAI,EAAI,EAEpC,MAAO,CACL,SACA,SAAU,CACR,YAAa,EAAO,OACpB,UAAW,MACX,iBACA,kBAAmB,IACrB,EACA,IAAK,CACH,WACA,aACF,CACF,EACA,MAAO,EAAO,CACd,MAAU,MAAM,wBAAwB,EAAM,SAAS,QAQ7C,mBAAkB,CAC9B,EACA,EAC0B,CAC1B,IAAM,EAA0B,CAAC,EAIjC,QAAS,EAAI,EAAG,EAAI,EAAS,OAAQ,IAAK,CACxC,IAAM,EAAU,EAAS,GACzB,GAAI,CAGF,IAAM,GADY,MAAM,KAAK,UAAU,OAAO,CAAC,CAAO,EAAG,CAAE,oBAAqB,EAAK,CAAC,GAC5D,KAAK,EAAE,YAAY,EAG7C,GAAI,EAAQ,OAAS,EACnB,EAAO,KAAK,CACV,KAAM,EACN,GAAI,EACJ,MAAO,EACP,WAAY,EACZ,UAAW,MACb,CAAC,EAEH,MAAO,EAAQ,GAGnB,OAAO,OAMH,aAAY,CAAC,EAAiB,EAAQ,GAA8B,CAExE,OADe,MAAM,KAAK,SAAS,CAAO,GAC5B,OAAO,MAAM,EAAG,CAAK,OAM/B,mBAAkB,CAAC,EAA2C,CAElE,OADe,MAAM,KAAK,SAAS,CAAO,GAC5B,YAMV,OAAM,CAAC,EAAqC,CAChD,GAAI,CAAC,KAAK,YACR,MAAU,MAAM,qDAAqD,EAEvE,OAAO,MAAM,KAAK,UAAU,OAAO,CAAQ,OAMvC,OAAM,CAAC,EAAiC,CAC5C,GAAI,CAAC,KAAK,YACR,MAAU,MAAM,qDAAqD,EAGvE,OADe,MAAM,KAAK,UAAU,CAAI,GAC1B,UAAU,OAAO,EAAE,GAErC,CClKA,eAAsB,CAAsB,CAC1C,EACA,EAK6B,CAC7B,GAAI,CAEF,IAAI,EAAQ,MAAM,EAAc,oBAAoB,EAGpD,GAAI,EAAS,CACX,GAAI,EAAQ,iBAAmB,EAAQ,gBAAgB,OAAS,EAC9D,EAAQ,EAAM,OAAO,CAAC,IACpB,EAAQ,iBAAiB,KAAK,CAAC,IAAgB,EAAK,KAAK,SAAS,CAAG,CAAC,CACxE,EAGF,GAAI,EAAQ,YACV,EAAQ,EAAM,OAAO,CAAC,IAAS,EAAK,KAAK,SAAS,EAAQ,WAAY,CAAC,EAGzE,GAAI,EAAQ,eAAiB,EAAQ,cAAc,OAAS,EAC1D,EAAQ,EAAM,OACZ,CAAC,IAAS,CAAC,EAAQ,eAAe,KAAK,CAAC,IAAoB,EAAK,KAAK,SAAS,CAAO,CAAC,CACzF,EAIJ,GAAI,EAAM,SAAW,EACnB,OAAO,KAIT,IAAM,EAAY,CAAC,EACnB,QAAW,KAAQ,EAAO,CACxB,IAAM,EAAW,MAAM,EAAc,iBAAiB,EAAK,IAAI,EAC/D,GAAI,EAAU,CAEZ,IAAM,EAAa,MAAM,EAAc,cAAc,EAAK,IAAI,EACxD,EAAQ,IAAI,IACZ,EAAc,IAAI,IAGxB,QAAY,EAAM,KAAe,OAAO,QAAQ,CAAU,EACxD,EAAM,IAAI,EAAM,CAAoB,EAItC,IAAM,EAAW,EAAS,UAAY,CAAC,EACvC,QAAY,EAAM,KAAS,OAAO,QAAQ,CAAQ,EAChD,EAAY,IAAI,EAAM,CAAc,EAGtC,EAAU,KAAK,CACb,IAAK,UAAU,EAAK,OACpB,QACA,SAAU,EACV,UAAW,EAAS,SACtB,CAAC,GAIL,GAAI,EAAU,SAAW,EACvB,OAAO,KAIT,IAAM,EAAa,MAAM,EAAc,aAAa,EAC9C,EAAM,IAAI,IAChB,QAAY,EAAM,KAAU,OAAO,QAAQ,CAAU,EACnD,EAAI,IAAI,EAAM,CAAe,EAG/B,MAAO,CACL,YACA,MACA,eAAgB,EAAU,OAC1B,SAAU,CACR,YAAa,IAAI,KAAK,EAAE,YAAY,EACpC,QAAS,OACX,CACF,EACA,MAAO,EAAO,CAEd,OADA,QAAQ,MAAM,sDAAuD,CAAK,EACnE,MAQX,SAAS,CAAW,CAAC,EAAyD,CAC5E,IAAM,EAAa,MAAM,KAAK,EAAc,OAAO,CAAC,EAAE,OAAO,CAAC,EAAK,IAAS,EAAM,EAAM,CAAC,EACnF,EAAK,IAAI,IAEf,QAAY,EAAM,KAAS,EAAc,QAAQ,EAC/C,EAAG,IAAI,EAAM,EAAO,CAAU,EAGhC,OAAO,EAOT,SAAS,CAAY,CACnB,EACA,EACqB,CACrB,IAAM,EAAoB,IAAI,IAG9B,QAAW,KAAO,EAAW,CAC3B,IAAM,EAAc,IAAI,IAAI,EAAI,KAAK,CAAC,EACtC,QAAW,KAAQ,EACjB,EAAkB,IAAI,GAAO,EAAkB,IAAI,CAAI,GAAK,GAAK,CAAC,EAKtE,IAAM,EAAM,IAAI,IAChB,QAAY,EAAM,KAAY,EAAkB,QAAQ,EACtD,EAAI,IAAI,EAAM,KAAK,IAAI,EAAiB,CAAO,CAAC,EAGlD,OAAO,EAMT,SAAS,CAAc,CAAC,EAAyB,EAA+C,CAC9F,IAAM,EAAQ,IAAI,IAElB,QAAY,EAAM,KAAY,EAAG,QAAQ,EAAG,CAC1C,IAAM,EAAW,EAAI,IAAI,CAAI,GAAK,EAClC,EAAM,IAAI,EAAM,EAAU,CAAQ,EAGpC,OAAO,EAMT,SAAS,CAAkB,CAAC,EAAqC,CAC/D,IAAI,EAAM,EACV,QAAW,KAAS,EAAO,OAAO,EAChC,GAAO,EAAQ,EAEjB,OAAO,KAAK,KAAK,CAAG,EAItB,IAAI,EAAgD,KAChD,EAAuB,GAK3B,eAAe,CAAY,EA
AmC,CAC5D,GAAI,CAAC,EACH,EAAkB,IAAI,EAAsB,CAC1C,UAAW,qBACb,CAAC,EAGH,GAAI,CAAC,EAAsB,CAEzB,IAA4B,IAAtB,EACwB,MAAxB,GAAgB,QACtB,QAAQ,IAAM,IAAM,GACpB,QAAQ,MAAQ,IAAM,GACtB,GAAI,CACF,MAAM,EAAgB,WAAW,EACjC,EAAuB,UACvB,CACA,QAAQ,IAAM,EACd,QAAQ,MAAQ,GAIpB,OAAO,EAMT,eAAe,CAAY,CAAC,EAA+C,CAEzE,IAAM,EAAS,MADG,MAAM,EAAa,GACN,SAAS,CAAO,EACzC,EAAQ,IAAI,IAGlB,QAAW,KAAS,EAAO,OAAQ,CACjC,IAAM,EAAO,EAAM,KAAK,YAAY,EAC9B,EAAe,EAAM,IAAI,CAAI,GAAK,EACxC,EAAM,IAAI,EAAM,EAAe,EAAM,KAAK,EAG5C,OAAO,EAMT,eAAe,CAAkB,CAAC,EAAkC,CAElE,IAAM,EAAS,MADG,MAAM,EAAa,GACN,SAAS,CAAK,EAGvC,EAAe,IAAI,IACzB,QAAW,KAAS,EAAO,OAAQ,CACjC,IAAM,EAAY,EAAM,KAAK,YAAY,EACzC,GAAI,CAAC,EAAa,IAAI,CAAS,GAAK,EAAM,MAAQ,IAChD,EAAa,IAAI,EAAW,EAAM,IAAI,EAI1C,OAAO,MAAM,KAAK,EAAa,OAAO,CAAC,EAazC,eAAsB,CAAgB,CACpC,EACA,EACsB,CAGtB,IAAM,EAAoE,CAAC,EAE3E,QAAS,EAAI,EAAG,EAAI,EAAU,OAAQ,GAHpB,EAGoC,CACpD,IAAM,EAAQ,EAAU,MAAM,EAAG,EAJjB,CAI8B,EAGxC,EAAe,CAAC,EACtB,QAAS,EAAI,EAAG,EAAI,EAAM,OAAQ,IAAK,CACrC,IAAM,EAAM,EAAM,GACZ,EAAW,EAAI,IAAI,MAAM,GAAG,EAAE,IAAI,GAAK,EAAI,IAGjD,IAAa,CACX,QAAS,EAAI,EAAI,EACjB,MAAO,EAAU,OACjB,WACA,OAAQ,YACV,CAAC,EAED,GAAI,CACF,IAAM,EAAS,MAAM,EAAa,EAAI,OAAO,EAE7C,EAAa,KAAK,CAChB,IAAK,EAAI,IACT,MAAO,CACT,CAAC,EAGD,IAAa,CACX,QAAS,EAAI,EAAI,EACjB,MAAO,EAAU,OACjB,WACA,OAAQ,WACV,CAAC,EACD,MAAO,EAAQ,CACf,EAAa,KAAK,CAChB,IAAK,EAAI,IACT,MAAO,IAAI,GACb,CAAC,EAGD,IAAa,CACX,QAAS,EAAI,EAAI,EACjB,MAAO,EAAU,OACjB,WACA,OAAQ,SACV,CAAC,GAIL,EAAc,KAAK,GAAG,CAAY,EAIpC,IAAM,EAAM,EACV,EAAc,IAAI,CAAC,IAAM,EAAE,KAAK,EAChC,EAAU,MACZ,EAgBA,MAAO,CACL,UAdwC,EAAc,IAAI,CAAC,IAAQ,CACnE,IAAM,EAAK,EAAY,EAAI,KAAK,EAC1B,EAAQ,EAAe,EAAI,CAAG,EAC9B,EAAY,EAAmB,CAAK,EAE1C,MAAO,CACL,IAAK,EAAI,IACT,MAAO,EACP,SAAU,EAAI,MACd,WACF,EACD,EAIC,MACA,eAAgB,EAAU,OAC1B,SAAU,CACR,YAAa,IAAI,KAAK,EAAE,YAAY,EACpC,QAAS,QACT,UAAW,wBACX,SAAU,CACR,sCACA,sCACA,4BACA,gCACA,2BACF,CACF,CACF,EAMK,SAAS,CAAyB,CACvC,EACA,EACQ,CACR,IAAI,EAAa,EAGjB,QAAY,EAAM,KAAe,EAAY,QAAQ,EAAG,CACtD,IAAM,EAAW,EAAU,MAAM,IAAI,CAAI,GAAK,EAC9C,GAAc,EAAa,EAI7B,IAAM,EAAiB,EAAmB,CAAW,EAErD,GAAI,IAAmB,GAAK,EAAU,YAAc,EAClD,MAAO,GAGT,OAAO,GAAc,EAAiB,EAAU,WAMlD,eAAsB,CAAY,CAChC,EACA,EAC8B,CAC9B,IAAM,EAAQ,MAAM,EAAmB,CAAK,EACtC,EAAc,IAAI,IAGxB,QAAW,KAAQ,EAAO,CACxB,IAAM,EAAY,EAAK,YAAY,EAC7B,EAAW,EAAI,IAAI,CAAS,GAAK,EAGvC,GAAI,EAAW,EACb,EAAY,IAAI,EAAW,CAAQ,EAIvC,OAAO,EAMT,eAAsB,CAAe,CACnC,EACA,EACA,EASI,CAAC,EACmE,CACxE,IAAQ,QAAQ,GAAI,WAAW,EAAG,eAAe,CAAC,GAAM,GAEtD,aAAa,IACb,cAAc,EACd,iBAAiB,IACjB,kBAAkB,KAChB,EAGE,EAAc,MAAM,EAAa,EAAO,EAAM,GAAG,EACjD,GAAe,MAAM,EAAmB,CAAK,GAAG,IAAI,CAAC,IAAM,EAAE,YAAY,CAAC,EA8ChF,OA3CgB,EAAM,UAAU,IAAI,CAAC,IAAQ,CAC3C,IAAI,EAAQ,EAA0B,EAAa,CAAG,EAGhD,EAAyB,CAAC,EAChC,QAAW,KAAS,EAClB,GAAI,EAAI,SAAS,IAAI,CAAK,EAAG,CAE3B,IAAI,EAAc,EAGlB,GAAI,EAAgB,CAAK,EACvB,EAAc,KAAK,IAAI,EAAa,CAAc,EAIpD,GAAI,EAAa,CAAK,EACpB,EAAc,KAAK,IAAI,EAAa,CAAe,EAGrD,GAAS,EACT,EAAa,KAAK,CAAK,EAK3B,GAAI,EAAa,SAAW,EAAY,QAAU,EAAY,OAAS,EACrE,GAAS,EAIX,GAAI,EAAY,OAAS,GAAK,EAAa,QAAU,EAAY,OAAS,IACxE,GAAS,IAGX,MAAO,CACL,IAAK,EAAI,IACT,QACA,cACF,EACD,EAIE,OAAO,CAAC,IAAW,EAAO,OAAS,CAAQ,EAC3C,KAAK,CAAC,EAAG,IAAM,EAAE,MAAQ,EAAE,KAAK,EAChC,MAAM,EAAG,CAAK,EAMnB,SAAS,CAAe,CAAC,EAAuB,CAU9C,MAT0B,CACxB,gBACA,kCACA,wBACA,+DACA,oDACA,kDACF,EAEyB,KAAK,CAAC,IAAY,EAAQ,KAAK,CAAI,CAAC,EAM/D,SAAS,CAAY,CAAC,EAAuB,CAE3C,MAAO,0BAA0B,KAAK,CAAI,GAAK,EAAK,OAAS,EAMxD,SAAS,CAAc,CAAC,EAA4B,CACzD,IAAM,EAAe,CACnB,UAAW,EAAM,UAAU,IAAI,CAAC,KAAS,CACvC,IAAK,EAAI,IACT,MAAO,MAAM,KAAK,EAAI,MAAM,QAAQ,CAAC,EACrC,SAAU,MAAM,KAAK,EAAI,SAAS,QAAQ,CAAC,EAC3C,UAAW,EAAI,SACjB,EAAE,EACF,IAAK,MAAM,KAAK,EAAM,IAAI,QAAQ,CAAC,EACnC,eAAgB,EAAM,eACtB,SAAU,EAAM,QAClB,EAEA,OAAO,KAAK,UAAU,EAAc,KA
AM,CAAC,EAMtC,SAAS,CAAgB,CAAC,EAA2B,CAC1D,IAAM,EAAO,KAAK,MAAM,CAAI,EAE5B,MAAO,CACL,UAAW,EAAK,UAAU,IACxB,CAAC,KAKM,CACL,IAAK,EAAI,IACT,MAAO,IAAI,IAAI,EAAI,KAAK,EACxB,SAAU,IAAI,IAAI,EAAI,QAAQ,EAC9B,UAAW,EAAI,SACjB,EACF,EACA,IAAK,IAAI,IAAI,EAAK,GAAG,EACrB,eAAgB,EAAK,eACrB,SAAU,EAAK,QACjB",
"debugId": "F6CC4F566F5A987064756E2164756E21",
"names": []
}
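The deleted `.js.map` above is an ordinary source-map v3 document, and because `sourcesContent` inlines the full original TypeScript, the pre-minification sources can be recovered from the map alone. A minimal sketch, using only standard Node APIs; the map's file path here is illustrative:

```ts
import { readFileSync, writeFileSync } from "node:fs";
import { basename } from "node:path";

// The source-map v3 fields visible in the deleted file above.
interface SourceMapV3 {
  version: 3;
  sources: string[];          // e.g. "../src/services/search/tfidf.ts"
  sourcesContent?: string[];  // original sources, inlined in full
  mappings: string;           // base64-VLQ mapping segments
  names: string[];
  debugId?: string;
}

// Illustrative path; writes each inlined source to the working directory.
const map: SourceMapV3 = JSON.parse(readFileSync("chunk-4nm4ere4.js.map", "utf8"));
map.sources.forEach((source, i) => {
  const content = map.sourcesContent?.[i];
  if (content !== undefined) writeFileSync(basename(source), content);
});
```

This is also why the 1.0.0 cleanup matters beyond size: removing `dist/*.js.map` removes the full inlined source text from the published package.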