@sylphx/flow 0.2.10 → 0.2.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +36 -0
- package/dist/chunk-01gv4qey.js +4 -0
- package/dist/chunk-01gv4qey.js.map +11 -0
- package/dist/{chunk-f9yb2zk3.js → chunk-3m9whg4q.js} +2 -2
- package/dist/{chunk-f9yb2zk3.js.map → chunk-3m9whg4q.js.map} +1 -1
- package/dist/{chunk-sgafqzer.js → chunk-3w6pd43t.js} +3 -3
- package/dist/{chunk-sgafqzer.js.map → chunk-3w6pd43t.js.map} +2 -2
- package/dist/{chunk-d6k94684.js → chunk-4nm4ere4.js} +3 -3
- package/dist/{chunk-d6k94684.js.map → chunk-4nm4ere4.js.map} +1 -1
- package/dist/{chunk-ceyg2zjg.js → chunk-4vrj3f8r.js} +3 -3
- package/dist/{chunk-ceyg2zjg.js.map → chunk-4vrj3f8r.js.map} +1 -1
- package/dist/chunk-5njgv5k5.js +161 -0
- package/dist/{chunk-rsagxdqj.js.map → chunk-5njgv5k5.js.map} +4 -6
- package/dist/{chunk-6eb5a8kv.js → chunk-67n29s4q.js} +4 -4
- package/dist/chunk-67n29s4q.js.map +10 -0
- package/dist/{chunk-66qzap9m.js → chunk-86ce45n6.js} +2 -2
- package/dist/{chunk-66qzap9m.js.map → chunk-86ce45n6.js.map} +1 -1
- package/dist/{chunk-fyvtp08n.js → chunk-99pz5wm0.js} +2 -2
- package/dist/{chunk-fyvtp08n.js.map → chunk-99pz5wm0.js.map} +1 -1
- package/dist/chunk-d409xn8f.js +6 -0
- package/dist/{chunk-cjy100rr.js.map → chunk-d409xn8f.js.map} +2 -2
- package/dist/chunk-g0qpndpd.js +23 -0
- package/dist/{chunk-gmmm7xpw.js.map → chunk-g0qpndpd.js.map} +5 -5
- package/dist/{chunk-51kpynby.js → chunk-g4baca7p.js} +3 -3
- package/dist/{chunk-51kpynby.js.map → chunk-g4baca7p.js.map} +1 -1
- package/dist/chunk-gc66xe7z.js +4 -0
- package/dist/{chunk-8ep9gk6d.js.map → chunk-gc66xe7z.js.map} +2 -2
- package/dist/{chunk-2nfq57ym.js → chunk-hj6qtsqp.js} +2 -2
- package/dist/{chunk-2nfq57ym.js.map → chunk-hj6qtsqp.js.map} +2 -2
- package/dist/{chunk-etqfkn4f.js → chunk-jbd95k1f.js} +3 -3
- package/dist/{chunk-etqfkn4f.js.map → chunk-jbd95k1f.js.map} +1 -1
- package/dist/{chunk-z61qdct1.js → chunk-kn908zkk.js} +2 -2
- package/dist/{chunk-z61qdct1.js.map → chunk-kn908zkk.js.map} +1 -1
- package/dist/{chunk-rvx8bgqk.js → chunk-mw13a082.js} +3 -3
- package/dist/{chunk-rvx8bgqk.js.map → chunk-mw13a082.js.map} +1 -1
- package/dist/chunk-nke51f3c.js +4 -0
- package/dist/chunk-nke51f3c.js.map +10 -0
- package/dist/{chunk-ny1s8fnh.js → chunk-ns5atzyz.js} +2 -2
- package/dist/{chunk-ny1s8fnh.js.map → chunk-ns5atzyz.js.map} +1 -1
- package/dist/chunk-pp4r3hp4.js +105 -0
- package/dist/chunk-pp4r3hp4.js.map +27 -0
- package/dist/{chunk-372bgp30.js → chunk-q4nh3vst.js} +3 -3
- package/dist/{chunk-372bgp30.js.map → chunk-q4nh3vst.js.map} +1 -1
- package/dist/{chunk-585jp0rg.js → chunk-q5gqgs0p.js} +3 -3
- package/dist/chunk-q5gqgs0p.js.map +10 -0
- package/dist/{chunk-f676awyz.js → chunk-s9bsh0gp.js} +3 -3
- package/dist/{chunk-f676awyz.js.map → chunk-s9bsh0gp.js.map} +1 -1
- package/dist/chunk-ss51dw5h.js +27 -0
- package/dist/{chunk-weshapwk.js.map → chunk-ss51dw5h.js.map} +5 -5
- package/dist/{chunk-mtrcdhzn.js → chunk-xs370t8p.js} +3 -3
- package/dist/{chunk-mtrcdhzn.js.map → chunk-xs370t8p.js.map} +8 -8
- package/dist/chunk-xtrn4wn0.js +3 -0
- package/dist/{chunk-b0047ggx.js.map → chunk-xtrn4wn0.js.map} +2 -2
- package/dist/{chunk-7h737bp8.js → chunk-xvfz960r.js} +3 -3
- package/dist/{chunk-7h737bp8.js.map → chunk-xvfz960r.js.map} +1 -1
- package/dist/chunk-xytc0fks.js +27 -0
- package/dist/{chunk-vc4xy6dm.js.map → chunk-xytc0fks.js.map} +2 -2
- package/dist/chunk-yxv7hqse.js +23 -0
- package/dist/chunk-yxv7hqse.js.map +11 -0
- package/dist/chunk-zv5y8yfq.js +19 -0
- package/dist/chunk-zv5y8yfq.js.map +11 -0
- package/dist/index.js +278 -285
- package/dist/index.js.map +34 -30
- package/drizzle/0002_lyrical_random.sql +2 -0
- package/drizzle/0003_romantic_lockjaw.sql +4 -0
- package/drizzle/0004_blushing_meteorite.sql +6 -0
- package/drizzle/meta/0002_snapshot.json +920 -0
- package/drizzle/meta/0003_snapshot.json +920 -0
- package/drizzle/meta/0004_snapshot.json +921 -0
- package/drizzle/meta/_journal.json +21 -0
- package/package.json +4 -1
- package/dist/chunk-057m762a.js +0 -4
- package/dist/chunk-057m762a.js.map +0 -10
- package/dist/chunk-2j2gmjg5.js +0 -107
- package/dist/chunk-2j2gmjg5.js.map +0 -25
- package/dist/chunk-585jp0rg.js.map +0 -10
- package/dist/chunk-6eb5a8kv.js.map +0 -10
- package/dist/chunk-8ep9gk6d.js +0 -4
- package/dist/chunk-9qzv4trv.js +0 -23
- package/dist/chunk-9qzv4trv.js.map +0 -11
- package/dist/chunk-b0047ggx.js +0 -3
- package/dist/chunk-cjy100rr.js +0 -6
- package/dist/chunk-e74zv5ct.js +0 -19
- package/dist/chunk-e74zv5ct.js.map +0 -10
- package/dist/chunk-gmmm7xpw.js +0 -23
- package/dist/chunk-rsagxdqj.js +0 -161
- package/dist/chunk-vc4xy6dm.js +0 -27
- package/dist/chunk-weshapwk.js +0 -27
@@ -1,4 +1,4 @@
-import"./chunk-
-export{q as
+
import"./chunk-3m9whg4q.js";import{AutoTokenizer as j}from"@huggingface/transformers";class R{tokenizer;initialized=!1;modelPath;constructor(B={}){this.modelPath=B.modelPath||"./models/starcoder2"}async initialize(){if(this.initialized)return;try{this.tokenizer=await j.from_pretrained(this.modelPath),this.initialized=!0}catch(B){throw Error(`Tokenizer initialization failed: ${B.message}`)}}async tokenize(B){if(!this.initialized)await this.initialize();let H=Date.now();if(!B||B.trim().length===0)return{tokens:[],metadata:{totalTokens:0,vocabSize:49152,processingTime:Date.now()-H,averageConfidence:0},raw:{inputIds:[],decodedText:""}};try{let J=(await this.tokenizer(B)).input_ids.tolist()[0],O=await this.tokenizer.decode(J),U=await this.createDirectTokens(O,J),K=Date.now()-H;return{tokens:U,metadata:{totalTokens:U.length,vocabSize:49152,processingTime:K,averageConfidence:0.95},raw:{inputIds:J,decodedText:O}}}catch(G){throw Error(`Tokenization failed: ${G.message}`)}}async createDirectTokens(B,H){let G=[];for(let J=0;J<H.length;J++){let O=H[J];try{let K=(await this.tokenizer.decode([O],{skip_special_tokens:!0})).trim().toLowerCase();if(K.length>0)G.push({text:K,id:O,score:1,confidence:1,relevance:"high"})}catch(U){}}return G}async getTopTokens(B,H=20){return(await this.tokenize(B)).tokens.slice(0,H)}async getTechnicalTokens(B){return(await this.tokenize(B)).tokens}async decode(B){if(!this.initialized)throw Error("Tokenizer not initialized. Call initialize() first.");return await this.tokenizer.decode(B)}async encode(B){if(!this.initialized)throw Error("Tokenizer not initialized. Call initialize() first.");return(await this.tokenizer(B)).input_ids.tolist()[0]}}async function k(B,H){try{let G=await B.getAllCodebaseFiles();if(H){if(H.file_extensions&&H.file_extensions.length>0)G=G.filter((K)=>H.file_extensions?.some((W)=>K.path.endsWith(W)));if(H.path_filter)G=G.filter((K)=>K.path.includes(H.path_filter));if(H.exclude_paths&&H.exclude_paths.length>0)G=G.filter((K)=>!H.exclude_paths?.some((W)=>K.path.includes(W)))}if(G.length===0)return null;let J=[];for(let K of G){let W=await B.getTFIDFDocument(K.path);if(W){let _=await B.getTFIDFTerms(K.path),X=new Map,$=new Map;for(let[E,Y]of Object.entries(_))X.set(E,Y);let Z=W.rawTerms||{};for(let[E,Y]of Object.entries(Z))$.set(E,Y);J.push({uri:`file://${K.path}`,terms:X,rawTerms:$,magnitude:W.magnitude})}}if(J.length===0)return null;let O=await B.getIDFValues(),U=new Map;for(let[K,W]of Object.entries(O))U.set(K,W);return{documents:J,idf:U,totalDocuments:J.length,metadata:{generatedAt:new Date().toISOString(),version:"1.0.0"}}}catch(G){return console.error("[ERROR] Failed to build search index from database:",G),null}}function M(B){let H=Array.from(B.values()).reduce((J,O)=>J+O,0),G=new Map;for(let[J,O]of B.entries())G.set(J,O/H);return G}function z(B,H){let G=new Map;for(let O of B){let U=new Set(O.keys());for(let K of U)G.set(K,(G.get(K)||0)+1)}let J=new Map;for(let[O,U]of G.entries())J.set(O,Math.log(H/U));return J}function V(B,H){let G=new Map;for(let[J,O]of B.entries()){let U=H.get(J)||0;G.set(J,O*U)}return G}function w(B){let H=0;for(let G of B.values())H+=G*G;return Math.sqrt(H)}var Q=null,v=!1;async function D(){if(!Q)Q=new R({modelPath:"./models/starcoder2"});if(!v){let{log:B,error:H}=console;console.log=()=>{},console.error=()=>{};try{await Q.initialize(),v=!0}finally{console.log=B,console.error=H}}return Q}async function S(B){let G=await(await D()).tokenize(B),J=new Map;for(let O of G.tokens){let 
U=O.text.toLowerCase(),K=J.get(U)||0;J.set(U,K+O.score)}return J}async function P(B){let G=await(await D()).tokenize(B),J=new Map;for(let O of G.tokens){let U=O.text.toLowerCase();if(!J.has(U)||O.score>0.8)J.set(U,O.text)}return Array.from(J.values())}async function q(B,H){let J=[];for(let K=0;K<B.length;K+=1){let W=B.slice(K,K+1),_=[];for(let X=0;X<W.length;X++){let $=W[X],Z=$.uri.split("/").pop()||$.uri;H?.({current:K+X+1,total:B.length,fileName:Z,status:"processing"});try{let E=await S($.content);_.push({uri:$.uri,terms:E}),H?.({current:K+X+1,total:B.length,fileName:Z,status:"completed"})}catch(E){_.push({uri:$.uri,terms:new Map}),H?.({current:K+X+1,total:B.length,fileName:Z,status:"skipped"})}}J.push(..._)}let O=z(J.map((K)=>K.terms),B.length);return{documents:J.map((K)=>{let W=M(K.terms),_=V(W,O),X=w(_);return{uri:K.uri,terms:_,rawTerms:K.terms,magnitude:X}}),idf:O,totalDocuments:B.length,metadata:{generatedAt:new Date().toISOString(),version:"5.0.0",tokenizer:"AdvancedCodeTokenizer",features:["Industry-leading code understanding","Advanced technical term recognition","Optimized for code search","Simple and effective approach","No unnecessary complexity"]}}}function h(B,H){let G=0;for(let[O,U]of B.entries()){let K=H.terms.get(O)||0;G+=U*K}let J=w(B);if(J===0||H.magnitude===0)return 0;return G/(J*H.magnitude)}async function I(B,H){let G=await P(B),J=new Map;for(let O of G){let U=O.toLowerCase(),K=H.get(U)||0;if(K>0)J.set(U,K)}return J}async function y(B,H,G={}){let{limit:J=10,minScore:O=0,boostFactors:U={}}=G,{exactMatch:K=1.5,phraseMatch:W=2,technicalMatch:_=1.8,identifierMatch:X=1.3}=U,$=await I(B,H.idf),Z=(await P(B)).map((Y)=>Y.toLowerCase());return H.documents.map((Y)=>{let L=h($,Y),C=[];for(let N of Z)if(Y.rawTerms.has(N)){let A=K;if(b(N))A=Math.max(A,_);if(p(N))A=Math.max(A,X);L*=A,C.push(N)}if(C.length===Z.length&&Z.length>1)L*=W;if(Z.length>3&&C.length>=Z.length*0.7)L*=1.2;return{uri:Y.uri,score:L,matchedTerms:C}}).filter((Y)=>Y.score>=O).sort((Y,L)=>L.score-Y.score).slice(0,J)}function b(B){return[/\b[A-Z]{2,}\b/,/\b[A-Z][a-z]+(?:[A-Z][a-z]+)+\b/,/\b[a-z]+[A-Z][a-z]*\b/,/\b\w+(?:Dir|Config|File|Path|Data|Service|Manager|Handler)\b/,/\b(?:get|set|is|has|can|should|will|do)[A-Z]\w*\b/,/\b(?:http|https|json|xml|yaml|sql|api|url|uri)\b/].some((G)=>G.test(B))}function p(B){return/^[a-zA-Z][a-zA-Z0-9_]*$/.test(B)&&B.length>1}function T(B){let H={documents:B.documents.map((G)=>({uri:G.uri,terms:Array.from(G.terms.entries()),rawTerms:Array.from(G.rawTerms.entries()),magnitude:G.magnitude})),idf:Array.from(B.idf.entries()),totalDocuments:B.totalDocuments,metadata:B.metadata};return JSON.stringify(H,null,2)}function f(B){let H=JSON.parse(B);return{documents:H.documents.map((G)=>({uri:G.uri,terms:new Map(G.terms),rawTerms:new Map(G.rawTerms),magnitude:G.magnitude})),idf:new Map(H.idf),totalDocuments:H.totalDocuments,metadata:H.metadata}}export{T as serializeIndex,y as searchDocuments,I as processQuery,f as deserializeIndex,h as calculateCosineSimilarity,k as buildSearchIndexFromDB,q as buildSearchIndex};
+export{q as d,y as e};
-//# debugId=
+//# debugId=F6CC4F566F5A987064756E2164756E21
@@ -6,6 +6,6 @@
"/**\n * TF-IDF (Term Frequency-Inverse Document Frequency) implementation\n * Used for ranking document relevance in semantic search\n */\n\nimport { AdvancedCodeTokenizer } from '../../utils/advanced-tokenizer.js';\nimport type { SeparatedMemoryStorage } from './separated-storage.js';\n\nexport interface DocumentVector {\n uri: string;\n terms: Map<string, number>; // term → TF-IDF score\n rawTerms: Map<string, number>; // term → raw frequency\n magnitude: number; // Vector magnitude for cosine similarity\n}\n\nexport interface SearchIndex {\n documents: DocumentVector[];\n idf: Map<string, number>; // term → IDF score\n totalDocuments: number;\n metadata: {\n generatedAt: string;\n version: string;\n };\n}\n\n/**\n * Build search index from database (shared between CLI and MCP)\n */\nexport async function buildSearchIndexFromDB(\n memoryStorage: SeparatedMemoryStorage,\n filters?: {\n file_extensions?: string[];\n path_filter?: string;\n exclude_paths?: string[];\n }\n): Promise<SearchIndex | null> {\n try {\n // Get all files from database\n let files = await memoryStorage.getAllCodebaseFiles();\n\n // Apply filters\n if (filters) {\n if (filters.file_extensions && filters.file_extensions.length > 0) {\n files = files.filter((file) =>\n filters.file_extensions?.some((ext: string) => file.path.endsWith(ext))\n );\n }\n\n if (filters.path_filter) {\n files = files.filter((file) => file.path.includes(filters.path_filter!));\n }\n\n if (filters.exclude_paths && filters.exclude_paths.length > 0) {\n files = files.filter(\n (file) => !filters.exclude_paths?.some((exclude: string) => file.path.includes(exclude))\n );\n }\n }\n\n if (files.length === 0) {\n return null;\n }\n\n // Build search documents - read TF-IDF terms directly from database\n const documents = [];\n for (const file of files) {\n const tfidfDoc = await memoryStorage.getTFIDFDocument(file.path);\n if (tfidfDoc) {\n // Get TF-IDF terms from database (already calculated)\n const tfidfTerms = await memoryStorage.getTFIDFTerms(file.path);\n const terms = new Map<string, number>();\n const rawTermsMap = new Map<string, number>();\n\n // Use TF-IDF terms for search scoring\n for (const [term, tfidfScore] of Object.entries(tfidfTerms)) {\n terms.set(term, tfidfScore as number);\n }\n\n // Use rawTerms for reference\n const rawTerms = tfidfDoc.rawTerms || {};\n for (const [term, freq] of Object.entries(rawTerms)) {\n rawTermsMap.set(term, freq as number);\n }\n\n documents.push({\n uri: `file://${file.path}`,\n terms,\n rawTerms: rawTermsMap,\n magnitude: tfidfDoc.magnitude,\n });\n }\n }\n\n if (documents.length === 0) {\n return null;\n }\n\n // Get IDF values from database\n const idfRecords = await memoryStorage.getIDFValues();\n const idf = new Map<string, number>();\n for (const [term, value] of Object.entries(idfRecords)) {\n idf.set(term, value as number);\n }\n\n return {\n documents,\n idf,\n totalDocuments: documents.length,\n metadata: {\n generatedAt: new Date().toISOString(),\n version: '1.0.0',\n },\n };\n } catch (error) {\n console.error('[ERROR] Failed to build search index from database:', error);\n return null;\n }\n}\n\n/**\n * Calculate Term Frequency (TF)\n * TF = (number of times term appears in document) / (total terms in document)\n */\nfunction calculateTF(termFrequency: Map<string, number>): Map<string, number> {\n const totalTerms = Array.from(termFrequency.values()).reduce((sum, freq) => sum + freq, 0);\n const tf = new Map<string, number>();\n\n for (const [term, freq] of termFrequency.entries()) {\n 
tf.set(term, freq / totalTerms);\n }\n\n return tf;\n}\n\n/**\n * Calculate Inverse Document Frequency (IDF)\n * IDF = log(total documents / documents containing term)\n */\nfunction calculateIDF(\n documents: Map<string, number>[],\n totalDocuments: number\n): Map<string, number> {\n const documentFrequency = new Map<string, number>();\n\n // Count how many documents contain each term\n for (const doc of documents) {\n const uniqueTerms = new Set(doc.keys());\n for (const term of uniqueTerms) {\n documentFrequency.set(term, (documentFrequency.get(term) || 0) + 1);\n }\n }\n\n // Calculate IDF for each term\n const idf = new Map<string, number>();\n for (const [term, docFreq] of documentFrequency.entries()) {\n idf.set(term, Math.log(totalDocuments / docFreq));\n }\n\n return idf;\n}\n\n/**\n * Calculate TF-IDF scores for a document\n */\nfunction calculateTFIDF(tf: Map<string, number>, idf: Map<string, number>): Map<string, number> {\n const tfidf = new Map<string, number>();\n\n for (const [term, tfScore] of tf.entries()) {\n const idfScore = idf.get(term) || 0;\n tfidf.set(term, tfScore * idfScore);\n }\n\n return tfidf;\n}\n\n/**\n * Calculate vector magnitude for cosine similarity\n */\nfunction calculateMagnitude(vector: Map<string, number>): number {\n let sum = 0;\n for (const value of vector.values()) {\n sum += value * value;\n }\n return Math.sqrt(sum);\n}\n\n// Global tokenizer instance for performance\nlet globalTokenizer: AdvancedCodeTokenizer | null = null;\nlet tokenizerInitialized = false;\n\n/**\n * Get or create the global tokenizer\n */\nasync function getTokenizer(): Promise<AdvancedCodeTokenizer> {\n if (!globalTokenizer) {\n globalTokenizer = new AdvancedCodeTokenizer({\n modelPath: './models/starcoder2',\n });\n }\n\n if (!tokenizerInitialized) {\n // Silently initialize - no console output\n const originalLog = console.log;\n const originalError = console.error;\n console.log = () => {}; // Temporarily silence console.log\n console.error = () => {}; // Temporarily silence console.error\n try {\n await globalTokenizer.initialize();\n tokenizerInitialized = true;\n } finally {\n console.log = originalLog; // Restore console.log\n console.error = originalError; // Restore console.error\n }\n }\n\n return globalTokenizer;\n}\n\n/**\n * Extract terms using our advanced tokenizer\n */\nasync function extractTerms(content: string): Promise<Map<string, number>> {\n const tokenizer = await getTokenizer();\n const result = await tokenizer.tokenize(content);\n const terms = new Map<string, number>();\n\n // Use token scores as TF weights\n for (const token of result.tokens) {\n const term = token.text.toLowerCase();\n const currentScore = terms.get(term) || 0;\n terms.set(term, currentScore + token.score);\n }\n\n return terms;\n}\n\n/**\n * Extract simple tokens for query processing\n */\nasync function extractQueryTokens(query: string): Promise<string[]> {\n const tokenizer = await getTokenizer();\n const result = await tokenizer.tokenize(query);\n\n // Return unique tokens, sorted by score (highest first)\n const uniqueTokens = new Map<string, string>();\n for (const token of result.tokens) {\n const lowerText = token.text.toLowerCase();\n if (!uniqueTokens.has(lowerText) || token.score > 0.8) {\n uniqueTokens.set(lowerText, token.text);\n }\n }\n\n return Array.from(uniqueTokens.values());\n}\n\nexport interface BuildIndexProgress {\n current: number;\n total: number;\n fileName: string;\n status: 'processing' | 'completed' | 'skipped';\n}\n\n/**\n * Build TF-IDF search 
index from documents using our advanced tokenizer\n */\nexport async function buildSearchIndex(\n documents: Array<{ uri: string; content: string }>,\n onProgress?: (progress: BuildIndexProgress) => void\n): Promise<SearchIndex> {\n // Process documents one by one to avoid hanging\n const batchSize = 1; // Process 1 document at a time to avoid hanging\n const documentTerms: Array<{ uri: string; terms: Map<string, number> }> = [];\n\n for (let i = 0; i < documents.length; i += batchSize) {\n const batch = documents.slice(i, i + batchSize);\n\n // Process sequentially to avoid hanging\n const batchResults = [];\n for (let j = 0; j < batch.length; j++) {\n const doc = batch[j];\n const fileName = doc.uri.split('/').pop() || doc.uri;\n\n // Report progress\n onProgress?.({\n current: i + j + 1,\n total: documents.length,\n fileName,\n status: 'processing',\n });\n\n try {\n const result = await extractTerms(doc.content);\n\n batchResults.push({\n uri: doc.uri,\n terms: result,\n });\n\n // Report completion\n onProgress?.({\n current: i + j + 1,\n total: documents.length,\n fileName,\n status: 'completed',\n });\n } catch (_error) {\n batchResults.push({\n uri: doc.uri,\n terms: new Map<string, number>(),\n });\n\n // Report skip\n onProgress?.({\n current: i + j + 1,\n total: documents.length,\n fileName,\n status: 'skipped',\n });\n }\n }\n\n documentTerms.push(...batchResults);\n }\n\n // Calculate IDF scores\n const idf = calculateIDF(\n documentTerms.map((d) => d.terms),\n documents.length\n );\n\n // Calculate TF-IDF for each document\n const documentVectors: DocumentVector[] = documentTerms.map((doc) => {\n const tf = calculateTF(doc.terms);\n const tfidf = calculateTFIDF(tf, idf);\n const magnitude = calculateMagnitude(tfidf);\n\n return {\n uri: doc.uri,\n terms: tfidf,\n rawTerms: doc.terms,\n magnitude,\n };\n });\n\n return {\n documents: documentVectors,\n idf,\n totalDocuments: documents.length,\n metadata: {\n generatedAt: new Date().toISOString(),\n version: '5.0.0',\n tokenizer: 'AdvancedCodeTokenizer',\n features: [\n 'Industry-leading code understanding',\n 'Advanced technical term recognition',\n 'Optimized for code search',\n 'Simple and effective approach',\n 'No unnecessary complexity',\n ],\n },\n };\n}\n\n/**\n * Calculate cosine similarity between query and document\n */\nexport function calculateCosineSimilarity(\n queryVector: Map<string, number>,\n docVector: DocumentVector\n): number {\n let dotProduct = 0;\n\n // Calculate dot product\n for (const [term, queryScore] of queryVector.entries()) {\n const docScore = docVector.terms.get(term) || 0;\n dotProduct += queryScore * docScore;\n }\n\n // Calculate query magnitude\n const queryMagnitude = calculateMagnitude(queryVector);\n\n if (queryMagnitude === 0 || docVector.magnitude === 0) {\n return 0;\n }\n\n return dotProduct / (queryMagnitude * docVector.magnitude);\n}\n\n/**\n * Process query into TF-IDF vector using database values\n */\nexport async function processQuery(\n query: string,\n idf: Map<string, number>\n): Promise<Map<string, number>> {\n const terms = await extractQueryTokens(query);\n const queryVector = new Map<string, number>();\n\n // 為每個查詢詞使用 IDF 值(查詢本身無 TF-IDF,直接用 IDF)\n for (const term of terms) {\n const lowerTerm = term.toLowerCase();\n const idfValue = idf.get(lowerTerm) || 0;\n\n // 純粹用 IDF 值,完全信任 StarCoder2 嘅 tokenization\n if (idfValue > 0) {\n queryVector.set(lowerTerm, idfValue);\n }\n }\n\n return queryVector;\n}\n\n/**\n * Search documents using TF-IDF and cosine similarity with 
Advanced Code Tokenizer\n */\nexport async function searchDocuments(\n query: string,\n index: SearchIndex,\n options: {\n limit?: number;\n minScore?: number;\n boostFactors?: {\n exactMatch?: number; // Boost for exact term matches\n phraseMatch?: number; // Boost for phrase matches\n technicalMatch?: number; // Boost for technical term matches\n identifierMatch?: number; // Boost for identifier matches\n };\n } = {}\n): Promise<Array<{ uri: string; score: number; matchedTerms: string[] }>> {\n const { limit = 10, minScore = 0, boostFactors = {} } = options;\n const {\n exactMatch = 1.5,\n phraseMatch = 2.0,\n technicalMatch = 1.8,\n identifierMatch = 1.3,\n } = boostFactors;\n\n // Process query using Advanced Code Tokenizer\n const queryVector = await processQuery(query, index.idf);\n const queryTokens = (await extractQueryTokens(query)).map((t) => t.toLowerCase());\n\n // Calculate similarity for each document\n const results = index.documents.map((doc) => {\n let score = calculateCosineSimilarity(queryVector, doc);\n\n // Boost for exact term matches with enhanced scoring\n const matchedTerms: string[] = [];\n for (const token of queryTokens) {\n if (doc.rawTerms.has(token)) {\n // Apply different boost factors based on term characteristics\n let boostFactor = exactMatch;\n\n // Additional boost for technical terms\n if (isTechnicalTerm(token)) {\n boostFactor = Math.max(boostFactor, technicalMatch);\n }\n\n // Additional boost for identifiers\n if (isIdentifier(token)) {\n boostFactor = Math.max(boostFactor, identifierMatch);\n }\n\n score *= boostFactor;\n matchedTerms.push(token);\n }\n }\n\n // Enhanced phrase match detection (all query terms appear in document)\n if (matchedTerms.length === queryTokens.length && queryTokens.length > 1) {\n score *= phraseMatch;\n }\n\n // Contextual relevance boost for longer queries\n if (queryTokens.length > 3 && matchedTerms.length >= queryTokens.length * 0.7) {\n score *= 1.2; // Boost for partial matches on complex queries\n }\n\n return {\n uri: doc.uri,\n score,\n matchedTerms,\n };\n });\n\n // Filter and sort\n return results\n .filter((result) => result.score >= minScore)\n .sort((a, b) => b.score - a.score)\n .slice(0, limit);\n}\n\n/**\n * Check if a term is likely a technical term\n */\nfunction isTechnicalTerm(term: string): boolean {\n const technicalPatterns = [\n /\\b[A-Z]{2,}\\b/, // Acronyms like HTTP, API, JSON\n /\\b[A-Z][a-z]+(?:[A-Z][a-z]+)+\\b/, // PascalCase like ComponentName\n /\\b[a-z]+[A-Z][a-z]*\\b/, // camelCase like functionName\n /\\b\\w+(?:Dir|Config|File|Path|Data|Service|Manager|Handler)\\b/, // Common suffixes\n /\\b(?:get|set|is|has|can|should|will|do)[A-Z]\\w*\\b/, // Common prefixes\n /\\b(?:http|https|json|xml|yaml|sql|api|url|uri)\\b/, // Technical keywords\n ];\n\n return technicalPatterns.some((pattern) => pattern.test(term));\n}\n\n/**\n * Check if a term is likely an identifier\n */\nfunction isIdentifier(term: string): boolean {\n // Identifiers typically contain letters and numbers, maybe underscores\n return /^[a-zA-Z][a-zA-Z0-9_]*$/.test(term) && term.length > 1;\n}\n\n/**\n * Serialize search index to JSON\n */\nexport function serializeIndex(index: SearchIndex): string {\n const serializable = {\n documents: index.documents.map((doc) => ({\n uri: doc.uri,\n terms: Array.from(doc.terms.entries()),\n rawTerms: Array.from(doc.rawTerms.entries()),\n magnitude: doc.magnitude,\n })),\n idf: Array.from(index.idf.entries()),\n totalDocuments: index.totalDocuments,\n metadata: index.metadata,\n };\n\n 
return JSON.stringify(serializable, null, 2);\n}\n\n/**\n * Deserialize search index from JSON\n */\nexport function deserializeIndex(json: string): SearchIndex {\n const data = JSON.parse(json);\n\n return {\n documents: data.documents.map(\n (doc: {\n uri: string;\n terms: [string, number][];\n rawTerms: [string, number][];\n magnitude: number;\n }) => ({\n uri: doc.uri,\n terms: new Map(doc.terms),\n rawTerms: new Map(doc.rawTerms),\n magnitude: doc.magnitude,\n })\n ),\n idf: new Map(data.idf),\n totalDocuments: data.totalDocuments,\n metadata: data.metadata,\n };\n}\n"
],
"mappings": "4BAKA,wBAAS,kCA2BF,MAAM,CAAsB,CACzB,UACA,YAAc,GACd,UAER,WAAW,CACT,EAEI,CAAC,EACL,CACA,KAAK,UAAY,EAAQ,WAAa,2BAMlC,WAAU,EAAkB,CAChC,GAAI,KAAK,YACP,OAGF,GAAI,CACF,KAAK,UAAY,MAAM,EAAc,gBAAgB,KAAK,SAAS,EACnE,KAAK,YAAc,GACnB,MAAO,EAAO,CACd,MAAU,MAAM,oCAAoC,EAAM,SAAS,QAOjE,SAAQ,CAAC,EAAmD,CAChE,GAAI,CAAC,KAAK,YACR,MAAM,KAAK,WAAW,EAGxB,IAAM,EAAY,KAAK,IAAI,EAG3B,GAAI,CAAC,GAAW,EAAQ,KAAK,EAAE,SAAW,EACxC,MAAO,CACL,OAAQ,CAAC,EACT,SAAU,CACR,YAAa,EACb,UAAW,MACX,eAAgB,KAAK,IAAI,EAAI,EAC7B,kBAAmB,CACrB,EACA,IAAK,CACH,SAAU,CAAC,EACX,YAAa,EACf,CACF,EAGF,GAAI,CAGF,IAAM,GADU,MAAM,KAAK,UAAU,CAAO,GACnB,UAAU,OAAO,EAAE,GAGtC,EAAc,MAAM,KAAK,UAAU,OAAO,CAAQ,EAGlD,EAAS,MAAM,KAAK,mBAAmB,EAAa,CAAQ,EAE5D,EAAiB,KAAK,IAAI,EAAI,EAEpC,MAAO,CACL,SACA,SAAU,CACR,YAAa,EAAO,OACpB,UAAW,MACX,iBACA,kBAAmB,IACrB,EACA,IAAK,CACH,WACA,aACF,CACF,EACA,MAAO,EAAO,CACd,MAAU,MAAM,wBAAwB,EAAM,SAAS,QAQ7C,mBAAkB,CAC9B,EACA,EAC0B,CAC1B,IAAM,EAA0B,CAAC,EAIjC,QAAS,EAAI,EAAG,EAAI,EAAS,OAAQ,IAAK,CACxC,IAAM,EAAU,EAAS,GACzB,GAAI,CAGF,IAAM,GADY,MAAM,KAAK,UAAU,OAAO,CAAC,CAAO,EAAG,CAAE,oBAAqB,EAAK,CAAC,GAC5D,KAAK,EAAE,YAAY,EAG7C,GAAI,EAAQ,OAAS,EACnB,EAAO,KAAK,CACV,KAAM,EACN,GAAI,EACJ,MAAO,EACP,WAAY,EACZ,UAAW,MACb,CAAC,EAEH,MAAO,EAAQ,GAGnB,OAAO,OAMH,aAAY,CAAC,EAAiB,EAAQ,GAA8B,CAExE,OADe,MAAM,KAAK,SAAS,CAAO,GAC5B,OAAO,MAAM,EAAG,CAAK,OAM/B,mBAAkB,CAAC,EAA2C,CAElE,OADe,MAAM,KAAK,SAAS,CAAO,GAC5B,YAMV,OAAM,CAAC,EAAqC,CAChD,GAAI,CAAC,KAAK,YACR,MAAU,MAAM,qDAAqD,EAEvE,OAAO,MAAM,KAAK,UAAU,OAAO,CAAQ,OAMvC,OAAM,CAAC,EAAiC,CAC5C,GAAI,CAAC,KAAK,YACR,MAAU,MAAM,qDAAqD,EAGvE,OADe,MAAM,KAAK,UAAU,CAAI,GAC1B,UAAU,OAAO,EAAE,GAErC,CClKA,eAAsB,CAAsB,CAC1C,EACA,EAK6B,CAC7B,GAAI,CAEF,IAAI,EAAQ,MAAM,EAAc,oBAAoB,EAGpD,GAAI,EAAS,CACX,GAAI,EAAQ,iBAAmB,EAAQ,gBAAgB,OAAS,EAC9D,EAAQ,EAAM,OAAO,CAAC,IACpB,EAAQ,iBAAiB,KAAK,CAAC,IAAgB,EAAK,KAAK,SAAS,CAAG,CAAC,CACxE,EAGF,GAAI,EAAQ,YACV,EAAQ,EAAM,OAAO,CAAC,IAAS,EAAK,KAAK,SAAS,EAAQ,WAAY,CAAC,EAGzE,GAAI,EAAQ,eAAiB,EAAQ,cAAc,OAAS,EAC1D,EAAQ,EAAM,OACZ,CAAC,IAAS,CAAC,EAAQ,eAAe,KAAK,CAAC,IAAoB,EAAK,KAAK,SAAS,CAAO,CAAC,CACzF,EAIJ,GAAI,EAAM,SAAW,EACnB,OAAO,KAIT,IAAM,EAAY,CAAC,EACnB,QAAW,KAAQ,EAAO,CACxB,IAAM,EAAW,MAAM,EAAc,iBAAiB,EAAK,IAAI,EAC/D,GAAI,EAAU,CAEZ,IAAM,EAAa,MAAM,EAAc,cAAc,EAAK,IAAI,EACxD,EAAQ,IAAI,IACZ,EAAc,IAAI,IAGxB,QAAY,EAAM,KAAe,OAAO,QAAQ,CAAU,EACxD,EAAM,IAAI,EAAM,CAAoB,EAItC,IAAM,EAAW,EAAS,UAAY,CAAC,EACvC,QAAY,EAAM,KAAS,OAAO,QAAQ,CAAQ,EAChD,EAAY,IAAI,EAAM,CAAc,EAGtC,EAAU,KAAK,CACb,IAAK,UAAU,EAAK,OACpB,QACA,SAAU,EACV,UAAW,EAAS,SACtB,CAAC,GAIL,GAAI,EAAU,SAAW,EACvB,OAAO,KAIT,IAAM,EAAa,MAAM,EAAc,aAAa,EAC9C,EAAM,IAAI,IAChB,QAAY,EAAM,KAAU,OAAO,QAAQ,CAAU,EACnD,EAAI,IAAI,EAAM,CAAe,EAG/B,MAAO,CACL,YACA,MACA,eAAgB,EAAU,OAC1B,SAAU,CACR,YAAa,IAAI,KAAK,EAAE,YAAY,EACpC,QAAS,OACX,CACF,EACA,MAAO,EAAO,CAEd,OADA,QAAQ,MAAM,sDAAuD,CAAK,EACnE,MAQX,SAAS,CAAW,CAAC,EAAyD,CAC5E,IAAM,EAAa,MAAM,KAAK,EAAc,OAAO,CAAC,EAAE,OAAO,CAAC,EAAK,IAAS,EAAM,EAAM,CAAC,EACnF,EAAK,IAAI,IAEf,QAAY,EAAM,KAAS,EAAc,QAAQ,EAC/C,EAAG,IAAI,EAAM,EAAO,CAAU,EAGhC,OAAO,EAOT,SAAS,CAAY,CACnB,EACA,EACqB,CACrB,IAAM,EAAoB,IAAI,IAG9B,QAAW,KAAO,EAAW,CAC3B,IAAM,EAAc,IAAI,IAAI,EAAI,KAAK,CAAC,EACtC,QAAW,KAAQ,EACjB,EAAkB,IAAI,GAAO,EAAkB,IAAI,CAAI,GAAK,GAAK,CAAC,EAKtE,IAAM,EAAM,IAAI,IAChB,QAAY,EAAM,KAAY,EAAkB,QAAQ,EACtD,EAAI,IAAI,EAAM,KAAK,IAAI,EAAiB,CAAO,CAAC,EAGlD,OAAO,EAMT,SAAS,CAAc,CAAC,EAAyB,EAA+C,CAC9F,IAAM,EAAQ,IAAI,IAElB,QAAY,EAAM,KAAY,EAAG,QAAQ,EAAG,CAC1C,IAAM,EAAW,EAAI,IAAI,CAAI,GAAK,EAClC,EAAM,IAAI,EAAM,EAAU,CAAQ,EAGpC,OAAO,EAMT,SAAS,CAAkB,CAAC,EAAqC,CAC/D,IAAI,EAAM,EACV,QAAW,KAAS,EAAO,OAAO,EAChC,GAAO,EAAQ,EAEjB,OAAO,KAAK,KAAK,CAAG,EAItB,IAAI,EAAgD,KAChD,EAAuB,GAK3B,eAAe,CAAY,EA
AmC,CAC5D,GAAI,CAAC,EACH,EAAkB,IAAI,EAAsB,CAC1C,UAAW,qBACb,CAAC,EAGH,GAAI,CAAC,EAAsB,CAEzB,IAA4B,IAAtB,EACwB,MAAxB,GAAgB,QACtB,QAAQ,IAAM,IAAM,GACpB,QAAQ,MAAQ,IAAM,GACtB,GAAI,CACF,MAAM,EAAgB,WAAW,EACjC,EAAuB,UACvB,CACA,QAAQ,IAAM,EACd,QAAQ,MAAQ,GAIpB,OAAO,EAMT,eAAe,CAAY,CAAC,EAA+C,CAEzE,IAAM,EAAS,MADG,MAAM,EAAa,GACN,SAAS,CAAO,EACzC,EAAQ,IAAI,IAGlB,QAAW,KAAS,EAAO,OAAQ,CACjC,IAAM,EAAO,EAAM,KAAK,YAAY,EAC9B,EAAe,EAAM,IAAI,CAAI,GAAK,EACxC,EAAM,IAAI,EAAM,EAAe,EAAM,KAAK,EAG5C,OAAO,EAMT,eAAe,CAAkB,CAAC,EAAkC,CAElE,IAAM,EAAS,MADG,MAAM,EAAa,GACN,SAAS,CAAK,EAGvC,EAAe,IAAI,IACzB,QAAW,KAAS,EAAO,OAAQ,CACjC,IAAM,EAAY,EAAM,KAAK,YAAY,EACzC,GAAI,CAAC,EAAa,IAAI,CAAS,GAAK,EAAM,MAAQ,IAChD,EAAa,IAAI,EAAW,EAAM,IAAI,EAI1C,OAAO,MAAM,KAAK,EAAa,OAAO,CAAC,EAazC,eAAsB,CAAgB,CACpC,EACA,EACsB,CAGtB,IAAM,EAAoE,CAAC,EAE3E,QAAS,EAAI,EAAG,EAAI,EAAU,OAAQ,GAHpB,EAGoC,CACpD,IAAM,EAAQ,EAAU,MAAM,EAAG,EAJjB,CAI8B,EAGxC,EAAe,CAAC,EACtB,QAAS,EAAI,EAAG,EAAI,EAAM,OAAQ,IAAK,CACrC,IAAM,EAAM,EAAM,GACZ,EAAW,EAAI,IAAI,MAAM,GAAG,EAAE,IAAI,GAAK,EAAI,IAGjD,IAAa,CACX,QAAS,EAAI,EAAI,EACjB,MAAO,EAAU,OACjB,WACA,OAAQ,YACV,CAAC,EAED,GAAI,CACF,IAAM,EAAS,MAAM,EAAa,EAAI,OAAO,EAE7C,EAAa,KAAK,CAChB,IAAK,EAAI,IACT,MAAO,CACT,CAAC,EAGD,IAAa,CACX,QAAS,EAAI,EAAI,EACjB,MAAO,EAAU,OACjB,WACA,OAAQ,WACV,CAAC,EACD,MAAO,EAAQ,CACf,EAAa,KAAK,CAChB,IAAK,EAAI,IACT,MAAO,IAAI,GACb,CAAC,EAGD,IAAa,CACX,QAAS,EAAI,EAAI,EACjB,MAAO,EAAU,OACjB,WACA,OAAQ,SACV,CAAC,GAIL,EAAc,KAAK,GAAG,CAAY,EAIpC,IAAM,EAAM,EACV,EAAc,IAAI,CAAC,IAAM,EAAE,KAAK,EAChC,EAAU,MACZ,EAgBA,MAAO,CACL,UAdwC,EAAc,IAAI,CAAC,IAAQ,CACnE,IAAM,EAAK,EAAY,EAAI,KAAK,EAC1B,EAAQ,EAAe,EAAI,CAAG,EAC9B,EAAY,EAAmB,CAAK,EAE1C,MAAO,CACL,IAAK,EAAI,IACT,MAAO,EACP,SAAU,EAAI,MACd,WACF,EACD,EAIC,MACA,eAAgB,EAAU,OAC1B,SAAU,CACR,YAAa,IAAI,KAAK,EAAE,YAAY,EACpC,QAAS,QACT,UAAW,wBACX,SAAU,CACR,sCACA,sCACA,4BACA,gCACA,2BACF,CACF,CACF,EAMK,SAAS,CAAyB,CACvC,EACA,EACQ,CACR,IAAI,EAAa,EAGjB,QAAY,EAAM,KAAe,EAAY,QAAQ,EAAG,CACtD,IAAM,EAAW,EAAU,MAAM,IAAI,CAAI,GAAK,EAC9C,GAAc,EAAa,EAI7B,IAAM,EAAiB,EAAmB,CAAW,EAErD,GAAI,IAAmB,GAAK,EAAU,YAAc,EAClD,MAAO,GAGT,OAAO,GAAc,EAAiB,EAAU,WAMlD,eAAsB,CAAY,CAChC,EACA,EAC8B,CAC9B,IAAM,EAAQ,MAAM,EAAmB,CAAK,EACtC,EAAc,IAAI,IAGxB,QAAW,KAAQ,EAAO,CACxB,IAAM,EAAY,EAAK,YAAY,EAC7B,EAAW,EAAI,IAAI,CAAS,GAAK,EAGvC,GAAI,EAAW,EACb,EAAY,IAAI,EAAW,CAAQ,EAIvC,OAAO,EAMT,eAAsB,CAAe,CACnC,EACA,EACA,EASI,CAAC,EACmE,CACxE,IAAQ,QAAQ,GAAI,WAAW,EAAG,eAAe,CAAC,GAAM,GAEtD,aAAa,IACb,cAAc,EACd,iBAAiB,IACjB,kBAAkB,KAChB,EAGE,EAAc,MAAM,EAAa,EAAO,EAAM,GAAG,EACjD,GAAe,MAAM,EAAmB,CAAK,GAAG,IAAI,CAAC,IAAM,EAAE,YAAY,CAAC,EA8ChF,OA3CgB,EAAM,UAAU,IAAI,CAAC,IAAQ,CAC3C,IAAI,EAAQ,EAA0B,EAAa,CAAG,EAGhD,EAAyB,CAAC,EAChC,QAAW,KAAS,EAClB,GAAI,EAAI,SAAS,IAAI,CAAK,EAAG,CAE3B,IAAI,EAAc,EAGlB,GAAI,EAAgB,CAAK,EACvB,EAAc,KAAK,IAAI,EAAa,CAAc,EAIpD,GAAI,EAAa,CAAK,EACpB,EAAc,KAAK,IAAI,EAAa,CAAe,EAGrD,GAAS,EACT,EAAa,KAAK,CAAK,EAK3B,GAAI,EAAa,SAAW,EAAY,QAAU,EAAY,OAAS,EACrE,GAAS,EAIX,GAAI,EAAY,OAAS,GAAK,EAAa,QAAU,EAAY,OAAS,IACxE,GAAS,IAGX,MAAO,CACL,IAAK,EAAI,IACT,QACA,cACF,EACD,EAIE,OAAO,CAAC,IAAW,EAAO,OAAS,CAAQ,EAC3C,KAAK,CAAC,EAAG,IAAM,EAAE,MAAQ,EAAE,KAAK,EAChC,MAAM,EAAG,CAAK,EAMnB,SAAS,CAAe,CAAC,EAAuB,CAU9C,MAT0B,CACxB,gBACA,kCACA,wBACA,+DACA,oDACA,kDACF,EAEyB,KAAK,CAAC,IAAY,EAAQ,KAAK,CAAI,CAAC,EAM/D,SAAS,CAAY,CAAC,EAAuB,CAE3C,MAAO,0BAA0B,KAAK,CAAI,GAAK,EAAK,OAAS,EAMxD,SAAS,CAAc,CAAC,EAA4B,CACzD,IAAM,EAAe,CACnB,UAAW,EAAM,UAAU,IAAI,CAAC,KAAS,CACvC,IAAK,EAAI,IACT,MAAO,MAAM,KAAK,EAAI,MAAM,QAAQ,CAAC,EACrC,SAAU,MAAM,KAAK,EAAI,SAAS,QAAQ,CAAC,EAC3C,UAAW,EAAI,SACjB,EAAE,EACF,IAAK,MAAM,KAAK,EAAM,IAAI,QAAQ,CAAC,EACnC,eAAgB,EAAM,eACtB,SAAU,EAAM,QAClB,EAEA,OAAO,KAAK,UAAU,EAAc,KA
AM,CAAC,EAMtC,SAAS,CAAgB,CAAC,EAA2B,CAC1D,IAAM,EAAO,KAAK,MAAM,CAAI,EAE5B,MAAO,CACL,UAAW,EAAK,UAAU,IACxB,CAAC,KAKM,CACL,IAAK,EAAI,IACT,MAAO,IAAI,IAAI,EAAI,KAAK,EACxB,SAAU,IAAI,IAAI,EAAI,QAAQ,EAC9B,UAAW,EAAI,SACjB,EACF,EACA,IAAK,IAAI,IAAI,EAAK,GAAG,EACrB,eAAgB,EAAK,eACrB,SAAU,EAAK,QACjB",
-"debugId": "
+"debugId": "F6CC4F566F5A987064756E2164756E21",
"names": []
}
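
For orientation, the module embedded in the sourcesContent above exports a TF-IDF search API: buildSearchIndex, searchDocuments, serializeIndex, and deserializeIndex. Below is a minimal sketch of how those exports fit together, assuming they are reachable from the @sylphx/flow entry point (the minified chunk exports them under these names, but the package's actual public surface may differ) and that the local StarCoder2 tokenizer model at ./models/starcoder2 is present, since buildSearchIndex tokenizes with it:

```ts
// Sketch only, not from the package: exercises the exports shown in the
// sourcesContent above. The import path is an assumption.
import {
  buildSearchIndex,
  searchDocuments,
  serializeIndex,
  deserializeIndex,
} from '@sylphx/flow'; // hypothetical entry point for these exports

async function main() {
  // buildSearchIndex takes { uri, content } pairs and reports per-file
  // progress as { current, total, fileName, status }.
  const index = await buildSearchIndex(
    [
      { uri: 'file://src/auth.ts', content: 'export function getUserToken() { /* ... */ }' },
      { uri: 'file://src/db.ts', content: 'export class ConnectionManager { /* ... */ }' },
    ],
    (p) => console.log(`${p.current}/${p.total} ${p.fileName}: ${p.status}`)
  );

  // Ranking is cosine similarity over TF-IDF vectors, multiplied by the
  // boost factors (defaults: exactMatch 1.5, phraseMatch 2.0,
  // technicalMatch 1.8, identifierMatch 1.3).
  const hits = await searchDocuments('user token', index, { limit: 5, minScore: 0.01 });
  for (const { uri, score, matchedTerms } of hits) {
    console.log(uri, score.toFixed(3), matchedTerms.join(','));
  }

  // Indexes round-trip through JSON (Maps are flattened to entry arrays).
  const restored = deserializeIndex(serializeIndex(index));
  console.log(`restored ${restored.totalDocuments} documents`);
}

main().catch(console.error);
```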