cntx-ui 2.0.11 → 2.0.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,441 @@
1
+ /**
2
+ * Semantic Chunking Integration for cntx-ui Server
3
+ * Extends the existing server with semantic analysis capabilities
4
+ */
5
+
6
import { readFileSync, writeFileSync } from 'fs'
import { join } from 'path'
import { pathToFileURL } from 'url'

import SemanticChunker from './semantic-chunker.js'
9
+
10
/**
 * Semantic Chunking Integration for cntx-ui.
 *
 * Drives the SemanticChunker over the project's source trees and converts
 * the resulting chunks into bundle-configuration suggestions that can be
 * copied into `.cntx/bundles.json`.
 */
class SemanticIntegration {
  /**
   * @param {string} [projectPath=process.cwd()] - Root directory to analyze.
   */
  constructor(projectPath = process.cwd()) {
    this.projectPath = projectPath
    this.chunker = new SemanticChunker({
      includeImports: true,
      includeExports: true,
      detectComponentTypes: true,
      groupRelatedFiles: true,
      minChunkSize: 100,
      maxChunkSize: 10000
    })
  }

  /**
   * Analyze the project and generate semantic bundle suggestions.
   *
   * @returns {Promise<object>} Result object with `timestamp`, `projectPath`,
   *   `suggestions`, per-area `analyses`, and an overall `summary`.
   * @throws Re-throws any analysis failure after logging it.
   */
  async generateSemanticBundles() {
    try {
      console.log('šŸ” Analyzing project for semantic chunking...')

      // Analyze each area of the codebase separately so suggestions can be
      // named per area. Runs in parallel; Promise.all fails fast.
      const analyses = await Promise.all([
        this.chunker.analyzeProject(this.projectPath, ['web/src/components/**/*.{tsx,ts}']),
        this.chunker.analyzeProject(this.projectPath, ['web/src/hooks/**/*.{tsx,ts}']),
        this.chunker.analyzeProject(this.projectPath, ['web/src/lib/**/*.{tsx,ts}']),
        this.chunker.analyzeProject(this.projectPath, ['web/src/utils/**/*.{tsx,ts}']),
        this.chunker.analyzeProject(this.projectPath, ['lib/**/*.js']),
      ])

      const [componentsAnalysis, hooksAnalysis, libAnalysis, utilsAnalysis, serverAnalysis] = analyses

      // Generate bundle suggestions from the named per-area analyses.
      const suggestions = this.generateBundleSuggestions([
        { name: 'components', analysis: componentsAnalysis },
        { name: 'hooks', analysis: hooksAnalysis },
        { name: 'lib', analysis: libAnalysis },
        { name: 'utils', analysis: utilsAnalysis },
        { name: 'server', analysis: serverAnalysis }
      ])

      console.log('āœ… Semantic analysis complete!')

      return {
        timestamp: new Date().toISOString(),
        projectPath: this.projectPath,
        suggestions,
        analyses: {
          components: this.chunker.formatResults(componentsAnalysis),
          hooks: this.chunker.formatResults(hooksAnalysis),
          lib: this.chunker.formatResults(libAnalysis),
          utils: this.chunker.formatResults(utilsAnalysis),
          server: this.chunker.formatResults(serverAnalysis)
        },
        summary: this.generateOverallSummary(analyses)
      }
    } catch (error) {
      console.error('āŒ Semantic analysis failed:', error.message)
      throw error
    }
  }

  /**
   * Generate bundle configuration suggestions based on semantic analysis.
   *
   * @param {Array<{name: string, analysis: object}>} namedAnalyses
   * @returns {{recommended: object, alternative: object, notes: Array}}
   */
  generateBundleSuggestions(namedAnalyses) {
    const suggestions = {
      recommended: {},
      alternative: {},
      notes: []
    }

    for (const { name, analysis } of namedAnalyses) {
      if (analysis.chunks.length === 0) continue

      // Recommended: group by semantic purpose.
      const purposeGroups = this.groupChunksByPurpose(analysis.chunks)

      for (const [purpose, chunks] of Object.entries(purposeGroups)) {
        const bundleName = `${name}-${purpose}`
        suggestions.recommended[bundleName] = this.generatePatternsForChunks(chunks)
      }

      // Alternative: group by directory structure.
      const dirGroups = this.groupChunksByDirectory(analysis.chunks)

      for (const [dir, chunks] of Object.entries(dirGroups)) {
        const bundleName = `${name}-${dir}`
        suggestions.alternative[bundleName] = this.generatePatternsForChunks(chunks)
      }
    }

    // Always-useful bundles (ui-system, tests, config, docs).
    this.addSpecializedBundles(suggestions, namedAnalyses)

    // Size/complexity warnings and general recommendations.
    this.addOptimizationNotes(suggestions, namedAnalyses)

    return suggestions
  }

  /**
   * Group chunks by their primary purpose.
   *
   * @param {Array<object>} chunks
   * @returns {Object<string, Array<object>>} purpose -> chunks ('misc' when unset).
   */
  groupChunksByPurpose(chunks) {
    const groups = {}

    for (const chunk of chunks) {
      const purpose = chunk.purpose || 'misc'
      if (!groups[purpose]) {
        groups[purpose] = []
      }
      groups[purpose].push(chunk)
    }

    return groups
  }

  /**
   * Group chunks by directory, using the first dash-separated segment of the
   * chunk name as the directory key ('misc' when the name is empty).
   *
   * @param {Array<object>} chunks
   * @returns {Object<string, Array<object>>}
   */
  groupChunksByDirectory(chunks) {
    const groups = {}

    for (const chunk of chunks) {
      const dir = chunk.name.split('-')[0] || 'misc'
      if (!groups[dir]) {
        groups[dir] = []
      }
      groups[dir].push(chunk)
    }

    return groups
  }

  /**
   * Generate deduplicated glob patterns covering every file in the chunks.
   *
   * @param {Array<object>} chunks
   * @returns {string[]}
   */
  generatePatternsForChunks(chunks) {
    const patterns = new Set()

    for (const chunk of chunks) {
      for (const file of chunk.files) {
        patterns.add(this.fileToGlobPattern(file))
      }
    }

    return Array.from(patterns)
  }

  /**
   * Convert a file path to a glob pattern that matches files with the same
   * extension under the same directory.
   *
   * Fixes over the naive version: root-level files no longer produce a
   * broken leading-slash pattern (`/**\/*.json`), and extensionless files
   * (e.g. `Makefile`) match themselves exactly instead of a bogus pattern.
   *
   * @param {string} filePath - Slash-separated relative path.
   * @returns {string} Glob pattern.
   */
  fileToGlobPattern(filePath) {
    const parts = filePath.split('/')
    const fileName = parts[parts.length - 1]
    const dir = parts.slice(0, -1).join('/')

    // lastIndexOf <= 0 covers both "Makefile" (no dot) and dotfiles like
    // ".env" — neither has a meaningful extension to generalize on.
    const dotIndex = fileName.lastIndexOf('.')
    if (dotIndex <= 0) return filePath

    const ext = fileName.slice(dotIndex + 1)
    return dir ? `${dir}/**/*.${ext}` : `*.${ext}`
  }

  /**
   * Add specialized bundle suggestions (UI system, tests, config, docs).
   * Mutates `suggestions.recommended` in place.
   */
  addSpecializedBundles(suggestions, namedAnalyses) {
    // UI Components bundle — only when the components analysis found chunks.
    const componentsAnalysis = namedAnalyses.find(a => a.name === 'components')?.analysis
    if (componentsAnalysis?.chunks.length > 0) {
      // `tags` may be absent on a chunk; optional chaining avoids a TypeError.
      const uiComponents = componentsAnalysis.chunks.filter(c =>
        c.name.includes('ui') || c.tags?.includes('has-components')
      )

      if (uiComponents.length > 0) {
        suggestions.recommended['ui-system'] = [
          'web/src/components/ui/**/*.tsx',
          'web/src/components/theme-*.tsx'
        ]
      }
    }

    // Test files bundle.
    suggestions.recommended['tests'] = [
      '**/*.test.{js,jsx,ts,tsx}',
      '**/*.spec.{js,jsx,ts,tsx}',
      '**/__tests__/**/*'
    ]

    // Configuration bundle.
    suggestions.recommended['config'] = [
      '*.config.{js,ts}',
      '*.json',
      'package.json',
      'tsconfig*.json',
      '.env*'
    ]

    // Documentation bundle.
    suggestions.recommended['docs'] = [
      '**/*.md',
      'docs/**/*',
      'README*'
    ]
  }

  /**
   * Add optimization notes and recommendations.
   * Replaces `suggestions.notes` with a freshly built list.
   */
  addOptimizationNotes(suggestions, namedAnalyses) {
    suggestions.notes = []

    // Overall size note.
    const totalFiles = namedAnalyses.reduce((sum, a) => sum + a.analysis.summary.totalFiles, 0)
    const totalChunks = namedAnalyses.reduce((sum, a) => sum + a.analysis.summary.totalChunks, 0)

    suggestions.notes.push({
      type: 'info',
      message: `Project has ${totalFiles} files organized into ${totalChunks} semantic chunks`
    })

    // Flag files that exceed 300 lines.
    const largeFiles = []
    for (const { analysis } of namedAnalyses) {
      for (const [path, file] of Object.entries(analysis.files)) {
        if (file.lines > 300) {
          largeFiles.push({ path, lines: file.lines })
        }
      }
    }

    if (largeFiles.length > 0) {
      suggestions.notes.push({
        type: 'warning',
        message: `${largeFiles.length} files exceed 300 lines and may benefit from splitting`,
        details: largeFiles.slice(0, 3).map(f => `${f.path} (${f.lines} lines)`)
      })
    }

    // Flag files the chunker rated as high complexity.
    const complexFiles = []
    for (const { analysis } of namedAnalyses) {
      for (const [path, file] of Object.entries(analysis.files)) {
        if (file.complexity?.level === 'high') {
          complexFiles.push({ path, score: file.complexity.score })
        }
      }
    }

    if (complexFiles.length > 0) {
      suggestions.notes.push({
        type: 'optimization',
        message: `${complexFiles.length} files have high complexity and may need refactoring`,
        details: complexFiles.slice(0, 3).map(f => `${f.path} (score: ${f.score})`)
      })
    }

    // General recommendation.
    suggestions.notes.push({
      type: 'suggestion',
      message: 'Consider using semantic bundles for better AI context understanding',
      details: [
        'UI components grouped by functionality',
        'Hooks grouped by domain logic',
        'Utilities grouped by purpose',
        'Tests separated for focused debugging'
      ]
    })
  }

  /**
   * Generate an overall project summary by merging per-area summaries.
   *
   * @param {Array<object>} analyses - Objects carrying a `summary` field.
   * @returns {object} Totals, merged distributions, and averages
   *   (averages are 0 — not NaN — when no files were analyzed).
   */
  generateOverallSummary(analyses) {
    const totalFiles = analyses.reduce((sum, a) => sum + a.summary.totalFiles, 0)
    const totalSize = analyses.reduce((sum, a) => sum + a.summary.totalSize, 0)
    const totalLines = analyses.reduce((sum, a) => sum + a.summary.totalLines, 0)
    const totalChunks = analyses.reduce((sum, a) => sum + a.summary.totalChunks, 0)

    const fileTypes = {}
    const filePurposes = {}
    const complexityDistribution = {}

    for (const analysis of analyses) {
      // Merge per-area counters into the overall distributions.
      for (const [type, count] of Object.entries(analysis.summary.fileTypes)) {
        fileTypes[type] = (fileTypes[type] || 0) + count
      }

      for (const [purpose, count] of Object.entries(analysis.summary.filePurposes)) {
        filePurposes[purpose] = (filePurposes[purpose] || 0) + count
      }

      for (const [level, count] of Object.entries(analysis.summary.complexityDistribution)) {
        complexityDistribution[level] = (complexityDistribution[level] || 0) + count
      }
    }

    return {
      totalFiles,
      totalSize,
      totalLines,
      totalChunks,
      fileTypes,
      filePurposes,
      complexityDistribution,
      // Guard against division by zero on an empty project.
      averageFileSize: totalFiles > 0 ? Math.round(totalSize / totalFiles) : 0,
      averageLinesPerFile: totalFiles > 0 ? Math.round(totalLines / totalFiles) : 0,
      formattedSize: this.formatBytes(totalSize)
    }
  }

  /**
   * Convert semantic suggestions to the cntx-ui bundle config format.
   *
   * @param {{recommended: object}} suggestions
   * @returns {{bundles: object}}
   */
  convertToConfigFormat(suggestions) {
    return {
      bundles: {
        // Keep existing master bundle
        master: ['**/*'],

        // Add recommended semantic bundles
        ...suggestions.recommended,

        // Add alternative bundles as commented examples
        // ...suggestions.alternative (uncomment to use directory-based grouping)
      }
    }
  }

  /**
   * Save full analysis results to a JSON file under the project root.
   *
   * @param {object} results
   * @param {string} [outputPath='semantic-analysis.json'] - Relative path.
   * @returns {Promise<string>} Absolute path written.
   * @throws Re-throws write failures after logging.
   */
  async saveAnalysisResults(results, outputPath = 'semantic-analysis.json') {
    try {
      const fullPath = join(this.projectPath, outputPath)
      writeFileSync(fullPath, JSON.stringify(results, null, 2))
      console.log(`šŸ“ Analysis saved to ${fullPath}`)
      return fullPath
    } catch (error) {
      console.error('āŒ Failed to save analysis:', error.message)
      throw error
    }
  }

  /**
   * Generate the bundle-config suggestions file.
   *
   * @param {object} results - Output of generateSemanticBundles().
   * @param {string} [outputPath='semantic-bundle-suggestions.json']
   * @returns {Promise<string>} Absolute path written.
   * @throws Re-throws write failures after logging.
   */
  async generateBundleConfigSuggestions(results, outputPath = 'semantic-bundle-suggestions.json') {
    try {
      const config = this.convertToConfigFormat(results.suggestions)
      const fullPath = join(this.projectPath, outputPath)

      const output = {
        timestamp: results.timestamp,
        description: 'Semantic chunking suggestions for cntx-ui bundles',
        usage: 'Copy the bundles object to your .cntx/bundles.json file',
        ...config,
        notes: results.suggestions.notes,
        summary: results.summary
      }

      writeFileSync(fullPath, JSON.stringify(output, null, 2))
      console.log(`šŸ“¦ Bundle suggestions saved to ${fullPath}`)
      return fullPath
    } catch (error) {
      console.error('āŒ Failed to save bundle suggestions:', error.message)
      throw error
    }
  }

  /**
   * Format a byte count as a human-readable string.
   *
   * Clamps the unit index so values of 1 TB and above render with the
   * largest known unit instead of indexing past the array ("1 undefined").
   *
   * @param {number} bytes - Non-negative byte count.
   * @returns {string} e.g. "1.5 KB".
   */
  formatBytes(bytes) {
    if (bytes === 0) return '0 Bytes'
    const k = 1024
    const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB', 'PB']
    const i = Math.min(Math.floor(Math.log(bytes) / Math.log(k)), sizes.length - 1)
    return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i]
  }
}
399
+
400
+ export default SemanticIntegration
401
+
402
// CLI usage: `node semantic-integration.js` runs a full analysis and writes
// the result files into the project root.
// pathToFileURL() correctly handles Windows drive letters and characters
// that need percent-encoding, which the hand-built `file://${...}` string
// comparison did not — it silently never matched on such paths.
if (process.argv[1] && import.meta.url === pathToFileURL(process.argv[1]).href) {
  const integration = new SemanticIntegration()

  console.log('šŸš€ Starting semantic analysis integration...')

  try {
    const results = await integration.generateSemanticBundles()

    // Persist both the raw analysis and the ready-to-use bundle suggestions.
    await integration.saveAnalysisResults(results)
    await integration.generateBundleConfigSuggestions(results)

    console.log('\nšŸ“Š Project Summary:')
    console.log(`šŸ“ ${results.summary.totalFiles} files analyzed`)
    console.log(`šŸ’¾ ${results.summary.formattedSize} total size`)
    console.log(`šŸ“¦ ${results.summary.totalChunks} semantic chunks created`)
    console.log(`šŸŽÆ ${Object.keys(results.suggestions.recommended).length} recommended bundles`)

    console.log('\nšŸŽÆ Recommended Bundles:')
    for (const [name, patterns] of Object.entries(results.suggestions.recommended)) {
      console.log(`  • ${name}: ${patterns.length} patterns`)
    }

    console.log('\nšŸ’” Notes:')
    results.suggestions.notes.forEach(note => {
      const emoji = note.type === 'warning' ? 'āš ļø' : note.type === 'optimization' ? 'šŸ”§' : 'šŸ’”'
      console.log(`  ${emoji} ${note.message}`)
    })

    console.log('\nāœ… Semantic integration complete!')
    console.log('šŸ“„ Check semantic-bundle-suggestions.json for ready-to-use bundle configurations')

  } catch (error) {
    console.error('āŒ Integration failed:', error.message)
    process.exit(1)
  }
}