ruvector 0.1.80 → 0.1.82
This diff compares publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between versions as they appear in their public registries.
- package/bin/cli.js +182 -39
- package/dist/analysis/complexity.d.ts +52 -0
- package/dist/analysis/complexity.d.ts.map +1 -0
- package/dist/analysis/complexity.js +146 -0
- package/dist/analysis/index.d.ts +15 -0
- package/dist/analysis/index.d.ts.map +1 -0
- package/dist/analysis/index.js +38 -0
- package/dist/analysis/patterns.d.ts +71 -0
- package/dist/analysis/patterns.d.ts.map +1 -0
- package/dist/analysis/patterns.js +243 -0
- package/dist/analysis/security.d.ts +51 -0
- package/dist/analysis/security.d.ts.map +1 -0
- package/dist/analysis/security.js +139 -0
- package/dist/core/adaptive-embedder.d.ts +148 -0
- package/dist/core/adaptive-embedder.d.ts.map +1 -0
- package/dist/core/adaptive-embedder.js +594 -0
- package/dist/core/index.d.ts +3 -0
- package/dist/core/index.d.ts.map +1 -1
- package/dist/core/index.js +6 -1
- package/dist/core/parallel-workers.d.ts +2 -8
- package/dist/core/parallel-workers.d.ts.map +1 -1
- package/dist/workers/native-worker.d.ts +3 -3
- package/dist/workers/native-worker.d.ts.map +1 -1
- package/dist/workers/native-worker.js +45 -87
- package/package.json +1 -1
- package/.agentic-flow/intelligence.db +0 -0
- package/.agentic-flow/vectors.db +0 -0
- package/.agentic-flow/workers.db +0 -0
- package/.claude-flow/metrics/agent-metrics.json +0 -1
- package/.claude-flow/metrics/performance.json +0 -87
- package/.claude-flow/metrics/task-metrics.json +0 -10
- package/.ruvector/intelligence.json +0 -5289
- package/.ruvector/workers/code-analyzer.db +0 -0
- package/ruvector.db +0 -0
package/bin/cli.js
CHANGED
@@ -1919,50 +1919,193 @@ program
 // Embed Command - Generate embeddings
 // =============================================================================
 
-
-
-
-  .option('-t, --text <string>', 'Text to embed')
-  .option('-f, --file <path>', 'File containing text (one per line)')
-  .option('-m, --model <name>', 'Embedding model', 'all-minilm-l6-v2')
-  .option('-o, --output <file>', 'Output file for embeddings')
-  .option('--info', 'Show embedding info')
-  .action(async (options) => {
-    console.log(chalk.cyan('\n═══════════════════════════════════════════════════════════════'));
-    console.log(chalk.cyan('                    RuVector Embed'));
-    console.log(chalk.cyan('═══════════════════════════════════════════════════════════════\n'));
+// =============================================================================
+// Embed Command - Generate embeddings (now with ONNX + Adaptive LoRA)
+// =============================================================================
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+const embedCmd = program.command('embed').description('Generate embeddings from text (ONNX + Adaptive LoRA)');
+
+embedCmd
+  .command('text')
+  .description('Embed a text string')
+  .argument('<text>', 'Text to embed')
+  .option('--adaptive', 'Use adaptive embedder with LoRA')
+  .option('--domain <domain>', 'Domain for prototype learning')
+  .option('-o, --output <file>', 'Output file for embedding')
+  .action(async (text, opts) => {
+    try {
+      const { performance } = require('perf_hooks');
+      const start = performance.now();
+
+      if (opts.adaptive) {
+        const { initAdaptiveEmbedder } = require('../dist/core/adaptive-embedder.js');
+        const embedder = await initAdaptiveEmbedder();
+        const embedding = await embedder.embed(text, { domain: opts.domain });
+        const stats = embedder.getStats();
+
+        console.log(chalk.cyan('\n🧠 Adaptive Embedding (ONNX + Micro-LoRA)\n'));
+        console.log(chalk.dim(`Text: "${text.slice(0, 60)}..."`));
+        console.log(chalk.dim(`Dimension: ${embedding.length}`));
+        console.log(chalk.dim(`LoRA rank: ${stats.loraRank} (${stats.loraParams} params)`));
+        console.log(chalk.dim(`Prototypes: ${stats.prototypes}`));
+        console.log(chalk.dim(`Time: ${(performance.now() - start).toFixed(1)}ms`));
+
+        if (opts.output) {
+          fs.writeFileSync(opts.output, JSON.stringify({ text, embedding, stats }, null, 2));
+          console.log(chalk.green(`\nSaved to ${opts.output}`));
+        }
+      } else {
+        const { initOnnxEmbedder, embed } = require('../dist/core/onnx-embedder.js');
+        await initOnnxEmbedder();
+        const result = await embed(text);
+
+        console.log(chalk.cyan('\n📊 ONNX Embedding (all-MiniLM-L6-v2)\n'));
+        console.log(chalk.dim(`Text: "${text.slice(0, 60)}..."`));
+        console.log(chalk.dim(`Dimension: ${result.embedding.length}`));
+        console.log(chalk.dim(`Time: ${(performance.now() - start).toFixed(1)}ms`));
+
+        if (opts.output) {
+          fs.writeFileSync(opts.output, JSON.stringify({ text, embedding: result.embedding }, null, 2));
+          console.log(chalk.green(`\nSaved to ${opts.output}`));
+        }
+      }
+    } catch (e) {
+      console.error(chalk.red('Embedding failed:'), e.message);
     }
+  });
 
-
-
-
-
-
-
+embedCmd
+  .command('adaptive')
+  .description('Adaptive embedding with Micro-LoRA optimization')
+  .option('--stats', 'Show adaptive embedder statistics')
+  .option('--consolidate', 'Run EWC consolidation')
+  .option('--reset', 'Reset adaptive weights')
+  .option('--export <file>', 'Export learned weights')
+  .option('--import <file>', 'Import learned weights')
+  .action(async (opts) => {
+    try {
+      const { initAdaptiveEmbedder } = require('../dist/core/adaptive-embedder.js');
+      const embedder = await initAdaptiveEmbedder();
+
+      if (opts.stats) {
+        const stats = embedder.getStats();
+        console.log(chalk.cyan('\n🧠 Adaptive Embedder Statistics\n'));
+        console.log(chalk.white('Base Model:'), chalk.dim(stats.baseModel));
+        console.log(chalk.white('Dimension:'), chalk.dim(stats.dimension));
+        console.log(chalk.white('LoRA Rank:'), chalk.dim(stats.loraRank));
+        console.log(chalk.white('LoRA Params:'), chalk.dim(`${stats.loraParams} (~${(stats.loraParams / (stats.dimension * stats.dimension) * 100).toFixed(2)}% of base)`));
+        console.log(chalk.white('Adaptations:'), chalk.dim(stats.adaptations));
+        console.log(chalk.white('Prototypes:'), chalk.dim(stats.prototypes));
+        console.log(chalk.white('Memory Size:'), chalk.dim(stats.memorySize));
+        console.log(chalk.white('EWC Consolidations:'), chalk.dim(stats.ewcConsolidations));
+        console.log(chalk.white('Contrastive Updates:'), chalk.dim(stats.contrastiveUpdates));
+        console.log('');
+      }
+
+      if (opts.consolidate) {
+        console.log(chalk.yellow('Running EWC consolidation...'));
+        await embedder.consolidate();
+        console.log(chalk.green('✓ Consolidation complete'));
+      }
+
+      if (opts.reset) {
+        embedder.reset();
+        console.log(chalk.green('✓ Adaptive weights reset'));
+      }
+
+      if (opts.export) {
+        const data = embedder.export();
+        fs.writeFileSync(opts.export, JSON.stringify(data, null, 2));
+        console.log(chalk.green(`✓ Exported to ${opts.export}`));
+      }
+
+      if (opts.import) {
+        const data = JSON.parse(fs.readFileSync(opts.import, 'utf-8'));
+        embedder.import(data);
+        console.log(chalk.green(`✓ Imported from ${opts.import}`));
+      }
+    } catch (e) {
+      console.error(chalk.red('Error:'), e.message);
     }
+  });
 
-
+embedCmd
+  .command('benchmark')
+  .description('Benchmark base vs adaptive embeddings')
+  .option('--iterations <n>', 'Number of iterations', '10')
+  .action(async (opts) => {
+    try {
+      const { performance } = require('perf_hooks');
+      const iterations = parseInt(opts.iterations) || 10;
+
+      console.log(chalk.cyan('\n🚀 Embedding Benchmark: Base ONNX vs Adaptive LoRA\n'));
+
+      const testTexts = [
+        'This is a test sentence for embedding generation.',
+        'The quick brown fox jumps over the lazy dog.',
+        'Machine learning models can learn from data.',
+        'Vector databases enable semantic search.',
+      ];
+
+      // Benchmark base ONNX
+      const { initOnnxEmbedder, embed, embedBatch } = require('../dist/core/onnx-embedder.js');
+      await initOnnxEmbedder();
+
+      console.log(chalk.yellow('1. Base ONNX Embeddings'));
+      const baseStart = performance.now();
+      for (let i = 0; i < iterations; i++) {
+        await embed(testTexts[i % testTexts.length]);
+      }
+      const baseTime = (performance.now() - baseStart) / iterations;
+      console.log(chalk.dim(`  Single: ${baseTime.toFixed(1)}ms avg`));
+
+      const baseBatchStart = performance.now();
+      for (let i = 0; i < Math.ceil(iterations / 4); i++) {
+        await embedBatch(testTexts);
+      }
+      const baseBatchTime = (performance.now() - baseBatchStart) / Math.ceil(iterations / 4);
+      console.log(chalk.dim(`  Batch(4): ${baseBatchTime.toFixed(1)}ms avg (${(4000 / baseBatchTime).toFixed(1)}/s)`));
+
+      // Benchmark adaptive
+      const { initAdaptiveEmbedder } = require('../dist/core/adaptive-embedder.js');
+      const adaptive = await initAdaptiveEmbedder();
+
+      console.log(chalk.yellow('\n2. Adaptive ONNX + LoRA'));
+      const adaptStart = performance.now();
+      for (let i = 0; i < iterations; i++) {
+        await adaptive.embed(testTexts[i % testTexts.length]);
+      }
+      const adaptTime = (performance.now() - adaptStart) / iterations;
+      console.log(chalk.dim(`  Single: ${adaptTime.toFixed(1)}ms avg`));
+
+      const adaptBatchStart = performance.now();
+      for (let i = 0; i < Math.ceil(iterations / 4); i++) {
+        await adaptive.embedBatch(testTexts);
+      }
+      const adaptBatchTime = (performance.now() - adaptBatchStart) / Math.ceil(iterations / 4);
+      console.log(chalk.dim(`  Batch(4): ${adaptBatchTime.toFixed(1)}ms avg (${(4000 / adaptBatchTime).toFixed(1)}/s)`));
+
+      // Summary
+      console.log(chalk.cyan('\n═══════════════════════════════════════════════════════════════'));
+      console.log(chalk.bold('Summary'));
+      console.log(chalk.cyan('═══════════════════════════════════════════════════════════════'));
+      const stats = adaptive.getStats();
+      console.log(chalk.dim(`\nAdaptive overhead: +${(adaptTime - baseTime).toFixed(1)}ms (+${((adaptTime/baseTime - 1) * 100).toFixed(1)}%)`));
+      console.log(chalk.dim(`LoRA params: ${stats.loraParams} (rank ${stats.loraRank})`));
+      console.log(chalk.dim(`Memory prototypes: ${stats.prototypes}`));
+      console.log(chalk.dim(`Episodic memory: ${stats.memorySize} entries`));
+
+      console.log(chalk.white('\nBenefits of Adaptive:'));
+      console.log(chalk.dim('  • Domain-specific fine-tuning via Micro-LoRA'));
+      console.log(chalk.dim('  • Contrastive learning from co-edit patterns'));
+      console.log(chalk.dim('  • EWC++ prevents catastrophic forgetting'));
+      console.log(chalk.dim('  • Prototype-based domain adaptation'));
+      console.log(chalk.dim('  • Episodic memory augmentation'));
+      console.log('');
+    } catch (e) {
+      console.error(chalk.red('Benchmark failed:'), e.message);
+      if (e.stack) console.error(chalk.dim(e.stack));
+    }
   });
 
 // =============================================================================
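The old single-flag `embed` command is replaced above by an `embed` command group with `text`, `adaptive`, and `benchmark` subcommands, all wired to the new adaptive-embedder module. Below is a minimal TypeScript sketch of the programmatic path that `embed text --adaptive --domain <domain>` takes; the deep import specifier `ruvector/dist/core/adaptive-embedder.js`, the CLI binary name, and the exact return types are assumptions inferred from the dist layout and the calls visible in the diff, not documented API.

// Sketch only: mirrors the CLI's adaptive branch shown in the diff above.
import { initAdaptiveEmbedder } from 'ruvector/dist/core/adaptive-embedder.js'; // assumed specifier

async function embedWithLora(text: string, domain?: string) {
  const embedder = await initAdaptiveEmbedder();             // ONNX base model + Micro-LoRA state
  const embedding = await embedder.embed(text, { domain });  // optional domain feeds prototype learning
  const stats = embedder.getStats();                         // loraRank, loraParams, prototypes, ...
  console.log(`dim=${embedding.length}, rank=${stats.loraRank}, prototypes=${stats.prototypes}`);
  return embedding;
}

// CLI equivalent (binary name assumed from the package name):
//   ruvector embed text "Vector databases enable semantic search." --adaptive --domain docs
embedWithLora('Vector databases enable semantic search.', 'docs').catch(console.error);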
package/dist/analysis/complexity.d.ts
ADDED
@@ -0,0 +1,52 @@
+/**
+ * Complexity Analysis Module - Consolidated code complexity metrics
+ *
+ * Single source of truth for cyclomatic complexity and code metrics.
+ * Used by native-worker.ts and parallel-workers.ts
+ */
+export interface ComplexityResult {
+    file: string;
+    lines: number;
+    nonEmptyLines: number;
+    cyclomaticComplexity: number;
+    functions: number;
+    avgFunctionSize: number;
+    maxFunctionComplexity?: number;
+}
+export interface ComplexityThresholds {
+    complexity: number;
+    functions: number;
+    lines: number;
+    avgSize: number;
+}
+export declare const DEFAULT_THRESHOLDS: ComplexityThresholds;
+/**
+ * Analyze complexity of a single file
+ */
+export declare function analyzeFile(filePath: string, content?: string): ComplexityResult;
+/**
+ * Analyze complexity of multiple files
+ */
+export declare function analyzeFiles(files: string[], maxFiles?: number): ComplexityResult[];
+/**
+ * Check if complexity exceeds thresholds
+ */
+export declare function exceedsThresholds(result: ComplexityResult, thresholds?: ComplexityThresholds): boolean;
+/**
+ * Get complexity rating
+ */
+export declare function getComplexityRating(complexity: number): 'low' | 'medium' | 'high' | 'critical';
+/**
+ * Filter files exceeding thresholds
+ */
+export declare function filterComplex(results: ComplexityResult[], thresholds?: ComplexityThresholds): ComplexityResult[];
+declare const _default: {
+    DEFAULT_THRESHOLDS: ComplexityThresholds;
+    analyzeFile: typeof analyzeFile;
+    analyzeFiles: typeof analyzeFiles;
+    exceedsThresholds: typeof exceedsThresholds;
+    getComplexityRating: typeof getComplexityRating;
+    filterComplex: typeof filterComplex;
+};
+export default _default;
+//# sourceMappingURL=complexity.d.ts.map
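A hedged usage sketch of the declared surface; the import specifier is assumed from the dist layout (the module ships as CommonJS), and the file paths are placeholders.

import {
  analyzeFiles,
  filterComplex,
  exceedsThresholds,
  DEFAULT_THRESHOLDS,
  type ComplexityResult,
} from 'ruvector/dist/analysis/complexity.js'; // assumed specifier

// Placeholder file list; analyzeFiles caps the batch via the optional maxFiles argument.
const results: ComplexityResult[] = analyzeFiles(['src/a.ts', 'src/b.ts'], 50);

// Keep only files breaching DEFAULT_THRESHOLDS (or pass custom thresholds).
const flagged = filterComplex(results, DEFAULT_THRESHOLDS);
for (const r of flagged) {
  console.log(r.file, r.cyclomaticComplexity, exceedsThresholds(r));
}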
package/dist/analysis/complexity.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"complexity.d.ts","sourceRoot":"","sources":["../../src/analysis/complexity.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAIH,MAAM,WAAW,gBAAgB;IAC/B,IAAI,EAAE,MAAM,CAAC;IACb,KAAK,EAAE,MAAM,CAAC;IACd,aAAa,EAAE,MAAM,CAAC;IACtB,oBAAoB,EAAE,MAAM,CAAC;IAC7B,SAAS,EAAE,MAAM,CAAC;IAClB,eAAe,EAAE,MAAM,CAAC;IACxB,qBAAqB,CAAC,EAAE,MAAM,CAAC;CAChC;AAED,MAAM,WAAW,oBAAoB;IACnC,UAAU,EAAE,MAAM,CAAC;IACnB,SAAS,EAAE,MAAM,CAAC;IAClB,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,eAAO,MAAM,kBAAkB,EAAE,oBAKhC,CAAC;AAEF;;GAEG;AACH,wBAAgB,WAAW,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,MAAM,GAAG,gBAAgB,CAsDhF;AAED;;GAEG;AACH,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE,QAAQ,GAAE,MAAY,GAAG,gBAAgB,EAAE,CAExF;AAED;;GAEG;AACH,wBAAgB,iBAAiB,CAC/B,MAAM,EAAE,gBAAgB,EACxB,UAAU,GAAE,oBAAyC,GACpD,OAAO,CAOT;AAED;;GAEG;AACH,wBAAgB,mBAAmB,CAAC,UAAU,EAAE,MAAM,GAAG,KAAK,GAAG,QAAQ,GAAG,MAAM,GAAG,UAAU,CAK9F;AAED;;GAEG;AACH,wBAAgB,aAAa,CAC3B,OAAO,EAAE,gBAAgB,EAAE,EAC3B,UAAU,GAAE,oBAAyC,GACpD,gBAAgB,EAAE,CAEpB;;;;;;;;;AAED,wBAOE"}
package/dist/analysis/complexity.js
ADDED
@@ -0,0 +1,146 @@
+"use strict";
+/**
+ * Complexity Analysis Module - Consolidated code complexity metrics
+ *
+ * Single source of truth for cyclomatic complexity and code metrics.
+ * Used by native-worker.ts and parallel-workers.ts
+ */
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DEFAULT_THRESHOLDS = void 0;
+exports.analyzeFile = analyzeFile;
+exports.analyzeFiles = analyzeFiles;
+exports.exceedsThresholds = exceedsThresholds;
+exports.getComplexityRating = getComplexityRating;
+exports.filterComplex = filterComplex;
+const fs = __importStar(require("fs"));
+exports.DEFAULT_THRESHOLDS = {
+    complexity: 10,
+    functions: 30,
+    lines: 500,
+    avgSize: 50,
+};
+/**
+ * Analyze complexity of a single file
+ */
+function analyzeFile(filePath, content) {
+    try {
+        const fileContent = content ?? (fs.existsSync(filePath) ? fs.readFileSync(filePath, 'utf-8') : '');
+        if (!fileContent) {
+            return { file: filePath, lines: 0, nonEmptyLines: 0, cyclomaticComplexity: 1, functions: 0, avgFunctionSize: 0 };
+        }
+        const lines = fileContent.split('\n');
+        const nonEmptyLines = lines.filter(l => l.trim().length > 0).length;
+        // Count branching statements for cyclomatic complexity
+        const branches = (fileContent.match(/\bif\b/g)?.length || 0) +
+            (fileContent.match(/\belse\b/g)?.length || 0) +
+            (fileContent.match(/\bfor\b/g)?.length || 0) +
+            (fileContent.match(/\bwhile\b/g)?.length || 0) +
+            (fileContent.match(/\bswitch\b/g)?.length || 0) +
+            (fileContent.match(/\bcase\b/g)?.length || 0) +
+            (fileContent.match(/\bcatch\b/g)?.length || 0) +
+            (fileContent.match(/\?\?/g)?.length || 0) +
+            (fileContent.match(/&&/g)?.length || 0) +
+            (fileContent.match(/\|\|/g)?.length || 0) +
+            (fileContent.match(/\?[^:]/g)?.length || 0); // Ternary
+        const cyclomaticComplexity = branches + 1;
+        // Count functions
+        const functionPatterns = [
+            /function\s+\w+/g,
+            /\w+\s*=\s*(?:async\s*)?\(/g,
+            /\w+\s*:\s*(?:async\s*)?\(/g,
+            /(?:async\s+)?(?:public|private|protected)?\s+\w+\s*\([^)]*\)\s*[:{]/g,
+        ];
+        let functions = 0;
+        for (const pattern of functionPatterns) {
+            functions += (fileContent.match(pattern) || []).length;
+        }
+        // Deduplicate by rough estimate
+        functions = Math.ceil(functions / 2);
+        const avgFunctionSize = functions > 0 ? Math.round(nonEmptyLines / functions) : nonEmptyLines;
+        return {
+            file: filePath,
+            lines: lines.length,
+            nonEmptyLines,
+            cyclomaticComplexity,
+            functions,
+            avgFunctionSize,
+        };
+    }
+    catch {
+        return { file: filePath, lines: 0, nonEmptyLines: 0, cyclomaticComplexity: 1, functions: 0, avgFunctionSize: 0 };
+    }
+}
+/**
+ * Analyze complexity of multiple files
+ */
+function analyzeFiles(files, maxFiles = 100) {
+    return files.slice(0, maxFiles).map(f => analyzeFile(f));
+}
+/**
+ * Check if complexity exceeds thresholds
+ */
+function exceedsThresholds(result, thresholds = exports.DEFAULT_THRESHOLDS) {
+    return (result.cyclomaticComplexity > thresholds.complexity ||
+        result.functions > thresholds.functions ||
+        result.lines > thresholds.lines ||
+        result.avgFunctionSize > thresholds.avgSize);
+}
+/**
+ * Get complexity rating
+ */
+function getComplexityRating(complexity) {
+    if (complexity <= 5)
+        return 'low';
+    if (complexity <= 10)
+        return 'medium';
+    if (complexity <= 20)
+        return 'high';
+    return 'critical';
+}
+/**
+ * Filter files exceeding thresholds
+ */
+function filterComplex(results, thresholds = exports.DEFAULT_THRESHOLDS) {
+    return results.filter(r => exceedsThresholds(r, thresholds));
+}
+exports.default = {
+    DEFAULT_THRESHOLDS: exports.DEFAULT_THRESHOLDS,
+    analyzeFile,
+    analyzeFiles,
+    exceedsThresholds,
+    getComplexityRating,
+    filterComplex,
+};
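The implementation is a regex heuristic rather than an AST pass: cyclomatic complexity is the count of branch tokens (`if`, `else`, loops, `switch`/`case`, `catch`, `??`, `&&`, `||`, ternary) plus one, and the function count is a rough regex tally halved to offset double matches. A small worked example follows, with the import specifier again assumed from the dist layout.

import { analyzeFile, getComplexityRating } from 'ruvector/dist/analysis/complexity.js'; // assumed specifier

// The optional second argument supplies the source inline instead of reading filePath from disk.
const src = [
  'function pick(a, b) {',
  '  if (a && a.ok) {',
  '    return a;',
  '  } else {',
  '    return b;',
  '  }',
  '}',
].join('\n');

// Branch tokens matched by the regexes: one `if`, one `else`, one `&&`, so 3 branches,
// cyclomaticComplexity = 3 + 1 = 4, which getComplexityRating maps to 'low'.
const result = analyzeFile('pick.js', src);
console.log(result.cyclomaticComplexity, getComplexityRating(result.cyclomaticComplexity));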
package/dist/analysis/index.d.ts
ADDED
@@ -0,0 +1,15 @@
+/**
+ * Analysis Module - Consolidated code analysis utilities
+ *
+ * Single source of truth for:
+ * - Security scanning
+ * - Complexity analysis
+ * - Pattern extraction
+ */
+export * from './security';
+export * from './complexity';
+export * from './patterns';
+export { default as security } from './security';
+export { default as complexity } from './complexity';
+export { default as patterns } from './patterns';
+//# sourceMappingURL=index.d.ts.map
package/dist/analysis/index.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/analysis/index.ts"],"names":[],"mappings":"AAAA;;;;;;;GAOG;AAEH,cAAc,YAAY,CAAC;AAC3B,cAAc,cAAc,CAAC;AAC7B,cAAc,YAAY,CAAC;AAG3B,OAAO,EAAE,OAAO,IAAI,QAAQ,EAAE,MAAM,YAAY,CAAC;AACjD,OAAO,EAAE,OAAO,IAAI,UAAU,EAAE,MAAM,cAAc,CAAC;AACrD,OAAO,EAAE,OAAO,IAAI,QAAQ,EAAE,MAAM,YAAY,CAAC"}
package/dist/analysis/index.js
ADDED
@@ -0,0 +1,38 @@
+"use strict";
+/**
+ * Analysis Module - Consolidated code analysis utilities
+ *
+ * Single source of truth for:
+ * - Security scanning
+ * - Complexity analysis
+ * - Pattern extraction
+ */
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.patterns = exports.complexity = exports.security = void 0;
+__exportStar(require("./security"), exports);
+__exportStar(require("./complexity"), exports);
+__exportStar(require("./patterns"), exports);
+// Re-export defaults for convenience
+var security_1 = require("./security");
+Object.defineProperty(exports, "security", { enumerable: true, get: function () { return __importDefault(security_1).default; } });
+var complexity_1 = require("./complexity");
+Object.defineProperty(exports, "complexity", { enumerable: true, get: function () { return __importDefault(complexity_1).default; } });
+var patterns_1 = require("./patterns");
+Object.defineProperty(exports, "patterns", { enumerable: true, get: function () { return __importDefault(patterns_1).default; } });
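With the barrel in place, consumers can reach all three analyzers through one specifier instead of deep-importing each compiled file. A sketch, assuming `ruvector/dist/analysis` resolves to this index (an inference from the dist layout, not confirmed by the diff); the security module's members are not shown in this excerpt, so only its grouped default is mentioned.

// Flat named re-exports...
import { analyzeFile } from 'ruvector/dist/analysis'; // assumed specifier
// ...plus each module's default under a grouped name (security, complexity, patterns).
import { complexity, patterns } from 'ruvector/dist/analysis';

console.log(analyzeFile('bin/cli.js').lines);
console.log(complexity.getComplexityRating(12)); // 12 is above 10 and at most 20, so 'high'
console.log(patterns.detectLanguage('bin/cli.js'));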
package/dist/analysis/patterns.d.ts
ADDED
@@ -0,0 +1,71 @@
+/**
+ * Pattern Extraction Module - Consolidated code pattern detection
+ *
+ * Single source of truth for extracting functions, imports, exports, etc.
+ * Used by native-worker.ts and parallel-workers.ts
+ */
+export interface PatternMatch {
+    type: 'function' | 'class' | 'import' | 'export' | 'todo' | 'variable' | 'type';
+    match: string;
+    file: string;
+    line?: number;
+}
+export interface FilePatterns {
+    file: string;
+    language: string;
+    functions: string[];
+    classes: string[];
+    imports: string[];
+    exports: string[];
+    todos: string[];
+    variables: string[];
+}
+/**
+ * Detect language from file extension
+ */
+export declare function detectLanguage(file: string): string;
+/**
+ * Extract function names from content
+ */
+export declare function extractFunctions(content: string): string[];
+/**
+ * Extract class names from content
+ */
+export declare function extractClasses(content: string): string[];
+/**
+ * Extract import statements from content
+ */
+export declare function extractImports(content: string): string[];
+/**
+ * Extract export statements from content
+ */
+export declare function extractExports(content: string): string[];
+/**
+ * Extract TODO/FIXME comments from content
+ */
+export declare function extractTodos(content: string): string[];
+/**
+ * Extract all patterns from a file
+ */
+export declare function extractAllPatterns(filePath: string, content?: string): FilePatterns;
+/**
+ * Extract patterns from multiple files
+ */
+export declare function extractFromFiles(files: string[], maxFiles?: number): FilePatterns[];
+/**
+ * Convert FilePatterns to PatternMatch array (for native-worker compatibility)
+ */
+export declare function toPatternMatches(patterns: FilePatterns): PatternMatch[];
+declare const _default: {
+    detectLanguage: typeof detectLanguage;
+    extractFunctions: typeof extractFunctions;
+    extractClasses: typeof extractClasses;
+    extractImports: typeof extractImports;
+    extractExports: typeof extractExports;
+    extractTodos: typeof extractTodos;
+    extractAllPatterns: typeof extractAllPatterns;
+    extractFromFiles: typeof extractFromFiles;
+    toPatternMatches: typeof toPatternMatches;
+};
+export default _default;
+//# sourceMappingURL=patterns.d.ts.map
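A short sketch of the pattern-extraction surface declared above; the import specifier and the analyzed file path are placeholders, and whether `extractAllPatterns` reads the file itself when `content` is omitted is inferred from the optional parameter rather than stated in this diff.

import { extractAllPatterns, toPatternMatches, detectLanguage } from 'ruvector/dist/analysis/patterns.js'; // assumed specifier

const shape = extractAllPatterns('src/workers/native-worker.ts'); // placeholder path
console.log(detectLanguage(shape.file), shape.functions.length, shape.todos);

// Flatten to the PatternMatch records ({ type, match, file, line? }) that
// native-worker-style consumers expect.
const matches = toPatternMatches(shape);
console.log(matches.filter(m => m.type === 'todo'));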
package/dist/analysis/patterns.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"patterns.d.ts","sourceRoot":"","sources":["../../src/analysis/patterns.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAIH,MAAM,WAAW,YAAY;IAC3B,IAAI,EAAE,UAAU,GAAG,OAAO,GAAG,QAAQ,GAAG,QAAQ,GAAG,MAAM,GAAG,UAAU,GAAG,MAAM,CAAC;IAChF,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,CAAC,EAAE,MAAM,CAAC;CACf;AAED,MAAM,WAAW,YAAY;IAC3B,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,EAAE,MAAM,CAAC;IACjB,SAAS,EAAE,MAAM,EAAE,CAAC;IACpB,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,KAAK,EAAE,MAAM,EAAE,CAAC;IAChB,SAAS,EAAE,MAAM,EAAE,CAAC;CACrB;AAED;;GAEG;AACH,wBAAgB,cAAc,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,CAUnD;AAED;;GAEG;AACH,wBAAgB,gBAAgB,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,CA2B1D;AAED;;GAEG;AACH,wBAAgB,cAAc,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,CAmBxD;AAED;;GAEG;AACH,wBAAgB,cAAc,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,CAmBxD;AAED;;GAEG;AACH,wBAAgB,cAAc,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,CAuBxD;AAED;;GAEG;AACH,wBAAgB,YAAY,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,CAUtD;AAED;;GAEG;AACH,wBAAgB,kBAAkB,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,MAAM,GAAG,YAAY,CA0BnF;AAED;;GAEG;AACH,wBAAgB,gBAAgB,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE,QAAQ,GAAE,MAAY,GAAG,YAAY,EAAE,CAExF;AAED;;GAEG;AACH,wBAAgB,gBAAgB,CAAC,QAAQ,EAAE,YAAY,GAAG,YAAY,EAAE,CAoBvE;;;;;;;;;;;;AAED,wBAUE"}