ruvector 0.2.21 → 0.2.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -2
- package/bin/cli.js +160 -0
- package/package.json +9 -5
- package/src/decompiler/api-prober.js +302 -0
- package/src/decompiler/model-decompiler.js +423 -0
- package/dist/analysis/complexity.d.ts +0 -52
- package/dist/analysis/complexity.d.ts.map +0 -1
- package/dist/analysis/complexity.js +0 -146
- package/dist/analysis/index.d.ts +0 -15
- package/dist/analysis/index.d.ts.map +0 -1
- package/dist/analysis/index.js +0 -38
- package/dist/analysis/patterns.d.ts +0 -71
- package/dist/analysis/patterns.d.ts.map +0 -1
- package/dist/analysis/patterns.js +0 -243
- package/dist/analysis/security.d.ts +0 -51
- package/dist/analysis/security.d.ts.map +0 -1
- package/dist/analysis/security.js +0 -139
- package/dist/core/adaptive-embedder.d.ts +0 -156
- package/dist/core/adaptive-embedder.d.ts.map +0 -1
- package/dist/core/adaptive-embedder.js +0 -838
- package/dist/core/agentdb-fast.d.ts +0 -149
- package/dist/core/agentdb-fast.d.ts.map +0 -1
- package/dist/core/agentdb-fast.js +0 -301
- package/dist/core/ast-parser.d.ts +0 -108
- package/dist/core/ast-parser.d.ts.map +0 -1
- package/dist/core/ast-parser.js +0 -602
- package/dist/core/attention-fallbacks.d.ts +0 -321
- package/dist/core/attention-fallbacks.d.ts.map +0 -1
- package/dist/core/attention-fallbacks.js +0 -552
- package/dist/core/cluster-wrapper.d.ts +0 -148
- package/dist/core/cluster-wrapper.d.ts.map +0 -1
- package/dist/core/cluster-wrapper.js +0 -271
- package/dist/core/coverage-router.d.ts +0 -88
- package/dist/core/coverage-router.d.ts.map +0 -1
- package/dist/core/coverage-router.js +0 -315
- package/dist/core/diff-embeddings.d.ts +0 -93
- package/dist/core/diff-embeddings.d.ts.map +0 -1
- package/dist/core/diff-embeddings.js +0 -334
- package/dist/core/gnn-wrapper.d.ts +0 -143
- package/dist/core/gnn-wrapper.d.ts.map +0 -1
- package/dist/core/gnn-wrapper.js +0 -213
- package/dist/core/graph-algorithms.d.ts +0 -83
- package/dist/core/graph-algorithms.d.ts.map +0 -1
- package/dist/core/graph-algorithms.js +0 -514
- package/dist/core/graph-wrapper.d.ts +0 -147
- package/dist/core/graph-wrapper.d.ts.map +0 -1
- package/dist/core/graph-wrapper.js +0 -299
- package/dist/core/index.d.ts +0 -48
- package/dist/core/index.d.ts.map +0 -1
- package/dist/core/index.js +0 -89
- package/dist/core/intelligence-engine.d.ts +0 -258
- package/dist/core/intelligence-engine.d.ts.map +0 -1
- package/dist/core/intelligence-engine.js +0 -1030
- package/dist/core/learning-engine.d.ts +0 -160
- package/dist/core/learning-engine.d.ts.map +0 -1
- package/dist/core/learning-engine.js +0 -589
- package/dist/core/neural-embeddings.d.ts +0 -393
- package/dist/core/neural-embeddings.d.ts.map +0 -1
- package/dist/core/neural-embeddings.js +0 -1091
- package/dist/core/neural-perf.d.ts +0 -331
- package/dist/core/neural-perf.d.ts.map +0 -1
- package/dist/core/neural-perf.js +0 -704
- package/dist/core/onnx/loader.js +0 -348
- package/dist/core/onnx/pkg/LICENSE +0 -21
- package/dist/core/onnx/pkg/loader.js +0 -348
- package/dist/core/onnx/pkg/package.json +0 -3
- package/dist/core/onnx/pkg/ruvector_onnx_embeddings_wasm.d.ts +0 -112
- package/dist/core/onnx/pkg/ruvector_onnx_embeddings_wasm.js +0 -5
- package/dist/core/onnx/pkg/ruvector_onnx_embeddings_wasm_bg.js +0 -638
- package/dist/core/onnx/pkg/ruvector_onnx_embeddings_wasm_bg.wasm +0 -0
- package/dist/core/onnx/pkg/ruvector_onnx_embeddings_wasm_bg.wasm.d.ts +0 -29
- package/dist/core/onnx/pkg/ruvector_onnx_embeddings_wasm_cjs.js +0 -127
- package/dist/core/onnx-embedder.d.ts +0 -105
- package/dist/core/onnx-embedder.d.ts.map +0 -1
- package/dist/core/onnx-embedder.js +0 -410
- package/dist/core/onnx-llm.d.ts +0 -206
- package/dist/core/onnx-llm.d.ts.map +0 -1
- package/dist/core/onnx-llm.js +0 -430
- package/dist/core/onnx-optimized.d.ts +0 -109
- package/dist/core/onnx-optimized.d.ts.map +0 -1
- package/dist/core/onnx-optimized.js +0 -419
- package/dist/core/parallel-intelligence.d.ts +0 -109
- package/dist/core/parallel-intelligence.d.ts.map +0 -1
- package/dist/core/parallel-intelligence.js +0 -340
- package/dist/core/parallel-workers.d.ts +0 -177
- package/dist/core/parallel-workers.d.ts.map +0 -1
- package/dist/core/parallel-workers.js +0 -671
- package/dist/core/router-wrapper.d.ts +0 -62
- package/dist/core/router-wrapper.d.ts.map +0 -1
- package/dist/core/router-wrapper.js +0 -209
- package/dist/core/rvf-wrapper.d.ts +0 -86
- package/dist/core/rvf-wrapper.d.ts.map +0 -1
- package/dist/core/rvf-wrapper.js +0 -102
- package/dist/core/sona-wrapper.d.ts +0 -226
- package/dist/core/sona-wrapper.d.ts.map +0 -1
- package/dist/core/sona-wrapper.js +0 -282
- package/dist/core/tensor-compress.d.ts +0 -134
- package/dist/core/tensor-compress.d.ts.map +0 -1
- package/dist/core/tensor-compress.js +0 -432
- package/dist/index.d.ts +0 -105
- package/dist/index.d.ts.map +0 -1
- package/dist/index.js +0 -221
- package/dist/services/embedding-service.d.ts +0 -136
- package/dist/services/embedding-service.d.ts.map +0 -1
- package/dist/services/embedding-service.js +0 -294
- package/dist/services/index.d.ts +0 -6
- package/dist/services/index.d.ts.map +0 -1
- package/dist/services/index.js +0 -26
- package/dist/types.d.ts +0 -145
- package/dist/types.d.ts.map +0 -1
- package/dist/types.js +0 -2
- package/dist/workers/benchmark.d.ts +0 -44
- package/dist/workers/benchmark.d.ts.map +0 -1
- package/dist/workers/benchmark.js +0 -230
- package/dist/workers/index.d.ts +0 -10
- package/dist/workers/index.d.ts.map +0 -1
- package/dist/workers/index.js +0 -25
- package/dist/workers/native-worker.d.ts +0 -76
- package/dist/workers/native-worker.d.ts.map +0 -1
- package/dist/workers/native-worker.js +0 -490
- package/dist/workers/types.d.ts +0 -69
- package/dist/workers/types.d.ts.map +0 -1
- package/dist/workers/types.js +0 -7
|
@@ -0,0 +1,423 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* LLM model weight decompiler for Node.js.
|
|
5
|
+
* Parses GGUF and Safetensors files to reconstruct architecture info.
|
|
6
|
+
* See ADR-138.
|
|
7
|
+
*/
|
|
8
|
+
|
|
9
|
+
const fs = require('fs');
|
|
10
|
+
const path = require('path');
|
|
11
|
+
const crypto = require('crypto');
|
|
12
|
+
|
|
13
|
+
// ── GGUF constants ───────────────────────────────────────────────────────
|
|
14
|
+
|
|
15
|
+
const GGUF_MAGIC = 0x46554747;
|
|
16
|
+
|
|
17
|
+
// GGML quantization type id → human-readable name and effective bits-per-weight.
// Block formats (Q4_0, Q2_K, …) carry per-block scale factors, hence the
// fractional bpw values.
const QUANT_TYPES = {
  0: { name: 'F32', bpw: 32 },
  1: { name: 'F16', bpw: 16 },
  2: { name: 'Q4_0', bpw: 4.5 },
  3: { name: 'Q4_1', bpw: 5 },
  6: { name: 'Q5_0', bpw: 5.5 },
  7: { name: 'Q5_1', bpw: 6 },
  8: { name: 'Q8_0', bpw: 8.5 },
  9: { name: 'Q8_1', bpw: 9 },
  10: { name: 'Q2_K', bpw: 2.56 },
  11: { name: 'Q3_K', bpw: 3.44 },
  12: { name: 'Q4_K', bpw: 4.5 },
  13: { name: 'Q5_K', bpw: 5.5 },
  14: { name: 'Q6_K', bpw: 6.56 },
  15: { name: 'Q8_K', bpw: 8.5 },
  29: { name: 'BF16', bpw: 16 },
};
|
|
27
|
+
|
|
28
|
+
// ── Main entry ───────────────────────────────────────────────────────────
|
|
29
|
+
|
|
30
|
+
/**
 * Dispatch a model file to the decompiler matching its extension.
 * @param {string} filePath - Path to a `.gguf` or `.safetensors` file.
 * @returns {Promise<object>} Decompilation report from the format-specific parser.
 * @throws {Error} When the extension is neither `.gguf` nor `.safetensors`.
 */
async function decompileModelFile(filePath) {
  const ext = path.extname(filePath).toLowerCase();
  switch (ext) {
    case '.gguf':
      return decompileGguf(filePath);
    case '.safetensors':
      return decompileSafetensors(filePath);
    default:
      throw new Error(`Unsupported model format: ${ext} (expected .gguf or .safetensors)`);
  }
}
|
|
36
|
+
|
|
37
|
+
// ── GGUF decompiler ──────────────────────────────────────────────────────
|
|
38
|
+
|
|
39
|
+
/**
 * Parse a GGUF file's header, metadata key/values, and tensor directory.
 * Only the structural prefix is read; tensor payloads are never loaded.
 * @param {string} filePath - Path to a `.gguf` file.
 * @returns {object} Report assembled by buildResult().
 * @throws {Error} On a bad magic number, oversized string, or unknown value type.
 */
function decompileGguf(filePath) {
  const fd = fs.openSync(filePath, 'r');
  try {
    const stat = fs.fstatSync(fd);
    let pos = 0;

    // Sequential little-endian readers over the open fd; `pos` is the cursor.
    function readBuf(n) {
      const buf = Buffer.alloc(n);
      fs.readSync(fd, buf, 0, n, pos);
      pos += n;
      return buf;
    }
    function readU32() { return readBuf(4).readUInt32LE(0); }
    function readU64() { return Number(readBuf(8).readBigUInt64LE(0)); }
    function readF32() { return readBuf(4).readFloatLE(0); }
    function readF64() { return readBuf(8).readDoubleLE(0); }
    function readStr() {
      const len = readU64();
      if (len > 65536) throw new Error(`String too long: ${len}`);
      return readBuf(len).toString('utf8');
    }

    // Read one typed metadata value (GGUF value-type tag precedes the payload).
    function readValue() {
      const type = readU32();
      switch (type) {
        case 0: return readBuf(1).readUInt8(0);         // uint8
        case 1: return readBuf(1).readInt8(0);          // int8
        case 2: return readBuf(2).readUInt16LE(0);      // uint16
        case 3: return readBuf(2).readInt16LE(0);       // int16
        case 4: return readU32();                       // uint32
        case 5: return readBuf(4).readInt32LE(0);       // int32
        case 6: return readF32();                       // float32
        case 7: return readBuf(1).readUInt8(0) !== 0;   // bool
        case 8: return readStr();                       // string
        case 9: { // Array
          const elemType = readU32();
          const count = readU64();
          const arr = [];
          for (let i = 0; i < Math.min(count, 10000); i++) {
            if (elemType === 8) arr.push(readStr());
            else if (elemType === 4) arr.push(readU32());
            else if (elemType === 0) arr.push(readBuf(1).readUInt8(0));
            else if (elemType === 5) arr.push(readBuf(4).readInt32LE(0));
            else if (elemType === 6) arr.push(readF32());
            else if (elemType === 10) arr.push(readU64());
            // FIX: bool elements are 1 byte in GGUF; the old fallback skipped
            // 8 bytes for elemType 7, corrupting every field parsed afterwards.
            else if (elemType === 7) arr.push(readBuf(1).readUInt8(0) !== 0);
            else readBuf(elemType <= 1 ? 1 : elemType <= 3 ? 2 : elemType <= 6 ? 4 : 8);
          }
          // Arrays longer than 10000 are truncated; variable-size (string)
          // elements cannot be reliably skipped, so we return what we have.
          return arr;
        }
        case 10: return readU64();                          // uint64
        case 11: return Number(readBuf(8).readBigInt64LE(0)); // int64
        case 12: return readF64();                          // float64
        default: throw new Error(`Unknown value type: ${type}`);
      }
    }

    // Header: magic, version, tensor count, metadata KV count.
    const magic = readU32();
    if (magic !== GGUF_MAGIC) throw new Error(`Not a GGUF file (magic: 0x${magic.toString(16)})`);
    const version = readU32();
    const tensorCount = readU64();
    const metadataCount = readU64();

    // Metadata key/value section.
    const metadata = {};
    for (let i = 0; i < metadataCount; i++) {
      const key = readStr();
      metadata[key] = readValue();
    }

    // Tensor directory: name, shape, quant type, data offset.
    const tensors = [];
    for (let i = 0; i < tensorCount; i++) {
      const name = readStr();
      const nDims = readU32();
      const shape = [];
      for (let d = 0; d < nDims; d++) shape.push(readU64());
      const quantType = readU32();
      const offset = readU64();
      const qt = QUANT_TYPES[quantType] || { name: `Unknown(${quantType})`, bpw: 0 };
      tensors.push({ name, shape, quantType, quantName: qt.name, bpw: qt.bpw, offset });
    }

    return buildResult({
      format: `GGUF v${version}`,
      metadata,
      tensors,
      fileSize: stat.size,
      filePath,
    });
  } finally {
    // FIX: the fd previously leaked whenever a parse error was thrown before
    // the single closeSync at the end of the happy path.
    fs.closeSync(fd);
  }
}
|
|
135
|
+
|
|
136
|
+
// ── Safetensors decompiler ───────────────────────────────────────────────
|
|
137
|
+
|
|
138
|
+
/**
 * Parse a Safetensors file: an 8-byte LE header length followed by a JSON
 * header describing every tensor (dtype, shape, data offsets).
 * @param {string} filePath - Path to a `.safetensors` file.
 * @returns {object} Report assembled by buildResult().
 * @throws {Error} When the declared header exceeds 100 MB (sanity limit).
 */
function decompileSafetensors(filePath) {
  const fd = fs.openSync(filePath, 'r');
  let headerBuf;
  let fileSize;
  try {
    fileSize = fs.fstatSync(fd).size;
    const lenBuf = Buffer.alloc(8);
    fs.readSync(fd, lenBuf, 0, 8, 0);
    const headerLen = Number(lenBuf.readBigUInt64LE(0));
    if (headerLen > 100 * 1024 * 1024) throw new Error(`Header too large: ${headerLen}`);
    headerBuf = Buffer.alloc(headerLen);
    fs.readSync(fd, headerBuf, 0, headerLen, 8);
  } finally {
    // FIX: the fd previously leaked when the header-size guard threw.
    fs.closeSync(fd);
  }

  const header = JSON.parse(headerBuf.toString('utf8'));
  const metadata = {};
  const tensors = [];
  // Bits per element for each Safetensors dtype; unknown dtypes default to 32.
  const dtypeMap = { F32: 32, F16: 16, BF16: 16, F64: 64, I8: 8, I16: 16, I32: 32, I64: 64 };

  for (const [name, info] of Object.entries(header)) {
    if (name === '__metadata__') {
      // Free-form string map supplied by the exporter.
      Object.assign(metadata, info);
      continue;
    }
    if (!info || !info.dtype) continue;
    tensors.push({
      name,
      shape: info.shape || [],
      quantName: info.dtype,
      bpw: dtypeMap[info.dtype] || 32,
      offset: info.data_offsets ? info.data_offsets[0] : 0,
    });
  }

  // Order tensors by position in the file, matching GGUF output.
  tensors.sort((a, b) => a.offset - b.offset);

  return buildResult({
    format: 'Safetensors',
    metadata,
    tensors,
    fileSize,
    filePath,
  });
}
|
|
180
|
+
|
|
181
|
+
// ── Architecture inference ───────────────────────────────────────────────
|
|
182
|
+
|
|
183
|
+
/**
 * Assemble the final decompilation report shared by both file formats.
 * @param {{format: string, metadata: object, tensors: object[], fileSize: number, filePath: string}} parts
 * @returns {object} Report with architecture, layers, tokenizer, quantization,
 *   witness hashes, flattened metadata, and file size.
 */
function buildResult({ format, metadata, tensors, fileSize, filePath }) {
  const architecture = inferArchitecture(metadata, tensors);
  // SHA3 is unavailable in Node's crypto module, so SHA-256 is used instead.
  // NOTE(review): the witness hashes the file *path*, not its contents —
  // confirm that is intentional before relying on it for integrity checks.
  const digest = crypto.createHash('sha256').update(filePath).digest('hex');

  return {
    format,
    architecture,
    layers: extractLayers(tensors, architecture).slice(0, 50), // cap report size
    tokenizer: extractTokenizer(metadata),
    quantization: detectQuantization(tensors, architecture),
    witness: { source_hash: digest, chain_root: digest.slice(0, 32) },
    metadata: flattenMetadata(metadata),
    fileSize,
  };
}
|
|
202
|
+
|
|
203
|
+
/**
 * Reconstruct architecture hyperparameters, preferring GGUF metadata keys
 * (`<arch>.embedding_length`, …) and falling back to tensor-shape heuristics.
 * @param {object} metadata - Parsed metadata key/value map.
 * @param {object[]} tensors - Tensor directory entries ({ name, shape, ... }).
 * @returns {object} Architecture summary (sizes, head counts, param totals).
 */
function inferArchitecture(metadata, tensors) {
  const archKey = metadata['general.architecture'] || '';
  // Metadata keys are namespaced by architecture name when one is declared.
  const fromMeta = (suffix) => Number(metadata[`${archKey ? `${archKey}.` : ''}${suffix}`]);

  const hiddenSize = fromMeta('embedding_length') || inferHiddenSize(tensors);
  const numLayers = fromMeta('block_count') || inferNumLayers(tensors);
  const numHeads = fromMeta('attention.head_count') || inferNumHeads(hiddenSize);
  const numKvHeads = fromMeta('attention.head_count_kv') || inferKvHeads(tensors, hiddenSize, numHeads);
  const ffnSize = fromMeta('feed_forward_length') || inferFfnSize(tensors);
  const maxSeqLen = fromMeta('context_length') || 0;

  let totalParams = 0;
  for (const t of tensors) {
    totalParams += t.shape.reduce((prod, dim) => prod * dim, 1);
  }

  return {
    name: archKey || 'unknown',
    hidden_size: hiddenSize,
    num_layers: numLayers,
    num_heads: numHeads,
    num_kv_heads: numKvHeads,
    intermediate_size: ffnSize,
    vocab_size: inferVocabSize(tensors),
    max_sequence_length: maxSeqLen,
    total_params: totalParams,
    estimated_size_mb: (totalParams * 2) / (1024 * 1024), // 2 bytes/param (FP16 baseline)
  };
}
|
|
228
|
+
|
|
229
|
+
/**
 * Guess the hidden dimension from the first 2-D embedding tensor:
 * its second axis is taken as the hidden size. Returns 0 when no
 * "embed"/"token_embd" tensor is present.
 */
function inferHiddenSize(tensors) {
  const embedding = tensors.find(
    (t) => t.shape.length === 2 && (t.name.includes('embed') || t.name.includes('token_embd')),
  );
  return embedding ? embedding.shape[1] : 0;
}
|
|
237
|
+
|
|
238
|
+
/**
 * Count transformer blocks by scanning tensor names for per-layer prefixes
 * (`blk.N.`, `layers.N.`, `h.N.`) and returning the highest index + 1.
 * Returns 0 when no per-layer tensor is found.
 */
function inferNumLayers(tensors) {
  let highest = -1;
  for (const { name } of tensors) {
    const match = name.match(/(?:blk|layers|h)\.\s*(\d+)\./);
    if (match !== null) {
      const idx = parseInt(match[1], 10);
      if (idx > highest) highest = idx;
    }
  }
  return highest + 1; // -1 + 1 === 0 when nothing matched
}
|
|
246
|
+
|
|
247
|
+
/**
 * Guess the attention head count by trying common head dimensions in
 * priority order; the first that divides the hidden size evenly wins.
 * Returns 0 for a zero hidden size or when no candidate divides it.
 */
function inferNumHeads(hiddenSize) {
  if (!hiddenSize) return 0;
  const candidateHeadDims = [128, 64, 96, 256];
  const headDim = candidateHeadDims.find((hd) => hiddenSize % hd === 0);
  return headDim === undefined ? 0 : hiddenSize / headDim;
}
|
|
254
|
+
|
|
255
|
+
/**
 * Detect grouped-query attention: the K projection's output dimension,
 * divided by the per-head dimension, gives the KV head count. Falls back
 * to `numHeads` (i.e. standard multi-head attention) when undetectable.
 */
function inferKvHeads(tensors, hiddenSize, numHeads) {
  if (!hiddenSize || !numHeads) return numHeads;
  const headDim = hiddenSize / numHeads;
  for (const t of tensors) {
    const isKeyProjection = t.name.includes('attn_k') || t.name.includes('k_proj');
    if (!isKeyProjection || t.shape.length !== 2) continue;
    if (headDim > 0 && t.shape[0] % headDim === 0) {
      return t.shape[0] / headDim;
    }
  }
  return numHeads;
}
|
|
265
|
+
|
|
266
|
+
/**
 * Guess the feed-forward intermediate size from the first 2-D up/gate
 * projection tensor's output dimension. Returns 0 when none is found.
 */
function inferFfnSize(tensors) {
  const ffnMarkers = ['ffn_up', 'up_proj', 'ffn_gate', 'gate_proj'];
  for (const t of tensors) {
    if (t.shape.length !== 2) continue;
    if (ffnMarkers.some((marker) => t.name.includes(marker))) {
      return t.shape[0];
    }
  }
  return 0;
}
|
|
275
|
+
|
|
276
|
+
/**
 * Guess the vocabulary size from the first 2-D embedding tensor:
 * its first axis is the vocab dimension. Returns 0 when absent.
 */
function inferVocabSize(tensors) {
  for (const { name, shape } of tensors) {
    const looksLikeEmbedding = name.includes('embed') || name.includes('token_embd');
    if (looksLikeEmbedding && shape.length === 2) {
      return shape[0];
    }
  }
  return 0;
}
|
|
284
|
+
|
|
285
|
+
/**
 * Determine the dominant quantization method and size statistics.
 * Norm and embedding tensors are excluded from the vote because they are
 * commonly stored at higher precision and would skew the result.
 * @param {object[]} tensors - Tensor entries with quantName/bpw/shape.
 * @param {object} arch - Architecture summary (for estimated FP16 size).
 * @returns {object} { method, bits_per_weight, original_size_mb, quantized_size_mb, compression_ratio }
 */
function detectQuantization(tensors, arch) {
  const tally = new Map();
  for (const t of tensors) {
    const excluded = t.name.includes('norm') || t.name.includes('embed') || t.name.includes('embd');
    if (excluded) continue;
    tally.set(t.quantName, (tally.get(t.quantName) || 0) + 1);
  }

  // Majority vote over the tallied quant names.
  let method = 'Unknown';
  let best = 0;
  for (const [name, count] of tally) {
    if (count > best) {
      best = count;
      method = name;
    }
  }

  // Look up the nominal bits-per-weight for the winning method.
  let bpw = 0;
  for (const entry of Object.values(QUANT_TYPES)) {
    if (entry.name === method) {
      bpw = entry.bpw;
      break;
    }
  }

  // Actual on-disk size from per-tensor bpw; FP16 size is the baseline.
  const totalBits = tensors.reduce(
    (sum, t) => sum + t.bpw * t.shape.reduce((acc, dim) => acc * dim, 1),
    0,
  );
  const quantizedMb = totalBits / 8 / (1024 * 1024);

  return {
    method,
    bits_per_weight: bpw,
    original_size_mb: arch.estimated_size_mb,
    quantized_size_mb: quantizedMb,
    compression_ratio: quantizedMb > 0 ? arch.estimated_size_mb / quantizedMb : 1,
  };
}
|
|
308
|
+
|
|
309
|
+
/**
 * Group tensors into per-layer summaries (tensor count, parameter count,
 * quantization of the first tensor seen for that layer).
 *
 * FIX: the previous version re-filtered the entire tensor list for every
 * layer (O(layers × tensors)); a single pass over a Map is O(tensors).
 * It also deduplicated by the *string* form of the index, so 'blk.0.' and
 * 'blk.00.' produced two entries for the same layer; grouping by the
 * numeric index removes the duplicates.
 *
 * @param {object[]} tensors - Tensor entries ({ name, shape, quantName, ... }).
 * @param {object} arch - Architecture summary (unused; kept for interface compatibility).
 * @returns {object[]} Layer summaries in first-occurrence order.
 */
function extractLayers(tensors, arch) {
  const layerRe = /(?:blk|layers|h)\.\s*(\d+)\./;
  const byIndex = new Map(); // numeric layer index → summary (insertion-ordered)

  for (const t of tensors) {
    const m = t.name.match(layerRe);
    if (!m) continue;
    const idx = parseInt(m[1], 10);
    let entry = byIndex.get(idx);
    if (!entry) {
      entry = { index: idx, tensor_count: 0, param_count: 0, quantization: t.quantName };
      byIndex.set(idx, entry);
    }
    entry.tensor_count += 1;
    entry.param_count += t.shape.reduce((a, b) => a * b, 1);
  }

  return [...byIndex.values()];
}
|
|
332
|
+
|
|
333
|
+
/**
 * Summarize tokenizer info from GGUF metadata: vocab size, special-token
 * ids, and the first 20 tokens as a sample. Returns null when the file
 * carries no `tokenizer.ggml.tokens` array (e.g. Safetensors).
 */
function extractTokenizer(metadata) {
  const tokens = metadata['tokenizer.ggml.tokens'];
  if (!Array.isArray(tokens)) return null;

  const specialKeys = [
    'tokenizer.ggml.bos_token_id',
    'tokenizer.ggml.eos_token_id',
    'tokenizer.ggml.padding_token_id',
    'tokenizer.ggml.unknown_token_id',
  ];
  const special = specialKeys
    .filter((key) => metadata[key] != null)
    .map((key) => ({ name: key.replace('tokenizer.ggml.', ''), id: metadata[key] }));

  return {
    vocab_size: tokens.length,
    special_tokens: special,
    sample_tokens: tokens.slice(0, 20).map((text, id) => ({ id, text: String(text) })),
  };
}
|
|
349
|
+
|
|
350
|
+
/**
 * Flatten metadata into a string map suitable for display: arrays become
 * a "[N elements]" placeholder, objects are JSON-encoded, everything
 * else is stringified.
 */
function flattenMetadata(metadata) {
  const flat = {};
  for (const key of Object.keys(metadata)) {
    const value = metadata[key];
    if (Array.isArray(value)) {
      flat[key] = `[${value.length} elements]`;
    } else if (value !== null && typeof value === 'object') {
      flat[key] = JSON.stringify(value);
    } else {
      flat[key] = String(value);
    }
  }
  return flat;
}
|
|
359
|
+
|
|
360
|
+
// ── Pretty printer ───────────────────────────────────────────────────────
|
|
361
|
+
|
|
362
|
+
/**
 * Pretty-print a decompilation report to the console using chalk.
 * @param {object} result - Report produced by decompileModelFile().
 */
function printModelResult(result) {
  const _chalk = require('chalk');
  const chalk = _chalk.default || _chalk; // chalk v5 is ESM-only; handle interop
  const a = result.architecture;
  const white = (text) => console.log(chalk.white(text));
  const blank = () => console.log('');

  console.log(chalk.bold.cyan('\n LLM Model Decompilation'));
  white(` Format: ${result.format}`);
  white(` Architecture: ${a.name}`);
  white(` Parameters: ${formatNumber(a.total_params)} (${formatSize(a.total_params)})`);
  blank();
  white(` Hidden size: ${a.hidden_size}`);
  white(` Layers: ${a.num_layers}`);
  white(` Attention heads: ${a.num_heads}`);
  if (a.num_kv_heads !== a.num_heads) {
    // Grouped-query attention: report the query-to-KV head ratio.
    white(` KV heads: ${a.num_kv_heads} (GQA ${a.num_heads / a.num_kv_heads}:1)`);
  }
  white(` FFN size: ${a.intermediate_size}`);
  white(` Vocab size: ${a.vocab_size}`);
  if (a.max_sequence_length > 0) {
    white(` Max seq length: ${a.max_sequence_length}`);
  }

  const q = result.quantization;
  if (q) {
    blank();
    white(` Quantization: ${q.method}`);
    white(` Original size: ${q.original_size_mb.toFixed(0)} MB (FP16)`);
    white(` Quantized: ${q.quantized_size_mb.toFixed(0)} MB`);
    white(` Compression: ${q.compression_ratio.toFixed(1)}x`);
  }

  const tok = result.tokenizer;
  if (tok) {
    blank();
    white(` Tokenizer:`);
    white(` Vocab: ${formatNumber(tok.vocab_size)} tokens`);
    if (tok.special_tokens.length > 0) {
      white(` Special: ${tok.special_tokens.map(s => `${s.name}(${s.id})`).join(', ')}`);
    }
  }

  blank();
  console.log(chalk.dim(` Witness: ${result.witness.source_hash.slice(0, 16)}...`));
  blank();
}
|
|
408
|
+
|
|
409
|
+
/**
 * Format a count with a T/B/M/K suffix and one decimal place;
 * values under 1000 are returned verbatim.
 */
function formatNumber(n) {
  const scales = [
    [1e12, 'T'],
    [1e9, 'B'],
    [1e6, 'M'],
    [1e3, 'K'],
  ];
  for (const [threshold, suffix] of scales) {
    if (n >= threshold) {
      return `${(n / threshold).toFixed(1)}${suffix}`;
    }
  }
  return String(n);
}
|
|
416
|
+
|
|
417
|
+
/**
 * Format a parameter count as its FP16 storage footprint
 * (2 bytes per parameter), in MB or GB.
 */
function formatSize(params) {
  const mb = (params * 2) / (1024 * 1024);
  return mb >= 1024
    ? `${(mb / 1024).toFixed(1)} GB FP16`
    : `${mb.toFixed(0)} MB FP16`;
}
|
|
422
|
+
|
|
423
|
+
module.exports = { decompileModelFile, decompileGguf, decompileSafetensors, printModelResult };
|
|
@@ -1,52 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* Complexity Analysis Module - Consolidated code complexity metrics
|
|
3
|
-
*
|
|
4
|
-
* Single source of truth for cyclomatic complexity and code metrics.
|
|
5
|
-
* Used by native-worker.ts and parallel-workers.ts
|
|
6
|
-
*/
|
|
7
|
-
export interface ComplexityResult {
|
|
8
|
-
file: string;
|
|
9
|
-
lines: number;
|
|
10
|
-
nonEmptyLines: number;
|
|
11
|
-
cyclomaticComplexity: number;
|
|
12
|
-
functions: number;
|
|
13
|
-
avgFunctionSize: number;
|
|
14
|
-
maxFunctionComplexity?: number;
|
|
15
|
-
}
|
|
16
|
-
export interface ComplexityThresholds {
|
|
17
|
-
complexity: number;
|
|
18
|
-
functions: number;
|
|
19
|
-
lines: number;
|
|
20
|
-
avgSize: number;
|
|
21
|
-
}
|
|
22
|
-
export declare const DEFAULT_THRESHOLDS: ComplexityThresholds;
|
|
23
|
-
/**
|
|
24
|
-
* Analyze complexity of a single file
|
|
25
|
-
*/
|
|
26
|
-
export declare function analyzeFile(filePath: string, content?: string): ComplexityResult;
|
|
27
|
-
/**
|
|
28
|
-
* Analyze complexity of multiple files
|
|
29
|
-
*/
|
|
30
|
-
export declare function analyzeFiles(files: string[], maxFiles?: number): ComplexityResult[];
|
|
31
|
-
/**
|
|
32
|
-
* Check if complexity exceeds thresholds
|
|
33
|
-
*/
|
|
34
|
-
export declare function exceedsThresholds(result: ComplexityResult, thresholds?: ComplexityThresholds): boolean;
|
|
35
|
-
/**
|
|
36
|
-
* Get complexity rating
|
|
37
|
-
*/
|
|
38
|
-
export declare function getComplexityRating(complexity: number): 'low' | 'medium' | 'high' | 'critical';
|
|
39
|
-
/**
|
|
40
|
-
* Filter files exceeding thresholds
|
|
41
|
-
*/
|
|
42
|
-
export declare function filterComplex(results: ComplexityResult[], thresholds?: ComplexityThresholds): ComplexityResult[];
|
|
43
|
-
declare const _default: {
|
|
44
|
-
DEFAULT_THRESHOLDS: ComplexityThresholds;
|
|
45
|
-
analyzeFile: typeof analyzeFile;
|
|
46
|
-
analyzeFiles: typeof analyzeFiles;
|
|
47
|
-
exceedsThresholds: typeof exceedsThresholds;
|
|
48
|
-
getComplexityRating: typeof getComplexityRating;
|
|
49
|
-
filterComplex: typeof filterComplex;
|
|
50
|
-
};
|
|
51
|
-
export default _default;
|
|
52
|
-
//# sourceMappingURL=complexity.d.ts.map
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"file":"complexity.d.ts","sourceRoot":"","sources":["../../src/analysis/complexity.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAIH,MAAM,WAAW,gBAAgB;IAC/B,IAAI,EAAE,MAAM,CAAC;IACb,KAAK,EAAE,MAAM,CAAC;IACd,aAAa,EAAE,MAAM,CAAC;IACtB,oBAAoB,EAAE,MAAM,CAAC;IAC7B,SAAS,EAAE,MAAM,CAAC;IAClB,eAAe,EAAE,MAAM,CAAC;IACxB,qBAAqB,CAAC,EAAE,MAAM,CAAC;CAChC;AAED,MAAM,WAAW,oBAAoB;IACnC,UAAU,EAAE,MAAM,CAAC;IACnB,SAAS,EAAE,MAAM,CAAC;IAClB,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,eAAO,MAAM,kBAAkB,EAAE,oBAKhC,CAAC;AAEF;;GAEG;AACH,wBAAgB,WAAW,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,MAAM,GAAG,gBAAgB,CAsDhF;AAED;;GAEG;AACH,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE,QAAQ,GAAE,MAAY,GAAG,gBAAgB,EAAE,CAExF;AAED;;GAEG;AACH,wBAAgB,iBAAiB,CAC/B,MAAM,EAAE,gBAAgB,EACxB,UAAU,GAAE,oBAAyC,GACpD,OAAO,CAOT;AAED;;GAEG;AACH,wBAAgB,mBAAmB,CAAC,UAAU,EAAE,MAAM,GAAG,KAAK,GAAG,QAAQ,GAAG,MAAM,GAAG,UAAU,CAK9F;AAED;;GAEG;AACH,wBAAgB,aAAa,CAC3B,OAAO,EAAE,gBAAgB,EAAE,EAC3B,UAAU,GAAE,oBAAyC,GACpD,gBAAgB,EAAE,CAEpB;;;;;;;;;AAED,wBAOE"}
|
|
@@ -1,146 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
/**
|
|
3
|
-
* Complexity Analysis Module - Consolidated code complexity metrics
|
|
4
|
-
*
|
|
5
|
-
* Single source of truth for cyclomatic complexity and code metrics.
|
|
6
|
-
* Used by native-worker.ts and parallel-workers.ts
|
|
7
|
-
*/
|
|
8
|
-
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
9
|
-
if (k2 === undefined) k2 = k;
|
|
10
|
-
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
11
|
-
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
12
|
-
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
13
|
-
}
|
|
14
|
-
Object.defineProperty(o, k2, desc);
|
|
15
|
-
}) : (function(o, m, k, k2) {
|
|
16
|
-
if (k2 === undefined) k2 = k;
|
|
17
|
-
o[k2] = m[k];
|
|
18
|
-
}));
|
|
19
|
-
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
20
|
-
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
21
|
-
}) : function(o, v) {
|
|
22
|
-
o["default"] = v;
|
|
23
|
-
});
|
|
24
|
-
var __importStar = (this && this.__importStar) || (function () {
|
|
25
|
-
var ownKeys = function(o) {
|
|
26
|
-
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
27
|
-
var ar = [];
|
|
28
|
-
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
29
|
-
return ar;
|
|
30
|
-
};
|
|
31
|
-
return ownKeys(o);
|
|
32
|
-
};
|
|
33
|
-
return function (mod) {
|
|
34
|
-
if (mod && mod.__esModule) return mod;
|
|
35
|
-
var result = {};
|
|
36
|
-
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
37
|
-
__setModuleDefault(result, mod);
|
|
38
|
-
return result;
|
|
39
|
-
};
|
|
40
|
-
})();
|
|
41
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
42
|
-
exports.DEFAULT_THRESHOLDS = void 0;
|
|
43
|
-
exports.analyzeFile = analyzeFile;
|
|
44
|
-
exports.analyzeFiles = analyzeFiles;
|
|
45
|
-
exports.exceedsThresholds = exceedsThresholds;
|
|
46
|
-
exports.getComplexityRating = getComplexityRating;
|
|
47
|
-
exports.filterComplex = filterComplex;
|
|
48
|
-
const fs = __importStar(require("fs"));
|
|
49
|
-
exports.DEFAULT_THRESHOLDS = {
|
|
50
|
-
complexity: 10,
|
|
51
|
-
functions: 30,
|
|
52
|
-
lines: 500,
|
|
53
|
-
avgSize: 50,
|
|
54
|
-
};
|
|
55
|
-
/**
 * Analyze complexity of a single file.
 *
 * Heuristic, regex-based: counts branching constructs for an approximate
 * cyclomatic complexity and estimates the number of functions from common
 * declaration patterns. Intended as a cheap screening pass, not a parser.
 *
 * @param {string} filePath - Path used to label the result, and to read the
 *   file from disk when `content` is not supplied.
 * @param {string} [content] - Optional file content; when omitted the file is
 *   read with fs (a missing file yields an empty result, never a throw).
 * @returns {{file: string, lines: number, nonEmptyLines: number,
 *   cyclomaticComplexity: number, functions: number, avgFunctionSize: number}}
 */
function analyzeFile(filePath, content) {
    try {
        const fileContent = content ?? (fs.existsSync(filePath) ? fs.readFileSync(filePath, 'utf-8') : '');
        if (!fileContent) {
            return { file: filePath, lines: 0, nonEmptyLines: 0, cyclomaticComplexity: 1, functions: 0, avgFunctionSize: 0 };
        }
        const lines = fileContent.split('\n');
        const nonEmptyLines = lines.filter(l => l.trim().length > 0).length;
        // Count branching constructs for an approximate cyclomatic complexity.
        const branchPatterns = [
            /\bif\b/g,
            /\belse\b/g,
            /\bfor\b/g,
            /\bwhile\b/g,
            /\bswitch\b/g,
            /\bcase\b/g,
            /\bcatch\b/g,
            /\?\?/g, // nullish coalescing
            /&&/g,
            /\|\|/g,
            // Ternary `?` only. The previous pattern /\?[^:]/ double-counted
            // every `??` (already counted above) and also counted optional
            // chaining `?.` as a branch; the lookarounds exclude both, plus
            // `?:` which the old pattern also skipped.
            /(?<!\?)\?(?![?.:])/g,
        ];
        let branches = 0;
        for (const pattern of branchPatterns) {
            branches += (fileContent.match(pattern) || []).length;
        }
        const cyclomaticComplexity = branches + 1;
        // Rough function count via common declaration patterns; the total is
        // halved because the patterns overlap (one definition can match twice).
        const functionPatterns = [
            /function\s+\w+/g,
            /\w+\s*=\s*(?:async\s*)?\(/g,
            /\w+\s*:\s*(?:async\s*)?\(/g,
            /(?:async\s+)?(?:public|private|protected)?\s+\w+\s*\([^)]*\)\s*[:{]/g,
        ];
        let functions = 0;
        for (const pattern of functionPatterns) {
            functions += (fileContent.match(pattern) || []).length;
        }
        // Deduplicate by rough estimate
        functions = Math.ceil(functions / 2);
        const avgFunctionSize = functions > 0 ? Math.round(nonEmptyLines / functions) : nonEmptyLines;
        return {
            file: filePath,
            lines: lines.length,
            nonEmptyLines,
            cyclomaticComplexity,
            functions,
            avgFunctionSize,
        };
    }
    catch {
        // Best-effort: unreadable files report an empty result rather than throwing.
        return { file: filePath, lines: 0, nonEmptyLines: 0, cyclomaticComplexity: 1, functions: 0, avgFunctionSize: 0 };
    }
}
|
|
106
|
-
/**
 * Analyze complexity of multiple files.
 *
 * @param {string[]} files - File paths to analyze.
 * @param {number} [maxFiles=100] - Cap on how many files are processed.
 * @returns {object[]} One `analyzeFile` result per processed path.
 */
function analyzeFiles(files, maxFiles = 100) {
    const selected = files.slice(0, maxFiles);
    const results = [];
    for (const filePath of selected) {
        results.push(analyzeFile(filePath));
    }
    return results;
}
|
|
112
|
-
/**
 * Check if a complexity result exceeds any configured threshold.
 *
 * @param {object} result - A result object from `analyzeFile`.
 * @param {object} [thresholds=DEFAULT_THRESHOLDS] - Limits to compare against.
 * @returns {boolean} True when at least one metric is over its limit.
 */
function exceedsThresholds(result, thresholds = exports.DEFAULT_THRESHOLDS) {
    const violations = [
        result.cyclomaticComplexity > thresholds.complexity,
        result.functions > thresholds.functions,
        result.lines > thresholds.lines,
        result.avgFunctionSize > thresholds.avgSize,
    ];
    return violations.some(Boolean);
}
|
|
121
|
-
/**
 * Map a cyclomatic complexity value onto a coarse rating.
 *
 * @param {number} complexity - Cyclomatic complexity of a file.
 * @returns {'low'|'medium'|'high'|'critical'} Rating band.
 */
function getComplexityRating(complexity) {
    const bands = [
        [5, 'low'],
        [10, 'medium'],
        [20, 'high'],
    ];
    for (const [upperBound, rating] of bands) {
        if (complexity <= upperBound) {
            return rating;
        }
    }
    return 'critical';
}
|
|
133
|
-
/**
 * Filter analysis results down to the files exceeding the thresholds.
 *
 * @param {object[]} results - Results from `analyzeFile`/`analyzeFiles`.
 * @param {object} [thresholds=DEFAULT_THRESHOLDS] - Limits to compare against.
 * @returns {object[]} Only the results that exceed at least one threshold.
 */
function filterComplex(results, thresholds = exports.DEFAULT_THRESHOLDS) {
    const flagged = [];
    for (const result of results) {
        if (exceedsThresholds(result, thresholds)) {
            flagged.push(result);
        }
    }
    return flagged;
}
|
|
139
|
-
// Aggregate default export mirroring the named exports, so consumers using
// esModuleInterop-style `import complexity from ...` get the whole API.
exports.default = {
    DEFAULT_THRESHOLDS: exports.DEFAULT_THRESHOLDS,
    analyzeFile,
    analyzeFiles,
    exceedsThresholds,
    getComplexityRating,
    filterComplex,
};
|
package/dist/analysis/index.d.ts
DELETED
|
@@ -1,15 +0,0 @@
|
|
|
1
|
-
/**
 * Analysis Module - Consolidated code analysis utilities
 *
 * Single source of truth for:
 * - Security scanning
 * - Complexity analysis
 * - Pattern extraction
 */
export * from './security';
export * from './complexity';
export * from './patterns';
// Re-export each submodule's default under a stable name for
// namespace-style access (`analysis.security`, `analysis.complexity`, ...).
export { default as security } from './security';
export { default as complexity } from './complexity';
export { default as patterns } from './patterns';
//# sourceMappingURL=index.d.ts.map
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/analysis/index.ts"],"names":[],"mappings":"AAAA;;;;;;;GAOG;AAEH,cAAc,YAAY,CAAC;AAC3B,cAAc,cAAc,CAAC;AAC7B,cAAc,YAAY,CAAC;AAG3B,OAAO,EAAE,OAAO,IAAI,QAAQ,EAAE,MAAM,YAAY,CAAC;AACjD,OAAO,EAAE,OAAO,IAAI,UAAU,EAAE,MAAM,cAAc,CAAC;AACrD,OAAO,EAAE,OAAO,IAAI,QAAQ,EAAE,MAAM,YAAY,CAAC"}
|
package/dist/analysis/index.js
DELETED
|
@@ -1,38 +0,0 @@
|
|
|
1
|
-
"use strict";
/**
 * Analysis Module - Consolidated code analysis utilities
 *
 * Single source of truth for:
 * - Security scanning
 * - Complexity analysis
 * - Pattern extraction
 */
// --- tsc-emitted CommonJS interop helpers (compiler-generated; order and
// shape are fixed by the TypeScript emitter, do not hand-edit) ---
// Bind property `k` of module `m` onto `o` as `k2` (live getter when possible).
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// Re-export every named (non-default) binding of `m` from `exports`.
var __exportStar = (this && this.__exportStar) || function(m, exports) {
    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
// Wrap a CommonJS module so its default import resolves consistently.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.patterns = exports.complexity = exports.security = void 0;
__exportStar(require("./security"), exports);
__exportStar(require("./complexity"), exports);
__exportStar(require("./patterns"), exports);
// Re-export defaults for convenience
var security_1 = require("./security");
Object.defineProperty(exports, "security", { enumerable: true, get: function () { return __importDefault(security_1).default; } });
var complexity_1 = require("./complexity");
Object.defineProperty(exports, "complexity", { enumerable: true, get: function () { return __importDefault(complexity_1).default; } });
var patterns_1 = require("./patterns");
Object.defineProperty(exports, "patterns", { enumerable: true, get: function () { return __importDefault(patterns_1).default; } });
|