@funeste38/allmight 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +35 -0
  3. package/dist/cli/index.d.ts +2 -0
  4. package/dist/cli/index.js +66 -0
  5. package/dist/core/canonical/suggestCanonicalSource.d.ts +2 -0
  6. package/dist/core/canonical/suggestCanonicalSource.js +40 -0
  7. package/dist/core/classifier/classifyDuplicateFamily.d.ts +8 -0
  8. package/dist/core/classifier/classifyDuplicateFamily.js +129 -0
  9. package/dist/core/patcher/applySafePatches.d.ts +2 -0
  10. package/dist/core/patcher/applySafePatches.js +34 -0
  11. package/dist/core/patcher/buildProposedPatches.d.ts +2 -0
  12. package/dist/core/patcher/buildProposedPatches.js +52 -0
  13. package/dist/core/patcher/buildSafePatch.d.ts +2 -0
  14. package/dist/core/patcher/buildSafePatch.js +47 -0
  15. package/dist/core/patcher/rewriters.d.ts +3 -0
  16. package/dist/core/patcher/rewriters.js +35 -0
  17. package/dist/core/report/writeReportArtifacts.d.ts +2 -0
  18. package/dist/core/report/writeReportArtifacts.js +36 -0
  19. package/dist/core/scanner/fileRecord.d.ts +2 -0
  20. package/dist/core/scanner/fileRecord.js +87 -0
  21. package/dist/core/scanner/scanDuplicateFamilies.d.ts +2 -0
  22. package/dist/core/scanner/scanDuplicateFamilies.js +166 -0
  23. package/dist/index.d.ts +9 -0
  24. package/dist/index.js +8 -0
  25. package/dist/rules/default-rules.d.ts +4 -0
  26. package/dist/rules/default-rules.js +41 -0
  27. package/dist/types.d.ts +76 -0
  28. package/dist/types.js +1 -0
  29. package/dist/utils/fs.d.ts +4 -0
  30. package/dist/utils/fs.js +36 -0
  31. package/dist/utils/text.d.ts +8 -0
  32. package/dist/utils/text.js +51 -0
  33. package/package.json +44 -0
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 jEFFLEZ
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,35 @@
1
+ # @funeste38/allmight
2
+
3
+ `allmight` audits repositories for duplicate code families, picks a canonical source, and proposes safe transition patches instead of destructive cleanup.
4
+
5
+ ## Goals
6
+
7
+ - detect exact and near duplicates
8
+ - identify nested package mirrors and competing barrels
9
+ - propose a canonical source per family
10
+ - generate traceable reports and diff proposals
11
+ - apply only safe patches on explicit request
12
+
13
+ ## CLI
14
+
15
+ ```bash
16
+ allmight scan <root>
17
+ allmight report <root>
18
+ allmight propose <root>
19
+ allmight fix-safe <root>
20
+ allmight canonicalize <root>
21
+ ```
22
+
23
+ Outputs:
24
+
25
+ - `duplicate-report.json`
26
+ - `canonical-map.json`
27
+ - `patches/*.diff`
28
+ - `summary.md`
29
+
30
+ ## Design
31
+
32
+ - audit-first by default
33
+ - never deletes divergent logic automatically
34
+ - preserves runtime transitions with shims when possible
35
+ - favors traceability over hidden cleanup
@@ -0,0 +1,2 @@
1
+ #!/usr/bin/env node
2
+ export declare function runCli(argv?: string[]): Promise<number>;
@@ -0,0 +1,66 @@
1
+ #!/usr/bin/env node
2
import path from "node:path";
import { pathToFileURL } from "node:url";
import { applySafePatches } from "../core/patcher/applySafePatches.js";
import { writeReportArtifacts } from "../core/report/writeReportArtifacts.js";
import { scanDuplicateFamilies } from "../core/scanner/scanDuplicateFamilies.js";
6
/** Print command-line help for the allmight CLI. */
function usage() {
    const helpLines = [
        "allmight <command> [root] [--output <dir>]",
        "",
        "Commands:",
        "  scan          scan repo and write report artifacts",
        "  report        alias of scan",
        "  propose       scan repo and write diff proposals",
        "  fix-safe      scan repo, apply safe patches, then write artifacts",
        "  canonicalize  scan repo and print canonical map"
    ];
    console.log(helpLines.join("\n"));
}
18
/**
 * Parse CLI arguments into { command, root, outputDir }.
 *
 * Fix: the value consumed by `--output` is no longer treated as a positional
 * argument. Previously `allmight scan --output out myroot` resolved `out`
 * (the flag's value) as the scan root instead of `myroot`.
 *
 * @param argv raw argument list (without the node/script prefix)
 * @returns command (may be undefined), absolute root path, and output dir
 *          (falls back to `<cwd>/allmight-output` when --output is absent)
 */
function parseArgs(argv) {
    const command = argv[0];
    const outputIndex = argv.indexOf("--output");
    const outputDir = outputIndex !== -1 ? argv[outputIndex + 1] ?? "" : "";
    // Positionals exclude flags AND the value belonging to --output.
    const positional = argv.filter((arg, index) => {
        if (arg.startsWith("--")) {
            return false;
        }
        if (outputIndex !== -1 && index === outputIndex + 1) {
            return false;
        }
        return true;
    });
    const root = positional[1] ? path.resolve(positional[1]) : process.cwd();
    return {
        command,
        root,
        outputDir: outputDir || path.resolve(process.cwd(), "allmight-output")
    };
}
30
/**
 * CLI entry point. Scans the resolved root, writes report artifacts to the
 * output directory, then performs the command-specific action.
 * Resolves to the intended process exit code.
 */
export async function runCli(argv = process.argv.slice(2)) {
    const { command, root, outputDir } = parseArgs(argv);
    const wantsHelp = !command || command === "--help" || command === "-h";
    if (wantsHelp) {
        usage();
        return 0;
    }
    // Every known command starts from a full scan plus persisted artifacts.
    const report = await scanDuplicateFamilies(root, { outputDir });
    await writeReportArtifacts(report, outputDir);
    switch (command) {
        case "fix-safe": {
            const applied = await applySafePatches(report);
            console.log(`Applied ${applied.length} safe patches.`);
            return 0;
        }
        case "canonicalize": {
            console.log(JSON.stringify(report.canonicalMap, null, 2));
            return 0;
        }
        case "scan":
        case "report":
        case "propose": {
            console.log(`Scanned ${report.stats.filesScanned} files in ${root}`);
            console.log(`Families: ${report.stats.familiesDetected}`);
            console.log(`Patches: ${report.stats.patchesProposed} (${report.stats.safePatches} safe)`);
            console.log(`Artifacts: ${outputDir}`);
            return 0;
        }
        default: {
            usage();
            return 1;
        }
    }
}
57
// Run the CLI only when this module is the direct `node` entry point.
// Fix: build the comparison URL with pathToFileURL instead of hand-gluing
// "file://" + path, which mis-parses Windows drive letters (C:\...) and does
// not percent-encode spaces or special characters.
const entryArg = process.argv[1];
const isEntrypoint = !!entryArg && import.meta.url === pathToFileURL(entryArg).href;
if (isEntrypoint) {
    runCli()
        .then((code) => process.exit(code))
        .catch((error) => {
        console.error("allmight failed", error);
        process.exit(1);
    });
}
@@ -0,0 +1,2 @@
1
+ import { DuplicateFamily } from "../../types.js";
2
+ export declare function suggestCanonicalSource(family: DuplicateFamily): string;
@@ -0,0 +1,40 @@
1
import { LEGACY_HINTS } from "../../rules/default-rules.js";
import { toPosix } from "../../utils/text.js";
/** Split a record's repo-relative path into non-empty POSIX segments. */
function pathSegments(record) {
    return toPosix(record.relativePath).split("/").filter(Boolean);
}
/**
 * Penalize paths with an immediately repeated segment (e.g. `foo/foo/...`),
 * a typical sign of an accidental nested mirror: +15 per repetition.
 */
function repeatedSegmentPenalty(record) {
    const segments = pathSegments(record);
    let penalty = 0;
    for (let index = 0; index < segments.length - 1; index += 1) {
        if (segments[index] === segments[index + 1]) {
            penalty += 15;
        }
    }
    return penalty;
}
/**
 * Heuristic score for one record: higher means "more likely the canonical
 * copy". Rewards monorepo/src locations, public package manifests and higher
 * package versions; penalizes private/legacy/mirrored and deep paths.
 *
 * Fix: a version string with missing components (e.g. "2" or "1.4") used to
 * yield NaN via `numeric[1] * 2` with `numeric[1] === undefined`, poisoning
 * the entire score; missing components now default to 0.
 */
function semanticScore(record) {
    const relative = toPosix(record.relativePath).toLowerCase();
    let score = 0;
    if (relative.includes("/packages/"))
        score += 30;
    if (relative.includes("/src/"))
        score += 15;
    if (relative.endsWith("/package.json") && record.packagePrivate === false)
        score += 20;
    if (record.packageVersion) {
        const [major = 0, minor = 0, patch = 0] = record.packageVersion
            .split(".")
            .map((part) => Number(part) || 0);
        score += major * 10 + minor * 2 + patch * 0.2;
    }
    if (record.packagePrivate)
        score -= 20;
    if (LEGACY_HINTS.some((hint) => relative.includes(hint)))
        score -= 35;
    score -= repeatedSegmentPenalty(record);
    score -= pathSegments(record).length;
    return score;
}
/** Pick the highest-scoring record's path as the family's canonical source. */
export function suggestCanonicalSource(family) {
    const sorted = [...family.records].sort((left, right) => semanticScore(right) - semanticScore(left));
    return sorted[0]?.path ?? family.records[0]?.path;
}
@@ -0,0 +1,8 @@
1
+ import { DuplicateFamily, DuplicateType, FileFingerprint } from "../../types.js";
2
+ type ClassificationInput = {
3
+ id: string;
4
+ records: FileFingerprint[];
5
+ seedType?: DuplicateType;
6
+ };
7
+ export declare function classifyDuplicateFamily(input: ClassificationInput): DuplicateFamily;
8
+ export {};
@@ -0,0 +1,129 @@
1
+ import { DANGEROUS_PATH_HINTS, LEGACY_HINTS } from "../../rules/default-rules.js";
2
+ import { jaccardSimilarity, sharedValues, tokenize, toPosix } from "../../utils/text.js";
3
/**
 * True when `value` (lower-cased) contains any of `fragments`.
 * Fragments are expected to be lowercase already.
 */
function containsAny(value, fragments) {
    const haystack = value.toLowerCase();
    for (const fragment of fragments) {
        if (haystack.includes(fragment)) {
            return true;
        }
    }
    return false;
}
7
/**
 * True when the family holds named-package records both inside a `packages/`
 * directory and outside it — the signature of a nested package mirror.
 */
function hasNestedPackage(records) {
    const inPackagesDir = (record) => /(^|\/)packages\//.test(toPosix(record.relativePath));
    const named = records.filter((record) => record.packageName);
    return named.some(inPackagesDir) && named.some((record) => !inPackagesDir(record));
}
12
/**
 * Collect the leading path segment of every *relative* import specifier used
 * by the records, sorted.
 * NOTE(review): only specifiers starting with "." are kept, and the first
 * non-empty segment of "./x" is "." (of "../x" it is ".."), so the result can
 * only ever contain "." and/or ".." — confirm whether the first non-dot
 * segment was the real intent.
 */
function detectMixedImportRoots(records) {
    const roots = new Set();
    for (const record of records) {
        for (const specifier of record.imports) {
            if (!specifier.startsWith(".")) {
                continue;
            }
            const leading = specifier.replace(/\\/g, "/").split("/").filter(Boolean)[0];
            if (leading) {
                roots.add(leading);
            }
        }
    }
    return [...roots].sort();
}
25
/**
 * Mean pairwise Jaccard similarity over token sets of the records' raw
 * content. Defined as 1 for fewer than two records.
 */
function computeSimilarity(records) {
    if (records.length < 2) {
        return 1;
    }
    let total = 0;
    let count = 0;
    for (let left = 0; left < records.length; left += 1) {
        for (let right = left + 1; right < records.length; right += 1) {
            const leftTokens = tokenize(records[left].rawContent ?? "");
            const rightTokens = tokenize(records[right].rawContent ?? "");
            total += jaccardSimilarity(leftTokens, rightTokens);
            count += 1;
        }
    }
    return count === 0 ? 0 : total / count;
}
/**
 * Decide the duplicate-family type. Checks run in priority order: seed type
 * wins, then structural signals (nested packages), then path-hint rules,
 * then similarity, falling back to "near-duplicate".
 */
function classifyType(records, seedType) {
    if (seedType === "exact-copy") {
        return "exact-copy";
    }
    const allArePackages = records.every((record) => record.packageName);
    if (allArePackages && hasNestedPackage(records)) {
        return "nested-package-duplicate";
    }
    const relativePaths = records.map((record) => toPosix(record.relativePath));
    const similarity = computeSimilarity(records);
    if (relativePaths.some((value) => containsAny(value, LEGACY_HINTS))) {
        return "alias-duplicate";
    }
    if (records.some((record) => record.topLevelRuntimeSignals.length > 0)) {
        return "runtime-duplicate";
    }
    if (relativePaths.some((value) => /(prompt|mask|image|logic)/i.test(value))) {
        return "prompt-logic-duplicate";
    }
    if (relativePaths.some((value) => /(semantic|intent|context)/i.test(value))) {
        return "semantic-duplicate";
    }
    if (similarity >= 0.6) {
        return "near-duplicate";
    }
    if (records.some((record) => record.exports.length > 0)) {
        return "export-duplicate";
    }
    return "near-duplicate";
}
66
/** Map a family type (plus measured similarity) to a confidence in [0, 1]. */
function scoreConfidence(type, similarity) {
    switch (type) {
        case "exact-copy":
            return 0.98;
        case "nested-package-duplicate":
            return 0.96;
        case "alias-duplicate":
            return Math.max(0.9, similarity);
        case "export-duplicate":
            // Boost similarity slightly, but clamp to [0.72, 0.9].
            return Math.max(0.72, Math.min(0.9, similarity + 0.1));
        case "runtime-duplicate":
            return Math.max(0.75, similarity);
        case "prompt-logic-duplicate":
            return Math.max(0.7, similarity);
        case "semantic-duplicate":
            return Math.max(0.45, similarity);
        default:
            return Math.max(0.8, similarity);
    }
}
83
/**
 * Risk bucket for a family: "dangerous" for runtime duplicates or paths that
 * match the dangerous hints, "safe" for mechanical duplicate kinds, "review"
 * for everything else.
 */
function scoreRisk(type, records) {
    const joined = records.map((record) => record.relativePath).join(" ").toLowerCase();
    if (type === "runtime-duplicate" || containsAny(joined, DANGEROUS_PATH_HINTS)) {
        return "dangerous";
    }
    const mechanicalTypes = ["exact-copy", "alias-duplicate", "nested-package-duplicate"];
    return mechanicalTypes.includes(type) ? "safe" : "review";
}
93
/**
 * Build a full DuplicateFamily from raw records: the detected type,
 * confidence, risk bucket, shared declaration/export evidence and a
 * human-readable reason string.
 */
export function classifyDuplicateFamily(input) {
    const { records } = input;
    const type = classifyType(records, input.seedType);
    const similarity = input.seedType === "exact-copy" ? 1 : computeSimilarity(records);
    // Intersect a per-record string list across every record in the family.
    const intersectAcross = (pick) => records.length >= 2
        ? records.slice(1).reduce((memo, record) => sharedValues(memo, pick(record)), pick(records[0]))
        : [];
    const sharedDeclarations = intersectAcross((record) => record.declarations);
    const sharedExports = intersectAcross((record) => record.exports);
    const mixedImportRoots = detectMixedImportRoots(records);
    const risk = scoreRisk(type, records);
    const confidence = scoreConfidence(type, similarity);
    const reasonBits = [`${type} detected across ${records.length} paths`];
    if (sharedDeclarations.length) {
        reasonBits.push(`shared declarations: ${sharedDeclarations.join(", ")}`);
    }
    if (sharedExports.length) {
        reasonBits.push(`shared exports: ${sharedExports.join(", ")}`);
    }
    if (mixedImportRoots.length > 1) {
        reasonBits.push(`mixed import roots: ${mixedImportRoots.join(", ")}`);
    }
    return {
        id: input.id,
        type,
        confidence,
        reason: reasonBits.join(" | "),
        risk,
        // Only mechanically reversible kinds may be auto-patched.
        safePatchEligible: risk === "safe" && (type === "exact-copy" || type === "alias-duplicate"),
        files: records.map((record) => record.path),
        records,
        evidence: {
            similarity,
            sharedDeclarations,
            sharedExports,
            nestedPackage: hasNestedPackage(records),
            mixedImportRoots
        }
    };
}
@@ -0,0 +1,2 @@
1
+ import { PatchProposal, ScanReport } from "../../types.js";
2
+ export declare function applySafePatches(report: ScanReport): Promise<PatchProposal[]>;
@@ -0,0 +1,34 @@
1
+ import fs from "node:fs/promises";
2
+ import { createLegacyReexportShim, rewriteImportsToCanonicalSource } from "./rewriters.js";
3
/** Patches in the report that were marked safe to apply automatically. */
function safePatches(report) {
    return report.patches.filter((patch) => patch.safe);
}
/**
 * Apply every safe patch in `report` to disk and return the ones actually
 * written. Shim patches overwrite the target wholesale; import rewrites are
 * only written when the rewritten content differs from the original.
 */
export async function applySafePatches(report) {
    const applied = [];
    for (const patch of safePatches(report)) {
        if (patch.kind === "legacy-reexport-shim" && patch.canonicalSource) {
            const shim = createLegacyReexportShim(patch.targetPath, patch.canonicalSource);
            await fs.writeFile(patch.targetPath, `${shim}\n`, "utf8");
            applied.push(patch);
            continue;
        }
        if (patch.kind !== "rewrite-imports") {
            continue;
        }
        const beforeContent = await fs.readFile(patch.targetPath, "utf8");
        let nextContent = beforeContent;
        // Redirect every non-canonical member of every family.
        for (const family of report.families) {
            if (!family.canonicalSource) {
                continue;
            }
            for (const duplicate of family.files) {
                if (duplicate === family.canonicalSource) {
                    continue;
                }
                nextContent = rewriteImportsToCanonicalSource(nextContent, patch.targetPath, duplicate, family.canonicalSource);
            }
        }
        if (nextContent !== beforeContent) {
            await fs.writeFile(patch.targetPath, nextContent, "utf8");
            applied.push(patch);
        }
    }
    return applied;
}
@@ -0,0 +1,2 @@
1
+ import { PatchProposal, ScanReport } from "../../types.js";
2
+ export declare function buildProposedPatches(report: ScanReport): Promise<PatchProposal[]>;
@@ -0,0 +1,52 @@
1
+ import fs from "node:fs/promises";
2
+ import { slugify } from "../../utils/text.js";
3
+ import { buildUnifiedDiff, rewriteImportsToCanonicalSource } from "./rewriters.js";
4
+ import { buildSafePatch } from "./buildSafePatch.js";
5
/**
 * Map every non-canonical member path of each family to that family's
 * canonical source path. Families without a canonical source are skipped.
 */
function canonicalLookup(report) {
    const map = new Map();
    for (const family of report.families) {
        const canonical = family.canonicalSource;
        if (!canonical) {
            continue;
        }
        for (const filePath of family.files) {
            if (filePath !== canonical) {
                map.set(filePath, canonical);
            }
        }
    }
    return map;
}
18
/**
 * Propose import-rewrite patches: for each record, rewrite specifiers that
 * point at any non-canonical duplicate to the canonical source, and emit a
 * diff proposal whenever the content actually changed.
 */
async function buildImportRewritePatches(report) {
    const replacements = canonicalLookup(report);
    if (replacements.size === 0) {
        return [];
    }
    const proposals = [];
    for (const family of report.families) {
        for (const record of family.records) {
            const content = record.rawContent ?? (await fs.readFile(record.path, "utf8"));
            let nextContent = content;
            for (const [duplicatePath, canonicalPath] of replacements) {
                nextContent = rewriteImportsToCanonicalSource(nextContent, record.path, duplicatePath, canonicalPath);
            }
            if (nextContent === content) {
                continue;
            }
            proposals.push({
                id: `rewrite-${slugify(record.relativePath)}`,
                familyId: family.id,
                kind: "rewrite-imports",
                safe: true,
                targetPath: record.path,
                reason: "Rewrite imports to the canonical source where the duplicate family is considered safe.",
                diff: buildUnifiedDiff(record.relativePath, content, nextContent)
            });
        }
    }
    return proposals;
}
/** Safe shim patches per family, followed by cross-family import rewrites. */
export async function buildProposedPatches(report) {
    const proposals = [];
    for (const family of report.families) {
        proposals.push(...(await buildSafePatch(family)));
    }
    proposals.push(...(await buildImportRewritePatches(report)));
    return proposals;
}
@@ -0,0 +1,2 @@
1
+ import { DuplicateFamily, PatchProposal } from "../../types.js";
2
+ export declare function buildSafePatch(family: DuplicateFamily): Promise<PatchProposal[]>;
@@ -0,0 +1,47 @@
1
+ import fs from "node:fs/promises";
2
+ import path from "node:path";
3
+ import { slugify } from "../../utils/text.js";
4
+ import { buildUnifiedDiff, createLegacyReexportShim } from "./rewriters.js";
5
/** True for JS/TS source extensions eligible for shim replacement. */
function isCodeFile(filePath) {
    return /\.(ts|tsx|js|jsx|mjs|cjs)$/i.test(filePath);
}
/**
 * A "barrel" is a code file whose every non-blank line is an `export `
 * statement — pure re-export modules are mechanically safe to replace.
 */
function isBarrel(recordPath, content) {
    if (!isCodeFile(recordPath)) {
        return false;
    }
    const meaningful = content
        .split(/\r?\n/)
        .map((line) => line.trim())
        .filter((line) => line.length > 0);
    if (meaningful.length === 0) {
        return false;
    }
    return meaningful.every((line) => line.startsWith("export "));
}
17
/**
 * Build non-breaking re-export shim proposals for a safe-patch-eligible
 * family: each non-canonical code file that looks mechanically safe (a
 * legacy-style name, a repeated path segment, or a pure barrel) is proposed
 * for replacement with a shim pointing at the canonical source.
 */
export async function buildSafePatch(family) {
    const canonicalSource = family.canonicalSource;
    if (!canonicalSource || !family.safePatchEligible) {
        return [];
    }
    const proposals = [];
    for (const record of family.records) {
        if (record.path === canonicalSource || !isCodeFile(record.path)) {
            continue;
        }
        const beforeContent = record.rawContent ?? (await fs.readFile(record.path, "utf8"));
        const looksLegacy = /(?:legacy|mirror|wrapper|copy|old|deprecated)/i.test(record.relativePath);
        const repeatedSegment = /(^|\/)([^/]+)\/\2(\/|$)/.test(record.relativePath);
        if (!looksLegacy && !repeatedSegment && !isBarrel(record.path, beforeContent)) {
            continue;
        }
        const afterContent = createLegacyReexportShim(record.path, canonicalSource);
        proposals.push({
            id: `${family.id}-${slugify(record.relativePath)}`,
            familyId: family.id,
            kind: "legacy-reexport-shim",
            safe: true,
            targetPath: record.path,
            canonicalSource,
            reason: `Replace duplicate module with a non-breaking re-export shim to ${path.basename(canonicalSource)}.`,
            diff: buildUnifiedDiff(record.relativePath, beforeContent, afterContent)
        });
    }
    return proposals;
}
@@ -0,0 +1,3 @@
1
+ export declare function createLegacyReexportShim(targetPath: string, canonicalPath: string): string;
2
+ export declare function buildUnifiedDiff(targetPath: string, beforeContent: string, afterContent: string): string;
3
+ export declare function rewriteImportsToCanonicalSource(sourceContent: string, sourcePath: string, duplicatePath: string, canonicalPath: string): string;
@@ -0,0 +1,35 @@
1
+ import path from "node:path";
2
+ import { removeExtension, toPosix } from "../../utils/text.js";
3
/** Map a TS/JSX/module-variant extension to the emitted ".js" form;
 * other extensions pass through unchanged. */
function maybeJsTarget(filePath) {
    const emitted = filePath.replace(/\.(tsx?|jsx?|mjs|cjs)$/i, ".js");
    return emitted;
}
6
/**
 * Shim source that forwards both named and default exports from the module at
 * `targetPath` to the canonical module, using a relative ".js" specifier.
 */
export function createLegacyReexportShim(targetPath, canonicalPath) {
    const relative = toPosix(path.relative(path.dirname(targetPath), maybeJsTarget(canonicalPath)));
    const specifier = relative.startsWith(".") ? relative : `./${relative}`;
    const shimLines = [
        `export * from "${specifier}";`,
        `export { default } from "${specifier}";`
    ];
    return shimLines.join("\n");
}
15
/**
 * Render a whole-file replacement as a unified diff.
 *
 * Fix: emit a real hunk header with ranges ("@@ -1,B +1,A @@") instead of a
 * bare "@@"; tools that parse unified diffs (git apply, patch(1)) reject a
 * hunk line without line counts.
 */
export function buildUnifiedDiff(targetPath, beforeContent, afterContent) {
    const beforeLines = beforeContent.split(/\r?\n/);
    const afterLines = afterContent.split(/\r?\n/);
    return [
        `--- a/${toPosix(targetPath)}`,
        `+++ b/${toPosix(targetPath)}`,
        `@@ -1,${beforeLines.length} +1,${afterLines.length} @@`,
        ...beforeLines.map((line) => `-${line}`),
        ...afterLines.map((line) => `+${line}`)
    ].join("\n");
}
26
/**
 * Replace quoted import specifiers in `sourceContent` that point at the
 * duplicate module with specifiers pointing at the canonical module. Only
 * exact, extension-less relative specifiers (single or double quoted) match.
 */
export function rewriteImportsToCanonicalSource(sourceContent, sourcePath, duplicatePath, canonicalPath) {
    const fromDir = path.dirname(sourcePath);
    const asSpecifier = (target) => {
        const relative = toPosix(path.relative(fromDir, removeExtension(target)));
        return relative.startsWith(".") ? relative : `./${relative}`;
    };
    const fromSpecifier = asSpecifier(duplicatePath);
    const toSpecifier = asSpecifier(canonicalPath);
    let rewritten = sourceContent.replaceAll(`"${fromSpecifier}"`, `"${toSpecifier}"`);
    rewritten = rewritten.replaceAll(`'${fromSpecifier}'`, `'${toSpecifier}'`);
    return rewritten;
}
@@ -0,0 +1,2 @@
1
+ import { ScanReport } from "../../types.js";
2
+ export declare function writeReportArtifacts(report: ScanReport, outputDir: string): Promise<void>;
@@ -0,0 +1,36 @@
1
+ import path from "node:path";
2
+ import { ensureDir, writeJson, writeText } from "../../utils/fs.js";
3
/** Render the human-readable markdown summary for a scan report. */
function buildSummary(report) {
    const header = [
        "# Allmight Summary",
        "",
        `Root: \`${report.root}\``,
        `Created at: ${report.createdAt}`,
        "",
        `Families detected: ${report.stats.familiesDetected}`,
        `Patches proposed: ${report.stats.patchesProposed}`,
        `Safe patches: ${report.stats.safePatches}`,
        "",
        "## Families",
        ""
    ];
    // Cap the listing at 50 families to keep the summary readable.
    const familyLines = report.families.slice(0, 50).flatMap((family) => [
        `- \`${family.type}\` (${family.confidence.toFixed(2)}) -> \`${family.canonicalSource ?? "unknown"}\``,
        `  reason: ${family.reason}`
    ]);
    return [...header, ...familyLines].join("\n");
}
23
/**
 * Persist all scan artifacts under `outputDir`: the full JSON report, a
 * canonical map keyed by family id, a markdown summary, and one `.diff` file
 * per proposed patch.
 */
export async function writeReportArtifacts(report, outputDir) {
    await ensureDir(outputDir);
    await writeJson(path.join(outputDir, "duplicate-report.json"), report);
    const canonicalByFamily = {};
    for (const entry of report.canonicalMap) {
        canonicalByFamily[entry.familyId] = entry;
    }
    await writeJson(path.join(outputDir, "canonical-map.json"), canonicalByFamily);
    await writeText(path.join(outputDir, "summary.md"), buildSummary(report));
    const patchesDir = path.join(outputDir, "patches");
    await ensureDir(patchesDir);
    for (const patch of report.patches) {
        await writeText(path.join(patchesDir, `${patch.id}.diff`), patch.diff);
    }
}
@@ -0,0 +1,2 @@
1
+ import { FileFingerprint } from "../../types.js";
2
+ export declare function buildFileRecord(root: string, filePath: string, includeContent?: boolean): Promise<FileFingerprint>;
@@ -0,0 +1,87 @@
1
+ import fs from "node:fs/promises";
2
+ import path from "node:path";
3
+ import { normalizeText, sha1, toPosix } from "../../utils/text.js";
4
/** Collect the unique, sorted capture-group-1 matches of `regex` in `value`. */
function extractNames(regex, value) {
    const names = new Set();
    for (const match of value.matchAll(regex)) {
        const name = match[1]?.trim();
        if (name) {
            names.add(name);
        }
    }
    return [...names].sort();
}
/** Unique, sorted module specifiers from `from "..."` and `require("...")`. */
function extractImports(value) {
    const specifiers = new Set();
    const patterns = [/from\s+["']([^"']+)["']/g, /require\(["']([^"']+)["']\)/g];
    for (const pattern of patterns) {
        for (const match of value.matchAll(pattern)) {
            specifiers.add(match[1]);
        }
    }
    return [...specifiers].sort();
}
/** Which known long-lived-runtime substrings appear in `value`, in canonical order. */
function extractRuntimeSignals(value) {
    const signals = [
        "listen(",
        "app.listen(",
        "createServer(",
        "spawn(",
        "setInterval(",
        "new Worker(",
        "new WebSocketServer("
    ];
    const present = [];
    for (const signal of signals) {
        if (value.includes(signal)) {
            present.push(signal);
        }
    }
    return present;
}
36
/**
 * Build the fingerprint record for one file: content hashes, extracted
 * declaration / export / import names, package.json metadata when the file is
 * a manifest, and runtime-signal markers. When `includeContent` is true the
 * raw text is kept on the record for later similarity checks and patching.
 */
export async function buildFileRecord(root, filePath, includeContent = false) {
    const rawContent = await fs.readFile(filePath, "utf8");
    const ext = path.extname(filePath);
    const relativePath = toPosix(path.relative(root, filePath));
    const declarations = extractNames(/\b(?:class|function|interface|type|enum|const)\s+([A-Za-z0-9_$]+)/g, rawContent);
    // `export const foo` style names.
    const exportedDirect = extractNames(/\bexport\s+(?:class|function|interface|type|const|enum)\s+([A-Za-z0-9_$]+)/g, rawContent);
    // `export { foo, bar as baz }` style names (local name before `as`).
    const exportedNamed = [];
    for (const match of rawContent.matchAll(/\bexport\s*{\s*([^}]+)\s*}/g)) {
        for (const token of match[1].split(",")) {
            const cleaned = token.split(/\s+as\s+/i)[0]?.trim();
            if (cleaned) {
                exportedNamed.push(cleaned);
            }
        }
    }
    const exports = [...new Set([...exportedDirect, ...exportedNamed])].sort();
    let packageName;
    let packageVersion;
    let packagePrivate;
    if (path.basename(filePath) === "package.json") {
        try {
            const parsed = JSON.parse(rawContent);
            if (typeof parsed.name === "string") {
                packageName = parsed.name;
            }
            if (typeof parsed.version === "string") {
                packageVersion = parsed.version;
            }
            if (typeof parsed.private === "boolean") {
                packagePrivate = parsed.private;
            }
        }
        catch {
            // Malformed package.json: treat it as a plain file.
        }
    }
    return {
        path: filePath,
        relativePath,
        ext,
        size: Buffer.byteLength(rawContent),
        hash: sha1(rawContent),
        normalizedHash: sha1(normalizeText(rawContent, ext)),
        basename: path.basename(filePath),
        declarations,
        exports,
        imports: extractImports(rawContent),
        packageName,
        packageVersion,
        packagePrivate,
        topLevelRuntimeSignals: extractRuntimeSignals(rawContent),
        rawContent: includeContent ? rawContent : undefined
    };
}
@@ -0,0 +1,2 @@
1
+ import { ScanOptions, ScanReport } from "../../types.js";
2
+ export declare function scanDuplicateFamilies(root: string, _options?: ScanOptions): Promise<ScanReport>;
@@ -0,0 +1,166 @@
1
+ import path from "node:path";
2
+ import { buildFileRecord } from "./fileRecord.js";
3
+ import { classifyDuplicateFamily } from "../classifier/classifyDuplicateFamily.js";
4
+ import { suggestCanonicalSource } from "../canonical/suggestCanonicalSource.js";
5
+ import { buildProposedPatches } from "../patcher/buildProposedPatches.js";
6
+ import { walkFiles } from "../../utils/fs.js";
7
+ import { jaccardSimilarity, sharedValues, slugify, tokenize } from "../../utils/text.js";
8
/** All unordered pairs [items[i], items[j]] with i < j, in index order. */
function pairwise(items) {
    const pairs = [];
    items.forEach((left, index) => {
        for (const right of items.slice(index + 1)) {
            pairs.push([left, right]);
        }
    });
    return pairs;
}
17
/** Deterministic family id derived from the member paths. */
function familyId(prefix, records) {
    const joined = records.map((record) => record.relativePath).join("-");
    return `${prefix}-${slugify(joined)}`;
}
/** Group records by raw content hash; every group of 2+ is an exact-copy family. */
function collectExactFamilies(records) {
    const groups = new Map();
    for (const record of records) {
        const bucket = groups.get(record.hash) ?? [];
        bucket.push(record);
        groups.set(record.hash, bucket);
    }
    const families = [];
    for (const group of groups.values()) {
        if (group.length >= 2) {
            families.push(classifyDuplicateFamily({
                id: familyId("exact-copy", group),
                records: group,
                seedType: "exact-copy"
            }));
        }
    }
    return families;
}
/** Group package-manifest records by package name; 2+ means a duplicated package. */
function collectPackageFamilies(records) {
    const groups = new Map();
    for (const record of records) {
        if (!record.packageName) {
            continue;
        }
        const bucket = groups.get(record.packageName) ?? [];
        bucket.push(record);
        groups.set(record.packageName, bucket);
    }
    const families = [];
    for (const group of groups.values()) {
        if (group.length >= 2) {
            families.push(classifyDuplicateFamily({
                id: familyId("package", group),
                records: group
            }));
        }
    }
    return families;
}
/**
 * Among files sharing a basename (groups of 2..12 to bound the pairwise
 * work), link pairs that are similar enough (token Jaccard >= 0.78), share
 * any declaration or export, or belong to the same named package; the linked
 * set forms one near family.
 */
function collectNearFamilies(records) {
    const byBasename = new Map();
    for (const record of records) {
        const bucket = byBasename.get(record.basename) ?? [];
        bucket.push(record);
        byBasename.set(record.basename, bucket);
    }
    const families = [];
    for (const group of byBasename.values()) {
        if (group.length < 2 || group.length > 12) {
            continue;
        }
        const related = new Set();
        for (const [left, right] of pairwise(group)) {
            const similarity = jaccardSimilarity(tokenize(left.rawContent ?? ""), tokenize(right.rawContent ?? ""));
            const sharedDecls = sharedValues(left.declarations, right.declarations);
            const sharedExps = sharedValues(left.exports, right.exports);
            const samePackage = Boolean(left.packageName && right.packageName && left.packageName === right.packageName);
            if (similarity >= 0.78 || sharedDecls.length >= 1 || sharedExps.length >= 1 || samePackage) {
                related.add(left);
                related.add(right);
            }
        }
        if (related.size >= 2) {
            const members = [...related];
            families.push(classifyDuplicateFamily({
                id: familyId("near", members),
                records: members
            }));
        }
    }
    return families;
}
/**
 * Group records by exported name (names of length >= 3 to skip noise);
 * groups of 2..8 form export-duplicate candidates.
 */
function collectExportFamilies(records) {
    const exportMap = new Map();
    for (const record of records) {
        for (const exported of record.exports) {
            if (exported.length < 3) {
                continue;
            }
            const bucket = exportMap.get(exported) ?? [];
            bucket.push(record);
            exportMap.set(exported, bucket);
        }
    }
    const families = [];
    for (const group of exportMap.values()) {
        if (group.length < 2 || group.length > 8) {
            continue;
        }
        families.push(classifyDuplicateFamily({
            id: familyId("export", group),
            records: group
        }));
    }
    return families;
}
114
/**
 * Drop families covering the same (type, case-insensitive file set) as an
 * earlier one; the first occurrence wins and input order is preserved.
 */
function dedupeFamilies(families) {
    const seen = new Set();
    const unique = [];
    for (const family of families) {
        const fileKey = family.files.map((file) => file.toLowerCase()).sort().join("|");
        const key = `${family.type}:${fileKey}`;
        if (seen.has(key)) {
            continue;
        }
        seen.add(key);
        unique.push(family);
    }
    return unique;
}
124
/**
 * Scan `root`, detect duplicate families via four collectors (exact hash,
 * package name, basename similarity, shared exports), dedupe them, assign a
 * canonical source per family, then attach patch proposals plus summary
 * statistics to the returned report.
 */
export async function scanDuplicateFamilies(root, _options = {}) {
    const filePaths = await walkFiles(root);
    const records = await Promise.all(filePaths.map((filePath) => buildFileRecord(root, filePath, true)));
    const collected = [
        ...collectExactFamilies(records),
        ...collectPackageFamilies(records),
        ...collectNearFamilies(records),
        ...collectExportFamilies(records)
    ];
    const families = dedupeFamilies(collected)
        .map((family) => ({ ...family, canonicalSource: suggestCanonicalSource(family) }))
        .sort((left, right) => right.confidence - left.confidence);
    const canonicalMap = [];
    for (const family of families) {
        if (!family.canonicalSource) {
            continue;
        }
        canonicalMap.push({
            familyId: family.id,
            type: family.type,
            canonicalSource: family.canonicalSource,
            members: family.files,
            confidence: family.confidence,
            reason: family.reason,
            risk: family.risk
        });
    }
    const report = {
        root: path.resolve(root),
        createdAt: new Date().toISOString(),
        stats: {
            filesScanned: filePaths.length,
            familiesDetected: families.length,
            patchesProposed: 0,
            safePatches: 0
        },
        families,
        canonicalMap,
        patches: []
    };
    // Patch building needs the (otherwise complete) report, so fill it last.
    report.patches = await buildProposedPatches(report);
    report.stats.patchesProposed = report.patches.length;
    report.stats.safePatches = report.patches.filter((patch) => patch.safe).length;
    return report;
}
@@ -0,0 +1,9 @@
1
+ export { scanDuplicateFamilies } from "./core/scanner/scanDuplicateFamilies.js";
2
+ export { classifyDuplicateFamily } from "./core/classifier/classifyDuplicateFamily.js";
3
+ export { suggestCanonicalSource } from "./core/canonical/suggestCanonicalSource.js";
4
+ export { buildSafePatch } from "./core/patcher/buildSafePatch.js";
5
+ export { applySafePatches } from "./core/patcher/applySafePatches.js";
6
+ export { buildProposedPatches } from "./core/patcher/buildProposedPatches.js";
7
+ export { rewriteImportsToCanonicalSource, createLegacyReexportShim } from "./core/patcher/rewriters.js";
8
+ export { writeReportArtifacts } from "./core/report/writeReportArtifacts.js";
9
+ export type { CanonicalMapEntry, DuplicateFamily, DuplicateType, FileFingerprint, PatchProposal, ScanOptions, ScanReport } from "./types.js";
package/dist/index.js ADDED
@@ -0,0 +1,8 @@
1
+ export { scanDuplicateFamilies } from "./core/scanner/scanDuplicateFamilies.js";
2
+ export { classifyDuplicateFamily } from "./core/classifier/classifyDuplicateFamily.js";
3
+ export { suggestCanonicalSource } from "./core/canonical/suggestCanonicalSource.js";
4
+ export { buildSafePatch } from "./core/patcher/buildSafePatch.js";
5
+ export { applySafePatches } from "./core/patcher/applySafePatches.js";
6
+ export { buildProposedPatches } from "./core/patcher/buildProposedPatches.js";
7
+ export { rewriteImportsToCanonicalSource, createLegacyReexportShim } from "./core/patcher/rewriters.js";
8
+ export { writeReportArtifacts } from "./core/report/writeReportArtifacts.js";
@@ -0,0 +1,4 @@
1
+ export declare const DEFAULT_IGNORE_DIRS: Set<string>;
2
+ export declare const CODE_EXTENSIONS: Set<string>;
3
+ export declare const DANGEROUS_PATH_HINTS: string[];
4
+ export declare const LEGACY_HINTS: string[];
@@ -0,0 +1,41 @@
1
// Directory names that are never descended into by the file walker
// (VCS metadata, build output, dependency caches, editor/tool state).
export const DEFAULT_IGNORE_DIRS = new Set([
    ".git",
    "node_modules",
    "dist",
    "build",
    "coverage",
    ".turbo",
    ".qflush",
    ".qflash",
    ".next",
    ".netlify",
    ".vs",
    ".vscode"
]);
// File extensions considered "code" for scanning; files with any other
// extension are skipped by the walker.
export const CODE_EXTENSIONS = new Set([
    ".ts",
    ".tsx",
    ".js",
    ".jsx",
    ".mjs",
    ".cjs",
    ".json",
    ".md"
]);
// Path substrings marking areas that are risky to patch automatically.
// NOTE(review): presumably matched case-insensitively against file paths
// by the classifier — confirm against classifyDuplicateFamily.
export const DANGEROUS_PATH_HINTS = [
    "auth",
    "routing",
    "image",
    "memory",
    "agent",
    "orchestration"
];
// Path substrings suggesting a file is a legacy/backup copy rather than
// the canonical source of a duplicate family.
export const LEGACY_HINTS = [
    "legacy",
    "mirror",
    "wrapper",
    "backup",
    "copy",
    "old",
    "deprecated"
];
@@ -0,0 +1,76 @@
1
/** How a family of files was judged to duplicate each other. */
export type DuplicateType = "exact-copy" | "alias-duplicate" | "near-duplicate" | "nested-package-duplicate" | "export-duplicate" | "runtime-duplicate" | "prompt-logic-duplicate" | "semantic-duplicate";
/** Remediation strategy a patch proposal applies. */
export type PatchKind = "legacy-reexport-shim" | "rewrite-imports" | "mark-private-package" | "report-only";
/** How risky it is to act on a finding automatically. */
export type RiskLevel = "safe" | "review" | "dangerous";
/** Per-file fingerprint produced by the scanner. */
export type FileFingerprint = {
    path: string;
    relativePath: string;
    /** File extension including the dot, e.g. ".ts". */
    ext: string;
    size: number;
    /** Hash of the raw file content. */
    hash: string;
    /** Hash after normalization — presumably comments/whitespace stripped; confirm in fileRecord. */
    normalizedHash: string;
    basename: string;
    declarations: string[];
    exports: string[];
    imports: string[];
    /** Present when package manifest information applies to this file. */
    packageName?: string;
    packageVersion?: string;
    packagePrivate?: boolean;
    topLevelRuntimeSignals: string[];
    /** Raw content; only populated when the scan keeps content in memory. */
    rawContent?: string;
};
/** Supporting evidence a detector attaches to a family. */
export type FamilyEvidence = {
    similarity?: number;
    sharedDeclarations?: string[];
    sharedExports?: string[];
    nestedPackage?: boolean;
    mixedImportRoots?: string[];
};
/** A group of files believed to duplicate one another. */
export type DuplicateFamily = {
    id: string;
    type: DuplicateType;
    /** Detector confidence — presumably in [0, 1]; confirm in classifier. */
    confidence: number;
    /** Human-readable explanation of why the family was detected. */
    reason: string;
    risk: RiskLevel;
    safePatchEligible: boolean;
    /** Member file paths. */
    files: string[];
    records: FileFingerprint[];
    /** Suggested file to keep; absent when no canonical choice was made. */
    canonicalSource?: string;
    evidence: FamilyEvidence;
};
/** One row of the family-to-canonical-source mapping in a report. */
export type CanonicalMapEntry = {
    familyId: string;
    type: DuplicateType;
    canonicalSource: string;
    members: string[];
    confidence: number;
    reason: string;
    risk: RiskLevel;
};
/** A single proposed patch targeting one file of a family. */
export type PatchProposal = {
    id: string;
    familyId: string;
    kind: PatchKind;
    /** True when the patch is considered safe to auto-apply. */
    safe: boolean;
    targetPath: string;
    canonicalSource?: string;
    reason: string;
    /** Textual diff/preview of the proposed change. */
    diff: string;
};
/** Aggregate counters for one scan run. */
export type ScanStats = {
    filesScanned: number;
    familiesDetected: number;
    patchesProposed: number;
    safePatches: number;
};
/** Full result of scanDuplicateFamilies. */
export type ScanReport = {
    /** Resolved (absolute) scan root. */
    root: string;
    /** ISO-8601 timestamp of report creation. */
    createdAt: string;
    stats: ScanStats;
    families: DuplicateFamily[];
    canonicalMap: CanonicalMapEntry[];
    patches: PatchProposal[];
};
/** Options accepted by the scanner. */
export type ScanOptions = {
    /** Directory where report artifacts should be written. */
    outputDir?: string;
    /** Whether raw file content is kept on fingerprints. */
    includeContent?: boolean;
};
package/dist/types.js ADDED
@@ -0,0 +1 @@
1
+ export {};
@@ -0,0 +1,4 @@
1
+ export declare function ensureDir(dirPath: string): Promise<void>;
2
+ export declare function writeJson(filePath: string, value: unknown): Promise<void>;
3
+ export declare function writeText(filePath: string, value: string): Promise<void>;
4
+ export declare function walkFiles(root: string): Promise<string[]>;
@@ -0,0 +1,36 @@
1
+ import fs from "node:fs/promises";
2
+ import path from "node:path";
3
+ import { DEFAULT_IGNORE_DIRS, CODE_EXTENSIONS } from "../rules/default-rules.js";
4
// Create `dirPath` (and any missing parent directories); succeeds
// silently when the directory already exists.
export async function ensureDir(dirPath) {
    await fs.mkdir(dirPath, { recursive: true });
}
7
// Serialize `value` as pretty-printed (2-space) JSON and write it to
// `filePath`, creating parent directories as needed.
export async function writeJson(filePath, value) {
    const serialized = JSON.stringify(value, null, 2);
    await ensureDir(path.dirname(filePath));
    await fs.writeFile(filePath, serialized, "utf8");
}
11
// Write a UTF-8 text file at `filePath`, creating parent directories first.
export async function writeText(filePath, value) {
    const parentDir = path.dirname(filePath);
    await ensureDir(parentDir);
    await fs.writeFile(filePath, value, "utf8");
}
15
// Recursively collect every code file under `root`, skipping ignored
// directories, and return the paths sorted lexicographically.
export async function walkFiles(root) {
    const collected = [];
    const pending = [root];
    while (pending.length > 0) {
        const current = pending.pop();
        const entries = await fs.readdir(current, { withFileTypes: true });
        for (const entry of entries) {
            const fullPath = path.join(current, entry.name);
            if (entry.isDirectory()) {
                if (!DEFAULT_IGNORE_DIRS.has(entry.name)) {
                    pending.push(fullPath);
                }
            }
            else if (CODE_EXTENSIONS.has(path.extname(entry.name))) {
                collected.push(fullPath);
            }
        }
    }
    // Traversal order differs from a plain recursive walk, but the final
    // sort makes the returned list deterministic either way.
    return collected.sort();
}
@@ -0,0 +1,8 @@
1
+ export declare function toPosix(input: string): string;
2
+ export declare function sha1(value: string): string;
3
+ export declare function normalizeText(raw: string, ext: string): string;
4
+ export declare function tokenize(raw: string): Set<string>;
5
+ export declare function jaccardSimilarity(a: Set<string>, b: Set<string>): number;
6
+ export declare function sharedValues(a: string[], b: string[]): string[];
7
+ export declare function slugify(value: string): string;
8
+ export declare function removeExtension(filePath: string): string;
@@ -0,0 +1,51 @@
1
+ import { createHash } from "node:crypto";
2
+ import path from "node:path";
3
// Normalize Windows-style backslash separators to forward slashes.
export function toPosix(input) {
    return input.split("\\").join("/");
}
6
// Hex-encoded SHA-1 digest of `value`; used as a content fingerprint,
// not for any security purpose.
export function sha1(value) {
    const hasher = createHash("sha1");
    hasher.update(value);
    return hasher.digest("hex");
}
9
// Normalize file content for fingerprinting: for non-JSON files, strip
// block comments and whole-line // comments; then collapse all runs of
// whitespace to single spaces and trim.
export function normalizeText(raw, ext) {
    const stripComments = ext !== ".json";
    let text = raw;
    if (stripComments) {
        text = text
            .replace(/\/\*[\s\S]*?\*\//g, "")
            .replace(/^\s*\/\/.*$/gm, "");
    }
    return text.replace(/\s+/g, " ").trim();
}
17
// Split lowercased text into identifier-ish tokens (letters, digits,
// and _ @ / . - characters), keeping only tokens of length >= 2.
export function tokenize(raw) {
    const pieces = raw.toLowerCase().split(/[^a-z0-9_@/.-]+/i);
    const tokens = new Set();
    for (const piece of pieces) {
        const token = piece.trim();
        if (token.length >= 2) {
            tokens.add(token);
        }
    }
    return tokens;
}
24
// Jaccard similarity |a ∩ b| / |a ∪ b| of two token sets.
// Two empty sets are defined as identical (similarity 1).
export function jaccardSimilarity(a, b) {
    if (a.size === 0 && b.size === 0) {
        return 1;
    }
    let shared = 0;
    b.forEach((token) => {
        if (a.has(token)) {
            shared += 1;
        }
    });
    // Inclusion-exclusion gives the union size without materializing it.
    const unionSize = a.size + b.size - shared;
    return unionSize === 0 ? 0 : shared / unionSize;
}
37
// Sorted, deduplicated list of values present in both input arrays.
export function sharedValues(a, b) {
    const lookup = new Set(b);
    const common = new Set();
    for (const value of a) {
        if (lookup.has(value)) {
            common.add(value);
        }
    }
    return [...common].sort();
}
41
// Lowercase `value`, replace runs of non-alphanumerics with single
// dashes, strip leading/trailing dashes, and cap at 80 characters.
export function slugify(value) {
    const lowered = value.toLowerCase();
    const dashed = lowered.replace(/[^a-z0-9]+/g, "-");
    const trimmed = dashed.replace(/^-+|-+$/g, "");
    return trimmed.slice(0, 80);
}
48
// Strip the final extension (as reported by path.extname) from a path;
// paths without an extension are returned unchanged.
export function removeExtension(filePath) {
    const ext = path.extname(filePath);
    if (ext.length === 0) {
        return filePath;
    }
    return filePath.slice(0, -ext.length);
}
package/package.json ADDED
@@ -0,0 +1,44 @@
1
+ {
2
+ "name": "@funeste38/allmight",
3
+ "version": "1.0.0",
4
+ "description": "Audit-first duplicate detector and canonicalization helper for Funesterie repositories.",
5
+ "type": "module",
6
+ "main": "dist/index.js",
7
+ "types": "dist/index.d.ts",
8
+ "bin": {
9
+ "allmight": "dist/cli/index.js"
10
+ },
11
+ "files": [
12
+ "dist",
13
+ "README.md",
14
+ "LICENSE"
15
+ ],
16
+ "scripts": {
17
+ "build": "tsc -p .",
18
+ "prepare": "npm run build",
19
+ "prepublishOnly": "npm run build && npm test",
20
+ "test": "vitest run",
21
+ "scan:spyder": "node dist/cli/index.js scan D:/SPYDER --output docs/reports/spyder"
22
+ },
23
+ "repository": {
24
+ "type": "git",
25
+ "url": "https://github.com/jEFFLEZ/allmight.git"
26
+ },
27
+ "bugs": {
28
+ "url": "https://github.com/jEFFLEZ/allmight/issues"
29
+ },
30
+ "homepage": "https://github.com/jEFFLEZ/allmight#readme",
31
+ "author": "jEFFLEZ",
32
+ "license": "MIT",
33
+ "publishConfig": {
34
+ "access": "public"
35
+ },
36
+ "engines": {
37
+ "node": ">=20"
38
+ },
39
+ "devDependencies": {
40
+ "@types/node": "^24.12.0",
41
+ "typescript": "^5.9.3",
42
+ "vitest": "^4.0.7"
43
+ }
44
+ }