@drewpayment/mink 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +347 -0
- package/package.json +32 -0
- package/src/cli.ts +176 -0
- package/src/commands/bug-search.ts +32 -0
- package/src/commands/config.ts +109 -0
- package/src/commands/cron.ts +295 -0
- package/src/commands/daemon.ts +46 -0
- package/src/commands/dashboard.ts +21 -0
- package/src/commands/designqc.ts +160 -0
- package/src/commands/detect-waste.ts +81 -0
- package/src/commands/framework-advisor.ts +52 -0
- package/src/commands/init.ts +159 -0
- package/src/commands/post-read.ts +123 -0
- package/src/commands/post-write.ts +157 -0
- package/src/commands/pre-read.ts +109 -0
- package/src/commands/pre-write.ts +136 -0
- package/src/commands/reflect.ts +39 -0
- package/src/commands/restore.ts +31 -0
- package/src/commands/scan.ts +101 -0
- package/src/commands/session-start.ts +21 -0
- package/src/commands/session-stop.ts +115 -0
- package/src/commands/status.ts +152 -0
- package/src/commands/update.ts +121 -0
- package/src/core/action-log.ts +341 -0
- package/src/core/backup.ts +122 -0
- package/src/core/bug-memory.ts +223 -0
- package/src/core/cron-parser.ts +94 -0
- package/src/core/daemon.ts +152 -0
- package/src/core/dashboard-api.ts +280 -0
- package/src/core/dashboard-server.ts +580 -0
- package/src/core/description.ts +232 -0
- package/src/core/design-eval/capture.ts +269 -0
- package/src/core/design-eval/route-detect.ts +165 -0
- package/src/core/design-eval/server-detect.ts +91 -0
- package/src/core/framework-advisor/catalog.ts +360 -0
- package/src/core/framework-advisor/decision-tree.ts +287 -0
- package/src/core/framework-advisor/generate.ts +132 -0
- package/src/core/framework-advisor/migration-prompts.ts +502 -0
- package/src/core/framework-advisor/validate.ts +137 -0
- package/src/core/fs-utils.ts +30 -0
- package/src/core/global-config.ts +74 -0
- package/src/core/index-store.ts +72 -0
- package/src/core/learning-memory.ts +120 -0
- package/src/core/paths.ts +86 -0
- package/src/core/pattern-engine.ts +108 -0
- package/src/core/project-id.ts +19 -0
- package/src/core/project-registry.ts +64 -0
- package/src/core/reflection.ts +256 -0
- package/src/core/scanner.ts +99 -0
- package/src/core/scheduler.ts +352 -0
- package/src/core/seed.ts +239 -0
- package/src/core/session.ts +128 -0
- package/src/core/stdin.ts +13 -0
- package/src/core/task-registry.ts +202 -0
- package/src/core/token-estimate.ts +36 -0
- package/src/core/token-ledger.ts +185 -0
- package/src/core/waste-detection.ts +214 -0
- package/src/core/write-exclusions.ts +24 -0
- package/src/types/action-log.ts +20 -0
- package/src/types/backup.ts +6 -0
- package/src/types/bug-memory.ts +24 -0
- package/src/types/config.ts +59 -0
- package/src/types/dashboard.ts +104 -0
- package/src/types/design-eval.ts +64 -0
- package/src/types/file-index.ts +38 -0
- package/src/types/framework-advisor.ts +97 -0
- package/src/types/hook-input.ts +27 -0
- package/src/types/learning-memory.ts +36 -0
- package/src/types/scheduler.ts +82 -0
- package/src/types/session.ts +50 -0
- package/src/types/token-ledger.ts +43 -0
- package/src/types/waste-detection.ts +21 -0
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
import { globalConfigPath } from "./paths";
|
|
2
|
+
import { safeReadJson, atomicWriteJson } from "./fs-utils";
|
|
3
|
+
import {
|
|
4
|
+
CONFIG_KEYS,
|
|
5
|
+
isValidConfigKey,
|
|
6
|
+
getConfigKeyMeta,
|
|
7
|
+
type GlobalConfig,
|
|
8
|
+
type ConfigKey,
|
|
9
|
+
} from "../types/config";
|
|
10
|
+
|
|
11
|
+
export function loadGlobalConfig(): GlobalConfig {
|
|
12
|
+
const raw = safeReadJson(globalConfigPath());
|
|
13
|
+
if (raw === null) return {};
|
|
14
|
+
if (typeof raw !== "object" || Array.isArray(raw)) {
|
|
15
|
+
console.warn("[mink] warning: corrupt config file at " + globalConfigPath());
|
|
16
|
+
return {};
|
|
17
|
+
}
|
|
18
|
+
return raw as GlobalConfig;
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
export function saveGlobalConfig(config: GlobalConfig): void {
|
|
22
|
+
atomicWriteJson(globalConfigPath(), config);
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
export interface ResolvedValue {
|
|
26
|
+
value: string;
|
|
27
|
+
source: "default" | "config file" | "environment variable";
|
|
28
|
+
configFileValue?: string;
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
export function resolveConfigValue(key: ConfigKey): ResolvedValue {
|
|
32
|
+
const meta = getConfigKeyMeta(key);
|
|
33
|
+
const config = loadGlobalConfig();
|
|
34
|
+
|
|
35
|
+
const envValue = process.env[meta.envVar];
|
|
36
|
+
const fileValue = config[key];
|
|
37
|
+
|
|
38
|
+
if (envValue !== undefined && envValue !== "") {
|
|
39
|
+
return {
|
|
40
|
+
value: envValue,
|
|
41
|
+
source: "environment variable",
|
|
42
|
+
configFileValue: fileValue,
|
|
43
|
+
};
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
if (fileValue !== undefined) {
|
|
47
|
+
return { value: fileValue, source: "config file" };
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
return { value: meta.default, source: "default" };
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
export function resolveAllConfig(): Array<ResolvedValue & { key: ConfigKey }> {
|
|
54
|
+
return CONFIG_KEYS.map((meta) => ({
|
|
55
|
+
key: meta.key,
|
|
56
|
+
...resolveConfigValue(meta.key),
|
|
57
|
+
}));
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
export function setConfigValue(key: ConfigKey, value: string): void {
|
|
61
|
+
const config = loadGlobalConfig();
|
|
62
|
+
config[key] = value;
|
|
63
|
+
saveGlobalConfig(config);
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
export function resetConfigKey(key: ConfigKey): void {
|
|
67
|
+
const config = loadGlobalConfig();
|
|
68
|
+
delete config[key];
|
|
69
|
+
saveGlobalConfig(config);
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
export function resetAllConfig(): void {
|
|
73
|
+
saveGlobalConfig({});
|
|
74
|
+
}
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
import type {
|
|
2
|
+
FileIndex,
|
|
3
|
+
FileIndexEntry,
|
|
4
|
+
StalenessReport,
|
|
5
|
+
} from "../types/file-index";
|
|
6
|
+
|
|
7
|
+
export function createEmptyIndex(): FileIndex {
|
|
8
|
+
return {
|
|
9
|
+
header: {
|
|
10
|
+
lastScanTimestamp: "",
|
|
11
|
+
totalFiles: 0,
|
|
12
|
+
lifetimeHits: 0,
|
|
13
|
+
lifetimeMisses: 0,
|
|
14
|
+
},
|
|
15
|
+
entries: {},
|
|
16
|
+
};
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
export function isFileIndex(value: unknown): value is FileIndex {
|
|
20
|
+
if (value === null || typeof value !== "object") return false;
|
|
21
|
+
const obj = value as Record<string, unknown>;
|
|
22
|
+
return (
|
|
23
|
+
typeof obj.header === "object" &&
|
|
24
|
+
obj.header !== null &&
|
|
25
|
+
typeof obj.entries === "object" &&
|
|
26
|
+
obj.entries !== null
|
|
27
|
+
);
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
export function upsertEntry(index: FileIndex, entry: FileIndexEntry): void {
|
|
31
|
+
index.entries[entry.filePath] = entry;
|
|
32
|
+
index.header.totalFiles = Object.keys(index.entries).length;
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
export function removeEntry(index: FileIndex, filePath: string): void {
|
|
36
|
+
delete index.entries[filePath];
|
|
37
|
+
index.header.totalFiles = Object.keys(index.entries).length;
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
export function lookupEntry(
|
|
41
|
+
index: FileIndex,
|
|
42
|
+
filePath: string
|
|
43
|
+
): FileIndexEntry | null {
|
|
44
|
+
return index.entries[filePath] ?? null;
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
export function recordHit(index: FileIndex): void {
|
|
48
|
+
index.header.lifetimeHits++;
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
export function recordMiss(index: FileIndex): void {
|
|
52
|
+
index.header.lifetimeMisses++;
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
export function checkStaleness(
|
|
56
|
+
index: FileIndex,
|
|
57
|
+
scannedFiles: string[]
|
|
58
|
+
): StalenessReport {
|
|
59
|
+
const scannedSet = new Set(scannedFiles);
|
|
60
|
+
const indexedSet = new Set(Object.keys(index.entries));
|
|
61
|
+
|
|
62
|
+
const missingFromIndex = scannedFiles.filter((f) => !indexedSet.has(f));
|
|
63
|
+
const orphanedEntries = Object.keys(index.entries).filter(
|
|
64
|
+
(f) => !scannedSet.has(f)
|
|
65
|
+
);
|
|
66
|
+
|
|
67
|
+
return {
|
|
68
|
+
missingFromIndex,
|
|
69
|
+
orphanedEntries,
|
|
70
|
+
isStale: missingFromIndex.length > 0 || orphanedEntries.length > 0,
|
|
71
|
+
};
|
|
72
|
+
}
|
|
@@ -0,0 +1,120 @@
|
|
|
1
|
+
import type { LearningMemory, SectionName } from "../types/learning-memory";
|
|
2
|
+
|
|
3
|
+
const SECTION_ORDER: SectionName[] = [
|
|
4
|
+
"User Preferences",
|
|
5
|
+
"Key Learnings",
|
|
6
|
+
"Do-Not-Repeat",
|
|
7
|
+
"Decision Log",
|
|
8
|
+
];
|
|
9
|
+
|
|
10
|
+
const RECOGNIZED_SECTIONS = new Set<string>(SECTION_ORDER);
|
|
11
|
+
|
|
12
|
+
function emptySections(): Record<SectionName, string[]> {
|
|
13
|
+
return {
|
|
14
|
+
"User Preferences": [],
|
|
15
|
+
"Key Learnings": [],
|
|
16
|
+
"Do-Not-Repeat": [],
|
|
17
|
+
"Decision Log": [],
|
|
18
|
+
};
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
export function createEmptyLearningMemory(projectName: string): LearningMemory {
|
|
22
|
+
return {
|
|
23
|
+
projectName,
|
|
24
|
+
sections: emptySections(),
|
|
25
|
+
};
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
export function parseLearningMemory(markdown: string): LearningMemory {
|
|
29
|
+
const sections = emptySections();
|
|
30
|
+
let projectName = "unknown";
|
|
31
|
+
|
|
32
|
+
if (!markdown || markdown.trim() === "") {
|
|
33
|
+
return { projectName, sections };
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
const lines = markdown.split("\n");
|
|
37
|
+
let currentSection: SectionName | null = null;
|
|
38
|
+
|
|
39
|
+
for (const line of lines) {
|
|
40
|
+
// Check for title line: # Learning Memory — <name>
|
|
41
|
+
const titleMatch = line.match(/^#\s+Learning Memory\s+[—–-]+\s+(.+)$/);
|
|
42
|
+
if (titleMatch) {
|
|
43
|
+
projectName = titleMatch[1].trim();
|
|
44
|
+
currentSection = null;
|
|
45
|
+
continue;
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
// Check for section heading: ## Section Name
|
|
49
|
+
const sectionMatch = line.match(/^##\s+(.+)$/);
|
|
50
|
+
if (sectionMatch) {
|
|
51
|
+
const sectionName = sectionMatch[1].trim();
|
|
52
|
+
if (RECOGNIZED_SECTIONS.has(sectionName)) {
|
|
53
|
+
currentSection = sectionName as SectionName;
|
|
54
|
+
} else {
|
|
55
|
+
currentSection = null;
|
|
56
|
+
}
|
|
57
|
+
continue;
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
// Check for entry line: - entry
|
|
61
|
+
if (currentSection !== null) {
|
|
62
|
+
const entryMatch = line.match(/^-\s+(.+)$/);
|
|
63
|
+
if (entryMatch) {
|
|
64
|
+
sections[currentSection].push(entryMatch[1]);
|
|
65
|
+
}
|
|
66
|
+
}
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
return { projectName, sections };
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
export function serializeLearningMemory(mem: LearningMemory): string {
|
|
73
|
+
const lines: string[] = [];
|
|
74
|
+
|
|
75
|
+
lines.push(`# Learning Memory — ${mem.projectName}`);
|
|
76
|
+
|
|
77
|
+
for (const section of SECTION_ORDER) {
|
|
78
|
+
lines.push("");
|
|
79
|
+
lines.push(`## ${section}`);
|
|
80
|
+
for (const entry of mem.sections[section]) {
|
|
81
|
+
lines.push(`- ${entry}`);
|
|
82
|
+
}
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
return lines.join("\n") + "\n";
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
export function addEntry(
|
|
89
|
+
mem: LearningMemory,
|
|
90
|
+
section: SectionName,
|
|
91
|
+
entry: string
|
|
92
|
+
): void {
|
|
93
|
+
mem.sections[section].push(entry);
|
|
94
|
+
}
|
|
95
|
+
|
|
96
|
+
export function removeEntry(
|
|
97
|
+
mem: LearningMemory,
|
|
98
|
+
section: SectionName,
|
|
99
|
+
index: number
|
|
100
|
+
): void {
|
|
101
|
+
const entries = mem.sections[section];
|
|
102
|
+
if (index < 0 || index >= entries.length) {
|
|
103
|
+
return;
|
|
104
|
+
}
|
|
105
|
+
entries.splice(index, 1);
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
export function getEntries(
|
|
109
|
+
mem: LearningMemory,
|
|
110
|
+
section: SectionName
|
|
111
|
+
): string[] {
|
|
112
|
+
return [...mem.sections[section]];
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
export function totalEntryCount(mem: LearningMemory): number {
|
|
116
|
+
return SECTION_ORDER.reduce(
|
|
117
|
+
(sum, section) => sum + mem.sections[section].length,
|
|
118
|
+
0
|
|
119
|
+
);
|
|
120
|
+
}
|
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
import { join } from "path";
|
|
2
|
+
import { homedir } from "os";
|
|
3
|
+
import { generateProjectId } from "./project-id";
|
|
4
|
+
|
|
5
|
+
const MINK_ROOT = join(homedir(), ".mink");
|
|
6
|
+
|
|
7
|
+
export function minkRoot(): string {
|
|
8
|
+
return MINK_ROOT;
|
|
9
|
+
}
|
|
10
|
+
|
|
11
|
+
export function projectDir(cwd: string): string {
|
|
12
|
+
const id = generateProjectId(cwd);
|
|
13
|
+
return join(MINK_ROOT, "projects", id);
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
export function sessionPath(cwd: string): string {
|
|
17
|
+
return join(projectDir(cwd), "session.json");
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
export function fileIndexPath(cwd: string): string {
|
|
21
|
+
return join(projectDir(cwd), "file-index.json");
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
export function configPath(cwd: string): string {
|
|
25
|
+
return join(projectDir(cwd), "config.json");
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
export function learningMemoryPath(cwd: string): string {
|
|
29
|
+
return join(projectDir(cwd), "learning-memory.md");
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
export function tokenLedgerPath(cwd: string): string {
|
|
33
|
+
return join(projectDir(cwd), "token-ledger.json");
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
export function tokenLedgerArchivePath(cwd: string): string {
|
|
37
|
+
return join(projectDir(cwd), "token-ledger-archive.json");
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
export function bugMemoryPath(cwd: string): string {
|
|
41
|
+
return join(projectDir(cwd), "bug-memory.json");
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
export function actionLogPath(cwd: string): string {
|
|
45
|
+
return join(projectDir(cwd), "action-log.md");
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
export function schedulerPidPath(): string {
|
|
49
|
+
return join(MINK_ROOT, "scheduler.pid");
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
export function schedulerLogPath(): string {
|
|
53
|
+
return join(MINK_ROOT, "scheduler.log");
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
export function schedulerManifestPath(cwd: string): string {
|
|
57
|
+
return join(projectDir(cwd), "scheduler-manifest.json");
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
export function globalConfigPath(): string {
|
|
61
|
+
return join(MINK_ROOT, "config");
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
export function projectMetaPath(cwd: string): string {
|
|
65
|
+
return join(projectDir(cwd), "project-meta.json");
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
export function backupDirPath(cwd: string): string {
|
|
69
|
+
return join(projectDir(cwd), "backups");
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
export function designCapturesDir(cwd: string): string {
|
|
73
|
+
return join(projectDir(cwd), "design-captures");
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
export function designReportPath(cwd: string): string {
|
|
77
|
+
return join(projectDir(cwd), "design-report.json");
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
export function frameworkAdvisorPath(cwd: string): string {
|
|
81
|
+
return join(projectDir(cwd), "framework-advisor.md");
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
export function frameworkAdvisorJsonPath(cwd: string): string {
|
|
85
|
+
return join(projectDir(cwd), "framework-advisor.json");
|
|
86
|
+
}
|
|
@@ -0,0 +1,108 @@
|
|
|
1
|
+
import type { ExtractedPattern, PatternMatch } from "../types/learning-memory";

// Trigger phrases that introduce a word-boundary pattern, e.g.
// "never use X" or "avoid X". Case-insensitive.
const PHRASE_TRIGGERS = [
  /never\s+use\s+/i,
  /\bavoid\s+/i,
];

// Terminators that end a captured phrase: dash/period punctuation, a
// connective word (" in ", " for ", ...), or end of string (the `$`
// alternative guarantees exec() always matches somewhere).
const PHRASE_STOP_RE = /[—–\-.]|\s+(?:in|for|with|on|by|from|to|when|if|because|since|after|before|during|until)\s+|$/;

/**
 * Heuristically extract forbidden/discouraged patterns from free-text
 * memory entries.
 *
 * Two extraction strategies per entry:
 *   1. every quoted substring becomes a case-sensitive "literal" pattern;
 *   2. text following a trigger phrase ("never use ...", "avoid ...")
 *      becomes a case-insensitive "word-boundary" pattern, truncated at
 *      the first stop punctuation/connective and stripped of any quoted
 *      portions (those are already covered by strategy 1).
 *
 * Each result carries the entry it came from in `sourceEntry`.
 */
export function extractPatterns(entries: string[]): ExtractedPattern[] {
  const results: ExtractedPattern[] = [];

  for (const entry of entries) {
    const quotedPatterns: ExtractedPattern[] = [];

    // 1. Quoted strings (double or single quotes).
    // NOTE(review): this regex also pairs mismatched quotes ("foo' or
    // 'bar") — presumably acceptable for a heuristic; confirm.
    const quoteRe = /["']([^"']+)["']/g;
    let qMatch: RegExpExecArray | null;
    while ((qMatch = quoteRe.exec(entry)) !== null) {
      quotedPatterns.push({
        type: "literal",
        pattern: qMatch[1],
        sourceEntry: entry,
      });
    }

    results.push(...quotedPatterns);

    // 2. Phrase-based word-boundary patterns.
    for (const triggerRe of PHRASE_TRIGGERS) {
      // Re-compile the trigger with the global flag so repeated
      // occurrences within one entry are all found via exec()'s lastIndex.
      const fullRe = new RegExp(triggerRe.source, "gi");
      let triggerMatch: RegExpExecArray | null;

      while ((triggerMatch = fullRe.exec(entry)) !== null) {
        const afterTrigger = entry.slice(triggerMatch.index + triggerMatch[0].length);

        // Blank out quoted portions before locating the stop, replacing
        // them with equal-length spaces so indices into the original
        // `afterTrigger` remain valid.
        let cleaned = afterTrigger;
        cleaned = cleaned.replace(/["'][^"']*["']/g, (m) => " ".repeat(m.length));

        // Find where the phrase ends. An immediate stop (index 0) is
        // ignored and the whole remainder is taken instead.
        const stopMatch = PHRASE_STOP_RE.exec(cleaned);
        const phraseLength = stopMatch && stopMatch.index > 0 ? stopMatch.index : cleaned.length;

        let phrase = afterTrigger.slice(0, phraseLength).trim();

        // Drop quoted substrings (handled by strategy 1 above) ...
        phrase = phrase.replace(/["'][^"']*["']/g, "").trim();
        // ... and normalize the whitespace that removal leaves behind.
        phrase = phrase.replace(/\s+/g, " ").trim();

        if (phrase.length > 0) {
          results.push({
            type: "word-boundary",
            pattern: phrase,
            sourceEntry: entry,
          });
        }
      }
    }
  }

  return results;
}
|
|
69
|
+
|
|
70
|
+
export function matchPatterns(
|
|
71
|
+
patterns: ExtractedPattern[],
|
|
72
|
+
content: string
|
|
73
|
+
): PatternMatch[] {
|
|
74
|
+
if (!content || patterns.length === 0) {
|
|
75
|
+
return [];
|
|
76
|
+
}
|
|
77
|
+
|
|
78
|
+
const matches: PatternMatch[] = [];
|
|
79
|
+
|
|
80
|
+
for (const pat of patterns) {
|
|
81
|
+
if (pat.type === "literal") {
|
|
82
|
+
// Case-sensitive includes check
|
|
83
|
+
let idx = content.indexOf(pat.pattern);
|
|
84
|
+
while (idx !== -1) {
|
|
85
|
+
matches.push({
|
|
86
|
+
pattern: pat,
|
|
87
|
+
matchedText: pat.pattern,
|
|
88
|
+
index: idx,
|
|
89
|
+
});
|
|
90
|
+
idx = content.indexOf(pat.pattern, idx + 1);
|
|
91
|
+
}
|
|
92
|
+
} else {
|
|
93
|
+
// Word-boundary, case-insensitive
|
|
94
|
+
const escaped = pat.pattern.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
|
95
|
+
const re = new RegExp(`\\b${escaped}\\b`, "gi");
|
|
96
|
+
let m: RegExpExecArray | null;
|
|
97
|
+
while ((m = re.exec(content)) !== null) {
|
|
98
|
+
matches.push({
|
|
99
|
+
pattern: pat,
|
|
100
|
+
matchedText: m[0],
|
|
101
|
+
index: m.index,
|
|
102
|
+
});
|
|
103
|
+
}
|
|
104
|
+
}
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
return matches;
|
|
108
|
+
}
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import { createHash } from "crypto";
|
|
2
|
+
import { basename } from "path";
|
|
3
|
+
|
|
4
|
+
function slugify(name: string): string {
|
|
5
|
+
return name
|
|
6
|
+
.toLowerCase()
|
|
7
|
+
.replace(/[^a-z0-9]+/g, "-")
|
|
8
|
+
.replace(/^-|-$/g, "");
|
|
9
|
+
}
|
|
10
|
+
|
|
11
|
+
export function generateProjectId(absolutePath: string): string {
|
|
12
|
+
const normalized = absolutePath.replace(/\/+$/, "");
|
|
13
|
+
const slug = slugify(basename(normalized));
|
|
14
|
+
const hash = createHash("sha256")
|
|
15
|
+
.update(normalized)
|
|
16
|
+
.digest("hex")
|
|
17
|
+
.slice(0, 6);
|
|
18
|
+
return `${slug}-${hash}`;
|
|
19
|
+
}
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
import { readdirSync, existsSync } from "fs";
|
|
2
|
+
import { join } from "path";
|
|
3
|
+
import { minkRoot } from "./paths";
|
|
4
|
+
import { safeReadJson } from "./fs-utils";
|
|
5
|
+
|
|
6
|
+
export interface ProjectMeta {
|
|
7
|
+
cwd: string;
|
|
8
|
+
name: string;
|
|
9
|
+
initTimestamp: string;
|
|
10
|
+
version: string;
|
|
11
|
+
}
|
|
12
|
+
|
|
13
|
+
export interface RegisteredProject {
|
|
14
|
+
id: string;
|
|
15
|
+
cwd: string;
|
|
16
|
+
name: string;
|
|
17
|
+
version: string;
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
export function getProjectMeta(projDir: string): ProjectMeta | null {
|
|
21
|
+
const metaPath = join(projDir, "project-meta.json");
|
|
22
|
+
const raw = safeReadJson(metaPath);
|
|
23
|
+
if (
|
|
24
|
+
raw === null ||
|
|
25
|
+
typeof raw !== "object" ||
|
|
26
|
+
Array.isArray(raw)
|
|
27
|
+
) {
|
|
28
|
+
return null;
|
|
29
|
+
}
|
|
30
|
+
const obj = raw as Record<string, unknown>;
|
|
31
|
+
if (typeof obj.cwd !== "string" || typeof obj.name !== "string") {
|
|
32
|
+
return null;
|
|
33
|
+
}
|
|
34
|
+
return {
|
|
35
|
+
cwd: obj.cwd as string,
|
|
36
|
+
name: obj.name as string,
|
|
37
|
+
initTimestamp: (obj.initTimestamp as string) ?? "",
|
|
38
|
+
version: (obj.version as string) ?? "0.1.0",
|
|
39
|
+
};
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
export function listRegisteredProjects(): RegisteredProject[] {
|
|
43
|
+
const projectsDir = join(minkRoot(), "projects");
|
|
44
|
+
if (!existsSync(projectsDir)) return [];
|
|
45
|
+
|
|
46
|
+
const entries = readdirSync(projectsDir, { withFileTypes: true });
|
|
47
|
+
const projects: RegisteredProject[] = [];
|
|
48
|
+
|
|
49
|
+
for (const entry of entries) {
|
|
50
|
+
if (!entry.isDirectory()) continue;
|
|
51
|
+
const projDir = join(projectsDir, entry.name);
|
|
52
|
+
const meta = getProjectMeta(projDir);
|
|
53
|
+
if (meta) {
|
|
54
|
+
projects.push({
|
|
55
|
+
id: entry.name,
|
|
56
|
+
cwd: meta.cwd,
|
|
57
|
+
name: meta.name,
|
|
58
|
+
version: meta.version,
|
|
59
|
+
});
|
|
60
|
+
}
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
return projects;
|
|
64
|
+
}
|