@getmikk/core 1.2.0 → 1.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. package/README.md +431 -0
  2. package/package.json +6 -2
  3. package/src/contract/contract-generator.ts +85 -85
  4. package/src/contract/contract-reader.ts +28 -28
  5. package/src/contract/contract-writer.ts +114 -114
  6. package/src/contract/index.ts +12 -12
  7. package/src/contract/lock-compiler.ts +221 -221
  8. package/src/contract/lock-reader.ts +34 -34
  9. package/src/contract/schema.ts +147 -147
  10. package/src/graph/cluster-detector.ts +312 -312
  11. package/src/graph/graph-builder.ts +211 -211
  12. package/src/graph/impact-analyzer.ts +55 -55
  13. package/src/graph/index.ts +4 -4
  14. package/src/graph/types.ts +59 -59
  15. package/src/hash/file-hasher.ts +30 -30
  16. package/src/hash/hash-store.ts +119 -119
  17. package/src/hash/index.ts +3 -3
  18. package/src/hash/tree-hasher.ts +20 -20
  19. package/src/index.ts +12 -12
  20. package/src/parser/base-parser.ts +16 -16
  21. package/src/parser/boundary-checker.ts +211 -211
  22. package/src/parser/index.ts +46 -46
  23. package/src/parser/types.ts +90 -90
  24. package/src/parser/typescript/ts-extractor.ts +543 -543
  25. package/src/parser/typescript/ts-parser.ts +41 -41
  26. package/src/parser/typescript/ts-resolver.ts +86 -86
  27. package/src/utils/errors.ts +42 -42
  28. package/src/utils/fs.ts +75 -75
  29. package/src/utils/fuzzy-match.ts +186 -186
  30. package/src/utils/logger.ts +36 -36
  31. package/src/utils/minimatch.ts +19 -19
  32. package/tests/contract.test.ts +134 -134
  33. package/tests/fixtures/simple-api/package.json +5 -5
  34. package/tests/fixtures/simple-api/src/auth/middleware.ts +9 -9
  35. package/tests/fixtures/simple-api/src/auth/verify.ts +6 -6
  36. package/tests/fixtures/simple-api/src/index.ts +9 -9
  37. package/tests/fixtures/simple-api/src/utils/jwt.ts +3 -3
  38. package/tests/fixtures/simple-api/tsconfig.json +8 -8
  39. package/tests/fuzzy-match.test.ts +142 -142
  40. package/tests/graph.test.ts +169 -169
  41. package/tests/hash.test.ts +49 -49
  42. package/tests/helpers.ts +83 -83
  43. package/tests/parser.test.ts +218 -218
  44. package/tsconfig.json +15 -15
@@ -1,30 +1,30 @@
1
- import { createHash } from 'node:crypto'
2
- import * as fs from 'node:fs/promises'
3
-
4
- /**
5
- * Compute SHA-256 hash of a string.
6
- */
7
- export function hashContent(content: string): string {
8
- return createHash('sha256').update(content).digest('hex')
9
- }
10
-
11
- /**
12
- * Compute SHA-256 hash of a file on disk.
13
- */
14
- export async function hashFile(filePath: string): Promise<string> {
15
- const content = await fs.readFile(filePath, 'utf-8')
16
- return hashContent(content)
17
- }
18
-
19
- /**
20
- * Hash a specific function body by extracting lines from file content.
21
- */
22
- export function hashFunctionBody(
23
- fileContent: string,
24
- startLine: number,
25
- endLine: number
26
- ): string {
27
- const lines = fileContent.split('\n')
28
- const body = lines.slice(startLine - 1, endLine).join('\n')
29
- return hashContent(body)
30
- }
1
+ import { createHash } from 'node:crypto'
2
+ import * as fs from 'node:fs/promises'
3
+
4
+ /**
5
+ * Compute SHA-256 hash of a string.
6
+ */
7
+ export function hashContent(content: string): string {
8
+ return createHash('sha256').update(content).digest('hex')
9
+ }
10
+
11
+ /**
12
+ * Compute SHA-256 hash of a file on disk.
13
+ */
14
+ export async function hashFile(filePath: string): Promise<string> {
15
+ const content = await fs.readFile(filePath, 'utf-8')
16
+ return hashContent(content)
17
+ }
18
+
19
+ /**
20
+ * Hash a specific function body by extracting lines from file content.
21
+ */
22
+ export function hashFunctionBody(
23
+ fileContent: string,
24
+ startLine: number,
25
+ endLine: number
26
+ ): string {
27
+ const lines = fileContent.split('\n')
28
+ const body = lines.slice(startLine - 1, endLine).join('\n')
29
+ return hashContent(body)
30
+ }
@@ -1,119 +1,119 @@
1
- import * as path from 'node:path'
2
- import * as fs from 'node:fs'
3
- import Database from 'better-sqlite3'
4
- import { logger } from '../utils/logger.js'
5
-
6
- const CREATE_SCHEMA_SQL = `
7
- CREATE TABLE IF NOT EXISTS file_hashes (
8
- path TEXT PRIMARY KEY,
9
- hash TEXT NOT NULL,
10
- size_bytes INTEGER NOT NULL,
11
- updated_at INTEGER NOT NULL
12
- );
13
- CREATE INDEX IF NOT EXISTS idx_updated_at ON file_hashes(updated_at);
14
- `
15
-
16
- /**
17
- * HashStore — SQLite-backed persistent store for file content hashes.
18
- *
19
- * Survives process restarts, handles concurrent access via WAL mode,
20
- * and provides fast O(1) lookups for change detection.
21
- */
22
- export class HashStore {
23
- private db: InstanceType<typeof Database>
24
-
25
- constructor(projectRoot: string) {
26
- const dbPath = path.join(projectRoot, '.mikk', 'cache', 'hashes.db')
27
- const dbDir = path.dirname(dbPath)
28
-
29
- // Ensure directory exists
30
- if (!fs.existsSync(dbDir)) {
31
- fs.mkdirSync(dbDir, { recursive: true })
32
- }
33
-
34
- this.db = this.openDatabase(dbPath)
35
- this.db.exec(CREATE_SCHEMA_SQL)
36
- }
37
-
38
- private openDatabase(dbPath: string): InstanceType<typeof Database> {
39
- try {
40
- const db = new Database(dbPath)
41
- // WAL mode: concurrent reads + writes, atomic commits
42
- db.pragma('journal_mode = WAL')
43
- // Wait up to 5s if locked by another process
44
- db.pragma('busy_timeout = 5000')
45
- return db
46
- } catch (err: any) {
47
- // Corrupted database — delete and recreate
48
- logger.warn('Hash store corrupted, recreating', { error: err.message })
49
- try {
50
- fs.unlinkSync(dbPath)
51
- } catch { /* ignore */ }
52
- const db = new Database(dbPath)
53
- db.pragma('journal_mode = WAL')
54
- db.pragma('busy_timeout = 5000')
55
- return db
56
- }
57
- }
58
-
59
- /** Get the stored hash for a file path, or null if not tracked */
60
- get(filePath: string): string | null {
61
- const row = this.db
62
- .prepare('SELECT hash FROM file_hashes WHERE path = ?')
63
- .get(filePath) as { hash: string } | undefined
64
- return row?.hash ?? null
65
- }
66
-
67
- /** Store or update the hash for a file */
68
- set(filePath: string, hash: string, sizeBytes: number): void {
69
- this.db
70
- .prepare(
71
- `INSERT OR REPLACE INTO file_hashes
72
- (path, hash, size_bytes, updated_at) VALUES (?, ?, ?, ?)`
73
- )
74
- .run(filePath, hash, sizeBytes, Date.now())
75
- }
76
-
77
- /** Remove tracked hash for a (deleted) file */
78
- delete(filePath: string): void {
79
- this.db
80
- .prepare('DELETE FROM file_hashes WHERE path = ?')
81
- .run(filePath)
82
- }
83
-
84
- /** Return all paths whose updated_at is greater than a timestamp */
85
- getChangedSince(timestamp: number): string[] {
86
- const rows = this.db
87
- .prepare('SELECT path FROM file_hashes WHERE updated_at > ?')
88
- .all(timestamp) as { path: string }[]
89
- return rows.map(r => r.path)
90
- }
91
-
92
- /** Return all tracked file paths */
93
- getAllPaths(): string[] {
94
- const rows = this.db
95
- .prepare('SELECT path FROM file_hashes')
96
- .all() as { path: string }[]
97
- return rows.map(r => r.path)
98
- }
99
-
100
- /** Batch-set multiple hashes inside a single transaction (fast for init) */
101
- setBatch(entries: { path: string; hash: string; sizeBytes: number }[]): void {
102
- const insert = this.db.prepare(
103
- `INSERT OR REPLACE INTO file_hashes
104
- (path, hash, size_bytes, updated_at) VALUES (?, ?, ?, ?)`
105
- )
106
- const now = Date.now()
107
- const runAll = this.db.transaction((rows: typeof entries) => {
108
- for (const row of rows) {
109
- insert.run(row.path, row.hash, row.sizeBytes, now)
110
- }
111
- })
112
- runAll(entries)
113
- }
114
-
115
- /** Close the database connection */
116
- close(): void {
117
- this.db.close()
118
- }
119
- }
1
+ import * as path from 'node:path'
2
+ import * as fs from 'node:fs'
3
+ import Database from 'better-sqlite3'
4
+ import { logger } from '../utils/logger.js'
5
+
6
+ const CREATE_SCHEMA_SQL = `
7
+ CREATE TABLE IF NOT EXISTS file_hashes (
8
+ path TEXT PRIMARY KEY,
9
+ hash TEXT NOT NULL,
10
+ size_bytes INTEGER NOT NULL,
11
+ updated_at INTEGER NOT NULL
12
+ );
13
+ CREATE INDEX IF NOT EXISTS idx_updated_at ON file_hashes(updated_at);
14
+ `
15
+
16
+ /**
17
+ * HashStore — SQLite-backed persistent store for file content hashes.
18
+ *
19
+ * Survives process restarts, handles concurrent access via WAL mode,
20
+ * and provides fast O(1) lookups for change detection.
21
+ */
22
+ export class HashStore {
23
+ private db: InstanceType<typeof Database>
24
+
25
+ constructor(projectRoot: string) {
26
+ const dbPath = path.join(projectRoot, '.mikk', 'cache', 'hashes.db')
27
+ const dbDir = path.dirname(dbPath)
28
+
29
+ // Ensure directory exists
30
+ if (!fs.existsSync(dbDir)) {
31
+ fs.mkdirSync(dbDir, { recursive: true })
32
+ }
33
+
34
+ this.db = this.openDatabase(dbPath)
35
+ this.db.exec(CREATE_SCHEMA_SQL)
36
+ }
37
+
38
+ private openDatabase(dbPath: string): InstanceType<typeof Database> {
39
+ try {
40
+ const db = new Database(dbPath)
41
+ // WAL mode: concurrent reads + writes, atomic commits
42
+ db.pragma('journal_mode = WAL')
43
+ // Wait up to 5s if locked by another process
44
+ db.pragma('busy_timeout = 5000')
45
+ return db
46
+ } catch (err: any) {
47
+ // Corrupted database — delete and recreate
48
+ logger.warn('Hash store corrupted, recreating', { error: err.message })
49
+ try {
50
+ fs.unlinkSync(dbPath)
51
+ } catch { /* ignore */ }
52
+ const db = new Database(dbPath)
53
+ db.pragma('journal_mode = WAL')
54
+ db.pragma('busy_timeout = 5000')
55
+ return db
56
+ }
57
+ }
58
+
59
+ /** Get the stored hash for a file path, or null if not tracked */
60
+ get(filePath: string): string | null {
61
+ const row = this.db
62
+ .prepare('SELECT hash FROM file_hashes WHERE path = ?')
63
+ .get(filePath) as { hash: string } | undefined
64
+ return row?.hash ?? null
65
+ }
66
+
67
+ /** Store or update the hash for a file */
68
+ set(filePath: string, hash: string, sizeBytes: number): void {
69
+ this.db
70
+ .prepare(
71
+ `INSERT OR REPLACE INTO file_hashes
72
+ (path, hash, size_bytes, updated_at) VALUES (?, ?, ?, ?)`
73
+ )
74
+ .run(filePath, hash, sizeBytes, Date.now())
75
+ }
76
+
77
+ /** Remove tracked hash for a (deleted) file */
78
+ delete(filePath: string): void {
79
+ this.db
80
+ .prepare('DELETE FROM file_hashes WHERE path = ?')
81
+ .run(filePath)
82
+ }
83
+
84
+ /** Return all paths whose updated_at is greater than a timestamp */
85
+ getChangedSince(timestamp: number): string[] {
86
+ const rows = this.db
87
+ .prepare('SELECT path FROM file_hashes WHERE updated_at > ?')
88
+ .all(timestamp) as { path: string }[]
89
+ return rows.map(r => r.path)
90
+ }
91
+
92
+ /** Return all tracked file paths */
93
+ getAllPaths(): string[] {
94
+ const rows = this.db
95
+ .prepare('SELECT path FROM file_hashes')
96
+ .all() as { path: string }[]
97
+ return rows.map(r => r.path)
98
+ }
99
+
100
+ /** Batch-set multiple hashes inside a single transaction (fast for init) */
101
+ setBatch(entries: { path: string; hash: string; sizeBytes: number }[]): void {
102
+ const insert = this.db.prepare(
103
+ `INSERT OR REPLACE INTO file_hashes
104
+ (path, hash, size_bytes, updated_at) VALUES (?, ?, ?, ?)`
105
+ )
106
+ const now = Date.now()
107
+ const runAll = this.db.transaction((rows: typeof entries) => {
108
+ for (const row of rows) {
109
+ insert.run(row.path, row.hash, row.sizeBytes, now)
110
+ }
111
+ })
112
+ runAll(entries)
113
+ }
114
+
115
+ /** Close the database connection */
116
+ close(): void {
117
+ this.db.close()
118
+ }
119
+ }
package/src/hash/index.ts CHANGED
@@ -1,3 +1,3 @@
1
- export { hashContent, hashFile, hashFunctionBody } from './file-hasher.js'
2
- export { computeModuleHash, computeRootHash } from './tree-hasher.js'
3
- export { HashStore } from './hash-store.js'
1
+ export { hashContent, hashFile, hashFunctionBody } from './file-hasher.js'
2
+ export { computeModuleHash, computeRootHash } from './tree-hasher.js'
3
+ export { HashStore } from './hash-store.js'
@@ -1,20 +1,20 @@
1
- import { hashContent } from './file-hasher.js'
2
-
3
- /**
4
- * Compute Merkle tree hash for a module from its file hashes.
5
- * Sort to ensure order doesn't matter.
6
- */
7
- export function computeModuleHash(fileHashes: string[]): string {
8
- const sorted = [...fileHashes].sort()
9
- return hashContent(sorted.join(''))
10
- }
11
-
12
- /**
13
- * Compute root hash from all module hashes.
14
- */
15
- export function computeRootHash(moduleHashes: Record<string, string>): string {
16
- const sorted = Object.entries(moduleHashes)
17
- .sort(([a], [b]) => a.localeCompare(b))
18
- .map(([, hash]) => hash)
19
- return hashContent(sorted.join(''))
20
- }
1
+ import { hashContent } from './file-hasher.js'
2
+
3
+ /**
4
+ * Compute Merkle tree hash for a module from its file hashes.
5
+ * Sort to ensure order doesn't matter.
6
+ */
7
+ export function computeModuleHash(fileHashes: string[]): string {
8
+ const sorted = [...fileHashes].sort()
9
+ return hashContent(sorted.join(''))
10
+ }
11
+
12
+ /**
13
+ * Compute root hash from all module hashes.
14
+ */
15
+ export function computeRootHash(moduleHashes: Record<string, string>): string {
16
+ const sorted = Object.entries(moduleHashes)
17
+ .sort(([a], [b]) => a.localeCompare(b))
18
+ .map(([, hash]) => hash)
19
+ return hashContent(sorted.join(''))
20
+ }
package/src/index.ts CHANGED
@@ -1,12 +1,12 @@
1
- // @getmikk/core — Public API
2
- // Every other package imports from '@getmikk/core'
3
-
4
- export * from './parser/index.js'
5
- export * from './graph/index.js'
6
- export * from './contract/index.js'
7
- export * from './hash/index.js'
8
- export * from './utils/errors.js'
9
- export * from './utils/logger.js'
10
- export { discoverFiles, readFileContent, writeFileContent, fileExists, setupMikkDirectory } from './utils/fs.js'
11
- export { minimatch } from './utils/minimatch.js'
12
- export { scoreFunctions, findFuzzyMatches, levenshtein, splitCamelCase, extractKeywords } from './utils/fuzzy-match.js'
1
+ // @getmikk/core — Public API
2
+ // Every other package imports from '@getmikk/core'
3
+
4
+ export * from './parser/index.js'
5
+ export * from './graph/index.js'
6
+ export * from './contract/index.js'
7
+ export * from './hash/index.js'
8
+ export * from './utils/errors.js'
9
+ export * from './utils/logger.js'
10
+ export { discoverFiles, readFileContent, writeFileContent, fileExists, setupMikkDirectory } from './utils/fs.js'
11
+ export { minimatch } from './utils/minimatch.js'
12
+ export { scoreFunctions, findFuzzyMatches, levenshtein, splitCamelCase, extractKeywords } from './utils/fuzzy-match.js'
@@ -1,16 +1,16 @@
1
- import type { ParsedFile, ParsedImport } from './types.js'
2
-
3
- /**
4
- * Abstract base class all language parsers extend.
5
- * Forces consistency — every parser implements the same interface.
6
- */
7
- export abstract class BaseParser {
8
- /** Given raw file content as a string, return ParsedFile */
9
- abstract parse(filePath: string, content: string): ParsedFile
10
-
11
- /** Given a list of parsed files, resolve all import paths to absolute project paths */
12
- abstract resolveImports(files: ParsedFile[], projectRoot: string): ParsedFile[]
13
-
14
- /** Returns which file extensions this parser handles */
15
- abstract getSupportedExtensions(): string[]
16
- }
1
+ import type { ParsedFile, ParsedImport } from './types.js'
2
+
3
+ /**
4
+ * Abstract base class all language parsers extend.
5
+ * Forces consistency — every parser implements the same interface.
6
+ */
7
+ export abstract class BaseParser {
8
+ /** Given raw file content as a string, return ParsedFile */
9
+ abstract parse(filePath: string, content: string): ParsedFile
10
+
11
+ /** Given a list of parsed files, resolve all import paths to absolute project paths */
12
+ abstract resolveImports(files: ParsedFile[], projectRoot: string): ParsedFile[]
13
+
14
+ /** Returns which file extensions this parser handles */
15
+ abstract getSupportedExtensions(): string[]
16
+ }