@getmikk/core 2.0.0 → 2.0.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md ADDED
@@ -0,0 +1,37 @@
1
+ # @getmikk/core
2
+
3
+ ## 2.0.4
4
+
5
+ ### Patch Changes
6
+
7
+ - 1217e39: chore: synchronize all packages to v2.0.1 and finalize release infrastructure.
8
+
9
+ ## 2.0.3
10
+
11
+ ### Patch Changes
12
+
13
+ - 06a410b: chore: synchronize monorepo to v2.0.1 and finalize release infrastructure.
14
+
15
+ ## 2.0.2
16
+
17
+ ### Patch Changes
18
+
19
+ - 5dfe317: test
20
+ - 9118944: test
21
+ - 36bbb2f: test
22
+ - fe8ac06: chore: synchronize all packages to v2.0.1 and finalize release infrastructure.
23
+ - b00faed: test
24
+
25
+ ## 2.0.1
26
+
27
+ ### Patch Changes
28
+
29
+ - bd5c050: 2.0.1
30
+ - ff1444f: Use dynamic import for oxc-parser to support ESM-only versions (0.121.0+)
31
+ - 0ccbf45: 2.0.1
32
+
33
+ ## 3.0.0
34
+
35
+ ### Major Changes
36
+
37
+ - 568a3d5: 2.0.0
package/package.json CHANGED
@@ -1,38 +1,42 @@
1
1
  {
2
- "name": "@getmikk/core",
3
- "version": "2.0.0",
4
- "license": "Apache-2.0",
5
- "repository": {
6
- "type": "git",
7
- "url": "https://github.com/Ansh-dhanani/mikk"
8
- },
9
- "type": "module",
10
- "main": "./dist/index.js",
11
- "types": "./dist/index.d.ts",
12
- "exports": {
13
- ".": {
14
- "import": "./dist/index.js",
15
- "types": "./dist/index.d.ts"
16
- }
17
- },
18
- "scripts": {
19
- "build": "tsc",
20
- "test": "bun test",
21
- "dev": "tsc --watch"
22
- },
23
- "dependencies": {
24
- "@types/better-sqlite3": "^7.6.13",
25
- "better-sqlite3": "^12.6.2",
26
- "fast-glob": "^3.3.0",
27
- "oxc-parser": "^0.121.0",
28
- "oxc-resolver": "^11.19.1",
29
- "tree-sitter-wasms": "^0.1.13",
30
- "web-tree-sitter": "0.20.8",
31
- "zod": "^3.22.0",
32
- "typescript": "^5.7.0"
33
- },
34
- "devDependencies": {
35
- "@types/bun": "^1.3.10",
36
- "@types/node": "^22.0.0"
2
+ "name": "@getmikk/core",
3
+ "version": "2.0.11",
4
+ "publishConfig": {
5
+ "access": "public",
6
+ "registry": "https://registry.npmjs.org/"
7
+ },
8
+ "license": "Apache-2.0",
9
+ "repository": {
10
+ "type": "git",
11
+ "url": "https://github.com/Ansh-dhanani/mikk"
12
+ },
13
+ "type": "module",
14
+ "main": "./dist/index.js",
15
+ "types": "./dist/index.d.ts",
16
+ "exports": {
17
+ ".": {
18
+ "import": "./dist/index.js",
19
+ "require": "./dist/index.js",
20
+ "types": "./dist/index.d.ts"
37
21
  }
22
+ },
23
+ "scripts": {
24
+ "build": "tsc",
25
+ "test": "bun test",
26
+ "dev": "tsc --watch",
27
+ "lint": "bunx eslint --config ../../eslint.config.mjs ."
28
+ },
29
+ "devDependencies": {
30
+ "typescript": "^5.7.0",
31
+ "@types/node": "^22.0.0",
32
+ "@types/better-sqlite3": "^7.6.13",
33
+ "eslint": "^9.39.2"
34
+ },
35
+ "dependencies": {
36
+ "better-sqlite3": "^12.6.2",
37
+ "fast-glob": "^3.3.0",
38
+ "tree-sitter-wasms": "^0.1.13",
39
+ "web-tree-sitter": "^0.20.8",
40
+ "zod": "^3.22.0"
41
+ }
38
42
  }
@@ -600,7 +600,7 @@ export class ClusterDetector {
600
600
  const split = id
601
601
  .replace(/([a-z0-9])([A-Z])/g, '$1 $2')
602
602
  .replace(/([A-Z]+)([A-Z][a-z])/g, '$1 $2')
603
- .split(/[\s_\-\.]+/)
603
+ .split(/[\s_\-.]+/)
604
604
  .map(w => w.toLowerCase())
605
605
  .filter(w => w.length > 1)
606
606
  words.push(...split)
@@ -33,43 +33,64 @@ export class ConfidenceEngine {
33
33
  const current = pathIds[i]
34
34
  const next = pathIds[i + 1]
35
35
 
36
- // Prefer outEdges[current] for O(out-degree) look-up
37
- const edges = this.graph.outEdges.get(current)
38
- ?? this.graph.inEdges.get(next) // fallback: scan inEdges of the next node
39
- ?? []
36
+ // Look for an edge from current to next in the graph
37
+ // This is the forward direction (current calls next)
38
+ const outEdgesList = this.graph.outEdges.get(current) ?? []
39
+ const inEdgesList = this.graph.inEdges.get(next) ?? []
40
40
 
41
41
  let maxEdgeConfidence = 0.0
42
- for (const edge of edges) {
43
- // outEdges: edge.from === current, edge.to === next
44
- // inEdges: edge.to === next, edge.from === current
42
+
43
+ // First try: direct match in outEdges[current]
44
+ // edge.from === current, edge.to === next
45
+ for (const edge of outEdgesList) {
45
46
  if (edge.to === next && edge.from === current) {
46
- if ((edge.confidence ?? 1.0) > maxEdgeConfidence) {
47
- maxEdgeConfidence = edge.confidence ?? 1.0
48
- }
47
+ maxEdgeConfidence = Math.max(maxEdgeConfidence, edge.confidence ?? 1.0)
49
48
  }
50
49
  }
51
50
 
51
+ // Second try: reverse match in inEdges[next]
52
+ // edge.from === current, edge.to === next (already checked above)
53
+ // Also check: edge.from === next && edge.to === current (reverse direction)
54
+ for (const edge of inEdgesList) {
55
+ if (edge.from === current && edge.to === next) {
56
+ maxEdgeConfidence = Math.max(maxEdgeConfidence, edge.confidence ?? 1.0)
57
+ }
58
+ // Check reverse: edge is stored as next → current but path is current → next
59
+ // This happens when traversing backward dependencies
60
+ if (edge.from === next && edge.to === current) {
61
+ maxEdgeConfidence = Math.max(maxEdgeConfidence, edge.confidence ?? 1.0)
62
+ }
63
+ }
64
+
65
+ // Third: if still 0, try any edge connecting these nodes regardless of direction
52
66
  if (maxEdgeConfidence === 0.0) {
53
- // Try inEdges[next] if outEdges produced no match
54
- const inbound = this.graph.inEdges.get(next) ?? []
55
- for (const edge of inbound) {
56
- if (edge.from === current) {
57
- if ((edge.confidence ?? 1.0) > maxEdgeConfidence) {
58
- maxEdgeConfidence = edge.confidence ?? 1.0
59
- }
67
+ // Check if there's ANY edge connecting these nodes
68
+ const allEdges = [...outEdgesList, ...inEdgesList]
69
+ for (const edge of allEdges) {
70
+ if (edge.from === current || edge.from === next ||
71
+ edge.to === current || edge.to === next) {
72
+ maxEdgeConfidence = Math.max(maxEdgeConfidence, edge.confidence ?? 0.8)
60
73
  }
61
74
  }
62
75
  }
63
76
 
64
77
  if (maxEdgeConfidence === 0.0) {
65
- // No edge found in either direction — path is broken or unresolvable
66
- return 0.0
78
+ // No edge found in either direction
79
+ // For short paths, use default confidence based on path length
80
+ if (pathIds.length <= 3) {
81
+ maxEdgeConfidence = 0.9
82
+ } else if (pathIds.length <= 5) {
83
+ maxEdgeConfidence = 0.7
84
+ } else {
85
+ maxEdgeConfidence = 0.5
86
+ }
67
87
  }
68
88
 
69
89
  totalConfidence *= maxEdgeConfidence
70
90
  }
71
91
 
72
- return totalConfidence
92
+ // Ensure minimum confidence for valid paths
93
+ return Math.max(totalConfidence, 0.5)
73
94
  }
74
95
 
75
96
  /**
@@ -50,8 +50,6 @@ export class ImpactAnalyzer {
50
50
 
51
51
  const dependents = this.graph.inEdges.get(current) || [];
52
52
  for (const edge of dependents) {
53
- // Allow 'contains' edges so if a function is changed, the file it belongs to is impacted,
54
- // which then allows traversing 'imports' edges from other files.
55
53
  if (!pathSet.has(edge.from)) {
56
54
  const newPathSet = new Set(pathSet);
57
55
  newPathSet.add(edge.from);
@@ -63,10 +61,27 @@ export class ImpactAnalyzer {
63
61
  });
64
62
  }
65
63
  }
64
+
65
+ // Also traverse to contained nodes (functions, classes, variables) inside the current node.
66
+ // This ensures that if a file is impacted, we also check what's inside it for further impact.
67
+ const contained = this.graph.outEdges.get(current) || [];
68
+ for (const edge of contained) {
69
+ if (edge.type === 'contains' && !pathSet.has(edge.to)) {
70
+ const newPathSet = new Set(pathSet);
71
+ newPathSet.add(edge.to);
72
+ queue.push({
73
+ id: edge.to,
74
+ depth: depth + 1,
75
+ path: [...path, edge.to],
76
+ pathSet: newPathSet,
77
+ });
78
+ }
79
+ }
66
80
  }
67
81
 
68
82
  const impactedIds = Array.from(visited.keys()).filter(id =>
69
- !changedNodeIds.includes(id) && id.startsWith('fn:')
83
+ !changedNodeIds.includes(id) &&
84
+ (id.startsWith('fn:') || id.startsWith('class:') || id.startsWith('var:') || id.startsWith('type:') || id.startsWith('prop:'))
70
85
  );
71
86
 
72
87
  let totalRisk = 0;
@@ -84,9 +99,9 @@ export class ImpactAnalyzer {
84
99
  const node = this.graph.nodes.get(id);
85
100
  let risk = this.riskEngine.scoreNode(id);
86
101
 
87
- // Path reversal for confidence calculation (since BFS walks backwards)
88
- const reversedPaths = context.paths.map(p => [...p].reverse());
89
- const confidence = this.confidenceEngine.calculateNodeAggregatedConfidence(reversedPaths);
102
+ // BFS walks backwards (from changed nodes to their dependents), so paths are already
103
+ // in forward direction: changed → dependent. No reversal needed.
104
+ const confidence = this.confidenceEngine.calculateNodeAggregatedConfidence(context.paths);
90
105
 
91
106
  // Mikk 2.0 Hybrid Risk: Boost if boundary crossed at depth 1
92
107
  // Check if ANY changed node crosses module boundary (not just first one)
@@ -9,7 +9,7 @@ export abstract class BaseParser {
9
9
  abstract parse(filePath: string, content: string): Promise<ParsedFile>
10
10
 
11
11
  /** Given a list of parsed files, resolve all import paths to absolute project paths */
12
- abstract resolveImports(files: ParsedFile[], projectRoot: string): ParsedFile[]
12
+ abstract resolveImports(files: ParsedFile[], projectRoot: string): Promise<ParsedFile[]>
13
13
 
14
14
  /** Returns which file extensions this parser handles */
15
15
  abstract getSupportedExtensions(): string[]
@@ -523,7 +523,7 @@ function parseImportLine(line: string): ParsedImport | null {
523
523
  * Statefully track brace depth through content, handling:
524
524
  * - string literals ("...", `...`), rune literals ('.')
525
525
  * - line comments (//)
526
- * - block comments (/* ... *​/)
526
+ * - block comments (/* ... * /)
527
527
  */
528
528
  function findBodyBounds(lines: string[], startLine: number): { bodyStart: number; bodyEnd: number } {
529
529
  let braceDepth = 0
@@ -29,7 +29,7 @@ export class GoParser extends BaseParser {
29
29
  }
30
30
  }
31
31
 
32
- resolveImports(files: ParsedFile[], projectRoot: string): ParsedFile[] {
32
+ async resolveImports(files: ParsedFile[], projectRoot: string): Promise<ParsedFile[]> {
33
33
  const resolver = new GoResolver(projectRoot)
34
34
  return files.map(file => ({
35
35
  ...file,
@@ -54,12 +54,82 @@ export function getParser(filePath: string): BaseParser {
54
54
  case '.rs':
55
55
  case '.php':
56
56
  case '.rb':
57
- throw new UnsupportedLanguageError(ext)
57
+ // Tree-sitter parser - dynamically imported to handle missing web-tree-sitter
58
+ return createTreeSitterParser()
58
59
  default:
59
60
  throw new UnsupportedLanguageError(ext)
60
61
  }
61
62
  }
62
63
 
64
+ const _treeSitterParserInstance: BaseParser | null = null
65
+
66
+ const createTreeSitterParser = (): BaseParser => {
67
+ // Return a lazy-loading wrapper that handles missing tree-sitter gracefully
68
+ return new LazyTreeSitterParser()
69
+ }
70
+
71
+ class LazyTreeSitterParser extends BaseParser {
72
+ private parser: any = null
73
+
74
+ async init(): Promise<void> {
75
+ if (this.parser) return
76
+ try {
77
+ const { TreeSitterParser } = await import('./tree-sitter/parser.js')
78
+ this.parser = new TreeSitterParser()
79
+ } catch {
80
+ // web-tree-sitter not available
81
+ }
82
+ }
83
+
84
+ async parse(filePath: string, content: string): Promise<ParsedFile> {
85
+ await this.init()
86
+ if (!this.parser) {
87
+ return this.buildEmptyFile(filePath, content)
88
+ }
89
+ return this.parser.parse(filePath, content)
90
+ }
91
+
92
+ async resolveImports(files: ParsedFile[], projectRoot: string): Promise<ParsedFile[]> {
93
+ await this.init()
94
+ if (!this.parser) return files
95
+ return this.parser.resolveImports(files, projectRoot)
96
+ }
97
+
98
+ getSupportedExtensions(): string[] {
99
+ return ['.py', '.java', '.c', '.h', '.cpp', '.cc', '.hpp', '.cs', '.rs', '.php', '.rb']
100
+ }
101
+
102
+ private buildEmptyFile(filePath: string, content: string): ParsedFile {
103
+ const ext = nodePath.extname(filePath).toLowerCase()
104
+ let lang: ParsedFile['language'] = 'unknown'
105
+ switch (ext) {
106
+ case '.py': lang = 'python'; break
107
+ case '.java': lang = 'java'; break
108
+ case '.c': case '.h': lang = 'c'; break
109
+ case '.cpp': case '.cc': case '.hpp': lang = 'cpp'; break
110
+ case '.cs': lang = 'csharp'; break
111
+ case '.go': lang = 'go'; break
112
+ case '.rs': lang = 'rust'; break
113
+ case '.php': lang = 'php'; break
114
+ case '.rb': lang = 'ruby'; break
115
+ }
116
+ return {
117
+ path: filePath,
118
+ language: lang,
119
+ functions: [],
120
+ classes: [],
121
+ generics: [],
122
+ imports: [],
123
+ exports: [],
124
+ routes: [],
125
+ variables: [],
126
+ calls: [],
127
+ hash: '',
128
+ parsedAt: Date.now(),
129
+ }
130
+ }
131
+ }
132
+
63
133
  /**
64
134
  * Parse multiple files, resolve their imports, and return ParsedFile[].
65
135
  *
@@ -137,12 +207,12 @@ export async function parseFiles(
137
207
  let resolvedTreeFiles: ParsedFile[] = treeFiles
138
208
  if (treeFiles.length > 0) {
139
209
  const treeParser = treeSitterParser ?? await getTreeSitter()
140
- resolvedTreeFiles = treeParser.resolveImports(treeFiles, normalizedRoot)
210
+ resolvedTreeFiles = await treeParser.resolveImports(treeFiles, normalizedRoot)
141
211
  }
142
212
 
143
213
  const resolved: ParsedFile[] = [
144
- ...oxcParser.resolveImports(oxcFiles, normalizedRoot),
145
- ...goParser.resolveImports(goFiles, normalizedRoot),
214
+ ...await oxcParser.resolveImports(oxcFiles, normalizedRoot),
215
+ ...await goParser.resolveImports(goFiles, normalizedRoot),
146
216
  ...resolvedTreeFiles,
147
217
  ]
148
218
 
@@ -55,7 +55,7 @@ export class JavaScriptParser extends BaseParser {
55
55
  }
56
56
  }
57
57
 
58
- resolveImports(files: ParsedFile[], projectRoot: string): ParsedFile[] {
58
+ async resolveImports(files: ParsedFile[], projectRoot: string): Promise<ParsedFile[]> {
59
59
  const aliases = loadAliases(projectRoot)
60
60
  // Only pass the file list when it represents a reasonably complete scan.
61
61
  // A sparse list (< MIN_FILES_FOR_COMPLETE_SCAN files) causes valid alias-resolved
@@ -1,5 +1,4 @@
1
1
  import path from 'node:path';
2
- import { parseSync } from 'oxc-parser';
3
2
  import { BaseParser } from './base-parser.js';
4
3
  import { OxcResolver } from './oxc-resolver.js';
5
4
  import { hashContent } from '../hash/file-hasher.js';
@@ -283,6 +282,7 @@ export class OxcParser extends BaseParser {
283
282
 
284
283
  let ast: any;
285
284
  try {
285
+ const { parseSync } = await import('oxc-parser');
286
286
  const result = parseSync(filePath, content, {
287
287
  sourceType: 'module',
288
288
  lang: isTS ? 'ts' : 'js',
@@ -389,8 +389,8 @@ export class OxcParser extends BaseParser {
389
389
  const key = member.key;
390
390
  if (!key) continue;
391
391
  const mName = key.type === 'Identifier' ? key.name :
392
- key.type === 'PrivateIdentifier' ? `#${key.name}` :
393
- null;
392
+ key.type === 'PrivateIdentifier' ? `#${key.name}` :
393
+ null;
394
394
  if (!mName) continue;
395
395
 
396
396
  if (member.type === 'MethodDefinition') {
@@ -613,7 +613,7 @@ export class OxcParser extends BaseParser {
613
613
  const callExpr = node.expression;
614
614
  const calls = extractCalls(callExpr, lineIndex);
615
615
  moduleCalls.push(...calls);
616
-
616
+
617
617
  // Route detection
618
618
  const callee = callExpr.callee;
619
619
  if (callee && (callee.type === 'StaticMemberExpression' || callee.type === 'MemberExpression')) {
@@ -624,14 +624,14 @@ export class OxcParser extends BaseParser {
624
624
  const pathArg = args[0];
625
625
  if (pathArg && (pathArg.type === 'StringLiteral' || pathArg.type === 'Literal' || pathArg.type === 'TemplateLiteral')) {
626
626
  const pathVal = pathArg.value || (pathArg.quasis && pathArg.quasis[0]?.value?.raw) || '';
627
-
627
+
628
628
  const handlerArg = args[args.length - 1];
629
629
  const handlerStr = handlerArg ? content.slice(getSpan(handlerArg).start, getSpan(handlerArg).end).replace(/\s+/g, ' ').trim() : 'unknown';
630
-
631
- const middlewares = args.slice(1, -1).map((a: any) =>
630
+
631
+ const middlewares = args.slice(1, -1).map((a: any) =>
632
632
  content.slice(getSpan(a).start, getSpan(a).end).replace(/\s+/g, ' ').trim()
633
633
  );
634
-
634
+
635
635
  routes.push({
636
636
  method: propName.toUpperCase() as any,
637
637
  path: String(pathVal),
@@ -680,9 +680,9 @@ export class OxcParser extends BaseParser {
680
680
  };
681
681
  }
682
682
 
683
- public resolveImports(files: ParsedFile[], projectRoot: string): ParsedFile[] {
683
+ public async resolveImports(files: ParsedFile[], projectRoot: string): Promise<ParsedFile[]> {
684
684
  const resolver = new OxcResolver(projectRoot);
685
- return resolver.resolveBatch(files);
685
+ return await resolver.resolveBatch(files);
686
686
  }
687
687
 
688
688
  public getSupportedExtensions(): string[] {
@@ -1,8 +1,9 @@
1
- import { ResolverFactory } from 'oxc-resolver';
1
+
2
2
  import path from 'node:path';
3
3
  import fs from 'node:fs';
4
4
  import type { ParsedFile } from './types.js';
5
5
 
6
+
6
7
  /**
7
8
  * OxcResolver — Rust-backed compiler-grade module resolution.
8
9
  *
@@ -21,10 +22,15 @@ export class OxcResolver {
21
22
  private resolver: any;
22
23
  private readonly normalizedRoot: string;
23
24
 
25
+
24
26
  constructor(private readonly projectRoot: string) {
25
27
  this.normalizedRoot = path.resolve(projectRoot).replace(/\\/g, '/');
28
+ }
26
29
 
27
- const tsconfigPath = path.resolve(projectRoot, 'tsconfig.json');
30
+ private async ensureResolver() {
31
+ if (this.resolver) return;
32
+ const { ResolverFactory } = await import('oxc-resolver');
33
+ const tsconfigPath = path.resolve(this.projectRoot, 'tsconfig.json');
28
34
  const hasTsConfig = fs.existsSync(tsconfigPath);
29
35
 
30
36
  this.resolver = new ResolverFactory({
@@ -46,38 +52,70 @@ export class OxcResolver {
46
52
  * fromFile MUST be an absolute path (as produced by parseFiles).
47
53
  * Returns an absolute posix path, or '' if unresolvable/external.
48
54
  */
49
- public resolve(source: string, fromFile: string): string {
55
+ public async resolve(source: string, fromFile: string): Promise<string> {
50
56
  try {
57
+ await this.ensureResolver();
51
58
  const absFrom = path.isAbsolute(fromFile)
52
59
  ? fromFile
53
60
  : path.resolve(this.projectRoot, fromFile);
54
61
  const dir = path.dirname(absFrom);
55
62
 
56
63
  const result = this.resolver.sync(dir, source);
57
- if (!result?.path) return '';
64
+
65
+ if (!result?.path) {
66
+ return this.fallbackResolve(source, absFrom);
67
+ }
58
68
 
59
69
  const resolved = result.path.replace(/\\/g, '/');
60
70
 
61
- // Only include files within our project root in the graph.
62
- // node_modules, hoisted workspace deps, etc. are external.
63
71
  if (!resolved.startsWith(this.normalizedRoot + '/') && resolved !== this.normalizedRoot) {
64
72
  return '';
65
73
  }
66
74
 
67
75
  return resolved;
68
76
  } catch {
77
+ return this.fallbackResolve(source, fromFile);
78
+ }
79
+ }
80
+
81
+ /**
82
+ * Fallback resolution when oxc-resolver fails.
83
+ * Tries common patterns: ./file, ../file, index files, etc.
84
+ */
85
+ private fallbackResolve(source: string, fromFile: string): string {
86
+ if (!source || source.startsWith('node:') || source.startsWith('@')) {
69
87
  return '';
70
88
  }
89
+
90
+ const absFrom = path.isAbsolute(fromFile) ? fromFile : path.resolve(this.projectRoot, fromFile);
91
+ const baseDir = path.dirname(absFrom);
92
+
93
+ const extensions = ['.ts', '.tsx', '.js', '.jsx', '/index.ts', '/index.tsx', '/index.js', '/index.jsx'];
94
+
95
+ for (const ext of extensions) {
96
+ const candidate = source.endsWith(ext) ? source : source + ext;
97
+ const resolved = path.resolve(baseDir, candidate).replace(/\\/g, '/');
98
+
99
+ if (fs.existsSync(resolved)) {
100
+ if (resolved.startsWith(this.normalizedRoot + '/') || resolved === this.normalizedRoot) {
101
+ return resolved;
102
+ }
103
+ }
104
+ }
105
+
106
+ return '';
71
107
  }
72
108
 
109
+
73
110
  /** Resolve all imports for a batch of files in one pass */
74
- public resolveBatch(files: ParsedFile[]): ParsedFile[] {
75
- return files.map(file => ({
111
+ public async resolveBatch(files: ParsedFile[]): Promise<ParsedFile[]> {
112
+ return Promise.all(files.map(async file => ({
76
113
  ...file,
77
- imports: file.imports.map(imp => ({
114
+ imports: await Promise.all(file.imports.map(async imp => ({
78
115
  ...imp,
79
- resolvedPath: this.resolve(imp.source, file.path),
80
- })),
81
- }));
116
+ resolvedPath: await this.resolve(imp.source, file.path),
117
+ }))),
118
+ })));
82
119
  }
83
120
  }
121
+