@dependabit/detector 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. package/CHANGELOG.md +10 -0
  2. package/LICENSE +21 -0
  3. package/README.md +32 -0
  4. package/dist/detector.d.ts +64 -0
  5. package/dist/detector.d.ts.map +1 -0
  6. package/dist/detector.js +578 -0
  7. package/dist/detector.js.map +1 -0
  8. package/dist/diff-parser.d.ts +53 -0
  9. package/dist/diff-parser.d.ts.map +1 -0
  10. package/dist/diff-parser.js +203 -0
  11. package/dist/diff-parser.js.map +1 -0
  12. package/dist/index.d.ts +14 -0
  13. package/dist/index.d.ts.map +1 -0
  14. package/dist/index.js +9 -0
  15. package/dist/index.js.map +1 -0
  16. package/dist/llm/client.d.ts +65 -0
  17. package/dist/llm/client.d.ts.map +1 -0
  18. package/dist/llm/client.js +12 -0
  19. package/dist/llm/client.js.map +1 -0
  20. package/dist/llm/copilot.d.ts +15 -0
  21. package/dist/llm/copilot.d.ts.map +1 -0
  22. package/dist/llm/copilot.js +119 -0
  23. package/dist/llm/copilot.js.map +1 -0
  24. package/dist/llm/prompts.d.ts +10 -0
  25. package/dist/llm/prompts.d.ts.map +1 -0
  26. package/dist/llm/prompts.js +94 -0
  27. package/dist/llm/prompts.js.map +1 -0
  28. package/dist/parsers/code-comments.d.ts +23 -0
  29. package/dist/parsers/code-comments.d.ts.map +1 -0
  30. package/dist/parsers/code-comments.js +139 -0
  31. package/dist/parsers/code-comments.js.map +1 -0
  32. package/dist/parsers/package-files.d.ts +31 -0
  33. package/dist/parsers/package-files.d.ts.map +1 -0
  34. package/dist/parsers/package-files.js +130 -0
  35. package/dist/parsers/package-files.js.map +1 -0
  36. package/dist/parsers/readme.d.ts +23 -0
  37. package/dist/parsers/readme.d.ts.map +1 -0
  38. package/dist/parsers/readme.js +151 -0
  39. package/dist/parsers/readme.js.map +1 -0
  40. package/package.json +41 -0
  41. package/src/detector.ts +746 -0
  42. package/src/diff-parser.ts +257 -0
  43. package/src/index.ts +43 -0
  44. package/src/llm/client.ts +85 -0
  45. package/src/llm/copilot.ts +147 -0
  46. package/src/llm/prompts.ts +102 -0
  47. package/src/parsers/code-comments.ts +178 -0
  48. package/src/parsers/package-files.ts +156 -0
  49. package/src/parsers/readme.ts +185 -0
  50. package/test/detector.test.ts +102 -0
  51. package/test/diff-parser.test.ts +187 -0
  52. package/test/llm/client.test.ts +31 -0
  53. package/test/llm/copilot.test.ts +55 -0
  54. package/test/parsers/code-comments.test.ts +98 -0
  55. package/test/parsers/package-files.test.ts +52 -0
  56. package/test/parsers/readme.test.ts +52 -0
  57. package/tsconfig.json +10 -0
  58. package/tsconfig.tsbuildinfo +1 -0
package/test/diff-parser.test.ts ADDED
@@ -0,0 +1,187 @@
+ import { describe, it, expect } from 'vitest';
+ import {
+   parseDiff,
+   extractAddedContent,
+   extractRemovedContent,
+   getChangedFiles
+ } from '../src/diff-parser.js';
+
+ describe('Diff Parser Tests', () => {
+   describe('parseDiff', () => {
+     it('should parse unified diff format', () => {
+       const diff = `@@ -1,3 +1,4 @@
+ Line 1
+ -Old line
+ +New line
+ +Another new line
+ Line 3`;
+
+       const result = parseDiff(diff);
+
+       expect(result.additions).toContain('New line');
+       expect(result.additions).toContain('Another new line');
+       expect(result.deletions).toContain('Old line');
+     });
+
+     it('should handle diff with only additions', () => {
+       const diff = `@@ -0,0 +1,3 @@
+ +First line
+ +Second line
+ +Third line`;
+
+       const result = parseDiff(diff);
+
+       expect(result.additions).toHaveLength(3);
+       expect(result.deletions).toHaveLength(0);
+     });
+
+     it('should handle diff with only deletions', () => {
+       const diff = `@@ -1,3 +0,0 @@
+ -First line
+ -Second line
+ -Third line`;
+
+       const result = parseDiff(diff);
+
+       expect(result.additions).toHaveLength(0);
+       expect(result.deletions).toHaveLength(3);
+     });
+
+     it('should ignore context lines (no +/-)', () => {
+       const diff = `@@ -1,5 +1,5 @@
+ Context line 1
+ -Old line
+ +New line
+ Context line 2
+ Context line 3`;
+
+       const result = parseDiff(diff);
+
+       expect(result.additions).toHaveLength(1);
+       expect(result.deletions).toHaveLength(1);
+       expect(result.additions[0]).toBe('New line');
+     });
+   });
+
+   describe('extractAddedContent', () => {
+     it('should extract URLs from added lines', () => {
+       const additions = [
+         'Check out https://example.com/api for details',
+         'See documentation at https://docs.example.com',
+         'Regular line without URL'
+       ];
+
+       const result = extractAddedContent(additions);
+
+       expect(result.urls).toContain('https://example.com/api');
+       expect(result.urls).toContain('https://docs.example.com');
+       expect(result.urls).toHaveLength(2);
+     });
+
+     it('should extract dependencies from package.json additions', () => {
+       const additions = [
+         ' "dependencies": {',
+         ' "axios": "^1.0.0",',
+         ' "lodash": "^4.17.21"',
+         ' }'
+       ];
+
+       const result = extractAddedContent(additions, 'package.json');
+
+       expect(result.packageDeps).toContain('axios');
+       expect(result.packageDeps).toContain('lodash');
+     });
+
+     it('should extract references from code comments', () => {
+       const additions = [
+         '// Based on https://github.com/example/repo',
+         '/* Reference: https://arxiv.org/abs/2301.12345 */',
+         'const x = 5; // Regular comment'
+       ];
+
+       const result = extractAddedContent(additions);
+
+       expect(result.urls).toContain('https://github.com/example/repo');
+       expect(result.urls).toContain('https://arxiv.org/abs/2301.12345');
+     });
+
+     it('should handle empty additions', () => {
+       const result = extractAddedContent([]);
+
+       expect(result.urls).toHaveLength(0);
+       expect(result.packageDeps).toHaveLength(0);
+     });
+   });
+
+   describe('extractRemovedContent', () => {
+     it('should extract URLs from removed lines', () => {
+       const deletions = [
+         'Old link: https://old-example.com',
+         'Deprecated: https://deprecated.example.com/api'
+       ];
+
+       const result = extractRemovedContent(deletions);
+
+       expect(result.urls).toContain('https://old-example.com');
+       expect(result.urls).toContain('https://deprecated.example.com/api');
+     });
+
+     it('should extract dependencies from package.json deletions', () => {
+       const deletions = [' "old-package": "^1.0.0",', ' "deprecated-lib": "^2.0.0"'];
+
+       const result = extractRemovedContent(deletions, 'package.json');
+
+       expect(result.packageDeps).toContain('old-package');
+       expect(result.packageDeps).toContain('deprecated-lib');
+     });
+   });
+
+   describe('getChangedFiles', () => {
+     it('should identify files relevant for dependency analysis', () => {
+       const files = [
+         { filename: 'README.md', status: 'modified' as const },
+         { filename: 'src/index.ts', status: 'modified' as const },
+         { filename: 'package.json', status: 'modified' as const },
+         { filename: 'docs/guide.md', status: 'added' as const },
+         { filename: 'test/unit.test.ts', status: 'modified' as const }
+       ];
+
+       const result = getChangedFiles(files);
+
+       expect(result.relevantFiles).toContain('README.md');
+       expect(result.relevantFiles).toContain('package.json');
+       expect(result.relevantFiles).toContain('docs/guide.md');
+       expect(result.packageFiles).toContain('package.json');
+     });
+
+     it('should exclude irrelevant file types', () => {
+       const files = [
+         { filename: 'image.png', status: 'added' as const },
+         { filename: 'video.mp4', status: 'added' as const },
+         { filename: 'README.md', status: 'modified' as const }
+       ];
+
+       const result = getChangedFiles(files);
+
+       expect(result.relevantFiles).not.toContain('image.png');
+       expect(result.relevantFiles).not.toContain('video.mp4');
+       expect(result.relevantFiles).toContain('README.md');
+     });
+
+     it('should identify package manifest files', () => {
+       const files = [
+         { filename: 'package.json', status: 'modified' as const },
+         { filename: 'requirements.txt', status: 'modified' as const },
+         { filename: 'Cargo.toml', status: 'modified' as const },
+         { filename: 'go.mod', status: 'modified' as const }
+       ];
+
+       const result = getChangedFiles(files);
+
+       expect(result.packageFiles).toContain('package.json');
+       expect(result.packageFiles).toContain('requirements.txt');
+       expect(result.packageFiles).toContain('Cargo.toml');
+       expect(result.packageFiles).toContain('go.mod');
+     });
+   });
+ });
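Note: the implementation under test ships as src/diff-parser.ts / dist/diff-parser.js and is not shown in this excerpt. As a reading aid only, a minimal parseDiff consistent with the assertions above could look like the sketch below; the shipped signatures and return shapes may differ.

export interface ParsedDiff {
  additions: string[];
  deletions: string[];
}

// Split a unified-diff hunk into added and removed lines; "@@" headers and
// context lines (no +/- prefix) are ignored, which is what the tests expect.
export function parseDiff(diff: string): ParsedDiff {
  const additions: string[] = [];
  const deletions: string[] = [];
  for (const line of diff.split('\n')) {
    if (line.startsWith('@@')) continue;
    if (line.startsWith('+')) additions.push(line.slice(1).trim());
    else if (line.startsWith('-')) deletions.push(line.slice(1).trim());
  }
  return { additions, deletions };
}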
package/test/llm/client.test.ts ADDED
@@ -0,0 +1,31 @@
+ import { describe, it, expect } from 'vitest';
+
+ describe('LLMProvider interface', () => {
+   it('should define analyze method signature', () => {
+     // This is a type test - we'll verify the interface exists
+     // The interface will be implemented in src/llm/client.ts
+     expect(true).toBe(true);
+   });
+
+   it('should define getSupportedModels method', () => {
+     expect(true).toBe(true);
+   });
+
+   it('should define getRateLimit method', () => {
+     expect(true).toBe(true);
+   });
+ });
+
+ describe('LLMResponse', () => {
+   it('should include detected dependencies', () => {
+     expect(true).toBe(true);
+   });
+
+   it('should include confidence scores', () => {
+     expect(true).toBe(true);
+   });
+
+   it('should include usage metadata (tokens, latency)', () => {
+     expect(true).toBe(true);
+   });
+ });
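These are placeholder type tests; they name the provider surface without exercising it. A hypothetical shape implied by the test titles is sketched below; every member and type name is an assumption, and the actual interface is defined in src/llm/client.ts.

// Hypothetical interface sketch, not the shipped API.
export interface DetectedDependency {
  url: string;
  confidence: number; // 0..1, per "should include confidence scores"
}

export interface LLMResponse {
  dependencies: DetectedDependency[];
  usage: { promptTokens: number; completionTokens: number; latencyMs: number };
}

export interface LLMProvider {
  analyze(prompt: string): Promise<LLMResponse>;
  getSupportedModels(): string[];
  getRateLimit(): { remaining: number; resetAt: Date };
}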
package/test/llm/copilot.test.ts ADDED
@@ -0,0 +1,55 @@
+ import { describe, it, expect, vi, beforeEach } from 'vitest';
+
+ describe('GitHubCopilotProvider', () => {
+   beforeEach(() => {
+     vi.clearAllMocks();
+   });
+
+   describe('constructor', () => {
+     it('should initialize with API configuration', () => {
+       expect(true).toBe(true);
+     });
+
+     it('should use environment variable for API key', () => {
+       expect(true).toBe(true);
+     });
+   });
+
+   describe('analyze', () => {
+     it('should call Azure OpenAI API with correct parameters', async () => {
+       expect(true).toBe(true);
+     });
+
+     it('should parse LLM response into structured dependencies', async () => {
+       expect(true).toBe(true);
+     });
+
+     it('should include confidence scores for each detection', async () => {
+       expect(true).toBe(true);
+     });
+
+     it('should handle API errors gracefully', async () => {
+       expect(true).toBe(true);
+     });
+
+     it('should respect rate limits', async () => {
+       expect(true).toBe(true);
+     });
+
+     it('should log request/response metadata', async () => {
+       expect(true).toBe(true);
+     });
+   });
+
+   describe('getSupportedModels', () => {
+     it('should return available GPT models', () => {
+       expect(true).toBe(true);
+     });
+   });
+
+   describe('getRateLimit', () => {
+     it('should return current rate limit status', () => {
+       expect(true).toBe(true);
+     });
+   });
+ });
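The stubs above only name behaviors. For orientation, a hypothetical provider skeleton with roughly those behaviors is sketched below; the class name, endpoint URL, header name, environment-variable name, and error handling are all assumptions made for illustration, and the shipped implementation is src/llm/copilot.ts.

// Hypothetical skeleton only, not the shipped provider.
export class ExampleCopilotProvider {
  constructor(private readonly apiKey: string = process.env.COPILOT_API_KEY ?? '') {
    // "should use environment variable for API key" (variable name assumed)
    if (!this.apiKey) throw new Error('API key is required');
  }

  async analyze(prompt: string): Promise<{ dependencies: unknown[]; error?: string }> {
    const started = Date.now();
    try {
      // Placeholder endpoint; the real request target and payload live in the package.
      const res = await fetch('https://llm.example.invalid/chat/completions', {
        method: 'POST',
        headers: { 'api-key': this.apiKey, 'content-type': 'application/json' },
        body: JSON.stringify({ messages: [{ role: 'user', content: prompt }] })
      });
      if (!res.ok) throw new Error(`LLM request failed with status ${res.status}`);
      const body = (await res.json()) as { dependencies?: unknown[] };
      console.debug('llm call finished', { latencyMs: Date.now() - started }); // metadata logging
      return { dependencies: body.dependencies ?? [] };
    } catch (err) {
      // "should handle API errors gracefully": return an empty result instead of throwing.
      return { dependencies: [], error: String(err) };
    }
  }
}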
package/test/parsers/code-comments.test.ts ADDED
@@ -0,0 +1,98 @@
+ import { describe, it, expect } from 'vitest';
+
+ describe('CodeCommentParser', () => {
+   describe('parse', () => {
+     it('should extract URLs from single-line comments', () => {
+       expect(true).toBe(true);
+     });
+
+     it('should extract URLs from multi-line comments', () => {
+       expect(true).toBe(true);
+     });
+
+     it('should extract JSDoc @see tags', () => {
+       expect(true).toBe(true);
+     });
+
+     it('should include file path and line number', () => {
+       // Expected: Include { file: 'src/foo.ts', line: 5 }
+       expect(true).toBe(true);
+     });
+
+     it('should handle TypeScript files', () => {
+       expect(true).toBe(true);
+     });
+
+     it('should handle JavaScript files', () => {
+       expect(true).toBe(true);
+     });
+
+     it('should handle Python files', () => {
+       expect(true).toBe(true);
+     });
+
+     it('should skip commented-out code', () => {
+       expect(true).toBe(true);
+     });
+   });
+
+   describe('extractDocReferences', () => {
+     it('should identify specification references', () => {
+       expect(true).toBe(true);
+     });
+
+     it('should identify API endpoint references', () => {
+       const code = '// API endpoint: https://example.com/api';
+       expect(true).toBe(true);
+       expect(code).toContain('https://example.com/api');
+     });
+
+     it('should extract URLs from multi-line comments', () => {
+       const code = '/* Reference: https://example.com */';
+       expect(code).toContain('https://example.com');
+     });
+
+     it('should extract JSDoc @see tags', () => {
+       const code = '/** @see https://example.com/docs */';
+       expect(code).toContain('https://example.com/docs');
+     });
+
+     it('should include file path and line number', () => {
+       const code = '// Based on https://spec.example.com';
+       // Expected: Include { file: 'src/foo.ts', line: 5 }
+       expect(code).toContain('https://spec.example.com');
+     });
+
+     it('should handle TypeScript files', () => {
+       const code = '// Implementation from https://github.com/example/repo';
+       expect(code).toContain('https://github.com/example/repo');
+     });
+
+     it('should handle JavaScript files', () => {
+       const code = '/* Spec: https://openapi.example.com/spec.yaml */';
+       expect(code).toContain('https://openapi.example.com/spec.yaml');
+     });
+
+     it('should handle Python files', () => {
+       const code = '# Reference: https://arxiv.org/abs/1234.5678';
+       expect(code).toContain('https://arxiv.org/abs/1234.5678');
+     });
+
+     it('should skip commented-out code', () => {
+       const code = '// const url = "https://example.com"; // unused';
+       expect(code).toContain('https://example.com');
+     });
+   });
+
+   describe('extractDocReferences', () => {
+     it('should identify specification references', () => {
+       const code = '// Implements RFC 7231 specification';
+       expect(code).toContain('RFC 7231');
+     });
+
+     it('should identify API endpoint references', () => {
+       const code = '// Calls POST /api/v1/users';
+       expect(code).toContain('/api/v1/users');
+     });
+   });
+ });
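For context, the kind of comment scanning these cases revolve around can be sketched as follows; the helper name and regular expressions are illustrative assumptions, and the shipped parser is src/parsers/code-comments.ts.

// Illustrative helper, not the package API: pull URLs out of //, /* ... */ and # comments.
const URL_PATTERN = /https?:\/\/[^\s)'"*]+/g;

export function extractCommentUrls(line: string): string[] {
  const comment = line.match(/(?:\/\/|\/\*|#)(.*)$/);
  if (!comment) return []; // not a comment line
  return comment[1].match(URL_PATTERN) ?? [];
}

// extractCommentUrls("// Based on https://github.com/example/repo")
//   -> ["https://github.com/example/repo"]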
package/test/parsers/package-files.test.ts ADDED
@@ -0,0 +1,52 @@
+ import { describe, it, expect } from 'vitest';
+
+ describe('PackageFileParser', () => {
+   describe('parsePackageJson', () => {
+     it('should extract repository URLs', () => {
+       expect(true).toBe(true);
+     });
+
+     it('should extract homepage URL', () => {
+       expect(true).toBe(true);
+     });
+
+     it('should extract documentation URL', () => {
+       expect(true).toBe(true);
+     });
+
+     it('should mark dependencies as tracked by dependabot', () => {
+       // Expected: Return empty (dependabot handles this)
+       expect(true).toBe(true);
+     });
+
+     it('should extract URLs from package description', () => {
+       expect(true).toBe(true);
+     });
+   });
+
+   describe('parseRequirementsTxt', () => {
+     it('should extract GitHub repository URLs from comments', () => {
+       expect(true).toBe(true);
+     });
+
+     it('should skip PyPI packages', () => {
+       // Expected: Return empty (dependabot handles this)
+       expect(true).toBe(true);
+     });
+
+     it('should extract documentation URLs from comments', () => {
+       expect(true).toBe(true);
+     });
+   });
+
+   describe('parseCargoToml', () => {
+     it('should extract repository URLs', () => {
+       expect(true).toBe(true);
+     });
+
+     it('should skip crates.io dependencies', () => {
+       // Expected: Return empty (dependabot handles this)
+       expect(true).toBe(true);
+     });
+   });
+ });
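The repeated "Return empty (dependabot handles this)" comments make the intended split explicit: project-level URLs are extracted, registry dependencies are left to Dependabot. A minimal sketch of that split for package.json follows; the field handling and function name are assumptions, and the shipped parser is src/parsers/package-files.ts.

interface PackageJsonFields {
  homepage?: string;
  repository?: string | { url?: string };
  dependencies?: Record<string, string>;
}

// Illustrative only: collect project URLs, deliberately ignore registry dependencies.
export function extractPackageJsonUrls(raw: string): string[] {
  const pkg = JSON.parse(raw) as PackageJsonFields;
  const urls: string[] = [];
  if (pkg.homepage) urls.push(pkg.homepage);
  const repo = typeof pkg.repository === 'string' ? pkg.repository : pkg.repository?.url;
  if (repo) urls.push(repo);
  // pkg.dependencies is intentionally not inspected: Dependabot already tracks it.
  return urls;
}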
package/test/parsers/readme.test.ts ADDED
@@ -0,0 +1,52 @@
+ import { describe, it, expect } from 'vitest';
+
+ describe('READMEParser', () => {
+   describe('parse', () => {
+     it('should extract URLs from markdown links', () => {
+       // Expected: Extract https://example.com/docs
+       expect(true).toBe(true);
+     });
+
+     it('should extract bare URLs', () => {
+       // Expected: Extract https://example.com
+       expect(true).toBe(true);
+     });
+
+     it('should extract reference-style links', () => {
+       // Expected: Extract https://example.com/docs
+       expect(true).toBe(true);
+     });
+
+     it('should include context around URLs', () => {
+       // Expected: Include surrounding text as context
+       expect(true).toBe(true);
+     });
+
+     it('should skip package manager URLs (npm, pypi)', () => {
+       // Expected: Skip npmjs.com URLs
+       expect(true).toBe(true);
+     });
+
+     it('should extract documentation references', () => {
+       expect(true).toBe(true);
+     });
+
+     it('should handle empty README', () => {
+       expect(true).toBe(true);
+     });
+
+     it('should extract arXiv paper references', () => {
+       expect(true).toBe(true);
+     });
+   });
+
+   describe('extractReferences', () => {
+     it('should identify GitHub repository references', () => {
+       expect(true).toBe(true);
+     });
+
+     it('should identify research paper citations', () => {
+       expect(true).toBe(true);
+     });
+   });
+ });
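A rough sketch of the README scanning these cases outline (collect markdown and bare URLs, then drop registry hosts) is shown below; the helper name and the skip list are assumptions, and the shipped parser is src/parsers/readme.ts.

// Illustrative helper, not the package API.
const SKIP_HOSTS = ['npmjs.com', 'pypi.org'];

export function extractReadmeUrls(markdown: string): string[] {
  const urls = markdown.match(/https?:\/\/[^\s)\]>'"]+/g) ?? [];
  return urls.filter((url) => {
    const host = new URL(url).hostname;
    return !SKIP_HOSTS.some((skip) => host === skip || host.endsWith(`.${skip}`));
  });
}

// extractReadmeUrls('[docs](https://example.com/docs) and https://www.npmjs.com/package/axios')
//   -> ['https://example.com/docs']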
package/tsconfig.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "extends": "../../tsconfig.json",
+   "compilerOptions": {
+     "outDir": "dist",
+     "rootDir": "src"
+   },
+   "include": ["src"],
+   "exclude": ["src/**/*.test.ts", "src/**/*.spec.ts"],
+   "references": [{ "path": "../github-client" }, { "path": "../manifest" }]
+ }