@dependabit/detector 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +10 -0
- package/LICENSE +21 -0
- package/README.md +32 -0
- package/dist/detector.d.ts +64 -0
- package/dist/detector.d.ts.map +1 -0
- package/dist/detector.js +578 -0
- package/dist/detector.js.map +1 -0
- package/dist/diff-parser.d.ts +53 -0
- package/dist/diff-parser.d.ts.map +1 -0
- package/dist/diff-parser.js +203 -0
- package/dist/diff-parser.js.map +1 -0
- package/dist/index.d.ts +14 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +9 -0
- package/dist/index.js.map +1 -0
- package/dist/llm/client.d.ts +65 -0
- package/dist/llm/client.d.ts.map +1 -0
- package/dist/llm/client.js +12 -0
- package/dist/llm/client.js.map +1 -0
- package/dist/llm/copilot.d.ts +15 -0
- package/dist/llm/copilot.d.ts.map +1 -0
- package/dist/llm/copilot.js +119 -0
- package/dist/llm/copilot.js.map +1 -0
- package/dist/llm/prompts.d.ts +10 -0
- package/dist/llm/prompts.d.ts.map +1 -0
- package/dist/llm/prompts.js +94 -0
- package/dist/llm/prompts.js.map +1 -0
- package/dist/parsers/code-comments.d.ts +23 -0
- package/dist/parsers/code-comments.d.ts.map +1 -0
- package/dist/parsers/code-comments.js +139 -0
- package/dist/parsers/code-comments.js.map +1 -0
- package/dist/parsers/package-files.d.ts +31 -0
- package/dist/parsers/package-files.d.ts.map +1 -0
- package/dist/parsers/package-files.js +130 -0
- package/dist/parsers/package-files.js.map +1 -0
- package/dist/parsers/readme.d.ts +23 -0
- package/dist/parsers/readme.d.ts.map +1 -0
- package/dist/parsers/readme.js +151 -0
- package/dist/parsers/readme.js.map +1 -0
- package/package.json +41 -0
- package/src/detector.ts +746 -0
- package/src/diff-parser.ts +257 -0
- package/src/index.ts +43 -0
- package/src/llm/client.ts +85 -0
- package/src/llm/copilot.ts +147 -0
- package/src/llm/prompts.ts +102 -0
- package/src/parsers/code-comments.ts +178 -0
- package/src/parsers/package-files.ts +156 -0
- package/src/parsers/readme.ts +185 -0
- package/test/detector.test.ts +102 -0
- package/test/diff-parser.test.ts +187 -0
- package/test/llm/client.test.ts +31 -0
- package/test/llm/copilot.test.ts +55 -0
- package/test/parsers/code-comments.test.ts +98 -0
- package/test/parsers/package-files.test.ts +52 -0
- package/test/parsers/readme.test.ts +52 -0
- package/tsconfig.json +10 -0
- package/tsconfig.tsbuildinfo +1 -0
|
@@ -0,0 +1,257 @@
|
|
|
1
|
+
/**
 * Diff Parser
 * Parse git diffs to extract meaningful changes for dependency analysis
 */

import type { CommitFile } from '@dependabit/github-client';

/** Raw line-level result of parsing one unified-diff patch. */
export interface DiffParseResult {
  // Lines added by the patch, with the leading '+' stripped.
  additions: string[];
  // Lines removed by the patch, with the leading '-' stripped.
  deletions: string[];
}

/** URLs and package names mined from a set of diff lines. */
export interface ExtractedContent {
  urls: string[];
  packageDeps: string[];
}

/** Classification of a commit's files for dependency analysis. */
export interface ChangedFilesResult {
  // Every file worth analyzing (superset of the two lists below).
  relevantFiles: string[];
  // Package-manager manifests (package.json, Cargo.toml, ...).
  packageFiles: string[];
  // READMEs and files under docs/ or documentation/ directories.
  documentationFiles: string[];
}

// URL pattern to match HTTP(S) URLs.
// NOTE(review): this may capture trailing punctuation (e.g. a closing
// parenthesis in Markdown links) — confirm downstream tolerance.
const URL_PATTERN = /https?:\/\/[^\s<>"{}|\\^`[\]]+/gi;

// Package dependency patterns, keyed by manifest type. Each pattern's
// first capture group is the dependency name.
const PACKAGE_DEP_PATTERNS = {
  // Matches `"name": "1.2.3"` style entries; only numeric/^~ versions,
  // so git/file/tag specifiers are intentionally not captured.
  packageJson: /"([^"]+)":\s*"[\^~]?[\d.]+"/g,
  // Matches `name==1.0` / `name>=1.0` style requirement lines.
  requirementsTxt: /^([a-zA-Z0-9_-]+)[>=<~!]=.*/gm,
  // Matches `crate = ...` assignments while excluding known [package]
  // metadata keys via the negative lookahead. NOTE(review): keys of other
  // non-dependency tables would still match — verify acceptable.
  cargoToml:
    /^(?!\s*(?:name|version|authors|edition|description|license|workspace|build|default-run|repository|homepage|documentation|readme|keywords|categories|exclude|include|publish|resolver)\s*=)\s*([a-zA-Z0-9_-]+)\s*=.*/gm
};

// File extensions relevant for dependency analysis
const RELEVANT_EXTENSIONS = [
  '.md',
  '.txt',
  '.rst',
  '.adoc', // Documentation
  '.ts',
  '.js',
  '.py',
  '.rs',
  '.go',
  '.java',
  '.cpp',
  '.c',
  '.h', // Code
  '.json',
  '.toml',
  '.yaml',
  '.yml', // Config
  '.html',
  '.xml' // Markup
];

// Package manifest files (matched case-insensitively against basenames)
const PACKAGE_MANIFEST_FILES = [
  'package.json',
  'requirements.txt',
  'Cargo.toml',
  'go.mod',
  'pom.xml',
  'build.gradle',
  'Gemfile',
  'composer.json'
];
|
|
69
|
+
|
|
70
|
+
/**
|
|
71
|
+
* Parse a unified diff and extract additions and deletions
|
|
72
|
+
*/
|
|
73
|
+
export function parseDiff(patch: string): DiffParseResult {
|
|
74
|
+
const additions: string[] = [];
|
|
75
|
+
const deletions: string[] = [];
|
|
76
|
+
|
|
77
|
+
if (!patch) {
|
|
78
|
+
return { additions, deletions };
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
const lines = patch.split('\n');
|
|
82
|
+
|
|
83
|
+
for (const line of lines) {
|
|
84
|
+
if (line.startsWith('+') && !line.startsWith('+++')) {
|
|
85
|
+
// Addition (remove the + prefix)
|
|
86
|
+
additions.push(line.substring(1));
|
|
87
|
+
} else if (line.startsWith('-') && !line.startsWith('---')) {
|
|
88
|
+
// Deletion (remove the - prefix)
|
|
89
|
+
deletions.push(line.substring(1));
|
|
90
|
+
}
|
|
91
|
+
// Ignore context lines (no prefix or space prefix)
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
return { additions, deletions };
|
|
95
|
+
}
|
|
96
|
+
|
|
97
|
+
/**
|
|
98
|
+
* Extract meaningful content from added lines
|
|
99
|
+
*/
|
|
100
|
+
export function extractAddedContent(additions: string[], filename?: string): ExtractedContent {
|
|
101
|
+
const urls: string[] = [];
|
|
102
|
+
const packageDeps: string[] = [];
|
|
103
|
+
|
|
104
|
+
const content = additions.join('\n');
|
|
105
|
+
|
|
106
|
+
// Extract URLs
|
|
107
|
+
const urlMatches = content.matchAll(URL_PATTERN);
|
|
108
|
+
for (const match of urlMatches) {
|
|
109
|
+
urls.push(match[0]);
|
|
110
|
+
}
|
|
111
|
+
|
|
112
|
+
// Extract package dependencies based on file type
|
|
113
|
+
if (filename) {
|
|
114
|
+
const lowerFilename = filename.toLowerCase();
|
|
115
|
+
if (lowerFilename === 'package.json') {
|
|
116
|
+
const depMatches = content.matchAll(PACKAGE_DEP_PATTERNS.packageJson);
|
|
117
|
+
for (const match of depMatches) {
|
|
118
|
+
if (match[1]) packageDeps.push(match[1]);
|
|
119
|
+
}
|
|
120
|
+
} else if (lowerFilename === 'requirements.txt') {
|
|
121
|
+
const depMatches = content.matchAll(PACKAGE_DEP_PATTERNS.requirementsTxt);
|
|
122
|
+
for (const match of depMatches) {
|
|
123
|
+
if (match[1]) packageDeps.push(match[1]);
|
|
124
|
+
}
|
|
125
|
+
} else if (lowerFilename === 'cargo.toml') {
|
|
126
|
+
const depMatches = content.matchAll(PACKAGE_DEP_PATTERNS.cargoToml);
|
|
127
|
+
for (const match of depMatches) {
|
|
128
|
+
if (match[1]) packageDeps.push(match[1]);
|
|
129
|
+
}
|
|
130
|
+
}
|
|
131
|
+
}
|
|
132
|
+
|
|
133
|
+
return {
|
|
134
|
+
urls: Array.from(new Set(urls)),
|
|
135
|
+
packageDeps: Array.from(new Set(packageDeps))
|
|
136
|
+
};
|
|
137
|
+
}
|
|
138
|
+
|
|
139
|
+
/**
|
|
140
|
+
* Extract meaningful content from removed lines
|
|
141
|
+
*/
|
|
142
|
+
export function extractRemovedContent(deletions: string[], filename?: string): ExtractedContent {
|
|
143
|
+
// Use the same logic as extractAddedContent
|
|
144
|
+
return extractAddedContent(deletions, filename);
|
|
145
|
+
}
|
|
146
|
+
|
|
147
|
+
/**
|
|
148
|
+
* Identify files relevant for dependency analysis
|
|
149
|
+
*
|
|
150
|
+
* Note: Filenames in relevantFiles preserve their original case from the commit.
|
|
151
|
+
* Case-insensitive matching is used for identification, but original casing is maintained
|
|
152
|
+
* for consistency with file system operations.
|
|
153
|
+
*/
|
|
154
|
+
export function getChangedFiles(files: CommitFile[]): ChangedFilesResult {
|
|
155
|
+
const relevantFiles: string[] = [];
|
|
156
|
+
const packageFiles: string[] = [];
|
|
157
|
+
const documentationFiles: string[] = [];
|
|
158
|
+
|
|
159
|
+
for (const file of files) {
|
|
160
|
+
const filename = file.filename.toLowerCase();
|
|
161
|
+
const basename = filename.split('/').pop() || '';
|
|
162
|
+
|
|
163
|
+
// Check if it's a package manifest file (case-insensitive comparison)
|
|
164
|
+
const isPackageFile = PACKAGE_MANIFEST_FILES.some(
|
|
165
|
+
(manifestFile) => manifestFile.toLowerCase() === basename
|
|
166
|
+
);
|
|
167
|
+
|
|
168
|
+
if (isPackageFile) {
|
|
169
|
+
packageFiles.push(file.filename);
|
|
170
|
+
relevantFiles.push(file.filename);
|
|
171
|
+
continue;
|
|
172
|
+
}
|
|
173
|
+
|
|
174
|
+
// Check if it's a documentation file
|
|
175
|
+
if (
|
|
176
|
+
basename.startsWith('readme') ||
|
|
177
|
+
filename.includes('/docs/') ||
|
|
178
|
+
filename.includes('/documentation/')
|
|
179
|
+
) {
|
|
180
|
+
documentationFiles.push(file.filename);
|
|
181
|
+
relevantFiles.push(file.filename);
|
|
182
|
+
continue;
|
|
183
|
+
}
|
|
184
|
+
|
|
185
|
+
// Check if it has a relevant extension
|
|
186
|
+
const hasRelevantExtension = RELEVANT_EXTENSIONS.some((ext) => filename.endsWith(ext));
|
|
187
|
+
|
|
188
|
+
if (hasRelevantExtension) {
|
|
189
|
+
relevantFiles.push(file.filename);
|
|
190
|
+
}
|
|
191
|
+
}
|
|
192
|
+
|
|
193
|
+
return {
|
|
194
|
+
relevantFiles: Array.from(new Set(relevantFiles)),
|
|
195
|
+
packageFiles: Array.from(new Set(packageFiles)),
|
|
196
|
+
documentationFiles: Array.from(new Set(documentationFiles))
|
|
197
|
+
};
|
|
198
|
+
}
|
|
199
|
+
|
|
200
|
+
/**
|
|
201
|
+
* Parse all diffs from commit files
|
|
202
|
+
*/
|
|
203
|
+
export function parseCommitDiffs(files: CommitFile[]): Map<string, DiffParseResult> {
|
|
204
|
+
const diffMap = new Map<string, DiffParseResult>();
|
|
205
|
+
|
|
206
|
+
for (const file of files) {
|
|
207
|
+
if (file.patch) {
|
|
208
|
+
diffMap.set(file.filename, parseDiff(file.patch));
|
|
209
|
+
}
|
|
210
|
+
}
|
|
211
|
+
|
|
212
|
+
return diffMap;
|
|
213
|
+
}
|
|
214
|
+
|
|
215
|
+
/**
|
|
216
|
+
* Extract all dependency-related content from commit diffs
|
|
217
|
+
*/
|
|
218
|
+
export function extractDependencyChanges(files: CommitFile[]): {
|
|
219
|
+
addedUrls: string[];
|
|
220
|
+
removedUrls: string[];
|
|
221
|
+
addedPackages: string[];
|
|
222
|
+
removedPackages: string[];
|
|
223
|
+
changedFiles: ChangedFilesResult;
|
|
224
|
+
} {
|
|
225
|
+
const changedFiles = getChangedFiles(files);
|
|
226
|
+
const allAddedUrls: string[] = [];
|
|
227
|
+
const allRemovedUrls: string[] = [];
|
|
228
|
+
const allAddedPackages: string[] = [];
|
|
229
|
+
const allRemovedPackages: string[] = [];
|
|
230
|
+
|
|
231
|
+
for (const file of files) {
|
|
232
|
+
if (!file.patch || !changedFiles.relevantFiles.includes(file.filename)) {
|
|
233
|
+
continue;
|
|
234
|
+
}
|
|
235
|
+
|
|
236
|
+
const diff = parseDiff(file.patch);
|
|
237
|
+
const basename = file.filename.split('/').pop();
|
|
238
|
+
|
|
239
|
+
// Extract added content
|
|
240
|
+
const addedContent = extractAddedContent(diff.additions, basename);
|
|
241
|
+
allAddedUrls.push(...addedContent.urls);
|
|
242
|
+
allAddedPackages.push(...addedContent.packageDeps);
|
|
243
|
+
|
|
244
|
+
// Extract removed content
|
|
245
|
+
const removedContent = extractRemovedContent(diff.deletions, basename);
|
|
246
|
+
allRemovedUrls.push(...removedContent.urls);
|
|
247
|
+
allRemovedPackages.push(...removedContent.packageDeps);
|
|
248
|
+
}
|
|
249
|
+
|
|
250
|
+
return {
|
|
251
|
+
addedUrls: Array.from(new Set(allAddedUrls)),
|
|
252
|
+
removedUrls: Array.from(new Set(allRemovedUrls)),
|
|
253
|
+
addedPackages: Array.from(new Set(allAddedPackages)),
|
|
254
|
+
removedPackages: Array.from(new Set(allRemovedPackages)),
|
|
255
|
+
changedFiles
|
|
256
|
+
};
|
|
257
|
+
}
|
package/src/index.ts
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
// Entry point for @dependabit/detector — re-exports the package's public API.
// Runtime values and type-only symbols are exported separately so that
// `isolatedModules` builds can erase types cleanly.

// LLM Provider: provider abstraction, response/usage types, and the Copilot
// CLI implementation plus its prompt builders.
export type {
  LLMProvider,
  LLMProviderConfig,
  LLMResponse,
  RateLimitInfo,
  DetectedDependency,
  LLMUsageMetadata
} from './llm/client.js';
export { GitHubCopilotProvider } from './llm/copilot.js';
export { SYSTEM_PROMPT, createDetectionPrompt, createClassificationPrompt } from './llm/prompts.js';

// Parsers: heuristic extractors for READMEs, code comments, and manifests.
export type { ExtractedReference } from './parsers/readme.js';
export { parseReadme, extractGitHubReferences } from './parsers/readme.js';

export type { CommentReference } from './parsers/code-comments.js';
export { parseCodeComments, extractSpecReferences } from './parsers/code-comments.js';

export type { PackageMetadata } from './parsers/package-files.js';
export {
  parsePackageJson,
  parseRequirementsTxt,
  parseCargoToml,
  parseGoMod
} from './parsers/package-files.js';

// Diff Parser: unified-diff parsing and dependency-change extraction.
export type { DiffParseResult, ExtractedContent, ChangedFilesResult } from './diff-parser.js';
export {
  parseDiff,
  extractAddedContent,
  extractRemovedContent,
  getChangedFiles,
  parseCommitDiffs,
  extractDependencyChanges
} from './diff-parser.js';

// Detector: the high-level orchestrator tying parsers and LLM together.
export type { DetectorOptions, DetectionResult } from './detector.js';
export { Detector } from './detector.js';
|
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
/**
 * LLM Provider Interface
 * Abstraction layer for different LLM providers (GitHub Copilot, Claude, OpenAI, etc.)
 */

// NOTE(review): `z` appears unused in this module — confirm before removing.
import type { z } from 'zod';

/** Snapshot of a provider's rate-limit state. */
export interface RateLimitInfo {
  // Requests remaining in the current window; -1 may mean "unknown".
  remaining: number;
  limit: number;
  resetAt: Date;
}

/** Token/latency accounting for a single LLM call. */
export interface LLMUsageMetadata {
  promptTokens: number;
  completionTokens: number;
  totalTokens: number;
  // Model identifier the provider reports (or was configured with).
  model: string;
  latencyMs: number;
}

/** One external informational dependency found by the LLM. */
export interface DetectedDependency {
  url: string;
  name: string;
  description?: string;
  type:
    | 'reference-implementation'
    | 'schema'
    | 'documentation'
    | 'research-paper'
    | 'api-example'
    | 'other';
  confidence: number; // 0.0 - 1.0
  reasoning?: string; // Why this was detected as a dependency
}

/** Full result of one analysis call. */
export interface LLMResponse {
  dependencies: DetectedDependency[];
  usage: LLMUsageMetadata;
  rawResponse?: string; // For debugging
}

/** Provider construction options; all fields optional with provider defaults. */
export interface LLMProviderConfig {
  apiKey?: string;
  endpoint?: string;
  model?: string;
  maxTokens?: number;
  temperature?: number;
}

/**
 * Base interface that all LLM providers must implement
 */
export interface LLMProvider {
  /**
   * Analyze content and detect external dependencies
   * @param content - Text content to analyze (README, code, etc.)
   * @param prompt - Detection prompt template
   * @returns LLM response with detected dependencies
   */
  analyze(content: string, prompt: string): Promise<LLMResponse>;

  /**
   * Get list of supported models for this provider
   */
  getSupportedModels(): string[];

  /**
   * Get current rate limit status
   */
  getRateLimit(): Promise<RateLimitInfo>;

  /**
   * Validate provider configuration
   */
  validateConfig(): boolean;
}
|
|
78
|
+
|
|
79
|
+
/**
|
|
80
|
+
* Create an LLM provider instance
|
|
81
|
+
*/
|
|
82
|
+
export function createLLMProvider(providerName: string, config: LLMProviderConfig): LLMProvider {
|
|
83
|
+
// Implementation will be in specific provider files
|
|
84
|
+
throw new Error(`Provider ${providerName} not yet implemented`);
|
|
85
|
+
}
|
|
@@ -0,0 +1,147 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* GitHub Copilot CLI Provider Implementation
|
|
3
|
+
* Integrates with GitHub Copilot via CLI commands
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import { exec } from 'node:child_process';
|
|
7
|
+
import { promisify } from 'node:util';
|
|
8
|
+
import type {
|
|
9
|
+
LLMProvider,
|
|
10
|
+
LLMProviderConfig,
|
|
11
|
+
LLMResponse,
|
|
12
|
+
RateLimitInfo,
|
|
13
|
+
DetectedDependency,
|
|
14
|
+
LLMUsageMetadata
|
|
15
|
+
} from './client.js';
|
|
16
|
+
import { SYSTEM_PROMPT } from './prompts.js';
|
|
17
|
+
|
|
18
|
+
const execAsync = promisify(exec);
|
|
19
|
+
|
|
20
|
+
export class GitHubCopilotProvider implements LLMProvider {
|
|
21
|
+
private config: Required<LLMProviderConfig>;
|
|
22
|
+
private model: string;
|
|
23
|
+
|
|
24
|
+
constructor(config: LLMProviderConfig = {}) {
|
|
25
|
+
// Default configuration for CLI-based approach
|
|
26
|
+
this.config = {
|
|
27
|
+
apiKey: config.apiKey || process.env['GITHUB_TOKEN'] || '',
|
|
28
|
+
endpoint: config.endpoint || '',
|
|
29
|
+
model: config.model || 'gpt-4',
|
|
30
|
+
maxTokens: config.maxTokens || 4000,
|
|
31
|
+
temperature: config.temperature || 0.3
|
|
32
|
+
};
|
|
33
|
+
|
|
34
|
+
this.model = this.config.model;
|
|
35
|
+
|
|
36
|
+
// GitHub Copilot CLI uses GitHub authentication, not a separate API key
|
|
37
|
+
// The GITHUB_TOKEN is used for authentication with GitHub, not OpenAI
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
async analyze(content: string, prompt: string): Promise<LLMResponse> {
|
|
41
|
+
const startTime = Date.now();
|
|
42
|
+
|
|
43
|
+
try {
|
|
44
|
+
// Combine system prompt and user prompt for CLI
|
|
45
|
+
const fullPrompt = `${SYSTEM_PROMPT}\n\n${prompt}`;
|
|
46
|
+
|
|
47
|
+
// Escape the prompt for shell safety (basic escaping)
|
|
48
|
+
const escapedPrompt = fullPrompt.replace(/"/g, '\\"').replace(/\$/g, '\\$');
|
|
49
|
+
|
|
50
|
+
// Use gh copilot suggest command to get AI response
|
|
51
|
+
// The --yes flag auto-accepts the suggestion, --shell-out returns raw output
|
|
52
|
+
const command = `echo "${escapedPrompt}" | gh copilot suggest --yes 2>&1`;
|
|
53
|
+
|
|
54
|
+
const { stdout, stderr } = await execAsync(command, {
|
|
55
|
+
maxBuffer: 10 * 1024 * 1024, // 10MB buffer for large responses
|
|
56
|
+
timeout: 60000 // 60 second timeout
|
|
57
|
+
});
|
|
58
|
+
|
|
59
|
+
const latencyMs = Date.now() - startTime;
|
|
60
|
+
|
|
61
|
+
if (stderr && !stdout) {
|
|
62
|
+
throw new Error(`Copilot CLI error: ${stderr}`);
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
// Try to parse the output as JSON
|
|
66
|
+
// Copilot CLI may return the JSON directly or wrapped in markdown
|
|
67
|
+
let content_text = stdout.trim();
|
|
68
|
+
|
|
69
|
+
// Remove markdown code blocks if present
|
|
70
|
+
if (content_text.includes('```json')) {
|
|
71
|
+
const jsonMatch = content_text.match(/```json\s*([\s\S]*?)```/);
|
|
72
|
+
if (jsonMatch && jsonMatch[1]) {
|
|
73
|
+
content_text = jsonMatch[1].trim();
|
|
74
|
+
}
|
|
75
|
+
} else if (content_text.includes('```')) {
|
|
76
|
+
const codeMatch = content_text.match(/```\s*([\s\S]*?)```/);
|
|
77
|
+
if (codeMatch && codeMatch[1]) {
|
|
78
|
+
content_text = codeMatch[1].trim();
|
|
79
|
+
}
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
let parsed: { dependencies: DetectedDependency[] };
|
|
83
|
+
|
|
84
|
+
try {
|
|
85
|
+
parsed = JSON.parse(content_text);
|
|
86
|
+
} catch (parseError) {
|
|
87
|
+
console.error('Failed to parse Copilot CLI response:', content_text, parseError);
|
|
88
|
+
// Return empty dependencies if parsing fails
|
|
89
|
+
parsed = { dependencies: [] };
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
// Estimate token usage (rough approximation since CLI doesn't provide this)
|
|
93
|
+
const estimatedTokens = Math.ceil(fullPrompt.length / 4) + Math.ceil(content_text.length / 4);
|
|
94
|
+
|
|
95
|
+
const usage: LLMUsageMetadata = {
|
|
96
|
+
promptTokens: Math.ceil(fullPrompt.length / 4),
|
|
97
|
+
completionTokens: Math.ceil(content_text.length / 4),
|
|
98
|
+
totalTokens: estimatedTokens,
|
|
99
|
+
model: this.model,
|
|
100
|
+
latencyMs
|
|
101
|
+
};
|
|
102
|
+
|
|
103
|
+
return {
|
|
104
|
+
dependencies: parsed.dependencies || [],
|
|
105
|
+
usage,
|
|
106
|
+
rawResponse: content_text
|
|
107
|
+
};
|
|
108
|
+
} catch (error) {
|
|
109
|
+
const latencyMs = Date.now() - startTime;
|
|
110
|
+
console.error('Copilot CLI analysis failed:', error);
|
|
111
|
+
|
|
112
|
+
// Return empty result on error
|
|
113
|
+
return {
|
|
114
|
+
dependencies: [],
|
|
115
|
+
usage: {
|
|
116
|
+
promptTokens: 0,
|
|
117
|
+
completionTokens: 0,
|
|
118
|
+
totalTokens: 0,
|
|
119
|
+
model: this.model,
|
|
120
|
+
latencyMs
|
|
121
|
+
},
|
|
122
|
+
rawResponse: error instanceof Error ? error.message : 'Unknown error'
|
|
123
|
+
};
|
|
124
|
+
}
|
|
125
|
+
}
|
|
126
|
+
|
|
127
|
+
getSupportedModels(): string[] {
|
|
128
|
+
// Copilot CLI uses GitHub's models, not directly specified
|
|
129
|
+
return ['github-copilot', 'gpt-4', 'gpt-4-turbo'];
|
|
130
|
+
}
|
|
131
|
+
|
|
132
|
+
async getRateLimit(): Promise<RateLimitInfo> {
|
|
133
|
+
// Copilot CLI doesn't expose rate limits directly
|
|
134
|
+
// Rate limiting is handled by GitHub's infrastructure
|
|
135
|
+
return {
|
|
136
|
+
remaining: -1, // Unknown
|
|
137
|
+
limit: -1, // Unknown
|
|
138
|
+
resetAt: new Date(0) // Unknown
|
|
139
|
+
};
|
|
140
|
+
}
|
|
141
|
+
|
|
142
|
+
validateConfig(): boolean {
|
|
143
|
+
// For CLI approach, we just need gh CLI to be available
|
|
144
|
+
// Authentication is handled by GitHub CLI itself
|
|
145
|
+
return true;
|
|
146
|
+
}
|
|
147
|
+
}
|
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Detection prompts for LLM-based dependency analysis
|
|
3
|
+
* Optimized for identifying external informational dependencies
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
/**
 * System prompt sent before every detection request. Defines what counts
 * as an "external informational dependency" (resources referenced but not
 * tracked by package managers) and pins the required JSON output schema,
 * which must stay in sync with the `DetectedDependency` type.
 */
export const SYSTEM_PROMPT = `You are an expert at analyzing code repositories to identify external informational dependencies.

Your task is to identify external resources that developers reference but are NOT tracked by package managers (npm, PyPI, Cargo, etc.).

INCLUDE these types of dependencies:
- GitHub repositories referenced but not declared in package files
- Documentation sites and API references
- OpenAPI/GraphQL schemas
- Research papers and arXiv preprints
- Reference implementations and code examples
- Technical specifications and RFCs

EXCLUDE these (handled by dependabot):
- NPM packages in package.json
- Python packages in requirements.txt
- Rust crates in Cargo.toml
- Docker images in Dockerfile
- Any declared package manager dependencies

For each dependency found, provide:
1. url: The complete URL
2. name: A descriptive name
3. description: What this dependency is used for
4. type: One of [reference-implementation, schema, documentation, research-paper, api-example, other]
5. confidence: A score from 0.0 to 1.0 indicating detection confidence
6. reasoning: Brief explanation of why this is a dependency

Return ONLY valid JSON in this format:
{
  "dependencies": [
    {
      "url": "https://example.com/resource",
      "name": "Resource Name",
      "description": "Purpose in the project",
      "type": "documentation",
      "confidence": 0.95,
      "reasoning": "Referenced in README as API documentation"
    }
  ]
}`;
|
|
46
|
+
|
|
47
|
+
export const DETECTION_PROMPT_TEMPLATE = `Analyze the following content from a code repository and identify external informational dependencies:
|
|
48
|
+
|
|
49
|
+
## Content Type: {contentType}
|
|
50
|
+
## File Path: {filePath}
|
|
51
|
+
|
|
52
|
+
## Content:
|
|
53
|
+
{content}
|
|
54
|
+
|
|
55
|
+
Remember:
|
|
56
|
+
- Focus on external resources NOT in package managers
|
|
57
|
+
- Provide confidence scores based on clarity of references
|
|
58
|
+
- Include context about how each dependency is used
|
|
59
|
+
- Return valid JSON only
|
|
60
|
+
|
|
61
|
+
Analyze and respond:`;
|
|
62
|
+
|
|
63
|
+
export function createDetectionPrompt(
|
|
64
|
+
contentType: string,
|
|
65
|
+
filePath: string,
|
|
66
|
+
content: string
|
|
67
|
+
): string {
|
|
68
|
+
return DETECTION_PROMPT_TEMPLATE.replace('{contentType}', contentType)
|
|
69
|
+
.replace('{filePath}', filePath)
|
|
70
|
+
.replace('{content}', content);
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
export const CLASSIFICATION_PROMPT_TEMPLATE = `Given this URL, classify its dependency type and suggest the best access method:
|
|
74
|
+
|
|
75
|
+
URL: {url}
|
|
76
|
+
Context: {context}
|
|
77
|
+
|
|
78
|
+
Classify as one of:
|
|
79
|
+
- reference-implementation: Example code demonstrating usage
|
|
80
|
+
- schema: OpenAPI, JSON Schema, GraphQL, Protocol Buffers
|
|
81
|
+
- documentation: API docs, tutorials, guides
|
|
82
|
+
- research-paper: Academic papers, arXiv preprints
|
|
83
|
+
- api-example: Code snippets from documentation
|
|
84
|
+
- other: If none of the above fit
|
|
85
|
+
|
|
86
|
+
Also determine the best access method:
|
|
87
|
+
- github-api: For GitHub repositories
|
|
88
|
+
- arxiv: For arXiv papers
|
|
89
|
+
- openapi: For OpenAPI specifications
|
|
90
|
+
- http: For generic web content
|
|
91
|
+
|
|
92
|
+
Return JSON:
|
|
93
|
+
{
|
|
94
|
+
"type": "documentation",
|
|
95
|
+
"accessMethod": "http",
|
|
96
|
+
"confidence": 0.9,
|
|
97
|
+
"reasoning": "URL structure suggests API documentation"
|
|
98
|
+
}`;
|
|
99
|
+
|
|
100
|
+
export function createClassificationPrompt(url: string, context: string): string {
|
|
101
|
+
return CLASSIFICATION_PROMPT_TEMPLATE.replace('{url}', url).replace('{context}', context);
|
|
102
|
+
}
|