@inkeep/agents-work-apps 0.0.0-dev-20260203033642
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.md +49 -0
- package/dist/db/index.d.ts +2 -0
- package/dist/db/index.js +3 -0
- package/dist/db/runDbClient.d.ts +6 -0
- package/dist/db/runDbClient.js +9 -0
- package/dist/env.d.ts +47 -0
- package/dist/env.js +48 -0
- package/dist/github/config.d.ts +22 -0
- package/dist/github/config.js +79 -0
- package/dist/github/index.d.ts +13 -0
- package/dist/github/index.js +23 -0
- package/dist/github/installation.d.ts +66 -0
- package/dist/github/installation.js +293 -0
- package/dist/github/jwks.d.ts +20 -0
- package/dist/github/jwks.js +85 -0
- package/dist/github/mcp/auth.d.ts +10 -0
- package/dist/github/mcp/auth.js +43 -0
- package/dist/github/mcp/index.d.ts +11 -0
- package/dist/github/mcp/index.js +670 -0
- package/dist/github/mcp/schemas.d.ts +87 -0
- package/dist/github/mcp/schemas.js +69 -0
- package/dist/github/mcp/utils.d.ts +228 -0
- package/dist/github/mcp/utils.js +464 -0
- package/dist/github/oidcToken.d.ts +22 -0
- package/dist/github/oidcToken.js +140 -0
- package/dist/github/routes/setup.d.ts +7 -0
- package/dist/github/routes/setup.js +217 -0
- package/dist/github/routes/tokenExchange.d.ts +7 -0
- package/dist/github/routes/tokenExchange.js +233 -0
- package/dist/github/routes/webhooks.d.ts +12 -0
- package/dist/github/routes/webhooks.js +278 -0
- package/dist/logger.d.ts +2 -0
- package/dist/logger.js +3 -0
- package/package.json +65 -0
|
@@ -0,0 +1,464 @@
|
|
|
1
|
+
import { env } from "../../env.js";
|
|
2
|
+
import { getLogger } from "../../logger.js";
|
|
3
|
+
import { createAppAuth } from "@octokit/auth-app";
|
|
4
|
+
import { Octokit } from "@octokit/rest";
|
|
5
|
+
import { minimatch } from "minimatch";
|
|
6
|
+
|
|
7
|
+
//#region src/github/mcp/utils.ts
// Module-scoped structured logger shared by all helpers in this module.
const logger = getLogger("github-mcp-utils");
|
|
9
|
+
/**
 * Resolve an authenticated Octokit client for `owner/repo`.
 *
 * Looks up the GitHub App installation id in `installationIdMap`
 * (keyed by "owner/repo") and delegates client construction to
 * getGitHubClientFromInstallationId.
 *
 * @throws Error when the repository has no known installation id
 */
function getGitHubClientFromRepo(owner, repo, installationIdMap) {
  const fullName = `${owner}/${repo}`;
  const id = installationIdMap.get(fullName);
  if (id) return getGitHubClientFromInstallationId(id);
  logger.error({
    owner,
    repo,
    installationIdMap
  }, "Installation ID not found for repository");
  throw new Error(`Installation ID not found for repository ${fullName}`);
}
|
|
22
|
+
/**
 * Build an Octokit client that authenticates as this GitHub App's
 * installation (installation access tokens via @octokit/auth-app).
 *
 * @param installationId - GitHub App installation id to authenticate as
 * @returns Octokit instance scoped to that installation
 * @throws Error when GITHUB_APP_ID or GITHUB_APP_PRIVATE_KEY is not configured
 */
function getGitHubClientFromInstallationId(installationId) {
  // Fail fast on missing configuration instead of surfacing an opaque
  // auth error from Octokit later. The original only checked the private
  // key; the app id is just as mandatory for createAppAuth.
  if (!env.GITHUB_APP_ID) {
    logger.error({ installationId }, "GITHUB_APP_ID is not set");
    throw new Error("GITHUB_APP_ID is not set");
  }
  if (!env.GITHUB_APP_PRIVATE_KEY) {
    logger.error({ installationId }, "GITHUB_APP_PRIVATE_KEY is not set");
    throw new Error("GITHUB_APP_PRIVATE_KEY is not set");
  }
  // Env vars commonly store the PEM with literal "\n"; restore real newlines.
  const privateKey = env.GITHUB_APP_PRIVATE_KEY.replace(/\\n/g, "\n");
  logger.info({ installationId }, "Creating GitHub client for installation ID");
  return new Octokit({
    authStrategy: createAppAuth,
    auth: {
      appId: env.GITHUB_APP_ID,
      privateKey,
      installationId
    }
  });
}
|
|
38
|
+
/**
 * Normalize a GitHub API user payload to the camelCase shape used
 * throughout this module ({ login, id, avatarUrl, url }).
 */
function mapUser(user) {
  const { login, id, avatar_url: avatarUrl, html_url: url } = user;
  return { login, id, avatarUrl, url };
}
|
|
46
|
+
/**
 * Fetch pull request details and normalize them to this module's shape:
 * number/title/body/author/url/state, base and head refs+SHAs, and
 * camelCase createdAt/updatedAt timestamps.
 */
async function fetchPrInfo(octokit, owner, repo, prNumber) {
  logger.info({
    owner,
    repo,
    prNumber
  }, `Fetching PR #${prNumber} details`);
  const response = await octokit.rest.pulls.get({
    owner,
    repo,
    pull_number: prNumber
  });
  const pr = response.data;
  const { number, title, body, state, created_at: createdAt, updated_at: updatedAt } = pr;
  return {
    number,
    title,
    body,
    author: mapUser(pr.user),
    url: pr.html_url,
    state,
    base: {
      ref: pr.base.ref,
      sha: pr.base.sha
    },
    head: {
      ref: pr.head.ref,
      sha: pr.head.sha
    },
    createdAt,
    updatedAt
  };
}
|
|
79
|
+
/**
 * Fetch all commits in a pull request, paging through the API 100 at a
 * time until a short page signals the end of the list.
 *
 * @throws Error (with the underlying message) when the API call fails
 */
async function fetchPrCommits(octokit, owner, repo, prNumber) {
  const PER_PAGE = 100;
  try {
    const allCommits = [];
    let pageNumber = 1;
    let batch;
    do {
      const response = await octokit.rest.pulls.listCommits({
        owner,
        repo,
        pull_number: prNumber,
        per_page: PER_PAGE,
        page: pageNumber
      });
      batch = response.data;
      allCommits.push(...batch);
      pageNumber += 1;
    } while (batch.length === PER_PAGE);
    return allCommits;
  } catch (error) {
    throw new Error(`Failed to get PR commits: ${error instanceof Error ? error.message : "Unknown error"}`);
  }
}
|
|
104
|
+
/**
 * Fetch detailed information about a specific commit (including its file
 * diffs, as returned by the GitHub "get a commit" endpoint).
 *
 * @throws Error (with the underlying message) when the API call fails
 */
async function fetchCommitDetails(octokit, owner, repo, commitSha) {
  try {
    const response = await octokit.rest.repos.getCommit({
      owner,
      repo,
      ref: commitSha
    });
    return response.data;
  } catch (error) {
    const reason = error instanceof Error ? error.message : "Unknown error";
    throw new Error(`Failed to get commit details: ${reason}`);
  }
}
|
|
118
|
+
/**
 * Fetch the changed files of a pull request, with optional glob path
 * filtering, optional full file contents, and optional unified-diff patch.
 *
 * @param octokit - authenticated Octokit client
 * @param owner - repository owner
 * @param repo - repository name
 * @param prNumber - pull request number
 * @param pathFilters - minimatch glob patterns; empty array means "all files"
 * @param includeContents - when true, fetch each file's contents at the PR head
 * @param includePatch - when true, include the per-file patch from the API
 * @returns array of changed-file records ({ path, status, additions, ... });
 *   commit_messages is initialized empty here (populated by fetchPrFileDiffs)
 */
async function fetchPrFiles(octokit, owner, repo, prNumber, pathFilters = [], includeContents = false, includePatch = false) {
  logger.info({
    owner,
    repo,
    prNumber,
    pathFilters,
    includeContents,
    includePatch
  }, `Fetching PR #${prNumber} changed files`);
  const files = [];
  // Head SHA is needed so file contents are read at the PR's current tip.
  const headSha = (await fetchPrInfo(octokit, owner, repo, prNumber)).head.sha;
  // Paginate through the full changed-file list, 100 per page.
  for await (const response of octokit.paginate.iterator(octokit.rest.pulls.listFiles, {
    owner,
    repo,
    pull_number: prNumber,
    per_page: 100
  })) for (const file of response.data) {
    // Skip files that match none of the requested glob filters.
    if (pathFilters.length > 0 && !pathFilters.some((filter) => minimatch(file.filename, filter))) continue;
    const changedFile = {
      commit_messages: [],
      path: file.filename,
      status: file.status,
      additions: file.additions,
      deletions: file.deletions,
      patch: includePatch ? file.patch : void 0,
      previousPath: file.previous_filename
    };
    // Optionally pull full file contents at the head SHA (deleted files
    // have no contents to fetch).
    if (includeContents && file.status !== "removed") try {
      const { data: content } = await octokit.rest.repos.getContent({
        owner,
        repo,
        path: file.filename,
        ref: headSha
      });
      if ("content" in content && content.encoding === "base64") changedFile.contents = Buffer.from(content.content, "base64").toString("utf-8");
    } catch (error) {
      // Content fetch is best-effort: log and keep the entry without contents.
      logger.warn({
        owner,
        repo,
        prNumber,
        headSha,
        file
      }, `Failed to fetch contents for ${file.filename}: ${error}`);
    }
    files.push(changedFile);
  }
  logger.info({
    owner,
    repo,
    prNumber,
    headSha,
    pathFilters,
    includeContents,
    files
  }, `Found ${files.length} changed files${pathFilters.length > 0 ? ` matching "${pathFilters.join(", ")}"` : ""}`);
  return files;
}
|
|
178
|
+
/**
 * Get file-based diffs with all commit messages that impacted each file.
 *
 * Builds a filename -> commits index from every commit in the PR, then joins
 * it against the PR's changed-file list so each returned entry carries the
 * messages of every commit that touched that path, plus the file's patch.
 *
 * @returns array of { commit_messages, path, status, additions, deletions, patch }
 * @throws Error when any underlying GitHub API call fails
 */
async function fetchPrFileDiffs(octokit, owner, repo, prNumber) {
  try {
    const commits = await fetchPrCommits(octokit, owner, repo, prNumber);
    // Fix: request patches explicitly (includePatch = true). The previous
    // call relied on the default (false), so every returned `patch` below
    // was undefined and the function never actually produced diffs.
    const prFiles = await fetchPrFiles(octokit, owner, repo, prNumber, [], false, true);
    const fileToCommits = {};
    // Index every file touched by every commit, keyed by filename.
    // NOTE: this fetches commit details sequentially; kept that way to
    // avoid hammering the API with parallel requests.
    for (const commit of commits) {
      const commitSha = commit.sha;
      const commitDetails = await fetchCommitDetails(octokit, owner, repo, commitSha);
      const commitMessage = commit.commit.message;
      for (const fileInfo of commitDetails.files || []) {
        const filename = fileInfo.filename;
        if (!fileToCommits[filename]) fileToCommits[filename] = [];
        fileToCommits[filename].push({
          commit_sha: commitSha,
          commit_message: commitMessage,
          file_info: fileInfo
        });
      }
    }
    // Join the PR's net changed files with the per-file commit messages.
    const fileDiffs = [];
    for (const prFile of prFiles) {
      const filename = prFile.path;
      if (filename in fileToCommits) {
        const commitMessages = fileToCommits[filename].map((commitData) => commitData.commit_message);
        const diff = prFile.patch;
        const additions = prFile.additions || 0;
        const deletions = prFile.deletions || 0;
        const githubFileDiff = {
          commit_messages: commitMessages,
          path: filename,
          status: prFile.status,
          additions,
          deletions,
          patch: diff
        };
        fileDiffs.push(githubFileDiff);
      }
    }
    return fileDiffs;
  } catch (error) {
    throw new Error(`Failed to get PR file diffs: ${error instanceof Error ? error.message : "Unknown error"}`);
  }
}
|
|
224
|
+
/**
 * Generate a markdown summary of a pull request: header metadata, branch
 * info, description, and a per-file breakdown (additions/deletions plus the
 * first line of each related commit message, de-duplicated).
 */
function generatePrMarkdown(pr, fileDiffs, owner, repo) {
  const parts = [];
  parts.push(`# Pull Request #${pr.number}: ${pr.title}\n\n`);
  parts.push(`**Repository:** ${owner}/${repo}\n`);
  parts.push(`**State:** ${pr.state}\n`);
  parts.push(`**Author:** ${pr.author.login}\n`);
  parts.push(`**Created:** ${new Date(pr.createdAt).toLocaleDateString()}\n`);
  parts.push(`**Updated:** ${new Date(pr.updatedAt).toLocaleDateString()}\n\n`);
  parts.push("## Branches\n");
  parts.push(`- **From:** \`${pr.head.ref}\`\n`);
  parts.push(`- **To:** \`${pr.base.ref}\` (${owner}/${repo})\n\n`);
  parts.push(`**URL:** ${pr.url}\n\n`);
  if (pr.body) {
    parts.push(`## Description\n${pr.body}\n\n`);
  } else {
    parts.push("## Description\n_No description provided._\n\n");
  }
  if (fileDiffs.length > 0) {
    parts.push("## Files Changed\n");
    for (const fileDiff of fileDiffs) {
      parts.push(`### ${fileDiff.path}\n`);
      parts.push(`- **Additions:** +${fileDiff.additions}\n`);
      parts.push(`- **Deletions:** -${fileDiff.deletions}\n`);
      if (fileDiff.commit_messages.length > 0) {
        parts.push("- **Related commits:**\n");
        // De-duplicate messages; show only the first (subject) line of each.
        for (const message of new Set(fileDiff.commit_messages)) {
          parts.push(`  - ${message.split("\n")[0]}\n`);
        }
      }
      parts.push("\n");
    }
  }
  return parts.join("");
}
|
|
256
|
+
/**
 * Validate that a 1-based inclusive line range fits within a document.
 *
 * @throws Error when the range exceeds [1, totalLines] or is inverted
 */
function validateLineNumbers(startLine, endLine, totalLines) {
  const outOfRange = startLine < 1 || endLine > totalLines;
  if (outOfRange) throw new Error(`Line numbers out of range: ${startLine}-${endLine}`);
  const inverted = endLine < startLine;
  if (inverted) throw new Error(`Invalid line range: ${startLine} > ${endLine}`);
}
|
|
263
|
+
/**
 * Recursively collect file paths in a repository subtree via the contents
 * API, skipping dot-prefixed paths, paths containing "[" or "]" (e.g.
 * dynamic-route segments), and anything under __tests__.
 *
 * @param githubClient - authenticated Octokit client
 * @param path - subtree to start from; "" for the repository root
 * @returns flat array of file paths
 * @throws Error when any contents request fails
 */
async function getFilePathsInRepo(githubClient, owner, repo, path = "") {
  const filePaths = [];
  try {
    const response = await githubClient.rest.repos.getContent({
      owner,
      repo,
      path
    });
    if (Array.isArray(response.data)) for (const item of response.data) {
      // Filter out hidden/test/bracketed paths before recursing.
      if (item.path.trimStart().startsWith(".") || item.path.includes("[") || item.path.includes("]") || item.path.includes("__tests__")) continue;
      if (item.type === "file") filePaths.push(item.path);
      else if (item.type === "dir") {
        // Consistency fix: use the module's structured logger instead of
        // the stray console.log the original shipped with.
        logger.info({ owner, repo, path: item.path }, `Getting files from subdirectory: ${item.path}`);
        const subDirFiles = await getFilePathsInRepo(githubClient, owner, repo, item.path);
        filePaths.push(...subDirFiles);
      }
    }
    else if (response.data.type === "file") filePaths.push(response.data.path);
    return filePaths;
  } catch (error) {
    throw new Error(`Failed to get file paths from repository: ${error instanceof Error ? error.message : "Unknown error"}`);
  }
}
|
|
286
|
+
/**
 * Apply edit operations to a text document and return the new content.
 *
 * Operations are applied in reverse document order (highest line first) so
 * that earlier splices do not shift the 1-based line numbers used by later
 * operations. Line ranges are validated against the ORIGINAL line count.
 * A failing operation is logged and skipped (best-effort semantics).
 *
 * @param fileContent - original document text
 * @param operations - array of { operation, lineStart, lineEnd, content }
 * @returns the updated document text
 */
function applyOperations(fileContent, operations) {
  if (!operations || operations.length === 0) return fileContent;
  const lines = fileContent.split("\n");
  const totalLines = lines.length;
  // Fix: copy before sorting. Array.prototype.sort mutates in place, so the
  // original code reordered the CALLER'S operations array as a side effect.
  const sortedOperations = [...operations].sort((a, b) => {
    const aStart = a.lineStart || 0;
    const aEnd = a.lineEnd || 0;
    const bStart = b.lineStart || 0;
    const bEnd = b.lineEnd || 0;
    return bStart - aStart || bEnd - aEnd;
  });
  for (const operation of sortedOperations) try {
    switch (operation.operation) {
      case "replace_lines": {
        if (!operation.lineStart || !operation.lineEnd || operation.content === void 0) throw new Error("replace_lines requires lineStart, lineEnd, and content");
        validateLineNumbers(operation.lineStart, operation.lineEnd, totalLines);
        const startIdx = operation.lineStart - 1;
        const endIdx = operation.lineEnd;
        const newLines = operation.content.split("\n");
        lines.splice(startIdx, endIdx - startIdx, ...newLines);
        break;
      }
      case "insert_after": {
        if (!operation.lineStart || operation.content === void 0) throw new Error("insert_after requires lineStart and content");
        if (operation.lineStart < 1 || operation.lineStart > totalLines) throw new Error(`Line number out of range: ${operation.lineStart}`);
        const insertIdx = operation.lineStart;
        const newLines = operation.content.split("\n");
        lines.splice(insertIdx, 0, ...newLines);
        break;
      }
      case "insert_before": {
        if (!operation.lineStart || operation.content === void 0) throw new Error("insert_before requires lineStart and content");
        if (operation.lineStart < 1 || operation.lineStart > totalLines) throw new Error(`Line number out of range: ${operation.lineStart}`);
        const insertIdx = operation.lineStart - 1;
        const newLines = operation.content.split("\n");
        lines.splice(insertIdx, 0, ...newLines);
        break;
      }
      case "delete_lines": {
        if (!operation.lineStart || !operation.lineEnd) throw new Error("delete_lines requires lineStart and lineEnd");
        validateLineNumbers(operation.lineStart, operation.lineEnd, totalLines);
        const startIdx = operation.lineStart - 1;
        const deleteCount = operation.lineEnd - operation.lineStart + 1;
        lines.splice(startIdx, deleteCount);
        break;
      }
      default: throw new Error(`Unknown operation: ${operation.operation}`);
    }
  } catch (error) {
    // Best-effort: report the failed operation and continue with the rest.
    console.error(`Error applying operation ${operation.operation}: ${error instanceof Error ? error.message : "Unknown error"}`);
  }
  return lines.join("\n");
}
|
|
345
|
+
/**
 * Apply a single edit operation to file content.
 * Thin convenience wrapper over applyOperations.
 */
function applyOperation(fileContent, operation) {
  const singleOpBatch = [operation];
  return applyOperations(fileContent, singleOpBatch);
}
|
|
351
|
+
/**
 * Preview the effect of a list of operations without committing anything.
 *
 * Applies the operations and returns a { lineNumber: lineContent } mapping
 * (1-based) of the resulting document, or an error message string if the
 * operations cannot be applied.
 *
 * @param fileContent - the original content of the file
 * @param operations - list of operations to apply
 * @returns mapping of line number to line content, or an error string
 */
function visualizeUpdateOperations(fileContent, operations) {
  try {
    const updatedLines = applyOperations(fileContent, operations).split("\n");
    const numbered = {};
    let lineNumber = 1;
    for (const line of updatedLines) {
      numbered[lineNumber] = line;
      lineNumber += 1;
    }
    return numbered;
  } catch (error) {
    return `Error applying operations: ${error instanceof Error ? error.message : "Unknown error"}`;
  }
}
|
|
369
|
+
/**
 * Commit `content` to `filePath` on `branchName` using the low-level git
 * data API: read branch head -> read its tree -> create blob -> create tree
 * -> create commit -> advance the branch ref.
 *
 * @returns the SHA of the newly created commit
 */
async function commitContent({ githubClient, owner, repo, filePath, branchName, content, commitMessage }) {
  // SHA of the current branch head commit (becomes the new commit's parent).
  const currentSha = (await githubClient.rest.git.getRef({
    owner,
    repo,
    ref: `heads/${branchName}`
  })).data.object.sha;
  // Tree of the head commit; used as base_tree so only filePath changes.
  const currentTreeSha = (await githubClient.rest.git.getCommit({
    owner,
    repo,
    commit_sha: currentSha
  })).data.tree.sha;
  // Upload the new file contents as a base64-encoded blob.
  const blob = await githubClient.rest.git.createBlob({
    owner,
    repo,
    content: Buffer.from(content).toString("base64"),
    encoding: "base64"
  });
  // New tree = base tree with filePath pointing at the new blob
  // (mode "100644" = regular, non-executable file).
  const newTree = await githubClient.rest.git.createTree({
    owner,
    repo,
    base_tree: currentTreeSha,
    tree: [{
      path: filePath,
      mode: "100644",
      type: "blob",
      sha: blob.data.sha
    }]
  });
  const newCommit = await githubClient.rest.git.createCommit({
    owner,
    repo,
    message: commitMessage,
    tree: newTree.data.sha,
    parents: [currentSha]
  });
  // Move the branch ref to the new commit (non-forced update; fails if the
  // branch advanced concurrently).
  await githubClient.rest.git.updateRef({
    owner,
    repo,
    ref: `heads/${branchName}`,
    sha: newCommit.data.sha
  });
  return newCommit.data.sha;
}
|
|
412
|
+
/**
 * Apply edit operations to `fileContent` and commit the result to
 * `filePath` on `branchName`.
 *
 * @returns the SHA of the new commit
 * @throws Error wrapping any failure from applying operations or committing
 */
async function commitFileChanges({ githubClient, owner, repo, fileContent, filePath, branchName, operations, commitMessage }) {
  try {
    const updatedContent = applyOperations(fileContent, operations);
    return await commitContent({
      githubClient,
      owner,
      repo,
      filePath,
      branchName,
      content: updatedContent,
      commitMessage
    });
  } catch (error) {
    const reason = error instanceof Error ? error.message : "Unknown error";
    throw new Error(`Error committing file changes: ${reason}`);
  }
}
|
|
427
|
+
/**
 * Commit `content` as a file at `filePath` on `branchName`.
 * Thin wrapper over commitContent that normalizes the thrown error.
 *
 * @returns the SHA of the new commit
 */
async function commitNewFile({ githubClient, owner, repo, filePath, branchName, content, commitMessage }) {
  try {
    const commitSha = await commitContent({
      githubClient,
      owner,
      repo,
      filePath,
      branchName,
      content,
      commitMessage
    });
    return commitSha;
  } catch (error) {
    const reason = error instanceof Error ? error.message : "Unknown error";
    throw new Error(`Failed to commit new file: ${reason}`);
  }
}
|
|
442
|
+
/**
 * Render a markdown report of changed files for a PR: per-file status and
 * add/delete counts, the diff patch in a fenced block when available, and
 * (optionally) full file contents inside a collapsible <details> section.
 *
 * @returns the assembled markdown string
 */
async function formatFileDiff(pullRequestNumber, files, includeContents = false) {
  const sections = [
    `## File Patches for PR #${pullRequestNumber}\n\n`,
    `Found ${files.length} file(s) matching the requested paths.\n\n`
  ];
  for (const file of files) {
    sections.push(`### ${file.path}\n`);
    sections.push(`**Status:** ${file.status} | **+${file.additions}** / **-${file.deletions}**\n\n`);
    if (file.patch) {
      sections.push("```diff\n", file.patch, "\n```\n\n");
    } else {
      sections.push("_No patch available (file may be binary or too large)_\n\n");
    }
    if (includeContents && file.contents) {
      sections.push("<details>\n<summary>Full file contents</summary>\n\n");
      sections.push("```\n", file.contents, "\n```\n\n</details>\n\n");
    }
  }
  return sections.join("");
}
|
|
462
|
+
|
|
463
|
+
//#endregion
|
|
464
|
+
export { applyOperation, applyOperations, commitFileChanges, commitNewFile, fetchCommitDetails, fetchPrCommits, fetchPrFileDiffs, fetchPrFiles, fetchPrInfo, formatFileDiff, generatePrMarkdown, getFilePathsInRepo, getGitHubClientFromInstallationId, getGitHubClientFromRepo, validateLineNumbers, visualizeUpdateOperations };
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
//#region src/github/oidcToken.d.ts
|
|
2
|
+
/** Claims this service consumes from a validated GitHub Actions OIDC token. */
interface GitHubOidcClaims {
  /** Repository full name, e.g. "owner/name". */
  repository: string;
  /** Repository owner (user or organization login). */
  repository_owner: string;
  /** Repository id — note: carried as a string, not a number. */
  repository_id: string;
  /** Workflow identifier from the token (name or path — confirm with GitHub docs). */
  workflow: string;
  /** Login of the actor that triggered the workflow run. */
  actor: string;
  /** Git ref the token was issued for (e.g. "refs/heads/main"). */
  ref: string;
}
/** Successful validation result: token verified, all required claims present. */
interface ValidateTokenResult {
  success: true;
  claims: GitHubOidcClaims;
}
/** Failed validation result with a machine-readable error category. */
interface ValidateTokenError {
  success: false;
  /** Category of failure; see validateOidcToken for how each is produced. */
  errorType: 'invalid_signature' | 'expired' | 'wrong_issuer' | 'wrong_audience' | 'malformed' | 'jwks_error';
  /** Human-readable explanation of the failure. */
  message: string;
}
/** Discriminated union — narrow on the `success` field. */
type ValidateOidcTokenResult = ValidateTokenResult | ValidateTokenError;
/** Validate a GitHub Actions OIDC JWT; never rejects, always resolves to a result object. */
declare function validateOidcToken(token: string): Promise<ValidateOidcTokenResult>;
|
|
21
|
+
//#endregion
|
|
22
|
+
export { GitHubOidcClaims, ValidateOidcTokenResult, ValidateTokenError, ValidateTokenResult, validateOidcToken };
|
|
@@ -0,0 +1,140 @@
|
|
|
1
|
+
import { getLogger } from "../logger.js";
|
|
2
|
+
import { getJwkForToken } from "./jwks.js";
|
|
3
|
+
import { decodeProtectedHeader, errors, jwtVerify } from "jose";
|
|
4
|
+
|
|
5
|
+
//#region src/github/oidcToken.ts
|
|
6
|
+
const logger = getLogger("github-oidc-token");
// Issuer URL GitHub Actions uses when minting workflow OIDC tokens.
const GITHUB_OIDC_ISSUER = "https://token.actions.githubusercontent.com";
// Audience value this service requires tokens to be issued for.
const EXPECTED_AUDIENCE = "inkeep-agents-action";
/**
 * Validate a GitHub Actions OIDC JWT and extract the claims this service uses.
 *
 * Validation steps, in order:
 *   1. decode the (unverified) protected header to learn alg/kid;
 *   2. require RS256;
 *   3. resolve the signing key via getJwkForToken;
 *   4. verify signature, issuer, and audience with jose's jwtVerify;
 *   5. require all six claims to be present as strings.
 *
 * Never throws: every failure mode is mapped to a
 * { success: false, errorType, message } result.
 *
 * @param token - raw JWT string presented by the workflow
 * @returns { success: true, claims } on success, an error result otherwise
 */
async function validateOidcToken(token) {
  let header;
  // Step 1: decode the protected header (no signature check yet).
  try {
    header = decodeProtectedHeader(token);
  } catch (error) {
    const message = error instanceof Error ? error.message : "Unknown error";
    logger.warn({ error: message }, "Failed to decode JWT header");
    return {
      success: false,
      errorType: "malformed",
      message: "Invalid JWT format: unable to decode token header"
    };
  }
  // Step 2: reject any algorithm other than RS256 before touching the JWKS.
  if (header.alg !== "RS256") {
    logger.warn({ algorithm: header.alg }, "Unexpected JWT algorithm");
    return {
      success: false,
      errorType: "malformed",
      message: `Invalid JWT algorithm: expected RS256, got ${header.alg}`
    };
  }
  // Step 3: resolve the verification key matching this token's header.
  const jwkResult = await getJwkForToken(header);
  if (!jwkResult.success) {
    logger.error({ error: jwkResult.error }, "Failed to get JWK for token");
    return {
      success: false,
      errorType: "jwks_error",
      message: jwkResult.error
    };
  }
  try {
    // Step 4: verify signature plus issuer/audience (jose also enforces
    // standard time claims such as exp).
    const { payload } = await jwtVerify(token, jwkResult.key, {
      issuer: GITHUB_OIDC_ISSUER,
      audience: EXPECTED_AUDIENCE
    });
    const repository = payload.repository;
    const repositoryOwner = payload.repository_owner;
    const repositoryId = payload.repository_id;
    const workflow = payload.workflow;
    const actor = payload.actor;
    const ref = payload.ref;
    // Step 5: all six claims must exist and be strings.
    if (typeof repository !== "string" || typeof repositoryOwner !== "string" || typeof repositoryId !== "string" || typeof workflow !== "string" || typeof actor !== "string" || typeof ref !== "string") {
      logger.warn({ payload }, "OIDC token missing required claims");
      return {
        success: false,
        errorType: "malformed",
        message: "OIDC token missing required claims: repository, repository_owner, repository_id, workflow, actor, or ref"
      };
    }
    logger.info({
      repository,
      actor
    }, "Successfully validated OIDC token");
    return {
      success: true,
      claims: {
        repository,
        repository_owner: repositoryOwner,
        repository_id: repositoryId,
        workflow,
        actor,
        ref
      }
    };
  } catch (error) {
    // Map jose error classes onto our errorType taxonomy, most specific first.
    if (error instanceof errors.JWTExpired) {
      logger.warn({}, "OIDC token has expired");
      return {
        success: false,
        errorType: "expired",
        message: "OIDC token has expired"
      };
    }
    if (error instanceof errors.JWTClaimValidationFailed) {
      const claimError = error;
      if (claimError.claim === "iss") {
        logger.warn({ issuer: claimError.reason }, "Invalid OIDC token issuer");
        return {
          success: false,
          errorType: "wrong_issuer",
          message: `Invalid token issuer: expected ${GITHUB_OIDC_ISSUER}`
        };
      }
      if (claimError.claim === "aud") {
        logger.warn({ audience: claimError.reason }, "Invalid OIDC token audience");
        return {
          success: false,
          errorType: "wrong_audience",
          message: `Invalid token audience: expected ${EXPECTED_AUDIENCE}`
        };
      }
      // Any other failed claim (e.g. nbf) is reported as malformed.
      logger.warn({
        claim: claimError.claim,
        reason: claimError.reason
      }, "JWT claim validation failed");
      return {
        success: false,
        errorType: "malformed",
        message: `JWT claim validation failed: ${claimError.claim} ${claimError.reason}`
      };
    }
    if (error instanceof errors.JWSSignatureVerificationFailed) {
      logger.warn({}, "Invalid OIDC token signature");
      return {
        success: false,
        errorType: "invalid_signature",
        message: "Invalid token signature"
      };
    }
    if (error instanceof errors.JOSEError) {
      logger.error({
        error: error.message,
        code: error.code
      }, "JOSE error during token validation");
      return {
        success: false,
        errorType: "malformed",
        message: `Token validation error: ${error.message}`
      };
    }
    // Unknown non-JOSE failure: log and report as malformed.
    const message = error instanceof Error ? error.message : "Unknown error";
    logger.error({ error: message }, "Unexpected error during token validation");
    return {
      success: false,
      errorType: "malformed",
      message: `Token validation error: ${message}`
    };
  }
}
|
|
138
|
+
|
|
139
|
+
//#endregion
|
|
140
|
+
export { validateOidcToken };
|