@0xbigboss/gh-pulse-core 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cache.cjs +312 -0
- package/dist/cache.d.cts +58 -0
- package/dist/cache.d.ts +58 -0
- package/dist/cache.js +303 -0
- package/dist/config.cjs +182 -0
- package/dist/config.d.cts +248 -0
- package/dist/config.d.ts +248 -0
- package/dist/config.js +172 -0
- package/dist/filters.cjs +16 -0
- package/dist/filters.d.cts +3 -0
- package/dist/filters.d.ts +3 -0
- package/dist/filters.js +12 -0
- package/dist/github.cjs +240 -0
- package/dist/github.d.cts +46 -0
- package/dist/github.d.ts +46 -0
- package/dist/github.js +235 -0
- package/dist/index.cjs +28 -0
- package/dist/index.d.cts +11 -0
- package/dist/index.d.ts +11 -0
- package/dist/index.js +11 -0
- package/dist/reports/context.cjs +8 -0
- package/dist/reports/context.d.cts +7 -0
- package/dist/reports/context.d.ts +7 -0
- package/dist/reports/context.js +5 -0
- package/dist/reports/exec.cjs +160 -0
- package/dist/reports/exec.d.cts +6 -0
- package/dist/reports/exec.d.ts +6 -0
- package/dist/reports/exec.js +157 -0
- package/dist/reports/index.cjs +21 -0
- package/dist/reports/index.d.cts +5 -0
- package/dist/reports/index.d.ts +5 -0
- package/dist/reports/index.js +5 -0
- package/dist/reports/meta.cjs +15 -0
- package/dist/reports/meta.d.cts +12 -0
- package/dist/reports/meta.d.ts +12 -0
- package/dist/reports/meta.js +12 -0
- package/dist/reports/personal.cjs +90 -0
- package/dist/reports/personal.d.cts +8 -0
- package/dist/reports/personal.d.ts +8 -0
- package/dist/reports/personal.js +87 -0
- package/dist/reports/team.cjs +127 -0
- package/dist/reports/team.d.cts +6 -0
- package/dist/reports/team.d.ts +6 -0
- package/dist/reports/team.js +124 -0
- package/dist/reports/types.cjs +2 -0
- package/dist/reports/types.d.cts +144 -0
- package/dist/reports/types.d.ts +144 -0
- package/dist/reports/types.js +1 -0
- package/dist/reports/utils.cjs +71 -0
- package/dist/reports/utils.d.cts +6 -0
- package/dist/reports/utils.d.ts +6 -0
- package/dist/reports/utils.js +65 -0
- package/dist/repos.cjs +102 -0
- package/dist/repos.d.cts +12 -0
- package/dist/repos.d.ts +12 -0
- package/dist/repos.js +96 -0
- package/dist/sync.cjs +360 -0
- package/dist/sync.d.cts +24 -0
- package/dist/sync.d.ts +24 -0
- package/dist/sync.js +357 -0
- package/dist/team.cjs +45 -0
- package/dist/team.d.cts +10 -0
- package/dist/team.d.ts +10 -0
- package/dist/team.js +42 -0
- package/dist/time.cjs +153 -0
- package/dist/time.d.cts +13 -0
- package/dist/time.d.ts +13 -0
- package/dist/time.js +145 -0
- package/dist/types.cjs +2 -0
- package/dist/types.d.cts +133 -0
- package/dist/types.d.ts +133 -0
- package/dist/types.js +1 -0
- package/package.json +29 -0
package/dist/reports/utils.js
ADDED
@@ -0,0 +1,65 @@
+import { diffInDays } from "../time.js";
+export function buildPullRequestSummary(pr, now) {
+    return {
+        repo: pr.repo,
+        number: pr.number,
+        title: pr.title,
+        author: pr.author,
+        state: pr.state,
+        draft: pr.draft,
+        updated_at: pr.updated_at,
+        created_at: pr.created_at,
+        age_days: diffInDays(pr.created_at, now),
+    };
+}
+export function computeCycleTime(pr, events) {
+    if (pr.merged_at === null) {
+        return null;
+    }
+    const openedAt = minTimestamp(events.filter((event) => event.type === 'pr_opened').map((e) => e.opened_at));
+    const readyAt = minTimestamp(events.filter((event) => event.type === 'pr_ready').map((e) => e.ready_at));
+    const firstReviewAt = minTimestamp(events.filter((event) => event.type === 'review_submitted').map((event) => event.submitted_at));
+    const openTime = openedAt ?? pr.created_at;
+    const readyTime = readyAt ?? openTime;
+    const reviewStart = firstReviewAt ?? null;
+    const draftTime = readyAt ? readyTime - openTime : null;
+    const reviewTime = reviewStart ? reviewStart - readyTime : 0;
+    const mergeTime = reviewStart ? pr.merged_at - reviewStart : 0;
+    const totalTime = pr.merged_at - openTime;
+    return {
+        draft_time: draftTime,
+        review_time: reviewTime,
+        merge_time: mergeTime,
+        total_time: totalTime,
+    };
+}
+export function computeReviewEngagement(pr, events) {
+    const comments = events.filter((event) => event.type === 'comment_added');
+    const reviews = events.filter((event) => event.type === 'review_submitted');
+    if (comments.length === 0 && reviews.length === 0) {
+        return null;
+    }
+    const firstComment = minTimestamp(comments.map((event) => event.commented_at));
+    const firstReview = minTimestamp(reviews.map((event) => event.submitted_at));
+    return {
+        time_to_first_comment: firstComment ? firstComment - pr.created_at : null,
+        time_to_first_review: firstReview ? firstReview - pr.created_at : null,
+        review_count: reviews.length,
+        comment_count: comments.length,
+    };
+}
+export function percentile(values, pct) {
+    if (values.length === 0) {
+        return null;
+    }
+    const sorted = [...values].toSorted((a, b) => a - b);
+    const index = Math.floor((pct / 100) * (sorted.length - 1));
+    return sorted[index] ?? null;
+}
+function minTimestamp(values) {
+    const filtered = values.filter((value) => typeof value === 'number');
+    if (filtered.length === 0) {
+        return null;
+    }
+    return Math.min(...filtered);
+}
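Two semantics in reports/utils.js are easy to misread: computeCycleTime reports draft_time as null when a PR was never marked ready and collapses review_time and merge_time to 0 when no review was ever submitted, while percentile uses a floored (nearest-rank-style) index rather than interpolation. A minimal sketch, assuming the built file is imported directly (the import path is illustrative; the package's real entry points come from its package.json export map):

// Illustrative import path; the published export map may differ.
import { computeCycleTime, percentile } from "./dist/reports/utils.js";

const hour = 60 * 60 * 1000;
// Minimal PR object with just the fields computeCycleTime reads;
// the real PullRequest record (see types.d.ts) carries more.
const pr = { created_at: 0, merged_at: 10 * hour };
const events = [
  { type: 'pr_opened', opened_at: 0 },
  { type: 'pr_ready', ready_at: 2 * hour },             // 2h spent in draft
  { type: 'review_submitted', submitted_at: 5 * hour }, // first review 3h after ready
];
console.log(computeCycleTime(pr, events));
// -> { draft_time: 2h, review_time: 3h, merge_time: 5h, total_time: 10h } (in ms)
// Without a pr_ready event draft_time is null; without reviews,
// review_time and merge_time are 0, not null.

// percentile: index = floor((pct / 100) * (n - 1)), so p50 of four
// values is the second-lowest, and p95 here is not the maximum.
console.log(percentile([100, 200, 300, 400], 50)); // 200
console.log(percentile([100, 200, 300, 400], 95)); // 300

Note that percentile relies on Array.prototype.toSorted, so running this requires Node 20 or newer.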
package/dist/repos.cjs
ADDED
@@ -0,0 +1,102 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.resolveRepos = resolveRepos;
+const picomatch_1 = __importDefault(require("picomatch"));
+async function resolveRepos(options) {
+    const orgs = uniqueStrings([...(options.config.orgs ?? []), ...(options.orgs ?? [])]);
+    const repoPatterns = options.repoPatterns ?? [];
+    const repoSet = new Set();
+    // orgs can include GitHub orgs OR usernames - we try org first, fall back to user
+    for (const [index, org] of orgs.entries()) {
+        options.onProgress?.({
+            phase: 'resolve',
+            message: `Resolving repos for ${org}`,
+            current: index + 1,
+            total: orgs.length,
+        });
+        const repos = await listOwnerRepos(options.github, org);
+        repos.forEach((repo) => repoSet.add(repo));
+    }
+    for (const pattern of options.config.repos ?? []) {
+        if (pattern.includes('*')) {
+            const owner = pattern.split('/')[0] ?? '';
+            if (!owner) {
+                throw new Error(`Invalid repo pattern: ${pattern}`);
+            }
+            options.onProgress?.({
+                phase: 'resolve',
+                message: `Resolving repos for ${owner}`,
+            });
+            const repos = await listOwnerRepos(options.github, owner);
+            const matcher = (0, picomatch_1.default)(pattern);
+            repos.filter((repo) => matcher(repo)).forEach((repo) => repoSet.add(repo));
+            continue;
+        }
+        repoSet.add(pattern);
+    }
+    let repos = [...repoSet];
+    const patterns = repoPatterns
+        .flatMap((pattern) => pattern.split(',').map((p) => p.trim()))
+        .filter(Boolean);
+    const needsFilter = patterns.length > 0 || options.config.exclude.forks || options.config.exclude.archived;
+    if (repos.length > 0 || needsFilter) {
+        options.onProgress?.({
+            phase: 'resolve',
+            message: `Found ${repos.length} repos${needsFilter ? ', filtering...' : ''}`,
+        });
+    }
+    if (patterns.length > 0) {
+        const matcher = (0, picomatch_1.default)(patterns);
+        repos = repos.filter((repo) => matcher(repo));
+    }
+    if (options.config.exclude.forks || options.config.exclude.archived) {
+        repos = await filterRepoStates(repos, options);
+    }
+    else if (options.strict ?? true) {
+        await verifyReposAccessible(repos, options);
+    }
+    return repos.toSorted();
+}
+async function listOwnerRepos(github, owner) {
+    try {
+        return await github.listOrgRepos(owner);
+    }
+    catch (error) {
+        const status = getStatus(error);
+        if (status === 404) {
+            return github.listUserRepos(owner);
+        }
+        throw error;
+    }
+}
+async function filterRepoStates(repos, options) {
+    const results = [];
+    for (const repo of repos) {
+        const detail = await options.github.getRepo(repo);
+        if (options.config.exclude.forks && detail.fork) {
+            continue;
+        }
+        if (options.config.exclude.archived && detail.archived) {
+            continue;
+        }
+        results.push(detail.full_name);
+    }
+    return results;
+}
+async function verifyReposAccessible(repos, options) {
+    for (const repo of repos) {
+        await options.github.ensureRepoAccessible(repo);
+    }
+}
+function uniqueStrings(values) {
+    return Array.from(new Set(values));
+}
+function getStatus(error) {
+    if (typeof error === 'object' && error && 'status' in error) {
+        return error.status;
+    }
+    return undefined;
+}
package/dist/repos.d.cts
ADDED
@@ -0,0 +1,12 @@
+import type { Config } from "./config.cjs";
+import type { GitHubClient } from "./github.cjs";
+import type { ProgressEvent, RepoFullName } from "./types.cjs";
+export interface ResolveReposOptions {
+    config: Config;
+    github: GitHubClient;
+    orgs?: string[];
+    repoPatterns?: string[];
+    strict?: boolean;
+    onProgress?: (event: ProgressEvent) => void;
+}
+export declare function resolveRepos(options: ResolveReposOptions): Promise<RepoFullName[]>;
package/dist/repos.d.ts
ADDED
@@ -0,0 +1,12 @@
+import type { Config } from "./config.js";
+import type { GitHubClient } from "./github.js";
+import type { ProgressEvent, RepoFullName } from "./types.js";
+export interface ResolveReposOptions {
+    config: Config;
+    github: GitHubClient;
+    orgs?: string[];
+    repoPatterns?: string[];
+    strict?: boolean;
+    onProgress?: (event: ProgressEvent) => void;
+}
+export declare function resolveRepos(options: ResolveReposOptions): Promise<RepoFullName[]>;
package/dist/repos.js
ADDED
@@ -0,0 +1,96 @@
+import picomatch from 'picomatch';
+export async function resolveRepos(options) {
+    const orgs = uniqueStrings([...(options.config.orgs ?? []), ...(options.orgs ?? [])]);
+    const repoPatterns = options.repoPatterns ?? [];
+    const repoSet = new Set();
+    // orgs can include GitHub orgs OR usernames - we try org first, fall back to user
+    for (const [index, org] of orgs.entries()) {
+        options.onProgress?.({
+            phase: 'resolve',
+            message: `Resolving repos for ${org}`,
+            current: index + 1,
+            total: orgs.length,
+        });
+        const repos = await listOwnerRepos(options.github, org);
+        repos.forEach((repo) => repoSet.add(repo));
+    }
+    for (const pattern of options.config.repos ?? []) {
+        if (pattern.includes('*')) {
+            const owner = pattern.split('/')[0] ?? '';
+            if (!owner) {
+                throw new Error(`Invalid repo pattern: ${pattern}`);
+            }
+            options.onProgress?.({
+                phase: 'resolve',
+                message: `Resolving repos for ${owner}`,
+            });
+            const repos = await listOwnerRepos(options.github, owner);
+            const matcher = picomatch(pattern);
+            repos.filter((repo) => matcher(repo)).forEach((repo) => repoSet.add(repo));
+            continue;
+        }
+        repoSet.add(pattern);
+    }
+    let repos = [...repoSet];
+    const patterns = repoPatterns
+        .flatMap((pattern) => pattern.split(',').map((p) => p.trim()))
+        .filter(Boolean);
+    const needsFilter = patterns.length > 0 || options.config.exclude.forks || options.config.exclude.archived;
+    if (repos.length > 0 || needsFilter) {
+        options.onProgress?.({
+            phase: 'resolve',
+            message: `Found ${repos.length} repos${needsFilter ? ', filtering...' : ''}`,
+        });
+    }
+    if (patterns.length > 0) {
+        const matcher = picomatch(patterns);
+        repos = repos.filter((repo) => matcher(repo));
+    }
+    if (options.config.exclude.forks || options.config.exclude.archived) {
+        repos = await filterRepoStates(repos, options);
+    }
+    else if (options.strict ?? true) {
+        await verifyReposAccessible(repos, options);
+    }
+    return repos.toSorted();
+}
+async function listOwnerRepos(github, owner) {
+    try {
+        return await github.listOrgRepos(owner);
+    }
+    catch (error) {
+        const status = getStatus(error);
+        if (status === 404) {
+            return github.listUserRepos(owner);
+        }
+        throw error;
+    }
+}
+async function filterRepoStates(repos, options) {
+    const results = [];
+    for (const repo of repos) {
+        const detail = await options.github.getRepo(repo);
+        if (options.config.exclude.forks && detail.fork) {
+            continue;
+        }
+        if (options.config.exclude.archived && detail.archived) {
+            continue;
+        }
+        results.push(detail.full_name);
+    }
+    return results;
+}
+async function verifyReposAccessible(repos, options) {
+    for (const repo of repos) {
+        await options.github.ensureRepoAccessible(repo);
+    }
+}
+function uniqueStrings(values) {
+    return Array.from(new Set(values));
+}
+function getStatus(error) {
+    if (typeof error === 'object' && error && 'status' in error) {
+        return error.status;
+    }
+    return undefined;
+}
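As a usage sketch of the ESM build above: the github object below is a hypothetical in-memory stand-in implementing only the four methods resolveRepos calls, and the config object carries only the fields this module reads (a real Config from ./config.js has more):

import { resolveRepos } from "./dist/repos.js";

// Hypothetical stand-in for the real GitHubClient (./github.js); no network access.
const github = {
  listOrgRepos: async (owner) => [`${owner}/tools-a`, `${owner}/tools-b`, `${owner}/fork-c`],
  listUserRepos: async (owner) => [`${owner}/sandbox`], // the 404 fallback path
  getRepo: async (repo) => ({ full_name: repo, fork: repo.endsWith('fork-c'), archived: false }),
  ensureRepoAccessible: async () => {},
};

const repos = await resolveRepos({
  config: {
    orgs: ['my-org'],                         // tried as an org first, as a user on 404
    repos: ['my-org/tools-*'],                // '*' entries expand via picomatch
    exclude: { forks: true, archived: true }, // triggers the per-repo state filter
  },
  github,
  onProgress: (event) => console.error(event.message),
});
console.log(repos); // sorted, de-duplicated: [ 'my-org/tools-a', 'my-org/tools-b' ]

With this stub, the org listing and the glob entry both resolve to the same set, and the state filter drops my-org/fork-c because getRepo reports it as a fork.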
package/dist/sync.cjs
ADDED
@@ -0,0 +1,360 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.syncRepos = syncRepos;
+const cache_1 = require("./cache.cjs");
+async function syncRepos(options) {
+    const now = Date.now();
+    const syncTtlMs = options.config.cache.sync_ttl_hours * 60 * 60 * 1000;
+    const concurrency = options.config.cache.concurrency;
+    // Filter repos that need syncing
+    const reposToSync = [];
+    const skippedRepos = [];
+    for (const repo of options.repos) {
+        if (options.forceSync) {
+            reposToSync.push(repo);
+            continue;
+        }
+        const syncState = options.cache.getSyncState(repo);
+        if (syncState && now - syncState.last_sync < syncTtlMs) {
+            skippedRepos.push(repo);
+        }
+        else {
+            reposToSync.push(repo);
+        }
+    }
+    if (skippedRepos.length > 0) {
+        options.onProgress?.({
+            phase: 'sync',
+            message: `Skipping ${skippedRepos.length} repos (synced within ${options.config.cache.sync_ttl_hours}h)`,
+        });
+    }
+    if (reposToSync.length === 0) {
+        options.onProgress?.({
+            phase: 'sync',
+            message: 'All repos up to date',
+        });
+        return {
+            synced_at: now,
+            repos: options.repos,
+            events_inserted: 0,
+            pull_requests_upserted: 0,
+            commits_upserted: 0,
+        };
+    }
+    let eventsInserted = 0;
+    let prsUpserted = 0;
+    let commitsUpserted = 0;
+    let completed = 0;
+    // Process repos in parallel with concurrency limit
+    const processRepo = async (repo) => {
+        const repoResult = await syncRepo(repo, options);
+        eventsInserted += repoResult.eventsInserted;
+        prsUpserted += repoResult.prsUpserted;
+        commitsUpserted += repoResult.commitsUpserted;
+        options.cache.upsertSyncState(repo, now, null);
+        completed += 1;
+        options.onProgress?.({
+            phase: 'sync',
+            message: `Synced ${repo} (${repoResult.prsUpserted} PRs, ${repoResult.commitsUpserted} commits)`,
+            current: completed,
+            total: reposToSync.length,
+        });
+    };
+    // Run with concurrency limit
+    await runWithConcurrency(reposToSync, processRepo, concurrency);
+    return {
+        synced_at: now,
+        repos: options.repos,
+        events_inserted: eventsInserted,
+        pull_requests_upserted: prsUpserted,
+        commits_upserted: commitsUpserted,
+    };
+}
+async function runWithConcurrency(items, fn, concurrency) {
+    if (!Number.isFinite(concurrency) || concurrency <= 0) {
+        throw new Error(`Invalid concurrency: ${concurrency}`);
+    }
+    if (items.length === 0) {
+        return;
+    }
+    const queue = [...items];
+    const running = new Set();
+    let firstError = null;
+    while (queue.length > 0 || running.size > 0) {
+        while (!firstError && running.size < concurrency && queue.length > 0) {
+            const item = queue.shift();
+            if (item === undefined) {
+                break;
+            }
+            const promise = Promise.resolve().then(() => fn(item));
+            const tracked = promise.finally(() => {
+                running.delete(tracked);
+            });
+            running.add(tracked);
+        }
+        if (running.size > 0) {
+            try {
+                await Promise.race(running);
+            }
+            catch (error) {
+                if (!firstError) {
+                    firstError = error;
+                    queue.length = 0;
+                }
+            }
+        }
+    }
+    if (firstError) {
+        throw firstError;
+    }
+}
+async function syncRepo(repo, options) {
+    const { cache, github, config, timeRange } = options;
+    const now = Date.now();
+    let eventsInserted = 0;
+    let prsUpserted = 0;
+    let commitsUpserted = 0;
+    const pullRequests = await github.listPullRequests(repo);
+    for (const prListItem of pullRequests) {
+        const updatedAt = Date.parse(prListItem.updated_at);
+        if (prListItem.state !== 'open' && updatedAt < timeRange.start) {
+            continue;
+        }
+        const existing = cache.getPullRequest(repo, prListItem.number);
+        const shouldFetch = !existing || !cache.isFresh(existing.fetched_at, config.cache.ttl_hours);
+        if (shouldFetch) {
+            const prDetail = await github.getPullRequest(repo, prListItem.number);
+            const pr = toPullRequest(repo, prDetail);
+            cache.upsertPullRequest(pr, now);
+            prsUpserted += 1;
+            eventsInserted += insertPrEvents(cache, pr, prDetail.merged_by?.login ?? pr.author);
+            eventsInserted += await insertTimelineEvents(cache, github, pr);
+            eventsInserted += await insertReviewEvents(cache, github, pr);
+            eventsInserted += await insertCommentEvents(cache, github, pr);
+        }
+        else if (existing) {
+            eventsInserted += insertPrEvents(cache, existing.data);
+        }
+    }
+    const commitList = await github.listCommits(repo, {
+        since: new Date(timeRange.start).toISOString(),
+        until: new Date(timeRange.end).toISOString(),
+    });
+    for (const commitItem of commitList) {
+        const sha = commitItem.sha;
+        const existingCommit = cache.getCommit(repo, sha);
+        const shouldFetch = !existingCommit || !cache.isFresh(existingCommit.fetched_at, config.cache.ttl_hours);
+        let commitDetail = null;
+        if (shouldFetch) {
+            commitDetail = await github.getCommit(repo, sha);
+        }
+        if (!commitDetail && existingCommit) {
+            const commitEvent = toCommitEventFromCommit(repo, existingCommit.data);
+            const inserted = cache.insertEvent(commitEvent, now);
+            if (inserted) {
+                eventsInserted += 1;
+            }
+            continue;
+        }
+        const commitEvent = toCommitEvent(repo, commitDetail);
+        const inserted = cache.insertEvent(commitEvent, now);
+        if (inserted) {
+            eventsInserted += 1;
+        }
+        cache.upsertCommit((0, cache_1.toCommitFromEvent)(commitEvent), now);
+        commitsUpserted += 1;
+    }
+    return { eventsInserted, prsUpserted, commitsUpserted };
+}
+function insertPrEvents(cache, pr, mergedBy) {
+    let count = 0;
+    const openedEvent = {
+        type: 'pr_opened',
+        repo: pr.repo,
+        pr_number: pr.number,
+        author: pr.author,
+        opened_at: pr.created_at,
+        is_draft: pr.draft,
+    };
+    if (cache.insertEvent(openedEvent)) {
+        count += 1;
+    }
+    if (pr.merged_at) {
+        const mergedEvent = {
+            type: 'pr_merged',
+            repo: pr.repo,
+            pr_number: pr.number,
+            merged_at: pr.merged_at,
+            merged_by: mergedBy ?? pr.author,
+        };
+        if (cache.insertEvent(mergedEvent)) {
+            count += 1;
+        }
+    }
+    else if (pr.closed_at) {
+        const closedEvent = {
+            type: 'pr_closed',
+            repo: pr.repo,
+            pr_number: pr.number,
+            closed_at: pr.closed_at,
+        };
+        if (cache.insertEvent(closedEvent)) {
+            count += 1;
+        }
+    }
+    return count;
+}
+async function insertTimelineEvents(cache, github, pr) {
+    let count = 0;
+    const events = await github.listIssueEvents(pr.repo, pr.number);
+    for (const event of events) {
+        if (event.event === 'ready_for_review') {
+            const readyEvent = {
+                type: 'pr_ready',
+                repo: pr.repo,
+                pr_number: pr.number,
+                ready_at: Date.parse(event.created_at),
+            };
+            if (cache.insertEvent(readyEvent)) {
+                count += 1;
+            }
+        }
+        if (event.event === 'reopened') {
+            const reopenedEvent = {
+                type: 'pr_opened',
+                repo: pr.repo,
+                pr_number: pr.number,
+                author: pr.author,
+                opened_at: Date.parse(event.created_at),
+                is_draft: false,
+            };
+            if (cache.insertEvent(reopenedEvent)) {
+                count += 1;
+            }
+        }
+        if (event.event === 'closed' && pr.merged_at === null) {
+            const closedEvent = {
+                type: 'pr_closed',
+                repo: pr.repo,
+                pr_number: pr.number,
+                closed_at: Date.parse(event.created_at),
+            };
+            if (cache.insertEvent(closedEvent)) {
+                count += 1;
+            }
+        }
+    }
+    return count;
+}
+async function insertReviewEvents(cache, github, pr) {
+    let count = 0;
+    const reviews = await github.listPullRequestReviews(pr.repo, pr.number);
+    for (const review of reviews) {
+        const reviewer = review.user?.login;
+        if (!reviewer || !review.submitted_at) {
+            continue;
+        }
+        const reviewEvent = {
+            type: 'review_submitted',
+            repo: pr.repo,
+            pr_number: pr.number,
+            reviewer,
+            state: normalizeReviewState(review.state),
+            submitted_at: Date.parse(review.submitted_at),
+        };
+        if (cache.insertEvent(reviewEvent)) {
+            count += 1;
+        }
+    }
+    return count;
+}
+async function insertCommentEvents(cache, github, pr) {
+    let count = 0;
+    const comments = await github.listIssueComments(pr.repo, pr.number);
+    for (const comment of comments) {
+        const author = comment.user?.login;
+        if (!author) {
+            continue;
+        }
+        const commentEvent = {
+            type: 'comment_added',
+            repo: pr.repo,
+            pr_number: pr.number,
+            author,
+            commented_at: Date.parse(comment.created_at),
+        };
+        if (cache.insertEvent(commentEvent)) {
+            count += 1;
+        }
+    }
+    return count;
+}
+function toPullRequest(repo, pr) {
+    return {
+        repo,
+        number: pr.number,
+        title: pr.title,
+        state: pr.merged_at ? 'merged' : pr.state,
+        draft: pr.draft ?? false,
+        author: pr.user?.login ?? 'unknown',
+        created_at: Date.parse(pr.created_at),
+        updated_at: Date.parse(pr.updated_at),
+        merged_at: pr.merged_at ? Date.parse(pr.merged_at) : null,
+        closed_at: pr.closed_at ? Date.parse(pr.closed_at) : null,
+        additions: pr.additions ?? 0,
+        deletions: pr.deletions ?? 0,
+        commits: pr.commits ?? 0,
+        requested_reviewers: (pr.requested_reviewers ?? []).map((reviewer) => reviewer.login ?? 'unknown'),
+        labels: (pr.labels ?? []).map((label) => typeof label === 'string' ? label : (label.name ?? 'unknown')),
+        linked_issues: [],
+    };
+}
+function toCommitEvent(repo, commit) {
+    const author = commit.author?.login ?? commit.commit.author?.name ?? 'unknown';
+    const message = commit.commit.message ?? '';
+    const stats = commit.stats ?? { additions: 0, deletions: 0, total: 0 };
+    const filesChanged = commit.files ? commit.files.length : 0;
+    const committedAt = commit.commit.author?.date
+        ? Date.parse(commit.commit.author.date)
+        : Date.now();
+    return {
+        type: 'commit_pushed',
+        repo,
+        sha: commit.sha,
+        author,
+        committed_at: committedAt,
+        message,
+        additions: stats.additions ?? 0,
+        deletions: stats.deletions ?? 0,
+        files_changed: filesChanged,
+    };
+}
+function toCommitEventFromCommit(repo, commit) {
+    return {
+        type: 'commit_pushed',
+        repo,
+        sha: commit.sha,
+        author: commit.author,
+        committed_at: commit.committed_at,
+        message: commit.message,
+        additions: commit.additions,
+        deletions: commit.deletions,
+        files_changed: commit.files_changed,
+    };
+}
+function normalizeReviewState(state) {
+    switch (state) {
+        case 'approved':
+            return 'approved';
+        case 'changes_requested':
+            return 'changes_requested';
+        case 'commented':
+            return 'commented';
+        case 'dismissed':
+            return 'dismissed';
+        case 'pending':
+            return 'pending';
+        default:
+            return 'commented';
+    }
+}
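runWithConcurrency above is an unexported helper, so its semantics are worth restating: tasks start up to the limit, the first rejection clears the queue so nothing new starts, in-flight tasks still drain, and the first error is rethrown once the pool empties. A self-contained demonstration of the same pattern (the name pool is hypothetical, not a package export; this is a restatement for illustration, not the module's code):

// Compact equivalent of the unexported helper, for demonstration only.
async function pool(items, fn, limit) {
  const queue = [...items];
  const running = new Set();
  let firstError = null;
  while (queue.length > 0 || running.size > 0) {
    while (!firstError && running.size < limit && queue.length > 0) {
      const item = queue.shift();
      const tracked = Promise.resolve()
        .then(() => fn(item))
        .finally(() => running.delete(tracked));
      running.add(tracked);
    }
    if (running.size > 0) {
      try {
        await Promise.race(running);
      } catch (error) {
        if (!firstError) {
          firstError = error;
          queue.length = 0; // fail fast: stop starting new work
        }
      }
    }
  }
  if (firstError) throw firstError; // in-flight work has drained by now
}

// With limit 2, tasks 1-3 get started; once task 2 rejects, 4 and 5 never run.
const started = [];
try {
  await pool([1, 2, 3, 4, 5], async (n) => {
    started.push(n);
    await new Promise((resolve) => setTimeout(resolve, 10 * n));
    if (n === 2) throw new Error(`task ${n} failed`);
  }, 2);
} catch (error) {
  console.log(error.message, started); // "task 2 failed" [ 1, 2, 3 ]
}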
package/dist/sync.d.cts
ADDED
@@ -0,0 +1,24 @@
+import type { Cache } from "./cache.cjs";
+import type { Config } from "./config.cjs";
+import type { GitHubClient } from "./github.cjs";
+import type { ProgressEvent, RepoFullName, Timestamp } from "./types.cjs";
+export interface SyncOptions {
+    repos: RepoFullName[];
+    cache: Cache;
+    github: GitHubClient;
+    config: Config;
+    timeRange: {
+        start: Timestamp;
+        end: Timestamp;
+    };
+    forceSync?: boolean;
+    onProgress?: (event: ProgressEvent) => void;
+}
+export interface SyncResult {
+    synced_at: Timestamp;
+    repos: RepoFullName[];
+    events_inserted: number;
+    pull_requests_upserted: number;
+    commits_upserted: number;
+}
+export declare function syncRepos(options: SyncOptions): Promise<SyncResult>;
package/dist/sync.d.ts
ADDED
@@ -0,0 +1,24 @@
+import type { Cache } from "./cache.js";
+import type { Config } from "./config.js";
+import type { GitHubClient } from "./github.js";
+import type { ProgressEvent, RepoFullName, Timestamp } from "./types.js";
+export interface SyncOptions {
+    repos: RepoFullName[];
+    cache: Cache;
+    github: GitHubClient;
+    config: Config;
+    timeRange: {
+        start: Timestamp;
+        end: Timestamp;
+    };
+    forceSync?: boolean;
+    onProgress?: (event: ProgressEvent) => void;
+}
+export interface SyncResult {
+    synced_at: Timestamp;
+    repos: RepoFullName[];
+    events_inserted: number;
+    pull_requests_upserted: number;
+    commits_upserted: number;
+}
+export declare function syncRepos(options: SyncOptions): Promise<SyncResult>;
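Tying the pieces together, a hedged end-to-end sketch using only the option and result shapes declared above; the three placeholder values stand in for the package's config, cache, and GitHub client, whose factory functions are not part of this diff:

import { resolveRepos } from "./dist/repos.js";
import { syncRepos } from "./dist/sync.js";

// Placeholders: construct these via the package's config, cache, and github
// modules; their factory functions are outside this diff, so no real API
// call is shown here.
const config = /* a Config (see config.d.ts) */ null;
const cache = /* a Cache (see cache.d.ts) */ null;
const github = /* a GitHubClient (see github.d.ts) */ null;

const now = Date.now();
const timeRange = { start: now - 14 * 24 * 60 * 60 * 1000, end: now }; // last 14 days

const repos = await resolveRepos({ config, github });
const result = await syncRepos({
  repos,
  cache,
  github,
  config,
  timeRange,
  forceSync: false, // false honors cache.sync_ttl_hours; true re-syncs everything
  onProgress: (event) => console.error(`[${event.phase}] ${event.message}`),
});
console.log(
  `${result.pull_requests_upserted} PRs, ${result.commits_upserted} commits, ` +
  `${result.events_inserted} new events across ${result.repos.length} repos`,
);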