@0xbigboss/gh-pulse-core 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cache.cjs +312 -0
- package/dist/cache.d.cts +58 -0
- package/dist/cache.d.ts +58 -0
- package/dist/cache.js +303 -0
- package/dist/config.cjs +182 -0
- package/dist/config.d.cts +248 -0
- package/dist/config.d.ts +248 -0
- package/dist/config.js +172 -0
- package/dist/filters.cjs +16 -0
- package/dist/filters.d.cts +3 -0
- package/dist/filters.d.ts +3 -0
- package/dist/filters.js +12 -0
- package/dist/github.cjs +240 -0
- package/dist/github.d.cts +46 -0
- package/dist/github.d.ts +46 -0
- package/dist/github.js +235 -0
- package/dist/index.cjs +28 -0
- package/dist/index.d.cts +11 -0
- package/dist/index.d.ts +11 -0
- package/dist/index.js +11 -0
- package/dist/reports/context.cjs +8 -0
- package/dist/reports/context.d.cts +7 -0
- package/dist/reports/context.d.ts +7 -0
- package/dist/reports/context.js +5 -0
- package/dist/reports/exec.cjs +160 -0
- package/dist/reports/exec.d.cts +6 -0
- package/dist/reports/exec.d.ts +6 -0
- package/dist/reports/exec.js +157 -0
- package/dist/reports/index.cjs +21 -0
- package/dist/reports/index.d.cts +5 -0
- package/dist/reports/index.d.ts +5 -0
- package/dist/reports/index.js +5 -0
- package/dist/reports/meta.cjs +15 -0
- package/dist/reports/meta.d.cts +12 -0
- package/dist/reports/meta.d.ts +12 -0
- package/dist/reports/meta.js +12 -0
- package/dist/reports/personal.cjs +90 -0
- package/dist/reports/personal.d.cts +8 -0
- package/dist/reports/personal.d.ts +8 -0
- package/dist/reports/personal.js +87 -0
- package/dist/reports/team.cjs +127 -0
- package/dist/reports/team.d.cts +6 -0
- package/dist/reports/team.d.ts +6 -0
- package/dist/reports/team.js +124 -0
- package/dist/reports/types.cjs +2 -0
- package/dist/reports/types.d.cts +144 -0
- package/dist/reports/types.d.ts +144 -0
- package/dist/reports/types.js +1 -0
- package/dist/reports/utils.cjs +71 -0
- package/dist/reports/utils.d.cts +6 -0
- package/dist/reports/utils.d.ts +6 -0
- package/dist/reports/utils.js +65 -0
- package/dist/repos.cjs +102 -0
- package/dist/repos.d.cts +12 -0
- package/dist/repos.d.ts +12 -0
- package/dist/repos.js +96 -0
- package/dist/sync.cjs +360 -0
- package/dist/sync.d.cts +24 -0
- package/dist/sync.d.ts +24 -0
- package/dist/sync.js +357 -0
- package/dist/team.cjs +45 -0
- package/dist/team.d.cts +10 -0
- package/dist/team.d.ts +10 -0
- package/dist/team.js +42 -0
- package/dist/time.cjs +153 -0
- package/dist/time.d.cts +13 -0
- package/dist/time.d.ts +13 -0
- package/dist/time.js +145 -0
- package/dist/types.cjs +2 -0
- package/dist/types.d.cts +133 -0
- package/dist/types.d.ts +133 -0
- package/dist/types.js +1 -0
- package/package.json +29 -0
package/dist/cache.js
ADDED
@@ -0,0 +1,303 @@
import { Database } from 'bun:sqlite';
import { promises as fs } from 'node:fs';
import path from 'node:path';
export class Cache {
    db;
    now;
    constructor(db, now) {
        this.db = db;
        this.now = now;
    }
    static async open(options) {
        await ensureDir(options.path);
        const db = new Database(options.path);
        const cache = new Cache(db, options.now ?? (() => Date.now()));
        cache.init();
        return cache;
    }
    init() {
        this.db.exec(`
      CREATE TABLE IF NOT EXISTS meta (
        key TEXT PRIMARY KEY,
        value TEXT NOT NULL
      );

      CREATE TABLE IF NOT EXISTS events (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        type TEXT NOT NULL,
        repo TEXT NOT NULL,
        payload JSON NOT NULL,
        timestamp INTEGER NOT NULL,
        fetched_at INTEGER NOT NULL
      );

      CREATE INDEX IF NOT EXISTS idx_events_type ON events(type);
      CREATE INDEX IF NOT EXISTS idx_events_repo ON events(repo);
      CREATE INDEX IF NOT EXISTS idx_events_timestamp ON events(timestamp);

      CREATE TABLE IF NOT EXISTS pull_requests (
        repo TEXT NOT NULL,
        number INTEGER NOT NULL,
        data JSON NOT NULL,
        fetched_at INTEGER NOT NULL,
        PRIMARY KEY (repo, number)
      );

      CREATE INDEX IF NOT EXISTS idx_pr_author ON pull_requests((json_extract(data, '$.author')));
      CREATE INDEX IF NOT EXISTS idx_pr_state ON pull_requests((json_extract(data, '$.state')));

      CREATE TABLE IF NOT EXISTS commits (
        repo TEXT NOT NULL,
        sha TEXT NOT NULL,
        data JSON NOT NULL,
        fetched_at INTEGER NOT NULL,
        PRIMARY KEY (repo, sha)
      );

      CREATE INDEX IF NOT EXISTS idx_commits_author ON commits((json_extract(data, '$.author')));
      CREATE INDEX IF NOT EXISTS idx_commits_timestamp ON commits((json_extract(data, '$.committed_at')));

      CREATE TABLE IF NOT EXISTS linear_issues (
        id TEXT PRIMARY KEY,
        data JSON NOT NULL,
        fetched_at INTEGER NOT NULL
      );

      CREATE INDEX IF NOT EXISTS idx_linear_state ON linear_issues((json_extract(data, '$.state')));

      CREATE TABLE IF NOT EXISTS pr_issue_links (
        repo TEXT NOT NULL,
        pr_number INTEGER NOT NULL,
        issue_id TEXT NOT NULL,
        correlation_method TEXT NOT NULL,
        PRIMARY KEY (repo, pr_number, issue_id)
      );

      CREATE TABLE IF NOT EXISTS sync_state (
        repo TEXT PRIMARY KEY,
        last_sync INTEGER NOT NULL,
        cursor TEXT
      );
    `);
    }
    close() {
        this.db.close();
    }
    insertEvent(event, fetchedAt = this.now()) {
        const timestamp = getEventTimestamp(event);
        if (this.eventExists(event, timestamp)) {
            return false;
        }
        this.db.run('INSERT INTO events (type, repo, payload, timestamp, fetched_at) VALUES (?, ?, ?, ?, ?)', [event.type, event.repo, JSON.stringify(event), timestamp, fetchedAt]);
        return true;
    }
    upsertPullRequest(pr, fetchedAt = this.now()) {
        this.db.run(`INSERT INTO pull_requests (repo, number, data, fetched_at)
      VALUES (?, ?, ?, ?)
      ON CONFLICT(repo, number) DO UPDATE SET data = excluded.data, fetched_at = excluded.fetched_at`, [pr.repo, pr.number, JSON.stringify(pr), fetchedAt]);
    }
    upsertCommit(commit, fetchedAt = this.now()) {
        this.db.run(`INSERT INTO commits (repo, sha, data, fetched_at)
      VALUES (?, ?, ?, ?)
      ON CONFLICT(repo, sha) DO UPDATE SET data = excluded.data, fetched_at = excluded.fetched_at`, [commit.repo, commit.sha, JSON.stringify(commit), fetchedAt]);
    }
    getPullRequest(repo, number) {
        const row = this.db
            .query('SELECT data, fetched_at FROM pull_requests WHERE repo = ? AND number = ?')
            .get(repo, number);
        if (!row) {
            return null;
        }
        return {
            data: JSON.parse(row.data),
            fetched_at: row.fetched_at,
        };
    }
    getPullRequests(filter = {}) {
        const conditions = [];
        const params = [];
        if (filter.repo) {
            conditions.push('repo = ?');
            params.push(filter.repo);
        }
        if (filter.author) {
            conditions.push("json_extract(data, '$.author') = ?");
            params.push(filter.author);
        }
        if (filter.state) {
            conditions.push("json_extract(data, '$.state') = ?");
            params.push(filter.state);
        }
        const where = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
        const rows = this.db.query(`SELECT data FROM pull_requests ${where}`).all(...params);
        return rows.map((row) => JSON.parse(row.data));
    }
    getCommits(filter = {}) {
        const conditions = [];
        const params = [];
        if (filter.repo) {
            conditions.push('repo = ?');
            params.push(filter.repo);
        }
        if (filter.author) {
            conditions.push("json_extract(data, '$.author') = ?");
            params.push(filter.author);
        }
        const where = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
        const rows = this.db.query(`SELECT data FROM commits ${where}`).all(...params);
        return rows.map((row) => JSON.parse(row.data));
    }
    getCommit(repo, sha) {
        const row = this.db
            .query('SELECT data, fetched_at FROM commits WHERE repo = ? AND sha = ?')
            .get(repo, sha);
        if (!row) {
            return null;
        }
        return {
            data: JSON.parse(row.data),
            fetched_at: row.fetched_at,
        };
    }
    getEvents(filter = {}) {
        const conditions = [];
        const params = [];
        if (filter.repo) {
            conditions.push('repo = ?');
            params.push(filter.repo);
        }
        if (filter.type) {
            conditions.push('type = ?');
            params.push(filter.type);
        }
        if (filter.since !== undefined) {
            conditions.push('timestamp >= ?');
            params.push(filter.since);
        }
        if (filter.until !== undefined) {
            conditions.push('timestamp <= ?');
            params.push(filter.until);
        }
        const where = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
        const rows = this.db
            .query(`SELECT payload FROM events ${where} ORDER BY timestamp ASC`)
            .all(...params);
        return rows.map((row) => JSON.parse(row.payload));
    }
    getEventsForPr(repo, prNumber) {
        const rows = this.db
            .query("SELECT payload FROM events WHERE repo = ? AND json_extract(payload, '$.pr_number') = ? ORDER BY timestamp ASC")
            .all(repo, prNumber);
        return rows.map((row) => JSON.parse(row.payload));
    }
    getSyncState(repo) {
        const row = this.db
            .query('SELECT repo, last_sync, cursor FROM sync_state WHERE repo = ?')
            .get(repo);
        if (!row) {
            return null;
        }
        return {
            repo: row.repo,
            last_sync: row.last_sync,
            cursor: row.cursor,
        };
    }
    upsertSyncState(repo, lastSync, cursor = null) {
        this.db.run(`INSERT INTO sync_state (repo, last_sync, cursor)
      VALUES (?, ?, ?)
      ON CONFLICT(repo) DO UPDATE SET last_sync = excluded.last_sync, cursor = excluded.cursor`, [repo, lastSync, cursor]);
    }
    getFreshness(repos) {
        const now = this.now();
        const eventsMax = this.db.query('SELECT MAX(fetched_at) as max FROM events').get();
        const prMax = this.db.query('SELECT MAX(fetched_at) as max FROM pull_requests').get();
        const commitMax = this.db.query('SELECT MAX(fetched_at) as max FROM commits').get();
        const values = [eventsMax?.max ?? null, prMax?.max ?? null, commitMax?.max ?? null].filter((value) => typeof value === 'number');
        const dataAsOf = values.length > 0 ? Math.max(...values) : null;
        const cacheAgeMinutes = dataAsOf !== null ? Math.floor((now - dataAsOf) / 60000) : null;
        const syncedRows = this.db.query('SELECT COUNT(*) as count FROM sync_state').get();
        return {
            dataAsOf,
            cacheAgeMinutes,
            reposSynced: syncedRows.count,
            totalRepos: repos.length,
        };
    }
    isFresh(fetchedAt, ttlHours) {
        return fetchedAt >= this.now() - ttlHours * 60 * 60 * 1000;
    }
    eventExists(event, timestamp) {
        switch (event.type) {
            case 'pr_opened':
            case 'pr_ready':
            case 'pr_merged':
            case 'pr_closed':
            case 'review_submitted':
            case 'comment_added': {
                const prNumber = 'pr_number' in event ? event.pr_number : null;
                if (prNumber === null) {
                    return false;
                }
                const row = this.db
                    .query("SELECT 1 FROM events WHERE type = ? AND repo = ? AND timestamp = ? AND json_extract(payload, '$.pr_number') = ?")
                    .get(event.type, event.repo, timestamp, prNumber);
                return Boolean(row);
            }
            case 'commit_pushed': {
                const row = this.db
                    .query("SELECT 1 FROM events WHERE type = ? AND repo = ? AND json_extract(payload, '$.sha') = ?")
                    .get(event.type, event.repo, event.sha);
                return Boolean(row);
            }
            default: {
                const _exhaustive = event;
                throw new Error(`Unhandled event type: ${_exhaustive}`);
            }
        }
    }
}
export function getEventTimestamp(event) {
    switch (event.type) {
        case 'pr_opened':
            return event.opened_at;
        case 'pr_ready':
            return event.ready_at;
        case 'pr_merged':
            return event.merged_at;
        case 'pr_closed':
            return event.closed_at;
        case 'review_submitted':
            return event.submitted_at;
        case 'comment_added':
            return event.commented_at;
        case 'commit_pushed':
            return event.committed_at;
        default: {
            const _exhaustive = event;
            throw new Error(`Unhandled event type: ${_exhaustive}`);
        }
    }
}
function isSqliteMemoryPath(dbPath) {
    return dbPath === ':memory:' || dbPath === '';
}
async function ensureDir(dbPath) {
    if (isSqliteMemoryPath(dbPath)) {
        return;
    }
    const dir = path.dirname(dbPath);
    await fs.mkdir(dir, { recursive: true });
}
export function toCommitFromEvent(event) {
    return {
        repo: event.repo,
        sha: event.sha,
        author: event.author,
        committed_at: event.committed_at,
        message: event.message,
        additions: event.additions,
        deletions: event.deletions,
        files_changed: event.files_changed,
    };
}
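
A minimal usage sketch of the Cache class above, assuming the Bun runtime (the module imports from bun:sqlite); the import path and repository name are illustrative and not taken from the package's documentation.

// Sketch only — the deep import path and 'acme/widgets' repo are hypothetical.
import { Cache } from '@0xbigboss/gh-pulse-core/dist/cache.js';

// An in-memory database; ensureDir() skips directory creation for ':memory:'.
const cache = await Cache.open({ path: ':memory:' });

const event = {
  type: 'pr_opened',
  repo: 'acme/widgets',
  pr_number: 42,
  opened_at: Date.now(), // getEventTimestamp() reads this field for 'pr_opened'
};

cache.insertEvent(event);                    // true: row written to events
const duplicate = cache.insertEvent(event);  // false: eventExists() matches type, repo, timestamp, pr_number

console.log(duplicate, cache.getEvents({ repo: 'acme/widgets' }).length); // false 1

cache.close();
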
package/dist/config.cjs
ADDED
@@ -0,0 +1,182 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.CONFIG_SCHEMA = exports.DEFAULT_CONFIG_ENV = void 0;
exports.getGhCliToken = getGhCliToken;
exports.resolveConfigPath = resolveConfigPath;
exports.resolveCachePath = resolveCachePath;
exports.loadConfig = loadConfig;
const node_fs_1 = require("node:fs");
const node_child_process_1 = require("node:child_process");
const node_path_1 = __importDefault(require("node:path"));
const node_os_1 = __importDefault(require("node:os"));
const zod_1 = require("zod");
const yaml_1 = __importDefault(require("yaml"));
const TeamStrategySchema = zod_1.z.enum(['org_members', 'config_list', 'repo_contributors']);
const ExcludeSchema = zod_1.z
    .object({
    bots: zod_1.z.boolean().default(true),
    forks: zod_1.z.boolean().default(true),
    archived: zod_1.z.boolean().default(true),
    authors: zod_1.z.array(zod_1.z.string().min(1)).default([]),
})
    .default({
    bots: true,
    forks: true,
    archived: true,
    authors: [],
});
const ThresholdsSchema = zod_1.z
    .object({
    stale_days: zod_1.z.number().int().positive().default(2),
    stuck_days: zod_1.z.number().int().positive().default(4),
    large_pr_lines: zod_1.z.number().int().positive().default(500),
})
    .default({
    stale_days: 2,
    stuck_days: 4,
    large_pr_lines: 500,
});
const CacheSchema = zod_1.z
    .object({
    path: zod_1.z.string().default(''),
    ttl_hours: zod_1.z.number().int().positive().default(1),
    sync_ttl_hours: zod_1.z.number().int().nonnegative().default(24),
    concurrency: zod_1.z.number().int().positive().default(5),
})
    .default({
    path: '',
    ttl_hours: 1,
    sync_ttl_hours: 24,
    concurrency: 5,
});
const TeamSchema = zod_1.z
    .object({
    strategy: TeamStrategySchema.default('org_members'),
    members: zod_1.z.array(zod_1.z.string().min(1)).default([]),
})
    .default({
    strategy: 'org_members',
    members: [],
});
const ConfigSchema = zod_1.z.object({
    github_token: zod_1.z.string().optional(),
    linear_api_key: zod_1.z.string().min(1).optional(),
    orgs: zod_1.z.array(zod_1.z.string().min(1)).default([]),
    repos: zod_1.z.array(zod_1.z.string().min(1)).default([]),
    team: TeamSchema,
    exclude: ExcludeSchema,
    thresholds: ThresholdsSchema,
    timezone: zod_1.z.string().default(''),
    cache: CacheSchema,
});
// Internal schema with required token (after resolution)
const ResolvedConfigSchema = ConfigSchema.extend({
    github_token: zod_1.z.string().min(1),
});
exports.DEFAULT_CONFIG_ENV = 'GH_PULSE_CONFIG';
/**
 * Try to get GitHub token from `gh` CLI.
 * Returns undefined if gh is not available or not authenticated.
 */
function getGhCliToken() {
    try {
        const token = (0, node_child_process_1.execSync)('gh auth token', {
            encoding: 'utf8',
            stdio: ['pipe', 'pipe', 'pipe'],
            timeout: 5000,
        }).trim();
        return token.length > 0 ? token : undefined;
    }
    catch {
        return undefined;
    }
}
function resolveConfigPath(providedPath, env = process.env) {
    if (providedPath) {
        return expandHome(providedPath);
    }
    const envPath = env[exports.DEFAULT_CONFIG_ENV];
    if (envPath && envPath.trim().length > 0) {
        return expandHome(envPath.trim());
    }
    const xdgConfigHome = env.XDG_CONFIG_HOME;
    const base = xdgConfigHome && xdgConfigHome.trim().length > 0
        ? xdgConfigHome.trim()
        : node_path_1.default.join(node_os_1.default.homedir(), '.config');
    return node_path_1.default.join(base, 'gh-pulse', 'config.yaml');
}
function resolveCachePath(configuredPath, env = process.env) {
    if (configuredPath && configuredPath.trim().length > 0) {
        return expandHome(configuredPath.trim());
    }
    const xdgCacheHome = env.XDG_CACHE_HOME;
    const base = xdgCacheHome && xdgCacheHome.trim().length > 0
        ? xdgCacheHome.trim()
        : node_path_1.default.join(node_os_1.default.homedir(), '.cache');
    return node_path_1.default.join(base, 'gh-pulse', 'cache.db');
}
async function loadConfig(options = {}) {
    const env = options.env ?? process.env;
    const configPath = resolveConfigPath(options.path, env);
    const rawText = await node_fs_1.promises.readFile(configPath, 'utf8');
    const parsed = yaml_1.default.parse(rawText) ?? {};
    // Resolve env placeholders, allowing GITHUB_TOKEN to be missing
    const resolved = resolveEnvPlaceholders(parsed, env, ['GITHUB_TOKEN']);
    const parsedConfig = ConfigSchema.parse(resolved);
    // Resolve GitHub token: config > env > gh CLI
    let githubToken = parsedConfig.github_token;
    if (!githubToken || githubToken.trim().length === 0) {
        // Try gh CLI as fallback
        githubToken = getGhCliToken();
    }
    if (!githubToken || githubToken.trim().length === 0) {
        throw new Error('GitHub token not found. Provide github_token in config, set GITHUB_TOKEN env var, or authenticate with `gh auth login`');
    }
    const timezone = parsedConfig.timezone.trim().length > 0
        ? parsedConfig.timezone
        : Intl.DateTimeFormat().resolvedOptions().timeZone || 'UTC';
    return ResolvedConfigSchema.parse({
        ...parsedConfig,
        github_token: githubToken,
        timezone,
        cache: {
            ...parsedConfig.cache,
            path: resolveCachePath(parsedConfig.cache.path, env),
        },
    });
}
function resolveEnvPlaceholders(value, env, allowMissing = []) {
    if (Array.isArray(value)) {
        return value.map((item) => resolveEnvPlaceholders(item, env, allowMissing));
    }
    if (value && typeof value === 'object') {
        const entries = Object.entries(value);
        return Object.fromEntries(entries.map(([key, val]) => [key, resolveEnvPlaceholders(val, env, allowMissing)]));
    }
    if (typeof value === 'string') {
        return substituteEnv(value, env, allowMissing);
    }
    return value;
}
function substituteEnv(value, env, allowMissing = []) {
    return value.replace(/\$\{([A-Z0-9_]+)\}/gi, (_, name) => {
        const replacement = env[name];
        if (!replacement || replacement.trim().length === 0) {
            if (allowMissing.includes(name)) {
                return ''; // Return empty string, will be handled by fallback logic
            }
            throw new Error(`Missing environment variable for ${name}`);
        }
        return replacement;
    });
}
function expandHome(value) {
    if (!value.startsWith('~')) {
        return value;
    }
    return node_path_1.default.join(node_os_1.default.homedir(), value.slice(1));
}
exports.CONFIG_SCHEMA = ConfigSchema;
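
A rough sketch of how the exported config helpers fit together, assuming the parallel ESM build (dist/config.js) mirrors this CJS module's exports; the config path shown is hypothetical and the YAML file must already exist.

// Sketch only — the deep import path and config location are illustrative.
import { loadConfig, resolveConfigPath } from '@0xbigboss/gh-pulse-core/dist/config.js';

// Lookup order: explicit path > GH_PULSE_CONFIG > $XDG_CONFIG_HOME/gh-pulse/config.yaml > ~/.config/gh-pulse/config.yaml
console.log(resolveConfigPath(undefined, process.env));

// loadConfig() reads the YAML, substitutes ${VAR} placeholders (GITHUB_TOKEN may be empty),
// applies the zod defaults, resolves the token (config value, then `gh auth token`),
// and fills in the cache path via resolveCachePath() when cache.path is blank.
const config = await loadConfig({ path: '~/.config/gh-pulse/config.yaml' });
console.log(config.timezone, config.cache.path, config.thresholds.stale_days);
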