jowork 0.2.5 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{chunk-ROIINI33.js → chunk-4PIT2GZ4.js} +13 -1
- package/dist/{chunk-XLYRHKG6.js → chunk-54SD5GBF.js} +1 -1
- package/dist/chunk-63AMINQC.js +156 -0
- package/dist/{chunk-XAEGXSEO.js → chunk-74AHY7X6.js} +4 -0
- package/dist/{chunk-7U3SXINY.js → chunk-ATAUWJYD.js} +320 -50
- package/dist/chunk-DQW74UCN.js +671 -0
- package/dist/chunk-EYP6WMFF.js +153 -0
- package/dist/{chunk-JSTXMDXI.js → chunk-FCFZCZHR.js} +1 -1
- package/dist/chunk-FX6Z3QHV.js +34 -0
- package/dist/chunk-HENAABEL.js +419 -0
- package/dist/chunk-OXWWOKC7.js +201 -0
- package/dist/{chunk-HUHDL7WV.js → chunk-QGHJ45PL.js} +276 -199
- package/dist/chunk-RO3KK5RC.js +132 -0
- package/dist/{chunk-JE6TOU7W.js → chunk-TFMF3EXE.js} +2 -7
- package/dist/{chunk-TN327MDF.js → chunk-VX662YLA.js} +3 -3
- package/dist/cli.js +308 -135
- package/dist/{config-AI6UIJJN.js → config-FH2XLN7A.js} +2 -2
- package/dist/content-reader-VPGTR2SF.js +10 -0
- package/dist/context-ZNI3WOB7.js +10 -0
- package/dist/{credential-store-ZRZCSRPC.js → credential-store-OS5ZY4OW.js} +2 -2
- package/dist/{feishu-A6YVFKEN.js → feishu-XW5T6ER2.js} +8 -3
- package/dist/{git-manager-N35XSG4Y.js → git-manager-RVWV2GSV.js} +2 -1
- package/dist/github-PQKAYTLO.js +11 -0
- package/dist/{paths-JXOMBYIT.js → paths-FFRET6F7.js} +7 -3
- package/dist/{server-5GVWN2NB.js → server-WEADPUST.js} +59 -66
- package/dist/{setup-SYBQIL2O.js → setup-S2S2CHB2.js} +76 -30
- package/dist/sync-SRLFR5NA.js +21 -0
- package/dist/transport.js +6 -4
- package/package.json +1 -1
- package/src/dashboard/public/app.js +34 -8
- package/src/dashboard/public/style.css +14 -0
- package/dist/chunk-L5ZR7TSK.js +0 -82
- package/dist/chunk-LS2AJM5A.js +0 -163
- package/dist/chunk-QMOFQX7X.js +0 -612
- package/dist/chunk-YJWTKFWX.js +0 -451
- package/dist/github-SHWUFNYB.js +0 -10
- package/dist/sync-KDSPGY4A.js +0 -18
|
@@ -35,6 +35,16 @@ function fileRepoDir() {
|
|
|
35
35
|
mkdirSync(dir, { recursive: true });
|
|
36
36
|
return dir;
|
|
37
37
|
}
|
|
38
|
+
/**
 * Directory where bare git mirrors are stored (<jowork>/data/repos).
 * Created on first use.
 * @returns {string} Absolute path to the bare-repos directory.
 */
function bareReposDir() {
  const reposPath = join(joworkDir(), "data", "repos");
  mkdirSync(reposPath, { recursive: true });
  return reposPath;
}
|
|
43
|
+
/**
 * Directory where plugins are stored (<jowork>/plugins).
 * Created on first use.
 * @returns {string} Absolute path to the plugins directory.
 */
function pluginsDir() {
  const pluginsPath = join(joworkDir(), "plugins");
  mkdirSync(pluginsPath, { recursive: true });
  return pluginsPath;
}
|
|
38
48
|
|
|
39
49
|
export {
|
|
40
50
|
joworkDir,
|
|
@@ -43,5 +53,7 @@ export {
|
|
|
43
53
|
credentialsDir,
|
|
44
54
|
configPath,
|
|
45
55
|
logsDir,
|
|
46
|
-
fileRepoDir
|
|
56
|
+
fileRepoDir,
|
|
57
|
+
bareReposDir,
|
|
58
|
+
pluginsDir
|
|
47
59
|
};
|
|
@@ -0,0 +1,156 @@
|
|
|
1
|
+
import {
|
|
2
|
+
GitManager
|
|
3
|
+
} from "./chunk-EYP6WMFF.js";
|
|
4
|
+
import {
|
|
5
|
+
formatIssue,
|
|
6
|
+
formatPullRequest
|
|
7
|
+
} from "./chunk-RO3KK5RC.js";
|
|
8
|
+
import {
|
|
9
|
+
logError,
|
|
10
|
+
logInfo
|
|
11
|
+
} from "./chunk-MYDK7MWB.js";
|
|
12
|
+
|
|
13
|
+
// src/sync/github.ts
|
|
14
|
+
// Default logger used when the caller does not inject one; routes through the
// shared log helpers under the "sync" category.
var defaultLogger = {
  info: (msg, ctx) => logInfo("sync", msg, ctx),
  // NOTE(review): no logWarn helper is visible in this chunk, so warnings are
  // emitted via logError — confirm this severity mapping is intentional.
  warn: (msg, ctx) => logError("sync", msg, ctx),
  error: (msg, ctx) => logError("sync", msg, ctx)
};
// Base URL for the GitHub REST API.
var GITHUB_API = "https://api.github.com";
// Pause inserted between paginated requests / per-repo syncs to stay polite
// with GitHub's rate limiting.
var RATE_LIMIT_DELAY_MS = 200;
|
|
21
|
+
/**
 * Extract the URL tagged rel="next" from an HTTP Link response header.
 * @param {string | null} linkHeader - Raw Link header value, or null/absent.
 * @returns {string | null} The next-page URL, or null when there is none.
 */
function parseNextLink(linkHeader) {
  if (!linkHeader) {
    return null;
  }
  const nextPattern = /<([^>]+)>;\s*rel="next"/;
  const found = nextPattern.exec(linkHeader);
  return found ? found[1] : null;
}
|
|
26
|
+
/**
 * Follow GitHub's Link-header pagination and collect every page into one array.
 *
 * Best-effort: a 403 whose rate-limit reset is less than a minute away is
 * waited out and retried; any other failed response stops pagination and the
 * pages fetched so far are returned (now logged instead of silently dropped).
 *
 * @param {string} url - URL of the first page.
 * @param {Record<string, string>} headers - Request headers (auth, accept, UA).
 * @param {{info: Function, warn: Function, error: Function}} logger - Log sink.
 * @returns {Promise<any[]>} Concatenated items from all fetched pages.
 */
async function fetchAllPages(url, headers, logger) {
  const results = [];
  let nextUrl = url;
  while (nextUrl) {
    const res = await fetch(nextUrl, { headers });
    if (!res.ok) {
      if (res.status === 403) {
        const resetAt = res.headers.get("x-ratelimit-reset");
        if (resetAt) {
          // x-ratelimit-reset is a unix epoch in seconds; add 1s of slack.
          const waitMs = Math.max(0, Number.parseInt(resetAt, 10) * 1e3 - Date.now()) + 1e3;
          if (waitMs < 6e4) {
            logger.warn(`Rate limited, waiting ${Math.ceil(waitMs / 1e3)}s`);
            await new Promise((r) => setTimeout(r, waitMs));
            continue;
          }
        }
      }
      // Previously a silent break; surface the failure so truncated results
      // are visible in the sync logs.
      logger.warn(`GitHub API request failed with status ${res.status}; returning partial results`);
      break;
    }
    const data = await res.json();
    results.push(...data);
    nextUrl = parseNextLink(res.headers.get("link"));
    if (nextUrl) {
      await new Promise((r) => setTimeout(r, RATE_LIMIT_DELAY_MS));
    }
  }
  return results;
}
|
|
54
|
+
/**
 * Full GitHub sync: mirrors the user's repositories as bare clones and
 * upserts their issues / pull requests into the local object store.
 *
 * @param {object} ctx - Sync context; must provide getUpdatedSince(key),
 *   batchUpsert(items) -> {inserted, updated}, and saveTimestampCursor(key).
 *   Cursor key used here is "github:issues".
 * @param {{token?: string}} data - Credentials; `token` is required.
 * @param {object} [logger=defaultLogger] - info/warn/error sink.
 * @returns {Promise<{repos: number, issues: number, prs: number,
 *   newObjects: number, updatedObjects: number, clonedRepos: number}>}
 * @throws {Error} When no GitHub token is supplied.
 */
async function syncGitHub(ctx, data, logger = defaultLogger) {
  const token = data.token;
  if (!token) throw new Error("Missing GitHub token");
  const headers = {
    Authorization: `Bearer ${token}`,
    Accept: "application/vnd.github.v3+json",
    // NOTE(review): UA pins 0.1.0 while the package is at 0.3.0 — confirm
    // whether this string should track the release version.
    "User-Agent": "jowork/0.1.0"
  };
  let repos = 0, issues = 0, prs = 0, newObjects = 0, updatedObjects = 0, clonedRepos = 0;
  // Every repo the user owns or collaborates on, most recently pushed first.
  const repoList = await fetchAllPages(
    `${GITHUB_API}/user/repos?per_page=100&sort=pushed&affiliation=owner,collaborator`,
    headers,
    logger
  );
  repos = repoList.length;
  logger.info(`Found ${repos} repos`);
  // Only non-fork repos are mirrored locally as bare clones.
  const ownRepos = repoList.filter((r) => !r.fork);
  const cloneTargets = ownRepos.map((r) => ({
    url: r.html_url.replace(/\/$/, "") + ".git",
    source: "github",
    // "owner/name" -> "owner--name" so the repo is a single path segment.
    name: r.full_name.replace("/", "--"),
    token
  }));
  try {
    const cloneResults = await GitManager.syncBareRepos(cloneTargets);
    clonedRepos = cloneResults.filter((r) => r.path && r.isNew).length;
    const fetchedRepos = cloneResults.filter((r) => r.path && !r.isNew).length;
    if (clonedRepos > 0 || fetchedRepos > 0) {
      logger.info(`Git repos: ${clonedRepos} cloned, ${fetchedRepos} fetched`);
    }
  } catch (err) {
    // Repo mirroring is best-effort; issue/PR sync continues regardless.
    logger.warn(`Bare repo sync error (non-fatal): ${err}`);
  }
  // Incremental cursor: only fetch items updated since the previous sync.
  const since = ctx.getUpdatedSince("github:issues");
  for (const repo of repoList) {
    try {
      // The issues endpoint also returns pull requests; told apart below.
      let issueUrl = `${GITHUB_API}/repos/${repo.full_name}/issues?state=all&per_page=100&sort=updated&direction=asc`;
      if (since) {
        issueUrl += `&since=${since}`;
      }
      const issueList = await fetchAllPages(issueUrl, headers, logger);
      const items = [];
      for (const item of issueList) {
        // On this endpoint only pull requests carry a `pull_request` key.
        const isPR = !!item.pull_request;
        const sourceType = isPR ? "pull_request" : "issue";
        const uri = `github://${repo.full_name}/${sourceType}/${item.number}`;
        const title = `${repo.full_name}#${item.number}: ${item.title}`;
        const body = formatIssueBody(item, repo.full_name);
        // NOTE(review): assumes every label is an object with `.name` —
        // confirm string-form labels never reach this code path.
        const labelNames = item.labels.map((l) => l.name);
        const formatter = isPR ? formatPullRequest : formatIssue;
        const fileContent = formatter({
          source: "github",
          repo: repo.full_name,
          number: item.number,
          title: item.title,
          state: item.state,
          author: item.user?.login ?? "unknown",
          labels: labelNames,
          created: item.created_at,
          uri,
          body: item.body ?? ""
        });
        items.push({
          source: "github",
          sourceType,
          uri,
          title,
          // Summary is the body truncated to 200 chars, or the title when
          // the body is empty/null.
          summary: item.body ? item.body.length > 200 ? item.body.slice(0, 200) + "..." : item.body : item.title,
          tags: ["github", sourceType, item.state, ...labelNames],
          content: body,
          contentType: "text/plain",
          createdAt: new Date(item.created_at).getTime(),
          fileContent,
          fileMeta: { repo: repo.full_name, number: item.number }
        });
      }
      const result = ctx.batchUpsert(items);
      newObjects += result.inserted;
      updatedObjects += result.updated;
      issues += items.filter((i) => i.sourceType === "issue").length;
      prs += items.filter((i) => i.sourceType === "pull_request").length;
    } catch (err) {
      // One failing repo must not abort the whole sync.
      logger.warn(`Error syncing ${repo.full_name}: ${err}`);
    }
    // Pause between repos to stay under the API rate limits.
    await new Promise((r) => setTimeout(r, RATE_LIMIT_DELAY_MS));
  }
  ctx.saveTimestampCursor("github:issues");
  logger.info("GitHub sync complete", { repos, issues, prs, newObjects, updatedObjects, clonedRepos });
  return { repos, issues, prs, newObjects, updatedObjects, clonedRepos };
}
|
|
144
|
+
/**
 * Render an issue/PR as plain text: a title line, a metadata line, a labels
 * line, a blank separator, then the body.
 * @param {object} item - GitHub issue/PR payload (number, title, state, user,
 *   created_at, labels, body).
 * @param {string} repo - "owner/name" of the owning repository.
 * @returns {string} Newline-joined plain-text representation.
 */
function formatIssueBody(item, repo) {
  const titleLine = `${repo}#${item.number}: ${item.title}`;
  const metaLine = `State: ${item.state} | Author: ${item.user?.login ?? "unknown"} | Created: ${item.created_at}`;
  const joinedLabels = item.labels.map((l) => l.name).join(", ");
  const labelsLine = `Labels: ${joinedLabels || "none"}`;
  const bodyText = item.body ?? "(no description)";
  return [titleLine, metaLine, labelsLine, "", bodyText].join("\n");
}
|
|
153
|
+
|
|
154
|
+
export {
|
|
155
|
+
syncGitHub
|
|
156
|
+
};
|
|
@@ -216,6 +216,10 @@ var MIGRATIONS = [
|
|
|
216
216
|
`
|
|
217
217
|
ALTER TABLE objects ADD COLUMN file_path TEXT;
|
|
218
218
|
CREATE INDEX IF NOT EXISTS idx_objects_file_path ON objects(file_path);
|
|
219
|
+
`,
|
|
220
|
+
// 009 — Drop object_bodies table (content now lives on disk via FileWriter)
|
|
221
|
+
`
|
|
222
|
+
DROP TABLE IF EXISTS object_bodies;
|
|
219
223
|
`
|
|
220
224
|
];
|
|
221
225
|
var DbManager = class {
|