jowork 0.1.0 → 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chunk-AIXKXEYS.js +547 -0
- package/dist/{chunk-3NMLDZBL.js → chunk-JE6TOU7W.js} +81 -213
- package/dist/chunk-JSTXMDXI.js +32 -0
- package/dist/chunk-L5ZR7TSK.js +82 -0
- package/dist/chunk-LS2AJM5A.js +163 -0
- package/dist/chunk-MYDK7MWB.js +31 -0
- package/dist/chunk-QMOFQX7X.js +612 -0
- package/dist/chunk-ROIINI33.js +47 -0
- package/dist/chunk-TN327MDF.js +189 -0
- package/dist/chunk-UJ4KEHGZ.js +18 -0
- package/dist/chunk-XAEGXSEO.js +320 -0
- package/dist/chunk-XLYRHKG6.js +50 -0
- package/dist/chunk-YJWTKFWX.js +451 -0
- package/dist/{chunk-S24PDC46.js → chunk-YVPWTH6F.js} +395 -108
- package/dist/cli.js +2175 -891
- package/dist/config-AI6UIJJN.js +10 -0
- package/dist/credential-store-ZRZCSRPC.js +14 -0
- package/dist/feishu-A6YVFKEN.js +19 -0
- package/dist/git-manager-N35XSG4Y.js +8 -0
- package/dist/github-SHWUFNYB.js +10 -0
- package/dist/paths-JXOMBYIT.js +19 -0
- package/dist/server-6WYDERK6.js +884 -0
- package/dist/setup-IDQDPCEJ.js +772 -0
- package/dist/sync-7V54N62M.js +18 -0
- package/dist/transport.js +8 -3
- package/package.json +31 -13
- package/src/dashboard/public/app.js +603 -0
- package/src/dashboard/public/index.html +18 -0
- package/src/dashboard/public/style.css +656 -0
- package/dist/src-WYAQWZZZ.js +0 -66
|
@@ -0,0 +1,547 @@
|
|
|
1
|
+
import {
|
|
2
|
+
linkAllUnprocessed,
|
|
3
|
+
syncGitLab,
|
|
4
|
+
syncLinear
|
|
5
|
+
} from "./chunk-YJWTKFWX.js";
|
|
6
|
+
import {
|
|
7
|
+
DbManager
|
|
8
|
+
} from "./chunk-XAEGXSEO.js";
|
|
9
|
+
import {
|
|
10
|
+
listCredentials,
|
|
11
|
+
loadCredential
|
|
12
|
+
} from "./chunk-XLYRHKG6.js";
|
|
13
|
+
import {
|
|
14
|
+
dbPath,
|
|
15
|
+
fileRepoDir
|
|
16
|
+
} from "./chunk-ROIINI33.js";
|
|
17
|
+
import {
|
|
18
|
+
syncGitHub
|
|
19
|
+
} from "./chunk-LS2AJM5A.js";
|
|
20
|
+
import {
|
|
21
|
+
contentHash,
|
|
22
|
+
formatAnalytics,
|
|
23
|
+
formatMessages,
|
|
24
|
+
syncFeishu,
|
|
25
|
+
syncFeishuApprovals,
|
|
26
|
+
syncFeishuDocs,
|
|
27
|
+
syncFeishuMeetings
|
|
28
|
+
} from "./chunk-QMOFQX7X.js";
|
|
29
|
+
import {
|
|
30
|
+
createId
|
|
31
|
+
} from "./chunk-JE6TOU7W.js";
|
|
32
|
+
import {
|
|
33
|
+
GitManager
|
|
34
|
+
} from "./chunk-L5ZR7TSK.js";
|
|
35
|
+
import {
|
|
36
|
+
logError,
|
|
37
|
+
logInfo
|
|
38
|
+
} from "./chunk-MYDK7MWB.js";
|
|
39
|
+
|
|
40
|
+
// src/commands/sync.ts
|
|
41
|
+
import { existsSync as existsSync2 } from "fs";
|
|
42
|
+
|
|
43
|
+
// src/sync/posthog.ts
|
|
44
|
+
// Default logger for PostHog sync: routes everything through the shared
// structured log helpers under the "sync" scope. warn is wired to
// logError as well — only info/error level helpers are in scope here.
var defaultLogger = {
  info: (message, context) => logInfo("sync", message, context),
  warn: (message, context) => logError("sync", message, context),
  error: (message, context) => logError("sync", message, context)
};
|
|
49
|
+
// Sync PostHog insights and event definitions into the local object store.
//
// Parameters:
//   sqlite     - prepared-statement style DB handle (prepare/transaction) [project type]
//   data       - credential payload: { apiKey, host?, projectId? }
//   logger     - { info, warn, error }; defaults to the module-level logger
//   fileWriter - optional FileWriter; when present each new object is also
//                written to the file repo and its path recorded on the row
//
// Returns { events, insights, newObjects }.
// Throws only when the API key is missing; per-endpoint fetch failures are
// logged and swallowed so one endpoint cannot abort the other.
async function syncPostHog(sqlite, data, logger = defaultLogger, fileWriter) {
  const { apiKey, host, projectId: rawProjectId } = data;
  if (!apiKey) throw new Error("Missing PostHog API key");
  const baseUrl = host || "https://app.posthog.com";
  const projectId = rawProjectId || "1";
  const headers = {
    Authorization: `Bearer ${apiKey}`,
    "Content-Type": "application/json"
  };
  let events = 0, insights = 0, newObjects = 0;
  // Prepared statements shared by both sync passes (prepare once, reuse).
  const checkExists = sqlite.prepare("SELECT id FROM objects WHERE uri = ?");
  const insertObj = sqlite.prepare(`
    INSERT INTO objects (id, source, source_type, uri, title, summary, tags, content_hash, last_synced_at, created_at)
    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
  `);
  const insertBody = sqlite.prepare(`
    INSERT OR REPLACE INTO object_bodies (object_id, content, content_type, fetched_at)
    VALUES (?, ?, ?, ?)
  `);
  const insertFts = sqlite.prepare(`
    INSERT INTO objects_fts(rowid, title, summary, tags, source, source_type, body_excerpt)
    VALUES (?, ?, ?, ?, ?, ?, ?)
  `);
  const getRowid = sqlite.prepare("SELECT rowid FROM objects WHERE id = ?");
  // FIX: previously this UPDATE was re-prepared inside every loop iteration.
  const setFilePath = sqlite.prepare("UPDATE objects SET file_path = ? WHERE id = ?");
  // Pass 1: insights.
  try {
    const insightsRes = await fetch(`${baseUrl}/api/projects/${projectId}/insights/?limit=50`, { headers });
    if (insightsRes.ok) {
      const insightsData = await insightsRes.json();
      const batch = sqlite.transaction((items) => {
        for (const insight of items) {
          const uri = `posthog://insight/${insight.id}`;
          if (checkExists.get(uri)) continue;
          const now = Date.now();
          const id = createId("obj");
          const summary = insight.description || `Insight: ${insight.name}`;
          // FIX: insights may come back without a filters object (e.g.
          // query-based insights); guard with ?? {} so one such insight
          // cannot abort the whole batch with a TypeError.
          const tags = JSON.stringify(["posthog", "insight", ...Object.keys(insight.filters ?? {}).slice(0, 3)]);
          const body = JSON.stringify({
            name: insight.name,
            description: insight.description,
            filters: insight.filters,
            lastRefresh: insight.last_refresh
          }, null, 2);
          insertObj.run(id, "posthog", "insight", uri, insight.name, summary, tags, contentHash(body), now, now);
          insertBody.run(id, body, "application/json", now);
          // FTS indexing is best-effort: a malformed row must not kill the sync.
          try {
            const rowid = getRowid.get(id);
            if (rowid) {
              const excerpt = body.length > 500 ? body.slice(0, 500) : body;
              insertFts.run(rowid.rowid, insight.name ?? "", summary ?? "", tags, "posthog", "insight", excerpt);
            }
          } catch {
          }
          // Mirror to the file repo when a writer is supplied (best-effort).
          if (fileWriter) {
            try {
              const fileContent = formatAnalytics({
                name: insight.name,
                description: insight.description,
                filters: insight.filters,
                lastRefresh: insight.last_refresh
              });
              const filePath = fileWriter.writeObject("posthog", "insight", {
                id,
                title: insight.name
              }, fileContent);
              setFilePath.run(filePath, id);
            } catch {
            }
          }
          insights++;
          newObjects++;
        }
      });
      batch(insightsData.results ?? []);
      logger.info(`Synced ${insights} insights`);
    } else {
      logger.warn(`Failed to fetch insights: ${insightsRes.status}`);
    }
  } catch (err) {
    logger.error(`Insights sync error: ${err}`);
  }
  // Pass 2: event definitions.
  try {
    const eventsRes = await fetch(`${baseUrl}/api/projects/${projectId}/event_definitions/?limit=100`, { headers });
    if (eventsRes.ok) {
      const eventsData = await eventsRes.json();
      const batch = sqlite.transaction((items) => {
        for (const event of items) {
          const uri = `posthog://event/${event.name}`;
          if (checkExists.get(uri)) continue;
          const now = Date.now();
          const id = createId("obj");
          const summary = `${event.name}: ${event.description ?? "no description"} (30d volume: ${event.volume_30_day ?? "N/A"})`;
          const tags = JSON.stringify(["posthog", "event_definition"]);
          const body = JSON.stringify(event, null, 2);
          insertObj.run(id, "posthog", "event_definition", uri, event.name, summary, tags, contentHash(body), now, now);
          insertBody.run(id, body, "application/json", now);
          try {
            const rowid = getRowid.get(id);
            if (rowid) {
              const excerpt = body.length > 500 ? body.slice(0, 500) : body;
              insertFts.run(rowid.rowid, event.name ?? "", summary ?? "", tags, "posthog", "event_definition", excerpt);
            }
          } catch {
          }
          if (fileWriter) {
            try {
              const fileContent = formatAnalytics(event);
              const filePath = fileWriter.writeObject("posthog", "event_definition", {
                id,
                title: event.name
              }, fileContent);
              setFilePath.run(filePath, id);
            } catch {
            }
          }
          events++;
          newObjects++;
        }
      });
      batch(eventsData.results ?? []);
      logger.info(`Synced ${events} event definitions`);
    }
  } catch (err) {
    logger.error(`Events sync error: ${err}`);
  }
  logger.info("PostHog sync complete", { events, insights, newObjects });
  return { events, insights, newObjects };
}
|
|
176
|
+
|
|
177
|
+
// src/sync/firebase.ts
|
|
178
|
+
// Default logger for Firebase sync, scoped to "sync" via the shared
// structured log helpers. warn falls back to logError — no dedicated
// warn-level helper is in scope in this chunk.
var defaultLogger2 = {
  info: (message, context) => logInfo("sync", message, context),
  warn: (message, context) => logError("sync", message, context),
  error: (message, context) => logError("sync", message, context)
};
|
|
183
|
+
// Sync Firebase (Google Analytics 4) event counts into the object store.
//
// Parameters:
//   sqlite     - prepared-statement style DB handle [project type]
//   data       - credential payload: { projectId, apiKey?, propertyId? }
//   logger     - { info, warn, error }; defaults to the module-level logger
//   fileWriter - optional FileWriter; mirrors each new object to disk
//
// Returns { events, newObjects }.
// Throws only on a missing projectId; a missing apiKey is a logged warning
// (nothing to fetch), and API failures are logged and swallowed.
async function syncFirebase(sqlite, data, logger = defaultLogger2, fileWriter) {
  const { projectId, apiKey } = data;
  if (!projectId) throw new Error("Missing Firebase project ID");
  let events = 0, newObjects = 0;
  const checkExists = sqlite.prepare("SELECT id FROM objects WHERE uri = ?");
  const insertObj = sqlite.prepare(`
    INSERT INTO objects (id, source, source_type, uri, title, summary, tags, content_hash, last_synced_at, created_at)
    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
  `);
  const insertBody = sqlite.prepare(`
    INSERT OR REPLACE INTO object_bodies (object_id, content, content_type, fetched_at)
    VALUES (?, ?, ?, ?)
  `);
  const insertFts = sqlite.prepare(`
    INSERT INTO objects_fts(rowid, title, summary, tags, source, source_type, body_excerpt)
    VALUES (?, ?, ?, ?, ?, ?, ?)
  `);
  const getRowid = sqlite.prepare("SELECT rowid FROM objects WHERE id = ?");
  // FIX: previously this UPDATE was re-prepared inside the loop body.
  const setFilePath = sqlite.prepare("UPDATE objects SET file_path = ? WHERE id = ?");
  if (apiKey) {
    try {
      // GA4 property id defaults to the Firebase project id when not given.
      const propertyId = data.propertyId ?? projectId;
      const res = await fetch(
        `https://analyticsdata.googleapis.com/v1beta/properties/${propertyId}:runReport?key=${apiKey}`,
        {
          method: "POST",
          headers: { "Content-Type": "application/json" },
          body: JSON.stringify({
            dateRanges: [{ startDate: "7daysAgo", endDate: "today" }],
            dimensions: [{ name: "eventName" }],
            metrics: [{ name: "eventCount" }],
            limit: 50
          })
        }
      );
      if (res.ok) {
        const report = await res.json();
        const batch = sqlite.transaction((rows) => {
          for (const row of rows) {
            const eventName = row.dimensionValues[0]?.value ?? "unknown";
            // FIX: always pass a radix to parseInt; GA4 metric values are
            // decimal strings.
            const eventCount = Number.parseInt(row.metricValues[0]?.value ?? "0", 10);
            const uri = `firebase://${projectId}/event/${eventName}`;
            if (checkExists.get(uri)) continue;
            const nowMs = Date.now();
            const id = createId("obj");
            const summary = `${eventName}: ${eventCount} events (last 7 days)`;
            const tags = JSON.stringify(["firebase", "analytics", "event"]);
            const body = JSON.stringify({ eventName, eventCount, period: "7daysAgo..today" }, null, 2);
            insertObj.run(id, "firebase", "analytics_event", uri, eventName, summary, tags, contentHash(body), nowMs, nowMs);
            insertBody.run(id, body, "application/json", nowMs);
            // FTS indexing is best-effort.
            try {
              const rowid = getRowid.get(id);
              if (rowid) {
                insertFts.run(rowid.rowid, eventName ?? "", summary ?? "", tags, "firebase", "analytics_event", body.length > 500 ? body.slice(0, 500) : body);
              }
            } catch {
            }
            // Mirror to the file repo when a writer is supplied (best-effort).
            if (fileWriter) {
              try {
                const fileContent = formatAnalytics({ eventName, eventCount, period: "7daysAgo..today" });
                const filePath = fileWriter.writeObject("firebase", "analytics_event", {
                  id,
                  title: eventName
                }, fileContent);
                setFilePath.run(filePath, id);
              } catch {
              }
            }
            events++;
            newObjects++;
          }
        });
        batch(report.rows ?? []);
      } else {
        logger.warn(`Firebase Analytics API: ${res.status}`);
      }
    } catch (err) {
      logger.error(`Firebase sync error: ${err}`);
    }
  } else {
    logger.warn("Firebase sync requires apiKey. Provide via jowork connect firebase --api-key <key>");
  }
  logger.info("Firebase sync complete", { events, newObjects });
  return { events, newObjects };
}
|
|
267
|
+
|
|
268
|
+
// src/sync/file-writer.ts
|
|
269
|
+
import { writeFileSync, readFileSync, appendFileSync, existsSync, mkdirSync, chmodSync } from "fs";
|
|
270
|
+
import { join, dirname } from "path";
|
|
271
|
+
|
|
272
|
+
// src/utils/slugify.ts
|
|
273
|
+
// Turn an arbitrary display name into a filesystem-safe slug:
// filesystem-reserved characters and whitespace become "-", runs of
// dashes collapse to one, edge dashes are trimmed, and the result is
// capped at 100 characters. An empty result falls back to "untitled".
function slugify(name) {
  let slug = name.replace(/[\/\\:*?"<>|]/g, "-");
  slug = slug.replace(/\s+/g, "-");
  slug = slug.replace(/--+/g, "-");
  slug = slug.replace(/^-|-$/g, "");
  slug = slug.slice(0, 100);
  return slug || "untitled";
}
|
|
276
|
+
|
|
277
|
+
// src/sync/sanitizer.ts
|
|
278
|
+
// Patterns for secrets that must never reach files on disk: bearer
// headers, long base64-ish runs, OpenAI / GitHub / Slack / GitLab token
// formats, and PEM private-key blocks.
var SENSITIVE_PATTERNS = [
  /Bearer [A-Za-z0-9\-._~+/]+=*/g,
  /[A-Za-z0-9+/]{40,}/g,
  /sk-[a-zA-Z0-9]{20,}/g,
  /ghp_[a-zA-Z0-9]{36}/g,
  /xoxb-[0-9]{10,}-[a-zA-Z0-9]{20,}/g,
  /glpat-[a-zA-Z0-9\-]{20,}/g,
  /-----BEGIN (RSA |EC )?PRIVATE KEY-----[\s\S]*?-----END/g
];
// Replace every match of each sensitive pattern with "[REDACTED]".
// Patterns are applied in declaration order over the running result.
function sanitizeContent(content) {
  return SENSITIVE_PATTERNS.reduce(
    (text, pattern) => text.replace(pattern, "[REDACTED]"),
    content
  );
}
|
|
294
|
+
|
|
295
|
+
// src/sync/file-writer.ts
|
|
296
|
+
// Writes synced objects into a human-readable on-disk mirror of the
// SQLite object store. All content passes through sanitizeContent
// before hitting disk so known secret formats never land in files.
var FileWriter = class {
  // Root directory of the file repo (defaults to fileRepoDir()).
  repoDir;
  constructor(repoDir) {
    this.repoDir = repoDir ?? fileRepoDir();
    try {
      // 448 === 0o700: restrict the repo to the owner. Best-effort —
      // chmod can fail (e.g. unsupported filesystem) and that's OK.
      chmodSync(this.repoDir, 448);
    } catch {
    }
  }
  /** Write an object to a file. Returns the relative path from repoDir. */
  writeObject(source, sourceType, meta, content) {
    const filePath = this.getFilePath(source, sourceType, meta);
    const absPath = join(this.repoDir, filePath);
    mkdirSync(dirname(absPath), { recursive: true });
    // Redact sensitive tokens before anything reaches disk.
    writeFileSync(absPath, sanitizeContent(content));
    return filePath;
  }
  /** Append messages to a day file (for message-type sources). */
  appendMessages(source, chatName, chatId, date, messages) {
    const dir = join(source, "messages", slugify(chatName));
    const filePath = join(dir, `${date}.md`);
    const absPath = join(this.repoDir, filePath);
    mkdirSync(dirname(absPath), { recursive: true });
    // Collect "## <time> — <sender>" headers already in the file so
    // re-synced messages are not appended twice.
    let existingHeaders = [];
    if (existsSync(absPath)) {
      const existing = readFileSync(absPath, "utf-8");
      existingHeaders = [...existing.matchAll(/^## .+/gm)].map((m) => m[0]);
    }
    // NOTE(review): the dedupe key is time+sender only — two distinct
    // messages from the same sender at the same timestamp would collapse
    // into one; presumably the timestamp granularity makes this rare —
    // confirm with the message source.
    const newMessages = messages.filter((m) => {
      const header = `## ${m.time} \u2014 ${m.sender}`;
      return !existingHeaders.includes(header);
    });
    if (newMessages.length === 0) return filePath;
    if (!existsSync(absPath)) {
      // First write of the day: emit the full formatted document.
      writeFileSync(absPath, formatMessages(chatName, chatId, date, newMessages));
    } else {
      // Subsequent syncs: append only the new message sections.
      const appendText = newMessages.map((m) => `
## ${m.time} \u2014 ${m.sender}
${sanitizeContent(m.content)}`).join("\n");
      appendFileSync(absPath, appendText + "\n");
    }
    return filePath;
  }
  /** Calculate file path for an object based on source + type. */
  getFilePath(source, sourceType, meta) {
    switch (source) {
      // github/gitlab share a layout: <source>/<repo>/<type-dir>/<number>.md
      case "github":
      case "gitlab": {
        const repo = slugify(meta.repo ?? "unknown");
        const typeDir = sourceType === "pull_request" ? "pulls" : sourceType === "merge_request" ? "merge-requests" : "issues";
        return join(source, repo, typeDir, `${meta.number ?? meta.id}.md`);
      }
      case "linear":
        return join("linear", "issues", `${meta.identifier ?? meta.id}.md`);
      // feishu fans out by object type; unknown types land under "other".
      case "feishu": {
        if (sourceType === "calendar_event")
          return join("feishu", "meetings", `${meta.date}-${slugify(meta.title ?? "event")}.md`);
        if (sourceType === "approval")
          return join("feishu", "approvals", `${slugify(meta.title ?? "approval")}-${meta.id}.md`);
        if (sourceType === "document")
          return join("feishu", "docs", `${slugify(meta.title ?? "doc")}.md`);
        return join("feishu", "other", `${meta.id}.md`);
      }
      // Analytics sources are stored as .json rather than markdown.
      case "posthog":
        return join(
          "posthog",
          sourceType === "insight" ? "insights" : "events",
          `${slugify(meta.title ?? meta.id)}.json`
        );
      case "firebase":
        return join(
          "firebase",
          "analytics",
          `${slugify(meta.title ?? meta.id)}.json`
        );
      case "notion":
        return join("notion", "pages", `${slugify(meta.title ?? meta.id)}.md`);
      case "jira":
        return join("jira", slugify(meta.project ?? "unknown"), `${meta.identifier ?? meta.id}.md`);
      case "sentry":
        return join("sentry", "issues", `${meta.id}.json`);
      default:
        return join(source, `${meta.id}.md`);
    }
  }
  // Expose the repo root (e.g. for the git commit step).
  get rootDir() {
    return this.repoDir;
  }
};
|
|
385
|
+
|
|
386
|
+
// src/commands/sync.ts
|
|
387
|
+
// Run a sync pass over the given connected sources, then extract entity
// links and (best-effort) commit the file repo to git.
//
// Parameters:
//   sources - array of source names (e.g. "github", "feishu", "linear").
//
// Per-source failures are logged and do not abort the remaining sources.
async function runSync(sources) {
  const db = new DbManager(dbPath());
  db.ensureTables();
  const fileWriter = new FileWriter();
  const syncResults = [];
  // FIX: one shared console logger — previously an identical object
  // literal was re-declared in every switch case below.
  const consoleLogger = {
    info: (msg) => console.log(`  ${msg}`),
    warn: (msg) => console.log(`  \u26A0 ${msg}`),
    error: (msg) => console.error(`  \u2717 ${msg}`)
  };
  // Git integration is optional: init failure just leaves gitManager null.
  let gitManager = null;
  try {
    gitManager = new GitManager(fileRepoDir());
    await gitManager.init();
  } catch {
  }
  for (const source of sources) {
    const cred = loadCredential(source);
    if (!cred) {
      console.log(`\u2298 ${source}: no credentials found, skipping`);
      continue;
    }
    console.log(`Syncing ${source}...`);
    try {
      switch (source) {
        case "feishu": {
          const result = await syncFeishu(db.getSqlite(), cred.data, consoleLogger, fileWriter);
          console.log(`  \u2713 Synced ${result.totalMessages} messages (${result.newMessages} new) from ${result.chats} chats`);
          syncResults.push({ source: "feishu", newObjects: result.newMessages, label: "messages" });
          // Secondary feishu feeds (meetings/docs/approvals) are each
          // best-effort: a failure in one does not block the others.
          try {
            const meetResult = await syncFeishuMeetings(db.getSqlite(), cred.data, consoleLogger, fileWriter);
            if (meetResult.meetings > 0) {
              console.log(`  \u2713 Synced ${meetResult.meetings} calendar events (${meetResult.newObjects} new)`);
              syncResults.push({ source: "feishu/meetings", newObjects: meetResult.newObjects, label: "events" });
            }
          } catch (err) {
            console.log(`  \u26A0 Meeting sync: ${err}`);
          }
          try {
            const docResult = await syncFeishuDocs(db.getSqlite(), cred.data, consoleLogger, fileWriter);
            if (docResult.docs > 0) {
              console.log(`  \u2713 Synced ${docResult.docs} documents (${docResult.newObjects} new)`);
              syncResults.push({ source: "feishu/docs", newObjects: docResult.newObjects, label: "docs" });
            }
          } catch (err) {
            console.log(`  \u26A0 Document sync: ${err}`);
          }
          try {
            const approvalResult = await syncFeishuApprovals(db.getSqlite(), cred.data, consoleLogger, fileWriter);
            if (approvalResult.approvals > 0) {
              console.log(`  \u2713 Synced ${approvalResult.approvals} approvals (${approvalResult.newObjects} new)`);
              syncResults.push({ source: "feishu/approvals", newObjects: approvalResult.newObjects, label: "approvals" });
            }
          } catch (err) {
            console.log(`  \u26A0 Approval sync: ${err}`);
          }
          break;
        }
        case "github": {
          const result = await syncGitHub(db.getSqlite(), cred.data, consoleLogger, fileWriter);
          console.log(`  \u2713 Synced ${result.repos} repos: ${result.issues} issues, ${result.prs} PRs (${result.newObjects} new)`);
          syncResults.push({ source: "github", newObjects: result.newObjects });
          break;
        }
        case "gitlab": {
          const glResult = await syncGitLab(db.getSqlite(), cred.data, consoleLogger, fileWriter);
          console.log(`  \u2713 Synced ${glResult.projects} projects: ${glResult.issues} issues, ${glResult.mrs} MRs (${glResult.newObjects} new)`);
          syncResults.push({ source: "gitlab", newObjects: glResult.newObjects });
          break;
        }
        case "linear": {
          const linResult = await syncLinear(db.getSqlite(), cred.data, consoleLogger, fileWriter);
          console.log(`  \u2713 Synced ${linResult.issues} Linear issues (${linResult.newObjects} new)`);
          syncResults.push({ source: "linear", newObjects: linResult.newObjects, label: "issues" });
          break;
        }
        case "posthog": {
          const phResult = await syncPostHog(db.getSqlite(), cred.data, consoleLogger, fileWriter);
          console.log(`  \u2713 Synced ${phResult.insights} insights, ${phResult.events} events (${phResult.newObjects} new)`);
          syncResults.push({ source: "posthog", newObjects: phResult.newObjects });
          break;
        }
        case "firebase": {
          const fbResult = await syncFirebase(db.getSqlite(), cred.data, consoleLogger, fileWriter);
          console.log(`  \u2713 Synced ${fbResult.events} Firebase events (${fbResult.newObjects} new)`);
          syncResults.push({ source: "firebase", newObjects: fbResult.newObjects, label: "events" });
          break;
        }
        default:
          console.log(`  Unknown source: ${source}`);
      }
    } catch (err) {
      logError("sync", `Failed to sync ${source}`, { error: String(err) });
      console.error(`  \u2717 ${source} sync failed: ${err}`);
    }
  }
  // Post-pass: derive cross-object links from everything just synced.
  console.log("Running entity extraction...");
  const { processed, linksCreated } = linkAllUnprocessed(db.getSqlite());
  console.log(`  \u2713 Extracted ${linksCreated} links from ${processed} objects`);
  db.close();
  // Commit the file repo snapshot; failure is logged, never fatal.
  if (gitManager) {
    try {
      const sha = await gitManager.commitSync({
        timestamp: new Date().toISOString(),
        sources: syncResults
      });
      if (sha) {
        console.log(`  \u2713 Committed: ${sha.slice(0, 7)}`);
      }
    } catch (err) {
      logError("sync", "Git commit failed", { error: String(err) });
    }
  }
}
|
|
526
|
+
// Register the `sync` CLI command on the given commander program.
function syncCommand(program) {
  const cmd = program.command("sync");
  cmd.description("Sync data from connected sources");
  cmd.option("--source <source>", "Sync specific source only");
  cmd.action(async (opts) => {
    // Refuse to run before `jowork init` has created the database.
    if (!existsSync2(dbPath())) {
      console.error("Error: JoWork not initialized. Run `jowork init` first.");
      process.exit(1);
    }
    // An explicit --source wins; otherwise sync every connected source.
    const sources = opts.source ? [opts.source] : listCredentials();
    if (sources.length === 0) {
      console.log("No data sources connected. Run `jowork connect <source>` first.");
      return;
    }
    await runSync(sources);
  });
}
|
|
540
|
+
|
|
541
|
+
export {
|
|
542
|
+
syncPostHog,
|
|
543
|
+
syncFirebase,
|
|
544
|
+
FileWriter,
|
|
545
|
+
runSync,
|
|
546
|
+
syncCommand
|
|
547
|
+
};
|