@mukulaggarwal/pacman 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53) hide show
  1. package/README.md +39 -0
  2. package/dist/chunk-3QNXXON5.js +330 -0
  3. package/dist/chunk-3QNXXON5.js.map +1 -0
  4. package/dist/chunk-43PUZDIZ.js +148 -0
  5. package/dist/chunk-43PUZDIZ.js.map +1 -0
  6. package/dist/chunk-7D4SUZUM.js +38 -0
  7. package/dist/chunk-7D4SUZUM.js.map +1 -0
  8. package/dist/chunk-AYFIQNZ5.js +807 -0
  9. package/dist/chunk-AYFIQNZ5.js.map +1 -0
  10. package/dist/chunk-FH6ZHWGR.js +37 -0
  11. package/dist/chunk-FH6ZHWGR.js.map +1 -0
  12. package/dist/chunk-O6T35A4O.js +137 -0
  13. package/dist/chunk-O6T35A4O.js.map +1 -0
  14. package/dist/chunk-TRQIZP6Z.js +451 -0
  15. package/dist/chunk-TRQIZP6Z.js.map +1 -0
  16. package/dist/chunk-UWT6AFJB.js +471 -0
  17. package/dist/chunk-UWT6AFJB.js.map +1 -0
  18. package/dist/chunk-ZKKMIDRK.js +3923 -0
  19. package/dist/chunk-ZKKMIDRK.js.map +1 -0
  20. package/dist/daemon.d.ts +3 -0
  21. package/dist/daemon.js +141 -0
  22. package/dist/daemon.js.map +1 -0
  23. package/dist/dist-3PIJOFZ4.js +91 -0
  24. package/dist/dist-3PIJOFZ4.js.map +1 -0
  25. package/dist/dist-L76NGFFH.js +102 -0
  26. package/dist/dist-L76NGFFH.js.map +1 -0
  27. package/dist/dist-NV2YVVHI.js +178 -0
  28. package/dist/dist-NV2YVVHI.js.map +1 -0
  29. package/dist/dist-RMYCRZIU.js +41 -0
  30. package/dist/dist-RMYCRZIU.js.map +1 -0
  31. package/dist/dist-THLCZNOZ.js +14 -0
  32. package/dist/dist-THLCZNOZ.js.map +1 -0
  33. package/dist/dist-TWNHTXYH.js +95 -0
  34. package/dist/dist-TWNHTXYH.js.map +1 -0
  35. package/dist/index.d.ts +1 -0
  36. package/dist/index.js +452 -0
  37. package/dist/index.js.map +1 -0
  38. package/dist/mcp-compat.d.ts +1 -0
  39. package/dist/mcp-compat.js +78 -0
  40. package/dist/mcp-compat.js.map +1 -0
  41. package/dist/onboarding-server.d.ts +3 -0
  42. package/dist/onboarding-server.js +1172 -0
  43. package/dist/onboarding-server.js.map +1 -0
  44. package/dist/provider-runtime.d.ts +11 -0
  45. package/dist/provider-runtime.js +10 -0
  46. package/dist/provider-runtime.js.map +1 -0
  47. package/dist/slack-listener.d.ts +49 -0
  48. package/dist/slack-listener.js +888 -0
  49. package/dist/slack-listener.js.map +1 -0
  50. package/dist/storage.d.ts +8 -0
  51. package/dist/storage.js +9 -0
  52. package/dist/storage.js.map +1 -0
  53. package/package.json +75 -0
package/dist/daemon.js ADDED
@@ -0,0 +1,141 @@
1
+ import {
2
+ createIndexer
3
+ } from "./chunk-3QNXXON5.js";
4
+ import {
5
+ createNoopEventClient,
6
+ loadConnector
7
+ } from "./chunk-43PUZDIZ.js";
8
+ import {
9
+ createContextManager
10
+ } from "./chunk-UWT6AFJB.js";
11
+ import {
12
+ createConfigManager,
13
+ createGDriveStorage,
14
+ createLocalStorage
15
+ } from "./chunk-TRQIZP6Z.js";
16
+ import "./chunk-7D4SUZUM.js";
17
+
18
+ // src/daemon.ts
19
+ import * as path from "path";
20
/**
 * Start the sync daemon for the given workspace.
 *
 * Loads the user configuration, resolves the storage backend (local, or
 * Google Drive with fallback to local on init failure), runs an initial
 * sync, then repeats the sync on a fixed interval until SIGINT.
 *
 * @param {string} workspacePath - Filesystem path of the workspace.
 * @returns {Promise<void>} Never resolves normally; the process exits via
 *   the SIGINT handler or process.exit().
 */
async function startDaemon(workspacePath) {
  // Bootstrap from local storage to discover which backend the user chose.
  const localStore = createLocalStorage(workspacePath);
  const bootConfigManager = createConfigManager(localStore);
  const eventClient = createNoopEventClient();
  let config;
  try {
    config = await bootConfigManager.loadConfig();
  } catch {
    console.error("Error: No configuration found. Run `pacman init` first.");
    process.exit(1);
  }
  // Resolve the storage backend; fall back to local storage if GDrive fails.
  let resolvedStorage = localStore;
  if (config.storage.mode === "gdrive") {
    try {
      const gdriveConfig = config.storage;
      // cachePath may be relative; resolve it against the workspace's parent.
      const resolvedCachePath = path.isAbsolute(gdriveConfig.cachePath)
        ? gdriveConfig.cachePath
        : path.resolve(path.dirname(workspacePath), gdriveConfig.cachePath);
      const gdriveStorage = createGDriveStorage({
        ...gdriveConfig,
        cachePath: resolvedCachePath
      });
      await gdriveStorage.initialize();
      resolvedStorage = gdriveStorage;
      console.log("Using Google Drive storage backend");
    } catch (err) {
      console.error("Warning: GDrive init failed, falling back to local storage:", err);
    }
  }
  // Config manager bound to the resolved backend; sync cursors are saved here.
  const configManager = createConfigManager(resolvedStorage);
  console.log(`Daemon started for ${config.user.name} (${config.user.profileType})`);
  console.log(`Sync schedule: ${config.sync.dailySyncTime} ${config.sync.timezone}`);
  console.log(`Enabled integrations: ${config.integrations.filter((i) => i.enabled).map((i) => i.type).join(", ") || "none"}`);
  // NOTE(review): contextManager is created but never used below — confirm
  // createContextManager has no required side effects before removing it.
  const contextManager = createContextManager(resolvedStorage);
  const indexer = createIndexer(resolvedStorage);
  const syncInterval = parseSyncInterval(config.sync.dailySyncTime);
  /** Run one full sync pass across all enabled integrations. */
  const runSync = async () => {
    console.log(`[${new Date().toISOString()}] Starting sync...`);
    await eventClient.emit("integration_sync_started");
    for (const integration of config.integrations.filter((i) => i.enabled)) {
      try {
        console.log(` Syncing ${integration.type}...`);
        const connector = await loadConnector(integration);
        await connector.authenticate(integration);
        const delta = await connector.fetchDelta(integration.cursor ?? void 0);
        if (delta.items.length > 0) {
          const normalized = await connector.normalize(delta.items);
          // Persist each normalized item as a raw markdown note.
          for (const item of normalized) {
            const filePath = `context/raw/${integration.type}/${item.id}.md`;
            const content = `# ${item.title}\n\n${item.body}\n\n---\nSource: ${item.source}\nTimestamp: ${item.timestamp}\n`;
            await resolvedStorage.write(filePath, content);
          }
          // Advance the cursor only after the items were written.
          integration.cursor = delta.newCursor;
          integration.lastSyncAt = new Date().toISOString();
          console.log(` ${integration.type}: ${delta.items.length} items synced`);
        } else {
          console.log(` ${integration.type}: no new items`);
        }
      } catch (err) {
        // One failing integration must not abort the others.
        console.error(` Error syncing ${integration.type}:`, err);
        await eventClient.emit("error_occurred", {
          integration: integration.type,
          error: String(err)
        });
      }
    }
    // Save updated cursors.
    await configManager.saveConfig(config);
    // Generate and persist the daily summary, then rebuild indexes.
    const today = new Date().toISOString().slice(0, 10);
    const summaryContent = await generateDailySummary(resolvedStorage, today);
    await resolvedStorage.write(`context/derived/daily/${today}.md`, summaryContent);
    await indexer.buildIndexes();
    await eventClient.emit("integration_sync_completed");
    console.log(`[${new Date().toISOString()}] Sync complete.`);
  };
  // Run once immediately. A failed first pass is logged rather than fatal so
  // the scheduled retries below can still recover from transient errors.
  try {
    await runSync();
  } catch (err) {
    console.error("Initial sync failed:", err);
  }
  console.log(`\nNext sync in ${syncInterval}ms (${Math.round(syncInterval / 6e4)} minutes)`);
  const interval = setInterval(() => {
    // setInterval discards the returned promise; catch here so a failing sync
    // does not surface as an unhandled rejection and kill the daemon.
    runSync().catch((err) => console.error("Sync failed:", err));
  }, syncInterval);
  process.on("SIGINT", () => {
    console.log("\nStopping daemon...");
    clearInterval(interval);
    process.exit(0);
  });
  // Keep the process alive indefinitely.
  await new Promise(() => {
  });
}
111
/**
 * Map the configured daily sync time to a repeat interval in milliseconds.
 * The current implementation ignores the configured time and always returns
 * a fixed 24-hour period; real cron-style scheduling is a follow-up.
 *
 * @param {string} syncTime - Configured daily sync time (currently unused).
 * @returns {number} Interval in milliseconds (24 hours).
 */
function parseSyncInterval(syncTime) {
  const HOURS_PER_DAY = 24;
  const MS_PER_HOUR = 60 * 60 * 1000;
  return HOURS_PER_DAY * MS_PER_HOUR;
}
114
/**
 * Build the markdown text of a daily summary: a header, a generation
 * timestamp, and one section per integration that has raw items stored
 * under `context/raw/<type>`.
 *
 * @param {{ list(path: string): Promise<string[]> }} storage - Storage backend.
 * @param {string} date - ISO date (YYYY-MM-DD) used in the heading.
 * @returns {Promise<string>} The assembled markdown document.
 */
async function generateDailySummary(storage, date) {
  const parts = [`# Daily Summary - ${date}\n\n`];
  parts.push(`Generated at: ${new Date().toISOString()}\n\n`);
  const integrationTypes = ["slack", "gmail", "github", "gitlab", "gdrive", "gchat"];
  for (const intType of integrationTypes) {
    let files = [];
    try {
      files = await storage.list(`context/raw/${intType}`);
    } catch {
      // Directory doesn't exist yet for this integration — skip it.
    }
    if (files.length > 0) {
      const heading = intType.charAt(0).toUpperCase() + intType.slice(1);
      parts.push(`## ${heading}\n\n`, `- ${files.length} items\n\n`);
    }
  }
  return parts.join("");
}
138
+ export {
139
+ startDaemon
140
+ };
141
+ //# sourceMappingURL=daemon.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/daemon.ts"],"sourcesContent":["import * as path from 'node:path';\nimport { createLocalStorage } from '@personal-assistant/storage-local';\nimport { createGDriveStorage } from '@personal-assistant/storage-gdrive';\nimport { createConfigManager } from '@personal-assistant/config-manager';\nimport { createContextManager } from '@personal-assistant/context-manager';\nimport { createIndexer } from '@personal-assistant/indexer';\nimport { createNoopEventClient } from '@personal-assistant/event-client';\nimport { loadConnector } from '@personal-assistant/integration-runtime';\nimport type { StorageBackend, GDriveStorageConfig } from '@personal-assistant/core-types';\n\nexport async function startDaemon(workspacePath: string): Promise<void> {\n // Bootstrap: read config from local storage to determine which backend to use\n const localStore = createLocalStorage(workspacePath);\n const bootConfigManager = createConfigManager(localStore);\n const eventClient = createNoopEventClient();\n\n let config;\n try {\n config = await bootConfigManager.loadConfig();\n } catch {\n console.error('Error: No configuration found. Run `pacman init` first.');\n process.exit(1);\n }\n\n // Resolve storage backend — all operations use the user's chosen backend\n let resolvedStorage: StorageBackend = localStore;\n if (config.storage.mode === 'gdrive') {\n try {\n const gdriveConfig = config.storage as GDriveStorageConfig;\n const resolvedCachePath = path.isAbsolute(gdriveConfig.cachePath)\n ? 
gdriveConfig.cachePath\n : path.resolve(path.dirname(workspacePath), gdriveConfig.cachePath);\n\n const gdriveStorage = createGDriveStorage({\n ...gdriveConfig,\n cachePath: resolvedCachePath,\n });\n await gdriveStorage.initialize();\n resolvedStorage = gdriveStorage;\n console.log('Using Google Drive storage backend');\n } catch (err) {\n console.error('Warning: GDrive init failed, falling back to local storage:', err);\n }\n }\n\n const configManager = createConfigManager(resolvedStorage);\n\n console.log(`Daemon started for ${config.user.name} (${config.user.profileType})`);\n console.log(`Sync schedule: ${config.sync.dailySyncTime} ${config.sync.timezone}`);\n console.log(`Enabled integrations: ${config.integrations.filter((i) => i.enabled).map((i) => i.type).join(', ') || 'none'}`);\n\n const contextManager = createContextManager(resolvedStorage);\n const indexer = createIndexer(resolvedStorage);\n\n // Schedule sync using node-cron style interval\n const syncInterval = parseSyncInterval(config.sync.dailySyncTime);\n\n const runSync = async () => {\n console.log(`[${new Date().toISOString()}] Starting sync...`);\n await eventClient.emit('integration_sync_started');\n\n for (const integration of config!.integrations.filter((i) => i.enabled)) {\n try {\n console.log(` Syncing ${integration.type}...`);\n const connector = await loadConnector(integration);\n await connector.authenticate(integration);\n\n const delta = await connector.fetchDelta(integration.cursor ?? 
undefined);\n if (delta.items.length > 0) {\n const normalized = await connector.normalize(delta.items);\n\n // Write raw items\n for (const item of normalized) {\n const filePath = `context/raw/${integration.type}/${item.id}.md`;\n const content = `# ${item.title}\\n\\n${item.body}\\n\\n---\\nSource: ${item.source}\\nTimestamp: ${item.timestamp}\\n`;\n await resolvedStorage.write(filePath, content);\n }\n\n // Update cursor\n integration.cursor = delta.newCursor;\n integration.lastSyncAt = new Date().toISOString();\n console.log(` ${integration.type}: ${delta.items.length} items synced`);\n } else {\n console.log(` ${integration.type}: no new items`);\n }\n } catch (err) {\n console.error(` Error syncing ${integration.type}:`, err);\n await eventClient.emit('error_occurred', {\n integration: integration.type,\n error: String(err),\n });\n }\n }\n\n // Save updated cursors\n await configManager.saveConfig(config!);\n\n // Generate daily summary\n const today = new Date().toISOString().slice(0, 10);\n const summaryContent = await generateDailySummary(resolvedStorage, today);\n await resolvedStorage.write(`context/derived/daily/${today}.md`, summaryContent);\n\n // Rebuild indexes\n await indexer.buildIndexes();\n\n await eventClient.emit('integration_sync_completed');\n console.log(`[${new Date().toISOString()}] Sync complete.`);\n };\n\n // Run immediately\n await runSync();\n\n // Then run on interval\n console.log(`\\nNext sync in ${syncInterval}ms (${Math.round(syncInterval / 60000)} minutes)`);\n const interval = setInterval(runSync, syncInterval);\n\n process.on('SIGINT', () => {\n console.log('\\nStopping daemon...');\n clearInterval(interval);\n process.exit(0);\n });\n\n // Keep process alive\n await new Promise(() => {});\n}\n\nfunction parseSyncInterval(syncTime: string): number {\n // For the daemon, we run every 24 hours by default\n // In a real implementation, this would use node-cron with the actual time\n return 24 * 60 * 60 * 1000; // 24 
hours\n}\n\nasync function generateDailySummary(\n storage: import('@personal-assistant/core-types').StorageBackend,\n date: string,\n): Promise<string> {\n let summary = `# Daily Summary - ${date}\\n\\n`;\n summary += `Generated at: ${new Date().toISOString()}\\n\\n`;\n\n // Aggregate raw items from today\n const integrationTypes = ['slack', 'gmail', 'github', 'gitlab', 'gdrive', 'gchat'];\n for (const intType of integrationTypes) {\n try {\n const files = await storage.list(`context/raw/${intType}`);\n if (files.length > 0) {\n summary += `## ${intType.charAt(0).toUpperCase() + intType.slice(1)}\\n\\n`;\n summary += `- ${files.length} items\\n\\n`;\n }\n } catch {\n // Directory doesn't exist\n }\n }\n\n return summary;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA,YAAY,UAAU;AAUtB,eAAsB,YAAY,eAAsC;AAEtE,QAAM,aAAa,mBAAmB,aAAa;AACnD,QAAM,oBAAoB,oBAAoB,UAAU;AACxD,QAAM,cAAc,sBAAsB;AAE1C,MAAI;AACJ,MAAI;AACF,aAAS,MAAM,kBAAkB,WAAW;AAAA,EAC9C,QAAQ;AACN,YAAQ,MAAM,yDAAyD;AACvE,YAAQ,KAAK,CAAC;AAAA,EAChB;AAGA,MAAI,kBAAkC;AACtC,MAAI,OAAO,QAAQ,SAAS,UAAU;AACpC,QAAI;AACF,YAAM,eAAe,OAAO;AAC5B,YAAM,oBAAyB,gBAAW,aAAa,SAAS,IAC5D,aAAa,YACR,aAAa,aAAQ,aAAa,GAAG,aAAa,SAAS;AAEpE,YAAM,gBAAgB,oBAAoB;AAAA,QACxC,GAAG;AAAA,QACH,WAAW;AAAA,MACb,CAAC;AACD,YAAM,cAAc,WAAW;AAC/B,wBAAkB;AAClB,cAAQ,IAAI,oCAAoC;AAAA,IAClD,SAAS,KAAK;AACZ,cAAQ,MAAM,+DAA+D,GAAG;AAAA,IAClF;AAAA,EACF;AAEA,QAAM,gBAAgB,oBAAoB,eAAe;AAEzD,UAAQ,IAAI,sBAAsB,OAAO,KAAK,IAAI,KAAK,OAAO,KAAK,WAAW,GAAG;AACjF,UAAQ,IAAI,kBAAkB,OAAO,KAAK,aAAa,IAAI,OAAO,KAAK,QAAQ,EAAE;AACjF,UAAQ,IAAI,yBAAyB,OAAO,aAAa,OAAO,CAAC,MAAM,EAAE,OAAO,EAAE,IAAI,CAAC,MAAM,EAAE,IAAI,EAAE,KAAK,IAAI,KAAK,MAAM,EAAE;AAE3H,QAAM,iBAAiB,qBAAqB,eAAe;AAC3D,QAAM,UAAU,cAAc,eAAe;AAG7C,QAAM,eAAe,kBAAkB,OAAO,KAAK,aAAa;AAEhE,QAAM,UAAU,YAAY;AAC1B,YAAQ,IAAI,KAAI,oBAAI,KAAK,GAAE,YAAY,CAAC,oBAAoB;AAC5D,UAAM,YAAY,KAAK,0BAA0B;AAEjD,eAAW,eAAe,OAAQ,aAAa,OAAO,CAAC,MAAM,EAAE,OAAO,GAAG;AACvE,UAAI;AACF,gBAAQ,IAAI,aAAa,YAAY,IAAI,KAAK;AAC9C,cAAM,YAAY,MAAM,cAAc,WAAW;AACjD,cAAM,UAAU,aAAa,WAAW;AAExC,cAA
M,QAAQ,MAAM,UAAU,WAAW,YAAY,UAAU,MAAS;AACxE,YAAI,MAAM,MAAM,SAAS,GAAG;AAC1B,gBAAM,aAAa,MAAM,UAAU,UAAU,MAAM,KAAK;AAGxD,qBAAW,QAAQ,YAAY;AAC7B,kBAAM,WAAW,eAAe,YAAY,IAAI,IAAI,KAAK,EAAE;AAC3D,kBAAM,UAAU,KAAK,KAAK,KAAK;AAAA;AAAA,EAAO,KAAK,IAAI;AAAA;AAAA;AAAA,UAAoB,KAAK,MAAM;AAAA,aAAgB,KAAK,SAAS;AAAA;AAC5G,kBAAM,gBAAgB,MAAM,UAAU,OAAO;AAAA,UAC/C;AAGA,sBAAY,SAAS,MAAM;AAC3B,sBAAY,cAAa,oBAAI,KAAK,GAAE,YAAY;AAChD,kBAAQ,IAAI,KAAK,YAAY,IAAI,KAAK,MAAM,MAAM,MAAM,eAAe;AAAA,QACzE,OAAO;AACL,kBAAQ,IAAI,KAAK,YAAY,IAAI,gBAAgB;AAAA,QACnD;AAAA,MACF,SAAS,KAAK;AACZ,gBAAQ,MAAM,mBAAmB,YAAY,IAAI,KAAK,GAAG;AACzD,cAAM,YAAY,KAAK,kBAAkB;AAAA,UACvC,aAAa,YAAY;AAAA,UACzB,OAAO,OAAO,GAAG;AAAA,QACnB,CAAC;AAAA,MACH;AAAA,IACF;AAGA,UAAM,cAAc,WAAW,MAAO;AAGtC,UAAM,SAAQ,oBAAI,KAAK,GAAE,YAAY,EAAE,MAAM,GAAG,EAAE;AAClD,UAAM,iBAAiB,MAAM,qBAAqB,iBAAiB,KAAK;AACxE,UAAM,gBAAgB,MAAM,yBAAyB,KAAK,OAAO,cAAc;AAG/E,UAAM,QAAQ,aAAa;AAE3B,UAAM,YAAY,KAAK,4BAA4B;AACnD,YAAQ,IAAI,KAAI,oBAAI,KAAK,GAAE,YAAY,CAAC,kBAAkB;AAAA,EAC5D;AAGA,QAAM,QAAQ;AAGd,UAAQ,IAAI;AAAA,eAAkB,YAAY,OAAO,KAAK,MAAM,eAAe,GAAK,CAAC,WAAW;AAC5F,QAAM,WAAW,YAAY,SAAS,YAAY;AAElD,UAAQ,GAAG,UAAU,MAAM;AACzB,YAAQ,IAAI,sBAAsB;AAClC,kBAAc,QAAQ;AACtB,YAAQ,KAAK,CAAC;AAAA,EAChB,CAAC;AAGD,QAAM,IAAI,QAAQ,MAAM;AAAA,EAAC,CAAC;AAC5B;AAEA,SAAS,kBAAkB,UAA0B;AAGnD,SAAO,KAAK,KAAK,KAAK;AACxB;AAEA,eAAe,qBACb,SACA,MACiB;AACjB,MAAI,UAAU,qBAAqB,IAAI;AAAA;AAAA;AACvC,aAAW,kBAAiB,oBAAI,KAAK,GAAE,YAAY,CAAC;AAAA;AAAA;AAGpD,QAAM,mBAAmB,CAAC,SAAS,SAAS,UAAU,UAAU,UAAU,OAAO;AACjF,aAAW,WAAW,kBAAkB;AACtC,QAAI;AACF,YAAM,QAAQ,MAAM,QAAQ,KAAK,eAAe,OAAO,EAAE;AACzD,UAAI,MAAM,SAAS,GAAG;AACpB,mBAAW,MAAM,QAAQ,OAAO,CAAC,EAAE,YAAY,IAAI,QAAQ,MAAM,CAAC,CAAC;AAAA;AAAA;AACnE,mBAAW,KAAK,MAAM,MAAM;AAAA;AAAA;AAAA,MAC9B;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,SAAO;AACT;","names":[]}
@@ -0,0 +1,91 @@
1
+ import "./chunk-7D4SUZUM.js";
2
+
3
+ // ../integrations-gdrive/dist/index.js
4
// Connector for Google Drive: pulls recently modified Google Docs via the
// Drive v3 API and normalizes them into the assistant's common item shape.
var GDriveDocsConnector = class {
  type = "gdrive";
  // OAuth credentials ({ clientId, clientSecret, refreshToken }); set by authenticate().
  credentials;
  /**
   * Build an authenticated Drive v3 client from the stored credentials.
   * Extracted so healthCheck() and fetchDelta() share one construction
   * path instead of duplicating the OAuth2 setup verbatim.
   */
  async #driveClient() {
    const { google } = await import("googleapis");
    const oauth2Client = new google.auth.OAuth2(
      this.credentials.clientId,
      this.credentials.clientSecret
    );
    oauth2Client.setCredentials({ refresh_token: this.credentials.refreshToken });
    return google.drive({ version: "v3", auth: oauth2Client });
  }
  /**
   * Store and validate credentials.
   * @throws {Error} when clientId, clientSecret, or refreshToken is missing.
   */
  async authenticate(config) {
    this.credentials = config.credentials;
    if (!this.credentials?.clientId || !this.credentials?.clientSecret || !this.credentials?.refreshToken) {
      throw new Error("Google Drive client ID, client secret, and refresh token are required");
    }
  }
  /**
   * Verify connectivity by fetching the authenticated user's profile.
   * @returns {Promise<{summary: string}>} human-readable connection status.
   */
  async healthCheck() {
    if (!this.credentials?.clientId || !this.credentials?.clientSecret || !this.credentials?.refreshToken) {
      throw new Error("Google Drive client ID, client secret, and refresh token are required");
    }
    const drive = await this.#driveClient();
    const about = await drive.about.get({ fields: "user" });
    return { summary: `Connected as ${about.data.user?.displayName ?? "unknown"}` };
  }
  /**
   * Fetch Google Docs modified after the cursor (default: last 24 hours),
   * exporting each as plain text capped at 5000 characters.
   * @param {string|undefined} cursor - ISO timestamp of the previous sync.
   */
  async fetchDelta(cursor) {
    if (!this.credentials) throw new Error("Not authenticated");
    const drive = await this.#driveClient();
    const modifiedAfter = cursor ?? new Date(Date.now() - 864e5).toISOString();
    const res = await drive.files.list({
      q: `modifiedTime > '${modifiedAfter}' and mimeType = 'application/vnd.google-apps.document' and trashed = false`,
      fields: "files(id, name, modifiedTime, owners)",
      pageSize: 50
    });
    const items = [];
    for (const file of res.data.files ?? []) {
      if (!file.id || !file.name) continue;
      let content = "";
      try {
        const exportRes = await drive.files.export({
          fileId: file.id,
          mimeType: "text/plain"
        });
        content = String(exportRes.data).slice(0, 5e3);
      } catch {
        // Export can fail (e.g. permissions); fall back to a placeholder.
        content = `[Document: ${file.name}]`;
      }
      items.push({
        id: file.id,
        source: "gdrive",
        type: "document",
        content,
        metadata: {
          name: file.name,
          modifiedTime: file.modifiedTime ?? ""
        },
        timestamp: file.modifiedTime ?? (/* @__PURE__ */ new Date()).toISOString()
      });
    }
    return {
      items,
      newCursor: (/* @__PURE__ */ new Date()).toISOString(),
      hasMore: false
    };
  }
  /**
   * Convert raw fetched items into the normalized shape; bodies are
   * truncated to 2000 characters.
   */
  async normalize(rawItems) {
    return rawItems.map((item) => ({
      id: item.id,
      source: "gdrive",
      title: `Doc: ${item.metadata.name}`,
      body: item.content.slice(0, 2e3),
      entities: [],
      timestamp: item.timestamp
    }));
  }
};
84
/** Factory used by the connector loader to obtain a fresh GDrive connector. */
function createGDriveDocsConnector() {
  return new GDriveDocsConnector();
}
87
+ export {
88
+ GDriveDocsConnector,
89
+ createGDriveDocsConnector
90
+ };
91
+ //# sourceMappingURL=dist-3PIJOFZ4.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../integrations-gdrive/dist/index.js"],"sourcesContent":["// src/index.ts\nvar GDriveDocsConnector = class {\n type = \"gdrive\";\n credentials;\n async authenticate(config) {\n this.credentials = config.credentials;\n if (!this.credentials?.clientId || !this.credentials?.clientSecret || !this.credentials?.refreshToken) {\n throw new Error(\"Google Drive client ID, client secret, and refresh token are required\");\n }\n }\n async healthCheck() {\n if (!this.credentials?.clientId || !this.credentials?.clientSecret || !this.credentials?.refreshToken) {\n throw new Error(\"Google Drive client ID, client secret, and refresh token are required\");\n }\n const { google } = await import(\"googleapis\");\n const oauth2Client = new google.auth.OAuth2(\n this.credentials.clientId,\n this.credentials.clientSecret\n );\n oauth2Client.setCredentials({ refresh_token: this.credentials.refreshToken });\n const drive = google.drive({ version: \"v3\", auth: oauth2Client });\n const about = await drive.about.get({ fields: \"user\" });\n return { summary: `Connected as ${about.data.user?.displayName ?? \"unknown\"}` };\n }\n async fetchDelta(cursor) {\n if (!this.credentials) throw new Error(\"Not authenticated\");\n const { google } = await import(\"googleapis\");\n const oauth2Client = new google.auth.OAuth2(\n this.credentials.clientId,\n this.credentials.clientSecret\n );\n oauth2Client.setCredentials({ refresh_token: this.credentials.refreshToken });\n const drive = google.drive({ version: \"v3\", auth: oauth2Client });\n const modifiedAfter = cursor ?? new Date(Date.now() - 864e5).toISOString();\n const res = await drive.files.list({\n q: `modifiedTime > '${modifiedAfter}' and mimeType = 'application/vnd.google-apps.document' and trashed = false`,\n fields: \"files(id, name, modifiedTime, owners)\",\n pageSize: 50\n });\n const items = [];\n for (const file of res.data.files ?? 
[]) {\n if (!file.id || !file.name) continue;\n let content = \"\";\n try {\n const exportRes = await drive.files.export({\n fileId: file.id,\n mimeType: \"text/plain\"\n });\n content = String(exportRes.data).slice(0, 5e3);\n } catch {\n content = `[Document: ${file.name}]`;\n }\n items.push({\n id: file.id,\n source: \"gdrive\",\n type: \"document\",\n content,\n metadata: {\n name: file.name,\n modifiedTime: file.modifiedTime ?? \"\"\n },\n timestamp: file.modifiedTime ?? (/* @__PURE__ */ new Date()).toISOString()\n });\n }\n return {\n items,\n newCursor: (/* @__PURE__ */ new Date()).toISOString(),\n hasMore: false\n };\n }\n async normalize(rawItems) {\n return rawItems.map((item) => ({\n id: item.id,\n source: \"gdrive\",\n title: `Doc: ${item.metadata.name}`,\n body: item.content.slice(0, 2e3),\n entities: [],\n timestamp: item.timestamp\n }));\n }\n};\nfunction createGDriveDocsConnector() {\n return new GDriveDocsConnector();\n}\nexport {\n GDriveDocsConnector,\n createGDriveDocsConnector\n};\n"],"mappings":";;;AACA,IAAI,sBAAsB,MAAM;AAAA,EAC9B,OAAO;AAAA,EACP;AAAA,EACA,MAAM,aAAa,QAAQ;AACzB,SAAK,cAAc,OAAO;AAC1B,QAAI,CAAC,KAAK,aAAa,YAAY,CAAC,KAAK,aAAa,gBAAgB,CAAC,KAAK,aAAa,cAAc;AACrG,YAAM,IAAI,MAAM,uEAAuE;AAAA,IACzF;AAAA,EACF;AAAA,EACA,MAAM,cAAc;AAClB,QAAI,CAAC,KAAK,aAAa,YAAY,CAAC,KAAK,aAAa,gBAAgB,CAAC,KAAK,aAAa,cAAc;AACrG,YAAM,IAAI,MAAM,uEAAuE;AAAA,IACzF;AACA,UAAM,EAAE,OAAO,IAAI,MAAM,OAAO,YAAY;AAC5C,UAAM,eAAe,IAAI,OAAO,KAAK;AAAA,MACnC,KAAK,YAAY;AAAA,MACjB,KAAK,YAAY;AAAA,IACnB;AACA,iBAAa,eAAe,EAAE,eAAe,KAAK,YAAY,aAAa,CAAC;AAC5E,UAAM,QAAQ,OAAO,MAAM,EAAE,SAAS,MAAM,MAAM,aAAa,CAAC;AAChE,UAAM,QAAQ,MAAM,MAAM,MAAM,IAAI,EAAE,QAAQ,OAAO,CAAC;AACtD,WAAO,EAAE,SAAS,gBAAgB,MAAM,KAAK,MAAM,eAAe,SAAS,GAAG;AAAA,EAChF;AAAA,EACA,MAAM,WAAW,QAAQ;AACvB,QAAI,CAAC,KAAK,YAAa,OAAM,IAAI,MAAM,mBAAmB;AAC1D,UAAM,EAAE,OAAO,IAAI,MAAM,OAAO,YAAY;AAC5C,UAAM,eAAe,IAAI,OAAO,KAAK;AAAA,MACnC,KAAK,YAAY;AAAA,MACjB,KAAK,YAAY;AAAA,IACnB;AACA,iBAAa,eAAe,EAAE,eAAe,KAAK,YAAY,aAAa,CAAC;AAC5E,UAAM,QAAQ,OAAO,M
AAM,EAAE,SAAS,MAAM,MAAM,aAAa,CAAC;AAChE,UAAM,gBAAgB,UAAU,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,EAAE,YAAY;AACzE,UAAM,MAAM,MAAM,MAAM,MAAM,KAAK;AAAA,MACjC,GAAG,mBAAmB,aAAa;AAAA,MACnC,QAAQ;AAAA,MACR,UAAU;AAAA,IACZ,CAAC;AACD,UAAM,QAAQ,CAAC;AACf,eAAW,QAAQ,IAAI,KAAK,SAAS,CAAC,GAAG;AACvC,UAAI,CAAC,KAAK,MAAM,CAAC,KAAK,KAAM;AAC5B,UAAI,UAAU;AACd,UAAI;AACF,cAAM,YAAY,MAAM,MAAM,MAAM,OAAO;AAAA,UACzC,QAAQ,KAAK;AAAA,UACb,UAAU;AAAA,QACZ,CAAC;AACD,kBAAU,OAAO,UAAU,IAAI,EAAE,MAAM,GAAG,GAAG;AAAA,MAC/C,QAAQ;AACN,kBAAU,cAAc,KAAK,IAAI;AAAA,MACnC;AACA,YAAM,KAAK;AAAA,QACT,IAAI,KAAK;AAAA,QACT,QAAQ;AAAA,QACR,MAAM;AAAA,QACN;AAAA,QACA,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,cAAc,KAAK,gBAAgB;AAAA,QACrC;AAAA,QACA,WAAW,KAAK,iBAAiC,oBAAI,KAAK,GAAG,YAAY;AAAA,MAC3E,CAAC;AAAA,IACH;AACA,WAAO;AAAA,MACL;AAAA,MACA,YAA4B,oBAAI,KAAK,GAAG,YAAY;AAAA,MACpD,SAAS;AAAA,IACX;AAAA,EACF;AAAA,EACA,MAAM,UAAU,UAAU;AACxB,WAAO,SAAS,IAAI,CAAC,UAAU;AAAA,MAC7B,IAAI,KAAK;AAAA,MACT,QAAQ;AAAA,MACR,OAAO,QAAQ,KAAK,SAAS,IAAI;AAAA,MACjC,MAAM,KAAK,QAAQ,MAAM,GAAG,GAAG;AAAA,MAC/B,UAAU,CAAC;AAAA,MACX,WAAW,KAAK;AAAA,IAClB,EAAE;AAAA,EACJ;AACF;AACA,SAAS,4BAA4B;AACnC,SAAO,IAAI,oBAAoB;AACjC;","names":[]}
@@ -0,0 +1,102 @@
1
+ import "./chunk-7D4SUZUM.js";
2
+
3
+ // ../integrations-gitlab/dist/index.js
4
// Connector for GitLab: pulls the user's recent events plus assigned open
// merge requests through the REST API (v4).
var GitLabConnector = class {
  type = "gitlab";
  // Personal access token; set by authenticate().
  token;
  // API root; overridable per instance (self-hosted GitLab).
  baseUrl = "https://gitlab.com/api/v4";
  /**
   * Store the personal access token and optional base URL.
   * The base URL is normalized here (trailing slash stripped) so that every
   * request builder can append paths consistently — previously only
   * healthCheck() stripped the slash while fetchDelta() concatenated the raw
   * value, producing double-slash URLs for slash-terminated base URLs.
   * @throws {Error} when no token is provided.
   */
  async authenticate(config) {
    this.token = config.credentials?.token;
    if (config.credentials?.baseUrl) {
      this.baseUrl = config.credentials.baseUrl.replace(/\/$/, "");
    }
    if (!this.token) {
      throw new Error("GitLab personal access token is required");
    }
  }
  /**
   * Verify connectivity by fetching the authenticated user's profile.
   * @returns {Promise<{summary: string}>} human-readable connection status.
   */
  async healthCheck() {
    if (!this.token) throw new Error("GitLab personal access token is required");
    // Defensive strip in case baseUrl was assigned directly (no-op after authenticate()).
    const res = await fetch(`${this.baseUrl.replace(/\/$/, "")}/user`, {
      headers: { "PRIVATE-TOKEN": this.token }
    });
    if (!res.ok) {
      throw new Error("GitLab auth test failed: invalid token or base URL");
    }
    const data = await res.json();
    return { summary: `Connected as ${data.username ?? "unknown"}` };
  }
  /**
   * Fetch recent user events and assigned open merge requests.
   * @param {string|undefined} cursor - ISO timestamp of the previous sync;
   *   defaults to 24 hours ago.
   */
  async fetchDelta(cursor) {
    if (!this.token) throw new Error("Not authenticated");
    const since = cursor ?? new Date(Date.now() - 864e5).toISOString();
    const items = [];
    // Recent activity events (only the date part is accepted by `after`).
    const res = await fetch(
      `${this.baseUrl}/events?after=${since.slice(0, 10)}&per_page=50`,
      {
        headers: { "PRIVATE-TOKEN": this.token }
      }
    );
    if (res.ok) {
      const events = await res.json();
      for (const event of events) {
        items.push({
          id: String(event.id),
          source: "gitlab",
          type: event.target_type?.toLowerCase() ?? event.action_name,
          content: `${event.action_name}: ${event.target_title ?? "no title"} (project ${event.project_id})`,
          metadata: {
            action: event.action_name,
            targetType: event.target_type,
            projectId: String(event.project_id)
          },
          timestamp: event.created_at
        });
      }
    }
    // Open merge requests assigned to the authenticated user.
    const mrRes = await fetch(
      `${this.baseUrl}/merge_requests?scope=assigned_to_me&state=opened&per_page=20`,
      {
        headers: { "PRIVATE-TOKEN": this.token }
      }
    );
    if (mrRes.ok) {
      const mrs = await mrRes.json();
      for (const mr of mrs) {
        items.push({
          id: `mr-${mr.id}`,
          source: "gitlab",
          type: "merge_request",
          content: `MR !${mr.iid}: ${mr.title}`,
          metadata: {
            url: mr.web_url,
            projectId: String(mr.project_id),
            state: mr.state
          },
          timestamp: mr.updated_at
        });
      }
    }
    return {
      items,
      newCursor: (/* @__PURE__ */ new Date()).toISOString(),
      hasMore: false
    };
  }
  /**
   * Convert raw fetched items into the normalized shape; the project id
   * (when present) is recorded as the item's only entity.
   */
  async normalize(rawItems) {
    return rawItems.map((item) => ({
      id: item.id,
      source: "gitlab",
      title: item.content,
      body: item.content,
      entities: [item.metadata.projectId].filter(Boolean),
      timestamp: item.timestamp
    }));
  }
};
95
/** Factory used by the connector loader to obtain a fresh GitLab connector. */
function createGitLabConnector() {
  return new GitLabConnector();
}
98
+ export {
99
+ GitLabConnector,
100
+ createGitLabConnector
101
+ };
102
+ //# sourceMappingURL=dist-L76NGFFH.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../integrations-gitlab/dist/index.js"],"sourcesContent":["// src/index.ts\nvar GitLabConnector = class {\n type = \"gitlab\";\n token;\n baseUrl = \"https://gitlab.com/api/v4\";\n async authenticate(config) {\n this.token = config.credentials?.token;\n if (config.credentials?.baseUrl) {\n this.baseUrl = config.credentials.baseUrl;\n }\n if (!this.token) {\n throw new Error(\"GitLab personal access token is required\");\n }\n }\n async healthCheck() {\n if (!this.token) throw new Error(\"GitLab personal access token is required\");\n const res = await fetch(`${this.baseUrl.replace(/\\/$/, \"\")}/user`, {\n headers: { \"PRIVATE-TOKEN\": this.token }\n });\n if (!res.ok) {\n throw new Error(\"GitLab auth test failed: invalid token or base URL\");\n }\n const data = await res.json();\n return { summary: `Connected as ${data.username ?? \"unknown\"}` };\n }\n async fetchDelta(cursor) {\n if (!this.token) throw new Error(\"Not authenticated\");\n const since = cursor ?? new Date(Date.now() - 864e5).toISOString();\n const items = [];\n const res = await fetch(\n `${this.baseUrl}/events?after=${since.slice(0, 10)}&per_page=50`,\n {\n headers: { \"PRIVATE-TOKEN\": this.token }\n }\n );\n if (res.ok) {\n const events = await res.json();\n for (const event of events) {\n items.push({\n id: String(event.id),\n source: \"gitlab\",\n type: event.target_type?.toLowerCase() ?? event.action_name,\n content: `${event.action_name}: ${event.target_title ?? 
\"no title\"} (project ${event.project_id})`,\n metadata: {\n action: event.action_name,\n targetType: event.target_type,\n projectId: String(event.project_id)\n },\n timestamp: event.created_at\n });\n }\n }\n const mrRes = await fetch(\n `${this.baseUrl}/merge_requests?scope=assigned_to_me&state=opened&per_page=20`,\n {\n headers: { \"PRIVATE-TOKEN\": this.token }\n }\n );\n if (mrRes.ok) {\n const mrs = await mrRes.json();\n for (const mr of mrs) {\n items.push({\n id: `mr-${mr.id}`,\n source: \"gitlab\",\n type: \"merge_request\",\n content: `MR !${mr.iid}: ${mr.title}`,\n metadata: {\n url: mr.web_url,\n projectId: String(mr.project_id),\n state: mr.state\n },\n timestamp: mr.updated_at\n });\n }\n }\n return {\n items,\n newCursor: (/* @__PURE__ */ new Date()).toISOString(),\n hasMore: false\n };\n }\n async normalize(rawItems) {\n return rawItems.map((item) => ({\n id: item.id,\n source: \"gitlab\",\n title: item.content,\n body: item.content,\n entities: [item.metadata.projectId].filter(Boolean),\n timestamp: item.timestamp\n }));\n }\n};\nfunction createGitLabConnector() {\n return new GitLabConnector();\n}\nexport {\n GitLabConnector,\n 
createGitLabConnector\n};\n"],"mappings":";;;AACA,IAAI,kBAAkB,MAAM;AAAA,EAC1B,OAAO;AAAA,EACP;AAAA,EACA,UAAU;AAAA,EACV,MAAM,aAAa,QAAQ;AACzB,SAAK,QAAQ,OAAO,aAAa;AACjC,QAAI,OAAO,aAAa,SAAS;AAC/B,WAAK,UAAU,OAAO,YAAY;AAAA,IACpC;AACA,QAAI,CAAC,KAAK,OAAO;AACf,YAAM,IAAI,MAAM,0CAA0C;AAAA,IAC5D;AAAA,EACF;AAAA,EACA,MAAM,cAAc;AAClB,QAAI,CAAC,KAAK,MAAO,OAAM,IAAI,MAAM,0CAA0C;AAC3E,UAAM,MAAM,MAAM,MAAM,GAAG,KAAK,QAAQ,QAAQ,OAAO,EAAE,CAAC,SAAS;AAAA,MACjE,SAAS,EAAE,iBAAiB,KAAK,MAAM;AAAA,IACzC,CAAC;AACD,QAAI,CAAC,IAAI,IAAI;AACX,YAAM,IAAI,MAAM,oDAAoD;AAAA,IACtE;AACA,UAAM,OAAO,MAAM,IAAI,KAAK;AAC5B,WAAO,EAAE,SAAS,gBAAgB,KAAK,YAAY,SAAS,GAAG;AAAA,EACjE;AAAA,EACA,MAAM,WAAW,QAAQ;AACvB,QAAI,CAAC,KAAK,MAAO,OAAM,IAAI,MAAM,mBAAmB;AACpD,UAAM,QAAQ,UAAU,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,EAAE,YAAY;AACjE,UAAM,QAAQ,CAAC;AACf,UAAM,MAAM,MAAM;AAAA,MAChB,GAAG,KAAK,OAAO,iBAAiB,MAAM,MAAM,GAAG,EAAE,CAAC;AAAA,MAClD;AAAA,QACE,SAAS,EAAE,iBAAiB,KAAK,MAAM;AAAA,MACzC;AAAA,IACF;AACA,QAAI,IAAI,IAAI;AACV,YAAM,SAAS,MAAM,IAAI,KAAK;AAC9B,iBAAW,SAAS,QAAQ;AAC1B,cAAM,KAAK;AAAA,UACT,IAAI,OAAO,MAAM,EAAE;AAAA,UACnB,QAAQ;AAAA,UACR,MAAM,MAAM,aAAa,YAAY,KAAK,MAAM;AAAA,UAChD,SAAS,GAAG,MAAM,WAAW,KAAK,MAAM,gBAAgB,UAAU,aAAa,MAAM,UAAU;AAAA,UAC/F,UAAU;AAAA,YACR,QAAQ,MAAM;AAAA,YACd,YAAY,MAAM;AAAA,YAClB,WAAW,OAAO,MAAM,UAAU;AAAA,UACpC;AAAA,UACA,WAAW,MAAM;AAAA,QACnB,CAAC;AAAA,MACH;AAAA,IACF;AACA,UAAM,QAAQ,MAAM;AAAA,MAClB,GAAG,KAAK,OAAO;AAAA,MACf;AAAA,QACE,SAAS,EAAE,iBAAiB,KAAK,MAAM;AAAA,MACzC;AAAA,IACF;AACA,QAAI,MAAM,IAAI;AACZ,YAAM,MAAM,MAAM,MAAM,KAAK;AAC7B,iBAAW,MAAM,KAAK;AACpB,cAAM,KAAK;AAAA,UACT,IAAI,MAAM,GAAG,EAAE;AAAA,UACf,QAAQ;AAAA,UACR,MAAM;AAAA,UACN,SAAS,OAAO,GAAG,GAAG,KAAK,GAAG,KAAK;AAAA,UACnC,UAAU;AAAA,YACR,KAAK,GAAG;AAAA,YACR,WAAW,OAAO,GAAG,UAAU;AAAA,YAC/B,OAAO,GAAG;AAAA,UACZ;AAAA,UACA,WAAW,GAAG;AAAA,QAChB,CAAC;AAAA,MACH;AAAA,IACF;AACA,WAAO;AAAA,MACL;AAAA,MACA,YAA4B,oBAAI,KAAK,GAAG,YAAY;AAAA,MACpD,SAAS;AAAA,IACX;AAAA,EACF;AAAA,EACA,MAAM,UAAU,UAAU;AACxB,WAAO,SAAS,IAAI,CAAC,UAAU;AAAA,MAC7B,IAAI,KAAK;AAAA,MACT,QAAQ;AAAA,MACR,OAAO,KAAK;AA
AA,MACZ,MAAM,KAAK;AAAA,MACX,UAAU,CAAC,KAAK,SAAS,SAAS,EAAE,OAAO,OAAO;AAAA,MAClD,WAAW,KAAK;AAAA,IAClB,EAAE;AAAA,EACJ;AACF;AACA,SAAS,wBAAwB;AAC/B,SAAO,IAAI,gBAAgB;AAC7B;","names":[]}
@@ -0,0 +1,178 @@
1
+ import "./chunk-7D4SUZUM.js";
2
+
3
+ // ../integrations-github/dist/index.js
4
/**
 * Pull-based connector for GitHub.
 *
 * Authenticates with a personal access token, reports health via the
 * authenticated `/user` endpoint, and fetches a delta of notifications and
 * repository events newer than a cursor timestamp.
 */
var GitHubConnector = class {
  type = "github";
  token;
  repositories;

  /**
   * Store the personal access token and the optional repository allow-list.
   * @param {{credentials?: {token?: string}, repositories?: string[]}} config
   * @throws {Error} when no token is supplied.
   */
  async authenticate(config) {
    this.token = config.credentials?.token;
    if (!this.token) {
      throw new Error("GitHub personal access token is required");
    }
    this.repositories = config.repositories;
  }

  /**
   * Verify the token by calling GET /user.
   * @returns {Promise<{summary: string}>} summary containing the login name.
   * @throws {Error} when unauthenticated or the token is rejected.
   */
  async healthCheck() {
    if (!this.token) throw new Error("GitHub personal access token is required");
    const res = await fetch("https://api.github.com/user", {
      headers: {
        Authorization: `token ${this.token}`,
        "User-Agent": "personal-assistant",
        Accept: "application/vnd.github+json"
      }
    });
    if (!res.ok) {
      throw new Error("GitHub auth test failed: invalid token or insufficient scopes");
    }
    const data = await res.json();
    return { summary: `Connected as ${data.login ?? "unknown"}` };
  }

  /**
   * Fetch notifications and events newer than `cursor` (defaults to 24h ago).
   * Non-OK HTTP responses are skipped silently (best-effort), matching the
   * original behavior.
   * @param {string | undefined} cursor - ISO timestamp of the previous sync.
   * @returns {Promise<{items: object[], newCursor: string, hasMore: boolean}>}
   * @throws {Error} when authenticate() has not been called.
   */
  async fetchDelta(cursor) {
    if (!this.token) throw new Error("Not authenticated");
    const since = cursor ?? new Date(Date.now() - 864e5).toISOString();
    const items = [];
    const headers = {
      Authorization: `token ${this.token}`,
      // GitHub's REST API requires a User-Agent header; healthCheck already
      // sends one, so keep both requests consistent.
      "User-Agent": "personal-assistant",
      Accept: "application/vnd.github.v3+json"
    };
    const notifRes = await fetch(
      `https://api.github.com/notifications?since=${since}&all=false`,
      { headers }
    );
    if (notifRes.ok) {
      const notifications = await notifRes.json();
      for (const notif of notifications) {
        // Honor the optional repository allow-list.
        if (this.repositories && !this.repositories.includes(notif.repository.full_name)) {
          continue;
        }
        items.push({
          id: notif.id,
          source: "github",
          type: notif.subject.type.toLowerCase(),
          content: `${notif.subject.type}: ${notif.subject.title} in ${notif.repository.full_name}`,
          metadata: {
            repo: notif.repository.full_name,
            subjectType: notif.subject.type,
            reason: notif.reason
          },
          timestamp: notif.updated_at
        });
      }
    }
    if (this.repositories && this.repositories.length > 0) {
      // Per-repo requests are independent: fetch in parallel, then append in
      // the allow-list's order so output ordering is unchanged.
      const perRepo = await Promise.all(
        this.repositories.map(
          (repo) => this.#fetchEvents(`https://api.github.com/repos/${repo}/events?per_page=30`, headers)
        )
      );
      for (const events of perRepo) {
        this.#appendEventItems(items, events, since);
      }
    } else {
      const events = await this.#fetchEvents("https://api.github.com/events?per_page=30", headers);
      this.#appendEventItems(items, events, since);
    }
    return {
      items,
      newCursor: new Date().toISOString(),
      hasMore: false
    };
  }

  /** GET an events URL; returns [] on any non-OK response (best-effort). */
  async #fetchEvents(url, headers) {
    const res = await fetch(url, { headers });
    return res.ok ? res.json() : [];
  }

  /** Convert raw events newer than `since` into items and push them onto `items`. */
  #appendEventItems(items, events, since) {
    const cutoff = new Date(since);
    for (const event of events) {
      if (new Date(event.created_at) < cutoff) continue;
      items.push({
        id: event.id,
        source: "github",
        type: event.type,
        content: this.formatEventContent(event),
        metadata: {
          repo: event.repo.name,
          eventType: event.type,
          actor: event.actor.login,
          payload: this.extractPayloadSummary(event.type, event.payload)
        },
        timestamp: event.created_at
      });
    }
  }

  /** One-line human-readable description of a raw event. */
  formatEventContent(event) {
    const summary = this.extractPayloadSummary(event.type, event.payload);
    const base = `${event.type} in ${event.repo.name} by ${event.actor.login}`;
    return summary ? `${base} \u2014 ${summary}` : base;
  }

  /**
   * Short summary of an event payload, keyed by event type.
   * Returns "" for unrecognized types or missing payload fields.
   */
  extractPayloadSummary(type, payload) {
    switch (type) {
      case "PushEvent": {
        const commits = payload.commits;
        if (commits && commits.length > 0) {
          // First line of each commit message only.
          return commits.map((c) => c.message.split("\n")[0]).join("; ");
        }
        return "";
      }
      case "PullRequestEvent": {
        const pr = payload.pull_request;
        const action = payload.action;
        return pr ? `${action}: ${pr.title}` : "";
      }
      case "IssuesEvent": {
        const issue = payload.issue;
        const action = payload.action;
        return issue ? `${action}: ${issue.title}` : "";
      }
      case "IssueCommentEvent": {
        const issue = payload.issue;
        return issue ? `comment on: ${issue.title}` : "";
      }
      case "CreateEvent": {
        const refType = payload.ref_type;
        const ref = payload.ref;
        return ref ? `${refType}: ${ref}` : `${refType}`;
      }
      case "DeleteEvent": {
        const refType = payload.ref_type;
        const ref = payload.ref;
        return `deleted ${refType}: ${ref}`;
      }
      default:
        return "";
    }
  }

  /** Map raw delta items into normalized records (entities drop falsy values). */
  async normalize(rawItems) {
    return rawItems.map((item) => ({
      id: item.id,
      source: "github",
      title: item.content,
      body: item.content,
      entities: [item.metadata.repo, item.metadata.actor].filter(Boolean),
      timestamp: item.timestamp
    }));
  }
};
171
/**
 * Factory: build a new, unauthenticated GitHubConnector.
 * @returns {GitHubConnector}
 */
function createGitHubConnector() {
  const connector = new GitHubConnector();
  return connector;
}
174
+ export {
175
+ GitHubConnector,
176
+ createGitHubConnector
177
+ };
178
+ //# sourceMappingURL=dist-NV2YVVHI.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../integrations-github/dist/index.js"],"sourcesContent":["// src/index.ts\nvar GitHubConnector = class {\n type = \"github\";\n token;\n repositories;\n async authenticate(config) {\n this.token = config.credentials?.token;\n if (!this.token) {\n throw new Error(\"GitHub personal access token is required\");\n }\n this.repositories = config.repositories;\n }\n async healthCheck() {\n if (!this.token) throw new Error(\"GitHub personal access token is required\");\n const res = await fetch(\"https://api.github.com/user\", {\n headers: {\n Authorization: `token ${this.token}`,\n \"User-Agent\": \"personal-assistant\",\n Accept: \"application/vnd.github+json\"\n }\n });\n if (!res.ok) {\n throw new Error(\"GitHub auth test failed: invalid token or insufficient scopes\");\n }\n const data = await res.json();\n return { summary: `Connected as ${data.login ?? \"unknown\"}` };\n }\n async fetchDelta(cursor) {\n if (!this.token) throw new Error(\"Not authenticated\");\n const since = cursor ?? 
new Date(Date.now() - 864e5).toISOString();\n const items = [];\n const headers = {\n Authorization: `token ${this.token}`,\n Accept: \"application/vnd.github.v3+json\"\n };\n const notifRes = await fetch(\n `https://api.github.com/notifications?since=${since}&all=false`,\n { headers }\n );\n if (notifRes.ok) {\n const notifications = await notifRes.json();\n for (const notif of notifications) {\n if (this.repositories && !this.repositories.includes(notif.repository.full_name)) {\n continue;\n }\n items.push({\n id: notif.id,\n source: \"github\",\n type: notif.subject.type.toLowerCase(),\n content: `${notif.subject.type}: ${notif.subject.title} in ${notif.repository.full_name}`,\n metadata: {\n repo: notif.repository.full_name,\n subjectType: notif.subject.type,\n reason: notif.reason\n },\n timestamp: notif.updated_at\n });\n }\n }\n if (this.repositories && this.repositories.length > 0) {\n for (const repo of this.repositories) {\n const eventsRes = await fetch(\n `https://api.github.com/repos/${repo}/events?per_page=30`,\n { headers }\n );\n if (eventsRes.ok) {\n const events = await eventsRes.json();\n for (const event of events) {\n if (new Date(event.created_at) < new Date(since)) continue;\n items.push({\n id: event.id,\n source: \"github\",\n type: event.type,\n content: this.formatEventContent(event),\n metadata: {\n repo: event.repo.name,\n eventType: event.type,\n actor: event.actor.login,\n payload: this.extractPayloadSummary(event.type, event.payload)\n },\n timestamp: event.created_at\n });\n }\n }\n }\n } else {\n const eventsRes = await fetch(\"https://api.github.com/events?per_page=30\", {\n headers\n });\n if (eventsRes.ok) {\n const events = await eventsRes.json();\n for (const event of events) {\n if (new Date(event.created_at) < new Date(since)) continue;\n items.push({\n id: event.id,\n source: \"github\",\n type: event.type,\n content: this.formatEventContent(event),\n metadata: {\n repo: event.repo.name,\n eventType: event.type,\n actor: 
event.actor.login,\n payload: this.extractPayloadSummary(event.type, event.payload)\n },\n timestamp: event.created_at\n });\n }\n }\n }\n return {\n items,\n newCursor: (/* @__PURE__ */ new Date()).toISOString(),\n hasMore: false\n };\n }\n formatEventContent(event) {\n const summary = this.extractPayloadSummary(event.type, event.payload);\n const base = `${event.type} in ${event.repo.name} by ${event.actor.login}`;\n return summary ? `${base} \\u2014 ${summary}` : base;\n }\n extractPayloadSummary(type, payload) {\n switch (type) {\n case \"PushEvent\": {\n const commits = payload.commits;\n if (commits && commits.length > 0) {\n return commits.map((c) => c.message.split(\"\\n\")[0]).join(\"; \");\n }\n return \"\";\n }\n case \"PullRequestEvent\": {\n const pr = payload.pull_request;\n const action = payload.action;\n return pr ? `${action}: ${pr.title}` : \"\";\n }\n case \"IssuesEvent\": {\n const issue = payload.issue;\n const action = payload.action;\n return issue ? `${action}: ${issue.title}` : \"\";\n }\n case \"IssueCommentEvent\": {\n const issue = payload.issue;\n return issue ? `comment on: ${issue.title}` : \"\";\n }\n case \"CreateEvent\": {\n const refType = payload.ref_type;\n const ref = payload.ref;\n return ref ? 
`${refType}: ${ref}` : `${refType}`;\n }\n case \"DeleteEvent\": {\n const refType = payload.ref_type;\n const ref = payload.ref;\n return `deleted ${refType}: ${ref}`;\n }\n default:\n return \"\";\n }\n }\n async normalize(rawItems) {\n return rawItems.map((item) => ({\n id: item.id,\n source: \"github\",\n title: item.content,\n body: item.content,\n entities: [item.metadata.repo, item.metadata.actor].filter(Boolean),\n timestamp: item.timestamp\n }));\n }\n};\nfunction createGitHubConnector() {\n return new GitHubConnector();\n}\nexport {\n GitHubConnector,\n createGitHubConnector\n};\n"],"mappings":";;;AACA,IAAI,kBAAkB,MAAM;AAAA,EAC1B,OAAO;AAAA,EACP;AAAA,EACA;AAAA,EACA,MAAM,aAAa,QAAQ;AACzB,SAAK,QAAQ,OAAO,aAAa;AACjC,QAAI,CAAC,KAAK,OAAO;AACf,YAAM,IAAI,MAAM,0CAA0C;AAAA,IAC5D;AACA,SAAK,eAAe,OAAO;AAAA,EAC7B;AAAA,EACA,MAAM,cAAc;AAClB,QAAI,CAAC,KAAK,MAAO,OAAM,IAAI,MAAM,0CAA0C;AAC3E,UAAM,MAAM,MAAM,MAAM,+BAA+B;AAAA,MACrD,SAAS;AAAA,QACP,eAAe,SAAS,KAAK,KAAK;AAAA,QAClC,cAAc;AAAA,QACd,QAAQ;AAAA,MACV;AAAA,IACF,CAAC;AACD,QAAI,CAAC,IAAI,IAAI;AACX,YAAM,IAAI,MAAM,+DAA+D;AAAA,IACjF;AACA,UAAM,OAAO,MAAM,IAAI,KAAK;AAC5B,WAAO,EAAE,SAAS,gBAAgB,KAAK,SAAS,SAAS,GAAG;AAAA,EAC9D;AAAA,EACA,MAAM,WAAW,QAAQ;AACvB,QAAI,CAAC,KAAK,MAAO,OAAM,IAAI,MAAM,mBAAmB;AACpD,UAAM,QAAQ,UAAU,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,EAAE,YAAY;AACjE,UAAM,QAAQ,CAAC;AACf,UAAM,UAAU;AAAA,MACd,eAAe,SAAS,KAAK,KAAK;AAAA,MAClC,QAAQ;AAAA,IACV;AACA,UAAM,WAAW,MAAM;AAAA,MACrB,8CAA8C,KAAK;AAAA,MACnD,EAAE,QAAQ;AAAA,IACZ;AACA,QAAI,SAAS,IAAI;AACf,YAAM,gBAAgB,MAAM,SAAS,KAAK;AAC1C,iBAAW,SAAS,eAAe;AACjC,YAAI,KAAK,gBAAgB,CAAC,KAAK,aAAa,SAAS,MAAM,WAAW,SAAS,GAAG;AAChF;AAAA,QACF;AACA,cAAM,KAAK;AAAA,UACT,IAAI,MAAM;AAAA,UACV,QAAQ;AAAA,UACR,MAAM,MAAM,QAAQ,KAAK,YAAY;AAAA,UACrC,SAAS,GAAG,MAAM,QAAQ,IAAI,KAAK,MAAM,QAAQ,KAAK,OAAO,MAAM,WAAW,SAAS;AAAA,UACvF,UAAU;AAAA,YACR,MAAM,MAAM,WAAW;AAAA,YACvB,aAAa,MAAM,QAAQ;AAAA,YAC3B,QAAQ,MAAM;AAAA,UAChB;AAAA,UACA,WAAW,MAAM;AAAA,QACnB,CAAC;AAAA,MACH;AAAA,IACF;AACA,QAAI,KAAK,gBAAgB,KAAK,aAAa,SAAS,GAAG;AACrD,iBAAW,QAA
Q,KAAK,cAAc;AACpC,cAAM,YAAY,MAAM;AAAA,UACtB,gCAAgC,IAAI;AAAA,UACpC,EAAE,QAAQ;AAAA,QACZ;AACA,YAAI,UAAU,IAAI;AAChB,gBAAM,SAAS,MAAM,UAAU,KAAK;AACpC,qBAAW,SAAS,QAAQ;AAC1B,gBAAI,IAAI,KAAK,MAAM,UAAU,IAAI,IAAI,KAAK,KAAK,EAAG;AAClD,kBAAM,KAAK;AAAA,cACT,IAAI,MAAM;AAAA,cACV,QAAQ;AAAA,cACR,MAAM,MAAM;AAAA,cACZ,SAAS,KAAK,mBAAmB,KAAK;AAAA,cACtC,UAAU;AAAA,gBACR,MAAM,MAAM,KAAK;AAAA,gBACjB,WAAW,MAAM;AAAA,gBACjB,OAAO,MAAM,MAAM;AAAA,gBACnB,SAAS,KAAK,sBAAsB,MAAM,MAAM,MAAM,OAAO;AAAA,cAC/D;AAAA,cACA,WAAW,MAAM;AAAA,YACnB,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAAA,IACF,OAAO;AACL,YAAM,YAAY,MAAM,MAAM,6CAA6C;AAAA,QACzE;AAAA,MACF,CAAC;AACD,UAAI,UAAU,IAAI;AAChB,cAAM,SAAS,MAAM,UAAU,KAAK;AACpC,mBAAW,SAAS,QAAQ;AAC1B,cAAI,IAAI,KAAK,MAAM,UAAU,IAAI,IAAI,KAAK,KAAK,EAAG;AAClD,gBAAM,KAAK;AAAA,YACT,IAAI,MAAM;AAAA,YACV,QAAQ;AAAA,YACR,MAAM,MAAM;AAAA,YACZ,SAAS,KAAK,mBAAmB,KAAK;AAAA,YACtC,UAAU;AAAA,cACR,MAAM,MAAM,KAAK;AAAA,cACjB,WAAW,MAAM;AAAA,cACjB,OAAO,MAAM,MAAM;AAAA,cACnB,SAAS,KAAK,sBAAsB,MAAM,MAAM,MAAM,OAAO;AAAA,YAC/D;AAAA,YACA,WAAW,MAAM;AAAA,UACnB,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,MACL;AAAA,MACA,YAA4B,oBAAI,KAAK,GAAG,YAAY;AAAA,MACpD,SAAS;AAAA,IACX;AAAA,EACF;AAAA,EACA,mBAAmB,OAAO;AACxB,UAAM,UAAU,KAAK,sBAAsB,MAAM,MAAM,MAAM,OAAO;AACpE,UAAM,OAAO,GAAG,MAAM,IAAI,OAAO,MAAM,KAAK,IAAI,OAAO,MAAM,MAAM,KAAK;AACxE,WAAO,UAAU,GAAG,IAAI,WAAW,OAAO,KAAK;AAAA,EACjD;AAAA,EACA,sBAAsB,MAAM,SAAS;AACnC,YAAQ,MAAM;AAAA,MACZ,KAAK,aAAa;AAChB,cAAM,UAAU,QAAQ;AACxB,YAAI,WAAW,QAAQ,SAAS,GAAG;AACjC,iBAAO,QAAQ,IAAI,CAAC,MAAM,EAAE,QAAQ,MAAM,IAAI,EAAE,CAAC,CAAC,EAAE,KAAK,IAAI;AAAA,QAC/D;AACA,eAAO;AAAA,MACT;AAAA,MACA,KAAK,oBAAoB;AACvB,cAAM,KAAK,QAAQ;AACnB,cAAM,SAAS,QAAQ;AACvB,eAAO,KAAK,GAAG,MAAM,KAAK,GAAG,KAAK,KAAK;AAAA,MACzC;AAAA,MACA,KAAK,eAAe;AAClB,cAAM,QAAQ,QAAQ;AACtB,cAAM,SAAS,QAAQ;AACvB,eAAO,QAAQ,GAAG,MAAM,KAAK,MAAM,KAAK,KAAK;AAAA,MAC/C;AAAA,MACA,KAAK,qBAAqB;AACxB,cAAM,QAAQ,QAAQ;AACtB,eAAO,QAAQ,eAAe,MAAM,KAAK,KAAK;AAAA,MAChD;AAAA,MACA,KAAK,eAAe;AAClB,cAAM,UAAU,QAAQ;AACxB,cAAM,MAAM,QAAQ;AACpB,eAAO,MAAM,GAAG,OAAO,KAAK,GAAG,
KAAK,GAAG,OAAO;AAAA,MAChD;AAAA,MACA,KAAK,eAAe;AAClB,cAAM,UAAU,QAAQ;AACxB,cAAM,MAAM,QAAQ;AACpB,eAAO,WAAW,OAAO,KAAK,GAAG;AAAA,MACnC;AAAA,MACA;AACE,eAAO;AAAA,IACX;AAAA,EACF;AAAA,EACA,MAAM,UAAU,UAAU;AACxB,WAAO,SAAS,IAAI,CAAC,UAAU;AAAA,MAC7B,IAAI,KAAK;AAAA,MACT,QAAQ;AAAA,MACR,OAAO,KAAK;AAAA,MACZ,MAAM,KAAK;AAAA,MACX,UAAU,CAAC,KAAK,SAAS,MAAM,KAAK,SAAS,KAAK,EAAE,OAAO,OAAO;AAAA,MAClE,WAAW,KAAK;AAAA,IAClB,EAAE;AAAA,EACJ;AACF;AACA,SAAS,wBAAwB;AAC/B,SAAO,IAAI,gBAAgB;AAC7B;","names":[]}
@@ -0,0 +1,41 @@
1
+ import "./chunk-7D4SUZUM.js";
2
+
3
+ // ../integrations-gchat/dist/index.js
4
/**
 * Placeholder connector for Google Chat. The integration has not been built
 * yet: authenticate() and healthCheck() always reject, while fetchDelta()
 * and normalize() behave as harmless no-ops so callers do not crash.
 */
var GChatConnector = class {
  type = "gchat";

  // Shared "not implemented" failure used by authenticate() and healthCheck().
  #notImplemented() {
    return new Error(
      "Google Chat integration is not implemented yet. Disable gchat in your integrations config until the connector is available."
    );
  }

  /** Always rejects: the connector is not implemented. */
  async authenticate(_config) {
    throw this.#notImplemented();
  }

  /** Always rejects: the connector is not implemented. */
  async healthCheck() {
    throw this.#notImplemented();
  }

  /** Empty delta: no items, cursor advanced to "now". */
  async fetchDelta(_cursor) {
    const newCursor = new Date().toISOString();
    return { items: [], newCursor, hasMore: false };
  }

  /** Wrap raw items in the normalized record shape. */
  async normalize(rawItems) {
    const normalized = [];
    for (const { id, content, timestamp } of rawItems) {
      normalized.push({
        id,
        source: "gchat",
        title: "GChat message",
        body: content,
        entities: [],
        timestamp
      });
    }
    return normalized;
  }
};
34
/**
 * Factory: build a new GChatConnector (currently a not-implemented stub).
 * @returns {GChatConnector}
 */
function createGChatConnector() {
  const connector = new GChatConnector();
  return connector;
}
37
+ export {
38
+ GChatConnector,
39
+ createGChatConnector
40
+ };
41
+ //# sourceMappingURL=dist-RMYCRZIU.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../integrations-gchat/dist/index.js"],"sourcesContent":["// src/index.ts\nvar GChatConnector = class {\n type = \"gchat\";\n async authenticate(_config) {\n throw new Error(\n \"Google Chat integration is not implemented yet. Disable gchat in your integrations config until the connector is available.\"\n );\n }\n async healthCheck() {\n throw new Error(\n \"Google Chat integration is not implemented yet. Disable gchat in your integrations config until the connector is available.\"\n );\n }\n async fetchDelta(_cursor) {\n return {\n items: [],\n newCursor: (/* @__PURE__ */ new Date()).toISOString(),\n hasMore: false\n };\n }\n async normalize(rawItems) {\n return rawItems.map((item) => ({\n id: item.id,\n source: \"gchat\",\n title: \"GChat message\",\n body: item.content,\n entities: [],\n timestamp: item.timestamp\n }));\n }\n};\nfunction createGChatConnector() {\n return new GChatConnector();\n}\nexport {\n GChatConnector,\n createGChatConnector\n};\n"],"mappings":";;;AACA,IAAI,iBAAiB,MAAM;AAAA,EACzB,OAAO;AAAA,EACP,MAAM,aAAa,SAAS;AAC1B,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA,EACA,MAAM,cAAc;AAClB,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA,EACA,MAAM,WAAW,SAAS;AACxB,WAAO;AAAA,MACL,OAAO,CAAC;AAAA,MACR,YAA4B,oBAAI,KAAK,GAAG,YAAY;AAAA,MACpD,SAAS;AAAA,IACX;AAAA,EACF;AAAA,EACA,MAAM,UAAU,UAAU;AACxB,WAAO,SAAS,IAAI,CAAC,UAAU;AAAA,MAC7B,IAAI,KAAK;AAAA,MACT,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,MAAM,KAAK;AAAA,MACX,UAAU,CAAC;AAAA,MACX,WAAW,KAAK;AAAA,IAClB,EAAE;AAAA,EACJ;AACF;AACA,SAAS,uBAAuB;AAC9B,SAAO,IAAI,eAAe;AAC5B;","names":[]}
@@ -0,0 +1,14 @@
1
+ import {
2
+ SlackConnector,
3
+ createSlackConnector,
4
+ openSocketModeConnection,
5
+ validateSlackAppToken
6
+ } from "./chunk-ZKKMIDRK.js";
7
+ import "./chunk-7D4SUZUM.js";
8
+ export {
9
+ SlackConnector,
10
+ createSlackConnector,
11
+ openSocketModeConnection,
12
+ validateSlackAppToken
13
+ };
14
+ //# sourceMappingURL=dist-THLCZNOZ.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}