daeda-mcp 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,64 @@
1
+ import { existsSync, readFileSync, writeFileSync, mkdirSync } from "node:fs";
2
+ import { join, dirname } from "node:path";
3
+ import { fileURLToPath } from "node:url";
4
// Resolve paths relative to this compiled module so the data directory
// lands at <package-root>/data regardless of the process cwd.
const __dirname = dirname(fileURLToPath(import.meta.url));
const DATA_DIR = join(__dirname, "..", "..", "data");
// JSON file that persists initialization/sync progress across restarts.
const STATE_FILE = join(DATA_DIR, "init_state.json");
7
// Names of the six HubSpot export datasets tracked in init state. Membership
// must match the keys of createInitialState().exports.
export const ALL_EXPORTS = [
    "contacts",
    "companies",
    "deals",
    "contact_company",
    "deal_contact",
    "deal_company",
];
15
/**
 * Build the tracking record for a single export before any sync has run:
 * no export id yet, status "pending", no error.
 */
function createEmptyExportState() {
    const entry = {
        exportId: null,
        status: "pending",
        error: null,
    };
    return entry;
}
22
/**
 * Build the initial top-level sync state: status "idle" with one pending
 * entry per export dataset.
 *
 * The exports map is derived from ALL_EXPORTS so the two can never drift
 * apart (previously the six keys were duplicated by hand here, which would
 * silently break getSyncedCount if one list changed without the other).
 */
export function createInitialState() {
    const exportStates = {};
    for (const name of ALL_EXPORTS) {
        exportStates[name] = createEmptyExportState();
    }
    return {
        status: "idle",
        exports: exportStates,
        startedAt: null,
        error: null,
    };
}
37
/**
 * Load persisted init state from disk. Falls back to a fresh initial state
 * when the file is missing, unreadable, or contains invalid JSON.
 */
export function readInitState() {
    try {
        return existsSync(STATE_FILE)
            ? JSON.parse(readFileSync(STATE_FILE, "utf-8"))
            : createInitialState();
    }
    catch {
        // Corrupt or unreadable state file — start over rather than crash.
        return createInitialState();
    }
}
49
/**
 * Persist init state to disk as pretty-printed JSON, creating the data
 * directory on first use.
 * @param state serializable init-state object
 */
export function writeInitState(state) {
    mkdirSync(DATA_DIR, { recursive: true });
    const serialized = JSON.stringify(state, null, 2);
    writeFileSync(STATE_FILE, serialized, "utf-8");
}
53
/** Overwrite any persisted state with a freshly built initial state. */
export function resetInitState() {
    const fresh = createInitialState();
    writeInitState(fresh);
}
56
/**
 * Count how many of the known exports report status "synced".
 *
 * Uses optional chaining so a state object that lacks an entry for some
 * export (e.g. state persisted by an older version of this file) counts
 * that export as unsynced instead of throwing a TypeError.
 */
export function getSyncedCount(state) {
    return ALL_EXPORTS.filter((name) => state.exports?.[name]?.status === "synced").length;
}
59
/** True once every export tracked in ALL_EXPORTS has reached "synced". */
export function isFullySynced(state) {
    const syncedCount = getSyncedCount(state);
    return syncedCount === ALL_EXPORTS.length;
}
62
// Absolute path of the JSON file backing init state (exposed for
// diagnostics and tooling).
export function getStateFilePath() {
    return STATE_FILE;
}
@@ -0,0 +1 @@
1
/**
 * Seed the local database with the most recently modified HubSpot deals and
 * their associated contacts/companies. Resolves when seeding completes or
 * fails; failures are recorded in init state rather than thrown.
 */
export declare function runSeeding(token: string): Promise<void>;
@@ -0,0 +1,176 @@
1
+ import { batchInsertDeals, batchInsertContacts, batchInsertCompanies, batchInsertAssociations, } from "../db/sqlite.js";
2
+ import { readInitState, writeInitState, } from "./init-state.js";
3
const HUBSPOT_API_BASE = "https://api.hubapi.com";
// Maximum number of deals fetched during the quick seed.
const SEED_LIMIT = 1000;
// Ids per request for search paging and batch endpoints.
const BATCH_SIZE = 100;
6
/**
 * Perform one authenticated JSON request against the HubSpot REST API.
 * @param token  bearer token for the Authorization header
 * @param method HTTP verb
 * @param path   path appended to HUBSPOT_API_BASE
 * @param body   optional JSON-serializable request body
 * @throws Error carrying the status code and response text on non-2xx replies
 */
async function apiRequest(token, method, path, body) {
    const init = {
        method,
        headers: {
            Authorization: `Bearer ${token}`,
            "Content-Type": "application/json",
        },
        body: body ? JSON.stringify(body) : undefined,
    };
    const response = await fetch(`${HUBSPOT_API_BASE}${path}`, init);
    if (response.ok) {
        return response.json();
    }
    const errorText = await response.text();
    throw new Error(`HubSpot API error (${response.status}): ${errorText}`);
}
21
/**
 * Page through the CRM search endpoint, most recently modified deals first,
 * until `limit` deals are collected or HubSpot reports no further pages.
 */
async function searchRecentDeals(token, limit = SEED_LIMIT) {
    const collected = [];
    let cursor;
    while (collected.length < limit) {
        const pageSize = Math.min(BATCH_SIZE, limit - collected.length);
        const body = {
            limit: pageSize,
            sorts: [{ propertyName: "hs_lastmodifieddate", direction: "DESCENDING" }],
            properties: ["dealname", "amount", "dealstage", "closedate", "pipeline", "hs_lastmodifieddate"],
        };
        if (cursor) {
            body.after = cursor;
        }
        const page = await apiRequest(token, "POST", "/crm/v3/objects/deals/search", body);
        collected.push(...page.results);
        const nextCursor = page.paging?.next?.after;
        // Stop on the last page: no cursor, or a short page.
        if (!nextCursor || page.results.length < pageSize) {
            break;
        }
        cursor = nextCursor;
    }
    return collected;
}
43
/**
 * Read associations from `fromObjectType` to `toObjectType` for the given
 * object ids, issuing one batch/read call per BATCH_SIZE ids.
 * @returns Map of from-object id -> array of associated object ids
 */
async function batchGetAssociations(token, fromObjectType, toObjectType, objectIds) {
    const associations = new Map();
    const endpoint = `/crm/v4/associations/${fromObjectType}/${toObjectType}/batch/read`;
    for (let start = 0; start < objectIds.length; start += BATCH_SIZE) {
        const inputs = objectIds
            .slice(start, start + BATCH_SIZE)
            .map((id) => ({ id }));
        const response = await apiRequest(token, "POST", endpoint, { inputs });
        for (const { from, to } of response.results) {
            associations.set(from.id, to.map((t) => t.toObjectId));
        }
    }
    return associations;
}
59
/**
 * Fetch full records for the given ids (de-duplicated first) via the CRM
 * batch/read endpoint, BATCH_SIZE ids per request.
 */
async function batchReadObjects(token, objectType, objectIds, properties) {
    const uniqueIds = [...new Set(objectIds)];
    const objects = [];
    let offset = 0;
    while (offset < uniqueIds.length) {
        const inputs = uniqueIds
            .slice(offset, offset + BATCH_SIZE)
            .map((id) => ({ id }));
        const response = await apiRequest(token, "POST", `/crm/v3/objects/${objectType}/batch/read`, { inputs, properties });
        objects.push(...response.results);
        offset += BATCH_SIZE;
    }
    return objects;
}
73
/** Record the current seeding row counts in the persisted init state. */
function updateSeedingProgress(deals, contacts, companies) {
    const state = readInitState();
    const progress = { deals, contacts, companies };
    state.seedingProgress = progress;
    writeInitState(state);
}
78
/**
 * Persist the seeding lifecycle status in init state.
 *
 * When moving to a non-error status without an explicit error, any stale
 * seedingError left by a previous failed attempt is cleared — otherwise
 * status consumers (db_status) would keep reporting the old failure
 * alongside a later successful run.
 * @param status seeding status string (e.g. "in_progress", "completed", "error")
 * @param error  optional error message to record alongside an "error" status
 */
function updateSeedingStatus(status, error) {
    const state = readInitState();
    state.seedingStatus = status;
    if (error) {
        state.seedingError = error;
    }
    else if (status !== "error") {
        delete state.seedingError;
    }
    writeInitState(state);
}
86
/**
 * Quick-seed the local database with the most recently modified deals plus
 * their associated contacts and companies.
 *
 * Progress and the terminal status ("completed" / "error") are written to
 * persisted init state; errors are captured there rather than rethrown, so
 * a failed seed never crashes the caller.
 * @param token HubSpot bearer token
 */
export async function runSeeding(token) {
    console.error("[seeder] Starting quick seed of recent deals...");
    updateSeedingStatus("in_progress");
    updateSeedingProgress(0, 0, 0);
    try {
        console.error("[seeder] Fetching 1,000 most recently modified deals...");
        const deals = await searchRecentDeals(token, SEED_LIMIT);
        console.error(`[seeder] Found ${deals.length} deals`);
        if (deals.length === 0) {
            console.error("[seeder] No deals found, skipping seed");
            updateSeedingStatus("completed");
            return;
        }
        const dealIds = deals.map((d) => d.id);
        const dealRows = deals.map((d) => ({
            id: d.id,
            dealname: d.properties.dealname || null,
            properties: d.properties,
        }));
        await batchInsertDeals(dealRows);
        updateSeedingProgress(dealRows.length, 0, 0);
        console.error(`[seeder] Inserted ${dealRows.length} deals`);
        // The two association reads are independent, so fetch them in
        // parallel (previously awaited sequentially).
        console.error("[seeder] Fetching deal-contact associations...");
        console.error("[seeder] Fetching deal-company associations...");
        const [dealContactAssocs, dealCompanyAssocs] = await Promise.all([
            batchGetAssociations(token, "deals", "contacts", dealIds),
            batchGetAssociations(token, "deals", "companies", dealIds),
        ]);
        // Collect the unique contact/company ids referenced by any deal.
        const contactIds = new Set();
        for (const ids of dealContactAssocs.values()) {
            for (const contactId of ids) {
                contactIds.add(contactId);
            }
        }
        const companyIds = new Set();
        for (const ids of dealCompanyAssocs.values()) {
            for (const companyId of ids) {
                companyIds.add(companyId);
            }
        }
        console.error(`[seeder] Found ${contactIds.size} unique contacts, ${companyIds.size} unique companies`);
        let contactsInserted = 0;
        if (contactIds.size > 0) {
            console.error("[seeder] Fetching contact records...");
            const contacts = await batchReadObjects(token, "contacts", [...contactIds], ["email", "firstname", "lastname", "phone", "company", "jobtitle", "hs_lastmodifieddate"]);
            const contactRows = contacts.map((c) => ({
                id: c.id,
                email: c.properties.email || null,
                properties: c.properties,
            }));
            await batchInsertContacts(contactRows);
            contactsInserted = contactRows.length;
            console.error(`[seeder] Inserted ${contactRows.length} contacts`);
        }
        updateSeedingProgress(dealRows.length, contactsInserted, 0);
        let companiesInserted = 0;
        if (companyIds.size > 0) {
            console.error("[seeder] Fetching company records...");
            const companies = await batchReadObjects(token, "companies", [...companyIds], ["domain", "name", "industry", "city", "state", "country", "hs_lastmodifieddate"]);
            const companyRows = companies.map((c) => ({
                id: c.id,
                domain: c.properties.domain || null,
                properties: c.properties,
            }));
            await batchInsertCompanies(companyRows);
            companiesInserted = companyRows.length;
            console.error(`[seeder] Inserted ${companyRows.length} companies`);
        }
        updateSeedingProgress(dealRows.length, contactsInserted, companiesInserted);
        console.error("[seeder] Inserting associations...");
        const dealContactRows = [];
        for (const [dealId, ids] of dealContactAssocs) {
            for (const contactId of ids) {
                dealContactRows.push({ fromId: dealId, toId: contactId });
            }
        }
        await batchInsertAssociations("deal_contact", dealContactRows);
        const dealCompanyRows = [];
        for (const [dealId, ids] of dealCompanyAssocs) {
            for (const companyId of ids) {
                dealCompanyRows.push({ fromId: dealId, toId: companyId });
            }
        }
        await batchInsertAssociations("deal_company", dealCompanyRows);
        console.error(`[seeder] Seeding complete: ${dealRows.length} deals, ${contactsInserted} contacts, ${companiesInserted} companies`);
        updateSeedingStatus("completed");
    }
    catch (err) {
        const errMsg = err instanceof Error ? err.message : String(err);
        console.error("[seeder] Seeding failed:", errMsg);
        updateSeedingStatus("error", errMsg);
    }
}
@@ -0,0 +1,26 @@
1
+ import { type SeedingStatus, type SeedingProgress } from "../sync/init-state.js";
2
/** Shape of the JSON payload returned (stringified) by dbStatus(). */
export interface DbStatusResult {
    /** Overall lifecycle phase of the local database. */
    status: "not_started" | "sending_requests" | "seeding" | "syncing" | "ready" | "error";
    /** Human-readable "<n>/<total> exports synced" summary. */
    syncProgress?: string;
    /** Per-export status (and optional error) keyed by export name. */
    exportDetails?: Record<string, {
        status: string;
        error?: string;
    }>;
    seedingStatus?: SeedingStatus;
    seedingProgress?: SeedingProgress;
    seedingError?: string;
    /** Whether a HubSpot token is available. */
    hasToken: boolean;
    lastSynced?: string | null;
    initializedAt?: string | null;
    /** Row counts per table, present once the database is ready. */
    recordCounts?: {
        contacts: number;
        companies: number;
        deals: number;
        contact_company: number;
        deal_contact: number;
        deal_company: number;
    };
    error?: string | null;
    message?: string;
}
/**
 * Report database/sync status as a JSON string; pass forceReinit=true to
 * restart initialization.
 */
export declare function dbStatus(forceReinit?: boolean): Promise<string>;
@@ -0,0 +1,127 @@
1
+ import { getHubSpotToken } from "../db/keychain.js";
2
+ import { dbExists, isDbHealthy, getRecordCount, getAllMetadata, } from "../db/sqlite.js";
3
+ import { getInitStatus, forceReinitialize, } from "../sync/init-manager.js";
4
+ import { getSyncedCount, ALL_EXPORTS } from "../sync/init-state.js";
5
/**
 * Build the per-export status map ({ name: { status, error? } }) from init
 * state. Shared by the "polling_exports" and "error" branches of dbStatus,
 * which previously duplicated this loop.
 */
function buildExportDetails(initState) {
    const details = {};
    for (const name of ALL_EXPORTS) {
        const exp = initState.exports[name];
        details[name] = {
            status: exp.status,
            ...(exp.error && { error: exp.error }),
        };
    }
    return details;
}
/**
 * Report the current state of the local HubSpot mirror as a JSON string.
 *
 * Progress strings are derived from ALL_EXPORTS.length instead of the
 * hard-coded "0/6"/"6/6" literals, so they stay correct if the export list
 * changes. parseInt calls now pass an explicit radix.
 * @param forceReinit when true, kick off re-initialization and return
 *                    immediately with a "sending_requests" payload
 */
export async function dbStatus(forceReinit = false) {
    if (forceReinit) {
        forceReinitialize();
        return JSON.stringify({
            status: "sending_requests",
            syncProgress: `0/${ALL_EXPORTS.length} exports synced`,
            hasToken: !!getHubSpotToken(),
            message: "Re-initialization started. Poll db_status to track progress.",
        });
    }
    const initState = getInitStatus();
    const hasToken = !!getHubSpotToken();
    if (initState.status === "idle") {
        return JSON.stringify({
            status: "not_started",
            hasToken,
            message: "Database initialization not started. Server will auto-start on next restart.",
        });
    }
    if (initState.status === "sending_requests") {
        return JSON.stringify({
            status: "sending_requests",
            syncProgress: `0/${ALL_EXPORTS.length} exports synced`,
            hasToken,
            message: "Sending export requests to HubSpot...",
        });
    }
    if (initState.status === "polling_exports") {
        const syncedCount = getSyncedCount(initState);
        const exportDetails = buildExportDetails(initState);
        const { seedingStatus, seedingProgress, seedingError } = initState;
        let message = `Syncing HubSpot data... ${syncedCount} of ${ALL_EXPORTS.length} exports complete.`;
        if (seedingStatus === "in_progress" && seedingProgress) {
            message = `Quick preview loading: ${seedingProgress.deals} deals seeded. Full sync: ${syncedCount}/${ALL_EXPORTS.length} exports.`;
        }
        else if (seedingStatus === "completed" && seedingProgress) {
            message = `Preview ready! ${seedingProgress.deals} deals, ${seedingProgress.contacts} contacts, ${seedingProgress.companies} companies available. Full sync: ${syncedCount}/${ALL_EXPORTS.length} exports.`;
        }
        const result = {
            status: seedingStatus === "in_progress" ? "seeding" : "syncing",
            syncProgress: `${syncedCount}/${ALL_EXPORTS.length} exports synced`,
            exportDetails,
            hasToken,
            message,
        };
        if (seedingStatus) {
            result.seedingStatus = seedingStatus;
        }
        if (seedingProgress) {
            result.seedingProgress = seedingProgress;
        }
        if (seedingError) {
            result.seedingError = seedingError;
        }
        return JSON.stringify(result);
    }
    if (initState.status === "error") {
        const syncedCount = getSyncedCount(initState);
        return JSON.stringify({
            status: "error",
            syncProgress: `${syncedCount}/${ALL_EXPORTS.length} exports synced`,
            exportDetails: buildExportDetails(initState),
            hasToken,
            error: initState.error,
            message: `Initialization failed: ${initState.error}. Use forceReinit=true to retry.`,
        });
    }
    // Init state says we're done — verify the database file is actually usable.
    const exists = await dbExists();
    const healthy = exists ? await isDbHealthy() : false;
    if (!exists || !healthy) {
        return JSON.stringify({
            status: "error",
            hasToken,
            error: "Database file missing or corrupted",
            message: "Database file issue detected. Use forceReinit=true to reinitialize.",
        });
    }
    const [contacts, companies, deals] = await Promise.all([
        getRecordCount("contacts"),
        getRecordCount("companies"),
        getRecordCount("deals"),
    ]);
    const metadata = await getAllMetadata();
    const result = {
        status: "ready",
        // Reaching this branch means initialization completed, i.e. every
        // export synced.
        syncProgress: `${ALL_EXPORTS.length}/${ALL_EXPORTS.length} exports synced`,
        hasToken,
        lastSynced: metadata.last_synced || null,
        initializedAt: metadata.initialized_at || initState.startedAt || null,
        recordCounts: {
            contacts,
            companies,
            deals,
            contact_company: parseInt(metadata.contact_company_count || "0", 10),
            deal_contact: parseInt(metadata.deal_contact_count || "0", 10),
            deal_company: parseInt(metadata.deal_company_count || "0", 10),
        },
    };
    if (initState.seedingStatus) {
        result.seedingStatus = initState.seedingStatus;
    }
    if (initState.seedingProgress) {
        result.seedingProgress = initState.seedingProgress;
    }
    return JSON.stringify(result);
}
@@ -0,0 +1,6 @@
1
+ import { z } from "zod";
2
/** Zod schema for raw-SQL tool arguments: SELECT-only SQL plus an optional row cap. */
export declare const rawSqlSchema: z.ZodObject<{
    sql: z.ZodString;
    limit: z.ZodOptional<z.ZodNumber>;
}, z.core.$strip>;
/** Execute a read-only SQL query and return a JSON result string. */
export declare function getRawSql(args: z.infer<typeof rawSqlSchema>): Promise<string>;
@@ -0,0 +1,109 @@
1
+ import { z } from "zod";
2
+ import { executeQuery, isDbHealthy } from "../db/sqlite.js";
3
// Hard cap on rows returned to the client per query.
const MAX_ROWS = 1000;
// Queries running longer than this are rejected via withTimeout.
const QUERY_TIMEOUT_MS = 30_000;
5
// Tool-argument schema: a non-empty SQL string (SELECT only, enforced at
// runtime by getRawSql) and an optional row cap clamped to MAX_ROWS.
export const rawSqlSchema = z.object({
    sql: z.string().min(1).describe("SQL query to execute (SELECT only)"),
    limit: z
        .number()
        .int()
        .min(1)
        .max(MAX_ROWS)
        .optional()
        .describe(`Max rows to return (default: ${MAX_ROWS}, max: ${MAX_ROWS})`),
});
15
/**
 * Race `promise` against a timeout of `ms` milliseconds.
 *
 * The timer is cleared once the race settles; previously it was never
 * cleared, so a finished query still left a pending timer holding the event
 * loop open (and firing a stale rejection) for up to `ms`.
 * @throws Error "Query timed out after <ms>ms" if the promise loses the race
 */
function withTimeout(promise, ms) {
    let timer;
    const timeout = new Promise((_, reject) => {
        timer = setTimeout(() => reject(new Error(`Query timed out after ${ms}ms`)), ms);
    });
    return Promise.race([promise, timeout]).finally(() => clearTimeout(timer));
}
21
/**
 * Clamp a SELECT statement's result size to `maxRows`.
 *
 * Only a LIMIT clause at the very end of the statement is treated as the
 * outer limit. The previous version rewrote the FIRST "LIMIT n" found
 * anywhere in the text, which could clobber a subquery's LIMIT while
 * leaving the outer query completely uncapped. If no trailing LIMIT is
 * present, one is appended (before the trailing semicolon, if any).
 */
function enforceLimitClause(sql, maxRows) {
    const normalized = sql.trim();
    // Trailing "LIMIT n [OFFSET m] [;]" — anchored to end of statement so a
    // LIMIT inside a subquery is never rewritten.
    const trailingLimit = /\bLIMIT\s+(\d+)(\s+OFFSET\s+\d+)?\s*(;)?$/i;
    const match = normalized.match(trailingLimit);
    if (match) {
        const existingLimit = parseInt(match[1], 10);
        if (existingLimit <= maxRows) {
            return normalized;
        }
        const offsetPart = match[2] ?? "";
        const semicolon = match[3] ?? "";
        return normalized.replace(trailingLimit, `LIMIT ${maxRows}${offsetPart}${semicolon}`);
    }
    const endsWithSemicolon = normalized.endsWith(";");
    const base = endsWithSemicolon ? normalized.slice(0, -1) : normalized;
    return `${base} LIMIT ${maxRows}${endsWithSemicolon ? ";" : ""}`;
}
36
/**
 * Execute a read-only SQL query against the local database.
 *
 * Guards: the statement must start with SELECT, must not contain any
 * write/DDL keyword, and is capped at MAX_ROWS via an enforced LIMIT and a
 * QUERY_TIMEOUT_MS timeout. Returns a JSON string of either
 * { success: true, columns, rowCount, rows, warning? } or
 * { success: false, error }.
 */
export async function getRawSql(args) {
    const healthy = await isDbHealthy();
    if (!healthy) {
        return JSON.stringify({
            success: false,
            error: "Database not initialized. Run init_database first.",
        });
    }
    const { sql, limit } = args;
    const effectiveLimit = Math.min(limit ?? MAX_ROWS, MAX_ROWS);
    // Security: Only allow SELECT queries
    const trimmedSql = sql.trim().toLowerCase();
    if (!trimmedSql.startsWith("select")) {
        return JSON.stringify({
            success: false,
            error: "Only SELECT queries are allowed. This is a read-only interface.",
        });
    }
    // Block dangerous keywords
    const dangerousKeywords = [
        "insert",
        "update",
        "delete",
        "drop",
        "alter",
        "create",
        "truncate",
        "replace",
        "attach",
        "detach",
    ];
    for (const keyword of dangerousKeywords) {
        // Match whole words only. The previous substring test rejected
        // legitimate queries: "created_at" contains "create", "last_updated"
        // contains "update", "is_deleted" contains "delete".
        if (new RegExp(`\\b${keyword}\\b`).test(trimmedSql)) {
            return JSON.stringify({
                success: false,
                error: `Query contains forbidden keyword: ${keyword}. Only SELECT queries are allowed.`,
            });
        }
    }
    const safeSql = enforceLimitClause(sql, effectiveLimit);
    try {
        const result = await withTimeout(executeQuery(safeSql), QUERY_TIMEOUT_MS);
        // Parse properties JSON in results if present
        const parsedRows = result.rows.map((row) => {
            const parsed = { ...row };
            if (typeof parsed.properties === "string") {
                try {
                    parsed.properties = JSON.parse(parsed.properties);
                }
                catch {
                    // Keep as string if parsing fails
                }
            }
            return parsed;
        });
        const wasLimited = parsedRows.length === effectiveLimit;
        return JSON.stringify({
            success: true,
            columns: result.columns,
            rowCount: parsedRows.length,
            rows: parsedRows,
            ...(wasLimited && {
                warning: `Results limited to ${effectiveLimit} rows. Use OFFSET for pagination.`,
            }),
        });
    }
    catch (error) {
        const errorMessage = error instanceof Error ? error.message : String(error);
        return JSON.stringify({
            success: false,
            error: `SQL execution failed: ${errorMessage}`,
        });
    }
}
package/package.json ADDED
@@ -0,0 +1,34 @@
1
+ {
2
+ "name": "daeda-mcp",
3
+ "version": "1.0.0",
4
+ "description": "MCP server for HubSpot CRM data sync",
5
+ "license": "MIT",
6
+ "type": "module",
7
+ "main": "./dist/index.js",
8
+ "bin": {
9
+ "daeda-mcp": "./dist/index.js"
10
+ },
11
+ "files": [
12
+ "dist"
13
+ ],
14
+ "scripts": {
15
+ "build": "tsc",
16
+ "prepublishOnly": "npm run build",
17
+ "dev": "bun run --watch src/index.ts",
18
+ "start": "bun run src/index.ts",
19
+ "start:node": "node dist/index.js"
20
+ },
21
+ "dependencies": {
22
+ "@libsql/client": "^0.14.0",
23
+ "@modelcontextprotocol/sdk": "^1.25.3",
24
+ "adm-zip": "^0.5.16",
25
+ "csv-parse": "^6.1.0",
26
+ "zod": "^4"
27
+ },
28
+ "devDependencies": {
29
+ "@types/adm-zip": "^0.5.7",
30
+ "@types/bun": "latest",
31
+ "@types/node": "^22.0.0",
32
+ "typescript": "^5.7.0"
33
+ }
34
+ }