daeda-mcp 1.0.0 → 1.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +5 -2
- package/dist/sync/csv-loader.js +11 -12
- package/dist/sync/csv-worker.js +9 -9
- package/dist/sync/export-api.js +41 -34
- package/dist/sync/init-manager.js +9 -5
- package/package.json +17 -1
package/README.md
CHANGED
````diff
@@ -1,5 +1,8 @@
 # Daeda MCP
 
+[](https://www.npmjs.com/package/daeda-mcp)
+[](https://opensource.org/licenses/MIT)
+
 An MCP server that syncs your HubSpot CRM to a local encrypted database, enabling AI assistants to query your contacts, companies, and deals instantly.
 
 ## Why Daeda?
@@ -51,8 +54,8 @@ npm install -g daeda-mcp
 ### From source
 
 ```bash
-git clone https://github.com/
-cd daeda-mcp
+git clone https://github.com/Daeda-Technologies-Ltd/daeda-free-mcp.git
+cd daeda-free-mcp
 npm install
 npm run build
 ```
````
package/dist/sync/csv-loader.js
CHANGED
```diff
@@ -1,3 +1,4 @@
+import { Worker } from "node:worker_threads";
 import { clearTable, setMetadata, batchInsertContacts, batchInsertCompanies, batchInsertDeals, batchInsertAssociations, } from "../db/sqlite.js";
 function findIdColumn(headers) {
     const idCandidates = [
@@ -37,13 +38,12 @@ function findIdColumn(headers) {
 async function loadCsvWithWorker(filePath, tableName, mapRow, insertBatch, onProgress) {
     await clearTable(tableName);
     return new Promise((resolve, reject) => {
-        const workerUrl = new URL("./csv-worker.
+        const workerUrl = new URL("./csv-worker.js", import.meta.url);
         const worker = new Worker(workerUrl);
         let idColumn = null;
         let totalInserted = 0;
         let pendingInserts = [];
-        worker.onmessage = async (event) => {
-            const msg = event.data;
+        worker.on("message", async (msg) => {
             try {
                 switch (msg.type) {
                     case "headers": {
@@ -95,11 +95,11 @@ async function loadCsvWithWorker(filePath, tableName, mapRow, insertBatch, onPro
                 worker.terminate();
                 reject(err);
             }
-        };
-        worker.onerror = (err) => {
+        });
+        worker.on("error", (err) => {
             worker.terminate();
             reject(new Error(`Worker error: ${err.message}`));
-        };
+        });
         worker.postMessage({
             type: "start",
             filePath,
@@ -158,14 +158,13 @@ export async function loadDealsCsvFromFile(filePath, onProgress) {
 export async function loadAssociationsCsvFromFile(filePath, associationType, onProgress) {
     await clearTable(associationType);
     return new Promise((resolve, reject) => {
-        const workerUrl = new URL("./csv-worker.
+        const workerUrl = new URL("./csv-worker.js", import.meta.url);
         const worker = new Worker(workerUrl);
         let idColumn = null;
         let associatedIdColumn = null;
         let totalInserted = 0;
         let pendingInserts = [];
-        worker.onmessage = async (event) => {
-            const msg = event.data;
+        worker.on("message", async (msg) => {
             try {
                 switch (msg.type) {
                     case "headers": {
@@ -236,11 +235,11 @@ export async function loadAssociationsCsvFromFile(filePath, associationType, onP
                 worker.terminate();
                 reject(err);
             }
-        };
-        worker.onerror = (err) => {
+        });
+        worker.on("error", (err) => {
             worker.terminate();
             reject(new Error(`Worker error: ${err.message}`));
-        };
+        });
         worker.postMessage({
             type: "start",
             filePath,
```
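The substance of this change is a migration from browser-style `Worker` messaging (`worker.onmessage` / `worker.onerror`, with the payload wrapped in `event.data`) to the `node:worker_threads` event API, plus resolving the worker script via `import.meta.url`. A minimal parent-side sketch of the pattern the new code adopts; the worker path and message shape are illustrative, not taken verbatim from the package:

```js
// Parent-side sketch of the worker_threads pattern used by the new
// csv-loader.js. "./sketch-worker.js" and the { type, ... } message
// fields are illustrative assumptions.
import { Worker } from "node:worker_threads";

function runWorker(filePath) {
    return new Promise((resolve, reject) => {
        // Resolve the worker script relative to this module, not the CWD.
        const workerUrl = new URL("./sketch-worker.js", import.meta.url);
        const worker = new Worker(workerUrl);
        // worker_threads hands the posted value to the listener directly;
        // there is no event.data wrapper as in the Web Worker API.
        worker.on("message", (msg) => {
            if (msg.type === "done") {
                worker.terminate();
                resolve(msg.totalCount);
            }
        });
        worker.on("error", (err) => {
            worker.terminate();
            reject(new Error(`Worker error: ${err.message}`));
        });
        worker.postMessage({ type: "start", filePath });
    });
}
```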
package/dist/sync/csv-worker.js
CHANGED
```diff
@@ -1,8 +1,8 @@
 import { createReadStream } from "node:fs";
+import { parentPort } from "node:worker_threads";
 import { parse } from "csv-parse";
 let cancelled = false;
-self.onmessage = async (event) => {
-    const msg = event.data;
+parentPort?.on("message", async (msg) => {
     if (msg.type === "cancel") {
         cancelled = true;
         return;
@@ -11,7 +11,7 @@ self.onmessage = async (event) => {
         cancelled = false;
         await processFile(msg.filePath, msg.batchSize ?? 5000);
     }
-};
+});
 async function processFile(filePath, batchSize) {
     try {
         const parser = createReadStream(filePath).pipe(parse({
@@ -25,17 +25,17 @@ async function processFile(filePath, batchSize) {
         let headersSent = false;
         for await (const record of parser) {
             if (cancelled) {
-                self.postMessage({ type: "done", totalCount: totalProcessed });
+                parentPort?.postMessage({ type: "done", totalCount: totalProcessed });
                 return;
             }
             if (!headersSent) {
-                self.postMessage({ type: "headers", headers: Object.keys(record) });
+                parentPort?.postMessage({ type: "headers", headers: Object.keys(record) });
                 headersSent = true;
             }
             batch.push(record);
             if (batch.length >= batchSize) {
                 totalProcessed += batch.length;
-                self.postMessage({
+                parentPort?.postMessage({
                     type: "batch",
                     rows: batch,
                     processedCount: totalProcessed,
@@ -45,16 +45,16 @@ async function processFile(filePath, batchSize) {
         }
         if (batch.length > 0) {
             totalProcessed += batch.length;
-            self.postMessage({
+            parentPort?.postMessage({
                 type: "batch",
                 rows: batch,
                 processedCount: totalProcessed,
             });
         }
-        self.postMessage({ type: "done", totalCount: totalProcessed });
+        parentPort?.postMessage({ type: "done", totalCount: totalProcessed });
     }
     catch (err) {
         const errorMessage = err instanceof Error ? err.message : String(err);
-        self.postMessage({ type: "error", error: errorMessage });
+        parentPort?.postMessage({ type: "error", error: errorMessage });
     }
 }
```
package/dist/sync/export-api.js
CHANGED
```diff
@@ -213,45 +213,52 @@ export async function downloadExportCsvToFile(token, downloadUrl, exportName) {
         throw new Error("Response has no body");
     }
     await pipeline(Readable.fromWeb(response.body), createWriteStream(tempPath));
-
-
-
-
-
-
-
-
-
-
-    }
-    const writeStream = createWriteStream(outputPath);
-    for (let i = 0; i < csvEntries.length; i++) {
-        const csvContent = csvEntries[i].getData().toString("utf-8");
-        if (i === 0) {
-            writeStream.write(csvContent);
+    try {
+        const data = readFileSync(tempPath);
+        if (data[0] === 0x50 && data[1] === 0x4b) {
+            const zip = new AdmZip(tempPath);
+            const entries = zip.getEntries();
+            const csvEntries = entries
+                .filter((e) => e.entryName.endsWith(".csv"))
+                .sort((a, b) => a.entryName.localeCompare(b.entryName));
+            if (csvEntries.length === 0) {
+                throw new Error("No CSV file found in ZIP archive");
             }
-
-
-
-            writeStream.write(
+            const writeStream = createWriteStream(outputPath);
+            for (let i = 0; i < csvEntries.length; i++) {
+                const csvBuffer = csvEntries[i].getData();
+                if (i === 0) {
+                    writeStream.write(csvBuffer);
+                }
+                else {
+                    const firstNewline = csvBuffer.indexOf(0x0a); // 0x0a = '\n'
+                    if (firstNewline !== -1) {
+                        writeStream.write("\n");
+                        writeStream.write(csvBuffer.subarray(firstNewline + 1));
+                    }
                 }
             }
-
-
-
-
-
-
+            await new Promise((resolve, reject) => {
+                writeStream.end((err) => {
+                    if (err)
+                        reject(err);
+                    else
+                        resolve();
+                });
             });
-
-
-
+            unlinkSync(tempPath);
+            console.error(`[export-api] Extracted and concatenated ${csvEntries.length} CSV files from ZIP`);
+        }
+        else {
+            const { renameSync } = await import("node:fs");
+            renameSync(tempPath, outputPath);
+        }
     }
-
-
-
+    catch (err) {
+        if (existsSync(tempPath)) {
+            unlinkSync(tempPath);
+        }
+        throw err;
     }
     return outputPath;
 }
```
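The rewrite makes `downloadExportCsvToFile` format-aware: it sniffs the first two bytes of the downloaded file for the ZIP signature `0x50 0x4b` (ASCII "PK"), concatenates the archive's CSV members while stripping the duplicate header row from every file after the first, promisifies `writeStream.end`, and removes the temp file on both the success and error paths. A condensed sketch of the detection and header-stripping logic, assuming the adm-zip dependency seen in the diff (paths and the function name are illustrative):

```js
// Sketch: detect a ZIP by its leading "PK" bytes and merge member CSVs,
// keeping the header row only from the first file. Names are illustrative.
import { readFileSync, writeFileSync } from "node:fs";
import AdmZip from "adm-zip";

function mergeDownloadedCsv(tempPath, outputPath) {
    const data = readFileSync(tempPath);
    // Every ZIP archive begins with the bytes 0x50 0x4b ("PK").
    if (data[0] !== 0x50 || data[1] !== 0x4b) {
        writeFileSync(outputPath, data); // already a plain CSV
        return;
    }
    const csvEntries = new AdmZip(tempPath)
        .getEntries()
        .filter((e) => e.entryName.endsWith(".csv"))
        .sort((a, b) => a.entryName.localeCompare(b.entryName));
    const chunks = [];
    csvEntries.forEach((entry, i) => {
        const buf = entry.getData();
        if (i === 0) {
            chunks.push(buf); // the first file keeps its header row
        } else {
            const firstNewline = buf.indexOf(0x0a); // 0x0a = '\n'
            if (firstNewline !== -1) {
                // Skip the duplicate header row, then append the rest.
                chunks.push(Buffer.from("\n"), buf.subarray(firstNewline + 1));
            }
        }
    });
    writeFileSync(outputPath, Buffer.concat(chunks));
}
```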
package/dist/sync/init-manager.js
CHANGED

```diff
@@ -54,16 +54,16 @@ export async function startInitialization(force = false) {
     newState.status = "sending_requests";
     newState.startedAt = new Date().toISOString();
     writeInitState(newState);
-    console.error("[init-manager] Starting export requests...");
+    console.error("[init-manager] Starting seeding first, then export requests...");
     try {
+        runSeeding(token).catch((err) => {
+            console.error("[init-manager] Seeding failed (non-fatal):", err);
+        });
         await fireAllExportRequests(token, newState);
         newState.status = "polling_exports";
         newState.seedingStatus = "pending";
         writeInitState(newState);
-        console.error("[init-manager] All export requests sent, starting
-        runSeeding(token).catch((err) => {
-            console.error("[init-manager] Seeding failed (non-fatal):", err);
-        });
+        console.error("[init-manager] All export requests sent, starting poll loop");
         startPollLoop();
     }
     catch (err) {
@@ -172,6 +172,7 @@ async function pollOnce() {
                 exportState.status = "synced";
                 writeInitState(state);
                 console.error(`[init-manager] ${exportName} synced successfully`);
+                break;
             }
             else if (status.status === "FAILED") {
                 exportState.status = "error";
@@ -183,6 +184,9 @@ async function pollOnce() {
         catch (err) {
             const errMsg = err instanceof Error ? err.message : String(err);
             console.error(`[init-manager] Error polling ${exportName}:`, errMsg);
+            exportState.status = "error";
+            exportState.error = errMsg;
+            writeInitState(state);
         }
     }
     if (isFullySynced(state)) {
```
package/package.json
CHANGED
```diff
@@ -1,8 +1,24 @@
 {
   "name": "daeda-mcp",
-  "version": "1.0.0",
+  "version": "1.0.2",
   "description": "MCP server for HubSpot CRM data sync",
   "license": "MIT",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/Daeda-Technologies-Ltd/daeda-free-mcp.git"
+  },
+  "homepage": "https://github.com/Daeda-Technologies-Ltd/daeda-free-mcp#readme",
+  "bugs": {
+    "url": "https://github.com/Daeda-Technologies-Ltd/daeda-free-mcp/issues"
+  },
+  "keywords": [
+    "mcp",
+    "hubspot",
+    "crm",
+    "ai",
+    "claude",
+    "model-context-protocol"
+  ],
   "type": "module",
   "main": "./dist/index.js",
   "bin": {
```