daeda-mcp 1.0.1 → 1.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/sync/csv-loader.js
CHANGED
@@ -1,3 +1,4 @@
+import { Worker } from "node:worker_threads";
 import { clearTable, setMetadata, batchInsertContacts, batchInsertCompanies, batchInsertDeals, batchInsertAssociations, } from "../db/sqlite.js";
 function findIdColumn(headers) {
     const idCandidates = [
@@ -42,8 +43,7 @@ async function loadCsvWithWorker(filePath, tableName, mapRow, insertBatch, onPro
         let idColumn = null;
         let totalInserted = 0;
         let pendingInserts = [];
-        worker.onmessage = async (event) => {
-            const msg = event.data;
+        worker.on("message", async (msg) => {
             try {
                 switch (msg.type) {
                     case "headers": {
@@ -95,11 +95,11 @@ async function loadCsvWithWorker(filePath, tableName, mapRow, insertBatch, onPro
                 worker.terminate();
                 reject(err);
             }
-        };
-        worker.onerror = (err) => {
+        });
+        worker.on("error", (err) => {
             worker.terminate();
             reject(new Error(`Worker error: ${err.message}`));
-        };
+        });
         worker.postMessage({
             type: "start",
             filePath,
@@ -164,8 +164,7 @@ export async function loadAssociationsCsvFromFile(filePath, associationType, onP
         let associatedIdColumn = null;
         let totalInserted = 0;
         let pendingInserts = [];
-        worker.onmessage = async (event) => {
-            const msg = event.data;
+        worker.on("message", async (msg) => {
             try {
                 switch (msg.type) {
                     case "headers": {
@@ -236,11 +235,11 @@ export async function loadAssociationsCsvFromFile(filePath, associationType, onP
                 worker.terminate();
                 reject(err);
             }
-        };
-        worker.onerror = (err) => {
+        });
+        worker.on("error", (err) => {
            worker.terminate();
            reject(new Error(`Worker error: ${err.message}`));
-        };
+        });
        worker.postMessage({
            type: "start",
            filePath,
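
Both loaders here migrate from the browser Worker API (`worker.onmessage = (event) => …` reading `event.data`) to the event-emitter API that node:worker_threads actually provides, where the "message" handler receives the payload directly. A minimal sketch of the promise-wrapped pattern the new code follows; the function name and message shapes are illustrative, not the package's exact ones:

import { Worker } from "node:worker_threads";

// Spawn a worker, resolve on its "done" message, reject on "error".
// runWorker and the msg shapes are hypothetical stand-ins.
function runWorker(workerPath, startPayload) {
    return new Promise((resolve, reject) => {
        const worker = new Worker(workerPath);
        worker.on("message", (msg) => {
            if (msg.type === "done") {
                worker.terminate();
                resolve(msg.totalCount);
            }
        });
        worker.on("error", (err) => {
            worker.terminate();
            reject(new Error(`Worker error: ${err.message}`));
        });
        worker.postMessage({ type: "start", ...startPayload });
    });
}

Unlike a browser Worker, a worker_threads Worker never invokes an assigned `.onmessage` property — the assignment is silently ignored — which is presumably why 1.0.1's handlers never ran.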
package/dist/sync/csv-worker.js
CHANGED
@@ -1,8 +1,8 @@
 import { createReadStream } from "node:fs";
+import { parentPort } from "node:worker_threads";
 import { parse } from "csv-parse";
 let cancelled = false;
-self.onmessage = async (event) => {
-    const msg = event.data;
+parentPort?.on("message", async (msg) => {
     if (msg.type === "cancel") {
         cancelled = true;
         return;
@@ -11,7 +11,7 @@ self.onmessage = async (event) => {
         cancelled = false;
         await processFile(msg.filePath, msg.batchSize ?? 5000);
     }
-};
+});
 async function processFile(filePath, batchSize) {
     try {
         const parser = createReadStream(filePath).pipe(parse({
@@ -25,17 +25,17 @@ async function processFile(filePath, batchSize) {
         let headersSent = false;
         for await (const record of parser) {
             if (cancelled) {
-                self.postMessage({ type: "done", totalCount: totalProcessed });
+                parentPort?.postMessage({ type: "done", totalCount: totalProcessed });
                 return;
             }
             if (!headersSent) {
-                self.postMessage({ type: "headers", headers: Object.keys(record) });
+                parentPort?.postMessage({ type: "headers", headers: Object.keys(record) });
                 headersSent = true;
             }
             batch.push(record);
             if (batch.length >= batchSize) {
                 totalProcessed += batch.length;
-                self.postMessage({
+                parentPort?.postMessage({
                     type: "batch",
                     rows: batch,
                     processedCount: totalProcessed,
@@ -45,16 +45,16 @@
         }
         if (batch.length > 0) {
             totalProcessed += batch.length;
-            self.postMessage({
+            parentPort?.postMessage({
                 type: "batch",
                 rows: batch,
                 processedCount: totalProcessed,
             });
         }
-        self.postMessage({ type: "done", totalCount: totalProcessed });
+        parentPort?.postMessage({ type: "done", totalCount: totalProcessed });
     }
     catch (err) {
         const errorMessage = err instanceof Error ? err.message : String(err);
-        self.postMessage({ type: "error", error: errorMessage });
+        parentPort?.postMessage({ type: "error", error: errorMessage });
     }
 }
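
The worker side gets the matching fix: the WebWorker globals self.onmessage / self.postMessage do not exist in a worker_threads worker, so the file now talks to the spawning thread through parentPort. parentPort is null when the module is loaded outside a worker, hence the optional chaining. A stripped-down sketch of the shape (illustrative, not the package's exact messages):

import { parentPort } from "node:worker_threads";

// Receive commands from the spawning thread and report back to it.
parentPort?.on("message", async (msg) => {
    if (msg.type === "start") {
        // ... stream and parse the file here ...
        parentPort?.postMessage({ type: "done", totalCount: 0 });
    }
});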
package/dist/sync/export-api.js
CHANGED
@@ -226,15 +226,15 @@ export async function downloadExportCsvToFile(token, downloadUrl, exportName) {
     }
     const writeStream = createWriteStream(outputPath);
     for (let i = 0; i < csvEntries.length; i++) {
-        const
+        const csvBuffer = csvEntries[i].getData();
         if (i === 0) {
-            writeStream.write(
+            writeStream.write(csvBuffer);
         }
         else {
-            const firstNewline =
+            const firstNewline = csvBuffer.indexOf(0x0a); // 0x0a = '\n'
             if (firstNewline !== -1) {
                 writeStream.write("\n");
-                writeStream.write(
+                writeStream.write(csvBuffer.subarray(firstNewline + 1));
             }
         }
     }
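
The rewritten loop stitches multiple CSV entries into one file: the first entry is written verbatim, and every later entry is cut at its first newline byte so its duplicate header row is dropped. A sketch of that logic under one assumption: each entry's bytes are available as a Buffer (as adm-zip's entry.getData() returns, for example); the helper name is hypothetical:

import { createWriteStream } from "node:fs";

// Concatenate CSV parts into outputPath, keeping only the first header row.
function concatCsvParts(outputPath, csvBuffers) {
    const out = createWriteStream(outputPath);
    csvBuffers.forEach((buf, i) => {
        if (i === 0) {
            out.write(buf); // first part keeps its header
            return;
        }
        const firstNewline = buf.indexOf(0x0a); // 0x0a is '\n' as a byte
        if (firstNewline !== -1) {
            out.write("\n"); // ensure a break even if the previous part lacked one
            out.write(buf.subarray(firstNewline + 1)); // body without the header row
        }
    });
    out.end();
}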
package/dist/sync/init-manager.js
CHANGED

@@ -1,4 +1,4 @@
-import { readInitState, writeInitState, createInitialState, isFullySynced, getSyncedCount, ALL_EXPORTS, } from "./init-state.js";
+import { readInitState, writeInitState, createInitialState, isFullySynced, getSyncedCount, readExportOverride, ALL_EXPORTS, } from "./init-state.js";
 import { validateToken, startObjectExport, startAssociationExport, getExportStatus, downloadExportCsvToFile, fetchObjectProperties, findReusableExports, } from "./export-api.js";
 import { loadContactsCsvFromFile, loadCompaniesCsvFromFile, loadDealsCsvFromFile, loadAssociationsCsvFromFile, } from "./csv-loader.js";
 import { runSeeding } from "./seeder.js";
@@ -54,16 +54,16 @@ export async function startInitialization(force = false) {
     newState.status = "sending_requests";
     newState.startedAt = new Date().toISOString();
     writeInitState(newState);
-    console.error("[init-manager] Starting export requests...");
+    console.error("[init-manager] Starting seeding first, then export requests...");
     try {
+        runSeeding(token).catch((err) => {
+            console.error("[init-manager] Seeding failed (non-fatal):", err);
+        });
         await fireAllExportRequests(token, newState);
         newState.status = "polling_exports";
         newState.seedingStatus = "pending";
         writeInitState(newState);
-        console.error("[init-manager] All export requests sent, starting
-        runSeeding(token).catch((err) => {
-            console.error("[init-manager] Seeding failed (non-fatal):", err);
-        });
+        console.error("[init-manager] All export requests sent, starting poll loop");
         startPollLoop();
     }
     catch (err) {
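
The hunk above also reorders startup: runSeeding(token) now fires before the export requests instead of after, and it is deliberately not awaited — the attached .catch turns a seeding failure into a log line rather than a rejection of startInitialization. A compressed sketch of that fire-and-forget pattern (names reused from the diff, signatures and bodies simplified):

async function startAll(token) {
    // Kick off seeding in the background; .catch prevents an unhandled rejection.
    runSeeding(token).catch((err) => {
        console.error("[init-manager] Seeding failed (non-fatal):", err);
    });
    // Export requests still gate the rest of initialization.
    await fireAllExportRequests(token);
}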
@@ -74,6 +74,21 @@ export async function startInitialization(force = false) {
     }
 }
 async function fireAllExportRequests(token, state) {
+    const override = readExportOverride();
+    if (override) {
+        console.error("[init-manager] Using export override file");
+        for (const exportName of ALL_EXPORTS) {
+            const overrideExport = override[exportName];
+            if (overrideExport?.exportId) {
+                state.exports[exportName].exportId = overrideExport.exportId;
+                state.exports[exportName].status = overrideExport.status;
+                state.exports[exportName].error = overrideExport.error;
+                console.error(`[init-manager] Override ${exportName}: ${overrideExport.exportId}`);
+            }
+        }
+        writeInitState(state);
+        return;
+    }
     console.error("[init-manager] Checking for reusable exports from the past week...");
     let reusableExports;
     try {
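
With this hunk, a data/export_override.json file (DATA_DIR resolves to the package-root data/ directory, per init-state.js below) short-circuits the export requests entirely: each listed export's exportId, status, and error are copied straight into state. A hypothetical override file, generated here with a throwaway script; the export IDs and status value are invented:

import { writeFileSync } from "node:fs";

// Keys must match names in ALL_EXPORTS; values mirror the fields the
// override loop copies into state.exports. All values below are made up.
writeFileSync("data/export_override.json", JSON.stringify({
    contacts: { exportId: "12345", status: "complete" },
    companies: { exportId: "67890", status: "complete" },
}, null, 2));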
package/dist/sync/init-state.d.ts
CHANGED

@@ -29,3 +29,4 @@ export declare function resetInitState(): void;
 export declare function getSyncedCount(state: InitState): number;
 export declare function isFullySynced(state: InitState): boolean;
 export declare function getStateFilePath(): string;
+export declare function readExportOverride(): Record<ExportName, ExportState> | null;
package/dist/sync/init-state.js
CHANGED
@@ -4,6 +4,7 @@ import { fileURLToPath } from "node:url";
 const __dirname = dirname(fileURLToPath(import.meta.url));
 const DATA_DIR = join(__dirname, "..", "..", "data");
 const STATE_FILE = join(DATA_DIR, "init_state.json");
+const OVERRIDE_FILE = join(DATA_DIR, "export_override.json");
 export const ALL_EXPORTS = [
     "contacts",
     "companies",
@@ -62,3 +63,15 @@ export function isFullySynced(state) {
 export function getStateFilePath() {
     return STATE_FILE;
 }
+export function readExportOverride() {
+    try {
+        if (!existsSync(OVERRIDE_FILE)) {
+            return null;
+        }
+        const content = readFileSync(OVERRIDE_FILE, "utf-8");
+        return JSON.parse(content);
+    }
+    catch {
+        return null;
+    }
+}
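
Note that readExportOverride treats a malformed override file the same as a missing one: any parse error is swallowed and null comes back, so a JSON typo silently disables the override path. A quick manual check, assuming you run it from the installed package root (the import path is illustrative):

import { readExportOverride } from "./dist/sync/init-state.js";

// Prints the parsed override object, or null if data/export_override.json
// is absent or not valid JSON — the function never throws.
console.log(readExportOverride());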