@daeda/mcp-pro 0.1.24 → 0.1.26

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +297 -103
  2. package/package.json +3 -3
package/dist/index.js CHANGED
@@ -1293,7 +1293,7 @@ import { z as z12 } from "zod";
1293
1293
  var ClientStateSchema = z12.object({
1294
1294
  selectedPortalId: z12.number().nullable()
1295
1295
  });
1296
- var SyncStatusSchema = z12.enum(["NOT_STARTED", "DOWNLOADING", "PROCESSING", "SYNCED", "FAILED"]);
1296
+ var SyncStatusSchema = z12.enum(["NOT_STARTED", "DOWNLOADING", "PROCESSING", "PARTIAL", "SYNCED", "FAILED"]);
1297
1297
  var PluginSyncStatusSchema = z12.enum(["NOT_STARTED", "SYNCING", "SYNCED", "FAILED"]);
1298
1298
  var SyncPluginItemSchema = z12.object({
1299
1299
  name: z12.string(),
@@ -1467,9 +1467,11 @@ var insertAssociationRowSql = (tableName) => `
1467
1467
  label
1468
1468
  ) VALUES ($1, $2, $3, $4, $5, $6)
1469
1469
  `;
1470
- var insertCsvIntoTableSql = (tableName, escapedPath) => `
1470
+ var HUBSPOT_GZIP_CSV_READ_OPTIONS = `all_varchar=true, header=true, compression='gzip', delim=',', quote='"', escape='"', max_line_size=8388608`;
1471
+ var readHubSpotGzipCsvSql = (escapedPath, rejectTables) => `read_csv('${escapedPath}', ${HUBSPOT_GZIP_CSV_READ_OPTIONS}${rejectTables ? `, ignore_errors=true, store_rejects=true, rejects_table='${rejectTables.rejectsTable}', rejects_scan='${rejectTables.rejectsScan}'` : ""})`;
1472
+ var insertCsvIntoTableSql = (tableName, escapedPath, rejectTables) => `
1471
1473
  INSERT INTO "${tableName}"
1472
- SELECT * FROM read_csv('${escapedPath}', all_varchar=true, header=true, compression='gzip')
1474
+ SELECT * FROM ${readHubSpotGzipCsvSql(escapedPath, rejectTables)}
1473
1475
  `;
1474
1476
  var mergeAssociationStagingSql = (stagingTable, fromExpression, toExpression) => `
1475
1477
  INSERT OR IGNORE INTO associations
@@ -1484,9 +1486,9 @@ var mergeAssociationStagingSql = (stagingTable, fromExpression, toExpression) =>
1484
1486
  `;
1485
1487
  var countAssociationsForPairSql = () => `SELECT COUNT(*) as count FROM associations WHERE from_object_type = $1 AND to_object_type = $2`;
1486
1488
  var deleteAllRowsSql = (tableName) => `DELETE FROM "${tableName}"`;
1487
- var insertOrReplaceObjectsFromCsvSql = (tableName, escapedPath) => `
1489
+ var insertOrReplaceObjectsFromCsvSql = (tableName, escapedPath, rejectTables) => `
1488
1490
  INSERT OR REPLACE INTO "${tableName}"
1489
- SELECT * FROM read_csv('${escapedPath}', all_varchar=true, header=true, compression='gzip')
1491
+ SELECT * FROM ${readHubSpotGzipCsvSql(escapedPath, rejectTables)}
1490
1492
  `;
1491
1493
  var countRowsSql = (tableName) => `SELECT COUNT(*) as count FROM "${tableName}"`;
1492
1494
 
@@ -1524,6 +1526,7 @@ var SupabaseAssociationCsvSchema = z13.object({
1524
1526
 
1525
1527
  // src/effects/duck-db.ts
1526
1528
  var CHUNK_SIZE = 2048;
1529
+ var CSV_REJECT_SAMPLE_LIMIT = 5;
1527
1530
  var PLUGIN_ERROR_KEY = (name) => `plugin_error:${name}`;
1528
1531
  var SCHEMA_VERSION_KEY = (name) => `schema_version:plugin:${name}`;
1529
1532
  var databaseConnectionCache = /* @__PURE__ */ new Map();
@@ -1694,23 +1697,86 @@ async function safeRollback(connection) {
1694
1697
  } catch {
1695
1698
  }
1696
1699
  }
1697
- async function readCsvColumns(connection, csvPath) {
1700
+ function makeCsvRejectTableNames(prefix) {
1701
+ const suffix = `${Date.now()}_${Math.random().toString(36).slice(2, 10)}`;
1702
+ const sanitizedPrefix = prefix.replace(/[^a-zA-Z0-9_]/g, "_");
1703
+ return {
1704
+ rejectsTable: `${sanitizedPrefix}_reject_errors_${suffix}`,
1705
+ rejectsScan: `${sanitizedPrefix}_reject_scans_${suffix}`
1706
+ };
1707
+ }
1708
+ async function cleanupCsvRejectTables(connection, rejectTables) {
1709
+ try {
1710
+ await connection.run(dropTableIfExistsSql(rejectTables.rejectsTable));
1711
+ } catch {
1712
+ }
1713
+ try {
1714
+ await connection.run(dropTableIfExistsSql(rejectTables.rejectsScan));
1715
+ } catch {
1716
+ }
1717
+ }
1718
+ function formatCsvRejectSummary(rejectCount, sampleRows) {
1719
+ const samples = sampleRows.map((row) => {
1720
+ const lineValue = typeof row.line === "number" ? row.line : typeof row.line === "bigint" ? Number(row.line) : typeof row.line_number === "number" ? row.line_number : typeof row.line_number === "bigint" ? Number(row.line_number) : null;
1721
+ const errorType = typeof row.error_type === "string" ? row.error_type : typeof row.type === "string" ? row.type : null;
1722
+ const errorMessage = typeof row.error_message === "string" ? row.error_message : typeof row.message === "string" ? row.message : "CSV row rejected by DuckDB";
1723
+ return `${lineValue !== null ? `line ${lineValue}` : "unknown line"}${errorType ? ` (${errorType})` : ""}: ${errorMessage}`;
1724
+ }).slice(0, CSV_REJECT_SAMPLE_LIMIT);
1725
+ const message = samples.length > 0 ? `Skipped ${rejectCount} malformed CSV row${rejectCount === 1 ? "" : "s"} during import. Examples: ${samples.join(" | ")}` : `Skipped ${rejectCount} malformed CSV row${rejectCount === 1 ? "" : "s"} during import.`;
1726
+ return { rejectCount, message };
1727
+ }
1728
+ async function readCsvRejectSummary(connection, rejectTables) {
1729
+ try {
1730
+ const countResult = await connection.runAndReadAll(
1731
+ `SELECT COUNT(*) AS count FROM "${rejectTables.rejectsTable}"`
1732
+ );
1733
+ const countRows = countResult.getRowObjects();
1734
+ const rejectCount = Number(countRows[0]?.count ?? 0);
1735
+ if (rejectCount === 0) return null;
1736
+ const sampleResult = await connection.runAndReadAll(
1737
+ `SELECT * FROM "${rejectTables.rejectsTable}" LIMIT ${CSV_REJECT_SAMPLE_LIMIT}`
1738
+ );
1739
+ const sampleRows = sampleResult.getRowObjects();
1740
+ return formatCsvRejectSummary(rejectCount, sampleRows);
1741
+ } catch {
1742
+ return null;
1743
+ }
1744
+ }
1745
+ async function withCsvRejectTracking(connection, prefix, work) {
1746
+ const rejectTables = makeCsvRejectTableNames(prefix);
1747
+ try {
1748
+ const result = await work(rejectTables);
1749
+ const rejects = await readCsvRejectSummary(connection, rejectTables);
1750
+ return { result, rejects };
1751
+ } finally {
1752
+ await cleanupCsvRejectTables(connection, rejectTables);
1753
+ }
1754
+ }
1755
+ async function readCsvColumns(connection, csvPath, rejectTables) {
1698
1756
  const result = await connection.runAndReadAll(
1699
- `DESCRIBE SELECT * FROM read_csv('${escapeSqlString(csvPath)}', all_varchar=true, header=true, compression='gzip')`
1757
+ `DESCRIBE SELECT * FROM ${readHubSpotGzipCsvSql(escapeSqlString(csvPath), rejectTables)}`
1700
1758
  );
1701
1759
  const rows = result.getRowObjects();
1702
1760
  return rows.map((row) => typeof row.column_name === "string" ? row.column_name : null).filter((value) => value !== null);
1703
1761
  }
1704
1762
  async function validateSupabaseCsvShape(connection, csvPath, kind) {
1705
1763
  const expectedColumns = kind === "object" ? OBJECT_CSV_COLUMNS : ASSOCIATION_CSV_COLUMNS;
1706
- const columns = await readCsvColumns(connection, csvPath);
1764
+ const { result: columns } = await withCsvRejectTracking(
1765
+ connection,
1766
+ `validate_${kind}`,
1767
+ (rejectTables) => readCsvColumns(connection, csvPath, rejectTables)
1768
+ );
1707
1769
  if (columns.length !== expectedColumns.length || columns.some((column, index) => column !== expectedColumns[index])) {
1708
1770
  throw new Error(
1709
1771
  `Invalid ${kind} CSV columns for Supabase sync. Expected ${expectedColumns.join(", ")} but received ${columns.join(", ") || "(none)"}`
1710
1772
  );
1711
1773
  }
1712
- const sampleResult = await connection.runAndReadAll(
1713
- `SELECT * FROM read_csv('${escapeSqlString(csvPath)}', all_varchar=true, header=true, compression='gzip') LIMIT 1`
1774
+ const { result: sampleResult } = await withCsvRejectTracking(
1775
+ connection,
1776
+ `validate_${kind}_sample`,
1777
+ (rejectTables) => connection.runAndReadAll(
1778
+ `SELECT * FROM ${readHubSpotGzipCsvSql(escapeSqlString(csvPath), rejectTables)} LIMIT 1`
1779
+ )
1714
1780
  );
1715
1781
  const sampleRows = sampleResult.getRowObjects();
1716
1782
  if (sampleRows.length === 0) return;
@@ -1743,13 +1809,23 @@ async function withTransaction(connection, work) {
1743
1809
  async function atomicReplaceObjectsFromCsv(connection, tableName, csvPath) {
1744
1810
  const sanitizedTableName = sanitizeTableName(tableName);
1745
1811
  const escapedPath = escapeSqlString(csvPath);
1746
- return withTransaction(connection, async () => {
1747
- await connection.run(deleteAllRowsSql(sanitizedTableName));
1748
- await connection.run(insertOrReplaceObjectsFromCsvSql(sanitizedTableName, escapedPath));
1749
- const countResult = await connection.runAndReadAll(countRowsSql(sanitizedTableName));
1750
- const rows = countResult.getRowObjects();
1751
- return Number(rows[0]?.count ?? 0);
1752
- });
1812
+ const { result, rejects } = await withCsvRejectTracking(
1813
+ connection,
1814
+ `objects_${sanitizedTableName}`,
1815
+ (rejectTables) => withTransaction(connection, async () => {
1816
+ await connection.run(deleteAllRowsSql(sanitizedTableName));
1817
+ await connection.run(
1818
+ insertOrReplaceObjectsFromCsvSql(sanitizedTableName, escapedPath, rejectTables)
1819
+ );
1820
+ const countResult = await connection.runAndReadAll(countRowsSql(sanitizedTableName));
1821
+ const rows = countResult.getRowObjects();
1822
+ return { rowCount: Number(rows[0]?.count ?? 0) };
1823
+ })
1824
+ );
1825
+ return {
1826
+ rowCount: result.rowCount,
1827
+ rejects
1828
+ };
1753
1829
  }
1754
1830
  async function deleteAssociationsForPair(connection, fromObjectType, toObjectType) {
1755
1831
  await connection.run(deleteAssociationsForPairSql(), [fromObjectType, toObjectType]);
@@ -1792,7 +1868,12 @@ async function loadAssociationRowsIntoStaging(connection, stagingTable, rows) {
1792
1868
  return insertAssociationsIntoTable(connection, rows, stagingTable);
1793
1869
  }
1794
1870
  async function loadAssociationCsvIntoStaging(connection, stagingTable, csvPath) {
1795
- await connection.run(insertCsvIntoTableSql(stagingTable, escapeSqlString(csvPath)));
1871
+ const { rejects } = await withCsvRejectTracking(
1872
+ connection,
1873
+ `assoc_${stagingTable}`,
1874
+ (rejectTables) => connection.run(insertCsvIntoTableSql(stagingTable, escapeSqlString(csvPath), rejectTables))
1875
+ );
1876
+ return rejects;
1796
1877
  }
1797
1878
  async function mergeAssociationStaging(connection, stagingTable, scope) {
1798
1879
  const fromExpression = scope._tag === "pair" ? `'${escapeSqlString(scope.fromObjectType)}'` : "from_object_type";
@@ -1816,15 +1897,17 @@ async function syncAssociations({
1816
1897
  const handle = await getDatabaseConnection(portalId, encryptionKey);
1817
1898
  const stagingTable = makeAssociationStagingTableName();
1818
1899
  try {
1900
+ let rejects = null;
1819
1901
  await withTransaction(handle.connection, async () => {
1820
1902
  await ensureAssociationsTable(handle.connection);
1821
1903
  await deleteAssociationsForScope(handle.connection, scope);
1822
1904
  await createAssociationStagingTable(handle.connection, stagingTable);
1823
- await loadAssociationCsvIntoStaging(handle.connection, stagingTable, source.csvPath);
1905
+ rejects = await loadAssociationCsvIntoStaging(handle.connection, stagingTable, source.csvPath);
1824
1906
  await mergeAssociationStaging(handle.connection, stagingTable, scope);
1825
1907
  await handle.connection.run(dropTableIfExistsSql(stagingTable));
1826
1908
  });
1827
- return scope._tag === "pair" ? countAssociationsForPair(handle.connection, scope.fromObjectType, scope.toObjectType) : 0;
1909
+ const rowCount = scope._tag === "pair" ? await countAssociationsForPair(handle.connection, scope.fromObjectType, scope.toObjectType) : 0;
1910
+ return { rowCount, rejects };
1828
1911
  } catch (error) {
1829
1912
  try {
1830
1913
  await handle.connection.run(dropTableIfExistsSql(stagingTable));
@@ -1845,8 +1928,13 @@ async function syncObjectArtifact({
1845
1928
  );
1846
1929
  await validateSupabaseCsvShape(handle.connection, csvPath, "object");
1847
1930
  await ensureObjectTable(handle.connection, objectType);
1848
- const rowCount = await atomicReplaceObjectsFromCsv(handle.connection, objectType, csvPath);
1849
- return { portalId, rowCount };
1931
+ const { rowCount, rejects } = await atomicReplaceObjectsFromCsv(handle.connection, objectType, csvPath);
1932
+ if (rejects) {
1933
+ console.error(
1934
+ `[duck-db] Object artifact import was partial for portal ${portalId}: ${objectType}. ${rejects.message}`
1935
+ );
1936
+ }
1937
+ return { portalId, rowCount, rejects };
1850
1938
  }
1851
1939
  async function initializePortalSchema({
1852
1940
  masterLock,
@@ -1919,7 +2007,7 @@ async function syncAssociationArtifact({
1919
2007
  `[duck-db] Syncing association artifact for portal ${portalId}: ${fromObjectType}->${toObjectType} from ${csvPath}`
1920
2008
  );
1921
2009
  await validateSupabaseCsvShape(handle.connection, csvPath, "association");
1922
- const rowCount = await syncAssociations({
2010
+ const { rowCount, rejects } = await syncAssociations({
1923
2011
  portalId,
1924
2012
  encryptionKey: encryptionKey ?? null,
1925
2013
  scope: {
@@ -1929,7 +2017,12 @@ async function syncAssociationArtifact({
1929
2017
  },
1930
2018
  source: { kind: "csv", csvPath }
1931
2019
  });
1932
- return { portalId, rowCount };
2020
+ if (rejects) {
2021
+ console.error(
2022
+ `[duck-db] Association artifact import was partial for portal ${portalId}: ${fromObjectType}->${toObjectType}. ${rejects.message}`
2023
+ );
2024
+ }
2025
+ return { portalId, rowCount, rejects };
1933
2026
  }
1934
2027
  async function syncPlugin({
1935
2028
  masterLock,
@@ -2236,60 +2329,105 @@ var buildCappedSelectSql = (sql, maxRows) => {
2236
2329
  };
2237
2330
 
2238
2331
  // src/layers/MasterLockLive.ts
2239
- import { createRequire } from "module";
2240
2332
  import path2 from "path";
2241
- import { mkdir, open } from "fs/promises";
2242
2333
  import { Effect as Effect24, Layer as Layer2, Ref, pipe as pipe13 } from "effect";
2243
- var require2 = createRequire(import.meta.url);
2244
- var { lock, unlock } = require2("os-lock");
2334
+
2335
+ // src/layers/MasterLockDependencies.ts
2336
+ import { createRequire } from "module";
2337
+ import { mkdir } from "fs/promises";
2338
+ var loadProperLockfileModule = () => {
2339
+ const requireFromHere = createRequire(import.meta.url);
2340
+ return requireFromHere("proper-lockfile");
2341
+ };
2342
+ var resolveMasterLockDependencies = (config, defaults) => ({
2343
+ ensurePortalsDir: config.ensurePortalsDir ?? (async (dirPath) => {
2344
+ await mkdir(dirPath, { recursive: true });
2345
+ }),
2346
+ acquireLock: config.acquireLock ?? (async (dirPath, options2) => {
2347
+ const release = await loadProperLockfileModule().lock(dirPath, options2);
2348
+ return { release };
2349
+ }),
2350
+ setInterval: config.setInterval ?? ((callback, ms) => setInterval(callback, ms)),
2351
+ clearInterval: config.clearInterval ?? ((timer) => clearInterval(timer)),
2352
+ log: config.log ?? ((message) => console.error(message)),
2353
+ promotionPollMs: config.promotionPollMs ?? defaults.promotionPollMs
2354
+ });
2355
+
2356
+ // src/layers/MasterLockLive.ts
2245
2357
  var PROMOTION_POLL_MS = 3e3;
2358
+ var LOCK_STALE_MS = 5e3;
2359
+ var LOCK_UPDATE_MS = 2500;
2246
2360
  var lockFilePath = () => path2.join(PORTALS_DIR(), "master.lock");
2247
- var logStderr3 = (message) => Effect24.sync(() => console.error(message));
2361
+ var logMessage = (runtime, message) => Effect24.sync(() => runtime.deps.log(message));
2248
2362
  var isLockConflictError = (error) => {
2249
2363
  const code = error?.code;
2250
- return code === "EACCES" || code === "EAGAIN" || code === "EBUSY";
2364
+ return code === "ELOCKED";
2251
2365
  };
2252
- var ensurePortalsDir = () => Effect24.tryPromise({
2253
- try: () => mkdir(PORTALS_DIR(), { recursive: true }),
2366
+ var ensurePortalsDir = (runtime) => Effect24.tryPromise({
2367
+ try: () => runtime.deps.ensurePortalsDir(PORTALS_DIR()),
2254
2368
  catch: (cause) => new Error(`Failed to create portals directory: ${String(cause)}`)
2255
2369
  });
2256
- var closeHandle = (handle) => Effect24.tryPromise({
2370
+ var releaseLease = (runtime, lease) => Effect24.tryPromise({
2257
2371
  try: async () => {
2258
- try {
2259
- await unlock(handle.fd);
2260
- } catch {
2261
- }
2262
- try {
2263
- await handle.close();
2264
- } catch {
2265
- }
2372
+ await lease.release();
2266
2373
  },
2267
- catch: (cause) => new Error(`Failed to close master lock handle: ${String(cause)}`)
2374
+ catch: (cause) => new Error(`Failed to release master lock lease: ${String(cause)}`)
2268
2375
  });
2376
+ var acquireLockOptions = (runtime) => ({
2377
+ lockfilePath: lockFilePath(),
2378
+ retries: 0,
2379
+ stale: LOCK_STALE_MS,
2380
+ update: LOCK_UPDATE_MS,
2381
+ realpath: true,
2382
+ onCompromised: (error) => {
2383
+ Effect24.runFork(
2384
+ pipe13(
2385
+ handleCompromisedLease(runtime, error),
2386
+ Effect24.catchAll(() => Effect24.void)
2387
+ )
2388
+ );
2389
+ }
2390
+ });
2391
+ var handleCompromisedLease = (runtime, error) => pipe13(
2392
+ Ref.get(runtime.releasedRef),
2393
+ Effect24.flatMap((released) => {
2394
+ if (released) return Effect24.void;
2395
+ return pipe13(
2396
+ Ref.getAndSet(runtime.lockLeaseRef, null),
2397
+ Effect24.flatMap((lease) => {
2398
+ if (lease === null) return Effect24.void;
2399
+ return pipe13(
2400
+ logMessage(
2401
+ runtime,
2402
+ `[master-lock] master lease compromised: ${error.message}`
2403
+ ),
2404
+ Effect24.flatMap(() => Ref.set(runtime.connectionTypeRef, "READ_ONLY")),
2405
+ Effect24.flatMap(() => startPromotionPolling(runtime))
2406
+ );
2407
+ })
2408
+ );
2409
+ })
2410
+ );
2269
2411
  var tryAcquireMasterLock = (runtime) => pipe13(
2270
- Ref.get(runtime.lockHandleRef),
2271
- Effect24.flatMap((existingHandle) => {
2272
- if (existingHandle !== null) {
2412
+ Ref.get(runtime.lockLeaseRef),
2413
+ Effect24.flatMap((existingLease) => {
2414
+ if (existingLease !== null) {
2273
2415
  return Ref.get(runtime.connectionTypeRef);
2274
2416
  }
2275
2417
  return pipe13(
2276
- ensurePortalsDir(),
2418
+ ensurePortalsDir(runtime),
2277
2419
  Effect24.flatMap(
2278
2420
  () => Effect24.tryPromise({
2279
- try: () => open(lockFilePath(), "a+"),
2280
- catch: (cause) => new Error(`Failed to open master lock file: ${String(cause)}`)
2281
- })
2282
- ),
2283
- Effect24.flatMap(
2284
- (handle) => Effect24.tryPromise({
2285
2421
  try: async () => {
2286
2422
  try {
2287
- await lock(handle.fd, { exclusive: true, immediate: true });
2288
- return { connectionType: "MASTER", handle };
2423
+ const lease = await runtime.deps.acquireLock(
2424
+ PORTALS_DIR(),
2425
+ acquireLockOptions(runtime)
2426
+ );
2427
+ return { connectionType: "MASTER", lease };
2289
2428
  } catch (error) {
2290
- await handle.close();
2291
2429
  if (isLockConflictError(error)) {
2292
- return { connectionType: "READ_ONLY", handle: null };
2430
+ return { connectionType: "READ_ONLY", lease: null };
2293
2431
  }
2294
2432
  throw error;
2295
2433
  }
@@ -2298,10 +2436,21 @@ var tryAcquireMasterLock = (runtime) => pipe13(
2298
2436
  })
2299
2437
  ),
2300
2438
  Effect24.flatMap(
2301
- ({ connectionType, handle }) => pipe13(
2302
- Ref.set(runtime.connectionTypeRef, connectionType),
2303
- Effect24.flatMap(() => Ref.set(runtime.lockHandleRef, handle)),
2304
- Effect24.as(connectionType)
2439
+ ({ connectionType, lease }) => pipe13(
2440
+ Ref.get(runtime.releasedRef),
2441
+ Effect24.flatMap((released) => {
2442
+ if (!released || lease === null) {
2443
+ return pipe13(
2444
+ Ref.set(runtime.connectionTypeRef, connectionType),
2445
+ Effect24.flatMap(() => Ref.set(runtime.lockLeaseRef, lease)),
2446
+ Effect24.as(connectionType)
2447
+ );
2448
+ }
2449
+ return pipe13(
2450
+ releaseLease(runtime, lease),
2451
+ Effect24.flatMap(() => Effect24.succeed("READ_ONLY"))
2452
+ );
2453
+ })
2305
2454
  )
2306
2455
  )
2307
2456
  );
@@ -2311,14 +2460,17 @@ var stopPromotionPolling = (runtime) => pipe13(
2311
2460
  Ref.getAndSet(runtime.promotionTimerRef, null),
2312
2461
  Effect24.flatMap(
2313
2462
  (timer) => timer === null ? Effect24.void : Effect24.sync(() => {
2314
- clearInterval(timer);
2463
+ runtime.deps.clearInterval(timer);
2315
2464
  })
2316
2465
  )
2317
2466
  );
2318
2467
  var tryPromoteToMaster = (runtime) => pipe13(
2319
- Ref.get(runtime.promotingRef),
2320
- Effect24.flatMap((promoting) => {
2321
- if (promoting) return Effect24.void;
2468
+ Effect24.all({
2469
+ promoting: Ref.get(runtime.promotingRef),
2470
+ released: Ref.get(runtime.releasedRef)
2471
+ }),
2472
+ Effect24.flatMap(({ promoting, released }) => {
2473
+ if (promoting || released) return Effect24.void;
2322
2474
  return pipe13(
2323
2475
  Ref.set(runtime.promotingRef, true),
2324
2476
  Effect24.flatMap(
@@ -2327,11 +2479,19 @@ var tryPromoteToMaster = (runtime) => pipe13(
2327
2479
  Effect24.flatMap(
2328
2480
  (connectionType) => connectionType === "MASTER" ? pipe13(
2329
2481
  stopPromotionPolling(runtime),
2330
- Effect24.flatMap(() => logStderr3("[master-lock] promoted read-only client to master"))
2482
+ Effect24.flatMap(
2483
+ () => logMessage(
2484
+ runtime,
2485
+ "[master-lock] promoted read-only client to master"
2486
+ )
2487
+ )
2331
2488
  ) : Effect24.void
2332
2489
  ),
2333
2490
  Effect24.catchAll(
2334
- (error) => logStderr3(`[master-lock] promotion attempt failed: ${error.message}`)
2491
+ (error) => logMessage(
2492
+ runtime,
2493
+ `[master-lock] promotion attempt failed: ${error.message}`
2494
+ )
2335
2495
  ),
2336
2496
  Effect24.ensuring(Ref.set(runtime.promotingRef, false))
2337
2497
  )
@@ -2344,47 +2504,58 @@ var startPromotionPolling = (runtime) => pipe13(
2344
2504
  Effect24.flatMap((existingTimer) => {
2345
2505
  if (existingTimer !== null) return Effect24.void;
2346
2506
  return pipe13(
2347
- Effect24.sync(() => setInterval(() => {
2348
- Effect24.runFork(tryPromoteToMaster(runtime));
2349
- }, PROMOTION_POLL_MS)),
2507
+ Effect24.sync(
2508
+ () => runtime.deps.setInterval(() => {
2509
+ Effect24.runFork(tryPromoteToMaster(runtime));
2510
+ }, runtime.deps.promotionPollMs)
2511
+ ),
2350
2512
  Effect24.flatMap((timer) => Ref.set(runtime.promotionTimerRef, timer)),
2351
- Effect24.flatMap(() => logStderr3(`[master-lock] another process is master; retrying promotion every ${PROMOTION_POLL_MS}ms`))
2513
+ Effect24.flatMap(
2514
+ () => logMessage(
2515
+ runtime,
2516
+ `[master-lock] another process is master; retrying promotion every ${runtime.deps.promotionPollMs}ms`
2517
+ )
2518
+ )
2352
2519
  );
2353
2520
  })
2354
2521
  );
2355
2522
  var releaseRuntime = (runtime) => pipe13(
2356
- stopPromotionPolling(runtime),
2357
- Effect24.flatMap(() => Ref.getAndSet(runtime.lockHandleRef, null)),
2523
+ Ref.set(runtime.releasedRef, true),
2524
+ Effect24.flatMap(() => stopPromotionPolling(runtime)),
2525
+ Effect24.flatMap(() => Ref.getAndSet(runtime.lockLeaseRef, null)),
2358
2526
  Effect24.flatMap(
2359
- (handle) => handle === null ? Effect24.void : closeHandle(handle)
2527
+ (lease) => lease === null ? Effect24.void : releaseLease(runtime, lease)
2360
2528
  ),
2361
2529
  Effect24.flatMap(() => Ref.set(runtime.connectionTypeRef, "READ_ONLY"))
2362
2530
  );
2363
- var makeRuntime = pipe13(
2531
+ var makeRuntime = (config = {}) => pipe13(
2364
2532
  Effect24.all({
2533
+ deps: Effect24.sync(
2534
+ () => resolveMasterLockDependencies(config, {
2535
+ promotionPollMs: PROMOTION_POLL_MS
2536
+ })
2537
+ ),
2365
2538
  connectionTypeRef: Ref.make("READ_ONLY"),
2366
- lockHandleRef: Ref.make(null),
2539
+ lockLeaseRef: Ref.make(null),
2367
2540
  promotionTimerRef: Ref.make(null),
2368
- promotingRef: Ref.make(false)
2541
+ promotingRef: Ref.make(false),
2542
+ releasedRef: Ref.make(false)
2369
2543
  }),
2370
2544
  Effect24.tap(
2371
2545
  (runtime) => pipe13(
2372
2546
  tryAcquireMasterLock(runtime),
2373
2547
  Effect24.flatMap(
2374
- (connectionType) => connectionType === "MASTER" ? logStderr3("[master-lock] acquired master lock") : startPromotionPolling(runtime)
2548
+ (connectionType) => connectionType === "MASTER" ? logMessage(runtime, "[master-lock] acquired master lock") : startPromotionPolling(runtime)
2375
2549
  )
2376
2550
  )
2377
2551
  )
2378
2552
  );
2379
- var MasterLockLive = Layer2.scoped(
2553
+ var makeMasterLockLive = (config = {}) => Layer2.scoped(
2380
2554
  MasterLockService,
2381
2555
  pipe13(
2382
2556
  Effect24.acquireRelease(
2383
- makeRuntime,
2384
- (runtime) => pipe13(
2385
- releaseRuntime(runtime),
2386
- Effect24.catchAll(() => Effect24.void)
2387
- )
2557
+ makeRuntime(config),
2558
+ (runtime) => pipe13(releaseRuntime(runtime), Effect24.catchAll(() => Effect24.void))
2388
2559
  ),
2389
2560
  Effect24.map((runtime) => ({
2390
2561
  getConnectionType: Ref.get(runtime.connectionTypeRef),
@@ -2395,11 +2566,12 @@ var MasterLockLive = Layer2.scoped(
2395
2566
  }))
2396
2567
  )
2397
2568
  );
2569
+ var MasterLockLive = makeMasterLockLive();
2398
2570
 
2399
2571
  // src/layers/ReplicaLive.ts
2400
2572
  import { Effect as Effect25, Layer as Layer3, Ref as Ref2, pipe as pipe14 } from "effect";
2401
2573
  var DEFAULT_REPLICA_DEBOUNCE_MS = 6e4;
2402
- var logStderr4 = (message) => Effect25.sync(() => console.error(message));
2574
+ var logStderr3 = (message) => Effect25.sync(() => console.error(message));
2403
2575
  var normalizeDebounceMs = (debounceMs) => debounceMs === void 0 ? DEFAULT_REPLICA_DEBOUNCE_MS : Math.max(0, debounceMs);
2404
2576
  var initialPortalReplicaState = () => ({
2405
2577
  dirty: false,
@@ -2443,7 +2615,7 @@ var makeReplicaLive = (config = {}) => {
2443
2615
  return Effect25.tryPromise({
2444
2616
  try: async () => {
2445
2617
  await Effect25.runPromise(
2446
- logStderr4(
2618
+ logStderr3(
2447
2619
  `[replica] Publishing replica for portal ${portalId} (reason: ${reason})`
2448
2620
  )
2449
2621
  );
@@ -2504,7 +2676,7 @@ var makeReplicaLive = (config = {}) => {
2504
2676
  dirty: true
2505
2677
  })),
2506
2678
  Effect25.flatMap(
2507
- () => logStderr4(
2679
+ () => logStderr3(
2508
2680
  `[replica] Failed to publish replica for portal ${portalId}: ${error.message}`
2509
2681
  )
2510
2682
  ),
@@ -3708,7 +3880,7 @@ var resetStuckPlugins = (plugins, updatedAt) => {
3708
3880
  return changed ? nextPlugins : plugins;
3709
3881
  };
3710
3882
  var upsertArtifact = (artifacts, input, updatedAt) => {
3711
- const errorValue = input.status === "FAILED" ? input.error ?? null : null;
3883
+ const errorValue = input.status === "FAILED" || input.status === "PARTIAL" ? input.error ?? null : null;
3712
3884
  let didUpdate = false;
3713
3885
  const nextArtifacts = artifacts.map((artifact) => {
3714
3886
  if (artifact.object_type !== input.objectType) {
@@ -3938,6 +4110,15 @@ var toSyncFailure = (message, cause) => Effect33.fail(
3938
4110
  })
3939
4111
  );
3940
4112
  var swallowPortalFileStateError = (effect) => effect.pipe(Effect33.catchAll(() => Effect33.void));
4113
+ var summarizePartialImport = (results) => {
4114
+ const rejectSummaries = results.map((result) => result.rejects).filter((rejects) => rejects !== null);
4115
+ if (rejectSummaries.length === 0) return null;
4116
+ const totalRejectedRows = rejectSummaries.reduce(
4117
+ (sum, rejects) => sum + rejects.rejectCount,
4118
+ 0
4119
+ );
4120
+ return `Imported with partial data. Skipped ${totalRejectedRows} malformed CSV row${totalRejectedRows === 1 ? "" : "s"} across ${rejectSummaries.length} file${rejectSummaries.length === 1 ? "" : "s"}. ${rejectSummaries.map((rejects) => rejects.message).join(" ")}`;
4121
+ };
3941
4122
  var wrapSyncFailure = (message, effect) => effect.pipe(
3942
4123
  Effect33.catchAllCause((cause) => toSyncFailure(message, cause))
3943
4124
  );
@@ -4043,7 +4224,9 @@ var PortalDataLiveBase = Layer7.effect(
4043
4224
  csvPath: downloaded.objectFile.path
4044
4225
  })
4045
4226
  ),
4046
- Effect33.flatMap(
4227
+ Effect33.bindTo("objectResult"),
4228
+ Effect33.bind(
4229
+ "associationResults",
4047
4230
  () => Effect33.forEach(
4048
4231
  downloaded.associationFiles,
4049
4232
  (associationFile) => duckDb.syncAssociationArtifact({
@@ -4055,13 +4238,18 @@ var PortalDataLiveBase = Layer7.effect(
4055
4238
  { concurrency: 1 }
4056
4239
  )
4057
4240
  ),
4241
+ Effect33.bind(
4242
+ "partialMessage",
4243
+ ({ objectResult, associationResults }) => Effect33.succeed(summarizePartialImport([objectResult, ...associationResults]))
4244
+ ),
4058
4245
  Effect33.flatMap(
4059
- () => swallowPortalFileStateError(
4246
+ ({ partialMessage }) => swallowPortalFileStateError(
4060
4247
  portalFileState.updateArtifactStatus({
4061
4248
  portalId: input.portalId,
4062
4249
  artifactId: input.artifactId,
4063
4250
  objectType: input.objectType,
4064
- status: "SYNCED",
4251
+ status: partialMessage === null ? "SYNCED" : "PARTIAL",
4252
+ error: partialMessage,
4065
4253
  sourcePath: downloaded.sourcePath
4066
4254
  })
4067
4255
  )
@@ -4113,13 +4301,19 @@ var PortalDataLiveBase = Layer7.effect(
4113
4301
  csvPath: downloaded.sourcePath
4114
4302
  })
4115
4303
  ),
4304
+ Effect33.bindTo("artifactResult"),
4305
+ Effect33.bind(
4306
+ "partialMessage",
4307
+ ({ artifactResult }) => Effect33.succeed(summarizePartialImport([artifactResult]))
4308
+ ),
4116
4309
  Effect33.flatMap(
4117
- () => swallowPortalFileStateError(
4310
+ ({ partialMessage }) => swallowPortalFileStateError(
4118
4311
  portalFileState.updateArtifactStatus({
4119
4312
  portalId: input.portalId,
4120
4313
  artifactId: input.artifactId,
4121
4314
  objectType: input.objectType,
4122
- status: "SYNCED",
4315
+ status: partialMessage === null ? "SYNCED" : "PARTIAL",
4316
+ error: partialMessage,
4123
4317
  sourcePath: downloaded.sourcePath
4124
4318
  })
4125
4319
  )
@@ -4276,7 +4470,7 @@ var filterUnsyncedArtifacts = (artifacts, allowedObjectTypes) => artifacts.filte
4276
4470
  );
4277
4471
 
4278
4472
  // src/effects/run-full-sync.ts
4279
- var logStderr5 = (message) => Effect34.sync(() => console.error(message));
4473
+ var logStderr4 = (message) => Effect34.sync(() => console.error(message));
4280
4474
  var runFullSync = (ws, portalData, portalFileState, portalId, artifacts, objectTypesToSync) => pipe22(
4281
4475
  Ref3.make({
4282
4476
  totalArtifacts: 0,
@@ -4319,14 +4513,14 @@ var runFullSync = (ws, portalData, portalFileState, portalId, artifacts, objectT
4319
4513
  currentArtifact: artifact.object_type
4320
4514
  })),
4321
4515
  Effect34.tap(
4322
- () => logStderr5(
4516
+ () => logStderr4(
4323
4517
  `[sync] (${idx + 1}/${toSync.length}) Processing: ${artifact.object_type}`
4324
4518
  )
4325
4519
  ),
4326
4520
  Effect34.flatMap(() => {
4327
4521
  const plugin = findArtifactPlugin(artifact.object_type);
4328
4522
  if (!plugin) {
4329
- return logStderr5(`[sync] No artifact plugin found for ${artifact.object_type}, skipping`);
4523
+ return logStderr4(`[sync] No artifact plugin found for ${artifact.object_type}, skipping`);
4330
4524
  }
4331
4525
  return plugin.processArtifact(artifactCtx, artifact);
4332
4526
  }),
@@ -4383,7 +4577,7 @@ var runFullSync = (ws, portalData, portalFileState, portalId, artifacts, objectT
4383
4577
 
4384
4578
  // src/effects/sync-message-plugins.ts
4385
4579
  import { Effect as Effect35, pipe as pipe23 } from "effect";
4386
- var logStderr6 = (message) => Effect35.sync(() => console.error(message));
4580
+ var logStderr5 = (message) => Effect35.sync(() => console.error(message));
4387
4581
  var PLUGIN_ERROR_KEY2 = (name) => `plugin_error:${name}`;
4388
4582
  var extractMessage = (value) => {
4389
4583
  if (typeof value === "string") {
@@ -4429,7 +4623,7 @@ var syncMessagePlugin = (ws, portalData, portalFileState, portalId, plugin) => p
4429
4623
  Effect35.map((raw) => raw ? new Date(raw) : null),
4430
4624
  Effect35.flatMap((lastSynced) => {
4431
4625
  if (plugin.shouldSync && !plugin.shouldSync(lastSynced)) {
4432
- return logStderr6(`[sync] Skipping plugin ${plugin.name} (shouldSync returned false)`);
4626
+ return logStderr5(`[sync] Skipping plugin ${plugin.name} (shouldSync returned false)`);
4433
4627
  }
4434
4628
  return pipe23(
4435
4629
  portalFileState.updatePluginStatus({
@@ -4455,7 +4649,7 @@ var syncMessagePlugin = (ws, portalData, portalFileState, portalId, plugin) => p
4455
4649
  }),
4456
4650
  Effect35.catchAll((error) => {
4457
4651
  if (isReadOnlySkipError(error)) {
4458
- return logStderr6(
4652
+ return logStderr5(
4459
4653
  `[sync] Skipping message plugin ${plugin.name} for portal ${portalId} on read-only client`
4460
4654
  );
4461
4655
  }
@@ -4472,7 +4666,7 @@ var syncMessagePlugin = (ws, portalData, portalFileState, portalId, plugin) => p
4472
4666
  }).pipe(Effect35.catchAll(() => Effect35.void))
4473
4667
  ),
4474
4668
  Effect35.flatMap(
4475
- () => logStderr6(`[sync] Message plugin failed (${plugin.name}): ${message}`)
4669
+ () => logStderr5(`[sync] Message plugin failed (${plugin.name}): ${message}`)
4476
4670
  )
4477
4671
  );
4478
4672
  })
@@ -4482,12 +4676,12 @@ var syncMessagePlugins = (ws, portalData, portalFileState, portalId, plugins) =>
4482
4676
  Effect35.as(true),
4483
4677
  Effect35.catchAll(
4484
4678
  (error) => isReadOnlySkipError(error) ? pipe23(
4485
- logStderr6(
4679
+ logStderr5(
4486
4680
  `[sync] Skipping message plugin sync for portal ${portalId} on read-only client`
4487
4681
  ),
4488
4682
  Effect35.as(false)
4489
4683
  ) : pipe23(
4490
- logStderr6(
4684
+ logStderr5(
4491
4685
  `[sync] Failed to initialize portal schema before message plugin sync for portal ${portalId}: ${error instanceof Error ? error.message : String(error)}`
4492
4686
  ),
4493
4687
  Effect35.as(true)
@@ -16668,12 +16862,12 @@ var mainProgram = Effect108.gen(function* () {
16668
16862
  Effect108.flatMap(() => portalFileState.get(portalId)),
16669
16863
  Effect108.map((portal) => {
16670
16864
  const status = portal.artifacts.find((a) => a.object_type === artifact.object_type)?.status;
16671
- if (status !== "SYNCED") {
16865
+ if (status !== "SYNCED" && status !== "PARTIAL") {
16672
16866
  console.error(
16673
16867
  `[artifact-queue] Artifact ${artifact.object_type} for portal ${portalId} finished with status ${status ?? "MISSING"}`
16674
16868
  );
16675
16869
  }
16676
- return status === "SYNCED";
16870
+ return status === "SYNCED" || status === "PARTIAL";
16677
16871
  }),
16678
16872
  Effect108.catchAll(
16679
16873
  (err) => Effect108.sync(() => {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@daeda/mcp-pro",
3
- "version": "0.1.24",
3
+ "version": "0.1.26",
4
4
  "description": "MCP server for HubSpot CRM — sync, query, and manage your portal data",
5
5
  "type": "module",
6
6
  "bin": {
@@ -35,8 +35,8 @@
35
35
  "@modelcontextprotocol/sdk": "^1.25.3",
36
36
  "effect": "3.19.15",
37
37
  "fflate": "^0.8.2",
38
- "os-lock": "^2.0.0",
39
38
  "papaparse": "^5.5.3",
39
+ "proper-lockfile": "^4.1.2",
40
40
  "zod": "^3.23.8"
41
41
  },
42
42
  "devDependencies": {
@@ -49,6 +49,6 @@
49
49
  },
50
50
  "trustedDependencies": [
51
51
  "fs-ext",
52
- "os-lock"
52
+ "proper-lockfile"
53
53
  ]
54
54
  }