@daeda/mcp-pro 0.1.24 → 0.1.25

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +216 -70
  2. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -1293,7 +1293,7 @@ import { z as z12 } from "zod";
1293
1293
  var ClientStateSchema = z12.object({
1294
1294
  selectedPortalId: z12.number().nullable()
1295
1295
  });
1296
- var SyncStatusSchema = z12.enum(["NOT_STARTED", "DOWNLOADING", "PROCESSING", "SYNCED", "FAILED"]);
1296
+ var SyncStatusSchema = z12.enum(["NOT_STARTED", "DOWNLOADING", "PROCESSING", "PARTIAL", "SYNCED", "FAILED"]);
1297
1297
  var PluginSyncStatusSchema = z12.enum(["NOT_STARTED", "SYNCING", "SYNCED", "FAILED"]);
1298
1298
  var SyncPluginItemSchema = z12.object({
1299
1299
  name: z12.string(),
@@ -1467,9 +1467,11 @@ var insertAssociationRowSql = (tableName) => `
1467
1467
  label
1468
1468
  ) VALUES ($1, $2, $3, $4, $5, $6)
1469
1469
  `;
1470
- var insertCsvIntoTableSql = (tableName, escapedPath) => `
1470
+ var HUBSPOT_GZIP_CSV_READ_OPTIONS = `all_varchar=true, header=true, compression='gzip', delim=',', quote='"', escape='"', max_line_size=8388608`;
1471
+ var readHubSpotGzipCsvSql = (escapedPath, rejectTables) => `read_csv('${escapedPath}', ${HUBSPOT_GZIP_CSV_READ_OPTIONS}${rejectTables ? `, ignore_errors=true, store_rejects=true, rejects_table='${rejectTables.rejectsTable}', rejects_scan='${rejectTables.rejectsScan}'` : ""})`;
1472
+ var insertCsvIntoTableSql = (tableName, escapedPath, rejectTables) => `
1471
1473
  INSERT INTO "${tableName}"
1472
- SELECT * FROM read_csv('${escapedPath}', all_varchar=true, header=true, compression='gzip')
1474
+ SELECT * FROM ${readHubSpotGzipCsvSql(escapedPath, rejectTables)}
1473
1475
  `;
1474
1476
  var mergeAssociationStagingSql = (stagingTable, fromExpression, toExpression) => `
1475
1477
  INSERT OR IGNORE INTO associations
@@ -1484,9 +1486,9 @@ var mergeAssociationStagingSql = (stagingTable, fromExpression, toExpression) =>
1484
1486
  `;
1485
1487
  var countAssociationsForPairSql = () => `SELECT COUNT(*) as count FROM associations WHERE from_object_type = $1 AND to_object_type = $2`;
1486
1488
  var deleteAllRowsSql = (tableName) => `DELETE FROM "${tableName}"`;
1487
- var insertOrReplaceObjectsFromCsvSql = (tableName, escapedPath) => `
1489
+ var insertOrReplaceObjectsFromCsvSql = (tableName, escapedPath, rejectTables) => `
1488
1490
  INSERT OR REPLACE INTO "${tableName}"
1489
- SELECT * FROM read_csv('${escapedPath}', all_varchar=true, header=true, compression='gzip')
1491
+ SELECT * FROM ${readHubSpotGzipCsvSql(escapedPath, rejectTables)}
1490
1492
  `;
1491
1493
  var countRowsSql = (tableName) => `SELECT COUNT(*) as count FROM "${tableName}"`;
1492
1494
 
@@ -1524,6 +1526,7 @@ var SupabaseAssociationCsvSchema = z13.object({
1524
1526
 
1525
1527
  // src/effects/duck-db.ts
1526
1528
  var CHUNK_SIZE = 2048;
1529
+ var CSV_REJECT_SAMPLE_LIMIT = 5;
1527
1530
  var PLUGIN_ERROR_KEY = (name) => `plugin_error:${name}`;
1528
1531
  var SCHEMA_VERSION_KEY = (name) => `schema_version:plugin:${name}`;
1529
1532
  var databaseConnectionCache = /* @__PURE__ */ new Map();
@@ -1694,23 +1697,86 @@ async function safeRollback(connection) {
1694
1697
  } catch {
1695
1698
  }
1696
1699
  }
1697
- async function readCsvColumns(connection, csvPath) {
1700
+ function makeCsvRejectTableNames(prefix) {
1701
+ const suffix = `${Date.now()}_${Math.random().toString(36).slice(2, 10)}`;
1702
+ const sanitizedPrefix = prefix.replace(/[^a-zA-Z0-9_]/g, "_");
1703
+ return {
1704
+ rejectsTable: `${sanitizedPrefix}_reject_errors_${suffix}`,
1705
+ rejectsScan: `${sanitizedPrefix}_reject_scans_${suffix}`
1706
+ };
1707
+ }
1708
+ async function cleanupCsvRejectTables(connection, rejectTables) {
1709
+ try {
1710
+ await connection.run(dropTableIfExistsSql(rejectTables.rejectsTable));
1711
+ } catch {
1712
+ }
1713
+ try {
1714
+ await connection.run(dropTableIfExistsSql(rejectTables.rejectsScan));
1715
+ } catch {
1716
+ }
1717
+ }
1718
+ function formatCsvRejectSummary(rejectCount, sampleRows) {
1719
+ const samples = sampleRows.map((row) => {
1720
+ const lineValue = typeof row.line === "number" ? row.line : typeof row.line === "bigint" ? Number(row.line) : typeof row.line_number === "number" ? row.line_number : typeof row.line_number === "bigint" ? Number(row.line_number) : null;
1721
+ const errorType = typeof row.error_type === "string" ? row.error_type : typeof row.type === "string" ? row.type : null;
1722
+ const errorMessage = typeof row.error_message === "string" ? row.error_message : typeof row.message === "string" ? row.message : "CSV row rejected by DuckDB";
1723
+ return `${lineValue !== null ? `line ${lineValue}` : "unknown line"}${errorType ? ` (${errorType})` : ""}: ${errorMessage}`;
1724
+ }).slice(0, CSV_REJECT_SAMPLE_LIMIT);
1725
+ const message = samples.length > 0 ? `Skipped ${rejectCount} malformed CSV row${rejectCount === 1 ? "" : "s"} during import. Examples: ${samples.join(" | ")}` : `Skipped ${rejectCount} malformed CSV row${rejectCount === 1 ? "" : "s"} during import.`;
1726
+ return { rejectCount, message };
1727
+ }
1728
+ async function readCsvRejectSummary(connection, rejectTables) {
1729
+ try {
1730
+ const countResult = await connection.runAndReadAll(
1731
+ `SELECT COUNT(*) AS count FROM "${rejectTables.rejectsTable}"`
1732
+ );
1733
+ const countRows = countResult.getRowObjects();
1734
+ const rejectCount = Number(countRows[0]?.count ?? 0);
1735
+ if (rejectCount === 0) return null;
1736
+ const sampleResult = await connection.runAndReadAll(
1737
+ `SELECT * FROM "${rejectTables.rejectsTable}" LIMIT ${CSV_REJECT_SAMPLE_LIMIT}`
1738
+ );
1739
+ const sampleRows = sampleResult.getRowObjects();
1740
+ return formatCsvRejectSummary(rejectCount, sampleRows);
1741
+ } catch {
1742
+ return null;
1743
+ }
1744
+ }
1745
+ async function withCsvRejectTracking(connection, prefix, work) {
1746
+ const rejectTables = makeCsvRejectTableNames(prefix);
1747
+ try {
1748
+ const result = await work(rejectTables);
1749
+ const rejects = await readCsvRejectSummary(connection, rejectTables);
1750
+ return { result, rejects };
1751
+ } finally {
1752
+ await cleanupCsvRejectTables(connection, rejectTables);
1753
+ }
1754
+ }
1755
+ async function readCsvColumns(connection, csvPath, rejectTables) {
1698
1756
  const result = await connection.runAndReadAll(
1699
- `DESCRIBE SELECT * FROM read_csv('${escapeSqlString(csvPath)}', all_varchar=true, header=true, compression='gzip')`
1757
+ `DESCRIBE SELECT * FROM ${readHubSpotGzipCsvSql(escapeSqlString(csvPath), rejectTables)}`
1700
1758
  );
1701
1759
  const rows = result.getRowObjects();
1702
1760
  return rows.map((row) => typeof row.column_name === "string" ? row.column_name : null).filter((value) => value !== null);
1703
1761
  }
1704
1762
  async function validateSupabaseCsvShape(connection, csvPath, kind) {
1705
1763
  const expectedColumns = kind === "object" ? OBJECT_CSV_COLUMNS : ASSOCIATION_CSV_COLUMNS;
1706
- const columns = await readCsvColumns(connection, csvPath);
1764
+ const { result: columns } = await withCsvRejectTracking(
1765
+ connection,
1766
+ `validate_${kind}`,
1767
+ (rejectTables) => readCsvColumns(connection, csvPath, rejectTables)
1768
+ );
1707
1769
  if (columns.length !== expectedColumns.length || columns.some((column, index) => column !== expectedColumns[index])) {
1708
1770
  throw new Error(
1709
1771
  `Invalid ${kind} CSV columns for Supabase sync. Expected ${expectedColumns.join(", ")} but received ${columns.join(", ") || "(none)"}`
1710
1772
  );
1711
1773
  }
1712
- const sampleResult = await connection.runAndReadAll(
1713
- `SELECT * FROM read_csv('${escapeSqlString(csvPath)}', all_varchar=true, header=true, compression='gzip') LIMIT 1`
1774
+ const { result: sampleResult } = await withCsvRejectTracking(
1775
+ connection,
1776
+ `validate_${kind}_sample`,
1777
+ (rejectTables) => connection.runAndReadAll(
1778
+ `SELECT * FROM ${readHubSpotGzipCsvSql(escapeSqlString(csvPath), rejectTables)} LIMIT 1`
1779
+ )
1714
1780
  );
1715
1781
  const sampleRows = sampleResult.getRowObjects();
1716
1782
  if (sampleRows.length === 0) return;
@@ -1743,13 +1809,23 @@ async function withTransaction(connection, work) {
1743
1809
  async function atomicReplaceObjectsFromCsv(connection, tableName, csvPath) {
1744
1810
  const sanitizedTableName = sanitizeTableName(tableName);
1745
1811
  const escapedPath = escapeSqlString(csvPath);
1746
- return withTransaction(connection, async () => {
1747
- await connection.run(deleteAllRowsSql(sanitizedTableName));
1748
- await connection.run(insertOrReplaceObjectsFromCsvSql(sanitizedTableName, escapedPath));
1749
- const countResult = await connection.runAndReadAll(countRowsSql(sanitizedTableName));
1750
- const rows = countResult.getRowObjects();
1751
- return Number(rows[0]?.count ?? 0);
1752
- });
1812
+ const { result, rejects } = await withCsvRejectTracking(
1813
+ connection,
1814
+ `objects_${sanitizedTableName}`,
1815
+ (rejectTables) => withTransaction(connection, async () => {
1816
+ await connection.run(deleteAllRowsSql(sanitizedTableName));
1817
+ await connection.run(
1818
+ insertOrReplaceObjectsFromCsvSql(sanitizedTableName, escapedPath, rejectTables)
1819
+ );
1820
+ const countResult = await connection.runAndReadAll(countRowsSql(sanitizedTableName));
1821
+ const rows = countResult.getRowObjects();
1822
+ return { rowCount: Number(rows[0]?.count ?? 0) };
1823
+ })
1824
+ );
1825
+ return {
1826
+ rowCount: result.rowCount,
1827
+ rejects
1828
+ };
1753
1829
  }
1754
1830
  async function deleteAssociationsForPair(connection, fromObjectType, toObjectType) {
1755
1831
  await connection.run(deleteAssociationsForPairSql(), [fromObjectType, toObjectType]);
@@ -1792,7 +1868,12 @@ async function loadAssociationRowsIntoStaging(connection, stagingTable, rows) {
1792
1868
  return insertAssociationsIntoTable(connection, rows, stagingTable);
1793
1869
  }
1794
1870
  async function loadAssociationCsvIntoStaging(connection, stagingTable, csvPath) {
1795
- await connection.run(insertCsvIntoTableSql(stagingTable, escapeSqlString(csvPath)));
1871
+ const { rejects } = await withCsvRejectTracking(
1872
+ connection,
1873
+ `assoc_${stagingTable}`,
1874
+ (rejectTables) => connection.run(insertCsvIntoTableSql(stagingTable, escapeSqlString(csvPath), rejectTables))
1875
+ );
1876
+ return rejects;
1796
1877
  }
1797
1878
  async function mergeAssociationStaging(connection, stagingTable, scope) {
1798
1879
  const fromExpression = scope._tag === "pair" ? `'${escapeSqlString(scope.fromObjectType)}'` : "from_object_type";
@@ -1816,15 +1897,17 @@ async function syncAssociations({
1816
1897
  const handle = await getDatabaseConnection(portalId, encryptionKey);
1817
1898
  const stagingTable = makeAssociationStagingTableName();
1818
1899
  try {
1900
+ let rejects = null;
1819
1901
  await withTransaction(handle.connection, async () => {
1820
1902
  await ensureAssociationsTable(handle.connection);
1821
1903
  await deleteAssociationsForScope(handle.connection, scope);
1822
1904
  await createAssociationStagingTable(handle.connection, stagingTable);
1823
- await loadAssociationCsvIntoStaging(handle.connection, stagingTable, source.csvPath);
1905
+ rejects = await loadAssociationCsvIntoStaging(handle.connection, stagingTable, source.csvPath);
1824
1906
  await mergeAssociationStaging(handle.connection, stagingTable, scope);
1825
1907
  await handle.connection.run(dropTableIfExistsSql(stagingTable));
1826
1908
  });
1827
- return scope._tag === "pair" ? countAssociationsForPair(handle.connection, scope.fromObjectType, scope.toObjectType) : 0;
1909
+ const rowCount = scope._tag === "pair" ? await countAssociationsForPair(handle.connection, scope.fromObjectType, scope.toObjectType) : 0;
1910
+ return { rowCount, rejects };
1828
1911
  } catch (error) {
1829
1912
  try {
1830
1913
  await handle.connection.run(dropTableIfExistsSql(stagingTable));
@@ -1845,8 +1928,13 @@ async function syncObjectArtifact({
1845
1928
  );
1846
1929
  await validateSupabaseCsvShape(handle.connection, csvPath, "object");
1847
1930
  await ensureObjectTable(handle.connection, objectType);
1848
- const rowCount = await atomicReplaceObjectsFromCsv(handle.connection, objectType, csvPath);
1849
- return { portalId, rowCount };
1931
+ const { rowCount, rejects } = await atomicReplaceObjectsFromCsv(handle.connection, objectType, csvPath);
1932
+ if (rejects) {
1933
+ console.error(
1934
+ `[duck-db] Object artifact import was partial for portal ${portalId}: ${objectType}. ${rejects.message}`
1935
+ );
1936
+ }
1937
+ return { portalId, rowCount, rejects };
1850
1938
  }
1851
1939
  async function initializePortalSchema({
1852
1940
  masterLock,
@@ -1919,7 +2007,7 @@ async function syncAssociationArtifact({
1919
2007
  `[duck-db] Syncing association artifact for portal ${portalId}: ${fromObjectType}->${toObjectType} from ${csvPath}`
1920
2008
  );
1921
2009
  await validateSupabaseCsvShape(handle.connection, csvPath, "association");
1922
- const rowCount = await syncAssociations({
2010
+ const { rowCount, rejects } = await syncAssociations({
1923
2011
  portalId,
1924
2012
  encryptionKey: encryptionKey ?? null,
1925
2013
  scope: {
@@ -1929,7 +2017,12 @@ async function syncAssociationArtifact({
1929
2017
  },
1930
2018
  source: { kind: "csv", csvPath }
1931
2019
  });
1932
- return { portalId, rowCount };
2020
+ if (rejects) {
2021
+ console.error(
2022
+ `[duck-db] Association artifact import was partial for portal ${portalId}: ${fromObjectType}->${toObjectType}. ${rejects.message}`
2023
+ );
2024
+ }
2025
+ return { portalId, rowCount, rejects };
1933
2026
  }
1934
2027
  async function syncPlugin({
1935
2028
  masterLock,
@@ -2240,23 +2333,37 @@ import { createRequire } from "module";
2240
2333
  import path2 from "path";
2241
2334
  import { mkdir, open } from "fs/promises";
2242
2335
  import { Effect as Effect24, Layer as Layer2, Ref, pipe as pipe13 } from "effect";
2243
- var require2 = createRequire(import.meta.url);
2244
- var { lock, unlock } = require2("os-lock");
2245
2336
  var PROMOTION_POLL_MS = 3e3;
2246
2337
  var lockFilePath = () => path2.join(PORTALS_DIR(), "master.lock");
2247
- var logStderr3 = (message) => Effect24.sync(() => console.error(message));
2338
+ var loadOsLockModule = () => {
2339
+ const requireFromHere = createRequire(import.meta.url);
2340
+ return requireFromHere("os-lock");
2341
+ };
2342
+ var resolveDependencies = (config = {}) => ({
2343
+ ensurePortalsDir: config.ensurePortalsDir ?? (async (dirPath) => {
2344
+ await mkdir(dirPath, { recursive: true });
2345
+ }),
2346
+ openLockFile: config.openLockFile ?? ((filePath) => open(filePath, "a+")),
2347
+ lock: config.lock ?? loadOsLockModule().lock,
2348
+ unlock: config.unlock ?? loadOsLockModule().unlock,
2349
+ setInterval: config.setInterval ?? ((callback, ms) => setInterval(callback, ms)),
2350
+ clearInterval: config.clearInterval ?? ((timer) => clearInterval(timer)),
2351
+ log: config.log ?? ((message) => console.error(message)),
2352
+ promotionPollMs: config.promotionPollMs ?? PROMOTION_POLL_MS
2353
+ });
2354
+ var logMessage = (runtime, message) => Effect24.sync(() => runtime.deps.log(message));
2248
2355
  var isLockConflictError = (error) => {
2249
2356
  const code = error?.code;
2250
2357
  return code === "EACCES" || code === "EAGAIN" || code === "EBUSY";
2251
2358
  };
2252
- var ensurePortalsDir = () => Effect24.tryPromise({
2253
- try: () => mkdir(PORTALS_DIR(), { recursive: true }),
2359
+ var ensurePortalsDir = (runtime) => Effect24.tryPromise({
2360
+ try: () => runtime.deps.ensurePortalsDir(PORTALS_DIR()),
2254
2361
  catch: (cause) => new Error(`Failed to create portals directory: ${String(cause)}`)
2255
2362
  });
2256
- var closeHandle = (handle) => Effect24.tryPromise({
2363
+ var closeHandle = (runtime, handle) => Effect24.tryPromise({
2257
2364
  try: async () => {
2258
2365
  try {
2259
- await unlock(handle.fd);
2366
+ await runtime.deps.unlock(handle.fd);
2260
2367
  } catch {
2261
2368
  }
2262
2369
  try {
@@ -2273,10 +2380,10 @@ var tryAcquireMasterLock = (runtime) => pipe13(
2273
2380
  return Ref.get(runtime.connectionTypeRef);
2274
2381
  }
2275
2382
  return pipe13(
2276
- ensurePortalsDir(),
2383
+ ensurePortalsDir(runtime),
2277
2384
  Effect24.flatMap(
2278
2385
  () => Effect24.tryPromise({
2279
- try: () => open(lockFilePath(), "a+"),
2386
+ try: () => runtime.deps.openLockFile(lockFilePath()),
2280
2387
  catch: (cause) => new Error(`Failed to open master lock file: ${String(cause)}`)
2281
2388
  })
2282
2389
  ),
@@ -2284,7 +2391,10 @@ var tryAcquireMasterLock = (runtime) => pipe13(
2284
2391
  (handle) => Effect24.tryPromise({
2285
2392
  try: async () => {
2286
2393
  try {
2287
- await lock(handle.fd, { exclusive: true, immediate: true });
2394
+ await runtime.deps.lock(handle.fd, {
2395
+ exclusive: true,
2396
+ immediate: true
2397
+ });
2288
2398
  return { connectionType: "MASTER", handle };
2289
2399
  } catch (error) {
2290
2400
  await handle.close();
@@ -2311,7 +2421,7 @@ var stopPromotionPolling = (runtime) => pipe13(
2311
2421
  Ref.getAndSet(runtime.promotionTimerRef, null),
2312
2422
  Effect24.flatMap(
2313
2423
  (timer) => timer === null ? Effect24.void : Effect24.sync(() => {
2314
- clearInterval(timer);
2424
+ runtime.deps.clearInterval(timer);
2315
2425
  })
2316
2426
  )
2317
2427
  );
@@ -2327,11 +2437,19 @@ var tryPromoteToMaster = (runtime) => pipe13(
2327
2437
  Effect24.flatMap(
2328
2438
  (connectionType) => connectionType === "MASTER" ? pipe13(
2329
2439
  stopPromotionPolling(runtime),
2330
- Effect24.flatMap(() => logStderr3("[master-lock] promoted read-only client to master"))
2440
+ Effect24.flatMap(
2441
+ () => logMessage(
2442
+ runtime,
2443
+ "[master-lock] promoted read-only client to master"
2444
+ )
2445
+ )
2331
2446
  ) : Effect24.void
2332
2447
  ),
2333
2448
  Effect24.catchAll(
2334
- (error) => logStderr3(`[master-lock] promotion attempt failed: ${error.message}`)
2449
+ (error) => logMessage(
2450
+ runtime,
2451
+ `[master-lock] promotion attempt failed: ${error.message}`
2452
+ )
2335
2453
  ),
2336
2454
  Effect24.ensuring(Ref.set(runtime.promotingRef, false))
2337
2455
  )
@@ -2344,11 +2462,18 @@ var startPromotionPolling = (runtime) => pipe13(
2344
2462
  Effect24.flatMap((existingTimer) => {
2345
2463
  if (existingTimer !== null) return Effect24.void;
2346
2464
  return pipe13(
2347
- Effect24.sync(() => setInterval(() => {
2348
- Effect24.runFork(tryPromoteToMaster(runtime));
2349
- }, PROMOTION_POLL_MS)),
2465
+ Effect24.sync(
2466
+ () => runtime.deps.setInterval(() => {
2467
+ Effect24.runFork(tryPromoteToMaster(runtime));
2468
+ }, runtime.deps.promotionPollMs)
2469
+ ),
2350
2470
  Effect24.flatMap((timer) => Ref.set(runtime.promotionTimerRef, timer)),
2351
- Effect24.flatMap(() => logStderr3(`[master-lock] another process is master; retrying promotion every ${PROMOTION_POLL_MS}ms`))
2471
+ Effect24.flatMap(
2472
+ () => logMessage(
2473
+ runtime,
2474
+ `[master-lock] another process is master; retrying promotion every ${runtime.deps.promotionPollMs}ms`
2475
+ )
2476
+ )
2352
2477
  );
2353
2478
  })
2354
2479
  );
@@ -2356,12 +2481,13 @@ var releaseRuntime = (runtime) => pipe13(
2356
2481
  stopPromotionPolling(runtime),
2357
2482
  Effect24.flatMap(() => Ref.getAndSet(runtime.lockHandleRef, null)),
2358
2483
  Effect24.flatMap(
2359
- (handle) => handle === null ? Effect24.void : closeHandle(handle)
2484
+ (handle) => handle === null ? Effect24.void : closeHandle(runtime, handle)
2360
2485
  ),
2361
2486
  Effect24.flatMap(() => Ref.set(runtime.connectionTypeRef, "READ_ONLY"))
2362
2487
  );
2363
- var makeRuntime = pipe13(
2488
+ var makeRuntime = (config = {}) => pipe13(
2364
2489
  Effect24.all({
2490
+ deps: Effect24.sync(() => resolveDependencies(config)),
2365
2491
  connectionTypeRef: Ref.make("READ_ONLY"),
2366
2492
  lockHandleRef: Ref.make(null),
2367
2493
  promotionTimerRef: Ref.make(null),
@@ -2371,20 +2497,17 @@ var makeRuntime = pipe13(
2371
2497
  (runtime) => pipe13(
2372
2498
  tryAcquireMasterLock(runtime),
2373
2499
  Effect24.flatMap(
2374
- (connectionType) => connectionType === "MASTER" ? logStderr3("[master-lock] acquired master lock") : startPromotionPolling(runtime)
2500
+ (connectionType) => connectionType === "MASTER" ? logMessage(runtime, "[master-lock] acquired master lock") : startPromotionPolling(runtime)
2375
2501
  )
2376
2502
  )
2377
2503
  )
2378
2504
  );
2379
- var MasterLockLive = Layer2.scoped(
2505
+ var makeMasterLockLive = (config = {}) => Layer2.scoped(
2380
2506
  MasterLockService,
2381
2507
  pipe13(
2382
2508
  Effect24.acquireRelease(
2383
- makeRuntime,
2384
- (runtime) => pipe13(
2385
- releaseRuntime(runtime),
2386
- Effect24.catchAll(() => Effect24.void)
2387
- )
2509
+ makeRuntime(config),
2510
+ (runtime) => pipe13(releaseRuntime(runtime), Effect24.catchAll(() => Effect24.void))
2388
2511
  ),
2389
2512
  Effect24.map((runtime) => ({
2390
2513
  getConnectionType: Ref.get(runtime.connectionTypeRef),
@@ -2395,11 +2518,12 @@ var MasterLockLive = Layer2.scoped(
2395
2518
  }))
2396
2519
  )
2397
2520
  );
2521
+ var MasterLockLive = makeMasterLockLive();
2398
2522
 
2399
2523
  // src/layers/ReplicaLive.ts
2400
2524
  import { Effect as Effect25, Layer as Layer3, Ref as Ref2, pipe as pipe14 } from "effect";
2401
2525
  var DEFAULT_REPLICA_DEBOUNCE_MS = 6e4;
2402
- var logStderr4 = (message) => Effect25.sync(() => console.error(message));
2526
+ var logStderr3 = (message) => Effect25.sync(() => console.error(message));
2403
2527
  var normalizeDebounceMs = (debounceMs) => debounceMs === void 0 ? DEFAULT_REPLICA_DEBOUNCE_MS : Math.max(0, debounceMs);
2404
2528
  var initialPortalReplicaState = () => ({
2405
2529
  dirty: false,
@@ -2443,7 +2567,7 @@ var makeReplicaLive = (config = {}) => {
2443
2567
  return Effect25.tryPromise({
2444
2568
  try: async () => {
2445
2569
  await Effect25.runPromise(
2446
- logStderr4(
2570
+ logStderr3(
2447
2571
  `[replica] Publishing replica for portal ${portalId} (reason: ${reason})`
2448
2572
  )
2449
2573
  );
@@ -2504,7 +2628,7 @@ var makeReplicaLive = (config = {}) => {
2504
2628
  dirty: true
2505
2629
  })),
2506
2630
  Effect25.flatMap(
2507
- () => logStderr4(
2631
+ () => logStderr3(
2508
2632
  `[replica] Failed to publish replica for portal ${portalId}: ${error.message}`
2509
2633
  )
2510
2634
  ),
@@ -3708,7 +3832,7 @@ var resetStuckPlugins = (plugins, updatedAt) => {
3708
3832
  return changed ? nextPlugins : plugins;
3709
3833
  };
3710
3834
  var upsertArtifact = (artifacts, input, updatedAt) => {
3711
- const errorValue = input.status === "FAILED" ? input.error ?? null : null;
3835
+ const errorValue = input.status === "FAILED" || input.status === "PARTIAL" ? input.error ?? null : null;
3712
3836
  let didUpdate = false;
3713
3837
  const nextArtifacts = artifacts.map((artifact) => {
3714
3838
  if (artifact.object_type !== input.objectType) {
@@ -3938,6 +4062,15 @@ var toSyncFailure = (message, cause) => Effect33.fail(
3938
4062
  })
3939
4063
  );
3940
4064
  var swallowPortalFileStateError = (effect) => effect.pipe(Effect33.catchAll(() => Effect33.void));
4065
+ var summarizePartialImport = (results) => {
4066
+ const rejectSummaries = results.map((result) => result.rejects).filter((rejects) => rejects !== null);
4067
+ if (rejectSummaries.length === 0) return null;
4068
+ const totalRejectedRows = rejectSummaries.reduce(
4069
+ (sum, rejects) => sum + rejects.rejectCount,
4070
+ 0
4071
+ );
4072
+ return `Imported with partial data. Skipped ${totalRejectedRows} malformed CSV row${totalRejectedRows === 1 ? "" : "s"} across ${rejectSummaries.length} file${rejectSummaries.length === 1 ? "" : "s"}. ${rejectSummaries.map((rejects) => rejects.message).join(" ")}`;
4073
+ };
3941
4074
  var wrapSyncFailure = (message, effect) => effect.pipe(
3942
4075
  Effect33.catchAllCause((cause) => toSyncFailure(message, cause))
3943
4076
  );
@@ -4043,7 +4176,9 @@ var PortalDataLiveBase = Layer7.effect(
4043
4176
  csvPath: downloaded.objectFile.path
4044
4177
  })
4045
4178
  ),
4046
- Effect33.flatMap(
4179
+ Effect33.bindTo("objectResult"),
4180
+ Effect33.bind(
4181
+ "associationResults",
4047
4182
  () => Effect33.forEach(
4048
4183
  downloaded.associationFiles,
4049
4184
  (associationFile) => duckDb.syncAssociationArtifact({
@@ -4055,13 +4190,18 @@ var PortalDataLiveBase = Layer7.effect(
4055
4190
  { concurrency: 1 }
4056
4191
  )
4057
4192
  ),
4193
+ Effect33.bind(
4194
+ "partialMessage",
4195
+ ({ objectResult, associationResults }) => Effect33.succeed(summarizePartialImport([objectResult, ...associationResults]))
4196
+ ),
4058
4197
  Effect33.flatMap(
4059
- () => swallowPortalFileStateError(
4198
+ ({ partialMessage }) => swallowPortalFileStateError(
4060
4199
  portalFileState.updateArtifactStatus({
4061
4200
  portalId: input.portalId,
4062
4201
  artifactId: input.artifactId,
4063
4202
  objectType: input.objectType,
4064
- status: "SYNCED",
4203
+ status: partialMessage === null ? "SYNCED" : "PARTIAL",
4204
+ error: partialMessage,
4065
4205
  sourcePath: downloaded.sourcePath
4066
4206
  })
4067
4207
  )
@@ -4113,13 +4253,19 @@ var PortalDataLiveBase = Layer7.effect(
4113
4253
  csvPath: downloaded.sourcePath
4114
4254
  })
4115
4255
  ),
4256
+ Effect33.bindTo("artifactResult"),
4257
+ Effect33.bind(
4258
+ "partialMessage",
4259
+ ({ artifactResult }) => Effect33.succeed(summarizePartialImport([artifactResult]))
4260
+ ),
4116
4261
  Effect33.flatMap(
4117
- () => swallowPortalFileStateError(
4262
+ ({ partialMessage }) => swallowPortalFileStateError(
4118
4263
  portalFileState.updateArtifactStatus({
4119
4264
  portalId: input.portalId,
4120
4265
  artifactId: input.artifactId,
4121
4266
  objectType: input.objectType,
4122
- status: "SYNCED",
4267
+ status: partialMessage === null ? "SYNCED" : "PARTIAL",
4268
+ error: partialMessage,
4123
4269
  sourcePath: downloaded.sourcePath
4124
4270
  })
4125
4271
  )
@@ -4276,7 +4422,7 @@ var filterUnsyncedArtifacts = (artifacts, allowedObjectTypes) => artifacts.filte
4276
4422
  );
4277
4423
 
4278
4424
  // src/effects/run-full-sync.ts
4279
- var logStderr5 = (message) => Effect34.sync(() => console.error(message));
4425
+ var logStderr4 = (message) => Effect34.sync(() => console.error(message));
4280
4426
  var runFullSync = (ws, portalData, portalFileState, portalId, artifacts, objectTypesToSync) => pipe22(
4281
4427
  Ref3.make({
4282
4428
  totalArtifacts: 0,
@@ -4319,14 +4465,14 @@ var runFullSync = (ws, portalData, portalFileState, portalId, artifacts, objectT
4319
4465
  currentArtifact: artifact.object_type
4320
4466
  })),
4321
4467
  Effect34.tap(
4322
- () => logStderr5(
4468
+ () => logStderr4(
4323
4469
  `[sync] (${idx + 1}/${toSync.length}) Processing: ${artifact.object_type}`
4324
4470
  )
4325
4471
  ),
4326
4472
  Effect34.flatMap(() => {
4327
4473
  const plugin = findArtifactPlugin(artifact.object_type);
4328
4474
  if (!plugin) {
4329
- return logStderr5(`[sync] No artifact plugin found for ${artifact.object_type}, skipping`);
4475
+ return logStderr4(`[sync] No artifact plugin found for ${artifact.object_type}, skipping`);
4330
4476
  }
4331
4477
  return plugin.processArtifact(artifactCtx, artifact);
4332
4478
  }),
@@ -4383,7 +4529,7 @@ var runFullSync = (ws, portalData, portalFileState, portalId, artifacts, objectT
4383
4529
 
4384
4530
  // src/effects/sync-message-plugins.ts
4385
4531
  import { Effect as Effect35, pipe as pipe23 } from "effect";
4386
- var logStderr6 = (message) => Effect35.sync(() => console.error(message));
4532
+ var logStderr5 = (message) => Effect35.sync(() => console.error(message));
4387
4533
  var PLUGIN_ERROR_KEY2 = (name) => `plugin_error:${name}`;
4388
4534
  var extractMessage = (value) => {
4389
4535
  if (typeof value === "string") {
@@ -4429,7 +4575,7 @@ var syncMessagePlugin = (ws, portalData, portalFileState, portalId, plugin) => p
4429
4575
  Effect35.map((raw) => raw ? new Date(raw) : null),
4430
4576
  Effect35.flatMap((lastSynced) => {
4431
4577
  if (plugin.shouldSync && !plugin.shouldSync(lastSynced)) {
4432
- return logStderr6(`[sync] Skipping plugin ${plugin.name} (shouldSync returned false)`);
4578
+ return logStderr5(`[sync] Skipping plugin ${plugin.name} (shouldSync returned false)`);
4433
4579
  }
4434
4580
  return pipe23(
4435
4581
  portalFileState.updatePluginStatus({
@@ -4455,7 +4601,7 @@ var syncMessagePlugin = (ws, portalData, portalFileState, portalId, plugin) => p
4455
4601
  }),
4456
4602
  Effect35.catchAll((error) => {
4457
4603
  if (isReadOnlySkipError(error)) {
4458
- return logStderr6(
4604
+ return logStderr5(
4459
4605
  `[sync] Skipping message plugin ${plugin.name} for portal ${portalId} on read-only client`
4460
4606
  );
4461
4607
  }
@@ -4472,7 +4618,7 @@ var syncMessagePlugin = (ws, portalData, portalFileState, portalId, plugin) => p
4472
4618
  }).pipe(Effect35.catchAll(() => Effect35.void))
4473
4619
  ),
4474
4620
  Effect35.flatMap(
4475
- () => logStderr6(`[sync] Message plugin failed (${plugin.name}): ${message}`)
4621
+ () => logStderr5(`[sync] Message plugin failed (${plugin.name}): ${message}`)
4476
4622
  )
4477
4623
  );
4478
4624
  })
@@ -4482,12 +4628,12 @@ var syncMessagePlugins = (ws, portalData, portalFileState, portalId, plugins) =>
4482
4628
  Effect35.as(true),
4483
4629
  Effect35.catchAll(
4484
4630
  (error) => isReadOnlySkipError(error) ? pipe23(
4485
- logStderr6(
4631
+ logStderr5(
4486
4632
  `[sync] Skipping message plugin sync for portal ${portalId} on read-only client`
4487
4633
  ),
4488
4634
  Effect35.as(false)
4489
4635
  ) : pipe23(
4490
- logStderr6(
4636
+ logStderr5(
4491
4637
  `[sync] Failed to initialize portal schema before message plugin sync for portal ${portalId}: ${error instanceof Error ? error.message : String(error)}`
4492
4638
  ),
4493
4639
  Effect35.as(true)
@@ -16668,12 +16814,12 @@ var mainProgram = Effect108.gen(function* () {
16668
16814
  Effect108.flatMap(() => portalFileState.get(portalId)),
16669
16815
  Effect108.map((portal) => {
16670
16816
  const status = portal.artifacts.find((a) => a.object_type === artifact.object_type)?.status;
16671
- if (status !== "SYNCED") {
16817
+ if (status !== "SYNCED" && status !== "PARTIAL") {
16672
16818
  console.error(
16673
16819
  `[artifact-queue] Artifact ${artifact.object_type} for portal ${portalId} finished with status ${status ?? "MISSING"}`
16674
16820
  );
16675
16821
  }
16676
- return status === "SYNCED";
16822
+ return status === "SYNCED" || status === "PARTIAL";
16677
16823
  }),
16678
16824
  Effect108.catchAll(
16679
16825
  (err) => Effect108.sync(() => {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@daeda/mcp-pro",
3
- "version": "0.1.24",
3
+ "version": "0.1.25",
4
4
  "description": "MCP server for HubSpot CRM — sync, query, and manage your portal data",
5
5
  "type": "module",
6
6
  "bin": {