@daeda/mcp-pro 0.1.25 → 0.1.27

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +229 -105
  2. package/package.json +3 -3
package/dist/index.js CHANGED
@@ -1268,15 +1268,54 @@ var WebSocketService = class extends Context9.Tag("WebSocketService")() {
1268
1268
  import { Effect as Effect22, Layer } from "effect";
1269
1269
 
1270
1270
  // src/paths.ts
1271
+ import fs from "fs";
1271
1272
  import os from "os";
1272
1273
  import path from "path";
1274
+ var LEGACY_REPLICA_DB_FILENAME = "hubspot.replica.duckdb";
1275
+ var REPLICA_POINTER_FILENAME = "hubspot.replica.current";
1276
+ var REPLICA_VERSION_PREFIX = "hubspot.replica.";
1277
+ var REPLICA_VERSION_SUFFIX = ".duckdb";
1273
1278
  var defaultDataRoot = () => process.env.MCP_CLIENT_DATA_ROOT ?? path.join(os.homedir(), ".daeda-mcp");
1274
1279
  var _dataRoot = defaultDataRoot();
1275
1280
  var DATA_ROOT = () => _dataRoot;
1276
1281
  var PORTALS_DIR = () => path.join(_dataRoot, "portals");
1277
1282
  var portalDir = (portalId) => path.join(PORTALS_DIR(), String(portalId));
1278
1283
  var dbPath = (portalId) => path.join(portalDir(portalId), "hubspot.duckdb");
1279
- var replicaDbPath = (portalId) => path.join(portalDir(portalId), "hubspot.replica.duckdb");
1284
+ var legacyReplicaDbPath = (portalId) => path.join(portalDir(portalId), LEGACY_REPLICA_DB_FILENAME);
1285
+ var replicaPointerPath = (portalId) => path.join(portalDir(portalId), REPLICA_POINTER_FILENAME);
1286
+ var replicaVersionDbPath = (portalId, filename) => path.join(portalDir(portalId), path.basename(filename));
1287
+ var readReplicaPointer = (portalId) => {
1288
+ try {
1289
+ const raw = fs.readFileSync(replicaPointerPath(portalId), "utf-8").trim();
1290
+ if (raw.length === 0 || raw !== path.basename(raw)) {
1291
+ return null;
1292
+ }
1293
+ return raw;
1294
+ } catch {
1295
+ return null;
1296
+ }
1297
+ };
1298
+ var readLatestReplicaVersion = (portalId) => {
1299
+ try {
1300
+ const versions = fs.readdirSync(portalDir(portalId)).filter(
1301
+ (entry) => entry.startsWith(REPLICA_VERSION_PREFIX) && entry.endsWith(REPLICA_VERSION_SUFFIX)
1302
+ ).sort((left, right) => right.localeCompare(left));
1303
+ return versions[0] ?? null;
1304
+ } catch {
1305
+ return null;
1306
+ }
1307
+ };
1308
+ var replicaDbPath = (portalId) => {
1309
+ const currentReplicaFile = readReplicaPointer(portalId);
1310
+ if (currentReplicaFile !== null) {
1311
+ return replicaVersionDbPath(portalId, currentReplicaFile);
1312
+ }
1313
+ const latestReplicaVersion = readLatestReplicaVersion(portalId);
1314
+ if (latestReplicaVersion !== null) {
1315
+ return replicaVersionDbPath(portalId, latestReplicaVersion);
1316
+ }
1317
+ return legacyReplicaDbPath(portalId);
1318
+ };
1280
1319
  var stateFilePath = () => path.join(_dataRoot, "client_state.json");
1281
1320
  var DOWNLOADS_DIR = () => path.join(_dataRoot, "downloads");
1282
1321
  var HUBSPOT_NORMALIZED_DOWNLOADS_DIR = () => path.join(DOWNLOADS_DIR(), "hubspot-normalized");
@@ -1286,7 +1325,7 @@ var CHARTS_DIR = () => path.join(_dataRoot, "charts");
1286
1325
  var chartFilePath = (filename) => path.join(CHARTS_DIR(), filename);
1287
1326
 
1288
1327
  // src/layers/ConfigLayerLive.ts
1289
- import * as fs from "fs";
1328
+ import * as fs2 from "fs";
1290
1329
 
1291
1330
  // src/types.ts
1292
1331
  import { z as z12 } from "zod";
@@ -1342,7 +1381,7 @@ var ConfigLive = Layer.succeed(
1342
1381
  load: () => Effect22.try({
1343
1382
  try: () => {
1344
1383
  const file = stateFilePath();
1345
- const raw = fs.existsSync(file) ? fs.readFileSync(file, "utf-8") : null;
1384
+ const raw = fs2.existsSync(file) ? fs2.readFileSync(file, "utf-8") : null;
1346
1385
  if (!raw) return { selectedPortalId: null };
1347
1386
  return ClientStateSchema.parse(JSON.parse(raw));
1348
1387
  },
@@ -1353,8 +1392,8 @@ var ConfigLive = Layer.succeed(
1353
1392
  ),
1354
1393
  save: (state) => Effect22.try({
1355
1394
  try: () => {
1356
- fs.mkdirSync(DATA_ROOT(), { recursive: true });
1357
- fs.writeFileSync(stateFilePath(), JSON.stringify(state, null, 2), "utf-8");
1395
+ fs2.mkdirSync(DATA_ROOT(), { recursive: true });
1396
+ fs2.writeFileSync(stateFilePath(), JSON.stringify(state, null, 2), "utf-8");
1358
1397
  },
1359
1398
  catch: (error) => new ConfigWriteError({ path: stateFilePath(), cause: error })
1360
1399
  }).pipe(
@@ -1373,7 +1412,8 @@ import {
1373
1412
  VARCHAR
1374
1413
  } from "@duckdb/node-api";
1375
1414
  import { Effect as Effect23 } from "effect";
1376
- import fs2 from "fs";
1415
+ import fs3 from "fs";
1416
+ import path2 from "path";
1377
1417
 
1378
1418
  // src/pure/schema.ts
1379
1419
  var VALID_TABLE_NAME = /^[a-z][a-z0-9_]*$/;
@@ -1529,10 +1569,12 @@ var CHUNK_SIZE = 2048;
1529
1569
  var CSV_REJECT_SAMPLE_LIMIT = 5;
1530
1570
  var PLUGIN_ERROR_KEY = (name) => `plugin_error:${name}`;
1531
1571
  var SCHEMA_VERSION_KEY = (name) => `schema_version:plugin:${name}`;
1572
+ var REPLICA_VERSION_PREFIX2 = "hubspot.replica.";
1573
+ var REPLICA_VERSION_SUFFIX2 = ".duckdb";
1532
1574
  var databaseConnectionCache = /* @__PURE__ */ new Map();
1533
1575
  function getReplicaFingerprint(portalId) {
1534
1576
  try {
1535
- const stats = fs2.statSync(replicaDbPath(portalId));
1577
+ const stats = fs3.statSync(replicaDbPath(portalId));
1536
1578
  return {
1537
1579
  mtimeMs: stats.mtimeMs,
1538
1580
  size: stats.size
@@ -1548,6 +1590,7 @@ function sameReplicaFingerprint(left, right) {
1548
1590
  }
1549
1591
  function shouldRefreshHandle(portalId, handle) {
1550
1592
  if (handle.mode !== "read_only") return false;
1593
+ if (handle.sourcePath !== getDatabaseFilePath(portalId, "read_only")) return true;
1551
1594
  return !sameReplicaFingerprint(handle.replicaFingerprint, getReplicaFingerprint(portalId));
1552
1595
  }
1553
1596
  async function closeDatabaseHandle(handle) {
@@ -1571,7 +1614,10 @@ function getDatabaseFilePath(portalId, mode) {
1571
1614
  async function openDatabaseConnection(portalId, _encryptionKey, options2) {
1572
1615
  const mode = resolveConnectionMode(options2);
1573
1616
  const sourcePath = getDatabaseFilePath(portalId, mode);
1574
- fs2.mkdirSync(portalDir(portalId), { recursive: true });
1617
+ fs3.mkdirSync(portalDir(portalId), { recursive: true });
1618
+ if (mode === "read_only" && !fs3.existsSync(sourcePath)) {
1619
+ throw new Error(`Replica database not found for portal ${portalId}: ${sourcePath}`);
1620
+ }
1575
1621
  const instance = await DuckDBInstance.create(sourcePath);
1576
1622
  const connection = await instance.connect();
1577
1623
  return {
@@ -1643,11 +1689,41 @@ async function bulkAppend(connection, tableName, columns) {
1643
1689
  }
1644
1690
  function cleanupDbArtifacts(filePath) {
1645
1691
  try {
1646
- fs2.unlinkSync(filePath);
1692
+ fs3.unlinkSync(filePath);
1693
+ } catch {
1694
+ }
1695
+ try {
1696
+ fs3.unlinkSync(`${filePath}.wal`);
1697
+ } catch {
1698
+ }
1699
+ }
1700
+ function nextReplicaFileName(seed = Date.now()) {
1701
+ return `${REPLICA_VERSION_PREFIX2}${seed}-${process.pid}-${Math.random().toString(36).slice(2, 8)}${REPLICA_VERSION_SUFFIX2}`;
1702
+ }
1703
+ function writeReplicaPointer(portalId, fileName) {
1704
+ const pointerPath = replicaPointerPath(portalId);
1705
+ const nextPointerPath = `${pointerPath}.next`;
1706
+ cleanupDbArtifacts(nextPointerPath);
1707
+ fs3.writeFileSync(nextPointerPath, `${path2.basename(fileName)}
1708
+ `, "utf-8");
1709
+ try {
1710
+ fs3.unlinkSync(pointerPath);
1647
1711
  } catch {
1648
1712
  }
1713
+ fs3.renameSync(nextPointerPath, pointerPath);
1714
+ }
1715
+ function isManagedReplicaFile(fileName) {
1716
+ return fileName === path2.basename(legacyReplicaDbPath(0)) || fileName.startsWith(REPLICA_VERSION_PREFIX2) && fileName.endsWith(REPLICA_VERSION_SUFFIX2);
1717
+ }
1718
+ function cleanupStaleReplicaFiles(portalId, currentReplicaPath) {
1719
+ const currentFile = path2.basename(currentReplicaPath);
1649
1720
  try {
1650
- fs2.unlinkSync(`${filePath}.wal`);
1721
+ for (const entry of fs3.readdirSync(portalDir(portalId))) {
1722
+ if (!isManagedReplicaFile(entry) || entry === currentFile) {
1723
+ continue;
1724
+ }
1725
+ cleanupDbArtifacts(path2.join(portalDir(portalId), entry));
1726
+ }
1651
1727
  } catch {
1652
1728
  }
1653
1729
  }
@@ -1658,13 +1734,13 @@ async function publishReplica({
1658
1734
  assertWritable(masterLock);
1659
1735
  const handle = await getDatabaseConnection(portalId, null);
1660
1736
  const liveDbPath = dbPath(portalId);
1661
- const replicaPath = replicaDbPath(portalId);
1662
- const nextReplicaPath = `${replicaPath}.next`;
1737
+ const nextReplicaName = nextReplicaFileName();
1738
+ const nextReplicaPath = replicaVersionDbPath(portalId, nextReplicaName);
1663
1739
  cleanupDbArtifacts(nextReplicaPath);
1664
1740
  await handle.connection.run("CHECKPOINT");
1665
- cleanupDbArtifacts(replicaPath);
1666
- fs2.copyFileSync(liveDbPath, nextReplicaPath);
1667
- fs2.renameSync(nextReplicaPath, replicaPath);
1741
+ fs3.copyFileSync(liveDbPath, nextReplicaPath);
1742
+ writeReplicaPointer(portalId, nextReplicaName);
1743
+ cleanupStaleReplicaFiles(portalId, nextReplicaPath);
1668
1744
  }
1669
1745
  async function ensureTable(connection, ddlStatements) {
1670
1746
  for (const statement of ddlStatements) {
@@ -2188,7 +2264,7 @@ async function loadPluginPayload(source) {
2188
2264
  if (source.kind === "payload") {
2189
2265
  return source.payload;
2190
2266
  }
2191
- const raw = await fs2.promises.readFile(source.jsonPath, "utf8");
2267
+ const raw = await fs3.promises.readFile(source.jsonPath, "utf8");
2192
2268
  return JSON.parse(raw);
2193
2269
  }
2194
2270
  async function applyDiffBatch({
@@ -2329,77 +2405,105 @@ var buildCappedSelectSql = (sql, maxRows) => {
2329
2405
  };
2330
2406
 
2331
2407
  // src/layers/MasterLockLive.ts
2332
- import { createRequire } from "module";
2333
- import path2 from "path";
2334
- import { mkdir, open } from "fs/promises";
2408
+ import path3 from "path";
2335
2409
  import { Effect as Effect24, Layer as Layer2, Ref, pipe as pipe13 } from "effect";
2336
- var PROMOTION_POLL_MS = 3e3;
2337
- var lockFilePath = () => path2.join(PORTALS_DIR(), "master.lock");
2338
- var loadOsLockModule = () => {
2410
+
2411
+ // src/layers/MasterLockDependencies.ts
2412
+ import { createRequire } from "module";
2413
+ import { mkdir } from "fs/promises";
2414
+ var loadProperLockfileModule = () => {
2339
2415
  const requireFromHere = createRequire(import.meta.url);
2340
- return requireFromHere("os-lock");
2416
+ return requireFromHere("proper-lockfile");
2341
2417
  };
2342
- var resolveDependencies = (config = {}) => ({
2418
+ var resolveMasterLockDependencies = (config, defaults) => ({
2343
2419
  ensurePortalsDir: config.ensurePortalsDir ?? (async (dirPath) => {
2344
2420
  await mkdir(dirPath, { recursive: true });
2345
2421
  }),
2346
- openLockFile: config.openLockFile ?? ((filePath) => open(filePath, "a+")),
2347
- lock: config.lock ?? loadOsLockModule().lock,
2348
- unlock: config.unlock ?? loadOsLockModule().unlock,
2422
+ acquireLock: config.acquireLock ?? (async (dirPath, options2) => {
2423
+ const release = await loadProperLockfileModule().lock(dirPath, options2);
2424
+ return { release };
2425
+ }),
2349
2426
  setInterval: config.setInterval ?? ((callback, ms) => setInterval(callback, ms)),
2350
2427
  clearInterval: config.clearInterval ?? ((timer) => clearInterval(timer)),
2351
2428
  log: config.log ?? ((message) => console.error(message)),
2352
- promotionPollMs: config.promotionPollMs ?? PROMOTION_POLL_MS
2429
+ promotionPollMs: config.promotionPollMs ?? defaults.promotionPollMs
2353
2430
  });
2431
+
2432
+ // src/layers/MasterLockLive.ts
2433
+ var PROMOTION_POLL_MS = 3e3;
2434
+ var LOCK_STALE_MS = 5e3;
2435
+ var LOCK_UPDATE_MS = 2500;
2436
+ var lockFilePath = () => path3.join(PORTALS_DIR(), "master.lock");
2354
2437
  var logMessage = (runtime, message) => Effect24.sync(() => runtime.deps.log(message));
2355
2438
  var isLockConflictError = (error) => {
2356
2439
  const code = error?.code;
2357
- return code === "EACCES" || code === "EAGAIN" || code === "EBUSY";
2440
+ return code === "ELOCKED";
2358
2441
  };
2359
2442
  var ensurePortalsDir = (runtime) => Effect24.tryPromise({
2360
2443
  try: () => runtime.deps.ensurePortalsDir(PORTALS_DIR()),
2361
2444
  catch: (cause) => new Error(`Failed to create portals directory: ${String(cause)}`)
2362
2445
  });
2363
- var closeHandle = (runtime, handle) => Effect24.tryPromise({
2446
+ var releaseLease = (runtime, lease) => Effect24.tryPromise({
2364
2447
  try: async () => {
2365
- try {
2366
- await runtime.deps.unlock(handle.fd);
2367
- } catch {
2368
- }
2369
- try {
2370
- await handle.close();
2371
- } catch {
2372
- }
2448
+ await lease.release();
2373
2449
  },
2374
- catch: (cause) => new Error(`Failed to close master lock handle: ${String(cause)}`)
2450
+ catch: (cause) => new Error(`Failed to release master lock lease: ${String(cause)}`)
2451
+ });
2452
+ var acquireLockOptions = (runtime) => ({
2453
+ lockfilePath: lockFilePath(),
2454
+ retries: 0,
2455
+ stale: LOCK_STALE_MS,
2456
+ update: LOCK_UPDATE_MS,
2457
+ realpath: true,
2458
+ onCompromised: (error) => {
2459
+ Effect24.runFork(
2460
+ pipe13(
2461
+ handleCompromisedLease(runtime, error),
2462
+ Effect24.catchAll(() => Effect24.void)
2463
+ )
2464
+ );
2465
+ }
2375
2466
  });
2467
+ var handleCompromisedLease = (runtime, error) => pipe13(
2468
+ Ref.get(runtime.releasedRef),
2469
+ Effect24.flatMap((released) => {
2470
+ if (released) return Effect24.void;
2471
+ return pipe13(
2472
+ Ref.getAndSet(runtime.lockLeaseRef, null),
2473
+ Effect24.flatMap((lease) => {
2474
+ if (lease === null) return Effect24.void;
2475
+ return pipe13(
2476
+ logMessage(
2477
+ runtime,
2478
+ `[master-lock] master lease compromised: ${error.message}`
2479
+ ),
2480
+ Effect24.flatMap(() => Ref.set(runtime.connectionTypeRef, "READ_ONLY")),
2481
+ Effect24.flatMap(() => startPromotionPolling(runtime))
2482
+ );
2483
+ })
2484
+ );
2485
+ })
2486
+ );
2376
2487
  var tryAcquireMasterLock = (runtime) => pipe13(
2377
- Ref.get(runtime.lockHandleRef),
2378
- Effect24.flatMap((existingHandle) => {
2379
- if (existingHandle !== null) {
2488
+ Ref.get(runtime.lockLeaseRef),
2489
+ Effect24.flatMap((existingLease) => {
2490
+ if (existingLease !== null) {
2380
2491
  return Ref.get(runtime.connectionTypeRef);
2381
2492
  }
2382
2493
  return pipe13(
2383
2494
  ensurePortalsDir(runtime),
2384
2495
  Effect24.flatMap(
2385
2496
  () => Effect24.tryPromise({
2386
- try: () => runtime.deps.openLockFile(lockFilePath()),
2387
- catch: (cause) => new Error(`Failed to open master lock file: ${String(cause)}`)
2388
- })
2389
- ),
2390
- Effect24.flatMap(
2391
- (handle) => Effect24.tryPromise({
2392
2497
  try: async () => {
2393
2498
  try {
2394
- await runtime.deps.lock(handle.fd, {
2395
- exclusive: true,
2396
- immediate: true
2397
- });
2398
- return { connectionType: "MASTER", handle };
2499
+ const lease = await runtime.deps.acquireLock(
2500
+ PORTALS_DIR(),
2501
+ acquireLockOptions(runtime)
2502
+ );
2503
+ return { connectionType: "MASTER", lease };
2399
2504
  } catch (error) {
2400
- await handle.close();
2401
2505
  if (isLockConflictError(error)) {
2402
- return { connectionType: "READ_ONLY", handle: null };
2506
+ return { connectionType: "READ_ONLY", lease: null };
2403
2507
  }
2404
2508
  throw error;
2405
2509
  }
@@ -2408,10 +2512,21 @@ var tryAcquireMasterLock = (runtime) => pipe13(
2408
2512
  })
2409
2513
  ),
2410
2514
  Effect24.flatMap(
2411
- ({ connectionType, handle }) => pipe13(
2412
- Ref.set(runtime.connectionTypeRef, connectionType),
2413
- Effect24.flatMap(() => Ref.set(runtime.lockHandleRef, handle)),
2414
- Effect24.as(connectionType)
2515
+ ({ connectionType, lease }) => pipe13(
2516
+ Ref.get(runtime.releasedRef),
2517
+ Effect24.flatMap((released) => {
2518
+ if (!released || lease === null) {
2519
+ return pipe13(
2520
+ Ref.set(runtime.connectionTypeRef, connectionType),
2521
+ Effect24.flatMap(() => Ref.set(runtime.lockLeaseRef, lease)),
2522
+ Effect24.as(connectionType)
2523
+ );
2524
+ }
2525
+ return pipe13(
2526
+ releaseLease(runtime, lease),
2527
+ Effect24.flatMap(() => Effect24.succeed("READ_ONLY"))
2528
+ );
2529
+ })
2415
2530
  )
2416
2531
  )
2417
2532
  );
@@ -2426,9 +2541,12 @@ var stopPromotionPolling = (runtime) => pipe13(
2426
2541
  )
2427
2542
  );
2428
2543
  var tryPromoteToMaster = (runtime) => pipe13(
2429
- Ref.get(runtime.promotingRef),
2430
- Effect24.flatMap((promoting) => {
2431
- if (promoting) return Effect24.void;
2544
+ Effect24.all({
2545
+ promoting: Ref.get(runtime.promotingRef),
2546
+ released: Ref.get(runtime.releasedRef)
2547
+ }),
2548
+ Effect24.flatMap(({ promoting, released }) => {
2549
+ if (promoting || released) return Effect24.void;
2432
2550
  return pipe13(
2433
2551
  Ref.set(runtime.promotingRef, true),
2434
2552
  Effect24.flatMap(
@@ -2478,20 +2596,26 @@ var startPromotionPolling = (runtime) => pipe13(
2478
2596
  })
2479
2597
  );
2480
2598
  var releaseRuntime = (runtime) => pipe13(
2481
- stopPromotionPolling(runtime),
2482
- Effect24.flatMap(() => Ref.getAndSet(runtime.lockHandleRef, null)),
2599
+ Ref.set(runtime.releasedRef, true),
2600
+ Effect24.flatMap(() => stopPromotionPolling(runtime)),
2601
+ Effect24.flatMap(() => Ref.getAndSet(runtime.lockLeaseRef, null)),
2483
2602
  Effect24.flatMap(
2484
- (handle) => handle === null ? Effect24.void : closeHandle(runtime, handle)
2603
+ (lease) => lease === null ? Effect24.void : releaseLease(runtime, lease)
2485
2604
  ),
2486
2605
  Effect24.flatMap(() => Ref.set(runtime.connectionTypeRef, "READ_ONLY"))
2487
2606
  );
2488
2607
  var makeRuntime = (config = {}) => pipe13(
2489
2608
  Effect24.all({
2490
- deps: Effect24.sync(() => resolveDependencies(config)),
2609
+ deps: Effect24.sync(
2610
+ () => resolveMasterLockDependencies(config, {
2611
+ promotionPollMs: PROMOTION_POLL_MS
2612
+ })
2613
+ ),
2491
2614
  connectionTypeRef: Ref.make("READ_ONLY"),
2492
- lockHandleRef: Ref.make(null),
2615
+ lockLeaseRef: Ref.make(null),
2493
2616
  promotionTimerRef: Ref.make(null),
2494
- promotingRef: Ref.make(false)
2617
+ promotingRef: Ref.make(false),
2618
+ releasedRef: Ref.make(false)
2495
2619
  }),
2496
2620
  Effect24.tap(
2497
2621
  (runtime) => pipe13(
@@ -2952,8 +3076,8 @@ import { Effect as Effect29, Layer as Layer5, pipe as pipe18 } from "effect";
2952
3076
 
2953
3077
  // src/effects/csv-parser.ts
2954
3078
  import { Effect as Effect27, pipe as pipe16 } from "effect";
2955
- import * as path3 from "path";
2956
- import * as fs3 from "fs";
3079
+ import * as path4 from "path";
3080
+ import * as fs4 from "fs";
2957
3081
  import { PassThrough, Readable } from "stream";
2958
3082
  import { pipeline } from "stream/promises";
2959
3083
  import { createGunzip } from "zlib";
@@ -3008,7 +3132,7 @@ var hasMagicBytes = (bytes, magic) => bytes.length >= magic.length && magic.ever
3008
3132
  var isCsvEntry = (name) => name.endsWith(".csv") || name.endsWith(".tsv");
3009
3133
  var openZipCsvReadable = (filePath) => new Promise((resolve, reject) => {
3010
3134
  const output = new PassThrough();
3011
- const source = fs3.createReadStream(filePath);
3135
+ const source = fs4.createReadStream(filePath);
3012
3136
  const unzipper = new Unzip();
3013
3137
  let settled = false;
3014
3138
  let foundCsv = false;
@@ -3090,18 +3214,18 @@ var openZipCsvReadable = (filePath) => new Promise((resolve, reject) => {
3090
3214
  source.on("error", fail);
3091
3215
  });
3092
3216
  var openCsvReadableFile = async (filePath) => {
3093
- const fd = await fs3.promises.open(filePath, "r");
3217
+ const fd = await fs4.promises.open(filePath, "r");
3094
3218
  const header = Buffer.alloc(4);
3095
3219
  try {
3096
3220
  const { bytesRead } = await fd.read(header, 0, header.length, 0);
3097
3221
  const bytes = new Uint8Array(header.subarray(0, bytesRead));
3098
3222
  if (hasMagicBytes(bytes, GZIP_MAGIC)) {
3099
- return fs3.createReadStream(filePath).pipe(createGunzip());
3223
+ return fs4.createReadStream(filePath).pipe(createGunzip());
3100
3224
  }
3101
3225
  if (hasMagicBytes(bytes, ZIP_MAGIC)) {
3102
3226
  return openZipCsvReadable(filePath);
3103
3227
  }
3104
- return fs3.createReadStream(filePath);
3228
+ return fs4.createReadStream(filePath);
3105
3229
  } finally {
3106
3230
  await fd.close();
3107
3231
  }
@@ -3109,8 +3233,8 @@ var openCsvReadableFile = async (filePath) => {
3109
3233
  var downloadToTempFile = (url, objectType) => pipe16(
3110
3234
  Effect27.tryPromise({
3111
3235
  try: async () => {
3112
- fs3.mkdirSync(tempDir(), { recursive: true });
3113
- const tempFile = path3.join(tempDir(), `${objectType}_${Date.now()}.csv.gz`);
3236
+ fs4.mkdirSync(tempDir(), { recursive: true });
3237
+ const tempFile = path4.join(tempDir(), `${objectType}_${Date.now()}.csv.gz`);
3114
3238
  const response = await fetch(url);
3115
3239
  if (!response.ok) {
3116
3240
  throw new Error(`HTTP ${response.status}: ${response.statusText}`);
@@ -3120,7 +3244,7 @@ var downloadToTempFile = (url, objectType) => pipe16(
3120
3244
  }
3121
3245
  await pipeline(
3122
3246
  Readable.fromWeb(response.body),
3123
- fs3.createWriteStream(tempFile)
3247
+ fs4.createWriteStream(tempFile)
3124
3248
  );
3125
3249
  return tempFile;
3126
3250
  },
@@ -3132,16 +3256,16 @@ var GZIP_MAGIC_1 = 139;
3132
3256
  var MIN_GZIP_SIZE = 20;
3133
3257
  var validateGzipFile = (filePath) => Effect27.try({
3134
3258
  try: () => {
3135
- const stat = fs3.statSync(filePath);
3259
+ const stat = fs4.statSync(filePath);
3136
3260
  if (stat.size < MIN_GZIP_SIZE) {
3137
3261
  throw new Error(`File too small (${stat.size} bytes), minimum ${MIN_GZIP_SIZE}`);
3138
3262
  }
3139
3263
  const header = Buffer.alloc(2);
3140
- const fd = fs3.openSync(filePath, "r");
3264
+ const fd = fs4.openSync(filePath, "r");
3141
3265
  try {
3142
- fs3.readSync(fd, header, 0, 2, 0);
3266
+ fs4.readSync(fd, header, 0, 2, 0);
3143
3267
  } finally {
3144
- fs3.closeSync(fd);
3268
+ fs4.closeSync(fd);
3145
3269
  }
3146
3270
  if (header[0] !== GZIP_MAGIC_0 || header[1] !== GZIP_MAGIC_1) {
3147
3271
  throw new Error(
@@ -3153,7 +3277,7 @@ var validateGzipFile = (filePath) => Effect27.try({
3153
3277
  });
3154
3278
  var cleanupTempFile = (filePath) => Effect27.sync(() => {
3155
3279
  try {
3156
- fs3.unlinkSync(filePath);
3280
+ fs4.unlinkSync(filePath);
3157
3281
  } catch {
3158
3282
  }
3159
3283
  });
@@ -3161,8 +3285,8 @@ var cleanupTempFile = (filePath) => Effect27.sync(() => {
3161
3285
  // src/effects/hubspot-file-processing.ts
3162
3286
  import { Effect as Effect28, pipe as pipe17 } from "effect";
3163
3287
  import Papa3 from "papaparse";
3164
- import * as fs4 from "fs";
3165
- import * as path4 from "path";
3288
+ import * as fs5 from "fs";
3289
+ import * as path5 from "path";
3166
3290
  import { createGzip } from "zlib";
3167
3291
  var normalizedTempDir = () => HUBSPOT_NORMALIZED_DOWNLOADS_DIR();
3168
3292
  var EXPORT_NAME_TO_OBJECT_TYPE = {
@@ -3232,7 +3356,7 @@ var finalizeStream = async (stream) => {
3232
3356
  await waitForStream(stream);
3233
3357
  };
3234
3358
  var createTempWriter = async (filePath, header) => {
3235
- const stream = fs4.createWriteStream(filePath);
3359
+ const stream = fs5.createWriteStream(filePath);
3236
3360
  const gzip = createGzip();
3237
3361
  gzip.pipe(stream);
3238
3362
  await writeChunk(gzip, header);
@@ -3270,9 +3394,9 @@ var createObjectRow = (id, properties, lastSynced) => [
3270
3394
  escapeCsv(lastSynced)
3271
3395
  ].join(",") + "\n";
3272
3396
  var normalizeHubSpotExportFileInternal = async (sourcePath, objectType) => {
3273
- await fs4.promises.mkdir(normalizedTempDir(), { recursive: true });
3397
+ await fs5.promises.mkdir(normalizedTempDir(), { recursive: true });
3274
3398
  const runId = `${Date.now()}-${Math.random().toString(36).slice(2, 10)}`;
3275
- const objectPath = path4.join(
3399
+ const objectPath = path5.join(
3276
3400
  normalizedTempDir(),
3277
3401
  `${sanitizeFileSegment(objectType)}-${runId}-objects.csv.gz`
3278
3402
  );
@@ -3323,7 +3447,7 @@ var normalizeHubSpotExportFileInternal = async (sourcePath, objectType) => {
3323
3447
  for (const mapping of mappings) {
3324
3448
  associationHeaders.add(mapping.idsHeader);
3325
3449
  if (mapping.labelsHeader) associationHeaders.add(mapping.labelsHeader);
3326
- const assocPath = path4.join(
3450
+ const assocPath = path5.join(
3327
3451
  normalizedTempDir(),
3328
3452
  `${sanitizeFileSegment(objectType)}-${runId}-assoc-${sanitizeFileSegment(mapping.toObjectType)}.csv.gz`
3329
3453
  );
@@ -3421,12 +3545,12 @@ var normalizeHubSpotExportFileInternal = async (sourcePath, objectType) => {
3421
3545
  }
3422
3546
  }
3423
3547
  try {
3424
- fs4.unlinkSync(objectWriter.path);
3548
+ fs5.unlinkSync(objectWriter.path);
3425
3549
  } catch {
3426
3550
  }
3427
3551
  for (const writer of associationWriters.values()) {
3428
3552
  try {
3429
- fs4.unlinkSync(writer.path);
3553
+ fs5.unlinkSync(writer.path);
3430
3554
  } catch {
3431
3555
  }
3432
3556
  }
@@ -3517,7 +3641,7 @@ var FileDownloadLive = Layer5.succeed(FileDownloadService, {
3517
3641
 
3518
3642
  // src/layers/PortalDataLive.ts
3519
3643
  import { Cause, Effect as Effect33, Layer as Layer7, pipe as pipe21 } from "effect";
3520
- import fs7 from "fs";
3644
+ import fs8 from "fs";
3521
3645
 
3522
3646
  // src/effects/artifact-queue.ts
3523
3647
  import { Effect as Effect30 } from "effect";
@@ -3700,11 +3824,11 @@ var setSelectedPortal = (portalId) => {
3700
3824
 
3701
3825
  // src/effects/read-connection.ts
3702
3826
  import { Effect as Effect31, pipe as pipe19 } from "effect";
3703
- import fs5 from "fs";
3827
+ import fs6 from "fs";
3704
3828
  var getReadConnection = (portalId) => pipe19(
3705
3829
  Effect31.sync(() => replicaDbPath(portalId)),
3706
3830
  Effect31.filterOrFail(
3707
- (dbFile) => fs5.existsSync(dbFile),
3831
+ (dbFile) => fs6.existsSync(dbFile),
3708
3832
  () => new DatabaseError({
3709
3833
  message: `Read replica not found for portal ${portalId}. Has it been synced?`
3710
3834
  })
@@ -3726,7 +3850,7 @@ var closeReadConnection = (portalId) => Effect31.promise(() => evictDatabaseConn
3726
3850
 
3727
3851
  // src/layers/PortalFileStateLive.ts
3728
3852
  import { Effect as Effect32, Layer as Layer6, pipe as pipe20 } from "effect";
3729
- import * as fs6 from "fs";
3853
+ import * as fs7 from "fs";
3730
3854
  var EMPTY_STATE = () => ({
3731
3855
  artifacts: [],
3732
3856
  plugins: [],
@@ -3754,7 +3878,7 @@ var normalizeLegacyState = (legacy) => ({
3754
3878
  var readLegacyFromDisk = (portalId) => Effect32.try({
3755
3879
  try: () => {
3756
3880
  const file = legacyPortalStatePath(portalId);
3757
- const raw = fs6.existsSync(file) ? fs6.readFileSync(file, "utf-8") : null;
3881
+ const raw = fs7.existsSync(file) ? fs7.readFileSync(file, "utf-8") : null;
3758
3882
  if (!raw) return null;
3759
3883
  return normalizeLegacyState(SyncedArtifactsSchema.parse(JSON.parse(raw)));
3760
3884
  },
@@ -3763,7 +3887,7 @@ var readLegacyFromDisk = (portalId) => Effect32.try({
3763
3887
  var readFromDisk = (portalId) => Effect32.try({
3764
3888
  try: () => {
3765
3889
  const file = portalDataPath(portalId);
3766
- const raw = fs6.existsSync(file) ? fs6.readFileSync(file, "utf-8") : null;
3890
+ const raw = fs7.existsSync(file) ? fs7.readFileSync(file, "utf-8") : null;
3767
3891
  if (!raw) return null;
3768
3892
  return PortalDataStateSchema.parse(JSON.parse(raw));
3769
3893
  },
@@ -3771,8 +3895,8 @@ var readFromDisk = (portalId) => Effect32.try({
3771
3895
  });
3772
3896
  var writeToDisk = (portalId, state) => Effect32.try({
3773
3897
  try: () => {
3774
- fs6.mkdirSync(portalDir(portalId), { recursive: true });
3775
- fs6.writeFileSync(portalDataPath(portalId), JSON.stringify(state, null, 2), "utf-8");
3898
+ fs7.mkdirSync(portalDir(portalId), { recursive: true });
3899
+ fs7.writeFileSync(portalDataPath(portalId), JSON.stringify(state, null, 2), "utf-8");
3776
3900
  },
3777
3901
  catch: (cause) => new ConfigWriteError({ path: portalDataPath(portalId), cause })
3778
3902
  });
@@ -4037,11 +4161,11 @@ var recoverCorruptedDatabase = (portalId) => pipe21(
4037
4161
  () => Effect33.sync(() => {
4038
4162
  const file = dbPath(portalId);
4039
4163
  try {
4040
- fs7.unlinkSync(file);
4164
+ fs8.unlinkSync(file);
4041
4165
  } catch {
4042
4166
  }
4043
4167
  try {
4044
- fs7.unlinkSync(`${file}.wal`);
4168
+ fs8.unlinkSync(`${file}.wal`);
4045
4169
  } catch {
4046
4170
  }
4047
4171
  })
@@ -5973,7 +6097,7 @@ SELECT json_extract_string(properties, '$.dealstage') as stage, COUNT(*) as coun
5973
6097
 
5974
6098
  // src/tools/status.ts
5975
6099
  import { z as z17 } from "zod";
5976
- import fs8 from "fs";
6100
+ import fs9 from "fs";
5977
6101
  import { Effect as Effect52 } from "effect";
5978
6102
  function formatBytes(bytes) {
5979
6103
  if (bytes === 0) return "0 B";
@@ -5984,8 +6108,8 @@ function formatBytes(bytes) {
5984
6108
  }
5985
6109
  function getDbFileSize(portalId) {
5986
6110
  const db = dbPath(portalId);
5987
- if (!fs8.existsSync(db)) return null;
5988
- return fs8.statSync(db).size;
6111
+ if (!fs9.existsSync(db)) return null;
6112
+ return fs9.statSync(db).size;
5989
6113
  }
5990
6114
  async function buildConnectionSection(deps) {
5991
6115
  const selectedPortalId2 = deps.getSelectedPortalId();
@@ -16421,7 +16545,7 @@ var resolveDiffFlow = (portalId, selectedPortalId2) => selectedPortalId2 === por
16421
16545
  var shouldSubscribeAfterDiffComplete = (portalId, selectedPortalId2, success) => success && selectedPortalId2 === portalId;
16422
16546
 
16423
16547
  // src/effects/ensure-fresh.ts
16424
- import fs9 from "fs";
16548
+ import fs10 from "fs";
16425
16549
  import { Effect as Effect107, pipe as pipe87 } from "effect";
16426
16550
  var DEFAULT_MAX_AGE_MS = 3e5;
16427
16551
  var DEFAULT_QUEUE_DRAIN_TIMEOUT_MS = 12e4;
@@ -16494,7 +16618,7 @@ var makeEnsureFresh = (deps, options2) => createEnsureFresh(
16494
16618
  {
16495
16619
  getSelectedPortalId: deps.getSelectedPortalId,
16496
16620
  waitForPortalDrain,
16497
- hasLocalDatabase: (portalId) => fs9.existsSync(dbPath(portalId)),
16621
+ hasLocalDatabase: (portalId) => fs10.existsSync(dbPath(portalId)),
16498
16622
  getFreshnessCheckedAt: (portalId) => Effect107.runPromise(
16499
16623
  pipe87(
16500
16624
  deps.portalData.getMetadata(portalId, LAST_DIFF_CHECKED_AT_KEY),
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@daeda/mcp-pro",
3
- "version": "0.1.25",
3
+ "version": "0.1.27",
4
4
  "description": "MCP server for HubSpot CRM — sync, query, and manage your portal data",
5
5
  "type": "module",
6
6
  "bin": {
@@ -35,8 +35,8 @@
35
35
  "@modelcontextprotocol/sdk": "^1.25.3",
36
36
  "effect": "3.19.15",
37
37
  "fflate": "^0.8.2",
38
- "os-lock": "^2.0.0",
39
38
  "papaparse": "^5.5.3",
39
+ "proper-lockfile": "^4.1.2",
40
40
  "zod": "^3.23.8"
41
41
  },
42
42
  "devDependencies": {
@@ -49,6 +49,6 @@
49
49
  },
50
50
  "trustedDependencies": [
51
51
  "fs-ext",
52
- "os-lock"
52
+ "proper-lockfile"
53
53
  ]
54
54
  }