@daeda/mcp-pro 0.1.26 → 0.1.27

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +131 -55
  2. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -1268,15 +1268,54 @@ var WebSocketService = class extends Context9.Tag("WebSocketService")() {
1268
1268
  import { Effect as Effect22, Layer } from "effect";
1269
1269
 
1270
1270
  // src/paths.ts
1271
+ import fs from "fs";
1271
1272
  import os from "os";
1272
1273
  import path from "path";
1274
+ var LEGACY_REPLICA_DB_FILENAME = "hubspot.replica.duckdb";
1275
+ var REPLICA_POINTER_FILENAME = "hubspot.replica.current";
1276
+ var REPLICA_VERSION_PREFIX = "hubspot.replica.";
1277
+ var REPLICA_VERSION_SUFFIX = ".duckdb";
1273
1278
  var defaultDataRoot = () => process.env.MCP_CLIENT_DATA_ROOT ?? path.join(os.homedir(), ".daeda-mcp");
1274
1279
  var _dataRoot = defaultDataRoot();
1275
1280
  var DATA_ROOT = () => _dataRoot;
1276
1281
  var PORTALS_DIR = () => path.join(_dataRoot, "portals");
1277
1282
  var portalDir = (portalId) => path.join(PORTALS_DIR(), String(portalId));
1278
1283
  var dbPath = (portalId) => path.join(portalDir(portalId), "hubspot.duckdb");
1279
- var replicaDbPath = (portalId) => path.join(portalDir(portalId), "hubspot.replica.duckdb");
1284
+ var legacyReplicaDbPath = (portalId) => path.join(portalDir(portalId), LEGACY_REPLICA_DB_FILENAME);
1285
+ var replicaPointerPath = (portalId) => path.join(portalDir(portalId), REPLICA_POINTER_FILENAME);
1286
+ var replicaVersionDbPath = (portalId, filename) => path.join(portalDir(portalId), path.basename(filename));
1287
+ var readReplicaPointer = (portalId) => {
1288
+ try {
1289
+ const raw = fs.readFileSync(replicaPointerPath(portalId), "utf-8").trim();
1290
+ if (raw.length === 0 || raw !== path.basename(raw)) {
1291
+ return null;
1292
+ }
1293
+ return raw;
1294
+ } catch {
1295
+ return null;
1296
+ }
1297
+ };
1298
+ var readLatestReplicaVersion = (portalId) => {
1299
+ try {
1300
+ const versions = fs.readdirSync(portalDir(portalId)).filter(
1301
+ (entry) => entry.startsWith(REPLICA_VERSION_PREFIX) && entry.endsWith(REPLICA_VERSION_SUFFIX)
1302
+ ).sort((left, right) => right.localeCompare(left));
1303
+ return versions[0] ?? null;
1304
+ } catch {
1305
+ return null;
1306
+ }
1307
+ };
1308
+ var replicaDbPath = (portalId) => {
1309
+ const currentReplicaFile = readReplicaPointer(portalId);
1310
+ if (currentReplicaFile !== null) {
1311
+ return replicaVersionDbPath(portalId, currentReplicaFile);
1312
+ }
1313
+ const latestReplicaVersion = readLatestReplicaVersion(portalId);
1314
+ if (latestReplicaVersion !== null) {
1315
+ return replicaVersionDbPath(portalId, latestReplicaVersion);
1316
+ }
1317
+ return legacyReplicaDbPath(portalId);
1318
+ };
1280
1319
  var stateFilePath = () => path.join(_dataRoot, "client_state.json");
1281
1320
  var DOWNLOADS_DIR = () => path.join(_dataRoot, "downloads");
1282
1321
  var HUBSPOT_NORMALIZED_DOWNLOADS_DIR = () => path.join(DOWNLOADS_DIR(), "hubspot-normalized");
@@ -1286,7 +1325,7 @@ var CHARTS_DIR = () => path.join(_dataRoot, "charts");
1286
1325
  var chartFilePath = (filename) => path.join(CHARTS_DIR(), filename);
1287
1326
 
1288
1327
  // src/layers/ConfigLayerLive.ts
1289
- import * as fs from "fs";
1328
+ import * as fs2 from "fs";
1290
1329
 
1291
1330
  // src/types.ts
1292
1331
  import { z as z12 } from "zod";
@@ -1342,7 +1381,7 @@ var ConfigLive = Layer.succeed(
1342
1381
  load: () => Effect22.try({
1343
1382
  try: () => {
1344
1383
  const file = stateFilePath();
1345
- const raw = fs.existsSync(file) ? fs.readFileSync(file, "utf-8") : null;
1384
+ const raw = fs2.existsSync(file) ? fs2.readFileSync(file, "utf-8") : null;
1346
1385
  if (!raw) return { selectedPortalId: null };
1347
1386
  return ClientStateSchema.parse(JSON.parse(raw));
1348
1387
  },
@@ -1353,8 +1392,8 @@ var ConfigLive = Layer.succeed(
1353
1392
  ),
1354
1393
  save: (state) => Effect22.try({
1355
1394
  try: () => {
1356
- fs.mkdirSync(DATA_ROOT(), { recursive: true });
1357
- fs.writeFileSync(stateFilePath(), JSON.stringify(state, null, 2), "utf-8");
1395
+ fs2.mkdirSync(DATA_ROOT(), { recursive: true });
1396
+ fs2.writeFileSync(stateFilePath(), JSON.stringify(state, null, 2), "utf-8");
1358
1397
  },
1359
1398
  catch: (error) => new ConfigWriteError({ path: stateFilePath(), cause: error })
1360
1399
  }).pipe(
@@ -1373,7 +1412,8 @@ import {
1373
1412
  VARCHAR
1374
1413
  } from "@duckdb/node-api";
1375
1414
  import { Effect as Effect23 } from "effect";
1376
- import fs2 from "fs";
1415
+ import fs3 from "fs";
1416
+ import path2 from "path";
1377
1417
 
1378
1418
  // src/pure/schema.ts
1379
1419
  var VALID_TABLE_NAME = /^[a-z][a-z0-9_]*$/;
@@ -1529,10 +1569,12 @@ var CHUNK_SIZE = 2048;
1529
1569
  var CSV_REJECT_SAMPLE_LIMIT = 5;
1530
1570
  var PLUGIN_ERROR_KEY = (name) => `plugin_error:${name}`;
1531
1571
  var SCHEMA_VERSION_KEY = (name) => `schema_version:plugin:${name}`;
1572
+ var REPLICA_VERSION_PREFIX2 = "hubspot.replica.";
1573
+ var REPLICA_VERSION_SUFFIX2 = ".duckdb";
1532
1574
  var databaseConnectionCache = /* @__PURE__ */ new Map();
1533
1575
  function getReplicaFingerprint(portalId) {
1534
1576
  try {
1535
- const stats = fs2.statSync(replicaDbPath(portalId));
1577
+ const stats = fs3.statSync(replicaDbPath(portalId));
1536
1578
  return {
1537
1579
  mtimeMs: stats.mtimeMs,
1538
1580
  size: stats.size
@@ -1548,6 +1590,7 @@ function sameReplicaFingerprint(left, right) {
1548
1590
  }
1549
1591
  function shouldRefreshHandle(portalId, handle) {
1550
1592
  if (handle.mode !== "read_only") return false;
1593
+ if (handle.sourcePath !== getDatabaseFilePath(portalId, "read_only")) return true;
1551
1594
  return !sameReplicaFingerprint(handle.replicaFingerprint, getReplicaFingerprint(portalId));
1552
1595
  }
1553
1596
  async function closeDatabaseHandle(handle) {
@@ -1571,7 +1614,10 @@ function getDatabaseFilePath(portalId, mode) {
1571
1614
  async function openDatabaseConnection(portalId, _encryptionKey, options2) {
1572
1615
  const mode = resolveConnectionMode(options2);
1573
1616
  const sourcePath = getDatabaseFilePath(portalId, mode);
1574
- fs2.mkdirSync(portalDir(portalId), { recursive: true });
1617
+ fs3.mkdirSync(portalDir(portalId), { recursive: true });
1618
+ if (mode === "read_only" && !fs3.existsSync(sourcePath)) {
1619
+ throw new Error(`Replica database not found for portal ${portalId}: ${sourcePath}`);
1620
+ }
1575
1621
  const instance = await DuckDBInstance.create(sourcePath);
1576
1622
  const connection = await instance.connect();
1577
1623
  return {
@@ -1643,11 +1689,41 @@ async function bulkAppend(connection, tableName, columns) {
1643
1689
  }
1644
1690
  function cleanupDbArtifacts(filePath) {
1645
1691
  try {
1646
- fs2.unlinkSync(filePath);
1692
+ fs3.unlinkSync(filePath);
1647
1693
  } catch {
1648
1694
  }
1649
1695
  try {
1650
- fs2.unlinkSync(`${filePath}.wal`);
1696
+ fs3.unlinkSync(`${filePath}.wal`);
1697
+ } catch {
1698
+ }
1699
+ }
1700
+ function nextReplicaFileName(seed = Date.now()) {
1701
+ return `${REPLICA_VERSION_PREFIX2}${seed}-${process.pid}-${Math.random().toString(36).slice(2, 8)}${REPLICA_VERSION_SUFFIX2}`;
1702
+ }
1703
+ function writeReplicaPointer(portalId, fileName) {
1704
+ const pointerPath = replicaPointerPath(portalId);
1705
+ const nextPointerPath = `${pointerPath}.next`;
1706
+ cleanupDbArtifacts(nextPointerPath);
1707
+ fs3.writeFileSync(nextPointerPath, `${path2.basename(fileName)}
1708
+ `, "utf-8");
1709
+ try {
1710
+ fs3.unlinkSync(pointerPath);
1711
+ } catch {
1712
+ }
1713
+ fs3.renameSync(nextPointerPath, pointerPath);
1714
+ }
1715
+ function isManagedReplicaFile(fileName) {
1716
+ return fileName === path2.basename(legacyReplicaDbPath(0)) || fileName.startsWith(REPLICA_VERSION_PREFIX2) && fileName.endsWith(REPLICA_VERSION_SUFFIX2);
1717
+ }
1718
+ function cleanupStaleReplicaFiles(portalId, currentReplicaPath) {
1719
+ const currentFile = path2.basename(currentReplicaPath);
1720
+ try {
1721
+ for (const entry of fs3.readdirSync(portalDir(portalId))) {
1722
+ if (!isManagedReplicaFile(entry) || entry === currentFile) {
1723
+ continue;
1724
+ }
1725
+ cleanupDbArtifacts(path2.join(portalDir(portalId), entry));
1726
+ }
1651
1727
  } catch {
1652
1728
  }
1653
1729
  }
@@ -1658,13 +1734,13 @@ async function publishReplica({
1658
1734
  assertWritable(masterLock);
1659
1735
  const handle = await getDatabaseConnection(portalId, null);
1660
1736
  const liveDbPath = dbPath(portalId);
1661
- const replicaPath = replicaDbPath(portalId);
1662
- const nextReplicaPath = `${replicaPath}.next`;
1737
+ const nextReplicaName = nextReplicaFileName();
1738
+ const nextReplicaPath = replicaVersionDbPath(portalId, nextReplicaName);
1663
1739
  cleanupDbArtifacts(nextReplicaPath);
1664
1740
  await handle.connection.run("CHECKPOINT");
1665
- cleanupDbArtifacts(replicaPath);
1666
- fs2.copyFileSync(liveDbPath, nextReplicaPath);
1667
- fs2.renameSync(nextReplicaPath, replicaPath);
1741
+ fs3.copyFileSync(liveDbPath, nextReplicaPath);
1742
+ writeReplicaPointer(portalId, nextReplicaName);
1743
+ cleanupStaleReplicaFiles(portalId, nextReplicaPath);
1668
1744
  }
1669
1745
  async function ensureTable(connection, ddlStatements) {
1670
1746
  for (const statement of ddlStatements) {
@@ -2188,7 +2264,7 @@ async function loadPluginPayload(source) {
2188
2264
  if (source.kind === "payload") {
2189
2265
  return source.payload;
2190
2266
  }
2191
- const raw = await fs2.promises.readFile(source.jsonPath, "utf8");
2267
+ const raw = await fs3.promises.readFile(source.jsonPath, "utf8");
2192
2268
  return JSON.parse(raw);
2193
2269
  }
2194
2270
  async function applyDiffBatch({
@@ -2329,7 +2405,7 @@ var buildCappedSelectSql = (sql, maxRows) => {
2329
2405
  };
2330
2406
 
2331
2407
  // src/layers/MasterLockLive.ts
2332
- import path2 from "path";
2408
+ import path3 from "path";
2333
2409
  import { Effect as Effect24, Layer as Layer2, Ref, pipe as pipe13 } from "effect";
2334
2410
 
2335
2411
  // src/layers/MasterLockDependencies.ts
@@ -2357,7 +2433,7 @@ var resolveMasterLockDependencies = (config, defaults) => ({
2357
2433
  var PROMOTION_POLL_MS = 3e3;
2358
2434
  var LOCK_STALE_MS = 5e3;
2359
2435
  var LOCK_UPDATE_MS = 2500;
2360
- var lockFilePath = () => path2.join(PORTALS_DIR(), "master.lock");
2436
+ var lockFilePath = () => path3.join(PORTALS_DIR(), "master.lock");
2361
2437
  var logMessage = (runtime, message) => Effect24.sync(() => runtime.deps.log(message));
2362
2438
  var isLockConflictError = (error) => {
2363
2439
  const code = error?.code;
@@ -3000,8 +3076,8 @@ import { Effect as Effect29, Layer as Layer5, pipe as pipe18 } from "effect";
3000
3076
 
3001
3077
  // src/effects/csv-parser.ts
3002
3078
  import { Effect as Effect27, pipe as pipe16 } from "effect";
3003
- import * as path3 from "path";
3004
- import * as fs3 from "fs";
3079
+ import * as path4 from "path";
3080
+ import * as fs4 from "fs";
3005
3081
  import { PassThrough, Readable } from "stream";
3006
3082
  import { pipeline } from "stream/promises";
3007
3083
  import { createGunzip } from "zlib";
@@ -3056,7 +3132,7 @@ var hasMagicBytes = (bytes, magic) => bytes.length >= magic.length && magic.ever
3056
3132
  var isCsvEntry = (name) => name.endsWith(".csv") || name.endsWith(".tsv");
3057
3133
  var openZipCsvReadable = (filePath) => new Promise((resolve, reject) => {
3058
3134
  const output = new PassThrough();
3059
- const source = fs3.createReadStream(filePath);
3135
+ const source = fs4.createReadStream(filePath);
3060
3136
  const unzipper = new Unzip();
3061
3137
  let settled = false;
3062
3138
  let foundCsv = false;
@@ -3138,18 +3214,18 @@ var openZipCsvReadable = (filePath) => new Promise((resolve, reject) => {
3138
3214
  source.on("error", fail);
3139
3215
  });
3140
3216
  var openCsvReadableFile = async (filePath) => {
3141
- const fd = await fs3.promises.open(filePath, "r");
3217
+ const fd = await fs4.promises.open(filePath, "r");
3142
3218
  const header = Buffer.alloc(4);
3143
3219
  try {
3144
3220
  const { bytesRead } = await fd.read(header, 0, header.length, 0);
3145
3221
  const bytes = new Uint8Array(header.subarray(0, bytesRead));
3146
3222
  if (hasMagicBytes(bytes, GZIP_MAGIC)) {
3147
- return fs3.createReadStream(filePath).pipe(createGunzip());
3223
+ return fs4.createReadStream(filePath).pipe(createGunzip());
3148
3224
  }
3149
3225
  if (hasMagicBytes(bytes, ZIP_MAGIC)) {
3150
3226
  return openZipCsvReadable(filePath);
3151
3227
  }
3152
- return fs3.createReadStream(filePath);
3228
+ return fs4.createReadStream(filePath);
3153
3229
  } finally {
3154
3230
  await fd.close();
3155
3231
  }
@@ -3157,8 +3233,8 @@ var openCsvReadableFile = async (filePath) => {
3157
3233
  var downloadToTempFile = (url, objectType) => pipe16(
3158
3234
  Effect27.tryPromise({
3159
3235
  try: async () => {
3160
- fs3.mkdirSync(tempDir(), { recursive: true });
3161
- const tempFile = path3.join(tempDir(), `${objectType}_${Date.now()}.csv.gz`);
3236
+ fs4.mkdirSync(tempDir(), { recursive: true });
3237
+ const tempFile = path4.join(tempDir(), `${objectType}_${Date.now()}.csv.gz`);
3162
3238
  const response = await fetch(url);
3163
3239
  if (!response.ok) {
3164
3240
  throw new Error(`HTTP ${response.status}: ${response.statusText}`);
@@ -3168,7 +3244,7 @@ var downloadToTempFile = (url, objectType) => pipe16(
3168
3244
  }
3169
3245
  await pipeline(
3170
3246
  Readable.fromWeb(response.body),
3171
- fs3.createWriteStream(tempFile)
3247
+ fs4.createWriteStream(tempFile)
3172
3248
  );
3173
3249
  return tempFile;
3174
3250
  },
@@ -3180,16 +3256,16 @@ var GZIP_MAGIC_1 = 139;
3180
3256
  var MIN_GZIP_SIZE = 20;
3181
3257
  var validateGzipFile = (filePath) => Effect27.try({
3182
3258
  try: () => {
3183
- const stat = fs3.statSync(filePath);
3259
+ const stat = fs4.statSync(filePath);
3184
3260
  if (stat.size < MIN_GZIP_SIZE) {
3185
3261
  throw new Error(`File too small (${stat.size} bytes), minimum ${MIN_GZIP_SIZE}`);
3186
3262
  }
3187
3263
  const header = Buffer.alloc(2);
3188
- const fd = fs3.openSync(filePath, "r");
3264
+ const fd = fs4.openSync(filePath, "r");
3189
3265
  try {
3190
- fs3.readSync(fd, header, 0, 2, 0);
3266
+ fs4.readSync(fd, header, 0, 2, 0);
3191
3267
  } finally {
3192
- fs3.closeSync(fd);
3268
+ fs4.closeSync(fd);
3193
3269
  }
3194
3270
  if (header[0] !== GZIP_MAGIC_0 || header[1] !== GZIP_MAGIC_1) {
3195
3271
  throw new Error(
@@ -3201,7 +3277,7 @@ var validateGzipFile = (filePath) => Effect27.try({
3201
3277
  });
3202
3278
  var cleanupTempFile = (filePath) => Effect27.sync(() => {
3203
3279
  try {
3204
- fs3.unlinkSync(filePath);
3280
+ fs4.unlinkSync(filePath);
3205
3281
  } catch {
3206
3282
  }
3207
3283
  });
@@ -3209,8 +3285,8 @@ var cleanupTempFile = (filePath) => Effect27.sync(() => {
3209
3285
  // src/effects/hubspot-file-processing.ts
3210
3286
  import { Effect as Effect28, pipe as pipe17 } from "effect";
3211
3287
  import Papa3 from "papaparse";
3212
- import * as fs4 from "fs";
3213
- import * as path4 from "path";
3288
+ import * as fs5 from "fs";
3289
+ import * as path5 from "path";
3214
3290
  import { createGzip } from "zlib";
3215
3291
  var normalizedTempDir = () => HUBSPOT_NORMALIZED_DOWNLOADS_DIR();
3216
3292
  var EXPORT_NAME_TO_OBJECT_TYPE = {
@@ -3280,7 +3356,7 @@ var finalizeStream = async (stream) => {
3280
3356
  await waitForStream(stream);
3281
3357
  };
3282
3358
  var createTempWriter = async (filePath, header) => {
3283
- const stream = fs4.createWriteStream(filePath);
3359
+ const stream = fs5.createWriteStream(filePath);
3284
3360
  const gzip = createGzip();
3285
3361
  gzip.pipe(stream);
3286
3362
  await writeChunk(gzip, header);
@@ -3318,9 +3394,9 @@ var createObjectRow = (id, properties, lastSynced) => [
3318
3394
  escapeCsv(lastSynced)
3319
3395
  ].join(",") + "\n";
3320
3396
  var normalizeHubSpotExportFileInternal = async (sourcePath, objectType) => {
3321
- await fs4.promises.mkdir(normalizedTempDir(), { recursive: true });
3397
+ await fs5.promises.mkdir(normalizedTempDir(), { recursive: true });
3322
3398
  const runId = `${Date.now()}-${Math.random().toString(36).slice(2, 10)}`;
3323
- const objectPath = path4.join(
3399
+ const objectPath = path5.join(
3324
3400
  normalizedTempDir(),
3325
3401
  `${sanitizeFileSegment(objectType)}-${runId}-objects.csv.gz`
3326
3402
  );
@@ -3371,7 +3447,7 @@ var normalizeHubSpotExportFileInternal = async (sourcePath, objectType) => {
3371
3447
  for (const mapping of mappings) {
3372
3448
  associationHeaders.add(mapping.idsHeader);
3373
3449
  if (mapping.labelsHeader) associationHeaders.add(mapping.labelsHeader);
3374
- const assocPath = path4.join(
3450
+ const assocPath = path5.join(
3375
3451
  normalizedTempDir(),
3376
3452
  `${sanitizeFileSegment(objectType)}-${runId}-assoc-${sanitizeFileSegment(mapping.toObjectType)}.csv.gz`
3377
3453
  );
@@ -3469,12 +3545,12 @@ var normalizeHubSpotExportFileInternal = async (sourcePath, objectType) => {
3469
3545
  }
3470
3546
  }
3471
3547
  try {
3472
- fs4.unlinkSync(objectWriter.path);
3548
+ fs5.unlinkSync(objectWriter.path);
3473
3549
  } catch {
3474
3550
  }
3475
3551
  for (const writer of associationWriters.values()) {
3476
3552
  try {
3477
- fs4.unlinkSync(writer.path);
3553
+ fs5.unlinkSync(writer.path);
3478
3554
  } catch {
3479
3555
  }
3480
3556
  }
@@ -3565,7 +3641,7 @@ var FileDownloadLive = Layer5.succeed(FileDownloadService, {
3565
3641
 
3566
3642
  // src/layers/PortalDataLive.ts
3567
3643
  import { Cause, Effect as Effect33, Layer as Layer7, pipe as pipe21 } from "effect";
3568
- import fs7 from "fs";
3644
+ import fs8 from "fs";
3569
3645
 
3570
3646
  // src/effects/artifact-queue.ts
3571
3647
  import { Effect as Effect30 } from "effect";
@@ -3748,11 +3824,11 @@ var setSelectedPortal = (portalId) => {
3748
3824
 
3749
3825
  // src/effects/read-connection.ts
3750
3826
  import { Effect as Effect31, pipe as pipe19 } from "effect";
3751
- import fs5 from "fs";
3827
+ import fs6 from "fs";
3752
3828
  var getReadConnection = (portalId) => pipe19(
3753
3829
  Effect31.sync(() => replicaDbPath(portalId)),
3754
3830
  Effect31.filterOrFail(
3755
- (dbFile) => fs5.existsSync(dbFile),
3831
+ (dbFile) => fs6.existsSync(dbFile),
3756
3832
  () => new DatabaseError({
3757
3833
  message: `Read replica not found for portal ${portalId}. Has it been synced?`
3758
3834
  })
@@ -3774,7 +3850,7 @@ var closeReadConnection = (portalId) => Effect31.promise(() => evictDatabaseConn
3774
3850
 
3775
3851
  // src/layers/PortalFileStateLive.ts
3776
3852
  import { Effect as Effect32, Layer as Layer6, pipe as pipe20 } from "effect";
3777
- import * as fs6 from "fs";
3853
+ import * as fs7 from "fs";
3778
3854
  var EMPTY_STATE = () => ({
3779
3855
  artifacts: [],
3780
3856
  plugins: [],
@@ -3802,7 +3878,7 @@ var normalizeLegacyState = (legacy) => ({
3802
3878
  var readLegacyFromDisk = (portalId) => Effect32.try({
3803
3879
  try: () => {
3804
3880
  const file = legacyPortalStatePath(portalId);
3805
- const raw = fs6.existsSync(file) ? fs6.readFileSync(file, "utf-8") : null;
3881
+ const raw = fs7.existsSync(file) ? fs7.readFileSync(file, "utf-8") : null;
3806
3882
  if (!raw) return null;
3807
3883
  return normalizeLegacyState(SyncedArtifactsSchema.parse(JSON.parse(raw)));
3808
3884
  },
@@ -3811,7 +3887,7 @@ var readLegacyFromDisk = (portalId) => Effect32.try({
3811
3887
  var readFromDisk = (portalId) => Effect32.try({
3812
3888
  try: () => {
3813
3889
  const file = portalDataPath(portalId);
3814
- const raw = fs6.existsSync(file) ? fs6.readFileSync(file, "utf-8") : null;
3890
+ const raw = fs7.existsSync(file) ? fs7.readFileSync(file, "utf-8") : null;
3815
3891
  if (!raw) return null;
3816
3892
  return PortalDataStateSchema.parse(JSON.parse(raw));
3817
3893
  },
@@ -3819,8 +3895,8 @@ var readFromDisk = (portalId) => Effect32.try({
3819
3895
  });
3820
3896
  var writeToDisk = (portalId, state) => Effect32.try({
3821
3897
  try: () => {
3822
- fs6.mkdirSync(portalDir(portalId), { recursive: true });
3823
- fs6.writeFileSync(portalDataPath(portalId), JSON.stringify(state, null, 2), "utf-8");
3898
+ fs7.mkdirSync(portalDir(portalId), { recursive: true });
3899
+ fs7.writeFileSync(portalDataPath(portalId), JSON.stringify(state, null, 2), "utf-8");
3824
3900
  },
3825
3901
  catch: (cause) => new ConfigWriteError({ path: portalDataPath(portalId), cause })
3826
3902
  });
@@ -4085,11 +4161,11 @@ var recoverCorruptedDatabase = (portalId) => pipe21(
4085
4161
  () => Effect33.sync(() => {
4086
4162
  const file = dbPath(portalId);
4087
4163
  try {
4088
- fs7.unlinkSync(file);
4164
+ fs8.unlinkSync(file);
4089
4165
  } catch {
4090
4166
  }
4091
4167
  try {
4092
- fs7.unlinkSync(`${file}.wal`);
4168
+ fs8.unlinkSync(`${file}.wal`);
4093
4169
  } catch {
4094
4170
  }
4095
4171
  })
@@ -6021,7 +6097,7 @@ SELECT json_extract_string(properties, '$.dealstage') as stage, COUNT(*) as coun
6021
6097
 
6022
6098
  // src/tools/status.ts
6023
6099
  import { z as z17 } from "zod";
6024
- import fs8 from "fs";
6100
+ import fs9 from "fs";
6025
6101
  import { Effect as Effect52 } from "effect";
6026
6102
  function formatBytes(bytes) {
6027
6103
  if (bytes === 0) return "0 B";
@@ -6032,8 +6108,8 @@ function formatBytes(bytes) {
6032
6108
  }
6033
6109
  function getDbFileSize(portalId) {
6034
6110
  const db = dbPath(portalId);
6035
- if (!fs8.existsSync(db)) return null;
6036
- return fs8.statSync(db).size;
6111
+ if (!fs9.existsSync(db)) return null;
6112
+ return fs9.statSync(db).size;
6037
6113
  }
6038
6114
  async function buildConnectionSection(deps) {
6039
6115
  const selectedPortalId2 = deps.getSelectedPortalId();
@@ -16469,7 +16545,7 @@ var resolveDiffFlow = (portalId, selectedPortalId2) => selectedPortalId2 === por
16469
16545
  var shouldSubscribeAfterDiffComplete = (portalId, selectedPortalId2, success) => success && selectedPortalId2 === portalId;
16470
16546
 
16471
16547
  // src/effects/ensure-fresh.ts
16472
- import fs9 from "fs";
16548
+ import fs10 from "fs";
16473
16549
  import { Effect as Effect107, pipe as pipe87 } from "effect";
16474
16550
  var DEFAULT_MAX_AGE_MS = 3e5;
16475
16551
  var DEFAULT_QUEUE_DRAIN_TIMEOUT_MS = 12e4;
@@ -16542,7 +16618,7 @@ var makeEnsureFresh = (deps, options2) => createEnsureFresh(
16542
16618
  {
16543
16619
  getSelectedPortalId: deps.getSelectedPortalId,
16544
16620
  waitForPortalDrain,
16545
- hasLocalDatabase: (portalId) => fs9.existsSync(dbPath(portalId)),
16621
+ hasLocalDatabase: (portalId) => fs10.existsSync(dbPath(portalId)),
16546
16622
  getFreshnessCheckedAt: (portalId) => Effect107.runPromise(
16547
16623
  pipe87(
16548
16624
  deps.portalData.getMetadata(portalId, LAST_DIFF_CHECKED_AT_KEY),
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@daeda/mcp-pro",
3
- "version": "0.1.26",
3
+ "version": "0.1.27",
4
4
  "description": "MCP server for HubSpot CRM — sync, query, and manage your portal data",
5
5
  "type": "module",
6
6
  "bin": {