framer-code-link 0.17.0 → 0.20.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.mjs +711 -186
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
|
@@ -3,10 +3,13 @@ import { createRequire } from "node:module";
|
|
|
3
3
|
import { Command } from "commander";
|
|
4
4
|
import fs from "fs/promises";
|
|
5
5
|
import path from "path";
|
|
6
|
-
import { WebSocketServer } from "ws";
|
|
7
6
|
import { createHash } from "crypto";
|
|
8
|
-
import { execSync } from "child_process";
|
|
9
|
-
import
|
|
7
|
+
import { execFile, execSync } from "child_process";
|
|
8
|
+
import nodeFs from "fs";
|
|
9
|
+
import os from "os";
|
|
10
|
+
import { promisify } from "util";
|
|
11
|
+
import https from "node:https";
|
|
12
|
+
import { WebSocketServer } from "ws";
|
|
10
13
|
import { setupTypeAcquisition } from "@typescript/ata";
|
|
11
14
|
import ts from "typescript";
|
|
12
15
|
import { fileURLToPath } from "node:url";
|
|
@@ -142,7 +145,7 @@ function pathJoin(...parts) {
|
|
|
142
145
|
});
|
|
143
146
|
return res;
|
|
144
147
|
}
|
|
145
|
-
function normalizePath
|
|
148
|
+
function normalizePath(filePath) {
|
|
146
149
|
if (!filePath) return "";
|
|
147
150
|
const isAbsolute = filePath.startsWith("/");
|
|
148
151
|
const segments = filePath.replace(/\\/g, "/").split("/");
|
|
@@ -159,12 +162,24 @@ function normalizePath$1(filePath) {
|
|
|
159
162
|
if (isAbsolute) return `/${normalized}`;
|
|
160
163
|
return normalized;
|
|
161
164
|
}
|
|
165
|
+
/**
|
|
166
|
+
* Use when you only want path normalization.
|
|
167
|
+
* Preserves the caller-provided extension so `Foo.ts` and `Foo.tsx` stay distinct.
|
|
168
|
+
*/
|
|
162
169
|
function normalizeCodeFilePath(filePath) {
|
|
163
|
-
const normalized = normalizePath
|
|
170
|
+
const normalized = normalizePath(filePath);
|
|
164
171
|
return normalized.startsWith("/") ? normalized.slice(1) : normalized;
|
|
165
172
|
}
|
|
166
|
-
function
|
|
167
|
-
|
|
173
|
+
function ensureExtension(filePath, extension = ".tsx") {
|
|
174
|
+
const normalized = normalizeCodeFilePath(filePath);
|
|
175
|
+
return /\.(tsx?|jsx?|json)$/i.test(normalized) ? normalized : `${normalized}${extension}`;
|
|
176
|
+
}
|
|
177
|
+
/**
|
|
178
|
+
* Use when the path must match the code-file API contract.
|
|
179
|
+
* Normalizes the path and ensures a default `.tsx` extension when one is missing.
|
|
180
|
+
*/
|
|
181
|
+
function normalizeCodeFilePathWithExtension(filePath) {
|
|
182
|
+
return ensureExtension(filePath);
|
|
168
183
|
}
|
|
169
184
|
function sanitizeFilePath(input, capitalizeReactComponent = true) {
|
|
170
185
|
const trimmed = input.trim();
|
|
@@ -188,7 +203,7 @@ function isSupportedExtension$1(filePath) {
|
|
|
188
203
|
* Use this for Map keys on operating systems where "File.tsx" and "file.tsx" are the same file.
|
|
189
204
|
*/
|
|
190
205
|
function fileKeyForLookup(filePath) {
|
|
191
|
-
return
|
|
206
|
+
return normalizeCodeFilePath(filePath).toLowerCase();
|
|
192
207
|
}
|
|
193
208
|
/**
|
|
194
209
|
* Pluralize a word based on count
|
|
@@ -220,6 +235,11 @@ function getPortFromHash(projectHash) {
|
|
|
220
235
|
return 3847 + Math.abs(hash) % 250;
|
|
221
236
|
}
|
|
222
237
|
|
|
238
|
+
//#endregion
|
|
239
|
+
//#region ../code-link-shared/src/types.ts
|
|
240
|
+
/** Custom close code sent when a new plugin tab replaces the active one. */
|
|
241
|
+
const CLOSE_CODE_REPLACED = 4001;
|
|
242
|
+
|
|
223
243
|
//#endregion
|
|
224
244
|
//#region ../../node_modules/picocolors/picocolors.js
|
|
225
245
|
var require_picocolors = /* @__PURE__ */ __commonJSMin(((exports, module) => {
|
|
@@ -475,28 +495,250 @@ function resetDisconnectState() {
|
|
|
475
495
|
hadRecentDisconnect = false;
|
|
476
496
|
}
|
|
477
497
|
|
|
498
|
+
//#endregion
|
|
499
|
+
//#region src/helpers/certs.ts
|
|
500
|
+
/**
|
|
501
|
+
* Certificate management for WSS support.
|
|
502
|
+
*
|
|
503
|
+
* Downloads FiloSottile's mkcert binary on first run, then shells out to it
|
|
504
|
+
* to generate and trust a local CA + server certificate for wss://localhost.
|
|
505
|
+
*
|
|
506
|
+
* The mkcert binary is SHA-256 verified before execution (update
|
|
507
|
+
* MKCERT_CHECKSUMS when bumping MKCERT_VERSION). The CA key is user-only;
|
|
508
|
+
* never share or commit the cert directory.
|
|
509
|
+
*
|
|
510
|
+
* Certs and the mkcert binary are cached in ~/.framer/code-link/.
|
|
511
|
+
*/
|
|
512
|
+
const execFileAsync = promisify(execFile);
|
|
513
|
+
/** Keep in sync with MKCERT_CHECKSUMS below. */
|
|
514
|
+
const MKCERT_VERSION = "v1.4.4";
|
|
515
|
+
const CERT_DIR = process.env.FRAMER_CODE_LINK_CERT_DIR ?? path.join(os.homedir(), ".framer", "code-link");
|
|
516
|
+
const MKCERT_BIN_NAME = process.platform === "win32" ? "mkcert.exe" : "mkcert";
|
|
517
|
+
const MKCERT_BIN_PATH = path.join(CERT_DIR, MKCERT_BIN_NAME);
|
|
518
|
+
const ROOT_CA_CERT_PATH = path.join(CERT_DIR, "rootCA.pem");
|
|
519
|
+
const ROOT_CA_KEY_PATH = path.join(CERT_DIR, "rootCA-key.pem");
|
|
520
|
+
const SERVER_KEY_PATH = path.join(CERT_DIR, "localhost-key.pem");
|
|
521
|
+
const SERVER_CERT_PATH = path.join(CERT_DIR, "localhost.pem");
|
|
522
|
+
/**
|
|
523
|
+
* SHA-256 checksums for mkcert v1.4.4 release binaries, keyed by "platform-arch".
|
|
524
|
+
* These must be updated whenever MKCERT_VERSION changes.
|
|
525
|
+
* Source: https://github.com/FiloSottile/mkcert/releases/tag/v1.4.4
|
|
526
|
+
*/
|
|
527
|
+
const MKCERT_CHECKSUMS = {
|
|
528
|
+
"darwin-amd64": "a32dfab51f1845d51e810db8e47dcf0e6b51ae3422426514bf5a2b8302e97d4e",
|
|
529
|
+
"darwin-arm64": "c8af0df44bce04359794dad8ea28d750437411d632748049d08644ffb66a60c6",
|
|
530
|
+
"linux-amd64": "6d31c65b03972c6dc4a14ab429f2928300518b26503f58723e532d1b0a3bbb52",
|
|
531
|
+
"linux-arm64": "b98f2cc69fd9147fe4d405d859c57504571adec0d3611c3eefd04107c7ac00d0",
|
|
532
|
+
"windows-amd64": "d2660b50a9ed59eada480750561c96abc2ed4c9a38c6a24d93e30e0977631398",
|
|
533
|
+
"windows-arm64": "793747256c562622d40127c8080df26add2fb44c50906ce9db63b42a5280582e"
|
|
534
|
+
};
|
|
535
|
+
/** Env vars passed to every mkcert invocation. */
|
|
536
|
+
const MKCERT_ENV = {
|
|
537
|
+
...process.env,
|
|
538
|
+
CAROOT: CERT_DIR,
|
|
539
|
+
JAVA_HOME: "",
|
|
540
|
+
...process.platform === "darwin" ? { TRUST_STORES: "system" } : {}
|
|
541
|
+
};
|
|
542
|
+
/**
|
|
543
|
+
* Returns a TLS cert bundle for the WSS server, or null if generation fails.
|
|
544
|
+
* On first run, downloads mkcert, installs a local CA into trust stores, and
|
|
545
|
+
* generates a server cert for localhost.
|
|
546
|
+
*/
|
|
547
|
+
async function getOrCreateCerts() {
|
|
548
|
+
try {
|
|
549
|
+
await fs.mkdir(CERT_DIR, { recursive: true });
|
|
550
|
+
const mkcertPath = await ensureMkcertBinary();
|
|
551
|
+
const rootCAState = await syncRootCA(mkcertPath);
|
|
552
|
+
if (rootCAState !== "unchanged") await invalidateServerCerts(rootCAState);
|
|
553
|
+
const existingKey = await loadFile(SERVER_KEY_PATH);
|
|
554
|
+
const existingCert = await loadFile(SERVER_CERT_PATH);
|
|
555
|
+
if (existingKey && existingCert) {
|
|
556
|
+
debug("Loaded existing server certificates from disk");
|
|
557
|
+
return {
|
|
558
|
+
key: existingKey,
|
|
559
|
+
cert: existingCert
|
|
560
|
+
};
|
|
561
|
+
}
|
|
562
|
+
if (existingKey || existingCert) await invalidateIncompleteServerBundle();
|
|
563
|
+
status("Generating local certificates to connect securely. You may be asked for your password.");
|
|
564
|
+
await generateCerts(mkcertPath);
|
|
565
|
+
status("Successfully generated certificates.");
|
|
566
|
+
return {
|
|
567
|
+
key: await fs.readFile(SERVER_KEY_PATH, "utf-8"),
|
|
568
|
+
cert: await fs.readFile(SERVER_CERT_PATH, "utf-8")
|
|
569
|
+
};
|
|
570
|
+
} catch (err) {
|
|
571
|
+
error(`Failed to set up TLS certificates: ${err instanceof Error ? err.message : String(err)}`);
|
|
572
|
+
return null;
|
|
573
|
+
}
|
|
574
|
+
}
|
|
575
|
+
function getDownloadInfo() {
|
|
576
|
+
const platformMap = {
|
|
577
|
+
darwin: "darwin",
|
|
578
|
+
linux: "linux",
|
|
579
|
+
win32: "windows"
|
|
580
|
+
};
|
|
581
|
+
const archMap = {
|
|
582
|
+
x64: "amd64",
|
|
583
|
+
arm64: "arm64"
|
|
584
|
+
};
|
|
585
|
+
const platform = platformMap[process.platform];
|
|
586
|
+
const arch = archMap[process.arch];
|
|
587
|
+
if (!platform || !arch) throw new Error(`Unsupported platform: ${process.platform}/${process.arch}. Install mkcert manually: https://github.com/FiloSottile/mkcert#installation`);
|
|
588
|
+
const key = `${platform}-${arch}`;
|
|
589
|
+
const expectedChecksum = MKCERT_CHECKSUMS[key];
|
|
590
|
+
if (!expectedChecksum) throw new Error(`No checksum available for mkcert ${key}. Install mkcert manually: https://github.com/FiloSottile/mkcert#installation`);
|
|
591
|
+
return {
|
|
592
|
+
url: `https://github.com/FiloSottile/mkcert/releases/download/${MKCERT_VERSION}/${`mkcert-${MKCERT_VERSION}-${platform}-${arch}${process.platform === "win32" ? ".exe" : ""}`}`,
|
|
593
|
+
expectedChecksum
|
|
594
|
+
};
|
|
595
|
+
}
|
|
596
|
+
async function ensureMkcertBinary() {
|
|
597
|
+
const { url, expectedChecksum } = getDownloadInfo();
|
|
598
|
+
try {
|
|
599
|
+
await fs.access(MKCERT_BIN_PATH, nodeFs.constants.X_OK);
|
|
600
|
+
if (await verifyFileChecksum(MKCERT_BIN_PATH, expectedChecksum)) {
|
|
601
|
+
debug("mkcert binary already available and verified");
|
|
602
|
+
return MKCERT_BIN_PATH;
|
|
603
|
+
}
|
|
604
|
+
warn("Cached mkcert binary failed checksum verification, re-downloading...");
|
|
605
|
+
} catch {}
|
|
606
|
+
debug(`Downloading mkcert from ${url}`);
|
|
607
|
+
status("Downloading mkcert for certificate generation...");
|
|
608
|
+
try {
|
|
609
|
+
const response = await fetch(url, { redirect: "follow" });
|
|
610
|
+
if (!response.ok) throw new Error(`HTTP ${response.status}: ${response.statusText}`);
|
|
611
|
+
const buffer = Buffer.from(await response.arrayBuffer());
|
|
612
|
+
const actualChecksum = createHash("sha256").update(buffer).digest("hex");
|
|
613
|
+
if (actualChecksum !== expectedChecksum) throw new Error(`mkcert binary checksum mismatch — the download may have been tampered with.\n Expected: ${expectedChecksum}\n Actual: ${actualChecksum}`);
|
|
614
|
+
await fs.writeFile(MKCERT_BIN_PATH, buffer, { mode: 493 });
|
|
615
|
+
debug(`mkcert binary saved to ${MKCERT_BIN_PATH}`);
|
|
616
|
+
return MKCERT_BIN_PATH;
|
|
617
|
+
} catch (err) {
|
|
618
|
+
await fs.rm(MKCERT_BIN_PATH, { force: true });
|
|
619
|
+
const message = err instanceof Error ? err.message : String(err);
|
|
620
|
+
throw new Error(`Failed to download mkcert: ${message}\nYou can install it manually: https://github.com/FiloSottile/mkcert#installation\nThen run: mkcert -install && mkcert -key-file "${SERVER_KEY_PATH}" -cert-file "${SERVER_CERT_PATH}" localhost 127.0.0.1`);
|
|
621
|
+
}
|
|
622
|
+
}
|
|
623
|
+
async function generateCerts(mkcertPath) {
|
|
624
|
+
debug("Running mkcert to install the local root CA...");
|
|
625
|
+
try {
|
|
626
|
+
await execFileAsync(mkcertPath, ["-install"], { env: MKCERT_ENV });
|
|
627
|
+
} catch (err) {
|
|
628
|
+
throw new Error(`Failed to install mkcert root CA into the system trust store. If you canceled the password prompt, rerun this command and allow the install.
|
|
629
|
+
mkcert error: ${formatMkcertError(err)}`);
|
|
630
|
+
}
|
|
631
|
+
debug("Running mkcert to generate the localhost server certificate...");
|
|
632
|
+
try {
|
|
633
|
+
await execFileAsync(mkcertPath, [
|
|
634
|
+
"-key-file",
|
|
635
|
+
SERVER_KEY_PATH,
|
|
636
|
+
"-cert-file",
|
|
637
|
+
SERVER_CERT_PATH,
|
|
638
|
+
"localhost",
|
|
639
|
+
"127.0.0.1"
|
|
640
|
+
], { env: MKCERT_ENV });
|
|
641
|
+
} catch (err) {
|
|
642
|
+
if (await loadFile(SERVER_KEY_PATH) || await loadFile(SERVER_CERT_PATH)) await invalidateIncompleteServerBundle();
|
|
643
|
+
throw new Error(`Failed to generate localhost TLS certificate and key with mkcert.
|
|
644
|
+
mkcert error: ${formatMkcertError(err)}\nPlease rerun:\n mkcert -key-file "${SERVER_KEY_PATH}" -cert-file "${SERVER_CERT_PATH}" localhost 127.0.0.1`);
|
|
645
|
+
}
|
|
646
|
+
const [generatedKey, generatedCert] = await Promise.all([loadFile(SERVER_KEY_PATH), loadFile(SERVER_CERT_PATH)]);
|
|
647
|
+
if (generatedKey && generatedCert) {
|
|
648
|
+
debug("CA installed and server certificate generated successfully");
|
|
649
|
+
return;
|
|
650
|
+
}
|
|
651
|
+
if (generatedKey || generatedCert) await invalidateIncompleteServerBundle();
|
|
652
|
+
throw new Error(`Failed to generate localhost TLS certificate and key with mkcert. Please ensure mkcert is installed and rerun:
|
|
653
|
+
mkcert -install && mkcert -key-file "${SERVER_KEY_PATH}" -cert-file "${SERVER_CERT_PATH}" localhost 127.0.0.1`);
|
|
654
|
+
}
|
|
655
|
+
async function syncRootCA(mkcertPath) {
|
|
656
|
+
const existingRootCert = await loadFile(ROOT_CA_CERT_PATH);
|
|
657
|
+
const existingRootKey = await loadFile(ROOT_CA_KEY_PATH);
|
|
658
|
+
const { stdout } = await execFileAsync(mkcertPath, ["-CAROOT"], { env: {
|
|
659
|
+
...process.env,
|
|
660
|
+
JAVA_HOME: ""
|
|
661
|
+
} });
|
|
662
|
+
const defaultCAROOT = stdout.trim();
|
|
663
|
+
if (!defaultCAROOT || defaultCAROOT === CERT_DIR) return existingRootCert && existingRootKey ? "unchanged" : "missing";
|
|
664
|
+
const defaultRootCert = await loadFile(path.join(defaultCAROOT, "rootCA.pem"));
|
|
665
|
+
const defaultRootKey = await loadFile(path.join(defaultCAROOT, "rootCA-key.pem"));
|
|
666
|
+
if (!defaultRootCert || !defaultRootKey) return existingRootCert && existingRootKey ? "unchanged" : "missing";
|
|
667
|
+
if (existingRootCert === defaultRootCert && existingRootKey === defaultRootKey) return "unchanged";
|
|
668
|
+
await Promise.all([fs.rm(ROOT_CA_CERT_PATH, { force: true }), fs.rm(ROOT_CA_KEY_PATH, { force: true })]);
|
|
669
|
+
await fs.writeFile(ROOT_CA_CERT_PATH, defaultRootCert, { mode: 420 });
|
|
670
|
+
await fs.writeFile(ROOT_CA_KEY_PATH, defaultRootKey, { mode: 384 });
|
|
671
|
+
return existingRootCert && existingRootKey ? "updated" : "copied";
|
|
672
|
+
}
|
|
673
|
+
async function invalidateServerCerts(rootCAState) {
|
|
674
|
+
const reasons = {
|
|
675
|
+
copied: "Copied an existing mkcert root CA into the Code Link cache",
|
|
676
|
+
updated: "Detected a different mkcert root CA and refreshed the Code Link cache",
|
|
677
|
+
missing: "No cached mkcert root CA was available for the existing server certificate"
|
|
678
|
+
};
|
|
679
|
+
if (!(await loadFile(SERVER_KEY_PATH) !== null || await loadFile(SERVER_CERT_PATH) !== null)) return;
|
|
680
|
+
await fs.rm(SERVER_KEY_PATH, { force: true });
|
|
681
|
+
await fs.rm(SERVER_CERT_PATH, { force: true });
|
|
682
|
+
debug(`${reasons[rootCAState]}; removed stale localhost certificate`);
|
|
683
|
+
}
|
|
684
|
+
async function invalidateIncompleteServerBundle() {
|
|
685
|
+
await fs.rm(SERVER_KEY_PATH, { force: true });
|
|
686
|
+
await fs.rm(SERVER_CERT_PATH, { force: true });
|
|
687
|
+
warn("Found an incomplete localhost certificate bundle; regenerating it");
|
|
688
|
+
}
|
|
689
|
+
async function verifyFileChecksum(filePath, expectedHash) {
|
|
690
|
+
const data = await fs.readFile(filePath);
|
|
691
|
+
return createHash("sha256").update(data).digest("hex") === expectedHash;
|
|
692
|
+
}
|
|
693
|
+
async function loadFile(filePath) {
|
|
694
|
+
try {
|
|
695
|
+
return await fs.readFile(filePath, "utf-8");
|
|
696
|
+
} catch {
|
|
697
|
+
return null;
|
|
698
|
+
}
|
|
699
|
+
}
|
|
700
|
+
function formatMkcertError(err) {
|
|
701
|
+
if (err instanceof Error) {
|
|
702
|
+
const stdout = "stdout" in err && typeof err.stdout === "string" ? err.stdout.trim() : "";
|
|
703
|
+
const output = ["stderr" in err && typeof err.stderr === "string" ? err.stderr.trim() : "", stdout].filter(Boolean).join("\n");
|
|
704
|
+
return output ? `${err.message}\n${output}` : err.message;
|
|
705
|
+
}
|
|
706
|
+
return String(err);
|
|
707
|
+
}
|
|
708
|
+
|
|
478
709
|
//#endregion
|
|
479
710
|
//#region src/helpers/connection.ts
|
|
480
711
|
/**
|
|
481
|
-
*
|
|
482
|
-
*
|
|
712
|
+
* WebSocket connection helper
|
|
713
|
+
*
|
|
714
|
+
* Wrapper around ws.Server that normalizes handshake and surfaces callbacks.
|
|
715
|
+
*/
|
|
716
|
+
/**
|
|
717
|
+
* Initializes a WSS (TLS) WebSocket server and returns a connection interface.
|
|
718
|
+
* Returns a Promise that resolves when the server is ready, or rejects on startup errors.
|
|
483
719
|
*/
|
|
484
|
-
function initConnection(port) {
|
|
720
|
+
function initConnection(port, certs) {
|
|
485
721
|
return new Promise((resolve, reject) => {
|
|
486
|
-
const wss = new WebSocketServer({ port });
|
|
487
722
|
const handlers = {};
|
|
488
723
|
let connectionId = 0;
|
|
489
724
|
let isReady = false;
|
|
725
|
+
const httpsServer = https.createServer({
|
|
726
|
+
key: certs.key,
|
|
727
|
+
cert: certs.cert
|
|
728
|
+
});
|
|
729
|
+
const wss = new WebSocketServer({ server: httpsServer });
|
|
490
730
|
wss.on("error", (err) => {
|
|
731
|
+
error(`WebSocket server error: ${err.message}`);
|
|
732
|
+
handlers.onError?.(err);
|
|
733
|
+
});
|
|
734
|
+
const handleError = (err) => {
|
|
491
735
|
if (!isReady) {
|
|
492
736
|
if (err.code === "EADDRINUSE") {
|
|
493
737
|
error(`Port ${port} is already in use.`);
|
|
494
|
-
|
|
495
|
-
|
|
496
|
-
|
|
497
|
-
|
|
498
|
-
error(` 2. Or run: lsof -i :${port} | grep LISTEN`);
|
|
499
|
-
error(` Then kill the process: kill -9 <PID>`);
|
|
738
|
+
info(`This usually means another instance of Code Link is already running.`);
|
|
739
|
+
info(``);
|
|
740
|
+
info(`To fix this:`);
|
|
741
|
+
info(` Close any other terminal running Code Link for this project`);
|
|
500
742
|
reject(/* @__PURE__ */ new Error(`Port ${port} is already in use`));
|
|
501
743
|
} else {
|
|
502
744
|
error(`Failed to start WebSocket server: ${err.message}`);
|
|
@@ -505,10 +747,11 @@ function initConnection(port) {
|
|
|
505
747
|
return;
|
|
506
748
|
}
|
|
507
749
|
error(`WebSocket server error: ${err.message}`);
|
|
508
|
-
|
|
509
|
-
|
|
750
|
+
handlers.onError?.(err);
|
|
751
|
+
};
|
|
752
|
+
const handleListening = () => {
|
|
510
753
|
isReady = true;
|
|
511
|
-
debug(`
|
|
754
|
+
debug(`WSS server listening on port ${port}`);
|
|
512
755
|
let activeClient = null;
|
|
513
756
|
wss.on("connection", (ws) => {
|
|
514
757
|
const connId = ++connectionId;
|
|
@@ -524,7 +767,7 @@ function initConnection(port) {
|
|
|
524
767
|
activeClient = ws;
|
|
525
768
|
if (previousActiveClient && previousActiveClient !== activeClient) {
|
|
526
769
|
debug(`Replacing active client with conn ${connId}`);
|
|
527
|
-
if (previousActiveClient.readyState === READY_STATE.OPEN || previousActiveClient.readyState === READY_STATE.CONNECTING) previousActiveClient.close();
|
|
770
|
+
if (previousActiveClient.readyState === READY_STATE.OPEN || previousActiveClient.readyState === READY_STATE.CONNECTING) previousActiveClient.close(CLOSE_CODE_REPLACED);
|
|
528
771
|
}
|
|
529
772
|
handlers.onHandshake?.(ws, message);
|
|
530
773
|
} else if (handshakeReceived && activeClient === ws) handlers.onMessage?.(message);
|
|
@@ -564,9 +807,13 @@ function initConnection(port) {
|
|
|
564
807
|
},
|
|
565
808
|
close() {
|
|
566
809
|
wss.close();
|
|
810
|
+
httpsServer.close();
|
|
567
811
|
}
|
|
568
812
|
});
|
|
569
|
-
}
|
|
813
|
+
};
|
|
814
|
+
httpsServer.on("error", handleError);
|
|
815
|
+
httpsServer.on("listening", handleListening);
|
|
816
|
+
httpsServer.listen(port);
|
|
570
817
|
});
|
|
571
818
|
}
|
|
572
819
|
/**
|
|
@@ -608,41 +855,6 @@ function sendMessage(socket, message) {
|
|
|
608
855
|
});
|
|
609
856
|
}
|
|
610
857
|
|
|
611
|
-
//#endregion
|
|
612
|
-
//#region src/utils/node-paths.ts
|
|
613
|
-
/**
|
|
614
|
-
* Path manipulation utilities
|
|
615
|
-
*/
|
|
616
|
-
/**
|
|
617
|
-
* Gets a relative path from the project directory
|
|
618
|
-
*/
|
|
619
|
-
function getRelativePath(projectDir, absolutePath) {
|
|
620
|
-
return path.relative(projectDir, absolutePath);
|
|
621
|
-
}
|
|
622
|
-
/**
|
|
623
|
-
* Normalizes a file path by:
|
|
624
|
-
* - Converting backslashes to forward slashes
|
|
625
|
-
* - Resolving . and .. segments
|
|
626
|
-
* - Removing duplicate slashes
|
|
627
|
-
*/
|
|
628
|
-
function normalizePath(filePath) {
|
|
629
|
-
if (!filePath) return "";
|
|
630
|
-
const isAbsolute = filePath.startsWith("/");
|
|
631
|
-
const segments = filePath.replace(/\\/g, "/").split("/");
|
|
632
|
-
const stack = [];
|
|
633
|
-
for (const segment of segments) {
|
|
634
|
-
if (!segment || segment === ".") continue;
|
|
635
|
-
if (segment === "..") {
|
|
636
|
-
if (stack.length > 0) stack.pop();
|
|
637
|
-
continue;
|
|
638
|
-
}
|
|
639
|
-
stack.push(segment);
|
|
640
|
-
}
|
|
641
|
-
const normalized = stack.join("/");
|
|
642
|
-
if (isAbsolute) return `/${normalized}`;
|
|
643
|
-
return normalized;
|
|
644
|
-
}
|
|
645
|
-
|
|
646
858
|
//#endregion
|
|
647
859
|
//#region src/utils/state-persistence.ts
|
|
648
860
|
/**
|
|
@@ -654,18 +866,8 @@ function normalizePath(filePath) {
|
|
|
654
866
|
*/
|
|
655
867
|
const STATE_FILE_NAME = ".framer-sync-state.json";
|
|
656
868
|
const CURRENT_VERSION = 1;
|
|
657
|
-
|
|
658
|
-
|
|
659
|
-
".tsx",
|
|
660
|
-
".js",
|
|
661
|
-
".jsx",
|
|
662
|
-
".json"
|
|
663
|
-
];
|
|
664
|
-
const DEFAULT_EXTENSION$1 = ".tsx";
|
|
665
|
-
function normalizePersistedFileName(fileName) {
|
|
666
|
-
let normalized = normalizePath(fileName.trim());
|
|
667
|
-
if (!SUPPORTED_EXTENSIONS$1.some((ext) => normalized.toLowerCase().endsWith(ext))) normalized = `${normalized}${DEFAULT_EXTENSION$1}`;
|
|
668
|
-
return normalized;
|
|
869
|
+
function persistedFileKey(fileName) {
|
|
870
|
+
return fileKeyForLookup(normalizeCodeFilePathWithExtension(fileName));
|
|
669
871
|
}
|
|
670
872
|
/**
|
|
671
873
|
* Hash file content to detect changes
|
|
@@ -687,7 +889,7 @@ async function loadPersistedState(projectDir) {
|
|
|
687
889
|
return result;
|
|
688
890
|
}
|
|
689
891
|
for (const [fileName, state] of Object.entries(parsed.files)) {
|
|
690
|
-
const normalizedName =
|
|
892
|
+
const normalizedName = persistedFileKey(fileName);
|
|
691
893
|
if (normalizedName !== fileName) debug(`Normalized persisted key "${fileName}" -> "${normalizedName}" for compatibility`);
|
|
692
894
|
result.set(normalizedName, state);
|
|
693
895
|
}
|
|
@@ -755,7 +957,7 @@ async function listFiles(filesDir) {
|
|
|
755
957
|
continue;
|
|
756
958
|
}
|
|
757
959
|
if (!isSupportedExtension(entry.name)) continue;
|
|
758
|
-
const sanitizedPath = sanitizeFilePath(normalizePath
|
|
960
|
+
const sanitizedPath = sanitizeFilePath(normalizePath(path.relative(filesDir, entryPath)), false).path;
|
|
759
961
|
try {
|
|
760
962
|
const [content, stats] = await Promise.all([fs.readFile(entryPath, "utf-8"), fs.stat(entryPath)]);
|
|
761
963
|
files.push({
|
|
@@ -1000,9 +1202,9 @@ function resolveRemoteReference(filesDir, rawName) {
|
|
|
1000
1202
|
};
|
|
1001
1203
|
}
|
|
1002
1204
|
function sanitizeRelativePath(relativePath) {
|
|
1003
|
-
const trimmed = normalizePath
|
|
1205
|
+
const trimmed = normalizePath(relativePath.trim());
|
|
1004
1206
|
const sanitized = sanitizeFilePath(SUPPORTED_EXTENSIONS.some((ext) => trimmed.toLowerCase().endsWith(ext)) ? trimmed : `${trimmed}${DEFAULT_EXTENSION}`, false);
|
|
1005
|
-
const normalized = normalizePath
|
|
1207
|
+
const normalized = normalizePath(sanitized.path);
|
|
1006
1208
|
return {
|
|
1007
1209
|
relativePath: normalized,
|
|
1008
1210
|
extension: sanitized.extension || path.extname(normalized) || DEFAULT_EXTENSION
|
|
@@ -1061,6 +1263,7 @@ function tryGitInit(projectDir) {
|
|
|
1061
1263
|
debug("Already in a repository, skipping git init");
|
|
1062
1264
|
return false;
|
|
1063
1265
|
}
|
|
1266
|
+
status("Initializing git repository...");
|
|
1064
1267
|
execSync("git init", {
|
|
1065
1268
|
stdio: "ignore",
|
|
1066
1269
|
cwd: projectDir
|
|
@@ -1082,7 +1285,7 @@ function tryGitInit(projectDir) {
|
|
|
1082
1285
|
return true;
|
|
1083
1286
|
} catch (e) {
|
|
1084
1287
|
if (didInit) try {
|
|
1085
|
-
|
|
1288
|
+
nodeFs.rmSync(path.join(projectDir, ".git"), {
|
|
1086
1289
|
recursive: true,
|
|
1087
1290
|
force: true
|
|
1088
1291
|
});
|
|
@@ -1250,9 +1453,21 @@ async function findSkillsSourceDir() {
|
|
|
1250
1453
|
const FETCH_TIMEOUT_MS = 6e4;
|
|
1251
1454
|
const MAX_FETCH_RETRIES = 3;
|
|
1252
1455
|
const MAX_CONSECUTIVE_FAILURES = 10;
|
|
1253
|
-
const
|
|
1254
|
-
const
|
|
1255
|
-
|
|
1456
|
+
const FRAMER_PACKAGE_NAME = "framer";
|
|
1457
|
+
const CORE_LIBRARIES = [
|
|
1458
|
+
"framer-motion",
|
|
1459
|
+
"framer",
|
|
1460
|
+
"react",
|
|
1461
|
+
"react-dom"
|
|
1462
|
+
];
|
|
1463
|
+
/** Packages with pinned type versions — used by ATA's `// types:` comment syntax */
|
|
1464
|
+
const DEFAULT_PINNED_TYPE_VERSIONS = {
|
|
1465
|
+
"framer-motion": "12.34.3",
|
|
1466
|
+
"react": "18.2.0",
|
|
1467
|
+
"react-dom": "18.2.0",
|
|
1468
|
+
"@types/react": "18.2.0",
|
|
1469
|
+
"@types/react-dom": "18.2.0"
|
|
1470
|
+
};
|
|
1256
1471
|
const JSON_EXTENSION_REGEX = /\.json$/i;
|
|
1257
1472
|
/**
|
|
1258
1473
|
* Packages that are officially supported for type acquisition.
|
|
@@ -1262,6 +1477,7 @@ const SUPPORTED_PACKAGES = new Set([
|
|
|
1262
1477
|
"framer",
|
|
1263
1478
|
"framer-motion",
|
|
1264
1479
|
"react",
|
|
1480
|
+
"react-dom",
|
|
1265
1481
|
"@types/react",
|
|
1266
1482
|
"eventemitter3",
|
|
1267
1483
|
"csstype",
|
|
@@ -1277,6 +1493,8 @@ var Installer = class {
|
|
|
1277
1493
|
ata;
|
|
1278
1494
|
processedImports = /* @__PURE__ */ new Set();
|
|
1279
1495
|
initializationPromise = null;
|
|
1496
|
+
pinnedTypeVersions = { ...DEFAULT_PINNED_TYPE_VERSIONS };
|
|
1497
|
+
pinnedTypeVersionsPromise = null;
|
|
1280
1498
|
constructor(config) {
|
|
1281
1499
|
this.projectDir = config.projectDir;
|
|
1282
1500
|
this.allowUnsupportedNpm = config.allowUnsupportedNpm ?? false;
|
|
@@ -1357,10 +1575,12 @@ var Installer = class {
|
|
|
1357
1575
|
this.ensureSkills(),
|
|
1358
1576
|
this.ensureGitignore()
|
|
1359
1577
|
]);
|
|
1578
|
+
this.pinnedTypeVersionsPromise = this.resolvePinnedTypeVersions();
|
|
1360
1579
|
Promise.resolve().then(async () => {
|
|
1361
|
-
await this.
|
|
1362
|
-
const
|
|
1363
|
-
await this.
|
|
1580
|
+
const coreImports = await this.buildPinnedImports(CORE_LIBRARIES);
|
|
1581
|
+
const packageJsonDeps = this.allowUnsupportedNpm ? Object.keys(this.pinnedTypeVersions).filter((name) => !SUPPORTED_PACKAGES.has(name)) : [];
|
|
1582
|
+
const imports = [...coreImports, ...await this.buildPinnedImports(packageJsonDeps)].join("\n");
|
|
1583
|
+
await this.ata(imports);
|
|
1364
1584
|
}).catch((err) => {
|
|
1365
1585
|
debug("Type installation failed", err);
|
|
1366
1586
|
});
|
|
@@ -1371,15 +1591,18 @@ var Installer = class {
|
|
|
1371
1591
|
const imports = this.allowUnsupportedNpm ? allImports : allImports.filter((i) => this.isSupportedPackage(i.name));
|
|
1372
1592
|
if (allImports.length - imports.length > 0 && !this.allowUnsupportedNpm) debug(`Skipping unsupported packages: ${allImports.filter((i) => !this.isSupportedPackage(i.name)).map((i) => i.name).join(", ")} (use --unsupported-npm to enable)`);
|
|
1373
1593
|
if (imports.length === 0) return;
|
|
1374
|
-
|
|
1594
|
+
await this.pinnedTypeVersionsPromise;
|
|
1595
|
+
if (this.allowUnsupportedNpm) await this.resolvePackageJsonPins();
|
|
1596
|
+
const hash = imports.map((imp) => this.pinImport(imp.name)).sort().join(",");
|
|
1375
1597
|
if (this.processedImports.has(hash)) return;
|
|
1376
1598
|
this.processedImports.add(hash);
|
|
1377
1599
|
debug(`Processing imports for ${fileName} (${imports.length} packages)`);
|
|
1378
|
-
const filteredContent = this.allowUnsupportedNpm ? content : this.buildFilteredImports(imports);
|
|
1600
|
+
const filteredContent = this.allowUnsupportedNpm ? content : await this.buildFilteredImports(imports);
|
|
1379
1601
|
try {
|
|
1380
1602
|
await this.ata(filteredContent);
|
|
1381
1603
|
} catch (err) {
|
|
1382
|
-
warn(`
|
|
1604
|
+
warn(`Type fetching failed for ${fileName}`);
|
|
1605
|
+
debug(`ATA error for ${fileName}:`, err);
|
|
1383
1606
|
}
|
|
1384
1607
|
}
|
|
1385
1608
|
/**
|
|
@@ -1394,8 +1617,55 @@ var Installer = class {
|
|
|
1394
1617
|
/**
|
|
1395
1618
|
* Build synthetic import statements for ATA from filtered imports
|
|
1396
1619
|
*/
|
|
1397
|
-
buildFilteredImports(imports) {
|
|
1398
|
-
return imports.map((imp) =>
|
|
1620
|
+
async buildFilteredImports(imports) {
|
|
1621
|
+
return (await this.buildPinnedImports(imports.map((imp) => imp.name))).join("\n");
|
|
1622
|
+
}
|
|
1623
|
+
async buildPinnedImports(imports) {
|
|
1624
|
+
await this.pinnedTypeVersionsPromise;
|
|
1625
|
+
return imports.map((name) => this.pinImport(name));
|
|
1626
|
+
}
|
|
1627
|
+
async resolvePinnedTypeVersions() {
|
|
1628
|
+
try {
|
|
1629
|
+
const framerManifest = await fetchNpmPackageManifest(FRAMER_PACKAGE_NAME);
|
|
1630
|
+
const framerVersion = normalizePinnedVersion(framerManifest.version);
|
|
1631
|
+
if (framerVersion) this.pinnedTypeVersions.framer = framerVersion;
|
|
1632
|
+
for (const [pkg, defaultVersion] of Object.entries(DEFAULT_PINNED_TYPE_VERSIONS)) {
|
|
1633
|
+
const manifestDep = pkg.replace(/^@types\//, "");
|
|
1634
|
+
this.pinnedTypeVersions[pkg] = normalizePinnedVersion(getManifestDependencyVersion(framerManifest, manifestDep)) ?? defaultVersion;
|
|
1635
|
+
}
|
|
1636
|
+
debug(`Resolved ATA pins from ${FRAMER_PACKAGE_NAME}@${framerVersion ?? "latest"} (framer-motion ${this.pinnedTypeVersions["framer-motion"]}, react ${this.pinnedTypeVersions.react})`);
|
|
1637
|
+
} catch (err) {
|
|
1638
|
+
debug(`Falling back to default ATA pins for ${FRAMER_PACKAGE_NAME}`, err);
|
|
1639
|
+
}
|
|
1640
|
+
if (this.allowUnsupportedNpm) await this.resolvePackageJsonPins();
|
|
1641
|
+
}
|
|
1642
|
+
async resolvePackageJsonPins() {
|
|
1643
|
+
try {
|
|
1644
|
+
const pkgPath = path.join(this.projectDir, "package.json");
|
|
1645
|
+
const raw = await fs.readFile(pkgPath, "utf-8");
|
|
1646
|
+
const pkg = JSON.parse(raw);
|
|
1647
|
+
const allDeps = {
|
|
1648
|
+
...pkg.dependencies ?? {},
|
|
1649
|
+
...pkg.devDependencies ?? {}
|
|
1650
|
+
};
|
|
1651
|
+
for (const [name, range] of Object.entries(allDeps)) {
|
|
1652
|
+
const version = normalizePinnedVersion(range);
|
|
1653
|
+
if (version) this.pinnedTypeVersions[name] = version;
|
|
1654
|
+
}
|
|
1655
|
+
debug(`Resolved ${Object.keys(allDeps).length} package.json version pins`);
|
|
1656
|
+
} catch {
|
|
1657
|
+
warn("Could not read package.json for version pinning");
|
|
1658
|
+
}
|
|
1659
|
+
}
|
|
1660
|
+
/**
|
|
1661
|
+
* Build an import statement with an optional `// types:` version pin for ATA.
|
|
1662
|
+
* Resolves the base package name for subpath imports (e.g., "framer-motion/dist" -> "framer-motion").
|
|
1663
|
+
*/
|
|
1664
|
+
pinImport(name) {
|
|
1665
|
+
const base = name.startsWith("@") ? name.split("/").slice(0, 2).join("/") : name.split("/")[0];
|
|
1666
|
+
const version = this.pinnedTypeVersions[base];
|
|
1667
|
+
if (version) return `import "${name}"; // types: ${version}`;
|
|
1668
|
+
return `import "${name}";`;
|
|
1399
1669
|
}
|
|
1400
1670
|
async writeTypeFile(receivedPath, code) {
|
|
1401
1671
|
const normalized = receivedPath.replace(/^\//, "");
|
|
@@ -1419,7 +1689,8 @@ var Installer = class {
|
|
|
1419
1689
|
const response = await fetch(`https://registry.npmjs.org/${pkgName}`);
|
|
1420
1690
|
if (!response.ok) return;
|
|
1421
1691
|
const npmData = await response.json();
|
|
1422
|
-
const
|
|
1692
|
+
const pinnedVersion = this.pinnedTypeVersions[pkgName];
|
|
1693
|
+
const version = pinnedVersion ? this.findMatchingVersion(Object.keys(npmData.versions ?? {}), pinnedVersion) : npmData["dist-tags"]?.latest;
|
|
1423
1694
|
if (!version || !npmData.versions?.[version]) return;
|
|
1424
1695
|
const pkg = npmData.versions[version];
|
|
1425
1696
|
if (pkg.exports) for (const key of Object.keys(pkg.exports)) pkg.exports[key] = fixExportTypes(pkg.exports[key]);
|
|
@@ -1427,6 +1698,17 @@ var Installer = class {
|
|
|
1427
1698
|
await fs.writeFile(pkgJsonPath, JSON.stringify(pkg, null, 2));
|
|
1428
1699
|
} catch {}
|
|
1429
1700
|
}
|
|
1701
|
+
/**
|
|
1702
|
+
* Find the best matching version from a list of available versions.
|
|
1703
|
+
* Supports exact versions ("18.2.0") — returns exact match if available.
|
|
1704
|
+
*/
|
|
1705
|
+
findMatchingVersion(versions, pinned) {
|
|
1706
|
+
if (versions.includes(pinned)) return pinned;
|
|
1707
|
+
const [major, minor] = pinned.split(".");
|
|
1708
|
+
const prefix = `${major}.${minor}.`;
|
|
1709
|
+
const matching = versions.filter((v) => v.startsWith(prefix));
|
|
1710
|
+
return matching.length > 0 ? matching[matching.length - 1] : void 0;
|
|
1711
|
+
}
|
|
1430
1712
|
async ensureTsConfig() {
|
|
1431
1713
|
const tsconfigPath = path.join(this.projectDir, "tsconfig.json");
|
|
1432
1714
|
try {
|
|
@@ -1502,7 +1784,7 @@ declare module "*.json"
|
|
|
1502
1784
|
private: true,
|
|
1503
1785
|
description: "Framer files synced with framer-code-link"
|
|
1504
1786
|
};
|
|
1505
|
-
await fs.writeFile(packagePath, JSON.stringify(pkg, null,
|
|
1787
|
+
await fs.writeFile(packagePath, JSON.stringify(pkg, null, 4));
|
|
1506
1788
|
debug("Created package.json");
|
|
1507
1789
|
}
|
|
1508
1790
|
}
|
|
@@ -1532,61 +1814,19 @@ declare module "*.json"
|
|
|
1532
1814
|
await fs.writeFile(gitignorePath, content);
|
|
1533
1815
|
debug("Created .gitignore");
|
|
1534
1816
|
}
|
|
1535
|
-
async ensureReact18Types() {
|
|
1536
|
-
const reactTypesDir = path.join(this.projectDir, "node_modules/@types/react");
|
|
1537
|
-
const reactFiles = [
|
|
1538
|
-
"package.json",
|
|
1539
|
-
"index.d.ts",
|
|
1540
|
-
"global.d.ts",
|
|
1541
|
-
"jsx-runtime.d.ts",
|
|
1542
|
-
"jsx-dev-runtime.d.ts"
|
|
1543
|
-
];
|
|
1544
|
-
if (await this.hasTypePackage(reactTypesDir, REACT_TYPES_VERSION, reactFiles)) debug("📦 React types (from cache)");
|
|
1545
|
-
else {
|
|
1546
|
-
debug("Downloading React 18 types...");
|
|
1547
|
-
await this.downloadTypePackage("@types/react", REACT_TYPES_VERSION, reactTypesDir, reactFiles);
|
|
1548
|
-
}
|
|
1549
|
-
const reactDomDir = path.join(this.projectDir, "node_modules/@types/react-dom");
|
|
1550
|
-
const reactDomFiles = [
|
|
1551
|
-
"package.json",
|
|
1552
|
-
"index.d.ts",
|
|
1553
|
-
"client.d.ts"
|
|
1554
|
-
];
|
|
1555
|
-
if (await this.hasTypePackage(reactDomDir, REACT_DOM_TYPES_VERSION, reactDomFiles)) debug("📦 React DOM types (from cache)");
|
|
1556
|
-
else await this.downloadTypePackage("@types/react-dom", REACT_DOM_TYPES_VERSION, reactDomDir, reactDomFiles);
|
|
1557
|
-
}
|
|
1558
|
-
async hasTypePackage(destinationDir, version, files) {
|
|
1559
|
-
try {
|
|
1560
|
-
const pkgJsonPath = path.join(destinationDir, "package.json");
|
|
1561
|
-
const pkgJson = await fs.readFile(pkgJsonPath, "utf-8");
|
|
1562
|
-
if (JSON.parse(pkgJson).version !== version) return false;
|
|
1563
|
-
for (const file of files) {
|
|
1564
|
-
if (file === "package.json") continue;
|
|
1565
|
-
await fs.access(path.join(destinationDir, file));
|
|
1566
|
-
}
|
|
1567
|
-
return true;
|
|
1568
|
-
} catch {
|
|
1569
|
-
return false;
|
|
1570
|
-
}
|
|
1571
|
-
}
|
|
1572
|
-
async downloadTypePackage(pkgName, version, destinationDir, files) {
|
|
1573
|
-
const baseUrl = `https://unpkg.com/${pkgName}@${version}`;
|
|
1574
|
-
await fs.mkdir(destinationDir, { recursive: true });
|
|
1575
|
-
await Promise.all(files.map(async (file) => {
|
|
1576
|
-
const destination = path.join(destinationDir, file);
|
|
1577
|
-
try {
|
|
1578
|
-
await fs.access(destination);
|
|
1579
|
-
return;
|
|
1580
|
-
} catch {}
|
|
1581
|
-
try {
|
|
1582
|
-
const response = await fetch(`${baseUrl}/${file}`);
|
|
1583
|
-
if (!response.ok) return;
|
|
1584
|
-
const content = await response.text();
|
|
1585
|
-
await fs.writeFile(destination, content);
|
|
1586
|
-
} catch {}
|
|
1587
|
-
}));
|
|
1588
|
-
}
|
|
1589
1817
|
};
|
|
1818
|
+
/**
 * Looks up a dependency's declared version range in an npm manifest,
 * preferring peerDependencies over dependencies.
 */
function getManifestDependencyVersion(manifest, packageName) {
	const peerRange = manifest.peerDependencies?.[packageName];
	if (peerRange != null) return peerRange;
	return manifest.dependencies?.[packageName];
}
|
|
1821
|
+
/**
 * Extracts a concrete semver triple (with optional prerelease tag) from a
 * version range string, e.g. "^18.2.0" -> "18.2.0". Returns undefined for
 * falsy input or ranges with no embedded x.y.z (e.g. "workspace:*", "latest").
 */
function normalizePinnedVersion(version) {
	if (!version) return undefined;
	const match = /\d+\.\d+\.\d+(?:-[0-9A-Za-z.-]+)?/.exec(version);
	return match?.[0];
}
|
|
1825
|
+
/**
 * Fetches the latest published manifest for a package from the npm registry.
 * @throws when the registry responds with a non-OK status.
 */
async function fetchNpmPackageManifest(packageName) {
	const manifestUrl = `https://registry.npmjs.org/${packageName}/latest`;
	const response = await fetchWithRetry(manifestUrl);
	if (response.ok) return await response.json();
	throw new Error(`Failed to fetch ${packageName} manifest: ${response.status}`);
}
|
|
1590
1830
|
/**
|
|
1591
1831
|
* Transform package.json exports to include .d.ts type paths
|
|
1592
1832
|
*/
|
|
@@ -1634,11 +1874,12 @@ async function fetchWithRetry(url, init, retries = MAX_FETCH_RETRIES) {
|
|
|
1634
1874
|
if (isRetryable) checkFatalFailure(urlString);
|
|
1635
1875
|
if (attempt < retries && isRetryable) {
|
|
1636
1876
|
const delay = attempt * 1e3;
|
|
1637
|
-
|
|
1877
|
+
debug(`Fetch failed for ${urlString}, retrying...`, error);
|
|
1638
1878
|
await new Promise((resolve) => setTimeout(resolve, delay));
|
|
1639
1879
|
continue;
|
|
1640
1880
|
}
|
|
1641
|
-
warn(`Fetch failed for ${urlString}
|
|
1881
|
+
warn(`Fetch failed for ${urlString}`);
|
|
1882
|
+
debug(`Fetch error details:`, error);
|
|
1642
1883
|
throw error;
|
|
1643
1884
|
}
|
|
1644
1885
|
}
|
|
@@ -1783,6 +2024,18 @@ function validateIncomingChange(fileMeta, currentMode) {
|
|
|
1783
2024
|
};
|
|
1784
2025
|
}
|
|
1785
2026
|
|
|
2027
|
+
//#endregion
|
|
2028
|
+
//#region src/utils/node-paths.ts
|
|
2029
|
+
/**
|
|
2030
|
+
* Path manipulation utilities
|
|
2031
|
+
*/
|
|
2032
|
+
/**
 * Gets a relative path from the project directory
 * @param {string} projectDir - base directory to resolve from
 * @param {string} absolutePath - path to express relative to projectDir
 * @returns {string} the platform-native relative path
 */
function getRelativePath(projectDir, absolutePath) {
	const relative = path.relative(projectDir, absolutePath);
	return relative;
}
|
|
2038
|
+
|
|
1786
2039
|
//#endregion
|
|
1787
2040
|
//#region src/helpers/watcher.ts
|
|
1788
2041
|
/**
|
|
@@ -1790,47 +2043,244 @@ function validateIncomingChange(fileMeta, currentMode) {
|
|
|
1790
2043
|
*
|
|
1791
2044
|
* Wrapper around chokidar that normalizes file paths and filters to ts, tsx, js, json.
|
|
1792
2045
|
*/
|
|
2046
|
+
// Window during which a delete followed by an add (or vice versa) is
// coalesced into a rename.
const RENAME_BUFFER_MS = 100;
/**
 * Scans pending watcher entries for exactly one whose contentHash equals
 * the given hash. Returns that entry's key, or undefined when zero or
 * multiple entries match (an ambiguous match cannot identify a rename).
 */
function findUniqueHashMatch(pendingItems, contentHash) {
	let found;
	for (const [key, entry] of pendingItems) {
		if (entry.contentHash === contentHash) {
			// A second match makes the rename ambiguous — bail out.
			if (found !== undefined) return undefined;
			found = key;
		}
	}
	return found;
}
|
|
2056
|
+
/**
 * When a delete arrives, checks whether a buffered add with the same
 * content hash exists — a unique match means the pair is a rename.
 * Returns the matched key and pending-add entry, or null.
 */
function matchPendingAddForDelete(contentHash, pendingAdds) {
	if (!contentHash) return null;
	const key = findUniqueHashMatch(pendingAdds, contentHash);
	if (!key) return null;
	const pendingAdd = pendingAdds.get(key);
	return pendingAdd ? { key, pendingAdd } : null;
}
|
|
2067
|
+
/**
 * When an add arrives, checks whether a buffered delete with the same
 * content hash exists — a unique match means the pair is a rename.
 * Returns the matched key and pending-delete entry, or null.
 */
function matchPendingDeleteForAdd(contentHash, pendingDeletes) {
	const key = findUniqueHashMatch(pendingDeletes, contentHash);
	if (!key) return null;
	const pendingDelete = pendingDeletes.get(key);
	return pendingDelete ? { key, pendingDelete } : null;
}
|
|
1793
2077
|
/**
|
|
1794
2078
|
* Initializes a file watcher for the given directory
|
|
1795
2079
|
*/
|
|
1796
2080
|
function initWatcher(filesDir) {
|
|
1797
2081
|
const handlers = [];
|
|
2082
|
+
const contentHashCache = /* @__PURE__ */ new Map();
|
|
2083
|
+
const pendingDeletes = /* @__PURE__ */ new Map();
|
|
2084
|
+
const pendingAdds = /* @__PURE__ */ new Map();
|
|
2085
|
+
const recentSanitizations = /* @__PURE__ */ new Set();
|
|
1798
2086
|
const watcher = chokidar.watch(filesDir, {
|
|
1799
2087
|
ignored: /(^|[/\\])\.\./,
|
|
1800
2088
|
persistent: true,
|
|
1801
2089
|
ignoreInitial: false
|
|
1802
2090
|
});
|
|
1803
2091
|
debug(`Watching directory: ${filesDir}`);
|
|
1804
|
-
const
|
|
1805
|
-
|
|
1806
|
-
|
|
1807
|
-
|
|
2092
|
+
const dispatchEvent = (event) => {
|
|
2093
|
+
let eventToDispatch = event;
|
|
2094
|
+
if (event.kind === "rename" && event.relativePath === event.oldRelativePath) {
|
|
2095
|
+
if (event.content === void 0) {
|
|
2096
|
+
warn(`Skipping invalid same-path rename without content: ${event.relativePath}`);
|
|
2097
|
+
return;
|
|
2098
|
+
}
|
|
2099
|
+
debug(`Converting same-path rename to change: ${event.relativePath}`);
|
|
2100
|
+
eventToDispatch = {
|
|
2101
|
+
kind: "change",
|
|
2102
|
+
relativePath: event.relativePath,
|
|
2103
|
+
content: event.content
|
|
2104
|
+
};
|
|
2105
|
+
}
|
|
2106
|
+
debug(`Watcher event: ${eventToDispatch.kind} ${eventToDispatch.relativePath}`);
|
|
2107
|
+
for (const handler of handlers) handler(eventToDispatch);
|
|
2108
|
+
};
|
|
2109
|
+
/**
|
|
2110
|
+
* Resolves the relative path identity for a watcher event.
|
|
2111
|
+
* Only "add" may rewrite that identity by successfully sanitizing on disk.
|
|
2112
|
+
*/
|
|
2113
|
+
const resolveRelativePath = async (kind, absolutePath) => {
|
|
2114
|
+
if (!isSupportedExtension$1(absolutePath)) return null;
|
|
2115
|
+
const rawRelativePath = normalizePath(getRelativePath(filesDir, absolutePath));
|
|
2116
|
+
let relativePath = rawRelativePath;
|
|
1808
2117
|
let effectiveAbsolutePath = absolutePath;
|
|
1809
|
-
if (
|
|
1810
|
-
const
|
|
1811
|
-
|
|
1812
|
-
|
|
1813
|
-
|
|
1814
|
-
|
|
1815
|
-
|
|
1816
|
-
|
|
1817
|
-
|
|
2118
|
+
if (kind === "add") {
|
|
2119
|
+
const sanitized = sanitizeFilePath(rawRelativePath, false);
|
|
2120
|
+
if (sanitized.path !== rawRelativePath) {
|
|
2121
|
+
const nextRelativePath = sanitized.path;
|
|
2122
|
+
const newAbsolutePath = path.join(filesDir, nextRelativePath);
|
|
2123
|
+
try {
|
|
2124
|
+
await fs.mkdir(path.dirname(newAbsolutePath), { recursive: true });
|
|
2125
|
+
await fs.rename(absolutePath, newAbsolutePath);
|
|
2126
|
+
debug(`Renamed ${rawRelativePath} -> ${nextRelativePath}`);
|
|
2127
|
+
relativePath = nextRelativePath;
|
|
2128
|
+
effectiveAbsolutePath = newAbsolutePath;
|
|
2129
|
+
recentSanitizations.add(rawRelativePath);
|
|
2130
|
+
recentSanitizations.add(nextRelativePath);
|
|
2131
|
+
setTimeout(() => {
|
|
2132
|
+
recentSanitizations.delete(rawRelativePath);
|
|
2133
|
+
recentSanitizations.delete(nextRelativePath);
|
|
2134
|
+
}, RENAME_BUFFER_MS * 3);
|
|
2135
|
+
} catch (err) {
|
|
2136
|
+
warn(`Failed to rename ${rawRelativePath}`, err);
|
|
2137
|
+
return {
|
|
2138
|
+
relativePath: rawRelativePath,
|
|
2139
|
+
effectiveAbsolutePath: absolutePath
|
|
2140
|
+
};
|
|
2141
|
+
}
|
|
2142
|
+
}
|
|
2143
|
+
}
|
|
2144
|
+
return {
|
|
2145
|
+
relativePath,
|
|
2146
|
+
effectiveAbsolutePath
|
|
2147
|
+
};
|
|
2148
|
+
};
|
|
2149
|
+
const emitEvent = async (kind, absolutePath) => {
|
|
2150
|
+
const rawRelPath = normalizePath(getRelativePath(filesDir, absolutePath));
|
|
2151
|
+
if (recentSanitizations.delete(rawRelPath)) {
|
|
2152
|
+
debug(`Suppressing sanitization echo: ${kind} ${rawRelPath}`);
|
|
2153
|
+
return;
|
|
2154
|
+
}
|
|
2155
|
+
const resolved = await resolveRelativePath(kind, absolutePath);
|
|
2156
|
+
if (!resolved) return;
|
|
2157
|
+
const { relativePath, effectiveAbsolutePath } = resolved;
|
|
2158
|
+
if (kind === "delete") {
|
|
2159
|
+
const lastHash = contentHashCache.get(relativePath);
|
|
2160
|
+
contentHashCache.delete(relativePath);
|
|
2161
|
+
const samePathPendingAdd = pendingAdds.get(relativePath);
|
|
2162
|
+
if (samePathPendingAdd) {
|
|
2163
|
+
clearTimeout(samePathPendingAdd.timer);
|
|
2164
|
+
pendingAdds.delete(relativePath);
|
|
2165
|
+
try {
|
|
2166
|
+
const latestContent = await fs.readFile(effectiveAbsolutePath, "utf-8");
|
|
2167
|
+
const latestHash = hashFileContent(latestContent);
|
|
2168
|
+
contentHashCache.set(relativePath, latestHash);
|
|
2169
|
+
dispatchEvent({
|
|
2170
|
+
kind: "change",
|
|
2171
|
+
relativePath,
|
|
2172
|
+
content: latestContent
|
|
2173
|
+
});
|
|
2174
|
+
} catch {
|
|
2175
|
+
if (samePathPendingAdd.previousContentHash !== void 0) dispatchEvent({
|
|
2176
|
+
kind: "delete",
|
|
2177
|
+
relativePath
|
|
2178
|
+
});
|
|
2179
|
+
else debug(`Suppressing transient add+delete: ${relativePath}`);
|
|
2180
|
+
}
|
|
2181
|
+
return;
|
|
2182
|
+
}
|
|
2183
|
+
const matchedAdd = matchPendingAddForDelete(lastHash, pendingAdds);
|
|
2184
|
+
if (matchedAdd) {
|
|
2185
|
+
clearTimeout(matchedAdd.pendingAdd.timer);
|
|
2186
|
+
pendingAdds.delete(matchedAdd.key);
|
|
2187
|
+
dispatchEvent({
|
|
2188
|
+
kind: "rename",
|
|
2189
|
+
relativePath: matchedAdd.pendingAdd.relativePath,
|
|
2190
|
+
oldRelativePath: relativePath,
|
|
2191
|
+
content: matchedAdd.pendingAdd.content
|
|
2192
|
+
});
|
|
2193
|
+
return;
|
|
1818
2194
|
}
|
|
2195
|
+
if (lastHash) {
|
|
2196
|
+
const timer = setTimeout(() => {
|
|
2197
|
+
pendingDeletes.delete(relativePath);
|
|
2198
|
+
dispatchEvent({
|
|
2199
|
+
kind: "delete",
|
|
2200
|
+
relativePath
|
|
2201
|
+
});
|
|
2202
|
+
}, RENAME_BUFFER_MS);
|
|
2203
|
+
pendingDeletes.set(relativePath, {
|
|
2204
|
+
relativePath,
|
|
2205
|
+
contentHash: lastHash,
|
|
2206
|
+
timer
|
|
2207
|
+
});
|
|
2208
|
+
} else dispatchEvent({
|
|
2209
|
+
kind: "delete",
|
|
2210
|
+
relativePath
|
|
2211
|
+
});
|
|
2212
|
+
return;
|
|
1819
2213
|
}
|
|
1820
2214
|
let content;
|
|
1821
|
-
|
|
2215
|
+
try {
|
|
1822
2216
|
content = await fs.readFile(effectiveAbsolutePath, "utf-8");
|
|
1823
2217
|
} catch (err) {
|
|
1824
2218
|
debug(`Failed to read file ${relativePath}:`, err);
|
|
1825
2219
|
return;
|
|
1826
2220
|
}
|
|
1827
|
-
const
|
|
2221
|
+
const previousContentHash = contentHashCache.get(relativePath);
|
|
2222
|
+
const contentHash = hashFileContent(content);
|
|
2223
|
+
contentHashCache.set(relativePath, contentHash);
|
|
2224
|
+
if (kind === "add") {
|
|
2225
|
+
const samePathPendingDelete = pendingDeletes.get(relativePath);
|
|
2226
|
+
if (samePathPendingDelete) {
|
|
2227
|
+
clearTimeout(samePathPendingDelete.timer);
|
|
2228
|
+
pendingDeletes.delete(relativePath);
|
|
2229
|
+
dispatchEvent({
|
|
2230
|
+
kind: "change",
|
|
2231
|
+
relativePath,
|
|
2232
|
+
content
|
|
2233
|
+
});
|
|
2234
|
+
return;
|
|
2235
|
+
}
|
|
2236
|
+
const matchedDelete = matchPendingDeleteForAdd(contentHash, pendingDeletes);
|
|
2237
|
+
if (matchedDelete) {
|
|
2238
|
+
clearTimeout(matchedDelete.pendingDelete.timer);
|
|
2239
|
+
pendingDeletes.delete(matchedDelete.key);
|
|
2240
|
+
dispatchEvent({
|
|
2241
|
+
kind: "rename",
|
|
2242
|
+
relativePath,
|
|
2243
|
+
oldRelativePath: matchedDelete.pendingDelete.relativePath,
|
|
2244
|
+
content
|
|
2245
|
+
});
|
|
2246
|
+
return;
|
|
2247
|
+
}
|
|
2248
|
+
const existingPendingAdd = pendingAdds.get(relativePath);
|
|
2249
|
+
if (existingPendingAdd) clearTimeout(existingPendingAdd.timer);
|
|
2250
|
+
const retainedPreviousContentHash = existingPendingAdd ? existingPendingAdd.previousContentHash : previousContentHash;
|
|
2251
|
+
const timer = setTimeout(() => {
|
|
2252
|
+
pendingAdds.delete(relativePath);
|
|
2253
|
+
dispatchEvent({
|
|
2254
|
+
kind: "add",
|
|
2255
|
+
relativePath,
|
|
2256
|
+
content
|
|
2257
|
+
});
|
|
2258
|
+
}, RENAME_BUFFER_MS);
|
|
2259
|
+
pendingAdds.set(relativePath, {
|
|
2260
|
+
relativePath,
|
|
2261
|
+
contentHash,
|
|
2262
|
+
content,
|
|
2263
|
+
timer,
|
|
2264
|
+
previousContentHash: retainedPreviousContentHash
|
|
2265
|
+
});
|
|
2266
|
+
return;
|
|
2267
|
+
}
|
|
2268
|
+
const pendingAdd = pendingAdds.get(relativePath);
|
|
2269
|
+
if (pendingAdd) {
|
|
2270
|
+
clearTimeout(pendingAdd.timer);
|
|
2271
|
+
pendingAdds.delete(relativePath);
|
|
2272
|
+
dispatchEvent({
|
|
2273
|
+
kind: "add",
|
|
2274
|
+
relativePath,
|
|
2275
|
+
content
|
|
2276
|
+
});
|
|
2277
|
+
return;
|
|
2278
|
+
}
|
|
2279
|
+
dispatchEvent({
|
|
1828
2280
|
kind,
|
|
1829
2281
|
relativePath,
|
|
1830
2282
|
content
|
|
1831
|
-
};
|
|
1832
|
-
debug(`Watcher event: ${kind} ${relativePath}`);
|
|
1833
|
-
for (const handler of handlers) handler(event);
|
|
2283
|
+
});
|
|
1834
2284
|
};
|
|
1835
2285
|
watcher.on("add", (filePath) => {
|
|
1836
2286
|
emitEvent("add", filePath);
|
|
@@ -1846,6 +2296,12 @@ function initWatcher(filesDir) {
|
|
|
1846
2296
|
handlers.push(handler);
|
|
1847
2297
|
},
|
|
1848
2298
|
async close() {
|
|
2299
|
+
for (const pending of pendingDeletes.values()) clearTimeout(pending.timer);
|
|
2300
|
+
for (const pending of pendingAdds.values()) clearTimeout(pending.timer);
|
|
2301
|
+
pendingDeletes.clear();
|
|
2302
|
+
pendingAdds.clear();
|
|
2303
|
+
contentHashCache.clear();
|
|
2304
|
+
recentSanitizations.clear();
|
|
1849
2305
|
await watcher.close();
|
|
1850
2306
|
}
|
|
1851
2307
|
};
|
|
@@ -1948,45 +2404,42 @@ var FileMetadataCache = class {
|
|
|
1948
2404
|
/**
 * Creates a tracker that remembers content hashes per (normalized) file path
 * so echoed remote changes and deletes can be skipped. Delete suppression is
 * time-boxed: a marked delete expires after 5 seconds.
 */
function createHashTracker() {
	const contentHashes = new Map();
	const deleteTimers = new Map();
	// All lookups key on the extension-preserving normalized path so
	// differently-spelled references to the same file collapse together.
	const keyFor = (filePath) => normalizeCodeFilePathWithExtension(filePath);
	return {
		remember(filePath, content) {
			contentHashes.set(keyFor(filePath), hashFileContent(content));
		},
		shouldSkip(filePath, content) {
			return contentHashes.get(keyFor(filePath)) === hashFileContent(content);
		},
		forget(filePath) {
			contentHashes.delete(keyFor(filePath));
		},
		clear() {
			contentHashes.clear();
		},
		markDelete(filePath) {
			const key = keyFor(filePath);
			const previous = deleteTimers.get(key);
			if (previous) clearTimeout(previous);
			// Auto-expire the suppression window after 5s.
			const timeout = setTimeout(() => {
				deleteTimers.delete(key);
			}, 5e3);
			deleteTimers.set(key, timeout);
		},
		shouldSkipDelete(filePath) {
			return deleteTimers.has(keyFor(filePath));
		},
		clearDelete(filePath) {
			const key = keyFor(filePath);
			const timeout = deleteTimers.get(key);
			if (timeout) clearTimeout(timeout);
			deleteTimers.delete(key);
		}
	};
}
|
|
1984
|
-
/**
|
|
1985
|
-
* Computes a SHA256 hash of file content for comparison
|
|
1986
|
-
*/
|
|
1987
|
-
function hashContent(content) {
|
|
1988
|
-
return createHash("sha256").update(content).digest("hex");
|
|
1989
|
-
}
|
|
1990
2443
|
|
|
1991
2444
|
//#endregion
|
|
1992
2445
|
//#region src/utils/project.ts
|
|
@@ -1994,7 +2447,7 @@ function toPackageName(name) {
|
|
|
1994
2447
|
return name.toLowerCase().replace(/[^a-z0-9-]/g, "-").replace(/^-+|-+$/g, "").replace(/-+/g, "-");
|
|
1995
2448
|
}
|
|
1996
2449
|
/**
 * Converts an arbitrary project name into a safe directory name:
 * disallowed characters become "-", outer whitespace and dashes are
 * stripped, and dash runs collapse. Spaces inside the name are preserved.
 */
function toDirectoryName(name) {
	const dashed = name.replace(/[^a-zA-Z0-9 -]/g, "-").trim();
	return dashed.replace(/^-+|-+$/g, "").replace(/-+/g, "-");
}
|
|
1999
2452
|
async function getProjectHashFromCwd() {
|
|
2000
2453
|
try {
|
|
@@ -2034,7 +2487,7 @@ async function findOrCreateProjectDirectory(options) {
|
|
|
2034
2487
|
shortProjectHash: shortId,
|
|
2035
2488
|
framerProjectName: projectName
|
|
2036
2489
|
};
|
|
2037
|
-
await fs.writeFile(path.join(projectDirectory, "package.json"), JSON.stringify(pkg, null,
|
|
2490
|
+
await fs.writeFile(path.join(projectDirectory, "package.json"), JSON.stringify(pkg, null, 4));
|
|
2038
2491
|
return {
|
|
2039
2492
|
directory: projectDirectory,
|
|
2040
2493
|
created: true,
|
|
@@ -2399,6 +2852,21 @@ function transition(state, event) {
|
|
|
2399
2852
|
fileNames: [relativePath]
|
|
2400
2853
|
});
|
|
2401
2854
|
break;
|
|
2855
|
+
case "rename":
|
|
2856
|
+
if (content === void 0 || !event.event.oldRelativePath) {
|
|
2857
|
+
effects.push(log("warn", `Rename event missing data: ${relativePath}`));
|
|
2858
|
+
return {
|
|
2859
|
+
state,
|
|
2860
|
+
effects
|
|
2861
|
+
};
|
|
2862
|
+
}
|
|
2863
|
+
effects.push(log("debug", `Local rename detected: ${event.event.oldRelativePath} → ${relativePath}`), {
|
|
2864
|
+
type: "SEND_FILE_RENAME",
|
|
2865
|
+
oldFileName: event.event.oldRelativePath,
|
|
2866
|
+
newFileName: relativePath,
|
|
2867
|
+
content
|
|
2868
|
+
});
|
|
2869
|
+
break;
|
|
2402
2870
|
}
|
|
2403
2871
|
return {
|
|
2404
2872
|
state,
|
|
@@ -2487,7 +2955,7 @@ function transition(state, event) {
|
|
|
2487
2955
|
* Returns additional events that should be processed (e.g., CONFLICTS_DETECTED after DETECT_CONFLICTS)
|
|
2488
2956
|
*/
|
|
2489
2957
|
async function executeEffect(effect, context) {
|
|
2490
|
-
const { config, hashTracker, installer, fileMetadataCache, userActions, syncState } = context;
|
|
2958
|
+
const { config, hashTracker, installer, fileMetadataCache, pendingRenameConfirmations, userActions, syncState } = context;
|
|
2491
2959
|
switch (effect.type) {
|
|
2492
2960
|
case "INIT_WORKSPACE":
|
|
2493
2961
|
if (!config.projectDir) {
|
|
@@ -2582,10 +3050,18 @@ async function executeEffect(effect, context) {
|
|
|
2582
3050
|
case "UPDATE_FILE_METADATA": {
|
|
2583
3051
|
if (!config.filesDir || !config.projectDir) return [];
|
|
2584
3052
|
const currentContent = await readFileSafe(effect.fileName, config.filesDir);
|
|
2585
|
-
|
|
2586
|
-
|
|
3053
|
+
const pendingRenameConfirmation = pendingRenameConfirmations.get(normalizeCodeFilePathWithExtension(effect.fileName));
|
|
3054
|
+
const syncedContent = currentContent ?? pendingRenameConfirmation?.content ?? null;
|
|
3055
|
+
if (syncedContent !== null) {
|
|
3056
|
+
const contentHash = hashFileContent(syncedContent);
|
|
2587
3057
|
fileMetadataCache.recordSyncedSnapshot(effect.fileName, contentHash, effect.remoteModifiedAt);
|
|
2588
3058
|
}
|
|
3059
|
+
if (pendingRenameConfirmation) {
|
|
3060
|
+
hashTracker.forget(pendingRenameConfirmation.oldFileName);
|
|
3061
|
+
fileMetadataCache.recordDelete(pendingRenameConfirmation.oldFileName);
|
|
3062
|
+
if (currentContent !== null) hashTracker.remember(effect.fileName, currentContent);
|
|
3063
|
+
pendingRenameConfirmations.delete(normalizeCodeFilePathWithExtension(effect.fileName));
|
|
3064
|
+
}
|
|
2589
3065
|
return [];
|
|
2590
3066
|
}
|
|
2591
3067
|
case "SEND_LOCAL_CHANGE": {
|
|
@@ -2612,6 +3088,37 @@ async function executeEffect(effect, context) {
|
|
|
2612
3088
|
}
|
|
2613
3089
|
return [];
|
|
2614
3090
|
}
|
|
3091
|
+
case "SEND_FILE_RENAME": {
|
|
3092
|
+
const normalizedNewFileName = normalizeCodeFilePathWithExtension(effect.newFileName);
|
|
3093
|
+
if (hashTracker.shouldSkip(normalizedNewFileName, effect.content) && hashTracker.shouldSkipDelete(effect.oldFileName)) {
|
|
3094
|
+
hashTracker.forget(normalizedNewFileName);
|
|
3095
|
+
hashTracker.clearDelete(effect.oldFileName);
|
|
3096
|
+
debug(`Skipping echoed rename ${effect.oldFileName} -> ${effect.newFileName}`);
|
|
3097
|
+
return [];
|
|
3098
|
+
}
|
|
3099
|
+
try {
|
|
3100
|
+
if (!syncState.socket) {
|
|
3101
|
+
warn(`No socket available to send rename ${effect.oldFileName} -> ${effect.newFileName}`);
|
|
3102
|
+
return [];
|
|
3103
|
+
}
|
|
3104
|
+
if (!await sendMessage(syncState.socket, {
|
|
3105
|
+
type: "file-rename",
|
|
3106
|
+
oldFileName: effect.oldFileName,
|
|
3107
|
+
newFileName: normalizedNewFileName,
|
|
3108
|
+
content: effect.content
|
|
3109
|
+
})) {
|
|
3110
|
+
warn(`Failed to send rename ${effect.oldFileName} -> ${effect.newFileName}`);
|
|
3111
|
+
return [];
|
|
3112
|
+
}
|
|
3113
|
+
pendingRenameConfirmations.set(normalizeCodeFilePathWithExtension(effect.newFileName), {
|
|
3114
|
+
oldFileName: effect.oldFileName,
|
|
3115
|
+
content: effect.content
|
|
3116
|
+
});
|
|
3117
|
+
} catch (err) {
|
|
3118
|
+
warn(`Failed to send rename ${effect.oldFileName} -> ${effect.newFileName}`);
|
|
3119
|
+
}
|
|
3120
|
+
return [];
|
|
3121
|
+
}
|
|
2615
3122
|
case "LOCAL_INITIATED_FILE_DELETE": {
|
|
2616
3123
|
const filesToDelete = effect.fileNames.filter((fileName) => {
|
|
2617
3124
|
const shouldSkip = hashTracker.shouldSkipDelete(fileName);
|
|
@@ -2679,9 +3186,9 @@ async function executeEffect(effect, context) {
|
|
|
2679
3186
|
* Starts the sync controller with the given configuration
|
|
2680
3187
|
*/
|
|
2681
3188
|
async function start(config) {
|
|
2682
|
-
status("Waiting for Plugin connection...");
|
|
2683
3189
|
const hashTracker = createHashTracker();
|
|
2684
3190
|
const fileMetadataCache = new FileMetadataCache();
|
|
3191
|
+
const pendingRenameConfirmations = /* @__PURE__ */ new Map();
|
|
2685
3192
|
let installer = null;
|
|
2686
3193
|
let syncState = {
|
|
2687
3194
|
mode: "disconnected",
|
|
@@ -2703,13 +3210,25 @@ async function start(config) {
|
|
|
2703
3210
|
hashTracker,
|
|
2704
3211
|
installer,
|
|
2705
3212
|
fileMetadataCache,
|
|
3213
|
+
pendingRenameConfirmations,
|
|
2706
3214
|
userActions,
|
|
2707
3215
|
syncState
|
|
2708
3216
|
});
|
|
2709
3217
|
for (const followUpEvent of followUpEvents) await processEvent(followUpEvent);
|
|
2710
3218
|
}
|
|
2711
3219
|
}
|
|
2712
|
-
const
|
|
3220
|
+
const certs = await getOrCreateCerts();
|
|
3221
|
+
if (!certs) {
|
|
3222
|
+
error("Failed to generate TLS certificates. The Framer plugin requires a secure (wss://) connection.");
|
|
3223
|
+
info("");
|
|
3224
|
+
info("To fix this:");
|
|
3225
|
+
info(" 1. Re-run this command — certificate generation is often a one-time issue");
|
|
3226
|
+
info(` 2. Manually delete "${String(CERT_DIR)}" and try again`);
|
|
3227
|
+
info("");
|
|
3228
|
+
throw new Error("TLS certificate generation failed");
|
|
3229
|
+
}
|
|
3230
|
+
status("Waiting for Plugin connection...");
|
|
3231
|
+
const connection = await initConnection(config.port, certs);
|
|
2713
3232
|
connection.on("handshake", (client, message) => {
|
|
2714
3233
|
debug(`Received handshake: ${message.projectName} (${message.projectId})`);
|
|
2715
3234
|
const expectedShort = shortProjectHash(config.projectHash);
|
|
@@ -2727,6 +3246,7 @@ async function start(config) {
|
|
|
2727
3246
|
return;
|
|
2728
3247
|
}
|
|
2729
3248
|
debug(`New handshake received in ${syncState.mode} mode, resetting sync state`);
|
|
3249
|
+
pendingRenameConfirmations.clear();
|
|
2730
3250
|
await processEvent({ type: "DISCONNECT" });
|
|
2731
3251
|
}
|
|
2732
3252
|
if (!wasRecentlyDisconnected() && !didShowDisconnect()) success(`Connected to ${message.projectName}`);
|
|
@@ -2808,6 +3328,10 @@ async function start(config) {
|
|
|
2808
3328
|
remoteModifiedAt: message.remoteModifiedAt
|
|
2809
3329
|
};
|
|
2810
3330
|
break;
|
|
3331
|
+
case "error":
|
|
3332
|
+
if (message.fileName) pendingRenameConfirmations.delete(normalizeCodeFilePathWithExtension(message.fileName));
|
|
3333
|
+
warn(message.message);
|
|
3334
|
+
return;
|
|
2811
3335
|
case "conflicts-resolved":
|
|
2812
3336
|
event = {
|
|
2813
3337
|
type: "CONFLICTS_RESOLVED",
|
|
@@ -2844,6 +3368,7 @@ async function start(config) {
|
|
|
2844
3368
|
status("Disconnected, waiting to reconnect...");
|
|
2845
3369
|
});
|
|
2846
3370
|
(async () => {
|
|
3371
|
+
pendingRenameConfirmations.clear();
|
|
2847
3372
|
await processEvent({ type: "DISCONNECT" });
|
|
2848
3373
|
userActions.cleanup();
|
|
2849
3374
|
})();
|