framer-code-link 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +196 -0
- package/dist/index.js +2021 -0
- package/dist/project-DhpsFg77.js +53 -0
- package/package.json +36 -0
- package/src/controller.test.ts +966 -0
- package/src/controller.ts +1212 -0
- package/src/helpers/connection.ts +95 -0
- package/src/helpers/files.test.ts +117 -0
- package/src/helpers/files.ts +378 -0
- package/src/helpers/installer.ts +534 -0
- package/src/helpers/sync-validator.ts +87 -0
- package/src/helpers/user-actions.ts +162 -0
- package/src/helpers/watcher.ts +115 -0
- package/src/index.ts +75 -0
- package/src/types.ts +107 -0
- package/src/utils/file-metadata-cache.ts +121 -0
- package/src/utils/hashing.ts +95 -0
- package/src/utils/imports.ts +62 -0
- package/src/utils/logging.ts +47 -0
- package/src/utils/paths.ts +76 -0
- package/src/utils/project.ts +94 -0
- package/src/utils/state-persistence.ts +138 -0
- package/tsconfig.json +14 -0
- package/vitest.config.ts +8 -0
|
@@ -0,0 +1,534 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Type installer helper
|
|
3
|
+
*
|
|
4
|
+
* Wraps @typescript/ata with our custom fetcher and initialization routines.
|
|
5
|
+
* This preserves the battle-tested logic from the classic CLI while
|
|
6
|
+
* conforming to the controller-centric architecture.
|
|
7
|
+
*/
|
|
8
|
+
|
|
9
|
+
import { setupTypeAcquisition } from "@typescript/ata"
|
|
10
|
+
import ts from "typescript"
|
|
11
|
+
import path from "path"
|
|
12
|
+
import fs from "fs/promises"
|
|
13
|
+
import { extractImports } from "../utils/imports.js"
|
|
14
|
+
import { info, debug, warn } from "../utils/logging.js"
|
|
15
|
+
|
|
16
|
+
/** Configuration for constructing an {@link Installer}. */
export interface InstallerConfig {
    // Root directory the installer writes into: fetched type files go under
    // <projectDir>/node_modules, and scaffolding files (tsconfig.json,
    // .prettierrc, framer-modules.d.ts, package.json) at its top level.
    projectDir: string
}
|
|
19
|
+
|
|
20
|
+
// Abort any single type-definition fetch after this long.
const FETCH_TIMEOUT_MS = 60_000
// Total fetch attempts in fetchWithRetry (the loop counts attempts, not retries).
const MAX_FETCH_RETRIES = 3
// Pinned @types/react version downloaded for Framer compatibility.
const REACT_TYPES_VERSION = "18.3.12"
// Pinned @types/react-dom version downloaded alongside it.
const REACT_DOM_TYPES_VERSION = "18.3.1"
// Libraries whose types are always acquired up front during initialization.
const CORE_LIBRARIES = ["framer-motion", "framer"]
// Files matching this extension are skipped by Installer.process().
const JSON_EXTENSION_REGEX = /\.json$/i
|
|
26
|
+
|
|
27
|
+
/**
|
|
28
|
+
* Installer class for managing automatic type acquisition.
|
|
29
|
+
*/
|
|
30
|
+
export class Installer {
|
|
31
|
+
private projectDir: string
|
|
32
|
+
private ata: ReturnType<typeof setupTypeAcquisition>
|
|
33
|
+
private processedImports = new Set<string>()
|
|
34
|
+
private initializationPromise: Promise<void> | null = null
|
|
35
|
+
|
|
36
|
+
constructor(config: InstallerConfig) {
|
|
37
|
+
this.projectDir = config.projectDir
|
|
38
|
+
|
|
39
|
+
const seenPackages = new Set<string>()
|
|
40
|
+
|
|
41
|
+
this.ata = setupTypeAcquisition({
|
|
42
|
+
projectName: "framer-code-link",
|
|
43
|
+
typescript: ts,
|
|
44
|
+
logger: console,
|
|
45
|
+
fetcher: fetchWithRetry,
|
|
46
|
+
delegate: {
|
|
47
|
+
started: () => {
|
|
48
|
+
seenPackages.clear()
|
|
49
|
+
debug("ATA: fetching type definitions...")
|
|
50
|
+
},
|
|
51
|
+
progress: () => {
|
|
52
|
+
// intentionally noop – progress noise is not helpful in CLI output
|
|
53
|
+
},
|
|
54
|
+
finished: (files) => {
|
|
55
|
+
if (files && files.size > 0) {
|
|
56
|
+
debug("ATA: type acquisition complete")
|
|
57
|
+
}
|
|
58
|
+
},
|
|
59
|
+
errorMessage: (message: string, error: Error) => {
|
|
60
|
+
warn(`ATA warning: ${message}`, error)
|
|
61
|
+
},
|
|
62
|
+
receivedFile: async (code: string, receivedPath: string) => {
|
|
63
|
+
const normalized = receivedPath.replace(/^\//, "")
|
|
64
|
+
const destination = path.join(this.projectDir, normalized)
|
|
65
|
+
|
|
66
|
+
const pkgMatch = receivedPath.match(
|
|
67
|
+
/\/node_modules\/(@?[^\/]+(?:\/[^\/]+)?)\//
|
|
68
|
+
)
|
|
69
|
+
|
|
70
|
+
// Check if file already exists with same content
|
|
71
|
+
let isFromCache = false
|
|
72
|
+
try {
|
|
73
|
+
const existing = await fs.readFile(destination, "utf-8")
|
|
74
|
+
if (existing === code) {
|
|
75
|
+
isFromCache = true
|
|
76
|
+
if (pkgMatch && !seenPackages.has(pkgMatch[1])) {
|
|
77
|
+
seenPackages.add(pkgMatch[1])
|
|
78
|
+
debug(`📦 Types: ${pkgMatch[1]} (from disk cache)`)
|
|
79
|
+
}
|
|
80
|
+
return // Skip write if identical
|
|
81
|
+
}
|
|
82
|
+
} catch {
|
|
83
|
+
// File doesn't exist or can't be read, proceed with write
|
|
84
|
+
}
|
|
85
|
+
|
|
86
|
+
if (pkgMatch && !seenPackages.has(pkgMatch[1])) {
|
|
87
|
+
seenPackages.add(pkgMatch[1])
|
|
88
|
+
info(`📦 Types: ${pkgMatch[1]}`)
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
await this.writeTypeFile(receivedPath, code)
|
|
92
|
+
},
|
|
93
|
+
},
|
|
94
|
+
})
|
|
95
|
+
|
|
96
|
+
info("Type installer initialized")
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
/**
|
|
100
|
+
* Ensure the project scaffolding exists (tsconfig, declarations, etc.)
|
|
101
|
+
*/
|
|
102
|
+
async initialize(): Promise<void> {
|
|
103
|
+
if (this.initializationPromise) {
|
|
104
|
+
return this.initializationPromise
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
this.initializationPromise = this.initializeProject()
|
|
108
|
+
.then(() => {
|
|
109
|
+
debug("Type installer initialization complete")
|
|
110
|
+
})
|
|
111
|
+
.catch((err) => {
|
|
112
|
+
this.initializationPromise = null
|
|
113
|
+
throw err
|
|
114
|
+
})
|
|
115
|
+
|
|
116
|
+
return this.initializationPromise
|
|
117
|
+
}
|
|
118
|
+
|
|
119
|
+
/**
|
|
120
|
+
* Fire-and-forget processing of a component file to fetch missing types.
|
|
121
|
+
* JSON files are ignored.
|
|
122
|
+
*/
|
|
123
|
+
process(fileName: string, content: string): void {
|
|
124
|
+
if (!content || JSON_EXTENSION_REGEX.test(fileName)) {
|
|
125
|
+
return
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
Promise.resolve()
|
|
129
|
+
.then(async () => {
|
|
130
|
+
await this.processImports(fileName, content)
|
|
131
|
+
})
|
|
132
|
+
.catch((err) => {
|
|
133
|
+
debug(`Type installer failed for ${fileName}`, err)
|
|
134
|
+
})
|
|
135
|
+
}
|
|
136
|
+
|
|
137
|
+
// ---------------------------------------------------------------------------
|
|
138
|
+
// Internal helpers
|
|
139
|
+
// ---------------------------------------------------------------------------
|
|
140
|
+
|
|
141
|
+
private async initializeProject(): Promise<void> {
|
|
142
|
+
await Promise.all([
|
|
143
|
+
this.ensureTsConfig(),
|
|
144
|
+
this.ensurePrettierConfig(),
|
|
145
|
+
this.ensureFramerDeclarations(),
|
|
146
|
+
this.ensurePackageJson(),
|
|
147
|
+
])
|
|
148
|
+
|
|
149
|
+
// Fire-and-forget type installation - don't block initialization
|
|
150
|
+
Promise.resolve()
|
|
151
|
+
.then(async () => {
|
|
152
|
+
await this.ensureReact18Types()
|
|
153
|
+
|
|
154
|
+
const coreImports = CORE_LIBRARIES.map(
|
|
155
|
+
(lib) => `import "${lib}";`
|
|
156
|
+
).join("\n")
|
|
157
|
+
await this.ata(coreImports)
|
|
158
|
+
})
|
|
159
|
+
.catch((err) => {
|
|
160
|
+
debug("Type installation failed", err)
|
|
161
|
+
})
|
|
162
|
+
}
|
|
163
|
+
|
|
164
|
+
private async processImports(
|
|
165
|
+
fileName: string,
|
|
166
|
+
content: string
|
|
167
|
+
): Promise<void> {
|
|
168
|
+
const imports = extractImports(content).filter((imp) => imp.type === "npm")
|
|
169
|
+
|
|
170
|
+
if (imports.length === 0) {
|
|
171
|
+
return
|
|
172
|
+
}
|
|
173
|
+
|
|
174
|
+
const hash = imports
|
|
175
|
+
.map((imp) => imp.name)
|
|
176
|
+
.sort()
|
|
177
|
+
.join(",")
|
|
178
|
+
|
|
179
|
+
if (this.processedImports.has(hash)) {
|
|
180
|
+
return
|
|
181
|
+
}
|
|
182
|
+
|
|
183
|
+
this.processedImports.add(hash)
|
|
184
|
+
info(`Processing imports for ${fileName} (${imports.length} packages)`)
|
|
185
|
+
|
|
186
|
+
try {
|
|
187
|
+
await this.ata(content)
|
|
188
|
+
} catch (err) {
|
|
189
|
+
warn(`ATA failed for ${fileName}`, err as Error)
|
|
190
|
+
}
|
|
191
|
+
}
|
|
192
|
+
|
|
193
|
+
private async writeTypeFile(
|
|
194
|
+
receivedPath: string,
|
|
195
|
+
code: string
|
|
196
|
+
): Promise<void> {
|
|
197
|
+
const normalized = receivedPath.replace(/^\//, "")
|
|
198
|
+
const destination = path.join(this.projectDir, normalized)
|
|
199
|
+
|
|
200
|
+
try {
|
|
201
|
+
await fs.mkdir(path.dirname(destination), { recursive: true })
|
|
202
|
+
await fs.writeFile(destination, code, "utf-8")
|
|
203
|
+
} catch (err) {
|
|
204
|
+
warn(`Failed to write type file ${destination}`, err)
|
|
205
|
+
return
|
|
206
|
+
}
|
|
207
|
+
|
|
208
|
+
if (normalized.match(/node_modules\/@types\/[^\/]+\/index\.d\.ts$/)) {
|
|
209
|
+
await this.ensureTypesPackageJson(normalized)
|
|
210
|
+
}
|
|
211
|
+
|
|
212
|
+
if (normalized.includes("node_modules/@types/react/index.d.ts")) {
|
|
213
|
+
await this.patchReactTypes(destination)
|
|
214
|
+
}
|
|
215
|
+
}
|
|
216
|
+
|
|
217
|
+
private async ensureTypesPackageJson(normalizedPath: string): Promise<void> {
|
|
218
|
+
const pkgMatch = normalizedPath.match(/node_modules\/(@types\/[^\/]+)\//)
|
|
219
|
+
if (!pkgMatch) return
|
|
220
|
+
|
|
221
|
+
const pkgName = pkgMatch[1]
|
|
222
|
+
const pkgDir = path.join(this.projectDir, "node_modules", pkgName)
|
|
223
|
+
const pkgJsonPath = path.join(pkgDir, "package.json")
|
|
224
|
+
|
|
225
|
+
try {
|
|
226
|
+
const response = await fetch(`https://registry.npmjs.org/${pkgName}`)
|
|
227
|
+
if (!response.ok) return
|
|
228
|
+
|
|
229
|
+
const npmData = await response.json()
|
|
230
|
+
const version = npmData["dist-tags"]?.latest
|
|
231
|
+
if (!version || !npmData.versions?.[version]) return
|
|
232
|
+
|
|
233
|
+
const pkg = npmData.versions[version]
|
|
234
|
+
|
|
235
|
+
if (pkg.exports && typeof pkg.exports === "object") {
|
|
236
|
+
const fixExport = (value: any): any => {
|
|
237
|
+
if (typeof value === "string") {
|
|
238
|
+
const tsPath = value
|
|
239
|
+
.replace(/\.js$/, ".d.ts")
|
|
240
|
+
.replace(/\.cjs$/, ".d.cts")
|
|
241
|
+
return { types: tsPath }
|
|
242
|
+
}
|
|
243
|
+
|
|
244
|
+
if (value && typeof value === "object") {
|
|
245
|
+
if ((value.import || value.require) && !value.types) {
|
|
246
|
+
const base = value.import || value.require
|
|
247
|
+
value.types = base
|
|
248
|
+
.replace(/\.js$/, ".d.ts")
|
|
249
|
+
.replace(/\.cjs$/, ".d.cts")
|
|
250
|
+
}
|
|
251
|
+
}
|
|
252
|
+
|
|
253
|
+
return value
|
|
254
|
+
}
|
|
255
|
+
|
|
256
|
+
for (const key of Object.keys(pkg.exports)) {
|
|
257
|
+
pkg.exports[key] = fixExport(pkg.exports[key])
|
|
258
|
+
}
|
|
259
|
+
}
|
|
260
|
+
|
|
261
|
+
await fs.mkdir(pkgDir, { recursive: true })
|
|
262
|
+
await fs.writeFile(pkgJsonPath, JSON.stringify(pkg, null, 2))
|
|
263
|
+
} catch {
|
|
264
|
+
// best-effort
|
|
265
|
+
}
|
|
266
|
+
}
|
|
267
|
+
|
|
268
|
+
private async patchReactTypes(destination: string): Promise<void> {
|
|
269
|
+
try {
|
|
270
|
+
let content = await fs.readFile(destination, "utf-8")
|
|
271
|
+
if (content.includes("function useRef<T = undefined>()")) {
|
|
272
|
+
return
|
|
273
|
+
}
|
|
274
|
+
|
|
275
|
+
const overloadPattern =
|
|
276
|
+
/function useRef<T>\(initialValue: T \| undefined\): RefObject<T \| undefined>;/
|
|
277
|
+
|
|
278
|
+
if (!overloadPattern.test(content)) {
|
|
279
|
+
return
|
|
280
|
+
}
|
|
281
|
+
|
|
282
|
+
content = content.replace(
|
|
283
|
+
overloadPattern,
|
|
284
|
+
`function useRef<T>(initialValue: T | undefined): RefObject<T | undefined>;
|
|
285
|
+
function useRef<T = undefined>(): MutableRefObject<T | undefined>;`
|
|
286
|
+
)
|
|
287
|
+
|
|
288
|
+
await fs.writeFile(destination, content, "utf-8")
|
|
289
|
+
} catch {
|
|
290
|
+
// ignore patch failures
|
|
291
|
+
}
|
|
292
|
+
}
|
|
293
|
+
|
|
294
|
+
private async ensureTsConfig(): Promise<void> {
|
|
295
|
+
const tsconfigPath = path.join(this.projectDir, "tsconfig.json")
|
|
296
|
+
try {
|
|
297
|
+
await fs.access(tsconfigPath)
|
|
298
|
+
debug("tsconfig.json already exists")
|
|
299
|
+
} catch {
|
|
300
|
+
const config = {
|
|
301
|
+
compilerOptions: {
|
|
302
|
+
noEmit: true,
|
|
303
|
+
target: "ES2021",
|
|
304
|
+
lib: ["ES2021", "DOM", "DOM.Iterable"],
|
|
305
|
+
module: "ESNext",
|
|
306
|
+
moduleResolution: "bundler",
|
|
307
|
+
customConditions: ["source"],
|
|
308
|
+
jsx: "react-jsx",
|
|
309
|
+
allowJs: true,
|
|
310
|
+
allowSyntheticDefaultImports: true,
|
|
311
|
+
strict: false,
|
|
312
|
+
allowImportingTsExtensions: true,
|
|
313
|
+
resolveJsonModule: true,
|
|
314
|
+
esModuleInterop: true,
|
|
315
|
+
skipLibCheck: true,
|
|
316
|
+
typeRoots: ["./node_modules/@types"],
|
|
317
|
+
},
|
|
318
|
+
include: ["files/**/*", "framer-modules.d.ts"],
|
|
319
|
+
}
|
|
320
|
+
await fs.writeFile(tsconfigPath, JSON.stringify(config, null, 2))
|
|
321
|
+
info("Created tsconfig.json")
|
|
322
|
+
}
|
|
323
|
+
}
|
|
324
|
+
|
|
325
|
+
private async ensurePrettierConfig(): Promise<void> {
|
|
326
|
+
const prettierPath = path.join(this.projectDir, ".prettierrc")
|
|
327
|
+
try {
|
|
328
|
+
await fs.access(prettierPath)
|
|
329
|
+
debug(".prettierrc already exists")
|
|
330
|
+
} catch {
|
|
331
|
+
const config = {
|
|
332
|
+
tabWidth: 4,
|
|
333
|
+
semi: false,
|
|
334
|
+
trailingComma: "es5",
|
|
335
|
+
}
|
|
336
|
+
await fs.writeFile(prettierPath, JSON.stringify(config, null, 2))
|
|
337
|
+
info("Created .prettierrc")
|
|
338
|
+
}
|
|
339
|
+
}
|
|
340
|
+
|
|
341
|
+
private async ensureFramerDeclarations(): Promise<void> {
|
|
342
|
+
const declarationsPath = path.join(this.projectDir, "framer-modules.d.ts")
|
|
343
|
+
try {
|
|
344
|
+
await fs.access(declarationsPath)
|
|
345
|
+
debug("framer-modules.d.ts already exists")
|
|
346
|
+
} catch {
|
|
347
|
+
const declarations = `// Type declarations for Framer URL imports
|
|
348
|
+
declare module "https://framer.com/m/*"
|
|
349
|
+
|
|
350
|
+
declare module "https://framerusercontent.com/*"
|
|
351
|
+
|
|
352
|
+
declare module "*.json"
|
|
353
|
+
`
|
|
354
|
+
await fs.writeFile(declarationsPath, declarations)
|
|
355
|
+
info("Created framer-modules.d.ts")
|
|
356
|
+
}
|
|
357
|
+
}
|
|
358
|
+
|
|
359
|
+
private async ensurePackageJson(): Promise<void> {
|
|
360
|
+
const packagePath = path.join(this.projectDir, "package.json")
|
|
361
|
+
try {
|
|
362
|
+
await fs.access(packagePath)
|
|
363
|
+
debug("package.json already exists")
|
|
364
|
+
} catch {
|
|
365
|
+
const pkg = {
|
|
366
|
+
name: path.basename(this.projectDir),
|
|
367
|
+
version: "1.0.0",
|
|
368
|
+
private: true,
|
|
369
|
+
description: "Framer files synced with framer-code-link",
|
|
370
|
+
}
|
|
371
|
+
await fs.writeFile(packagePath, JSON.stringify(pkg, null, 2))
|
|
372
|
+
info("Created package.json")
|
|
373
|
+
}
|
|
374
|
+
}
|
|
375
|
+
|
|
376
|
+
private async ensureReact18Types(): Promise<void> {
|
|
377
|
+
const reactTypesDir = path.join(
|
|
378
|
+
this.projectDir,
|
|
379
|
+
"node_modules/@types/react"
|
|
380
|
+
)
|
|
381
|
+
|
|
382
|
+
const reactFiles = [
|
|
383
|
+
"package.json",
|
|
384
|
+
"index.d.ts",
|
|
385
|
+
"global.d.ts",
|
|
386
|
+
"jsx-runtime.d.ts",
|
|
387
|
+
"jsx-dev-runtime.d.ts",
|
|
388
|
+
]
|
|
389
|
+
|
|
390
|
+
if (
|
|
391
|
+
await this.hasTypePackage(reactTypesDir, REACT_TYPES_VERSION, reactFiles)
|
|
392
|
+
) {
|
|
393
|
+
info("📦 React types (from cache)")
|
|
394
|
+
} else {
|
|
395
|
+
info("Downloading React 18 types for Framer compatibility...")
|
|
396
|
+
await this.downloadTypePackage(
|
|
397
|
+
"@types/react",
|
|
398
|
+
REACT_TYPES_VERSION,
|
|
399
|
+
reactTypesDir,
|
|
400
|
+
reactFiles
|
|
401
|
+
)
|
|
402
|
+
}
|
|
403
|
+
|
|
404
|
+
const reactDomDir = path.join(
|
|
405
|
+
this.projectDir,
|
|
406
|
+
"node_modules/@types/react-dom"
|
|
407
|
+
)
|
|
408
|
+
|
|
409
|
+
const reactDomFiles = ["package.json", "index.d.ts", "client.d.ts"]
|
|
410
|
+
|
|
411
|
+
if (
|
|
412
|
+
await this.hasTypePackage(
|
|
413
|
+
reactDomDir,
|
|
414
|
+
REACT_DOM_TYPES_VERSION,
|
|
415
|
+
reactDomFiles
|
|
416
|
+
)
|
|
417
|
+
) {
|
|
418
|
+
info("📦 React DOM types (from cache)")
|
|
419
|
+
} else {
|
|
420
|
+
await this.downloadTypePackage(
|
|
421
|
+
"@types/react-dom",
|
|
422
|
+
REACT_DOM_TYPES_VERSION,
|
|
423
|
+
reactDomDir,
|
|
424
|
+
reactDomFiles
|
|
425
|
+
)
|
|
426
|
+
}
|
|
427
|
+
}
|
|
428
|
+
|
|
429
|
+
private async hasTypePackage(
|
|
430
|
+
destinationDir: string,
|
|
431
|
+
version: string,
|
|
432
|
+
files: string[]
|
|
433
|
+
): Promise<boolean> {
|
|
434
|
+
try {
|
|
435
|
+
const pkgJsonPath = path.join(destinationDir, "package.json")
|
|
436
|
+
const pkgJson = await fs.readFile(pkgJsonPath, "utf-8")
|
|
437
|
+
const parsed = JSON.parse(pkgJson)
|
|
438
|
+
|
|
439
|
+
if (parsed.version !== version) {
|
|
440
|
+
return false
|
|
441
|
+
}
|
|
442
|
+
|
|
443
|
+
for (const file of files) {
|
|
444
|
+
if (file === "package.json") continue
|
|
445
|
+
await fs.access(path.join(destinationDir, file))
|
|
446
|
+
}
|
|
447
|
+
|
|
448
|
+
return true
|
|
449
|
+
} catch {
|
|
450
|
+
return false
|
|
451
|
+
}
|
|
452
|
+
}
|
|
453
|
+
|
|
454
|
+
private async downloadTypePackage(
|
|
455
|
+
pkgName: string,
|
|
456
|
+
version: string,
|
|
457
|
+
destinationDir: string,
|
|
458
|
+
files: string[]
|
|
459
|
+
): Promise<void> {
|
|
460
|
+
const baseUrl = `https://unpkg.com/${pkgName}@${version}`
|
|
461
|
+
await fs.mkdir(destinationDir, { recursive: true })
|
|
462
|
+
|
|
463
|
+
await Promise.all(
|
|
464
|
+
files.map(async (file) => {
|
|
465
|
+
const destination = path.join(destinationDir, file)
|
|
466
|
+
|
|
467
|
+
// Check if file already exists
|
|
468
|
+
try {
|
|
469
|
+
await fs.access(destination)
|
|
470
|
+
return // Skip if exists
|
|
471
|
+
} catch {
|
|
472
|
+
// File doesn't exist, download it
|
|
473
|
+
}
|
|
474
|
+
|
|
475
|
+
try {
|
|
476
|
+
const response = await fetch(`${baseUrl}/${file}`)
|
|
477
|
+
if (!response.ok) return
|
|
478
|
+
const content = await response.text()
|
|
479
|
+
await fs.writeFile(destination, content)
|
|
480
|
+
} catch {
|
|
481
|
+
// ignore per-file failures
|
|
482
|
+
}
|
|
483
|
+
})
|
|
484
|
+
)
|
|
485
|
+
}
|
|
486
|
+
}
|
|
487
|
+
|
|
488
|
+
// -----------------------------------------------------------------------------
|
|
489
|
+
// Fetch helpers
|
|
490
|
+
// -----------------------------------------------------------------------------
|
|
491
|
+
|
|
492
|
+
async function fetchWithRetry(
|
|
493
|
+
url: string | URL | Request,
|
|
494
|
+
init?: RequestInit,
|
|
495
|
+
retries = MAX_FETCH_RETRIES
|
|
496
|
+
): Promise<Response> {
|
|
497
|
+
const urlString = typeof url === "string" ? url : url.toString()
|
|
498
|
+
|
|
499
|
+
for (let attempt = 1; attempt <= retries; attempt++) {
|
|
500
|
+
const controller = new AbortController()
|
|
501
|
+
const timeout = setTimeout(() => controller.abort(), FETCH_TIMEOUT_MS)
|
|
502
|
+
|
|
503
|
+
try {
|
|
504
|
+
const response = await fetch(url, {
|
|
505
|
+
...init,
|
|
506
|
+
signal: controller.signal,
|
|
507
|
+
})
|
|
508
|
+
clearTimeout(timeout)
|
|
509
|
+
return response
|
|
510
|
+
} catch (error: any) {
|
|
511
|
+
clearTimeout(timeout)
|
|
512
|
+
|
|
513
|
+
const isRetryable =
|
|
514
|
+
error?.cause?.code === "ECONNRESET" ||
|
|
515
|
+
error?.cause?.code === "ETIMEDOUT" ||
|
|
516
|
+
error?.cause?.code === "UND_ERR_CONNECT_TIMEOUT" ||
|
|
517
|
+
error?.message?.includes("timeout")
|
|
518
|
+
|
|
519
|
+
if (attempt < retries && isRetryable) {
|
|
520
|
+
const delay = attempt * 1_000
|
|
521
|
+
warn(
|
|
522
|
+
`Fetch failed (${error?.cause?.code || error?.message}) for ${urlString}, retrying in ${delay}ms...`
|
|
523
|
+
)
|
|
524
|
+
await new Promise((resolve) => setTimeout(resolve, delay))
|
|
525
|
+
continue
|
|
526
|
+
}
|
|
527
|
+
|
|
528
|
+
warn(`Fetch failed for ${urlString}`, error)
|
|
529
|
+
throw error
|
|
530
|
+
}
|
|
531
|
+
}
|
|
532
|
+
|
|
533
|
+
throw new Error(`Max retries exceeded for ${urlString}`)
|
|
534
|
+
}
|
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Sync Validation Helper
|
|
3
|
+
*
|
|
4
|
+
* Pure functions for validating incoming changes during live sync.
|
|
5
|
+
* Determines if a change should be applied, queued, or rejected.
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import type { FileInfo } from "../types.js"
|
|
9
|
+
import { hashFileContent } from "../utils/state-persistence.js"
|
|
10
|
+
import type { FileSyncMetadata } from "../utils/file-metadata-cache.js"
|
|
11
|
+
|
|
12
|
+
/**
 * Result of validating an incoming file change
 */
export type ChangeValidation =
    // Safe to write now: the file is brand-new locally, or a trusted update.
    | { action: "apply"; reason: "new-file" | "safe-update" }
    // Defer until the in-flight snapshot/handshake completes.
    | { action: "queue"; reason: "snapshot-in-progress" }
    // Drop the change (e.g. received while not in a syncing mode).
    | { action: "reject"; reason: "stale-base" | "unknown-file" }
|
|
19
|
+
|
|
20
|
+
/**
|
|
21
|
+
* Validates whether an incoming REMOTE file change should be applied
|
|
22
|
+
*
|
|
23
|
+
* During watching mode, we trust remote changes and apply them immediately.
|
|
24
|
+
* During snapshot_processing, we queue them for later (to avoid race conditions).
|
|
25
|
+
*
|
|
26
|
+
* Note: This is for INCOMING changes from remote. Local changes (from watcher)
|
|
27
|
+
* are handled separately and always sent during watching mode.
|
|
28
|
+
*/
|
|
29
|
+
export function validateIncomingChange(
|
|
30
|
+
file: FileInfo,
|
|
31
|
+
fileMeta: FileSyncMetadata | undefined,
|
|
32
|
+
currentMode: string
|
|
33
|
+
): ChangeValidation {
|
|
34
|
+
// Queue changes that arrive during snapshot processing
|
|
35
|
+
if (currentMode === "snapshot_processing" || currentMode === "handshaking") {
|
|
36
|
+
return { action: "queue", reason: "snapshot-in-progress" }
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
// During watching, apply changes immediately
|
|
40
|
+
if (currentMode === "watching") {
|
|
41
|
+
if (!fileMeta) {
|
|
42
|
+
// New file from remote
|
|
43
|
+
return { action: "apply", reason: "new-file" }
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
// Existing file - trust the remote (we're in steady state)
|
|
47
|
+
return { action: "apply", reason: "safe-update" }
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
// During conflict resolution, queue for now (could be enhanced later)
|
|
51
|
+
if (currentMode === "conflict_resolution") {
|
|
52
|
+
return { action: "queue", reason: "snapshot-in-progress" }
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
// Shouldn't receive changes while disconnected
|
|
56
|
+
return { action: "reject", reason: "unknown-file" }
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
/**
|
|
60
|
+
* Validates whether an outgoing LOCAL change should be sent to remote
|
|
61
|
+
*
|
|
62
|
+
* Checks if the local file has actually changed since last sync
|
|
63
|
+
* to avoid sending duplicate updates.
|
|
64
|
+
*
|
|
65
|
+
* Note: This will be used when WATCHER_EVENT is migrated to the state machine.
|
|
66
|
+
* Currently, the legacy watcher path always sends changes (with echo prevention).
|
|
67
|
+
*/
|
|
68
|
+
export function validateOutgoingChange(
|
|
69
|
+
fileName: string,
|
|
70
|
+
content: string,
|
|
71
|
+
fileMeta: FileSyncMetadata | undefined
|
|
72
|
+
): { shouldSend: boolean; reason: string } {
|
|
73
|
+
const currentHash = hashFileContent(content)
|
|
74
|
+
|
|
75
|
+
if (!fileMeta) {
|
|
76
|
+
// New local file
|
|
77
|
+
return { shouldSend: true, reason: "new-file" }
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
if (fileMeta.localHash === currentHash) {
|
|
81
|
+
// No change since we last saw this file
|
|
82
|
+
return { shouldSend: false, reason: "no-change" }
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
// File has changed
|
|
86
|
+
return { shouldSend: true, reason: "changed" }
|
|
87
|
+
}
|