@alfe.ai/openclaw-sync 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,672 @@
1
+ import { r as loadIgnorePatterns, t as filterIgnored } from "./ignore.js";
2
+ import { mkdir, readFile, stat, writeFile } from "node:fs/promises";
3
+ import { createReadStream, existsSync } from "node:fs";
4
+ import { basename, dirname, extname, join } from "node:path";
5
+ import { createHash } from "node:crypto";
6
+ import { createLogger } from "@auriclabs/logger";
7
//#region src/config.ts
/**
 * AlfeSync configuration — read/write `.alfesync/config.json` in workspace root.
 */
// Name of the per-workspace directory that holds all AlfeSync state.
const CONFIG_DIR = ".alfesync";
// Name of the JSON config file stored inside CONFIG_DIR.
const CONFIG_FILE = "config.json";
13
/**
 * Resolve the .alfesync directory path for a given workspace root.
 *
 * @param {string} workspacePath - Absolute path of the workspace root.
 * @returns {string} Path to the workspace's `.alfesync` state directory.
 */
function configDir(workspacePath) {
  const stateDir = join(workspacePath, CONFIG_DIR);
  return stateDir;
}
19
/**
 * Resolve the config file path for a given workspace root.
 *
 * @param {string} workspacePath - Absolute path of the workspace root.
 * @returns {string} Path to `.alfesync/config.json`.
 */
function configPath(workspacePath) {
  return join(configDir(workspacePath), CONFIG_FILE);
}
25
/**
 * Check if a workspace has been initialized with AlfeSync.
 *
 * @param {string} workspacePath - Absolute path of the workspace root.
 * @returns {boolean} True when `.alfesync/config.json` exists.
 */
function isInitialized(workspacePath) {
  const file = configPath(workspacePath);
  return existsSync(file);
}
31
/**
 * Read the AlfeSync config from a workspace.
 * Returns null if not initialized, unreadable, or missing required fields.
 *
 * @param {string} workspacePath - Absolute path of the workspace root.
 * @returns {Promise<object|null>} Parsed config (with `workspacePath`
 *   forced to the given path), or null.
 */
async function readConfig(workspacePath) {
  const file = configPath(workspacePath);
  if (!existsSync(file)) return null;
  try {
    const parsed = JSON.parse(await readFile(file, "utf-8"));
    // All three credentials are required for the config to be usable.
    const complete = Boolean(parsed.agentId) && Boolean(parsed.token) && Boolean(parsed.apiUrl);
    if (!complete) return null;
    return {
      ...parsed,
      workspacePath
    };
  } catch {
    // Corrupt JSON or a read failure is treated as "not initialized".
    return null;
  }
}
50
/**
 * Write the AlfeSync config to a workspace.
 * Creates the .alfesync directory if it doesn't exist.
 *
 * @param {object} config - Config with agentId, orgId, token,
 *   workspacePath, and apiUrl.
 * @returns {Promise<void>}
 */
async function writeConfig(config) {
  const { workspacePath, agentId, orgId, token, apiUrl } = config;
  await mkdir(configDir(workspacePath), { recursive: true });
  // Persist only the known fields, in a stable key order.
  const payload = {
    agentId,
    orgId,
    token,
    workspacePath,
    apiUrl
  };
  const serialized = `${JSON.stringify(payload, null, 2)}\n`;
  await writeFile(configPath(workspacePath), serialized, "utf-8");
}
66
/**
 * Load config from workspace, throwing if not initialized.
 *
 * @param {string} workspacePath - Absolute path of the workspace root.
 * @returns {Promise<object>} The parsed config.
 * @throws {Error} When the workspace has no valid AlfeSync config.
 */
async function requireConfig(workspacePath) {
  const config = await readConfig(workspacePath);
  if (config === null) {
    throw new Error(`AlfeSync not initialized in ${workspacePath}. Run: alfesync init`);
  }
  return config;
}
74
+ //#endregion
75
//#region src/manifest.ts
/**
 * AlfeSync manifest — local file manifest at `.alfesync/manifest.json`.
 *
 * Tracks file hashes, sizes, sync timestamps, and storage classes
 * to enable efficient diff-based syncing.
 */
// Name of the manifest file stored inside the `.alfesync` directory.
const MANIFEST_FILE = "manifest.json";
83
/**
 * Resolve the manifest file path for a given workspace root.
 *
 * @param {string} workspacePath - Absolute path of the workspace root.
 * @returns {string} Path to `.alfesync/manifest.json`.
 */
function manifestPath(workspacePath) {
  const stateDir = configDir(workspacePath);
  return join(stateDir, MANIFEST_FILE);
}
89
/**
 * Read the local manifest. Returns empty manifest if not found or corrupt.
 *
 * @param {string} workspacePath - Absolute path of the workspace root.
 * @returns {Promise<{files: object}>} The manifest (possibly empty).
 */
async function readManifest(workspacePath) {
  const file = manifestPath(workspacePath);
  if (!existsSync(file)) return { files: {} };
  try {
    const raw = await readFile(file, "utf-8");
    return JSON.parse(raw);
  } catch {
    // A corrupt manifest degrades to "nothing synced yet".
    return { files: {} };
  }
}
102
/**
 * Write the local manifest, creating `.alfesync/` if needed.
 *
 * @param {string} workspacePath - Absolute path of the workspace root.
 * @param {{files: object}} manifest - Manifest to persist.
 * @returns {Promise<void>}
 */
async function writeManifest(workspacePath, manifest) {
  await mkdir(configDir(workspacePath), { recursive: true });
  const serialized = `${JSON.stringify(manifest, null, 2)}\n`;
  await writeFile(manifestPath(workspacePath), serialized, "utf-8");
}
109
/**
 * Update a single file entry in the local manifest (read-modify-write).
 *
 * @param {string} workspacePath - Absolute path of the workspace root.
 * @param {string} relativePath - Workspace-relative file path (the key).
 * @param {object} entry - Manifest entry (hash, size, lastSynced, storageClass).
 * @returns {Promise<void>}
 */
async function updateManifestEntry(workspacePath, relativePath, entry) {
  const manifest = await readManifest(workspacePath);
  manifest.files = {
    ...manifest.files,
    [relativePath]: entry
  };
  await writeManifest(workspacePath, manifest);
}
117
/**
 * Remove a file entry from the local manifest (no-op if absent).
 *
 * @param {string} workspacePath - Absolute path of the workspace root.
 * @param {string} relativePath - Workspace-relative file path to drop.
 * @returns {Promise<void>}
 */
async function removeManifestEntry(workspacePath, relativePath) {
  const manifest = await readManifest(workspacePath);
  // Rebuild the files map without the removed key.
  const { [relativePath]: _dropped, ...remaining } = manifest.files;
  manifest.files = remaining;
  await writeManifest(workspacePath, manifest);
}
125
/**
 * Compute SHA-256 hash of a file using streaming (memory-efficient).
 * Returns `sha256:<hex>` format.
 *
 * @param {string} filePath - Absolute path of the file to hash.
 * @returns {Promise<string>} `sha256:<hex digest>`; rejects on read errors.
 */
async function computeFileHash(filePath) {
  const digest = createHash("sha256");
  const stream = createReadStream(filePath);
  // Async iteration over the stream keeps memory bounded; a stream error
  // surfaces here as a rejection of the returned promise.
  for await (const chunk of stream) {
    digest.update(chunk);
  }
  return `sha256:${digest.digest("hex")}`;
}
140
/**
 * Diff the local manifest against the remote manifest.
 *
 * Returns lists of files to push, pull, and conflicts, plus files that
 * were previously synced locally but are now absent on the remote.
 *
 * @param {{files: object}} local - Local manifest.
 * @param {{files: object}} remote - Remote manifest (entries carry `modified`).
 * @returns {{toPush: string[], toPull: string[], conflicts: string[], remoteDeleted: string[]}}
 */
function diffManifests(local, remote) {
  const toPush = [];
  const toPull = [];
  const conflicts = [];
  const remoteDeleted = [];
  const localKeys = Object.keys(local.files);
  const localSet = new Set(localKeys);
  const remoteSet = new Set(Object.keys(remote.files));
  // Pass 1: files only present locally are push candidates.
  for (const path of localKeys) {
    if (!remoteSet.has(path)) toPush.push(path);
  }
  // Pass 2: files only present remotely must be pulled.
  for (const path of remoteSet) {
    if (!localSet.has(path)) toPull.push(path);
  }
  // Pass 3: files on both sides with differing hashes — conflict when the
  // remote copy was modified after our last sync, otherwise push ours.
  for (const path of localKeys) {
    if (!remoteSet.has(path)) continue;
    const mine = local.files[path];
    const theirs = remote.files[path];
    if (mine.hash === theirs.hash) continue;
    const syncedAt = new Date(mine.lastSynced).getTime();
    const remoteModified = new Date(theirs.modified).getTime();
    if (remoteModified > syncedAt) {
      conflicts.push(path);
    } else {
      toPush.push(path);
    }
  }
  // Pass 4: previously-synced files that vanished from the remote.
  // NOTE(review): such paths were already added to toPush in pass 1, so a
  // previously-synced, remotely-deleted file appears in BOTH toPush and
  // remoteDeleted — confirm which signal callers should honor.
  for (const path of localKeys) {
    if (local.files[path].lastSynced !== "" && !remoteSet.has(path)) {
      remoteDeleted.push(path);
    }
  }
  return {
    toPush,
    toPull,
    conflicts,
    remoteDeleted
  };
}
171
+ //#endregion
172
//#region src/api-client.ts
/**
 * Create an AlfeSync API client.
 *
 * Wraps `fetch` with bearer-token auth against the configured API base
 * URL and unwraps the server's `{ success, data }` response envelope.
 *
 * @param {{apiUrl: string, token: string, agentId: string}} config
 * @returns {object} Client exposing manifest/presign/confirm/stats helpers.
 */
function createApiClient(config) {
  const { apiUrl, token, agentId } = config;
  // Strip a single trailing slash so path concatenation is unambiguous.
  const baseUrl = apiUrl.replace(/\/$/, "");
  // Pull a human-readable message out of an error response body.
  function extractError(text) {
    try {
      const parsed = JSON.parse(text);
      return (typeof parsed.error === "string" ? parsed.error : void 0) ?? (typeof parsed.message === "string" ? parsed.message : void 0) ?? text;
    } catch {
      return text;
    }
  }
  /**
   * Perform an authenticated JSON request and return the `data` payload.
   * Throws on non-2xx responses or `success: false` envelopes.
   */
  async function request(method, path, body) {
    const headers = {
      Authorization: `Bearer ${token}`,
      "Content-Type": "application/json"
    };
    const response = await fetch(`${baseUrl}${path}`, {
      method,
      headers,
      body: body ? JSON.stringify(body) : void 0
    });
    if (!response.ok) {
      const errorMsg = extractError(await response.text());
      throw new Error(`AlfeSync API error (${String(response.status)}): ${errorMsg}`);
    }
    const envelope = await response.json();
    if (!envelope.success) throw new Error("AlfeSync API returned unsuccessful response");
    return envelope.data;
  }
  // Shared helper: POST a batch of operations to the presign endpoint.
  async function presign(files) {
    const result = await request("POST", `/sync/agents/${agentId}/presign`, { files });
    return result.urls;
  }
  return {
    /** Fetch the remote file manifest for this agent. */
    async getManifest() {
      return request("GET", `/sync/agents/${agentId}/manifest`);
    },
    /** Presign a single PUT upload URL. */
    async presignPut(filePath, contentType = "application/octet-stream") {
      const urls = await presign([{
        path: filePath,
        operation: "put",
        contentType
      }]);
      return urls[0];
    },
    /** Presign PUT upload URLs for a batch of files. */
    async presignPutBatch(files) {
      return presign(files.map((f) => ({
        path: f.path,
        operation: "put",
        contentType: f.contentType ?? "application/octet-stream"
      })));
    },
    /** Confirm a completed upload so the server records hash/size/class. */
    async confirmUpload(filePath, hash, size, storageClass = "STANDARD") {
      return request("POST", `/sync/agents/${agentId}/files/${filePath}/confirm`, {
        hash,
        size,
        storageClass
      });
    },
    /** Presign a single GET download URL. */
    async presignGet(filePath) {
      const urls = await presign([{
        path: filePath,
        operation: "get"
      }]);
      return urls[0];
    },
    /** Presign GET download URLs for a batch of paths. */
    async presignGetBatch(paths) {
      return presign(paths.map((p) => ({
        path: p,
        operation: "get"
      })));
    },
    /** Fetch storage/sync statistics for this agent. */
    async getStats() {
      return request("GET", `/sync/agents/${agentId}/stats`);
    },
    /** Register this agent with the server under a display name. */
    async registerAgent(displayName) {
      return request("POST", "/sync/agents", {
        agentId,
        displayName
      });
    },
    /** List stored versions of a file. */
    async getFileHistory(filePath) {
      const result = await request("GET", `/sync/agents/${agentId}/files/${filePath}/versions`);
      return result.versions;
    },
    /** Ask the server to reconstruct the agent's workspace state. */
    async reconstruct(mode = "full") {
      return request("POST", `/sync/agents/${agentId}/reconstruct`, { mode });
    }
  };
}
259
+ //#endregion
260
//#region src/uploader.ts
/**
 * AlfeSync uploader — upload changed files to S3 via presigned URLs.
 *
 * Flow per file:
 * 1. Request presigned PUT URL from API
 * 2. PUT file content directly to S3
 * 3. Notify API of completion (confirm endpoint)
 * 4. Update local manifest
 *
 * Retries 3x with exponential backoff on transient failures.
 */
const log$2 = createLogger("SyncUploader");
// Number of additional attempts after the first failure.
const MAX_RETRIES$1 = 3;
// Base backoff delay in ms; attempt n waits base * 2^n.
const BASE_DELAY_MS$1 = 1e3;
275
/**
 * Determine the storage class based on file path.
 *
 * Archived session/context files go to Glacier Instant Retrieval;
 * everything else uses the standard class.
 *
 * @param {string} relativePath - Workspace-relative file path.
 * @returns {"GLACIER_IR"|"STANDARD"} Storage class for the upload.
 */
function getStorageClass(relativePath) {
  const archivePrefixes = ["sessions/archive/", "context/archive/"];
  if (archivePrefixes.some((prefix) => relativePath.startsWith(prefix))) {
    return "GLACIER_IR";
  }
  return "STANDARD";
}
281
/**
 * Determine MIME type from file path.
 *
 * Matched by filename suffix; unknown extensions fall back to the
 * generic binary type.
 *
 * @param {string} relativePath - Workspace-relative file path.
 * @returns {string} MIME type for the upload.
 */
function getContentType(relativePath) {
  const typesBySuffix = [
    [".json", "application/json"],
    [".md", "text/markdown"],
    [".txt", "text/plain"],
    [".gz", "application/gzip"],
    [".yaml", "text/yaml"],
    [".yml", "text/yaml"]
  ];
  const match = typesBySuffix.find(([suffix]) => relativePath.endsWith(suffix));
  return match ? match[1] : "application/octet-stream";
}
292
/**
 * Upload a single file with retry logic.
 *
 * Performs up to MAX_RETRIES$1 + 1 attempts with exponential backoff.
 * On success, confirms the upload with the API and records the file in
 * the local manifest. Never throws — failures are reported in the
 * returned result object.
 *
 * @param {string} workspacePath - Absolute workspace root.
 * @param {string} relativePath - Workspace-relative path of the file.
 * @param {object} client - API client from createApiClient.
 * @returns {Promise<{path: string, success: boolean, hash?: string, size?: number, error?: string}>}
 */
async function uploadFileWithRetry(workspacePath, relativePath, client) {
  const absolutePath = join(workspacePath, relativePath);
  let lastError;
  for (let attempt = 0; attempt <= MAX_RETRIES$1; attempt++) try {
    // Hash and stat concurrently; both read the file's current state.
    const [hash, fileStat] = await Promise.all([computeFileHash(absolutePath), stat(absolutePath)]);
    const size = fileStat.size;
    const storageClass = getStorageClass(relativePath);
    const contentType = getContentType(relativePath);
    const presigned = await client.presignPut(relativePath, contentType);
    // NOTE(review): the file is read again in full here after hashing; if
    // it changes between the two reads, the confirmed hash may not match
    // the uploaded bytes — confirm this race is acceptable.
    const fileContent = await readFile(absolutePath);
    const putResponse = await fetch(presigned.url, {
      method: "PUT",
      headers: { "Content-Type": contentType },
      body: fileContent
    });
    if (!putResponse.ok) throw new Error(`S3 PUT failed (${String(putResponse.status)}): ${await putResponse.text()}`);
    // Tell the API the object is in place, then record it locally.
    await client.confirmUpload(relativePath, hash, size, storageClass);
    await updateManifestEntry(workspacePath, relativePath, {
      hash,
      size,
      lastSynced: (/* @__PURE__ */ new Date()).toISOString(),
      storageClass
    });
    return {
      path: relativePath,
      success: true,
      hash,
      size
    };
  } catch (err) {
    lastError = err instanceof Error ? err : new Error(String(err));
    // Back off before the next attempt (1s, 2s, 4s ...); no sleep after
    // the final attempt.
    if (attempt < MAX_RETRIES$1) {
      const delay = BASE_DELAY_MS$1 * Math.pow(2, attempt);
      await new Promise((resolve) => setTimeout(resolve, delay));
    }
  }
  // All attempts exhausted — report the last error instead of throwing.
  return {
    path: relativePath,
    success: false,
    error: lastError?.message ?? "Unknown error"
  };
}
337
/**
 * Upload multiple files to S3.
 *
 * Files are processed in batches of `concurrency`; each batch runs in
 * parallel and completes before the next batch starts.
 *
 * @param {string} workspacePath - Absolute workspace root.
 * @param {string[]} relativePaths - Workspace-relative paths to upload.
 * @param {object} client - API client from createApiClient.
 * @param {{concurrency?: number, quiet?: boolean}} [options]
 * @returns {Promise<object[]>} Per-file upload results, in input order.
 */
async function uploadFiles(workspacePath, relativePaths, client, options = {}) {
  const { concurrency = 5, quiet = false } = options;
  const results = [];
  for (let start = 0; start < relativePaths.length; start += concurrency) {
    const batch = relativePaths.slice(start, start + concurrency);
    const settled = await Promise.all(batch.map((path) => uploadFileWithRetry(workspacePath, path, client)));
    for (const result of settled) {
      results.push(result);
      if (quiet) continue;
      if (result.success) {
        log$2.info(`Uploaded ${result.path} (${formatBytes$1(result.size ?? 0)})`);
      } else {
        log$2.error(`Failed to upload ${result.path}: ${result.error ?? "Unknown error"}`);
      }
    }
  }
  return results;
}
356
/**
 * Render a byte count as a short human-readable string (B / KB / MB).
 *
 * @param {number} bytes - Non-negative byte count.
 * @returns {string} e.g. "512 B", "1.5 KB", "3.2 MB".
 */
function formatBytes$1(bytes) {
  const KIB = 1024;
  if (bytes < KIB) return `${String(bytes)} B`;
  if (bytes < KIB * KIB) return `${(bytes / KIB).toFixed(1)} KB`;
  return `${(bytes / (KIB * KIB)).toFixed(1)} MB`;
}
361
+ //#endregion
362
//#region src/downloader.ts
/**
 * AlfeSync downloader — download files from S3 via presigned URLs.
 *
 * Flow per file:
 * 1. Request presigned GET URL from API
 * 2. GET file content from S3
 * 3. Write to local disk
 * 4. Update local manifest
 */
const log$1 = createLogger("SyncDownloader");
// Number of additional attempts after the first failure.
const MAX_RETRIES = 3;
// Base backoff delay in ms; attempt n waits base * 2^n.
const BASE_DELAY_MS = 1e3;
375
/**
 * Download a single file with retry logic.
 *
 * Performs up to MAX_RETRIES + 1 attempts with exponential backoff,
 * writing the downloaded bytes to disk (creating parent directories)
 * and recording the file in the local manifest. Never throws — failures
 * are reported in the returned result object.
 *
 * @param {string} workspacePath - Absolute workspace root.
 * @param {string} relativePath - Workspace-relative path of the file.
 * @param {object} client - API client from createApiClient.
 * @param {object} [remoteEntry] - Remote manifest entry for this path, if known.
 * @returns {Promise<{path: string, success: boolean, size?: number, error?: string}>}
 */
async function downloadFileWithRetry(workspacePath, relativePath, client, remoteEntry) {
  let lastError;
  for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) try {
    const presigned = await client.presignGet(relativePath);
    const response = await fetch(presigned.url);
    if (!response.ok) throw new Error(`S3 GET failed (${String(response.status)}): ${await response.text()}`);
    // Whole object is buffered in memory before writing to disk.
    const buffer = Buffer.from(await response.arrayBuffer());
    const absolutePath = join(workspacePath, relativePath);
    await mkdir(dirname(absolutePath), { recursive: true });
    await writeFile(absolutePath, buffer);
    // NOTE(review): the manifest hash is copied from the remote entry (or
    // "" when absent) rather than recomputed from the downloaded bytes;
    // if they ever differ, detectLocalChanges would flag this file as
    // changed — confirm intended.
    await updateManifestEntry(workspacePath, relativePath, {
      hash: remoteEntry?.hash ?? "",
      size: buffer.length,
      lastSynced: (/* @__PURE__ */ new Date()).toISOString(),
      storageClass: remoteEntry?.storageClass ?? "STANDARD"
    });
    return {
      path: relativePath,
      success: true,
      size: buffer.length
    };
  } catch (err) {
    lastError = err instanceof Error ? err : new Error(String(err));
    // Back off before the next attempt (1s, 2s, 4s ...); no sleep after
    // the final attempt.
    if (attempt < MAX_RETRIES) {
      const delay = BASE_DELAY_MS * Math.pow(2, attempt);
      await new Promise((resolve) => setTimeout(resolve, delay));
    }
  }
  // All attempts exhausted — report the last error instead of throwing.
  return {
    path: relativePath,
    success: false,
    error: lastError?.message ?? "Unknown error"
  };
}
412
/**
 * Download multiple files from S3.
 *
 * Files are processed in batches of `concurrency`; each batch runs in
 * parallel and completes before the next batch starts.
 *
 * @param {string} workspacePath - Absolute workspace root.
 * @param {string[]} relativePaths - Workspace-relative paths to download.
 * @param {object} client - API client from createApiClient.
 * @param {{files: object}} [remoteManifest] - Remote manifest, used to carry
 *   hash/storageClass into the local manifest entries.
 * @param {{concurrency?: number, quiet?: boolean}} [options]
 * @returns {Promise<object[]>} Per-file download results, in input order.
 */
async function downloadFiles(workspacePath, relativePaths, client, remoteManifest, options = {}) {
  const { concurrency = 5, quiet = false } = options;
  const results = [];
  for (let start = 0; start < relativePaths.length; start += concurrency) {
    const batch = relativePaths.slice(start, start + concurrency);
    const settled = await Promise.all(batch.map((path) => downloadFileWithRetry(workspacePath, path, client, remoteManifest?.files[path])));
    for (const result of settled) {
      results.push(result);
      if (quiet) continue;
      if (result.success) {
        log$1.info(`Downloaded ${result.path} (${formatBytes(result.size ?? 0)})`);
      } else {
        log$1.error(`Failed to download ${result.path}: ${result.error ?? "Unknown error"}`);
      }
    }
  }
  return results;
}
431
/**
 * Render a byte count as a short human-readable string (B / KB / MB).
 *
 * @param {number} bytes - Non-negative byte count.
 * @returns {string} e.g. "512 B", "1.5 KB", "3.2 MB".
 */
function formatBytes(bytes) {
  const KIB = 1024;
  if (bytes < KIB) return `${String(bytes)} B`;
  if (bytes < KIB * KIB) return `${(bytes / KIB).toFixed(1)} KB`;
  return `${(bytes / (KIB * KIB)).toFixed(1)} MB`;
}
436
+ //#endregion
437
//#region src/sync-engine.ts
/**
 * AlfeSync engine — orchestrates push, pull, and full sync operations.
 *
 * Handles:
 * - push(paths[]): upload changed files to S3
 * - pull(): download files newer on remote
 * - fullSync(): bidirectional sync with conflict detection
 *
 * Conflict resolution: if remote file is newer than local manifest entry
 * AND local file has changed → write `.conflict-{timestamp}` alongside original.
 */
const log = createLogger("SyncEngine");
450
/**
 * Create a sync engine for a workspace.
 *
 * Loads the workspace config (throwing if AlfeSync is not initialized)
 * and returns an object exposing push/pull/fullSync/pullFiles and
 * removeLocalFile operations, plus the underlying config and client.
 *
 * @param {string} workspacePath - Absolute workspace root.
 * @returns {Promise<object>} The sync engine.
 * @throws {Error} When the workspace is not initialized (from requireConfig).
 */
async function createSyncEngine(workspacePath) {
  const config = await requireConfig(workspacePath);
  const client = createApiClient({
    apiUrl: config.apiUrl,
    token: config.token,
    agentId: config.agentId
  });
  return {
    config,
    client,
    /**
     * Upload changed files. With explicit `paths`, pushes those (minus
     * ignored ones); otherwise pushes everything detectLocalChanges finds.
     * `options.filter` keeps only paths starting with that prefix.
     */
    async push(paths, options = {}) {
      const { quiet = false, filter } = options;
      const ignorePatterns = await loadIgnorePatterns(workspacePath);
      let filesToPush;
      if (paths && paths.length > 0) filesToPush = filterIgnored(paths, ignorePatterns);
      else filesToPush = await detectLocalChanges(workspacePath, ignorePatterns);
      if (filter) filesToPush = filesToPush.filter((p) => p.startsWith(filter));
      if (filesToPush.length === 0) {
        if (!quiet) log.info("Nothing to push");
        return {
          pushed: 0,
          pulled: 0,
          conflicts: 0,
          errors: 0
        };
      }
      if (!quiet) log.info(`Pushing ${String(filesToPush.length)} file(s)`);
      const results = await uploadFiles(workspacePath, filesToPush, client, { quiet });
      const pushed = results.filter((r) => r.success).length;
      const errors = results.filter((r) => !r.success).length;
      if (!quiet) log.info(`Push complete: ${String(pushed)} uploaded, ${String(errors)} failed`);
      return {
        pushed,
        pulled: 0,
        conflicts: 0,
        errors
      };
    },
    /**
     * Download files that exist only on the remote. Conflicting files are
     * counted in the result but NOT downloaded here (see fullSync).
     */
    async pull(options = {}) {
      const { quiet = false } = options;
      const [localManifest, remoteManifest] = await Promise.all([readManifest(workspacePath), client.getManifest()]);
      const diff = diffManifests(localManifest, remoteManifest);
      const filesToPull = [...diff.toPull];
      if (filesToPull.length === 0) {
        if (!quiet) log.info("Nothing to pull");
        return {
          pushed: 0,
          pulled: 0,
          conflicts: diff.conflicts.length,
          errors: 0
        };
      }
      if (!quiet) log.info(`Pulling ${String(filesToPull.length)} file(s)`);
      const results = await downloadFiles(workspacePath, filesToPull, client, remoteManifest, { quiet });
      const pulled = results.filter((r) => r.success).length;
      const errors = results.filter((r) => !r.success).length;
      if (!quiet) log.info(`Pull complete: ${String(pulled)} downloaded, ${String(errors)} failed`);
      return {
        pushed: 0,
        pulled,
        conflicts: diff.conflicts.length,
        errors
      };
    },
    /**
     * Bidirectional sync. Manifest conflicts where the local file also
     * changed ("true conflicts") get a timestamped `.conflict-*` copy of
     * the local file; the remote version then overwrites the original via
     * the pull list. Conflicts without local edits are simply pulled.
     */
    async fullSync(options = {}) {
      const { quiet = false } = options;
      const [localManifest, remoteManifest] = await Promise.all([readManifest(workspacePath), client.getManifest()]);
      const ignorePatterns = await loadIgnorePatterns(workspacePath);
      const localChanges = await detectLocalChanges(workspacePath, ignorePatterns);
      const diff = diffManifests(localManifest, remoteManifest);
      const trueConflicts = diff.conflicts.filter((p) => localChanges.includes(p));
      const remoteOnlyChanges = diff.conflicts.filter((p) => !localChanges.includes(p));
      let conflictCount = 0;
      for (const conflictPath of trueConflicts) {
        const absolutePath = join(workspacePath, conflictPath);
        if (existsSync(absolutePath)) {
          // Preserve the local version as `<name>.conflict-<timestamp><ext>`
          // before the remote copy overwrites the original.
          const ext = extname(conflictPath);
          const base = basename(conflictPath, ext);
          const dir = dirname(conflictPath);
          const conflictName = `${base}.conflict-${(/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-")}${ext}`;
          await writeFile(join(workspacePath, dir, conflictName), await readFile(absolutePath));
          if (!quiet) log.warn(`Conflict: ${conflictPath} — saved as ${conflictName}`);
          conflictCount++;
        }
      }
      // Push new/changed local files except those in conflict; the Set
      // below de-duplicates paths that appear in both inputs.
      const filesToPush = filterIgnored([...diff.toPush, ...localChanges.filter((p) => !diff.conflicts.includes(p))], ignorePatterns);
      const filesToPull = [
        ...diff.toPull,
        ...remoteOnlyChanges,
        ...trueConflicts
      ];
      const pushResult = {
        pushed: 0,
        errors: 0
      };
      const pullResult = {
        pulled: 0,
        errors: 0
      };
      if (filesToPush.length > 0) {
        if (!quiet) log.info(`Pushing ${String(filesToPush.length)} file(s)`);
        const results = await uploadFiles(workspacePath, [...new Set(filesToPush)], client, { quiet });
        pushResult.pushed = results.filter((r) => r.success).length;
        pushResult.errors = results.filter((r) => !r.success).length;
      }
      if (filesToPull.length > 0) {
        if (!quiet) log.info(`Pulling ${String(filesToPull.length)} file(s)`);
        const results = await downloadFiles(workspacePath, filesToPull, client, remoteManifest, { quiet });
        pullResult.pulled = results.filter((r) => r.success).length;
        pullResult.errors = results.filter((r) => !r.success).length;
      }
      const totalErrors = pushResult.errors + pullResult.errors;
      if (!quiet) log.info(`Sync complete: ${String(pushResult.pushed)} pushed, ${String(pullResult.pulled)} pulled, ${String(conflictCount)} conflicts, ${String(totalErrors)} errors`);
      return {
        pushed: pushResult.pushed,
        pulled: pullResult.pulled,
        conflicts: conflictCount,
        errors: totalErrors
      };
    },
    /**
     * Pull a specific set of paths (e.g. from a change notification).
     * Only paths that exist remotely AND differ from (or are absent in)
     * the local manifest are downloaded. No conflict detection here —
     * local files are overwritten.
     */
    async pullFiles(paths, options = {}) {
      const { quiet = false } = options;
      if (paths.length === 0) return {
        pushed: 0,
        pulled: 0,
        conflicts: 0,
        errors: 0
      };
      const remoteManifest = await client.getManifest();
      const localManifest = await readManifest(workspacePath);
      const filesToPull = paths.filter((p) => {
        if (!(p in remoteManifest.files)) return false;
        const remoteEntry = remoteManifest.files[p];
        if (!(p in localManifest.files)) return true;
        return localManifest.files[p].hash !== remoteEntry.hash;
      });
      if (filesToPull.length === 0) {
        if (!quiet) log.info("All notified files already in sync");
        return {
          pushed: 0,
          pulled: 0,
          conflicts: 0,
          errors: 0
        };
      }
      if (!quiet) log.info(`Pulling ${String(filesToPull.length)} changed file(s)`);
      const results = await downloadFiles(workspacePath, filesToPull, client, remoteManifest, { quiet });
      const pulled = results.filter((r) => r.success).length;
      const errors = results.filter((r) => !r.success).length;
      if (!quiet) log.info(`Pull complete: ${String(pulled)} downloaded, ${String(errors)} failed`);
      return {
        pushed: 0,
        pulled,
        conflicts: 0,
        errors
      };
    },
    /**
     * Delete a file locally (best-effort) and drop its manifest entry.
     * The manifest entry is removed even if the unlink fails.
     */
    async removeLocalFile(filePath, options = {}) {
      const { quiet = false } = options;
      const absolutePath = join(workspacePath, filePath);
      try {
        const { unlink } = await import("node:fs/promises");
        if (existsSync(absolutePath)) {
          await unlink(absolutePath);
          if (!quiet) log.info(`Deleted: ${filePath}`);
        }
      } catch (err) {
        // Best-effort delete: log and continue so the manifest stays consistent.
        if (!quiet) log.error({ err }, `Failed to delete ${filePath}`);
      }
      await removeManifestEntry(workspacePath, filePath);
    }
  };
}
626
/**
 * Detect files that have changed locally since last sync.
 *
 * Walks the workspace (skipping build/VCS directories and ignored
 * subtrees), compares current file hashes against the local manifest,
 * and returns the relative paths that need pushing.
 *
 * @param {string} workspacePath - Absolute workspace root.
 * @param {string[]} ignorePatterns - Patterns from loadIgnorePatterns.
 * @returns {Promise<string[]>} Workspace-relative paths needing a push.
 */
async function detectLocalChanges(workspacePath, ignorePatterns) {
  const manifest = await readManifest(workspacePath);
  const changed = [];
  const { readdir } = await import("node:fs/promises");
  // Fix: resolve shouldIgnore once up front instead of re-awaiting the
  // dynamic import for every single file visited inside the walk loop.
  const { shouldIgnore } = await import("./ignore.js").then((n) => n.n);
  async function walk(dir) {
    const entries = await readdir(dir, { withFileTypes: true });
    for (const entry of entries) {
      const fullPath = join(dir, entry.name);
      const relativePath = fullPath.slice(workspacePath.length + 1).replace(/\\/g, "/");
      // NOTE(review): this name-based skip applies to files too (e.g. a
      // plain file named "dist"), not only directories — confirm intended.
      if (shouldSkipDir(entry.name)) continue;
      if (entry.isDirectory()) {
        // Only descend when no "<dir>/**" ignore pattern covers this subtree.
        const subtreeIgnored = ignorePatterns.some((p) => p.endsWith("/**") && relativePath.startsWith(p.slice(0, -3)));
        if (!subtreeIgnored) await walk(fullPath);
      } else if (entry.isFile()) {
        if (shouldIgnore(relativePath, ignorePatterns)) continue;
        if (!(relativePath in manifest.files)) {
          // New file that has never been synced.
          changed.push(relativePath);
        } else {
          const manifestEntry = manifest.files[relativePath];
          try {
            if (await computeFileHash(fullPath) !== manifestEntry.hash) changed.push(relativePath);
          } catch {
            // Unreadable file (deleted mid-walk, permission error): skip
            // silently, preserving the previous best-effort behavior.
          }
        }
      }
    }
  }
  await walk(workspacePath);
  return changed;
}
663
/**
 * Directories to always skip during walks.
 *
 * @param {string} name - Bare entry name (not a path).
 * @returns {boolean} True when the entry should be skipped.
 */
function shouldSkipDir(name) {
  const alwaysSkipped = ["node_modules", ".git", ".sst", ".alfesync", ".build", "dist"];
  return alwaysSkipped.includes(name);
}
669
+ //#endregion
670
// Minified public surface of the bundle: single-letter aliases map to the
// implementation names above (e.g. `t` → createSyncEngine, `a` → computeFileHash).
export { computeFileHash as a, removeManifestEntry as c, configDir as d, configPath as f, writeConfig as g, requireConfig as h, createApiClient as i, updateManifestEntry as l, readConfig as m, downloadFiles as n, diffManifests as o, isInitialized as p, uploadFiles as r, readManifest as s, createSyncEngine as t, writeManifest as u };
671
+
672
+ //# sourceMappingURL=sync-engine.js.map