pushwork 2.0.0-a.sub.0 → 2.0.0-preview
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/branches.d.ts +19 -0
- package/dist/branches.d.ts.map +1 -0
- package/dist/branches.js +111 -0
- package/dist/branches.js.map +1 -0
- package/dist/cli.d.ts +1 -1
- package/dist/cli.d.ts.map +1 -1
- package/dist/cli.js +238 -272
- package/dist/cli.js.map +1 -1
- package/dist/config.d.ts +17 -0
- package/dist/config.d.ts.map +1 -0
- package/dist/config.js +84 -0
- package/dist/config.js.map +1 -0
- package/dist/fs-tree.d.ts +6 -0
- package/dist/fs-tree.d.ts.map +1 -0
- package/dist/fs-tree.js +99 -0
- package/dist/fs-tree.js.map +1 -0
- package/dist/ignore.d.ts +6 -0
- package/dist/ignore.d.ts.map +1 -0
- package/dist/ignore.js +74 -0
- package/dist/ignore.js.map +1 -0
- package/dist/index.d.ts +8 -4
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +34 -4
- package/dist/index.js.map +1 -1
- package/dist/log.d.ts +3 -0
- package/dist/log.d.ts.map +1 -0
- package/dist/log.js +14 -0
- package/dist/log.js.map +1 -0
- package/dist/pushwork.d.ts +115 -0
- package/dist/pushwork.d.ts.map +1 -0
- package/dist/pushwork.js +918 -0
- package/dist/pushwork.js.map +1 -0
- package/dist/repo.d.ts +14 -0
- package/dist/repo.d.ts.map +1 -0
- package/dist/repo.js +60 -0
- package/dist/repo.js.map +1 -0
- package/dist/shapes/custom.d.ts +3 -0
- package/dist/shapes/custom.d.ts.map +1 -0
- package/dist/shapes/custom.js +57 -0
- package/dist/shapes/custom.js.map +1 -0
- package/dist/shapes/file.d.ts +20 -0
- package/dist/shapes/file.d.ts.map +1 -0
- package/dist/shapes/file.js +140 -0
- package/dist/shapes/file.js.map +1 -0
- package/dist/shapes/index.d.ts +10 -0
- package/dist/shapes/index.d.ts.map +1 -0
- package/dist/shapes/index.js +35 -0
- package/dist/shapes/index.js.map +1 -0
- package/dist/shapes/patchwork-folder.d.ts +3 -0
- package/dist/shapes/patchwork-folder.d.ts.map +1 -0
- package/dist/shapes/patchwork-folder.js +160 -0
- package/dist/shapes/patchwork-folder.js.map +1 -0
- package/dist/shapes/types.d.ts +37 -0
- package/dist/shapes/types.d.ts.map +1 -0
- package/dist/shapes/types.js +52 -0
- package/dist/shapes/types.js.map +1 -0
- package/dist/shapes/vfs.d.ts +3 -0
- package/dist/shapes/vfs.d.ts.map +1 -0
- package/dist/shapes/vfs.js +88 -0
- package/dist/shapes/vfs.js.map +1 -0
- package/dist/stash.d.ts +23 -0
- package/dist/stash.d.ts.map +1 -0
- package/dist/stash.js +118 -0
- package/dist/stash.js.map +1 -0
- package/flake.lock +128 -0
- package/flake.nix +66 -0
- package/package.json +15 -48
- package/patches/@automerge__automerge-repo@2.6.0-subduction.15.patch +26 -0
- package/pnpm-workspace.yaml +5 -0
- package/src/branches.ts +93 -0
- package/src/cli.ts +258 -408
- package/src/config.ts +64 -0
- package/src/fs-tree.ts +70 -0
- package/src/ignore.ts +33 -0
- package/src/index.ts +38 -4
- package/src/log.ts +8 -0
- package/src/pushwork.ts +1055 -0
- package/src/repo.ts +76 -0
- package/src/shapes/custom.ts +29 -0
- package/src/shapes/file.ts +115 -0
- package/src/shapes/index.ts +19 -0
- package/src/shapes/patchwork-folder.ts +156 -0
- package/src/shapes/types.ts +79 -0
- package/src/shapes/vfs.ts +93 -0
- package/src/stash.ts +106 -0
- package/test/integration/branches.test.ts +389 -0
- package/test/integration/pushwork.test.ts +547 -0
- package/test/setup.ts +29 -0
- package/test/unit/doc-shape.test.ts +612 -0
- package/tsconfig.json +2 -3
- package/vitest.config.ts +14 -0
- package/ARCHITECTURE-ACCORDING-TO-CLAUDE.md +0 -248
- package/CLAUDE.md +0 -141
- package/README.md +0 -221
- package/babel.config.js +0 -5
- package/dist/cli/commands.d.ts +0 -71
- package/dist/cli/commands.d.ts.map +0 -1
- package/dist/cli/commands.js +0 -794
- package/dist/cli/commands.js.map +0 -1
- package/dist/cli/index.d.ts +0 -2
- package/dist/cli/index.d.ts.map +0 -1
- package/dist/cli/index.js +0 -19
- package/dist/cli/index.js.map +0 -1
- package/dist/commands.d.ts +0 -61
- package/dist/commands.d.ts.map +0 -1
- package/dist/commands.js +0 -861
- package/dist/commands.js.map +0 -1
- package/dist/config/index.d.ts +0 -71
- package/dist/config/index.d.ts.map +0 -1
- package/dist/config/index.js +0 -314
- package/dist/config/index.js.map +0 -1
- package/dist/core/change-detection.d.ts +0 -80
- package/dist/core/change-detection.d.ts.map +0 -1
- package/dist/core/change-detection.js +0 -523
- package/dist/core/change-detection.js.map +0 -1
- package/dist/core/config.d.ts +0 -81
- package/dist/core/config.d.ts.map +0 -1
- package/dist/core/config.js +0 -258
- package/dist/core/config.js.map +0 -1
- package/dist/core/index.d.ts +0 -6
- package/dist/core/index.d.ts.map +0 -1
- package/dist/core/index.js +0 -6
- package/dist/core/index.js.map +0 -1
- package/dist/core/move-detection.d.ts +0 -34
- package/dist/core/move-detection.d.ts.map +0 -1
- package/dist/core/move-detection.js +0 -121
- package/dist/core/move-detection.js.map +0 -1
- package/dist/core/snapshot.d.ts +0 -105
- package/dist/core/snapshot.d.ts.map +0 -1
- package/dist/core/snapshot.js +0 -217
- package/dist/core/snapshot.js.map +0 -1
- package/dist/core/sync-engine.d.ts +0 -151
- package/dist/core/sync-engine.d.ts.map +0 -1
- package/dist/core/sync-engine.js +0 -1346
- package/dist/core/sync-engine.js.map +0 -1
- package/dist/types/config.d.ts +0 -99
- package/dist/types/config.d.ts.map +0 -1
- package/dist/types/config.js +0 -5
- package/dist/types/config.js.map +0 -1
- package/dist/types/documents.d.ts +0 -88
- package/dist/types/documents.d.ts.map +0 -1
- package/dist/types/documents.js +0 -20
- package/dist/types/documents.js.map +0 -1
- package/dist/types/index.d.ts +0 -4
- package/dist/types/index.d.ts.map +0 -1
- package/dist/types/index.js +0 -4
- package/dist/types/index.js.map +0 -1
- package/dist/types/snapshot.d.ts +0 -64
- package/dist/types/snapshot.d.ts.map +0 -1
- package/dist/types/snapshot.js +0 -2
- package/dist/types/snapshot.js.map +0 -1
- package/dist/utils/content-similarity.d.ts +0 -53
- package/dist/utils/content-similarity.d.ts.map +0 -1
- package/dist/utils/content-similarity.js +0 -155
- package/dist/utils/content-similarity.js.map +0 -1
- package/dist/utils/content.d.ts +0 -10
- package/dist/utils/content.d.ts.map +0 -1
- package/dist/utils/content.js +0 -31
- package/dist/utils/content.js.map +0 -1
- package/dist/utils/directory.d.ts +0 -24
- package/dist/utils/directory.d.ts.map +0 -1
- package/dist/utils/directory.js +0 -52
- package/dist/utils/directory.js.map +0 -1
- package/dist/utils/fs.d.ts +0 -74
- package/dist/utils/fs.d.ts.map +0 -1
- package/dist/utils/fs.js +0 -248
- package/dist/utils/fs.js.map +0 -1
- package/dist/utils/index.d.ts +0 -5
- package/dist/utils/index.d.ts.map +0 -1
- package/dist/utils/index.js +0 -5
- package/dist/utils/index.js.map +0 -1
- package/dist/utils/mime-types.d.ts +0 -13
- package/dist/utils/mime-types.d.ts.map +0 -1
- package/dist/utils/mime-types.js +0 -209
- package/dist/utils/mime-types.js.map +0 -1
- package/dist/utils/network-sync.d.ts +0 -36
- package/dist/utils/network-sync.d.ts.map +0 -1
- package/dist/utils/network-sync.js +0 -250
- package/dist/utils/network-sync.js.map +0 -1
- package/dist/utils/node-polyfills.d.ts +0 -9
- package/dist/utils/node-polyfills.d.ts.map +0 -1
- package/dist/utils/node-polyfills.js +0 -9
- package/dist/utils/node-polyfills.js.map +0 -1
- package/dist/utils/output.d.ts +0 -129
- package/dist/utils/output.d.ts.map +0 -1
- package/dist/utils/output.js +0 -368
- package/dist/utils/output.js.map +0 -1
- package/dist/utils/repo-factory.d.ts +0 -13
- package/dist/utils/repo-factory.d.ts.map +0 -1
- package/dist/utils/repo-factory.js +0 -46
- package/dist/utils/repo-factory.js.map +0 -1
- package/dist/utils/string-similarity.d.ts +0 -14
- package/dist/utils/string-similarity.d.ts.map +0 -1
- package/dist/utils/string-similarity.js +0 -39
- package/dist/utils/string-similarity.js.map +0 -1
- package/dist/utils/text-diff.d.ts +0 -37
- package/dist/utils/text-diff.d.ts.map +0 -1
- package/dist/utils/text-diff.js +0 -93
- package/dist/utils/text-diff.js.map +0 -1
- package/dist/utils/trace.d.ts +0 -19
- package/dist/utils/trace.d.ts.map +0 -1
- package/dist/utils/trace.js +0 -63
- package/dist/utils/trace.js.map +0 -1
- package/src/commands.ts +0 -1134
- package/src/core/change-detection.ts +0 -712
- package/src/core/config.ts +0 -313
- package/src/core/index.ts +0 -5
- package/src/core/move-detection.ts +0 -169
- package/src/core/snapshot.ts +0 -275
- package/src/core/sync-engine.ts +0 -1758
- package/src/types/config.ts +0 -111
- package/src/types/documents.ts +0 -91
- package/src/types/index.ts +0 -3
- package/src/types/snapshot.ts +0 -67
- package/src/utils/content.ts +0 -34
- package/src/utils/directory.ts +0 -73
- package/src/utils/fs.ts +0 -297
- package/src/utils/index.ts +0 -4
- package/src/utils/mime-types.ts +0 -244
- package/src/utils/network-sync.ts +0 -319
- package/src/utils/node-polyfills.ts +0 -8
- package/src/utils/output.ts +0 -450
- package/src/utils/repo-factory.ts +0 -73
- package/src/utils/string-similarity.ts +0 -54
- package/src/utils/text-diff.ts +0 -101
- package/src/utils/trace.ts +0 -70
- package/test/integration/README.md +0 -328
- package/test/integration/clone-test.sh +0 -310
- package/test/integration/conflict-resolution-test.sh +0 -309
- package/test/integration/debug-both-nested.sh +0 -74
- package/test/integration/debug-concurrent-nested.sh +0 -87
- package/test/integration/debug-nested.sh +0 -73
- package/test/integration/deletion-behavior-test.sh +0 -487
- package/test/integration/deletion-sync-test-simple.sh +0 -193
- package/test/integration/deletion-sync-test.sh +0 -297
- package/test/integration/exclude-patterns.test.ts +0 -144
- package/test/integration/full-integration-test.sh +0 -363
- package/test/integration/fuzzer.test.ts +0 -818
- package/test/integration/in-memory-sync.test.ts +0 -830
- package/test/integration/init-sync.test.ts +0 -89
- package/test/integration/manual-sync-test.sh +0 -84
- package/test/integration/sync-deletion.test.ts +0 -280
- package/test/integration/sync-flow.test.ts +0 -291
- package/test/jest.setup.ts +0 -34
- package/test/run-tests.sh +0 -225
- package/test/unit/deletion-behavior.test.ts +0 -249
- package/test/unit/enhanced-mime-detection.test.ts +0 -244
- package/test/unit/snapshot.test.ts +0 -404
- package/test/unit/sync-convergence.test.ts +0 -298
- package/test/unit/sync-timing.test.ts +0 -134
- package/test/unit/utils.test.ts +0 -366
package/src/core/sync-engine.ts
DELETED
|
@@ -1,1758 +0,0 @@
|
|
|
1
|
-
import {
|
|
2
|
-
AutomergeUrl,
|
|
3
|
-
Repo,
|
|
4
|
-
DocHandle,
|
|
5
|
-
UrlHeads,
|
|
6
|
-
parseAutomergeUrl,
|
|
7
|
-
stringifyAutomergeUrl,
|
|
8
|
-
} from "@automerge/automerge-repo"
|
|
9
|
-
import * as A from "@automerge/automerge"
|
|
10
|
-
import {
|
|
11
|
-
SyncSnapshot,
|
|
12
|
-
SyncResult,
|
|
13
|
-
FileDocument,
|
|
14
|
-
DirectoryDocument,
|
|
15
|
-
ChangeType,
|
|
16
|
-
FileType,
|
|
17
|
-
MoveCandidate,
|
|
18
|
-
DirectoryConfig,
|
|
19
|
-
DetectedChange,
|
|
20
|
-
} from "../types/index.js"
|
|
21
|
-
import {
|
|
22
|
-
writeFileContent,
|
|
23
|
-
removePath,
|
|
24
|
-
getFileExtension,
|
|
25
|
-
getEnhancedMimeType,
|
|
26
|
-
formatRelativePath,
|
|
27
|
-
findFileInDirectoryHierarchy,
|
|
28
|
-
joinAndNormalizePath,
|
|
29
|
-
getPlainUrl,
|
|
30
|
-
updateTextContent,
|
|
31
|
-
readDocContent,
|
|
32
|
-
} from "../utils/index.js"
|
|
33
|
-
import {isContentEqual, contentHash} from "../utils/content.js"
|
|
34
|
-
import {waitForSync, waitForBidirectionalSync} from "../utils/network-sync.js"
|
|
35
|
-
import {SnapshotManager} from "./snapshot.js"
|
|
36
|
-
import {ChangeDetector} from "./change-detection.js"
|
|
37
|
-
import {MoveDetector} from "./move-detection.js"
|
|
38
|
-
import {out} from "../utils/output.js"
|
|
39
|
-
import * as path from "path"
|
|
40
|
-
|
|
41
|
-
const isDebug = !!process.env.DEBUG
|
|
42
|
-
function debug(...args: any[]) {
|
|
43
|
-
if (isDebug) console.error("[pushwork:engine]", ...args)
|
|
44
|
-
}
|
|
45
|
-
|
|
46
|
-
/**
|
|
47
|
-
* Apply a change to a document handle, using changeAt when heads are available
|
|
48
|
-
* to branch from a known version, otherwise falling back to change.
|
|
49
|
-
*/
|
|
50
|
-
function changeWithOptionalHeads<T>(
|
|
51
|
-
handle: DocHandle<T>,
|
|
52
|
-
heads: UrlHeads | undefined,
|
|
53
|
-
callback: A.ChangeFn<T>
|
|
54
|
-
): void {
|
|
55
|
-
if (heads && heads.length > 0) {
|
|
56
|
-
handle.changeAt(heads, callback)
|
|
57
|
-
} else {
|
|
58
|
-
handle.change(callback)
|
|
59
|
-
}
|
|
60
|
-
}
|
|
61
|
-
|
|
62
|
-
/**
|
|
63
|
-
* Sync configuration constants
|
|
64
|
-
*/
|
|
65
|
-
const BIDIRECTIONAL_SYNC_TIMEOUT_MS = 5000 // Timeout for bidirectional sync stability check
|
|
66
|
-
|
|
67
|
-
/**
|
|
68
|
-
* Bidirectional sync engine implementing two-phase sync
|
|
69
|
-
*/
|
|
70
|
-
export class SyncEngine {
|
|
71
|
-
private snapshotManager: SnapshotManager
|
|
72
|
-
private changeDetector: ChangeDetector
|
|
73
|
-
private moveDetector: MoveDetector
|
|
74
|
-
// Map from path to handle for leaf-first sync ordering
|
|
75
|
-
// Path depth determines sync order (deepest first)
|
|
76
|
-
private handlesByPath: Map<string, DocHandle<unknown>> = new Map()
|
|
77
|
-
private config: DirectoryConfig
|
|
78
|
-
|
|
79
|
-
constructor(
|
|
80
|
-
private repo: Repo,
|
|
81
|
-
private rootPath: string,
|
|
82
|
-
config: DirectoryConfig
|
|
83
|
-
) {
|
|
84
|
-
this.config = config
|
|
85
|
-
this.snapshotManager = new SnapshotManager(rootPath)
|
|
86
|
-
this.changeDetector = new ChangeDetector(
|
|
87
|
-
repo,
|
|
88
|
-
rootPath,
|
|
89
|
-
config.exclude_patterns,
|
|
90
|
-
config.artifact_directories || []
|
|
91
|
-
)
|
|
92
|
-
this.moveDetector = new MoveDetector(config.sync.move_detection_threshold)
|
|
93
|
-
}
|
|
94
|
-
|
|
95
|
-
/**
|
|
96
|
-
* Determine if content should be treated as text for Automerge text operations
|
|
97
|
-
* Note: This method checks the runtime type. File type detection happens
|
|
98
|
-
* during reading with isEnhancedTextFile() which now has better dev file support.
|
|
99
|
-
*/
|
|
100
|
-
private isTextContent(content: string | Uint8Array): boolean {
|
|
101
|
-
// Simply check the actual type of the content
|
|
102
|
-
return typeof content === "string"
|
|
103
|
-
}
|
|
104
|
-
|
|
105
|
-
/**
|
|
106
|
-
* Get a versioned URL from a handle (includes current heads).
|
|
107
|
-
* This ensures clients can fetch the exact version of the document.
|
|
108
|
-
*/
|
|
109
|
-
private getVersionedUrl(handle: DocHandle<unknown>): AutomergeUrl {
|
|
110
|
-
const {documentId} = parseAutomergeUrl(handle.url)
|
|
111
|
-
const heads = handle.heads()
|
|
112
|
-
return stringifyAutomergeUrl({documentId, heads})
|
|
113
|
-
}
|
|
114
|
-
|
|
115
|
-
/**
|
|
116
|
-
* Determine if a file path is inside an artifact directory.
|
|
117
|
-
* Artifact files are stored as immutable strings (RawString) and
|
|
118
|
-
* referenced with versioned URLs in directory entries.
|
|
119
|
-
*/
|
|
120
|
-
private isArtifactPath(filePath: string): boolean {
|
|
121
|
-
const artifactDirs = this.config.artifact_directories || []
|
|
122
|
-
return artifactDirs.some(
|
|
123
|
-
dir => filePath === dir || filePath.startsWith(dir + "/")
|
|
124
|
-
)
|
|
125
|
-
}
|
|
126
|
-
|
|
127
|
-
/**
|
|
128
|
-
* Get the appropriate URL for a directory entry.
|
|
129
|
-
* Artifact paths get versioned URLs (with heads) for exact version fetching.
|
|
130
|
-
* Non-artifact paths get plain URLs for collaborative editing.
|
|
131
|
-
*/
|
|
132
|
-
private getEntryUrl(handle: DocHandle<unknown>, filePath: string): AutomergeUrl {
|
|
133
|
-
if (this.isArtifactPath(filePath)) {
|
|
134
|
-
return this.getVersionedUrl(handle)
|
|
135
|
-
}
|
|
136
|
-
return getPlainUrl(handle.url)
|
|
137
|
-
}
|
|
138
|
-
|
|
139
|
-
/**
|
|
140
|
-
* Set the root directory URL in the snapshot
|
|
141
|
-
*/
|
|
142
|
-
async setRootDirectoryUrl(url: AutomergeUrl): Promise<void> {
|
|
143
|
-
let snapshot = await this.snapshotManager.load()
|
|
144
|
-
if (!snapshot) {
|
|
145
|
-
snapshot = this.snapshotManager.createEmpty()
|
|
146
|
-
}
|
|
147
|
-
snapshot.rootDirectoryUrl = url
|
|
148
|
-
await this.snapshotManager.save(snapshot)
|
|
149
|
-
}
|
|
150
|
-
|
|
151
|
-
/**
|
|
152
|
-
* Reset the snapshot, clearing all tracked files and directories.
|
|
153
|
-
* Preserves the rootDirectoryUrl so sync can still operate.
|
|
154
|
-
* Used by --force to re-sync every file.
|
|
155
|
-
*/
|
|
156
|
-
async resetSnapshot(): Promise<void> {
|
|
157
|
-
let snapshot = await this.snapshotManager.load()
|
|
158
|
-
if (!snapshot) return
|
|
159
|
-
this.snapshotManager.clear(snapshot)
|
|
160
|
-
await this.snapshotManager.save(snapshot)
|
|
161
|
-
}
|
|
162
|
-
|
|
163
|
-
/**
|
|
164
|
-
* Nuclear reset: clear the snapshot AND wipe the root directory document's
|
|
165
|
-
* entries so that every file and subdirectory gets brand-new Automerge
|
|
166
|
-
* documents. The root directory document itself is preserved.
|
|
167
|
-
*/
|
|
168
|
-
async nuclearReset(): Promise<void> {
|
|
169
|
-
let snapshot = await this.snapshotManager.load()
|
|
170
|
-
if (!snapshot) return
|
|
171
|
-
|
|
172
|
-
// Clear the root directory document's entries
|
|
173
|
-
if (snapshot.rootDirectoryUrl) {
|
|
174
|
-
const rootHandle = await this.repo.find<DirectoryDocument>(
|
|
175
|
-
getPlainUrl(snapshot.rootDirectoryUrl)
|
|
176
|
-
)
|
|
177
|
-
rootHandle.change((doc: DirectoryDocument) => {
|
|
178
|
-
doc.docs.splice(0, doc.docs.length)
|
|
179
|
-
})
|
|
180
|
-
}
|
|
181
|
-
|
|
182
|
-
// Clear all tracked files and directories from snapshot
|
|
183
|
-
this.snapshotManager.clear(snapshot)
|
|
184
|
-
await this.snapshotManager.save(snapshot)
|
|
185
|
-
}
|
|
186
|
-
|
|
187
|
-
/**
|
|
188
|
-
* Commit local changes only (no network sync)
|
|
189
|
-
*/
|
|
190
|
-
async commitLocal(): Promise<SyncResult> {
|
|
191
|
-
const result: SyncResult = {
|
|
192
|
-
success: false,
|
|
193
|
-
filesChanged: 0,
|
|
194
|
-
directoriesChanged: 0,
|
|
195
|
-
errors: [],
|
|
196
|
-
warnings: [],
|
|
197
|
-
}
|
|
198
|
-
|
|
199
|
-
try {
|
|
200
|
-
// Load current snapshot
|
|
201
|
-
let snapshot = await this.snapshotManager.load()
|
|
202
|
-
if (!snapshot) {
|
|
203
|
-
snapshot = this.snapshotManager.createEmpty()
|
|
204
|
-
}
|
|
205
|
-
|
|
206
|
-
// Detect all changes
|
|
207
|
-
const changes = await this.changeDetector.detectChanges(snapshot)
|
|
208
|
-
|
|
209
|
-
// Detect moves
|
|
210
|
-
const {moves, remainingChanges} = await this.moveDetector.detectMoves(
|
|
211
|
-
changes,
|
|
212
|
-
snapshot
|
|
213
|
-
)
|
|
214
|
-
|
|
215
|
-
// Apply local changes only (no network sync)
|
|
216
|
-
const commitResult = await this.pushLocalChanges(
|
|
217
|
-
remainingChanges,
|
|
218
|
-
moves,
|
|
219
|
-
snapshot
|
|
220
|
-
)
|
|
221
|
-
|
|
222
|
-
result.filesChanged += commitResult.filesChanged
|
|
223
|
-
result.directoriesChanged += commitResult.directoriesChanged
|
|
224
|
-
result.errors.push(...commitResult.errors)
|
|
225
|
-
result.warnings.push(...commitResult.warnings)
|
|
226
|
-
|
|
227
|
-
// Touch root directory if any changes were made
|
|
228
|
-
const hasChanges =
|
|
229
|
-
result.filesChanged > 0 || result.directoriesChanged > 0
|
|
230
|
-
if (hasChanges) {
|
|
231
|
-
await this.touchRootDirectory(snapshot)
|
|
232
|
-
}
|
|
233
|
-
|
|
234
|
-
// Save updated snapshot
|
|
235
|
-
await this.snapshotManager.save(snapshot)
|
|
236
|
-
|
|
237
|
-
result.success = result.errors.length === 0
|
|
238
|
-
|
|
239
|
-
return result
|
|
240
|
-
} catch (error) {
|
|
241
|
-
result.errors.push({
|
|
242
|
-
path: this.rootPath,
|
|
243
|
-
operation: "commitLocal",
|
|
244
|
-
error: error instanceof Error ? error : new Error(String(error)),
|
|
245
|
-
recoverable: true,
|
|
246
|
-
})
|
|
247
|
-
result.success = false
|
|
248
|
-
return result
|
|
249
|
-
}
|
|
250
|
-
}
|
|
251
|
-
|
|
252
|
-
/**
|
|
253
|
-
* Recreate documents that failed to sync. Creates new Automerge documents
|
|
254
|
-
* with the same content and updates all references (snapshot, parent directory).
|
|
255
|
-
* Returns new handles that should be retried for sync.
|
|
256
|
-
*/
|
|
257
|
-
private async recreateFailedDocuments(
|
|
258
|
-
failedHandles: DocHandle<unknown>[],
|
|
259
|
-
snapshot: SyncSnapshot
|
|
260
|
-
): Promise<DocHandle<unknown>[]> {
|
|
261
|
-
const failedUrls = new Set(failedHandles.map(h => getPlainUrl(h.url)))
|
|
262
|
-
const newHandles: DocHandle<unknown>[] = []
|
|
263
|
-
|
|
264
|
-
// Find which paths correspond to the failed handles
|
|
265
|
-
for (const [filePath, entry] of snapshot.files.entries()) {
|
|
266
|
-
const plainUrl = getPlainUrl(entry.url)
|
|
267
|
-
if (!failedUrls.has(plainUrl)) continue
|
|
268
|
-
|
|
269
|
-
debug(`recreate: recreating document for ${filePath} (${plainUrl})`)
|
|
270
|
-
out.taskLine(`Recreating document for ${filePath}`)
|
|
271
|
-
|
|
272
|
-
try {
|
|
273
|
-
// Read the current content from the old handle
|
|
274
|
-
const oldHandle = await this.repo.find<FileDocument>(plainUrl)
|
|
275
|
-
const doc = await oldHandle.doc()
|
|
276
|
-
if (!doc) {
|
|
277
|
-
debug(`recreate: could not read doc for ${filePath}, skipping`)
|
|
278
|
-
continue
|
|
279
|
-
}
|
|
280
|
-
|
|
281
|
-
const content = readDocContent(doc.content)
|
|
282
|
-
if (content === null) {
|
|
283
|
-
debug(`recreate: null content for ${filePath}, skipping`)
|
|
284
|
-
continue
|
|
285
|
-
}
|
|
286
|
-
|
|
287
|
-
// Create a fresh document
|
|
288
|
-
const fakeChange: DetectedChange = {
|
|
289
|
-
path: filePath,
|
|
290
|
-
changeType: ChangeType.LOCAL_ONLY,
|
|
291
|
-
fileType: this.isTextContent(content) ? FileType.TEXT : FileType.BINARY,
|
|
292
|
-
localContent: content,
|
|
293
|
-
remoteContent: null,
|
|
294
|
-
}
|
|
295
|
-
const newHandle = await this.createRemoteFile(fakeChange)
|
|
296
|
-
if (!newHandle) continue
|
|
297
|
-
|
|
298
|
-
const entryUrl = this.getEntryUrl(newHandle, filePath)
|
|
299
|
-
|
|
300
|
-
// Update snapshot entry
|
|
301
|
-
this.snapshotManager.updateFileEntry(snapshot, filePath, {
|
|
302
|
-
...entry,
|
|
303
|
-
url: entryUrl,
|
|
304
|
-
head: newHandle.heads(),
|
|
305
|
-
...(this.isArtifactPath(filePath) ? {contentHash: contentHash(content)} : {}),
|
|
306
|
-
})
|
|
307
|
-
|
|
308
|
-
// Update parent directory entry to point to new document
|
|
309
|
-
const pathParts = filePath.split("/")
|
|
310
|
-
const fileName = pathParts.pop() || ""
|
|
311
|
-
const dirPath = pathParts.join("/")
|
|
312
|
-
|
|
313
|
-
let dirUrl: AutomergeUrl
|
|
314
|
-
if (!dirPath || dirPath === "") {
|
|
315
|
-
dirUrl = snapshot.rootDirectoryUrl!
|
|
316
|
-
} else {
|
|
317
|
-
const dirEntry = snapshot.directories.get(dirPath)
|
|
318
|
-
if (!dirEntry) continue
|
|
319
|
-
dirUrl = dirEntry.url
|
|
320
|
-
}
|
|
321
|
-
|
|
322
|
-
const dirHandle = await this.repo.find<DirectoryDocument>(getPlainUrl(dirUrl))
|
|
323
|
-
dirHandle.change((d: DirectoryDocument) => {
|
|
324
|
-
const idx = d.docs.findIndex(
|
|
325
|
-
e => e.name === fileName && e.type === "file"
|
|
326
|
-
)
|
|
327
|
-
if (idx !== -1) {
|
|
328
|
-
d.docs[idx].url = entryUrl
|
|
329
|
-
}
|
|
330
|
-
})
|
|
331
|
-
|
|
332
|
-
// Track new handles
|
|
333
|
-
this.handlesByPath.set(filePath, newHandle)
|
|
334
|
-
this.handlesByPath.set(dirPath, dirHandle)
|
|
335
|
-
newHandles.push(newHandle)
|
|
336
|
-
newHandles.push(dirHandle)
|
|
337
|
-
|
|
338
|
-
debug(`recreate: created new doc for ${filePath} -> ${newHandle.url}`)
|
|
339
|
-
} catch (error) {
|
|
340
|
-
debug(`recreate: failed for ${filePath}: ${error}`)
|
|
341
|
-
out.taskLine(`Failed to recreate ${filePath}: ${error}`, true)
|
|
342
|
-
}
|
|
343
|
-
}
|
|
344
|
-
|
|
345
|
-
// Also check directory documents
|
|
346
|
-
for (const [dirPath, entry] of snapshot.directories.entries()) {
|
|
347
|
-
const plainUrl = getPlainUrl(entry.url)
|
|
348
|
-
if (!failedUrls.has(plainUrl)) continue
|
|
349
|
-
|
|
350
|
-
// Directory docs can't be easily recreated (they reference children).
|
|
351
|
-
// Just log a warning — the child recreation above should handle most cases.
|
|
352
|
-
debug(`recreate: directory ${dirPath || "(root)"} failed to sync, cannot recreate`)
|
|
353
|
-
out.taskLine(`Warning: directory ${dirPath || "(root)"} failed to sync`, true)
|
|
354
|
-
}
|
|
355
|
-
|
|
356
|
-
return newHandles
|
|
357
|
-
}
|
|
358
|
-
|
|
359
|
-
/**
|
|
360
|
-
* Run full bidirectional sync
|
|
361
|
-
*/
|
|
362
|
-
async sync(): Promise<SyncResult> {
|
|
363
|
-
const result: SyncResult = {
|
|
364
|
-
success: false,
|
|
365
|
-
filesChanged: 0,
|
|
366
|
-
directoriesChanged: 0,
|
|
367
|
-
errors: [],
|
|
368
|
-
warnings: [],
|
|
369
|
-
timings: {},
|
|
370
|
-
}
|
|
371
|
-
|
|
372
|
-
// Reset tracked handles for sync
|
|
373
|
-
this.handlesByPath = new Map()
|
|
374
|
-
|
|
375
|
-
try {
|
|
376
|
-
// Load current snapshot
|
|
377
|
-
const snapshot =
|
|
378
|
-
(await this.snapshotManager.load()) ||
|
|
379
|
-
this.snapshotManager.createEmpty()
|
|
380
|
-
|
|
381
|
-
debug(`sync: rootDirectoryUrl=${snapshot.rootDirectoryUrl}, files=${snapshot.files.size}, dirs=${snapshot.directories.size}`)
|
|
382
|
-
|
|
383
|
-
// Wait for initial sync to receive any pending remote changes
|
|
384
|
-
if (this.config.sync_enabled && snapshot.rootDirectoryUrl) {
|
|
385
|
-
debug("sync: waiting for root document to be ready")
|
|
386
|
-
out.update("Waiting for root document from server")
|
|
387
|
-
|
|
388
|
-
// Wait for the root document to be fetched from the network.
|
|
389
|
-
// repo.find() rejects with "unavailable" if the server doesn't
|
|
390
|
-
// have the document yet, so we retry with backoff.
|
|
391
|
-
// This is critical for clone scenarios.
|
|
392
|
-
const plainRootUrl = getPlainUrl(snapshot.rootDirectoryUrl)
|
|
393
|
-
const maxAttempts = 6
|
|
394
|
-
for (let attempt = 1; attempt <= maxAttempts; attempt++) {
|
|
395
|
-
try {
|
|
396
|
-
const rootHandle = await this.repo.find<DirectoryDocument>(plainRootUrl)
|
|
397
|
-
rootHandle.doc() // throws if not ready
|
|
398
|
-
debug(`sync: root document ready (attempt ${attempt})`)
|
|
399
|
-
break
|
|
400
|
-
} catch (error) {
|
|
401
|
-
const isUnavailable = String(error).includes("unavailable") || String(error).includes("not ready")
|
|
402
|
-
if (isUnavailable && attempt < maxAttempts) {
|
|
403
|
-
const delay = Math.min(1000 * Math.pow(2, attempt - 1), 10000)
|
|
404
|
-
debug(`sync: root document not available (attempt ${attempt}/${maxAttempts}), retrying in ${delay}ms`)
|
|
405
|
-
out.update(`Waiting for root document (attempt ${attempt}/${maxAttempts})`)
|
|
406
|
-
await new Promise(r => setTimeout(r, delay))
|
|
407
|
-
} else {
|
|
408
|
-
debug(`sync: root document unavailable after ${maxAttempts} attempts: ${error}`)
|
|
409
|
-
out.taskLine(`Root document unavailable: ${error}`, true)
|
|
410
|
-
break
|
|
411
|
-
}
|
|
412
|
-
}
|
|
413
|
-
}
|
|
414
|
-
|
|
415
|
-
debug("sync: waiting for initial bidirectional sync")
|
|
416
|
-
out.update("Waiting for initial sync from server")
|
|
417
|
-
try {
|
|
418
|
-
await waitForBidirectionalSync(
|
|
419
|
-
this.repo,
|
|
420
|
-
snapshot.rootDirectoryUrl,
|
|
421
|
-
{
|
|
422
|
-
timeoutMs: 5000, // Increased timeout for initial sync
|
|
423
|
-
pollIntervalMs: 100,
|
|
424
|
-
stableChecksRequired: 3,
|
|
425
|
-
}
|
|
426
|
-
)
|
|
427
|
-
} catch (error) {
|
|
428
|
-
out.taskLine(`Initial sync: ${error}`, true)
|
|
429
|
-
}
|
|
430
|
-
}
|
|
431
|
-
|
|
432
|
-
// Detect all changes
|
|
433
|
-
debug("sync: detecting changes")
|
|
434
|
-
out.update("Detecting local and remote changes")
|
|
435
|
-
// Capture pre-push snapshot file paths to detect deletions after push
|
|
436
|
-
const prePushFilePaths = new Set(snapshot.files.keys())
|
|
437
|
-
const changes = await this.changeDetector.detectChanges(snapshot)
|
|
438
|
-
|
|
439
|
-
// Detect moves
|
|
440
|
-
const {moves, remainingChanges} = await this.moveDetector.detectMoves(
|
|
441
|
-
changes,
|
|
442
|
-
snapshot
|
|
443
|
-
)
|
|
444
|
-
|
|
445
|
-
debug(`sync: detected ${changes.length} changes, ${moves.length} moves, ${remainingChanges.length} remaining`)
|
|
446
|
-
|
|
447
|
-
// Phase 1: Push local changes to remote
|
|
448
|
-
debug("sync: phase 1 - pushing local changes")
|
|
449
|
-
const phase1Result = await this.pushLocalChanges(
|
|
450
|
-
remainingChanges,
|
|
451
|
-
moves,
|
|
452
|
-
snapshot
|
|
453
|
-
)
|
|
454
|
-
|
|
455
|
-
result.filesChanged += phase1Result.filesChanged
|
|
456
|
-
result.directoriesChanged += phase1Result.directoriesChanged
|
|
457
|
-
result.errors.push(...phase1Result.errors)
|
|
458
|
-
result.warnings.push(...phase1Result.warnings)
|
|
459
|
-
|
|
460
|
-
debug(`sync: phase 1 complete - ${phase1Result.filesChanged} files, ${phase1Result.directoriesChanged} dirs changed`)
|
|
461
|
-
|
|
462
|
-
// Wait for network sync (important for clone scenarios)
|
|
463
|
-
if (this.config.sync_enabled) {
|
|
464
|
-
try {
|
|
465
|
-
// Ensure root directory handle is tracked for sync
|
|
466
|
-
if (snapshot.rootDirectoryUrl) {
|
|
467
|
-
const rootHandle =
|
|
468
|
-
await this.repo.find<DirectoryDocument>(
|
|
469
|
-
snapshot.rootDirectoryUrl
|
|
470
|
-
)
|
|
471
|
-
this.handlesByPath.set("", rootHandle)
|
|
472
|
-
}
|
|
473
|
-
|
|
474
|
-
// Single waitForSync with ALL tracked handles at once
|
|
475
|
-
if (this.handlesByPath.size > 0) {
|
|
476
|
-
const allHandles = Array.from(
|
|
477
|
-
this.handlesByPath.values()
|
|
478
|
-
)
|
|
479
|
-
const handlePaths = Array.from(this.handlesByPath.keys())
|
|
480
|
-
debug(`sync: waiting for ${allHandles.length} handles to sync to server: ${handlePaths.slice(0, 10).map(p => p || "(root)").join(", ")}${handlePaths.length > 10 ? ` ...and ${handlePaths.length - 10} more` : ""}`)
|
|
481
|
-
out.update(`Uploading ${allHandles.length} documents to sync server`)
|
|
482
|
-
const {failed} = await waitForSync(
|
|
483
|
-
allHandles
|
|
484
|
-
)
|
|
485
|
-
|
|
486
|
-
// Recreate failed documents and retry once
|
|
487
|
-
if (failed.length > 0) {
|
|
488
|
-
debug(`sync: ${failed.length} documents failed, recreating`)
|
|
489
|
-
out.update(`Recreating ${failed.length} failed documents`)
|
|
490
|
-
const retryHandles = await this.recreateFailedDocuments(failed, snapshot)
|
|
491
|
-
if (retryHandles.length > 0) {
|
|
492
|
-
debug(`sync: retrying ${retryHandles.length} recreated handles`)
|
|
493
|
-
out.update(`Retrying ${retryHandles.length} recreated documents`)
|
|
494
|
-
const retry = await waitForSync(
|
|
495
|
-
retryHandles
|
|
496
|
-
)
|
|
497
|
-
if (retry.failed.length > 0) {
|
|
498
|
-
const msg = `${retry.failed.length} documents failed to sync to server after recreation`
|
|
499
|
-
debug(`sync: ${msg}`)
|
|
500
|
-
result.errors.push({
|
|
501
|
-
path: "sync",
|
|
502
|
-
operation: "upload",
|
|
503
|
-
error: new Error(msg),
|
|
504
|
-
recoverable: true,
|
|
505
|
-
})
|
|
506
|
-
}
|
|
507
|
-
}
|
|
508
|
-
}
|
|
509
|
-
|
|
510
|
-
debug("sync: all handles synced to server")
|
|
511
|
-
}
|
|
512
|
-
|
|
513
|
-
// Wait for bidirectional sync to stabilize
|
|
514
|
-
// Use tracked handles for post-push check (cheaper than full tree scan)
|
|
515
|
-
const changedHandles = Array.from(this.handlesByPath.values())
|
|
516
|
-
debug(`sync: waiting for bidirectional sync to stabilize (${changedHandles.length} tracked handles)`)
|
|
517
|
-
out.update("Waiting for bidirectional sync to stabilize")
|
|
518
|
-
await waitForBidirectionalSync(
|
|
519
|
-
this.repo,
|
|
520
|
-
snapshot.rootDirectoryUrl,
|
|
521
|
-
{
|
|
522
|
-
timeoutMs: BIDIRECTIONAL_SYNC_TIMEOUT_MS,
|
|
523
|
-
pollIntervalMs: 100,
|
|
524
|
-
stableChecksRequired: 3,
|
|
525
|
-
handles: changedHandles.length > 0 ? changedHandles : undefined,
|
|
526
|
-
}
|
|
527
|
-
)
|
|
528
|
-
|
|
529
|
-
// Touch root directory AFTER all docs are synced and stable.
|
|
530
|
-
// This signals consumers (e.g. Patchwork) that new content is
|
|
531
|
-
// available. Because file docs are already on the server,
|
|
532
|
-
// consumers can immediately fetch them when they see the root change.
|
|
533
|
-
const hasPhase1Changes =
|
|
534
|
-
phase1Result.filesChanged > 0 || phase1Result.directoriesChanged > 0
|
|
535
|
-
if (hasPhase1Changes && snapshot.rootDirectoryUrl) {
|
|
536
|
-
await this.touchRootDirectory(snapshot)
|
|
537
|
-
const rootHandle =
|
|
538
|
-
await this.repo.find<DirectoryDocument>(
|
|
539
|
-
snapshot.rootDirectoryUrl
|
|
540
|
-
)
|
|
541
|
-
debug("sync: syncing root directory touch to server")
|
|
542
|
-
out.update("Syncing root directory update")
|
|
543
|
-
await waitForSync(
|
|
544
|
-
[rootHandle]
|
|
545
|
-
)
|
|
546
|
-
}
|
|
547
|
-
} catch (error) {
|
|
548
|
-
debug(`sync: network sync error: ${error}`)
|
|
549
|
-
out.taskLine(`Network sync failed: ${error}`, true)
|
|
550
|
-
result.errors.push({
|
|
551
|
-
path: "sync",
|
|
552
|
-
operation: "network-sync",
|
|
553
|
-
error: error instanceof Error ? error : new Error(String(error)),
|
|
554
|
-
recoverable: true,
|
|
555
|
-
})
|
|
556
|
-
}
|
|
557
|
-
}
|
|
558
|
-
|
|
559
|
-
// Re-detect changes after network sync for fresh state
|
|
560
|
-
// Compute paths deleted during push so they aren't resurrected during pull
|
|
561
|
-
const deletedPaths = new Set<string>()
|
|
562
|
-
for (const p of prePushFilePaths) {
|
|
563
|
-
if (!snapshot.files.has(p)) {
|
|
564
|
-
deletedPaths.add(p)
|
|
565
|
-
}
|
|
566
|
-
}
|
|
567
|
-
if (deletedPaths.size > 0) {
|
|
568
|
-
debug(`sync: excluding ${deletedPaths.size} deleted paths from re-detection`)
|
|
569
|
-
}
|
|
570
|
-
debug("sync: re-detecting changes after network sync")
|
|
571
|
-
const freshChanges = await this.changeDetector.detectChanges(snapshot, deletedPaths)
|
|
572
|
-
const freshRemoteChanges = freshChanges.filter(
|
|
573
|
-
c =>
|
|
574
|
-
c.changeType === ChangeType.REMOTE_ONLY ||
|
|
575
|
-
c.changeType === ChangeType.BOTH_CHANGED
|
|
576
|
-
)
|
|
577
|
-
|
|
578
|
-
debug(`sync: phase 2 - pulling ${freshRemoteChanges.length} remote changes`)
|
|
579
|
-
if (freshRemoteChanges.length > 0) {
|
|
580
|
-
out.update(`Pulling ${freshRemoteChanges.length} remote changes`)
|
|
581
|
-
}
|
|
582
|
-
// Phase 2: Pull remote changes to local using fresh detection
|
|
583
|
-
const phase2Result = await this.pullRemoteChanges(
|
|
584
|
-
freshRemoteChanges,
|
|
585
|
-
snapshot
|
|
586
|
-
)
|
|
587
|
-
result.filesChanged += phase2Result.filesChanged
|
|
588
|
-
result.directoriesChanged += phase2Result.directoriesChanged
|
|
589
|
-
result.errors.push(...phase2Result.errors)
|
|
590
|
-
result.warnings.push(...phase2Result.warnings)
|
|
591
|
-
|
|
592
|
-
// Update snapshot heads after pulling remote changes
|
|
593
|
-
// IMPORTANT: Use getPlainUrl() to strip version/heads from URLs.
|
|
594
|
-
// Artifact entries store versioned URLs (with heads baked in).
|
|
595
|
-
// repo.find(versionedUrl) returns a view handle whose .heads()
|
|
596
|
-
// returns the VERSION heads, not the current document heads.
|
|
597
|
-
// Using the versioned URL here would overwrite correct heads with
|
|
598
|
-
// stale ones, causing changeAt() to fork from the wrong point
|
|
599
|
-
// on the next sync (e.g. an empty directory state where deletions
|
|
600
|
-
// can't find the entries to splice out).
|
|
601
|
-
for (const [filePath, snapshotEntry] of snapshot.files.entries()) {
|
|
602
|
-
try {
|
|
603
|
-
const handle = await this.repo.find(getPlainUrl(snapshotEntry.url))
|
|
604
|
-
const currentHeads = handle.heads()
|
|
605
|
-
if (!A.equals(currentHeads, snapshotEntry.head)) {
|
|
606
|
-
// Update snapshot with current heads after pulling changes
|
|
607
|
-
snapshot.files.set(filePath, {
|
|
608
|
-
...snapshotEntry,
|
|
609
|
-
head: currentHeads,
|
|
610
|
-
})
|
|
611
|
-
}
|
|
612
|
-
} catch (error) {
|
|
613
|
-
// Handle might not exist if file was deleted
|
|
614
|
-
}
|
|
615
|
-
}
|
|
616
|
-
|
|
617
|
-
// Update directory document heads
|
|
618
|
-
for (const [dirPath, snapshotEntry] of snapshot.directories.entries()) {
|
|
619
|
-
try {
|
|
620
|
-
const handle = await this.repo.find(getPlainUrl(snapshotEntry.url))
|
|
621
|
-
const currentHeads = handle.heads()
|
|
622
|
-
if (!A.equals(currentHeads, snapshotEntry.head)) {
|
|
623
|
-
// Update snapshot with current heads after pulling changes
|
|
624
|
-
snapshot.directories.set(dirPath, {
|
|
625
|
-
...snapshotEntry,
|
|
626
|
-
head: currentHeads,
|
|
627
|
-
})
|
|
628
|
-
}
|
|
629
|
-
} catch (error) {
|
|
630
|
-
// Handle might not exist if directory was deleted
|
|
631
|
-
}
|
|
632
|
-
}
|
|
633
|
-
|
|
634
|
-
// Save updated snapshot if not dry run
|
|
635
|
-
await this.snapshotManager.save(snapshot)
|
|
636
|
-
|
|
637
|
-
result.success = result.errors.length === 0
|
|
638
|
-
return result
|
|
639
|
-
} catch (error) {
|
|
640
|
-
result.errors.push({
|
|
641
|
-
path: "sync",
|
|
642
|
-
operation: "full-sync",
|
|
643
|
-
error: error as Error,
|
|
644
|
-
recoverable: false,
|
|
645
|
-
})
|
|
646
|
-
return result
|
|
647
|
-
}
|
|
648
|
-
}
|
|
649
|
-
|
|
650
|
-
/**
 * Phase 1: Push local changes to Automerge documents.
 *
 * Works depth-first: processes the deepest files first, creates/updates all
 * file docs at each level, then batch-updates the parent directory document
 * in a single change. Propagates subdirectory URL updates as we walk up
 * toward the root. This eliminates the need for a separate URL update pass.
 *
 * @param changes - all detected changes; only LOCAL_ONLY / BOTH_CHANGED
 *   entries are pushed by this phase
 * @param moves - detected move candidates, applied before the per-file pass
 * @param snapshot - sync snapshot, mutated in place as file/directory
 *   entries are created, updated and removed
 * @returns counters for files/directories changed plus recoverable errors;
 *   a single failed file or move is recorded and does not abort the push
 */
private async pushLocalChanges(
  changes: DetectedChange[],
  moves: MoveCandidate[],
  snapshot: SyncSnapshot
): Promise<SyncResult> {
  const result: SyncResult = {
    success: true,
    filesChanged: 0,
    directoriesChanged: 0,
    errors: [],
    warnings: [],
  }

  // Process moves first - all detected moves are applied
  if (moves.length > 0) {
    debug(`push: processing ${moves.length} moves`)
    out.update(`Processing ${moves.length} move${moves.length > 1 ? "s" : ""}`)
  }
  for (let i = 0; i < moves.length; i++) {
    const move = moves[i]
    try {
      debug(`push: move ${i + 1}/${moves.length}: ${move.fromPath} -> ${move.toPath}`)
      out.taskLine(`Moving ${move.fromPath} -> ${move.toPath}`)
      await this.applyMoveToRemote(move, snapshot)
      result.filesChanged++
    } catch (error) {
      // A failed move is recoverable: record it and continue with the rest
      debug(`push: move failed for ${move.fromPath}: ${error}`)
      result.errors.push({
        path: move.fromPath,
        operation: "move",
        error: error as Error,
        recoverable: true,
      })
    }
  }

  // Filter to local changes only
  const localChanges = changes.filter(
    c =>
      c.changeType === ChangeType.LOCAL_ONLY ||
      c.changeType === ChangeType.BOTH_CHANGED
  )

  if (localChanges.length === 0) {
    debug("push: no local changes to push")
    return result
  }

  // Categorized only for the progress/debug messages below
  const newFiles = localChanges.filter(c => !snapshot.files.has(c.path) && c.localContent !== null)
  const modifiedFiles = localChanges.filter(c => snapshot.files.has(c.path) && c.localContent !== null)
  const deletedFiles = localChanges.filter(c => c.localContent === null && snapshot.files.has(c.path))
  debug(`push: ${localChanges.length} local changes (${newFiles.length} new, ${modifiedFiles.length} modified, ${deletedFiles.length} deleted)`)
  out.update(`Pushing ${localChanges.length} local changes (${newFiles.length} new, ${modifiedFiles.length} modified, ${deletedFiles.length} deleted)`)

  // Group changes by parent directory path
  const changesByDir = new Map<string, DetectedChange[]>()
  for (const change of localChanges) {
    const pathParts = change.path.split("/")
    pathParts.pop() // remove filename
    const dirPath = pathParts.join("/")
    if (!changesByDir.has(dirPath)) {
      changesByDir.set(dirPath, [])
    }
    changesByDir.get(dirPath)!.push(change)
  }

  // Collect all directory paths that need processing:
  // directories with file changes + all ancestors up to root
  const allDirsToProcess = new Set<string>()
  for (const dirPath of changesByDir.keys()) {
    allDirsToProcess.add(dirPath)
    // Add ancestors so subdirectory URL updates propagate to root
    // (the root is "" and is added on the final iteration)
    let current = dirPath
    while (current) {
      const parts = current.split("/")
      parts.pop()
      current = parts.join("/")
      allDirsToProcess.add(current)
    }
  }

  // Sort deepest-first so children are finished before their parents
  const sortedDirPaths = Array.from(allDirsToProcess).sort((a, b) => {
    const depthA = a ? a.split("/").length : 0
    const depthB = b ? b.split("/").length : 0
    return depthB - depthA
  })

  debug(`push: processing ${sortedDirPaths.length} directories (deepest first)`)

  // Track which directories were modified (for subdirectory URL propagation)
  const modifiedDirs = new Set<string>()
  let filesProcessed = 0
  const totalFiles = localChanges.length

  for (const dirPath of sortedDirPaths) {
    const dirChanges = changesByDir.get(dirPath) || []
    const dirLabel = dirPath || "(root)"

    if (dirChanges.length > 0) {
      debug(`push: directory "${dirLabel}": ${dirChanges.length} file changes`)
    }

    // Ensure directory document exists
    if (snapshot.rootDirectoryUrl) {
      await this.ensureDirectoryDocument(snapshot, dirPath)
    }

    // Process all file changes in this directory, accumulating entry edits
    // so the directory document can be updated in a single change below
    const newEntries: {name: string; url: AutomergeUrl}[] = []
    const updatedEntries: {name: string; url: AutomergeUrl}[] = []
    const deletedNames: string[] = []

    for (const change of dirChanges) {
      const fileName = change.path.split("/").pop() || ""
      const snapshotEntry = snapshot.files.get(change.path)
      filesProcessed++

      try {
        if (change.localContent === null && snapshotEntry) {
          // Delete file: known in snapshot but gone locally
          debug(`push: [${filesProcessed}/${totalFiles}] delete ${change.path}`)
          out.update(`Pushing local changes [${filesProcessed}/${totalFiles}] deleting ${change.path}`)
          await this.deleteRemoteFile(
            snapshotEntry.url,
            snapshot,
            change.path
          )
          deletedNames.push(fileName)
          this.snapshotManager.removeFileEntry(snapshot, change.path)
          result.filesChanged++
        } else if (!snapshotEntry) {
          // New file: present locally, not yet in snapshot
          debug(`push: [${filesProcessed}/${totalFiles}] create ${change.path} (${change.fileType})`)
          out.update(`Pushing local changes [${filesProcessed}/${totalFiles}] creating ${change.path}`)
          const handle = await this.createRemoteFile(change)
          if (handle) {
            const entryUrl = this.getEntryUrl(handle, change.path)
            newEntries.push({name: fileName, url: entryUrl})
            this.snapshotManager.updateFileEntry(
              snapshot,
              change.path,
              {
                path: joinAndNormalizePath(
                  this.rootPath,
                  change.path
                ),
                url: entryUrl,
                head: handle.heads(),
                extension: getFileExtension(change.path),
                mimeType: getEnhancedMimeType(change.path),
                // Artifact entries additionally record a content hash
                ...(this.isArtifactPath(change.path) && change.localContent
                  ? {contentHash: contentHash(change.localContent)}
                  : {}),
              }
            )
            result.filesChanged++
            debug(`push: created ${change.path} -> ${handle.url}`)
          }
        } else {
          // Update existing file
          const contentSize = typeof change.localContent === "string"
            ? `${change.localContent!.length} chars`
            : `${(change.localContent as Uint8Array).length} bytes`
          debug(`push: [${filesProcessed}/${totalFiles}] update ${change.path} (${contentSize})`)
          out.update(`Pushing local changes [${filesProcessed}/${totalFiles}] updating ${change.path}`)
          await this.updateRemoteFile(
            snapshotEntry.url,
            change.localContent!,
            snapshot,
            change.path
          )
          // Get current entry URL (updateRemoteFile updates snapshot and
          // may have replaced the document under a new URL)
          const updatedFileEntry = snapshot.files.get(change.path)
          if (updatedFileEntry) {
            const fileHandle =
              await this.repo.find<FileDocument>(
                getPlainUrl(updatedFileEntry.url)
              )
            updatedEntries.push({
              name: fileName,
              url: this.getEntryUrl(fileHandle, change.path),
            })
          }
          result.filesChanged++
        }
      } catch (error) {
        debug(`push: error processing ${change.path}: ${error}`)
        out.taskLine(`Error pushing ${change.path}: ${error}`, true)
        result.errors.push({
          path: change.path,
          operation: "local-to-remote",
          error: error as Error,
          recoverable: true,
        })
      }
    }

    // Collect subdirectory URL updates for child dirs already processed
    // (deepest-first order guarantees children were handled before us)
    const subdirUpdates: {name: string; url: AutomergeUrl}[] = []
    for (const modifiedDir of modifiedDirs) {
      // Check if modifiedDir is a direct child of dirPath
      const parts = modifiedDir.split("/")
      const childName = parts.pop() || ""
      const parentOfModified = parts.join("/")
      if (parentOfModified === dirPath) {
        const dirEntry = snapshot.directories.get(modifiedDir)
        if (dirEntry) {
          const childHandle =
            await this.repo.find<DirectoryDocument>(
              getPlainUrl(dirEntry.url)
            )
          subdirUpdates.push({
            name: childName,
            url: this.getEntryUrl(childHandle, modifiedDir),
          })
        }
      }
    }

    // Batch-update the directory document in a single change
    const hasChanges =
      newEntries.length > 0 ||
      updatedEntries.length > 0 ||
      deletedNames.length > 0 ||
      subdirUpdates.length > 0
    if (hasChanges && snapshot.rootDirectoryUrl) {
      debug(`push: batch-updating directory "${dirLabel}" (+${newEntries.length} new, ~${updatedEntries.length} updated, -${deletedNames.length} deleted, ${subdirUpdates.length} subdir URL updates)`)
      await this.batchUpdateDirectory(
        snapshot,
        dirPath,
        newEntries,
        updatedEntries,
        deletedNames,
        subdirUpdates
      )
      modifiedDirs.add(dirPath)
      result.directoriesChanged++
    }
  }

  debug(`push: complete - ${result.filesChanged} files, ${result.directoriesChanged} dirs changed, ${result.errors.length} errors`)
  return result
}
|
|
902
|
-
|
|
903
|
-
/**
|
|
904
|
-
* Phase 2: Pull remote changes to local filesystem
|
|
905
|
-
*/
|
|
906
|
-
private async pullRemoteChanges(
|
|
907
|
-
changes: DetectedChange[],
|
|
908
|
-
snapshot: SyncSnapshot
|
|
909
|
-
): Promise<SyncResult> {
|
|
910
|
-
const result: SyncResult = {
|
|
911
|
-
success: true,
|
|
912
|
-
filesChanged: 0,
|
|
913
|
-
directoriesChanged: 0,
|
|
914
|
-
errors: [],
|
|
915
|
-
warnings: [],
|
|
916
|
-
}
|
|
917
|
-
|
|
918
|
-
// Process remote changes
|
|
919
|
-
const remoteChanges = changes.filter(
|
|
920
|
-
c =>
|
|
921
|
-
c.changeType === ChangeType.REMOTE_ONLY ||
|
|
922
|
-
c.changeType === ChangeType.BOTH_CHANGED
|
|
923
|
-
)
|
|
924
|
-
|
|
925
|
-
// Sort changes by dependency order (parents before children)
|
|
926
|
-
const sortedChanges = this.sortChangesByDependency(remoteChanges)
|
|
927
|
-
|
|
928
|
-
for (const change of sortedChanges) {
|
|
929
|
-
try {
|
|
930
|
-
await this.applyRemoteChangeToLocal(change, snapshot)
|
|
931
|
-
result.filesChanged++
|
|
932
|
-
} catch (error) {
|
|
933
|
-
result.errors.push({
|
|
934
|
-
path: change.path,
|
|
935
|
-
operation: "remote-to-local",
|
|
936
|
-
error: error as Error,
|
|
937
|
-
recoverable: true,
|
|
938
|
-
})
|
|
939
|
-
}
|
|
940
|
-
}
|
|
941
|
-
|
|
942
|
-
return result
|
|
943
|
-
}
|
|
944
|
-
|
|
945
|
-
/**
 * Apply a single remote-side change to the local filesystem and record the
 * resulting state in the snapshot.
 *
 * @param change - detected change; must carry `remoteHead`
 * @param snapshot - sync snapshot, mutated in place
 * @throws Error when the change has no `remoteHead` to record
 */
private async applyRemoteChangeToLocal(
  change: DetectedChange,
  snapshot: SyncSnapshot
): Promise<void> {
  const localPath = joinAndNormalizePath(this.rootPath, change.path)

  if (!change.remoteHead) {
    throw new Error(
      `No remote head found for remote change to ${change.path}`
    )
  }

  // Check for null (empty string/Uint8Array are valid content)
  if (change.remoteContent === null) {
    // File was deleted remotely - mirror the deletion locally
    await removePath(localPath)
    this.snapshotManager.removeFileEntry(snapshot, change.path)
    return
  }

  // Create or update local file
  await writeFileContent(localPath, change.remoteContent)

  // Update or create snapshot entry for this file
  const snapshotEntry = snapshot.files.get(change.path)
  if (snapshotEntry) {
    // Update existing entry
    snapshotEntry.head = change.remoteHead
    // If the remote document was replaced (new URL), update the snapshot URL
    if (change.remoteUrl) {
      const fileHandle = await this.repo.find<FileDocument>(change.remoteUrl)
      snapshotEntry.url = this.getEntryUrl(fileHandle, change.path)
    }
  } else {
    // Create new snapshot entry for newly discovered remote file
    // We need to find the remote file's URL from the directory hierarchy
    if (snapshot.rootDirectoryUrl) {
      try {
        const fileEntry = await findFileInDirectoryHierarchy(
          this.repo,
          snapshot.rootDirectoryUrl,
          change.path
        )

        if (fileEntry) {
          const fileHandle = await this.repo.find<FileDocument>(fileEntry.url)
          const entryUrl = this.getEntryUrl(fileHandle, change.path)
          this.snapshotManager.updateFileEntry(snapshot, change.path, {
            path: localPath,
            url: entryUrl,
            head: change.remoteHead,
            extension: getFileExtension(change.path),
            mimeType: getEnhancedMimeType(change.path),
          })
        }
      } catch (error) {
        // Failed to update snapshot - file may have been deleted.
        // The local write above already succeeded, so just warn.
        out.taskLine(
          `Warning: Failed to update snapshot for remote file ${change.path}`,
          true
        )
      }
    }
  }
}
|
|
1013
|
-
|
|
1014
|
-
/**
 * Apply a detected file move/rename to the remote documents.
 *
 * Steps: drop the entry from the old parent directory, ensure the new
 * parent directory exists, update (or recreate) the FileDocument itself,
 * add the entry to the new parent, then rewrite the snapshot entries.
 *
 * Artifact files are recreated as a fresh document (their content is an
 * immutable snapshot, so there is nothing to diff); regular files are
 * mutated in place (rename plus optional content update in one change).
 *
 * No-op when the source path is not in the snapshot. Failures emit a
 * warning rather than throwing.
 *
 * @param move - source/destination paths plus optional new content when
 *   the file was modified while being moved
 * @param snapshot - sync snapshot, mutated in place
 */
private async applyMoveToRemote(
  move: MoveCandidate,
  snapshot: SyncSnapshot
): Promise<void> {
  const fromEntry = snapshot.files.get(move.fromPath)
  if (!fromEntry) return

  // Parse destination path into directory + file name
  const toParts = move.toPath.split("/")
  const toFileName = toParts.pop() || ""
  const toDirPath = toParts.join("/")

  // 1) Remove file entry from old directory document
  if (move.fromPath !== move.toPath) {
    await this.removeFileFromDirectory(snapshot, move.fromPath)
  }

  // 2) Ensure destination directory document exists
  await this.ensureDirectoryDocument(snapshot, toDirPath)

  // 3) Update the FileDocument name and content to match new location/state
  try {
    let entryUrl: AutomergeUrl
    let finalHeads: UrlHeads

    if (this.isArtifactPath(move.toPath)) {
      // Artifact files use RawString — no diffing needed, just create a fresh doc.
      // Fall back to the old document's content when the move carried none.
      const content = move.newContent !== undefined
        ? move.newContent
        : readDocContent((await (await this.repo.find<FileDocument>(getPlainUrl(fromEntry.url))).doc())?.content)
      const fakeChange: DetectedChange = {
        path: move.toPath,
        changeType: ChangeType.LOCAL_ONLY,
        fileType: content != null && typeof content === "string" ? FileType.TEXT : FileType.BINARY,
        localContent: content,
        remoteContent: null,
      }
      const newHandle = await this.createRemoteFile(fakeChange)
      // NOTE(review): if creation fails here the old directory entry was
      // already removed above — confirm this partial state is intended
      if (!newHandle) return
      entryUrl = this.getEntryUrl(newHandle, move.toPath)
      finalHeads = newHandle.heads()
    } else {
      // Use plain URL for mutable handle
      const handle = await this.repo.find<FileDocument>(
        getPlainUrl(fromEntry.url)
      )
      const heads = fromEntry.head

      // Update both name and content (if content changed during move)
      changeWithOptionalHeads(handle, heads, (doc: FileDocument) => {
        doc.name = toFileName

        // If new content is provided, update it (handles move + modification case)
        if (move.newContent !== undefined) {
          if (typeof move.newContent === "string") {
            updateTextContent(doc, ["content"], move.newContent)
          } else {
            doc.content = move.newContent
          }
        }
      })

      entryUrl = this.getEntryUrl(handle, move.toPath)
      finalHeads = handle.heads()

      // Track file handle for network sync
      this.handlesByPath.set(move.toPath, handle)
    }

    // 4) Add file entry to destination directory
    await this.addFileToDirectory(snapshot, move.toPath, entryUrl)

    // 5) Update snapshot entries: old path removed, new path points at the
    // (possibly new) document URL with its final heads
    this.snapshotManager.removeFileEntry(snapshot, move.fromPath)
    this.snapshotManager.updateFileEntry(snapshot, move.toPath, {
      ...fromEntry,
      path: joinAndNormalizePath(this.rootPath, move.toPath),
      url: entryUrl,
      head: finalHeads,
      ...(this.isArtifactPath(move.toPath) && move.newContent != null
        ? {contentHash: contentHash(move.newContent)}
        : {}),
    })
  } catch (e) {
    // Failed to update file name - file may have been deleted
    out.taskLine(
      `Warning: Failed to rename ${move.fromPath} to ${move.toPath}`,
      true
    )
  }
}
|
|
1108
|
-
|
|
1109
|
-
/**
|
|
1110
|
-
* Create new remote file document
|
|
1111
|
-
*/
|
|
1112
|
-
private async createRemoteFile(
|
|
1113
|
-
change: DetectedChange
|
|
1114
|
-
): Promise<DocHandle<FileDocument> | null> {
|
|
1115
|
-
if (change.localContent === null) return null
|
|
1116
|
-
|
|
1117
|
-
const isText = this.isTextContent(change.localContent)
|
|
1118
|
-
const isArtifact = this.isArtifactPath(change.path)
|
|
1119
|
-
|
|
1120
|
-
// For artifact files, store text as RawString (immutable snapshot).
|
|
1121
|
-
// For regular files, store as collaborative text (empty string + splice).
|
|
1122
|
-
const fileDoc: FileDocument = {
|
|
1123
|
-
"@patchwork": {type: "file"},
|
|
1124
|
-
name: change.path.split("/").pop() || "",
|
|
1125
|
-
extension: getFileExtension(change.path),
|
|
1126
|
-
mimeType: getEnhancedMimeType(change.path),
|
|
1127
|
-
content:
|
|
1128
|
-
isText && isArtifact
|
|
1129
|
-
? new A.RawString(change.localContent as string) as unknown as string
|
|
1130
|
-
: isText
|
|
1131
|
-
? ""
|
|
1132
|
-
: change.localContent,
|
|
1133
|
-
metadata: {
|
|
1134
|
-
permissions: 0o644,
|
|
1135
|
-
},
|
|
1136
|
-
}
|
|
1137
|
-
|
|
1138
|
-
const handle = this.repo.create(fileDoc)
|
|
1139
|
-
|
|
1140
|
-
// For non-artifact text files, splice in the content so it's stored as collaborative text
|
|
1141
|
-
if (isText && !isArtifact && typeof change.localContent === "string") {
|
|
1142
|
-
handle.change((doc: FileDocument) => {
|
|
1143
|
-
updateTextContent(doc, ["content"], change.localContent as string)
|
|
1144
|
-
})
|
|
1145
|
-
}
|
|
1146
|
-
|
|
1147
|
-
// Always track newly created files for network sync
|
|
1148
|
-
// (they always represent a change that needs to sync)
|
|
1149
|
-
this.handlesByPath.set(change.path, handle)
|
|
1150
|
-
|
|
1151
|
-
return handle
|
|
1152
|
-
}
|
|
1153
|
-
|
|
1154
|
-
/**
 * Update an existing remote file document with new local content.
 *
 * Three outcomes:
 * - artifact path, unavailable doc, or immutable-string content: the
 *   document is replaced by a freshly created one and the snapshot entry
 *   is repointed at the new URL;
 * - content unchanged: only the snapshot heads are refreshed;
 * - otherwise: content is changed at the snapshot's recorded heads and
 *   the new heads stored back.
 *
 * @param url - document URL from the snapshot (may be versioned)
 * @param content - new local content (text or bytes)
 * @param snapshot - sync snapshot, mutated in place
 * @param filePath - repo-relative path of the file
 * @throws Error when a content change is needed but the snapshot holds no
 *   heads for the document
 */
private async updateRemoteFile(
  url: AutomergeUrl,
  content: string | Uint8Array,
  snapshot: SyncSnapshot,
  filePath: string
): Promise<void> {
  // Use plain URL for mutable handle
  const handle = await this.repo.find<FileDocument>(getPlainUrl(url))

  // Check if content actually changed before tracking for sync
  const doc = await handle.doc()
  const rawContent = doc?.content

  // For artifact paths, always replace with a new document containing RawString.
  // For non-artifact paths with immutable strings, replace with mutable text.
  // In both cases we create a new document and update the snapshot URL.
  const isArtifact = this.isArtifactPath(filePath)
  if (
    isArtifact ||
    !doc ||
    (rawContent != null && A.isImmutableString(rawContent))
  ) {
    if (!isArtifact) {
      // Replacing a non-artifact document is unusual enough to surface
      out.taskLine(
        `Replacing ${!doc ? 'unavailable' : 'immutable string'} document for ${filePath}`,
        true
      )
    }
    const fakeChange: DetectedChange = {
      path: filePath,
      changeType: ChangeType.LOCAL_ONLY,
      fileType: this.isTextContent(content)
        ? FileType.TEXT
        : FileType.BINARY,
      localContent: content,
      remoteContent: null,
    }
    const newHandle = await this.createRemoteFile(fakeChange)
    if (newHandle) {
      const entryUrl = this.getEntryUrl(newHandle, filePath)
      this.snapshotManager.updateFileEntry(snapshot, filePath, {
        path: joinAndNormalizePath(this.rootPath, filePath),
        url: entryUrl,
        head: newHandle.heads(),
        extension: getFileExtension(filePath),
        mimeType: getEnhancedMimeType(filePath),
        ...(this.isArtifactPath(filePath)
          ? {contentHash: contentHash(content)}
          : {}),
      })
    }
    return
  }

  const currentContent = readDocContent(rawContent)
  const contentChanged = !isContentEqual(content, currentContent)

  // Update snapshot heads even when content is identical.
  // The map gets a fresh object; the captured `snapshotEntry` still holds
  // the PRE-refresh heads, which `changeAt` below relies on.
  const snapshotEntry = snapshot.files.get(filePath)
  if (snapshotEntry) {
    // Update snapshot with current document heads
    snapshot.files.set(filePath, {
      ...snapshotEntry,
      head: handle.heads(),
    })
  }

  if (!contentChanged) {
    // Content is identical, but we've updated the snapshot heads above
    // This prevents fresh change detection from seeing stale heads
    return
  }

  // These are the heads the snapshot last recorded (captured before the
  // refresh above), so the change is applied relative to that point
  const heads = snapshotEntry?.head

  if (!heads) {
    throw new Error(`No heads found for ${url}`)
  }

  handle.changeAt(heads, (doc: FileDocument) => {
    if (typeof content === "string") {
      updateTextContent(doc, ["content"], content)
    } else {
      doc.content = content
    }
  })

  // Update snapshot with new heads after content change
  if (snapshotEntry) {
    snapshot.files.set(filePath, {
      ...snapshotEntry,
      head: handle.heads(),
    })
  }

  // Only track files that actually changed content
  this.handlesByPath.set(filePath, handle)
}
|
|
1255
|
-
|
|
1256
|
-
/**
 * "Delete" a remote file document — intentionally a no-op.
 *
 * Automerge documents are never destroyed here: the caller removes the
 * file's entry from its parent directory document, which leaves this
 * document orphaned. Clearing the content via splice would cost one CRDT
 * op per character on large text files, so the content is deliberately
 * left in place.
 *
 * Parameters are unused; they are kept for symmetry with the other
 * remote-file operations.
 */
private async deleteRemoteFile(
  _url: AutomergeUrl,
  _snapshot?: SyncSnapshot,
  _filePath?: string
): Promise<void> {
  // In Automerge, we don't actually delete documents.
  // The file entry is removed from its parent directory, making the
  // document orphaned. Clearing content via splice is expensive for
  // large text files (every character is a CRDT op), so we skip it.
}
|
|
1269
|
-
|
|
1270
|
-
/**
|
|
1271
|
-
* Add file entry to appropriate directory document (maintains hierarchy)
|
|
1272
|
-
*/
|
|
1273
|
-
private async addFileToDirectory(
|
|
1274
|
-
snapshot: SyncSnapshot,
|
|
1275
|
-
filePath: string,
|
|
1276
|
-
fileUrl: AutomergeUrl
|
|
1277
|
-
): Promise<void> {
|
|
1278
|
-
if (!snapshot.rootDirectoryUrl) return
|
|
1279
|
-
|
|
1280
|
-
const pathParts = filePath.split("/")
|
|
1281
|
-
const fileName = pathParts.pop() || ""
|
|
1282
|
-
const directoryPath = pathParts.join("/")
|
|
1283
|
-
|
|
1284
|
-
// Get or create the parent directory document
|
|
1285
|
-
const parentDirUrl = await this.ensureDirectoryDocument(
|
|
1286
|
-
snapshot,
|
|
1287
|
-
directoryPath
|
|
1288
|
-
)
|
|
1289
|
-
|
|
1290
|
-
// Use plain URL for mutable handle
|
|
1291
|
-
const dirHandle = await this.repo.find<DirectoryDocument>(
|
|
1292
|
-
getPlainUrl(parentDirUrl)
|
|
1293
|
-
)
|
|
1294
|
-
|
|
1295
|
-
let didChange = false
|
|
1296
|
-
const snapshotEntry = snapshot.directories.get(directoryPath)
|
|
1297
|
-
const heads = snapshotEntry?.head
|
|
1298
|
-
changeWithOptionalHeads(dirHandle, heads, (doc: DirectoryDocument) => {
|
|
1299
|
-
const existingIndex = doc.docs.findIndex(
|
|
1300
|
-
entry => entry.name === fileName && entry.type === "file"
|
|
1301
|
-
)
|
|
1302
|
-
if (existingIndex === -1) {
|
|
1303
|
-
doc.docs.push({
|
|
1304
|
-
name: fileName,
|
|
1305
|
-
type: "file",
|
|
1306
|
-
url: fileUrl,
|
|
1307
|
-
})
|
|
1308
|
-
didChange = true
|
|
1309
|
-
}
|
|
1310
|
-
})
|
|
1311
|
-
// Always track the directory (even if unchanged) for proper leaf-first sync ordering
|
|
1312
|
-
this.handlesByPath.set(directoryPath, dirHandle)
|
|
1313
|
-
|
|
1314
|
-
if (didChange && snapshotEntry) {
|
|
1315
|
-
snapshotEntry.head = dirHandle.heads()
|
|
1316
|
-
}
|
|
1317
|
-
}
|
|
1318
|
-
|
|
1319
|
-
/**
 * Ensure a directory document exists for the given repo-relative path,
 * creating the hierarchy recursively as needed.
 *
 * Resolution order:
 *   1. Root path ("") -> the snapshot's root directory URL.
 *   2. Known path     -> URL cached in the snapshot.
 *   3. DISCOVERY      -> look for a matching "folder" entry in the parent
 *                        document on the server and adopt it.
 *   4. CREATE         -> make a fresh directory document and link it into
 *                        the parent.
 *
 * @param snapshot      Sync snapshot; updated in place with any directory
 *                      discovered or created here.
 * @param directoryPath Repo-relative directory path ("" means root).
 * @returns The entry URL to store for this directory.
 */
private async ensureDirectoryDocument(
  snapshot: SyncSnapshot,
  directoryPath: string
): Promise<AutomergeUrl> {
  // Root directory case
  if (!directoryPath || directoryPath === "") {
    return snapshot.rootDirectoryUrl!
  }

  // Check if we already have this directory in snapshot
  const existingDir = snapshot.directories.get(directoryPath)
  if (existingDir) {
    return existingDir.url
  }

  // Split path into parent and current directory name
  const pathParts = directoryPath.split("/")
  const currentDirName = pathParts.pop() || ""
  const parentPath = pathParts.join("/")

  // Ensure parent directory exists first (recursive)
  const parentDirUrl = await this.ensureDirectoryDocument(
    snapshot,
    parentPath
  )

  // DISCOVERY: Check if directory already exists in parent on server
  try {
    const parentHandle = await this.repo.find<DirectoryDocument>(parentDirUrl)
    const parentDoc = await parentHandle.doc()

    if (parentDoc) {
      const existingDirEntry = parentDoc.docs.find(
        (entry: {name: string; type: string; url: AutomergeUrl}) =>
          entry.name === currentDirName && entry.type === "folder"
      )

      if (existingDirEntry) {
        // Resolve the actual directory handle and use its current heads
        // Directory entries in parent docs may not carry valid heads
        try {
          const childDirHandle = await this.repo.find<DirectoryDocument>(
            existingDirEntry.url
          )

          // Track discovered directory for sync
          this.handlesByPath.set(directoryPath, childDirHandle)

          // Get appropriate URL for directory entry
          const entryUrl = this.getEntryUrl(childDirHandle, directoryPath)

          // Update snapshot with discovered directory
          this.snapshotManager.updateDirectoryEntry(snapshot, directoryPath, {
            path: joinAndNormalizePath(this.rootPath, directoryPath),
            url: entryUrl,
            head: childDirHandle.heads(),
            entries: [],
          })

          return entryUrl
        } catch (resolveErr) {
          // Failed to resolve directory - fall through to create a fresh directory document
        }
      }
    }
  } catch (error) {
    // Failed to check for existing directory - will create new one
    // (best-effort discovery; errors here are deliberately non-fatal)
  }

  // CREATE: Directory doesn't exist, create new one
  const dirDoc: DirectoryDocument = {
    "@patchwork": {type: "folder"},
    name: currentDirName,
    title: currentDirName,
    docs: [],
  }

  const dirHandle = this.repo.create(dirDoc)

  // Get appropriate URL for directory entry
  const dirEntryUrl = this.getEntryUrl(dirHandle, directoryPath)

  // Add this directory to its parent
  // Use plain URL for mutable handle
  const parentHandle = await this.repo.find<DirectoryDocument>(
    getPlainUrl(parentDirUrl)
  )

  let didChange = false
  parentHandle.change((doc: DirectoryDocument) => {
    // Double-check that entry doesn't exist (race condition protection)
    const existingIndex = doc.docs.findIndex(
      (entry: {name: string; type: string; url: AutomergeUrl}) =>
        entry.name === currentDirName && entry.type === "folder"
    )
    if (existingIndex === -1) {
      doc.docs.push({
        name: currentDirName,
        type: "folder",
        url: dirEntryUrl,
      })
      didChange = true
    }
  })

  // Track directory handles for sync
  this.handlesByPath.set(directoryPath, dirHandle)
  if (didChange) {
    // The parent mutated too: track its handle and refresh its heads.
    this.handlesByPath.set(parentPath, parentHandle)

    const parentSnapshotEntry = snapshot.directories.get(parentPath)
    if (parentSnapshotEntry) {
      parentSnapshotEntry.head = parentHandle.heads()
    }
  }

  // Update snapshot with new directory
  this.snapshotManager.updateDirectoryEntry(snapshot, directoryPath, {
    path: joinAndNormalizePath(this.rootPath, directoryPath),
    url: dirEntryUrl,
    head: dirHandle.heads(),
    entries: [],
  })

  return dirEntryUrl
}
|
|
1449
|
-
|
|
1450
|
-
/**
|
|
1451
|
-
* Remove file entry from directory document
|
|
1452
|
-
*/
|
|
1453
|
-
private async removeFileFromDirectory(
|
|
1454
|
-
snapshot: SyncSnapshot,
|
|
1455
|
-
filePath: string
|
|
1456
|
-
): Promise<void> {
|
|
1457
|
-
if (!snapshot.rootDirectoryUrl) return
|
|
1458
|
-
|
|
1459
|
-
const pathParts = filePath.split("/")
|
|
1460
|
-
const fileName = pathParts.pop() || ""
|
|
1461
|
-
const directoryPath = pathParts.join("/")
|
|
1462
|
-
|
|
1463
|
-
// Get the parent directory URL
|
|
1464
|
-
let parentDirUrl: AutomergeUrl
|
|
1465
|
-
if (!directoryPath || directoryPath === "") {
|
|
1466
|
-
parentDirUrl = snapshot.rootDirectoryUrl
|
|
1467
|
-
} else {
|
|
1468
|
-
const existingDir = snapshot.directories.get(directoryPath)
|
|
1469
|
-
if (!existingDir) {
|
|
1470
|
-
// Directory not found - file may already be removed
|
|
1471
|
-
return
|
|
1472
|
-
}
|
|
1473
|
-
parentDirUrl = existingDir.url
|
|
1474
|
-
}
|
|
1475
|
-
|
|
1476
|
-
try {
|
|
1477
|
-
// Use plain URL for mutable handle
|
|
1478
|
-
const dirHandle = await this.repo.find<DirectoryDocument>(
|
|
1479
|
-
getPlainUrl(parentDirUrl)
|
|
1480
|
-
)
|
|
1481
|
-
|
|
1482
|
-
// Track this handle for network sync waiting
|
|
1483
|
-
this.handlesByPath.set(directoryPath, dirHandle)
|
|
1484
|
-
const snapshotEntry = snapshot.directories.get(directoryPath)
|
|
1485
|
-
const heads = snapshotEntry?.head
|
|
1486
|
-
let didChange = false
|
|
1487
|
-
|
|
1488
|
-
changeWithOptionalHeads(dirHandle, heads, (doc: DirectoryDocument) => {
|
|
1489
|
-
const indexToRemove = doc.docs.findIndex(
|
|
1490
|
-
entry => entry.name === fileName && entry.type === "file"
|
|
1491
|
-
)
|
|
1492
|
-
if (indexToRemove !== -1) {
|
|
1493
|
-
doc.docs.splice(indexToRemove, 1)
|
|
1494
|
-
didChange = true
|
|
1495
|
-
out.taskLine(
|
|
1496
|
-
`Removed ${fileName} from ${
|
|
1497
|
-
formatRelativePath(directoryPath) || "root"
|
|
1498
|
-
}`
|
|
1499
|
-
)
|
|
1500
|
-
}
|
|
1501
|
-
})
|
|
1502
|
-
|
|
1503
|
-
if (didChange && snapshotEntry) {
|
|
1504
|
-
snapshotEntry.head = dirHandle.heads()
|
|
1505
|
-
}
|
|
1506
|
-
} catch (error) {
|
|
1507
|
-
throw error
|
|
1508
|
-
}
|
|
1509
|
-
}
|
|
1510
|
-
|
|
1511
|
-
/**
|
|
1512
|
-
* Batch-update a directory document in a single change: add new file entries,
|
|
1513
|
-
* update URLs for modified files, remove deleted entries, and update
|
|
1514
|
-
* subdirectory URLs. This replaces the separate per-file directory mutations
|
|
1515
|
-
* and the post-hoc URL update pass.
|
|
1516
|
-
*/
|
|
1517
|
-
private async batchUpdateDirectory(
|
|
1518
|
-
snapshot: SyncSnapshot,
|
|
1519
|
-
dirPath: string,
|
|
1520
|
-
newEntries: {name: string; url: AutomergeUrl}[],
|
|
1521
|
-
updatedEntries: {name: string; url: AutomergeUrl}[],
|
|
1522
|
-
deletedNames: string[],
|
|
1523
|
-
subdirUpdates: {name: string; url: AutomergeUrl}[]
|
|
1524
|
-
): Promise<void> {
|
|
1525
|
-
let dirUrl: AutomergeUrl
|
|
1526
|
-
if (!dirPath || dirPath === "") {
|
|
1527
|
-
dirUrl = snapshot.rootDirectoryUrl!
|
|
1528
|
-
} else {
|
|
1529
|
-
const dirEntry = snapshot.directories.get(dirPath)
|
|
1530
|
-
if (!dirEntry) return
|
|
1531
|
-
dirUrl = dirEntry.url
|
|
1532
|
-
}
|
|
1533
|
-
|
|
1534
|
-
const dirHandle = await this.repo.find<DirectoryDocument>(
|
|
1535
|
-
getPlainUrl(dirUrl)
|
|
1536
|
-
)
|
|
1537
|
-
|
|
1538
|
-
const snapshotEntry = snapshot.directories.get(dirPath)
|
|
1539
|
-
const heads = snapshotEntry?.head
|
|
1540
|
-
|
|
1541
|
-
// Determine directory name
|
|
1542
|
-
const dirName = dirPath ? dirPath.split("/").pop() || "" : path.basename(this.rootPath)
|
|
1543
|
-
|
|
1544
|
-
changeWithOptionalHeads(dirHandle, heads, (doc: DirectoryDocument) => {
|
|
1545
|
-
// Ensure name and title fields are set
|
|
1546
|
-
if (!doc.name) doc.name = dirName
|
|
1547
|
-
if (!doc.title) doc.title = dirName
|
|
1548
|
-
|
|
1549
|
-
// Remove deleted file entries
|
|
1550
|
-
for (const name of deletedNames) {
|
|
1551
|
-
const idx = doc.docs.findIndex(
|
|
1552
|
-
entry => entry.name === name && entry.type === "file"
|
|
1553
|
-
)
|
|
1554
|
-
if (idx !== -1) {
|
|
1555
|
-
doc.docs.splice(idx, 1)
|
|
1556
|
-
out.taskLine(
|
|
1557
|
-
`Removed ${name} from ${
|
|
1558
|
-
formatRelativePath(dirPath) || "root"
|
|
1559
|
-
}`
|
|
1560
|
-
)
|
|
1561
|
-
}
|
|
1562
|
-
}
|
|
1563
|
-
|
|
1564
|
-
// Update URLs for modified files
|
|
1565
|
-
for (const {name, url} of updatedEntries) {
|
|
1566
|
-
const idx = doc.docs.findIndex(
|
|
1567
|
-
entry => entry.name === name && entry.type === "file"
|
|
1568
|
-
)
|
|
1569
|
-
if (idx !== -1) {
|
|
1570
|
-
doc.docs[idx].url = url
|
|
1571
|
-
}
|
|
1572
|
-
}
|
|
1573
|
-
|
|
1574
|
-
// Add new file entries
|
|
1575
|
-
for (const {name, url} of newEntries) {
|
|
1576
|
-
const existing = doc.docs.findIndex(
|
|
1577
|
-
entry => entry.name === name && entry.type === "file"
|
|
1578
|
-
)
|
|
1579
|
-
if (existing === -1) {
|
|
1580
|
-
doc.docs.push({name, type: "file", url})
|
|
1581
|
-
} else {
|
|
1582
|
-
// Entry already exists (e.g. from immutable string replacement)
|
|
1583
|
-
doc.docs[existing].url = url
|
|
1584
|
-
}
|
|
1585
|
-
}
|
|
1586
|
-
|
|
1587
|
-
// Update subdirectory URLs with current heads
|
|
1588
|
-
for (const {name, url} of subdirUpdates) {
|
|
1589
|
-
const idx = doc.docs.findIndex(
|
|
1590
|
-
entry => entry.name === name && entry.type === "folder"
|
|
1591
|
-
)
|
|
1592
|
-
if (idx !== -1) {
|
|
1593
|
-
doc.docs[idx].url = url
|
|
1594
|
-
}
|
|
1595
|
-
}
|
|
1596
|
-
})
|
|
1597
|
-
|
|
1598
|
-
// Track directory handle and update snapshot heads
|
|
1599
|
-
this.handlesByPath.set(dirPath, dirHandle)
|
|
1600
|
-
if (snapshotEntry) {
|
|
1601
|
-
snapshotEntry.head = dirHandle.heads()
|
|
1602
|
-
}
|
|
1603
|
-
}
|
|
1604
|
-
|
|
1605
|
-
/**
|
|
1606
|
-
* Sort changes by dependency order
|
|
1607
|
-
*/
|
|
1608
|
-
private sortChangesByDependency(changes: DetectedChange[]): DetectedChange[] {
|
|
1609
|
-
// Sort by path depth (shallower paths first)
|
|
1610
|
-
return changes.sort((a, b) => {
|
|
1611
|
-
const depthA = a.path.split("/").length
|
|
1612
|
-
const depthB = b.path.split("/").length
|
|
1613
|
-
return depthA - depthB
|
|
1614
|
-
})
|
|
1615
|
-
}
|
|
1616
|
-
|
|
1617
|
-
/**
|
|
1618
|
-
* Get sync status
|
|
1619
|
-
*/
|
|
1620
|
-
async getStatus(): Promise<{
|
|
1621
|
-
snapshot: SyncSnapshot | null
|
|
1622
|
-
hasChanges: boolean
|
|
1623
|
-
changeCount: number
|
|
1624
|
-
lastSync: Date | null
|
|
1625
|
-
}> {
|
|
1626
|
-
const snapshot = await this.snapshotManager.load()
|
|
1627
|
-
|
|
1628
|
-
if (!snapshot) {
|
|
1629
|
-
return {
|
|
1630
|
-
snapshot: null,
|
|
1631
|
-
hasChanges: false,
|
|
1632
|
-
changeCount: 0,
|
|
1633
|
-
lastSync: null,
|
|
1634
|
-
}
|
|
1635
|
-
}
|
|
1636
|
-
|
|
1637
|
-
const changes = await this.changeDetector.detectChanges(snapshot)
|
|
1638
|
-
|
|
1639
|
-
return {
|
|
1640
|
-
snapshot,
|
|
1641
|
-
hasChanges: changes.length > 0,
|
|
1642
|
-
changeCount: changes.length,
|
|
1643
|
-
lastSync: new Date(snapshot.timestamp),
|
|
1644
|
-
}
|
|
1645
|
-
}
|
|
1646
|
-
|
|
1647
|
-
/**
|
|
1648
|
-
* Preview changes without applying them
|
|
1649
|
-
*/
|
|
1650
|
-
async previewChanges(): Promise<{
|
|
1651
|
-
changes: DetectedChange[]
|
|
1652
|
-
moves: MoveCandidate[]
|
|
1653
|
-
summary: string
|
|
1654
|
-
}> {
|
|
1655
|
-
const snapshot = await this.snapshotManager.load()
|
|
1656
|
-
if (!snapshot) {
|
|
1657
|
-
return {
|
|
1658
|
-
changes: [],
|
|
1659
|
-
moves: [],
|
|
1660
|
-
summary: "No snapshot found - run init first",
|
|
1661
|
-
}
|
|
1662
|
-
}
|
|
1663
|
-
|
|
1664
|
-
const changes = await this.changeDetector.detectChanges(snapshot)
|
|
1665
|
-
const {moves} = await this.moveDetector.detectMoves(changes, snapshot)
|
|
1666
|
-
|
|
1667
|
-
const summary = this.generateChangeSummary(changes, moves)
|
|
1668
|
-
|
|
1669
|
-
return {changes, moves, summary}
|
|
1670
|
-
}
|
|
1671
|
-
|
|
1672
|
-
/**
 * Build a short comma-separated description of pending work, e.g.
 * "2 local changes, 1 remote change, 1 conflict".
 *
 * A file changed on both sides counts toward the local, remote, AND
 * conflict totals (mirrors how the counts were always computed).
 *
 * @param changes Detected changes.
 * @param moves   Candidate moves.
 * @returns Summary line, or "No changes detected" when everything is clean.
 */
private generateChangeSummary(
  changes: DetectedChange[],
  moves: MoveCandidate[]
): string {
  const countWhere = (pred: (c: DetectedChange) => boolean): number =>
    changes.filter(pred).length

  const localChanges = countWhere(
    c =>
      c.changeType === ChangeType.LOCAL_ONLY ||
      c.changeType === ChangeType.BOTH_CHANGED
  )
  const remoteChanges = countWhere(
    c =>
      c.changeType === ChangeType.REMOTE_ONLY ||
      c.changeType === ChangeType.BOTH_CHANGED
  )
  const conflicts = countWhere(
    c => c.changeType === ChangeType.BOTH_CHANGED
  )

  // "1 conflict" vs "2 conflicts" — naive s-suffix, as before.
  const pluralize = (count: number, noun: string): string =>
    `${count} ${noun}${count > 1 ? "s" : ""}`

  const parts: string[] = []
  if (localChanges > 0) parts.push(pluralize(localChanges, "local change"))
  if (remoteChanges > 0) parts.push(pluralize(remoteChanges, "remote change"))
  if (moves.length > 0) parts.push(pluralize(moves.length, "potential move"))
  if (conflicts > 0) parts.push(pluralize(conflicts, "conflict"))

  return parts.length === 0 ? "No changes detected" : parts.join(", ")
}
|
|
1721
|
-
|
|
1722
|
-
/**
|
|
1723
|
-
* Update the lastSyncAt timestamp on the root directory document
|
|
1724
|
-
*/
|
|
1725
|
-
private async touchRootDirectory(snapshot: SyncSnapshot): Promise<void> {
|
|
1726
|
-
if (!snapshot.rootDirectoryUrl) {
|
|
1727
|
-
return
|
|
1728
|
-
}
|
|
1729
|
-
|
|
1730
|
-
try {
|
|
1731
|
-
const rootHandle = await this.repo.find<DirectoryDocument>(
|
|
1732
|
-
snapshot.rootDirectoryUrl
|
|
1733
|
-
)
|
|
1734
|
-
|
|
1735
|
-
const snapshotEntry = snapshot.directories.get("")
|
|
1736
|
-
const heads = snapshotEntry?.head
|
|
1737
|
-
|
|
1738
|
-
const timestamp = Date.now()
|
|
1739
|
-
|
|
1740
|
-
const version = require("../../package.json").version
|
|
1741
|
-
|
|
1742
|
-
changeWithOptionalHeads(rootHandle, heads, (doc: DirectoryDocument) => {
|
|
1743
|
-
doc.lastSyncAt = timestamp
|
|
1744
|
-
doc.with = `pushwork@${version}`
|
|
1745
|
-
})
|
|
1746
|
-
|
|
1747
|
-
// Track root directory for network sync
|
|
1748
|
-
this.handlesByPath.set("", rootHandle)
|
|
1749
|
-
|
|
1750
|
-
if (snapshotEntry) {
|
|
1751
|
-
snapshotEntry.head = rootHandle.heads()
|
|
1752
|
-
}
|
|
1753
|
-
} catch (error) {
|
|
1754
|
-
// Failed to update root directory timestamp
|
|
1755
|
-
}
|
|
1756
|
-
}
|
|
1757
|
-
|
|
1758
|
-
}
|