gitx.do 0.1.1 → 0.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +40 -353
- package/dist/do/logger.d.ts +50 -0
- package/dist/do/logger.d.ts.map +1 -0
- package/dist/do/logger.js +122 -0
- package/dist/do/logger.js.map +1 -0
- package/dist/{durable-object → do}/schema.d.ts +3 -3
- package/dist/do/schema.d.ts.map +1 -0
- package/dist/{durable-object → do}/schema.js +4 -3
- package/dist/do/schema.js.map +1 -0
- package/dist/do/types.d.ts +267 -0
- package/dist/do/types.d.ts.map +1 -0
- package/dist/do/types.js +62 -0
- package/dist/do/types.js.map +1 -0
- package/dist/index.d.ts +15 -469
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +39 -481
- package/dist/index.js.map +1 -1
- package/dist/mcp/auth.d.ts +77 -0
- package/dist/mcp/auth.d.ts.map +1 -0
- package/dist/mcp/auth.js +278 -0
- package/dist/mcp/auth.js.map +1 -0
- package/dist/mcp/index.d.ts +13 -0
- package/dist/mcp/index.d.ts.map +1 -0
- package/dist/mcp/index.js +19 -0
- package/dist/mcp/index.js.map +1 -0
- package/dist/mcp/server.d.ts +200 -0
- package/dist/mcp/server.d.ts.map +1 -0
- package/dist/mcp/server.js +275 -0
- package/dist/mcp/server.js.map +1 -0
- package/dist/mcp/tool-registry.d.ts +47 -0
- package/dist/mcp/tool-registry.d.ts.map +1 -0
- package/dist/mcp/tool-registry.js +284 -0
- package/dist/mcp/tool-registry.js.map +1 -0
- package/dist/mcp/tools.d.ts +103 -515
- package/dist/mcp/tools.d.ts.map +1 -1
- package/dist/mcp/tools.js +676 -3087
- package/dist/mcp/tools.js.map +1 -1
- package/dist/mcp/types.d.ts +124 -0
- package/dist/mcp/types.d.ts.map +1 -0
- package/dist/mcp/types.js +9 -0
- package/dist/mcp/types.js.map +1 -0
- package/package.json +19 -21
- package/dist/cli/commands/add.d.ts +0 -176
- package/dist/cli/commands/add.d.ts.map +0 -1
- package/dist/cli/commands/add.js +0 -979
- package/dist/cli/commands/add.js.map +0 -1
- package/dist/cli/commands/blame.d.ts +0 -259
- package/dist/cli/commands/blame.d.ts.map +0 -1
- package/dist/cli/commands/blame.js +0 -609
- package/dist/cli/commands/blame.js.map +0 -1
- package/dist/cli/commands/branch.d.ts +0 -249
- package/dist/cli/commands/branch.d.ts.map +0 -1
- package/dist/cli/commands/branch.js +0 -693
- package/dist/cli/commands/branch.js.map +0 -1
- package/dist/cli/commands/checkout.d.ts +0 -73
- package/dist/cli/commands/checkout.d.ts.map +0 -1
- package/dist/cli/commands/checkout.js +0 -725
- package/dist/cli/commands/checkout.js.map +0 -1
- package/dist/cli/commands/commit.d.ts +0 -182
- package/dist/cli/commands/commit.d.ts.map +0 -1
- package/dist/cli/commands/commit.js +0 -457
- package/dist/cli/commands/commit.js.map +0 -1
- package/dist/cli/commands/diff.d.ts +0 -464
- package/dist/cli/commands/diff.d.ts.map +0 -1
- package/dist/cli/commands/diff.js +0 -959
- package/dist/cli/commands/diff.js.map +0 -1
- package/dist/cli/commands/log.d.ts +0 -239
- package/dist/cli/commands/log.d.ts.map +0 -1
- package/dist/cli/commands/log.js +0 -535
- package/dist/cli/commands/log.js.map +0 -1
- package/dist/cli/commands/merge.d.ts +0 -106
- package/dist/cli/commands/merge.d.ts.map +0 -1
- package/dist/cli/commands/merge.js +0 -852
- package/dist/cli/commands/merge.js.map +0 -1
- package/dist/cli/commands/review.d.ts +0 -457
- package/dist/cli/commands/review.d.ts.map +0 -1
- package/dist/cli/commands/review.js +0 -558
- package/dist/cli/commands/review.js.map +0 -1
- package/dist/cli/commands/stash.d.ts +0 -157
- package/dist/cli/commands/stash.d.ts.map +0 -1
- package/dist/cli/commands/stash.js +0 -655
- package/dist/cli/commands/stash.js.map +0 -1
- package/dist/cli/commands/status.d.ts +0 -269
- package/dist/cli/commands/status.d.ts.map +0 -1
- package/dist/cli/commands/status.js +0 -492
- package/dist/cli/commands/status.js.map +0 -1
- package/dist/cli/commands/web.d.ts +0 -199
- package/dist/cli/commands/web.d.ts.map +0 -1
- package/dist/cli/commands/web.js +0 -697
- package/dist/cli/commands/web.js.map +0 -1
- package/dist/cli/fs-adapter.d.ts +0 -656
- package/dist/cli/fs-adapter.d.ts.map +0 -1
- package/dist/cli/fs-adapter.js +0 -1177
- package/dist/cli/fs-adapter.js.map +0 -1
- package/dist/cli/fsx-cli-adapter.d.ts +0 -359
- package/dist/cli/fsx-cli-adapter.d.ts.map +0 -1
- package/dist/cli/fsx-cli-adapter.js +0 -619
- package/dist/cli/fsx-cli-adapter.js.map +0 -1
- package/dist/cli/index.d.ts +0 -387
- package/dist/cli/index.d.ts.map +0 -1
- package/dist/cli/index.js +0 -579
- package/dist/cli/index.js.map +0 -1
- package/dist/cli/ui/components/DiffView.d.ts +0 -12
- package/dist/cli/ui/components/DiffView.d.ts.map +0 -1
- package/dist/cli/ui/components/DiffView.js +0 -11
- package/dist/cli/ui/components/DiffView.js.map +0 -1
- package/dist/cli/ui/components/ErrorDisplay.d.ts +0 -10
- package/dist/cli/ui/components/ErrorDisplay.d.ts.map +0 -1
- package/dist/cli/ui/components/ErrorDisplay.js +0 -11
- package/dist/cli/ui/components/ErrorDisplay.js.map +0 -1
- package/dist/cli/ui/components/FuzzySearch.d.ts +0 -15
- package/dist/cli/ui/components/FuzzySearch.d.ts.map +0 -1
- package/dist/cli/ui/components/FuzzySearch.js +0 -12
- package/dist/cli/ui/components/FuzzySearch.js.map +0 -1
- package/dist/cli/ui/components/LoadingSpinner.d.ts +0 -10
- package/dist/cli/ui/components/LoadingSpinner.d.ts.map +0 -1
- package/dist/cli/ui/components/LoadingSpinner.js +0 -10
- package/dist/cli/ui/components/LoadingSpinner.js.map +0 -1
- package/dist/cli/ui/components/NavigationList.d.ts +0 -14
- package/dist/cli/ui/components/NavigationList.d.ts.map +0 -1
- package/dist/cli/ui/components/NavigationList.js +0 -11
- package/dist/cli/ui/components/NavigationList.js.map +0 -1
- package/dist/cli/ui/components/ScrollableContent.d.ts +0 -13
- package/dist/cli/ui/components/ScrollableContent.d.ts.map +0 -1
- package/dist/cli/ui/components/ScrollableContent.js +0 -11
- package/dist/cli/ui/components/ScrollableContent.js.map +0 -1
- package/dist/cli/ui/components/index.d.ts +0 -7
- package/dist/cli/ui/components/index.d.ts.map +0 -1
- package/dist/cli/ui/components/index.js +0 -9
- package/dist/cli/ui/components/index.js.map +0 -1
- package/dist/cli/ui/terminal-ui.d.ts +0 -85
- package/dist/cli/ui/terminal-ui.d.ts.map +0 -1
- package/dist/cli/ui/terminal-ui.js +0 -121
- package/dist/cli/ui/terminal-ui.js.map +0 -1
- package/dist/do/BashModule.d.ts +0 -871
- package/dist/do/BashModule.d.ts.map +0 -1
- package/dist/do/BashModule.js +0 -1143
- package/dist/do/BashModule.js.map +0 -1
- package/dist/do/FsModule.d.ts +0 -612
- package/dist/do/FsModule.d.ts.map +0 -1
- package/dist/do/FsModule.js +0 -1120
- package/dist/do/FsModule.js.map +0 -1
- package/dist/do/GitModule.d.ts +0 -635
- package/dist/do/GitModule.d.ts.map +0 -1
- package/dist/do/GitModule.js +0 -784
- package/dist/do/GitModule.js.map +0 -1
- package/dist/do/GitRepoDO.d.ts +0 -281
- package/dist/do/GitRepoDO.d.ts.map +0 -1
- package/dist/do/GitRepoDO.js +0 -479
- package/dist/do/GitRepoDO.js.map +0 -1
- package/dist/do/bash-ast.d.ts +0 -246
- package/dist/do/bash-ast.d.ts.map +0 -1
- package/dist/do/bash-ast.js +0 -888
- package/dist/do/bash-ast.js.map +0 -1
- package/dist/do/container-executor.d.ts +0 -491
- package/dist/do/container-executor.d.ts.map +0 -1
- package/dist/do/container-executor.js +0 -731
- package/dist/do/container-executor.js.map +0 -1
- package/dist/do/index.d.ts +0 -53
- package/dist/do/index.d.ts.map +0 -1
- package/dist/do/index.js +0 -91
- package/dist/do/index.js.map +0 -1
- package/dist/do/tiered-storage.d.ts +0 -403
- package/dist/do/tiered-storage.d.ts.map +0 -1
- package/dist/do/tiered-storage.js +0 -689
- package/dist/do/tiered-storage.js.map +0 -1
- package/dist/do/withBash.d.ts +0 -231
- package/dist/do/withBash.d.ts.map +0 -1
- package/dist/do/withBash.js +0 -244
- package/dist/do/withBash.js.map +0 -1
- package/dist/do/withFs.d.ts +0 -237
- package/dist/do/withFs.d.ts.map +0 -1
- package/dist/do/withFs.js +0 -387
- package/dist/do/withFs.js.map +0 -1
- package/dist/do/withGit.d.ts +0 -180
- package/dist/do/withGit.d.ts.map +0 -1
- package/dist/do/withGit.js +0 -271
- package/dist/do/withGit.js.map +0 -1
- package/dist/durable-object/object-store.d.ts +0 -633
- package/dist/durable-object/object-store.d.ts.map +0 -1
- package/dist/durable-object/object-store.js +0 -1164
- package/dist/durable-object/object-store.js.map +0 -1
- package/dist/durable-object/schema.d.ts.map +0 -1
- package/dist/durable-object/schema.js.map +0 -1
- package/dist/durable-object/wal.d.ts +0 -416
- package/dist/durable-object/wal.d.ts.map +0 -1
- package/dist/durable-object/wal.js +0 -445
- package/dist/durable-object/wal.js.map +0 -1
- package/dist/mcp/adapter.d.ts +0 -772
- package/dist/mcp/adapter.d.ts.map +0 -1
- package/dist/mcp/adapter.js +0 -895
- package/dist/mcp/adapter.js.map +0 -1
- package/dist/mcp/sandbox/miniflare-evaluator.d.ts +0 -22
- package/dist/mcp/sandbox/miniflare-evaluator.d.ts.map +0 -1
- package/dist/mcp/sandbox/miniflare-evaluator.js +0 -140
- package/dist/mcp/sandbox/miniflare-evaluator.js.map +0 -1
- package/dist/mcp/sandbox/object-store-proxy.d.ts +0 -32
- package/dist/mcp/sandbox/object-store-proxy.d.ts.map +0 -1
- package/dist/mcp/sandbox/object-store-proxy.js +0 -30
- package/dist/mcp/sandbox/object-store-proxy.js.map +0 -1
- package/dist/mcp/sandbox/template.d.ts +0 -17
- package/dist/mcp/sandbox/template.d.ts.map +0 -1
- package/dist/mcp/sandbox/template.js +0 -71
- package/dist/mcp/sandbox/template.js.map +0 -1
- package/dist/mcp/sandbox.d.ts +0 -764
- package/dist/mcp/sandbox.d.ts.map +0 -1
- package/dist/mcp/sandbox.js +0 -1362
- package/dist/mcp/sandbox.js.map +0 -1
- package/dist/mcp/sdk-adapter.d.ts +0 -835
- package/dist/mcp/sdk-adapter.d.ts.map +0 -1
- package/dist/mcp/sdk-adapter.js +0 -974
- package/dist/mcp/sdk-adapter.js.map +0 -1
- package/dist/mcp/tools/do.d.ts +0 -32
- package/dist/mcp/tools/do.d.ts.map +0 -1
- package/dist/mcp/tools/do.js +0 -117
- package/dist/mcp/tools/do.js.map +0 -1
- package/dist/ops/blame.d.ts +0 -551
- package/dist/ops/blame.d.ts.map +0 -1
- package/dist/ops/blame.js +0 -1037
- package/dist/ops/blame.js.map +0 -1
- package/dist/ops/branch.d.ts +0 -766
- package/dist/ops/branch.d.ts.map +0 -1
- package/dist/ops/branch.js +0 -950
- package/dist/ops/branch.js.map +0 -1
- package/dist/ops/commit-traversal.d.ts +0 -349
- package/dist/ops/commit-traversal.d.ts.map +0 -1
- package/dist/ops/commit-traversal.js +0 -821
- package/dist/ops/commit-traversal.js.map +0 -1
- package/dist/ops/commit.d.ts +0 -555
- package/dist/ops/commit.d.ts.map +0 -1
- package/dist/ops/commit.js +0 -826
- package/dist/ops/commit.js.map +0 -1
- package/dist/ops/merge-base.d.ts +0 -397
- package/dist/ops/merge-base.d.ts.map +0 -1
- package/dist/ops/merge-base.js +0 -691
- package/dist/ops/merge-base.js.map +0 -1
- package/dist/ops/merge.d.ts +0 -855
- package/dist/ops/merge.d.ts.map +0 -1
- package/dist/ops/merge.js +0 -1551
- package/dist/ops/merge.js.map +0 -1
- package/dist/ops/tag.d.ts +0 -247
- package/dist/ops/tag.d.ts.map +0 -1
- package/dist/ops/tag.js +0 -649
- package/dist/ops/tag.js.map +0 -1
- package/dist/ops/tree-builder.d.ts +0 -178
- package/dist/ops/tree-builder.d.ts.map +0 -1
- package/dist/ops/tree-builder.js +0 -271
- package/dist/ops/tree-builder.js.map +0 -1
- package/dist/ops/tree-diff.d.ts +0 -291
- package/dist/ops/tree-diff.d.ts.map +0 -1
- package/dist/ops/tree-diff.js +0 -705
- package/dist/ops/tree-diff.js.map +0 -1
- package/dist/pack/delta.d.ts +0 -248
- package/dist/pack/delta.d.ts.map +0 -1
- package/dist/pack/delta.js +0 -740
- package/dist/pack/delta.js.map +0 -1
- package/dist/pack/format.d.ts +0 -446
- package/dist/pack/format.d.ts.map +0 -1
- package/dist/pack/format.js +0 -572
- package/dist/pack/format.js.map +0 -1
- package/dist/pack/full-generation.d.ts +0 -612
- package/dist/pack/full-generation.d.ts.map +0 -1
- package/dist/pack/full-generation.js +0 -1378
- package/dist/pack/full-generation.js.map +0 -1
- package/dist/pack/generation.d.ts +0 -441
- package/dist/pack/generation.d.ts.map +0 -1
- package/dist/pack/generation.js +0 -707
- package/dist/pack/generation.js.map +0 -1
- package/dist/pack/index.d.ts +0 -502
- package/dist/pack/index.d.ts.map +0 -1
- package/dist/pack/index.js +0 -833
- package/dist/pack/index.js.map +0 -1
- package/dist/refs/branch.d.ts +0 -683
- package/dist/refs/branch.d.ts.map +0 -1
- package/dist/refs/branch.js +0 -881
- package/dist/refs/branch.js.map +0 -1
- package/dist/refs/storage.d.ts +0 -833
- package/dist/refs/storage.d.ts.map +0 -1
- package/dist/refs/storage.js +0 -1023
- package/dist/refs/storage.js.map +0 -1
- package/dist/refs/tag.d.ts +0 -860
- package/dist/refs/tag.d.ts.map +0 -1
- package/dist/refs/tag.js +0 -996
- package/dist/refs/tag.js.map +0 -1
- package/dist/storage/backend.d.ts +0 -425
- package/dist/storage/backend.d.ts.map +0 -1
- package/dist/storage/backend.js +0 -41
- package/dist/storage/backend.js.map +0 -1
- package/dist/storage/fsx-adapter.d.ts +0 -204
- package/dist/storage/fsx-adapter.d.ts.map +0 -1
- package/dist/storage/fsx-adapter.js +0 -518
- package/dist/storage/fsx-adapter.js.map +0 -1
- package/dist/storage/lru-cache.d.ts +0 -691
- package/dist/storage/lru-cache.d.ts.map +0 -1
- package/dist/storage/lru-cache.js +0 -813
- package/dist/storage/lru-cache.js.map +0 -1
- package/dist/storage/object-index.d.ts +0 -585
- package/dist/storage/object-index.d.ts.map +0 -1
- package/dist/storage/object-index.js +0 -532
- package/dist/storage/object-index.js.map +0 -1
- package/dist/storage/r2-pack.d.ts +0 -1257
- package/dist/storage/r2-pack.d.ts.map +0 -1
- package/dist/storage/r2-pack.js +0 -1773
- package/dist/storage/r2-pack.js.map +0 -1
- package/dist/tiered/cdc-pipeline.d.ts +0 -1888
- package/dist/tiered/cdc-pipeline.d.ts.map +0 -1
- package/dist/tiered/cdc-pipeline.js +0 -1880
- package/dist/tiered/cdc-pipeline.js.map +0 -1
- package/dist/tiered/migration.d.ts +0 -1104
- package/dist/tiered/migration.d.ts.map +0 -1
- package/dist/tiered/migration.js +0 -1217
- package/dist/tiered/migration.js.map +0 -1
- package/dist/tiered/parquet-writer.d.ts +0 -1145
- package/dist/tiered/parquet-writer.d.ts.map +0 -1
- package/dist/tiered/parquet-writer.js +0 -1183
- package/dist/tiered/parquet-writer.js.map +0 -1
- package/dist/tiered/read-path.d.ts +0 -835
- package/dist/tiered/read-path.d.ts.map +0 -1
- package/dist/tiered/read-path.js +0 -487
- package/dist/tiered/read-path.js.map +0 -1
- package/dist/types/capability.d.ts +0 -1385
- package/dist/types/capability.d.ts.map +0 -1
- package/dist/types/capability.js +0 -36
- package/dist/types/capability.js.map +0 -1
- package/dist/types/index.d.ts +0 -13
- package/dist/types/index.d.ts.map +0 -1
- package/dist/types/index.js +0 -18
- package/dist/types/index.js.map +0 -1
- package/dist/types/interfaces.d.ts +0 -673
- package/dist/types/interfaces.d.ts.map +0 -1
- package/dist/types/interfaces.js +0 -26
- package/dist/types/interfaces.js.map +0 -1
- package/dist/types/objects.d.ts +0 -692
- package/dist/types/objects.d.ts.map +0 -1
- package/dist/types/objects.js +0 -837
- package/dist/types/objects.js.map +0 -1
- package/dist/types/storage.d.ts +0 -603
- package/dist/types/storage.d.ts.map +0 -1
- package/dist/types/storage.js +0 -191
- package/dist/types/storage.js.map +0 -1
- package/dist/types/worker-loader.d.ts +0 -60
- package/dist/types/worker-loader.d.ts.map +0 -1
- package/dist/types/worker-loader.js +0 -62
- package/dist/types/worker-loader.js.map +0 -1
- package/dist/utils/hash.d.ts +0 -198
- package/dist/utils/hash.d.ts.map +0 -1
- package/dist/utils/hash.js +0 -272
- package/dist/utils/hash.js.map +0 -1
- package/dist/utils/sha1.d.ts +0 -325
- package/dist/utils/sha1.d.ts.map +0 -1
- package/dist/utils/sha1.js +0 -635
- package/dist/utils/sha1.js.map +0 -1
- package/dist/wire/capabilities.d.ts +0 -1044
- package/dist/wire/capabilities.d.ts.map +0 -1
- package/dist/wire/capabilities.js +0 -941
- package/dist/wire/capabilities.js.map +0 -1
- package/dist/wire/path-security.d.ts +0 -157
- package/dist/wire/path-security.d.ts.map +0 -1
- package/dist/wire/path-security.js +0 -307
- package/dist/wire/path-security.js.map +0 -1
- package/dist/wire/pkt-line.d.ts +0 -345
- package/dist/wire/pkt-line.d.ts.map +0 -1
- package/dist/wire/pkt-line.js +0 -381
- package/dist/wire/pkt-line.js.map +0 -1
- package/dist/wire/receive-pack.d.ts +0 -1059
- package/dist/wire/receive-pack.d.ts.map +0 -1
- package/dist/wire/receive-pack.js +0 -1414
- package/dist/wire/receive-pack.js.map +0 -1
- package/dist/wire/smart-http.d.ts +0 -799
- package/dist/wire/smart-http.d.ts.map +0 -1
- package/dist/wire/smart-http.js +0 -945
- package/dist/wire/smart-http.js.map +0 -1
- package/dist/wire/upload-pack.d.ts +0 -727
- package/dist/wire/upload-pack.d.ts.map +0 -1
- package/dist/wire/upload-pack.js +0 -1141
- package/dist/wire/upload-pack.js.map +0 -1
package/dist/ops/merge.js
DELETED
|
@@ -1,1551 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* @fileoverview Three-way Merge Implementation for Git
|
|
3
|
-
*
|
|
4
|
-
* This module provides a complete implementation of Git's three-way merge algorithm,
|
|
5
|
-
* enabling branch merging with automatic conflict detection and resolution capabilities.
|
|
6
|
-
*
|
|
7
|
-
* ## Overview
|
|
8
|
-
*
|
|
9
|
-
* The three-way merge algorithm works by:
|
|
10
|
-
* 1. Finding the common ancestor (merge base) of two commits
|
|
11
|
-
* 2. Comparing both branches against this base to identify changes
|
|
12
|
-
* 3. Automatically merging non-conflicting changes
|
|
13
|
-
* 4. Detecting and reporting conflicts for manual resolution
|
|
14
|
-
*
|
|
15
|
-
* ## Supported Features
|
|
16
|
-
*
|
|
17
|
-
* - Fast-forward merges when possible
|
|
18
|
-
* - Three-way content merging for text files
|
|
19
|
-
* - Binary file detection and handling
|
|
20
|
-
* - Multiple conflict types (content, add-add, modify-delete, etc.)
|
|
21
|
-
* - Conflict resolution strategies (ours, theirs, custom)
|
|
22
|
-
* - Merge state persistence for multi-step conflict resolution
|
|
23
|
-
*
|
|
24
|
-
* ## Usage Example
|
|
25
|
-
*
|
|
26
|
-
* ```typescript
|
|
27
|
-
* import { merge, resolveConflict, continueMerge } from './ops/merge'
|
|
28
|
-
*
|
|
29
|
-
* // Perform a merge
|
|
30
|
-
* const result = await merge(storage, currentBranchSha, featureBranchSha, {
|
|
31
|
-
* message: 'Merge feature branch',
|
|
32
|
-
* allowFastForward: true
|
|
33
|
-
* })
|
|
34
|
-
*
|
|
35
|
-
* if (result.status === 'conflicted') {
|
|
36
|
-
* // Resolve conflicts
|
|
37
|
-
* for (const conflict of result.conflicts) {
|
|
38
|
-
* await resolveConflict(storage, conflict.path, { resolution: 'ours' })
|
|
39
|
-
* }
|
|
40
|
-
* // Complete the merge
|
|
41
|
-
* await continueMerge(storage)
|
|
42
|
-
* }
|
|
43
|
-
* ```
|
|
44
|
-
*
|
|
45
|
-
* @module ops/merge
|
|
46
|
-
*/
|
|
47
|
-
/**
|
|
48
|
-
* Performs a three-way merge between the current branch and another commit.
|
|
49
|
-
*
|
|
50
|
-
* @description
|
|
51
|
-
* This function implements Git's three-way merge algorithm:
|
|
52
|
-
* 1. Find the common ancestor (merge base) of the two commits
|
|
53
|
-
* 2. Compare both sides against the base to identify changes
|
|
54
|
-
* 3. Apply non-conflicting changes automatically
|
|
55
|
-
* 4. Identify and report conflicts for manual resolution
|
|
56
|
-
*
|
|
57
|
-
* The merge can result in several outcomes:
|
|
58
|
-
* - **fast-forward**: If the current branch is an ancestor of the target,
|
|
59
|
-
* the branch pointer is simply moved forward
|
|
60
|
-
* - **merged**: Changes were successfully combined into a merge commit
|
|
61
|
-
* - **conflicted**: Some changes conflict and require manual resolution
|
|
62
|
-
* - **up-to-date**: The target is already merged; nothing to do
|
|
63
|
-
*
|
|
64
|
-
* @param storage - The storage interface for reading/writing Git objects
|
|
65
|
-
* @param oursSha - SHA of the current branch's HEAD commit
|
|
66
|
-
* @param theirsSha - SHA of the commit to merge into the current branch
|
|
67
|
-
* @param options - Configuration options for the merge operation
|
|
68
|
-
*
|
|
69
|
-
* @returns A promise resolving to the merge result with status and any conflicts
|
|
70
|
-
*
|
|
71
|
-
* @throws {Error} When commit objects cannot be read
|
|
72
|
-
* @throws {Error} When tree objects cannot be parsed
|
|
73
|
-
* @throws {Error} When fastForwardOnly is true but fast-forward is not possible
|
|
74
|
-
*
|
|
75
|
-
* @example
|
|
76
|
-
* ```typescript
|
|
77
|
-
* // Basic merge
|
|
78
|
-
* const result = await merge(storage, 'abc123', 'def456', {
|
|
79
|
-
* message: 'Merge feature branch'
|
|
80
|
-
* })
|
|
81
|
-
*
|
|
82
|
-
* if (result.status === 'merged') {
|
|
83
|
-
* console.log('Merge successful:', result.commitSha)
|
|
84
|
-
* }
|
|
85
|
-
* ```
|
|
86
|
-
*
|
|
87
|
-
* @example
|
|
88
|
-
* ```typescript
|
|
89
|
-
* // Fast-forward only merge
|
|
90
|
-
* try {
|
|
91
|
-
* const result = await merge(storage, 'abc123', 'def456', {
|
|
92
|
-
* fastForwardOnly: true
|
|
93
|
-
* })
|
|
94
|
-
* console.log('Fast-forwarded to:', result.treeSha)
|
|
95
|
-
* } catch (error) {
|
|
96
|
-
* console.log('Cannot fast-forward, branches have diverged')
|
|
97
|
-
* }
|
|
98
|
-
* ```
|
|
99
|
-
*
|
|
100
|
-
* @example
|
|
101
|
-
* ```typescript
|
|
102
|
-
* // Merge with auto-resolve conflicts using 'ours' strategy
|
|
103
|
-
* const result = await merge(storage, 'abc123', 'def456', {
|
|
104
|
-
* autoResolve: true,
|
|
105
|
-
* conflictStrategy: 'ours',
|
|
106
|
-
* message: 'Merge with our changes taking precedence'
|
|
107
|
-
* })
|
|
108
|
-
* ```
|
|
109
|
-
*/
|
|
110
|
-
export async function merge(storage, oursSha, theirsSha, options = {}) {
|
|
111
|
-
// Check if merging with self
|
|
112
|
-
if (oursSha === theirsSha) {
|
|
113
|
-
return {
|
|
114
|
-
status: 'up-to-date',
|
|
115
|
-
oursSha,
|
|
116
|
-
theirsSha,
|
|
117
|
-
fastForward: false
|
|
118
|
-
};
|
|
119
|
-
}
|
|
120
|
-
// Find the merge base
|
|
121
|
-
const baseSha = await findMergeBase(storage, oursSha, theirsSha);
|
|
122
|
-
// If baseSha equals theirsSha, we're already up-to-date
|
|
123
|
-
if (baseSha === theirsSha) {
|
|
124
|
-
return {
|
|
125
|
-
status: 'up-to-date',
|
|
126
|
-
oursSha,
|
|
127
|
-
theirsSha,
|
|
128
|
-
baseSha,
|
|
129
|
-
fastForward: false
|
|
130
|
-
};
|
|
131
|
-
}
|
|
132
|
-
// Get tree SHAs for base, ours, and theirs
|
|
133
|
-
const oursCommit = await storage.readObject(oursSha);
|
|
134
|
-
const theirsCommit = await storage.readObject(theirsSha);
|
|
135
|
-
if (!oursCommit || !theirsCommit) {
|
|
136
|
-
throw new Error('Could not read commit objects');
|
|
137
|
-
}
|
|
138
|
-
const theirsTreeSha = parseCommitTree(theirsCommit.data, theirsCommit.tree);
|
|
139
|
-
if (!theirsTreeSha) {
|
|
140
|
-
throw new Error('Could not parse theirs tree SHA');
|
|
141
|
-
}
|
|
142
|
-
// Check if this is a fast-forward (ours is ancestor of theirs)
|
|
143
|
-
if (baseSha === oursSha) {
|
|
144
|
-
// If fast-forward only is set but we can fast-forward, that's fine
|
|
145
|
-
// If allowFastForward is false, we need to create a merge commit
|
|
146
|
-
if (options.allowFastForward !== false) {
|
|
147
|
-
return {
|
|
148
|
-
status: 'fast-forward',
|
|
149
|
-
oursSha,
|
|
150
|
-
theirsSha,
|
|
151
|
-
baseSha,
|
|
152
|
-
treeSha: theirsTreeSha,
|
|
153
|
-
fastForward: true
|
|
154
|
-
};
|
|
155
|
-
}
|
|
156
|
-
// allowFastForward is false, so create a merge commit
|
|
157
|
-
// Continue with merge logic below but no conflicts
|
|
158
|
-
}
|
|
159
|
-
// If fastForwardOnly is set and we couldn't fast-forward, throw an error
|
|
160
|
-
if (options.fastForwardOnly) {
|
|
161
|
-
throw new Error('Not possible to fast-forward, aborting');
|
|
162
|
-
}
|
|
163
|
-
const oursTreeSha = parseCommitTree(oursCommit.data, oursCommit.tree);
|
|
164
|
-
if (!oursTreeSha) {
|
|
165
|
-
throw new Error('Could not parse commit tree SHAs');
|
|
166
|
-
}
|
|
167
|
-
// Get base tree SHA (if we have a base)
|
|
168
|
-
let baseTreeSha = null;
|
|
169
|
-
if (baseSha) {
|
|
170
|
-
const baseCommit = await storage.readObject(baseSha);
|
|
171
|
-
if (baseCommit) {
|
|
172
|
-
baseTreeSha = parseCommitTree(baseCommit.data, baseCommit.tree);
|
|
173
|
-
}
|
|
174
|
-
}
|
|
175
|
-
// Get tree entries for each version
|
|
176
|
-
const baseEntries = baseTreeSha ? await getTreeEntries(storage, baseTreeSha) : new Map();
|
|
177
|
-
const oursEntries = await getTreeEntries(storage, oursTreeSha);
|
|
178
|
-
const theirsEntries = await getTreeEntries(storage, theirsTreeSha);
|
|
179
|
-
// Collect all paths
|
|
180
|
-
const allPaths = new Set();
|
|
181
|
-
for (const path of baseEntries.keys())
|
|
182
|
-
allPaths.add(path);
|
|
183
|
-
for (const path of oursEntries.keys())
|
|
184
|
-
allPaths.add(path);
|
|
185
|
-
for (const path of theirsEntries.keys())
|
|
186
|
-
allPaths.add(path);
|
|
187
|
-
// Merge each path
|
|
188
|
-
const conflicts = [];
|
|
189
|
-
const mergedEntries = new Map();
|
|
190
|
-
const stats = {
|
|
191
|
-
filesAdded: 0,
|
|
192
|
-
filesModified: 0,
|
|
193
|
-
filesDeleted: 0,
|
|
194
|
-
filesRenamed: 0,
|
|
195
|
-
binaryFilesChanged: 0,
|
|
196
|
-
linesAdded: 0,
|
|
197
|
-
linesRemoved: 0
|
|
198
|
-
};
|
|
199
|
-
for (const path of allPaths) {
|
|
200
|
-
const baseEntry = baseEntries.get(path);
|
|
201
|
-
const oursEntry = oursEntries.get(path);
|
|
202
|
-
const theirsEntry = theirsEntries.get(path);
|
|
203
|
-
const mergeResult = await mergeEntry(storage, path, baseEntry, oursEntry, theirsEntry, stats);
|
|
204
|
-
if (mergeResult.conflict) {
|
|
205
|
-
conflicts.push(mergeResult.conflict);
|
|
206
|
-
}
|
|
207
|
-
if (mergeResult.entry) {
|
|
208
|
-
mergedEntries.set(path, mergeResult.entry);
|
|
209
|
-
}
|
|
210
|
-
}
|
|
211
|
-
// Handle autoResolve with conflictStrategy
|
|
212
|
-
if (conflicts.length > 0 && options.autoResolve && options.conflictStrategy) {
|
|
213
|
-
// Auto-resolve conflicts using specified strategy
|
|
214
|
-
for (const conflict of conflicts) {
|
|
215
|
-
if (options.conflictStrategy === 'ours' && conflict.oursSha) {
|
|
216
|
-
// Use ours version
|
|
217
|
-
mergedEntries.set(conflict.path, {
|
|
218
|
-
path: conflict.path,
|
|
219
|
-
mode: conflict.oursMode || '100644',
|
|
220
|
-
sha: conflict.oursSha
|
|
221
|
-
});
|
|
222
|
-
}
|
|
223
|
-
else if (options.conflictStrategy === 'theirs' && conflict.theirsSha) {
|
|
224
|
-
// Use theirs version
|
|
225
|
-
mergedEntries.set(conflict.path, {
|
|
226
|
-
path: conflict.path,
|
|
227
|
-
mode: conflict.theirsMode || '100644',
|
|
228
|
-
sha: conflict.theirsSha
|
|
229
|
-
});
|
|
230
|
-
}
|
|
231
|
-
}
|
|
232
|
-
// Clear conflicts since they're auto-resolved
|
|
233
|
-
conflicts.length = 0;
|
|
234
|
-
}
|
|
235
|
-
// Build merged tree and write it
|
|
236
|
-
const treeSha = await buildAndWriteTree(storage, mergedEntries);
|
|
237
|
-
if (conflicts.length > 0) {
|
|
238
|
-
// Save merge state for conflict resolution
|
|
239
|
-
const mergeState = {
|
|
240
|
-
mergeHead: theirsSha,
|
|
241
|
-
origHead: oursSha,
|
|
242
|
-
message: options.message ?? `Merge ${theirsSha} into ${oursSha}`,
|
|
243
|
-
unresolvedConflicts: conflicts,
|
|
244
|
-
resolvedConflicts: [],
|
|
245
|
-
options
|
|
246
|
-
};
|
|
247
|
-
await storage.writeMergeState(mergeState);
|
|
248
|
-
return {
|
|
249
|
-
status: 'conflicted',
|
|
250
|
-
oursSha,
|
|
251
|
-
theirsSha,
|
|
252
|
-
baseSha: baseSha ?? undefined,
|
|
253
|
-
treeSha,
|
|
254
|
-
conflicts,
|
|
255
|
-
stats,
|
|
256
|
-
fastForward: false
|
|
257
|
-
};
|
|
258
|
-
}
|
|
259
|
-
// Handle options
|
|
260
|
-
const finalMessage = options.message ?? `Merge ${theirsSha} into ${oursSha}`;
|
|
261
|
-
// If noCommit is set, don't create a commit SHA
|
|
262
|
-
if (options.noCommit) {
|
|
263
|
-
return {
|
|
264
|
-
status: 'merged',
|
|
265
|
-
oursSha,
|
|
266
|
-
theirsSha,
|
|
267
|
-
baseSha: baseSha ?? undefined,
|
|
268
|
-
treeSha,
|
|
269
|
-
stats,
|
|
270
|
-
message: finalMessage,
|
|
271
|
-
fastForward: false
|
|
272
|
-
};
|
|
273
|
-
}
|
|
274
|
-
// Create a merge commit SHA
|
|
275
|
-
const commitSha = generateHexSha(`merge${Date.now()}`);
|
|
276
|
-
return {
|
|
277
|
-
status: 'merged',
|
|
278
|
-
oursSha,
|
|
279
|
-
theirsSha,
|
|
280
|
-
baseSha: baseSha ?? undefined,
|
|
281
|
-
treeSha,
|
|
282
|
-
commitSha,
|
|
283
|
-
stats,
|
|
284
|
-
message: finalMessage,
|
|
285
|
-
fastForward: false
|
|
286
|
-
};
|
|
287
|
-
}
|
|
288
|
-
/**
|
|
289
|
-
* Generates a deterministic 40-character hex SHA from a seed string.
|
|
290
|
-
*
|
|
291
|
-
* @description
|
|
292
|
-
* Creates a SHA-like string for internal use. This is a simplified
|
|
293
|
-
* implementation for testing; production code should use proper SHA-1.
|
|
294
|
-
*
|
|
295
|
-
* @param seed - Input string to generate SHA from
|
|
296
|
-
* @returns 40-character hexadecimal string
|
|
297
|
-
*
|
|
298
|
-
* @internal
|
|
299
|
-
*/
|
|
300
|
-
function generateHexSha(seed) {
|
|
301
|
-
// Generate a proper 40-character hex string
|
|
302
|
-
let hash = 0;
|
|
303
|
-
for (let i = 0; i < seed.length; i++) {
|
|
304
|
-
const char = seed.charCodeAt(i);
|
|
305
|
-
hash = ((hash << 5) - hash) + char;
|
|
306
|
-
hash = hash & hash; // Convert to 32bit integer
|
|
307
|
-
}
|
|
308
|
-
// Convert to hex and pad to 40 characters
|
|
309
|
-
const hex = Math.abs(hash).toString(16);
|
|
310
|
-
return hex.padStart(8, '0').repeat(5).slice(0, 40);
|
|
311
|
-
}
|
|
312
|
-
/**
|
|
313
|
-
* Recursively retrieves all entries from a tree object.
|
|
314
|
-
*
|
|
315
|
-
* @description
|
|
316
|
-
* Walks the tree structure recursively, collecting all file entries
|
|
317
|
-
* with their full paths from the repository root.
|
|
318
|
-
*
|
|
319
|
-
* @param storage - Storage interface for reading tree objects
|
|
320
|
-
* @param treeSha - SHA of the tree to read
|
|
321
|
-
* @param prefix - Path prefix for nested entries
|
|
322
|
-
* @returns Map of full path to tree entry info
|
|
323
|
-
*
|
|
324
|
-
* @internal
|
|
325
|
-
*/
|
|
326
|
-
async function getTreeEntries(storage, treeSha, prefix = '') {
|
|
327
|
-
const entries = new Map();
|
|
328
|
-
const treeObj = await storage.readObject(treeSha);
|
|
329
|
-
if (!treeObj || treeObj.type !== 'tree') {
|
|
330
|
-
return entries;
|
|
331
|
-
}
|
|
332
|
-
// Use extended entries if available, otherwise parse from data
|
|
333
|
-
const treeEntries = treeObj.entries ?? parseTreeEntries(treeObj.data);
|
|
334
|
-
for (const entry of treeEntries) {
|
|
335
|
-
const fullPath = prefix ? `${prefix}/${entry.name}` : entry.name;
|
|
336
|
-
if (entry.mode === '040000' || entry.mode === '40000') {
|
|
337
|
-
// Directory - add entry for the directory itself (for directory-file conflict detection)
|
|
338
|
-
entries.set(fullPath, {
|
|
339
|
-
path: fullPath,
|
|
340
|
-
mode: entry.mode,
|
|
341
|
-
sha: entry.sha
|
|
342
|
-
});
|
|
343
|
-
// Also recurse to get nested files
|
|
344
|
-
const subEntries = await getTreeEntries(storage, entry.sha, fullPath);
|
|
345
|
-
for (const [subPath, subEntry] of subEntries) {
|
|
346
|
-
entries.set(subPath, subEntry);
|
|
347
|
-
}
|
|
348
|
-
}
|
|
349
|
-
else {
|
|
350
|
-
// File
|
|
351
|
-
entries.set(fullPath, {
|
|
352
|
-
path: fullPath,
|
|
353
|
-
mode: entry.mode,
|
|
354
|
-
sha: entry.sha
|
|
355
|
-
});
|
|
356
|
-
}
|
|
357
|
-
}
|
|
358
|
-
return entries;
|
|
359
|
-
}
|
|
360
|
-
/**
 * Parses tree entries from raw Git tree object data.
 *
 * @description
 * A Git tree body is a sequence of records of the form
 * `<mode> SP <name> NUL <20-byte binary sha>`. This walks the buffer,
 * decoding one record per iteration; a truncated trailing record is
 * silently ignored.
 *
 * @param data - Raw tree object content
 * @returns Array of parsed tree entries ({ mode, name, sha })
 *
 * @internal
 */
function parseTreeEntries(data) {
    const parsed = [];
    let cursor = 0;
    while (cursor < data.length) {
        // The mode is terminated by a single space.
        const spaceAt = data.indexOf(0x20, cursor);
        if (spaceAt === -1) {
            break;
        }
        // The name is terminated by a NUL byte.
        const nulAt = data.indexOf(0x00, spaceAt + 1);
        if (nulAt === -1) {
            break;
        }
        const mode = decoder.decode(data.slice(cursor, spaceAt));
        const name = decoder.decode(data.slice(spaceAt + 1, nulAt));
        // The SHA is stored as 20 raw bytes; render it as lowercase hex.
        const rawSha = data.slice(nulAt + 1, nulAt + 21);
        let sha = '';
        for (const byte of rawSha) {
            sha += byte.toString(16).padStart(2, '0');
        }
        parsed.push({ mode, name, sha });
        cursor = nulAt + 21;
    }
    return parsed;
}
|
|
398
|
-
/**
|
|
399
|
-
* Merges a single file entry using three-way merge logic.
|
|
400
|
-
*
|
|
401
|
-
* @description
|
|
402
|
-
* Compares the base, ours, and theirs versions of a single file
|
|
403
|
-
* and determines the merge result. Handles various cases:
|
|
404
|
-
* - File unchanged in one or both sides
|
|
405
|
-
* - File added/deleted on one or both sides
|
|
406
|
-
* - File modified on one or both sides (with content merge)
|
|
407
|
-
*
|
|
408
|
-
* @param storage - Storage interface for reading blob content
|
|
409
|
-
* @param path - Path of the file being merged
|
|
410
|
-
* @param baseEntry - Entry from the base (common ancestor)
|
|
411
|
-
* @param oursEntry - Entry from our branch
|
|
412
|
-
* @param theirsEntry - Entry from their branch
|
|
413
|
-
* @param stats - Statistics object to update
|
|
414
|
-
* @returns Merge result with either an entry or a conflict
|
|
415
|
-
*
|
|
416
|
-
* @internal
|
|
417
|
-
*/
|
|
418
|
-
async function mergeEntry(storage, path, baseEntry, oursEntry, theirsEntry, stats) {
    // Case 1: File unchanged in both (same SHA and mode).
    // Note: when both sides are absent, undefined === undefined also lands
    // here and falls through to the "both deleted" return below.
    if (oursEntry?.sha === theirsEntry?.sha && oursEntry?.mode === theirsEntry?.mode) {
        if (oursEntry) {
            return { entry: oursEntry };
        }
        // Both deleted - no entry
        return {};
    }
    // Case 2: File only in ours (added by us, or unchanged/deleted by them)
    if (!theirsEntry && oursEntry) {
        if (!baseEntry) {
            // Added by us
            stats.filesAdded++;
            return { entry: oursEntry };
        }
        if (oursEntry.sha === baseEntry.sha) {
            // Unchanged by us, deleted by them - take theirs (deletion)
            stats.filesDeleted++;
            return {};
        }
        // Modified by us, deleted by them - conflict
        return {
            conflict: {
                type: 'modify-delete',
                path,
                baseSha: baseEntry.sha,
                oursSha: oursEntry.sha,
                baseMode: baseEntry.mode,
                oursMode: oursEntry.mode
            }
        };
    }
    // Case 3: File only in theirs (added by them, or unchanged/deleted by us)
    if (!oursEntry && theirsEntry) {
        if (!baseEntry) {
            // Added by them
            stats.filesAdded++;
            return { entry: theirsEntry };
        }
        if (theirsEntry.sha === baseEntry.sha) {
            // Unchanged by them, deleted by us - take ours (deletion)
            stats.filesDeleted++;
            return {};
        }
        // Modified by them, deleted by us - conflict
        return {
            conflict: {
                type: 'delete-modify',
                path,
                baseSha: baseEntry.sha,
                theirsSha: theirsEntry.sha,
                baseMode: baseEntry.mode,
                theirsMode: theirsEntry.mode
            }
        };
    }
    // Case 4: File in both ours and theirs
    if (oursEntry && theirsEntry) {
        // Check for type conflicts (file vs directory)
        const oursIsDir = oursEntry.mode === '040000' || oursEntry.mode === '40000';
        const theirsIsDir = theirsEntry.mode === '040000' || theirsEntry.mode === '40000';
        if (oursIsDir !== theirsIsDir) {
            return {
                conflict: {
                    type: 'directory-file',
                    path,
                    baseSha: baseEntry?.sha,
                    oursSha: oursEntry.sha,
                    theirsSha: theirsEntry.sha,
                    baseMode: baseEntry?.mode,
                    oursMode: oursEntry.mode,
                    theirsMode: theirsEntry.mode
                }
            };
        }
        // If only one side changed from base, take that side
        if (baseEntry) {
            if (oursEntry.sha === baseEntry.sha && oursEntry.mode === baseEntry.mode) {
                // Only theirs changed - check if binary to track stats.
                // NOTE(review): isBinaryFile is defined elsewhere in this
                // module; presumably a content-sniffing heuristic - verify.
                const content = await getBlobContent(storage, theirsEntry.sha);
                if (content && isBinaryFile(content)) {
                    stats.binaryFilesChanged++;
                }
                else {
                    stats.filesModified++;
                }
                return { entry: theirsEntry };
            }
            if (theirsEntry.sha === baseEntry.sha && theirsEntry.mode === baseEntry.mode) {
                // Only ours changed - check if binary to track stats
                const content = await getBlobContent(storage, oursEntry.sha);
                if (content && isBinaryFile(content)) {
                    stats.binaryFilesChanged++;
                }
                else {
                    stats.filesModified++;
                }
                return { entry: oursEntry };
            }
        }
        // Both sides changed - try content merge
        if (!baseEntry) {
            // Both added the same file with different content (add-add conflict)
            return {
                conflict: {
                    type: 'add-add',
                    path,
                    oursSha: oursEntry.sha,
                    theirsSha: theirsEntry.sha,
                    oursMode: oursEntry.mode,
                    theirsMode: theirsEntry.mode
                }
            };
        }
        // Get content for three-way merge
        const baseContent = await getBlobContent(storage, baseEntry.sha);
        const oursContent = await getBlobContent(storage, oursEntry.sha);
        const theirsContent = await getBlobContent(storage, theirsEntry.sha);
        if (!baseContent || !oursContent || !theirsContent) {
            throw new Error(`Could not read blob content for ${path}`);
        }
        // Check if any file is binary
        const isBinary = isBinaryFile(baseContent) || isBinaryFile(oursContent) || isBinaryFile(theirsContent);
        if (isBinary) {
            stats.binaryFilesChanged++;
            // Binary files with different content = conflict (no line-level
            // merge is attempted for binary data)
            return {
                conflict: {
                    type: 'content',
                    path,
                    baseSha: baseEntry.sha,
                    oursSha: oursEntry.sha,
                    theirsSha: theirsEntry.sha,
                    baseMode: baseEntry.mode,
                    oursMode: oursEntry.mode,
                    theirsMode: theirsEntry.mode
                    // No conflictedContent for binary files
                }
            };
        }
        // Try to merge text content
        const mergeResult = mergeContent(baseContent, oursContent, theirsContent);
        if (mergeResult.hasConflicts) {
            stats.filesModified++;
            return {
                conflict: {
                    type: 'content',
                    path,
                    baseSha: baseEntry.sha,
                    oursSha: oursEntry.sha,
                    theirsSha: theirsEntry.sha,
                    baseMode: baseEntry.mode,
                    oursMode: oursEntry.mode,
                    theirsMode: theirsEntry.mode,
                    conflictedContent: mergeResult.merged,
                    markers: mergeResult.markers
                }
            };
        }
        // Successfully merged - write new blob
        const newSha = await storage.writeObject('blob', mergeResult.merged);
        stats.filesModified++;
        return {
            entry: {
                path,
                mode: oursEntry.mode, // Use ours mode by default
                sha: newSha
            }
        };
    }
    // No entry in either side - nothing to do
    return {};
}
|
|
592
|
-
/**
 * Retrieves blob content from storage.
 *
 * @param storage - Storage interface
 * @param sha - SHA of the blob to read
 * @returns Blob content, or null when the object is missing or not a blob
 *
 * @internal
 */
async function getBlobContent(storage, sha) {
    const record = await storage.readObject(sha);
    // Only genuine blob objects yield content; anything else (missing
    // object, tree, commit) resolves to null.
    const isBlob = record?.type === 'blob';
    return isBlob ? record.data : null;
}
|
|
608
|
-
/**
|
|
609
|
-
* Builds a tree object from entries and writes it to storage.
|
|
610
|
-
*
|
|
611
|
-
* @description
|
|
612
|
-
* Takes a flat map of paths to entries and constructs the nested
|
|
613
|
-
* tree structure required by Git, writing subtrees as needed.
|
|
614
|
-
*
|
|
615
|
-
* @param storage - Storage interface for writing tree objects
|
|
616
|
-
* @param entries - Map of full paths to tree entries
|
|
617
|
-
* @returns SHA of the root tree object
|
|
618
|
-
*
|
|
619
|
-
* @internal
|
|
620
|
-
*/
|
|
621
|
-
async function buildAndWriteTree(storage, entries) {
    // Group entries by top-level directory
    const topLevel = new Map();
    for (const [path, entry] of entries) {
        const parts = path.split('/');
        if (parts.length === 1) {
            // Top-level file
            topLevel.set(path, entry);
        }
        else {
            // Nested file - group by directory
            const dir = parts[0];
            const subPath = parts.slice(1).join('/');
            let subEntries = topLevel.get(dir);
            if (!subEntries || !(subEntries instanceof Map)) {
                // NOTE(review): if a plain file entry already occupies this
                // name, it is silently replaced by a directory map here -
                // presumably directory-file conflicts were resolved before
                // this point; verify against callers.
                subEntries = new Map();
                topLevel.set(dir, subEntries);
            }
            subEntries.set(subPath, {
                ...entry,
                path: subPath
            });
        }
    }
    // Build tree entries; a Map value means "subdirectory", anything else
    // is a file entry.
    const treeEntries = [];
    for (const [name, value] of topLevel) {
        if (value instanceof Map) {
            // Directory - recursively build subtree
            const subTreeSha = await buildAndWriteTree(storage, value);
            treeEntries.push({
                mode: '40000',
                name,
                sha: subTreeSha
            });
        }
        else {
            // File
            treeEntries.push({
                mode: value.mode,
                name,
                sha: value.sha
            });
        }
    }
    // Sort entries (Git sorts directories with trailing /).
    // NOTE(review): localeCompare is locale-sensitive; canonical Git sorts
    // by raw byte value, which can differ for non-ASCII names - confirm
    // this is acceptable for this storage backend.
    treeEntries.sort((a, b) => {
        const aName = a.mode === '40000' ? a.name + '/' : a.name;
        const bName = b.mode === '40000' ? b.name + '/' : b.name;
        return aName.localeCompare(bName);
    });
    // Serialize tree: each record is "<mode> <name>\0" followed by the raw
    // 20-byte SHA.
    const treeParts = [];
    for (const entry of treeEntries) {
        const modeName = encoder.encode(`${entry.mode} ${entry.name}\0`);
        const shaBytes = hexToBytes(entry.sha);
        const entryData = new Uint8Array(modeName.length + 20);
        entryData.set(modeName);
        entryData.set(shaBytes, modeName.length);
        treeParts.push(entryData);
    }
    // Concatenate all parts into one buffer
    const totalLength = treeParts.reduce((sum, part) => sum + part.length, 0);
    const treeData = new Uint8Array(totalLength);
    let offset = 0;
    for (const part of treeParts) {
        treeData.set(part, offset);
        offset += part.length;
    }
    // Write tree
    return storage.writeObject('tree', treeData);
}
|
|
693
|
-
/**
|
|
694
|
-
* Converts a hex string to a 20-byte Uint8Array.
|
|
695
|
-
*
|
|
696
|
-
* @param hex - 40-character hexadecimal string
|
|
697
|
-
* @returns 20-byte array
|
|
698
|
-
*
|
|
699
|
-
* @internal
|
|
700
|
-
*/
|
|
701
|
-
function hexToBytes(hex) {
|
|
702
|
-
const bytes = new Uint8Array(20);
|
|
703
|
-
for (let i = 0; i < 40; i += 2) {
|
|
704
|
-
bytes[i / 2] = parseInt(hex.slice(i, i + 2), 16);
|
|
705
|
-
}
|
|
706
|
-
return bytes;
|
|
707
|
-
}
|
|
708
|
-
/**
|
|
709
|
-
* Resolves a single merge conflict with the specified strategy.
|
|
710
|
-
*
|
|
711
|
-
* @description
|
|
712
|
-
* After a merge results in conflicts, use this function to resolve
|
|
713
|
-
* individual files. The resolution can use one of the three versions
|
|
714
|
-
* (ours, theirs, base) or provide custom merged content.
|
|
715
|
-
*
|
|
716
|
-
* Once all conflicts are resolved, use {@link continueMerge} to create
|
|
717
|
-
* the merge commit and complete the operation.
|
|
718
|
-
*
|
|
719
|
-
* @param storage - The storage interface for reading/writing objects
|
|
720
|
-
* @param path - Path to the conflicted file to resolve
|
|
721
|
-
* @param options - Resolution options specifying which version to use
|
|
722
|
-
*
|
|
723
|
-
* @returns A promise resolving to the resolution result
|
|
724
|
-
*
|
|
725
|
-
* @throws {Error} When no merge is in progress
|
|
726
|
-
* @throws {Error} When the specified path has no conflict
|
|
727
|
-
*
|
|
728
|
-
* @example
|
|
729
|
-
* ```typescript
|
|
730
|
-
* // Resolve using our version
|
|
731
|
-
* const result = await resolveConflict(storage, 'src/file.ts', {
|
|
732
|
-
* resolution: 'ours'
|
|
733
|
-
* })
|
|
734
|
-
* console.log(`${result.remainingConflicts} conflicts remaining`)
|
|
735
|
-
* ```
|
|
736
|
-
*
|
|
737
|
-
* @example
|
|
738
|
-
* ```typescript
|
|
739
|
-
* // Resolve using their version
|
|
740
|
-
* await resolveConflict(storage, 'config.json', {
|
|
741
|
-
* resolution: 'theirs'
|
|
742
|
-
* })
|
|
743
|
-
* ```
|
|
744
|
-
*
|
|
745
|
-
* @example
|
|
746
|
-
* ```typescript
|
|
747
|
-
* // Resolve with manually merged content
|
|
748
|
-
* const mergedContent = new TextEncoder().encode(`
|
|
749
|
-
* // Manually resolved: kept both features
|
|
750
|
-
* export function feature1() { ... }
|
|
751
|
-
* export function feature2() { ... }
|
|
752
|
-
* `)
|
|
753
|
-
*
|
|
754
|
-
* await resolveConflict(storage, 'src/features.ts', {
|
|
755
|
-
* resolution: 'custom',
|
|
756
|
-
* customContent: mergedContent
|
|
757
|
-
* })
|
|
758
|
-
* ```
|
|
759
|
-
*/
|
|
760
|
-
export async function resolveConflict(storage, path, options) {
    // Get current merge state; absence means no merge is in progress.
    const mergeState = await storage.readMergeState();
    if (!mergeState) {
        return {
            success: false,
            path,
            error: 'No merge in progress',
            remainingConflicts: 0
        };
    }
    // Find the conflict for this path
    const conflictIndex = mergeState.unresolvedConflicts.findIndex(c => c.path === path);
    if (conflictIndex === -1) {
        return {
            success: false,
            path,
            error: `No conflict found for path: ${path}`,
            remainingConflicts: mergeState.unresolvedConflicts.length
        };
    }
    const conflict = mergeState.unresolvedConflicts[conflictIndex];
    // Determine the content to use based on resolution strategy
    let resolvedSha;
    let resolvedMode;
    switch (options.resolution) {
        case 'ours':
            if (!conflict.oursSha) {
                // If ours is deleted, we want to keep the deletion.
                // Remove the conflict and don't stage anything.
                mergeState.unresolvedConflicts.splice(conflictIndex, 1);
                mergeState.resolvedConflicts.push(conflict);
                await storage.writeMergeState(mergeState);
                return {
                    success: true,
                    path,
                    remainingConflicts: mergeState.unresolvedConflicts.length
                };
            }
            resolvedSha = conflict.oursSha;
            // Fall back to regular-file mode when the conflict carries none.
            resolvedMode = conflict.oursMode || '100644';
            break;
        case 'theirs':
            if (!conflict.theirsSha) {
                // If theirs is deleted, we want to accept the deletion
                mergeState.unresolvedConflicts.splice(conflictIndex, 1);
                mergeState.resolvedConflicts.push(conflict);
                await storage.writeMergeState(mergeState);
                return {
                    success: true,
                    path,
                    remainingConflicts: mergeState.unresolvedConflicts.length
                };
            }
            resolvedSha = conflict.theirsSha;
            resolvedMode = conflict.theirsMode || '100644';
            break;
        case 'base':
            if (!conflict.baseSha) {
                // Cannot fall back to the common ancestor if the file did
                // not exist there (e.g. add-add conflicts).
                return {
                    success: false,
                    path,
                    error: 'No base version available',
                    remainingConflicts: mergeState.unresolvedConflicts.length
                };
            }
            resolvedSha = conflict.baseSha;
            resolvedMode = conflict.baseMode || '100644';
            break;
        case 'custom':
            if (!options.customContent) {
                return {
                    success: false,
                    path,
                    error: 'Custom content required for custom resolution',
                    remainingConflicts: mergeState.unresolvedConflicts.length
                };
            }
            // Persist the caller-supplied content as a new blob.
            resolvedSha = await storage.writeObject('blob', options.customContent);
            resolvedMode = options.customMode || conflict.oursMode || '100644';
            break;
        default:
            return {
                success: false,
                path,
                error: `Unknown resolution strategy: ${options.resolution}`,
                remainingConflicts: mergeState.unresolvedConflicts.length
            };
    }
    // Stage the resolved file (stage 0 = the fully-merged slot;
    // NOTE(review): confirm stageFile's fourth argument is the stage
    // number, as in Git's index).
    await storage.stageFile(path, resolvedSha, resolvedMode, 0);
    // Move conflict from unresolved to resolved
    mergeState.unresolvedConflicts.splice(conflictIndex, 1);
    mergeState.resolvedConflicts.push(conflict);
    // Update merge state
    await storage.writeMergeState(mergeState);
    return {
        success: true,
        path,
        remainingConflicts: mergeState.unresolvedConflicts.length
    };
}
|
|
862
|
-
/**
|
|
863
|
-
* Aborts an in-progress merge operation.
|
|
864
|
-
*
|
|
865
|
-
* @description
|
|
866
|
-
* Cancels the current merge and restores the repository to its state
|
|
867
|
-
* before the merge began. Any conflict resolutions or staged changes
|
|
868
|
-
* from the merge will be discarded.
|
|
869
|
-
*
|
|
870
|
-
* This is equivalent to `git merge --abort`.
|
|
871
|
-
*
|
|
872
|
-
* @param storage - The storage interface
|
|
873
|
-
*
|
|
874
|
-
* @returns A promise resolving to the operation result
|
|
875
|
-
*
|
|
876
|
-
* @throws {Error} When no merge is in progress
|
|
877
|
-
*
|
|
878
|
-
* @example
|
|
879
|
-
* ```typescript
|
|
880
|
-
* // User decides to cancel the merge
|
|
881
|
-
* const result = await abortMerge(storage)
|
|
882
|
-
*
|
|
883
|
-
* if (result.success) {
|
|
884
|
-
* console.log('Merge aborted, HEAD restored to', result.headSha)
|
|
885
|
-
* } else {
|
|
886
|
-
* console.error('Failed to abort:', result.error)
|
|
887
|
-
* }
|
|
888
|
-
* ```
|
|
889
|
-
*/
|
|
890
|
-
export async function abortMerge(storage) {
    const state = await storage.readMergeState();
    if (!state) {
        return {
            success: false,
            error: 'No merge in progress'
        };
    }
    // Point HEAD back at the pre-merge commit, then discard the recorded
    // merge state so the repository is no longer mid-merge.
    const { origHead } = state;
    await storage.writeRef('HEAD', origHead);
    await storage.deleteMergeState();
    return {
        success: true,
        headSha: origHead,
        message: 'Merge aborted'
    };
}
|
|
910
|
-
/**
|
|
911
|
-
* Continues a merge after all conflicts have been resolved.
|
|
912
|
-
*
|
|
913
|
-
* @description
|
|
914
|
-
* After resolving all conflicts using {@link resolveConflict}, call this
|
|
915
|
-
* function to create the merge commit and complete the merge operation.
|
|
916
|
-
* The merge state will be cleaned up automatically.
|
|
917
|
-
*
|
|
918
|
-
* This is equivalent to `git merge --continue` or `git commit` after
|
|
919
|
-
* resolving conflicts.
|
|
920
|
-
*
|
|
921
|
-
* @param storage - The storage interface
|
|
922
|
-
* @param message - Optional commit message (overrides the stored message)
|
|
923
|
-
*
|
|
924
|
-
* @returns A promise resolving to the operation result with the new commit SHA
|
|
925
|
-
*
|
|
926
|
-
* @throws {Error} When no merge is in progress
|
|
927
|
-
* @throws {Error} When unresolved conflicts remain
|
|
928
|
-
*
|
|
929
|
-
* @example
|
|
930
|
-
* ```typescript
|
|
931
|
-
* // After resolving all conflicts
|
|
932
|
-
* const result = await continueMerge(storage)
|
|
933
|
-
*
|
|
934
|
-
* if (result.success) {
|
|
935
|
-
* console.log('Merge completed:', result.headSha)
|
|
936
|
-
* } else {
|
|
937
|
-
* console.error('Cannot continue:', result.error)
|
|
938
|
-
* }
|
|
939
|
-
* ```
|
|
940
|
-
*
|
|
941
|
-
* @example
|
|
942
|
-
* ```typescript
|
|
943
|
-
* // Continue with a custom commit message
|
|
944
|
-
* const result = await continueMerge(storage, 'Merge feature-x with conflict resolution')
|
|
945
|
-
* ```
|
|
946
|
-
*/
|
|
947
|
-
export async function continueMerge(storage, message) {
    const state = await storage.readMergeState();
    if (!state) {
        return {
            success: false,
            error: 'No merge in progress'
        };
    }
    // Refuse to commit while any conflict remains unresolved.
    const pending = state.unresolvedConflicts.length;
    if (pending > 0) {
        return {
            success: false,
            error: `Cannot continue: ${pending} unresolved conflict(s) remain`
        };
    }
    // An explicitly supplied message wins over the one recorded when the
    // merge started.
    const commitMessage = message ?? state.message;
    // Simplified: a real implementation would build the commit tree from
    // the index; here a placeholder SHA derived from the clock is used.
    const commitSha = makeSha(`mergecommit${Date.now()}`);
    await storage.writeRef('HEAD', commitSha);
    await storage.deleteMergeState();
    return {
        success: true,
        headSha: commitSha,
        message: commitMessage
    };
}
|
|
979
|
-
/**
|
|
980
|
-
* Creates a SHA-like string from a prefix.
|
|
981
|
-
*
|
|
982
|
-
* @param prefix - String to use as the basis for the SHA
|
|
983
|
-
* @returns 40-character string
|
|
984
|
-
*
|
|
985
|
-
* @internal
|
|
986
|
-
*/
|
|
987
|
-
function makeSha(prefix) {
    // Right-pad with zeros to the 40 characters of a hex SHA-1; longer
    // inputs are returned unchanged.
    const SHA_HEX_LENGTH = 40;
    return prefix.padEnd(SHA_HEX_LENGTH, '0');
}
|
|
990
|
-
/**
|
|
991
|
-
* Finds the best common ancestor (merge base) of two commits.
|
|
992
|
-
*
|
|
993
|
-
* @description
|
|
994
|
-
* Implements the merge base algorithm by finding the most recent commit
|
|
995
|
-
* that is an ancestor of both input commits. This is the commit from
|
|
996
|
-
* which both branches diverged.
|
|
997
|
-
*
|
|
998
|
-
* Uses a breadth-first search from both commits to find their
|
|
999
|
-
* intersection in the commit graph.
|
|
1000
|
-
*
|
|
1001
|
-
* @param storage - The storage interface for reading commit objects
|
|
1002
|
-
* @param commit1 - SHA of the first commit
|
|
1003
|
-
* @param commit2 - SHA of the second commit
|
|
1004
|
-
*
|
|
1005
|
-
* @returns A promise resolving to the merge base SHA, or null if no common ancestor exists
|
|
1006
|
-
*
|
|
1007
|
-
* @example
|
|
1008
|
-
* ```typescript
|
|
1009
|
-
* const base = await findMergeBase(storage, 'feature-sha', 'main-sha')
|
|
1010
|
-
* if (base) {
|
|
1011
|
-
* console.log('Common ancestor:', base)
|
|
1012
|
-
* } else {
|
|
1013
|
-
* console.log('No common history')
|
|
1014
|
-
* }
|
|
1015
|
-
* ```
|
|
1016
|
-
*/
|
|
1017
|
-
export async function findMergeBase(storage, commit1, commit2) {
    // Phase 1: collect every ancestor of commit1 (the commit itself
    // included) via breadth-first traversal.
    const reachableFromFirst = new Set();
    const pending = [commit1];
    while (pending.length > 0) {
        const current = pending.shift();
        if (reachableFromFirst.has(current)) {
            continue;
        }
        const commitObj = await storage.readObject(current);
        if (!commitObj || commitObj.type !== 'commit') {
            continue;
        }
        reachableFromFirst.add(current);
        // Follow parents (pre-parsed parents preferred when present).
        for (const parent of parseCommitParents(commitObj.data, commitObj.parents)) {
            if (!reachableFromFirst.has(parent)) {
                pending.push(parent);
            }
        }
    }
    // Phase 2: BFS from commit2; the first node that is also reachable
    // from commit1 is the merge base.
    const seen = new Set();
    const frontier = [commit2];
    while (frontier.length > 0) {
        const current = frontier.shift();
        if (seen.has(current)) {
            continue;
        }
        seen.add(current);
        if (reachableFromFirst.has(current)) {
            return current;
        }
        const commitObj = await storage.readObject(current);
        if (!commitObj || commitObj.type !== 'commit') {
            continue;
        }
        for (const parent of parseCommitParents(commitObj.data, commitObj.parents)) {
            if (!seen.has(parent)) {
                frontier.push(parent);
            }
        }
    }
    // The histories share no commit at all.
    return null;
}
|
|
1062
|
-
/**
|
|
1063
|
-
* Parses parent commit SHAs from raw commit data.
|
|
1064
|
-
*
|
|
1065
|
-
* @param data - Raw commit object content
|
|
1066
|
-
* @param extendedParents - Pre-parsed parents if available
|
|
1067
|
-
* @returns Array of parent commit SHAs
|
|
1068
|
-
*
|
|
1069
|
-
* @internal
|
|
1070
|
-
*/
|
|
1071
|
-
function parseCommitParents(data, extendedParents) {
    // Trust pre-parsed parents when the storage layer already extracted them.
    if (extendedParents) {
        return extendedParents;
    }
    const shas = [];
    for (const headerLine of decoder.decode(data).split('\n')) {
        if (headerLine === '') {
            // A blank line terminates the commit header section.
            break;
        }
        if (headerLine.startsWith('parent ')) {
            shas.push(headerLine.slice('parent '.length).trim());
        }
    }
    return shas;
}
|
|
1089
|
-
/**
|
|
1090
|
-
* Parses the tree SHA from raw commit data.
|
|
1091
|
-
*
|
|
1092
|
-
* @param data - Raw commit object content
|
|
1093
|
-
* @param treeSha - Pre-parsed tree SHA if available
|
|
1094
|
-
* @returns Tree SHA or null if not found
|
|
1095
|
-
*
|
|
1096
|
-
* @internal
|
|
1097
|
-
*/
|
|
1098
|
-
function parseCommitTree(data, treeSha) {
    // Trust the pre-parsed tree SHA when the storage layer supplies one.
    if (treeSha) {
        return treeSha;
    }
    const lines = decoder.decode(data).split('\n');
    const treeLine = lines.find(line => line.startsWith('tree '));
    return treeLine === undefined ? null : treeLine.slice('tree '.length).trim();
}
|
|
1111
|
-
// Text encoding helpers: shared, stateless UTF-8 codec instances reused by
// the parsing/serialization helpers in this module.
const encoder = new TextEncoder();
const decoder = new TextDecoder();
|
|
1114
|
-
/**
|
|
1115
|
-
* Splits content into lines while preserving line endings.
|
|
1116
|
-
*
|
|
1117
|
-
* @param content - Binary content to split
|
|
1118
|
-
* @returns Array of lines (without line ending characters)
|
|
1119
|
-
*
|
|
1120
|
-
* @internal
|
|
1121
|
-
*/
|
|
1122
|
-
function splitLines(content) {
    const text = decoder.decode(content);
    // Empty input yields no lines (''.split(...) would yield ['']).
    if (text === '') {
        return [];
    }
    // Tolerate both LF and CRLF endings; the terminators themselves are
    // dropped from the returned lines.
    return text.split(/\r?\n/);
}
|
|
1131
|
-
/**
|
|
1132
|
-
* Computes the longest common subsequence of two arrays.
|
|
1133
|
-
*
|
|
1134
|
-
* @description
|
|
1135
|
-
* Uses dynamic programming to find the longest subsequence common
|
|
1136
|
-
* to both arrays. Used as a building block for the diff algorithm.
|
|
1137
|
-
*
|
|
1138
|
-
* @param a - First array
|
|
1139
|
-
* @param b - Second array
|
|
1140
|
-
* @param equals - Function to compare elements for equality
|
|
1141
|
-
* @returns Array containing the longest common subsequence
|
|
1142
|
-
*
|
|
1143
|
-
* @internal
|
|
1144
|
-
*/
|
|
1145
|
-
function lcs(a, b, equals) {
    const rows = a.length;
    const cols = b.length;
    // table[i][j] = LCS length of a[0..i) and b[0..j).
    const table = [];
    for (let r = 0; r <= rows; r++) {
        table.push(new Array(cols + 1).fill(0));
    }
    for (let r = 1; r <= rows; r++) {
        for (let c = 1; c <= cols; c++) {
            table[r][c] = equals(a[r - 1], b[c - 1])
                ? table[r - 1][c - 1] + 1
                : Math.max(table[r - 1][c], table[r][c - 1]);
        }
    }
    // Walk back from the bottom-right corner to recover one LCS.
    // Tie-breaking (preferring to step up only on a strict improvement)
    // determines which of several equal-length subsequences is returned.
    const sequence = [];
    let i = rows;
    let j = cols;
    while (i > 0 && j > 0) {
        if (equals(a[i - 1], b[j - 1])) {
            sequence.unshift(a[i - 1]);
            i -= 1;
            j -= 1;
        }
        else if (table[i - 1][j] > table[i][j - 1]) {
            i -= 1;
        }
        else {
            j -= 1;
        }
    }
    return sequence;
}
|
|
1179
|
-
/**
|
|
1180
|
-
* Computes diff hunks between base and target line arrays.
|
|
1181
|
-
*
|
|
1182
|
-
* @param base - Original lines
|
|
1183
|
-
* @param target - Modified lines
|
|
1184
|
-
* @returns Array of hunks describing the differences
|
|
1185
|
-
*
|
|
1186
|
-
* @internal
|
|
1187
|
-
*/
|
|
1188
|
-
function computeHunks(base, target) {
    const hunks = [];
    // Anchor the diff on the longest common subsequence of the two sides.
    const common = lcs(base, target, (a, b) => a === b);
    let baseIdx = 0;
    let targetIdx = 0;
    let commonIdx = 0;
    while (baseIdx < base.length || targetIdx < target.length || commonIdx < common.length) {
        // Find next common line (or end)
        const nextCommon = commonIdx < common.length ? common[commonIdx] : null;
        // Count lines in base until we hit the next common line.
        // NOTE(review): matching is by line *text*, so a changed line whose
        // text happens to equal the upcoming common line ends this run
        // early - acceptable for this diff's purposes, but worth knowing.
        let baseCount = 0;
        const baseStart = baseIdx;
        while (baseIdx < base.length && base[baseIdx] !== nextCommon) {
            baseCount++;
            baseIdx++;
        }
        // Collect lines in target until we hit the next common line
        const newLines = [];
        while (targetIdx < target.length && target[targetIdx] !== nextCommon) {
            newLines.push(target[targetIdx]);
            targetIdx++;
        }
        // If there was any change, record a hunk
        if (baseCount > 0 || newLines.length > 0) {
            hunks.push({ baseStart, baseCount, newLines });
        }
        // Consume the common line; when either side is exhausted there is
        // nothing left to anchor on, so stop.
        if (nextCommon !== null && baseIdx < base.length && targetIdx < target.length) {
            baseIdx++;
            targetIdx++;
            commonIdx++;
        }
        else {
            break;
        }
    }
    return hunks;
}
|
|
1226
|
-
/**
 * Checks if two hunks overlap in their base ranges.
 *
 * Each hunk covers the half-open base range
 * [baseStart, baseStart + baseCount); two such ranges intersect exactly
 * when each one starts before the other one ends.
 *
 * @param h1 - First hunk
 * @param h2 - Second hunk
 * @returns true if the hunks overlap
 *
 * @internal
 */
function hunksOverlap(h1, h2) {
    return (
        h1.baseStart < h2.baseStart + h2.baseCount &&
        h2.baseStart < h1.baseStart + h1.baseCount
    );
}
|
|
1241
|
-
/**
 * Checks if two hunks represent the same change.
 *
 * Two hunks are the same change when they cover the identical base range
 * and produce the identical replacement lines.
 *
 * @param h1 - First hunk
 * @param h2 - Second hunk
 * @returns true if the hunks are identical
 *
 * @internal
 */
function hunksSameChange(h1, h2) {
    return (
        h1.baseStart === h2.baseStart &&
        h1.baseCount === h2.baseCount &&
        h1.newLines.length === h2.newLines.length &&
        h1.newLines.every((line, idx) => line === h2.newLines[idx])
    );
}
|
|
1264
|
-
/**
 * Performs a content-level three-way merge on text files.
 *
 * @description
 * Takes three versions of a file (base, ours, theirs) and attempts to
 * automatically merge them. Non-conflicting changes are combined
 * automatically. Conflicting changes are marked with standard Git
 * conflict markers.
 *
 * The algorithm:
 * 1. Compute the diff hunks from base to ours
 * 2. Compute the diff hunks from base to theirs
 * 3. Process hunks in order, detecting overlaps
 * 4. Non-overlapping hunks are applied automatically
 * 5. Overlapping hunks with identical changes are deduplicated
 * 6. Overlapping hunks with different changes create conflict markers
 *
 * @param base - Content of the base (common ancestor) version
 * @param ours - Content of our (current branch) version
 * @param theirs - Content of their (merged branch) version
 *
 * @returns Object containing merged content, conflict flag, and marker locations
 *
 * @example
 * ```typescript
 * const result = mergeContent(baseContent, oursContent, theirsContent)
 *
 * if (result.hasConflicts) {
 *   console.log('Content has conflicts at:', result.markers)
 *   // Write file with conflict markers for manual resolution
 *   await writeFile(path, result.merged)
 * } else {
 *   console.log('Content merged cleanly')
 *   await writeFile(path, result.merged)
 * }
 * ```
 */
export function mergeContent(base, ours, theirs) {
    // splitLines / encoder are module-level helpers defined earlier in this
    // file (outside this view); presumably splitLines takes bytes/text and
    // yields an array of lines, and encoder is a TextEncoder — verify there.
    const baseLines = splitLines(base);
    const oursLines = splitLines(ours);
    const theirsLines = splitLines(theirs);
    // Handle empty files
    if (baseLines.length === 0 && oursLines.length === 0 && theirsLines.length === 0) {
        return { merged: new Uint8Array(0), hasConflicts: false, markers: [] };
    }
    // If ours and theirs are identical, no conflict
    // (these joins also normalize each version to '\n'-separated text)
    const oursText = oursLines.join('\n');
    const theirsText = theirsLines.join('\n');
    const baseText = baseLines.join('\n');
    if (oursText === theirsText) {
        return {
            merged: encoder.encode(oursText),
            hasConflicts: false,
            markers: []
        };
    }
    // If only one side changed from base, take that side
    if (oursText === baseText) {
        return {
            merged: encoder.encode(theirsText),
            hasConflicts: false,
            markers: []
        };
    }
    if (theirsText === baseText) {
        return {
            merged: encoder.encode(oursText),
            hasConflicts: false,
            markers: []
        };
    }
    // Compute hunks for each side
    const oursHunks = computeHunks(baseLines, oursLines);
    const theirsHunks = computeHunks(baseLines, theirsLines);
    // Build merged result
    const mergedLines = [];
    const markers = [];
    let hasConflicts = false;
    let basePos = 0;      // cursor into baseLines (next unconsumed base line)
    let outputLine = 1;   // 1-based line number of the NEXT line to be emitted
    // Process hunks: both hunk lists are ordered by baseStart, so a single
    // merge-style sweep consumes them in base order.
    let oursIdx = 0;
    let theirsIdx = 0;
    while (basePos < baseLines.length || oursIdx < oursHunks.length || theirsIdx < theirsHunks.length) {
        const oursHunk = oursIdx < oursHunks.length ? oursHunks[oursIdx] : null;
        const theirsHunk = theirsIdx < theirsHunks.length ? theirsHunks[theirsIdx] : null;
        // Find the next position to process (Infinity when a side has no hunks left)
        const oursStart = oursHunk?.baseStart ?? Infinity;
        const theirsStart = theirsHunk?.baseStart ?? Infinity;
        const nextHunkStart = Math.min(oursStart, theirsStart);
        // Copy unchanged lines from base up to the next hunk
        while (basePos < baseLines.length && basePos < nextHunkStart) {
            mergedLines.push(baseLines[basePos]);
            outputLine++;
            basePos++;
        }
        if (oursHunk === null && theirsHunk === null) {
            break;
        }
        // Check if hunks overlap
        // (the explicit baseStart equality also catches two zero-count
        // insertions at the same position, which hunksOverlap treats as
        // non-intersecting empty ranges)
        if (oursHunk !== null && theirsHunk !== null &&
            (oursHunk.baseStart === theirsHunk.baseStart ||
                hunksOverlap(oursHunk, theirsHunk))) {
            // Potential conflict - check if changes are identical
            if (hunksSameChange(oursHunk, theirsHunk)) {
                // Same change on both sides - no conflict
                for (const line of oursHunk.newLines) {
                    mergedLines.push(line);
                    outputLine++;
                }
                basePos = oursHunk.baseStart + oursHunk.baseCount;
                oursIdx++;
                theirsIdx++;
            }
            else {
                // Conflict! Emit both versions between Git-style markers.
                hasConflicts = true;
                const startLine = outputLine;
                // Determine the affected base range (union of both hunks' ranges)
                const conflictBaseStart = Math.min(oursHunk.baseStart, theirsHunk.baseStart);
                const conflictBaseEnd = Math.max(oursHunk.baseStart + oursHunk.baseCount, theirsHunk.baseStart + theirsHunk.baseCount);
                const baseContent = baseLines.slice(conflictBaseStart, conflictBaseEnd);
                mergedLines.push('<<<<<<< ours');
                outputLine++;
                for (const line of oursHunk.newLines) {
                    mergedLines.push(line);
                    outputLine++;
                }
                mergedLines.push('=======');
                outputLine++;
                for (const line of theirsHunk.newLines) {
                    mergedLines.push(line);
                    outputLine++;
                }
                mergedLines.push('>>>>>>> theirs');
                outputLine++;
                // Record where the conflict block landed in the merged output,
                // plus the three competing versions of the region.
                markers.push({
                    startLine,
                    endLine: outputLine - 1,
                    baseContent: baseContent.join('\n'),
                    oursContent: oursHunk.newLines.join('\n'),
                    theirsContent: theirsHunk.newLines.join('\n')
                });
                basePos = conflictBaseEnd;
                oursIdx++;
                theirsIdx++;
            }
        }
        else if (oursHunk !== null && (theirsHunk === null || oursHunk.baseStart < theirsHunk.baseStart)) {
            // Apply ours hunk (it comes first and does not overlap theirs)
            for (const line of oursHunk.newLines) {
                mergedLines.push(line);
                outputLine++;
            }
            basePos = oursHunk.baseStart + oursHunk.baseCount;
            oursIdx++;
        }
        else if (theirsHunk !== null) {
            // Apply theirs hunk
            for (const line of theirsHunk.newLines) {
                mergedLines.push(line);
                outputLine++;
            }
            basePos = theirsHunk.baseStart + theirsHunk.baseCount;
            theirsIdx++;
        }
    }
    // Copy any remaining base lines
    while (basePos < baseLines.length) {
        mergedLines.push(baseLines[basePos]);
        outputLine++;
        basePos++;
    }
    const mergedContent = mergedLines.join('\n');
    return {
        merged: encoder.encode(mergedContent),
        hasConflicts,
        markers
    };
}
|
|
1444
|
-
/**
 * Determines if a file is binary (non-text) based on its content.
 *
 * @description
 * Uses Git's heuristic: a file is considered binary if it contains
 * null bytes (0x00) within the first 8000 bytes, or if it has
 * specific binary file magic numbers (PNG, JPEG, GIF).
 *
 * Binary files cannot be automatically merged and always result
 * in conflicts when both sides modify them.
 *
 * @param content - File content to analyze
 *
 * @returns true if the file appears to be binary, false for text files
 *
 * @example
 * ```typescript
 * const content = await readFile('image.png')
 * if (isBinaryFile(content)) {
 *   console.log('Cannot perform text merge on binary file')
 * }
 * ```
 */
export function isBinaryFile(content) {
    // Empty files are considered text
    if (content.length === 0) {
        return false;
    }
    // Helper: does `content` carry exactly these bytes starting at offset 0?
    const hasPrefix = (...bytes) =>
        content.length >= bytes.length && bytes.every((b, i) => content[i] === b);
    // PNG signature: 0x89 'P' 'N' 'G'
    if (hasPrefix(0x89, 0x50, 0x4e, 0x47)) {
        return true;
    }
    // JPEG signature: 0xFF 0xD8 0xFF
    if (hasPrefix(0xff, 0xd8, 0xff)) {
        return true;
    }
    // GIF signature: "GIF87a" or "GIF89a"
    if (hasPrefix(0x47, 0x49, 0x46, 0x38) &&
        content.length >= 6 &&
        (content[4] === 0x37 || content[4] === 0x39) &&
        content[5] === 0x61) {
        return true;
    }
    // Git-style heuristic: a NUL byte within the first 8000 bytes means binary.
    const limit = Math.min(content.length, 8000);
    for (let i = 0; i < limit; i += 1) {
        if (content[i] === 0x00) {
            return true;
        }
    }
    return false;
}
|
|
1500
|
-
/**
 * Gets the current merge state if a merge is in progress.
 *
 * @description
 * Returns the persisted merge state, which includes information about
 * the merge in progress, any unresolved conflicts, and the original
 * merge options.
 *
 * @param storage - The storage interface
 *
 * @returns A promise resolving to the merge state, or null if no merge is in progress
 *
 * @example
 * ```typescript
 * const state = await getMergeState(storage)
 * if (state) {
 *   console.log('Merging', state.mergeHead, 'into', state.origHead)
 *   console.log('Unresolved conflicts:', state.unresolvedConflicts.length)
 * } else {
 *   console.log('No merge in progress')
 * }
 * ```
 */
export async function getMergeState(storage) {
    // Thin passthrough: the storage layer owns the persisted merge state.
    const state = await storage.readMergeState();
    return state;
}
|
|
1526
|
-
/**
 * Checks if a merge is currently in progress.
 *
 * @description
 * Quick check to determine if there's an active merge that hasn't
 * been completed or aborted. Useful for UI state and command validation.
 *
 * @param storage - The storage interface
 *
 * @returns A promise resolving to true if a merge is in progress
 *
 * @example
 * ```typescript
 * if (await isMergeInProgress(storage)) {
 *   console.log('Please complete or abort the current merge first')
 * } else {
 *   // Safe to start a new merge
 *   await merge(storage, oursSha, theirsSha, options)
 * }
 * ```
 */
export async function isMergeInProgress(storage) {
    // A non-null persisted state is the sole indicator of an active merge.
    return (await storage.readMergeState()) !== null;
}
|
|
1551
|
-
//# sourceMappingURL=merge.js.map
|