gitx.do 0.1.1 → 0.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +40 -353
- package/dist/do/logger.d.ts +50 -0
- package/dist/do/logger.d.ts.map +1 -0
- package/dist/do/logger.js +122 -0
- package/dist/do/logger.js.map +1 -0
- package/dist/{durable-object → do}/schema.d.ts +3 -3
- package/dist/do/schema.d.ts.map +1 -0
- package/dist/{durable-object → do}/schema.js +4 -3
- package/dist/do/schema.js.map +1 -0
- package/dist/do/types.d.ts +267 -0
- package/dist/do/types.d.ts.map +1 -0
- package/dist/do/types.js +62 -0
- package/dist/do/types.js.map +1 -0
- package/dist/index.d.ts +15 -469
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +39 -481
- package/dist/index.js.map +1 -1
- package/dist/mcp/auth.d.ts +77 -0
- package/dist/mcp/auth.d.ts.map +1 -0
- package/dist/mcp/auth.js +278 -0
- package/dist/mcp/auth.js.map +1 -0
- package/dist/mcp/index.d.ts +13 -0
- package/dist/mcp/index.d.ts.map +1 -0
- package/dist/mcp/index.js +19 -0
- package/dist/mcp/index.js.map +1 -0
- package/dist/mcp/server.d.ts +200 -0
- package/dist/mcp/server.d.ts.map +1 -0
- package/dist/mcp/server.js +275 -0
- package/dist/mcp/server.js.map +1 -0
- package/dist/mcp/tool-registry.d.ts +47 -0
- package/dist/mcp/tool-registry.d.ts.map +1 -0
- package/dist/mcp/tool-registry.js +284 -0
- package/dist/mcp/tool-registry.js.map +1 -0
- package/dist/mcp/tools.d.ts +103 -515
- package/dist/mcp/tools.d.ts.map +1 -1
- package/dist/mcp/tools.js +676 -3087
- package/dist/mcp/tools.js.map +1 -1
- package/dist/mcp/types.d.ts +124 -0
- package/dist/mcp/types.d.ts.map +1 -0
- package/dist/mcp/types.js +9 -0
- package/dist/mcp/types.js.map +1 -0
- package/package.json +19 -21
- package/dist/cli/commands/add.d.ts +0 -176
- package/dist/cli/commands/add.d.ts.map +0 -1
- package/dist/cli/commands/add.js +0 -979
- package/dist/cli/commands/add.js.map +0 -1
- package/dist/cli/commands/blame.d.ts +0 -259
- package/dist/cli/commands/blame.d.ts.map +0 -1
- package/dist/cli/commands/blame.js +0 -609
- package/dist/cli/commands/blame.js.map +0 -1
- package/dist/cli/commands/branch.d.ts +0 -249
- package/dist/cli/commands/branch.d.ts.map +0 -1
- package/dist/cli/commands/branch.js +0 -693
- package/dist/cli/commands/branch.js.map +0 -1
- package/dist/cli/commands/checkout.d.ts +0 -73
- package/dist/cli/commands/checkout.d.ts.map +0 -1
- package/dist/cli/commands/checkout.js +0 -725
- package/dist/cli/commands/checkout.js.map +0 -1
- package/dist/cli/commands/commit.d.ts +0 -182
- package/dist/cli/commands/commit.d.ts.map +0 -1
- package/dist/cli/commands/commit.js +0 -457
- package/dist/cli/commands/commit.js.map +0 -1
- package/dist/cli/commands/diff.d.ts +0 -464
- package/dist/cli/commands/diff.d.ts.map +0 -1
- package/dist/cli/commands/diff.js +0 -959
- package/dist/cli/commands/diff.js.map +0 -1
- package/dist/cli/commands/log.d.ts +0 -239
- package/dist/cli/commands/log.d.ts.map +0 -1
- package/dist/cli/commands/log.js +0 -535
- package/dist/cli/commands/log.js.map +0 -1
- package/dist/cli/commands/merge.d.ts +0 -106
- package/dist/cli/commands/merge.d.ts.map +0 -1
- package/dist/cli/commands/merge.js +0 -852
- package/dist/cli/commands/merge.js.map +0 -1
- package/dist/cli/commands/review.d.ts +0 -457
- package/dist/cli/commands/review.d.ts.map +0 -1
- package/dist/cli/commands/review.js +0 -558
- package/dist/cli/commands/review.js.map +0 -1
- package/dist/cli/commands/stash.d.ts +0 -157
- package/dist/cli/commands/stash.d.ts.map +0 -1
- package/dist/cli/commands/stash.js +0 -655
- package/dist/cli/commands/stash.js.map +0 -1
- package/dist/cli/commands/status.d.ts +0 -269
- package/dist/cli/commands/status.d.ts.map +0 -1
- package/dist/cli/commands/status.js +0 -492
- package/dist/cli/commands/status.js.map +0 -1
- package/dist/cli/commands/web.d.ts +0 -199
- package/dist/cli/commands/web.d.ts.map +0 -1
- package/dist/cli/commands/web.js +0 -697
- package/dist/cli/commands/web.js.map +0 -1
- package/dist/cli/fs-adapter.d.ts +0 -656
- package/dist/cli/fs-adapter.d.ts.map +0 -1
- package/dist/cli/fs-adapter.js +0 -1177
- package/dist/cli/fs-adapter.js.map +0 -1
- package/dist/cli/fsx-cli-adapter.d.ts +0 -359
- package/dist/cli/fsx-cli-adapter.d.ts.map +0 -1
- package/dist/cli/fsx-cli-adapter.js +0 -619
- package/dist/cli/fsx-cli-adapter.js.map +0 -1
- package/dist/cli/index.d.ts +0 -387
- package/dist/cli/index.d.ts.map +0 -1
- package/dist/cli/index.js +0 -579
- package/dist/cli/index.js.map +0 -1
- package/dist/cli/ui/components/DiffView.d.ts +0 -12
- package/dist/cli/ui/components/DiffView.d.ts.map +0 -1
- package/dist/cli/ui/components/DiffView.js +0 -11
- package/dist/cli/ui/components/DiffView.js.map +0 -1
- package/dist/cli/ui/components/ErrorDisplay.d.ts +0 -10
- package/dist/cli/ui/components/ErrorDisplay.d.ts.map +0 -1
- package/dist/cli/ui/components/ErrorDisplay.js +0 -11
- package/dist/cli/ui/components/ErrorDisplay.js.map +0 -1
- package/dist/cli/ui/components/FuzzySearch.d.ts +0 -15
- package/dist/cli/ui/components/FuzzySearch.d.ts.map +0 -1
- package/dist/cli/ui/components/FuzzySearch.js +0 -12
- package/dist/cli/ui/components/FuzzySearch.js.map +0 -1
- package/dist/cli/ui/components/LoadingSpinner.d.ts +0 -10
- package/dist/cli/ui/components/LoadingSpinner.d.ts.map +0 -1
- package/dist/cli/ui/components/LoadingSpinner.js +0 -10
- package/dist/cli/ui/components/LoadingSpinner.js.map +0 -1
- package/dist/cli/ui/components/NavigationList.d.ts +0 -14
- package/dist/cli/ui/components/NavigationList.d.ts.map +0 -1
- package/dist/cli/ui/components/NavigationList.js +0 -11
- package/dist/cli/ui/components/NavigationList.js.map +0 -1
- package/dist/cli/ui/components/ScrollableContent.d.ts +0 -13
- package/dist/cli/ui/components/ScrollableContent.d.ts.map +0 -1
- package/dist/cli/ui/components/ScrollableContent.js +0 -11
- package/dist/cli/ui/components/ScrollableContent.js.map +0 -1
- package/dist/cli/ui/components/index.d.ts +0 -7
- package/dist/cli/ui/components/index.d.ts.map +0 -1
- package/dist/cli/ui/components/index.js +0 -9
- package/dist/cli/ui/components/index.js.map +0 -1
- package/dist/cli/ui/terminal-ui.d.ts +0 -85
- package/dist/cli/ui/terminal-ui.d.ts.map +0 -1
- package/dist/cli/ui/terminal-ui.js +0 -121
- package/dist/cli/ui/terminal-ui.js.map +0 -1
- package/dist/do/BashModule.d.ts +0 -871
- package/dist/do/BashModule.d.ts.map +0 -1
- package/dist/do/BashModule.js +0 -1143
- package/dist/do/BashModule.js.map +0 -1
- package/dist/do/FsModule.d.ts +0 -612
- package/dist/do/FsModule.d.ts.map +0 -1
- package/dist/do/FsModule.js +0 -1120
- package/dist/do/FsModule.js.map +0 -1
- package/dist/do/GitModule.d.ts +0 -635
- package/dist/do/GitModule.d.ts.map +0 -1
- package/dist/do/GitModule.js +0 -784
- package/dist/do/GitModule.js.map +0 -1
- package/dist/do/GitRepoDO.d.ts +0 -281
- package/dist/do/GitRepoDO.d.ts.map +0 -1
- package/dist/do/GitRepoDO.js +0 -479
- package/dist/do/GitRepoDO.js.map +0 -1
- package/dist/do/bash-ast.d.ts +0 -246
- package/dist/do/bash-ast.d.ts.map +0 -1
- package/dist/do/bash-ast.js +0 -888
- package/dist/do/bash-ast.js.map +0 -1
- package/dist/do/container-executor.d.ts +0 -491
- package/dist/do/container-executor.d.ts.map +0 -1
- package/dist/do/container-executor.js +0 -731
- package/dist/do/container-executor.js.map +0 -1
- package/dist/do/index.d.ts +0 -53
- package/dist/do/index.d.ts.map +0 -1
- package/dist/do/index.js +0 -91
- package/dist/do/index.js.map +0 -1
- package/dist/do/tiered-storage.d.ts +0 -403
- package/dist/do/tiered-storage.d.ts.map +0 -1
- package/dist/do/tiered-storage.js +0 -689
- package/dist/do/tiered-storage.js.map +0 -1
- package/dist/do/withBash.d.ts +0 -231
- package/dist/do/withBash.d.ts.map +0 -1
- package/dist/do/withBash.js +0 -244
- package/dist/do/withBash.js.map +0 -1
- package/dist/do/withFs.d.ts +0 -237
- package/dist/do/withFs.d.ts.map +0 -1
- package/dist/do/withFs.js +0 -387
- package/dist/do/withFs.js.map +0 -1
- package/dist/do/withGit.d.ts +0 -180
- package/dist/do/withGit.d.ts.map +0 -1
- package/dist/do/withGit.js +0 -271
- package/dist/do/withGit.js.map +0 -1
- package/dist/durable-object/object-store.d.ts +0 -633
- package/dist/durable-object/object-store.d.ts.map +0 -1
- package/dist/durable-object/object-store.js +0 -1164
- package/dist/durable-object/object-store.js.map +0 -1
- package/dist/durable-object/schema.d.ts.map +0 -1
- package/dist/durable-object/schema.js.map +0 -1
- package/dist/durable-object/wal.d.ts +0 -416
- package/dist/durable-object/wal.d.ts.map +0 -1
- package/dist/durable-object/wal.js +0 -445
- package/dist/durable-object/wal.js.map +0 -1
- package/dist/mcp/adapter.d.ts +0 -772
- package/dist/mcp/adapter.d.ts.map +0 -1
- package/dist/mcp/adapter.js +0 -895
- package/dist/mcp/adapter.js.map +0 -1
- package/dist/mcp/sandbox/miniflare-evaluator.d.ts +0 -22
- package/dist/mcp/sandbox/miniflare-evaluator.d.ts.map +0 -1
- package/dist/mcp/sandbox/miniflare-evaluator.js +0 -140
- package/dist/mcp/sandbox/miniflare-evaluator.js.map +0 -1
- package/dist/mcp/sandbox/object-store-proxy.d.ts +0 -32
- package/dist/mcp/sandbox/object-store-proxy.d.ts.map +0 -1
- package/dist/mcp/sandbox/object-store-proxy.js +0 -30
- package/dist/mcp/sandbox/object-store-proxy.js.map +0 -1
- package/dist/mcp/sandbox/template.d.ts +0 -17
- package/dist/mcp/sandbox/template.d.ts.map +0 -1
- package/dist/mcp/sandbox/template.js +0 -71
- package/dist/mcp/sandbox/template.js.map +0 -1
- package/dist/mcp/sandbox.d.ts +0 -764
- package/dist/mcp/sandbox.d.ts.map +0 -1
- package/dist/mcp/sandbox.js +0 -1362
- package/dist/mcp/sandbox.js.map +0 -1
- package/dist/mcp/sdk-adapter.d.ts +0 -835
- package/dist/mcp/sdk-adapter.d.ts.map +0 -1
- package/dist/mcp/sdk-adapter.js +0 -974
- package/dist/mcp/sdk-adapter.js.map +0 -1
- package/dist/mcp/tools/do.d.ts +0 -32
- package/dist/mcp/tools/do.d.ts.map +0 -1
- package/dist/mcp/tools/do.js +0 -117
- package/dist/mcp/tools/do.js.map +0 -1
- package/dist/ops/blame.d.ts +0 -551
- package/dist/ops/blame.d.ts.map +0 -1
- package/dist/ops/blame.js +0 -1037
- package/dist/ops/blame.js.map +0 -1
- package/dist/ops/branch.d.ts +0 -766
- package/dist/ops/branch.d.ts.map +0 -1
- package/dist/ops/branch.js +0 -950
- package/dist/ops/branch.js.map +0 -1
- package/dist/ops/commit-traversal.d.ts +0 -349
- package/dist/ops/commit-traversal.d.ts.map +0 -1
- package/dist/ops/commit-traversal.js +0 -821
- package/dist/ops/commit-traversal.js.map +0 -1
- package/dist/ops/commit.d.ts +0 -555
- package/dist/ops/commit.d.ts.map +0 -1
- package/dist/ops/commit.js +0 -826
- package/dist/ops/commit.js.map +0 -1
- package/dist/ops/merge-base.d.ts +0 -397
- package/dist/ops/merge-base.d.ts.map +0 -1
- package/dist/ops/merge-base.js +0 -691
- package/dist/ops/merge-base.js.map +0 -1
- package/dist/ops/merge.d.ts +0 -855
- package/dist/ops/merge.d.ts.map +0 -1
- package/dist/ops/merge.js +0 -1551
- package/dist/ops/merge.js.map +0 -1
- package/dist/ops/tag.d.ts +0 -247
- package/dist/ops/tag.d.ts.map +0 -1
- package/dist/ops/tag.js +0 -649
- package/dist/ops/tag.js.map +0 -1
- package/dist/ops/tree-builder.d.ts +0 -178
- package/dist/ops/tree-builder.d.ts.map +0 -1
- package/dist/ops/tree-builder.js +0 -271
- package/dist/ops/tree-builder.js.map +0 -1
- package/dist/ops/tree-diff.d.ts +0 -291
- package/dist/ops/tree-diff.d.ts.map +0 -1
- package/dist/ops/tree-diff.js +0 -705
- package/dist/ops/tree-diff.js.map +0 -1
- package/dist/pack/delta.d.ts +0 -248
- package/dist/pack/delta.d.ts.map +0 -1
- package/dist/pack/delta.js +0 -740
- package/dist/pack/delta.js.map +0 -1
- package/dist/pack/format.d.ts +0 -446
- package/dist/pack/format.d.ts.map +0 -1
- package/dist/pack/format.js +0 -572
- package/dist/pack/format.js.map +0 -1
- package/dist/pack/full-generation.d.ts +0 -612
- package/dist/pack/full-generation.d.ts.map +0 -1
- package/dist/pack/full-generation.js +0 -1378
- package/dist/pack/full-generation.js.map +0 -1
- package/dist/pack/generation.d.ts +0 -441
- package/dist/pack/generation.d.ts.map +0 -1
- package/dist/pack/generation.js +0 -707
- package/dist/pack/generation.js.map +0 -1
- package/dist/pack/index.d.ts +0 -502
- package/dist/pack/index.d.ts.map +0 -1
- package/dist/pack/index.js +0 -833
- package/dist/pack/index.js.map +0 -1
- package/dist/refs/branch.d.ts +0 -683
- package/dist/refs/branch.d.ts.map +0 -1
- package/dist/refs/branch.js +0 -881
- package/dist/refs/branch.js.map +0 -1
- package/dist/refs/storage.d.ts +0 -833
- package/dist/refs/storage.d.ts.map +0 -1
- package/dist/refs/storage.js +0 -1023
- package/dist/refs/storage.js.map +0 -1
- package/dist/refs/tag.d.ts +0 -860
- package/dist/refs/tag.d.ts.map +0 -1
- package/dist/refs/tag.js +0 -996
- package/dist/refs/tag.js.map +0 -1
- package/dist/storage/backend.d.ts +0 -425
- package/dist/storage/backend.d.ts.map +0 -1
- package/dist/storage/backend.js +0 -41
- package/dist/storage/backend.js.map +0 -1
- package/dist/storage/fsx-adapter.d.ts +0 -204
- package/dist/storage/fsx-adapter.d.ts.map +0 -1
- package/dist/storage/fsx-adapter.js +0 -518
- package/dist/storage/fsx-adapter.js.map +0 -1
- package/dist/storage/lru-cache.d.ts +0 -691
- package/dist/storage/lru-cache.d.ts.map +0 -1
- package/dist/storage/lru-cache.js +0 -813
- package/dist/storage/lru-cache.js.map +0 -1
- package/dist/storage/object-index.d.ts +0 -585
- package/dist/storage/object-index.d.ts.map +0 -1
- package/dist/storage/object-index.js +0 -532
- package/dist/storage/object-index.js.map +0 -1
- package/dist/storage/r2-pack.d.ts +0 -1257
- package/dist/storage/r2-pack.d.ts.map +0 -1
- package/dist/storage/r2-pack.js +0 -1773
- package/dist/storage/r2-pack.js.map +0 -1
- package/dist/tiered/cdc-pipeline.d.ts +0 -1888
- package/dist/tiered/cdc-pipeline.d.ts.map +0 -1
- package/dist/tiered/cdc-pipeline.js +0 -1880
- package/dist/tiered/cdc-pipeline.js.map +0 -1
- package/dist/tiered/migration.d.ts +0 -1104
- package/dist/tiered/migration.d.ts.map +0 -1
- package/dist/tiered/migration.js +0 -1217
- package/dist/tiered/migration.js.map +0 -1
- package/dist/tiered/parquet-writer.d.ts +0 -1145
- package/dist/tiered/parquet-writer.d.ts.map +0 -1
- package/dist/tiered/parquet-writer.js +0 -1183
- package/dist/tiered/parquet-writer.js.map +0 -1
- package/dist/tiered/read-path.d.ts +0 -835
- package/dist/tiered/read-path.d.ts.map +0 -1
- package/dist/tiered/read-path.js +0 -487
- package/dist/tiered/read-path.js.map +0 -1
- package/dist/types/capability.d.ts +0 -1385
- package/dist/types/capability.d.ts.map +0 -1
- package/dist/types/capability.js +0 -36
- package/dist/types/capability.js.map +0 -1
- package/dist/types/index.d.ts +0 -13
- package/dist/types/index.d.ts.map +0 -1
- package/dist/types/index.js +0 -18
- package/dist/types/index.js.map +0 -1
- package/dist/types/interfaces.d.ts +0 -673
- package/dist/types/interfaces.d.ts.map +0 -1
- package/dist/types/interfaces.js +0 -26
- package/dist/types/interfaces.js.map +0 -1
- package/dist/types/objects.d.ts +0 -692
- package/dist/types/objects.d.ts.map +0 -1
- package/dist/types/objects.js +0 -837
- package/dist/types/objects.js.map +0 -1
- package/dist/types/storage.d.ts +0 -603
- package/dist/types/storage.d.ts.map +0 -1
- package/dist/types/storage.js +0 -191
- package/dist/types/storage.js.map +0 -1
- package/dist/types/worker-loader.d.ts +0 -60
- package/dist/types/worker-loader.d.ts.map +0 -1
- package/dist/types/worker-loader.js +0 -62
- package/dist/types/worker-loader.js.map +0 -1
- package/dist/utils/hash.d.ts +0 -198
- package/dist/utils/hash.d.ts.map +0 -1
- package/dist/utils/hash.js +0 -272
- package/dist/utils/hash.js.map +0 -1
- package/dist/utils/sha1.d.ts +0 -325
- package/dist/utils/sha1.d.ts.map +0 -1
- package/dist/utils/sha1.js +0 -635
- package/dist/utils/sha1.js.map +0 -1
- package/dist/wire/capabilities.d.ts +0 -1044
- package/dist/wire/capabilities.d.ts.map +0 -1
- package/dist/wire/capabilities.js +0 -941
- package/dist/wire/capabilities.js.map +0 -1
- package/dist/wire/path-security.d.ts +0 -157
- package/dist/wire/path-security.d.ts.map +0 -1
- package/dist/wire/path-security.js +0 -307
- package/dist/wire/path-security.js.map +0 -1
- package/dist/wire/pkt-line.d.ts +0 -345
- package/dist/wire/pkt-line.d.ts.map +0 -1
- package/dist/wire/pkt-line.js +0 -381
- package/dist/wire/pkt-line.js.map +0 -1
- package/dist/wire/receive-pack.d.ts +0 -1059
- package/dist/wire/receive-pack.d.ts.map +0 -1
- package/dist/wire/receive-pack.js +0 -1414
- package/dist/wire/receive-pack.js.map +0 -1
- package/dist/wire/smart-http.d.ts +0 -799
- package/dist/wire/smart-http.d.ts.map +0 -1
- package/dist/wire/smart-http.js +0 -945
- package/dist/wire/smart-http.js.map +0 -1
- package/dist/wire/upload-pack.d.ts +0 -727
- package/dist/wire/upload-pack.d.ts.map +0 -1
- package/dist/wire/upload-pack.js +0 -1141
- package/dist/wire/upload-pack.js.map +0 -1
|
@@ -1,1164 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* @fileoverview Git Object Store for Durable Objects
|
|
3
|
-
*
|
|
4
|
-
* This module provides a Git object storage implementation backed by SQLite
|
|
5
|
-
* within Cloudflare Durable Objects. It handles CRUD operations for all four
|
|
6
|
-
* Git object types (blob, tree, commit, tag) with proper SHA-1 hash computation.
|
|
7
|
-
*
|
|
8
|
-
* **Key Features**:
|
|
9
|
-
* - Content-addressable storage using SHA-1 hashes
|
|
10
|
-
* - Write-ahead logging (WAL) for durability
|
|
11
|
-
* - Object index for tiered storage support
|
|
12
|
-
* - Batch operations for efficiency with transaction support
|
|
13
|
-
* - LRU caching for hot tier objects
|
|
14
|
-
* - Metrics and logging infrastructure
|
|
15
|
-
* - Typed accessors for each Git object type
|
|
16
|
-
*
|
|
17
|
-
* @module durable-object/object-store
|
|
18
|
-
*
|
|
19
|
-
* @example
|
|
20
|
-
* ```typescript
|
|
21
|
-
* import { ObjectStore } from './durable-object/object-store'
|
|
22
|
-
*
|
|
23
|
-
* const store = new ObjectStore(durableObjectStorage, {
|
|
24
|
-
* cacheMaxCount: 1000,
|
|
25
|
-
* cacheMaxBytes: 50 * 1024 * 1024, // 50MB
|
|
26
|
-
* enableMetrics: true
|
|
27
|
-
* })
|
|
28
|
-
*
|
|
29
|
-
* // Store a blob
|
|
30
|
-
* const content = new TextEncoder().encode('Hello, World!')
|
|
31
|
-
* const sha = await store.putObject('blob', content)
|
|
32
|
-
*
|
|
33
|
-
* // Retrieve it (cached on second access)
|
|
34
|
-
* const obj = await store.getObject(sha)
|
|
35
|
-
* console.log(obj?.type, obj?.size)
|
|
36
|
-
*
|
|
37
|
-
* // Get typed object
|
|
38
|
-
* const blob = await store.getBlobObject(sha)
|
|
39
|
-
*
|
|
40
|
-
* // Get metrics
|
|
41
|
-
* const metrics = store.getMetrics()
|
|
42
|
-
* console.log(`Cache hit rate: ${metrics.cacheHitRate}%`)
|
|
43
|
-
* ```
|
|
44
|
-
*/
|
|
45
|
-
import { LRUCache } from '../storage/lru-cache';
|
|
46
|
-
import { isValidMode, isValidSha } from '../types/objects';
|
|
47
|
-
// Reserved for future validation
|
|
48
|
-
import { validateTreeEntry as _validateTreeEntry } from '../types/objects';
|
|
49
|
-
void _validateTreeEntry;
|
|
50
|
-
import { hashObject } from '../utils/hash';
|
|
51
|
-
const encoder = new TextEncoder();
|
|
52
|
-
const decoder = new TextDecoder();
|
|
53
|
-
// Default cache configuration
|
|
54
|
-
const DEFAULT_CACHE_MAX_COUNT = 500;
|
|
55
|
-
const DEFAULT_CACHE_MAX_BYTES = 25 * 1024 * 1024; // 25MB
|
|
56
|
-
// ============================================================================
|
|
57
|
-
// ObjectStore Class
|
|
58
|
-
// ============================================================================
|
|
59
|
-
/**
|
|
60
|
-
* ObjectStore class for managing Git objects in SQLite storage.
|
|
61
|
-
*
|
|
62
|
-
* @description
|
|
63
|
-
* Provides a complete implementation of Git object storage operations.
|
|
64
|
-
* All objects are stored in the `objects` table and indexed in `object_index`
|
|
65
|
-
* for tiered storage support. Write operations are logged to WAL for durability.
|
|
66
|
-
*
|
|
67
|
-
* @example
|
|
68
|
-
* ```typescript
|
|
69
|
-
* const store = new ObjectStore(durableObjectStorage)
|
|
70
|
-
*
|
|
71
|
-
* // Create a commit
|
|
72
|
-
* const commitSha = await store.putCommitObject({
|
|
73
|
-
* tree: treeSha,
|
|
74
|
-
* parents: [parentSha],
|
|
75
|
-
* author: { name: 'Alice', email: 'alice@example.com', timestamp: 1704067200, timezone: '+0000' },
|
|
76
|
-
* committer: { name: 'Alice', email: 'alice@example.com', timestamp: 1704067200, timezone: '+0000' },
|
|
77
|
-
* message: 'Initial commit'
|
|
78
|
-
* })
|
|
79
|
-
*
|
|
80
|
-
* // Read it back
|
|
81
|
-
* const commit = await store.getCommitObject(commitSha)
|
|
82
|
-
* console.log(commit?.message)
|
|
83
|
-
* ```
|
|
84
|
-
*/
|
|
85
|
-
export class ObjectStore {
|
|
86
|
-
storage;
|
|
87
|
-
cache;
|
|
88
|
-
options;
|
|
89
|
-
logger;
|
|
90
|
-
backend;
|
|
91
|
-
// Metrics tracking
|
|
92
|
-
_reads = 0;
|
|
93
|
-
_writes = 0;
|
|
94
|
-
_deletes = 0;
|
|
95
|
-
_bytesWritten = 0;
|
|
96
|
-
_bytesRead = 0;
|
|
97
|
-
_totalWriteLatency = 0;
|
|
98
|
-
_totalReadLatency = 0;
|
|
99
|
-
_batchOperations = 0;
|
|
100
|
-
_batchObjectsTotal = 0;
|
|
101
|
-
/**
|
|
102
|
-
* Create a new ObjectStore.
|
|
103
|
-
*
|
|
104
|
-
* @param storage - Durable Object storage interface with SQL support
|
|
105
|
-
* @param options - Configuration options for caching, metrics, logging, and backend
|
|
106
|
-
*
|
|
107
|
-
* @example
|
|
108
|
-
* ```typescript
|
|
109
|
-
* // Basic usage (SQLite backend)
|
|
110
|
-
* const store = new ObjectStore(storage)
|
|
111
|
-
*
|
|
112
|
-
* // With caching and metrics
|
|
113
|
-
* const store = new ObjectStore(storage, {
|
|
114
|
-
* cacheMaxCount: 1000,
|
|
115
|
-
* cacheMaxBytes: 50 * 1024 * 1024,
|
|
116
|
-
* enableMetrics: true,
|
|
117
|
-
* logger: console
|
|
118
|
-
* })
|
|
119
|
-
*
|
|
120
|
-
* // With StorageBackend abstraction
|
|
121
|
-
* const store = new ObjectStore(storage, {
|
|
122
|
-
* backend: fsBackend
|
|
123
|
-
* })
|
|
124
|
-
* ```
|
|
125
|
-
*/
|
|
126
|
-
constructor(storage, options) {
|
|
127
|
-
this.storage = storage;
|
|
128
|
-
this.options = options ?? {};
|
|
129
|
-
this.logger = options?.logger;
|
|
130
|
-
this.backend = options?.backend ?? null;
|
|
131
|
-
// Initialize LRU cache for hot tier objects
|
|
132
|
-
this.cache = new LRUCache({
|
|
133
|
-
maxCount: options?.cacheMaxCount ?? DEFAULT_CACHE_MAX_COUNT,
|
|
134
|
-
maxBytes: options?.cacheMaxBytes ?? DEFAULT_CACHE_MAX_BYTES,
|
|
135
|
-
defaultTTL: options?.cacheTTL,
|
|
136
|
-
sizeCalculator: (obj) => obj.data.byteLength + 100, // 100 bytes overhead for metadata
|
|
137
|
-
onEvict: (key, _value, reason) => {
|
|
138
|
-
this.log('debug', `Cache eviction: ${key} (reason: ${reason})`);
|
|
139
|
-
}
|
|
140
|
-
});
|
|
141
|
-
}
|
|
142
|
-
/**
|
|
143
|
-
* Log a message if logger is configured.
|
|
144
|
-
* @internal
|
|
145
|
-
*/
|
|
146
|
-
log(level, message, ...args) {
|
|
147
|
-
if (!this.logger)
|
|
148
|
-
return;
|
|
149
|
-
const logFn = this.logger[level];
|
|
150
|
-
if (logFn) {
|
|
151
|
-
logFn.call(this.logger, `[ObjectStore] ${message}`, ...args);
|
|
152
|
-
}
|
|
153
|
-
}
|
|
154
|
-
/**
|
|
155
|
-
* Store a raw object and return its SHA.
|
|
156
|
-
*
|
|
157
|
-
* @description
|
|
158
|
-
* Computes the SHA-1 hash of the object in Git format (type + size + content),
|
|
159
|
-
* logs the operation to WAL, stores the object, and updates the object index.
|
|
160
|
-
* If an object with the same SHA already exists, it is replaced (idempotent).
|
|
161
|
-
* The object is also added to the LRU cache for fast subsequent reads.
|
|
162
|
-
*
|
|
163
|
-
* @param type - Object type ('blob', 'tree', 'commit', 'tag')
|
|
164
|
-
* @param data - Raw object content (without Git header)
|
|
165
|
-
* @returns 40-character SHA-1 hash of the stored object
|
|
166
|
-
*
|
|
167
|
-
* @example
|
|
168
|
-
* ```typescript
|
|
169
|
-
* const content = new TextEncoder().encode('file content')
|
|
170
|
-
* const sha = await store.putObject('blob', content)
|
|
171
|
-
* console.log(`Stored blob: ${sha}`)
|
|
172
|
-
* ```
|
|
173
|
-
*/
|
|
174
|
-
async putObject(type, data) {
|
|
175
|
-
const startTime = this.options.enableMetrics ? Date.now() : 0;
|
|
176
|
-
// Delegate to backend if available
|
|
177
|
-
if (this.backend) {
|
|
178
|
-
const sha = await this.backend.putObject(type, data);
|
|
179
|
-
// Add to cache for fast subsequent reads
|
|
180
|
-
const storedObject = {
|
|
181
|
-
sha,
|
|
182
|
-
type,
|
|
183
|
-
size: data.length,
|
|
184
|
-
data,
|
|
185
|
-
createdAt: Date.now()
|
|
186
|
-
};
|
|
187
|
-
this.cache.set(sha, storedObject);
|
|
188
|
-
// Update metrics
|
|
189
|
-
if (this.options.enableMetrics) {
|
|
190
|
-
this._writes++;
|
|
191
|
-
this._bytesWritten += data.length;
|
|
192
|
-
this._totalWriteLatency += Date.now() - startTime;
|
|
193
|
-
}
|
|
194
|
-
return sha;
|
|
195
|
-
}
|
|
196
|
-
// Existing SQLite implementation as fallback
|
|
197
|
-
// Compute SHA-1 hash using git object format: "type size\0content"
|
|
198
|
-
const sha = await hashObject(type, data);
|
|
199
|
-
this.log('debug', `Storing ${type} object: ${sha} (${data.length} bytes)`);
|
|
200
|
-
// Log to WAL first
|
|
201
|
-
await this.logToWAL('PUT', sha, type, data);
|
|
202
|
-
const now = Date.now();
|
|
203
|
-
// Store the object
|
|
204
|
-
this.storage.sql.exec('INSERT OR REPLACE INTO objects (sha, type, size, data, created_at) VALUES (?, ?, ?, ?, ?)', sha, type, data.length, data, now);
|
|
205
|
-
// Update object index
|
|
206
|
-
this.storage.sql.exec('INSERT OR REPLACE INTO object_index (sha, tier, pack_id, offset, size, type, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?)', sha, 'hot', null, // pack_id is null for hot tier
|
|
207
|
-
null, // offset is null for hot tier
|
|
208
|
-
data.length, type, now);
|
|
209
|
-
// Add to cache for fast subsequent reads
|
|
210
|
-
const storedObject = {
|
|
211
|
-
sha,
|
|
212
|
-
type,
|
|
213
|
-
size: data.length,
|
|
214
|
-
data,
|
|
215
|
-
createdAt: now
|
|
216
|
-
};
|
|
217
|
-
this.cache.set(sha, storedObject);
|
|
218
|
-
// Update metrics
|
|
219
|
-
if (this.options.enableMetrics) {
|
|
220
|
-
this._writes++;
|
|
221
|
-
this._bytesWritten += data.length;
|
|
222
|
-
this._totalWriteLatency += Date.now() - startTime;
|
|
223
|
-
}
|
|
224
|
-
return sha;
|
|
225
|
-
}
|
|
226
|
-
/**
|
|
227
|
-
* Store a tree object with entries.
|
|
228
|
-
*
|
|
229
|
-
* @description
|
|
230
|
-
* Creates a Git tree object from an array of entries. Entries are sorted
|
|
231
|
-
* by name (with directories treated as having trailing slashes for sorting).
|
|
232
|
-
* Each entry is serialized as: "{mode} {name}\0{20-byte-sha}"
|
|
233
|
-
*
|
|
234
|
-
* @param entries - Array of tree entries (files and subdirectories)
|
|
235
|
-
* @returns 40-character SHA-1 hash of the stored tree
|
|
236
|
-
*
|
|
237
|
-
* @example
|
|
238
|
-
* ```typescript
|
|
239
|
-
* const treeSha = await store.putTreeObject([
|
|
240
|
-
* { mode: '100644', name: 'README.md', sha: blobSha },
|
|
241
|
-
* { mode: '040000', name: 'src', sha: subdirSha }
|
|
242
|
-
* ])
|
|
243
|
-
* ```
|
|
244
|
-
*/
|
|
245
|
-
async putTreeObject(entries) {
|
|
246
|
-
// Validate all entries first
|
|
247
|
-
const seenNames = new Set();
|
|
248
|
-
for (const entry of entries) {
|
|
249
|
-
// Check for invalid names: empty, '.', '..', contains '/' or null byte
|
|
250
|
-
if (!entry.name || entry.name === '.' || entry.name === '..') {
|
|
251
|
-
throw new Error(`Invalid entry name: "${entry.name}". Entry names cannot be empty, ".", or ".."`);
|
|
252
|
-
}
|
|
253
|
-
if (entry.name.includes('/')) {
|
|
254
|
-
throw new Error(`Invalid entry name: "${entry.name}". Entry names cannot contain path separators`);
|
|
255
|
-
}
|
|
256
|
-
if (entry.name.includes('\0')) {
|
|
257
|
-
throw new Error(`Invalid entry name: "${entry.name}". Entry names cannot contain null bytes`);
|
|
258
|
-
}
|
|
259
|
-
// Check for duplicate names
|
|
260
|
-
if (seenNames.has(entry.name)) {
|
|
261
|
-
throw new Error(`Duplicate entry name: "${entry.name}". Tree entries must have unique names`);
|
|
262
|
-
}
|
|
263
|
-
seenNames.add(entry.name);
|
|
264
|
-
// Validate mode
|
|
265
|
-
if (!isValidMode(entry.mode)) {
|
|
266
|
-
throw new Error(`Invalid mode: "${entry.mode}". Valid modes: 100644, 100755, 040000, 120000, 160000`);
|
|
267
|
-
}
|
|
268
|
-
// Validate SHA
|
|
269
|
-
if (!isValidSha(entry.sha)) {
|
|
270
|
-
throw new Error(`Invalid SHA: "${entry.sha}". Must be 40 lowercase hex characters`);
|
|
271
|
-
}
|
|
272
|
-
}
|
|
273
|
-
// Sort entries by name using ASCII byte-order comparison
|
|
274
|
-
// Git sorts directories as if they have trailing slashes for comparison
|
|
275
|
-
const sortedEntries = [...entries].sort((a, b) => {
|
|
276
|
-
const aName = a.mode === '040000' ? a.name + '/' : a.name;
|
|
277
|
-
const bName = b.mode === '040000' ? b.name + '/' : b.name;
|
|
278
|
-
// Use simple comparison for ASCII byte order
|
|
279
|
-
if (aName < bName)
|
|
280
|
-
return -1;
|
|
281
|
-
if (aName > bName)
|
|
282
|
-
return 1;
|
|
283
|
-
return 0;
|
|
284
|
-
});
|
|
285
|
-
// Build tree content (without header)
|
|
286
|
-
const entryParts = [];
|
|
287
|
-
for (const entry of sortedEntries) {
|
|
288
|
-
const modeName = encoder.encode(`${entry.mode} ${entry.name}\0`);
|
|
289
|
-
const sha20 = hexToBytes(entry.sha);
|
|
290
|
-
const entryData = new Uint8Array(modeName.length + 20);
|
|
291
|
-
entryData.set(modeName);
|
|
292
|
-
entryData.set(sha20, modeName.length);
|
|
293
|
-
entryParts.push(entryData);
|
|
294
|
-
}
|
|
295
|
-
// Combine all entry parts
|
|
296
|
-
const contentLength = entryParts.reduce((sum, part) => sum + part.length, 0);
|
|
297
|
-
const content = new Uint8Array(contentLength);
|
|
298
|
-
let offset = 0;
|
|
299
|
-
for (const part of entryParts) {
|
|
300
|
-
content.set(part, offset);
|
|
301
|
-
offset += part.length;
|
|
302
|
-
}
|
|
303
|
-
return this.putObject('tree', content);
|
|
304
|
-
}
|
|
305
|
-
/**
|
|
306
|
-
* Store a commit object.
|
|
307
|
-
*
|
|
308
|
-
* @description
|
|
309
|
-
* Creates a Git commit object with the specified tree, parents, author,
|
|
310
|
-
* committer, and message. The commit content is formatted according to
|
|
311
|
-
* the Git commit format specification.
|
|
312
|
-
*
|
|
313
|
-
* @param commit - Commit data
|
|
314
|
-
* @param commit.tree - SHA of the root tree object
|
|
315
|
-
* @param commit.parents - Array of parent commit SHAs (empty for root commit)
|
|
316
|
-
* @param commit.author - Author information
|
|
317
|
-
* @param commit.committer - Committer information
|
|
318
|
-
* @param commit.message - Commit message
|
|
319
|
-
* @returns 40-character SHA-1 hash of the stored commit
|
|
320
|
-
*
|
|
321
|
-
* @example
|
|
322
|
-
* ```typescript
|
|
323
|
-
* const now = Math.floor(Date.now() / 1000)
|
|
324
|
-
* const author = { name: 'Alice', email: 'alice@example.com', timestamp: now, timezone: '+0000' }
|
|
325
|
-
*
|
|
326
|
-
* const sha = await store.putCommitObject({
|
|
327
|
-
* tree: treeSha,
|
|
328
|
-
* parents: [],
|
|
329
|
-
* author,
|
|
330
|
-
* committer: author,
|
|
331
|
-
* message: 'Initial commit\n\nThis is the first commit.'
|
|
332
|
-
* })
|
|
333
|
-
* ```
|
|
334
|
-
*/
|
|
335
|
-
async putCommitObject(commit) {
|
|
336
|
-
// Build commit content (without header)
|
|
337
|
-
const lines = [];
|
|
338
|
-
lines.push(`tree ${commit.tree}`);
|
|
339
|
-
for (const parent of commit.parents) {
|
|
340
|
-
lines.push(`parent ${parent}`);
|
|
341
|
-
}
|
|
342
|
-
lines.push(`author ${commit.author.name} <${commit.author.email}> ${commit.author.timestamp} ${commit.author.timezone}`);
|
|
343
|
-
lines.push(`committer ${commit.committer.name} <${commit.committer.email}> ${commit.committer.timestamp} ${commit.committer.timezone}`);
|
|
344
|
-
lines.push('');
|
|
345
|
-
lines.push(commit.message);
|
|
346
|
-
const content = encoder.encode(lines.join('\n'));
|
|
347
|
-
return this.putObject('commit', content);
|
|
348
|
-
}
|
|
349
|
-
/**
|
|
350
|
-
* Store a tag object (annotated tag).
|
|
351
|
-
*
|
|
352
|
-
* @description
|
|
353
|
-
* Creates a Git tag object pointing to another object with tagger
|
|
354
|
-
* information and a message. The tag content is formatted according
|
|
355
|
-
* to the Git tag format specification.
|
|
356
|
-
*
|
|
357
|
-
* @param tag - Tag data
|
|
358
|
-
* @param tag.object - SHA of the object being tagged
|
|
359
|
-
* @param tag.objectType - Type of the object being tagged
|
|
360
|
-
* @param tag.tagger - Tagger information
|
|
361
|
-
* @param tag.message - Tag message
|
|
362
|
-
* @param tag.name - Tag name
|
|
363
|
-
* @returns 40-character SHA-1 hash of the stored tag object
|
|
364
|
-
*
|
|
365
|
-
* @example
|
|
366
|
-
* ```typescript
|
|
367
|
-
* const now = Math.floor(Date.now() / 1000)
|
|
368
|
-
* const tagger = { name: 'Bob', email: 'bob@example.com', timestamp: now, timezone: '+0000' }
|
|
369
|
-
*
|
|
370
|
-
* const sha = await store.putTagObject({
|
|
371
|
-
* object: commitSha,
|
|
372
|
-
* objectType: 'commit',
|
|
373
|
-
* tagger,
|
|
374
|
-
* message: 'Release v1.0.0',
|
|
375
|
-
* name: 'v1.0.0'
|
|
376
|
-
* })
|
|
377
|
-
* ```
|
|
378
|
-
*/
|
|
379
|
-
async putTagObject(tag) {
|
|
380
|
-
// Build tag content (without header)
|
|
381
|
-
const lines = [];
|
|
382
|
-
lines.push(`object ${tag.object}`);
|
|
383
|
-
lines.push(`type ${tag.objectType}`);
|
|
384
|
-
lines.push(`tag ${tag.name}`);
|
|
385
|
-
if (tag.tagger) {
|
|
386
|
-
lines.push(`tagger ${tag.tagger.name} <${tag.tagger.email}> ${tag.tagger.timestamp} ${tag.tagger.timezone}`);
|
|
387
|
-
}
|
|
388
|
-
lines.push('');
|
|
389
|
-
lines.push(tag.message);
|
|
390
|
-
const content = encoder.encode(lines.join('\n'));
|
|
391
|
-
return this.putObject('tag', content);
|
|
392
|
-
}
|
|
393
|
-
/**
 * Retrieve an object by SHA.
 *
 * @description
 * Fetches an object from the LRU cache first, then delegates to the
 * pluggable backend if one is configured, and finally falls back to the
 * local SQLite table. Returns null if the object doesn't exist or if the
 * SHA is invalid. Every miss that is then found in persistent storage is
 * added to the cache for subsequent reads.
 *
 * @param sha - 40-character SHA-1 hash (anything shorter than 4 chars is rejected)
 * @returns The stored object or null if not found
 *
 * @example
 * ```typescript
 * const obj = await store.getObject(sha)
 * if (obj) {
 *   console.log(`Found ${obj.type} of ${obj.size} bytes`)
 * }
 * ```
 */
async getObject(sha) {
    // Metrics are opt-in; skip the clock read entirely when disabled.
    const startTime = this.options.enableMetrics ? Date.now() : 0;
    // Reject obviously invalid SHAs (4 is the minimum abbreviated length).
    if (!sha || sha.length < 4) {
        return null;
    }
    // Check cache first (fast path)
    const cached = this.cache.get(sha);
    if (cached) {
        this.log('debug', `Cache hit for object: ${sha}`);
        if (this.options.enableMetrics) {
            this._reads++;
            this._bytesRead += cached.size;
            this._totalReadLatency += Date.now() - startTime;
        }
        return cached;
    }
    // Delegate to backend if available
    if (this.backend) {
        const result = await this.backend.getObject(sha);
        if (!result) {
            this.log('debug', `Object not found: ${sha}`);
            // Misses still count as reads (but contribute no bytes).
            if (this.options.enableMetrics) {
                this._reads++;
                this._totalReadLatency += Date.now() - startTime;
            }
            return null;
        }
        // Normalize the backend's { type, content } shape into a stored-object
        // record. NOTE(review): createdAt here is the fetch time, not the
        // original write time — confirm callers don't rely on it.
        const obj = {
            sha,
            type: result.type,
            size: result.content.length,
            data: result.content,
            createdAt: Date.now()
        };
        // Add to cache for subsequent reads
        this.cache.set(sha, obj);
        if (this.options.enableMetrics) {
            this._reads++;
            this._bytesRead += obj.size;
            this._totalReadLatency += Date.now() - startTime;
        }
        return obj;
    }
    // Existing SQLite implementation as fallback: fall back to database.
    const result = this.storage.sql.exec('SELECT sha, type, size, data, created_at as createdAt FROM objects WHERE sha = ?', sha);
    const rows = result.toArray();
    if (rows.length === 0) {
        this.log('debug', `Object not found: ${sha}`);
        if (this.options.enableMetrics) {
            this._reads++;
            this._totalReadLatency += Date.now() - startTime;
        }
        return null;
    }
    const obj = rows[0];
    // Add to cache for subsequent reads
    this.cache.set(sha, obj);
    if (this.options.enableMetrics) {
        this._reads++;
        this._bytesRead += obj.size;
        this._totalReadLatency += Date.now() - startTime;
    }
    return obj;
}
|
|
476
|
-
/**
|
|
477
|
-
* Delete an object by SHA.
|
|
478
|
-
*
|
|
479
|
-
* @description
|
|
480
|
-
* Removes an object from the cache, objects table, and the object index.
|
|
481
|
-
* The operation is logged to WAL. Returns false if the object doesn't exist.
|
|
482
|
-
*
|
|
483
|
-
* **Warning**: Deleting objects that are still referenced by other objects
|
|
484
|
-
* (e.g., blobs referenced by trees) will corrupt the repository.
|
|
485
|
-
*
|
|
486
|
-
* @param sha - 40-character SHA-1 hash
|
|
487
|
-
* @returns True if the object was deleted, false if it didn't exist
|
|
488
|
-
*
|
|
489
|
-
* @example
|
|
490
|
-
* ```typescript
|
|
491
|
-
* const deleted = await store.deleteObject(sha)
|
|
492
|
-
* if (deleted) {
|
|
493
|
-
* console.log('Object removed')
|
|
494
|
-
* }
|
|
495
|
-
* ```
|
|
496
|
-
*/
|
|
497
|
-
async deleteObject(sha) {
|
|
498
|
-
// Delegate to backend if available
|
|
499
|
-
if (this.backend) {
|
|
500
|
-
// Check if object exists first via backend
|
|
501
|
-
const exists = await this.backend.hasObject(sha);
|
|
502
|
-
if (!exists) {
|
|
503
|
-
return false;
|
|
504
|
-
}
|
|
505
|
-
this.log('debug', `Deleting object via backend: ${sha}`);
|
|
506
|
-
await this.backend.deleteObject(sha);
|
|
507
|
-
// Remove from cache
|
|
508
|
-
this.cache.delete(sha);
|
|
509
|
-
// Update metrics
|
|
510
|
-
if (this.options.enableMetrics) {
|
|
511
|
-
this._deletes++;
|
|
512
|
-
}
|
|
513
|
-
return true;
|
|
514
|
-
}
|
|
515
|
-
// Existing SQLite implementation as fallback
|
|
516
|
-
// Check if object exists first
|
|
517
|
-
const exists = await this.hasObject(sha);
|
|
518
|
-
if (!exists) {
|
|
519
|
-
return false;
|
|
520
|
-
}
|
|
521
|
-
this.log('debug', `Deleting object: ${sha}`);
|
|
522
|
-
// Log to WAL
|
|
523
|
-
await this.logToWAL('DELETE', sha, 'blob', new Uint8Array(0));
|
|
524
|
-
// Delete from objects table
|
|
525
|
-
this.storage.sql.exec('DELETE FROM objects WHERE sha = ?', sha);
|
|
526
|
-
// Delete from object index
|
|
527
|
-
this.storage.sql.exec('DELETE FROM object_index WHERE sha = ?', sha);
|
|
528
|
-
// Remove from cache
|
|
529
|
-
this.cache.delete(sha);
|
|
530
|
-
// Update metrics
|
|
531
|
-
if (this.options.enableMetrics) {
|
|
532
|
-
this._deletes++;
|
|
533
|
-
}
|
|
534
|
-
return true;
|
|
535
|
-
}
|
|
536
|
-
/**
|
|
537
|
-
* Check if an object exists.
|
|
538
|
-
*
|
|
539
|
-
* @description
|
|
540
|
-
* Efficiently checks for object existence without fetching the full content.
|
|
541
|
-
*
|
|
542
|
-
* @param sha - 40-character SHA-1 hash
|
|
543
|
-
* @returns True if the object exists, false otherwise
|
|
544
|
-
*
|
|
545
|
-
* @example
|
|
546
|
-
* ```typescript
|
|
547
|
-
* if (await store.hasObject(sha)) {
|
|
548
|
-
* console.log('Object exists')
|
|
549
|
-
* }
|
|
550
|
-
* ```
|
|
551
|
-
*/
|
|
552
|
-
async hasObject(sha) {
|
|
553
|
-
if (!sha || sha.length < 4) {
|
|
554
|
-
return false;
|
|
555
|
-
}
|
|
556
|
-
// Check cache first (fast path)
|
|
557
|
-
if (this.cache.has(sha)) {
|
|
558
|
-
return true;
|
|
559
|
-
}
|
|
560
|
-
// Delegate to backend if available
|
|
561
|
-
if (this.backend) {
|
|
562
|
-
return this.backend.hasObject(sha);
|
|
563
|
-
}
|
|
564
|
-
// Existing SQLite implementation as fallback
|
|
565
|
-
// Use getObject and check for null - this works better with the mock
|
|
566
|
-
const obj = await this.getObject(sha);
|
|
567
|
-
return obj !== null;
|
|
568
|
-
}
|
|
569
|
-
/**
|
|
570
|
-
* Verify an object's integrity by recomputing its hash.
|
|
571
|
-
*
|
|
572
|
-
* @description
|
|
573
|
-
* Computes the SHA-1 hash of the stored object and compares it
|
|
574
|
-
* to the stored SHA. Returns false if the object is corrupted
|
|
575
|
-
* or doesn't exist.
|
|
576
|
-
*
|
|
577
|
-
* @param sha - 40-character SHA-1 hash to verify
|
|
578
|
-
* @returns True if the computed hash matches, false otherwise
|
|
579
|
-
*
|
|
580
|
-
* @example
|
|
581
|
-
* ```typescript
|
|
582
|
-
* if (await store.verifyObject(sha)) {
|
|
583
|
-
* console.log('Object integrity verified')
|
|
584
|
-
* } else {
|
|
585
|
-
* console.log('Object is corrupted or missing')
|
|
586
|
-
* }
|
|
587
|
-
* ```
|
|
588
|
-
*/
|
|
589
|
-
async verifyObject(sha) {
|
|
590
|
-
// Read directly from storage (bypass cache) to verify actual stored data
|
|
591
|
-
const result = this.storage.sql.exec('SELECT type, data FROM objects WHERE sha = ?', sha);
|
|
592
|
-
const rows = result.toArray();
|
|
593
|
-
if (rows.length === 0) {
|
|
594
|
-
return false;
|
|
595
|
-
}
|
|
596
|
-
const obj = rows[0];
|
|
597
|
-
const computedSha = await hashObject(obj.type, new Uint8Array(obj.data));
|
|
598
|
-
return computedSha === sha;
|
|
599
|
-
}
|
|
600
|
-
/**
|
|
601
|
-
* Get object type by SHA.
|
|
602
|
-
*
|
|
603
|
-
* @description
|
|
604
|
-
* Returns just the type of an object without fetching its content.
|
|
605
|
-
*
|
|
606
|
-
* @param sha - 40-character SHA-1 hash
|
|
607
|
-
* @returns Object type or null if not found
|
|
608
|
-
*
|
|
609
|
-
* @example
|
|
610
|
-
* ```typescript
|
|
611
|
-
* const type = await store.getObjectType(sha)
|
|
612
|
-
* if (type === 'commit') {
|
|
613
|
-
* // Handle commit
|
|
614
|
-
* }
|
|
615
|
-
* ```
|
|
616
|
-
*/
|
|
617
|
-
async getObjectType(sha) {
|
|
618
|
-
const obj = await this.getObject(sha);
|
|
619
|
-
return obj?.type ?? null;
|
|
620
|
-
}
|
|
621
|
-
/**
|
|
622
|
-
* Get object size by SHA.
|
|
623
|
-
*
|
|
624
|
-
* @description
|
|
625
|
-
* Returns just the size of an object without fetching its content.
|
|
626
|
-
*
|
|
627
|
-
* @param sha - 40-character SHA-1 hash
|
|
628
|
-
* @returns Object size in bytes or null if not found
|
|
629
|
-
*
|
|
630
|
-
* @example
|
|
631
|
-
* ```typescript
|
|
632
|
-
* const size = await store.getObjectSize(sha)
|
|
633
|
-
* console.log(`Object is ${size} bytes`)
|
|
634
|
-
* ```
|
|
635
|
-
*/
|
|
636
|
-
async getObjectSize(sha) {
|
|
637
|
-
const obj = await this.getObject(sha);
|
|
638
|
-
return obj?.size ?? null;
|
|
639
|
-
}
|
|
640
|
-
/**
 * Store multiple objects in a batch using a single transaction.
 *
 * @description
 * Stores multiple objects atomically within a single SQLite transaction.
 * This is more efficient than individual puts for bulk operations as it:
 * - Reduces the number of disk flushes
 * - Ensures atomic writes (all-or-nothing)
 * - Batches WAL entries for better performance
 *
 * All SHA-1 hashes are computed up front, before the transaction opens,
 * so no CPU-bound work happens while the transaction is held.
 *
 * NOTE(review): this batch path does not consult `this.backend` — with a
 * pluggable backend configured it still writes to SQLite. Confirm intended.
 *
 * @param objects - Array of objects to store
 * @returns Array of SHA-1 hashes in the same order as input
 * @throws Re-throws any storage error after rolling the transaction back
 *
 * @example
 * ```typescript
 * const shas = await store.putObjects([
 *   { type: 'blob', data: content1 },
 *   { type: 'blob', data: content2 }
 * ])
 * ```
 */
async putObjects(objects) {
    if (objects.length === 0) {
        return [];
    }
    // For single objects, delegate to putObject (no transaction needed)
    if (objects.length === 1) {
        const sha = await this.putObject(objects[0].type, objects[0].data);
        return [sha];
    }
    const startTime = this.options.enableMetrics ? Date.now() : 0;
    const shas = [];
    // Single timestamp for the whole batch, so all rows share created_at.
    const now = Date.now();
    let totalBytes = 0;
    this.log('info', `Starting batch write of ${objects.length} objects`);
    // Pre-compute all SHA hashes (CPU-bound, before transaction)
    const objectsWithSha = [];
    for (const obj of objects) {
        const sha = await hashObject(obj.type, obj.data);
        objectsWithSha.push({ sha, type: obj.type, data: obj.data });
        shas.push(sha);
        totalBytes += obj.data.length;
    }
    // Begin transaction for atomic batch write
    this.storage.sql.exec('BEGIN TRANSACTION');
    try {
        for (const { sha, type, data } of objectsWithSha) {
            // Log to WAL: one BATCH_PUT entry per object, each tagged with
            // the batch size (not a single entry for the whole batch).
            const payload = encoder.encode(JSON.stringify({
                sha,
                type,
                timestamp: now,
                batchSize: objects.length
            }));
            this.storage.sql.exec('INSERT INTO wal (operation, payload, created_at, flushed) VALUES (?, ?, ?, 0)', 'BATCH_PUT', payload, now);
            // Store the object
            this.storage.sql.exec('INSERT OR REPLACE INTO objects (sha, type, size, data, created_at) VALUES (?, ?, ?, ?, ?)', sha, type, data.length, data, now);
            // Update object index; pack_id and offset are null for the hot tier
            this.storage.sql.exec('INSERT OR REPLACE INTO object_index (sha, tier, pack_id, offset, size, type, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?)', sha, 'hot', null, null, data.length, type, now);
            // Add to cache so subsequent reads hit the fast path
            const storedObject = {
                sha,
                type,
                size: data.length,
                data,
                createdAt: now
            };
            this.cache.set(sha, storedObject);
        }
        // Commit transaction
        this.storage.sql.exec('COMMIT');
        this.log('info', `Batch write completed: ${objects.length} objects, ${totalBytes} bytes`);
        // Update metrics
        if (this.options.enableMetrics) {
            this._writes += objects.length;
            this._bytesWritten += totalBytes;
            this._totalWriteLatency += Date.now() - startTime;
            this._batchOperations++;
            this._batchObjectsTotal += objects.length;
        }
        return shas;
    }
    catch (error) {
        // Rollback on error so the batch stays all-or-nothing
        this.storage.sql.exec('ROLLBACK');
        this.log('error', `Batch write failed, rolled back`, error);
        throw error;
    }
}
|
|
731
|
-
/**
|
|
732
|
-
* Retrieve multiple objects by SHA using optimized batch queries.
|
|
733
|
-
*
|
|
734
|
-
* @description
|
|
735
|
-
* Fetches multiple objects efficiently by:
|
|
736
|
-
* 1. First checking the LRU cache for each SHA
|
|
737
|
-
* 2. Batching uncached SHAs into a single SQL query with IN clause
|
|
738
|
-
* 3. Returning results in the original order with null for missing objects
|
|
739
|
-
*
|
|
740
|
-
* @param shas - Array of 40-character SHA-1 hashes
|
|
741
|
-
* @returns Array of objects (or null for missing) in the same order
|
|
742
|
-
*
|
|
743
|
-
* @example
|
|
744
|
-
* ```typescript
|
|
745
|
-
* const objects = await store.getObjects([sha1, sha2, sha3])
|
|
746
|
-
* objects.forEach((obj, i) => {
|
|
747
|
-
* if (obj) {
|
|
748
|
-
* console.log(`${i}: ${obj.type}`)
|
|
749
|
-
* }
|
|
750
|
-
* })
|
|
751
|
-
* ```
|
|
752
|
-
*/
|
|
753
|
-
async getObjects(shas) {
|
|
754
|
-
if (shas.length === 0) {
|
|
755
|
-
return [];
|
|
756
|
-
}
|
|
757
|
-
const startTime = this.options.enableMetrics ? Date.now() : 0;
|
|
758
|
-
const results = new Array(shas.length).fill(null);
|
|
759
|
-
const uncachedIndices = [];
|
|
760
|
-
const uncachedShas = [];
|
|
761
|
-
let totalBytesRead = 0;
|
|
762
|
-
// First pass: check cache for each SHA
|
|
763
|
-
for (let i = 0; i < shas.length; i++) {
|
|
764
|
-
const sha = shas[i];
|
|
765
|
-
if (!sha || sha.length < 4) {
|
|
766
|
-
results[i] = null;
|
|
767
|
-
continue;
|
|
768
|
-
}
|
|
769
|
-
const cached = this.cache.get(sha);
|
|
770
|
-
if (cached) {
|
|
771
|
-
results[i] = cached;
|
|
772
|
-
totalBytesRead += cached.size;
|
|
773
|
-
}
|
|
774
|
-
else {
|
|
775
|
-
uncachedIndices.push(i);
|
|
776
|
-
uncachedShas.push(sha);
|
|
777
|
-
}
|
|
778
|
-
}
|
|
779
|
-
// Second pass: batch query for uncached objects
|
|
780
|
-
if (uncachedShas.length > 0) {
|
|
781
|
-
this.log('debug', `Batch fetching ${uncachedShas.length} uncached objects`);
|
|
782
|
-
// Build optimized IN query
|
|
783
|
-
const placeholders = uncachedShas.map(() => '?').join(', ');
|
|
784
|
-
const result = this.storage.sql.exec(`SELECT sha, type, size, data, created_at as createdAt FROM objects WHERE sha IN (${placeholders})`, ...uncachedShas);
|
|
785
|
-
const rows = result.toArray();
|
|
786
|
-
// Build lookup map for O(1) access
|
|
787
|
-
const rowMap = new Map();
|
|
788
|
-
for (const row of rows) {
|
|
789
|
-
rowMap.set(row.sha, row);
|
|
790
|
-
// Add to cache for future reads
|
|
791
|
-
this.cache.set(row.sha, row);
|
|
792
|
-
totalBytesRead += row.size;
|
|
793
|
-
}
|
|
794
|
-
// Fill in results at original indices
|
|
795
|
-
for (let i = 0; i < uncachedIndices.length; i++) {
|
|
796
|
-
const originalIndex = uncachedIndices[i];
|
|
797
|
-
const sha = uncachedShas[i];
|
|
798
|
-
results[originalIndex] = rowMap.get(sha) ?? null;
|
|
799
|
-
}
|
|
800
|
-
}
|
|
801
|
-
// Update metrics
|
|
802
|
-
if (this.options.enableMetrics) {
|
|
803
|
-
this._reads += shas.length;
|
|
804
|
-
this._bytesRead += totalBytesRead;
|
|
805
|
-
this._totalReadLatency += Date.now() - startTime;
|
|
806
|
-
}
|
|
807
|
-
return results;
|
|
808
|
-
}
|
|
809
|
-
/**
|
|
810
|
-
* Get a blob object with typed result.
|
|
811
|
-
*
|
|
812
|
-
* @description
|
|
813
|
-
* Fetches an object and returns it as a BlobObject if it's a blob.
|
|
814
|
-
* Returns null if the object doesn't exist or isn't a blob.
|
|
815
|
-
*
|
|
816
|
-
* @param sha - 40-character SHA-1 hash
|
|
817
|
-
* @returns Typed BlobObject or null
|
|
818
|
-
*
|
|
819
|
-
* @example
|
|
820
|
-
* ```typescript
|
|
821
|
-
* const blob = await store.getBlobObject(sha)
|
|
822
|
-
* if (blob) {
|
|
823
|
-
* const content = new TextDecoder().decode(blob.data)
|
|
824
|
-
* console.log(content)
|
|
825
|
-
* }
|
|
826
|
-
* ```
|
|
827
|
-
*/
|
|
828
|
-
async getBlobObject(sha) {
|
|
829
|
-
const obj = await this.getObject(sha);
|
|
830
|
-
if (!obj || obj.type !== 'blob') {
|
|
831
|
-
return null;
|
|
832
|
-
}
|
|
833
|
-
return {
|
|
834
|
-
type: 'blob',
|
|
835
|
-
data: obj.data
|
|
836
|
-
};
|
|
837
|
-
}
|
|
838
|
-
/**
 * Get a tree object with parsed entries.
 *
 * @description
 * Fetches and parses a tree object, extracting all entries with their
 * modes, names, and SHA references. The binary tree format is a sequence
 * of records: "{mode} {name}\0" followed by a 20-byte binary SHA.
 *
 * Malformed tree data never throws: any structural problem (missing null
 * byte, missing space, truncated SHA, decode error) yields a tree with
 * `entries: []` while still returning the raw data.
 *
 * @param sha - 40-character SHA-1 hash
 * @returns Parsed TreeObject with entries, or null if the object doesn't
 *          exist or isn't a tree
 *
 * @example
 * ```typescript
 * const tree = await store.getTreeObject(sha)
 * if (tree) {
 *   for (const entry of tree.entries) {
 *     console.log(`${entry.mode} ${entry.name} ${entry.sha}`)
 *   }
 * }
 * ```
 */
async getTreeObject(sha) {
    const obj = await this.getObject(sha);
    if (!obj || obj.type !== 'tree') {
        return null;
    }
    // Parse tree entries from raw data
    const entries = [];
    let offset = 0;
    const data = obj.data;
    try {
        while (offset < data.length) {
            // Find the null byte that terminates "{mode} {name}"
            let nullIndex = offset;
            while (nullIndex < data.length && data[nullIndex] !== 0) {
                nullIndex++;
            }
            // Check if we found a null byte
            if (nullIndex >= data.length) {
                // No null byte found - malformed data, return empty entries
                return { type: 'tree', data: obj.data, entries: [] };
            }
            const modeNameStr = decoder.decode(data.slice(offset, nullIndex));
            const spaceIndex = modeNameStr.indexOf(' ');
            // A single space separates mode from name
            if (spaceIndex === -1) {
                // No space found - malformed entry, return empty entries
                return { type: 'tree', data: obj.data, entries: [] };
            }
            const mode = modeNameStr.slice(0, spaceIndex);
            const name = modeNameStr.slice(spaceIndex + 1);
            // Check if we have enough bytes for the 20-byte binary SHA
            if (nullIndex + 21 > data.length) {
                // Truncated SHA - treat the whole tree as malformed
                return { type: 'tree', data: obj.data, entries: [] };
            }
            // Read 20-byte SHA and render it as a 40-char hex string
            const sha20 = data.slice(nullIndex + 1, nullIndex + 21);
            const entrySha = bytesToHex(sha20);
            entries.push({ mode, name, sha: entrySha });
            // Next record starts right after the 20-byte SHA
            offset = nullIndex + 21;
        }
    }
    catch {
        // Any parsing error (e.g. decode failure) - return empty entries
        return { type: 'tree', data: obj.data, entries: [] };
    }
    return {
        type: 'tree',
        data: obj.data,
        entries
    };
}
|
|
910
|
-
/**
|
|
911
|
-
* Get a commit object with parsed fields.
|
|
912
|
-
*
|
|
913
|
-
* @description
|
|
914
|
-
* Fetches and parses a commit object, extracting tree SHA,
|
|
915
|
-
* parent SHAs, author, committer, and message.
|
|
916
|
-
*
|
|
917
|
-
* @param sha - 40-character SHA-1 hash
|
|
918
|
-
* @returns Parsed CommitObject or null
|
|
919
|
-
*
|
|
920
|
-
* @example
|
|
921
|
-
* ```typescript
|
|
922
|
-
* const commit = await store.getCommitObject(sha)
|
|
923
|
-
* if (commit) {
|
|
924
|
-
* console.log(`Author: ${commit.author.name}`)
|
|
925
|
-
* console.log(`Message: ${commit.message}`)
|
|
926
|
-
* console.log(`Parents: ${commit.parents.length}`)
|
|
927
|
-
* }
|
|
928
|
-
* ```
|
|
929
|
-
*/
|
|
930
|
-
async getCommitObject(sha) {
|
|
931
|
-
const obj = await this.getObject(sha);
|
|
932
|
-
if (!obj || obj.type !== 'commit') {
|
|
933
|
-
return null;
|
|
934
|
-
}
|
|
935
|
-
const content = decoder.decode(obj.data);
|
|
936
|
-
const lines = content.split('\n');
|
|
937
|
-
let tree = '';
|
|
938
|
-
const parents = [];
|
|
939
|
-
let author = null;
|
|
940
|
-
let committer = null;
|
|
941
|
-
let messageStartIndex = 0;
|
|
942
|
-
for (let i = 0; i < lines.length; i++) {
|
|
943
|
-
const line = lines[i];
|
|
944
|
-
if (line === '') {
|
|
945
|
-
messageStartIndex = i + 1;
|
|
946
|
-
break;
|
|
947
|
-
}
|
|
948
|
-
if (line.startsWith('tree ')) {
|
|
949
|
-
tree = line.slice(5);
|
|
950
|
-
}
|
|
951
|
-
else if (line.startsWith('parent ')) {
|
|
952
|
-
parents.push(line.slice(7));
|
|
953
|
-
}
|
|
954
|
-
else if (line.startsWith('author ')) {
|
|
955
|
-
author = parseAuthorLine(line);
|
|
956
|
-
}
|
|
957
|
-
else if (line.startsWith('committer ')) {
|
|
958
|
-
committer = parseAuthorLine(line);
|
|
959
|
-
}
|
|
960
|
-
}
|
|
961
|
-
if (!author || !committer) {
|
|
962
|
-
return null;
|
|
963
|
-
}
|
|
964
|
-
const message = lines.slice(messageStartIndex).join('\n');
|
|
965
|
-
return {
|
|
966
|
-
type: 'commit',
|
|
967
|
-
data: obj.data,
|
|
968
|
-
tree,
|
|
969
|
-
parents,
|
|
970
|
-
author,
|
|
971
|
-
committer,
|
|
972
|
-
message
|
|
973
|
-
};
|
|
974
|
-
}
|
|
975
|
-
/**
 * Get a tag object with parsed fields.
 *
 * @description
 * Fetches and parses an annotated tag object, extracting the tagged object
 * SHA, object type, tag name, tagger, and message. Headers are scanned up
 * to the first blank line; everything after it is the tag message.
 *
 * Returns null when the object is missing, isn't a tag, has a malformed
 * tagger line, or is missing the required `object`/`tag` headers.
 *
 * @param sha - 40-character SHA-1 hash
 * @returns Parsed TagObject or null
 *
 * @example
 * ```typescript
 * const tag = await store.getTagObject(sha)
 * if (tag) {
 *   console.log(`Tag: ${tag.name}`)
 *   console.log(`Points to: ${tag.object} (${tag.objectType})`)
 *   console.log(`Tagger: ${tag.tagger?.name}`)
 * }
 * ```
 */
async getTagObject(sha) {
    const obj = await this.getObject(sha);
    if (!obj || obj.type !== 'tag') {
        return null;
    }
    const content = decoder.decode(obj.data);
    const lines = content.split('\n');
    let object = '';
    // Default matches the most common case; overwritten by the "type" header.
    let objectType = 'commit';
    let name = '';
    let tagger = undefined;
    let messageStartIndex = 0;
    for (let i = 0; i < lines.length; i++) {
        const line = lines[i];
        // Blank line separates headers from the tag message.
        if (line === '') {
            messageStartIndex = i + 1;
            break;
        }
        if (line.startsWith('object ')) {
            object = line.slice(7);
        }
        else if (line.startsWith('type ')) {
            objectType = line.slice(5);
        }
        else if (line.startsWith('tag ')) {
            name = line.slice(4);
        }
        else if (line.startsWith('tagger ')) {
            try {
                tagger = parseAuthorLine(line);
            }
            catch {
                // Malformed tagger line - treat the whole tag as unparseable
                // and return null (note: this does NOT fall through with
                // tagger left undefined).
                return null;
            }
        }
    }
    // Validate required fields - object and name must be present
    // tagger is optional (some older tags or special tags may not have it)
    if (!object || !name) {
        return null;
    }
    const message = lines.slice(messageStartIndex).join('\n');
    return {
        type: 'tag',
        data: obj.data,
        object,
        objectType,
        name,
        tagger,
        message
    };
}
|
|
1048
|
-
/**
|
|
1049
|
-
* Get raw serialized object with Git header.
|
|
1050
|
-
*
|
|
1051
|
-
* @description
|
|
1052
|
-
* Returns the complete Git object format including header:
|
|
1053
|
-
* "{type} {size}\0{content}"
|
|
1054
|
-
*
|
|
1055
|
-
* This is the format used for hashing and storage in pack files.
|
|
1056
|
-
*
|
|
1057
|
-
* @param sha - 40-character SHA-1 hash
|
|
1058
|
-
* @returns Complete object with Git header or null
|
|
1059
|
-
*
|
|
1060
|
-
* @example
|
|
1061
|
-
* ```typescript
|
|
1062
|
-
* const raw = await store.getRawObject(sha)
|
|
1063
|
-
* if (raw) {
|
|
1064
|
-
* // Can be written directly to a pack file or loose object
|
|
1065
|
-
* }
|
|
1066
|
-
* ```
|
|
1067
|
-
*/
|
|
1068
|
-
async getRawObject(sha) {
|
|
1069
|
-
const obj = await this.getObject(sha);
|
|
1070
|
-
if (!obj) {
|
|
1071
|
-
return null;
|
|
1072
|
-
}
|
|
1073
|
-
// Build git object format: "type size\0content"
|
|
1074
|
-
const header = encoder.encode(`${obj.type} ${obj.data.length}\0`);
|
|
1075
|
-
const result = new Uint8Array(header.length + obj.data.length);
|
|
1076
|
-
result.set(header);
|
|
1077
|
-
result.set(obj.data, header.length);
|
|
1078
|
-
return result;
|
|
1079
|
-
}
|
|
1080
|
-
/**
|
|
1081
|
-
* Log operation to WAL.
|
|
1082
|
-
*
|
|
1083
|
-
* @description
|
|
1084
|
-
* Writes an operation entry to the write-ahead log for durability.
|
|
1085
|
-
* The WAL ensures operations can be recovered after crashes.
|
|
1086
|
-
*
|
|
1087
|
-
* @param operation - Operation type ('PUT', 'DELETE', etc.)
|
|
1088
|
-
* @param sha - Object SHA being operated on
|
|
1089
|
-
* @param type - Object type
|
|
1090
|
-
* @param _data - Object data (not stored in WAL, just for signature compatibility)
|
|
1091
|
-
* @internal
|
|
1092
|
-
*/
|
|
1093
|
-
async logToWAL(operation, sha, type, _data) {
|
|
1094
|
-
// Create payload with operation details
|
|
1095
|
-
const payload = encoder.encode(JSON.stringify({
|
|
1096
|
-
sha,
|
|
1097
|
-
type,
|
|
1098
|
-
timestamp: Date.now()
|
|
1099
|
-
}));
|
|
1100
|
-
this.storage.sql.exec('INSERT INTO wal (operation, payload, created_at, flushed) VALUES (?, ?, ?, 0)', operation, payload, Date.now());
|
|
1101
|
-
}
|
|
1102
|
-
}
|
|
1103
|
-
// ============================================================================
|
|
1104
|
-
// Helper Functions
|
|
1105
|
-
// ============================================================================
|
|
1106
|
-
/**
 * Convert hexadecimal string to bytes.
 *
 * @description
 * Parses a hexadecimal string two characters at a time and returns the
 * corresponding bytes. Used for converting SHA strings to 20-byte binary
 * form. Input is assumed to be valid, even-length hex.
 *
 * @param hex - Hexadecimal string
 * @returns Binary data as Uint8Array
 * @internal
 */
function hexToBytes(hex) {
    const out = new Uint8Array(hex.length / 2);
    for (let byteIndex = 0; byteIndex < out.length; byteIndex++) {
        const pair = hex.substring(byteIndex * 2, byteIndex * 2 + 2);
        out[byteIndex] = parseInt(pair, 16);
    }
    return out;
}
|
|
1124
|
-
/**
 * Convert bytes to hexadecimal string.
 *
 * @description
 * Renders binary data as a lowercase hexadecimal string, two characters
 * per byte. Used for converting a 20-byte SHA to its 40-character form.
 *
 * @param bytes - Binary data to convert
 * @returns Lowercase hexadecimal string
 * @internal
 */
function bytesToHex(bytes) {
    let hex = '';
    for (const byte of bytes) {
        hex += byte.toString(16).padStart(2, '0');
    }
    return hex;
}
|
|
1140
|
-
/**
 * Parse author/committer/tagger line.
 *
 * @description
 * Parses a Git identity line of the form:
 * "author Name <email> timestamp timezone"
 * (the prefix may also be "committer" or "tagger").
 *
 * @param line - Full line including prefix
 * @returns Parsed Author object ({ name, email, timestamp, timezone })
 * @throws Error if line format is invalid
 * @internal
 */
function parseAuthorLine(line) {
    const identityPattern = /^(?:author|committer|tagger) (.+) <(.+)> (\d+) ([+-]\d{4})$/;
    const parts = identityPattern.exec(line);
    if (parts === null) {
        throw new Error(`Invalid author line: ${line}`);
    }
    const [, name, email, timestamp, timezone] = parts;
    return {
        name,
        email,
        timestamp: parseInt(timestamp, 10),
        timezone
    };
}
|
|
1164
|
-
//# sourceMappingURL=object-store.js.map
|