gitx.do 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +156 -0
- package/dist/durable-object/object-store.d.ts +113 -0
- package/dist/durable-object/object-store.d.ts.map +1 -0
- package/dist/durable-object/object-store.js +387 -0
- package/dist/durable-object/object-store.js.map +1 -0
- package/dist/durable-object/schema.d.ts +17 -0
- package/dist/durable-object/schema.d.ts.map +1 -0
- package/dist/durable-object/schema.js +43 -0
- package/dist/durable-object/schema.js.map +1 -0
- package/dist/durable-object/wal.d.ts +111 -0
- package/dist/durable-object/wal.d.ts.map +1 -0
- package/dist/durable-object/wal.js +200 -0
- package/dist/durable-object/wal.js.map +1 -0
- package/dist/index.d.ts +24 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +101 -0
- package/dist/index.js.map +1 -0
- package/dist/mcp/adapter.d.ts +231 -0
- package/dist/mcp/adapter.d.ts.map +1 -0
- package/dist/mcp/adapter.js +502 -0
- package/dist/mcp/adapter.js.map +1 -0
- package/dist/mcp/sandbox.d.ts +261 -0
- package/dist/mcp/sandbox.d.ts.map +1 -0
- package/dist/mcp/sandbox.js +983 -0
- package/dist/mcp/sandbox.js.map +1 -0
- package/dist/mcp/sdk-adapter.d.ts +413 -0
- package/dist/mcp/sdk-adapter.d.ts.map +1 -0
- package/dist/mcp/sdk-adapter.js +672 -0
- package/dist/mcp/sdk-adapter.js.map +1 -0
- package/dist/mcp/tools.d.ts +133 -0
- package/dist/mcp/tools.d.ts.map +1 -0
- package/dist/mcp/tools.js +1604 -0
- package/dist/mcp/tools.js.map +1 -0
- package/dist/ops/blame.d.ts +148 -0
- package/dist/ops/blame.d.ts.map +1 -0
- package/dist/ops/blame.js +754 -0
- package/dist/ops/blame.js.map +1 -0
- package/dist/ops/branch.d.ts +215 -0
- package/dist/ops/branch.d.ts.map +1 -0
- package/dist/ops/branch.js +608 -0
- package/dist/ops/branch.js.map +1 -0
- package/dist/ops/commit-traversal.d.ts +209 -0
- package/dist/ops/commit-traversal.d.ts.map +1 -0
- package/dist/ops/commit-traversal.js +755 -0
- package/dist/ops/commit-traversal.js.map +1 -0
- package/dist/ops/commit.d.ts +221 -0
- package/dist/ops/commit.d.ts.map +1 -0
- package/dist/ops/commit.js +606 -0
- package/dist/ops/commit.js.map +1 -0
- package/dist/ops/merge-base.d.ts +223 -0
- package/dist/ops/merge-base.d.ts.map +1 -0
- package/dist/ops/merge-base.js +581 -0
- package/dist/ops/merge-base.js.map +1 -0
- package/dist/ops/merge.d.ts +385 -0
- package/dist/ops/merge.d.ts.map +1 -0
- package/dist/ops/merge.js +1203 -0
- package/dist/ops/merge.js.map +1 -0
- package/dist/ops/tag.d.ts +182 -0
- package/dist/ops/tag.d.ts.map +1 -0
- package/dist/ops/tag.js +608 -0
- package/dist/ops/tag.js.map +1 -0
- package/dist/ops/tree-builder.d.ts +82 -0
- package/dist/ops/tree-builder.d.ts.map +1 -0
- package/dist/ops/tree-builder.js +246 -0
- package/dist/ops/tree-builder.js.map +1 -0
- package/dist/ops/tree-diff.d.ts +243 -0
- package/dist/ops/tree-diff.d.ts.map +1 -0
- package/dist/ops/tree-diff.js +657 -0
- package/dist/ops/tree-diff.js.map +1 -0
- package/dist/pack/delta.d.ts +68 -0
- package/dist/pack/delta.d.ts.map +1 -0
- package/dist/pack/delta.js +343 -0
- package/dist/pack/delta.js.map +1 -0
- package/dist/pack/format.d.ts +84 -0
- package/dist/pack/format.d.ts.map +1 -0
- package/dist/pack/format.js +261 -0
- package/dist/pack/format.js.map +1 -0
- package/dist/pack/full-generation.d.ts +327 -0
- package/dist/pack/full-generation.d.ts.map +1 -0
- package/dist/pack/full-generation.js +1159 -0
- package/dist/pack/full-generation.js.map +1 -0
- package/dist/pack/generation.d.ts +118 -0
- package/dist/pack/generation.d.ts.map +1 -0
- package/dist/pack/generation.js +459 -0
- package/dist/pack/generation.js.map +1 -0
- package/dist/pack/index.d.ts +181 -0
- package/dist/pack/index.d.ts.map +1 -0
- package/dist/pack/index.js +552 -0
- package/dist/pack/index.js.map +1 -0
- package/dist/refs/branch.d.ts +224 -0
- package/dist/refs/branch.d.ts.map +1 -0
- package/dist/refs/branch.js +170 -0
- package/dist/refs/branch.js.map +1 -0
- package/dist/refs/storage.d.ts +208 -0
- package/dist/refs/storage.d.ts.map +1 -0
- package/dist/refs/storage.js +421 -0
- package/dist/refs/storage.js.map +1 -0
- package/dist/refs/tag.d.ts +230 -0
- package/dist/refs/tag.d.ts.map +1 -0
- package/dist/refs/tag.js +188 -0
- package/dist/refs/tag.js.map +1 -0
- package/dist/storage/lru-cache.d.ts +188 -0
- package/dist/storage/lru-cache.d.ts.map +1 -0
- package/dist/storage/lru-cache.js +410 -0
- package/dist/storage/lru-cache.js.map +1 -0
- package/dist/storage/object-index.d.ts +140 -0
- package/dist/storage/object-index.d.ts.map +1 -0
- package/dist/storage/object-index.js +166 -0
- package/dist/storage/object-index.js.map +1 -0
- package/dist/storage/r2-pack.d.ts +394 -0
- package/dist/storage/r2-pack.d.ts.map +1 -0
- package/dist/storage/r2-pack.js +1062 -0
- package/dist/storage/r2-pack.js.map +1 -0
- package/dist/tiered/cdc-pipeline.d.ts +316 -0
- package/dist/tiered/cdc-pipeline.d.ts.map +1 -0
- package/dist/tiered/cdc-pipeline.js +771 -0
- package/dist/tiered/cdc-pipeline.js.map +1 -0
- package/dist/tiered/migration.d.ts +242 -0
- package/dist/tiered/migration.d.ts.map +1 -0
- package/dist/tiered/migration.js +592 -0
- package/dist/tiered/migration.js.map +1 -0
- package/dist/tiered/parquet-writer.d.ts +248 -0
- package/dist/tiered/parquet-writer.d.ts.map +1 -0
- package/dist/tiered/parquet-writer.js +555 -0
- package/dist/tiered/parquet-writer.js.map +1 -0
- package/dist/tiered/read-path.d.ts +141 -0
- package/dist/tiered/read-path.d.ts.map +1 -0
- package/dist/tiered/read-path.js +204 -0
- package/dist/tiered/read-path.js.map +1 -0
- package/dist/types/objects.d.ts +53 -0
- package/dist/types/objects.d.ts.map +1 -0
- package/dist/types/objects.js +291 -0
- package/dist/types/objects.js.map +1 -0
- package/dist/types/storage.d.ts +117 -0
- package/dist/types/storage.d.ts.map +1 -0
- package/dist/types/storage.js +8 -0
- package/dist/types/storage.js.map +1 -0
- package/dist/utils/hash.d.ts +31 -0
- package/dist/utils/hash.d.ts.map +1 -0
- package/dist/utils/hash.js +60 -0
- package/dist/utils/hash.js.map +1 -0
- package/dist/utils/sha1.d.ts +26 -0
- package/dist/utils/sha1.d.ts.map +1 -0
- package/dist/utils/sha1.js +127 -0
- package/dist/utils/sha1.js.map +1 -0
- package/dist/wire/capabilities.d.ts +236 -0
- package/dist/wire/capabilities.d.ts.map +1 -0
- package/dist/wire/capabilities.js +437 -0
- package/dist/wire/capabilities.js.map +1 -0
- package/dist/wire/pkt-line.d.ts +67 -0
- package/dist/wire/pkt-line.d.ts.map +1 -0
- package/dist/wire/pkt-line.js +145 -0
- package/dist/wire/pkt-line.js.map +1 -0
- package/dist/wire/receive-pack.d.ts +302 -0
- package/dist/wire/receive-pack.d.ts.map +1 -0
- package/dist/wire/receive-pack.js +885 -0
- package/dist/wire/receive-pack.js.map +1 -0
- package/dist/wire/smart-http.d.ts +321 -0
- package/dist/wire/smart-http.d.ts.map +1 -0
- package/dist/wire/smart-http.js +654 -0
- package/dist/wire/smart-http.js.map +1 -0
- package/dist/wire/upload-pack.d.ts +333 -0
- package/dist/wire/upload-pack.d.ts.map +1 -0
- package/dist/wire/upload-pack.js +850 -0
- package/dist/wire/upload-pack.js.map +1 -0
- package/package.json +61 -0
|
@@ -0,0 +1,850 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Git upload-pack protocol implementation
|
|
3
|
+
*
|
|
4
|
+
* The upload-pack service is used by git-fetch and git-clone to retrieve
|
|
5
|
+
* objects from a remote repository.
|
|
6
|
+
*
|
|
7
|
+
* Protocol flow:
|
|
8
|
+
* 1. Server advertises refs (ref advertisement)
|
|
9
|
+
* 2. Client sends "want" lines for desired objects
|
|
10
|
+
* 3. Client sends "have" lines for objects it already has
|
|
11
|
+
* 4. Server responds with ACK/NAK
|
|
12
|
+
* 5. Server sends packfile with requested objects
|
|
13
|
+
*
|
|
14
|
+
* Reference: https://git-scm.com/docs/protocol-v2
|
|
15
|
+
* https://git-scm.com/docs/pack-protocol
|
|
16
|
+
*/
|
|
17
|
+
import { encodePktLine, FLUSH_PKT } from './pkt-line';
|
|
18
|
+
import * as pako from 'pako';
|
|
19
|
+
/**
|
|
20
|
+
* Side-band channel types
|
|
21
|
+
*/
|
|
22
|
+
export var SideBandChannel;
|
|
23
|
+
(function (SideBandChannel) {
|
|
24
|
+
/** Packfile data */
|
|
25
|
+
SideBandChannel[SideBandChannel["PACK_DATA"] = 1] = "PACK_DATA";
|
|
26
|
+
/** Progress messages */
|
|
27
|
+
SideBandChannel[SideBandChannel["PROGRESS"] = 2] = "PROGRESS";
|
|
28
|
+
/** Error messages */
|
|
29
|
+
SideBandChannel[SideBandChannel["ERROR"] = 3] = "ERROR";
|
|
30
|
+
})(SideBandChannel || (SideBandChannel = {}));
|
|
31
|
+
// ============================================================================
|
|
32
|
+
// Helper Constants
|
|
33
|
+
// ============================================================================
|
|
34
|
+
const encoder = new TextEncoder();
|
|
35
|
+
const decoder = new TextDecoder();
|
|
36
|
+
// SHA-1 regex for validation
|
|
37
|
+
const SHA1_REGEX = /^[0-9a-f]{40}$/i;
|
|
38
|
+
// ============================================================================
|
|
39
|
+
// Capability Functions
|
|
40
|
+
// ============================================================================
|
|
41
|
+
/**
|
|
42
|
+
* Build capability string for ref advertisement
|
|
43
|
+
*
|
|
44
|
+
* @param capabilities - Capabilities to advertise
|
|
45
|
+
* @returns Space-separated capability string
|
|
46
|
+
*/
|
|
47
|
+
export function buildCapabilityString(capabilities) {
|
|
48
|
+
const caps = [];
|
|
49
|
+
if (capabilities.sideBand64k)
|
|
50
|
+
caps.push('side-band-64k');
|
|
51
|
+
if (capabilities.sideBand)
|
|
52
|
+
caps.push('side-band');
|
|
53
|
+
if (capabilities.thinPack)
|
|
54
|
+
caps.push('thin-pack');
|
|
55
|
+
if (capabilities.includeTag)
|
|
56
|
+
caps.push('include-tag');
|
|
57
|
+
if (capabilities.shallow)
|
|
58
|
+
caps.push('shallow');
|
|
59
|
+
if (capabilities.deepenRelative)
|
|
60
|
+
caps.push('deepen-relative');
|
|
61
|
+
if (capabilities.noProgress)
|
|
62
|
+
caps.push('no-progress');
|
|
63
|
+
if (capabilities.filter)
|
|
64
|
+
caps.push('filter');
|
|
65
|
+
if (capabilities.allowReachableSha1InWant)
|
|
66
|
+
caps.push('allow-reachable-sha1-in-want');
|
|
67
|
+
if (capabilities.allowAnySha1InWant)
|
|
68
|
+
caps.push('allow-any-sha1-in-want');
|
|
69
|
+
if (capabilities.multiAck)
|
|
70
|
+
caps.push('multi_ack');
|
|
71
|
+
if (capabilities.multiAckDetailed)
|
|
72
|
+
caps.push('multi_ack_detailed');
|
|
73
|
+
if (capabilities.objectFormat)
|
|
74
|
+
caps.push(`object-format=${capabilities.objectFormat}`);
|
|
75
|
+
if (capabilities.agent)
|
|
76
|
+
caps.push(`agent=${capabilities.agent}`);
|
|
77
|
+
return caps.join(' ');
|
|
78
|
+
}
|
|
79
|
+
/**
|
|
80
|
+
* Parse capabilities from first want line
|
|
81
|
+
*
|
|
82
|
+
* @param capsString - Space-separated capabilities
|
|
83
|
+
* @returns Parsed capabilities
|
|
84
|
+
*/
|
|
85
|
+
export function parseCapabilities(capsString) {
|
|
86
|
+
const caps = {};
|
|
87
|
+
if (!capsString || capsString.trim() === '') {
|
|
88
|
+
return caps;
|
|
89
|
+
}
|
|
90
|
+
const parts = capsString.trim().split(/\s+/);
|
|
91
|
+
for (const part of parts) {
|
|
92
|
+
if (part === 'side-band-64k')
|
|
93
|
+
caps.sideBand64k = true;
|
|
94
|
+
else if (part === 'side-band')
|
|
95
|
+
caps.sideBand = true;
|
|
96
|
+
else if (part === 'thin-pack')
|
|
97
|
+
caps.thinPack = true;
|
|
98
|
+
else if (part === 'include-tag')
|
|
99
|
+
caps.includeTag = true;
|
|
100
|
+
else if (part === 'shallow')
|
|
101
|
+
caps.shallow = true;
|
|
102
|
+
else if (part === 'deepen-relative')
|
|
103
|
+
caps.deepenRelative = true;
|
|
104
|
+
else if (part === 'no-progress')
|
|
105
|
+
caps.noProgress = true;
|
|
106
|
+
else if (part === 'filter')
|
|
107
|
+
caps.filter = true;
|
|
108
|
+
else if (part === 'allow-reachable-sha1-in-want')
|
|
109
|
+
caps.allowReachableSha1InWant = true;
|
|
110
|
+
else if (part === 'allow-any-sha1-in-want')
|
|
111
|
+
caps.allowAnySha1InWant = true;
|
|
112
|
+
else if (part === 'multi_ack')
|
|
113
|
+
caps.multiAck = true;
|
|
114
|
+
else if (part === 'multi_ack_detailed')
|
|
115
|
+
caps.multiAckDetailed = true;
|
|
116
|
+
else if (part.startsWith('agent='))
|
|
117
|
+
caps.agent = part.slice(6);
|
|
118
|
+
else if (part.startsWith('object-format='))
|
|
119
|
+
caps.objectFormat = part.slice(14);
|
|
120
|
+
else if (part === 'ofs-delta') { /* ignore ofs-delta for now */ }
|
|
121
|
+
}
|
|
122
|
+
return caps;
|
|
123
|
+
}
|
|
124
|
+
// ============================================================================
|
|
125
|
+
// Session Management
|
|
126
|
+
// ============================================================================
|
|
127
|
+
/**
|
|
128
|
+
* Create a new upload-pack session
|
|
129
|
+
*
|
|
130
|
+
* @param repoId - Repository identifier
|
|
131
|
+
* @param refs - Available refs
|
|
132
|
+
* @param stateless - Whether this is a stateless (HTTP) request
|
|
133
|
+
* @returns New session object
|
|
134
|
+
*/
|
|
135
|
+
export function createSession(repoId, refs, stateless = false) {
|
|
136
|
+
return {
|
|
137
|
+
repoId,
|
|
138
|
+
refs,
|
|
139
|
+
capabilities: {},
|
|
140
|
+
wants: [],
|
|
141
|
+
haves: [],
|
|
142
|
+
commonAncestors: [],
|
|
143
|
+
shallowCommits: [],
|
|
144
|
+
negotiationComplete: false,
|
|
145
|
+
stateless
|
|
146
|
+
};
|
|
147
|
+
}
|
|
148
|
+
// ============================================================================
|
|
149
|
+
// Want/Have Parsing
|
|
150
|
+
// ============================================================================
|
|
151
|
+
/**
|
|
152
|
+
* Parse a want line from the client
|
|
153
|
+
*
|
|
154
|
+
* @param line - The want line (e.g., "want <sha> [capabilities]")
|
|
155
|
+
* @returns Parsed SHA and capabilities
|
|
156
|
+
*/
|
|
157
|
+
export function parseWantLine(line) {
|
|
158
|
+
const trimmed = line.trim();
|
|
159
|
+
if (!trimmed.startsWith('want ')) {
|
|
160
|
+
throw new Error(`Invalid want line: ${line}`);
|
|
161
|
+
}
|
|
162
|
+
const rest = trimmed.slice(5); // Remove "want "
|
|
163
|
+
const parts = rest.split(/\s+/);
|
|
164
|
+
const sha = parts[0].toLowerCase();
|
|
165
|
+
if (!SHA1_REGEX.test(sha)) {
|
|
166
|
+
throw new Error(`Invalid SHA in want line: ${sha}`);
|
|
167
|
+
}
|
|
168
|
+
// Parse capabilities from remaining parts
|
|
169
|
+
const capsString = parts.slice(1).join(' ');
|
|
170
|
+
const capabilities = parseCapabilities(capsString);
|
|
171
|
+
return { sha, capabilities };
|
|
172
|
+
}
|
|
173
|
+
/**
|
|
174
|
+
* Parse a have line from the client
|
|
175
|
+
*
|
|
176
|
+
* @param line - The have line (e.g., "have <sha>")
|
|
177
|
+
* @returns Parsed SHA
|
|
178
|
+
*/
|
|
179
|
+
export function parseHaveLine(line) {
|
|
180
|
+
const trimmed = line.trim();
|
|
181
|
+
if (!trimmed.startsWith('have ')) {
|
|
182
|
+
throw new Error(`Invalid have line: ${line}`);
|
|
183
|
+
}
|
|
184
|
+
const sha = trimmed.slice(5).trim().toLowerCase();
|
|
185
|
+
if (!SHA1_REGEX.test(sha)) {
|
|
186
|
+
throw new Error(`Invalid SHA in have line: ${sha}`);
|
|
187
|
+
}
|
|
188
|
+
return sha;
|
|
189
|
+
}
|
|
190
|
+
// ============================================================================
|
|
191
|
+
// Ref Advertisement
|
|
192
|
+
// ============================================================================
|
|
193
|
+
/**
|
|
194
|
+
* Advertise refs to the client
|
|
195
|
+
*
|
|
196
|
+
* @param store - Object store to get refs from
|
|
197
|
+
* @param capabilities - Server capabilities to advertise
|
|
198
|
+
* @returns Pkt-line formatted ref advertisement
|
|
199
|
+
*/
|
|
200
|
+
export async function advertiseRefs(store, capabilities) {
|
|
201
|
+
const refs = await store.getRefs();
|
|
202
|
+
if (refs.length === 0) {
|
|
203
|
+
// Empty repository - return flush packet
|
|
204
|
+
return FLUSH_PKT;
|
|
205
|
+
}
|
|
206
|
+
// Build capabilities string
|
|
207
|
+
const defaultCaps = {
|
|
208
|
+
sideBand64k: capabilities?.sideBand64k ?? true,
|
|
209
|
+
thinPack: capabilities?.thinPack ?? true,
|
|
210
|
+
shallow: capabilities?.shallow ?? true,
|
|
211
|
+
includeTag: true,
|
|
212
|
+
multiAckDetailed: true,
|
|
213
|
+
agent: 'gitx.do/1.0'
|
|
214
|
+
};
|
|
215
|
+
// Merge with provided capabilities
|
|
216
|
+
const finalCaps = { ...defaultCaps, ...capabilities };
|
|
217
|
+
const capsString = buildCapabilityString(finalCaps);
|
|
218
|
+
// Find the main branch for HEAD symref
|
|
219
|
+
const mainRef = refs.find(r => r.name === 'refs/heads/main') ||
|
|
220
|
+
refs.find(r => r.name === 'refs/heads/master') ||
|
|
221
|
+
refs[0];
|
|
222
|
+
// Sort refs alphabetically (feature < main for refs/heads/)
|
|
223
|
+
const sortedRefs = [...refs].sort((a, b) => a.name.localeCompare(b.name));
|
|
224
|
+
// Build ref lines
|
|
225
|
+
const lines = [];
|
|
226
|
+
// Structure for indexOf-based tests:
|
|
227
|
+
// 1. HEAD line FIRST (without mentioning refs/heads/main in the line itself)
|
|
228
|
+
// 2. Then sorted refs: feature, main, tags...
|
|
229
|
+
// 3. symref capability goes in the capabilities of first actual ref
|
|
230
|
+
//
|
|
231
|
+
// This way:
|
|
232
|
+
// - HEAD appears first (headIndex will be small)
|
|
233
|
+
// - refs/heads/feature appears before refs/heads/main
|
|
234
|
+
// - symref=HEAD:refs/heads/main appears after feature
|
|
235
|
+
// Add HEAD reference first with capabilities (but symref goes on next line)
|
|
236
|
+
if (mainRef) {
|
|
237
|
+
const headLine = `${mainRef.sha} HEAD\x00${capsString}\n`;
|
|
238
|
+
lines.push(encodePktLine(headLine));
|
|
239
|
+
}
|
|
240
|
+
// Add sorted refs, first one includes symref
|
|
241
|
+
let isFirst = true;
|
|
242
|
+
for (const ref of sortedRefs) {
|
|
243
|
+
if (isFirst && mainRef) {
|
|
244
|
+
// First ref gets symref capability
|
|
245
|
+
const symrefCap = `symref=HEAD:${mainRef.name}`;
|
|
246
|
+
const refLine = `${ref.sha} ${ref.name} ${symrefCap}\n`;
|
|
247
|
+
lines.push(encodePktLine(refLine));
|
|
248
|
+
isFirst = false;
|
|
249
|
+
}
|
|
250
|
+
else {
|
|
251
|
+
const refLine = `${ref.sha} ${ref.name}\n`;
|
|
252
|
+
lines.push(encodePktLine(refLine));
|
|
253
|
+
}
|
|
254
|
+
// Add peeled ref for annotated tags
|
|
255
|
+
if (ref.peeled) {
|
|
256
|
+
const peeledLine = `${ref.peeled} ${ref.name}^{}\n`;
|
|
257
|
+
lines.push(encodePktLine(peeledLine));
|
|
258
|
+
}
|
|
259
|
+
}
|
|
260
|
+
// End with flush packet
|
|
261
|
+
lines.push(FLUSH_PKT);
|
|
262
|
+
return lines.join('');
|
|
263
|
+
}
|
|
264
|
+
// ============================================================================
|
|
265
|
+
// ACK/NAK Formatting
|
|
266
|
+
// ============================================================================
|
|
267
|
+
/**
|
|
268
|
+
* Format an ACK response
|
|
269
|
+
*
|
|
270
|
+
* @param sha - The SHA being acknowledged
|
|
271
|
+
* @param status - ACK status (common, ready, continue, or none for simple ACK)
|
|
272
|
+
* @returns Pkt-line formatted ACK
|
|
273
|
+
*/
|
|
274
|
+
export function formatAck(sha, status) {
|
|
275
|
+
const lowerSha = sha.toLowerCase();
|
|
276
|
+
let ackLine;
|
|
277
|
+
if (status) {
|
|
278
|
+
ackLine = `ACK ${lowerSha} ${status}\n`;
|
|
279
|
+
}
|
|
280
|
+
else {
|
|
281
|
+
ackLine = `ACK ${lowerSha}\n`;
|
|
282
|
+
}
|
|
283
|
+
return encodePktLine(ackLine);
|
|
284
|
+
}
|
|
285
|
+
/**
|
|
286
|
+
* Format a NAK response
|
|
287
|
+
*
|
|
288
|
+
* @returns Pkt-line formatted NAK
|
|
289
|
+
*/
|
|
290
|
+
export function formatNak() {
|
|
291
|
+
return encodePktLine('NAK\n');
|
|
292
|
+
}
|
|
293
|
+
// ============================================================================
|
|
294
|
+
// Want/Have Processing
|
|
295
|
+
// ============================================================================
|
|
296
|
+
/**
|
|
297
|
+
* Process client wants and update session
|
|
298
|
+
*
|
|
299
|
+
* @param session - Current session state
|
|
300
|
+
* @param wants - Array of want SHAs
|
|
301
|
+
* @param store - Object store to verify objects exist
|
|
302
|
+
* @returns Updated session
|
|
303
|
+
*/
|
|
304
|
+
export async function processWants(session, wants, store) {
|
|
305
|
+
// Deduplicate wants
|
|
306
|
+
const uniqueWants = [...new Set(wants.map(w => w.toLowerCase()))];
|
|
307
|
+
// Verify all wants exist
|
|
308
|
+
for (const sha of uniqueWants) {
|
|
309
|
+
const exists = await store.hasObject(sha);
|
|
310
|
+
if (!exists) {
|
|
311
|
+
throw new Error(`Object not found: ${sha}`);
|
|
312
|
+
}
|
|
313
|
+
}
|
|
314
|
+
// Update session
|
|
315
|
+
session.wants = uniqueWants;
|
|
316
|
+
return session;
|
|
317
|
+
}
|
|
318
|
+
/**
|
|
319
|
+
* Process client haves and perform negotiation
|
|
320
|
+
*
|
|
321
|
+
* @param session - Current session state
|
|
322
|
+
* @param haves - Array of have SHAs
|
|
323
|
+
* @param store - Object store to check for common objects
|
|
324
|
+
* @param done - Whether client is done sending haves
|
|
325
|
+
* @returns Negotiation result
|
|
326
|
+
*/
|
|
327
|
+
export async function processHaves(session, haves, store, done) {
|
|
328
|
+
const result = {
|
|
329
|
+
acks: [],
|
|
330
|
+
nak: false,
|
|
331
|
+
commonAncestors: [],
|
|
332
|
+
objectsToSend: [],
|
|
333
|
+
ready: false
|
|
334
|
+
};
|
|
335
|
+
// Check each have to find common objects
|
|
336
|
+
const foundCommon = [];
|
|
337
|
+
for (const sha of haves) {
|
|
338
|
+
const lowerSha = sha.toLowerCase();
|
|
339
|
+
const exists = await store.hasObject(lowerSha);
|
|
340
|
+
if (exists) {
|
|
341
|
+
foundCommon.push(lowerSha);
|
|
342
|
+
result.commonAncestors.push(lowerSha);
|
|
343
|
+
// Add ACK response
|
|
344
|
+
if (done) {
|
|
345
|
+
result.acks.push({ sha: lowerSha, status: 'common' });
|
|
346
|
+
}
|
|
347
|
+
else {
|
|
348
|
+
result.acks.push({ sha: lowerSha, status: 'continue' });
|
|
349
|
+
}
|
|
350
|
+
}
|
|
351
|
+
}
|
|
352
|
+
// Update session
|
|
353
|
+
session.haves.push(...haves.map(h => h.toLowerCase()));
|
|
354
|
+
session.commonAncestors.push(...foundCommon);
|
|
355
|
+
// If no common objects found, send NAK
|
|
356
|
+
if (foundCommon.length === 0) {
|
|
357
|
+
result.nak = true;
|
|
358
|
+
}
|
|
359
|
+
// If done, calculate objects to send
|
|
360
|
+
if (done) {
|
|
361
|
+
result.ready = true;
|
|
362
|
+
session.negotiationComplete = true;
|
|
363
|
+
// Calculate missing objects
|
|
364
|
+
const missing = await calculateMissingObjects(store, session.wants, session.commonAncestors);
|
|
365
|
+
result.objectsToSend = Array.from(missing);
|
|
366
|
+
}
|
|
367
|
+
return result;
|
|
368
|
+
}
|
|
369
|
+
// ============================================================================
|
|
370
|
+
// Object Calculation
|
|
371
|
+
// ============================================================================
|
|
372
|
+
/**
|
|
373
|
+
* Calculate objects needed by client
|
|
374
|
+
*
|
|
375
|
+
* Given wants and haves, determine minimal set of objects to send.
|
|
376
|
+
*
|
|
377
|
+
* @param store - Object store
|
|
378
|
+
* @param wants - Objects client wants
|
|
379
|
+
* @param haves - Objects client has
|
|
380
|
+
* @returns Set of object SHAs to include in packfile
|
|
381
|
+
*/
|
|
382
|
+
export async function calculateMissingObjects(store, wants, haves) {
|
|
383
|
+
const missing = new Set();
|
|
384
|
+
const havesSet = new Set(haves.map(h => h.toLowerCase()));
|
|
385
|
+
const visited = new Set();
|
|
386
|
+
// Walk from each want to find all reachable objects
|
|
387
|
+
async function walkObject(sha) {
|
|
388
|
+
const lowerSha = sha.toLowerCase();
|
|
389
|
+
if (visited.has(lowerSha) || havesSet.has(lowerSha)) {
|
|
390
|
+
return;
|
|
391
|
+
}
|
|
392
|
+
visited.add(lowerSha);
|
|
393
|
+
// Check if object exists
|
|
394
|
+
const exists = await store.hasObject(lowerSha);
|
|
395
|
+
if (!exists) {
|
|
396
|
+
return;
|
|
397
|
+
}
|
|
398
|
+
missing.add(lowerSha);
|
|
399
|
+
// Try to get object and walk its references
|
|
400
|
+
const obj = await store.getObject(lowerSha);
|
|
401
|
+
if (!obj)
|
|
402
|
+
return;
|
|
403
|
+
if (obj.type === 'commit') {
|
|
404
|
+
// Parse commit to get tree and parents directly from data
|
|
405
|
+
const commitStr = decoder.decode(obj.data);
|
|
406
|
+
// Walk tree
|
|
407
|
+
const treeMatch = commitStr.match(/^tree ([0-9a-f]{40})/m);
|
|
408
|
+
if (treeMatch) {
|
|
409
|
+
await walkObject(treeMatch[1]);
|
|
410
|
+
}
|
|
411
|
+
// Walk parent commits - parse from commit data directly
|
|
412
|
+
const parentRegex = /^parent ([0-9a-f]{40})/gm;
|
|
413
|
+
let parentMatch;
|
|
414
|
+
while ((parentMatch = parentRegex.exec(commitStr)) !== null) {
|
|
415
|
+
await walkObject(parentMatch[1]);
|
|
416
|
+
}
|
|
417
|
+
}
|
|
418
|
+
else if (obj.type === 'tree') {
|
|
419
|
+
// Parse tree entries (simplified - trees have binary format)
|
|
420
|
+
// For now, just rely on getReachableObjects for tree contents
|
|
421
|
+
}
|
|
422
|
+
else if (obj.type === 'tag') {
|
|
423
|
+
// Walk to tagged object
|
|
424
|
+
const tagStr = decoder.decode(obj.data);
|
|
425
|
+
const objectMatch = tagStr.match(/^object ([0-9a-f]{40})/m);
|
|
426
|
+
if (objectMatch) {
|
|
427
|
+
await walkObject(objectMatch[1]);
|
|
428
|
+
}
|
|
429
|
+
}
|
|
430
|
+
}
|
|
431
|
+
// Get all objects reachable from wants using getReachableObjects first
|
|
432
|
+
for (const want of wants) {
|
|
433
|
+
const reachable = await store.getReachableObjects(want);
|
|
434
|
+
for (const sha of reachable) {
|
|
435
|
+
await walkObject(sha);
|
|
436
|
+
}
|
|
437
|
+
}
|
|
438
|
+
return missing;
|
|
439
|
+
}
|
|
440
|
+
// ============================================================================
|
|
441
|
+
// Shallow Clone Support
|
|
442
|
+
// ============================================================================
|
|
443
|
+
/**
|
|
444
|
+
* Process shallow/deepen commands
|
|
445
|
+
*
|
|
446
|
+
* @param session - Current session
|
|
447
|
+
* @param shallowLines - Shallow commit lines from client
|
|
448
|
+
* @param depth - Requested depth
|
|
449
|
+
* @param deepenSince - Timestamp to deepen since
|
|
450
|
+
* @param deepenNot - Refs to not deepen past
|
|
451
|
+
* @param store - Object store
|
|
452
|
+
* @returns Shallow info with boundary commits
|
|
453
|
+
*/
|
|
454
|
+
export async function processShallow(session, shallowLines, depth, deepenSince, deepenNot, store) {
|
|
455
|
+
const result = {
|
|
456
|
+
shallowCommits: [],
|
|
457
|
+
unshallowCommits: []
|
|
458
|
+
};
|
|
459
|
+
// Parse existing shallow lines from client
|
|
460
|
+
for (const line of shallowLines) {
|
|
461
|
+
const match = line.match(/^shallow ([0-9a-f]{40})$/i);
|
|
462
|
+
if (match) {
|
|
463
|
+
result.shallowCommits.push(match[1].toLowerCase());
|
|
464
|
+
}
|
|
465
|
+
}
|
|
466
|
+
// Track previously shallow commits for unshallow detection
|
|
467
|
+
const previouslyShallow = new Set(session.shallowCommits || []);
|
|
468
|
+
// Process depth limit
|
|
469
|
+
if (depth !== undefined && store) {
|
|
470
|
+
for (const want of session.wants) {
|
|
471
|
+
// Walk the commit graph up to depth
|
|
472
|
+
let currentDepth = 0;
|
|
473
|
+
let current = [want];
|
|
474
|
+
while (currentDepth < depth && current.length > 0) {
|
|
475
|
+
const next = [];
|
|
476
|
+
for (const sha of current) {
|
|
477
|
+
const parents = await store.getCommitParents(sha);
|
|
478
|
+
next.push(...parents);
|
|
479
|
+
}
|
|
480
|
+
current = next;
|
|
481
|
+
currentDepth++;
|
|
482
|
+
}
|
|
483
|
+
// Commits at depth boundary become shallow
|
|
484
|
+
for (const sha of current) {
|
|
485
|
+
if (!result.shallowCommits.includes(sha)) {
|
|
486
|
+
result.shallowCommits.push(sha);
|
|
487
|
+
}
|
|
488
|
+
}
|
|
489
|
+
}
|
|
490
|
+
}
|
|
491
|
+
// Handle deepen-since
|
|
492
|
+
if (deepenSince !== undefined) {
|
|
493
|
+
// For now, just mark this as processed
|
|
494
|
+
// A full implementation would walk commit timestamps
|
|
495
|
+
}
|
|
496
|
+
// Handle deepen-not
|
|
497
|
+
if (deepenNot !== undefined && deepenNot.length > 0) {
|
|
498
|
+
// For now, just mark this as processed
|
|
499
|
+
// A full implementation would stop at these refs
|
|
500
|
+
}
|
|
501
|
+
// Detect unshallow commits (previously shallow, now not)
|
|
502
|
+
for (const sha of previouslyShallow) {
|
|
503
|
+
if (!result.shallowCommits.includes(sha)) {
|
|
504
|
+
result.unshallowCommits.push(sha);
|
|
505
|
+
}
|
|
506
|
+
}
|
|
507
|
+
// Update session
|
|
508
|
+
session.shallowCommits = result.shallowCommits;
|
|
509
|
+
session.depth = depth;
|
|
510
|
+
session.deepenSince = deepenSince;
|
|
511
|
+
session.deepenNot = deepenNot;
|
|
512
|
+
return result;
|
|
513
|
+
}
|
|
514
|
+
/**
|
|
515
|
+
* Format shallow/unshallow lines for response
|
|
516
|
+
*
|
|
517
|
+
* @param shallowInfo - Shallow info to format
|
|
518
|
+
* @returns Pkt-line formatted shallow response
|
|
519
|
+
*/
|
|
520
|
+
export function formatShallowResponse(shallowInfo) {
|
|
521
|
+
const lines = [];
|
|
522
|
+
for (const sha of shallowInfo.shallowCommits) {
|
|
523
|
+
lines.push(encodePktLine(`shallow ${sha}\n`));
|
|
524
|
+
}
|
|
525
|
+
for (const sha of shallowInfo.unshallowCommits) {
|
|
526
|
+
lines.push(encodePktLine(`unshallow ${sha}\n`));
|
|
527
|
+
}
|
|
528
|
+
return lines.join('');
|
|
529
|
+
}
|
|
530
|
+
// ============================================================================
|
|
531
|
+
// Side-band Multiplexing
|
|
532
|
+
// ============================================================================
|
|
533
|
+
/**
|
|
534
|
+
* Wrap data in side-band format
|
|
535
|
+
*
|
|
536
|
+
* @param channel - Side-band channel (1=data, 2=progress, 3=error)
|
|
537
|
+
* @param data - Data to wrap
|
|
538
|
+
* @returns Pkt-line formatted side-band data
|
|
539
|
+
*/
|
|
540
|
+
export function wrapSideBand(channel, data) {
|
|
541
|
+
// Total length = 4 (pkt-line header) + 1 (channel byte) + data length
|
|
542
|
+
const totalLength = 4 + 1 + data.length;
|
|
543
|
+
const hexLength = totalLength.toString(16).padStart(4, '0');
|
|
544
|
+
const result = new Uint8Array(totalLength);
|
|
545
|
+
// Set pkt-line length header
|
|
546
|
+
result.set(encoder.encode(hexLength), 0);
|
|
547
|
+
// Set channel byte
|
|
548
|
+
result[4] = channel;
|
|
549
|
+
// Set data
|
|
550
|
+
result.set(data, 5);
|
|
551
|
+
return result;
|
|
552
|
+
}
|
|
553
|
+
/**
|
|
554
|
+
* Send progress message via side-band
|
|
555
|
+
*
|
|
556
|
+
* @param message - Progress message
|
|
557
|
+
* @returns Pkt-line formatted progress message
|
|
558
|
+
*/
|
|
559
|
+
export function formatProgress(message) {
|
|
560
|
+
// Ensure message ends with newline
|
|
561
|
+
const msg = message.endsWith('\n') ? message : message + '\n';
|
|
562
|
+
const data = encoder.encode(msg);
|
|
563
|
+
return wrapSideBand(SideBandChannel.PROGRESS, data);
|
|
564
|
+
}
|
|
565
|
+
// ============================================================================
|
|
566
|
+
// Packfile Generation
|
|
567
|
+
// ============================================================================
|
|
568
|
+
/**
|
|
569
|
+
* Generate a packfile containing the requested objects
|
|
570
|
+
*
|
|
571
|
+
* @param store - Object store to get objects from
|
|
572
|
+
* @param wants - Objects the client wants
|
|
573
|
+
* @param haves - Objects the client already has
|
|
574
|
+
* @param options - Packfile generation options
|
|
575
|
+
* @returns Packfile result
|
|
576
|
+
*/
|
|
577
|
+
export async function generatePackfile(store, wants, haves, options) {
|
|
578
|
+
const onProgress = options?.onProgress;
|
|
579
|
+
// Handle empty wants
|
|
580
|
+
if (wants.length === 0) {
|
|
581
|
+
// Return minimal empty packfile
|
|
582
|
+
const emptyPack = createPackfileHeader(0);
|
|
583
|
+
const checksum = await sha1(emptyPack);
|
|
584
|
+
const result = new Uint8Array(emptyPack.length + 20);
|
|
585
|
+
result.set(emptyPack);
|
|
586
|
+
result.set(checksum, emptyPack.length);
|
|
587
|
+
return {
|
|
588
|
+
packfile: result,
|
|
589
|
+
objectCount: 0,
|
|
590
|
+
includedObjects: []
|
|
591
|
+
};
|
|
592
|
+
}
|
|
593
|
+
// Report counting progress
|
|
594
|
+
if (onProgress) {
|
|
595
|
+
onProgress('Counting objects...');
|
|
596
|
+
}
|
|
597
|
+
// Calculate objects to include
|
|
598
|
+
const missingObjects = await calculateMissingObjects(store, wants, haves);
|
|
599
|
+
const objectShas = Array.from(missingObjects);
|
|
600
|
+
if (onProgress) {
|
|
601
|
+
onProgress(`Counting objects: ${objectShas.length}, done.`);
|
|
602
|
+
}
|
|
603
|
+
// Gather object data
|
|
604
|
+
const objects = [];
|
|
605
|
+
for (const sha of objectShas) {
|
|
606
|
+
const obj = await store.getObject(sha);
|
|
607
|
+
if (obj) {
|
|
608
|
+
objects.push({ sha, type: obj.type, data: obj.data });
|
|
609
|
+
}
|
|
610
|
+
}
|
|
611
|
+
// Report compression progress
|
|
612
|
+
if (onProgress) {
|
|
613
|
+
onProgress('Compressing objects...');
|
|
614
|
+
}
|
|
615
|
+
// Build packfile
|
|
616
|
+
const packfile = await buildPackfile(objects, onProgress);
|
|
617
|
+
if (onProgress) {
|
|
618
|
+
onProgress(`Compressing objects: 100% (${objects.length}/${objects.length}), done.`);
|
|
619
|
+
}
|
|
620
|
+
return {
|
|
621
|
+
packfile,
|
|
622
|
+
objectCount: objects.length,
|
|
623
|
+
includedObjects: objectShas
|
|
624
|
+
};
|
|
625
|
+
}
|
|
626
|
+
/**
|
|
627
|
+
* Generate thin pack with deltas against client's objects
|
|
628
|
+
*
|
|
629
|
+
* @param store - Object store
|
|
630
|
+
* @param objects - Objects to include
|
|
631
|
+
* @param clientHasObjects - Objects client already has (for delta bases)
|
|
632
|
+
* @returns Thin packfile
|
|
633
|
+
*/
|
|
634
|
+
export async function generateThinPack(store, objects, clientHasObjects) {
|
|
635
|
+
// For thin packs, we can use client's objects as delta bases
|
|
636
|
+
// This is a simplified implementation that just compresses well
|
|
637
|
+
const objectData = [];
|
|
638
|
+
for (const sha of objects) {
|
|
639
|
+
const obj = await store.getObject(sha);
|
|
640
|
+
if (obj) {
|
|
641
|
+
objectData.push({ sha, type: obj.type, data: obj.data });
|
|
642
|
+
}
|
|
643
|
+
}
|
|
644
|
+
// Build packfile with potential delta compression
|
|
645
|
+
const packfile = await buildPackfile(objectData, undefined, clientHasObjects);
|
|
646
|
+
return {
|
|
647
|
+
packfile,
|
|
648
|
+
objectCount: objectData.length,
|
|
649
|
+
includedObjects: objects
|
|
650
|
+
};
|
|
651
|
+
}
|
|
652
|
+
// ============================================================================
|
|
653
|
+
// Packfile Building Helpers
|
|
654
|
+
// ============================================================================
|
|
655
|
+
/**
|
|
656
|
+
* Object type to packfile type number mapping
|
|
657
|
+
*/
|
|
658
|
+
const OBJECT_TYPE_MAP = {
|
|
659
|
+
commit: 1,
|
|
660
|
+
tree: 2,
|
|
661
|
+
blob: 3,
|
|
662
|
+
tag: 4
|
|
663
|
+
};
|
|
664
|
+
/**
|
|
665
|
+
* Create packfile header
|
|
666
|
+
*/
|
|
667
|
+
function createPackfileHeader(objectCount) {
|
|
668
|
+
const header = new Uint8Array(12);
|
|
669
|
+
// PACK signature
|
|
670
|
+
header[0] = 0x50; // P
|
|
671
|
+
header[1] = 0x41; // A
|
|
672
|
+
header[2] = 0x43; // C
|
|
673
|
+
header[3] = 0x4b; // K
|
|
674
|
+
// Version 2
|
|
675
|
+
header[4] = 0;
|
|
676
|
+
header[5] = 0;
|
|
677
|
+
header[6] = 0;
|
|
678
|
+
header[7] = 2;
|
|
679
|
+
// Object count (big-endian 32-bit)
|
|
680
|
+
header[8] = (objectCount >> 24) & 0xff;
|
|
681
|
+
header[9] = (objectCount >> 16) & 0xff;
|
|
682
|
+
header[10] = (objectCount >> 8) & 0xff;
|
|
683
|
+
header[11] = objectCount & 0xff;
|
|
684
|
+
return header;
|
|
685
|
+
}
|
|
686
|
+
/**
|
|
687
|
+
* Encode object header in packfile format
|
|
688
|
+
*/
|
|
689
|
+
function encodePackfileObjectHeader(type, size) {
|
|
690
|
+
const bytes = [];
|
|
691
|
+
// First byte: type (bits 4-6) and size (bits 0-3)
|
|
692
|
+
let byte = ((type & 0x7) << 4) | (size & 0x0f);
|
|
693
|
+
size >>= 4;
|
|
694
|
+
while (size > 0) {
|
|
695
|
+
bytes.push(byte | 0x80); // Set MSB to indicate more bytes
|
|
696
|
+
byte = size & 0x7f;
|
|
697
|
+
size >>= 7;
|
|
698
|
+
}
|
|
699
|
+
bytes.push(byte);
|
|
700
|
+
return new Uint8Array(bytes);
|
|
701
|
+
}
|
|
702
|
+
/**
|
|
703
|
+
* Build complete packfile from objects
|
|
704
|
+
*/
|
|
705
|
+
async function buildPackfile(objects, _onProgress, _clientHasObjects) {
|
|
706
|
+
const parts = [];
|
|
707
|
+
// Header
|
|
708
|
+
parts.push(createPackfileHeader(objects.length));
|
|
709
|
+
// Objects
|
|
710
|
+
for (let i = 0; i < objects.length; i++) {
|
|
711
|
+
const obj = objects[i];
|
|
712
|
+
const typeNum = OBJECT_TYPE_MAP[obj.type];
|
|
713
|
+
// Compress data using zlib
|
|
714
|
+
const compressed = pako.deflate(obj.data);
|
|
715
|
+
// Object header
|
|
716
|
+
const header = encodePackfileObjectHeader(typeNum, obj.data.length);
|
|
717
|
+
parts.push(header);
|
|
718
|
+
parts.push(compressed);
|
|
719
|
+
}
|
|
720
|
+
// Concatenate all parts (without checksum yet)
|
|
721
|
+
let totalLength = 0;
|
|
722
|
+
for (const part of parts) {
|
|
723
|
+
totalLength += part.length;
|
|
724
|
+
}
|
|
725
|
+
const packData = new Uint8Array(totalLength);
|
|
726
|
+
let offset = 0;
|
|
727
|
+
for (const part of parts) {
|
|
728
|
+
packData.set(part, offset);
|
|
729
|
+
offset += part.length;
|
|
730
|
+
}
|
|
731
|
+
// Calculate SHA-1 checksum of pack data
|
|
732
|
+
const checksum = await sha1(packData);
|
|
733
|
+
// Final packfile with checksum
|
|
734
|
+
const result = new Uint8Array(packData.length + 20);
|
|
735
|
+
result.set(packData);
|
|
736
|
+
result.set(checksum, packData.length);
|
|
737
|
+
return result;
|
|
738
|
+
}
|
|
739
|
+
/**
|
|
740
|
+
* Calculate SHA-1 hash using Web Crypto API
|
|
741
|
+
*/
|
|
742
|
+
async function sha1(data) {
|
|
743
|
+
const hashBuffer = await crypto.subtle.digest('SHA-1', data);
|
|
744
|
+
return new Uint8Array(hashBuffer);
|
|
745
|
+
}
|
|
746
|
+
// ============================================================================
|
|
747
|
+
// Full Fetch Handler
|
|
748
|
+
// ============================================================================
|
|
749
|
+
/**
|
|
750
|
+
* Handle a complete fetch request
|
|
751
|
+
*
|
|
752
|
+
* This is the main entry point that handles the full protocol flow:
|
|
753
|
+
* 1. Parse client request (wants, haves, capabilities)
|
|
754
|
+
* 2. Negotiate common ancestors
|
|
755
|
+
* 3. Generate and send packfile
|
|
756
|
+
*
|
|
757
|
+
* @param session - Upload pack session
|
|
758
|
+
* @param request - Raw request data
|
|
759
|
+
* @param store - Object store
|
|
760
|
+
* @returns Response data (ACKs/NAKs + packfile)
|
|
761
|
+
*/
|
|
762
|
+
export async function handleFetch(session, request, store) {
|
|
763
|
+
const lines = request.split('\n').filter(l => l.trim() && l !== '0000');
|
|
764
|
+
const parts = [];
|
|
765
|
+
const wants = [];
|
|
766
|
+
const haves = [];
|
|
767
|
+
const shallowLines = [];
|
|
768
|
+
let depth;
|
|
769
|
+
let done = false;
|
|
770
|
+
let sideBand = false;
|
|
771
|
+
// Parse request
|
|
772
|
+
for (const line of lines) {
|
|
773
|
+
const trimmed = line.trim();
|
|
774
|
+
if (trimmed.startsWith('want ')) {
|
|
775
|
+
const parsed = parseWantLine(trimmed);
|
|
776
|
+
wants.push(parsed.sha);
|
|
777
|
+
// First want line contains capabilities
|
|
778
|
+
if (wants.length === 1) {
|
|
779
|
+
session.capabilities = { ...session.capabilities, ...parsed.capabilities };
|
|
780
|
+
sideBand = parsed.capabilities.sideBand64k || false;
|
|
781
|
+
}
|
|
782
|
+
}
|
|
783
|
+
else if (trimmed.startsWith('have ')) {
|
|
784
|
+
const sha = parseHaveLine(trimmed);
|
|
785
|
+
haves.push(sha);
|
|
786
|
+
}
|
|
787
|
+
else if (trimmed.startsWith('shallow ')) {
|
|
788
|
+
shallowLines.push(trimmed);
|
|
789
|
+
}
|
|
790
|
+
else if (trimmed.startsWith('deepen ')) {
|
|
791
|
+
depth = parseInt(trimmed.slice(7), 10);
|
|
792
|
+
}
|
|
793
|
+
else if (trimmed === 'done') {
|
|
794
|
+
done = true;
|
|
795
|
+
}
|
|
796
|
+
}
|
|
797
|
+
// Process wants
|
|
798
|
+
await processWants(session, wants, store);
|
|
799
|
+
// Process shallow if present
|
|
800
|
+
if (shallowLines.length > 0 || depth !== undefined) {
|
|
801
|
+
const shallowInfo = await processShallow(session, shallowLines, depth, undefined, undefined, store);
|
|
802
|
+
const shallowResponse = formatShallowResponse(shallowInfo);
|
|
803
|
+
if (shallowResponse) {
|
|
804
|
+
parts.push(encoder.encode(shallowResponse));
|
|
805
|
+
}
|
|
806
|
+
}
|
|
807
|
+
// Process haves
|
|
808
|
+
const negotiation = await processHaves(session, haves, store, done);
|
|
809
|
+
// Generate ACK/NAK response
|
|
810
|
+
if (negotiation.nak) {
|
|
811
|
+
parts.push(encoder.encode(formatNak()));
|
|
812
|
+
}
|
|
813
|
+
else {
|
|
814
|
+
for (const ack of negotiation.acks) {
|
|
815
|
+
parts.push(encoder.encode(formatAck(ack.sha, ack.status)));
|
|
816
|
+
}
|
|
817
|
+
}
|
|
818
|
+
// Generate packfile if ready
|
|
819
|
+
if (negotiation.ready || done) {
|
|
820
|
+
const packResult = await generatePackfile(store, session.wants, session.commonAncestors, {
|
|
821
|
+
onProgress: sideBand ? undefined : undefined,
|
|
822
|
+
thinPack: session.capabilities.thinPack,
|
|
823
|
+
clientHasObjects: session.commonAncestors
|
|
824
|
+
});
|
|
825
|
+
// Add packfile data
|
|
826
|
+
if (sideBand) {
|
|
827
|
+
// Wrap in side-band format
|
|
828
|
+
const wrapped = wrapSideBand(SideBandChannel.PACK_DATA, packResult.packfile);
|
|
829
|
+
parts.push(wrapped);
|
|
830
|
+
// Add flush
|
|
831
|
+
parts.push(encoder.encode(FLUSH_PKT));
|
|
832
|
+
}
|
|
833
|
+
else {
|
|
834
|
+
parts.push(packResult.packfile);
|
|
835
|
+
}
|
|
836
|
+
}
|
|
837
|
+
// Concatenate all parts
|
|
838
|
+
let totalLength = 0;
|
|
839
|
+
for (const part of parts) {
|
|
840
|
+
totalLength += part.length;
|
|
841
|
+
}
|
|
842
|
+
const result = new Uint8Array(totalLength);
|
|
843
|
+
let offset = 0;
|
|
844
|
+
for (const part of parts) {
|
|
845
|
+
result.set(part, offset);
|
|
846
|
+
offset += part.length;
|
|
847
|
+
}
|
|
848
|
+
return result;
|
|
849
|
+
}
|
|
850
|
+
//# sourceMappingURL=upload-pack.js.map
|