@pierre/storage 0.2.0 → 0.2.2
- package/README.md +37 -0
- package/dist/index.cjs +583 -311
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +47 -2
- package/dist/index.d.ts +47 -2
- package/dist/index.js +583 -311
- package/dist/index.js.map +1 -1
- package/package.json +38 -39
- package/src/commit-pack.ts +128 -0
- package/src/commit.ts +18 -362
- package/src/diff-commit.ts +300 -0
- package/src/index.ts +72 -1
- package/src/schemas.ts +8 -0
- package/src/stream-utils.ts +255 -0
- package/src/types.ts +36 -1
package/dist/index.js CHANGED

```diff
@@ -104,6 +104,12 @@ var commitDiffResponseSchema = z.object({
   files: z.array(diffFileRawSchema),
   filtered_files: z.array(filteredFileRawSchema)
 });
+var createBranchResponseSchema = z.object({
+  message: z.string(),
+  target_branch: z.string(),
+  target_is_ephemeral: z.boolean(),
+  commit_sha: z.string().nullable().optional()
+});
 var refUpdateResultSchema = z.object({
   branch: z.string(),
   old_sha: z.string(),
```
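This hunk adds `createBranchResponseSchema`, the zod validator for the branch-creation endpoint new in 0.2.2. A minimal sketch of a payload it accepts — the field values below are hypothetical, not taken from the package:

```ts
import { z } from "zod";

// Same shape as the schema added in the hunk above.
const createBranchResponseSchema = z.object({
  message: z.string(),
  target_branch: z.string(),
  target_is_ephemeral: z.boolean(),
  commit_sha: z.string().nullable().optional()
});

// Hypothetical server response; commit_sha may be null or absent.
const parsed = createBranchResponseSchema.parse({
  message: "branch created",
  target_branch: "feature/example",
  target_is_ephemeral: false,
  commit_sha: null
});
console.log(parsed.target_branch); // "feature/example"
```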
```diff
@@ -148,11 +154,298 @@ var errorEnvelopeSchema = z.object({
   error: z.string()
 });
 
+// src/commit-pack.ts
+function buildCommitResult(ack) {
+  const refUpdate = toRefUpdate(ack.result);
+  if (!ack.result.success) {
+    throw new RefUpdateError(
+      ack.result.message ?? `Commit failed with status ${ack.result.status}`,
+      {
+        status: ack.result.status,
+        message: ack.result.message,
+        refUpdate
+      }
+    );
+  }
+  return {
+    commitSha: ack.commit.commit_sha,
+    treeSha: ack.commit.tree_sha,
+    targetBranch: ack.commit.target_branch,
+    packBytes: ack.commit.pack_bytes,
+    blobCount: ack.commit.blob_count,
+    refUpdate
+  };
+}
+function toRefUpdate(result) {
+  return {
+    branch: result.branch,
+    oldSha: result.old_sha,
+    newSha: result.new_sha
+  };
+}
+async function parseCommitPackError(response, fallbackMessage) {
+  const cloned = response.clone();
+  let jsonBody;
+  try {
+    jsonBody = await cloned.json();
+  } catch {
+    jsonBody = void 0;
+  }
+  let textBody;
+  if (jsonBody === void 0) {
+    try {
+      textBody = await response.text();
+    } catch {
+      textBody = void 0;
+    }
+  }
+  const defaultStatus = (() => {
+    const inferred = inferRefUpdateReason(String(response.status));
+    return inferred === "unknown" ? "failed" : inferred;
+  })();
+  let statusLabel = defaultStatus;
+  let refUpdate;
+  let message;
+  if (jsonBody !== void 0) {
+    const parsedResponse = commitPackResponseSchema.safeParse(jsonBody);
+    if (parsedResponse.success) {
+      const result = parsedResponse.data.result;
+      if (typeof result.status === "string" && result.status.trim() !== "") {
+        statusLabel = result.status.trim();
+      }
+      refUpdate = toPartialRefUpdateFields(result.branch, result.old_sha, result.new_sha);
+      if (typeof result.message === "string" && result.message.trim() !== "") {
+        message = result.message.trim();
+      }
+    }
+    if (!message) {
+      const parsedError = errorEnvelopeSchema.safeParse(jsonBody);
+      if (parsedError.success) {
+        const trimmed = parsedError.data.error.trim();
+        if (trimmed) {
+          message = trimmed;
+        }
+      }
+    }
+  }
+  if (!message && typeof jsonBody === "string" && jsonBody.trim() !== "") {
+    message = jsonBody.trim();
+  }
+  if (!message && textBody && textBody.trim() !== "") {
+    message = textBody.trim();
+  }
+  return {
+    statusMessage: message ?? fallbackMessage,
+    statusLabel,
+    refUpdate
+  };
+}
+function toPartialRefUpdateFields(branch, oldSha, newSha) {
+  const refUpdate = {};
+  if (typeof branch === "string" && branch.trim() !== "") {
+    refUpdate.branch = branch.trim();
+  }
+  if (typeof oldSha === "string" && oldSha.trim() !== "") {
+    refUpdate.oldSha = oldSha.trim();
+  }
+  if (typeof newSha === "string" && newSha.trim() !== "") {
+    refUpdate.newSha = newSha.trim();
+  }
+  return Object.keys(refUpdate).length > 0 ? refUpdate : void 0;
+}
+
+// src/stream-utils.ts
+var BufferCtor = globalThis.Buffer;
+var MAX_CHUNK_BYTES = 4 * 1024 * 1024;
+async function* chunkify(source) {
+  let pending = null;
+  let produced = false;
+  for await (const value of source) {
+    const bytes = value;
+    if (pending && pending.byteLength === MAX_CHUNK_BYTES) {
+      yield { chunk: pending, eof: false };
+      produced = true;
+      pending = null;
+    }
+    const merged = pending ? concatChunks(pending, bytes) : bytes;
+    pending = null;
+    let cursor = merged;
+    while (cursor.byteLength > MAX_CHUNK_BYTES) {
+      const chunk = cursor.slice(0, MAX_CHUNK_BYTES);
+      cursor = cursor.slice(MAX_CHUNK_BYTES);
+      yield { chunk, eof: false };
+      produced = true;
+    }
+    pending = cursor;
+  }
+  if (pending) {
+    yield { chunk: pending, eof: true };
+    produced = true;
+  }
+  if (!produced) {
+    yield { chunk: new Uint8Array(0), eof: true };
+  }
+}
+async function* toAsyncIterable(source) {
+  if (typeof source === "string") {
+    yield new TextEncoder().encode(source);
+    return;
+  }
+  if (source instanceof Uint8Array) {
+    yield source;
+    return;
+  }
+  if (source instanceof ArrayBuffer) {
+    yield new Uint8Array(source);
+    return;
+  }
+  if (ArrayBuffer.isView(source)) {
+    yield new Uint8Array(source.buffer, source.byteOffset, source.byteLength);
+    return;
+  }
+  if (isBlobLike(source)) {
+    const stream = source.stream();
+    if (isAsyncIterable(stream)) {
+      for await (const chunk of stream) {
+        yield ensureUint8Array(chunk);
+      }
+      return;
+    }
+    if (isReadableStreamLike(stream)) {
+      yield* readReadableStream(stream);
+      return;
+    }
+  }
+  if (isReadableStreamLike(source)) {
+    yield* readReadableStream(source);
+    return;
+  }
+  if (isAsyncIterable(source)) {
+    for await (const chunk of source) {
+      yield ensureUint8Array(chunk);
+    }
+    return;
+  }
+  if (isIterable(source)) {
+    for (const chunk of source) {
+      yield ensureUint8Array(chunk);
+    }
+    return;
+  }
+  throw new Error("Unsupported content source; expected binary data");
+}
+function base64Encode(bytes) {
+  if (BufferCtor) {
+    return BufferCtor.from(bytes).toString("base64");
+  }
+  let binary = "";
+  for (let i = 0; i < bytes.byteLength; i++) {
+    binary += String.fromCharCode(bytes[i]);
+  }
+  const btoaFn = globalThis.btoa;
+  if (typeof btoaFn === "function") {
+    return btoaFn(binary);
+  }
+  throw new Error("Base64 encoding is not supported in this environment");
+}
+function requiresDuplex(body) {
+  if (!body || typeof body !== "object") {
+    return false;
+  }
+  if (typeof body[Symbol.asyncIterator] === "function") {
+    return true;
+  }
+  const readableStreamCtor = globalThis.ReadableStream;
+  if (readableStreamCtor && body instanceof readableStreamCtor) {
+    return true;
+  }
+  return false;
+}
+function toRequestBody(iterable) {
+  const readableStreamCtor = globalThis.ReadableStream;
+  if (typeof readableStreamCtor === "function") {
+    const iterator = iterable[Symbol.asyncIterator]();
+    return new readableStreamCtor({
+      async pull(controller) {
+        const { value, done } = await iterator.next();
+        if (done) {
+          controller.close();
+          return;
+        }
+        controller.enqueue(value);
+      },
+      async cancel(reason) {
+        if (typeof iterator.return === "function") {
+          await iterator.return(reason);
+        }
+      }
+    });
+  }
+  return iterable;
+}
+async function* readReadableStream(stream) {
+  const reader = stream.getReader();
+  try {
+    while (true) {
+      const { value, done } = await reader.read();
+      if (done) {
+        break;
+      }
+      if (value !== void 0) {
+        yield ensureUint8Array(value);
+      }
+    }
+  } finally {
+    reader.releaseLock?.();
+  }
+}
+function ensureUint8Array(value) {
+  if (value instanceof Uint8Array) {
+    return value;
+  }
+  if (value instanceof ArrayBuffer) {
+    return new Uint8Array(value);
+  }
+  if (ArrayBuffer.isView(value)) {
+    return new Uint8Array(value.buffer, value.byteOffset, value.byteLength);
+  }
+  if (typeof value === "string") {
+    return new TextEncoder().encode(value);
+  }
+  if (BufferCtor && BufferCtor.isBuffer(value)) {
+    return value;
+  }
+  throw new Error("Unsupported chunk type; expected binary data");
+}
+function isBlobLike(value) {
+  return typeof value === "object" && value !== null && typeof value.stream === "function";
+}
+function isReadableStreamLike(value) {
+  return typeof value === "object" && value !== null && typeof value.getReader === "function";
+}
+function isAsyncIterable(value) {
+  return typeof value === "object" && value !== null && Symbol.asyncIterator in value;
+}
+function isIterable(value) {
+  return typeof value === "object" && value !== null && Symbol.iterator in value;
+}
+function concatChunks(a, b) {
+  if (a.byteLength === 0) {
+    return b;
+  }
+  if (b.byteLength === 0) {
+    return a;
+  }
+  const merged = new Uint8Array(a.byteLength + b.byteLength);
+  merged.set(a, 0);
+  merged.set(b, a.byteLength);
+  return merged;
+}
+
 // src/commit.ts
-var MAX_CHUNK_BYTES = 4 * 1024 * 1024;
 var DEFAULT_TTL_SECONDS = 60 * 60;
 var HEADS_REF_PREFIX = "refs/heads/";
-var BufferCtor = globalThis.Buffer;
+var BufferCtor2 = globalThis.Buffer;
 var CommitBuilderImpl = class {
   options;
   getAuthToken;
```
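The bulk of the new code lands in two new source files: `src/commit-pack.ts` (shared result and error handling for pack and diff commits) and `src/stream-utils.ts` (byte-stream helpers previously private to `src/commit.ts`). The `chunkify` helper re-slices any byte stream into segments of at most `MAX_CHUNK_BYTES` (4 MiB), marking only the final segment with `eof: true`; an empty source still yields one empty terminal segment. A self-contained sketch of that invariant, using a tiny limit for illustration:

```ts
type Segment = { chunk: Uint8Array; eof: boolean };

// Illustrative limit; the package uses MAX_CHUNK_BYTES = 4 * 1024 * 1024.
const LIMIT = 4;

function concat(a: Uint8Array, b: Uint8Array): Uint8Array {
  const out = new Uint8Array(a.byteLength + b.byteLength);
  out.set(a, 0);
  out.set(b, a.byteLength);
  return out;
}

// Same re-slicing logic as the chunkify added above, with LIMIT swapped in:
// every segment is at most LIMIT bytes and only the last carries eof: true.
async function* chunkify(source: AsyncIterable<Uint8Array>): AsyncGenerator<Segment> {
  let pending: Uint8Array | null = null;
  let produced = false;
  for await (const bytes of source) {
    // Flush a full pending buffer before merging in the next input chunk.
    if (pending && pending.byteLength === LIMIT) {
      yield { chunk: pending, eof: false };
      produced = true;
      pending = null;
    }
    let cursor = pending ? concat(pending, bytes) : bytes;
    pending = null;
    while (cursor.byteLength > LIMIT) {
      yield { chunk: cursor.slice(0, LIMIT), eof: false };
      produced = true;
      cursor = cursor.slice(LIMIT);
    }
    pending = cursor; // may be empty or exactly LIMIT bytes
  }
  if (pending) {
    yield { chunk: pending, eof: true };
    produced = true;
  }
  if (!produced) {
    // An empty source still produces one empty terminal segment.
    yield { chunk: new Uint8Array(0), eof: true };
  }
}

async function* demoBytes() {
  yield new Uint8Array([1, 2, 3, 4, 5, 6]); // split into 4 + 2 bytes
}

void (async () => {
  for await (const segment of chunkify(demoBytes())) {
    console.log(segment.chunk.byteLength, segment.eof); // 4 false, then 2 true
  }
})();
```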
```diff
@@ -215,8 +508,8 @@ var CommitBuilderImpl = class {
     let data;
     if (normalizedEncoding === "utf8") {
       data = new TextEncoder().encode(contents);
-    } else if (BufferCtor) {
-      data = BufferCtor.from(
+    } else if (BufferCtor2) {
+      data = BufferCtor2.from(
         contents,
         normalizedEncoding
       );
```
```diff
@@ -324,257 +617,49 @@ var FetchCommitTransport = class {
         Accept: "application/json"
       },
       body,
-      signal: request.signal
-    };
-    if (requiresDuplex(body)) {
-      init.duplex = "half";
-    }
-    const response = await fetch(this.url, init);
-    if (!response.ok) {
-      const …
-      …
-    }
-    …
-  if (typeof readableStreamCtor === "function") {
-    const iterator = iterable[Symbol.asyncIterator]();
-    return new readableStreamCtor({
-      async pull(controller) {
-        const { value, done } = await iterator.next();
-        if (done) {
-          controller.close();
-          return;
-        }
-        controller.enqueue(value);
-      },
-      async cancel(reason) {
-        if (typeof iterator.return === "function") {
-          await iterator.return(reason);
-        }
-      }
-    });
-  }
-  return iterable;
-}
-function buildMessageIterable(metadata, blobs) {
-  const encoder = new TextEncoder();
-  return {
-    async *[Symbol.asyncIterator]() {
-      yield encoder.encode(`${JSON.stringify({ metadata })}
-`);
-      for (const blob of blobs) {
-        for await (const segment of blob.chunks) {
-          const payload = {
-            blob_chunk: {
-              content_id: blob.contentId,
-              data: base64Encode(segment.chunk),
-              eof: segment.eof
-            }
-          };
-          yield encoder.encode(`${JSON.stringify(payload)}
-`);
-        }
-      }
-    }
-  };
-}
-function requiresDuplex(body) {
-  if (!body || typeof body !== "object") {
-    return false;
-  }
-  if (typeof body[Symbol.asyncIterator] === "function") {
-    return true;
-  }
-  const readableStreamCtor = globalThis.ReadableStream;
-  if (readableStreamCtor && body instanceof readableStreamCtor) {
-    return true;
-  }
-  return false;
-}
-function buildCommitResult(ack) {
-  const refUpdate = toRefUpdate(ack.result);
-  if (!ack.result.success) {
-    throw new RefUpdateError(
-      ack.result.message ?? `Commit failed with status ${ack.result.status}`,
-      {
-        status: ack.result.status,
-        message: ack.result.message,
-        refUpdate
-      }
-    );
-  }
-  return {
-    commitSha: ack.commit.commit_sha,
-    treeSha: ack.commit.tree_sha,
-    targetBranch: ack.commit.target_branch,
-    packBytes: ack.commit.pack_bytes,
-    blobCount: ack.commit.blob_count,
-    refUpdate
-  };
-}
-function toRefUpdate(result) {
-  return {
-    branch: result.branch,
-    oldSha: result.old_sha,
-    newSha: result.new_sha
-  };
-}
-async function* chunkify(source) {
-  let pending = null;
-  let produced = false;
-  for await (const value of source) {
-    const bytes = value;
-    if (pending && pending.byteLength === MAX_CHUNK_BYTES) {
-      yield { chunk: pending, eof: false };
-      produced = true;
-      pending = null;
-    }
-    const merged = pending ? concatChunks(pending, bytes) : bytes;
-    pending = null;
-    let cursor = merged;
-    while (cursor.byteLength > MAX_CHUNK_BYTES) {
-      const chunk = cursor.slice(0, MAX_CHUNK_BYTES);
-      cursor = cursor.slice(MAX_CHUNK_BYTES);
-      yield { chunk, eof: false };
-      produced = true;
-    }
-    pending = cursor;
-  }
-  if (pending) {
-    yield { chunk: pending, eof: true };
-    produced = true;
-  }
-  if (!produced) {
-    yield { chunk: new Uint8Array(0), eof: true };
-  }
-}
-async function* toAsyncIterable(source) {
-  if (typeof source === "string") {
-    yield new TextEncoder().encode(source);
-    return;
-  }
-  if (source instanceof Uint8Array) {
-    yield source;
-    return;
-  }
-  if (source instanceof ArrayBuffer) {
-    yield new Uint8Array(source);
-    return;
-  }
-  if (ArrayBuffer.isView(source)) {
-    yield new Uint8Array(source.buffer, source.byteOffset, source.byteLength);
-    return;
-  }
-  if (isBlobLike(source)) {
-    const stream = source.stream();
-    if (isAsyncIterable(stream)) {
-      for await (const chunk of stream) {
-        yield ensureUint8Array(chunk);
-      }
-      return;
-    }
-    if (isReadableStreamLike(stream)) {
-      yield* readReadableStream(stream);
-      return;
-    }
-  }
-  if (isReadableStreamLike(source)) {
-    yield* readReadableStream(source);
-    return;
-  }
-  if (isAsyncIterable(source)) {
-    for await (const chunk of source) {
-      yield ensureUint8Array(chunk);
-    }
-    return;
-  }
-  if (isIterable(source)) {
-    for (const chunk of source) {
-      yield ensureUint8Array(chunk);
-    }
-    return;
-  }
-  throw new Error("Unsupported file source for createCommit");
-}
-async function* readReadableStream(stream) {
-  const reader = stream.getReader();
-  try {
-    while (true) {
-      const { value, done } = await reader.read();
-      if (done) {
-        break;
-      }
-      if (value !== void 0) {
-        yield ensureUint8Array(value);
-      }
-    }
-  } finally {
-    reader.releaseLock?.();
-  }
-}
-function ensureUint8Array(value) {
-  if (value instanceof Uint8Array) {
-    return value;
-  }
-  if (value instanceof ArrayBuffer) {
-    return new Uint8Array(value);
-  }
-  if (ArrayBuffer.isView(value)) {
-    return new Uint8Array(value.buffer, value.byteOffset, value.byteLength);
-  }
-  if (typeof value === "string") {
-    return new TextEncoder().encode(value);
-  }
-  if (BufferCtor && BufferCtor.isBuffer(value)) {
-    return value;
-  }
-  throw new Error("Unsupported chunk type; expected binary data");
-}
-function isBlobLike(value) {
-  return typeof value === "object" && value !== null && typeof value.stream === "function";
-}
-function isReadableStreamLike(value) {
-  return typeof value === "object" && value !== null && typeof value.getReader === "function";
-}
-function isAsyncIterable(value) {
-  return typeof value === "object" && value !== null && Symbol.asyncIterator in value;
-}
-function isIterable(value) {
-  return typeof value === "object" && value !== null && Symbol.iterator in value;
-}
-function concatChunks(a, b) {
-  if (a.byteLength === 0) {
-    return b;
-  }
-  if (b.byteLength === 0) {
-    return a;
-  }
-  const merged = new Uint8Array(a.byteLength + b.byteLength);
-  merged.set(a, 0);
-  merged.set(b, a.byteLength);
-  return merged;
-}
-function base64Encode(bytes) {
-  if (BufferCtor) {
-    return BufferCtor.from(bytes).toString("base64");
-  }
-  let binary = "";
-  for (let i = 0; i < bytes.byteLength; i++) {
-    binary += String.fromCharCode(bytes[i]);
-  }
-  const btoaFn = globalThis.btoa;
-  if (typeof btoaFn === "function") {
-    return btoaFn(binary);
+      signal: request.signal
+    };
+    if (requiresDuplex(body)) {
+      init.duplex = "half";
+    }
+    const response = await fetch(this.url, init);
+    if (!response.ok) {
+      const fallbackMessage = `createCommit request failed (${response.status} ${response.statusText})`;
+      const { statusMessage, statusLabel, refUpdate } = await parseCommitPackError(
+        response,
+        fallbackMessage
+      );
+      throw new RefUpdateError(statusMessage, {
+        status: statusLabel,
+        message: statusMessage,
+        refUpdate
+      });
+    }
+    const ack = commitPackAckSchema.parse(await response.json());
+    return ack;
   }
-
+};
+function buildMessageIterable(metadata, blobs) {
+  const encoder = new TextEncoder();
+  return {
+    async *[Symbol.asyncIterator]() {
+      yield encoder.encode(`${JSON.stringify({ metadata })}
+`);
+      for (const blob of blobs) {
+        for await (const segment of blob.chunks) {
+          const payload = {
+            blob_chunk: {
+              content_id: blob.contentId,
+              data: base64Encode(segment.chunk),
+              eof: segment.eof
+            }
+          };
+          yield encoder.encode(`${JSON.stringify(payload)}
+`);
+        }
+      }
+    }
+  };
 }
 function randomContentId() {
   const cryptoObj = globalThis.crypto;
```
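This hunk rewires `FetchCommitTransport`'s error path through the shared `parseCommitPackError` and keeps `buildMessageIterable`, which frames the upload as NDJSON: one metadata line followed by one line per base64-encoded blob chunk. A hedged sketch of what those frames look like on the wire — the branch, message, content id, and data below are made up:

```ts
// Hypothetical illustration of the NDJSON frames buildMessageIterable emits:
// a single metadata line, then one blob_chunk line per streamed segment.
const encoder = new TextEncoder();

function frame(value: unknown): Uint8Array {
  return encoder.encode(`${JSON.stringify(value)}\n`);
}

const body = [
  frame({ metadata: { target_branch: "main", commit_message: "example" } }),
  frame({ blob_chunk: { content_id: "c-1", data: "aGVsbG8=", eof: true } }) // "hello"
];

// Decoded, the request body reads:
// {"metadata":{"target_branch":"main","commit_message":"example"}}
// {"blob_chunk":{"content_id":"c-1","data":"aGVsbG8=","eof":true}}
console.log(body.map((line) => new TextDecoder().decode(line)).join(""));
```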
```diff
@@ -651,76 +736,208 @@ function resolveCommitTtlSeconds(options) {
   }
   return DEFAULT_TTL_SECONDS;
 }
-…
+
+// src/diff-commit.ts
+var DiffCommitExecutor = class {
+  options;
+  getAuthToken;
+  transport;
+  diffFactory;
+  sent = false;
+  constructor(deps) {
+    this.options = normalizeDiffCommitOptions(deps.options);
+    this.getAuthToken = deps.getAuthToken;
+    this.transport = deps.transport;
+    const trimmedMessage = this.options.commitMessage?.trim();
+    const trimmedAuthorName = this.options.author?.name?.trim();
+    const trimmedAuthorEmail = this.options.author?.email?.trim();
+    if (!trimmedMessage) {
+      throw new Error("createCommitFromDiff commitMessage is required");
     }
-…
-    const inferred = inferRefUpdateReason(String(response.status));
-    return inferred === "unknown" ? "failed" : inferred;
-  })();
-  let statusLabel = defaultStatus;
-  let refUpdate;
-  let message;
-  if (jsonBody !== void 0) {
-    const parsedResponse = commitPackResponseSchema.safeParse(jsonBody);
-    if (parsedResponse.success) {
-      const result = parsedResponse.data.result;
-      if (typeof result.status === "string" && result.status.trim() !== "") {
-        statusLabel = result.status.trim();
-      }
-      refUpdate = toPartialRefUpdateFields(result.branch, result.old_sha, result.new_sha);
-      if (typeof result.message === "string" && result.message.trim() !== "") {
-        message = result.message.trim();
-      }
+    if (!trimmedAuthorName || !trimmedAuthorEmail) {
+      throw new Error("createCommitFromDiff author name and email are required");
     }
-…
+    this.options.commitMessage = trimmedMessage;
+    this.options.author = {
+      name: trimmedAuthorName,
+      email: trimmedAuthorEmail
+    };
+    if (typeof this.options.expectedHeadSha === "string") {
+      this.options.expectedHeadSha = this.options.expectedHeadSha.trim();
+    }
+    if (typeof this.options.baseBranch === "string") {
+      const trimmedBase = this.options.baseBranch.trim();
+      if (trimmedBase === "") {
+        delete this.options.baseBranch;
+      } else {
+        if (trimmedBase.startsWith("refs/")) {
+          throw new Error("createCommitFromDiff baseBranch must not include refs/ prefix");
         }
+        this.options.baseBranch = trimmedBase;
       }
     }
+    if (this.options.ephemeralBase && !this.options.baseBranch) {
+      throw new Error("createCommitFromDiff ephemeralBase requires baseBranch");
+    }
+    this.diffFactory = () => toAsyncIterable(this.options.initialDiff);
   }
-…
+  async send() {
+    this.ensureNotSent();
+    this.sent = true;
+    const metadata = this.buildMetadata();
+    const diffIterable = chunkify(this.diffFactory());
+    const authorization = await this.getAuthToken();
+    const ack = await this.transport.send({
+      authorization,
+      signal: this.options.signal,
+      metadata,
+      diffChunks: diffIterable
+    });
+    return buildCommitResult(ack);
   }
-…
+  buildMetadata() {
+    const metadata = {
+      target_branch: this.options.targetBranch,
+      commit_message: this.options.commitMessage,
+      author: {
+        name: this.options.author.name,
+        email: this.options.author.email
+      }
+    };
+    if (this.options.expectedHeadSha) {
+      metadata.expected_head_sha = this.options.expectedHeadSha;
+    }
+    if (this.options.baseBranch) {
+      metadata.base_branch = this.options.baseBranch;
+    }
+    if (this.options.committer) {
+      metadata.committer = {
+        name: this.options.committer.name,
+        email: this.options.committer.email
+      };
+    }
+    if (this.options.ephemeral) {
+      metadata.ephemeral = true;
+    }
+    if (this.options.ephemeralBase) {
+      metadata.ephemeral_base = true;
+    }
+    return metadata;
+  }
+  ensureNotSent() {
+    if (this.sent) {
+      throw new Error("createCommitFromDiff cannot be reused after send()");
+    }
+  }
+};
+var FetchDiffCommitTransport = class {
+  url;
+  constructor(config) {
+    const trimmedBase = config.baseUrl.replace(/\/+$/, "");
+    this.url = `${trimmedBase}/api/v${config.version}/repos/diff-commit`;
+  }
+  async send(request) {
+    const bodyIterable = buildMessageIterable2(request.metadata, request.diffChunks);
+    const body = toRequestBody(bodyIterable);
+    const init = {
+      method: "POST",
+      headers: {
+        Authorization: `Bearer ${request.authorization}`,
+        "Content-Type": "application/x-ndjson",
+        Accept: "application/json"
+      },
+      body,
+      signal: request.signal
+    };
+    if (requiresDuplex(body)) {
+      init.duplex = "half";
+    }
+    const response = await fetch(this.url, init);
+    if (!response.ok) {
+      const fallbackMessage = `createCommitFromDiff request failed (${response.status} ${response.statusText})`;
+      const { statusMessage, statusLabel, refUpdate } = await parseCommitPackError(
+        response,
+        fallbackMessage
+      );
+      throw new RefUpdateError(statusMessage, {
+        status: statusLabel,
+        message: statusMessage,
+        refUpdate
+      });
+    }
+    return commitPackAckSchema.parse(await response.json());
   }
+};
+function buildMessageIterable2(metadata, diffChunks) {
+  const encoder = new TextEncoder();
   return {
-…
+    async *[Symbol.asyncIterator]() {
+      yield encoder.encode(`${JSON.stringify({ metadata })}
+`);
+      for await (const segment of diffChunks) {
+        const payload = {
+          diff_chunk: {
+            data: base64Encode(segment.chunk),
+            eof: segment.eof
+          }
+        };
+        yield encoder.encode(`${JSON.stringify(payload)}
+`);
+      }
+    }
   };
 }
-function …
-…
+function normalizeDiffCommitOptions(options) {
+  if (!options || typeof options !== "object") {
+    throw new Error("createCommitFromDiff options are required");
+  }
+  if (options.diff === void 0 || options.diff === null) {
+    throw new Error("createCommitFromDiff diff is required");
+  }
+  const targetBranch = normalizeBranchName2(options.targetBranch);
+  let committer;
+  if (options.committer) {
+    const name = options.committer.name?.trim();
+    const email = options.committer.email?.trim();
+    if (!name || !email) {
+      throw new Error("createCommitFromDiff committer name and email are required when provided");
+    }
+    committer = { name, email };
   }
-…
+  return {
+    targetBranch,
+    commitMessage: options.commitMessage,
+    expectedHeadSha: options.expectedHeadSha,
+    baseBranch: options.baseBranch,
+    ephemeral: options.ephemeral === true,
+    ephemeralBase: options.ephemeralBase === true,
+    author: options.author,
+    committer,
+    signal: options.signal,
+    ttl: options.ttl,
+    initialDiff: options.diff
+  };
+}
+function normalizeBranchName2(value) {
+  const trimmed = value?.trim();
+  if (!trimmed) {
+    throw new Error("createCommitFromDiff targetBranch is required");
   }
-  if (…
+  if (trimmed.startsWith("refs/heads/")) {
+    const branch = trimmed.slice("refs/heads/".length).trim();
+    if (!branch) {
+      throw new Error("createCommitFromDiff targetBranch must include a branch name");
+    }
+    return branch;
   }
-…
+  if (trimmed.startsWith("refs/")) {
+    throw new Error("createCommitFromDiff targetBranch must not include refs/ prefix");
+  }
+  return trimmed;
+}
+async function sendCommitFromDiff(deps) {
+  const executor = new DiffCommitExecutor(deps);
+  return executor.send();
 }
 
 // src/fetch.ts
```
```diff
@@ -1274,6 +1491,14 @@ function transformCommitDiffResult(raw) {
     filteredFiles: raw.filtered_files.map(transformFilteredFile)
   };
 }
+function transformCreateBranchResult(raw) {
+  return {
+    message: raw.message,
+    targetBranch: raw.target_branch,
+    targetIsEphemeral: raw.target_is_ephemeral,
+    commitSha: raw.commit_sha ?? void 0
+  };
+}
 var RepoImpl = class {
   constructor(id, options, generateJWT) {
     this.id = id;
```
```diff
@@ -1409,7 +1634,7 @@ var RepoImpl = class {
     const raw = commitDiffResponseSchema.parse(await response.json());
     return transformCommitDiffResult(raw);
   }
-  async pullUpstream(options) {
+  async pullUpstream(options = {}) {
     const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS);
     const jwt = await this.generateJWT(this.id, {
       permissions: ["git:write"],
```
```diff
@@ -1425,6 +1650,34 @@ var RepoImpl = class {
     }
     return;
   }
+  async createBranch(options) {
+    const baseBranch = options?.baseBranch?.trim();
+    if (!baseBranch) {
+      throw new Error("createBranch baseBranch is required");
+    }
+    const targetBranch = options?.targetBranch?.trim();
+    if (!targetBranch) {
+      throw new Error("createBranch targetBranch is required");
+    }
+    const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS);
+    const jwt = await this.generateJWT(this.id, {
+      permissions: ["git:write"],
+      ttl
+    });
+    const body = {
+      base_branch: baseBranch,
+      target_branch: targetBranch
+    };
+    if (options.baseIsEphemeral === true) {
+      body.base_is_ephemeral = true;
+    }
+    if (options.targetIsEphemeral === true) {
+      body.target_is_ephemeral = true;
+    }
+    const response = await this.api.post({ path: "repos/branches/create", body }, jwt);
+    const raw = createBranchResponseSchema.parse(await response.json());
+    return transformCreateBranchResult(raw);
+  }
   async restoreCommit(options) {
     const targetBranch = options?.targetBranch?.trim();
     if (!targetBranch) {
```
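`createBranch` is new on `RepoImpl`: it validates both branch names, mints a `git:write` JWT, and POSTs snake_case fields to `repos/branches/create`. Assuming a repo handle from this package, usage would look roughly like this — the interface below is inferred from the diff, not the package's published typings:

```ts
// Hypothetical shapes inferred from this diff; transformCreateBranchResult
// maps the snake_case wire fields onto these camelCase ones.
interface CreateBranchResult {
  message: string;
  targetBranch: string;
  targetIsEphemeral: boolean;
  commitSha?: string; // undefined when the server reports commit_sha as null
}

interface RepoLike {
  createBranch(options: {
    baseBranch: string;
    targetBranch: string;
    baseIsEphemeral?: boolean;
    targetIsEphemeral?: boolean;
    ttl?: number;
  }): Promise<CreateBranchResult>;
}

async function demo(repo: RepoLike): Promise<void> {
  // Branch names here are illustrative; both are required and trimmed.
  const result = await repo.createBranch({
    baseBranch: "main",
    targetBranch: "feature/login",
    targetIsEphemeral: true
  });
  console.log(result.targetBranch, result.targetIsEphemeral, result.commitSha);
}
```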
```diff
@@ -1513,6 +1766,25 @@ var RepoImpl = class {
       transport
     });
   }
+  async createCommitFromDiff(options) {
+    const version = this.options.apiVersion ?? API_VERSION;
+    const baseUrl = this.options.apiBaseUrl ?? API_BASE_URL;
+    const transport = new FetchDiffCommitTransport({ baseUrl, version });
+    const ttl = resolveCommitTtlSeconds(options);
+    const requestOptions = {
+      ...options,
+      ttl
+    };
+    const getAuthToken = () => this.generateJWT(this.id, {
+      permissions: ["git:write"],
+      ttl
+    });
+    return sendCommitFromDiff({
+      options: requestOptions,
+      getAuthToken,
+      transport
+    });
+  }
 };
 var GitStorage = class _GitStorage {
   static overrides = {};
```
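`createCommitFromDiff` rounds out the release: it streams a unified diff to the new `repos/diff-commit` endpoint via `FetchDiffCommitTransport` and resolves to the same commit result shape as `createCommit`. A hedged usage sketch, with the option shape inferred from `normalizeDiffCommitOptions` above and an illustrative patch:

```ts
// Hypothetical call shape for createCommitFromDiff; the interface and patch
// text are illustrative, not the package's published typings.
interface DiffCommitRepoLike {
  createCommitFromDiff(options: {
    targetBranch: string;
    commitMessage: string;
    author: { name: string; email: string };
    diff: string | Uint8Array;
    baseBranch?: string;
    expectedHeadSha?: string;
    ephemeral?: boolean;
    ephemeralBase?: boolean;
  }): Promise<{ commitSha: string; refUpdate: unknown }>;
}

async function applyPatch(repo: DiffCommitRepoLike) {
  const patch = [
    "diff --git a/README.md b/README.md",
    "--- a/README.md",
    "+++ b/README.md",
    "@@ -1 +1 @@",
    "-old line",
    "+new line",
    ""
  ].join("\n");
  return repo.createCommitFromDiff({
    // "refs/heads/" is stripped by normalizeBranchName2; any other
    // "refs/..." prefix throws.
    targetBranch: "refs/heads/feature/login",
    commitMessage: "Apply generated patch",
    author: { name: "Dev Bot", email: "bot@example.com" },
    diff: patch
  });
}
```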