@pierre/storage 0.2.0 → 0.2.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +37 -0
- package/dist/index.cjs +583 -311
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +47 -2
- package/dist/index.d.ts +47 -2
- package/dist/index.js +583 -311
- package/dist/index.js.map +1 -1
- package/package.json +38 -39
- package/src/commit-pack.ts +128 -0
- package/src/commit.ts +18 -362
- package/src/diff-commit.ts +300 -0
- package/src/index.ts +72 -1
- package/src/schemas.ts +8 -0
- package/src/stream-utils.ts +255 -0
- package/src/types.ts +36 -1
package/dist/index.cjs
CHANGED
```diff
@@ -110,6 +110,12 @@ var commitDiffResponseSchema = zod.z.object({
   files: zod.z.array(diffFileRawSchema),
   filtered_files: zod.z.array(filteredFileRawSchema)
 });
+var createBranchResponseSchema = zod.z.object({
+  message: zod.z.string(),
+  target_branch: zod.z.string(),
+  target_is_ephemeral: zod.z.boolean(),
+  commit_sha: zod.z.string().nullable().optional()
+});
 var refUpdateResultSchema = zod.z.object({
   branch: zod.z.string(),
   old_sha: zod.z.string(),
```
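This first hunk adds `createBranchResponseSchema`, the zod validator for the new branch-creation endpoint's response. A minimal sketch of the wire shape it accepts — only the field names and types come from the schema above; the payload values are illustrative:

```ts
import { z } from "zod";

// Mirror of the bundled createBranchResponseSchema above.
const createBranchResponseSchema = z.object({
  message: z.string(),
  target_branch: z.string(),
  target_is_ephemeral: z.boolean(),
  commit_sha: z.string().nullable().optional()
});

// Illustrative payload; the real server response is only inferred from the schema.
const parsed = createBranchResponseSchema.parse({
  message: "branch created",
  target_branch: "feature/login",
  target_is_ephemeral: false,
  commit_sha: null
});
console.log(parsed.target_branch); // "feature/login"
```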
```diff
@@ -154,11 +160,298 @@ var errorEnvelopeSchema = zod.z.object({
   error: zod.z.string()
 });
 
+// src/commit-pack.ts
+function buildCommitResult(ack) {
+  const refUpdate = toRefUpdate(ack.result);
+  if (!ack.result.success) {
+    throw new RefUpdateError(
+      ack.result.message ?? `Commit failed with status ${ack.result.status}`,
+      {
+        status: ack.result.status,
+        message: ack.result.message,
+        refUpdate
+      }
+    );
+  }
+  return {
+    commitSha: ack.commit.commit_sha,
+    treeSha: ack.commit.tree_sha,
+    targetBranch: ack.commit.target_branch,
+    packBytes: ack.commit.pack_bytes,
+    blobCount: ack.commit.blob_count,
+    refUpdate
+  };
+}
+function toRefUpdate(result) {
+  return {
+    branch: result.branch,
+    oldSha: result.old_sha,
+    newSha: result.new_sha
+  };
+}
+async function parseCommitPackError(response, fallbackMessage) {
+  const cloned = response.clone();
+  let jsonBody;
+  try {
+    jsonBody = await cloned.json();
+  } catch {
+    jsonBody = void 0;
+  }
+  let textBody;
+  if (jsonBody === void 0) {
+    try {
+      textBody = await response.text();
+    } catch {
+      textBody = void 0;
+    }
+  }
+  const defaultStatus = (() => {
+    const inferred = inferRefUpdateReason(String(response.status));
+    return inferred === "unknown" ? "failed" : inferred;
+  })();
+  let statusLabel = defaultStatus;
+  let refUpdate;
+  let message;
+  if (jsonBody !== void 0) {
+    const parsedResponse = commitPackResponseSchema.safeParse(jsonBody);
+    if (parsedResponse.success) {
+      const result = parsedResponse.data.result;
+      if (typeof result.status === "string" && result.status.trim() !== "") {
+        statusLabel = result.status.trim();
+      }
+      refUpdate = toPartialRefUpdateFields(result.branch, result.old_sha, result.new_sha);
+      if (typeof result.message === "string" && result.message.trim() !== "") {
+        message = result.message.trim();
+      }
+    }
+    if (!message) {
+      const parsedError = errorEnvelopeSchema.safeParse(jsonBody);
+      if (parsedError.success) {
+        const trimmed = parsedError.data.error.trim();
+        if (trimmed) {
+          message = trimmed;
+        }
+      }
+    }
+  }
+  if (!message && typeof jsonBody === "string" && jsonBody.trim() !== "") {
+    message = jsonBody.trim();
+  }
+  if (!message && textBody && textBody.trim() !== "") {
+    message = textBody.trim();
+  }
+  return {
+    statusMessage: message ?? fallbackMessage,
+    statusLabel,
+    refUpdate
+  };
+}
+function toPartialRefUpdateFields(branch, oldSha, newSha) {
+  const refUpdate = {};
+  if (typeof branch === "string" && branch.trim() !== "") {
+    refUpdate.branch = branch.trim();
+  }
+  if (typeof oldSha === "string" && oldSha.trim() !== "") {
+    refUpdate.oldSha = oldSha.trim();
+  }
+  if (typeof newSha === "string" && newSha.trim() !== "") {
+    refUpdate.newSha = newSha.trim();
+  }
+  return Object.keys(refUpdate).length > 0 ? refUpdate : void 0;
+}
+
+// src/stream-utils.ts
+var BufferCtor = globalThis.Buffer;
+var MAX_CHUNK_BYTES = 4 * 1024 * 1024;
+async function* chunkify(source) {
+  let pending = null;
+  let produced = false;
+  for await (const value of source) {
+    const bytes = value;
+    if (pending && pending.byteLength === MAX_CHUNK_BYTES) {
+      yield { chunk: pending, eof: false };
+      produced = true;
+      pending = null;
+    }
+    const merged = pending ? concatChunks(pending, bytes) : bytes;
+    pending = null;
+    let cursor = merged;
+    while (cursor.byteLength > MAX_CHUNK_BYTES) {
+      const chunk = cursor.slice(0, MAX_CHUNK_BYTES);
+      cursor = cursor.slice(MAX_CHUNK_BYTES);
+      yield { chunk, eof: false };
+      produced = true;
+    }
+    pending = cursor;
+  }
+  if (pending) {
+    yield { chunk: pending, eof: true };
+    produced = true;
+  }
+  if (!produced) {
+    yield { chunk: new Uint8Array(0), eof: true };
+  }
+}
+async function* toAsyncIterable(source) {
+  if (typeof source === "string") {
+    yield new TextEncoder().encode(source);
+    return;
+  }
+  if (source instanceof Uint8Array) {
+    yield source;
+    return;
+  }
+  if (source instanceof ArrayBuffer) {
+    yield new Uint8Array(source);
+    return;
+  }
+  if (ArrayBuffer.isView(source)) {
+    yield new Uint8Array(source.buffer, source.byteOffset, source.byteLength);
+    return;
+  }
+  if (isBlobLike(source)) {
+    const stream = source.stream();
+    if (isAsyncIterable(stream)) {
+      for await (const chunk of stream) {
+        yield ensureUint8Array(chunk);
+      }
+      return;
+    }
+    if (isReadableStreamLike(stream)) {
+      yield* readReadableStream(stream);
+      return;
+    }
+  }
+  if (isReadableStreamLike(source)) {
+    yield* readReadableStream(source);
+    return;
+  }
+  if (isAsyncIterable(source)) {
+    for await (const chunk of source) {
+      yield ensureUint8Array(chunk);
+    }
+    return;
+  }
+  if (isIterable(source)) {
+    for (const chunk of source) {
+      yield ensureUint8Array(chunk);
+    }
+    return;
+  }
+  throw new Error("Unsupported content source; expected binary data");
+}
+function base64Encode(bytes) {
+  if (BufferCtor) {
+    return BufferCtor.from(bytes).toString("base64");
+  }
+  let binary = "";
+  for (let i = 0; i < bytes.byteLength; i++) {
+    binary += String.fromCharCode(bytes[i]);
+  }
+  const btoaFn = globalThis.btoa;
+  if (typeof btoaFn === "function") {
+    return btoaFn(binary);
+  }
+  throw new Error("Base64 encoding is not supported in this environment");
+}
+function requiresDuplex(body) {
+  if (!body || typeof body !== "object") {
+    return false;
+  }
+  if (typeof body[Symbol.asyncIterator] === "function") {
+    return true;
+  }
+  const readableStreamCtor = globalThis.ReadableStream;
+  if (readableStreamCtor && body instanceof readableStreamCtor) {
+    return true;
+  }
+  return false;
+}
+function toRequestBody(iterable) {
+  const readableStreamCtor = globalThis.ReadableStream;
+  if (typeof readableStreamCtor === "function") {
+    const iterator = iterable[Symbol.asyncIterator]();
+    return new readableStreamCtor({
+      async pull(controller) {
+        const { value, done } = await iterator.next();
+        if (done) {
+          controller.close();
+          return;
+        }
+        controller.enqueue(value);
+      },
+      async cancel(reason) {
+        if (typeof iterator.return === "function") {
+          await iterator.return(reason);
+        }
+      }
+    });
+  }
+  return iterable;
+}
+async function* readReadableStream(stream) {
+  const reader = stream.getReader();
+  try {
+    while (true) {
+      const { value, done } = await reader.read();
+      if (done) {
+        break;
+      }
+      if (value !== void 0) {
+        yield ensureUint8Array(value);
+      }
+    }
+  } finally {
+    reader.releaseLock?.();
+  }
+}
+function ensureUint8Array(value) {
+  if (value instanceof Uint8Array) {
+    return value;
+  }
+  if (value instanceof ArrayBuffer) {
+    return new Uint8Array(value);
+  }
+  if (ArrayBuffer.isView(value)) {
+    return new Uint8Array(value.buffer, value.byteOffset, value.byteLength);
+  }
+  if (typeof value === "string") {
+    return new TextEncoder().encode(value);
+  }
+  if (BufferCtor && BufferCtor.isBuffer(value)) {
+    return value;
+  }
+  throw new Error("Unsupported chunk type; expected binary data");
+}
+function isBlobLike(value) {
+  return typeof value === "object" && value !== null && typeof value.stream === "function";
+}
+function isReadableStreamLike(value) {
+  return typeof value === "object" && value !== null && typeof value.getReader === "function";
+}
+function isAsyncIterable(value) {
+  return typeof value === "object" && value !== null && Symbol.asyncIterator in value;
+}
+function isIterable(value) {
+  return typeof value === "object" && value !== null && Symbol.iterator in value;
+}
+function concatChunks(a, b) {
+  if (a.byteLength === 0) {
+    return b;
+  }
+  if (b.byteLength === 0) {
+    return a;
+  }
+  const merged = new Uint8Array(a.byteLength + b.byteLength);
+  merged.set(a, 0);
+  merged.set(b, a.byteLength);
+  return merged;
+}
+
 // src/commit.ts
-var MAX_CHUNK_BYTES = 4 * 1024 * 1024;
 var DEFAULT_TTL_SECONDS = 60 * 60;
 var HEADS_REF_PREFIX = "refs/heads/";
-var BufferCtor = globalThis.Buffer;
+var BufferCtor2 = globalThis.Buffer;
 var CommitBuilderImpl = class {
   options;
   getAuthToken;
```
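Everything in this added block previously lived inline in the bundled `src/commit.ts` section; 0.2.2 extracts it into shared `src/stream-utils.ts` and `src/commit-pack.ts` modules so the new diff-commit path can reuse the same streaming and error-parsing code. The contract worth knowing is `chunkify`: it re-slices any byte source into segments of at most `MAX_CHUNK_BYTES` (4 MiB) and tags the final segment with `eof: true`. A minimal sketch of that contract — the import path is hypothetical, since these helpers are bundled internals rather than documented exports:

```ts
// Hypothetical import: chunkify/toAsyncIterable are bundled internals, not documented exports.
import { chunkify, toAsyncIterable } from "@pierre/storage/src/stream-utils";

async function demo(): Promise<void> {
  // A 5 MiB source should yield a 4 MiB segment (eof: false)
  // followed by a 1 MiB segment (eof: true).
  const source = toAsyncIterable(new Uint8Array(5 * 1024 * 1024));
  for await (const segment of chunkify(source)) {
    console.log(segment.chunk.byteLength, segment.eof);
  }
}
void demo();
```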
```diff
@@ -221,8 +514,8 @@ var CommitBuilderImpl = class {
     let data;
     if (normalizedEncoding === "utf8") {
       data = new TextEncoder().encode(contents);
-    } else if (BufferCtor) {
-      data = BufferCtor.from(
+    } else if (BufferCtor2) {
+      data = BufferCtor2.from(
         contents,
         normalizedEncoding
       );
```
```diff
@@ -330,257 +623,49 @@ var FetchCommitTransport = class {
         Accept: "application/json"
       },
       body,
-      signal: request.signal
-    };
-    if (requiresDuplex(body)) {
-      init.duplex = "half";
-    }
-    const response = await fetch(this.url, init);
-    if (!response.ok) {
-      const
-
-
-
-
-
-
-
-
-
-    }
-
-
-  if (typeof readableStreamCtor === "function") {
-    const iterator = iterable[Symbol.asyncIterator]();
-    return new readableStreamCtor({
-      async pull(controller) {
-        const { value, done } = await iterator.next();
-        if (done) {
-          controller.close();
-          return;
-        }
-        controller.enqueue(value);
-      },
-      async cancel(reason) {
-        if (typeof iterator.return === "function") {
-          await iterator.return(reason);
-        }
-      }
-    });
-  }
-  return iterable;
-}
-function buildMessageIterable(metadata, blobs) {
-  const encoder = new TextEncoder();
-  return {
-    async *[Symbol.asyncIterator]() {
-      yield encoder.encode(`${JSON.stringify({ metadata })}
-`);
-      for (const blob of blobs) {
-        for await (const segment of blob.chunks) {
-          const payload = {
-            blob_chunk: {
-              content_id: blob.contentId,
-              data: base64Encode(segment.chunk),
-              eof: segment.eof
-            }
-          };
-          yield encoder.encode(`${JSON.stringify(payload)}
-`);
-        }
-      }
-    }
-  };
-}
-function requiresDuplex(body) {
-  if (!body || typeof body !== "object") {
-    return false;
-  }
-  if (typeof body[Symbol.asyncIterator] === "function") {
-    return true;
-  }
-  const readableStreamCtor = globalThis.ReadableStream;
-  if (readableStreamCtor && body instanceof readableStreamCtor) {
-    return true;
-  }
-  return false;
-}
-function buildCommitResult(ack) {
-  const refUpdate = toRefUpdate(ack.result);
-  if (!ack.result.success) {
-    throw new RefUpdateError(
-      ack.result.message ?? `Commit failed with status ${ack.result.status}`,
-      {
-        status: ack.result.status,
-        message: ack.result.message,
-        refUpdate
-      }
-    );
-  }
-  return {
-    commitSha: ack.commit.commit_sha,
-    treeSha: ack.commit.tree_sha,
-    targetBranch: ack.commit.target_branch,
-    packBytes: ack.commit.pack_bytes,
-    blobCount: ack.commit.blob_count,
-    refUpdate
-  };
-}
-function toRefUpdate(result) {
-  return {
-    branch: result.branch,
-    oldSha: result.old_sha,
-    newSha: result.new_sha
-  };
-}
-async function* chunkify(source) {
-  let pending = null;
-  let produced = false;
-  for await (const value of source) {
-    const bytes = value;
-    if (pending && pending.byteLength === MAX_CHUNK_BYTES) {
-      yield { chunk: pending, eof: false };
-      produced = true;
-      pending = null;
-    }
-    const merged = pending ? concatChunks(pending, bytes) : bytes;
-    pending = null;
-    let cursor = merged;
-    while (cursor.byteLength > MAX_CHUNK_BYTES) {
-      const chunk = cursor.slice(0, MAX_CHUNK_BYTES);
-      cursor = cursor.slice(MAX_CHUNK_BYTES);
-      yield { chunk, eof: false };
-      produced = true;
-    }
-    pending = cursor;
-  }
-  if (pending) {
-    yield { chunk: pending, eof: true };
-    produced = true;
-  }
-  if (!produced) {
-    yield { chunk: new Uint8Array(0), eof: true };
-  }
-}
-async function* toAsyncIterable(source) {
-  if (typeof source === "string") {
-    yield new TextEncoder().encode(source);
-    return;
-  }
-  if (source instanceof Uint8Array) {
-    yield source;
-    return;
-  }
-  if (source instanceof ArrayBuffer) {
-    yield new Uint8Array(source);
-    return;
-  }
-  if (ArrayBuffer.isView(source)) {
-    yield new Uint8Array(source.buffer, source.byteOffset, source.byteLength);
-    return;
-  }
-  if (isBlobLike(source)) {
-    const stream = source.stream();
-    if (isAsyncIterable(stream)) {
-      for await (const chunk of stream) {
-        yield ensureUint8Array(chunk);
-      }
-      return;
-    }
-    if (isReadableStreamLike(stream)) {
-      yield* readReadableStream(stream);
-      return;
-    }
-  }
-  if (isReadableStreamLike(source)) {
-    yield* readReadableStream(source);
-    return;
-  }
-  if (isAsyncIterable(source)) {
-    for await (const chunk of source) {
-      yield ensureUint8Array(chunk);
-    }
-    return;
-  }
-  if (isIterable(source)) {
-    for (const chunk of source) {
-      yield ensureUint8Array(chunk);
-    }
-    return;
-  }
-  throw new Error("Unsupported file source for createCommit");
-}
-async function* readReadableStream(stream) {
-  const reader = stream.getReader();
-  try {
-    while (true) {
-      const { value, done } = await reader.read();
-      if (done) {
-        break;
-      }
-      if (value !== void 0) {
-        yield ensureUint8Array(value);
-      }
-    }
-  } finally {
-    reader.releaseLock?.();
-  }
-}
-function ensureUint8Array(value) {
-  if (value instanceof Uint8Array) {
-    return value;
-  }
-  if (value instanceof ArrayBuffer) {
-    return new Uint8Array(value);
-  }
-  if (ArrayBuffer.isView(value)) {
-    return new Uint8Array(value.buffer, value.byteOffset, value.byteLength);
-  }
-  if (typeof value === "string") {
-    return new TextEncoder().encode(value);
-  }
-  if (BufferCtor && BufferCtor.isBuffer(value)) {
-    return value;
-  }
-  throw new Error("Unsupported chunk type; expected binary data");
-}
-function isBlobLike(value) {
-  return typeof value === "object" && value !== null && typeof value.stream === "function";
-}
-function isReadableStreamLike(value) {
-  return typeof value === "object" && value !== null && typeof value.getReader === "function";
-}
-function isAsyncIterable(value) {
-  return typeof value === "object" && value !== null && Symbol.asyncIterator in value;
-}
-function isIterable(value) {
-  return typeof value === "object" && value !== null && Symbol.iterator in value;
-}
-function concatChunks(a, b) {
-  if (a.byteLength === 0) {
-    return b;
-  }
-  if (b.byteLength === 0) {
-    return a;
-  }
-  const merged = new Uint8Array(a.byteLength + b.byteLength);
-  merged.set(a, 0);
-  merged.set(b, a.byteLength);
-  return merged;
-}
-function base64Encode(bytes) {
-  if (BufferCtor) {
-    return BufferCtor.from(bytes).toString("base64");
-  }
-  let binary = "";
-  for (let i = 0; i < bytes.byteLength; i++) {
-    binary += String.fromCharCode(bytes[i]);
-  }
-  const btoaFn = globalThis.btoa;
-  if (typeof btoaFn === "function") {
-    return btoaFn(binary);
+      signal: request.signal
+    };
+    if (requiresDuplex(body)) {
+      init.duplex = "half";
+    }
+    const response = await fetch(this.url, init);
+    if (!response.ok) {
+      const fallbackMessage = `createCommit request failed (${response.status} ${response.statusText})`;
+      const { statusMessage, statusLabel, refUpdate } = await parseCommitPackError(
+        response,
+        fallbackMessage
+      );
+      throw new RefUpdateError(statusMessage, {
+        status: statusLabel,
+        message: statusMessage,
+        refUpdate
+      });
+    }
+    const ack = commitPackAckSchema.parse(await response.json());
+    return ack;
   }
-
+};
+function buildMessageIterable(metadata, blobs) {
+  const encoder = new TextEncoder();
+  return {
+    async *[Symbol.asyncIterator]() {
+      yield encoder.encode(`${JSON.stringify({ metadata })}
+`);
+      for (const blob of blobs) {
+        for await (const segment of blob.chunks) {
+          const payload = {
+            blob_chunk: {
+              content_id: blob.contentId,
+              data: base64Encode(segment.chunk),
+              eof: segment.eof
+            }
+          };
+          yield encoder.encode(`${JSON.stringify(payload)}
+`);
+        }
+      }
+    }
+  };
 }
 function randomContentId() {
   const cryptoObj = globalThis.crypto;
```
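`FetchCommitTransport.send()` drops its private copies of the streaming helpers (now in `stream-utils` above), and its error path now goes through `parseCommitPackError`, so an HTTP failure surfaces as a `RefUpdateError` whose status label is parsed from the response body or inferred from the HTTP status. A sketch of how a caller might handle that — it assumes `RefUpdateError` is a public export and that the `{ status, message, refUpdate }` constructor payload is exposed on the instance:

```ts
import { RefUpdateError } from "@pierre/storage"; // export assumed from the identifiers above

async function sendOrReport(commit: { send(): Promise<unknown> }): Promise<unknown> {
  try {
    return await commit.send();
  } catch (err) {
    if (err instanceof RefUpdateError) {
      // Assumes the constructor payload lands on the instance as properties.
      const { status, refUpdate } = err as RefUpdateError & {
        status?: string;
        refUpdate?: { branch?: string; oldSha?: string; newSha?: string };
      };
      console.error(`commit rejected (${status}):`, err.message, refUpdate);
    }
    throw err;
  }
}
```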
```diff
@@ -657,76 +742,208 @@ function resolveCommitTtlSeconds(options) {
   }
   return DEFAULT_TTL_SECONDS;
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+// src/diff-commit.ts
+var DiffCommitExecutor = class {
+  options;
+  getAuthToken;
+  transport;
+  diffFactory;
+  sent = false;
+  constructor(deps) {
+    this.options = normalizeDiffCommitOptions(deps.options);
+    this.getAuthToken = deps.getAuthToken;
+    this.transport = deps.transport;
+    const trimmedMessage = this.options.commitMessage?.trim();
+    const trimmedAuthorName = this.options.author?.name?.trim();
+    const trimmedAuthorEmail = this.options.author?.email?.trim();
+    if (!trimmedMessage) {
+      throw new Error("createCommitFromDiff commitMessage is required");
     }
-
-
-    const inferred = inferRefUpdateReason(String(response.status));
-    return inferred === "unknown" ? "failed" : inferred;
-  })();
-  let statusLabel = defaultStatus;
-  let refUpdate;
-  let message;
-  if (jsonBody !== void 0) {
-    const parsedResponse = commitPackResponseSchema.safeParse(jsonBody);
-    if (parsedResponse.success) {
-      const result = parsedResponse.data.result;
-      if (typeof result.status === "string" && result.status.trim() !== "") {
-        statusLabel = result.status.trim();
-      }
-      refUpdate = toPartialRefUpdateFields(result.branch, result.old_sha, result.new_sha);
-      if (typeof result.message === "string" && result.message.trim() !== "") {
-        message = result.message.trim();
-      }
+    if (!trimmedAuthorName || !trimmedAuthorEmail) {
+      throw new Error("createCommitFromDiff author name and email are required");
     }
-
-
-
-
-
-
+    this.options.commitMessage = trimmedMessage;
+    this.options.author = {
+      name: trimmedAuthorName,
+      email: trimmedAuthorEmail
+    };
+    if (typeof this.options.expectedHeadSha === "string") {
+      this.options.expectedHeadSha = this.options.expectedHeadSha.trim();
+    }
+    if (typeof this.options.baseBranch === "string") {
+      const trimmedBase = this.options.baseBranch.trim();
+      if (trimmedBase === "") {
+        delete this.options.baseBranch;
+      } else {
+        if (trimmedBase.startsWith("refs/")) {
+          throw new Error("createCommitFromDiff baseBranch must not include refs/ prefix");
         }
+        this.options.baseBranch = trimmedBase;
       }
     }
+    if (this.options.ephemeralBase && !this.options.baseBranch) {
+      throw new Error("createCommitFromDiff ephemeralBase requires baseBranch");
+    }
+    this.diffFactory = () => toAsyncIterable(this.options.initialDiff);
   }
-
-
+  async send() {
+    this.ensureNotSent();
+    this.sent = true;
+    const metadata = this.buildMetadata();
+    const diffIterable = chunkify(this.diffFactory());
+    const authorization = await this.getAuthToken();
+    const ack = await this.transport.send({
+      authorization,
+      signal: this.options.signal,
+      metadata,
+      diffChunks: diffIterable
+    });
+    return buildCommitResult(ack);
   }
-
-
+  buildMetadata() {
+    const metadata = {
+      target_branch: this.options.targetBranch,
+      commit_message: this.options.commitMessage,
+      author: {
+        name: this.options.author.name,
+        email: this.options.author.email
+      }
+    };
+    if (this.options.expectedHeadSha) {
+      metadata.expected_head_sha = this.options.expectedHeadSha;
+    }
+    if (this.options.baseBranch) {
+      metadata.base_branch = this.options.baseBranch;
+    }
+    if (this.options.committer) {
+      metadata.committer = {
+        name: this.options.committer.name,
+        email: this.options.committer.email
+      };
+    }
+    if (this.options.ephemeral) {
+      metadata.ephemeral = true;
+    }
+    if (this.options.ephemeralBase) {
+      metadata.ephemeral_base = true;
+    }
+    return metadata;
+  }
+  ensureNotSent() {
+    if (this.sent) {
+      throw new Error("createCommitFromDiff cannot be reused after send()");
+    }
+  }
+};
+var FetchDiffCommitTransport = class {
+  url;
+  constructor(config) {
+    const trimmedBase = config.baseUrl.replace(/\/+$/, "");
+    this.url = `${trimmedBase}/api/v${config.version}/repos/diff-commit`;
+  }
+  async send(request) {
+    const bodyIterable = buildMessageIterable2(request.metadata, request.diffChunks);
+    const body = toRequestBody(bodyIterable);
+    const init = {
+      method: "POST",
+      headers: {
+        Authorization: `Bearer ${request.authorization}`,
+        "Content-Type": "application/x-ndjson",
+        Accept: "application/json"
+      },
+      body,
+      signal: request.signal
+    };
+    if (requiresDuplex(body)) {
+      init.duplex = "half";
+    }
+    const response = await fetch(this.url, init);
+    if (!response.ok) {
+      const fallbackMessage = `createCommitFromDiff request failed (${response.status} ${response.statusText})`;
+      const { statusMessage, statusLabel, refUpdate } = await parseCommitPackError(
+        response,
+        fallbackMessage
+      );
+      throw new RefUpdateError(statusMessage, {
+        status: statusLabel,
+        message: statusMessage,
+        refUpdate
+      });
+    }
+    return commitPackAckSchema.parse(await response.json());
   }
+};
+function buildMessageIterable2(metadata, diffChunks) {
+  const encoder = new TextEncoder();
   return {
-
-
-
+    async *[Symbol.asyncIterator]() {
+      yield encoder.encode(`${JSON.stringify({ metadata })}
+`);
+      for await (const segment of diffChunks) {
+        const payload = {
+          diff_chunk: {
+            data: base64Encode(segment.chunk),
+            eof: segment.eof
+          }
+        };
+        yield encoder.encode(`${JSON.stringify(payload)}
+`);
+      }
+    }
   };
 }
-function
-
-
-
+function normalizeDiffCommitOptions(options) {
+  if (!options || typeof options !== "object") {
+    throw new Error("createCommitFromDiff options are required");
+  }
+  if (options.diff === void 0 || options.diff === null) {
+    throw new Error("createCommitFromDiff diff is required");
+  }
+  const targetBranch = normalizeBranchName2(options.targetBranch);
+  let committer;
+  if (options.committer) {
+    const name = options.committer.name?.trim();
+    const email = options.committer.email?.trim();
+    if (!name || !email) {
+      throw new Error("createCommitFromDiff committer name and email are required when provided");
+    }
+    committer = { name, email };
   }
-
-
+  return {
+    targetBranch,
+    commitMessage: options.commitMessage,
+    expectedHeadSha: options.expectedHeadSha,
+    baseBranch: options.baseBranch,
+    ephemeral: options.ephemeral === true,
+    ephemeralBase: options.ephemeralBase === true,
+    author: options.author,
+    committer,
+    signal: options.signal,
+    ttl: options.ttl,
+    initialDiff: options.diff
+  };
+}
+function normalizeBranchName2(value) {
+  const trimmed = value?.trim();
+  if (!trimmed) {
+    throw new Error("createCommitFromDiff targetBranch is required");
   }
-  if (
-
+  if (trimmed.startsWith("refs/heads/")) {
+    const branch = trimmed.slice("refs/heads/".length).trim();
+    if (!branch) {
+      throw new Error("createCommitFromDiff targetBranch must include a branch name");
+    }
+    return branch;
   }
-
+  if (trimmed.startsWith("refs/")) {
+    throw new Error("createCommitFromDiff targetBranch must not include refs/ prefix");
+  }
+  return trimmed;
+}
+async function sendCommitFromDiff(deps) {
+  const executor = new DiffCommitExecutor(deps);
+  return executor.send();
 }
 
 // src/fetch.ts
```
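The new `src/diff-commit.ts` mirrors the commit-pack flow for a single patch: `DiffCommitExecutor` validates options, `FetchDiffCommitTransport` streams the body to `/api/v{version}/repos/diff-commit`, and `buildMessageIterable2` frames it as NDJSON — one `metadata` line followed by base64 `diff_chunk` lines, the last flagged `eof: true`. A reconstruction of that wire format (all values illustrative; `base64Encode` in the bundle prefers `Buffer` and falls back to `btoa`):

```ts
// Illustrative reconstruction of the NDJSON frames buildMessageIterable2 emits.
const metadata = {
  target_branch: "feature/login",
  commit_message: "Apply generated patch",
  author: { name: "Dev", email: "dev@example.com" }
};
const diffText = "diff --git a/README.md b/README.md\n..."; // placeholder patch text
const frames = [
  JSON.stringify({ metadata }),
  JSON.stringify({
    diff_chunk: {
      data: Buffer.from(diffText).toString("base64"),
      eof: true // last (here: only) chunk of the diff
    }
  })
];
// POSTed with Content-Type: application/x-ndjson to .../repos/diff-commit.
const body = frames.join("\n") + "\n";
console.log(body);
```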
```diff
@@ -1280,6 +1497,14 @@ function transformCommitDiffResult(raw) {
     filteredFiles: raw.filtered_files.map(transformFilteredFile)
   };
 }
+function transformCreateBranchResult(raw) {
+  return {
+    message: raw.message,
+    targetBranch: raw.target_branch,
+    targetIsEphemeral: raw.target_is_ephemeral,
+    commitSha: raw.commit_sha ?? void 0
+  };
+}
 var RepoImpl = class {
   constructor(id, options, generateJWT) {
     this.id = id;
```
```diff
@@ -1415,7 +1640,7 @@ var RepoImpl = class {
     const raw = commitDiffResponseSchema.parse(await response.json());
     return transformCommitDiffResult(raw);
   }
-  async pullUpstream(options) {
+  async pullUpstream(options = {}) {
     const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS);
     const jwt = await this.generateJWT(this.id, {
       permissions: ["git:write"],
```
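A small but user-visible fix: `pullUpstream(options)` becomes `pullUpstream(options = {})`, so the method can be called with no arguments without handing `undefined` to `resolveInvocationTtlSeconds`. A sketch, with the repo handle typed structurally since its concrete type is not shown in this diff (the `ttl` option is an assumption drawn from `resolveInvocationTtlSeconds`):

```ts
// `repo` is a repo handle from this package; its concrete type is not visible here.
async function pullBoth(repo: {
  pullUpstream(options?: { ttl?: number }): Promise<void>;
}): Promise<void> {
  await repo.pullUpstream();             // now valid: options defaults to {}
  await repo.pullUpstream({ ttl: 300 }); // ttl option assumed from resolveInvocationTtlSeconds
}
```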
```diff
@@ -1431,6 +1656,34 @@ var RepoImpl = class {
     }
     return;
   }
+  async createBranch(options) {
+    const baseBranch = options?.baseBranch?.trim();
+    if (!baseBranch) {
+      throw new Error("createBranch baseBranch is required");
+    }
+    const targetBranch = options?.targetBranch?.trim();
+    if (!targetBranch) {
+      throw new Error("createBranch targetBranch is required");
+    }
+    const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS);
+    const jwt = await this.generateJWT(this.id, {
+      permissions: ["git:write"],
+      ttl
+    });
+    const body = {
+      base_branch: baseBranch,
+      target_branch: targetBranch
+    };
+    if (options.baseIsEphemeral === true) {
+      body.base_is_ephemeral = true;
+    }
+    if (options.targetIsEphemeral === true) {
+      body.target_is_ephemeral = true;
+    }
+    const response = await this.api.post({ path: "repos/branches/create", body }, jwt);
+    const raw = createBranchResponseSchema.parse(await response.json());
+    return transformCreateBranchResult(raw);
+  }
   async restoreCommit(options) {
     const targetBranch = options?.targetBranch?.trim();
     if (!targetBranch) {
```
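`createBranch` is new public API surface on the repo object. A usage sketch based strictly on the validation, request body, and response transform above; the repo handle is typed structurally because its concrete type is not shown in this diff:

```ts
// Result shape follows transformCreateBranchResult above.
interface CreateBranchResult {
  message: string;
  targetBranch: string;
  targetIsEphemeral: boolean;
  commitSha?: string;
}

async function createFeatureBranch(repo: {
  createBranch(options: {
    baseBranch: string;
    targetBranch: string;
    baseIsEphemeral?: boolean;   // forwarded as base_is_ephemeral
    targetIsEphemeral?: boolean; // forwarded as target_is_ephemeral
  }): Promise<CreateBranchResult>;
}): Promise<void> {
  const result = await repo.createBranch({
    baseBranch: "main",
    targetBranch: "feature/login",
    targetIsEphemeral: true
  });
  console.log(result.targetBranch, result.targetIsEphemeral, result.commitSha);
}
```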
```diff
@@ -1519,6 +1772,25 @@ var RepoImpl = class {
       transport
     });
   }
+  async createCommitFromDiff(options) {
+    const version = this.options.apiVersion ?? API_VERSION;
+    const baseUrl = this.options.apiBaseUrl ?? API_BASE_URL;
+    const transport = new FetchDiffCommitTransport({ baseUrl, version });
+    const ttl = resolveCommitTtlSeconds(options);
+    const requestOptions = {
+      ...options,
+      ttl
+    };
+    const getAuthToken = () => this.generateJWT(this.id, {
+      permissions: ["git:write"],
+      ttl
+    });
+    return sendCommitFromDiff({
+      options: requestOptions,
+      getAuthToken,
+      transport
+    });
+  }
 };
 var GitStorage = class _GitStorage {
   static overrides = {};
```
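`createCommitFromDiff` ties it all together: one call streams a patch to the diff-commit endpoint and resolves to the same commit result shape as the pack-based path. A usage sketch — options mirror `normalizeDiffCommitOptions` and the result mirrors `buildCommitResult` above; the repo handle and field types are structural assumptions, since only the bundled JavaScript is visible in this diff:

```ts
// Result shape follows buildCommitResult above; field types are assumed.
interface DiffCommitResult {
  commitSha: string;
  treeSha: string;
  targetBranch: string;
  packBytes: number;
  blobCount: number;
  refUpdate: { branch: string; oldSha: string; newSha: string };
}

async function applyPatch(repo: {
  createCommitFromDiff(options: {
    targetBranch: string;
    commitMessage: string;
    author: { name: string; email: string };
    diff: string | Uint8Array | AsyncIterable<Uint8Array>;
    baseBranch?: string;      // optional; must not carry a refs/ prefix
    expectedHeadSha?: string; // optional optimistic-concurrency guard
    ephemeral?: boolean;
    ephemeralBase?: boolean;  // requires baseBranch
  }): Promise<DiffCommitResult>;
}): Promise<void> {
  const result = await repo.createCommitFromDiff({
    targetBranch: "feature/login",
    commitMessage: "Apply generated patch",
    author: { name: "Dev", email: "dev@example.com" },
    baseBranch: "main",
    diff: "diff --git a/file.txt b/file.txt\n..." // placeholder patch text
  });
  console.log(result.commitSha, result.refUpdate.newSha);
}
```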