@pierre/storage 0.2.0 → 0.2.1
- package/README.md +28 -0
- package/dist/index.cjs +560 -330
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +15 -1
- package/dist/index.d.ts +15 -1
- package/dist/index.js +560 -330
- package/dist/index.js.map +1 -1
- package/package.json +38 -39
- package/src/commit-pack.ts +128 -0
- package/src/commit.ts +18 -362
- package/src/diff-commit.ts +300 -0
- package/src/index.ts +25 -0
- package/src/stream-utils.ts +255 -0
- package/src/types.ts +16 -0
package/package.json
CHANGED
@@ -1,40 +1,39 @@
 {
-  [old lines 2-39 removed; their contents were not preserved in this diff rendering]
-}
+  "name": "@pierre/storage",
+  "version": "0.2.1",
+  "description": "Pierre Git Storage SDK",
+  "license": "MIT",
+  "type": "module",
+  "main": "./dist/index.cjs",
+  "module": "./dist/index.js",
+  "types": "./dist/index.d.ts",
+  "exports": {
+    ".": {
+      "types": "./dist/index.d.ts",
+      "import": "./dist/index.js",
+      "require": "./dist/index.cjs",
+      "default": "./dist/index.js"
+    }
+  },
+  "files": [
+    "dist",
+    "src"
+  ],
+  "dependencies": {
+    "jose": "^5.10.0",
+    "snakecase-keys": "^9.0.2",
+    "zod": "^3.23.8"
+  },
+  "devDependencies": {
+    "tsup": "8.5.0",
+    "typescript": "5.8.3",
+    "vitest": "3.2.4"
+  },
+  "publishConfig": {
+    "access": "public"
+  },
+  "scripts": {
+    "build": "tsup",
+    "dev": "tsup --watch"
+  }
+}
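The rewritten manifest is a conditional-exports dual package: the same specifier resolves to the ESM build for import and the CommonJS build for require, with "types" listed first for TypeScript. A minimal consumption sketch (the namespace import is deliberate, since this diff does not show which names src/index.ts exports):

// ESM resolution: exports["."].import -> ./dist/index.js
import * as storage from '@pierre/storage';

// CJS resolution: exports["."].require -> ./dist/index.cjs
// const storage = require('@pierre/storage');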
package/src/commit-pack.ts
ADDED
@@ -0,0 +1,128 @@
+import { inferRefUpdateReason, RefUpdateError } from './errors';
+import type { CommitPackAckRaw } from './schemas';
+import { commitPackResponseSchema, errorEnvelopeSchema } from './schemas';
+import type { CommitResult, RefUpdate } from './types';
+
+export type CommitPackAck = CommitPackAckRaw;
+
+export function buildCommitResult(ack: CommitPackAckRaw): CommitResult {
+  const refUpdate = toRefUpdate(ack.result);
+  if (!ack.result.success) {
+    throw new RefUpdateError(
+      ack.result.message ?? `Commit failed with status ${ack.result.status}`,
+      {
+        status: ack.result.status,
+        message: ack.result.message,
+        refUpdate,
+      },
+    );
+  }
+  return {
+    commitSha: ack.commit.commit_sha,
+    treeSha: ack.commit.tree_sha,
+    targetBranch: ack.commit.target_branch,
+    packBytes: ack.commit.pack_bytes,
+    blobCount: ack.commit.blob_count,
+    refUpdate,
+  };
+}
+
+export function toRefUpdate(result: CommitPackAckRaw['result']): RefUpdate {
+  return {
+    branch: result.branch,
+    oldSha: result.old_sha,
+    newSha: result.new_sha,
+  };
+}
+
+export async function parseCommitPackError(
+  response: Response,
+  fallbackMessage: string,
+): Promise<{
+  statusMessage: string;
+  statusLabel: string;
+  refUpdate?: Partial<RefUpdate>;
+}> {
+  const cloned = response.clone();
+  let jsonBody: unknown;
+  try {
+    jsonBody = await cloned.json();
+  } catch {
+    jsonBody = undefined;
+  }
+
+  let textBody: string | undefined;
+  if (jsonBody === undefined) {
+    try {
+      textBody = await response.text();
+    } catch {
+      textBody = undefined;
+    }
+  }
+
+  const defaultStatus = (() => {
+    const inferred = inferRefUpdateReason(String(response.status));
+    return inferred === 'unknown' ? 'failed' : inferred;
+  })();
+  let statusLabel = defaultStatus;
+  let refUpdate: Partial<RefUpdate> | undefined;
+  let message: string | undefined;
+
+  if (jsonBody !== undefined) {
+    const parsedResponse = commitPackResponseSchema.safeParse(jsonBody);
+    if (parsedResponse.success) {
+      const result = parsedResponse.data.result;
+      if (typeof result.status === 'string' && result.status.trim() !== '') {
+        statusLabel = result.status.trim() as typeof statusLabel;
+      }
+      refUpdate = toPartialRefUpdateFields(result.branch, result.old_sha, result.new_sha);
+      if (typeof result.message === 'string' && result.message.trim() !== '') {
+        message = result.message.trim();
+      }
+    }
+
+    if (!message) {
+      const parsedError = errorEnvelopeSchema.safeParse(jsonBody);
+      if (parsedError.success) {
+        const trimmed = parsedError.data.error.trim();
+        if (trimmed) {
+          message = trimmed;
+        }
+      }
+    }
+  }
+
+  if (!message && typeof jsonBody === 'string' && jsonBody.trim() !== '') {
+    message = jsonBody.trim();
+  }
+
+  if (!message && textBody && textBody.trim() !== '') {
+    message = textBody.trim();
+  }
+
+  return {
+    statusMessage: message ?? fallbackMessage,
+    statusLabel,
+    refUpdate,
+  };
+}
+
+function toPartialRefUpdateFields(
+  branch?: string | null,
+  oldSha?: string | null,
+  newSha?: string | null,
+): Partial<RefUpdate> | undefined {
+  const refUpdate: Partial<RefUpdate> = {};
+
+  if (typeof branch === 'string' && branch.trim() !== '') {
+    refUpdate.branch = branch.trim();
+  }
+  if (typeof oldSha === 'string' && oldSha.trim() !== '') {
+    refUpdate.oldSha = oldSha.trim();
+  }
+  if (typeof newSha === 'string' && newSha.trim() !== '') {
+    refUpdate.newSha = newSha.trim();
+  }
+
+  return Object.keys(refUpdate).length > 0 ? refUpdate : undefined;
+}
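buildCommitResult is the success/failure pivot for the commit-pack acknowledgement: it maps the snake_case wire fields to the camelCase CommitResult and throws RefUpdateError when result.success is false. A sketch of the happy path, with an ack literal whose shape is inferred from the field accesses above (the authoritative shape is defined by the zod schemas in ./schemas):

import { buildCommitResult } from './commit-pack';

const ack = {
  commit: {
    commit_sha: 'abc123',
    tree_sha: 'def456',
    target_branch: 'refs/heads/main',
    pack_bytes: 2048,
    blob_count: 2,
  },
  result: {
    success: true,
    status: 'updated',
    branch: 'refs/heads/main',
    old_sha: '0000000',
    new_sha: 'abc123',
  },
};

// Cast only because this sketch does not import the raw schema type.
const result = buildCommitResult(ack as Parameters<typeof buildCommitResult>[0]);
// result.commitSha === 'abc123'; result.refUpdate.newSha === 'abc123'.
// With result.success === false, the same call throws RefUpdateError instead.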
package/src/commit.ts
CHANGED
@@ -1,8 +1,16 @@
-import {
+import { buildCommitResult, parseCommitPackError } from './commit-pack';
+import { RefUpdateError } from './errors';
 import type { CommitPackAckRaw } from './schemas';
-import { commitPackAckSchema
+import { commitPackAckSchema } from './schemas';
+import {
+  base64Encode,
+  type ChunkSegment,
+  chunkify,
+  requiresDuplex,
+  toAsyncIterable,
+  toRequestBody,
+} from './stream-utils';
 import type {
-  BlobLike,
   CommitBuilder,
   CommitFileOptions,
   CommitFileSource,
@@ -11,11 +19,8 @@ import type {
   CommitTextFileOptions,
   CreateCommitOptions,
   LegacyCreateCommitOptions,
-  ReadableStreamLike,
-  RefUpdate,
 } from './types';
 
-const MAX_CHUNK_BYTES = 4 * 1024 * 1024;
 const DEFAULT_TTL_SECONDS = 60 * 60;
 const HEADS_REF_PREFIX = 'refs/heads/';
 
@@ -30,11 +35,6 @@ const BufferCtor: NodeBufferConstructor | undefined = (
   globalThis as { Buffer?: NodeBufferConstructor }
 ).Buffer;
 
-type ChunkSegment = {
-  chunk: Uint8Array;
-  eof: boolean;
-};
-
 interface CommitMetadataPayload {
   target_branch: string;
   expected_head_sha?: string;
@@ -66,7 +66,7 @@ interface CommitTransportRequest {
 }
 
 interface CommitTransport {
-  send(request: CommitTransportRequest): Promise<
+  send(request: CommitTransportRequest): Promise<CommitPackAckRaw>;
 }
 
 type NormalizedCommitOptions = {
@@ -96,8 +96,6 @@ type FileOperationState = {
   streamFactory?: () => AsyncIterable<Uint8Array>;
 };
 
-type CommitPackAck = CommitPackAckRaw;
-
 export class CommitBuilderImpl implements CommitBuilder {
   private readonly options: NormalizedCommitOptions;
   private readonly getAuthToken: () => Promise<string>;
@@ -286,7 +284,7 @@ export class FetchCommitTransport implements CommitTransport {
     this.url = `${trimmedBase}/api/v${config.version}/repos/commit-pack`;
   }
 
-  async send(request: CommitTransportRequest): Promise<
+  async send(request: CommitTransportRequest): Promise<CommitPackAckRaw> {
     const bodyIterable = buildMessageIterable(request.metadata, request.blobs);
     const body = toRequestBody(bodyIterable);
 
@@ -308,7 +306,11 @@
     const response = await fetch(this.url, init);
 
     if (!response.ok) {
-      const
+      const fallbackMessage = `createCommit request failed (${response.status} ${response.statusText})`;
+      const { statusMessage, statusLabel, refUpdate } = await parseCommitPackError(
+        response,
+        fallbackMessage,
+      );
       throw new RefUpdateError(statusMessage, {
         status: statusLabel,
         message: statusMessage,
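With this change a non-OK HTTP response surfaces as a RefUpdateError carrying the parsed status label, message, and any partial ref-update fields the server returned. A hedged consumer-side sketch (createCommit below is a stand-in for whatever public API ultimately drives FetchCommitTransport.send; how RefUpdateError exposes its details beyond message is not shown in this diff):

import { RefUpdateError } from './errors';
import type { CommitResult } from './types';

async function commitOrReport(createCommit: () => Promise<CommitResult>) {
  try {
    return await createCommit();
  } catch (err) {
    if (err instanceof RefUpdateError) {
      // The transport passed status/message/refUpdate to the constructor above.
      console.error('ref update rejected:', err.message);
      return undefined;
    }
    throw err;
  }
}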
@@ -321,31 +323,6 @@
     }
   }
 }
 
-function toRequestBody(iterable: AsyncIterable<Uint8Array>): unknown {
-  const readableStreamCtor = (
-    globalThis as { ReadableStream?: new (underlyingSource: unknown) => unknown }
-  ).ReadableStream;
-  if (typeof readableStreamCtor === 'function') {
-    const iterator = iterable[Symbol.asyncIterator]();
-    return new readableStreamCtor({
-      async pull(controller: { enqueue(chunk: Uint8Array): void; close(): void }) {
-        const { value, done } = await iterator.next();
-        if (done) {
-          controller.close();
-          return;
-        }
-        controller.enqueue(value!);
-      },
-      async cancel(reason: unknown) {
-        if (typeof iterator.return === 'function') {
-          await iterator.return(reason);
-        }
-      },
-    });
-  }
-  return iterable;
-}
-
 function buildMessageIterable(
   metadata: CommitMetadataPayload,
   blobs: Array<{ contentId: string; chunks: AsyncIterable<ChunkSegment> }>,
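The removed toRequestBody (now imported from ./stream-utils) wraps an async iterable of bytes in a ReadableStream when the runtime provides one, and its companion requiresDuplex signals that such a body needs duplex: 'half' on the fetch init; Node's fetch (undici) rejects streaming request bodies without that flag. A minimal sketch of the same pattern, assuming a runtime with ReadableStream (the duplex field is absent from TypeScript's RequestInit type, hence the cast):

async function postStream(url: string, chunks: AsyncIterable<Uint8Array>) {
  const iterator = chunks[Symbol.asyncIterator]();
  const body = new ReadableStream<Uint8Array>({
    async pull(controller) {
      const { value, done } = await iterator.next();
      if (done) controller.close();
      else controller.enqueue(value);
    },
  });
  return fetch(url, { method: 'POST', body, duplex: 'half' } as RequestInit);
}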
@@ -370,237 +347,6 @@ function buildMessageIterable(
   };
 }
 
-function requiresDuplex(body: unknown): boolean {
-  if (!body || typeof body !== 'object') {
-    return false;
-  }
-
-  if (typeof (body as { [Symbol.asyncIterator]?: unknown })[Symbol.asyncIterator] === 'function') {
-    return true;
-  }
-
-  const readableStreamCtor = (
-    globalThis as {
-      ReadableStream?: new (...args: unknown[]) => unknown;
-    }
-  ).ReadableStream;
-  if (readableStreamCtor && body instanceof readableStreamCtor) {
-    return true;
-  }
-
-  return false;
-}
-
-function buildCommitResult(ack: CommitPackAck): CommitResult {
-  const refUpdate = toRefUpdate(ack.result);
-  if (!ack.result.success) {
-    throw new RefUpdateError(
-      ack.result.message ?? `Commit failed with status ${ack.result.status}`,
-      {
-        status: ack.result.status,
-        message: ack.result.message,
-        refUpdate,
-      },
-    );
-  }
-  return {
-    commitSha: ack.commit.commit_sha,
-    treeSha: ack.commit.tree_sha,
-    targetBranch: ack.commit.target_branch,
-    packBytes: ack.commit.pack_bytes,
-    blobCount: ack.commit.blob_count,
-    refUpdate,
-  };
-}
-
-function toRefUpdate(result: CommitPackAck['result']): RefUpdate {
-  return {
-    branch: result.branch,
-    oldSha: result.old_sha,
-    newSha: result.new_sha,
-  };
-}
-
-async function* chunkify(source: AsyncIterable<Uint8Array>): AsyncIterable<ChunkSegment> {
-  let pending: Uint8Array | null = null;
-  let produced = false;
-
-  for await (const value of source) {
-    const bytes = value;
-
-    if (pending && pending.byteLength === MAX_CHUNK_BYTES) {
-      yield { chunk: pending, eof: false };
-      produced = true;
-      pending = null;
-    }
-
-    const merged: Uint8Array = pending ? concatChunks(pending, bytes) : bytes;
-    pending = null;
-
-    let cursor: Uint8Array = merged;
-    while (cursor.byteLength > MAX_CHUNK_BYTES) {
-      const chunk: Uint8Array = cursor.slice(0, MAX_CHUNK_BYTES);
-      cursor = cursor.slice(MAX_CHUNK_BYTES);
-      yield { chunk, eof: false };
-      produced = true;
-    }
-
-    pending = cursor;
-  }
-
-  if (pending) {
-    yield { chunk: pending, eof: true };
-    produced = true;
-  }
-
-  if (!produced) {
-    yield { chunk: new Uint8Array(0), eof: true };
-  }
-}
-
-async function* toAsyncIterable(source: CommitFileSource): AsyncIterable<Uint8Array> {
-  if (typeof source === 'string') {
-    yield new TextEncoder().encode(source);
-    return;
-  }
-  if (source instanceof Uint8Array) {
-    yield source;
-    return;
-  }
-  if (source instanceof ArrayBuffer) {
-    yield new Uint8Array(source);
-    return;
-  }
-  if (ArrayBuffer.isView(source)) {
-    yield new Uint8Array(source.buffer, source.byteOffset, source.byteLength);
-    return;
-  }
-  if (isBlobLike(source)) {
-    const stream = source.stream();
-    if (isAsyncIterable(stream)) {
-      for await (const chunk of stream as AsyncIterable<unknown>) {
-        yield ensureUint8Array(chunk);
-      }
-      return;
-    }
-    if (isReadableStreamLike(stream)) {
-      yield* readReadableStream(stream);
-      return;
-    }
-  }
-  if (isReadableStreamLike(source)) {
-    yield* readReadableStream(source);
-    return;
-  }
-  if (isAsyncIterable(source)) {
-    for await (const chunk of source as AsyncIterable<unknown>) {
-      yield ensureUint8Array(chunk);
-    }
-    return;
-  }
-  if (isIterable(source)) {
-    for (const chunk of source as Iterable<unknown>) {
-      yield ensureUint8Array(chunk);
-    }
-    return;
-  }
-  throw new Error('Unsupported file source for createCommit');
-}
-
-async function* readReadableStream(stream: ReadableStreamLike<unknown>): AsyncIterable<Uint8Array> {
-  const reader = stream.getReader();
-  try {
-    while (true) {
-      const { value, done } = await reader.read();
-      if (done) {
-        break;
-      }
-      if (value !== undefined) {
-        yield ensureUint8Array(value);
-      }
-    }
-  } finally {
-    reader.releaseLock?.();
-  }
-}
-
-function ensureUint8Array(value: unknown): Uint8Array {
-  if (value instanceof Uint8Array) {
-    return value;
-  }
-  if (value instanceof ArrayBuffer) {
-    return new Uint8Array(value);
-  }
-  if (ArrayBuffer.isView(value)) {
-    return new Uint8Array(value.buffer, value.byteOffset, value.byteLength);
-  }
-  if (typeof value === 'string') {
-    return new TextEncoder().encode(value);
-  }
-  if (BufferCtor && BufferCtor.isBuffer(value)) {
-    return value as Uint8Array;
-  }
-  throw new Error('Unsupported chunk type; expected binary data');
-}
-
-function isBlobLike(value: unknown): value is BlobLike {
-  return (
-    typeof value === 'object' && value !== null && typeof (value as BlobLike).stream === 'function'
-  );
-}
-
-function isReadableStreamLike<T>(value: unknown): value is ReadableStreamLike<T> {
-  return (
-    typeof value === 'object' &&
-    value !== null &&
-    typeof (value as ReadableStreamLike<T>).getReader === 'function'
-  );
-}
-
-function isAsyncIterable(value: unknown): value is AsyncIterable<unknown> {
-  return (
-    typeof value === 'object' &&
-    value !== null &&
-    Symbol.asyncIterator in (value as Record<string, unknown>)
-  );
-}
-
-function isIterable(value: unknown): value is Iterable<unknown> {
-  return (
-    typeof value === 'object' &&
-    value !== null &&
-    Symbol.iterator in (value as Record<string, unknown>)
-  );
-}
-
-function concatChunks(a: Uint8Array, b: Uint8Array): Uint8Array {
-  if (a.byteLength === 0) {
-    return b;
-  }
-  if (b.byteLength === 0) {
-    return a;
-  }
-  const merged = new Uint8Array(a.byteLength + b.byteLength);
-  merged.set(a, 0);
-  merged.set(b, a.byteLength);
-  return merged;
-}
-
-function base64Encode(bytes: Uint8Array): string {
-  if (BufferCtor) {
-    return BufferCtor.from(bytes).toString('base64');
-  }
-  let binary = '';
-  for (let i = 0; i < bytes.byteLength; i++) {
-    binary += String.fromCharCode(bytes[i]);
-  }
-  const btoaFn = (globalThis as { btoa?: (data: string) => string }).btoa;
-  if (typeof btoaFn === 'function') {
-    return btoaFn(binary);
-  }
-  throw new Error('Base64 encoding is not supported in this environment');
-}
-
 function randomContentId(): string {
   const cryptoObj = globalThis.crypto;
   if (cryptoObj && typeof cryptoObj.randomUUID === 'function') {
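Of the helpers moved out here, chunkify carries the most behavior: it re-slices an arbitrary byte stream into segments of at most MAX_CHUNK_BYTES (4 MiB), marks only the final segment with eof: true, and emits a single empty eof segment for empty input. A usage sketch, assuming ./stream-utils re-exports the implementation shown above unchanged:

import { chunkify } from './stream-utils';

async function demo() {
  // A single 5 MiB push should yield a 4 MiB segment (eof: false)
  // followed by the 1 MiB tail (eof: true).
  async function* source(): AsyncIterable<Uint8Array> {
    yield new Uint8Array(5 * 1024 * 1024);
  }
  for await (const { chunk, eof } of chunkify(source())) {
    console.log(chunk.byteLength, eof); // 4194304 false, then 1048576 true
  }
}
void demo();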
@@ -684,93 +430,3 @@ export function resolveCommitTtlSeconds(options?: { ttl?: number }): number {
   }
   return DEFAULT_TTL_SECONDS;
 }
-
-async function parseCommitPackError(response: Response): Promise<{
-  statusMessage: string;
-  statusLabel: string;
-  refUpdate?: Partial<RefUpdate>;
-}> {
-  const fallbackMessage = `createCommit request failed (${response.status} ${response.statusText})`;
-  const cloned = response.clone();
-  let jsonBody: unknown;
-  try {
-    jsonBody = await cloned.json();
-  } catch {
-    jsonBody = undefined;
-  }
-
-  let textBody: string | undefined;
-  if (jsonBody === undefined) {
-    try {
-      textBody = await response.text();
-    } catch {
-      textBody = undefined;
-    }
-  }
-
-  const defaultStatus = (() => {
-    const inferred = inferRefUpdateReason(String(response.status));
-    return inferred === 'unknown' ? 'failed' : inferred;
-  })();
-  let statusLabel = defaultStatus;
-  let refUpdate: Partial<RefUpdate> | undefined;
-  let message: string | undefined;
-
-  if (jsonBody !== undefined) {
-    const parsedResponse = commitPackResponseSchema.safeParse(jsonBody);
-    if (parsedResponse.success) {
-      const result = parsedResponse.data.result;
-      if (typeof result.status === 'string' && result.status.trim() !== '') {
-        statusLabel = result.status.trim() as typeof statusLabel;
-      }
-      refUpdate = toPartialRefUpdateFields(result.branch, result.old_sha, result.new_sha);
-      if (typeof result.message === 'string' && result.message.trim() !== '') {
-        message = result.message.trim();
-      }
-    }
-
-    if (!message) {
-      const parsedError = errorEnvelopeSchema.safeParse(jsonBody);
-      if (parsedError.success) {
-        const trimmed = parsedError.data.error.trim();
-        if (trimmed) {
-          message = trimmed;
-        }
-      }
-    }
-  }
-
-  if (!message && typeof jsonBody === 'string' && jsonBody.trim() !== '') {
-    message = jsonBody.trim();
-  }
-
-  if (!message && textBody && textBody.trim() !== '') {
-    message = textBody.trim();
-  }
-
-  return {
-    statusMessage: message ?? fallbackMessage,
-    statusLabel,
-    refUpdate,
-  };
-}
-
-function toPartialRefUpdateFields(
-  branch?: string | null,
-  oldSha?: string | null,
-  newSha?: string | null,
-): Partial<RefUpdate> | undefined {
-  const refUpdate: Partial<RefUpdate> = {};
-
-  if (typeof branch === 'string' && branch.trim() !== '') {
-    refUpdate.branch = branch.trim();
-  }
-  if (typeof oldSha === 'string' && oldSha.trim() !== '') {
-    refUpdate.oldSha = oldSha.trim();
-  }
-  if (typeof newSha === 'string' && newSha.trim() !== '') {
-    refUpdate.newSha = newSha.trim();
-  }
-
-  return Object.keys(refUpdate).length > 0 ? refUpdate : undefined;
-}