@pierre/storage 0.0.9 → 0.0.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +83 -1
- package/dist/index.cjs +367 -5
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +47 -2
- package/dist/index.d.ts +47 -2
- package/dist/index.js +367 -5
- package/dist/index.js.map +1 -1
- package/package.json +37 -38
- package/src/commit.ts +496 -0
- package/src/index.ts +26 -4
- package/src/types.ts +58 -1
package/package.json
CHANGED
|
@@ -1,39 +1,38 @@
|
|
|
1
1
|
{
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
}
|
|
2
|
+
"name": "@pierre/storage",
|
|
3
|
+
"version": "0.0.11",
|
|
4
|
+
"description": "Pierre Git Storage SDK",
|
|
5
|
+
"license": "MIT",
|
|
6
|
+
"type": "module",
|
|
7
|
+
"main": "./dist/index.cjs",
|
|
8
|
+
"module": "./dist/index.js",
|
|
9
|
+
"types": "./dist/index.d.ts",
|
|
10
|
+
"exports": {
|
|
11
|
+
".": {
|
|
12
|
+
"types": "./dist/index.d.ts",
|
|
13
|
+
"import": "./dist/index.js",
|
|
14
|
+
"require": "./dist/index.cjs",
|
|
15
|
+
"default": "./dist/index.js"
|
|
16
|
+
}
|
|
17
|
+
},
|
|
18
|
+
"files": [
|
|
19
|
+
"dist",
|
|
20
|
+
"src"
|
|
21
|
+
],
|
|
22
|
+
"dependencies": {
|
|
23
|
+
"jose": "^5.10.0",
|
|
24
|
+
"snakecase-keys": "^9.0.2"
|
|
25
|
+
},
|
|
26
|
+
"devDependencies": {
|
|
27
|
+
"tsup": "8.5.0",
|
|
28
|
+
"typescript": "5.8.3",
|
|
29
|
+
"vitest": "3.2.4"
|
|
30
|
+
},
|
|
31
|
+
"publishConfig": {
|
|
32
|
+
"access": "public"
|
|
33
|
+
},
|
|
34
|
+
"scripts": {
|
|
35
|
+
"build": "tsup",
|
|
36
|
+
"dev": "tsup --watch"
|
|
37
|
+
}
|
|
38
|
+
}
|
package/src/commit.ts
ADDED
|
@@ -0,0 +1,496 @@
|
|
|
1
|
+
import type {
|
|
2
|
+
CommitBuilder,
|
|
3
|
+
CommitFileOptions,
|
|
4
|
+
CommitFileSource,
|
|
5
|
+
CommitResponse,
|
|
6
|
+
CommitTextFileOptions,
|
|
7
|
+
CreateCommitOptions,
|
|
8
|
+
} from './types';
|
|
9
|
+
|
|
10
|
+
const MAX_CHUNK_BYTES = 4 * 1024 * 1024;
|
|
11
|
+
const DEFAULT_TTL_SECONDS = 60 * 60;
|
|
12
|
+
|
|
13
|
+
type NodeBuffer = Uint8Array & { toString(encoding?: string): string };
|
|
14
|
+
interface NodeBufferConstructor {
|
|
15
|
+
from(data: Uint8Array): NodeBuffer;
|
|
16
|
+
from(data: string, encoding?: string): NodeBuffer;
|
|
17
|
+
isBuffer(value: unknown): value is NodeBuffer;
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
const BufferCtor: NodeBufferConstructor | undefined = (
|
|
21
|
+
globalThis as { Buffer?: NodeBufferConstructor }
|
|
22
|
+
).Buffer;
|
|
23
|
+
|
|
24
|
+
interface ReadableStreamReaderLike<T> {
|
|
25
|
+
read(): Promise<{ value?: T; done: boolean }>;
|
|
26
|
+
releaseLock?(): void;
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
interface ReadableStreamLike<T> {
|
|
30
|
+
getReader(): ReadableStreamReaderLike<T>;
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
interface BlobLike {
|
|
34
|
+
stream(): unknown;
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
type ChunkSegment = {
|
|
38
|
+
chunk: Uint8Array;
|
|
39
|
+
eof: boolean;
|
|
40
|
+
};
|
|
41
|
+
|
|
42
|
+
interface CommitMetadataPayload {
|
|
43
|
+
target_ref: string;
|
|
44
|
+
base_ref?: string;
|
|
45
|
+
commit_message: string;
|
|
46
|
+
author?: {
|
|
47
|
+
name: string;
|
|
48
|
+
email: string;
|
|
49
|
+
date?: string;
|
|
50
|
+
};
|
|
51
|
+
committer?: {
|
|
52
|
+
name: string;
|
|
53
|
+
email: string;
|
|
54
|
+
date?: string;
|
|
55
|
+
};
|
|
56
|
+
files: Array<{
|
|
57
|
+
path: string;
|
|
58
|
+
content_id: string;
|
|
59
|
+
operation?: 'upsert' | 'delete';
|
|
60
|
+
mode?: string;
|
|
61
|
+
}>;
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
interface CommitTransportRequest {
|
|
65
|
+
authorization: string;
|
|
66
|
+
signal?: AbortSignal;
|
|
67
|
+
metadata: CommitMetadataPayload;
|
|
68
|
+
blobs: Array<{ contentId: string; chunks: AsyncIterable<ChunkSegment> }>;
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
interface CommitTransport {
|
|
72
|
+
send(request: CommitTransportRequest): Promise<CommitResponse>;
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
interface CommitBuilderDeps {
|
|
76
|
+
options: CreateCommitOptions;
|
|
77
|
+
getAuthToken: () => Promise<string>;
|
|
78
|
+
transport: CommitTransport;
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
type FileOperationState = {
|
|
82
|
+
path: string;
|
|
83
|
+
contentId: string;
|
|
84
|
+
mode?: string;
|
|
85
|
+
operation: 'upsert' | 'delete';
|
|
86
|
+
streamFactory?: () => AsyncIterable<Uint8Array>;
|
|
87
|
+
};
|
|
88
|
+
|
|
89
|
+
export class CommitBuilderImpl implements CommitBuilder {
|
|
90
|
+
private readonly options: CreateCommitOptions;
|
|
91
|
+
private readonly getAuthToken: () => Promise<string>;
|
|
92
|
+
private readonly transport: CommitTransport;
|
|
93
|
+
private readonly operations: FileOperationState[] = [];
|
|
94
|
+
private sent = false;
|
|
95
|
+
|
|
96
|
+
constructor(deps: CommitBuilderDeps) {
|
|
97
|
+
this.options = { ...deps.options };
|
|
98
|
+
this.getAuthToken = deps.getAuthToken;
|
|
99
|
+
this.transport = deps.transport;
|
|
100
|
+
|
|
101
|
+
const trimmedTarget = this.options.targetRef?.trim();
|
|
102
|
+
const trimmedMessage = this.options.commitMessage?.trim();
|
|
103
|
+
if (!trimmedTarget) {
|
|
104
|
+
throw new Error('createCommit targetRef is required');
|
|
105
|
+
}
|
|
106
|
+
if (!trimmedMessage) {
|
|
107
|
+
throw new Error('createCommit commitMessage is required');
|
|
108
|
+
}
|
|
109
|
+
this.options.targetRef = trimmedTarget;
|
|
110
|
+
this.options.commitMessage = trimmedMessage;
|
|
111
|
+
if (typeof this.options.baseRef === 'string') {
|
|
112
|
+
this.options.baseRef = this.options.baseRef.trim();
|
|
113
|
+
}
|
|
114
|
+
}
|
|
115
|
+
|
|
116
|
+
addFile(path: string, source: CommitFileSource, options?: CommitFileOptions): CommitBuilder {
|
|
117
|
+
this.ensureNotSent();
|
|
118
|
+
const normalizedPath = this.normalizePath(path);
|
|
119
|
+
const contentId = randomContentId();
|
|
120
|
+
const mode = options?.mode ?? '100644';
|
|
121
|
+
|
|
122
|
+
this.operations.push({
|
|
123
|
+
path: normalizedPath,
|
|
124
|
+
contentId,
|
|
125
|
+
mode,
|
|
126
|
+
operation: 'upsert',
|
|
127
|
+
streamFactory: () => toAsyncIterable(source),
|
|
128
|
+
});
|
|
129
|
+
|
|
130
|
+
return this;
|
|
131
|
+
}
|
|
132
|
+
|
|
133
|
+
addFileFromString(
|
|
134
|
+
path: string,
|
|
135
|
+
contents: string,
|
|
136
|
+
options?: CommitTextFileOptions,
|
|
137
|
+
): CommitBuilder {
|
|
138
|
+
const encoding = options?.encoding;
|
|
139
|
+
if (encoding && encoding !== 'utf8' && encoding !== 'utf-8') {
|
|
140
|
+
throw new Error(`Unsupported encoding "${encoding}". Only UTF-8 is supported.`);
|
|
141
|
+
}
|
|
142
|
+
const data = new TextEncoder().encode(contents);
|
|
143
|
+
return this.addFile(path, data, options);
|
|
144
|
+
}
|
|
145
|
+
|
|
146
|
+
deletePath(path: string): CommitBuilder {
|
|
147
|
+
this.ensureNotSent();
|
|
148
|
+
const normalizedPath = this.normalizePath(path);
|
|
149
|
+
this.operations.push({
|
|
150
|
+
path: normalizedPath,
|
|
151
|
+
contentId: randomContentId(),
|
|
152
|
+
operation: 'delete',
|
|
153
|
+
});
|
|
154
|
+
return this;
|
|
155
|
+
}
|
|
156
|
+
|
|
157
|
+
async send(): Promise<CommitResponse> {
|
|
158
|
+
this.ensureNotSent();
|
|
159
|
+
this.sent = true;
|
|
160
|
+
|
|
161
|
+
const metadata = this.buildMetadata();
|
|
162
|
+
const blobEntries = this.operations
|
|
163
|
+
.filter((op) => op.operation === 'upsert' && op.streamFactory)
|
|
164
|
+
.map((op) => ({
|
|
165
|
+
contentId: op.contentId,
|
|
166
|
+
chunks: chunkify(op.streamFactory!()),
|
|
167
|
+
}));
|
|
168
|
+
|
|
169
|
+
const authorization = await this.getAuthToken();
|
|
170
|
+
return this.transport.send({
|
|
171
|
+
authorization,
|
|
172
|
+
signal: this.options.signal,
|
|
173
|
+
metadata,
|
|
174
|
+
blobs: blobEntries,
|
|
175
|
+
});
|
|
176
|
+
}
|
|
177
|
+
|
|
178
|
+
private buildMetadata(): CommitMetadataPayload {
|
|
179
|
+
const files = this.operations.map((op) => {
|
|
180
|
+
const entry: CommitMetadataPayload['files'][number] = {
|
|
181
|
+
path: op.path,
|
|
182
|
+
content_id: op.contentId,
|
|
183
|
+
operation: op.operation,
|
|
184
|
+
};
|
|
185
|
+
if (op.mode) {
|
|
186
|
+
entry.mode = op.mode;
|
|
187
|
+
}
|
|
188
|
+
return entry;
|
|
189
|
+
});
|
|
190
|
+
|
|
191
|
+
const metadata: CommitMetadataPayload = {
|
|
192
|
+
target_ref: this.options.targetRef,
|
|
193
|
+
commit_message: this.options.commitMessage,
|
|
194
|
+
files,
|
|
195
|
+
};
|
|
196
|
+
|
|
197
|
+
if (this.options.baseRef) {
|
|
198
|
+
metadata.base_ref = this.options.baseRef;
|
|
199
|
+
}
|
|
200
|
+
if (this.options.author) {
|
|
201
|
+
metadata.author = { ...this.options.author };
|
|
202
|
+
}
|
|
203
|
+
if (this.options.committer) {
|
|
204
|
+
metadata.committer = { ...this.options.committer };
|
|
205
|
+
}
|
|
206
|
+
|
|
207
|
+
return metadata;
|
|
208
|
+
}
|
|
209
|
+
|
|
210
|
+
private ensureNotSent(): void {
|
|
211
|
+
if (this.sent) {
|
|
212
|
+
throw new Error('createCommit builder cannot be reused after send()');
|
|
213
|
+
}
|
|
214
|
+
}
|
|
215
|
+
|
|
216
|
+
private normalizePath(path: string): string {
|
|
217
|
+
if (!path || typeof path !== 'string' || path.trim() === '') {
|
|
218
|
+
throw new Error('File path must be a non-empty string');
|
|
219
|
+
}
|
|
220
|
+
return path.replace(/^\//, '');
|
|
221
|
+
}
|
|
222
|
+
}
|
|
223
|
+
|
|
224
|
+
export class FetchCommitTransport implements CommitTransport {
|
|
225
|
+
private readonly url: string;
|
|
226
|
+
|
|
227
|
+
constructor(config: { baseUrl: string; version: number }) {
|
|
228
|
+
const trimmedBase = config.baseUrl.replace(/\/+$/, '');
|
|
229
|
+
this.url = `${trimmedBase}/api/v${config.version}/repos/commit-pack`;
|
|
230
|
+
}
|
|
231
|
+
|
|
232
|
+
async send(request: CommitTransportRequest): Promise<CommitResponse> {
|
|
233
|
+
const bodyIterable = buildMessageIterable(request.metadata, request.blobs);
|
|
234
|
+
const body = toRequestBody(bodyIterable);
|
|
235
|
+
|
|
236
|
+
const response = await fetch(this.url, {
|
|
237
|
+
method: 'POST',
|
|
238
|
+
headers: {
|
|
239
|
+
Authorization: `Bearer ${request.authorization}`,
|
|
240
|
+
'Content-Type': 'application/x-ndjson',
|
|
241
|
+
Accept: 'application/json',
|
|
242
|
+
},
|
|
243
|
+
body: body as any,
|
|
244
|
+
signal: request.signal,
|
|
245
|
+
});
|
|
246
|
+
|
|
247
|
+
if (!response.ok) {
|
|
248
|
+
const text = await response.text();
|
|
249
|
+
throw new Error(`createCommit request failed (${response.status}): ${text}`);
|
|
250
|
+
}
|
|
251
|
+
|
|
252
|
+
return (await response.json()) as CommitResponse;
|
|
253
|
+
}
|
|
254
|
+
}
|
|
255
|
+
|
|
256
|
+
function toRequestBody(iterable: AsyncIterable<Uint8Array>): unknown {
|
|
257
|
+
const readableStreamCtor = (
|
|
258
|
+
globalThis as { ReadableStream?: new (underlyingSource: unknown) => unknown }
|
|
259
|
+
).ReadableStream;
|
|
260
|
+
if (typeof readableStreamCtor === 'function') {
|
|
261
|
+
const iterator = iterable[Symbol.asyncIterator]();
|
|
262
|
+
return new readableStreamCtor({
|
|
263
|
+
async pull(controller: { enqueue(chunk: Uint8Array): void; close(): void }) {
|
|
264
|
+
const { value, done } = await iterator.next();
|
|
265
|
+
if (done) {
|
|
266
|
+
controller.close();
|
|
267
|
+
return;
|
|
268
|
+
}
|
|
269
|
+
controller.enqueue(value!);
|
|
270
|
+
},
|
|
271
|
+
async cancel(reason: unknown) {
|
|
272
|
+
if (typeof iterator.return === 'function') {
|
|
273
|
+
await iterator.return(reason);
|
|
274
|
+
}
|
|
275
|
+
},
|
|
276
|
+
});
|
|
277
|
+
}
|
|
278
|
+
return iterable;
|
|
279
|
+
}
|
|
280
|
+
|
|
281
|
+
function buildMessageIterable(
|
|
282
|
+
metadata: CommitMetadataPayload,
|
|
283
|
+
blobs: Array<{ contentId: string; chunks: AsyncIterable<ChunkSegment> }>,
|
|
284
|
+
): AsyncIterable<Uint8Array> {
|
|
285
|
+
const encoder = new TextEncoder();
|
|
286
|
+
return {
|
|
287
|
+
async *[Symbol.asyncIterator]() {
|
|
288
|
+
yield encoder.encode(`${JSON.stringify({ metadata })}\n`);
|
|
289
|
+
for (const blob of blobs) {
|
|
290
|
+
for await (const segment of blob.chunks) {
|
|
291
|
+
const payload = {
|
|
292
|
+
blob_chunk: {
|
|
293
|
+
content_id: blob.contentId,
|
|
294
|
+
data: base64Encode(segment.chunk),
|
|
295
|
+
eof: segment.eof,
|
|
296
|
+
},
|
|
297
|
+
};
|
|
298
|
+
yield encoder.encode(`${JSON.stringify(payload)}\n`);
|
|
299
|
+
}
|
|
300
|
+
}
|
|
301
|
+
},
|
|
302
|
+
};
|
|
303
|
+
}
|
|
304
|
+
|
|
305
|
+
async function* chunkify(source: AsyncIterable<Uint8Array>): AsyncIterable<ChunkSegment> {
|
|
306
|
+
let pending: Uint8Array | null = null;
|
|
307
|
+
let produced = false;
|
|
308
|
+
|
|
309
|
+
for await (const value of source) {
|
|
310
|
+
const bytes = value;
|
|
311
|
+
|
|
312
|
+
if (pending && pending.byteLength === MAX_CHUNK_BYTES) {
|
|
313
|
+
yield { chunk: pending, eof: false };
|
|
314
|
+
produced = true;
|
|
315
|
+
pending = null;
|
|
316
|
+
}
|
|
317
|
+
|
|
318
|
+
const merged: Uint8Array = pending ? concatChunks(pending, bytes) : bytes;
|
|
319
|
+
pending = null;
|
|
320
|
+
|
|
321
|
+
let cursor: Uint8Array = merged;
|
|
322
|
+
while (cursor.byteLength > MAX_CHUNK_BYTES) {
|
|
323
|
+
const chunk: Uint8Array = cursor.slice(0, MAX_CHUNK_BYTES);
|
|
324
|
+
cursor = cursor.slice(MAX_CHUNK_BYTES);
|
|
325
|
+
yield { chunk, eof: false };
|
|
326
|
+
produced = true;
|
|
327
|
+
}
|
|
328
|
+
|
|
329
|
+
pending = cursor;
|
|
330
|
+
}
|
|
331
|
+
|
|
332
|
+
if (pending) {
|
|
333
|
+
yield { chunk: pending, eof: true };
|
|
334
|
+
produced = true;
|
|
335
|
+
}
|
|
336
|
+
|
|
337
|
+
if (!produced) {
|
|
338
|
+
yield { chunk: new Uint8Array(0), eof: true };
|
|
339
|
+
}
|
|
340
|
+
}
|
|
341
|
+
|
|
342
|
+
async function* toAsyncIterable(source: CommitFileSource): AsyncIterable<Uint8Array> {
|
|
343
|
+
if (typeof source === 'string') {
|
|
344
|
+
yield new TextEncoder().encode(source);
|
|
345
|
+
return;
|
|
346
|
+
}
|
|
347
|
+
if (source instanceof Uint8Array) {
|
|
348
|
+
yield source;
|
|
349
|
+
return;
|
|
350
|
+
}
|
|
351
|
+
if (source instanceof ArrayBuffer) {
|
|
352
|
+
yield new Uint8Array(source);
|
|
353
|
+
return;
|
|
354
|
+
}
|
|
355
|
+
if (ArrayBuffer.isView(source)) {
|
|
356
|
+
yield new Uint8Array(source.buffer, source.byteOffset, source.byteLength);
|
|
357
|
+
return;
|
|
358
|
+
}
|
|
359
|
+
if (isBlobLike(source)) {
|
|
360
|
+
const stream = source.stream();
|
|
361
|
+
if (isAsyncIterable(stream)) {
|
|
362
|
+
for await (const chunk of stream as AsyncIterable<unknown>) {
|
|
363
|
+
yield ensureUint8Array(chunk);
|
|
364
|
+
}
|
|
365
|
+
return;
|
|
366
|
+
}
|
|
367
|
+
if (isReadableStreamLike(stream)) {
|
|
368
|
+
yield* readReadableStream(stream);
|
|
369
|
+
return;
|
|
370
|
+
}
|
|
371
|
+
}
|
|
372
|
+
if (isAsyncIterable(source)) {
|
|
373
|
+
for await (const chunk of source as AsyncIterable<unknown>) {
|
|
374
|
+
yield ensureUint8Array(chunk);
|
|
375
|
+
}
|
|
376
|
+
return;
|
|
377
|
+
}
|
|
378
|
+
if (isIterable(source)) {
|
|
379
|
+
for (const chunk of source as Iterable<unknown>) {
|
|
380
|
+
yield ensureUint8Array(chunk);
|
|
381
|
+
}
|
|
382
|
+
return;
|
|
383
|
+
}
|
|
384
|
+
throw new Error('Unsupported file source for createCommit');
|
|
385
|
+
}
|
|
386
|
+
|
|
387
|
+
async function* readReadableStream(stream: ReadableStreamLike<unknown>): AsyncIterable<Uint8Array> {
|
|
388
|
+
const reader = stream.getReader();
|
|
389
|
+
try {
|
|
390
|
+
while (true) {
|
|
391
|
+
const { value, done } = await reader.read();
|
|
392
|
+
if (done) {
|
|
393
|
+
break;
|
|
394
|
+
}
|
|
395
|
+
if (value !== undefined) {
|
|
396
|
+
yield ensureUint8Array(value);
|
|
397
|
+
}
|
|
398
|
+
}
|
|
399
|
+
} finally {
|
|
400
|
+
reader.releaseLock?.();
|
|
401
|
+
}
|
|
402
|
+
}
|
|
403
|
+
|
|
404
|
+
function ensureUint8Array(value: unknown): Uint8Array {
|
|
405
|
+
if (value instanceof Uint8Array) {
|
|
406
|
+
return value;
|
|
407
|
+
}
|
|
408
|
+
if (value instanceof ArrayBuffer) {
|
|
409
|
+
return new Uint8Array(value);
|
|
410
|
+
}
|
|
411
|
+
if (ArrayBuffer.isView(value)) {
|
|
412
|
+
return new Uint8Array(value.buffer, value.byteOffset, value.byteLength);
|
|
413
|
+
}
|
|
414
|
+
if (typeof value === 'string') {
|
|
415
|
+
return new TextEncoder().encode(value);
|
|
416
|
+
}
|
|
417
|
+
if (BufferCtor && BufferCtor.isBuffer(value)) {
|
|
418
|
+
return value as Uint8Array;
|
|
419
|
+
}
|
|
420
|
+
throw new Error('Unsupported chunk type; expected binary data');
|
|
421
|
+
}
|
|
422
|
+
|
|
423
|
+
function isBlobLike(value: unknown): value is BlobLike {
|
|
424
|
+
return (
|
|
425
|
+
typeof value === 'object' && value !== null && typeof (value as BlobLike).stream === 'function'
|
|
426
|
+
);
|
|
427
|
+
}
|
|
428
|
+
|
|
429
|
+
function isReadableStreamLike<T>(value: unknown): value is ReadableStreamLike<T> {
|
|
430
|
+
return (
|
|
431
|
+
typeof value === 'object' &&
|
|
432
|
+
value !== null &&
|
|
433
|
+
typeof (value as ReadableStreamLike<T>).getReader === 'function'
|
|
434
|
+
);
|
|
435
|
+
}
|
|
436
|
+
|
|
437
|
+
function isAsyncIterable(value: unknown): value is AsyncIterable<unknown> {
|
|
438
|
+
return (
|
|
439
|
+
typeof value === 'object' &&
|
|
440
|
+
value !== null &&
|
|
441
|
+
Symbol.asyncIterator in (value as Record<string, unknown>)
|
|
442
|
+
);
|
|
443
|
+
}
|
|
444
|
+
|
|
445
|
+
function isIterable(value: unknown): value is Iterable<unknown> {
|
|
446
|
+
return (
|
|
447
|
+
typeof value === 'object' &&
|
|
448
|
+
value !== null &&
|
|
449
|
+
Symbol.iterator in (value as Record<string, unknown>)
|
|
450
|
+
);
|
|
451
|
+
}
|
|
452
|
+
|
|
453
|
+
function concatChunks(a: Uint8Array, b: Uint8Array): Uint8Array {
|
|
454
|
+
if (a.byteLength === 0) {
|
|
455
|
+
return b;
|
|
456
|
+
}
|
|
457
|
+
if (b.byteLength === 0) {
|
|
458
|
+
return a;
|
|
459
|
+
}
|
|
460
|
+
const merged = new Uint8Array(a.byteLength + b.byteLength);
|
|
461
|
+
merged.set(a, 0);
|
|
462
|
+
merged.set(b, a.byteLength);
|
|
463
|
+
return merged;
|
|
464
|
+
}
|
|
465
|
+
|
|
466
|
+
function base64Encode(bytes: Uint8Array): string {
|
|
467
|
+
if (BufferCtor) {
|
|
468
|
+
return BufferCtor.from(bytes).toString('base64');
|
|
469
|
+
}
|
|
470
|
+
let binary = '';
|
|
471
|
+
for (let i = 0; i < bytes.byteLength; i++) {
|
|
472
|
+
binary += String.fromCharCode(bytes[i]);
|
|
473
|
+
}
|
|
474
|
+
const btoaFn = (globalThis as { btoa?: (data: string) => string }).btoa;
|
|
475
|
+
if (typeof btoaFn === 'function') {
|
|
476
|
+
return btoaFn(binary);
|
|
477
|
+
}
|
|
478
|
+
throw new Error('Base64 encoding is not supported in this environment');
|
|
479
|
+
}
|
|
480
|
+
|
|
481
|
+
function randomContentId(): string {
|
|
482
|
+
const cryptoObj = globalThis.crypto;
|
|
483
|
+
if (cryptoObj && typeof cryptoObj.randomUUID === 'function') {
|
|
484
|
+
return cryptoObj.randomUUID();
|
|
485
|
+
}
|
|
486
|
+
const random = Math.random().toString(36).slice(2);
|
|
487
|
+
return `cid-${Date.now().toString(36)}-${random}`;
|
|
488
|
+
}
|
|
489
|
+
|
|
490
|
+
export function createCommitBuilder(deps: CommitBuilderDeps): CommitBuilder {
|
|
491
|
+
return new CommitBuilderImpl(deps);
|
|
492
|
+
}
|
|
493
|
+
|
|
494
|
+
export function resolveCommitTtlSeconds(options?: { ttl?: number }): number {
|
|
495
|
+
return typeof options?.ttl === 'number' && options.ttl > 0 ? options.ttl : DEFAULT_TTL_SECONDS;
|
|
496
|
+
}
|
package/src/index.ts
CHANGED
|
@@ -6,8 +6,11 @@
|
|
|
6
6
|
|
|
7
7
|
import { importPKCS8, SignJWT } from 'jose';
|
|
8
8
|
import snakecaseKeys from 'snakecase-keys';
|
|
9
|
+
import { createCommitBuilder, FetchCommitTransport, resolveCommitTtlSeconds } from './commit';
|
|
9
10
|
import { ApiFetcher } from './fetch';
|
|
10
11
|
import type {
|
|
12
|
+
CommitBuilder,
|
|
13
|
+
CreateCommitOptions,
|
|
11
14
|
CreateRepoOptions,
|
|
12
15
|
FindOneOptions,
|
|
13
16
|
GetBranchDiffOptions,
|
|
@@ -26,8 +29,8 @@ import type {
|
|
|
26
29
|
ListFilesOptions,
|
|
27
30
|
ListFilesResponse,
|
|
28
31
|
OverrideableGitStorageOptions,
|
|
32
|
+
PullUpstreamOptions,
|
|
29
33
|
Repo,
|
|
30
|
-
RepullOptions,
|
|
31
34
|
ValidAPIVersion,
|
|
32
35
|
} from './types';
|
|
33
36
|
|
|
@@ -219,7 +222,7 @@ class RepoImpl implements Repo {
|
|
|
219
222
|
return (await response.json()) as GetCommitResponse;
|
|
220
223
|
}
|
|
221
224
|
|
|
222
|
-
async
|
|
225
|
+
async pullUpstream(options: PullUpstreamOptions): Promise<void> {
|
|
223
226
|
const jwt = await this.generateJWT(this.id, {
|
|
224
227
|
permissions: ['git:write'],
|
|
225
228
|
ttl: options?.ttl ?? 1 * 60 * 60, // 1hr in seconds
|
|
@@ -231,14 +234,33 @@ class RepoImpl implements Repo {
|
|
|
231
234
|
body.ref = options.ref;
|
|
232
235
|
}
|
|
233
236
|
|
|
234
|
-
const response = await this.api.post({ path: 'repos/
|
|
237
|
+
const response = await this.api.post({ path: 'repos/pull-upstream', body }, jwt);
|
|
235
238
|
|
|
236
239
|
if (response.status !== 202) {
|
|
237
|
-
throw new Error(`
|
|
240
|
+
throw new Error(`Pull Upstream failed: ${response.status} ${await response.text()}`);
|
|
238
241
|
}
|
|
239
242
|
|
|
240
243
|
return;
|
|
241
244
|
}
|
|
245
|
+
|
|
246
|
+
/**
 * Starts a streaming commit against this repo. Returns a builder that stages
 * file upserts/deletes and uploads them via the commit-pack endpoint on
 * send(). Token minting is deferred (closure) until the builder sends.
 */
createCommit(options: CreateCommitOptions): CommitBuilder {
  const apiVersion = this.options.apiVersion ?? API_VERSION;
  const apiBaseUrl = this.options.apiBaseUrl ?? API_BASE_URL;
  const ttl = resolveCommitTtlSeconds(options);

  return createCommitBuilder({
    options: { ...options, ttl },
    getAuthToken: () =>
      this.generateJWT(this.id, {
        permissions: ['git:write'],
        ttl,
      }),
    transport: new FetchCommitTransport({ baseUrl: apiBaseUrl, version: apiVersion }),
  });
}
|
|
242
264
|
}
|
|
243
265
|
|
|
244
266
|
export class GitStorage {
|