@pierre/storage 0.0.10 → 0.0.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -51,7 +51,7 @@ The SDK generates secure URLs with JWT authentication for Git operations:
51
51
  ```typescript
52
52
  // Get URL with default permissions (git:write, git:read) and 1-year TTL
53
53
  const url = await repo.getRemoteURL();
54
- // Returns: https://t:JWT@your-name.git.storage/repo-id.git
54
+ // Returns: https://t:JWT@your-name.code.storage/repo-id.git
55
55
 
56
56
  // Configure the Git remote
57
57
  console.log(`Run: git remote add origin ${url}`);
@@ -120,6 +120,88 @@ const commitDiff = await repo.getCommitDiff({
120
120
  });
121
121
  console.log(commitDiff.stats);
122
122
  console.log(commitDiff.files);
123
+
124
+ // Create a commit using the streaming helper
125
+ const fs = await import('node:fs/promises');
126
+ const response = await repo
127
+ .createCommit({
128
+ targetRef: 'refs/heads/main',
129
+ commitMessage: 'Update docs',
130
+ })
131
+ .addFileFromString('docs/changelog.md', '# v2.0.1\n- add streaming SDK\n')
132
+ .addFile('docs/readme.md', await fs.readFile('README.md'))
133
+ .deletePath('docs/legacy.txt')
134
+ .send();
135
+
136
+ console.log(response.commit.commit_sha);
137
+ console.log(response.result.new_sha);
138
+ console.log(response.result.old_sha); // 40 hex chars; all zeroes for brand-new branches
139
+ ```
140
+
141
+ The builder exposes:
142
+
143
+ - `addFile(path, source, options)` to attach bytes, async iterables, readable streams, or buffers.
144
+ - `addFileFromString(path, contents, options)` for UTF-8 text helpers.
145
+ - `deletePath(path)` to remove files or folders.
146
+ - `send()` to finalize the commit and receive metadata about the new commit.
147
+
148
+ `send()` resolves to `{ commit, result }`. The `result` payload mirrors the HTTP response, including
149
+ `old_sha` (always a 40-character hex string; it is all zeroes when the target ref is being created),
150
+ `new_sha`, `status`, and an optional `message`.
151
+
152
+ **Options**
153
+
154
+ - `targetRef` (required): Fully qualified ref that will receive the commit (for example
155
+ `refs/heads/main`).
156
+ - `baseRef` (optional): Branch or commit that must match the remote tip; omit to fast-forward
157
+ unconditionally.
158
+ - `commitMessage` (required): The commit message.
159
+ - `author` and `committer` (optional): Include `name`, `email`, and optional ISO 8601 `date`.
160
+ - `signal` (optional): Abort an in-flight upload with `AbortController`.
161
+
162
+ > Files are chunked into 4 MiB segments under the hood, so you can stream large assets without
163
+ > buffering them entirely in memory. File paths are normalized relative to the repository root.
164
+
165
+ ### Creating a new branch
166
+
167
+ You can create and populate a new branch in a single request by pointing `targetRef` at the new ref
168
+ and setting `baseRef` to the branch (or commit SHA) you want to fork from.
169
+
170
+ ```typescript
171
+ const baseBranch = 'main';
172
+ const featureBranch = 'refs/heads/feature/onboarding';
173
+
174
+ await repo
175
+ .createCommit({
176
+ targetRef: featureBranch,
177
+ baseRef: `refs/heads/${baseBranch}`,
178
+ commitMessage: 'feat: add onboarding copy',
179
+ })
180
+ .addFileFromString('apps/dashboard/Onboarding.tsx', '<Onboarding />\n')
181
+ .send();
182
+ ```
183
+
184
+ > If you omit `baseRef` while referencing a brand-new `targetRef`, the service creates an orphan
185
+ > commit with no parent. The new branch starts from an empty tree, so only the files you include in
186
+ > this commit will exist. Always provide `baseRef` when you need the branch to inherit history or
187
+ > files from another branch.
188
+
189
+ ### Streaming Large Files
190
+
191
+ The commit builder accepts any async iterable of bytes, so you can stream large assets without
192
+ buffering:
193
+
194
+ ```typescript
195
+ import { createReadStream } from 'node:fs';
196
+
197
+ await repo
198
+ .createCommit({
199
+ targetRef: 'refs/heads/assets',
200
+ baseRef: 'refs/heads/main',
201
+ commitMessage: 'Upload latest design bundle',
202
+ })
203
+ .addFile('assets/design-kit.zip', createReadStream('/tmp/design-kit.zip'))
204
+ .send();
123
205
  ```
124
206
 
125
207
  ## API Reference
package/dist/index.cjs CHANGED
@@ -9,6 +9,352 @@ var snakecaseKeys__default = /*#__PURE__*/_interopDefault(snakecaseKeys);
9
9
 
10
10
  // src/index.ts
11
11
 
12
+ // src/commit.ts
13
// Upload payloads are split into fixed-size segments of this many bytes (4 MiB).
var MAX_CHUNK_BYTES = 4 * 1024 * 1024;
// Fallback auth-token lifetime (one hour) when the caller supplies no ttl.
var DEFAULT_TTL_SECONDS = 60 * 60;
// Node's Buffer constructor when present; undefined in browser-like runtimes.
var BufferCtor = globalThis.Buffer;
16
/**
 * Chainable builder that accumulates file operations for a single commit and
 * uploads them through the configured transport when `send()` is called.
 * Instances are single-use: any builder call after `send()` throws.
 */
var CommitBuilderImpl = class {
  options;
  getAuthToken;
  transport;
  operations = [];
  sent = false;
  /**
   * @param deps - { options, getAuthToken, transport }; options must carry a
   *   non-blank `targetRef` and `commitMessage`.
   * @throws {Error} when `targetRef` or `commitMessage` is missing or blank.
   */
  constructor(deps) {
    this.options = { ...deps.options };
    this.getAuthToken = deps.getAuthToken;
    this.transport = deps.transport;
    // Normalize required fields up front so serialization can rely on
    // trimmed, non-empty values.
    const targetRef = this.options.targetRef?.trim();
    const commitMessage = this.options.commitMessage?.trim();
    if (!targetRef) {
      throw new Error("createCommit targetRef is required");
    }
    if (!commitMessage) {
      throw new Error("createCommit commitMessage is required");
    }
    this.options.targetRef = targetRef;
    this.options.commitMessage = commitMessage;
    if (typeof this.options.baseRef === "string") {
      this.options.baseRef = this.options.baseRef.trim();
    }
  }
  /**
   * Queues an upsert of `source` at `path`. The source is not read until
   * `send()` streams it; `options.mode` defaults to a regular file ("100644").
   */
  addFile(path, source, options) {
    this.ensureNotSent();
    const filePath = this.normalizePath(path);
    this.operations.push({
      path: filePath,
      contentId: randomContentId(),
      mode: options?.mode ?? "100644",
      operation: "upsert",
      // Lazy: the byte stream is only materialized during send().
      streamFactory: () => toAsyncIterable(source)
    });
    return this;
  }
  /** Queues a UTF-8 text file. Rejects any non-UTF-8 `options.encoding`. */
  addFileFromString(path, contents, options) {
    const encoding = options?.encoding;
    if (encoding && encoding !== "utf8" && encoding !== "utf-8") {
      throw new Error(`Unsupported encoding "${encoding}". Only UTF-8 is supported.`);
    }
    return this.addFile(path, new TextEncoder().encode(contents), options);
  }
  /** Queues removal of the file or folder at `path`. */
  deletePath(path) {
    this.ensureNotSent();
    const filePath = this.normalizePath(path);
    this.operations.push({
      path: filePath,
      contentId: randomContentId(),
      operation: "delete"
    });
    return this;
  }
  /**
   * Finalizes the builder and ships metadata plus all queued blob streams
   * through the transport. Resolves with the transport's response payload.
   */
  async send() {
    this.ensureNotSent();
    this.sent = true;
    const metadata = this.buildMetadata();
    const blobEntries = [];
    for (const op of this.operations) {
      if (op.operation === "upsert" && op.streamFactory) {
        blobEntries.push({
          contentId: op.contentId,
          chunks: chunkify(op.streamFactory())
        });
      }
    }
    const authorization = await this.getAuthToken();
    return this.transport.send({
      authorization,
      signal: this.options.signal,
      metadata,
      blobs: blobEntries
    });
  }
  /** Builds the snake_case wire metadata describing every queued operation. */
  buildMetadata() {
    const files = [];
    for (const op of this.operations) {
      // Key order matters for the serialized NDJSON line.
      const entry = {
        path: op.path,
        content_id: op.contentId,
        operation: op.operation
      };
      if (op.mode) {
        entry.mode = op.mode;
      }
      files.push(entry);
    }
    const metadata = {
      target_ref: this.options.targetRef,
      commit_message: this.options.commitMessage,
      files
    };
    const { baseRef, author, committer } = this.options;
    if (baseRef) {
      metadata.base_ref = baseRef;
    }
    if (author) {
      metadata.author = { ...author };
    }
    if (committer) {
      metadata.committer = { ...committer };
    }
    return metadata;
  }
  /** @throws {Error} when the builder has already been sent. */
  ensureNotSent() {
    if (this.sent) {
      throw new Error("createCommit builder cannot be reused after send()");
    }
  }
  /**
   * Validates the path and makes it relative to the repository root by
   * stripping a single leading slash.
   */
  normalizePath(path) {
    if (!path || typeof path !== "string" || path.trim() === "") {
      throw new Error("File path must be a non-empty string");
    }
    return path.replace(/^\//, "");
  }
};
128
/**
 * Commit transport that POSTs the NDJSON message stream to the
 * `/repos/commit-pack` endpoint via the global `fetch`.
 */
var FetchCommitTransport = class {
  url;
  /** @param config - { baseUrl, version }; trailing slashes on baseUrl are stripped. */
  constructor(config) {
    const trimmedBase = config.baseUrl.replace(/\/+$/, "");
    this.url = `${trimmedBase}/api/v${config.version}/repos/commit-pack`;
  }
  /**
   * Streams the metadata line plus every blob chunk and returns the parsed
   * JSON response body.
   * @throws {Error} when the server responds with a non-2xx status.
   */
  async send(request) {
    const bodyIterable = buildMessageIterable(request.metadata, request.blobs);
    const body = toRequestBody(bodyIterable);
    const response = await fetch(this.url, {
      method: "POST",
      headers: {
        Authorization: `Bearer ${request.authorization}`,
        "Content-Type": "application/x-ndjson",
        Accept: "application/json"
      },
      body,
      // Required by the Fetch spec (and enforced by Node's undici) whenever
      // the request body is a ReadableStream; without it the request is
      // rejected with a TypeError before anything is sent.
      duplex: "half",
      signal: request.signal
    });
    if (!response.ok) {
      const text = await response.text();
      throw new Error(`createCommit request failed (${response.status}): ${text}`);
    }
    return await response.json();
  }
};
154
/**
 * Adapts an async iterable of byte chunks into a ReadableStream when the
 * runtime provides one; otherwise hands back the iterable unchanged so the
 * caller can use it directly as a request body.
 */
function toRequestBody(iterable) {
  const readableStreamCtor = globalThis.ReadableStream;
  if (typeof readableStreamCtor !== "function") {
    return iterable;
  }
  const iterator = iterable[Symbol.asyncIterator]();
  return new readableStreamCtor({
    // Pull-based bridge: each pull advances the source iterator exactly once.
    async pull(controller) {
      const step = await iterator.next();
      if (step.done) {
        controller.close();
      } else {
        controller.enqueue(step.value);
      }
    },
    // Propagate consumer cancellation back into the source iterator.
    async cancel(reason) {
      if (typeof iterator.return === "function") {
        await iterator.return(reason);
      }
    }
  });
}
176
/**
 * Produces the NDJSON upload body: first a `{ metadata }` line, then one
 * `{ blob_chunk }` line per chunk of every blob, preserving blob order.
 */
function buildMessageIterable(metadata, blobs) {
  const encoder = new TextEncoder();
  const encodeLine = (value) => encoder.encode(`${JSON.stringify(value)}\n`);
  return {
    async *[Symbol.asyncIterator]() {
      yield encodeLine({ metadata });
      for (const blob of blobs) {
        for await (const segment of blob.chunks) {
          yield encodeLine({
            blob_chunk: {
              content_id: blob.contentId,
              data: base64Encode(segment.chunk),
              eof: segment.eof
            }
          });
        }
      }
    }
  };
}
198
/**
 * Re-chunks an arbitrary byte stream into segments of at most
 * MAX_CHUNK_BYTES. Small incoming pieces are coalesced and oversized pieces
 * are split. Exactly one segment carries `eof: true` (the final one); an
 * empty source still yields a single empty EOF segment.
 */
async function* chunkify(source) {
  let carry = null; // bytes waiting to be filled up to MAX_CHUNK_BYTES
  let emitted = false; // whether any segment has been yielded yet
  for await (const incoming of source) {
    // Flush a carry that is already full before appending more bytes.
    if (carry && carry.byteLength === MAX_CHUNK_BYTES) {
      yield { chunk: carry, eof: false };
      emitted = true;
      carry = null;
    }
    let cursor = carry ? concatChunks(carry, incoming) : incoming;
    carry = null;
    // Emit full-size slices; whatever is <= MAX_CHUNK_BYTES stays in carry.
    while (cursor.byteLength > MAX_CHUNK_BYTES) {
      yield { chunk: cursor.slice(0, MAX_CHUNK_BYTES), eof: false };
      emitted = true;
      cursor = cursor.slice(MAX_CHUNK_BYTES);
    }
    carry = cursor;
  }
  if (carry) {
    yield { chunk: carry, eof: true };
    emitted = true;
  }
  if (!emitted) {
    yield { chunk: new Uint8Array(0), eof: true };
  }
}
227
/**
 * Normalizes any supported file source into an async iterable of Uint8Array
 * values. Supports strings (encoded as UTF-8), Uint8Array/ArrayBuffer/other
 * ArrayBuffer views, Blob-likes (via their stream), and arbitrary sync or
 * async iterables of byte chunks.
 * @throws {Error} for unrecognized sources.
 */
async function* toAsyncIterable(source) {
  if (typeof source === "string") {
    yield new TextEncoder().encode(source);
    return;
  }
  if (source instanceof Uint8Array) {
    yield source;
    return;
  }
  if (source instanceof ArrayBuffer) {
    yield new Uint8Array(source);
    return;
  }
  if (ArrayBuffer.isView(source)) {
    // Re-wrap other typed arrays / DataViews without copying the buffer.
    yield new Uint8Array(source.buffer, source.byteOffset, source.byteLength);
    return;
  }
  if (isBlobLike(source)) {
    const blobStream = source.stream();
    // Node's Blob streams are async iterable; browser ones need a reader.
    if (isAsyncIterable(blobStream)) {
      for await (const piece of blobStream) {
        yield ensureUint8Array(piece);
      }
      return;
    }
    if (isReadableStreamLike(blobStream)) {
      yield* readReadableStream(blobStream);
      return;
    }
  }
  if (isAsyncIterable(source)) {
    for await (const piece of source) {
      yield ensureUint8Array(piece);
    }
    return;
  }
  if (isIterable(source)) {
    for (const piece of source) {
      yield ensureUint8Array(piece);
    }
    return;
  }
  throw new Error("Unsupported file source for createCommit");
}
271
/**
 * Drains a WHATWG ReadableStream, yielding each non-undefined value as a
 * Uint8Array. The reader lock is always released, even on error.
 */
async function* readReadableStream(stream) {
  const reader = stream.getReader();
  try {
    for (;;) {
      const result = await reader.read();
      if (result.done) {
        return;
      }
      if (result.value !== void 0) {
        yield ensureUint8Array(result.value);
      }
    }
  } finally {
    reader.releaseLock?.();
  }
}
287
/**
 * Coerces a single chunk into a Uint8Array, sharing the underlying buffer
 * where possible. Accepts Uint8Array, ArrayBuffer, any ArrayBuffer view,
 * UTF-8 strings, and Node Buffers; everything else is rejected.
 * @throws {Error} for non-binary chunk types.
 */
function ensureUint8Array(value) {
  if (value instanceof Uint8Array) return value;
  if (value instanceof ArrayBuffer) return new Uint8Array(value);
  if (ArrayBuffer.isView(value)) {
    return new Uint8Array(value.buffer, value.byteOffset, value.byteLength);
  }
  if (typeof value === "string") return new TextEncoder().encode(value);
  // Node Buffers are Uint8Arrays, so this only matters for Buffer polyfills.
  if (BufferCtor && BufferCtor.isBuffer(value)) return value;
  throw new Error("Unsupported chunk type; expected binary data");
}
305
// --- Structural type guards used by the commit upload pipeline ---

// Shared precondition: a non-null object (typeof null is "object").
function isNonNullObject(value) {
  return typeof value === "object" && value !== null;
}
/** Blob-ish: exposes a `stream()` method (covers DOM Blob/File and Node Blob). */
function isBlobLike(value) {
  return isNonNullObject(value) && typeof value.stream === "function";
}
/** WHATWG ReadableStream-ish: exposes `getReader()`. */
function isReadableStreamLike(value) {
  return isNonNullObject(value) && typeof value.getReader === "function";
}
/** Implements the async iteration protocol. */
function isAsyncIterable(value) {
  return isNonNullObject(value) && Symbol.asyncIterator in value;
}
/** Implements the sync iteration protocol (strings excluded by the object check). */
function isIterable(value) {
  return isNonNullObject(value) && Symbol.iterator in value;
}
317
/**
 * Concatenates two byte arrays, returning the non-empty operand unchanged
 * when the other is empty to avoid a needless copy.
 */
function concatChunks(a, b) {
  if (a.byteLength === 0) return b;
  if (b.byteLength === 0) return a;
  const joined = new Uint8Array(a.byteLength + b.byteLength);
  joined.set(a, 0);
  joined.set(b, a.byteLength);
  return joined;
}
329
/**
 * Base64-encodes bytes, preferring Node's Buffer and falling back to the
 * global `btoa`.
 * @throws {Error} when neither encoding facility exists.
 */
function base64Encode(bytes) {
  if (BufferCtor) {
    return BufferCtor.from(bytes).toString("base64");
  }
  const btoaFn = globalThis.btoa;
  if (typeof btoaFn !== "function") {
    throw new Error("Base64 encoding is not supported in this environment");
  }
  // btoa expects a "binary string": one char code per byte.
  let binary = "";
  for (let i = 0; i < bytes.byteLength; i++) {
    binary += String.fromCharCode(bytes[i]);
  }
  return btoaFn(binary);
}
343
/**
 * Returns a unique content id: a UUID when Web Crypto is available,
 * otherwise a `cid-<timestamp36>-<random36>` fallback. Uniqueness of the
 * fallback is best-effort only.
 */
function randomContentId() {
  const cryptoObj = globalThis.crypto;
  if (typeof cryptoObj?.randomUUID === "function") {
    return cryptoObj.randomUUID();
  }
  const stamp = Date.now().toString(36);
  const entropy = Math.random().toString(36).slice(2);
  return `cid-${stamp}-${entropy}`;
}
351
/**
 * Factory for the streaming commit builder; keeps the concrete class
 * private to this module.
 */
function createCommitBuilder(deps) {
  const builder = new CommitBuilderImpl(deps);
  return builder;
}
354
/**
 * Returns the caller-supplied positive ttl (seconds) or the module default.
 * Non-numeric, NaN, zero, and negative values all fall back.
 */
function resolveCommitTtlSeconds(options) {
  const ttl = options?.ttl;
  if (typeof ttl === "number" && ttl > 0) {
    return ttl;
  }
  return DEFAULT_TTL_SECONDS;
}
357
+
12
358
  // src/fetch.ts
13
359
  var ApiFetcher = class {
14
360
  constructor(API_BASE_URL2, version) {
@@ -238,8 +584,8 @@ async function validateWebhook(payload, headers, secret, options = {}) {
238
584
  }
239
585
 
240
586
  // src/index.ts
241
- var API_BASE_URL = "https://api.git.storage";
242
- var STORAGE_BASE_URL = "git.storage";
587
+ var API_BASE_URL = "https://api.code.storage";
588
+ var STORAGE_BASE_URL = "code.storage";
243
589
  var API_VERSION = 1;
244
590
  var apiInstanceMap = /* @__PURE__ */ new Map();
245
591
  function getApiInstance(baseUrl, version) {
@@ -387,6 +733,22 @@ var RepoImpl = class {
387
733
  }
388
734
  return;
389
735
  }
736
+ createCommit(options) {
737
+ const version = this.options.apiVersion ?? API_VERSION;
738
+ const baseUrl = this.options.apiBaseUrl ?? API_BASE_URL;
739
+ const transport = new FetchCommitTransport({ baseUrl, version });
740
+ const ttlSeconds = resolveCommitTtlSeconds(options);
741
+ const builderOptions = { ...options, ttl: ttlSeconds };
742
+ const getAuthToken = () => this.generateJWT(this.id, {
743
+ permissions: ["git:write"],
744
+ ttl: ttlSeconds
745
+ });
746
+ return createCommitBuilder({
747
+ options: builderOptions,
748
+ getAuthToken,
749
+ transport
750
+ });
751
+ }
390
752
  };
391
753
  var GitStorage = class _GitStorage {
392
754
  static overrides = {};