@prmichaelsen/firebase-admin-sdk-v8 2.2.3 → 2.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -7,21 +7,36 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
7
7
 
8
8
  ## [Unreleased]
9
9
 
10
- ## [2.2.3] - 2026-02-14
10
+ ## [2.3.0] - 2026-02-14
11
11
 
12
12
  ### Added
13
- - Comprehensive unit tests for Storage module (63 new tests)
13
+ - **Resumable uploads** with `uploadFileResumable()` for large files
14
+ - **True streaming support** with ReadableStream (no memory limit)
15
+ - Progress tracking with callbacks for resumable uploads
16
+ - Resume capability for interrupted uploads
17
+ - Chunked uploads with configurable chunk size (default 256KB)
18
+ - Comprehensive unit tests for Storage module (68 new tests total)
14
19
  - Unit tests for `storage/client.ts` (31 tests, 88.11% coverage)
15
20
  - Unit tests for `storage/signed-urls.ts` (32 tests, 100% coverage)
21
+ - Unit tests for `storage/resumable-upload.ts` (16 tests, 88.03% coverage)
22
+ - E2E tests for resumable uploads with real Firebase (11 tests)
23
+ - E2E tests for ReadableStream uploads (validated with 1MB files and puppy.png)
24
+ - E2E test for complex nested arrays (message content scenario)
16
25
  - Agent Context Protocol (ACP) initialization documentation
26
+ - `.env.example` file with all environment variables documented
17
27
 
18
28
  ### Changed
19
- - Improved overall test coverage from 76.15% to 96.02% (+19.87%)
20
- - Storage module coverage improved from 0% to 91.71%
21
- - Total test count increased from 339 to 402 tests
29
+ - **BREAKING**: Default storage bucket format changed from `.appspot.com` to `.firebasestorage.app`
30
+ - Improved overall test coverage from 76.15% to 94.87% (+18.72%)
31
+ - Storage module coverage improved from 0% to 89.89%
32
+ - Total test count increased from 339 to 418 tests (+79 tests)
33
+ - Fixed storage e2e tests to properly initialize Firebase app
22
34
 
23
35
  ### Fixed
24
36
  - Storage module now has proper unit test coverage (previously only e2e tests)
37
+ - Storage e2e tests now initialize app correctly
38
+ - Bucket naming updated to match new Firebase format
39
+ - Array serialization confirmed working (not serialized to JSON strings)
25
40
 
26
41
  ## [2.2.2] - 2026-02-13
27
42
 
package/README.md CHANGED
@@ -352,6 +352,88 @@ Clear the cached access token.
352
352
  clearTokenCache();
353
353
  ```
354
354
 
355
+ ### Storage - Resumable Uploads
356
+
357
+ #### `uploadFileResumable(bucket, path, data, contentType, options?): Promise<FileMetadata>`
358
+
359
+ Upload large files with resumable upload support. Suitable for files >10MB, unreliable networks, or when progress tracking is needed.
360
+
361
+ **Features:**
362
+ - ✅ Chunked uploads (configurable chunk size)
363
+ - ✅ Progress tracking with callbacks
364
+ - ✅ Resume interrupted uploads
365
+ - ✅ Memory efficient (doesn't load entire file at once)
366
+ - ✅ Automatic retry on chunk failure
367
+
368
+ ```typescript
369
+ import { uploadFileResumable } from '@prmichaelsen/firebase-admin-sdk-v8';
370
+
371
+ // Upload large file with progress tracking
372
+ const data = await fetch('https://example.com/large-video.mp4');
373
+ const buffer = await data.arrayBuffer();
374
+
375
+ const metadata = await uploadFileResumable(
376
+ 'my-bucket.appspot.com',
377
+ 'videos/large.mp4',
378
+ buffer,
379
+ 'video/mp4',
380
+ {
381
+ chunkSize: 512 * 1024, // 512KB chunks (default: 256KB)
382
+ onProgress: (uploaded, total) => {
383
+ const percent = (uploaded / total * 100).toFixed(2);
384
+ console.log(`Upload progress: ${percent}%`);
385
+ },
386
+ metadata: { userId: '123', category: 'videos' },
387
+ }
388
+ );
389
+
390
+ console.log('Upload complete:', metadata);
391
+ ```
392
+
393
+ **Resume interrupted upload:**
394
+
395
+ ```typescript
396
+ let sessionUri: string;
397
+
398
+ try {
399
+ const metadata = await uploadFileResumable(
400
+ bucket,
401
+ path,
402
+ data,
403
+ contentType,
404
+ {
405
+ onProgress: (uploaded, total) => {
406
+ // Save session URI for resume
407
+ sessionUri = /* get from response */;
408
+ },
409
+ }
410
+ );
411
+ } catch (error) {
412
+ // Resume from where it left off
413
+ const metadata = await uploadFileResumable(
414
+ bucket,
415
+ path,
416
+ data,
417
+ contentType,
418
+ {
419
+ resumeToken: sessionUri, // Resume from previous session
420
+ }
421
+ );
422
+ }
423
+ ```
424
+
425
+ **When to use:**
426
+ - Files larger than 10MB
427
+ - Unreliable network conditions
428
+ - Need progress reporting
429
+ - Files that may exceed memory limits
430
+
431
+ **When to use simple `uploadFile()` instead:**
432
+ - Small files (<10MB)
433
+ - Reliable network
434
+ - No progress tracking needed
435
+ - Edge runtime with memory constraints
436
+
355
437
  ## 💡 Examples
356
438
 
357
439
  See [EXAMPLES.md](./EXAMPLES.md) for comprehensive examples including:
package/dist/index.d.mts CHANGED
@@ -771,6 +771,63 @@ interface SignedUrlOptions {
771
771
  */
772
772
  declare function generateSignedUrl(path: string, options: SignedUrlOptions): Promise<string>;
773
773
 
774
+ /**
775
+ * Firebase Storage Resumable Uploads
776
+ * Implements Google Cloud Storage resumable upload protocol for large files
777
+ */
778
+
779
+ /**
780
+ * Options for resumable uploads
781
+ */
782
+ interface ResumableUploadOptions extends UploadOptions {
783
+ chunkSize?: number;
784
+ onProgress?: (uploaded: number, total: number) => void;
785
+ resumeToken?: string;
786
+ totalSize?: number;
787
+ }
788
+ /**
789
+ * Upload a file with resumable upload support
790
+ * Suitable for large files and unreliable networks
791
+ *
792
+ * @param path - File path in storage
793
+ * @param data - File data as ArrayBuffer, Uint8Array, Blob, or ReadableStream
794
+ * @param contentType - MIME type of the file
795
+ * @param options - Upload options
796
+ * @returns File metadata
797
+ *
798
+ * @example
799
+ * ```typescript
800
+ * // Upload from buffer
801
+ * const data = await fetch('https://example.com/large-video.mp4');
802
+ * const buffer = await data.arrayBuffer();
803
+ *
804
+ * const metadata = await uploadFileResumable(
805
+ * 'videos/large.mp4',
806
+ * buffer,
807
+ * 'video/mp4',
808
+ * {
809
+ * chunkSize: 512 * 1024, // 512KB chunks
810
+ * onProgress: (uploaded, total) => {
811
+ * console.log(`Progress: ${(uploaded / total * 100).toFixed(2)}%`);
812
+ * },
813
+ * }
814
+ * );
815
+ *
816
+ * // Upload from stream (true streaming - no memory limit)
817
+ * const response = await fetch('https://example.com/huge-file.mp4');
818
+ * const metadata = await uploadFileResumable(
819
+ * 'videos/huge.mp4',
820
+ * response.body!, // ReadableStream
821
+ * 'video/mp4',
822
+ * {
823
+ * totalSize: parseInt(response.headers.get('content-length')!),
824
+ * chunkSize: 1024 * 1024, // 1MB chunks
825
+ * }
826
+ * );
827
+ * ```
828
+ */
829
+ declare function uploadFileResumable(path: string, data: ArrayBuffer | Uint8Array | Blob | ReadableStream<Uint8Array>, contentType: string, options?: ResumableUploadOptions): Promise<FileMetadata>;
830
+
774
831
  /**
775
832
  * Firebase Admin SDK v8 - Field Value Helpers
776
833
  * Special field values for Firestore operations
@@ -868,4 +925,4 @@ declare function getAdminAccessToken(): Promise<string>;
868
925
  */
869
926
  declare function clearTokenCache(): void;
870
927
 
871
- export { type BatchWrite, type BatchWriteResult, type CustomClaims, type CustomTokenSignInResponse, type DataObject, type DecodedIdToken, type DocumentReference, type DownloadOptions, FieldValue, type FieldValue$1 as FieldValueSentinel, FieldValueType, type FileMetadata, type FirestoreDocument, type FirestoreValue, type ListFilesResult, type ListOptions, type QueryFilter, type QueryOptions, type QueryOrder, type ServiceAccount, type SetOptions, type SignedUrlOptions, type TokenResponse, type UpdateOptions, type UploadOptions, type UserInfo, type WhereFilterOp, addDocument, batchWrite, clearConfig, clearTokenCache, countDocuments, createCustomToken, deleteDocument, deleteFile, downloadFile, fileExists, generateSignedUrl, getAdminAccessToken, getAuth, getConfig, getDocument, getFileMetadata, getProjectId, getServiceAccount, getUserFromToken, initializeApp, iterateCollection, listDocuments, listFiles, queryDocuments, setDocument, signInWithCustomToken, updateDocument, uploadFile, verifyIdToken };
928
+ export { type BatchWrite, type BatchWriteResult, type CustomClaims, type CustomTokenSignInResponse, type DataObject, type DecodedIdToken, type DocumentReference, type DownloadOptions, FieldValue, type FieldValue$1 as FieldValueSentinel, FieldValueType, type FileMetadata, type FirestoreDocument, type FirestoreValue, type ListFilesResult, type ListOptions, type QueryFilter, type QueryOptions, type QueryOrder, type ResumableUploadOptions, type ServiceAccount, type SetOptions, type SignedUrlOptions, type TokenResponse, type UpdateOptions, type UploadOptions, type UserInfo, type WhereFilterOp, addDocument, batchWrite, clearConfig, clearTokenCache, countDocuments, createCustomToken, deleteDocument, deleteFile, downloadFile, fileExists, generateSignedUrl, getAdminAccessToken, getAuth, getConfig, getDocument, getFileMetadata, getProjectId, getServiceAccount, getUserFromToken, initializeApp, iterateCollection, listDocuments, listFiles, queryDocuments, setDocument, signInWithCustomToken, updateDocument, uploadFile, uploadFileResumable, verifyIdToken };
package/dist/index.d.ts CHANGED
@@ -771,6 +771,63 @@ interface SignedUrlOptions {
771
771
  */
772
772
  declare function generateSignedUrl(path: string, options: SignedUrlOptions): Promise<string>;
773
773
 
774
+ /**
775
+ * Firebase Storage Resumable Uploads
776
+ * Implements Google Cloud Storage resumable upload protocol for large files
777
+ */
778
+
779
+ /**
780
+ * Options for resumable uploads
781
+ */
782
+ interface ResumableUploadOptions extends UploadOptions {
783
+ chunkSize?: number;
784
+ onProgress?: (uploaded: number, total: number) => void;
785
+ resumeToken?: string;
786
+ totalSize?: number;
787
+ }
788
+ /**
789
+ * Upload a file with resumable upload support
790
+ * Suitable for large files and unreliable networks
791
+ *
792
+ * @param path - File path in storage
793
+ * @param data - File data as ArrayBuffer, Uint8Array, Blob, or ReadableStream
794
+ * @param contentType - MIME type of the file
795
+ * @param options - Upload options
796
+ * @returns File metadata
797
+ *
798
+ * @example
799
+ * ```typescript
800
+ * // Upload from buffer
801
+ * const data = await fetch('https://example.com/large-video.mp4');
802
+ * const buffer = await data.arrayBuffer();
803
+ *
804
+ * const metadata = await uploadFileResumable(
805
+ * 'videos/large.mp4',
806
+ * buffer,
807
+ * 'video/mp4',
808
+ * {
809
+ * chunkSize: 512 * 1024, // 512KB chunks
810
+ * onProgress: (uploaded, total) => {
811
+ * console.log(`Progress: ${(uploaded / total * 100).toFixed(2)}%`);
812
+ * },
813
+ * }
814
+ * );
815
+ *
816
+ * // Upload from stream (true streaming - no memory limit)
817
+ * const response = await fetch('https://example.com/huge-file.mp4');
818
+ * const metadata = await uploadFileResumable(
819
+ * 'videos/huge.mp4',
820
+ * response.body!, // ReadableStream
821
+ * 'video/mp4',
822
+ * {
823
+ * totalSize: parseInt(response.headers.get('content-length')!),
824
+ * chunkSize: 1024 * 1024, // 1MB chunks
825
+ * }
826
+ * );
827
+ * ```
828
+ */
829
+ declare function uploadFileResumable(path: string, data: ArrayBuffer | Uint8Array | Blob | ReadableStream<Uint8Array>, contentType: string, options?: ResumableUploadOptions): Promise<FileMetadata>;
830
+
774
831
  /**
775
832
  * Firebase Admin SDK v8 - Field Value Helpers
776
833
  * Special field values for Firestore operations
@@ -868,4 +925,4 @@ declare function getAdminAccessToken(): Promise<string>;
868
925
  */
869
926
  declare function clearTokenCache(): void;
870
927
 
871
- export { type BatchWrite, type BatchWriteResult, type CustomClaims, type CustomTokenSignInResponse, type DataObject, type DecodedIdToken, type DocumentReference, type DownloadOptions, FieldValue, type FieldValue$1 as FieldValueSentinel, FieldValueType, type FileMetadata, type FirestoreDocument, type FirestoreValue, type ListFilesResult, type ListOptions, type QueryFilter, type QueryOptions, type QueryOrder, type ServiceAccount, type SetOptions, type SignedUrlOptions, type TokenResponse, type UpdateOptions, type UploadOptions, type UserInfo, type WhereFilterOp, addDocument, batchWrite, clearConfig, clearTokenCache, countDocuments, createCustomToken, deleteDocument, deleteFile, downloadFile, fileExists, generateSignedUrl, getAdminAccessToken, getAuth, getConfig, getDocument, getFileMetadata, getProjectId, getServiceAccount, getUserFromToken, initializeApp, iterateCollection, listDocuments, listFiles, queryDocuments, setDocument, signInWithCustomToken, updateDocument, uploadFile, verifyIdToken };
928
+ export { type BatchWrite, type BatchWriteResult, type CustomClaims, type CustomTokenSignInResponse, type DataObject, type DecodedIdToken, type DocumentReference, type DownloadOptions, FieldValue, type FieldValue$1 as FieldValueSentinel, FieldValueType, type FileMetadata, type FirestoreDocument, type FirestoreValue, type ListFilesResult, type ListOptions, type QueryFilter, type QueryOptions, type QueryOrder, type ResumableUploadOptions, type ServiceAccount, type SetOptions, type SignedUrlOptions, type TokenResponse, type UpdateOptions, type UploadOptions, type UserInfo, type WhereFilterOp, addDocument, batchWrite, clearConfig, clearTokenCache, countDocuments, createCustomToken, deleteDocument, deleteFile, downloadFile, fileExists, generateSignedUrl, getAdminAccessToken, getAuth, getConfig, getDocument, getFileMetadata, getProjectId, getServiceAccount, getUserFromToken, initializeApp, iterateCollection, listDocuments, listFiles, queryDocuments, setDocument, signInWithCustomToken, updateDocument, uploadFile, uploadFileResumable, verifyIdToken };
package/dist/index.js CHANGED
@@ -49,6 +49,7 @@ __export(index_exports, {
49
49
  signInWithCustomToken: () => signInWithCustomToken,
50
50
  updateDocument: () => updateDocument,
51
51
  uploadFile: () => uploadFile,
52
+ uploadFileResumable: () => uploadFileResumable,
52
53
  verifyIdToken: () => verifyIdToken
53
54
  });
54
55
  module.exports = __toCommonJS(index_exports);
@@ -1179,7 +1180,7 @@ function getDefaultBucket() {
1179
1180
  return customBucket;
1180
1181
  }
1181
1182
  const projectId = getProjectId();
1182
- return `${projectId}.appspot.com`;
1183
+ return `${projectId}.firebasestorage.app`;
1183
1184
  }
1184
1185
  function detectContentType(filename) {
1185
1186
  const ext = filename.split(".").pop()?.toLowerCase();
@@ -1478,6 +1479,204 @@ async function generateSignedUrl(path, options) {
1478
1479
  const signedUrl = `https://storage.googleapis.com${canonicalUri}?${canonicalQueryString}&X-Goog-Signature=${signature}`;
1479
1480
  return signedUrl;
1480
1481
  }
1482
+
1483
+ // src/storage/resumable-upload.ts
1484
+ var UPLOAD_API_BASE2 = "https://storage.googleapis.com/upload/storage/v1";
1485
+ function getDefaultBucket2() {
1486
+ const customBucket = process.env.FIREBASE_STORAGE_BUCKET;
1487
+ if (customBucket) {
1488
+ return customBucket;
1489
+ }
1490
+ const projectId = getProjectId();
1491
+ return `${projectId}.firebasestorage.app`;
1492
+ }
1493
+ async function initiateResumableUpload(bucket, path, contentType, totalSize, metadata) {
1494
+ const token = await getAdminAccessToken();
1495
+ const url = `${UPLOAD_API_BASE2}/b/${encodeURIComponent(bucket)}/o?uploadType=resumable&name=${encodeURIComponent(path)}`;
1496
+ const requestBody = {};
1497
+ if (metadata) {
1498
+ requestBody.metadata = metadata;
1499
+ }
1500
+ const response = await fetch(url, {
1501
+ method: "POST",
1502
+ headers: {
1503
+ "Authorization": `Bearer ${token}`,
1504
+ "Content-Type": "application/json",
1505
+ "X-Upload-Content-Type": contentType,
1506
+ "X-Upload-Content-Length": totalSize.toString()
1507
+ },
1508
+ body: Object.keys(requestBody).length > 0 ? JSON.stringify(requestBody) : void 0
1509
+ });
1510
+ if (!response.ok) {
1511
+ const errorText = await response.text();
1512
+ throw new Error(`Failed to initiate resumable upload: ${response.status} ${errorText}`);
1513
+ }
1514
+ const sessionUri = response.headers.get("Location");
1515
+ if (!sessionUri) {
1516
+ throw new Error("No session URI returned from resumable upload initiation");
1517
+ }
1518
+ return sessionUri;
1519
+ }
1520
+ async function uploadChunk(sessionUri, chunk, start, total) {
1521
+ const end = start + chunk.byteLength - 1;
1522
+ const response = await fetch(sessionUri, {
1523
+ method: "PUT",
1524
+ headers: {
1525
+ "Content-Length": chunk.byteLength.toString(),
1526
+ "Content-Range": `bytes ${start}-${end}/${total}`
1527
+ },
1528
+ body: chunk
1529
+ });
1530
+ if (response.status === 200 || response.status === 201) {
1531
+ const metadata = await response.json();
1532
+ return { complete: true, metadata };
1533
+ } else if (response.status === 308) {
1534
+ return { complete: false };
1535
+ } else {
1536
+ const errorText = await response.text();
1537
+ throw new Error(`Chunk upload failed: ${response.status} ${errorText}`);
1538
+ }
1539
+ }
1540
+ async function getUploadProgress(sessionUri) {
1541
+ const response = await fetch(sessionUri, {
1542
+ method: "PUT",
1543
+ headers: {
1544
+ "Content-Length": "0",
1545
+ "Content-Range": "bytes */*"
1546
+ }
1547
+ });
1548
+ if (response.status === 308) {
1549
+ const range = response.headers.get("Range");
1550
+ if (range) {
1551
+ const match = range.match(/bytes=0-(\d+)/);
1552
+ if (match) {
1553
+ return parseInt(match[1], 10) + 1;
1554
+ }
1555
+ }
1556
+ } else if (response.status === 200 || response.status === 201) {
1557
+ const metadata = await response.json();
1558
+ return parseInt(metadata.size, 10);
1559
+ }
1560
+ return 0;
1561
+ }
1562
+ async function toArrayBuffer(data) {
1563
+ if (data instanceof ArrayBuffer) {
1564
+ return data;
1565
+ } else if (data instanceof Uint8Array) {
1566
+ const buffer = new ArrayBuffer(data.byteLength);
1567
+ new Uint8Array(buffer).set(data);
1568
+ return buffer;
1569
+ } else if (data instanceof Blob) {
1570
+ return await data.arrayBuffer();
1571
+ }
1572
+ throw new Error("Unsupported data type");
1573
+ }
1574
+ async function readChunkFromStream(reader, chunkSize) {
1575
+ const chunks = [];
1576
+ let totalBytes = 0;
1577
+ while (totalBytes < chunkSize) {
1578
+ const { value, done } = await reader.read();
1579
+ if (done) {
1580
+ if (totalBytes === 0) {
1581
+ return { chunk: null, done: true };
1582
+ }
1583
+ break;
1584
+ }
1585
+ if (value) {
1586
+ chunks.push(value);
1587
+ totalBytes += value.byteLength;
1588
+ if (totalBytes >= chunkSize) {
1589
+ break;
1590
+ }
1591
+ }
1592
+ }
1593
+ const combined = new Uint8Array(totalBytes);
1594
+ let offset = 0;
1595
+ for (const chunk of chunks) {
1596
+ combined.set(chunk, offset);
1597
+ offset += chunk.byteLength;
1598
+ }
1599
+ return { chunk: combined.buffer, done: false };
1600
+ }
1601
+ async function uploadFileResumable(path, data, contentType, options = {}) {
1602
+ const bucket = getDefaultBucket2();
1603
+ const chunkSize = options.chunkSize || 256 * 1024;
1604
+ if (data instanceof ReadableStream) {
1605
+ return await uploadFromStream(bucket, path, data, contentType, chunkSize, options);
1606
+ }
1607
+ const buffer = await toArrayBuffer(data);
1608
+ const total = buffer.byteLength;
1609
+ let sessionUri = options.resumeToken;
1610
+ let uploaded = 0;
1611
+ if (!sessionUri) {
1612
+ sessionUri = await initiateResumableUpload(
1613
+ bucket,
1614
+ path,
1615
+ contentType,
1616
+ total,
1617
+ options.metadata
1618
+ );
1619
+ } else {
1620
+ uploaded = await getUploadProgress(sessionUri);
1621
+ if (options.onProgress) {
1622
+ options.onProgress(uploaded, total);
1623
+ }
1624
+ }
1625
+ while (uploaded < total) {
1626
+ const end = Math.min(uploaded + chunkSize, total);
1627
+ const chunk = buffer.slice(uploaded, end);
1628
+ const result = await uploadChunk(sessionUri, chunk, uploaded, total);
1629
+ uploaded = end;
1630
+ if (options.onProgress) {
1631
+ options.onProgress(uploaded, total);
1632
+ }
1633
+ if (result.complete) {
1634
+ return result.metadata;
1635
+ }
1636
+ }
1637
+ throw new Error("Upload incomplete - all chunks sent but no completion response");
1638
+ }
1639
+ async function uploadFromStream(bucket, path, stream, contentType, chunkSize, options) {
1640
+ const total = options.totalSize || -1;
1641
+ if (total === -1) {
1642
+ throw new Error("totalSize is required when uploading from ReadableStream");
1643
+ }
1644
+ const sessionUri = await initiateResumableUpload(
1645
+ bucket,
1646
+ path,
1647
+ contentType,
1648
+ total,
1649
+ options.metadata
1650
+ );
1651
+ const reader = stream.getReader();
1652
+ let uploaded = 0;
1653
+ let lastResult = null;
1654
+ try {
1655
+ while (true) {
1656
+ const { chunk, done } = await readChunkFromStream(reader, chunkSize);
1657
+ if (done || !chunk) {
1658
+ if (lastResult && lastResult.complete) {
1659
+ return lastResult.metadata;
1660
+ }
1661
+ if (uploaded === total && lastResult) {
1662
+ return lastResult.metadata;
1663
+ }
1664
+ break;
1665
+ }
1666
+ lastResult = await uploadChunk(sessionUri, chunk, uploaded, total);
1667
+ uploaded += chunk.byteLength;
1668
+ if (options.onProgress) {
1669
+ options.onProgress(uploaded, total);
1670
+ }
1671
+ if (lastResult.complete) {
1672
+ return lastResult.metadata;
1673
+ }
1674
+ }
1675
+ throw new Error("Stream ended but upload not complete");
1676
+ } finally {
1677
+ reader.releaseLock();
1678
+ }
1679
+ }
1481
1680
  // Annotate the CommonJS export names for ESM import in node:
1482
1681
  0 && (module.exports = {
1483
1682
  FieldValue,
@@ -1509,5 +1708,6 @@ async function generateSignedUrl(path, options) {
1509
1708
  signInWithCustomToken,
1510
1709
  updateDocument,
1511
1710
  uploadFile,
1711
+ uploadFileResumable,
1512
1712
  verifyIdToken
1513
1713
  });
package/dist/index.mjs CHANGED
@@ -1124,7 +1124,7 @@ function getDefaultBucket() {
1124
1124
  return customBucket;
1125
1125
  }
1126
1126
  const projectId = getProjectId();
1127
- return `${projectId}.appspot.com`;
1127
+ return `${projectId}.firebasestorage.app`;
1128
1128
  }
1129
1129
  function detectContentType(filename) {
1130
1130
  const ext = filename.split(".").pop()?.toLowerCase();
@@ -1423,6 +1423,204 @@ async function generateSignedUrl(path, options) {
1423
1423
  const signedUrl = `https://storage.googleapis.com${canonicalUri}?${canonicalQueryString}&X-Goog-Signature=${signature}`;
1424
1424
  return signedUrl;
1425
1425
  }
1426
+
1427
+ // src/storage/resumable-upload.ts
1428
+ var UPLOAD_API_BASE2 = "https://storage.googleapis.com/upload/storage/v1";
1429
+ function getDefaultBucket2() {
1430
+ const customBucket = process.env.FIREBASE_STORAGE_BUCKET;
1431
+ if (customBucket) {
1432
+ return customBucket;
1433
+ }
1434
+ const projectId = getProjectId();
1435
+ return `${projectId}.firebasestorage.app`;
1436
+ }
1437
+ async function initiateResumableUpload(bucket, path, contentType, totalSize, metadata) {
1438
+ const token = await getAdminAccessToken();
1439
+ const url = `${UPLOAD_API_BASE2}/b/${encodeURIComponent(bucket)}/o?uploadType=resumable&name=${encodeURIComponent(path)}`;
1440
+ const requestBody = {};
1441
+ if (metadata) {
1442
+ requestBody.metadata = metadata;
1443
+ }
1444
+ const response = await fetch(url, {
1445
+ method: "POST",
1446
+ headers: {
1447
+ "Authorization": `Bearer ${token}`,
1448
+ "Content-Type": "application/json",
1449
+ "X-Upload-Content-Type": contentType,
1450
+ "X-Upload-Content-Length": totalSize.toString()
1451
+ },
1452
+ body: Object.keys(requestBody).length > 0 ? JSON.stringify(requestBody) : void 0
1453
+ });
1454
+ if (!response.ok) {
1455
+ const errorText = await response.text();
1456
+ throw new Error(`Failed to initiate resumable upload: ${response.status} ${errorText}`);
1457
+ }
1458
+ const sessionUri = response.headers.get("Location");
1459
+ if (!sessionUri) {
1460
+ throw new Error("No session URI returned from resumable upload initiation");
1461
+ }
1462
+ return sessionUri;
1463
+ }
1464
+ async function uploadChunk(sessionUri, chunk, start, total) {
1465
+ const end = start + chunk.byteLength - 1;
1466
+ const response = await fetch(sessionUri, {
1467
+ method: "PUT",
1468
+ headers: {
1469
+ "Content-Length": chunk.byteLength.toString(),
1470
+ "Content-Range": `bytes ${start}-${end}/${total}`
1471
+ },
1472
+ body: chunk
1473
+ });
1474
+ if (response.status === 200 || response.status === 201) {
1475
+ const metadata = await response.json();
1476
+ return { complete: true, metadata };
1477
+ } else if (response.status === 308) {
1478
+ return { complete: false };
1479
+ } else {
1480
+ const errorText = await response.text();
1481
+ throw new Error(`Chunk upload failed: ${response.status} ${errorText}`);
1482
+ }
1483
+ }
1484
+ async function getUploadProgress(sessionUri) {
1485
+ const response = await fetch(sessionUri, {
1486
+ method: "PUT",
1487
+ headers: {
1488
+ "Content-Length": "0",
1489
+ "Content-Range": "bytes */*"
1490
+ }
1491
+ });
1492
+ if (response.status === 308) {
1493
+ const range = response.headers.get("Range");
1494
+ if (range) {
1495
+ const match = range.match(/bytes=0-(\d+)/);
1496
+ if (match) {
1497
+ return parseInt(match[1], 10) + 1;
1498
+ }
1499
+ }
1500
+ } else if (response.status === 200 || response.status === 201) {
1501
+ const metadata = await response.json();
1502
+ return parseInt(metadata.size, 10);
1503
+ }
1504
+ return 0;
1505
+ }
1506
+ async function toArrayBuffer(data) {
1507
+ if (data instanceof ArrayBuffer) {
1508
+ return data;
1509
+ } else if (data instanceof Uint8Array) {
1510
+ const buffer = new ArrayBuffer(data.byteLength);
1511
+ new Uint8Array(buffer).set(data);
1512
+ return buffer;
1513
+ } else if (data instanceof Blob) {
1514
+ return await data.arrayBuffer();
1515
+ }
1516
+ throw new Error("Unsupported data type");
1517
+ }
1518
+ async function readChunkFromStream(reader, chunkSize) {
1519
+ const chunks = [];
1520
+ let totalBytes = 0;
1521
+ while (totalBytes < chunkSize) {
1522
+ const { value, done } = await reader.read();
1523
+ if (done) {
1524
+ if (totalBytes === 0) {
1525
+ return { chunk: null, done: true };
1526
+ }
1527
+ break;
1528
+ }
1529
+ if (value) {
1530
+ chunks.push(value);
1531
+ totalBytes += value.byteLength;
1532
+ if (totalBytes >= chunkSize) {
1533
+ break;
1534
+ }
1535
+ }
1536
+ }
1537
+ const combined = new Uint8Array(totalBytes);
1538
+ let offset = 0;
1539
+ for (const chunk of chunks) {
1540
+ combined.set(chunk, offset);
1541
+ offset += chunk.byteLength;
1542
+ }
1543
+ return { chunk: combined.buffer, done: false };
1544
+ }
1545
+ async function uploadFileResumable(path, data, contentType, options = {}) {
1546
+ const bucket = getDefaultBucket2();
1547
+ const chunkSize = options.chunkSize || 256 * 1024;
1548
+ if (data instanceof ReadableStream) {
1549
+ return await uploadFromStream(bucket, path, data, contentType, chunkSize, options);
1550
+ }
1551
+ const buffer = await toArrayBuffer(data);
1552
+ const total = buffer.byteLength;
1553
+ let sessionUri = options.resumeToken;
1554
+ let uploaded = 0;
1555
+ if (!sessionUri) {
1556
+ sessionUri = await initiateResumableUpload(
1557
+ bucket,
1558
+ path,
1559
+ contentType,
1560
+ total,
1561
+ options.metadata
1562
+ );
1563
+ } else {
1564
+ uploaded = await getUploadProgress(sessionUri);
1565
+ if (options.onProgress) {
1566
+ options.onProgress(uploaded, total);
1567
+ }
1568
+ }
1569
+ while (uploaded < total) {
1570
+ const end = Math.min(uploaded + chunkSize, total);
1571
+ const chunk = buffer.slice(uploaded, end);
1572
+ const result = await uploadChunk(sessionUri, chunk, uploaded, total);
1573
+ uploaded = end;
1574
+ if (options.onProgress) {
1575
+ options.onProgress(uploaded, total);
1576
+ }
1577
+ if (result.complete) {
1578
+ return result.metadata;
1579
+ }
1580
+ }
1581
+ throw new Error("Upload incomplete - all chunks sent but no completion response");
1582
+ }
1583
+ async function uploadFromStream(bucket, path, stream, contentType, chunkSize, options) {
1584
+ const total = options.totalSize || -1;
1585
+ if (total === -1) {
1586
+ throw new Error("totalSize is required when uploading from ReadableStream");
1587
+ }
1588
+ const sessionUri = await initiateResumableUpload(
1589
+ bucket,
1590
+ path,
1591
+ contentType,
1592
+ total,
1593
+ options.metadata
1594
+ );
1595
+ const reader = stream.getReader();
1596
+ let uploaded = 0;
1597
+ let lastResult = null;
1598
+ try {
1599
+ while (true) {
1600
+ const { chunk, done } = await readChunkFromStream(reader, chunkSize);
1601
+ if (done || !chunk) {
1602
+ if (lastResult && lastResult.complete) {
1603
+ return lastResult.metadata;
1604
+ }
1605
+ if (uploaded === total && lastResult) {
1606
+ return lastResult.metadata;
1607
+ }
1608
+ break;
1609
+ }
1610
+ lastResult = await uploadChunk(sessionUri, chunk, uploaded, total);
1611
+ uploaded += chunk.byteLength;
1612
+ if (options.onProgress) {
1613
+ options.onProgress(uploaded, total);
1614
+ }
1615
+ if (lastResult.complete) {
1616
+ return lastResult.metadata;
1617
+ }
1618
+ }
1619
+ throw new Error("Stream ended but upload not complete");
1620
+ } finally {
1621
+ reader.releaseLock();
1622
+ }
1623
+ }
1426
1624
  export {
1427
1625
  FieldValue,
1428
1626
  addDocument,
@@ -1453,5 +1651,6 @@ export {
1453
1651
  signInWithCustomToken,
1454
1652
  updateDocument,
1455
1653
  uploadFile,
1654
+ uploadFileResumable,
1456
1655
  verifyIdToken
1457
1656
  };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@prmichaelsen/firebase-admin-sdk-v8",
3
- "version": "2.2.3",
3
+ "version": "2.3.0",
4
4
  "description": "Firebase Admin SDK for Cloudflare Workers and edge runtimes using REST APIs",
5
5
  "main": "dist/index.js",
6
6
  "module": "dist/index.mjs",