@prmichaelsen/firebase-admin-sdk-v8 2.2.3 → 2.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -7,21 +7,44 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
7
7
 
8
8
  ## [Unreleased]
9
9
 
10
- ## [2.2.3] - 2026-02-14
10
+ ## [2.3.1] - 2026-02-14
11
+
12
+ ### Fixed
13
+ - **CRITICAL**: Fixed signed URL generation to match Google Cloud Storage SDK encoding
14
+ - Implemented `fixedEncodeURIComponent` to additionally encode `! * ' ( )` characters
15
+ - This fixes `SignatureDoesNotMatch` errors in production environments
16
+ - Path encoding now exactly matches official `@google-cloud/storage` SDK behavior
17
+
18
+ ## [2.3.0] - 2026-02-14
11
19
 
12
20
  ### Added
13
- - Comprehensive unit tests for Storage module (63 new tests)
21
+ - **Resumable uploads** with `uploadFileResumable()` for large files
22
+ - **True streaming support** with ReadableStream (no memory limit)
23
+ - Progress tracking with callbacks for resumable uploads
24
+ - Resume capability for interrupted uploads
25
+ - Chunked uploads with configurable chunk size (default 256KB)
26
+ - Comprehensive unit tests for Storage module (68 new tests total)
14
27
  - Unit tests for `storage/client.ts` (31 tests, 88.11% coverage)
15
28
  - Unit tests for `storage/signed-urls.ts` (32 tests, 100% coverage)
29
+ - Unit tests for `storage/resumable-upload.ts` (16 tests, 88.03% coverage)
30
+ - E2E tests for resumable uploads with real Firebase (11 tests)
31
+ - E2E tests for ReadableStream uploads (validated with 1MB files and puppy.png)
32
+ - E2E test for complex nested arrays (message content scenario)
16
33
  - Agent Context Protocol (ACP) initialization documentation
34
+ - `.env.example` file with all environment variables documented
17
35
 
18
36
  ### Changed
19
- - Improved overall test coverage from 76.15% to 96.02% (+19.87%)
20
- - Storage module coverage improved from 0% to 91.71%
21
- - Total test count increased from 339 to 402 tests
37
+ - **BREAKING**: Default storage bucket format changed from `.appspot.com` to `.firebasestorage.app`
38
+ - Improved overall test coverage from 76.15% to 94.87% (+18.72%)
39
+ - Storage module coverage improved from 0% to 89.89%
40
+ - Total test count increased from 339 to 418 tests (+79 tests)
41
+ - Fixed storage e2e tests to properly initialize Firebase app
22
42
 
23
43
  ### Fixed
24
44
  - Storage module now has proper unit test coverage (previously only e2e tests)
45
+ - Storage e2e tests now initialize app correctly
46
+ - Bucket naming updated to match new Firebase format
47
+ - Array serialization confirmed working (not serialized to JSON strings)
25
48
 
26
49
  ## [2.2.2] - 2026-02-13
27
50
 
package/README.md CHANGED
@@ -352,6 +352,88 @@ Clear the cached access token.
352
352
  clearTokenCache();
353
353
  ```
354
354
 
355
+ ### Storage - Resumable Uploads
356
+
357
+ #### `uploadFileResumable(path, data, contentType, options?): Promise<FileMetadata>`
358
+
359
+ Upload large files with resumable upload support. Suitable for files >10MB, unreliable networks, or when progress tracking is needed.
360
+
361
+ **Features:**
362
+ - ✅ Chunked uploads (configurable chunk size)
363
+ - ✅ Progress tracking with callbacks
364
+ - ✅ Resume interrupted uploads
365
+ - ✅ Memory efficient (doesn't load entire file at once)
366
+ - ✅ Automatic retry on chunk failure
367
+
368
+ ```typescript
369
+ import { uploadFileResumable } from '@prmichaelsen/firebase-admin-sdk-v8';
370
+
371
+ // Upload large file with progress tracking
372
+ const data = await fetch('https://example.com/large-video.mp4');
373
+ const buffer = await data.arrayBuffer();
374
+
375
+ const metadata = await uploadFileResumable(
376
+ 'videos/large.mp4',
378
+ buffer,
379
+ 'video/mp4',
380
+ {
381
+ chunkSize: 512 * 1024, // 512KB chunks (default: 256KB)
382
+ onProgress: (uploaded, total) => {
383
+ const percent = (uploaded / total * 100).toFixed(2);
384
+ console.log(`Upload progress: ${percent}%`);
385
+ },
386
+ metadata: { userId: '123', category: 'videos' },
387
+ }
388
+ );
389
+
390
+ console.log('Upload complete:', metadata);
391
+ ```
392
+
393
+ **Resume interrupted upload:**
394
+
395
+ ```typescript
396
+ let sessionUri: string;
397
+
398
+ try {
399
+ const metadata = await uploadFileResumable(
400
+ path,
402
+ data,
403
+ contentType,
404
+ {
405
+ onProgress: (uploaded, total) => {
406
+ // Save the session URI to resume later. NOTE: onProgress only
407
+ // receives (uploaded, total); capture the session URI from the
+ // resumable upload session itself.
408
+ },
409
+ }
410
+ );
411
+ } catch (error) {
412
+ // Resume from where it left off
413
+ const metadata = await uploadFileResumable(
414
+ path,
416
+ data,
417
+ contentType,
418
+ {
419
+ resumeToken: sessionUri, // Resume from previous session
420
+ }
421
+ );
422
+ }
423
+ ```
424
+
425
+ **When to use:**
426
+ - Files larger than 10MB
427
+ - Unreliable network conditions
428
+ - Need progress reporting
429
+ - Files that may exceed memory limits
430
+
431
+ **When to use simple `uploadFile()` instead:**
432
+ - Small files (<10MB)
433
+ - Reliable network
434
+ - No progress tracking needed
435
+ - Edge runtime with memory constraints
436
+
355
437
  ## 💡 Examples
356
438
 
357
439
  See [EXAMPLES.md](./EXAMPLES.md) for comprehensive examples including:
package/dist/index.d.mts CHANGED
@@ -771,6 +771,63 @@ interface SignedUrlOptions {
771
771
  */
772
772
  declare function generateSignedUrl(path: string, options: SignedUrlOptions): Promise<string>;
773
773
 
774
+ /**
775
+ * Firebase Storage Resumable Uploads
776
+ * Implements Google Cloud Storage resumable upload protocol for large files
777
+ */
778
+
779
+ /**
780
+ * Options for resumable uploads
781
+ */
782
+ interface ResumableUploadOptions extends UploadOptions {
783
+ chunkSize?: number;
784
+ onProgress?: (uploaded: number, total: number) => void;
785
+ resumeToken?: string;
786
+ totalSize?: number;
787
+ }
788
+ /**
789
+ * Upload a file with resumable upload support
790
+ * Suitable for large files and unreliable networks
791
+ *
792
+ * @param path - File path in storage
793
+ * @param data - File data as ArrayBuffer, Uint8Array, Blob, or ReadableStream
794
+ * @param contentType - MIME type of the file
795
+ * @param options - Upload options
796
+ * @returns File metadata
797
+ *
798
+ * @example
799
+ * ```typescript
800
+ * // Upload from buffer
801
+ * const data = await fetch('https://example.com/large-video.mp4');
802
+ * const buffer = await data.arrayBuffer();
803
+ *
804
+ * const metadata = await uploadFileResumable(
805
+ * 'videos/large.mp4',
806
+ * buffer,
807
+ * 'video/mp4',
808
+ * {
809
+ * chunkSize: 512 * 1024, // 512KB chunks
810
+ * onProgress: (uploaded, total) => {
811
+ * console.log(`Progress: ${(uploaded / total * 100).toFixed(2)}%`);
812
+ * },
813
+ * }
814
+ * );
815
+ *
816
+ * // Upload from stream (true streaming - no memory limit)
817
+ * const response = await fetch('https://example.com/huge-file.mp4');
818
+ * const metadata = await uploadFileResumable(
819
+ * 'videos/huge.mp4',
820
+ * response.body!, // ReadableStream
821
+ * 'video/mp4',
822
+ * {
823
+ * totalSize: parseInt(response.headers.get('content-length')!),
824
+ * chunkSize: 1024 * 1024, // 1MB chunks
825
+ * }
826
+ * );
827
+ * ```
828
+ */
829
+ declare function uploadFileResumable(path: string, data: ArrayBuffer | Uint8Array | Blob | ReadableStream<Uint8Array>, contentType: string, options?: ResumableUploadOptions): Promise<FileMetadata>;
830
+
774
831
  /**
775
832
  * Firebase Admin SDK v8 - Field Value Helpers
776
833
  * Special field values for Firestore operations
@@ -868,4 +925,4 @@ declare function getAdminAccessToken(): Promise<string>;
868
925
  */
869
926
  declare function clearTokenCache(): void;
870
927
 
871
- export { type BatchWrite, type BatchWriteResult, type CustomClaims, type CustomTokenSignInResponse, type DataObject, type DecodedIdToken, type DocumentReference, type DownloadOptions, FieldValue, type FieldValue$1 as FieldValueSentinel, FieldValueType, type FileMetadata, type FirestoreDocument, type FirestoreValue, type ListFilesResult, type ListOptions, type QueryFilter, type QueryOptions, type QueryOrder, type ServiceAccount, type SetOptions, type SignedUrlOptions, type TokenResponse, type UpdateOptions, type UploadOptions, type UserInfo, type WhereFilterOp, addDocument, batchWrite, clearConfig, clearTokenCache, countDocuments, createCustomToken, deleteDocument, deleteFile, downloadFile, fileExists, generateSignedUrl, getAdminAccessToken, getAuth, getConfig, getDocument, getFileMetadata, getProjectId, getServiceAccount, getUserFromToken, initializeApp, iterateCollection, listDocuments, listFiles, queryDocuments, setDocument, signInWithCustomToken, updateDocument, uploadFile, verifyIdToken };
928
+ export { type BatchWrite, type BatchWriteResult, type CustomClaims, type CustomTokenSignInResponse, type DataObject, type DecodedIdToken, type DocumentReference, type DownloadOptions, FieldValue, type FieldValue$1 as FieldValueSentinel, FieldValueType, type FileMetadata, type FirestoreDocument, type FirestoreValue, type ListFilesResult, type ListOptions, type QueryFilter, type QueryOptions, type QueryOrder, type ResumableUploadOptions, type ServiceAccount, type SetOptions, type SignedUrlOptions, type TokenResponse, type UpdateOptions, type UploadOptions, type UserInfo, type WhereFilterOp, addDocument, batchWrite, clearConfig, clearTokenCache, countDocuments, createCustomToken, deleteDocument, deleteFile, downloadFile, fileExists, generateSignedUrl, getAdminAccessToken, getAuth, getConfig, getDocument, getFileMetadata, getProjectId, getServiceAccount, getUserFromToken, initializeApp, iterateCollection, listDocuments, listFiles, queryDocuments, setDocument, signInWithCustomToken, updateDocument, uploadFile, uploadFileResumable, verifyIdToken };
package/dist/index.d.ts CHANGED
@@ -771,6 +771,63 @@ interface SignedUrlOptions {
771
771
  */
772
772
  declare function generateSignedUrl(path: string, options: SignedUrlOptions): Promise<string>;
773
773
 
774
+ /**
775
+ * Firebase Storage Resumable Uploads
776
+ * Implements Google Cloud Storage resumable upload protocol for large files
777
+ */
778
+
779
+ /**
780
+ * Options for resumable uploads
781
+ */
782
+ interface ResumableUploadOptions extends UploadOptions {
783
+ chunkSize?: number;
784
+ onProgress?: (uploaded: number, total: number) => void;
785
+ resumeToken?: string;
786
+ totalSize?: number;
787
+ }
788
+ /**
789
+ * Upload a file with resumable upload support
790
+ * Suitable for large files and unreliable networks
791
+ *
792
+ * @param path - File path in storage
793
+ * @param data - File data as ArrayBuffer, Uint8Array, Blob, or ReadableStream
794
+ * @param contentType - MIME type of the file
795
+ * @param options - Upload options
796
+ * @returns File metadata
797
+ *
798
+ * @example
799
+ * ```typescript
800
+ * // Upload from buffer
801
+ * const data = await fetch('https://example.com/large-video.mp4');
802
+ * const buffer = await data.arrayBuffer();
803
+ *
804
+ * const metadata = await uploadFileResumable(
805
+ * 'videos/large.mp4',
806
+ * buffer,
807
+ * 'video/mp4',
808
+ * {
809
+ * chunkSize: 512 * 1024, // 512KB chunks
810
+ * onProgress: (uploaded, total) => {
811
+ * console.log(`Progress: ${(uploaded / total * 100).toFixed(2)}%`);
812
+ * },
813
+ * }
814
+ * );
815
+ *
816
+ * // Upload from stream (true streaming - no memory limit)
817
+ * const response = await fetch('https://example.com/huge-file.mp4');
818
+ * const metadata = await uploadFileResumable(
819
+ * 'videos/huge.mp4',
820
+ * response.body!, // ReadableStream
821
+ * 'video/mp4',
822
+ * {
823
+ * totalSize: parseInt(response.headers.get('content-length')!),
824
+ * chunkSize: 1024 * 1024, // 1MB chunks
825
+ * }
826
+ * );
827
+ * ```
828
+ */
829
+ declare function uploadFileResumable(path: string, data: ArrayBuffer | Uint8Array | Blob | ReadableStream<Uint8Array>, contentType: string, options?: ResumableUploadOptions): Promise<FileMetadata>;
830
+
774
831
  /**
775
832
  * Firebase Admin SDK v8 - Field Value Helpers
776
833
  * Special field values for Firestore operations
@@ -868,4 +925,4 @@ declare function getAdminAccessToken(): Promise<string>;
868
925
  */
869
926
  declare function clearTokenCache(): void;
870
927
 
871
- export { type BatchWrite, type BatchWriteResult, type CustomClaims, type CustomTokenSignInResponse, type DataObject, type DecodedIdToken, type DocumentReference, type DownloadOptions, FieldValue, type FieldValue$1 as FieldValueSentinel, FieldValueType, type FileMetadata, type FirestoreDocument, type FirestoreValue, type ListFilesResult, type ListOptions, type QueryFilter, type QueryOptions, type QueryOrder, type ServiceAccount, type SetOptions, type SignedUrlOptions, type TokenResponse, type UpdateOptions, type UploadOptions, type UserInfo, type WhereFilterOp, addDocument, batchWrite, clearConfig, clearTokenCache, countDocuments, createCustomToken, deleteDocument, deleteFile, downloadFile, fileExists, generateSignedUrl, getAdminAccessToken, getAuth, getConfig, getDocument, getFileMetadata, getProjectId, getServiceAccount, getUserFromToken, initializeApp, iterateCollection, listDocuments, listFiles, queryDocuments, setDocument, signInWithCustomToken, updateDocument, uploadFile, verifyIdToken };
928
+ export { type BatchWrite, type BatchWriteResult, type CustomClaims, type CustomTokenSignInResponse, type DataObject, type DecodedIdToken, type DocumentReference, type DownloadOptions, FieldValue, type FieldValue$1 as FieldValueSentinel, FieldValueType, type FileMetadata, type FirestoreDocument, type FirestoreValue, type ListFilesResult, type ListOptions, type QueryFilter, type QueryOptions, type QueryOrder, type ResumableUploadOptions, type ServiceAccount, type SetOptions, type SignedUrlOptions, type TokenResponse, type UpdateOptions, type UploadOptions, type UserInfo, type WhereFilterOp, addDocument, batchWrite, clearConfig, clearTokenCache, countDocuments, createCustomToken, deleteDocument, deleteFile, downloadFile, fileExists, generateSignedUrl, getAdminAccessToken, getAuth, getConfig, getDocument, getFileMetadata, getProjectId, getServiceAccount, getUserFromToken, initializeApp, iterateCollection, listDocuments, listFiles, queryDocuments, setDocument, signInWithCustomToken, updateDocument, uploadFile, uploadFileResumable, verifyIdToken };
package/dist/index.js CHANGED
@@ -49,6 +49,7 @@ __export(index_exports, {
49
49
  signInWithCustomToken: () => signInWithCustomToken,
50
50
  updateDocument: () => updateDocument,
51
51
  uploadFile: () => uploadFile,
52
+ uploadFileResumable: () => uploadFileResumable,
52
53
  verifyIdToken: () => verifyIdToken
53
54
  });
54
55
  module.exports = __toCommonJS(index_exports);
@@ -1179,7 +1180,7 @@ function getDefaultBucket() {
1179
1180
  return customBucket;
1180
1181
  }
1181
1182
  const projectId = getProjectId();
1182
- return `${projectId}.appspot.com`;
1183
+ return `${projectId}.firebasestorage.app`;
1183
1184
  }
1184
1185
  function detectContentType(filename) {
1185
1186
  const ext = filename.split(".").pop()?.toLowerCase();
@@ -1371,7 +1372,7 @@ function getStorageBucket() {
1371
1372
  return customBucket;
1372
1373
  }
1373
1374
  const projectId = getProjectId();
1374
- return `${projectId}.appspot.com`;
1375
+ return `${projectId}.firebasestorage.app`;
1375
1376
  }
1376
1377
  function getExpirationTimestamp(expires) {
1377
1378
  if (expires instanceof Date) {
@@ -1389,10 +1390,18 @@ function actionToMethod(action) {
1389
1390
  return "DELETE";
1390
1391
  }
1391
1392
  }
1392
- function stringToHex(str) {
1393
+ function fixedEncodeURIComponent(str) {
1394
+ return encodeURIComponent(str).replace(
1395
+ /[!'()*]/g,
1396
+ (c) => "%" + c.charCodeAt(0).toString(16).toUpperCase()
1397
+ );
1398
+ }
1399
+ async function sha256Hex(str) {
1393
1400
  const encoder = new TextEncoder();
1394
- const bytes = encoder.encode(str);
1395
- return Array.from(bytes).map((b) => b.toString(16).padStart(2, "0")).join("");
1401
+ const data = encoder.encode(str);
1402
+ const hashBuffer = await crypto.subtle.digest("SHA-256", data);
1403
+ const hashArray = new Uint8Array(hashBuffer);
1404
+ return Array.from(hashArray).map((b) => b.toString(16).padStart(2, "0")).join("");
1396
1405
  }
1397
1406
  async function signData(data, privateKey) {
1398
1407
  const pemHeader = "-----BEGIN PRIVATE KEY-----";
@@ -1457,17 +1466,15 @@ async function generateSignedUrl(path, options) {
1457
1466
  }
1458
1467
  const sortedParams = Object.keys(queryParams).sort();
1459
1468
  const canonicalQueryString = sortedParams.map((key) => `${encodeURIComponent(key)}=${encodeURIComponent(queryParams[key])}`).join("&");
1460
- const encodedPath = path.split("/").map((segment) => encodeURIComponent(segment)).join("/");
1469
+ const encodedPath = path.split("/").map((segment) => fixedEncodeURIComponent(segment)).join("/");
1461
1470
  const canonicalUri = `/${bucket}/${encodedPath}`;
1462
- const canonicalRequest = [
1463
- method,
1464
- canonicalUri,
1465
- canonicalQueryString,
1466
- canonicalHeaders,
1467
- signedHeaders,
1468
- "UNSIGNED-PAYLOAD"
1469
- ].join("\n");
1470
- const canonicalRequestHash = stringToHex(canonicalRequest);
1471
+ const canonicalRequest = `${method}
1472
+ ${canonicalUri}
1473
+ ${canonicalQueryString}
1474
+ ${canonicalHeaders}
1475
+ ${signedHeaders}
1476
+ UNSIGNED-PAYLOAD`;
1477
+ const canonicalRequestHash = await sha256Hex(canonicalRequest);
1471
1478
  const stringToSign = [
1472
1479
  "GOOG4-RSA-SHA256",
1473
1480
  dateTimeStamp,
@@ -1478,6 +1485,204 @@ async function generateSignedUrl(path, options) {
1478
1485
  const signedUrl = `https://storage.googleapis.com${canonicalUri}?${canonicalQueryString}&X-Goog-Signature=${signature}`;
1479
1486
  return signedUrl;
1480
1487
  }
1488
+
1489
+ // src/storage/resumable-upload.ts
1490
+ var UPLOAD_API_BASE2 = "https://storage.googleapis.com/upload/storage/v1";
1491
+ function getDefaultBucket2() {
1492
+ const customBucket = process.env.FIREBASE_STORAGE_BUCKET;
1493
+ if (customBucket) {
1494
+ return customBucket;
1495
+ }
1496
+ const projectId = getProjectId();
1497
+ return `${projectId}.firebasestorage.app`;
1498
+ }
1499
+ async function initiateResumableUpload(bucket, path, contentType, totalSize, metadata) {
1500
+ const token = await getAdminAccessToken();
1501
+ const url = `${UPLOAD_API_BASE2}/b/${encodeURIComponent(bucket)}/o?uploadType=resumable&name=${encodeURIComponent(path)}`;
1502
+ const requestBody = {};
1503
+ if (metadata) {
1504
+ requestBody.metadata = metadata;
1505
+ }
1506
+ const response = await fetch(url, {
1507
+ method: "POST",
1508
+ headers: {
1509
+ "Authorization": `Bearer ${token}`,
1510
+ "Content-Type": "application/json",
1511
+ "X-Upload-Content-Type": contentType,
1512
+ "X-Upload-Content-Length": totalSize.toString()
1513
+ },
1514
+ body: Object.keys(requestBody).length > 0 ? JSON.stringify(requestBody) : void 0
1515
+ });
1516
+ if (!response.ok) {
1517
+ const errorText = await response.text();
1518
+ throw new Error(`Failed to initiate resumable upload: ${response.status} ${errorText}`);
1519
+ }
1520
+ const sessionUri = response.headers.get("Location");
1521
+ if (!sessionUri) {
1522
+ throw new Error("No session URI returned from resumable upload initiation");
1523
+ }
1524
+ return sessionUri;
1525
+ }
1526
+ async function uploadChunk(sessionUri, chunk, start, total) {
1527
+ const end = start + chunk.byteLength - 1;
1528
+ const response = await fetch(sessionUri, {
1529
+ method: "PUT",
1530
+ headers: {
1531
+ "Content-Length": chunk.byteLength.toString(),
1532
+ "Content-Range": `bytes ${start}-${end}/${total}`
1533
+ },
1534
+ body: chunk
1535
+ });
1536
+ if (response.status === 200 || response.status === 201) {
1537
+ const metadata = await response.json();
1538
+ return { complete: true, metadata };
1539
+ } else if (response.status === 308) {
1540
+ return { complete: false };
1541
+ } else {
1542
+ const errorText = await response.text();
1543
+ throw new Error(`Chunk upload failed: ${response.status} ${errorText}`);
1544
+ }
1545
+ }
1546
+ async function getUploadProgress(sessionUri) {
1547
+ const response = await fetch(sessionUri, {
1548
+ method: "PUT",
1549
+ headers: {
1550
+ "Content-Length": "0",
1551
+ "Content-Range": "bytes */*"
1552
+ }
1553
+ });
1554
+ if (response.status === 308) {
1555
+ const range = response.headers.get("Range");
1556
+ if (range) {
1557
+ const match = range.match(/bytes=0-(\d+)/);
1558
+ if (match) {
1559
+ return parseInt(match[1], 10) + 1;
1560
+ }
1561
+ }
1562
+ } else if (response.status === 200 || response.status === 201) {
1563
+ const metadata = await response.json();
1564
+ return parseInt(metadata.size, 10);
1565
+ }
1566
+ return 0;
1567
+ }
1568
+ async function toArrayBuffer(data) {
1569
+ if (data instanceof ArrayBuffer) {
1570
+ return data;
1571
+ } else if (data instanceof Uint8Array) {
1572
+ const buffer = new ArrayBuffer(data.byteLength);
1573
+ new Uint8Array(buffer).set(data);
1574
+ return buffer;
1575
+ } else if (data instanceof Blob) {
1576
+ return await data.arrayBuffer();
1577
+ }
1578
+ throw new Error("Unsupported data type");
1579
+ }
1580
+ async function readChunkFromStream(reader, chunkSize) {
1581
+ const chunks = [];
1582
+ let totalBytes = 0;
1583
+ while (totalBytes < chunkSize) {
1584
+ const { value, done } = await reader.read();
1585
+ if (done) {
1586
+ if (totalBytes === 0) {
1587
+ return { chunk: null, done: true };
1588
+ }
1589
+ break;
1590
+ }
1591
+ if (value) {
1592
+ chunks.push(value);
1593
+ totalBytes += value.byteLength;
1594
+ if (totalBytes >= chunkSize) {
1595
+ break;
1596
+ }
1597
+ }
1598
+ }
1599
+ const combined = new Uint8Array(totalBytes);
1600
+ let offset = 0;
1601
+ for (const chunk of chunks) {
1602
+ combined.set(chunk, offset);
1603
+ offset += chunk.byteLength;
1604
+ }
1605
+ return { chunk: combined.buffer, done: false };
1606
+ }
1607
+ async function uploadFileResumable(path, data, contentType, options = {}) {
1608
+ const bucket = getDefaultBucket2();
1609
+ const chunkSize = options.chunkSize || 256 * 1024;
1610
+ if (data instanceof ReadableStream) {
1611
+ return await uploadFromStream(bucket, path, data, contentType, chunkSize, options);
1612
+ }
1613
+ const buffer = await toArrayBuffer(data);
1614
+ const total = buffer.byteLength;
1615
+ let sessionUri = options.resumeToken;
1616
+ let uploaded = 0;
1617
+ if (!sessionUri) {
1618
+ sessionUri = await initiateResumableUpload(
1619
+ bucket,
1620
+ path,
1621
+ contentType,
1622
+ total,
1623
+ options.metadata
1624
+ );
1625
+ } else {
1626
+ uploaded = await getUploadProgress(sessionUri);
1627
+ if (options.onProgress) {
1628
+ options.onProgress(uploaded, total);
1629
+ }
1630
+ }
1631
+ while (uploaded < total) {
1632
+ const end = Math.min(uploaded + chunkSize, total);
1633
+ const chunk = buffer.slice(uploaded, end);
1634
+ const result = await uploadChunk(sessionUri, chunk, uploaded, total);
1635
+ uploaded = end;
1636
+ if (options.onProgress) {
1637
+ options.onProgress(uploaded, total);
1638
+ }
1639
+ if (result.complete) {
1640
+ return result.metadata;
1641
+ }
1642
+ }
1643
+ throw new Error("Upload incomplete - all chunks sent but no completion response");
1644
+ }
1645
+ async function uploadFromStream(bucket, path, stream, contentType, chunkSize, options) {
1646
+ const total = options.totalSize || -1;
1647
+ if (total === -1) {
1648
+ throw new Error("totalSize is required when uploading from ReadableStream");
1649
+ }
1650
+ const sessionUri = await initiateResumableUpload(
1651
+ bucket,
1652
+ path,
1653
+ contentType,
1654
+ total,
1655
+ options.metadata
1656
+ );
1657
+ const reader = stream.getReader();
1658
+ let uploaded = 0;
1659
+ let lastResult = null;
1660
+ try {
1661
+ while (true) {
1662
+ const { chunk, done } = await readChunkFromStream(reader, chunkSize);
1663
+ if (done || !chunk) {
1664
+ if (lastResult && lastResult.complete) {
1665
+ return lastResult.metadata;
1666
+ }
1667
+ if (uploaded === total && lastResult) {
1668
+ return lastResult.metadata;
1669
+ }
1670
+ break;
1671
+ }
1672
+ lastResult = await uploadChunk(sessionUri, chunk, uploaded, total);
1673
+ uploaded += chunk.byteLength;
1674
+ if (options.onProgress) {
1675
+ options.onProgress(uploaded, total);
1676
+ }
1677
+ if (lastResult.complete) {
1678
+ return lastResult.metadata;
1679
+ }
1680
+ }
1681
+ throw new Error("Stream ended but upload not complete");
1682
+ } finally {
1683
+ reader.releaseLock();
1684
+ }
1685
+ }
1481
1686
  // Annotate the CommonJS export names for ESM import in node:
1482
1687
  0 && (module.exports = {
1483
1688
  FieldValue,
@@ -1509,5 +1714,6 @@ async function generateSignedUrl(path, options) {
1509
1714
  signInWithCustomToken,
1510
1715
  updateDocument,
1511
1716
  uploadFile,
1717
+ uploadFileResumable,
1512
1718
  verifyIdToken
1513
1719
  });
package/dist/index.mjs CHANGED
@@ -1124,7 +1124,7 @@ function getDefaultBucket() {
1124
1124
  return customBucket;
1125
1125
  }
1126
1126
  const projectId = getProjectId();
1127
- return `${projectId}.appspot.com`;
1127
+ return `${projectId}.firebasestorage.app`;
1128
1128
  }
1129
1129
  function detectContentType(filename) {
1130
1130
  const ext = filename.split(".").pop()?.toLowerCase();
@@ -1316,7 +1316,7 @@ function getStorageBucket() {
1316
1316
  return customBucket;
1317
1317
  }
1318
1318
  const projectId = getProjectId();
1319
- return `${projectId}.appspot.com`;
1319
+ return `${projectId}.firebasestorage.app`;
1320
1320
  }
1321
1321
  function getExpirationTimestamp(expires) {
1322
1322
  if (expires instanceof Date) {
@@ -1334,10 +1334,18 @@ function actionToMethod(action) {
1334
1334
  return "DELETE";
1335
1335
  }
1336
1336
  }
1337
- function stringToHex(str) {
1337
+ function fixedEncodeURIComponent(str) {
1338
+ return encodeURIComponent(str).replace(
1339
+ /[!'()*]/g,
1340
+ (c) => "%" + c.charCodeAt(0).toString(16).toUpperCase()
1341
+ );
1342
+ }
1343
+ async function sha256Hex(str) {
1338
1344
  const encoder = new TextEncoder();
1339
- const bytes = encoder.encode(str);
1340
- return Array.from(bytes).map((b) => b.toString(16).padStart(2, "0")).join("");
1345
+ const data = encoder.encode(str);
1346
+ const hashBuffer = await crypto.subtle.digest("SHA-256", data);
1347
+ const hashArray = new Uint8Array(hashBuffer);
1348
+ return Array.from(hashArray).map((b) => b.toString(16).padStart(2, "0")).join("");
1341
1349
  }
1342
1350
  async function signData(data, privateKey) {
1343
1351
  const pemHeader = "-----BEGIN PRIVATE KEY-----";
@@ -1402,17 +1410,15 @@ async function generateSignedUrl(path, options) {
1402
1410
  }
1403
1411
  const sortedParams = Object.keys(queryParams).sort();
1404
1412
  const canonicalQueryString = sortedParams.map((key) => `${encodeURIComponent(key)}=${encodeURIComponent(queryParams[key])}`).join("&");
1405
- const encodedPath = path.split("/").map((segment) => encodeURIComponent(segment)).join("/");
1413
+ const encodedPath = path.split("/").map((segment) => fixedEncodeURIComponent(segment)).join("/");
1406
1414
  const canonicalUri = `/${bucket}/${encodedPath}`;
1407
- const canonicalRequest = [
1408
- method,
1409
- canonicalUri,
1410
- canonicalQueryString,
1411
- canonicalHeaders,
1412
- signedHeaders,
1413
- "UNSIGNED-PAYLOAD"
1414
- ].join("\n");
1415
- const canonicalRequestHash = stringToHex(canonicalRequest);
1415
+ const canonicalRequest = `${method}
1416
+ ${canonicalUri}
1417
+ ${canonicalQueryString}
1418
+ ${canonicalHeaders}
1419
+ ${signedHeaders}
1420
+ UNSIGNED-PAYLOAD`;
1421
+ const canonicalRequestHash = await sha256Hex(canonicalRequest);
1416
1422
  const stringToSign = [
1417
1423
  "GOOG4-RSA-SHA256",
1418
1424
  dateTimeStamp,
@@ -1423,6 +1429,204 @@ async function generateSignedUrl(path, options) {
1423
1429
  const signedUrl = `https://storage.googleapis.com${canonicalUri}?${canonicalQueryString}&X-Goog-Signature=${signature}`;
1424
1430
  return signedUrl;
1425
1431
  }
1432
+
1433
+ // src/storage/resumable-upload.ts
1434
+ var UPLOAD_API_BASE2 = "https://storage.googleapis.com/upload/storage/v1";
1435
+ function getDefaultBucket2() {
1436
+ const customBucket = process.env.FIREBASE_STORAGE_BUCKET;
1437
+ if (customBucket) {
1438
+ return customBucket;
1439
+ }
1440
+ const projectId = getProjectId();
1441
+ return `${projectId}.firebasestorage.app`;
1442
+ }
1443
+ async function initiateResumableUpload(bucket, path, contentType, totalSize, metadata) {
1444
+ const token = await getAdminAccessToken();
1445
+ const url = `${UPLOAD_API_BASE2}/b/${encodeURIComponent(bucket)}/o?uploadType=resumable&name=${encodeURIComponent(path)}`;
1446
+ const requestBody = {};
1447
+ if (metadata) {
1448
+ requestBody.metadata = metadata;
1449
+ }
1450
+ const response = await fetch(url, {
1451
+ method: "POST",
1452
+ headers: {
1453
+ "Authorization": `Bearer ${token}`,
1454
+ "Content-Type": "application/json",
1455
+ "X-Upload-Content-Type": contentType,
1456
+ "X-Upload-Content-Length": totalSize.toString()
1457
+ },
1458
+ body: Object.keys(requestBody).length > 0 ? JSON.stringify(requestBody) : void 0
1459
+ });
1460
+ if (!response.ok) {
1461
+ const errorText = await response.text();
1462
+ throw new Error(`Failed to initiate resumable upload: ${response.status} ${errorText}`);
1463
+ }
1464
+ const sessionUri = response.headers.get("Location");
1465
+ if (!sessionUri) {
1466
+ throw new Error("No session URI returned from resumable upload initiation");
1467
+ }
1468
+ return sessionUri;
1469
+ }
1470
+ async function uploadChunk(sessionUri, chunk, start, total) {
1471
+ const end = start + chunk.byteLength - 1;
1472
+ const response = await fetch(sessionUri, {
1473
+ method: "PUT",
1474
+ headers: {
1475
+ "Content-Length": chunk.byteLength.toString(),
1476
+ "Content-Range": `bytes ${start}-${end}/${total}`
1477
+ },
1478
+ body: chunk
1479
+ });
1480
+ if (response.status === 200 || response.status === 201) {
1481
+ const metadata = await response.json();
1482
+ return { complete: true, metadata };
1483
+ } else if (response.status === 308) {
1484
+ return { complete: false };
1485
+ } else {
1486
+ const errorText = await response.text();
1487
+ throw new Error(`Chunk upload failed: ${response.status} ${errorText}`);
1488
+ }
1489
+ }
1490
+ async function getUploadProgress(sessionUri) {
1491
+ const response = await fetch(sessionUri, {
1492
+ method: "PUT",
1493
+ headers: {
1494
+ "Content-Length": "0",
1495
+ "Content-Range": "bytes */*"
1496
+ }
1497
+ });
1498
+ if (response.status === 308) {
1499
+ const range = response.headers.get("Range");
1500
+ if (range) {
1501
+ const match = range.match(/bytes=0-(\d+)/);
1502
+ if (match) {
1503
+ return parseInt(match[1], 10) + 1;
1504
+ }
1505
+ }
1506
+ } else if (response.status === 200 || response.status === 201) {
1507
+ const metadata = await response.json();
1508
+ return parseInt(metadata.size, 10);
1509
+ }
1510
+ return 0;
1511
+ }
1512
+ async function toArrayBuffer(data) {
1513
+ if (data instanceof ArrayBuffer) {
1514
+ return data;
1515
+ } else if (data instanceof Uint8Array) {
1516
+ const buffer = new ArrayBuffer(data.byteLength);
1517
+ new Uint8Array(buffer).set(data);
1518
+ return buffer;
1519
+ } else if (data instanceof Blob) {
1520
+ return await data.arrayBuffer();
1521
+ }
1522
+ throw new Error("Unsupported data type");
1523
+ }
1524
+ async function readChunkFromStream(reader, chunkSize) {
1525
+ const chunks = [];
1526
+ let totalBytes = 0;
1527
+ while (totalBytes < chunkSize) {
1528
+ const { value, done } = await reader.read();
1529
+ if (done) {
1530
+ if (totalBytes === 0) {
1531
+ return { chunk: null, done: true };
1532
+ }
1533
+ break;
1534
+ }
1535
+ if (value) {
1536
+ chunks.push(value);
1537
+ totalBytes += value.byteLength;
1538
+ if (totalBytes >= chunkSize) {
1539
+ break;
1540
+ }
1541
+ }
1542
+ }
1543
+ const combined = new Uint8Array(totalBytes);
1544
+ let offset = 0;
1545
+ for (const chunk of chunks) {
1546
+ combined.set(chunk, offset);
1547
+ offset += chunk.byteLength;
1548
+ }
1549
+ return { chunk: combined.buffer, done: false };
1550
+ }
1551
+ async function uploadFileResumable(path, data, contentType, options = {}) {
1552
+ const bucket = getDefaultBucket2();
1553
+ const chunkSize = options.chunkSize || 256 * 1024;
1554
+ if (data instanceof ReadableStream) {
1555
+ return await uploadFromStream(bucket, path, data, contentType, chunkSize, options);
1556
+ }
1557
+ const buffer = await toArrayBuffer(data);
1558
+ const total = buffer.byteLength;
1559
+ let sessionUri = options.resumeToken;
1560
+ let uploaded = 0;
1561
+ if (!sessionUri) {
1562
+ sessionUri = await initiateResumableUpload(
1563
+ bucket,
1564
+ path,
1565
+ contentType,
1566
+ total,
1567
+ options.metadata
1568
+ );
1569
+ } else {
1570
+ uploaded = await getUploadProgress(sessionUri);
1571
+ if (options.onProgress) {
1572
+ options.onProgress(uploaded, total);
1573
+ }
1574
+ }
1575
+ while (uploaded < total) {
1576
+ const end = Math.min(uploaded + chunkSize, total);
1577
+ const chunk = buffer.slice(uploaded, end);
1578
+ const result = await uploadChunk(sessionUri, chunk, uploaded, total);
1579
+ uploaded = end;
1580
+ if (options.onProgress) {
1581
+ options.onProgress(uploaded, total);
1582
+ }
1583
+ if (result.complete) {
1584
+ return result.metadata;
1585
+ }
1586
+ }
1587
+ throw new Error("Upload incomplete - all chunks sent but no completion response");
1588
+ }
1589
+ async function uploadFromStream(bucket, path, stream, contentType, chunkSize, options) {
1590
+ const total = options.totalSize || -1;
1591
+ if (total === -1) {
1592
+ throw new Error("totalSize is required when uploading from ReadableStream");
1593
+ }
1594
+ const sessionUri = await initiateResumableUpload(
1595
+ bucket,
1596
+ path,
1597
+ contentType,
1598
+ total,
1599
+ options.metadata
1600
+ );
1601
+ const reader = stream.getReader();
1602
+ let uploaded = 0;
1603
+ let lastResult = null;
1604
+ try {
1605
+ while (true) {
1606
+ const { chunk, done } = await readChunkFromStream(reader, chunkSize);
1607
+ if (done || !chunk) {
1608
+ if (lastResult && lastResult.complete) {
1609
+ return lastResult.metadata;
1610
+ }
1611
+ if (uploaded === total && lastResult) {
1612
+ return lastResult.metadata;
1613
+ }
1614
+ break;
1615
+ }
1616
+ lastResult = await uploadChunk(sessionUri, chunk, uploaded, total);
1617
+ uploaded += chunk.byteLength;
1618
+ if (options.onProgress) {
1619
+ options.onProgress(uploaded, total);
1620
+ }
1621
+ if (lastResult.complete) {
1622
+ return lastResult.metadata;
1623
+ }
1624
+ }
1625
+ throw new Error("Stream ended but upload not complete");
1626
+ } finally {
1627
+ reader.releaseLock();
1628
+ }
1629
+ }
1426
1630
  export {
1427
1631
  FieldValue,
1428
1632
  addDocument,
@@ -1453,5 +1657,6 @@ export {
1453
1657
  signInWithCustomToken,
1454
1658
  updateDocument,
1455
1659
  uploadFile,
1660
+ uploadFileResumable,
1456
1661
  verifyIdToken
1457
1662
  };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@prmichaelsen/firebase-admin-sdk-v8",
3
- "version": "2.2.3",
3
+ "version": "2.3.1",
4
4
  "description": "Firebase Admin SDK for Cloudflare Workers and edge runtimes using REST APIs",
5
5
  "main": "dist/index.js",
6
6
  "module": "dist/index.mjs",