@prmichaelsen/firebase-admin-sdk-v8 2.2.2 → 2.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/AGENT.md +1054 -0
- package/CHANGELOG.md +33 -0
- package/README.md +82 -0
- package/dist/index.d.mts +58 -1
- package/dist/index.d.ts +58 -1
- package/dist/index.js +201 -1
- package/dist/index.mjs +200 -1
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
|
@@ -1124,7 +1124,7 @@ function getDefaultBucket() {
|
|
|
1124
1124
|
return customBucket;
|
|
1125
1125
|
}
|
|
1126
1126
|
const projectId = getProjectId();
|
|
1127
|
-
return `${projectId}.appspot.com`;
|
|
1127
|
+
return `${projectId}.firebasestorage.app`;
|
|
1128
1128
|
}
|
|
1129
1129
|
function detectContentType(filename) {
|
|
1130
1130
|
const ext = filename.split(".").pop()?.toLowerCase();
|
|
@@ -1423,6 +1423,204 @@ async function generateSignedUrl(path, options) {
|
|
|
1423
1423
|
const signedUrl = `https://storage.googleapis.com${canonicalUri}?${canonicalQueryString}&X-Goog-Signature=${signature}`;
|
|
1424
1424
|
return signedUrl;
|
|
1425
1425
|
}
|
|
1426
|
+
|
|
1427
|
+
// src/storage/resumable-upload.ts
|
|
1428
|
+
var UPLOAD_API_BASE2 = "https://storage.googleapis.com/upload/storage/v1";
|
|
1429
|
+
function getDefaultBucket2() {
|
|
1430
|
+
const customBucket = process.env.FIREBASE_STORAGE_BUCKET;
|
|
1431
|
+
if (customBucket) {
|
|
1432
|
+
return customBucket;
|
|
1433
|
+
}
|
|
1434
|
+
const projectId = getProjectId();
|
|
1435
|
+
return `${projectId}.firebasestorage.app`;
|
|
1436
|
+
}
|
|
1437
|
+
async function initiateResumableUpload(bucket, path, contentType, totalSize, metadata) {
|
|
1438
|
+
const token = await getAdminAccessToken();
|
|
1439
|
+
const url = `${UPLOAD_API_BASE2}/b/${encodeURIComponent(bucket)}/o?uploadType=resumable&name=${encodeURIComponent(path)}`;
|
|
1440
|
+
const requestBody = {};
|
|
1441
|
+
if (metadata) {
|
|
1442
|
+
requestBody.metadata = metadata;
|
|
1443
|
+
}
|
|
1444
|
+
const response = await fetch(url, {
|
|
1445
|
+
method: "POST",
|
|
1446
|
+
headers: {
|
|
1447
|
+
"Authorization": `Bearer ${token}`,
|
|
1448
|
+
"Content-Type": "application/json",
|
|
1449
|
+
"X-Upload-Content-Type": contentType,
|
|
1450
|
+
"X-Upload-Content-Length": totalSize.toString()
|
|
1451
|
+
},
|
|
1452
|
+
body: Object.keys(requestBody).length > 0 ? JSON.stringify(requestBody) : void 0
|
|
1453
|
+
});
|
|
1454
|
+
if (!response.ok) {
|
|
1455
|
+
const errorText = await response.text();
|
|
1456
|
+
throw new Error(`Failed to initiate resumable upload: ${response.status} ${errorText}`);
|
|
1457
|
+
}
|
|
1458
|
+
const sessionUri = response.headers.get("Location");
|
|
1459
|
+
if (!sessionUri) {
|
|
1460
|
+
throw new Error("No session URI returned from resumable upload initiation");
|
|
1461
|
+
}
|
|
1462
|
+
return sessionUri;
|
|
1463
|
+
}
|
|
1464
|
+
async function uploadChunk(sessionUri, chunk, start, total) {
|
|
1465
|
+
const end = start + chunk.byteLength - 1;
|
|
1466
|
+
const response = await fetch(sessionUri, {
|
|
1467
|
+
method: "PUT",
|
|
1468
|
+
headers: {
|
|
1469
|
+
"Content-Length": chunk.byteLength.toString(),
|
|
1470
|
+
"Content-Range": `bytes ${start}-${end}/${total}`
|
|
1471
|
+
},
|
|
1472
|
+
body: chunk
|
|
1473
|
+
});
|
|
1474
|
+
if (response.status === 200 || response.status === 201) {
|
|
1475
|
+
const metadata = await response.json();
|
|
1476
|
+
return { complete: true, metadata };
|
|
1477
|
+
} else if (response.status === 308) {
|
|
1478
|
+
return { complete: false };
|
|
1479
|
+
} else {
|
|
1480
|
+
const errorText = await response.text();
|
|
1481
|
+
throw new Error(`Chunk upload failed: ${response.status} ${errorText}`);
|
|
1482
|
+
}
|
|
1483
|
+
}
|
|
1484
|
+
async function getUploadProgress(sessionUri) {
|
|
1485
|
+
const response = await fetch(sessionUri, {
|
|
1486
|
+
method: "PUT",
|
|
1487
|
+
headers: {
|
|
1488
|
+
"Content-Length": "0",
|
|
1489
|
+
"Content-Range": "bytes */*"
|
|
1490
|
+
}
|
|
1491
|
+
});
|
|
1492
|
+
if (response.status === 308) {
|
|
1493
|
+
const range = response.headers.get("Range");
|
|
1494
|
+
if (range) {
|
|
1495
|
+
const match = range.match(/bytes=0-(\d+)/);
|
|
1496
|
+
if (match) {
|
|
1497
|
+
return parseInt(match[1], 10) + 1;
|
|
1498
|
+
}
|
|
1499
|
+
}
|
|
1500
|
+
} else if (response.status === 200 || response.status === 201) {
|
|
1501
|
+
const metadata = await response.json();
|
|
1502
|
+
return parseInt(metadata.size, 10);
|
|
1503
|
+
}
|
|
1504
|
+
return 0;
|
|
1505
|
+
}
|
|
1506
|
+
async function toArrayBuffer(data) {
|
|
1507
|
+
if (data instanceof ArrayBuffer) {
|
|
1508
|
+
return data;
|
|
1509
|
+
} else if (data instanceof Uint8Array) {
|
|
1510
|
+
const buffer = new ArrayBuffer(data.byteLength);
|
|
1511
|
+
new Uint8Array(buffer).set(data);
|
|
1512
|
+
return buffer;
|
|
1513
|
+
} else if (data instanceof Blob) {
|
|
1514
|
+
return await data.arrayBuffer();
|
|
1515
|
+
}
|
|
1516
|
+
throw new Error("Unsupported data type");
|
|
1517
|
+
}
|
|
1518
|
+
async function readChunkFromStream(reader, chunkSize) {
|
|
1519
|
+
const chunks = [];
|
|
1520
|
+
let totalBytes = 0;
|
|
1521
|
+
while (totalBytes < chunkSize) {
|
|
1522
|
+
const { value, done } = await reader.read();
|
|
1523
|
+
if (done) {
|
|
1524
|
+
if (totalBytes === 0) {
|
|
1525
|
+
return { chunk: null, done: true };
|
|
1526
|
+
}
|
|
1527
|
+
break;
|
|
1528
|
+
}
|
|
1529
|
+
if (value) {
|
|
1530
|
+
chunks.push(value);
|
|
1531
|
+
totalBytes += value.byteLength;
|
|
1532
|
+
if (totalBytes >= chunkSize) {
|
|
1533
|
+
break;
|
|
1534
|
+
}
|
|
1535
|
+
}
|
|
1536
|
+
}
|
|
1537
|
+
const combined = new Uint8Array(totalBytes);
|
|
1538
|
+
let offset = 0;
|
|
1539
|
+
for (const chunk of chunks) {
|
|
1540
|
+
combined.set(chunk, offset);
|
|
1541
|
+
offset += chunk.byteLength;
|
|
1542
|
+
}
|
|
1543
|
+
return { chunk: combined.buffer, done: false };
|
|
1544
|
+
}
|
|
1545
|
+
async function uploadFileResumable(path, data, contentType, options = {}) {
|
|
1546
|
+
const bucket = getDefaultBucket2();
|
|
1547
|
+
const chunkSize = options.chunkSize || 256 * 1024;
|
|
1548
|
+
if (data instanceof ReadableStream) {
|
|
1549
|
+
return await uploadFromStream(bucket, path, data, contentType, chunkSize, options);
|
|
1550
|
+
}
|
|
1551
|
+
const buffer = await toArrayBuffer(data);
|
|
1552
|
+
const total = buffer.byteLength;
|
|
1553
|
+
let sessionUri = options.resumeToken;
|
|
1554
|
+
let uploaded = 0;
|
|
1555
|
+
if (!sessionUri) {
|
|
1556
|
+
sessionUri = await initiateResumableUpload(
|
|
1557
|
+
bucket,
|
|
1558
|
+
path,
|
|
1559
|
+
contentType,
|
|
1560
|
+
total,
|
|
1561
|
+
options.metadata
|
|
1562
|
+
);
|
|
1563
|
+
} else {
|
|
1564
|
+
uploaded = await getUploadProgress(sessionUri);
|
|
1565
|
+
if (options.onProgress) {
|
|
1566
|
+
options.onProgress(uploaded, total);
|
|
1567
|
+
}
|
|
1568
|
+
}
|
|
1569
|
+
while (uploaded < total) {
|
|
1570
|
+
const end = Math.min(uploaded + chunkSize, total);
|
|
1571
|
+
const chunk = buffer.slice(uploaded, end);
|
|
1572
|
+
const result = await uploadChunk(sessionUri, chunk, uploaded, total);
|
|
1573
|
+
uploaded = end;
|
|
1574
|
+
if (options.onProgress) {
|
|
1575
|
+
options.onProgress(uploaded, total);
|
|
1576
|
+
}
|
|
1577
|
+
if (result.complete) {
|
|
1578
|
+
return result.metadata;
|
|
1579
|
+
}
|
|
1580
|
+
}
|
|
1581
|
+
throw new Error("Upload incomplete - all chunks sent but no completion response");
|
|
1582
|
+
}
|
|
1583
|
+
async function uploadFromStream(bucket, path, stream, contentType, chunkSize, options) {
|
|
1584
|
+
const total = options.totalSize || -1;
|
|
1585
|
+
if (total === -1) {
|
|
1586
|
+
throw new Error("totalSize is required when uploading from ReadableStream");
|
|
1587
|
+
}
|
|
1588
|
+
const sessionUri = await initiateResumableUpload(
|
|
1589
|
+
bucket,
|
|
1590
|
+
path,
|
|
1591
|
+
contentType,
|
|
1592
|
+
total,
|
|
1593
|
+
options.metadata
|
|
1594
|
+
);
|
|
1595
|
+
const reader = stream.getReader();
|
|
1596
|
+
let uploaded = 0;
|
|
1597
|
+
let lastResult = null;
|
|
1598
|
+
try {
|
|
1599
|
+
while (true) {
|
|
1600
|
+
const { chunk, done } = await readChunkFromStream(reader, chunkSize);
|
|
1601
|
+
if (done || !chunk) {
|
|
1602
|
+
if (lastResult && lastResult.complete) {
|
|
1603
|
+
return lastResult.metadata;
|
|
1604
|
+
}
|
|
1605
|
+
if (uploaded === total && lastResult) {
|
|
1606
|
+
return lastResult.metadata;
|
|
1607
|
+
}
|
|
1608
|
+
break;
|
|
1609
|
+
}
|
|
1610
|
+
lastResult = await uploadChunk(sessionUri, chunk, uploaded, total);
|
|
1611
|
+
uploaded += chunk.byteLength;
|
|
1612
|
+
if (options.onProgress) {
|
|
1613
|
+
options.onProgress(uploaded, total);
|
|
1614
|
+
}
|
|
1615
|
+
if (lastResult.complete) {
|
|
1616
|
+
return lastResult.metadata;
|
|
1617
|
+
}
|
|
1618
|
+
}
|
|
1619
|
+
throw new Error("Stream ended but upload not complete");
|
|
1620
|
+
} finally {
|
|
1621
|
+
reader.releaseLock();
|
|
1622
|
+
}
|
|
1623
|
+
}
|
|
1426
1624
|
export {
|
|
1427
1625
|
FieldValue,
|
|
1428
1626
|
addDocument,
|
|
@@ -1453,5 +1651,6 @@ export {
|
|
|
1453
1651
|
signInWithCustomToken,
|
|
1454
1652
|
updateDocument,
|
|
1455
1653
|
uploadFile,
|
|
1654
|
+
uploadFileResumable,
|
|
1456
1655
|
verifyIdToken
|
|
1457
1656
|
};
|
package/package.json
CHANGED