@opentdf/sdk 0.14.0 → 0.16.0-beta.149
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/src/index.js +23 -6
- package/dist/cjs/src/opentdf.js +4 -2
- package/dist/cjs/src/platform/authorization/entity-identifiers.js +3 -2
- package/dist/cjs/src/platform/authorization/resources.js +57 -0
- package/dist/cjs/src/version.js +1 -1
- package/dist/cjs/tdf3/src/ciphers/aes-gcm-cipher.js +11 -7
- package/dist/cjs/tdf3/src/ciphers/symmetric-cipher-base.js +1 -1
- package/dist/cjs/tdf3/src/client/builders.js +17 -1
- package/dist/cjs/tdf3/src/client/index.js +4 -2
- package/dist/cjs/tdf3/src/crypto/core/symmetric.js +29 -12
- package/dist/cjs/tdf3/src/models/encryption-information.js +1 -1
- package/dist/cjs/tdf3/src/tdf.js +199 -53
- package/dist/cjs/tdf3/src/utils/zip-reader.js +2 -2
- package/dist/types/src/index.d.ts +14 -1
- package/dist/types/src/index.d.ts.map +1 -1
- package/dist/types/src/opentdf.d.ts +10 -0
- package/dist/types/src/opentdf.d.ts.map +1 -1
- package/dist/types/src/platform/authorization/entity-identifiers.d.ts +2 -1
- package/dist/types/src/platform/authorization/entity-identifiers.d.ts.map +1 -1
- package/dist/types/src/platform/authorization/resources.d.ts +37 -0
- package/dist/types/src/platform/authorization/resources.d.ts.map +1 -0
- package/dist/types/src/version.d.ts +1 -1
- package/dist/types/tdf3/src/ciphers/aes-gcm-cipher.d.ts +1 -1
- package/dist/types/tdf3/src/ciphers/aes-gcm-cipher.d.ts.map +1 -1
- package/dist/types/tdf3/src/ciphers/symmetric-cipher-base.d.ts +1 -1
- package/dist/types/tdf3/src/ciphers/symmetric-cipher-base.d.ts.map +1 -1
- package/dist/types/tdf3/src/client/builders.d.ts +14 -0
- package/dist/types/tdf3/src/client/builders.d.ts.map +1 -1
- package/dist/types/tdf3/src/client/index.d.ts +1 -1
- package/dist/types/tdf3/src/client/index.d.ts.map +1 -1
- package/dist/types/tdf3/src/crypto/core/symmetric.d.ts +1 -0
- package/dist/types/tdf3/src/crypto/core/symmetric.d.ts.map +1 -1
- package/dist/types/tdf3/src/models/encryption-information.d.ts +1 -1
- package/dist/types/tdf3/src/models/encryption-information.d.ts.map +1 -1
- package/dist/types/tdf3/src/tdf.d.ts +20 -0
- package/dist/types/tdf3/src/tdf.d.ts.map +1 -1
- package/dist/types/tdf3/src/utils/zip-reader.d.ts.map +1 -1
- package/dist/web/src/index.js +15 -2
- package/dist/web/src/opentdf.js +4 -2
- package/dist/web/src/platform/authorization/entity-identifiers.js +3 -2
- package/dist/web/src/platform/authorization/resources.js +53 -0
- package/dist/web/src/version.js +1 -1
- package/dist/web/tdf3/src/ciphers/aes-gcm-cipher.js +11 -7
- package/dist/web/tdf3/src/ciphers/symmetric-cipher-base.js +1 -1
- package/dist/web/tdf3/src/client/builders.js +17 -1
- package/dist/web/tdf3/src/client/index.js +4 -2
- package/dist/web/tdf3/src/crypto/core/symmetric.js +28 -12
- package/dist/web/tdf3/src/models/encryption-information.js +1 -1
- package/dist/web/tdf3/src/tdf.js +198 -53
- package/dist/web/tdf3/src/utils/zip-reader.js +2 -2
- package/package.json +2 -2
- package/src/index.ts +17 -6
- package/src/opentdf.ts +16 -0
- package/src/platform/authorization/entity-identifiers.ts +2 -1
- package/src/platform/authorization/resources.ts +59 -0
- package/src/version.ts +1 -1
- package/tdf3/src/ciphers/aes-gcm-cipher.ts +19 -14
- package/tdf3/src/ciphers/symmetric-cipher-base.ts +5 -1
- package/tdf3/src/client/builders.ts +20 -0
- package/tdf3/src/client/index.ts +4 -0
- package/tdf3/src/crypto/core/symmetric.ts +48 -14
- package/tdf3/src/models/encryption-information.ts +1 -1
- package/tdf3/src/tdf.ts +296 -79
- package/tdf3/src/utils/zip-reader.ts +1 -2
package/tdf3/src/tdf.ts
CHANGED
@@ -71,6 +71,10 @@ import {
 const DEFAULT_SEGMENT_SIZE = 1024 * 1024;
 
 const HEX_SEMVER_VERSION = '4.2.2';
+const LEGACY_SEGMENTS_PER_DOWNLOAD = 500;
+const LEGACY_MAX_CONCURRENT_SEGMENT_BATCHES = 3;
+const DEFAULT_BOUND_SEGMENT_BATCH_SIZE = LEGACY_SEGMENTS_PER_DOWNLOAD;
+const DEFAULT_BOUND_MAX_CONCURRENT_SEGMENT_BATCHES = LEGACY_MAX_CONCURRENT_SEGMENT_BATCHES;
 
 /**
  * Configuration for TDF3
@@ -181,6 +185,8 @@ export type DecryptConfiguration = {
   assertionVerificationKeys?: AssertionVerificationKeys;
   noVerifyAssertions?: boolean;
   concurrencyLimit?: number;
+  segmentBatchSize?: number;
+  maxConcurrentSegmentBatches?: number;
   wrappingKeyAlgorithm?: KasPublicKeyAlgorithm;
 };
 
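The two new knobs are optional; each must be a positive integer, and when only one is supplied the other falls back to the legacy behavior (batches of 500 segments, 3 batches in flight). A minimal caller-side sketch, shown only to illustrate the shape of the options — the chosen values are arbitrary, not defaults:

// Hypothetical tuning values; both knobs are optional positive integers.
const decryptTuning: Pick<DecryptConfiguration, 'segmentBatchSize' | 'maxConcurrentSegmentBatches'> = {
  segmentBatchSize: 64, // segments fetched and decrypted per batch
  maxConcurrentSegmentBatches: 2, // batches allowed in flight at once
};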
@@ -1005,7 +1011,7 @@ async function decryptChunk(
 }
 
 async function updateChunkQueue(
-
+  chunks: Chunk[],
   centralDirectory: CentralDirectory[],
   zipReader: ZipReader,
   reconstructedKey: SymmetricKey,
@@ -1014,51 +1020,235 @@ async function updateChunkQueue(
   cryptoService: CryptoService,
   specVersion: string
 ) {
-  const chunksInOneDownload = 500;
   let requests = [];
-  const maxLength = 3;
 
-  for (let i = 0; i <
-    if (requests.length ===
+  for (let i = 0; i < chunks.length; i += LEGACY_SEGMENTS_PER_DOWNLOAD) {
+    if (requests.length === LEGACY_MAX_CONCURRENT_SEGMENT_BATCHES) {
       await Promise.all(requests);
       requests = [];
     }
     requests.push(
-      (
-
+      fetchAndDecryptChunkSlice({
+        centralDirectory,
+        zipReader,
+        reconstructedKey,
+        cipher,
+        segmentIntegrityAlgorithm,
+        cryptoService,
+        specVersion,
+        slice: chunks.slice(i, i + LEGACY_SEGMENTS_PER_DOWNLOAD),
+      }).catch(() => undefined)
+    );
+  }
+}
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+function rejectChunks(chunks: Chunk[], error: Error) {
+  for (const chunk of chunks) {
+    chunk.decryptedChunk.reject(error);
+  }
+}
+
+function asDecryptError(error: unknown, fallbackMessage: string): Error {
+  if (error instanceof Error) {
+    return error;
+  }
+  return new DecryptError(fallbackMessage, new Error(String(error)));
+}
+
+async function fetchAndDecryptChunkSlice({
+  centralDirectory,
+  zipReader,
+  reconstructedKey,
+  cipher,
+  segmentIntegrityAlgorithm,
+  cryptoService,
+  specVersion,
+  slice,
+}: {
+  centralDirectory: CentralDirectory[];
+  zipReader: ZipReader;
+  reconstructedKey: SymmetricKey;
+  cipher: SymmetricCipher;
+  segmentIntegrityAlgorithm: IntegrityAlgorithm;
+  cryptoService: CryptoService;
+  specVersion: string;
+  slice: Chunk[];
+}) {
+  const firstChunk = slice[0];
+  let buffer!: Uint8Array;
+  const bufferSize = slice.reduce(
+    (currentVal, { encryptedSegmentSize }) => currentVal + (encryptedSegmentSize as number),
+    0
+  );
+  try {
+    buffer = await zipReader.getPayloadSegment(
+      centralDirectory,
+      '0.payload',
+      firstChunk.encryptedOffset,
+      bufferSize
+    );
+  } catch (error) {
+    const wrappedError =
+      error instanceof InvalidFileError
+        ? error
+        : new NetworkError('unable to fetch payload segment', error);
+    rejectChunks(slice, wrappedError);
+    throw wrappedError;
+  }
+
+  try {
+    await sliceAndDecrypt({
+      buffer,
+      cryptoService,
+      reconstructedKey,
+      slice,
+      cipher,
+      segmentIntegrityAlgorithm,
+      specVersion,
+    });
+  } catch (error) {
+    const wrappedError = asDecryptError(error, 'failed to decrypt payload segment');
+    rejectChunks(slice, wrappedError);
+    throw wrappedError;
+  }
+}
+
+export type SegmentBatchSchedulerState = {
+  consumedSegments: number;
+  inFlightBatches: number;
+  maxPrefetchedSegments: number;
+  scheduledSegments: number;
+};
+
+export type SegmentBatchScheduler = {
+  fillWindow: () => void;
+  markConsumed: (count?: number) => void;
+  snapshot: () => SegmentBatchSchedulerState;
+};
+
+export function createBoundedSegmentScheduler({
+  totalSegments,
+  segmentBatchSize,
+  maxConcurrentSegmentBatches,
+  onError,
+  scheduleBatch,
+}: {
+  totalSegments: number;
+  segmentBatchSize: number;
+  maxConcurrentSegmentBatches: number;
+  onError?: (error: Error, startIndex: number, endIndex: number) => void;
+  scheduleBatch: (startIndex: number, endIndex: number) => Promise<void>;
+}): SegmentBatchScheduler {
+  const maxPrefetchedSegments = segmentBatchSize * maxConcurrentSegmentBatches;
+  let consumedSegments = 0;
+  let inFlightBatches = 0;
+  let pumping = false;
+  let scheduledSegments = 0;
+  let stopped = false;
+
+  const pump = () => {
+    if (pumping || stopped) {
+      return;
+    }
+    pumping = true;
+    try {
+      while (
+        !stopped &&
+        inFlightBatches < maxConcurrentSegmentBatches &&
+        scheduledSegments < totalSegments
+      ) {
+        const prefetchedSegments = scheduledSegments - consumedSegments;
+        const remainingWindow = maxPrefetchedSegments - prefetchedSegments;
+        if (remainingWindow <= 0) {
+          break;
         }
-
-
-
-
-
-            slice,
-            cipher,
-            segmentIntegrityAlgorithm,
-            specVersion,
-          });
+
+        const startIndex = scheduledSegments;
+        const nextBatchSize = Math.min(segmentBatchSize, totalSegments - startIndex);
+        if (remainingWindow < nextBatchSize) {
+          break;
         }
-
-
+        const endIndex = startIndex + nextBatchSize;
+        scheduledSegments = endIndex;
+        inFlightBatches += 1;
+
+        void Promise.resolve()
+          .then(() => scheduleBatch(startIndex, endIndex))
+          .catch((error) => {
+            stopped = true;
+            onError?.(
+              asDecryptError(error, 'failed to schedule segment batch'),
+              startIndex,
+              endIndex
+            );
+          })
+          .finally(() => {
+            inFlightBatches -= 1;
+            pump();
+          });
+      }
+    } finally {
+      pumping = false;
+    }
+  };
+
+  return {
+    fillWindow() {
+      pump();
+    },
+    markConsumed(count = 1) {
+      consumedSegments = Math.min(totalSegments, consumedSegments + count);
+      pump();
+    },
+    snapshot() {
+      return {
+        consumedSegments,
+        inFlightBatches,
+        maxPrefetchedSegments,
+        scheduledSegments,
+      };
+    },
+  };
+}
+
+function normalizeSegmentBatchSetting(
+  value: number | undefined,
+  defaultValue: number,
+  name: 'segmentBatchSize' | 'maxConcurrentSegmentBatches'
+) {
+  const normalized = value ?? defaultValue;
+  if (!Number.isInteger(normalized) || normalized < 1) {
+    throw new ConfigurationError(`${name} must be a positive integer`);
   }
+  return normalized;
+}
+
+/**
+ * Enables bounded scheduling only when at least one tuning knob is set.
+ * If callers set only one knob, the other falls back to the legacy value so
+ * throughput stays aligned with the pre-bounded path. Adjust both knobs together
+ * when tuning for predictable memory and performance characteristics.
+ */
+function getBoundedSegmentSchedulerOptions({
+  segmentBatchSize,
+  maxConcurrentSegmentBatches,
+}: Pick<DecryptConfiguration, 'segmentBatchSize' | 'maxConcurrentSegmentBatches'>) {
+  if (segmentBatchSize === undefined && maxConcurrentSegmentBatches === undefined) {
+    return undefined;
+  }
+
+  return {
+    segmentBatchSize: normalizeSegmentBatchSetting(
+      segmentBatchSize,
+      DEFAULT_BOUND_SEGMENT_BATCH_SIZE,
+      'segmentBatchSize'
+    ),
+    maxConcurrentSegmentBatches: normalizeSegmentBatchSetting(
+      maxConcurrentSegmentBatches,
+      DEFAULT_BOUND_MAX_CONCURRENT_SEGMENT_BATCHES,
+      'maxConcurrentSegmentBatches'
+    ),
+  };
 }
 
 export async function sliceAndDecrypt({
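Since createBoundedSegmentScheduler is exported, its windowing can be exercised in isolation. The sketch below is illustrative only: the segment counts, knob values, and stub scheduleBatch are assumptions standing in for fetchAndDecryptChunkSlice, not SDK behavior. The scheduler never schedules more than segmentBatchSize * maxConcurrentSegmentBatches segments beyond what markConsumed() has reported, and never more than maxConcurrentSegmentBatches batches at once.

// Illustrative stand-alone use; the stub batch worker replaces fetchAndDecryptChunkSlice.
const scheduler = createBoundedSegmentScheduler({
  totalSegments: 1000,
  segmentBatchSize: 100,
  maxConcurrentSegmentBatches: 2, // prefetch window = 100 * 2 = 200 segments
  onError: (error, startIndex, endIndex) => {
    console.error(`batch [${startIndex}, ${endIndex}) failed`, error);
  },
  scheduleBatch: async (startIndex, endIndex) => {
    // Pretend each batch is fetched and decrypted asynchronously.
    await new Promise((resolve) => setTimeout(resolve, 10));
    console.log(`decrypted segments [${startIndex}, ${endIndex})`);
  },
});

scheduler.fillWindow(); // schedules batches until the window or concurrency limit is hit
scheduler.markConsumed(100); // the reader consumed 100 segments; frees window space and re-pumps
console.log(scheduler.snapshot()); // { consumedSegments, inFlightBatches, maxPrefetchedSegments, scheduledSegments }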
@@ -1079,13 +1269,12 @@ export async function sliceAndDecrypt({
   specVersion: string;
 }) {
   for (const index in slice) {
-    const
+    const chunk = slice[index];
+    const { encryptedOffset, encryptedSegmentSize, plainSegmentSize } = chunk;
 
     const offset =
       slice[0].encryptedOffset === 0 ? encryptedOffset : encryptedOffset % slice[0].encryptedOffset;
-    const encryptedChunk =
-      buffer.slice(offset, offset + (encryptedSegmentSize as number))
-    );
+    const encryptedChunk = buffer.subarray(offset, offset + (encryptedSegmentSize as number));
 
     if (encryptedChunk.length !== encryptedSegmentSize) {
       throw new DecryptError('Failed to fetch entire segment');
@@ -1095,7 +1284,7 @@ export async function sliceAndDecrypt({
       const result = await decryptChunk(
         encryptedChunk,
         reconstructedKey,
-
+        chunk.hash,
         cipher,
         segmentIntegrityAlgorithm,
         specVersion,
@@ -1106,9 +1295,9 @@ export async function sliceAndDecrypt({
          `incorrect segment size: found [${result.payload.length()}], expected [${plainSegmentSize}]`
        );
      }
-
+      chunk.decryptedChunk.set(result);
    } catch (e) {
-
+      chunk.decryptedChunk.reject(e);
    }
  }
 }
@@ -1212,27 +1401,22 @@ export async function decryptStreamFrom(
   }
 
   let mapOfRequestsOffset = 0;
-  const
-
-
+  const chunks = segments.map(
+    ({
+      hash,
+      encryptedSegmentSize = encryptedSegmentSizeDefault,
+      segmentSize = segmentSizeDefault,
+    }) => {
+      const chunk: Chunk = {
         hash,
-
-
-
-
-
-
-
-
-        decryptedChunk: mailbox<DecryptResult>(),
-        plainSegmentSize: segmentSize,
-      };
-      return chunk;
-    })();
-      mapOfRequestsOffset += encryptedSegmentSize;
-      return [hash, result];
-    }
-  )
+        encryptedOffset: mapOfRequestsOffset,
+        encryptedSegmentSize,
+        decryptedChunk: mailbox<DecryptResult>(),
+        plainSegmentSize: segmentSize,
+      };
+      mapOfRequestsOffset += encryptedSegmentSize;
+      return chunk;
+    }
   );
 
   const cipher = new AesGcmCipher(cfg.cryptoService);
@@ -1241,33 +1425,66 @@ export async function decryptStreamFrom(
     throw new UnsupportedError(`Unsupported segment hash alg [${segmentIntegrityAlg}]`);
   }
 
-
-
-
-
-
-
-
-
-
-
-
+  const schedulerOptions = getBoundedSegmentSchedulerOptions(cfg);
+  let scheduler: SegmentBatchScheduler | undefined;
+  if (schedulerOptions) {
+    scheduler = createBoundedSegmentScheduler({
+      totalSegments: chunks.length,
+      ...schedulerOptions,
+      onError: (error, startIndex) => {
+        rejectChunks(chunks.slice(startIndex), error);
+      },
+      scheduleBatch: async (startIndex, endIndex) =>
+        fetchAndDecryptChunkSlice({
+          centralDirectory,
+          zipReader,
+          reconstructedKey: keyForDecryption,
+          cipher,
+          segmentIntegrityAlgorithm: segmentIntegrityAlg,
+          cryptoService: cfg.cryptoService,
+          specVersion,
+          slice: chunks.slice(startIndex, endIndex),
+        }),
+    });
+    scheduler.fillWindow();
+  } else {
+    void updateChunkQueue(
+      chunks,
+      centralDirectory,
+      zipReader,
+      keyForDecryption,
+      cipher,
+      segmentIntegrityAlg,
+      cfg.cryptoService,
+      specVersion
+    );
+  }
 
   let progress = 0;
+  let nextChunkIndex = 0;
   const underlyingSource = {
     pull: async (controller: ReadableStreamDefaultController) => {
-      if (
+      if (nextChunkIndex >= chunks.length) {
        controller.close();
        return;
      }
 
-      const
+      const chunk = chunks[nextChunkIndex];
      const decryptedSegment = await chunk.decryptedChunk;
+      const encryptedSegmentSize = chunk.encryptedSegmentSize ?? 0;
+      const plainChunk = new Uint8Array(decryptedSegment.payload.asArrayBuffer());
 
-      controller.enqueue(
-      progress +=
+      controller.enqueue(plainChunk);
+      progress += encryptedSegmentSize;
      cfg.progressHandler?.(progress);
-
+      // Release the resolved plaintext held by the consumed mailbox so long
+      // browser decrypts do not retain every prior segment in memory.
+      chunks[nextChunkIndex] = {
+        ...chunk,
+        decryptedChunk: mailbox<DecryptResult>(),
+      };
+      nextChunkIndex += 1;
+      scheduler?.markConsumed();
    },
    ...(cfg.fileStreamServiceWorker && { fileStreamServiceWorker: cfg.fileStreamServiceWorker }),
  };
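With the knobs set, the pull handler above reports each consumed chunk via scheduler?.markConsumed(), so ciphertext prefetch stays within segmentBatchSize * maxConcurrentSegmentBatches segments of the reader; without them, the legacy updateChunkQueue path keeps downloading ahead of the stream regardless of how quickly it is drained. A rough back-of-envelope estimate of the window, assuming the 1 MiB DEFAULT_SEGMENT_SIZE and example knob values (actual segment sizes come from the TDF manifest):

// Back-of-envelope only; actual segment sizes are read from the manifest.
const assumedSegmentBytes = 1024 * 1024; // DEFAULT_SEGMENT_SIZE
const segmentBatchSize = 64; // example knob values, not defaults
const maxConcurrentSegmentBatches = 2;
const prefetchWindowBytes = segmentBatchSize * maxConcurrentSegmentBatches * assumedSegmentBytes;
// ≈ 128 MiB of encrypted payload prefetched ahead of the reader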
package/tdf3/src/utils/zip-reader.ts
CHANGED

@@ -133,8 +133,7 @@ export class ZipReader
       cdObj.relativeOffsetOfLocalHeader + cdObj.headerLength + encrpytedSegmentOffset;
     // TODO: what's the exact byte start?
     const byteEnd = byteStart + encryptedSegmentSize;
-
-    return await this.getChunk(byteStart, byteEnd);
+    return this.getChunk(byteStart, byteEnd);
   }
 
   /**