@opentdf/sdk 0.14.0 → 0.16.0-beta.149

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. package/dist/cjs/src/index.js +23 -6
  2. package/dist/cjs/src/opentdf.js +4 -2
  3. package/dist/cjs/src/platform/authorization/entity-identifiers.js +3 -2
  4. package/dist/cjs/src/platform/authorization/resources.js +57 -0
  5. package/dist/cjs/src/version.js +1 -1
  6. package/dist/cjs/tdf3/src/ciphers/aes-gcm-cipher.js +11 -7
  7. package/dist/cjs/tdf3/src/ciphers/symmetric-cipher-base.js +1 -1
  8. package/dist/cjs/tdf3/src/client/builders.js +17 -1
  9. package/dist/cjs/tdf3/src/client/index.js +4 -2
  10. package/dist/cjs/tdf3/src/crypto/core/symmetric.js +29 -12
  11. package/dist/cjs/tdf3/src/models/encryption-information.js +1 -1
  12. package/dist/cjs/tdf3/src/tdf.js +199 -53
  13. package/dist/cjs/tdf3/src/utils/zip-reader.js +2 -2
  14. package/dist/types/src/index.d.ts +14 -1
  15. package/dist/types/src/index.d.ts.map +1 -1
  16. package/dist/types/src/opentdf.d.ts +10 -0
  17. package/dist/types/src/opentdf.d.ts.map +1 -1
  18. package/dist/types/src/platform/authorization/entity-identifiers.d.ts +2 -1
  19. package/dist/types/src/platform/authorization/entity-identifiers.d.ts.map +1 -1
  20. package/dist/types/src/platform/authorization/resources.d.ts +37 -0
  21. package/dist/types/src/platform/authorization/resources.d.ts.map +1 -0
  22. package/dist/types/src/version.d.ts +1 -1
  23. package/dist/types/tdf3/src/ciphers/aes-gcm-cipher.d.ts +1 -1
  24. package/dist/types/tdf3/src/ciphers/aes-gcm-cipher.d.ts.map +1 -1
  25. package/dist/types/tdf3/src/ciphers/symmetric-cipher-base.d.ts +1 -1
  26. package/dist/types/tdf3/src/ciphers/symmetric-cipher-base.d.ts.map +1 -1
  27. package/dist/types/tdf3/src/client/builders.d.ts +14 -0
  28. package/dist/types/tdf3/src/client/builders.d.ts.map +1 -1
  29. package/dist/types/tdf3/src/client/index.d.ts +1 -1
  30. package/dist/types/tdf3/src/client/index.d.ts.map +1 -1
  31. package/dist/types/tdf3/src/crypto/core/symmetric.d.ts +1 -0
  32. package/dist/types/tdf3/src/crypto/core/symmetric.d.ts.map +1 -1
  33. package/dist/types/tdf3/src/models/encryption-information.d.ts +1 -1
  34. package/dist/types/tdf3/src/models/encryption-information.d.ts.map +1 -1
  35. package/dist/types/tdf3/src/tdf.d.ts +20 -0
  36. package/dist/types/tdf3/src/tdf.d.ts.map +1 -1
  37. package/dist/types/tdf3/src/utils/zip-reader.d.ts.map +1 -1
  38. package/dist/web/src/index.js +15 -2
  39. package/dist/web/src/opentdf.js +4 -2
  40. package/dist/web/src/platform/authorization/entity-identifiers.js +3 -2
  41. package/dist/web/src/platform/authorization/resources.js +53 -0
  42. package/dist/web/src/version.js +1 -1
  43. package/dist/web/tdf3/src/ciphers/aes-gcm-cipher.js +11 -7
  44. package/dist/web/tdf3/src/ciphers/symmetric-cipher-base.js +1 -1
  45. package/dist/web/tdf3/src/client/builders.js +17 -1
  46. package/dist/web/tdf3/src/client/index.js +4 -2
  47. package/dist/web/tdf3/src/crypto/core/symmetric.js +28 -12
  48. package/dist/web/tdf3/src/models/encryption-information.js +1 -1
  49. package/dist/web/tdf3/src/tdf.js +198 -53
  50. package/dist/web/tdf3/src/utils/zip-reader.js +2 -2
  51. package/package.json +2 -2
  52. package/src/index.ts +17 -6
  53. package/src/opentdf.ts +16 -0
  54. package/src/platform/authorization/entity-identifiers.ts +2 -1
  55. package/src/platform/authorization/resources.ts +59 -0
  56. package/src/version.ts +1 -1
  57. package/tdf3/src/ciphers/aes-gcm-cipher.ts +19 -14
  58. package/tdf3/src/ciphers/symmetric-cipher-base.ts +5 -1
  59. package/tdf3/src/client/builders.ts +20 -0
  60. package/tdf3/src/client/index.ts +4 -0
  61. package/tdf3/src/crypto/core/symmetric.ts +48 -14
  62. package/tdf3/src/models/encryption-information.ts +1 -1
  63. package/tdf3/src/tdf.ts +296 -79
  64. package/tdf3/src/utils/zip-reader.ts +1 -2
package/tdf3/src/tdf.ts CHANGED
@@ -71,6 +71,10 @@ import {
71
71
  const DEFAULT_SEGMENT_SIZE = 1024 * 1024;
72
72
 
73
73
  const HEX_SEMVER_VERSION = '4.2.2';
74
+ const LEGACY_SEGMENTS_PER_DOWNLOAD = 500;
75
+ const LEGACY_MAX_CONCURRENT_SEGMENT_BATCHES = 3;
76
+ const DEFAULT_BOUND_SEGMENT_BATCH_SIZE = LEGACY_SEGMENTS_PER_DOWNLOAD;
77
+ const DEFAULT_BOUND_MAX_CONCURRENT_SEGMENT_BATCHES = LEGACY_MAX_CONCURRENT_SEGMENT_BATCHES;
74
78
 
75
79
  /**
76
80
  * Configuration for TDF3
@@ -181,6 +185,8 @@ export type DecryptConfiguration = {
181
185
  assertionVerificationKeys?: AssertionVerificationKeys;
182
186
  noVerifyAssertions?: boolean;
183
187
  concurrencyLimit?: number;
188
+ segmentBatchSize?: number;
189
+ maxConcurrentSegmentBatches?: number;
184
190
  wrappingKeyAlgorithm?: KasPublicKeyAlgorithm;
185
191
  };
186
192
 
@@ -1005,7 +1011,7 @@ async function decryptChunk(
1005
1011
  }
1006
1012
 
1007
1013
  async function updateChunkQueue(
1008
- chunkMap: Chunk[],
1014
+ chunks: Chunk[],
1009
1015
  centralDirectory: CentralDirectory[],
1010
1016
  zipReader: ZipReader,
1011
1017
  reconstructedKey: SymmetricKey,
@@ -1014,51 +1020,235 @@ async function updateChunkQueue(
1014
1020
  cryptoService: CryptoService,
1015
1021
  specVersion: string
1016
1022
  ) {
1017
- const chunksInOneDownload = 500;
1018
1023
  let requests = [];
1019
- const maxLength = 3;
1020
1024
 
1021
- for (let i = 0; i < chunkMap.length; i += chunksInOneDownload) {
1022
- if (requests.length === maxLength) {
1025
+ for (let i = 0; i < chunks.length; i += LEGACY_SEGMENTS_PER_DOWNLOAD) {
1026
+ if (requests.length === LEGACY_MAX_CONCURRENT_SEGMENT_BATCHES) {
1023
1027
  await Promise.all(requests);
1024
1028
  requests = [];
1025
1029
  }
1026
1030
  requests.push(
1027
- (async () => {
1028
- let buffer: Uint8Array | null;
1031
+ fetchAndDecryptChunkSlice({
1032
+ centralDirectory,
1033
+ zipReader,
1034
+ reconstructedKey,
1035
+ cipher,
1036
+ segmentIntegrityAlgorithm,
1037
+ cryptoService,
1038
+ specVersion,
1039
+ slice: chunks.slice(i, i + LEGACY_SEGMENTS_PER_DOWNLOAD),
1040
+ }).catch(() => undefined)
1041
+ );
1042
+ }
1043
+ }
1029
1044
 
1030
- const slice = chunkMap.slice(i, i + chunksInOneDownload);
1031
- try {
1032
- const bufferSize = slice.reduce(
1033
- (currentVal, { encryptedSegmentSize }) => currentVal + (encryptedSegmentSize as number),
1034
- 0
1035
- );
1036
- buffer = await zipReader.getPayloadSegment(
1037
- centralDirectory,
1038
- '0.payload',
1039
- slice[0].encryptedOffset,
1040
- bufferSize
1041
- );
1042
- } catch (e) {
1043
- if (e instanceof InvalidFileError) {
1044
- throw e;
1045
- }
1046
- throw new NetworkError('unable to fetch payload segment', e);
1045
+ function rejectChunks(chunks: Chunk[], error: Error) {
1046
+ for (const chunk of chunks) {
1047
+ chunk.decryptedChunk.reject(error);
1048
+ }
1049
+ }
1050
+
1051
+ function asDecryptError(error: unknown, fallbackMessage: string): Error {
1052
+ if (error instanceof Error) {
1053
+ return error;
1054
+ }
1055
+ return new DecryptError(fallbackMessage, new Error(String(error)));
1056
+ }
1057
+
1058
+ async function fetchAndDecryptChunkSlice({
1059
+ centralDirectory,
1060
+ zipReader,
1061
+ reconstructedKey,
1062
+ cipher,
1063
+ segmentIntegrityAlgorithm,
1064
+ cryptoService,
1065
+ specVersion,
1066
+ slice,
1067
+ }: {
1068
+ centralDirectory: CentralDirectory[];
1069
+ zipReader: ZipReader;
1070
+ reconstructedKey: SymmetricKey;
1071
+ cipher: SymmetricCipher;
1072
+ segmentIntegrityAlgorithm: IntegrityAlgorithm;
1073
+ cryptoService: CryptoService;
1074
+ specVersion: string;
1075
+ slice: Chunk[];
1076
+ }) {
1077
+ const firstChunk = slice[0];
1078
+ let buffer!: Uint8Array;
1079
+ const bufferSize = slice.reduce(
1080
+ (currentVal, { encryptedSegmentSize }) => currentVal + (encryptedSegmentSize as number),
1081
+ 0
1082
+ );
1083
+ try {
1084
+ buffer = await zipReader.getPayloadSegment(
1085
+ centralDirectory,
1086
+ '0.payload',
1087
+ firstChunk.encryptedOffset,
1088
+ bufferSize
1089
+ );
1090
+ } catch (error) {
1091
+ const wrappedError =
1092
+ error instanceof InvalidFileError
1093
+ ? error
1094
+ : new NetworkError('unable to fetch payload segment', error);
1095
+ rejectChunks(slice, wrappedError);
1096
+ throw wrappedError;
1097
+ }
1098
+
1099
+ try {
1100
+ await sliceAndDecrypt({
1101
+ buffer,
1102
+ cryptoService,
1103
+ reconstructedKey,
1104
+ slice,
1105
+ cipher,
1106
+ segmentIntegrityAlgorithm,
1107
+ specVersion,
1108
+ });
1109
+ } catch (error) {
1110
+ const wrappedError = asDecryptError(error, 'failed to decrypt payload segment');
1111
+ rejectChunks(slice, wrappedError);
1112
+ throw wrappedError;
1113
+ }
1114
+ }
1115
+
1116
+ export type SegmentBatchSchedulerState = {
1117
+ consumedSegments: number;
1118
+ inFlightBatches: number;
1119
+ maxPrefetchedSegments: number;
1120
+ scheduledSegments: number;
1121
+ };
1122
+
1123
+ export type SegmentBatchScheduler = {
1124
+ fillWindow: () => void;
1125
+ markConsumed: (count?: number) => void;
1126
+ snapshot: () => SegmentBatchSchedulerState;
1127
+ };
1128
+
1129
+ export function createBoundedSegmentScheduler({
1130
+ totalSegments,
1131
+ segmentBatchSize,
1132
+ maxConcurrentSegmentBatches,
1133
+ onError,
1134
+ scheduleBatch,
1135
+ }: {
1136
+ totalSegments: number;
1137
+ segmentBatchSize: number;
1138
+ maxConcurrentSegmentBatches: number;
1139
+ onError?: (error: Error, startIndex: number, endIndex: number) => void;
1140
+ scheduleBatch: (startIndex: number, endIndex: number) => Promise<void>;
1141
+ }): SegmentBatchScheduler {
1142
+ const maxPrefetchedSegments = segmentBatchSize * maxConcurrentSegmentBatches;
1143
+ let consumedSegments = 0;
1144
+ let inFlightBatches = 0;
1145
+ let pumping = false;
1146
+ let scheduledSegments = 0;
1147
+ let stopped = false;
1148
+
1149
+ const pump = () => {
1150
+ if (pumping || stopped) {
1151
+ return;
1152
+ }
1153
+ pumping = true;
1154
+ try {
1155
+ while (
1156
+ !stopped &&
1157
+ inFlightBatches < maxConcurrentSegmentBatches &&
1158
+ scheduledSegments < totalSegments
1159
+ ) {
1160
+ const prefetchedSegments = scheduledSegments - consumedSegments;
1161
+ const remainingWindow = maxPrefetchedSegments - prefetchedSegments;
1162
+ if (remainingWindow <= 0) {
1163
+ break;
1047
1164
  }
1048
- if (buffer) {
1049
- sliceAndDecrypt({
1050
- buffer,
1051
- cryptoService,
1052
- reconstructedKey,
1053
- slice,
1054
- cipher,
1055
- segmentIntegrityAlgorithm,
1056
- specVersion,
1057
- });
1165
+
1166
+ const startIndex = scheduledSegments;
1167
+ const nextBatchSize = Math.min(segmentBatchSize, totalSegments - startIndex);
1168
+ if (remainingWindow < nextBatchSize) {
1169
+ break;
1058
1170
  }
1059
- })()
1060
- );
1171
+ const endIndex = startIndex + nextBatchSize;
1172
+ scheduledSegments = endIndex;
1173
+ inFlightBatches += 1;
1174
+
1175
+ void Promise.resolve()
1176
+ .then(() => scheduleBatch(startIndex, endIndex))
1177
+ .catch((error) => {
1178
+ stopped = true;
1179
+ onError?.(
1180
+ asDecryptError(error, 'failed to schedule segment batch'),
1181
+ startIndex,
1182
+ endIndex
1183
+ );
1184
+ })
1185
+ .finally(() => {
1186
+ inFlightBatches -= 1;
1187
+ pump();
1188
+ });
1189
+ }
1190
+ } finally {
1191
+ pumping = false;
1192
+ }
1193
+ };
1194
+
1195
+ return {
1196
+ fillWindow() {
1197
+ pump();
1198
+ },
1199
+ markConsumed(count = 1) {
1200
+ consumedSegments = Math.min(totalSegments, consumedSegments + count);
1201
+ pump();
1202
+ },
1203
+ snapshot() {
1204
+ return {
1205
+ consumedSegments,
1206
+ inFlightBatches,
1207
+ maxPrefetchedSegments,
1208
+ scheduledSegments,
1209
+ };
1210
+ },
1211
+ };
1212
+ }
1213
+
1214
+ function normalizeSegmentBatchSetting(
1215
+ value: number | undefined,
1216
+ defaultValue: number,
1217
+ name: 'segmentBatchSize' | 'maxConcurrentSegmentBatches'
1218
+ ) {
1219
+ const normalized = value ?? defaultValue;
1220
+ if (!Number.isInteger(normalized) || normalized < 1) {
1221
+ throw new ConfigurationError(`${name} must be a positive integer`);
1061
1222
  }
1223
+ return normalized;
1224
+ }
1225
+
1226
+ /**
1227
+ * Enables bounded scheduling only when at least one tuning knob is set.
1228
+ * If callers set only one knob, the other falls back to the legacy value so
1229
+ * throughput stays aligned with the pre-bounded path. Adjust both knobs together
1230
+ * when tuning for predictable memory and performance characteristics.
1231
+ */
1232
+ function getBoundedSegmentSchedulerOptions({
1233
+ segmentBatchSize,
1234
+ maxConcurrentSegmentBatches,
1235
+ }: Pick<DecryptConfiguration, 'segmentBatchSize' | 'maxConcurrentSegmentBatches'>) {
1236
+ if (segmentBatchSize === undefined && maxConcurrentSegmentBatches === undefined) {
1237
+ return undefined;
1238
+ }
1239
+
1240
+ return {
1241
+ segmentBatchSize: normalizeSegmentBatchSetting(
1242
+ segmentBatchSize,
1243
+ DEFAULT_BOUND_SEGMENT_BATCH_SIZE,
1244
+ 'segmentBatchSize'
1245
+ ),
1246
+ maxConcurrentSegmentBatches: normalizeSegmentBatchSetting(
1247
+ maxConcurrentSegmentBatches,
1248
+ DEFAULT_BOUND_MAX_CONCURRENT_SEGMENT_BATCHES,
1249
+ 'maxConcurrentSegmentBatches'
1250
+ ),
1251
+ };
1062
1252
  }
1063
1253
 
1064
1254
  export async function sliceAndDecrypt({
@@ -1079,13 +1269,12 @@ export async function sliceAndDecrypt({
1079
1269
  specVersion: string;
1080
1270
  }) {
1081
1271
  for (const index in slice) {
1082
- const { encryptedOffset, encryptedSegmentSize, plainSegmentSize } = slice[index];
1272
+ const chunk = slice[index];
1273
+ const { encryptedOffset, encryptedSegmentSize, plainSegmentSize } = chunk;
1083
1274
 
1084
1275
  const offset =
1085
1276
  slice[0].encryptedOffset === 0 ? encryptedOffset : encryptedOffset % slice[0].encryptedOffset;
1086
- const encryptedChunk = new Uint8Array(
1087
- buffer.slice(offset, offset + (encryptedSegmentSize as number))
1088
- );
1277
+ const encryptedChunk = buffer.subarray(offset, offset + (encryptedSegmentSize as number));
1089
1278
 
1090
1279
  if (encryptedChunk.length !== encryptedSegmentSize) {
1091
1280
  throw new DecryptError('Failed to fetch entire segment');
@@ -1095,7 +1284,7 @@ export async function sliceAndDecrypt({
1095
1284
  const result = await decryptChunk(
1096
1285
  encryptedChunk,
1097
1286
  reconstructedKey,
1098
- slice[index]['hash'],
1287
+ chunk.hash,
1099
1288
  cipher,
1100
1289
  segmentIntegrityAlgorithm,
1101
1290
  specVersion,
@@ -1106,9 +1295,9 @@ export async function sliceAndDecrypt({
1106
1295
  `incorrect segment size: found [${result.payload.length()}], expected [${plainSegmentSize}]`
1107
1296
  );
1108
1297
  }
1109
- slice[index].decryptedChunk.set(result);
1298
+ chunk.decryptedChunk.set(result);
1110
1299
  } catch (e) {
1111
- slice[index].decryptedChunk.reject(e);
1300
+ chunk.decryptedChunk.reject(e);
1112
1301
  }
1113
1302
  }
1114
1303
  }
@@ -1212,27 +1401,22 @@ export async function decryptStreamFrom(
1212
1401
  }
1213
1402
 
1214
1403
  let mapOfRequestsOffset = 0;
1215
- const chunkMap = new Map(
1216
- segments.map(
1217
- ({
1404
+ const chunks = segments.map(
1405
+ ({
1406
+ hash,
1407
+ encryptedSegmentSize = encryptedSegmentSizeDefault,
1408
+ segmentSize = segmentSizeDefault,
1409
+ }) => {
1410
+ const chunk: Chunk = {
1218
1411
  hash,
1219
- encryptedSegmentSize = encryptedSegmentSizeDefault,
1220
- segmentSize = segmentSizeDefault,
1221
- }) => {
1222
- const result = (() => {
1223
- const chunk: Chunk = {
1224
- hash,
1225
- encryptedOffset: mapOfRequestsOffset,
1226
- encryptedSegmentSize,
1227
- decryptedChunk: mailbox<DecryptResult>(),
1228
- plainSegmentSize: segmentSize,
1229
- };
1230
- return chunk;
1231
- })();
1232
- mapOfRequestsOffset += encryptedSegmentSize;
1233
- return [hash, result];
1234
- }
1235
- )
1412
+ encryptedOffset: mapOfRequestsOffset,
1413
+ encryptedSegmentSize,
1414
+ decryptedChunk: mailbox<DecryptResult>(),
1415
+ plainSegmentSize: segmentSize,
1416
+ };
1417
+ mapOfRequestsOffset += encryptedSegmentSize;
1418
+ return chunk;
1419
+ }
1236
1420
  );
1237
1421
 
1238
1422
  const cipher = new AesGcmCipher(cfg.cryptoService);
@@ -1241,33 +1425,66 @@ export async function decryptStreamFrom(
1241
1425
  throw new UnsupportedError(`Unsupported segment hash alg [${segmentIntegrityAlg}]`);
1242
1426
  }
1243
1427
 
1244
- // Not waiting for Promise to resolve
1245
- updateChunkQueue(
1246
- Array.from(chunkMap.values()),
1247
- centralDirectory,
1248
- zipReader,
1249
- keyForDecryption,
1250
- cipher,
1251
- segmentIntegrityAlg,
1252
- cfg.cryptoService,
1253
- specVersion
1254
- );
1428
+ const schedulerOptions = getBoundedSegmentSchedulerOptions(cfg);
1429
+ let scheduler: SegmentBatchScheduler | undefined;
1430
+ if (schedulerOptions) {
1431
+ scheduler = createBoundedSegmentScheduler({
1432
+ totalSegments: chunks.length,
1433
+ ...schedulerOptions,
1434
+ onError: (error, startIndex) => {
1435
+ rejectChunks(chunks.slice(startIndex), error);
1436
+ },
1437
+ scheduleBatch: async (startIndex, endIndex) =>
1438
+ fetchAndDecryptChunkSlice({
1439
+ centralDirectory,
1440
+ zipReader,
1441
+ reconstructedKey: keyForDecryption,
1442
+ cipher,
1443
+ segmentIntegrityAlgorithm: segmentIntegrityAlg,
1444
+ cryptoService: cfg.cryptoService,
1445
+ specVersion,
1446
+ slice: chunks.slice(startIndex, endIndex),
1447
+ }),
1448
+ });
1449
+ scheduler.fillWindow();
1450
+ } else {
1451
+ void updateChunkQueue(
1452
+ chunks,
1453
+ centralDirectory,
1454
+ zipReader,
1455
+ keyForDecryption,
1456
+ cipher,
1457
+ segmentIntegrityAlg,
1458
+ cfg.cryptoService,
1459
+ specVersion
1460
+ );
1461
+ }
1255
1462
 
1256
1463
  let progress = 0;
1464
+ let nextChunkIndex = 0;
1257
1465
  const underlyingSource = {
1258
1466
  pull: async (controller: ReadableStreamDefaultController) => {
1259
- if (chunkMap.size === 0) {
1467
+ if (nextChunkIndex >= chunks.length) {
1260
1468
  controller.close();
1261
1469
  return;
1262
1470
  }
1263
1471
 
1264
- const [hash, chunk] = chunkMap.entries().next().value;
1472
+ const chunk = chunks[nextChunkIndex];
1265
1473
  const decryptedSegment = await chunk.decryptedChunk;
1474
+ const encryptedSegmentSize = chunk.encryptedSegmentSize ?? 0;
1475
+ const plainChunk = new Uint8Array(decryptedSegment.payload.asArrayBuffer());
1266
1476
 
1267
- controller.enqueue(new Uint8Array(decryptedSegment.payload.asByteArray()));
1268
- progress += chunk.encryptedSegmentSize;
1477
+ controller.enqueue(plainChunk);
1478
+ progress += encryptedSegmentSize;
1269
1479
  cfg.progressHandler?.(progress);
1270
- chunkMap.delete(hash);
1480
+ // Release the resolved plaintext held by the consumed mailbox so long
1481
+ // browser decrypts do not retain every prior segment in memory.
1482
+ chunks[nextChunkIndex] = {
1483
+ ...chunk,
1484
+ decryptedChunk: mailbox<DecryptResult>(),
1485
+ };
1486
+ nextChunkIndex += 1;
1487
+ scheduler?.markConsumed();
1271
1488
  },
1272
1489
  ...(cfg.fileStreamServiceWorker && { fileStreamServiceWorker: cfg.fileStreamServiceWorker }),
1273
1490
  };
@@ -133,8 +133,7 @@ export class ZipReader {
133
133
  cdObj.relativeOffsetOfLocalHeader + cdObj.headerLength + encrpytedSegmentOffset;
134
134
  // TODO: what's the exact byte start?
135
135
  const byteEnd = byteStart + encryptedSegmentSize;
136
-
137
- return await this.getChunk(byteStart, byteEnd);
136
+ return this.getChunk(byteStart, byteEnd);
138
137
  }
139
138
 
140
139
  /**