dicom-curate 0.26.2 → 0.28.0

This diff shows the changes between publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects the differences between package versions as they appear in their respective public registries.
@@ -77997,7 +77997,7 @@ function isPrivateTag(tagId) {
77997
77997
  return false;
77998
77998
  }
77999
77999
  function convertKeywordToTagId(keyword) {
78000
- const tagId = isPrivateTag(keyword) ? keyword : dcmjs.data.DicomMetaDictionary.nameMap[keyword]?.tag || keyword;
78000
+ const tagId = isPrivateTag(keyword) ? keyword : dcmjs.data.DicomMetaDictionary.nameMap[keyword]?.tag ?? keyword;
78001
78001
  return tagId.replace(/[(),]/g, "").toLowerCase();
78002
78002
  }
78003
78003
  function convertKeywordPathToTagIdPath(keywordPath) {
@@ -78856,7 +78856,7 @@ function getCid7050Codes(options) {
78856
78856
  var import_lodash = __toESM(require_lodash(), 1);
78857
78857
  var nameMap = dcmjs2.data.DicomMetaDictionary.nameMap;
78858
78858
  function getVr(keyword) {
78859
- const element = nameMap[keyword] || nameMap[`RETIRED_${keyword}`];
78859
+ const element = nameMap[keyword] ?? nameMap[`RETIRED_${keyword}`];
78860
78860
  return element?.vr;
78861
78861
  }
78862
78862
  function temporalVr(vr) {
@@ -78928,7 +78928,7 @@ function deidentifyPS315E({
78928
78928
  }
78929
78929
  }
78930
78930
  }
78931
- return current2[tagName] || null;
78931
+ return current2[tagName] ?? null;
78932
78932
  }
78933
78933
  const {
78934
78934
  cleanDescriptorsOption,
@@ -79688,17 +79688,20 @@ async function fetchWithRetry(...args) {
79688
79688
  // src/hash.ts
79689
79689
  var import_md5 = __toESM(require_md5(), 1);
79690
79690
  var import_js_crc = __toESM(require_crc(), 1);
79691
- async function hash(buffer, hashMethod) {
79691
+ var DEFAULT_HASH_PART_SIZE = 5 * 1024 * 1024;
79692
+ async function hash(buffer, hashMethod, hashPartSize) {
79692
79693
  switch (hashMethod) {
79693
79694
  case "sha256":
79694
79695
  return await sha256Hex(buffer);
79695
79696
  case "crc32":
79696
79697
  return crc32Hex(buffer);
79697
- case "md5":
79698
- return md5Hex(buffer);
79699
79698
  case "crc64":
79700
- default:
79701
79699
  return crc64Hex(buffer);
79700
+ case "aws-s3-etag-2025":
79701
+ return awsS3Etag(buffer, hashPartSize ?? DEFAULT_HASH_PART_SIZE);
79702
+ case "md5":
79703
+ default:
79704
+ return md5Hex(buffer);
79702
79705
  }
79703
79706
  }
79704
79707
  async function sha256Hex(buffer) {
@@ -79709,6 +79712,27 @@ async function sha256Hex(buffer) {
79709
79712
  function md5Hex(buffer) {
79710
79713
  return (0, import_md5.default)(new Uint8Array(buffer));
79711
79714
  }
79715
+ function awsS3Etag(buffer, partSize) {
79716
+ if (buffer.byteLength <= partSize) {
79717
+ return md5Hex(buffer);
79718
+ }
79719
+ return multipartMd5(buffer, partSize);
79720
+ }
79721
+ function multipartMd5(buffer, partSize) {
79722
+ const totalSize = buffer.byteLength;
79723
+ const partCount = Math.ceil(totalSize / partSize);
79724
+ const rawDigests = new Uint8Array(partCount * 16);
79725
+ for (let i4 = 0; i4 < partCount; i4++) {
79726
+ const start = i4 * partSize;
79727
+ const end = Math.min(start + partSize, totalSize);
79728
+ const partBuffer = buffer.slice(start, end);
79729
+ const hex = (0, import_md5.default)(new Uint8Array(partBuffer));
79730
+ for (let j4 = 0; j4 < 16; j4++) {
79731
+ rawDigests[i4 * 16 + j4] = parseInt(hex.slice(j4 * 2, j4 * 2 + 2), 16);
79732
+ }
79733
+ }
79734
+ return `${(0, import_md5.default)(rawDigests)}-${partCount}`;
79735
+ }
79712
79736
  function crc32Hex(input) {
79713
79737
  let bytes;
79714
79738
  if (input instanceof Uint8Array) {
@@ -79772,7 +79796,7 @@ async function loadS3Client() {
79772
79796
  const { createRequire } = await import("module");
79773
79797
  const req = createRequire(import.meta.url);
79774
79798
  const mod = req("@aws-sdk/client-s3");
79775
- cachedS3Client = mod?.default || mod;
79799
+ cachedS3Client = mod?.default ?? mod;
79776
79800
  } else {
79777
79801
  cachedS3Client = await Promise.resolve().then(() => __toESM(require_dist_cjs71(), 1));
79778
79802
  }
@@ -79785,6 +79809,7 @@ async function curateOne({
79785
79809
  outputTarget,
79786
79810
  mappingOptions,
79787
79811
  hashMethod,
79812
+ hashPartSize,
79788
79813
  previousSourceFileInfo,
79789
79814
  previousMappedFileInfo
79790
79815
  }) {
@@ -79811,7 +79836,7 @@ async function curateOne({
79811
79836
  );
79812
79837
  }
79813
79838
  file = await resp.blob();
79814
- const lastModifiedHeader = resp.headers.get("last-modified") || void 0;
79839
+ const lastModifiedHeader = resp.headers.get("last-modified");
79815
79840
  if (lastModifiedHeader) {
79816
79841
  mtime = new Date(lastModifiedHeader).toISOString();
79817
79842
  }
@@ -79877,14 +79902,18 @@ async function curateOne({
79877
79902
  } catch (e4) {
79878
79903
  }
79879
79904
  }
79880
- const fileArrayBuffer = await file.arrayBuffer();
79905
+ let fileArrayBuffer = await file.arrayBuffer();
79881
79906
  let preMappedHash;
79882
79907
  let postMappedHash;
79883
79908
  const postMappedHashHeader = "x-source-file-hash";
79884
79909
  let canSkip = false;
79885
79910
  if (previousSourceFileInfo?.preMappedHash !== void 0) {
79886
79911
  try {
79887
- preMappedHash = await hash(fileArrayBuffer, hashMethod || "crc64");
79912
+ preMappedHash = await hash(
79913
+ fileArrayBuffer,
79914
+ hashMethod ?? "md5",
79915
+ hashPartSize
79916
+ );
79888
79917
  } catch (e4) {
79889
79918
  console.warn(`Failed to compute preMappedHash for ${fileInfo.name}`, e4);
79890
79919
  }
@@ -79989,7 +80018,11 @@ async function curateOne({
79989
80018
  }
79990
80019
  if (!preMappedHash) {
79991
80020
  try {
79992
- preMappedHash = await hash(fileArrayBuffer, hashMethod || "crc64");
80021
+ preMappedHash = await hash(
80022
+ fileArrayBuffer,
80023
+ hashMethod ?? "md5",
80024
+ hashPartSize
80025
+ );
79993
80026
  } catch (e4) {
79994
80027
  console.warn(`Failed to compute preMappedHash for ${fileInfo.name}`, e4);
79995
80028
  }
@@ -80000,7 +80033,12 @@ async function curateOne({
80000
80033
  const modifiedArrayBuffer = mappedDicomData.write({
80001
80034
  allowInvalidVRLength: true
80002
80035
  });
80003
- postMappedHash = await hash(modifiedArrayBuffer, hashMethod || "crc64");
80036
+ postMappedHash = await hash(
80037
+ modifiedArrayBuffer,
80038
+ hashMethod ?? "md5",
80039
+ hashPartSize
80040
+ );
80041
+ fileArrayBuffer = null;
80004
80042
  const previousPostMappedHash = previousMappedFileInfo ? previousMappedFileInfo(clonedMapResults.outputFilePath)?.postMappedHash : void 0;
80005
80043
  if (previousPostMappedHash !== void 0 && previousPostMappedHash === postMappedHash) {
80006
80044
  return noMapResult(clonedMapResults.outputFilePath);
@@ -80032,26 +80070,23 @@ async function curateOne({
80032
80070
  }
80033
80071
  const fullFilePath = path.join(fullDirPath, fileName);
80034
80072
  await fs.writeFile(fullFilePath, new DataView(modifiedArrayBuffer));
80035
- } else {
80073
+ } else if (!outputTarget?.http && !outputTarget?.s3) {
80036
80074
  clonedMapResults.mappedBlob = new Blob([modifiedArrayBuffer], {
80037
80075
  type: "application/octet-stream"
80038
80076
  });
80039
80077
  }
80040
- clonedMapResults.mappedBlob = new Blob([modifiedArrayBuffer], {
80041
- type: "application/octet-stream"
80042
- });
80043
80078
  if (outputTarget?.http) {
80044
80079
  try {
80045
80080
  const key = clonedMapResults.outputFilePath.split("/").map(encodeURIComponent).join("/");
80046
80081
  const uploadUrl = `${outputTarget.http.url}/${key}`;
80047
80082
  const headers = {
80048
- "Content-Type": clonedMapResults.mappedBlob.type || "application/octet-stream",
80083
+ "Content-Type": "application/octet-stream",
80049
80084
  "X-File-Name": fileName,
80050
- "X-File-Type": clonedMapResults.mappedBlob.type || "application/octet-stream",
80085
+ "X-File-Type": "application/octet-stream",
80051
80086
  "X-File-Size": String(modifiedArrayBuffer.byteLength),
80052
- "X-Source-File-Size": String(clonedMapResults.fileInfo?.size || ""),
80053
- "X-Source-File-Modified-Time": mtime || "",
80054
- "X-Source-File-Hash": preMappedHash || ""
80087
+ "X-Source-File-Size": String(clonedMapResults.fileInfo?.size ?? ""),
80088
+ "X-Source-File-Modified-Time": mtime ?? "",
80089
+ "X-Source-File-Hash": preMappedHash ?? ""
80055
80090
  };
80056
80091
  if (outputTarget.http.headers) {
80057
80092
  Object.assign(headers, outputTarget.http.headers);
@@ -80061,25 +80096,27 @@ async function curateOne({
80061
80096
  const resp = await fetchWithRetry(uploadUrl, {
80062
80097
  method: "PUT",
80063
80098
  headers,
80064
- body: clonedMapResults.mappedBlob
80099
+ body: new Blob([modifiedArrayBuffer], {
80100
+ type: "application/octet-stream"
80101
+ })
80065
80102
  });
80066
80103
  if (!resp.ok) {
80067
80104
  console.error(
80068
80105
  `Upload failed for ${uploadUrl}: ${resp.status} ${resp.statusText}`
80069
80106
  );
80070
- clonedMapResults.errors = clonedMapResults.errors || [];
80107
+ clonedMapResults.errors = clonedMapResults.errors ?? [];
80071
80108
  clonedMapResults.errors.push(
80072
80109
  `Upload failed: ${resp.status} ${resp.statusText}`
80073
80110
  );
80074
80111
  } else {
80075
- clonedMapResults.outputUpload = clonedMapResults.outputUpload || {
80112
+ clonedMapResults.outputUpload = clonedMapResults.outputUpload ?? {
80076
80113
  url: uploadUrl,
80077
80114
  status: resp.status
80078
80115
  };
80079
80116
  }
80080
80117
  } catch (e4) {
80081
80118
  console.error("Upload error", e4);
80082
- clonedMapResults.errors = clonedMapResults.errors || [];
80119
+ clonedMapResults.errors = clonedMapResults.errors ?? [];
80083
80120
  clonedMapResults.errors.push(
80084
80121
  `Upload error: ${e4 instanceof Error ? e4.message : String(e4)}`
80085
80122
  );
@@ -80099,12 +80136,14 @@ async function curateOne({
80099
80136
  new s32.PutObjectCommand({
80100
80137
  Bucket: outputTarget.s3.bucketName,
80101
80138
  Key: key,
80102
- Body: await clonedMapResults.mappedBlob.arrayBuffer(),
80103
- ContentType: clonedMapResults.mappedBlob.type || "application/octet-stream",
80139
+ // Use the ArrayBuffer directly — going through Blob.arrayBuffer()
80140
+ // would create yet another copy of the data in memory.
80141
+ Body: new Uint8Array(modifiedArrayBuffer),
80142
+ ContentType: "application/octet-stream",
80104
80143
  Metadata: {
80105
- "source-file-size": String(clonedMapResults.fileInfo?.size || ""),
80106
- "source-file-modified-time": mtime || "",
80107
- "source-file-hash": preMappedHash || "",
80144
+ "source-file-size": String(clonedMapResults.fileInfo?.size ?? ""),
80145
+ "source-file-modified-time": mtime ?? "",
80146
+ "source-file-hash": preMappedHash ?? "",
80108
80147
  ...postMappedHash ? { "source-file-post-mapped-hash": postMappedHash } : {}
80109
80148
  }
80110
80149
  })
@@ -80116,7 +80155,7 @@ async function curateOne({
80116
80155
  };
80117
80156
  } catch (e4) {
80118
80157
  console.error("S3 Upload error", e4);
80119
- clonedMapResults.errors = clonedMapResults.errors || [];
80158
+ clonedMapResults.errors = clonedMapResults.errors ?? [];
80120
80159
  clonedMapResults.errors.push(
80121
80160
  `S3 Upload error: ${e4 instanceof Error ? e4.message : String(e4)}`
80122
80161
  );
@@ -85830,6 +85869,7 @@ fixupNodeWorkerEnvironment().then(() => {
85830
85869
  fileInfo,
85831
85870
  outputTarget: event.data.outputTarget ?? {},
85832
85871
  hashMethod: event.data.hashMethod,
85872
+ hashPartSize: event.data.hashPartSize,
85833
85873
  mappingOptions,
85834
85874
  previousSourceFileInfo: event.data.previousFileInfo,
85835
85875
  previousMappedFileInfo: (targetName) => {
@@ -34005,7 +34005,7 @@ function isPrivateTag(tagId) {
34005
34005
  return false;
34006
34006
  }
34007
34007
  function convertKeywordToTagId(keyword) {
34008
- const tagId = isPrivateTag(keyword) ? keyword : dcmjs.data.DicomMetaDictionary.nameMap[keyword]?.tag || keyword;
34008
+ const tagId = isPrivateTag(keyword) ? keyword : dcmjs.data.DicomMetaDictionary.nameMap[keyword]?.tag ?? keyword;
34009
34009
  return tagId.replace(/[(),]/g, "").toLowerCase();
34010
34010
  }
34011
34011
 
@@ -34850,7 +34850,7 @@ function getCid7050Codes(options) {
34850
34850
  var import_lodash = __toESM(require_lodash(), 1);
34851
34851
  var nameMap = dcmjs2.data.DicomMetaDictionary.nameMap;
34852
34852
  function getVr(keyword) {
34853
- const element = nameMap[keyword] || nameMap[`RETIRED_${keyword}`];
34853
+ const element = nameMap[keyword] ?? nameMap[`RETIRED_${keyword}`];
34854
34854
  return element?.vr;
34855
34855
  }
34856
34856
  function temporalVr(vr) {
@@ -34922,7 +34922,7 @@ function deidentifyPS315E({
34922
34922
  }
34923
34923
  }
34924
34924
  }
34925
- return current[tagName] || null;
34925
+ return current[tagName] ?? null;
34926
34926
  }
34927
34927
  const {
34928
34928
  cleanDescriptorsOption,
@@ -17966,7 +17966,7 @@ function isPrivateTag(tagId) {
17966
17966
  return false;
17967
17967
  }
17968
17968
  function convertKeywordToTagId(keyword) {
17969
- const tagId = isPrivateTag(keyword) ? keyword : dcmjs.data.DicomMetaDictionary.nameMap[keyword]?.tag || keyword;
17969
+ const tagId = isPrivateTag(keyword) ? keyword : dcmjs.data.DicomMetaDictionary.nameMap[keyword]?.tag ?? keyword;
17970
17970
  return tagId.replace(/[(),]/g, "").toLowerCase();
17971
17971
  }
17972
17972
  function convertKeywordPathToTagIdPath(keywordPath) {
@@ -97,7 +97,7 @@ function composedSpec() {
97
97
  ctxIn.hostProps.activityProviderName,
98
98
  ctxIn.centerSubjectId(parser),
99
99
  ctxIn.timepointName(parser),
100
- ctxIn.scanName(parser) + "=" + parser.getDicom("SeriesNumber") || "UNKNOWN",
100
+ ctxIn.scanName(parser) + "=" + (parser.getDicom("SeriesNumber") ?? "UNKNOWN"),
101
101
  parser.getFilePathComp(parser.FILEBASENAME) + ".dcm"
102
102
  ];
103
103
  },
@@ -34008,7 +34008,7 @@ function isPrivateTag(tagId) {
34008
34008
  return false;
34009
34009
  }
34010
34010
  function convertKeywordToTagId(keyword) {
34011
- const tagId = isPrivateTag(keyword) ? keyword : dcmjs.data.DicomMetaDictionary.nameMap[keyword]?.tag || keyword;
34011
+ const tagId = isPrivateTag(keyword) ? keyword : dcmjs.data.DicomMetaDictionary.nameMap[keyword]?.tag ?? keyword;
34012
34012
  return tagId.replace(/[(),]/g, "").toLowerCase();
34013
34013
  }
34014
34014
  function convertKeywordPathToTagIdPath(keywordPath) {
@@ -34867,7 +34867,7 @@ function getCid7050Codes(options) {
34867
34867
  var import_lodash = __toESM(require_lodash(), 1);
34868
34868
  var nameMap = dcmjs2.data.DicomMetaDictionary.nameMap;
34869
34869
  function getVr(keyword) {
34870
- const element = nameMap[keyword] || nameMap[`RETIRED_${keyword}`];
34870
+ const element = nameMap[keyword] ?? nameMap[`RETIRED_${keyword}`];
34871
34871
  return element?.vr;
34872
34872
  }
34873
34873
  function temporalVr(vr) {
@@ -34939,7 +34939,7 @@ function deidentifyPS315E({
34939
34939
  }
34940
34940
  }
34941
34941
  }
34942
- return current[tagName] || null;
34942
+ return current[tagName] ?? null;
34943
34943
  }
34944
34944
  const {
34945
34945
  cleanDescriptorsOption,
@@ -71706,7 +71706,7 @@ function isPrivateTag(tagId) {
71706
71706
  return false;
71707
71707
  }
71708
71708
  function convertKeywordToTagId(keyword) {
71709
- const tagId = isPrivateTag(keyword) ? keyword : dcmjs.data.DicomMetaDictionary.nameMap[keyword]?.tag || keyword;
71709
+ const tagId = isPrivateTag(keyword) ? keyword : dcmjs.data.DicomMetaDictionary.nameMap[keyword]?.tag ?? keyword;
71710
71710
  return tagId.replace(/[(),]/g, "").toLowerCase();
71711
71711
  }
71712
71712
  function convertKeywordPathToTagIdPath(keywordPath) {
@@ -72565,7 +72565,7 @@ function getCid7050Codes(options) {
72565
72565
  var import_lodash = __toESM(require_lodash(), 1);
72566
72566
  var nameMap = dcmjs2.data.DicomMetaDictionary.nameMap;
72567
72567
  function getVr(keyword) {
72568
- const element = nameMap[keyword] || nameMap[`RETIRED_${keyword}`];
72568
+ const element = nameMap[keyword] ?? nameMap[`RETIRED_${keyword}`];
72569
72569
  return element?.vr;
72570
72570
  }
72571
72571
  function temporalVr(vr) {
@@ -72637,7 +72637,7 @@ function deidentifyPS315E({
72637
72637
  }
72638
72638
  }
72639
72639
  }
72640
- return current[tagName] || null;
72640
+ return current[tagName] ?? null;
72641
72641
  }
72642
72642
  const {
72643
72643
  cleanDescriptorsOption,
@@ -73397,17 +73397,20 @@ async function fetchWithRetry(...args) {
73397
73397
  // src/hash.ts
73398
73398
  var import_md5 = __toESM(require_md5(), 1);
73399
73399
  var import_js_crc = __toESM(require_crc(), 1);
73400
- async function hash(buffer, hashMethod) {
73400
+ var DEFAULT_HASH_PART_SIZE = 5 * 1024 * 1024;
73401
+ async function hash(buffer, hashMethod, hashPartSize) {
73401
73402
  switch (hashMethod) {
73402
73403
  case "sha256":
73403
73404
  return await sha256Hex(buffer);
73404
73405
  case "crc32":
73405
73406
  return crc32Hex(buffer);
73406
- case "md5":
73407
- return md5Hex(buffer);
73408
73407
  case "crc64":
73409
- default:
73410
73408
  return crc64Hex(buffer);
73409
+ case "aws-s3-etag-2025":
73410
+ return awsS3Etag(buffer, hashPartSize ?? DEFAULT_HASH_PART_SIZE);
73411
+ case "md5":
73412
+ default:
73413
+ return md5Hex(buffer);
73411
73414
  }
73412
73415
  }
73413
73416
  async function sha256Hex(buffer) {
@@ -73418,6 +73421,27 @@ async function sha256Hex(buffer) {
73418
73421
  function md5Hex(buffer) {
73419
73422
  return (0, import_md5.default)(new Uint8Array(buffer));
73420
73423
  }
73424
+ function awsS3Etag(buffer, partSize) {
73425
+ if (buffer.byteLength <= partSize) {
73426
+ return md5Hex(buffer);
73427
+ }
73428
+ return multipartMd5(buffer, partSize);
73429
+ }
73430
+ function multipartMd5(buffer, partSize) {
73431
+ const totalSize = buffer.byteLength;
73432
+ const partCount = Math.ceil(totalSize / partSize);
73433
+ const rawDigests = new Uint8Array(partCount * 16);
73434
+ for (let i4 = 0; i4 < partCount; i4++) {
73435
+ const start = i4 * partSize;
73436
+ const end = Math.min(start + partSize, totalSize);
73437
+ const partBuffer = buffer.slice(start, end);
73438
+ const hex = (0, import_md5.default)(new Uint8Array(partBuffer));
73439
+ for (let j4 = 0; j4 < 16; j4++) {
73440
+ rawDigests[i4 * 16 + j4] = parseInt(hex.slice(j4 * 2, j4 * 2 + 2), 16);
73441
+ }
73442
+ }
73443
+ return `${(0, import_md5.default)(rawDigests)}-${partCount}`;
73444
+ }
73421
73445
  function crc32Hex(input) {
73422
73446
  let bytes;
73423
73447
  if (input instanceof Uint8Array) {
@@ -73481,7 +73505,7 @@ async function loadS3Client() {
73481
73505
  const { createRequire } = await import("module");
73482
73506
  const req = createRequire(import.meta.url);
73483
73507
  const mod = req("@aws-sdk/client-s3");
73484
- cachedS3Client = mod?.default || mod;
73508
+ cachedS3Client = mod?.default ?? mod;
73485
73509
  } else {
73486
73510
  cachedS3Client = await Promise.resolve().then(() => __toESM(require_dist_cjs71(), 1));
73487
73511
  }
@@ -73494,6 +73518,7 @@ async function curateOne({
73494
73518
  outputTarget,
73495
73519
  mappingOptions,
73496
73520
  hashMethod,
73521
+ hashPartSize,
73497
73522
  previousSourceFileInfo,
73498
73523
  previousMappedFileInfo
73499
73524
  }) {
@@ -73520,7 +73545,7 @@ async function curateOne({
73520
73545
  );
73521
73546
  }
73522
73547
  file = await resp.blob();
73523
- const lastModifiedHeader = resp.headers.get("last-modified") || void 0;
73548
+ const lastModifiedHeader = resp.headers.get("last-modified");
73524
73549
  if (lastModifiedHeader) {
73525
73550
  mtime = new Date(lastModifiedHeader).toISOString();
73526
73551
  }
@@ -73586,14 +73611,18 @@ async function curateOne({
73586
73611
  } catch (e4) {
73587
73612
  }
73588
73613
  }
73589
- const fileArrayBuffer = await file.arrayBuffer();
73614
+ let fileArrayBuffer = await file.arrayBuffer();
73590
73615
  let preMappedHash;
73591
73616
  let postMappedHash;
73592
73617
  const postMappedHashHeader = "x-source-file-hash";
73593
73618
  let canSkip = false;
73594
73619
  if (previousSourceFileInfo?.preMappedHash !== void 0) {
73595
73620
  try {
73596
- preMappedHash = await hash(fileArrayBuffer, hashMethod || "crc64");
73621
+ preMappedHash = await hash(
73622
+ fileArrayBuffer,
73623
+ hashMethod ?? "md5",
73624
+ hashPartSize
73625
+ );
73597
73626
  } catch (e4) {
73598
73627
  console.warn(`Failed to compute preMappedHash for ${fileInfo.name}`, e4);
73599
73628
  }
@@ -73698,7 +73727,11 @@ async function curateOne({
73698
73727
  }
73699
73728
  if (!preMappedHash) {
73700
73729
  try {
73701
- preMappedHash = await hash(fileArrayBuffer, hashMethod || "crc64");
73730
+ preMappedHash = await hash(
73731
+ fileArrayBuffer,
73732
+ hashMethod ?? "md5",
73733
+ hashPartSize
73734
+ );
73702
73735
  } catch (e4) {
73703
73736
  console.warn(`Failed to compute preMappedHash for ${fileInfo.name}`, e4);
73704
73737
  }
@@ -73709,7 +73742,12 @@ async function curateOne({
73709
73742
  const modifiedArrayBuffer = mappedDicomData.write({
73710
73743
  allowInvalidVRLength: true
73711
73744
  });
73712
- postMappedHash = await hash(modifiedArrayBuffer, hashMethod || "crc64");
73745
+ postMappedHash = await hash(
73746
+ modifiedArrayBuffer,
73747
+ hashMethod ?? "md5",
73748
+ hashPartSize
73749
+ );
73750
+ fileArrayBuffer = null;
73713
73751
  const previousPostMappedHash = previousMappedFileInfo ? previousMappedFileInfo(clonedMapResults.outputFilePath)?.postMappedHash : void 0;
73714
73752
  if (previousPostMappedHash !== void 0 && previousPostMappedHash === postMappedHash) {
73715
73753
  return noMapResult(clonedMapResults.outputFilePath);
@@ -73741,26 +73779,23 @@ async function curateOne({
73741
73779
  }
73742
73780
  const fullFilePath = path.join(fullDirPath, fileName);
73743
73781
  await fs.writeFile(fullFilePath, new DataView(modifiedArrayBuffer));
73744
- } else {
73782
+ } else if (!outputTarget?.http && !outputTarget?.s3) {
73745
73783
  clonedMapResults.mappedBlob = new Blob([modifiedArrayBuffer], {
73746
73784
  type: "application/octet-stream"
73747
73785
  });
73748
73786
  }
73749
- clonedMapResults.mappedBlob = new Blob([modifiedArrayBuffer], {
73750
- type: "application/octet-stream"
73751
- });
73752
73787
  if (outputTarget?.http) {
73753
73788
  try {
73754
73789
  const key = clonedMapResults.outputFilePath.split("/").map(encodeURIComponent).join("/");
73755
73790
  const uploadUrl = `${outputTarget.http.url}/${key}`;
73756
73791
  const headers = {
73757
- "Content-Type": clonedMapResults.mappedBlob.type || "application/octet-stream",
73792
+ "Content-Type": "application/octet-stream",
73758
73793
  "X-File-Name": fileName,
73759
- "X-File-Type": clonedMapResults.mappedBlob.type || "application/octet-stream",
73794
+ "X-File-Type": "application/octet-stream",
73760
73795
  "X-File-Size": String(modifiedArrayBuffer.byteLength),
73761
- "X-Source-File-Size": String(clonedMapResults.fileInfo?.size || ""),
73762
- "X-Source-File-Modified-Time": mtime || "",
73763
- "X-Source-File-Hash": preMappedHash || ""
73796
+ "X-Source-File-Size": String(clonedMapResults.fileInfo?.size ?? ""),
73797
+ "X-Source-File-Modified-Time": mtime ?? "",
73798
+ "X-Source-File-Hash": preMappedHash ?? ""
73764
73799
  };
73765
73800
  if (outputTarget.http.headers) {
73766
73801
  Object.assign(headers, outputTarget.http.headers);
@@ -73770,25 +73805,27 @@ async function curateOne({
73770
73805
  const resp = await fetchWithRetry(uploadUrl, {
73771
73806
  method: "PUT",
73772
73807
  headers,
73773
- body: clonedMapResults.mappedBlob
73808
+ body: new Blob([modifiedArrayBuffer], {
73809
+ type: "application/octet-stream"
73810
+ })
73774
73811
  });
73775
73812
  if (!resp.ok) {
73776
73813
  console.error(
73777
73814
  `Upload failed for ${uploadUrl}: ${resp.status} ${resp.statusText}`
73778
73815
  );
73779
- clonedMapResults.errors = clonedMapResults.errors || [];
73816
+ clonedMapResults.errors = clonedMapResults.errors ?? [];
73780
73817
  clonedMapResults.errors.push(
73781
73818
  `Upload failed: ${resp.status} ${resp.statusText}`
73782
73819
  );
73783
73820
  } else {
73784
- clonedMapResults.outputUpload = clonedMapResults.outputUpload || {
73821
+ clonedMapResults.outputUpload = clonedMapResults.outputUpload ?? {
73785
73822
  url: uploadUrl,
73786
73823
  status: resp.status
73787
73824
  };
73788
73825
  }
73789
73826
  } catch (e4) {
73790
73827
  console.error("Upload error", e4);
73791
- clonedMapResults.errors = clonedMapResults.errors || [];
73828
+ clonedMapResults.errors = clonedMapResults.errors ?? [];
73792
73829
  clonedMapResults.errors.push(
73793
73830
  `Upload error: ${e4 instanceof Error ? e4.message : String(e4)}`
73794
73831
  );
@@ -73808,12 +73845,14 @@ async function curateOne({
73808
73845
  new s32.PutObjectCommand({
73809
73846
  Bucket: outputTarget.s3.bucketName,
73810
73847
  Key: key,
73811
- Body: await clonedMapResults.mappedBlob.arrayBuffer(),
73812
- ContentType: clonedMapResults.mappedBlob.type || "application/octet-stream",
73848
+ // Use the ArrayBuffer directly — going through Blob.arrayBuffer()
73849
+ // would create yet another copy of the data in memory.
73850
+ Body: new Uint8Array(modifiedArrayBuffer),
73851
+ ContentType: "application/octet-stream",
73813
73852
  Metadata: {
73814
- "source-file-size": String(clonedMapResults.fileInfo?.size || ""),
73815
- "source-file-modified-time": mtime || "",
73816
- "source-file-hash": preMappedHash || "",
73853
+ "source-file-size": String(clonedMapResults.fileInfo?.size ?? ""),
73854
+ "source-file-modified-time": mtime ?? "",
73855
+ "source-file-hash": preMappedHash ?? "",
73817
73856
  ...postMappedHash ? { "source-file-post-mapped-hash": postMappedHash } : {}
73818
73857
  }
73819
73858
  })
@@ -73825,7 +73864,7 @@ async function curateOne({
73825
73864
  };
73826
73865
  } catch (e4) {
73827
73866
  console.error("S3 Upload error", e4);
73828
- clonedMapResults.errors = clonedMapResults.errors || [];
73867
+ clonedMapResults.errors = clonedMapResults.errors ?? [];
73829
73868
  clonedMapResults.errors.push(
73830
73869
  `S3 Upload error: ${e4 instanceof Error ? e4.message : String(e4)}`
73831
73870
  );
@@ -33999,7 +33999,7 @@ function isPrivateTag(tagId) {
33999
33999
  return false;
34000
34000
  }
34001
34001
  function convertKeywordToTagId(keyword) {
34002
- const tagId = isPrivateTag(keyword) ? keyword : dcmjs.data.DicomMetaDictionary.nameMap[keyword]?.tag || keyword;
34002
+ const tagId = isPrivateTag(keyword) ? keyword : dcmjs.data.DicomMetaDictionary.nameMap[keyword]?.tag ?? keyword;
34003
34003
  return tagId.replace(/[(),]/g, "").toLowerCase();
34004
34004
  }
34005
34005
 
@@ -34844,7 +34844,7 @@ function getCid7050Codes(options) {
34844
34844
  var import_lodash = __toESM(require_lodash(), 1);
34845
34845
  var nameMap = dcmjs2.data.DicomMetaDictionary.nameMap;
34846
34846
  function getVr(keyword) {
34847
- const element = nameMap[keyword] || nameMap[`RETIRED_${keyword}`];
34847
+ const element = nameMap[keyword] ?? nameMap[`RETIRED_${keyword}`];
34848
34848
  return element?.vr;
34849
34849
  }
34850
34850
  function temporalVr(vr) {
@@ -34916,7 +34916,7 @@ function deidentifyPS315E({
34916
34916
  }
34917
34917
  }
34918
34918
  }
34919
- return current[tagName] || null;
34919
+ return current[tagName] ?? null;
34920
34920
  }
34921
34921
  const {
34922
34922
  cleanDescriptorsOption,
package/dist/esm/hash.js CHANGED
@@ -640,17 +640,20 @@ var require_crc = __commonJS({
640
640
  // src/hash.ts
641
641
  var import_md5 = __toESM(require_md5(), 1);
642
642
  var import_js_crc = __toESM(require_crc(), 1);
643
- async function hash(buffer, hashMethod) {
643
+ var DEFAULT_HASH_PART_SIZE = 5 * 1024 * 1024;
644
+ async function hash(buffer, hashMethod, hashPartSize) {
644
645
  switch (hashMethod) {
645
646
  case "sha256":
646
647
  return await sha256Hex(buffer);
647
648
  case "crc32":
648
649
  return crc32Hex(buffer);
649
- case "md5":
650
- return md5Hex(buffer);
651
650
  case "crc64":
652
- default:
653
651
  return crc64Hex(buffer);
652
+ case "aws-s3-etag-2025":
653
+ return awsS3Etag(buffer, hashPartSize ?? DEFAULT_HASH_PART_SIZE);
654
+ case "md5":
655
+ default:
656
+ return md5Hex(buffer);
654
657
  }
655
658
  }
656
659
  async function sha256Hex(buffer) {
@@ -661,6 +664,27 @@ async function sha256Hex(buffer) {
661
664
  function md5Hex(buffer) {
662
665
  return (0, import_md5.default)(new Uint8Array(buffer));
663
666
  }
667
+ function awsS3Etag(buffer, partSize) {
668
+ if (buffer.byteLength <= partSize) {
669
+ return md5Hex(buffer);
670
+ }
671
+ return multipartMd5(buffer, partSize);
672
+ }
673
+ function multipartMd5(buffer, partSize) {
674
+ const totalSize = buffer.byteLength;
675
+ const partCount = Math.ceil(totalSize / partSize);
676
+ const rawDigests = new Uint8Array(partCount * 16);
677
+ for (let i = 0; i < partCount; i++) {
678
+ const start = i * partSize;
679
+ const end = Math.min(start + partSize, totalSize);
680
+ const partBuffer = buffer.slice(start, end);
681
+ const hex = (0, import_md5.default)(new Uint8Array(partBuffer));
682
+ for (let j = 0; j < 16; j++) {
683
+ rawDigests[i * 16 + j] = parseInt(hex.slice(j * 2, j * 2 + 2), 16);
684
+ }
685
+ }
686
+ return `${(0, import_md5.default)(rawDigests)}-${partCount}`;
687
+ }
664
688
  function crc32Hex(input) {
665
689
  let bytes;
666
690
  if (input instanceof Uint8Array) {