files.com 1.0.250 → 1.0.251

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/_VERSION CHANGED
@@ -1 +1 @@
- 1.0.250
+ 1.0.251
@@ -907,7 +907,7 @@ var File = /*#__PURE__*/(0, _createClass2.default)(function File() {
  });
  readableStream.on('data', /*#__PURE__*/function () {
  var _ref17 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee14(chunk) {
- var nextLength, excessLength, chunkBuffer, lastChunkForPart, firstChunkForNextPart, buffer, nextFileUploadPart;
+ var nextLength, excessLength, chunkBuffer, tailLength, lastChunkForPart, firstChunkForNextPart, buffer, nextFileUploadPart;
  return _regenerator.default.wrap(function _callee14$(_context14) {
  while (1) switch (_context14.prev = _context14.next) {
  case 0:
@@ -916,39 +916,42 @@ var File = /*#__PURE__*/(0, _createClass2.default)(function File() {
  excessLength = nextLength - firstFileUploadPart.partsize;
  chunkBuffer = _safeBuffer.Buffer.from(chunk);
  if (!(excessLength > 0)) {
- _context14.next = 19;
+ _context14.next = 20;
  break;
  }
  readableStream.pause();
- lastChunkForPart = chunkBuffer.subarray(0, excessLength);
- firstChunkForNextPart = chunkBuffer.subarray(excessLength);
+
+ // the amount to append this last part with to make it exactly the full partsize
+ tailLength = chunkBuffer.length - excessLength;
+ lastChunkForPart = chunkBuffer.subarray(0, tailLength);
+ firstChunkForNextPart = chunkBuffer.subarray(tailLength);
  chunks.push(lastChunkForPart);
  buffer = _safeBuffer.Buffer.concat(chunks);
- _context14.next = 12;
+ _context14.next = 13;
  return File._continueUpload(destinationPath, ++part, firstFileUploadPart, options);
- case 12:
+ case 13:
  nextFileUploadPart = _context14.sent;
  concurrentUploads.push(_Api.default.sendFilePart(nextFileUploadPart.upload_uri, 'PUT', buffer));
  chunks = [firstChunkForNextPart];
  length = firstChunkForNextPart.length;
  readableStream.resume();
- _context14.next = 21;
+ _context14.next = 22;
  break;
- case 19:
+ case 20:
  chunks.push(chunkBuffer);
  length += chunk.length;
- case 21:
- _context14.next = 26;
+ case 22:
+ _context14.next = 27;
  break;
- case 23:
- _context14.prev = 23;
+ case 24:
+ _context14.prev = 24;
  _context14.t0 = _context14["catch"](0);
  reject(_context14.t0);
- case 26:
+ case 27:
  case "end":
  return _context14.stop();
  }
- }, _callee14, null, [[0, 23]]);
+ }, _callee14, null, [[0, 24]]);
  }));
  return function (_x20) {
  return _ref17.apply(this, arguments);
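
The substantive change in this release is the offset used to split an incoming stream chunk when it would overflow the current upload part. A minimal standalone sketch (hypothetical byte counts, not taken from the SDK) contrasts the two offsets: with `excessLength` bytes of overflow, slicing at `chunkBuffer.length - excessLength` tops the part up to exactly `partsize`, while the old slice at `excessLength` did not.

// Hypothetical example: partsize = 100 bytes, 90 bytes already buffered
// for the current part, and a 30-byte chunk arrives.
const partsize = 100
const length = 90
const chunkBuffer = Buffer.alloc(30)

const nextLength = length + chunkBuffer.length          // 120
const excessLength = nextLength - partsize              // 20 bytes of overflow

// Old split (1.0.250): slice at excessLength
const oldLast = chunkBuffer.subarray(0, excessLength)   // 20 bytes
console.log(length + oldLast.length)                    // 110 -> part overshoots partsize

// New split (1.0.251): slice at tailLength
const tailLength = chunkBuffer.length - excessLength    // 10 bytes still needed
const newLast = chunkBuffer.subarray(0, tailLength)
console.log(length + newLast.length)                    // 100 -> part is exactly partsize
console.log(chunkBuffer.subarray(tailLength).length)    // 20 bytes carry over to the next part

The shifted `case` labels and `_context14.next` targets in the transpiled hunk appear to be the mechanical result of the extra statement inside the generator body rather than a behavioral change of their own.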
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "files.com",
- "version": "1.0.250",
+ "version": "1.0.251",
  "description": "Files.com SDK for JavaScript",
  "keywords": [
  "files.com",
@@ -105,8 +105,11 @@ class File {
  if (excessLength > 0) {
  readableStream.pause()
 
- const lastChunkForPart = chunkBuffer.subarray(0, excessLength)
- const firstChunkForNextPart = chunkBuffer.subarray(excessLength)
+ // the amount to append this last part with to make it exactly the full partsize
+ const tailLength = chunkBuffer.length - excessLength
+
+ const lastChunkForPart = chunkBuffer.subarray(0, tailLength)
+ const firstChunkForNextPart = chunkBuffer.subarray(tailLength)
 
  chunks.push(lastChunkForPart)
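
The un-transpiled source above applies the same rule. As a rough end-to-end illustration, the self-contained sketch below (hypothetical, not SDK code) feeds a few chunks through a readable stream and accumulates them into fixed-size parts using the new split, mirroring the `chunks`/`length` bookkeeping from the diff; `partsize` and `parts` are placeholder names. In the SDK the concatenated buffer is handed to `Api.sendFilePart` and the next part is requested via `File._continueUpload`; here the parts are simply collected so their sizes can be printed.

const { Readable } = require('stream')

// Accumulate stream chunks into parts of exactly `partsize` bytes,
// using the split rule introduced in 1.0.251.
const partsize = 8
let chunks = []
let length = 0
const parts = []

const readableStream = Readable.from([Buffer.alloc(5), Buffer.alloc(5), Buffer.alloc(5)])

readableStream.on('data', (chunk) => {
  const chunkBuffer = Buffer.from(chunk)
  const nextLength = length + chunkBuffer.length
  const excessLength = nextLength - partsize

  if (excessLength > 0) {
    // the chunk's first tailLength bytes complete the current part exactly
    const tailLength = chunkBuffer.length - excessLength
    const lastChunkForPart = chunkBuffer.subarray(0, tailLength)
    const firstChunkForNextPart = chunkBuffer.subarray(tailLength)

    chunks.push(lastChunkForPart)
    parts.push(Buffer.concat(chunks))

    // the excess bytes seed the next part
    chunks = [firstChunkForNextPart]
    length = firstChunkForNextPart.length
  } else {
    chunks.push(chunkBuffer)
    length = nextLength
  }
})

readableStream.on('end', () => {
  if (length > 0) parts.push(Buffer.concat(chunks))
  console.log(parts.map((part) => part.length)) // [ 8, 7 ]
})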