@internxt/cli 1.4.1 → 1.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. package/README.md +25 -25
  2. package/dist/commands/download-file.js +13 -5
  3. package/dist/commands/upload-file.js +51 -17
  4. package/dist/commands/webdav.js +4 -0
  5. package/dist/services/crypto.service.d.ts +1 -3
  6. package/dist/services/crypto.service.js +17 -7
  7. package/dist/services/drive/drive-file.service.d.ts +1 -0
  8. package/dist/services/drive/drive-file.service.js +4 -0
  9. package/dist/services/network/download.service.d.ts +3 -1
  10. package/dist/services/network/download.service.js +5 -3
  11. package/dist/services/network/network-facade.service.d.ts +8 -9
  12. package/dist/services/network/network-facade.service.js +39 -50
  13. package/dist/services/thumbnail.service.d.ts +7 -0
  14. package/dist/services/thumbnail.service.js +55 -0
  15. package/dist/types/network.types.d.ts +11 -2
  16. package/dist/utils/errors.utils.d.ts +4 -0
  17. package/dist/utils/errors.utils.js +10 -1
  18. package/dist/utils/logger.utils.js +16 -2
  19. package/dist/utils/network.utils.d.ts +11 -0
  20. package/dist/utils/network.utils.js +36 -0
  21. package/dist/utils/stream.utils.d.ts +8 -10
  22. package/dist/utils/stream.utils.js +18 -17
  23. package/dist/utils/thumbnail.utils.d.ts +9 -0
  24. package/dist/utils/thumbnail.utils.js +44 -0
  25. package/dist/webdav/handlers/DELETE.handler.js +4 -3
  26. package/dist/webdav/handlers/GET.handler.d.ts +0 -2
  27. package/dist/webdav/handlers/GET.handler.js +21 -19
  28. package/dist/webdav/handlers/HEAD.handler.d.ts +9 -2
  29. package/dist/webdav/handlers/HEAD.handler.js +34 -2
  30. package/dist/webdav/handlers/MKCOL.handler.js +3 -3
  31. package/dist/webdav/handlers/MOVE.handler.js +2 -2
  32. package/dist/webdav/handlers/OPTIONS.handler.d.ts +1 -1
  33. package/dist/webdav/handlers/OPTIONS.handler.js +29 -4
  34. package/dist/webdav/handlers/PROPFIND.handler.js +1 -1
  35. package/dist/webdav/handlers/PUT.handler.js +55 -15
  36. package/dist/webdav/index.js +10 -2
  37. package/dist/webdav/middewares/auth.middleware.d.ts +2 -2
  38. package/dist/webdav/middewares/auth.middleware.js +8 -8
  39. package/dist/webdav/webdav-server.d.ts +1 -3
  40. package/dist/webdav/webdav-server.js +20 -8
  41. package/oclif.manifest.json +1 -1
  42. package/package.json +54 -51
  43. package/scripts/restart-webdav.js +16 -0
  44. package/dist/services/network/upload.service.d.ts +0 -8
  45. package/dist/services/network/upload.service.js +0 -24
  46. package/dist/utils/hash.utils.d.ts +0 -12
  47. package/dist/utils/hash.utils.js +0 -37
  48. package/scripts/skip-in-ci.js +0 -5
@@ -35,22 +35,20 @@ var __importStar = (this && this.__importStar) || (function () {
35
35
  Object.defineProperty(exports, "__esModule", { value: true });
36
36
  exports.NetworkFacade = void 0;
37
37
  const sdk_1 = require("@internxt/sdk");
38
- const NetworkUpload = __importStar(require("@internxt/sdk/dist/network/upload"));
39
38
  const NetworkDownload = __importStar(require("@internxt/sdk/dist/network/download"));
40
39
  const inxt_js_1 = require("@internxt/inxt-js");
41
40
  const node_crypto_1 = require("node:crypto");
42
41
  const validation_service_1 = require("../validation.service");
43
- const hash_utils_1 = require("../../utils/hash.utils");
44
- const stream_utils_1 = require("../../utils/stream.utils");
42
+ const TWENTY_GIGABYTES = 20 * 1024 * 1024 * 1024;
45
43
  class NetworkFacade {
46
44
  network;
47
- uploadService;
45
+ environment;
48
46
  downloadService;
49
47
  cryptoService;
50
48
  cryptoLib;
51
- constructor(network, uploadService, downloadService, cryptoService) {
49
+ constructor(network, environment, downloadService, cryptoService) {
52
50
  this.network = network;
53
- this.uploadService = uploadService;
51
+ this.environment = environment;
54
52
  this.downloadService = downloadService;
55
53
  this.cryptoService = cryptoService;
56
54
  this.cryptoLib = {
@@ -64,30 +62,36 @@ class NetworkFacade {
64
62
  randomBytes: node_crypto_1.randomBytes,
65
63
  };
66
64
  }
67
- async downloadToStream(bucketId, mnemonic, fileId, to, options) {
65
+ async downloadToStream(bucketId, mnemonic, fileId, size, to, rangeOptions, options) {
68
66
  const encryptedContentStreams = [];
69
67
  let fileStream;
70
68
  const abortable = options?.abortController ?? new AbortController();
71
- const onProgress = (progress) => {
69
+ const onProgress = (loadedBytes) => {
72
70
  if (!options?.progressCallback)
73
71
  return;
74
- options.progressCallback(progress);
75
- };
76
- const onDownloadProgress = (progress) => {
77
- onProgress(progress);
72
+ const reportedProgress = Math.round((loadedBytes / size) * 100);
73
+ options.progressCallback(reportedProgress);
78
74
  };
79
75
  const decryptFile = async (_, key, iv) => {
80
- fileStream = await this.cryptoService.decryptStream(encryptedContentStreams, Buffer.from(key), Buffer.from(iv));
76
+ let startOffsetByte;
77
+ if (rangeOptions) {
78
+ startOffsetByte = rangeOptions.parsed.start;
79
+ }
80
+ fileStream = this.cryptoService.decryptStream(encryptedContentStreams, Buffer.from(key), Buffer.from(iv), startOffsetByte);
81
81
  await fileStream.pipeTo(to);
82
82
  };
83
83
  const downloadFile = async (downloadables) => {
84
+ if (rangeOptions && downloadables.length > 1) {
85
+ throw new Error('Multi-Part Download with Range-Requests is not implemented');
86
+ }
84
87
  for (const downloadable of downloadables) {
85
88
  if (abortable.signal.aborted) {
86
89
  throw new Error('Download aborted');
87
90
  }
88
91
  const encryptedContentStream = await this.downloadService.downloadFile(downloadable.url, {
89
- progressCallback: onDownloadProgress,
92
+ progressCallback: onProgress,
90
93
  abortController: options?.abortController,
94
+ rangeHeader: rangeOptions?.range,
91
95
  });
92
96
  encryptedContentStreams.push(encryptedContentStream);
93
97
  }
@@ -97,43 +101,28 @@ class NetworkFacade {
97
101
  };
98
102
  return [downloadOperation(), abortable];
99
103
  }
100
- async uploadFromStream(bucketId, mnemonic, size, from, options) {
101
- const hashStream = new hash_utils_1.HashStream();
102
- const abortable = options?.abortController ?? new AbortController();
103
- let encryptionTransform;
104
- const progressTransform = new stream_utils_1.ProgressTransform({ totalBytes: size }, (progress) => {
105
- if (options?.progressCallback) {
106
- options.progressCallback(progress * 0.95);
107
- }
108
- });
109
- const onProgress = (progress) => {
110
- if (!options?.progressCallback)
111
- return;
112
- options.progressCallback(progress);
113
- };
114
- const encryptFile = async (_, key, iv) => {
115
- encryptionTransform = from
116
- .pipe(await this.cryptoService.getEncryptionTransform(Buffer.from(key), Buffer.from(iv)))
117
- .pipe(hashStream);
118
- };
119
- const uploadFile = async (url) => {
120
- await this.uploadService.uploadFile(url, encryptionTransform.pipe(progressTransform), {
121
- abortController: abortable,
122
- progressCallback: () => {
123
- },
104
+ uploadFile(from, size, bucketId, finishedCallback, progressCallback) {
105
+ if (size > TWENTY_GIGABYTES) {
106
+ throw new Error('File is too big (more than 20 GB)');
107
+ }
108
+ const minimumMultipartThreshold = 100 * 1024 * 1024;
109
+ const useMultipart = size > minimumMultipartThreshold;
110
+ if (useMultipart) {
111
+ return this.environment.uploadMultipartFile(bucketId, {
112
+ source: from,
113
+ fileSize: size,
114
+ finishedCallback,
115
+ progressCallback,
124
116
  });
125
- return hashStream.getHash().toString('hex');
126
- };
127
- const uploadOperation = async () => {
128
- const uploadResult = await NetworkUpload.uploadFile(this.network, this.cryptoLib, bucketId, mnemonic, size, encryptFile, uploadFile);
129
- const fileHash = Buffer.from('');
130
- onProgress(1);
131
- return {
132
- fileId: uploadResult,
133
- hash: fileHash,
134
- };
135
- };
136
- return [uploadOperation(), abortable];
117
+ }
118
+ else {
119
+ return this.environment.upload(bucketId, {
120
+ source: from,
121
+ fileSize: size,
122
+ finishedCallback,
123
+ progressCallback,
124
+ });
125
+ }
137
126
  }
138
127
  }
139
128
  exports.NetworkFacade = NetworkFacade;
@@ -0,0 +1,7 @@
1
+ import { StorageTypes } from '@internxt/sdk/dist/drive';
2
+ import { NetworkFacade } from './network/network-facade.service';
3
+ export declare class ThumbnailService {
4
+ static readonly instance: ThumbnailService;
5
+ uploadThumbnail: (fileContent: Buffer, fileType: string, userBucket: string, file_id: number, networkFacade: NetworkFacade) => Promise<StorageTypes.Thumbnail | undefined>;
6
+ private getThumbnailFromImageBuffer;
7
+ }
@@ -0,0 +1,55 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.ThumbnailService = void 0;
7
+ const node_stream_1 = require("node:stream");
8
+ const drive_file_service_1 = require("./drive/drive-file.service");
9
+ const drive_1 = require("@internxt/sdk/dist/drive");
10
+ const thumbnail_utils_1 = require("../utils/thumbnail.utils");
11
+ const sharp_1 = __importDefault(require("sharp"));
12
+ class ThumbnailService {
13
+ static instance = new ThumbnailService();
14
+ uploadThumbnail = async (fileContent, fileType, userBucket, file_id, networkFacade) => {
15
+ let thumbnailBuffer;
16
+ if ((0, thumbnail_utils_1.isImageThumbnailable)(fileType)) {
17
+ thumbnailBuffer = await this.getThumbnailFromImageBuffer(fileContent);
18
+ }
19
+ if (thumbnailBuffer) {
20
+ const size = thumbnailBuffer.length;
21
+ const fileId = await new Promise((resolve, reject) => {
22
+ networkFacade.uploadFile(node_stream_1.Readable.from(thumbnailBuffer), size, userBucket, (err, res) => {
23
+ if (err) {
24
+ return reject(err);
25
+ }
26
+ resolve(res);
27
+ }, () => { });
28
+ });
29
+ const createdThumbnailFile = await drive_file_service_1.DriveFileService.instance.createThumbnail({
30
+ file_id: file_id,
31
+ max_width: thumbnail_utils_1.ThumbnailConfig.MaxWidth,
32
+ max_height: thumbnail_utils_1.ThumbnailConfig.MaxHeight,
33
+ type: thumbnail_utils_1.ThumbnailConfig.Type,
34
+ size: size,
35
+ bucket_id: userBucket,
36
+ bucket_file: fileId,
37
+ encrypt_version: drive_1.StorageTypes.EncryptionVersion.Aes03,
38
+ });
39
+ return createdThumbnailFile;
40
+ }
41
+ };
42
+ getThumbnailFromImageBuffer = (buffer) => {
43
+ return (0, sharp_1.default)(buffer)
44
+ .resize({
45
+ height: thumbnail_utils_1.ThumbnailConfig.MaxHeight,
46
+ width: thumbnail_utils_1.ThumbnailConfig.MaxWidth,
47
+ fit: 'inside',
48
+ })
49
+ .png({
50
+ quality: thumbnail_utils_1.ThumbnailConfig.Quality,
51
+ })
52
+ .toBuffer();
53
+ };
54
+ }
55
+ exports.ThumbnailService = ThumbnailService;
@@ -2,9 +2,10 @@ export interface NetworkCredentials {
2
2
  user: string;
3
3
  pass: string;
4
4
  }
5
- export type UploadProgressCallback = (progress: number) => void;
5
+ export type DownloadProgressCallback = (downloadedBytes: number) => void;
6
+ export type UploadProgressCallback = (uploadedBytes: number) => void;
6
7
  export interface NetworkOperationBaseOptions {
7
- progressCallback: UploadProgressCallback;
8
+ progressCallback?: UploadProgressCallback;
8
9
  abortController?: AbortController;
9
10
  }
10
11
  export type UploadOptions = NetworkOperationBaseOptions;
@@ -13,3 +14,11 @@ export interface SelfsignedCert {
13
14
  cert: string | Buffer;
14
15
  key: string | Buffer;
15
16
  }
17
+ export interface UploadTask {
18
+ contentToUpload: Buffer;
19
+ urlToUpload: string;
20
+ index: number;
21
+ }
22
+ export interface UploadMultipartOptions extends UploadOptions {
23
+ parts: number;
24
+ }
@@ -17,6 +17,10 @@ export declare class UnsupportedMediaTypeError extends Error {
17
17
  statusCode: number;
18
18
  constructor(message: string);
19
19
  }
20
+ export declare class MethodNotAllowed extends Error {
21
+ statusCode: number;
22
+ constructor(message: string);
23
+ }
20
24
  export declare class NotImplementedError extends Error {
21
25
  statusCode: number;
22
26
  constructor(message: string);
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.NotImplementedError = exports.UnsupportedMediaTypeError = exports.BadRequestError = exports.NotFoundError = exports.ConflictError = exports.ErrorUtils = void 0;
3
+ exports.NotImplementedError = exports.MethodNotAllowed = exports.UnsupportedMediaTypeError = exports.BadRequestError = exports.NotFoundError = exports.ConflictError = exports.ErrorUtils = void 0;
4
4
  class ErrorUtils {
5
5
  static report(reporter, error, props = {}) {
6
6
  if (error instanceof Error) {
@@ -48,6 +48,15 @@ class UnsupportedMediaTypeError extends Error {
48
48
  }
49
49
  }
50
50
  exports.UnsupportedMediaTypeError = UnsupportedMediaTypeError;
51
+ class MethodNotAllowed extends Error {
52
+ statusCode = 405;
53
+ constructor(message) {
54
+ super(message);
55
+ this.name = 'MethodNotAllowed';
56
+ Object.setPrototypeOf(this, MethodNotAllowed.prototype);
57
+ }
58
+ }
59
+ exports.MethodNotAllowed = MethodNotAllowed;
51
60
  class NotImplementedError extends Error {
52
61
  statusCode = 501;
53
62
  constructor(message) {
@@ -6,35 +6,49 @@ Object.defineProperty(exports, "__esModule", { value: true });
6
6
  exports.webdavLogger = exports.logger = void 0;
7
7
  const winston_1 = __importDefault(require("winston"));
8
8
  const config_service_1 = require("../services/config.service");
9
+ const maxLogSize = 40 * 1024 * 1024;
10
+ const maxLogsFiles = 5;
9
11
  exports.logger = winston_1.default.createLogger({
10
12
  level: 'info',
11
- format: winston_1.default.format.json(),
13
+ format: winston_1.default.format.combine(winston_1.default.format.timestamp(), winston_1.default.format.json()),
12
14
  defaultMeta: { service: 'internxt-cli' },
13
15
  transports: [
14
16
  new winston_1.default.transports.File({
15
17
  filename: 'internxt-cli-error.log',
16
18
  level: 'error',
17
19
  dirname: config_service_1.ConfigService.INTERNXT_CLI_LOGS_DIR,
20
+ maxsize: maxLogSize,
21
+ maxFiles: maxLogsFiles,
22
+ tailable: true,
18
23
  }),
19
24
  new winston_1.default.transports.File({
20
25
  filename: 'internxt-cli-combined.log',
21
26
  dirname: config_service_1.ConfigService.INTERNXT_CLI_LOGS_DIR,
27
+ maxsize: maxLogSize,
28
+ maxFiles: maxLogsFiles,
29
+ tailable: true,
22
30
  }),
23
31
  ],
24
32
  });
25
33
  exports.webdavLogger = winston_1.default.createLogger({
26
34
  level: 'info',
27
- format: winston_1.default.format.json(),
35
+ format: winston_1.default.format.combine(winston_1.default.format.timestamp(), winston_1.default.format.json()),
28
36
  defaultMeta: { service: 'internxt-webdav' },
29
37
  transports: [
30
38
  new winston_1.default.transports.File({
31
39
  filename: 'internxt-webdav-error.log',
32
40
  level: 'error',
33
41
  dirname: config_service_1.ConfigService.INTERNXT_CLI_LOGS_DIR,
42
+ maxsize: maxLogSize,
43
+ maxFiles: maxLogsFiles,
44
+ tailable: true,
34
45
  }),
35
46
  new winston_1.default.transports.File({
36
47
  filename: 'internxt-webdav-combined.log',
37
48
  dirname: config_service_1.ConfigService.INTERNXT_CLI_LOGS_DIR,
49
+ maxsize: maxLogSize,
50
+ maxFiles: maxLogsFiles,
51
+ tailable: true,
38
52
  }),
39
53
  ],
40
54
  });
@@ -1,5 +1,6 @@
1
1
  import { NetworkCredentials, SelfsignedCert } from '../types/network.types';
2
2
  import selfsigned from 'selfsigned';
3
+ import parseRange from 'range-parser';
3
4
  export declare class NetworkUtils {
4
5
  static getAuthFromCredentials(creds: NetworkCredentials): {
5
6
  username: string;
@@ -13,4 +14,14 @@ export declare class NetworkUtils {
13
14
  static getWebdavSSLCerts(): Promise<SelfsignedCert>;
14
15
  static saveWebdavSSLCerts(pems: selfsigned.GenerateResult): Promise<void>;
15
16
  static generateSelfSignedSSLCerts(): selfsigned.GenerateResult;
17
+ static parseRangeHeader(rangeOptions: {
18
+ range?: string;
19
+ totalFileSize: number;
20
+ }): RangeOptions | undefined;
21
+ }
22
+ export interface RangeOptions {
23
+ range: string;
24
+ rangeSize: number;
25
+ totalFileSize: number;
26
+ parsed: parseRange.Range;
16
27
  }
@@ -8,6 +8,7 @@ const node_crypto_1 = require("node:crypto");
8
8
  const promises_1 = require("node:fs/promises");
9
9
  const node_path_1 = __importDefault(require("node:path"));
10
10
  const selfsigned_1 = __importDefault(require("selfsigned"));
11
+ const range_parser_1 = __importDefault(require("range-parser"));
11
12
  const config_service_1 = require("../services/config.service");
12
13
  class NetworkUtils {
13
14
  static getAuthFromCredentials(creds) {
@@ -72,5 +73,40 @@ class NetworkUtils {
72
73
  const pems = selfsigned_1.default.generate(attrs, { days: 365, algorithm: 'sha256', keySize: 2048, extensions });
73
74
  return pems;
74
75
  }
76
+ static parseRangeHeader(rangeOptions) {
77
+ if (!rangeOptions.range) {
78
+ return;
79
+ }
80
+ const parsed = (0, range_parser_1.default)(rangeOptions.totalFileSize, rangeOptions.range);
81
+ if (Array.isArray(parsed)) {
82
+ if (parsed.length > 1) {
83
+ throw new Error(`Multi Range-Requests functionality is not implemented. ${JSON.stringify(rangeOptions)}`);
84
+ }
85
+ else if (parsed.length <= 0) {
86
+ throw new Error(`Empty Range-Request. ${JSON.stringify(rangeOptions)}`);
87
+ }
88
+ else if (parsed.type !== 'bytes') {
89
+ throw new Error(`Unkwnown Range-Request type "${parsed.type}". ${JSON.stringify(rangeOptions)}`);
90
+ }
91
+ else {
92
+ const rangeSize = parsed[0].end - parsed[0].start + 1;
93
+ return {
94
+ range: rangeOptions.range,
95
+ rangeSize: rangeSize,
96
+ totalFileSize: rangeOptions.totalFileSize,
97
+ parsed: parsed[0],
98
+ };
99
+ }
100
+ }
101
+ else if (parsed === -1) {
102
+ throw new Error(`Malformed Range-Request. ${JSON.stringify(rangeOptions)}`);
103
+ }
104
+ else if (parsed === -2) {
105
+ throw new Error(`Unsatisfiable Range-Request. ${JSON.stringify(rangeOptions)}`);
106
+ }
107
+ else {
108
+ throw new Error(`Unknown error from Range-Request. ${JSON.stringify(rangeOptions)}`);
109
+ }
110
+ }
75
111
  }
76
112
  exports.NetworkUtils = NetworkUtils;
@@ -1,17 +1,15 @@
1
1
  import { ReadStream, WriteStream } from 'node:fs';
2
- import { Transform, TransformCallback } from 'node:stream';
2
+ import { Transform, TransformCallback, TransformOptions } from 'node:stream';
3
3
  export declare class StreamUtils {
4
4
  static readStreamToReadableStream(readStream: ReadStream): ReadableStream<Uint8Array>;
5
5
  static writeStreamToWritableStream(writeStream: WriteStream): WritableStream<Uint8Array>;
6
6
  static joinReadableBinaryStreams(streams: ReadableStream<Uint8Array>[]): ReadableStream<Uint8Array>;
7
7
  }
8
- export declare class ProgressTransform extends Transform {
9
- private options;
10
- private progressCallback;
11
- private receivedBytes;
12
- constructor(options: {
13
- totalBytes: number;
14
- }, progressCallback: (percentage: number) => void);
15
- _transform(chunk: Buffer, encoding: BufferEncoding, callback: TransformCallback): void;
16
- _flush(callback: (err: Error | null) => void): void;
8
+ export declare class BufferStream extends Transform {
9
+ buffer: Buffer | null;
10
+ constructor(opts?: TransformOptions);
11
+ _transform(chunk: Buffer, _: BufferEncoding, callback: TransformCallback): void;
12
+ _flush(callback: TransformCallback): void;
13
+ reset(): void;
14
+ getBuffer(): Buffer | null;
17
15
  }
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.ProgressTransform = exports.StreamUtils = void 0;
3
+ exports.BufferStream = exports.StreamUtils = void 0;
4
4
  const node_stream_1 = require("node:stream");
5
5
  class StreamUtils {
6
6
  static readStreamToReadableStream(readStream) {
@@ -55,24 +55,25 @@ class StreamUtils {
55
55
  }
56
56
  }
57
57
  exports.StreamUtils = StreamUtils;
58
- class ProgressTransform extends node_stream_1.Transform {
59
- options;
60
- progressCallback;
61
- receivedBytes = 0;
62
- constructor(options, progressCallback) {
63
- super();
64
- this.options = options;
65
- this.progressCallback = progressCallback;
58
+ class BufferStream extends node_stream_1.Transform {
59
+ buffer;
60
+ constructor(opts) {
61
+ super(opts);
62
+ this.buffer = null;
66
63
  }
67
- _transform(chunk, encoding, callback) {
68
- this.receivedBytes += chunk.length;
69
- const percentage = this.receivedBytes / this.options.totalBytes;
70
- this.progressCallback(percentage);
71
- this.push(chunk);
72
- callback();
64
+ _transform(chunk, _, callback) {
65
+ const currentBuffer = this.buffer ?? Buffer.alloc(0);
66
+ this.buffer = Buffer.concat([currentBuffer, chunk]);
67
+ callback(null, chunk);
73
68
  }
74
69
  _flush(callback) {
75
- callback(null);
70
+ callback();
71
+ }
72
+ reset() {
73
+ this.buffer = null;
74
+ }
75
+ getBuffer() {
76
+ return this.buffer;
76
77
  }
77
78
  }
78
- exports.ProgressTransform = ProgressTransform;
79
+ exports.BufferStream = BufferStream;
@@ -0,0 +1,9 @@
1
+ export declare const ThumbnailConfig: {
2
+ readonly MaxWidth: 300;
3
+ readonly MaxHeight: 300;
4
+ readonly Quality: 100;
5
+ readonly Type: "png";
6
+ };
7
+ export declare const isFileThumbnailable: (fileType: string) => boolean;
8
+ export declare const isPDFThumbnailable: (fileType: string) => boolean;
9
+ export declare const isImageThumbnailable: (fileType: string) => boolean;
@@ -0,0 +1,44 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.isImageThumbnailable = exports.isPDFThumbnailable = exports.isFileThumbnailable = exports.ThumbnailConfig = void 0;
4
+ exports.ThumbnailConfig = {
5
+ MaxWidth: 300,
6
+ MaxHeight: 300,
7
+ Quality: 100,
8
+ Type: 'png',
9
+ };
10
+ const imageExtensions = {
11
+ tiff: ['tif', 'tiff'],
12
+ bmp: ['bmp'],
13
+ heic: ['heic'],
14
+ jpg: ['jpg', 'jpeg'],
15
+ gif: ['gif'],
16
+ png: ['png'],
17
+ eps: ['eps'],
18
+ raw: ['raw', 'cr2', 'nef', 'orf', 'sr2'],
19
+ webp: ['webp'],
20
+ };
21
+ const pdfExtensions = {
22
+ pdf: ['pdf'],
23
+ };
24
+ const thumbnailableImageExtension = [
25
+ ...imageExtensions['jpg'],
26
+ ...imageExtensions['png'],
27
+ ...imageExtensions['webp'],
28
+ ...imageExtensions['gif'],
29
+ ...imageExtensions['tiff'],
30
+ ];
31
+ const thumbnailablePdfExtension = pdfExtensions['pdf'];
32
+ const thumbnailableExtension = [...thumbnailableImageExtension];
33
+ const isFileThumbnailable = (fileType) => {
34
+ return fileType.trim().length > 0 && thumbnailableExtension.includes(fileType);
35
+ };
36
+ exports.isFileThumbnailable = isFileThumbnailable;
37
+ const isPDFThumbnailable = (fileType) => {
38
+ return fileType.trim().length > 0 && thumbnailablePdfExtension.includes(fileType);
39
+ };
40
+ exports.isPDFThumbnailable = isPDFThumbnailable;
41
+ const isImageThumbnailable = (fileType) => {
42
+ return fileType.trim().length > 0 && thumbnailableImageExtension.includes(fileType);
43
+ };
44
+ exports.isImageThumbnailable = isImageThumbnailable;
@@ -10,16 +10,15 @@ class DELETERequestHandler {
10
10
  }
11
11
  handle = async (req, res) => {
12
12
  const { driveDatabaseManager, driveFileService, driveFolderService, trashService } = this.dependencies;
13
- logger_utils_1.webdavLogger.info('DELETE request received');
14
13
  const resource = await webdav_utils_1.WebDavUtils.getRequestedResource(req);
15
- logger_utils_1.webdavLogger.info('Resource received for DELETE request', { resource });
14
+ logger_utils_1.webdavLogger.info(`[DELETE] Request received for ${resource.type} at ${resource.url}`);
16
15
  const driveItem = await webdav_utils_1.WebDavUtils.getAndSearchItemFromResource({
17
16
  resource,
18
17
  driveDatabaseManager,
19
18
  driveFolderService,
20
19
  driveFileService: driveFileService,
21
20
  });
22
- logger_utils_1.webdavLogger.info(`Trashing ${resource.type} with UUID ${driveItem.uuid}...`);
21
+ logger_utils_1.webdavLogger.info(`[DELETE] [${driveItem.uuid}] Trashing ${resource.type}`);
23
22
  await trashService.trashItems({
24
23
  items: [{ type: resource.type, uuid: driveItem.uuid }],
25
24
  });
@@ -30,6 +29,8 @@ class DELETERequestHandler {
30
29
  await driveDatabaseManager.deleteFileById(driveItem.id);
31
30
  }
32
31
  res.status(204).send();
32
+ const type = resource.type.charAt(0).toUpperCase() + resource.type.substring(1);
33
+ logger_utils_1.webdavLogger.info(`[DELETE] [${driveItem.uuid}] ${type} trashed successfully`);
33
34
  };
34
35
  }
35
36
  exports.DELETERequestHandler = DELETERequestHandler;
@@ -3,7 +3,6 @@ import { Request, Response } from 'express';
3
3
  import { DriveFileService } from '../../services/drive/drive-file.service';
4
4
  import { DriveDatabaseManager } from '../../services/database/drive-database-manager.service';
5
5
  import { NetworkFacade } from '../../services/network/network-facade.service';
6
- import { UploadService } from '../../services/network/upload.service';
7
6
  import { DownloadService } from '../../services/network/download.service';
8
7
  import { CryptoService } from '../../services/crypto.service';
9
8
  import { AuthService } from '../../services/auth.service';
@@ -12,7 +11,6 @@ export declare class GETRequestHandler implements WebDavMethodHandler {
12
11
  constructor(dependencies: {
13
12
  driveFileService: DriveFileService;
14
13
  driveDatabaseManager: DriveDatabaseManager;
15
- uploadService: UploadService;
16
14
  downloadService: DownloadService;
17
15
  cryptoService: CryptoService;
18
16
  authService: AuthService;
@@ -4,6 +4,7 @@ exports.GETRequestHandler = void 0;
4
4
  const webdav_utils_1 = require("../../utils/webdav.utils");
5
5
  const errors_utils_1 = require("../../utils/errors.utils");
6
6
  const logger_utils_1 = require("../../utils/logger.utils");
7
+ const network_utils_1 = require("../../utils/network.utils");
7
8
  class GETRequestHandler {
8
9
  dependencies;
9
10
  constructor(dependencies) {
@@ -12,21 +13,31 @@ class GETRequestHandler {
12
13
  handle = async (req, res) => {
13
14
  const { driveDatabaseManager, driveFileService, authService, networkFacade } = this.dependencies;
14
15
  const resource = await webdav_utils_1.WebDavUtils.getRequestedResource(req);
15
- if (req.headers['content-range'] || req.headers['range'])
16
- throw new errors_utils_1.NotImplementedError('Range requests not supported');
17
16
  if (resource.name.startsWith('._'))
18
17
  throw new errors_utils_1.NotFoundError('File not found');
19
- logger_utils_1.webdavLogger.info(`GET request received for file at ${resource.url}`);
18
+ if (resource.type === 'folder')
19
+ throw new errors_utils_1.NotFoundError('Folders cannot be listed with GET. Use PROPFIND instead.');
20
+ logger_utils_1.webdavLogger.info(`[GET] Request received for ${resource.type} at ${resource.url}`);
20
21
  const driveFile = (await webdav_utils_1.WebDavUtils.getAndSearchItemFromResource({
21
22
  resource,
22
23
  driveDatabaseManager,
23
24
  driveFileService,
24
25
  }));
25
- logger_utils_1.webdavLogger.info(`✅ Found Drive File with uuid ${driveFile.uuid}`);
26
- res.set('Content-Type', 'application/octet-stream');
27
- res.set('Content-length', driveFile.size.toString());
26
+ logger_utils_1.webdavLogger.info(`[GET] [${driveFile.uuid}] Found Drive File`);
28
27
  const { user } = await authService.getAuthDetails();
29
- logger_utils_1.webdavLogger.info('✅ Network ready for download');
28
+ logger_utils_1.webdavLogger.info(`[GET] [${driveFile.uuid}] Network ready for download`);
29
+ const range = req.headers['range'];
30
+ const rangeOptions = network_utils_1.NetworkUtils.parseRangeHeader({
31
+ range,
32
+ totalFileSize: driveFile.size,
33
+ });
34
+ let contentLength = driveFile.size;
35
+ if (rangeOptions) {
36
+ logger_utils_1.webdavLogger.info(`[GET] [${driveFile.uuid}] Range request received:`, { rangeOptions });
37
+ contentLength = rangeOptions.rangeSize;
38
+ }
39
+ res.header('Content-Type', 'application/octet-stream');
40
+ res.header('Content-length', contentLength.toString());
30
41
  const writable = new WritableStream({
31
42
  write(chunk) {
32
43
  res.write(chunk);
@@ -35,20 +46,11 @@ class GETRequestHandler {
35
46
  res.end();
36
47
  },
37
48
  });
38
- let lastLoggedProgress = 0;
39
- const [executeDownload] = await networkFacade.downloadToStream(driveFile.bucket, user.mnemonic, driveFile.fileId, writable, {
40
- progressCallback: (progress) => {
41
- const percentage = Math.floor(100 * progress);
42
- if (percentage >= lastLoggedProgress + 1) {
43
- lastLoggedProgress = percentage;
44
- logger_utils_1.webdavLogger.info(`Download progress for file ${resource.name}: ${percentage}%`);
45
- }
46
- },
47
- });
48
- logger_utils_1.webdavLogger.info('✅ Download prepared, executing...');
49
+ const [executeDownload] = await networkFacade.downloadToStream(driveFile.bucket, user.mnemonic, driveFile.fileId, contentLength, writable, rangeOptions);
50
+ logger_utils_1.webdavLogger.info(`[GET] [${driveFile.uuid}] Download prepared, executing...`);
49
51
  res.status(200);
50
52
  await executeDownload;
51
- logger_utils_1.webdavLogger.info('✅ Download ready, replying to client');
53
+ logger_utils_1.webdavLogger.info(`[GET] [${driveFile.uuid}] ✅ Download ready, replying to client`);
52
54
  };
53
55
  }
54
56
  exports.GETRequestHandler = GETRequestHandler;
@@ -1,5 +1,12 @@
1
- import { WebDavMethodHandler } from '../../types/webdav.types';
2
1
  import { Request, Response } from 'express';
2
+ import { WebDavMethodHandler } from '../../types/webdav.types';
3
+ import { DriveFileService } from '../../services/drive/drive-file.service';
4
+ import { DriveDatabaseManager } from '../../services/database/drive-database-manager.service';
3
5
  export declare class HEADRequestHandler implements WebDavMethodHandler {
4
- handle: (_: Request, res: Response) => Promise<void>;
6
+ private readonly dependencies;
7
+ constructor(dependencies: {
8
+ driveFileService: DriveFileService;
9
+ driveDatabaseManager: DriveDatabaseManager;
10
+ });
11
+ handle: (req: Request, res: Response) => Promise<void>;
5
12
  }