d-drive-cli 1.1.3 → 1.3.0

This diff shows the content of publicly released versions of this package as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
package/README.md CHANGED
@@ -40,6 +40,8 @@ Upload a single file:
  d-drive upload ./myfile.txt /backups/
  ```

+ Note: For very large files the server exposes a streaming upload endpoint (`POST /api/files/upload/stream`) that accepts multipart uploads and streams chunks directly to the storage backend without full buffering. Use the API streaming endpoint (see `docs/API.md`) for multi-GB uploads or when you need more robust handling for long uploads.
+
  Upload a directory recursively:

  ```bash
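
For readers who want to call the streaming endpoint outside the CLI, a rough TypeScript sketch follows. It reuses the multipart field names the CLI itself sends (`file`, `encrypt`); the base URL, the bearer-token header, and the example file path are placeholders, and the exact request contract should be confirmed against `docs/API.md`.

```typescript
// Hypothetical standalone client for the streaming upload endpoint.
// Field names mirror what the CLI appends to its FormData; host and auth are assumptions.
import fs from 'fs';
import axios from 'axios';
import FormData from 'form-data';

async function streamUpload(filePath: string, baseUrl: string, token: string): Promise<void> {
  const form = new FormData();
  // Stream the file from disk instead of buffering it in memory.
  form.append('file', fs.createReadStream(filePath));
  form.append('encrypt', 'true'); // the CLI requests server-side encryption by default

  await axios.post(`${baseUrl}/api/files/upload/stream`, form, {
    headers: { ...form.getHeaders(), Authorization: `Bearer ${token}` }, // auth scheme assumed
    maxContentLength: Infinity,
    maxBodyLength: Infinity, // lift axios' default body-size limit for multi-GB uploads
  });
}

// Example invocation (all values are placeholders):
// streamUpload('./backup.tar.gz', 'https://d-drive.example.com', process.env.D_DRIVE_TOKEN ?? '');
```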
@@ -6,12 +6,14 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.uploadCommand = uploadCommand;
  const fs_extra_1 = __importDefault(require("fs-extra"));
  const path_1 = __importDefault(require("path"));
+ const stream_1 = require("stream");
  const chalk_1 = __importDefault(require("chalk"));
  const ora_1 = __importDefault(require("ora"));
  const form_data_1 = __importDefault(require("form-data"));
  const progress_1 = __importDefault(require("progress"));
  const api_1 = require("../api");
  const glob_1 = require("glob");
+ const axios_1 = __importDefault(require("axios"));
  async function uploadCommand(source, destination = '/', options) {
  const spinner = (0, ora_1.default)('Preparing upload...').start();
  try {
@@ -61,27 +63,114 @@ async function uploadSingleFile(api, filePath, destination, showProgress) {
  console.log(chalk_1.default.cyan(`\nUploading: ${fileName}`));
  console.log(chalk_1.default.gray(`Size: ${formatFileSize(fileSize)}`));
  const formData = new form_data_1.default();
- formData.append('file', fs_extra_1.default.createReadStream(filePath));
- formData.append('path', destination);
+ // Create a passthrough so we can monitor bytes read from disk
+ const fileStream = fs_extra_1.default.createReadStream(filePath);
+ const pass = new stream_1.PassThrough();
+ // Pipe file stream into pass-through which is appended to form-data
+ fileStream.pipe(pass);
+ formData.append('file', pass, {
+ filename: fileName,
+ knownLength: fileSize,
+ });
+ // Resolve destination directory to a parentId and send parentId (server-authoritative)
+ const parentDir = path_1.default.posix.dirname(destination || '/');
+ let parentId = null;
+ if (parentDir && parentDir !== '/' && parentDir !== '.') {
+ parentId = await ensureFolderExists(api, parentDir);
+ }
+ if (parentId) {
+ formData.append('parentId', parentId);
+ }
  // Ensure CLI uploads follow frontend behavior and request server-side encryption by default
  formData.append('encrypt', 'true');
  let progressBar = null;
  if (showProgress) {
+ // Use a byte-counting progress bar so we can display an explicit percent.
  progressBar = new progress_1.default('[:bar] :percent :etas', {
  complete: '█',
  incomplete: '░',
  width: 40,
  total: fileSize,
  });
- }
- await api.post('/files/upload', formData, {
- headers: formData.getHeaders(),
- onUploadProgress: (progressEvent) => {
- if (progressBar && progressEvent.loaded) {
- progressBar.update(progressEvent.loaded / fileSize);
+ // Track bytes read from disk and update progress bar by bytes.
+ fileStream.on('data', (chunk) => {
+ const len = typeof chunk === 'string' ? Buffer.byteLength(chunk) : chunk.length;
+ if (progressBar)
+ progressBar.tick(len);
+ });
+ // When local read finishes, indicate we're waiting for the server to finish
+ fileStream.on('end', () => {
+ if (progressBar && !progressBar.complete) {
+ // ensure the bar shows very near completion but leave finalizing to server response
+ try {
+ progressBar.update(Math.min(1, (progressBar.curr || 0) / (progressBar.total || 1)));
+ }
+ catch (_) { }
  }
- },
+ console.log(chalk_1.default.gray('\nLocal file read complete — waiting for server to finish...'));
+ });
+ }
+ // Use streaming upload endpoint. Do not force Content-Length so the request
+ // can stream large files without buffering the whole body in memory.
+ const headers = formData.getHeaders();
+ // axios in Node needs the adapter to handle stream form-data; use api (axios instance)
+ await api.post('/files/upload/stream', formData, {
+ headers,
+ maxContentLength: Infinity,
+ maxBodyLength: Infinity,
+ // Do not set a timeout for potentially long uploads
+ timeout: 0,
+ // Allow axios to stream the form-data
+ transitional: { forcedJSONParsing: false },
  });
+ // Upload complete (server has processed). If progress bar exists, ensure it shows 100%.
+ if (progressBar && !progressBar.complete) {
+ try {
+ progressBar.update(progressBar.total || 1);
+ }
+ catch (_) { }
+ }
+ console.log(chalk_1.default.green('\nUpload finished'));
+ }
+ // Ensure the directory at `dirPath` exists. Returns the `id` of the directory or null for root.
+ async function ensureFolderExists(api, dirPath) {
+ // Normalize and split
+ const normalized = path_1.default.posix.normalize(dirPath);
+ if (normalized === '/' || normalized === '.' || normalized === '')
+ return null;
+ const segments = normalized.split('/').filter(Boolean);
+ let currentPath = '';
+ let parentId = null;
+ for (const seg of segments) {
+ currentPath = `${currentPath}/${seg}`;
+ try {
+ const resp = await api.get('/files', { params: { path: currentPath } });
+ const items = resp.data;
+ const dir = items.find(i => i.type === 'DIRECTORY');
+ if (dir) {
+ parentId = dir.id;
+ continue;
+ }
+ // Not found — create it
+ const createResp = await api.post('/files/directory', { name: seg, parentId: parentId || null, path: currentPath });
+ parentId = createResp.data.id;
+ }
+ catch (err) {
+ // If a 409 or other error occurs, try to re-query; otherwise rethrow
+ if (axios_1.default.isAxiosError(err) && err.response) {
+ // Retry by querying again in case of race
+ const retry = await api.get('/files', { params: { path: currentPath } });
+ const items = retry.data;
+ const dir = items.find(i => i.type === 'DIRECTORY');
+ if (dir) {
+ parentId = dir.id;
+ continue;
+ }
+ }
+ throw err;
+ }
+ }
+ return parentId;
  }
  function formatFileSize(bytes) {
  if (bytes === 0)
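
The key change in both the compiled output above and the TypeScript source below is the progress handling: instead of relying on axios' `onUploadProgress`, the file is piped through a `PassThrough` and the progress bar is ticked with the size of each chunk read from disk (which is why the CLI prints a "waiting for server to finish" message once the local read completes, since disk reads can run ahead of the network transfer). A minimal, self-contained sketch of that byte-counting pattern, using a hypothetical file path and a throwaway local sink instead of an upload:

```typescript
// Sketch of byte-counting progress through a PassThrough (no HTTP involved).
import fs from 'fs';
import { PassThrough } from 'stream';
import ProgressBar from 'progress';

function copyWithProgress(src: string, dest: string): void {
  const total = fs.statSync(src).size;
  const bar = new ProgressBar('[:bar] :percent :etas', {
    complete: '█',
    incomplete: '░',
    width: 40,
    total,
  });

  const pass = new PassThrough();
  // Tick the bar with the byte length of every chunk flowing through the pipeline.
  pass.on('data', (chunk: Buffer) => bar.tick(chunk.length));

  fs.createReadStream(src).pipe(pass).pipe(fs.createWriteStream(dest));
}

// copyWithProgress('./big-file.bin', './big-file.copy'); // paths are placeholders
```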
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "d-drive-cli",
- "version": "1.1.3",
+ "version": "1.3.0",
  "description": "D-Drive CLI tool for developers",
  "main": "dist/index.js",
  "bin": {
@@ -1,11 +1,13 @@
  import fs from 'fs-extra';
  import path from 'path';
+ import { PassThrough } from 'stream';
  import chalk from 'chalk';
  import ora from 'ora';
  import FormData from 'form-data';
  import ProgressBar from 'progress';
  import { createApiClient } from '../api';
  import { glob } from 'glob';
+ import axios from 'axios';

  interface UploadOptions {
  recursive?: boolean;
@@ -85,30 +87,113 @@ async function uploadSingleFile(
  console.log(chalk.gray(`Size: ${formatFileSize(fileSize)}`));

  const formData = new FormData();
- formData.append('file', fs.createReadStream(filePath));
- formData.append('path', destination);
+ // Create a passthrough so we can monitor bytes read from disk
+ const fileStream = fs.createReadStream(filePath);
+ const pass = new PassThrough();
+ // Pipe file stream into pass-through which is appended to form-data
+ fileStream.pipe(pass);
+ formData.append('file', pass, {
+ filename: fileName,
+ knownLength: fileSize,
+ });
+ // Resolve destination directory to a parentId and send parentId (server-authoritative)
+ const parentDir = path.posix.dirname(destination || '/');
+ let parentId: string | null = null;
+ if (parentDir && parentDir !== '/' && parentDir !== '.') {
+ parentId = await ensureFolderExists(api, parentDir);
+ }
+ if (parentId) {
+ formData.append('parentId', parentId);
+ }
  // Ensure CLI uploads follow frontend behavior and request server-side encryption by default
  formData.append('encrypt', 'true');

  let progressBar: ProgressBar | null = null;

  if (showProgress) {
+ // Use a byte-counting progress bar so we can display an explicit percent.
  progressBar = new ProgressBar('[:bar] :percent :etas', {
  complete: '█',
  incomplete: '░',
  width: 40,
  total: fileSize,
  });
- }

- await api.post('/files/upload', formData, {
- headers: formData.getHeaders(),
- onUploadProgress: (progressEvent: any) => {
- if (progressBar && progressEvent.loaded) {
- progressBar.update(progressEvent.loaded / fileSize);
+ // Track bytes read from disk and update progress bar by bytes.
+ fileStream.on('data', (chunk: Buffer | string) => {
+ const len = typeof chunk === 'string' ? Buffer.byteLength(chunk) : chunk.length;
+ if (progressBar) progressBar.tick(len);
+ });
+ // When local read finishes, indicate we're waiting for the server to finish
+ fileStream.on('end', () => {
+ if (progressBar && !progressBar.complete) {
+ // ensure the bar shows very near completion but leave finalizing to server response
+ try { progressBar.update(Math.min(1, (progressBar.curr || 0) / (progressBar.total || 1))); } catch (_) {}
  }
- },
+ console.log(chalk.gray('\nLocal file read complete — waiting for server to finish...'));
+ });
+ }
+
+ // Use streaming upload endpoint. Do not force Content-Length so the request
+ // can stream large files without buffering the whole body in memory.
+ const headers = formData.getHeaders();
+ // axios in Node needs the adapter to handle stream form-data; use api (axios instance)
+ await api.post('/files/upload/stream', formData, {
+ headers,
+ maxContentLength: Infinity,
+ maxBodyLength: Infinity,
+ // Do not set a timeout for potentially long uploads
+ timeout: 0,
+ // Allow axios to stream the form-data
+ transitional: { forcedJSONParsing: false },
  });
+ // Upload complete (server has processed). If progress bar exists, ensure it shows 100%.
+ if (progressBar && !progressBar.complete) {
+ try { progressBar.update(progressBar.total || 1); } catch (_) {}
+ }
+ console.log(chalk.green('\nUpload finished'));
+ }
+
+ // Ensure the directory at `dirPath` exists. Returns the `id` of the directory or null for root.
+ async function ensureFolderExists(api: any, dirPath: string): Promise<string | null> {
+ // Normalize and split
+ const normalized = path.posix.normalize(dirPath);
+ if (normalized === '/' || normalized === '.' || normalized === '') return null;
+
+ const segments = normalized.split('/').filter(Boolean);
+ let currentPath = '';
+ let parentId: string | null = null;
+
+ for (const seg of segments) {
+ currentPath = `${currentPath}/${seg}`;
+ try {
+ const resp = await api.get('/files', { params: { path: currentPath } });
+ const items = resp.data as any[];
+ const dir = items.find(i => i.type === 'DIRECTORY');
+ if (dir) {
+ parentId = dir.id;
+ continue;
+ }
+ // Not found — create it
+ const createResp = await api.post('/files/directory', { name: seg, parentId: parentId || null, path: currentPath });
+ parentId = createResp.data.id;
+ } catch (err: any) {
+ // If a 409 or other error occurs, try to re-query; otherwise rethrow
+ if (axios.isAxiosError(err) && err.response) {
+ // Retry by querying again in case of race
+ const retry = await api.get('/files', { params: { path: currentPath } });
+ const items = retry.data as any[];
+ const dir = items.find(i => i.type === 'DIRECTORY');
+ if (dir) {
+ parentId = dir.id;
+ continue;
+ }
+ }
+ throw err;
+ }
+ }
+
+ return parentId;
  }

  function formatFileSize(bytes: number): string {
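
For context on the new `ensureFolderExists` helper: the CLI now derives the parent directory from the destination path, walks it one segment at a time via `GET /files?path=...`, and creates any missing level with `POST /files/directory`, carrying the previous level's id forward as `parentId`. The sketch below shows only the client-side path arithmetic behind that walk (the destination value is hypothetical); it makes no API calls.

```typescript
import path from 'path';

// Mirrors the derivation in uploadSingleFile: the parent directory of the
// destination is what ensureFolderExists resolves (and creates if needed).
function parentDirOf(destination: string): string {
  return path.posix.dirname(destination || '/');
}

// The intermediate paths ensureFolderExists walks for that parent, in order.
function foldersToEnsure(destination: string): string[] {
  const normalized = path.posix.normalize(parentDirOf(destination));
  if (normalized === '/' || normalized === '.' || normalized === '') return [];
  const segments = normalized.split('/').filter(Boolean);
  const paths: string[] = [];
  let current = '';
  for (const seg of segments) {
    current = `${current}/${seg}`;
    paths.push(current); // each of these is looked up, then created if absent
  }
  return paths;
}

// foldersToEnsure('/backups/2024/photos.tar') -> ['/backups', '/backups/2024']
```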