fs-object-storage 1.0.0
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the package versions as they appear in their respective public registries.
- package/.github/workflows/npm-publish.yml +41 -0
- package/LICENSE +21 -0
- package/README.md +287 -0
- package/docker-compose.yml +24 -0
- package/package.json +45 -0
- package/quick-test.js +36 -0
- package/samples/fs-minio-test.js +135 -0
- package/samples/memfs-sample.js +44 -0
- package/samples/minio-connection-test.js +66 -0
- package/samples/minio-sample.js +64 -0
- package/src/index.d.ts +98 -0
- package/src/index.js +17 -0
- package/src/lib/ErrorHandler.js +149 -0
- package/src/lib/FsMinioClient.js +480 -0
- package/src/lib/PathConverter.js +209 -0
- package/src/lib/StreamConverter.js +281 -0
- package/test-package.json +9 -0
- package/tests/unit/ErrorHandler.test.js +117 -0
- package/tests/unit/PathConverter.test.js +224 -0
- package/tests/unit/StreamConverter.test.js +267 -0
- package/unit-tests.js +101 -0
@@ -0,0 +1,281 @@
// StreamConverter.js - Convert between different stream/data formats for MinIO operations

import { Readable, PassThrough } from 'stream';
import { promisify } from 'util';
import stream from 'stream';
const pipeline = promisify(stream.pipeline);

class StreamConverter {
  /**
   * Convert various data types to readable stream for MinIO upload
   * @param {string|Buffer|Uint8Array|Readable} data - Data to convert
   * @returns {Readable} Readable stream
   */
  static toReadableStream(data) {
    if (data instanceof Readable) {
      return data;
    }

    if (typeof data === 'string') {
      return Readable.from([Buffer.from(data, 'utf8')]);
    }

    if (Buffer.isBuffer(data) || data instanceof Uint8Array) {
      return Readable.from([data]);
    }

    throw new Error('Unsupported data type for stream conversion');
  }

  /**
   * Convert readable stream to buffer
   * @param {Readable} stream - Readable stream
   * @returns {Promise<Buffer>} Buffer containing all stream data
   */
  static async streamToBuffer(stream) {
    const chunks = [];

    return new Promise((resolve, reject) => {
      stream.on('data', (chunk) => {
        chunks.push(chunk);
      });

      stream.on('end', () => {
        resolve(Buffer.concat(chunks));
      });

      stream.on('error', (error) => {
        reject(error);
      });
    });
  }

  /**
   * Convert readable stream to string
   * @param {Readable} stream - Readable stream
   * @param {string} [encoding='utf8'] - Text encoding
   * @returns {Promise<string>} String containing all stream data
   */
  static async streamToString(stream, encoding = 'utf8') {
    const buffer = await this.streamToBuffer(stream);
    return buffer.toString(encoding);
  }

  /**
   * Create a pass-through stream for piping operations
   * @returns {PassThrough} PassThrough stream
   */
  static createPassThrough() {
    return new PassThrough();
  }

  /**
   * Get data size for Content-Length header
   * @param {string|Buffer|Uint8Array|Readable} data - Data to measure
   * @returns {number|undefined} Size in bytes, undefined if unknown (stream)
   */
  static getDataSize(data) {
    if (typeof data === 'string') {
      return Buffer.byteLength(data, 'utf8');
    }

    if (Buffer.isBuffer(data)) {
      return data.length;
    }

    if (data instanceof Uint8Array) {
      return data.byteLength;
    }

    // For streams, size is unknown
    if (data instanceof Readable) {
      return undefined;
    }

    throw new Error('Unsupported data type for size calculation');
  }

  /**
   * Create readable stream from string with specified encoding
   * @param {string} content - String content
   * @param {string} [encoding='utf8'] - Text encoding
   * @returns {Readable} Readable stream
   */
  static stringToStream(content, encoding = 'utf8') {
    const buffer = Buffer.from(content, encoding);
    return Readable.from([buffer]);
  }

  /**
   * Create readable stream from buffer
   * @param {Buffer} buffer - Buffer data
   * @returns {Readable} Readable stream
   */
  static bufferToStream(buffer) {
    return Readable.from([buffer]);
  }

  /**
   * Pipe stream with error handling
   * @param {Readable} source - Source stream
   * @param {Writable} destination - Destination stream
   * @returns {Promise<void>} Promise that resolves when piping is complete
   */
  static async pipeStream(source, destination) {
    try {
      await pipeline(source, destination);
    } catch (error) {
      throw new Error(`Stream pipeline failed: ${error.message}`);
    }
  }

  /**
   * Clone a readable stream (create multiple consumers)
   * @param {Readable} source - Source stream
   * @param {number} [count=2] - Number of clones to create
   * @returns {Readable[]} Array of cloned streams
   */
  static cloneStream(source, count = 2) {
    const streams = [];

    for (let i = 0; i < count; i++) {
      streams.push(new PassThrough());
    }

    source.on('data', (chunk) => {
      streams.forEach(stream => stream.write(chunk));
    });

    source.on('end', () => {
      streams.forEach(stream => stream.end());
    });

    source.on('error', (error) => {
      streams.forEach(stream => stream.destroy(error));
    });

    return streams;
  }

  /**
   * Transform stream data with a function
   * @param {Readable} source - Source stream
   * @param {Function} transformer - Transform function (chunk) => transformedChunk
   * @returns {Readable} Transformed stream
   */
  static transformStream(source, transformer) {
    const passThrough = new PassThrough();

    source.on('data', (chunk) => {
      try {
        const transformed = transformer(chunk);
        passThrough.write(transformed);
      } catch (error) {
        passThrough.destroy(error);
      }
    });

    source.on('end', () => {
      passThrough.end();
    });

    source.on('error', (error) => {
      passThrough.destroy(error);
    });

    return passThrough;
  }

  /**
   * Limit stream size
   * @param {Readable} source - Source stream
   * @param {number} maxSize - Maximum size in bytes
   * @returns {Readable} Size-limited stream
   */
  static limitStreamSize(source, maxSize) {
    const passThrough = new PassThrough();
    let totalSize = 0;

    source.on('data', (chunk) => {
      totalSize += chunk.length;

      if (totalSize > maxSize) {
        const error = new Error(`Stream size exceeds limit of ${maxSize} bytes`);
        error.code = 'EFBIG';
        passThrough.destroy(error);
        return;
      }

      passThrough.write(chunk);
    });

    source.on('end', () => {
      passThrough.end();
    });

    source.on('error', (error) => {
      passThrough.destroy(error);
    });

    return passThrough;
  }

  /**
   * Convert stream to async iterator
   * @param {Readable} stream - Source stream
   * @returns {AsyncIterable<Buffer>} Async iterable of chunks
   */
  static streamToAsyncIterable(stream) {
    return {
      [Symbol.asyncIterator]: async function* () {
        let resolve, reject;
        const chunks = [];
        let finished = false;
        let failure = null;

        stream.on('readable', () => {
          let chunk;
          while (null !== (chunk = stream.read())) {
            chunks.push(chunk);
            if (resolve) {
              resolve(chunk);
              resolve = null;
            }
          }
        });

        stream.on('end', () => {
          finished = true;
          if (resolve) {
            resolve(null);
            resolve = null;
          }
        });

        stream.on('error', (error) => {
          // Remember the error so the loop below rethrows it even when it
          // arrives while no consumer is awaiting; otherwise the iterator
          // would hang forever on the next await.
          failure = error;
          if (reject) {
            reject(error);
            reject = null;
          }
        });

        while (true) {
          if (failure) {
            throw failure;
          }
          if (chunks.length > 0) {
            yield chunks.shift();
          } else if (finished) {
            break;
          } else {
            await new Promise((res, rej) => {
              resolve = res;
              reject = rej;
            });
          }
        }
      }
    };
  }
}

export default StreamConverter;
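For orientation, a minimal usage sketch of the helpers above (not part of the package; the import path follows the package layout shown in the file list, and the input string and size limit are illustrative):

// usage-sketch.js - hypothetical consumer of StreamConverter (ESM)
import StreamConverter from './src/lib/StreamConverter.js';

const source = StreamConverter.toReadableStream('hello world');
const limited = StreamConverter.limitStreamSize(source, 1024); // cap at 1 KiB
const text = await StreamConverter.streamToString(limited);    // 'hello world'
const size = StreamConverter.getDataSize('hello world');       // 11 (utf8 bytes)
console.log(text, size);

Note that getDataSize returns undefined for a Readable, so a caller uploading a stream would have to omit Content-Length or buffer first.
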
@@ -0,0 +1,117 @@
/**
 * Unit tests for ErrorHandler
 */
import ErrorHandler from '../../src/lib/ErrorHandler.js';
import { strict as assert } from 'assert';
import { test, describe } from 'node:test';

describe('ErrorHandler', () => {
  describe('convertMinioError', () => {
    test('should convert NoSuchKey error to ENOENT', () => {
      const minioError = new Error('NoSuchKey: The specified key does not exist.');
      minioError.code = 'NoSuchKey';

      const fsError = ErrorHandler.convertMinioError(minioError, '/test/file.txt');

      assert.strictEqual(fsError.code, 'ENOENT');
      assert.strictEqual(fsError.errno, -2);
      assert.strictEqual(fsError.path, '/test/file.txt');
      assert.ok(fsError.message.includes('ENOENT'));
    });

    test('should convert NoSuchBucket error to ENOENT', () => {
      const minioError = new Error('NoSuchBucket: The specified bucket does not exist.');
      minioError.code = 'NoSuchBucket';

      const fsError = ErrorHandler.convertMinioError(minioError, '/bucket/file.txt');

      assert.strictEqual(fsError.code, 'ENOENT');
      assert.strictEqual(fsError.errno, -2);
    });

    test('should convert AccessDenied error to EACCES', () => {
      const minioError = new Error('AccessDenied: Access Denied.');
      minioError.code = 'AccessDenied';

      const fsError = ErrorHandler.convertMinioError(minioError, '/test/file.txt');

      assert.strictEqual(fsError.code, 'EACCES');
      assert.strictEqual(fsError.errno, -13);
    });

    test('should convert BucketAlreadyExists error to EEXIST', () => {
      const minioError = new Error('BucketAlreadyExists: The requested bucket name is not available.');
      minioError.code = 'BucketAlreadyExists';

      const fsError = ErrorHandler.convertMinioError(minioError, '/bucket');

      assert.strictEqual(fsError.code, 'EEXIST');
      assert.strictEqual(fsError.errno, -17);
    });

    test('should handle unknown MinIO errors as generic errors', () => {
      const minioError = new Error('UnknownError: Something went wrong.');
      minioError.code = 'UnknownError';

      const fsError = ErrorHandler.convertMinioError(minioError, '/test/file.txt');

      assert.strictEqual(fsError.code, 'EUNKNOWN');
      assert.strictEqual(fsError.errno, -1);
    });

    test('should handle errors without MinIO code', () => {
      const genericError = new Error('Network error');

      const fsError = ErrorHandler.convertMinioError(genericError, '/test/file.txt');

      assert.strictEqual(fsError.code, 'EUNKNOWN');
      assert.strictEqual(fsError.errno, -1);
      assert.ok(fsError.message.includes('Network error'));
    });

    test('should preserve original stack trace', () => {
      const minioError = new Error('Test error');
      minioError.code = 'TestError';

      const fsError = ErrorHandler.convertMinioError(minioError, '/test');

      assert.ok(fsError.stack);
      assert.ok(fsError.stack.includes('Test error'));
    });
  });

  describe('createFileSystemError', () => {
    test('should create ENOENT error correctly', () => {
      const error = ErrorHandler.createFileSystemError('ENOENT', '/test/file.txt', 'open');

      assert.strictEqual(error.code, 'ENOENT');
      assert.strictEqual(error.errno, -2);
      assert.strictEqual(error.path, '/test/file.txt');
      assert.strictEqual(error.syscall, 'open');
      assert.ok(error.message.includes('ENOENT'));
      assert.ok(error.message.includes('/test/file.txt'));
    });

    test('should create EACCES error correctly', () => {
      const error = ErrorHandler.createFileSystemError('EACCES', '/test/file.txt', 'read');

      assert.strictEqual(error.code, 'EACCES');
      assert.strictEqual(error.errno, -13);
      assert.strictEqual(error.syscall, 'read');
    });

    test('should create EEXIST error correctly', () => {
      const error = ErrorHandler.createFileSystemError('EEXIST', '/test/dir', 'mkdir');

      assert.strictEqual(error.code, 'EEXIST');
      assert.strictEqual(error.errno, -17);
    });

    test('should handle unknown error codes', () => {
      const error = ErrorHandler.createFileSystemError('UNKNOWN', '/test', 'test');

      assert.strictEqual(error.code, 'UNKNOWN');
      assert.strictEqual(error.errno, -1);
    });
  });
});
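These tests pin down the MinIO-to-filesystem error contract: S3-style error codes map onto Node errno-style codes. A sketch of a mapping consistent with the assertions above (the shipped ErrorHandler.js is 149 lines and may organize this differently; the table and function name here are illustrative):

// Hypothetical shape of the mapping the tests above exercise.
const MINIO_TO_FS = {
  NoSuchKey:           { code: 'ENOENT', errno: -2 },
  NoSuchBucket:        { code: 'ENOENT', errno: -2 },
  AccessDenied:        { code: 'EACCES', errno: -13 },
  BucketAlreadyExists: { code: 'EEXIST', errno: -17 },
};

function convertMinioErrorSketch(err, path) {
  const mapped = MINIO_TO_FS[err.code] ?? { code: 'EUNKNOWN', errno: -1 };
  const fsError = new Error(`${mapped.code}: ${err.message}`);
  fsError.code = mapped.code;
  fsError.errno = mapped.errno;
  fsError.path = path;
  fsError.stack = err.stack; // preserve original stack, as the tests require
  return fsError;
}
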
@@ -0,0 +1,224 @@
/**
 * Unit tests for PathConverter
 */
import PathConverter from '../../src/lib/PathConverter.js';
import { strict as assert } from 'assert';
import { describe, it } from 'node:test';

describe('PathConverter', () => {
  describe('splitPath', () => {
    it('should split absolute path correctly', () => {
      const result = PathConverter.splitPath('/bucket/path/to/file.txt');

      assert.strictEqual(result.bucket, 'bucket');
      assert.strictEqual(result.key, 'path/to/file.txt');
    });

    it('should handle path without leading slash', () => {
      const result = PathConverter.splitPath('bucket/path/to/file.txt');

      assert.strictEqual(result.bucket, 'bucket');
      assert.strictEqual(result.key, 'path/to/file.txt');
    });

    it('should handle bucket-only path', () => {
      const result = PathConverter.splitPath('/bucket');

      assert.strictEqual(result.bucket, 'bucket');
      assert.strictEqual(result.key, '');
    });

    it('should handle bucket-only path without slash', () => {
      const result = PathConverter.splitPath('bucket');

      assert.strictEqual(result.bucket, 'bucket');
      assert.strictEqual(result.key, '');
    });

    it('should handle root path', () => {
      const result = PathConverter.splitPath('/');

      assert.strictEqual(result.bucket, '');
      assert.strictEqual(result.key, '');
    });

    it('should handle empty path', () => {
      const result = PathConverter.splitPath('');

      assert.strictEqual(result.bucket, '');
      assert.strictEqual(result.key, '');
    });

    it('should handle bucket with trailing slash', () => {
      const result = PathConverter.splitPath('/bucket/');

      assert.strictEqual(result.bucket, 'bucket');
      assert.strictEqual(result.key, '');
    });

    it('should handle nested directory structure', () => {
      const result = PathConverter.splitPath('/mybucket/dir1/dir2/dir3/file.txt');

      assert.strictEqual(result.bucket, 'mybucket');
      assert.strictEqual(result.key, 'dir1/dir2/dir3/file.txt');
    });
  });

  describe('joinPath', () => {
    it('should join bucket and key correctly', () => {
      const result = PathConverter.joinPath('bucket', 'path/to/file.txt');

      assert.strictEqual(result, '/bucket/path/to/file.txt');
    });

    it('should handle empty key', () => {
      const result = PathConverter.joinPath('bucket', '');

      assert.strictEqual(result, '/bucket');
    });

    it('should handle empty bucket', () => {
      const result = PathConverter.joinPath('', 'path/to/file.txt');

      assert.strictEqual(result, '/path/to/file.txt');
    });

    it('should handle both empty', () => {
      const result = PathConverter.joinPath('', '');

      assert.strictEqual(result, '/');
    });

    it('should remove leading slash from key', () => {
      const result = PathConverter.joinPath('bucket', '/path/to/file.txt');

      assert.strictEqual(result, '/bucket/path/to/file.txt');
    });

    it('should handle complex paths', () => {
      const result = PathConverter.joinPath('my-bucket', 'documents/2023/report.pdf');

      assert.strictEqual(result, '/my-bucket/documents/2023/report.pdf');
    });
  });

  describe('normalizePath', () => {
    it('should normalize standard path', () => {
      const result = PathConverter.normalizePath('/bucket/path/to/file.txt');

      assert.strictEqual(result, '/bucket/path/to/file.txt');
    });

    it('should remove double slashes', () => {
      const result = PathConverter.normalizePath('/bucket//path///to/file.txt');

      assert.strictEqual(result, '/bucket/path/to/file.txt');
    });

    it('should handle trailing slash', () => {
      const result = PathConverter.normalizePath('/bucket/path/');

      assert.strictEqual(result, '/bucket/path');
    });

    it('should preserve root slash', () => {
      const result = PathConverter.normalizePath('/');

      assert.strictEqual(result, '/');
    });

    it('should add leading slash if missing', () => {
      const result = PathConverter.normalizePath('bucket/path/file.txt');

      assert.strictEqual(result, '/bucket/path/file.txt');
    });

    it('should handle empty string', () => {
      const result = PathConverter.normalizePath('');

      assert.strictEqual(result, '/');
    });

    it('should handle relative path components', () => {
      const result = PathConverter.normalizePath('/bucket/./path/../file.txt');

      assert.strictEqual(result, '/bucket/file.txt');
    });
  });

  describe('isDirectory', () => {
    it('should identify directory by trailing slash', () => {
      assert.strictEqual(PathConverter.isDirectory('/bucket/path/'), true);
    });

    it('should identify file by no trailing slash', () => {
      assert.strictEqual(PathConverter.isDirectory('/bucket/path/file.txt'), false);
    });

    it('should identify root as directory', () => {
      assert.strictEqual(PathConverter.isDirectory('/'), true);
    });

    it('should treat bucket as directory only with trailing slash', () => {
      assert.strictEqual(PathConverter.isDirectory('/bucket'), false);
      assert.strictEqual(PathConverter.isDirectory('/bucket/'), true);
    });
  });

  describe('getParentPath', () => {
    it('should get parent directory', () => {
      const result = PathConverter.getParentPath('/bucket/path/to/file.txt');

      assert.strictEqual(result, '/bucket/path/to');
    });

    it('should get bucket for top-level file', () => {
      const result = PathConverter.getParentPath('/bucket/file.txt');

      assert.strictEqual(result, '/bucket');
    });

    it('should get root for bucket', () => {
      const result = PathConverter.getParentPath('/bucket');

      assert.strictEqual(result, '/');
    });

    it('should return root for root', () => {
      const result = PathConverter.getParentPath('/');

      assert.strictEqual(result, '/');
    });

    it('should handle nested directories', () => {
      const result = PathConverter.getParentPath('/bucket/dir1/dir2/dir3');

      assert.strictEqual(result, '/bucket/dir1/dir2');
    });
  });

  describe('getBasename', () => {
    it('should get filename', () => {
      const result = PathConverter.getBasename('/bucket/path/to/file.txt');

      assert.strictEqual(result, 'file.txt');
    });

    it('should get directory name', () => {
      const result = PathConverter.getBasename('/bucket/path/to/dir');

      assert.strictEqual(result, 'dir');
    });

    it('should get bucket name', () => {
      const result = PathConverter.getBasename('/bucket');

      assert.strictEqual(result, 'bucket');
    });

    it('should return empty for root', () => {
      const result = PathConverter.getBasename('/');

      assert.strictEqual(result, '');
    });
  });
});
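Taken together, these tests document PathConverter's mapping between fs-style paths and bucket/key pairs: the first segment is the bucket, the rest is the object key. A quick illustration of the round trip the assertions imply (not part of the package; values are taken from the expectations above):

// Round trip implied by the splitPath/joinPath assertions (illustrative).
import PathConverter from './src/lib/PathConverter.js';

const { bucket, key } = PathConverter.splitPath('/mybucket/dir1/dir2/file.txt');
// bucket === 'mybucket', key === 'dir1/dir2/file.txt'

const joined = PathConverter.joinPath(bucket, key);
// joined === '/mybucket/dir1/dir2/file.txt'

console.log(PathConverter.normalizePath('bucket//docs/./file.txt')); // '/bucket/docs/file.txt'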