exarch-rs 0.1.0 → 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/Cargo.toml +1 -0
- package/README.md +1 -1
- package/biome.json +47 -0
- package/native/exarch-rs.darwin-arm64.node +0 -0
- package/native/exarch-rs.darwin-x64.node +0 -0
- package/native/exarch-rs.linux-arm64-gnu.node +0 -0
- package/native/exarch-rs.linux-x64-gnu.node +0 -0
- package/native/exarch-rs.win32-x64-msvc.node +0 -0
- package/package.json +25 -5
- package/src/config.rs +303 -47
- package/src/error.rs +42 -0
- package/src/lib.rs +560 -17
- package/src/report.rs +538 -0
- package/tests/create.test.js +136 -0
- package/tests/creation-config.test.js +97 -0
- package/tests/extract.test.js +117 -0
- package/tests/list-verify.test.js +172 -0
- package/tests/security-config.test.js +187 -0
- package/index.d.ts +0 -287
package/src/error.rs
CHANGED
@@ -140,6 +140,48 @@ pub fn convert_error(err: CoreError) -> Error {
             msg.push_str(&e_str);
             Error::new(Status::GenericFailure, msg)
         }
+        CoreError::SourceNotFound { path } => {
+            let path_str = sanitize_path_for_error(&path);
+            let mut msg = String::with_capacity(40 + path_str.len());
+            msg.push_str("SOURCE_NOT_FOUND: source path not found: ");
+            msg.push_str(&path_str);
+            Error::new(Status::GenericFailure, msg)
+        }
+        CoreError::SourceNotAccessible { path } => {
+            let path_str = sanitize_path_for_error(&path);
+            let mut msg = String::with_capacity(60 + path_str.len());
+            msg.push_str("SOURCE_NOT_ACCESSIBLE: source path is not accessible: ");
+            msg.push_str(&path_str);
+            Error::new(Status::GenericFailure, msg)
+        }
+        CoreError::OutputExists { path } => {
+            let path_str = sanitize_path_for_error(&path);
+            let mut msg = String::with_capacity(50 + path_str.len());
+            msg.push_str("OUTPUT_EXISTS: output file already exists: ");
+            msg.push_str(&path_str);
+            Error::new(Status::GenericFailure, msg)
+        }
+        CoreError::InvalidCompressionLevel { level } => {
+            let mut msg = String::with_capacity(60);
+            let _ = write!(
+                &mut msg,
+                "INVALID_COMPRESSION_LEVEL: invalid compression level {level}, must be 1-9"
+            );
+            Error::new(Status::GenericFailure, msg)
+        }
+        CoreError::UnknownFormat { path } => {
+            let path_str = sanitize_path_for_error(&path);
+            let mut msg = String::with_capacity(60 + path_str.len());
+            msg.push_str("UNKNOWN_FORMAT: cannot determine archive format from: ");
+            msg.push_str(&path_str);
+            Error::new(Status::GenericFailure, msg)
+        }
+        CoreError::InvalidConfiguration { reason } => {
+            let mut msg = String::with_capacity(40 + reason.len());
+            msg.push_str("INVALID_CONFIGURATION: invalid configuration: ");
+            msg.push_str(&reason);
+            Error::new(Status::GenericFailure, msg)
+        }
     }
 }

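The new `convert_error` arms give creation-related failures stable, machine-readable prefixes (SOURCE_NOT_FOUND, SOURCE_NOT_ACCESSIBLE, OUTPUT_EXISTS, INVALID_COMPRESSION_LEVEL, UNKNOWN_FORMAT, INVALID_CONFIGURATION). A minimal sketch of how a caller might branch on them from JavaScript, assuming the package is required as `exarch-rs` and that these messages surface unchanged as the thrown error's `message`:

```javascript
const { createArchive } = require('exarch-rs');

async function tryCreate(output, sources) {
  try {
    return await createArchive(output, sources);
  } catch (err) {
    // Messages look like "OUTPUT_EXISTS: output file already exists: <path>"
    if (err.message.startsWith('SOURCE_NOT_FOUND:')) {
      console.error('a source path does not exist');
    } else if (err.message.startsWith('OUTPUT_EXISTS:')) {
      console.error('refusing to overwrite an existing archive');
    } else {
      throw err; // unrelated failure
    }
  }
}
```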
package/src/lib.rs
CHANGED
@@ -40,6 +40,9 @@
 //!
 //! MIT OR Apache-2.0

+// Allow trailing_empty_array from napi macro - this is expected behavior
+#![allow(clippy::trailing_empty_array)]
+
 use napi::bindgen_prelude::*;
 use napi_derive::napi;

@@ -48,9 +51,13 @@ mod error;
 mod report;
 mod utils;

+use config::CreationConfig;
 use config::SecurityConfig;
 use error::convert_error;
+use report::ArchiveManifest;
+use report::CreationReport;
 use report::ExtractionReport;
+use report::VerificationReport;
 use utils::validate_path;

 /// Extract an archive to the specified directory (async).

@@ -127,12 +134,9 @@ pub async fn extract_archive(
     validate_path(&archive_path)?;
     validate_path(&output_dir)?;

-    // Get config
-    let
-
-
-    // Use Arc to share config across thread boundary without cloning
-    let config_arc = std::sync::Arc::new(config_ref.clone());
+    // Get owned config - clone only when config is Some, use default otherwise
+    let config_owned: exarch_core::SecurityConfig =
+        config.map(|c| c.as_core().clone()).unwrap_or_default();

     // Run extraction on tokio thread pool
     //

@@ -146,7 +150,7 @@ pub async fn extract_archive(
     // For maximum security with untrusted archives, use extractArchiveSync()
     // or ensure exclusive file access (e.g., flock) during extraction.
     let report = tokio::task::spawn_blocking(move || {
-        exarch_core::extract_archive(&archive_path, &output_dir, &
+        exarch_core::extract_archive(&archive_path, &output_dir, &config_owned)
     })
     .await
     .map_err(|e| Error::from_reason(format!("task execution failed: {e}")))?

@@ -204,13 +208,320 @@ pub fn extract_archive_sync(
     let default_config = exarch_core::SecurityConfig::default();
     let config_ref = config.map_or(&default_config, |c| c.as_core());

-    // Run extraction synchronously
-
-
+    // Run extraction synchronously with panic safety
+    // CRITICAL: Never panic across FFI boundary
+    let report = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
+        exarch_core::extract_archive(&archive_path, &output_dir, config_ref)
+    }))
+    .map_err(|_| Error::from_reason("Internal panic during archive extraction"))?
+    .map_err(convert_error)?;

     Ok(ExtractionReport::from(report))
 }

+/// Create an archive from source files and directories (async).
+///
+/// # Arguments
+///
+/// * `output_path` - Path to output archive file
+/// * `sources` - Array of source files/directories to include
+/// * `config` - Optional `CreationConfig` (uses defaults if omitted)
+///
+/// # Returns
+///
+/// Promise resolving to `CreationReport` with creation statistics
+///
+/// # Errors
+///
+/// Returns error if path validation fails, archive creation fails, or I/O
+/// errors occur.
+///
+/// # Examples
+///
+/// ```javascript
+/// // Use defaults
+/// const report = await createArchive('output.tar.gz', ['source_dir/']);
+/// console.log(`Created archive with ${report.filesAdded} files`);
+///
+/// // Customize configuration
+/// const config = new CreationConfig().compressionLevel(9);
+/// const report = await createArchive('output.tar.gz', ['src/'], config);
+/// ```
+#[napi]
+#[allow(clippy::needless_pass_by_value)]
+pub async fn create_archive(
+    output_path: String,
+    sources: Vec<String>,
+    config: Option<&CreationConfig>,
+) -> Result<CreationReport> {
+    validate_path(&output_path)?;
+    for source in &sources {
+        validate_path(source)?;
+    }
+
+    // Get owned config - clone only when config is Some, use default otherwise
+    let config_owned: exarch_core::creation::CreationConfig =
+        config.map(|c| c.as_core().clone()).unwrap_or_default();
+
+    let report = tokio::task::spawn_blocking(move || {
+        let sources_refs: Vec<&str> = sources.iter().map(String::as_str).collect();
+        exarch_core::create_archive(&output_path, &sources_refs, &config_owned)
+    })
+    .await
+    .map_err(|e| Error::from_reason(format!("task execution failed: {e}")))?
+    .map_err(convert_error)?;
+
+    Ok(CreationReport::from(report))
+}
+
+/// Create an archive from source files and directories (sync).
+///
+/// Synchronous version of `createArchive`. Blocks the event loop until
+/// creation completes. Prefer the async version for most use cases.
+///
+/// # Arguments
+///
+/// * `output_path` - Path to output archive file
+/// * `sources` - Array of source files/directories to include
+/// * `config` - Optional `CreationConfig` (uses defaults if omitted)
+///
+/// # Returns
+///
+/// `CreationReport` with creation statistics
+///
+/// # Errors
+///
+/// Returns error if path validation fails, archive creation fails, or I/O
+/// errors occur.
+///
+/// # Examples
+///
+/// ```javascript
+/// // Use defaults
+/// const report = createArchiveSync('output.tar.gz', ['source_dir/']);
+/// console.log(`Created archive with ${report.filesAdded} files`);
+/// ```
+#[napi]
+#[allow(clippy::needless_pass_by_value)]
+pub fn create_archive_sync(
+    output_path: String,
+    sources: Vec<String>,
+    config: Option<&CreationConfig>,
+) -> Result<CreationReport> {
+    validate_path(&output_path)?;
+    for source in &sources {
+        validate_path(source)?;
+    }
+
+    let default_config = exarch_core::creation::CreationConfig::default();
+    let config_ref = config.map_or(&default_config, |c| c.as_core());
+
+    let sources_refs: Vec<&str> = sources.iter().map(String::as_str).collect();
+
+    let report = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
+        exarch_core::create_archive(&output_path, &sources_refs, config_ref)
+    }))
+    .map_err(|_| Error::from_reason("Internal panic during archive creation"))?
+    .map_err(convert_error)?;
+
+    Ok(CreationReport::from(report))
+}
+
+/// List archive contents without extracting (async).
+///
+/// # Arguments
+///
+/// * `archive_path` - Path to archive file
+/// * `config` - Optional `SecurityConfig` (uses secure defaults if omitted)
+///
+/// # Returns
+///
+/// Promise resolving to `ArchiveManifest` with entry metadata
+///
+/// # Errors
+///
+/// Returns error if path validation fails, archive is invalid, or I/O errors
+/// occur.
+///
+/// # Examples
+///
+/// ```javascript
+/// const manifest = await listArchive('archive.tar.gz');
+/// for (const entry of manifest.entries) {
+///   console.log(`${entry.path}: ${entry.size} bytes`);
+/// }
+/// ```
+#[napi]
+#[allow(clippy::needless_pass_by_value)]
+pub async fn list_archive(
+    archive_path: String,
+    config: Option<&SecurityConfig>,
+) -> Result<ArchiveManifest> {
+    validate_path(&archive_path)?;
+
+    // Get owned config - clone only when config is Some, use default otherwise
+    let config_owned: exarch_core::SecurityConfig =
+        config.map(|c| c.as_core().clone()).unwrap_or_default();
+
+    let manifest = tokio::task::spawn_blocking(move || {
+        exarch_core::list_archive(&archive_path, &config_owned)
+    })
+    .await
+    .map_err(|e| Error::from_reason(format!("task execution failed: {e}")))?
+    .map_err(convert_error)?;
+
+    Ok(ArchiveManifest::from(manifest))
+}
+
+/// List archive contents without extracting (sync).
+///
+/// Synchronous version of `listArchive`. Blocks the event loop until
+/// listing completes. Prefer the async version for most use cases.
+///
+/// # Arguments
+///
+/// * `archive_path` - Path to archive file
+/// * `config` - Optional `SecurityConfig` (uses secure defaults if omitted)
+///
+/// # Returns
+///
+/// `ArchiveManifest` with entry metadata
+///
+/// # Errors
+///
+/// Returns error if path validation fails, archive is invalid, or I/O errors
+/// occur.
+///
+/// # Examples
+///
+/// ```javascript
+/// const manifest = listArchiveSync('archive.tar.gz');
+/// for (const entry of manifest.entries) {
+///   console.log(`${entry.path}: ${entry.size} bytes`);
+/// }
+/// ```
+#[napi]
+#[allow(clippy::needless_pass_by_value)]
+pub fn list_archive_sync(
+    archive_path: String,
+    config: Option<&SecurityConfig>,
+) -> Result<ArchiveManifest> {
+    validate_path(&archive_path)?;
+
+    let default_config = exarch_core::SecurityConfig::default();
+    let config_ref = config.map_or(&default_config, |c| c.as_core());
+
+    let manifest = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
+        exarch_core::list_archive(&archive_path, config_ref)
+    }))
+    .map_err(|_| Error::from_reason("Internal panic during archive listing"))?
+    .map_err(convert_error)?;
+
+    Ok(ArchiveManifest::from(manifest))
+}
+
+/// Verify archive integrity and security (async).
+///
+/// # Arguments
+///
+/// * `archive_path` - Path to archive file
+/// * `config` - Optional `SecurityConfig` (uses secure defaults if omitted)
+///
+/// # Returns
+///
+/// Promise resolving to `VerificationReport` with validation results
+///
+/// # Errors
+///
+/// Returns error if path validation fails, archive is invalid, or I/O errors
+/// occur.
+///
+/// # Examples
+///
+/// ```javascript
+/// const report = await verifyArchive('archive.tar.gz');
+/// if (report.status === 'PASS') {
+///   console.log('Archive is safe to extract');
+/// } else {
+///   for (const issue of report.issues) {
+///     console.log(`[${issue.severity}] ${issue.message}`);
+///   }
+/// }
+/// ```
+#[napi]
+#[allow(clippy::needless_pass_by_value)]
+pub async fn verify_archive(
+    archive_path: String,
+    config: Option<&SecurityConfig>,
+) -> Result<VerificationReport> {
+    validate_path(&archive_path)?;
+
+    // Get owned config - clone only when config is Some, use default otherwise
+    let config_owned: exarch_core::SecurityConfig =
+        config.map(|c| c.as_core().clone()).unwrap_or_default();
+
+    let report = tokio::task::spawn_blocking(move || {
+        exarch_core::verify_archive(&archive_path, &config_owned)
+    })
+    .await
+    .map_err(|e| Error::from_reason(format!("task execution failed: {e}")))?
+    .map_err(convert_error)?;
+
+    Ok(VerificationReport::from(report))
+}
+
+/// Verify archive integrity and security (sync).
+///
+/// Synchronous version of `verifyArchive`. Blocks the event loop until
+/// verification completes. Prefer the async version for most use cases.
+///
+/// # Arguments
+///
+/// * `archive_path` - Path to archive file
+/// * `config` - Optional `SecurityConfig` (uses secure defaults if omitted)
+///
+/// # Returns
+///
+/// `VerificationReport` with validation results
+///
+/// # Errors
+///
+/// Returns error if path validation fails, archive is invalid, or I/O errors
+/// occur.
+///
+/// # Examples
+///
+/// ```javascript
+/// const report = verifyArchiveSync('archive.tar.gz');
+/// if (report.status === 'PASS') {
+///   console.log('Archive is safe to extract');
+/// }
+/// ```
+#[napi]
+#[allow(clippy::needless_pass_by_value)]
+pub fn verify_archive_sync(
+    archive_path: String,
+    config: Option<&SecurityConfig>,
+) -> Result<VerificationReport> {
+    validate_path(&archive_path)?;
+
+    let default_config = exarch_core::SecurityConfig::default();
+    let config_ref = config.map_or(&default_config, |c| c.as_core());
+
+    let report = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
+        exarch_core::verify_archive(&archive_path, config_ref)
+    }))
+    .map_err(|_| Error::from_reason("Internal panic during archive verification"))?
+    .map_err(convert_error)?;
+
+    Ok(VerificationReport::from(report))
+}
+
+// NOTE: Progress callback support (createArchiveWithProgress) is planned for
+// a future release. The napi-rs 3.x ThreadsafeFunction API requires additional
+// work to properly bridge Rust ProgressCallback trait to JavaScript callbacks.
+// For now, use createArchive/createArchiveSync without progress tracking.
+
 #[cfg(test)]
 #[allow(
     clippy::unwrap_used,

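Taken together, this hunk adds the creation, listing, and verification surface of 0.1.2. A short usage sketch assembled only from the doc-comment examples above; the `require('exarch-rs')` form and the exported `CreationConfig` class are assumptions:

```javascript
const { createArchive, listArchive, verifyArchive, CreationConfig } = require('exarch-rs');

async function buildAndCheck() {
  // Maximum compression, as in the createArchive doc example.
  const config = new CreationConfig().compressionLevel(9);
  const creation = await createArchive('site.tar.gz', ['public/'], config);
  console.log(`Created archive with ${creation.filesAdded} files`);

  // Inspect the archive without extracting it.
  const manifest = await listArchive('site.tar.gz');
  for (const entry of manifest.entries) {
    console.log(`${entry.path}: ${entry.size} bytes`);
  }

  // Check integrity and security before handing the file to consumers.
  const verification = await verifyArchive('site.tar.gz');
  if (verification.status !== 'PASS') {
    for (const issue of verification.issues) {
      console.warn(`[${issue.severity}] ${issue.message}`);
    }
  }
}
```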
@@ -221,12 +532,6 @@ pub fn extract_archive_sync(
 mod tests {
     use super::*;

-    #[test]
-    fn test_module_exports_functions() {
-        // This test just ensures the module compiles and exports the expected
-        // functions. Runtime tests would require actual archive files.
-    }
-
     // CR-004: Path validation tests
     #[tokio::test]
     async fn test_extract_archive_rejects_null_byte_in_archive_path() {

@@ -405,7 +710,7 @@ mod tests {
     #[test]
     fn test_extract_archive_sync_with_custom_config() {
         let mut config = SecurityConfig::new();
-        config.
+        config.set_max_file_size(1_000_000).unwrap();

         // Test that valid paths pass boundary validation with custom config
         let result = extract_archive_sync(

@@ -425,4 +730,242 @@
         }
         // If it succeeds, path validation passed (which is what we're testing)
     }
+
+    // CR-004: create_archive path validation tests
+    #[tokio::test]
+    async fn test_create_archive_rejects_null_byte_in_output_path() {
+        let result = create_archive(
+            "/tmp/output\0malicious.tar".to_string(),
+            vec!["source/".to_string()],
+            None,
+        )
+        .await;
+
+        assert!(result.is_err(), "should reject null bytes in output path");
+        assert!(
+            result.unwrap_err().to_string().contains("null bytes"),
+            "error message should mention null bytes"
+        );
+    }
+
+    #[tokio::test]
+    async fn test_create_archive_rejects_null_byte_in_source_path() {
+        let result = create_archive(
+            "/tmp/output.tar".to_string(),
+            vec!["source\0malicious/".to_string()],
+            None,
+        )
+        .await;
+
+        assert!(result.is_err(), "should reject null bytes in source path");
+        assert!(
+            result.unwrap_err().to_string().contains("null bytes"),
+            "error message should mention null bytes"
+        );
+    }
+
+    #[tokio::test]
+    async fn test_create_archive_rejects_excessively_long_output_path() {
+        let long_path = "x".repeat(5000);
+        let result = create_archive(long_path, vec!["source/".to_string()], None).await;
+
+        assert!(result.is_err(), "should reject excessively long paths");
+        assert!(
+            result.unwrap_err().to_string().contains("maximum length"),
+            "error message should mention length limit"
+        );
+    }
+
+    #[tokio::test]
+    async fn test_create_archive_rejects_excessively_long_source_path() {
+        let long_path = "x".repeat(5000);
+        let result = create_archive("/tmp/output.tar".to_string(), vec![long_path], None).await;
+
+        assert!(result.is_err(), "should reject excessively long paths");
+        assert!(
+            result.unwrap_err().to_string().contains("maximum length"),
+            "error message should mention length limit"
+        );
+    }
+
+    #[tokio::test]
+    async fn test_create_archive_accepts_empty_sources_array() {
+        // Empty sources array should pass boundary validation
+        // Core library will handle actual validation
+        let result = create_archive("/tmp/output.tar".to_string(), vec![], None).await;
+
+        // If it fails, ensure it's not a boundary path validation error
+        if let Err(e) = result {
+            let err_msg = e.to_string();
+            assert!(
+                !err_msg.contains("null bytes") && !err_msg.contains("maximum length"),
+                "should not fail on boundary path validation, got: {}",
+                err_msg
+            );
+        }
+    }
+
+    #[test]
+    fn test_create_archive_sync_rejects_null_byte_in_output_path() {
+        let result = create_archive_sync(
+            "/tmp/output\0malicious.tar".to_string(),
+            vec!["source/".to_string()],
+            None,
+        );
+
+        assert!(result.is_err(), "should reject null bytes in output path");
+        assert!(
+            result.unwrap_err().to_string().contains("null bytes"),
+            "error message should mention null bytes"
+        );
+    }
+
+    #[test]
+    fn test_create_archive_sync_rejects_null_byte_in_source_path() {
+        let result = create_archive_sync(
+            "/tmp/output.tar".to_string(),
+            vec!["source\0malicious/".to_string()],
+            None,
+        );
+
+        assert!(result.is_err(), "should reject null bytes in source path");
+        assert!(
+            result.unwrap_err().to_string().contains("null bytes"),
+            "error message should mention null bytes"
+        );
+    }
+
+    #[test]
+    fn test_create_archive_sync_rejects_excessively_long_output_path() {
+        let long_path = "x".repeat(5000);
+        let result = create_archive_sync(long_path, vec!["source/".to_string()], None);
+
+        assert!(result.is_err(), "should reject excessively long paths");
+        assert!(
+            result.unwrap_err().to_string().contains("maximum length"),
+            "error message should mention length limit"
+        );
+    }
+
+    #[test]
+    fn test_create_archive_sync_rejects_excessively_long_source_path() {
+        let long_path = "x".repeat(5000);
+        let result = create_archive_sync("/tmp/output.tar".to_string(), vec![long_path], None);
+
+        assert!(result.is_err(), "should reject excessively long paths");
+        assert!(
+            result.unwrap_err().to_string().contains("maximum length"),
+            "error message should mention length limit"
+        );
+    }
+
+    #[test]
+    fn test_create_archive_sync_accepts_empty_sources_array() {
+        // Empty sources array should pass boundary validation
+        let result = create_archive_sync("/tmp/output.tar".to_string(), vec![], None);
+
+        // If it fails, ensure it's not a boundary path validation error
+        if let Err(e) = result {
+            let err_msg = e.to_string();
+            assert!(
+                !err_msg.contains("null bytes") && !err_msg.contains("maximum length"),
+                "should not fail on boundary path validation, got: {}",
+                err_msg
+            );
+        }
+    }
+
+    // CR-004: list_archive path validation tests
+    #[tokio::test]
+    async fn test_list_archive_rejects_null_byte_in_archive_path() {
+        let result = list_archive("/tmp/test\0malicious.tar".to_string(), None).await;
+
+        assert!(result.is_err(), "should reject null bytes in archive path");
+        assert!(
+            result.unwrap_err().to_string().contains("null bytes"),
+            "error message should mention null bytes"
+        );
+    }
+
+    #[tokio::test]
+    async fn test_list_archive_rejects_excessively_long_archive_path() {
+        let long_path = "x".repeat(5000);
+        let result = list_archive(long_path, None).await;
+
+        assert!(result.is_err(), "should reject excessively long paths");
+        assert!(
+            result.unwrap_err().to_string().contains("maximum length"),
+            "error message should mention length limit"
+        );
+    }
+
+    #[test]
+    fn test_list_archive_sync_rejects_null_byte_in_archive_path() {
+        let result = list_archive_sync("/tmp/test\0malicious.tar".to_string(), None);
+
+        assert!(result.is_err(), "should reject null bytes in archive path");
+        assert!(
+            result.unwrap_err().to_string().contains("null bytes"),
+            "error message should mention null bytes"
+        );
+    }
+
+    #[test]
+    fn test_list_archive_sync_rejects_excessively_long_archive_path() {
+        let long_path = "x".repeat(5000);
+        let result = list_archive_sync(long_path, None);
+
+        assert!(result.is_err(), "should reject excessively long paths");
+        assert!(
+            result.unwrap_err().to_string().contains("maximum length"),
+            "error message should mention length limit"
+        );
+    }
+
+    // CR-004: verify_archive path validation tests
+    #[tokio::test]
+    async fn test_verify_archive_rejects_null_byte_in_archive_path() {
+        let result = verify_archive("/tmp/test\0malicious.tar".to_string(), None).await;
+
+        assert!(result.is_err(), "should reject null bytes in archive path");
+        assert!(
+            result.unwrap_err().to_string().contains("null bytes"),
+            "error message should mention null bytes"
+        );
+    }
+
+    #[tokio::test]
+    async fn test_verify_archive_rejects_excessively_long_archive_path() {
+        let long_path = "x".repeat(5000);
+        let result = verify_archive(long_path, None).await;
+
+        assert!(result.is_err(), "should reject excessively long paths");
+        assert!(
+            result.unwrap_err().to_string().contains("maximum length"),
+            "error message should mention length limit"
+        );
+    }
+
+    #[test]
+    fn test_verify_archive_sync_rejects_null_byte_in_archive_path() {
+        let result = verify_archive_sync("/tmp/test\0malicious.tar".to_string(), None);
+
+        assert!(result.is_err(), "should reject null bytes in archive path");
+        assert!(
+            result.unwrap_err().to_string().contains("null bytes"),
+            "error message should mention null bytes"
+        );
+    }
+
+    #[test]
+    fn test_verify_archive_sync_rejects_excessively_long_archive_path() {
+        let long_path = "x".repeat(5000);
+        let result = verify_archive_sync(long_path, None);
+
+        assert!(result.is_err(), "should reject excessively long paths");
+        assert!(
+            result.unwrap_err().to_string().contains("maximum length"),
+            "error message should mention length limit"
+        );
+    }
 }
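These tests pin down the boundary validation that runs before any archive I/O: paths containing null bytes or exceeding the maximum length are rejected with errors mentioning "null bytes" or "maximum length". From JavaScript that looks roughly like the following sketch (illustrative only; it assumes the async bindings reject the returned promise while the *Sync variants throw):

```javascript
const { listArchive, listArchiveSync } = require('exarch-rs');

// Async form: the promise rejects before the file is ever opened.
listArchive('archive\0.tar.gz').catch((err) => {
  console.error(err.message); // mentions null bytes
});

// Sync form: the same validation failure is thrown.
try {
  listArchiveSync('x'.repeat(5000));
} catch (err) {
  console.error(err.message); // mentions the maximum path length
}
```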