packagepurge 1.0.0 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.agent/workflows/build.md +58 -0
- package/.github/workflows/release.yml +176 -0
- package/README.md +215 -49
- package/dist/cli/index.d.ts +1 -0
- package/dist/cli/index.js +122 -132
- package/dist/cli/index.js.map +1 -1
- package/dist/core/bindings.d.ts +11 -0
- package/dist/core/bindings.d.ts.map +1 -1
- package/dist/core/bindings.js +40 -94
- package/dist/core/bindings.js.map +1 -1
- package/dist/utils/core-utils.d.ts +31 -0
- package/dist/utils/core-utils.d.ts.map +1 -0
- package/dist/utils/core-utils.js +121 -0
- package/dist/utils/core-utils.js.map +1 -0
- package/dist/utils/formatter.d.ts +63 -0
- package/dist/utils/formatter.d.ts.map +1 -0
- package/dist/utils/formatter.js +295 -0
- package/dist/utils/formatter.js.map +1 -0
- package/package.json +3 -3
- package/core/src/arc_lfu.rs +0 -91
- package/core/src/cache.rs +0 -205
- package/core/src/lockfiles.rs +0 -112
- package/core/src/main.rs +0 -125
- package/core/src/ml.rs +0 -188
- package/core/src/optimization.rs +0 -314
- package/core/src/safety.rs +0 -103
- package/core/src/scanner.rs +0 -136
- package/core/src/symlink.rs +0 -223
- package/core/src/types.rs +0 -87
- package/core/src/usage_tracker.rs +0 -107
- package/src/cli/index.ts +0 -212
- package/src/core/bindings.ts +0 -157
- package/src/managers/base-manager.ts +0 -117
- package/src/managers/index.ts +0 -32
- package/src/managers/npm-manager.ts +0 -96
- package/src/managers/pnpm-manager.ts +0 -107
- package/src/managers/yarn-manager.ts +0 -112
- package/src/types/index.ts +0 -97
- package/src/utils/logger.ts +0 -50
- package/tsconfig.json +0 -22
package/core/src/optimization.rs
DELETED
|
@@ -1,314 +0,0 @@
|
|
|
1
|
-
use anyhow::Result;
|
|
2
|
-
use chrono::{Duration, Utc};
|
|
3
|
-
use std::collections::{HashMap, HashSet};
|
|
4
|
-
use std::path::PathBuf;
|
|
5
|
-
|
|
6
|
-
use crate::types::{DryRunReport, PlanItem, ScanOutput, PackageUsageMetrics, ProjectMetadata, DeveloperBehavior};
|
|
7
|
-
use crate::symlink::SemanticDeduplication;
|
|
8
|
-
use crate::cache::PackageLruCache;
|
|
9
|
-
use crate::ml::{MlRecommender, PredictiveOptimizer};
|
|
10
|
-
|
|
11
|
-
/// Ordering of strategies consulted when evicting cached packages,
/// as named: ML prediction, then ARC, then LRU — or LRU alone.
#[allow(dead_code)]
pub enum EvictionPolicy {
    /// Consult the ML predictor first, then ARC, falling back to LRU.
    MlThenArcThenLru,
    /// Evict purely by least-recently-used order.
    LruOnly,
}
|
|
16
|
-
|
|
17
|
-
/// Tunable knobs that drive cleanup planning.
#[allow(dead_code)]
pub struct RulesConfig {
    /// Packages modified within this many days are preserved.
    pub preserve_days: i64,
    /// Whether duplicate packages may be replaced with symlinks.
    #[allow(dead_code)]
    pub enable_symlinking: bool,
    /// Whether the ML predictor participates in keep/remove decisions.
    #[allow(dead_code)]
    pub enable_ml_prediction: bool,
    /// Maximum number of packages tracked by the LRU cache.
    #[allow(dead_code)]
    pub lru_max_packages: usize,
    /// Maximum total size in bytes tracked by the LRU cache.
    #[allow(dead_code)]
    pub lru_max_size_bytes: u64,
}
|
|
29
|
-
|
|
30
|
-
pub fn plan_basic_cleanup(scan: &ScanOutput, cfg: &RulesConfig) -> Result<DryRunReport> {
|
|
31
|
-
let cutoff = Utc::now() - Duration::days(cfg.preserve_days);
|
|
32
|
-
|
|
33
|
-
let mut used: HashSet<(String, String)> = HashSet::new();
|
|
34
|
-
for proj in &scan.projects {
|
|
35
|
-
for (n, v) in &proj.dependencies {
|
|
36
|
-
used.insert((n.clone(), v.clone()));
|
|
37
|
-
}
|
|
38
|
-
}
|
|
39
|
-
|
|
40
|
-
let mut seen_locations: HashMap<(String, String), Vec<PathBuf>> = HashMap::new();
|
|
41
|
-
|
|
42
|
-
let mut items: Vec<PlanItem> = Vec::new();
|
|
43
|
-
for pkg in &scan.packages {
|
|
44
|
-
let key = (pkg.name.clone(), pkg.version.clone());
|
|
45
|
-
seen_locations.entry(key.clone()).or_default().push(PathBuf::from(&pkg.path));
|
|
46
|
-
|
|
47
|
-
let is_orphan = !used.contains(&key);
|
|
48
|
-
let is_old = pkg.mtime < cutoff;
|
|
49
|
-
|
|
50
|
-
if is_orphan || is_old {
|
|
51
|
-
items.push(PlanItem {
|
|
52
|
-
target_path: pkg.path.clone(),
|
|
53
|
-
estimated_size_bytes: pkg.size_bytes,
|
|
54
|
-
reason: if is_orphan { "orphaned".into() } else { "old".into() },
|
|
55
|
-
});
|
|
56
|
-
}
|
|
57
|
-
}
|
|
58
|
-
|
|
59
|
-
for (_key, paths) in seen_locations.into_iter() {
|
|
60
|
-
if paths.len() > 1 {
|
|
61
|
-
for p in paths.into_iter().skip(1) {
|
|
62
|
-
items.push(PlanItem { target_path: p.to_string_lossy().to_string(), estimated_size_bytes: 0, reason: "duplicate".into() });
|
|
63
|
-
}
|
|
64
|
-
}
|
|
65
|
-
}
|
|
66
|
-
|
|
67
|
-
let total = items.iter().map(|i| i.estimated_size_bytes).sum();
|
|
68
|
-
Ok(DryRunReport { items, total_estimated_bytes: total })
|
|
69
|
-
}
|
|
70
|
-
|
|
71
|
-
/// Optimization engine with symlinking and ML/LRU strategies
|
|
72
|
-
#[allow(dead_code)]
|
|
73
|
-
pub struct OptimizationEngine {
|
|
74
|
-
deduplication: Option<SemanticDeduplication>,
|
|
75
|
-
lru_cache: Option<PackageLruCache>,
|
|
76
|
-
ml_predictor: Option<PredictiveOptimizer>,
|
|
77
|
-
config: RulesConfig,
|
|
78
|
-
}
|
|
79
|
-
|
|
80
|
-
#[allow(dead_code)]
|
|
81
|
-
impl OptimizationEngine {
|
|
82
|
-
pub fn new(config: RulesConfig) -> Result<Self> {
|
|
83
|
-
let deduplication = if config.enable_symlinking {
|
|
84
|
-
Some(SemanticDeduplication::new()?)
|
|
85
|
-
} else {
|
|
86
|
-
None
|
|
87
|
-
};
|
|
88
|
-
|
|
89
|
-
let lru_cache = Some(PackageLruCache::new(
|
|
90
|
-
config.lru_max_packages,
|
|
91
|
-
config.lru_max_size_bytes,
|
|
92
|
-
));
|
|
93
|
-
|
|
94
|
-
let ml_predictor = if config.enable_ml_prediction {
|
|
95
|
-
Some(PredictiveOptimizer::new(config.preserve_days))
|
|
96
|
-
} else {
|
|
97
|
-
None
|
|
98
|
-
};
|
|
99
|
-
|
|
100
|
-
Ok(Self {
|
|
101
|
-
deduplication,
|
|
102
|
-
lru_cache,
|
|
103
|
-
ml_predictor,
|
|
104
|
-
config,
|
|
105
|
-
})
|
|
106
|
-
}
|
|
107
|
-
|
|
108
|
-
/// Plan cleanup with symlinking and ML/LRU optimization
|
|
109
|
-
pub fn plan_optimized_cleanup(
|
|
110
|
-
&mut self,
|
|
111
|
-
scan: &ScanOutput,
|
|
112
|
-
) -> Result<DryRunReport> {
|
|
113
|
-
let cutoff = Utc::now() - Duration::days(self.config.preserve_days);
|
|
114
|
-
|
|
115
|
-
// Build usage metrics map from scan
|
|
116
|
-
let mut usage_map: HashMap<String, PackageUsageMetrics> = HashMap::new();
|
|
117
|
-
for pkg in &scan.packages {
|
|
118
|
-
let key = format!("{}@{}", pkg.name, pkg.version);
|
|
119
|
-
let metrics = PackageUsageMetrics {
|
|
120
|
-
package_key: key.clone(),
|
|
121
|
-
last_access_time: pkg.atime,
|
|
122
|
-
last_script_execution: None, // Would be populated from execution tracking
|
|
123
|
-
access_count: 1, // Would be tracked over time
|
|
124
|
-
script_execution_count: 0,
|
|
125
|
-
last_successful_build: None,
|
|
126
|
-
};
|
|
127
|
-
usage_map.insert(key, metrics);
|
|
128
|
-
}
|
|
129
|
-
|
|
130
|
-
// Build project metadata map
|
|
131
|
-
let mut project_map: HashMap<String, ProjectMetadata> = HashMap::new();
|
|
132
|
-
for proj in &scan.projects {
|
|
133
|
-
let metadata = ProjectMetadata {
|
|
134
|
-
path: proj.path.clone(),
|
|
135
|
-
project_type: detect_project_type(&proj.path),
|
|
136
|
-
last_commit_date: None, // Would be populated from git
|
|
137
|
-
dependency_count: proj.dependencies.len(),
|
|
138
|
-
last_modified: proj.mtime,
|
|
139
|
-
};
|
|
140
|
-
project_map.insert(proj.path.clone(), metadata);
|
|
141
|
-
}
|
|
142
|
-
|
|
143
|
-
let mut used: HashSet<(String, String)> = HashSet::new();
|
|
144
|
-
for proj in &scan.projects {
|
|
145
|
-
for (n, v) in &proj.dependencies {
|
|
146
|
-
used.insert((n.clone(), v.clone()));
|
|
147
|
-
}
|
|
148
|
-
}
|
|
149
|
-
|
|
150
|
-
let mut seen_locations: HashMap<(String, String), Vec<PathBuf>> = HashMap::new();
|
|
151
|
-
let mut items: Vec<PlanItem> = Vec::new();
|
|
152
|
-
let mut symlink_candidates: Vec<(PathBuf, String, String)> = Vec::new();
|
|
153
|
-
|
|
154
|
-
for pkg in &scan.packages {
|
|
155
|
-
let key = (pkg.name.clone(), pkg.version.clone());
|
|
156
|
-
seen_locations.entry(key.clone()).or_default().push(PathBuf::from(&pkg.path));
|
|
157
|
-
|
|
158
|
-
let package_key = format!("{}@{}", pkg.name, pkg.version);
|
|
159
|
-
let is_orphan = !used.contains(&key);
|
|
160
|
-
let is_old = pkg.mtime < cutoff;
|
|
161
|
-
|
|
162
|
-
// Record access in LRU cache
|
|
163
|
-
if let Some(ref mut cache) = self.lru_cache {
|
|
164
|
-
cache.record_access(&package_key, pkg.size_bytes);
|
|
165
|
-
}
|
|
166
|
-
|
|
167
|
-
// Check ML prediction
|
|
168
|
-
let should_keep_ml = if let Some(ref predictor) = self.ml_predictor {
|
|
169
|
-
if let (Some(metrics), Some(proj_path)) = (usage_map.get(&package_key), pkg.project_paths.first()) {
|
|
170
|
-
if let Some(project_meta) = project_map.get(proj_path) {
|
|
171
|
-
let behavior = DeveloperBehavior {
|
|
172
|
-
npm_commands_executed: Vec::new(), // Would be populated from tracking
|
|
173
|
-
file_access_frequency: 0,
|
|
174
|
-
days_since_last_build: None,
|
|
175
|
-
};
|
|
176
|
-
predictor.should_keep(&package_key, metrics, project_meta, &behavior)
|
|
177
|
-
} else {
|
|
178
|
-
true // Conservative: keep if no project metadata
|
|
179
|
-
}
|
|
180
|
-
} else {
|
|
181
|
-
true
|
|
182
|
-
}
|
|
183
|
-
} else {
|
|
184
|
-
true
|
|
185
|
-
};
|
|
186
|
-
|
|
187
|
-
// Check LRU strategy
|
|
188
|
-
let should_keep_lru = if let Some(ref mut cache) = self.lru_cache {
|
|
189
|
-
cache.should_keep_lru(&package_key, self.config.preserve_days)
|
|
190
|
-
} else {
|
|
191
|
-
true
|
|
192
|
-
};
|
|
193
|
-
|
|
194
|
-
// Determine if package should be removed
|
|
195
|
-
if is_orphan || (is_old && !should_keep_ml && !should_keep_lru) {
|
|
196
|
-
items.push(PlanItem {
|
|
197
|
-
target_path: pkg.path.clone(),
|
|
198
|
-
estimated_size_bytes: pkg.size_bytes,
|
|
199
|
-
reason: if is_orphan {
|
|
200
|
-
"orphaned".into()
|
|
201
|
-
} else if !should_keep_ml {
|
|
202
|
-
"ml_predicted_unused".into()
|
|
203
|
-
} else {
|
|
204
|
-
"old".into()
|
|
205
|
-
},
|
|
206
|
-
});
|
|
207
|
-
}
|
|
208
|
-
|
|
209
|
-
// Collect symlink candidates (duplicates)
|
|
210
|
-
if let Some(ref _dedup) = self.deduplication {
|
|
211
|
-
if seen_locations.get(&key).map(|v| v.len()).unwrap_or(0) > 1 {
|
|
212
|
-
symlink_candidates.push((PathBuf::from(&pkg.path), pkg.name.clone(), pkg.version.clone()));
|
|
213
|
-
}
|
|
214
|
-
}
|
|
215
|
-
}
|
|
216
|
-
|
|
217
|
-
// Process symlink candidates (in dry run, just mark them)
|
|
218
|
-
for (path, _name, _version) in symlink_candidates {
|
|
219
|
-
items.push(PlanItem {
|
|
220
|
-
target_path: path.to_string_lossy().to_string(),
|
|
221
|
-
estimated_size_bytes: 0,
|
|
222
|
-
reason: "duplicate_symlink_candidate".into(),
|
|
223
|
-
});
|
|
224
|
-
}
|
|
225
|
-
|
|
226
|
-
let total = items.iter().map(|i| i.estimated_size_bytes).sum();
|
|
227
|
-
Ok(DryRunReport { items, total_estimated_bytes: total })
|
|
228
|
-
}
|
|
229
|
-
|
|
230
|
-
/// Execute symlinking for duplicate packages
|
|
231
|
-
pub fn execute_symlinking(&self, scan: &ScanOutput) -> Result<usize> {
|
|
232
|
-
if let Some(ref dedup) = self.deduplication {
|
|
233
|
-
let mut seen: HashMap<(String, String), PathBuf> = HashMap::new();
|
|
234
|
-
let mut symlinked_count = 0;
|
|
235
|
-
|
|
236
|
-
for pkg in &scan.packages {
|
|
237
|
-
let key = (pkg.name.clone(), pkg.version.clone());
|
|
238
|
-
|
|
239
|
-
// Keep first occurrence as canonical
|
|
240
|
-
let canonical = seen.entry(key.clone()).or_insert_with(|| PathBuf::from(&pkg.path));
|
|
241
|
-
|
|
242
|
-
// Symlink duplicates
|
|
243
|
-
if canonical.to_string_lossy() != pkg.path {
|
|
244
|
-
let pkg_path = PathBuf::from(&pkg.path);
|
|
245
|
-
if let Err(e) = dedup.deduplicate_package(&pkg_path, &pkg.name, &pkg.version) {
|
|
246
|
-
eprintln!("Failed to symlink {:?}: {}", pkg_path, e);
|
|
247
|
-
} else {
|
|
248
|
-
symlinked_count += 1;
|
|
249
|
-
}
|
|
250
|
-
}
|
|
251
|
-
}
|
|
252
|
-
|
|
253
|
-
Ok(symlinked_count)
|
|
254
|
-
} else {
|
|
255
|
-
Ok(0)
|
|
256
|
-
}
|
|
257
|
-
}
|
|
258
|
-
}
|
|
259
|
-
|
|
260
|
-
fn detect_project_type(project_path: &str) -> String {
|
|
261
|
-
use std::fs;
|
|
262
|
-
use std::path::Path;
|
|
263
|
-
|
|
264
|
-
let path = Path::new(project_path);
|
|
265
|
-
let package_json = path.join("package.json");
|
|
266
|
-
|
|
267
|
-
// Check package.json for project type indicators
|
|
268
|
-
if package_json.exists() {
|
|
269
|
-
if let Ok(content) = fs::read_to_string(&package_json) {
|
|
270
|
-
if let Ok(json) = serde_json::from_str::<serde_json::Value>(&content) {
|
|
271
|
-
// Check for framework-specific dependencies
|
|
272
|
-
if let Some(deps) = json.get("dependencies").and_then(|d| d.as_object()) {
|
|
273
|
-
if deps.contains_key("react") || deps.contains_key("next") {
|
|
274
|
-
return "react".into();
|
|
275
|
-
}
|
|
276
|
-
if deps.contains_key("vue") || deps.contains_key("nuxt") {
|
|
277
|
-
return "vue".into();
|
|
278
|
-
}
|
|
279
|
-
if deps.contains_key("angular") || deps.contains_key("@angular/core") {
|
|
280
|
-
return "angular".into();
|
|
281
|
-
}
|
|
282
|
-
}
|
|
283
|
-
|
|
284
|
-
// Check devDependencies
|
|
285
|
-
if let Some(dev_deps) = json.get("devDependencies").and_then(|d| d.as_object()) {
|
|
286
|
-
if dev_deps.contains_key("typescript") || dev_deps.contains_key("tsc") {
|
|
287
|
-
return "typescript".into();
|
|
288
|
-
}
|
|
289
|
-
}
|
|
290
|
-
}
|
|
291
|
-
}
|
|
292
|
-
|
|
293
|
-
// Check for TypeScript config files
|
|
294
|
-
if path.join("tsconfig.json").exists() {
|
|
295
|
-
return "typescript".into();
|
|
296
|
-
}
|
|
297
|
-
|
|
298
|
-
// Check for Next.js
|
|
299
|
-
if path.join("next.config.js").exists() || path.join("next.config.ts").exists() {
|
|
300
|
-
return "nextjs".into();
|
|
301
|
-
}
|
|
302
|
-
|
|
303
|
-
// Check path-based heuristics as fallback
|
|
304
|
-
let path_lower = project_path.to_lowercase();
|
|
305
|
-
if path_lower.contains("react") || path_lower.contains("next") {
|
|
306
|
-
return "react".into();
|
|
307
|
-
}
|
|
308
|
-
if path_lower.contains("typescript") || path_lower.contains("ts") {
|
|
309
|
-
return "typescript".into();
|
|
310
|
-
}
|
|
311
|
-
}
|
|
312
|
-
|
|
313
|
-
"node".into()
|
|
314
|
-
}
|
package/core/src/safety.rs
DELETED
|
@@ -1,103 +0,0 @@
|
|
|
1
|
-
use anyhow::{Context, Result};
|
|
2
|
-
use chrono::Utc;
|
|
3
|
-
use sha2::{Digest, Sha256};
|
|
4
|
-
use std::{fs, path::{Path, PathBuf}};
|
|
5
|
-
|
|
6
|
-
use crate::types::QuarantineRecord;
|
|
7
|
-
|
|
8
|
-
fn quarantine_dir() -> PathBuf {
|
|
9
|
-
let home = dirs::home_dir().unwrap_or_else(|| PathBuf::from("."));
|
|
10
|
-
home.join(".packagepurge").join("quarantine")
|
|
11
|
-
}
|
|
12
|
-
|
|
13
|
-
fn index_path() -> PathBuf {
|
|
14
|
-
quarantine_dir().join("index.json")
|
|
15
|
-
}
|
|
16
|
-
|
|
17
|
-
fn read_index() -> Vec<QuarantineRecord> {
|
|
18
|
-
let p = index_path();
|
|
19
|
-
if let Ok(text) = fs::read_to_string(&p) {
|
|
20
|
-
if let Ok(list) = serde_json::from_str::<Vec<QuarantineRecord>>(&text) { return list; }
|
|
21
|
-
}
|
|
22
|
-
Vec::new()
|
|
23
|
-
}
|
|
24
|
-
|
|
25
|
-
/// Persist the quarantine index, truncating to the most recent 200
/// entries so the file size stays bounded.
fn write_index(mut list: Vec<QuarantineRecord>) -> Result<()> {
    const MAX_ENTRIES: usize = 200;
    if list.len() > MAX_ENTRIES {
        // Drop the oldest entries; the tail holds the newest records.
        list.drain(..list.len() - MAX_ENTRIES);
    }
    let data = serde_json::to_string_pretty(&list)?;
    fs::write(index_path(), data).context("Failed to write quarantine index")?;
    Ok(())
}
|
|
32
|
-
|
|
33
|
-
fn sha256_dir(path: &Path) -> Result<(String, u64)> {
|
|
34
|
-
let mut hasher = Sha256::new();
|
|
35
|
-
let mut total: u64 = 0;
|
|
36
|
-
for entry in walkdir::WalkDir::new(path).into_iter().filter_map(|e| e.ok()) {
|
|
37
|
-
let p = entry.path();
|
|
38
|
-
hasher.update(p.to_string_lossy().as_bytes());
|
|
39
|
-
if entry.file_type().is_file() {
|
|
40
|
-
let data = fs::read(p)?;
|
|
41
|
-
total += data.len() as u64;
|
|
42
|
-
hasher.update(&data);
|
|
43
|
-
}
|
|
44
|
-
}
|
|
45
|
-
Ok((hex::encode(hasher.finalize()), total))
|
|
46
|
-
}
|
|
47
|
-
|
|
48
|
-
pub fn move_to_quarantine(target: &Path) -> Result<QuarantineRecord> {
|
|
49
|
-
let qdir = quarantine_dir();
|
|
50
|
-
fs::create_dir_all(&qdir).ok();
|
|
51
|
-
let id = format!("{}", Utc::now().timestamp_nanos_opt().unwrap_or(0));
|
|
52
|
-
let (checksum, size) = sha256_dir(target)?;
|
|
53
|
-
let qpath = qdir.join(format!("{}_{}", id, target.file_name().unwrap_or_default().to_string_lossy()));
|
|
54
|
-
if let Err(e) = fs::rename(target, &qpath) {
|
|
55
|
-
// Handle cross-device link errors (os error 17 or 18 on Unix, or similar on Windows)
|
|
56
|
-
// We simply try copy-and-delete as fallback for any rename failure
|
|
57
|
-
if let Err(copy_err) = fs_extra::dir::copy(target, &qpath, &fs_extra::dir::CopyOptions::new().content_only(true)) {
|
|
58
|
-
return Err(anyhow::anyhow!("Failed to move {:?} to quarantine (rename failed: {}, copy failed: {})", target, e, copy_err));
|
|
59
|
-
}
|
|
60
|
-
if let Err(rm_err) = fs::remove_dir_all(target) {
|
|
61
|
-
// If we can't remove original, we should probably clean up the quarantine copy
|
|
62
|
-
fs::remove_dir_all(&qpath).ok();
|
|
63
|
-
return Err(anyhow::anyhow!("Failed to remove original {:?} after copy to quarantine: {}", target, rm_err));
|
|
64
|
-
}
|
|
65
|
-
}
|
|
66
|
-
let rec = QuarantineRecord {
|
|
67
|
-
id,
|
|
68
|
-
original_path: target.to_string_lossy().to_string(),
|
|
69
|
-
quarantine_path: qpath.to_string_lossy().to_string(),
|
|
70
|
-
sha256: checksum,
|
|
71
|
-
size_bytes: size,
|
|
72
|
-
created_at: Utc::now(),
|
|
73
|
-
};
|
|
74
|
-
let mut list = read_index();
|
|
75
|
-
list.push(rec.clone());
|
|
76
|
-
write_index(list)?;
|
|
77
|
-
Ok(rec)
|
|
78
|
-
}
|
|
79
|
-
|
|
80
|
-
#[allow(dead_code)]
|
|
81
|
-
pub fn list_quarantine() -> Vec<QuarantineRecord> { read_index() }
|
|
82
|
-
|
|
83
|
-
/// The most recently quarantined record, if any. Ties on `created_at`
/// resolve to the later index entry, matching the previous
/// stable-sort-then-pop behavior.
pub fn latest_quarantine() -> Option<QuarantineRecord> {
    read_index().into_iter().max_by_key(|r| r.created_at)
}
|
|
88
|
-
|
|
89
|
-
pub fn find_quarantine_by_id(id: &str) -> Option<QuarantineRecord> {
|
|
90
|
-
read_index().into_iter().find(|r| r.id == id)
|
|
91
|
-
}
|
|
92
|
-
|
|
93
|
-
/// Restore a quarantined directory to its original location and drop it
/// from the index.
pub fn rollback_record(rec: &QuarantineRecord) -> Result<()> {
    let original = PathBuf::from(&rec.original_path);
    let quarantined = PathBuf::from(&rec.quarantine_path);

    // Recreate the original parent directory if it vanished since removal.
    if let Some(parent) = original.parent() {
        fs::create_dir_all(parent).ok();
    }
    fs::rename(&quarantined, &original).with_context(|| "Failed to rollback from quarantine")?;

    // Drop the restored entry from the index.
    let mut index = read_index();
    index.retain(|r| r.id != rec.id);
    write_index(index)?;
    Ok(())
}
|
package/core/src/scanner.rs
DELETED
|
@@ -1,136 +0,0 @@
|
|
|
1
|
-
use anyhow::Result;
|
|
2
|
-
use chrono::{DateTime, Utc};
|
|
3
|
-
use rayon::prelude::*;
|
|
4
|
-
use std::{fs, path::{Path, PathBuf}, time::SystemTime};
|
|
5
|
-
use walkdir::WalkDir;
|
|
6
|
-
|
|
7
|
-
use crate::types::{PackageRecord, ProjectRecord, ScanOutput, PackageManager};
|
|
8
|
-
use crate::lockfiles::{parse_npm_package_lock, parse_yarn_lock, parse_pnpm_lock};
|
|
9
|
-
|
|
10
|
-
fn to_utc(st: SystemTime) -> DateTime<Utc> { st.into() }
|
|
11
|
-
|
|
12
|
-
fn dir_size(path: &Path) -> u64 {
|
|
13
|
-
let mut total: u64 = 0;
|
|
14
|
-
for entry in WalkDir::new(path).into_iter().filter_map(|e| e.ok()) {
|
|
15
|
-
if entry.file_type().is_file() {
|
|
16
|
-
if let Ok(meta) = entry.metadata() {
|
|
17
|
-
total += meta.len();
|
|
18
|
-
}
|
|
19
|
-
}
|
|
20
|
-
}
|
|
21
|
-
total
|
|
22
|
-
}
|
|
23
|
-
|
|
24
|
-
fn detect_manager_from_lock(dir: &Path) -> Option<PackageManager> {
|
|
25
|
-
if dir.join("package-lock.json").exists() { return Some(PackageManager::Npm); }
|
|
26
|
-
if dir.join("yarn.lock").exists() { return Some(PackageManager::Yarn); }
|
|
27
|
-
if dir.join("pnpm-lock.yaml").exists() { return Some(PackageManager::Pnpm); }
|
|
28
|
-
None
|
|
29
|
-
}
|
|
30
|
-
|
|
31
|
-
fn collect_projects_and_edges(root: &Path) -> (Vec<ProjectRecord>, Vec<(String, String)>) {
|
|
32
|
-
let mut projects = Vec::new();
|
|
33
|
-
let edges: Vec<(String, String)> = Vec::new();
|
|
34
|
-
for entry in WalkDir::new(root).max_depth(6).into_iter().filter_map(|e| e.ok()) {
|
|
35
|
-
if entry.file_type().is_file() && entry.file_name() == "package.json" {
|
|
36
|
-
let dir = entry.path().parent().unwrap_or(root);
|
|
37
|
-
let manager = detect_manager_from_lock(dir);
|
|
38
|
-
let mtime = fs::metadata(entry.path()).and_then(|m| m.modified()).ok()
|
|
39
|
-
.map(to_utc).unwrap_or_else(|| Utc::now());
|
|
40
|
-
// Basic dependency extraction from package.json
|
|
41
|
-
let mut deps: Vec<(String, String)> = Vec::new();
|
|
42
|
-
if let Ok(content) = fs::read_to_string(entry.path()) {
|
|
43
|
-
if let Ok(json) = serde_json::from_str::<serde_json::Value>(&content) {
|
|
44
|
-
for key in ["dependencies", "devDependencies", "peerDependencies"] {
|
|
45
|
-
if let Some(obj) = json.get(key).and_then(|v| v.as_object()) {
|
|
46
|
-
for (name, ver) in obj {
|
|
47
|
-
if let Some(ver_str) = ver.as_str() {
|
|
48
|
-
deps.push((name.clone(), ver_str.to_string()));
|
|
49
|
-
}
|
|
50
|
-
}
|
|
51
|
-
}
|
|
52
|
-
}
|
|
53
|
-
}
|
|
54
|
-
}
|
|
55
|
-
// Lockfile dependencies
|
|
56
|
-
let lock_deps = match manager {
|
|
57
|
-
Some(PackageManager::Npm) => parse_npm_package_lock(&dir.join("package-lock.json")),
|
|
58
|
-
Some(PackageManager::Yarn) => parse_yarn_lock(&dir.join("yarn.lock")),
|
|
59
|
-
Some(PackageManager::Pnpm) => parse_pnpm_lock(&dir.join("pnpm-lock.yaml")),
|
|
60
|
-
None => Vec::new(),
|
|
61
|
-
};
|
|
62
|
-
deps.extend(lock_deps);
|
|
63
|
-
|
|
64
|
-
projects.push(ProjectRecord {
|
|
65
|
-
path: dir.to_string_lossy().to_string(),
|
|
66
|
-
manager,
|
|
67
|
-
dependencies: deps,
|
|
68
|
-
mtime,
|
|
69
|
-
});
|
|
70
|
-
}
|
|
71
|
-
}
|
|
72
|
-
(projects, edges)
|
|
73
|
-
}
|
|
74
|
-
|
|
75
|
-
/// Heuristic: does `path` look like a package-manager cache directory
/// (an `.npm` directory, a yarn cache, or a pnpm store)?
fn is_cache_dir(path: &Path) -> bool {
    let lowered = path.to_string_lossy().to_lowercase();
    ["yarn/cache", "pnpm/store"]
        .iter()
        .any(|needle| lowered.contains(needle))
        || lowered.ends_with(".npm")
}
|
|
79
|
-
|
|
80
|
-
pub fn scan(paths: &[PathBuf]) -> Result<ScanOutput> {
|
|
81
|
-
let roots: Vec<PathBuf> = if paths.is_empty() { vec![std::env::current_dir()?] } else { paths.to_vec() };
|
|
82
|
-
|
|
83
|
-
let mut all_projects: Vec<ProjectRecord> = Vec::new();
|
|
84
|
-
let mut all_edges: Vec<(String, String)> = Vec::new();
|
|
85
|
-
for root in &roots {
|
|
86
|
-
let (projects, edges) = collect_projects_and_edges(root);
|
|
87
|
-
all_projects.extend(projects);
|
|
88
|
-
all_edges.extend(edges);
|
|
89
|
-
}
|
|
90
|
-
|
|
91
|
-
// Collect packages in node_modules and caches
|
|
92
|
-
let mut package_dirs: Vec<PathBuf> = Vec::new();
|
|
93
|
-
for root in &roots {
|
|
94
|
-
for entry in WalkDir::new(root).into_iter().filter_map(|e| e.ok()) {
|
|
95
|
-
if entry.file_type().is_dir() {
|
|
96
|
-
let name = entry.file_name().to_string_lossy();
|
|
97
|
-
if name == "node_modules" || is_cache_dir(entry.path()) {
|
|
98
|
-
package_dirs.push(entry.into_path());
|
|
99
|
-
}
|
|
100
|
-
}
|
|
101
|
-
}
|
|
102
|
-
}
|
|
103
|
-
|
|
104
|
-
let packages: Vec<PackageRecord> = package_dirs.par_iter().flat_map(|dir| {
|
|
105
|
-
WalkDir::new(dir).min_depth(1).max_depth(3).into_iter().filter_map(|e| e.ok())
|
|
106
|
-
.filter(|e| e.file_type().is_dir())
|
|
107
|
-
.filter_map(|pkg_dir| {
|
|
108
|
-
let pkg_path = pkg_dir.path().to_path_buf();
|
|
109
|
-
let package_json = pkg_path.join("package.json");
|
|
110
|
-
if !package_json.exists() { return None; }
|
|
111
|
-
let meta = fs::metadata(&pkg_path).ok()?;
|
|
112
|
-
let atime = meta.accessed().ok().map(to_utc).unwrap_or_else(|| Utc::now());
|
|
113
|
-
let mtime = meta.modified().ok().map(to_utc).unwrap_or_else(|| Utc::now());
|
|
114
|
-
let size = dir_size(&pkg_path);
|
|
115
|
-
let (name, version) = if let Ok(text) = fs::read_to_string(&package_json) {
|
|
116
|
-
if let Ok(json) = serde_json::from_str::<serde_json::Value>(&text) {
|
|
117
|
-
let n = json.get("name").and_then(|v| v.as_str()).unwrap_or("unknown").to_string();
|
|
118
|
-
let v = json.get("version").and_then(|v| v.as_str()).unwrap_or("unknown").to_string();
|
|
119
|
-
(n, v)
|
|
120
|
-
} else { ("unknown".into(), "unknown".into()) }
|
|
121
|
-
} else { ("unknown".into(), "unknown".into()) };
|
|
122
|
-
Some(PackageRecord {
|
|
123
|
-
name,
|
|
124
|
-
version,
|
|
125
|
-
path: pkg_path.to_string_lossy().to_string(),
|
|
126
|
-
size_bytes: size,
|
|
127
|
-
atime,
|
|
128
|
-
mtime,
|
|
129
|
-
manager: None,
|
|
130
|
-
project_paths: Vec::new(),
|
|
131
|
-
})
|
|
132
|
-
}).collect::<Vec<_>>()
|
|
133
|
-
}).collect();
|
|
134
|
-
|
|
135
|
-
Ok(ScanOutput { packages, projects: all_projects, edges: all_edges })
|
|
136
|
-
}
|