mac_cleaner 1.0.0 → 1.2.0
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- checksums.yaml +4 -4
- data/README.md +53 -0
- data/exe/mac_cleaner +5 -0
- data/lib/common.sh +1751 -0
- data/lib/mac_cleaner/analyzer.rb +156 -0
- data/lib/mac_cleaner/cleaner.rb +540 -0
- data/lib/mac_cleaner/cli.rb +27 -0
- data/lib/mac_cleaner/version.rb +3 -0
- data/lib/mac_cleaner.rb +8 -0
- data/lib/paginated_menu.sh +688 -0
- data/lib/simple_menu.sh +292 -0
- data/lib/whitelist_manager.sh +289 -0
- metadata +72 -5
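
For orientation, the analyzer added in the diff below can be driven directly from Ruby once the gem is installed. A minimal sketch, assuming `lib/mac_cleaner.rb` loads the analyzer class; only `MacCleaner::Analyzer.new(path:)` and `#analyze` are confirmed by the diff itself, and the path is hypothetical:

```ruby
require 'mac_cleaner'

# Scan a directory; results are printed and cached under ~/.cache/mac_cleaner
# for one hour, so an immediate re-run loads from the cache instead of rescanning.
analyzer = MacCleaner::Analyzer.new(path: '~/Downloads')  # hypothetical target
analyzer.analyze
```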
data/lib/mac_cleaner/analyzer.rb
@@ -0,0 +1,156 @@
+require 'open3'
+require 'fileutils'
+require 'digest'
+
+module MacCleaner
+  class Analyzer
+    MIN_LARGE_FILE_SIZE = 1_000_000_000 # 1GB
+    MIN_MEDIUM_FILE_SIZE = 100_000_000 # 100MB
+    CACHE_DIR = File.expand_path("~/.cache/mac_cleaner")
+
+    def initialize(path: "~")
+      @path = File.expand_path(path)
+      @path_hash = Digest::MD5.hexdigest(@path)
+      @large_files = []
+      @medium_files = []
+      @directories = []
+      @aggregated_directories = []
+      FileUtils.mkdir_p(CACHE_DIR)
+    end
+    def analyze
+      if cache_valid?
+        puts "Loading from cache..."
+        load_from_cache
+        display_results
+        return
+      end
+
+      puts "Analyzing #{@path}..."
+      scan_large_files
+      scan_medium_files
+      scan_directories
+      @aggregated_directories = aggregate_by_directory(@large_files + @medium_files)
+      save_to_cache
+      display_results
+    end
+
+    private
+
+    def scan_large_files
+      puts "Scanning for large files..."
+      cmd = "mdfind -onlyin '#{@path}' \"kMDItemFSSize > #{MIN_LARGE_FILE_SIZE}\""
+      stdout, stderr, status = Open3.capture3(cmd)
+
+      return unless status.success?
+
+      stdout.each_line do |line|
+        path = line.strip
+        size = File.size(path)
+        @large_files << { path: path, size: size }
+      end
+
+      @large_files.sort_by! { |f| -f[:size] }
+    end
+
+    def scan_medium_files
+      puts "Scanning for medium files..."
+      cmd = "mdfind -onlyin '#{@path}' \"kMDItemFSSize > #{MIN_MEDIUM_FILE_SIZE} && kMDItemFSSize < #{MIN_LARGE_FILE_SIZE}\""
+      stdout, stderr, status = Open3.capture3(cmd)
+
+      return unless status.success?
+
+      stdout.each_line do |line|
+        path = line.strip
+        size = File.size(path)
+        @medium_files << { path: path, size: size }
+      end
+
+      @medium_files.sort_by! { |f| -f[:size] }
+    end
+
+    def scan_directories
+      puts "Scanning directories..."
+      cmd = "du -d 1 -k '#{@path}'"
+      stdout, stderr, status = Open3.capture3(cmd)
+
+      return unless status.success?
+
+      stdout.each_line do |line|
+        size, path = line.split("\t")
+        next if path.strip == @path
+        @directories << { path: path.strip, size: size.to_i * 1024 }
+      end
+
+      @directories.sort_by! { |d| -d[:size] }
+    end
+
+    def display_results
+      puts "\n--- Top 10 Large Files ---"
+      @large_files.first(10).each do |file|
+        puts "#{format_bytes(file[:size])}\t#{file[:path]}"
+      end
+
+      puts "\n--- Top 10 Medium Files ---"
+      @medium_files.first(10).each do |file|
+        puts "#{format_bytes(file[:size])}\t#{file[:path]}"
+      end
+
+      puts "\n--- Top 10 Directories ---"
+      @directories.first(10).each do |dir|
+        puts "#{format_bytes(dir[:size])}\t#{dir[:path]}"
+      end
+
+      puts "\n--- Top 10 Aggregated Directories ---"
+      @aggregated_directories.first(10).each do |dir|
+        puts "#{format_bytes(dir[:size])} in #{dir[:count]} files\t#{dir[:path]}"
+      end
+    end
+
+    def cache_valid?
+      cache_file = "#{CACHE_DIR}/#{@path_hash}.cache"
+      return false unless File.exist?(cache_file)
+      (Time.now - File.mtime(cache_file)) < 3600 # 1 hour
+    end
+
+    def save_to_cache
+      cache_file = "#{CACHE_DIR}/#{@path_hash}.cache"
+      data = {
+        large_files: @large_files,
+        medium_files: @medium_files,
+        directories: @directories,
+        aggregated_directories: @aggregated_directories
+      }
+      File.write(cache_file, Marshal.dump(data))
+    end
+
+    def load_from_cache
+      cache_file = "#{CACHE_DIR}/#{@path_hash}.cache"
+      data = Marshal.load(File.read(cache_file))
+      @large_files = data[:large_files]
+      @medium_files = data[:medium_files]
+      @directories = data[:directories]
+      @aggregated_directories = data[:aggregated_directories]
+    end
+
+    def format_bytes(bytes)
+      return "0B" if bytes.zero?
+      units = ["B", "KB", "MB", "GB", "TB"]
+      i = (Math.log(bytes) / Math.log(1024)).floor
+      "%.2f%s" % [bytes.to_f / 1024**i, units[i]]
+    end
+
+    def aggregate_by_directory(files)
+      directories = Hash.new { |h, k| h[k] = { size: 0, count: 0 } }
+
+      files.each do |file|
+        dir = File.dirname(file[:path])
+        directories[dir][:size] += file[:size]
+        directories[dir][:count] += 1
+      end
+
+      directories.map do |path, data|
+        { path: path, size: data[:size], count: data[:count] }
+      end.sort_by! { |d| -d[:size] }
+    end
+  end
+end
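
For reference, the analyzer keys its cache on the MD5 of the expanded scan path and renders sizes with a log-base-1024 unit lookup. A small sketch of that arithmetic in isolation, using a hypothetical path:

```ruby
require 'digest'

path = File.expand_path('~/Downloads')  # hypothetical scan target
cache_file = File.expand_path("~/.cache/mac_cleaner/#{Digest::MD5.hexdigest(path)}.cache")

# Same unit math as Analyzer#format_bytes: 1_500_000_000 bytes => "1.40GB"
bytes = 1_500_000_000
units = %w[B KB MB GB TB]
i = (Math.log(bytes) / Math.log(1024)).floor
puts "%.2f%s" % [bytes.to_f / 1024**i, units[i]]
```

Because cache validity is just the file's existence and a one-hour mtime check, deleting the file under ~/.cache/mac_cleaner forces a fresh scan.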