appydave-tools 0.16.0 → 0.17.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.rubocop.yml +6 -0
- data/AGENTS.md +22 -0
- data/CHANGELOG.md +12 -0
- data/CLAUDE.md +206 -51
- data/README.md +144 -11
- data/bin/archive_project.rb +249 -0
- data/bin/configuration.rb +21 -1
- data/bin/generate_manifest.rb +357 -0
- data/bin/sync_from_ssd.rb +236 -0
- data/bin/vat +623 -0
- data/docs/README.md +169 -0
- data/docs/configuration/.env.example +19 -0
- data/docs/configuration/README.md +394 -0
- data/docs/configuration/channels.example.json +26 -0
- data/docs/configuration/settings.example.json +6 -0
- data/docs/development/CODEX-recommendations.md +123 -0
- data/docs/development/README.md +100 -0
- data/docs/development/cli-architecture-patterns.md +1604 -0
- data/docs/development/pattern-comparison.md +284 -0
- data/docs/prd-unified-brands-configuration.md +792 -0
- data/docs/project-brand-systems-analysis.md +934 -0
- data/docs/vat/dam-vision.md +123 -0
- data/docs/vat/session-summary-2025-11-09.md +297 -0
- data/docs/vat/usage.md +508 -0
- data/docs/vat/vat-testing-plan.md +801 -0
- data/lib/appydave/tools/configuration/models/brands_config.rb +238 -0
- data/lib/appydave/tools/configuration/models/config_base.rb +7 -0
- data/lib/appydave/tools/configuration/models/settings_config.rb +4 -0
- data/lib/appydave/tools/vat/config.rb +153 -0
- data/lib/appydave/tools/vat/config_loader.rb +91 -0
- data/lib/appydave/tools/vat/manifest_generator.rb +239 -0
- data/lib/appydave/tools/vat/project_listing.rb +198 -0
- data/lib/appydave/tools/vat/project_resolver.rb +132 -0
- data/lib/appydave/tools/vat/s3_operations.rb +560 -0
- data/lib/appydave/tools/version.rb +1 -1
- data/lib/appydave/tools.rb +9 -1
- data/package.json +1 -1
- metadata +57 -3
- data/docs/dam/overview.md +0 -28
|
@@ -0,0 +1,249 @@
|
|
|
1
|
+
#!/usr/bin/env ruby
|
|
2
|
+
# frozen_string_literal: true
|
|
3
|
+
# rubocop:disable all
|
|
4
|
+
|
|
5
|
+
# Archive a completed video project to SSD
|
|
6
|
+
# 1. Copies entire project to SSD grouped folder (if not already there)
|
|
7
|
+
# 2. Deletes entire local project folder
|
|
8
|
+
# 3. Run 'ruby sync_from_ssd.rb' afterward to pull back light files to archived/
|
|
9
|
+
#
|
|
10
|
+
# Usage:
|
|
11
|
+
# ruby archive_project.rb PROJECT_ID [--dry-run]
|
|
12
|
+
#
|
|
13
|
+
# Example:
|
|
14
|
+
# ruby archive_project.rb b63-flivideo --dry-run # Preview
|
|
15
|
+
# ruby archive_project.rb b63-flivideo # Execute
|
|
16
|
+
# ruby sync_from_ssd.rb # Pull back light files
|
|
17
|
+
# ruby generate_manifest.rb # Update dashboard
|
|
18
|
+
#
|
|
19
|
+
# Options:
|
|
20
|
+
# --dry-run : Show what would happen without making changes
|
|
21
|
+
|
|
22
|
+
require 'fileutils'
|
|
23
|
+
|
|
24
|
+
# Load configuration
|
|
25
|
+
SCRIPT_DIR = File.dirname(__FILE__)
|
|
26
|
+
TOOLS_DIR = File.expand_path(File.join(SCRIPT_DIR, '..'))
|
|
27
|
+
require_relative '../lib/config_loader'
|
|
28
|
+
|
|
29
|
+
# Determine paths relative to current working directory (repo root)
|
|
30
|
+
LOCAL_BASE = Dir.pwd
|
|
31
|
+
LOCAL_ARCHIVED = File.join(LOCAL_BASE, 'archived')
|
|
32
|
+
|
|
33
|
+
# Load SSD_BASE from config
|
|
34
|
+
begin
|
|
35
|
+
config = ConfigLoader.load_from_repo(LOCAL_BASE)
|
|
36
|
+
SSD_BASE = config['SSD_BASE']
|
|
37
|
+
rescue ConfigLoader::ConfigNotFoundError, ConfigLoader::InvalidConfigError => e
|
|
38
|
+
puts e.message
|
|
39
|
+
exit 1
|
|
40
|
+
end
|
|
41
|
+
|
|
42
|
+
# True when the user passed --dry-run on the command line.
def dry_run?
  ARGV.any? { |arg| arg == '--dry-run' }
end
|
|
45
|
+
|
|
46
|
+
# Map a project ID to its grouped/range folder name on the SSD.
#   "b63-flivideo" -> "b50-b99"  (letter prefix, bucketed in 50s)
#   "006-foo"      -> "-01-25"   (legacy numeric projects 1..25)
# Returns nil when no bucket can be derived from the ID.
def get_range(project_id)
  modern = project_id.match(/^([a-z])(\d+)/i)
  if modern
    letter = modern[1].downcase
    low_bucket = modern[2].to_i < 50
    return low_bucket ? "#{letter}00-#{letter}49" : "#{letter}50-#{letter}99"
  end

  # Legacy purely-numeric IDs only have a bucket for projects 1..25.
  legacy = project_id.match(/^(\d+)/)
  legacy && legacy[1].to_i <= 25 ? '-01-25' : nil
end
|
|
61
|
+
|
|
62
|
+
# Render a byte count as a human-readable string with one decimal place
# (e.g. "512B", "2.0KB", "1.5GB"). Picks the largest unit that fits.
def format_bytes(bytes)
  units = [['GB', 1024.0**3], ['MB', 1024.0**2], ['KB', 1024.0]]
  label, divisor = units.find { |_, size| bytes >= size }
  return "#{bytes}B" unless label

  "#{(bytes / divisor).round(1)}#{label}"
end
|
|
73
|
+
|
|
74
|
+
# Total size in bytes of every regular file under dir, recursively
# (dotfiles included via FNM_DOTMATCH).
def get_dir_size(dir)
  Dir.glob(File.join(dir, '**', '*'), File::FNM_DOTMATCH)
     .select { |entry| File.file?(entry) }
     .sum { |entry| File.size(entry) }
end
|
|
81
|
+
|
|
82
|
+
# Stage 1 of archiving: mirror the whole project folder onto the SSD.
# Skips the copy when the destination already exists; in --dry-run mode
# only reports what would happen. Always returns true so the caller can
# proceed to the local-delete step.
def copy_to_ssd(_project_id, local_path, ssd_path)
  puts "\n📦 Step 1: Copy to SSD"

  if Dir.exist?(ssd_path)
    puts " ⚠️ Already exists on SSD: #{ssd_path}"
    puts ' Skipping copy step'
    return true
  end

  project_size = get_dir_size(local_path)
  puts " Source: #{local_path}"
  puts " Dest: #{ssd_path}"
  puts " Size: #{format_bytes(project_size)}"

  if dry_run?
    puts ' [DRY-RUN] Would copy entire project to SSD'
    return true
  end

  # preserve: true keeps timestamps/permissions on the archived copy.
  FileUtils.mkdir_p(File.dirname(ssd_path))
  FileUtils.cp_r(local_path, ssd_path, preserve: true)
  puts ' ✅ Copied to SSD'
  true
end
|
|
105
|
+
|
|
106
|
+
# Stage 2 of archiving: remove the local copy of the project and report
# the space reclaimed. Honours --dry-run (reports only, deletes nothing).
def delete_local_project(local_path)
  puts "\n🗑️ Step 2: Delete local project"

  reclaimed = get_dir_size(local_path)
  puts " Path: #{local_path}"
  puts " Size: #{format_bytes(reclaimed)}"

  return puts(' [DRY-RUN] Would delete entire local folder') if dry_run?

  FileUtils.rm_rf(local_path)
  puts ' ✅ Deleted local folder'
  puts " 💾 Freed: #{format_bytes(reclaimed)}"
end
|
|
121
|
+
|
|
122
|
+
# Parse --next flag
|
|
123
|
+
# Parse the --next flag.
# Returns nil when --next was not given; otherwise the number of oldest
# projects to archive (default 1).
#
# Fix: the original did `ARGV[next_idx + 1].to_i`, so `--next --dry-run`
# (or a bare trailing `--next`) turned the following flag into 0 and the
# tool archived nothing instead of defaulting to 1. Only a purely numeric
# following token is treated as the count now.
def next_count
  next_idx = ARGV.index('--next')
  return nil unless next_idx

  count = ARGV[next_idx + 1]
  count&.match?(/\A\d+\z/) ? count.to_i : 1
end
|
|
130
|
+
|
|
131
|
+
# Return the `count` oldest active (flat, root-level) project IDs,
# ordered by letter prefix then numeric suffix (a00 < a49 < b00 ...).
# Only folders matching the modern "letter + 2 digits + dash" naming
# are considered.
def find_oldest_flat_projects(count)
  candidates = Dir.glob(File.join(LOCAL_BASE, '*/')).map { |dir| File.basename(dir) }

  ordered = candidates.grep(/^[a-z]\d{2}-/).sort_by do |project_id|
    prefix = project_id.match(/^([a-z])(\d{2})/)
    [prefix[1], prefix[2].to_i]
  end

  ordered.first(count)
end
|
|
145
|
+
|
|
146
|
+
# Main execution
|
|
147
|
+
count = next_count
|
|
148
|
+
project_id = ARGV.find { |arg| !arg.start_with?('--') && arg != count.to_s }
|
|
149
|
+
|
|
150
|
+
if count
|
|
151
|
+
# Archive multiple projects
|
|
152
|
+
projects_to_archive = find_oldest_flat_projects(count)
|
|
153
|
+
|
|
154
|
+
if projects_to_archive.empty?
|
|
155
|
+
puts '❌ No flat projects found to archive'
|
|
156
|
+
exit 1
|
|
157
|
+
end
|
|
158
|
+
|
|
159
|
+
puts dry_run? ? "🔍 DRY-RUN: Would archive #{projects_to_archive.size} oldest projects" : "🎬 Archiving #{projects_to_archive.size} oldest projects"
|
|
160
|
+
puts '=' * 60
|
|
161
|
+
puts 'Projects to archive:'
|
|
162
|
+
projects_to_archive.each { |p| puts " - #{p}" }
|
|
163
|
+
puts '=' * 60
|
|
164
|
+
puts
|
|
165
|
+
|
|
166
|
+
# Process each project
|
|
167
|
+
projects_to_archive.each_with_index do |proj_id, idx|
|
|
168
|
+
puts "\n[#{idx + 1}/#{projects_to_archive.size}] Processing: #{proj_id}"
|
|
169
|
+
puts '-' * 60
|
|
170
|
+
|
|
171
|
+
local_path = File.join(LOCAL_BASE, proj_id)
|
|
172
|
+
range = get_range(proj_id)
|
|
173
|
+
ssd_path = File.join(SSD_BASE, range, proj_id)
|
|
174
|
+
|
|
175
|
+
success = copy_to_ssd(proj_id, local_path, ssd_path)
|
|
176
|
+
delete_local_project(local_path) if success
|
|
177
|
+
end
|
|
178
|
+
|
|
179
|
+
puts "\n#{'=' * 60}"
|
|
180
|
+
if dry_run?
|
|
181
|
+
puts "✅ Dry-run complete! Would have archived #{projects_to_archive.size} projects"
|
|
182
|
+
puts ' Run without --dry-run to actually archive'
|
|
183
|
+
else
|
|
184
|
+
puts "✅ Archived #{projects_to_archive.size} projects!"
|
|
185
|
+
puts
|
|
186
|
+
puts 'Next steps:'
|
|
187
|
+
puts ' 1. ruby video-asset-tools/bin/sync_from_ssd.rb # Pull back light files to archived/'
|
|
188
|
+
puts ' 2. ruby video-asset-tools/bin/generate_manifest.rb # Update dashboard'
|
|
189
|
+
end
|
|
190
|
+
exit 0
|
|
191
|
+
end
|
|
192
|
+
|
|
193
|
+
unless project_id
|
|
194
|
+
puts 'Usage: ruby archive_project.rb PROJECT_ID [--dry-run]'
|
|
195
|
+
puts ' ruby archive_project.rb --next N [--dry-run]'
|
|
196
|
+
puts
|
|
197
|
+
puts 'Examples:'
|
|
198
|
+
puts ' ruby archive_project.rb b63-flivideo --dry-run # Archive specific project'
|
|
199
|
+
puts ' ruby archive_project.rb --next 5 --dry-run # Archive 5 oldest projects'
|
|
200
|
+
puts
|
|
201
|
+
puts 'After archiving, run:'
|
|
202
|
+
puts ' ruby video-asset-tools/bin/sync_from_ssd.rb # Pull back light files'
|
|
203
|
+
puts ' ruby video-asset-tools/bin/generate_manifest.rb # Update dashboard'
|
|
204
|
+
exit 1
|
|
205
|
+
end
|
|
206
|
+
|
|
207
|
+
# Check SSD first
|
|
208
|
+
unless Dir.exist?(SSD_BASE)
|
|
209
|
+
puts "❌ SSD not mounted at #{SSD_BASE}"
|
|
210
|
+
puts ' Please connect the SSD before archiving.'
|
|
211
|
+
exit 1
|
|
212
|
+
end
|
|
213
|
+
|
|
214
|
+
puts dry_run? ? "🔍 DRY-RUN: Archiving #{project_id}" : "🎬 Archiving: #{project_id}"
|
|
215
|
+
puts '=' * 60
|
|
216
|
+
|
|
217
|
+
# Find local project (must be in flat structure for archiving)
|
|
218
|
+
local_path = File.join(LOCAL_BASE, project_id)
|
|
219
|
+
|
|
220
|
+
unless Dir.exist?(local_path)
|
|
221
|
+
puts "\n❌ Project not found in flat structure: #{local_path}"
|
|
222
|
+
puts ' This tool archives active (flat) projects only.'
|
|
223
|
+
exit 1
|
|
224
|
+
end
|
|
225
|
+
|
|
226
|
+
range = get_range(project_id)
|
|
227
|
+
unless range
|
|
228
|
+
puts "\n❌ Cannot determine range for project: #{project_id}"
|
|
229
|
+
puts ' Expected format: letter + 2 digits (e.g., b63-project)'
|
|
230
|
+
exit 1
|
|
231
|
+
end
|
|
232
|
+
|
|
233
|
+
ssd_path = File.join(SSD_BASE, range, project_id)
|
|
234
|
+
|
|
235
|
+
# Execute steps
|
|
236
|
+
success = copy_to_ssd(project_id, local_path, ssd_path)
|
|
237
|
+
delete_local_project(local_path) if success
|
|
238
|
+
|
|
239
|
+
puts "\n#{'=' * 60}"
|
|
240
|
+
if dry_run?
|
|
241
|
+
puts '✅ Dry-run complete!'
|
|
242
|
+
puts ' Run without --dry-run to actually archive'
|
|
243
|
+
else
|
|
244
|
+
puts '✅ Archive complete!'
|
|
245
|
+
puts
|
|
246
|
+
puts 'Next steps:'
|
|
247
|
+
puts ' 1. ruby video-asset-tools/bin/sync_from_ssd.rb # Pull back light files to archived/'
|
|
248
|
+
puts ' 2. ruby video-asset-tools/bin/generate_manifest.rb # Update dashboard'
|
|
249
|
+
end
|
data/bin/configuration.rb
CHANGED
|
@@ -52,7 +52,27 @@ when :list
|
|
|
52
52
|
tp configurations, :name, :exists, { path: { width: 150 } }
|
|
53
53
|
when :create
|
|
54
54
|
Appydave::Tools::Configuration::Config.configure
|
|
55
|
-
|
|
55
|
+
|
|
56
|
+
# Only save configs that don't exist yet
|
|
57
|
+
created = []
|
|
58
|
+
skipped = []
|
|
59
|
+
|
|
60
|
+
Appydave::Tools::Configuration::Config.configurations.each do |name, config|
|
|
61
|
+
if File.exist?(config.config_path)
|
|
62
|
+
skipped << name
|
|
63
|
+
else
|
|
64
|
+
config.save
|
|
65
|
+
created << name
|
|
66
|
+
end
|
|
67
|
+
end
|
|
68
|
+
|
|
69
|
+
puts "\n✅ Created configurations:"
|
|
70
|
+
created.each { |name| puts " - #{name}" }
|
|
71
|
+
|
|
72
|
+
if skipped.any?
|
|
73
|
+
puts "\n⚠️ Skipped (already exist):"
|
|
74
|
+
skipped.each { |name| puts " - #{name}" }
|
|
75
|
+
end
|
|
56
76
|
when :print
|
|
57
77
|
Appydave::Tools::Configuration::Config.configure
|
|
58
78
|
Appydave::Tools::Configuration::Config.print(*options[:keys])
|
|
@@ -0,0 +1,357 @@
|
|
|
1
|
+
#!/usr/bin/env ruby
|
|
2
|
+
# frozen_string_literal: true
|
|
3
|
+
# rubocop:disable all
|
|
4
|
+
|
|
5
|
+
# Generate projects.json manifest by scanning local and SSD directories
|
|
6
|
+
# Tracks video projects across storage locations with grouped folder support
|
|
7
|
+
#
|
|
8
|
+
# Usage: ruby generate_manifest.rb
|
|
9
|
+
|
|
10
|
+
require 'json'
|
|
11
|
+
require 'fileutils'
|
|
12
|
+
|
|
13
|
+
# Load configuration
|
|
14
|
+
SCRIPT_DIR = File.dirname(__FILE__)
|
|
15
|
+
TOOLS_DIR = File.expand_path(File.join(SCRIPT_DIR, '..'))
|
|
16
|
+
require_relative '../lib/config_loader'
|
|
17
|
+
|
|
18
|
+
# Determine paths relative to current working directory (repo root)
|
|
19
|
+
LOCAL_BASE = Dir.pwd
|
|
20
|
+
LOCAL_ARCHIVED = File.join(LOCAL_BASE, 'archived')
|
|
21
|
+
OUTPUT_FILE = File.join(LOCAL_BASE, 'projects.json')
|
|
22
|
+
|
|
23
|
+
# Load SSD_BASE from config
|
|
24
|
+
begin
|
|
25
|
+
config = ConfigLoader.load_from_repo(LOCAL_BASE)
|
|
26
|
+
SSD_BASE = config['SSD_BASE']
|
|
27
|
+
rescue ConfigLoader::ConfigNotFoundError, ConfigLoader::InvalidConfigError => e
|
|
28
|
+
puts e.message
|
|
29
|
+
exit 1
|
|
30
|
+
end
|
|
31
|
+
|
|
32
|
+
# True when dir exists and directly contains at least one video file
# (non-recursive check).
def heavy_files?(dir)
  Dir.exist?(dir) && !Dir.glob(File.join(dir, '*.{mp4,mov,avi,mkv,webm}')).empty?
end
|
|
37
|
+
|
|
38
|
+
# True when dir exists and contains (recursively) at least one light
# asset: subtitles, images, or text/metadata files.
def light_files?(dir)
  Dir.exist?(dir) && !Dir.glob(File.join(dir, '**/*.{srt,vtt,jpg,png,md,txt,json,yml}')).empty?
end
|
|
43
|
+
|
|
44
|
+
# Scan the SSD once and memoize a { project_id => range_folder_name } map
# (e.g. "b63-flivideo" => "b50-b99"). Subsequent calls reuse the cache.
def build_ssd_range_map
  @ssd_range_map ||= Dir.glob(File.join(SSD_BASE, '*/')).each_with_object({}) do |range_path, map|
    range_name = File.basename(range_path)
    Dir.glob(File.join(range_path, '*/')).each do |project_path|
      map[File.basename(project_path)] = range_name
    end
  end
end
|
|
58
|
+
|
|
59
|
+
# Range folder name a project lives under on the SSD, or nil when the
# project is not present in the SSD scan.
def get_range_for_project(project_id)
  build_ssd_range_map.fetch(project_id, nil)
end
|
|
62
|
+
|
|
63
|
+
# Validation functions
|
|
64
|
+
# Is project_id one of the accepted naming schemes?
# - Modern: letter + 2 digits + dash + name (e.g. b63-flivideo)
# - Legacy: starts with a digit (e.g. 006-ac-carnivore-90)
def validate_project_id_format(project_id)
  project_id.match?(/^[a-z]\d{2}-/) || project_id.match?(/^\d/)
end
|
|
70
|
+
|
|
71
|
+
# Leading modern prefix of a project ID ("b63" from "b63-flivideo"),
# or nil when the ID does not start with letter + 2 digits.
def extract_prefix(project_id)
  project_id[/^[a-z]\d{2}/]
end
|
|
76
|
+
|
|
77
|
+
# Three-way comparison of two modern prefixes (e.g. "a12" vs "b40").
# Orders by letter first, then by the two-digit number.
# Returns -1, 0, or 1 like <=>.
def compare_prefixes(prefix1, prefix2)
  return 0 if prefix1 == prefix2

  by_letter = prefix1[0] <=> prefix2[0]
  by_letter.zero? ? prefix1[1..2].to_i <=> prefix2[1..2].to_i : by_letter
end
|
|
93
|
+
|
|
94
|
+
# Warn when any archived (grouped) project has a prefix that falls inside
# the range currently occupied by active (flat) projects — that suggests
# it was archived out of order. Returns an array of warning strings
# (empty when there are no flat projects or no violations).
def validate_flat_structure_consistency(projects)
  flat_prefixes = extract_flat_prefixes(projects)
  return [] if flat_prefixes.empty?

  check_grouped_within_flat_range(
    extract_grouped_prefixes(projects),
    flat_prefixes.first,
    flat_prefixes.last
  )
end
|
|
108
|
+
|
|
109
|
+
# Sorted prefixes of all projects whose local copy uses the flat
# (active) structure; IDs without a modern prefix are dropped.
def extract_flat_prefixes(projects)
  projects
    .select { |project| project[:storage][:local][:structure] == 'flat' }
    .filter_map { |project| extract_prefix(project[:id]) }
    .sort
end
|
|
113
|
+
|
|
114
|
+
# Prefixes of all projects whose local copy lives in the grouped
# (archived/) structure; IDs without a modern prefix are dropped.
def extract_grouped_prefixes(projects)
  projects
    .select { |project| project[:storage][:local][:structure] == 'grouped' }
    .filter_map { |project| extract_prefix(project[:id]) }
end
|
|
118
|
+
|
|
119
|
+
# Build one warning per grouped prefix that sits inside the
# [oldest_flat, newest_flat] range of active projects.
def check_grouped_within_flat_range(grouped_prefixes, oldest_flat, newest_flat)
  grouped_prefixes.each_with_object([]) do |grouped_prefix, warnings|
    next unless prefix_within_flat_range?(grouped_prefix, oldest_flat, newest_flat)

    warnings << "⚠️ WARNING: Project #{grouped_prefix}-* is in grouped structure but falls within flat range (#{oldest_flat} - #{newest_flat})"
  end
end
|
|
128
|
+
|
|
129
|
+
# True when prefix is inside the inclusive [oldest_flat, newest_flat]
# range. (Equivalent to the original `0.between?` formulation, spelled
# out as two explicit comparisons for readability.)
def prefix_within_flat_range?(prefix, oldest_flat, newest_flat)
  compare_prefixes(prefix, oldest_flat) >= 0 && compare_prefixes(prefix, newest_flat) <= 0
end
|
|
132
|
+
|
|
133
|
+
# One warning string per project whose ID fails the naming check;
# empty array when every ID is valid.
def validate_project_id_formats(projects)
  projects
    .reject { |project| validate_project_id_format(project[:id]) }
    .map { |project| "⚠️ WARNING: Invalid project ID format: #{project[:id]}" }
end
|
|
142
|
+
|
|
143
|
+
# Locate a project's folder on local disk. The flat (active) root is
# checked first; archived/<range>/ is checked only when the SSD range map
# knows which grouped folder the project belongs to.
# Returns the path, or nil when the project is not present locally.
def find_local_project(project_id)
  candidates = [File.join(LOCAL_BASE, project_id)]
  range = get_range_for_project(project_id)
  candidates << File.join(LOCAL_ARCHIVED, range, project_id) if range

  candidates.find { |path| Dir.exist?(path) }
end
|
|
158
|
+
|
|
159
|
+
# Expected SSD location for a project, or nil when its range folder
# is unknown (project never seen on the SSD).
def get_ssd_path(project_id)
  range = get_range_for_project(project_id)
  range && File.join(SSD_BASE, range, project_id)
end
|
|
165
|
+
|
|
166
|
+
# Require SSD to be mounted
|
|
167
|
+
unless Dir.exist?(SSD_BASE)
|
|
168
|
+
puts "❌ SSD not mounted at #{SSD_BASE}"
|
|
169
|
+
puts ' Please connect the SSD before running this tool.'
|
|
170
|
+
puts ' The manifest requires scanning both local AND SSD to be accurate.'
|
|
171
|
+
exit 1
|
|
172
|
+
end
|
|
173
|
+
|
|
174
|
+
# Collect all unique project IDs from both locations
|
|
175
|
+
all_project_ids = []
|
|
176
|
+
|
|
177
|
+
# Scan SSD (all range folders)
|
|
178
|
+
if Dir.exist?(SSD_BASE)
|
|
179
|
+
Dir.glob(File.join(SSD_BASE, '*/')).each do |range_path|
|
|
180
|
+
File.basename(range_path)
|
|
181
|
+
# Look for project folders (a*, b*, or anything that looks like a project)
|
|
182
|
+
Dir.glob(File.join(range_path, '*/')).each do |project_path|
|
|
183
|
+
all_project_ids << File.basename(project_path)
|
|
184
|
+
end
|
|
185
|
+
end
|
|
186
|
+
end
|
|
187
|
+
|
|
188
|
+
# Scan local flat (root level projects)
|
|
189
|
+
Dir.glob(File.join(LOCAL_BASE, '*/')).each do |path|
|
|
190
|
+
basename = File.basename(path)
|
|
191
|
+
# Skip non-project directories
|
|
192
|
+
next if basename == 'archived'
|
|
193
|
+
next if basename == 'final'
|
|
194
|
+
next unless validate_project_id_format(basename)
|
|
195
|
+
|
|
196
|
+
# Add flat project
|
|
197
|
+
all_project_ids << basename
|
|
198
|
+
end
|
|
199
|
+
|
|
200
|
+
# Scan local archived (grouped folders inside archived/)
|
|
201
|
+
if Dir.exist?(LOCAL_ARCHIVED)
|
|
202
|
+
Dir.glob(File.join(LOCAL_ARCHIVED, '*/')).each do |range_path|
|
|
203
|
+
Dir.glob(File.join(range_path, '*/')).each do |project_path|
|
|
204
|
+
all_project_ids << File.basename(project_path)
|
|
205
|
+
end
|
|
206
|
+
end
|
|
207
|
+
end
|
|
208
|
+
|
|
209
|
+
all_project_ids = all_project_ids.uniq.sort
|
|
210
|
+
|
|
211
|
+
# Build project entries
|
|
212
|
+
projects = []
|
|
213
|
+
all_project_ids.each do |project_id|
|
|
214
|
+
local_path = find_local_project(project_id)
|
|
215
|
+
ssd_path = get_ssd_path(project_id)
|
|
216
|
+
|
|
217
|
+
local_exists = !local_path.nil?
|
|
218
|
+
ssd_exists = ssd_path && Dir.exist?(ssd_path)
|
|
219
|
+
|
|
220
|
+
# Determine if local is in flat or grouped structure
|
|
221
|
+
# Check if path is in archived/ subdirectory
|
|
222
|
+
local_structure = if local_path&.include?('/archived/')
|
|
223
|
+
'grouped'
|
|
224
|
+
elsif local_path
|
|
225
|
+
'flat'
|
|
226
|
+
end
|
|
227
|
+
|
|
228
|
+
projects << {
|
|
229
|
+
id: project_id,
|
|
230
|
+
storage: {
|
|
231
|
+
ssd: {
|
|
232
|
+
exists: ssd_exists,
|
|
233
|
+
path: "#{get_range_for_project(project_id)}/#{project_id}"
|
|
234
|
+
},
|
|
235
|
+
local: {
|
|
236
|
+
exists: local_exists,
|
|
237
|
+
structure: local_structure,
|
|
238
|
+
has_heavy_files: local_exists ? heavy_files?(local_path) : false,
|
|
239
|
+
has_light_files: local_exists ? light_files?(local_path) : false
|
|
240
|
+
}
|
|
241
|
+
}
|
|
242
|
+
}
|
|
243
|
+
end
|
|
244
|
+
|
|
245
|
+
# Calculate disk usage for a specific path
|
|
246
|
+
# Total size in bytes of all regular files under path, recursively
# (dotfiles included). Returns 0 when the directory does not exist.
def calculate_path_size(path)
  return 0 unless Dir.exist?(path)

  Dir.glob(File.join(path, '**', '*'), File::FNM_DOTMATCH)
     .sum { |entry| File.file?(entry) ? File.size(entry) : 0 }
end
|
|
255
|
+
|
|
256
|
+
# Express a byte count as a hash with raw bytes plus MB and GB values
# rounded to 2 decimal places (used in the manifest's disk_usage block).
def format_bytes(bytes)
  megabyte = 1024.0 * 1024.0
  {
    total_bytes: bytes,
    total_mb: (bytes / megabyte).round(2),
    total_gb: (bytes / (megabyte * 1024.0)).round(2)
  }
end
|
|
263
|
+
|
|
264
|
+
puts '📊 Calculating disk usage...'
|
|
265
|
+
|
|
266
|
+
# Calculate local flat (root-level project folders only)
|
|
267
|
+
local_flat_bytes = 0
|
|
268
|
+
projects.each do |project|
|
|
269
|
+
if project[:storage][:local][:exists] && project[:storage][:local][:structure] == 'flat'
|
|
270
|
+
flat_path = File.join(LOCAL_BASE, project[:id])
|
|
271
|
+
local_flat_bytes += calculate_path_size(flat_path)
|
|
272
|
+
end
|
|
273
|
+
end
|
|
274
|
+
|
|
275
|
+
# Calculate local grouped (all grouped folders in archived/)
|
|
276
|
+
local_grouped_bytes = 0
|
|
277
|
+
projects.each do |project|
|
|
278
|
+
next unless project[:storage][:local][:exists] && project[:storage][:local][:structure] == 'grouped'
|
|
279
|
+
|
|
280
|
+
range = get_range_for_project(project[:id])
|
|
281
|
+
grouped_path = File.join(LOCAL_ARCHIVED, range, project[:id])
|
|
282
|
+
local_grouped_bytes += calculate_path_size(grouped_path)
|
|
283
|
+
end
|
|
284
|
+
|
|
285
|
+
# Calculate total SSD
|
|
286
|
+
ssd_bytes = 0
|
|
287
|
+
projects.each do |project|
|
|
288
|
+
next unless project[:storage][:ssd][:exists]
|
|
289
|
+
|
|
290
|
+
range = get_range_for_project(project[:id])
|
|
291
|
+
ssd_path = File.join(SSD_BASE, range, project[:id])
|
|
292
|
+
ssd_bytes += calculate_path_size(ssd_path)
|
|
293
|
+
end
|
|
294
|
+
|
|
295
|
+
local_flat_usage = format_bytes(local_flat_bytes)
|
|
296
|
+
local_grouped_usage = format_bytes(local_grouped_bytes)
|
|
297
|
+
ssd_usage = format_bytes(ssd_bytes)
|
|
298
|
+
|
|
299
|
+
# Build manifest
|
|
300
|
+
manifest = {
|
|
301
|
+
config: {
|
|
302
|
+
local_base: LOCAL_BASE,
|
|
303
|
+
ssd_base: SSD_BASE,
|
|
304
|
+
last_updated: Time.now.utc.iso8601,
|
|
305
|
+
note: 'Auto-generated manifest. Regenerate with: ruby video-asset-tools/bin/generate_manifest.rb',
|
|
306
|
+
disk_usage: {
|
|
307
|
+
local_flat: local_flat_usage,
|
|
308
|
+
local_grouped: local_grouped_usage,
|
|
309
|
+
ssd: ssd_usage
|
|
310
|
+
}
|
|
311
|
+
},
|
|
312
|
+
projects: projects
|
|
313
|
+
}
|
|
314
|
+
|
|
315
|
+
# Write to file
|
|
316
|
+
File.write(OUTPUT_FILE, JSON.pretty_generate(manifest))
|
|
317
|
+
|
|
318
|
+
puts "✅ Generated #{OUTPUT_FILE}"
|
|
319
|
+
puts " Found #{projects.size} unique projects"
|
|
320
|
+
puts " SSD mounted: #{Dir.exist?(SSD_BASE)}"
|
|
321
|
+
puts
|
|
322
|
+
|
|
323
|
+
# Summary stats
|
|
324
|
+
local_flat = projects.count { |p| p[:storage][:local][:structure] == 'flat' }
|
|
325
|
+
local_grouped = projects.count { |p| p[:storage][:local][:structure] == 'grouped' }
|
|
326
|
+
local_only = projects.count { |p| p[:storage][:local][:exists] && !p[:storage][:ssd][:exists] }
|
|
327
|
+
ssd_only = projects.count { |p| !p[:storage][:local][:exists] && p[:storage][:ssd][:exists] }
|
|
328
|
+
both = projects.count { |p| p[:storage][:local][:exists] && p[:storage][:ssd][:exists] }
|
|
329
|
+
|
|
330
|
+
puts 'Distribution:'
|
|
331
|
+
puts " Local only: #{local_only}"
|
|
332
|
+
puts " SSD only: #{ssd_only}"
|
|
333
|
+
puts " Both locations: #{both}"
|
|
334
|
+
puts
|
|
335
|
+
puts 'Local structure:'
|
|
336
|
+
puts " Flat (active): #{local_flat}"
|
|
337
|
+
puts " Grouped (archived): #{local_grouped}"
|
|
338
|
+
puts
|
|
339
|
+
|
|
340
|
+
# Run validations
|
|
341
|
+
puts '🔍 Running validations...'
|
|
342
|
+
all_warnings = []
|
|
343
|
+
|
|
344
|
+
# Validate project ID formats
|
|
345
|
+
format_warnings = validate_project_id_formats(projects)
|
|
346
|
+
all_warnings.concat(format_warnings)
|
|
347
|
+
|
|
348
|
+
# Validate flat structure consistency
|
|
349
|
+
consistency_warnings = validate_flat_structure_consistency(projects)
|
|
350
|
+
all_warnings.concat(consistency_warnings)
|
|
351
|
+
|
|
352
|
+
if all_warnings.empty?
|
|
353
|
+
puts '✅ All validations passed!'
|
|
354
|
+
else
|
|
355
|
+
puts "#{all_warnings.size} warning(s) found:"
|
|
356
|
+
all_warnings.each { |w| puts " #{w}" }
|
|
357
|
+
end
|