png_conform 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.rspec +3 -0
- data/.rubocop.yml +19 -0
- data/.rubocop_todo.yml +197 -0
- data/CODE_OF_CONDUCT.md +84 -0
- data/CONTRIBUTING.md +323 -0
- data/Gemfile +13 -0
- data/LICENSE +43 -0
- data/README.adoc +859 -0
- data/Rakefile +10 -0
- data/SECURITY.md +147 -0
- data/docs/ARCHITECTURE.adoc +681 -0
- data/docs/CHUNK_TYPES.adoc +450 -0
- data/docs/CLI_OPTIONS.adoc +913 -0
- data/docs/COMPATIBILITY.adoc +616 -0
- data/examples/README.adoc +398 -0
- data/examples/advanced_usage.rb +304 -0
- data/examples/basic_usage.rb +210 -0
- data/exe/png_conform +6 -0
- data/lib/png_conform/analyzers/comparison_analyzer.rb +230 -0
- data/lib/png_conform/analyzers/metrics_analyzer.rb +176 -0
- data/lib/png_conform/analyzers/optimization_analyzer.rb +190 -0
- data/lib/png_conform/analyzers/resolution_analyzer.rb +274 -0
- data/lib/png_conform/bindata/chunk_structure.rb +153 -0
- data/lib/png_conform/bindata/jng_file.rb +79 -0
- data/lib/png_conform/bindata/mng_file.rb +97 -0
- data/lib/png_conform/bindata/png_file.rb +162 -0
- data/lib/png_conform/cli.rb +116 -0
- data/lib/png_conform/commands/check_command.rb +323 -0
- data/lib/png_conform/commands/list_command.rb +67 -0
- data/lib/png_conform/models/chunk.rb +84 -0
- data/lib/png_conform/models/chunk_info.rb +71 -0
- data/lib/png_conform/models/compression_info.rb +49 -0
- data/lib/png_conform/models/decoded_chunk_data.rb +143 -0
- data/lib/png_conform/models/file_analysis.rb +181 -0
- data/lib/png_conform/models/file_info.rb +91 -0
- data/lib/png_conform/models/image_info.rb +52 -0
- data/lib/png_conform/models/validation_error.rb +89 -0
- data/lib/png_conform/models/validation_result.rb +137 -0
- data/lib/png_conform/readers/full_load_reader.rb +113 -0
- data/lib/png_conform/readers/streaming_reader.rb +180 -0
- data/lib/png_conform/reporters/base_reporter.rb +53 -0
- data/lib/png_conform/reporters/color_reporter.rb +65 -0
- data/lib/png_conform/reporters/json_reporter.rb +18 -0
- data/lib/png_conform/reporters/palette_reporter.rb +48 -0
- data/lib/png_conform/reporters/quiet_reporter.rb +18 -0
- data/lib/png_conform/reporters/reporter_factory.rb +108 -0
- data/lib/png_conform/reporters/summary_reporter.rb +65 -0
- data/lib/png_conform/reporters/text_reporter.rb +66 -0
- data/lib/png_conform/reporters/verbose_reporter.rb +87 -0
- data/lib/png_conform/reporters/very_verbose_reporter.rb +33 -0
- data/lib/png_conform/reporters/visual_elements.rb +66 -0
- data/lib/png_conform/reporters/yaml_reporter.rb +18 -0
- data/lib/png_conform/services/profile_manager.rb +242 -0
- data/lib/png_conform/services/validation_service.rb +457 -0
- data/lib/png_conform/services/zlib_validator.rb +270 -0
- data/lib/png_conform/validators/ancillary/bkgd_validator.rb +140 -0
- data/lib/png_conform/validators/ancillary/chrm_validator.rb +178 -0
- data/lib/png_conform/validators/ancillary/cicp_validator.rb +202 -0
- data/lib/png_conform/validators/ancillary/gama_validator.rb +105 -0
- data/lib/png_conform/validators/ancillary/hist_validator.rb +147 -0
- data/lib/png_conform/validators/ancillary/iccp_validator.rb +243 -0
- data/lib/png_conform/validators/ancillary/itxt_validator.rb +280 -0
- data/lib/png_conform/validators/ancillary/mdcv_validator.rb +201 -0
- data/lib/png_conform/validators/ancillary/offs_validator.rb +132 -0
- data/lib/png_conform/validators/ancillary/pcal_validator.rb +289 -0
- data/lib/png_conform/validators/ancillary/phys_validator.rb +107 -0
- data/lib/png_conform/validators/ancillary/sbit_validator.rb +176 -0
- data/lib/png_conform/validators/ancillary/scal_validator.rb +180 -0
- data/lib/png_conform/validators/ancillary/splt_validator.rb +223 -0
- data/lib/png_conform/validators/ancillary/srgb_validator.rb +117 -0
- data/lib/png_conform/validators/ancillary/ster_validator.rb +111 -0
- data/lib/png_conform/validators/ancillary/text_validator.rb +129 -0
- data/lib/png_conform/validators/ancillary/time_validator.rb +132 -0
- data/lib/png_conform/validators/ancillary/trns_validator.rb +154 -0
- data/lib/png_conform/validators/ancillary/ztxt_validator.rb +173 -0
- data/lib/png_conform/validators/apng/actl_validator.rb +81 -0
- data/lib/png_conform/validators/apng/fctl_validator.rb +155 -0
- data/lib/png_conform/validators/apng/fdat_validator.rb +117 -0
- data/lib/png_conform/validators/base_validator.rb +241 -0
- data/lib/png_conform/validators/chunk_registry.rb +219 -0
- data/lib/png_conform/validators/critical/idat_validator.rb +77 -0
- data/lib/png_conform/validators/critical/iend_validator.rb +68 -0
- data/lib/png_conform/validators/critical/ihdr_validator.rb +160 -0
- data/lib/png_conform/validators/critical/plte_validator.rb +120 -0
- data/lib/png_conform/validators/jng/jdat_validator.rb +66 -0
- data/lib/png_conform/validators/jng/jhdr_validator.rb +116 -0
- data/lib/png_conform/validators/jng/jsep_validator.rb +66 -0
- data/lib/png_conform/validators/mng/back_validator.rb +87 -0
- data/lib/png_conform/validators/mng/clip_validator.rb +65 -0
- data/lib/png_conform/validators/mng/clon_validator.rb +45 -0
- data/lib/png_conform/validators/mng/defi_validator.rb +104 -0
- data/lib/png_conform/validators/mng/dhdr_validator.rb +104 -0
- data/lib/png_conform/validators/mng/disc_validator.rb +44 -0
- data/lib/png_conform/validators/mng/endl_validator.rb +65 -0
- data/lib/png_conform/validators/mng/fram_validator.rb +91 -0
- data/lib/png_conform/validators/mng/loop_validator.rb +75 -0
- data/lib/png_conform/validators/mng/mend_validator.rb +31 -0
- data/lib/png_conform/validators/mng/mhdr_validator.rb +69 -0
- data/lib/png_conform/validators/mng/move_validator.rb +61 -0
- data/lib/png_conform/validators/mng/save_validator.rb +39 -0
- data/lib/png_conform/validators/mng/seek_validator.rb +42 -0
- data/lib/png_conform/validators/mng/show_validator.rb +52 -0
- data/lib/png_conform/validators/mng/term_validator.rb +84 -0
- data/lib/png_conform/version.rb +5 -0
- data/lib/png_conform.rb +101 -0
- data/png_conform.gemspec +43 -0
- metadata +201 -0
|
@@ -0,0 +1,280 @@
|
|
|
1
|
+
# frozen_string_literal: true

require_relative "../base_validator"
require "zlib"

module PngConform
  module Validators
    module Ancillary
      # Validator for PNG iTXt (International Textual Data) chunk
      #
      # iTXt contains international textual information with UTF-8 encoding:
      # - Keyword (1-79 bytes, Latin-1)
      # - Null separator (1 byte)
      # - Compression flag (1 byte, 0=uncompressed, 1=compressed)
      # - Compression method (1 byte, must be 0 if compressed)
      # - Language tag (0+ bytes, ASCII)
      # - Null separator (1 byte)
      # - Translated keyword (0+ bytes, UTF-8)
      # - Null separator (1 byte)
      # - Text (0+ bytes, UTF-8, possibly compressed)
      #
      # Validation rules from PNG spec:
      # - Keyword must be 1-79 characters, Latin-1 printable
      # - Keyword must not have leading/trailing/consecutive spaces
      # - Compression flag must be 0 or 1
      # - Compression method must be 0 if compressed
      # - Language tag must be ASCII (RFC 3066 format)
      # - Translated keyword and text must be valid UTF-8
      # - Multiple iTXt chunks allowed with different keywords
      class ItxtValidator < BaseValidator
        # Maximum keyword length permitted by the PNG specification
        MAX_KEYWORD_LENGTH = 79

        # Latin-1 printable characters (space..tilde plus 0xA1..0xFF)
        PRINTABLE_LATIN1 = ((32..126).to_a + (161..255).to_a).freeze

        # Valid compression flag values
        UNCOMPRESSED = 0
        COMPRESSED = 1

        # Only defined compression method (zlib/deflate)
        COMPRESSION_DEFLATE = 0

        # Validate iTXt chunk: CRC, structure, keyword, flags, language tag,
        # UTF-8 fields, and (when compressed) decompressibility, in order.
        # Each check short-circuits on failure after recording an error.
        #
        # @return [Boolean] True if validation passed
        def validate
          return false unless check_crc
          return false unless check_structure
          return false unless check_keyword
          return false unless check_compression_flags
          return false unless check_language_tag
          return false unless check_utf8_fields
          # NOTE: original tested `compressed? && compressed?`; the duplicate
          # call was redundant and has been removed.
          return false if compressed? && !check_decompression

          store_text_info
          true
        end

        private

        # Check iTXt chunk structure: minimum size and null-separator count.
        def check_structure
          data = chunk.chunk_data

          # Must contain at least keyword + 3 nulls + flags
          if data.length < 5
            add_error("iTXt chunk too short (minimum 5 bytes)")
            return false
          end

          # Must contain three null separators. String#count over a byte
          # string counts occurrences of NUL directly (the original built an
          # index list via data.bytes.each_index only to take its length).
          null_count = data.count("\0")
          if null_count < 3
            add_error("iTXt chunk missing null separators " \
                      "(found #{null_count}, need 3)")
            return false
          end

          true
        end

        # Check keyword validity: non-empty, within length limit, printable
        # Latin-1, and no leading/trailing/consecutive spaces.
        def check_keyword
          data = chunk.chunk_data
          null_pos = data.index("\0")
          keyword = data[0, null_pos]

          if keyword.empty?
            add_error("iTXt chunk has empty keyword")
            return false
          end

          if keyword.length > MAX_KEYWORD_LENGTH
            add_error("iTXt keyword too long (#{keyword.length}, " \
                      "max #{MAX_KEYWORD_LENGTH})")
            return false
          end

          # Every byte must be printable Latin-1
          keyword.bytes.each do |byte|
            next if PRINTABLE_LATIN1.include?(byte)

            add_error("iTXt keyword contains non-printable character " \
                      "(0x#{byte.to_s(16)})")
            return false
          end

          if keyword.start_with?(" ")
            add_error("iTXt keyword has leading space")
            return false
          end

          if keyword.end_with?(" ")
            add_error("iTXt keyword has trailing space")
            return false
          end

          # The spec forbids runs of two or more spaces; a single interior
          # space is legal (e.g. "Creation Time"), so test for a double space.
          if keyword.include?("  ")
            add_error("iTXt keyword has consecutive spaces")
            return false
          end

          true
        end

        # Check compression flag (byte after first NUL) and compression
        # method (next byte): flag must be 0/1, method must be 0 when
        # compression is enabled.
        def check_compression_flags
          data = chunk.chunk_data
          null_pos = data.index("\0")
          compression_flag = data[null_pos + 1].ord
          compression_method = data[null_pos + 2].ord

          unless [UNCOMPRESSED, COMPRESSED].include?(compression_flag)
            add_error("iTXt invalid compression flag " \
                      "(#{compression_flag}, must be 0 or 1)")
            return false
          end

          if compression_flag == COMPRESSED &&
              compression_method != COMPRESSION_DEFLATE
            add_error("iTXt invalid compression method " \
                      "(#{compression_method}, must be 0 when compressed)")
            return false
          end

          true
        end

        # Check language tag (bytes between the flag pair and the second
        # NUL). May be empty; otherwise must be pure ASCII (RFC 3066).
        def check_language_tag
          data = chunk.chunk_data
          first_null = data.index("\0")
          second_null = data.index("\0", first_null + 3)
          lang_tag = data[(first_null + 3)...second_null]

          # Language tag can be empty
          return true if lang_tag.empty?

          # Must be ASCII (0-127)
          lang_tag.bytes.each do |byte|
            next unless byte > 127

            add_error("iTXt language tag contains non-ASCII character " \
                      "(0x#{byte.to_s(16)})")
            return false
          end

          true
        end

        # Check UTF-8 validity of the translated keyword and, when the text
        # is stored uncompressed, of the text field itself.
        def check_utf8_fields
          data = chunk.chunk_data
          first_null = data.index("\0")
          second_null = data.index("\0", first_null + 3)
          third_null = data.index("\0", second_null + 1)

          # Translated keyword
          translated_keyword = data[(second_null + 1)...third_null]
          unless valid_utf8?(translated_keyword)
            add_error("iTXt translated keyword is not valid UTF-8")
            return false
          end

          # Text field (validated after decompression when compressed)
          text_data = data[(third_null + 1)..] || ""
          if !compressed? && !valid_utf8?(text_data)
            add_error("iTXt text is not valid UTF-8")
            return false
          end

          true
        end

        # Check that compressed text inflates cleanly and decodes as UTF-8.
        def check_decompression
          data = chunk.chunk_data
          first_null = data.index("\0")
          third_null = data.index("\0", data.index("\0", first_null + 3) + 1)
          compressed_data = data[(third_null + 1)..] || ""

          begin
            decompressed = Zlib::Inflate.inflate(compressed_data)
            unless valid_utf8?(decompressed)
              add_error("iTXt decompressed text is not valid UTF-8")
              return false
            end
          rescue Zlib::Error => e
            add_error("iTXt decompression failed: #{e.message}")
            return false
          end

          true
        end

        # True when the compression flag byte (first byte after the keyword's
        # NUL terminator) equals COMPRESSED.
        def compressed?
          data = chunk.chunk_data
          null_pos = data.index("\0")
          data[null_pos + 1].ord == COMPRESSED
        end

        # Validate UTF-8 encoding of a byte string. force_encoding mutates
        # the receiver, which is safe here because callers always pass fresh
        # substring copies of the chunk data.
        def valid_utf8?(str)
          str.force_encoding("UTF-8").valid_encoding?
        end

        # Store decoded text information in the shared validation context and
        # emit an informational summary line for reporters.
        def store_text_info
          data = chunk.chunk_data
          first_null = data.index("\0")
          second_null = data.index("\0", first_null + 3)
          third_null = data.index("\0", second_null + 1)

          keyword = data[0, first_null]
          lang_tag = data[(first_null + 3)...second_null]
          translated_keyword = data[(second_null + 1)...third_null]
          text_data = data[(third_null + 1)..] || ""

          # Decompress if needed (validate already proved this succeeds)
          if compressed?
            text = Zlib::Inflate.inflate(text_data)
            comp_info = " (compressed from #{text_data.length} bytes)"
          else
            text = text_data
            comp_info = ""
          end

          # Force UTF-8 encoding on the decoded fields
          text.force_encoding("UTF-8")
          translated_keyword.force_encoding("UTF-8")

          # Store in context (multiple text chunks are allowed, so append)
          texts = context.retrieve(:text_chunks) || []
          texts << {
            keyword: keyword,
            text: text,
            compressed: compressed?,
            language: lang_tag.empty? ? nil : lang_tag,
            translated_keyword: translated_keyword.empty? ? nil : translated_keyword,
          }
          context.store(:text_chunks, texts)

          # Add info about the text chunk (truncate long text for display)
          text_preview = text.length > 40 ? "#{text[0, 40]}..." : text
          lang_info = lang_tag.empty? ? "" : " [#{lang_tag}]"
          trans_info = translated_keyword.empty? ? "" : " (#{translated_keyword})"
          add_info("iTXt: #{keyword}#{lang_info}#{trans_info} = " \
                   "\"#{text_preview}\"#{comp_info}")
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,201 @@
|
|
|
1
|
+
# frozen_string_literal: true

require_relative "../base_validator"

module PngConform
  module Validators
    module Ancillary
      # Validator for mDCv (Mastering Display Color Volume) chunk
      #
      # The mDCv chunk specifies the color volume of the mastering display
      # used for content creation. Introduced in PNG 3rd edition for HDR support.
      #
      # Structure:
      # - Display primaries (12 bytes): 6 x uint16 (x,y for R,G,B)
      #   Each coordinate is in 0.00002 units (range 0-50000 = 0.0-1.0)
      # - White point (4 bytes): 2 x uint16 (x,y)
      #   Each coordinate is in 0.00002 units
      # - Maximum luminance (4 bytes): uint32
      #   In 0.0001 cd/m² units
      # - Minimum luminance (4 bytes): uint32
      #   In 0.0001 cd/m² units
      #
      # Total: 24 bytes
      #
      # Constraints:
      # - Must contain exactly 24 bytes
      # - Must appear before PLTE and IDAT
      # - At most one mDCv chunk allowed
      # - Coordinates must be in range 0-50000 (0.0-1.0 in CIE 1931)
      # - Maximum luminance must be > minimum luminance
      #
      class MdcvValidator < BaseValidator
        CHUNK_TYPE = "mDCV"

        # Maximum coordinate value (0.00002 * 50000 = 1.0)
        MAX_COORDINATE = 50_000

        # Coordinate scale factor
        COORDINATE_SCALE = 0.00002

        # Luminance scale factor (cd/m²)
        LUMINANCE_SCALE = 0.0001

        # Run all mDCv checks. Field validation is only attempted when the
        # chunk has the exact expected size, to avoid reading past the data.
        def validate
          check_chunk_length
          check_uniqueness
          check_position
          validate_fields if chunk.chunk_data.bytesize == 24
        end

        private

        # The chunk payload must be exactly 24 bytes.
        def check_chunk_length
          return if check_length(24)

          add_error("invalid chunk length: #{chunk.chunk_data.bytesize} bytes")
        end

        # At most one mDCv chunk is allowed per file.
        def check_uniqueness
          return unless context.seen?(CHUNK_TYPE)

          add_error("duplicate mDCV chunk (only one allowed)")
        end

        # The chunk must precede both PLTE and IDAT.
        def check_position
          add_error("mDCv must appear before PLTE") if context.seen?("PLTE")

          return unless context.seen?("IDAT")

          add_error("mDCv must appear before IDAT")
        end

        # Decode and validate all fields, then record informational output.
        def validate_fields
          # FIX: the original read `chunk.data`; every other accessor in this
          # validator (and its siblings) uses `chunk.chunk_data`.
          data = chunk.chunk_data

          # Parse all uint16 and uint32 values (big-endian, per PNG)
          red_x = read_uint16(data, 0)
          red_y = read_uint16(data, 2)
          green_x = read_uint16(data, 4)
          green_y = read_uint16(data, 6)
          blue_x = read_uint16(data, 8)
          blue_y = read_uint16(data, 10)
          white_x = read_uint16(data, 12)
          white_y = read_uint16(data, 14)
          max_luminance = read_uint32(data, 16)
          min_luminance = read_uint32(data, 20)

          # Validate display primaries
          validate_primary("red", red_x, red_y)
          validate_primary("green", green_x, green_y)
          validate_primary("blue", blue_x, blue_y)

          # Validate white point
          validate_white_point(white_x, white_y)

          # Validate luminance values
          validate_luminance(max_luminance, min_luminance)

          # Add informational messages about decoded values
          add_display_info(
            red_x, red_y, green_x, green_y, blue_x, blue_y,
            white_x, white_y, max_luminance, min_luminance
          )
        end

        # Range-check one primary's chromaticity pair; warn when x+y exceeds
        # 1.0 (a physically impossible CIE 1931 coordinate).
        def validate_primary(color, x_value, y_value)
          validate_coordinate("#{color} primary x", x_value)
          validate_coordinate("#{color} primary y", y_value)

          # Check that x + y <= 1.0 (sum <= 50000)
          return unless x_value + y_value > MAX_COORDINATE

          add_warning(
            "#{color} primary coordinates sum > 1.0 " \
            "(x=#{format_coordinate(x_value)}, " \
            "y=#{format_coordinate(y_value)})",
          )
        end

        # Range-check the white point pair; same x+y <= 1.0 sanity warning.
        def validate_white_point(x_value, y_value)
          validate_coordinate("white point x", x_value)
          validate_coordinate("white point y", y_value)

          # Check that x + y <= 1.0
          return unless x_value + y_value > MAX_COORDINATE

          add_warning(
            "white point coordinates sum > 1.0 " \
            "(x=#{format_coordinate(x_value)}, " \
            "y=#{format_coordinate(y_value)})",
          )
        end

        # A single coordinate must lie in 0..MAX_COORDINATE.
        def validate_coordinate(name, value)
          return if check_range(value, 0, MAX_COORDINATE, name)

          add_error(
            "#{name} out of range: #{value} " \
            "(must be 0-#{MAX_COORDINATE})",
          )
        end

        # Maximum luminance must exceed minimum; warn on degenerate (0) or
        # implausibly high (> 10,000 cd/m²) maxima.
        def validate_luminance(max_lum, min_lum)
          # Maximum luminance must be greater than minimum
          if max_lum <= min_lum
            add_error(
              "maximum luminance (#{format_luminance(max_lum)}) must be > " \
              "minimum luminance (#{format_luminance(min_lum)})",
            )
          end

          # Check for reasonable luminance ranges
          add_warning("maximum luminance is 0 cd/m²") if max_lum.zero?

          return unless max_lum > 100_000_000 # 10,000 cd/m²

          add_warning(
            "maximum luminance very high: #{format_luminance(max_lum)} " \
            "(> 10,000 cd/m²)",
          )
        end

        # Emit human-readable summaries of the decoded display volume.
        def add_display_info(
          red_x, red_y, green_x, green_y, blue_x, blue_y,
          white_x, white_y, max_lum, min_lum
        )
          add_info(
            "mastering display: " \
            "R=(#{format_coordinate(red_x)},#{format_coordinate(red_y)}), " \
            "G=(#{format_coordinate(green_x)},#{format_coordinate(green_y)}), " \
            "B=(#{format_coordinate(blue_x)},#{format_coordinate(blue_y)})",
          )

          add_info(
            "white point: " \
            "(#{format_coordinate(white_x)},#{format_coordinate(white_y)})",
          )

          add_info(
            "luminance range: " \
            "#{format_luminance(min_lum)} - #{format_luminance(max_lum)} cd/m²",
          )
        end

        # Read a big-endian unsigned 16-bit integer at the given byte offset.
        def read_uint16(data, offset)
          data[offset, 2].unpack1("n")
        end

        # Read a big-endian unsigned 32-bit integer at the given byte offset.
        def read_uint32(data, offset)
          data[offset, 4].unpack1("N")
        end

        # Convert a raw coordinate to its 0.0-1.0 decimal representation.
        def format_coordinate(value)
          (value * COORDINATE_SCALE).round(5).to_s
        end

        # Convert a raw luminance value to cd/m².
        def format_luminance(value)
          (value * LUMINANCE_SCALE).round(4).to_s
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,132 @@
|
|
|
1
|
+
# frozen_string_literal: true

require_relative "../base_validator"

module PngConform
  module Validators
    module Ancillary
      # Validator for PNG oFFs (Image Offset) chunk
      #
      # oFFs specifies the offset of the image from a reference point:
      # - X position (4 bytes, signed)
      # - Y position (4 bytes, signed)
      # - Unit specifier (1 byte)
      #
      # Validation rules from PNG spec:
      # - Chunk must be exactly 9 bytes
      # - Unit must be 0 (pixels) or 1 (micrometers)
      # - Must appear before IDAT chunk
      # - Only one oFFs chunk allowed
      class OffsValidator < BaseValidator
        # Expected chunk length
        EXPECTED_LENGTH = 9

        # Valid unit specifiers
        UNIT_PIXELS = 0
        UNIT_MICROMETERS = 1
        VALID_UNITS = [UNIT_PIXELS, UNIT_MICROMETERS].freeze

        # Unit names for display
        UNIT_NAMES = {
          UNIT_PIXELS => "pixels",
          UNIT_MICROMETERS => "micrometers",
        }.freeze

        # Validate oFFs chunk: CRC, uniqueness, position, length, and unit.
        # Each check short-circuits on failure after recording an error.
        #
        # @return [Boolean] True if validation passed
        def validate
          return false unless check_crc
          return false unless check_uniqueness
          return false unless check_position
          return false unless check_length
          return false unless check_unit

          store_offset_info
          true
        end

        private

        # Check that only one oFFs chunk exists (flag is set by a previous
        # oFFs chunk via store_offset_info).
        def check_uniqueness
          if context.retrieve(:has_offset)
            add_error("Multiple oFFs chunks (only one allowed)")
            return false
          end

          true
        end

        # Check that oFFs appears before IDAT
        def check_position
          if context.seen?("IDAT")
            add_error("oFFs chunk after IDAT chunk")
            return false
          end

          true
        end

        # Check chunk length is exactly EXPECTED_LENGTH bytes
        def check_length
          actual_length = chunk.chunk_data.length

          unless actual_length == EXPECTED_LENGTH
            add_error("oFFs chunk wrong length (#{actual_length} bytes, " \
                      "expected #{EXPECTED_LENGTH})")
            return false
          end

          true
        end

        # Check unit specifier (last byte) is a defined value
        def check_unit
          data = chunk.chunk_data
          unit = data[8].ord

          unless VALID_UNITS.include?(unit)
            add_error("oFFs invalid unit specifier (#{unit}, " \
                      "must be 0 or 1)")
            return false
          end

          true
        end

        # Decode the offset fields, store them in the shared context, and
        # emit an informational summary line.
        def store_offset_info
          data = chunk.chunk_data

          # X and Y positions are signed 32-bit big-endian integers.
          # unpack1("l>") replaces the original hand-rolled shift/OR plus
          # manual two's-complement adjustment.
          x_pos = data[0, 4].unpack1("l>")
          y_pos = data[4, 4].unpack1("l>")

          # Parse unit
          unit = data[8].ord
          unit_name = UNIT_NAMES[unit]

          # Store in context
          context.store(:has_offset, true)
          context.store(:offset_x, x_pos)
          context.store(:offset_y, y_pos)
          context.store(:offset_unit, unit)

          # Add info about the offset
          add_info("oFFs: position (#{x_pos}, #{y_pos}) #{unit_name}")
        end
      end
    end
  end
end
|