woff 1.0.0 → 1.1.0

Sign up to get free protection for your applications and to get access to all the features.
@@ -0,0 +1,2504 @@
1
+ #! /usr/bin/env python
2
+
3
+ """
4
+ A module for validating the file structure of WOFF Files.
5
+ *validateFont* is the only public function.
6
+
7
+ This can also be used as a command line tool for validating WOFF files.
8
+ """
9
+
10
+ # import
11
+
12
+ import os
13
+ import re
14
+ import time
15
+ import sys
16
+ import struct
17
+ import zlib
18
+ import optparse
19
+ import codecs
20
+ from cStringIO import StringIO
21
+ from xml.etree import ElementTree
22
+ from xml.parsers.expat import ExpatError
23
+
24
+ # ----------------------
25
+ # Support: Metadata Spec
26
+ # ----------------------
27
+
28
+ """
29
+ The Extended Metadata specifications are defined as a set of
30
+ nested Python objects. This allows for a very simple XML
31
+ validation procedure. The common element structure is as follows:
32
+
33
+ {
34
+ # ----------
35
+ # Attributes
36
+ # ----------
37
+
38
+ # In all cases, the dictionary has the attribute name at the top
39
+ # with the possible value(s) as the value. If an attribute has
40
+ # more than one representation (for example xml:lang and lang)
41
+ # the two are specified as a space separated string for example
42
+ # "xml:lang lang".
43
+
44
+ # Required
45
+ "requiredAttributes" : {
46
+ # empty or one or more of the following
47
+ "name" : "default as string, list of options or None"
48
+ },
49
+
50
+ # Recommended
51
+ "recommendedAttributes" : {
52
+ # empty or one or more of the following
53
+ "name" : "default as string, list of options or None"
54
+ },
55
+
56
+ # Optional
57
+ "optionalAttributes" : {
58
+ # empty or one or more of the following
59
+ "name" : "default as string, list of options or None"
60
+ },
61
+
62
+ # -------
63
+ # Content
64
+ # -------
65
+
66
+ "contentLevel" : "not allowed", "recommended" or "required",
67
+
68
+ # --------------
69
+ # Child Elements
70
+ # --------------
71
+
72
+ # In all cases, the dictionary has the element name at the top
73
+ # with a dictionary as the value. The value dictionary defines
74
+ # the number of times the child-element may occur along with
75
+ # the specification for the child-element.
76
+
77
+ # Required
78
+ "requiredChildElements" : {
79
+ # empty or one or more of the following
80
+ "name" : {
81
+ "minimumOccurrences" : int or None,
82
+ "maximumOccurrences" : int or None,
83
+ "spec" : {}
84
+ }
85
+ },
86
+
87
+ # Recommended
88
+ "recommendedChildElements" : {
89
+ # empty or one or more of the following
90
+ "name" : {
91
+ # minimumOccurrences is implicitly 0
92
+ "maximumOccurrences" : int or None,
93
+ "spec" : {}
94
+ }
95
+ },
96
+
97
+ # Optional
98
+ "optionalChildElements" : {
99
+ # empty or one or more of the following
100
+ "name" : {
101
+ # minimumOccurrences is implicitly 0
102
+ "maximumOccurrences" : int or None,
103
+ "spec" : {}
104
+ }
105
+ }
106
+ }
107
+
108
+ The recommendedAttributes and recommendedChildElements are optional
109
+ but they are separated from the optionalAttributes and optionalChildElements
110
+ to allow for more detailed reporting.
111
+ """
112
+
113
+ # Metadata 1.0
114
+ # ------------
115
+
116
+ # Common Options
117
+
118
+ dirOptions_1_0 = ["ltr", "rtl"]
119
+
120
+ # Fourth-Level Elements
121
+
122
+ divSpec_1_0 = {
123
+ "requiredAttributes" : {},
124
+ "recommendedAttributes" : {},
125
+ "optionalAttributes" : {
126
+ "dir" : dirOptions_1_0,
127
+ "class" : None
128
+ },
129
+ "content" : "recommended",
130
+ "requiredChildElements" : {},
131
+ "recommendedChildElements" : {},
132
+ "optionalChildElements" : {
133
+ "div" : {
134
+ "maximumOccurrences" : None,
135
+ "spec" : "recursive divSpec_1_0" # special override for recursion.
136
+ },
137
+ "span" : {
138
+ "maximumOccurrences" : None,
139
+ "spec" : "recursive spanSpec_1_0" # special override for recursion.
140
+ }
141
+ }
142
+ }
143
+
144
+ spanSpec_1_0 = {
145
+ "requiredAttributes" : {},
146
+ "recommendedAttributes" : {},
147
+ "optionalAttributes" : {
148
+ "dir" : dirOptions_1_0,
149
+ "class" : None
150
+ },
151
+ "content" : "recommended",
152
+ "requiredChildElements" : {},
153
+ "recommendedChildElements" : {},
154
+ "optionalChildElements" : {
155
+ "div" : {
156
+ "maximumOccurrences" : None,
157
+ "spec" : "recursive divSpec_1_0" # special override for recursion.
158
+ },
159
+ "span" : {
160
+ "maximumOccurrences" : None,
161
+ "spec" : "recursive spanSpec_1_0" # special override for recursion.
162
+ }
163
+ }
164
+ }
165
+
166
+ # Third-Level Elements
167
+
168
+ creditSpec_1_0 = {
169
+ "requiredAttributes" : {
170
+ "name" : None
171
+ },
172
+ "recommendedAttributes" : {},
173
+ "optionalAttributes" : {
174
+ "url" : None,
175
+ "role" : None,
176
+ "dir" : dirOptions_1_0,
177
+ "class" : None
178
+ },
179
+ "content" : "not allowed",
180
+ "requiredChildElements" : {},
181
+ "recommendedChildElements" : {},
182
+ "optionalChildElements" : {}
183
+ }
184
+
185
+ textSpec_1_0 = {
186
+ "requiredAttributes" : {},
187
+ "recommendedAttributes" : {},
188
+ "optionalAttributes" : {
189
+ "url" : None,
190
+ "role" : None,
191
+ "dir" : dirOptions_1_0,
192
+ "class" : None,
193
+ "xml:lang lang" : None
194
+ },
195
+ "content" : "recommended",
196
+ "requiredChildElements" : {},
197
+ "recommendedChildElements" : {},
198
+ "optionalChildElements" : {
199
+ "div" : {
200
+ "maximumOccurrences" : None,
201
+ "spec" : divSpec_1_0
202
+ },
203
+ "span" : {
204
+ "maximumOccurrences" : None,
205
+ "spec" : spanSpec_1_0
206
+ }
207
+ }
208
+ }
209
+
210
+ extensionNameSpec_1_0 = {
211
+ "requiredAttributes" : {},
212
+ "recommendedAttributes" : {},
213
+ "optionalAttributes" : {
214
+ "dir" : dirOptions_1_0,
215
+ "class" : None,
216
+ "xml:lang lang" : None
217
+ },
218
+ "content" : "recommended",
219
+ "requiredChildElements" : {},
220
+ "recommendedChildElements" : {},
221
+ "optionalChildElements" : {}
222
+ }
223
+
224
+ extensionValueSpec_1_0 = {
225
+ "requiredAttributes" : {},
226
+ "recommendedAttributes" : {},
227
+ "optionalAttributes" : {
228
+ "dir" : dirOptions_1_0,
229
+ "class" : None,
230
+ "xml:lang lang" : None
231
+ },
232
+ "content" : "recommended",
233
+ "requiredChildElements" : {},
234
+ "recommendedChildElements" : {},
235
+ "optionalChildElements" : {}
236
+ }
237
+
238
+ extensionItemSpec_1_0 = {
239
+ "requiredAttributes" : {},
240
+ "recommendedAttributes" : {},
241
+ "optionalAttributes" : {
242
+ "id" : None
243
+ },
244
+ "content" : "not allowed",
245
+ "requiredChildElements" : {
246
+ "name" : {
247
+ "minimumOccurrences" : 1,
248
+ "maximumOccurrences" : None,
249
+ "spec" : extensionNameSpec_1_0
250
+ },
251
+ "value" : {
252
+ "minimumOccurrences" : 1,
253
+ "maximumOccurrences" : None,
254
+ "spec" : extensionValueSpec_1_0
255
+ }
256
+ },
257
+ "recommendedChildElements" : {
258
+ },
259
+ "optionalChildElements" : {}
260
+ }
261
+
262
+ # Second Level Elements
263
+
264
+ uniqueidSpec_1_0 = {
265
+ "requiredAttributes" : {
266
+ "id" : None
267
+ },
268
+ "recommendedAttributes" : {},
269
+ "optionalAttributes" : {},
270
+ "content" : "not allowed",
271
+ "requiredChildElements" : {},
272
+ "recommendedChildElements" : {},
273
+ "optionalChildElements" : {}
274
+ }
275
+
276
+ vendorSpec_1_0 = {
277
+ "requiredAttributes" : {
278
+ "name" : None
279
+ },
280
+ "recommendedAttributes" : {},
281
+ "optionalAttributes" : {
282
+ "url" : None,
283
+ "dir" : dirOptions_1_0,
284
+ "class" : None
285
+ },
286
+ "content" : "not allowed",
287
+ "requiredChildElements" : {},
288
+ "recommendedChildElements" : {},
289
+ "optionalChildElements" : {}
290
+ }
291
+
292
+ creditsSpec_1_0 = {
293
+ "requiredAttributes" : {},
294
+ "recommendedAttributes" : {},
295
+ "optionalAttributes" : {},
296
+ "content" : "not allowed",
297
+ "requiredChildElements" : {
298
+ "credit" : {
299
+ "minimumOccurrences" : 1,
300
+ "maximumOccurrences" : None,
301
+ "spec" : creditSpec_1_0
302
+ }
303
+ },
304
+ "recommendedChildElements" : {},
305
+ "optionalChildElements" : {}
306
+ }
307
+
308
+ descriptionSpec_1_0 = {
309
+ "requiredAttributes" : {},
310
+ "recommendedAttributes" : {},
311
+ "optionalAttributes" : {
312
+ "url" : None,
313
+ },
314
+ "content" : "not allowed",
315
+ "requiredChildElements" : {
316
+ "text" : {
317
+ "minimumOccurrences" : 1,
318
+ "maximumOccurrences" : None,
319
+ "spec" : textSpec_1_0
320
+ }
321
+ },
322
+ "recommendedChildElements" : {},
323
+ "optionalChildElements" : {}
324
+ }
325
+
326
+ licenseSpec_1_0 = {
327
+ "requiredAttributes" : {},
328
+ "recommendedAttributes" : {},
329
+ "optionalAttributes" : {
330
+ "url" : None,
331
+ "id" : None
332
+ },
333
+ "content" : "not allowed",
334
+ "requiredChildElements" : {},
335
+ "recommendedChildElements" : {},
336
+ "optionalChildElements" : {
337
+ "text" : {
338
+ "maximumOccurrences" : None,
339
+ "spec" : textSpec_1_0
340
+ }
341
+ }
342
+ }
343
+
344
+ copyrightSpec_1_0 = {
345
+ "requiredAttributes" : {},
346
+ "recommendedAttributes" : {},
347
+ "optionalAttributes" : {},
348
+ "content" : "not allowed",
349
+ "requiredChildElements" : {
350
+ "text" : {
351
+ "minimumOccurrences" : 1,
352
+ "maximumOccurrences" : None,
353
+ "spec" : textSpec_1_0
354
+ }
355
+ },
356
+ "recommendedChildElements" : {},
357
+ "optionalChildElements" : {}
358
+ }
359
+
360
+ trademarkSpec_1_0 = {
361
+ "requiredAttributes" : {},
362
+ "recommendedAttributes" : {},
363
+ "optionalAttributes" : {},
364
+ "content" : "not allowed",
365
+ "requiredChildElements" : {
366
+ "text" : {
367
+ "minimumOccurrences" : 1,
368
+ "maximumOccurrences" : None,
369
+ "spec" : textSpec_1_0
370
+ }
371
+ },
372
+ "recommendedChildElements" : {},
373
+ "optionalChildElements" : {}
374
+ }
375
+
376
+ licenseeSpec_1_0 = {
377
+ "requiredAttributes" : {
378
+ "name" : None,
379
+ },
380
+ "recommendedAttributes" : {},
381
+ "optionalAttributes" : {
382
+ "dir" : dirOptions_1_0,
383
+ "class" : None
384
+ },
385
+ "content" : "not allowed",
386
+ "requiredChildElements" : {},
387
+ "recommendedChildElements" : {},
388
+ "optionalChildElements" : {}
389
+ }
390
+
391
+ extensionSpec_1_0 = {
392
+ "requiredAttributes" : {},
393
+ "recommendedAttributes" : {},
394
+ "optionalAttributes" : {
395
+ "id" : None
396
+ },
397
+ "content" : "not allowed",
398
+ "requiredChildElements" : {
399
+ "item" : {
400
+ "minimumOccurrences" : 1,
401
+ "maximumOccurrences" : None,
402
+ "spec" : extensionItemSpec_1_0
403
+ }
404
+ },
405
+ "recommendedChildElements" : {},
406
+ "optionalChildElements" : {
407
+ "name" : {
408
+ "maximumOccurrences" : None,
409
+ "spec" : extensionNameSpec_1_0
410
+ }
411
+ }
412
+ }
413
+
414
+ # First Level Elements
415
+
416
# Specification for the root <metadata> element, version 1.0.
# NOTE: the original literal defined the "licensee" key twice; in a dict
# literal the second entry silently overwrites the first, so the duplicate
# has been removed here with no change in the resulting dictionary.
metadataSpec_1_0 = {
    "requiredAttributes" : {
        "version" : "1.0"
    },
    "recommendedAttributes" : {},
    "optionalAttributes" : {},
    "content" : "not allowed",
    "requiredChildElements" : {},
    "recommendedChildElements" : {
        "uniqueid" : {
            "maximumOccurrences" : 1,
            "spec" : uniqueidSpec_1_0
        }
    },
    "optionalChildElements" : {
        "vendor" : {
            "maximumOccurrences" : 1,
            "spec" : vendorSpec_1_0
        },
        "credits" : {
            "maximumOccurrences" : 1,
            "spec" : creditsSpec_1_0
        },
        "description" : {
            "maximumOccurrences" : 1,
            "spec" : descriptionSpec_1_0
        },
        "license" : {
            "maximumOccurrences" : 1,
            "spec" : licenseSpec_1_0
        },
        "copyright" : {
            "maximumOccurrences" : 1,
            "spec" : copyrightSpec_1_0
        },
        "trademark" : {
            "maximumOccurrences" : 1,
            "spec" : trademarkSpec_1_0
        },
        "licensee" : {
            "maximumOccurrences" : 1,
            "spec" : licenseeSpec_1_0
        },
        "extension" : {
            "maximumOccurrences" : None,
            "spec" : extensionSpec_1_0
        }
    }
}
469
+
470
+ # ----------------------
471
+ # Support: struct Helper
472
+ # ----------------------
473
+
474
+ # This was inspired by Just van Rossum's sstruct module.
475
+ # http://fonttools.svn.sourceforge.net/svnroot/fonttools/trunk/Lib/sstruct.py
476
+
477
def structPack(format, obj):
    """
    Pack the mapping *obj* into binary data laid out by *format*.
    The key order of the format definition determines the pack order.
    """
    keys, formatString = _structGetFormat(format)
    return struct.pack(formatString, *[obj[key] for key in keys])
484
+
485
def structUnpack(format, data):
    """
    Unpack *data* according to *format*.
    Returns a (values dict, remaining data) tuple.
    """
    keys, formatString = _structGetFormat(format)
    size = struct.calcsize(formatString)
    unpacked = dict(zip(keys, struct.unpack(formatString, data[:size])))
    return unpacked, data[size:]
494
+
495
def structCalcSize(format):
    """
    Return the packed byte size of *format*.
    """
    formatString = _structGetFormat(format)[1]
    return struct.calcsize(formatString)
498
+
499
+ _structFormatCache = {}
500
+
501
+ def _structGetFormat(format):
502
+ if format not in _structFormatCache:
503
+ keys = []
504
+ formatString = [">"] # always big endian
505
+ for line in format.strip().splitlines():
506
+ line = line.split("#", 1)[0].strip()
507
+ if not line:
508
+ continue
509
+ key, formatCharacter = line.split(":")
510
+ key = key.strip()
511
+ formatCharacter = formatCharacter.strip()
512
+ keys.append(key)
513
+ formatString.append(formatCharacter)
514
+ _structFormatCache[format] = (keys, "".join(formatString))
515
+ return _structFormatCache[format]
516
+
517
+ # -------------
518
+ # Tests: Header
519
+ # -------------
520
+
521
def testHeader(data, reporter):
    """
    Test the WOFF header.
    Returns True as soon as a sub-test reports a fatal problem,
    otherwise False.
    """
    for check in (
        _testHeaderSignature,
        _testHeaderFlavor,
        _testHeaderLength,
        _testHeaderReserved,
        _testHeaderTotalSFNTSize,
        _testHeaderNumTables,
    ):
        if check(data, reporter):
            return True
    return False
538
+
539
+
540
# Field layout of the WOFF header, parsed by _structGetFormat
# (one "name: struct format character" per line, big endian).
headerFormat = """
signature: 4s
flavor: 4s
length: L
numTables: H
reserved: H
totalSfntSize: L
majorVersion: H
minorVersion: H
metaOffset: L
metaLength: L
metaOrigLength: L
privOffset: L
privLength: L
"""
# packed byte size of the header
headerSize = structCalcSize(headerFormat)
556
+
557
def _testHeaderStructure(data, reporter):
    """
    Tests:
    - Header must be the proper structure.

    Returns True (stop further testing) when the header cannot be unpacked.
    """
    try:
        structUnpack(headerFormat, data)
        reporter.logPass(message="The header structure is correct.")
    # narrowed from a bare "except:" so SystemExit/KeyboardInterrupt
    # are not swallowed
    except Exception:
        reporter.logError(message="The header is not properly structured.")
        return True
568
+
569
def _testHeaderSignature(data, reporter):
    """
    Tests:
    - The signature must be "wOFF".

    Returns True (stop further testing) on an invalid signature.
    """
    signature = unpackHeader(data)["signature"]
    if signature == "wOFF":
        reporter.logPass(message="The signature is correct.")
    else:
        reporter.logError(message="Invalid signature: %s." % signature)
        return True
581
+
582
def _testHeaderFlavor(data, reporter):
    """
    Tests:
    - The flavor should be OTTO, 0x00010000 or true. Warn if another value is found.
    - If the flavor is OTTO, the CFF table must be present.
    - If the flavor is not OTTO, the CFF must not be present.
    - If the directory cannot be unpacked, the flavor can not be validated. Issue a warning.
    """
    header = unpackHeader(data)
    flavor = header["flavor"]
    if flavor not in ("OTTO", "\000\001\000\000", "true"):
        reporter.logWarning(message="Unknown flavor: %s." % flavor)
    else:
        try:
            tags = [table["tag"] for table in unpackDirectory(data)]
            if "CFF " in tags and flavor != "OTTO":
                reporter.logError(message="A \"CFF\" table is defined in the font and the flavor is not set to \"OTTO\".")
            elif "CFF " not in tags and flavor == "OTTO":
                reporter.logError(message="The flavor is set to \"OTTO\" but no \"CFF\" table is defined.")
            else:
                reporter.logPass(message="The flavor is a correct value.")
        # narrowed from a bare "except:"; a malformed directory is reported
        # as a warning because it is validated in detail elsewhere
        except Exception:
            reporter.logWarning(message="Could not validate the flavor.")
605
+
606
def _testHeaderLength(data, reporter):
    """
    Tests:
    - The length of the data must match the defined length.
    - The length of the data must be long enough for header and directory for defined number of tables.
    - The length of the data must be long enough to contain the table lengths defined in the directory,
      the metaLength and the privLength.
    """
    header = unpackHeader(data)
    length = header["length"]
    numTables = header["numTables"]
    # smallest possible file: header plus one directory entry per table
    minLength = headerSize + (directorySize * numTables)
    if length != len(data):
        reporter.logError(message="Defined length (%d) does not match actual length of the data (%d)." % (length, len(data)))
        return
    if length < minLength:
        reporter.logError(message="Invalid length defined (%d) for number of tables defined." % length)
        return
    directory = unpackDirectory(data)
    for entry in directory:
        compLength = entry["compLength"]
        # each stored table is padded up to a 4-byte boundary
        if compLength % 4:
            compLength += 4 - (compLength % 4)
        minLength += compLength
    metaLength = privLength = 0
    if header["metaOffset"]:
        metaLength = header["metaLength"]
    if header["privOffset"]:
        privLength = header["privLength"]
    # the metadata block is only padded when private data follows it
    if privLength and metaLength % 4:
        metaLength += 4 - (metaLength % 4)
    minLength += metaLength + privLength
    if length < minLength:
        reporter.logError(message="Defined length (%d) does not match the required length of the data (%d)." % (length, minLength))
        return
    reporter.logPass(message="The length defined in the header is correct.")
642
+
643
def _testHeaderReserved(data, reporter):
    """
    Tests:
    - The reserved bit must be set to 0.
    """
    reserved = unpackHeader(data)["reserved"]
    if reserved == 0:
        reporter.logPass(message="The value in the reserved field is correct.")
    else:
        reporter.logError(message="Invalid value in reserved field (%d)." % reserved)
654
+
655
def _testHeaderTotalSFNTSize(data, reporter):
    """
    Tests:
    - The size of the unpacked SFNT data must be a multiple of 4.
    - The origLength values in the directory, with proper padding, must sum
      to the totalSfntSize in the header.
    """
    header = unpackHeader(data)
    directory = unpackDirectory(data)
    totalSfntSize = header["totalSfntSize"]
    isValid = True
    if totalSfntSize % 4:
        reporter.logError(message="The total sfnt size (%d) is not a multiple of four." % totalSfntSize)
        isValid = False
    else:
        numTables = header["numTables"]
        # sfntHeaderSize and sfntDirectoryEntrySize are module-level
        # constants defined elsewhere in this file — presumably the packed
        # sizes of the sfnt header and of one sfnt directory entry (verify).
        requiredSize = sfntHeaderSize + (numTables * sfntDirectoryEntrySize)
        for table in directory:
            origLength = table["origLength"]
            # each decompressed table is padded up to a 4-byte boundary
            if origLength % 4:
                origLength += 4 - (origLength % 4)
            requiredSize += origLength
        if totalSfntSize != requiredSize:
            reporter.logError(message="The total sfnt size (%d) does not match the required sfnt size (%d)." % (totalSfntSize, requiredSize))
            isValid = False
    if isValid:
        reporter.logPass(message="The total sfnt size is valid.")
682
+
683
def _testHeaderNumTables(data, reporter):
    """
    Tests:
    - The number of tables must be at least 1.
    - The directory entries for the specified number of tables must be properly formatted.
    """
    header = unpackHeader(data)
    numTables = header["numTables"]
    if numTables < 1:
        reporter.logError(message="Invalid number of tables defined in header structure (%d)." % numTables)
        return
    # walk the directory entries that follow the header; running out of
    # unpackable data means fewer entries exist than the header claims
    data = data[headerSize:]
    for index in range(numTables):
        try:
            d, data = structUnpack(directoryFormat, data)
        # narrowed from a bare "except:" so SystemExit/KeyboardInterrupt
        # are not swallowed
        except Exception:
            reporter.logError(message="The defined number of tables in the header (%d) does not match the actual number of tables (%d)." % (numTables, index))
            return
    reporter.logPass(message="The number of tables defined in the header is valid.")
702
+
703
+ # -------------
704
+ # Tests: Tables
705
+ # -------------
706
+
707
def testDataBlocks(data, reporter):
    """
    Test the WOFF data blocks.
    Returns True as soon as a sub-test reports a fatal problem,
    otherwise False.
    """
    functions = [
        _testBlocksOffsetLengthZero,
        _testBlocksPositioning
    ]
    for function in functions:
        shouldStop = function(data, reporter)
        if shouldStop:
            return True
    # explicit False for consistency with testHeader (previously fell
    # through and returned None; both are falsy, so callers are unaffected)
    return False
719
+
720
def _testBlocksOffsetLengthZero(data, reporter):
    """
    - The metadata must have the offset and length set to zero consistently.
    - The private data must have the offset and length set to zero consistently.
    """
    header = unpackHeader(data)
    # (offset, length, pass message, error template) per optional block
    blocks = [
        (header["metaOffset"], header["metaLength"],
         "The length and offset are appropriately set for empty metadata.",
         "The metadata offset (%d) and metadata length (%d) are not properly set. If one is 0, they both must be 0."),
        (header["privOffset"], header["privLength"],
         "The length and offset are appropriately set for empty private data.",
         "The private data offset (%d) and private data length (%d) are not properly set. If one is 0, they both must be 0."),
    ]
    for offset, length, passMessage, errorTemplate in blocks:
        if offset == 0 or length == 0:
            if offset == 0 and length == 0:
                reporter.logPass(message=passMessage)
            else:
                reporter.logError(message=errorTemplate % (offset, length))
742
+
743
def _testBlocksPositioning(data, reporter):
    """
    Tests:
    - The table data must start immediately after the directory.
    - The table data must end at the beginning of the metadata, the beginning of the private data or the end of the file.
    - The metadata must start immediately after the table data.
    - The metadata must end at the beginning of the private data (padded as needed) or the end of the file.
    - The private data must start immediately after the table data or metadata.
    - The private data must end at the edge of the file.
    """
    header = unpackHeader(data)
    # table data start: must follow the header + directory immediately
    directory = unpackDirectory(data)
    if not directory:
        return
    expectedTableDataStart = headerSize + (directorySize * header["numTables"])
    offsets = [entry["offset"] for entry in directory]
    tableDataStart = min(offsets)
    if expectedTableDataStart != tableDataStart:
        reporter.logError(message="The table data does not start (%d) in the required position (%d)." % (tableDataStart, expectedTableDataStart))
    else:
        reporter.logPass(message="The table data begins in the required position.")
    # table data end: whichever block comes next defines the boundary
    if header["metaOffset"]:
        definedTableDataEnd = header["metaOffset"]
    elif header["privOffset"]:
        definedTableDataEnd = header["privOffset"]
    else:
        definedTableDataEnd = header["length"]
    directory = unpackDirectory(data)
    # farthest padded end of any stored table
    ends = [table["offset"] + table["compLength"] + calcPaddingLength(table["compLength"]) for table in directory]
    expectedTableDataEnd = max(ends)
    if expectedTableDataEnd != definedTableDataEnd:
        reporter.logError(message="The table data end (%d) is not in the required position (%d)." % (definedTableDataEnd, expectedTableDataEnd))
    else:
        reporter.logPass(message="The table data ends in the required position.")
    # metadata
    if header["metaOffset"]:
        # start
        expectedMetaStart = expectedTableDataEnd
        definedMetaStart = header["metaOffset"]
        if expectedMetaStart != definedMetaStart:
            reporter.logError(message="The metadata does not start (%d) in the required position (%d)." % (definedMetaStart, expectedMetaStart))
        else:
            reporter.logPass(message="The metadata begins in the required position.")
        # end: padding is only required when private data follows
        if header["privOffset"]:
            definedMetaEnd = header["privOffset"]
            needMetaPadding = True
        else:
            definedMetaEnd = header["length"]
            needMetaPadding = False
        expectedMetaEnd = header["metaOffset"] + header["metaLength"]
        if needMetaPadding:
            expectedMetaEnd += calcPaddingLength(header["metaLength"])
        if expectedMetaEnd != definedMetaEnd:
            reporter.logError(message="The metadata end (%d) is not in the required position (%d)." % (definedMetaEnd, expectedMetaEnd))
        else:
            reporter.logPass(message="The metadata ends in the required position.")
    # private data
    if header["privOffset"]:
        # start: follows the (padded) metadata when present, else the table data
        if header["metaOffset"]:
            expectedPrivateStart = expectedMetaEnd
        else:
            expectedPrivateStart = expectedTableDataEnd
        definedPrivateStart = header["privOffset"]
        if expectedPrivateStart != definedPrivateStart:
            reporter.logError(message="The private data does not start (%d) in the required position (%d)." % (definedPrivateStart, expectedPrivateStart))
        else:
            reporter.logPass(message="The private data begins in the required position.")
        # end: must reach exactly to the end of the file
        expectedPrivateEnd = header["length"]
        definedPrivateEnd = header["privOffset"] + header["privLength"]
        if expectedPrivateEnd != definedPrivateEnd:
            reporter.logError(message="The private data end (%d) is not in the required position (%d)." % (definedPrivateEnd, expectedPrivateEnd))
        else:
            reporter.logPass(message="The private data ends in the required position.")
821
+
822
+ # ----------------------
823
+ # Tests: Table Directory
824
+ # ----------------------
825
+
826
def testTableDirectory(data, reporter):
    """
    Test the WOFF table directory.
    Returns True as soon as a sub-test reports a fatal problem.
    """
    for check in (
        _testTableDirectoryStructure,
        _testTableDirectory4ByteOffsets,
        _testTableDirectoryPadding,
        _testTableDirectoryPositions,
        _testTableDirectoryCompressedLength,
        _testTableDirectoryDecompressedLength,
        _testTableDirectoryChecksums,
        _testTableDirectoryTableOrder,
    ):
        if check(data, reporter):
            return True
844
+
845
# Field layout of one WOFF table directory entry, parsed by
# _structGetFormat (one "name: struct format character" per line).
directoryFormat = """
tag: 4s
offset: L
compLength: L
origLength: L
origChecksum: L
"""
# packed byte size of a single directory entry
directorySize = structCalcSize(directoryFormat)
853
+
854
def _testTableDirectoryStructure(data, reporter):
    """
    Tests:
    - The entries in the table directory can be unpacked.

    Returns True (stop further testing) when the directory is malformed.
    """
    header = unpackHeader(data)
    numTables = header["numTables"]
    data = data[headerSize:]
    try:
        for index in range(numTables):
            table, data = structUnpack(directoryFormat, data)
        reporter.logPass(message="The table directory structure is correct.")
    # narrowed from a bare "except:" so SystemExit/KeyboardInterrupt
    # are not swallowed
    except Exception:
        reporter.logError(message="The table directory is not properly structured.")
        return True
869
+
870
def _testTableDirectory4ByteOffsets(data, reporter):
    """
    Tests:
    - The font tables must each begin on a 4-byte boundary.
    """
    for entry in unpackDirectory(data):
        tag = entry["tag"]
        offset = entry["offset"]
        if offset % 4 == 0:
            reporter.logPass(message="The \"%s\" table begins on a 4-byte boundary." % tag)
        else:
            reporter.logError(message="The \"%s\" table does not begin on a 4-byte boundary (%d)." % (tag, offset))
883
+
884
def _testTableDirectoryPadding(data, reporter):
    """
    Tests:
    - All tables, including the final table, must be padded to a
      four byte boundary using null bytes as needed.
    """
    header = unpackHeader(data)
    directory = unpackDirectory(data)
    # test final table: the table data block ends where the next block
    # (metadata, private data or end of file) begins
    endError = False  # NOTE(review): assigned but never used
    sfntEnd = None
    if header["metaOffset"] != 0:
        sfntEnd = header["metaOffset"]
    elif header["privOffset"] != 0:
        sfntEnd = header["privOffset"]
    else:
        sfntEnd = header["length"]
    if sfntEnd % 4:
        reporter.logError(message="The sfnt data does not end with proper padding.")
    else:
        reporter.logPass(message="The sfnt data ends with proper padding.")
    # test the bytes used for padding: must be null bytes
    for table in directory:
        tag = table["tag"]
        offset = table["offset"]
        length = table["compLength"]
        paddingLength = calcPaddingLength(length)
        if paddingLength:
            paddingOffset = offset + length
            padding = data[paddingOffset:paddingOffset+paddingLength]
            expectedPadding = "\0" * paddingLength
            if padding != expectedPadding:
                reporter.logError(message="The \"%s\" table is not padded with null bytes." % tag)
            else:
                reporter.logPass(message="The \"%s\" table is padded with null bytes." % tag)
919
+
920
def _testTableDirectoryPositions(data, reporter):
    """
    Tests:
    - The table offsets must not be before the end of the header/directory.
    - The table offset + length must not be greater than the edge of the available space.
    - The table offsets must not be after the edge of the available space.
    - Table blocks must not overlap.
    - There must be no gaps between the tables.
    """
    directory = unpackDirectory(data)
    # tables that failed any check; used to suppress the pass log below
    tablesWithProblems = set()
    # test for overlapping tables
    locations = []
    for table in directory:
        offset = table["offset"]
        length = table["compLength"]
        length = length + calcPaddingLength(length)
        locations.append((offset, offset + length, table["tag"]))
    # compare every pair of padded (start, end) ranges
    for start, end, tag in locations:
        for otherStart, otherEnd, otherTag in locations:
            if tag == otherTag:
                continue
            if start >= otherStart and start < otherEnd:
                reporter.logError(message="The \"%s\" table overlaps the \"%s\" table." % (tag, otherTag))
                tablesWithProblems.add(tag)
                tablesWithProblems.add(otherTag)
    # test for invalid offset, length and combo
    header = unpackHeader(data)
    # the table data block ends where the next block begins
    if header["metaOffset"] != 0:
        tableDataEnd = header["metaOffset"]
    elif header["privOffset"] != 0:
        tableDataEnd = header["privOffset"]
    else:
        tableDataEnd = header["length"]
    numTables = header["numTables"]
    minOffset = headerSize + (directorySize * numTables)
    maxLength = tableDataEnd - minOffset
    for table in directory:
        tag = table["tag"]
        offset = table["offset"]
        length = table["compLength"]
        # offset is before the beginning of the table data block
        if offset < minOffset:
            tablesWithProblems.add(tag)
            message = "The \"%s\" table directory entry offset (%d) is before the start of the table data block (%d)." % (tag, offset, minOffset)
            reporter.logError(message=message)
        # offset is after the end of the table data block
        elif offset > tableDataEnd:
            tablesWithProblems.add(tag)
            message = "The \"%s\" table directory entry offset (%d) is past the end of the table data block (%d)." % (tag, offset, tableDataEnd)
            reporter.logError(message=message)
        # offset + length is after the end of the table data block
        elif (offset + length) > tableDataEnd:
            tablesWithProblems.add(tag)
            message = "The \"%s\" table directory entry offset (%d) + length (%d) is past the end of the table data block (%d)." % (tag, offset, length, tableDataEnd)
            reporter.logError(message=message)
    # test for gaps: sort by padded range, then compare neighbors
    tables = []
    for table in directory:
        tag = table["tag"]
        offset = table["offset"]
        length = table["compLength"]
        length += calcPaddingLength(length)
        tables.append((offset, offset + length, tag))
    tables.sort()
    for index, (start, end, tag) in enumerate(tables):
        if index == 0:
            continue
        prevStart, prevEnd, prevTag = tables[index - 1]
        if prevEnd < start:
            tablesWithProblems.add(prevTag)
            tablesWithProblems.add(tag)
            reporter.logError(message="Extraneous data between the \"%s\" and \"%s\" tables." % (prevTag, tag))
    # log passes for tables with no problems
    for entry in directory:
        tag = entry["tag"]
        if tag in tablesWithProblems:
            continue
        reporter.logPass(message="The \"%s\" table directory entry has a valid offset and length." % tag)
999
+
1000
def _testTableDirectoryCompressedLength(data, reporter):
    """
    Tests:
    - The compressed length must be less than or equal to the original length.
    """
    for entry in unpackDirectory(data):
        tag = entry["tag"]
        compressed = entry["compLength"]
        original = entry["origLength"]
        if compressed > original:
            reporter.logError(message="The \"%s\" table directory entry has a compressed length (%d) larger than the original length (%d)." % (tag, compressed, original))
        else:
            reporter.logPass(message="The \"%s\" table directory entry has proper compLength and origLength values." % tag)
1014
+
1015
def _testTableDirectoryDecompressedLength(data, reporter):
    """
    Tests:
    - The decompressed length of the data must match the defined original length.

    Only tables that are actually compressed (compLength < origLength) are
    checked; tables whose data could not be decompressed are skipped here
    because that failure is reported elsewhere.
    """
    directory = unpackDirectory(data)
    tableData = unpackTableData(data)
    for table in directory:
        tag = table["tag"]
        # note: the original code also read table["offset"] into an
        # unused local; removed.
        compLength = table["compLength"]
        origLength = table["origLength"]
        # stored uncompressed; nothing to verify here
        if compLength >= origLength:
            continue
        decompressedData = tableData[tag]
        # couldn't be decompressed. handled elsewhere.
        if decompressedData is None:
            continue
        decompressedLength = len(decompressedData)
        if origLength != decompressedLength:
            reporter.logError(message="The \"%s\" table directory entry has an original length (%d) that does not match the actual length of the decompressed data (%d)." % (tag, origLength, decompressedLength))
        else:
            reporter.logPass(message="The \"%s\" table directory entry has a proper original length compared to the actual decompressed data." % tag)
1038
+
1039
def _testTableDirectoryChecksums(data, reporter):
    """
    Tests:
    - The checksums for the tables must match the checksums in the directory.
    - The head checksum adjustment must be correct.
    """
    # check the table directory checksums
    directory = unpackDirectory(data)
    tables = unpackTableData(data)
    for entry in directory:
        tag = entry["tag"]
        origChecksum = entry["origChecksum"]
        decompressedData = tables[tag]
        # couldn't be decompressed. handled elsewhere.
        if decompressedData is None:
            continue
        newChecksum = calcChecksum(tag, decompressedData)
        if newChecksum != origChecksum:
            reporter.logError(message="The \"%s\" table directory entry original checksum (%s) does not match the checksum (%s) calculated from the data." % (tag, hex(origChecksum), hex(newChecksum)))
        else:
            reporter.logPass(message="The \"%s\" table directory entry original checksum is correct." % tag)
    # check the head checksum adjustment
    if "head" not in tables:
        reporter.logWarning(message="The font does not contain a \"head\" table.")
    else:
        newChecksum = calcHeadChecksum(data)
        # use a distinct name: the original rebound the "data" parameter
        # here, shadowing the full WOFF data for the rest of the function.
        headData = tables["head"]
        try:
            # checkSumAdjustment lives at offset 8 in the head table
            checksum = struct.unpack(">L", headData[8:12])[0]
        except struct.error:
            # narrowed from a bare "except:", which could also have
            # swallowed unrelated errors raised while reporting
            reporter.logError(message="The \"head\" table is not properly structured.")
        else:
            if checksum != newChecksum:
                reporter.logError(message="The \"head\" table checkSumAdjustment (%s) does not match the calculated checkSumAdjustment (%s)." % (hex(checksum), hex(newChecksum)))
            else:
                reporter.logPass(message="The \"head\" table checkSumAdjustment is valid.")
1074
+
1075
+
1076
def _testTableDirectoryTableOrder(data, reporter):
    """
    Tests:
    - The directory entries must be stored in ascending order based on their tag.
    """
    tags = [entry["tag"] for entry in unpackDirectory(data)]
    if tags == sorted(tags):
        reporter.logPass(message="The table directory entries are stored in the proper order.")
    else:
        reporter.logError(message="The table directory entries are not stored in alphabetical order.")
1086
+
1087
+ # -----------------
1088
+ # Tests: Table Data
1089
+ # -----------------
1090
+
1091
def testTableData(data, reporter):
    """
    Test the table data.

    Returns True if testing should stop, False otherwise.
    """
    checks = (
        _testTableDataDecompression,
    )
    for check in checks:
        if check(data, reporter):
            return True
    return False
1103
+
1104
def _testTableDataDecompression(data, reporter):
    """
    Tests:
    - The table data, when the defined compressed length is less
      than the original length, must be properly compressed.
    """
    for table in unpackDirectory(data):
        tag = table["tag"]
        offset = table["offset"]
        compLength = table["compLength"]
        origLength = table["origLength"]
        # stored uncompressed; nothing to test
        if origLength <= compLength:
            continue
        entryData = data[offset:offset + compLength]
        try:
            # the decompressed bytes are not needed here, only the fact
            # that decompression succeeds (the original bound them to an
            # unused local)
            zlib.decompress(entryData)
        except zlib.error:
            reporter.logError(message="The \"%s\" table data can not be decompressed with zlib." % tag)
        else:
            # logPass moved out of the try block so only zlib failures
            # are caught
            reporter.logPass(message="The \"%s\" table data can be decompressed with zlib." % tag)
1123
+
1124
+ # ----------------
1125
+ # Tests: Metadata
1126
+ # ----------------
1127
+
1128
def testMetadata(data, reporter):
    """
    Test the WOFF metadata.

    Returns True if testing should stop, False otherwise.
    """
    if _shouldSkipMetadataTest(data, reporter):
        return False
    checks = (
        _testMetadataPadding,
        _testMetadataDecompression,
        _testMetadataDecompressedLength,
        _testMetadataParse,
        _testMetadataEncoding,
        _testMetadataStructure
    )
    for check in checks:
        if check(data, reporter):
            return True
    return False
1147
+
1148
def _shouldSkipMetadataTest(data, reporter):
    """
    Used at the start of each metadata test function: logs a note and
    returns True when the font contains no metadata. (Implicitly returns
    None — a falsy value — when metadata is present.)
    """
    header = unpackHeader(data)
    if not header["metaOffset"] or not header["metaLength"]:
        reporter.logNote(message="No metadata to test.")
        return True
1159
+
1160
def _testMetadataPadding(data, reporter):
    """
    Tests:
    - The padding after the metadata must be null bytes.

    Only checked when both metadata and private data are present, since
    padding after the metadata block only matters when another block
    follows it.
    """
    header = unpackHeader(data)
    if not header["metaOffset"] or not header["privOffset"]:
        return
    paddingLength = calcPaddingLength(header["metaLength"])
    if not paddingLength:
        return
    paddingOffset = header["metaOffset"] + header["metaLength"]
    padding = data[paddingOffset:paddingOffset + paddingLength]
    expectedPadding = "\0" * paddingLength
    if padding != expectedPadding:
        reporter.logError(message="The metadata is not padded with null bytes.")
    else:
        # message fix: the original pass message ended with a comma
        reporter.logPass(message="The metadata is padded with null bytes.")
1177
+
1178
+ # does this need to be tested?
1179
+ #
1180
+ # def testMetadataIsCompressed(data, reporter):
1181
+ # """
1182
+ # Tests:
1183
+ # - The metadata must be compressed.
1184
+ # """
1185
+ # if _shouldSkipMetadataTest(data, reporter):
1186
+ # return
1187
+ # header = unpackHeader(data)
1188
+ # length = header["metaLength"]
1189
+ # origLength = header["metaOrigLength"]
1190
+ # if length >= origLength:
1191
# reporter.logError(message="The compressed metadata length (%d) is higher than or equal to the original, uncompressed length (%d)." % (length, origLength))
# return True
# reporter.logPass(message="The compressed metadata length is smaller than the original, uncompressed length.")
1194
+
1195
def _testMetadataDecompression(data, reporter):
    """
    Tests:
    - Metadata must be compressed with zlib.

    Returns True (stop testing) when decompression fails.
    """
    if _shouldSkipMetadataTest(data, reporter):
        return
    compressed = unpackMetadata(data, decompress=False, parse=False)
    try:
        zlib.decompress(compressed)
    except zlib.error:
        reporter.logError(message="The metadata can not be decompressed with zlib.")
        return True
    reporter.logPass(message="The metadata can be decompressed with zlib.")
1209
+
1210
def _testMetadataDecompressedLength(data, reporter):
    """
    Tests:
    - The length of the decompressed metadata must match the defined original length.
    """
    if _shouldSkipMetadataTest(data, reporter):
        return
    header = unpackHeader(data)
    metadata = unpackMetadata(data, parse=False)
    expected = header["metaOrigLength"]
    actual = len(metadata)
    if expected == actual:
        reporter.logPass(message="The decompressed metadata length matches the original metadata length in the header.")
    else:
        reporter.logError(message="The decompressed metadata length (%d) does not match the original metadata length (%d) in the header." % (actual, expected))
1225
+
1226
def _testMetadataParse(data, reporter):
    """
    Tests:
    - The metadata must be well-formed.

    Returns True (stop testing) when the metadata can not be parsed.
    """
    if _shouldSkipMetadataTest(data, reporter):
        return
    metadata = unpackMetadata(data, parse=False)
    try:
        # parse for well-formedness only; the resulting tree was bound
        # to an unused local in the original
        ElementTree.fromstring(metadata)
    except (ExpatError, LookupError):
        # LookupError covers a declared-but-unknown encoding
        reporter.logError(message="The metadata can not be parsed.")
        return True
    reporter.logPass(message="The metadata can be parsed.")
1240
+
1241
def _testMetadataEncoding(data, reporter):
    """
    Tests:
    - The metadata must be UTF-8 encoded.

    The encoding is determined from the BOM, if present, otherwise it is
    sniffed from the XML declaration.
    """
    if _shouldSkipMetadataTest(data, reporter):
        return
    metadata = unpackMetadata(data, parse=False)
    errorMessage = "The metadata encoding is not valid."
    encoding = None
    # check the BOM
    if not metadata.startswith("<"):
        if not metadata.startswith(codecs.BOM_UTF8):
            reporter.logError(message=errorMessage)
            return
        else:
            encoding = "UTF-8"
    # sniff the encoding
    else:
        # quick test to ensure that the regular expression will work.
        # the string must start with <?xml. this will catch
        # other encodings such as: <\x00?\x00x\x00m\x00l
        if not metadata.startswith("<?xml"):
            reporter.logError(message=errorMessage)
            return
        # go to the first occurrence of >
        line = metadata.split(">", 1)[0]
        # find an encoding string. raw string literals so the \s escapes
        # reach the regex engine intact (the originals were non-raw).
        pattern = re.compile(
            r"\s+"
            r"encoding"
            r"\s*"
            r"="
            r"\s*"
            r"[\"']+"
            r"([^\"']+)"
        )
        m = pattern.search(line)
        if m:
            encoding = m.group(1)
        else:
            # no declared encoding defaults to UTF-8 per the XML spec
            encoding = "UTF-8"
    # report
    if encoding != "UTF-8":
        reporter.logError(message=errorMessage)
    else:
        reporter.logPass(message="The metadata is properly encoded.")
1288
+
1289
def _testMetadataStructure(data, reporter):
    """
    Test the metadata structure against the metadata specification
    matching the declared "version" attribute.
    """
    if _shouldSkipMetadataTest(data, reporter):
        return
    tree = unpackMetadata(data)
    # make sure the top element is metadata
    if tree.tag != "metadata":
        # consistency: pass messages with the message= keyword used by
        # every other logging call in this module
        reporter.logError(message="The top element is not \"metadata\".")
        return
    # sniff the version
    version = tree.attrib.get("version")
    if not version:
        reporter.logError(message="The \"version\" attribute is not defined.")
        return
    # grab the appropriate specification
    versionSpecs = {
        "1.0" : metadataSpec_1_0
    }
    spec = versionSpecs.get(version)
    if spec is None:
        reporter.logError(message="Unknown version (\"%s\")." % version)
        return
    haveError = _validateMetadataElement(tree, spec, reporter)
    if not haveError:
        reporter.logPass(message="The \"metadata\" element is properly formatted.")
1316
+
1317
def _validateMetadataElement(element, spec, reporter, parentTree=None):
    """
    Validate one metadata element against its spec entry: unknown
    attributes and child-elements, required/recommended/optional
    attributes and child-elements, and element content.

    parentTree is the list of ancestor tags used for message formatting.
    Returns True if any error was logged for this element or a descendant.
    """
    if parentTree is None:
        # avoid a shared mutable default argument
        parentTree = []
    haveError = False
    # unknown attributes
    knownAttributes = []
    for attrib in list(spec["requiredAttributes"].keys()) + list(spec["recommendedAttributes"].keys()) + list(spec["optionalAttributes"].keys()):
        attrib = _parseAttribute(attrib)
        knownAttributes.append(attrib)
    for attrib in sorted(element.attrib.keys()):
        # the search is a bit complicated because there are
        # attributes that have more than one name.
        found = False
        for knownAttrib in knownAttributes:
            if knownAttrib == attrib:
                found = True
                break
            elif isinstance(knownAttrib, list) and attrib in knownAttrib:
                found = True
                break
        if not found:
            _logMetadataResult(
                reporter,
                "error",
                "Unknown attribute (\"%s\")" % attrib,
                element.tag,
                parentTree
            )
            haveError = True
    # attributes
    s = [
        ("requiredAttributes", "required"),
        ("recommendedAttributes", "recommended"),
        ("optionalAttributes", "optional")
    ]
    for key, requirementLevel in s:
        if spec[key]:
            e = _validateAttributes(element, spec[key], reporter, parentTree, requirementLevel)
            if e:
                haveError = True
    # unknown child-elements
    knownChildElements = list(spec["requiredChildElements"].keys()) + list(spec["recommendedChildElements"].keys()) + list(spec["optionalChildElements"].keys())
    for childElement in element:
        if childElement.tag not in knownChildElements:
            _logMetadataResult(
                reporter,
                "error",
                "Unknown child-element (\"%s\")" % childElement.tag,
                element.tag,
                parentTree
            )
            haveError = True
    # child elements
    s = [
        ("requiredChildElements", "required"),
        ("recommendedChildElements", "recommended"),
        ("optionalChildElements", "optional")
    ]
    for key, requirementLevel in s:
        if spec[key]:
            for childElementTag, childElementData in sorted(spec[key].items()):
                e = _validateChildElements(element, childElementTag, childElementData, reporter, parentTree, requirementLevel)
                if e:
                    haveError = True
    # content
    content = element.text
    if content is not None:
        content = content.strip()
    if content and spec["content"] == "not allowed":
        _logMetadataResult(
            reporter,
            "error",
            "Content defined",
            element.tag,
            parentTree
        )
        haveError = True
    elif not content and spec["content"] == "required":
        # bug fix: the original condition was
        # "not content and content and ..." which can never be True,
        # so missing required content was never reported (and never
        # set haveError).
        _logMetadataResult(
            reporter,
            "error",
            "Content not defined",
            element.tag,
            parentTree
        )
        haveError = True
    elif not content and spec["content"] == "recommended":
        _logMetadataResult(
            reporter,
            "warn",
            "Content not defined",
            element.tag,
            parentTree
        )
    # log the result
    if not haveError and parentTree == ["metadata"]:
        reporter.logPass("The \"%s\" element is properly formatted." % element.tag)
    # done
    return haveError
1413
+
1414
+ def _parseAttribute(attrib):
1415
+ if " " in attrib:
1416
+ final = []
1417
+ for a in attrib.split(" "):
1418
+ if a.startswith("xml:"):
1419
+ a = "{http://www.w3.org/XML/1998/namespace}" + a[4:]
1420
+ final.append(a)
1421
+ return final
1422
+ return attrib
1423
+
1424
+ def _unEtreeAttribute(attrib):
1425
+ ns = "{http://www.w3.org/XML/1998/namespace}"
1426
+ if attrib.startswith(ns):
1427
+ attrib = "xml:" + attrib[len(ns):]
1428
+ return attrib
1429
+
1430
def _validateAttributes(element, spec, reporter, parentTree, requirementLevel):
    """
    Validate the attributes of *element* against one attribute spec
    (required, recommended or optional, per *requirementLevel*).
    Handles mutually exclusive attribute aliases (e.g. "xml:lang lang").
    Returns True if any error was logged.
    """
    haveError = False
    for attrib, valueOptions in sorted(spec.items()):
        # a spec key may expand to a list of mutually exclusive aliases
        attribs = _parseAttribute(attrib)
        if isinstance(attribs, basestring):
            attribs = [attribs]
        # collect which of the aliases actually appear on the element.
        # NOTE: this rebinds the loop variable "attrib"; after this loop
        # it refers to the last alias, which is what later messages use.
        found = []
        for attrib in attribs:
            if attrib in element.attrib:
                found.append(attrib)
        # make strings for reporting
        # NOTE(review): attribString is built but never used below —
        # looks like leftover code; confirm before removing.
        if len(attribs) > 1:
            attribString = ", ".join(["\"%s\"" % _unEtreeAttribute(i) for i in attribs])
        else:
            attribString = "\"%s\"" % attribs[0]
        if len(found) == 0:
            pass
        elif len(found) > 1:
            foundString = ", ".join(["\"%s\"" % _unEtreeAttribute(i) for i in found])
        else:
            foundString = "\"%s\"" % found[0]
        # more than one of the mutually exclusive attributes found
        if len(found) > 1:
            _logMetadataResult(
                reporter,
                "error",
                "More than one mutually exclusive attribute (%s) defined" % foundString,
                element.tag,
                parentTree
            )
            haveError = True
        # missing
        elif len(found) == 0:
            # optional attributes may be absent without comment;
            # required -> error, recommended -> warning
            if requirementLevel == "optional":
                continue
            elif requirementLevel == "required":
                errorLevel = "error"
            else:
                errorLevel = "warn"
            _logMetadataResult(
                reporter,
                errorLevel,
                "%s \"%s\" attribute not defined" % (requirementLevel.title(), attrib),
                element.tag,
                parentTree
            )
            # only a missing *required* attribute is an error
            if requirementLevel == "required":
                haveError = True
        # incorrect value
        else:
            e = _validateAttributeValue(element, found[0], valueOptions, reporter, parentTree)
            if e:
                haveError = True
    # done
    return haveError
1485
+
1486
def _validateAttributeValue(element, attrib, valueOptions, reporter, parentTree):
    """
    Validate the value of one attribute that is present on *element*.
    *valueOptions* is None (any value allowed), a single string, or a
    list of allowed strings. Returns True if an error was logged.
    """
    haveError = False
    value = element.attrib[attrib]
    # normalize a single allowed value to a one-item list
    if isinstance(valueOptions, basestring):
        valueOptions = [valueOptions]
    # no defined value options
    if valueOptions is None:
        # the string is empty: any value is legal, but an empty one is
        # suspicious enough to warrant a warning (not an error)
        if not value:
            _logMetadataResult(
                reporter,
                "warn",
                "Value for the \"%s\" attribute is an empty string" % attrib,
                element.tag,
                parentTree
            )
    # illegal value
    elif value not in valueOptions:
        _logMetadataResult(
            reporter,
            "error",
            "Invalid value (\"%s\") for the \"%s\" attribute" % (value, attrib),
            element.tag,
            parentTree
        )
        haveError = True
    # return the error state
    return haveError
1514
+
1515
def _validateChildElements(element, childElementTag, childElementData, reporter, parentTree, requirementLevel):
    """
    Validate the occurrence count of one child-element type within
    *element* and, when the counts are legal, recursively validate each
    occurrence against its spec. Returns True if any error was logged.
    """
    haveError = False
    # get the valid counts
    minimumOccurrences = childElementData.get("minimumOccurrences", 0)
    maximumOccurrences = childElementData.get("maximumOccurrences", None)
    # find the appropriate elements
    found = element.findall(childElementTag)
    # not defined enough times
    if minimumOccurrences == 1 and len(found) == 0:
        _logMetadataResult(
            reporter,
            "error",
            "%s \"%s\" child-element not defined" % (requirementLevel.title(), childElementTag),
            element.tag,
            parentTree
        )
        haveError = True
    elif len(found) < minimumOccurrences:
        _logMetadataResult(
            reporter,
            "error",
            "%s \"%s\" child-element is defined %d times instead of the minimum %d times" % (requirementLevel.title(), childElementTag, len(found), minimumOccurrences),
            element.tag,
            parentTree
        )
        haveError = True
    # not defined, but not recommended
    elif len(found) == 0 and requirementLevel == "recommended":
        _logMetadataResult(
            reporter,
            "warn",
            "%s \"%s\" child-element is not defined" % (requirementLevel.title(), childElementTag),
            element.tag,
            parentTree
        )
    # defined too many times
    if maximumOccurrences is not None:
        if maximumOccurrences == 1 and len(found) > 1:
            _logMetadataResult(
                reporter,
                "error",
                "%s \"%s\" child-element defined more than once" % (requirementLevel.title(), childElementTag),
                element.tag,
                parentTree
            )
            haveError = True
        elif len(found) > maximumOccurrences:
            _logMetadataResult(
                reporter,
                "error",
                # bug fix: the message previously interpolated
                # minimumOccurrences where the maximum belongs
                "%s \"%s\" child-element defined %d times instead of the maximum %d times" % (requirementLevel.title(), childElementTag, len(found), maximumOccurrences),
                element.tag,
                parentTree
            )
            haveError = True
    # validate the found elements
    if not haveError:
        for childElement in found:
            # handle recursive child-elements
            childElementSpec = childElementData["spec"]
            if childElementSpec == "recursive divSpec_1_0":
                childElementSpec = divSpec_1_0
            elif childElementSpec == "recursive spanSpec_1_0":
                childElementSpec = spanSpec_1_0
            # dive
            e = _validateMetadataElement(childElement, childElementSpec, reporter, parentTree + [element.tag])
            if e:
                haveError = True
    # return the error state
    return haveError
1585
+
1586
+ # logging support
1587
+
1588
def _logMetadataResult(reporter, result, message, elementTag, parentTree):
    """
    Format a metadata validation message with its element location and
    route it to the reporter method matching *result* ("error", "warn",
    "note" or "pass").
    """
    dispatch = {
        "error" : reporter.logError,
        "warn" : reporter.logWarning,
        "note" : reporter.logNote,
        "pass" : reporter.logPass
    }
    log = dispatch[result]
    log(_formatMetadataResultMessage(message, elementTag, parentTree))
1597
+
1598
+ def _formatMetadataResultMessage(message, elementTag, parentTree):
1599
+ parentTree = parentTree + [elementTag]
1600
+ if parentTree[0] == "metadata":
1601
+ parentTree = parentTree[1:]
1602
+ if parentTree:
1603
+ parentTree = ["\"%s\"" % t for t in reversed(parentTree) if t is not None]
1604
+ message += " in " + " in ".join(parentTree)
1605
+ message += "."
1606
+ return message
1607
+
1608
+ # -------------------------
1609
+ # Support: Misc. SFNT Stuff
1610
+ # -------------------------
1611
+
1612
+ # Some of this was adapted from fontTools.ttLib.sfnt
1613
+
1614
+ sfntHeaderFormat = """
1615
+ sfntVersion: 4s
1616
+ numTables: H
1617
+ searchRange: H
1618
+ entrySelector: H
1619
+ rangeShift: H
1620
+ """
1621
+ sfntHeaderSize = structCalcSize(sfntHeaderFormat)
1622
+
1623
+ sfntDirectoryEntryFormat = """
1624
+ tag: 4s
1625
+ checkSum: L
1626
+ offset: L
1627
+ length: L
1628
+ """
1629
+ sfntDirectoryEntrySize = structCalcSize(sfntDirectoryEntryFormat)
1630
+
1631
def maxPowerOfTwo(value):
    """
    Return the exponent of the largest power of two that is less than
    or equal to *value* (0 for value <= 1).
    """
    exponent = -1
    while value:
        value >>= 1
        exponent += 1
    return max(exponent, 0)
1637
+
1638
def getSearchRange(numTables):
    """
    Return the (searchRange, entrySelector, rangeShift) triple for an
    SFNT table directory with *numTables* entries.
    """
    entrySelector = maxPowerOfTwo(numTables)
    searchRange = 16 * (2 ** entrySelector)
    rangeShift = (numTables * 16) - searchRange
    return searchRange, entrySelector, rangeShift
1644
+
1645
def calcPaddingLength(length):
    """
    Return the number of null bytes needed to pad *length* to a
    four-byte boundary (0 when already aligned).
    """
    # -length % 4 is 0 for aligned lengths and 4 - (length % 4) otherwise
    return -length % 4
1649
+
1650
def padData(data):
    """
    Pad *data* with null bytes to a four-byte boundary.
    """
    return data + "\0" * calcPaddingLength(len(data))
1653
+
1654
def sumDataULongs(data):
    """
    Sum *data* interpreted as big-endian unsigned 32-bit integers,
    modulo 2**32. *data* must be four-byte aligned.
    """
    count = len(data) // 4
    values = struct.unpack(">%dL" % count, data)
    return sum(values) % (2 ** 32)
1658
+
1659
def calcChecksum(tag, data):
    """
    Calculate the SFNT checksum of one table. For the "head" table the
    checkSumAdjustment field (bytes 8-11) is zeroed before summing, per
    the OpenType specification.
    """
    if tag == "head":
        data = data[:8] + "\0\0\0\0" + data[12:]
    return sumDataULongs(padData(data))
1665
+
1666
def calcHeadChecksum(data):
    """
    Calculate the expected head checkSumAdjustment for a WOFF file by
    reconstructing the SFNT header and table directory the decoded font
    would have, then combining the stored table checksums with the
    checksum of that reconstructed header/directory data.
    """
    header = unpackHeader(data)
    directory = unpackDirectory(data)
    numTables = header["numTables"]
    # build the sfnt directory
    searchRange, entrySelector, rangeShift = getSearchRange(numTables)
    sfntHeaderData = dict(
        sfntVersion=header["flavor"],
        numTables=numTables,
        searchRange=searchRange,
        entrySelector=entrySelector,
        rangeShift=rangeShift
    )
    sfntData = structPack(sfntHeaderFormat, sfntHeaderData)
    sfntEntries = {}
    # table data would begin immediately after the header and directory
    offset = sfntHeaderSize + (sfntDirectoryEntrySize * numTables)
    # walk entries in WOFF storage order to assign sequential,
    # padded SFNT offsets
    directory = [(entry["offset"], entry) for entry in directory]
    for o, entry in sorted(directory):
        checksum = entry["origChecksum"]
        tag = entry["tag"]
        length = entry["origLength"]
        sfntEntries[tag] = dict(
            tag=tag,
            checkSum=checksum,
            offset=offset,
            length=length
        )
        offset += length + calcPaddingLength(length)
    # the directory itself is serialized sorted by tag
    for tag, sfntEntry in sorted(sfntEntries.items()):
        sfntData += structPack(sfntDirectoryEntryFormat, sfntEntry)
    # calculate
    checkSums = [entry["checkSum"] for entry in sfntEntries.values()]
    checkSums.append(sumDataULongs(sfntData))
    checkSum = sum(checkSums)
    # magic constant and wrap-around per the OpenType head table spec
    checkSum = (0xB1B0AFBA - checkSum) & 0xffffffff
    return checkSum
1702
+
1703
+ # ------------------
1704
+ # Support XML Writer
1705
+ # ------------------
1706
+
1707
class XMLWriter(object):

    """
    Minimal ElementTree-backed XML writer with a begintag/endtag/write
    API. The first element begun becomes the document root.
    """

    def __init__(self):
        self._root = None
        self._elements = []

    def simpletag(self, tag, **kwargs):
        # childless element appended to the currently open element
        ElementTree.SubElement(self._elements[-1], tag, **kwargs)

    def begintag(self, tag, **kwargs):
        # open a new element; nest it under the current element if any
        if self._elements:
            element = ElementTree.SubElement(self._elements[-1], tag, **kwargs)
        else:
            element = ElementTree.Element(tag, **kwargs)
        if self._root is None:
            self._root = element
        self._elements.append(element)

    def endtag(self, tag):
        # close the current element; the tag must match the open one
        assert self._elements[-1].tag == tag
        self._elements.pop()

    def write(self, text):
        # append text content to the currently open element
        current = self._elements[-1]
        if current.text is None:
            current.text = text
        else:
            current.text += text

    def compile(self, encoding="utf-8"):
        # serialize the whole tree, pretty-printed, to a string
        f = StringIO()
        tree = ElementTree.ElementTree(self._root)
        indent(tree.getroot())
        tree.write(f, encoding=encoding)
        text = f.getvalue()
        del f
        return text
1743
+
1744
def indent(elem, level=0):
    """
    Recursively pretty-print an ElementTree element in place, using tabs.
    Adapted from http://effbot.python-hosting.com/file/effbotlib/ElementTree.py
    """
    pad = "\n" + level * "\t"
    if len(elem):
        if not (elem.text and elem.text.strip()):
            elem.text = pad + "\t"
        for child in elem:
            indent(child, level + 1)
            if not (child.tail and child.tail.strip()):
                child.tail = pad
    if level and not (elem.tail and elem.tail.strip()):
        elem.tail = pad
1756
+
1757
+ # ---------------------------------
1758
+ # Support: Reporters and HTML Stuff
1759
+ # ---------------------------------
1760
+
1761
class TestResultGroup(list):

    """
    A titled list of test result records (dicts carrying a "type" key).
    """

    def __init__(self, title):
        super(TestResultGroup, self).__init__()
        self.title = title

    def _haveType(self, tp):
        # True when any stored record has the given result type
        return any(record["type"] == tp for record in self)

    def haveNote(self):
        return self._haveType("NOTE")

    def haveWarning(self):
        return self._haveType("WARNING")

    def haveError(self):
        return self._haveType("ERROR")

    def havePass(self):
        return self._haveType("PASS")

    def haveTraceback(self):
        return self._haveType("TRACEBACK")
1787
+
1788
+
1789
class BaseReporter(object):

    """
    Base reporter. This establishes the required API for reporters.
    """

    def __init__(self):
        self.title = ""
        self.fileInfo = []
        # bug fix: tableInfo was never initialized, so the first call to
        # logTableInfo raised AttributeError
        self.tableInfo = []
        self.testResults = []
        self.haveReadError = False

    def logTitle(self, title):
        # overall report title
        self.title = title

    def logFileInfo(self, title, value):
        # (title, value) pair shown in the file information block
        self.fileInfo.append((title, value))

    def logTableInfo(self, tag=None, offset=None, compLength=None, origLength=None, origChecksum=None):
        # one row describing a table directory entry
        self.tableInfo.append((tag, offset, compLength, origLength, origChecksum))

    def logTestTitle(self, title):
        # start a new result group; subsequent log* calls append to it
        self.testResults.append(TestResultGroup(title))

    def logNote(self, message, information=""):
        d = dict(type="NOTE", message=message, information=information)
        self.testResults[-1].append(d)

    def logWarning(self, message, information=""):
        d = dict(type="WARNING", message=message, information=information)
        self.testResults[-1].append(d)

    def logError(self, message, information=""):
        d = dict(type="ERROR", message=message, information=information)
        self.testResults[-1].append(d)

    def logPass(self, message, information=""):
        d = dict(type="PASS", message=message, information=information)
        self.testResults[-1].append(d)

    def logTraceback(self, text):
        d = dict(type="TRACEBACK", message=text, information="")
        self.testResults[-1].append(d)

    def getReport(self, *args, **kwargs):
        # subclasses must implement their own serialization
        raise NotImplementedError

    def numErrors(self):
        """Return the total number of ERROR results across all groups."""
        numErrors = 0
        for group in self.testResults:
            for result in group:
                if result["type"] == "ERROR":
                    numErrors = numErrors + 1
        return numErrors
1843
+
1844
class TextReporter(BaseReporter):

    """
    Plain text reporter.
    """

    def getReport(self, reportNote=True, reportWarning=True, reportError=True, reportPass=True):
        """Return the filtered results, one "TYPE - group: message" line each."""
        include = {
            "NOTE" : reportNote,
            "WARNING" : reportWarning,
            "ERROR" : reportError,
            "PASS" : reportPass
        }
        lines = []
        for group in self.testResults:
            for result in group:
                # unknown types (e.g. TRACEBACK) are always included
                if not include.get(result["type"], True):
                    continue
                lines.append("%s - %s: %s" % (result["type"], group.title, result["message"]))
        return "\n".join(lines)
1866
+
1867
+
1868
+ class HTMLReporter(BaseReporter):
1869
+
1870
    def getReport(self):
        """
        Build and return the complete HTML report as a string: file
        info, an optional major-error banner, the results overview and
        the per-group result details.
        """
        writer = startHTML(title=self.title)
        # write the file info
        self._writeFileInfo(writer)
        # write major error alert
        if self.haveReadError:
            self._writeMajorError(writer)
        # write the test overview
        self._writeTestResultsOverview(writer)
        # write the test groups
        self._writeTestResults(writer)
        # close the html
        text = finishHTML(writer)
        # done
        return text
1885
+
1886
    def _writeFileInfo(self, writer):
        """
        Write the "File Information" block: a table of the (title, value)
        pairs accumulated via logFileInfo.
        """
        # NOTE(review): the "c_l_a_s_s" keyword looks like a sanitizer
        # artifact standing in for an HTML "class" attribute name —
        # confirm against the original source before changing it.
        # write the font info
        writer.begintag("div", c_l_a_s_s="infoBlock")
        ## title
        writer.begintag("h3", c_l_a_s_s="infoBlockTitle")
        writer.write("File Information")
        writer.endtag("h3")
        ## table
        writer.begintag("table", c_l_a_s_s="report")
        ## items
        for title, value in self.fileInfo:
            # row
            writer.begintag("tr")
            # title
            writer.begintag("td", c_l_a_s_s="title")
            writer.write(title)
            writer.endtag("td")
            # message
            writer.begintag("td")
            writer.write(value)
            writer.endtag("td")
            # close row
            writer.endtag("tr")
        writer.endtag("table")
        ## close the container
        writer.endtag("div")
1912
+
1913
    def _writeMajorError(self, writer):
        """
        Write the prominent banner shown when the file could not be
        read without major structural errors.
        """
        # NOTE(review): "c_l_a_s_s" appears to be a sanitizer artifact
        # for an HTML "class" attribute name — confirm before changing.
        writer.begintag("h2", c_l_a_s_s="readError")
        writer.write("The file contains major structural errors!")
        writer.endtag("h2")
1917
+
1918
    def _writeTestResultsOverview(self, writer):
        """
        Write the summary block: per-type result counts with a toggle
        button for each result type.
        """
        # NOTE(review): "c_l_a_s_s" and "a_p_o_s_t_r_o_p_h_e" look like
        # sanitizer artifacts (an HTML "class" attribute and literal
        # apostrophes in the onclick JavaScript) — confirm against the
        # original source before changing them.
        ## tabulate
        notes = 0
        passes = 0
        errors = 0
        warnings = 0
        for group in self.testResults:
            for data in group:
                tp = data["type"]
                if tp == "NOTE":
                    notes += 1
                elif tp == "PASS":
                    passes += 1
                elif tp == "ERROR":
                    errors += 1
                else:
                    # any other type (e.g. TRACEBACK) is counted as a warning
                    warnings += 1
        total = sum((notes, passes, errors, warnings))
        ## container
        writer.begintag("div", c_l_a_s_s="infoBlock")
        ## header
        writer.begintag("h3", c_l_a_s_s="infoBlockTitle")
        writer.write("Results for %d Tests" % total)
        writer.endtag("h3")
        ## results
        results = [
            ("PASS", passes),
            ("WARNING", warnings),
            ("ERROR", errors),
            ("NOTE", notes),
        ]
        writer.begintag("table", c_l_a_s_s="report")
        for tp, value in results:
            # title
            writer.begintag("tr", c_l_a_s_s="testReport%s" % tp.title())
            writer.begintag("td", c_l_a_s_s="title")
            writer.write(tp)
            writer.endtag("td")
            # count
            writer.begintag("td", c_l_a_s_s="testReportResultCount")
            writer.write(str(value))
            writer.endtag("td")
            # empty
            writer.begintag("td")
            writer.endtag("td")
            # toggle button
            buttonID = "testResult%sToggleButton" % tp
            writer.begintag("td",
                id=buttonID, c_l_a_s_s="toggleButton",
                onclick="testResultToggleButtonHit(a_p_o_s_t_r_o_p_h_e%sa_p_o_s_t_r_o_p_h_e, a_p_o_s_t_r_o_p_h_e%sa_p_o_s_t_r_o_p_h_e);" % (buttonID, "test%s" % tp.title()))
            writer.write("Hide")
            writer.endtag("td")
            # close the row
            writer.endtag("tr")
        writer.endtag("table")
        ## close the container
        writer.endtag("div")
1975
+
1976
+ def _writeTestResults(self, writer):
1977
+ for infoBlock in self.testResults:
1978
+ # container
1979
+ writer.begintag("div", c_l_a_s_s="infoBlock")
1980
+ # header
1981
+ writer.begintag("h4", c_l_a_s_s="infoBlockTitle")
1982
+ writer.write(infoBlock.title)
1983
+ writer.endtag("h4")
1984
+ # individual reports
1985
+ writer.begintag("table", c_l_a_s_s="report")
1986
+ for data in infoBlock:
1987
+ tp = data["type"]
1988
+ message = data["message"]
1989
+ information = data["information"]
1990
+ # row
1991
+ writer.begintag("tr", c_l_a_s_s="test%s" % tp.title())
1992
+ # title
1993
+ writer.begintag("td", c_l_a_s_s="title")
1994
+ writer.write(tp)
1995
+ writer.endtag("td")
1996
+ # message
1997
+ writer.begintag("td")
1998
+ writer.write(message)
1999
+ ## info
2000
+ if information:
2001
+ writer.begintag("p", c_l_a_s_s="info")
2002
+ writer.write(information)
2003
+ writer.endtag("p")
2004
+ writer.endtag("td")
2005
+ # close row
2006
+ writer.endtag("tr")
2007
+ writer.endtag("table")
2008
+ # close container
2009
+ writer.endtag("div")
2010
+
2011
+
2012
# the style sheet embedded into html reports by startHTML(). fix: a stray
# unmatched "}" after the p.metadata rule has been removed so the emitted
# CSS is well-formed.
defaultCSS = """
body {
    background-color: #e5e5e5;
    padding: 15px 15px 0px 15px;
    margin: 0px;
    font-family: Helvetica, Verdana, Arial, sans-serif;
}

h2.readError {
    background-color: red;
    color: white;
    margin: 20px 15px 20px 15px;
    padding: 10px;
    border-radius: 5px;
    font-size: 25px;
}

/* info blocks */

.infoBlock {
    background-color: white;
    margin: 0px 0px 15px 0px;
    padding: 15px;
    border-radius: 5px;
}

h3.infoBlockTitle {
    font-size: 20px;
    margin: 0px 0px 15px 0px;
    padding: 0px 0px 10px 0px;
    border-bottom: 1px solid #e5e5e5;
}

h4.infoBlockTitle {
    font-size: 17px;
    margin: 0px 0px 15px 0px;
    padding: 0px 0px 10px 0px;
    border-bottom: 1px solid #e5e5e5;
}

table.report {
    border-collapse: collapse;
    width: 100%;
    font-size: 14px;
}

table.report tr {
    border-top: 1px solid white;
}

table.report tr.testPass, table.report tr.testReportPass {
    background-color: #c8ffaf;
}

table.report tr.testError, table.report tr.testReportError {
    background-color: #ffc3af;
}

table.report tr.testWarning, table.report tr.testReportWarning {
    background-color: #ffe1af;
}

table.report tr.testNote, table.report tr.testReportNote {
    background-color: #96e1ff;
}

table.report tr.testTraceback, table.report tr.testReportTraceback {
    background-color: red;
    color: white;
}

table.report td {
    padding: 7px 5px 7px 5px;
    vertical-align: top;
}

table.report td.title {
    width: 80px;
    text-align: right;
    font-weight: bold;
    text-transform: uppercase;
}

table.report td.testReportResultCount {
    width: 100px;
}

table.report td.toggleButton {
    text-align: center;
    width: 50px;
    border-left: 1px solid white;
    cursor: pointer;
}

.infoBlock td p.info {
    font-size: 12px;
    font-style: italic;
    margin: 5px 0px 0px 0px;
}

/* SFNT table */

table.sfntTableData {
    font-size: 14px;
    width: 100%;
    border-collapse: collapse;
    padding: 0px;
}

table.sfntTableData th {
    padding: 5px 0px 5px 0px;
    text-align: left
}

table.sfntTableData tr.uncompressed {
    background-color: #ffc3af;
}

table.sfntTableData td {
    width: 20%;
    padding: 5px 0px 5px 0px;
    border: 1px solid #e5e5e5;
    border-left: none;
    border-right: none;
    font-family: Consolas, Menlo, "Vera Mono", Monaco, monospace;
}

pre {
    font-size: 12px;
    font-family: Consolas, Menlo, "Vera Mono", Monaco, monospace;
    margin: 0px;
    padding: 0px;
}

/* Metadata */

.metadataElement {
    background: rgba(0, 0, 0, 0.03);
    margin: 10px 0px 10px 0px;
    border: 2px solid #d8d8d8;
    padding: 10px;
}

h5.metadata {
    font-size: 14px;
    margin: 5px 0px 10px 0px;
    padding: 0px 0px 5px 0px;
    border-bottom: 1px solid #d8d8d8;
}

h6.metadata {
    font-size: 12px;
    font-weight: normal;
    margin: 10px 0px 10px 0px;
    padding: 0px 0px 5px 0px;
    border-bottom: 1px solid #d8d8d8;
}

table.metadata {
    font-size: 12px;
    width: 100%;
    border-collapse: collapse;
    padding: 0px;
}

table.metadata td.key {
    width: 5em;
    padding: 5px 5px 5px 0px;
    border-right: 1px solid #d8d8d8;
    text-align: right;
    vertical-align: top;
}

table.metadata td.value {
    padding: 5px 0px 5px 5px;
    border-left: 1px solid #d8d8d8;
    text-align: left;
    vertical-align: top;
}

p.metadata {
    font-size: 12px;
    font-style: italic;
}
"""
2198
+
2199
# the javascript embedded into html reports by startHTML(). it implements
# the show/hide toggle buttons in the results-overview table: clicking a
# button hides/shows every result row of the matching class and then hides
# any info block left with no visible rows. note: startHTML() temporarily
# replaces "<" and ">" with placeholder tokens to survive ElementTree
# escaping; finishHTML() restores them.
defaultJavascript = """

//<![CDATA[
function testResultToggleButtonHit(buttonID, className) {
    // change the button title
    var element = document.getElementById(buttonID);
    if (element.innerHTML == "Show" ) {
        element.innerHTML = "Hide";
    }
    else {
        element.innerHTML = "Show";
    }
    // toggle the elements
    var elements = getTestResults(className);
    for (var e = 0; e < elements.length; ++e) {
        toggleElement(elements[e]);
    }
    // toggle the info blocks
    toggleInfoBlocks();
}

function getTestResults(className) {
    var rows = document.getElementsByTagName("tr");
    var found = Array();
    for (var r = 0; r < rows.length; ++r) {
        var row = rows[r];
        if (row.className == className) {
            found[found.length] = row;
        }
    }
    return found;
}

function toggleElement(element) {
    if (element.style.display != "none" ) {
        element.style.display = "none";
    }
    else {
        element.style.display = "";
    }
}

function toggleInfoBlocks() {
    var tables = document.getElementsByTagName("table")
    for (var t = 0; t < tables.length; ++t) {
        var table = tables[t];
        if (table.className == "report") {
            var haveVisibleRow = false;
            var rows = table.rows;
            for (var r = 0; r < rows.length; ++r) {
                var row = rows[r];
                if (row.style.display == "none") {
                    var i = 0;
                }
                else {
                    haveVisibleRow = true;
                }
            }
            var div = table.parentNode;
            if (haveVisibleRow == true) {
                div.style.display = "";
            }
            else {
                div.style.display = "none";
            }
        }
    }
}
//]]>
"""
2269
+
2270
def startHTML(title=None, cssReplacements=None):
    """
    Create an XMLWriter and write the opening of the report document:
    the html element, the head (content-type meta, optional title, CSS
    and javascript) and the opening body tag.

    title - optional document title string.
    cssReplacements - optional dict mapping substrings of defaultCSS to
        replacement text, allowing callers to customize the style sheet.

    Returns the writer; pass it to finishHTML() to obtain the final text.

    Fix: the mutable default argument ({}) was replaced with None so a
    caller-mutated dict can never leak between calls.
    """
    if cssReplacements is None:
        cssReplacements = {}
    writer = XMLWriter()
    # start the html
    writer.begintag("html", xmlns="http://www.w3.org/1999/xhtml", lang="en")
    # start the head
    writer.begintag("head")
    # http_equiv is a placeholder; finishHTML() rewrites it to "http-equiv"
    writer.simpletag("meta", http_equiv="Content-Type", content="text/html; charset=utf-8")
    # title
    if title is not None:
        writer.begintag("title")
        writer.write(title)
        writer.endtag("title")
    # write the css
    writer.begintag("style", type="text/css")
    css = defaultCSS
    for before, after in cssReplacements.items():
        css = css.replace(before, after)
    writer.write(css)
    writer.endtag("style")
    # write the javascript
    writer.begintag("script", type="text/javascript")
    javascript = defaultJavascript
    ## hack around some ElementTree escaping; finishHTML() restores < and >
    javascript = javascript.replace("<", "l_e_s_s")
    javascript = javascript.replace(">", "g_r_e_a_t_e_r")
    writer.write(javascript)
    writer.endtag("script")
    # close the head
    writer.endtag("head")
    # start the body
    writer.begintag("body")
    # return the writer
    return writer
2303
+
2304
def finishHTML(writer):
    """
    Close the body and html elements on *writer*, prepend the XHTML 1.0
    Transitional doctype and return the finished document text with all
    of the placeholder tokens (c_l_a_s_s, a_p_o_s_t_r_o_p_h_e, ...)
    substituted back to the real characters.
    """
    # close the open elements
    writer.endtag("body")
    writer.endtag("html")
    # assemble the document
    doctype = ("<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" "
        "\"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">\n")
    text = doctype + writer.compile()
    # undo the escaping placeholders used while writing
    substitutions = (
        ("c_l_a_s_s", "class"),
        ("a_p_o_s_t_r_o_p_h_e", "'"),
        ("l_e_s_s", "<"),
        ("g_r_e_a_t_e_r", ">"),
        ("http_equiv", "http-equiv"),
    )
    for placeholder, real in substitutions:
        text = text.replace(placeholder, real)
    return text
2319
+
2320
+ # ------------------
2321
+ # Support: Unpackers
2322
+ # ------------------
2323
+
2324
def unpackHeader(data):
    """Unpack and return the WOFF header dict from the start of *data*."""
    header, remainder = structUnpack(headerFormat, data)
    return header
2326
+
2327
def unpackDirectory(data):
    """
    Unpack and return the list of table directory entry dicts that
    follow the WOFF header. The number of entries is taken from the
    header's numTables field.
    """
    numTables = unpackHeader(data)["numTables"]
    remainder = data[headerSize:]
    entries = []
    for _ in range(numTables):
        entry, remainder = structUnpack(directoryFormat, remainder)
        entries.append(entry)
    return entries
2336
+
2337
def unpackTableData(data):
    """
    Return a dict mapping each directory entry's tag to its table data.
    Slices are clamped to the available data: an out-of-range offset
    yields an empty string. A compLength smaller than origLength marks
    the table as zlib-compressed; if decompression fails the value is
    None.
    """
    tables = {}
    for entry in unpackDirectory(data):
        offset = entry["offset"]
        compLength = entry["compLength"]
        # clamp the slice to the data that is actually present
        if offset < 0 or offset > len(data) or (offset + compLength) < 0:
            tableData = ""
        elif offset + compLength > len(data):
            tableData = data[offset:]
        else:
            tableData = data[offset:offset + compLength]
        # a shorter stored length signals zlib compression
        if compLength < entry["origLength"]:
            try:
                tableData = zlib.decompress(tableData)
            except zlib.error:
                tableData = None
        tables[entry["tag"]] = tableData
    return tables
2359
+
2360
def unpackMetadata(data, decompress=True, parse=True):
    """
    Extract the metadata block located by the header's metaOffset and
    metaLength fields. When *decompress* is true and the block is
    non-empty it is zlib-decompressed; when *parse* is also true the
    result is parsed into an ElementTree element.
    """
    header = unpackHeader(data)
    start = header["metaOffset"]
    metadata = data[start:start + header["metaLength"]]
    if decompress and metadata:
        metadata = zlib.decompress(metadata)
    if parse and metadata:
        metadata = ElementTree.fromstring(metadata)
    return metadata
2368
+
2369
def unpackPrivateData(data):
    """Return the raw private data block located by privOffset/privLength."""
    header = unpackHeader(data)
    start = header["privOffset"]
    return data[start:start + header["privLength"]]
2373
+
2374
+ # -----------------------
2375
+ # Support: Report Helpers
2376
+ # -----------------------
2377
+
2378
def findUniqueFileName(path):
    """
    Return *path* if nothing exists there; otherwise return a variant
    with a time stamp inserted before the extension, e.g.
    "report (2024-01-01 12-00-00 UTC).html".

    Raises IOError if the stamped path also exists.
    """
    if not os.path.exists(path):
        return path
    folder = os.path.dirname(path)
    fileName = os.path.basename(path)
    fileName, extension = os.path.splitext(fileName)
    stamp = time.strftime("%Y-%m-%d %H-%M-%S %Z")
    newFileName = "%s (%s)%s" % (fileName, stamp, extension)
    newPath = os.path.join(folder, newFileName)
    # intentionally break to prevent a file overwrite.
    # this could happen if the user has a directory full
    # of files with future time stamped file names.
    # not likely, but avoid it all the same.
    # fix: raise instead of assert so the guard survives python -O.
    if os.path.exists(newPath):
        raise IOError("Could not make a unique file name for %r." % path)
    return newPath
2393
+
2394
+
2395
+ # ---------------
2396
+ # Public Function
2397
+ # ---------------
2398
+
2399
# the ordered sequence of (group title, test function) pairs run by
# validateFont(). each function is defined earlier in this file, takes
# (data, reporter) and returns a truthy value when a structural error
# prevents the remaining groups from running.
tests = [
    ("Header", testHeader),
    ("Data Blocks", testDataBlocks),
    ("Table Directory", testTableDirectory),
    ("Table Data", testTableData),
    ("Metadata", testMetadata)
]
2406
+
2407
def validateFont(path, options, writeFile=True):
    """
    Validate the structure of the WOFF file at *path*.

    path - path to the font file.
    options - an object providing outputFormat ("html" or "text"),
        outputFileName, outputDirectory and testGroups attributes.
    writeFile - when True, write the report to a uniquely named file.

    Returns (reportPath, report, numErrors). reportPath is None when no
    file was written.

    Fix: file handles are now closed deterministically via with-blocks.
    """
    # start the reporter
    if options.outputFormat == "html":
        reporter = HTMLReporter()
    elif options.outputFormat == "text":
        reporter = TextReporter()
    else:
        raise NotImplementedError
    # log the title
    reporter.logTitle("Report: %s" % os.path.basename(path))
    # log fileinfo
    reporter.logFileInfo("FILE", os.path.basename(path))
    reporter.logFileInfo("DIRECTORY", os.path.dirname(path))
    # run tests and log results
    with open(path, "rb") as f:
        data = f.read()
    shouldStop = False
    for title, func in tests:
        # skip groups that are not specified in the options
        if options.testGroups and title not in options.testGroups:
            continue
        reporter.logTestTitle(title)
        shouldStop = func(data, reporter)
        if shouldStop:
            # a truthy result means a structural error severe enough
            # that the remaining groups cannot run
            break
    reporter.haveReadError = shouldStop
    # get the report
    report = reporter.getReport()
    # write
    reportPath = None
    if writeFile:
        reportPath = findUniqueFileName(_makeReportPath(path, options))
        with open(reportPath, "wb") as f:
            f.write(report)
    return reportPath, report, reporter.numErrors()


def _makeReportPath(path, options):
    """Build the report output path for *path* from the given options."""
    # make the output file name
    if options.outputFileName is not None:
        fileName = options.outputFileName
    else:
        fileName = os.path.splitext(os.path.basename(path))[0]
        fileName += "_validate"
        if options.outputFormat == "html":
            fileName += ".html"
        else:
            fileName += ".txt"
    # make the output directory
    if options.outputDirectory is not None:
        directory = options.outputDirectory
    else:
        directory = os.path.dirname(path)
    return os.path.join(directory, fileName)
2461
+
2462
+ # --------------------
2463
+ # Command Line Behvior
2464
+ # --------------------
2465
+
2466
# optparse usage line and tool description displayed by --help
usage = "%prog [options] fontpath1 fontpath2"

description = """This tool examines the structure of one
or more WOFF files and issues a detailed report about
the validity of the file structure. It does not validate
the wrapped font data.
"""
2473
+
2474
def main():
    """
    Command line entry point: parse options, validate each font path
    given on the command line and exit with status 1 if any structural
    errors were found.

    Fixes: the error count is now accumulated over all fonts instead of
    reading only the last one (which also raised a NameError when no
    paths were given), and the redundant "True if ... else False" was
    simplified. print is called with a single pre-formatted string so
    the output is identical under Python 2.
    """
    parser = optparse.OptionParser(usage=usage, description=description, version="%prog 0.1beta")
    parser.add_option("-d", dest="outputDirectory", help="Output directory. The default is to output the report into the same directory as the font file.")
    parser.add_option("-o", dest="outputFileName", help="Output file name. The default is \"fontfilename_validate.html\".")
    parser.add_option("-f", dest="outputFormat", help="Output format, text|html. The default is html.", default="html")
    parser.add_option("-q", dest="quiet", action="store_true", help="No report written", default=False)
    parser.set_defaults(excludeTests=[])
    (options, args) = parser.parse_args()
    outputDirectory = options.outputDirectory
    options.testGroups = None # don't expose this to the commandline. it's for testing only.
    if outputDirectory is not None and not os.path.exists(outputDirectory):
        print("Directory does not exist: %s" % outputDirectory)
        sys.exit()
    totalErrors = 0
    for fontPath in args:
        if not os.path.exists(fontPath):
            print("File does not exist: %s" % fontPath)
            sys.exit()
        if not options.quiet:
            print("Testing: %s..." % fontPath)
        writeFile = not options.quiet
        fontPath = fontPath.decode("utf-8")
        outputPath, report, numErrors = validateFont(fontPath, options, writeFile)
        totalErrors += numErrors
        if not options.quiet:
            print("Wrote report to: %s" % outputPath)
    # exit code: non-zero when any font had structural errors
    if totalErrors > 0:
        sys.exit(1)
2501
+
2502
+
2503
# standard script entry point
if __name__ == "__main__":
    main()