fonttools 4.57.0__py3-none-any.whl → 4.58.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fontTools/__init__.py +1 -1
- fontTools/cffLib/__init__.py +61 -26
- fontTools/cffLib/specializer.py +4 -1
- fontTools/designspaceLib/statNames.py +14 -7
- fontTools/feaLib/ast.py +12 -9
- fontTools/feaLib/builder.py +75 -49
- fontTools/feaLib/parser.py +1 -39
- fontTools/fontBuilder.py +6 -0
- fontTools/merge/cmap.py +33 -1
- fontTools/merge/tables.py +12 -1
- fontTools/misc/etree.py +4 -27
- fontTools/misc/loggingTools.py +1 -1
- fontTools/misc/symfont.py +6 -8
- fontTools/mtiLib/__init__.py +1 -3
- fontTools/otlLib/builder.py +359 -145
- fontTools/otlLib/optimize/gpos.py +42 -62
- fontTools/pens/pointPen.py +21 -12
- fontTools/pens/t2CharStringPen.py +31 -11
- fontTools/subset/__init__.py +12 -1
- fontTools/ttLib/tables/G_V_A_R_.py +5 -0
- fontTools/ttLib/tables/T_S_I__0.py +14 -3
- fontTools/ttLib/tables/T_S_I__5.py +16 -5
- fontTools/ttLib/tables/__init__.py +1 -0
- fontTools/ttLib/tables/_c_v_t.py +2 -0
- fontTools/ttLib/tables/_f_p_g_m.py +3 -1
- fontTools/ttLib/tables/_g_l_y_f.py +2 -6
- fontTools/ttLib/tables/_g_v_a_r.py +58 -15
- fontTools/ttLib/tables/_p_o_s_t.py +5 -2
- fontTools/ttLib/tables/otBase.py +1 -0
- fontTools/ufoLib/__init__.py +3 -3
- fontTools/ufoLib/converters.py +89 -25
- fontTools/ufoLib/errors.py +8 -0
- fontTools/ufoLib/etree.py +1 -1
- fontTools/ufoLib/filenames.py +155 -100
- fontTools/ufoLib/glifLib.py +9 -2
- fontTools/ufoLib/kerning.py +66 -36
- fontTools/ufoLib/utils.py +5 -2
- fontTools/unicodedata/Mirrored.py +446 -0
- fontTools/unicodedata/__init__.py +6 -2
- fontTools/varLib/__init__.py +20 -6
- fontTools/varLib/featureVars.py +13 -7
- fontTools/varLib/hvar.py +1 -1
- fontTools/varLib/instancer/__init__.py +14 -5
- fontTools/voltLib/__main__.py +206 -0
- fontTools/voltLib/ast.py +4 -0
- fontTools/voltLib/parser.py +16 -8
- fontTools/voltLib/voltToFea.py +347 -166
- {fonttools-4.57.0.dist-info → fonttools-4.58.1.dist-info}/METADATA +64 -11
- {fonttools-4.57.0.dist-info → fonttools-4.58.1.dist-info}/RECORD +55 -51
- {fonttools-4.57.0.dist-info → fonttools-4.58.1.dist-info}/WHEEL +1 -1
- fonttools-4.58.1.dist-info/licenses/LICENSE.external +359 -0
- {fonttools-4.57.0.data → fonttools-4.58.1.data}/data/share/man/man1/ttx.1 +0 -0
- {fonttools-4.57.0.dist-info → fonttools-4.58.1.dist-info}/entry_points.txt +0 -0
- {fonttools-4.57.0.dist-info → fonttools-4.58.1.dist-info}/licenses/LICENSE +0 -0
- {fonttools-4.57.0.dist-info → fonttools-4.58.1.dist-info}/top_level.txt +0 -0
fontTools/__init__.py
CHANGED
fontTools/cffLib/__init__.py
CHANGED
@@ -1464,10 +1464,11 @@ class CharsetConverter(SimpleConverter):
                 if glyphName in allNames:
                     # make up a new glyphName that's unique
                     n = allNames[glyphName]
-
+                    names = set(allNames) | set(charset)
+                    while (glyphName + "." + str(n)) in names:
                         n += 1
                     allNames[glyphName] = n + 1
-                    glyphName = glyphName + "
+                    glyphName = glyphName + "." + str(n)
                 allNames[glyphName] = 1
                 newCharset.append(glyphName)
             charset = newCharset
@@ -1663,25 +1664,26 @@ class EncodingConverter(SimpleConverter):
             return "StandardEncoding"
         elif value == 1:
             return "ExpertEncoding"
+        # custom encoding at offset `value`
+        assert value > 1
+        file = parent.file
+        file.seek(value)
+        log.log(DEBUG, "loading Encoding at %s", value)
+        fmt = readCard8(file)
+        haveSupplement = bool(fmt & 0x80)
+        fmt = fmt & 0x7F
+
+        if fmt == 0:
+            encoding = parseEncoding0(parent.charset, file)
+        elif fmt == 1:
+            encoding = parseEncoding1(parent.charset, file)
         else:
-
-
-
-
-
-
-            if haveSupplement:
-                raise NotImplementedError("Encoding supplements are not yet supported")
-            fmt = fmt & 0x7F
-            if fmt == 0:
-                encoding = parseEncoding0(
-                    parent.charset, file, haveSupplement, parent.strings
-                )
-            elif fmt == 1:
-                encoding = parseEncoding1(
-                    parent.charset, file, haveSupplement, parent.strings
-                )
-            return encoding
+            raise ValueError(f"Unknown Encoding format: {fmt}")
+
+        if haveSupplement:
+            parseEncodingSupplement(file, encoding, parent.strings)
+
+        return encoding
 
     def write(self, parent, value):
         if value == "StandardEncoding":
@@ -1719,27 +1721,60 @@ class EncodingConverter(SimpleConverter):
         return encoding
 
 
-def 
+def readSID(file):
+    """Read a String ID (SID) — 2-byte unsigned integer."""
+    data = file.read(2)
+    if len(data) != 2:
+        raise EOFError("Unexpected end of file while reading SID")
+    return struct.unpack(">H", data)[0]  # big-endian uint16
+
+
+def parseEncodingSupplement(file, encoding, strings):
+    """
+    Parse the CFF Encoding supplement data:
+    - nSups: number of supplementary mappings
+    - each mapping: (code, SID) pair
+    and apply them to the `encoding` list in place.
+    """
+    nSups = readCard8(file)
+    for _ in range(nSups):
+        code = readCard8(file)
+        sid = readSID(file)
+        name = strings[sid]
+        encoding[code] = name
+
+
+def parseEncoding0(charset, file):
+    """
+    Format 0: simple list of codes.
+    After reading the base table, optionally parse the supplement.
+    """
     nCodes = readCard8(file)
     encoding = [".notdef"] * 256
     for glyphID in range(1, nCodes + 1):
         code = readCard8(file)
         if code != 0:
             encoding[code] = charset[glyphID]
+
     return encoding
 
 
-def parseEncoding1(charset, file
+def parseEncoding1(charset, file):
+    """
+    Format 1: range-based encoding.
+    After reading the base ranges, optionally parse the supplement.
+    """
     nRanges = readCard8(file)
    encoding = [".notdef"] * 256
    glyphID = 1
-    for 
+    for _ in range(nRanges):
        code = readCard8(file)
        nLeft = readCard8(file)
-        for 
+        for _ in range(nLeft + 1):
            encoding[code] = charset[glyphID]
-            code
-
+            code += 1
+            glyphID += 1
+
    return encoding
 
 
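The rewritten EncodingConverter above moves supplement handling into the new parseEncodingSupplement, which reads an nSups count followed by (code, SID) pairs. The standalone sketch below (not fontTools code; the byte values, charset and strings list are invented) decodes a format 0 encoding body followed by such a supplement, to make the byte layout concrete. It skips the leading format byte whose high bit signals that a supplement follows.

import struct
from io import BytesIO


def read_card8(f):
    return f.read(1)[0]  # one unsigned byte


def read_sid(f):
    return struct.unpack(">H", f.read(2))[0]  # big-endian uint16 String ID


# Format 0 body: nCodes=1, code 0x41 -> charset[1]; then the supplement:
# nSups=1, code 0x61 mapped to SID 0 (resolved through the strings list).
data = BytesIO(bytes([1, 0x41, 1, 0x61, 0x00, 0x00]))
charset = [".notdef", "A"]       # glyph order, invented for this sketch
strings = ["A.alt"]              # stand-in for the CFF string INDEX

encoding = [".notdef"] * 256
for glyph_id in range(1, read_card8(data) + 1):
    code = read_card8(data)
    if code != 0:
        encoding[code] = charset[glyph_id]

for _ in range(read_card8(data)):            # supplement: (code, SID) pairs
    code, sid = read_card8(data), read_sid(data)
    encoding[code] = strings[sid]

print(encoding[0x41], encoding[0x61])        # A A.alt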
fontTools/cffLib/specializer.py
CHANGED
@@ -580,7 +580,10 @@ def specializeCommands(
     for i in range(len(commands) - 1, 0, -1):
         if "rmoveto" == commands[i][0] == commands[i - 1][0]:
             v1, v2 = commands[i - 1][1], commands[i][1]
-            commands[i - 1] = (
+            commands[i - 1] = (
+                "rmoveto",
+                [_addArgs(v1[0], v2[0]), _addArgs(v1[1], v2[1])],
+            )
             del commands[i]
 
     # 2. Specialize rmoveto/rlineto/rrcurveto operators into horizontal/vertical variants.
fontTools/designspaceLib/statNames.py
CHANGED
@@ -12,14 +12,13 @@ instance:
 from __future__ import annotations
 
 from dataclasses import dataclass
-from typing import Dict, Optional, Tuple, Union
+from typing import Dict, Literal, Optional, Tuple, Union
 import logging
 
 from fontTools.designspaceLib import (
     AxisDescriptor,
     AxisLabelDescriptor,
     DesignSpaceDocument,
-    DesignSpaceDocumentError,
     DiscreteAxisDescriptor,
     SimpleLocationDict,
     SourceDescriptor,
@@ -27,9 +26,13 @@ from fontTools.designspaceLib import (
 
 LOGGER = logging.getLogger(__name__)
 
-
-
-
+RibbiStyleName = Union[
+    Literal["regular"],
+    Literal["bold"],
+    Literal["italic"],
+    Literal["bold italic"],
+]
+
 BOLD_ITALIC_TO_RIBBI_STYLE = {
     (False, False): "regular",
     (False, True): "italic",
@@ -46,7 +49,7 @@ class StatNames:
     styleNames: Dict[str, str]
     postScriptFontName: Optional[str]
     styleMapFamilyNames: Dict[str, str]
-    styleMapStyleName: Optional[
+    styleMapStyleName: Optional[RibbiStyleName]
 
 
 def getStatNames(
@@ -61,6 +64,10 @@ def getStatNames(
     localized names will be empty (family and style names), or the name will be
     None (PostScript name).
 
+    Note: this method does not consider info attached to the instance, like
+    family name. The user needs to override all names on an instance that STAT
+    information would compute differently than desired.
+
     .. versionadded:: 5.0
     """
     familyNames: Dict[str, str] = {}
@@ -201,7 +208,7 @@ def _getAxisLabelsForUserLocation(
 
 def _getRibbiStyle(
     self: DesignSpaceDocument, userLocation: SimpleLocationDict
-) -> Tuple[
+) -> Tuple[RibbiStyleName, SimpleLocationDict]:
     """Compute the RIBBI style name of the given user location,
     return the location of the matching Regular in the RIBBI group.
 
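The statNames changes replace the open-ended styleMapStyleName annotation with a closed RibbiStyleName union. Below is a small sketch of how that type pairs with the (bold, italic) flag mapping; only the first two dictionary entries are visible in the hunk above, the remaining two are assumed here.

from typing import Dict, Literal, Tuple, Union

RibbiStyleName = Union[
    Literal["regular"], Literal["bold"], Literal["italic"], Literal["bold italic"]
]

# (bold, italic) flag pair -> style-map style name.
BOLD_ITALIC_TO_RIBBI_STYLE: Dict[Tuple[bool, bool], RibbiStyleName] = {
    (False, False): "regular",
    (False, True): "italic",
    (True, False): "bold",
    (True, True): "bold italic",
}


def style_map_style(bold: bool, italic: bool) -> RibbiStyleName:
    return BOLD_ITALIC_TO_RIBBI_STYLE[(bold, italic)]


print(style_map_style(True, True))  # bold italic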
fontTools/feaLib/ast.py
CHANGED
@@ -382,8 +382,7 @@ class FeatureBlock(Block):
     def build(self, builder):
         """Call the ``start_feature`` callback on the builder object, visit
         all the statements in this feature, and then call ``end_feature``."""
-
-        builder.start_feature(self.location, self.name)
+        builder.start_feature(self.location, self.name, self.use_extension)
         # language exclude_dflt statements modify builder.features_
         # limit them to this block with temporary builder.features_
         features = builder.features_
@@ -433,8 +432,7 @@ class LookupBlock(Block):
         self.name, self.use_extension = name, use_extension
 
     def build(self, builder):
-
-        builder.start_lookup_block(self.location, self.name)
+        builder.start_lookup_block(self.location, self.name, self.use_extension)
         Block.build(self, builder)
         builder.end_lookup_block()
 
@@ -753,7 +751,7 @@ class ChainContextPosStatement(Statement):
         if len(self.suffix):
             res += " " + " ".join(map(asFea, self.suffix))
         else:
-            res += " ".join(map(asFea, self.
+            res += " ".join(map(asFea, self.glyphs))
         res += ";"
         return res
 
@@ -811,7 +809,7 @@ class ChainContextSubstStatement(Statement):
         if len(self.suffix):
             res += " " + " ".join(map(asFea, self.suffix))
         else:
-            res += " ".join(map(asFea, self.
+            res += " ".join(map(asFea, self.glyphs))
         res += ";"
         return res
 
@@ -1512,7 +1510,9 @@ class SinglePosStatement(Statement):
             res += " ".join(map(asFea, self.prefix)) + " "
             res += " ".join(
                 [
-                    asFea(x[0])
+                    asFea(x[0])
+                    + "'"
+                    + ((" " + x[1].asFea()) if x[1] is not None else "")
                     for x in self.pos
                 ]
             )
@@ -1520,7 +1520,10 @@ class SinglePosStatement(Statement):
             res += " " + " ".join(map(asFea, self.suffix))
         else:
             res += " ".join(
-                [
+                [
+                    asFea(x[0]) + " " + (x[1].asFea() if x[1] is not None else "")
+                    for x in self.pos
+                ]
             )
         res += ";"
         return res
@@ -2103,7 +2106,7 @@ class VariationBlock(Block):
     def build(self, builder):
         """Call the ``start_feature`` callback on the builder object, visit
         all the statements in this feature, and then call ``end_feature``."""
-        builder.start_feature(self.location, self.name)
+        builder.start_feature(self.location, self.name, self.use_extension)
         if (
             self.conditionset != "NULL"
             and self.conditionset not in builder.conditionsets_
fontTools/feaLib/builder.py
CHANGED
@@ -29,6 +29,7 @@ from fontTools.otlLib.builder import (
     PairPosBuilder,
     SinglePosBuilder,
     ChainContextualRule,
+    AnySubstBuilder,
 )
 from fontTools.otlLib.error import OpenTypeLibError
 from fontTools.varLib.varStore import OnlineVarStoreBuilder
@@ -126,6 +127,7 @@ class Builder(object):
         self.script_ = None
         self.lookupflag_ = 0
         self.lookupflag_markFilterSet_ = None
+        self.use_extension_ = False
         self.language_systems = set()
         self.seen_non_DFLT_script_ = False
         self.named_lookups_ = {}
@@ -141,6 +143,7 @@ class Builder(object):
         self.aalt_features_ = []  # [(location, featureName)*], for 'aalt'
         self.aalt_location_ = None
         self.aalt_alternates_ = {}
+        self.aalt_use_extension_ = False
         # for 'featureNames'
         self.featureNames_ = set()
         self.featureNames_ids_ = {}
@@ -247,6 +250,7 @@ class Builder(object):
         result = builder_class(self.font, location)
         result.lookupflag = self.lookupflag_
         result.markFilterSet = self.lookupflag_markFilterSet_
+        result.extension = self.use_extension_
         self.lookups_.append(result)
         return result
 
@@ -272,6 +276,7 @@ class Builder(object):
         self.cur_lookup_ = builder_class(self.font, location)
         self.cur_lookup_.lookupflag = self.lookupflag_
         self.cur_lookup_.markFilterSet = self.lookupflag_markFilterSet_
+        self.cur_lookup_.extension = self.use_extension_
         self.lookups_.append(self.cur_lookup_)
         if self.cur_lookup_name_:
             # We are starting a lookup rule inside a named lookup block.
@@ -323,7 +328,7 @@ class Builder(object):
         }
         old_lookups = self.lookups_
         self.lookups_ = []
-        self.start_feature(self.aalt_location_, "aalt")
+        self.start_feature(self.aalt_location_, "aalt", self.aalt_use_extension_)
         if single:
             single_lookup = self.get_lookup_(location, SingleSubstBuilder)
             single_lookup.mapping = single
@@ -862,13 +867,22 @@ class Builder(object):
         for lookup in self.lookups_:
             if lookup.table != tag:
                 continue
-
-
-
-
-
-
-
+            name = self.get_lookup_name_(lookup)
+            resolved = lookup.promote_lookup_type(is_named_lookup=name is not None)
+            if resolved is None:
+                raise FeatureLibError(
+                    "Within a named lookup block, all rules must be of "
+                    "the same lookup type and flag",
+                    lookup.location,
+                )
+            for l in resolved:
+                lookup.lookup_index = len(lookups)
+                self.lookup_locations[tag][str(lookup.lookup_index)] = LookupDebugInfo(
+                    location=str(lookup.location),
+                    name=name,
+                    feature=None,
+                )
+                lookups.append(l)
         otLookups = []
         for l in lookups:
             try:
@@ -1054,15 +1068,22 @@ class Builder(object):
         else:
             return frozenset({("DFLT", "dflt")})
 
-    def start_feature(self, location, name):
+    def start_feature(self, location, name, use_extension=False):
+        if use_extension and name != "aalt":
+            raise FeatureLibError(
+                "'useExtension' keyword for feature blocks is allowed only for 'aalt' feature",
+                location,
+            )
         self.language_systems = self.get_default_language_systems_()
         self.script_ = "DFLT"
         self.cur_lookup_ = None
         self.cur_feature_name_ = name
         self.lookupflag_ = 0
         self.lookupflag_markFilterSet_ = None
+        self.use_extension_ = use_extension
         if name == "aalt":
             self.aalt_location_ = location
+            self.aalt_use_extension_ = use_extension
 
     def end_feature(self):
         assert self.cur_feature_name_ is not None
@@ -1071,8 +1092,9 @@ class Builder(object):
         self.cur_lookup_ = None
         self.lookupflag_ = 0
         self.lookupflag_markFilterSet_ = None
+        self.use_extension_ = False
 
-    def start_lookup_block(self, location, name):
+    def start_lookup_block(self, location, name, use_extension=False):
         if name in self.named_lookups_:
             raise FeatureLibError(
                 'Lookup "%s" has already been defined' % name, location
@@ -1086,6 +1108,7 @@ class Builder(object):
         self.cur_lookup_name_ = name
         self.named_lookups_[name] = None
         self.cur_lookup_ = None
+        self.use_extension_ = use_extension
         if self.cur_feature_name_ is None:
             self.lookupflag_ = 0
             self.lookupflag_markFilterSet_ = None
@@ -1094,6 +1117,7 @@ class Builder(object):
         assert self.cur_lookup_name_ is not None
         self.cur_lookup_name_ = None
         self.cur_lookup_ = None
+        self.use_extension_ = False
         if self.cur_feature_name_ is None:
             self.lookupflag_ = 0
             self.lookupflag_markFilterSet_ = None
@@ -1280,6 +1304,24 @@ class Builder(object):
 
     # GSUB rules
 
+    def add_any_subst_(self, location, mapping):
+        lookup = self.get_lookup_(location, AnySubstBuilder)
+        for key, value in mapping.items():
+            if key in lookup.mapping:
+                if value == lookup.mapping[key]:
+                    log.info(
+                        'Removing duplicate substitution from "%s" to "%s" at %s',
+                        ", ".join(key),
+                        ", ".join(value),
+                        location,
+                    )
+                else:
+                    raise FeatureLibError(
+                        'Already defined substitution for "%s"' % ", ".join(key),
+                        location,
+                    )
+            lookup.mapping[key] = value
+
     # GSUB 1
     def add_single_subst(self, location, prefix, suffix, mapping, forceChain):
         if self.cur_feature_name_ == "aalt":
@@ -1291,24 +1333,11 @@ class Builder(object):
         if prefix or suffix or forceChain:
             self.add_single_subst_chained_(location, prefix, suffix, mapping)
             return
-
-
-
-
-
-                    "Removing duplicate single substitution from glyph"
-                    ' "%s" to "%s" at %s',
-                    from_glyph,
-                    to_glyph,
-                    location,
-                )
-            else:
-                raise FeatureLibError(
-                    'Already defined rule for replacing glyph "%s" by "%s"'
-                    % (from_glyph, lookup.mapping[from_glyph]),
-                    location,
-                )
-            lookup.mapping[from_glyph] = to_glyph
+
+        self.add_any_subst_(
+            location,
+            {(key,): (value,) for key, value in mapping.items()},
+        )
 
     # GSUB 2
     def add_multiple_subst(
@@ -1317,21 +1346,10 @@ class Builder(object):
         if prefix or suffix or forceChain:
             self.add_multi_subst_chained_(location, prefix, glyph, suffix, replacements)
             return
-
-
-
-
-                    "Removing duplicate multiple substitution from glyph"
-                    ' "%s" to %s%s',
-                    glyph,
-                    replacements,
-                    f" at {location}" if location else "",
-                )
-            else:
-                raise FeatureLibError(
-                    'Already defined substitution for glyph "%s"' % glyph, location
-                )
-            lookup.mapping[glyph] = replacements
+        self.add_any_subst_(
+            location,
+            {(glyph,): tuple(replacements)},
+        )
 
     # GSUB 3
     def add_alternate_subst(self, location, prefix, glyph, suffix, replacement):
@@ -1361,9 +1379,6 @@ class Builder(object):
                 location, prefix, glyphs, suffix, replacement
             )
             return
-        else:
-            lookup = self.get_lookup_(location, LigatureSubstBuilder)
-
         if not all(glyphs):
             raise FeatureLibError("Empty glyph class in substitution", location)
 
@@ -1372,8 +1387,10 @@ class Builder(object):
         # substitutions to be specified on target sequences that contain
         # glyph classes, the implementation software will enumerate
        # all specific glyph sequences if glyph classes are detected"
-
-
+        self.add_any_subst_(
+            location,
+            {g: (replacement,) for g in itertools.product(*glyphs)},
+        )
 
     # GSUB 5/6
     def add_chain_context_subst(self, location, prefix, glyphs, suffix, lookups):
@@ -1431,6 +1448,13 @@ class Builder(object):
             sub = self.get_chained_lookup_(location, LigatureSubstBuilder)
 
         for g in itertools.product(*glyphs):
+            existing = sub.ligatures.get(g, replacement)
+            if existing != replacement:
+                raise FeatureLibError(
+                    f"Conflicting ligature sub rules: '{g}' maps to '{existing}' and '{replacement}'",
+                    location,
+                )
+
             sub.ligatures[g] = replacement
 
         chain.rules.append(ChainContextualRule(prefix, glyphs, suffix, [sub]))
@@ -1471,7 +1495,9 @@ class Builder(object):
         lookup = self.get_lookup_(location, PairPosBuilder)
         v1 = self.makeOpenTypeValueRecord(location, value1, pairPosContext=True)
         v2 = self.makeOpenTypeValueRecord(location, value2, pairPosContext=True)
-
+        cls1 = tuple(sorted(set(glyphclass1)))
+        cls2 = tuple(sorted(set(glyphclass2)))
+        lookup.addClassPair(location, cls1, v1, cls2, v2)
 
     def add_specific_pair_pos(self, location, glyph1, value1, glyph2, value2):
         if not glyph1 or not glyph2:
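With the plumbing above, a feature-level useExtension keyword is rejected everywhere except the aalt feature. Below is a minimal sketch of exercising that path through feaLib's public entry point; the glyph names are hypothetical and the snippet assumes fontTools 4.58.x behavior.

from fontTools.fontBuilder import FontBuilder
from fontTools.feaLib.builder import addOpenTypeFeaturesFromString

# Hypothetical glyph set; GSUB-only rules need little more than glyph order.
glyphs = [".notdef", "a", "a.alt1", "a.alt2"]
fb = FontBuilder(unitsPerEm=1000)
fb.setupGlyphOrder(glyphs)
fb.setupCharacterMap({ord("a"): "a"})

fea = """
feature aalt useExtension {
    sub a from [a.alt1 a.alt2];
} aalt;
"""
addOpenTypeFeaturesFromString(fb.font, fea)
print("GSUB" in fb.font)  # True once the feature compiled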
fontTools/feaLib/parser.py
CHANGED
@@ -1613,7 +1613,7 @@ class Parser(object):
             "HorizAxis.BaseScriptList",
             "VertAxis.BaseScriptList",
         ), self.cur_token_
-        scripts = [
+        scripts = [self.parse_base_script_record_(count)]
         while self.next_token_ == ",":
             self.expect_symbol_(",")
             scripts.append(self.parse_base_script_record_(count))
@@ -2062,44 +2062,6 @@ class Parser(object):
         )
         self.expect_symbol_(";")
 
-        # A multiple substitution may have a single destination, in which case
-        # it will look just like a single substitution. So if there are both
-        # multiple and single substitutions, upgrade all the single ones to
-        # multiple substitutions.
-
-        # Check if we have a mix of non-contextual singles and multiples.
-        has_single = False
-        has_multiple = False
-        for s in statements:
-            if isinstance(s, self.ast.SingleSubstStatement):
-                has_single = not any([s.prefix, s.suffix, s.forceChain])
-            elif isinstance(s, self.ast.MultipleSubstStatement):
-                has_multiple = not any([s.prefix, s.suffix, s.forceChain])
-
-        # Upgrade all single substitutions to multiple substitutions.
-        if has_single and has_multiple:
-            statements = []
-            for s in block.statements:
-                if isinstance(s, self.ast.SingleSubstStatement):
-                    glyphs = s.glyphs[0].glyphSet()
-                    replacements = s.replacements[0].glyphSet()
-                    if len(replacements) == 1:
-                        replacements *= len(glyphs)
-                    for i, glyph in enumerate(glyphs):
-                        statements.append(
-                            self.ast.MultipleSubstStatement(
-                                s.prefix,
-                                glyph,
-                                s.suffix,
-                                [replacements[i]],
-                                s.forceChain,
-                                location=s.location,
-                            )
-                        )
-                else:
-                    statements.append(s)
-            block.statements = statements
-
     def is_cur_keyword_(self, k):
         if self.cur_token_type_ is Lexer.NAME:
             if isinstance(k, type("")):  # basestring is gone in Python3
fontTools/fontBuilder.py
CHANGED
@@ -714,6 +714,12 @@ class FontBuilder(object):
         gvar.reserved = 0
         gvar.variations = variations
 
+    def setupGVAR(self, variations):
+        gvar = self.font["GVAR"] = newTable("GVAR")
+        gvar.version = 1
+        gvar.reserved = 0
+        gvar.variations = variations
+
     def calcGlyphBounds(self):
         """Calculate the bounding boxes of all glyphs in the `glyf` table.
         This is usually not called explicitly by client code.
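The new setupGVAR mirrors the long-standing setupGvar but stores the variations under a "GVAR" table tag. Below is a sketch of building a tiny variable TTF with the documented setupGvar API (glyph names, metrics and the single weight axis are invented for illustration); the analogous setupGVAR call from this release is noted in a comment rather than executed.

from fontTools.fontBuilder import FontBuilder
from fontTools.pens.ttGlyphPen import TTGlyphPen
from fontTools.ttLib.tables.TupleVariation import TupleVariation


def square_glyph(size):
    pen = TTGlyphPen(None)
    if size:
        pen.moveTo((0, 0))
        pen.lineTo((0, size))
        pen.lineTo((size, size))
        pen.lineTo((size, 0))
        pen.closePath()
    return pen.glyph()


glyph_order = [".notdef", "A"]
fb = FontBuilder(unitsPerEm=1000)
fb.setupGlyphOrder(glyph_order)
fb.setupCharacterMap({ord("A"): "A"})
fb.setupGlyf({".notdef": square_glyph(0), "A": square_glyph(500)})
fb.setupHorizontalMetrics({name: (600, 0) for name in glyph_order})
fb.setupHorizontalHeader(ascent=800, descent=-200)
fb.setupNameTable({"familyName": "Demo", "styleName": "Regular"})
fb.setupOS2(sTypoAscender=800, usWinAscent=800, usWinDescent=200)
fb.setupPost()
fb.setupFvar(axes=[("wght", 100, 400, 900, "Weight")], instances=[])

# One delta set for "A" at the wght maximum: 4 outline points + 4 phantom points.
variations = {
    "A": [
        TupleVariation(
            {"wght": (0.0, 1.0, 1.0)},
            [(0, 0), (0, 100), (100, 100), (100, 0), None, None, None, None],
        )
    ]
}
fb.setupGvar(variations)
# Per the diff above, fontTools 4.58.x also offers the analogous
# fb.setupGVAR(variations), which stores the same data in a "GVAR" table.
fb.save("Demo-Variable.ttf")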
fontTools/merge/cmap.py
CHANGED
@@ -54,6 +54,28 @@ def _glyphsAreSame(
     return True
 
 
+def computeMegaUvs(merger, uvsTables):
+    """Returns merged UVS subtable (cmap format=14)."""
+    uvsDict = {}
+    cmap = merger.cmap
+    for table in uvsTables:
+        for variationSelector, uvsMapping in table.uvsDict.items():
+            if variationSelector not in uvsDict:
+                uvsDict[variationSelector] = {}
+            for unicodeValue, glyphName in uvsMapping:
+                if cmap.get(unicodeValue) == glyphName:
+                    # this is a default variation
+                    glyphName = None
+                # prefer previous glyph id if both fonts defined UVS
+                if unicodeValue not in uvsDict[variationSelector]:
+                    uvsDict[variationSelector][unicodeValue] = glyphName
+
+    for variationSelector in uvsDict:
+        uvsDict[variationSelector] = [*uvsDict[variationSelector].items()]
+
+    return uvsDict
+
+
 # Valid (format, platformID, platEncID) triplets for cmap subtables containing
 # Unicode BMP-only and Unicode Full Repertoire semantics.
 # Cf. OpenType spec for "Platform specific encodings":
@@ -61,24 +83,29 @@ def _glyphsAreSame(
 class _CmapUnicodePlatEncodings:
     BMP = {(4, 3, 1), (4, 0, 3), (4, 0, 4), (4, 0, 6)}
     FullRepertoire = {(12, 3, 10), (12, 0, 4), (12, 0, 6)}
+    UVS = {(14, 0, 5)}
 
 
 def computeMegaCmap(merger, cmapTables):
-    """Sets merger.cmap and merger.
+    """Sets merger.cmap and merger.uvsDict."""
 
     # TODO Handle format=14.
     # Only merge format 4 and 12 Unicode subtables, ignores all other subtables
     # If there is a format 12 table for a font, ignore the format 4 table of it
     chosenCmapTables = []
+    chosenUvsTables = []
     for fontIdx, table in enumerate(cmapTables):
         format4 = None
         format12 = None
+        format14 = None
         for subtable in table.tables:
             properties = (subtable.format, subtable.platformID, subtable.platEncID)
             if properties in _CmapUnicodePlatEncodings.BMP:
                 format4 = subtable
             elif properties in _CmapUnicodePlatEncodings.FullRepertoire:
                 format12 = subtable
+            elif properties in _CmapUnicodePlatEncodings.UVS:
+                format14 = subtable
             else:
                 log.warning(
                     "Dropped cmap subtable from font '%s':\t"
@@ -93,6 +120,9 @@ def computeMegaCmap(merger, cmapTables):
         elif format4 is not None:
             chosenCmapTables.append((format4, fontIdx))
 
+        if format14 is not None:
+            chosenUvsTables.append(format14)
+
     # Build the unicode mapping
     merger.cmap = cmap = {}
     fontIndexForGlyph = {}
@@ -127,6 +157,8 @@ def computeMegaCmap(merger, cmapTables):
             "Dropped mapping from codepoint %#06X to glyphId '%s'", uni, gid
         )
 
+    merger.uvsDict = computeMegaUvs(merger, chosenUvsTables)
+
 
 def renameCFFCharStrings(merger, glyphOrder, cffTable):
     """Rename topDictIndex charStrings based on glyphOrder."""
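To make the merge rule in computeMegaUvs concrete, the sketch below reproduces the same precedence with plain dicts and invented names: a UVS mapping that matches the merged default cmap is stored as None (a default variation sequence), and the first font to define a (selector, codepoint) pair wins.

# Merged default cmap: codepoint -> glyph name (values are made up).
merged_cmap = {0x0030: "zero", 0x4E08: "uni4E08"}

# Per-font format-14 uvsDict data: selector -> [(codepoint, glyph name), ...]
font_a_uvs = {0xFE00: [(0x0030, "zero.slash"), (0x4E08, "uni4E08")]}
font_b_uvs = {0xFE00: [(0x0030, "zero.dotted")], 0xE0100: [(0x4E08, "uni4E08.jp")]}

merged = {}
for uvs in (font_a_uvs, font_b_uvs):            # earlier fonts take precedence
    for selector, mapping in uvs.items():
        bucket = merged.setdefault(selector, {})
        for codepoint, glyph_name in mapping:
            if merged_cmap.get(codepoint) == glyph_name:
                glyph_name = None               # default variation sequence
            bucket.setdefault(codepoint, glyph_name)

merged = {sel: sorted(bucket.items()) for sel, bucket in merged.items()}
print(merged)
# The first font wins for U+0030 under FE00; U+4E08 under FE00 becomes None
# because it matches the merged default cmap.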