fonttools 4.57.0__cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl → 4.58.0__cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fontTools/__init__.py +1 -1
- fontTools/cffLib/__init__.py +61 -26
- fontTools/cu2qu/cu2qu.c +4564 -4048
- fontTools/cu2qu/cu2qu.cpython-313-aarch64-linux-gnu.so +0 -0
- fontTools/designspaceLib/statNames.py +14 -7
- fontTools/feaLib/ast.py +84 -10
- fontTools/feaLib/builder.py +20 -4
- fontTools/feaLib/lexer.c +6266 -7109
- fontTools/feaLib/lexer.cpython-313-aarch64-linux-gnu.so +0 -0
- fontTools/feaLib/parser.py +1 -39
- fontTools/fontBuilder.py +6 -0
- fontTools/misc/bezierTools.c +13476 -15371
- fontTools/misc/bezierTools.cpython-313-aarch64-linux-gnu.so +0 -0
- fontTools/misc/etree.py +4 -27
- fontTools/mtiLib/__init__.py +0 -2
- fontTools/otlLib/builder.py +195 -145
- fontTools/otlLib/optimize/gpos.py +42 -62
- fontTools/pens/momentsPen.c +4490 -4672
- fontTools/pens/momentsPen.cpython-313-aarch64-linux-gnu.so +0 -0
- fontTools/pens/pointPen.py +21 -12
- fontTools/qu2cu/qu2cu.c +5725 -5456
- fontTools/qu2cu/qu2cu.cpython-313-aarch64-linux-gnu.so +0 -0
- fontTools/subset/__init__.py +11 -0
- fontTools/ttLib/tables/G_V_A_R_.py +5 -0
- fontTools/ttLib/tables/T_S_I__0.py +14 -3
- fontTools/ttLib/tables/T_S_I__5.py +16 -5
- fontTools/ttLib/tables/__init__.py +1 -0
- fontTools/ttLib/tables/_c_v_t.py +2 -0
- fontTools/ttLib/tables/_f_p_g_m.py +3 -1
- fontTools/ttLib/tables/_g_l_y_f.py +2 -6
- fontTools/ttLib/tables/_g_v_a_r.py +58 -15
- fontTools/ttLib/tables/_p_o_s_t.py +5 -2
- fontTools/ttLib/tables/otBase.py +1 -0
- fontTools/ufoLib/__init__.py +2 -2
- fontTools/ufoLib/converters.py +89 -25
- fontTools/ufoLib/errors.py +8 -0
- fontTools/ufoLib/etree.py +1 -1
- fontTools/ufoLib/filenames.py +155 -100
- fontTools/ufoLib/glifLib.py +9 -2
- fontTools/ufoLib/kerning.py +66 -36
- fontTools/ufoLib/utils.py +5 -2
- fontTools/unicodedata/Mirrored.py +446 -0
- fontTools/unicodedata/__init__.py +6 -2
- fontTools/varLib/__init__.py +2 -0
- fontTools/varLib/iup.c +6838 -6362
- fontTools/varLib/iup.cpython-313-aarch64-linux-gnu.so +0 -0
- fontTools/voltLib/__main__.py +206 -0
- fontTools/voltLib/ast.py +4 -0
- fontTools/voltLib/parser.py +16 -8
- fontTools/voltLib/voltToFea.py +347 -166
- {fonttools-4.57.0.dist-info → fonttools-4.58.0.dist-info}/METADATA +45 -11
- {fonttools-4.57.0.dist-info → fonttools-4.58.0.dist-info}/RECORD +58 -54
- {fonttools-4.57.0.dist-info → fonttools-4.58.0.dist-info}/WHEEL +1 -1
- fonttools-4.58.0.dist-info/licenses/LICENSE.external +359 -0
- {fonttools-4.57.0.data → fonttools-4.58.0.data}/data/share/man/man1/ttx.1 +0 -0
- {fonttools-4.57.0.dist-info → fonttools-4.58.0.dist-info}/entry_points.txt +0 -0
- {fonttools-4.57.0.dist-info → fonttools-4.58.0.dist-info}/licenses/LICENSE +0 -0
- {fonttools-4.57.0.dist-info → fonttools-4.58.0.dist-info}/top_level.txt +0 -0
fontTools/__init__.py
CHANGED
fontTools/cffLib/__init__.py
CHANGED
@@ -1464,10 +1464,11 @@ class CharsetConverter(SimpleConverter):
             if glyphName in allNames:
                 # make up a new glyphName that's unique
                 n = allNames[glyphName]
-                while (glyphName + "#" + str(n)) in allNames:
+                names = set(allNames) | set(charset)
+                while (glyphName + "." + str(n)) in names:
                     n += 1
                 allNames[glyphName] = n + 1
-                glyphName = glyphName + "#" + str(n)
+                glyphName = glyphName + "." + str(n)
             allNames[glyphName] = 1
             newCharset.append(glyphName)
         charset = newCharset
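
Note: the hunk above makes the synthesized names for duplicate charset entries unique against the full charset, not just the names assigned so far. A minimal standalone sketch of that loop (a hypothetical uniquify_charset helper, not a fontTools API) with a worked input:

def uniquify_charset(charset):
    # Mirrors the dedup loop in CharsetConverter above.
    allNames = {}
    newCharset = []
    for glyphName in charset:
        if glyphName in allNames:
            # make up a new glyphName that's unique
            n = allNames[glyphName]
            names = set(allNames) | set(charset)
            while (glyphName + "." + str(n)) in names:
                n += 1
            allNames[glyphName] = n + 1
            glyphName = glyphName + "." + str(n)
        allNames[glyphName] = 1
        newCharset.append(glyphName)
    return newCharset

# The second "A" becomes "A.2", not "A.1", because "A.1" already occurs in the charset.
print(uniquify_charset([".notdef", "A", "A", "A.1"]))  # ['.notdef', 'A', 'A.2', 'A.1']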
@@ -1663,25 +1664,26 @@ class EncodingConverter(SimpleConverter):
             return "StandardEncoding"
         elif value == 1:
             return "ExpertEncoding"
+        # custom encoding at offset `value`
+        assert value > 1
+        file = parent.file
+        file.seek(value)
+        log.log(DEBUG, "loading Encoding at %s", value)
+        fmt = readCard8(file)
+        haveSupplement = bool(fmt & 0x80)
+        fmt = fmt & 0x7F
+
+        if fmt == 0:
+            encoding = parseEncoding0(parent.charset, file)
+        elif fmt == 1:
+            encoding = parseEncoding1(parent.charset, file)
         else:
-            assert value > 1
-            file = parent.file
-            file.seek(value)
-            log.log(DEBUG, "loading Encoding at %s", value)
-            fmt = readCard8(file)
-            haveSupplement = fmt & 0x80
-            if haveSupplement:
-                raise NotImplementedError("Encoding supplements are not yet supported")
-            fmt = fmt & 0x7F
-            if fmt == 0:
-                encoding = parseEncoding0(
-                    parent.charset, file, haveSupplement, parent.strings
-                )
-            elif fmt == 1:
-                encoding = parseEncoding1(
-                    parent.charset, file, haveSupplement, parent.strings
-                )
-            return encoding
+            raise ValueError(f"Unknown Encoding format: {fmt}")
+
+        if haveSupplement:
+            parseEncodingSupplement(file, encoding, parent.strings)
+
+        return encoding
 
     def write(self, parent, value):
         if value == "StandardEncoding":
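
Note: in the rewritten read() above, the Encoding format byte carries two pieces of information, per the CFF spec: bit 7 flags whether supplement data follows the base table, and the low 7 bits select the table format. A short sketch of that split (the 0x81 sample value is made up):

fmt_byte = 0x81                          # sample value only
haveSupplement = bool(fmt_byte & 0x80)   # True: a (code, SID) supplement follows
fmt = fmt_byte & 0x7F                    # 1: range-based (format 1) encoding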
@@ -1719,27 +1721,60 @@ class EncodingConverter(SimpleConverter):
         return encoding
 
 
-def parseEncoding0(charset, file, haveSupplement, strings):
+def readSID(file):
+    """Read a String ID (SID) — 2-byte unsigned integer."""
+    data = file.read(2)
+    if len(data) != 2:
+        raise EOFError("Unexpected end of file while reading SID")
+    return struct.unpack(">H", data)[0]  # big-endian uint16
+
+
+def parseEncodingSupplement(file, encoding, strings):
+    """
+    Parse the CFF Encoding supplement data:
+    - nSups: number of supplementary mappings
+    - each mapping: (code, SID) pair
+    and apply them to the `encoding` list in place.
+    """
+    nSups = readCard8(file)
+    for _ in range(nSups):
+        code = readCard8(file)
+        sid = readSID(file)
+        name = strings[sid]
+        encoding[code] = name
+
+
+def parseEncoding0(charset, file):
+    """
+    Format 0: simple list of codes.
+    After reading the base table, optionally parse the supplement.
+    """
     nCodes = readCard8(file)
     encoding = [".notdef"] * 256
     for glyphID in range(1, nCodes + 1):
         code = readCard8(file)
         if code != 0:
             encoding[code] = charset[glyphID]
+
     return encoding
 
 
-def parseEncoding1(charset, file, haveSupplement, strings):
+def parseEncoding1(charset, file):
+    """
+    Format 1: range-based encoding.
+    After reading the base ranges, optionally parse the supplement.
+    """
     nRanges = readCard8(file)
     encoding = [".notdef"] * 256
     glyphID = 1
-    for i in range(nRanges):
+    for _ in range(nRanges):
         code = readCard8(file)
         nLeft = readCard8(file)
-        for glyphID in range(glyphID, glyphID + nLeft + 1):
+        for _ in range(nLeft + 1):
             encoding[code] = charset[glyphID]
-            code = code + 1
-        glyphID = glyphID + 1
+            code += 1
+            glyphID += 1
+
     return encoding
 
 
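Note: the helpers above read a base Encoding table followed by an optional supplement of (code, SID) pairs. A standalone sketch of that byte layout (a format 0 base table plus a one-entry supplement), using io/struct stand-ins rather than the module's readCard8/readSID, and a toy SID-to-name lookup:

import io
import struct

data = io.BytesIO(
    bytes([0x80, 2, 32, 33])                      # fmt=0 with supplement bit set; nCodes=2; codes 32, 33
    + bytes([1, 0xA4]) + struct.pack(">H", 391)   # nSups=1: code 0xA4 -> SID 391
)
charset = [".notdef", "space", "exclam"]
strings = {391: "florin"}                         # toy SID -> name mapping

fmt = data.read(1)[0]
haveSupplement = bool(fmt & 0x80)
fmt &= 0x7F                                       # format 0

encoding = [".notdef"] * 256
nCodes = data.read(1)[0]
for glyphID in range(1, nCodes + 1):              # base table: one code per glyph
    code = data.read(1)[0]
    if code != 0:
        encoding[code] = charset[glyphID]

if haveSupplement:                                # supplement: explicit (code, SID) pairs
    nSups = data.read(1)[0]
    for _ in range(nSups):
        code = data.read(1)[0]
        sid = struct.unpack(">H", data.read(2))[0]
        encoding[code] = strings[sid]

print(encoding[32], encoding[33], encoding[0xA4])  # space exclam florin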