fontisan 0.2.9 → 0.2.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.rubocop_todo.yml +25 -15
- data/lib/fontisan/collection/table_analyzer.rb +88 -3
- data/lib/fontisan/converters/cff_table_builder.rb +198 -0
- data/lib/fontisan/converters/glyf_table_builder.rb +63 -0
- data/lib/fontisan/converters/outline_converter.rb +33 -370
- data/lib/fontisan/converters/outline_extraction.rb +93 -0
- data/lib/fontisan/converters/outline_optimizer.rb +89 -0
- data/lib/fontisan/glyph_accessor.rb +29 -1
- data/lib/fontisan/sfnt_font.rb +9 -18
- data/lib/fontisan/variation/cache.rb +1 -0
- data/lib/fontisan/version.rb +1 -1
- data/lib/fontisan/woff2_font.rb +3 -3
- metadata +5 -1
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-metadata.gz:
-data.tar.gz:
+metadata.gz: d5afdba14f32dbf6e482d0f17c64afdcc5dbe62b0567f59e42287234757ec1c3
+data.tar.gz: 6bdf19ae13494e1599dbb523bab6e6b7179ac288ac4d65d26b3cefb1e749878b
 SHA512:
-metadata.gz:
-data.tar.gz:
+metadata.gz: e4e5fe71fe99f4edef0432684799419d4bcd634bffd879b804955a3777728a50828797210766e8cd3c2141cbbd7b0a23adb8feef0f8046557548be3d77ff5bbb
+data.tar.gz: 287c2f971e1c439de9d1465dbd244247ba65ad57fe2babecb873632429b9f04a935b569e9a1660ab47201e0b4604dac6947642b14b5dc28f353abc97a04c8661
data/.rubocop_todo.yml
CHANGED
@@ -1,6 +1,6 @@
 # This configuration was generated by
 # `rubocop --auto-gen-config`
-# on 2026-01-17
+# on 2026-01-17 10:49:34 UTC using RuboCop version 1.82.1.
 # The point is for the user to remove these configuration records
 # one by one as the offenses are removed from the code base.
 # Note that changes in the inspected code, or installation of new
@@ -11,7 +11,7 @@ Gemspec/RequiredRubyVersion:
 Exclude:
 - 'fontisan.gemspec'

-# Offense count:
+# Offense count: 1261
 # This cop supports safe autocorrection (--autocorrect).
 # Configuration parameters: Max, AllowHeredoc, AllowURI, AllowQualifiedName, URISchemes, AllowRBSInlineAnnotation, AllowCopDirectives, AllowedPatterns, SplitStrings.
 # URISchemes: http, https
@@ -58,6 +58,11 @@ Lint/FloatComparison:
 Exclude:
 - 'lib/fontisan/tables/post.rb'

+# Offense count: 2
+Lint/HashCompareByIdentity:
+Exclude:
+- 'lib/fontisan/collection/table_analyzer.rb'
+
 # Offense count: 3
 Lint/IneffectiveAccessModifier:
 Exclude:
@@ -90,7 +95,13 @@ Lint/UnusedMethodArgument:
 - 'lib/fontisan/woff2/glyf_transformer.rb'
 - 'lib/fontisan/woff_font.rb'

-# Offense count:
+# Offense count: 2
+# This cop supports safe autocorrection (--autocorrect).
+Lint/UselessAssignment:
+Exclude:
+- 'lib/fontisan/converters/cff_table_builder.rb'
+
+# Offense count: 480
 # Configuration parameters: AllowedMethods, AllowedPatterns, CountRepeatedAttributes, Max.
 Metrics/AbcSize:
 Enabled: false
@@ -106,35 +117,27 @@ Metrics/BlockLength:
 Metrics/BlockNesting:
 Max: 5

-# Offense count:
+# Offense count: 239
 # Configuration parameters: AllowedMethods, AllowedPatterns, Max.
 Metrics/CyclomaticComplexity:
 Enabled: false

-# Offense count:
+# Offense count: 788
 # Configuration parameters: CountComments, CountAsOne, AllowedMethods, AllowedPatterns.
 Metrics/MethodLength:
 Max: 135

-# Offense count:
+# Offense count: 21
 # Configuration parameters: CountKeywordArgs.
 Metrics/ParameterLists:
 Max: 39
 MaxOptionalParameters: 4

-# Offense count:
+# Offense count: 176
 # Configuration parameters: AllowedMethods, AllowedPatterns, Max.
 Metrics/PerceivedComplexity:
 Enabled: false

-# Offense count: 1
-# This cop supports unsafe autocorrection (--autocorrect-all).
-# Configuration parameters: EnforcedStyleForLeadingUnderscores.
-# SupportedStylesForLeadingUnderscores: disallowed, required, optional
-Naming/MemoizedInstanceVariableName:
-Exclude:
-- 'lib/fontisan/sfnt_font.rb'
-
 # Offense count: 17
 # Configuration parameters: MinNameLength, AllowNamesEndingInNumbers, AllowedNames, ForbiddenNames.
 # AllowedNames: as, at, by, cc, db, id, if, in, io, ip, of, on, os, pp, to
@@ -378,3 +381,10 @@ Style/HashLikeCase:
 - 'lib/fontisan/commands/convert_command.rb'
 - 'lib/fontisan/commands/unpack_command.rb'
 - 'lib/fontisan/models/validation_report.rb'
+
+# Offense count: 1
+# This cop supports safe autocorrection (--autocorrect).
+# Configuration parameters: AllowNamedUnderscoreVariables.
+Style/TrailingUnderscoreVariable:
+Exclude:
+- 'lib/fontisan/converters/cff_table_builder.rb'
data/lib/fontisan/collection/table_analyzer.rb
CHANGED
@@ -23,8 +23,10 @@ module Fontisan
 # Initialize analyzer with fonts
 #
 # @param fonts [Array<TrueTypeFont, OpenTypeFont>] Fonts to analyze
+# @param parallel [Boolean] Use parallel processing for large collections (default: false)
+# @param thread_count [Integer] Number of threads for parallel processing (default: 4)
 # @raise [ArgumentError] if fonts array is empty or contains invalid fonts
-def initialize(fonts)
+def initialize(fonts, parallel: false, thread_count: 4)
 if fonts.nil? || fonts.empty?
 raise ArgumentError,
 "fonts cannot be nil or empty"
@@ -32,7 +34,10 @@ module Fontisan
 raise ArgumentError, "fonts must be an array" unless fonts.is_a?(Array)

 @fonts = fonts
+@parallel = parallel
+@thread_count = thread_count
 @report = nil
+@checksum_cache = {}.compare_by_identity
 end

 # Analyze tables across all fonts
@@ -103,6 +108,17 @@ module Fontisan
 #
 # @return [void]
 def collect_table_checksums
+if @parallel && @fonts.size > 2
+collect_table_checksums_parallel
+else
+collect_table_checksums_sequential
+end
+end
+
+# Collect checksums sequentially (original implementation)
+#
+# @return [void]
+def collect_table_checksums_sequential
 @fonts.each_with_index do |font, font_index|
 font.table_names.each do |tag|
 # Get raw table data
@@ -120,6 +136,71 @@ module Fontisan
 end
 end

+# Collect checksums in parallel using thread pool (lock-free)
+#
+# Uses thread-local storage to avoid mutexes. Each thread processes
+# its assigned fonts with isolated state, then results are aggregated
+# in a single thread after all parallel work completes.
+#
+# @return [void]
+def collect_table_checksums_parallel
+# Partition fonts among threads
+partition_size = (@fonts.size.to_f / @thread_count).ceil
+partitions = @fonts.each_slice(partition_size).to_a
+
+# Track starting index for each partition
+partition_start_indices = []
+current_index = 0
+partitions.each do |partition|
+partition_start_indices << current_index
+current_index += partition.size
+end
+
+# Process each partition in a separate thread with isolated state
+# No mutexes needed - each thread has its own local_checksum_cache and local_results
+threads = partitions.each_with_index.map do |partition, partition_index|
+start_index = partition_start_indices[partition_index]
+
+Thread.new do
+local_checksum_cache = {}.compare_by_identity
+local_results = {}
+
+partition.each_with_index do |font, relative_index|
+font_index = start_index + relative_index
+
+font.table_names.each do |tag|
+table_data = font.table_data[tag]
+next unless table_data
+
+# Thread-local cache - no locks needed
+checksum = local_checksum_cache[table_data] ||= Digest::SHA256.hexdigest(table_data)
+
+local_results[tag] ||= {}
+local_results[tag][checksum] ||= []
+local_results[tag][checksum] << font_index
+end
+end
+
+# Return thread-local results for aggregation
+local_results
+end
+end
+
+# Wait for all threads to complete and aggregate results
+# Single-threaded aggregation - no locks needed
+threads.each do |thread|
+local_results = thread.value
+
+local_results.each do |tag, checksums|
+@report[:table_checksums][tag] ||= {}
+checksums.each do |checksum, font_indices|
+@report[:table_checksums][tag][checksum] ||= []
+@report[:table_checksums][tag][checksum].concat(font_indices)
+end
+end
+end
+end
+
 # Identify which tables are shared across fonts
 #
 # A table is considered shared if 2 or more fonts have identical content
@@ -192,12 +273,16 @@ module Fontisan
 end
 end

-# Calculate SHA256 checksum for table data
+# Calculate SHA256 checksum for table data with caching
+#
+# Caches checksums by data object identity to avoid recomputing
+# SHA256 for identical table data across multiple fonts.
+# In parallel mode, each thread has its own cache (no locks needed).
 #
 # @param data [String] Binary table data
 # @return [String] Hexadecimal checksum
 def calculate_checksum(data)
-Digest::SHA256.hexdigest(data)
+@checksum_cache[data] ||= Digest::SHA256.hexdigest(data)
 end
 end
 end
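The parallel checksum collection added above relies on a partition-then-merge pattern rather than shared state behind a mutex. Below is a minimal, self-contained Ruby sketch of that pattern; it does not use fontisan's classes, and all names in it are illustrative only.

    require "digest"

    # Illustrative data: each "font" is a list of [tag, table_data] pairs.
    fonts = [
      [["head", "AAA"], ["name", "BBB"]],
      [["head", "AAA"], ["name", "CCC"]],
      [["head", "AAA"], ["name", "BBB"]],
    ]
    thread_count = 2

    # Partition the fonts, remembering each partition's starting index.
    partition_size = (fonts.size.to_f / thread_count).ceil
    partitions = fonts.each_slice(partition_size).to_a

    threads = partitions.each_with_index.map do |partition, p_index|
      start_index = p_index * partition_size
      Thread.new do
        # Thread-local accumulator: no locks needed.
        local = Hash.new { |h, k| h[k] = Hash.new { |h2, k2| h2[k2] = [] } }
        partition.each_with_index do |font, rel|
          font.each do |tag, data|
            checksum = Digest::SHA256.hexdigest(data)
            local[tag][checksum] << (start_index + rel)
          end
        end
        local # returned to the main thread via Thread#value
      end
    end

    # Single-threaded merge after all workers finish.
    report = Hash.new { |h, k| h[k] = Hash.new { |h2, k2| h2[k2] = [] } }
    threads.each do |t|
      t.value.each do |tag, by_checksum|
        by_checksum.each { |sum, idx| report[tag][sum].concat(idx) }
      end
    end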
data/lib/fontisan/converters/cff_table_builder.rb
ADDED
@@ -0,0 +1,198 @@
+# frozen_string_literal: true
+
+require_relative "../tables/cff/index_builder"
+require_relative "../tables/cff/dict_builder"
+
+module Fontisan
+module Converters
+# Builds CFF table data from glyph outlines
+#
+# This module handles the construction of complete CFF tables including
+# all INDEX structures (name, Top DICT, String, GlobalSubr, CharStrings, LocalSubr)
+# and the Private DICT.
+#
+# The CFF table structure is:
+# - Header (4 bytes)
+# - Name INDEX
+# - Top DICT INDEX
+# - String INDEX
+# - Global Subr INDEX
+# - CharStrings INDEX
+# - Private DICT (with offset in Top DICT)
+# - Local Subr INDEX (with offset in Private DICT)
+module CffTableBuilder
+# Build complete CFF table from pre-built charstrings
+#
+# @param charstrings [Array<String>] Pre-built CharString data (already optimized if needed)
+# @param local_subrs [Array<String>] Local subroutines from optimization
+# @param font [TrueTypeFont] Source font (for metadata)
+# @return [String] Complete CFF table binary data
+def build_cff_table(charstrings, local_subrs, font)
+# If we have local subrs from optimization, use them
+local_subrs = [] unless local_subrs.is_a?(Array)
+
+# Build font metadata
+font_name = extract_font_name(font)
+
+# Build all INDEXes
+header_size = 4
+name_index_data = Tables::Cff::IndexBuilder.build([font_name])
+string_index_data = Tables::Cff::IndexBuilder.build([]) # Empty strings
+global_subr_index_data = Tables::Cff::IndexBuilder.build([]) # Empty global subrs
+charstrings_index_data = Tables::Cff::IndexBuilder.build(charstrings)
+local_subrs_index_data = Tables::Cff::IndexBuilder.build(local_subrs)
+
+# Build Private DICT with Subrs offset if we have local subroutines
+private_dict_data, private_dict_size = build_private_dict(local_subrs)
+
+# Calculate offsets with iterative refinement
+top_dict_index_data, =
+calculate_cff_offsets(
+header_size,
+name_index_data,
+string_index_data,
+global_subr_index_data,
+charstrings_index_data,
+private_dict_size,
+)
+
+# Build CFF Header
+header = build_cff_header
+
+# Assemble complete CFF table
+header +
+name_index_data +
+top_dict_index_data +
+string_index_data +
+global_subr_index_data +
+charstrings_index_data +
+private_dict_data +
+local_subrs_index_data
+end
+
+private
+
+# Build Private DICT with optional Subrs offset
+#
+# @param local_subrs [Array<String>] Local subroutines
+# @return [Array<String, Integer>] [Private DICT data, size]
+def build_private_dict(local_subrs)
+private_dict_hash = {
+default_width_x: 1000,
+nominal_width_x: 0,
+}
+
+# If we have local subroutines, add Subrs offset
+# Subrs offset is relative to Private DICT start
+if local_subrs.any?
+# Add a placeholder Subrs offset first to get accurate size
+private_dict_hash[:subrs] = 0
+
+# Calculate size of Private DICT with Subrs entry
+temp_private_dict_data = Tables::Cff::DictBuilder.build(private_dict_hash)
+subrs_offset = temp_private_dict_data.bytesize
+
+# Update with actual Subrs offset
+private_dict_hash[:subrs] = subrs_offset
+end
+
+# Build final Private DICT
+private_dict_data = Tables::Cff::DictBuilder.build(private_dict_hash)
+[private_dict_data, private_dict_data.bytesize]
+end
+
+# Calculate CFF table offsets with iterative refinement
+#
+# @param header_size [Integer] CFF header size
+# @param name_index_data [String] Name INDEX data
+# @param string_index_data [String] String INDEX data
+# @param global_subr_index_data [String] Global Subr INDEX data
+# @param charstrings_index_data [String] CharStrings INDEX data
+# @param private_dict_size [Integer] Private DICT size
+# @return [Array<String, Integer, Integer>] [Top DICT INDEX, CharStrings offset, Private DICT offset]
+def calculate_cff_offsets(
+header_size,
+name_index_data,
+string_index_data,
+global_subr_index_data,
+charstrings_index_data,
+private_dict_size
+)
+# Initial pass
+top_dict_index_start = header_size + name_index_data.bytesize
+string_index_start = top_dict_index_start + 100 # Approximate
+global_subr_index_start = string_index_start + string_index_data.bytesize
+charstrings_offset = global_subr_index_start + global_subr_index_data.bytesize
+
+# Build Top DICT
+top_dict_hash = {
+charset: 0,
+encoding: 0,
+charstrings: charstrings_offset,
+}
+top_dict_data = Tables::Cff::DictBuilder.build(top_dict_hash)
+top_dict_index_data = Tables::Cff::IndexBuilder.build([top_dict_data])
+
+# Recalculate with actual Top DICT size
+string_index_start = top_dict_index_start + top_dict_index_data.bytesize
+global_subr_index_start = string_index_start + string_index_data.bytesize
+charstrings_offset = global_subr_index_start + global_subr_index_data.bytesize
+private_dict_offset = charstrings_offset + charstrings_index_data.bytesize
+
+# Update Top DICT with Private DICT info
+top_dict_hash = {
+charset: 0,
+encoding: 0,
+charstrings: charstrings_offset,
+private: [private_dict_size, private_dict_offset],
+}
+top_dict_data = Tables::Cff::DictBuilder.build(top_dict_hash)
+top_dict_index_data = Tables::Cff::IndexBuilder.build([top_dict_data])
+
+# Final recalculation
+string_index_start = top_dict_index_start + top_dict_index_data.bytesize
+global_subr_index_start = string_index_start + string_index_data.bytesize
+charstrings_offset = global_subr_index_start + global_subr_index_data.bytesize
+private_dict_offset = charstrings_offset + charstrings_index_data.bytesize
+
+# Final Top DICT
+top_dict_hash = {
+charset: 0,
+encoding: 0,
+charstrings: charstrings_offset,
+private: [private_dict_size, private_dict_offset],
+}
+top_dict_data = Tables::Cff::DictBuilder.build(top_dict_hash)
+top_dict_index_data = Tables::Cff::IndexBuilder.build([top_dict_data])
+
+[top_dict_index_data, charstrings_offset, private_dict_offset]
+end
+
+# Build CFF Header
+#
+# @return [String] 4-byte CFF header
+def build_cff_header
+[
+1, # major version
+0, # minor version
+4, # header size
+4, # offSize (will be in INDEX)
+].pack("C4")
+end
+
+# Extract font name from name table
+#
+# @param font [TrueTypeFont, OpenTypeFont] Font
+# @return [String] Font name
+def extract_font_name(font)
+name_table = font.table("name")
+if name_table
+font_name = name_table.english_name(Tables::Name::FAMILY)
+return font_name.dup.force_encoding("ASCII-8BIT") if font_name
+end
+
+"UnnamedFont"
+end
+end
+end
+end
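The repeated offset recalculation above exists because the Top DICT itself stores the CharStrings and Private DICT offsets, and the byte length of a CFF DICT depends on the magnitude of the operands it encodes, so changing an offset can change the DICT size and move the offsets again. A toy, self-contained illustration of that fixed-point behaviour follows; the string "encoder" is deliberately simplified and is not fontisan's DictBuilder.

    # Toy "DICT" encoder whose size grows with the number of digits in the
    # offset operand - the property that forces the re-passes.
    def encode_top_dict(charstrings_offset)
      "charstrings=#{charstrings_offset};"
    end

    header_size = 4
    name_index_size = 20
    charstrings_offset = 0
    top_dict = ""

    # Iterate until the offset stops moving (the builder above uses three fixed passes).
    loop do
      top_dict = encode_top_dict(charstrings_offset)
      new_offset = header_size + name_index_size + top_dict.bytesize
      break if new_offset == charstrings_offset
      charstrings_offset = new_offset
    end

    puts charstrings_offset # stable value: the Top DICT size and the offset now agree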
data/lib/fontisan/converters/glyf_table_builder.rb
ADDED
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require_relative "../tables/glyf/glyph_builder"
+
+module Fontisan
+module Converters
+# Builds glyf and loca tables from glyph outlines
+#
+# This module handles the construction of TrueType glyph tables:
+# - glyf table: Contains actual glyph outline data
+# - loca table: Contains offsets to glyph data in glyf table
+#
+# The loca table format depends on the maximum offset:
+# - Short format (offsets/2) if max offset <= 0x1FFFE
+# - Long format (raw offsets) if max offset > 0x1FFFE
+module GlyfTableBuilder
+# Build glyf and loca tables from outlines
+#
+# @param outlines [Array<Outline>] Glyph outlines
+# @return [Array<String, String, Integer>] [glyf_data, loca_data, loca_format]
+def build_glyf_loca_tables(outlines)
+glyf_data = "".b
+offsets = []
+
+# Build each glyph
+outlines.each do |outline|
+offsets << glyf_data.bytesize
+
+if outline.empty?
+# Empty glyph - no data
+next
+end
+
+# Build glyph data using GlyphBuilder class method
+glyph_data = Fontisan::Tables::GlyphBuilder.build_simple_glyph(outline)
+glyf_data << glyph_data
+
+# Add padding to 4-byte boundary
+padding = (4 - (glyf_data.bytesize % 4)) % 4
+glyf_data << ("\x00" * padding) if padding.positive?
+end
+
+# Add final offset
+offsets << glyf_data.bytesize
+
+# Build loca table
+# Determine format based on max offset
+max_offset = offsets.max
+if max_offset <= 0x1FFFE
+# Short format (offsets / 2)
+loca_format = 0
+loca_data = offsets.map { |off| off / 2 }.pack("n*")
+else
+# Long format
+loca_format = 1
+loca_data = offsets.pack("N*")
+end
+
+[glyf_data, loca_data, loca_format]
+end
+end
+end
+end
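The 0x1FFFE threshold used above follows from the short loca format storing offset/2 in an unsigned 16-bit slot: the largest storable value is 0xFFFF, so the largest representable byte offset is 0xFFFF * 2 = 0x1FFFE. A small standalone check of the two packings (not tied to fontisan's API):

    offsets = [0, 12, 44, 0x1FFFE]

    if offsets.max <= 0x1FFFE
      loca_format = 0
      loca_data = offsets.map { |off| off / 2 }.pack("n*") # uint16 halved offsets
    else
      loca_format = 1
      loca_data = offsets.pack("N*")                       # uint32 raw offsets
    end

    puts loca_format        # => 0
    puts loca_data.bytesize # => 8 (4 entries * 2 bytes each)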
data/lib/fontisan/converters/outline_converter.rb
CHANGED
@@ -4,14 +4,10 @@ require_relative "conversion_strategy"
 require_relative "../outline_extractor"
 require_relative "../models/outline"
 require_relative "../tables/cff/charstring_builder"
-require_relative "
-require_relative "
-require_relative "
-require_relative "
-require_relative "../optimizers/pattern_analyzer"
-require_relative "../optimizers/subroutine_optimizer"
-require_relative "../optimizers/subroutine_builder"
-require_relative "../optimizers/charstring_rewriter"
+require_relative "outline_extraction"
+require_relative "cff_table_builder"
+require_relative "glyf_table_builder"
+require_relative "outline_optimizer"
 require_relative "../hints/truetype_hint_extractor"
 require_relative "../hints/postscript_hint_extractor"
 require_relative "../hints/hint_converter"
@@ -67,6 +63,10 @@ module Fontisan
 # otf_font = converter.convert(ttf_font, target_format: :otf, preserve_hints: true)
 class OutlineConverter
 include ConversionStrategy
+include OutlineExtraction
+include CffTableBuilder
+include GlyfTableBuilder
+include OutlineOptimizer

 # Supported outline formats
 SUPPORTED_FORMATS = %i[ttf otf cff2].freeze
@@ -138,8 +138,30 @@ module Fontisan
 # Extract hints if preservation is enabled
 hints_per_glyph = @preserve_hints ? extract_ttf_hints(font) : {}

-# Build
-
+# Build CharStrings from outlines
+charstrings = outlines.map do |outline|
+builder = Tables::Cff::CharStringBuilder.new
+if outline.empty?
+builder.build_empty
+else
+builder.build(outline)
+end
+end
+
+# Apply subroutine optimization if enabled
+local_subrs = []
+if @optimize_cff
+begin
+charstrings, local_subrs = optimize_charstrings(charstrings)
+rescue StandardError => e
+# If optimization fails, fall back to unoptimized CharStrings
+warn "CFF optimization failed: #{e.message}, using unoptimized CharStrings"
+local_subrs = []
+end
+end
+
+# Build CFF table from charstrings and local subrs
+cff_data = build_cff_table(charstrings, local_subrs, font)

 # Copy all tables except glyf/loca
 tables = copy_tables(font, %w[glyf loca])
@@ -184,8 +206,7 @@ module Fontisan
 hints_per_glyph = @preserve_hints ? extract_cff_hints(font) : {}

 # Build glyf and loca tables
-glyf_data, loca_data, loca_format = build_glyf_loca_tables(outlines
-hints_per_glyph)
+glyf_data, loca_data, loca_format = build_glyf_loca_tables(outlines)

 # Copy all tables except CFF
 tables = copy_tables(font, ["CFF ", "CFF2"])
@@ -279,285 +300,6 @@ module Fontisan
 true
 end

-# Extract outlines from TrueType font
-#
-# @param font [TrueTypeFont] Source font
-# @return [Array<Outline>] Array of outline objects
-def extract_ttf_outlines(font)
-# Get required tables
-head = font.table("head")
-maxp = font.table("maxp")
-loca = font.table("loca")
-glyf = font.table("glyf")
-
-# Parse loca with context
-loca.parse_with_context(head.index_to_loc_format, maxp.num_glyphs)
-
-# Create resolver for compound glyphs
-resolver = Tables::CompoundGlyphResolver.new(glyf, loca, head)
-
-# Extract all glyphs
-outlines = []
-maxp.num_glyphs.times do |glyph_id|
-glyph = glyf.glyph_for(glyph_id, loca, head)
-
-outlines << if glyph.nil? || glyph.empty?
-# Empty glyph - create empty outline
-Models::Outline.new(
-glyph_id: glyph_id,
-commands: [],
-bbox: { x_min: 0, y_min: 0, x_max: 0, y_max: 0 },
-)
-elsif glyph.simple?
-# Convert simple glyph to outline
-Models::Outline.from_truetype(glyph, glyph_id)
-else
-# Compound glyph - resolve to simple outline
-resolver.resolve(glyph)
-end
-end
-
-outlines
-end
-
-# Extract outlines from CFF font
-#
-# @param font [OpenTypeFont] Source font
-# @return [Array<Outline>] Array of outline objects
-def extract_cff_outlines(font)
-# Get CFF table
-cff = font.table("CFF ")
-raise Fontisan::Error, "CFF table not found" unless cff
-
-# Get number of glyphs
-num_glyphs = cff.glyph_count
-
-# Extract all glyphs
-outlines = []
-num_glyphs.times do |glyph_id|
-charstring = cff.charstring_for_glyph(glyph_id)
-
-outlines << if charstring.nil? || charstring.path.empty?
-# Empty glyph
-Models::Outline.new(
-glyph_id: glyph_id,
-commands: [],
-bbox: { x_min: 0, y_min: 0, x_max: 0, y_max: 0 },
-)
-else
-# Convert CharString to outline
-Models::Outline.from_cff(charstring, glyph_id)
-end
-end
-
-outlines
-end
-
-# Build CFF table from outlines
-#
-# @param outlines [Array<Outline>] Glyph outlines
-# @param font [TrueTypeFont] Source font (for metadata)
-# @return [String] CFF table binary data
-def build_cff_table(outlines, font, _hints_per_glyph)
-# Build CharStrings INDEX from outlines
-begin
-charstrings = outlines.map do |outline|
-builder = Tables::Cff::CharStringBuilder.new
-if outline.empty?
-builder.build_empty
-else
-builder.build(outline)
-end
-end
-rescue StandardError => e
-raise Fontisan::Error, "Failed to build CharStrings: #{e.message}"
-end
-
-# Apply subroutine optimization if enabled
-local_subrs = []
-
-if @optimize_cff
-begin
-charstrings, local_subrs = optimize_charstrings(charstrings)
-rescue StandardError => e
-# If optimization fails, fall back to unoptimized CharStrings
-warn "CFF optimization failed: #{e.message}, using unoptimized CharStrings"
-local_subrs = []
-end
-end
-
-# Build font metadata
-begin
-font_name = extract_font_name(font)
-rescue StandardError => e
-raise Fontisan::Error, "Failed to extract font name: #{e.message}"
-end
-
-# Build all INDEXes
-begin
-header_size = 4
-name_index_data = Tables::Cff::IndexBuilder.build([font_name])
-string_index_data = Tables::Cff::IndexBuilder.build([]) # Empty strings
-global_subr_index_data = Tables::Cff::IndexBuilder.build([]) # Empty global subrs
-charstrings_index_data = Tables::Cff::IndexBuilder.build(charstrings)
-local_subrs_index_data = Tables::Cff::IndexBuilder.build(local_subrs)
-rescue StandardError => e
-raise Fontisan::Error, "Failed to build CFF indexes: #{e.message}"
-end
-
-# Build Private DICT with Subrs offset if we have local subroutines
-begin
-private_dict_hash = {
-default_width_x: 1000,
-nominal_width_x: 0,
-}
-
-# If we have local subroutines, add Subrs offset
-# Subrs offset is relative to Private DICT start
-if local_subrs.any?
-# Add a placeholder Subrs offset first to get accurate size
-private_dict_hash[:subrs] = 0
-
-# Calculate size of Private DICT with Subrs entry
-temp_private_dict_data = Tables::Cff::DictBuilder.build(private_dict_hash)
-subrs_offset = temp_private_dict_data.bytesize
-
-# Update with actual Subrs offset
-private_dict_hash[:subrs] = subrs_offset
-end
-
-# Build final Private DICT
-private_dict_data = Tables::Cff::DictBuilder.build(private_dict_hash)
-private_dict_size = private_dict_data.bytesize
-rescue StandardError => e
-raise Fontisan::Error, "Failed to build Private DICT: #{e.message}"
-end
-
-# Calculate offsets with iterative refinement
-begin
-# Initial pass
-top_dict_index_start = header_size + name_index_data.bytesize
-string_index_start = top_dict_index_start + 100 # Approximate
-global_subr_index_start = string_index_start + string_index_data.bytesize
-charstrings_offset = global_subr_index_start + global_subr_index_data.bytesize
-
-# Build Top DICT
-top_dict_hash = {
-charset: 0,
-encoding: 0,
-charstrings: charstrings_offset,
-}
-top_dict_data = Tables::Cff::DictBuilder.build(top_dict_hash)
-top_dict_index_data = Tables::Cff::IndexBuilder.build([top_dict_data])
-
-# Recalculate with actual Top DICT size
-string_index_start = top_dict_index_start + top_dict_index_data.bytesize
-global_subr_index_start = string_index_start + string_index_data.bytesize
-charstrings_offset = global_subr_index_start + global_subr_index_data.bytesize
-private_dict_offset = charstrings_offset + charstrings_index_data.bytesize
-
-# Update Top DICT with Private DICT info
-top_dict_hash = {
-charset: 0,
-encoding: 0,
-charstrings: charstrings_offset,
-private: [private_dict_size, private_dict_offset],
-}
-top_dict_data = Tables::Cff::DictBuilder.build(top_dict_hash)
-top_dict_index_data = Tables::Cff::IndexBuilder.build([top_dict_data])
-
-# Final recalculation
-string_index_start = top_dict_index_start + top_dict_index_data.bytesize
-global_subr_index_start = string_index_start + string_index_data.bytesize
-charstrings_offset = global_subr_index_start + global_subr_index_data.bytesize
-private_dict_offset = charstrings_offset + charstrings_index_data.bytesize
-
-# Final Top DICT
-top_dict_hash = {
-charset: 0,
-encoding: 0,
-charstrings: charstrings_offset,
-private: [private_dict_size, private_dict_offset],
-}
-top_dict_data = Tables::Cff::DictBuilder.build(top_dict_hash)
-top_dict_index_data = Tables::Cff::IndexBuilder.build([top_dict_data])
-rescue StandardError => e
-raise Fontisan::Error,
-"Failed to calculate CFF table offsets: #{e.message}"
-end
-
-# Build CFF Header
-begin
-header = [
-1, # major version
-0, # minor version
-4, # header size
-4, # offSize (will be in INDEX)
-].pack("C4")
-rescue StandardError => e
-raise Fontisan::Error, "Failed to build CFF header: #{e.message}"
-end
-
-# Assemble complete CFF table
-begin
-header +
-name_index_data +
-top_dict_index_data +
-string_index_data +
-global_subr_index_data +
-charstrings_index_data +
-private_dict_data +
-local_subrs_index_data
-rescue StandardError => e
-raise Fontisan::Error, "Failed to assemble CFF table: #{e.message}"
-end
-end
-
-# Build glyf and loca tables from outlines
-#
-# @param outlines [Array<Outline>] Glyph outlines
-# @return [Array<String, String, Integer>] [glyf_data, loca_data, loca_format]
-def build_glyf_loca_tables(outlines, _hints_per_glyph)
-glyf_data = "".b
-offsets = []
-
-# Build each glyph
-outlines.each do |outline|
-offsets << glyf_data.bytesize
-
-if outline.empty?
-# Empty glyph - no data
-next
-end
-
-# Build glyph data using GlyphBuilder class method
-glyph_data = Fontisan::Tables::GlyphBuilder.build_simple_glyph(outline)
-glyf_data << glyph_data
-
-# Add padding to 4-byte boundary
-padding = (4 - (glyf_data.bytesize % 4)) % 4
-glyf_data << ("\x00" * padding) if padding.positive?
-end
-
-# Add final offset
-offsets << glyf_data.bytesize
-
-# Build loca table
-# Determine format based on max offset
-max_offset = offsets.max
-if max_offset <= 0x1FFFE
-# Short format (offsets / 2)
-loca_format = 0
-loca_data = offsets.map { |off| off / 2 }.pack("n*")
-else
-# Long format
-loca_format = 1
-loca_data = offsets.pack("N*")
-end
-
-[glyf_data, loca_data, loca_format]
-end
-
 # Copy non-outline tables from source to target
 #
 # @param font [TrueTypeFont, OpenTypeFont] Source font
@@ -664,85 +406,6 @@ module Fontisan
 head_data
 end

-# Extract font name from name table
-#
-# @param font [TrueTypeFont, OpenTypeFont] Font
-# @return [String] Font name
-def extract_font_name(font)
-name_table = font.table("name")
-if name_table
-font_name = name_table.english_name(Tables::Name::FAMILY)
-return font_name.dup.force_encoding("ASCII-8BIT") if font_name
-end
-
-"UnnamedFont"
-end
-
-# Optimize CharStrings using subroutine extraction
-#
-# @param charstrings [Array<String>] Original CharString bytes
-# @return [Array<Array<String>, Array<String>>] [optimized_charstrings, local_subrs]
-def optimize_charstrings(charstrings)
-# Convert to hash format expected by PatternAnalyzer
-charstrings_hash = {}
-charstrings.each_with_index do |cs, index|
-charstrings_hash[index] = cs
-end
-
-# Analyze patterns
-analyzer = Optimizers::PatternAnalyzer.new(
-min_length: 10,
-stack_aware: true,
-)
-patterns = analyzer.analyze(charstrings_hash)
-
-# Return original if no patterns found
-return [charstrings, []] if patterns.empty?
-
-# Optimize selection
-optimizer = Optimizers::SubroutineOptimizer.new(patterns,
-max_subrs: 65_535)
-selected_patterns = optimizer.optimize_selection
-
-# Optimize ordering
-selected_patterns = optimizer.optimize_ordering(selected_patterns)
-
-# Return original if no patterns selected
-return [charstrings, []] if selected_patterns.empty?
-
-# Build subroutines
-builder = Optimizers::SubroutineBuilder.new(selected_patterns,
-type: :local)
-local_subrs = builder.build
-
-# Build subroutine map
-subroutine_map = {}
-selected_patterns.each_with_index do |pattern, index|
-subroutine_map[pattern.bytes] = index
-end
-
-# Rewrite CharStrings
-rewriter = Optimizers::CharstringRewriter.new(subroutine_map, builder)
-optimized_charstrings = charstrings.map.with_index do |charstring, glyph_id|
-# Find patterns for this glyph
-glyph_patterns = selected_patterns.select do |p|
-p.glyphs.include?(glyph_id)
-end
-
-if glyph_patterns.empty?
-charstring
-else
-rewriter.rewrite(charstring, glyph_patterns, glyph_id)
-end
-end
-
-[optimized_charstrings, local_subrs]
-rescue StandardError => e
-# If optimization fails for any reason, return original CharStrings
-warn "Optimization warning: #{e.message}"
-[charstrings, []]
-end
-
 # Generate static instance from variable font
 #
 # @param font [TrueTypeFont, OpenTypeFont] Variable font
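For orientation, the class documentation retained above already shows the converter's entry point. A hedged end-to-end sketch follows: the convert call is taken verbatim from those docs, from_file appears in the library docs quoted elsewhere in this diff, and the no-argument constructor is an assumption (constructor options such as CFF optimization are not shown here).

    require "fontisan"

    ttf_font = Fontisan::TrueTypeFont.from_file("font.ttf")
    converter = Fontisan::Converters::OutlineConverter.new # constructor arguments assumed

    # Line taken from the class documentation shown above.
    otf_font = converter.convert(ttf_font, target_format: :otf, preserve_hints: true)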
@@ -0,0 +1,93 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require_relative "../outline_extractor"
|
|
4
|
+
require_relative "../tables/cff/charstring_builder"
|
|
5
|
+
require_relative "../tables/glyf/glyph_builder"
|
|
6
|
+
require_relative "../tables/glyf/compound_glyph_resolver"
|
|
7
|
+
|
|
8
|
+
module Fontisan
|
|
9
|
+
module Converters
|
|
10
|
+
# Extracts all glyph outlines from a font for conversion purposes
|
|
11
|
+
#
|
|
12
|
+
# Unlike [`OutlineExtractor`](../outline_extractor.rb) which extracts
|
|
13
|
+
# single glyphs, this module extracts ALL glyphs from a font for
|
|
14
|
+
# bulk conversion operations.
|
|
15
|
+
#
|
|
16
|
+
# @see OutlineExtractor for single glyph extraction
|
|
17
|
+
module OutlineExtraction
|
|
18
|
+
# Extract all outlines from TrueType font
|
|
19
|
+
#
|
|
20
|
+
# @param font [TrueTypeFont] Source font
|
|
21
|
+
# @return [Array<Outline>] Array of outline objects
|
|
22
|
+
def extract_ttf_outlines(font)
|
|
23
|
+
# Get required tables
|
|
24
|
+
head = font.table("head")
|
|
25
|
+
maxp = font.table("maxp")
|
|
26
|
+
loca = font.table("loca")
|
|
27
|
+
glyf = font.table("glyf")
|
|
28
|
+
|
|
29
|
+
# Parse loca with context
|
|
30
|
+
loca.parse_with_context(head.index_to_loc_format, maxp.num_glyphs)
|
|
31
|
+
|
|
32
|
+
# Create resolver for compound glyphs
|
|
33
|
+
resolver = Tables::CompoundGlyphResolver.new(glyf, loca, head)
|
|
34
|
+
|
|
35
|
+
# Extract all glyphs
|
|
36
|
+
outlines = []
|
|
37
|
+
maxp.num_glyphs.times do |glyph_id|
|
|
38
|
+
glyph = glyf.glyph_for(glyph_id, loca, head)
|
|
39
|
+
|
|
40
|
+
outlines << if glyph.nil? || glyph.empty?
|
|
41
|
+
# Empty glyph - create empty outline
|
|
42
|
+
Models::Outline.new(
|
|
43
|
+
glyph_id: glyph_id,
|
|
44
|
+
commands: [],
|
|
45
|
+
bbox: { x_min: 0, y_min: 0, x_max: 0, y_max: 0 },
|
|
46
|
+
)
|
|
47
|
+
elsif glyph.simple?
|
|
48
|
+
# Convert simple glyph to outline
|
|
49
|
+
Models::Outline.from_truetype(glyph, glyph_id)
|
|
50
|
+
else
|
|
51
|
+
# Compound glyph - resolve to simple outline
|
|
52
|
+
resolver.resolve(glyph)
|
|
53
|
+
end
|
|
54
|
+
end
|
|
55
|
+
|
|
56
|
+
outlines
|
|
57
|
+
end
|
|
58
|
+
|
|
59
|
+
# Extract all outlines from CFF font
|
|
60
|
+
#
|
|
61
|
+
# @param font [OpenTypeFont] Source font
|
|
62
|
+
# @return [Array<Outline>] Array of outline objects
|
|
63
|
+
def extract_cff_outlines(font)
|
|
64
|
+
# Get CFF table
|
|
65
|
+
cff = font.table("CFF ")
|
|
66
|
+
raise Fontisan::Error, "CFF table not found" unless cff
|
|
67
|
+
|
|
68
|
+
# Get number of glyphs
|
|
69
|
+
num_glyphs = cff.glyph_count
|
|
70
|
+
|
|
71
|
+
# Extract all glyphs
|
|
72
|
+
outlines = []
|
|
73
|
+
num_glyphs.times do |glyph_id|
|
|
74
|
+
charstring = cff.charstring_for_glyph(glyph_id)
|
|
75
|
+
|
|
76
|
+
outlines << if charstring.nil? || charstring.path.empty?
|
|
77
|
+
# Empty glyph
|
|
78
|
+
Models::Outline.new(
|
|
79
|
+
glyph_id: glyph_id,
|
|
80
|
+
commands: [],
|
|
81
|
+
bbox: { x_min: 0, y_min: 0, x_max: 0, y_max: 0 },
|
|
82
|
+
)
|
|
83
|
+
else
|
|
84
|
+
# Convert CharString to outline
|
|
85
|
+
Models::Outline.from_cff(charstring, glyph_id)
|
|
86
|
+
end
|
|
87
|
+
end
|
|
88
|
+
|
|
89
|
+
outlines
|
|
90
|
+
end
|
|
91
|
+
end
|
|
92
|
+
end
|
|
93
|
+
end
|
|
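Since OutlineExtraction is written as a mixin, a hedged sketch of hosting it outside the converter is shown below; the OutlineDumper class is hypothetical, while from_file and Outline#empty? both appear in the code quoted in this diff.

    require "fontisan"

    # Hypothetical host class for the mixin.
    class OutlineDumper
      include Fontisan::Converters::OutlineExtraction
    end

    font = Fontisan::TrueTypeFont.from_file("font.ttf")
    outlines = OutlineDumper.new.extract_ttf_outlines(font)
    empty, drawn = outlines.partition(&:empty?)
    puts "#{drawn.size} drawn glyphs, #{empty.size} empty glyphs"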
data/lib/fontisan/converters/outline_optimizer.rb
ADDED
@@ -0,0 +1,89 @@
+# frozen_string_literal: true
+
+require_relative "../optimizers/pattern_analyzer"
+require_relative "../optimizers/subroutine_optimizer"
+require_relative "../optimizers/subroutine_builder"
+require_relative "../optimizers/charstring_rewriter"
+
+module Fontisan
+module Converters
+# Optimizes CFF CharStrings using subroutine extraction
+#
+# This module analyzes CharStrings for repeated patterns, extracts
+# them as subroutines, and rewrites the CharStrings to call the
+# subroutines instead of repeating the code.
+#
+# The optimization process:
+# 1. Analyze patterns across all CharStrings
+# 2. Select optimal set of patterns for subroutines
+# 3. Optimize subroutine ordering
+# 4. Build subroutines from selected patterns
+# 5. Rewrite CharStrings to call subroutines
+module OutlineOptimizer
+# Optimize CharStrings using subroutine extraction
+#
+# @param charstrings [Array<String>] Original CharString bytes
+# @return [Array<Array<String>, Array<String>>] [optimized_charstrings, local_subrs]
+def optimize_charstrings(charstrings)
+# Convert to hash format expected by PatternAnalyzer
+charstrings_hash = {}
+charstrings.each_with_index do |cs, index|
+charstrings_hash[index] = cs
+end
+
+# Analyze patterns
+analyzer = Optimizers::PatternAnalyzer.new(
+min_length: 10,
+stack_aware: true,
+)
+patterns = analyzer.analyze(charstrings_hash)
+
+# Return original if no patterns found
+return [charstrings, []] if patterns.empty?
+
+# Optimize selection
+optimizer = Optimizers::SubroutineOptimizer.new(patterns,
+max_subrs: 65_535)
+selected_patterns = optimizer.optimize_selection
+
+# Optimize ordering
+selected_patterns = optimizer.optimize_ordering(selected_patterns)
+
+# Return original if no patterns selected
+return [charstrings, []] if selected_patterns.empty?
+
+# Build subroutines
+builder = Optimizers::SubroutineBuilder.new(selected_patterns,
+type: :local)
+local_subrs = builder.build
+
+# Build subroutine map
+subroutine_map = {}
+selected_patterns.each_with_index do |pattern, index|
+subroutine_map[pattern.bytes] = index
+end
+
+# Rewrite CharStrings
+rewriter = Optimizers::CharstringRewriter.new(subroutine_map, builder)
+optimized_charstrings = charstrings.map.with_index do |charstring, glyph_id|
+# Find patterns for this glyph
+glyph_patterns = selected_patterns.select do |p|
+p.glyphs.include?(glyph_id)
+end
+
+if glyph_patterns.empty?
+charstring
+else
+rewriter.rewrite(charstring, glyph_patterns, glyph_id)
+end
+end
+
+[optimized_charstrings, local_subrs]
+rescue StandardError => e
+# If optimization fails for any reason, return original CharStrings
+warn "Optimization warning: #{e.message}"
+[charstrings, []]
+end
+end
+end
+end
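The byte savings that motivate subroutine extraction come from replacing a repeated operator sequence with a single subroutine call. A self-contained back-of-the-envelope sketch of that accounting follows; it is purely illustrative and does not use fontisan's optimizer classes, and the per-call and per-subroutine costs are rough assumptions.

    # Three charstrings sharing one 12-byte pattern.
    pattern = "\x01" * 12
    charstrings = ["\x10" + pattern, "\x20" + pattern, "\x30" + pattern + "\x31"]

    occurrences = charstrings.count { |cs| cs.include?(pattern) }
    call_cost   = 3                    # assumed size of an index push plus a callsubr
    subr_cost   = pattern.bytesize + 1 # pattern stored once, plus a return operator

    saved = occurrences * (pattern.bytesize - call_cost) - subr_cost
    puts "estimated bytes saved: #{saved}" # => 14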
data/lib/fontisan/glyph_accessor.rb
CHANGED
@@ -17,6 +17,7 @@ module Fontisan
 # - Metrics retrieval (advance width, left sidebearing)
 # - Glyph closure calculation for subsetting (tracks composite dependencies)
 # - Validation of glyph IDs and character mappings
+# - Bounded LRU cache to prevent unbounded memory growth
 #
 # @example Basic usage
 # font = Fontisan::TrueTypeFont.from_file('font.ttf')
@@ -41,6 +42,8 @@ module Fontisan
 #
 # Reference: [`docs/ttfunk-feature-analysis.md:541-575`](docs/ttfunk-feature-analysis.md:541)
 class GlyphAccessor
+# Maximum number of glyphs to cache before LRU eviction
+MAX_GLYPH_CACHE_SIZE = 10_000
 # Font instance this accessor operates on
 # @return [TrueTypeFont, OpenTypeFont]
 attr_reader :font
@@ -59,6 +62,7 @@ module Fontisan
 @font = font
 @glyph_cache = {}
 @closure_cache = {}
+@glyph_access_times = {}
 end

 # Get glyph object for a glyph ID
@@ -83,7 +87,11 @@ module Fontisan
 def glyph_for_id(glyph_id)
 validate_glyph_id!(glyph_id)

-
+# Check cache first and update access time
+if @glyph_cache.key?(glyph_id)
+@glyph_access_times[glyph_id] = Time.now.to_f
+return @glyph_cache[glyph_id]
+end

 glyph = if truetype?
 truetype_glyph(glyph_id)
@@ -94,7 +102,12 @@ module Fontisan
 "Font has neither glyf nor CFF table"
 end

+# Evict least recently used entry if cache is full
+evict_lru_glyph if @glyph_cache.size >= MAX_GLYPH_CACHE_SIZE
+
 @glyph_cache[glyph_id] = glyph
+@glyph_access_times[glyph_id] = Time.now.to_f
+glyph
 end

 # Get glyph object for a Unicode character code
@@ -367,6 +380,7 @@ module Fontisan
 def clear_cache
 @glyph_cache.clear
 @closure_cache.clear
+@glyph_access_times.clear

 # Also clear glyf table cache if present
 glyf = font.table("glyf")
@@ -375,6 +389,20 @@ module Fontisan

 private

+# Evict least recently used glyph from cache
+#
+# @return [void]
+def evict_lru_glyph
+return if @glyph_access_times.empty?
+
+# Find least recently used entry
+lru_id = @glyph_access_times.min_by { |_id, time| time }&.first
+return unless lru_id
+
+@glyph_cache.delete(lru_id)
+@glyph_access_times.delete(lru_id)
+end
+
 # Validate a glyph ID
 #
 # @param glyph_id [Integer] Glyph ID to validate
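The eviction strategy added above (per-key timestamps plus min_by) keeps cache hits cheap at the cost of an O(n) scan on eviction. A standalone sketch of the same idea outside the GlyphAccessor class, with all names local to the example:

    class TimestampLruCache
      def initialize(max_size)
        @max_size = max_size
        @store = {}
        @access_times = {}
      end

      def fetch(key)
        if @store.key?(key)
          @access_times[key] = Time.now.to_f   # refresh on hit
          return @store[key]
        end
        evict_lru if @store.size >= @max_size  # evict before inserting
        value = yield
        @store[key] = value
        @access_times[key] = Time.now.to_f
        value
      end

      private

      def evict_lru
        lru_key = @access_times.min_by { |_k, t| t }&.first
        return unless lru_key
        @store.delete(lru_key)
        @access_times.delete(lru_key)
      end
    end

    cache = TimestampLruCache.new(2)
    cache.fetch(:a) { "A" }
    cache.fetch(:b) { "B" }
    cache.fetch(:c) { "C" } # :a, the least recently used key, is evicted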
data/lib/fontisan/sfnt_font.rb
CHANGED
@@ -135,6 +135,9 @@ module Fontisan
 Constants::LOCA_TAG => Tables::LocaTable,
 }.freeze

+# Padding bytes for table alignment (frozen to avoid reallocation)
+PADDING_BYTES = ("\x00" * 4).freeze
+
 # Read SFNT Font from a file
 #
 # @param path [String] Path to the font file
@@ -200,14 +203,11 @@ module Fontisan
 @parsed_tables = {}
 @sfnt_tables = {}
 @table_entry_cache = {}
-@tag_encoding_cache = {}
-@
+@tag_encoding_cache = {} # Cache for normalized tag encodings
+@table_names = nil # Cache for table names array
 @loading_mode = LoadingModes::FULL
 @lazy_load_enabled = false
 @io_source = nil
-
-# Pre-build table entry cache for O(1) lookups
-build_table_entry_cache
 end

 # Read table data for all tables in the font
@@ -447,13 +447,13 @@ module Fontisan
 end
 end

+# Return cached if available (fast path)
 return @parsed_tables[tag] if @parsed_tables.key?(tag)

 # Lazy load table data if enabled
-if @lazy_load_enabled && !@table_data.key?(tag)
-load_table_data(tag)
-end
+load_table_data(tag) if @lazy_load_enabled && !@table_data.key?(tag)

+# Parse and cache
 @parsed_tables[tag] ||= parse_table(tag)
 end

@@ -570,15 +570,6 @@ module Fontisan

 private

-# Build table entry cache for O(1) lookups
-#
-# @return [void]
-def build_table_entry_cache
-tables.each do |entry|
-@table_entry_cache[entry.tag] = entry
-end
-end
-
 # Normalize tag encoding to UTF-8 (cached for performance)
 #
 # @param tag [String] The tag to normalize
@@ -675,7 +666,7 @@ module Fontisan

 # Add padding to align to 4-byte boundary
 padding = (Constants::TABLE_ALIGNMENT - (io.pos % Constants::TABLE_ALIGNMENT)) % Constants::TABLE_ALIGNMENT
-io.write(
+io.write(PADDING_BYTES[0, padding]) if padding.positive?

 # Zero out checksumAdjustment field in head table
 if entry.tag == Constants::HEAD_TAG
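The padding expression reused above is the usual align-up-to-4 idiom; a quick worked check in plain Ruby (the alignment constant 4 is assumed here to match TABLE_ALIGNMENT):

    [0, 1, 2, 3, 4, 5].each do |pos|
      padding = (4 - (pos % 4)) % 4
      puts "pos=#{pos} padding=#{padding} aligned=#{pos + padding}"
    end
    # padding is 0, 3, 2, 1, 0, 3; every aligned position is a multiple of 4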
data/lib/fontisan/version.rb
CHANGED
data/lib/fontisan/woff2_font.rb
CHANGED
@@ -168,7 +168,7 @@ module Fontisan
 end

 # Fallback to decompressed_tables
-return @decompressed_tables
+return @decompressed_tables
 end

 # Tag provided - return specific table
@@ -189,8 +189,8 @@ module Fontisan

 # Fallback to parsed_tables hash
 # Normalize tag to UTF-8 string for hash lookup
-
-tag_key
+tag_key = tag.to_s
+tag_key.force_encoding("UTF-8") unless tag_key.encoding == Encoding::UTF_8
 @parsed_tables[tag_key]
 end

metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: fontisan
 version: !ruby/object:Gem::Version
-version: 0.2.
+version: 0.2.11
 platform: ruby
 authors:
 - Ribose Inc.
@@ -168,10 +168,14 @@ files:
 - lib/fontisan/config/variable_settings.yml
 - lib/fontisan/config/woff2_settings.yml
 - lib/fontisan/constants.rb
+- lib/fontisan/converters/cff_table_builder.rb
 - lib/fontisan/converters/collection_converter.rb
 - lib/fontisan/converters/conversion_strategy.rb
 - lib/fontisan/converters/format_converter.rb
+- lib/fontisan/converters/glyf_table_builder.rb
 - lib/fontisan/converters/outline_converter.rb
+- lib/fontisan/converters/outline_extraction.rb
+- lib/fontisan/converters/outline_optimizer.rb
 - lib/fontisan/converters/svg_generator.rb
 - lib/fontisan/converters/table_copier.rb
 - lib/fontisan/converters/woff2_encoder.rb
|