three-text 0.2.15 → 0.2.17
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +21 -4
- package/dist/index.cjs +422 -230
- package/dist/index.js +422 -230
- package/dist/index.min.cjs +632 -618
- package/dist/index.min.js +624 -610
- package/dist/index.umd.js +422 -230
- package/dist/index.umd.min.js +632 -618
- package/dist/types/core/cache/GlyphGeometryBuilder.d.ts +1 -0
- package/dist/types/core/geometry/Tessellator.d.ts +3 -2
- package/dist/types/core/layout/LineBreak.d.ts +2 -1
- package/dist/types/core/layout/TextLayout.d.ts +15 -0
- package/dist/types/core/shaping/TextMeasurer.d.ts +1 -0
- package/dist/types/webgpu/index.d.ts +1 -0
- package/dist/webgpu/index.cjs +4 -2
- package/dist/webgpu/index.d.ts +1 -0
- package/dist/webgpu/index.js +4 -2
- package/package.json +1 -1
package/dist/index.umd.js  CHANGED

@@ -1,5 +1,5 @@
 /*!
- * three-text v0.2.15
+ * three-text v0.2.17
  * Copyright (C) 2025 Countertype LLC
  *
  * This program is free software: you can redistribute it and/or modify
@@ -198,7 +198,7 @@
 FitnessClass[FitnessClass["LOOSE"] = 2] = "LOOSE";
 FitnessClass[FitnessClass["VERY_LOOSE"] = 3] = "VERY_LOOSE";
 })(FitnessClass || (FitnessClass = {}));
-// ActiveNodeList maintains all currently viable breakpoints as we scan through the text
+// ActiveNodeList maintains all currently viable breakpoints as we scan through the text
 // Each node represents a potential break with accumulated demerits (total "cost" from start)
 //
 // Demerits = cumulative penalty score from text start to this break, calculated as:
@@ -340,9 +340,9 @@
 // Converts text into items (boxes, glues, penalties) for line breaking
 // The measureText function should return widths that include any letter spacing
 static itemizeText(text, measureText, // function to measure text width
-hyphenate = false, language = 'en-us', availablePatterns, lefthyphenmin = DEFAULT_LEFT_HYPHEN_MIN, righthyphenmin = DEFAULT_RIGHT_HYPHEN_MIN, context) {
+measureTextWidths, hyphenate = false, language = 'en-us', availablePatterns, lefthyphenmin = DEFAULT_LEFT_HYPHEN_MIN, righthyphenmin = DEFAULT_RIGHT_HYPHEN_MIN, context) {
 const items = [];
-items.push(...this.itemizeParagraph(text, measureText, hyphenate, language, availablePatterns, lefthyphenmin, righthyphenmin, context));
+items.push(...this.itemizeParagraph(text, measureText, measureTextWidths, hyphenate, language, availablePatterns, lefthyphenmin, righthyphenmin, context));
 // Final glue and penalty to end the paragraph
 // Use infinite stretch to fill the last line
 items.push({
@@ -447,9 +447,10 @@
 return (this.isCJClosingPunctuation(char) || this.isCJOpeningPunctuation(char));
 }
 // CJK (Chinese/Japanese/Korean) character-level itemization with inter-character glue
-static itemizeCJKText(text, measureText, context, startOffset = 0, glueParams) {
+static itemizeCJKText(text, measureText, measureTextWidths, context, startOffset = 0, glueParams) {
 const items = [];
 const chars = Array.from(text);
+const widths = measureTextWidths ? measureTextWidths(text) : null;
 let textPosition = startOffset;
 // Inter-character glue parameters
 let glueWidth;
@@ -470,7 +471,7 @@
 const char = chars[i];
 const nextChar = i < chars.length - 1 ? chars[i + 1] : null;
 if (/\s/.test(char)) {
-const width = measureText(char);
+const width = widths ? (widths[i] ?? measureText(char)) : measureText(char);
 items.push({
 type: ItemType.GLUE,
 width,
@@ -484,7 +485,7 @@
 }
 items.push({
 type: ItemType.BOX,
-width: measureText(char),
+width: widths ? (widths[i] ?? measureText(char)) : measureText(char),
 text: char,
 originIndex: textPosition
 });
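The hunks above thread an optional measureTextWidths callback through itemization so a whole run can be shaped once, with a per-character measureText fallback whenever the batched array has no entry for an index. A minimal sketch of that contract (the parameter names mirror the diff, but this wrapper itself is illustrative, not part of the package API):

```js
// Sketch only: per-codepoint widths from one batched call, falling back to
// per-character measurement when the shaper merged or skipped a cluster.
function itemWidths(text, measureText, measureTextWidths) {
  const chars = Array.from(text); // codepoints, not UTF-16 code units
  const widths = measureTextWidths ? measureTextWidths(text) : null;
  return chars.map((ch, i) =>
    widths ? (widths[i] ?? measureText(ch)) : measureText(ch));
}
```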
@@ -515,15 +516,21 @@
 }
 return items;
 }
-static itemizeParagraph(text, measureText, hyphenate, language, availablePatterns, lefthyphenmin, righthyphenmin, context) {
+static itemizeParagraph(text, measureText, measureTextWidths, hyphenate, language, availablePatterns, lefthyphenmin, righthyphenmin, context) {
 const items = [];
 const chars = Array.from(text);
-// Calculate CJK glue parameters once for consistency across all segments
-
-const
-
-
-
+// Calculate CJK glue parameters lazily and once for consistency across all segments
+let cjkGlueParams;
+const getCjkGlueParams = () => {
+if (!cjkGlueParams) {
+const baseCharWidth = measureText('字');
+cjkGlueParams = {
+width: 0,
+stretch: baseCharWidth * 0.04,
+shrink: baseCharWidth * 0.04
+};
+}
+return cjkGlueParams;
 };
 let buffer = '';
 let bufferStart = 0;
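The eager computation of CJK glue parameters becomes a lazily initialized getter, so paragraphs with no CJK run never pay for the measureText('字') probe. A generic sketch of the lazy-once pattern (measureText is assumed to be in scope, as in the surrounding function):

```js
// Sketch of the lazy-once getter behind getCjkGlueParams(): compute on first
// use, then hand back the cached object on every later call.
function lazyOnce(compute) {
  let value;
  return () => (value ??= compute());
}

const getGlueParams = lazyOnce(() => {
  const base = measureText('字'); // assumed available, as in itemizeParagraph
  return { width: 0, stretch: base * 0.04, shrink: base * 0.04 };
});
```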
@@ -533,7 +540,7 @@
 if (buffer.length === 0)
 return;
 if (bufferScript === 'cjk') {
-const cjkItems = this.itemizeCJKText(buffer, measureText, context, bufferStart,
+const cjkItems = this.itemizeCJKText(buffer, measureText, measureTextWidths, context, bufferStart, getCjkGlueParams());
 items.push(...cjkItems);
 }
 else {
@@ -726,7 +733,7 @@
 align: options.align || 'left',
 hyphenate: options.hyphenate || false
 });
-const { text, width, align = 'left', direction = 'ltr', hyphenate = false, language = 'en-us', respectExistingBreaks = true, measureText, hyphenationPatterns, unitsPerEm, letterSpacing = 0, tolerance = DEFAULT_TOLERANCE, pretolerance = DEFAULT_PRETOLERANCE, emergencyStretch = DEFAULT_EMERGENCY_STRETCH, autoEmergencyStretch, lefthyphenmin = DEFAULT_LEFT_HYPHEN_MIN, righthyphenmin = DEFAULT_RIGHT_HYPHEN_MIN, linepenalty = DEFAULT_LINE_PENALTY, adjdemerits = DEFAULT_FITNESS_DIFF_DEMERITS, hyphenpenalty = DEFAULT_HYPHEN_PENALTY, exhyphenpenalty = DEFAULT_EX_HYPHEN_PENALTY, doublehyphendemerits = DEFAULT_DOUBLE_HYPHEN_DEMERITS, looseness = 0, disableShortLineDetection = false, shortLineThreshold = SHORT_LINE_WIDTH_THRESHOLD } = options;
+const { text, width, align = 'left', direction = 'ltr', hyphenate = false, language = 'en-us', respectExistingBreaks = true, measureText, measureTextWidths, hyphenationPatterns, unitsPerEm, letterSpacing = 0, tolerance = DEFAULT_TOLERANCE, pretolerance = DEFAULT_PRETOLERANCE, emergencyStretch = DEFAULT_EMERGENCY_STRETCH, autoEmergencyStretch, lefthyphenmin = DEFAULT_LEFT_HYPHEN_MIN, righthyphenmin = DEFAULT_RIGHT_HYPHEN_MIN, linepenalty = DEFAULT_LINE_PENALTY, adjdemerits = DEFAULT_FITNESS_DIFF_DEMERITS, hyphenpenalty = DEFAULT_HYPHEN_PENALTY, exhyphenpenalty = DEFAULT_EX_HYPHEN_PENALTY, doublehyphendemerits = DEFAULT_DOUBLE_HYPHEN_DEMERITS, looseness = 0, disableShortLineDetection = false, shortLineThreshold = SHORT_LINE_WIDTH_THRESHOLD } = options;
 // Handle multiple paragraphs by processing each independently
 if (respectExistingBreaks && text.includes('\n')) {
 const paragraphs = text.split('\n');
@@ -789,9 +796,9 @@
 exHyphenPenalty: exhyphenpenalty,
 currentAlign: align,
 unitsPerEm,
-// measureText() includes trailing letter spacing after the final glyph of a token
+// measureText() includes trailing letter spacing after the final glyph of a token
 // Shaping applies letter spacing only between glyphs, so we subtract one
-// trailing letterSpacingFU per line segment (see computeAdjustmentRatio/createLines)
+// trailing letterSpacingFU per line segment (see computeAdjustmentRatio/createLines)
 letterSpacingFU: unitsPerEm ? letterSpacing * unitsPerEm : 0
 };
 if (!width || width === Infinity) {
@@ -810,7 +817,7 @@
 ];
 }
 // Itemize without hyphenation first (TeX approach: only compute if needed)
-const allItems = LineBreak.itemizeText(text, measureText, false, language, hyphenationPatterns, lefthyphenmin, righthyphenmin, context);
+const allItems = LineBreak.itemizeText(text, measureText, measureTextWidths, false, language, hyphenationPatterns, lefthyphenmin, righthyphenmin, context);
 if (allItems.length === 0) {
 return [];
 }
@@ -829,7 +836,7 @@
 let breaks = LineBreak.findBreakpoints(currentItems, width, pretolerance, looseness, false, 0, context);
 // Second pass: with hyphenation if first pass failed
 if (breaks.length === 0 && useHyphenation) {
-const itemsWithHyphenation = LineBreak.itemizeText(text, measureText, true, language, hyphenationPatterns, lefthyphenmin, righthyphenmin, context);
+const itemsWithHyphenation = LineBreak.itemizeText(text, measureText, measureTextWidths, true, language, hyphenationPatterns, lefthyphenmin, righthyphenmin, context);
 currentItems = itemsWithHyphenation;
 breaks = LineBreak.findBreakpoints(currentItems, width, tolerance, looseness, false, 0, context);
 }
@@ -1143,9 +1150,9 @@
 ? items[lineEnd].width
 : items[lineEnd].preBreakWidth;
 }
-// Correct for trailing letter spacing at the end of the line segment
+// Correct for trailing letter spacing at the end of the line segment
 // Our token measurement includes letter spacing after the final glyph;
-// shaping does not add letter spacing after the final glyph in a line
+// shaping does not add letter spacing after the final glyph in a line
 if (context?.letterSpacingFU && totalWidth !== 0) {
 totalWidth -= context.letterSpacingFU;
 }
@@ -1311,7 +1318,7 @@
 }
 }
 const lineText = lineTextParts.join('');
-// Correct for trailing letter spacing at the end of the line
+// Correct for trailing letter spacing at the end of the line
 if (context?.letterSpacingFU && naturalWidth !== 0) {
 naturalWidth -= context.letterSpacingFU;
 }
@@ -1368,7 +1375,7 @@
 finalNaturalWidth += item.width;
 }
 const finalLineText = finalLineTextParts.join('');
-// Correct for trailing letter spacing at the end of the final line
+// Correct for trailing letter spacing at the end of the final line
 if (context?.letterSpacingFU && finalNaturalWidth !== 0) {
 finalNaturalWidth -= context.letterSpacingFU;
 }
@@ -1405,12 +1412,21 @@
 }
 }

+// Memoize conversion per feature-object identity to avoid rebuilding the same
+// comma-separated string on every HarfBuzz shape call
+const featureStringCache = new WeakMap();
 // Convert feature objects to HarfBuzz comma-separated format
 function convertFontFeaturesToString(features) {
 if (!features || Object.keys(features).length === 0) {
 return undefined;
 }
+const cached = featureStringCache.get(features);
+if (cached !== undefined) {
+return cached ?? undefined;
+}
 const featureStrings = [];
+// Preserve insertion order of the input object
+// (The public API/tests expect this to be stable and predictable)
 for (const [tag, value] of Object.entries(features)) {
 if (!/^[a-zA-Z0-9]{4}$/.test(tag)) {
 logger.warn(`Invalid OpenType feature tag: "${tag}". Tags must be exactly 4 alphanumeric characters.`);
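A note on the memoization added above: WeakMap.get returns undefined on a miss, so a result that was computed but is legitimately undefined is stored as null and mapped back on read with `cached ?? undefined`. A standalone sketch of that null-sentinel pattern:

```js
// Sketch of WeakMap memoization keyed by object identity, with a null
// sentinel so "computed but undefined" is distinguishable from "not cached".
const cache = new WeakMap();
function memoized(features, convert) {
  const hit = cache.get(features);
  if (hit !== undefined) return hit ?? undefined; // null sentinel -> undefined
  const result = convert(features);               // may itself be undefined
  cache.set(features, result ?? null);
  return result;
}
```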
@@ -1429,10 +1445,63 @@
 logger.warn(`Invalid value for feature "${tag}": ${value}. Expected boolean or positive number.`);
 }
 }
-
+const result = featureStrings.length > 0 ? featureStrings.join(',') : undefined;
+featureStringCache.set(features, result ?? null);
+return result;
 }

 class TextMeasurer {
+// Shape once and return per-codepoint widths aligned with Array.from(text)
+// Groups glyph advances by HarfBuzz cluster (cl)
+// Includes trailing per-glyph letter spacing like measureTextWidth
+static measureTextWidths(loadedFont, text, letterSpacing = 0) {
+const chars = Array.from(text);
+if (chars.length === 0)
+return [];
+// HarfBuzz clusters are UTF-16 code unit indices
+const startToCharIndex = new Map();
+let codeUnitIndex = 0;
+for (let i = 0; i < chars.length; i++) {
+startToCharIndex.set(codeUnitIndex, i);
+codeUnitIndex += chars[i].length;
+}
+const widths = new Array(chars.length).fill(0);
+const buffer = loadedFont.hb.createBuffer();
+try {
+buffer.addText(text);
+buffer.guessSegmentProperties();
+const featuresString = convertFontFeaturesToString(loadedFont.fontFeatures);
+loadedFont.hb.shape(loadedFont.font, buffer, featuresString);
+const glyphInfos = buffer.json(loadedFont.font);
+const letterSpacingInFontUnits = letterSpacing * loadedFont.upem;
+for (let i = 0; i < glyphInfos.length; i++) {
+const glyph = glyphInfos[i];
+const cl = glyph.cl ?? 0;
+let charIndex = startToCharIndex.get(cl);
+// Fallback if cl lands mid-codepoint
+if (charIndex === undefined) {
+// Find the closest start <= cl
+for (let back = cl; back >= 0; back--) {
+const candidate = startToCharIndex.get(back);
+if (candidate !== undefined) {
+charIndex = candidate;
+break;
+}
+}
+}
+if (charIndex === undefined)
+continue;
+widths[charIndex] += glyph.ax;
+if (letterSpacingInFontUnits !== 0) {
+widths[charIndex] += letterSpacingInFontUnits;
+}
+}
+return widths;
+}
+finally {
+buffer.destroy();
+}
+}
 static measureTextWidth(loadedFont, text, letterSpacing = 0) {
 const buffer = loadedFont.hb.createBuffer();
 buffer.addText(text);
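The new measureTextWidths shapes the whole string once and buckets glyph advances by HarfBuzz cluster; the subtle part is that cluster values (`cl`) count UTF-16 code units while the widths array is indexed per codepoint (Array.from). A small sketch of that mapping, with an emoji showing the surrogate-pair case:

```js
// Sketch: map each codepoint's starting UTF-16 code unit to its codepoint
// index — this is how cluster values are translated into widths[] slots above.
function codeUnitStartToCodepointIndex(text) {
  const map = new Map();
  let unit = 0;
  Array.from(text).forEach((ch, i) => {
    map.set(unit, i);
    unit += ch.length; // 2 for astral codepoints such as emoji
  });
  return map;
}

codeUnitStartToCodepointIndex('a😀b'); // Map { 0 => 0, 1 => 1, 3 => 2 }
```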
@@ -1489,7 +1558,8 @@
 unitsPerEm: this.loadedFont.upem,
 letterSpacing,
 measureText: (textToMeasure) => TextMeasurer.measureTextWidth(this.loadedFont, textToMeasure, letterSpacing // Letter spacing included in width measurements
-)
+),
+measureTextWidths: (textToMeasure) => TextMeasurer.measureTextWidths(this.loadedFont, textToMeasure, letterSpacing)
 });
 }
 else {
@@ -1511,6 +1581,15 @@
 return { lines };
 }
 applyAlignment(vertices, options) {
+const { offset, adjustedBounds } = this.computeAlignmentOffset(options);
+if (offset !== 0) {
+for (let i = 0; i < vertices.length; i += 3) {
+vertices[i] += offset;
+}
+}
+return { offset, adjustedBounds };
+}
+computeAlignmentOffset(options) {
 const { width, align, planeBounds } = options;
 let offset = 0;
 const adjustedBounds = {
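Splitting applyAlignment into a thin wrapper over computeAlignmentOffset lets callers read the alignment shift without touching vertex data; the WebGPU path later in this diff calls computeAlignmentOffset directly. A usage sketch (the layout, width, align, and planeBounds values are assumed to come from the surrounding setup):

```js
// Sketch: query the offset, then apply it to whichever vertex stream you own.
const { offset, adjustedBounds } = layout.computeAlignmentOffset({ width, align, planeBounds });
for (let i = 0; i < vertices.length; i += 3) {
  vertices[i] += offset; // x components only
}
```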
@@ -1522,17 +1601,13 @@
 if (align === 'center') {
 offset = (width - lineWidth) / 2 - planeBounds.min.x;
 }
-else
+else {
 offset = width - planeBounds.max.x;
 }
-
-
-
-
-}
-adjustedBounds.min.x += offset;
-adjustedBounds.max.x += offset;
-}
+}
+if (offset !== 0) {
+adjustedBounds.min.x += offset;
+adjustedBounds.max.x += offset;
 }
 return { offset, adjustedBounds };
 }
@@ -2629,7 +2704,7 @@
 var libtess_minExports = libtess_min.exports;

 class Tessellator {
-process(paths, removeOverlaps = true, isCFF = false) {
+process(paths, removeOverlaps = true, isCFF = false, needsExtrusionContours = true) {
 if (paths.length === 0) {
 return { triangles: { vertices: [], indices: [] }, contours: [] };
 }
@@ -2638,66 +2713,124 @@
 return { triangles: { vertices: [], indices: [] }, contours: [] };
 }
 logger.log(`Tessellator: removeOverlaps=${removeOverlaps}, processing ${valid.length} paths`);
-return this.tessellate(valid, removeOverlaps, isCFF);
-}
-tessellate(paths, removeOverlaps, isCFF) {
-//
-const
-
-
-
+return this.tessellate(valid, removeOverlaps, isCFF, needsExtrusionContours);
+}
+tessellate(paths, removeOverlaps, isCFF, needsExtrusionContours) {
+// libtess expects CCW winding; TTF outer contours are CW
+const needsWindingReversal = !isCFF && !removeOverlaps;
+let originalContours;
+let tessContours;
+if (needsWindingReversal) {
+tessContours = this.pathsToContours(paths, true);
+if (removeOverlaps || needsExtrusionContours) {
+originalContours = this.pathsToContours(paths);
+}
+}
+else {
+originalContours = this.pathsToContours(paths);
+tessContours = originalContours;
+}
+let extrusionContours = needsExtrusionContours
+? originalContours ?? this.pathsToContours(paths)
+: [];
 if (removeOverlaps) {
 logger.log('Two-pass: boundary extraction then triangulation');
-// Extract boundaries to remove overlaps
 perfLogger.start('Tessellator.boundaryPass', {
-contourCount:
+contourCount: tessContours.length
 });
-const boundaryResult = this.performTessellation(
+const boundaryResult = this.performTessellation(originalContours, 'boundary');
 perfLogger.end('Tessellator.boundaryPass');
 if (!boundaryResult) {
 logger.warn('libtess returned empty result from boundary pass');
 return { triangles: { vertices: [], indices: [] }, contours: [] };
 }
-//
-
-
+// Boundary pass normalizes winding (outer CCW, holes CW)
+tessContours = this.boundaryToContours(boundaryResult);
+if (needsExtrusionContours) {
+extrusionContours = tessContours;
+}
+logger.log(`Boundary pass created ${tessContours.length} contours. Starting triangulation pass.`);
 }
 else {
 logger.log(`Single-pass triangulation for ${isCFF ? 'CFF' : 'TTF'}`);
+// TTF contours may have inconsistent winding; check if we need normalization
+if (needsExtrusionContours && !isCFF) {
+const needsNormalization = this.needsWindingNormalization(extrusionContours);
+if (needsNormalization) {
+logger.log('Complex topology detected, running boundary pass for winding normalization');
+perfLogger.start('Tessellator.windingNormalization', {
+contourCount: extrusionContours.length
+});
+const boundaryResult = this.performTessellation(extrusionContours, 'boundary');
+perfLogger.end('Tessellator.windingNormalization');
+if (boundaryResult) {
+extrusionContours = this.boundaryToContours(boundaryResult);
+}
+}
+else {
+logger.log('Simple topology, skipping winding normalization');
+}
+}
 }
-// Triangulate the contours
 perfLogger.start('Tessellator.triangulationPass', {
-contourCount:
+contourCount: tessContours.length
 });
-const triangleResult = this.performTessellation(
+const triangleResult = this.performTessellation(tessContours, 'triangles');
 perfLogger.end('Tessellator.triangulationPass');
 if (!triangleResult) {
 const warning = removeOverlaps
 ? 'libtess returned empty result from triangulation pass'
 : 'libtess returned empty result from single-pass triangulation';
 logger.warn(warning);
-return { triangles: { vertices: [], indices: [] }, contours };
+return { triangles: { vertices: [], indices: [] }, contours: extrusionContours };
 }
 return {
 triangles: {
 vertices: triangleResult.vertices,
 indices: triangleResult.indices || []
 },
-contours
+contours: extrusionContours
 };
 }
-pathsToContours(paths) {
-
-
-
-
+pathsToContours(paths, reversePoints = false) {
+const contours = new Array(paths.length);
+for (let p = 0; p < paths.length; p++) {
+const points = paths[p].points;
+const pointCount = points.length;
+// Clipper-style paths can be explicitly closed by repeating the first point at the end
+// Normalize to a single closing vertex for stable side wall generation
+const isClosed = pointCount > 1 &&
+points[0].x === points[pointCount - 1].x &&
+points[0].y === points[pointCount - 1].y;
+const end = isClosed ? pointCount - 1 : pointCount;
+// +1 to append a closing vertex
+const contour = new Array((end + 1) * 2);
+let i = 0;
+if (reversePoints) {
+for (let k = end - 1; k >= 0; k--) {
+const pt = points[k];
+contour[i++] = pt.x;
+contour[i++] = pt.y;
+}
 }
-
-
+else {
+for (let k = 0; k < end; k++) {
+const pt = points[k];
+contour[i++] = pt.x;
+contour[i++] = pt.y;
+}
+}
+// Some glyphs omit closePath, leaving gaps in extruded side walls
+if (i >= 2) {
+contour[i++] = contour[0];
+contour[i++] = contour[1];
+}
+contours[p] = contour;
+}
+return contours;
 }
 performTessellation(contours, mode) {
 const tess = new libtess_minExports.GluTesselator();
-// Set winding rule to NON-ZERO
 tess.gluTessProperty(libtess_minExports.gluEnum.GLU_TESS_WINDING_RULE, libtess_minExports.windingRule.GLU_TESS_WINDING_NONZERO);
 const vertices = [];
 const indices = [];
@@ -2720,7 +2853,7 @@
 });
 tess.gluTessCallback(libtess_minExports.gluEnum.GLU_TESS_END, () => {
 if (currentContour.length > 0) {
-contourIndices.push(
+contourIndices.push(currentContour);
 }
 });
 }
@@ -2765,7 +2898,6 @@
 const vertIdx = idx * 2;
 contour.push(boundaryResult.vertices[vertIdx], boundaryResult.vertices[vertIdx + 1]);
 }
-// Ensure contour is closed for side wall generation
 if (contour.length > 2) {
 if (contour[0] !== contour[contour.length - 2] ||
 contour[1] !== contour[contour.length - 1]) {
@@ -2776,11 +2908,45 @@
 }
 return contours;
 }
-
-
-
-
-
+// Check if contours need winding normalization via boundary pass
+// Returns false if topology is simple enough to skip the expensive pass
+needsWindingNormalization(contours) {
+if (contours.length === 0)
+return false;
+// Heuristic 1: Single contour never needs normalization
+if (contours.length === 1)
+return false;
+// Heuristic 2: All same winding = all outers, no holes
+// Compute signed areas
+let firstSign = null;
+for (const contour of contours) {
+const area = this.signedArea(contour);
+const sign = area >= 0 ? 1 : -1;
+if (firstSign === null) {
+firstSign = sign;
+}
+else if (sign !== firstSign) {
+// Mixed winding detected → might have holes or complex topology
+return true;
+}
+}
+// All same winding → simple topology, no normalization needed
+return false;
+}
+// Compute signed area (CCW = positive, CW = negative)
+signedArea(contour) {
+let area = 0;
+const len = contour.length;
+if (len < 6)
+return 0; // Need at least 3 points
+for (let i = 0; i < len; i += 2) {
+const x1 = contour[i];
+const y1 = contour[i + 1];
+const x2 = contour[(i + 2) % len];
+const y2 = contour[(i + 3) % len];
+area += x1 * y2 - x2 * y1;
+}
+return area / 2;
 }
 }

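needsWindingNormalization skips the expensive boundary pass when every contour has the same orientation, using the shoelace formula: signed area is positive for counter-clockwise contours and negative for clockwise ones, so mixed signs imply holes (or genuinely complex topology). A worked check of the formula:

```js
// Shoelace signed area over a flat [x0, y0, x1, y1, ...] contour, as used by
// the winding heuristic above.
function signedArea(flat) {
  let area = 0;
  for (let i = 0; i < flat.length; i += 2) {
    const x1 = flat[i], y1 = flat[i + 1];
    const x2 = flat[(i + 2) % flat.length], y2 = flat[(i + 3) % flat.length];
    area += x1 * y2 - x2 * y1;
  }
  return area / 2;
}

signedArea([0, 0, 1, 0, 1, 1, 0, 1]); //  1 (CCW unit square)
signedArea([0, 0, 0, 1, 1, 1, 1, 0]); // -1 (CW unit square)
```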
@@ -2790,12 +2956,11 @@
 const points = geometry.triangles.vertices;
 const triangleIndices = geometry.triangles.indices;
 const numPoints = points.length / 2;
-// Count side-wall segments (
+// Count side-wall segments (4 vertices + 6 indices per segment)
 let sideSegments = 0;
 if (depth !== 0) {
 for (const contour of geometry.contours) {
-//
-// Contours are expected to be closed (last point repeats first), so segments = (nPoints - 1)
+// Contours are closed (last point repeats first)
 const contourPoints = contour.length / 2;
 if (contourPoints >= 2)
 sideSegments += contourPoints - 1;
@@ -2811,7 +2976,7 @@
 : triangleIndices.length * 2 + sideSegments * 6;
 const indices = new Uint32Array(indexCount);
 if (depth === 0) {
-//
+// Single-sided flat geometry at z=0
 let vPos = 0;
 for (let i = 0; i < points.length; i += 2) {
 vertices[vPos] = points[i];
@@ -2822,42 +2987,44 @@
 normals[vPos + 2] = 1;
 vPos += 3;
 }
+// libtess outputs CCW, use as-is for +Z facing geometry
 for (let i = 0; i < triangleIndices.length; i++) {
 indices[i] = triangleIndices[i];
 }
 return { vertices, normals, indices };
 }
-//
+// Extruded geometry: front at z=0, back at z=depth
 const minBackOffset = unitsPerEm * 0.000025;
 const backZ = depth <= minBackOffset ? minBackOffset : depth;
-//
-for (let p = 0, vi = 0; p < points.length; p += 2, vi++) {
-const base = vi * 3;
-vertices[base] = points[p];
-vertices[base + 1] = points[p + 1];
-vertices[base + 2] = 0;
-normals[base] = 0;
-normals[base + 1] = 0;
-normals[base + 2] = 1;
-}
-// Fill back vertices/normals (numPoints..2*numPoints-1)
+// Generate both caps in one pass
 for (let p = 0, vi = 0; p < points.length; p += 2, vi++) {
-const
-
-
-
-
-
-
-
+const x = points[p];
+const y = points[p + 1];
+// Cap at z=0
+const base0 = vi * 3;
+vertices[base0] = x;
+vertices[base0 + 1] = y;
+vertices[base0 + 2] = 0;
+normals[base0] = 0;
+normals[base0 + 1] = 0;
+normals[base0 + 2] = -1;
+// Cap at z=depth
+const baseD = (numPoints + vi) * 3;
+vertices[baseD] = x;
+vertices[baseD + 1] = y;
+vertices[baseD + 2] = backZ;
+normals[baseD] = 0;
+normals[baseD + 1] = 0;
+normals[baseD + 2] = 1;
+}
+// libtess outputs CCW triangles (viewed from +Z)
+// Z=0 cap faces -Z, reverse winding
 for (let i = 0; i < triangleIndices.length; i++) {
-indices[i] = triangleIndices[i];
+indices[i] = triangleIndices[triangleIndices.length - 1 - i];
 }
-//
+// Z=depth cap faces +Z, use original winding
 for (let i = 0; i < triangleIndices.length; i++) {
-indices[triangleIndices.length + i] =
-triangleIndices[triangleIndices.length - 1 - i] + numPoints;
+indices[triangleIndices.length + i] = triangleIndices[i] + numPoints;
 }
 // Side walls
 let nextVertex = numPoints * 2;
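The rewritten cap generation gives the z=0 cap a (0, 0, -1) normal and reversed index order, while the z=depth cap keeps libtess's CCW order and faces +Z, so both caps of the extrusion face outward. Reversing a flat CCW index list is enough to flip each triangle's winding, as in this sketch:

```js
// Sketch: reversing a CCW triangle index list flips the facing direction,
// matching the triangleIndices[length - 1 - i] loop used for the z=0 cap.
const ccw = [0, 1, 2, 2, 3, 0];   // quad facing +Z
const cw = ccw.slice().reverse(); // [0, 3, 2, 2, 1, 0], faces -Z
```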
@@ -2868,7 +3035,7 @@
 const p0y = contour[i + 1];
 const p1x = contour[i + 2];
 const p1y = contour[i + 3];
-//
+// Perpendicular normal for this wall segment
 const ex = p1x - p0x;
 const ey = p1y - p0y;
 const lenSq = ex * ex + ey * ey;
@@ -2881,7 +3048,7 @@
 }
 const baseVertex = nextVertex;
 const base = baseVertex * 3;
-//
+// Wall quad: front edge at z=0, back edge at z=depth
 vertices[base] = p0x;
 vertices[base + 1] = p0y;
 vertices[base + 2] = 0;
@@ -2894,7 +3061,7 @@
 vertices[base + 9] = p1x;
 vertices[base + 10] = p1y;
 vertices[base + 11] = backZ;
-//
+// Wall normals point perpendicular to edge
 normals[base] = nx;
 normals[base + 1] = ny;
 normals[base + 2] = 0;
@@ -2907,7 +3074,7 @@
 normals[base + 9] = nx;
 normals[base + 10] = ny;
 normals[base + 11] = 0;
-//
+// Two triangles per wall segment
 indices[idxPos++] = baseVertex;
 indices[idxPos++] = baseVertex + 1;
 indices[idxPos++] = baseVertex + 2;
@@ -3142,21 +3309,23 @@
 return path;
 }
 this.stats.originalPointCount += path.points.length;
-
+// Most paths are already immutable after collection; avoid copying large point arrays
+// The optimizers below never mutate the input `points` array
+const points = path.points;
 if (points.length < 5) {
 return path;
 }
-
-if (
+let optimized = this.simplifyPathVW(points, this.config.areaThreshold);
+if (optimized.length < 3) {
 return path;
 }
-
-if (
+optimized = this.removeColinearPoints(optimized, this.config.colinearThreshold);
+if (optimized.length < 3) {
 return path;
 }
 return {
 ...path,
-points
+points: optimized
 };
 }
 // Visvalingam-Whyatt algorithm
@@ -3610,7 +3779,7 @@
 if (this.currentGlyphPaths.length > 0) {
 this.collectedGlyphs.push({
 glyphId: this.currentGlyphId,
-paths:
+paths: this.currentGlyphPaths,
 bounds: {
 min: {
 x: this.currentGlyphBounds.min.x,
@@ -3662,11 +3831,10 @@
 return;
 }
 const flattenedPoints = this.polygonizer.polygonizeQuadratic(start, control, end);
-for (const point of flattenedPoints) {
-this.updateBounds(point);
-}
 for (let i = 0; i < flattenedPoints.length; i++) {
-
+const pt = flattenedPoints[i];
+this.updateBounds(pt);
+this.currentPath.points.push(pt);
 }
 this.currentPoint = end;
 }
@@ -3686,11 +3854,10 @@
 return;
 }
 const flattenedPoints = this.polygonizer.polygonizeCubic(start, control1, control2, end);
-for (const point of flattenedPoints) {
-this.updateBounds(point);
-}
 for (let i = 0; i < flattenedPoints.length; i++) {
-
+const pt = flattenedPoints[i];
+this.updateBounds(pt);
+this.currentPath.points.push(pt);
 }
 this.currentPoint = end;
 }
@@ -3880,6 +4047,7 @@
 constructor(cache, loadedFont) {
 this.fontId = 'default';
 this.cacheKeyPrefix = 'default';
+this.emptyGlyphs = new Set();
 this.cache = cache;
 this.loadedFont = loadedFont;
 this.tessellator = new Tessellator();
@@ -3933,63 +4101,34 @@
 }
 // Build instanced geometry from glyph contours
 buildInstancedGeometry(clustersByLine, depth, removeOverlaps, isCFF, separateGlyphs = false, coloredTextIndices) {
-[28 removed lines not shown in this diff view]
-nextSize *= 2;
-const next = new Uint32Array(nextSize);
-next.set(buffer);
-return next;
-};
-const appendGeometryToBuffers = (data, position, vertexOffset) => {
-const v = data.vertices;
-const n = data.normals;
-const idx = data.indices;
-// Grow buffers as needed
-vertexBuffer = ensureFloatCapacity(vertexBuffer, vertexPos + v.length);
-normalBuffer = ensureFloatCapacity(normalBuffer, normalPos + n.length);
-indexBuffer = ensureIndexCapacity(indexBuffer, indexPos + idx.length);
-// Vertices: translate by position
-const px = position.x;
-const py = position.y;
-const pz = position.z;
-for (let j = 0; j < v.length; j += 3) {
-vertexBuffer[vertexPos++] = v[j] + px;
-vertexBuffer[vertexPos++] = v[j + 1] + py;
-vertexBuffer[vertexPos++] = v[j + 2] + pz;
-}
-// Normals: straight copy
-normalBuffer.set(n, normalPos);
-normalPos += n.length;
-// Indices: copy with vertex offset
-for (let j = 0; j < idx.length; j++) {
-indexBuffer[indexPos++] = idx[j] + vertexOffset;
-}
+if (isLogEnabled) {
+let wordCount = 0;
+for (let i = 0; i < clustersByLine.length; i++) {
+wordCount += clustersByLine[i].length;
+}
+perfLogger.start('GlyphGeometryBuilder.buildInstancedGeometry', {
+lineCount: clustersByLine.length,
+wordCount,
+depth,
+removeOverlaps
+});
+}
+else {
+perfLogger.start('GlyphGeometryBuilder.buildInstancedGeometry');
+}
+const tasks = [];
+let totalVertexFloats = 0;
+let totalNormalFloats = 0;
+let totalIndexCount = 0;
+let vertexCursor = 0; // vertex offset (not float offset)
+const pushTask = (data, px, py, pz) => {
+const vertexStart = vertexCursor;
+tasks.push({ data, px, py, pz, vertexStart });
+totalVertexFloats += data.vertices.length;
+totalNormalFloats += data.normals.length;
+totalIndexCount += data.indices.length;
+vertexCursor += data.vertices.length / 3;
+return vertexStart;
 };
 const glyphInfos = [];
 const planeBounds = {
@@ -3999,6 +4138,9 @@
 for (let lineIndex = 0; lineIndex < clustersByLine.length; lineIndex++) {
 const line = clustersByLine[lineIndex];
 for (const cluster of line) {
+const clusterX = cluster.position.x;
+const clusterY = cluster.position.y;
+const clusterZ = cluster.position.z;
 const clusterGlyphContours = [];
 for (const glyph of cluster.glyphs) {
 clusterGlyphContours.push(this.getContoursForGlyph(glyph.g));
@@ -4039,7 +4181,7 @@
 // Use glyph-level caching when separateGlyphs is set or when cluster contains colored text
 const forceSeparate = separateGlyphs || clusterHasColoredGlyphs;
 // Iterate over the geometric groups identified by BoundaryClusterer
-// logical groups (words)
+// logical groups (words) split into geometric sub-groups (e.g. "aa", "XX", "bb")
 for (const groupIndices of boundaryGroups) {
 const isOverlappingGroup = groupIndices.length > 1;
 const shouldCluster = isOverlappingGroup && !forceSeparate;
@@ -4071,16 +4213,19 @@
 // Calculate the absolute position of this sub-cluster based on its first glyph
 // (since the cached geometry is relative to that first glyph)
 const firstGlyphInGroup = subClusterGlyphs[0];
-const
-const
-
+const groupPosX = clusterX + (firstGlyphInGroup.x ?? 0);
+const groupPosY = clusterY + (firstGlyphInGroup.y ?? 0);
+const groupPosZ = clusterZ;
+const vertexStart = pushTask(cachedCluster, groupPosX, groupPosY, groupPosZ);
 const clusterVertexCount = cachedCluster.vertices.length / 3;
 for (let i = 0; i < groupIndices.length; i++) {
 const originalIndex = groupIndices[i];
 const glyph = cluster.glyphs[originalIndex];
 const glyphContours = clusterGlyphContours[originalIndex];
-const
-const
+const glyphPosX = clusterX + (glyph.x ?? 0);
+const glyphPosY = clusterY + (glyph.y ?? 0);
+const glyphPosZ = clusterZ;
+const glyphInfo = this.createGlyphInfo(glyph, vertexStart, clusterVertexCount, glyphPosX, glyphPosY, glyphPosZ, glyphContours, depth);
 glyphInfos.push(glyphInfo);
 this.updatePlaneBounds(glyphInfo.bounds, planeBounds);
 }
@@ -4090,24 +4235,26 @@
 for (const i of groupIndices) {
 const glyph = cluster.glyphs[i];
 const glyphContours = clusterGlyphContours[i];
-const
+const glyphPosX = clusterX + (glyph.x ?? 0);
+const glyphPosY = clusterY + (glyph.y ?? 0);
+const glyphPosZ = clusterZ;
 // Skip glyphs with no paths (spaces, zero-width characters, etc.)
 if (glyphContours.paths.length === 0) {
-const glyphInfo = this.createGlyphInfo(glyph, 0, 0,
+const glyphInfo = this.createGlyphInfo(glyph, 0, 0, glyphPosX, glyphPosY, glyphPosZ, glyphContours, depth);
 glyphInfos.push(glyphInfo);
 continue;
 }
-
+const glyphCacheKey = getGlyphCacheKey(this.cacheKeyPrefix, glyph.g, depth, removeOverlaps);
+let cachedGlyph = this.cache.get(glyphCacheKey);
 if (!cachedGlyph) {
 cachedGlyph = this.tessellateGlyph(glyphContours, depth, removeOverlaps, isCFF);
-this.cache.set(
+this.cache.set(glyphCacheKey, cachedGlyph);
 }
 else {
 cachedGlyph.useCount++;
 }
-const
-
-const glyphInfo = this.createGlyphInfo(glyph, vertexOffset, cachedGlyph.vertices.length / 3, glyphPosition, glyphContours, depth);
+const vertexStart = pushTask(cachedGlyph, glyphPosX, glyphPosY, glyphPosZ);
+const glyphInfo = this.createGlyphInfo(glyph, vertexStart, cachedGlyph.vertices.length / 3, glyphPosX, glyphPosY, glyphPosZ, glyphContours, depth);
 glyphInfos.push(glyphInfo);
 this.updatePlaneBounds(glyphInfo.bounds, planeBounds);
 }
@@ -4115,10 +4262,33 @@
 }
 }
 }
-//
-const vertexArray =
-const normalArray =
-const indexArray =
+// Allocate exact-sized buffers and fill once
+const vertexArray = new Float32Array(totalVertexFloats);
+const normalArray = new Float32Array(totalNormalFloats);
+const indexArray = new Uint32Array(totalIndexCount);
+let vertexPos = 0; // float index (multiple of 3)
+let normalPos = 0; // float index (multiple of 3)
+let indexPos = 0; // index count
+for (let t = 0; t < tasks.length; t++) {
+const task = tasks[t];
+const v = task.data.vertices;
+const n = task.data.normals;
+const idx = task.data.indices;
+const px = task.px;
+const py = task.py;
+const pz = task.pz;
+for (let j = 0; j < v.length; j += 3) {
+vertexArray[vertexPos++] = v[j] + px;
+vertexArray[vertexPos++] = v[j + 1] + py;
+vertexArray[vertexPos++] = v[j + 2] + pz;
+}
+normalArray.set(n, normalPos);
+normalPos += n.length;
+const vertexStart = task.vertexStart;
+for (let j = 0; j < idx.length; j++) {
+indexArray[indexPos++] = idx[j] + vertexStart;
+}
+}
 perfLogger.end('GlyphGeometryBuilder.buildInstancedGeometry');
 return {
 vertices: vertexArray,
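buildInstancedGeometry now records per-glyph copy tasks and running totals first, then allocates exactly sized typed arrays and fills them in a single pass, replacing the old grow-and-copy buffers. A reduced sketch of that size-then-fill pattern (the chunk shape here is illustrative):

```js
// Sketch: sum sizes, allocate once, then copy while applying the per-chunk
// translation — no intermediate reallocations.
function mergeChunks(chunks /* [{ vertices: Float32Array, offsetX: number }] */) {
  const total = chunks.reduce((sum, c) => sum + c.vertices.length, 0);
  const out = new Float32Array(total);
  let pos = 0;
  for (const c of chunks) {
    for (let j = 0; j < c.vertices.length; j += 3) {
      out[pos++] = c.vertices[j] + c.offsetX;
      out[pos++] = c.vertices[j + 1];
      out[pos++] = c.vertices[j + 2];
    }
  }
  return out;
}
```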
@@ -4143,7 +4313,7 @@
 const roundedDepth = Math.round(depth * 1000) / 1000;
 return `${this.cacheKeyPrefix}_${ids}_${roundedDepth}_${removeOverlaps}`;
 }
-createGlyphInfo(glyph, vertexStart, vertexCount,
+createGlyphInfo(glyph, vertexStart, vertexCount, positionX, positionY, positionZ, contours, depth) {
 return {
 textIndex: glyph.absoluteTextIndex,
 lineIndex: glyph.lineIndex,
@@ -4151,19 +4321,30 @@
 vertexCount,
 bounds: {
 min: {
-x: contours.bounds.min.x +
-y: contours.bounds.min.y +
-z:
+x: contours.bounds.min.x + positionX,
+y: contours.bounds.min.y + positionY,
+z: positionZ
 },
 max: {
-x: contours.bounds.max.x +
-y: contours.bounds.max.y +
-z:
+x: contours.bounds.max.x + positionX,
+y: contours.bounds.max.y + positionY,
+z: positionZ + depth
 }
 }
 };
 }
 getContoursForGlyph(glyphId) {
+// Fast path: skip HarfBuzz draw for known-empty glyphs (spaces, zero-width, etc)
+if (this.emptyGlyphs.has(glyphId)) {
+return {
+glyphId,
+paths: [],
+bounds: {
+min: { x: 0, y: 0 },
+max: { x: 0, y: 0 }
+}
+};
+}
 const key = `${this.cacheKeyPrefix}_${glyphId}`;
 const cached = this.contourCache.get(key);
 if (cached) {
@@ -4184,11 +4365,15 @@
 max: { x: 0, y: 0 }
 }
 };
+// Mark glyph as empty for future fast-path
+if (contours.paths.length === 0) {
+this.emptyGlyphs.add(glyphId);
+}
 this.contourCache.set(key, contours);
 return contours;
 }
 tessellateGlyphCluster(paths, depth, isCFF) {
-const processedGeometry = this.tessellator.process(paths, true, isCFF);
+const processedGeometry = this.tessellator.process(paths, true, isCFF, depth !== 0);
 return this.extrudeAndPackage(processedGeometry, depth);
 }
 extrudeAndPackage(processedGeometry, depth) {
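The builder now remembers glyph IDs that produced no outline, so repeated spaces and zero-width characters bypass contour extraction entirely. A sketch of that negative-result cache (drawGlyph is a stand-in for the real contour extraction):

```js
// Sketch of the empty-glyph fast path: cache IDs that yielded no paths and
// return an empty result without calling into the font again.
const emptyGlyphs = new Set();
function getContours(glyphId, drawGlyph) {
  if (emptyGlyphs.has(glyphId)) return { glyphId, paths: [] };
  const contours = drawGlyph(glyphId);
  if (contours.paths.length === 0) emptyGlyphs.add(glyphId);
  return contours;
}
```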
@@ -4236,7 +4421,7 @@
 glyphId: glyphContours.glyphId,
 pathCount: glyphContours.paths.length
 });
-const processedGeometry = this.tessellator.process(glyphContours.paths, removeOverlaps, isCFF);
+const processedGeometry = this.tessellator.process(glyphContours.paths, removeOverlaps, isCFF, depth !== 0);
 perfLogger.end('GlyphGeometryBuilder.tessellateGlyph');
 return this.extrudeAndPackage(processedGeometry, depth);
 }
@@ -4306,8 +4491,11 @@
 const clusters = [];
 let currentClusterGlyphs = [];
 let currentClusterText = '';
-let
-let
+let clusterStartX = 0;
+let clusterStartY = 0;
+let cursorX = lineInfo.xOffset;
+let cursorY = -lineIndex * scaledLineHeight;
+const cursorZ = 0;
 // Apply letter spacing after each glyph to match width measurements used during line breaking
 const letterSpacingFU = letterSpacing * this.loadedFont.upem;
 const spaceAdjustment = this.calculateSpaceAdjustment(lineInfo, align, letterSpacing);
@@ -4332,31 +4520,31 @@
 clusters.push({
 text: currentClusterText,
 glyphs: currentClusterGlyphs,
-position:
+position: new Vec3(clusterStartX, clusterStartY, cursorZ)
 });
 currentClusterGlyphs = [];
 currentClusterText = '';
 }
 }
-const
-
-.add(new Vec3(glyph.dx, glyph.dy, 0));
+const absoluteGlyphX = cursorX + glyph.dx;
+const absoluteGlyphY = cursorY + glyph.dy;
 if (!isWhitespace) {
 if (currentClusterGlyphs.length === 0) {
-
+clusterStartX = absoluteGlyphX;
+clusterStartY = absoluteGlyphY;
 }
-glyph.x =
-glyph.y =
+glyph.x = absoluteGlyphX - clusterStartX;
+glyph.y = absoluteGlyphY - clusterStartY;
 currentClusterGlyphs.push(glyph);
 currentClusterText += lineInfo.text[glyph.cl];
 }
-
-
+cursorX += glyph.ax;
+cursorY += glyph.ay;
 if (letterSpacingFU !== 0 && i < glyphInfos.length - 1) {
-
+cursorX += letterSpacingFU;
 }
 if (isWhitespace) {
-
+cursorX += spaceAdjustment;
 }
 // CJK glue adjustment (must match exactly where LineBreak adds glue)
 if (cjkAdjustment !== 0 && i < glyphInfos.length - 1 && !isWhitespace) {
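Cluster positioning now advances plain cursorX/cursorY numbers instead of chaining Vec3.add allocations, and each glyph stores its offset relative to the cluster start. A sketch of the scalar pen-advance loop (place is a hypothetical consumer):

```js
// Sketch: HarfBuzz-style dx/dy offsets and ax/ay advances driven by two
// scalars, with no per-glyph vector objects.
let cursorX = 0, cursorY = 0;
for (const g of glyphs) {
  place(g, cursorX + g.dx, cursorY + g.dy); // absolute placement
  cursorX += g.ax;                          // advance the pen
  cursorY += g.ay;
}
```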
@@ -4377,7 +4565,7 @@
 shouldApply = false;
 }
 if (shouldApply) {
-
+cursorX += cjkAdjustment;
 }
 }
 }
@@ -4386,7 +4574,7 @@
 clusters.push({
 text: currentClusterText,
 glyphs: currentClusterGlyphs,
-position:
+position: new Vec3(clusterStartX, clusterStartY, cursorZ)
 });
 }
 return clusters;
@@ -5213,9 +5401,8 @@
 const loadedFont = await Text.resolveFont(options);
 const text = new Text();
 text.setLoadedFont(loadedFont);
-//
-const
-const result = await text.createGeometry(geometryOptions);
+// Pass full options so createGeometry honors maxCacheSizeMB etc
+const result = await text.createGeometry(options);
 // Recursive update function
 const update = async (newOptions) => {
 // Merge options - preserve font from original options if not provided
@@ -5237,8 +5424,7 @@
 }
 // Update closure options for next time
 options = mergedOptions;
-const
-const newResult = await text.createGeometry(currentGeometryOptions);
+const newResult = await text.createGeometry(options);
 return {
 ...newResult,
 getLoadedFont: () => text.getLoadedFont(),
@@ -5663,7 +5849,7 @@
 if (!this.textLayout) {
 this.textLayout = new TextLayout(this.loadedFont);
 }
-const alignmentResult = this.textLayout.
+const alignmentResult = this.textLayout.computeAlignmentOffset({
 width,
 align,
 planeBounds
@@ -5672,9 +5858,19 @@
 planeBounds.min.x = alignmentResult.adjustedBounds.min.x;
 planeBounds.max.x = alignmentResult.adjustedBounds.max.x;
 const finalScale = size / this.loadedFont.upem;
+const offsetScaled = offset * finalScale;
 // Scale vertices only (normals are unit vectors, don't scale)
-
-
+if (offsetScaled === 0) {
+for (let i = 0; i < vertices.length; i++) {
+vertices[i] *= finalScale;
+}
+}
+else {
+for (let i = 0; i < vertices.length; i += 3) {
+vertices[i] = vertices[i] * finalScale + offsetScaled;
+vertices[i + 1] *= finalScale;
+vertices[i + 2] *= finalScale;
+}
 }
 planeBounds.min.x *= finalScale;
 planeBounds.min.y *= finalScale;
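Scaling and horizontal alignment are fused into one pass over the vertex buffer: the alignment offset is pre-scaled into output units once, so each x component needs a single multiply-add. Sketch (offset and finalScale are assumed from the surrounding code):

```js
// Sketch of the fused scale-plus-align pass over an interleaved xyz buffer.
const offsetScaled = offset * finalScale;
for (let i = 0; i < vertices.length; i += 3) {
  vertices[i]     = vertices[i] * finalScale + offsetScaled; // x
  vertices[i + 1] *= finalScale;                             // y
  vertices[i + 2] *= finalScale;                             // z
}
```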
@@ -5684,14 +5880,10 @@
 planeBounds.max.z *= finalScale;
 for (let i = 0; i < glyphInfoArray.length; i++) {
 const glyphInfo = glyphInfoArray[i];
-
-glyphInfo.bounds.min.x += offset;
-glyphInfo.bounds.max.x += offset;
-}
-glyphInfo.bounds.min.x *= finalScale;
+glyphInfo.bounds.min.x = glyphInfo.bounds.min.x * finalScale + offsetScaled;
 glyphInfo.bounds.min.y *= finalScale;
 glyphInfo.bounds.min.z *= finalScale;
-glyphInfo.bounds.max.x
+glyphInfo.bounds.max.x = glyphInfo.bounds.max.x * finalScale + offsetScaled;
 glyphInfo.bounds.max.y *= finalScale;
 glyphInfo.bounds.max.z *= finalScale;
 }