three-text 0.2.16 → 0.2.18
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +8 -2
- package/dist/index.cjs +486 -278
- package/dist/index.js +486 -278
- package/dist/index.min.cjs +639 -617
- package/dist/index.min.js +630 -608
- package/dist/index.umd.js +486 -278
- package/dist/index.umd.min.js +640 -618
- package/dist/types/core/cache/GlyphGeometryBuilder.d.ts +1 -0
- package/dist/types/core/geometry/Extruder.d.ts +1 -0
- package/dist/types/core/geometry/Tessellator.d.ts +3 -2
- package/dist/types/core/layout/LineBreak.d.ts +2 -1
- package/dist/types/core/layout/TextLayout.d.ts +15 -0
- package/dist/types/core/shaping/TextMeasurer.d.ts +1 -0
- package/package.json +1 -1
package/dist/index.umd.js CHANGED

@@ -1,5 +1,5 @@
  /*!
- * three-text v0.2.16
+ * three-text v0.2.18
  * Copyright (C) 2025 Countertype LLC
  *
  * This program is free software: you can redistribute it and/or modify
@@ -198,7 +198,7 @@
  FitnessClass[FitnessClass["LOOSE"] = 2] = "LOOSE";
  FitnessClass[FitnessClass["VERY_LOOSE"] = 3] = "VERY_LOOSE";
  })(FitnessClass || (FitnessClass = {}));
- // ActiveNodeList maintains all currently viable breakpoints as we scan through the text
+ // ActiveNodeList maintains all currently viable breakpoints as we scan through the text
  // Each node represents a potential break with accumulated demerits (total "cost" from start)
  //
  // Demerits = cumulative penalty score from text start to this break, calculated as:
@@ -340,9 +340,9 @@
  // Converts text into items (boxes, glues, penalties) for line breaking
  // The measureText function should return widths that include any letter spacing
  static itemizeText(text, measureText, // function to measure text width
- hyphenate = false, language = 'en-us', availablePatterns, lefthyphenmin = DEFAULT_LEFT_HYPHEN_MIN, righthyphenmin = DEFAULT_RIGHT_HYPHEN_MIN, context) {
+ measureTextWidths, hyphenate = false, language = 'en-us', availablePatterns, lefthyphenmin = DEFAULT_LEFT_HYPHEN_MIN, righthyphenmin = DEFAULT_RIGHT_HYPHEN_MIN, context) {
  const items = [];
- items.push(...this.itemizeParagraph(text, measureText, hyphenate, language, availablePatterns, lefthyphenmin, righthyphenmin, context));
+ items.push(...this.itemizeParagraph(text, measureText, measureTextWidths, hyphenate, language, availablePatterns, lefthyphenmin, righthyphenmin, context));
  // Final glue and penalty to end the paragraph
  // Use infinite stretch to fill the last line
  items.push({
@@ -447,9 +447,10 @@
  return (this.isCJClosingPunctuation(char) || this.isCJOpeningPunctuation(char));
  }
  // CJK (Chinese/Japanese/Korean) character-level itemization with inter-character glue
- static itemizeCJKText(text, measureText, context, startOffset = 0, glueParams) {
+ static itemizeCJKText(text, measureText, measureTextWidths, context, startOffset = 0, glueParams) {
  const items = [];
  const chars = Array.from(text);
+ const widths = measureTextWidths ? measureTextWidths(text) : null;
  let textPosition = startOffset;
  // Inter-character glue parameters
  let glueWidth;
@@ -470,7 +471,7 @@
  const char = chars[i];
  const nextChar = i < chars.length - 1 ? chars[i + 1] : null;
  if (/\s/.test(char)) {
- const width = measureText(char);
+ const width = widths ? (widths[i] ?? measureText(char)) : measureText(char);
  items.push({
  type: ItemType.GLUE,
  width,
@@ -484,7 +485,7 @@
  }
  items.push({
  type: ItemType.BOX,
- width: measureText(char),
+ width: widths ? (widths[i] ?? measureText(char)) : measureText(char),
  text: char,
  originIndex: textPosition
  });
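The hunks above thread an optional measureTextWidths callback through CJK itemization so a run of text is shaped once and per-character widths are read from the returned array, with a per-character measureText fallback when an entry is missing. A minimal sketch of that batch-measure-with-fallback pattern; the function name and callbacks are illustrative, not the library's exported API:

```js
// Sketch: measure a run once, fall back to per-character measurement.
// `measureText` and `measureTextWidths` stand in for the callbacks shown above.
function widthsForRun(text, measureText, measureTextWidths) {
  const chars = Array.from(text);
  const widths = measureTextWidths ? measureTextWidths(text) : null;
  return chars.map((ch, i) =>
    widths ? (widths[i] ?? measureText(ch)) : measureText(ch));
}
```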
@@ -515,15 +516,21 @@
  }
  return items;
  }
- static itemizeParagraph(text, measureText, hyphenate, language, availablePatterns, lefthyphenmin, righthyphenmin, context) {
+ static itemizeParagraph(text, measureText, measureTextWidths, hyphenate, language, availablePatterns, lefthyphenmin, righthyphenmin, context) {
  const items = [];
  const chars = Array.from(text);
- // Calculate CJK glue parameters once for consistency across all segments
- … [5 old lines truncated in the source diff]
+ // Calculate CJK glue parameters lazily and once for consistency across all segments
+ let cjkGlueParams;
+ const getCjkGlueParams = () => {
+ if (!cjkGlueParams) {
+ const baseCharWidth = measureText('字');
+ cjkGlueParams = {
+ width: 0,
+ stretch: baseCharWidth * 0.04,
+ shrink: baseCharWidth * 0.04
+ };
+ }
+ return cjkGlueParams;
  };
  let buffer = '';
  let bufferStart = 0;
@@ -533,7 +540,7 @@
  if (buffer.length === 0)
  return;
  if (bufferScript === 'cjk') {
- const cjkItems = this.itemizeCJKText(buffer, measureText, context, bufferStart, …
+ const cjkItems = this.itemizeCJKText(buffer, measureText, measureTextWidths, context, bufferStart, getCjkGlueParams());
  items.push(...cjkItems);
  }
  else {
@@ -726,7 +733,7 @@
  align: options.align || 'left',
  hyphenate: options.hyphenate || false
  });
- const { text, width, align = 'left', direction = 'ltr', hyphenate = false, language = 'en-us', respectExistingBreaks = true, measureText, hyphenationPatterns, unitsPerEm, letterSpacing = 0, tolerance = DEFAULT_TOLERANCE, pretolerance = DEFAULT_PRETOLERANCE, emergencyStretch = DEFAULT_EMERGENCY_STRETCH, autoEmergencyStretch, lefthyphenmin = DEFAULT_LEFT_HYPHEN_MIN, righthyphenmin = DEFAULT_RIGHT_HYPHEN_MIN, linepenalty = DEFAULT_LINE_PENALTY, adjdemerits = DEFAULT_FITNESS_DIFF_DEMERITS, hyphenpenalty = DEFAULT_HYPHEN_PENALTY, exhyphenpenalty = DEFAULT_EX_HYPHEN_PENALTY, doublehyphendemerits = DEFAULT_DOUBLE_HYPHEN_DEMERITS, looseness = 0, disableShortLineDetection = false, shortLineThreshold = SHORT_LINE_WIDTH_THRESHOLD } = options;
+ const { text, width, align = 'left', direction = 'ltr', hyphenate = false, language = 'en-us', respectExistingBreaks = true, measureText, measureTextWidths, hyphenationPatterns, unitsPerEm, letterSpacing = 0, tolerance = DEFAULT_TOLERANCE, pretolerance = DEFAULT_PRETOLERANCE, emergencyStretch = DEFAULT_EMERGENCY_STRETCH, autoEmergencyStretch, lefthyphenmin = DEFAULT_LEFT_HYPHEN_MIN, righthyphenmin = DEFAULT_RIGHT_HYPHEN_MIN, linepenalty = DEFAULT_LINE_PENALTY, adjdemerits = DEFAULT_FITNESS_DIFF_DEMERITS, hyphenpenalty = DEFAULT_HYPHEN_PENALTY, exhyphenpenalty = DEFAULT_EX_HYPHEN_PENALTY, doublehyphendemerits = DEFAULT_DOUBLE_HYPHEN_DEMERITS, looseness = 0, disableShortLineDetection = false, shortLineThreshold = SHORT_LINE_WIDTH_THRESHOLD } = options;
  // Handle multiple paragraphs by processing each independently
  if (respectExistingBreaks && text.includes('\n')) {
  const paragraphs = text.split('\n');
@@ -789,9 +796,9 @@
  exHyphenPenalty: exhyphenpenalty,
  currentAlign: align,
  unitsPerEm,
- // measureText() includes trailing letter spacing after the final glyph of a token
+ // measureText() includes trailing letter spacing after the final glyph of a token
  // Shaping applies letter spacing only between glyphs, so we subtract one
- // trailing letterSpacingFU per line segment (see computeAdjustmentRatio/createLines)
+ // trailing letterSpacingFU per line segment (see computeAdjustmentRatio/createLines)
  letterSpacingFU: unitsPerEm ? letterSpacing * unitsPerEm : 0
  };
  if (!width || width === Infinity) {
@@ -810,7 +817,7 @@
  ];
  }
  // Itemize without hyphenation first (TeX approach: only compute if needed)
- const allItems = LineBreak.itemizeText(text, measureText, false, language, hyphenationPatterns, lefthyphenmin, righthyphenmin, context);
+ const allItems = LineBreak.itemizeText(text, measureText, measureTextWidths, false, language, hyphenationPatterns, lefthyphenmin, righthyphenmin, context);
  if (allItems.length === 0) {
  return [];
  }
@@ -829,7 +836,7 @@
  let breaks = LineBreak.findBreakpoints(currentItems, width, pretolerance, looseness, false, 0, context);
  // Second pass: with hyphenation if first pass failed
  if (breaks.length === 0 && useHyphenation) {
- const itemsWithHyphenation = LineBreak.itemizeText(text, measureText, true, language, hyphenationPatterns, lefthyphenmin, righthyphenmin, context);
+ const itemsWithHyphenation = LineBreak.itemizeText(text, measureText, measureTextWidths, true, language, hyphenationPatterns, lefthyphenmin, righthyphenmin, context);
  currentItems = itemsWithHyphenation;
  breaks = LineBreak.findBreakpoints(currentItems, width, tolerance, looseness, false, 0, context);
  }
@@ -1143,9 +1150,9 @@
  ? items[lineEnd].width
  : items[lineEnd].preBreakWidth;
  }
- // Correct for trailing letter spacing at the end of the line segment
+ // Correct for trailing letter spacing at the end of the line segment
  // Our token measurement includes letter spacing after the final glyph;
- // shaping does not add letter spacing after the final glyph in a line
+ // shaping does not add letter spacing after the final glyph in a line
  if (context?.letterSpacingFU && totalWidth !== 0) {
  totalWidth -= context.letterSpacingFU;
  }
@@ -1311,7 +1318,7 @@
  }
  }
  const lineText = lineTextParts.join('');
- // Correct for trailing letter spacing at the end of the line
+ // Correct for trailing letter spacing at the end of the line
  if (context?.letterSpacingFU && naturalWidth !== 0) {
  naturalWidth -= context.letterSpacingFU;
  }
@@ -1368,7 +1375,7 @@
  finalNaturalWidth += item.width;
  }
  const finalLineText = finalLineTextParts.join('');
- // Correct for trailing letter spacing at the end of the final line
+ // Correct for trailing letter spacing at the end of the final line
  if (context?.letterSpacingFU && finalNaturalWidth !== 0) {
  finalNaturalWidth -= context.letterSpacingFU;
  }
@@ -1405,12 +1412,21 @@
  }
  }

+ // Memoize conversion per feature-object identity to avoid rebuilding the same
+ // comma-separated string on every HarfBuzz shape call
+ const featureStringCache = new WeakMap();
  // Convert feature objects to HarfBuzz comma-separated format
  function convertFontFeaturesToString(features) {
  if (!features || Object.keys(features).length === 0) {
  return undefined;
  }
+ const cached = featureStringCache.get(features);
+ if (cached !== undefined) {
+ return cached ?? undefined;
+ }
  const featureStrings = [];
+ // Preserve insertion order of the input object
+ // (The public API/tests expect this to be stable and predictable)
  for (const [tag, value] of Object.entries(features)) {
  if (!/^[a-zA-Z0-9]{4}$/.test(tag)) {
  logger.warn(`Invalid OpenType feature tag: "${tag}". Tags must be exactly 4 alphanumeric characters.`);
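The featureStringCache added above memoizes by object identity and stores null as a sentinel so that a legitimately undefined result ("no features") still counts as a cache hit. A small standalone sketch of that pattern, with hypothetical names rather than the library's internals:

```js
// Sketch: WeakMap memoization keyed on object identity, with null standing in
// for a cached "undefined" result so misses and cached-undefined stay distinguishable.
const cache = new WeakMap();
function memoizedConvert(features, convert) {
  const hit = cache.get(features);
  if (hit !== undefined) return hit ?? undefined; // null sentinel -> undefined
  const result = convert(features);               // may legitimately be undefined
  cache.set(features, result ?? null);
  return result;
}
```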
@@ -1429,10 +1445,63 @@
  logger.warn(`Invalid value for feature "${tag}": ${value}. Expected boolean or positive number.`);
  }
  }
- …
+ const result = featureStrings.length > 0 ? featureStrings.join(',') : undefined;
+ featureStringCache.set(features, result ?? null);
+ return result;
  }

  class TextMeasurer {
+ // Shape once and return per-codepoint widths aligned with Array.from(text)
+ // Groups glyph advances by HarfBuzz cluster (cl)
+ // Includes trailing per-glyph letter spacing like measureTextWidth
+ static measureTextWidths(loadedFont, text, letterSpacing = 0) {
+ const chars = Array.from(text);
+ if (chars.length === 0)
+ return [];
+ // HarfBuzz clusters are UTF-16 code unit indices
+ const startToCharIndex = new Map();
+ let codeUnitIndex = 0;
+ for (let i = 0; i < chars.length; i++) {
+ startToCharIndex.set(codeUnitIndex, i);
+ codeUnitIndex += chars[i].length;
+ }
+ const widths = new Array(chars.length).fill(0);
+ const buffer = loadedFont.hb.createBuffer();
+ try {
+ buffer.addText(text);
+ buffer.guessSegmentProperties();
+ const featuresString = convertFontFeaturesToString(loadedFont.fontFeatures);
+ loadedFont.hb.shape(loadedFont.font, buffer, featuresString);
+ const glyphInfos = buffer.json(loadedFont.font);
+ const letterSpacingInFontUnits = letterSpacing * loadedFont.upem;
+ for (let i = 0; i < glyphInfos.length; i++) {
+ const glyph = glyphInfos[i];
+ const cl = glyph.cl ?? 0;
+ let charIndex = startToCharIndex.get(cl);
+ // Fallback if cl lands mid-codepoint
+ if (charIndex === undefined) {
+ // Find the closest start <= cl
+ for (let back = cl; back >= 0; back--) {
+ const candidate = startToCharIndex.get(back);
+ if (candidate !== undefined) {
+ charIndex = candidate;
+ break;
+ }
+ }
+ }
+ if (charIndex === undefined)
+ continue;
+ widths[charIndex] += glyph.ax;
+ if (letterSpacingInFontUnits !== 0) {
+ widths[charIndex] += letterSpacingInFontUnits;
+ }
+ }
+ return widths;
+ }
+ finally {
+ buffer.destroy();
+ }
+ }
  static measureTextWidth(loadedFont, text, letterSpacing = 0) {
  const buffer = loadedFont.hb.createBuffer();
  buffer.addText(text);
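The new measureTextWidths above groups glyph advances by HarfBuzz cluster values, which are UTF-16 code unit offsets, while the result array is aligned with Array.from(text) codepoints. A worked example of that code-unit-to-codepoint mapping, using plain JavaScript only (no HarfBuzz), to show why the map is needed for astral characters:

```js
// Sketch: map UTF-16 code unit offsets (HarfBuzz cluster values) to codepoint
// indices, as measureTextWidths does above. "😀" occupies two code units.
const text = 'a😀b';
const chars = Array.from(text);        // ['a', '😀', 'b']
const startToCharIndex = new Map();
let cu = 0;
chars.forEach((ch, i) => { startToCharIndex.set(cu, i); cu += ch.length; });
// startToCharIndex: 0 -> 0 ('a'), 1 -> 1 ('😀'), 3 -> 2 ('b')
console.log(startToCharIndex.get(3)); // 2 — a glyph with cl=3 belongs to 'b'
```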
@@ -1489,7 +1558,8 @@
  unitsPerEm: this.loadedFont.upem,
  letterSpacing,
  measureText: (textToMeasure) => TextMeasurer.measureTextWidth(this.loadedFont, textToMeasure, letterSpacing // Letter spacing included in width measurements
- )
+ ),
+ measureTextWidths: (textToMeasure) => TextMeasurer.measureTextWidths(this.loadedFont, textToMeasure, letterSpacing)
  });
  }
  else {
@@ -1511,6 +1581,15 @@
  return { lines };
  }
  applyAlignment(vertices, options) {
+ const { offset, adjustedBounds } = this.computeAlignmentOffset(options);
+ if (offset !== 0) {
+ for (let i = 0; i < vertices.length; i += 3) {
+ vertices[i] += offset;
+ }
+ }
+ return { offset, adjustedBounds };
+ }
+ computeAlignmentOffset(options) {
  const { width, align, planeBounds } = options;
  let offset = 0;
  const adjustedBounds = {
@@ -1522,17 +1601,13 @@
  if (align === 'center') {
  offset = (width - lineWidth) / 2 - planeBounds.min.x;
  }
- else
+ else {
  offset = width - planeBounds.max.x;
  }
- … [4 old lines truncated in the source diff]
- }
- adjustedBounds.min.x += offset;
- adjustedBounds.max.x += offset;
- }
+ }
+ if (offset !== 0) {
+ adjustedBounds.min.x += offset;
+ adjustedBounds.max.x += offset;
  }
  return { offset, adjustedBounds };
  }
@@ -2629,7 +2704,7 @@
  var libtess_minExports = libtess_min.exports;

  class Tessellator {
- process(paths, removeOverlaps = true, isCFF = false) {
+ process(paths, removeOverlaps = true, isCFF = false, needsExtrusionContours = true) {
  if (paths.length === 0) {
  return { triangles: { vertices: [], indices: [] }, contours: [] };
  }
@@ -2638,66 +2713,108 @@
  return { triangles: { vertices: [], indices: [] }, contours: [] };
  }
  logger.log(`Tessellator: removeOverlaps=${removeOverlaps}, processing ${valid.length} paths`);
- return this.tessellate(valid, removeOverlaps, isCFF);
- }
- tessellate(paths, removeOverlaps, isCFF) {
- … [5 old lines truncated in the source diff]
+ return this.tessellate(valid, removeOverlaps, isCFF, needsExtrusionContours);
+ }
+ tessellate(paths, removeOverlaps, isCFF, needsExtrusionContours) {
+ // libtess expects CCW winding; TTF outer contours are CW
+ const needsWindingReversal = !isCFF && !removeOverlaps;
+ let originalContours;
+ let tessContours;
+ if (needsWindingReversal) {
+ tessContours = this.pathsToContours(paths, true);
+ if (removeOverlaps || needsExtrusionContours) {
+ originalContours = this.pathsToContours(paths);
+ }
+ }
+ else {
+ originalContours = this.pathsToContours(paths);
+ tessContours = originalContours;
+ }
+ let extrusionContours = needsExtrusionContours
+ ? needsWindingReversal
+ ? tessContours
+ : originalContours ?? this.pathsToContours(paths)
+ : [];
  if (removeOverlaps) {
  logger.log('Two-pass: boundary extraction then triangulation');
- // Extract boundaries to remove overlaps
  perfLogger.start('Tessellator.boundaryPass', {
- contourCount: …
+ contourCount: tessContours.length
  });
- const boundaryResult = this.performTessellation(…
+ const boundaryResult = this.performTessellation(originalContours, 'boundary');
  perfLogger.end('Tessellator.boundaryPass');
  if (!boundaryResult) {
  logger.warn('libtess returned empty result from boundary pass');
  return { triangles: { vertices: [], indices: [] }, contours: [] };
  }
- … [3 old lines truncated in the source diff]
+ // Boundary pass normalizes winding (outer CCW, holes CW)
+ tessContours = this.boundaryToContours(boundaryResult);
+ if (needsExtrusionContours) {
+ extrusionContours = tessContours;
+ }
+ logger.log(`Boundary pass created ${tessContours.length} contours. Starting triangulation pass.`);
  }
  else {
  logger.log(`Single-pass triangulation for ${isCFF ? 'CFF' : 'TTF'}`);
  }
- // Triangulate the contours
  perfLogger.start('Tessellator.triangulationPass', {
- contourCount: …
+ contourCount: tessContours.length
  });
- const triangleResult = this.performTessellation(…
+ const triangleResult = this.performTessellation(tessContours, 'triangles');
  perfLogger.end('Tessellator.triangulationPass');
  if (!triangleResult) {
  const warning = removeOverlaps
  ? 'libtess returned empty result from triangulation pass'
  : 'libtess returned empty result from single-pass triangulation';
  logger.warn(warning);
- return { triangles: { vertices: [], indices: [] }, contours };
+ return { triangles: { vertices: [], indices: [] }, contours: extrusionContours };
  }
  return {
  triangles: {
  vertices: triangleResult.vertices,
  indices: triangleResult.indices || []
  },
- contours
+ contours: extrusionContours
  };
  }
- pathsToContours(paths) {
- … [4 old lines truncated in the source diff]
+ pathsToContours(paths, reversePoints = false) {
+ const contours = new Array(paths.length);
+ for (let p = 0; p < paths.length; p++) {
+ const points = paths[p].points;
+ const pointCount = points.length;
+ // Clipper-style paths can be explicitly closed by repeating the first point at the end
+ // Normalize to a single closing vertex for stable side wall generation
+ const isClosed = pointCount > 1 &&
+ points[0].x === points[pointCount - 1].x &&
+ points[0].y === points[pointCount - 1].y;
+ const end = isClosed ? pointCount - 1 : pointCount;
+ // +1 to append a closing vertex
+ const contour = new Array((end + 1) * 2);
+ let i = 0;
+ if (reversePoints) {
+ for (let k = end - 1; k >= 0; k--) {
+ const pt = points[k];
+ contour[i++] = pt.x;
+ contour[i++] = pt.y;
+ }
  }
- … [2 old lines truncated in the source diff]
+ else {
+ for (let k = 0; k < end; k++) {
+ const pt = points[k];
+ contour[i++] = pt.x;
+ contour[i++] = pt.y;
+ }
+ }
+ // Some glyphs omit closePath, leaving gaps in extruded side walls
+ if (i >= 2) {
+ contour[i++] = contour[0];
+ contour[i++] = contour[1];
+ }
+ contours[p] = contour;
+ }
+ return contours;
  }
  performTessellation(contours, mode) {
  const tess = new libtess_minExports.GluTesselator();
- // Set winding rule to NON-ZERO
  tess.gluTessProperty(libtess_minExports.gluEnum.GLU_TESS_WINDING_RULE, libtess_minExports.windingRule.GLU_TESS_WINDING_NONZERO);
  const vertices = [];
  const indices = [];
@@ -2720,7 +2837,7 @@
  });
  tess.gluTessCallback(libtess_minExports.gluEnum.GLU_TESS_END, () => {
  if (currentContour.length > 0) {
- contourIndices.push(…
+ contourIndices.push(currentContour);
  }
  });
  }
@@ -2765,7 +2882,6 @@
  const vertIdx = idx * 2;
  contour.push(boundaryResult.vertices[vertIdx], boundaryResult.vertices[vertIdx + 1]);
  }
- // Ensure contour is closed for side wall generation
  if (contour.length > 2) {
  if (contour[0] !== contour[contour.length - 2] ||
  contour[1] !== contour[contour.length - 1]) {
|
|
|
2776
2892
|
}
|
|
2777
2893
|
return contours;
|
|
2778
2894
|
}
|
|
2779
|
-
|
|
2780
|
-
|
|
2781
|
-
|
|
2782
|
-
|
|
2783
|
-
|
|
2895
|
+
// Check if contours need winding normalization via boundary pass
|
|
2896
|
+
// Returns false if topology is simple enough to skip the expensive pass
|
|
2897
|
+
needsWindingNormalization(contours) {
|
|
2898
|
+
if (contours.length === 0)
|
|
2899
|
+
return false;
|
|
2900
|
+
// Heuristic 1: Single contour never needs normalization
|
|
2901
|
+
if (contours.length === 1)
|
|
2902
|
+
return false;
|
|
2903
|
+
// Heuristic 2: All same winding = all outers, no holes
|
|
2904
|
+
// Compute signed areas
|
|
2905
|
+
let firstSign = null;
|
|
2906
|
+
for (const contour of contours) {
|
|
2907
|
+
const area = this.signedArea(contour);
|
|
2908
|
+
const sign = area >= 0 ? 1 : -1;
|
|
2909
|
+
if (firstSign === null) {
|
|
2910
|
+
firstSign = sign;
|
|
2911
|
+
}
|
|
2912
|
+
else if (sign !== firstSign) {
|
|
2913
|
+
// Mixed winding detected → might have holes or complex topology
|
|
2914
|
+
return true;
|
|
2915
|
+
}
|
|
2916
|
+
}
|
|
2917
|
+
// All same winding → simple topology, no normalization needed
|
|
2918
|
+
return false;
|
|
2919
|
+
}
|
|
2920
|
+
// Compute signed area (CCW = positive, CW = negative)
|
|
2921
|
+
signedArea(contour) {
|
|
2922
|
+
let area = 0;
|
|
2923
|
+
const len = contour.length;
|
|
2924
|
+
if (len < 6)
|
|
2925
|
+
return 0; // Need at least 3 points
|
|
2926
|
+
for (let i = 0; i < len; i += 2) {
|
|
2927
|
+
const x1 = contour[i];
|
|
2928
|
+
const y1 = contour[i + 1];
|
|
2929
|
+
const x2 = contour[(i + 2) % len];
|
|
2930
|
+
const y2 = contour[(i + 3) % len];
|
|
2931
|
+
area += x1 * y2 - x2 * y1;
|
|
2932
|
+
}
|
|
2933
|
+
return area / 2;
|
|
2784
2934
|
}
|
|
2785
2935
|
}
|
|
2786
2936
|
|
|
2787
2937
|
class Extruder {
|
|
2788
2938
|
constructor() { }
|
|
2939
|
+
packEdge(a, b) {
|
|
2940
|
+
const lo = a < b ? a : b;
|
|
2941
|
+
const hi = a < b ? b : a;
|
|
2942
|
+
return lo * 0x100000000 + hi;
|
|
2943
|
+
}
|
|
2789
2944
|
extrude(geometry, depth = 0, unitsPerEm) {
|
|
2790
2945
|
const points = geometry.triangles.vertices;
|
|
2791
2946
|
const triangleIndices = geometry.triangles.indices;
|
|
2792
2947
|
const numPoints = points.length / 2;
|
|
2793
|
-
// Count side
|
|
2794
|
-
let
|
|
2948
|
+
// Count boundary edges for side walls (4 vertices + 6 indices per edge)
|
|
2949
|
+
let boundaryEdges = [];
|
|
2795
2950
|
if (depth !== 0) {
|
|
2796
|
-
|
|
2797
|
-
|
|
2798
|
-
|
|
2799
|
-
|
|
2800
|
-
|
|
2951
|
+
const counts = new Map();
|
|
2952
|
+
const oriented = new Map();
|
|
2953
|
+
for (let i = 0; i < triangleIndices.length; i += 3) {
|
|
2954
|
+
const a = triangleIndices[i];
|
|
2955
|
+
const b = triangleIndices[i + 1];
|
|
2956
|
+
const c = triangleIndices[i + 2];
|
|
2957
|
+
const k0 = this.packEdge(a, b);
|
|
2958
|
+
const n0 = (counts.get(k0) ?? 0) + 1;
|
|
2959
|
+
counts.set(k0, n0);
|
|
2960
|
+
if (n0 === 1)
|
|
2961
|
+
oriented.set(k0, [a, b]);
|
|
2962
|
+
const k1 = this.packEdge(b, c);
|
|
2963
|
+
const n1 = (counts.get(k1) ?? 0) + 1;
|
|
2964
|
+
counts.set(k1, n1);
|
|
2965
|
+
if (n1 === 1)
|
|
2966
|
+
oriented.set(k1, [b, c]);
|
|
2967
|
+
const k2 = this.packEdge(c, a);
|
|
2968
|
+
const n2 = (counts.get(k2) ?? 0) + 1;
|
|
2969
|
+
counts.set(k2, n2);
|
|
2970
|
+
if (n2 === 1)
|
|
2971
|
+
oriented.set(k2, [c, a]);
|
|
2972
|
+
}
|
|
2973
|
+
boundaryEdges = [];
|
|
2974
|
+
for (const [key, count] of counts) {
|
|
2975
|
+
if (count !== 1)
|
|
2976
|
+
continue;
|
|
2977
|
+
const edge = oriented.get(key);
|
|
2978
|
+
if (edge)
|
|
2979
|
+
boundaryEdges.push(edge);
|
|
2801
2980
|
}
|
|
2802
2981
|
}
|
|
2803
|
-
const
|
|
2982
|
+
const sideEdgeCount = depth === 0 ? 0 : boundaryEdges.length;
|
|
2983
|
+
const sideVertexCount = depth === 0 ? 0 : sideEdgeCount * 4;
|
|
2804
2984
|
const baseVertexCount = depth === 0 ? numPoints : numPoints * 2;
|
|
2805
2985
|
const vertexCount = baseVertexCount + sideVertexCount;
|
|
2806
2986
|
const vertices = new Float32Array(vertexCount * 3);
|
|
2807
2987
|
const normals = new Float32Array(vertexCount * 3);
|
|
2808
2988
|
const indexCount = depth === 0
|
|
2809
2989
|
? triangleIndices.length
|
|
2810
|
-
: triangleIndices.length * 2 +
|
|
2990
|
+
: triangleIndices.length * 2 + sideEdgeCount * 6;
|
|
2811
2991
|
const indices = new Uint32Array(indexCount);
|
|
2812
2992
|
if (depth === 0) {
|
|
2813
2993
|
// Single-sided flat geometry at z=0
|
|
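The signedArea method added above is the shoelace formula over a flat [x0, y0, x1, y1, …] contour; a positive result means counter-clockwise winding. A small self-contained check on a unit square, for illustration only:

```js
// Sketch: shoelace signed area over a flat [x, y, ...] contour,
// matching the signedArea method added in the hunk above.
function signedArea(contour) {
  let area = 0;
  for (let i = 0; i < contour.length; i += 2) {
    const x1 = contour[i], y1 = contour[i + 1];
    const x2 = contour[(i + 2) % contour.length];
    const y2 = contour[(i + 3) % contour.length];
    area += x1 * y2 - x2 * y1;
  }
  return area / 2;
}
console.log(signedArea([0, 0, 1, 0, 1, 1, 0, 1])); //  1 (CCW square)
console.log(signedArea([0, 0, 0, 1, 1, 1, 1, 0])); // -1 (CW square)
```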
@@ -2830,25 +3010,26 @@
  // Extruded geometry: front at z=0, back at z=depth
  const minBackOffset = unitsPerEm * 0.000025;
  const backZ = depth <= minBackOffset ? minBackOffset : depth;
- // …
+ // Generate both caps in one pass
  for (let p = 0, vi = 0; p < points.length; p += 2, vi++) {
- const …
- … [16 old lines truncated in the source diff]
+ const x = points[p];
+ const y = points[p + 1];
+ // Cap at z=0
+ const base0 = vi * 3;
+ vertices[base0] = x;
+ vertices[base0 + 1] = y;
+ vertices[base0 + 2] = 0;
+ normals[base0] = 0;
+ normals[base0 + 1] = 0;
+ normals[base0 + 2] = -1;
+ // Cap at z=depth
+ const baseD = (numPoints + vi) * 3;
+ vertices[baseD] = x;
+ vertices[baseD + 1] = y;
+ vertices[baseD + 2] = backZ;
+ normals[baseD] = 0;
+ normals[baseD + 1] = 0;
+ normals[baseD + 2] = 1;
  }
  // libtess outputs CCW triangles (viewed from +Z)
  // Z=0 cap faces -Z, reverse winding
@@ -2862,60 +3043,62 @@
  // Side walls
  let nextVertex = numPoints * 2;
  let idxPos = triangleIndices.length * 2;
- for (…
- … [53 old lines truncated in the source diff]
+ for (let e = 0; e < boundaryEdges.length; e++) {
+ const [u, v] = boundaryEdges[e];
+ const u2 = u * 2;
+ const v2 = v * 2;
+ const p0x = points[u2];
+ const p0y = points[u2 + 1];
+ const p1x = points[v2];
+ const p1y = points[v2 + 1];
+ // Perpendicular normal for this wall segment
+ // Uses the edge direction from the cap triangulation so winding does not depend on contour direction
+ const ex = p1x - p0x;
+ const ey = p1y - p0y;
+ const lenSq = ex * ex + ey * ey;
+ let nx = 0;
+ let ny = 0;
+ if (lenSq > 0) {
+ const invLen = 1 / Math.sqrt(lenSq);
+ nx = ey * invLen;
+ ny = -ex * invLen;
+ }
+ const baseVertex = nextVertex;
+ const base = baseVertex * 3;
+ // Wall quad: front edge at z=0, back edge at z=depth
+ vertices[base] = p0x;
+ vertices[base + 1] = p0y;
+ vertices[base + 2] = 0;
+ vertices[base + 3] = p1x;
+ vertices[base + 4] = p1y;
+ vertices[base + 5] = 0;
+ vertices[base + 6] = p0x;
+ vertices[base + 7] = p0y;
+ vertices[base + 8] = backZ;
+ vertices[base + 9] = p1x;
+ vertices[base + 10] = p1y;
+ vertices[base + 11] = backZ;
+ // Wall normals point perpendicular to edge
+ normals[base] = nx;
+ normals[base + 1] = ny;
+ normals[base + 2] = 0;
+ normals[base + 3] = nx;
+ normals[base + 4] = ny;
+ normals[base + 5] = 0;
+ normals[base + 6] = nx;
+ normals[base + 7] = ny;
+ normals[base + 8] = 0;
+ normals[base + 9] = nx;
+ normals[base + 10] = ny;
+ normals[base + 11] = 0;
+ // Two triangles per wall segment
+ indices[idxPos++] = baseVertex;
+ indices[idxPos++] = baseVertex + 1;
+ indices[idxPos++] = baseVertex + 2;
+ indices[idxPos++] = baseVertex + 1;
+ indices[idxPos++] = baseVertex + 3;
+ indices[idxPos++] = baseVertex + 2;
+ nextVertex += 4;
  }
  return { vertices, normals, indices };
  }
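The rewritten side-wall pass above derives its walls from boundary edges of the cap triangulation: each triangle contributes three undirected edges, an edge shared by two triangles is interior, and an edge seen exactly once lies on the outline. A compact sketch of that edge-counting idea; the helper below mirrors packEdge/boundaryEdges but is illustrative, not the package's API:

```js
// Sketch: collect boundary edges of a 2D triangulation from its index list.
// An undirected edge counted twice is interior; counted once, it is boundary.
function boundaryEdges(indices) {
  const counts = new Map();    // packed edge -> occurrence count
  const oriented = new Map();  // packed edge -> [from, to] as first seen
  const pack = (a, b) => (a < b ? a : b) * 0x100000000 + (a < b ? b : a);
  for (let i = 0; i < indices.length; i += 3) {
    const t = [indices[i], indices[i + 1], indices[i + 2]];
    for (let e = 0; e < 3; e++) {
      const a = t[e], b = t[(e + 1) % 3];
      const k = pack(a, b);
      counts.set(k, (counts.get(k) ?? 0) + 1);
      if (!oriented.has(k)) oriented.set(k, [a, b]);
    }
  }
  return [...counts].filter(([, n]) => n === 1).map(([k]) => oriented.get(k));
}
// Two triangles forming a quad (0,1,2) and (0,2,3): the shared diagonal 0-2 is interior.
console.log(boundaryEdges([0, 1, 2, 0, 2, 3]).length); // 4
```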
@@ -3142,21 +3325,23 @@
  return path;
  }
  this.stats.originalPointCount += path.points.length;
- …
+ // Most paths are already immutable after collection; avoid copying large point arrays
+ // The optimizers below never mutate the input `points` array
+ const points = path.points;
  if (points.length < 5) {
  return path;
  }
- …
- if (…
+ let optimized = this.simplifyPathVW(points, this.config.areaThreshold);
+ if (optimized.length < 3) {
  return path;
  }
- …
- if (…
+ optimized = this.removeColinearPoints(optimized, this.config.colinearThreshold);
+ if (optimized.length < 3) {
  return path;
  }
  return {
  ...path,
- points
+ points: optimized
  };
  }
  // Visvalingam-Whyatt algorithm
@@ -3610,7 +3795,7 @@
  if (this.currentGlyphPaths.length > 0) {
  this.collectedGlyphs.push({
  glyphId: this.currentGlyphId,
- paths: …
+ paths: this.currentGlyphPaths,
  bounds: {
  min: {
  x: this.currentGlyphBounds.min.x,
@@ -3662,11 +3847,10 @@
  return;
  }
  const flattenedPoints = this.polygonizer.polygonizeQuadratic(start, control, end);
- for (const point of flattenedPoints) {
- this.updateBounds(point);
- }
  for (let i = 0; i < flattenedPoints.length; i++) {
- …
+ const pt = flattenedPoints[i];
+ this.updateBounds(pt);
+ this.currentPath.points.push(pt);
  }
  this.currentPoint = end;
  }
@@ -3686,11 +3870,10 @@
  return;
  }
  const flattenedPoints = this.polygonizer.polygonizeCubic(start, control1, control2, end);
- for (const point of flattenedPoints) {
- this.updateBounds(point);
- }
  for (let i = 0; i < flattenedPoints.length; i++) {
- …
+ const pt = flattenedPoints[i];
+ this.updateBounds(pt);
+ this.currentPath.points.push(pt);
  }
  this.currentPoint = end;
  }
@@ -3880,6 +4063,7 @@
  constructor(cache, loadedFont) {
  this.fontId = 'default';
  this.cacheKeyPrefix = 'default';
+ this.emptyGlyphs = new Set();
  this.cache = cache;
  this.loadedFont = loadedFont;
  this.tessellator = new Tessellator();
@@ -3933,63 +4117,34 @@
  }
  // Build instanced geometry from glyph contours
  buildInstancedGeometry(clustersByLine, depth, removeOverlaps, isCFF, separateGlyphs = false, coloredTextIndices) {
- … [28 old lines truncated in the source diff]
- nextSize *= 2;
- const next = new Uint32Array(nextSize);
- next.set(buffer);
- return next;
- };
- const appendGeometryToBuffers = (data, position, vertexOffset) => {
- const v = data.vertices;
- const n = data.normals;
- const idx = data.indices;
- // Grow buffers as needed
- vertexBuffer = ensureFloatCapacity(vertexBuffer, vertexPos + v.length);
- normalBuffer = ensureFloatCapacity(normalBuffer, normalPos + n.length);
- indexBuffer = ensureIndexCapacity(indexBuffer, indexPos + idx.length);
- // Vertices: translate by position
- const px = position.x;
- const py = position.y;
- const pz = position.z;
- for (let j = 0; j < v.length; j += 3) {
- vertexBuffer[vertexPos++] = v[j] + px;
- vertexBuffer[vertexPos++] = v[j + 1] + py;
- vertexBuffer[vertexPos++] = v[j + 2] + pz;
- }
- // Normals: straight copy
- normalBuffer.set(n, normalPos);
- normalPos += n.length;
- // Indices: copy with vertex offset
- for (let j = 0; j < idx.length; j++) {
- indexBuffer[indexPos++] = idx[j] + vertexOffset;
- }
+ if (isLogEnabled) {
+ let wordCount = 0;
+ for (let i = 0; i < clustersByLine.length; i++) {
+ wordCount += clustersByLine[i].length;
+ }
+ perfLogger.start('GlyphGeometryBuilder.buildInstancedGeometry', {
+ lineCount: clustersByLine.length,
+ wordCount,
+ depth,
+ removeOverlaps
+ });
+ }
+ else {
+ perfLogger.start('GlyphGeometryBuilder.buildInstancedGeometry');
+ }
+ const tasks = [];
+ let totalVertexFloats = 0;
+ let totalNormalFloats = 0;
+ let totalIndexCount = 0;
+ let vertexCursor = 0; // vertex offset (not float offset)
+ const pushTask = (data, px, py, pz) => {
+ const vertexStart = vertexCursor;
+ tasks.push({ data, px, py, pz, vertexStart });
+ totalVertexFloats += data.vertices.length;
+ totalNormalFloats += data.normals.length;
+ totalIndexCount += data.indices.length;
+ vertexCursor += data.vertices.length / 3;
+ return vertexStart;
  };
  const glyphInfos = [];
  const planeBounds = {
@@ -3999,6 +4154,9 @@
  for (let lineIndex = 0; lineIndex < clustersByLine.length; lineIndex++) {
  const line = clustersByLine[lineIndex];
  for (const cluster of line) {
+ const clusterX = cluster.position.x;
+ const clusterY = cluster.position.y;
+ const clusterZ = cluster.position.z;
  const clusterGlyphContours = [];
  for (const glyph of cluster.glyphs) {
  clusterGlyphContours.push(this.getContoursForGlyph(glyph.g));
@@ -4039,7 +4197,7 @@
  // Use glyph-level caching when separateGlyphs is set or when cluster contains colored text
  const forceSeparate = separateGlyphs || clusterHasColoredGlyphs;
  // Iterate over the geometric groups identified by BoundaryClusterer
- // logical groups (words) …
+ // logical groups (words) split into geometric sub-groups
  for (const groupIndices of boundaryGroups) {
  const isOverlappingGroup = groupIndices.length > 1;
  const shouldCluster = isOverlappingGroup && !forceSeparate;
@@ -4071,16 +4229,19 @@
  // Calculate the absolute position of this sub-cluster based on its first glyph
  // (since the cached geometry is relative to that first glyph)
  const firstGlyphInGroup = subClusterGlyphs[0];
- const …
- const …
- …
+ const groupPosX = clusterX + (firstGlyphInGroup.x ?? 0);
+ const groupPosY = clusterY + (firstGlyphInGroup.y ?? 0);
+ const groupPosZ = clusterZ;
+ const vertexStart = pushTask(cachedCluster, groupPosX, groupPosY, groupPosZ);
  const clusterVertexCount = cachedCluster.vertices.length / 3;
  for (let i = 0; i < groupIndices.length; i++) {
  const originalIndex = groupIndices[i];
  const glyph = cluster.glyphs[originalIndex];
  const glyphContours = clusterGlyphContours[originalIndex];
- const …
- const …
+ const glyphPosX = clusterX + (glyph.x ?? 0);
+ const glyphPosY = clusterY + (glyph.y ?? 0);
+ const glyphPosZ = clusterZ;
+ const glyphInfo = this.createGlyphInfo(glyph, vertexStart, clusterVertexCount, glyphPosX, glyphPosY, glyphPosZ, glyphContours, depth);
  glyphInfos.push(glyphInfo);
  this.updatePlaneBounds(glyphInfo.bounds, planeBounds);
  }
@@ -4090,24 +4251,26 @@
  for (const i of groupIndices) {
  const glyph = cluster.glyphs[i];
  const glyphContours = clusterGlyphContours[i];
- const …
+ const glyphPosX = clusterX + (glyph.x ?? 0);
+ const glyphPosY = clusterY + (glyph.y ?? 0);
+ const glyphPosZ = clusterZ;
  // Skip glyphs with no paths (spaces, zero-width characters, etc.)
  if (glyphContours.paths.length === 0) {
- const glyphInfo = this.createGlyphInfo(glyph, 0, 0, …
+ const glyphInfo = this.createGlyphInfo(glyph, 0, 0, glyphPosX, glyphPosY, glyphPosZ, glyphContours, depth);
  glyphInfos.push(glyphInfo);
  continue;
  }
- …
+ const glyphCacheKey = getGlyphCacheKey(this.cacheKeyPrefix, glyph.g, depth, removeOverlaps);
+ let cachedGlyph = this.cache.get(glyphCacheKey);
  if (!cachedGlyph) {
  cachedGlyph = this.tessellateGlyph(glyphContours, depth, removeOverlaps, isCFF);
- this.cache.set(…
+ this.cache.set(glyphCacheKey, cachedGlyph);
  }
  else {
  cachedGlyph.useCount++;
  }
- const …
- …
- const glyphInfo = this.createGlyphInfo(glyph, vertexOffset, cachedGlyph.vertices.length / 3, glyphPosition, glyphContours, depth);
+ const vertexStart = pushTask(cachedGlyph, glyphPosX, glyphPosY, glyphPosZ);
+ const glyphInfo = this.createGlyphInfo(glyph, vertexStart, cachedGlyph.vertices.length / 3, glyphPosX, glyphPosY, glyphPosZ, glyphContours, depth);
  glyphInfos.push(glyphInfo);
  this.updatePlaneBounds(glyphInfo.bounds, planeBounds);
  }
@@ -4115,10 +4278,33 @@
  }
  }
  }
- // …
- const vertexArray = …
- const normalArray = …
- const indexArray = …
+ // Allocate exact-sized buffers and fill once
+ const vertexArray = new Float32Array(totalVertexFloats);
+ const normalArray = new Float32Array(totalNormalFloats);
+ const indexArray = new Uint32Array(totalIndexCount);
+ let vertexPos = 0; // float index (multiple of 3)
+ let normalPos = 0; // float index (multiple of 3)
+ let indexPos = 0; // index count
+ for (let t = 0; t < tasks.length; t++) {
+ const task = tasks[t];
+ const v = task.data.vertices;
+ const n = task.data.normals;
+ const idx = task.data.indices;
+ const px = task.px;
+ const py = task.py;
+ const pz = task.pz;
+ for (let j = 0; j < v.length; j += 3) {
+ vertexArray[vertexPos++] = v[j] + px;
+ vertexArray[vertexPos++] = v[j + 1] + py;
+ vertexArray[vertexPos++] = v[j + 2] + pz;
+ }
+ normalArray.set(n, normalPos);
+ normalPos += n.length;
+ const vertexStart = task.vertexStart;
+ for (let j = 0; j < idx.length; j++) {
+ indexArray[indexPos++] = idx[j] + vertexStart;
+ }
+ }
  perfLogger.end('GlyphGeometryBuilder.buildInstancedGeometry');
  return {
  vertices: vertexArray,
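Instead of growing typed arrays and copying as it goes, the rebuilt buildInstancedGeometry records each append as a task, sums the required sizes, then allocates Float32Array/Uint32Array once and fills them in a second pass. A minimal sketch of that measure-then-fill pattern, reduced to a single buffer with illustrative names:

```js
// Sketch: two-pass packing of variable-sized chunks into one typed array,
// as the tasks/pushTask rewrite above does for vertices, normals and indices.
function packChunks(chunks /* array of Float32Array */) {
  let total = 0;
  for (const c of chunks) total += c.length;   // pass 1: measure
  const out = new Float32Array(total);         // single exact-sized allocation
  let pos = 0;
  for (const c of chunks) {                    // pass 2: fill
    out.set(c, pos);
    pos += c.length;
  }
  return out;
}
```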
@@ -4143,7 +4329,7 @@
  const roundedDepth = Math.round(depth * 1000) / 1000;
  return `${this.cacheKeyPrefix}_${ids}_${roundedDepth}_${removeOverlaps}`;
  }
- createGlyphInfo(glyph, vertexStart, vertexCount, …
+ createGlyphInfo(glyph, vertexStart, vertexCount, positionX, positionY, positionZ, contours, depth) {
  return {
  textIndex: glyph.absoluteTextIndex,
  lineIndex: glyph.lineIndex,
@@ -4151,19 +4337,30 @@
  vertexCount,
  bounds: {
  min: {
- x: contours.bounds.min.x + …
- y: contours.bounds.min.y + …
- z: …
+ x: contours.bounds.min.x + positionX,
+ y: contours.bounds.min.y + positionY,
+ z: positionZ
  },
  max: {
- x: contours.bounds.max.x + …
- y: contours.bounds.max.y + …
- z: …
+ x: contours.bounds.max.x + positionX,
+ y: contours.bounds.max.y + positionY,
+ z: positionZ + depth
  }
  }
  };
  }
  getContoursForGlyph(glyphId) {
+ // Fast path: skip HarfBuzz draw for known-empty glyphs (spaces, zero-width, etc)
+ if (this.emptyGlyphs.has(glyphId)) {
+ return {
+ glyphId,
+ paths: [],
+ bounds: {
+ min: { x: 0, y: 0 },
+ max: { x: 0, y: 0 }
+ }
+ };
+ }
  const key = `${this.cacheKeyPrefix}_${glyphId}`;
  const cached = this.contourCache.get(key);
  if (cached) {
@@ -4184,11 +4381,15 @@
  max: { x: 0, y: 0 }
  }
  };
+ // Mark glyph as empty for future fast-path
+ if (contours.paths.length === 0) {
+ this.emptyGlyphs.add(glyphId);
+ }
  this.contourCache.set(key, contours);
  return contours;
  }
  tessellateGlyphCluster(paths, depth, isCFF) {
- const processedGeometry = this.tessellator.process(paths, true, isCFF);
+ const processedGeometry = this.tessellator.process(paths, true, isCFF, depth !== 0);
  return this.extrudeAndPackage(processedGeometry, depth);
  }
  extrudeAndPackage(processedGeometry, depth) {
@@ -4236,7 +4437,7 @@
  glyphId: glyphContours.glyphId,
  pathCount: glyphContours.paths.length
  });
- const processedGeometry = this.tessellator.process(glyphContours.paths, removeOverlaps, isCFF);
+ const processedGeometry = this.tessellator.process(glyphContours.paths, removeOverlaps, isCFF, depth !== 0);
  perfLogger.end('GlyphGeometryBuilder.tessellateGlyph');
  return this.extrudeAndPackage(processedGeometry, depth);
  }
@@ -4306,8 +4507,11 @@
  const clusters = [];
  let currentClusterGlyphs = [];
  let currentClusterText = '';
- let …
- let …
+ let clusterStartX = 0;
+ let clusterStartY = 0;
+ let cursorX = lineInfo.xOffset;
+ let cursorY = -lineIndex * scaledLineHeight;
+ const cursorZ = 0;
  // Apply letter spacing after each glyph to match width measurements used during line breaking
  const letterSpacingFU = letterSpacing * this.loadedFont.upem;
  const spaceAdjustment = this.calculateSpaceAdjustment(lineInfo, align, letterSpacing);
@@ -4332,31 +4536,31 @@
  clusters.push({
  text: currentClusterText,
  glyphs: currentClusterGlyphs,
- position: …
+ position: new Vec3(clusterStartX, clusterStartY, cursorZ)
  });
  currentClusterGlyphs = [];
  currentClusterText = '';
  }
  }
- const …
- …
- .add(new Vec3(glyph.dx, glyph.dy, 0));
+ const absoluteGlyphX = cursorX + glyph.dx;
+ const absoluteGlyphY = cursorY + glyph.dy;
  if (!isWhitespace) {
  if (currentClusterGlyphs.length === 0) {
- …
+ clusterStartX = absoluteGlyphX;
+ clusterStartY = absoluteGlyphY;
  }
- glyph.x = …
- glyph.y = …
+ glyph.x = absoluteGlyphX - clusterStartX;
+ glyph.y = absoluteGlyphY - clusterStartY;
  currentClusterGlyphs.push(glyph);
  currentClusterText += lineInfo.text[glyph.cl];
  }
- …
- …
+ cursorX += glyph.ax;
+ cursorY += glyph.ay;
  if (letterSpacingFU !== 0 && i < glyphInfos.length - 1) {
- …
+ cursorX += letterSpacingFU;
  }
  if (isWhitespace) {
- …
+ cursorX += spaceAdjustment;
  }
  // CJK glue adjustment (must match exactly where LineBreak adds glue)
  if (cjkAdjustment !== 0 && i < glyphInfos.length - 1 && !isWhitespace) {
@@ -4377,7 +4581,7 @@
  shouldApply = false;
  }
  if (shouldApply) {
- …
+ cursorX += cjkAdjustment;
  }
  }
  }
@@ -4386,7 +4590,7 @@
  clusters.push({
  text: currentClusterText,
  glyphs: currentClusterGlyphs,
- position: …
+ position: new Vec3(clusterStartX, clusterStartY, cursorZ)
  });
  }
  return clusters;
@@ -5213,9 +5417,8 @@
  const loadedFont = await Text.resolveFont(options);
  const text = new Text();
  text.setLoadedFont(loadedFont);
- // …
- const …
- const result = await text.createGeometry(geometryOptions);
+ // Pass full options so createGeometry honors maxCacheSizeMB etc
+ const result = await text.createGeometry(options);
  // Recursive update function
  const update = async (newOptions) => {
  // Merge options - preserve font from original options if not provided
@@ -5237,8 +5440,7 @@
  }
  // Update closure options for next time
  options = mergedOptions;
- const …
- const newResult = await text.createGeometry(currentGeometryOptions);
+ const newResult = await text.createGeometry(options);
  return {
  ...newResult,
  getLoadedFont: () => text.getLoadedFont(),
@@ -5663,7 +5865,7 @@
  if (!this.textLayout) {
  this.textLayout = new TextLayout(this.loadedFont);
  }
- const alignmentResult = this.textLayout.…
+ const alignmentResult = this.textLayout.computeAlignmentOffset({
  width,
  align,
  planeBounds
@@ -5672,9 +5874,19 @@
  planeBounds.min.x = alignmentResult.adjustedBounds.min.x;
  planeBounds.max.x = alignmentResult.adjustedBounds.max.x;
  const finalScale = size / this.loadedFont.upem;
+ const offsetScaled = offset * finalScale;
  // Scale vertices only (normals are unit vectors, don't scale)
- …
- …
+ if (offsetScaled === 0) {
+ for (let i = 0; i < vertices.length; i++) {
+ vertices[i] *= finalScale;
+ }
+ }
+ else {
+ for (let i = 0; i < vertices.length; i += 3) {
+ vertices[i] = vertices[i] * finalScale + offsetScaled;
+ vertices[i + 1] *= finalScale;
+ vertices[i + 2] *= finalScale;
+ }
  }
  planeBounds.min.x *= finalScale;
  planeBounds.min.y *= finalScale;
@@ -5684,14 +5896,10 @@
  planeBounds.max.z *= finalScale;
  for (let i = 0; i < glyphInfoArray.length; i++) {
  const glyphInfo = glyphInfoArray[i];
- …
- glyphInfo.bounds.min.x += offset;
- glyphInfo.bounds.max.x += offset;
- }
- glyphInfo.bounds.min.x *= finalScale;
+ glyphInfo.bounds.min.x = glyphInfo.bounds.min.x * finalScale + offsetScaled;
  glyphInfo.bounds.min.y *= finalScale;
  glyphInfo.bounds.min.z *= finalScale;
- glyphInfo.bounds.max.x …
+ glyphInfo.bounds.max.x = glyphInfo.bounds.max.x * finalScale + offsetScaled;
  glyphInfo.bounds.max.y *= finalScale;
  glyphInfo.bounds.max.z *= finalScale;
  }