@vitrosoftware/common-ui-ts 1.1.122 → 1.1.123
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/css/std/controls/checkbox/checkbox.css +4 -0
- package/css/std/controls/checkbox/img/checkbox-indeterminate.svg +4 -0
- package/css/std/controls/date-picker/date-picker.css +1 -25
- package/css/std/controls/dxf-viewer/annotation.css +85 -0
- package/css/std/controls/dxf-viewer/common.css +24 -0
- package/css/std/controls/dxf-viewer/dxf-viewer-index.css +14081 -0
- package/css/std/controls/dxf-viewer/dxf-viewer.css +194 -0
- package/css/std/controls/dxf-viewer/img/cancel-dark-grey.svg +5 -0
- package/css/std/controls/dxf-viewer/img/collapse-bottom.svg +5 -0
- package/css/std/controls/dxf-viewer/img/collapse-up-blue.svg +5 -0
- package/css/std/controls/dxf-viewer/img/delete-active.svg +11 -0
- package/css/std/controls/dxf-viewer/img/delete.svg +11 -0
- package/css/std/controls/dxf-viewer/img/draw-annotation.svg +3 -0
- package/css/std/controls/dxf-viewer/img/invisible-eye.svg +4 -0
- package/css/std/controls/dxf-viewer/img/show-annotation.svg +3 -0
- package/css/std/controls/dxf-viewer/img/sidebar-layers-toggle.svg +6 -0
- package/css/std/controls/dxf-viewer/img/sidebar-notes-toggle.svg +5 -0
- package/css/std/controls/dxf-viewer/img/sidebar-resizer.svg +6 -0
- package/css/std/controls/dxf-viewer/img/sidebar-toggle.svg +7 -0
- package/css/std/controls/dxf-viewer/img/visible-eye.svg +4 -0
- package/css/std/controls/dxf-viewer/img/zoom-in.svg +6 -0
- package/css/std/controls/dxf-viewer/img/zoom-out.svg +5 -0
- package/css/std/controls/dxf-viewer/layer-list.css +104 -0
- package/css/std/controls/dxf-viewer/panel.css +34 -0
- package/css/std/controls/dxf-viewer/prop-inspector.css +102 -0
- package/css/std/controls/dxf-viewer/select.css +111 -0
- package/css/std/controls/dxf-viewer/sidebar.css +190 -0
- package/css/std/controls/dxf-viewer/thumbnail-list.css +65 -0
- package/css/std/controls/dxf-viewer/toolbar.css +117 -0
- package/css/std/controls/dxf-viewer/treeview.css +3 -0
- package/css/std/controls/dxf-viewer/treeview.panel.css +108 -0
- package/css/std/controls/error-message/error-message.css +22 -0
- package/css/std/controls/image-picker/image-picker.css +0 -26
- package/css/std/controls/input/input.css +1 -24
- package/css/std/controls/issue-tile/issue-tile-header.css +1 -0
- package/css/std/controls/login/ntlm-authentication-form.css +9 -12
- package/css/std/controls/lookup-picker/lookup-picker-value-list.css +38 -2
- package/css/std/controls/lookup-picker/lookup-picker.css +1 -25
- package/css/std/controls/table-view/treegrid-context-menu.css +44 -18
- package/css/std/controls/table-view/treegrid-message.css +4 -4
- package/css/std/controls/time-picker/time-picker.css +1 -25
- package/dist/index.css +81 -143
- package/dist/index.js +15137 -489
- package/dist/index.js.map +1 -1
- package/dist/src/controls/Checkbox/Checkbox.d.ts +1 -0
- package/dist/src/controls/DxfViewer/DxfViewer.d.ts +6 -0
- package/dist/src/controls/DxfViewer/DxfViewerContext.d.ts +31 -0
- package/dist/src/controls/DxfViewer/Layer.d.ts +9 -0
- package/dist/src/controls/DxfViewer/LayerList.d.ts +11 -0
- package/dist/src/controls/DxfViewer/Thumbnail.d.ts +7 -0
- package/dist/src/controls/DxfViewer/ThumbnailList.d.ts +6 -0
- package/dist/src/controls/DxfViewer/Viewer.d.ts +6 -0
- package/dist/src/controls/ErrorMessage/ErrorMessage.d.ts +6 -0
- package/dist/src/controls/Login/FormRef.d.ts +3 -0
- package/dist/src/controls/Login/LoginConstants.d.ts +2 -1
- package/dist/src/controls/Login/LoginFormRef.d.ts +2 -2
- package/dist/src/controls/Login/NTLMAuthenticationForm.d.ts +5 -2
- package/dist/src/controls/LookupPicker/LookupPicker.d.ts +2 -0
- package/dist/src/controls/LookupPicker/ValueList.d.ts +2 -0
- package/dist/src/controls/TableView/TableViewConstants.d.ts +11 -0
- package/dist/src/controls/TableView/TreeGridTableViewContextImpl.d.ts +1 -0
- package/dist/src/controls/TreeView/TreeView.d.ts +4 -0
- package/dist/src/controls/TreeView/TreeViewConfig.d.ts +3 -0
- package/dist/src/controls/TreeView/TreeViewConstants.d.ts +2 -1
- package/dist/src/index.d.ts +7 -1
- package/lib/dxf-viewer/BatchingKey.js +91 -0
- package/lib/dxf-viewer/DxfFetcher.js +39 -0
- package/lib/dxf-viewer/DxfScene.js +2695 -0
- package/lib/dxf-viewer/DxfViewer.js +1056 -0
- package/lib/dxf-viewer/DxfWorker.js +229 -0
- package/lib/dxf-viewer/DynamicBuffer.js +100 -0
- package/lib/dxf-viewer/HatchCalculator.js +345 -0
- package/lib/dxf-viewer/LinearDimension.js +323 -0
- package/lib/dxf-viewer/MTextFormatParser.js +211 -0
- package/lib/dxf-viewer/MaterialKey.js +37 -0
- package/lib/dxf-viewer/OrbitControls.js +1253 -0
- package/lib/dxf-viewer/Pattern.js +94 -0
- package/lib/dxf-viewer/RBTree.js +471 -0
- package/lib/dxf-viewer/TextRenderer.js +1038 -0
- package/lib/dxf-viewer/index.js +42 -0
- package/lib/dxf-viewer/math/Matrix2.js +77 -0
- package/lib/dxf-viewer/math/utils.js +59 -0
- package/lib/dxf-viewer/parser/AutoCadColorIndex.js +265 -0
- package/lib/dxf-viewer/parser/DimStyleCodes.js +33 -0
- package/lib/dxf-viewer/parser/DxfArrayScanner.js +143 -0
- package/lib/dxf-viewer/parser/DxfParser.js +980 -0
- package/lib/dxf-viewer/parser/ExtendedDataParse-My.js +91 -0
- package/lib/dxf-viewer/parser/ExtendedDataParser.js +123 -0
- package/lib/dxf-viewer/parser/ParseHelpers.js +142 -0
- package/lib/dxf-viewer/parser/entities/3dface.js +83 -0
- package/lib/dxf-viewer/parser/entities/arc.js +38 -0
- package/lib/dxf-viewer/parser/entities/attdef.js +89 -0
- package/lib/dxf-viewer/parser/entities/attrib.js +34 -0
- package/lib/dxf-viewer/parser/entities/attribute.js +109 -0
- package/lib/dxf-viewer/parser/entities/circle.js +43 -0
- package/lib/dxf-viewer/parser/entities/dimension.js +72 -0
- package/lib/dxf-viewer/parser/entities/ellipse.js +46 -0
- package/lib/dxf-viewer/parser/entities/hatch.js +343 -0
- package/lib/dxf-viewer/parser/entities/insert.js +62 -0
- package/lib/dxf-viewer/parser/entities/leader.js +84 -0
- package/lib/dxf-viewer/parser/entities/line.js +34 -0
- package/lib/dxf-viewer/parser/entities/lwpolyline.js +100 -0
- package/lib/dxf-viewer/parser/entities/mtext.js +54 -0
- package/lib/dxf-viewer/parser/entities/point.js +35 -0
- package/lib/dxf-viewer/parser/entities/polyline.js +92 -0
- package/lib/dxf-viewer/parser/entities/solid.js +40 -0
- package/lib/dxf-viewer/parser/entities/spline.js +70 -0
- package/lib/dxf-viewer/parser/entities/text.js +47 -0
- package/lib/dxf-viewer/parser/entities/vertex.js +62 -0
- package/lib/dxf-viewer/parser/entities/viewport.js +56 -0
- package/lib/dxf-viewer/parser/objects/dictionary.js +29 -0
- package/lib/dxf-viewer/parser/objects/layout.js +35 -0
- package/lib/dxf-viewer/parser/objects/xrecord.js +29 -0
- package/lib/opentype/opentype.module.js +14571 -0
- package/lib/three/CSS2DRenderer.js +235 -0
- package/lib/three/three.module.js +49912 -0
- package/package.json +12 -10
- package/src/controls/BimViewer/js/bim-viewer.js +2 -2
- package/src/controls/DxfViewer/js/dxf-viewer.js +3541 -0
- package/src/controls/PdfViewer/js/pdf-viewer.js +1 -1
- package/css/std/controls/input/img/error-message.svg +0 -6
- package/css/std/controls/lookup-picker/img/error-message.svg +0 -6
- package/css/std/controls/time-picker/img/error-message.svg +0 -6
- /package/css/std/controls/{date-picker → error-message}/img/error-message.svg +0 -0
|
@@ -0,0 +1,2695 @@
|
|
|
1
|
+
import {DynamicBuffer, NativeType} from "./DynamicBuffer.js"
|
|
2
|
+
import {BatchingKey} from "./BatchingKey.js"
|
|
3
|
+
import { Matrix3, Vector2 } from "/resource/dxfViewer/js/three/three.module.js"
|
|
4
|
+
import {TextRenderer, ParseSpecialChars, HAlign, VAlign} from "./TextRenderer.js"
|
|
5
|
+
import {RBTree} from "./RBTree.js"
|
|
6
|
+
import {MTextFormatParser} from "./MTextFormatParser.js";
|
|
7
|
+
import dimStyleCodes from './parser/DimStyleCodes.js'
|
|
8
|
+
import {LinearDimension} from "./LinearDimension.js"
|
|
9
|
+
//import { HatchCalculator, HatchStyle } from "./HatchCalculator.js"
|
|
10
|
+
//import { LookupPattern, Pattern } from "./Pattern.js"
|
|
11
|
+
//import "./patterns"
|
|
12
|
+
|
|
13
|
+
export { Block, RenderBatch, BlockContext };
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
/** Use 16-bit indices for indexed geometry. */
const INDEXED_CHUNK_SIZE = 0x10000

/** Arc angle for tessellating point circle shape. */
const POINT_CIRCLE_TESSELLATION_ANGLE = 15 * Math.PI / 180

/** Name of the synthetic block used for instanced point-shape markers. */
const POINT_SHAPE_BLOCK_NAME = "__point_shape"

/** Flatten a block if its total vertices count in all instances is less than this value. */
const BLOCK_FLATTENING_VERTICES_THRESHOLD = 1024

/** Number of subdivisions per spline point. */
const SPLINE_SUBDIVISION = 4

/** Limit hatch lines number to some reasonable value to mitigate hanging and out-of-memory
 * issues on bad files.
 */
const MAX_HATCH_LINES = 20000

/** Limit hatch segments number per line to some reasonable value to mitigate hanging and
 * out-of-memory issues on bad files.
 */
const MAX_HATCH_SEGMENTS = 20000


/** Default values for system variables. An entry is either a value or a function called to
 * obtain the value; for a function `this` is the DxfScene instance.
 * https://knowledge.autodesk.com/support/autocad/learn-explore/caas/CloudHelp/cloudhelp/2016/ENU/AutoCAD-Core/files/GUID-A17A69D7-25EF-4F57-B4EB-D53A56AB909C-htm.html
 */
const DEFAULT_VARS = {
    DIMTXT: function() {
        //XXX should select value for imperial or metric units
        return 2.5 //XXX 0.18 for imperial
    },
    DIMASZ: 2.5, //XXX 0.18 for imperial
    DIMCLRD: 0,
    DIMCLRE: 0,
    DIMCLRT: 0,
    DIMDEC: 2, //XXX 4 for imperial
    DIMDLE: 0,
    DIMDSEP: ".".charCodeAt(0), //XXX "," for imperial
    DIMEXE: 1.25, //XXX 0.18 for imperial
    DIMEXO: 0.625, //XXX 0.0625 for imperial
    DIMFXL: 1,
    DIMFXLON: false,
    DIMGAP: 0.625, //XXX for imperial
    DIMLFAC: 1,
    DIMRND: 0,
    DIMSAH: 0,
    DIMSCALE: 1,
    DIMSD1: 0,
    DIMSD2: 0,
    DIMSE1: 0,
    DIMSE2: 0,
    DIMSOXD: false,
    DIMTSZ: 0,
    DIMZIN: 8, //XXX 0 for imperial
}
|
|
70
|
+
|
|
71
|
+
/** This class prepares an internal representation of a DXF file, optimized fo WebGL rendering. It
|
|
72
|
+
* is decoupled in such a way so that it should be possible to build it in a web-worker, effectively
|
|
73
|
+
* transfer it to the main thread, and easily apply it to a Three.js scene there.
|
|
74
|
+
*/
|
|
75
|
+
export class DxfScene {
|
|
76
|
+
|
|
77
|
+
constructor(options) {
|
|
78
|
+
this.options = Object.create(DxfScene.DefaultOptions)
|
|
79
|
+
if (options) {
|
|
80
|
+
Object.assign(this.options, options.sceneOptions)
|
|
81
|
+
}
|
|
82
|
+
|
|
83
|
+
/* Scene origin. All input coordinates are made local to this point to minimize precision
|
|
84
|
+
* loss.
|
|
85
|
+
*/
|
|
86
|
+
this.origin = null
|
|
87
|
+
/* RBTree<BatchingKey, RenderBatch> */
|
|
88
|
+
this.batches = new RBTree((b1, b2) => b1.key.Compare(b2.key))
|
|
89
|
+
/* Indexed by layer name, value is layer object from parsed DXF. */
|
|
90
|
+
this.layers = new Map()
|
|
91
|
+
/* Indexed by block name, value is Block. */
|
|
92
|
+
this.blocks = new Map()
|
|
93
|
+
/** Indexed by dimension style name, value is DIMSTYLE object from parsed DXF. */
|
|
94
|
+
this.dimStyles = new Map()
|
|
95
|
+
/** Indexed by variable name (without leading '$'). */
|
|
96
|
+
this.vars = new Map()
|
|
97
|
+
this.fontStyles = new Map();
|
|
98
|
+
this.bounds = null
|
|
99
|
+
this.pointShapeBlock = null
|
|
100
|
+
this.numBlocksFlattened = 0
|
|
101
|
+
this.numEntitiesFiltered = 0
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
/** Build the scene from the provided parsed DXF.
|
|
105
|
+
* @param dxf {{}} Parsed DXF file.
|
|
106
|
+
* @param fontFetchers {?Function[]} List of font fetchers. Fetcher should return promise with
|
|
107
|
+
* loaded font object (opentype.js). They are invoked only when necessary. Each glyph is being
|
|
108
|
+
* searched sequentially in each provided font.
|
|
109
|
+
*/
|
|
110
|
+
async Build(dxf, fontFetchers) {
|
|
111
|
+
const header = dxf.header || {}
|
|
112
|
+
|
|
113
|
+
for (const [name, value] of Object.entries(header)) {
|
|
114
|
+
if (name.startsWith("$")) {
|
|
115
|
+
this.vars.set(name.slice(1), value)
|
|
116
|
+
}
|
|
117
|
+
}
|
|
118
|
+
|
|
119
|
+
/* Zero angle direction, 0 is +X. */
|
|
120
|
+
this.angBase = this.vars.get("ANGBASE") ?? 0
|
|
121
|
+
/* 0 - CCW, 1 - CW */
|
|
122
|
+
this.angDir = this.vars.get("ANGDIR") ?? 0
|
|
123
|
+
this.pdSize = this.vars.get("PDSIZE") ?? 0
|
|
124
|
+
this.isMetric = (this.vars.get("MEASUREMENT") ?? 1) == 1
|
|
125
|
+
|
|
126
|
+
if(dxf.tables && dxf.tables.layer) {
|
|
127
|
+
for (const [, layer] of Object.entries(dxf.tables.layer.layers)) {
|
|
128
|
+
layer.displayName = ParseSpecialChars(layer.name)
|
|
129
|
+
this.layers.set(layer.name, layer)
|
|
130
|
+
}
|
|
131
|
+
}
|
|
132
|
+
|
|
133
|
+
if(dxf.tables && dxf.tables.dimstyle) {
|
|
134
|
+
for (const [, style] of Object.entries(dxf.tables.dimstyle.dimStyles)) {
|
|
135
|
+
this.dimStyles.set(style.name, style)
|
|
136
|
+
}
|
|
137
|
+
}
|
|
138
|
+
|
|
139
|
+
if (dxf.tables && dxf.tables.style) {
|
|
140
|
+
for (const [, style] of Object.entries(dxf.tables.style.styles)) {
|
|
141
|
+
this.fontStyles.set(style.styleName, style);
|
|
142
|
+
}
|
|
143
|
+
}
|
|
144
|
+
|
|
145
|
+
if (dxf.blocks) {
|
|
146
|
+
for (const [, block] of Object.entries(dxf.blocks)) {
|
|
147
|
+
this.blocks.set(block.name, new Block(block))
|
|
148
|
+
}
|
|
149
|
+
}
|
|
150
|
+
|
|
151
|
+
this.textRenderer = new TextRenderer(fontFetchers, this.options.textOptions)
|
|
152
|
+
this.hasMissingChars = false
|
|
153
|
+
await this._FetchFonts(dxf)
|
|
154
|
+
|
|
155
|
+
/* Scan all entities to analyze block usage statistics. */
|
|
156
|
+
for (const entity of dxf.entities) {
|
|
157
|
+
if (!this._FilterEntity(entity)) {
|
|
158
|
+
continue
|
|
159
|
+
}
|
|
160
|
+
if (entity.type === "INSERT") {
|
|
161
|
+
const block = this.blocks.get(entity.name)
|
|
162
|
+
block?.RegisterInsert(entity)
|
|
163
|
+
|
|
164
|
+
} else if (entity.type == "DIMENSION") {
|
|
165
|
+
if ((entity.block ?? null) !== null) {
|
|
166
|
+
const block = this.blocks.get(entity.block)
|
|
167
|
+
block?.RegisterInsert(entity)
|
|
168
|
+
}
|
|
169
|
+
}
|
|
170
|
+
}
|
|
171
|
+
|
|
172
|
+
for (const block of this.blocks.values()) {
|
|
173
|
+
if (block.data.hasOwnProperty("entities")) {
|
|
174
|
+
const blockCtx = block.DefinitionContext()
|
|
175
|
+
for (const entity of block.data.entities) {
|
|
176
|
+
if (!this._FilterEntity(entity)) {
|
|
177
|
+
continue
|
|
178
|
+
}
|
|
179
|
+
this._ProcessDxfEntity(entity, blockCtx)
|
|
180
|
+
}
|
|
181
|
+
}
|
|
182
|
+
if (block.SetFlatten()) {
|
|
183
|
+
this.numBlocksFlattened++
|
|
184
|
+
}
|
|
185
|
+
}
|
|
186
|
+
console.log(`${this.numBlocksFlattened} blocks flattened`)
|
|
187
|
+
|
|
188
|
+
for (const entity of dxf.entities) {
|
|
189
|
+
if (!this._FilterEntity(entity)) {
|
|
190
|
+
this.numEntitiesFiltered++
|
|
191
|
+
continue
|
|
192
|
+
}
|
|
193
|
+
this._ProcessDxfEntity(entity)
|
|
194
|
+
}
|
|
195
|
+
console.log(`${this.numEntitiesFiltered} entities filtered`)
|
|
196
|
+
|
|
197
|
+
this.scene = this._BuildScene()
|
|
198
|
+
|
|
199
|
+
delete this.batches
|
|
200
|
+
delete this.layers
|
|
201
|
+
delete this.blocks
|
|
202
|
+
delete this.textRenderer
|
|
203
|
+
}
|
|
204
|
+
|
|
205
|
+
/** @return False to suppress the specified entity, true to permit rendering. */
|
|
206
|
+
_FilterEntity(entity) {
|
|
207
|
+
return !this.options.suppressPaperSpace || !entity.inPaperSpace
|
|
208
|
+
}
|
|
209
|
+
|
|
210
|
+
async _FetchFonts(dxf) {
|
|
211
|
+
|
|
212
|
+
function IsTextEntity(entity) {
|
|
213
|
+
return entity.type === "TEXT" || entity.type === "MTEXT" ||
|
|
214
|
+
entity.type === "DIMENSION" || entity.type === "ATTDEF" ||
|
|
215
|
+
entity.type === "ATTRIB"
|
|
216
|
+
}
|
|
217
|
+
|
|
218
|
+
const ProcessEntity = async (entity) => {
|
|
219
|
+
if (!this._FilterEntity(entity)) {
|
|
220
|
+
return
|
|
221
|
+
}
|
|
222
|
+
let ret
|
|
223
|
+
if (entity.type === "TEXT" || entity.type === "ATTRIB" || entity.type === "ATTDEF") {
|
|
224
|
+
ret = await this.textRenderer.FetchFonts(ParseSpecialChars(entity.text))
|
|
225
|
+
|
|
226
|
+
} else if (entity.type === "MTEXT") {
|
|
227
|
+
const parser = new MTextFormatParser()
|
|
228
|
+
parser.Parse(entity.text)
|
|
229
|
+
ret = true
|
|
230
|
+
//XXX formatted MTEXT may specify some fonts explicitly, this is not yet supported
|
|
231
|
+
for (const text of parser.GetText()) {
|
|
232
|
+
if (!await this.textRenderer.FetchFonts(ParseSpecialChars(text))) {
|
|
233
|
+
ret = false
|
|
234
|
+
break
|
|
235
|
+
}
|
|
236
|
+
}
|
|
237
|
+
|
|
238
|
+
} else if (entity.type === "DIMENSION") {
|
|
239
|
+
ret = true
|
|
240
|
+
const dim = this._CreateLinearDimension(entity)
|
|
241
|
+
if (dim) {
|
|
242
|
+
for (const text of dim.GetTexts()) {
|
|
243
|
+
if (!await this.textRenderer.FetchFonts(text)) {
|
|
244
|
+
ret = false
|
|
245
|
+
break
|
|
246
|
+
}
|
|
247
|
+
}
|
|
248
|
+
}
|
|
249
|
+
|
|
250
|
+
} else {
|
|
251
|
+
throw new Error("Bad entity type")
|
|
252
|
+
}
|
|
253
|
+
if (!ret) {
|
|
254
|
+
this.hasMissingChars = true
|
|
255
|
+
}
|
|
256
|
+
return ret
|
|
257
|
+
}
|
|
258
|
+
|
|
259
|
+
for (const entity of dxf.entities) {
|
|
260
|
+
if (IsTextEntity(entity)) {
|
|
261
|
+
if (!await ProcessEntity(entity)) {
|
|
262
|
+
/* Failing to resolve some character means that all fonts have been loaded and
|
|
263
|
+
* checked. No mean to check the rest strings. However until it is encountered,
|
|
264
|
+
* all strings should be checked, even if all fonts already loaded. This needed
|
|
265
|
+
* to properly set hasMissingChars which allows displaying some warning in a
|
|
266
|
+
* viewer.
|
|
267
|
+
*/
|
|
268
|
+
return
|
|
269
|
+
}
|
|
270
|
+
}
|
|
271
|
+
}
|
|
272
|
+
for (const block of this.blocks.values()) {
|
|
273
|
+
if (block.data.hasOwnProperty("entities")) {
|
|
274
|
+
for (const entity of block.data.entities) {
|
|
275
|
+
if (IsTextEntity(entity)) {
|
|
276
|
+
if (!await ProcessEntity(entity)) {
|
|
277
|
+
return
|
|
278
|
+
}
|
|
279
|
+
}
|
|
280
|
+
}
|
|
281
|
+
}
|
|
282
|
+
}
|
|
283
|
+
}
|
|
284
|
+
|
|
285
|
+
_ProcessDxfEntity(entity, blockCtx = null) {
|
|
286
|
+
let renderEntities
|
|
287
|
+
switch (entity.type) {
|
|
288
|
+
case "LINE":
|
|
289
|
+
renderEntities = this._DecomposeLine(entity, blockCtx)
|
|
290
|
+
break
|
|
291
|
+
case "POLYLINE":
|
|
292
|
+
case "LWPOLYLINE":
|
|
293
|
+
renderEntities = this._DecomposePolyline(entity, blockCtx)
|
|
294
|
+
break
|
|
295
|
+
case "ARC":
|
|
296
|
+
renderEntities = this._DecomposeArc(entity, blockCtx)
|
|
297
|
+
break
|
|
298
|
+
case "CIRCLE":
|
|
299
|
+
renderEntities = this._DecomposeCircle(entity, blockCtx)
|
|
300
|
+
break
|
|
301
|
+
case "ELLIPSE":
|
|
302
|
+
renderEntities = this._DecomposeEllipse(entity, blockCtx)
|
|
303
|
+
break
|
|
304
|
+
case "POINT":
|
|
305
|
+
renderEntities = this._DecomposePoint(entity, blockCtx)
|
|
306
|
+
break
|
|
307
|
+
case "SPLINE":
|
|
308
|
+
renderEntities = this._DecomposeSpline(entity, blockCtx)
|
|
309
|
+
break
|
|
310
|
+
case "INSERT":
|
|
311
|
+
/* Works with rendering batches without intermediate entities. */
|
|
312
|
+
this._ProcessInsert(entity, blockCtx)
|
|
313
|
+
return
|
|
314
|
+
case "TEXT":
|
|
315
|
+
renderEntities = this._DecomposeText(entity, blockCtx)
|
|
316
|
+
break
|
|
317
|
+
case "MTEXT":
|
|
318
|
+
renderEntities = this._DecomposeMText(entity, blockCtx)
|
|
319
|
+
break
|
|
320
|
+
case "3DFACE":
|
|
321
|
+
renderEntities = this._Decompose3DFace(entity, blockCtx)
|
|
322
|
+
break
|
|
323
|
+
case "SOLID":
|
|
324
|
+
renderEntities = this._DecomposeSolid(entity, blockCtx)
|
|
325
|
+
break
|
|
326
|
+
case "DIMENSION":
|
|
327
|
+
renderEntities = this._DecomposeDimension(entity, blockCtx)
|
|
328
|
+
break
|
|
329
|
+
case "ATTRIB":
|
|
330
|
+
renderEntities = this._DecomposeAttribute(entity, blockCtx)
|
|
331
|
+
break
|
|
332
|
+
case "HATCH":
|
|
333
|
+
renderEntities = this._DecomposeHatch(entity, blockCtx)
|
|
334
|
+
break
|
|
335
|
+
default:
|
|
336
|
+
console.log("Unhandled entity type: " + entity.type)
|
|
337
|
+
return
|
|
338
|
+
}
|
|
339
|
+
for (const renderEntity of renderEntities) {
|
|
340
|
+
this._ProcessEntity(renderEntity, blockCtx)
|
|
341
|
+
}
|
|
342
|
+
}
|
|
343
|
+
/**
|
|
344
|
+
* @param entity {Entity}
|
|
345
|
+
* @param blockCtx {?BlockContext}
|
|
346
|
+
*/
|
|
347
|
+
_ProcessEntity(entity, blockCtx = null) {
|
|
348
|
+
switch (entity.type) {
|
|
349
|
+
case Entity.Type.POINTS:
|
|
350
|
+
this._ProcessPoints(entity, blockCtx)
|
|
351
|
+
break
|
|
352
|
+
case Entity.Type.LINE_SEGMENTS:
|
|
353
|
+
this._ProcessLineSegments(entity, blockCtx)
|
|
354
|
+
break
|
|
355
|
+
case Entity.Type.POLYLINE:
|
|
356
|
+
this._ProcessPolyline(entity, blockCtx)
|
|
357
|
+
break
|
|
358
|
+
case Entity.Type.TRIANGLES:
|
|
359
|
+
this._ProcessTriangles(entity, blockCtx)
|
|
360
|
+
break
|
|
361
|
+
default:
|
|
362
|
+
throw new Error("Unhandled entity type: " + entity.type)
|
|
363
|
+
}
|
|
364
|
+
}
|
|
365
|
+
|
|
366
|
+
/**
|
|
367
|
+
* @param entity
|
|
368
|
+
* @param vertex
|
|
369
|
+
* @param blockCtx {?BlockContext}
|
|
370
|
+
* @return {number}
|
|
371
|
+
*/
|
|
372
|
+
_GetLineType(entity, vertex = null, blockCtx = null) {
|
|
373
|
+
//XXX lookup
|
|
374
|
+
return 0
|
|
375
|
+
}
|
|
376
|
+
|
|
377
|
+
/** Check if start/end with are not specified. */
|
|
378
|
+
_IsPlainLine(entity) {
|
|
379
|
+
return !Boolean(entity.startWidth || entity.endWidth)
|
|
380
|
+
}
|
|
381
|
+
|
|
382
|
+
*_DecomposeLine(entity, blockCtx) {
|
|
383
|
+
/* start/end width, bulge - seems cannot be present, at least with current parser */
|
|
384
|
+
if (entity.vertices.length !== 2) {
|
|
385
|
+
return
|
|
386
|
+
}
|
|
387
|
+
const layer = this._GetEntityLayer(entity, blockCtx)
|
|
388
|
+
const color = this._GetEntityColor(entity, blockCtx)
|
|
389
|
+
yield new Entity({
|
|
390
|
+
type: Entity.Type.LINE_SEGMENTS,
|
|
391
|
+
vertices: entity.vertices,
|
|
392
|
+
layer, color,
|
|
393
|
+
lineType: this._GetLineType(entity, entity.vertices[0])
|
|
394
|
+
})
|
|
395
|
+
}
|
|
396
|
+
|
|
397
|
+
/** Generate vertices for bulged line segment.
|
|
398
|
+
*
|
|
399
|
+
* @param vertices Generated vertices pushed here.
|
|
400
|
+
* @param startVtx Starting vertex. Assuming it is already present in the vertices array.
|
|
401
|
+
* @param endVtx Ending vertex.
|
|
402
|
+
* @param bulge Bulge value (see DXF specification).
|
|
403
|
+
*/
|
|
404
|
+
_GenerateBulgeVertices(vertices, startVtx, endVtx, bulge) {
|
|
405
|
+
const a = 4 * Math.atan(bulge)
|
|
406
|
+
const aAbs = Math.abs(a)
|
|
407
|
+
if (aAbs < this.options.arcTessellationAngle) {
|
|
408
|
+
vertices.push(new Vector2(endVtx.x, endVtx.y))
|
|
409
|
+
return
|
|
410
|
+
}
|
|
411
|
+
const ha = a / 2
|
|
412
|
+
const sha = Math.sin(ha)
|
|
413
|
+
const cha = Math.cos(ha)
|
|
414
|
+
const d = {x: endVtx.x - startVtx.x, y: endVtx.y - startVtx.y}
|
|
415
|
+
const dSq = d.x * d.x + d.y * d.y
|
|
416
|
+
if (dSq < Number.MIN_VALUE * 2) {
|
|
417
|
+
/* No vertex is pushed since end vertex is duplicate of start vertex. */
|
|
418
|
+
return
|
|
419
|
+
}
|
|
420
|
+
const D = Math.sqrt(dSq)
|
|
421
|
+
let R = D / 2 / sha
|
|
422
|
+
d.x /= D
|
|
423
|
+
d.y /= D
|
|
424
|
+
const center = {
|
|
425
|
+
x: (d.x * sha - d.y * cha) * R + startVtx.x,
|
|
426
|
+
y: (d.x * cha + d.y * sha) * R + startVtx.y
|
|
427
|
+
}
|
|
428
|
+
|
|
429
|
+
let numSegments = Math.floor(aAbs / this.options.arcTessellationAngle)
|
|
430
|
+
if (numSegments < this.options.minArcTessellationSubdivisions) {
|
|
431
|
+
numSegments = this.options.minArcTessellationSubdivisions
|
|
432
|
+
}
|
|
433
|
+
if (numSegments > 1) {
|
|
434
|
+
const startAngle = Math.atan2(startVtx.y - center.y, startVtx.x - center.x)
|
|
435
|
+
const step = a / numSegments
|
|
436
|
+
if (a < 0) {
|
|
437
|
+
R = -R
|
|
438
|
+
}
|
|
439
|
+
for (let i = 1; i < numSegments; i++) {
|
|
440
|
+
const a = startAngle + i * step
|
|
441
|
+
const v = new Vector2(
|
|
442
|
+
center.x + R * Math.cos(a),
|
|
443
|
+
center.y + R * Math.sin(a)
|
|
444
|
+
)
|
|
445
|
+
vertices.push(v)
|
|
446
|
+
}
|
|
447
|
+
}
|
|
448
|
+
vertices.push(new Vector2(endVtx.x, endVtx.y))
|
|
449
|
+
}
|
|
450
|
+
|
|
451
|
+
/** Generate vertices for arc segment.
|
|
452
|
+
*
|
|
453
|
+
* @param vertices Generated vertices pushed here.
|
|
454
|
+
* @param {{x, y}} center Center vector.
|
|
455
|
+
* @param {number} radius
|
|
456
|
+
* @param {?number} startAngle Start angle in radians. Zero if not specified. Arc is drawn in
|
|
457
|
+
* CCW direction from start angle towards end angle.
|
|
458
|
+
* @param {?number} endAngle Optional end angle in radians. Full circle is drawn if not
|
|
459
|
+
* specified.
|
|
460
|
+
* @param {?number} tessellationAngle Arc tessellation angle in radians, default value is taken
|
|
461
|
+
* from scene options.
|
|
462
|
+
* @param {?number} yRadius Specify to get ellipse arc. `radius` parameter used as X radius.
|
|
463
|
+
* @param {?Matrix3} transform Optional transform matrix for the arc. Applied as last operation.
|
|
464
|
+
* @param {?number} rotation Optional rotation angle for generated arc. Mostly for ellipses.
|
|
465
|
+
* @param {?boolean} cwAngleDir Angles counted in clockwise direction from X positive direction.
|
|
466
|
+
* @return {Vector2[]} List of generated vertices.
|
|
467
|
+
*/
|
|
468
|
+
_GenerateArcVertices({vertices, center, radius, startAngle = null, endAngle = null,
|
|
469
|
+
tessellationAngle = null, yRadius = null, transform = null,
|
|
470
|
+
rotation = null, ccwAngleDir = true}) {
|
|
471
|
+
if (!center || !radius) {
|
|
472
|
+
return
|
|
473
|
+
}
|
|
474
|
+
if (!tessellationAngle) {
|
|
475
|
+
tessellationAngle = this.options.arcTessellationAngle
|
|
476
|
+
}
|
|
477
|
+
if (yRadius === null) {
|
|
478
|
+
yRadius = radius
|
|
479
|
+
}
|
|
480
|
+
/* Normalize angles - make them starting from +X in CCW direction. End angle should be
|
|
481
|
+
* greater than start angle.
|
|
482
|
+
*/
|
|
483
|
+
if (startAngle === undefined || startAngle === null) {
|
|
484
|
+
startAngle = 0
|
|
485
|
+
} else {
|
|
486
|
+
startAngle += this.angBase
|
|
487
|
+
}
|
|
488
|
+
let isClosed = false
|
|
489
|
+
if (endAngle === undefined || endAngle === null) {
|
|
490
|
+
endAngle = startAngle + 2 * Math.PI
|
|
491
|
+
isClosed = true
|
|
492
|
+
} else {
|
|
493
|
+
endAngle += this.angBase
|
|
494
|
+
}
|
|
495
|
+
|
|
496
|
+
//XXX this.angDir - not clear, seem in practice it does not alter arcs rendering.
|
|
497
|
+
if (!ccwAngleDir) {
|
|
498
|
+
const tmp = startAngle
|
|
499
|
+
startAngle = -endAngle
|
|
500
|
+
endAngle = -tmp
|
|
501
|
+
}
|
|
502
|
+
|
|
503
|
+
while (endAngle <= startAngle) {
|
|
504
|
+
endAngle += Math.PI * 2
|
|
505
|
+
}
|
|
506
|
+
|
|
507
|
+
const arcAngle = endAngle - startAngle
|
|
508
|
+
|
|
509
|
+
let numSegments = Math.floor(arcAngle / tessellationAngle)
|
|
510
|
+
if (numSegments === 0) {
|
|
511
|
+
numSegments = 1
|
|
512
|
+
}
|
|
513
|
+
const step = arcAngle / numSegments
|
|
514
|
+
|
|
515
|
+
let rotationTransform = null
|
|
516
|
+
if (rotation) {
|
|
517
|
+
rotationTransform = new Matrix3().makeRotation(rotation)
|
|
518
|
+
}
|
|
519
|
+
|
|
520
|
+
for (let i = 0; i <= numSegments; i++) {
|
|
521
|
+
if (i === numSegments && isClosed) {
|
|
522
|
+
break
|
|
523
|
+
}
|
|
524
|
+
let a
|
|
525
|
+
if (ccwAngleDir) {
|
|
526
|
+
a = startAngle + i * step
|
|
527
|
+
} else {
|
|
528
|
+
a = startAngle + (numSegments - i) * step
|
|
529
|
+
}
|
|
530
|
+
const v = new Vector2(radius * Math.cos(a), yRadius * Math.sin(a))
|
|
531
|
+
|
|
532
|
+
if (rotationTransform) {
|
|
533
|
+
v.applyMatrix3(rotationTransform)
|
|
534
|
+
}
|
|
535
|
+
v.add(center)
|
|
536
|
+
if (transform) {
|
|
537
|
+
v.applyMatrix3(transform)
|
|
538
|
+
}
|
|
539
|
+
vertices.push(v)
|
|
540
|
+
}
|
|
541
|
+
}
|
|
542
|
+
|
|
543
|
+
*_DecomposeArc(entity, blockCtx) {
|
|
544
|
+
const color = this._GetEntityColor(entity, blockCtx)
|
|
545
|
+
const layer = this._GetEntityLayer(entity, blockCtx)
|
|
546
|
+
const lineType = this._GetLineType(entity, null, blockCtx)
|
|
547
|
+
const vertices = []
|
|
548
|
+
this._GenerateArcVertices({vertices, center: entity.center, radius: entity.radius,
|
|
549
|
+
startAngle: entity.startAngle, endAngle: entity.endAngle,
|
|
550
|
+
transform: this._GetEntityExtrusionTransform(entity)})
|
|
551
|
+
yield new Entity({
|
|
552
|
+
type: Entity.Type.POLYLINE,
|
|
553
|
+
vertices, layer, color, lineType,
|
|
554
|
+
shape: entity.endAngle === undefined
|
|
555
|
+
})
|
|
556
|
+
}
|
|
557
|
+
|
|
558
|
+
*_DecomposeCircle(entity, blockCtx) {
|
|
559
|
+
const color = this._GetEntityColor(entity, blockCtx)
|
|
560
|
+
const layer = this._GetEntityLayer(entity, blockCtx)
|
|
561
|
+
const lineType = this._GetLineType(entity, null, blockCtx)
|
|
562
|
+
const vertices = []
|
|
563
|
+
this._GenerateArcVertices({vertices, center: entity.center, radius: entity.radius,
|
|
564
|
+
transform: this._GetEntityExtrusionTransform(entity)})
|
|
565
|
+
yield new Entity({
|
|
566
|
+
type: Entity.Type.POLYLINE,
|
|
567
|
+
vertices, layer, color, lineType,
|
|
568
|
+
shape: true
|
|
569
|
+
})
|
|
570
|
+
}
|
|
571
|
+
|
|
572
|
+
*_DecomposeEllipse(entity, blockCtx) {
|
|
573
|
+
const color = this._GetEntityColor(entity, blockCtx)
|
|
574
|
+
const layer = this._GetEntityLayer(entity, blockCtx)
|
|
575
|
+
const lineType = this._GetLineType(entity, null, blockCtx)
|
|
576
|
+
const vertices = []
|
|
577
|
+
const xR = Math.sqrt(entity.majorAxisEndPoint.x * entity.majorAxisEndPoint.x +
|
|
578
|
+
entity.majorAxisEndPoint.y * entity.majorAxisEndPoint.y)
|
|
579
|
+
const yR = xR * entity.axisRatio
|
|
580
|
+
const rotation = Math.atan2(entity.majorAxisEndPoint.y, entity.majorAxisEndPoint.x)
|
|
581
|
+
|
|
582
|
+
const startAngle = entity.startAngle ?? 0
|
|
583
|
+
let endAngle = entity.endAngle ?? startAngle + 2 * Math.PI
|
|
584
|
+
while (endAngle <= startAngle) {
|
|
585
|
+
endAngle += Math.PI * 2
|
|
586
|
+
}
|
|
587
|
+
const isClosed = (entity.endAngle ?? null) === null ||
|
|
588
|
+
Math.abs(endAngle - startAngle - 2 * Math.PI) < 1e-6
|
|
589
|
+
|
|
590
|
+
this._GenerateArcVertices({vertices, center: entity.center, radius: xR,
|
|
591
|
+
startAngle: entity.startAngle,
|
|
592
|
+
endAngle: isClosed ? null : entity.endAngle,
|
|
593
|
+
yRadius: yR,
|
|
594
|
+
rotation,
|
|
595
|
+
/* Assuming mirror transform if present, for ellipse it just
|
|
596
|
+
* reverses angle direction.
|
|
597
|
+
*/
|
|
598
|
+
ccwAngleDir: !this._GetEntityExtrusionTransform(entity)})
|
|
599
|
+
|
|
600
|
+
yield new Entity({
|
|
601
|
+
type: Entity.Type.POLYLINE,
|
|
602
|
+
vertices, layer, color, lineType,
|
|
603
|
+
shape: isClosed
|
|
604
|
+
})
|
|
605
|
+
}
|
|
606
|
+
|
|
607
|
+
/** Decompose POINT entity according to the current point display mode ($PDMODE/$PDSIZE).
 * Shaped marks (square/circle decoration) are rendered as instances of a shared
 * synthetic block; plain dots become POINTS geometry; other marks become line segments.
 * @param entity POINT entity from the parsed DXF.
 * @param blockCtx {?BlockContext} Non-null when processed inside a block definition.
 * @return {Generator<Entity>}
 */
*_DecomposePoint(entity, blockCtx) {
    if (this.pdMode === PdMode.NONE) {
        /* Points not displayed. */
        return
    }
    if (this.pdMode !== PdMode.DOT && this.pdSize <= 0) {
        /* Currently not supported. */
        return
    }
    const color = this._GetEntityColor(entity, blockCtx)
    const layer = this._GetEntityLayer(entity, blockCtx)
    const markType = this.pdMode & PdMode.MARK_MASK
    const isShaped = (this.pdMode & PdMode.SHAPE_MASK) !== 0

    if (isShaped) {
        /* Shaped mark should be instanced. */
        const key = new BatchingKey(layer, POINT_SHAPE_BLOCK_NAME,
                                    BatchingKey.GeometryType.POINT_INSTANCE, color, 0)
        const batch = this._GetBatch(key)
        batch.PushVertex(this._TransformVertex(entity.position))
        /* Ensure the shared shape block exists (created lazily, once). */
        this._CreatePointShapeBlock()
        return
    }

    if (markType === PdMode.DOT) {
        /* Plain dot, rendered as point geometry. */
        yield new Entity({
            type: Entity.Type.POINTS,
            vertices: [entity.position],
            layer, color,
            lineType: null
        })
        return
    }

    /* Non-instanced mark, emitted directly as line segments. */
    const vertices = []
    this._CreatePointMarker(vertices, markType, entity.position)
    yield new Entity({
        type: Entity.Type.LINE_SEGMENTS,
        vertices, layer, color,
        lineType: null
    })
}
|
|
649
|
+
|
|
650
|
+
*_DecomposeAttribute(entity, blockCtx) {
|
|
651
|
+
if (!this.textRenderer.canRender) {
|
|
652
|
+
return;
|
|
653
|
+
}
|
|
654
|
+
const layer = this._GetEntityLayer(entity, blockCtx);
|
|
655
|
+
const color = this._GetEntityColor(entity, blockCtx);
|
|
656
|
+
|
|
657
|
+
const font = this.fontStyles.get(entity.textStyle);
|
|
658
|
+
|
|
659
|
+
yield* this.textRenderer.Render({
|
|
660
|
+
text: ParseSpecialChars(entity.text),
|
|
661
|
+
fontSize: entity.textHeight * entity.scale,
|
|
662
|
+
startPos: entity.startPoint,
|
|
663
|
+
endPos: entity.endPoint,
|
|
664
|
+
rotation: entity.rotation,
|
|
665
|
+
hAlign: entity.horizontalJustification,
|
|
666
|
+
vAlign: entity.verticalJustification,
|
|
667
|
+
widthFactor: font?.widthFactor,
|
|
668
|
+
color,
|
|
669
|
+
layer,
|
|
670
|
+
});
|
|
671
|
+
}
|
|
672
|
+
|
|
673
|
+
|
|
674
|
+
/** Create line segments for point marker.
|
|
675
|
+
* @param vertices
|
|
676
|
+
* @param markType
|
|
677
|
+
* @param position {?{x,y}} point center position, default is zero.
|
|
678
|
+
*/
|
|
679
|
+
_CreatePointMarker(vertices, markType, position = null) {
|
|
680
|
+
const _this = this
|
|
681
|
+
function PushVertex(offsetX, offsetY) {
|
|
682
|
+
vertices.push({
|
|
683
|
+
x: (position?.x ?? 0) + offsetX * _this.pdSize * 0.5,
|
|
684
|
+
y: (position?.y ?? 0) + offsetY * _this.pdSize * 0.5
|
|
685
|
+
})
|
|
686
|
+
}
|
|
687
|
+
|
|
688
|
+
switch(markType) {
|
|
689
|
+
case PdMode.PLUS:
|
|
690
|
+
PushVertex(0, 1.5)
|
|
691
|
+
PushVertex(0, -1.5)
|
|
692
|
+
PushVertex(-1.5, 0)
|
|
693
|
+
PushVertex(1.5, 0)
|
|
694
|
+
break
|
|
695
|
+
case PdMode.CROSS:
|
|
696
|
+
PushVertex(-1, 1)
|
|
697
|
+
PushVertex(1, -1)
|
|
698
|
+
PushVertex(1, 1)
|
|
699
|
+
PushVertex(-1, -1)
|
|
700
|
+
break
|
|
701
|
+
case PdMode.TICK:
|
|
702
|
+
PushVertex(0, 1)
|
|
703
|
+
PushVertex(0, 0)
|
|
704
|
+
break
|
|
705
|
+
default:
|
|
706
|
+
console.warn("Unsupported point display type: " + markType)
|
|
707
|
+
}
|
|
708
|
+
}
|
|
709
|
+
|
|
710
|
+
/** Create point shape block if not yet done.
 * Builds a synthetic block (named POINT_SHAPE_BLOCK_NAME) containing the mark
 * geometry plus optional square/circle decorations selected by pdMode bits, so
 * shaped point marks can be rendered as block instances.
 */
_CreatePointShapeBlock() {
    if (this.pointShapeBlock) {
        /* Already created. */
        return
    }
    /* This mimics DXF block entity. */
    this.pointShapeBlock = new Block({
        name: POINT_SHAPE_BLOCK_NAME,
        position: { x: 0, y: 0}
    })
    /* Fix block origin at zero. */
    this.pointShapeBlock.offset = new Vector2(0, 0)
    const blockCtx = this.pointShapeBlock.DefinitionContext()

    const markType = this.pdMode & PdMode.MARK_MASK
    if (markType !== PdMode.DOT && markType !== PdMode.NONE) {
        /* Central mark (plus/cross/tick) as line segments, colored by block. */
        const vertices = []
        this._CreatePointMarker(vertices, markType)
        const entity = new Entity({
            type: Entity.Type.LINE_SEGMENTS,
            vertices,
            color: ColorCode.BY_BLOCK
        })
        this._ProcessEntity(entity, blockCtx)
    }

    if (this.pdMode & PdMode.SQUARE) {
        /* Closed square outline with half-size radius. */
        const r = this.pdSize * 0.5
        const vertices = [
            {x: -r, y: r},
            {x: r, y: r},
            {x: r, y: -r},
            {x: -r, y: -r}
        ]
        const entity = new Entity({
            type: Entity.Type.POLYLINE, vertices,
            color: ColorCode.BY_BLOCK,
            shape: true
        })
        this._ProcessEntity(entity, blockCtx)
    }
    if (this.pdMode & PdMode.CIRCLE) {
        /* Closed circle outline, tessellated to polyline. */
        const vertices = []
        this._GenerateArcVertices({vertices, center: {x: 0, y: 0},
                                   radius: this.pdSize * 0.5,
                                   tessellationAngle: POINT_CIRCLE_TESSELLATION_ANGLE})
        const entity = new Entity({
            type: Entity.Type.POLYLINE, vertices,
            color: ColorCode.BY_BLOCK,
            shape: true
        })
        this._ProcessEntity(entity, blockCtx)
    }
}
|
|
764
|
+
|
|
765
|
+
*_Decompose3DFace(entity, blockCtx) {
|
|
766
|
+
yield *this._DecomposeFace(entity, entity.vertices, blockCtx, this.options.wireframeMesh)
|
|
767
|
+
}
|
|
768
|
+
|
|
769
|
+
*_DecomposeSolid(entity, blockCtx) {
|
|
770
|
+
yield *this._DecomposeFace(entity, entity.points, blockCtx, false,
|
|
771
|
+
this._GetEntityExtrusionTransform(entity))
|
|
772
|
+
}
|
|
773
|
+
|
|
774
|
+
/** Decompose a (possibly degenerate) quad face into wireframe polyline or triangles.
 * The face has three or four corners; either triangle of the quad may be degenerate
 * (zero area) and is then skipped.
 * @param entity Source entity (used for layer/color resolution).
 * @param vertices Corner points, three or four {x,y} items.
 * @param blockCtx {?BlockContext}
 * @param wireframe {Boolean} Emit outline polyline instead of filled triangles.
 * @param transform {?Matrix3} Optional transform applied to the corners.
 * @return {Generator<Entity>}
 */
*_DecomposeFace(entity, vertices, blockCtx, wireframe, transform = null) {
    const layer = this._GetEntityLayer(entity, blockCtx)
    const color = this._GetEntityColor(entity, blockCtx)

    /* A triangle is valid when its area (cross product of two edges) is non-zero. */
    function IsValidTriangle(v1, v2, v3) {
        const e1 = new Vector2().subVectors(v2, v1)
        const e2 = new Vector2().subVectors(v3, v1)
        const area = Math.abs(e1.cross(e2))
        return area > Number.EPSILON
    }

    const v0 = new Vector2(vertices[0].x, vertices[0].y)
    const v1 = new Vector2(vertices[1].x, vertices[1].y)
    const v2 = new Vector2(vertices[2].x, vertices[2].y)
    let v3 = null

    /* Validity is checked before applying the transform. */
    let hasFirstTriangle = IsValidTriangle(v0, v1, v2)
    let hasSecondTriangle = false

    if (vertices.length > 3) {
        /* Fourth vertex may be the same as one of the previous vertices, so additional triangle
         * for degeneration.
         */

        v3 = new Vector2(vertices[3].x, vertices[3].y)
        hasSecondTriangle = IsValidTriangle(v1, v3, v2)
        if (transform) {
            v3.applyMatrix3(transform)
        }
    }
    if (transform) {
        v0.applyMatrix3(transform)
        v1.applyMatrix3(transform)
        v2.applyMatrix3(transform)
    }

    if (!hasFirstTriangle && !hasSecondTriangle) {
        /* Fully degenerate face, nothing to render. */
        return
    }

    if (wireframe) {
        /* Outline only: emit the valid corner sequence as a closed polyline. */
        const _vertices = []
        if (hasFirstTriangle && !hasSecondTriangle) {
            _vertices.push(v0, v1, v2)
        } else if (!hasFirstTriangle && hasSecondTriangle) {
            _vertices.push(v1, v3, v2)
        } else {
            _vertices.push(v0, v1, v3, v2)
        }
        yield new Entity({
            type: Entity.Type.POLYLINE,
            vertices: _vertices, layer, color,
            shape: true
        })

    } else {
        /* Filled: emit indexed triangles, sharing the common edge v1-v2. */
        const _vertices = []
        const indices = []
        if (hasFirstTriangle) {
            _vertices.push(v0, v1, v2)
            indices.push(0, 1, 2)
        }
        if (hasSecondTriangle) {
            if (!hasFirstTriangle) {
                _vertices.push(v1, v2)
                indices.push(0, 1, 2)
            } else {
                indices.push(1, 2, 3)
            }
            _vertices.push(v3)
        }
        yield new Entity({
            type: Entity.Type.TRIANGLES,
            vertices: _vertices, indices, layer, color
        })
    }
}
|
|
851
|
+
|
|
852
|
+
*_DecomposeText(entity, blockCtx) {
|
|
853
|
+
if (!this.textRenderer.canRender) {
|
|
854
|
+
return
|
|
855
|
+
}
|
|
856
|
+
const layer = this._GetEntityLayer(entity, blockCtx)
|
|
857
|
+
const color = this._GetEntityColor(entity, blockCtx)
|
|
858
|
+
yield* this.textRenderer.Render({
|
|
859
|
+
text: ParseSpecialChars(entity.text),
|
|
860
|
+
fontSize: entity.textHeight,
|
|
861
|
+
startPos: entity.startPoint,
|
|
862
|
+
endPos: entity.endPoint,
|
|
863
|
+
rotation: entity.rotation,
|
|
864
|
+
hAlign: entity.halign,
|
|
865
|
+
vAlign: entity.valign,
|
|
866
|
+
widthFactor: entity.xScale,
|
|
867
|
+
color, layer
|
|
868
|
+
})
|
|
869
|
+
}
|
|
870
|
+
|
|
871
|
+
*_DecomposeMText(entity, blockCtx) {
|
|
872
|
+
if (!this.textRenderer.canRender) {
|
|
873
|
+
return
|
|
874
|
+
}
|
|
875
|
+
const layer = this._GetEntityLayer(entity, blockCtx)
|
|
876
|
+
const color = this._GetEntityColor(entity, blockCtx)
|
|
877
|
+
const parser = new MTextFormatParser()
|
|
878
|
+
parser.Parse(ParseSpecialChars(entity.text))
|
|
879
|
+
yield* this.textRenderer.RenderMText({
|
|
880
|
+
formattedText: parser.GetContent(),
|
|
881
|
+
fontSize: entity.height,
|
|
882
|
+
position: entity.position,
|
|
883
|
+
rotation: entity.rotation,
|
|
884
|
+
direction: entity.direction,
|
|
885
|
+
attachment: entity.attachmentPoint,
|
|
886
|
+
lineSpacing: entity.lineSpacing,
|
|
887
|
+
width: entity.width,
|
|
888
|
+
color, layer
|
|
889
|
+
})
|
|
890
|
+
}
|
|
891
|
+
|
|
892
|
+
/**
 * Create a linear dimension handler for a DIMENSION entity.
 * Only rotated (type 0) and aligned (type 1) dimensions with all required points
 * are supported; style values are resolved via _GetDimStyleValue().
 * @return {?LinearDimension} Dimension handler instance, null if not possible to create from
 *      the provided entity.
 */
_CreateLinearDimension(entity) {
    /* Low nibble of dimensionType encodes the dimension kind. */
    const type = (entity.dimensionType || 0) & 0xf
    /* For now support linear dimensions only. */
    if ((type != 0 && type != 1) || !entity.linearOrAngularPoint1 ||
        !entity.linearOrAngularPoint2 || !entity.anchorPoint) {

        return null
    }

    let style = null
    if (entity.hasOwnProperty("styleName")) {
        style = this.dimStyles.get(entity.styleName)
    }

    const dim = new LinearDimension({
        p1: new Vector2().copy(entity.linearOrAngularPoint1),
        p2: new Vector2().copy(entity.linearOrAngularPoint2),
        anchor: new Vector2().copy(entity.anchorPoint),
        isAligned: type == 1,
        angle: entity.angle,
        text: entity.text,
        textAnchor: entity.middleOfText ? new Vector2().copy(entity.middleOfText) : null,
        textRotation: entity.textRotation

    /* styleResolver */
    }, valueName => {
        return this._GetDimStyleValue(valueName, entity, style)

    /* textWidthCalculator */
    }, (text, fontSize) => {
        return this.textRenderer.GetLineWidth(text, fontSize)
    })

    if (!dim.IsValid) {
        console.warn("Invalid dimension geometry detected for " + entity.handle)
        return null
    }

    return dim
}
|
|
936
|
+
|
|
937
|
+
/** Decompose DIMENSION entity into lines, triangles (arrowheads) and text.
 * If a pre-rendered anonymous block is attached to the dimension, that block is
 * instantiated instead of synthesizing geometry from the dimension parameters.
 * @param entity DIMENSION entity from the parsed DXF.
 * @param blockCtx {?BlockContext}
 * @return {Generator<Entity>}
 */
*_DecomposeDimension(entity, blockCtx) {
    if ((entity.block ?? null) !== null && this.blocks.has(entity.block)) {
        /* Dimension may have pre-rendered block attached. Then just render this block instead
         * of synthesizing dimension geometry from parameters.
         *
         * Create dummy INSERT entity.
         */
        const insert = {
            name: entity.block,
            position: {x: 0, y: 0},
            layer: entity.layer,
            color: entity.color,
            colorIndex: entity.colorIndex
        }
        this._ProcessInsert(insert, blockCtx)
        return
    }

    /* https://ezdxf.readthedocs.io/en/stable/tutorials/linear_dimension.html
     * https://ezdxf.readthedocs.io/en/stable/tables/dimstyle_table_entry.html
     */

    const dim = this._CreateLinearDimension(entity)
    if (!dim) {
        /* Unsupported dimension kind or invalid geometry. */
        return
    }

    const layer = this._GetEntityLayer(entity, blockCtx)
    const color = this._GetEntityColor(entity, blockCtx)
    const transform = this._GetEntityExtrusionTransform(entity)

    const layout = dim.GenerateLayout()

    /* Dimension/extension lines. */
    for (const line of layout.lines) {
        const vertices = []

        if (transform) {
            line.start.applyMatrix3(transform)
            line.end.applyMatrix3(transform)
        }
        vertices.push(line.start, line.end)

        yield new Entity({
            type: Entity.Type.LINE_SEGMENTS,
            vertices,
            layer,
            color: line.color ?? color
        })
    }

    /* Arrowheads and other filled shapes. */
    for (const triangle of layout.triangles) {
        if (transform) {
            for (const v of triangle.vertices) {
                v.applyMatrix3(transform)
            }
        }

        yield new Entity({
            type: Entity.Type.TRIANGLES,
            vertices: triangle.vertices,
            indices: triangle.indices,
            layer,
            color: triangle.color ?? color
        })
    }

    /* Measurement text, centered at its layout position. */
    if (this.textRenderer.canRender) {
        for (const text of layout.texts) {
            if (transform) {
                //XXX does not affect text rotation and mirroring
                text.position.applyMatrix3(transform)
            }
            yield* this.textRenderer.Render({
                text: text.text,
                fontSize: text.size,
                startPos: text.position,
                rotation: text.angle,
                hAlign: HAlign.CENTER,
                vAlign: VAlign.MIDDLE,
                color: text.color ?? color,
                layer
            })
        }
    }
}
|
|
1022
|
+
|
|
1023
|
+
/** Decompose HATCH entity into pattern line segments clipped against boundary loops.
 * For each seed point and each pattern line family, candidate lines covering the
 * boundary bounding box are generated, optionally dashed, and clipped against the
 * hatch boundary.
 * @param entity HATCH entity from the parsed DXF.
 * @param blockCtx {?BlockContext}
 * @return {Generator<Entity>}
 */
*_DecomposeHatch(entity, blockCtx) {
    if (entity.isSolid) {
        //XXX solid hatch not yet supported
        return
    }

    const style = entity.hatchStyle ?? 0

    if (style != HatchStyle.ODD_PARITY && style != HatchStyle.THROUGH_ENTIRE_AREA) {
        //XXX other styles not yet supported
        return
    }

    const boundaryLoops = this._GetHatchBoundaryLoops(entity)
    if (boundaryLoops.length == 0) {
        console.warn("HATCH entity with empty boundary loops array " +
                     "(perhaps some loop types are not implemented yet)")
        return
    }

    const calc = new HatchCalculator(boundaryLoops, style)

    const layer = this._GetEntityLayer(entity, blockCtx)
    const color = this._GetEntityColor(entity, blockCtx)
    const transform = this._GetEntityExtrusionTransform(entity)

    /* Pattern resolution order: named pattern, inline definition lines, ANSI31 fallback. */
    let pattern = null
    if (entity.patternName) {
        pattern = LookupPattern(entity.patternName, this.isMetric)
        if (!pattern) {
            console.log(`Hatch pattern with name ${entity.patternName} not found ` +
                        `(metric: ${this.isMetric})`)
        }
    }
    if (pattern == null && entity.definitionLines) {
        pattern = new Pattern(entity.definitionLines)
    }
    if (pattern == null) {
        pattern = LookupPattern("ANSI31")
    }
    if (!pattern) {
        return
    }

    const seedPoints = entity.seedPoints ? entity.seedPoints : [{x: 0, y: 0}]

    for (const seedPoint of seedPoints) {

        const patTransform = calc.GetPatternTransform({
            seedPoint,
            angle: entity.patternAngle,
            scale: entity.patternScale
        })

        for (const line of pattern.lines) {

            let offsetX = line.offset.x
            let offsetY = line.offset.y

            /* Normalize offset so that Y is always non-negative. Inverting offset vector
             * direction does not change lines positions.
             */
            if (offsetY < 0) {
                offsetY = -offsetY
                offsetX = -offsetX
            }

            const lineTransform = calc.GetLineTransform({
                patTransform,
                basePoint: line.base,
                angle: line.angle ?? 0
            })

            /* Bounding box in line coordinates, slightly expanded to avoid edge effects. */
            const bbox = calc.GetBoundingBox(lineTransform)
            const margin = (bbox.max.x - bbox.min.x) * 0.05

            /* First determine range of line indices. Line with index 0 goes through base point
             * (which is [0; 0] in line coordinates system). Line with index `n`` starts in `n`
             * offset vectors added to the base point.
             */
            let minLineIdx, maxLineIdx
            if (offsetY == 0) {
                /* Degenerated to single line. */
                minLineIdx = 0
                maxLineIdx = 0
            } else {
                minLineIdx = Math.ceil(bbox.min.y / offsetY)
                maxLineIdx = Math.floor(bbox.max.y / offsetY)
            }

            /* Safety cap against pathological pattern scales. */
            if (maxLineIdx - minLineIdx > MAX_HATCH_LINES) {
                console.warn("Too many lines produced by hatching pattern")
                continue
            }

            /* Total length of one dash sequence, null for a solid line. */
            let dashPatLength
            if (line.dashes && line.dashes.length > 1) {
                dashPatLength = 0
                for (const dash of line.dashes) {
                    /* Negative dash value means a space. */
                    if (dash < 0) {
                        dashPatLength -= dash
                    } else {
                        dashPatLength += dash
                    }
                }
            } else {
                dashPatLength = null
            }

            /* Inverse transform maps line coordinates back to OCS. */
            const ocsTransform = lineTransform.clone().invert()

            for (let lineIdx = minLineIdx; lineIdx <= maxLineIdx; lineIdx++) {
                const y = lineIdx * offsetY
                const xBase = lineIdx * offsetX

                const xStart = bbox.min.x - margin
                const xEnd = bbox.max.x + margin
                const lineLength = xEnd - xStart
                const start = new Vector2(xStart, y).applyMatrix3(ocsTransform)
                const end = new Vector2(xEnd, y).applyMatrix3(ocsTransform)
                const lineVec = end.clone().sub(start)
                /* Parametric [0;1] intervals of the line inside the hatch boundary. */
                const clippedSegments = calc.ClipLine([start, end])

                /* Map line X coordinate to [0;1] parameter along the candidate line. */
                function GetParam(x) {
                    return (x - xStart) / lineLength
                }

                /* Emit one clipped interval as line segment (or point if degenerate). */
                function RenderSegment(seg) {
                    const p1 = lineVec.clone().multiplyScalar(seg[0]).add(start)
                    const p2 = lineVec.clone().multiplyScalar(seg[1]).add(start)
                    if (transform) {
                        p1.applyMatrix3(transform)
                        p2.applyMatrix3(transform)
                    }
                    if (seg[1] - seg[0] <= Number.EPSILON) {
                        return new Entity({
                            type: Entity.Type.POINTS,
                            vertices: [p1],
                            layer, color
                        })
                    }
                    return new Entity({
                        type: Entity.Type.LINE_SEGMENTS,
                        vertices: [p1, p2],
                        layer, color
                    })
                }

                /** Clip segment against `clippedSegments`. */
                function *ClipSegment(segStart, segEnd) {
                    for (const seg of clippedSegments) {
                        if (seg[0] >= segEnd) {
                            return
                        }
                        if (seg[1] <= segStart) {
                            continue
                        }
                        const _start = Math.max(segStart, seg[0])
                        const _end = Math.min(segEnd, seg[1])
                        yield [_start, _end]
                        segStart = _end
                    }
                }

                /* Determine range for segment indices. One segment is one full sequence of
                 * dashes. In case there is no dashes (solid line), just use hatch bounds.
                 */
                if (dashPatLength !== null) {
                    let minSegIdx = Math.floor((xStart - xBase) / dashPatLength)
                    let maxSegIdx = Math.floor((xEnd - xBase) / dashPatLength)
                    if (maxSegIdx - minSegIdx >= MAX_HATCH_SEGMENTS) {
                        console.warn("Too many segments produced by hatching pattern line")
                        continue
                    }

                    for (let segIdx = minSegIdx; segIdx <= maxSegIdx; segIdx++) {
                        let segStartParam = GetParam(xBase + segIdx * dashPatLength)

                        for (let dashLength of line.dashes) {
                            const isSpace = dashLength < 0
                            if (isSpace) {
                                dashLength = - dashLength
                            }
                            const dashLengthParam = dashLength / lineLength
                            if (!isSpace) {
                                for (const seg of ClipSegment(segStartParam,
                                                              segStartParam + dashLengthParam)) {
                                    yield RenderSegment(seg)
                                }
                            }
                            segStartParam += dashLengthParam
                        }
                    }

                } else {
                    /* Single solid line. */
                    for (const seg of clippedSegments) {
                        yield RenderSegment(seg)
                    }
                }
            }
        }
    }
}
|
|
1227
|
+
|
|
1228
|
+
/** Collect hatch boundary loops as tessellated point lists.
 * Polyline loops (with optional bulges) and edge loops (line, circular arc,
 * elliptic arc, spline) are supported; other edge types are skipped with a warning.
 * A duplicated closing point is removed; loops with fewer than three points are dropped.
 * @return {Vector2[][]} Each loop is a list of points in OCS coordinates. */
_GetHatchBoundaryLoops(entity) {
    if (!entity.boundaryLoops) {
        return []
    }

    const result = []

    /* Append points to `vertices`, skipping the first point when it coincides with
     * the previously appended point (adjacent edges share endpoints).
     */
    const AddPoints = (vertices, points) => {
        const n = points.length
        if (n == 0) {
            return
        }
        if (vertices.length == 0) {
            vertices.push(points[0])
        } else {
            const lastPt = vertices[vertices.length - 1]
            if (lastPt.x != points[0].x || lastPt.y != points[0].y) {
                vertices.push(points[0])
            }
        }
        for (let i = 1; i < n; i++) {
            vertices.push(points[i])
        }
    }

    for (const loop of entity.boundaryLoops) {
        const vertices = []

        //XXX handle external references

        if (loop.type & 2) {
            /* Polyline. */
            for (let vtxIdx = 0; vtxIdx < loop.polyline.vertices.length; vtxIdx++) {
                const vtx = loop.polyline.vertices[vtxIdx]
                if ((vtx.bulge ?? 0) == 0) {
                    /* Straight segment, just emit the vertex. */
                    vertices.push(new Vector2(vtx.x, vtx.y))
                } else {
                    const prevVtx = loop.polyline.vertices[vtxIdx == 0 ?
                        loop.polyline.vertices.length - 1 : vtxIdx - 1]
                    if ((prevVtx.bulge ?? 0) == 0) {
                        /* Start vertex is not produced by _GenerateBulgeVertices(). */
                        vertices.push(new Vector2(vtx.x, vtx.y))
                    }
                    const nextVtx = loop.polyline.vertices[
                        vtxIdx == loop.polyline.vertices.length - 1 ? 0 : vtxIdx + 1]
                    this._GenerateBulgeVertices(vertices, vtx, nextVtx, vtx.bulge)
                }
            }

        } else if (loop.edges && loop.edges.length > 0) {
            for (const edge of loop.edges) {
                switch (edge.type) {
                case 1:
                    /* Line segment. */
                    AddPoints(vertices, [new Vector2(edge.start.x, edge.start.y),
                                         new Vector2(edge.end.x, edge.end.y)])
                    break
                case 2: {
                    /* Circular arc. */
                    const arcVertices = []
                    this._GenerateArcVertices({
                        vertices: arcVertices,
                        center: edge.start,
                        radius: edge.radius,
                        startAngle: edge.startAngle,
                        endAngle: edge.endAngle,
                        ccwAngleDir: edge.isCcw
                    })
                    AddPoints(vertices, arcVertices)
                    break
                }
                case 3: {
                    /* Elliptic arc. */
                    const center = edge.start
                    const majorAxisEndPoint = edge.end
                    /* Major radius is the length of the major-axis end point vector. */
                    const xR = Math.sqrt(majorAxisEndPoint.x * majorAxisEndPoint.x +
                                         majorAxisEndPoint.y * majorAxisEndPoint.y)
                    const axisRatio = edge.radius
                    const yR = xR * axisRatio
                    const rotation = Math.atan2(majorAxisEndPoint.y, majorAxisEndPoint.x)
                    const arcVertices = []
                    this._GenerateArcVertices({
                        vertices: arcVertices,
                        center,
                        radius: xR,
                        startAngle: edge.startAngle,
                        endAngle: edge.endAngle,
                        yRadius: yR,
                        ccwAngleDir: edge.isCcw
                    })
                    if (rotation !== 0) {
                        //XXX should account angDir?
                        const cos = Math.cos(rotation)
                        const sin = Math.sin(rotation)
                        for (const v of arcVertices) {
                            const tx = v.x - center.x
                            const ty = v.y - center.y
                            /* Rotate the vertex around the ellipse center point. */
                            v.x = tx * cos - ty * sin + center.x
                            v.y = tx * sin + ty * cos + center.y
                        }
                    }
                    AddPoints(vertices, arcVertices)
                    break;
                }
                case 4:
                    /* Spline. */
                    const controlPoints = edge.controlPoints.map(p => [p.x, p.y])
                    const subdivisions = controlPoints.length * SPLINE_SUBDIVISION
                    const step = 1 / subdivisions
                    for (let i = 0; i <= subdivisions; i++) {
                        const pt = this._InterpolateSpline(i * step, edge.degreeOfSplineCurve,
                                                          controlPoints,
                                                          edge.knotValues)
                        vertices.push(new Vector2(pt[0],pt[1]))
                    }
                    break;
                default:
                    console.warn("Unhandled hatch boundary loop edge type: " + edge.type)
                }
            }
        }

        /* Drop a duplicated closing point, the loop is implicitly closed. */
        if (vertices.length > 2) {
            const first = vertices[0]
            const last = vertices[vertices.length - 1]
            if (last.x == first.x && last.y == first.y) {
                vertices.length = vertices.length - 1
            }
        }
        if (vertices.length > 2) {
            result.push(vertices)
        }
    }

    return result
}
|
|
1366
|
+
|
|
1367
|
+
/** Resolve a dimension style value with standard precedence.
 * Lookup order: entity XDATA DSTYLE overrides, then the referenced dimension style
 * table entry, then header variables, then built-in defaults.
 * @param valueName Style variable name (e.g. a DIMxxx variable).
 * @param entity DIMENSION entity (source of XDATA overrides).
 * @param style {?Object} Dimension style table entry, may be null.
 * @return Resolved value, or null when nothing matches.
 */
_GetDimStyleValue(valueName, entity, style) {
    const entries = entity?.xdata?.ACAD?.DSTYLE?.values
    if (entries) {
        /* Entries alternate: variable code (group 1070) followed by its value. */
        let isVarCode = true
        let found = false
        for (const e of entries) {
            if (isVarCode) {
                if (e.code != 1070) {
                    /* Unexpected group code. */
                    break
                }
                if (dimStyleCodes.get(e.value) == valueName) {
                    found = true
                }
            } else if (found) {
                return e.value
            }
            isVarCode = !isVarCode
        }
    }
    if (style && style.hasOwnProperty(valueName)) {
        return style[valueName]
    }
    if (this.vars.has(valueName)) {
        return this.vars.get(valueName)
    }
    if (DEFAULT_VARS.hasOwnProperty(valueName)) {
        const value = DEFAULT_VARS[valueName]
        /* Defaults may be computed lazily from the builder state. */
        if (value instanceof Function) {
            return value.call(this)
        }
        return value
    }
    return null
}
|
|
1402
|
+
|
|
1403
|
+
/**
|
|
1404
|
+
* Updates batches directly.
|
|
1405
|
+
* @param entity
|
|
1406
|
+
* @param blockCtx {?BlockContext} Nested block insert when non-null.
|
|
1407
|
+
*/
|
|
1408
|
+
_ProcessInsert(entity, blockCtx = null) {
|
|
1409
|
+
if (blockCtx) {
|
|
1410
|
+
//XXX handle indirect recursion
|
|
1411
|
+
if (blockCtx.name === entity.name) {
|
|
1412
|
+
console.warn("Recursive block reference: " + blockCtx.name)
|
|
1413
|
+
return
|
|
1414
|
+
}
|
|
1415
|
+
/* Flatten nested blocks definition. */
|
|
1416
|
+
const block = this.blocks.get(entity.name)
|
|
1417
|
+
if (!block) {
|
|
1418
|
+
console.warn("Unresolved nested block reference: " + entity.name)
|
|
1419
|
+
}
|
|
1420
|
+
const nestedCtx = blockCtx.NestedBlockContext(block, entity)
|
|
1421
|
+
if (block.data.entities) {
|
|
1422
|
+
for (const entity of block.data.entities) {
|
|
1423
|
+
this._ProcessDxfEntity(entity, nestedCtx)
|
|
1424
|
+
}
|
|
1425
|
+
}
|
|
1426
|
+
return
|
|
1427
|
+
}
|
|
1428
|
+
|
|
1429
|
+
const block = this.blocks.get(entity.name)
|
|
1430
|
+
if (block === null) {
|
|
1431
|
+
console.warn("Unresolved block reference in INSERT: " + entity.name)
|
|
1432
|
+
return
|
|
1433
|
+
}
|
|
1434
|
+
if (!block.HasGeometry()) {
|
|
1435
|
+
return
|
|
1436
|
+
}
|
|
1437
|
+
|
|
1438
|
+
const layer = this._GetEntityLayer(entity, null)
|
|
1439
|
+
const color = this._GetEntityColor(entity, null)
|
|
1440
|
+
const lineType = this._GetLineType(entity, null, null)
|
|
1441
|
+
//XXX apply extrusion direction
|
|
1442
|
+
const transform = block.InstantiationContext().GetInsertionTransform(entity)
|
|
1443
|
+
|
|
1444
|
+
/* Update bounding box and origin with transformed block bounds corner points. */
|
|
1445
|
+
const bounds = block.bounds
|
|
1446
|
+
this._UpdateBounds(new Vector2(bounds.minX, bounds.minY).applyMatrix3(transform))
|
|
1447
|
+
this._UpdateBounds(new Vector2(bounds.maxX, bounds.maxY).applyMatrix3(transform))
|
|
1448
|
+
this._UpdateBounds(new Vector2(bounds.minX, bounds.maxY).applyMatrix3(transform))
|
|
1449
|
+
this._UpdateBounds(new Vector2(bounds.maxX, bounds.minY).applyMatrix3(transform))
|
|
1450
|
+
|
|
1451
|
+
transform.translate(-this.origin.x, -this.origin.y)
|
|
1452
|
+
//XXX grid instancing not supported yet
|
|
1453
|
+
if (block.flatten) {
|
|
1454
|
+
for (const batch of block.batches) {
|
|
1455
|
+
this._FlattenBatch(batch, layer, color, lineType, transform)
|
|
1456
|
+
}
|
|
1457
|
+
} else {
|
|
1458
|
+
const key = new BatchingKey(layer, entity.name, BatchingKey.GeometryType.BLOCK_INSTANCE,
|
|
1459
|
+
color, lineType)
|
|
1460
|
+
const batch = this._GetBatch(key)
|
|
1461
|
+
batch.PushInstanceTransform(transform)
|
|
1462
|
+
}
|
|
1463
|
+
}
|
|
1464
|
+
|
|
1465
|
+
/** Flatten block definition batch. It is merged into suitable instant rendering batch. */
|
|
1466
|
+
_FlattenBatch(blockBatch, layerName, blockColor, blockLineType, transform) {
|
|
1467
|
+
const layer = this.layers.get(layerName)
|
|
1468
|
+
let color, lineType = 0
|
|
1469
|
+
if (blockBatch.key.color === ColorCode.BY_BLOCK) {
|
|
1470
|
+
color = blockColor
|
|
1471
|
+
} else if (blockBatch.key.color === ColorCode.BY_LAYER) {
|
|
1472
|
+
color = layer?.color ?? 0
|
|
1473
|
+
} else {
|
|
1474
|
+
color = blockBatch.key.color
|
|
1475
|
+
}
|
|
1476
|
+
//XXX line type
|
|
1477
|
+
const key = new BatchingKey(layerName, null, blockBatch.key.geometryType, color, lineType)
|
|
1478
|
+
const batch = this._GetBatch(key)
|
|
1479
|
+
batch.Merge(blockBatch, transform)
|
|
1480
|
+
}
|
|
1481
|
+
|
|
1482
|
+
/**
|
|
1483
|
+
* Generate entities for shaped polyline (e.g. line resulting in mesh). All segments are shaped
|
|
1484
|
+
* (have start/end width). Segments may be bulge.
|
|
1485
|
+
* @param vertices
|
|
1486
|
+
* @param layer
|
|
1487
|
+
* @param color
|
|
1488
|
+
* @param lineType
|
|
1489
|
+
* @param shape {Boolean} True if closed polyline.
|
|
1490
|
+
* @return {Generator<Entity>}
|
|
1491
|
+
*/
|
|
1492
|
+
*_GenerateShapedPolyline(vertices, layer, color, lineType, shape) {
|
|
1493
|
+
//XXX
|
|
1494
|
+
yield new Entity({
|
|
1495
|
+
type: Entity.Type.POLYLINE,
|
|
1496
|
+
vertices,
|
|
1497
|
+
layer,
|
|
1498
|
+
color,
|
|
1499
|
+
lineType,
|
|
1500
|
+
shape
|
|
1501
|
+
})
|
|
1502
|
+
}
|
|
1503
|
+
|
|
1504
|
+
/** Mirror entity vertices if necessary in case of extrusionDirection with negative Z specified.
|
|
1505
|
+
*
|
|
1506
|
+
* @param entity Entity to check.
|
|
1507
|
+
* @param vertices {?{x,y}[]} Vertices array to use instead of entity vertices attribute.
|
|
1508
|
+
* @return {{x,y}[]} Vertices array with mirrored X if necessary. All attributes preserved.
|
|
1509
|
+
*/
|
|
1510
|
+
_MirrorEntityVertices(entity, vertices = null) {
|
|
1511
|
+
if (!entity.extrusionDirection || entity.extrusionDirection.z >= 0) {
|
|
1512
|
+
return vertices ?? entity.vertices
|
|
1513
|
+
}
|
|
1514
|
+
if (!vertices || vertices === entity.vertices) {
|
|
1515
|
+
vertices = entity.vertices.slice()
|
|
1516
|
+
}
|
|
1517
|
+
const n = vertices.length
|
|
1518
|
+
for (let i = 0; i < n; i++) {
|
|
1519
|
+
const v = vertices[i]
|
|
1520
|
+
const _v = {x: -v.x}
|
|
1521
|
+
for (const propName in v) {
|
|
1522
|
+
if (!v.hasOwnProperty(propName)) {
|
|
1523
|
+
continue
|
|
1524
|
+
}
|
|
1525
|
+
if (propName !== "x") {
|
|
1526
|
+
_v[propName] = v[propName]
|
|
1527
|
+
}
|
|
1528
|
+
}
|
|
1529
|
+
vertices[i] = _v
|
|
1530
|
+
}
|
|
1531
|
+
return vertices
|
|
1532
|
+
}
|
|
1533
|
+
|
|
1534
|
+
/** Decompose POLYLINE/LWPOLYLINE entity into renderable entities.
 *
 * The polyline is split into runs of segments which share rendering style:
 * plain (no width) vs shaped, and line type. Each run is emitted as a separate
 * entity via the CommitSegment closure. Bulge segments are tessellated inline.
 *
 * @param entity Raw polyline entity.
 * @param blockCtx {?BlockContext} Set when decomposing inside a block definition.
 * @return {Generator<Entity>}
 */
*_DecomposePolyline(entity, blockCtx = null) {

    /* Polyface meshes are a different beast, handled separately. */
    if (entity.isPolyfaceMesh) {
        yield *this._DecomposePolyfaceMesh(entity, blockCtx)
        return
    }

    /* When fit vertices are present, only they are rendered; the original
     * control vertices are skipped.
     */
    let entityVertices, verticesCount
    if (entity.includesCurveFitVertices || entity.includesSplineFitVertices) {
        entityVertices = entity.vertices.filter(v => v.splineVertex || v.curveFittingVertex)
        verticesCount = entityVertices.length
    } else {
        entityVertices = entity.vertices
        verticesCount = entity.vertices.length
    }
    if (verticesCount < 2) {
        /* Nothing to draw. */
        return
    }
    entityVertices = this._MirrorEntityVertices(entity, entityVertices)
    const color = this._GetEntityColor(entity, blockCtx)
    const layer = this._GetEntityLayer(entity, blockCtx)
    /* Captured by CommitSegment which is a plain (non-arrow) generator. */
    const _this = this
    /* State of the current segment run, mutated by both the main loop and
     * CommitSegment.
     */
    let startIdx = 0
    let curPlainLine = this._IsPlainLine(entityVertices[0])
    let curLineType = this._GetLineType(entity, entityVertices[0], blockCtx)
    /* Tessellated vertices accumulated for the current run; null while the run
     * can still be represented as a slice of entityVertices.
     */
    let curVertices = null

    /* Emit the accumulated run [startIdx, endIdx) and reset run state. */
    function *CommitSegment(endIdx) {
        if (endIdx === startIdx) {
            return
        }
        let isClosed = false
        let vertices = curVertices
        if (endIdx === verticesCount && startIdx === 0) {
            /* Full closed loop - reuse the source array as-is. */
            isClosed = true
            if (vertices === null) {
                vertices = entityVertices
            }
        } else if (endIdx === verticesCount - 1 && startIdx === 0) {
            /* Full open polyline - reuse the source array as-is. */
            if (vertices === null) {
                vertices = entityVertices
            }
        } else if (endIdx === verticesCount) {
            /* Run wraps around to the first vertex. */
            if (vertices === null) {
                vertices = entityVertices.slice(startIdx, endIdx)
                vertices.push(entityVertices[0])
            }
        } else {
            /* Interior run, include the closing vertex of the last segment. */
            if (vertices === null) {
                vertices = entityVertices.slice(startIdx, endIdx + 1)
            }
        }

        if (curPlainLine) {
            yield new Entity({
                type: Entity.Type.POLYLINE,
                vertices, layer, color,
                lineType: curLineType,
                shape: isClosed
            })
        } else {
            yield* _this._GenerateShapedPolyline(vertices, layer, color, curLineType, isClosed)
        }

        /* Start a new run from the committed end point. */
        startIdx = endIdx
        if (endIdx !== verticesCount) {
            curPlainLine = _this._IsPlainLine(entityVertices[endIdx])
            curLineType = _this._GetLineType(entity, entityVertices[endIdx])
        }
        curVertices = null
    }

    /* vIdx points to the segment end vertex; vIdx === verticesCount denotes
     * the closing segment of a closed polyline.
     */
    for (let vIdx = 1; vIdx <= verticesCount; vIdx++) {
        const prevVtx = entityVertices[vIdx - 1]
        let vtx
        if (vIdx === verticesCount) {
            if (!entity.shape) {
                /* Open polyline - commit everything up to the last vertex. */
                yield* CommitSegment(vIdx - 1)
                break
            }
            /* Closed polyline - the closing segment ends at the first vertex. */
            vtx = entityVertices[0]
        } else {
            vtx = entityVertices[vIdx]
        }

        if (Boolean(prevVtx.bulge) && curPlainLine) {
            /* Bulge segment - switch the run to an explicit vertex list and
             * tessellate the arc into it.
             */
            if (curVertices === null) {
                curVertices = entityVertices.slice(startIdx, vIdx)
            }
            this._GenerateBulgeVertices(curVertices, prevVtx, vtx, prevVtx.bulge)
        } else if (curVertices !== null) {
            curVertices.push(vtx)
        }

        if (vIdx === verticesCount) {
            /* Closing segment processed - flush the final run. */
            yield* CommitSegment(vIdx)
            break
        }

        const isPlainLine = this._IsPlainLine(vtx)
        const lineType = this._GetLineType(entity, vtx)
        if (isPlainLine !== curPlainLine ||
            /* Line type is accounted for plain lines only. */
            (curPlainLine && lineType !== curLineType)) {

            /* Rendering style changes here - flush the current run. */
            yield* CommitSegment(vIdx)
        }
    }
}
|
|
1643
|
+
|
|
1644
|
+
*_DecomposePolyfaceMesh(entity, blockCtx = null) {
|
|
1645
|
+
const layer = this._GetEntityLayer(entity, blockCtx)
|
|
1646
|
+
const color = this._GetEntityColor(entity, blockCtx)
|
|
1647
|
+
|
|
1648
|
+
const vertices = []
|
|
1649
|
+
const faces = []
|
|
1650
|
+
|
|
1651
|
+
for (const v of entity.vertices) {
|
|
1652
|
+
if (v.faces) {
|
|
1653
|
+
const face = {
|
|
1654
|
+
indices: [],
|
|
1655
|
+
hiddenEdges: []
|
|
1656
|
+
}
|
|
1657
|
+
for (const vIdx of v.faces) {
|
|
1658
|
+
if (vIdx == 0) {
|
|
1659
|
+
break
|
|
1660
|
+
}
|
|
1661
|
+
face.indices.push(vIdx < 0 ? -vIdx - 1 : vIdx - 1)
|
|
1662
|
+
face.hiddenEdges.push(vIdx < 0)
|
|
1663
|
+
}
|
|
1664
|
+
if (face.indices.length == 3 || face.indices.length == 4) {
|
|
1665
|
+
faces.push(face)
|
|
1666
|
+
}
|
|
1667
|
+
} else {
|
|
1668
|
+
vertices.push(new Vector2(v.x, v.y))
|
|
1669
|
+
}
|
|
1670
|
+
}
|
|
1671
|
+
|
|
1672
|
+
const polylines = []
|
|
1673
|
+
const CommitLineSegment = (startIdx, endIdx) => {
|
|
1674
|
+
if (polylines.length > 0) {
|
|
1675
|
+
const prev = polylines[polylines.length - 1]
|
|
1676
|
+
if (prev.indices[prev.indices.length - 1] == startIdx) {
|
|
1677
|
+
prev.indices.push(endIdx)
|
|
1678
|
+
return
|
|
1679
|
+
}
|
|
1680
|
+
if (prev.indices[0] == prev.indices[prev.indices.length - 1]) {
|
|
1681
|
+
prev.isClosed = true
|
|
1682
|
+
}
|
|
1683
|
+
}
|
|
1684
|
+
polylines.push({
|
|
1685
|
+
indices: [startIdx, endIdx],
|
|
1686
|
+
isClosed: false
|
|
1687
|
+
})
|
|
1688
|
+
}
|
|
1689
|
+
|
|
1690
|
+
for (const face of faces) {
|
|
1691
|
+
|
|
1692
|
+
if (this.options.wireframeMesh) {
|
|
1693
|
+
for (let i = 0; i < face.indices.length; i++) {
|
|
1694
|
+
if (face.hiddenEdges[i]) {
|
|
1695
|
+
continue
|
|
1696
|
+
}
|
|
1697
|
+
const nextIdx = i < face.indices.length - 1 ? i + 1 : 0
|
|
1698
|
+
CommitLineSegment(face.indices[i], face.indices[nextIdx])
|
|
1699
|
+
}
|
|
1700
|
+
|
|
1701
|
+
} else {
|
|
1702
|
+
let indices
|
|
1703
|
+
if (face.indices.length == 3) {
|
|
1704
|
+
indices = face.indices
|
|
1705
|
+
} else {
|
|
1706
|
+
indices = [face.indices[0], face.indices[1], face.indices[2],
|
|
1707
|
+
face.indices[0], face.indices[2], face.indices[3]]
|
|
1708
|
+
}
|
|
1709
|
+
yield new Entity({
|
|
1710
|
+
type: Entity.Type.TRIANGLES,
|
|
1711
|
+
vertices, indices, layer, color
|
|
1712
|
+
})
|
|
1713
|
+
}
|
|
1714
|
+
}
|
|
1715
|
+
|
|
1716
|
+
if (this.options.wireframeMesh) {
|
|
1717
|
+
for (const pl of polylines) {
|
|
1718
|
+
if (pl.length == 2) {
|
|
1719
|
+
yield new Entity({
|
|
1720
|
+
type: Entity.Type.LINE_SEGMENTS,
|
|
1721
|
+
vertices: [vertices[pl.indices[0]], vertices[pl.indices[1]]],
|
|
1722
|
+
layer, color
|
|
1723
|
+
})
|
|
1724
|
+
} else {
|
|
1725
|
+
const _vertices = []
|
|
1726
|
+
for (const vIdx of pl.indices) {
|
|
1727
|
+
_vertices.push(vertices[vIdx])
|
|
1728
|
+
}
|
|
1729
|
+
yield new Entity({
|
|
1730
|
+
type: Entity.Type.POLYLINE,
|
|
1731
|
+
vertices: _vertices, layer, color,
|
|
1732
|
+
shape: pl.isClosed
|
|
1733
|
+
})
|
|
1734
|
+
}
|
|
1735
|
+
}
|
|
1736
|
+
}
|
|
1737
|
+
}
|
|
1738
|
+
|
|
1739
|
+
*_DecomposeSpline(entity, blockCtx = null) {
|
|
1740
|
+
const color = this._GetEntityColor(entity, blockCtx)
|
|
1741
|
+
const layer = this._GetEntityLayer(entity, blockCtx)
|
|
1742
|
+
const lineType = this._GetLineType(entity, null, blockCtx)
|
|
1743
|
+
const controlPoints = entity.controlPoints.map(p => [p.x, p.y])
|
|
1744
|
+
const vertices = []
|
|
1745
|
+
const subdivisions = controlPoints.length * SPLINE_SUBDIVISION
|
|
1746
|
+
const step = 1 / subdivisions
|
|
1747
|
+
for (let i = 0; i <= subdivisions; i++) {
|
|
1748
|
+
const pt = this._InterpolateSpline(i * step, entity.degreeOfSplineCurve, controlPoints,
|
|
1749
|
+
entity.knotValues)
|
|
1750
|
+
vertices.push({x: pt[0], y: pt[1]})
|
|
1751
|
+
}
|
|
1752
|
+
//XXX extrusionDirection (normalVector) transform?
|
|
1753
|
+
yield new Entity({type: Entity.Type.POLYLINE, vertices, layer, color, lineType})
|
|
1754
|
+
}
|
|
1755
|
+
|
|
1756
|
+
/** Get a point on a B-spline.
|
|
1757
|
+
* https://github.com/thibauts/b-spline
|
|
1758
|
+
* @param t {number} Point position on spline, [0..1].
|
|
1759
|
+
* @param degree {number} B-spline degree.
|
|
1760
|
+
* @param points {number[][]} Control points. Each point should have the same dimension which
|
|
1761
|
+
* defines dimension of the result.
|
|
1762
|
+
* @param knots {?number[]} Knot vector. Should have size `points.length + degree + 1`. Default
|
|
1763
|
+
* is uniform spline.
|
|
1764
|
+
* @param weights {?number} Optional weights vector.
|
|
1765
|
+
* @return {number[]} Resulting point on the specified position.
|
|
1766
|
+
*/
|
|
1767
|
+
/* De Boor's algorithm on homogeneous coordinates (rational B-spline support
 * via optional weights). Ported from https://github.com/thibauts/b-spline -
 * the pyramid is evaluated in place in `v`, so statement order matters.
 */
_InterpolateSpline(t, degree, points, knots = null, weights = null) {
    let i, j, s, l // function-scoped iteration variables
    const n = points.length // points count
    const d = points[0].length // point dimensionality

    if (degree < 1) {
        throw new Error("Degree must be at least 1 (linear)")
    }
    if (degree > (n - 1)) {
        throw new Error("Degree must be less than or equal to point count - 1")
    }

    if (!weights) {
        // build weight vector of length [n]
        // default weight 1 makes the spline non-rational
        weights = []
        for(i = 0; i < n; i++) {
            weights[i] = 1
        }
    }

    if (!knots) {
        // build knot vector of length [n + degree + 1]
        // uniform knots when none provided
        knots = []
        for(i = 0; i < n + degree + 1; i++) {
            knots[i] = i
        }
    } else {
        if (knots.length !== n + degree + 1) {
            throw new Error("Bad knot vector length")
        }
    }

    // domain of the curve parameter in knot space
    const domain = [
        degree,
        knots.length-1 - degree
    ]

    // remap t to the domain where the spline is defined
    const low = knots[domain[0]]
    const high = knots[domain[1]]
    t = t * (high - low) + low

    // clamp against floating point drift at the range ends
    if (t < low) {
        t = low
    } else if (t > high) {
        t = high
    }

    // find s (the spline segment) for the [t] value provided
    for (s = domain[0]; s < domain[1]; s++) {
        if (t >= knots[s] && t <= knots[s + 1]) {
            break
        }
    }

    // convert points to homogeneous coordinates
    const v = []
    for (i = 0; i < n; i++) {
        v[i] = []
        for (j = 0; j < d; j++) {
            v[i][j] = points[i][j] * weights[i]
        }
        v[i][d] = weights[i]
    }

    // l (level) goes from 1 to the curve degree + 1
    let alpha
    for (l = 1; l <= degree + 1; l++) {
        // build level l of the pyramid
        // iterates downward so each v[i] update reads the not-yet-updated v[i - 1]
        for(i = s; i > s - degree - 1 + l; i--) {
            alpha = (t - knots[i]) / (knots[i + degree + 1 - l] - knots[i])
            // interpolate each component
            for(j = 0; j < d + 1; j++) {
                v[i][j] = (1 - alpha) * v[i - 1][j] + alpha * v[i][j]
            }
        }
    }

    // convert back to cartesian and return
    // divide by the homogeneous (weight) component accumulated in v[s][d]
    const result = []
    for(i = 0; i < d; i++) {
        result[i] = v[s][i] / v[s][d]
    }
    return result
}
|
|
1852
|
+
|
|
1853
|
+
/**
|
|
1854
|
+
* @param entity {Entity}
|
|
1855
|
+
* @param blockCtx {?BlockContext}
|
|
1856
|
+
*/
|
|
1857
|
+
_ProcessPoints(entity, blockCtx = null) {
|
|
1858
|
+
const key = new BatchingKey(entity.layer, blockCtx?.name,
|
|
1859
|
+
BatchingKey.GeometryType.POINTS, entity.color, 0)
|
|
1860
|
+
const batch = this._GetBatch(key)
|
|
1861
|
+
for (const v of entity.vertices) {
|
|
1862
|
+
batch.PushVertex(this._TransformVertex(v, blockCtx))
|
|
1863
|
+
}
|
|
1864
|
+
}
|
|
1865
|
+
|
|
1866
|
+
/**
|
|
1867
|
+
* @param entity {Entity}
|
|
1868
|
+
* @param blockCtx {?BlockContext}
|
|
1869
|
+
*/
|
|
1870
|
+
_ProcessLineSegments(entity, blockCtx = null) {
|
|
1871
|
+
if (entity.vertices.length % 2 !== 0) {
|
|
1872
|
+
throw Error("Even number of vertices expected")
|
|
1873
|
+
}
|
|
1874
|
+
const key = new BatchingKey(entity.layer, blockCtx?.name,
|
|
1875
|
+
BatchingKey.GeometryType.LINES, entity.color, entity.lineType)
|
|
1876
|
+
const batch = this._GetBatch(key)
|
|
1877
|
+
for (const v of entity.vertices) {
|
|
1878
|
+
batch.PushVertex(this._TransformVertex(v, blockCtx))
|
|
1879
|
+
}
|
|
1880
|
+
}
|
|
1881
|
+
|
|
1882
|
+
/**
|
|
1883
|
+
* @param entity {Entity}
|
|
1884
|
+
* @param blockCtx {?BlockContext}
|
|
1885
|
+
*/
|
|
1886
|
+
_ProcessPolyline(entity, blockCtx = null) {
|
|
1887
|
+
if (entity.vertices.length < 2) {
|
|
1888
|
+
return
|
|
1889
|
+
}
|
|
1890
|
+
/* It is more optimal to render short polylines un-indexed. Also DXF often contains
|
|
1891
|
+
* polylines with just two points.
|
|
1892
|
+
*/
|
|
1893
|
+
const verticesCount = entity.vertices.length
|
|
1894
|
+
if (verticesCount <= 3) {
|
|
1895
|
+
const key = new BatchingKey(entity.layer, blockCtx?.name,
|
|
1896
|
+
BatchingKey.GeometryType.LINES, entity.color,
|
|
1897
|
+
entity.lineType)
|
|
1898
|
+
const batch = this._GetBatch(key)
|
|
1899
|
+
let prev = null
|
|
1900
|
+
for (const v of entity.vertices) {
|
|
1901
|
+
if (prev !== null) {
|
|
1902
|
+
batch.PushVertex(this._TransformVertex(prev, blockCtx))
|
|
1903
|
+
batch.PushVertex(this._TransformVertex(v, blockCtx))
|
|
1904
|
+
}
|
|
1905
|
+
prev = v
|
|
1906
|
+
}
|
|
1907
|
+
if (entity.shape && verticesCount > 2) {
|
|
1908
|
+
batch.PushVertex(this._TransformVertex(entity.vertices[verticesCount - 1], blockCtx))
|
|
1909
|
+
batch.PushVertex(this._TransformVertex(entity.vertices[0], blockCtx))
|
|
1910
|
+
}
|
|
1911
|
+
return
|
|
1912
|
+
}
|
|
1913
|
+
|
|
1914
|
+
const key = new BatchingKey(entity.layer, blockCtx?.name,
|
|
1915
|
+
BatchingKey.GeometryType.INDEXED_LINES,
|
|
1916
|
+
entity.color, entity.lineType)
|
|
1917
|
+
const batch = this._GetBatch(key)
|
|
1918
|
+
/* Line may be split if exceeds chunk limit. */
|
|
1919
|
+
for (const lineChunk of entity._IterateLineChunks()) {
|
|
1920
|
+
const chunk = batch.PushChunk(lineChunk.verticesCount)
|
|
1921
|
+
for (const v of lineChunk.vertices) {
|
|
1922
|
+
chunk.PushVertex(this._TransformVertex(v, blockCtx))
|
|
1923
|
+
}
|
|
1924
|
+
for (const idx of lineChunk.indices) {
|
|
1925
|
+
chunk.PushIndex(idx)
|
|
1926
|
+
}
|
|
1927
|
+
chunk.Finish()
|
|
1928
|
+
}
|
|
1929
|
+
}
|
|
1930
|
+
|
|
1931
|
+
/**
|
|
1932
|
+
* @param entity {Entity}
|
|
1933
|
+
* @param blockCtx {?BlockContext}
|
|
1934
|
+
*/
|
|
1935
|
+
_ProcessTriangles(entity, blockCtx = null) {
|
|
1936
|
+
if (entity.vertices.length < 3) {
|
|
1937
|
+
return
|
|
1938
|
+
}
|
|
1939
|
+
if (entity.indices.length % 3 !== 0) {
|
|
1940
|
+
console.error("Unexpected size of indices array: " + entity.indices.length)
|
|
1941
|
+
return
|
|
1942
|
+
}
|
|
1943
|
+
const key = new BatchingKey(entity.layer, blockCtx?.name,
|
|
1944
|
+
BatchingKey.GeometryType.INDEXED_TRIANGLES,
|
|
1945
|
+
entity.color, 0)
|
|
1946
|
+
const batch = this._GetBatch(key)
|
|
1947
|
+
//XXX splitting into chunks is not yet implemented. Currently used only for text glyphs so
|
|
1948
|
+
// should fit into one chunk
|
|
1949
|
+
const chunk = batch.PushChunk(entity.vertices.length)
|
|
1950
|
+
for (const v of entity.vertices) {
|
|
1951
|
+
chunk.PushVertex(this._TransformVertex(v, blockCtx))
|
|
1952
|
+
}
|
|
1953
|
+
for (const idx of entity.indices) {
|
|
1954
|
+
chunk.PushIndex(idx)
|
|
1955
|
+
}
|
|
1956
|
+
chunk.Finish()
|
|
1957
|
+
}
|
|
1958
|
+
|
|
1959
|
+
/** Resolve entity color.
|
|
1960
|
+
*
|
|
1961
|
+
* @param entity
|
|
1962
|
+
* @param blockCtx {?BlockContext}
|
|
1963
|
+
* @return {number} RGB color value. For block entity it also may be one of ColorCode values
|
|
1964
|
+
* which are resolved on block instantiation.
|
|
1965
|
+
*/
|
|
1966
|
+
_GetEntityColor(entity, blockCtx = null) {
|
|
1967
|
+
let color = ColorCode.BY_LAYER
|
|
1968
|
+
if (entity.colorIndex === 0) {
|
|
1969
|
+
color = ColorCode.BY_BLOCK
|
|
1970
|
+
} else if (entity.colorIndex === 256) {
|
|
1971
|
+
color = ColorCode.BY_LAYER
|
|
1972
|
+
} else if (entity.hasOwnProperty("color")) {
|
|
1973
|
+
color = entity.color
|
|
1974
|
+
}
|
|
1975
|
+
|
|
1976
|
+
if (blockCtx) {
|
|
1977
|
+
return color
|
|
1978
|
+
}
|
|
1979
|
+
if (color === ColorCode.BY_LAYER || color === ColorCode.BY_BLOCK) {
|
|
1980
|
+
/* BY_BLOCK is not useful when not in block so replace it by layer as well. */
|
|
1981
|
+
if (entity.hasOwnProperty("layer")) {
|
|
1982
|
+
const layer = this.layers.get(entity.layer)
|
|
1983
|
+
if (layer) {
|
|
1984
|
+
return layer.color
|
|
1985
|
+
}
|
|
1986
|
+
}
|
|
1987
|
+
} else {
|
|
1988
|
+
return color
|
|
1989
|
+
}
|
|
1990
|
+
/* Fallback to black. */
|
|
1991
|
+
return 0
|
|
1992
|
+
}
|
|
1993
|
+
|
|
1994
|
+
/** @return {?string} Layer name, null for block entity. */
|
|
1995
|
+
_GetEntityLayer(entity, blockCtx = null) {
|
|
1996
|
+
if (blockCtx) {
|
|
1997
|
+
return null
|
|
1998
|
+
}
|
|
1999
|
+
if (entity.hasOwnProperty("layer")) {
|
|
2000
|
+
return entity.layer
|
|
2001
|
+
}
|
|
2002
|
+
return "0"
|
|
2003
|
+
}
|
|
2004
|
+
|
|
2005
|
+
/** Check extrusionDirection property of the entity and return corresponding transform matrix.
|
|
2006
|
+
*
|
|
2007
|
+
* @return {?Matrix3} Null if not transform required.
|
|
2008
|
+
*/
|
|
2009
|
+
_GetEntityExtrusionTransform(entity) {
|
|
2010
|
+
//XXX For now just mirror X axis if extrusion Z is negative. No full support for arbitrary
|
|
2011
|
+
// OCS yet.
|
|
2012
|
+
if (!entity.hasOwnProperty("extrusionDirection")) {
|
|
2013
|
+
return null
|
|
2014
|
+
}
|
|
2015
|
+
if (entity.extrusionDirection.z > 0) {
|
|
2016
|
+
return null
|
|
2017
|
+
}
|
|
2018
|
+
return new Matrix3().scale(-1, 1)
|
|
2019
|
+
}
|
|
2020
|
+
|
|
2021
|
+
/** @return {RenderBatch} */
|
|
2022
|
+
_GetBatch(key) {
|
|
2023
|
+
let batch = this.batches.find({key})
|
|
2024
|
+
if (batch !== null) {
|
|
2025
|
+
return batch
|
|
2026
|
+
}
|
|
2027
|
+
batch = new RenderBatch(key)
|
|
2028
|
+
this.batches.insert(batch)
|
|
2029
|
+
if (key.blockName !== null && !key.IsInstanced()) {
|
|
2030
|
+
/* Block definition batch. */
|
|
2031
|
+
const block = this.blocks.get(key.blockName)
|
|
2032
|
+
if (block) {
|
|
2033
|
+
block.batches.push(batch)
|
|
2034
|
+
}
|
|
2035
|
+
}
|
|
2036
|
+
return batch
|
|
2037
|
+
}
|
|
2038
|
+
|
|
2039
|
+
/**
|
|
2040
|
+
* Apply all necessary final transforms to a vertex before just before storing it in a rendering
|
|
2041
|
+
* batch.
|
|
2042
|
+
* @param v {{x: number, y: number}}
|
|
2043
|
+
* @param blockCtx {BlockContext}
|
|
2044
|
+
* @return {{x: number, y: number}}
|
|
2045
|
+
*/
|
|
2046
|
+
_TransformVertex(v, blockCtx = null) {
|
|
2047
|
+
if (blockCtx) {
|
|
2048
|
+
/* Block definition in block coordinates. So it should not touch bounds and origin. */
|
|
2049
|
+
return blockCtx.TransformVertex(v)
|
|
2050
|
+
}
|
|
2051
|
+
this._UpdateBounds(v)
|
|
2052
|
+
return { x: v.x - this.origin.x, y: v.y - this.origin.y }
|
|
2053
|
+
}
|
|
2054
|
+
|
|
2055
|
+
/** @param v {{x,y}} Vertex to extend bounding box with and set origin. */
|
|
2056
|
+
_UpdateBounds(v) {
|
|
2057
|
+
if (this.bounds === null) {
|
|
2058
|
+
this.bounds = { minX: v.x, maxX: v.x, minY: v.y, maxY: v.y }
|
|
2059
|
+
} else {
|
|
2060
|
+
if (v.x < this.bounds.minX) {
|
|
2061
|
+
this.bounds.minX = v.x
|
|
2062
|
+
} else if (v.x > this.bounds.maxX) {
|
|
2063
|
+
this.bounds.maxX = v.x
|
|
2064
|
+
}
|
|
2065
|
+
if (v.y < this.bounds.minY) {
|
|
2066
|
+
this.bounds.minY = v.y
|
|
2067
|
+
} else if (v.y > this.bounds.maxY) {
|
|
2068
|
+
this.bounds.maxY = v.y
|
|
2069
|
+
}
|
|
2070
|
+
}
|
|
2071
|
+
if (this.origin === null) {
|
|
2072
|
+
this.origin = { x: v.x, y: v.y }
|
|
2073
|
+
}
|
|
2074
|
+
}
|
|
2075
|
+
|
|
2076
|
+
_BuildScene() {
|
|
2077
|
+
let verticesSize = 0
|
|
2078
|
+
let indicesSize = 0
|
|
2079
|
+
let transformsSize = 0
|
|
2080
|
+
this.batches.each(b => {
|
|
2081
|
+
verticesSize += b.GetVerticesBufferSize()
|
|
2082
|
+
indicesSize += b.GetIndicesBufferSize()
|
|
2083
|
+
transformsSize += b.GetTransformsSize()
|
|
2084
|
+
})
|
|
2085
|
+
|
|
2086
|
+
const scene = {
|
|
2087
|
+
vertices: new ArrayBuffer(verticesSize),
|
|
2088
|
+
indices: new ArrayBuffer(indicesSize),
|
|
2089
|
+
transforms: new ArrayBuffer(transformsSize),
|
|
2090
|
+
batches: [],
|
|
2091
|
+
layers: [],
|
|
2092
|
+
origin: this.origin,
|
|
2093
|
+
bounds: this.bounds,
|
|
2094
|
+
hasMissingChars: this.hasMissingChars
|
|
2095
|
+
}
|
|
2096
|
+
|
|
2097
|
+
const buffers = {
|
|
2098
|
+
vertices: new Float32Array(scene.vertices),
|
|
2099
|
+
verticesOffset: 0,
|
|
2100
|
+
indices: new Uint16Array(scene.indices),
|
|
2101
|
+
indicesOffset: 0,
|
|
2102
|
+
transforms: new Float32Array(scene.transforms),
|
|
2103
|
+
transformsOffset: 0
|
|
2104
|
+
}
|
|
2105
|
+
|
|
2106
|
+
this.batches.each(b => {
|
|
2107
|
+
scene.batches.push(b.Serialize(buffers))
|
|
2108
|
+
})
|
|
2109
|
+
|
|
2110
|
+
for (const layer of this.layers.values()) {
|
|
2111
|
+
scene.layers.push({
|
|
2112
|
+
name: layer.name,
|
|
2113
|
+
displayName: layer.displayName,
|
|
2114
|
+
color: layer.color
|
|
2115
|
+
})
|
|
2116
|
+
}
|
|
2117
|
+
|
|
2118
|
+
scene.pointShapeHasDot = (this.pdMode & PdMode.MARK_MASK) === PdMode.DOT
|
|
2119
|
+
|
|
2120
|
+
return scene
|
|
2121
|
+
}
|
|
2122
|
+
}
|
|
2123
|
+
|
|
2124
|
+
/** A single rendering batch - geometry sharing one BatchingKey (layer, block,
 * geometry type, color, line type). Backing storage depends on the key:
 * indexed geometry uses chunks, block instances use a transforms buffer,
 * everything else a flat vertex buffer.
 */
class RenderBatch {
    /** @param key {BatchingKey} Immutable batch identity. */
    constructor(key) {
        this.key = key
        if (key.IsIndexed()) {
            /* List of IndexedChunk. */
            this.chunks = []
        } else if (key.geometryType === BatchingKey.GeometryType.BLOCK_INSTANCE) {
            this.transforms = new DynamicBuffer(NativeType.FLOAT32)
        } else {
            this.vertices = new DynamicBuffer(NativeType.FLOAT32)
        }
    }

    /** Append a 2D vertex to the un-indexed vertex buffer.
     * @param v {{x: number, y: number}}
     * @return Buffer position of the pushed X component.
     */
    PushVertex(v) {
        const idx = this.vertices.Push(v.x)
        this.vertices.Push(v.y)
        return idx
    }

    /**
     * @param matrix {Matrix3} 3x3 Transform matrix. Assuming 2D affine transform so only top 3x2
     *  sub-matrix is taken.
     */
    PushInstanceTransform(matrix) {
        /* Storing in row-major order as expected by renderer. */
        for (let row = 0; row < 2; row++) {
            for (let col = 0; col < 3; col++) {
                this.transforms.Push(matrix.elements[col * 3 + row])
            }
        }
    }

    /** This method actually reserves space for the specified number of indexed vertices in some
     * chunk. The returned object should be used to push exactly the same amount vertices and any
     * number of their referring indices.
     * @param verticesCount Number of vertices in the chunk.
     * @return {IndexedChunkWriter}
     */
    PushChunk(verticesCount) {
        if (verticesCount > INDEXED_CHUNK_SIZE) {
            throw new Error("Vertices count exceeds chunk limit: " + verticesCount)
        }
        /* Find suitable chunk with minimal remaining space to fill them as fully as possible. */
        let curChunk = null
        let curSpace = 0
        for (const chunk of this.chunks) {
            /* Two floats per vertex. */
            const space = INDEXED_CHUNK_SIZE - chunk.vertices.GetSize() / 2
            if (space < verticesCount) {
                continue
            }
            if (curChunk === null || space < curSpace) {
                curChunk = chunk
                curSpace = space
            }
        }
        if (curChunk === null) {
            curChunk = this._NewChunk(verticesCount)
        }
        return new IndexedChunkWriter(curChunk, verticesCount)
    }

    /** Merge other batch into this one. They should have the same geometry type. Instanced batches
     * are disallowed.
     *
     * @param batch {RenderBatch}
     * @param transform {?Matrix3} Optional transform to apply for merged vertices.
     */
    Merge(batch, transform = null) {
        if (this.key.geometryType !== batch.key.geometryType) {
            throw new Error("Rendering batch merging geometry type mismatch: " +
                            `${this.key.geometryType} !== ${batch.key.geometryType}`)
        }
        if (this.key.IsInstanced()) {
            throw new Error("Attempted to merge instanced batch")
        }
        if (this.key.IsIndexed()) {
            /* Merge chunks. */
            for (const chunk of batch.chunks) {
                const verticesSize = chunk.vertices.size
                const chunkWriter = this.PushChunk(verticesSize / 2)
                for (let i = 0; i < verticesSize; i += 2) {
                    const v = new Vector2(chunk.vertices.Get(i), chunk.vertices.Get(i + 1))
                    if (transform) {
                        v.applyMatrix3(transform)
                    }
                    chunkWriter.PushVertex(v)
                }
                const numIndices = chunk.indices.size
                for (let i = 0; i < numIndices; i++) {
                    /* Indices are chunk-relative so they can be copied as-is. */
                    chunkWriter.PushIndex(chunk.indices.Get(i))
                }
                chunkWriter.Finish()
            }
        } else {
            /* Un-indexed - copy (optionally transformed) vertex pairs. */
            const n = batch.vertices.size
            for (let i = 0; i < n; i += 2) {
                const v = new Vector2(batch.vertices.Get(i), batch.vertices.Get(i + 1))
                if (transform) {
                    v.applyMatrix3(transform)
                }
                this.PushVertex(v)
            }
        }
    }

    /** @return Vertices buffer required size in bytes. */
    GetVerticesBufferSize() {
        if (this.key.IsIndexed()) {
            let size = 0
            for (const chunk of this.chunks) {
                size += chunk.vertices.GetSize()
            }
            return size * Float32Array.BYTES_PER_ELEMENT
        } else if (this.key.geometryType === BatchingKey.GeometryType.BLOCK_INSTANCE) {
            /* Instances carry only transforms, no own vertices. */
            return 0
        } else {
            return this.vertices.GetSize() * Float32Array.BYTES_PER_ELEMENT
        }
    }

    /** @return Indices buffer required size in bytes. */
    GetIndicesBufferSize() {
        if (this.key.IsIndexed()) {
            let size = 0
            for (const chunk of this.chunks) {
                size += chunk.indices.GetSize()
            }
            return size * Uint16Array.BYTES_PER_ELEMENT
        } else {
            return 0
        }
    }

    /** @return Instances transforms buffer required size in bytes. */
    GetTransformsSize() {
        if (this.key.geometryType === BatchingKey.GeometryType.BLOCK_INSTANCE) {
            return this.transforms.GetSize() * Float32Array.BYTES_PER_ELEMENT
        } else {
            return 0
        }
    }

    /** Copy batch data into the shared scene buffers and return a descriptor
     * referencing the copied region.
     * @param buffers Shared buffers with running write offsets (mutated here).
     * @return {{}} Serialized batch descriptor.
     */
    Serialize(buffers) {
        if (this.key.IsIndexed()) {
            const batch = {
                key: this.key,
                chunks: []
            }
            for (const chunk of this.chunks) {
                batch.chunks.push(chunk.Serialize(buffers))
            }
            return batch

        } else if (this.key.geometryType === BatchingKey.GeometryType.BLOCK_INSTANCE) {
            const size = this.transforms.GetSize()
            const batch = {
                key: this.key,
                transformsOffset: buffers.transformsOffset,
                transformsSize: size
            }
            this.transforms.CopyTo(buffers.transforms, buffers.transformsOffset)
            buffers.transformsOffset += size
            return batch

        } else {
            const size = this.vertices.GetSize()
            const batch = {
                key: this.key,
                verticesOffset: buffers.verticesOffset,
                verticesSize: size
            }
            this.vertices.CopyTo(buffers.vertices, buffers.verticesOffset)
            buffers.verticesOffset += size
            return batch
        }
    }

    /** Create a new indexed chunk and register it in this batch.
     * @return {IndexedChunk}
     */
    _NewChunk(initialCapacity) {
        const chunk = new IndexedChunk(initialCapacity)
        this.chunks.push(chunk)
        return chunk
    }
}
|
|
2306
|
+
|
|
2307
|
+
class Block {
    /** @param data {{}} Raw DXF entity. */
    constructor(data) {
        this.data = data
        /* How many top-level entities (INSERT) reference this block. */
        this.useCount = 0
        /* How many other blocks reference this block. */
        this.nestedUseCount = 0
        /* Total vertices count in this block, used for the flattening decision. */
        this.verticesCount = 0
        /* {x, y} offset applied to all vertices. Moves the origin near the geometry to
         * minimize floating-point precision loss. Stays null until the first vertex is seen.
         */
        this.offset = null
        /* Batches of the block definition. Used when flattening root blocks. */
        this.batches = []
        this.flatten = false
        /** Bounds in block coordinates (with offset applied). */
        this.bounds = null
    }

    /** Set block flattening flag based on usage statistics.
     * @return {Boolean} New flatten flag state.
     */
    SetFlatten() {
        if (!this.HasGeometry()) {
            return false
        }
        /* Flatten when the block is instantiated exactly once (pure optimization when it
         * shares its layer with other geometry), or when the total number of instanced
         * vertices stays below the threshold (trade some space for fewer draw calls).
         */
        this.flatten = this.useCount === 1 ||
            this.useCount * this.verticesCount <= BLOCK_FLATTENING_VERTICES_THRESHOLD
        return this.flatten
    }

    /** @return {Boolean} True if has something to draw. */
    HasGeometry() {
        /* Offset is assigned on the first geometry vertex encountered. */
        return this.offset !== null
    }

    /** @param {{}} entity May be either INSERT or DIMENSION. */
    RegisterInsert(entity) {
        this.useCount++
    }

    /** Record a reference from another block definition. */
    RegisterNestedUse(usedByBlock) {
        this.nestedUseCount++
    }

    /** @return {BlockContext} Context for block definition. */
    DefinitionContext() {
        return new BlockContext(this, BlockContext.Type.DEFINITION)
    }

    /** @return {BlockContext} Context for block instantiation. */
    InstantiationContext() {
        return new BlockContext(this, BlockContext.Type.INSTANTIATION)
    }

    /** Extend bounds so they include the given vertex.
     * @param v {{x, y}} Vertex in block coordinates.
     */
    UpdateBounds(v) {
        if (this.bounds === null) {
            this.bounds = { minX: v.x, maxX: v.x, minY: v.y, maxY: v.y }
            return
        }
        this.bounds.minX = Math.min(this.bounds.minX, v.x)
        this.bounds.maxX = Math.max(this.bounds.maxX, v.x)
        this.bounds.minY = Math.min(this.bounds.minY, v.y)
        this.bounds.maxY = Math.max(this.bounds.maxY, v.y)
    }
}
|
|
2385
|
+
|
|
2386
|
+
class BlockContext {
    /** @param block {Block} Block this context operates on.
     * @param type {number} One of BlockContext.Type.
     */
    constructor(block, type) {
        this.block = block
        this.type = type
        this.origin = this.block.data.position
        /* Transform to apply for block definition entities not including block offset. */
        this.transform = new Matrix3()
    }

    /** @return {string} Block name */
    get name() {
        return this.block.data.name
    }

    /** Transform a definition-space vertex into block coordinates, updating the block's
     * statistics and bounds along the way.
     * @param v {{x,y}}
     * @return {{x,y}}
     */
    TransformVertex(v) {
        const transformed = new Vector2(v.x, v.y).applyMatrix3(this.transform)
        if (this.type !== BlockContext.Type.DEFINITION &&
            this.type !== BlockContext.Type.NESTED_DEFINITION) {

            throw new Error("Unexpected transform type")
        }
        this.block.verticesCount++
        if (this.block.offset === null) {
            /* First vertex encountered becomes the block origin, so the result is always the
             * zero vector for it.
             */
            this.block.offset = transformed
            const zero = new Vector2()
            this.block.UpdateBounds(zero)
            return zero
        }
        transformed.sub(this.block.offset)
        this.block.UpdateBounds(transformed)
        return transformed
    }

    /**
     * Get transform for block instance.
     * @param entity Raw DXF INSERT entity.
     * @return {Matrix3} Transform matrix for block instance to apply to the block definition.
     */
    GetInsertionTransform(entity) {
        const insertTransform = new Matrix3().translate(-this.origin.x, -this.origin.y)
        insertTransform.scale(entity.xScale || 1, entity.yScale || 1)
        insertTransform.rotate(-(entity.rotation || 0) * Math.PI / 180)
        insertTransform.translate(entity.position.x, entity.position.y)
        if (entity.extrusionDirection && entity.extrusionDirection.z < 0) {
            /* Negative extrusion direction mirrors the instance. */
            insertTransform.scale(-1, 1)
        }
        if (this.type !== BlockContext.Type.INSTANTIATION) {
            return insertTransform
        }
        /* Instantiation additionally compensates the block vertices offset. */
        const offsetTransform = new Matrix3().translate(this.block.offset.x, this.block.offset.y)
        return insertTransform.multiply(offsetTransform)
    }

    /**
     * Create context for nested block.
     * @param block {Block} Nested block.
     * @param entity Raw DXF INSERT entity.
     * @return {BlockContext} Context to use for nested block entities.
     */
    NestedBlockContext(block, entity) {
        block.RegisterNestedUse(this.block)
        const nestedCtx = new BlockContext(block, BlockContext.Type.NESTED_DEFINITION)
        const nestedTransform = nestedCtx.GetInsertionTransform(entity)
        const resultCtx = new BlockContext(this.block, BlockContext.Type.NESTED_DEFINITION)
        resultCtx.transform = new Matrix3().multiplyMatrices(this.transform, nestedTransform)
        return resultCtx
    }
}

BlockContext.Type = Object.freeze({
    /** Block definition entities. */
    DEFINITION: 0,
    /** Entities of a block nested inside another block definition. */
    NESTED_DEFINITION: 1,
    /** Instantiation of a block via INSERT. */
    INSTANTIATION: 2
})
|
|
2472
|
+
|
|
2473
|
+
class IndexedChunk {
    /** @param initialCapacity {number} Capacity hint in vertices, clamped to at least 16. */
    constructor(initialCapacity) {
        const capacity = initialCapacity < 16 ? 16 : initialCapacity
        /* Average two indices per vertex. */
        this.indices = new DynamicBuffer(NativeType.UINT16, capacity * 2)
        /* Two components per vertex. */
        this.vertices = new DynamicBuffer(NativeType.FLOAT32, capacity * 2)
    }

    /** Copy chunk data into the provided destination buffers.
     * @param buffers Destination buffers with running offsets (advanced past copied data).
     * @return {{}} Offsets and sizes of this chunk's data inside the destination buffers.
     */
    Serialize(buffers) {
        const verticesSize = this.vertices.GetSize()
        const indicesSize = this.indices.GetSize()
        const descriptor = {
            verticesOffset: buffers.verticesOffset,
            verticesSize,
            indicesOffset: buffers.indicesOffset,
            indicesSize
        }
        this.vertices.CopyTo(buffers.vertices, buffers.verticesOffset)
        buffers.verticesOffset += verticesSize
        this.indices.CopyTo(buffers.indices, buffers.indicesOffset)
        buffers.indicesOffset += indicesSize
        return descriptor
    }
}
|
|
2503
|
+
|
|
2504
|
+
class IndexedChunkWriter {
    /** Writes a fixed number of vertices and their indices into a chunk.
     * @param chunk {IndexedChunk} Destination chunk.
     * @param verticesCount {number} Exact number of vertices that will be pushed.
     */
    constructor(chunk, verticesCount) {
        this.chunk = chunk
        this.verticesCount = verticesCount
        /* Base vertex index for this batch (vertices buffer has two components per vertex). */
        this.verticesOffset = this.chunk.vertices.GetSize() / 2
        this.numVerticesPushed = 0
    }

    /** Append one vertex.
     * @param v {{x, y}}
     */
    PushVertex(v) {
        if (this.numVerticesPushed === this.verticesCount) {
            throw new Error()
        }
        const { vertices } = this.chunk
        vertices.Push(v.x)
        vertices.Push(v.y)
        this.numVerticesPushed++
    }

    /** Append one index, relative to this writer's first vertex.
     * @param idx {number}
     */
    PushIndex(idx) {
        if (idx < 0 || idx >= this.verticesCount) {
            throw new Error(`Index out of range: ${idx}/${this.verticesCount}`)
        }
        this.chunk.indices.Push(idx + this.verticesOffset)
    }

    /** Verify that exactly the promised number of vertices was pushed. */
    Finish() {
        if (this.numVerticesPushed !== this.verticesCount) {
            throw new Error(`Not all vertices pushed: ${this.numVerticesPushed}/${this.verticesCount}`)
        }
    }
}
|
|
2534
|
+
|
|
2535
|
+
/** Internal entity representation. DXF features are decomposed into these simpler entities. Whole
 * entity always shares single material.
 */
export class Entity {
    /** @param type {number} See Entity.Type
     * @param vertices {{x, y}[]}
     * @param indices {?number[]} Indices for indexed geometry.
     * @param layer {?string}
     * @param color {number}
     * @param lineType {?number}
     * @param shape {Boolean} true if closed shape.
     */
    constructor({type, vertices, indices = null, layer = null, color, lineType = 0, shape = false}) {
        this.type = type
        this.vertices = vertices
        this.indices = indices
        this.layer = layer
        this.color = color
        this.lineType = lineType
        this.shape = shape
    }

    /** Yield "count" vertices starting at "startIndex". */
    *_IterateVertices(startIndex, count) {
        for (let idx = startIndex; idx < startIndex + count; idx++) {
            yield this.vertices[idx]
        }
    }

    /** Split line into chunks with at most INDEXED_CHUNK_SIZE vertices in each one. Each chunk is
     * an object with the following properties:
     * * "verticesCount" - length of "vertices"
     * * "vertices" - iterator for included vertices.
     * * "indices" - iterator for indices.
     * Closed shapes are handled properly.
     */
    *_IterateLineChunks() {
        const verticesCount = this.vertices.length
        if (verticesCount < 2) {
            return
        }
        /* Captured entity reference for the nested generator functions below. Inside a plain
         * "function*" expression "this" is undefined (modules run in strict mode), so those
         * generators must use "_this" - referencing "this" there throws on iteration.
         */
        const _this = this
        /* chunkOffset == verticesCount for shape closing vertex. */
        for (let chunkOffset = 0; chunkOffset <= verticesCount; chunkOffset += INDEXED_CHUNK_SIZE) {
            let count = verticesCount - chunkOffset
            let isLast
            if (count > INDEXED_CHUNK_SIZE) {
                count = INDEXED_CHUNK_SIZE
                isLast = false
            } else {
                isLast = true
            }
            if (isLast && this.shape && chunkOffset > 0 && count === INDEXED_CHUNK_SIZE) {
                /* Corner case - required shape closing vertex does not fit into the chunk. Will
                 * require additional chunk.
                 */
                isLast = false
            }
            if (chunkOffset === verticesCount && !this.shape) {
                /* Shape is not closed and it is last closing vertex iteration. */
                break
            }

            let vertices, indices, chunkVerticesCount
            if (count < 2) {
                /* Either last vertex or last shape-closing vertex, or both. */
                if (count === 1 && this.shape) {
                    /* Both. */
                    vertices = (function*() {
                        yield _this.vertices[chunkOffset]
                        yield _this.vertices[0]
                    })()
                } else if (count === 1) {
                    /* Just last vertex. Take previous one to make a line. */
                    vertices = (function*() {
                        yield _this.vertices[chunkOffset - 1]
                        yield _this.vertices[chunkOffset]
                    })()
                } else {
                    /* Just shape-closing vertex. Take last one to make a line. */
                    vertices = (function*() {
                        yield _this.vertices[verticesCount - 1]
                        yield _this.vertices[0]
                    })()
                }
                indices = _IterateLineIndices(2, false)
                chunkVerticesCount = 2
            } else if (isLast && this.shape && chunkOffset > 0 && count < INDEXED_CHUNK_SIZE) {
                /* Additional vertex to close the shape. */
                vertices = (function*() {
                    yield* _this._IterateVertices(chunkOffset, count)
                    yield _this.vertices[0]
                })()
                indices = _IterateLineIndices(count + 1, false)
                chunkVerticesCount = count + 1
            } else {
                vertices = this._IterateVertices(chunkOffset, count)
                indices = _IterateLineIndices(count,
                                              isLast && chunkOffset === 0 && this.shape)
                chunkVerticesCount = count
            }
            yield {
                verticesCount: chunkVerticesCount,
                vertices,
                indices
            }
        }
    }
}

Entity.Type = Object.freeze({
    POINTS: 0,
    /** Each vertices pair defines a segment. */
    LINE_SEGMENTS: 1,
    POLYLINE: 2,
    TRIANGLES: 3
})
|
|
2651
|
+
|
|
2652
|
+
/** Yield index pairs forming line segments for a polyline.
 * @param verticesCount {number} Number of vertices in the polyline.
 * @param close {Boolean} Emit an additional segment from the last vertex back to the first.
 */
function* _IterateLineIndices(verticesCount, close) {
    /* One segment per consecutive vertex pair: (0,1), (1,2), ... */
    let idx = 0
    while (idx < verticesCount - 1) {
        yield idx
        yield ++idx
    }
    if (close && verticesCount > 2) {
        /* Closing segment. */
        yield verticesCount - 1
        yield 0
    }
}
|
|
2662
|
+
|
|
2663
|
+
/** Point display mode, $PDMODE system variable. The low nibble (MARK_MASK) selects the central
 * marker glyph; the high nibble (SHAPE_MASK) optionally adds an enclosing shape around it.
 */
const PdMode = Object.freeze({
    DOT: 0,
    NONE: 1,
    PLUS: 2,
    CROSS: 3,
    TICK: 4,
    /* Extracts the marker glyph part of the value. */
    MARK_MASK: 0xf,

    CIRCLE: 0x20,
    SQUARE: 0x40,

    /* Extracts the enclosing shape part of the value. */
    SHAPE_MASK: 0xf0
})
|
|
2677
|
+
|
|
2678
|
+
/** Special color values, used for block entities. Regular entities color is resolved instantly.
 * Block entities defer resolution until instantiation, when the effective layer/block color is
 * known.
 */
export const ColorCode = Object.freeze({
    /* Take the color from the entity's layer. */
    BY_LAYER: -1,
    /* Take the color from the containing block's insertion. */
    BY_BLOCK: -2
})
|
|
2683
|
+
|
|
2684
|
+
/** Default option values for DxfScene. See individual comments for each option's meaning. */
DxfScene.DefaultOptions = {
    /** Target angle for each segment of tessellated arc. */
    arcTessellationAngle: 10 / 180 * Math.PI,
    /** Divide arc to at least the specified number of segments. */
    minArcTessellationSubdivisions: 8,
    /** Render meshes (3DFACE group, POLYLINE polyface mesh) as wireframe instead of solid. */
    wireframeMesh: false,
    /** Suppress paper-space entities when true (only model-space is rendered). */
    suppressPaperSpace: false,
    /** Text rendering options. */
    textOptions: TextRenderer.DefaultOptions,
}
|