meerk40t 0.9.3001__py2.py3-none-any.whl → 0.9.7010__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- meerk40t/__init__.py +1 -1
- meerk40t/balormk/balor_params.py +167 -167
- meerk40t/balormk/clone_loader.py +457 -457
- meerk40t/balormk/controller.py +1566 -1512
- meerk40t/balormk/cylindermod.py +64 -0
- meerk40t/balormk/device.py +966 -1959
- meerk40t/balormk/driver.py +778 -591
- meerk40t/balormk/galvo_commands.py +1195 -0
- meerk40t/balormk/gui/balorconfig.py +237 -111
- meerk40t/balormk/gui/balorcontroller.py +191 -184
- meerk40t/balormk/gui/baloroperationproperties.py +116 -115
- meerk40t/balormk/gui/corscene.py +845 -0
- meerk40t/balormk/gui/gui.py +179 -147
- meerk40t/balormk/livelightjob.py +466 -382
- meerk40t/balormk/mock_connection.py +131 -109
- meerk40t/balormk/plugin.py +133 -135
- meerk40t/balormk/usb_connection.py +306 -301
- meerk40t/camera/__init__.py +1 -1
- meerk40t/camera/camera.py +514 -397
- meerk40t/camera/gui/camerapanel.py +1241 -1095
- meerk40t/camera/gui/gui.py +58 -58
- meerk40t/camera/plugin.py +441 -399
- meerk40t/ch341/__init__.py +27 -27
- meerk40t/ch341/ch341device.py +628 -628
- meerk40t/ch341/libusb.py +595 -589
- meerk40t/ch341/mock.py +171 -171
- meerk40t/ch341/windriver.py +157 -157
- meerk40t/constants.py +13 -0
- meerk40t/core/__init__.py +1 -1
- meerk40t/core/bindalias.py +550 -539
- meerk40t/core/core.py +47 -47
- meerk40t/core/cutcode/cubiccut.py +73 -73
- meerk40t/core/cutcode/cutcode.py +315 -312
- meerk40t/core/cutcode/cutgroup.py +141 -137
- meerk40t/core/cutcode/cutobject.py +192 -185
- meerk40t/core/cutcode/dwellcut.py +37 -37
- meerk40t/core/cutcode/gotocut.py +29 -29
- meerk40t/core/cutcode/homecut.py +29 -29
- meerk40t/core/cutcode/inputcut.py +34 -34
- meerk40t/core/cutcode/linecut.py +33 -33
- meerk40t/core/cutcode/outputcut.py +34 -34
- meerk40t/core/cutcode/plotcut.py +335 -335
- meerk40t/core/cutcode/quadcut.py +61 -61
- meerk40t/core/cutcode/rastercut.py +168 -148
- meerk40t/core/cutcode/waitcut.py +34 -34
- meerk40t/core/cutplan.py +1843 -1316
- meerk40t/core/drivers.py +330 -329
- meerk40t/core/elements/align.py +801 -669
- meerk40t/core/elements/branches.py +1844 -1507
- meerk40t/core/elements/clipboard.py +229 -219
- meerk40t/core/elements/element_treeops.py +4561 -2837
- meerk40t/core/elements/element_types.py +125 -105
- meerk40t/core/elements/elements.py +4329 -3617
- meerk40t/core/elements/files.py +117 -64
- meerk40t/core/elements/geometry.py +473 -224
- meerk40t/core/elements/grid.py +467 -316
- meerk40t/core/elements/materials.py +158 -94
- meerk40t/core/elements/notes.py +50 -38
- meerk40t/core/elements/offset_clpr.py +933 -912
- meerk40t/core/elements/offset_mk.py +963 -955
- meerk40t/core/elements/penbox.py +339 -267
- meerk40t/core/elements/placements.py +300 -83
- meerk40t/core/elements/render.py +785 -687
- meerk40t/core/elements/shapes.py +2618 -2092
- meerk40t/core/elements/trace.py +651 -563
- meerk40t/core/elements/tree_commands.py +415 -409
- meerk40t/core/elements/undo_redo.py +116 -58
- meerk40t/core/elements/wordlist.py +319 -200
- meerk40t/core/exceptions.py +9 -9
- meerk40t/core/laserjob.py +220 -220
- meerk40t/core/logging.py +63 -63
- meerk40t/core/node/blobnode.py +83 -86
- meerk40t/core/node/bootstrap.py +105 -103
- meerk40t/core/node/branch_elems.py +40 -31
- meerk40t/core/node/branch_ops.py +45 -38
- meerk40t/core/node/branch_regmark.py +48 -41
- meerk40t/core/node/cutnode.py +29 -32
- meerk40t/core/node/effect_hatch.py +375 -257
- meerk40t/core/node/effect_warp.py +398 -0
- meerk40t/core/node/effect_wobble.py +441 -309
- meerk40t/core/node/elem_ellipse.py +404 -309
- meerk40t/core/node/elem_image.py +1082 -801
- meerk40t/core/node/elem_line.py +358 -292
- meerk40t/core/node/elem_path.py +259 -201
- meerk40t/core/node/elem_point.py +129 -102
- meerk40t/core/node/elem_polyline.py +310 -246
- meerk40t/core/node/elem_rect.py +376 -286
- meerk40t/core/node/elem_text.py +445 -418
- meerk40t/core/node/filenode.py +59 -40
- meerk40t/core/node/groupnode.py +138 -74
- meerk40t/core/node/image_processed.py +777 -766
- meerk40t/core/node/image_raster.py +156 -113
- meerk40t/core/node/layernode.py +31 -31
- meerk40t/core/node/mixins.py +135 -107
- meerk40t/core/node/node.py +1427 -1304
- meerk40t/core/node/nutils.py +117 -114
- meerk40t/core/node/op_cut.py +462 -335
- meerk40t/core/node/op_dots.py +296 -251
- meerk40t/core/node/op_engrave.py +414 -311
- meerk40t/core/node/op_image.py +755 -369
- meerk40t/core/node/op_raster.py +787 -522
- meerk40t/core/node/place_current.py +37 -40
- meerk40t/core/node/place_point.py +329 -126
- meerk40t/core/node/refnode.py +58 -47
- meerk40t/core/node/rootnode.py +225 -219
- meerk40t/core/node/util_console.py +48 -48
- meerk40t/core/node/util_goto.py +84 -65
- meerk40t/core/node/util_home.py +61 -61
- meerk40t/core/node/util_input.py +102 -102
- meerk40t/core/node/util_output.py +102 -102
- meerk40t/core/node/util_wait.py +65 -65
- meerk40t/core/parameters.py +709 -707
- meerk40t/core/planner.py +875 -785
- meerk40t/core/plotplanner.py +656 -652
- meerk40t/core/space.py +120 -113
- meerk40t/core/spoolers.py +706 -705
- meerk40t/core/svg_io.py +1836 -1549
- meerk40t/core/treeop.py +534 -445
- meerk40t/core/undos.py +278 -124
- meerk40t/core/units.py +784 -680
- meerk40t/core/view.py +393 -322
- meerk40t/core/webhelp.py +62 -62
- meerk40t/core/wordlist.py +513 -504
- meerk40t/cylinder/cylinder.py +247 -0
- meerk40t/cylinder/gui/cylindersettings.py +41 -0
- meerk40t/cylinder/gui/gui.py +24 -0
- meerk40t/device/__init__.py +1 -1
- meerk40t/device/basedevice.py +322 -123
- meerk40t/device/devicechoices.py +50 -0
- meerk40t/device/dummydevice.py +163 -128
- meerk40t/device/gui/defaultactions.py +618 -602
- meerk40t/device/gui/effectspanel.py +114 -0
- meerk40t/device/gui/formatterpanel.py +253 -290
- meerk40t/device/gui/warningpanel.py +337 -260
- meerk40t/device/mixins.py +13 -13
- meerk40t/dxf/__init__.py +1 -1
- meerk40t/dxf/dxf_io.py +766 -554
- meerk40t/dxf/plugin.py +47 -35
- meerk40t/external_plugins.py +79 -79
- meerk40t/external_plugins_build.py +28 -28
- meerk40t/extra/cag.py +112 -116
- meerk40t/extra/coolant.py +403 -0
- meerk40t/extra/encode_detect.py +198 -0
- meerk40t/extra/ezd.py +1165 -1165
- meerk40t/extra/hershey.py +835 -340
- meerk40t/extra/imageactions.py +322 -316
- meerk40t/extra/inkscape.py +630 -622
- meerk40t/extra/lbrn.py +424 -424
- meerk40t/extra/outerworld.py +284 -0
- meerk40t/extra/param_functions.py +1542 -1556
- meerk40t/extra/potrace.py +257 -253
- meerk40t/extra/serial_exchange.py +118 -0
- meerk40t/extra/updater.py +602 -453
- meerk40t/extra/vectrace.py +147 -146
- meerk40t/extra/winsleep.py +83 -83
- meerk40t/extra/xcs_reader.py +597 -0
- meerk40t/fill/fills.py +781 -335
- meerk40t/fill/patternfill.py +1061 -1061
- meerk40t/fill/patterns.py +614 -567
- meerk40t/grbl/control.py +87 -87
- meerk40t/grbl/controller.py +990 -903
- meerk40t/grbl/device.py +1081 -768
- meerk40t/grbl/driver.py +989 -771
- meerk40t/grbl/emulator.py +532 -497
- meerk40t/grbl/gcodejob.py +783 -767
- meerk40t/grbl/gui/grblconfiguration.py +373 -298
- meerk40t/grbl/gui/grblcontroller.py +485 -271
- meerk40t/grbl/gui/grblhardwareconfig.py +269 -153
- meerk40t/grbl/gui/grbloperationconfig.py +105 -0
- meerk40t/grbl/gui/gui.py +147 -116
- meerk40t/grbl/interpreter.py +44 -44
- meerk40t/grbl/loader.py +22 -22
- meerk40t/grbl/mock_connection.py +56 -56
- meerk40t/grbl/plugin.py +294 -264
- meerk40t/grbl/serial_connection.py +93 -88
- meerk40t/grbl/tcp_connection.py +81 -79
- meerk40t/grbl/ws_connection.py +112 -0
- meerk40t/gui/__init__.py +1 -1
- meerk40t/gui/about.py +2042 -296
- meerk40t/gui/alignment.py +1644 -1608
- meerk40t/gui/autoexec.py +199 -0
- meerk40t/gui/basicops.py +791 -670
- meerk40t/gui/bufferview.py +77 -71
- meerk40t/gui/busy.py +170 -133
- meerk40t/gui/choicepropertypanel.py +1673 -1469
- meerk40t/gui/consolepanel.py +706 -542
- meerk40t/gui/devicepanel.py +687 -581
- meerk40t/gui/dialogoptions.py +110 -107
- meerk40t/gui/executejob.py +316 -306
- meerk40t/gui/fonts.py +90 -90
- meerk40t/gui/functionwrapper.py +252 -0
- meerk40t/gui/gui_mixins.py +729 -0
- meerk40t/gui/guicolors.py +205 -182
- meerk40t/gui/help_assets/help_assets.py +218 -201
- meerk40t/gui/helper.py +154 -0
- meerk40t/gui/hersheymanager.py +1430 -846
- meerk40t/gui/icons.py +3422 -2747
- meerk40t/gui/imagesplitter.py +555 -508
- meerk40t/gui/keymap.py +354 -344
- meerk40t/gui/laserpanel.py +892 -806
- meerk40t/gui/laserrender.py +1470 -1232
- meerk40t/gui/lasertoolpanel.py +805 -793
- meerk40t/gui/magnetoptions.py +436 -0
- meerk40t/gui/materialmanager.py +2917 -0
- meerk40t/gui/materialtest.py +1722 -1694
- meerk40t/gui/mkdebug.py +646 -359
- meerk40t/gui/mwindow.py +163 -140
- meerk40t/gui/navigationpanels.py +2605 -2467
- meerk40t/gui/notes.py +143 -142
- meerk40t/gui/opassignment.py +414 -410
- meerk40t/gui/operation_info.py +310 -299
- meerk40t/gui/plugin.py +494 -328
- meerk40t/gui/position.py +714 -669
- meerk40t/gui/preferences.py +901 -650
- meerk40t/gui/propertypanels/attributes.py +1461 -1131
- meerk40t/gui/propertypanels/blobproperty.py +117 -114
- meerk40t/gui/propertypanels/consoleproperty.py +83 -80
- meerk40t/gui/propertypanels/gotoproperty.py +77 -0
- meerk40t/gui/propertypanels/groupproperties.py +223 -217
- meerk40t/gui/propertypanels/hatchproperty.py +489 -469
- meerk40t/gui/propertypanels/imageproperty.py +2244 -1384
- meerk40t/gui/propertypanels/inputproperty.py +59 -58
- meerk40t/gui/propertypanels/opbranchproperties.py +82 -80
- meerk40t/gui/propertypanels/operationpropertymain.py +1890 -1638
- meerk40t/gui/propertypanels/outputproperty.py +59 -58
- meerk40t/gui/propertypanels/pathproperty.py +389 -380
- meerk40t/gui/propertypanels/placementproperty.py +1214 -383
- meerk40t/gui/propertypanels/pointproperty.py +140 -136
- meerk40t/gui/propertypanels/propertywindow.py +313 -181
- meerk40t/gui/propertypanels/rasterwizardpanels.py +996 -912
- meerk40t/gui/propertypanels/regbranchproperties.py +76 -0
- meerk40t/gui/propertypanels/textproperty.py +770 -755
- meerk40t/gui/propertypanels/waitproperty.py +56 -55
- meerk40t/gui/propertypanels/warpproperty.py +121 -0
- meerk40t/gui/propertypanels/wobbleproperty.py +255 -204
- meerk40t/gui/ribbon.py +2468 -2210
- meerk40t/gui/scene/scene.py +1100 -1051
- meerk40t/gui/scene/sceneconst.py +22 -22
- meerk40t/gui/scene/scenepanel.py +439 -349
- meerk40t/gui/scene/scenespacewidget.py +365 -365
- meerk40t/gui/scene/widget.py +518 -505
- meerk40t/gui/scenewidgets/affinemover.py +215 -215
- meerk40t/gui/scenewidgets/attractionwidget.py +315 -309
- meerk40t/gui/scenewidgets/bedwidget.py +120 -97
- meerk40t/gui/scenewidgets/elementswidget.py +137 -107
- meerk40t/gui/scenewidgets/gridwidget.py +785 -745
- meerk40t/gui/scenewidgets/guidewidget.py +765 -765
- meerk40t/gui/scenewidgets/laserpathwidget.py +66 -66
- meerk40t/gui/scenewidgets/machineoriginwidget.py +86 -86
- meerk40t/gui/scenewidgets/nodeselector.py +28 -28
- meerk40t/gui/scenewidgets/rectselectwidget.py +589 -346
- meerk40t/gui/scenewidgets/relocatewidget.py +33 -33
- meerk40t/gui/scenewidgets/reticlewidget.py +83 -83
- meerk40t/gui/scenewidgets/selectionwidget.py +2952 -2756
- meerk40t/gui/simpleui.py +357 -333
- meerk40t/gui/simulation.py +2431 -2094
- meerk40t/gui/snapoptions.py +208 -203
- meerk40t/gui/spoolerpanel.py +1227 -1180
- meerk40t/gui/statusbarwidgets/defaultoperations.py +480 -353
- meerk40t/gui/statusbarwidgets/infowidget.py +520 -483
- meerk40t/gui/statusbarwidgets/opassignwidget.py +356 -355
- meerk40t/gui/statusbarwidgets/selectionwidget.py +172 -171
- meerk40t/gui/statusbarwidgets/shapepropwidget.py +754 -236
- meerk40t/gui/statusbarwidgets/statusbar.py +272 -260
- meerk40t/gui/statusbarwidgets/statusbarwidget.py +268 -270
- meerk40t/gui/statusbarwidgets/strokewidget.py +267 -251
- meerk40t/gui/themes.py +200 -78
- meerk40t/gui/tips.py +591 -0
- meerk40t/gui/toolwidgets/circlebrush.py +35 -35
- meerk40t/gui/toolwidgets/toolcircle.py +248 -242
- meerk40t/gui/toolwidgets/toolcontainer.py +82 -77
- meerk40t/gui/toolwidgets/tooldraw.py +97 -90
- meerk40t/gui/toolwidgets/toolellipse.py +219 -212
- meerk40t/gui/toolwidgets/toolimagecut.py +25 -132
- meerk40t/gui/toolwidgets/toolline.py +39 -144
- meerk40t/gui/toolwidgets/toollinetext.py +79 -236
- meerk40t/gui/toolwidgets/toollinetext_inline.py +296 -0
- meerk40t/gui/toolwidgets/toolmeasure.py +160 -216
- meerk40t/gui/toolwidgets/toolnodeedit.py +2088 -2074
- meerk40t/gui/toolwidgets/toolnodemove.py +92 -94
- meerk40t/gui/toolwidgets/toolparameter.py +754 -668
- meerk40t/gui/toolwidgets/toolplacement.py +108 -108
- meerk40t/gui/toolwidgets/toolpoint.py +68 -59
- meerk40t/gui/toolwidgets/toolpointlistbuilder.py +294 -0
- meerk40t/gui/toolwidgets/toolpointmove.py +183 -0
- meerk40t/gui/toolwidgets/toolpolygon.py +288 -403
- meerk40t/gui/toolwidgets/toolpolyline.py +38 -196
- meerk40t/gui/toolwidgets/toolrect.py +211 -207
- meerk40t/gui/toolwidgets/toolrelocate.py +72 -72
- meerk40t/gui/toolwidgets/toolribbon.py +598 -113
- meerk40t/gui/toolwidgets/tooltabedit.py +546 -0
- meerk40t/gui/toolwidgets/tooltext.py +98 -89
- meerk40t/gui/toolwidgets/toolvector.py +213 -204
- meerk40t/gui/toolwidgets/toolwidget.py +39 -39
- meerk40t/gui/usbconnect.py +98 -91
- meerk40t/gui/utilitywidgets/buttonwidget.py +18 -18
- meerk40t/gui/utilitywidgets/checkboxwidget.py +90 -90
- meerk40t/gui/utilitywidgets/controlwidget.py +14 -14
- meerk40t/gui/utilitywidgets/cyclocycloidwidget.py +343 -340
- meerk40t/gui/utilitywidgets/debugwidgets.py +148 -0
- meerk40t/gui/utilitywidgets/handlewidget.py +27 -27
- meerk40t/gui/utilitywidgets/harmonograph.py +450 -447
- meerk40t/gui/utilitywidgets/openclosewidget.py +40 -40
- meerk40t/gui/utilitywidgets/rotationwidget.py +54 -54
- meerk40t/gui/utilitywidgets/scalewidget.py +75 -75
- meerk40t/gui/utilitywidgets/seekbarwidget.py +183 -183
- meerk40t/gui/utilitywidgets/togglewidget.py +142 -142
- meerk40t/gui/utilitywidgets/toolbarwidget.py +8 -8
- meerk40t/gui/wordlisteditor.py +985 -931
- meerk40t/gui/wxmeerk40t.py +1444 -1169
- meerk40t/gui/wxmmain.py +5578 -4112
- meerk40t/gui/wxmribbon.py +1591 -1076
- meerk40t/gui/wxmscene.py +1635 -1453
- meerk40t/gui/wxmtree.py +2410 -2089
- meerk40t/gui/wxutils.py +1769 -1099
- meerk40t/gui/zmatrix.py +102 -102
- meerk40t/image/__init__.py +1 -1
- meerk40t/image/dither.py +429 -0
- meerk40t/image/imagetools.py +2778 -2269
- meerk40t/internal_plugins.py +150 -130
- meerk40t/kernel/__init__.py +63 -12
- meerk40t/kernel/channel.py +259 -212
- meerk40t/kernel/context.py +538 -538
- meerk40t/kernel/exceptions.py +41 -41
- meerk40t/kernel/functions.py +463 -414
- meerk40t/kernel/jobs.py +100 -100
- meerk40t/kernel/kernel.py +3809 -3571
- meerk40t/kernel/lifecycles.py +71 -71
- meerk40t/kernel/module.py +49 -49
- meerk40t/kernel/service.py +147 -147
- meerk40t/kernel/settings.py +383 -343
- meerk40t/lihuiyu/controller.py +883 -876
- meerk40t/lihuiyu/device.py +1181 -1069
- meerk40t/lihuiyu/driver.py +1466 -1372
- meerk40t/lihuiyu/gui/gui.py +127 -106
- meerk40t/lihuiyu/gui/lhyaccelgui.py +377 -363
- meerk40t/lihuiyu/gui/lhycontrollergui.py +741 -651
- meerk40t/lihuiyu/gui/lhydrivergui.py +470 -446
- meerk40t/lihuiyu/gui/lhyoperationproperties.py +238 -237
- meerk40t/lihuiyu/gui/tcpcontroller.py +226 -190
- meerk40t/lihuiyu/interpreter.py +53 -53
- meerk40t/lihuiyu/laserspeed.py +450 -450
- meerk40t/lihuiyu/loader.py +90 -90
- meerk40t/lihuiyu/parser.py +404 -404
- meerk40t/lihuiyu/plugin.py +101 -102
- meerk40t/lihuiyu/tcp_connection.py +111 -109
- meerk40t/main.py +231 -165
- meerk40t/moshi/builder.py +788 -781
- meerk40t/moshi/controller.py +505 -499
- meerk40t/moshi/device.py +495 -442
- meerk40t/moshi/driver.py +862 -696
- meerk40t/moshi/gui/gui.py +78 -76
- meerk40t/moshi/gui/moshicontrollergui.py +538 -522
- meerk40t/moshi/gui/moshidrivergui.py +87 -75
- meerk40t/moshi/plugin.py +43 -43
- meerk40t/network/console_server.py +102 -57
- meerk40t/network/kernelserver.py +10 -9
- meerk40t/network/tcp_server.py +142 -140
- meerk40t/network/udp_server.py +103 -77
- meerk40t/network/web_server.py +390 -0
- meerk40t/newly/controller.py +1158 -1144
- meerk40t/newly/device.py +874 -732
- meerk40t/newly/driver.py +540 -412
- meerk40t/newly/gui/gui.py +219 -188
- meerk40t/newly/gui/newlyconfig.py +116 -101
- meerk40t/newly/gui/newlycontroller.py +193 -186
- meerk40t/newly/gui/operationproperties.py +51 -51
- meerk40t/newly/mock_connection.py +82 -82
- meerk40t/newly/newly_params.py +56 -56
- meerk40t/newly/plugin.py +1214 -1246
- meerk40t/newly/usb_connection.py +322 -322
- meerk40t/rotary/gui/gui.py +52 -46
- meerk40t/rotary/gui/rotarysettings.py +240 -232
- meerk40t/rotary/rotary.py +202 -98
- meerk40t/ruida/control.py +291 -91
- meerk40t/ruida/controller.py +138 -1088
- meerk40t/ruida/device.py +672 -231
- meerk40t/ruida/driver.py +534 -472
- meerk40t/ruida/emulator.py +1494 -1491
- meerk40t/ruida/exceptions.py +4 -4
- meerk40t/ruida/gui/gui.py +71 -76
- meerk40t/ruida/gui/ruidaconfig.py +239 -72
- meerk40t/ruida/gui/ruidacontroller.py +187 -184
- meerk40t/ruida/gui/ruidaoperationproperties.py +48 -47
- meerk40t/ruida/loader.py +54 -52
- meerk40t/ruida/mock_connection.py +57 -109
- meerk40t/ruida/plugin.py +124 -87
- meerk40t/ruida/rdjob.py +2084 -945
- meerk40t/ruida/serial_connection.py +116 -0
- meerk40t/ruida/tcp_connection.py +146 -0
- meerk40t/ruida/udp_connection.py +73 -0
- meerk40t/svgelements.py +9671 -9669
- meerk40t/tools/driver_to_path.py +584 -579
- meerk40t/tools/geomstr.py +5583 -4680
- meerk40t/tools/jhfparser.py +357 -292
- meerk40t/tools/kerftest.py +904 -890
- meerk40t/tools/livinghinges.py +1168 -1033
- meerk40t/tools/pathtools.py +987 -949
- meerk40t/tools/pmatrix.py +234 -0
- meerk40t/tools/pointfinder.py +942 -942
- meerk40t/tools/polybool.py +940 -940
- meerk40t/tools/rasterplotter.py +1660 -547
- meerk40t/tools/shxparser.py +989 -901
- meerk40t/tools/ttfparser.py +726 -446
- meerk40t/tools/zinglplotter.py +595 -593
- {meerk40t-0.9.3001.dist-info → meerk40t-0.9.7010.dist-info}/LICENSE +21 -21
- {meerk40t-0.9.3001.dist-info → meerk40t-0.9.7010.dist-info}/METADATA +150 -139
- meerk40t-0.9.7010.dist-info/RECORD +445 -0
- {meerk40t-0.9.3001.dist-info → meerk40t-0.9.7010.dist-info}/WHEEL +1 -1
- {meerk40t-0.9.3001.dist-info → meerk40t-0.9.7010.dist-info}/top_level.txt +0 -1
- {meerk40t-0.9.3001.dist-info → meerk40t-0.9.7010.dist-info}/zip-safe +1 -1
- meerk40t/balormk/elementlightjob.py +0 -159
- meerk40t-0.9.3001.dist-info/RECORD +0 -437
- test/bootstrap.py +0 -63
- test/test_cli.py +0 -12
- test/test_core_cutcode.py +0 -418
- test/test_core_elements.py +0 -144
- test/test_core_plotplanner.py +0 -397
- test/test_core_viewports.py +0 -312
- test/test_drivers_grbl.py +0 -108
- test/test_drivers_lihuiyu.py +0 -443
- test/test_drivers_newly.py +0 -113
- test/test_element_degenerate_points.py +0 -43
- test/test_elements_classify.py +0 -97
- test/test_elements_penbox.py +0 -22
- test/test_file_svg.py +0 -176
- test/test_fill.py +0 -155
- test/test_geomstr.py +0 -1523
- test/test_geomstr_nodes.py +0 -18
- test/test_imagetools_actualize.py +0 -306
- test/test_imagetools_wizard.py +0 -258
- test/test_kernel.py +0 -200
- test/test_laser_speeds.py +0 -3303
- test/test_length.py +0 -57
- test/test_lifecycle.py +0 -66
- test/test_operations.py +0 -251
- test/test_operations_hatch.py +0 -57
- test/test_ruida.py +0 -19
- test/test_spooler.py +0 -22
- test/test_tools_rasterplotter.py +0 -29
- test/test_wobble.py +0 -133
- test/test_zingl.py +0 -124
- {test → meerk40t/cylinder}/__init__.py +0 -0
- /meerk40t/{core/element_commands.py → cylinder/gui/__init__.py} +0 -0
- {meerk40t-0.9.3001.dist-info → meerk40t-0.9.7010.dist-info}/entry_points.txt +0 -0
meerk40t/core/cutplan.py
CHANGED
@@ -1,1316 +1,1843 @@
(Previous version: 1,316 lines removed. The deleted text is not rendered in this view; it opened with the same module docstring and a similar import block, e.g. `from ..svgelements import Group, Polygon`, before the file was rewritten.)
1
|
+
"""
|
2
|
+
CutPlan contains code to process LaserOperations into CutCode objects which are spooled.
|
3
|
+
|
4
|
+
CutPlan handles the various complicated algorithms to optimising the sequence of CutObjects to:
|
5
|
+
* Sort burns so that travel time is minimised
|
6
|
+
* Do burns with multiple passes all at the same time (Merge Passes)
|
7
|
+
* Sort burns for all operations at the same time rather than operation by operation
|
8
|
+
* Ensure that elements inside closed cut paths are burned before the outside path
|
9
|
+
* Group these inner burns so that one component on a sheet is completed before the next one is started
|
10
|
+
* Ensure that non-closed paths start from one of the ends and burned in one continuous burn
|
11
|
+
rather than being burned in 2 or more separate parts
|
12
|
+
* Split raster images in to self-contained areas to avoid sweeping over large empty areas
|
13
|
+
including splitting into individual small areas if burn inner first is set and then recombining
|
14
|
+
those inside the same curves so that raster burns are fully optimised.
|
15
|
+
"""
|
16
|
+
|
17
|
+
from copy import copy
|
18
|
+
from math import isinf
|
19
|
+
from os import times
|
20
|
+
from time import perf_counter, time
|
21
|
+
from typing import Optional
|
22
|
+
from functools import lru_cache
|
23
|
+
import numpy as np
|
24
|
+
|
25
|
+
from ..svgelements import Group, Matrix, Path, Polygon
|
26
|
+
from ..tools.geomstr import Geomstr
|
27
|
+
from ..tools.pathtools import VectorMontonizer
|
28
|
+
from .cutcode.cutcode import CutCode
|
29
|
+
from .cutcode.cutgroup import CutGroup
|
30
|
+
from .cutcode.cutobject import CutObject
|
31
|
+
from .cutcode.rastercut import RasterCut
|
32
|
+
from .node.node import Node
|
33
|
+
from .node.util_console import ConsoleOperation
|
34
|
+
from .units import Length, UNITS_PER_MM
|
35
|
+
|
36
|
+
"""
|
37
|
+
The time to compile does outweigh the benefit...
|
38
|
+
try:
|
39
|
+
from numba import jit
|
40
|
+
except Exception as e:
|
41
|
+
# Jit does not exist, add a dummy decorator and continue.
|
42
|
+
# print (f"Encountered error: {e}")
|
43
|
+
def jit(*args, **kwargs):
|
44
|
+
def inner(func):
|
45
|
+
return func
|
46
|
+
|
47
|
+
return inner
|
48
|
+
"""
|
49
|
+
|
50
|
+
class CutPlanningFailedError(Exception):
|
51
|
+
pass
|
52
|
+
|
53
|
+
|
54
|
+
class CutPlan:
|
55
|
+
"""
|
56
|
+
CutPlan is a centralized class to modify plans during cutplanning. It is typically is used to progress from
|
57
|
+
copied operations through the stages to being properly optimized cutcode.
|
58
|
+
|
59
|
+
The stages are:
|
60
|
+
1. Copy: This can be `copy-selected` or `copy` to decide which operations are moved initially into the plan.
|
61
|
+
a. Copied operations are copied to real. All the reference nodes are replaced with copies of the actual elements
|
62
|
+
2. Preprocess: Convert from scene space to device space and add validation operations.
|
63
|
+
3. Validate: Run all the validation operations, this could be anything the nodes added during preprocess.
|
64
|
+
a. Calls `execute` operation.
|
65
|
+
4. Blob: We convert all the operations/elements into proper cutcode. Some operations do not necessarily need to
|
66
|
+
convert to cutcode. They merely need to convert to some type of spoolable operation.
|
67
|
+
5. Preopt: Preoptimize adds in the relevant optimization operations into the cutcode.
|
68
|
+
6. Optimize: This calls the added functions set during the preopt process.
|
69
|
+
a. Calls `execute` operation.
|
70
|
+
"""
|
71
|
+
|
72
|
+
def __init__(self, name, planner):
|
73
|
+
self.name = name
|
74
|
+
self.context = planner
|
75
|
+
self.plan = list()
|
76
|
+
self.spool_commands = list()
|
77
|
+
self.commands = list()
|
78
|
+
self.channel = self.context.channel("optimize", timestamp=True)
|
79
|
+
self.outline = None
|
80
|
+
self._previous_bounds = None
|
81
|
+
|
82
|
+
def __str__(self):
|
83
|
+
parts = list()
|
84
|
+
parts.append(self.name)
|
85
|
+
if len(self.plan):
|
86
|
+
parts.append(f"#{len(self.plan)}")
|
87
|
+
for p in self.plan:
|
88
|
+
try:
|
89
|
+
parts.append(p.__name__)
|
90
|
+
except AttributeError:
|
91
|
+
parts.append(p.__class__.__name__)
|
92
|
+
else:
|
93
|
+
parts.append("-- Empty --")
|
94
|
+
return " ".join(parts)
|
95
|
+
|
96
|
+
def execute(self):
|
97
|
+
"""
|
98
|
+
Execute runs all the commands built during `preprocess` and `preopt` (preoptimize) stages.
|
99
|
+
|
100
|
+
If a command's execution adds a command to commands, this command is also executed.
|
101
|
+
@return:
|
102
|
+
"""
|
103
|
+
# Using copy of commands, so commands can add ops.
|
104
|
+
self._debug_me("At start of execute")
|
105
|
+
|
106
|
+
while self.commands:
|
107
|
+
# Executing command can add a command, complete them all.
|
108
|
+
commands = self.commands[:]
|
109
|
+
self.commands.clear()
|
110
|
+
for command in commands:
|
111
|
+
command()
|
112
|
+
self._debug_me(f"At end of {command.__name__}")
|
113
|
+
|
114
|
+
def final(self):
|
115
|
+
"""
|
116
|
+
Executes all the spool_commands built during the other stages.
|
117
|
+
|
118
|
+
If a command's execution added a spool_command we run it during final.
|
119
|
+
|
120
|
+
Final is called during at the time of spool. Just before the laserjob is created.
|
121
|
+
@return:
|
122
|
+
"""
|
123
|
+
# Using copy of commands, so commands can add ops.
|
124
|
+
while self.spool_commands:
|
125
|
+
# Executing command can add a command, complete them all.
|
126
|
+
commands = self.spool_commands[:]
|
127
|
+
self.spool_commands.clear()
|
128
|
+
for command in commands:
|
129
|
+
command()
|
130
|
+
|
131
|
+
def preprocess(self):
|
132
|
+
"""
|
133
|
+
Preprocess stage.
|
134
|
+
|
135
|
+
All operation nodes are called with the current context, the matrix converting from scene to device, and
|
136
|
+
commands.
|
137
|
+
|
138
|
+
Nodes are expected to convert relevant properties and shapes from scene coordinates to device coordinate systems
|
139
|
+
if they need operations. They are also expected to add any relevant commands to the commands list. The commands
|
140
|
+
list sequentially in the next stage.
|
141
|
+
"""
|
142
|
+
device = self.context.device
|
143
|
+
|
144
|
+
scene_to_device_matrix = device.view.matrix
|
145
|
+
|
146
|
+
# ==========
|
147
|
+
# Determine the jobs bounds.
|
148
|
+
# ==========
|
149
|
+
bounds = Node.union_bounds(self.plan, bounds=self._previous_bounds)
|
150
|
+
self._previous_bounds = bounds
|
151
|
+
if bounds is not None:
|
152
|
+
left, top, right, bottom = bounds
|
153
|
+
min_x = min(right, left)
|
154
|
+
min_y = min(top, bottom)
|
155
|
+
max_x = max(right, left)
|
156
|
+
max_y = max(top, bottom)
|
157
|
+
if isinf(min_x) or isinf(min_y) or isinf(max_x) or isinf(max_y):
|
158
|
+
# Infinite bounds are invalid.
|
159
|
+
self.outline = None
|
160
|
+
else:
|
161
|
+
self.outline = (
|
162
|
+
device.view.position(min_x, min_y, margins=False),
|
163
|
+
device.view.position(max_x, min_y, margins=False),
|
164
|
+
device.view.position(max_x, max_y, margins=False),
|
165
|
+
device.view.position(min_x, max_y, margins=False),
|
166
|
+
)
|
167
|
+
|
168
|
+
# ==========
|
169
|
+
# Query Placements
|
170
|
+
# ==========
|
171
|
+
placements = []
|
172
|
+
for place in self.plan:
|
173
|
+
if not hasattr(place, "type"):
|
174
|
+
continue
|
175
|
+
if place.type.startswith("place ") and (hasattr(place, "output") and place.output):
|
176
|
+
loops = 1
|
177
|
+
if hasattr(place, "loops") and place.loops > 1:
|
178
|
+
loops = place.loops
|
179
|
+
for idx in range(loops):
|
180
|
+
placements.extend(
|
181
|
+
place.placements(
|
182
|
+
self.context, self.outline, scene_to_device_matrix, self
|
183
|
+
)
|
184
|
+
)
|
185
|
+
if not placements:
|
186
|
+
# Absolute coordinates.
|
187
|
+
placements.append(scene_to_device_matrix)
|
188
|
+
|
189
|
+
original_ops = copy(self.plan)
|
190
|
+
if self.context.opt_raster_optimisation and self.context.do_optimization:
|
191
|
+
try:
|
192
|
+
margin = float(Length(self.context.opt_raster_opt_margin, "0"))
|
193
|
+
except (AttributeError, ValueError):
|
194
|
+
margin = 0
|
195
|
+
self.optimize_rasters(original_ops, "op raster", margin)
|
196
|
+
# We could do this as well, but images are burnt separately anyway...
|
197
|
+
# self.optimize_rasters(original_ops, "op image", margin)
|
198
|
+
self.plan.clear()
|
199
|
+
|
200
|
+
idx = 0
|
201
|
+
self.context.elements.mywordlist.push()
|
202
|
+
|
203
|
+
perform_simplify = (
|
204
|
+
self.context.opt_reduce_details and self.context.do_optimization
|
205
|
+
)
|
206
|
+
tolerance = self.context.opt_reduce_tolerance
|
207
|
+
for placement in placements:
|
208
|
+
# Adjust wordlist
|
209
|
+
if idx > 0:
|
210
|
+
self.context.elements.mywordlist.move_all_indices(1)
|
211
|
+
|
212
|
+
current_cool = 0
|
213
|
+
for original_op in original_ops:
|
214
|
+
# First, do we have a valid coolant aka airassist command?
|
215
|
+
# And is this relevant, as in does the device support it?
|
216
|
+
coolid = getattr(self.context.device, "device_coolant", "")
|
217
|
+
if hasattr(original_op, "coolant"):
|
218
|
+
cool = original_op.coolant
|
219
|
+
if cool is None:
|
220
|
+
cool = 0
|
221
|
+
if cool in (1, 2): # Explicit on / off
|
222
|
+
if cool != current_cool:
|
223
|
+
cmd = "coolant_on" if cool == 1 else "coolant_off"
|
224
|
+
if coolid:
|
225
|
+
coolop = ConsoleOperation(command=cmd)
|
226
|
+
self.plan.append(coolop)
|
227
|
+
else:
|
228
|
+
self.channel("The current device does not support a coolant method")
|
229
|
+
current_cool = cool
|
230
|
+
# Is there already a coolant operation?
|
231
|
+
if getattr(original_op, "type", "") == "util console":
|
232
|
+
if original_op.command == "coolant_on":
|
233
|
+
current_cool = 1
|
234
|
+
elif original_op.command == "coolant_off":
|
235
|
+
current_cool = 2
|
236
|
+
|
237
|
+
try:
|
238
|
+
op = original_op.copy_with_reified_tree()
|
239
|
+
except AttributeError:
|
240
|
+
op = original_op
|
241
|
+
if not hasattr(op, "type") or op.type is None:
|
242
|
+
self.plan.append(op)
|
243
|
+
continue
|
244
|
+
op_type = getattr(op, "type", "")
|
245
|
+
if op_type.startswith("place "):
|
246
|
+
continue
|
247
|
+
self.plan.append(op)
|
248
|
+
if (op_type.startswith("op") or op_type.startswith("util")) and hasattr(op, "preprocess"):
|
249
|
+
op.preprocess(self.context, placement, self)
|
250
|
+
if op_type.startswith("op"):
|
251
|
+
for node in op.flat():
|
252
|
+
if node is op:
|
253
|
+
continue
|
254
|
+
if hasattr(node, "geometry") and perform_simplify:
|
255
|
+
# We are still in scene reolution and not yet at device level
|
256
|
+
node.geometry = node.geometry.simplify(tolerance=tolerance)
|
257
|
+
if hasattr(node, "mktext") and hasattr(node, "_cache"):
|
258
|
+
newtext = self.context.elements.wordlist_translate(
|
259
|
+
node.mktext, elemnode=node, increment=False
|
260
|
+
)
|
261
|
+
oldtext = getattr(node, "_translated_text", "")
|
262
|
+
# print (f"Was called inside preprocess for {node.type} with {node.mktext}, old: {oldtext}, new:{newtext}")
|
263
|
+
if newtext != oldtext:
|
264
|
+
node._translated_text = newtext
|
265
|
+
kernel = self.context.elements.kernel
|
266
|
+
for property_op in kernel.lookup_all("path_updater/.*"):
|
267
|
+
property_op(kernel.root, node)
|
268
|
+
if hasattr(node, "_cache"):
|
269
|
+
node._cache = None
|
270
|
+
if hasattr(node, "preprocess"):
|
271
|
+
node.preprocess(self.context, placement, self)
|
272
|
+
idx += 1
|
273
|
+
self.context.elements.mywordlist.pop()
|
274
|
+
|
275
|
+
def _to_grouped_plan(self, plan):
|
276
|
+
"""
|
277
|
+
Break operations into grouped sequences of Operations and utility operations.
|
278
|
+
|
279
|
+
We can only merge between contiguous groups of operations. We cannot merge util node types with op node types.
|
280
|
+
|
281
|
+
Anything that does not have a type is likely able to spool, but cannot merge and are not grouped. Only grouped
|
282
|
+
operations are candidates for cutcode merging.
|
283
|
+
@return:
|
284
|
+
"""
|
285
|
+
last_type = None
|
286
|
+
group = list()
|
287
|
+
for c in plan:
|
288
|
+
c_type = (
|
289
|
+
c.type
|
290
|
+
if hasattr(c, "type") and c.type is not None
|
291
|
+
else type(c).__name__
|
292
|
+
)
|
293
|
+
if c_type.startswith("effect"):
|
294
|
+
# Effects should not be used here.
|
295
|
+
continue
|
296
|
+
if last_type is not None:
|
297
|
+
if c_type.startswith("op") != last_type.startswith("op"):
|
298
|
+
# This cannot merge
|
299
|
+
yield group
|
300
|
+
group = list()
|
301
|
+
group.append(c)
|
302
|
+
last_type = c_type
|
303
|
+
if group:
|
304
|
+
yield group
|
305
|
+
|
306
|
+
def _to_blob_plan_passes_first(self, grouped_plan):
|
307
|
+
"""
|
308
|
+
If Merge operations and not merge passes we need to iterate passes first and operations second.
|
309
|
+
|
310
|
+
This function is specific to that case, when passes first operations second.
|
311
|
+
|
312
|
+
Converts the operations to cutcode.
|
313
|
+
@param grouped_plan:
|
314
|
+
@return:
|
315
|
+
"""
|
316
|
+
for plan in grouped_plan:
|
317
|
+
pass_idx = 0
|
318
|
+
while True:
|
319
|
+
more_passes_possible = False
|
320
|
+
for op in plan:
|
321
|
+
if (
|
322
|
+
not hasattr(op, "type")
|
323
|
+
or op.type == "util console"
|
324
|
+
or (
|
325
|
+
not op.type.startswith("op")
|
326
|
+
and not op.type.startswith("util")
|
327
|
+
)
|
328
|
+
):
|
329
|
+
# This is an irregular object and can't become cutcode.
|
330
|
+
if pass_idx == 0:
|
331
|
+
# irregular objects have an implicit single pass.
|
332
|
+
yield op
|
333
|
+
continue
|
334
|
+
if pass_idx > op.implicit_passes - 1:
|
335
|
+
continue
|
336
|
+
more_passes_possible = True
|
337
|
+
yield from self._blob_convert(op, 1, 1, force_idx=pass_idx)
|
338
|
+
if not more_passes_possible:
|
339
|
+
# No operation needs additional passes.
|
340
|
+
break
|
341
|
+
pass_idx += 1
|
342
|
+
|
343
|
+
def _to_blob_plan(self, grouped_plan):
|
344
|
+
"""
|
345
|
+
Iterate operations first and passes second. Operation first mode. Passes are done within cutcode pass value.
|
346
|
+
|
347
|
+
Converts the operations to cutcode.
|
348
|
+
|
349
|
+
@param grouped_plan:
|
350
|
+
@return:
|
351
|
+
"""
|
352
|
+
context = self.context
|
353
|
+
for plan in grouped_plan:
|
354
|
+
for op in plan:
|
355
|
+
if not hasattr(op, "type") or op.type is None:
|
356
|
+
yield op
|
357
|
+
continue
|
358
|
+
if (
|
359
|
+
not op.type.startswith("op")
|
360
|
+
and not op.type.startswith("util")
|
361
|
+
or op.type == "util console"
|
362
|
+
):
|
363
|
+
yield op
|
364
|
+
continue
|
365
|
+
passes = op.implicit_passes
|
366
|
+
if context.opt_merge_passes and (
|
367
|
+
context.opt_nearest_neighbor or context.opt_inner_first
|
368
|
+
):
|
369
|
+
# Providing we do some sort of post-processing of blobs,
|
370
|
+
# then merge passes is handled by the greedy or inner_first algorithms
|
371
|
+
|
372
|
+
# So, we only need 1 copy and to set the passes.
|
373
|
+
yield from self._blob_convert(op, copies=1, passes=passes)
|
374
|
+
else:
|
375
|
+
# We do passes by making copies of the cutcode.
|
376
|
+
yield from self._blob_convert(op, copies=passes, passes=1)
|
377
|
+
|
378
|
+
def _blob_convert(self, op, copies, passes, force_idx=None):
|
379
|
+
"""
|
380
|
+
Converts the given op into cutcode. Provides `copies` copies of that cutcode, sets
|
381
|
+
the passes to passes for each cutcode object.
|
382
|
+
|
383
|
+
@param op:
|
384
|
+
@param copies:
|
385
|
+
@param passes:
|
386
|
+
@param force_idx:
|
387
|
+
@return:
|
388
|
+
"""
|
389
|
+
context = self.context
|
390
|
+
for pass_idx in range(copies):
|
391
|
+
# if the settings dictionary doesn't exist we use the defined instance dictionary
|
392
|
+
try:
|
393
|
+
settings_dict = op.settings
|
394
|
+
except AttributeError:
|
395
|
+
settings_dict = op.__dict__
|
396
|
+
# If passes isn't equal to implicit passes then we need a different settings to permit change
|
397
|
+
settings = (
|
398
|
+
settings_dict if op.implicit_passes == passes else dict(settings_dict)
|
399
|
+
)
|
400
|
+
cutcode = CutCode(
|
401
|
+
op.as_cutobjects(
|
402
|
+
closed_distance=context.opt_closed_distance,
|
403
|
+
passes=passes,
|
404
|
+
),
|
405
|
+
settings=settings,
|
406
|
+
)
|
407
|
+
if len(cutcode) == 0:
|
408
|
+
break
|
409
|
+
op_type = getattr(op, "type", "")
|
410
|
+
cutcode.constrained = op_type == "op cut" and context.opt_inner_first
|
411
|
+
cutcode.pass_index = pass_idx if force_idx is None else force_idx
|
412
|
+
cutcode.original_op = op_type
|
413
|
+
yield cutcode
|
414
|
+
|
415
|
+
def _to_merged_plan(self, blob_plan):
|
416
|
+
"""
|
417
|
+
Convert the blobbed plan of cutcode (rather than operations) into a merged plan for those cutcode operations
|
418
|
+
which are permitted to merge into the same cutcode object. All items within the same cutcode object are
|
419
|
+
candidates for optimizations. For example, if a HomeCut was merged LineCut in the cutcode, that entire group
|
420
|
+
would merge together, finding the most optimized time to home the machine (if optimization was enabled).
|
421
|
+
|
422
|
+
@param blob_plan:
|
423
|
+
@return:
|
424
|
+
"""
|
425
|
+
last_item = None
|
426
|
+
context = self.context
|
427
|
+
for blob in blob_plan:
|
428
|
+
try:
|
429
|
+
blob.jog_distance = context.opt_jog_minimum
|
430
|
+
blob.jog_enable = context.opt_rapid_between
|
431
|
+
except AttributeError:
|
432
|
+
pass
|
433
|
+
if last_item and self._should_merge(context, last_item, blob):
|
434
|
+
# Do not check empty plan.
|
435
|
+
if blob.constrained:
|
436
|
+
# if any merged object is constrained, then combined blob is also constrained.
|
437
|
+
last_item.constrained = True
|
438
|
+
last_item.extend(blob)
|
439
|
+
|
440
|
+
else:
|
441
|
+
if isinstance(blob, CutObject) and not isinstance(blob, CutCode):
|
442
|
+
cc = CutCode([blob])
|
443
|
+
cc.original_op = blob.original_op
|
444
|
+
cc.pass_index = blob.pass_index
|
445
|
+
last_item = cc
|
446
|
+
else:
|
447
|
+
last_item = blob
|
448
|
+
yield last_item
|
449
|
+
|
450
|
+
def _should_merge(self, context, last_item, current_item):
|
451
|
+
"""
|
452
|
+
Checks whether we should merge the blob with the current plan.
|
453
|
+
|
454
|
+
We can only merge things if we have the right objects and settings.
|
455
|
+
"""
|
456
|
+
if not isinstance(last_item, CutCode):
|
457
|
+
# The last plan item is not cutcode, merge is only between cutobjects adding to cutcode.
|
458
|
+
self.channel (f"last_item is no cutcode ({type(last_item).__name__}), can't merge")
|
459
|
+
return False
|
460
|
+
if not isinstance(current_item, CutObject):
|
461
|
+
# The object to be merged is not a cutObject and cannot be added to Cutcode.
|
462
|
+
self.channel (f"current_item is no cutcode ({type(current_item).__name__}), can't merge")
|
463
|
+
return False
|
464
|
+
last_op = last_item.original_op
|
465
|
+
if last_op is None:
|
466
|
+
last_op = ""
|
467
|
+
current_op = current_item.original_op
|
468
|
+
if current_op is None:
|
469
|
+
current_op = ""
|
470
|
+
if last_op.startswith("util") or current_op.startswith("util"):
|
471
|
+
self.channel (f"{last_op} / {current_op} - at least one is a util operation, can't merge")
|
472
|
+
return False
|
473
|
+
|
474
|
+
if (
|
475
|
+
not context.opt_merge_passes
|
476
|
+
and last_item.pass_index != current_item.pass_index
|
477
|
+
):
|
478
|
+
# Do not merge if opt_merge_passes is off, and pass_index do not match
|
479
|
+
self.channel (f"{last_item.pass_index} / {current_item.pass_index} - pass indices are different, can't merge")
|
480
|
+
return False
|
481
|
+
|
482
|
+
if (
|
483
|
+
not context.opt_merge_ops
|
484
|
+
and last_item.settings is not current_item.settings
|
485
|
+
):
|
486
|
+
# Do not merge if opt_merge_ops is off, and the original ops do not match
|
487
|
+
# Same settings object implies same original operation
|
488
|
+
self.channel (f"Settings do differ from {last_op} to {current_op} and merge ops= {context.opt_merge_ops}")
|
489
|
+
return False
|
490
|
+
if not context.opt_inner_first and last_item.original_op == "op cut":
|
491
|
+
# Do not merge if opt_inner_first is off, and operation was originally a cut.
|
492
|
+
self.channel (f"Inner first {context.opt_inner_first}, last op= {last_item.original_op} - Last op was a cut, can't merge")
|
493
|
+
return False
|
494
|
+
return True # No reason these should not be merged.
|
495
|
+
|
496
|
+
def _debug_me(self, message):
|
497
|
+
debug_level = 0
|
498
|
+
if not self.channel:
|
499
|
+
return
|
500
|
+
self.channel(f"Plan at {message}")
|
501
|
+
for pitem in self.plan:
|
502
|
+
if isinstance(pitem, (tuple, list)):
|
503
|
+
self.channel(f"-{type(pitem).__name__}: {len(pitem)} items")
|
504
|
+
if debug_level > 0:
|
505
|
+
for cut in pitem:
|
506
|
+
if isinstance(cut, (tuple, list)):
|
507
|
+
self.channel(
|
508
|
+
f"--{type(pitem).__name__}: {type(cut).__name__}: {len(cut)} items"
|
509
|
+
)
|
510
|
+
else:
|
511
|
+
self.channel(
|
512
|
+
f"--{type(pitem).__name__}: {type(cut).__name__}: --childless--"
|
513
|
+
)
|
514
|
+
|
515
|
+
elif hasattr(pitem, "children"):
|
516
|
+
self.channel(
|
517
|
+
f" {type(pitem).__name__}: {len(pitem.children)} children"
|
518
|
+
)
|
519
|
+
else:
|
520
|
+
self.channel(f" {type(pitem).__name__}: --childless--")
|
521
|
+
|
522
|
+
self.channel("------------")
|
523
|
+
    def geometry(self):
        """
        Geometry converts User operations to naked geomstr objects.
        """

        if not self.plan:
            return

        plan = list(self.plan)
        self.plan.clear()
        g = Geomstr()
        settings_index = 0
        for c in plan:
            c_type = (
                c.type
                if hasattr(c, "type") and c.type is not None
                else type(c).__name__
            )
            settings_index += 1
            if hasattr(c, "settings"):
                settings = dict(c.settings)
            else:
                settings = dict(c.__dict__)
            g.settings(settings_index, settings)

            if c_type in ("op cut", "op engrave"):
                for elem in c.children:
                    if hasattr(elem, "final_geometry"):
                        start_index = g.index
                        g.append(elem.final_geometry())
                        end_index = g.index
                        g.flag_settings(settings_index, start_index, end_index)
                    elif hasattr(elem, "as_geometry"):
                        start_index = g.index
                        g.append(elem.as_geometry())
                        end_index = g.index
                        g.flag_settings(settings_index, start_index, end_index)
            elif c_type in ("op raster", "op image"):
                for elem in c.children:
                    if hasattr(elem, "as_image"):
                        settings["raster"] = True
                        image, box = elem.as_image()
                        m = elem.matrix
                        start_index = g.index
                        image_geom = Geomstr.image(image)
                        image_geom.transform(m)
                        g.append(image_geom)
                        end_index = g.index
                        g.flag_settings(settings_index, start_index, end_index)
            else:
                if g:
                    self.plan.append(g)
                    g = Geomstr()
                self.plan.append(c)
        if g:
            self.plan.append(g)

    def blob(self):
        """
        Blob converts User operations to CutCode objects.

        In order to have CutCode objects in the correct sequence for merging we need to:
        1. Break operations into grouped sequences of Operations and utility operations.
           We can only merge between contiguous groups of operations (with option set)
        2. The sequence of CutObjects needs to reflect merge settings
           Normal sequence is to iterate operations and then passes for each operation.
           With Merge ops and not Merge passes, we need to iterate on passes first and then ops within.
        """

        if not self.plan:
            return
        context = self.context
        grouped_plan = list(self._to_grouped_plan(self.plan))
        if context.opt_merge_ops and not context.opt_merge_passes:
            blob_plan = list(self._to_blob_plan_passes_first(grouped_plan))
        else:
            blob_plan = list(self._to_blob_plan(grouped_plan))
        self.plan.clear()
        self.plan.extend(self._to_merged_plan(blob_plan))

    def preopt(self):
        """
        Add commands for optimize stage. This stage tends to do very little but checks the settings and adds the
        relevant operations.

        @return:
        """
        context = self.context
        has_cutcode = False
        for op in self.plan:
            try:
                if isinstance(op, CutCode):
                    has_cutcode = True
                    break
            except AttributeError:
                pass
        if not has_cutcode:
            return

        if context.opt_effect_combine:
            self.commands.append(self.combine_effects)

        if context.opt_reduce_travel and (
            context.opt_nearest_neighbor or context.opt_2opt
        ):
            if context.opt_nearest_neighbor:
                self.commands.append(self.optimize_travel)
            if context.opt_2opt and not context.opt_inner_first:
                self.commands.append(self.optimize_travel_2opt)

        elif context.opt_inner_first:
            self.commands.append(self.optimize_cuts)
        self.commands.append(self.merge_cutcode)
        if context.opt_reduce_directions:
            pass
        if context.opt_remove_overlap:
            pass

    def combine_effects(self):
        """
        Will browse through the cutcode entries grouping everything together
        that has a common 'source' attribute
        """

        def update_group_sequence(group):
            if len(group) == 0:
                return
            glen = len(group)
            for i, cut_obj in enumerate(group):
                cut_obj.first = i == 0
                cut_obj.last = i == glen - 1
                next_idx = i + 1 if i < glen - 1 else 0
                cut_obj.next = group[next_idx]
                cut_obj.previous = group[i - 1]
            group.path = group._geometry.as_path()

        def update_busy_info(busy, idx, l_pitem, plan_idx, l_plan):
            busy.change(
                msg=_("Combine effect primitives")
                + f" {idx + 1}/{l_pitem} ({plan_idx + 1}/{l_plan})",
                keep=1,
            )
            busy.show()

        def process_plan_item(pitem, busy, total, plan_idx, l_plan):
            grouping = {}
            l_pitem = len(pitem)
            to_be_deleted = []
            combined = 0
            for idx, cut in enumerate(pitem):
                total += 1
                if busy.shown and total % 100 == 0:
                    update_busy_info(busy, idx, l_pitem, plan_idx, l_plan)
                if not isinstance(cut, CutGroup) or cut.origin is None:
                    continue
                combined += process_cut(cut, grouping, pitem, idx, to_be_deleted)
            return grouping, to_be_deleted, combined, total

        def process_cut(cut, grouping, pitem, idx, to_be_deleted):
            if cut.origin not in grouping:
                grouping[cut.origin] = idx
                return 0
            mastercut = grouping[cut.origin]
            geom = cut._geometry
            pitem[mastercut].skip = True
            pitem[mastercut].extend(cut)
            pitem[mastercut]._geometry.append(geom)
            cut.clear()
            to_be_deleted.append(idx)
            return 1

        busy = self.context.kernel.busyinfo
        _ = self.context.kernel.translation
        if busy.shown:
            busy.change(msg=_("Combine effect primitives"), keep=1)
            busy.show()
        combined = 0
        l_plan = len(self.plan)
        total = -1
        group_count = 0
        for plan_idx, pitem in enumerate(self.plan):
            # We don't combine across plan boundaries
            if not isinstance(pitem, CutGroup):
                continue
            grouping, to_be_deleted, item_combined, total = process_plan_item(
                pitem, busy, total, plan_idx, l_plan
            )
            combined += item_combined
            group_count += len(grouping)

            for key, item in grouping.items():
                update_group_sequence(pitem[item])

            for p in reversed(to_be_deleted):
                pitem.pop(p)

        if self.channel:
            self.channel(f"Combined: {combined}, groups: {group_count}")

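    # Editor's note - an illustrative sketch, not part of the package: combine_effects
    # keys CutGroups by their `origin` and folds every later group with the same origin
    # into the first ("master") one. The same idea on plain dictionaries, assuming each
    # item looks like {"origin": ..., "data": [...]}:
    #
    #     def combine_by_origin(items):
    #         grouping = {}  # origin -> index of the master item
    #         for idx, item in enumerate(items):
    #             key = item["origin"]
    #             if key not in grouping:
    #                 grouping[key] = idx
    #                 continue
    #             items[grouping[key]]["data"].extend(item["data"])
    #             item["data"].clear()
    #         return [item for item in items if item["data"]]
    #
    #     combine_by_origin([{"origin": "a", "data": [1]},
    #                        {"origin": "a", "data": [2]}])  # one entry left, data [1, 2]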
    def optimize_travel_2opt(self):
        """
        Optimize travel 2opt at optimize stage on cutcode
        @return:
        """
        busy = self.context.kernel.busyinfo
        _ = self.context.kernel.translation
        if busy.shown:
            busy.change(msg=_("Optimize inner travel"), keep=1)
            busy.show()
        channel = self.context.channel("optimize", timestamp=True)
        for i, c in enumerate(self.plan):
            if isinstance(c, CutCode):
                self.plan[i] = short_travel_cutcode_2opt(
                    self.plan[i], kernel=self.context.kernel, channel=channel
                )

    def optimize_cuts(self):
        """
        Optimize cuts at optimize stage on cutcode
        @return:
        """
        # Update Info-panel if displayed
        busy = self.context.kernel.busyinfo
        _ = self.context.kernel.translation
        if busy.shown:
            busy.change(msg=_("Optimize cuts"), keep=1)
            busy.show()
        tolerance = 0
        if self.context.opt_inner_first:
            stol = self.context.opt_inner_tolerance
            try:
                tolerance = (
                    float(Length(stol))
                    * 2
                    / (
                        self.context.device.view.native_scale_x
                        + self.context.device.view.native_scale_y
                    )
                )
            except ValueError:
                pass
        # print(f"Tolerance: {tolerance}")

        channel = self.context.channel("optimize", timestamp=True)
        grouped_inner = self.context.opt_inner_first and self.context.opt_inners_grouped
        for i, c in enumerate(self.plan):
            if busy.shown:
                busy.change(
                    msg=_("Optimize cuts") + f" {i + 1}/{len(self.plan)}", keep=1
                )
                busy.show()
            if isinstance(c, CutCode):
                if c.constrained:
                    self.plan[i] = inner_first_ident(
                        c,
                        kernel=self.context.kernel,
                        channel=channel,
                        tolerance=tolerance,
                    )
                    c = self.plan[i]
                self.plan[i] = inner_selection_cutcode(
                    c,
                    channel=channel,
                    grouped_inner=grouped_inner,
                )

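    # Editor's note - an illustrative sketch, not part of the package: the tolerance above
    # converts a user length into device steps by dividing by the mean of the X/Y native
    # scales. With hypothetical numbers - opt_inner_tolerance resolving to 2540 internal
    # units and native scales of 2.0 each - the result would be:
    #
    #     tolerance = 2540 * 2 / (2.0 + 2.0)  # = 1270 device steps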
    def optimize_travel(self):
        """
        Optimize travel at optimize stage on cutcode.
        @return:
        """
        # Update Info-panel if displayed
        busy = self.context.kernel.busyinfo
        _ = self.context.kernel.translation
        if busy.shown:
            busy.change(msg=_("Optimize travel"), keep=1)
            busy.show()
        try:
            last = self.context.device.native
        except AttributeError:
            last = None
        tolerance = 0
        if self.context.opt_inner_first:
            stol = self.context.opt_inner_tolerance
            try:
                tolerance = (
                    float(Length(stol))
                    * 2
                    / (
                        self.context.device.view.native_scale_x
                        + self.context.device.view.native_scale_y
                    )
                )
            except ValueError:
                pass
        # print(f"Tolerance: {tolerance}")

        channel = self.context.channel("optimize", timestamp=True)
        grouped_inner = self.context.opt_inner_first and self.context.opt_inners_grouped
        for i, c in enumerate(self.plan):
            if busy.shown:
                busy.change(
                    msg=_("Optimize travel") + f" {i + 1}/{len(self.plan)}", keep=1
                )
                busy.show()

            if isinstance(c, CutCode):
                if c.constrained:
                    self.plan[i] = inner_first_ident(
                        c,
                        kernel=self.context.kernel,
                        channel=channel,
                        tolerance=tolerance,
                    )
                    c = self.plan[i]
                if last is not None:
                    c._start_x, c._start_y = last
                self.plan[i] = short_travel_cutcode(
                    c,
                    kernel=self.context.kernel,
                    channel=channel,
                    complete_path=self.context.opt_complete_subpaths,
                    grouped_inner=grouped_inner,
                    hatch_optimize=self.context.opt_effect_optimize,
                )
                last = self.plan[i].end

    def merge_cutcode(self):
        """
        Merge all adjacent optimized cutcode into single cutcode objects.
        @return:
        """
        busy = self.context.kernel.busyinfo
        _ = self.context.kernel.translation
        if busy.shown:
            busy.change(msg=_("Merging cutcode"), keep=1)
            busy.show()
        for i in range(len(self.plan) - 1, 0, -1):
            cur = self.plan[i]
            prev = self.plan[i - 1]
            if isinstance(cur, CutCode) and isinstance(prev, CutCode):
                prev.extend(cur)
                del self.plan[i]

    def clear(self):
        self._previous_bounds = None
        self.plan.clear()
        self.commands.clear()

    def optimize_rasters(self, operation_list, op_type, margin):
        def generate_clusters(operation):
            def overlapping(bounds1, bounds2, margin):
                # The rectangles don't overlap if
                # one rectangle's minimum in some dimension
                # is greater than the other's maximum in
                # that dimension.
                flagx = (bounds1[0] > bounds2[2] + margin) or (
                    bounds2[0] > bounds1[2] + margin
                )
                flagy = (bounds1[1] > bounds2[3] + margin) or (
                    bounds2[1] > bounds1[3] + margin
                )
                return bool(not (flagx or flagy))

            clusters = list()
            cluster_bounds = list()
            for child in operation.children:
                try:
                    if child.type == "reference":
                        child = child.node
                    bb = child.paint_bounds
                except AttributeError:
                    # Either no element node or does not have bounds
                    continue
                clusters.append([child])
                cluster_bounds.append(
                    (
                        bb[0],
                        bb[1],
                        bb[2],
                        bb[3],
                    )
                )

            def detail_overlap(index1, index2):
                # But is there a real overlap, or just one with the union bounds?
                for outer_node in clusters[index1]:
                    try:
                        bb_outer = outer_node.paint_bounds
                    except AttributeError:
                        continue
                    for inner_node in clusters[index2]:
                        try:
                            bb_inner = inner_node.paint_bounds
                        except AttributeError:
                            continue
                        if overlapping(bb_outer, bb_inner, margin):
                            return True
                # We did not find anything...
                return False

            needs_repeat = True
            while needs_repeat:
                needs_repeat = False
                for outer_idx in range(len(clusters) - 1, -1, -1):
                    # Loop downwards as we are manipulating the arrays
                    bb = cluster_bounds[outer_idx]
                    for inner_idx in range(outer_idx - 1, -1, -1):
                        cc = cluster_bounds[inner_idx]
                        if not overlapping(bb, cc, margin):
                            continue
                        # Overlap!
                        # print (f"Reuse cluster {inner_idx} for {outer_idx}")
                        real_overlap = detail_overlap(outer_idx, inner_idx)
                        if real_overlap:
                            needs_repeat = True
                            # We need to extend the inner cluster by the outer
                            clusters[inner_idx].extend(clusters[outer_idx])
                            cluster_bounds[inner_idx] = (
                                min(bb[0], cc[0]),
                                min(bb[1], cc[1]),
                                max(bb[2], cc[2]),
                                max(bb[3], cc[3]),
                            )
                            clusters.pop(outer_idx)
                            cluster_bounds.pop(outer_idx)
                            # We are done with the inner loop, as we effectively
                            # destroyed the cluster element we compared
                            break

            return clusters

        stime = perf_counter()
        scount = 0
        ecount = 0
        for idx in range(len(operation_list) - 1, -1, -1):
            op = operation_list[idx]
            if (
                not hasattr(op, "type")
                or not hasattr(op, "children")
                or op.type != op_type
            ):
                # That's not what we are looking for
                continue
            scount += 1
            clusters = generate_clusters(op)
            ecount += len(clusters)
            if len(clusters) > 0:
                # Create cluster copies of the raster op
                for entry in clusters:
                    newop = copy(op)
                    newop._references.clear()
                    for node in entry:
                        newop.add_reference(node)
                    newop.set_dirty_bounds()
                    operation_list.insert(idx + 1, newop)

                # And remove the original one...
                operation_list.pop(idx)
        etime = perf_counter()
        if self.channel:
            self.channel(
                f"Optimise {op_type} finished after {etime-stime:.2f} seconds, inflated {scount} operations to {ecount}"
            )


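# Editor's note - an illustrative sketch, not part of the package: the clustering above
# rests on the axis-aligned bounding-box test in `overlapping`: two boxes
# (minx, miny, maxx, maxy) are disjoint when one box's minimum exceeds the other's
# maximum (plus margin) in either dimension. A standalone version of that predicate:
#
#     def boxes_overlap(a, b, margin=0):
#         separated_x = a[0] > b[2] + margin or b[0] > a[2] + margin
#         separated_y = a[1] > b[3] + margin or b[1] > a[3] + margin
#         return not (separated_x or separated_y)
#
#     boxes_overlap((0, 0, 10, 10), (12, 0, 20, 10))            # False
#     boxes_overlap((0, 0, 10, 10), (12, 0, 20, 10), margin=5)  # True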
def is_inside(inner, outer, tolerance=0, resolution=50):
    """
    Test that the inner path is inside the outer path.
    @param inner: inner path
    @param outer: outer path
    @param tolerance: 0
    @return: whether inner is wholly inside outer.
    """

    def convex_geometry(raster) -> Geomstr:
        dx = raster.bounding_box[0]
        dy = raster.bounding_box[1]
        dw = raster.bounding_box[2] - raster.bounding_box[0]
        dh = raster.bounding_box[3] - raster.bounding_box[1]
        if raster.image is None:
            return Geomstr.rect(dx, dy, dw, dh)
        image_np = np.array(raster.image.convert("L"))
        # Find non-white pixels
        # Iterate over each row in the image
        left_side = []
        right_side = []
        for y in range(image_np.shape[0]):
            row = image_np[y]
            non_white_indices = np.where(row < 255)[0]

            if non_white_indices.size > 0:
                leftmost = non_white_indices[0]
                rightmost = non_white_indices[-1]
                left_side.append((leftmost, y))
                right_side.insert(0, (rightmost, y))
        left_side.extend(right_side)
        non_white_pixels = left_side
        # Compute convex hull
        pts = list(Geomstr.convex_hull(None, non_white_pixels))
        if pts:
            pts.append(pts[0])
        geom = Geomstr.lines(*pts)
        sx = dw / raster.image.width
        sy = dh / raster.image.height
        matrix = Matrix()
        matrix.post_scale(sx, sy)
        matrix.post_translate(dx, dy)
        geom.transform(matrix)
        return geom

    # We still consider a path to be inside another path if it is
    # within a certain tolerance
    inner_path = inner
    outer_path = outer
    if outer == inner:  # This is the same object.
        return False
    if hasattr(inner, "path") and inner.path is not None:
        inner_path = inner.path
    if hasattr(outer, "path") and outer.path is not None:
        outer_path = outer.path
    if not hasattr(inner, "bounding_box"):
        inner.bounding_box = Group.union_bbox([inner_path])
    if not hasattr(outer, "bounding_box"):
        outer.bounding_box = Group.union_bbox([outer_path])
    if outer.bounding_box is None:
        return False
    if inner.bounding_box is None:
        return False
    if isinstance(inner, RasterCut):
        if not hasattr(inner, "convex_path"):
            inner.convex_path = convex_geometry(inner).as_path()
        inner_path = inner.convex_path
    # # Raster is inner if the bboxes overlap anywhere
    # if isinstance(inner, RasterCut) and not hasattr(inner, "path"):
    #     image = inner.image
    #     return (
    #         inner.bounding_box[0] <= outer.bounding_box[2] + tolerance
    #         and inner.bounding_box[1] <= outer.bounding_box[3] + tolerance
    #         and inner.bounding_box[2] >= outer.bounding_box[0] - tolerance
    #         and inner.bounding_box[3] >= outer.bounding_box[1] - tolerance
    #     )
    if outer.bounding_box[0] > inner.bounding_box[2] + tolerance:
        # outer minx > inner maxx (is not contained)
        return False
    if outer.bounding_box[1] > inner.bounding_box[3] + tolerance:
        # outer miny > inner maxy (is not contained)
        return False
    if outer.bounding_box[2] < inner.bounding_box[0] - tolerance:
        # outer maxx < inner minx (is not contained)
        return False
    if outer.bounding_box[3] < inner.bounding_box[1] - tolerance:
        # outer maxy < inner miny (is not contained)
        return False

    # Inner bbox is entirely inside outer bbox,
    # however that does not mean that inner is actually inside outer
    # i.e. inner could be small and between outer and the bbox corner,
    # or small and contained in a concave indentation.
    #
    # VectorMontonizer can determine whether a point is inside a polygon.
    # The code below uses a brute force approach by considering a fixed number of points,
    # however we should consider a future enhancement whereby we create
    # a polygon more intelligently based on size and curvature
    # i.e. larger bboxes need more points and
    # tighter curves need more points (i.e. compare vector directions)

    def vm_code(outer, outer_polygon, inner, inner_polygon):
        if not hasattr(outer, "vm"):
            vm = VectorMontonizer()
            vm.add_pointlist(outer_polygon)
            outer.vm = vm
        for gp in inner_polygon:
            if not outer.vm.is_point_inside(gp[0], gp[1], tolerance=tolerance):
                return False
        return True

    def scanbeam_code_not_working_reliably(outer_cut, outer_path, inner_cut, inner_path):
        from ..tools.geomstr import Polygon as Gpoly
        from ..tools.geomstr import Scanbeam

        if not hasattr(outer_cut, "sb"):
            pg = outer_path.npoint(np.linspace(0, 1, 1001), error=1e4)
            pg = pg[:, 0] + pg[:, 1] * 1j

            outer_path = Gpoly(*pg)
            sb = Scanbeam(outer_path.geomstr)
            outer_cut.sb = sb
        p = inner_path.npoint(np.linspace(0, 1, 101), error=1e4)
        points = p[:, 0] + p[:, 1] * 1j

        q = outer_cut.sb.points_in_polygon(points)
        return q.all()

    def raycasting_code_old(outer_polygon, inner_polygon):
        # The time to compile is outweighing the benefits...

        def ray_tracing(x, y, poly, tolerance):
            def sq_length(a, b):
                return a * a + b * b

            tolerance_square = tolerance * tolerance
            n = len(poly)
            inside = False
            xints = 0

            p1x, p1y = poly[0]
            old_sq_dist = sq_length(p1x - x, p1y - y)
            for i in range(n + 1):
                p2x, p2y = poly[i % n]
                new_sq_dist = sq_length(p2x - x, p2y - y)
                # We are approximating the edge to an extremely thin ellipse and see
                # whether our point is on that ellipse
                reldist = (
                    old_sq_dist
                    + new_sq_dist
                    + 2.0 * np.sqrt(old_sq_dist * new_sq_dist)
                    - sq_length(p2x - p1x, p2y - p1y)
                )
                if reldist < tolerance_square:
                    return True

                if y > min(p1y, p2y):
                    if y <= max(p1y, p2y):
                        if x <= max(p1x, p2x):
                            if p1y != p2y:
                                xints = (y - p1y) * (p2x - p1x) / (p2y - p1y) + p1x
                            if p1x == p2x or x <= xints:
                                inside = not inside
                p1x, p1y = p2x, p2y
                old_sq_dist = new_sq_dist
            return inside

        points = inner_polygon
        vertices = outer_polygon
        return all(ray_tracing(p[0], p[1], vertices, tolerance) for p in points)

    """
    Unfortunately this does not work if one of the objects is concave.

    def sat_code(outer_polygon, inner_polygon):
        # https://en.wikipedia.org/wiki/Hyperplane_separation_theorem

        # Separating Axis Theorem (SAT) for Polygon Containment

        # The Separating Axis Theorem (SAT) is a powerful technique for collision detection
        # between convex polygons. It can also be adapted to determine polygon containment.

        # How SAT Works:

        # Generate Axes: For each edge of the outer polygon, create a perpendicular axis.
        # Project Polygons: Project both the inner and outer polygons onto each axis.
        # Check Overlap: If the projections of the inner polygon are completely contained
        # within the projections of the outer polygon on all axes,
        # then the inner polygon is fully contained.

        # Convex Polygons: SAT is most efficient for convex polygons.
        # For concave polygons, you might need to decompose them into convex sub-polygons.
        # Computational Cost: SAT can be computationally expensive for large numbers of polygons.
        # In such cases, spatial indexing can be used to reduce the number of pairwise comparisons.
        def project_polygon(polygon, axis):
            # Projects a polygon onto a given axis.
            min_projection, max_projection = np.dot(polygon, axis).min(), np.dot(polygon, axis).max()
            return min_projection, max_projection

        def is_polygon_inside_polygon_sat(inner_polygon, outer_polygon):
            # Determines if one polygon is fully inside another using the Separating Axis Theorem.

            # Args:
            #     inner_polygon: A 2D array of inner polygon vertices.
            #     outer_polygon: A 2D array of outer polygon vertices.

            # Returns:
            #     True if the inner polygon is fully inside the outer polygon, False otherwise.

            for i in range(len(outer_polygon)):
                # Calculate the axis perpendicular to the current edge
                edge = outer_polygon[i] - outer_polygon[(i + 1) % len(outer_polygon)]
                axis = np.array([-edge[1], edge[0]])

                # Project both polygons onto the axis
                min_inner, max_inner = project_polygon(inner_polygon, axis)
                min_outer, max_outer = project_polygon(outer_polygon, axis)

                # Check if the inner polygon's projection is fully contained within the outer polygon's projection
                if not (min_inner >= min_outer and max_inner <= max_outer):
                    return False

            return True

        return is_polygon_inside_polygon_sat(inner_polygon, outer_polygon)
    """

    def raycasting_code_new(outer_polygon, inner_polygon):

        def precompute_intersections(polygon):
            slopes = []
            intercepts = []
            is_vertical = []
            for i in range(len(polygon)):
                p1, p2 = polygon[i], polygon[(i + 1) % len(polygon)]
                dx = p2[0] - p1[0]
                dy = p2[1] - p1[1]

                if dx == 0:
                    slopes.append(np.nan)  # Use NaN to indicate vertical line
                    intercepts.append(p1[0])
                    is_vertical.append(True)
                else:
                    slope = dy / dx
                    intercept = p1[1] - slope * p1[0]
                    slopes.append(slope)
                    intercepts.append(intercept)
                    is_vertical.append(False)

            return np.array(slopes), np.array(intercepts), np.array(is_vertical)

        def point_in_polygon(x, y, slopes, intercepts, is_vertical, polygon):
            inside = False
            for i in range(len(slopes)):
                slope = slopes[i]
                intercept = intercepts[i]
                p1, p2 = polygon[i], polygon[(i + 1) % len(polygon)]

                if np.isnan(slope):  # Vertical line
                    if x == intercept and min(p1[1], p2[1]) <= y <= max(p1[1], p2[1]):
                        inside = not inside
                else:
                    if y > min(p1[1], p2[1]):
                        if y <= max(p1[1], p2[1]):
                            if x <= max(p1[0], p2[0]):
                                if p1[1] != p2[1]:
                                    xints = (y - intercept) / slope
                                if p1[0] == p2[0] or x <= xints:
                                    inside = not inside

            return inside

        def is_polygon_inside(outer_polygon, inner_polygon):
            slopes, intercepts, is_vertical = precompute_intersections(outer_polygon)
            for point in inner_polygon:
                if not point_in_polygon(
                    point[0], point[1], slopes, intercepts, is_vertical, outer_polygon
                ):
                    return False
            return True

        return is_polygon_inside(outer_polygon=outer_polygon, inner_polygon=inner_polygon)

    def shapely_code(outer_polygon, inner_polygon):
        from shapely.geometry import Polygon

        # Create Shapely Polygon objects
        poly_a = Polygon(inner_polygon)
        poly_b = Polygon(outer_polygon)
        # Check for containment
        return poly_a.within(poly_b)

    @lru_cache(maxsize=128)
    def get_polygon(path, resolution):
        geom = Geomstr.svg(path)
        polygon = np.array(
            list(
                (p.real, p.imag)
                for p in geom.as_equal_interpolated_points(distance=resolution)
            )
        )
        return polygon

    """
    # Testroutines
    from time import perf_counter
    inner_polygon = get_polygon(inner_path.d(), resolution)
    outer_polygon = get_polygon(outer_path.d(), resolution)

    t0 = perf_counter()
    res0 = vm_code(outer, outer_polygon, inner, inner_polygon)
    t1 = perf_counter()
    res1 = scanbeam_code_not_working_reliably(outer, outer_path, inner, inner_path)
    t2 = perf_counter()
    res2 = raycasting_code_old(outer_polygon, inner_polygon)
    t3 = perf_counter()
    # res3 = sat_code(outer, outer_path, inner, inner_path)
    res3 = raycasting_code_new(outer_polygon, inner_polygon)
    t4 = perf_counter()
    try:
        import shapely
        res4 = shapely_code(outer_polygon, inner_polygon)
        t5 = perf_counter()
    except ImportError:
        res4 = "Shapely missing"
        t5 = t4
    print(f"Tolerance: {tolerance}, vm={res0} in {t1 - t0:.3f}s, sb={res1} in {t1 - t0:.3f}s, ray-old={res2} in {t2 - t1:.3f}s, ray-new={res3} in {t3 - t2:.3f}s, shapely={res4} in {t4 - t3:.3f}s")
    """
    inner_polygon = get_polygon(inner_path.d(), resolution)
    outer_polygon = get_polygon(outer_path.d(), resolution)
    try:
        import shapely

        return shapely_code(outer_polygon, inner_polygon)
    except ImportError:
        return vm_code(outer, outer_polygon, inner, inner_polygon)
    # return raycasting_code_new(outer_polygon, inner_polygon)
    # return scanbeam_code_not_working_reliably(outer, outer_path, inner, inner_path)
    # return vm_code(outer, outer_path, inner, inner_path)
    return

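# Editor's note - an illustrative sketch, not part of the package: is_inside() first
# rejects on bounding boxes and only then samples both paths into polygons via
# get_polygon() and tests every sampled inner point for containment (shapely when
# available, otherwise the VectorMontonizer fallback). The bbox pre-check on plain
# (minx, miny, maxx, maxy) tuples looks like this:
#
#     def bbox_may_contain(outer_bb, inner_bb, tolerance=0):
#         return not (
#             outer_bb[0] > inner_bb[2] + tolerance
#             or outer_bb[1] > inner_bb[3] + tolerance
#             or outer_bb[2] < inner_bb[0] - tolerance
#             or outer_bb[3] < inner_bb[1] - tolerance
#         )
#
#     bbox_may_contain((0, 0, 100, 100), (10, 10, 20, 20))  # True - candidate only,
#     # the exact polygon test still has to confirm real containment.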
def reify_matrix(self):
    """Apply the matrix to the path and reset matrix."""
    self.element = abs(self.element)
    self.scene_bounds = None


# def bounding_box(elements):
#     if isinstance(elements, SVGElement):
#         elements = [elements]
#     elif isinstance(elements, list):
#         try:
#             elements = [e.object for e in elements if isinstance(e.object, SVGElement)]
#         except AttributeError:
#             pass
#     boundary_points = []
#     for e in elements:
#         box = e.bbox(False)
#         if box is None:
#             continue
#         top_left = e.transform.point_in_matrix_space([box[0], box[1]])
#         top_right = e.transform.point_in_matrix_space([box[2], box[1]])
#         bottom_left = e.transform.point_in_matrix_space([box[0], box[3]])
#         bottom_right = e.transform.point_in_matrix_space([box[2], box[3]])
#         boundary_points.append(top_left)
#         boundary_points.append(top_right)
#         boundary_points.append(bottom_left)
#         boundary_points.append(bottom_right)
#     if len(boundary_points) == 0:
#         return None
#     xmin = min([e[0] for e in boundary_points])
#     ymin = min([e[1] for e in boundary_points])
#     xmax = max([e[0] for e in boundary_points])
#     ymax = max([e[1] for e in boundary_points])
#     return xmin, ymin, xmax, ymax


def correct_empty(context: CutGroup):
    """
    Iterates through backwards deleting any entries that are empty.
    """
    for index in range(len(context) - 1, -1, -1):
        c = context[index]
        if not isinstance(c, CutGroup):
            continue
        correct_empty(c)
        if len(c) == 0:
            del context[index]


def inner_first_ident(context: CutGroup, kernel=None, channel=None, tolerance=0):
    """
    Identifies closed CutGroups and then identifies any other CutGroups which
    are entirely inside.

    The CutGroup candidate generator uses this information to not offer the outer CutGroup
    as a candidate for a burn unless all contained CutGroups are cut.

    The Cutcode is resequenced in either short_travel_cutcode or inner_selection_cutcode
    based on this information.
    """
    if channel:
        start_time = time()
        start_times = times()
        channel("Executing Inner-First Identification")

    groups = [cut for cut in context if isinstance(cut, (CutGroup, RasterCut))]
    closed_groups = [g for g in groups if isinstance(g, CutGroup) and g.closed]
    total_pass = len(groups) * len(closed_groups)
    context.contains = closed_groups
    if channel:
        channel(
            f"Compare {len(groups)} groups against {len(closed_groups)} closed groups"
        )

    constrained = False
    current_pass = 0
    if kernel:
        busy = kernel.busyinfo
        _ = kernel.translation
        min_res = min(
            kernel.device.view.native_scale_x, kernel.device.view.native_scale_y
        )
        # a 0.5 mm resolution is enough
        resolution = int(0.5 * UNITS_PER_MM / min_res)
        # print(f"Chosen resolution: {resolution} - minscale = {min_res}")
    else:
        busy = None
        resolution = 10
    for outer in closed_groups:
        for inner in groups:
            current_pass += 1
            if outer is inner:
                continue
            # if outer is inside inner, then inner cannot be inside outer
            if inner.contains and outer in inner.contains:
                continue
            if current_pass % 50 == 0 and busy and busy.shown:
                # Can't execute without kernel, reference before assignment is safe.
                message = _("Pass {cpass}/{tpass}").format(
                    cpass=current_pass, tpass=total_pass
                )
                busy.change(msg=message, keep=2)
                busy.show()

            if is_inside(inner, outer, tolerance, resolution):
                constrained = True
                if outer.contains is None:
                    outer.contains = []
                outer.contains.append(inner)

                if inner.inside is None:
                    inner.inside = []
                inner.inside.append(outer)

    context.constrained = constrained

    # for g in groups:
    #     if g.contains is not None:
    #         for inner in g.contains:
    #             assert inner in groups
    #             assert inner is not g
    #             assert g in inner.inside
    #     if g.inside is not None:
    #         for outer in g.inside:
    #             assert outer in groups
    #             assert outer is not g
    #             assert g in outer.contains

    if channel:
        end_times = times()
        channel(
            f"Inner paths identified in {time() - start_time:.3f} elapsed seconds: {constrained} "
            f"using {end_times[0] - start_times[0]:.3f} seconds CPU"
        )
        for outer in closed_groups:
            if outer is None:
                continue
            channel(
                f"Outer {type(outer).__name__} contains: {'None' if outer.contains is None else str(len(outer.contains))} cutcode elements"
            )

    return context


def short_travel_cutcode(
    context: CutCode,
    kernel=None,
    channel=None,
    complete_path: Optional[bool] = False,
    grouped_inner: Optional[bool] = False,
    hatch_optimize: Optional[bool] = False,
):
    """
    Selects cutcode from candidate cutcode (burns_done < passes in this CutCode),
    optimizing with greedy/brute for shortest distances optimizations.

    For paths starting at exactly the same point forward paths are preferred over reverse paths

    We start at either 0,0 or the value given in `context.start`

    This is time-intense hyper-optimized code, so it contains several seemingly redundant
    checks.
    """
    if channel:
        start_length = context.length_travel(True)
        start_time = time()
        start_times = times()
        channel("Executing Greedy Short-Travel optimization")
        channel(f"Length at start: {start_length:.0f} steps")
    unordered = []
    for idx in range(len(context) - 1, -1, -1):
        c = context[idx]
        if isinstance(c, CutGroup) and c.skip:
            unordered.append(c)
            context.pop(idx)

    curr = context.start
    curr = 0 if curr is None else complex(curr[0], curr[1])

    cutcode_len = 0
    for c in context.flat():
        cutcode_len += 1
        c.burns_done = 0

    ordered = CutCode()
    current_pass = 0
    if kernel:
        busy = kernel.busyinfo
        _ = kernel.translation
    else:
        busy = None
    # print (f"Cutcode-Len={cutcode_len}, unordered: {len(unordered)}")
    while True:
        current_pass += 1
        if current_pass % 50 == 0 and busy and busy.shown:
            # That may not be a fully correct approximation
            # in terms of the total passes required, but it
            # should give an idea...
            message = _("Pass {cpass}/{tpass}").format(
                cpass=current_pass, tpass=cutcode_len
            )
            busy.change(msg=message, keep=2)
            busy.show()
        closest = None
        backwards = False
        distance = float("inf")

        try:
            last_segment = ordered[-1]
        except IndexError:
            pass
        else:
            if last_segment.normal:
                # Attempt to initialize value to next segment in subpath
                cut = last_segment.next
                if cut and cut.burns_done < cut.passes:
                    closest = cut
                    backwards = False
                    start = closest.start
                    distance = abs(complex(start[0], start[1]) - curr)
            else:
                # Attempt to initialize value to previous segment in subpath
                cut = last_segment.previous
                if cut and cut.burns_done < cut.passes:
                    closest = cut
                    backwards = True
                    end = closest.end
                    distance = abs(complex(end[0], end[1]) - curr)
            # Gap or continuing on path not permitted, try reversing
            if (
                distance > 50
                and last_segment.burns_done < last_segment.passes
                and last_segment.reversible()
                and last_segment.next is not None
            ):
                # last_segment is a copy, so we need to get original
                closest = last_segment.next.previous
                backwards = last_segment.normal
                distance = 0  # By definition since we are reversing and reburning

        # Stay on path in same direction if gap <= 1/20" i.e. path not quite closed
        # Travel only if path is completely burned or gap > 1/20"
        if distance > 50:
            for cut in context.candidate(
                complete_path=complete_path, grouped_inner=grouped_inner
            ):
                s = cut.start
                if (
                    abs(s[0] - curr.real) <= distance
                    and abs(s[1] - curr.imag) <= distance
                    and (not complete_path or cut.closed or cut.first)
                ):
                    d = abs(complex(s[0], s[1]) - curr)
                    if d < distance:
                        closest = cut
                        backwards = False
                        if d <= 0.1:  # Distance in px is zero, we cannot improve.
                            break
                        distance = d

                if not cut.reversible():
                    continue
                e = cut.end
                if (
                    abs(e[0] - curr.real) <= distance
                    and abs(e[1] - curr.imag) <= distance
                    and (not complete_path or cut.closed or cut.last)
                ):
                    d = abs(complex(e[0], e[1]) - curr)
                    if d < distance:
                        closest = cut
                        backwards = True
                        if d <= 0.1:  # Distance in px is zero, we cannot improve.
                            break
                        distance = d

        if closest is None:
            break

        # Change direction if other direction is coincident and has more burns remaining
        if backwards:
            if (
                closest.next
                and closest.next.burns_done <= closest.burns_done
                and closest.next.start == closest.end
            ):
                closest = closest.next
                backwards = False
        elif closest.reversible():
            if (
                closest.previous
                and closest.previous is not closest
                and closest.previous.burns_done < closest.burns_done
                and closest.previous.end == closest.start
            ):
                closest = closest.previous
                backwards = True

        closest.burns_done += 1
        c = copy(closest)
        if backwards:
            c.reverse()
        end = c.end
        curr = complex(end[0], end[1])
        ordered.append(c)
    # print (f"Now we have {len(ordered)} items in list")
    if hatch_optimize:
        for idx, c in enumerate(unordered):
            if isinstance(c, CutGroup):
                c.skip = False
                unordered[idx] = short_travel_cutcode(
                    context=c,
                    kernel=kernel,
                    complete_path=False,
                    grouped_inner=False,
                    channel=channel,
                )
    # As these are reversed, we reverse again...
    ordered.extend(reversed(unordered))
    # print (f"And after extension {len(ordered)} items in list")
    # for c in ordered:
    #     print (f"{type(c).__name__} - {len(c) if isinstance(c, (list, tuple)) else '-childless-'}")
    if context.start is not None:
        ordered._start_x, ordered._start_y = context.start
    else:
        ordered._start_x = 0
        ordered._start_y = 0
    if channel:
        end_times = times()
        end_length = ordered.length_travel(True)
        try:
            delta = (end_length - start_length) / start_length
        except ZeroDivisionError:
            delta = 0
        channel(
            f"Length at end: {end_length:.0f} steps "
            f"({delta:+.0%}), "
            f"optimized in {time() - start_time:.3f} "
            f"elapsed seconds using {end_times[0] - start_times[0]:.3f} seconds CPU"
        )
    return ordered


def short_travel_cutcode_2opt(
    context: CutCode, kernel=None, passes: int = 50, channel=None
):
    """
    This implements the 2-opt algorithm using numpy.

    Since it skips the candidate code, it does not perform inner-first optimizations.
    Due to the numpy requirement, it doesn't work without numpy.
    --
    Uses code I wrote for vpype:
    https://github.com/abey79/vpype/commit/7b1fad6bd0fcfc267473fdb8ba2166821c80d9cd

    @param context: cutcode to be optimized
    @param kernel: kernel value
    @param passes: max passes to perform 2-opt
    @param channel: Channel to send data about the optimization process.
    @return:
    """
    if channel:
        start_length = context.length_travel(True)
        start_time = time()
        start_times = times()
        channel("Executing 2-Opt Short-Travel optimization")
        channel(f"Length at start: {start_length:.0f} steps")

    ordered = CutCode(context.flat())
    length = len(ordered)
    if length <= 1:
        if channel:
            channel("2-Opt: Not enough elements to optimize.")
        return ordered

    curr = context.start
    if curr is None:
        curr = 0
    else:
        curr = complex(curr)

    current_pass = 1
    min_value = -1e-10  # Do not swap on rounding error.

    endpoints = np.zeros((length, 4), dtype="complex")
    # start, index, reverse-index, end
    for i in range(length):
        endpoints[i] = complex(ordered[i].start), i, ~i, complex(ordered[i].end)
    indexes0 = np.arange(0, length - 1)
    indexes1 = indexes0 + 1

    def log_progress(pos):
        starts = endpoints[indexes0, -1]
        ends = endpoints[indexes1, 0]
        dists = np.abs(starts - ends)
        dist_sum = dists.sum() + abs(curr - endpoints[0][0])
        channel(
            f"optimize: laser-off distance is {dist_sum}. {100 * pos / length:.02f}% done with pass {current_pass}/{passes}"
        )
        if kernel:
            busy = kernel.busyinfo
            _ = kernel.translation
            if busy.shown:
                busy.change(
                    msg=_("Pass {cpass}/{tpass}").format(
                        cpass=current_pass, tpass=passes
                    ),
                    keep=2,
                )
                busy.show()

    improved = True
    while improved:
        improved = False

        first = endpoints[0][0]
        cut_ends = endpoints[indexes0, -1]
        cut_starts = endpoints[indexes1, 0]

        # delta = np.abs(curr - first) + np.abs(first - cut_starts) - np.abs(cut_ends - cut_starts)
        delta = (
            np.abs(curr - cut_ends)
            + np.abs(first - cut_starts)
            - np.abs(cut_ends - cut_starts)
            - np.abs(curr - first)
        )
        index = int(np.argmin(delta))
        if delta[index] < min_value:
            endpoints[: index + 1] = np.flip(
                endpoints[: index + 1], (0, 1)
            )  # top to bottom, and right to left flips.
            improved = True
            if channel:
                log_progress(1)
        for mid in range(1, length - 1):
            idxs = np.arange(mid, length - 1)

            mid_source = endpoints[mid - 1, -1]
            mid_dest = endpoints[mid, 0]
            cut_ends = endpoints[idxs, -1]
            cut_starts = endpoints[idxs + 1, 0]
            delta = (
                np.abs(mid_source - cut_ends)
                + np.abs(mid_dest - cut_starts)
                - np.abs(cut_ends - cut_starts)
                - np.abs(mid_source - mid_dest)
            )
            index = int(np.argmin(delta))
            if delta[index] < min_value:
                endpoints[mid : mid + index + 1] = np.flip(
                    endpoints[mid : mid + index + 1], (0, 1)
                )
                improved = True
                if channel:
                    log_progress(mid)

        last = endpoints[-1, -1]
        cut_ends = endpoints[indexes0, -1]
        cut_starts = endpoints[indexes1, 0]

        delta = np.abs(cut_ends - last) - np.abs(cut_ends - cut_starts)
        index = int(np.argmin(delta))
        if delta[index] < min_value:
            endpoints[index + 1 :] = np.flip(
                endpoints[index + 1 :], (0, 1)
            )  # top to bottom, and right to left flips.
            improved = True
            if channel:
                log_progress(length)
        if current_pass >= passes:
            break
        current_pass += 1

    # Two-opt complete.
    order = endpoints[:, 1].real.astype(int)
    ordered.reordered(order)
    if channel:
        end_times = times()
        end_length = ordered.length_travel(True)
        channel(
            f"Length at end: {end_length:.0f} steps "
            f"({(end_length - start_length) / start_length:+.0%}), "
            f"optimized in {time() - start_time:.3f} "
            f"elapsed seconds using {end_times[0] - start_times[0]:.3f} seconds CPU"
        )
    return ordered


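# Editor's note - an illustrative sketch, not part of the package: each 2-opt step above
# scores a candidate reversal by the travel it would add minus the travel it removes,
# e.g. for an interior slice the removed legs are |mid_source - mid_dest| and
# |cut_end - cut_start| while the added legs are |mid_source - cut_end| and
# |mid_dest - cut_start|; the slice is flipped only when that delta is negative.
# A toy check with complex points:
#
#     mid_source, mid_dest = 0 + 0j, 10 + 0j
#     cut_end, cut_start = 1 + 0j, 11 + 0j
#     delta = (abs(mid_source - cut_end) + abs(mid_dest - cut_start)
#              - abs(cut_end - cut_start) - abs(mid_source - mid_dest))
#     # delta = 1 + 1 - 10 - 10 = -18 < 0, so reversing this span shortens travel.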
def inner_selection_cutcode(
    context: CutCode, channel=None, grouped_inner: Optional[bool] = False
):
    """
    Selects cutcode from the permitted candidate cutcode but does nothing to optimize
    beyond finding a valid solution.

    This routine runs if opt_inner_first is selected and opt_greedy is not selected.
    """
    if channel:
        start_length = context.length_travel(True)
        start_time = time()
        start_times = times()
        channel("Executing Inner Selection-Only optimization")
        channel(f"Length at start: {start_length:.0f} steps")

    for c in context.flat():
        c.burns_done = 0

    ordered = CutCode()
    iterations = 0
    while True:
        c = list(context.candidate(grouped_inner=grouped_inner))
        if not c:
            break
        for o in c:
            o.burns_done += 1
        ordered.extend(copy(c))
        iterations += 1

    if channel:
        end_times = times()
        end_length = ordered.length_travel(True)
        msg = f"Length at end: {end_length:.0f} steps "
        if start_length != 0:
            msg += f"({(end_length - start_length) / start_length:+.0%}), "
        msg += f"optimized in {time() - start_time:.3f} "
        msg += f"elapsed seconds using {end_times[0] - start_times[0]:.3f} "
        msg += f"seconds CPU in {iterations} iterations"

        channel(msg)
    return ordered