netgen-mesher 6.2.2506.post35.dev0 (cp314-cp314-win_amd64.whl)
This diff represents the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- netgen/NgOCC.py +7 -0
- netgen/__init__.py +114 -0
- netgen/__init__.pyi +22 -0
- netgen/__main__.py +53 -0
- netgen/cmake/NetgenConfig.cmake +79 -0
- netgen/cmake/netgen-targets-release.cmake +69 -0
- netgen/cmake/netgen-targets.cmake +146 -0
- netgen/config/__init__.py +1 -0
- netgen/config/__init__.pyi +52 -0
- netgen/config/__main__.py +4 -0
- netgen/config/config.py +68 -0
- netgen/config/config.pyi +54 -0
- netgen/csg.py +25 -0
- netgen/geom2d.py +178 -0
- netgen/gui.py +82 -0
- netgen/include/core/archive.hpp +1256 -0
- netgen/include/core/array.hpp +1760 -0
- netgen/include/core/autodiff.hpp +1131 -0
- netgen/include/core/autodiffdiff.hpp +733 -0
- netgen/include/core/bitarray.hpp +240 -0
- netgen/include/core/concurrentqueue.h +3619 -0
- netgen/include/core/exception.hpp +145 -0
- netgen/include/core/flags.hpp +199 -0
- netgen/include/core/hashtable.hpp +1281 -0
- netgen/include/core/localheap.hpp +318 -0
- netgen/include/core/logging.hpp +117 -0
- netgen/include/core/memtracer.hpp +221 -0
- netgen/include/core/mpi4py_pycapi.h +245 -0
- netgen/include/core/mpi_wrapper.hpp +643 -0
- netgen/include/core/ng_mpi.hpp +94 -0
- netgen/include/core/ng_mpi_generated_declarations.hpp +155 -0
- netgen/include/core/ng_mpi_native.hpp +25 -0
- netgen/include/core/ngcore.hpp +32 -0
- netgen/include/core/ngcore_api.hpp +152 -0
- netgen/include/core/ngstream.hpp +115 -0
- netgen/include/core/paje_trace.hpp +279 -0
- netgen/include/core/profiler.hpp +382 -0
- netgen/include/core/python_ngcore.hpp +457 -0
- netgen/include/core/ranges.hpp +109 -0
- netgen/include/core/register_archive.hpp +100 -0
- netgen/include/core/signal.hpp +82 -0
- netgen/include/core/simd.hpp +160 -0
- netgen/include/core/simd_arm64.hpp +407 -0
- netgen/include/core/simd_avx.hpp +394 -0
- netgen/include/core/simd_avx512.hpp +285 -0
- netgen/include/core/simd_generic.hpp +1053 -0
- netgen/include/core/simd_math.hpp +178 -0
- netgen/include/core/simd_sse.hpp +289 -0
- netgen/include/core/statushandler.hpp +37 -0
- netgen/include/core/symboltable.hpp +153 -0
- netgen/include/core/table.hpp +810 -0
- netgen/include/core/taskmanager.hpp +1161 -0
- netgen/include/core/type_traits.hpp +65 -0
- netgen/include/core/utils.hpp +385 -0
- netgen/include/core/version.hpp +102 -0
- netgen/include/core/xbool.hpp +47 -0
- netgen/include/csg/algprim.hpp +563 -0
- netgen/include/csg/brick.hpp +150 -0
- netgen/include/csg/csg.hpp +43 -0
- netgen/include/csg/csgeom.hpp +389 -0
- netgen/include/csg/csgparser.hpp +101 -0
- netgen/include/csg/curve2d.hpp +67 -0
- netgen/include/csg/edgeflw.hpp +112 -0
- netgen/include/csg/explicitcurve2d.hpp +113 -0
- netgen/include/csg/extrusion.hpp +185 -0
- netgen/include/csg/gencyl.hpp +70 -0
- netgen/include/csg/geoml.hpp +16 -0
- netgen/include/csg/identify.hpp +213 -0
- netgen/include/csg/manifold.hpp +29 -0
- netgen/include/csg/meshsurf.hpp +46 -0
- netgen/include/csg/polyhedra.hpp +121 -0
- netgen/include/csg/revolution.hpp +180 -0
- netgen/include/csg/singularref.hpp +84 -0
- netgen/include/csg/solid.hpp +295 -0
- netgen/include/csg/specpoin.hpp +194 -0
- netgen/include/csg/spline3d.hpp +99 -0
- netgen/include/csg/splinesurface.hpp +85 -0
- netgen/include/csg/surface.hpp +394 -0
- netgen/include/csg/triapprox.hpp +63 -0
- netgen/include/csg/vscsg.hpp +34 -0
- netgen/include/general/autodiff.hpp +356 -0
- netgen/include/general/autoptr.hpp +39 -0
- netgen/include/general/gzstream.h +121 -0
- netgen/include/general/hashtabl.hpp +1692 -0
- netgen/include/general/myadt.hpp +48 -0
- netgen/include/general/mystring.hpp +226 -0
- netgen/include/general/netgenout.hpp +205 -0
- netgen/include/general/ngarray.hpp +797 -0
- netgen/include/general/ngbitarray.hpp +149 -0
- netgen/include/general/ngpython.hpp +74 -0
- netgen/include/general/optmem.hpp +44 -0
- netgen/include/general/parthreads.hpp +138 -0
- netgen/include/general/seti.hpp +50 -0
- netgen/include/general/sort.hpp +47 -0
- netgen/include/general/spbita2d.hpp +59 -0
- netgen/include/general/stack.hpp +114 -0
- netgen/include/general/table.hpp +280 -0
- netgen/include/general/template.hpp +509 -0
- netgen/include/geom2d/csg2d.hpp +750 -0
- netgen/include/geom2d/geometry2d.hpp +280 -0
- netgen/include/geom2d/spline2d.hpp +234 -0
- netgen/include/geom2d/vsgeom2d.hpp +28 -0
- netgen/include/gprim/adtree.hpp +1392 -0
- netgen/include/gprim/geom2d.hpp +858 -0
- netgen/include/gprim/geom3d.hpp +749 -0
- netgen/include/gprim/geomfuncs.hpp +212 -0
- netgen/include/gprim/geomobjects.hpp +544 -0
- netgen/include/gprim/geomops.hpp +404 -0
- netgen/include/gprim/geomtest3d.hpp +101 -0
- netgen/include/gprim/gprim.hpp +33 -0
- netgen/include/gprim/spline.hpp +778 -0
- netgen/include/gprim/splinegeometry.hpp +73 -0
- netgen/include/gprim/transform3d.hpp +216 -0
- netgen/include/include/acisgeom.hpp +3 -0
- netgen/include/include/csg.hpp +1 -0
- netgen/include/include/geometry2d.hpp +1 -0
- netgen/include/include/gprim.hpp +1 -0
- netgen/include/include/incopengl.hpp +62 -0
- netgen/include/include/inctcl.hpp +13 -0
- netgen/include/include/incvis.hpp +6 -0
- netgen/include/include/linalg.hpp +1 -0
- netgen/include/include/meshing.hpp +1 -0
- netgen/include/include/myadt.hpp +1 -0
- netgen/include/include/mydefs.hpp +70 -0
- netgen/include/include/mystdlib.h +59 -0
- netgen/include/include/netgen_config.hpp +27 -0
- netgen/include/include/netgen_version.hpp +9 -0
- netgen/include/include/nginterface_v2_impl.hpp +395 -0
- netgen/include/include/ngsimd.hpp +1 -0
- netgen/include/include/occgeom.hpp +1 -0
- netgen/include/include/opti.hpp +1 -0
- netgen/include/include/parallel.hpp +1 -0
- netgen/include/include/stlgeom.hpp +1 -0
- netgen/include/include/visual.hpp +1 -0
- netgen/include/interface/rw_medit.hpp +11 -0
- netgen/include/interface/writeuser.hpp +80 -0
- netgen/include/linalg/densemat.hpp +414 -0
- netgen/include/linalg/linalg.hpp +29 -0
- netgen/include/linalg/opti.hpp +142 -0
- netgen/include/linalg/polynomial.hpp +47 -0
- netgen/include/linalg/vector.hpp +217 -0
- netgen/include/meshing/adfront2.hpp +274 -0
- netgen/include/meshing/adfront3.hpp +332 -0
- netgen/include/meshing/basegeom.hpp +370 -0
- netgen/include/meshing/bcfunctions.hpp +53 -0
- netgen/include/meshing/bisect.hpp +72 -0
- netgen/include/meshing/boundarylayer.hpp +113 -0
- netgen/include/meshing/classifyhpel.hpp +1984 -0
- netgen/include/meshing/clusters.hpp +46 -0
- netgen/include/meshing/curvedelems.hpp +274 -0
- netgen/include/meshing/delaunay2d.hpp +73 -0
- netgen/include/meshing/fieldlines.hpp +103 -0
- netgen/include/meshing/findip.hpp +198 -0
- netgen/include/meshing/findip2.hpp +103 -0
- netgen/include/meshing/geomsearch.hpp +69 -0
- netgen/include/meshing/global.hpp +54 -0
- netgen/include/meshing/hpref_hex.hpp +330 -0
- netgen/include/meshing/hpref_prism.hpp +3405 -0
- netgen/include/meshing/hpref_pyramid.hpp +154 -0
- netgen/include/meshing/hpref_quad.hpp +2082 -0
- netgen/include/meshing/hpref_segm.hpp +122 -0
- netgen/include/meshing/hpref_tet.hpp +4230 -0
- netgen/include/meshing/hpref_trig.hpp +848 -0
- netgen/include/meshing/hprefinement.hpp +366 -0
- netgen/include/meshing/improve2.hpp +178 -0
- netgen/include/meshing/improve3.hpp +151 -0
- netgen/include/meshing/localh.hpp +223 -0
- netgen/include/meshing/meshclass.hpp +1076 -0
- netgen/include/meshing/meshfunc.hpp +47 -0
- netgen/include/meshing/meshing.hpp +63 -0
- netgen/include/meshing/meshing2.hpp +163 -0
- netgen/include/meshing/meshing3.hpp +123 -0
- netgen/include/meshing/meshtool.hpp +90 -0
- netgen/include/meshing/meshtype.hpp +1930 -0
- netgen/include/meshing/msghandler.hpp +62 -0
- netgen/include/meshing/paralleltop.hpp +172 -0
- netgen/include/meshing/python_mesh.hpp +206 -0
- netgen/include/meshing/ruler2.hpp +172 -0
- netgen/include/meshing/ruler3.hpp +211 -0
- netgen/include/meshing/soldata.hpp +141 -0
- netgen/include/meshing/specials.hpp +17 -0
- netgen/include/meshing/surfacegeom.hpp +73 -0
- netgen/include/meshing/topology.hpp +1003 -0
- netgen/include/meshing/validate.hpp +21 -0
- netgen/include/meshing/visual_interface.hpp +71 -0
- netgen/include/mydefs.hpp +70 -0
- netgen/include/nginterface.h +474 -0
- netgen/include/nginterface_v2.hpp +406 -0
- netgen/include/nglib.h +697 -0
- netgen/include/nglib_occ.h +50 -0
- netgen/include/occ/occ_edge.hpp +47 -0
- netgen/include/occ/occ_face.hpp +52 -0
- netgen/include/occ/occ_solid.hpp +23 -0
- netgen/include/occ/occ_utils.hpp +376 -0
- netgen/include/occ/occ_vertex.hpp +30 -0
- netgen/include/occ/occgeom.hpp +659 -0
- netgen/include/occ/occmeshsurf.hpp +168 -0
- netgen/include/occ/vsocc.hpp +33 -0
- netgen/include/pybind11/LICENSE +29 -0
- netgen/include/pybind11/attr.h +722 -0
- netgen/include/pybind11/buffer_info.h +208 -0
- netgen/include/pybind11/cast.h +2361 -0
- netgen/include/pybind11/chrono.h +228 -0
- netgen/include/pybind11/common.h +2 -0
- netgen/include/pybind11/complex.h +74 -0
- netgen/include/pybind11/conduit/README.txt +15 -0
- netgen/include/pybind11/conduit/pybind11_conduit_v1.h +116 -0
- netgen/include/pybind11/conduit/pybind11_platform_abi_id.h +87 -0
- netgen/include/pybind11/conduit/wrap_include_python_h.h +72 -0
- netgen/include/pybind11/critical_section.h +56 -0
- netgen/include/pybind11/detail/class.h +823 -0
- netgen/include/pybind11/detail/common.h +1348 -0
- netgen/include/pybind11/detail/cpp_conduit.h +75 -0
- netgen/include/pybind11/detail/descr.h +226 -0
- netgen/include/pybind11/detail/dynamic_raw_ptr_cast_if_possible.h +39 -0
- netgen/include/pybind11/detail/exception_translation.h +71 -0
- netgen/include/pybind11/detail/function_record_pyobject.h +191 -0
- netgen/include/pybind11/detail/init.h +538 -0
- netgen/include/pybind11/detail/internals.h +799 -0
- netgen/include/pybind11/detail/native_enum_data.h +209 -0
- netgen/include/pybind11/detail/pybind11_namespace_macros.h +82 -0
- netgen/include/pybind11/detail/struct_smart_holder.h +378 -0
- netgen/include/pybind11/detail/type_caster_base.h +1591 -0
- netgen/include/pybind11/detail/typeid.h +65 -0
- netgen/include/pybind11/detail/using_smart_holder.h +22 -0
- netgen/include/pybind11/detail/value_and_holder.h +90 -0
- netgen/include/pybind11/eigen/common.h +9 -0
- netgen/include/pybind11/eigen/matrix.h +723 -0
- netgen/include/pybind11/eigen/tensor.h +521 -0
- netgen/include/pybind11/eigen.h +12 -0
- netgen/include/pybind11/embed.h +320 -0
- netgen/include/pybind11/eval.h +161 -0
- netgen/include/pybind11/functional.h +147 -0
- netgen/include/pybind11/gil.h +199 -0
- netgen/include/pybind11/gil_safe_call_once.h +102 -0
- netgen/include/pybind11/gil_simple.h +37 -0
- netgen/include/pybind11/iostream.h +265 -0
- netgen/include/pybind11/native_enum.h +67 -0
- netgen/include/pybind11/numpy.h +2312 -0
- netgen/include/pybind11/operators.h +202 -0
- netgen/include/pybind11/options.h +92 -0
- netgen/include/pybind11/pybind11.h +3645 -0
- netgen/include/pybind11/pytypes.h +2680 -0
- netgen/include/pybind11/stl/filesystem.h +114 -0
- netgen/include/pybind11/stl.h +666 -0
- netgen/include/pybind11/stl_bind.h +858 -0
- netgen/include/pybind11/subinterpreter.h +299 -0
- netgen/include/pybind11/trampoline_self_life_support.h +65 -0
- netgen/include/pybind11/type_caster_pyobject_ptr.h +61 -0
- netgen/include/pybind11/typing.h +298 -0
- netgen/include/pybind11/warnings.h +75 -0
- netgen/include/stlgeom/meshstlsurface.hpp +67 -0
- netgen/include/stlgeom/stlgeom.hpp +491 -0
- netgen/include/stlgeom/stlline.hpp +193 -0
- netgen/include/stlgeom/stltool.hpp +331 -0
- netgen/include/stlgeom/stltopology.hpp +419 -0
- netgen/include/stlgeom/vsstl.hpp +58 -0
- netgen/include/visualization/meshdoc.hpp +42 -0
- netgen/include/visualization/mvdraw.hpp +325 -0
- netgen/include/visualization/vispar.hpp +128 -0
- netgen/include/visualization/visual.hpp +28 -0
- netgen/include/visualization/visual_api.hpp +10 -0
- netgen/include/visualization/vssolution.hpp +399 -0
- netgen/lib/libnggui.lib +0 -0
- netgen/lib/ngcore.lib +0 -0
- netgen/lib/nglib.lib +0 -0
- netgen/lib/togl.lib +0 -0
- netgen/libnggui.dll +0 -0
- netgen/libngguipy.lib +0 -0
- netgen/libngguipy.pyd +0 -0
- netgen/libngpy/_NgOCC.pyi +1545 -0
- netgen/libngpy/__init__.pyi +7 -0
- netgen/libngpy/_csg.pyi +259 -0
- netgen/libngpy/_geom2d.pyi +323 -0
- netgen/libngpy/_meshing.pyi +1111 -0
- netgen/libngpy/_stl.pyi +131 -0
- netgen/libngpy.lib +0 -0
- netgen/libngpy.pyd +0 -0
- netgen/meshing.py +65 -0
- netgen/ngcore.dll +0 -0
- netgen/nglib.dll +0 -0
- netgen/occ.py +52 -0
- netgen/read_gmsh.py +259 -0
- netgen/read_meshio.py +22 -0
- netgen/stl.py +2 -0
- netgen/togl.dll +0 -0
- netgen/version.py +2 -0
- netgen/webgui.py +529 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/boundarycondition.geo +16 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/boxcyl.geo +32 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/circle_on_cube.geo +27 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/cone.geo +13 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/cube.geo +16 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/cubeandring.geo +55 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/cubeandspheres.geo +21 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/cubemcyl.geo +18 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/cubemsphere.geo +19 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/cylinder.geo +12 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/cylsphere.geo +12 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/doc/ng4.pdf +0 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/ellipsoid.geo +8 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/ellipticcyl.geo +10 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/extrusion.geo +99 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/fichera.geo +24 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/frame.step +11683 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/hinge.stl +8486 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/lshape3d.geo +26 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/manyholes.geo +26 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/manyholes2.geo +26 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/matrix.geo +27 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/ortho.geo +11 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/part1.stl +2662 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/period.geo +33 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/py_tutorials/exportNeutral.py +26 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/py_tutorials/mesh.py +19 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/py_tutorials/shaft.geo +65 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/revolution.geo +18 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/screw.step +1694 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/sculpture.geo +13 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/shaft.geo +65 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/shell.geo +10 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/sphere.geo +8 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/sphereincube.geo +17 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/square.in2d +35 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/squarecircle.in2d +48 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/squarehole.in2d +47 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/torus.geo +8 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/trafo.geo +57 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/twobricks.geo +15 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/twocubes.geo +18 -0
- netgen_mesher-6.2.2506.post35.dev0.data/data/share/netgen/twocyl.geo +16 -0
- netgen_mesher-6.2.2506.post35.dev0.dist-info/METADATA +15 -0
- netgen_mesher-6.2.2506.post35.dev0.dist-info/RECORD +340 -0
- netgen_mesher-6.2.2506.post35.dev0.dist-info/WHEEL +5 -0
- netgen_mesher-6.2.2506.post35.dev0.dist-info/entry_points.txt +2 -0
- netgen_mesher-6.2.2506.post35.dev0.dist-info/licenses/AUTHORS +1 -0
- netgen_mesher-6.2.2506.post35.dev0.dist-info/licenses/LICENSE +504 -0
- netgen_mesher-6.2.2506.post35.dev0.dist-info/top_level.txt +2 -0
- pyngcore/__init__.py +1 -0
- pyngcore/pyngcore.cp314-win_amd64.pyd +0 -0
netgen/include/core/mpi_wrapper.hpp
@@ -0,0 +1,643 @@
+#ifndef NGCORE_MPIWRAPPER_HPP
+#define NGCORE_MPIWRAPPER_HPP
+
+#include <array>
+
+#include <complex>
+
+#include "array.hpp"
+#include "table.hpp"
+#include "exception.hpp"
+#include "profiler.hpp"
+#include "ngstream.hpp"
+#include "ng_mpi.hpp"
+
+namespace ngcore
+{
+
+#ifdef PARALLEL
+
+  template <class T> struct MPI_typetrait { };
+
+  template <> struct MPI_typetrait<int> {
+    static NG_MPI_Datatype MPIType () { return NG_MPI_INT; } };
+
+  template <> struct MPI_typetrait<short> {
+    static NG_MPI_Datatype MPIType () { return NG_MPI_SHORT; } };
+
+  template <> struct MPI_typetrait<char> {
+    static NG_MPI_Datatype MPIType () { return NG_MPI_CHAR; } };
+
+  template <> struct MPI_typetrait<signed char> {
+    static NG_MPI_Datatype MPIType () { return NG_MPI_CHAR; } };
+
+  template <> struct MPI_typetrait<unsigned char> {
+    static NG_MPI_Datatype MPIType () { return NG_MPI_CHAR; } };
+
+  template <> struct MPI_typetrait<size_t> {
+    static NG_MPI_Datatype MPIType () { return NG_MPI_UINT64_T; } };
+
+  template <> struct MPI_typetrait<double> {
+    static NG_MPI_Datatype MPIType () { return NG_MPI_DOUBLE; } };
+
+  template <> struct MPI_typetrait<std::complex<double>> {
+    static NG_MPI_Datatype MPIType () { return NG_MPI_CXX_DOUBLE_COMPLEX; } };
+
+  template <> struct MPI_typetrait<bool> {
+    static NG_MPI_Datatype MPIType () { return NG_MPI_C_BOOL; } };
+
+
+  template<typename T, size_t S>
+  struct MPI_typetrait<std::array<T,S>>
+  {
+    static NG_MPI_Datatype MPIType ()
+    {
+      static NG_MPI_Datatype NG_MPI_T = 0;
+      if (!NG_MPI_T)
+        {
+          NG_MPI_Type_contiguous ( S, MPI_typetrait<T>::MPIType(), &NG_MPI_T);
+          NG_MPI_Type_commit ( &NG_MPI_T );
+        }
+      return NG_MPI_T;
+    }
+  };
+
+  template <class T, class T2 = decltype(MPI_typetrait<T>::MPIType())>
+  inline NG_MPI_Datatype GetMPIType () {
+    return MPI_typetrait<T>::MPIType();
+  }
+
+  template <class T>
+  inline NG_MPI_Datatype GetMPIType (T &) {
+    return GetMPIType<T>();
+  }
+
+  class NgMPI_Request
+  {
+    NG_MPI_Request request;
+  public:
+    NgMPI_Request (NG_MPI_Request requ) : request{requ} { }
+    NgMPI_Request (const NgMPI_Request&) = delete;
+    NgMPI_Request (NgMPI_Request&&) = default;
+    ~NgMPI_Request () { NG_MPI_Wait (&request, NG_MPI_STATUS_IGNORE); }
+    void Wait() { NG_MPI_Wait (&request, NG_MPI_STATUS_IGNORE); }
+    operator NG_MPI_Request() &&
+    {
+      auto tmp = request;
+      request = NG_MPI_REQUEST_NULL;
+      return tmp;
+    }
+  };
+
+  class NgMPI_Requests
+  {
+    Array<NG_MPI_Request> requests;
+  public:
+    NgMPI_Requests() = default;
+    ~NgMPI_Requests() { WaitAll(); }
+
+    void Reset() { requests.SetSize0(); }
+
+    NgMPI_Requests & operator+= (NgMPI_Request && r)
+    {
+      requests += NG_MPI_Request(std::move(r));
+      return *this;
+    }
+
+    NgMPI_Requests & operator+= (NG_MPI_Request r)
+    {
+      requests += r;
+      return *this;
+    }
+
+    void WaitAll()
+    {
+      static Timer t("NgMPI - WaitAll"); RegionTimer reg(t);
+      if (!requests.Size()) return;
+      NG_MPI_Waitall (requests.Size(), requests.Data(), NG_MPI_STATUSES_IGNORE);
+    }
+
+    int WaitAny ()
+    {
+      int nr;
+      NG_MPI_Waitany (requests.Size(), requests.Data(), &nr, NG_MPI_STATUS_IGNORE);
+      return nr;
+    }
+  };
+
+  [[deprecated("use requests.WaitAll instread")]]
+  inline void MyMPI_WaitAll (FlatArray<NG_MPI_Request> requests)
+  {
+    static Timer t("MPI - WaitAll"); RegionTimer reg(t);
+    if (!requests.Size()) return;
+    NG_MPI_Waitall (requests.Size(), requests.Data(), NG_MPI_STATUSES_IGNORE);
+  }
+
+  [[deprecated("use requests.WaitAny instread")]]
+  inline int MyMPI_WaitAny (FlatArray<NG_MPI_Request> requests)
+  {
+    int nr;
+    NG_MPI_Waitany (requests.Size(), requests.Data(), &nr, NG_MPI_STATUS_IGNORE);
+    return nr;
+  }
+
+
+
+  class NgMPI_Comm
+  {
+  protected:
+    NG_MPI_Comm comm;
+    bool valid_comm;
+    int * refcount;
+    int rank, size;
+  public:
+    NgMPI_Comm ()
+      : valid_comm(false), refcount(nullptr), rank(0), size(1)
+    { ; }
+
+    NgMPI_Comm (NG_MPI_Comm _comm, bool owns = false)
+      : comm(_comm), valid_comm(true)
+    {
+      int flag;
+      NG_MPI_Initialized (&flag);
+      if (!flag)
+        {
+          valid_comm = false;
+          refcount = nullptr;
+          rank = 0;
+          size = 1;
+          return;
+        }
+
+      if (!owns)
+        refcount = nullptr;
+      else
+        refcount = new int{1};
+
+      NG_MPI_Comm_rank(comm, &rank);
+      NG_MPI_Comm_size(comm, &size);
+    }
+
+    NgMPI_Comm (const NgMPI_Comm & c)
+      : comm(c.comm), valid_comm(c.valid_comm), refcount(c.refcount),
+        rank(c.rank), size(c.size)
+    {
+      if (refcount) (*refcount)++;
+    }
+
+    NgMPI_Comm (NgMPI_Comm && c)
+      : comm(c.comm), valid_comm(c.valid_comm), refcount(c.refcount),
+        rank(c.rank), size(c.size)
+    {
+      c.refcount = nullptr;
+    }
+
+    ~NgMPI_Comm()
+    {
+      if (refcount)
+        if (--(*refcount) == 0)
+          NG_MPI_Comm_free(&comm);
+    }
+
+    bool ValidCommunicator() const
+    {
+      return valid_comm;
+    }
+
+    NgMPI_Comm & operator= (const NgMPI_Comm & c)
+    {
+      if (refcount)
+        if (--(*refcount) == 0)
+          NG_MPI_Comm_free(&comm);
+
+      refcount = c.refcount;
+      if (refcount) (*refcount)++;
+      comm = c.comm;
+      valid_comm = c.valid_comm;
+      size = c.size;
+      rank = c.rank;
+      return *this;
+    }
+
+    class InvalidCommException : public Exception {
+    public:
+      InvalidCommException() : Exception("Do not have a valid communicator") { ; }
+    };
+
+    operator NG_MPI_Comm() const {
+      if (!valid_comm) throw InvalidCommException();
+      return comm;
+    }
+
+    int Rank() const { return rank; }
+    int Size() const { return size; }
+    void Barrier() const {
+      static Timer t("MPI - Barrier"); RegionTimer reg(t);
+      if (size > 1) NG_MPI_Barrier (comm);
+    }
+
+
+    /** --- blocking P2P --- **/
+
+    template<typename T, typename T2 = decltype(GetMPIType<T>())>
+    void Send (T & val, int dest, int tag) const {
+      NG_MPI_Send (&val, 1, GetMPIType<T>(), dest, tag, comm);
+    }
+
+    void Send (const std::string & s, int dest, int tag) const {
+      NG_MPI_Send( const_cast<char*> (&s[0]), s.length(), NG_MPI_CHAR, dest, tag, comm);
+    }
+
+    template<typename T, typename TI, typename T2 = decltype(GetMPIType<T>())>
+    void Send(FlatArray<T,TI> s, int dest, int tag) const {
+      NG_MPI_Send (s.Data(), s.Size(), GetMPIType<T>(), dest, tag, comm);
+    }
+
+    template<typename T, typename T2 = decltype(GetMPIType<T>())>
+    void Recv (T & val, int src, int tag) const {
+      NG_MPI_Recv (&val, 1, GetMPIType<T>(), src, tag, comm, NG_MPI_STATUS_IGNORE);
+    }
+
+    void Recv (std::string & s, int src, int tag) const {
+      NG_MPI_Status status;
+      int len;
+      NG_MPI_Probe (src, tag, comm, &status);
+      NG_MPI_Get_count (&status, NG_MPI_CHAR, &len);
+      // s.assign (len, ' ');
+      s.resize (len);
+      NG_MPI_Recv( &s[0], len, NG_MPI_CHAR, src, tag, comm, NG_MPI_STATUS_IGNORE);
+    }
+
+
+    template <typename T, typename TI, typename T2 = decltype(GetMPIType<T>())>
+    void Recv (FlatArray <T,TI> s, int src, int tag) const {
+      NG_MPI_Recv (s.Data(), s.Size(), GetMPIType<T> (), src, tag, comm, NG_MPI_STATUS_IGNORE);
+    }
+
+    template <typename T, typename TI, typename T2 = decltype(GetMPIType<T>())>
+    void Recv (Array <T,TI> & s, int src, int tag) const
+    {
+      NG_MPI_Status status;
+      int len;
+      const NG_MPI_Datatype NG_MPI_T = GetMPIType<T> ();
+      NG_MPI_Probe (src, tag, comm, &status);
+      NG_MPI_Get_count (&status, NG_MPI_T, &len);
+      s.SetSize (len);
+      NG_MPI_Recv (s.Data(), len, NG_MPI_T, src, tag, comm, NG_MPI_STATUS_IGNORE);
+    }
+
+    /** --- non-blocking P2P --- **/
+
+    template<typename T, typename T2 = decltype(GetMPIType<T>())>
+    [[nodiscard]] NG_MPI_Request ISend (T & val, int dest, int tag) const
+    {
+      NG_MPI_Request request;
+      NG_MPI_Isend (&val, 1, GetMPIType<T>(), dest, tag, comm, &request);
+      return request;
+    }
+
+    template<typename T, typename T2 = decltype(GetMPIType<T>())>
+    [[nodiscard]] NG_MPI_Request ISend (FlatArray<T> s, int dest, int tag) const
+    {
+      NG_MPI_Request request;
+      NG_MPI_Isend (s.Data(), s.Size(), GetMPIType<T>(), dest, tag, comm, &request);
+      return request;
+    }
+
+    template<typename T, typename T2 = decltype(GetMPIType<T>())>
+    [[nodiscard]] NG_MPI_Request IRecv (T & val, int dest, int tag) const
+    {
+      NG_MPI_Request request;
+      NG_MPI_Irecv (&val, 1, GetMPIType<T>(), dest, tag, comm, &request);
+      return request;
+    }
+
+    template<typename T, typename T2 = decltype(GetMPIType<T>())>
+    [[nodiscard]] NG_MPI_Request IRecv (FlatArray<T> s, int src, int tag) const
+    {
+      NG_MPI_Request request;
+      NG_MPI_Irecv (s.Data(), s.Size(), GetMPIType<T>(), src, tag, comm, &request);
+      return request;
+    }
+
+
+    /** --- collectives --- **/
+
+    template <typename T, typename T2 = decltype(GetMPIType<T>())>
+    T Reduce (T d, const NG_MPI_Op & op, int root = 0) const
+    {
+      static Timer t("MPI - Reduce"); RegionTimer reg(t);
+      if (size == 1) return d;
+
+      T global_d;
+      NG_MPI_Reduce (&d, &global_d, 1, GetMPIType<T>(), op, root, comm);
+      return global_d;
+    }
+
+    template <typename T, typename T2 = decltype(GetMPIType<T>())>
+    T AllReduce (T d, const NG_MPI_Op & op) const
+    {
+      static Timer t("MPI - AllReduce"); RegionTimer reg(t);
+      if (size == 1) return d;
+
+      T global_d;
+      NG_MPI_Allreduce ( &d, &global_d, 1, GetMPIType<T>(), op, comm);
+      return global_d;
+    }
+
+    template <typename T, typename T2 = decltype(GetMPIType<T>())>
+    void AllReduce (FlatArray<T> d, const NG_MPI_Op & op) const
+    {
+      static Timer t("MPI - AllReduce Array"); RegionTimer reg(t);
+      if (size == 1) return;
+
+      NG_MPI_Allreduce (NG_MPI_IN_PLACE, d.Data(), d.Size(), GetMPIType<T>(), op, comm);
+    }
+
+    template <typename T, typename T2 = decltype(GetMPIType<T>())>
+    void Bcast (T & s, int root = 0) const {
+      if (size == 1) return;
+      static Timer t("MPI - Bcast"); RegionTimer reg(t);
+      NG_MPI_Bcast (&s, 1, GetMPIType<T>(), root, comm);
+    }
+
+
+    template <class T, size_t S>
+    void Bcast (std::array<T,S> & d, int root = 0) const
+    {
+      if (size == 1) return;
+      if (S != 0)
+        NG_MPI_Bcast (&d[0], S, GetMPIType<T>(), root, comm);
+    }
+
+
+    template <class T>
+    void Bcast (Array<T> & d, int root = 0) const
+    {
+      if (size == 1) return;
+
+      int ds = d.Size();
+      Bcast (ds, root);
+      if (Rank() != root) d.SetSize (ds);
+      if (ds != 0)
+        NG_MPI_Bcast (d.Data(), ds, GetMPIType<T>(), root, comm);
+    }
+
+
+    void Bcast (std::string & s, int root = 0) const
+    {
+      if (size == 1) return;
+      int len = s.length();
+      Bcast (len, root);
+      if (rank != 0) s.resize (len);
+      NG_MPI_Bcast (&s[0], len, NG_MPI_CHAR, root, comm);
+    }
+
+
+
+    template <class T, size_t S>
+    [[nodiscard]] NgMPI_Request IBcast (std::array<T,S> & d, int root = 0) const
+    {
+      NG_MPI_Request request;
+      NG_MPI_Ibcast (&d[0], S, GetMPIType<T>(), root, comm, &request);
+      return request;
+    }
+
+    template <class T>
+    [[nodiscard]] NgMPI_Request IBcast (FlatArray<T> d, int root = 0) const
+    {
+      NG_MPI_Request request;
+      int ds = d.Size();
+      NG_MPI_Ibcast (d.Data(), ds, GetMPIType<T>(), root, comm, &request);
+      return request;
+    }
+
+
+    template <typename T>
+    void AllToAll (FlatArray<T> send, FlatArray<T> recv) const
+    {
+      NG_MPI_Alltoall (send.Data(), 1, GetMPIType<T>(),
+                       recv.Data(), 1, GetMPIType<T>(), comm);
+    }
+
+
+    template <typename T>
+    void ScatterRoot (FlatArray<T> send) const
+    {
+      if (size == 1) return;
+      NG_MPI_Scatter (send.Data(), 1, GetMPIType<T>(),
+                      NG_MPI_IN_PLACE, -1, GetMPIType<T>(), 0, comm);
+    }
+
+    template <typename T>
+    void Scatter (T & recv) const
+    {
+      if (size == 1) return;
+      NG_MPI_Scatter (NULL, 0, GetMPIType<T>(),
+                      &recv, 1, GetMPIType<T>(), 0, comm);
+    }
+
+    template <typename T>
+    void GatherRoot (FlatArray<T> recv) const
+    {
+      recv[0] = T(0);
+      if (size == 1) return;
+      NG_MPI_Gather (NG_MPI_IN_PLACE, 1, GetMPIType<T>(),
+                     recv.Data(), 1, GetMPIType<T>(), 0, comm);
+    }
+
+    template <typename T>
+    void Gather (T send) const
+    {
+      if (size == 1) return;
+      NG_MPI_Gather (&send, 1, GetMPIType<T>(),
+                     NULL, 1, GetMPIType<T>(), 0, comm);
+    }
+
+
+    template <typename T>
+    void AllGather (T val, FlatArray<T> recv) const
+    {
+      if (size == 1)
+        {
+          recv[0] = val;
+          return;
+        }
+      NG_MPI_Allgather (&val, 1, GetMPIType<T>(),
+                        recv.Data(), 1, GetMPIType<T>(),
+                        comm);
+    }
+
+
+
+    template <typename T>
+    void ExchangeTable (DynamicTable<T> & send_data,
+                        DynamicTable<T> & recv_data, int tag)
+    {
+      Array<int> send_sizes(size);
+      Array<int> recv_sizes(size);
+
+      for (int i = 0; i < size; i++)
+        send_sizes[i] = send_data[i].Size();
+
+      AllToAll (send_sizes, recv_sizes);
+
+      recv_data = DynamicTable<T> (recv_sizes, true);
+
+      NgMPI_Requests requests;
+      for (int dest = 0; dest < size; dest++)
+        if (dest != rank && send_data[dest].Size())
+          requests += ISend (FlatArray<T>(send_data[dest]), dest, tag);
+
+      for (int dest = 0; dest < size; dest++)
+        if (dest != rank && recv_data[dest].Size())
+          requests += IRecv (FlatArray<T>(recv_data[dest]), dest, tag);
+
+      requests.WaitAll();
+    }
+
+
+
+
+
+    NgMPI_Comm SubCommunicator (FlatArray<int> procs) const
+    {
+      NG_MPI_Comm subcomm;
+      NG_MPI_Group gcomm, gsubcomm;
+      NG_MPI_Comm_group(comm, &gcomm);
+      NG_MPI_Group_incl(gcomm, procs.Size(), procs.Data(), &gsubcomm);
+      NG_MPI_Comm_create_group(comm, gsubcomm, 4242, &subcomm);
+      return NgMPI_Comm(subcomm, true);
+    }
+
+  }; // class NgMPI_Comm
+
+#else // PARALLEL
+  class NG_MPI_Comm {
+    int nr;
+  public:
+    NG_MPI_Comm (int _nr = 0) : nr(_nr) { ; }
+    operator int() const { return nr; }
+    bool operator== (NG_MPI_Comm c2) const { return nr == c2.nr; }
+  };
+  static NG_MPI_Comm NG_MPI_COMM_WORLD = 12345, NG_MPI_COMM_NULL = 10000;
+
+  typedef int NG_MPI_Op;
+  typedef int NG_MPI_Datatype;
+  typedef int NG_MPI_Request;
+
+  enum { NG_MPI_SUM = 0, NG_MPI_MIN = 1, NG_MPI_MAX = 2, NG_MPI_LOR = 4711 };
+
+  inline void NG_MPI_Type_contiguous ( int, NG_MPI_Datatype, NG_MPI_Datatype*) { ; }
+  inline void NG_MPI_Type_commit ( NG_MPI_Datatype * ) { ; }
+
+  template <class T> struct MPI_typetrait {
+    static NG_MPI_Datatype MPIType () { return -1; }
+  };
+  template <class T, class T2=void>
+  inline NG_MPI_Datatype GetMPIType () { return -1; }
+
+  class NgMPI_Request {
+  public:
+    NgMPI_Request() = default;
+    NgMPI_Request(NgMPI_Request &&) { ; }
+    NgMPI_Request(NG_MPI_Request &&) { ; }
+  };
+  class NgMPI_Requests
+  {
+  public:
+    NgMPI_Requests & operator+= (NgMPI_Request &&) { return *this; }
+    NgMPI_Requests & operator+= (NG_MPI_Request r) { return *this; }
+    void Reset() { ; }
+    void WaitAll() { ; }
+    int WaitAny() { return 0; }
+  };
+
+  class NgMPI_Comm
+  {
+
+  public:
+    NgMPI_Comm () { ; }
+    NgMPI_Comm (NG_MPI_Comm _comm, bool owns = false) { ; }
+
+    size_t Rank() const { return 0; }
+    size_t Size() const { return 1; }
+    bool ValidCommunicator() const { return false; }
+    void Barrier() const { ; }
+    operator NG_MPI_Comm() const { return NG_MPI_Comm(); }
+
+    template<typename T>
+    void Send( T & val, int dest, int tag) const { ; }
+
+    template<typename T>
+    void Send(FlatArray<T> s, int dest, int tag) const { ; }
+
+    template<typename T>
+    void Recv (T & val, int src, int tag) const { ; }
+
+    template <typename T>
+    void Recv (FlatArray <T> s, int src, int tag) const { ; }
+
+    template <typename T>
+    void Recv (Array <T> & s, int src, int tag) const { ; }
+
+    template<typename T>
+    NG_MPI_Request ISend (T & val, int dest, int tag) const { return 0; }
+
+    template<typename T>
+    NG_MPI_Request ISend (FlatArray<T> s, int dest, int tag) const { return 0; }
+
+    template<typename T>
+    NG_MPI_Request IRecv (T & val, int dest, int tag) const { return 0; }
+
+    template<typename T>
+    NG_MPI_Request IRecv (FlatArray<T> s, int src, int tag) const { return 0; }
+
+    template <typename T>
+    T Reduce (T d, const NG_MPI_Op & op, int root = 0) const { return d; }
+
+    template <typename T>
+    T AllReduce (T d, const NG_MPI_Op & op) const { return d; }
+
+    template <typename T>
+    void AllReduce (FlatArray<T> d, const NG_MPI_Op & op) const { ; }
+
+    template <typename T>
+    void Bcast (T & s, int root = 0) const { ; }
+
+    template <class T, size_t S>
+    void Bcast (std::array<T,S> & d, int root = 0) const {}
+
+    template <class T>
+    void Bcast (Array<T> & d, int root = 0) const { ; }
+
+    template <class T, size_t S>
+    NG_MPI_Request IBcast (std::array<T,S> & d, int root = 0) const { return 0; }
+
+    template <class T>
+    NG_MPI_Request IBcast (FlatArray<T> d, int root = 0) const { return 0; }
+
+    template <typename T>
+    void AllGather (T val, FlatArray<T> recv) const
+    {
+      recv[0] = val;
+    }
+
+    template <typename T>
+    void ExchangeTable (DynamicTable<T> & send_data,
+                        DynamicTable<T> & recv_data, int tag) { ; }
+
+
+    NgMPI_Comm SubCommunicator (FlatArray<int> procs) const
+    { return *this; }
+  };
+
+  inline void MyMPI_WaitAll (FlatArray<NG_MPI_Request> requests) { ; }
+  inline int MyMPI_WaitAny (FlatArray<NG_MPI_Request> requests) { return 0; }
+
+#endif // PARALLEL
+
+} // namespace ngcore
+
+#endif // NGCORE_MPIWRAPPER_HPP
+
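For orientation, a minimal usage sketch of the NgMPI_Comm wrapper declared in the header above. It is illustrative only and not part of the wheel contents; the include path, the caller having initialized MPI in a PARALLEL build, and the availability of NG_MPI_COMM_WORLD and NG_MPI_SUM (provided by ng_mpi.hpp in the parallel case, and by the serial stubs above otherwise) are assumptions.

// Illustrative sketch, not shipped with the package.
#include "core/mpi_wrapper.hpp"   // path assumed; normally pulled in via ngcore.hpp

using namespace ngcore;

// Sum a per-rank value and broadcast an array from rank 0.
double demo (const NgMPI_Comm & comm)
{
  int id = comm.Rank();                               // 0 in a serial build
  double local = 1.0 + id;
  double total = comm.AllReduce (local, NG_MPI_SUM);  // same result on every rank

  Array<int> data;
  if (id == 0)
    {
      data.SetSize (3);
      for (size_t i = 0; i < data.Size(); i++)
        data[i] = 7;
    }
  comm.Bcast (data, 0);   // non-root ranks are resized to the broadcast length

  comm.Barrier();
  return total;
}

In the serial (non-PARALLEL) build the same code compiles against the stub classes at the bottom of the header, where every call is a no-op and Rank()/Size() return 0/1.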