medcoupling 9.13.0__cp310-cp310-win_amd64.whl → 9.15.0__cp310-cp310-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
MEDLoaderFinalize.py CHANGED
@@ -1,5 +1,5 @@
1
1
  # -*- coding: iso-8859-1 -*-
2
- # Copyright (C) 2023-2024 CEA, EDF
2
+ # Copyright (C) 2023-2025 CEA, EDF
3
3
  #
4
4
  # This library is free software; you can redistribute it and/or
5
5
  # modify it under the terms of the GNU Lesser General Public
@@ -18,19 +18,631 @@
18
18
  # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
19
19
  #
20
20
 
21
+ import logging
22
+
23
+
24
+ def MEDFileUMeshFuseNodesAndCells(
25
+ self, compType=2, eps=1e-6, logLev=logging.INFO, infoWrapNodes=None
26
+ ):
27
+ """
28
+ Method fusing the nodes of this mesh, then fusing its cells. Fusion is driven by eps and compType.
29
+
30
+ :param compType: see the MEDCouplingPointSet.zipConnectivityTraducer method for explanations
31
+ :param eps: see DataArrayDouble.findCommonTuples for explanations.
32
+ :param logLev: Integer specifying log level
33
+ :param n2oHolder: Optional output param storing, for each level ext, the n2o conversion applied during the transformation. The storage should follow the dict concept: key is the level ext, value is the associated n2o array.
34
+ :param infoWrapNodes: Optional input. If provided, contains a tuple of size 4 ( cNode, ciNodes, o2nNodes, n2oNodes ) fully defining the merge of nodes.
35
+ :return: MEDFileUMesh instance containing the result of nodes and cells fusion
36
+ """
37
+ mmOut, _ = MEDFileUMeshFuseNodesAndCellsAdv(
38
+ self, compType, eps, logLev, infoWrapNodes
39
+ )
40
+ return mmOut
41
+
42
+
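
A minimal usage sketch for the helper above, assuming it ends up bound as MEDFileUMesh.fuseNodesAndCells (the name used by the docstrings and by FuseCellsAndNodesInMEDFile further below) and that "mesh.med" holds a single unstructured mesh; file names are illustrative:

    import logging
    import MEDLoader as ml

    mm = ml.MEDFileMesh.New("mesh.med")      # hypothetical input MED file
    fused = mm.fuseNodesAndCells(compType=2, eps=1e-6, logLev=logging.INFO)
    fused.write("mesh_fused.med", 2)         # mode 2: write the file from scratch
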
43
+ def getLogger(level=logging.INFO):
44
+ FORMAT = "%(levelname)s : %(asctime)s : [%(filename)s:%(funcName)s:%(lineno)s] : %(message)s"
45
+ logging.basicConfig(format=FORMAT, level=level)
46
+ return logging.getLogger()
47
+ logger = getLogger(logLev)
48
+
49
+
50
+ def MEDFileUMeshFuseNodesAndCellsAdv(
51
+ self, compType=2, eps=1e-6, logLev=logging.INFO, infoWrapNodes=None
52
+ ):
53
+ """
54
+ Same as MEDFileUMeshFuseNodesAndCells, except that the n2o conversion arrays are also returned.
55
+
56
+ :return: a tuple of size 2. The first element is the MEDFileUMesh instance containing the result of the nodes and cells fusion; the 2nd element is a dict storing, for each level ext, the n2o conversion applied during the transformation: key is the level ext, value is the associated n2o array.
57
+ """
58
+ import MEDLoader as ml
59
+
60
+ def updateMap(
61
+ mm: ml.MEDFileUMesh, lev: int, famMap: ml.DataArrayInt, famMapI: ml.DataArrayInt
62
+ ):
63
+ """
64
+ mm instance to be updated
65
+ """
66
+
67
+ def famIdManager(lev, famId):
68
+ if lev <= 0:
69
+ return -famId
70
+ else:
71
+ return famId
72
+
73
+ nbOfPartSetToBeUpdated = len(famMapI) - 1
74
+ for partSetId in range(nbOfPartSetToBeUpdated):
75
+ newFamId = famIdManager(lev, famMap[famMapI[partSetId]])
76
+ newFamName = f"Family_{newFamId}"
77
+ logger.debug(f"For level {lev} new family : {newFamId}")
78
+ mm.addFamily(newFamName, newFamId)
79
+ for famId in famMap[famMapI[partSetId] + 1 : famMapI[partSetId + 1]]:
80
+ zeFamId = famIdManager(lev, int(famId))
81
+ if not mm.existsFamily(zeFamId):
82
+ continue
83
+ grpsToBeUpdated = mm.getGroupsOnFamily(mm.getFamilyNameGivenId(zeFamId))
84
+ for grpToBeUpdated in grpsToBeUpdated:
85
+ mm.addFamilyOnGrp(grpToBeUpdated, newFamName)
86
+ pass
87
+
88
+ n2oHolder = {}
89
+ logger = getLogger(level=logLev)
90
+ initNbNodes = len(self.getCoords())
91
+ if infoWrapNodes is None:
92
+ logger.info(f"No n2onodes given. Trying to compute it using given eps = {eps}")
93
+ logger.info(f"Begin merging nodes with eps = {eps}")
94
+ cNode, ciNodes = self.getCoords().findCommonTuples(eps)
95
+ logger.info(
96
+ f"End of merging nodes with eps = {eps} : Nb of nodes groups to be merged : {len(ciNodes) - 1} / {self.getNumberOfNodes()}"
97
+ )
98
+ o2nNodes, newNbNodes = ml.DataArrayInt.ConvertIndexArrayToO2N(
99
+ initNbNodes, cNode, ciNodes
100
+ )
101
+ n2oNodes = o2nNodes.invertArrayO2N2N2O(newNbNodes)
102
+ else:
103
+ cNode, ciNodes, o2nNodes, n2oNodes = infoWrapNodes
104
+ newCoords = self.getCoords()[n2oNodes]
105
+ n2oHolder[1] = n2oNodes
106
+ # creation of the output MEDFileUMesh
107
+ mmOut = ml.MEDFileUMesh()
108
+ mmOut.copyFamGrpMapsFrom(self)
109
+
110
+ for lev in self.getNonEmptyLevels():
111
+ logger.debug(f"Begin level {lev}")
112
+ m1 = self[lev].deepCopy()
113
+ logger.debug(f"Begin renumbering connectivity of level {lev}")
114
+ m1.renumberNodesInConn(o2nNodes)
115
+ logger.debug(f"End renumbering connectivity of level {lev}")
116
+ m1.setCoords(newCoords)
117
+ logger.info(f"Begin of finding of same cells of level {lev}")
118
+ cce, ccei = m1.findCommonCells(compType, 0)
119
+ logger.info(
120
+ f"End of finding of same cells of level {lev} : Nb of cells groups to be merged : {len(ccei) - 1} / {m1.getNumberOfCells()}"
121
+ )
122
+ famsCell = self.getFamilyFieldAtLevel(lev)
123
+ if famsCell:
124
+ famsCell = -famsCell
125
+ famsMergedCell, famMap, famMapI = famsCell.forThisAsPartitionBuildReduction(
126
+ cce, ccei
127
+ ) # <- method updating family field array
128
+ updateMap(mmOut, lev, famMap, famMapI)
129
+ famsMergedCell = -famsMergedCell
130
+ o2nCells, newNbCells = ml.DataArrayInt.ConvertIndexArrayToO2N(
131
+ m1.getNumberOfCells(), cce, ccei
132
+ )
133
+ n2oCells = o2nCells.invertArrayO2N2N2O(newNbCells)
134
+ n2oHolder[lev] = n2oCells
135
+ m1 = m1[n2oCells]
136
+ m1.setCoords(newCoords)
137
+ m1.setName(self.getName())
138
+ mmOut[lev] = m1
139
+ if famsCell:
140
+ mmOut.setFamilyFieldArr(lev, famsMergedCell)
141
+
142
+ famsNode = self.getFamilyFieldAtLevel(1)
143
+ if famsNode:
144
+ famsMergedNode, famMap, famMapI = famsNode.forThisAsPartitionBuildReduction(
145
+ cNode, ciNodes
146
+ )
147
+ updateMap(mmOut, 1, famMap, famMapI)
148
+ mmOut.setFamilyFieldArr(1, famsMergedNode)
149
+ return mmOut, n2oHolder
150
+
151
+
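
A sketch of the Adv variant above, assuming it is bound as MEDFileUMesh.fuseNodesAndCellsAdv (the spelling used inside FuseCellsAndNodesInMEDFile further below); the returned n2o dict is used to shrink a hypothetical per-cell array, the same pattern as reduceOnCells below:

    import logging
    import MEDLoader as ml

    mm = ml.MEDFileMesh.New("mesh.med")                    # hypothetical input
    fused, n2os = mm.fuseNodesAndCellsAdv(2, 1e-6, logging.INFO)
    # n2os : dict mapping level ext -> n2o DataArrayInt (key 1 stands for nodes)
    nbCells = mm[0].getNumberOfCells()
    cellData = ml.DataArrayInt(list(range(nbCells)))       # hypothetical per-cell values
    cellDataFused = cellData[n2os[0]]                      # one value kept per merged cell
    keptNodeOldIds = n2os[1]                               # old ids of the nodes kept after merge
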
152
+ def FindIdFromPathAndPattern(fname, pat):
153
+ import re
154
+ from pathlib import Path
155
+
156
+ patRe = Path(pat).name.replace("*", "([\d]+)")
157
+ patReRe = re.compile(patRe)
158
+ m = patReRe.match(Path(fname).name)
159
+ if not m:
160
+ raise RuntimeError("Unrecognized pattern {} in file {}".format(pat, fname))
161
+ return int(m.group(1))
162
+
163
+
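
A small sketch of the helper above: it extracts the integer standing where the '*' sits in the pattern (typically the part rank), which is how the files to merge are ordered below. Reaching it through the MEDLoader namespace is an assumption (this file is appended to the generated MEDLoader module):

    import MEDLoader as ml

    rank = ml.FindIdFromPathAndPattern("out/mesh_12.med", "out/mesh_*.med")  # access path assumed
    print(rank)  # -> 12
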
164
+ def GetNodesFusionInfoFromJointsOf(pat: str):
165
+ """
166
+ [EDF32671]. This method expects that each MED file matching the pat pattern contains joints with a correspondence on NODES. If so, an n2o conversion array is computed and returned.
167
+ This output may be used by MEDFileUMesh.fuseNodesAndCells.
168
+
169
+
170
+ :param pat: Pattern pointing to the MED files that are candidates for fusion.
171
+ :return: tuple of size 4 ( cNode, ciNodes, o2nNodes, n2oNodes ) fully defining the merge of nodes ( may be useful for MEDFileUMesh.fuseNodesAndCells )
172
+ """
173
+
174
+ import re
175
+ from glob import glob
176
+ import MEDLoader as ml
177
+
178
+ def GetAllCommonNodesRegardingJoints(iPart, fileToMerge):
179
+ """
180
+ [EDF32671] : Return a list of 2-component arrays giving the correspondences. The component names give the rank of the attached proc.
181
+ """
182
+
183
+ def RetriveCorrespondanceForOneJoint(iPart, joint):
184
+ """
185
+ Returns, for one joint, the node correspondence as a 2-component array, or None when the other proc has a lower rank.
186
+ """
187
+ if joint.getNumberOfSteps() != 1:
188
+ raise NotImplementedError("Just single timestep joints are supported")
189
+ # p0 is for receiving proc. p1 is for sending proc
190
+ p0, p1 = [int(p) for p in re.split("[\s]+", joint.getJointName())]
191
+ if (p0 != iPart) and (p1 != iPart):
192
+ raise RuntimeError(
193
+ "Unexpected joint name {!r} in proc {}".format(
194
+ joint.getJointName(), iPart
195
+ )
196
+ )
197
+ # Find correspondence on NODES. Expected to have exactly one
198
+ cors = [
199
+ cor
200
+ for cor in [
201
+ joint[0].getCorrespondenceAtPos(i)
202
+ for i in range(joint[0].getNumberOfCorrespondences())
203
+ ]
204
+ if cor.getLocalGeometryType() == ml.NORM_ERROR
205
+ ]
206
+ if len(cors) != 1:
207
+ raise RuntimeError(
208
+ "No correspondances lying on NODES in {}".format(fileToMerge)
209
+ )
210
+ cor = cors[0].getCorrespondence()
211
+ # put correspondence cor into the right shape : 0-based ids (C format) and 2 components
212
+ cor = cor - 1
213
+ cor.rearrange(2)
214
+ #
215
+ if p0 == iPart:
216
+ # receiving
217
+ pOther = p1
218
+ else:
219
+ # sending
220
+ pOther = p0
221
+ if pOther < iPart:
222
+ return None
223
+ cor.setInfoOnComponent(0, str(iPart))
224
+ cor.setInfoOnComponent(1, str(pOther))
225
+ return cor
226
+
227
+ allMeshNames = ml.GetMeshNames(fileToMerge)
228
+ if len(allMeshNames) != 1:
229
+ raise NotImplementedError(
230
+ "{} contains not exactly one mesh".format(fileToMerge)
231
+ )
232
+ joints = ml.MEDFileJoints(fileToMerge, allMeshNames[0])
233
+
234
+ ret = [
235
+ RetriveCorrespondanceForOneJoint(iPart, joints[iJoint])
236
+ for iJoint in range(joints.getNumberOfJoints())
237
+ ]
238
+ return [elt for elt in ret if elt is not None]
239
+
240
+ filesToMerge = sorted(glob(pat), key=lambda x: FindIdFromPathAndPattern(x, pat))
241
+
242
+ nbNodesPerProc = []
243
+ allNodesCorr = []
244
+
245
+ for fileToMerge in filesToMerge:
246
+ iPart = FindIdFromPathAndPattern(fileToMerge, pat)
247
+ allMeshNames = ml.GetMeshNames(fileToMerge)
248
+ if len(allMeshNames) != 1:
249
+ raise NotImplementedError(
250
+ "{} contains not exactly one mesh".format(fileToMerge)
251
+ )
252
+ _, _, _, curNbNodes = ml.GetUMeshGlobalInfo(fileToMerge, allMeshNames[0])
253
+ curNodeCorr = GetAllCommonNodesRegardingJoints(iPart, fileToMerge)
254
+ allNodesCorr += curNodeCorr
255
+ nbNodesPerProc.append(curNbNodes)
256
+
257
+ # apply node offsets
258
+
259
+ nodeOffsets = ml.DataArrayInt(nbNodesPerProc)
260
+ nodeOffsets.computeOffsetsFull()
261
+ for elt in allNodesCorr:
262
+ elt[:, 0] += nodeOffsets[int(elt.getInfoOnComponent(0))]
263
+ elt[:, 1] += nodeOffsets[int(elt.getInfoOnComponent(1))]
264
+ c, ci = ml.DataArrayInt.Aggregate(allNodesCorr).fromListOfPairsToIndexArray()
265
+ totalNbOfNodesWithDup = sum(nbNodesPerProc)
266
+ o2nNodes, newNbNodes = ml.DataArrayInt.ConvertIndexArrayToO2N(
267
+ totalNbOfNodesWithDup, c, ci
268
+ )
269
+ n2oNodes = o2nNodes.invertArrayO2N2N2O(newNbNodes)
270
+ return c, ci, o2nNodes, n2oNodes
271
+
272
+
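
A sketch of the joint-based helper above, on hypothetical per-rank files result_0.med, result_1.med, ... written with joints on NODES; the returned tuple is exactly what fuseNodesAndCells accepts as infoWrapNodes (the ml.* access path is an assumption, as above):

    import MEDLoader as ml

    infoWrapNodes = ml.GetNodesFusionInfoFromJointsOf("result_*.med")  # access path assumed
    cNode, ciNodes, o2nNodes, n2oNodes = infoWrapNodes
    # n2oNodes gives, for each node kept after the merge, its id in the aggregated (duplicated) numbering
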
273
+ def AggregateMEDFilesNoProfilesNoFusion(pat: str, fnameOut: str, logLev=logging.INFO):
274
+ """
275
+ This method is useful to aggregate split MED files into a single one.
276
+
277
+ This method fuses the content of the MED files matching pat and puts the result into fnameOut. For the moment profiles are not managed, and only one mesh is supported. All MED files
278
+ matching pat are expected to have the same structure.
279
+
280
+ Pay attention: nodes and cells may be duplicated by this method. To remove cell/node duplication, call fuseCellsAndNodes
281
+
282
+ :param pat: pattern of MED files to be aggregated
283
+ :param fnameOut: output file storing the result
284
+ """
285
+ import MEDLoader as ml
286
+ import contextlib
287
+ from glob import glob
288
+ from distutils.version import StrictVersion
289
+
290
+ logger = getLogger(logLev)
291
+ filesToMerge = sorted(glob(pat), key=lambda x: FindIdFromPathAndPattern(x, pat))
292
+ inpVersion = StrictVersion(ml.MEDFileVersionOfFileStr(filesToMerge[0])).version
293
+ meshes = [ml.MEDFileMesh.New(elt) for elt in filesToMerge]
294
+ mm = ml.MEDFileUMesh.Aggregate(meshes)
295
+ mm.writeXX(fnameOut, 2, *inpVersion)
296
+ allFields = ml.GetAllFieldNames(filesToMerge[0])
297
+ ## Very important: we check that there are no profiles
298
+ for elt in allFields:
299
+ f1ts = ml.MEDFileField1TS(filesToMerge[0], elt)
300
+ assert len(f1ts.getPflsReallyUsed()) == 0
301
+ ##
302
+
303
+ fmts = [
304
+ [ml.MEDFileFieldMultiTS(fn, fieldName, False) for fn in filesToMerge]
305
+ for fieldName in allFields
306
+ ]
307
+
308
+ for iField, listOfFmts in enumerate(fmts):
309
+ refField = listOfFmts[0]
310
+ nbTs = len(refField)
311
+ for iTs in range(nbTs):
312
+ with contextlib.ExitStack() as stack:
313
+ for iPart in range(len(listOfFmts)):
314
+ stack.enter_context(listOfFmts[iPart][iTs])
315
+ logger.info(
316
+ f"Dealing field {refField.getName()!r} time step {listOfFmts[0][iTs].getTime()[2]}"
317
+ )
318
+ mcf = [
319
+ fmts[iTs].field(meshes[iPart])
320
+ for iPart, fmts in enumerate(listOfFmts)
321
+ ]
322
+ fagg = ml.MEDCouplingFieldDouble.MergeFields(mcf)
323
+ if fagg.getDiscretization().getEnum() != ml.ON_NODES:
324
+ m = fagg.getMesh().deepCopy()
325
+ o2n = m.sortCellsInMEDFileFrmt()
326
+ n2o = o2n.invertArrayO2N2N2O(m.getNumberOfCells())
327
+ fagg = fagg[n2o]
328
+ f1tsOut = ml.MEDFileField1TS()
329
+ f1tsOut.setFieldNoProfileSBT(fagg)
330
+ f1tsOut.writeXX(fnameOut, 0, *inpVersion)
331
+
332
+
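
A sketch of the aggregation helper above, on the same hypothetical per-rank files; the result still contains the nodes/cells duplicated at the part interfaces (ml.* access path assumed, as above):

    import logging
    import MEDLoader as ml

    ml.AggregateMEDFilesNoProfilesNoFusion("result_*.med", "result_agg.med", logging.INFO)
    # result_agg.med holds one mesh and all fields, with duplicated nodes/cells at the part interfaces
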
333
+ def FuseCellsAndNodesInMEDFile(
334
+ fnameIn, fnameOut, compType=2, eps=1e-6, logLev=logging.INFO, infoWrapNodes=None
335
+ ):
336
+ """
337
+ This method reads the fnameIn MED file, performs the fusion operation and writes the result into the fnameOut MED file.
338
+
339
+ Warning : fnameIn and fnameOut are expected to be separate files.
340
+
341
+ See MEDFileUMesh.fuseNodesAndCells for doc of other params.
342
+ """
343
+ from distutils.version import StrictVersion
344
+ import MEDLoader as ml
345
+
346
+ def reduceOnCells(fIn, n2os):
347
+ fOut = fIn[n2os[0]]
348
+ return fOut
349
+
350
+ def reduceOnNodes(fIn, n2os):
351
+ fIn.setArray(fIn.getArray()[n2os[1]])
352
+ return fIn
353
+
354
+ inpVersion = StrictVersion(ml.MEDFileVersionOfFileStr(fnameIn)).version
355
+ logger = getLogger(logLev)
356
+ mm = ml.MEDFileMesh.New(fnameIn)
357
+ mm.removeOrphanFamilies()
358
+ mmOut, n2os = mm.fuseNodesAndCellsAdv(compType, eps, logLev, infoWrapNodes)
359
+ allFields = ml.GetAllFieldNames(fnameIn)
360
+ mmOut.writeXX(fnameOut, 2, *inpVersion)
361
+ logger.info(f"Writing mesh into {fnameOut}")
362
+ fmtss = [
363
+ ml.MEDFileFieldMultiTS(fnameIn, fieldName, False) for fieldName in allFields
364
+ ]
365
+ for fmts in fmtss:
366
+ for f1ts in fmts:
367
+ with f1ts:
368
+ logger.info(
369
+ f"Dealing field {f1ts.getName()!r} time step {f1ts.getTime()[2]}"
370
+ )
371
+ fIn = f1ts.field(mm)
372
+ if fIn.getDiscretization().getEnum() == ml.ON_NODES:
373
+ fOut = reduceOnNodes(fIn, n2os)
374
+ else:
375
+ fOut = reduceOnCells(fIn, n2os)
376
+ f1tsOut = ml.MEDFileField1TS()
377
+ f1tsOut.setFieldNoProfileSBT(fOut)
378
+ f1tsOut.writeXX(fnameOut, 0, *inpVersion)
379
+
380
+
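
A sketch of the file-level fusion helper above, chained after the aggregation; passing the joint information computed earlier makes the node merge follow the joints rather than the eps-based search (file names and the ml.* access path are assumptions):

    import logging
    import MEDLoader as ml

    info = ml.GetNodesFusionInfoFromJointsOf("result_*.med")   # optional, may be left as None
    ml.FuseCellsAndNodesInMEDFile("result_agg.med", "result_merged.med",
                                  compType=2, eps=1e-6,
                                  logLev=logging.INFO, infoWrapNodes=info)
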
381
+ def MEDFileUMeshTetrahedrize(self, splitType, logLev=logging.INFO):
382
+ """
383
+ [EDF30178] : Method splitting hexas and prisms into tetras, and the underlying quads into the corresponding triangles
384
+ """
385
+ import MEDLoader as ml
386
+
387
+ def getLogger(level=logging.INFO):
388
+ FORMAT = "%(levelname)s : %(asctime)s : [%(filename)s:%(funcName)s:%(lineno)s] : %(message)s"
389
+ logging.basicConfig(format=FORMAT, level=level)
390
+ return logging.getLogger()
391
+
392
+ logger = getLogger(logLev)
393
+
394
+ def HexaSpliter(splitType):
395
+ """
396
+ :param splitType : see MEDCouplingUMesh.simplexize
397
+ """
398
+ m3 = ml.MEDCouplingUMesh("", 3)
399
+ m3.allocateCells()
400
+ m3.insertNextCell(ml.NORM_HEXA8, list(range(8)))
401
+ m3.simplexize(splitType)
402
+ m3 = ml.MEDCoupling1SGTUMesh(m3)
403
+ conn = m3.getNodalConnectivity()
404
+ conn.rearrange(4)
405
+ return conn.getValuesAsTuple()
406
+
407
+ def Penta6Spliter(splitType):
408
+ return [(3, 5, 4, 1), (1, 3, 5, 0), (0, 5, 1, 2)]
409
+
410
+ def SplitByType(geoType, splitType):
411
+ m = {ml.NORM_HEXA8: HexaSpliter, ml.NORM_PENTA6: Penta6Spliter}
412
+ return m[geoType](splitType)
413
+
414
+ def SplitMeshByType(splitType, m0st, famSt=None):
415
+ """
416
+ :param m0st: MEDCoupling1SGTUMesh instance to be split
417
+ :param famSt: DataArrayInt storing input family field attached to m0st
418
+ """
419
+ conn = m0st.getNodalConnectivity()[:]
420
+ conn.rearrange(m0st.getNumberOfNodesPerCell())
421
+ geoType = m0st.getCellModelEnum()
422
+ subTetra = SplitByType(geoType, splitType)
423
+ famOut = None
424
+ if famSt:
425
+ famOut = famSt.duplicateEachTupleNTimes(len(subTetra))
426
+ m0stTetras = ml.MEDCoupling1SGTUMesh(m0st.getName(), ml.NORM_TETRA4)
427
+ m0stTetras.setCoords(self.getCoords())
428
+ connTetras = ml.DataArrayInt.Meld([conn[:, elt] for elt in subTetra])
429
+ connTetras.rearrange(1)
430
+ m0stTetras.setNodalConnectivity(connTetras)
431
+ return m0stTetras.buildUnstructured(), famOut
432
+
433
+ def LocateTwoTrisForEachQuad(quads, tris):
434
+ """
435
+ This function locates, for each quad in quads, the 2 triangles among the triangles in tris.
436
+
437
+ :param quads: 4-component DataArrayInt storing the nodal connectivity of the QUAD4 cells
438
+ :param tris: 3-component DataArrayInt storing the nodal connectivity of the triangles containing the division of the QUAD4 cells to locate
439
+ """
440
+ from itertools import combinations
441
+
442
+ quads.sortPerTuple(True)
443
+ tris.sortPerTuple(True)
444
+ curCompoId = ml.DataArrayInt(len(quads))
445
+ curCompoId[:] = 0
446
+ res = ml.DataArrayInt(len(quads) * 2)
447
+ res[:] = -1
448
+ for elt in combinations(range(4), 3):
449
+ arr = ml.DataArrayInt.Aggregate([quads[:, elt], tris])
450
+ offset = len(quads)
451
+ c, ci = arr.findCommonTuples(offset)
452
+ if not ci.deltaShiftIndex().isUniform(2):
453
+ raise RuntimeError("Duplication of tris detected should never happen !")
454
+ c.rearrange(2)
455
+ if not c[:, 0].findIdsGreaterOrEqualTo(offset).empty():
456
+ raise RuntimeError("Duplication of tris detected should never happen !")
457
+ if not curCompoId[c[:, 0]].findIdsGreaterOrEqualTo(2).empty():
458
+ raise RuntimeError(
459
+ "Internal Error : Quad4 is mapped into more than 2 sub cell triangles ! Something is wrong ! Presence of 3D overlapping cells ?"
460
+ )
461
+ res[2 * c[:, 0] + curCompoId[c[:, 0]]] = c[:, 1] - offset
462
+ curCompoId[c[:, 0]] += 1
463
+ if not curCompoId.isUniform(2):
464
+ raise RuntimeError(
465
+ "It smells very bad ! Impossible to find 2 triangles for some of quadrangles !"
466
+ )
467
+ res.rearrange(2)
468
+ return res
469
+
470
+ def deal3D(mmOut, splitType):
471
+ """
472
+ :return: 3D cells in self having a QUAD4 as subcell, candidates for splitting
473
+ """
474
+ m0 = self[0]
475
+ m0s = [ml.MEDCoupling1SGTUMesh(elt) for elt in m0.splitByType()]
476
+ fams0 = self.getFamilyFieldAtLevel(0)
477
+ outSubMesh = []
478
+ outFams = []
479
+ startCellId = 0
480
+ for m0st in m0s:
481
+ endCellId = startCellId + m0st.getNumberOfCells()
482
+ famSt = fams0[startCellId:endCellId]
483
+ geoType = m0st.getCellModelEnum()
484
+ if geoType == ml.NORM_TETRA4:
485
+ outSubMesh.append(m0st.buildUnstructured())
486
+ outFams.append(famSt)
487
+ continue
488
+ m0StSplit, famStOut = SplitMeshByType(splitType, m0st, famSt)
489
+ outFams.append(famStOut)
490
+ outSubMesh.append(m0StSplit)
491
+ startCellId = endCellId
492
+ m0tetra = ml.MEDCouplingUMesh.MergeUMeshesOnSameCoords(outSubMesh)
493
+ fam0tetra = ml.DataArrayInt.Aggregate(outFams)
494
+ m0tetra.setDescription(self.getDescription())
495
+ m0tetra.setName(self.getName())
496
+ mmOut[0] = m0tetra
497
+ mmOut.setFamilyFieldArr(0, fam0tetra)
498
+ return ml.MEDCouplingUMesh.MergeUMeshesOnSameCoords(
499
+ [
500
+ elt.buildUnstructured()
501
+ for elt in m0s
502
+ if elt.getCellModelEnum() != ml.NORM_TETRA4
503
+ ]
504
+ )
505
+
506
+ def deal2D(mmOut, meshContainingQuadsAsSubCells, splitType):
507
+ m1 = self[-1]
508
+ m1s = [ml.MEDCoupling1SGTUMesh(elt) for elt in m1.splitByType()]
509
+ managed2DTypes = [ml.NORM_TRI3, ml.NORM_QUAD4]
510
+ quads4 = [elt for elt in m1s if elt.getCellModelEnum() == ml.NORM_QUAD4]
511
+ if not all(
512
+ [elt.getCellModelEnum() in [ml.NORM_TRI3, ml.NORM_QUAD4] for elt in m1s]
513
+ ):
514
+ typesStr = [
515
+ ml.MEDCouplingUMesh.GetReprOfGeometricType(elt.getCellModelEnum())
516
+ for elt in m1s
517
+ ]
518
+ managedTypesStr = [
519
+ ml.MEDCouplingUMesh.GetReprOfGeometricType(elt)
520
+ for elt in managed2DTypes
521
+ ]
522
+ raise RuntimeError(
523
+ f"Some geotype in -1 level ( {typesStr} ) are not in managed types ( {managedTypesStr} )"
524
+ )
525
+ if len(quads4) == 1:
526
+ quads4 = quads4[0]
527
+ pass
528
+ logger.debug("Starting to deduce triangulation of quads in -1 level")
529
+ logger.debug(
530
+ "Starting to compute sub cells of 3D cells containing QUAD4 as subcell"
531
+ )
532
+ two2DCellContainingQuads, _, _, rd, rdi = (
533
+ meshContainingQuadsAsSubCells.buildDescendingConnectivity()
534
+ )
535
+ tmp = ml.MEDCouplingUMesh.MergeUMeshesOnSameCoords(
536
+ [quads4.buildUnstructured(), two2DCellContainingQuads]
537
+ )
538
+ offset = quads4.getNumberOfCells()
539
+ logger.debug("Try to reduce list of 3D cells containing QUAD4 as subcell")
540
+ cce, ccei = tmp.findCommonCells(2, offset)
541
+ if not ccei.deltaShiftIndex().isUniform(2):
542
+ raise RuntimeError("Case of fusable quad4 not managed")
543
+ cce.rearrange(2)
544
+ if not cce[:, 0].findIdsGreaterOrEqualTo(offset).empty():
545
+ raise RuntimeError("Case of fusable quad4 not managed")
546
+ cells3DToKeep, _ = ml.DataArrayInt.ExtractFromIndexedArrays(
547
+ cce[:, 1] - offset, rd, rdi
548
+ )
549
+ cells3DToKeep.sort()
550
+ cells3DToKeep = cells3DToKeep.buildUnique()
551
+ threedCellsLyingOnQuads = meshContainingQuadsAsSubCells[cells3DToKeep]
552
+ threedCellsLyingOnQuads.sortCellsInMEDFileFrmt()
553
+ logger.debug("Start to compute the most compact list of tetras")
554
+ allSubTetras = ml.MEDCouplingUMesh.MergeUMeshesOnSameCoords(
555
+ [
556
+ SplitMeshByType(splitType, ml.MEDCoupling1SGTUMesh(elt))[0]
557
+ for elt in threedCellsLyingOnQuads.splitByType()
558
+ ]
559
+ )
560
+ allSubTris = ml.MEDCoupling1SGTUMesh(
561
+ allSubTetras.buildDescendingConnectivity()[0]
562
+ )
563
+ cSubTris = allSubTris.getNodalConnectivity()[:]
564
+ cSubTris.rearrange(3)
565
+ cQuads4 = quads4.getNodalConnectivity()[:]
566
+ cQuads4.rearrange(4)
567
+ logger.debug(
568
+ "Start to find the right split of input quads to respect conformity with previous 3D splitting"
569
+ )
570
+ res = LocateTwoTrisForEachQuad(cQuads4, cSubTris)
571
+
572
+ m1Out = ml.MEDCoupling1SGTUMesh(self.getName(), ml.NORM_TRI3)
573
+ m1Out.copyTinyInfoFrom(self[0])
574
+ m1Out.setCoords(self.getCoords())
575
+ res.rearrange(1)
576
+ cSubTris[res]
577
+ connOut = cSubTris[res]
578
+ connOut.rearrange(1)
579
+ m1Out.setNodalConnectivity(connOut)
580
+ m1Out = ml.MEDCouplingUMesh.MergeUMeshesOnSameCoords(
581
+ [
582
+ elt.buildUnstructured()
583
+ for elt in m1s
584
+ if elt.getCellModelEnum() == ml.NORM_TRI3
585
+ ]
586
+ + [m1Out.buildUnstructured()]
587
+ )
588
+ m1Out.copyTinyInfoFrom(self[0])
589
+ mmOut[-1] = m1Out
590
+ famM1 = self.getFamilyFieldAtLevel(-1)
591
+ if famM1:
592
+ outFams = []
593
+ logger.debug("Start dealing families of 2D cells")
594
+ startCellId = 0
595
+ for m1st in m1s:
596
+ endCellId = startCellId + m1st.getNumberOfCells()
597
+ famSt = famM1[startCellId:endCellId]
598
+ startCellId = endCellId
599
+ geoType = m1st.getCellModelEnum()
600
+ if geoType == ml.NORM_TRI3:
601
+ outFams.append(famSt)
602
+ elif geoType == ml.NORM_QUAD4:
603
+ outFams.append(famSt.duplicateEachTupleNTimes(2))
604
+ else:
605
+ raise RuntimeError("Not managed geo type !")
606
+ mmOut.setFamilyFieldArr(-1, ml.DataArrayInt.Aggregate(outFams))
607
+
608
+ #
609
+ if self.getMeshDimension() != 3:
610
+ raise RuntimeError(
611
+ f"Expecting mesh with dimension 3 ! Dimension is {self.getMeshDimension()}"
612
+ )
613
+ mmOut = ml.MEDFileUMesh()
614
+ levs = self.getNonEmptyLevels()
615
+ logger.info("Treating 3D level")
616
+ meshContainingQuadsAsSubCells = deal3D(mmOut, splitType)
617
+ if -1 in levs:
618
+ deal2D(mmOut, meshContainingQuadsAsSubCells, splitType)
619
+ # dealing with remaining levs not impacted by tetrahedrization
620
+ for remainingLev in [elt for elt in self.getNonEmptyLevels() if elt not in [0, -1]]:
621
+ logger.debug(f"Dealing with level {remainingLev}")
622
+ mLev = self[remainingLev]
623
+ mmOut[remainingLev] = mLev
624
+ famField = self.getFamilyFieldAtLevel(remainingLev)
625
+ if famField:
626
+ mmOut.setFamilyFieldArr(remainingLev, famField)
627
+ #
628
+ mmOut.copyFamGrpMapsFrom(self)
629
+ return mmOut
630
+
631
+
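
A sketch of the tetrahedrization helper above on a hypothetical 3D mesh with HEXA8/PENTA6 cells. It assumes the PLANAR_FACE_5 split policy of MEDCouplingUMesh.simplexize is reachable through the MEDLoader namespace and calls the helper as a free function, since its bound method name is not shown here; both are assumptions:

    import logging
    import MEDLoader as ml

    mm = ml.MEDFileMesh.New("hexa_mesh.med")                       # hypothetical 3D input
    mmTetra = ml.MEDFileUMeshTetrahedrize(mm, ml.PLANAR_FACE_5, logging.INFO)  # access path assumed
    mmTetra.write("tetra_mesh.med", 2)
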
21
632
  def MEDFileUMeshReduceToCells(self, level, keepCells, removeOrphanNodes=True):
22
633
  """
23
634
  Method returning a new MEDFileUMesh, restriction of self to level and keepCell cells at this level.
24
- This method also
635
+ This method also
25
636
 
26
637
  :param level: Specifies the top level of the returned MEDFileUMesh expected
27
638
  :param keepCells: A DataArrayInt specifying cell ids at level level of self
28
639
  :param removeOrphanNodes: Specifies if orphan nodes should be removed at the end
29
-
640
+
30
641
  see also MEDFileUMesh.extractPart
31
642
  """
32
643
  import MEDLoader as ml
33
- subLevs = [l for l in self.getNonEmptyLevels() if l<=level]
644
+
645
+ subLevs = [l for l in self.getNonEmptyLevels() if l <= level]
34
646
  subMeshes = [self[lev] for lev in subLevs]
35
647
  allFamilyFields = [self.getFamilyFieldAtLevel(lev) for lev in subLevs]
36
648
  allRefMesh = subMeshes[0]
@@ -39,25 +651,27 @@ def MEDFileUMeshReduceToCells(self, level, keepCells, removeOrphanNodes=True):
39
651
  mmOut = ml.MEDFileUMesh()
40
652
  # level 0
41
653
  mmOut[0] = refMesh
42
- mmOut.setFamilyFieldArr(0,allFamilyFields[0][keepCells])
654
+ mmOut.setFamilyFieldArr(0, allFamilyFields[0][keepCells])
43
655
 
44
656
  # subLevels
45
- for curLev,meshLev,famFieldLev in zip(subLevs[1:],subMeshes[1:],allFamilyFields[1:]):
46
- allMeshLev,d,di, rd,rdi = allRefMesh.explodeMeshTo( curLev-level )
47
- a,b = allMeshLev.areCellsIncludedIn(meshLev,2)
657
+ for curLev, meshLev, famFieldLev in zip(
658
+ subLevs[1:], subMeshes[1:], allFamilyFields[1:]
659
+ ):
660
+ allMeshLev, d, di, rd, rdi = allRefMesh.explodeMeshTo(curLev - level)
661
+ a, b = allMeshLev.areCellsIncludedIn(meshLev, 2)
48
662
  if not a:
49
663
  raise RuntimeError("Error in mesh {}")
50
- dlev,dlevi = ml.DataArrayInt.ExtractFromIndexedArrays( keepCells, d,di )
664
+ dlev, dlevi = ml.DataArrayInt.ExtractFromIndexedArrays(keepCells, d, di)
51
665
  dlev2 = dlev.buildUniqueNotSorted()
52
- cellsToKeepLev = ml.DataArrayInt.BuildIntersection([dlev2,b])
666
+ cellsToKeepLev = ml.DataArrayInt.BuildIntersection([dlev2, b])
53
667
  cellsToKeepLev = b.indicesOfSubPart(cellsToKeepLev)
54
668
  cellsToKeepLev.sort()
55
669
  mmOut[curLev] = meshLev[cellsToKeepLev]
56
- mmOut.setFamilyFieldArr(curLev,famFieldLev[cellsToKeepLev])
670
+ mmOut.setFamilyFieldArr(curLev, famFieldLev[cellsToKeepLev])
57
671
 
58
672
  allFamNodes = mmOut.getFamilyFieldAtLevel(1)
59
673
  if allFamNodes:
60
- mmOut.setFamilyFieldArr(1,allFamNodes[:])
674
+ mmOut.setFamilyFieldArr(1, allFamNodes[:])
61
675
 
62
676
  if removeOrphanNodes:
63
677
  mmOut.zipCoords()