xslope 0.1.5__py3-none-any.whl → 0.1.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
xslope/mesh copy.py ADDED
@@ -0,0 +1,2962 @@
1
+ # Copyright 2025 Norman L. Jones
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ import numpy as np
16
+ from scipy.sparse import coo_matrix
17
+ from scipy.sparse.csgraph import reverse_cuthill_mckee
18
+
19
+ # Lazy import gmsh - only needed for mesh generation functions
20
+ _gmsh = None
21
+ def _get_gmsh():
22
+ global _gmsh
23
+ if _gmsh is None:
24
+ try:
25
+ import gmsh
26
+ _gmsh = gmsh
27
+ except (ImportError, OSError) as e:
28
+ error_msg = str(e)
29
+ error_repr = repr(e)
30
+ # Check for OpenGL library issues (common in Colab/headless environments)
31
+ # Check both str() and repr() to catch all variations, and check exception args
32
+ error_text = error_msg + " " + error_repr
33
+ if hasattr(e, 'args') and e.args:
34
+ error_text += " " + " ".join(str(arg) for arg in e.args)
35
+ if ("libGL" in error_text or "libGLU" in error_text):
36
+ help_msg = (
37
+ "gmsh is required for mesh generation but could not be imported due to missing OpenGL libraries. "
38
+ "This is common in headless environments like Google Colab.\n\n"
39
+ "To fix this in Google Colab, install the required system libraries first:\n"
40
+ " !apt-get update && apt-get install -y libgl1-mesa-glx libglu1-mesa\n"
41
+ "Then install gmsh:\n"
42
+ " !pip install gmsh\n\n"
43
+ "For other headless environments, install the appropriate OpenGL libraries for your system.\n"
44
+ f"Original error: {e}"
45
+ )
46
+ else:
47
+ help_msg = (
48
+ "gmsh is required for mesh generation but could not be imported. "
49
+ "If you only need limit equilibrium analysis, you can ignore this. "
50
+ "To use FEM features, install gmsh: pip install gmsh\n"
51
+ f"Original error: {e}"
52
+ )
53
+ raise ImportError(help_msg) from e
54
+ return _gmsh
55
+
56
+
57
+
58
+ def build_mesh_from_polygons(polygons, target_size, element_type='tri3', lines=None, debug=False, mesh_params=None, target_size_1d=None):
59
+ """
60
+ Build a finite element mesh with material regions using Gmsh.
61
+ Fixed version that properly handles shared boundaries between polygons.
62
+
63
+ Parameters:
64
+ polygons : List of lists of (x, y) tuples defining material boundaries
65
+ target_size : Desired element size
66
+ element_type : 'tri3' (3-node triangles), 'tri6' (6-node triangles),
67
+ 'quad4' (4-node quadrilaterals), 'quad8' (8-node quadrilaterals),
68
+ 'quad9' (9-node quadrilaterals)
69
+ lines : Optional list of lines, each defined by list of (x, y) tuples for 1D elements
70
+ debug : Enable debug output
71
+ mesh_params : Optional dictionary of GMSH meshing parameters to override defaults
72
+ target_size_1d : Optional target size for 1D elements (default None, which is set to target_size if None)
73
+
74
+ Returns:
75
+ mesh dict containing:
76
+ nodes : np.ndarray of node coordinates (n_nodes, 2)
77
+ elements : np.ndarray of 2D element vertex indices (n_elements, 9) - unused nodes set to 0
78
+ element_types: np.ndarray indicating number of nodes per 2D element (3, 4, 6, 8, or 9)
79
+ element_materials: np.ndarray of material ID for each 2D element
80
+
81
+ If lines is provided, also includes:
82
+ elements_1d : np.ndarray of 1D element vertex indices (n_elements_1d, 3) - unused nodes set to 0
83
+ element_types_1d: np.ndarray indicating element type (2 for linear, 3 for quadratic)
84
+ element_materials_1d: np.ndarray of material ID for each 1D element (line index)
85
+ """
86
+ gmsh = _get_gmsh()
87
+ from collections import defaultdict
88
+
89
+ # Set default target_size_1d if None
90
+ if target_size_1d is None:
91
+ target_size_1d = target_size
92
+ if debug:
93
+ print(f"Using default target_size_1d = target_size = {target_size_1d}")
94
+
95
+ # build a list of region ids (list of material IDs - one per polygon)
96
+ region_ids = [i for i in range(len(polygons))]
97
+
98
+ if element_type not in ['tri3', 'tri6', 'quad4', 'quad8', 'quad9']:
99
+ raise ValueError("element_type must be 'tri3', 'tri6', 'quad4', 'quad8', or 'quad9'")
100
+
101
+ # Determine if we need quadratic elements - but always generate linear first
102
+ quadratic = element_type in ['tri6', 'quad8', 'quad9']
103
+
104
+ # For quadratic elements, always start with linear base element
105
+ if quadratic:
106
+ if element_type == 'tri6':
107
+ base_element_type = 'tri3'
108
+ elif element_type in ['quad8', 'quad9']:
109
+ base_element_type = 'quad4'
110
+ if debug:
111
+ print(f"Quadratic element '{element_type}' requested: generating '{base_element_type}' first, then post-processing")
112
+ else:
113
+ base_element_type = element_type
114
+
115
+ # Adjust target_size for quads to compensate for recombination creating finer meshes
116
+ if element_type.startswith('quad'):
117
+ # Different adjustment factors based on meshing parameters
118
+ if mesh_params and 'size_factor' in mesh_params:
119
+ size_factor = mesh_params['size_factor']
120
+ else:
121
+ # Default size factors for different approaches
122
+ if mesh_params and mesh_params.get("Mesh.RecombinationAlgorithm") == 0:
123
+ size_factor = 1.2 # Fast algorithm needs less adjustment
124
+ elif mesh_params and mesh_params.get("Mesh.RecombineOptimizeTopology", 0) > 50:
125
+ size_factor = 1.8 # High optimization creates more elements
126
+ else:
127
+ size_factor = 1.4 # Default
128
+
129
+ adjusted_target_size = target_size * size_factor
130
+ if debug:
131
+ print(f"Adjusted target size for quads: {target_size} -> {adjusted_target_size} (factor: {size_factor})")
132
+ else:
133
+ adjusted_target_size = target_size
134
+
135
+ gmsh.initialize()
136
+ gmsh.option.setNumber("General.Verbosity", 4) # Reduce verbosity
137
+ gmsh.model.add("multi_region_mesh")
138
+
139
+ # Global point map to ensure shared boundaries use the same points
140
+ point_map = {} # maps (x, y) to Gmsh point tag
141
+
142
+ # Track all unique edges and their usage
143
+ edge_map = {} # maps (pt1, pt2) tuple to line tag
144
+ edge_usage = defaultdict(list) # maps edge to list of (region_id, orientation)
145
+
146
+ def add_point(x, y, size_override=None):
147
+ key = (x, y)
148
+ if key not in point_map:
149
+ point_size = size_override if size_override is not None else adjusted_target_size
150
+ tag = gmsh.model.geo.addPoint(x, y, 0, point_size)
151
+ point_map[key] = tag
152
+ return point_map[key]
153
+
154
+ def get_edge_key(pt1, pt2):
155
+ """Get canonical edge key (always smaller point first)"""
156
+ return (min(pt1, pt2), max(pt1, pt2))
157
+
158
+ # First pass: Create all points and identify short edges
159
+ polygon_data = []
160
+ short_edge_points = set() # Points that are endpoints of short edges
161
+
162
+ # Pre-pass to identify short edges - improved logic
163
+ for idx, (poly_pts, region_id) in enumerate(zip(polygons, region_ids)):
164
+ poly_pts_clean = remove_duplicate_endpoint(list(poly_pts))
165
+ for i in range(len(poly_pts_clean)):
166
+ p1 = poly_pts_clean[i]
167
+ p2 = poly_pts_clean[(i + 1) % len(poly_pts_clean)]
168
+ edge_length = ((p2[0] - p1[0])**2 + (p2[1] - p1[1])**2)**0.5
169
+
170
+ # Only mark as short edge if it's genuinely short AND not a major boundary
171
+ # Major boundaries should maintain consistent mesh sizing
172
+ is_major_boundary = False
173
+
174
+ # Check if this edge is part of a major boundary (long horizontal or vertical edge)
175
+ if abs(p2[0] - p1[0]) > adjusted_target_size * 5: # Long horizontal edge
176
+ is_major_boundary = True
177
+ elif abs(p2[1] - p1[1]) > adjusted_target_size * 5: # Long vertical edge
178
+ is_major_boundary = True
179
+
180
+ # Only apply short edge sizing if edge is genuinely short AND not a major boundary
181
+ if edge_length < adjusted_target_size and not is_major_boundary:
182
+ short_edge_points.add(p1)
183
+ short_edge_points.add(p2)
184
+ if debug:
185
+ print(f"Short edge found: {p1} to {p2}, length={edge_length:.2f}")
186
+ elif debug and edge_length < adjusted_target_size:
187
+ print(f"Short edge ignored (major boundary): {p1} to {p2}, length={edge_length:.2f}")
188
+
189
+ # Main pass: Create points with appropriate sizes
190
+ for idx, (poly_pts, region_id) in enumerate(zip(polygons, region_ids)):
191
+ poly_pts_clean = remove_duplicate_endpoint(list(poly_pts)) # make a copy
192
+ pt_tags = []
193
+ for x, y in poly_pts_clean:
194
+ # Use larger size for points on short edges to discourage subdivision
195
+ # But be more conservative about when to apply this
196
+ if (x, y) in short_edge_points:
197
+ point_size = adjusted_target_size * 2.0 # Reduced from 3.0 to 2.0
198
+ pt_tags.append(add_point(x, y, point_size))
199
+ else:
200
+ pt_tags.append(add_point(x, y))
201
+
202
+ # Track edges for this polygon
203
+ edges = []
204
+ for i in range(len(pt_tags)):
205
+ pt1 = pt_tags[i]
206
+ pt2 = pt_tags[(i + 1) % len(pt_tags)]
207
+
208
+ edge_key = get_edge_key(pt1, pt2)
209
+
210
+ # Determine orientation: True if pt1 < pt2, False otherwise
211
+ forward = (pt1 < pt2)
212
+
213
+ # Store edge usage
214
+ edge_usage[edge_key].append((region_id, forward))
215
+ edges.append((pt1, pt2, edge_key, forward))
216
+
217
+ polygon_data.append({
218
+ 'region_id': region_id,
219
+ 'pt_tags': pt_tags,
220
+ 'edges': edges
221
+ })
222
+
223
+ # Second pass: Create all unique lines and track short edges
224
+ short_edges = [] # Track short edges for later processing
225
+ for edge_key in edge_usage.keys():
226
+ pt1, pt2 = edge_key
227
+ line_tag = gmsh.model.geo.addLine(pt1, pt2)
228
+ edge_map[edge_key] = line_tag
229
+
230
+ # Calculate edge length from point coordinates
231
+ pt1_coords = None
232
+ pt2_coords = None
233
+ for (x, y), tag in point_map.items():
234
+ if tag == pt1:
235
+ pt1_coords = (x, y)
236
+ if tag == pt2:
237
+ pt2_coords = (x, y)
238
+
239
+ if pt1_coords and pt2_coords:
240
+ edge_length = ((pt2_coords[0] - pt1_coords[0])**2 + (pt2_coords[1] - pt1_coords[1])**2)**0.5
241
+
242
+ # Add transfinite constraints for long boundary edges to ensure consistent mesh sizing
243
+ # This prevents the creation of overly coarse elements along major boundaries
244
+ if edge_length > adjusted_target_size * 3: # Long edge
245
+ # Calculate how many elements should be along this edge
246
+ num_elements = max(3, int(edge_length / adjusted_target_size))
247
+ try:
248
+ gmsh.model.geo.mesh.setTransfiniteCurve(line_tag, num_elements)
249
+ if debug:
250
+ print(f"Set transfinite constraint on long edge: {pt1_coords} to {pt2_coords}, length={edge_length:.2f}, num_elements={num_elements}")
251
+ except Exception as e:
252
+ if debug:
253
+ print(f"Warning: Could not set transfinite constraint on edge {pt1_coords} to {pt2_coords}: {e}")
254
+
255
+ # Add transfinite constraints for short edges to prevent subdivision
256
+ # This forces GMSH to use exactly 2 nodes (start and end) for short edges
257
+ elif edge_length < adjusted_target_size:
258
+ try:
259
+ gmsh.model.geo.mesh.setTransfiniteCurve(line_tag, 2) # Exactly 2 nodes
260
+ if debug:
261
+ print(f"Set transfinite constraint on short edge: {pt1_coords} to {pt2_coords}, length={edge_length:.2f}, exactly 2 nodes")
262
+ except Exception as e:
263
+ if debug:
264
+ print(f"Warning: Could not set transfinite constraint on short edge {pt1_coords} to {pt2_coords}: {e}")
265
+
266
+ # Short edges are now handled by point sizing, no need for transfinite curves
267
+
268
+ # Ensure all polygon points (including intersection points) are created as GMSH points
269
+ # The intersection points were already added to polygons in build_polygons(),
270
+ # so we just need to ensure they exist as GMSH geometric entities
271
+ if lines is not None:
272
+ if debug:
273
+ print("Ensuring all polygon points (including intersections) are created as GMSH points...")
274
+
275
+ # Collect all points from all polygons to ensure they exist in GMSH
276
+ all_polygon_points = set()
277
+ for poly_data in polygon_data:
278
+ pt_tags = poly_data['pt_tags']
279
+ for tag in pt_tags:
280
+ # Find the coordinates for this point tag
281
+ for (x, y), point_tag in point_map.items():
282
+ if point_tag == tag:
283
+ all_polygon_points.add((x, y))
284
+ break
285
+
286
+ # Create any missing GMSH points
287
+ for x, y in all_polygon_points:
288
+ key = (x, y)
289
+ if key not in point_map:
290
+ pt_tag = gmsh.model.geo.addPoint(x, y, 0.0, adjusted_target_size * 0.5)
291
+ point_map[key] = pt_tag
292
+ if debug:
293
+ print(f"Created GMSH point for polygon vertex {key}: tag {pt_tag}")
294
+
295
+ if debug:
296
+ print(f"Ensured {len(all_polygon_points)} polygon points exist as GMSH entities")
297
+
298
+ # Create enhanced reinforcement lines that include intersection points from polygons
299
+ # This is essential for proper mesh generation with embedded 1D elements
300
+ enhanced_lines = []
301
+ for line_idx, line_pts in enumerate(lines):
302
+ line_pts_clean = remove_duplicate_endpoint(list(line_pts))
303
+
304
+ # Collect all points for this line: original + intersection points from polygons
305
+ all_line_points = []
306
+
307
+ # Add original line points
308
+ for x, y in line_pts_clean:
309
+ all_line_points.append((x, y, 'original'))
310
+
311
+ # Add intersection points that are on this line (from polygon data)
312
+ for poly_data in polygon_data:
313
+ pt_tags = poly_data['pt_tags']
314
+ for tag in pt_tags:
315
+ # Find the coordinates for this point tag
316
+ for (x, y), point_tag in point_map.items():
317
+ if point_tag == tag:
318
+ # Check if this point is on the reinforcement line
319
+ if is_point_on_line_segments((x, y), line_pts_clean, tolerance=1e-6):
320
+ all_line_points.append((x, y, 'intersection'))
321
+ break
322
+
323
+ # Sort all points along the line to maintain proper order
324
+ if len(all_line_points) > 1:
325
+ all_line_points.sort(key=lambda p: line_segment_parameter((p[0], p[1]), line_pts_clean[0], line_pts_clean[-1]))
326
+
327
+ # Remove duplicates (keep first occurrence)
328
+ unique_points = []
329
+ seen = set()
330
+ for x, y, point_type in all_line_points:
331
+ point_key = (round(x, 8), round(y, 8)) # Round to avoid floating point issues
332
+ if point_key not in seen:
333
+ seen.add(point_key)
334
+ unique_points.append((x, y, point_type))
335
+
336
+ # Create the enhanced line
337
+ enhanced_line = [(x, y) for x, y, _ in unique_points]
338
+ enhanced_lines.append(enhanced_line)
339
+
340
+ if debug:
341
+ print(f"Enhanced line {line_idx}: {len(line_pts_clean)} original points -> {len(enhanced_line)} total points")
342
+
343
+ # Replace original lines with enhanced lines
344
+ lines = enhanced_lines
345
+
346
+ # Create reinforcement lines as geometric constraints to force 2D mesh edges
347
+ line_data = []
348
+
349
+ if lines is not None:
350
+ for line_idx, line_pts in enumerate(lines):
351
+ # Use the enhanced line coordinates (which include intersection points)
352
+ line_pts_clean = remove_duplicate_endpoint(list(line_pts))
353
+
354
+ # Create points for this reinforcement line
355
+ line_point_tags = []
356
+
357
+ # Create all points for this line (original + intersection points)
358
+ for x, y in line_pts_clean:
359
+ key = (x, y)
360
+ if key in point_map:
361
+ line_point_tags.append((x, y, point_map[key]))
362
+ else:
363
+ # Create new point with small mesh size to ensure it's preserved
364
+ pt_tag = gmsh.model.geo.addPoint(x, y, 0.0, adjusted_target_size * 0.5)
365
+ point_map[key] = pt_tag
366
+ line_point_tags.append((x, y, pt_tag))
367
+
368
+ # Sort points along the line to maintain proper order
369
+ line_point_tags.sort(key=lambda p: line_segment_parameter((p[0], p[1]), line_pts_clean[0], line_pts_clean[-1]))
370
+
371
+ # Extract just the point tags in order
372
+ pt_tags = [tag for _, _, tag in line_point_tags]
373
+
374
+ if debug:
375
+ print(f" Line {line_idx} points: {[(x, y) for x, y, _ in line_point_tags]}")
376
+
377
+ # Create line segments as geometric constraints with controlled meshing
378
+ line_tags = []
379
+ for i in range(len(pt_tags) - 1):
380
+ pt1, pt2 = pt_tags[i], pt_tags[i + 1]
381
+
382
+ # Calculate segment length to determine number of subdivisions
383
+ coord1 = None
384
+ coord2 = None
385
+ for (x, y), tag in point_map.items():
386
+ if tag == pt1:
387
+ coord1 = (x, y)
388
+ if tag == pt2:
389
+ coord2 = (x, y)
390
+
391
+ if coord1 and coord2:
392
+ segment_length = ((coord2[0] - coord1[0])**2 + (coord2[1] - coord1[1])**2)**0.5
393
+ # Calculate number of elements needed to achieve target_size_1d
394
+ # For segments longer than target_size_1d, we want multiple elements
395
+ # For segments shorter than target_size_1d, we still want at least 2 elements
396
+ if segment_length > target_size_1d:
397
+ num_elements = max(3, int(round(segment_length / target_size_1d)))
398
+ else:
399
+ num_elements = 2
400
+
401
+ if debug:
402
+ print(f" Segment {i}: length {segment_length:.2f}, creating {num_elements} elements")
403
+
404
+ line_tag = gmsh.model.geo.addLine(pt1, pt2)
405
+ line_tags.append(line_tag)
406
+
407
+ # Set transfinite constraint to create appropriate number of nodes
408
+ try:
409
+ gmsh.model.geo.mesh.setTransfiniteCurve(line_tag, num_elements)
410
+ if debug:
411
+ print(f" Set transfinite constraint on line segment {i}: {num_elements} nodes")
412
+ except Exception as e:
413
+ if debug:
414
+ print(f" Warning: Could not set transfinite constraint on segment {i}: {e}")
415
+ else:
416
+ # Fallback: create line with default 2 nodes
417
+ line_tag = gmsh.model.geo.addLine(pt1, pt2)
418
+ line_tags.append(line_tag)
419
+
420
+ try:
421
+ gmsh.model.geo.mesh.setTransfiniteCurve(line_tag, 2)
422
+ if debug:
423
+ print(f" Set transfinite constraint on line segment {i}: 2 nodes (fallback)")
424
+ except Exception as e:
425
+ if debug:
426
+ print(f" Warning: Could not set transfinite constraint on segment {i}: {e}")
427
+
428
+ # Store line data for later 1D element extraction
429
+ # Use the enhanced line coordinates (which include intersection points)
430
+ line_data.append({
431
+ 'line_idx': line_idx,
432
+ 'line_tags': line_tags,
433
+ 'point_coords': line_pts_clean # This now contains the enhanced coordinates
434
+ })
435
+
436
+ if debug:
437
+ print(f"Created reinforcement constraint line {line_idx} with {len(line_tags)} segments: {line_pts_clean}")
438
+
439
+ # Third pass: Create surfaces using the shared lines
440
+ surface_to_region = {}
441
+
442
+ for poly_data in polygon_data:
443
+ region_id = poly_data['region_id']
444
+ edges = poly_data['edges']
445
+
446
+ line_tags = []
447
+ for pt1, pt2, edge_key, forward in edges:
448
+ line_tag = edge_map[edge_key]
449
+
450
+ # Use positive or negative line tag based on orientation
451
+ if forward:
452
+ line_tags.append(line_tag)
453
+ else:
454
+ line_tags.append(-line_tag)
455
+
456
+ # Create curve loop and surface
457
+ try:
458
+ loop = gmsh.model.geo.addCurveLoop(line_tags)
459
+ surface = gmsh.model.geo.addPlaneSurface([loop])
460
+ surface_to_region[surface] = region_id
461
+ except Exception as e:
462
+ print(f"Warning: Could not create surface for region {region_id}: {e}")
463
+ continue
464
+
465
+ # Synchronize geometry
466
+ gmsh.model.geo.synchronize()
467
+
468
+ # Force mesh edges along reinforcement lines by creating additional geometric constraints
469
+ if lines is not None:
470
+ for line_info in line_data:
471
+ line_idx = line_info['line_idx']
472
+ line_tags = line_info['line_tags']
473
+ line_pts = line_info['point_coords']
474
+
475
+ # Set transfinite constraints to force mesh edges along each line segment
476
+ # REMOVED: This was conflicting with the target_size_1d calculations above
477
+ # for i, line_tag in enumerate(line_tags):
478
+ # try:
479
+ # # Force exactly 2 nodes (start and end) to prevent subdivision
480
+ # gmsh.model.geo.mesh.setTransfiniteCurve(line_tag, 2)
481
+ # if debug:
482
+ # print(f"Set transfinite constraint on line {line_idx} segment {i}: exactly 2 nodes")
483
+ # except Exception as e:
484
+ # if debug:
485
+ # print(f"Warning: Could not set transfinite constraint on line {line_idx} segment {i}: {e}")
486
+
487
+ # Embed reinforcement lines in all surfaces to ensure they're part of the mesh
488
+ for surface in surface_to_region.keys():
489
+ try:
490
+ # Embed all line segments of this reinforcement line
491
+ gmsh.model.mesh.embed(1, line_tags, 2, surface)
492
+ if debug:
493
+ print(f"Embedded reinforcement line {line_idx} in surface {surface}")
494
+ except Exception as e:
495
+ if debug:
496
+ print(f"Could not embed line {line_idx} in surface {surface}: {e}")
497
+
498
+ # CRITICAL: Set mesh coherence to ensure shared nodes along boundaries
499
+ # This forces Gmsh to use the same nodes for shared geometric entities
500
+ gmsh.model.mesh.removeDuplicateNodes()
501
+
502
+ # Create physical groups for material regions (this helps with mesh consistency)
503
+ physical_surfaces = []
504
+ for surface, region_id in surface_to_region.items():
505
+ physical_tag = gmsh.model.addPhysicalGroup(2, [surface])
506
+ physical_surfaces.append((physical_tag, region_id))
507
+
508
+ # Create physical groups for embedded reinforcement lines
509
+ physical_lines = []
510
+ if lines is not None:
511
+ for line_info in line_data:
512
+ line_idx = line_info['line_idx']
513
+ line_tags = line_info['line_tags']
514
+ physical_tag = gmsh.model.addPhysicalGroup(1, line_tags)
515
+ physical_lines.append((physical_tag, line_idx))
516
+
517
+ # Check for potential quad4 + reinforcement line conflicts
518
+ has_reinforcement_lines = lines is not None and len(lines) > 0
519
+ wants_quads = base_element_type.startswith('quad')
520
+
521
+ # Set mesh algorithm and recombination options BEFORE generating mesh
522
+ if base_element_type.startswith('quad'):
523
+ # Check if we need to use a more robust algorithm for reinforcement lines
524
+ if has_reinforcement_lines:
525
+ if debug:
526
+ print(f"Detected quad elements with reinforcement lines.")
527
+ print(f"Using robust recombination algorithm to handle embedded line constraints.")
528
+
529
+ # Use 'fast' algorithm which is more robust with embedded constraints
530
+ default_params = {
531
+ "Mesh.Algorithm": 8, # Frontal-Delaunay for quads
532
+ "Mesh.RecombineAll": 1, # Recombine triangles into quads
533
+ "Mesh.RecombinationAlgorithm": 0, # Standard (more robust than simple)
534
+ "Mesh.SubdivisionAlgorithm": 0, # Mixed tri/quad where needed
535
+ "Mesh.RecombineOptimizeTopology": 0, # Minimal optimization
536
+ "Mesh.RecombineNodeRepositioning": 1, # Still reposition nodes
537
+ "Mesh.RecombineMinimumQuality": 0.01, # Keep quality threshold
538
+ "Mesh.Smoothing": 5, # Reduced smoothing
539
+ "Mesh.SmoothNormals": 1, # Keep smooth normals
540
+ "Mesh.SmoothRatio": 1.8, # Keep smoothing ratio
541
+ }
542
+ else:
543
+ # Standard quad meshing parameters for cases without reinforcement lines
544
+ default_params = {
545
+ "Mesh.Algorithm": 8, # Frontal-Delaunay for quads (try 5, 6, 8)
546
+ "Mesh.RecombineAll": 1, # Recombine triangles into quads
547
+ "Mesh.RecombinationAlgorithm": 1, # Simple recombination (try 0, 1, 2, 3)
548
+ "Mesh.SubdivisionAlgorithm": 1, # All quads (try 0, 1, 2)
549
+ "Mesh.RecombineOptimizeTopology": 5, # Optimize topology (0-100)
550
+ "Mesh.RecombineNodeRepositioning": 1, # Reposition nodes (0 or 1)
551
+ "Mesh.RecombineMinimumQuality": 0.01, # Minimum quality threshold
552
+ "Mesh.Smoothing": 10, # Number of smoothing steps (try 0-100)
553
+ "Mesh.SmoothNormals": 1, # Smooth normals
554
+ "Mesh.SmoothRatio": 1.8, # Smoothing ratio (1.0-3.0)
555
+ }
556
+
557
+ # Override with user-provided parameters
558
+ if mesh_params:
559
+ default_params.update(mesh_params)
560
+
561
+ # Apply all parameters (except our custom ones)
562
+ for param, value in default_params.items():
563
+ if param not in ['size_factor']: # Skip our custom parameters
564
+ gmsh.option.setNumber(param, value)
565
+
566
+ # Set recombination for each surface
567
+ for surface in surface_to_region.keys():
568
+ gmsh.model.mesh.setRecombine(2, surface)
569
+ else:
570
+ gmsh.option.setNumber("Mesh.Algorithm", 6) # Frontal-Delaunay for triangles
571
+
572
+ # Always generate linear elements first - quadratic conversion is done in post-processing
573
+ # This avoids gmsh issues with quadratic elements and embedded 1D lines
574
+ gmsh.option.setNumber("Mesh.ElementOrder", 1)
575
+
576
+ # Force mesh coherence before generation
577
+ gmsh.option.setNumber("Mesh.ToleranceInitialDelaunay", 1e-12)
578
+
579
+ # Short edge control is now handled by point sizing during geometry creation
580
+
581
+ # Generate mesh
582
+ gmsh.model.mesh.generate(2)
583
+
584
+ # Remove duplicate nodes again after mesh generation (belt and suspenders)
585
+ gmsh.model.mesh.removeDuplicateNodes()
586
+
587
+ # Get nodes
588
+ node_tags, coords, _ = gmsh.model.mesh.getNodes()
589
+ nodes = np.array(coords).reshape(-1, 3)[:, :2]
590
+
591
+ # Create node tag to index mapping
592
+ node_tag_to_index = {tag: i for i, tag in enumerate(node_tags)}
593
+
594
+ elements = []
595
+ mat_ids = []
596
+ element_node_counts = []
597
+
598
+ # For quad8: track center nodes to delete later
599
+ center_nodes_to_delete = set() if element_type == 'quad8' else None
600
+
601
+ # Extract elements using physical groups for better region identification
602
+ for physical_tag, region_id in physical_surfaces:
603
+ try:
604
+ # Get entities in this physical group
605
+ entities = gmsh.model.getEntitiesForPhysicalGroup(2, physical_tag)
606
+
607
+ for entity in entities:
608
+ # Get all elements for this entity
609
+ elem_types, elem_tags_list, node_tags_list = gmsh.model.mesh.getElements(2, entity)
610
+
611
+ for elem_type, elem_tags, node_tags in zip(elem_types, elem_tags_list, node_tags_list):
612
+ # Gmsh element type mapping:
613
+ # 2: 3-node triangle, 9: 6-node triangle
614
+ # 3: 4-node quadrilateral, 10: 8-node quadrilateral
615
+ if elem_type == 2: # 3-node triangle
616
+ elements_array = np.array(node_tags).reshape(-1, 3)
617
+ for element in elements_array:
618
+ idxs = [node_tag_to_index[tag] for tag in element]
619
+
620
+ # GMSH returns clockwise triangles - reorder to counter-clockwise
621
+ idxs[1], idxs[2] = idxs[2], idxs[1]
622
+
623
+ # Pad to 9 columns with zeros
624
+ padded_idxs = idxs + [0] * (9 - len(idxs))
625
+ elements.append(padded_idxs)
626
+ mat_ids.append(region_id)
627
+ element_node_counts.append(3)
628
+ elif elem_type == 9: # 6-node triangle
629
+ elements_array = np.array(node_tags).reshape(-1, 6)
630
+ for element in elements_array:
631
+ idxs = [node_tag_to_index[tag] for tag in element]
632
+
633
+ # GMSH returns clockwise tri6 elements - reorder to counter-clockwise
634
+ # Swap corner nodes 1 and 2
635
+ idxs[1], idxs[2] = idxs[2], idxs[1]
636
+ # Fix midpoint assignments after corner swap 1<->2:
637
+ # GMSH gives: n3=edge(0-1), n4=edge(1-2), n5=edge(2-0)
638
+ # After swap: n3=edge(0-2), n4=edge(2-1), n5=edge(1-0)
639
+ # Standard requires: n3=edge(0-1), n4=edge(1-2), n5=edge(2-0)
640
+ # So remap: new_n3=old_n5, new_n4=old_n4, new_n5=old_n3
641
+ old_3, old_4, old_5 = idxs[3], idxs[4], idxs[5]
642
+ idxs[3] = old_5 # standard edge(0-1) gets GMSH edge(2-0) midpoint
643
+ idxs[4] = old_4 # standard edge(1-2) gets GMSH edge(1-2) midpoint
644
+ idxs[5] = old_3 # standard edge(2-0) gets GMSH edge(0-1) midpoint
645
+
646
+ # Pad to 9 columns with zeros
647
+ padded_idxs = idxs + [0] * (9 - len(idxs))
648
+ elements.append(padded_idxs)
649
+ mat_ids.append(region_id)
650
+ element_node_counts.append(6)
651
+ elif elem_type == 3: # 4-node quadrilateral
652
+ elements_array = np.array(node_tags).reshape(-1, 4)
653
+ for element in elements_array:
654
+ idxs = [node_tag_to_index[tag] for tag in element]
655
+ # Fix node ordering for quadrilateral elements
656
+ if element_type.startswith('quad'):
657
+ idxs = idxs[::-1] # Simple reversal of node order
658
+ # Pad to 9 columns with zeros
659
+ padded_idxs = idxs + [0] * (9 - len(idxs))
660
+ elements.append(padded_idxs)
661
+ mat_ids.append(region_id)
662
+ element_node_counts.append(4)
663
+ elif elem_type == 10: # Quadratic quadrilateral (gmsh generates 9-node Lagrange)
664
+ # Gmsh always generates 9-node Lagrange quads for order 2
665
+ elements_array = np.array(node_tags).reshape(-1, 9)
666
+ for element in elements_array:
667
+ idxs = [node_tag_to_index[tag] for tag in element]
668
+
669
+ if element_type in ['quad8', 'quad9']:
670
+ # Both quad8 and quad9 need CW to CCW conversion for first 8 nodes
671
+ # Convert from Gmsh CW to CCW ordering for quadrilateral
672
+ # Corner nodes: reverse order (0,1,2,3) -> (0,3,2,1)
673
+ # Midpoint nodes need to be reordered accordingly:
674
+ # GMSH: n4=edge(0-1), n5=edge(1-2), n6=edge(2-3), n7=edge(3-0)
675
+ # After corner reversal: need n4=edge(0-3), n5=edge(3-2), n6=edge(2-1), n7=edge(1-0)
676
+ # So: new_n4=old_n7, new_n5=old_n6, new_n6=old_n5, new_n7=old_n4
677
+ reordered_first8 = [
678
+ idxs[0], # corner 0 stays
679
+ idxs[3], # corner 1 -> corner 3
680
+ idxs[2], # corner 2 stays
681
+ idxs[1], # corner 3 -> corner 1
682
+ idxs[7], # edge(0-1) -> edge(0-3) = old edge(3-0)
683
+ idxs[6], # edge(1-2) -> edge(3-2) = old edge(2-3)
684
+ idxs[5], # edge(2-3) -> edge(2-1) = old edge(1-2)
685
+ idxs[4] # edge(3-0) -> edge(1-0) = old edge(0-1)
686
+ ]
687
+
688
+ if element_type == 'quad8':
689
+ # For quad8, skip center node and mark for deletion
690
+ center_node_idx = idxs[8] # Mark center node for deletion
691
+ center_nodes_to_delete.add(center_node_idx)
692
+ padded_idxs = reordered_first8 + [0] # Skip center node, pad to 9
693
+ elements.append(padded_idxs)
694
+ mat_ids.append(region_id)
695
+ element_node_counts.append(8)
696
+ else: # quad9
697
+ # For quad9, keep center node (9th node unchanged)
698
+ full_idxs = reordered_first8 + [idxs[8]] # Add center node
699
+ elements.append(full_idxs)
700
+ mat_ids.append(region_id)
701
+ element_node_counts.append(9)
702
+ else:
703
+ # This should never happen since element_type is validated earlier
704
+ raise ValueError(f"Unexpected element_type '{element_type}' for Gmsh elem_type {elem_type}")
705
+ except Exception as e:
706
+ print(f"Warning: Could not extract elements for physical group {physical_tag} (region {region_id}): {e}")
707
+ continue
708
+
709
+ # Convert to numpy arrays
710
+ elements_array = np.array(elements, dtype=int)
711
+ element_types = np.array(element_node_counts, dtype=int)
712
+ element_materials = np.array(mat_ids, dtype=int)
713
+
714
+ # Extract 1D elements from Gmsh-generated 1D mesh along reinforcement lines
715
+ elements_1d = []
716
+ mat_ids_1d = []
717
+ element_node_counts_1d = []
718
+
719
+ if lines is not None:
720
+ # Extract 1D elements from physical groups for each reinforcement line
721
+ for physical_tag, line_idx in physical_lines:
722
+ try:
723
+ # Get entities in this physical group
724
+ entities = gmsh.model.getEntitiesForPhysicalGroup(1, physical_tag)
725
+
726
+ if debug:
727
+ print(f" Physical group {physical_tag} (line {line_idx}): found {len(entities)} entities")
728
+
729
+ for entity in entities:
730
+ # Get all 1D elements for this entity
731
+ elem_types, elem_tags_list, node_tags_list = gmsh.model.mesh.getElements(1, entity)
732
+
733
+ for elem_type, elem_tags, node_tags in zip(elem_types, elem_tags_list, node_tags_list):
734
+ # Gmsh 1D element type mapping:
735
+ # 1: 2-node line (linear), 8: 3-node line (quadratic)
736
+ if elem_type == 1: # Linear 1D elements (2 nodes)
737
+ elements_array = np.array(node_tags).reshape(-1, 2)
738
+ for element in elements_array:
739
+ try:
740
+ # Convert numpy arrays to regular Python scalars
741
+ element_list = element.tolist() # Convert to Python list
742
+ if len(element_list) >= 2:
743
+ tag1 = int(element_list[0])
744
+ tag2 = int(element_list[1])
745
+
746
+ # Get node indices
747
+ idx1 = node_tag_to_index[tag1]
748
+ idx2 = node_tag_to_index[tag2]
749
+
750
+ # Create 1D element
751
+ padded_idxs = [idx1, idx2, 0]
752
+ elements_1d.append(padded_idxs)
753
+ mat_ids_1d.append(line_idx)
754
+ element_node_counts_1d.append(2)
755
+
756
+ if debug:
757
+ coord1 = nodes[idx1]
758
+ coord2 = nodes[idx2]
759
+ print(f" Created 1D element: {coord1} -> {coord2}")
760
+ except (KeyError, TypeError, ValueError, IndexError) as e:
761
+ if debug:
762
+ print(f" Skipping 1D element due to error: {e}")
763
+ continue
764
+ elif elem_type == 8: # Quadratic 1D elements (3 nodes)
765
+ elements_array = np.array(node_tags).reshape(-1, 3)
766
+ for element in elements_array:
767
+ try:
768
+ # Convert numpy arrays to regular Python scalars
769
+ element_list = element.tolist() # Convert to Python list
770
+ if len(element_list) >= 3:
771
+ tag1 = int(element_list[0])
772
+ tag2 = int(element_list[1])
773
+ tag3 = int(element_list[2])
774
+
775
+ # Get node indices
776
+ idx1 = node_tag_to_index[tag1]
777
+ idx2 = node_tag_to_index[tag2]
778
+ idx3 = node_tag_to_index[tag3]
779
+
780
+ # Create 1D element
781
+ padded_idxs = [idx1, idx2, idx3]
782
+ elements_1d.append(padded_idxs)
783
+ mat_ids_1d.append(line_idx)
784
+ element_node_counts_1d.append(3)
785
+ except (KeyError, TypeError, ValueError, IndexError) as e:
786
+ if debug:
787
+ print(f" Skipping quadratic 1D element due to error: {e}")
788
+ continue
789
+ except Exception as e:
790
+ if debug:
791
+ print(f" Error extracting 1D elements for line {line_idx}: {e}")
792
+ continue
793
+
794
+ gmsh.finalize()
795
+
796
+ # Clean up center nodes for quad8 elements
797
+ if element_type == 'quad8' and center_nodes_to_delete:
798
+ print(f"Quad8 cleanup: removing {len(center_nodes_to_delete)} center nodes from {len(nodes)} total nodes")
799
+
800
+ # c) Create array tracking original node numbering
801
+ original_node_count = len(nodes)
802
+ nodes_to_keep = [i for i in range(original_node_count) if i not in center_nodes_to_delete]
803
+
804
+ # d) Delete center nodes - create new nodes array
805
+ new_nodes = nodes[nodes_to_keep]
806
+
807
+ # e) Create mapping from old node indices to new node indices
808
+ old_to_new_mapping = {old_idx: new_idx for new_idx, old_idx in enumerate(nodes_to_keep)}
809
+
810
+ # f) Update element topology to use new node numbering
811
+ new_elements = []
812
+ for element in elements_array:
813
+ new_element = []
814
+ for node_idx in element:
815
+ if node_idx == 0: # Keep padding zeros
816
+ new_element.append(0)
817
+ elif node_idx in center_nodes_to_delete:
818
+ # This should not happen since we set center nodes to 0
819
+ new_element.append(0)
820
+ else:
821
+ # Map to new node index
822
+ new_element.append(old_to_new_mapping[node_idx])
823
+ new_elements.append(new_element)
824
+
825
+ # g) Replace arrays with consolidated versions
826
+ elements_array = np.array(new_elements, dtype=int)
827
+ nodes = new_nodes
828
+
829
+ print(f"Quad8 cleanup complete: {len(nodes)} nodes, {len(elements_array)} elements")
830
+
831
+ # Convert lists to arrays
832
+ elements_array = np.array(elements, dtype=int)
833
+ element_types = np.array(element_node_counts, dtype=int)
834
+ element_materials = np.array(mat_ids, dtype=int) + 1 # Make 1-based
835
+
836
+ mesh = {
837
+ "nodes": nodes,
838
+ "elements": elements_array,
839
+ "element_types": element_types,
840
+ "element_materials": element_materials,
841
+ }
842
+
843
+ # Add 1D element data if lines were provided
844
+ if lines is not None and len(elements_1d) > 0:
845
+ elements_1d_array = np.array(elements_1d, dtype=int)
846
+ element_types_1d = np.array(element_node_counts_1d, dtype=int)
847
+ element_materials_1d = np.array(mat_ids_1d, dtype=int) + 1 # Make 1-based
848
+
849
+ mesh["elements_1d"] = elements_1d_array
850
+ mesh["element_types_1d"] = element_types_1d
851
+ mesh["element_materials_1d"] = element_materials_1d
852
+
853
+ # Post-process to convert linear elements to quadratic if requested
854
+ if quadratic:
855
+ if debug:
856
+ print(f"Converting linear {base_element_type} mesh to quadratic {element_type}")
857
+ mesh = convert_linear_to_quadratic_mesh(mesh, element_type, debug=debug)
858
+
859
+ return mesh
860
+
861
+
862
+ def convert_linear_to_quadratic_mesh(mesh, target_element_type, debug=False):
863
+ """
864
+ Convert a linear mesh (tri3/quad4) to quadratic (tri6/quad8/quad9) by adding midside nodes.
865
+
866
+ This is much more robust than gmsh's built-in quadratic generation, especially
867
+ when dealing with embedded 1D elements (reinforcement lines).
868
+
869
+ Parameters:
870
+ mesh: Dictionary containing linear mesh data
871
+ target_element_type: 'tri6', 'quad8', or 'quad9'
872
+ debug: Enable debug output
873
+
874
+ Returns:
875
+ Updated mesh dictionary with quadratic elements
876
+ """
877
+ if debug:
878
+ print(f"Converting to {target_element_type} elements...")
879
+
880
+ nodes = mesh["nodes"].copy()
881
+ elements = mesh["elements"].copy()
882
+ element_types = mesh["element_types"].copy()
883
+ element_materials = mesh["element_materials"].copy()
884
+
885
+ # Handle 1D elements if present
886
+ elements_1d = mesh.get("elements_1d")
887
+ element_types_1d = mesh.get("element_types_1d")
888
+ element_materials_1d = mesh.get("element_materials_1d")
889
+ has_1d_elements = elements_1d is not None
890
+
891
+ if has_1d_elements:
892
+ elements_1d = elements_1d.copy()
893
+ element_types_1d = element_types_1d.copy()
894
+ element_materials_1d = element_materials_1d.copy()
895
+
896
+ # Dictionary to store midside nodes: (node1_idx, node2_idx) -> midside_node_idx
897
+ # Always store with node1_idx < node2_idx for consistency
898
+ midside_nodes = {}
899
+ next_node_idx = len(nodes)
900
+
901
+ def get_or_create_midside_node(n1_idx, n2_idx):
902
+ """Get existing midside node or create new one between n1 and n2"""
903
+ nonlocal next_node_idx, nodes
904
+
905
+ # Ensure consistent ordering
906
+ if n1_idx > n2_idx:
907
+ n1_idx, n2_idx = n2_idx, n1_idx
908
+
909
+ edge_key = (n1_idx, n2_idx)
910
+
911
+ if edge_key in midside_nodes:
912
+ return midside_nodes[edge_key]
913
+
914
+ # Create new midside node at edge center
915
+ n1 = nodes[n1_idx]
916
+ n2 = nodes[n2_idx]
917
+ midside_coord = (n1 + n2) / 2.0
918
+
919
+ # Add to nodes array
920
+ nodes_list = nodes.tolist()
921
+ nodes_list.append(midside_coord.tolist())
922
+ nodes = np.array(nodes_list)
923
+
924
+ midside_idx = next_node_idx
925
+ midside_nodes[edge_key] = midside_idx
926
+ next_node_idx += 1
927
+
928
+ if debug and len(midside_nodes) <= 10: # Only print first few
929
+ print(f" Created midside node {midside_idx} between {n1_idx}-{n2_idx} at {midside_coord}")
930
+
931
+ return midside_idx
932
+
933
+ # Convert 2D elements
934
+ new_elements = []
935
+ new_element_types = []
936
+
937
+ for elem_idx, element in enumerate(elements):
938
+ elem_type = element_types[elem_idx]
939
+
940
+ if target_element_type == 'tri6' and elem_type == 3:
941
+ # Convert tri3 to tri6
942
+ n0, n1, n2 = element[0], element[1], element[2]
943
+
944
+ # Get/create midside nodes
945
+ n3 = get_or_create_midside_node(n0, n1) # edge 0-1
946
+ n4 = get_or_create_midside_node(n1, n2) # edge 1-2
947
+ n5 = get_or_create_midside_node(n2, n0) # edge 2-0
948
+
949
+ # tri6 node ordering: [corner_nodes, midside_nodes]
950
+ new_element = [n0, n1, n2, n3, n4, n5, 0, 0, 0]
951
+ new_elements.append(new_element)
952
+ new_element_types.append(6)
953
+
954
+ elif target_element_type == 'quad8' and elem_type == 4:
955
+ # Convert quad4 to quad8
956
+ n0, n1, n2, n3 = element[0], element[1], element[2], element[3]
957
+
958
+ # Get/create midside nodes on edges
959
+ n4 = get_or_create_midside_node(n0, n1) # edge 0-1
960
+ n5 = get_or_create_midside_node(n1, n2) # edge 1-2
961
+ n6 = get_or_create_midside_node(n2, n3) # edge 2-3
962
+ n7 = get_or_create_midside_node(n3, n0) # edge 3-0
963
+
964
+ # quad8 node ordering: [corner_nodes, midside_nodes]
965
+ new_element = [n0, n1, n2, n3, n4, n5, n6, n7, 0]
966
+ new_elements.append(new_element)
967
+ new_element_types.append(8)
968
+
969
+ elif target_element_type == 'quad9' and elem_type == 4:
970
+ # Convert quad4 to quad9
971
+ n0, n1, n2, n3 = element[0], element[1], element[2], element[3]
972
+
973
+ # Get/create midside nodes on edges
974
+ n4 = get_or_create_midside_node(n0, n1) # edge 0-1
975
+ n5 = get_or_create_midside_node(n1, n2) # edge 1-2
976
+ n6 = get_or_create_midside_node(n2, n3) # edge 2-3
977
+ n7 = get_or_create_midside_node(n3, n0) # edge 3-0
978
+
979
+ # Create center node
980
+ center_coord = (nodes[n0] + nodes[n1] + nodes[n2] + nodes[n3]) / 4.0
981
+ nodes_list = nodes.tolist()
982
+ nodes_list.append(center_coord.tolist())
983
+ nodes = np.array(nodes_list)
984
+ n8 = next_node_idx
985
+ next_node_idx += 1
986
+
987
+ # quad9 node ordering: [corner_nodes, midside_nodes, center_node]
988
+ new_element = [n0, n1, n2, n3, n4, n5, n6, n7, n8]
989
+ new_elements.append(new_element)
990
+ new_element_types.append(9)
991
+
992
+ else:
993
+ # Keep original element unchanged
994
+ new_elements.append(element.tolist())
995
+ new_element_types.append(elem_type)
996
+
997
+ # Convert 1D elements to quadratic if present
998
+ new_elements_1d = []
999
+ new_element_types_1d = []
1000
+
1001
+ if has_1d_elements:
1002
+ for elem_idx, element in enumerate(elements_1d):
1003
+ elem_type = element_types_1d[elem_idx]
1004
+
1005
+ if elem_type == 2: # Convert linear 1D to quadratic
1006
+ n0, n1 = element[0], element[1]
1007
+
1008
+ # Get/create midside node (reuse if already created for 2D elements)
1009
+ n2 = get_or_create_midside_node(n0, n1)
1010
+
1011
+ new_element = [n0, n1, n2]
1012
+ new_elements_1d.append(new_element)
1013
+ new_element_types_1d.append(3) # quadratic 1D
1014
+ else:
1015
+ # Keep original element unchanged
1016
+ new_elements_1d.append(element.tolist())
1017
+ new_element_types_1d.append(elem_type)
1018
+
1019
+ if debug:
1020
+ print(f" Added {len(midside_nodes)} midside nodes")
1021
+ print(f" Total nodes: {len(nodes)} (was {len(mesh['nodes'])})")
1022
+
1023
+ # Create updated mesh
1024
+ updated_mesh = {
1025
+ "nodes": nodes,
1026
+ "elements": np.array(new_elements, dtype=int),
1027
+ "element_types": np.array(new_element_types, dtype=int),
1028
+ "element_materials": element_materials
1029
+ }
1030
+
1031
+ if has_1d_elements:
1032
+ updated_mesh["elements_1d"] = np.array(new_elements_1d, dtype=int)
1033
+ updated_mesh["element_types_1d"] = np.array(new_element_types_1d, dtype=int)
1034
+ updated_mesh["element_materials_1d"] = element_materials_1d
1035
+
1036
+ return updated_mesh
1037
+
1038
+
1039
+ def line_segment_parameter(point, line_start, line_end):
1040
+ """
1041
+ Calculate the parameter t (0 to 1) of a point along a line segment.
1042
+ Returns t where point = line_start + t * (line_end - line_start)
1043
+ """
1044
+ px, py = point
1045
+ x1, y1 = line_start
1046
+ x2, y2 = line_end
1047
+
1048
+ # Calculate parameter t
1049
+ dx = x2 - x1
1050
+ dy = y2 - y1
1051
+
1052
+ if abs(dx) > abs(dy):
1053
+ t = (px - x1) / dx
1054
+ else:
1055
+ t = (py - y1) / dy
1056
+
1057
+ return t
1058
+
1059
+
1060
+ def line_segment_intersection(p1, p2, p3, p4, tol=1e-8):
1061
+ """
1062
+ Find intersection point between two line segments.
1063
+ Returns intersection point (x, y) if it exists, None otherwise.
1064
+ """
1065
+ x1, y1 = p1
1066
+ x2, y2 = p2
1067
+ x3, y3 = p3
1068
+ x4, y4 = p4
1069
+
1070
+ # Calculate direction vectors
1071
+ d1x, d1y = x2 - x1, y2 - y1
1072
+ d2x, d2y = x4 - x3, y4 - y3
1073
+
1074
+ # Calculate determinant
1075
+ det = d1x * d2y - d1y * d2x
1076
+
1077
+ if abs(det) < tol: # Lines are parallel
1078
+ return None
1079
+
1080
+ # Calculate parameters
1081
+ t1 = ((x3 - x1) * d2y - (y3 - y1) * d2x) / det
1082
+ t2 = ((x3 - x1) * d1y - (y3 - y1) * d1x) / det
1083
+
1084
+ # Check if intersection is within both segments
1085
+ if 0 <= t1 <= 1 and 0 <= t2 <= 1:
1086
+ # Calculate intersection point
1087
+ ix = x1 + t1 * d1x
1088
+ iy = y1 + t1 * d1y
1089
+ return (round(ix, 6), round(iy, 6))
1090
+
1091
+ return None
1092
+
1093
+
1094
+ def point_near_existing(point, existing_points, tol=1e-8):
1095
+ """Check if a point is near any existing points."""
1096
+ px, py = point
1097
+ for ex, ey in existing_points:
1098
+ if abs(px - ex) < tol and abs(py - ey) < tol:
1099
+ return True
1100
+ return False
1101
+
1102
+
1103
+ def insert_point_into_polygon_edge(intersection, edge_start, edge_end, poly_data, point_map, target_size):
1104
+ """Insert an intersection point into a polygon edge, updating the polygon's coordinate list."""
1105
+ x, y = intersection
1106
+ # Ensure the point exists in the point_map (for Gmsh)
1107
+ if (x, y) not in point_map:
1108
+ tag = len(point_map) + 1 # Simple tag assignment
1109
+ point_map[(x, y)] = tag
1110
+
1111
+ # Insert the intersection point into the polygon's coordinate list at the correct edge
1112
+ # poly_data['pt_tags'] is a list of Gmsh point tags, but we need to update the coordinate list used to build the polygon
1113
+ # We'll reconstruct the coordinate list from the tags and point_map
1114
+ pt_tags = poly_data['pt_tags']
1115
+ # Build coordinate list for the polygon
1116
+ coords = []
1117
+ tag_to_coord = {v: k for k, v in point_map.items()}
1118
+ for tag in pt_tags:
1119
+ if tag in tag_to_coord:
1120
+ coords.append(tag_to_coord[tag])
1121
+ else:
1122
+ # Fallback: try to find the coordinate in point_map
1123
+ found = False
1124
+ for (cx, cy), t in point_map.items():
1125
+ if t == tag:
1126
+ coords.append((cx, cy))
1127
+ found = True
1128
+ break
1129
+ if not found:
1130
+ coords.append((None, None)) # Should not happen
1131
+ # Find the edge to insert after
1132
+ insert_idx = None
1133
+ for i in range(len(coords)):
1134
+ a = coords[i]
1135
+ b = coords[(i + 1) % len(coords)]
1136
+ if (abs(a[0] - edge_start[0]) < 1e-8 and abs(a[1] - edge_start[1]) < 1e-8 and
1137
+ abs(b[0] - edge_end[0]) < 1e-8 and abs(b[1] - edge_end[1]) < 1e-8):
1138
+ insert_idx = i + 1
1139
+ break
1140
+ # Also check reversed edge
1141
+ if (abs(a[0] - edge_end[0]) < 1e-8 and abs(a[1] - edge_end[1]) < 1e-8 and
1142
+ abs(b[0] - edge_start[0]) < 1e-8 and abs(b[1] - edge_start[1]) < 1e-8):
1143
+ insert_idx = i + 1
1144
+ break
1145
+ if insert_idx is not None:
1146
+ # Insert the intersection point into the coordinate list
1147
+ coords.insert(insert_idx, (x, y))
1148
+ # Now update pt_tags to match
1149
+ tag = point_map[(x, y)]
1150
+ pt_tags.insert(insert_idx, tag)
1151
+ # Update poly_data
1152
+ poly_data['pt_tags'] = pt_tags
1153
+ # If not found, do nothing (should not happen)
1154
+
1155
+
1156
+ def get_quad_mesh_presets():
1157
+ """
1158
+ Returns dictionary of preset quad meshing parameter combinations to try.
1159
+ """
1160
+ presets = {
1161
+ 'default': {
1162
+ "Mesh.Algorithm": 8,
1163
+ "Mesh.RecombinationAlgorithm": 1,
1164
+ "Mesh.SubdivisionAlgorithm": 1,
1165
+ "Mesh.RecombineOptimizeTopology": 5,
1166
+ "Mesh.Smoothing": 10,
1167
+ "size_factor": 1.4, # Target size adjustment
1168
+ },
1169
+ 'blossom': {
1170
+ "Mesh.Algorithm": 6,
1171
+ "Mesh.RecombinationAlgorithm": 2, # Blossom
1172
+ "Mesh.SubdivisionAlgorithm": 1,
1173
+ "Mesh.RecombineOptimizeTopology": 20,
1174
+ "Mesh.Smoothing": 20,
1175
+ "size_factor": 1.6, # Slightly larger for better recombination
1176
+ },
1177
+ 'blossom_full': {
1178
+ "Mesh.Algorithm": 5,
1179
+ "Mesh.RecombinationAlgorithm": 3, # Blossom full-quad
1180
+ "Mesh.SubdivisionAlgorithm": 1,
1181
+ "Mesh.RecombineOptimizeTopology": 50,
1182
+ "Mesh.Smoothing": 30,
1183
+ "size_factor": 1.7, # Larger for complex recombination
1184
+ },
1185
+ 'high_quality': {
1186
+ "Mesh.Algorithm": 6,
1187
+ "Mesh.RecombinationAlgorithm": 1,
1188
+ "Mesh.SubdivisionAlgorithm": 1,
1189
+ "Mesh.RecombineOptimizeTopology": 100,
1190
+ "Mesh.RecombineNodeRepositioning": 1,
1191
+ "Mesh.RecombineMinimumQuality": 0.1,
1192
+ "Mesh.Smoothing": 50,
1193
+ "Mesh.SmoothRatio": 2.0,
1194
+ "size_factor": 2.0, # Much larger due to heavy optimization
1195
+ },
1196
+ 'fast': {
1197
+ "Mesh.Algorithm": 8,
1198
+ "Mesh.RecombinationAlgorithm": 0, # Standard (fastest)
1199
+ "Mesh.SubdivisionAlgorithm": 0,
1200
+ "Mesh.RecombineOptimizeTopology": 0,
1201
+ "Mesh.Smoothing": 5,
1202
+ "size_factor": 0.7, # Smaller adjustment = more elements
1203
+ }
1204
+ }
1205
+ return presets
1206
+
1207
+
1208
+
1209
+ def build_polygons(slope_data, reinf_lines=None, debug=False):
1210
+ """
1211
+ Build material zone polygons from slope_data.
1212
+
1213
+ Extracts profile lines and max depth, then creates polygons for each material zone.
1214
+ Also integrates distributed load points and reinforcement line endpoints that are
1215
+ coincident with polygon edges.
1216
+
1217
+ Parameters:
1218
+ slope_data: Dictionary containing slope geometry data
1219
+
1220
+ Returns:
1221
+ List of polygons, each defined by (x,y) coordinate tuples
1222
+ """
1223
+ import numpy as np
1224
+ import copy
1225
+
1226
+ # Extract profile lines and max depth from slope_data
1227
+ profile_lines = slope_data.get('profile_lines', [])
1228
+ max_depth = slope_data.get('max_depth', None)
1229
+
1230
+ if not profile_lines:
1231
+ raise ValueError("Need at least 1 profile line to create material zones")
1232
+
1233
+ # For single profile line, max_depth serves as the bottom boundary
1234
+ if len(profile_lines) == 1:
1235
+ if max_depth is None:
1236
+ raise ValueError("When using only 1 profile line, max_depth must be specified")
1237
+
1238
+ n = len(profile_lines)
1239
+ lines = [list(line) for line in copy.deepcopy(profile_lines)]
1240
+ tol = 1e-8
1241
+
1242
+ for i in range(n - 1):
1243
+ top = lines[i]
1244
+ for endpoint in [0, -1]: # left and right
1245
+ x_top, y_top = top[endpoint]
1246
+ # Find the highest lower profile at this x
1247
+ best_j = None
1248
+ best_y = -np.inf
1249
+ for j in range(i + 1, n):
1250
+ lower = lines[j]
1251
+ xs_lower = np.array([x for x, y in lower])
1252
+ ys_lower = np.array([y for x, y in lower])
1253
+ if xs_lower[0] - tol <= x_top <= xs_lower[-1] + tol:
1254
+ y_proj = np.interp(x_top, xs_lower, ys_lower)
1255
+ if y_proj > best_y:
1256
+ best_y = y_proj
1257
+ best_j = j
1258
+ if best_j is not None:
1259
+ lower = lines[best_j]
1260
+ xs_lower = np.array([x for x, y in lower])
1261
+ ys_lower = np.array([y for x, y in lower])
1262
+ y_proj = np.interp(x_top, xs_lower, ys_lower)
1263
+ # Check if lower profile already has a point at this x (within tol)
1264
+ found = False
1265
+ for (x_l, y_l) in lower:
1266
+ if abs(x_l - x_top) < tol:
1267
+ found = True
1268
+ break
1269
+ if abs(y_proj - y_top) < tol:
1270
+ # Coincident: insert (x_top, y_top) if not present
1271
+ if not found:
1272
+ insert_idx = np.searchsorted(xs_lower, x_top)
1273
+ lower.insert(insert_idx, (round(x_top, 6), round(y_top, 6)))
1274
+ else:
1275
+ # Not coincident: insert (x_top, y_proj) if not present
1276
+ if not found:
1277
+ insert_idx = np.searchsorted(xs_lower, x_top)
1278
+ lower.insert(insert_idx, (round(x_top, 6), round(y_proj, 6)))
1279
+
1280
+ def clean_polygon(poly, tol=1e-8):
1281
+ # Remove consecutive duplicate points (except for closing point)
1282
+ if not poly:
1283
+ return poly
1284
+ cleaned = [poly[0]]
1285
+ for pt in poly[1:]:
1286
+ if abs(pt[0] - cleaned[-1][0]) > tol or abs(pt[1] - cleaned[-1][1]) > tol:
1287
+ cleaned.append(pt)
1288
+ # Ensure closed
1289
+ if abs(cleaned[0][0] - cleaned[-1][0]) > tol or abs(cleaned[0][1] - cleaned[-1][1]) > tol:
1290
+ cleaned.append(cleaned[0])
1291
+ return cleaned
1292
+
1293
+ # Now build polygons as before
1294
+ polygons = []
1295
+ for i, top_line in enumerate(lines):
1296
+ xs_top, ys_top = zip(*top_line)
1297
+ xs_top = np.array(xs_top)
1298
+ ys_top = np.array(ys_top)
1299
+ left_x, left_y = xs_top[0], ys_top[0]
1300
+ right_x, right_y = xs_top[-1], ys_top[-1]
1301
+
1302
+ # Initialize variables for debug output
1303
+ lower_left_x = None
1304
+ lower_right_x = None
1305
+ proj_left_x = None
1306
+ proj_right_x = None
1307
+ bottom_cleaned = []
1308
+
1309
+ # Initialize vertical edge points (used for intermediate points on vertical edges)
1310
+ left_vertical_points = [] # Intermediate points on left vertical edge (bottom to top)
1311
+ right_vertical_points = [] # Intermediate points on right vertical edge (top to bottom)
1312
+ left_y_bot = -np.inf
1313
+ right_y_bot = -np.inf
1314
+
1315
+ if i < n - 1:
1316
+ # Use the immediate next line as the lower boundary
1317
+ lower_line = lines[i + 1]
1318
+ xs_bot, ys_bot = zip(*lower_line)
1319
+ xs_bot = np.array(xs_bot)
1320
+ ys_bot = np.array(ys_bot)
1321
+ lower_left_x = xs_bot[0]
1322
+ lower_right_x = xs_bot[-1]
1323
+
1324
+ # Collect actual points from all lower lines within the top line's x-range
1325
+ # But only include a point if it's actually on the highest lower profile at that x
1326
+ bottom_points = [] # List of (x, y, line_idx) tuples
1327
+
1328
+ for j in range(i + 1, n):
1329
+ lower_candidate = lines[j]
1330
+ xs_cand = np.array([x for x, y in lower_candidate])
1331
+ ys_cand = np.array([y for x, y in lower_candidate])
1332
+
1333
+ # Only include points that are within the top line's x-range
1334
+ mask = (xs_cand >= left_x - tol) & (xs_cand <= right_x + tol)
1335
+ for x, y in zip(xs_cand[mask], ys_cand[mask]):
1336
+ # Check if this point is actually on the highest lower profile at this x
1337
+ # Compare with all other lower lines at this x-coordinate
1338
+ is_highest = True
1339
+ for k in range(i + 1, n):
1340
+ if k == j:
1341
+ continue
1342
+ other_line = lines[k]
1343
+ xs_other = np.array([x_o for x_o, y_o in other_line])
1344
+ ys_other = np.array([y_o for x_o, y_o in other_line])
1345
+ if xs_other[0] - tol <= x <= xs_other[-1] + tol:
1346
+ y_other = np.interp(x, xs_other, ys_other)
1347
+ if y_other > y + tol: # Other line is higher
1348
+ is_highest = False
1349
+ break
1350
+
1351
+ if is_highest:
1352
+ bottom_points.append((x, y, j))
1353
+
1354
+ # Group by x-coordinate (within tolerance) and keep only the highest y at each x
1355
+ # This handles cases where multiple lines have points at the same x
1356
+ bottom_dict = {} # x_key -> (y, line_idx, orig_x, orig_y)
1357
+ for x, y, line_idx in bottom_points:
1358
+ x_key = round(x / tol) * tol # Round to tolerance to group nearby points
1359
+ if x_key not in bottom_dict or y > bottom_dict[x_key][0]:
1360
+ bottom_dict[x_key] = (y, line_idx, x, y)
1361
+
1362
+ # Convert to sorted list
1363
+ bottom_cleaned = sorted([(orig_x, orig_y) for _, _, orig_x, orig_y in bottom_dict.values()])
1364
+
1365
+ # Helper function to check if a point already exists in a list
1366
+ def point_exists(point_list, x, y, tol=1e-8):
1367
+ """Check if a point (x, y) already exists in the point list within tolerance."""
1368
+ for px, py in point_list:
1369
+ if abs(px - x) < tol and abs(py - y) < tol:
1370
+ return True
1371
+ return False
1372
+
1373
+ # Helper function to find the lowest y value at a given x by checking all segments
1374
+ def find_lowest_y_at_x(line_points, x_query, tol=1e-8):
1375
+ """
1376
+ Find the lowest y value at x_query by checking all segments of the line.
1377
+ Handles vertical segments properly by finding all y values at that x and returning the minimum.
1378
+
1379
+ Returns:
1380
+ tuple: (y_value, is_at_endpoint) where is_at_endpoint indicates if x_query is at an endpoint
1381
+ """
1382
+ if not line_points:
1383
+ return None, False
1384
+
1385
+ xs = np.array([x for x, y in line_points])
1386
+ ys = np.array([y for x, y in line_points])
1387
+
1388
+ # Check if x_query is within the line's x-range
1389
+ if xs[0] - tol > x_query or xs[-1] + tol < x_query:
1390
+ return None, False
1391
+
1392
+ # Check if x_query is at an endpoint
1393
+ is_at_left_endpoint = abs(x_query - xs[0]) < tol
1394
+ is_at_right_endpoint = abs(x_query - xs[-1]) < tol
1395
+ is_at_endpoint = is_at_left_endpoint or is_at_right_endpoint
1396
+
1397
+ # Find all y values at x_query by checking all segments
1398
+ y_values = []
1399
+
1400
+ # Check all points that are exactly at x_query
1401
+ for k in range(len(line_points)):
1402
+ if abs(xs[k] - x_query) < tol:
1403
+ y_values.append(ys[k])
1404
+
1405
+ # Check all segments that contain x_query
1406
+ for k in range(len(line_points) - 1):
1407
+ x1, y1 = line_points[k]
1408
+ x2, y2 = line_points[k + 1]
1409
+
1410
+ # Check if segment is vertical and contains x_query
1411
+ if abs(x1 - x_query) < tol and abs(x2 - x_query) < tol:
1412
+ # Vertical segment - include both y values
1413
+ y_values.append(y1)
1414
+ y_values.append(y2)
1415
+ # Check if segment is horizontal or sloped and contains x_query
1416
+ elif min(x1, x2) - tol <= x_query <= max(x1, x2) + tol:
1417
+ # Interpolate y value
1418
+ if abs(x2 - x1) < tol:
1419
+ # Segment is vertical (should have been caught above, but just in case)
1420
+ y_values.append(y1)
1421
+ y_values.append(y2)
1422
+ else:
1423
+ # Linear interpolation
1424
+ t = (x_query - x1) / (x2 - x1)
1425
+ if 0 <= t <= 1:
1426
+ y_interp = y1 + t * (y2 - y1)
1427
+ y_values.append(y_interp)
1428
+
1429
+ if not y_values:
1430
+ return None, False
1431
+
1432
+ # Return the lowest y value
1433
+ y_min = min(y_values)
1434
+ return y_min, is_at_endpoint
1435
+
1436
+ # Project endpoints - find highest lower profile or use max_depth
1437
+ # When projecting right side: if intersection is at left end of lower line,
1438
+ # add that point but continue projecting down
1439
+ # When projecting left side: if intersection is at right end of lower line,
1440
+ # add that point but continue projecting down
1441
+ for j in range(i + 1, n):
1442
+ lower_candidate = lines[j]
1443
+ xs_cand = np.array([x for x, y in lower_candidate])
1444
+ ys_cand = np.array([y for x, y in lower_candidate])
1445
+
1446
+ # Check left endpoint projection
1447
+ if xs_cand[0] - tol <= left_x <= xs_cand[-1] + tol:
1448
+ y_cand, is_at_endpoint = find_lowest_y_at_x(lower_candidate, left_x, tol)
1449
+ if y_cand is not None:
1450
+ # If intersection is at the right end of the lower line, add point but continue
1451
+ if is_at_endpoint and abs(left_x - xs_cand[-1]) < tol: # At right endpoint
1452
+ # Only add if not duplicate of the endpoint being projected and not already in list
1453
+ if abs(y_cand - left_y) > tol and not point_exists(left_vertical_points, left_x, y_cand, tol):
1454
+ left_vertical_points.append((left_x, y_cand))
1455
+ else: # Not at endpoint, use as stopping point
1456
+ if y_cand > left_y_bot:
1457
+ left_y_bot = y_cand
1458
+
1459
+ # Check right endpoint projection
1460
+ if xs_cand[0] - tol <= right_x <= xs_cand[-1] + tol:
1461
+ y_cand, is_at_endpoint = find_lowest_y_at_x(lower_candidate, right_x, tol)
1462
+ if y_cand is not None:
1463
+ # If intersection is at the left end of the lower line, add point but continue
1464
+ if is_at_endpoint and abs(right_x - xs_cand[0]) < tol: # At left endpoint
1465
+ # Only add if not duplicate of the endpoint being projected and not already in list
1466
+ if abs(y_cand - right_y) > tol and not point_exists(right_vertical_points, right_x, y_cand, tol):
1467
+ right_vertical_points.append((right_x, y_cand))
1468
+ else: # Not at endpoint, use as stopping point
1469
+ if y_cand > right_y_bot:
1470
+ right_y_bot = y_cand
1471
+
1472
+ # If no lower profile at endpoints, use max_depth
1473
+ if left_y_bot == -np.inf:
1474
+ left_y_bot = max_depth if max_depth is not None else -np.inf
1475
+ if right_y_bot == -np.inf:
1476
+ right_y_bot = max_depth if max_depth is not None else -np.inf
1477
+
1478
+ # Deduplicate vertical points (remove points that are too close to each other)
1479
+ def deduplicate_points(points, tol=1e-8):
1480
+ """Remove duplicate points within tolerance."""
1481
+ if not points:
1482
+ return []
1483
+ unique_points = [points[0]]
1484
+ for p in points[1:]:
1485
+ # Check if this point is too close to any existing unique point
1486
+ is_duplicate = False
1487
+ for up in unique_points:
1488
+ if abs(p[0] - up[0]) < tol and abs(p[1] - up[1]) < tol:
1489
+ is_duplicate = True
1490
+ break
1491
+ if not is_duplicate:
1492
+ unique_points.append(p)
1493
+ return unique_points
1494
+
1495
+ right_vertical_points = deduplicate_points(right_vertical_points, tol)
1496
+ left_vertical_points = deduplicate_points(left_vertical_points, tol)
1497
+
1498
+ # Sort vertical points: right edge top to bottom, left edge bottom to top
1499
+ right_vertical_points.sort(key=lambda p: -p[1]) # Sort by y descending (top to bottom)
1500
+ left_vertical_points.sort(key=lambda p: p[1]) # Sort by y ascending (bottom to top)
1501
+
1502
+ # Build bottom boundary: right projection, intermediate points (right to left), left projection
1503
+ # The bottom should go from right to left to close the polygon
1504
+ bottom = []
1505
+
1506
+ # Start with right endpoint
1507
+ if right_y_bot != -np.inf:
1508
+ bottom.append((right_x, right_y_bot))
1509
+
1510
+ # Add intermediate points in reverse order (right to left)
1511
+ # Filter out points too close to endpoints
1512
+ for x, y in reversed(bottom_cleaned):
1513
+ if abs(x - left_x) > tol and abs(x - right_x) > tol:
1514
+ bottom.append((x, y))
1515
+
1516
+ # End with left endpoint
1517
+ if left_y_bot != -np.inf:
1518
+ bottom.append((left_x, left_y_bot))
1519
+
1520
+ # Store for debug output
1521
+ proj_left_x = left_x
1522
+ proj_right_x = right_x
1523
+ else:
1524
+ # For the lowest polygon, bottom is at max_depth
1525
+ # Only need endpoints - no intermediate points
1526
+ left_y_bot = max_depth if max_depth is not None else -np.inf
1527
+ right_y_bot = max_depth if max_depth is not None else -np.inf
1528
+ bottom = []
1529
+ bottom.append((right_x, max_depth))
1530
+ bottom.append((left_x, max_depth))
1531
+
1532
+ # Build polygon: top left-to-right, right vertical edge (with intermediate points),
1533
+ # bottom right-to-left, left vertical edge (with intermediate points)
1534
+ poly = []
1535
+
1536
+ # Top edge: left to right along profile line
1537
+ for x, y in zip(xs_top, ys_top):
1538
+ poly.append((round(x, 6), round(y, 6)))
1539
+
1540
+ # Right vertical edge: from (right_x, right_y) down to (right_x, right_y_bot)
1541
+ # Include intermediate points where we intersect left endpoints of lower lines
1542
+ # Note: (right_x, right_y_bot) will be added as part of the bottom edge, so don't add it here
1543
+ if i < n - 1:
1544
+ for x, y in right_vertical_points:
1545
+ # Only add if it's between top and bottom (not duplicate of endpoints)
1546
+ if abs(y - right_y) > tol and abs(y - right_y_bot) > tol:
1547
+ poly.append((round(x, 6), round(y, 6)))
1548
+
1549
+ # Bottom edge: right to left (already includes (right_x, right_y_bot) and (left_x, left_y_bot))
1550
+ for x, y in bottom:
1551
+ poly.append((round(x, 6), round(y, 6)))
1552
+
1553
+ # Left vertical edge: from (left_x, left_y_bot) up to (left_x, left_y)
1554
+ # Include intermediate points where we intersect right endpoints of lower lines
1555
+ # Note: (left_x, left_y_bot) was already added as part of the bottom edge
1556
+ if i < n - 1:
1557
+ for x, y in reversed(left_vertical_points): # Reverse to go bottom to top
1558
+ # Only add if it's between bottom and top (not duplicate of endpoints)
1559
+ if abs(y - left_y_bot) > tol and abs(y - left_y) > tol:
1560
+ poly.append((round(x, 6), round(y, 6)))
1561
+
1562
+ # Clean up polygon (should rarely do anything)
1563
+ poly = clean_polygon(poly)
1564
+ polygons.append(poly)
1565
+
1566
+ # Add distributed load points to polygon edges if coincident
1567
+ polygons = add_dload_points_to_polygons(polygons, slope_data)
1568
+
1569
+ # Add intersection points with reinforcement lines if provided
1570
+ if reinf_lines is not None:
1571
+ polygons = add_intersection_points_to_polygons(polygons, reinf_lines, debug=debug)
1572
+
1573
+ return polygons
1574
+
1575
+ def add_dload_points_to_polygons(polygons, slope_data):
1576
+ """
1577
+ Add distributed load points to polygon edges if they are coincident with edges
1578
+ but not existing vertices.
1579
+
1580
+ Parameters:
1581
+ polygons: List of polygons (lists of (x,y) tuples)
1582
+ slope_data: Dictionary containing slope data
1583
+
1584
+ Returns:
1585
+ Updated list of polygons with added points
1586
+ """
1587
+ import numpy as np
1588
+ tol = 1e-8
1589
+
1590
+ # Collect distributed load points to check
1591
+ points_to_check = []
1592
+
1593
+ # Add distributed load points
1594
+ distributed_loads = slope_data.get('distributed_loads', [])
1595
+ for load in distributed_loads:
1596
+ if 'xy' in load:
1597
+ for point in load['xy']:
1598
+ points_to_check.append(point)
1599
+
1600
+ if not points_to_check:
1601
+ return polygons
1602
+
1603
+ # Process each polygon
1604
+ updated_polygons = []
1605
+ for poly in polygons:
1606
+ updated_poly = list(poly) # Make a copy
1607
+
1608
+ # Check each point against polygon edges
1609
+ for check_point in points_to_check:
1610
+ x_check, y_check = check_point
1611
+
1612
+ # Check if point is already a vertex
1613
+ is_vertex = False
1614
+ for vertex in updated_poly:
1615
+ if abs(vertex[0] - x_check) < tol and abs(vertex[1] - y_check) < tol:
1616
+ is_vertex = True
1617
+ break
1618
+
1619
+ if is_vertex:
1620
+ continue
1621
+
1622
+ # Check if point lies on any edge
1623
+ for i in range(len(updated_poly)):
1624
+ x1, y1 = updated_poly[i]
1625
+ x2, y2 = updated_poly[(i + 1) % len(updated_poly)]
1626
+
1627
+ # Check if point lies on edge segment
1628
+ if is_point_on_edge((x_check, y_check), (x1, y1), (x2, y2), tol):
1629
+ # Insert point after vertex i
1630
+ updated_poly.insert(i + 1, (round(x_check, 6), round(y_check, 6)))
1631
+ break # Only insert once per point
1632
+
1633
+ updated_polygons.append(updated_poly)
1634
+
1635
+ return updated_polygons
1636
+
1637
+ def is_point_on_edge(point, edge_start, edge_end, tol=1e-8):
1638
+ """
1639
+ Check if a point lies on a line segment (edge).
1640
+
1641
+ Parameters:
1642
+ point: (x, y) tuple of point to check
1643
+ edge_start: (x, y) tuple of edge start
1644
+ edge_end: (x, y) tuple of edge end
1645
+ tol: Tolerance for coincidence
1646
+
1647
+ Returns:
1648
+ bool: True if point lies on edge segment
1649
+ """
1650
+ px, py = point
1651
+ x1, y1 = edge_start
1652
+ x2, y2 = edge_end
1653
+
1654
+ # Check if point is within bounding box of edge
1655
+ if not (min(x1, x2) - tol <= px <= max(x1, x2) + tol and
1656
+ min(y1, y2) - tol <= py <= max(y1, y2) + tol):
1657
+ return False
1658
+
1659
+ # Check if point is collinear with edge
1660
+ # Use cross product to check collinearity
1661
+ cross_product = abs((py - y1) * (x2 - x1) - (px - x1) * (y2 - y1))
1662
+
1663
+ # If cross product is close to zero, point is on the line
1664
+ # Also check that it's within the segment bounds
1665
+ if cross_product < tol:
1666
+ # Check if point is between edge endpoints
1667
+ dot_product = (px - x1) * (x2 - x1) + (py - y1) * (y2 - y1)
1668
+ edge_length_sq = (x2 - x1) ** 2 + (y2 - y1) ** 2
1669
+
1670
+ if edge_length_sq < tol: # Edge is essentially a point
1671
+ return abs(px - x1) < tol and abs(py - y1) < tol
1672
+
1673
+ # Parameter t should be between 0 and 1 for point to be on segment
1674
+ t = dot_product / edge_length_sq
1675
+ return -tol <= t <= 1 + tol
1676
+
1677
+ return False
1678
+
1679
+ def print_polygon_summary(polygons):
1680
+ """
1681
+ Prints a summary of the generated polygons for diagnostic purposes.
1682
+
1683
+ Parameters:
1684
+ polygons: List of polygon coordinate lists
1685
+ """
1686
+ print("=== POLYGON SUMMARY ===")
1687
+ print(f"Number of material zones: {len(polygons)}")
1688
+ print()
1689
+
1690
+ for i, polygon in enumerate(polygons):
1691
+ print(f"Material Zone {i+1} (Material ID: {i}):")
1692
+ print(f" Number of vertices: {len(polygon)}")
1693
+
1694
+ # Calculate area (simple shoelace formula)
1695
+ area = 0
1696
+ for j in range(len(polygon) - 1):
1697
+ x1, y1 = polygon[j]
1698
+ x2, y2 = polygon[j + 1]
1699
+ area += (x2 - x1) * (y2 + y1) / 2
1700
+ area = abs(area)
1701
+
1702
+ print(f" Approximate area: {area:.2f} square units")
1703
+
1704
+ # Print bounding box
1705
+ xs = [x for x, y in polygon]
1706
+ ys = [y for x, y in polygon]
1707
+ print(f" Bounding box: x=[{min(xs):.2f}, {max(xs):.2f}], y=[{min(ys):.2f}, {max(ys):.2f}]")
1708
+ print()
1709
+
1710
+
1711
+
1712
+
1713
+ def export_mesh_to_json(mesh, filename):
1714
+ """Save mesh dictionary to JSON file."""
1715
+ import json
1716
+ import numpy as np
1717
+
1718
+ # Convert numpy arrays to lists for JSON serialization
1719
+ mesh_json = {}
1720
+ for key, value in mesh.items():
1721
+ if isinstance(value, np.ndarray):
1722
+ mesh_json[key] = value.tolist()
1723
+ else:
1724
+ mesh_json[key] = value
1725
+
1726
+ with open(filename, 'w') as f:
1727
+ json.dump(mesh_json, f, indent=2)
1728
+
1729
+ print(f"Mesh saved to {filename}")
1730
+
1731
+ def import_mesh_from_json(filename):
1732
+ """Load mesh dictionary from JSON file."""
1733
+ import json
1734
+ import numpy as np
1735
+
1736
+ with open(filename, 'r') as f:
1737
+ mesh_json = json.load(f)
1738
+
1739
+ # Convert lists back to numpy arrays
1740
+ mesh = {}
1741
+ for key, value in mesh_json.items():
1742
+ if isinstance(value, list):
1743
+ mesh[key] = np.array(value)
1744
+ else:
1745
+ mesh[key] = value
1746
+
1747
+ return mesh
1748
+
1749
+ def remove_duplicate_endpoint(poly, tol=1e-8):
1750
+ if len(poly) > 1 and abs(poly[0][0] - poly[-1][0]) < tol and abs(poly[0][1] - poly[-1][1]) < tol:
1751
+ return poly[:-1]
1752
+ return poly
1753
+
1754
+
1755
+ def extract_1d_elements_from_2d_edges(nodes, elements_2d, element_types_2d, lines, debug=False):
1756
+ """
1757
+ Extract 1D elements from 2D element edges that lie along reinforcement lines.
1758
+ This ensures proper finite element integration where 1D elements are shared edges of 2D elements.
1759
+
1760
+ Parameters:
1761
+ nodes: np.ndarray of node coordinates (n_nodes, 2)
1762
+ elements_2d: np.ndarray of 2D element vertex indices (n_elements, 9)
1763
+ element_types_2d: np.ndarray indicating 2D element type (3, 4, 6, 8, or 9 nodes)
1764
+ lines: List of reinforcement lines, each defined by list of (x, y) tuples
1765
+ debug: Enable debug output
1766
+
1767
+ Returns:
1768
+ tuple: (elements_1d, mat_ids_1d, element_node_counts_1d)
1769
+ """
1770
+ import numpy as np
1771
+ from collections import defaultdict
1772
+
1773
+ elements_1d = []
1774
+ mat_ids_1d = []
1775
+ element_node_counts_1d = []
1776
+
1777
+ # Build edge-to-element mapping from 2D elements
1778
+ edge_to_element = defaultdict(list) # edge (n1, n2) -> list of element indices
1779
+ element_edges = {} # element_idx -> list of edges
1780
+
1781
+ for elem_idx, (element, elem_type) in enumerate(zip(elements_2d, element_types_2d)):
1782
+ edges = []
1783
+
1784
+ if elem_type in [3, 6]: # Triangle
1785
+ # Triangle edges: (0,1), (1,2), (2,0)
1786
+ corner_nodes = [element[0], element[1], element[2]]
1787
+ edge_pairs = [(0, 1), (1, 2), (2, 0)]
1788
+
1789
+ for i, j in edge_pairs:
1790
+ n1, n2 = corner_nodes[i], corner_nodes[j]
1791
+ edge_key = (min(n1, n2), max(n1, n2)) # Canonical edge representation
1792
+ edges.append(edge_key)
1793
+ edge_to_element[edge_key].append(elem_idx)
1794
+
1795
+ elif elem_type in [4, 8, 9]: # Quadrilateral
1796
+ # Quadrilateral edges: (0,1), (1,2), (2,3), (3,0)
1797
+ corner_nodes = [element[0], element[1], element[2], element[3]]
1798
+ edge_pairs = [(0, 1), (1, 2), (2, 3), (3, 0)]
1799
+
1800
+ for i, j in edge_pairs:
1801
+ n1, n2 = corner_nodes[i], corner_nodes[j]
1802
+ edge_key = (min(n1, n2), max(n1, n2)) # Canonical edge representation
1803
+ edges.append(edge_key)
1804
+ edge_to_element[edge_key].append(elem_idx)
1805
+
1806
+ element_edges[elem_idx] = edges
1807
+
1808
+ if debug:
1809
+ print(f"Built edge map with {len(edge_to_element)} unique edges from {len(elements_2d)} 2D elements")
1810
+
1811
+ # For each reinforcement line, find 2D element edges that lie along it
1812
+ for line_idx, line_pts in enumerate(lines):
1813
+ line_pts_clean = remove_duplicate_endpoint(list(line_pts))
1814
+
1815
+ if len(line_pts_clean) < 2:
1816
+ continue
1817
+
1818
+ if debug:
1819
+ print(f"Processing reinforcement line {line_idx}: {line_pts_clean}")
1820
+
1821
+ # Find all 2D element edges that lie along this reinforcement line
1822
+ line_edges = []
1823
+
1824
+ for edge_key, elem_indices in edge_to_element.items():
1825
+ n1, n2 = edge_key
1826
+
1827
+ # Get coordinates of edge endpoints
1828
+ coord1 = nodes[n1]
1829
+ coord2 = nodes[n2]
1830
+
1831
+ # Check if this edge lies along the reinforcement line
1832
+ if is_edge_on_reinforcement_line(coord1, coord2, line_pts_clean, tolerance=1e-6):
1833
+ line_edges.append((n1, n2))
1834
+ if debug:
1835
+ print(f" Found edge ({n1}, {n2}) at coords {coord1} -> {coord2}")
1836
+
1837
+ # Sort edges to form continuous 1D elements along the line
1838
+ if line_edges:
1839
+ sorted_edges = sort_edges_along_line(line_edges, nodes, line_pts_clean, debug)
1840
+
1841
+ # Create 1D elements from sorted edges
1842
+ for n1, n2 in sorted_edges:
1843
+ # For linear elements, just use the two nodes
1844
+ elements_1d.append([n1, n2, 0]) # Pad to 3 columns
1845
+ mat_ids_1d.append(line_idx)
1846
+ element_node_counts_1d.append(2)
1847
+
1848
+ if debug:
1849
+ print(f" Created {len(sorted_edges)} 1D elements for line {line_idx}")
1850
+
1851
+ if debug:
1852
+ print(f"Total 1D elements extracted: {len(elements_1d)}")
1853
+
1854
+ return elements_1d, mat_ids_1d, element_node_counts_1d
1855
+
1856
+
1857
+ def is_edge_on_reinforcement_line(coord1, coord2, line_pts, tolerance=1e-6):
1858
+ """
1859
+ Check if an edge lies along a reinforcement line.
1860
+
1861
+ Parameters:
1862
+ coord1, coord2: Edge endpoint coordinates (x, y)
1863
+ line_pts: List of (x, y) points defining the reinforcement line
1864
+ tolerance: Tolerance for coincidence checking
1865
+
1866
+ Returns:
1867
+ bool: True if edge lies along the reinforcement line
1868
+ """
1869
+ x1, y1 = coord1
1870
+ x2, y2 = coord2
1871
+
1872
+ # Check if both endpoints lie on the reinforcement line
1873
+ point1_on_line = is_point_on_line_segments(coord1, line_pts, tolerance)
1874
+ point2_on_line = is_point_on_line_segments(coord2, line_pts, tolerance)
1875
+
1876
+ if not (point1_on_line and point2_on_line):
1877
+ return False
1878
+
1879
+ # Additional check: ensure edge direction is consistent with line direction
1880
+ # This prevents selecting edges that cross the reinforcement line
1881
+ edge_vector = np.array([x2 - x1, y2 - y1])
1882
+ edge_length = np.linalg.norm(edge_vector)
1883
+
1884
+ if edge_length < tolerance:
1885
+ return False
1886
+
1887
+ edge_unit = edge_vector / edge_length
1888
+
1889
+ # Check alignment with any segment of the reinforcement line
1890
+ # This allows edges to span multiple segments after intersection preprocessing
1891
+ for i in range(len(line_pts) - 1):
1892
+ seg_start = np.array(line_pts[i])
1893
+ seg_end = np.array(line_pts[i + 1])
1894
+ seg_vector = seg_end - seg_start
1895
+ seg_length = np.linalg.norm(seg_vector)
1896
+
1897
+ if seg_length < tolerance:
1898
+ continue
1899
+
1900
+ seg_unit = seg_vector / seg_length
1901
+
1902
+ # Check if edge is aligned with this segment (or opposite direction)
1903
+ dot_product = abs(np.dot(edge_unit, seg_unit))
1904
+ if dot_product > 0.95: # Nearly parallel (cos(18°) ≈ 0.95)
1905
+ # More flexible check: edge should be collinear with the reinforcement line
1906
+ # and both endpoints should lie on the line (but not necessarily on the same segment)
1907
+ return True
1908
+
1909
+ return False
1910
+
1911
+
1912
+ def is_point_on_line_segments(point, line_pts, tolerance=1e-6):
1913
+ """
1914
+ Check if a point lies on any segment of a multi-segment line.
1915
+
1916
+ Parameters:
1917
+ point: (x, y) coordinates of point to check
1918
+ line_pts: List of (x, y) points defining the line segments
1919
+ tolerance: Tolerance for coincidence checking
1920
+
1921
+ Returns:
1922
+ bool: True if point lies on any line segment
1923
+ """
1924
+ for i in range(len(line_pts) - 1):
1925
+ if is_point_on_line_segment(point, line_pts[i], line_pts[i + 1], tolerance):
1926
+ return True
1927
+ return False
1928
+
1929
+
1930
+ def is_point_on_line_segment(point, seg_start, seg_end, tolerance=1e-6):
1931
+ """
1932
+ Check if a point lies on a line segment.
1933
+
1934
+ Parameters:
1935
+ point: (x, y) coordinates of point to check
1936
+ seg_start: (x, y) coordinates of segment start
1937
+ seg_end: (x, y) coordinates of segment end
1938
+ tolerance: Tolerance for coincidence checking
1939
+
1940
+ Returns:
1941
+ bool: True if point lies on the line segment
1942
+ """
1943
+ px, py = point
1944
+ x1, y1 = seg_start
1945
+ x2, y2 = seg_end
1946
+
1947
+ # Check if point is within bounding box of segment
1948
+ if not (min(x1, x2) - tolerance <= px <= max(x1, x2) + tolerance and
1949
+ min(y1, y2) - tolerance <= py <= max(y1, y2) + tolerance):
1950
+ return False
1951
+
1952
+ # Check collinearity using cross product
1953
+ cross_product = abs((py - y1) * (x2 - x1) - (px - x1) * (y2 - y1))
1954
+
1955
+ # Check if cross product is close to zero (collinear)
1956
+ if cross_product < tolerance:
1957
+ # Verify point is between segment endpoints using dot product
1958
+ dot_product = (px - x1) * (x2 - x1) + (py - y1) * (y2 - y1)
1959
+ segment_length_sq = (x2 - x1) ** 2 + (y2 - y1) ** 2
1960
+
1961
+ if segment_length_sq < tolerance: # Degenerate segment
1962
+ return abs(px - x1) < tolerance and abs(py - y1) < tolerance
1963
+
1964
+ # Parameter t should be between 0 and 1 for point to be on segment
1965
+ t = dot_product / segment_length_sq
1966
+ return -tolerance <= t <= 1 + tolerance
1967
+
1968
+ return False
1969
+
1970
+
1971
+ def sort_edges_along_line(edges, nodes, line_pts, debug=False):
1972
+ """
1973
+ Sort edges to form a continuous sequence along a reinforcement line.
1974
+
1975
+ Parameters:
1976
+ edges: List of (n1, n2) edge tuples
1977
+ nodes: Node coordinates array
1978
+ line_pts: Reinforcement line points
1979
+ debug: Enable debug output
1980
+
1981
+ Returns:
1982
+ list: Sorted list of (n1, n2) edge tuples
1983
+ """
1984
+ if not edges:
1985
+ return []
1986
+
1987
+ if len(edges) == 1:
1988
+ return edges
1989
+
1990
+ # Build connectivity graph
1991
+ node_connections = defaultdict(list)
1992
+ for n1, n2 in edges:
1993
+ node_connections[n1].append(n2)
1994
+ node_connections[n2].append(n1)
1995
+
1996
+ # Find start node (should have only one connection, or be closest to line start)
1997
+ line_start = np.array(line_pts[0])
1998
+ line_end = np.array(line_pts[-1])
1999
+
2000
+ start_candidates = []
2001
+ for node in node_connections:
2002
+ if len(node_connections[node]) == 1: # End node
2003
+ start_candidates.append(node)
2004
+
2005
+ if not start_candidates:
2006
+ # No clear end nodes, use node closest to line start
2007
+ min_dist = float('inf')
2008
+ start_node = list(node_connections.keys())[0]
2009
+ for node in node_connections:
2010
+ dist = np.linalg.norm(nodes[node] - line_start)
2011
+ if dist < min_dist:
2012
+ min_dist = dist
2013
+ start_node = node
2014
+ else:
2015
+ # Choose end node closest to line start
2016
+ min_dist = float('inf')
2017
+ start_node = start_candidates[0]
2018
+ for node in start_candidates:
2019
+ dist = np.linalg.norm(nodes[node] - line_start)
2020
+ if dist < min_dist:
2021
+ min_dist = dist
2022
+ start_node = node
2023
+
2024
+ # Trace path from start node
2025
+ sorted_edges = []
2026
+ used_edges = set()
2027
+ current_node = start_node
2028
+
2029
+ while True:
2030
+ # Find next unused edge from current node
2031
+ next_node = None
2032
+ for neighbor in node_connections[current_node]:
2033
+ edge_key = (min(current_node, neighbor), max(current_node, neighbor))
2034
+ if edge_key not in used_edges:
2035
+ next_node = neighbor
2036
+ used_edges.add(edge_key)
2037
+ sorted_edges.append((current_node, next_node))
2038
+ break
2039
+
2040
+ if next_node is None:
2041
+ break
2042
+
2043
+ current_node = next_node
2044
+
2045
+ if debug:
2046
+ print(f" Sorted {len(sorted_edges)} edges along line")
2047
+
2048
+ return sorted_edges
2049
+
2050
+ def verify_mesh_connectivity(mesh, tolerance=1e-8):
2051
+ """
2052
+ Verify that the mesh is properly connected by checking for duplicate nodes at shared boundaries.
2053
+
2054
+ Parameters:
2055
+ mesh: Mesh dictionary with 'nodes' and 'elements' keys
2056
+ tolerance: Tolerance for considering nodes as duplicates
2057
+
2058
+ Returns:
2059
+ dict: Connectivity verification results
2060
+ """
2061
+ import numpy as np
2062
+ from collections import defaultdict
2063
+
2064
+ nodes = mesh["nodes"]
2065
+ elements = mesh["elements"]
2066
+
2067
+ # Find duplicate nodes (nodes at same location)
2068
+ duplicate_groups = []
2069
+ used_indices = set()
2070
+
2071
+ for i in range(len(nodes)):
2072
+ if i in used_indices:
2073
+ continue
2074
+
2075
+ duplicates = [i]
2076
+ for j in range(i + 1, len(nodes)):
2077
+ if j in used_indices:
2078
+ continue
2079
+
2080
+ if np.linalg.norm(nodes[i] - nodes[j]) < tolerance:
2081
+ duplicates.append(j)
2082
+ used_indices.add(j)
2083
+
2084
+ if len(duplicates) > 1:
2085
+ duplicate_groups.append(duplicates)
2086
+ used_indices.add(i)
2087
+
2088
+ # Check element connectivity
2089
+ element_connectivity = defaultdict(set)
2090
+ for elem_idx, element in enumerate(elements):
2091
+ for node_idx in element:
2092
+ element_connectivity[node_idx].add(elem_idx)
2093
+
2094
+ # Find isolated nodes (nodes not used by any element)
2095
+ isolated_nodes = []
2096
+ for i in range(len(nodes)):
2097
+ if i not in element_connectivity:
2098
+ isolated_nodes.append(i)
2099
+
2100
+ # Find elements with duplicate nodes
2101
+ elements_with_duplicates = []
2102
+ for elem_idx, element in enumerate(elements):
2103
+ unique_nodes = set(element)
2104
+ if len(unique_nodes) != len(element):
2105
+ elements_with_duplicates.append(elem_idx)
2106
+
2107
+ results = {
2108
+ "total_nodes": len(nodes),
2109
+ "total_elements": len(elements),
2110
+ "duplicate_node_groups": duplicate_groups,
2111
+ "isolated_nodes": isolated_nodes,
2112
+ "elements_with_duplicates": elements_with_duplicates,
2113
+ "is_connected": len(duplicate_groups) == 0 and len(isolated_nodes) == 0
2114
+ }
2115
+
2116
+ return results
2117
+
2118
+ def print_mesh_connectivity_report(mesh, tolerance=1e-8):
2119
+ """
2120
+ Print a detailed report about mesh connectivity.
2121
+
2122
+ Parameters:
2123
+ mesh: Mesh dictionary
2124
+ tolerance: Tolerance for considering nodes as duplicates
2125
+ """
2126
+ results = verify_mesh_connectivity(mesh, tolerance)
2127
+
2128
+ print("=== MESH CONNECTIVITY REPORT ===")
2129
+ print(f"Total nodes: {results['total_nodes']}")
2130
+ print(f"Total elements: {results['total_elements']}")
2131
+ print(f"Mesh is properly connected: {results['is_connected']}")
2132
+ print()
2133
+
2134
+ if results['duplicate_node_groups']:
2135
+ print(f"WARNING: Found {len(results['duplicate_node_groups'])} groups of duplicate nodes:")
2136
+ for i, group in enumerate(results['duplicate_node_groups']):
2137
+ print(f" Group {i+1}: Nodes {group} at position {mesh['nodes'][group[0]]}")
2138
+ print()
2139
+
2140
+ if results['isolated_nodes']:
2141
+ print(f"WARNING: Found {len(results['isolated_nodes'])} isolated nodes:")
2142
+ for node_idx in results['isolated_nodes']:
2143
+ print(f" Node {node_idx} at position {mesh['nodes'][node_idx]}")
2144
+ print()
2145
+
2146
+ if results['elements_with_duplicates']:
2147
+ print(f"WARNING: Found {len(results['elements_with_duplicates'])} elements with duplicate nodes:")
2148
+ for elem_idx in results['elements_with_duplicates']:
2149
+ print(f" Element {elem_idx}: {mesh['elements'][elem_idx]}")
2150
+ print()
2151
+
2152
+ if results['is_connected']:
2153
+ print("✓ Mesh connectivity is good - no duplicate nodes or isolated nodes found.")
2154
+ else:
2155
+ print("✗ Mesh connectivity issues detected. Consider regenerating the mesh.")
2156
+
2157
+ def find_element_containing_point(nodes, elements, element_types, point):
2158
+ """
2159
+ Find which element contains the given point using spatial indexing for efficiency.
2160
+
2161
+ Parameters:
2162
+ nodes: np.ndarray of node coordinates (n_nodes, 2)
2163
+ elements: np.ndarray of element vertex indices (n_elements, 9) - unused nodes set to 0
2164
+ element_types: np.ndarray indicating element type (3, 4, 6, 8, or 9 nodes)
2165
+ point: tuple (x, y) coordinates of the point to find
2166
+
2167
+ Returns:
2168
+ int: Index of the element containing the point, or -1 if not found
2169
+ """
2170
+ x, y = point
2171
+
2172
+ # Use spatial indexing to find candidate elements quickly
2173
+ # Build spatial hash grid if not already built
2174
+ if not hasattr(find_element_containing_point, '_spatial_grid'):
2175
+ find_element_containing_point._spatial_grid = _build_spatial_grid(nodes, elements, element_types)
2176
+
2177
+ spatial_grid = find_element_containing_point._spatial_grid
2178
+
2179
+ # Find grid cell containing the point
2180
+ grid_x = int((x - spatial_grid['x_min']) / spatial_grid['cell_size'])
2181
+ grid_y = int((y - spatial_grid['y_min']) / spatial_grid['cell_size'])
2182
+
2183
+ # Get candidate elements from this cell and neighboring cells
2184
+ candidate_elements = set()
2185
+ for dx in [-1, 0, 1]:
2186
+ for dy in [-1, 0, 1]:
2187
+ cell_key = (grid_x + dx, grid_y + dy)
2188
+ if cell_key in spatial_grid['cells']:
2189
+ candidate_elements.update(spatial_grid['cells'][cell_key])
2190
+
2191
+ # Check only the candidate elements
2192
+ for elem_idx in candidate_elements:
2193
+ element = elements[elem_idx]
2194
+ elem_type = element_types[elem_idx]
2195
+
2196
+ if elem_type in [3, 6]: # Triangle (linear or quadratic)
2197
+ # For point-in-element testing, use only corner nodes
2198
+ x1, y1 = nodes[element[0]]
2199
+ x2, y2 = nodes[element[1]]
2200
+ x3, y3 = nodes[element[2]]
2201
+
2202
+ # Calculate barycentric coordinates
2203
+ det = (y2 - y3) * (x1 - x3) + (x3 - x2) * (y1 - y3)
2204
+ if abs(det) < 1e-12: # Degenerate triangle
2205
+ continue
2206
+
2207
+ lambda1 = ((y2 - y3) * (x - x3) + (x3 - x2) * (y - y3)) / det
2208
+ lambda2 = ((y3 - y1) * (x - x3) + (x1 - x3) * (y - y3)) / det
2209
+ lambda3 = 1.0 - lambda1 - lambda2
2210
+
2211
+ # Check if point is inside triangle (all barycentric coordinates >= 0)
2212
+ if lambda1 >= -1e-12 and lambda2 >= -1e-12 and lambda3 >= -1e-12:
2213
+ return elem_idx
2214
+
2215
+ elif elem_type in [4, 8, 9]: # Quadrilateral (linear or quadratic)
2216
+ # For point-in-element testing, use only corner nodes
2217
+ x1, y1 = nodes[element[0]]
2218
+ x2, y2 = nodes[element[1]]
2219
+ x3, y3 = nodes[element[2]]
2220
+ x4, y4 = nodes[element[3]]
2221
+
2222
+ # Use point-in-polygon test for quadrilaterals
2223
+ # Check if point is inside by counting crossings
2224
+ vertices = [(x1, y1), (x2, y2), (x3, y3), (x4, y4)]
2225
+ inside = False
2226
+
2227
+ for j in range(len(vertices)):
2228
+ xi, yi = vertices[j]
2229
+ xj, yj = vertices[(j + 1) % len(vertices)]
2230
+
2231
+ if ((yi > y) != (yj > y)) and (x < (xj - xi) * (y - yi) / (yj - yi) + xi):
2232
+ inside = not inside
2233
+
2234
+ if inside:
2235
+ return elem_idx
2236
+
2237
+ return -1 # Point not found in any element
2238
+
2239
+
2240
+ def _build_spatial_grid(nodes, elements, element_types):
2241
+ """
2242
+ Build a spatial hash grid for efficient element searching.
2243
+
2244
+ Parameters:
2245
+ nodes: np.ndarray of node coordinates (n_nodes, 2)
2246
+ elements: np.ndarray of element vertex indices (n_elements, 8)
2247
+ element_types: np.ndarray indicating element type (3, 4, 6, or 8 nodes)
2248
+
2249
+ Returns:
2250
+ dict: Spatial grid data structure
2251
+ """
2252
+ # Calculate bounding box
2253
+ x_coords = nodes[:, 0]
2254
+ y_coords = nodes[:, 1]
2255
+ x_min, x_max = x_coords.min(), x_coords.max()
2256
+ y_min, y_max = y_coords.min(), y_coords.max()
2257
+
2258
+ # Determine optimal cell size based on average element size
2259
+ total_area = 0
2260
+ for i, (element, elem_type) in enumerate(zip(elements, element_types)):
2261
+ if elem_type in [3, 6]: # Triangle
2262
+ x1, y1 = nodes[element[0]]
2263
+ x2, y2 = nodes[element[1]]
2264
+ x3, y3 = nodes[element[2]]
2265
+ area = 0.5 * abs((x2 - x1) * (y3 - y1) - (x3 - x1) * (y2 - y1))
2266
+ else: # Quadrilateral (4 or 8 nodes)
2267
+ x1, y1 = nodes[element[0]]
2268
+ x2, y2 = nodes[element[1]]
2269
+ x3, y3 = nodes[element[2]]
2270
+ x4, y4 = nodes[element[3]]
2271
+ area = 0.5 * abs((x2 - x1) * (y4 - y1) - (x4 - x1) * (y2 - y1))
2272
+ total_area += area
2273
+
2274
+ avg_element_area = total_area / len(elements)
2275
+ # Cell size should be roughly 2-3 times the square root of average element area
2276
+ cell_size = max(0.1, 2.5 * np.sqrt(avg_element_area))
2277
+
2278
+ # Build grid
2279
+ grid = {
2280
+ 'x_min': x_min,
2281
+ 'y_min': y_min,
2282
+ 'cell_size': cell_size,
2283
+ 'cells': {}
2284
+ }
2285
+
2286
+ # Assign elements to grid cells
2287
+ for elem_idx, (element, elem_type) in enumerate(zip(elements, element_types)):
2288
+ # Calculate element bounding box
2289
+ if elem_type in [3, 6]: # Triangle
2290
+ x_coords = [nodes[element[0]][0], nodes[element[1]][0], nodes[element[2]][0]]
2291
+ y_coords = [nodes[element[0]][1], nodes[element[1]][1], nodes[element[2]][1]]
2292
+ else: # Quadrilateral (4 or 8 nodes)
2293
+ x_coords = [nodes[element[0]][0], nodes[element[1]][0], nodes[element[2]][0], nodes[element[3]][0]]
2294
+ y_coords = [nodes[element[0]][1], nodes[element[1]][1], nodes[element[2]][1], nodes[element[3]][1]]
2295
+
2296
+ elem_x_min, elem_x_max = min(x_coords), max(x_coords)
2297
+ elem_y_min, elem_y_max = min(y_coords), max(y_coords)
2298
+
2299
+ # Find grid cells that overlap with this element
2300
+ start_x = int((elem_x_min - x_min) / cell_size)
2301
+ end_x = int((elem_x_max - x_min) / cell_size) + 1
2302
+ start_y = int((elem_y_min - y_min) / cell_size)
2303
+ end_y = int((elem_y_max - y_min) / cell_size) + 1
2304
+
2305
+ # Add element to all overlapping cells
2306
+ for grid_x in range(start_x, end_x + 1):
2307
+ for grid_y in range(start_y, end_y + 1):
2308
+ cell_key = (grid_x, grid_y)
2309
+ if cell_key not in grid['cells']:
2310
+ grid['cells'][cell_key] = set()
2311
+ grid['cells'][cell_key].add(elem_idx)
2312
+
2313
+ return grid
2314
+
2315
+
2316
+ def interpolate_at_point(nodes, elements, element_types, values, point):
2317
+ """
2318
+ Interpolate values at a given point using the mesh.
2319
+
2320
+ Parameters:
2321
+ nodes: np.ndarray of node coordinates (n_nodes, 2)
2322
+ elements: np.ndarray of element vertex indices (n_elements, 8)
2323
+ element_types: np.ndarray indicating element type (3, 4, 6, or 8 nodes)
2324
+ values: np.ndarray of values at nodes (n_nodes,)
2325
+ point: tuple (x, y) coordinates of the point to interpolate at
2326
+
2327
+ Returns:
2328
+ float: Interpolated value at the point, or 0.0 if point not found
2329
+ """
2330
+ # Find the element containing the point
2331
+ element_idx = find_element_containing_point(nodes, elements, element_types, point)
2332
+
2333
+ if element_idx == -1:
2334
+ return 0.0 # Point not found in any element
2335
+
2336
+ element = elements[element_idx]
2337
+ elem_type = element_types[element_idx]
2338
+ x, y = point
2339
+
2340
+ if elem_type == 3: # Linear triangle
2341
+ # Get triangle vertices and values
2342
+ x1, y1 = nodes[element[0]]
2343
+ x2, y2 = nodes[element[1]]
2344
+ x3, y3 = nodes[element[2]]
2345
+ v1 = values[element[0]]
2346
+ v2 = values[element[1]]
2347
+ v3 = values[element[2]]
2348
+
2349
+ # Calculate barycentric coordinates
2350
+ det = (y2 - y3) * (x1 - x3) + (x3 - x2) * (y1 - y3)
2351
+ lambda1 = ((y2 - y3) * (x - x3) + (x3 - x2) * (y - y3)) / det
2352
+ lambda2 = ((y3 - y1) * (x - x3) + (x1 - x3) * (y - y3)) / det
2353
+ lambda3 = 1.0 - lambda1 - lambda2
2354
+
2355
+ # Interpolate using barycentric coordinates
2356
+ interpolated_value = lambda1 * v1 + lambda2 * v2 + lambda3 * v3
2357
+
2358
+ elif elem_type == 6: # Quadratic triangle
2359
+ # Get all 6 nodes: corners (0,1,2) and midpoints (3,4,5)
2360
+ # Node ordering: 0-1-2 corners, 3 midpoint of 0-1, 4 midpoint of 1-2, 5 midpoint of 2-0
2361
+ corner_nodes = [element[0], element[1], element[2]]
2362
+ midpoint_nodes = [element[3], element[4], element[5]]
2363
+
2364
+ # Get coordinates
2365
+ x1, y1 = nodes[corner_nodes[0]] # Node 0
2366
+ x2, y2 = nodes[corner_nodes[1]] # Node 1
2367
+ x3, y3 = nodes[corner_nodes[2]] # Node 2
2368
+
2369
+ # Calculate barycentric coordinates (L1, L2, L3)
2370
+ det = (y2 - y3) * (x1 - x3) + (x3 - x2) * (y1 - y3)
2371
+ L1 = ((y2 - y3) * (x - x3) + (x3 - x2) * (y - y3)) / det
2372
+ L2 = ((y3 - y1) * (x - x3) + (x1 - x3) * (y - y3)) / det
2373
+ L3 = 1.0 - L1 - L2
2374
+
2375
+ # Quadratic shape functions for 6-node triangle
2376
+ N = np.zeros(6)
2377
+ N[0] = L1 * (2*L1 - 1) # Corner node 0
2378
+ N[1] = L2 * (2*L2 - 1) # Corner node 1
2379
+ N[2] = L3 * (2*L3 - 1) # Corner node 2
2380
+ N[3] = 4 * L1 * L2 # Midpoint node 0-1
2381
+ N[4] = 4 * L2 * L3 # Midpoint node 1-2
2382
+ N[5] = 4 * L3 * L1 # Midpoint node 2-0
2383
+
2384
+ # Interpolate using quadratic shape functions
2385
+ interpolated_value = 0.0
2386
+ for i in range(6):
2387
+ interpolated_value += N[i] * values[element[i]]
2388
+
2389
+ elif elem_type == 4: # Linear quadrilateral
2390
+ # Get quadrilateral vertices and values
2391
+ x1, y1 = nodes[element[0]]
2392
+ x2, y2 = nodes[element[1]]
2393
+ x3, y3 = nodes[element[2]]
2394
+ x4, y4 = nodes[element[3]]
2395
+ v1 = values[element[0]]
2396
+ v2 = values[element[1]]
2397
+ v3 = values[element[2]]
2398
+ v4 = values[element[3]]
2399
+
2400
+ # Use proper bilinear shape functions for quadrilaterals
2401
+ # Map to natural coordinates (xi, eta) in [-1, 1] x [-1, 1]
2402
+
2403
+ # For bilinear quad4, use iterative Newton-Raphson to find natural coordinates
2404
+ # Initial guess at element center
2405
+ xi, eta = 0.0, 0.0
2406
+
2407
+ # Newton-Raphson iteration to find (xi, eta) such that physical coordinates match
2408
+ for _ in range(10): # Max 10 iterations
2409
+ # Bilinear shape functions
2410
+ N = np.array([
2411
+ 0.25 * (1-xi) * (1-eta), # Node 0
2412
+ 0.25 * (1+xi) * (1-eta), # Node 1
2413
+ 0.25 * (1+xi) * (1+eta), # Node 2
2414
+ 0.25 * (1-xi) * (1+eta) # Node 3
2415
+ ])
2416
+
2417
+ # Shape function derivatives
2418
+ dN_dxi = np.array([
2419
+ -0.25 * (1-eta), # Node 0
2420
+ 0.25 * (1-eta), # Node 1
2421
+ 0.25 * (1+eta), # Node 2
2422
+ -0.25 * (1+eta) # Node 3
2423
+ ])
2424
+
2425
+ dN_deta = np.array([
2426
+ -0.25 * (1-xi), # Node 0
2427
+ -0.25 * (1+xi), # Node 1
2428
+ 0.25 * (1+xi), # Node 2
2429
+ 0.25 * (1-xi) # Node 3
2430
+ ])
2431
+
2432
+ # Current physical coordinates
2433
+ x_curr = N[0]*x1 + N[1]*x2 + N[2]*x3 + N[3]*x4
2434
+ y_curr = N[0]*y1 + N[1]*y2 + N[2]*y3 + N[3]*y4
2435
+
2436
+ # Residual
2437
+ fx = x_curr - x
2438
+ fy = y_curr - y
2439
+
2440
+ if abs(fx) < 1e-10 and abs(fy) < 1e-10:
2441
+ break
2442
+
2443
+ # Jacobian
2444
+ dx_dxi = dN_dxi[0]*x1 + dN_dxi[1]*x2 + dN_dxi[2]*x3 + dN_dxi[3]*x4
2445
+ dx_deta = dN_deta[0]*x1 + dN_deta[1]*x2 + dN_deta[2]*x3 + dN_deta[3]*x4
2446
+ dy_dxi = dN_dxi[0]*y1 + dN_dxi[1]*y2 + dN_dxi[2]*y3 + dN_dxi[3]*y4
2447
+ dy_deta = dN_deta[0]*y1 + dN_deta[1]*y2 + dN_deta[2]*y3 + dN_deta[3]*y4
2448
+
2449
+ det_J = dx_dxi * dy_deta - dx_deta * dy_dxi
2450
+ if abs(det_J) < 1e-12:
2451
+ break
2452
+
2453
+ # Newton-Raphson update
2454
+ dxi = (dy_deta * fx - dx_deta * fy) / det_J
2455
+ deta = (-dy_dxi * fx + dx_dxi * fy) / det_J
2456
+
2457
+ xi -= dxi
2458
+ eta -= deta
2459
+
2460
+ # Clamp to [-1,1]
2461
+ xi = max(-1, min(1, xi))
2462
+ eta = max(-1, min(1, eta))
2463
+
2464
+ # Final bilinear shape functions
2465
+ N = np.array([
2466
+ 0.25 * (1-xi) * (1-eta), # Node 0
2467
+ 0.25 * (1+xi) * (1-eta), # Node 1
2468
+ 0.25 * (1+xi) * (1+eta), # Node 2
2469
+ 0.25 * (1-xi) * (1+eta) # Node 3
2470
+ ])
2471
+
2472
+ # Interpolate using bilinear shape functions
2473
+ interpolated_value = N[0]*v1 + N[1]*v2 + N[2]*v3 + N[3]*v4
2474
+
2475
+ elif elem_type == 8: # Quadratic quadrilateral
2476
+ # Get all 8 nodes: corners (0,1,2,3) and midpoints (4,5,6,7)
2477
+ # Node ordering: 0-1-2-3 corners, 4 midpoint of 0-1, 5 midpoint of 1-2,
2478
+ # 6 midpoint of 2-3, 7 midpoint of 3-0
2479
+
2480
+ # Get corner coordinates for mapping to natural coordinates
2481
+ x1, y1 = nodes[element[0]] # Node 0
2482
+ x2, y2 = nodes[element[1]] # Node 1
2483
+ x3, y3 = nodes[element[2]] # Node 2
2484
+ x4, y4 = nodes[element[3]] # Node 3
2485
+
2486
+ # For quadratic quads, we need to map from physical (x,y) to natural coordinates (xi,eta)
2487
+ # This is complex for general quadrilaterals, so use simplified approach:
2488
+ # Map to unit square [-1,1] x [-1,1] using bilinear mapping of corners
2489
+
2490
+ # Bilinear inverse mapping (approximate for general quads)
2491
+ # Solve for natural coordinates xi, eta in [-1,1] x [-1,1]
2492
+
2493
+ # For simplicity, use area coordinate method similar to linear quad
2494
+ # but with quadratic shape functions
2495
+
2496
+ # Calculate area coordinates (this is an approximation)
2497
+ A_total = 0.5 * abs((x3-x1)*(y4-y2) - (x4-x2)*(y3-y1))
2498
+ if A_total < 1e-12:
2499
+ # Degenerate element, fall back to linear
2500
+ A1 = abs((x - x1) * (y2 - y1) - (x2 - x1) * (y - y1)) / 2
2501
+ A2 = abs((x - x2) * (y3 - y2) - (x3 - x2) * (y - y2)) / 2
2502
+ A3 = abs((x - x3) * (y4 - y3) - (x4 - x3) * (y - y3)) / 2
2503
+ A4 = abs((x - x4) * (y1 - y4) - (x1 - x4) * (y - y4)) / 2
2504
+ A_sum = A1 + A2 + A3 + A4
2505
+ if A_sum > 1e-12:
2506
+ w1, w2, w3, w4 = A1/A_sum, A2/A_sum, A3/A_sum, A4/A_sum
2507
+ else:
2508
+ w1 = w2 = w3 = w4 = 0.25
2509
+
2510
+ # Linear interpolation as fallback
2511
+ interpolated_value = (w1 * values[element[0]] + w2 * values[element[1]] +
2512
+ w3 * values[element[2]] + w4 * values[element[3]])
2513
+ else:
2514
+ # For proper quadratic interpolation, we need natural coordinates
2515
+ # This is a simplified implementation - full implementation would solve
2516
+ # the nonlinear system for xi,eta
2517
+
2518
+ # Use parametric coordinates estimation
2519
+ # Map point to approximate natural coordinates
2520
+ xi_approx = 2 * (x - 0.5*(x1+x3)) / (x2+x3-x1-x4) if abs(x2+x3-x1-x4) > 1e-12 else 0
2521
+ eta_approx = 2 * (y - 0.5*(y1+y3)) / (y2+y4-y1-y3) if abs(y2+y4-y1-y3) > 1e-12 else 0
2522
+
2523
+ # Clamp to [-1,1]
2524
+ xi = max(-1, min(1, xi_approx))
2525
+ eta = max(-1, min(1, eta_approx))
2526
+
2527
+ # Quadratic shape functions for 8-node quad in natural coordinates
2528
+ N = np.zeros(8)
2529
+ # Corner nodes
2530
+ N[0] = 0.25 * (1-xi) * (1-eta) * (-xi-eta-1) # Node 0
2531
+ N[1] = 0.25 * (1+xi) * (1-eta) * (xi-eta-1) # Node 1
2532
+ N[2] = 0.25 * (1+xi) * (1+eta) * (xi+eta-1) # Node 2
2533
+ N[3] = 0.25 * (1-xi) * (1+eta) * (-xi+eta-1) # Node 3
2534
+ # Midpoint nodes
2535
+ N[4] = 0.5 * (1-xi*xi) * (1-eta) # Node 4 (midpoint 0-1)
2536
+ N[5] = 0.5 * (1+xi) * (1-eta*eta) # Node 5 (midpoint 1-2)
2537
+ N[6] = 0.5 * (1-xi*xi) * (1+eta) # Node 6 (midpoint 2-3)
2538
+ N[7] = 0.5 * (1-xi) * (1-eta*eta) # Node 7 (midpoint 3-0)
2539
+
2540
+ # Interpolate using quadratic shape functions
2541
+ interpolated_value = 0.0
2542
+ for i in range(8):
2543
+ interpolated_value += N[i] * values[element[i]]
2544
+
2545
+ elif elem_type == 9: # Biquadratic quadrilateral (9-node Lagrange)
2546
+ # Get all 9 nodes: corners (0,1,2,3), edges (4,5,6,7), and center (8)
2547
+ # Node ordering: 0-1-2-3 corners, 4 midpoint of 0-1, 5 midpoint of 1-2,
2548
+ # 6 midpoint of 2-3, 7 midpoint of 3-0, 8 center
2549
+
2550
+ # Get corner coordinates for mapping to natural coordinates
2551
+ x1, y1 = nodes[element[0]] # Node 0
2552
+ x2, y2 = nodes[element[1]] # Node 1
2553
+ x3, y3 = nodes[element[2]] # Node 2
2554
+ x4, y4 = nodes[element[3]] # Node 3
2555
+
2556
+ # Newton-Raphson iteration to find natural coordinates (xi, eta)
2557
+ xi, eta = 0.0, 0.0 # Initial guess at element center
2558
+
2559
+ for _ in range(10): # Max 10 iterations
2560
+ # Biquadratic Lagrange shape functions for all 9 nodes
2561
+ N = np.zeros(9)
2562
+ # Corner nodes
2563
+ N[0] = 0.25 * xi * (xi-1) * eta * (eta-1) # Node 0: (-1,-1)
2564
+ N[1] = 0.25 * xi * (xi+1) * eta * (eta-1) # Node 1: (1,-1)
2565
+ N[2] = 0.25 * xi * (xi+1) * eta * (eta+1) # Node 2: (1,1)
2566
+ N[3] = 0.25 * xi * (xi-1) * eta * (eta+1) # Node 3: (-1,1)
2567
+ # Edge nodes
2568
+ N[4] = 0.5 * (1-xi*xi) * eta * (eta-1) # Node 4: (0,-1)
2569
+ N[5] = 0.5 * xi * (xi+1) * (1-eta*eta) # Node 5: (1,0)
2570
+ N[6] = 0.5 * (1-xi*xi) * eta * (eta+1) # Node 6: (0,1)
2571
+ N[7] = 0.5 * xi * (xi-1) * (1-eta*eta) # Node 7: (-1,0)
2572
+ # Center node
2573
+ N[8] = (1-xi*xi) * (1-eta*eta) # Node 8: (0,0)
2574
+
2575
+ # Shape function derivatives w.r.t. xi
2576
+ dN_dxi = np.zeros(9)
2577
+ dN_dxi[0] = 0.25 * (2*xi-1) * eta * (eta-1)
2578
+ dN_dxi[1] = 0.25 * (2*xi+1) * eta * (eta-1)
2579
+ dN_dxi[2] = 0.25 * (2*xi+1) * eta * (eta+1)
2580
+ dN_dxi[3] = 0.25 * (2*xi-1) * eta * (eta+1)
2581
+ dN_dxi[4] = -xi * eta * (eta-1)
2582
+ dN_dxi[5] = 0.5 * (2*xi+1) * (1-eta*eta)
2583
+ dN_dxi[6] = -xi * eta * (eta+1)
2584
+ dN_dxi[7] = 0.5 * (2*xi-1) * (1-eta*eta)
2585
+ dN_dxi[8] = -2*xi * (1-eta*eta)
2586
+
2587
+ # Shape function derivatives w.r.t. eta
2588
+ dN_deta = np.zeros(9)
2589
+ dN_deta[0] = 0.25 * xi * (xi-1) * (2*eta-1)
2590
+ dN_deta[1] = 0.25 * xi * (xi+1) * (2*eta-1)
2591
+ dN_deta[2] = 0.25 * xi * (xi+1) * (2*eta+1)
2592
+ dN_deta[3] = 0.25 * xi * (xi-1) * (2*eta+1)
2593
+ dN_deta[4] = 0.5 * (1-xi*xi) * (2*eta-1)
2594
+ dN_deta[5] = -eta * xi * (xi+1)
2595
+ dN_deta[6] = 0.5 * (1-xi*xi) * (2*eta+1)
2596
+ dN_deta[7] = -eta * xi * (xi-1)
2597
+ dN_deta[8] = -2*eta * (1-xi*xi)
2598
+
2599
+ # Current physical coordinates using all 9 nodes
2600
+ node_coords = nodes[element[:9]]
2601
+ x_curr = np.sum(N * node_coords[:, 0])
2602
+ y_curr = np.sum(N * node_coords[:, 1])
2603
+
2604
+ # Residual
2605
+ fx = x_curr - x
2606
+ fy = y_curr - y
2607
+
2608
+ if abs(fx) < 1e-10 and abs(fy) < 1e-10:
2609
+ break
2610
+
2611
+ # Jacobian
2612
+ dx_dxi = np.sum(dN_dxi * node_coords[:, 0])
2613
+ dx_deta = np.sum(dN_deta * node_coords[:, 0])
2614
+ dy_dxi = np.sum(dN_dxi * node_coords[:, 1])
2615
+ dy_deta = np.sum(dN_deta * node_coords[:, 1])
2616
+
2617
+ det_J = dx_dxi * dy_deta - dx_deta * dy_dxi
2618
+ if abs(det_J) < 1e-12:
2619
+ break
2620
+
2621
+ # Newton-Raphson update
2622
+ dxi = (dy_deta * fx - dx_deta * fy) / det_J
2623
+ deta = (-dy_dxi * fx + dx_dxi * fy) / det_J
2624
+
2625
+ xi -= dxi
2626
+ eta -= deta
2627
+
2628
+ # Clamp to [-1,1]
2629
+ xi = max(-1, min(1, xi))
2630
+ eta = max(-1, min(1, eta))
2631
+
2632
+ # Final biquadratic shape functions
2633
+ N = np.zeros(9)
2634
+ N[0] = 0.25 * xi * (xi-1) * eta * (eta-1) # Node 0
2635
+ N[1] = 0.25 * xi * (xi+1) * eta * (eta-1) # Node 1
2636
+ N[2] = 0.25 * xi * (xi+1) * eta * (eta+1) # Node 2
2637
+ N[3] = 0.25 * xi * (xi-1) * eta * (eta+1) # Node 3
2638
+ N[4] = 0.5 * (1-xi*xi) * eta * (eta-1) # Node 4
2639
+ N[5] = 0.5 * xi * (xi+1) * (1-eta*eta) # Node 5
2640
+ N[6] = 0.5 * (1-xi*xi) * eta * (eta+1) # Node 6
2641
+ N[7] = 0.5 * xi * (xi-1) * (1-eta*eta) # Node 7
2642
+ N[8] = (1-xi*xi) * (1-eta*eta) # Node 8
2643
+
2644
+ # Interpolate using biquadratic shape functions
2645
+ interpolated_value = 0.0
2646
+ for i in range(9):
2647
+ interpolated_value += N[i] * values[element[i]]
2648
+
2649
+ else:
2650
+ return 0.0 # Unknown element type
2651
+
2652
+ # Return zero if interpolated value is negative (pore pressure cannot be negative)
2653
+ return max(0.0, interpolated_value)
2654
+
2655
+
2656
+ def test_1d_element_alignment(mesh, reinforcement_lines, tolerance=1e-6, debug=True):
2657
+ """
2658
+ Test that 1D elements correctly align with reinforcement lines.
2659
+
2660
+ This function verifies that:
2661
+ 1. Each reinforcement line is represented by a sequence of 1D elements
2662
+ 2. The 1D elements form continuous paths along each reinforcement line
2663
+ 3. The element endpoints match the expected line segment endpoints
2664
+
2665
+ Parameters:
2666
+ mesh: Dictionary containing nodes and 1D element data
2667
+ reinforcement_lines: List of reinforcement lines, each containing coordinate tuples
2668
+ tolerance: Tolerance for coordinate comparison (default 1e-6)
2669
+ debug: Enable detailed debug output
2670
+
2671
+ Returns:
2672
+ bool: True if all tests pass, False otherwise
2673
+ """
2674
+ if debug:
2675
+ print("\n=== Testing 1D Element Alignment ===")
2676
+
2677
+ if 'elements_1d' not in mesh:
2678
+ print("ERROR: No 1D elements found in mesh")
2679
+ return False
2680
+
2681
+ elements_1d = mesh['elements_1d']
2682
+ if elements_1d is None or len(elements_1d) == 0:
2683
+ print("ERROR: No 1D elements found in mesh")
2684
+ return False
2685
+
2686
+ nodes = np.array(mesh['nodes'])
2687
+ elements_1d = mesh['elements_1d']
2688
+
2689
+ if debug:
2690
+ print(f"Testing {len(reinforcement_lines)} reinforcement lines")
2691
+ print(f"Found {len(elements_1d)} 1D elements")
2692
+
2693
+ success = True
2694
+
2695
+ for line_idx, line_pts in enumerate(reinforcement_lines):
2696
+ if debug:
2697
+ print(f"\nTesting line {line_idx}: {line_pts}")
2698
+
2699
+ # Remove duplicate endpoints and get expected segments
2700
+ line_pts_clean = remove_duplicate_endpoint(list(line_pts))
2701
+ if len(line_pts_clean) < 2:
2702
+ if debug:
2703
+ print(f" Skipping line {line_idx}: insufficient points")
2704
+ continue
2705
+
2706
+ # Expected segments for this line
2707
+ expected_segments = []
2708
+ for i in range(len(line_pts_clean) - 1):
2709
+ expected_segments.append((line_pts_clean[i], line_pts_clean[i + 1]))
2710
+
2711
+ if debug:
2712
+ print(f" Expected {len(expected_segments)} segments:")
2713
+ for i, (start, end) in enumerate(expected_segments):
2714
+ print(f" Segment {i}: {start} -> {end}")
2715
+
2716
+ # Find 1D elements that belong to this reinforcement line using material IDs
2717
+ line_elements = []
2718
+ if 'element_materials_1d' in mesh:
2719
+ element_materials_1d = mesh['element_materials_1d']
2720
+ for elem_idx, (element, material_id) in enumerate(zip(elements_1d, element_materials_1d)):
2721
+ # Skip zero-padded elements
2722
+ if len(element) < 2 or element[1] == 0:
2723
+ continue
2724
+
2725
+ # Check if this element belongs to the current line
2726
+ if material_id == line_idx + 1: # Material IDs are 1-based
2727
+ # Get element coordinates
2728
+ try:
2729
+ coord1 = nodes[element[0]]
2730
+ coord2 = nodes[element[1]]
2731
+ except IndexError:
2732
+ if debug:
2733
+ print(f" WARNING: Element {elem_idx} has invalid node indices {element[0]}, {element[1]}")
2734
+ continue
2735
+
2736
+ line_elements.append((elem_idx, coord1, coord2))
2737
+ else:
2738
+ # Fallback: use the old method if material IDs are not available
2739
+ for elem_idx, element in enumerate(elements_1d):
2740
+ # Skip zero-padded elements
2741
+ if len(element) < 2 or element[1] == 0:
2742
+ continue
2743
+
2744
+ # Get element coordinates
2745
+ try:
2746
+ coord1 = nodes[element[0]]
2747
+ coord2 = nodes[element[1]]
2748
+ except IndexError:
2749
+ if debug:
2750
+ print(f" WARNING: Element {elem_idx} has invalid node indices {element[0]}, {element[1]}")
2751
+ continue
2752
+
2753
+ # Check if this element lies on the current reinforcement line
2754
+ if is_edge_on_reinforcement_line(coord1, coord2, line_pts_clean, tolerance):
2755
+ line_elements.append((elem_idx, coord1, coord2))
2756
+
2757
+ if debug:
2758
+ print(f" Found {len(line_elements)} 1D elements on this line:")
2759
+ for elem_idx, coord1, coord2 in line_elements:
2760
+ print(f" Element {elem_idx}: {coord1} -> {coord2}")
2761
+
2762
+ # Test 1: Check that we have at least some 1D elements for this line
2763
+ if len(line_elements) == 0:
2764
+ print(f"ERROR: Line {line_idx} has no 1D elements")
2765
+ success = False
2766
+ continue
2767
+
2768
+ # Test 2: Check that we have reasonable number of elements
2769
+ # After intersection preprocessing, we may have more elements than original segments
2770
+ # But we should have at least some elements for each line
2771
+ if len(line_elements) == 0:
2772
+ print(f"ERROR: Line {line_idx} has no 1D elements")
2773
+ success = False
2774
+ continue
2775
+
2776
+ # Test 2: Check if elements form continuous path
2777
+ if len(line_elements) > 1:
2778
+ # Sort elements to form continuous sequence
2779
+ sorted_elements = []
2780
+ remaining_elements = line_elements.copy()
2781
+
2782
+ # Start with first element
2783
+ current_elem = remaining_elements.pop(0)
2784
+ sorted_elements.append(current_elem)
2785
+
2786
+ # Build chain by finding connecting elements
2787
+ while remaining_elements:
2788
+ last_coord = sorted_elements[-1][2] # End coordinate of last element
2789
+
2790
+ # Find next element that starts where last one ended
2791
+ found_next = False
2792
+ for i, (elem_idx, coord1, coord2) in enumerate(remaining_elements):
2793
+ if np.linalg.norm(np.array(coord1) - np.array(last_coord)) < tolerance:
2794
+ sorted_elements.append((elem_idx, coord1, coord2))
2795
+ remaining_elements.pop(i)
2796
+ found_next = True
2797
+ break
2798
+ elif np.linalg.norm(np.array(coord2) - np.array(last_coord)) < tolerance:
2799
+ # Element is reversed, flip it
2800
+ sorted_elements.append((elem_idx, coord2, coord1))
2801
+ remaining_elements.pop(i)
2802
+ found_next = True
2803
+ break
2804
+
2805
+ if not found_next:
2806
+ print(f"ERROR: Line {line_idx} elements do not form continuous path")
2807
+ print(f" Cannot connect from {last_coord}")
2808
+ print(f" Remaining elements: {remaining_elements}")
2809
+ success = False
2810
+ break
2811
+
2812
+ line_elements = sorted_elements
2813
+
2814
+ # Test 3: Check that the 1D elements cover the reinforcement line from start to end
2815
+ if len(line_elements) > 0:
2816
+ # Get the start and end points of the reinforcement line
2817
+ line_start = line_pts_clean[0]
2818
+ line_end = line_pts_clean[-1]
2819
+
2820
+ # Find the first and last 1D elements
2821
+ first_elem = line_elements[0]
2822
+ last_elem = line_elements[-1]
2823
+
2824
+ # Check if the first element starts near the line start
2825
+ first_start_dist = np.linalg.norm(np.array(first_elem[1]) - np.array(line_start))
2826
+ first_end_dist = np.linalg.norm(np.array(first_elem[2]) - np.array(line_start))
2827
+
2828
+ # Check if the last element ends near the line end
2829
+ last_start_dist = np.linalg.norm(np.array(last_elem[1]) - np.array(line_end))
2830
+ last_end_dist = np.linalg.norm(np.array(last_elem[2]) - np.array(line_end))
2831
+
2832
+ # The first element should start near the line start (either direction)
2833
+ # Be more flexible due to intersection preprocessing
2834
+ if first_start_dist > tolerance * 10 and first_end_dist > tolerance * 10:
2835
+ print(f"WARNING: Line {line_idx} first element does not start at line start")
2836
+ print(f" Line start: {line_start}")
2837
+ print(f" First element: {first_elem[1]} -> {first_elem[2]}")
2838
+ print(f" Start distances: {first_start_dist:.2e}, {first_end_dist:.2e}")
2839
+ # Don't fail the test for this - just warn
2840
+
2841
+ # The last element should end near the line end (either direction)
2842
+ # Be more flexible due to intersection preprocessing
2843
+ if last_start_dist > tolerance * 10 and last_end_dist > tolerance * 10:
2844
+ print(f"WARNING: Line {line_idx} last element does not end at line end")
2845
+ print(f" Line end: {line_end}")
2846
+ print(f" Last element: {last_elem[1]} -> {last_elem[2]}")
2847
+ print(f" End distances: {last_start_dist:.2e}, {last_end_dist:.2e}")
2848
+ # Don't fail the test for this - just warn
2849
+
2850
+ # Test 4: Check that line path is continuous
2851
+ if len(line_elements) > 1:
2852
+ for i in range(len(line_elements) - 1):
2853
+ end_coord = line_elements[i][2] # End of current element
2854
+ start_coord = line_elements[i + 1][1] # Start of next element
2855
+
2856
+ gap = np.linalg.norm(np.array(end_coord) - np.array(start_coord))
2857
+ if gap > tolerance:
2858
+ print(f"ERROR: Line {line_idx} has gap between elements {i} and {i+1}")
2859
+ print(f" Gap size: {gap:.2e}")
2860
+ print(f" Element {i} end: {end_coord}")
2861
+ print(f" Element {i+1} start: {start_coord}")
2862
+ success = False
2863
+
2864
+ if debug and success:
2865
+ print(f" ✓ Line {line_idx} passes all alignment tests")
2866
+
2867
+ if debug:
2868
+ if success:
2869
+ print("\n=== All 1D Element Alignment Tests PASSED ===")
2870
+ else:
2871
+ print("\n=== 1D Element Alignment Tests FAILED ===")
2872
+
2873
+ return success
2874
+
2875
+ def add_intersection_points_to_polygons(polygons, lines, debug=False):
2876
+ """
2877
+ Add intersection points between reinforcement lines and polygon edges to the polygon vertex lists.
2878
+ This ensures that polygons have vertices at all intersection points with reinforcement lines.
2879
+
2880
+ Parameters:
2881
+ polygons: List of polygons (lists of (x,y) tuples)
2882
+ lines: List of reinforcement lines (lists of (x,y) tuples)
2883
+ debug: Enable debug output
2884
+
2885
+ Returns:
2886
+ Updated list of polygons with intersection points added
2887
+ """
2888
+ if not lines:
2889
+ return polygons
2890
+
2891
+ if debug:
2892
+ print("Adding intersection points to polygons...")
2893
+
2894
+ # Make a copy of polygons to modify
2895
+ updated_polygons = []
2896
+ for poly in polygons:
2897
+ updated_polygons.append(list(poly)) # Convert to list for modification
2898
+
2899
+ # Find all intersections
2900
+ for line_idx, line_pts in enumerate(lines):
2901
+ line_pts_clean = remove_duplicate_endpoint(list(line_pts))
2902
+
2903
+ if debug:
2904
+ print(f"Processing line {line_idx}: {line_pts_clean}")
2905
+
2906
+ # Check each segment of the reinforcement line
2907
+ for i in range(len(line_pts_clean) - 1):
2908
+ line_seg_start = line_pts_clean[i]
2909
+ line_seg_end = line_pts_clean[i + 1]
2910
+
2911
+ # Check intersection with each polygon
2912
+ for poly_idx, poly in enumerate(updated_polygons):
2913
+ # Check each edge of this polygon
2914
+ for j in range(len(poly)):
2915
+ poly_edge_start = poly[j]
2916
+ poly_edge_end = poly[(j + 1) % len(poly)]
2917
+
2918
+ # Find intersection point if it exists
2919
+ intersection = line_segment_intersection(
2920
+ line_seg_start, line_seg_end,
2921
+ poly_edge_start, poly_edge_end
2922
+ )
2923
+
2924
+ if intersection:
2925
+ if debug:
2926
+ print(f"Found intersection {intersection} between line {line_idx} segment {i} and polygon {poly_idx} edge {j}")
2927
+
2928
+ # Check if intersection point is already a vertex of this polygon
2929
+ is_vertex = False
2930
+ for vertex in poly:
2931
+ if abs(vertex[0] - intersection[0]) < 1e-8 and abs(vertex[1] - intersection[1]) < 1e-8:
2932
+ is_vertex = True
2933
+ break
2934
+
2935
+ if not is_vertex:
2936
+ # Insert intersection point into polygon at the correct position
2937
+ # Insert after vertex j (which is the start of the edge)
2938
+ insert_idx = j + 1
2939
+ updated_polygons[poly_idx].insert(insert_idx, intersection)
2940
+
2941
+ if debug:
2942
+ print(f"Added intersection point {intersection} to polygon {poly_idx} at position {insert_idx}")
2943
+
2944
+ return updated_polygons
2945
+
2946
+ def extract_reinforcement_line_geometry(slope_data):
2947
+ """
2948
+ Extract reinforcement line geometry from slope_data in the format needed for mesh generation.
2949
+
2950
+ Parameters:
2951
+ slope_data: Dictionary containing slope data with 'reinforce_lines' key
2952
+
2953
+ Returns:
2954
+ List of reinforcement lines, where each line is a list of (x, y) coordinate tuples
2955
+ """
2956
+ lines = []
2957
+ if 'reinforce_lines' in slope_data and slope_data['reinforce_lines']:
2958
+ for line in slope_data['reinforce_lines']:
2959
+ # Convert from dict format to tuple format
2960
+ line_coords = [(point['X'], point['Y']) for point in line]
2961
+ lines.append(line_coords)
2962
+ return lines