xslope 0.1.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
xslope/mesh.py ADDED
@@ -0,0 +1,2719 @@
1
+ # Copyright 2025 Norman L. Jones
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ import numpy as np
16
+ from scipy.sparse import coo_matrix
17
+ from scipy.sparse.csgraph import reverse_cuthill_mckee
18
+
19
+ # Lazy import gmsh - only needed for mesh generation functions
20
+ _gmsh = None  # module-level cache: holds the imported gmsh module after the first successful import
21
+ def _get_gmsh():  # Return the gmsh module, importing it lazily on first call; raises ImportError if unavailable.
22
+ global _gmsh  # rebinding the cache requires the global declaration
23
+ if _gmsh is None:  # only attempt the import once; subsequent calls reuse the cached module
24
+ try:
25
+ import gmsh
26
+ _gmsh = gmsh  # cache so later calls skip the import machinery
27
+ except (ImportError, OSError) as e:  # OSError: gmsh loads native shared libraries that may be missing even when the package is installed
28
+ raise ImportError(
29
+ "gmsh is required for mesh generation but could not be imported. "
30
+ "If you only need limit equilibrium analysis, you can ignore this. "
31
+ "To use FEM features, install gmsh: pip install gmsh\n"
32
+ f"Original error: {e}"
33
+ ) from e  # chain the original failure so the root cause stays visible in tracebacks
34
+ return _gmsh
35
+
36
+
37
+
38
+ def build_mesh_from_polygons(polygons, target_size, element_type='tri3', lines=None, debug=False, mesh_params=None, target_size_1d=None):
39
+ """
40
+ Build a finite element mesh with material regions using Gmsh.
41
+ Fixed version that properly handles shared boundaries between polygons.
42
+
43
+ Parameters:
44
+ polygons : List of lists of (x, y) tuples defining material boundaries
45
+ target_size : Desired element size
46
+ element_type : 'tri3' (3-node triangles), 'tri6' (6-node triangles),
47
+ 'quad4' (4-node quadrilaterals), 'quad8' (8-node quadrilaterals),
48
+ 'quad9' (9-node quadrilaterals)
49
+ lines : Optional list of lines, each defined by list of (x, y) tuples for 1D elements
50
+ debug : Enable debug output
51
+ mesh_params : Optional dictionary of GMSH meshing parameters to override defaults
52
+ target_size_1d : Optional target size for 1D elements (default None, which is set to target_size if None)
53
+
54
+ Returns:
55
+ mesh dict containing:
56
+ nodes : np.ndarray of node coordinates (n_nodes, 2)
57
+ elements : np.ndarray of 2D element vertex indices (n_elements, 9) - unused nodes set to 0
58
+ element_types: np.ndarray indicating number of nodes per 2D element (3, 4, 6, 8, or 9)
59
+ element_materials: np.ndarray of material ID for each 2D element
60
+
61
+ If lines is provided, also includes:
62
+ elements_1d : np.ndarray of 1D element vertex indices (n_elements_1d, 3) - unused nodes set to 0
63
+ element_types_1d: np.ndarray indicating element type (2 for linear, 3 for quadratic)
64
+ element_materials_1d: np.ndarray of material ID for each 1D element (line index)
65
+ """
66
+ gmsh = _get_gmsh()
67
+ from collections import defaultdict
68
+
69
+ # Set default target_size_1d if None
70
+ if target_size_1d is None:
71
+ target_size_1d = target_size
72
+ if debug:
73
+ print(f"Using default target_size_1d = target_size = {target_size_1d}")
74
+
75
+ # build a list of region ids (list of material IDs - one per polygon)
76
+ region_ids = [i for i in range(len(polygons))]
77
+
78
+ if element_type not in ['tri3', 'tri6', 'quad4', 'quad8', 'quad9']:
79
+ raise ValueError("element_type must be 'tri3', 'tri6', 'quad4', 'quad8', or 'quad9'")
80
+
81
+ # Determine if we need quadratic elements - but always generate linear first
82
+ quadratic = element_type in ['tri6', 'quad8', 'quad9']
83
+
84
+ # For quadratic elements, always start with linear base element
85
+ if quadratic:
86
+ if element_type == 'tri6':
87
+ base_element_type = 'tri3'
88
+ elif element_type in ['quad8', 'quad9']:
89
+ base_element_type = 'quad4'
90
+ if debug:
91
+ print(f"Quadratic element '{element_type}' requested: generating '{base_element_type}' first, then post-processing")
92
+ else:
93
+ base_element_type = element_type
94
+
95
+ # Adjust target_size for quads to compensate for recombination creating finer meshes
96
+ if element_type.startswith('quad'):
97
+ # Different adjustment factors based on meshing parameters
98
+ if mesh_params and 'size_factor' in mesh_params:
99
+ size_factor = mesh_params['size_factor']
100
+ else:
101
+ # Default size factors for different approaches
102
+ if mesh_params and mesh_params.get("Mesh.RecombinationAlgorithm") == 0:
103
+ size_factor = 1.2 # Fast algorithm needs less adjustment
104
+ elif mesh_params and mesh_params.get("Mesh.RecombineOptimizeTopology", 0) > 50:
105
+ size_factor = 1.8 # High optimization creates more elements
106
+ else:
107
+ size_factor = 1.4 # Default
108
+
109
+ adjusted_target_size = target_size * size_factor
110
+ if debug:
111
+ print(f"Adjusted target size for quads: {target_size} -> {adjusted_target_size} (factor: {size_factor})")
112
+ else:
113
+ adjusted_target_size = target_size
114
+
115
+ gmsh.initialize()
116
+ gmsh.option.setNumber("General.Verbosity", 4) # Reduce verbosity
117
+ gmsh.model.add("multi_region_mesh")
118
+
119
+ # Global point map to ensure shared boundaries use the same points
120
+ point_map = {} # maps (x, y) to Gmsh point tag
121
+
122
+ # Track all unique edges and their usage
123
+ edge_map = {} # maps (pt1, pt2) tuple to line tag
124
+ edge_usage = defaultdict(list) # maps edge to list of (region_id, orientation)
125
+
126
+ def add_point(x, y, size_override=None):
127
+ key = (x, y)
128
+ if key not in point_map:
129
+ point_size = size_override if size_override is not None else adjusted_target_size
130
+ tag = gmsh.model.geo.addPoint(x, y, 0, point_size)
131
+ point_map[key] = tag
132
+ return point_map[key]
133
+
134
+ def get_edge_key(pt1, pt2):
135
+ """Get canonical edge key (always smaller point first)"""
136
+ return (min(pt1, pt2), max(pt1, pt2))
137
+
138
+ # First pass: Create all points and identify short edges
139
+ polygon_data = []
140
+ short_edge_points = set() # Points that are endpoints of short edges
141
+
142
+ # Pre-pass to identify short edges - improved logic
143
+ for idx, (poly_pts, region_id) in enumerate(zip(polygons, region_ids)):
144
+ poly_pts_clean = remove_duplicate_endpoint(list(poly_pts))
145
+ for i in range(len(poly_pts_clean)):
146
+ p1 = poly_pts_clean[i]
147
+ p2 = poly_pts_clean[(i + 1) % len(poly_pts_clean)]
148
+ edge_length = ((p2[0] - p1[0])**2 + (p2[1] - p1[1])**2)**0.5
149
+
150
+ # Only mark as short edge if it's genuinely short AND not a major boundary
151
+ # Major boundaries should maintain consistent mesh sizing
152
+ is_major_boundary = False
153
+
154
+ # Check if this edge is part of a major boundary (long horizontal or vertical edge)
155
+ if abs(p2[0] - p1[0]) > adjusted_target_size * 5: # Long horizontal edge
156
+ is_major_boundary = True
157
+ elif abs(p2[1] - p1[1]) > adjusted_target_size * 5: # Long vertical edge
158
+ is_major_boundary = True
159
+
160
+ # Only apply short edge sizing if edge is genuinely short AND not a major boundary
161
+ if edge_length < adjusted_target_size and not is_major_boundary:
162
+ short_edge_points.add(p1)
163
+ short_edge_points.add(p2)
164
+ if debug:
165
+ print(f"Short edge found: {p1} to {p2}, length={edge_length:.2f}")
166
+ elif debug and edge_length < adjusted_target_size:
167
+ print(f"Short edge ignored (major boundary): {p1} to {p2}, length={edge_length:.2f}")
168
+
169
+ # Main pass: Create points with appropriate sizes
170
+ for idx, (poly_pts, region_id) in enumerate(zip(polygons, region_ids)):
171
+ poly_pts_clean = remove_duplicate_endpoint(list(poly_pts)) # make a copy
172
+ pt_tags = []
173
+ for x, y in poly_pts_clean:
174
+ # Use larger size for points on short edges to discourage subdivision
175
+ # But be more conservative about when to apply this
176
+ if (x, y) in short_edge_points:
177
+ point_size = adjusted_target_size * 2.0 # Reduced from 3.0 to 2.0
178
+ pt_tags.append(add_point(x, y, point_size))
179
+ else:
180
+ pt_tags.append(add_point(x, y))
181
+
182
+ # Track edges for this polygon
183
+ edges = []
184
+ for i in range(len(pt_tags)):
185
+ pt1 = pt_tags[i]
186
+ pt2 = pt_tags[(i + 1) % len(pt_tags)]
187
+
188
+ edge_key = get_edge_key(pt1, pt2)
189
+
190
+ # Determine orientation: True if pt1 < pt2, False otherwise
191
+ forward = (pt1 < pt2)
192
+
193
+ # Store edge usage
194
+ edge_usage[edge_key].append((region_id, forward))
195
+ edges.append((pt1, pt2, edge_key, forward))
196
+
197
+ polygon_data.append({
198
+ 'region_id': region_id,
199
+ 'pt_tags': pt_tags,
200
+ 'edges': edges
201
+ })
202
+
203
+ # Second pass: Create all unique lines and track short edges
204
+ short_edges = [] # Track short edges for later processing
205
+ for edge_key in edge_usage.keys():
206
+ pt1, pt2 = edge_key
207
+ line_tag = gmsh.model.geo.addLine(pt1, pt2)
208
+ edge_map[edge_key] = line_tag
209
+
210
+ # Calculate edge length from point coordinates
211
+ pt1_coords = None
212
+ pt2_coords = None
213
+ for (x, y), tag in point_map.items():
214
+ if tag == pt1:
215
+ pt1_coords = (x, y)
216
+ if tag == pt2:
217
+ pt2_coords = (x, y)
218
+
219
+ if pt1_coords and pt2_coords:
220
+ edge_length = ((pt2_coords[0] - pt1_coords[0])**2 + (pt2_coords[1] - pt1_coords[1])**2)**0.5
221
+
222
+ # Add transfinite constraints for long boundary edges to ensure consistent mesh sizing
223
+ # This prevents the creation of overly coarse elements along major boundaries
224
+ if edge_length > adjusted_target_size * 3: # Long edge
225
+ # Calculate how many elements should be along this edge
226
+ num_elements = max(3, int(edge_length / adjusted_target_size))
227
+ try:
228
+ gmsh.model.geo.mesh.setTransfiniteCurve(line_tag, num_elements)
229
+ if debug:
230
+ print(f"Set transfinite constraint on long edge: {pt1_coords} to {pt2_coords}, length={edge_length:.2f}, num_elements={num_elements}")
231
+ except Exception as e:
232
+ if debug:
233
+ print(f"Warning: Could not set transfinite constraint on edge {pt1_coords} to {pt2_coords}: {e}")
234
+
235
+ # Add transfinite constraints for short edges to prevent subdivision
236
+ # This forces GMSH to use exactly 2 nodes (start and end) for short edges
237
+ elif edge_length < adjusted_target_size:
238
+ try:
239
+ gmsh.model.geo.mesh.setTransfiniteCurve(line_tag, 2) # Exactly 2 nodes
240
+ if debug:
241
+ print(f"Set transfinite constraint on short edge: {pt1_coords} to {pt2_coords}, length={edge_length:.2f}, exactly 2 nodes")
242
+ except Exception as e:
243
+ if debug:
244
+ print(f"Warning: Could not set transfinite constraint on short edge {pt1_coords} to {pt2_coords}: {e}")
245
+
246
+ # Short edges are now handled by point sizing, no need for transfinite curves
247
+
248
+ # Ensure all polygon points (including intersection points) are created as GMSH points
249
+ # The intersection points were already added to polygons in build_polygons(),
250
+ # so we just need to ensure they exist as GMSH geometric entities
251
+ if lines is not None:
252
+ if debug:
253
+ print("Ensuring all polygon points (including intersections) are created as GMSH points...")
254
+
255
+ # Collect all points from all polygons to ensure they exist in GMSH
256
+ all_polygon_points = set()
257
+ for poly_data in polygon_data:
258
+ pt_tags = poly_data['pt_tags']
259
+ for tag in pt_tags:
260
+ # Find the coordinates for this point tag
261
+ for (x, y), point_tag in point_map.items():
262
+ if point_tag == tag:
263
+ all_polygon_points.add((x, y))
264
+ break
265
+
266
+ # Create any missing GMSH points
267
+ for x, y in all_polygon_points:
268
+ key = (x, y)
269
+ if key not in point_map:
270
+ pt_tag = gmsh.model.geo.addPoint(x, y, 0.0, adjusted_target_size * 0.5)
271
+ point_map[key] = pt_tag
272
+ if debug:
273
+ print(f"Created GMSH point for polygon vertex {key}: tag {pt_tag}")
274
+
275
+ if debug:
276
+ print(f"Ensured {len(all_polygon_points)} polygon points exist as GMSH entities")
277
+
278
+ # Create enhanced reinforcement lines that include intersection points from polygons
279
+ # This is essential for proper mesh generation with embedded 1D elements
280
+ enhanced_lines = []
281
+ for line_idx, line_pts in enumerate(lines):
282
+ line_pts_clean = remove_duplicate_endpoint(list(line_pts))
283
+
284
+ # Collect all points for this line: original + intersection points from polygons
285
+ all_line_points = []
286
+
287
+ # Add original line points
288
+ for x, y in line_pts_clean:
289
+ all_line_points.append((x, y, 'original'))
290
+
291
+ # Add intersection points that are on this line (from polygon data)
292
+ for poly_data in polygon_data:
293
+ pt_tags = poly_data['pt_tags']
294
+ for tag in pt_tags:
295
+ # Find the coordinates for this point tag
296
+ for (x, y), point_tag in point_map.items():
297
+ if point_tag == tag:
298
+ # Check if this point is on the reinforcement line
299
+ if is_point_on_line_segments((x, y), line_pts_clean, tolerance=1e-6):
300
+ all_line_points.append((x, y, 'intersection'))
301
+ break
302
+
303
+ # Sort all points along the line to maintain proper order
304
+ if len(all_line_points) > 1:
305
+ all_line_points.sort(key=lambda p: line_segment_parameter((p[0], p[1]), line_pts_clean[0], line_pts_clean[-1]))
306
+
307
+ # Remove duplicates (keep first occurrence)
308
+ unique_points = []
309
+ seen = set()
310
+ for x, y, point_type in all_line_points:
311
+ point_key = (round(x, 8), round(y, 8)) # Round to avoid floating point issues
312
+ if point_key not in seen:
313
+ seen.add(point_key)
314
+ unique_points.append((x, y, point_type))
315
+
316
+ # Create the enhanced line
317
+ enhanced_line = [(x, y) for x, y, _ in unique_points]
318
+ enhanced_lines.append(enhanced_line)
319
+
320
+ if debug:
321
+ print(f"Enhanced line {line_idx}: {len(line_pts_clean)} original points -> {len(enhanced_line)} total points")
322
+
323
+ # Replace original lines with enhanced lines
324
+ lines = enhanced_lines
325
+
326
+ # Create reinforcement lines as geometric constraints to force 2D mesh edges
327
+ line_data = []
328
+
329
+ if lines is not None:
330
+ for line_idx, line_pts in enumerate(lines):
331
+ # Use the enhanced line coordinates (which include intersection points)
332
+ line_pts_clean = remove_duplicate_endpoint(list(line_pts))
333
+
334
+ # Create points for this reinforcement line
335
+ line_point_tags = []
336
+
337
+ # Create all points for this line (original + intersection points)
338
+ for x, y in line_pts_clean:
339
+ key = (x, y)
340
+ if key in point_map:
341
+ line_point_tags.append((x, y, point_map[key]))
342
+ else:
343
+ # Create new point with small mesh size to ensure it's preserved
344
+ pt_tag = gmsh.model.geo.addPoint(x, y, 0.0, adjusted_target_size * 0.5)
345
+ point_map[key] = pt_tag
346
+ line_point_tags.append((x, y, pt_tag))
347
+
348
+ # Sort points along the line to maintain proper order
349
+ line_point_tags.sort(key=lambda p: line_segment_parameter((p[0], p[1]), line_pts_clean[0], line_pts_clean[-1]))
350
+
351
+ # Extract just the point tags in order
352
+ pt_tags = [tag for _, _, tag in line_point_tags]
353
+
354
+ if debug:
355
+ print(f" Line {line_idx} points: {[(x, y) for x, y, _ in line_point_tags]}")
356
+
357
+ # Create line segments as geometric constraints with controlled meshing
358
+ line_tags = []
359
+ for i in range(len(pt_tags) - 1):
360
+ pt1, pt2 = pt_tags[i], pt_tags[i + 1]
361
+
362
+ # Calculate segment length to determine number of subdivisions
363
+ coord1 = None
364
+ coord2 = None
365
+ for (x, y), tag in point_map.items():
366
+ if tag == pt1:
367
+ coord1 = (x, y)
368
+ if tag == pt2:
369
+ coord2 = (x, y)
370
+
371
+ if coord1 and coord2:
372
+ segment_length = ((coord2[0] - coord1[0])**2 + (coord2[1] - coord1[1])**2)**0.5
373
+ # Calculate number of elements needed to achieve target_size_1d
374
+ # For segments longer than target_size_1d, we want multiple elements
375
+ # For segments shorter than target_size_1d, we still want at least 2 elements
376
+ if segment_length > target_size_1d:
377
+ num_elements = max(3, int(round(segment_length / target_size_1d)))
378
+ else:
379
+ num_elements = 2
380
+
381
+ if debug:
382
+ print(f" Segment {i}: length {segment_length:.2f}, creating {num_elements} elements")
383
+
384
+ line_tag = gmsh.model.geo.addLine(pt1, pt2)
385
+ line_tags.append(line_tag)
386
+
387
+ # Set transfinite constraint to create appropriate number of nodes
388
+ try:
389
+ gmsh.model.geo.mesh.setTransfiniteCurve(line_tag, num_elements)
390
+ if debug:
391
+ print(f" Set transfinite constraint on line segment {i}: {num_elements} nodes")
392
+ except Exception as e:
393
+ if debug:
394
+ print(f" Warning: Could not set transfinite constraint on segment {i}: {e}")
395
+ else:
396
+ # Fallback: create line with default 2 nodes
397
+ line_tag = gmsh.model.geo.addLine(pt1, pt2)
398
+ line_tags.append(line_tag)
399
+
400
+ try:
401
+ gmsh.model.geo.mesh.setTransfiniteCurve(line_tag, 2)
402
+ if debug:
403
+ print(f" Set transfinite constraint on line segment {i}: 2 nodes (fallback)")
404
+ except Exception as e:
405
+ if debug:
406
+ print(f" Warning: Could not set transfinite constraint on segment {i}: {e}")
407
+
408
+ # Store line data for later 1D element extraction
409
+ # Use the enhanced line coordinates (which include intersection points)
410
+ line_data.append({
411
+ 'line_idx': line_idx,
412
+ 'line_tags': line_tags,
413
+ 'point_coords': line_pts_clean # This now contains the enhanced coordinates
414
+ })
415
+
416
+ if debug:
417
+ print(f"Created reinforcement constraint line {line_idx} with {len(line_tags)} segments: {line_pts_clean}")
418
+
419
+ # Third pass: Create surfaces using the shared lines
420
+ surface_to_region = {}
421
+
422
+ for poly_data in polygon_data:
423
+ region_id = poly_data['region_id']
424
+ edges = poly_data['edges']
425
+
426
+ line_tags = []
427
+ for pt1, pt2, edge_key, forward in edges:
428
+ line_tag = edge_map[edge_key]
429
+
430
+ # Use positive or negative line tag based on orientation
431
+ if forward:
432
+ line_tags.append(line_tag)
433
+ else:
434
+ line_tags.append(-line_tag)
435
+
436
+ # Create curve loop and surface
437
+ try:
438
+ loop = gmsh.model.geo.addCurveLoop(line_tags)
439
+ surface = gmsh.model.geo.addPlaneSurface([loop])
440
+ surface_to_region[surface] = region_id
441
+ except Exception as e:
442
+ print(f"Warning: Could not create surface for region {region_id}: {e}")
443
+ continue
444
+
445
+ # Synchronize geometry
446
+ gmsh.model.geo.synchronize()
447
+
448
+ # Force mesh edges along reinforcement lines by creating additional geometric constraints
449
+ if lines is not None:
450
+ for line_info in line_data:
451
+ line_idx = line_info['line_idx']
452
+ line_tags = line_info['line_tags']
453
+ line_pts = line_info['point_coords']
454
+
455
+ # Set transfinite constraints to force mesh edges along each line segment
456
+ # REMOVED: This was conflicting with the target_size_1d calculations above
457
+ # for i, line_tag in enumerate(line_tags):
458
+ # try:
459
+ # # Force exactly 2 nodes (start and end) to prevent subdivision
460
+ # gmsh.model.geo.mesh.setTransfiniteCurve(line_tag, 2)
461
+ # if debug:
462
+ # print(f"Set transfinite constraint on line {line_idx} segment {i}: exactly 2 nodes")
463
+ # except Exception as e:
464
+ # if debug:
465
+ # print(f"Warning: Could not set transfinite constraint on line {line_idx} segment {i}: {e}")
466
+
467
+ # Embed reinforcement lines in all surfaces to ensure they're part of the mesh
468
+ for surface in surface_to_region.keys():
469
+ try:
470
+ # Embed all line segments of this reinforcement line
471
+ gmsh.model.mesh.embed(1, line_tags, 2, surface)
472
+ if debug:
473
+ print(f"Embedded reinforcement line {line_idx} in surface {surface}")
474
+ except Exception as e:
475
+ if debug:
476
+ print(f"Could not embed line {line_idx} in surface {surface}: {e}")
477
+
478
+ # CRITICAL: Set mesh coherence to ensure shared nodes along boundaries
479
+ # This forces Gmsh to use the same nodes for shared geometric entities
480
+ gmsh.model.mesh.removeDuplicateNodes()
481
+
482
+ # Create physical groups for material regions (this helps with mesh consistency)
483
+ physical_surfaces = []
484
+ for surface, region_id in surface_to_region.items():
485
+ physical_tag = gmsh.model.addPhysicalGroup(2, [surface])
486
+ physical_surfaces.append((physical_tag, region_id))
487
+
488
+ # Create physical groups for embedded reinforcement lines
489
+ physical_lines = []
490
+ if lines is not None:
491
+ for line_info in line_data:
492
+ line_idx = line_info['line_idx']
493
+ line_tags = line_info['line_tags']
494
+ physical_tag = gmsh.model.addPhysicalGroup(1, line_tags)
495
+ physical_lines.append((physical_tag, line_idx))
496
+
497
+ # Check for potential quad4 + reinforcement line conflicts
498
+ has_reinforcement_lines = lines is not None and len(lines) > 0
499
+ wants_quads = base_element_type.startswith('quad')
500
+
501
+ # Set mesh algorithm and recombination options BEFORE generating mesh
502
+ if base_element_type.startswith('quad'):
503
+ # Check if we need to use a more robust algorithm for reinforcement lines
504
+ if has_reinforcement_lines:
505
+ if debug:
506
+ print(f"Detected quad elements with reinforcement lines.")
507
+ print(f"Using robust recombination algorithm to handle embedded line constraints.")
508
+
509
+ # Use 'fast' algorithm which is more robust with embedded constraints
510
+ default_params = {
511
+ "Mesh.Algorithm": 8, # Frontal-Delaunay for quads
512
+ "Mesh.RecombineAll": 1, # Recombine triangles into quads
513
+ "Mesh.RecombinationAlgorithm": 0, # Standard (more robust than simple)
514
+ "Mesh.SubdivisionAlgorithm": 0, # Mixed tri/quad where needed
515
+ "Mesh.RecombineOptimizeTopology": 0, # Minimal optimization
516
+ "Mesh.RecombineNodeRepositioning": 1, # Still reposition nodes
517
+ "Mesh.RecombineMinimumQuality": 0.01, # Keep quality threshold
518
+ "Mesh.Smoothing": 5, # Reduced smoothing
519
+ "Mesh.SmoothNormals": 1, # Keep smooth normals
520
+ "Mesh.SmoothRatio": 1.8, # Keep smoothing ratio
521
+ }
522
+ else:
523
+ # Standard quad meshing parameters for cases without reinforcement lines
524
+ default_params = {
525
+ "Mesh.Algorithm": 8, # Frontal-Delaunay for quads (try 5, 6, 8)
526
+ "Mesh.RecombineAll": 1, # Recombine triangles into quads
527
+ "Mesh.RecombinationAlgorithm": 1, # Simple recombination (try 0, 1, 2, 3)
528
+ "Mesh.SubdivisionAlgorithm": 1, # All quads (try 0, 1, 2)
529
+ "Mesh.RecombineOptimizeTopology": 5, # Optimize topology (0-100)
530
+ "Mesh.RecombineNodeRepositioning": 1, # Reposition nodes (0 or 1)
531
+ "Mesh.RecombineMinimumQuality": 0.01, # Minimum quality threshold
532
+ "Mesh.Smoothing": 10, # Number of smoothing steps (try 0-100)
533
+ "Mesh.SmoothNormals": 1, # Smooth normals
534
+ "Mesh.SmoothRatio": 1.8, # Smoothing ratio (1.0-3.0)
535
+ }
536
+
537
+ # Override with user-provided parameters
538
+ if mesh_params:
539
+ default_params.update(mesh_params)
540
+
541
+ # Apply all parameters (except our custom ones)
542
+ for param, value in default_params.items():
543
+ if param not in ['size_factor']: # Skip our custom parameters
544
+ gmsh.option.setNumber(param, value)
545
+
546
+ # Set recombination for each surface
547
+ for surface in surface_to_region.keys():
548
+ gmsh.model.mesh.setRecombine(2, surface)
549
+ else:
550
+ gmsh.option.setNumber("Mesh.Algorithm", 6) # Frontal-Delaunay for triangles
551
+
552
+ # Always generate linear elements first - quadratic conversion is done in post-processing
553
+ # This avoids gmsh issues with quadratic elements and embedded 1D lines
554
+ gmsh.option.setNumber("Mesh.ElementOrder", 1)
555
+
556
+ # Force mesh coherence before generation
557
+ gmsh.option.setNumber("Mesh.ToleranceInitialDelaunay", 1e-12)
558
+
559
+ # Short edge control is now handled by point sizing during geometry creation
560
+
561
+ # Generate mesh
562
+ gmsh.model.mesh.generate(2)
563
+
564
+ # Remove duplicate nodes again after mesh generation (belt and suspenders)
565
+ gmsh.model.mesh.removeDuplicateNodes()
566
+
567
+ # Get nodes
568
+ node_tags, coords, _ = gmsh.model.mesh.getNodes()
569
+ nodes = np.array(coords).reshape(-1, 3)[:, :2]
570
+
571
+ # Create node tag to index mapping
572
+ node_tag_to_index = {tag: i for i, tag in enumerate(node_tags)}
573
+
574
+ elements = []
575
+ mat_ids = []
576
+ element_node_counts = []
577
+
578
+ # For quad8: track center nodes to delete later
579
+ center_nodes_to_delete = set() if element_type == 'quad8' else None
580
+
581
+ # Extract elements using physical groups for better region identification
582
+ for physical_tag, region_id in physical_surfaces:
583
+ try:
584
+ # Get entities in this physical group
585
+ entities = gmsh.model.getEntitiesForPhysicalGroup(2, physical_tag)
586
+
587
+ for entity in entities:
588
+ # Get all elements for this entity
589
+ elem_types, elem_tags_list, node_tags_list = gmsh.model.mesh.getElements(2, entity)
590
+
591
+ for elem_type, elem_tags, node_tags in zip(elem_types, elem_tags_list, node_tags_list):
592
+ # Gmsh element type mapping:
593
+ # 2: 3-node triangle, 9: 6-node triangle
594
+ # 3: 4-node quadrilateral, 10: 8-node quadrilateral
595
+ if elem_type == 2: # 3-node triangle
596
+ elements_array = np.array(node_tags).reshape(-1, 3)
597
+ for element in elements_array:
598
+ idxs = [node_tag_to_index[tag] for tag in element]
599
+
600
+ # GMSH returns clockwise triangles - reorder to counter-clockwise
601
+ idxs[1], idxs[2] = idxs[2], idxs[1]
602
+
603
+ # Pad to 9 columns with zeros
604
+ padded_idxs = idxs + [0] * (9 - len(idxs))
605
+ elements.append(padded_idxs)
606
+ mat_ids.append(region_id)
607
+ element_node_counts.append(3)
608
+ elif elem_type == 9: # 6-node triangle
609
+ elements_array = np.array(node_tags).reshape(-1, 6)
610
+ for element in elements_array:
611
+ idxs = [node_tag_to_index[tag] for tag in element]
612
+
613
+ # GMSH returns clockwise tri6 elements - reorder to counter-clockwise
614
+ # Swap corner nodes 1 and 2
615
+ idxs[1], idxs[2] = idxs[2], idxs[1]
616
+ # Fix midpoint assignments after corner swap 1<->2:
617
+ # GMSH gives: n3=edge(0-1), n4=edge(1-2), n5=edge(2-0)
618
+ # After swap: n3=edge(0-2), n4=edge(2-1), n5=edge(1-0)
619
+ # Standard requires: n3=edge(0-1), n4=edge(1-2), n5=edge(2-0)
620
+ # So remap: new_n3=old_n5, new_n4=old_n4, new_n5=old_n3
621
+ old_3, old_4, old_5 = idxs[3], idxs[4], idxs[5]
622
+ idxs[3] = old_5 # standard edge(0-1) gets GMSH edge(2-0) midpoint
623
+ idxs[4] = old_4 # standard edge(1-2) gets GMSH edge(1-2) midpoint
624
+ idxs[5] = old_3 # standard edge(2-0) gets GMSH edge(0-1) midpoint
625
+
626
+ # Pad to 9 columns with zeros
627
+ padded_idxs = idxs + [0] * (9 - len(idxs))
628
+ elements.append(padded_idxs)
629
+ mat_ids.append(region_id)
630
+ element_node_counts.append(6)
631
+ elif elem_type == 3: # 4-node quadrilateral
632
+ elements_array = np.array(node_tags).reshape(-1, 4)
633
+ for element in elements_array:
634
+ idxs = [node_tag_to_index[tag] for tag in element]
635
+ # Fix node ordering for quadrilateral elements
636
+ if element_type.startswith('quad'):
637
+ idxs = idxs[::-1] # Simple reversal of node order
638
+ # Pad to 9 columns with zeros
639
+ padded_idxs = idxs + [0] * (9 - len(idxs))
640
+ elements.append(padded_idxs)
641
+ mat_ids.append(region_id)
642
+ element_node_counts.append(4)
643
+ elif elem_type == 10: # Quadratic quadrilateral (gmsh generates 9-node Lagrange)
644
+ # Gmsh always generates 9-node Lagrange quads for order 2
645
+ elements_array = np.array(node_tags).reshape(-1, 9)
646
+ for element in elements_array:
647
+ idxs = [node_tag_to_index[tag] for tag in element]
648
+
649
+ if element_type in ['quad8', 'quad9']:
650
+ # Both quad8 and quad9 need CW to CCW conversion for first 8 nodes
651
+ # Convert from Gmsh CW to CCW ordering for quadrilateral
652
+ # Corner nodes: reverse order (0,1,2,3) -> (0,3,2,1)
653
+ # Midpoint nodes need to be reordered accordingly:
654
+ # GMSH: n4=edge(0-1), n5=edge(1-2), n6=edge(2-3), n7=edge(3-0)
655
+ # After corner reversal: need n4=edge(0-3), n5=edge(3-2), n6=edge(2-1), n7=edge(1-0)
656
+ # So: new_n4=old_n7, new_n5=old_n6, new_n6=old_n5, new_n7=old_n4
657
+ reordered_first8 = [
658
+ idxs[0], # corner 0 stays
659
+ idxs[3], # corner 1 -> corner 3
660
+ idxs[2], # corner 2 stays
661
+ idxs[1], # corner 3 -> corner 1
662
+ idxs[7], # edge(0-1) -> edge(0-3) = old edge(3-0)
663
+ idxs[6], # edge(1-2) -> edge(3-2) = old edge(2-3)
664
+ idxs[5], # edge(2-3) -> edge(2-1) = old edge(1-2)
665
+ idxs[4] # edge(3-0) -> edge(1-0) = old edge(0-1)
666
+ ]
667
+
668
+ if element_type == 'quad8':
669
+ # For quad8, skip center node and mark for deletion
670
+ center_node_idx = idxs[8] # Mark center node for deletion
671
+ center_nodes_to_delete.add(center_node_idx)
672
+ padded_idxs = reordered_first8 + [0] # Skip center node, pad to 9
673
+ elements.append(padded_idxs)
674
+ mat_ids.append(region_id)
675
+ element_node_counts.append(8)
676
+ else: # quad9
677
+ # For quad9, keep center node (9th node unchanged)
678
+ full_idxs = reordered_first8 + [idxs[8]] # Add center node
679
+ elements.append(full_idxs)
680
+ mat_ids.append(region_id)
681
+ element_node_counts.append(9)
682
+ else:
683
+ # This should never happen since element_type is validated earlier
684
+ raise ValueError(f"Unexpected element_type '{element_type}' for Gmsh elem_type {elem_type}")
685
+ except Exception as e:
686
+ print(f"Warning: Could not extract elements for physical group {physical_tag} (region {region_id}): {e}")
687
+ continue
688
+
689
+ # Convert to numpy arrays
690
+ elements_array = np.array(elements, dtype=int)
691
+ element_types = np.array(element_node_counts, dtype=int)
692
+ element_materials = np.array(mat_ids, dtype=int)
693
+
694
+ # Extract 1D elements from Gmsh-generated 1D mesh along reinforcement lines
695
+ elements_1d = []
696
+ mat_ids_1d = []
697
+ element_node_counts_1d = []
698
+
699
+ if lines is not None:
700
+ # Extract 1D elements from physical groups for each reinforcement line
701
+ for physical_tag, line_idx in physical_lines:
702
+ try:
703
+ # Get entities in this physical group
704
+ entities = gmsh.model.getEntitiesForPhysicalGroup(1, physical_tag)
705
+
706
+ if debug:
707
+ print(f" Physical group {physical_tag} (line {line_idx}): found {len(entities)} entities")
708
+
709
+ for entity in entities:
710
+ # Get all 1D elements for this entity
711
+ elem_types, elem_tags_list, node_tags_list = gmsh.model.mesh.getElements(1, entity)
712
+
713
+ for elem_type, elem_tags, node_tags in zip(elem_types, elem_tags_list, node_tags_list):
714
+ # Gmsh 1D element type mapping:
715
+ # 1: 2-node line (linear), 8: 3-node line (quadratic)
716
+ if elem_type == 1: # Linear 1D elements (2 nodes)
717
+ elements_array = np.array(node_tags).reshape(-1, 2)
718
+ for element in elements_array:
719
+ try:
720
+ # Convert numpy arrays to regular Python scalars
721
+ element_list = element.tolist() # Convert to Python list
722
+ if len(element_list) >= 2:
723
+ tag1 = int(element_list[0])
724
+ tag2 = int(element_list[1])
725
+
726
+ # Get node indices
727
+ idx1 = node_tag_to_index[tag1]
728
+ idx2 = node_tag_to_index[tag2]
729
+
730
+ # Create 1D element
731
+ padded_idxs = [idx1, idx2, 0]
732
+ elements_1d.append(padded_idxs)
733
+ mat_ids_1d.append(line_idx)
734
+ element_node_counts_1d.append(2)
735
+
736
+ if debug:
737
+ coord1 = nodes[idx1]
738
+ coord2 = nodes[idx2]
739
+ print(f" Created 1D element: {coord1} -> {coord2}")
740
+ except (KeyError, TypeError, ValueError, IndexError) as e:
741
+ if debug:
742
+ print(f" Skipping 1D element due to error: {e}")
743
+ continue
744
+ elif elem_type == 8: # Quadratic 1D elements (3 nodes)
745
+ elements_array = np.array(node_tags).reshape(-1, 3)
746
+ for element in elements_array:
747
+ try:
748
+ # Convert numpy arrays to regular Python scalars
749
+ element_list = element.tolist() # Convert to Python list
750
+ if len(element_list) >= 3:
751
+ tag1 = int(element_list[0])
752
+ tag2 = int(element_list[1])
753
+ tag3 = int(element_list[2])
754
+
755
+ # Get node indices
756
+ idx1 = node_tag_to_index[tag1]
757
+ idx2 = node_tag_to_index[tag2]
758
+ idx3 = node_tag_to_index[tag3]
759
+
760
+ # Create 1D element
761
+ padded_idxs = [idx1, idx2, idx3]
762
+ elements_1d.append(padded_idxs)
763
+ mat_ids_1d.append(line_idx)
764
+ element_node_counts_1d.append(3)
765
+ except (KeyError, TypeError, ValueError, IndexError) as e:
766
+ if debug:
767
+ print(f" Skipping quadratic 1D element due to error: {e}")
768
+ continue
769
+ except Exception as e:
770
+ if debug:
771
+ print(f" Error extracting 1D elements for line {line_idx}: {e}")
772
+ continue
773
+
774
+ gmsh.finalize()
775
+
776
+ # Clean up center nodes for quad8 elements
777
+ if element_type == 'quad8' and center_nodes_to_delete:
778
+ print(f"Quad8 cleanup: removing {len(center_nodes_to_delete)} center nodes from {len(nodes)} total nodes")
779
+
780
+ # c) Create array tracking original node numbering
781
+ original_node_count = len(nodes)
782
+ nodes_to_keep = [i for i in range(original_node_count) if i not in center_nodes_to_delete]
783
+
784
+ # d) Delete center nodes - create new nodes array
785
+ new_nodes = nodes[nodes_to_keep]
786
+
787
+ # e) Create mapping from old node indices to new node indices
788
+ old_to_new_mapping = {old_idx: new_idx for new_idx, old_idx in enumerate(nodes_to_keep)}
789
+
790
+ # f) Update element topology to use new node numbering
791
+ new_elements = []
792
+ for element in elements_array:
793
+ new_element = []
794
+ for node_idx in element:
795
+ if node_idx == 0: # Keep padding zeros
796
+ new_element.append(0)
797
+ elif node_idx in center_nodes_to_delete:
798
+ # This should not happen since we set center nodes to 0
799
+ new_element.append(0)
800
+ else:
801
+ # Map to new node index
802
+ new_element.append(old_to_new_mapping[node_idx])
803
+ new_elements.append(new_element)
804
+
805
+ # g) Replace arrays with consolidated versions
806
+ elements_array = np.array(new_elements, dtype=int)
807
+ nodes = new_nodes
808
+
809
+ print(f"Quad8 cleanup complete: {len(nodes)} nodes, {len(elements_array)} elements")
810
+
811
+ # Convert lists to arrays
812
+ elements_array = np.array(elements, dtype=int)
813
+ element_types = np.array(element_node_counts, dtype=int)
814
+ element_materials = np.array(mat_ids, dtype=int) + 1 # Make 1-based
815
+
816
+ mesh = {
817
+ "nodes": nodes,
818
+ "elements": elements_array,
819
+ "element_types": element_types,
820
+ "element_materials": element_materials,
821
+ }
822
+
823
+ # Add 1D element data if lines were provided
824
+ if lines is not None and len(elements_1d) > 0:
825
+ elements_1d_array = np.array(elements_1d, dtype=int)
826
+ element_types_1d = np.array(element_node_counts_1d, dtype=int)
827
+ element_materials_1d = np.array(mat_ids_1d, dtype=int) + 1 # Make 1-based
828
+
829
+ mesh["elements_1d"] = elements_1d_array
830
+ mesh["element_types_1d"] = element_types_1d
831
+ mesh["element_materials_1d"] = element_materials_1d
832
+
833
+ # Post-process to convert linear elements to quadratic if requested
834
+ if quadratic:
835
+ if debug:
836
+ print(f"Converting linear {base_element_type} mesh to quadratic {element_type}")
837
+ mesh = convert_linear_to_quadratic_mesh(mesh, element_type, debug=debug)
838
+
839
+ return mesh
840
+
841
+
842
def convert_linear_to_quadratic_mesh(mesh, target_element_type, debug=False):
    """
    Convert a linear mesh (tri3/quad4) to quadratic (tri6/quad8/quad9) by adding midside nodes.

    This is much more robust than gmsh's built-in quadratic generation, especially
    when dealing with embedded 1D elements (reinforcement lines).

    Midside nodes are shared between neighboring 2D elements (and with embedded
    1D elements) through an edge lookup table, so no duplicate nodes are made.
    New node coordinates are accumulated in a plain list and the node array is
    rebuilt once at the end, instead of rebuilding the whole numpy array for
    every inserted node (the previous approach was O(n^2) in node count).

    Parameters:
        mesh: Dictionary containing linear mesh data
        target_element_type: 'tri6', 'quad8', or 'quad9'
        debug: Enable debug output

    Returns:
        Updated mesh dictionary with quadratic elements
    """
    if debug:
        print(f"Converting to {target_element_type} elements...")

    base_nodes = mesh["nodes"]
    elements = mesh["elements"].copy()
    element_types = mesh["element_types"].copy()
    element_materials = mesh["element_materials"].copy()

    # Handle 1D elements if present
    elements_1d = mesh.get("elements_1d")
    element_types_1d = mesh.get("element_types_1d")
    element_materials_1d = mesh.get("element_materials_1d")
    has_1d_elements = elements_1d is not None

    if has_1d_elements:
        elements_1d = elements_1d.copy()
        element_types_1d = element_types_1d.copy()
        element_materials_1d = element_materials_1d.copy()

    n_base = len(base_nodes)
    extra_coords = []   # coordinates of nodes created during conversion
    # (lo_idx, hi_idx) -> midside node index; lo < hi for consistency
    midside_nodes = {}
    next_node_idx = n_base

    def _coord(idx):
        """Coordinate lookup that works for both original and newly created nodes."""
        if idx < n_base:
            return np.asarray(base_nodes[idx], dtype=float)
        return np.asarray(extra_coords[idx - n_base], dtype=float)

    def get_or_create_midside_node(n1_idx, n2_idx):
        """Get existing midside node or create a new one between n1 and n2."""
        nonlocal next_node_idx

        # Canonical edge key (smaller index first) so both neighbors share it
        if n1_idx > n2_idx:
            n1_idx, n2_idx = n2_idx, n1_idx
        edge_key = (n1_idx, n2_idx)

        if edge_key in midside_nodes:
            return midside_nodes[edge_key]

        # Create new midside node at the edge center
        midside_coord = (_coord(n1_idx) + _coord(n2_idx)) / 2.0
        extra_coords.append(midside_coord.tolist())

        midside_idx = next_node_idx
        midside_nodes[edge_key] = midside_idx
        next_node_idx += 1

        if debug and len(midside_nodes) <= 10:  # Only print first few
            print(f"  Created midside node {midside_idx} between {n1_idx}-{n2_idx} at {midside_coord}")

        return midside_idx

    # Convert 2D elements
    new_elements = []
    new_element_types = []

    for elem_idx, element in enumerate(elements):
        elem_type = element_types[elem_idx]

        if target_element_type == 'tri6' and elem_type == 3:
            # tri3 -> tri6: three corners plus one midside node per edge
            n0, n1, n2 = int(element[0]), int(element[1]), int(element[2])
            n3 = get_or_create_midside_node(n0, n1)  # edge 0-1
            n4 = get_or_create_midside_node(n1, n2)  # edge 1-2
            n5 = get_or_create_midside_node(n2, n0)  # edge 2-0
            # tri6 node ordering: [corner_nodes, midside_nodes], padded to 9
            new_elements.append([n0, n1, n2, n3, n4, n5, 0, 0, 0])
            new_element_types.append(6)

        elif target_element_type == 'quad8' and elem_type == 4:
            # quad4 -> quad8: four corners plus one midside node per edge
            n0, n1, n2, n3 = int(element[0]), int(element[1]), int(element[2]), int(element[3])
            n4 = get_or_create_midside_node(n0, n1)  # edge 0-1
            n5 = get_or_create_midside_node(n1, n2)  # edge 1-2
            n6 = get_or_create_midside_node(n2, n3)  # edge 2-3
            n7 = get_or_create_midside_node(n3, n0)  # edge 3-0
            # quad8 node ordering: [corner_nodes, midside_nodes], padded to 9
            new_elements.append([n0, n1, n2, n3, n4, n5, n6, n7, 0])
            new_element_types.append(8)

        elif target_element_type == 'quad9' and elem_type == 4:
            # quad4 -> quad9: edges as for quad8 plus a centroid node
            n0, n1, n2, n3 = int(element[0]), int(element[1]), int(element[2]), int(element[3])
            n4 = get_or_create_midside_node(n0, n1)  # edge 0-1
            n5 = get_or_create_midside_node(n1, n2)  # edge 1-2
            n6 = get_or_create_midside_node(n2, n3)  # edge 2-3
            n7 = get_or_create_midside_node(n3, n0)  # edge 3-0

            # Center node at the average of the four corners
            center_coord = (_coord(n0) + _coord(n1) + _coord(n2) + _coord(n3)) / 4.0
            extra_coords.append(center_coord.tolist())
            n8 = next_node_idx
            next_node_idx += 1

            # quad9 node ordering: [corner_nodes, midside_nodes, center_node]
            new_elements.append([n0, n1, n2, n3, n4, n5, n6, n7, n8])
            new_element_types.append(9)

        else:
            # Element does not match the requested conversion: keep unchanged
            new_elements.append(element.tolist())
            new_element_types.append(elem_type)

    # Convert 1D elements to quadratic if present
    new_elements_1d = []
    new_element_types_1d = []

    if has_1d_elements:
        for elem_idx, element in enumerate(elements_1d):
            elem_type = element_types_1d[elem_idx]

            if elem_type == 2:
                # Linear 1D -> quadratic; the midside node is shared with the
                # adjacent 2D element edge when one was already created there.
                n0, n1 = int(element[0]), int(element[1])
                n2 = get_or_create_midside_node(n0, n1)
                new_elements_1d.append([n0, n1, n2])
                new_element_types_1d.append(3)  # quadratic 1D
            else:
                # Keep original element unchanged
                new_elements_1d.append(element.tolist())
                new_element_types_1d.append(elem_type)

    # Assemble the final node array in a single pass
    if extra_coords:
        nodes = np.vstack([base_nodes, np.asarray(extra_coords, dtype=float)])
    else:
        nodes = base_nodes.copy()

    if debug:
        print(f"  Added {len(midside_nodes)} midside nodes")
        print(f"  Total nodes: {len(nodes)} (was {len(mesh['nodes'])})")

    # Create updated mesh
    updated_mesh = {
        "nodes": nodes,
        "elements": np.array(new_elements, dtype=int),
        "element_types": np.array(new_element_types, dtype=int),
        "element_materials": element_materials
    }

    if has_1d_elements:
        updated_mesh["elements_1d"] = np.array(new_elements_1d, dtype=int)
        updated_mesh["element_types_1d"] = np.array(new_element_types_1d, dtype=int)
        updated_mesh["element_materials_1d"] = element_materials_1d

    return updated_mesh
1017
+
1018
+
1019
def line_segment_parameter(point, line_start, line_end):
    """
    Return the parameter t locating *point* along a line segment, where
    point = line_start + t * (line_end - line_start).

    The dominant axis of the segment (larger absolute delta) is used for the
    division so the computation stays well-conditioned.
    """
    (px, py), (ax, ay), (bx, by) = point, line_start, line_end

    run = bx - ax
    rise = by - ay

    # Divide along whichever axis has the larger extent
    if abs(run) > abs(rise):
        return (px - ax) / run
    return (py - ay) / rise
1038
+
1039
+
1040
def line_segment_intersection(p1, p2, p3, p4, tol=1e-8):
    """
    Return the (x, y) intersection of segments p1-p2 and p3-p4, or None.

    Parallel (or degenerate) segments, and intersections falling outside
    either segment, yield None. Coordinates are rounded to 6 decimals.
    """
    (x1, y1), (x2, y2) = p1, p2
    (x3, y3), (x4, y4) = p3, p4

    ux, uy = x2 - x1, y2 - y1   # direction of first segment
    vx, vy = x4 - x3, y4 - y3   # direction of second segment

    denom = ux * vy - uy * vx
    if abs(denom) < tol:        # parallel lines never intersect
        return None

    wx, wy = x3 - x1, y3 - y1
    s = (wx * vy - wy * vx) / denom   # parameter along p1-p2
    u = (wx * uy - wy * ux) / denom   # parameter along p3-p4

    # Both parameters must land inside their segments
    if not (0 <= s <= 1 and 0 <= u <= 1):
        return None

    return (round(x1 + s * ux, 6), round(y1 + s * uy, 6))
1072
+
1073
+
1074
def point_near_existing(point, existing_points, tol=1e-8):
    """Return True when *point* matches any entry of *existing_points* within tol."""
    px, py = point
    return any(abs(px - qx) < tol and abs(py - qy) < tol
               for qx, qy in existing_points)
1081
+
1082
+
1083
def insert_point_into_polygon_edge(intersection, edge_start, edge_end, poly_data, point_map, target_size):
    """
    Splice an intersection point into the matching edge of a polygon.

    Registers the point in *point_map* when it is new (simple sequential tag),
    then inserts its tag into poly_data['pt_tags'] between the two tags that
    bound the edge, matched in either orientation. Does nothing when no edge
    of the polygon matches (should not happen).
    """
    tol = 1e-8
    x, y = intersection

    # Ensure the point exists in the point_map (for Gmsh)
    if (x, y) not in point_map:
        point_map[(x, y)] = len(point_map) + 1  # simple tag assignment

    pt_tags = poly_data['pt_tags']

    # Rebuild the polygon's coordinate ring from its point tags
    tag_to_coord = {tag: coord for coord, tag in point_map.items()}
    coords = []
    for tag in pt_tags:
        coord = tag_to_coord.get(tag)
        if coord is None:
            # Fallback: linear scan of point_map (should not trigger)
            coord = next(((cx, cy) for (cx, cy), t in point_map.items() if t == tag),
                         (None, None))
        coords.append(coord)

    def _same(a, b):
        return abs(a[0] - b[0]) < tol and abs(a[1] - b[1]) < tol

    # Locate the edge (forward or reversed) and splice the point after it
    for i, a in enumerate(coords):
        b = coords[(i + 1) % len(coords)]
        forward = _same(a, edge_start) and _same(b, edge_end)
        reverse = _same(a, edge_end) and _same(b, edge_start)
        if forward or reverse:
            coords.insert(i + 1, (x, y))
            pt_tags.insert(i + 1, point_map[(x, y)])
            poly_data['pt_tags'] = pt_tags
            return
    # If no edge matched, leave the polygon untouched
1134
+
1135
+
1136
def get_quad_mesh_presets():
    """
    Return named preset quad meshing parameter combinations to try.

    Each preset maps gmsh option names to values; the extra 'size_factor'
    entry is a multiplier applied to the target element size before meshing.
    """
    default = {
        "Mesh.Algorithm": 8,
        "Mesh.RecombinationAlgorithm": 1,
        "Mesh.SubdivisionAlgorithm": 1,
        "Mesh.RecombineOptimizeTopology": 5,
        "Mesh.Smoothing": 10,
        "size_factor": 1.4,  # Target size adjustment
    }
    blossom = {
        "Mesh.Algorithm": 6,
        "Mesh.RecombinationAlgorithm": 2,  # Blossom
        "Mesh.SubdivisionAlgorithm": 1,
        "Mesh.RecombineOptimizeTopology": 20,
        "Mesh.Smoothing": 20,
        "size_factor": 1.6,  # Slightly larger for better recombination
    }
    blossom_full = {
        "Mesh.Algorithm": 5,
        "Mesh.RecombinationAlgorithm": 3,  # Blossom full-quad
        "Mesh.SubdivisionAlgorithm": 1,
        "Mesh.RecombineOptimizeTopology": 50,
        "Mesh.Smoothing": 30,
        "size_factor": 1.7,  # Larger for complex recombination
    }
    high_quality = {
        "Mesh.Algorithm": 6,
        "Mesh.RecombinationAlgorithm": 1,
        "Mesh.SubdivisionAlgorithm": 1,
        "Mesh.RecombineOptimizeTopology": 100,
        "Mesh.RecombineNodeRepositioning": 1,
        "Mesh.RecombineMinimumQuality": 0.1,
        "Mesh.Smoothing": 50,
        "Mesh.SmoothRatio": 2.0,
        "size_factor": 2.0,  # Much larger due to heavy optimization
    }
    fast = {
        "Mesh.Algorithm": 8,
        "Mesh.RecombinationAlgorithm": 0,  # Standard (fastest)
        "Mesh.SubdivisionAlgorithm": 0,
        "Mesh.RecombineOptimizeTopology": 0,
        "Mesh.Smoothing": 5,
        "size_factor": 0.7,  # Smaller adjustment = more elements
    }
    return {
        'default': default,
        'blossom': blossom,
        'blossom_full': blossom_full,
        'high_quality': high_quality,
        'fast': fast,
    }
1186
+
1187
+
1188
+
1189
def build_polygons(slope_data, reinf_lines=None, debug=False):
    """
    Build material zone polygons from slope_data.

    Extracts profile lines and max depth, then creates polygons for each material zone.
    Also integrates distributed load points and reinforcement line endpoints that are
    coincident with polygon edges.

    Parameters:
        slope_data: Dictionary containing slope geometry data; reads
            'profile_lines' (list of lists of (x, y) tuples) and 'max_depth'
        reinf_lines: Optional list of reinforcement lines; when given, their
            intersection points with polygon edges are inserted as vertices
            via add_intersection_points_to_polygons
        debug: Passed through to add_intersection_points_to_polygons

    Returns:
        List of polygons, each defined by (x,y) coordinate tuples

    Raises:
        ValueError: If no profile lines are given, or if only one profile
            line is given without a max_depth.
    """
    import numpy as np
    import copy

    # Extract profile lines and max depth from slope_data
    profile_lines = slope_data.get('profile_lines', [])
    max_depth = slope_data.get('max_depth', None)

    if not profile_lines:
        raise ValueError("Need at least 1 profile line to create material zones")

    # For single profile line, max_depth serves as the bottom boundary
    if len(profile_lines) == 1:
        if max_depth is None:
            raise ValueError("When using only 1 profile line, max_depth must be specified")

    def get_avg_y(line):
        # Average elevation of a profile line, used only for top-to-bottom ordering
        return sum(y for _, y in line) / len(line)

    # Sort profile lines from top to bottom by average y
    sorted_lines = sorted(profile_lines, key=get_avg_y, reverse=True)
    n = len(sorted_lines)
    # Deep copy so we can insert points
    lines = [list(line) for line in copy.deepcopy(sorted_lines)]
    tol = 1e-8

    # For each upper profile's left/right endpoint, project it vertically onto
    # the highest lower profile that spans that x, and insert the projected
    # point into the lower profile so zone boundaries share vertices.
    for i in range(n - 1):
        top = lines[i]
        for endpoint in [0, -1]:  # left and right
            x_top, y_top = top[endpoint]
            # Find the highest lower profile at this x
            best_j = None
            best_y = -np.inf
            for j in range(i + 1, n):
                lower = lines[j]
                xs_lower = np.array([x for x, y in lower])
                ys_lower = np.array([y for x, y in lower])
                # NOTE(review): assumes each profile line is sorted by x so
                # xs_lower[0]/xs_lower[-1] bound its extent — confirm upstream
                if xs_lower[0] - tol <= x_top <= xs_lower[-1] + tol:
                    y_proj = np.interp(x_top, xs_lower, ys_lower)
                    if y_proj > best_y:
                        best_y = y_proj
                        best_j = j
            if best_j is not None:
                # Recompute projection on the chosen profile
                lower = lines[best_j]
                xs_lower = np.array([x for x, y in lower])
                ys_lower = np.array([y for x, y in lower])
                y_proj = np.interp(x_top, xs_lower, ys_lower)
                # Check if lower profile already has a point at this x (within tol)
                found = False
                for (x_l, y_l) in lower:
                    if abs(x_l - x_top) < tol:
                        found = True
                        break
                if abs(y_proj - y_top) < tol:
                    # Coincident: insert (x_top, y_top) if not present
                    if not found:
                        insert_idx = np.searchsorted(xs_lower, x_top)
                        lower.insert(insert_idx, (round(x_top, 6), round(y_top, 6)))
                else:
                    # Not coincident: insert (x_top, y_proj) if not present
                    if not found:
                        insert_idx = np.searchsorted(xs_lower, x_top)
                        lower.insert(insert_idx, (round(x_top, 6), round(y_proj, 6)))

    def clean_polygon(poly, tol=1e-8):
        # Remove consecutive duplicate points (except for closing point)
        if not poly:
            return poly
        cleaned = [poly[0]]
        for pt in poly[1:]:
            if abs(pt[0] - cleaned[-1][0]) > tol or abs(pt[1] - cleaned[-1][1]) > tol:
                cleaned.append(pt)
        # Ensure closed
        if abs(cleaned[0][0] - cleaned[-1][0]) > tol or abs(cleaned[0][1] - cleaned[-1][1]) > tol:
            cleaned.append(cleaned[0])
        return cleaned

    # Now build polygons as before: zone i is bounded above by profile i and
    # below by profile i+1 (or by max_depth for the lowest zone)
    polygons = []
    for i, top_line in enumerate(lines):
        xs_top, ys_top = zip(*top_line)
        xs_top = np.array(xs_top)
        ys_top = np.array(ys_top)
        left_x, left_y = xs_top[0], ys_top[0]
        right_x, right_y = xs_top[-1], ys_top[-1]

        if i < n - 1:
            lower_line = lines[i + 1]
            xs_bot, ys_bot = zip(*lower_line)
            xs_bot = np.array(xs_bot)
            ys_bot = np.array(ys_bot)
            # Project left and right endpoints vertically to lower profile
            left_y_bot = np.interp(left_x, xs_bot, ys_bot)
            right_y_bot = np.interp(right_x, xs_bot, ys_bot)
            # Find all lower profile points between left_x and right_x (exclusive)
            mask = (xs_bot > left_x) & (xs_bot < right_x)
            xs_bot_in = xs_bot[mask]
            ys_bot_in = ys_bot[mask]
            # Build bottom boundary: right projection, lower profile points (right to left), left projection
            bottom = []
            bottom.append((right_x, right_y_bot))
            for x, y in zip(xs_bot_in[::-1], ys_bot_in[::-1]):
                bottom.append((x, y))
            bottom.append((left_x, left_y_bot))
        else:
            # For the lowest polygon, bottom is at max_depth
            # Only need endpoints - no intermediate points
            bottom = []
            bottom.append((right_x, max_depth))
            bottom.append((left_x, max_depth))

        # Build polygon: top left-to-right, bottom right-to-left
        poly = []
        for x, y in zip(xs_top, ys_top):
            poly.append((round(x, 6), round(y, 6)))
        for x, y in bottom:
            poly.append((round(x, 6), round(y, 6)))
        # Clean up polygon (should rarely do anything)
        poly = clean_polygon(poly)
        polygons.append(poly)

    # Add distributed load points to polygon edges if coincident
    polygons = add_dload_points_to_polygons(polygons, slope_data)

    # Add intersection points with reinforcement lines if provided
    if reinf_lines is not None:
        polygons = add_intersection_points_to_polygons(polygons, reinf_lines, debug=debug)

    return polygons
1331
+
1332
def add_dload_points_to_polygons(polygons, slope_data):
    """
    Insert distributed-load points into polygon edges they lie on.

    A load point that already coincides with a polygon vertex is left alone;
    a point lying on an edge (but not at a vertex) is spliced into that edge
    so the mesher honors it.

    Parameters:
        polygons: List of polygons (lists of (x,y) tuples)
        slope_data: Dictionary containing slope data

    Returns:
        Updated list of polygons with added points
    """
    tol = 1e-8

    # Gather every vertex of every distributed load definition
    candidates = [pt
                  for load in slope_data.get('distributed_loads', [])
                  if 'xy' in load
                  for pt in load['xy']]

    if not candidates:
        return polygons

    result = []
    for poly in polygons:
        new_poly = list(poly)  # work on a copy

        for cx, cy in candidates:
            # Skip points that already coincide with a vertex
            if any(abs(vx - cx) < tol and abs(vy - cy) < tol for vx, vy in new_poly):
                continue

            # Splice the point into the first edge it lies on (once per point)
            for i in range(len(new_poly)):
                a = new_poly[i]
                b = new_poly[(i + 1) % len(new_poly)]
                if is_point_on_edge((cx, cy), a, b, tol):
                    new_poly.insert(i + 1, (round(cx, 6), round(cy, 6)))
                    break

        result.append(new_poly)

    return result
1393
+
1394
def is_point_on_edge(point, edge_start, edge_end, tol=1e-8):
    """
    Check if a point lies on a line segment (edge).

    Parameters:
        point: (x, y) tuple of point to check
        edge_start: (x, y) tuple of edge start
        edge_end: (x, y) tuple of edge end
        tol: Tolerance for coincidence

    Returns:
        bool: True if point lies on edge segment
    """
    px, py = point
    ax, ay = edge_start
    bx, by = edge_end

    # Quick rejection: outside the segment's bounding box
    within_x = min(ax, bx) - tol <= px <= max(ax, bx) + tol
    within_y = min(ay, by) - tol <= py <= max(ay, by) + tol
    if not (within_x and within_y):
        return False

    # Collinearity via cross product of (edge) x (start -> point)
    cross = (py - ay) * (bx - ax) - (px - ax) * (by - ay)
    if abs(cross) >= tol:
        return False

    seg_len_sq = (bx - ax) ** 2 + (by - ay) ** 2
    if seg_len_sq < tol:
        # Degenerate edge: treat it as a single point
        return abs(px - ax) < tol and abs(py - ay) < tol

    # Project onto the edge; t in [0, 1] (with tolerance) means on-segment
    t = ((px - ax) * (bx - ax) + (py - ay) * (by - ay)) / seg_len_sq
    return -tol <= t <= 1 + tol
1435
+
1436
def print_polygon_summary(polygons):
    """
    Prints a summary of the generated polygons for diagnostic purposes.

    For each zone: vertex count, approximate area (trapezoid/shoelace rule
    over consecutive vertex pairs; assumes a closed ring), and bounding box.

    Parameters:
        polygons: List of polygon coordinate lists
    """
    print("=== POLYGON SUMMARY ===")
    print(f"Number of material zones: {len(polygons)}")
    print()

    for zone_idx, ring in enumerate(polygons):
        print(f"Material Zone {zone_idx+1} (Material ID: {zone_idx}):")
        print(f" Number of vertices: {len(ring)}")

        # Trapezoid (shoelace) rule over consecutive vertex pairs
        signed_area = sum((x2 - x1) * (y2 + y1) / 2
                          for (x1, y1), (x2, y2) in zip(ring[:-1], ring[1:]))
        area = abs(signed_area)
        print(f" Approximate area: {area:.2f} square units")

        # Print bounding box
        xs = [x for x, _ in ring]
        ys = [y for _, y in ring]
        print(f" Bounding box: x=[{min(xs):.2f}, {max(xs):.2f}], y=[{min(ys):.2f}, {max(ys):.2f}]")
        print()
1466
+
1467
+
1468
+
1469
+
1470
def export_mesh_to_json(mesh, filename):
    """Save mesh dictionary to JSON file.

    numpy arrays are converted to nested lists so they can be serialized;
    all other values are written as-is.

    Parameters:
        mesh: Mesh dictionary (values may be numpy arrays)
        filename: Path of the JSON file to write
    """
    import json
    import numpy as np

    # Convert numpy arrays to lists for JSON serialization
    mesh_json = {key: (value.tolist() if isinstance(value, np.ndarray) else value)
                 for key, value in mesh.items()}

    with open(filename, 'w') as f:
        json.dump(mesh_json, f, indent=2)

    # Bug fix: message previously printed a literal placeholder instead of
    # the actual destination file name.
    print(f"Mesh saved to {filename}")
1487
+
1488
def import_mesh_from_json(filename):
    """Load mesh dictionary from JSON file.

    Every list value is converted back into a numpy array (mirroring
    export_mesh_to_json); other values are returned unchanged.
    """
    import json
    import numpy as np

    with open(filename, 'r') as f:
        raw = json.load(f)

    # Convert lists back to numpy arrays
    return {key: (np.array(value) if isinstance(value, list) else value)
            for key, value in raw.items()}
1505
+
1506
def remove_duplicate_endpoint(poly, tol=1e-8):
    """Drop the closing vertex when it repeats the first vertex (within tol)."""
    if len(poly) <= 1:
        return poly
    first, last = poly[0], poly[-1]
    is_closed = abs(first[0] - last[0]) < tol and abs(first[1] - last[1]) < tol
    return poly[:-1] if is_closed else poly
1510
+
1511
+
1512
def extract_1d_elements_from_2d_edges(nodes, elements_2d, element_types_2d, lines, debug=False):
    """
    Extract 1D elements from 2D element edges that lie along reinforcement lines.
    This ensures proper finite element integration where 1D elements are shared edges of 2D elements.

    Parameters:
        nodes: np.ndarray of node coordinates (n_nodes, 2)
        elements_2d: np.ndarray of 2D element vertex indices (n_elements, 9)
        element_types_2d: np.ndarray indicating 2D element type (3, 4, 6, 8, or 9 nodes)
        lines: List of reinforcement lines, each defined by list of (x, y) tuples
        debug: Enable debug output

    Returns:
        tuple: (elements_1d, mat_ids_1d, element_node_counts_1d)
    """
    import numpy as np
    from collections import defaultdict

    elements_1d = []
    mat_ids_1d = []
    element_node_counts_1d = []

    # Map each canonical corner edge (lo, hi) to the 2D elements sharing it
    edge_to_element = defaultdict(list)
    element_edges = {}  # element index -> its corner edges

    for elem_idx, (element, elem_type) in enumerate(zip(elements_2d, element_types_2d)):
        if elem_type in (3, 6):
            # Triangle corner edges: (0,1), (1,2), (2,0)
            corners = [element[0], element[1], element[2]]
            pairs = ((0, 1), (1, 2), (2, 0))
        elif elem_type in (4, 8, 9):
            # Quadrilateral corner edges: (0,1), (1,2), (2,3), (3,0)
            corners = [element[0], element[1], element[2], element[3]]
            pairs = ((0, 1), (1, 2), (2, 3), (3, 0))
        else:
            # Unknown element type: record no edges for it
            element_edges[elem_idx] = []
            continue

        edges = []
        for i, j in pairs:
            a, b = corners[i], corners[j]
            key = (min(a, b), max(a, b))  # canonical edge representation
            edges.append(key)
            edge_to_element[key].append(elem_idx)
        element_edges[elem_idx] = edges

    if debug:
        print(f"Built edge map with {len(edge_to_element)} unique edges from {len(elements_2d)} 2D elements")

    # For each reinforcement line, collect the mesh edges coincident with it
    for line_idx, line_pts in enumerate(lines):
        pts = remove_duplicate_endpoint(list(line_pts))

        if len(pts) < 2:
            continue

        if debug:
            print(f"Processing reinforcement line {line_idx}: {pts}")

        line_edges = []
        for n1, n2 in edge_to_element:
            coord1 = nodes[n1]
            coord2 = nodes[n2]
            if is_edge_on_reinforcement_line(coord1, coord2, pts, tolerance=1e-6):
                line_edges.append((n1, n2))
                if debug:
                    print(f" Found edge ({n1}, {n2}) at coords {coord1} -> {coord2}")

        # Order the collected edges along the line and emit 1D elements
        if line_edges:
            ordered = sort_edges_along_line(line_edges, nodes, pts, debug)
            for n1, n2 in ordered:
                elements_1d.append([n1, n2, 0])  # pad to 3 columns
                mat_ids_1d.append(line_idx)
                element_node_counts_1d.append(2)

            if debug:
                print(f" Created {len(ordered)} 1D elements for line {line_idx}")

    if debug:
        print(f"Total 1D elements extracted: {len(elements_1d)}")

    return elements_1d, mat_ids_1d, element_node_counts_1d
1612
+
1613
+
1614
def is_edge_on_reinforcement_line(coord1, coord2, line_pts, tolerance=1e-6):
    """
    Check if an edge lies along a reinforcement line.

    Both endpoints must sit on the polyline, and the edge direction must be
    nearly (anti)parallel to at least one of its segments, so edges that
    merely cross the line are rejected.

    Parameters:
        coord1, coord2: Edge endpoint coordinates (x, y)
        line_pts: List of (x, y) points defining the reinforcement line
        tolerance: Tolerance for coincidence checking

    Returns:
        bool: True if edge lies along the reinforcement line
    """
    # Both endpoints must lie somewhere on the polyline
    on_line = (is_point_on_line_segments(coord1, line_pts, tolerance)
               and is_point_on_line_segments(coord2, line_pts, tolerance))
    if not on_line:
        return False

    edge_vec = np.array([coord2[0] - coord1[0], coord2[1] - coord1[1]])
    edge_len = np.linalg.norm(edge_vec)
    if edge_len < tolerance:
        return False  # degenerate edge
    edge_dir = edge_vec / edge_len

    # Accept if the edge is aligned with any segment of the line; edges may
    # span multiple segments after intersection preprocessing.
    for start_pt, end_pt in zip(line_pts[:-1], line_pts[1:]):
        seg_vec = np.asarray(end_pt, dtype=float) - np.asarray(start_pt, dtype=float)
        seg_len = np.linalg.norm(seg_vec)
        if seg_len < tolerance:
            continue
        # Nearly parallel in either direction (cos(18 deg) ~ 0.95)
        if abs(np.dot(edge_dir, seg_vec / seg_len)) > 0.95:
            return True

    return False
1667
+
1668
+
1669
def is_point_on_line_segments(point, line_pts, tolerance=1e-6):
    """
    Check whether a point lies on any segment of a multi-segment polyline.

    Parameters:
        point: (x, y) coordinates of the point to check
        line_pts: List of (x, y) points defining the polyline
        tolerance: Tolerance for coincidence checking

    Returns:
        bool: True if the point lies on at least one segment
    """
    # Test each consecutive pair of polyline vertices as a segment.
    return any(
        is_point_on_line_segment(point, seg_start, seg_end, tolerance)
        for seg_start, seg_end in zip(line_pts[:-1], line_pts[1:])
    )
1685
+
1686
+
1687
def is_point_on_line_segment(point, seg_start, seg_end, tolerance=1e-6):
    """
    Check whether a point lies on a single line segment.

    Parameters:
        point: (x, y) coordinates of the point to check
        seg_start: (x, y) coordinates of the segment start
        seg_end: (x, y) coordinates of the segment end
        tolerance: Tolerance for coincidence checking

    Returns:
        bool: True if the point lies on the segment (within tolerance)
    """
    px, py = point
    ax, ay = seg_start
    bx, by = seg_end

    # Quick rejection: point must fall inside the segment's padded bounding box.
    in_x = min(ax, bx) - tolerance <= px <= max(ax, bx) + tolerance
    in_y = min(ay, by) - tolerance <= py <= max(ay, by) + tolerance
    if not (in_x and in_y):
        return False

    dx = bx - ax
    dy = by - ay

    # Collinearity: cross product of (segment) x (start->point) must vanish.
    if abs((py - ay) * dx - (px - ax) * dy) >= tolerance:
        return False

    seg_len_sq = dx * dx + dy * dy
    if seg_len_sq < tolerance:
        # Degenerate (near zero-length) segment: require coincidence with start.
        return abs(px - ax) < tolerance and abs(py - ay) < tolerance

    # Projection parameter t in [0, 1] (with slack) means "between endpoints".
    t = ((px - ax) * dx + (py - ay) * dy) / seg_len_sq
    return -tolerance <= t <= 1 + tolerance
1726
+
1727
+
1728
def sort_edges_along_line(edges, nodes, line_pts, debug=False):
    """
    Order edges into a continuous chain along a reinforcement line.

    Parameters:
        edges: List of (n1, n2) node-index pairs
        nodes: Node coordinates array
        line_pts: Reinforcement line points
        debug: Enable debug output

    Returns:
        list: Edge tuples (n1, n2) ordered head-to-tail, starting from the
        chain endpoint nearest the start of the reinforcement line.
    """
    if not edges:
        return []
    if len(edges) == 1:
        return edges

    # Adjacency map: node -> list of connected nodes.
    adjacency = defaultdict(list)
    for a, b in edges:
        adjacency[a].append(b)
        adjacency[b].append(a)

    line_start = np.array(line_pts[0])

    # Prefer a degree-1 node (a chain endpoint); among candidates pick the
    # one nearest the start of the reinforcement line. If every node has
    # degree >= 2 (a loop), fall back to the node nearest the line start.
    endpoints = [n for n, nbrs in adjacency.items() if len(nbrs) == 1]
    candidates = endpoints if endpoints else list(adjacency.keys())
    current = min(candidates, key=lambda n: np.linalg.norm(nodes[n] - line_start))

    # Greedy walk: repeatedly follow an unused edge out of the current node.
    chain = []
    visited = set()
    while True:
        step = None
        for neighbor in adjacency[current]:
            key = (min(current, neighbor), max(current, neighbor))
            if key not in visited:
                visited.add(key)
                chain.append((current, neighbor))
                step = neighbor
                break
        if step is None:  # no unused edge left from this node
            break
        current = step

    if debug:
        print(f"  Sorted {len(chain)} edges along line")

    return chain
1806
+
1807
def verify_mesh_connectivity(mesh, tolerance=1e-8):
    """
    Verify that the mesh is properly connected.

    Checks for three kinds of problems:
      * duplicate nodes - distinct node indices closer than `tolerance`
        (typically left over from imperfect merging of region boundaries)
      * isolated nodes - nodes referenced by no element
      * degenerate elements - elements listing the same node twice

    Duplicate detection bins nodes into a spatial hash with cell size equal
    to `tolerance` and only compares nodes in neighboring cells, replacing
    the previous O(n^2) all-pairs scan with an O(n) pass. Any node within
    `tolerance` of node i differs by less than one cell width per axis, so
    searching i's cell plus its 8 neighbors finds exactly the same pairs.

    Parameters:
        mesh: Mesh dictionary with 'nodes' and 'elements' keys
        tolerance: Distance below which two nodes count as duplicates

    Returns:
        dict: {'total_nodes', 'total_elements', 'duplicate_node_groups',
               'isolated_nodes', 'elements_with_duplicates', 'is_connected'}
        'is_connected' is True when there are no duplicate groups and no
        isolated nodes (degenerate elements are reported but, as before,
        do not affect the flag).
    """
    import numpy as np
    from collections import defaultdict

    nodes = mesh["nodes"]
    elements = mesh["elements"]
    coords = np.asarray(nodes, dtype=float)
    n_nodes = len(coords)

    # --- Duplicate-node detection via spatial hashing ---
    cell = tolerance if tolerance > 0 else 1e-12  # guard against zero cell size
    bins = defaultdict(list)
    for idx in range(n_nodes):
        key = (int(np.floor(coords[idx, 0] / cell)), int(np.floor(coords[idx, 1] / cell)))
        bins[key].append(idx)

    duplicate_groups = []
    used_indices = set()
    for i in range(n_nodes):
        if i in used_indices:
            continue
        gx = int(np.floor(coords[i, 0] / cell))
        gy = int(np.floor(coords[i, 1] / cell))
        group = [i]
        # Only nodes in the 3x3 neighborhood of i's cell can be duplicates.
        for dx in (-1, 0, 1):
            for dy in (-1, 0, 1):
                for j in bins.get((gx + dx, gy + dy), ()):
                    if j <= i or j in used_indices:
                        continue
                    if np.linalg.norm(coords[i] - coords[j]) < tolerance:
                        group.append(j)
                        used_indices.add(j)
        if len(group) > 1:
            # Sort so the group lists indices ascending (i is the minimum),
            # matching the output of the original sequential scan.
            duplicate_groups.append(sorted(group))
        used_indices.add(i)

    # --- Node usage: which elements reference each node ---
    element_connectivity = defaultdict(set)
    for elem_idx, element in enumerate(elements):
        for node_idx in element:
            element_connectivity[node_idx].add(elem_idx)

    # Nodes never referenced by any element.
    isolated_nodes = [i for i in range(n_nodes) if i not in element_connectivity]

    # Elements that repeat a node (collapsed edges / zero area).
    elements_with_duplicates = [
        elem_idx for elem_idx, element in enumerate(elements)
        if len(set(element)) != len(element)
    ]

    return {
        "total_nodes": n_nodes,
        "total_elements": len(elements),
        "duplicate_node_groups": duplicate_groups,
        "isolated_nodes": isolated_nodes,
        "elements_with_duplicates": elements_with_duplicates,
        "is_connected": len(duplicate_groups) == 0 and len(isolated_nodes) == 0,
    }
1874
+
1875
def print_mesh_connectivity_report(mesh, tolerance=1e-8):
    """
    Print a human-readable summary of mesh connectivity issues.

    Runs verify_mesh_connectivity() and reports totals, duplicate-node
    groups, isolated nodes, and elements that repeat a node.

    Parameters:
        mesh: Mesh dictionary
        tolerance: Tolerance for considering nodes as duplicates
    """
    report = verify_mesh_connectivity(mesh, tolerance)
    node_coords = mesh['nodes']
    element_table = mesh['elements']

    print("=== MESH CONNECTIVITY REPORT ===")
    print(f"Total nodes: {report['total_nodes']}")
    print(f"Total elements: {report['total_elements']}")
    print(f"Mesh is properly connected: {report['is_connected']}")
    print()

    duplicate_groups = report['duplicate_node_groups']
    if duplicate_groups:
        print(f"WARNING: Found {len(duplicate_groups)} groups of duplicate nodes:")
        for label, group in enumerate(duplicate_groups, start=1):
            print(f"  Group {label}: Nodes {group} at position {node_coords[group[0]]}")
        print()

    isolated = report['isolated_nodes']
    if isolated:
        print(f"WARNING: Found {len(isolated)} isolated nodes:")
        for node_idx in isolated:
            print(f"  Node {node_idx} at position {node_coords[node_idx]}")
        print()

    degenerate = report['elements_with_duplicates']
    if degenerate:
        print(f"WARNING: Found {len(degenerate)} elements with duplicate nodes:")
        for elem_idx in degenerate:
            print(f"  Element {elem_idx}: {element_table[elem_idx]}")
        print()

    if report['is_connected']:
        print("✓ Mesh connectivity is good - no duplicate nodes or isolated nodes found.")
    else:
        print("✗ Mesh connectivity issues detected. Consider regenerating the mesh.")
1913
+
1914
def find_element_containing_point(nodes, elements, element_types, point):
    """
    Find which element contains the given point using spatial indexing.

    A spatial hash grid is built once per mesh and cached on the function
    object. The cache is keyed on the identity and size of the node and
    element arrays, so passing a different mesh rebuilds the grid. (The
    previous implementation cached the first grid forever via hasattr(),
    silently returning wrong results for any subsequent mesh.)

    Parameters:
        nodes: np.ndarray of node coordinates (n_nodes, 2)
        elements: np.ndarray of element vertex indices (n_elements, 9) - unused nodes set to 0
        element_types: np.ndarray indicating element type (3, 4, 6, 8, or 9 nodes)
        point: tuple (x, y) coordinates of the point to find

    Returns:
        int: Index of the element containing the point, or -1 if not found
    """
    x, y = point

    # Rebuild the cached spatial grid whenever the mesh arrays change.
    cache_key = (id(nodes), id(elements), len(nodes), len(elements))
    if getattr(find_element_containing_point, '_spatial_grid_key', None) != cache_key:
        find_element_containing_point._spatial_grid = _build_spatial_grid(nodes, elements, element_types)
        find_element_containing_point._spatial_grid_key = cache_key

    spatial_grid = find_element_containing_point._spatial_grid

    # Locate the grid cell containing the query point.
    grid_x = int((x - spatial_grid['x_min']) / spatial_grid['cell_size'])
    grid_y = int((y - spatial_grid['y_min']) / spatial_grid['cell_size'])

    # Collect candidate elements from this cell and its 8 neighbors.
    candidate_elements = set()
    for dx in [-1, 0, 1]:
        for dy in [-1, 0, 1]:
            cell_key = (grid_x + dx, grid_y + dy)
            if cell_key in spatial_grid['cells']:
                candidate_elements.update(spatial_grid['cells'][cell_key])

    # Exact containment test on the candidates only.
    for elem_idx in candidate_elements:
        element = elements[elem_idx]
        elem_type = element_types[elem_idx]

        if elem_type in [3, 6]:  # Triangle (linear or quadratic)
            # Containment uses corner nodes only (valid for straight-sided
            # quadratic elements as produced by the mesher).
            x1, y1 = nodes[element[0]]
            x2, y2 = nodes[element[1]]
            x3, y3 = nodes[element[2]]

            # Barycentric coordinates; skip degenerate triangles.
            det = (y2 - y3) * (x1 - x3) + (x3 - x2) * (y1 - y3)
            if abs(det) < 1e-12:
                continue

            lambda1 = ((y2 - y3) * (x - x3) + (x3 - x2) * (y - y3)) / det
            lambda2 = ((y3 - y1) * (x - x3) + (x1 - x3) * (y - y3)) / det
            lambda3 = 1.0 - lambda1 - lambda2

            # Inside iff all barycentric coordinates are non-negative (with slack).
            if lambda1 >= -1e-12 and lambda2 >= -1e-12 and lambda3 >= -1e-12:
                return elem_idx

        elif elem_type in [4, 8, 9]:  # Quadrilateral (linear or quadratic)
            # Containment uses the 4 corner nodes only.
            vertices = [
                tuple(nodes[element[0]]),
                tuple(nodes[element[1]]),
                tuple(nodes[element[2]]),
                tuple(nodes[element[3]]),
            ]

            # Standard ray-casting point-in-polygon test (crossing count).
            inside = False
            for j in range(len(vertices)):
                xi, yi = vertices[j]
                xj, yj = vertices[(j + 1) % len(vertices)]
                if ((yi > y) != (yj > y)) and (x < (xj - xi) * (y - yi) / (yj - yi) + xi):
                    inside = not inside

            if inside:
                return elem_idx

    return -1  # Point not found in any element
1995
+
1996
+
1997
+ def _build_spatial_grid(nodes, elements, element_types):
1998
+ """
1999
+ Build a spatial hash grid for efficient element searching.
2000
+
2001
+ Parameters:
2002
+ nodes: np.ndarray of node coordinates (n_nodes, 2)
2003
+ elements: np.ndarray of element vertex indices (n_elements, 8)
2004
+ element_types: np.ndarray indicating element type (3, 4, 6, or 8 nodes)
2005
+
2006
+ Returns:
2007
+ dict: Spatial grid data structure
2008
+ """
2009
+ # Calculate bounding box
2010
+ x_coords = nodes[:, 0]
2011
+ y_coords = nodes[:, 1]
2012
+ x_min, x_max = x_coords.min(), x_coords.max()
2013
+ y_min, y_max = y_coords.min(), y_coords.max()
2014
+
2015
+ # Determine optimal cell size based on average element size
2016
+ total_area = 0
2017
+ for i, (element, elem_type) in enumerate(zip(elements, element_types)):
2018
+ if elem_type in [3, 6]: # Triangle
2019
+ x1, y1 = nodes[element[0]]
2020
+ x2, y2 = nodes[element[1]]
2021
+ x3, y3 = nodes[element[2]]
2022
+ area = 0.5 * abs((x2 - x1) * (y3 - y1) - (x3 - x1) * (y2 - y1))
2023
+ else: # Quadrilateral (4 or 8 nodes)
2024
+ x1, y1 = nodes[element[0]]
2025
+ x2, y2 = nodes[element[1]]
2026
+ x3, y3 = nodes[element[2]]
2027
+ x4, y4 = nodes[element[3]]
2028
+ area = 0.5 * abs((x2 - x1) * (y4 - y1) - (x4 - x1) * (y2 - y1))
2029
+ total_area += area
2030
+
2031
+ avg_element_area = total_area / len(elements)
2032
+ # Cell size should be roughly 2-3 times the square root of average element area
2033
+ cell_size = max(0.1, 2.5 * np.sqrt(avg_element_area))
2034
+
2035
+ # Build grid
2036
+ grid = {
2037
+ 'x_min': x_min,
2038
+ 'y_min': y_min,
2039
+ 'cell_size': cell_size,
2040
+ 'cells': {}
2041
+ }
2042
+
2043
+ # Assign elements to grid cells
2044
+ for elem_idx, (element, elem_type) in enumerate(zip(elements, element_types)):
2045
+ # Calculate element bounding box
2046
+ if elem_type in [3, 6]: # Triangle
2047
+ x_coords = [nodes[element[0]][0], nodes[element[1]][0], nodes[element[2]][0]]
2048
+ y_coords = [nodes[element[0]][1], nodes[element[1]][1], nodes[element[2]][1]]
2049
+ else: # Quadrilateral (4 or 8 nodes)
2050
+ x_coords = [nodes[element[0]][0], nodes[element[1]][0], nodes[element[2]][0], nodes[element[3]][0]]
2051
+ y_coords = [nodes[element[0]][1], nodes[element[1]][1], nodes[element[2]][1], nodes[element[3]][1]]
2052
+
2053
+ elem_x_min, elem_x_max = min(x_coords), max(x_coords)
2054
+ elem_y_min, elem_y_max = min(y_coords), max(y_coords)
2055
+
2056
+ # Find grid cells that overlap with this element
2057
+ start_x = int((elem_x_min - x_min) / cell_size)
2058
+ end_x = int((elem_x_max - x_min) / cell_size) + 1
2059
+ start_y = int((elem_y_min - y_min) / cell_size)
2060
+ end_y = int((elem_y_max - y_min) / cell_size) + 1
2061
+
2062
+ # Add element to all overlapping cells
2063
+ for grid_x in range(start_x, end_x + 1):
2064
+ for grid_y in range(start_y, end_y + 1):
2065
+ cell_key = (grid_x, grid_y)
2066
+ if cell_key not in grid['cells']:
2067
+ grid['cells'][cell_key] = set()
2068
+ grid['cells'][cell_key].add(elem_idx)
2069
+
2070
+ return grid
2071
+
2072
+
2073
def interpolate_at_point(nodes, elements, element_types, values, point):
    """
    Interpolate a nodal field at an arbitrary (x, y) point in the mesh.

    The containing element is located with find_element_containing_point(),
    then the element's shape functions are evaluated at the point:

      * type 3: linear triangle (barycentric coordinates)
      * type 6: quadratic triangle (6-node shape functions)
      * type 4: bilinear quadrilateral (Newton-Raphson inverse mapping)
      * type 8: serendipity quadrilateral (approximate natural coordinates)
      * type 9: biquadratic Lagrange quadrilateral (Newton-Raphson inverse mapping)

    Parameters:
        nodes: np.ndarray of node coordinates (n_nodes, 2)
        elements: np.ndarray of element vertex indices - unused entries set to 0
        element_types: np.ndarray indicating element type (3, 4, 6, 8, or 9 nodes)
        values: np.ndarray of values at nodes (n_nodes,)
        point: tuple (x, y) coordinates of the point to interpolate at

    Returns:
        float: Interpolated value, clamped below at 0.0 (the field is treated
        as a pore pressure, which cannot be negative). 0.0 is also returned
        when the point lies outside the mesh or the element type is unknown.
    """
    # Find the element containing the point
    element_idx = find_element_containing_point(nodes, elements, element_types, point)

    if element_idx == -1:
        return 0.0  # Point not found in any element

    element = elements[element_idx]
    elem_type = element_types[element_idx]
    x, y = point

    if elem_type == 3:  # Linear triangle
        # Get triangle vertices and values
        x1, y1 = nodes[element[0]]
        x2, y2 = nodes[element[1]]
        x3, y3 = nodes[element[2]]
        v1 = values[element[0]]
        v2 = values[element[1]]
        v3 = values[element[2]]

        # Calculate barycentric coordinates
        # NOTE(review): det is not guarded against zero here; the containing
        # element was already screened for degeneracy during the search.
        det = (y2 - y3) * (x1 - x3) + (x3 - x2) * (y1 - y3)
        lambda1 = ((y2 - y3) * (x - x3) + (x3 - x2) * (y - y3)) / det
        lambda2 = ((y3 - y1) * (x - x3) + (x1 - x3) * (y - y3)) / det
        lambda3 = 1.0 - lambda1 - lambda2

        # Interpolate using barycentric coordinates
        interpolated_value = lambda1 * v1 + lambda2 * v2 + lambda3 * v3

    elif elem_type == 6:  # Quadratic triangle
        # Get all 6 nodes: corners (0,1,2) and midpoints (3,4,5)
        # Node ordering: 0-1-2 corners, 3 midpoint of 0-1, 4 midpoint of 1-2, 5 midpoint of 2-0
        corner_nodes = [element[0], element[1], element[2]]
        midpoint_nodes = [element[3], element[4], element[5]]  # (unused; kept for clarity)

        # Get coordinates
        x1, y1 = nodes[corner_nodes[0]]  # Node 0
        x2, y2 = nodes[corner_nodes[1]]  # Node 1
        x3, y3 = nodes[corner_nodes[2]]  # Node 2

        # Calculate barycentric coordinates (L1, L2, L3)
        det = (y2 - y3) * (x1 - x3) + (x3 - x2) * (y1 - y3)
        L1 = ((y2 - y3) * (x - x3) + (x3 - x2) * (y - y3)) / det
        L2 = ((y3 - y1) * (x - x3) + (x1 - x3) * (y - y3)) / det
        L3 = 1.0 - L1 - L2

        # Quadratic shape functions for 6-node triangle
        # (standard straight-sided formulation in barycentric coordinates)
        N = np.zeros(6)
        N[0] = L1 * (2*L1 - 1)  # Corner node 0
        N[1] = L2 * (2*L2 - 1)  # Corner node 1
        N[2] = L3 * (2*L3 - 1)  # Corner node 2
        N[3] = 4 * L1 * L2      # Midpoint node 0-1
        N[4] = 4 * L2 * L3      # Midpoint node 1-2
        N[5] = 4 * L3 * L1      # Midpoint node 2-0

        # Interpolate using quadratic shape functions
        interpolated_value = 0.0
        for i in range(6):
            interpolated_value += N[i] * values[element[i]]

    elif elem_type == 4:  # Linear quadrilateral
        # Get quadrilateral vertices and values
        x1, y1 = nodes[element[0]]
        x2, y2 = nodes[element[1]]
        x3, y3 = nodes[element[2]]
        x4, y4 = nodes[element[3]]
        v1 = values[element[0]]
        v2 = values[element[1]]
        v3 = values[element[2]]
        v4 = values[element[3]]

        # Use proper bilinear shape functions for quadrilaterals
        # Map to natural coordinates (xi, eta) in [-1, 1] x [-1, 1]

        # For bilinear quad4, use iterative Newton-Raphson to find natural coordinates
        # Initial guess at element center
        xi, eta = 0.0, 0.0

        # Newton-Raphson iteration to find (xi, eta) such that physical coordinates match
        for _ in range(10):  # Max 10 iterations
            # Bilinear shape functions
            N = np.array([
                0.25 * (1-xi) * (1-eta),  # Node 0
                0.25 * (1+xi) * (1-eta),  # Node 1
                0.25 * (1+xi) * (1+eta),  # Node 2
                0.25 * (1-xi) * (1+eta)   # Node 3
            ])

            # Shape function derivatives
            dN_dxi = np.array([
                -0.25 * (1-eta),  # Node 0
                0.25 * (1-eta),   # Node 1
                0.25 * (1+eta),   # Node 2
                -0.25 * (1+eta)   # Node 3
            ])

            dN_deta = np.array([
                -0.25 * (1-xi),  # Node 0
                -0.25 * (1+xi),  # Node 1
                0.25 * (1+xi),   # Node 2
                0.25 * (1-xi)    # Node 3
            ])

            # Current physical coordinates
            x_curr = N[0]*x1 + N[1]*x2 + N[2]*x3 + N[3]*x4
            y_curr = N[0]*y1 + N[1]*y2 + N[2]*y3 + N[3]*y4

            # Residual
            fx = x_curr - x
            fy = y_curr - y

            if abs(fx) < 1e-10 and abs(fy) < 1e-10:
                break

            # Jacobian of the isoparametric map at (xi, eta)
            dx_dxi = dN_dxi[0]*x1 + dN_dxi[1]*x2 + dN_dxi[2]*x3 + dN_dxi[3]*x4
            dx_deta = dN_deta[0]*x1 + dN_deta[1]*x2 + dN_deta[2]*x3 + dN_deta[3]*x4
            dy_dxi = dN_dxi[0]*y1 + dN_dxi[1]*y2 + dN_dxi[2]*y3 + dN_dxi[3]*y4
            dy_deta = dN_deta[0]*y1 + dN_deta[1]*y2 + dN_deta[2]*y3 + dN_deta[3]*y4

            det_J = dx_dxi * dy_deta - dx_deta * dy_dxi
            if abs(det_J) < 1e-12:
                break  # singular Jacobian; keep current (xi, eta)

            # Newton-Raphson update (2x2 inverse applied to the residual)
            dxi = (dy_deta * fx - dx_deta * fy) / det_J
            deta = (-dy_dxi * fx + dx_dxi * fy) / det_J

            xi -= dxi
            eta -= deta

            # Clamp to [-1,1]
            xi = max(-1, min(1, xi))
            eta = max(-1, min(1, eta))

        # Final bilinear shape functions at the converged (xi, eta)
        N = np.array([
            0.25 * (1-xi) * (1-eta),  # Node 0
            0.25 * (1+xi) * (1-eta),  # Node 1
            0.25 * (1+xi) * (1+eta),  # Node 2
            0.25 * (1-xi) * (1+eta)   # Node 3
        ])

        # Interpolate using bilinear shape functions
        interpolated_value = N[0]*v1 + N[1]*v2 + N[2]*v3 + N[3]*v4

    elif elem_type == 8:  # Quadratic quadrilateral
        # Get all 8 nodes: corners (0,1,2,3) and midpoints (4,5,6,7)
        # Node ordering: 0-1-2-3 corners, 4 midpoint of 0-1, 5 midpoint of 1-2,
        # 6 midpoint of 2-3, 7 midpoint of 3-0

        # Get corner coordinates for mapping to natural coordinates
        x1, y1 = nodes[element[0]]  # Node 0
        x2, y2 = nodes[element[1]]  # Node 1
        x3, y3 = nodes[element[2]]  # Node 2
        x4, y4 = nodes[element[3]]  # Node 3

        # For quadratic quads, we need to map from physical (x,y) to natural coordinates (xi,eta)
        # This is complex for general quadrilaterals, so use simplified approach:
        # Map to unit square [-1,1] x [-1,1] using bilinear mapping of corners

        # Bilinear inverse mapping (approximate for general quads)
        # Solve for natural coordinates xi, eta in [-1,1] x [-1,1]

        # For simplicity, use area coordinate method similar to linear quad
        # but with quadratic shape functions

        # Calculate area coordinates (this is an approximation)
        A_total = 0.5 * abs((x3-x1)*(y4-y2) - (x4-x2)*(y3-y1))
        if A_total < 1e-12:
            # Degenerate element, fall back to linear
            A1 = abs((x - x1) * (y2 - y1) - (x2 - x1) * (y - y1)) / 2
            A2 = abs((x - x2) * (y3 - y2) - (x3 - x2) * (y - y2)) / 2
            A3 = abs((x - x3) * (y4 - y3) - (x4 - x3) * (y - y3)) / 2
            A4 = abs((x - x4) * (y1 - y4) - (x1 - x4) * (y - y4)) / 2
            A_sum = A1 + A2 + A3 + A4
            if A_sum > 1e-12:
                w1, w2, w3, w4 = A1/A_sum, A2/A_sum, A3/A_sum, A4/A_sum
            else:
                w1 = w2 = w3 = w4 = 0.25

            # Linear interpolation as fallback
            interpolated_value = (w1 * values[element[0]] + w2 * values[element[1]] +
                                  w3 * values[element[2]] + w4 * values[element[3]])
        else:
            # For proper quadratic interpolation, we need natural coordinates
            # This is a simplified implementation - full implementation would solve
            # the nonlinear system for xi,eta

            # Use parametric coordinates estimation
            # Map point to approximate natural coordinates
            # NOTE(review): this estimate is exact only for rectangles aligned
            # with the axes; for skewed quads it is an approximation.
            xi_approx = 2 * (x - 0.5*(x1+x3)) / (x2+x3-x1-x4) if abs(x2+x3-x1-x4) > 1e-12 else 0
            eta_approx = 2 * (y - 0.5*(y1+y3)) / (y2+y4-y1-y3) if abs(y2+y4-y1-y3) > 1e-12 else 0

            # Clamp to [-1,1]
            xi = max(-1, min(1, xi_approx))
            eta = max(-1, min(1, eta_approx))

            # Quadratic (serendipity) shape functions for 8-node quad in natural coordinates
            N = np.zeros(8)
            # Corner nodes
            N[0] = 0.25 * (1-xi) * (1-eta) * (-xi-eta-1)  # Node 0
            N[1] = 0.25 * (1+xi) * (1-eta) * (xi-eta-1)   # Node 1
            N[2] = 0.25 * (1+xi) * (1+eta) * (xi+eta-1)   # Node 2
            N[3] = 0.25 * (1-xi) * (1+eta) * (-xi+eta-1)  # Node 3
            # Midpoint nodes
            N[4] = 0.5 * (1-xi*xi) * (1-eta)   # Node 4 (midpoint 0-1)
            N[5] = 0.5 * (1+xi) * (1-eta*eta)  # Node 5 (midpoint 1-2)
            N[6] = 0.5 * (1-xi*xi) * (1+eta)   # Node 6 (midpoint 2-3)
            N[7] = 0.5 * (1-xi) * (1-eta*eta)  # Node 7 (midpoint 3-0)

            # Interpolate using quadratic shape functions
            interpolated_value = 0.0
            for i in range(8):
                interpolated_value += N[i] * values[element[i]]

    elif elem_type == 9:  # Biquadratic quadrilateral (9-node Lagrange)
        # Get all 9 nodes: corners (0,1,2,3), edges (4,5,6,7), and center (8)
        # Node ordering: 0-1-2-3 corners, 4 midpoint of 0-1, 5 midpoint of 1-2,
        # 6 midpoint of 2-3, 7 midpoint of 3-0, 8 center

        # Get corner coordinates for mapping to natural coordinates
        x1, y1 = nodes[element[0]]  # Node 0
        x2, y2 = nodes[element[1]]  # Node 1
        x3, y3 = nodes[element[2]]  # Node 2
        x4, y4 = nodes[element[3]]  # Node 3

        # Newton-Raphson iteration to find natural coordinates (xi, eta)
        xi, eta = 0.0, 0.0  # Initial guess at element center

        for _ in range(10):  # Max 10 iterations
            # Biquadratic Lagrange shape functions for all 9 nodes
            N = np.zeros(9)
            # Corner nodes
            N[0] = 0.25 * xi * (xi-1) * eta * (eta-1)  # Node 0: (-1,-1)
            N[1] = 0.25 * xi * (xi+1) * eta * (eta-1)  # Node 1: (1,-1)
            N[2] = 0.25 * xi * (xi+1) * eta * (eta+1)  # Node 2: (1,1)
            N[3] = 0.25 * xi * (xi-1) * eta * (eta+1)  # Node 3: (-1,1)
            # Edge nodes
            N[4] = 0.5 * (1-xi*xi) * eta * (eta-1)  # Node 4: (0,-1)
            N[5] = 0.5 * xi * (xi+1) * (1-eta*eta)  # Node 5: (1,0)
            N[6] = 0.5 * (1-xi*xi) * eta * (eta+1)  # Node 6: (0,1)
            N[7] = 0.5 * xi * (xi-1) * (1-eta*eta)  # Node 7: (-1,0)
            # Center node
            N[8] = (1-xi*xi) * (1-eta*eta)  # Node 8: (0,0)

            # Shape function derivatives w.r.t. xi
            dN_dxi = np.zeros(9)
            dN_dxi[0] = 0.25 * (2*xi-1) * eta * (eta-1)
            dN_dxi[1] = 0.25 * (2*xi+1) * eta * (eta-1)
            dN_dxi[2] = 0.25 * (2*xi+1) * eta * (eta+1)
            dN_dxi[3] = 0.25 * (2*xi-1) * eta * (eta+1)
            dN_dxi[4] = -xi * eta * (eta-1)
            dN_dxi[5] = 0.5 * (2*xi+1) * (1-eta*eta)
            dN_dxi[6] = -xi * eta * (eta+1)
            dN_dxi[7] = 0.5 * (2*xi-1) * (1-eta*eta)
            dN_dxi[8] = -2*xi * (1-eta*eta)

            # Shape function derivatives w.r.t. eta
            dN_deta = np.zeros(9)
            dN_deta[0] = 0.25 * xi * (xi-1) * (2*eta-1)
            dN_deta[1] = 0.25 * xi * (xi+1) * (2*eta-1)
            dN_deta[2] = 0.25 * xi * (xi+1) * (2*eta+1)
            dN_deta[3] = 0.25 * xi * (xi-1) * (2*eta+1)
            dN_deta[4] = 0.5 * (1-xi*xi) * (2*eta-1)
            dN_deta[5] = -eta * xi * (xi+1)
            dN_deta[6] = 0.5 * (1-xi*xi) * (2*eta+1)
            dN_deta[7] = -eta * xi * (xi-1)
            dN_deta[8] = -2*eta * (1-xi*xi)

            # Current physical coordinates using all 9 nodes
            node_coords = nodes[element[:9]]
            x_curr = np.sum(N * node_coords[:, 0])
            y_curr = np.sum(N * node_coords[:, 1])

            # Residual
            fx = x_curr - x
            fy = y_curr - y

            if abs(fx) < 1e-10 and abs(fy) < 1e-10:
                break

            # Jacobian of the isoparametric map at (xi, eta)
            dx_dxi = np.sum(dN_dxi * node_coords[:, 0])
            dx_deta = np.sum(dN_deta * node_coords[:, 0])
            dy_dxi = np.sum(dN_dxi * node_coords[:, 1])
            dy_deta = np.sum(dN_deta * node_coords[:, 1])

            det_J = dx_dxi * dy_deta - dx_deta * dy_dxi
            if abs(det_J) < 1e-12:
                break  # singular Jacobian; keep current (xi, eta)

            # Newton-Raphson update
            dxi = (dy_deta * fx - dx_deta * fy) / det_J
            deta = (-dy_dxi * fx + dx_dxi * fy) / det_J

            xi -= dxi
            eta -= deta

            # Clamp to [-1,1]
            xi = max(-1, min(1, xi))
            eta = max(-1, min(1, eta))

        # Final biquadratic shape functions at the converged (xi, eta)
        N = np.zeros(9)
        N[0] = 0.25 * xi * (xi-1) * eta * (eta-1)  # Node 0
        N[1] = 0.25 * xi * (xi+1) * eta * (eta-1)  # Node 1
        N[2] = 0.25 * xi * (xi+1) * eta * (eta+1)  # Node 2
        N[3] = 0.25 * xi * (xi-1) * eta * (eta+1)  # Node 3
        N[4] = 0.5 * (1-xi*xi) * eta * (eta-1)     # Node 4
        N[5] = 0.5 * xi * (xi+1) * (1-eta*eta)     # Node 5
        N[6] = 0.5 * (1-xi*xi) * eta * (eta+1)     # Node 6
        N[7] = 0.5 * xi * (xi-1) * (1-eta*eta)     # Node 7
        N[8] = (1-xi*xi) * (1-eta*eta)             # Node 8

        # Interpolate using biquadratic shape functions
        interpolated_value = 0.0
        for i in range(9):
            interpolated_value += N[i] * values[element[i]]

    else:
        return 0.0  # Unknown element type

    # Return zero if interpolated value is negative (pore pressure cannot be negative)
    return max(0.0, interpolated_value)
2411
+
2412
+
2413
+ def test_1d_element_alignment(mesh, reinforcement_lines, tolerance=1e-6, debug=True):
2414
+ """
2415
+ Test that 1D elements correctly align with reinforcement lines.
2416
+
2417
+ This function verifies that:
2418
+ 1. Each reinforcement line is represented by a sequence of 1D elements
2419
+ 2. The 1D elements form continuous paths along each reinforcement line
2420
+ 3. The element endpoints match the expected line segment endpoints
2421
+
2422
+ Parameters:
2423
+ mesh: Dictionary containing nodes and 1D element data
2424
+ reinforcement_lines: List of reinforcement lines, each containing coordinate tuples
2425
+ tolerance: Tolerance for coordinate comparison (default 1e-6)
2426
+ debug: Enable detailed debug output
2427
+
2428
+ Returns:
2429
+ bool: True if all tests pass, False otherwise
2430
+ """
2431
+ if debug:
2432
+ print("\n=== Testing 1D Element Alignment ===")
2433
+
2434
+ if 'elements_1d' not in mesh:
2435
+ print("ERROR: No 1D elements found in mesh")
2436
+ return False
2437
+
2438
+ elements_1d = mesh['elements_1d']
2439
+ if elements_1d is None or len(elements_1d) == 0:
2440
+ print("ERROR: No 1D elements found in mesh")
2441
+ return False
2442
+
2443
+ nodes = np.array(mesh['nodes'])
2444
+ elements_1d = mesh['elements_1d']
2445
+
2446
+ if debug:
2447
+ print(f"Testing {len(reinforcement_lines)} reinforcement lines")
2448
+ print(f"Found {len(elements_1d)} 1D elements")
2449
+
2450
+ success = True
2451
+
2452
+ for line_idx, line_pts in enumerate(reinforcement_lines):
2453
+ if debug:
2454
+ print(f"\nTesting line {line_idx}: {line_pts}")
2455
+
2456
+ # Remove duplicate endpoints and get expected segments
2457
+ line_pts_clean = remove_duplicate_endpoint(list(line_pts))
2458
+ if len(line_pts_clean) < 2:
2459
+ if debug:
2460
+ print(f" Skipping line {line_idx}: insufficient points")
2461
+ continue
2462
+
2463
+ # Expected segments for this line
2464
+ expected_segments = []
2465
+ for i in range(len(line_pts_clean) - 1):
2466
+ expected_segments.append((line_pts_clean[i], line_pts_clean[i + 1]))
2467
+
2468
+ if debug:
2469
+ print(f" Expected {len(expected_segments)} segments:")
2470
+ for i, (start, end) in enumerate(expected_segments):
2471
+ print(f" Segment {i}: {start} -> {end}")
2472
+
2473
+ # Find 1D elements that belong to this reinforcement line using material IDs
2474
+ line_elements = []
2475
+ if 'element_materials_1d' in mesh:
2476
+ element_materials_1d = mesh['element_materials_1d']
2477
+ for elem_idx, (element, material_id) in enumerate(zip(elements_1d, element_materials_1d)):
2478
+ # Skip zero-padded elements
2479
+ if len(element) < 2 or element[1] == 0:
2480
+ continue
2481
+
2482
+ # Check if this element belongs to the current line
2483
+ if material_id == line_idx + 1: # Material IDs are 1-based
2484
+ # Get element coordinates
2485
+ try:
2486
+ coord1 = nodes[element[0]]
2487
+ coord2 = nodes[element[1]]
2488
+ except IndexError:
2489
+ if debug:
2490
+ print(f" WARNING: Element {elem_idx} has invalid node indices {element[0]}, {element[1]}")
2491
+ continue
2492
+
2493
+ line_elements.append((elem_idx, coord1, coord2))
2494
+ else:
2495
+ # Fallback: use the old method if material IDs are not available
2496
+ for elem_idx, element in enumerate(elements_1d):
2497
+ # Skip zero-padded elements
2498
+ if len(element) < 2 or element[1] == 0:
2499
+ continue
2500
+
2501
+ # Get element coordinates
2502
+ try:
2503
+ coord1 = nodes[element[0]]
2504
+ coord2 = nodes[element[1]]
2505
+ except IndexError:
2506
+ if debug:
2507
+ print(f" WARNING: Element {elem_idx} has invalid node indices {element[0]}, {element[1]}")
2508
+ continue
2509
+
2510
+ # Check if this element lies on the current reinforcement line
2511
+ if is_edge_on_reinforcement_line(coord1, coord2, line_pts_clean, tolerance):
2512
+ line_elements.append((elem_idx, coord1, coord2))
2513
+
2514
+ if debug:
2515
+ print(f" Found {len(line_elements)} 1D elements on this line:")
2516
+ for elem_idx, coord1, coord2 in line_elements:
2517
+ print(f" Element {elem_idx}: {coord1} -> {coord2}")
2518
+
2519
+ # Test 1: Check that we have at least some 1D elements for this line
2520
+ if len(line_elements) == 0:
2521
+ print(f"ERROR: Line {line_idx} has no 1D elements")
2522
+ success = False
2523
+ continue
2524
+
2525
+ # Test 2: Check that we have reasonable number of elements
2526
+ # After intersection preprocessing, we may have more elements than original segments
2527
+ # But we should have at least some elements for each line
2528
+ if len(line_elements) == 0:
2529
+ print(f"ERROR: Line {line_idx} has no 1D elements")
2530
+ success = False
2531
+ continue
2532
+
2533
+ # Test 2: Check if elements form continuous path
2534
+ if len(line_elements) > 1:
2535
+ # Sort elements to form continuous sequence
2536
+ sorted_elements = []
2537
+ remaining_elements = line_elements.copy()
2538
+
2539
+ # Start with first element
2540
+ current_elem = remaining_elements.pop(0)
2541
+ sorted_elements.append(current_elem)
2542
+
2543
+ # Build chain by finding connecting elements
2544
+ while remaining_elements:
2545
+ last_coord = sorted_elements[-1][2] # End coordinate of last element
2546
+
2547
+ # Find next element that starts where last one ended
2548
+ found_next = False
2549
+ for i, (elem_idx, coord1, coord2) in enumerate(remaining_elements):
2550
+ if np.linalg.norm(np.array(coord1) - np.array(last_coord)) < tolerance:
2551
+ sorted_elements.append((elem_idx, coord1, coord2))
2552
+ remaining_elements.pop(i)
2553
+ found_next = True
2554
+ break
2555
+ elif np.linalg.norm(np.array(coord2) - np.array(last_coord)) < tolerance:
2556
+ # Element is reversed, flip it
2557
+ sorted_elements.append((elem_idx, coord2, coord1))
2558
+ remaining_elements.pop(i)
2559
+ found_next = True
2560
+ break
2561
+
2562
+ if not found_next:
2563
+ print(f"ERROR: Line {line_idx} elements do not form continuous path")
2564
+ print(f" Cannot connect from {last_coord}")
2565
+ print(f" Remaining elements: {remaining_elements}")
2566
+ success = False
2567
+ break
2568
+
2569
+ line_elements = sorted_elements
2570
+
2571
+ # Test 3: Check that the 1D elements cover the reinforcement line from start to end
2572
+ if len(line_elements) > 0:
2573
+ # Get the start and end points of the reinforcement line
2574
+ line_start = line_pts_clean[0]
2575
+ line_end = line_pts_clean[-1]
2576
+
2577
+ # Find the first and last 1D elements
2578
+ first_elem = line_elements[0]
2579
+ last_elem = line_elements[-1]
2580
+
2581
+ # Check if the first element starts near the line start
2582
+ first_start_dist = np.linalg.norm(np.array(first_elem[1]) - np.array(line_start))
2583
+ first_end_dist = np.linalg.norm(np.array(first_elem[2]) - np.array(line_start))
2584
+
2585
+ # Check if the last element ends near the line end
2586
+ last_start_dist = np.linalg.norm(np.array(last_elem[1]) - np.array(line_end))
2587
+ last_end_dist = np.linalg.norm(np.array(last_elem[2]) - np.array(line_end))
2588
+
2589
+ # The first element should start near the line start (either direction)
2590
+ # Be more flexible due to intersection preprocessing
2591
+ if first_start_dist > tolerance * 10 and first_end_dist > tolerance * 10:
2592
+ print(f"WARNING: Line {line_idx} first element does not start at line start")
2593
+ print(f" Line start: {line_start}")
2594
+ print(f" First element: {first_elem[1]} -> {first_elem[2]}")
2595
+ print(f" Start distances: {first_start_dist:.2e}, {first_end_dist:.2e}")
2596
+ # Don't fail the test for this - just warn
2597
+
2598
+ # The last element should end near the line end (either direction)
2599
+ # Be more flexible due to intersection preprocessing
2600
+ if last_start_dist > tolerance * 10 and last_end_dist > tolerance * 10:
2601
+ print(f"WARNING: Line {line_idx} last element does not end at line end")
2602
+ print(f" Line end: {line_end}")
2603
+ print(f" Last element: {last_elem[1]} -> {last_elem[2]}")
2604
+ print(f" End distances: {last_start_dist:.2e}, {last_end_dist:.2e}")
2605
+ # Don't fail the test for this - just warn
2606
+
2607
+ # Test 4: Check that line path is continuous
2608
+ if len(line_elements) > 1:
2609
+ for i in range(len(line_elements) - 1):
2610
+ end_coord = line_elements[i][2] # End of current element
2611
+ start_coord = line_elements[i + 1][1] # Start of next element
2612
+
2613
+ gap = np.linalg.norm(np.array(end_coord) - np.array(start_coord))
2614
+ if gap > tolerance:
2615
+ print(f"ERROR: Line {line_idx} has gap between elements {i} and {i+1}")
2616
+ print(f" Gap size: {gap:.2e}")
2617
+ print(f" Element {i} end: {end_coord}")
2618
+ print(f" Element {i+1} start: {start_coord}")
2619
+ success = False
2620
+
2621
+ if debug and success:
2622
+ print(f" ✓ Line {line_idx} passes all alignment tests")
2623
+
2624
+ if debug:
2625
+ if success:
2626
+ print("\n=== All 1D Element Alignment Tests PASSED ===")
2627
+ else:
2628
+ print("\n=== 1D Element Alignment Tests FAILED ===")
2629
+
2630
+ return success
2631
+
2632
def add_intersection_points_to_polygons(polygons, lines, debug=False):
    """
    Add intersection points between reinforcement lines and polygon edges to
    the polygon vertex lists.

    This ensures that polygons have vertices at all intersection points with
    reinforcement lines, so the mesher can honor those points as hard nodes.

    Parameters:
        polygons: List of polygons (lists of (x, y) tuples)
        lines: List of reinforcement lines (lists of (x, y) tuples)
        debug: Enable debug output

    Returns:
        Updated list of polygons with intersection points added
    """
    if not lines:
        return polygons

    if debug:
        print("Adding intersection points to polygons...")

    tol = 1e-8  # same tolerance used for duplicate-vertex detection

    # Flatten all reinforcement lines into individual segments up front,
    # keeping (line, segment) indices for debug reporting.
    segments = []
    for line_idx, line_pts in enumerate(lines):
        line_pts_clean = remove_duplicate_endpoint(list(line_pts))
        if debug:
            print(f"Processing line {line_idx}: {line_pts_clean}")
        for seg_idx in range(len(line_pts_clean) - 1):
            segments.append((line_idx, seg_idx,
                             line_pts_clean[seg_idx], line_pts_clean[seg_idx + 1]))

    updated_polygons = []
    for poly_idx, poly in enumerate(polygons):
        original = list(poly)
        new_poly = []
        for j, edge_start in enumerate(original):
            edge_end = original[(j + 1) % len(original)]
            new_poly.append(edge_start)

            # Collect every intersection on this edge BEFORE modifying the
            # polygon. (The previous implementation inserted into the list it
            # was iterating with a stale range(len(poly)), which could skip or
            # re-test edges after an insertion.)
            hits = []
            for line_idx, seg_idx, seg_start, seg_end in segments:
                intersection = line_segment_intersection(
                    seg_start, seg_end, edge_start, edge_end
                )
                if not intersection:
                    continue

                if debug:
                    print(f"Found intersection {intersection} between line {line_idx} segment {seg_idx} and polygon {poly_idx} edge {j}")

                # Skip points that coincide with an existing polygon vertex
                if any(abs(vx - intersection[0]) < tol and abs(vy - intersection[1]) < tol
                       for vx, vy in original):
                    continue
                # Skip duplicates of a hit already collected for this edge
                if any(abs(hx - intersection[0]) < tol and abs(hy - intersection[1]) < tol
                       for hx, hy in hits):
                    continue

                hits.append(intersection)

            # Insert the hits ordered by distance from the edge start so the
            # polygon boundary stays properly ordered even when one edge is
            # crossed by several reinforcement segments.
            hits.sort(key=lambda p: (p[0] - edge_start[0]) ** 2 + (p[1] - edge_start[1]) ** 2)
            for pt in hits:
                if debug:
                    print(f"Added intersection point {pt} to polygon {poly_idx} at position {len(new_poly)}")
                new_poly.append(pt)

        updated_polygons.append(new_poly)

    return updated_polygons
2702
+
2703
def extract_reinforcement_line_geometry(slope_data):
    """
    Extract reinforcement line geometry from slope_data in the format needed for mesh generation.

    Parameters:
        slope_data: Dictionary containing slope data with 'reinforce_lines' key

    Returns:
        List of reinforcement lines, where each line is a list of (x, y) coordinate tuples
    """
    reinforce_lines = slope_data.get('reinforce_lines')
    if not reinforce_lines:
        return []
    # Each point arrives as a dict with 'X'/'Y' keys; convert to plain tuples.
    return [
        [(point['X'], point['Y']) for point in line]
        for line in reinforce_lines
    ]