xslope 0.1.10__py3-none-any.whl → 0.1.12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- xslope/_version.py +1 -1
- xslope/fileio.py +130 -60
- xslope/mesh.py +109 -4
- xslope/plot.py +661 -68
- xslope/plot_fem.py +25 -73
- xslope/plot_seep.py +21 -69
- xslope/seep.py +11 -6
- {xslope-0.1.10.dist-info → xslope-0.1.12.dist-info}/METADATA +1 -1
- xslope-0.1.12.dist-info/RECORD +21 -0
- xslope-0.1.10.dist-info/RECORD +0 -21
- {xslope-0.1.10.dist-info → xslope-0.1.12.dist-info}/LICENSE +0 -0
- {xslope-0.1.10.dist-info → xslope-0.1.12.dist-info}/NOTICE +0 -0
- {xslope-0.1.10.dist-info → xslope-0.1.12.dist-info}/WHEEL +0 -0
- {xslope-0.1.10.dist-info → xslope-0.1.12.dist-info}/top_level.txt +0 -0
xslope/_version.py
CHANGED
xslope/fileio.py
CHANGED
@@ -142,7 +142,8 @@ def load_slope_data(filepath):
 
     profile_data_blocks = [
         {"header_row": 4, "data_start": 5, "data_end": 20},
-        {"header_row": 22, "data_start": 23, "data_end": 38}
+        {"header_row": 22, "data_start": 23, "data_end": 38},
+        {"header_row": 40, "data_start": 41, "data_end": 56},
     ]
     profile_block_width = 3
 
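Each block dictionary gives the header row and the data row range for one table in the profile sheet; 0.1.12 adds a third block further down the sheet. As a standalone illustration only (the helper name, the sheet name, and the assumption that data_end is an inclusive 0-based offset are mine, not taken from xslope), a block like this can be sliced out of a sheet read with header=None:

import pandas as pd

def read_profile_block(raw: pd.DataFrame, block: dict, width: int = 3) -> pd.DataFrame:
    # Hypothetical helper: pull one header row plus its data rows, keeping `width` columns.
    data = raw.iloc[block["data_start"]:block["data_end"] + 1, :width].copy()
    data.columns = list(raw.iloc[block["header_row"], :width])
    return data.dropna(how="all")

raw = pd.read_excel("inputs.xlsx", sheet_name="profile", header=None)  # placeholder path and sheet name
blocks = [
    {"header_row": 4, "data_start": 5, "data_end": 20},
    {"header_row": 22, "data_start": 23, "data_end": 38},
    {"header_row": 40, "data_start": 41, "data_end": 56},
]
profile_tables = [read_profile_block(raw, b) for b in blocks]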
@@ -182,35 +183,62 @@ def load_slope_data(filepath):
     mat_df = xls.parse('mat', header=2) # header=2 because the header row is row 3 in Excel
     materials = []
 
-
-
-
-
-
+    required_materials = len(profile_lines)
+
+    def _num(x):
+        v = pd.to_numeric(x, errors="coerce")
+        return float(v) if pd.notna(v) else 0.0
+
+    # Read exactly one material row per profile line.
+    # Materials are positional: Excel row 4 corresponds to profile line 1, row 5 to line 2, etc.
+    for i in range(required_materials):
+        # Excel row number: header is on row 3, first data row is row 4
+        excel_row = i + 4
+
+        if i >= len(mat_df):
+            raise ValueError(
+                "CRITICAL ERROR: Materials table ended early. "
+                f"Expected {required_materials} materials for {required_materials} profile lines, "
+                f"but ran out of rows at Excel row {excel_row}."
+            )
+
+        row = mat_df.iloc[i]
+        # For seepage workflows, 'g' (unit weight) and shear strength properties are not required.
+        # A material row is considered "missing" only if Excel columns C:X are empty.
+        # (Excel A:B are number and name; C:X contain the actual property fields.)
+        start_col = 2 # C
+        end_col = min(mat_df.shape[1], 24) # X is column 24 (1-based) -> index 23, so slice end is 24
+        c_to_x_empty = True if start_col >= end_col else row.iloc[start_col:end_col].isna().all()
+        if c_to_x_empty:
+            raise ValueError(
+                "CRITICAL ERROR: Missing material row for a profile line. "
+                f"Material {i+1} of {required_materials} is blank in columns C:X (Excel row {excel_row})."
+            )
+
         materials.append({
             "name": row.get('name', ''),
-            "gamma":
+            "gamma": _num(row.get("g", 0)),
             "option": str(row.get('option', '')).strip().lower(),
-            "c":
-            "phi":
-            "cp":
-            "r_elev":
-            "d":
-            "psi":
+            "c": _num(row.get('c', 0)),
+            "phi": _num(row.get('f', 0)),
+            "cp": _num(row.get('cp', 0)),
+            "r_elev": _num(row.get('r-elev', 0)),
+            "d": _num(row.get('d', 0)) if pd.notna(row.get('d')) else 0,
+            "psi": _num(row.get('ψ', 0)) if pd.notna(row.get('ψ')) else 0,
             "u": str(row.get('u', 'none')).strip().lower(),
-            "sigma_gamma":
-            "sigma_c":
-            "sigma_phi":
-            "sigma_cp":
-            "sigma_d":
-            "sigma_psi":
-            "k1":
-            "k2":
-            "alpha":
-            "kr0" :
-            "h0" :
-            "E":
-            "nu":
+            "sigma_gamma": _num(row.get('s(g)', 0)),
+            "sigma_c": _num(row.get('s(c)', 0)),
+            "sigma_phi": _num(row.get('s(f)', 0)),
+            "sigma_cp": _num(row.get('s(cp)', 0)),
+            "sigma_d": _num(row.get('s(d)', 0)),
+            "sigma_psi": _num(row.get('s(ψ)', 0)),
+            "k1": _num(row.get('k1', 0)),
+            "k2": _num(row.get('k2', 0)),
+            "alpha": _num(row.get('alpha', 0)),
+            "kr0" : _num(row.get('kr0', 0)),
+            "h0" : _num(row.get('h0', 0)),
+            "E": _num(row.get('E', 0)),
+            "nu": _num(row.get('n', 0))
         })
 
     # === SEEPAGE ANALYSIS FILES ===
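The rewritten materials loop reads exactly one row per profile line and funnels every numeric property through the new _num() helper, so blank or non-numeric cells become 0.0 instead of raising. A self-contained sketch of that coercion behavior (the cell values below are made up):

import pandas as pd

def _num(x):
    # Same coercion as the helper added above: NaN or unparsable input -> 0.0
    v = pd.to_numeric(x, errors="coerce")
    return float(v) if pd.notna(v) else 0.0

row = pd.Series({"g": "120.5", "c": None, "f": "n/a", "kr0": 1e-5})
print(_num(row.get("g")))    # 120.5
print(_num(row.get("c")))    # 0.0  (blank cell)
print(_num(row.get("f")))    # 0.0  (non-numeric text)
print(_num(row.get("kr0")))  # 1e-05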
@@ -222,21 +250,21 @@ def load_slope_data(filepath):
     seep_u2 = None
 
     if has_seep_materials:
-        # Read seepage file names directly from Excel cells
+        # Read seepage file names directly from Excel cells L22, L23, L24
         try:
             # Read the 'mat' sheet directly without header parsing
             mat_raw_df = xls.parse('mat', header=None)
 
-            #
-            mesh_filename = str(mat_raw_df.iloc[
-            solution1_filename = str(mat_raw_df.iloc[
-            solution2_filename = str(mat_raw_df.iloc[
+            # L22 = row 21, column 11 (0-indexed)
+            mesh_filename = str(mat_raw_df.iloc[21, 11]).strip() # L22
+            solution1_filename = str(mat_raw_df.iloc[22, 11]).strip() # L23
+            solution2_filename = str(mat_raw_df.iloc[23, 11]).strip() # L24
 
             # Validate required files
             if not mesh_filename or mesh_filename.lower() == 'nan':
-                raise ValueError("CRITICAL ERROR: Mesh filename is required when using 'seep' pore pressure option but is blank in cell
+                raise ValueError("CRITICAL ERROR: Mesh filename is required when using 'seep' pore pressure option but is blank in cell L22.")
             if not solution1_filename or solution1_filename.lower() == 'nan':
-                raise ValueError("CRITICAL ERROR: Solution1 filename is required when using 'seep' pore pressure option but is blank in cell
+                raise ValueError("CRITICAL ERROR: Solution1 filename is required when using 'seep' pore pressure option but is blank in cell L23.")
 
             # Load mesh file
             if not os.path.exists(mesh_filename):
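The new code pins the three file names to fixed cells, using the usual A1-to-0-based mapping (cell L22 is row 22 and column L = 12, hence iloc[21, 11]). A small, hypothetical converter (not part of xslope) that reproduces that mapping:

import re

def excel_cell_to_iloc(ref: str) -> tuple[int, int]:
    # Convert an A1-style reference such as 'L22' into 0-based (row, col) indices for DataFrame.iloc.
    letters, digits = re.fullmatch(r"([A-Za-z]+)(\d+)", ref).groups()
    col = 0
    for ch in letters.upper():
        col = col * 26 + (ord(ch) - ord("A") + 1)
    return int(digits) - 1, col - 1

print(excel_cell_to_iloc("L22"))  # (21, 11) -> mat_raw_df.iloc[21, 11]
print(excel_cell_to_iloc("L24"))  # (23, 11)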
@@ -510,43 +538,51 @@ def load_slope_data(filepath):
     # === SEEPAGE ANALYSIS BOUNDARY CONDITIONS ===
     seep_df = xls.parse('seep bc', header=None)
     seepage_bc = {"specified_heads": [], "exit_face": []}
+    seepage_bc2 = {"specified_heads": [], "exit_face": []}
+
+    def _read_specified_head_block(
+        df,
+        head_row: int,
+        head_col: int,
+        x_col: int,
+        y_col: int,
+        data_start_row: int,
+        data_end_row: int,
+    ):
+        """Read a specified-head block; returns (head_value, coords_list)."""
+        head_val = (
+            df.iloc[head_row, head_col]
+            if df.shape[0] > head_row and df.shape[1] > head_col
+            else None
+        )
+        coords = []
+        for r in range(data_start_row, data_end_row):
+            if r >= df.shape[0]:
+                break
+            x = df.iloc[r, x_col] if df.shape[1] > x_col else None
+            y = df.iloc[r, y_col] if df.shape[1] > y_col else None
+            if pd.notna(x) and pd.notna(y):
+                coords.append((float(x), float(y)))
+        return head_val, coords
 
     # Specified Head #1
-    head1
-
-
-        if i >= seep_df.shape[0]:
-            break
-        x = seep_df.iloc[i, 1] if seep_df.shape[1] > 1 else None
-        y = seep_df.iloc[i, 2] if seep_df.shape[1] > 2 else None
-        if pd.notna(x) and pd.notna(y):
-            coords1.append((float(x), float(y)))
+    head1, coords1 = _read_specified_head_block(
+        seep_df, head_row=2, head_col=2, x_col=1, y_col=2, data_start_row=4, data_end_row=12
+    )
     if head1 is not None and coords1:
         seepage_bc["specified_heads"].append({"head": float(head1), "coords": coords1})
 
     # Specified Head #2
-    head2
-
-
-        if i >= seep_df.shape[0]:
-            break
-        x = seep_df.iloc[i, 4] if seep_df.shape[1] > 4 else None
-        y = seep_df.iloc[i, 5] if seep_df.shape[1] > 5 else None
-        if pd.notna(x) and pd.notna(y):
-            coords2.append((float(x), float(y)))
+    head2, coords2 = _read_specified_head_block(
+        seep_df, head_row=2, head_col=5, x_col=4, y_col=5, data_start_row=4, data_end_row=12
+    )
     if head2 is not None and coords2:
         seepage_bc["specified_heads"].append({"head": float(head2), "coords": coords2})
 
     # Specified Head #3
-    head3
-
-
-        if i >= seep_df.shape[0]:
-            break
-        x = seep_df.iloc[i, 7] if seep_df.shape[1] > 7 else None
-        y = seep_df.iloc[i, 8] if seep_df.shape[1] > 8 else None
-        if pd.notna(x) and pd.notna(y):
-            coords3.append((float(x), float(y)))
+    head3, coords3 = _read_specified_head_block(
+        seep_df, head_row=2, head_col=8, x_col=7, y_col=8, data_start_row=4, data_end_row=12
+    )
     if head3 is not None and coords3:
         seepage_bc["specified_heads"].append({"head": float(head3), "coords": coords3})
 
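The three hand-rolled read loops are replaced by a single parameterized helper. A self-contained sketch of what the first call reads, using a synthetic stand-in for the 'seep bc' sheet (the head value and coordinates are invented):

import numpy as np
import pandas as pd

# Mimic the first specified-head block: head value at iloc[2, 2], x/y pairs in columns 1/2 of rows 4-11.
raw = pd.DataFrame(np.nan, index=range(12), columns=range(12), dtype=object)
raw.iloc[2, 2] = 150.0
raw.iloc[4, 1] = 10.0
raw.iloc[4, 2] = 150.0
raw.iloc[5, 1] = 40.0
raw.iloc[5, 2] = 150.0

head = raw.iloc[2, 2] if raw.shape[0] > 2 and raw.shape[1] > 2 else None
coords = [
    (float(raw.iloc[r, 1]), float(raw.iloc[r, 2]))
    for r in range(4, 12)
    if pd.notna(raw.iloc[r, 1]) and pd.notna(raw.iloc[r, 2])
]
print(head, coords)  # 150.0 [(10.0, 150.0), (40.0, 150.0)]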
@@ -561,6 +597,39 @@ def load_slope_data(filepath):
             exit_coords.append((float(x), float(y)))
     seepage_bc["exit_face"] = exit_coords
 
+    # --- RAPID DRAWDOWN BCs (second set) ---
+    # User-added second set starts at:
+    # - Specified Head #1: head in C26, coords in B28:C35
+    head1b, coords1b = _read_specified_head_block(
+        seep_df, head_row=25, head_col=2, x_col=1, y_col=2, data_start_row=27, data_end_row=35
+    )
+    if head1b is not None and coords1b:
+        seepage_bc2["specified_heads"].append({"head": float(head1b), "coords": coords1b})
+
+    # Mirror the same layout for the other two specified-head blocks (same columns as the first set)
+    head2b, coords2b = _read_specified_head_block(
+        seep_df, head_row=25, head_col=5, x_col=4, y_col=5, data_start_row=27, data_end_row=35
+    )
+    if head2b is not None and coords2b:
+        seepage_bc2["specified_heads"].append({"head": float(head2b), "coords": coords2b})
+
+    head3b, coords3b = _read_specified_head_block(
+        seep_df, head_row=25, head_col=8, x_col=7, y_col=8, data_start_row=27, data_end_row=35
+    )
+    if head3b is not None and coords3b:
+        seepage_bc2["specified_heads"].append({"head": float(head3b), "coords": coords3b})
+
+    # Exit Face #2: positioned lower on the sheet (same columns as the first exit face block)
+    exit_coords2 = []
+    for i in range(38, 46): # rows 39-46 (0-indexed 38-45)
+        if i >= seep_df.shape[0]:
+            break
+        x = seep_df.iloc[i, 1] if seep_df.shape[1] > 1 else None
+        y = seep_df.iloc[i, 2] if seep_df.shape[1] > 2 else None
+        if pd.notna(x) and pd.notna(y):
+            exit_coords2.append((float(x), float(y)))
+    seepage_bc2["exit_face"] = exit_coords2
+
     # === VALIDATION ===
 
     circular = len(circles) > 0
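The second (rapid drawdown) set is collected into seepage_bc2 with the same shape as seepage_bc, and the next hunk stores it under globals_data["seepage_bc2"]. For orientation, the resulting structure looks like this (all numbers are placeholders, not data from any real sheet):

seepage_bc2 = {
    "specified_heads": [
        {"head": 135.0, "coords": [(10.0, 135.0), (40.0, 135.0)]},
    ],
    "exit_face": [(60.0, 120.0), (80.0, 100.0)],
}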
@@ -600,6 +669,7 @@ def load_slope_data(filepath):
     globals_data["dloads2"] = dloads2
     globals_data["reinforce_lines"] = reinforce_lines
     globals_data["seepage_bc"] = seepage_bc
+    globals_data["seepage_bc2"] = seepage_bc2
 
     # Add seepage data if available
     if has_seep_materials:
xslope/mesh.py
CHANGED
@@ -1206,7 +1206,7 @@ def get_quad_mesh_presets():
 
 
 
-def build_polygons(slope_data, reinf_lines=None, debug=False):
+def build_polygons(slope_data, reinf_lines=None, tol = 0.000001, debug=False):
     """
     Build material zone polygons from slope_data.
 
@@ -1237,7 +1237,6 @@ def build_polygons(slope_data, reinf_lines=None, debug=False):
 
     n = len(profile_lines)
     lines = [list(line) for line in copy.deepcopy(profile_lines)]
-    tol = 1e-8
 
     for i in range(n - 1):
         top = lines[i]
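Taken together, these two hunks turn the merge tolerance into a parameter: the hard-coded tol = 1e-8 is removed and build_polygons now accepts tol with a default of 0.000001 (1e-6). A hedged usage sketch; the import paths are simply the module locations shown in this diff and the file path is a placeholder:

from xslope.fileio import load_slope_data
from xslope.mesh import build_polygons

slope_data = load_slope_data("inputs.xlsx")      # placeholder path to the Excel input workbook
polygons = build_polygons(slope_data, tol=1e-6)  # same as the new default; adjust to tune vertex merging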
@@ -1432,6 +1431,93 @@ def build_polygons(slope_data, reinf_lines=None, debug=False):
         # Return the lowest y value
         y_min = min(y_values)
         return y_min, is_at_endpoint
+
+    def find_projected_y_at_x(line_points, x_query, y_ref, side, tol=1e-8):
+        """
+        For vertical endpoint projections: choose the intersection y at x_query that is
+        closest *below* the point we're projecting from.
+
+        This fixes the case where a candidate profile has a vertical segment at x_query
+        (e.g., (260,229) then (260,202)). In that situation, using the "lowest y" (202)
+        is wrong; we want the first hit when projecting downward (229).
+
+        Behavior is intentionally conservative:
+        - If there is at least one intersection strictly below y_ref, return the highest of those.
+        - Otherwise fall back to the original behavior (lowest y), preserving legacy behavior
+          in edge cases (e.g., coincident/above intersections).
+        """
+        # Reuse the exact same intersection enumeration logic as find_lowest_y_at_x,
+        # but keep the full set of y-values.
+        if not line_points:
+            return None, False
+
+        xs = np.array([x for x, y in line_points])
+        ys = np.array([y for x, y in line_points])
+
+        if xs[0] - tol > x_query or xs[-1] + tol < x_query:
+            return None, False
+
+        is_at_left_endpoint = abs(x_query - xs[0]) < tol
+        is_at_right_endpoint = abs(x_query - xs[-1]) < tol
+        is_at_endpoint = is_at_left_endpoint or is_at_right_endpoint
+
+        y_values = []
+        for k in range(len(line_points)):
+            if abs(xs[k] - x_query) < tol:
+                y_values.append(float(ys[k]))
+
+        for k in range(len(line_points) - 1):
+            x1, y1 = line_points[k]
+            x2, y2 = line_points[k + 1]
+
+            if abs(x1 - x_query) < tol and abs(x2 - x_query) < tol:
+                y_values.append(float(y1))
+                y_values.append(float(y2))
+            elif min(x1, x2) - tol <= x_query <= max(x1, x2) + tol:
+                if abs(x2 - x1) < tol:
+                    y_values.append(float(y1))
+                    y_values.append(float(y2))
+                else:
+                    t = (x_query - x1) / (x2 - x1)
+                    if 0 <= t <= 1:
+                        y_values.append(float(y1 + t * (y2 - y1)))
+
+        if not y_values:
+            return None, False
+
+        # If the polyline has multiple *vertices* exactly at this x (vertical segment / duplicate-x),
+        # use a deterministic selection based on which side we are projecting from:
+        #   - projecting from LEFT endpoint of the upper line: keep the LAST y encountered
+        #   - projecting from RIGHT endpoint of the upper line: keep the FIRST y encountered
+        #
+        # This matches the intended "walk along the lower boundary" behavior and fixes cases like:
+        #   - right projection at x=260 with vertices (260,229) then (260,202): choose 229 (first)
+        #   - left projection at x=240 with vertices (240,140) then (240,190): choose 190 (last)
+        vertex_y_at_x = [float(y) for (x, y) in line_points if abs(x - x_query) < tol]
+        if len(vertex_y_at_x) >= 2:
+            if side == "right":
+                # first encountered vertex at this x
+                y_pick = vertex_y_at_x[0]
+                # If we are exactly on a vertex at y_ref, that is the first hit.
+                if abs(y_pick - y_ref) < tol:
+                    return float(y_ref), is_at_endpoint
+                if y_pick < (y_ref - tol):
+                    return y_pick, is_at_endpoint
+            elif side == "left":
+                # last encountered vertex at this x
+                y_pick = vertex_y_at_x[-1]
+                # If we are exactly on a vertex at y_ref, that is the first hit.
+                if abs(y_pick - y_ref) < tol:
+                    return float(y_ref), is_at_endpoint
+                if y_pick < (y_ref - tol):
+                    return y_pick, is_at_endpoint
+
+        y_below = [y for y in y_values if y < (y_ref - tol)]
+        if y_below:
+            return max(y_below), is_at_endpoint
+
+        # Fall back to legacy behavior
+        return min(y_values), is_at_endpoint
 
     # Project endpoints - find highest lower profile or use max_depth
     # When projecting right side: if intersection is at left end of lower line,
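find_projected_y_at_x appears to be a nested helper inside build_polygons (per the hunk context above), so it cannot be imported directly, but the selection rule its docstring describes is easy to illustrate standalone. The vertical-segment vertices below come from the docstring's examples; the other points are invented context:

tol = 1e-8

# Right-side projection example: vertical segment (260, 229) -> (260, 202).
right_line = [(200.0, 240.0), (260.0, 229.0), (260.0, 202.0), (320.0, 200.0)]
ys_at_260 = [y for (x, y) in right_line if abs(x - 260.0) < tol]
print(ys_at_260[0])   # 229.0 -- first vertex hit is kept when projecting down from the right endpoint

# Left-side projection example: vertical segment (240, 140) -> (240, 190).
left_line = [(240.0, 140.0), (240.0, 190.0), (300.0, 185.0)]
ys_at_240 = [y for (x, y) in left_line if abs(x - 240.0) < tol]
print(ys_at_240[-1])  # 190.0 -- last vertex hit is kept for a left-side projection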
@@ -1445,7 +1531,7 @@ def build_polygons(slope_data, reinf_lines=None, debug=False):
 
         # Check left endpoint projection
         if xs_cand[0] - tol <= left_x <= xs_cand[-1] + tol:
-            y_cand, is_at_endpoint =
+            y_cand, is_at_endpoint = find_projected_y_at_x(lower_candidate, left_x, left_y, side="left", tol=tol)
             if y_cand is not None:
                 # If intersection is at the right end of the lower line, add point but continue
                 if is_at_endpoint and abs(left_x - xs_cand[-1]) < tol: # At right endpoint
@@ -1458,7 +1544,7 @@ def build_polygons(slope_data, reinf_lines=None, debug=False):
 
         # Check right endpoint projection
        if xs_cand[0] - tol <= right_x <= xs_cand[-1] + tol:
-            y_cand, is_at_endpoint =
+            y_cand, is_at_endpoint = find_projected_y_at_x(lower_candidate, right_x, right_y, side="right", tol=tol)
             if y_cand is not None:
                 # If intersection is at the left end of the lower line, add point but continue
                 if is_at_endpoint and abs(right_x - xs_cand[0]) < tol: # At left endpoint
@@ -1474,6 +1560,25 @@ def build_polygons(slope_data, reinf_lines=None, debug=False):
             left_y_bot = max_depth if max_depth is not None else -np.inf
         if right_y_bot == -np.inf:
             right_y_bot = max_depth if max_depth is not None else -np.inf
+
+        # Filter vertical-edge "continue projecting" points so we only keep points that
+        # actually lie on the final vertical edge between the top and bottom of this zone.
+        #
+        # Without this, a deeper left-endpoint intersection (e.g., (240,190) at the left
+        # endpoint of some deeper line) can be appended to right_vertical_points even after
+        # we've already found the correct bottom (e.g., right_y_bot=229). That creates the
+        # dangling vertical segment you observed.
+        if right_y_bot != -np.inf:
+            right_vertical_points = [
+                (x, y) for (x, y) in right_vertical_points
+                if (y < right_y - tol) and (y > right_y_bot + tol)
+            ]
+        if left_y_bot != -np.inf:
+            # Left edge runs from bottom up to top; keep points strictly between bottom and top.
+            left_vertical_points = [
+                (x, y) for (x, y) in left_vertical_points
+                if (y > left_y_bot + tol) and (y < left_y - tol)
+            ]
 
         # Deduplicate vertical points (remove points that are too close to each other)
         def deduplicate_points(points, tol=1e-8):