xslope 0.1.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
xslope/fileio.py ADDED
@@ -0,0 +1,671 @@
# Copyright 2025 Norman L. Jones
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import math
import os
import pickle

import numpy as np
import pandas as pd
from shapely.geometry import LineString, Point

from .mesh import import_mesh_from_json

def build_ground_surface(profile_lines):
    """
    Constructs the topmost ground surface LineString from a set of profile lines.

    The function finds the highest elevation at each x-coordinate across all
    profile lines, which represents the true ground surface.

    Parameters:
        profile_lines (list of list of tuple): A list of profile lines, each
            represented as a list of (x, y) coordinate tuples.

    Returns:
        shapely.geometry.LineString: A LineString of the top surface, or an empty
            LineString if fewer than two valid points are found.
    """

    if not profile_lines:
        return LineString([])

    # Step 1: Gather all points from all profile lines
    all_points = []
    for line in profile_lines:
        all_points.extend(line)

    # Step 2: Group points by x-coordinate and find the highest y for each x
    x_groups = {}
    for x, y in all_points:
        if x not in x_groups:
            x_groups[x] = y
        else:
            x_groups[x] = max(x_groups[x], y)

    # Step 3: For each candidate point, check if any profile line is above it
    ground_surface_points = []
    for x, y in sorted(x_groups.items()):
        # Create a vertical line at this x-coordinate
        vertical_line = LineString([(x, y - 1000), (x, y + 1000)])

        # Check intersections with all profile lines
        is_topmost = True
        for profile_line in profile_lines:
            line = LineString(profile_line)
            if line.length == 0:
                continue

            # Find intersection with this profile line
            intersection = line.intersection(vertical_line)
            if not intersection.is_empty:
                # Get the y-coordinate of the intersection
                if hasattr(intersection, 'y'):
                    # Single point intersection
                    if intersection.y > y + 1e-6:  # Allow small numerical tolerance
                        is_topmost = False
                        break
                elif hasattr(intersection, 'geoms'):
                    # Multiple points or line intersection
                    for geom in intersection.geoms:
                        if hasattr(geom, 'y') and geom.y > y + 1e-6:
                            is_topmost = False
                            break
                    if not is_topmost:
                        break

        if is_topmost:
            ground_surface_points.append((x, y))

    # Ensure we have at least 2 points
    if len(ground_surface_points) < 2:
        return LineString([])

    return LineString(ground_surface_points)

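# Example (editor's sketch, not part of the released module): with two profile
# lines where the first lies everywhere above the second, the returned surface
# simply follows the upper line. Coordinates are invented for illustration.
#
#     >>> upper = [(0.0, 10.0), (20.0, 12.0), (40.0, 8.0)]
#     >>> lower = [(0.0, 4.0), (40.0, 2.0)]
#     >>> list(build_ground_surface([upper, lower]).coords)
#     [(0.0, 10.0), (20.0, 12.0), (40.0, 8.0)]
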
def load_slope_data(filepath):
    """
    Loads and validates slope stability input data from an Excel workbook.

    The function reads input data from the workbook's sheets and parses it into
    structured components used throughout the slope stability analysis framework.
    It handles circular and non-circular failure surface data, reinforcement,
    piezometric lines, and distributed loads.

    Validation is enforced to ensure required geometry and material information is present:
    - Circular failure surface: must contain at least one valid row with Xo and Yo
    - Non-circular failure surface: required if no circular data is provided
    - Profile lines: must contain at least one valid set, and each line must have ≥ 2 points
    - Materials: must match the number of profile lines
    - Piezometric line: only included if it contains ≥ 2 valid rows
    - Distributed loads and reinforcement: each block must contain ≥ 2 valid entries

    Parameters:
        filepath (str): Path to the Excel input file.

    Raises:
        ValueError: if required inputs are missing or inconsistent.

    Returns:
        dict: Parsed and validated global data structure for analysis.
    """

    xls = pd.ExcelFile(filepath)
    globals_data = {}

    # === STATIC GLOBALS ===
    main_df = xls.parse('main', header=None)

    try:
        template_version = main_df.iloc[4, 3]     # Excel row 5, column D
        gamma_water = float(main_df.iloc[18, 3])  # Excel row 19, column D
        tcrack_depth = float(main_df.iloc[19, 3])  # Excel row 20, column D
        tcrack_water = float(main_df.iloc[20, 3])  # Excel row 21, column D
        k_seismic = float(main_df.iloc[21, 3])    # Excel row 22, column D
    except Exception as e:
        raise ValueError(f"Error reading static global values from 'main' tab: {e}")

    # === PROFILE LINES ===
    profile_df = xls.parse('profile', header=None)

    max_depth = float(profile_df.iloc[1, 1])  # Excel B2 = row 1, column 1

    profile_lines = []

    profile_data_blocks = [
        {"header_row": 4, "data_start": 5, "data_end": 20},
        {"header_row": 22, "data_start": 23, "data_end": 38}
    ]
    profile_block_width = 3

    for block in profile_data_blocks:
        for col in range(0, profile_df.shape[1], profile_block_width):
            x_col, y_col = col, col + 1
            try:
                x_header = str(profile_df.iloc[block["header_row"], x_col]).strip().lower()
                y_header = str(profile_df.iloc[block["header_row"], y_col]).strip().lower()
            except Exception:
                continue
            if x_header != 'x' or y_header != 'y':
                continue
            data = profile_df.iloc[block["data_start"]:block["data_end"], [x_col, y_col]]
            data = data.dropna(how='all')
            if data.empty:
                continue
            if data.iloc[0].isna().any():
                continue
            coords = data.dropna().apply(lambda r: (float(r.iloc[0]), float(r.iloc[1])), axis=1).tolist()
            if len(coords) == 1:
                raise ValueError("Each profile line must contain at least two points.")
            if coords:
                profile_lines.append(coords)

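    # Resulting structure (editor's illustration with invented coordinates): each
    # detected x/y block becomes one list of tuples, e.g.
    #     profile_lines = [[(0.0, 30.0), (50.0, 30.0), (80.0, 10.0)],
    #                      [(0.0, 20.0), (80.0, 5.0)]]
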
    # === BUILD GROUND SURFACE FROM PROFILE LINES ===

    ground_surface = build_ground_surface(profile_lines)

    # === BUILD TENSILE CRACK LINE ===

    tcrack_surface = None
    if tcrack_depth > 0:
        tcrack_surface = LineString([(x, y - tcrack_depth) for (x, y) in ground_surface.coords])

    # === MATERIALS (Optimized Parsing) ===
    mat_df = xls.parse('mat', header=2)  # header=2 because the header row is row 3 in Excel
    materials = []

    # Only process rows 4-15 (Excel), which are 0-indexed 0-11 in pandas
    for _, row in mat_df.iloc[0:12].iterrows():
        # Check if the row is blank (columns 2-22, which are indices 1-21)
        if row.iloc[1:22].isna().all():
            continue
        materials.append({
            "name": row.get('name', ''),
            "gamma": float(row.get('g', 0) or 0),
            "option": str(row.get('option', '')).strip().lower(),
            "c": float(row.get('c', 0) or 0),
            "phi": float(row.get('f', 0) or 0),
            "cp": float(row.get('cp', 0) or 0),
            "r_elev": float(row.get('r-elev', 0) or 0),
            "d": float(row.get('d', 0)) if pd.notna(row.get('d')) else 0,
            "psi": float(row.get('ψ', 0)) if pd.notna(row.get('ψ')) else 0,
            "u": str(row.get('u', 'none')).strip().lower(),
            "sigma_gamma": float(row.get('s(g)', 0) or 0),
            "sigma_c": float(row.get('s(c)', 0) or 0),
            "sigma_phi": float(row.get('s(f)', 0) or 0),
            "sigma_cp": float(row.get('s(cp)', 0) or 0),
            "sigma_d": float(row.get('s(d)', 0) or 0),
            "sigma_psi": float(row.get('s(ψ)', 0) or 0),
            "k1": float(row.get('k1', 0) or 0),
            "k2": float(row.get('k2', 0) or 0),
            "alpha": float(row.get('alpha', 0) or 0),
            "kr0": float(row.get('kr0', 0) or 0),
            "h0": float(row.get('h0', 0) or 0),
            "E": float(row.get('E', 0) or 0),
            "nu": float(row.get('n', 0) or 0)
        })

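    # Shape of one parsed entry (editor's illustration; the values and the option
    # string are invented, the keys come from the dict built above):
    #     {"name": "Clay", "gamma": 120.0, "option": "...", "c": 200.0,
    #      "phi": 30.0, ..., "u": "none", "E": 50000.0, "nu": 0.3}
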
    # === SEEPAGE ANALYSIS FILES ===
    # Check if any materials use seepage analysis for pore pressure
    has_seep_materials = any(material["u"] == "seep" for material in materials)

    seep_mesh = None
    seep_u = None
    seep_u2 = None

    if has_seep_materials:
        # Read seepage file names directly from Excel cells L19, L20, L21
        try:
            # Read the 'mat' sheet directly without header parsing
            mat_raw_df = xls.parse('mat', header=None)

            # L19 = row 18, column 11 (0-indexed)
            mesh_filename = str(mat_raw_df.iloc[18, 11]).strip()       # L19
            solution1_filename = str(mat_raw_df.iloc[19, 11]).strip()  # L20
            solution2_filename = str(mat_raw_df.iloc[20, 11]).strip()  # L21

            # Validate required files
            if not mesh_filename or mesh_filename.lower() == 'nan':
                raise ValueError("CRITICAL ERROR: Mesh filename is required when using 'seep' pore pressure option but is blank in cell L19.")
            if not solution1_filename or solution1_filename.lower() == 'nan':
                raise ValueError("CRITICAL ERROR: Solution1 filename is required when using 'seep' pore pressure option but is blank in cell L20.")

            # Load mesh file
            if not os.path.exists(mesh_filename):
                raise ValueError(f"CRITICAL ERROR: Mesh file '{mesh_filename}' not found.")
            seep_mesh = import_mesh_from_json(mesh_filename)

            # Load solution1 file
            if not os.path.exists(solution1_filename):
                raise ValueError(f"CRITICAL ERROR: Solution1 file '{solution1_filename}' not found.")
            solution1_df = pd.read_csv(solution1_filename)
            # Skip the last row which contains the total flowrate comment
            solution1_df = solution1_df.iloc[:-1]
            seep_u = solution1_df["u"].to_numpy()

            # Load solution2 file if provided
            if solution2_filename and solution2_filename.lower() != 'nan':
                if not os.path.exists(solution2_filename):
                    raise ValueError(f"CRITICAL ERROR: Solution2 file '{solution2_filename}' not found.")
                solution2_df = pd.read_csv(solution2_filename)
                # Skip the last row which contains the total flowrate comment
                solution2_df = solution2_df.iloc[:-1]
                seep_u2 = solution2_df["u"].to_numpy()

        except Exception as e:
            if "CRITICAL ERROR" in str(e):
                raise
            else:
                raise ValueError(f"Error reading seepage files: {e}")

    # === PIEZOMETRIC LINE ===
    piezo_df = xls.parse('piezo')
    piezo_line = []
    piezo_line2 = []

    # Read all data once (rows 4-18)
    piezo_data = piezo_df.iloc[2:18].dropna(how='all')

    if len(piezo_data) >= 2:
        # Extract first table (A4:B18) - columns 0 and 1
        try:
            piezo_data1 = piezo_data.dropna(subset=[piezo_data.columns[0], piezo_data.columns[1]], how='all')
            if len(piezo_data1) < 2:
                raise ValueError("First piezometric line must contain at least two points.")
            piezo_line = piezo_data1.apply(lambda row: (float(row.iloc[0]), float(row.iloc[1])), axis=1).tolist()
        except Exception:
            raise ValueError("Invalid first piezometric line format.")

        # Extract second table (D4:E18) - columns 3 and 4
        try:
            piezo_data2 = piezo_data.dropna(subset=[piezo_data.columns[3], piezo_data.columns[4]], how='all')
            if len(piezo_data2) < 2:
                raise ValueError("Second piezometric line must contain at least two points.")
            piezo_line2 = piezo_data2.apply(lambda row: (float(row.iloc[3]), float(row.iloc[4])), axis=1).tolist()
        except Exception:
            # If second table reading fails, just leave piezo_line2 as empty list
            piezo_line2 = []
    elif len(piezo_data) == 1:
        raise ValueError("Piezometric line must contain at least two points.")

    # === DISTRIBUTED LOADS ===
    dload_df = xls.parse('dloads', header=None)
    dloads = []
    dloads2 = []
    dload_data_blocks = [
        {"start_row": 3, "end_row": 13},
        {"start_row": 16, "end_row": 26}
    ]
    dload_block_starts = [1, 5, 9, 13]

    for block_idx, block in enumerate(dload_data_blocks):
        for col in dload_block_starts:
            section = dload_df.iloc[block["start_row"]:block["end_row"], col:col + 3]
            section = section.dropna(how='all')
            section = section.dropna(subset=[col, col + 1], how='any')
            if len(section) >= 2:
                try:
                    block_points = section.apply(
                        lambda row: {
                            "X": float(row.iloc[0]),
                            "Y": float(row.iloc[1]),
                            "Normal": float(row.iloc[2])
                        }, axis=1).tolist()
                    if block_idx == 0:
                        dloads.append(block_points)
                    else:
                        dloads2.append(block_points)
                except Exception:
                    raise ValueError("Invalid data format in distributed load block.")
            elif len(section) == 1:
                raise ValueError("Each distributed load block must contain at least two points.")

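    # Resulting structure (editor's illustration, invented values): each block parsed
    # above becomes a list of point dicts, e.g.
    #     dloads = [[{"X": 10.0, "Y": 25.0, "Normal": 500.0},
    #                {"X": 20.0, "Y": 25.0, "Normal": 500.0}]]
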
    # === CIRCLES ===

    # Read the full 'circles' sheet without headers (currently unused below)
    raw_df = xls.parse('circles', header=None)

    # Read the circles data starting from row 2 (index 1)
    circles_df = xls.parse('circles', header=1)
    raw = circles_df.dropna(subset=['Xo', 'Yo'], how='any')
    circles = []
    for _, row in raw.iterrows():
        Xo = row['Xo']
        Yo = row['Yo']
        Option = row.get('Option', None)
        Depth = row.get('Depth', None)
        Xi = row.get('Xi', None)
        Yi = row.get('Yi', None)
        R = row.get('R', None)
        # For each circle, fill in the radius and depth values depending on the circle option
        if Option == 'Depth':
            R = Yo - Depth
        elif Option == 'Intercept':
            R = ((Xi - Xo) ** 2 + (Yi - Yo) ** 2) ** 0.5
            Depth = Yo - R
        elif Option == 'Radius':
            Depth = Yo - R
        else:
            raise ValueError(f"Unknown option '{Option}' for circles.")
        circle = {
            "Xo": Xo,
            "Yo": Yo,
            "Depth": Depth,
            "R": R,
        }
        circles.append(circle)

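    # Worked check of the three options (editor's illustration, invented numbers):
    # with Xo = 50 and Yo = 30,
    #     Option 'Depth'     with Depth = 5        -> R = 30 - 5 = 25
    #     Option 'Intercept' with Xi = 10, Yi = 0  -> R = sqrt(40**2 + 30**2) = 50, Depth = 30 - 50 = -20
    #     Option 'Radius'    with R = 20           -> Depth = 30 - 20 = 10
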
    # === NON-CIRCULAR SURFACES ===
    noncirc_df = xls.parse('non-circ')
    non_circ = list(noncirc_df.iloc[1:].dropna(subset=['Unnamed: 0']).apply(
        lambda row: {
            "X": float(row['Unnamed: 0']),
            "Y": float(row['Unnamed: 1']),
            "Movement": row['Unnamed: 2']
        }, axis=1))

    # === REINFORCEMENT LINES ===
    reinforce_df = xls.parse('reinforce', header=1)  # Header in row 2 (0-indexed row 1)
    reinforce_lines = []

    # Process rows 3-22 (Excel) which are 0-indexed rows 0-19 in pandas after header=1
    for i, row in reinforce_df.iloc[0:20].iterrows():
        # Check if the row has coordinate data (x1, y1, x2, y2)
        if pd.isna(row.iloc[1]) or pd.isna(row.iloc[2]) or pd.isna(row.iloc[3]) or pd.isna(row.iloc[4]):
            continue  # Skip empty rows

        # If coordinates are present, check for required parameters (Tmax, Lp1, Lp2)
        if pd.isna(row.iloc[5]) or pd.isna(row.iloc[7]) or pd.isna(row.iloc[8]):
            raise ValueError(f"Reinforcement line in row {i + 3} has coordinates but missing required parameters (Tmax, Lp1, Lp2). All three must be specified.")

        try:
            # Extract coordinates and parameters
            x1, y1 = float(row.iloc[1]), float(row.iloc[2])  # Columns B, C
            x2, y2 = float(row.iloc[3]), float(row.iloc[4])  # Columns D, E
            Tmax = float(row.iloc[5])   # Column F
            Tres = float(row.iloc[6])   # Column G
            Lp1 = float(row.iloc[7]) if not pd.isna(row.iloc[7]) else 0.0  # Column H
            Lp2 = float(row.iloc[8]) if not pd.isna(row.iloc[8]) else 0.0  # Column I
            E = float(row.iloc[9])      # Column J
            Area = float(row.iloc[10])  # Column K

            # Calculate line length and direction
            line_length = math.sqrt((x2 - x1)**2 + (y2 - y1)**2)
            if line_length == 0:
                continue  # Skip zero-length lines

            # Unit vector from (x1,y1) to (x2,y2)
            dx = (x2 - x1) / line_length
            dy = (y2 - y1) / line_length

            line_points = []

            # Handle different cases based on pullout lengths
            if Lp1 + Lp2 >= line_length:
                # Line too short - create single interior point
                if Lp1 == 0 and Lp2 == 0:
                    # Both ends anchored - uniform tension
                    line_points = [
                        {"X": x1, "Y": y1, "T": Tmax, "Tres": Tres, "E": E, "Area": Area},
                        {"X": x2, "Y": y2, "T": Tmax, "Tres": Tres, "E": E, "Area": Area}
                    ]
                else:
                    # Find equilibrium point where tensions are equal
                    # T1 = Tmax * d1/Lp1, T2 = Tmax * d2/Lp2
                    # At equilibrium: d1/Lp1 = d2/Lp2 and d1 + d2 = line_length
                    if Lp1 == 0:
                        # End 1 anchored, all tension at end 1
                        line_points = [
                            {"X": x1, "Y": y1, "T": Tmax, "Tres": Tres, "E": E, "Area": Area},
                            {"X": x2, "Y": y2, "T": 0.0, "Tres": 0, "E": E, "Area": Area}
                        ]
                    elif Lp2 == 0:
                        # End 2 anchored, all tension at end 2
                        line_points = [
                            {"X": x1, "Y": y1, "T": 0.0, "Tres": 0, "E": E, "Area": Area},
                            {"X": x2, "Y": y2, "T": Tmax, "Tres": Tres, "E": E, "Area": Area}
                        ]
                    else:
                        # Both ends have pullout - find equilibrium point
                        ratio_sum = 1.0 / Lp1 + 1.0 / Lp2
                        d1 = line_length / (Lp2 * ratio_sum)
                        d2 = line_length / (Lp1 * ratio_sum)
                        T_eq = Tmax * d1 / Lp1  # = Tmax * d2 / Lp2

                        # Interior point location
                        x_int = x1 + d1 * dx
                        y_int = y1 + d1 * dy

                        line_points = [
                            {"X": x1, "Y": y1, "T": 0.0, "Tres": 0, "E": E, "Area": Area},
                            {"X": x_int, "Y": y_int, "T": T_eq, "Tres": Tres, "E": E, "Area": Area},
                            {"X": x2, "Y": y2, "T": 0.0, "Tres": 0, "E": E, "Area": Area}
                        ]
            else:
                # Normal case - line long enough for 4 points
                points_to_add = []

                # Point 1: Start point
                points_to_add.append((x1, y1, 0.0, 0.0))

                # Point 2: At distance Lp1 from start (if Lp1 > 0)
                if Lp1 > 0:
                    x_p2 = x1 + Lp1 * dx
                    y_p2 = y1 + Lp1 * dy
                    points_to_add.append((x_p2, y_p2, Tmax, Tres))
                else:
                    # Lp1 = 0, so start point gets Tmax tension
                    points_to_add[0] = (x1, y1, Tmax, Tres)

                # Point 3: At distance Lp2 back from end (if Lp2 > 0)
                if Lp2 > 0:
                    x_p3 = x2 - Lp2 * dx
                    y_p3 = y2 - Lp2 * dy
                    points_to_add.append((x_p3, y_p3, Tmax, Tres))
                else:
                    # Lp2 = 0, so end point gets Tmax tension
                    pass  # Will be handled when adding end point

                # Point 4: End point
                if Lp2 > 0:
                    points_to_add.append((x2, y2, 0.0, 0.0))
                else:
                    points_to_add.append((x2, y2, Tmax, Tres))

                # Remove duplicate points (same x,y coordinates)
                unique_points = []
                tolerance = 1e-6
                for x, y, T, Tres in points_to_add:
                    is_duplicate = False
                    for ux, uy, uT, uTres in unique_points:
                        if abs(x - ux) < tolerance and abs(y - uy) < tolerance:
                            # Update tension to maximum value at this location
                            for j, (px, py, pT, pTres) in enumerate(unique_points):
                                if abs(x - px) < tolerance and abs(y - py) < tolerance:
                                    unique_points[j] = (px, py, max(pT, T), max(pTres, Tres))
                            is_duplicate = True
                            break
                    if not is_duplicate:
                        unique_points.append((x, y, T, Tres))

                # Convert to required format
                line_points = [{"X": x, "Y": y, "T": T, "Tres": Tres, "E": E, "Area": Area}
                               for x, y, T, Tres in unique_points]

            if len(line_points) >= 2:
                reinforce_lines.append(line_points)

        except Exception as e:
            raise ValueError(f"Error processing reinforcement line in row {row.name + 3}: {e}")

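    # Numerical check of the equilibrium branch above (editor's illustration,
    # invented values): for line_length = 9, Lp1 = 4, Lp2 = 8 (so Lp1 + Lp2 >= length),
    #     ratio_sum = 1/4 + 1/8 = 0.375
    #     d1 = 9 / (8 * 0.375) = 3,   d2 = 9 / (4 * 0.375) = 6   (d1 + d2 = 9)
    #     T_eq = Tmax * d1 / Lp1 = Tmax * 3 / 4 = Tmax * d2 / Lp2 = Tmax * 6 / 8
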
    # === SEEPAGE ANALYSIS BOUNDARY CONDITIONS ===
    seep_df = xls.parse('seep bc', header=None)
    seepage_bc = {"specified_heads": [], "exit_face": []}

    # Specified Head #1
    head1 = seep_df.iloc[2, 2] if seep_df.shape[1] > 2 and seep_df.shape[0] > 2 else None
    coords1 = []
    for i in range(4, 12):  # rows 5-12 (0-indexed 4-11)
        if i >= seep_df.shape[0]:
            break
        x = seep_df.iloc[i, 1] if seep_df.shape[1] > 1 else None
        y = seep_df.iloc[i, 2] if seep_df.shape[1] > 2 else None
        if pd.notna(x) and pd.notna(y):
            coords1.append((float(x), float(y)))
    if pd.notna(head1) and coords1:
        seepage_bc["specified_heads"].append({"head": float(head1), "coords": coords1})

    # Specified Head #2
    head2 = seep_df.iloc[2, 5] if seep_df.shape[1] > 5 and seep_df.shape[0] > 2 else None
    coords2 = []
    for i in range(4, 12):
        if i >= seep_df.shape[0]:
            break
        x = seep_df.iloc[i, 4] if seep_df.shape[1] > 4 else None
        y = seep_df.iloc[i, 5] if seep_df.shape[1] > 5 else None
        if pd.notna(x) and pd.notna(y):
            coords2.append((float(x), float(y)))
    if pd.notna(head2) and coords2:
        seepage_bc["specified_heads"].append({"head": float(head2), "coords": coords2})

    # Specified Head #3
    head3 = seep_df.iloc[2, 8] if seep_df.shape[1] > 8 and seep_df.shape[0] > 2 else None
    coords3 = []
    for i in range(4, 12):
        if i >= seep_df.shape[0]:
            break
        x = seep_df.iloc[i, 7] if seep_df.shape[1] > 7 else None
        y = seep_df.iloc[i, 8] if seep_df.shape[1] > 8 else None
        if pd.notna(x) and pd.notna(y):
            coords3.append((float(x), float(y)))
    if pd.notna(head3) and coords3:
        seepage_bc["specified_heads"].append({"head": float(head3), "coords": coords3})

    # Exit Face
    exit_coords = []
    for i in range(15, 23):  # rows 16-23 (0-indexed 15-22)
        if i >= seep_df.shape[0]:
            break
        x = seep_df.iloc[i, 1] if seep_df.shape[1] > 1 else None
        y = seep_df.iloc[i, 2] if seep_df.shape[1] > 2 else None
        if pd.notna(x) and pd.notna(y):
            exit_coords.append((float(x), float(y)))
    seepage_bc["exit_face"] = exit_coords

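    # Resulting structure (editor's illustration, invented coordinates):
    #     seepage_bc = {"specified_heads": [{"head": 95.0,
    #                                        "coords": [(0.0, 90.0), (10.0, 90.0)]}],
    #                   "exit_face": [(60.0, 40.0), (70.0, 35.0)]}
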
    # === VALIDATION ===

    circular = len(circles) > 0
    if not circular and len(non_circ) == 0:
        raise ValueError("Input must include either circular or non-circular surface data.")
    if not profile_lines:
        raise ValueError("Profile lines sheet is empty or invalid.")
    if not materials:
        raise ValueError("Materials sheet is empty.")
    if len(materials) != len(profile_lines):
        raise ValueError(f"Each profile line must have a corresponding material. You have {len(materials)} materials and {len(profile_lines)} profile lines.")

    # Add everything to globals_data
    globals_data["template_version"] = template_version
    globals_data["gamma_water"] = gamma_water
    globals_data["tcrack_depth"] = tcrack_depth
    globals_data["tcrack_water"] = tcrack_water
    globals_data["k_seismic"] = k_seismic
    globals_data["max_depth"] = max_depth
    globals_data["profile_lines"] = profile_lines
    globals_data["ground_surface"] = ground_surface
    globals_data["tcrack_surface"] = tcrack_surface
    globals_data["materials"] = materials
    globals_data["piezo_line"] = piezo_line
    globals_data["piezo_line2"] = piezo_line2
    globals_data["circular"] = circular  # True if circles are present
    globals_data["circles"] = circles
    globals_data["non_circ"] = non_circ
    globals_data["dloads"] = dloads
    globals_data["dloads2"] = dloads2
    globals_data["reinforce_lines"] = reinforce_lines
    globals_data["seepage_bc"] = seepage_bc

    # Add seepage data if available
    if has_seep_materials:
        globals_data["seep_mesh"] = seep_mesh
        globals_data["seep_u"] = seep_u
        if seep_u2 is not None:
            globals_data["seep_u2"] = seep_u2

    return globals_data


def save_data_to_pickle(data, filepath):
    """
    Save a data object to a pickle file.

    This function serializes the data object and saves it to the specified filepath.
    Useful for saving processed data from Excel templates for later use.

    Parameters:
        data: The data object to save (typically a dictionary from load_slope_data)
        filepath (str): The file path where the pickle file should be saved

    Returns:
        None

    Raises:
        IOError: If the file cannot be written or the data cannot be serialized
    """
    try:
        with open(filepath, 'wb') as f:
            pickle.dump(data, f)
    except Exception as e:
        raise IOError(f"Failed to save data to pickle file '{filepath}': {e}")


def load_data_from_pickle(filepath):
    """
    Load a data object from a pickle file.

    This function deserializes a data object from the specified pickle file.
    Useful for loading previously saved data without re-processing Excel templates.

    Parameters:
        filepath (str): The file path of the pickle file to load

    Returns:
        The deserialized data object (typically a dictionary)

    Raises:
        FileNotFoundError: If the pickle file doesn't exist
        IOError: If the file cannot be read or the data cannot be deserialized
    """
    try:
        with open(filepath, 'rb') as f:
            data = pickle.load(f)
        return data
    except FileNotFoundError:
        raise FileNotFoundError(f"Pickle file not found: '{filepath}'")
    except Exception as e:
        raise IOError(f"Failed to load data from pickle file '{filepath}': {e}")


def print_dictionary(dictionary):
    """
    Print the contents of a dictionary to the console.
    This can be used for slope_data, seep_data, or any other dictionary.
    """
    for key, value in dictionary.items():
        print(f"\n=== {key} ===")
        if isinstance(value, list):
            for item in value:
                print(item)
        else:
            print(value)
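
# Typical round trip (editor's sketch; 'inputs.xlsx' and 'slope_data.pkl' are
# placeholder file names, not files shipped with the package):
#
#     from xslope.fileio import (load_slope_data, save_data_to_pickle,
#                                load_data_from_pickle, print_dictionary)
#
#     slope_data = load_slope_data("inputs.xlsx")
#     save_data_to_pickle(slope_data, "slope_data.pkl")
#     slope_data = load_data_from_pickle("slope_data.pkl")
#     print_dictionary(slope_data)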