struct_post 0.1.5.tar.gz → 0.1.8.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: struct_post
- Version: 0.1.5
+ Version: 0.1.8
  Summary: A module designed to analyse common structural test results.
  Requires-Python: >=3.10
  Description-Content-Type: text/markdown
@@ -8,6 +8,13 @@ License-File: LICENSE
  Requires-Dist: matplotlib>=3.10.6
  Requires-Dist: openpyxl>=3.1.5
  Requires-Dist: pandas>=2.3.2
+ Requires-Dist: requests>=2.32.5
+ Provides-Extra: doc
+ Requires-Dist: sphinx>=8.0; extra == "doc"
+ Requires-Dist: myst-parser>=4.0.1; extra == "doc"
+ Requires-Dist: nbsphinx>=0.9.7; extra == "doc"
+ Requires-Dist: pydata-sphinx-theme>=0.16.1; extra == "doc"
+ Requires-Dist: ipykernel>=6.30.1; extra == "doc"
  Dynamic: license-file

  # struct_post
@@ -1,6 +1,6 @@
  [project]
  name = "struct_post"
- version = "0.1.5"
+ version = "0.1.8"
  description = "A module designed to analyse common structural test results."
  readme = "README.md"
  requires-python = ">=3.10"
@@ -8,4 +8,14 @@ dependencies = [
      "matplotlib>=3.10.6",
      "openpyxl>=3.1.5",
      "pandas>=2.3.2",
+     "requests>=2.32.5",
+ ]
+
+ [project.optional-dependencies]
+ doc = [
+     "sphinx>=8.0",
+     "myst-parser>=4.0.1",
+     "nbsphinx>=0.9.7",
+     "pydata-sphinx-theme>=0.16.1",
+     "ipykernel>=6.30.1"
  ]
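With the new [project.optional-dependencies] table, the Sphinx documentation toolchain is only installed on request; assuming a standard pip workflow, a command along the lines of pip install "struct_post[doc]" would pull in the doc extras on top of the runtime dependencies listed above.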
@@ -0,0 +1,163 @@
+ from pandas import DataFrame
+ import pandas as pd
+ 
+ def beam_lvmdata_file(file_name, force_index=2, displ_index=3,
+                       mid_lvdt_ind=None, left_lvdt_ind=None, right_lvdt_ind=None):
+     """
+     Read a .lvm file and extract force, displacement, and LVDT data in groups.
+ 
+     Parameters
+     ----------
+     file_name : str
+         Path to the .lvm file to read.
+     force_index : int, default=2
+         1-based column index for the force data.
+     displ_index : int, default=3
+         1-based column index for the displacement (stroke) data.
+     mid_lvdt_ind : int or list of int, optional
+         Column index/indices for the middle LVDTs.
+     left_lvdt_ind : int or list of int, optional
+         Column index/indices for the left LVDTs.
+     right_lvdt_ind : int or list of int, optional
+         Column index/indices for the right LVDTs.
+ 
+     Returns
+     -------
+     df_new : pandas.DataFrame
+         Extracted data as a DataFrame with columns renamed:
+         ['Force (kN)', 'Stroke (mm)', 'Mid_1 (mm)', 'Mid_2 (mm)',
+         'Left_1 (mm)', 'Left_2 (mm)', 'Right_1 (mm)', 'Right_2 (mm)'].
+         Only existing columns are included; missing columns are ignored.
+     file_base_name : str
+         Base name of the file without the '.lvm' extension.
+ 
+     Notes
+     -----
+     - Automatically detects the start of the data by locating the last
+       occurrence of '***End_of_Header***' in the file.
+     - Converts all columns to numeric values; non-numeric entries
+       are coerced to NaN.
+     - The last column is dropped assuming it is empty.
+     - Single integer indices are automatically converted to lists.
+     - LVDT columns are combined in the order: mid -> left -> right.
+     """
+ 
+     mid_lvdt_ind = mid_lvdt_ind or []
+     left_lvdt_ind = left_lvdt_ind or []
+     right_lvdt_ind = right_lvdt_ind or []
+ 
+     if isinstance(mid_lvdt_ind, int):
+         mid_lvdt_ind = [mid_lvdt_ind]
+     if isinstance(left_lvdt_ind, int):
+         left_lvdt_ind = [left_lvdt_ind]
+     if isinstance(right_lvdt_ind, int):
+         right_lvdt_ind = [right_lvdt_ind]
+ 
+     lvdt_indices = mid_lvdt_ind + left_lvdt_ind + right_lvdt_ind
+ 
+     with open(file_name, 'r') as f:
+         lines = f.readlines()
+     start_idx = max(i for i, line in enumerate(lines) if "***End_of_Header***" in line) + 1
+     df = pd.read_csv(file_name, sep="\t", skiprows=start_idx)
+     df = df.apply(pd.to_numeric, errors='coerce')
+     df = df.drop(df.columns[-1], axis=1)
+ 
+     user_indices = [force_index, displ_index] + lvdt_indices
+     existing_cols = [df.columns[i-1] for i in user_indices if i-1 < len(df.columns)]
+ 
+     df_new = df[existing_cols].copy()
+ 
+     col_names = ['Force (kN)','Stroke (mm)','Mid_1 (mm)','Mid_2 (mm)','Left_1 (mm)','Left_2 (mm)','Right_1 (mm)','Right_2 (mm)']
+     df_new.columns = col_names[:len(existing_cols)]
+ 
+     return df_new, file_name[:-4]
+ 
+ def beam_four_point_bending (data: DataFrame,
+                              width: float,
+                              depth: float,
+                              beam_span: float):
+     """
+     Perform four-point bending analysis to calculate apparent and true modulus of elasticity.
+ 
+     Parameters
+     ----------
+     data : tuple
+         A tuple containing:
+         - data[0] : pandas.DataFrame
+             Experimental dataset with columns:
+             - 'Force (kN)' : Applied load (kN).
+             - 'Mid_1 (mm)', 'Mid_2 (mm)', 'Left_1 (mm)', 'Left_2 (mm)',
+               'Right_1 (mm)', 'Right_2 (mm)' : Deflections from the six LVDTs (mm).
+         - data[1] : str
+             Sample name identifier.
+     width : float
+         Beam specimen width (mm).
+     depth : float
+         Beam specimen depth (mm).
+     beam_span : float
+         Beam span length (mm).
+ 
+     Returns
+     -------
+     tuple
+         (sample_name, results) where:
+         - sample_name : str
+             Name of the processed sample.
+         - results : dict
+             Dictionary containing:
+             - "E_app" : float
+                 Apparent modulus of elasticity (MPa).
+             - "E_true" : float
+                 True modulus of elasticity (MPa).
+ 
+     Notes
+     -----
+     - Apparent modulus (E_app) is calculated from mid-span deflection (Mid_1 & Mid_2).
+     - True modulus (E_true) is calculated from relative deflection (mid-span vs. supports).
+     - Load range for regression is limited to 10–40% of ultimate load.
+     """
+ 
+     import pandas as pd
+     import numpy as np
+ 
+     #Experimental test data post-process
+     sample_name = data[1]
+     force = data[0]['Force (kN)'] * 1000
+     delta_1 = abs(data[0]['Left_1 (mm)'])
+     delta_2 = abs(data[0]['Left_2 (mm)'])
+     delta_3 = abs(data[0]['Mid_1 (mm)'])
+     delta_4 = abs(data[0]['Mid_2 (mm)'])
+     delta_5 = abs(data[0]['Right_1 (mm)'])
+     delta_6 = abs(data[0]['Right_2 (mm)'])
+ 
+     F_ult = force.max()
+     f_b = (F_ult * beam_span) / (width * depth **2) #MPa
+ 
+ 
+     delta_ms = (delta_3 + delta_4)/2
+     delta_rel = delta_ms - (delta_1 + delta_2 + delta_5 + delta_6) / 4
+ 
+ 
+     lower_bound = 0.1 * F_ult
+     upper_bound = 0.4 * F_ult
+ 
+     calcs_reg = (lower_bound <= force) & (force <= upper_bound)
+ 
+     F_ms = force[calcs_reg]
+     delta_ms_calcs = delta_ms[calcs_reg]
+     delta_rel_calcs = delta_rel[calcs_reg]
+ 
+     Delta_ms, intercept_ms = np.polyfit(delta_ms_calcs,F_ms,1)
+     Delta_rel, intercept_rel = np.polyfit(delta_rel_calcs,F_ms,1)
+ 
+     E_app = (23/108) * (beam_span/depth)**3 * Delta_ms * (1/width)
+     E_true = (1/36) * (beam_span/depth)**3 * Delta_rel * (1/width)
+ 
+     results = {
+         "E_app": E_app,
+         "E_true": E_true,
+     }
+ 
+     print(f"Sample Name: {sample_name}")
+     print('-' * 40)
+     return sample_name, results
+ 
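The two functions added above are designed to chain: beam_lvmdata_file returns a (DataFrame, base_name) tuple that beam_four_point_bending consumes directly. A minimal usage sketch, assuming the functions are importable from the struct_post package (the exact import path is not shown in this diff) and using a hypothetical file name, LVDT column indices, and beam geometry:

    # Hypothetical example - the file path, column indices, and geometry below are placeholders.
    from struct_post import beam_lvmdata_file, beam_four_point_bending  # import path assumed

    data = beam_lvmdata_file("beam_01.lvm", force_index=2, displ_index=3,
                             mid_lvdt_ind=[4, 5], left_lvdt_ind=[6, 7], right_lvdt_ind=[8, 9])
    sample_name, results = beam_four_point_bending(data, width=45.0, depth=90.0, beam_span=1600.0)
    print(results["E_app"], results["E_true"])  # apparent and true modulus of elasticity in MPa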
@@ -69,7 +69,7 @@ def coupon_test_analysis (sample_name: str,

      # Calculate stress and strain
      force = Force * 1000 # Convert kN to N
-     stress = (force / area) # N/m^2 or Pa
+     stress = (force / area) # N/mm^2 or MPa
      uts = stress.max()

      #find the data before uts
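Assuming the cross-sectional area is computed from the width and thickness in mm, force / area here is N/mm^2, which is numerically identical to MPa; the corrected comment simply matches the units the calculation already produces.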
@@ -77,7 +77,7 @@ def coupon_test_analysis (sample_name: str,
      strain_up = Strain[:idx_peak+1]
      stress_up = stress[:idx_peak+1]

-     #Boundary for 20% - 50% of UTS
+     # Regression boundary from low_bound to up_bound of UTS
      lower_bound = low_bound * uts
      upper_bound = up_bound * uts

@@ -91,7 +91,7 @@ def coupon_test_analysis (sample_name: str,
      E_GPa = E / 1000 # Convert MPa to GPa
      #print(f"Intercept: {intercept} MPa")

-     # Select over 30% of UTS, as yield stress will over 30% uts
+     # Select data above lower_bound (low_bound * UTS), as the yield stress will exceed it
      strain_new = strain
      stress_new = force / area
      mask = (lower_bound <= stress)
@@ -143,7 +143,6 @@ def coupon_test_analysis (sample_name: str,
      print(f"Young's Modulus (E): {E:.2f} MPa")
      print(f"Ultimate Tensile Strength (UTS): {uts:.2f} MPa")
      print(f"Yield Strength: {yield_strength:.2f} MPa")
-     print(yield_strain)
      print('-' * 40)

      # Prepare results dictionary
@@ -192,25 +191,39 @@ def coupon_batch_analysis(Coupon_geodata: str,
                            showfig: bool = True,
                            savefig: bool = False):
      """
-     Perform batch analysis on a list of samples and return the results.
+     Perform batch analysis on a list of tensile coupon samples and return their results.

      Parameters
      ----------
-     Coupon_geodata : list
+     Coupon_geodata : list of SampleDetails
          A list of SampleDetails objects, each containing:
-         - sample_file_name : Name of the sample file
-         - thickness : Sample thickness
-         - width : Sample width
+         - sample_file_name : str
+             Path to the sample CSV file.
+         - thickness : float
+             Sample thickness (mm).
+         - width : float
+             Sample width (mm).
+     force_index : int
+         Column index of the Force data in the CSV file (1-based).
+     strain_index : int
+         Column index of the Strain data in the CSV file (1-based).
+     showfig : bool, optional
+         Whether to display stress-strain plots during analysis. Default is True.
      savefig : bool, optional
-         Whether to save the figures generated during analysis. Default is False.
+         Whether to save the stress-strain plots to files. Default is False.

      Returns
      -------
-     list
+     list of SampleAnalysisResults
          A list of SampleAnalysisResults objects, each containing:
-         - modulus_of_elasticity : Elastic modulus (E_GPa)
-         - ultimate_tensile_strength : Ultimate tensile strength (UTS_MPa)
-         - yield_Strength : Yield strength (Yield_Strength_MPa)
+         - sample_name : str
+             Name of the processed sample.
+         - modulus_of_elasticity : float
+             Elastic modulus (E_GPa).
+         - ultimate_tensile_strength : float
+             Ultimate tensile strength (UTS_MPa).
+         - yield_strength : float
+             Yield strength (Yield_Strength_MPa).
      """
      SARS = []
      for Coupon_detail in Coupon_geodata:
@@ -271,7 +284,7 @@ def coupon_results_save(Excelfile_name: str, analysis_results: list):
      print('The coupon test data analysis is complete.')

  from dataclasses import dataclass
- @dataclass # this thing is called a "decorator"
+ @dataclass
  class coupon_SampleDetails:
      """
      Holds basic information for a sample.
@@ -292,7 +305,7 @@ class coupon_SampleDetails:
      thickness: float
      sample_file_name: str

- @dataclass # this thing is called a "decorator"
+ @dataclass
  class coupon_SampleAnalysisResults:
      """
      Stores the analysis results for a sample after mechanical testing.
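The reworked coupon_batch_analysis docstring above describes the intended call pattern. A minimal sketch, assuming coupon_SampleDetails and coupon_batch_analysis are importable from the struct_post package, with hypothetical file names, geometry, and column indices; keyword arguments are used because the full dataclass field order is not visible in this diff:

    # Hypothetical example - file names, geometry, and column indices are placeholders.
    from struct_post import coupon_SampleDetails, coupon_batch_analysis  # import path assumed

    coupons = [
        coupon_SampleDetails(sample_file_name="coupon_01.csv", width=12.5, thickness=3.0),
        coupon_SampleDetails(sample_file_name="coupon_02.csv", width=12.5, thickness=3.0),
    ]
    results = coupon_batch_analysis(coupons, force_index=2, strain_index=3,
                                    showfig=False, savefig=False)
    for r in results:
        print(r)  # each entry is a coupon_SampleAnalysisResults dataclass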
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: struct_post
- Version: 0.1.5
+ Version: 0.1.8
  Summary: A module designed to analyse common structural test results.
  Requires-Python: >=3.10
  Description-Content-Type: text/markdown
@@ -8,6 +8,13 @@ License-File: LICENSE
  Requires-Dist: matplotlib>=3.10.6
  Requires-Dist: openpyxl>=3.1.5
  Requires-Dist: pandas>=2.3.2
+ Requires-Dist: requests>=2.32.5
+ Provides-Extra: doc
+ Requires-Dist: sphinx>=8.0; extra == "doc"
+ Requires-Dist: myst-parser>=4.0.1; extra == "doc"
+ Requires-Dist: nbsphinx>=0.9.7; extra == "doc"
+ Requires-Dist: pydata-sphinx-theme>=0.16.1; extra == "doc"
+ Requires-Dist: ipykernel>=6.30.1; extra == "doc"
  Dynamic: license-file

  # struct_post
@@ -0,0 +1,11 @@
+ matplotlib>=3.10.6
+ openpyxl>=3.1.5
+ pandas>=2.3.2
+ requests>=2.32.5
+ 
+ [doc]
+ sphinx>=8.0
+ myst-parser>=4.0.1
+ nbsphinx>=0.9.7
+ pydata-sphinx-theme>=0.16.1
+ ipykernel>=6.30.1
@@ -1,49 +0,0 @@
- from pandas import DataFrame
- def four_point_bending (data: DataFrame,
-                         width: float,
-                         depth: float,
-                         beam_span: float):
-     import pandas as pd
-     import numpy as np
- 
-     #geo
-     sample_name = data[1]
-     force = data[0]['Moog Force_kN'] * 1000
-     delta_1 = abs(data[0]['LVDT 1_mm'])
-     delta_2 = abs(data[0]['LVDT 2_mm'])
-     delta_3 = abs(data[0]['LVDT 3_mm'])
-     delta_4 = abs(data[0]['LVDT 4_mm'])
-     delta_5 = abs(data[0]['LVDT 5_mm'])
-     delta_6 = abs(data[0]['LVDT 6_mm'])
- 
-     F_ult = force.max()
-     f_b = (F_ult * beam_span) / (width * depth **2) #MPa
- 
- 
-     delta_ms = (delta_3 + delta_4)/2
-     delta_rel = delta_ms - (delta_1 + delta_2 + delta_5 + delta_6) / 4
- 
- 
-     lower_bound = 0.1 * F_ult
-     upper_bound = 0.4 * F_ult
- 
-     calcs_reg = (lower_bound <= force) & (force <= upper_bound)
- 
-     F_ms = force[calcs_reg]
-     delta_ms_calcs = delta_ms[calcs_reg]
-     delat_rel_calcs = delta_rel[calcs_reg]
- 
-     Delta_ms, intercept_ms = np.polyfit(delta_ms_calcs,F_ms,1)
-     Delta_rel, intercept_rel = np.polyfit(delat_rel_calcs,F_ms,1)
- 
-     E_app = (23/108) * (beam_span/depth)**3 * Delta_ms * (1/width)
-     E_true = (1/36) * (beam_span/depth)**3 * Delta_rel * (1/width)
- 
-     results = {
-         "E_app": E_app,
-         "E_true": E_true,
-     }
- 
-     print(f"Sample Name: {sample_name}")
-     print('-' * 40)
-     return sample_name, results
@@ -1,3 +0,0 @@
- matplotlib>=3.10.6
- openpyxl>=3.1.5
- pandas>=2.3.2
File without changes
File without changes
File without changes