DiadFit 0.0.84__py3-none-any.whl → 0.0.88__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35) hide show
  1. DiadFit/CO2_EOS.py +2 -2
  2. DiadFit/CO2_H2O_EOS.py +173 -90
  3. DiadFit/CO2_in_bubble_error.py +217 -115
  4. DiadFit/Highrho_polyfit_dataUCB_1117_1400.pkl +0 -0
  5. DiadFit/Highrho_polyfit_dataUCB_1117_1447.pkl +0 -0
  6. DiadFit/Highrho_polyfit_dataUCB_1220_1400.pkl +0 -0
  7. DiadFit/Highrho_polyfit_dataUCB_1220_1447.pkl +0 -0
  8. DiadFit/Highrho_polyfit_dataUCB_1220_1567.pkl +0 -0
  9. DiadFit/Highrho_polyfit_data_CMASS_24C.pkl +0 -0
  10. DiadFit/Lowrho_polyfit_dataUCB_1117_1400.pkl +0 -0
  11. DiadFit/Lowrho_polyfit_dataUCB_1117_1447.pkl +0 -0
  12. DiadFit/Lowrho_polyfit_dataUCB_1220_1400.pkl +0 -0
  13. DiadFit/Lowrho_polyfit_dataUCB_1220_1447.pkl +0 -0
  14. DiadFit/Lowrho_polyfit_dataUCB_1220_1567.pkl +0 -0
  15. DiadFit/Lowrho_polyfit_data_CMASS_24C.pkl +0 -0
  16. DiadFit/Mediumrho_polyfit_dataUCB_1117_1400.pkl +0 -0
  17. DiadFit/Mediumrho_polyfit_dataUCB_1117_1447.pkl +0 -0
  18. DiadFit/Mediumrho_polyfit_dataUCB_1220_1400.pkl +0 -0
  19. DiadFit/Mediumrho_polyfit_dataUCB_1220_1447.pkl +0 -0
  20. DiadFit/Mediumrho_polyfit_dataUCB_1220_1567.pkl +0 -0
  21. DiadFit/_version.py +1 -1
  22. DiadFit/densimeter_fitting.py +7 -1
  23. DiadFit/densimeters.py +182 -40
  24. DiadFit/density_depth_crustal_profiles.py +37 -5
  25. DiadFit/diads.py +85 -48
  26. DiadFit/error_propagation.py +141 -229
  27. DiadFit/importing_data_files.py +81 -15
  28. DiadFit/lookup_table.csv +64001 -0
  29. DiadFit/lookup_table_noneg.csv +63707 -0
  30. DiadFit/ne_lines.py +58 -29
  31. {DiadFit-0.0.84.dist-info → DiadFit-0.0.88.dist-info}/METADATA +1 -1
  32. DiadFit-0.0.88.dist-info/RECORD +50 -0
  33. {DiadFit-0.0.84.dist-info → DiadFit-0.0.88.dist-info}/WHEEL +1 -1
  34. DiadFit-0.0.84.dist-info/RECORD +0 -40
  35. {DiadFit-0.0.84.dist-info → DiadFit-0.0.88.dist-info}/top_level.txt +0 -0
@@ -14,6 +14,70 @@ import datetime
14
14
  import calendar
15
15
 
16
16
  encode="ISO-8859-1"
17
+ ## Get video mag
18
+
19
+ # Function to check if "Video Image" is in the first line, considering variations
20
def line_contains_video_image(line):
    """Return True if *line* mentions "Video Image" (case-insensitive), else False."""
    marker = "video image"
    return marker in line.lower()
23
+
24
+
25
def get_video_mag(metadata_path):
    """Scan a folder for video-metadata .txt files and tabulate magnification.

    Each .txt file whose first line mentions "Video Image" (case-insensitive)
    is parsed for objective magnification, image width, and image height.

    Parameters
    ----------
    metadata_path : str
        Path to the folder containing the metadata text files.

    Returns
    -------
    pandas.DataFrame or None
        Columns "Filename", "Mag", "Width (µm)", "Height (µm)" — one row per
        file where a magnification was found. Returns None (after printing a
        message) when no data is found or the path is invalid.
    """
    records = []

    if os.path.exists(metadata_path) and os.path.isdir(metadata_path):
        # Inspect every text file in the folder
        for fname in os.listdir(metadata_path):
            if not fname.endswith('.txt'):
                continue
            full_path = os.path.join(metadata_path, fname)
            with open(full_path, 'r', encoding="ISO-8859-1") as fh:
                header = fh.readline()
                # Placeholders; remain None if the fields are absent
                mag = None
                width = None
                height = None

                # Only files whose first line flags a video image are parsed
                if "video image" in header.lower():
                    for row in fh:
                        if "Objective Magnification:" in row:
                            mag = row.split(":")[-1].strip()
                        elif "Image Width [µm]:" in row:
                            width = row.split(":")[-1].strip()
                        elif "Image Height [µm]:" in row:
                            height = row.split(":")[-1].strip()

                # Magnification is treated as mandatory for a row to be kept
                if mag:
                    records.append({
                        "Filename": fname,
                        "Mag": mag,
                        "Width (µm)": width,
                        "Height (µm)": height
                    })
    else:
        print(f"The specified path {metadata_path} does not exist or is not a directory.")

    df = pd.DataFrame(records)

    if not df.empty:
        return df
    # Implicitly returns None, matching the original contract
    print("No data found. Please check the folder path and the content of the files.")
77
+
78
+
79
+
80
+
17
81
 
18
82
  ## Functions for getting file names
19
83
 
@@ -37,7 +101,7 @@ def check_for_duplicates(spectra_path, prefix=True, prefix_str=' ', exception=Tr
37
101
 
38
102
  All_files_spectra= [f for f in listdir(spectra_path) if isfile(join(spectra_path, f))]
39
103
 
40
- file_m=np.empty(len(All_files_spectra), dtype=object)
104
+ file_m=np.zeros(len(All_files_spectra), dtype=object)
41
105
  for i in range(0, len(All_files_spectra)):
42
106
  name=All_files_spectra[i]
43
107
  # If no prefix or suffix to remove, simple
@@ -129,6 +193,8 @@ def get_all_txt_files(path):
129
193
  if '.txt' in file and 'pandas' not in file:
130
194
  All_files.append(format(file))
131
195
  return All_files
196
+
197
+ # Function to get magnification of video files
132
198
 
133
199
 
134
200
  ## Functions to just simply get data to plot up
@@ -925,14 +991,14 @@ def stitch_metadata_in_loop_witec(*, Allfiles, path, prefix=True, trupower=False
925
991
  date_str=[]
926
992
  month_str=[]
927
993
  # Numerical values
928
- Int_time=np.empty(len(Allfiles), dtype=float)
929
- objec=np.empty(len(Allfiles), dtype=float)
930
- time=np.empty(len(Allfiles), dtype=float)
994
+ Int_time=np.zeros(len(Allfiles), dtype=float)
995
+ objec=np.zeros(len(Allfiles), dtype=float)
996
+ time=np.zeros(len(Allfiles), dtype=float)
931
997
 
932
- Day=np.empty(len(Allfiles), dtype=float)
933
- power=np.empty(len(Allfiles), dtype=float)
934
- accumulations=np.empty(len(Allfiles), dtype=float)
935
- spectral_cent=np.empty(len(Allfiles), dtype=float)
998
+ Day=np.zeros(len(Allfiles), dtype=float)
999
+ power=np.zeros(len(Allfiles), dtype=float)
1000
+ accumulations=np.zeros(len(Allfiles), dtype=float)
1001
+ spectral_cent=np.zeros(len(Allfiles), dtype=float)
936
1002
 
937
1003
  for i in tqdm(range(0, len(Allfiles))):
938
1004
  filename1=Allfiles[i] #.rsplit('.',1)[0]
@@ -1081,7 +1147,7 @@ def extracting_filenames_generic(*, names, prefix=False,
1081
1147
 
1082
1148
  file_m=list(names)
1083
1149
 
1084
- file_m=np.empty(len(names), dtype=object)
1150
+ file_m=np.zeros(len(names), dtype=object)
1085
1151
  for i in range(0, len(names)):
1086
1152
  name=names.iloc[i]
1087
1153
  # If no prefix or suffix to remove, simple
@@ -1126,12 +1192,12 @@ def extract_temp_Aranet(df):
1126
1192
  """ Extracts temperature data from the aranet
1127
1193
  """
1128
1194
  TD=str(Temp['Time(dd/mm/yyyy)'])
1129
- hour=np.empty(len(Temp), dtype=object)
1130
- date=np.empty(len(Temp), dtype=object)
1131
- time=np.empty(len(Temp), dtype=object)
1132
- minutes=np.empty(len(Temp), dtype=object)
1133
- seconds=np.empty(len(Temp), dtype=object)
1134
- secs_sm=np.empty(len(Temp), dtype=object)
1195
+ hour=np.zeros(len(Temp), dtype=object)
1196
+ date=np.zeros(len(Temp), dtype=object)
1197
+ time=np.zeros(len(Temp), dtype=object)
1198
+ minutes=np.zeros(len(Temp), dtype=object)
1199
+ seconds=np.zeros(len(Temp), dtype=object)
1200
+ secs_sm=np.zeros(len(Temp), dtype=object)
1135
1201
  for i in range(0, len(Temp)):
1136
1202
  TD=str(Temp['Time(dd/mm/yyyy)'].iloc[i])
1137
1203
  date[i]=TD.split(' ')[0]