AeroViz-0.1.14-py3-none-any.whl → AeroViz-0.1.15-py3-none-any.whl



Files changed (81)
  1. AeroViz/__pycache__/__init__.cpython-312.pyc +0 -0
  2. AeroViz/dataProcess/Chemistry/__pycache__/__init__.cpython-312.pyc +0 -0
  3. AeroViz/dataProcess/Optical/__pycache__/PyMieScatt_update.cpython-312.pyc +0 -0
  4. AeroViz/dataProcess/Optical/__pycache__/__init__.cpython-312.pyc +0 -0
  5. AeroViz/dataProcess/Optical/__pycache__/mie_theory.cpython-312.pyc +0 -0
  6. AeroViz/dataProcess/Optical/_absorption.py +2 -0
  7. AeroViz/dataProcess/SizeDistr/__pycache__/__init__.cpython-312.pyc +0 -0
  8. AeroViz/dataProcess/VOC/__pycache__/__init__.cpython-312.pyc +0 -0
  9. AeroViz/dataProcess/__pycache__/__init__.cpython-312.pyc +0 -0
  10. AeroViz/dataProcess/core/__pycache__/__init__.cpython-312.pyc +0 -0
  11. AeroViz/plot/__pycache__/__init__.cpython-312.pyc +0 -0
  12. AeroViz/plot/__pycache__/bar.cpython-312.pyc +0 -0
  13. AeroViz/plot/__pycache__/box.cpython-312.pyc +0 -0
  14. AeroViz/plot/__pycache__/pie.cpython-312.pyc +0 -0
  15. AeroViz/plot/__pycache__/radar.cpython-312.pyc +0 -0
  16. AeroViz/plot/__pycache__/regression.cpython-312.pyc +0 -0
  17. AeroViz/plot/__pycache__/scatter.cpython-312.pyc +0 -0
  18. AeroViz/plot/__pycache__/violin.cpython-312.pyc +0 -0
  19. AeroViz/plot/distribution/__pycache__/__init__.cpython-312.pyc +0 -0
  20. AeroViz/plot/distribution/__pycache__/distribution.cpython-312.pyc +0 -0
  21. AeroViz/plot/meteorology/__pycache__/CBPF.cpython-312.pyc +0 -0
  22. AeroViz/plot/meteorology/__pycache__/__init__.cpython-312.pyc +0 -0
  23. AeroViz/plot/meteorology/__pycache__/hysplit.cpython-312.pyc +0 -0
  24. AeroViz/plot/meteorology/__pycache__/wind_rose.cpython-312.pyc +0 -0
  25. AeroViz/plot/optical/__pycache__/__init__.cpython-312.pyc +0 -0
  26. AeroViz/plot/optical/__pycache__/optical.cpython-312.pyc +0 -0
  27. AeroViz/plot/templates/__init__.py +1 -1
  28. AeroViz/plot/templates/__pycache__/__init__.cpython-312.pyc +0 -0
  29. AeroViz/plot/templates/__pycache__/ammonium_rich.cpython-312.pyc +0 -0
  30. AeroViz/plot/templates/__pycache__/contour.cpython-312.pyc +0 -0
  31. AeroViz/plot/templates/__pycache__/corr_matrix.cpython-312.pyc +0 -0
  32. AeroViz/plot/templates/__pycache__/diurnal_pattern.cpython-312.pyc +0 -0
  33. AeroViz/plot/templates/__pycache__/koschmieder.cpython-312.pyc +0 -0
  34. AeroViz/plot/templates/__pycache__/metal_heatmap.cpython-312.pyc +0 -0
  35. AeroViz/plot/templates/corr_matrix.py +168 -2
  36. AeroViz/plot/templates/metal_heatmap.py +15 -6
  37. AeroViz/plot/timeseries/__pycache__/__init__.cpython-312.pyc +0 -0
  38. AeroViz/plot/timeseries/__pycache__/template.cpython-312.pyc +0 -0
  39. AeroViz/plot/timeseries/__pycache__/timeseries.cpython-312.pyc +0 -0
  40. AeroViz/plot/timeseries/timeseries.py +96 -52
  41. AeroViz/plot/utils/__pycache__/__init__.cpython-312.pyc +0 -0
  42. AeroViz/plot/utils/__pycache__/_color.cpython-312.pyc +0 -0
  43. AeroViz/plot/utils/__pycache__/_unit.cpython-312.pyc +0 -0
  44. AeroViz/plot/utils/__pycache__/plt_utils.cpython-312.pyc +0 -0
  45. AeroViz/plot/utils/__pycache__/sklearn_utils.cpython-312.pyc +0 -0
  46. AeroViz/rawDataReader/__init__.py +35 -5
  47. AeroViz/rawDataReader/__pycache__/__init__.cpython-312.pyc +0 -0
  48. AeroViz/rawDataReader/config/__pycache__/__init__.cpython-312.pyc +0 -0
  49. AeroViz/rawDataReader/config/__pycache__/supported_instruments.cpython-312.pyc +0 -0
  50. AeroViz/rawDataReader/core/__init__.py +131 -41
  51. AeroViz/rawDataReader/core/__pycache__/__init__.cpython-312.pyc +0 -0
  52. AeroViz/rawDataReader/core/__pycache__/logger.cpython-312.pyc +0 -0
  53. AeroViz/rawDataReader/core/__pycache__/qc.cpython-312.pyc +0 -0
  54. AeroViz/rawDataReader/core/logger.py +9 -9
  55. AeroViz/rawDataReader/script/SMPS.py +9 -0
  56. AeroViz/rawDataReader/script/__pycache__/AE33.cpython-312.pyc +0 -0
  57. AeroViz/rawDataReader/script/__pycache__/AE43.cpython-312.pyc +0 -0
  58. AeroViz/rawDataReader/script/__pycache__/APS.cpython-312.pyc +0 -0
  59. AeroViz/rawDataReader/script/__pycache__/Aurora.cpython-312.pyc +0 -0
  60. AeroViz/rawDataReader/script/__pycache__/BAM1020.cpython-312.pyc +0 -0
  61. AeroViz/rawDataReader/script/__pycache__/BC1054.cpython-312.pyc +0 -0
  62. AeroViz/rawDataReader/script/__pycache__/EPA.cpython-312.pyc +0 -0
  63. AeroViz/rawDataReader/script/__pycache__/GRIMM.cpython-312.pyc +0 -0
  64. AeroViz/rawDataReader/script/__pycache__/IGAC.cpython-312.pyc +0 -0
  65. AeroViz/rawDataReader/script/__pycache__/MA350.cpython-312.pyc +0 -0
  66. AeroViz/rawDataReader/script/__pycache__/Minion.cpython-312.pyc +0 -0
  67. AeroViz/rawDataReader/script/__pycache__/NEPH.cpython-312.pyc +0 -0
  68. AeroViz/rawDataReader/script/__pycache__/OCEC.cpython-312.pyc +0 -0
  69. AeroViz/rawDataReader/script/__pycache__/SMPS.cpython-312.pyc +0 -0
  70. AeroViz/rawDataReader/script/__pycache__/TEOM.cpython-312.pyc +0 -0
  71. AeroViz/rawDataReader/script/__pycache__/VOC.cpython-312.pyc +0 -0
  72. AeroViz/rawDataReader/script/__pycache__/XRF.cpython-312.pyc +0 -0
  73. AeroViz/rawDataReader/script/__pycache__/__init__.cpython-312.pyc +0 -0
  74. AeroViz/tools/__pycache__/__init__.cpython-312.pyc +0 -0
  75. AeroViz/tools/__pycache__/database.cpython-312.pyc +0 -0
  76. AeroViz/tools/__pycache__/dataclassifier.cpython-312.pyc +0 -0
  77. {AeroViz-0.1.14.dist-info → AeroViz-0.1.15.dist-info}/METADATA +9 -11
  78. {AeroViz-0.1.14.dist-info → AeroViz-0.1.15.dist-info}/RECORD +81 -81
  79. {AeroViz-0.1.14.dist-info → AeroViz-0.1.15.dist-info}/WHEEL +1 -1
  80. {AeroViz-0.1.14.dist-info → AeroViz-0.1.15.dist-info}/LICENSE +0 -0
  81. {AeroViz-0.1.14.dist-info → AeroViz-0.1.15.dist-info}/top_level.txt +0 -0
AeroViz/rawDataReader/core/__init__.py

@@ -1,14 +1,14 @@
  import json
  from abc import ABC, abstractmethod
  from contextlib import contextmanager
- from datetime import datetime
+ from datetime import datetime, timedelta
  from pathlib import Path
  from typing import Generator

  import numpy as np
  import pandas as pd
  from rich.console import Console
- from rich.progress import Progress, TextColumn, BarColumn, TimeRemainingColumn, TaskProgressColumn
+ from rich.progress import Progress, TextColumn, BarColumn, SpinnerColumn, TaskProgressColumn

  from AeroViz.rawDataReader.config.supported_instruments import meta
  from AeroViz.rawDataReader.core.logger import ReaderLogger
@@ -55,6 +55,7 @@ class AbstractReader(ABC):
  self.pkl_nam_raw = output_folder / f'_read_{self.nam.lower()}_raw.pkl'
  self.csv_nam_raw = output_folder / f'_read_{self.nam.lower()}_raw.csv'
  self.csv_out = output_folder / f'output_{self.nam.lower()}.csv'
+ self.report_out = output_folder / 'report.json'

  def __call__(self,
  start: datetime,
@@ -79,55 +80,144 @@ class AbstractReader(ABC):
  def _QC(self, df: pd.DataFrame) -> pd.DataFrame:
  return df

- def _rate_calculate(self, raw_data, qc_data) -> None:
- def __base_rate(raw_data, qc_data):
- period_size = len(raw_data.resample('1h').mean().index)
+ def __calculate_rates(self, raw_data, qc_data, all_keys=False, with_log=False):
+ """Compute the acquisition rate, yield rate and total rate.

- for _nam, _key in self.meta['deter_key'].items():
- _columns_key, _drop_how = (qc_data.keys(), 'all') if _key == ['all'] else (_key, 'any')
+ Args:
+ raw_data: raw data
+ qc_data: data after QC
+ all_keys: whether to compute every deter_key
+ with_log: whether to log the computed rates
+ """
+ if raw_data.empty or qc_data.empty:
+ return {'acquisition_rate': 0, 'yield_rate': 0, 'total_rate': 0}

- sample_size = len(raw_data[_columns_key].resample('1h').mean().copy().dropna(how=_drop_how).index)
- qc_size = len(qc_data[_columns_key].resample('1h').mean().copy().dropna(how=_drop_how).index)
+ def _calculate_single_key(key_name, key_columns):
+ columns, drop_how = (qc_data.keys(), 'all') if key_columns == ['all'] else (key_columns, 'any')

- # validate rate calculation
- if period_size == 0 or sample_size == 0 or qc_size == 0:
+ # resample and count the valid records
+ period_size = len(raw_data.resample('1h').mean().index)
+ sample_size = len(raw_data[columns].resample('1h').mean().dropna(how=drop_how).index)
+ qc_size = len(qc_data[columns].resample('1h').mean().dropna(how=drop_how).index)
+
+ # validate the counts
+ if any([
+ period_size == 0 or sample_size == 0 or qc_size == 0,
+ period_size < sample_size,
+ sample_size < qc_size
+ ]):
+ if with_log:
  self.logger.warning(f'\t\t No data for this period... skip')
- continue
- if period_size < sample_size:
- self.logger.warning(f'\t\tError: Sample({sample_size}) > Period({period_size})... skip')
- continue
- if sample_size < qc_size:
- self.logger.warning(f'\t\tError: QC({qc_size}) > Sample({sample_size})... skip')
- continue
-
- else:
- _sample_rate = round((sample_size / period_size) * 100, 1)
- _valid_rate = round((qc_size / sample_size) * 100, 1)
- _total_rate = round((qc_size / period_size) * 100, 1)
-
- self.logger.info(f"\t\t{self.logger.CYAN}{self.logger.ARROW} {_nam}{self.logger.RESET}")
+ return None
+
+ # compute the rates
+ sample_rate = round((sample_size / period_size) * 100, 1)
+ valid_rate = round((qc_size / sample_size) * 100, 1)
+ total_rate = round((qc_size / period_size) * 100, 1)
+
+ if with_log:
+ self.logger.info(f"\t\t> {key_name}")
  self.logger.info(
- f"\t\t\t├─ {'Sample Rate':15}: {self.logger.BLUE}{_sample_rate:>6.1f}%{self.logger.RESET}")
+ f"\t\t\t> {'Sample Rate':13}: {self.logger.BLUE}{sample_rate:>6.1f}%{self.logger.RESET}")
  self.logger.info(
- f"\t\t\t├─ {'Valid Rate':15}: {self.logger.BLUE}{_valid_rate:>6.1f}%{self.logger.RESET}")
+ f"\t\t\t> {'Valid Rate':13}: {self.logger.BLUE}{valid_rate:>6.1f}%{self.logger.RESET}")
  self.logger.info(
- f"\t\t\t└─ {'Total Rate':15}: {self.logger.BLUE}{_total_rate:>6.1f}%{self.logger.RESET}")
+ f"\t\t\t> {'Total Rate':13}: {self.logger.BLUE}{total_rate:>6.1f}%{self.logger.RESET}")
+
+ return {
+ 'acquisition_rate': sample_rate,
+ 'yield_rate': valid_rate,
+ 'total_rate': total_rate
+ }
+
+ if all_keys:
+ # compute every key and return all results (used for log output)
+ all_results = []
+ for name, columns in self.meta['deter_key'].items():
+ result = _calculate_single_key(name, columns)
+ if result:
+ all_results.append(result)
+
+ if not all_results:
+ return {'acquisition_rate': 0, 'yield_rate': 0, 'total_rate': 0}
+
+ # return the lowest rate across all results
+ return {
+ 'acquisition_rate': min(r['acquisition_rate'] for r in all_results),
+ 'yield_rate': min(r['yield_rate'] for r in all_results),
+ 'total_rate': min(r['total_rate'] for r in all_results)
+ }
+ else:
+ # compute every key but keep only the lowest rates
+ min_rates = {'acquisition_rate': 200, 'yield_rate': 200, 'total_rate': 200}
+
+ for name, columns in self.meta['deter_key'].items():
+ result = _calculate_single_key(name, columns)
+ if result:
+ min_rates['acquisition_rate'] = min(min_rates['acquisition_rate'], result['acquisition_rate'])
+ min_rates['yield_rate'] = min(min_rates['yield_rate'], result['yield_rate'])
+ min_rates['total_rate'] = min(min_rates['total_rate'], result['total_rate'])
+
+ # if there is no valid result at all, return 0
+ if min_rates['acquisition_rate'] == 200 and min_rates['yield_rate'] == 200:
+ return {'acquisition_rate': 0, 'yield_rate': 0, 'total_rate': 0}

+ return min_rates
+
+ def _rate_calculate(self, raw_data, qc_data) -> None:
  if self.meta['deter_key'] is not None:
- # use qc_freq to calculate each period rate
  if self.qc_freq is not None:
  raw_data_grouped = raw_data.groupby(pd.Grouper(freq=self.qc_freq))
  qc_data_grouped = qc_data.groupby(pd.Grouper(freq=self.qc_freq))

  for (month, _sub_raw_data), (_, _sub_qc_data) in zip(raw_data_grouped, qc_data_grouped):
  self.logger.info(
- f"\t{self.logger.BLUE}{self.logger.ARROW} Processing: {_sub_raw_data.index[0].strftime('%F')}"
+ f"\t{self.logger.BLUE}> Processing: {_sub_raw_data.index[0].strftime('%F')}"
  f" to {_sub_raw_data.index[-1].strftime('%F')}{self.logger.RESET}")

- __base_rate(_sub_raw_data, _sub_qc_data)
-
+ self.__calculate_rates(_sub_raw_data, _sub_qc_data, all_keys=True, with_log=True)
  else:
- __base_rate(raw_data, qc_data)
+ self.__calculate_rates(raw_data, qc_data, all_keys=True, with_log=True)
+
+ # compute the data for the past week and month
+ current_time = datetime.now()
+ week_mask = raw_data.index >= current_time - timedelta(days=7)
+ month_mask = raw_data.index >= current_time - timedelta(days=30)
+
+ # generate the report
+ self.__generate_report(
+ current_time,
+ raw_data[week_mask], qc_data[week_mask],
+ raw_data[month_mask], qc_data[month_mask]
+ )
+
+ def __generate_report(self, current_time, week_raw_data, week_qc_data, month_raw_data, month_qc_data):
+ """Generate the acquisition-rate and yield-rate report."""
+ report = {
+ "report_time": current_time.strftime('%Y-%m-%d %H:%M:%S'),
+ "instrument_info": {
+ "station": self.path.name[:2],
+ "instrument": self.nam
+ },
+ "rates": {
+ "weekly": self.__calculate_rates(week_raw_data, week_qc_data),
+ "monthly": self.__calculate_rates(month_raw_data, month_qc_data),
+ },
+ "details": {
+ "weekly": {
+ "start_time": (current_time - timedelta(days=7)).strftime('%Y-%m-%d %H:%M:%S'),
+ "end_time": current_time.strftime('%Y-%m-%d %H:%M:%S')
+ },
+ "monthly": {
+ "start_time": (current_time - timedelta(days=30)).strftime('%Y-%m-%d %H:%M:%S'),
+ "end_time": current_time.strftime('%Y-%m-%d %H:%M:%S')
+ }
+ }
+ }
+
+ # write the report
+ with open(self.report_out, 'w') as f:
+ json.dump(report, f, indent=4)

  def _timeIndex_process(self, _df, user_start=None, user_end=None, append_df=None):
  """
@@ -202,15 +292,15 @@ class AbstractReader(ABC):

  try:
  with Progress(
- TextColumn("[bold blue]{task.description}", style="bold blue"),
+ SpinnerColumn(finished_text=""),
  BarColumn(bar_width=25, complete_style="green", finished_style="bright_green"),
- TaskProgressColumn(),
- TimeRemainingColumn(),
- TextColumn("{task.fields[filename]}", style="yellow"),
+ TaskProgressColumn(style="bold", text_format="[bright_green]{task.percentage:>3.0f}%"),
+ TextColumn("{task.description}", style="bold blue"),
+ TextColumn("{task.fields[filename]}", style="bold blue"),
  console=Console(force_terminal=True, color_system="auto", width=120),
  expand=False
  ) as progress:
- task = progress.add_task(f"{self.logger.ARROW} Reading {self.nam} files", total=len(files), filename="")
+ task = progress.add_task(f"Reading {self.nam} files:", total=len(files), filename="")
  yield progress, task
  finally:
  # Restore logger method and output message
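The progress bar thus trades the time-remaining column for a spinner and an inline percentage. A self-contained sketch of the same column layout using `rich` (the file names and loop body are placeholders):

```python
import time

from rich.console import Console
from rich.progress import BarColumn, Progress, SpinnerColumn, TaskProgressColumn, TextColumn

files = ['smps_2024-01.csv', 'smps_2024-02.csv', 'smps_2024-03.csv']  # placeholder names

with Progress(
    SpinnerColumn(finished_text=""),  # spinner disappears once the task finishes
    BarColumn(bar_width=25, complete_style="green", finished_style="bright_green"),
    TaskProgressColumn(style="bold", text_format="[bright_green]{task.percentage:>3.0f}%"),
    TextColumn("{task.description}", style="bold blue"),
    TextColumn("{task.fields[filename]}", style="bold blue"),  # custom per-task field
    console=Console(force_terminal=True, color_system="auto", width=120),
    expand=False,
) as progress:
    task = progress.add_task("Reading SMPS files:", total=len(files), filename="")
    for name in files:
        progress.update(task, advance=1, filename=name)
        time.sleep(0.2)  # stand-in for reading one raw file
```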
@@ -262,12 +352,12 @@ class AbstractReader(ABC):
  def _run(self, user_start, user_end):
  # read pickle if pickle file exists and 'reset=False' or process raw data or append new data
  if self.pkl_nam_raw.exists() and self.pkl_nam.exists() and not self.reset:
- self.logger.info_box(f"Reading {self.nam} PICKLE from {user_start} to {user_end}", color_part="PICKLE")
+ self.logger.info_box(f"Reading {self.nam} PICKLE from {user_start} to {user_end}")

  _f_raw_done, _f_qc_done = pd.read_pickle(self.pkl_nam_raw), pd.read_pickle(self.pkl_nam)

  if self.append:
- self.logger.info_box(f"Appending New data from {user_start} to {user_end}", color_part="New data")
+ self.logger.info_box(f"Appending New data from {user_start} to {user_end}")

  _f_raw_new, _f_qc_new = self._read_raw_files()
  _f_raw = self._timeIndex_process(_f_raw_done, append_df=_f_raw_new)
@@ -279,7 +369,7 @@ class AbstractReader(ABC):
  return _f_qc if self.qc else _f_raw

  else:
- self.logger.info_box(f"Reading {self.nam} RAW DATA from {user_start} to {user_end}", color_part="RAW DATA")
+ self.logger.info_box(f"Reading {self.nam} RAW DATA from {user_start} to {user_end}")

  _f_raw, _f_qc = self._read_raw_files()

AeroViz/rawDataReader/core/logger.py

@@ -8,7 +8,7 @@ from pathlib import Path


  class ReaderLogger:
- def __init__(self, name: str, log_path: Path, log_level: str = 'WARNING'):
+ def __init__(self, name: str, log_path: Path, log_level: str = 'INFO'):
  self.name = name
  self.log_path = log_path
  self._log_level = getattr(logging, log_level)
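With the default level raised from 'WARNING' to 'INFO', the rate summaries emitted via `logger.info(...)` become visible without extra configuration. A tiny illustration of how the string default resolves (generic `logging` usage, not the package's `ReaderLogger`):

```python
import logging

logging.basicConfig(format="%(levelname)s %(message)s")

# The constructor stores getattr(logging, log_level); the new default 'INFO'
# resolves to logging.INFO (20) instead of logging.WARNING (30).
level = getattr(logging, 'INFO')

logger = logging.getLogger("reader_demo")  # hypothetical logger name
logger.setLevel(level)
logger.info("visible under the new INFO default")
logger.debug("still suppressed (below INFO)")
```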
@@ -33,16 +33,16 @@ class ReaderLogger:
  self.RESET = ''

  # check Unicode support
- self.unicode_support = self._setup_unicode()
+ self.unicode_support = self._check_unicode_support()

  # set box-drawing characters
  if self.unicode_support:
- self.BOX_TOP_LEFT = ""
- self.BOX_TOP_RIGHT = ""
- self.BOX_BOTTOM_LEFT = ""
- self.BOX_BOTTOM_RIGHT = ""
- self.BOX_HORIZONTAL = ""
- self.BOX_VERTICAL = ""
+ self.BOX_TOP_LEFT = ""
+ self.BOX_TOP_RIGHT = ""
+ self.BOX_BOTTOM_LEFT = ""
+ self.BOX_BOTTOM_RIGHT = ""
+ self.BOX_HORIZONTAL = ""
+ self.BOX_VERTICAL = ""
  self.ARROW = "▶"
  else:
  self.BOX_TOP_LEFT = "+"
@@ -75,7 +75,7 @@ class ReaderLogger:
  # other platform checks
  return hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()

- def _setup_unicode(self) -> bool:
+ def _check_unicode_support(self) -> bool:
  """Set up Unicode support"""
  if platform.system().lower() == 'windows':
  try:
AeroViz/rawDataReader/script/SMPS.py

@@ -38,6 +38,15 @@ class Reader(AbstractReader):

  _df = read_csv(f, sep=delimiter, skiprows=skip, low_memory=False)

+ if 'Date' not in _df.columns:  # the data needs to be transposed
+ try:
+ _df = _df.T  # transpose
+ _df.columns = _df.iloc[0]  # use the first row as the column names
+ _df = _df.iloc[1:]  # drop the first row (it has become the header)
+ _df = _df.reset_index(drop=True)  # reset the index
+ except:
+ raise NotImplementedError('Not supported date format')
+
  for date_format in date_formats:
  _time_index = parse_date(_df, date_format)
  if not _time_index.isna().all():
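The new branch covers SMPS exports where the fields run down the first column and each sample is a separate column, so 'Date' never appears among the parsed column names. A small illustration with made-up values (not real SMPS output):

```python
import pandas as pd

# Made-up wide export: parsed columns are 0, 1, 2, so 'Date' is missing.
_df = pd.DataFrame({
    0: ['Date', 'Start Time', '11.8'],
    1: ['2024/02/01', '00:05:00', '1520.3'],
    2: ['2024/02/01', '00:10:00', '1498.7'],
})

if 'Date' not in _df.columns:        # data needs to be transposed
    _df = _df.T                      # samples become rows
    _df.columns = _df.iloc[0]        # the first row carries the field names
    _df = _df.iloc[1:]               # drop that row, it is now the header
    _df = _df.reset_index(drop=True)

print(_df.columns.tolist())  # ['Date', 'Start Time', '11.8']
```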
{AeroViz-0.1.14.dist-info → AeroViz-0.1.15.dist-info}/METADATA

@@ -1,6 +1,6 @@
- Metadata-Version: 2.1
+ Metadata-Version: 2.2
  Name: AeroViz
- Version: 0.1.14
+ Version: 0.1.15
  Summary: Aerosol science
  Author-email: alex <alex870521@gmail.com>
  License: MIT
@@ -18,7 +18,7 @@ License-File: LICENSE
  Requires-Dist: pandas>=2.2.0
  Requires-Dist: numpy>=1.26.4
  Requires-Dist: matplotlib==3.8.4
- Requires-Dist: scipy==1.14.0
+ Requires-Dist: scipy>=1.14.0
  Requires-Dist: seaborn==0.13.2
  Requires-Dist: scikit-learn==1.5.1
  Requires-Dist: windrose==1.9.2
@@ -53,13 +53,11 @@ Requires-Dist: mkdocstrings[python]>=0.18.0; extra == "docs"
  </div>

  <div align="center">
-
- <a href="https://github.com/Alex870521"><img src="https://github.com/Alex870521/AeroViz/blob/main/assets/media/logo-social-github.png?raw=true" width="3%" alt="Alex870521 GitHub"></a>
- <img src="https://github.com/Alex870521/AeroViz/blob/main/assets/media/logo-transparent.png?raw=true" width="3%">
- <a href="https://www.linkedin.com/in/Alex870521/"><img src="https://github.com/Alex870521/AeroViz/blob/main/assets/media/logo-social-linkedin.png?raw=true" width="3%" alt="Alex870521 LinkedIn"></a>
- <img src="https://github.com/Alex870521/AeroViz/blob/main/assets/media/logo-transparent.png?raw=true" width="3%">
- <a href="https://medium.com/@alex870521"><img src="https://github.com/Alex870521/AeroViz/blob/main/assets/media/logo-social-medium.png?raw=true" width="3%" alt="Alex870521 Medium"></a>
- </div>
+ <a href="https://github.com/Alex870521"><img src="https://cdn.simpleicons.org/github/0A66C2" width="3%" alt="GitHub"></a>
+ <span style="margin: 0 1%"></span>
+ <a href="https://www.linkedin.com/in/Alex870521/"><img src="https://cdn.simpleicons.org/linkedin/0A66C2" width="3%" alt="LinkedIn"></a>
+ <span style="margin: 0 1%"></span>
+ <a href="https://medium.com/@alex870521"><img src="https://cdn.simpleicons.org/medium/0A66C2" width="3%" alt="Medium"></a></div>

  ## <div align="center">Installation</div>
  ```bash
@@ -105,7 +103,7 @@ from AeroViz import RawDataReader, DataProcess, plot

  # Read data from a supported instrument
  data = RawDataReader(
- instrument='Neph',
+ instrument='NEPH',
  path=Path('/path/to/data'),
  start=datetime(2024, 2, 1),
  end=datetime(2024, 4, 30)
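For completeness, the corrected README example assembled with its imports (path and dates are placeholders carried over from the snippet above):

```python
from datetime import datetime
from pathlib import Path

from AeroViz import RawDataReader

# Read data from a supported instrument; the key is the uppercase 'NEPH'.
data = RawDataReader(
    instrument='NEPH',
    path=Path('/path/to/data'),
    start=datetime(2024, 2, 1),
    end=datetime(2024, 4, 30)
)
```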