AeroViz 0.1.3__py3-none-any.whl → 0.1.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of AeroViz might be problematic. See the registry's advisory page for more details.

Files changed (121)
  1. AeroViz/__init__.py +7 -5
  2. AeroViz/{config → data}/DEFAULT_DATA.csv +1 -1
  3. AeroViz/dataProcess/Chemistry/__init__.py +40 -40
  4. AeroViz/dataProcess/Chemistry/_calculate.py +15 -15
  5. AeroViz/dataProcess/Chemistry/_isoropia.py +72 -68
  6. AeroViz/dataProcess/Chemistry/_mass_volume.py +158 -161
  7. AeroViz/dataProcess/Chemistry/_ocec.py +109 -109
  8. AeroViz/dataProcess/Chemistry/_partition.py +19 -18
  9. AeroViz/dataProcess/Chemistry/_teom.py +9 -11
  10. AeroViz/dataProcess/Chemistry/isrpia.cnf +21 -0
  11. AeroViz/dataProcess/Optical/Angstrom_exponent.py +20 -0
  12. AeroViz/dataProcess/Optical/_IMPROVE.py +40 -41
  13. AeroViz/dataProcess/Optical/__init__.py +29 -44
  14. AeroViz/dataProcess/Optical/_absorption.py +21 -47
  15. AeroViz/dataProcess/Optical/_extinction.py +31 -25
  16. AeroViz/dataProcess/Optical/_mie.py +5 -7
  17. AeroViz/dataProcess/Optical/_mie_sd.py +89 -90
  18. AeroViz/dataProcess/Optical/_scattering.py +19 -20
  19. AeroViz/dataProcess/SizeDistr/__init__.py +39 -39
  20. AeroViz/dataProcess/SizeDistr/__merge.py +159 -158
  21. AeroViz/dataProcess/SizeDistr/_merge.py +155 -154
  22. AeroViz/dataProcess/SizeDistr/_merge_v1.py +162 -161
  23. AeroViz/dataProcess/SizeDistr/_merge_v2.py +153 -152
  24. AeroViz/dataProcess/SizeDistr/_merge_v3.py +327 -327
  25. AeroViz/dataProcess/SizeDistr/_merge_v4.py +273 -275
  26. AeroViz/dataProcess/SizeDistr/_size_distr.py +51 -51
  27. AeroViz/dataProcess/VOC/__init__.py +9 -9
  28. AeroViz/dataProcess/VOC/_potential_par.py +53 -55
  29. AeroViz/dataProcess/__init__.py +28 -6
  30. AeroViz/dataProcess/core/__init__.py +59 -65
  31. AeroViz/plot/__init__.py +7 -2
  32. AeroViz/plot/bar.py +126 -0
  33. AeroViz/plot/box.py +69 -0
  34. AeroViz/plot/distribution/distribution.py +421 -427
  35. AeroViz/plot/meteorology/meteorology.py +240 -292
  36. AeroViz/plot/optical/__init__.py +0 -1
  37. AeroViz/plot/optical/optical.py +230 -230
  38. AeroViz/plot/pie.py +198 -0
  39. AeroViz/plot/regression.py +196 -0
  40. AeroViz/plot/scatter.py +165 -0
  41. AeroViz/plot/templates/__init__.py +2 -4
  42. AeroViz/plot/templates/ammonium_rich.py +34 -0
  43. AeroViz/plot/templates/contour.py +25 -25
  44. AeroViz/plot/templates/corr_matrix.py +86 -93
  45. AeroViz/plot/templates/diurnal_pattern.py +28 -26
  46. AeroViz/plot/templates/koschmieder.py +59 -123
  47. AeroViz/plot/templates/metal_heatmap.py +135 -37
  48. AeroViz/plot/timeseries/__init__.py +1 -0
  49. AeroViz/plot/timeseries/template.py +47 -0
  50. AeroViz/plot/timeseries/timeseries.py +324 -264
  51. AeroViz/plot/utils/__init__.py +2 -1
  52. AeroViz/plot/utils/_color.py +57 -57
  53. AeroViz/plot/utils/_unit.py +48 -48
  54. AeroViz/plot/utils/plt_utils.py +92 -0
  55. AeroViz/plot/utils/sklearn_utils.py +49 -0
  56. AeroViz/plot/utils/units.json +5 -0
  57. AeroViz/plot/violin.py +80 -0
  58. AeroViz/process/__init__.py +17 -17
  59. AeroViz/process/core/DataProc.py +9 -9
  60. AeroViz/process/core/SizeDist.py +81 -81
  61. AeroViz/process/method/PyMieScatt_update.py +488 -488
  62. AeroViz/process/method/mie_theory.py +231 -229
  63. AeroViz/process/method/prop.py +40 -40
  64. AeroViz/process/script/AbstractDistCalc.py +103 -103
  65. AeroViz/process/script/Chemical.py +168 -167
  66. AeroViz/process/script/IMPACT.py +40 -40
  67. AeroViz/process/script/IMPROVE.py +152 -152
  68. AeroViz/process/script/Others.py +45 -45
  69. AeroViz/process/script/PSD.py +26 -26
  70. AeroViz/process/script/PSD_dry.py +69 -70
  71. AeroViz/process/script/retrieve_RI.py +50 -51
  72. AeroViz/rawDataReader/__init__.py +53 -58
  73. AeroViz/rawDataReader/config/supported_instruments.py +155 -0
  74. AeroViz/rawDataReader/core/__init__.py +233 -356
  75. AeroViz/rawDataReader/script/AE33.py +17 -18
  76. AeroViz/rawDataReader/script/AE43.py +18 -21
  77. AeroViz/rawDataReader/script/APS_3321.py +30 -30
  78. AeroViz/rawDataReader/script/Aurora.py +23 -24
  79. AeroViz/rawDataReader/script/BC1054.py +36 -40
  80. AeroViz/rawDataReader/script/EPA_vertical.py +37 -9
  81. AeroViz/rawDataReader/script/GRIMM.py +16 -23
  82. AeroViz/rawDataReader/script/IGAC.py +90 -0
  83. AeroViz/rawDataReader/script/MA350.py +32 -39
  84. AeroViz/rawDataReader/script/Minion.py +103 -0
  85. AeroViz/rawDataReader/script/NEPH.py +69 -74
  86. AeroViz/rawDataReader/script/SMPS_TH.py +25 -25
  87. AeroViz/rawDataReader/script/SMPS_aim11.py +32 -32
  88. AeroViz/rawDataReader/script/SMPS_genr.py +31 -31
  89. AeroViz/rawDataReader/script/Sunset_OCEC.py +60 -0
  90. AeroViz/rawDataReader/script/TEOM.py +30 -28
  91. AeroViz/rawDataReader/script/Table.py +13 -14
  92. AeroViz/rawDataReader/script/VOC.py +26 -0
  93. AeroViz/rawDataReader/script/__init__.py +18 -20
  94. AeroViz/tools/database.py +64 -66
  95. AeroViz/tools/dataclassifier.py +106 -106
  96. AeroViz/tools/dataprinter.py +51 -51
  97. AeroViz/tools/datareader.py +38 -38
  98. {AeroViz-0.1.3.dist-info → AeroViz-0.1.4.dist-info}/METADATA +5 -4
  99. AeroViz-0.1.4.dist-info/RECORD +112 -0
  100. AeroViz/plot/improve/__init__.py +0 -1
  101. AeroViz/plot/improve/improve.py +0 -240
  102. AeroViz/plot/optical/aethalometer.py +0 -77
  103. AeroViz/plot/templates/event_evolution.py +0 -65
  104. AeroViz/plot/templates/regression.py +0 -256
  105. AeroViz/plot/templates/scatter.py +0 -130
  106. AeroViz/plot/templates/templates.py +0 -398
  107. AeroViz/plot/utils/_decorator.py +0 -74
  108. AeroViz/rawDataReader/script/IGAC_TH.py +0 -104
  109. AeroViz/rawDataReader/script/IGAC_ZM.py +0 -90
  110. AeroViz/rawDataReader/script/OCEC_LCRES.py +0 -34
  111. AeroViz/rawDataReader/script/OCEC_RES.py +0 -28
  112. AeroViz/rawDataReader/script/VOC_TH.py +0 -30
  113. AeroViz/rawDataReader/script/VOC_ZM.py +0 -37
  114. AeroViz/rawDataReader/utils/__init__.py +0 -0
  115. AeroViz/rawDataReader/utils/config.py +0 -169
  116. AeroViz-0.1.3.dist-info/RECORD +0 -111
  117. /AeroViz/{config → data}/DEFAULT_PNSD_DATA.csv +0 -0
  118. /AeroViz/{config → rawDataReader/config}/__init__.py +0 -0
  119. {AeroViz-0.1.3.dist-info → AeroViz-0.1.4.dist-info}/LICENSE +0 -0
  120. {AeroViz-0.1.3.dist-info → AeroViz-0.1.4.dist-info}/WHEEL +0 -0
  121. {AeroViz-0.1.3.dist-info → AeroViz-0.1.4.dist-info}/top_level.txt +0 -0
@@ -2,79 +2,79 @@ __all__ = ['_basic']
2
2
 
3
3
 
4
4
  def _geometric_prop(_dp, _prop):
5
- import numpy as n
5
+ import numpy as n
6
6
 
7
- _prop_t = _prop.sum(axis=1)
8
- _prop_t = _prop_t.where(_prop_t > 0).copy()
7
+ _prop_t = _prop.sum(axis=1)
8
+ _prop_t = _prop_t.where(_prop_t > 0).copy()
9
9
 
10
- _dp = n.log(_dp)
11
- _gmd = (((_prop * _dp).sum(axis=1)) / _prop_t.copy())
10
+ _dp = n.log(_dp)
11
+ _gmd = (((_prop * _dp).sum(axis=1)) / _prop_t.copy())
12
12
 
13
- _dp_mesh, _gmd_mesh = n.meshgrid(_dp, _gmd)
14
- _gsd = ((((_dp_mesh - _gmd_mesh) ** 2) * _prop).sum(axis=1) / _prop_t.copy()) ** .5
13
+ _dp_mesh, _gmd_mesh = n.meshgrid(_dp, _gmd)
14
+ _gsd = ((((_dp_mesh - _gmd_mesh) ** 2) * _prop).sum(axis=1) / _prop_t.copy()) ** .5
15
15
 
16
- return _prop_t, _gmd.apply(n.exp), _gsd.apply(n.exp)
16
+ return _prop_t, _gmd.apply(n.exp), _gsd.apply(n.exp)
17
17
 
18
18
 
19
19
  def _basic(df, hybrid, unit, bin_rg, input_type):
20
- import numpy as n
21
- from pandas import DataFrame, concat
20
+ import numpy as n
21
+ from pandas import DataFrame
22
22
 
23
- ## get number conc. data and total, mode
24
- dN = df
25
- dN.columns = dN.keys().to_numpy(float)
23
+ ## get number conc. data and total, mode
24
+ dN = df
25
+ dN.columns = dN.keys().to_numpy(float)
26
26
 
27
- dN_ky = dN.keys()[(dN.keys() >= bin_rg[0]) & (dN.keys() <= bin_rg[-1])]
28
- dN = dN[dN_ky].copy()
27
+ dN_ky = dN.keys()[(dN.keys() >= bin_rg[0]) & (dN.keys() <= bin_rg[-1])]
28
+ dN = dN[dN_ky].copy()
29
29
 
30
- out_dic = {}
31
- ## diameter
32
- dp = dN.keys().to_numpy()
33
- if hybrid:
34
- dlog_dp = n.diff(n.log10(dp)).mean()
35
- else:
36
- dlog_dp = n.ones(dp.size)
37
- dlog_dp[:hybrid] = n.diff(n.log10(dp[:hybrid])).mean()
38
- dlog_dp[hybrid:] = n.diff(n.log10(dp[hybrid:])).mean()
30
+ out_dic = {}
31
+ ## diameter
32
+ dp = dN.keys().to_numpy()
33
+ if hybrid:
34
+ dlog_dp = n.diff(n.log10(dp)).mean()
35
+ else:
36
+ dlog_dp = n.ones(dp.size)
37
+ dlog_dp[:hybrid] = n.diff(n.log10(dp[:hybrid])).mean()
38
+ dlog_dp[hybrid:] = n.diff(n.log10(dp[hybrid:])).mean()
39
39
 
40
- ## calculate normalize and non-normalize data
41
- if input_type == 'norm':
42
- out_dic['number'] = (dN * dlog_dp).copy()
43
- out_dic['number_norm'] = dN.copy()
44
- else:
45
- out_dic['number'] = dN.copy()
46
- out_dic['number_norm'] = (dN / dlog_dp).copy()
40
+ ## calculate normalize and non-normalize data
41
+ if input_type == 'norm':
42
+ out_dic['number'] = (dN * dlog_dp).copy()
43
+ out_dic['number_norm'] = dN.copy()
44
+ else:
45
+ out_dic['number'] = dN.copy()
46
+ out_dic['number_norm'] = (dN / dlog_dp).copy()
47
47
 
48
- out_dic['surface'] = out_dic['number'] * n.pi * dp ** 2
49
- out_dic['volume'] = out_dic['number'] * n.pi * (dp ** 3) / 6
48
+ out_dic['surface'] = out_dic['number'] * n.pi * dp ** 2
49
+ out_dic['volume'] = out_dic['number'] * n.pi * (dp ** 3) / 6
50
50
 
51
- out_dic['surface_norm'] = out_dic['number_norm'] * n.pi * dp ** 2
52
- out_dic['volume_norm'] = out_dic['number_norm'] * n.pi * (dp ** 3) / 6
51
+ out_dic['surface_norm'] = out_dic['number_norm'] * n.pi * dp ** 2
52
+ out_dic['volume_norm'] = out_dic['number_norm'] * n.pi * (dp ** 3) / 6
53
53
 
54
- ## size range mode process
55
- df_oth = DataFrame(index=dN.index)
54
+ ## size range mode process
55
+ df_oth = DataFrame(index=dN.index)
56
56
 
57
- bound = n.array([(dp.min(), dp.max() + 1), (10, 25), (25, 100), (100, 1e3), (1e3, 2.5e3), ])
58
- if unit == 'um':
59
- bound[1:] /= 1e3
57
+ bound = n.array([(dp.min(), dp.max() + 1), (10, 25), (25, 100), (100, 1e3), (1e3, 2.5e3), ])
58
+ if unit == 'um':
59
+ bound[1:] /= 1e3
60
60
 
61
- for _tp_nam, _tp_dt in zip(['num', 'surf', 'vol'], [out_dic['number'], out_dic['surface'], out_dic['volume']]):
61
+ for _tp_nam, _tp_dt in zip(['num', 'surf', 'vol'], [out_dic['number'], out_dic['surface'], out_dic['volume']]):
62
62
 
63
- for _md_nam, _range in zip(['all', 'Nucleation', 'Aitken', 'Accumulation', 'Coarse'], bound):
63
+ for _md_nam, _range in zip(['all', 'Nucleation', 'Aitken', 'Accumulation', 'Coarse'], bound):
64
64
 
65
- _dia = dp[(dp >= _range[0]) & (dp < _range[-1])]
66
- if ~_dia.any(): continue
65
+ _dia = dp[(dp >= _range[0]) & (dp < _range[-1])]
66
+ if ~_dia.any(): continue
67
67
 
68
- _dt = _tp_dt[_dia].copy()
68
+ _dt = _tp_dt[_dia].copy()
69
69
 
70
- df_oth[f'total_{_tp_nam}_{_md_nam}'], df_oth[f'GMD_{_tp_nam}_{_md_nam}'], df_oth[
71
- f'GSD_{_tp_nam}_{_md_nam}'] = _geometric_prop(_dia, _dt)
72
- df_oth[f'mode_{_tp_nam}_{_md_nam}'] = _dt.idxmax(axis=1)
70
+ df_oth[f'total_{_tp_nam}_{_md_nam}'], df_oth[f'GMD_{_tp_nam}_{_md_nam}'], df_oth[
71
+ f'GSD_{_tp_nam}_{_md_nam}'] = _geometric_prop(_dia, _dt)
72
+ df_oth[f'mode_{_tp_nam}_{_md_nam}'] = _dt.idxmax(axis=1)
73
73
 
74
- ## out
75
- out_dic['other'] = df_oth
74
+ ## out
75
+ out_dic['other'] = df_oth
76
76
 
77
- return out_dic
77
+ return out_dic
78
78
 
79
79
  # old 20230113
80
80
 
@@ -1,19 +1,19 @@
1
- from ..core import _writter, _run_process
1
+ from ..core import Writer, run_process
2
2
 
3
3
  __all__ = [
4
4
 
5
- 'VOC',
5
+ 'VOC',
6
6
 
7
7
  ]
8
8
 
9
9
 
10
- class VOC(_writter):
10
+ class VOC(Writer):
11
11
 
12
- ## Reconstruction
13
- @_run_process('VOC - basic', 'voc_basic')
14
- def VOC_basic(self, _df_voc):
15
- from ._potential_par import _basic
12
+ ## Reconstruction
13
+ @run_process('VOC - basic', 'voc_basic')
14
+ def VOC_basic(self, _df_voc):
15
+ from ._potential_par import _basic
16
16
 
17
- out = _basic(_df_voc)
17
+ out = _basic(_df_voc)
18
18
 
19
- return self, out
19
+ return self, out
@@ -1,76 +1,74 @@
1
- from datetime import datetime as dtm
2
- from pandas import DataFrame, to_datetime, read_json
3
- from pathlib import Path
4
1
  import pickle as pkl
2
+ from pathlib import Path
5
3
 
6
- import numpy as np
4
+ from pandas import DataFrame, read_json
7
5
 
8
6
 
9
7
  def _basic(_df_voc):
10
- ## parameter
11
- _keys = _df_voc.keys()
8
+ ## parameter
9
+ _keys = _df_voc.keys()
12
10
 
13
- with (Path(__file__).parent / 'voc_par.pkl').open('rb') as f:
14
- _par = pkl.load(f)
15
- _MW, _MIR, _SOAP, _KOH = _par.loc['MW', _keys], _par.loc['MIR', _keys], _par.loc['SOAP', _keys], _par.loc[
16
- 'KOH', _keys]
11
+ with (Path(__file__).parent / 'voc_par.pkl').open('rb') as f:
12
+ _par = pkl.load(f)
13
+ _MW, _MIR, _SOAP, _KOH = _par.loc['MW', _keys], _par.loc['MIR', _keys], _par.loc['SOAP', _keys], _par.loc[
14
+ 'KOH', _keys]
17
15
 
18
- with (Path(__file__).parent / 'voc_par.json').open('r', encoding='utf-8', errors='ignore') as f:
19
- _parr = read_json(f)
20
- _MW, _MIR, _SOAP, _KOH = _par.loc['MW', _keys], _par.loc['MIR', _keys], _par.loc['SOAP', _keys], _par.loc[
21
- 'KOH', _keys]
16
+ with (Path(__file__).parent / 'voc_par.json').open('r', encoding='utf-8', errors='ignore') as f:
17
+ _parr = read_json(f)
18
+ _MW, _MIR, _SOAP, _KOH = _par.loc['MW', _keys], _par.loc['MIR', _keys], _par.loc['SOAP', _keys], _par.loc[
19
+ 'KOH', _keys]
22
20
 
23
- _voc_clasfy = {
24
- 'alkane_total': ['Isopentane', 'n-Butane', '2-Methylhexane', 'Cyclopentane', '3-Methylpentane',
25
- '2,3-Dimethylbutane',
26
- '2-Methylheptane', 'n-Nonane', 'Methylcyclohexane', '2,4-Dimethylpentane', '2-Methylpentane',
27
- 'n-Decane',
28
- 'n-Heptane', 'Cyclohexane', 'n-Octane', 'Isobutane', '2,2-Dimethylbutane',
29
- 'Methylcyclopentane', 'n-Hexane',
30
- '2,3,4-Trimethylpentane', '3-Methylhexane', 'n-Undecane', '3-Methylheptane', 'Hexane',
31
- '2,2,4-Trimethylpentane', 'n-Pentane', 'Ethane', 'Propane'],
21
+ _voc_clasfy = {
22
+ 'alkane_total': ['Isopentane', 'n-Butane', '2-Methylhexane', 'Cyclopentane', '3-Methylpentane',
23
+ '2,3-Dimethylbutane',
24
+ '2-Methylheptane', 'n-Nonane', 'Methylcyclohexane', '2,4-Dimethylpentane', '2-Methylpentane',
25
+ 'n-Decane',
26
+ 'n-Heptane', 'Cyclohexane', 'n-Octane', 'Isobutane', '2,2-Dimethylbutane',
27
+ 'Methylcyclopentane', 'n-Hexane',
28
+ '2,3,4-Trimethylpentane', '3-Methylhexane', 'n-Undecane', '3-Methylheptane', 'Hexane',
29
+ '2,2,4-Trimethylpentane', 'n-Pentane', 'Ethane', 'Propane'],
32
30
 
33
- 'alkane_total': ['Isoprene', '1-Butene', 'cis-2-Butene', 'Propene', '1.3-Butadiene',
34
- 't-2-Butene', 'cis-2-Pentene', 'Propylene', 'isoprene', '1-Pentene',
35
- 'Ethylene', 't-2-Pentene', '1-Octene'],
31
+ 'alkane_total': ['Isoprene', '1-Butene', 'cis-2-Butene', 'Propene', '1.3-Butadiene',
32
+ 't-2-Butene', 'cis-2-Pentene', 'Propylene', 'isoprene', '1-Pentene',
33
+ 'Ethylene', 't-2-Pentene', '1-Octene'],
36
34
 
37
- 'aromatic_total': ['o-Ethyltoluene', '1,3,5-Trimethylbenzene', 'Ethylbenzene', 'm,p-Xylene', 'n-Propylbenzene',
38
- 'Benzene', 'Toluene', '1.2.4-TMB', 'Styrene', 'p-Ethyltoluene', 'o-Xylene',
39
- 'm-Diethylbenzene',
40
- '1.2.3-TMB', 'Isopropylbenzene', 'm-Ethyltoluene', '2-Ethyltoluene', '1.3.5-TMB',
41
- 'Iso-Propylbenzene',
42
- '3.4-Ethyltoluene', 'p-Diethylbenzene', '1,2,4-Trimethylbenzene', 'm.p-Xylene',
43
- '1,2,3-Trimethylbenzene'],
35
+ 'aromatic_total': ['o-Ethyltoluene', '1,3,5-Trimethylbenzene', 'Ethylbenzene', 'm,p-Xylene', 'n-Propylbenzene',
36
+ 'Benzene', 'Toluene', '1.2.4-TMB', 'Styrene', 'p-Ethyltoluene', 'o-Xylene',
37
+ 'm-Diethylbenzene',
38
+ '1.2.3-TMB', 'Isopropylbenzene', 'm-Ethyltoluene', '2-Ethyltoluene', '1.3.5-TMB',
39
+ 'Iso-Propylbenzene',
40
+ '3.4-Ethyltoluene', 'p-Diethylbenzene', '1,2,4-Trimethylbenzene', 'm.p-Xylene',
41
+ '1,2,3-Trimethylbenzene'],
44
42
 
45
- 'alkyne_total': ['Acetylene'],
43
+ 'alkyne_total': ['Acetylene'],
46
44
 
47
- 'OVOC': ['Acetaldehyde', 'Ethanol', 'Acetone', 'IPA', 'Ethyl Acetate', 'Butyl Acetate'],
45
+ 'OVOC': ['Acetaldehyde', 'Ethanol', 'Acetone', 'IPA', 'Ethyl Acetate', 'Butyl Acetate'],
48
46
 
49
- 'ClVOC': ['VCM', 'TCE', 'PCE', '1.4-DCB', '1.2-DCB'],
50
- }
47
+ 'ClVOC': ['VCM', 'TCE', 'PCE', '1.4-DCB', '1.2-DCB'],
48
+ }
51
49
 
52
- _df_MW = (_df_voc * _MW).copy()
53
- _df_dic = {
54
- 'Conc': _df_voc.copy(),
55
- 'OFP': _df_MW / 48 * _MIR,
56
- 'SOAP': _df_MW / 24.5 * _SOAP / 100 * 0.054,
57
- 'LOH': _df_MW / 24.5 / _MW * 0.602 * _KOH,
58
- }
50
+ _df_MW = (_df_voc * _MW).copy()
51
+ _df_dic = {
52
+ 'Conc': _df_voc.copy(),
53
+ 'OFP': _df_MW / 48 * _MIR,
54
+ 'SOAP': _df_MW / 24.5 * _SOAP / 100 * 0.054,
55
+ 'LOH': _df_MW / 24.5 / _MW * 0.602 * _KOH,
56
+ }
59
57
 
60
- ## calculate
61
- _out = {}
62
- for _nam, _df in _df_dic.items():
58
+ ## calculate
59
+ _out = {}
60
+ for _nam, _df in _df_dic.items():
63
61
 
64
- _df_out = DataFrame(index=_df_voc.index)
62
+ _df_out = DataFrame(index=_df_voc.index)
65
63
 
66
- for _voc_nam, _voc_lst in _voc_clasfy.items():
67
- _lst = list(set(_keys) & set(_voc_lst))
68
- if len(_lst) == 0: continue
64
+ for _voc_nam, _voc_lst in _voc_clasfy.items():
65
+ _lst = list(set(_keys) & set(_voc_lst))
66
+ if len(_lst) == 0: continue
69
67
 
70
- _df_out[_voc_nam] = _df[_lst].sum(axis=1, min_count=1)
68
+ _df_out[_voc_nam] = _df[_lst].sum(axis=1, min_count=1)
71
69
 
72
- _df_out['Total'] = _df.sum(axis=1, min_count=1)
70
+ _df_out['Total'] = _df.sum(axis=1, min_count=1)
73
71
 
74
- _out[_nam] = _df_out
72
+ _out[_nam] = _df_out
75
73
 
76
- return _out
74
+ return _out
@@ -1,11 +1,33 @@
1
+ from pathlib import Path
2
+
1
3
  from .Chemistry import Chemistry
2
4
  from .Optical import Optical
3
5
  from .SizeDistr import SizeDistr
4
6
  from .VOC import VOC
5
7
 
6
- __all__ = [
7
- 'Optical',
8
- 'SizeDistr',
9
- 'Chemistry',
10
- 'VOC',
11
- ]
8
+ __all__ = ['DataProcess']
9
+
10
+
11
+ def DataProcess(method: str,
12
+ path_out: Path,
13
+ excel: bool = False,
14
+ csv: bool = True,
15
+ ):
16
+ # Mapping of method names to their respective classes
17
+ method_class_map = {
18
+ 'Chemistry': Chemistry,
19
+ 'Optical': Optical,
20
+ 'SizeDistr': SizeDistr,
21
+ 'VOC': VOC
22
+ }
23
+
24
+ if method not in method_class_map.keys():
25
+ raise ValueError(f"Method name '{method}' is not valid. \nMust be one of: {list(method_class_map.keys())}")
26
+
27
+ writer_module = method_class_map[method](
28
+ path_out=path_out,
29
+ excel=excel,
30
+ csv=csv
31
+ )
32
+
33
+ return writer_module
@@ -1,92 +1,86 @@
1
- from pandas import DatetimeIndex, DataFrame, concat
2
- from pathlib import Path
3
1
  import pickle as pkl
4
2
  from datetime import datetime as dtm
3
+ from pathlib import Path
5
4
 
5
+ from pandas import concat
6
6
 
7
- class _writter:
8
-
9
- def __init__(self, path_out=None, excel=True, csv=False):
10
-
11
- self.path_out = Path(path_out) if path_out is not None else path_out
12
- self.excel = excel
13
- self.csv = csv
14
-
15
- def _pre_process(self, _out):
16
-
17
- if type(_out) == dict:
18
- for _ky, _df in _out.items():
19
- _df.index.name = 'time'
20
- else:
21
- _out.index.name = 'time'
22
7
 
23
- return _out
8
+ class Writer:
24
9
 
25
- def _save_out(self, _nam, _out):
10
+ def __init__(self, path_out=None, excel=True, csv=False):
11
+ self.path_out = Path(path_out) if path_out is not None else path_out
12
+ self.excel = excel
13
+ self.csv = csv
26
14
 
27
- _check = True
28
- while _check:
15
+ @staticmethod
16
+ def pre_process(_out):
17
+ if isinstance(_out, dict):
18
+ for _ky, _df in _out.items():
19
+ _df.index.name = 'time'
20
+ else:
21
+ _out.index.name = 'time'
29
22
 
30
- try:
31
- if self.path_out is not None:
32
- self.path_out.mkdir(exist_ok=True, parents=True)
33
- with (self.path_out / f'{_nam}.pkl').open('wb') as f:
34
- pkl.dump(_out, f, protocol=pkl.HIGHEST_PROTOCOL)
23
+ return _out
35
24
 
36
- if self.excel:
37
- from pandas import ExcelWriter
38
- with ExcelWriter(self.path_out / f'{_nam}.xlsx') as f:
39
- if type(_out) == dict:
40
- for _key, _val in _out.items():
41
- _val.to_excel(f, sheet_name=f'{_key}')
42
- else:
43
- _out.to_excel(f, sheet_name=f'{_nam}')
25
+ def save_out(self, _nam, _out):
26
+ _check = True
27
+ while _check:
44
28
 
45
- if self.csv:
46
- if type(_out) == dict:
47
- _path_out = self.path_out / _nam
48
- _path_out.mkdir(exist_ok=True, parents=True)
29
+ try:
30
+ if self.path_out is not None:
31
+ self.path_out.mkdir(exist_ok=True, parents=True)
32
+ with (self.path_out / f'{_nam}.pkl').open('wb') as f:
33
+ pkl.dump(_out, f, protocol=pkl.HIGHEST_PROTOCOL)
49
34
 
50
- for _key, _val in _out.items():
51
- _val.to_csv(_path_out / f'{_key}.csv')
52
- else:
53
- _out.to_csv(self.path_out / f'{_nam}.csv')
35
+ if self.excel:
36
+ from pandas import ExcelWriter
37
+ with ExcelWriter(self.path_out / f'{_nam}.xlsx') as f:
38
+ if type(_out) == dict:
39
+ for _key, _val in _out.items():
40
+ _val.to_excel(f, sheet_name=f'{_key}')
41
+ else:
42
+ _out.to_excel(f, sheet_name=f'{_nam}')
54
43
 
55
- _check = False
44
+ if self.csv:
45
+ if isinstance(_out, dict):
46
+ _path_out = self.path_out / _nam
47
+ _path_out.mkdir(exist_ok=True, parents=True)
56
48
 
57
- except PermissionError as _err:
58
- print('\n', _err)
59
- input('\t\t\33[41m Please Close The File And Press "Enter" \33[0m\n')
49
+ for _key, _val in _out.items():
50
+ _val.to_csv(_path_out / f'{_key}.csv')
51
+ else:
52
+ _out.to_csv(self.path_out / f'{_nam}.csv')
60
53
 
54
+ _check = False
61
55
 
62
- def _run_process(*_ini_set):
63
- def _decorator(_prcs_fc):
64
- def _wrap(*arg, **kwarg):
65
- _fc_name, _nam = _ini_set
56
+ except PermissionError as _err:
57
+ print('\n', _err)
58
+ input('\t\t\33[41m Please Close The File And Press "Enter" \33[0m\n')
66
59
 
67
- if kwarg.get('nam') is not None:
68
- _nam = kwarg.pop('nam')
69
60
 
70
- print(f"\n\t{dtm.now().strftime('%m/%d %X')} : Process \033[92m{_fc_name}\033[0m -> {_nam}")
61
+ def run_process(*_ini_set):
62
+ def _decorator(_prcs_fc):
63
+ def _wrap(*arg, **kwarg):
64
+ _fc_name, _nam = _ini_set
71
65
 
72
- _class, _out = _prcs_fc(*arg, **kwarg)
73
- _out = _class._pre_process(_out)
66
+ if kwarg.get('nam') is not None:
67
+ _nam = kwarg.pop('nam')
74
68
 
75
- _class._save_out(_nam, _out)
69
+ print(f"\n\t{dtm.now().strftime('%m/%d %X')} : Process \033[92m{_fc_name}\033[0m -> {_nam}")
76
70
 
77
- return _out
71
+ _class, _out = _prcs_fc(*arg, **kwarg)
72
+ _out = _class.pre_process(_out)
78
73
 
79
- return _wrap
74
+ _class.save_out(_nam, _out)
80
75
 
81
- return _decorator
76
+ return _out
82
77
 
78
+ return _wrap
83
79
 
84
- def _union_index(*_df_arg):
85
- _idx = concat(_df_arg, axis=1).index
80
+ return _decorator
86
81
 
87
- # _idx = DatetimeIndex([])
88
82
 
89
- # for _df in _df_arg:
90
- # _idx = _idx.union(DataFrame(_df).index)
83
+ def union_index(*_df_arg):
84
+ _idx = concat(_df_arg, axis=1).index
91
85
 
92
- return [_df.reindex(_idx) if _df is not None else None for _df in _df_arg]
86
+ return [_df.reindex(_idx) if _df is not None else None for _df in _df_arg]
AeroViz/plot/__init__.py CHANGED
@@ -1,7 +1,12 @@
1
1
  from . import distribution
2
- from . import improve
3
2
  from . import meteorology
4
3
  from . import optical
5
- from . import timeseries
4
+ from .bar import bar
5
+ from .box import box
6
+ from .pie import pie, donuts
7
+ from .regression import linear_regression, multiple_linear_regression
8
+ from .scatter import scatter
6
9
  from .templates import *
10
+ from .timeseries import timeseries, timeseries_template, timeseries_stacked
7
11
  from .utils import *
12
+ from .violin import violin
AeroViz/plot/bar.py ADDED
@@ -0,0 +1,126 @@
1
+ from typing import Literal
2
+
3
+ import matplotlib.pyplot as plt
4
+ import numpy as np
5
+ import pandas as pd
6
+ from matplotlib.pyplot import Figure, Axes
7
+ from pandas import DataFrame
8
+
9
+ from AeroViz.plot.utils import *
10
+
11
+ __all__ = ['bar']
12
+
13
+
14
+ @set_figure(fw='bold')
15
+ def bar(data_set: DataFrame | dict,
16
+ data_std: DataFrame | None,
17
+ labels: list[str],
18
+ unit: str,
19
+ style: Literal["stacked", "dispersed"] = "dispersed",
20
+ orientation: Literal["va", "ha"] = 'va',
21
+ ax: Axes | None = None,
22
+ symbol=True,
23
+ **kwargs
24
+ ) -> tuple[Figure, Axes]:
25
+ """
26
+ Parameters
27
+ ----------
28
+ data_set : pd.DataFrame or dict
29
+ A mapping from category names to a list of species mean or a DataFrame with columns as categories and values as means.
30
+ data_std : pd.DataFrame or None
31
+ A DataFrame with standard deviations corresponding to data_set, or None if standard deviations are not provided.
32
+ labels : list of str
33
+ The species names.
34
+ unit : str
35
+ The unit for the values.
36
+ style : {'stacked', 'dispersed'}, default 'dispersed'
37
+ Whether to display the bars stacked or dispersed.
38
+ orientation : {'va', 'ha'}, default 'va'
39
+ The orientation of the bars, 'va' for vertical and 'ha' for horizontal.
40
+ ax : plt.Axes or None, default None
41
+ The Axes object to plot on. If None, a new figure and Axes are created.
42
+ symbol : bool, default True
43
+ Whether to display values for each bar.
44
+ kwargs : dict
45
+ Additional keyword arguments passed to the barplot function.
46
+
47
+ Returns
48
+ -------
49
+ matplotlib.Axes
50
+ The Axes object containing the plot.
51
+
52
+ """
53
+ # data process
54
+ data = data_set.values
55
+
56
+ if data_std is None:
57
+ data_std = np.zeros(data.shape)
58
+ else:
59
+ data_std = data_std.values
60
+
61
+ groups, species = data.shape
62
+ groups_arr = np.arange(groups)
63
+ species_arr = np.arange(species)
64
+
65
+ total = np.array([data.sum(axis=1), ] * species).T
66
+
67
+ pct_data = data / total * 100
68
+ data_cum = pct_data.cumsum(axis=1)
69
+
70
+ # figure info
71
+ category_names = kwargs.get('ticks') or list(data_set.index)
72
+ title = kwargs.get('title', '')
73
+ colors = kwargs.get('colors') or (Color.colors1 if species == 6 else Color.getColor(num=species))
74
+
75
+ fig, ax = plt.subplots(**kwargs.get('fig_kws', {})) if ax is None else (ax.get_figure(), ax)
76
+
77
+ if style == "stacked":
78
+ for i in range(species):
79
+ widths = pct_data[:, i]
80
+ starts = data_cum[:, i] - pct_data[:, i]
81
+
82
+ if orientation == 'va':
83
+ _ = ax.bar(groups_arr, widths, bottom=starts, width=0.7, color=colors[i], label=labels[i],
84
+ edgecolor=None, capsize=None)
85
+ else:
86
+ _ = ax.barh(groups_arr, widths, left=starts, height=0.7, color=colors[i], label=labels[i],
87
+ edgecolor=None, capsize=None)
88
+ if symbol:
89
+ ax.bar_label(_, fmt=auto_label_pct, label_type='center', padding=0, fontsize=8, weight='bold')
90
+
91
+ if style == "dispersed":
92
+ width = 0.1
93
+ block = width / 4
94
+
95
+ for i in range(species):
96
+ val = data[:, i]
97
+ std = (0,) * groups, data_std[:, i]
98
+ if orientation == 'va':
99
+ _ = ax.bar(groups_arr + (i + 1) * (width + block), val, yerr=std, width=width, color=colors[i],
100
+ edgecolor=None, capsize=None)
101
+ else:
102
+ _ = ax.barh(groups_arr + (i + 1) * (width + block), val, xerr=std, height=width, color=colors[i],
103
+ edgecolor=None, capsize=None)
104
+ if symbol:
105
+ ax.bar_label(_, fmt=auto_label_pct, label_type='center', padding=0, fontsize=8, weight='bold')
106
+
107
+ if orientation == 'va':
108
+ xticks = groups_arr + (species / 2 + 0.5) * (width + block) if style == "dispersed" else groups_arr
109
+ ax.set_xticks(xticks, category_names, weight='bold')
110
+ ax.set_ylabel(Unit(unit) if style == "dispersed" else '$Contribution (\\%)$')
111
+ ax.set_ylim(0, None if style == "dispersed" else 100)
112
+ ax.legend(labels, bbox_to_anchor=(1, 1), loc='upper left', prop={'size': 8})
113
+
114
+ if orientation == 'ha':
115
+ ax.invert_yaxis()
116
+ yticks = groups_arr + 3.5 * (width + block) if style == "dispersed" else groups_arr
117
+ ax.set_yticks(yticks, category_names, weight='bold')
118
+ ax.set_xlabel(Unit(unit) if style == "dispersed" else '$Contribution (\\%)$')
119
+ ax.set_xlim(0, None if style == "dispersed" else 100)
120
+ ax.legend(labels, bbox_to_anchor=(1, 1), loc='upper left', prop={'size': 8})
121
+
122
+ # fig.savefig(f"Barplot_{title}")
123
+
124
+ plt.show()
125
+
126
+ return fig, ax