geocif 0.1.34__tar.gz → 0.1.36__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. {geocif-0.1.34/geocif.egg-info → geocif-0.1.36}/PKG-INFO +1 -1
  2. {geocif-0.1.34 → geocif-0.1.36}/geocif/analysis.py +3 -5
  3. {geocif-0.1.34 → geocif-0.1.36}/geocif/cei/indices.py +1 -0
  4. geocif-0.1.36/geocif/experiments.py +121 -0
  5. {geocif-0.1.34 → geocif-0.1.36}/geocif/geocif.py +23 -12
  6. {geocif-0.1.34 → geocif-0.1.36}/geocif/indices_runner.py +6 -6
  7. {geocif-0.1.34 → geocif-0.1.36}/geocif/indices_runner_v2.py +7 -10
  8. {geocif-0.1.34 → geocif-0.1.36}/geocif/ml/correlations.py +32 -16
  9. {geocif-0.1.34 → geocif-0.1.36/geocif.egg-info}/PKG-INFO +1 -1
  10. {geocif-0.1.34 → geocif-0.1.36}/geocif.egg-info/SOURCES.txt +1 -0
  11. {geocif-0.1.34 → geocif-0.1.36}/setup.py +1 -1
  12. {geocif-0.1.34 → geocif-0.1.36}/LICENSE +0 -0
  13. {geocif-0.1.34 → geocif-0.1.36}/MANIFEST.in +0 -0
  14. {geocif-0.1.34 → geocif-0.1.36}/README.md +0 -0
  15. {geocif-0.1.34 → geocif-0.1.36}/geocif/__init__.py +0 -0
  16. {geocif-0.1.34 → geocif-0.1.36}/geocif/agmet/__init__.py +0 -0
  17. {geocif-0.1.34 → geocif-0.1.36}/geocif/agmet/geoagmet.py +0 -0
  18. {geocif-0.1.34 → geocif-0.1.36}/geocif/agmet/plot.py +0 -0
  19. {geocif-0.1.34 → geocif-0.1.36}/geocif/agmet/utils.py +0 -0
  20. {geocif-0.1.34 → geocif-0.1.36}/geocif/backup/__init__.py +0 -0
  21. {geocif-0.1.34 → geocif-0.1.36}/geocif/backup/constants.py +0 -0
  22. {geocif-0.1.34 → geocif-0.1.36}/geocif/backup/features.py +0 -0
  23. {geocif-0.1.34 → geocif-0.1.36}/geocif/backup/geo.py +0 -0
  24. {geocif-0.1.34 → geocif-0.1.36}/geocif/backup/geocif.py +0 -0
  25. {geocif-0.1.34 → geocif-0.1.36}/geocif/backup/metadata.py +0 -0
  26. {geocif-0.1.34 → geocif-0.1.36}/geocif/backup/models.py +0 -0
  27. {geocif-0.1.34 → geocif-0.1.36}/geocif/cei/__init__.py +0 -0
  28. {geocif-0.1.34 → geocif-0.1.36}/geocif/cei/definitions.py +0 -0
  29. {geocif-0.1.34 → geocif-0.1.36}/geocif/logger.py +0 -0
  30. {geocif-0.1.34 → geocif-0.1.36}/geocif/ml/__init__.py +0 -0
  31. {geocif-0.1.34 → geocif-0.1.36}/geocif/ml/correlations_backup.py +0 -0
  32. {geocif-0.1.34 → geocif-0.1.36}/geocif/ml/embedding.py +0 -0
  33. {geocif-0.1.34 → geocif-0.1.36}/geocif/ml/feature_engineering.py +0 -0
  34. {geocif-0.1.34 → geocif-0.1.36}/geocif/ml/feature_selection.py +0 -0
  35. {geocif-0.1.34 → geocif-0.1.36}/geocif/ml/outliers.py +0 -0
  36. {geocif-0.1.34 → geocif-0.1.36}/geocif/ml/outlook.py +0 -0
  37. {geocif-0.1.34 → geocif-0.1.36}/geocif/ml/output.py +0 -0
  38. {geocif-0.1.34 → geocif-0.1.36}/geocif/ml/spatial_autocorrelation.py +0 -0
  39. {geocif-0.1.34 → geocif-0.1.36}/geocif/ml/stages.py +0 -0
  40. {geocif-0.1.34 → geocif-0.1.36}/geocif/ml/stats.py +0 -0
  41. {geocif-0.1.34 → geocif-0.1.36}/geocif/ml/trainers.py +0 -0
  42. {geocif-0.1.34 → geocif-0.1.36}/geocif/ml/trend.py +0 -0
  43. {geocif-0.1.34 → geocif-0.1.36}/geocif/ml/xai.py +0 -0
  44. {geocif-0.1.34 → geocif-0.1.36}/geocif/playground/__init__.py +0 -0
  45. {geocif-0.1.34 → geocif-0.1.36}/geocif/playground/automl.py +0 -0
  46. {geocif-0.1.34 → geocif-0.1.36}/geocif/playground/misc.py +0 -0
  47. {geocif-0.1.34 → geocif-0.1.36}/geocif/utils.py +0 -0
  48. {geocif-0.1.34 → geocif-0.1.36}/geocif/viz/__init__.py +0 -0
  49. {geocif-0.1.34 → geocif-0.1.36}/geocif/viz/plot.py +0 -0
  50. {geocif-0.1.34 → geocif-0.1.36}/geocif.egg-info/dependency_links.txt +0 -0
  51. {geocif-0.1.34 → geocif-0.1.36}/geocif.egg-info/not-zip-safe +0 -0
  52. {geocif-0.1.34 → geocif-0.1.36}/geocif.egg-info/top_level.txt +0 -0
  53. {geocif-0.1.34 → geocif-0.1.36}/requirements.txt +0 -0
  54. {geocif-0.1.34 → geocif-0.1.36}/setup.cfg +0 -0
  55. {geocif-0.1.34 → geocif-0.1.36}/tests/test_geocif.py +0 -0
{geocif-0.1.34/geocif.egg-info → geocif-0.1.36}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: geocif
- Version: 0.1.34
+ Version: 0.1.36
  Summary: Models to visualize and forecast crop conditions and yields
  Home-page: https://ritviksahajpal.github.io/yield_forecasting/
  Author: Ritvik Sahajpal
{geocif-0.1.34 → geocif-0.1.36}/geocif/analysis.py
@@ -162,8 +162,8 @@ class Geoanalysis:
  return pd.DataFrame(), pd.DataFrame()

  df_metrics = self._compute_metrics(df)
- #df_metrics = self._process_metrics(df_metrics)
- #self._plot_metrics(df_metrics)
+ # df_metrics = self._process_metrics(df_metrics)
+ # self._plot_metrics(df_metrics)

  df_regional_metrics_by_year = self._compute_regional_metrics(
  df, by="Harvest Year"
@@ -173,9 +173,7 @@ class Geoanalysis:
  )
  df_regional_metrics = self._average_mape(df_regional_metrics_by_year)
  breakpoint()
- self._store_results(
- None, df_regional_metrics, df_regional_metrics_by_year
- )
+ self._store_results(None, df_regional_metrics, df_regional_metrics_by_year)

  df_national_yield = self._compute_national_yield(df)
  self._plot_national_yield(df_national_yield)
{geocif-0.1.34 → geocif-0.1.36}/geocif/cei/indices.py
@@ -393,6 +393,7 @@ class CEIs:
  / self.admin_zone
  / self.country
  )
+
  os.makedirs(self.dir_output, exist_ok=True)
  os.makedirs(self.dir_intermediate, exist_ok=True)

geocif-0.1.36/geocif/experiments.py (new file)
@@ -0,0 +1,121 @@
+ import configparser
+ from pathlib import Path
+
+ import matplotlib.pyplot as plt
+ import sklearn
+
+ from geocif import geocif as gc
+ from geocif import logger as log
+
+ plt.style.use("default")
+ sklearn.set_config(transform_output="pandas")
+
+ import warnings
+
+ warnings.simplefilter(action="ignore", category=FutureWarning)
+
+
+ def run(inputs, logger, parser, section, item, type, values):
+ if type == "str":
+ original_value = parser.get(section, item)
+ elif type == "bool":
+ original_value = parser.getboolean(section, item)
+ elif type == "int":
+ original_value = parser.getint(section, item)
+ elif type == "float":
+ original_value = parser.getfloat(section, item)
+
+ for value in values:
+ if type == "str":
+ parser.set(section, item, value)
+ elif type == "bool":
+ parser.set(section, item, str(value))
+ elif type == "int":
+ parser.set(section, item, str(value))
+ elif type == "float":
+ parser.set(section, item, str(value))
+
+ gc.execute_models(inputs, logger, parser)
+
+ parser.set(section, item, original_value)
+
+ return parser
+
+
+ def main(path_config_files=[Path("../config/geocif.txt")]):
+ logger, parser = log.setup_logger_parser(path_config_files)
+ inputs = gc.gather_inputs(parser)
+
+ logger.info("=============================")
+ logger.info("\tStarting GEOCIF Experiments")
+ logger.info("=============================")
+
+ # Experiment 1: include_lat_lon
+ logger.info("Experiment 1: include_lat_lon")
+ parser = run(
+ inputs, logger, parser, "experiments", "include_lat_lon", "bool", [True, False]
+ )
+
+ # Experiment 2: feature_selection
+ logger.info("Experiment 2: feature_selection")
+ parser = run(
+ inputs,
+ logger,
+ parser,
+ "experiments",
+ "feature_selection",
+ "str",
+ ["SelectKBest", "BorutaPy", "Leshy", "RFECV", "RFE"],
+ )
+
+ # Experiment 3: lag_years
+ logger.info("Experiment 3: lag_years")
+ parser = run(
+ inputs, logger, parser, "experiments", "lag_years", "int", [1, 2, 3, 4, 5]
+ )
+
+ # Experiment 4: lag_yield_as_feature
+ logger.info("Experiment 4: lag_yield_as_feature")
+ parser = run(
+ inputs,
+ logger,
+ parser,
+ "experiments",
+ "lag_yield_as_feature",
+ "bool",
+ [True, False],
+ )
+
+ # Experiment 5: median_years
+ logger.info("Experiment 5: median_years")
+ parser = run(
+ inputs, logger, parser, "experiments", "median_years", "int", [2, 3, 4, 5]
+ )
+
+ # Experiment 6: median_yield_as_feature
+ logger.info("Experiment 6: median_yield_as_feature")
+ parser = run(
+ inputs,
+ logger,
+ parser,
+ "experiments",
+ "median_yield_as_feature",
+ "bool",
+ [True, False],
+ )
+
+ # Experiment 7: analogous_year_yield_as_feature
+ logger.info("Experiment 7: analogous_year_yield_as_feature")
+ parser = run(
+ inputs,
+ logger,
+ parser,
+ "experiments",
+ "analogous_year_yield_as_feature",
+ "bool",
+ [True, False],
+ )
+
+
+ if __name__ == "__main__":
+ main()
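For orientation: the new geocif/experiments.py is a driver script. main() builds the logger and config parser, gathers inputs, and calls run() once per experiment; run() temporarily overrides a single option in the [experiments] section of the config, executes the models for each candidate value, and then restores the original setting. A minimal sketch of invoking it from Python follows; the config path is illustrative and simply mirrors the default argument in the file.

# Sketch only: drive the new experiments module with an explicit config path.
# The path below is illustrative; it mirrors the default in experiments.main().
from pathlib import Path

from geocif import experiments

experiments.main(path_config_files=[Path("../config/geocif.txt")])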
{geocif-0.1.34 → geocif-0.1.36}/geocif/geocif.py
@@ -114,6 +114,7 @@ class Geocif:
  self.analogous_year_yield_as_feature = self.parser.getboolean(
  "ML", "analogous_year_yield_as_feature"
  )
+ self.include_lat_lon = self.parser.getboolean("ML", "include_lat_lon")
  self.spatial_autocorrelation = self.parser.getboolean(
  "ML", "spatial_autocorrelation"
  )
@@ -187,7 +188,6 @@ class Geocif:
  os.makedirs(self.dir_analysis, exist_ok=True)

  self.db_path = self.dir_db / self.db_forecasts
- output.config_to_db(self.db_path, self.parser, self.today_full)

  # self.pickle_file = self.base_dir / self.parser.get("outlook", "pickle_file")
  # obj_pickle = outlook.Outlook(self.pickle_file)
@@ -222,9 +222,9 @@
  self.logger.info(f"Selected features: {self.selected_features}")

  """ Update model to include conformal estimates """
- if "lat" not in self.selected_features:
+ if "lat" not in self.selected_features and self.include_lat_lon:
  self.selected_features.append("lat")
- if "lon" not in self.selected_features:
+ if "lon" not in self.selected_features and self.include_lat_lon:
  self.selected_features.append("lon")
  X_train = df_region[self.selected_features + self.cat_features]
  dir_output = (
@@ -358,7 +358,9 @@
  # Remove any categorical features
  X_test = X_test.drop(columns=self.cat_features)
  X = torch.from_numpy(X_test.to_numpy()).float()
- coord = torch.from_numpy(self.df_test[['lon', 'lat']].to_numpy()).float()
+ coord = torch.from_numpy(
+ self.df_test[["lon", "lat"]].to_numpy()
+ ).float()

  p = X.shape[1]
  n = X.shape[0]
@@ -367,7 +369,9 @@
  data = geospaNN.make_graph(X, Y, coord, nn)

  # remove categorical features from df_train
- data_train = df_region[self.selected_features + self.cat_features + [self.target]]
+ data_train = df_region[
+ self.selected_features + self.cat_features + [self.target]
+ ]
  w_train = data_train.y - self.estimate(data_train.x)

  else:
@@ -401,6 +405,7 @@
  df = pd.DataFrame(
  {
  "Experiment_ID": np.full(shp, experiment_id),
+ "Experiment Name": np.full(shp, self.experiment_name),
  "Date": np.full(shp, self.today),
  "Time": np.full(shp, now),
  "Country": np.full(shp, self.country),
@@ -479,6 +484,7 @@

  # Create an index based on following columns
  index_columns = [
+ "Experiment Name",
  "Model",
  "Cluster Strategy",
  "Country",
@@ -487,10 +493,9 @@
  "Harvest Year",
  "Stage Name",
  ]
- try:
- df.index = df.apply(lambda row: "_".join([str(row[col]) for col in index_columns]), axis=1)
- except Exception as e:
- breakpoint()
+ df.index = df.apply(
+ lambda row: "_".join([str(row[col]) for col in index_columns]), axis=1
+ )

  # name the index level
  df.index.set_names(["Index"], inplace=True)
@@ -558,8 +563,9 @@
  self.feature_names.append("FCST")

  # Add lat and lon to feature names
- self.feature_names.append("lat")
- self.feature_names.append("lon")
+ if self.include_lat_lon:
+ self.feature_names.append("lat")
+ self.feature_names.append("lon")

  self.selected_features = []

@@ -821,7 +827,11 @@
  self.dg_country["lon"] = self.dg_country.centroid.x

  # Add lat and lon columns to df by merging on Country Region column
- df = df.merge(self.dg_country[["Country Region", "lat", "lon"]].drop_duplicates(), on="Country Region", how="left")
+ df = df.merge(
+ self.dg_country[["Country Region", "lat", "lon"]].drop_duplicates(),
+ on="Country Region",
+ how="left",
+ )

  dict_kwargs = {}
  dict_kwargs["all_stages"] = self.all_stages
@@ -918,6 +928,7 @@

  self.forecast_season = forecast_season
  self.model_name = model
+ self.experiment_name = self.parser.get("ML", "experiment_name")
  self.ml_model = self.parser.getboolean(self.model_name, "ML_model")
  self.model_names = ast.literal_eval(self.parser.get(self.country, "models"))
  self.optimize = self.parser.getboolean(self.country, "optimize")
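For reference, the geocif.py changes above read two new options from the [ML] section of the configuration: include_lat_lon, a boolean gating whether lat/lon are appended to the feature list, and experiment_name, a string written into the "Experiment Name" column and the results index. A small sketch of how those reads behave; the option names come from the diff, but the values are illustrative since the real config file is not part of this diff.

# Illustrative only: option names come from the diff, values are made up.
import configparser

parser = configparser.ConfigParser()
parser.read_string(
    """
[ML]
include_lat_lon = True
experiment_name = lat_lon_on
"""
)

include_lat_lon = parser.getboolean("ML", "include_lat_lon")  # -> True
experiment_name = parser.get("ML", "experiment_name")  # -> "lat_lon_on"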
{geocif-0.1.34 → geocif-0.1.36}/geocif/indices_runner.py
@@ -165,13 +165,13 @@ class cei_runner(base.BaseGeo):
  combinations = [
  i
  for i in combinations
- if "angola_maize" in i[3] or
- "lesotho_maize" in i[3] or
+ if "angola_maize" in i[3] or "lesotho_maize" in i[3] or
  # "namibia_" in i[2] or
- "united_republic_of_tanzania_maize" in i[3] or
- "zambia_maize" in i[3] or "zimbabwe_maize" in i[3] or
- "south_africa_maize" in i[3] or
- "mozambique_maize" in i[3]
+ "united_republic_of_tanzania_maize" in i[3]
+ or "zambia_maize" in i[3]
+ or "zimbabwe_maize" in i[3]
+ or "south_africa_maize" in i[3]
+ or "mozambique_maize" in i[3]
  ]
  # "malawi" in i[2]]

{geocif-0.1.34 → geocif-0.1.36}/geocif/indices_runner_v2.py
@@ -46,7 +46,9 @@ class cei_runner(base.BaseGeo):
  self.parse_config()

  self.dir_input = Path(self.parser.get("PATHS", "dir_input"))
- self.base_dir = Path(self.parser.get("PATHS", "dir_crop_inputs"))
+ self.base_dir = Path(
+ r"D:\Users\ritvik\projects\GEOGLAM\Output\countries\malawi"
+ ) # Path(self.parser.get("PATHS", "dir_crop_inputs"))
  self.do_parallel = self.parser.getboolean("DEFAULT", "do_parallel")

  def collect_files(self):
@@ -155,22 +157,17 @@ class cei_runner(base.BaseGeo):
  "ndvi",
  False, # redo
  )
- for year in range(2001, ar.utcnow().year + 1)
+ for year in range(2024, ar.utcnow().year + 1)
  for status, path, filename, admin_zone, category in combinations
  ]

  # Only keep those entries in combinations where the third elemt is
  # mozambique, south_africa, angola or dem_people's_rep_of_korea
  # This is done to test the code for these countries
- #combinations = [
- # i
- # for i in combinations
- # if "ethiopia_maize_s1" in i[3]
- #]
- # "malawi" in i[2]]
+ combinations = [i for i in combinations if "malawi_maize_s1" in i[3]]

  if True:
- num_cpu = int(cpu_count() * 0.8)
+ num_cpu = int(cpu_count() * 0.5)
  with Pool(num_cpu) as p:
  for i, _ in enumerate(p.imap_unordered(indices.process, combinations)):
  pass
@@ -198,7 +195,7 @@ def run(path_config_files=[]):
  indices.validate_index_definitions()

  for method in [
- "phenological_stages", # "dekad_r" # "dekad_r"
+ "biweekly_r", # "dekad_r" # "dekad_r"
  ]: # , "full_season", "phenological_stages", "fraction_season"]:
  obj = cei_runner(path_config_files)
  obj.main(method)
{geocif-0.1.34 → geocif-0.1.36}/geocif/ml/correlations.py
@@ -246,6 +246,7 @@ def all_correlated_feature_by_time(df, **kwargs):
  Returns:

  """
+ THRESHOLD = 0.1
  national_correlation = kwargs.get("national_correlation")
  group_by = kwargs.get("groupby")
  combined_dict = kwargs.get("combined_dict")
@@ -264,8 +265,16 @@ def all_correlated_feature_by_time(df, **kwargs):
  df_corr = df_corr.dropna(thresh=len(df_corr) / 2, axis=1)

  if not df_corr.empty:
- df_tmp = df_corr[df_corr.columns[(df_corr.mean() > 0.1)]]
- dict_selected_features[region_id] = df_tmp.columns
+ df_tmp = df_corr[df_corr.columns[(abs(df_corr.mean()) > THRESHOLD)]]
+ # Add the columns to dict_selected_features along with the absolute mean value
+ absolute_medians = df_tmp.abs().median()
+
+ # Create a DataFrame to display the column names and their absolute median values
+ absolute_median_df = absolute_medians.reset_index()
+ absolute_median_df.columns = ['CEI', 'Median']
+
+ # Add the CEI and Median value to dict_selected_features
+ dict_selected_features[region_id] = absolute_median_df

  df_tmp2 = (
  df_tmp.median(axis=0)
@@ -293,24 +302,31 @@ def all_correlated_feature_by_time(df, **kwargs):
  else:
  # HACK
  df_corr = _all_correlated_feature_by_time(df, **kwargs)
- dict_selected_features[region_id] = df_corr.columns
- dict_best_cei[region_id] = {}

- # dict_selected_features[region_id] = dict_selected_features[0]
- # dict_best_cei[region_id] = dict_best_cei[0]
- # Combine all unique values from the existing dictionary elements
- # combined_metrics = set()
- # for key in dict_selected_features:
- # breakpoint()
- # combined_metrics.update(dict_selected_features[key])
- #
- # # Add the combined set as a new element with key 3
- # dict_selected_features[region_id] = sorted(list(combined_metrics))
+ df_tmp = df_corr[df_corr.columns[(abs(df_corr.mean()) > THRESHOLD)]]
+ # Add the columns to dict_selected_features along with the absolute mean value
+ absolute_medians = df_tmp.abs().median()
+
+ # Create a DataFrame to display the column names and their absolute median values
+ absolute_median_df = absolute_medians.reset_index()
+ absolute_median_df.columns = ['CEI', 'Median']
+
+ # Add the CEI and Median value to dict_selected_features
+ dict_selected_features[region_id] = absolute_median_df
+ dict_best_cei[region_id] = {}
  else:
  df_corr = _all_correlated_feature_by_time(df, **kwargs)
- dict_selected_features[0] = df_corr.columns
+ df_tmp = df_corr[df_corr.columns[(abs(df_corr.mean()) > THRESHOLD)]]
+ # Add the columns to dict_selected_features along with the absolute mean value
+ absolute_medians = df_tmp.abs().median()
+
+ # Create a DataFrame to display the column names and their absolute median values
+ absolute_median_df = absolute_medians.reset_index()
+ absolute_median_df.columns = ['CEI', 'Median']
+
+ # Add the CEI and Median value to dict_selected_features
+ dict_selected_features[0] = absolute_median_df

- df_corr = df_corr[df_corr.columns[(df_corr.mean() > 0.1)]]
  plot_feature_corr_by_time(df_corr, **kwargs)

  return dict_selected_features, dict_best_cei
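The correlations.py change replaces the plain column index previously stored in dict_selected_features with a two-column DataFrame of CEI names and their absolute median correlations, keeping only columns whose mean correlation exceeds the new THRESHOLD in absolute value. A toy, self-contained illustration of that shape (CEI names and numbers invented, not real correlation values):

# Toy data: rows are years, columns are CEIs, values are invented correlations.
import pandas as pd

THRESHOLD = 0.1  # same cutoff the diff introduces as a named constant

df_corr = pd.DataFrame(
    {"GDD": [0.42, 0.51, 0.47], "NDVI_max": [0.05, -0.02, 0.01], "PRCP": [-0.33, -0.29, -0.41]}
)

# Keep columns whose mean correlation exceeds the threshold in absolute value.
df_tmp = df_corr[df_corr.columns[(abs(df_corr.mean()) > THRESHOLD)]]

# Store the absolute median per retained CEI as a two-column DataFrame.
absolute_medians = df_tmp.abs().median()
absolute_median_df = absolute_medians.reset_index()
absolute_median_df.columns = ["CEI", "Median"]
print(absolute_median_df)  # columns: CEI, Median (GDD and PRCP survive the cutoff)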
{geocif-0.1.34 → geocif-0.1.36/geocif.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: geocif
- Version: 0.1.34
+ Version: 0.1.36
  Summary: Models to visualize and forecast crop conditions and yields
  Home-page: https://ritviksahajpal.github.io/yield_forecasting/
  Author: Ritvik Sahajpal
{geocif-0.1.34 → geocif-0.1.36}/geocif.egg-info/SOURCES.txt
@@ -6,6 +6,7 @@ setup.cfg
  setup.py
  geocif/__init__.py
  geocif/analysis.py
+ geocif/experiments.py
  geocif/geocif.py
  geocif/indices_runner.py
  geocif/indices_runner_v2.py
{geocif-0.1.34 → geocif-0.1.36}/setup.py
@@ -50,6 +50,6 @@ setup(
  test_suite="tests",
  tests_require=test_requirements,
  url="https://ritviksahajpal.github.io/yield_forecasting/",
- version="0.1.34",
+ version="0.1.36",
  zip_safe=False,
  )