upgini 1.2.30__py3-none-any.whl → 1.2.30a7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of upgini might be problematic. See the package registry's advisory page for more details.

upgini/__about__.py CHANGED
@@ -1 +1 @@
1
- __version__ = "1.2.30"
1
+ __version__ = "1.2.30a7 "
@@ -54,7 +54,6 @@ from upgini.metadata import (
54
54
  SYSTEM_RECORD_ID,
55
55
  TARGET,
56
56
  CVType,
57
- FeaturesMetadataV2,
58
57
  FileColumnMeaningType,
59
58
  ModelTaskType,
60
59
  RuntimeParameters,
@@ -350,7 +349,6 @@ class FeaturesEnricher(TransformerMixin):
350
349
  self.add_date_if_missing = add_date_if_missing
351
350
  self.features_info_display_handle = None
352
351
  self.data_sources_display_handle = None
353
- self.autofe_features_display_handle = None
354
352
  self.report_button_handle = None
355
353
 
356
354
  def _get_api_key(self):
@@ -1050,7 +1048,7 @@ class FeaturesEnricher(TransformerMixin):
1050
1048
  enriched_shaps = enriched_cv_result.shap_values
1051
1049
 
1052
1050
  if enriched_shaps is not None:
1053
- self._update_shap_values(trace_id, validated_X.columns.to_list(), enriched_shaps)
1051
+ self._update_shap_values(enriched_shaps)
1054
1052
 
1055
1053
  if enriched_metric is None:
1056
1054
  self.logger.warning(
@@ -1210,11 +1208,37 @@ class FeaturesEnricher(TransformerMixin):
1210
1208
  finally:
1211
1209
  self.logger.info(f"Calculating metrics elapsed time: {time.time() - start_time}")
1212
1210
 
1213
- def _update_shap_values(self, trace_id: str, x_columns: List[str], new_shaps: Dict[str, float]):
1211
+ def _update_shap_values(self, new_shaps: Dict[str, float]):
1214
1212
  new_shaps = {
1215
1213
  feature: _round_shap_value(shap) for feature, shap in new_shaps.items() if feature in self.feature_names_
1216
1214
  }
1217
- self.__prepare_feature_importances(trace_id, x_columns, new_shaps, silent=True)
1215
+ features_importances = list(new_shaps.items())
1216
+ features_importances.sort(key=lambda m: (-m[1], m[0]))
1217
+ self.feature_names_, self.feature_importances_ = zip(*features_importances)
1218
+ self.feature_names_ = list(self.feature_names_)
1219
+ self.feature_importances_ = list(self.feature_importances_)
1220
+
1221
+ feature_name_header = self.bundle.get("features_info_name")
1222
+ shap_value_header = self.bundle.get("features_info_shap")
1223
+
1224
+ def update_shap(row):
1225
+ return new_shaps.get(row[feature_name_header], row[shap_value_header])
1226
+
1227
+ self.features_info[shap_value_header] = self.features_info.apply(update_shap, axis=1)
1228
+ self._internal_features_info[shap_value_header] = self._internal_features_info.apply(update_shap, axis=1)
1229
+ self._features_info_without_links[shap_value_header] = self._features_info_without_links.apply(
1230
+ update_shap, axis=1
1231
+ )
1232
+ self.logger.info(f"Recalculated SHAP values:\n{self._features_info_without_links}")
1233
+
1234
+ self.features_info.sort_values(by=shap_value_header, ascending=False, inplace=True)
1235
+ self._internal_features_info.sort_values(by=shap_value_header, ascending=False, inplace=True)
1236
+ self._features_info_without_links.sort_values(by=shap_value_header, ascending=False, inplace=True)
1237
+
1238
+ self.relevant_data_sources = self._group_relevant_data_sources(self.features_info, self.bundle)
1239
+ self._relevant_data_sources_wo_links = self._group_relevant_data_sources(
1240
+ self._features_info_without_links, self.bundle
1241
+ )
1218
1242
 
1219
1243
  if self.features_info_display_handle is not None:
1220
1244
  try:
@@ -1227,7 +1251,7 @@ class FeaturesEnricher(TransformerMixin):
1227
1251
  display_handle=self.features_info_display_handle,
1228
1252
  )
1229
1253
  except (ImportError, NameError):
1230
- pass
1254
+ print(self._internal_features_info)
1231
1255
  if self.data_sources_display_handle is not None:
1232
1256
  try:
1233
1257
  _ = get_ipython() # type: ignore
@@ -1235,24 +1259,11 @@ class FeaturesEnricher(TransformerMixin):
1235
1259
  display_html_dataframe(
1236
1260
  self.relevant_data_sources,
1237
1261
  self._relevant_data_sources_wo_links,
1238
- self.bundle.get("relevant_data_sources_header"),
1262
+ self.bundle.get("relevant_features_header"),
1239
1263
  display_handle=self.data_sources_display_handle,
1240
1264
  )
1241
1265
  except (ImportError, NameError):
1242
- pass
1243
- if self.autofe_features_display_handle is not None:
1244
- try:
1245
- _ = get_ipython() # type: ignore
1246
- autofe_descriptions_df = self.get_autofe_features_description()
1247
- if autofe_descriptions_df is not None:
1248
- display_html_dataframe(
1249
- df=autofe_descriptions_df,
1250
- internal_df=autofe_descriptions_df,
1251
- header=self.bundle.get("autofe_descriptions_header"),
1252
- display_handle=self.autofe_features_display_handle,
1253
- )
1254
- except (ImportError, NameError):
1255
- pass
1266
+ print(self._relevant_data_sources_wo_links)
1256
1267
  if self.report_button_handle is not None:
1257
1268
  try:
1258
1269
  _ = get_ipython() # type: ignore
@@ -1436,11 +1447,7 @@ class FeaturesEnricher(TransformerMixin):
1436
1447
  client_features = [
1437
1448
  c
1438
1449
  for c in X_sampled.columns.to_list()
1439
- if (
1440
- not self.select_features
1441
- or c in self.feature_names_
1442
- or (self.fit_columns_renaming is not None and self.fit_columns_renaming.get(c) in self.feature_names_)
1443
- )
1450
+ if (not self.select_features or c in self.feature_names_)
1444
1451
  and c
1445
1452
  not in (
1446
1453
  excluding_search_keys
@@ -1657,10 +1664,7 @@ class FeaturesEnricher(TransformerMixin):
1657
1664
  generated_features = []
1658
1665
  if date_column is not None:
1659
1666
  converter = DateTimeSearchKeyConverter(date_column, self.date_format, self.logger, self.bundle)
1660
- # Leave original date column values
1661
- df_with_date_features = converter.convert(df, keep_time=True)
1662
- df_with_date_features[date_column] = df[date_column]
1663
- df = df_with_date_features
1667
+ df = converter.convert(df, keep_time=True)
1664
1668
  generated_features = converter.generated_features
1665
1669
 
1666
1670
  email_columns = SearchKey.find_all_keys(search_keys, SearchKey.EMAIL)
@@ -1669,10 +1673,9 @@ class FeaturesEnricher(TransformerMixin):
1669
1673
  df = generator.generate(df)
1670
1674
  generated_features.extend(generator.generated_features)
1671
1675
 
1672
- # normalizer = Normalizer(self.bundle, self.logger)
1673
- # df, search_keys, generated_features = normalizer.normalize(df, search_keys, generated_features)
1674
- # columns_renaming = normalizer.columns_renaming
1675
- columns_renaming = {c: c for c in df.columns}
1676
+ normalizer = Normalizer(self.bundle, self.logger)
1677
+ df, search_keys, generated_features = normalizer.normalize(df, search_keys, generated_features)
1678
+ columns_renaming = normalizer.columns_renaming
1676
1679
 
1677
1680
  df, _ = clean_full_duplicates(df, logger=self.logger, bundle=self.bundle)
1678
1681
 
@@ -1988,19 +1991,9 @@ class FeaturesEnricher(TransformerMixin):
1988
1991
  file_metadata = self._search_task.get_file_metadata(str(uuid.uuid4()))
1989
1992
  search_keys = file_metadata.search_types()
1990
1993
  if SearchKey.IPV6_ADDRESS in search_keys:
1991
- # search_keys.remove(SearchKey.IPV6_ADDRESS)
1992
- search_keys.pop(SearchKey.IPV6_ADDRESS, None)
1994
+ search_keys.remove(SearchKey.IPV6_ADDRESS)
1993
1995
 
1994
- keys = (
1995
- "{"
1996
- + ", ".join(
1997
- [
1998
- f'"{key.name}": {{"name": "{name}", "value": "{key_example(key)}"}}'
1999
- for key, name in search_keys.items()
2000
- ]
2001
- )
2002
- + "}"
2003
- )
1996
+ keys = "{" + ", ".join([f'"{key.name}": "{key_example(key)}"' for key in search_keys]) + "}"
2004
1997
  features_for_transform = self._search_task.get_features_for_transform()
2005
1998
  if features_for_transform:
2006
1999
  original_features_for_transform = [
@@ -2112,7 +2105,7 @@ class FeaturesEnricher(TransformerMixin):
2112
2105
  date_column = SearchKey.find_key(search_keys, [SearchKey.DATE, SearchKey.DATETIME])
2113
2106
  if date_column is not None:
2114
2107
  converter = DateTimeSearchKeyConverter(date_column, self.date_format, self.logger, bundle=self.bundle)
2115
- df = converter.convert(df, keep_time=True)
2108
+ df = converter.convert(df)
2116
2109
  self.logger.info(f"Date column after convertion: {df[date_column]}")
2117
2110
  generated_features.extend(converter.generated_features)
2118
2111
  else:
@@ -2207,12 +2200,11 @@ class FeaturesEnricher(TransformerMixin):
2207
2200
 
2208
2201
  if add_fit_system_record_id:
2209
2202
  df = self.__add_fit_system_record_id(df, search_keys, SYSTEM_RECORD_ID)
2203
+ if DateTimeSearchKeyConverter.DATETIME_COL in df.columns:
2204
+ df = df.drop(columns=DateTimeSearchKeyConverter.DATETIME_COL)
2210
2205
  df = df.rename(columns={SYSTEM_RECORD_ID: SORT_ID})
2211
2206
  features_not_to_pass.append(SORT_ID)
2212
2207
 
2213
- if DateTimeSearchKeyConverter.DATETIME_COL in df.columns:
2214
- df = df.drop(columns=DateTimeSearchKeyConverter.DATETIME_COL)
2215
-
2216
2208
  # search keys might be changed after explode
2217
2209
  columns_for_system_record_id = sorted(list(search_keys.keys()) + features_for_transform)
2218
2210
  df[SYSTEM_RECORD_ID] = pd.util.hash_pandas_object(df[columns_for_system_record_id], index=False).astype(
@@ -2231,7 +2223,7 @@ class FeaturesEnricher(TransformerMixin):
2231
2223
 
2232
2224
  combined_search_keys = combine_search_keys(search_keys.keys())
2233
2225
 
2234
- df_without_features = df.drop(columns=features_not_to_pass, errors="ignore")
2226
+ df_without_features = df.drop(columns=features_not_to_pass)
2235
2227
 
2236
2228
  df_without_features, full_duplicates_warning = clean_full_duplicates(
2237
2229
  df_without_features, self.logger, bundle=self.bundle
@@ -2803,12 +2795,7 @@ class FeaturesEnricher(TransformerMixin):
2803
2795
  autofe_description = self.get_autofe_features_description()
2804
2796
  if autofe_description is not None:
2805
2797
  self.logger.info(f"AutoFE descriptions: {autofe_description}")
2806
- self.autofe_features_display_handle = display_html_dataframe(
2807
- df=autofe_description,
2808
- internal_df=autofe_description,
2809
- header=self.bundle.get("autofe_descriptions_header"),
2810
- display_id="autofe_descriptions",
2811
- )
2798
+ display_html_dataframe(autofe_description, autofe_description, "*Description of AutoFE feature names")
2812
2799
 
2813
2800
  if self._has_paid_features(exclude_features_sources):
2814
2801
  if calculate_metrics is not None and calculate_metrics:
@@ -3520,9 +3507,7 @@ class FeaturesEnricher(TransformerMixin):
3520
3507
 
3521
3508
  return result_train, result_eval_sets
3522
3509
 
3523
- def __prepare_feature_importances(
3524
- self, trace_id: str, x_columns: List[str], updated_shaps: Optional[Dict[str, float]] = None, silent=False
3525
- ):
3510
+ def __prepare_feature_importances(self, trace_id: str, x_columns: List[str], silent=False):
3526
3511
  if self._search_task is None:
3527
3512
  raise NotFittedError(self.bundle.get("transform_unfitted_enricher"))
3528
3513
  features_meta = self._search_task.get_all_features_metadata_v2()
@@ -3539,10 +3524,6 @@ class FeaturesEnricher(TransformerMixin):
3539
3524
  features_info_without_links = []
3540
3525
  internal_features_info = []
3541
3526
 
3542
- if updated_shaps is not None:
3543
- for fm in features_meta:
3544
- fm.shap_value = updated_shaps.get(fm.name, 0.0)
3545
-
3546
3527
  features_meta.sort(key=lambda m: (-m.shap_value, m.name))
3547
3528
  for feature_meta in features_meta:
3548
3529
  if feature_meta.name in original_names_dict.keys():
@@ -3595,22 +3576,7 @@ class FeaturesEnricher(TransformerMixin):
3595
3576
  autofe_meta = self._search_task.get_autofe_metadata()
3596
3577
  if autofe_meta is None:
3597
3578
  return None
3598
- if len(self._internal_features_info) != 0:
3599
-
3600
- def to_feature_meta(row):
3601
- fm = FeaturesMetadataV2(
3602
- name=row[bundle.get("features_info_name")],
3603
- type="",
3604
- source="",
3605
- hit_rate=row[bundle.get("features_info_hitrate")],
3606
- shap_value=row[bundle.get("features_info_shap")],
3607
- data_source=row[bundle.get("features_info_source")],
3608
- )
3609
- return fm
3610
-
3611
- features_meta = self._internal_features_info.apply(to_feature_meta, axis=1).to_list()
3612
- else:
3613
- features_meta = self._search_task.get_all_features_metadata_v2()
3579
+ features_meta = self._search_task.get_all_features_metadata_v2()
3614
3580
 
3615
3581
  def get_feature_by_name(name: str):
3616
3582
  for m in features_meta:
@@ -3639,32 +3605,27 @@ class FeaturesEnricher(TransformerMixin):
3639
3605
  self.logger.warning(f"Feature meta for display index {m.display_index} not found")
3640
3606
  continue
3641
3607
  description["shap"] = feature_meta.shap_value
3642
- description[self.bundle.get("autofe_descriptions_sources")] = feature_meta.data_source.replace(
3643
- "AutoFE: features from ", ""
3644
- ).replace("AutoFE: feature from ", "")
3645
- description[self.bundle.get("autofe_descriptions_feature_name")] = feature_meta.name
3608
+ description["Sources"] = feature_meta.data_source.replace("AutoFE: features from ", "").replace(
3609
+ "AutoFE: feature from ", ""
3610
+ )
3611
+ description["Feature name"] = feature_meta.name
3646
3612
 
3647
3613
  feature_idx = 1
3648
3614
  for bc in m.base_columns:
3649
- description[self.bundle.get("autofe_descriptions_feature").format(feature_idx)] = bc.hashed_name
3615
+ description[f"Feature {feature_idx}"] = bc.hashed_name
3650
3616
  feature_idx += 1
3651
3617
 
3652
- description[self.bundle.get("autofe_descriptions_function")] = ",".join(
3653
- sorted(autofe_feature.get_all_operand_names())
3654
- )
3618
+ description["Function"] = ",".join(sorted(autofe_feature.get_all_operand_names()))
3655
3619
 
3656
3620
  descriptions.append(description)
3657
3621
 
3658
3622
  if len(descriptions) == 0:
3659
3623
  return None
3660
3624
 
3661
- descriptions_df = (
3662
- pd.DataFrame(descriptions)
3663
- .fillna("")
3664
- .sort_values(by="shap", ascending=False)
3665
- .drop(columns="shap")
3666
- .reset_index(drop=True)
3667
- )
3625
+ descriptions_df = pd.DataFrame(descriptions)
3626
+ descriptions_df.fillna("", inplace=True)
3627
+ descriptions_df.sort_values(by="shap", ascending=False, inplace=True)
3628
+ descriptions_df.drop(columns="shap", inplace=True)
3668
3629
  return descriptions_df
3669
3630
 
3670
3631
  except Exception:
upgini/http.py CHANGED
@@ -882,7 +882,7 @@ class _RestClient:
882
882
  if content_type:
883
883
  headers[_RestClient.CONTENT_TYPE_HEADER_NAME] = content_type
884
884
  if trace_id:
885
- headers[_RestClient.TRACE_ID_HEADER_NAME] = str(trace_id)
885
+ headers[_RestClient.TRACE_ID_HEADER_NAME] = trace_id
886
886
  for header_key, header_value in additional_headers.items():
887
887
  headers[header_key] = header_value
888
888
  return headers
upgini/metadata.py CHANGED
@@ -1,7 +1,7 @@
1
1
  from __future__ import annotations
2
2
 
3
3
  from enum import Enum
4
- from typing import Any, Dict, List, Optional, Union
4
+ from typing import Any, Dict, List, Optional, Set, Union
5
5
 
6
6
  from pydantic import BaseModel
7
7
 
@@ -228,13 +228,13 @@ class FileMetadata(BaseModel):
228
228
  return c
229
229
  return None
230
230
 
231
- def search_types(self) -> Dict[SearchKey, str]:
232
- search_keys = dict()
231
+ def search_types(self) -> Set[SearchKey]:
232
+ search_keys = set()
233
233
  for keys_group in self.searchKeys:
234
234
  for key in keys_group:
235
235
  column = self.column_by_name(key)
236
236
  if column:
237
- search_keys[SearchKey.from_meaning_type(column.meaningType)] = column.name
237
+ search_keys.add(SearchKey.from_meaning_type(column.meaningType))
238
238
  return search_keys
239
239
 
240
240
 
@@ -251,14 +251,6 @@ relevant_data_sources_header=Relevant data sources
251
251
  relevant_data_sources_all_shap=All features SHAP
252
252
  relevant_data_sources_number=Number of relevant features
253
253
 
254
- # Autofe descriptions
255
- autofe_descriptions_header=*Description of AutoFE feature names
256
- autofe_descriptions_sources=Sources
257
- autofe_descriptions_feature_name=Feature name
258
- autofe_descriptions_feature=Feature {}
259
- autofe_descriptions_function=Function
260
-
261
-
262
254
  # Quality metrics table
263
255
  quality_metrics_header=Accuracy after enrichment
264
256
  quality_metrics_train_segment=Train
@@ -39,7 +39,7 @@ class EmailDomainGenerator:
39
39
  for email_col in self.email_columns:
40
40
  domain_feature = email_col + self.DOMAIN_SUFFIX
41
41
  if domain_feature not in df.columns:
42
- df[domain_feature] = df[email_col].apply(self._email_to_domain).astype("string")
42
+ df[domain_feature] = df[email_col].apply(self._email_to_domain)
43
43
  self.generated_features.append(domain_feature)
44
44
  return df
45
45
 
@@ -2,7 +2,6 @@ import logging
2
2
  from logging import Logger
3
3
  from typing import Dict, List, Optional, Tuple
4
4
 
5
- import numpy as np
6
5
  import pandas as pd
7
6
  from pandas.api.types import is_integer_dtype, is_object_dtype, is_string_dtype
8
7
 
@@ -84,21 +83,10 @@ class FeaturesValidator:
84
83
  return [
85
84
  i
86
85
  for i in df
87
- if (is_object_dtype(df[i]) or is_string_dtype(df[i]) or FeaturesValidator.__is_integer(df[i]))
86
+ if (is_object_dtype(df[i]) or is_string_dtype(df[i]) or is_integer_dtype(df[i]))
88
87
  and (df[i].nunique(dropna=False) / row_count >= 0.85)
89
88
  ]
90
89
 
91
- @staticmethod
92
- def __is_integer(series: pd.Series) -> bool:
93
- return (
94
- is_integer_dtype(series)
95
- or series.dropna()
96
- .apply(
97
- lambda f: (float.is_integer(f) and abs(f) < np.iinfo(np.int64).max) if isinstance(f, float) else False
98
- )
99
- .all()
100
- )
101
-
102
90
  @staticmethod
103
91
  def find_constant_features(df: pd.DataFrame) -> List[str]:
104
92
  return [i for i in df if df[i].nunique() <= 1]
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: upgini
3
- Version: 1.2.30
3
+ Version: 1.2.30a7
4
4
  Summary: Intelligent data search & enrichment for Machine Learning
5
5
  Project-URL: Bug Reports, https://github.com/upgini/upgini/issues
6
6
  Project-URL: Homepage, https://upgini.com/
@@ -1,12 +1,12 @@
1
- upgini/__about__.py,sha256=WGj1CvVJizDkAvd9BtLpwcsI-hzacJoXGbC8sVpoHYk,23
1
+ upgini/__about__.py,sha256=xOnRXUeEWrmeV3WnxmWZqXNizXXkbEqZwcHHbNkMC3I,26
2
2
  upgini/__init__.py,sha256=LXSfTNU0HnlOkE69VCxkgIKDhWP-JFo_eBQ71OxTr5Y,261
3
3
  upgini/ads.py,sha256=nvuRxRx5MHDMgPr9SiU-fsqRdFaBv8p4_v1oqiysKpc,2714
4
4
  upgini/dataset.py,sha256=iPFiMJtk4HF1ytw9wCQr8H9RfoOKj_TIo8XYZKWgcMc,31331
5
5
  upgini/errors.py,sha256=2b_Wbo0OYhLUbrZqdLIx5jBnAsiD1Mcenh-VjR4HCTw,950
6
- upgini/features_enricher.py,sha256=lNfu5Z40NmkkGJScKAwe_0VBtL8liePifuAlKE_flfA,192053
7
- upgini/http.py,sha256=plZGTGoi1h2edd8Cnjt4eYB8t4NbBGnZz7DtPTByiNc,42885
6
+ upgini/features_enricher.py,sha256=yiMy36rV0j5VCO6mwmUEbDrQTEpQSJDFEDlBLpgO5DU,190878
7
+ upgini/http.py,sha256=21asexflvavydzCOONJDGQBtQanCElrbnqLXakJ9Cu8,42880
8
8
  upgini/lazy_import.py,sha256=74gQ8JuA48BGRLxAo7lNHNKY2D2emMxrUxKGdxVGhuY,1012
9
- upgini/metadata.py,sha256=lUa2xYhBhnCeTqNt6lWc9iP_YuikYGIsDSn8Vwyjv1I,11235
9
+ upgini/metadata.py,sha256=osmzdNESeh7yP3BZday6N9Q3eaIHfzhhRM1d6NSgcf0,11223
10
10
  upgini/metrics.py,sha256=hr7UwLphbZ_FEglLuO2lzr_pFgxOJ4c3WBeg7H-fNqY,35521
11
11
  upgini/search_task.py,sha256=qxUxAD-bed-FpZYmTB_4orW7YJsW_O6a1TcgnZIRFr4,17307
12
12
  upgini/spinner.py,sha256=4iMd-eIe_BnkqFEMIliULTbj6rNI2HkN_VJ4qYe0cUc,1118
@@ -30,7 +30,7 @@ upgini/normalizer/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU
30
30
  upgini/normalizer/normalize_utils.py,sha256=Ft2MwSgVoBilXAORAOYAuwPD79GOLfwn4qQE3IUFzzg,7218
31
31
  upgini/resource_bundle/__init__.py,sha256=S5F2G47pnJd2LDpmFsjDqEwiKkP8Hm-hcseDbMka6Ko,8345
32
32
  upgini/resource_bundle/exceptions.py,sha256=5fRvx0_vWdE1-7HcSgF0tckB4A9AKyf5RiinZkInTsI,621
33
- upgini/resource_bundle/strings.properties,sha256=bKw_rjZZTomLJhQBqiM7_P2EoRq45_Ng2gP4WE6MRBE,26921
33
+ upgini/resource_bundle/strings.properties,sha256=fOAeLTsnx8xvJK-7RPFXprATG0n56jeCdse8sQTuVX8,26674
34
34
  upgini/resource_bundle/strings_widget.properties,sha256=gOdqvZWntP2LCza_tyVk1_yRYcG4c04K9sQOAVhF_gw,1577
35
35
  upgini/sampler/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
36
36
  upgini/sampler/base.py,sha256=7GpjYqjOp58vYcJLiX__1R5wjUlyQbxvHJ2klFnup_M,6389
@@ -46,10 +46,10 @@ upgini/utils/cv_utils.py,sha256=w6FQb9nO8BWDx88EF83NpjPLarK4eR4ia0Wg0kLBJC4,3525
46
46
  upgini/utils/datetime_utils.py,sha256=F61i2vZCB6eUy4WwodDyPi50XKPbhOHsxDrU6tGa6CM,13133
47
47
  upgini/utils/deduplicate_utils.py,sha256=SMZx9IKIhWI5HqXepfKiQb3uDJrogQZtG6jcWuMo5Z4,8855
48
48
  upgini/utils/display_utils.py,sha256=DsBjJ8jEYAh8BPgfAbzq5imoGFV6IACP20PQ78BQCX0,11964
49
- upgini/utils/email_utils.py,sha256=GbnhHJn1nhUBytmK6PophYqaoq4t7Lp6i0-O0Gd3RV8,5265
49
+ upgini/utils/email_utils.py,sha256=sCe7G-04-Wu9tRnL15l4Q6RVetCamJZyLjxLkWbxS-A,5248
50
50
  upgini/utils/fallback_progress_bar.py,sha256=PDaKb8dYpVZaWMroNcOHsTc3pSjgi9mOm0--cOFTwJ0,1074
51
51
  upgini/utils/feature_info.py,sha256=Tp_2g5-rCjY4NpzKhzxwNxuqH5FFL8vG94OU5kH6wzk,6702
52
- upgini/utils/features_validator.py,sha256=lEfmk4DoxZ4ooOE1HC0ZXtUb_lFKRFHIrnFULZ4_rL8,3746
52
+ upgini/utils/features_validator.py,sha256=1Xj2ir5LzzYiX3NH8o88c2J6RTTetaTwu0MhjLTyuvM,3378
53
53
  upgini/utils/format.py,sha256=Yv5cvvSs2bOLUzzNu96Pu33VMDNbabio92QepUj41jU,243
54
54
  upgini/utils/ip_utils.py,sha256=Q6vb7Sr5Khx3Sq3eENjW2qCXKej_S5jZbneH6zEOkzQ,5171
55
55
  upgini/utils/phone_utils.py,sha256=IrbztLuOJBiePqqxllfABWfYlfAjYevPhXKipl95wUI,10432
@@ -59,7 +59,7 @@ upgini/utils/sklearn_ext.py,sha256=13jQS_k7v0aUtudXV6nGUEWjttPQzAW9AFYL5wgEz9k,4
59
59
  upgini/utils/target_utils.py,sha256=PU77nIhTz7IHbC4rpTpxrVxib6cdpRL9F1dhkjIffLY,10225
60
60
  upgini/utils/track_info.py,sha256=G5Lu1xxakg2_TQjKZk4b5SvrHsATTXNVV3NbvWtT8k8,5663
61
61
  upgini/utils/warning_counter.py,sha256=-GRY8EUggEBKODPSuXAkHn9KnEQwAORC0mmz_tim-PM,254
62
- upgini-1.2.30.dist-info/METADATA,sha256=93iMDL28nXF2DJaDP-oUS2CeCFHgnjk2zmAhge2LAHg,48578
63
- upgini-1.2.30.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
64
- upgini-1.2.30.dist-info/licenses/LICENSE,sha256=5RRzgvdJUu3BUDfv4bzVU6FqKgwHlIay63pPCSmSgzw,1514
65
- upgini-1.2.30.dist-info/RECORD,,
62
+ upgini-1.2.30a7.dist-info/METADATA,sha256=JsVd5wcm_dhko9GQN2KxBWJhILrRr6sMN5H5SfwXPdE,48580
63
+ upgini-1.2.30a7.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
64
+ upgini-1.2.30a7.dist-info/licenses/LICENSE,sha256=5RRzgvdJUu3BUDfv4bzVU6FqKgwHlIay63pPCSmSgzw,1514
65
+ upgini-1.2.30a7.dist-info/RECORD,,