upgini 1.2.85a3857.dev1__py3-none-any.whl → 1.2.86a1__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.

Potentially problematic release.


This version of upgini might be problematic. Click here for more details.

upgini/__about__.py CHANGED
@@ -1 +1 @@
1
- __version__ = "1.2.85a3857.dev1"
1
+ __version__ = "1.2.86a1"
upgini/autofe/feature.py CHANGED
@@ -8,6 +8,7 @@ from pandas._typing import DtypeObj
8
8
 
9
9
  from upgini.autofe.all_operators import find_op
10
10
  from upgini.autofe.operator import Operator, PandasOperator
11
+ from upgini.autofe.utils import pydantic_dump_method, pydantic_parse_method
11
12
 
12
13
 
13
14
  class Column:
@@ -80,9 +81,9 @@ class Feature:
80
81
  self.alias = alias
81
82
 
82
83
  def set_op_params(self, params: Optional[Dict[str, str]]) -> "Feature":
83
- obj_dict = self.op.dict().copy()
84
+ obj_dict = pydantic_dump_method(self.op)().copy()
84
85
  obj_dict.update(params or {})
85
- self.op = self.op.__class__.parse_obj(obj_dict)
86
+ self.op = pydantic_parse_method(self.op.__class__)(obj_dict)
86
87
  self.op.set_params(params)
87
88
 
88
89
  for child in self.children:
upgini/autofe/utils.py CHANGED
@@ -81,3 +81,31 @@ def pydantic_validator(field_name: str, *fields, mode: str = "before", **kwargs)
81
81
  return wrapper
82
82
 
83
83
  return decorator
84
+
85
+
86
+ def pydantic_json_method(obj):
87
+ if get_pydantic_version() >= 2:
88
+ return obj.model_dump_json
89
+ else:
90
+ return obj.json
91
+
92
+
93
+ def pydantic_parse_method(cls):
94
+ if get_pydantic_version() >= 2:
95
+ return cls.model_validate
96
+ else:
97
+ return cls.parse_obj
98
+
99
+
100
+ def pydantic_dump_method(obj):
101
+ if get_pydantic_version() >= 2:
102
+ return obj.model_dump
103
+ else:
104
+ return obj.dict
105
+
106
+
107
+ def pydantic_copy_method(obj):
108
+ if get_pydantic_version() >= 2:
109
+ return obj.model_copy
110
+ else:
111
+ return obj.copy
@@ -3,6 +3,7 @@ import datetime
3
3
  import gc
4
4
  import hashlib
5
5
  import itertools
6
+ import json
6
7
  import logging
7
8
  import numbers
8
9
  import os
@@ -59,6 +60,7 @@ from upgini.metadata import (
59
60
  CVType,
60
61
  FeaturesMetadataV2,
61
62
  FileColumnMeaningType,
63
+ FileColumnMetadata,
62
64
  ModelTaskType,
63
65
  RuntimeParameters,
64
66
  SearchKey,
@@ -2152,7 +2154,7 @@ class FeaturesEnricher(TransformerMixin):
2152
2154
  trace_id = trace_id or uuid.uuid4()
2153
2155
  return search_task.get_progress(trace_id)
2154
2156
 
2155
- def get_transactional_transform_api(self, only_online_sources=False):
2157
+ def display_transactional_transform_api(self, only_online_sources=False):
2156
2158
  if self.api_key is None:
2157
2159
  raise ValidationError(self.bundle.get("transactional_transform_unregistered"))
2158
2160
  if self._search_task is None:
@@ -2178,20 +2180,36 @@ class FeaturesEnricher(TransformerMixin):
2178
2180
  return "test_value"
2179
2181
 
2180
2182
  file_metadata = self._search_task.get_file_metadata(str(uuid.uuid4()))
2183
+
2184
+ def get_column_meta(column_name: str) -> FileColumnMetadata:
2185
+ for c in file_metadata.columns:
2186
+ if c.name == column_name:
2187
+ return c
2188
+
2181
2189
  search_keys = file_metadata.search_types()
2182
2190
  if SearchKey.IPV6_ADDRESS in search_keys:
2183
2191
  search_keys.pop(SearchKey.IPV6_ADDRESS, None)
2184
- original_names = {c.name: c.originalName for c in file_metadata.columns}
2185
- keys = (
2186
- "{"
2187
- + ", ".join(
2188
- [
2189
- f'"{key.name}": {{"name": "{original_names.get(name, name)}", "value": "{key_example(key)}"}}'
2190
- for key, name in search_keys.items()
2191
- ]
2192
- )
2193
- + "}"
2194
- )
2192
+
2193
+ search_keys_with_values = dict()
2194
+ for sk_type, sk_name in search_keys.items():
2195
+ if sk_type == SearchKey.IPV6_ADDRESS:
2196
+ continue
2197
+
2198
+ sk_meta = get_column_meta(sk_name)
2199
+ if sk_meta is None:
2200
+ search_keys_with_values[sk_type.name] = [{"name": sk_name, "value": key_example(sk_type)}]
2201
+ else:
2202
+ if sk_meta.isUnnest:
2203
+ search_keys_with_values[sk_type.name] = [
2204
+ {"name": name, "value": key_example(sk_type)} for name in sk_meta.unnestKeyNames
2205
+ ]
2206
+ else:
2207
+ search_keys_with_values[sk_type.name] = [{
2208
+ "name": sk_meta.originalName,
2209
+ "value": key_example(sk_type),
2210
+ }]
2211
+
2212
+ keys_section = json.dumps(search_keys_with_values)
2195
2213
  features_for_transform = self._search_task.get_features_for_transform()
2196
2214
  if features_for_transform:
2197
2215
  original_features_for_transform = [
@@ -2212,7 +2230,7 @@ class FeaturesEnricher(TransformerMixin):
2212
2230
  curl 'https://search.upgini.com/online/api/http_inference_trigger?search_id={search_id}' \\
2213
2231
  -H 'Authorization: {self.api_key}' \\
2214
2232
  -H 'Content-Type: application/json' \\
2215
- -d '{{"search_keys": {keys}{features_section}, "only_online_sources": {str(only_online_sources).lower()}}}'
2233
+ -d '{{"search_keys": {keys_section}{features_section}, "only_online_sources": {str(only_online_sources).lower()}}}'
2216
2234
 
2217
2235
  {Format.BOLD}Python{Format.END}:
2218
2236
 
@@ -2221,13 +2239,12 @@ import requests
2221
2239
  response = requests.post(
2222
2240
  url='https://search.upgini.com/online/api/http_inference_trigger?search_id={search_id}',
2223
2241
  headers={{'Authorization': '{self.api_key}'}},
2224
- json={{"search_keys": {keys}{features_section}, "only_online_sources": {only_online_sources}}}
2242
+ json={{"search_keys": {keys_section}{features_section}, "only_online_sources": {only_online_sources}}}
2225
2243
  )
2226
2244
  if response.status_code == 200:
2227
2245
  print(response.json())
2228
2246
  """
2229
-
2230
- return api_example
2247
+ print(api_example)
2231
2248
 
2232
2249
  def _get_copy_of_runtime_parameters(self) -> RuntimeParameters:
2233
2250
  return RuntimeParameters(properties=self.runtime_parameters.properties.copy())
@@ -2288,7 +2305,7 @@ if response.status_code == 200:
2288
2305
  msg = self.bundle.get("online_api_features_transform").format(online_api_features)
2289
2306
  self.logger.warning(msg)
2290
2307
  print(msg)
2291
- print(self.get_transactional_transform_api(only_online_sources=True))
2308
+ self.display_transactional_transform_api(only_online_sources=True)
2292
2309
 
2293
2310
  if not metrics_calculation:
2294
2311
  transform_usage = self.rest_client.get_current_transform_usage(trace_id)
upgini/http.py CHANGED
@@ -24,6 +24,12 @@ from pythonjsonlogger import json as jsonlogger
24
24
  from requests.exceptions import RequestException
25
25
 
26
26
  from upgini.__about__ import __version__
27
+ from upgini.autofe.utils import (
28
+ pydantic_copy_method,
29
+ pydantic_dump_method,
30
+ pydantic_json_method,
31
+ pydantic_parse_method,
32
+ )
27
33
  from upgini.errors import (
28
34
  HttpError,
29
35
  UnauthorizedError,
@@ -459,19 +465,19 @@ class _RestClient:
459
465
  content = file.read()
460
466
  md5_hash.update(content)
461
467
  digest = md5_hash.hexdigest()
462
- metadata_with_md5 = metadata.model_copy(update={"checksumMD5": digest})
468
+ metadata_with_md5 = pydantic_copy_method(metadata)(update={"checksumMD5": digest})
463
469
 
464
470
  # digest_sha256 = hashlib.sha256(
465
471
  # pd.util.hash_pandas_object(pd.read_parquet(file_path, engine="fastparquet")).values
466
472
  # ).hexdigest()
467
473
  digest_sha256 = self.compute_file_digest(file_path)
468
- metadata_with_md5 = metadata_with_md5.model_copy(update={"digest": digest_sha256})
474
+ metadata_with_md5 = pydantic_copy_method(metadata_with_md5)(update={"digest": digest_sha256})
469
475
 
470
476
  with open(file_path, "rb") as file:
471
477
  files = {
472
478
  "metadata": (
473
479
  "metadata.json",
474
- metadata_with_md5.model_dump_json(exclude_none=True).encode(),
480
+ pydantic_json_method(metadata_with_md5)(exclude_none=True).encode(),
475
481
  "application/json",
476
482
  ),
477
483
  "tracking": (
@@ -481,7 +487,7 @@ class _RestClient:
481
487
  ),
482
488
  "metrics": (
483
489
  "metrics.json",
484
- metrics.model_dump_json(exclude_none=True).encode(),
490
+ pydantic_json_method(metrics)(exclude_none=True).encode(),
485
491
  "application/json",
486
492
  ),
487
493
  "file": (metadata_with_md5.name, file, "application/octet-stream"),
@@ -489,7 +495,7 @@ class _RestClient:
489
495
  if search_customization is not None:
490
496
  files["customization"] = (
491
497
  "customization.json",
492
- search_customization.model_dump_json(exclude_none=True).encode(),
498
+ pydantic_json_method(search_customization)(exclude_none=True).encode(),
493
499
  "application/json",
494
500
  )
495
501
  additional_headers = {self.SEARCH_KEYS_HEADER_NAME: ",".join(self.search_keys_meaning_types(metadata))}
@@ -504,7 +510,7 @@ class _RestClient:
504
510
  def check_uploaded_file_v2(self, trace_id: str, file_upload_id: str, metadata: FileMetadata) -> bool:
505
511
  api_path = self.CHECK_UPLOADED_FILE_URL_FMT_V2.format(file_upload_id)
506
512
  response = self._with_unauth_retry(
507
- lambda: self._send_post_req(api_path, trace_id, metadata.model_dump_json(exclude_none=True))
513
+ lambda: self._send_post_req(api_path, trace_id, pydantic_json_method(metadata)(exclude_none=True))
508
514
  )
509
515
  return bool(response)
510
516
 
@@ -518,11 +524,15 @@ class _RestClient:
518
524
  ) -> SearchTaskResponse:
519
525
  api_path = self.INITIAL_SEARCH_WITHOUT_UPLOAD_URI_FMT_V2.format(file_upload_id)
520
526
  files = {
521
- "metadata": ("metadata.json", metadata.model_dump_json(exclude_none=True).encode(), "application/json"),
522
- "metrics": ("metrics.json", metrics.model_dump_json(exclude_none=True).encode(), "application/json"),
527
+ "metadata": (
528
+ "metadata.json",
529
+ pydantic_json_method(metadata)(exclude_none=True).encode(),
530
+ "application/json",
531
+ ),
532
+ "metrics": ("metrics.json", pydantic_json_method(metrics)(exclude_none=True).encode(), "application/json"),
523
533
  }
524
534
  if search_customization is not None:
525
- files["customization"] = search_customization.model_dump_json(exclude_none=True).encode()
535
+ files["customization"] = pydantic_json_method(search_customization)(exclude_none=True).encode()
526
536
  additional_headers = {self.SEARCH_KEYS_HEADER_NAME: ",".join(self.search_keys_meaning_types(metadata))}
527
537
  response = self._with_unauth_retry(
528
538
  lambda: self._send_post_file_req_v2(
@@ -548,19 +558,19 @@ class _RestClient:
548
558
  content = file.read()
549
559
  md5_hash.update(content)
550
560
  digest = md5_hash.hexdigest()
551
- metadata_with_md5 = metadata.model_copy(update={"checksumMD5": digest})
561
+ metadata_with_md5 = pydantic_copy_method(metadata)(update={"checksumMD5": digest})
552
562
 
553
563
  # digest_sha256 = hashlib.sha256(
554
564
  # pd.util.hash_pandas_object(pd.read_parquet(file_path, engine="fastparquet")).values
555
565
  # ).hexdigest()
556
566
  digest_sha256 = self.compute_file_digest(file_path)
557
- metadata_with_md5 = metadata_with_md5.model_copy(update={"digest": digest_sha256})
567
+ metadata_with_md5 = pydantic_copy_method(metadata_with_md5)(update={"digest": digest_sha256})
558
568
 
559
569
  with open(file_path, "rb") as file:
560
570
  files = {
561
571
  "metadata": (
562
572
  "metadata.json",
563
- metadata_with_md5.model_dump_json(exclude_none=True).encode(),
573
+ pydantic_json_method(metadata_with_md5)(exclude_none=True).encode(),
564
574
  "application/json",
565
575
  ),
566
576
  "tracking": (
@@ -570,7 +580,7 @@ class _RestClient:
570
580
  ),
571
581
  "metrics": (
572
582
  "metrics.json",
573
- metrics.model_dump_json(exclude_none=True).encode(),
583
+ pydantic_json_method(metrics)(exclude_none=True).encode(),
574
584
  "application/json",
575
585
  ),
576
586
  "file": (metadata_with_md5.name, file, "application/octet-stream"),
@@ -578,7 +588,7 @@ class _RestClient:
578
588
  if search_customization is not None:
579
589
  files["customization"] = (
580
590
  "customization.json",
581
- search_customization.model_dump_json(exclude_none=True).encode(),
591
+ pydantic_json_method(search_customization)(exclude_none=True).encode(),
582
592
  "application/json",
583
593
  )
584
594
 
@@ -602,11 +612,15 @@ class _RestClient:
602
612
  ) -> SearchTaskResponse:
603
613
  api_path = self.VALIDATION_SEARCH_WITHOUT_UPLOAD_URI_FMT_V2.format(file_upload_id, initial_search_task_id)
604
614
  files = {
605
- "metadata": ("metadata.json", metadata.model_dump_json(exclude_none=True).encode(), "application/json"),
606
- "metrics": ("metrics.json", metrics.model_dump_json(exclude_none=True).encode(), "application/json"),
615
+ "metadata": (
616
+ "metadata.json",
617
+ pydantic_json_method(metadata)(exclude_none=True).encode(),
618
+ "application/json",
619
+ ),
620
+ "metrics": ("metrics.json", pydantic_json_method(metrics)(exclude_none=True).encode(), "application/json"),
607
621
  }
608
622
  if search_customization is not None:
609
- files["customization"] = search_customization.model_dump_json(exclude_none=True).encode()
623
+ files["customization"] = pydantic_json_method(search_customization)(exclude_none=True).encode()
610
624
  additional_headers = {self.SEARCH_KEYS_HEADER_NAME: ",".join(self.search_keys_meaning_types(metadata))}
611
625
  response = self._with_unauth_retry(
612
626
  lambda: self._send_post_file_req_v2(
@@ -670,7 +684,7 @@ class _RestClient:
670
684
  "file": (metadata.name, file, "application/octet-stream"),
671
685
  "metadata": (
672
686
  "metadata.json",
673
- metadata.model_dump_json(exclude_none=True).encode(),
687
+ pydantic_json_method(metadata)(exclude_none=True).encode(),
674
688
  "application/json",
675
689
  ),
676
690
  }
@@ -682,12 +696,12 @@ class _RestClient:
682
696
  def get_search_file_metadata(self, search_task_id: str, trace_id: str) -> FileMetadata:
683
697
  api_path = self.SEARCH_FILE_METADATA_URI_FMT_V2.format(search_task_id)
684
698
  response = self._with_unauth_retry(lambda: self._send_get_req(api_path, trace_id))
685
- return FileMetadata.model_validate(response)
699
+ return pydantic_parse_method(FileMetadata)(response)
686
700
 
687
701
  def get_provider_search_metadata_v3(self, provider_search_task_id: str, trace_id: str) -> ProviderTaskMetadataV2:
688
702
  api_path = self.SEARCH_TASK_METADATA_FMT_V3.format(provider_search_task_id)
689
703
  response = self._with_unauth_retry(lambda: self._send_get_req(api_path, trace_id))
690
- return ProviderTaskMetadataV2.model_validate(response)
704
+ return pydantic_parse_method(ProviderTaskMetadataV2)(response)
691
705
 
692
706
  def get_current_transform_usage(self, trace_id) -> TransformUsage:
693
707
  track_metrics = get_track_metrics(self.client_ip, self.client_visitorid)
@@ -706,7 +720,7 @@ class _RestClient:
706
720
  lambda: self._send_post_req(
707
721
  api_path,
708
722
  trace_id=None,
709
- json_data=log_event.dict(exclude_none=True),
723
+ json_data=pydantic_dump_method(log_event)(exclude_none=True),
710
724
  content_type="application/json",
711
725
  result_format="text",
712
726
  silent=True,
@@ -723,7 +737,7 @@ class _RestClient:
723
737
  try:
724
738
  requests.post(
725
739
  url=urljoin(_RestClient.PROD_BACKEND_URL, api_path),
726
- json=log_event.model_dump(exclude_none=True),
740
+ json=pydantic_dump_method(log_event)(exclude_none=True),
727
741
  headers=_RestClient._get_base_headers(content_type="application/json"),
728
742
  )
729
743
  except Exception:
upgini/metrics.py CHANGED
@@ -391,7 +391,9 @@ class EstimatorWrapper:
391
391
  self.converted_to_int.append(c)
392
392
  self.cat_features.remove(c)
393
393
  elif is_float_dtype(x[c]) or (x[c].dtype == "category" and is_float_dtype(x[c].cat.categories)):
394
- self.logger.info(f"Convert float cat feature {c} to string")
394
+ self.logger.info(
395
+ f"Convert float cat feature {c} to string"
396
+ )
395
397
  x[c] = x[c].astype(str)
396
398
  self.converted_to_str.append(c)
397
399
  elif x[c].dtype not in ["category", "int64"]:
@@ -692,15 +694,7 @@ class CatBoostWrapper(EstimatorWrapper):
692
694
  x[c] = x[c].fillna(np.nan)
693
695
  elif x[c].dtype != "category":
694
696
  x[c] = x[c].fillna("NA")
695
- if isinstance(self.cv, TimeSeriesSplit) or isinstance(self.cv, BlockedTimeSeriesSplit):
696
- self.logger.info("Using time-aware encoder for CatBoost")
697
- encoder = CatBoostEncoder(random_state=DEFAULT_RANDOM_STATE, cols=self.cat_features, return_df=True)
698
- encoded = encoder.fit_transform(x[self.cat_features].astype("object"), y)
699
- x[self.cat_features] = encoded
700
- self.cat_encoder = encoder
701
- else:
702
- self.cat_encoder = None
703
- params["cat_features"] = self.cat_features
697
+ params["cat_features"] = self.cat_features
704
698
 
705
699
  return x, y, groups, params
706
700
 
@@ -744,16 +738,7 @@ class CatBoostWrapper(EstimatorWrapper):
744
738
  x[c] = x[c].fillna(np.nan)
745
739
  elif x[c].dtype != "category":
746
740
  x[c] = x[c].fillna("NA")
747
- if (
748
- isinstance(self.cv, TimeSeriesSplit)
749
- or isinstance(self.cv, BlockedTimeSeriesSplit)
750
- and self.cat_encoder is not None
751
- ):
752
- self.logger.info("Using time-aware encoder for CatBoost")
753
- encoded = self.cat_encoder.transform(x[self.cat_features].astype("object"), y)
754
- x[self.cat_features] = encoded
755
- else:
756
- params["cat_features"] = self.cat_features
741
+ params["cat_features"] = self.cat_features
757
742
 
758
743
  return x, y, params
759
744
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: upgini
3
- Version: 1.2.85a3857.dev1
3
+ Version: 1.2.86a1
4
4
  Summary: Intelligent data search & enrichment for Machine Learning
5
5
  Project-URL: Bug Reports, https://github.com/upgini/upgini/issues
6
6
  Project-URL: Homepage, https://upgini.com/
@@ -1,12 +1,12 @@
1
- upgini/__about__.py,sha256=fECI7PUZQG8IW2eHjUqgqHVtT40sMjfMgzLhuxKuQFA,33
1
+ upgini/__about__.py,sha256=ecLPy8e5nmkHwHQrZPVIZD4Et6AoZCfYC38tO5V5-UI,25
2
2
  upgini/__init__.py,sha256=LXSfTNU0HnlOkE69VCxkgIKDhWP-JFo_eBQ71OxTr5Y,261
3
3
  upgini/ads.py,sha256=nvuRxRx5MHDMgPr9SiU-fsqRdFaBv8p4_v1oqiysKpc,2714
4
4
  upgini/dataset.py,sha256=fRtqSkXNONLnPe6cCL967GMt349FTIpXzy_u8LUKncw,35354
5
5
  upgini/errors.py,sha256=2b_Wbo0OYhLUbrZqdLIx5jBnAsiD1Mcenh-VjR4HCTw,950
6
- upgini/features_enricher.py,sha256=2ryADtOVEEebuUBhimusvnBzGxUkdTaqpEh2F1PqHSs,212719
7
- upgini/http.py,sha256=AfaJ3c8z_tK2hZFEehNybDKE0mp1tYcyAP_l0_p8bLQ,43933
6
+ upgini/features_enricher.py,sha256=G0qbRPdlWe9p6cwYF3khP99-0kgAO8N0A2sfQxSLgmM,213446
7
+ upgini/http.py,sha256=6Qcepv0tDC72mBBJxYHnA2xqw6QwFaKrXN8o4vju8Es,44372
8
8
  upgini/metadata.py,sha256=zt_9k0iQbWXuiRZcel4ORNPdQKt6Ou69ucZD_E1Q46o,12341
9
- upgini/metrics.py,sha256=zRrRpNqjSTubsyKPi0_jbHjE8QO_YqyHWtt1B5MfVH8,44086
9
+ upgini/metrics.py,sha256=3cip0_L6-OFew74KsRwzxJDU6UFq05h2v7IsyHLcMRc,43164
10
10
  upgini/search_task.py,sha256=Q5HjBpLIB3OCxAD1zNv5yQ3ZNJx696WCK_-H35_y7Rs,17912
11
11
  upgini/spinner.py,sha256=4iMd-eIe_BnkqFEMIliULTbj6rNI2HkN_VJ4qYe0cUc,1118
12
12
  upgini/version_validator.py,sha256=DvbaAvuYFoJqYt0fitpsk6Xcv-H1BYDJYHUMxaKSH_Y,1509
@@ -16,11 +16,11 @@ upgini/autofe/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
16
16
  upgini/autofe/all_operators.py,sha256=rdjF5eaE4bC6Q4eu_el5Z7ekYt8DjOFermz2bePPbUc,333
17
17
  upgini/autofe/binary.py,sha256=oOEECc4nRzZN2tYaiqx8F2XHnfWpk1bVvb7ZkZJ0lO8,7709
18
18
  upgini/autofe/date.py,sha256=MM1S-6imNSzCDOhbNnmsc_bwSqUWBcS8vWAdHF8j1kY,11134
19
- upgini/autofe/feature.py,sha256=MjBbviB5Jy90EuWlnhgGgBptn3GomJv0xNjhjZN0P5I,15329
19
+ upgini/autofe/feature.py,sha256=aivkeVkJYzaT65Ug8dnd6__vr9QwcXxXQiNummjFwkE,15435
20
20
  upgini/autofe/groupby.py,sha256=IYmQV9uoCdRcpkeWZj_kI3ObzoNCNx3ff3h8sTL01tk,3603
21
21
  upgini/autofe/operator.py,sha256=EOffJw6vKXpEh5yymqb1RFNJPxGxmnHdFRo9dB5SCFo,4969
22
22
  upgini/autofe/unary.py,sha256=Sx11IoHRh5nwyALzjgG9GQOrVNIs8NZ1JzunAJuN66A,5731
23
- upgini/autofe/utils.py,sha256=fK1am2_tQj3fL2vDslblye8lmyfWgGIUOX1beYVBz4k,2420
23
+ upgini/autofe/utils.py,sha256=dYrtyAM8Vcc_R8u4dNo54IsGrHKagTHDJTKhGho0bRg,2967
24
24
  upgini/autofe/vector.py,sha256=jHs0nNTOaHspYUlxW7fjQepk4cvr_JDQ65L1OCiVsds,1360
25
25
  upgini/autofe/timeseries/__init__.py,sha256=PGwwDAMwvkXl3el12tXVEmZUgDUvlmIPlXtROm6bD18,738
26
26
  upgini/autofe/timeseries/base.py,sha256=rWJqRuFAzTZEsUdWG5s1Vhif9zzRRmalASXvarufRxI,3610
@@ -70,7 +70,7 @@ upgini/utils/target_utils.py,sha256=LRN840dzx78-wg7ftdxAkp2c1eu8-JDvkACiRThm4HE,
70
70
  upgini/utils/track_info.py,sha256=G5Lu1xxakg2_TQjKZk4b5SvrHsATTXNVV3NbvWtT8k8,5663
71
71
  upgini/utils/ts_utils.py,sha256=26vhC0pN7vLXK6R09EEkMK3Lwb9IVPH7LRdqFIQ3kPs,1383
72
72
  upgini/utils/warning_counter.py,sha256=-GRY8EUggEBKODPSuXAkHn9KnEQwAORC0mmz_tim-PM,254
73
- upgini-1.2.85a3857.dev1.dist-info/METADATA,sha256=XycmCsMeqC_7hsO0YzR0E8b4eGnIcD-MBzuFvB4T24s,49172
74
- upgini-1.2.85a3857.dev1.dist-info/WHEEL,sha256=zEMcRr9Kr03x1ozGwg5v9NQBKn3kndp6LSoSlVg-jhU,87
75
- upgini-1.2.85a3857.dev1.dist-info/licenses/LICENSE,sha256=5RRzgvdJUu3BUDfv4bzVU6FqKgwHlIay63pPCSmSgzw,1514
76
- upgini-1.2.85a3857.dev1.dist-info/RECORD,,
73
+ upgini-1.2.86a1.dist-info/METADATA,sha256=NFKCl8fGXfDQAKjDwAKG-Q6xEpvQ9mRld6WvqhUSZcU,49164
74
+ upgini-1.2.86a1.dist-info/WHEEL,sha256=zEMcRr9Kr03x1ozGwg5v9NQBKn3kndp6LSoSlVg-jhU,87
75
+ upgini-1.2.86a1.dist-info/licenses/LICENSE,sha256=5RRzgvdJUu3BUDfv4bzVU6FqKgwHlIay63pPCSmSgzw,1514
76
+ upgini-1.2.86a1.dist-info/RECORD,,