datupapi-1.108.11-py3-none-any.whl → datupapi-1.110.0-py3-none-any.whl

This diff shows the content of two publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in that registry.
datupapi/evaluate/anomaly.py

@@ -271,6 +271,9 @@ class Anomaly():
             if alert_anomalies_item else "")
 
         # 3. Alerta por items con poco histórico -------------------
+        items_alerta_max = np.array(items_alerta_max).astype(str)
+        items_alerta_media = np.array(items_alerta_media).astype(str)
+
         alert_insufficient_history = alerta_max > 0 or alerta_media > 0
         alert_insufficient_history_txt = (
             f"Items con menos de 24 meses en el histórico: {alerta_max}. ({', '.join(map(str, items_alerta_max))}). "
@@ -377,6 +380,7 @@ class Anomaly():
         # Aplicar LOF
         lof4.fit(item_tmp[['SuggestedForecast', 'Target']])
         probs = lof4.predict_proba(item_tmp[['SuggestedForecast', 'Target']])
+        # item_tmp["probabilidad"] = probs[:, 1] #Agregando probabilidad de anomalía a los items
         item_tmp = item_tmp.copy()
         item_tmp.loc[:, "probabilidad"] = probs[:, 1]
         is_out4 = probs[:, 1] > (prob_lim_item_frcst/100)
@@ -450,39 +454,57 @@ class Anomaly():
         log_forecast = log_forecast.drop(columns=drop_columns)
         print(f'Logs descargados y recortados a {log_forecast.shape[0]} ejecuciones.')
 
+        if log_forecast.shape[0] < 2:
+            print('Logs insuficientes. Las alertas dependientes de logs pasarán a estado nulo')
+
         #10. Cambios en el número total de items ----------------------
-        num_actual_items = demand_item['item_id'].nunique()
-        num_ant_items = log_forecast['Items'].iloc[-2]
-        cambio_num_items = ((num_actual_items - num_ant_items) / num_ant_items) * 100
+        if log_forecast.shape[0] > 1:
+            num_actual_items = demand_item['item_id'].nunique()
+            num_ant_items = log_forecast['Items'].iloc[-2]
+            cambio_num_items = ((num_actual_items - num_ant_items) / num_ant_items) * 100
 
-        #Alerta
-        alert_items_var = True if abs(cambio_num_items) > 10 else False
-        alert_items_var_txt = f"Variación en items: {cambio_num_items:.2f}%." if alert_items_var else ""
-        print(f"El número de ítems únicos ha cambiado en un {cambio_num_items:.2f}%.")
+            #Alerta
+            alert_items_var = True if abs(cambio_num_items) > 10 else False
+            alert_items_var_txt = f"Variación en items: {cambio_num_items:.2f}%." if alert_items_var else ""
+            print(f"El número de ítems únicos ha cambiado en un {cambio_num_items:.2f}%.")
 
-        #11. Cambios en el número total de ubicaciones -------------- DE UBICACIÓN
-        if location:
-            num_actual_loc = df_demand['location'].nunique()
-            num_ant_loc = log_forecast['Locations'].iloc[-2]
-            cambio_num_loc = ((num_actual_loc - num_ant_loc) / num_ant_loc) * 100
         else:
-            cambio_num_loc = 0
+            alert_items_var = False
+            alert_items_var_txt = 'Nulo'
 
-        #Alerta
-        alert_loc_var = True if abs(cambio_num_loc) > limite_cambio_loc else False
-        alert_loc_var_txt = f"Variación en ubicaciones: {cambio_num_loc:.2f}%." if alert_loc_var else ""
-        print(f"El número de ubicaciones únicos ha cambiado en un {cambio_num_loc:.2f}%.")
+        #11. Cambios en el número total de ubicaciones -------------- DE UBICACIÓN
+        if log_forecast.shape[0] > 1:
+            if location:
+                num_actual_loc = df_demand['location'].nunique()
+                num_ant_loc = log_forecast['Locations'].iloc[-2]
+                cambio_num_loc = ((num_actual_loc - num_ant_loc) / num_ant_loc) * 100
+            else:
+                cambio_num_loc = 0
+
+            #Alerta
+            alert_loc_var = True if abs(cambio_num_loc) > limite_cambio_loc else False
+            alert_loc_var_txt = f"Variación en ubicaciones: {cambio_num_loc:.2f}%." if alert_loc_var else ""
+            print(f"El número de ubicaciones únicos ha cambiado en un {cambio_num_loc:.2f}%.")
+
+        else:
+            alert_loc_var = False
+            alert_loc_var_txt = 'Nulo'
 
         ##12. Aumento en el WMAPE
-        wmape_actual = log_forecast['WMAPE'].iloc[-1]
-        wmape_anterior = log_forecast['WMAPE'].iloc[-2]
+        if log_forecast.shape[0] > 1:
+            wmape_actual = log_forecast['WMAPE'].iloc[-1]
+            wmape_anterior = log_forecast['WMAPE'].iloc[-2]
 
-        #Alerta
-        alert_wmape_var = True if wmape_anterior < wmape_actual else False
-        alert_wmape_var_txt = ((f"El error promedio aumentó de {wmape_anterior:.2f}% a {wmape_actual:.2f}%") if alert_wmape_var
-                               else (f"El error promedio disminuyó de {wmape_anterior:.2f}% a {wmape_actual:.2f}%"))
-        print(f"El error pasó de {wmape_anterior:.2f}% a {wmape_actual:.2f}%")
-        print(alert_wmape_var_txt)
+            #Alerta
+            alert_wmape_var = True if wmape_anterior < wmape_actual else False
+            alert_wmape_var_txt = ((f"El error promedio aumentó de {wmape_anterior:.2f}% a {wmape_actual:.2f}%") if alert_wmape_var
+                                   else (f"El error promedio disminuyó de {wmape_anterior:.2f}% a {wmape_actual:.2f}%"))
+            print(f"El error pasó de {wmape_anterior:.2f}% a {wmape_actual:.2f}%")
+            print(alert_wmape_var_txt)
+
+        else:
+            alert_wmape_var = False
+            alert_wmape_var_txt = 'Nulo'
 
         ##13. Fecha de corte
         fecha_corte = log_forecast['DateDataPrep'].iloc[-1]
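The hunk above makes the log-dependent alerts degrade gracefully: every check that compares the current run against the previous one (`iloc[-2]`) is now guarded by `log_forecast.shape[0] > 1`, and falls back to a 'Nulo' alert state instead of raising an IndexError when fewer than two executions have been logged. A minimal, self-contained sketch of that guard, using a hypothetical one-row log:

```python
import pandas as pd

# Hypothetical execution log; a real run would load this from the datalake.
log_forecast = pd.DataFrame({'WMAPE': [12.5]})  # only one execution logged

if log_forecast.shape[0] > 1:
    wmape_actual = log_forecast['WMAPE'].iloc[-1]
    wmape_anterior = log_forecast['WMAPE'].iloc[-2]
    alert_wmape_var = wmape_anterior < wmape_actual
    alert_wmape_var_txt = f"Error went from {wmape_anterior:.2f}% to {wmape_actual:.2f}%"
else:
    # Fewer than two runs: the comparison is undefined, so mark the alert as null.
    alert_wmape_var = False
    alert_wmape_var_txt = 'Nulo'

print(alert_wmape_var, alert_wmape_var_txt)  # False Nulo
```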
@@ -497,7 +519,7 @@ class Anomaly():
 
         #####--------------------Creción tabla resumen
 
-
+
         if location:
             # Creo tabla para lo items con baja precisión
             tabla = items_baja_precision[['Item', 'Location','ItemType', 'Ranking', 'AccuracyBestFit']]
@@ -516,7 +538,7 @@ class Anomaly():
             tabla = tabla.merge(historico_meses, left_on=['Item', 'Location'], right_on=['item_id', 'location'], how='left')
             tabla.drop(columns=['item_id', 'location'], inplace=True, errors='ignore')
             tabla['Historico (Meses)'] = tabla.set_index(['Item', 'Location']).index.map(df_demand.groupby(['item_id', 'location']).size())
-
+
         else:
             # Creo tabla para lo items con baja precisión
             tabla = items_baja_precision[['Item','ItemType', 'Ranking', 'AccuracyBestFit']]
@@ -534,8 +556,15 @@ class Anomaly():
             tabla.drop(columns=['item_id'], inplace=True, errors='ignore')
             tabla['Historico (Meses)'] = tabla.set_index(['Item']).index.map(df_demand.groupby(['item_id']).size())
 
+
         #Añadir alertas LOF
+        out_if_item2['item_id'] = out_if_item2['item_id'].astype(str)
+        out_if_item4['Item'] = out_if_item4['Item'].astype(str)
+
         def obtener_alerta(item):
+
+            item = str(item)
+
             # Filtrar por item
             alerta_prep = out_if_item2[out_if_item2['item_id'] == item][['timestamp', 'probabilidad']].copy()
             alerta_forecast = out_if_item4[out_if_item4['Item'] == item][['Date', 'probabilidad']].copy()
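Casting the join keys to `str` on both sides is what makes the lookups inside `obtener_alerta` reliable: if `tabla['Item']` holds strings while `out_if_item2['item_id']` holds integers, the equality filter silently matches nothing and every alert comes back empty. A minimal sketch of the failure mode and the fix, with made-up frames:

```python
import pandas as pd

out_if_item2 = pd.DataFrame({'item_id': [1001, 1002], 'probabilidad': [0.91, 0.87]})

# String key against integer ids: the filter silently matches nothing.
print(out_if_item2[out_if_item2['item_id'] == '1001'].empty)  # True

# Normalize both sides to str, as the patch does, and the filter works.
out_if_item2['item_id'] = out_if_item2['item_id'].astype(str)
item = str(1001)
print(out_if_item2[out_if_item2['item_id'] == item])  # one matching row
```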
@@ -554,6 +583,8 @@ class Anomaly():
            return alertas.to_dict(orient='records') if not alertas.empty else None
 
        tabla['AlertaAnomalia'] = tabla['Item'].apply(obtener_alerta)
+       items_con_alerta = set(out_if_item2['item_id']).union(set(out_if_item4['Item']))
+       print(f'Items con baja precisión con alertas de anomalías: {items_con_alerta}')
 
        #---------------------Cargando tabla resumen
        self.io.upload_csv(tabla, q_name='ResumenAlertas', datalake_path='output')
datupapi/inventory/src/FutureInventory/future_reorder.py

@@ -11,7 +11,7 @@ class FutureReorder():
 
     def __init__(self, df_inv, df_lead_time, df_prep, df_fcst, periods, start_date, location=False, security_stock_ref=False):
         self.df_inv = df_inv
-        self.df_lead_time = df_lead_time
+        self.df_lead_time = df_lead_time
         self.df_prep = df_prep
         self.df_fcst = df_fcst
         self.default_coverage = 30
@@ -63,7 +63,12 @@ class FutureReorder():
        if self.location:
            metadata.append('Location')
 
-       SuggestedForecast_1p = {}
+       df_lead_time_rf = self.df_lead_time.copy()
+       df_lead_time_rf['Coverage'] = df_lead_time_rf['ReorderFreq']
+
+       SuggestedForecast_cov = {}
+       SuggestedForecast_rf = {}
+       df_forecast = {}
        df_avg_gen = {}
        df_max_gen = {}
        df_sstock = {}
@@ -84,19 +89,40 @@ class FutureReorder():
 
        for i, date in enumerate(dates):
            if self.location:
-               current_df_lead_time = self.df_lead_time[(self.df_lead_time['Item'] == item) &
+               current_df_lead_time_cov = self.df_lead_time[(self.df_lead_time['Item'] == item) &
                                                         (self.df_lead_time['Location'] == location)]
+
+               current_df_lead_time_rf = df_lead_time_rf[(df_lead_time_rf['Item'] == item) &
+                                                         (df_lead_time_rf['Location'] == location)]
+
                current_df_inv = self.df_inv[(self.df_inv['Item'] == item) &
                                             (self.df_inv['Location'] == location)]
+
            else:
-               current_df_lead_time = self.df_lead_time[self.df_lead_time['Item'] == item]
+               current_df_lead_time_cov = self.df_lead_time[self.df_lead_time['Item'] == item]
+               current_df_lead_time_rf = df_lead_time_rf[df_lead_time_rf['Item'] == item]
                current_df_inv = self.df_inv[self.df_inv['Item'] == item]
-
-           if current_df_lead_time.empty or current_df_inv.empty:
+
+           if current_df_lead_time_cov.empty or current_df_lead_time_rf.empty or current_df_inv.empty:
                continue
 
-           # SuggestedForecast
-           SuggestedForecast_1p[i] = SuggestedForecast(df_LeadTimes=current_df_lead_time,
+           # SuggestedForecast_Coverage
+           SuggestedForecast_cov[i] = SuggestedForecast(df_LeadTimes=current_df_lead_time_cov,
+                                                        df_Forecast=self.df_fcst,
+                                                        df_Prep=self.df_prep,
+                                                        df_inv=current_df_inv,
+                                                        column_forecast='SuggestedForecast',
+                                                        columns_metadata=metadata,
+                                                        frequency_='M',
+                                                        location=self.location,
+                                                        actualdate=date,
+                                                        default_coverage_=self.default_coverage,
+                                                        join_='left').suggested_forecast()
+
+           SuggestedForecast_cov[i].rename(columns={'SuggestedForecast':'Suggested_Coverage'},inplace=True)
+
+           # SuggestedForecast_ReorderFreq
+           SuggestedForecast_rf[i] = SuggestedForecast(df_LeadTimes=current_df_lead_time_rf,
                                                        df_Forecast=self.df_fcst,
                                                        df_Prep=self.df_prep,
                                                        df_inv=current_df_inv,
@@ -107,12 +133,18 @@ class FutureReorder():
                                                        actualdate=date,
                                                        default_coverage_=self.default_coverage,
                                                        join_='left').suggested_forecast()
+
+           SuggestedForecast_rf[i].rename(columns={'SuggestedForecast':'Suggested_ReorderFreq'},inplace=True)
+           SuggestedForecast_rf[i] = SuggestedForecast_rf[i][metadata + ['Suggested_ReorderFreq']]
 
+           # Concatenar
+           df_forecast[i] = pd.merge(SuggestedForecast_cov[i], SuggestedForecast_rf[i], on=metadata, how='outer')
+
            # Calcular AvgDailyUsage y MaxDailyUsage
            df_avg_gen[i] = DailyUsageFuture(location=self.location,
                                             column_forecast='SuggestedForecast',
                                             date=date,
-                                            df_fcst=self.df_fcst).daily_usage(SuggestedForecast_1p[i], 'AvgDailyUsage').fillna(0)
+                                            df_fcst=self.df_fcst).daily_usage(df_forecast[i], 'AvgDailyUsage').fillna(0)
 
            df_max_gen[i] = DailyUsageFuture(location=self.location,
                                             column_forecast='SuggestedForecast',
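The core of this refactor is that the suggested forecast is now computed twice per item and date: once over the lead-time coverage horizon (renamed `Suggested_Coverage`, which drives the reorder point) and once over a copy of the lead times whose `Coverage` column is overwritten with `ReorderFreq` (renamed `Suggested_ReorderFreq`, which drives the inventory roll-forward). The two results are outer-merged on the metadata keys so an item survives even if only one pass produced a row. A toy sketch of that merge step, assuming `metadata = ['Item', 'Location']` and invented values:

```python
import pandas as pd

metadata = ['Item', 'Location']

# Hypothetical per-item outputs of the two SuggestedForecast passes.
cov = pd.DataFrame({'Item': ['A'], 'Location': ['BOG'], 'Suggested_Coverage': [120.0]})
rf = pd.DataFrame({'Item': ['A'], 'Location': ['BOG'], 'Suggested_ReorderFreq': [45.0]})

# Outer merge keeps a row when either pass produced one for the item.
df_forecast = pd.merge(cov, rf, on=metadata, how='outer')
print(df_forecast)
#   Item Location  Suggested_Coverage  Suggested_ReorderFreq
# 0    A      BOG               120.0                   45.0
```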
@@ -121,11 +153,11 @@ class FutureReorder():
 
            #Ajustar AvgDailyUsage y MaxDailyUsage si es cero.
            df_avg_gen[i] = df_avg_gen[i].replace(0,0.001)
-           df_max_gen[i] = df_max_gen[i].replace(0,0.0012)
-
+           df_max_gen[i] = df_max_gen[i].replace(0,0.0012)
+
            # Calcular Stock de Seguridad
            merge_columns = ['Item', 'Location', 'AvgLeadTime', 'MaxLeadTime'] if self.location else ['Item', 'AvgLeadTime', 'MaxLeadTime']
-           df_sstock[i] = pd.merge(df_max_gen[i], current_df_lead_time[merge_columns], on=metadata, how='inner').drop_duplicates()
+           df_sstock[i] = pd.merge(df_max_gen[i], current_df_lead_time_cov[merge_columns], on=metadata, how='inner').drop_duplicates()
 
            # Current Period
            if i == 0:
@@ -134,17 +166,17 @@ class FutureReorder():
                df_inventory[i]['InventoryTransit'] = df_inventory[i]['Inventory'] + df_inventory[i]['Transit']
                df_inventory[i] = df_inventory[i][metadata + ['InventoryTransit']]
                df[i] = pd.merge(df_inventory[i], df_sstock[i], on=metadata, how='inner')
-
+
                if self.security_stock_ref:
                    df[i]['SecurityStock'] = df[i]['SecurityStockDaysRef'] * df[i]['AvgDailyUsage']
                else:
                    df[i]['SecurityStock'] = (df[i]['MaxDailyUsage'] * df[i]['MaxLeadTime']) - (df[i]['AvgDailyUsage'] * df[i]['AvgLeadTime'])
 
-               df[i]['ReorderPoint'] = (df[i]['SuggestedForecast'] + df[i]['SecurityStock']).clip(lower=0)
+               df[i]['ReorderPoint'] = (df[i]['Suggested_Coverage'] + df[i]['SecurityStock']).clip(lower=0)
                df[i]['ReorderQtyBase'] = (df[i]['ReorderPoint'] - df[i]['InventoryTransit']).clip(lower=1)
                df[i]['ReorderQty'] = ((df[i]['ReorderQtyBase'] / df[i]['PurchaseFactor']).apply(np.ceil)) * df[i]['PurchaseFactor']
                df[i]['ReorderQtyDays'] = (df[i]['ReorderQty'] / df[i]['AvgDailyUsage']).astype(int)
-
+
            # Future Dates
            else:
                inventory_columns = ['Item', 'Location', 'PurchaseFactor'] if self.location else ['Item', 'PurchaseFactor']
@@ -156,15 +188,17 @@ class FutureReorder():
                else:
                    df[i]['SecurityStock'] = (df[i]['MaxDailyUsage'] * df[i]['MaxLeadTime']) - (df[i]['AvgDailyUsage'] * df[i]['AvgLeadTime'])
 
-               df[i]['InventoryTransit'] = ((df[i-1]['InventoryTransit'] - df[i-1]['SuggestedForecast']) + df[i-1]['ReorderQty']).clip(lower=0)
-               df[i]['ReorderPoint'] = (df[i]['SuggestedForecast'] + df[i]['SecurityStock']).clip(lower=0)
+               df[i]['InventoryTransit'] = ((df[i-1]['InventoryTransit'] - df[i-1]['Suggested_ReorderFreq']) + df[i-1]['ReorderQty']).clip(lower=0)
+               df[i]['ReorderPoint'] = (df[i]['Suggested_Coverage'] + df[i]['SecurityStock']).clip(lower=0)
                df[i]['ReorderQtyBase'] = (df[i]['ReorderPoint'] - df[i]['InventoryTransit']).clip(lower=1)
                df[i]['ReorderQty'] = ((df[i]['ReorderQtyBase'] / df[i]['PurchaseFactor']).apply(np.ceil)) * df[i]['PurchaseFactor']
                df[i]['ReorderQtyDays'] = (df[i]['ReorderQty'] / df[i]['AvgDailyUsage']).astype(int)
+
 
            # Insert columns
            df[i].insert(loc=0, column='Date', value=date)
            df[i]['Item'] = item
+
            if self.location:
                df[i]['Location'] = location
 
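With the split in place, the per-period arithmetic reads: safety stock from max/avg usage and lead times, a reorder point driven by the coverage-horizon forecast, and an inventory roll-forward that consumes the reorder-frequency forecast from the previous period. A worked numeric sketch of one future period, with invented inputs:

```python
import numpy as np

# Invented inputs for a single item and one future period.
avg_daily, max_daily = 4.0, 6.0              # AvgDailyUsage, MaxDailyUsage
avg_lt, max_lt = 10.0, 15.0                  # AvgLeadTime, MaxLeadTime
suggested_cov, suggested_rf = 120.0, 45.0    # Suggested_Coverage, Suggested_ReorderFreq
prev_inventory_transit, prev_reorder_qty = 80.0, 50.0
purchase_factor = 12

security_stock = (max_daily * max_lt) - (avg_daily * avg_lt)                            # 50.0
inventory_transit = max((prev_inventory_transit - suggested_rf) + prev_reorder_qty, 0)  # 85.0
reorder_point = max(suggested_cov + security_stock, 0)                                  # 170.0
reorder_qty_base = max(reorder_point - inventory_transit, 1)                            # 85.0
reorder_qty = np.ceil(reorder_qty_base / purchase_factor) * purchase_factor             # 96.0
reorder_qty_days = int(reorder_qty / avg_daily)                                         # 24

print(security_stock, inventory_transit, reorder_point, reorder_qty, reorder_qty_days)
```

Note the asymmetry the patch introduces: consumption between periods follows `Suggested_ReorderFreq`, while the reorder trigger still sizes against `Suggested_Coverage`.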
@@ -176,12 +210,12 @@ class FutureReorder():
        df_final = pd.merge(data_frame, leadtimes, on=metadata, how='left').fillna(0)
 
        df_final['Date'] = pd.to_datetime(df_final['Date'], format='%Y%m%d').dt.strftime('%Y-%m-%d')
-       df_final = df_final.rename(columns={'InventoryTransit': 'FutureInventory'})
-       cols_to_round = ['AvgDailyUsage', 'MaxDailyUsage', 'AvgLeadTime', 'MaxLeadTime', 'SecurityStock', 'FutureInventory', 'SuggestedForecast', 'ReorderPoint', 'ReorderQtyBase']
-       df_final[cols_to_round] = df_final[cols_to_round].round(2)
-       final_cols = ['Date', 'Item', 'ItemDescription', 'Location', 'SuggestedForecast', 'FutureInventory', 'ReorderQtyBase', 'ReorderQty', 'ReorderQtyDays', 'PurchaseFactor', 'ReorderPoint', 'SecurityStock',
+       df_final = df_final.rename(columns={'InventoryTransit': 'FutureInventory'})
+       cols_to_round = ['SecurityStock', 'FutureInventory', 'Suggested_Coverage', 'Suggested_ReorderFreq', 'ReorderPoint', 'ReorderQtyBase']
+       df_final[cols_to_round] = df_final[cols_to_round].apply(np.ceil)
+       final_cols = ['Date', 'Item', 'ItemDescription', 'Location', 'Suggested_Coverage', 'Suggested_ReorderFreq', 'FutureInventory', 'ReorderQtyBase', 'ReorderQty', 'ReorderQtyDays', 'PurchaseFactor', 'ReorderPoint', 'SecurityStock',
                      'AvgDailyUsage', 'MaxDailyUsage', 'AvgLeadTime', 'MaxLeadTime', 'ReorderFreq', 'Coverage'] if self.location \
-             else ['Date', 'Item', 'ItemDescription', 'SuggestedForecast', 'FutureInventory', 'ReorderQtyBase', 'ReorderQty', 'ReorderQtyDays', 'PurchaseFactor', 'ReorderPoint', 'SecurityStock',
+             else ['Date', 'Item', 'ItemDescription', 'Suggested_Coverage', 'Suggested_ReorderFreq', 'FutureInventory', 'ReorderQtyBase', 'ReorderQty', 'ReorderQtyDays', 'PurchaseFactor', 'ReorderPoint', 'SecurityStock',
                    'AvgDailyUsage', 'MaxDailyUsage', 'AvgLeadTime', 'MaxLeadTime', 'ReorderFreq', 'Coverage']
        df_final = df_final[final_cols]
 
datupapi-1.108.11.dist-info/METADATA → datupapi-1.110.0.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: datupapi
-Version: 1.108.11
+Version: 1.110.0
 Summary: Utility library to support Datup AI MLOps processes
 Author: Datup AI
 Author-email: ramiro@datup.ai
datupapi-1.108.11.dist-info/RECORD → datupapi-1.110.0.dist-info/RECORD

@@ -7,7 +7,7 @@ datupapi/distribution/src/DistributionFunctions/functions_distribution.py,sha256
 datupapi/distribution/src/Format/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datupapi/distribution/src/Format/distribution_format.py,sha256=CFqUHTk9StDvaOvlR3yLr3NZiFY2Ao1yVXoY-IsrNWE,3964
 datupapi/evaluate/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-datupapi/evaluate/anomaly.py,sha256=s_HJb_5T3xNhL26y5bsHWuS-ucbkhSGtCyF10d_iJ3I,32379
+datupapi/evaluate/anomaly.py,sha256=fjIDAvEPGBJdZjVXhz7Rk90WKCR5t3Hbe6zeTKVXFlw,33506
 datupapi/evaluate/errors.py,sha256=9SRYAjwRDfEdP1EnBbfA7zoQEi4xU4qI16vBE8-jkeA,7039
 datupapi/extract/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datupapi/extract/io.py,sha256=fYPXf-SmYyw4ywbN3SjQsdl6qBQvQz1K3i9kbpiEkkA,84343
@@ -22,7 +22,7 @@ datupapi/inventory/src/Format/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5N
 datupapi/inventory/src/Format/inventory_format.py,sha256=qrHkr6orSMdiraRH69nKawW1WBi_OFbqei7z2LJeSNI,7080
 datupapi/inventory/src/FutureInventory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datupapi/inventory/src/FutureInventory/daily_usage_future.py,sha256=LMYL6trGdOl0piEVC-GmzwCxuWCafRhehCQDHOuP30A,3388
-datupapi/inventory/src/FutureInventory/future_reorder.py,sha256=yZtNw9B7rB6h-2T3Ywi7oQgw5hwwXT1RfK1TRDde0gU,10450
+datupapi/inventory/src/FutureInventory/future_reorder.py,sha256=SqJUiKaD7YybFO8DOQv1vBoFEhZHuP8IGFR0OXmUf0g,12776
 datupapi/inventory/src/InventoryFunctions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datupapi/inventory/src/InventoryFunctions/functions_inventory.py,sha256=RgKlF_YTuIUs03CLGpekPqmTaRvbsvwIn-62ClWqNGg,13319
 datupapi/inventory/src/ProcessForecast/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -48,7 +48,7 @@ datupapi/transform/forecasting.py,sha256=OboiVyErzWXJAv6R4fCXiPNaoVp5dNAP9F53EDq
 datupapi/transform/ranking.py,sha256=XOI0XqMx9Cy52Xjc4LCzJCNUsJZNjgrPky7nrpELr-U,7943
 datupapi/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datupapi/utils/utils.py,sha256=pU3mXPupm-1gvODI-kPlIpOdMHa2F9lEXvqBn6t3ajc,4637
-datupapi-1.108.11.dist-info/METADATA,sha256=s70m4oBa9C6lShSQx4C52jSSiHZDorDMtE-NCcp9JAU,1517
-datupapi-1.108.11.dist-info/WHEEL,sha256=pxyMxgL8-pra_rKaQ4drOZAegBVuX-G_4nRHjjgWbmo,91
-datupapi-1.108.11.dist-info/top_level.txt,sha256=oERwtRZu8xq2u1TDGwJwuWK0iJbH4p7x9kYECAL5So0,9
-datupapi-1.108.11.dist-info/RECORD,,
+datupapi-1.110.0.dist-info/METADATA,sha256=Ix3BdEi_gpJJv86soI-nigcTp1xE0lUFNAHuOh3FGRk,1516
+datupapi-1.110.0.dist-info/WHEEL,sha256=0CuiUZ_p9E4cD6NyLD6UG80LBXYyiSYZOKDm5lp32xk,91
+datupapi-1.110.0.dist-info/top_level.txt,sha256=oERwtRZu8xq2u1TDGwJwuWK0iJbH4p7x9kYECAL5So0,9
+datupapi-1.110.0.dist-info/RECORD,,
datupapi-1.108.11.dist-info/WHEEL → datupapi-1.110.0.dist-info/WHEEL

@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (79.0.0)
+Generator: setuptools (80.3.1)
 Root-Is-Purelib: true
 Tag: py3-none-any
 