datupapi 1.109.0__tar.gz → 1.110.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. {datupapi-1.109.0 → datupapi-1.110.1}/PKG-INFO +1 -1
  2. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/inventory/src/FutureInventory/future_reorder.py +56 -22
  3. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi.egg-info/PKG-INFO +1 -1
  4. {datupapi-1.109.0 → datupapi-1.110.1}/setup.py +1 -1
  5. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/__init__.py +0 -0
  6. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/configure/__init__.py +0 -0
  7. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/configure/config.py +0 -0
  8. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/distribution/conf/__init__.py +0 -0
  9. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/distribution/src/DistributionFunctions/__init__.py +0 -0
  10. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/distribution/src/DistributionFunctions/functions_distribution.py +0 -0
  11. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/distribution/src/Format/__init__.py +0 -0
  12. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/distribution/src/Format/distribution_format.py +0 -0
  13. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/evaluate/__init__.py +0 -0
  14. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/evaluate/anomaly.py +0 -0
  15. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/evaluate/errors.py +0 -0
  16. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/extract/__init__.py +0 -0
  17. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/extract/io.py +0 -0
  18. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/extract/io_citrix.py +0 -0
  19. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/feateng/__init__.py +0 -0
  20. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/feateng/relation.py +0 -0
  21. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/feateng/scale.py +0 -0
  22. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/inventory/conf/__init__.py +0 -0
  23. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/inventory/src/DailyUsage/__init__.py +0 -0
  24. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/inventory/src/DailyUsage/daily_usage.py +0 -0
  25. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/inventory/src/Format/__init__.py +0 -0
  26. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/inventory/src/Format/inventory_format.py +0 -0
  27. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/inventory/src/FutureInventory/__init__.py +0 -0
  28. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/inventory/src/FutureInventory/daily_usage_future.py +0 -0
  29. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/inventory/src/InventoryFunctions/__init__.py +0 -0
  30. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/inventory/src/InventoryFunctions/functions_inventory.py +0 -0
  31. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/inventory/src/ProcessForecast/__init__.py +0 -0
  32. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/inventory/src/ProcessForecast/define_periods.py +0 -0
  33. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/inventory/src/ProcessForecast/extract_forecast.py +0 -0
  34. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/inventory/src/SuggestedForecast/__init__.py +0 -0
  35. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/inventory/src/SuggestedForecast/suggested_forecast.py +0 -0
  36. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/inventory/src/Transformation/__init__.py +0 -0
  37. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/inventory/src/Transformation/inventory_transformation.py +0 -0
  38. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/predict/__init__.py +0 -0
  39. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/predict/forecast.py +0 -0
  40. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/prepare/__init__.py +0 -0
  41. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/prepare/cleanse.py +0 -0
  42. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/prepare/format.py +0 -0
  43. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/prepare/format_dask.py +0 -0
  44. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/training/__init__.py +0 -0
  45. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/training/attup.py +0 -0
  46. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/training/deepar.py +0 -0
  47. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/training/tft.py +0 -0
  48. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/transform/__init__.py +0 -0
  49. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/transform/backtesting.py +0 -0
  50. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/transform/forecasting.py +0 -0
  51. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/transform/ranking.py +0 -0
  52. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/utils/__init__.py +0 -0
  53. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi/utils/utils.py +0 -0
  54. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi.egg-info/SOURCES.txt +0 -0
  55. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi.egg-info/dependency_links.txt +0 -0
  56. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi.egg-info/requires.txt +0 -0
  57. {datupapi-1.109.0 → datupapi-1.110.1}/datupapi.egg-info/top_level.txt +0 -0
  58. {datupapi-1.109.0 → datupapi-1.110.1}/pyproject.toml +0 -0
  59. {datupapi-1.109.0 → datupapi-1.110.1}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: datupapi
3
- Version: 1.109.0
3
+ Version: 1.110.1
4
4
  Summary: Utility library to support Datup AI MLOps processes
5
5
  Author: Datup AI
6
6
  Author-email: ramiro@datup.ai
@@ -11,7 +11,7 @@ class FutureReorder():
11
11
 
12
12
  def __init__(self, df_inv, df_lead_time, df_prep, df_fcst, periods, start_date, location=False, security_stock_ref=False):
13
13
  self.df_inv = df_inv
14
- self.df_lead_time = df_lead_time
14
+ self.df_lead_time = df_lead_time
15
15
  self.df_prep = df_prep
16
16
  self.df_fcst = df_fcst
17
17
  self.default_coverage = 30
@@ -63,7 +63,12 @@ class FutureReorder():
63
63
  if self.location:
64
64
  metadata.append('Location')
65
65
 
66
- SuggestedForecast_1p = {}
66
+ df_lead_time_rf = self.df_lead_time.copy()
67
+ df_lead_time_rf['Coverage'] = df_lead_time_rf['ReorderFreq']
68
+
69
+ SuggestedForecast_cov = {}
70
+ SuggestedForecast_rf = {}
71
+ df_forecast = {}
67
72
  df_avg_gen = {}
68
73
  df_max_gen = {}
69
74
  df_sstock = {}
@@ -84,19 +89,40 @@ class FutureReorder():
84
89
 
85
90
  for i, date in enumerate(dates):
86
91
  if self.location:
87
- current_df_lead_time = self.df_lead_time[(self.df_lead_time['Item'] == item) &
92
+ current_df_lead_time_cov = self.df_lead_time[(self.df_lead_time['Item'] == item) &
88
93
  (self.df_lead_time['Location'] == location)]
94
+
95
+ current_df_lead_time_rf = df_lead_time_rf[(df_lead_time_rf['Item'] == item) &
96
+ (df_lead_time_rf['Location'] == location)]
97
+
89
98
  current_df_inv = self.df_inv[(self.df_inv['Item'] == item) &
90
99
  (self.df_inv['Location'] == location)]
100
+
91
101
  else:
92
- current_df_lead_time = self.df_lead_time[self.df_lead_time['Item'] == item]
102
+ current_df_lead_time_cov = self.df_lead_time[self.df_lead_time['Item'] == item]
103
+ current_df_lead_time_rf = df_lead_time_rf[df_lead_time_rf['Item'] == item]
93
104
  current_df_inv = self.df_inv[self.df_inv['Item'] == item]
94
-
95
- if current_df_lead_time.empty or current_df_inv.empty:
105
+
106
+ if current_df_lead_time_cov.empty or current_df_lead_time_rf.empty or current_df_inv.empty:
96
107
  continue
97
108
 
98
- # SuggestedForecast
99
- SuggestedForecast_1p[i] = SuggestedForecast(df_LeadTimes=current_df_lead_time,
109
+ # SuggestedForecast_Coverage
110
+ SuggestedForecast_cov[i] = SuggestedForecast(df_LeadTimes=current_df_lead_time_cov,
111
+ df_Forecast=self.df_fcst,
112
+ df_Prep=self.df_prep,
113
+ df_inv=current_df_inv,
114
+ column_forecast='SuggestedForecast',
115
+ columns_metadata=metadata,
116
+ frequency_='M',
117
+ location=self.location,
118
+ actualdate=date,
119
+ default_coverage_=self.default_coverage,
120
+ join_='left').suggested_forecast()
121
+
122
+ SuggestedForecast_cov[i].rename(columns={'SuggestedForecast':'Suggested_Coverage'},inplace=True)
123
+
124
+ # SuggestedForecast_ReorderFreq
125
+ SuggestedForecast_rf[i] = SuggestedForecast(df_LeadTimes=current_df_lead_time_rf,
100
126
  df_Forecast=self.df_fcst,
101
127
  df_Prep=self.df_prep,
102
128
  df_inv=current_df_inv,
@@ -107,12 +133,18 @@ class FutureReorder():
107
133
  actualdate=date,
108
134
  default_coverage_=self.default_coverage,
109
135
  join_='left').suggested_forecast()
136
+
137
+ SuggestedForecast_rf[i].rename(columns={'SuggestedForecast':'Suggested_ReorderFreq'},inplace=True)
138
+ SuggestedForecast_rf[i] = SuggestedForecast_rf[i][metadata + ['Suggested_ReorderFreq']]
110
139
 
140
+ # Concatenar
141
+ df_forecast[i] = pd.merge(SuggestedForecast_cov[i], SuggestedForecast_rf[i], on=metadata, how='outer')
142
+
111
143
  # Calcular AvgDailyUsage y MaxDailyUsage
112
144
  df_avg_gen[i] = DailyUsageFuture(location=self.location,
113
145
  column_forecast='SuggestedForecast',
114
146
  date=date,
115
- df_fcst=self.df_fcst).daily_usage(SuggestedForecast_1p[i], 'AvgDailyUsage').fillna(0)
147
+ df_fcst=self.df_fcst).daily_usage(df_forecast[i], 'AvgDailyUsage').fillna(0)
116
148
 
117
149
  df_max_gen[i] = DailyUsageFuture(location=self.location,
118
150
  column_forecast='SuggestedForecast',
@@ -121,11 +153,11 @@ class FutureReorder():
121
153
 
122
154
  #Ajustar AvgDailyUsage y MaxDailyUsage si es cero.
123
155
  df_avg_gen[i] = df_avg_gen[i].replace(0,0.001)
124
- df_max_gen[i] = df_max_gen[i].replace(0,0.0012)
125
-
156
+ df_max_gen[i] = df_max_gen[i].replace(0,0.0012)
157
+
126
158
  # Calcular Stock de Seguridad
127
159
  merge_columns = ['Item', 'Location', 'AvgLeadTime', 'MaxLeadTime'] if self.location else ['Item', 'AvgLeadTime', 'MaxLeadTime']
128
- df_sstock[i] = pd.merge(df_max_gen[i], current_df_lead_time[merge_columns], on=metadata, how='inner').drop_duplicates()
160
+ df_sstock[i] = pd.merge(df_max_gen[i], current_df_lead_time_cov[merge_columns], on=metadata, how='inner').drop_duplicates()
129
161
 
130
162
  # Current Period
131
163
  if i == 0:
@@ -134,17 +166,17 @@ class FutureReorder():
134
166
  df_inventory[i]['InventoryTransit'] = df_inventory[i]['Inventory'] + df_inventory[i]['Transit']
135
167
  df_inventory[i] = df_inventory[i][metadata + ['InventoryTransit']]
136
168
  df[i] = pd.merge(df_inventory[i], df_sstock[i], on=metadata, how='inner')
137
-
169
+
138
170
  if self.security_stock_ref:
139
171
  df[i]['SecurityStock'] = df[i]['SecurityStockDaysRef'] * df[i]['AvgDailyUsage']
140
172
  else:
141
173
  df[i]['SecurityStock'] = (df[i]['MaxDailyUsage'] * df[i]['MaxLeadTime']) - (df[i]['AvgDailyUsage'] * df[i]['AvgLeadTime'])
142
174
 
143
- df[i]['ReorderPoint'] = (df[i]['SuggestedForecast'] + df[i]['SecurityStock']).clip(lower=0)
175
+ df[i]['ReorderPoint'] = (df[i]['Suggested_Coverage'] + df[i]['SecurityStock']).clip(lower=0)
144
176
  df[i]['ReorderQtyBase'] = (df[i]['ReorderPoint'] - df[i]['InventoryTransit']).clip(lower=1)
145
177
  df[i]['ReorderQty'] = ((df[i]['ReorderQtyBase'] / df[i]['PurchaseFactor']).apply(np.ceil)) * df[i]['PurchaseFactor']
146
178
  df[i]['ReorderQtyDays'] = (df[i]['ReorderQty'] / df[i]['AvgDailyUsage']).astype(int)
147
-
179
+
148
180
  # Future Dates
149
181
  else:
150
182
  inventory_columns = ['Item', 'Location', 'PurchaseFactor'] if self.location else ['Item', 'PurchaseFactor']
@@ -156,15 +188,17 @@ class FutureReorder():
156
188
  else:
157
189
  df[i]['SecurityStock'] = (df[i]['MaxDailyUsage'] * df[i]['MaxLeadTime']) - (df[i]['AvgDailyUsage'] * df[i]['AvgLeadTime'])
158
190
 
159
- df[i]['InventoryTransit'] = ((df[i-1]['InventoryTransit'] - df[i-1]['SuggestedForecast']) + df[i-1]['ReorderQty']).clip(lower=0)
160
- df[i]['ReorderPoint'] = (df[i]['SuggestedForecast'] + df[i]['SecurityStock']).clip(lower=0)
191
+ df[i]['InventoryTransit'] = ((df[i-1]['InventoryTransit'] - df[i-1]['Suggested_ReorderFreq']) + df[i-1]['ReorderQty']).clip(lower=0)
192
+ df[i]['ReorderPoint'] = (df[i]['Suggested_Coverage'] + df[i]['SecurityStock']).clip(lower=0)
161
193
  df[i]['ReorderQtyBase'] = (df[i]['ReorderPoint'] - df[i]['InventoryTransit']).clip(lower=1)
162
194
  df[i]['ReorderQty'] = ((df[i]['ReorderQtyBase'] / df[i]['PurchaseFactor']).apply(np.ceil)) * df[i]['PurchaseFactor']
163
195
  df[i]['ReorderQtyDays'] = (df[i]['ReorderQty'] / df[i]['AvgDailyUsage']).astype(int)
196
+
164
197
 
165
198
  # Insert columns
166
199
  df[i].insert(loc=0, column='Date', value=date)
167
200
  df[i]['Item'] = item
201
+
168
202
  if self.location:
169
203
  df[i]['Location'] = location
170
204
 
@@ -176,12 +210,12 @@ class FutureReorder():
176
210
  df_final = pd.merge(data_frame, leadtimes, on=metadata, how='left').fillna(0)
177
211
 
178
212
  df_final['Date'] = pd.to_datetime(df_final['Date'], format='%Y%m%d').dt.strftime('%Y-%m-%d')
179
- df_final = df_final.rename(columns={'InventoryTransit': 'FutureInventory'})
180
- cols_to_round = ['AvgDailyUsage', 'MaxDailyUsage', 'AvgLeadTime', 'MaxLeadTime', 'SecurityStock', 'FutureInventory', 'SuggestedForecast', 'ReorderPoint', 'ReorderQtyBase']
181
- df_final[cols_to_round] = df_final[cols_to_round].round(2)
182
- final_cols = ['Date', 'Item', 'ItemDescription', 'Location', 'SuggestedForecast', 'FutureInventory', 'ReorderQtyBase', 'ReorderQty', 'ReorderQtyDays', 'PurchaseFactor', 'ReorderPoint', 'SecurityStock',
213
+ df_final = df_final.rename(columns={'InventoryTransit': 'FutureInventory'})
214
+ cols_to_round = ['SecurityStock', 'FutureInventory', 'Suggested_Coverage', 'Suggested_ReorderFreq', 'ReorderPoint', 'ReorderQtyBase']
215
+ df_final[cols_to_round] = df_final[cols_to_round].apply(np.ceil)
216
+ final_cols = ['Date', 'Item', 'ItemDescription', 'Location', 'Suggested_Coverage', 'Suggested_ReorderFreq', 'FutureInventory', 'ReorderQtyBase', 'ReorderQty', 'ReorderQtyDays', 'PurchaseFactor', 'ReorderPoint', 'SecurityStock',
183
217
  'AvgDailyUsage', 'MaxDailyUsage', 'AvgLeadTime', 'MaxLeadTime', 'ReorderFreq', 'Coverage'] if self.location \
184
- else ['Date', 'Item', 'ItemDescription', 'SuggestedForecast', 'FutureInventory', 'ReorderQtyBase', 'ReorderQty', 'ReorderQtyDays', 'PurchaseFactor', 'ReorderPoint', 'SecurityStock',
218
+ else ['Date', 'Item', 'ItemDescription', 'Suggested_Coverage', 'Suggested_ReorderFreq', 'FutureInventory', 'ReorderQtyBase', 'ReorderQty', 'ReorderQtyDays', 'PurchaseFactor', 'ReorderPoint', 'SecurityStock',
185
219
  'AvgDailyUsage', 'MaxDailyUsage', 'AvgLeadTime', 'MaxLeadTime', 'ReorderFreq', 'Coverage']
186
220
  df_final = df_final[final_cols]
187
221
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: datupapi
3
- Version: 1.109.0
3
+ Version: 1.110.1
4
4
  Summary: Utility library to support Datup AI MLOps processes
5
5
  Author: Datup AI
6
6
  Author-email: ramiro@datup.ai
@@ -1,7 +1,7 @@
1
1
  from setuptools import setup, find_packages
2
2
 
3
3
  setup(name='datupapi',
4
- version='1.109.0',
4
+ version='1.110.1',
5
5
  description='Utility library to support Datup AI MLOps processes',
6
6
  long_description_content_type="text/markdown",
7
7
  long_description="foo bar baz",
File without changes
File without changes