datupapi 1.112.1__tar.gz → 1.113.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60) hide show
  1. {datupapi-1.112.1 → datupapi-1.113.0}/PKG-INFO +1 -1
  2. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/inventory/src/Format/inventory_format.py +1 -1
  3. datupapi-1.113.0/datupapi/prepare/format_opt.py +400 -0
  4. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi.egg-info/PKG-INFO +1 -1
  5. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi.egg-info/SOURCES.txt +1 -0
  6. {datupapi-1.112.1 → datupapi-1.113.0}/setup.py +1 -1
  7. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/__init__.py +0 -0
  8. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/configure/__init__.py +0 -0
  9. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/configure/config.py +0 -0
  10. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/distribution/conf/__init__.py +0 -0
  11. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/distribution/src/DistributionFunctions/__init__.py +0 -0
  12. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/distribution/src/DistributionFunctions/functions_distribution.py +0 -0
  13. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/distribution/src/Format/__init__.py +0 -0
  14. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/distribution/src/Format/distribution_format.py +0 -0
  15. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/evaluate/__init__.py +0 -0
  16. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/evaluate/anomaly.py +0 -0
  17. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/evaluate/errors.py +0 -0
  18. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/extract/__init__.py +0 -0
  19. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/extract/io.py +0 -0
  20. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/extract/io_citrix.py +0 -0
  21. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/feateng/__init__.py +0 -0
  22. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/feateng/relation.py +0 -0
  23. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/feateng/scale.py +0 -0
  24. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/inventory/conf/__init__.py +0 -0
  25. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/inventory/src/DailyUsage/__init__.py +0 -0
  26. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/inventory/src/DailyUsage/daily_usage.py +0 -0
  27. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/inventory/src/Format/__init__.py +0 -0
  28. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/inventory/src/FutureInventory/__init__.py +0 -0
  29. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/inventory/src/FutureInventory/daily_usage_future.py +0 -0
  30. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/inventory/src/FutureInventory/future_reorder.py +0 -0
  31. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/inventory/src/InventoryFunctions/__init__.py +0 -0
  32. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/inventory/src/InventoryFunctions/functions_inventory.py +0 -0
  33. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/inventory/src/ProcessForecast/__init__.py +0 -0
  34. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/inventory/src/ProcessForecast/define_periods.py +0 -0
  35. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/inventory/src/ProcessForecast/extract_forecast.py +0 -0
  36. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/inventory/src/SuggestedForecast/__init__.py +0 -0
  37. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/inventory/src/SuggestedForecast/suggested_forecast.py +0 -0
  38. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/inventory/src/Transformation/__init__.py +0 -0
  39. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/inventory/src/Transformation/inventory_transformation.py +0 -0
  40. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/predict/__init__.py +0 -0
  41. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/predict/forecast.py +0 -0
  42. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/prepare/__init__.py +0 -0
  43. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/prepare/cleanse.py +0 -0
  44. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/prepare/format.py +0 -0
  45. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/prepare/format_dask.py +0 -0
  46. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/training/__init__.py +0 -0
  47. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/training/attup.py +0 -0
  48. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/training/deepar.py +0 -0
  49. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/training/tft.py +0 -0
  50. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/transform/__init__.py +0 -0
  51. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/transform/backtesting.py +0 -0
  52. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/transform/forecasting.py +0 -0
  53. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/transform/ranking.py +0 -0
  54. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/utils/__init__.py +0 -0
  55. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi/utils/utils.py +0 -0
  56. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi.egg-info/dependency_links.txt +0 -0
  57. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi.egg-info/requires.txt +0 -0
  58. {datupapi-1.112.1 → datupapi-1.113.0}/datupapi.egg-info/top_level.txt +0 -0
  59. {datupapi-1.112.1 → datupapi-1.113.0}/pyproject.toml +0 -0
  60. {datupapi-1.112.1 → datupapi-1.113.0}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: datupapi
3
- Version: 1.112.1
3
+ Version: 1.113.0
4
4
  Summary: Utility library to support Datup AI MLOps processes
5
5
  Author: Datup AI
6
6
  Author-email: ramiro@datup.ai
@@ -40,7 +40,7 @@ class InventoryFormat():
40
40
 
41
41
  colmuns_meta = ["Customer","Country","ProductType","Weight","Dimension","Color","Origen","Gama","Marca","MateriaPrima",
42
42
  "JefeProducto","JefeProductoDescription","GrupoCompra","Familia","Seccion","Categoria","SubCategoria","Linea",
43
- "SubLinea","Canal","InventoryUnit","Comments","DeliveryFactor","PurchaseOrderUnit","PalletFactor","MOQ"]
43
+ "SubLinea","Canal","InventoryUnit","Comments","DeliveryFactor","PurchaseOrderUnit","PalletFactor","MOQ","Metadata"]
44
44
 
45
45
  columns_advance = ["BackSuggestedForecast","NextSuggestedForecast","BackReorderQtyBase","BackReorderQty","NextReorderQtyBase",
46
46
  "NextReorderQty","MinOrderQty","MaxOrderQty","OtifOrder","TotalOrder","DelayDays","ShortFall"]
@@ -0,0 +1,400 @@
1
import os
import re

import pandas as pd
import polars as pl

from datupapi.configure.config import Config


class FormatOptimized(Config):
    """
    Optimized Format class using Polars for efficient data-resampling operations.

    Exposes the same interface as the pandas-based ``Format`` class but performs
    the heavy work (dynamic grouping, date-grid completion, null filling) in
    Polars, converting back to pandas only at the very end.
    """

    def __init__(self, config_file, logfile, log_path, *args, **kwargs):
        """
        :param config_file: Path to the configuration file consumed by Config.
        :param logfile: Log file name passed through to Config.
        :param log_path: Directory where logs are written.
        """
        Config.__init__(self, config_file=config_file, logfile=logfile)
        self.log_path = log_path

    def _convert_frequency_to_polars(self, frequency: str) -> str:
        """
        Translate a pandas offset alias into the equivalent Polars interval string.

        :param frequency: Pandas frequency string (e.g. 'M', 'W', 'D', 'Q', '2M', '3W')
        :return: Polars frequency string (e.g. '1mo', '1w', '1d', '1q', '2mo', '3w')
        :raises ValueError: If the alias cannot be parsed or is unsupported.
        """
        # Pandas offset alias -> Polars interval unit.
        freq_map = {
            'D': 'd',    # day
            'W': 'w',    # week
            'M': 'mo',   # month
            'Q': 'q',    # quarter
            'Y': 'y',    # year
            'H': 'h',    # hour
            'T': 'm',    # minute ('T' in pandas, 'm' in polars)
            'S': 's',    # second
        }

        # Split an optional multiplier from the alias, e.g. '2M' -> ('2', 'M').
        match = re.match(r'^(\d*)([A-Z]+)$', frequency.upper())
        if not match:
            raise ValueError(f"Invalid frequency format: {frequency}")

        multiplier = match.group(1) or '1'
        alias = match.group(2)
        if alias not in freq_map:
            raise ValueError(f"Unsupported frequency code: {alias}")

        return f"{multiplier}{freq_map[alias]}"

    def reorder_cols(self, df, first_cols):
        """
        Return a dataframe with columns specified in first_cols at the leading positions.

        :param df: Dataframe to reorder
        :param first_cols: Leading columns to appear in the dataframe
        :return df: Dataframe reordered

        >>> df = reorder_cols(df, first_cols)
        >>> df =
            var1    var2    var3
        idx0    1   2   3
        """
        ordered = list(df.columns)
        # Walk the wanted leaders back-to-front so the final order matches first_cols.
        for col in reversed(first_cols):
            if col in ordered:
                ordered.remove(col)
                ordered.insert(0, col)
        return df[ordered]

    def _resample_core(self, df, date_col, group_cols, leading_cols, frequency, agg_dict, use_lazy):
        """
        Shared Polars resampling pipeline used by both public resample methods.

        :param df: Pandas DataFrame to resample.
        :param date_col: Name of the date column.
        :param group_cols: Grouping columns, in group_by_dynamic order.
        :param leading_cols: Column order (after date_col) for the returned frame.
        :param frequency: Pandas frequency alias (e.g. 'M', 'W').
        :param agg_dict: Mapping of column name -> aggregation function name.
        :param use_lazy: Route the cast/sort pre-processing through a lazy plan.
        :return: Resampled pandas DataFrame; missing (group, date) cells filled with 0.
        """
        polars_frequency = self._convert_frequency_to_polars(frequency)

        # Normalise the date column and sort. The lazy branch lets Polars plan
        # the cast/sort before materialising; group_by_dynamic itself is eager,
        # so the frame is collected either way before resampling.
        frame = pl.from_pandas(df)
        if use_lazy:
            prepared = (
                frame.lazy()
                .with_columns(pl.col(date_col).cast(pl.Datetime))
                .sort(date_col)
                .collect()
            )
        else:
            prepared = (
                frame
                .with_columns(pl.col(date_col).cast(pl.Datetime))
                .sort(date_col)
            )

        # Resample into left-closed windows per group.
        # NOTE(review): the keyword `by` was renamed to `group_by` in recent
        # Polars releases — confirm against the pinned Polars version.
        resampled = prepared.group_by_dynamic(
            index_column=date_col,
            every=polars_frequency,
            closed="left",
            by=group_cols,
        ).agg([getattr(pl.col(col), func)().alias(col) for col, func in agg_dict.items()])

        # Snap every window label to the last day of its month.
        # NOTE(review): applied for *any* frequency, so non-monthly resampling
        # also gets month-end labels — confirm this is intended for 'W'/'D'.
        resampled = resampled.with_columns(pl.col(date_col).dt.month_end().alias(date_col))

        # Nothing to grid-fill when the input produced no rows; avoids passing
        # None min/max dates into datetime_range below.
        if resampled.height == 0:
            return resampled.to_pandas()

        # Build the complete (group x date) grid so gaps can be filled with 0.
        combos = resampled.select(group_cols).unique()
        min_date = resampled.select(pl.col(date_col).min()).item()
        max_date = resampled.select(pl.col(date_col).max()).item()
        date_range = pl.datetime_range(
            min_date,
            max_date,
            interval=polars_frequency,
            eager=True,
        ).dt.month_end()
        grid = combos.join(pl.DataFrame({date_col: date_range}), how="cross")

        # Left-join the resampled values onto the grid; absent cells become 0.
        joined = grid.join(resampled, on=[date_col] + group_cols, how="left")
        joined = joined.with_columns([pl.col(col).fill_null(0) for col in agg_dict])

        # Fixed leading order: date first, then the requested group columns.
        value_cols = [c for c in resampled.columns if c not in [date_col] + group_cols]
        return joined.select([date_col] + leading_cols + value_cols).to_pandas()

    def resample_dataset(self, df, date_col=None, item_col=None, frequency=None, agg_dict=None, use_lazy=True):
        """
        Return a dataframe resampling the date dimension to the specified frequency using Polars.

        Missing (item, date) combinations within the output range are filled
        with 0 and dates are adjusted to the last day of each month.

        :param df: Pandas DataFrame to be resampled
        :param date_col: Name of the date column
        :param item_col: Name of the item column
        :param frequency: Target frequency to resample the data (e.g., 'M' for monthly, 'W' for weekly)
        :param agg_dict: Aggregation dictionary including column as key and operation as value
        :param use_lazy: Use lazy evaluation for better performance (default: True)
        :return df_out: Pandas DataFrame resampled
        :raises KeyError: If a referenced column is not present in ``df``.

        >>> df_out = resample_dataset(df, date_col='timestamp', item_col='item_id',
        ...                           frequency='M', agg_dict={'demand': 'sum'})
        >>> df_out =
                 timestamp  item_id  demand
            0    2021-01-31 sku1     23
            1    2021-02-28 sku1     543
        """
        try:
            df_out = self._resample_core(
                df,
                date_col=date_col,
                group_cols=[item_col],
                leading_cols=[item_col],
                frequency=frequency,
                agg_dict=agg_dict,
                use_lazy=use_lazy,
            )
            df_out = self.reorder_cols(df_out, first_cols=[date_col, item_col])
        except KeyError as err:
            self.logger.exception(f'Columns for index, item or qty not found. Please check spelling: {err}')
            raise
        return df_out

    def resample_dataset_with_location(self, df, date_col_=None, item_col_=None, location_col_=None, frequency_=None, agg_dict_=None, use_lazy=True):
        """
        Return a dataframe resampling the date dimension to the specified frequency using Polars,
        including location grouping.

        Missing (location, item, date) combinations within the output range are
        filled with 0 and dates are adjusted to the last day of each month.

        :param df: Pandas DataFrame to be resampled
        :param date_col_: Name of the date column
        :param item_col_: Name of the item column
        :param location_col_: Name of the location column
        :param frequency_: Target frequency to resample the data (e.g., 'M' for monthly, 'W' for weekly)
        :param agg_dict_: Aggregation dictionary including column as key and operation as value
        :param use_lazy: Use lazy evaluation for better performance (default: True)
        :return df_out: Pandas DataFrame resampled
        :raises KeyError: If a referenced column is not present in ``df``.

        >>> df_out = resample_dataset_with_location(df, date_col_='timestamp',
        ...                                         item_col_='item_id', location_col_='location',
        ...                                         frequency_='M', agg_dict_={'demand': 'sum'})
        """
        try:
            df_out = self._resample_core(
                df,
                date_col=date_col_,
                group_cols=[location_col_, item_col_],
                leading_cols=[item_col_, location_col_],
                frequency=frequency_,
                agg_dict=agg_dict_,
                use_lazy=use_lazy,
            )
            df_out = self.reorder_cols(df_out, first_cols=[date_col_, item_col_, location_col_])
        except KeyError as err:
            self.logger.exception(f'Columns for index, item or qty not found. Please check spelling: {err}')
            raise
        return df_out
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: datupapi
3
- Version: 1.112.1
3
+ Version: 1.113.0
4
4
  Summary: Utility library to support Datup AI MLOps processes
5
5
  Author: Datup AI
6
6
  Author-email: ramiro@datup.ai
@@ -45,6 +45,7 @@ datupapi/prepare/__init__.py
45
45
  datupapi/prepare/cleanse.py
46
46
  datupapi/prepare/format.py
47
47
  datupapi/prepare/format_dask.py
48
+ datupapi/prepare/format_opt.py
48
49
  datupapi/training/__init__.py
49
50
  datupapi/training/attup.py
50
51
  datupapi/training/deepar.py
@@ -1,7 +1,7 @@
1
1
  from setuptools import setup, find_packages
2
2
 
3
3
  setup(name='datupapi',
4
- version='1.112.1',
4
+ version='1.113.0',
5
5
  description='Utility library to support Datup AI MLOps processes',
6
6
  long_description_content_type="text/markdown",
7
7
  long_description="foo bar baz",
File without changes
File without changes