sibi-dst 0.3.32__py3-none-any.whl → 0.3.34__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -9,6 +9,33 @@ from sibi_dst.utils import Logger
 
 
 class ClickHouseWriter:
+    """
+    Provides functionality to write a Dask DataFrame to a ClickHouse database using
+    a specified schema. This class handles the creation of tables, schema generation,
+    data transformation, and data insertion. It ensures compatibility between Dask
+    data types and ClickHouse types.
+
+    :ivar clickhouse_host: Host address of the ClickHouse database.
+    :type clickhouse_host: str
+    :ivar clickhouse_port: Port of the ClickHouse database.
+    :type clickhouse_port: int
+    :ivar clickhouse_dbname: Name of the database to connect to in ClickHouse.
+    :type clickhouse_dbname: str
+    :ivar clickhouse_user: Username for database authentication.
+    :type clickhouse_user: str
+    :ivar clickhouse_password: Password for database authentication.
+    :type clickhouse_password: str
+    :ivar clickhouse_table: Name of the table to store the data in.
+    :type clickhouse_table: str
+    :ivar logger: Logger instance for logging messages.
+    :type logger: logging.Logger
+    :ivar client: Instance of the ClickHouse database client.
+    :type client: clickhouse_connect.Client or None
+    :ivar df: Dask DataFrame to be written into ClickHouse.
+    :type df: dask.dataframe.DataFrame
+    :ivar order_by: Field or column name to use for table ordering.
+    :type order_by: str
+    """
     dtype_to_clickhouse = {
         'int64': 'Int64',
         'int32': 'Int32',
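
Note: the dtype_to_clickhouse map that this hunk truncates is what backs the docstring's claim of Dask-to-ClickHouse type compatibility. Below is a minimal sketch of how such a map can drive schema generation, assuming a hypothetical build_schema helper and two assumed map entries (only the Int64/Int32 rows are visible in the diff):

    import dask.dataframe as dd
    import pandas as pd

    DTYPE_TO_CLICKHOUSE = {
        'int64': 'Int64',              # shown in the hunk
        'int32': 'Int32',              # shown in the hunk
        'float64': 'Float64',          # assumed entry
        'datetime64[ns]': 'DateTime',  # assumed entry
    }

    def build_schema(df: dd.DataFrame, table: str, order_by: str) -> str:
        # Hypothetical helper: one ClickHouse column per Dask column,
        # falling back to String for unmapped dtypes.
        cols = ', '.join(
            f'`{name}` {DTYPE_TO_CLICKHOUSE.get(str(dtype), "String")}'
            for name, dtype in df.dtypes.items()
        )
        return (f'CREATE TABLE IF NOT EXISTS {table} ({cols}) '
                f'ENGINE = MergeTree ORDER BY {order_by}')

    pdf = pd.DataFrame({'id': [1, 2], 'amount': [1.5, 2.5]})
    print(build_schema(dd.from_pandas(pdf, npartitions=1), 'events', 'id'))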
@@ -5,12 +5,43 @@ from sibi_dst.utils import Logger
 
 
 class DataUtils:
-
+    """
+    Utility class for data transformation, manipulation, and merging.
+
+    This class provides functionalities for transforming numeric and boolean columns, merging
+    lookup data, checking DataFrame emptiness, and converting columns to datetime format in
+    Pandas or Dask DataFrames. It is designed to handle data preprocessing steps efficiently
+    for both small-scale and large-scale datasets. Logging and debug options are available
+    to trace execution and monitor operations.
+
+    :ivar logger: Logger instance for logging messages.
+    :type logger: logging.Logger
+    :ivar debug: Flag to enable or disable debug mode.
+    :type debug: bool
+    """
     def __init__(self, logger=None, **kwargs):
         self.logger = logger or Logger.default_logger(logger_name=self.__class__.__name__)
         self.debug = kwargs.get('debug', False)
 
     def transform_numeric_cols(self, df, columns, fill_value=0, dtype=int):
+        """
+        This function transforms the specified numeric columns in the given dataframe by converting
+        their data types to the specified dtype, with an optional parameter for replacing missing
+        values. It first checks if the provided columns exist in the dataframe, processes each column
+        to replace non-numeric values with NaN, fills NaN values with the given fill_value, and finally
+        converts the column to the specified dtype.
+
+        :param df: DataFrame to be transformed.
+        :type df: dask.dataframe.DataFrame
+        :param columns: List of column names to be transformed.
+        :type columns: list[str]
+        :param fill_value: Value used to replace missing or invalid data. Default is 0.
+        :type fill_value: int or float
+        :param dtype: Target data type for the columns after transformation. Default is int.
+        :type dtype: type
+        :return: Transformed dataframe with the specified numeric columns converted and modified.
+        :rtype: dask.dataframe.DataFrame
+        """
         if not columns:
             self.logger.warning('No columns specified')
         self.logger.debug(f'Dataframe type:{type(df)}')
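
Note: because the transform_numeric_cols signature is visible in this hunk, a usage sketch is possible; the import path is assumed from the package layout in the RECORD at the end of this diff:

    import pandas as pd
    import dask.dataframe as dd
    from sibi_dst.utils import DataUtils  # export assumed

    ddf = dd.from_pandas(
        pd.DataFrame({'qty': ['3', 'oops', None], 'price': ['1.5', '2', '']}),
        npartitions=1,
    )
    du = DataUtils(debug=True)
    # Per the docstring: non-numeric values become NaN, NaN is filled with
    # fill_value, and the columns are cast to dtype.
    clean = du.transform_numeric_cols(ddf, columns=['qty', 'price'], fill_value=0, dtype=int)
    print(clean.compute())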
@@ -12,6 +12,62 @@ from sibi_dst.utils import ParquetSaver
 
 
 class DataWrapper:
+    """
+    Utility class for handling file-based operations, including processing and saving data
+    in Parquet format, while managing a hierarchy of conditions such as overwrite, history
+    threshold, and missing file detection.
+
+    This class aims to simplify the process of managing large datasets stored in a filesystem.
+    It allows for controlled updates to data files based on parameters set by the user, with
+    support for different filesystem types and options.
+
+    It also provides features like logging actions, managing processing threads, generating
+    update plans, checking file age, and dynamically creating date ranges for data operations.
+
+    The design supports flexible integration with user-defined classes (dataclasses) to define
+    custom loading and processing behavior.
+
+    :ivar dataclass: The user-defined class for data processing.
+    :type dataclass: Type
+    :ivar date_field: The name of the date field in the user-defined class.
+    :type date_field: str
+    :ivar data_path: Base path for the dataset storage.
+    :type data_path: str
+    :ivar parquet_filename: File name for the Parquet file.
+    :type parquet_filename: str
+    :ivar start_date: Start date for processing.
+    :type start_date: datetime.date
+    :ivar end_date: End date for processing.
+    :type end_date: datetime.date
+    :ivar fs: File system object for managing files.
+    :type fs: Optional[fsspec.AbstractFileSystem]
+    :ivar filesystem_type: Type of the filesystem (e.g., "file", "s3").
+    :type filesystem_type: str
+    :ivar filesystem_options: Additional options for initializing the filesystem.
+    :type filesystem_options: Optional[Dict]
+    :ivar verbose: Flag to enable verbose logging.
+    :type verbose: bool
+    :ivar class_params: Parameters to initialize the dataclass.
+    :type class_params: Optional[Dict]
+    :ivar load_params: Additional parameters for loading functions.
+    :type load_params: Optional[Dict]
+    :ivar reverse_order: Flag to reverse the order of date range generation.
+    :type reverse_order: bool
+    :ivar overwrite: Whether to overwrite all files during processing.
+    :type overwrite: bool
+    :ivar ignore_missing: Whether to ignore missing files.
+    :type ignore_missing: bool
+    :ivar logger: Logger instance for logging information.
+    :type logger: Optional[Logger]
+    :ivar max_age_minutes: Maximum file age threshold in minutes.
+    :type max_age_minutes: int
+    :ivar history_days_threshold: Number of days for the history threshold.
+    :type history_days_threshold: int
+    :ivar show_progress: Flag to enable progress display.
+    :type show_progress: bool
+    :ivar timeout: Timeout in seconds for processing tasks with threads.
+    :type timeout: Optional[int]
+    """
     DEFAULT_MAX_AGE_MINUTES = 1440
     DEFAULT_HISTORY_DAYS_THRESHOLD = 30
 
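Note: the ivar list above doubles as a constructor reference. Below is a minimal wiring sketch, assuming the documented ivars map one-to-one onto __init__ keywords (the constructor itself is not part of this diff) and using a hypothetical user-defined dataclass:

    import datetime
    from sibi_dst.utils import DataWrapper  # export assumed

    class DailyOrders:
        # Hypothetical dataclass; the loading interface DataWrapper expects
        # from it is not shown in this hunk.
        def __init__(self, **kwargs): ...

    wrapper = DataWrapper(
        dataclass=DailyOrders,
        date_field='order_date',
        data_path='/data/orders/',
        parquet_filename='orders.parquet',
        start_date=datetime.date(2024, 1, 1),
        end_date=datetime.date(2024, 1, 31),
        filesystem_type='file',
        overwrite=False,             # regenerate everything when True
        history_days_threshold=30,   # DEFAULT_HISTORY_DAYS_THRESHOLD
        max_age_minutes=1440,        # DEFAULT_MAX_AGE_MINUTES
        show_progress=True,
    )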
@@ -80,7 +136,19 @@ class DataWrapper:
             yield date.date()
 
     def process(self):
-        """Execute the update plan using 'update_priority' to determine processing order."""
+        """
+        Processes update tasks by generating an update plan, filtering required updates, and distributing
+        the workload across threads based on priority levels.
+
+        This method operates by assessing required updates through generated conditions,
+        grouping them by priority levels, and processing them in parallel threads.
+        Each thread handles the updates for a specific priority level, ensuring a streamlined approach
+        to handling the updates efficiently.
+
+        :raises TimeoutError: If a thread processing a priority level exceeds the allowed timeout duration.
+
+        :return: None
+        """
         update_plan_table = self.generate_update_plan_with_conditions()
 
         # Display the update plan table to the user if requested
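
Note: a generic, standalone illustration of the priority-grouped threading the new process() docstring describes (group dates by update_priority, run each group in parallel, surface TimeoutError on overrun); this is a sketch of the pattern, not the class's actual implementation:

    from concurrent.futures import ThreadPoolExecutor

    def process_by_priority(plan, handler, timeout=None):
        for priority in sorted(plan):  # lower value = processed first
            with ThreadPoolExecutor() as pool:
                futures = [pool.submit(handler, date) for date in plan[priority]]
                for future in futures:
                    future.result(timeout=timeout)  # raises TimeoutError on overrun

    plan = {1: ['2024-01-03'], 2: ['2024-01-01', '2024-01-02']}
    process_by_priority(plan, handler=print, timeout=30)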
@@ -171,7 +239,20 @@ class DataWrapper:
         return True #
 
     def process_date(self, date: datetime.date):
-        """Process a specific date by regenerating data as necessary."""
+        """
+        Processes data for a given date and saves it as a Parquet file.
+
+        This method processes data for the specified date by loading the data
+        corresponding to that day, saving it into a structured storage format
+        (Parquet), and logging relevant information such as processing time
+        and errors that may occur during the process. It uses provided
+        dataclass and parameters to operate and ensures the data is stored
+        in a structured folder hierarchy.
+
+        :param date: The specific date for which data processing and saving should occur
+        :type date: datetime.date
+        :return: None
+        """
         folder = f'{self.data_path}{date.year}/{date.month:02d}/{date.day:02d}/'
         full_parquet_filename = f"{folder}{self.parquet_filename}"
 
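Note: the folder construction shown above fixes the on-disk layout to data_path/YYYY/MM/DD/. Worked through with illustrative values:

    import datetime

    data_path = '/data/orders/'          # illustrative
    parquet_filename = 'orders.parquet'  # illustrative
    date = datetime.date(2024, 3, 7)

    folder = f'{data_path}{date.year}/{date.month:02d}/{date.day:02d}/'
    full_parquet_filename = f"{folder}{parquet_filename}"
    print(full_parquet_filename)  # /data/orders/2024/03/07/orders.parquet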
@@ -196,10 +277,17 @@ class DataWrapper:
 
     def generate_update_plan_with_conditions(self):
         """
-        Generate an update plan that evaluates files based on the specified hierarchy:
-        1. Overwrite (all files regenerated).
-        2. History threshold: Files within `history_days_threshold` are evaluated for `max_age_minutes`.
-        3. Missing files: Detect missing files, ignoring future dates.
+        Generates an update plan for data files based on specific conditions. The function evaluates the need for updating or
+        overwriting data files for a given date range. Conditions include file existence, whether the file falls within a
+        specified historical threshold, and the necessity to overwrite or handle missing files. A priority map is utilized to
+        assign priority levels to update categories.
+
+        :raises FileNotFoundError: If any file is referenced that does not exist and the ``ignore_missing`` property is set to False.
+        :raises AttributeError: If any required attribute like ``fs``, ``dataclass``, or others are not properly set or initialized.
+
+        :return: A Pandas DataFrame representing the update plan, where each row contains information about a date, the conditions
+          evaluated for that date, and the determined update priority.
+        :rtype: pandas.DataFrame
         """
         rows = []
 
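Note: a toy reconstruction of the plan table the method now documents, one row per date with the evaluated conditions and a mapped priority. Column names and the priority map are illustrative; the diff only fixes the pandas.DataFrame return type:

    import datetime
    import pandas as pd

    priority_map = {'overwrite': 1, 'missing_file': 2, 'stale_in_history': 3}  # assumed

    rows = []
    for date, (exists, within_history) in {
        datetime.date(2024, 1, 1): (True, True),
        datetime.date(2024, 1, 2): (False, False),
    }.items():
        category = 'stale_in_history' if exists and within_history else 'missing_file'
        rows.append({
            'date': date,
            'file_exists': exists,
            'within_history': within_history,
            'update_category': category,
            'update_priority': priority_map[category],
        })
    print(pd.DataFrame(rows))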
@@ -8,6 +8,24 @@ from sibi_dst.utils import Logger
 
 
 class DateUtils:
+    """
+    Utility class for date-related operations.
+
+    The DateUtils class provides a variety of operations to manipulate and retrieve
+    information about dates, such as calculating week ranges, determining start or
+    end dates for specific periods (quarters, months, years), and dynamically
+    registering custom time period functions. It also supports parsing specific
+    periods for date range computations and ensuring the input date is correctly
+    converted to the desired format.
+
+    :ivar logger: Logger instance used for logging messages. Defaults to the logger
+        for the current class if not provided.
+    :type logger: Logger
+
+    :ivar _PERIOD_FUNCTIONS: Stores dynamically registered period functions that
+        return start and end dates.
+    :type _PERIOD_FUNCTIONS: Dict[str, Callable[[], Tuple[datetime.date, datetime.date]]]
+    """
     _PERIOD_FUNCTIONS: Dict[str, Callable[[], Tuple[datetime.date, datetime.date]]] = {}
 
     def __init__(self, logger=None):
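
Note: the _PERIOD_FUNCTIONS annotation pins down the registration contract, a name mapped to a zero-argument callable returning a (start, end) date pair. Below is a minimal re-creation of that pattern; DateUtils' real registration API is not shown in this hunk:

    import datetime
    from typing import Callable, Dict, Tuple

    _PERIOD_FUNCTIONS: Dict[str, Callable[[], Tuple[datetime.date, datetime.date]]] = {}

    def register_period(name: str):
        # Decorator-style registration matching the documented contract.
        def decorator(fn: Callable[[], Tuple[datetime.date, datetime.date]]):
            _PERIOD_FUNCTIONS[name] = fn
            return fn
        return decorator

    @register_period('ytd')
    def year_to_date() -> Tuple[datetime.date, datetime.date]:
        today = datetime.date.today()
        return datetime.date(today.year, 1, 1), today

    print(_PERIOD_FUNCTIONS['ytd']())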
@@ -127,6 +145,23 @@ class DateUtils:
 
 
 class BusinessDays:
+    """
+    Provides functionality for handling business days calculations with a custom
+    holiday list. The class includes methods for calculating the number of
+    business days, modifying dates by adding business days, and applying these
+    operations to Dask DataFrames.
+
+    :ivar logger: Logger instance for logging error, warning, and debug messages.
+    :type logger: logging.Logger
+    :ivar HOLIDAY_LIST: Dictionary mapping years to lists of holiday dates.
+    :type HOLIDAY_LIST: dict
+    :ivar bd_cal: Numpy busdaycalendar object containing holidays and week mask.
+    :type bd_cal: numpy.busdaycalendar
+    :ivar holidays: Array of holiday dates used by the business day calendar.
+    :type holidays: numpy.ndarray
+    :ivar week_mask: Boolean array indicating working days within a week.
+    :type week_mask: numpy.ndarray
+    """
     def __init__(self, holiday_list, logger):
         """
         Initialize a BusinessDays object with a given holiday list.
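
Note: the ivars name the numpy machinery involved. Below is a sketch of how a year-keyed holiday dict can become a busdaycalendar; the flattening step and the default Mon-Fri weekmask are assumptions, since the body of __init__ is not part of this hunk:

    import numpy as np

    holiday_list = {'2024': ['2024-01-01', '2024-12-25']}  # years -> holiday dates
    holidays = np.array(
        [day for days in holiday_list.values() for day in days],
        dtype='datetime64[D]',
    )
    bd_cal = np.busdaycalendar(holidays=holidays)  # weekmask defaults to Mon-Fri

    # Business days in [start, end), skipping weekends and the Dec 25 holiday:
    print(np.busday_count('2024-12-24', '2024-12-27', busdaycal=bd_cal))  # -> 2

    # Shift a date forward by 5 business days:
    print(np.busday_offset('2024-12-24', 5, roll='forward', busdaycal=bd_cal))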
@@ -1,11 +1,28 @@
-# Copyright (c) 2023. ISTMO Center S.A. All Rights Reserved
-#
 import logging
 import os
 import sys
 
 
 class Logger:
+    """
+    Handles the creation, setup, and management of logging functionalities.
+
+    This class facilitates logging by creating and managing a logger instance with
+    customizable logging directory, name, and file. It ensures logs from a script
+    are stored in a well-defined directory and file, and provides various logging
+    methods for different log levels. The logger automatically formats and handles
+    log messages. Additionally, this class provides a class method to initialize a
+    logger with default behaviors.
+
+    :ivar log_dir: Path to the directory where log files are stored.
+    :type log_dir: str
+    :ivar logger_name: Name of the logger instance.
+    :type logger_name: str
+    :ivar log_file: Base name of the log file.
+    :type log_file: str
+    :ivar logger: The initialized logger instance used for logging messages.
+    :type logger: logging.Logger
+    """
     def __init__(self, log_dir, logger_name, log_file):
         self.log_dir = log_dir
         self.logger_name = logger_name
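
Note: the constructor arguments match the documented ivars, and Logger.default_logger(logger_name=...) is already used elsewhere in this diff (see DataUtils.__init__ above). A short usage sketch; the per-level methods are assumed from the docstring's "various logging methods":

    from sibi_dst.utils import Logger  # export assumed

    # Explicit construction with the documented attributes:
    log = Logger(log_dir='./logs', logger_name='etl', log_file='etl')

    # Or the default-initialization class method seen in DataUtils.__init__:
    log = Logger.default_logger(logger_name='etl')
    log.info('pipeline started')  # level method assumed per the docstring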
@@ -121,109 +121,3 @@ class ParquetSaver:
             write_index=False,
         )
 
-# from pathlib import Path
-# from typing import Optional
-#
-# import fsspec
-# import pyarrow as pa
-#
-# from sibi_dst.utils import Logger
-#
-#
-# class ParquetSaver:
-#     def __init__(self, df_result, parquet_storage_path, logger=None, fs=None):
-#         # Ensure df_result is a Dask DataFrame
-#         self.fs = fs or fsspec.filesystem("file")
-#         self.df_result = df_result
-#         self.parquet_storage_path = parquet_storage_path
-#         self.logger = logger or Logger.default_logger(logger_name=self.__class__.__name__)
-#
-#     def save_to_parquet(self, parquet_filename: Optional[str] = None, clear_existing=True):
-#         full_path = self._construct_full_path(parquet_filename)
-#
-#         # We cannot check for empty DataFrame directly with Dask without computation
-#         # Proceed with saving; if the DataFrame is empty, an empty Parquet file will be created
-#
-#         # Ensure directory exists and clear if necessary
-#         self._ensure_directory_exists(full_path, clear_existing=clear_existing)
-#
-#         # Define schema and save DataFrame to Parquet
-#         schema = self._define_schema()
-#         self._convert_dtypes(schema)
-#         self._save_dataframe_to_parquet(full_path, schema)
-#
-#     def _define_schema(self) -> pa.Schema:
-#         """Define a PyArrow schema dynamically based on df_result column types."""
-#         pandas_dtype_to_pa = {
-#             'object': pa.string(),
-#             'string': pa.string(),
-#             'Int64': pa.int64(),
-#             'int64': pa.int64(),
-#             'float64': pa.float64(),
-#             'float32': pa.float32(),
-#             'bool': pa.bool_(),
-#             'boolean': pa.bool_(),  # pandas nullable boolean
-#             'datetime64[ns]': pa.timestamp('ns'),
-#             'timedelta[ns]': pa.duration('ns')
-#         }
-#
-#         dtypes = self.df_result.dtypes  # No need to call .compute()
-#
-#         fields = [
-#             pa.field(col, pandas_dtype_to_pa.get(str(dtype), pa.string()))
-#             for col, dtype in dtypes.items()
-#         ]
-#         return pa.schema(fields)
-#
-#     def _convert_dtypes(self, schema: pa.Schema):
-#         """Convert DataFrame columns to match the specified schema."""
-#         dtype_mapping = {}
-#         for field in schema:
-#             col_name = field.name
-#             if col_name in self.df_result.columns:
-#                 if pa.types.is_string(field.type):
-#                     dtype_mapping[col_name] = 'string'
-#                 elif pa.types.is_int64(field.type):
-#                     dtype_mapping[col_name] = 'Int64'  # pandas nullable integer
-#                 elif pa.types.is_float64(field.type):
-#                     dtype_mapping[col_name] = 'float64'
-#                 elif pa.types.is_float32(field.type):
-#                     dtype_mapping[col_name] = 'float32'
-#                 elif pa.types.is_boolean(field.type):
-#                     dtype_mapping[col_name] = 'boolean'  # pandas nullable boolean
-#                 elif pa.types.is_timestamp(field.type):
-#                     dtype_mapping[col_name] = 'datetime64[ns]'
-#                 else:
-#                     dtype_mapping[col_name] = 'object'  # Fallback to object
-#         # Convert dtypes
-#         self.df_result = self.df_result.astype(dtype_mapping)
-#
-#     def _construct_full_path(self, parquet_filename: Optional[str]) -> Path:
-#         """Construct and return the full path for the Parquet file."""
-#         _, base_path = fsspec.core.url_to_fs(self.parquet_storage_path)
-#         parquet_filename = parquet_filename or "default.parquet"
-#         return Path(base_path) / parquet_filename
-#
-#     @staticmethod
-#     def _ensure_directory_exists(full_path: Path, clear_existing=False):
-#         """Ensure that the directory for the path exists, clearing it if specified."""
-#         fs, _ = fsspec.core.url_to_fs(str(full_path))
-#         directory = str(full_path.parent)
-#
-#         if fs.exists(directory):
-#             if clear_existing:
-#                 fs.rm(directory, recursive=True)
-#         else:
-#             fs.mkdirs(directory, exist_ok=True)
-#
-#     def _save_dataframe_to_parquet(self, full_path: Path, schema: pa.Schema):
-#         """Save the DataFrame to Parquet using the specified schema."""
-#         fs, _ = fsspec.core.url_to_fs(str(full_path))
-#         print(f"Saving to {str(full_path)}")
-#         if fs.exists(str(full_path)):
-#             fs.rm(str(full_path), recursive=True)
-#
-#         # Save the Dask DataFrame to Parquet
-#         self.df_result.to_parquet(
-#             str(full_path), engine="pyarrow", schema=schema, write_index=False
-#         )
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: sibi-dst
-Version: 0.3.32
+Version: 0.3.34
 Summary: Data Science Toolkit
 Author: Luis Valverde
 Author-email: lvalverdeb@gmail.com
@@ -1,55 +1,55 @@
 sibi_dst/__init__.py,sha256=CLHfzrFNqklNx5uMKAPtbZfkbBbVYR5qsiMro0RTfmA,252
 sibi_dst/df_helper/__init__.py,sha256=5yzslP6zYYOHsTtAzHnNDXHYjf_T6yW7baxwgtduWqQ,292
-sibi_dst/df_helper/_df_helper.py,sha256=sZaI998N9yd7FuUgZ8Esrz-K0eh2kXky53h9K8-l4cw,23650
-sibi_dst/df_helper/_parquet_artifact.py,sha256=HVChP3UBCsEMpY-yyFERLaB76mWaziQXkdu2Qtzm7_s,5291
-sibi_dst/df_helper/_parquet_reader.py,sha256=0qJHMS1PLcODTLMS13UW5iFQLK8b3qjgy7qDzcupgII,1963
+sibi_dst/df_helper/_df_helper.py,sha256=NRiLdHHO45SPwhif5JIQpfj56iC8HcffaRAyT7-TC2w,29585
+sibi_dst/df_helper/_parquet_artifact.py,sha256=K9FnKjXDmkqCzYqv5weS9scLHsPGyj0UUUoVzOtWv30,8858
+sibi_dst/df_helper/_parquet_reader.py,sha256=HhzhKtV_7qABHJvmpU2CssjNLgQHUB07eF0CqqzmkOs,3654
 sibi_dst/df_helper/backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 sibi_dst/df_helper/backends/django/__init__.py,sha256=uWHi-DtQX5re7b2HcqoXUH3_FZWOw1VTmDf552FAkNs,256
-sibi_dst/df_helper/backends/django/_db_connection.py,sha256=kWITSPqn3286NzPvWSSE_PtJCm1tyfrv2RIuPSThXlQ,1634
-sibi_dst/df_helper/backends/django/_io_dask.py,sha256=jryDojeA62rB3seRaWWMjsAmekKacK5xctwCQGVklPQ,9063
-sibi_dst/df_helper/backends/django/_load_from_db.py,sha256=GLsAsuEQD1cXfEm7BuxofZfR32VwZNEfwR9c-AZn-x0,5555
-sibi_dst/df_helper/backends/django/_sql_model_builder.py,sha256=L_wDPaFnFFlewlTcD_KNlOwECU4Dy8zC58tOuZY4208,14886
+sibi_dst/df_helper/backends/django/_db_connection.py,sha256=AGbqCnmiX4toMaFPE5ne5h7QCkImjnBKvzGtUD6Ge8Q,3698
+sibi_dst/df_helper/backends/django/_io_dask.py,sha256=NjvJg6y9qKKCRiNrJL4f_A03iKDKEcjCi7LGbr9DgtM,19555
+sibi_dst/df_helper/backends/django/_load_from_db.py,sha256=jQGIeviggHmSfK28Z47nHz3cXs78gHsE8Op7Ov1kFCo,10562
+sibi_dst/df_helper/backends/django/_sql_model_builder.py,sha256=at9J7ecGkZbOOYba85uofe9C-ic4wwOqVgJcHpQNiYQ,21449
 sibi_dst/df_helper/backends/http/__init__.py,sha256=d1pfgYxbiYg7E0Iw8RbJ7xfqIfJShqqTBQQGU_S6OOo,105
-sibi_dst/df_helper/backends/http/_http_config.py,sha256=l6GdzTsknfzyf8LAo_TuIWeiswLRRrLcmqAmirxpH8Q,2132
+sibi_dst/df_helper/backends/http/_http_config.py,sha256=eGPFdqZ5M3Tscqx2P93B6XoBEEzlmdt7yNg7PXUQnNQ,4726
 sibi_dst/df_helper/backends/parquet/__init__.py,sha256=esWJ9aSuYC26d-T01z9dPrJ1uqJzvdaPNTYRb5qXTlQ,182
-sibi_dst/df_helper/backends/parquet/_filter_handler.py,sha256=Q8Ic9PLDGT4L97yqr20mr_NsdEeMMOlFkT7Z12yYCxI,3663
-sibi_dst/df_helper/backends/parquet/_parquet_options.py,sha256=BJQC2ZPnMMeN8iVq2scmhYtoZzkhdkZIPV1KamCPasc,4689
+sibi_dst/df_helper/backends/parquet/_filter_handler.py,sha256=VrT79Cr1HXu0VLVTuwAYOwSo52p8B1Q5lT3tTIk4uAA,5136
+sibi_dst/df_helper/backends/parquet/_parquet_options.py,sha256=RZ5JopJVv9G_j0S9Vw66ohNvNcZHmeBfncMwSsLjM6o,10707
 sibi_dst/df_helper/backends/sqlalchemy/__init__.py,sha256=TuVp8Ce49dCIIxtyrtFGRblarQUl8QGcS-TDZd515IE,348
-sibi_dst/df_helper/backends/sqlalchemy/_db_connection.py,sha256=Og8dDFZX0FnS_ClLAik5O36mNgHSixUdg0_FNo-w-t4,1641
+sibi_dst/df_helper/backends/sqlalchemy/_db_connection.py,sha256=-z3eYZVDKJPbuB_RWbY_zBlLuwvVyY5R5FvnLHp0S8A,2501
 sibi_dst/df_helper/backends/sqlalchemy/_filter_handler.py,sha256=58RCda1Hg_nsuJw-2V36IstsT8O84IQFgsdE7FnqvMk,4655
 sibi_dst/df_helper/backends/sqlalchemy/_io_dask.py,sha256=UuAHzZWBADsTwGhwZTJzR66Xdh189OR81C1IITwzls0,5620
-sibi_dst/df_helper/backends/sqlalchemy/_load_from_db.py,sha256=ML-m_WeTR1_UMgiDRMp_z4ebyPGqsFV8SJ3KeDoQAkA,2215
-sibi_dst/df_helper/backends/sqlalchemy/_sql_model_builder.py,sha256=Bmhh6VvmBfNfBA2JpuEdsYD_193yJ768Si2TvkY9HmU,4405
+sibi_dst/df_helper/backends/sqlalchemy/_load_from_db.py,sha256=I2Us3RrxHci561yyZYBuUCrLVOhB0F3KBnae78m_ARw,6259
+sibi_dst/df_helper/backends/sqlalchemy/_sql_model_builder.py,sha256=9oOOGrqAj9yL0FNWR1Fm9PdN7GoFi03ktCOyMjxAKLY,8402
 sibi_dst/df_helper/core/__init__.py,sha256=o4zDwgVmaijde3oix0ezb6KLxI5QFy-SGUhFTDVFLT4,569
 sibi_dst/df_helper/core/_defaults.py,sha256=eNpHD2sZxir-2xO0b3_V16ryw8YP_5FfpIKK0HNuiN4,7011
 sibi_dst/df_helper/core/_filter_handler.py,sha256=t3uLLJX5hWO_dWKCCz8Dwpc9RZ5PMHBIWkHSELCpFXI,11131
-sibi_dst/df_helper/core/_params_config.py,sha256=Og3GYth0GVWpcOYWZWRy7CZ5PDsg63Nmqo-W7TUrA_0,3503
+sibi_dst/df_helper/core/_params_config.py,sha256=DYx2drDz3uF-lSPzizPkchhy-kxRrQKE5FQRxcEWsac,6736
 sibi_dst/df_helper/core/_query_config.py,sha256=Y8LVSyaKuVkrPluRDkQoOwuXHQxner1pFWG3HPfnDHM,441
 sibi_dst/df_helper/data_cleaner.py,sha256=lkxQoXLvGzXCicFUimnA5nen5qkrO1oxgl_p2Be2o8w,5183
 sibi_dst/geopy_helper/__init__.py,sha256=Q1RJiUZIOlV0QNNLjxZ_2IZS5LqIe5jRbeQkfD1Vm60,112
-sibi_dst/geopy_helper/geo_location_service.py,sha256=l0dV0XuEk-tcWdaOymgN9WulR6xp3k7yJUgqYvnqCKo,2288
-sibi_dst/geopy_helper/utils.py,sha256=R9X6ew0L_SuCpsA_AQK1wd3BspRGtV83q3mhBkcKr4A,1664
+sibi_dst/geopy_helper/geo_location_service.py,sha256=1ArI980QF_gRw096ZsABHwJt-m55jrfOlB8tPwL1BvY,2959
+sibi_dst/geopy_helper/utils.py,sha256=Sb7qfSqIyWh-AZ4GBdB9-z5FrQPWtrdtQLLcNjph0yw,3351
 sibi_dst/osmnx_helper/__init__.py,sha256=QeAKEeVXZk_qn8o0d3BOoGgv2lzatcI2yBqY3ZqviKI,153
-sibi_dst/osmnx_helper/base_osm_map.py,sha256=s2OY_XfwjZA3ImJNtCgevGBCbwRVe3dY3QVkTHEulB0,5794
+sibi_dst/osmnx_helper/base_osm_map.py,sha256=L7g3VBiayHX41BcCBTOCS0iJOKzp2ZZYcrp8N-mnU90,19392
 sibi_dst/osmnx_helper/basemaps/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 sibi_dst/osmnx_helper/basemaps/calendar_html.py,sha256=UArt6FDgoCgoRte45Xo3IHqd-RNzW0YgitgZYfOFasY,4031
 sibi_dst/osmnx_helper/basemaps/router_plotter.py,sha256=QznnBGsUwhl8ZITcVNBrQDm-MXAd0jpJGPuyozKyQg0,8537
-sibi_dst/osmnx_helper/utils.py,sha256=8sF-wNSL38WzhWS3DceZ1cP8BM11i7D0bI-E4XYD8K4,8449
+sibi_dst/osmnx_helper/utils.py,sha256=BzuY8CtYnBAAO8UAr_M7EOk6CP1zcifNLs8pkdFZEFg,20577
 sibi_dst/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 sibi_dst/tests/test_data_wrapper_class.py,sha256=Nkup5OFH5Cos2fxPaU7g9IEyINJM0uJ5-rOZ-eNtd20,3275
 sibi_dst/utils/__init__.py,sha256=z51o5sjIo_gTjnDXk5SBniCxWJIrDBMS7df0dTs8VMk,775
 sibi_dst/utils/airflow_manager.py,sha256=-d44EKUZNYJyp4wuNwRvilRQktunArPOB5fZuWdQv10,7526
-sibi_dst/utils/clickhouse_writer.py,sha256=xUhFDOuZt0eZDpVJNuLb7pfTHUV06NCYrNUx_a7qrSM,8580
+sibi_dst/utils/clickhouse_writer.py,sha256=syXGN9NG1FS8soHuMj6QNRqTRWi-thuYUF-_BWDc_KI,9883
 sibi_dst/utils/credentials.py,sha256=cHJPPsmVyijqbUQIq7WWPe-lIallA-mI5RAy3YUuRME,1724
-sibi_dst/utils/data_utils.py,sha256=Kv87Br78EXlH_MSVzRspqLwrf6sqHIRQc0t3LDI0dSM,7045
-sibi_dst/utils/data_wrapper.py,sha256=Ope_G2Eq9FWg-phdTyU_7nsGnu4evsvofUVedd_SGas,11941
-sibi_dst/utils/date_utils.py,sha256=CMAZBNwVj7cvERcNiTA8Pf7_5EjV9By9yxkYJpkqz1g,10656
+sibi_dst/utils/data_utils.py,sha256=j-lEKt6EJL2fm0z7adcjtVG7yFYLRpQL8xSgh2CVmJg,8769
+sibi_dst/utils/data_wrapper.py,sha256=Ybmn9V7XYuPdUliMz1QrwrXraR_YFOzSr2zJmZOVmWM,16462
+sibi_dst/utils/date_utils.py,sha256=ei7WgzIUk1tRa3sHniaVm_lNmfTGq12b_HzmMV91k18,12407
 sibi_dst/utils/df_utils.py,sha256=OFEtcwVKIilvf9qVf-IfIOHp4jcFAHX5l2IDGudhPZg,10989
 sibi_dst/utils/file_utils.py,sha256=JpsybYj3XvVJisSBeVU6YSaZnYRm4_6YWTI3TLnnY4Y,1257
 sibi_dst/utils/filepath_generator.py,sha256=volVm0SSlBrtZp1RpTHxyui5rj5asNcVsWEBRY5FOUQ,6673
-sibi_dst/utils/log_utils.py,sha256=4eLmoV8VC7wDwPr1mRfDKP24_-laGO6ogE4U0u3DUuA,2315
-sibi_dst/utils/parquet_saver.py,sha256=kR4FsjdMurQF46M0jc2Kvze4Ue70lUxefEzS0iszln8,9740
+sibi_dst/utils/log_utils.py,sha256=XUbeXa1JsOlcEJyW8jnBlWo295rLUnuYi-HMzyhHwJg,3145
+sibi_dst/utils/parquet_saver.py,sha256=FmSTOVhKruGw6r5G1sH3kKqsP0tCuU32KTlyQBLpXos,5092
 sibi_dst/utils/storage_manager.py,sha256=qHo5vTv-dr1roRr_mOcprSTdlAfH4Q2Dy5tQUz06Pnk,4228
-sibi_dst-0.3.32.dist-info/METADATA,sha256=8CNqCjmW44vqkrhy-hvVlSmHS3s5jiPr2VDZV5V1Nl0,2564
-sibi_dst-0.3.32.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
-sibi_dst-0.3.32.dist-info/RECORD,,
+sibi_dst-0.3.34.dist-info/METADATA,sha256=ewd8lmlRjJg0lEeEI0ju5g20zGk7Lk1bdgBxunNpf3s,2564
+sibi_dst-0.3.34.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+sibi_dst-0.3.34.dist-info/RECORD,,