sibi-dst 0.3.63__tar.gz → 2025.1.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (95)
  1. sibi_dst-2025.1.1/PKG-INFO +55 -0
  2. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/README.md +3 -9
  3. sibi_dst-2025.1.1/pyproject.toml +49 -0
  4. sibi_dst-2025.1.1/sibi_dst/df_helper/_df_helper.py +232 -0
  5. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/df_helper/backends/sqlalchemy/__init__.py +0 -2
  6. sibi_dst-2025.1.1/sibi_dst/df_helper/backends/sqlalchemy/_db_connection.py +248 -0
  7. sibi_dst-2025.1.1/sibi_dst/df_helper/backends/sqlalchemy/_io_dask.py +329 -0
  8. sibi_dst-2025.1.1/sibi_dst/df_helper/backends/sqlalchemy/_load_from_db.py +74 -0
  9. sibi_dst-2025.1.1/sibi_dst/df_helper/backends/sqlalchemy/_sql_model_builder.py +206 -0
  10. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/df_helper/core/__init__.py +0 -4
  11. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/df_helper/core/_defaults.py +1 -50
  12. {sibi_dst-0.3.63/sibi_dst/v2 → sibi_dst-2025.1.1/sibi_dst}/df_helper/core/_query_config.py +2 -2
  13. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/utils/__init__.py +0 -2
  14. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/utils/data_wrapper.py +9 -12
  15. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/utils/log_utils.py +15 -11
  16. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/utils/update_planner.py +2 -0
  17. sibi_dst-2025.1.1/sibi_dst/v2/df_helper/backends/sqlalchemy/_db_connection.py +357 -0
  18. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/v2/df_helper/backends/sqlalchemy/_io_dask.py +2 -2
  19. sibi_dst-2025.1.1/sibi_dst/v2/df_helper/backends/sqlmodel/_db_connection.py +357 -0
  20. sibi_dst-2025.1.1/sibi_dst/v3/__init__.py +0 -0
  21. sibi_dst-2025.1.1/sibi_dst/v3/backends/__init__.py +0 -0
  22. sibi_dst-2025.1.1/sibi_dst/v3/df_helper/__init__.py +0 -0
  23. sibi_dst-2025.1.1/sibi_dst/v3/df_helper/_df_helper.py +91 -0
  24. sibi_dst-0.3.63/PKG-INFO +0 -90
  25. sibi_dst-0.3.63/pyproject.toml +0 -55
  26. sibi_dst-0.3.63/sibi_dst/df_helper/_df_helper.py +0 -637
  27. sibi_dst-0.3.63/sibi_dst/df_helper/backends/django/__init__.py +0 -11
  28. sibi_dst-0.3.63/sibi_dst/df_helper/backends/django/_db_connection.py +0 -88
  29. sibi_dst-0.3.63/sibi_dst/df_helper/backends/django/_io_dask.py +0 -450
  30. sibi_dst-0.3.63/sibi_dst/df_helper/backends/django/_load_from_db.py +0 -227
  31. sibi_dst-0.3.63/sibi_dst/df_helper/backends/django/_sql_model_builder.py +0 -493
  32. sibi_dst-0.3.63/sibi_dst/df_helper/backends/sqlalchemy/_db_connection.py +0 -202
  33. sibi_dst-0.3.63/sibi_dst/df_helper/backends/sqlalchemy/_filter_handler.py +0 -119
  34. sibi_dst-0.3.63/sibi_dst/df_helper/backends/sqlalchemy/_io_dask.py +0 -135
  35. sibi_dst-0.3.63/sibi_dst/df_helper/backends/sqlalchemy/_load_from_db.py +0 -145
  36. sibi_dst-0.3.63/sibi_dst/df_helper/backends/sqlalchemy/_sql_model_builder.py +0 -193
  37. sibi_dst-0.3.63/sibi_dst/utils/airflow_manager.py +0 -212
  38. sibi_dst-0.3.63/sibi_dst/v2/df_helper/backends/sqlalchemy/_db_connection.py +0 -82
  39. sibi_dst-0.3.63/sibi_dst/v2/df_helper/backends/sqlmodel/_db_connection.py +0 -78
  40. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/__init__.py +0 -0
  41. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/df_helper/__init__.py +0 -0
  42. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/df_helper/_artifact_updater_multi_wrapper.py +0 -0
  43. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/df_helper/_parquet_artifact.py +0 -0
  44. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/df_helper/_parquet_reader.py +0 -0
  45. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/df_helper/backends/__init__.py +0 -0
  46. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/df_helper/backends/http/__init__.py +0 -0
  47. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/df_helper/backends/http/_http_config.py +0 -0
  48. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/df_helper/backends/parquet/__init__.py +0 -0
  49. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/df_helper/backends/parquet/_filter_handler.py +0 -0
  50. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/df_helper/backends/parquet/_parquet_options.py +0 -0
  51. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/df_helper/core/_filter_handler.py +0 -0
  52. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/df_helper/core/_params_config.py +0 -0
  53. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/df_helper/data_cleaner.py +0 -0
  54. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/geopy_helper/__init__.py +0 -0
  55. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/geopy_helper/geo_location_service.py +0 -0
  56. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/geopy_helper/utils.py +0 -0
  57. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/osmnx_helper/__init__.py +0 -0
  58. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/osmnx_helper/base_osm_map.py +0 -0
  59. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/osmnx_helper/basemaps/__init__.py +0 -0
  60. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/osmnx_helper/basemaps/calendar_html.py +0 -0
  61. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/osmnx_helper/basemaps/router_plotter.py +0 -0
  62. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/osmnx_helper/utils.py +0 -0
  63. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/tests/__init__.py +0 -0
  64. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/tests/test_data_wrapper_class.py +0 -0
  65. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/utils/clickhouse_writer.py +0 -0
  66. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/utils/credentials.py +0 -0
  67. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/utils/data_from_http_source.py +0 -0
  68. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/utils/data_utils.py +0 -0
  69. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/utils/date_utils.py +0 -0
  70. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/utils/df_utils.py +0 -0
  71. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/utils/file_utils.py +0 -0
  72. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/utils/filepath_generator.py +0 -0
  73. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/utils/manifest_manager.py +0 -0
  74. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/utils/parquet_saver.py +0 -0
  75. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/utils/phone_formatter.py +0 -0
  76. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/utils/storage_config.py +0 -0
  77. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/utils/storage_manager.py +0 -0
  78. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/utils/webdav_client.py +0 -0
  79. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/v2/__init__.py +0 -0
  80. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/v2/df_helper/__init__.py +0 -0
  81. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/v2/df_helper/_df_helper.py +0 -0
  82. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/v2/df_helper/backends/__init__.py +0 -0
  83. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/v2/df_helper/backends/sqlalchemy/__init__.py +0 -0
  84. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/v2/df_helper/backends/sqlalchemy/_load_from_db.py +0 -0
  85. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/v2/df_helper/backends/sqlalchemy/_model_builder.py +0 -0
  86. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/v2/df_helper/backends/sqlmodel/__init__.py +0 -0
  87. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/v2/df_helper/backends/sqlmodel/_io_dask.py +0 -0
  88. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/v2/df_helper/backends/sqlmodel/_load_from_db.py +0 -0
  89. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/v2/df_helper/backends/sqlmodel/_model_builder.py +0 -0
  90. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/v2/df_helper/core/__init__.py +0 -0
  91. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/v2/df_helper/core/_filter_handler.py +0 -0
  92. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/v2/df_helper/core/_params_config.py +0 -0
  93. {sibi_dst-0.3.63/sibi_dst → sibi_dst-2025.1.1/sibi_dst/v2}/df_helper/core/_query_config.py +0 -0
  94. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/v2/utils/__init__.py +0 -0
  95. {sibi_dst-0.3.63 → sibi_dst-2025.1.1}/sibi_dst/v2/utils/log_utils.py +0 -0
@@ -0,0 +1,55 @@
+ Metadata-Version: 2.1
+ Name: sibi-dst
+ Version: 2025.1.1
+ Summary: Data Science Toolkit
+ Author: Luis Valverde
+ Author-email: lvalverdeb@gmail.com
+ Requires-Python: >=3.12,<4.0
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.12
+ Classifier: Programming Language :: Python :: 3.13
+ Requires-Dist: clickhouse-connect (>=0.8.18,<0.9.0)
+ Requires-Dist: clickhouse-driver (>=0.2.9,<0.3.0)
+ Requires-Dist: dask[complete] (>=2025.5.1,<2026.0.0)
+ Requires-Dist: mysqlclient (>=2.2.7,<3.0.0)
+ Requires-Dist: pandas (>=2.3.1,<3.0.0)
+ Requires-Dist: psycopg2 (>=2.9.10,<3.0.0)
+ Requires-Dist: pydantic (>=2.11.7,<3.0.0)
+ Requires-Dist: pymysql (>=1.1.1,<2.0.0)
+ Requires-Dist: s3fs (>=2025.5.1,<2026.0.0)
+ Requires-Dist: sqlalchemy (>=2.0.41,<3.0.0)
+ Requires-Dist: tqdm (>=4.67.1,<5.0.0)
+ Requires-Dist: webdav4 (>=0.10.0,<0.11.0)
+ Description-Content-Type: text/markdown
+
+ ### SIBI-DST
+
+ Data Science Toolkit built with Python, Pandas, Dask, OpenStreetMaps, NetworkX, SQLAlchemy, GeoPandas, and Folium.
+
+ ## Example Use Cases
+
+ 1. **Build DataCubes, DataSets, and DataObjects** from diverse data sources, including **relational databases, Parquet files, Excel (`.xlsx`), delimited tables (`.csv`, `.tsv`), JSON, and RESTful APIs**.
+ 2. **Comprehensive DataFrame Management** utilities for efficient data handling, transformation, and optimization using **Pandas** and **Dask**.
+ 3. **Flexible Data Sharing** with client applications by writing to **Data Warehouses in Clickhouse, local filesystems, and cloud storage platforms** such as **S3**.
+ 4. **Microservices for Data Access** – Build scalable **API-driven services** using **RESTful APIs (`Django REST Framework`, `FastAPI`)** for high-performance data exchange.
+ 5. **Geospatial Analysis** – Utilize **OpenStreetMaps** and **GeoPandas** for advanced geospatial data processing and visualization.
+
+ ## Supported Technologies
+
+ - **Data Processing**: Pandas, Dask
+ - **Databases & Storage**: SQLAlchemy, Parquet, S3, Clickhouse
+ - **Mapping & Geospatial Analysis**: OpenStreetMaps, OSMnx, Geopy
+ - **API Development**: Django REST Framework, FastAPI
+
+ ## Installation
+
+ ```bash
+ # with pip
+
+ pip install sibi-dst # Install only the main package
+ pip install sibi-dst[geospatial] # Install with geospatial dependencies
+ pip install sibi-dst[dev,test,geospatial] # Install all optional dependencies
+
+
+ ```
+
@@ -22,15 +22,9 @@ Data Science Toolkit built with Python, Pandas, Dask, OpenStreetMaps, NetworkX,
  ```bash
  # with pip

- pip install sibi-dst[complete] # Install all dependencies
- pip install sibi-dst[df_helper] # Install only df_helper dependencies
- pip install sibi-dst[geospatial] # Install only geospatial dependencies
-
- # with poetry
-
- poetry add "sibi-dst[complete]" # Install all dependencies
- poetry add "sibi-dst[df_helper]" # Install only df_helper dependencies
- poetry add "sibi-dst[geospatial]" # Install only geospatial dependencies
+ pip install sibi-dst # Install only the main package
+ pip install sibi-dst[geospatial] # Install with geospatial dependencies
+ pip install sibi-dst[dev,test,geospatial] # Install all optional dependencies


  ```
@@ -0,0 +1,49 @@
+ [tool.poetry]
+ name = "sibi-dst"
+ version = "2025.1.1"
+ description = "Data Science Toolkit"
+ authors = ["Luis Valverde <lvalverdeb@gmail.com>"]
+ readme = "README.md"
+ packages = [{ include = "sibi_dst" }]
+
+ [tool.poetry.dependencies]
+ python = "^3.12"
+ pandas = "^2.3.1"
+ dask = {extras = ["complete"], version = "^2025.5.1"}
+ psycopg2 = "^2.9.10"
+ mysqlclient = "^2.2.7"
+ webdav4 = "^0.10.0"
+ clickhouse-connect = "^0.8.18"
+ clickhouse-driver = "^0.2.9"
+ tqdm = "^4.67.1"
+ s3fs = "^2025.5.1"
+ pydantic = "^2.11.7"
+ sqlalchemy = "^2.0.41"
+ pymysql = "^1.1.1"
+
+ [tool.poetry.group.dev]
+ optional = true
+
+ [tool.poetry.group.dev.dependencies]
+ jupyter = "^1.1.1"
+ python-dotenv = "^1.1.1"
+ black = "^25.1.0"
+
+ [tool.poetry.group.test]
+ optional = true
+
+ [tool.poetry.group.test.dependencies]
+ pytest = "^8.4.1"
+ pytest-cov = "^6.2.1"
+
+ [tool.poetry.group.geospatial]
+ optional = true
+
+ [tool.poetry.group.geospatial.dependencies]
+ osmnx = "^2.0.5"
+ geopy = "^2.4.1"
+ folium = "^0.20.0"
+
+ [build-system]
+ requires = ["poetry-core"]
+ build-backend = "poetry.core.masonry.api"
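
The `dev`, `test`, and `geospatial` tables above are optional Poetry dependency groups, so from a source checkout they would be pulled in with `poetry install --with ...`. A hedged sketch (assumes Poetry 1.2 or newer, which introduced dependency groups; group names are taken from the pyproject.toml above):

```bash
# Sketch: installing optional dependency groups from a clone of the project.
poetry install                               # main dependencies only
poetry install --with geospatial             # plus osmnx, geopy, folium
poetry install --with dev,test,geospatial    # plus all optional groups
```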
@@ -0,0 +1,232 @@
+ from __future__ import annotations
+
+ import warnings
+ from typing import Any, Dict, Optional, Union, TypeVar
+
+ import dask.dataframe as dd
+ import fsspec
+ import pandas as pd
+ from pydantic import BaseModel
+
+ from sibi_dst.df_helper.core import QueryConfig, ParamsConfig, FilterHandler
+ from sibi_dst.utils import Logger, ParquetSaver, ClickHouseWriter
+ from .backends.http import HttpConfig
+ from .backends.parquet import ParquetConfig
+ from .backends.sqlalchemy import SqlAlchemyConnectionConfig, SqlAlchemyLoadFromDb
+
+ warnings.filterwarnings("ignore")
+ T = TypeVar("T", bound=BaseModel)
+
+
+ # --- Backend Strategy Pattern Implementation ---
+
+ class BaseBackend:
+     """Abstract base class defining clear sync and async loading interfaces."""
+
+     def __init__(self, helper: DfHelper):
+         self.helper = helper
+         self.logger = helper.logger
+         self.debug = helper.debug
+
+     def load(self, **options) -> dd.DataFrame | pd.DataFrame:
+         """Synchronous data loading method. Must be implemented by sync backends."""
+         raise NotImplementedError(f"Backend '{self.__class__.__name__}' does not support synchronous loading.")
+
+     async def aload(self, **options) -> dd.DataFrame | pd.DataFrame:
+         """Asynchronous data loading method. By default, it calls the sync version."""
+         return self.load(**options)
+
+
+ class SqlAlchemyBackend(BaseBackend):
+     def load(self, **options) -> dd.DataFrame:
+         try:
+             # Process incoming filter options into the ParamsConfig object
+             if options and hasattr(self.helper._backend_params, 'parse_params'):
+                 self.helper._backend_params.parse_params(options)
+
+             db_loader = SqlAlchemyLoadFromDb(
+                 plugin_sqlalchemy=self.helper.backend_db_connection,
+                 plugin_query=self.helper._backend_query,
+                 plugin_params=self.helper._backend_params,
+                 logger=self.logger,
+                 debug=self.debug
+             )
+             return db_loader.build_and_load()
+         except Exception as e:
+             self.logger.error(f"Failed to load data from sqlalchemy: {e}", exc_info=self.debug)
+             return dd.from_pandas(pd.DataFrame(), npartitions=1)
+
+
+ class ParquetBackend(BaseBackend):
+     """This backend is also purely synchronous."""
+
+     def load(self, **options) -> dd.DataFrame | pd.DataFrame:
+         try:
+             df = self.helper.backend_parquet.load_files()
+             if options and df is not None:
+                 df = FilterHandler('dask', logger=self.logger, debug=False).apply_filters(df, filters=options)
+             return df
+         except Exception as e:
+             self.logger.error(f"Failed to load data from parquet: {e}", exc_info=True)
+             return dd.from_pandas(pd.DataFrame(), npartitions=1)
+
+
+ class HttpBackend(BaseBackend):
+     """This backend is purely asynchronous."""
+
+     def load(self, **options) -> dd.DataFrame | pd.DataFrame:
+         # This will correctly fail by raising NotImplementedError from the base class.
+         return self.helper.backend_http.fetch_data(**options)
+
+     async def aload(self, **options) -> Union[pd.DataFrame, dd.DataFrame]:
+         if not self.helper.backend_http:
+             self.logger.warning("HTTP plugin not configured properly.")
+             return dd.from_pandas(pd.DataFrame(), npartitions=1)
+         return await self.helper.backend_http.fetch_data(**options)
+
+
+ # --- Main DfHelper Facade Class ---
+
+ class DfHelper:
+     """
+     A reusable utility for loading data. It provides both sync (`load`) and
+     async (`aload`) methods to accommodate different backends.
+     """
+     _BACKEND_STRATEGIES = {
+         'sqlalchemy': SqlAlchemyBackend,
+         'parquet': ParquetBackend,
+         'http': HttpBackend,
+     }
+
+     default_config: Dict = None
+
+     def __init__(self, backend='sqlalchemy', **kwargs):
+         self.default_config = self.default_config or {}
+         kwargs = {**self.default_config.copy(), **kwargs}
+         self.backend = backend
+         self.debug = kwargs.get("debug", False)
+         self.logger = kwargs.get("logger", Logger.default_logger(logger_name=self.__class__.__name__))
+         self.logger.set_level(Logger.DEBUG if self.debug else Logger.INFO)
+         self.fs = kwargs.get("fs", fsspec.filesystem('file'))
+         kwargs.setdefault("fs", self.fs)
+         kwargs.setdefault("logger", self.logger)
+         self._backend_query = self._get_config(QueryConfig, kwargs)
+         self._backend_params = self._get_config(ParamsConfig, kwargs)
+         self.backend_db_connection: Optional[SqlAlchemyConnectionConfig] = None
+         self.backend_parquet: Optional[ParquetConfig] = None
+         self.backend_http: Optional[HttpConfig] = None
+
+         if self.backend == 'sqlalchemy':
+             self.backend_db_connection = self._get_config(SqlAlchemyConnectionConfig, kwargs)
+         elif self.backend == 'parquet':
+             self.backend_parquet = self._get_config(ParquetConfig, kwargs)
+         elif self.backend == 'http':
+             self.backend_http = self._get_config(HttpConfig, kwargs)
+
+         strategy_class = self._BACKEND_STRATEGIES.get(self.backend)
+         if not strategy_class: raise ValueError(f"Unsupported backend: {self.backend}")
+         self.backend_strategy = strategy_class(self)
+
+     def __enter__(self):
+         return self
+
+     def __exit__(self, exc_type, exc_value, traceback):
+         self._cleanup()
+
+     def _cleanup(self):
+         active_config = getattr(self, f"backend_{self.backend}", None)
+         if active_config and hasattr(active_config, "close"):
+             self.logger.debug(f"Closing resources for '{self.backend}' backend.")
+             active_config.close()
+
+     def _get_config(self, model: T, kwargs: Dict[str, Any]) -> T:
+         recognized_keys = set(model.model_fields.keys())
+         model_kwargs = {k: kwargs[k] for k in recognized_keys if k in kwargs}
+         return model(**model_kwargs)
+
+     def load(self, as_pandas=False, **options) -> Union[pd.DataFrame, dd.DataFrame]:
+         """Loads data synchronously. Fails if backend is async-only."""
+         self.logger.debug(f"Loading data from {self.backend} backend with options: {options}")
+         df = self.backend_strategy.load(**options)
+         df = self._process_loaded_data(df)
+         df = self._post_process_df(df)
+         return df.compute() if as_pandas else df
+
+     async def aload(self, as_pandas=False, **options) -> Union[pd.DataFrame, dd.DataFrame]:
+         """Loads data asynchronously from any backend."""
+         df = await self.backend_strategy.aload(**options)
+         df = self._process_loaded_data(df)
+         df = self._post_process_df(df)
+         return df.compute() if as_pandas else df
+
+     def _post_process_df(self, df: dd.DataFrame) -> dd.DataFrame:
+         df_params = self._backend_params.df_params
+         if not df_params: return df
+         fieldnames, column_names, index_col = (df_params.get("fieldnames"), df_params.get("column_names"),
+                                                df_params.get("index_col"))
+         if not any([fieldnames, column_names, index_col]): return df
+         self.logger.debug("Post-processing DataFrame.")
+         if fieldnames:
+             valid_fieldnames = [f for f in fieldnames if f in df.columns]
+             if len(valid_fieldnames) < len(fieldnames): self.logger.warning(
+                 f"Missing columns for filtering: {set(fieldnames) - set(valid_fieldnames)}")
+             df = df[valid_fieldnames]
+         if column_names:
+             if len(df.columns) != len(column_names): raise ValueError(
+                 f"Length mismatch: DataFrame has {len(df.columns)} columns, but {len(column_names)} names were provided.")
+             df = df.rename(columns=dict(zip(df.columns, column_names)))
+         if index_col:
+             if index_col not in df.columns: raise ValueError(f"Index column '{index_col}' not found in DataFrame.")
+             df = df.set_index(index_col)
+         return df
+
+     def _process_loaded_data(self, df: dd.DataFrame) -> dd.DataFrame:
+         field_map = self._backend_params.field_map or {}
+         if not isinstance(field_map, dict) or not field_map: return df
+         if hasattr(df, 'npartitions') and df.npartitions == 1 and not len(df.head(1)): return df
+         self.logger.debug("Processing loaded data...")
+         rename_mapping = {k: v for k, v in field_map.items() if k in df.columns}
+         if rename_mapping: df = df.rename(columns=rename_mapping)
+         return df
+
+     def save_to_parquet(self, df: dd.DataFrame, parquet_filename: str, **kwargs):
+         if hasattr(df, 'npartitions') and df.npartitions == 1 and not len(df.head(1)):
+             self.logger.warning("Cannot save to parquet; DataFrame is empty.")
+             return
+         fs = kwargs.pop('fs', self.fs)
+         path = kwargs.pop('parquet_storage_path', self.backend_parquet.parquet_storage_path)
+         ParquetSaver(df, path, self.logger, fs).save_to_parquet(parquet_filename)
+         self.logger.debug(f"Parquet saved to {parquet_filename} in path: {path}.")
+
+     def save_to_clickhouse(self, df: dd.DataFrame, **credentials):
+         if hasattr(df, 'npartitions') and df.npartitions == 1 and not len(df.head(1)):
+             self.logger.warning("Cannot write to ClickHouse; DataFrame is empty.")
+             return
+         ClickHouseWriter(self.logger, **credentials).save_to_clickhouse(df)
+         self.logger.debug("Save to ClickHouse completed.")
+
+     def load_period(self, dt_field: str, start: str, end: str, **kwargs) -> Union[pd.DataFrame, dd.DataFrame]:
+         """Synchronous convenience method for loading a date range."""
+         final_kwargs = self._prepare_period_filters(dt_field, start, end, **kwargs)
+         return self.load(**final_kwargs)
+
+     async def aload_period(self, dt_field: str, start: str, end: str, **kwargs) -> Union[pd.DataFrame, dd.DataFrame]:
+         """Asynchronous convenience method for loading a date range."""
+         final_kwargs = self._prepare_period_filters(dt_field, start, end, **kwargs)
+         return await self.aload(**final_kwargs)
+
+     def _prepare_period_filters(self, dt_field: str, start: str, end: str, **kwargs) -> dict:
+         start_date, end_date = pd.to_datetime(start).date(), pd.to_datetime(end).date()
+         if start_date > end_date: raise ValueError("'start' date cannot be later than 'end' date.")
+         field_map = self._backend_params.field_map or {}
+         reverse_map = {v: k for k, v in field_map.items()} if field_map else {}
+         if len(reverse_map) != len(field_map): self.logger.warning(
+             "field_map values are not unique; reverse mapping may be unreliable.")
+         mapped_field = reverse_map.get(dt_field, dt_field)
+         if start_date == end_date:
+             kwargs[f"{mapped_field}__date"] = start_date
+         else:
+             kwargs[f"{mapped_field}__date__range"] = [start_date, end_date]
+         self.logger.debug(f"Period load generated filters: {kwargs}")
+         return kwargs
+
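
For orientation, a minimal usage sketch of the `DfHelper` facade above. It is not taken from the package's documentation: the import path, connection URL, table name, and filter fields are assumptions for illustration only.

```python
# Minimal sketch of driving DfHelper with the sqlalchemy backend.
# The import path, DSN, table name, and filter fields below are hypothetical.
from sibi_dst.df_helper import DfHelper

with DfHelper(
    backend="sqlalchemy",
    connection_url="mysql+pymysql://user:pass@localhost/mydb",  # hypothetical DSN
    table="orders",                                             # hypothetical table
    debug=True,
) as helper:
    # Keyword options are parsed as filters by ParamsConfig before the query runs.
    ddf = helper.load(status="shipped")                   # Dask DataFrame by default
    pdf = helper.load(status="shipped", as_pandas=True)   # computed to pandas

    # load_period() builds `<field>__date` / `<field>__date__range` filters.
    jan = helper.load_period(dt_field="created_at", start="2025-01-01", end="2025-01-31")
```

The async path would be `await helper.aload(...)` / `await helper.aload_period(...)`, which the http backend requires since its synchronous `load` is not supported.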
@@ -1,4 +1,3 @@
- from ._filter_handler import SqlAlchemyFilterHandler
  from ._db_connection import SqlAlchemyConnectionConfig
  from ._load_from_db import SqlAlchemyLoadFromDb
  from ._sql_model_builder import SqlAlchemyModelBuilder
@@ -7,5 +6,4 @@ __all__ = [
  'SqlAlchemyConnectionConfig',
  'SqlAlchemyModelBuilder',
  'SqlAlchemyLoadFromDb',
- 'SqlAlchemyFilterHandler'
  ]
@@ -0,0 +1,248 @@
+ from __future__ import annotations
+
+ import threading
+ from contextlib import contextmanager
+ from typing import Any, Optional, ClassVar, Generator, Type, Dict
+
+ from pydantic import (
+     BaseModel,
+     field_validator,
+     model_validator,
+     ConfigDict,
+ )
+ from sqlalchemy import create_engine, event, text
+ from sqlalchemy.engine import url as sqlalchemy_url
+ from sqlalchemy.engine import Engine
+ from sqlalchemy.exc import OperationalError, SQLAlchemyError
+ from sqlalchemy.orm import sessionmaker, Session
+ from sqlalchemy.pool import QueuePool, NullPool, StaticPool
+
+ # Assuming these are your project's internal modules
+ from sibi_dst.utils import Logger
+ from ._sql_model_builder import SqlAlchemyModelBuilder
+
+
+ class SqlAlchemyConnectionConfig(BaseModel):
+     """
+     A thread-safe, registry-backed SQLAlchemy connection manager.
+
+     This class encapsulates database connection configuration and provides robust,
+     shared resource management. It is designed to be used as a context manager
+     to ensure resources are always released correctly.
+
+     Recommended Usage is via the `with` statement.
+         with SqlAlchemyConnectionConfig(...) as config:
+             session = config.get_session()
+             # ... do work ...
+         # config.close() is called automatically upon exiting the block.
+
+     Key Features:
+     - Context Manager Support: Guarantees resource cleanup.
+     - Shared Engine & Pool: Reuses a single SQLAlchemy Engine for identical
+       database URLs and pool settings, improving application performance.
+     - Reference Counting: Safely manages the lifecycle of the shared engine,
+       disposing of it only when the last user has closed its connection config.
+     """
+     # --- Public Configuration ---
+     connection_url: str
+     table: Optional[str] = None
+     debug: bool = False
+
+     # --- Pool Configuration ---
+     pool_size: int = 5
+     max_overflow: int = 10
+     pool_timeout: int = 30
+     pool_recycle: int = 1800
+     pool_pre_ping: bool = True
+     poolclass: Type[QueuePool] = QueuePool
+
+     # --- Internal & Runtime State ---
+     model: Optional[Type[Any]] = None
+     engine: Optional[Engine] = None
+     logger: Optional[Logger] = None
+     session_factory: Optional[sessionmaker] = None
+
+     # --- Private State ---
+     _engine_key_instance: tuple = ()
+     _closed: bool = False  # Flag to prevent double-closing.
+
+     # --- Class-level Shared Resources ---
+     _engine_registry: ClassVar[Dict[tuple, Dict[str, Any]]] = {}
+     _registry_lock: ClassVar[threading.Lock] = threading.Lock()
+
+     model_config = ConfigDict(arbitrary_types_allowed=True)
+
+     # Add __enter__ and __exit__ for context manager protocol
+     def __enter__(self) -> SqlAlchemyConnectionConfig:
+         """Enter the runtime context, returning self."""
+         return self
+
+     def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
+         """Exit the runtime context, ensuring that close() is called."""
+         self.close()
+
+     @field_validator("pool_size", "max_overflow", "pool_timeout", "pool_recycle")
+     @classmethod
+     def _validate_pool_params(cls, v: int) -> int:
+         if v < 0:
+             raise ValueError("Pool parameters must be non-negative")
+         return v
+
+     @model_validator(mode="after")
+     def _init_all(self) -> SqlAlchemyConnectionConfig:
+         """Orchestrates the initialization process after Pydantic validation."""
+         self._init_logger()
+         self._engine_key_instance = self._get_engine_key()
+         self._init_engine()
+         self._validate_conn()
+         self._build_model()
+         if self.engine:
+             self.session_factory = sessionmaker(bind=self.engine, expire_on_commit=False)
+         return self
+
+     def _init_logger(self) -> None:
+         """Initializes the logger for this instance."""
+         if self.logger is None:
+             self.logger = Logger.default_logger(logger_name=self.__class__.__name__)
+         log_level = Logger.DEBUG if self.debug else Logger.INFO
+         self.logger.set_level(log_level)
+
+     def _get_engine_key(self) -> tuple:
+         """Generates a unique, normalized key for an engine configuration."""
+         parsed = sqlalchemy_url.make_url(self.connection_url)
+         query = {k: v for k, v in parsed.query.items() if not k.startswith("pool_")}
+         normalized_url = parsed.set(query=query)
+         key_parts = [str(normalized_url)]
+         if self.poolclass not in (NullPool, StaticPool):
+             key_parts += [
+                 self.pool_size, self.max_overflow, self.pool_timeout,
+                 self.pool_recycle, self.pool_pre_ping
+             ]
+         return tuple(key_parts)
+
+     def _init_engine(self) -> None:
+         """Initializes or reuses a shared SQLAlchemy Engine."""
+         with self._registry_lock:
+             engine_wrapper = self._engine_registry.get(self._engine_key_instance)
+             if engine_wrapper:
+                 self.engine = engine_wrapper['engine']
+                 engine_wrapper['ref_count'] += 1
+                 self.logger.debug(f"Reusing engine. Ref count: {engine_wrapper['ref_count']}.")
+             else:
+                 self.logger.debug(f"Creating new engine for key: {self._engine_key_instance}")
+                 try:
+                     new_engine = create_engine(
+                         self.connection_url, pool_size=self.pool_size,
+                         max_overflow=self.max_overflow, pool_timeout=self.pool_timeout,
+                         pool_recycle=self.pool_recycle, pool_pre_ping=self.pool_pre_ping,
+                         poolclass=self.poolclass,
+                     )
+                     self.engine = new_engine
+                     self._attach_events()
+                     self._engine_registry[self._engine_key_instance] = {
+                         'engine': new_engine, 'ref_count': 1, 'active_connections': 0
+                     }
+                 except Exception as e:
+                     self.logger.error(f"Failed to create engine: {e}")
+                     raise SQLAlchemyError(f"Engine creation failed: {e}") from e
+
+     def close(self) -> None:
+         """
+         Decrements the engine's reference count and disposes of the engine
+         if the count reaches zero. This is now typically called automatically
+         when exiting a `with` block.
+         """
+         # Prevent the method from running more than once per instance.
+         if self._closed:
+             self.logger.debug("Attempted to close an already-closed config instance.")
+             return
+
+         with self._registry_lock:
+             key = self._engine_key_instance
+             engine_wrapper = self._engine_registry.get(key)
+
+             if not engine_wrapper:
+                 self.logger.warning("Attempted to close a config whose engine is not in the registry.")
+                 return
+
+             engine_wrapper['ref_count'] -= 1
+             self.logger.debug(f"Closing config. Ref count is now {engine_wrapper['ref_count']}.")
+
+             if engine_wrapper['ref_count'] <= 0:
+                 self.logger.debug(f"Disposing engine as reference count is zero. Key: {key}")
+                 engine_wrapper['engine'].dispose()
+                 del self._engine_registry[key]
+
+         # Mark this instance as closed to prevent subsequent calls.
+         self._closed = True
+
+     # ... (the rest of your methods like _attach_events, _on_checkout, get_session, etc. remain unchanged)
+     # They are omitted here for brevity but should be included in your final file.
+
+     def _attach_events(self) -> None:
+         """Attaches checkout/checkin events to the engine for connection tracking."""
+         if self.engine:
+             event.listen(self.engine, "checkout", self._on_checkout)
+             event.listen(self.engine, "checkin", self._on_checkin)
+
+     def _on_checkout(self, *args) -> None:
+         """Event listener for when a connection is checked out from the pool."""
+         with self._registry_lock:
+             wrapper = self._engine_registry.get(self._engine_key_instance)
+             if wrapper:
+                 wrapper['active_connections'] += 1
+         self.logger.debug(f"Connection checked out. Active: {self.active_connections}")
+
+     def _on_checkin(self, *args) -> None:
+         """Event listener for when a connection is returned to the pool."""
+         with self._registry_lock:
+             wrapper = self._engine_registry.get(self._engine_key_instance)
+             if wrapper:
+                 wrapper['active_connections'] = max(0, wrapper['active_connections'] - 1)
+         self.logger.debug(f"Connection checked in. Active: {self.active_connections}")
+
+     @property
+     def active_connections(self) -> int:
+         """Returns the number of active connections for this instance's engine."""
+         with self._registry_lock:
+             wrapper = self._engine_registry.get(self._engine_key_instance)
+             return wrapper['active_connections'] if wrapper else 0
+
+     def _validate_conn(self) -> None:
+         """Tests the database connection by executing a simple query."""
+         try:
+             with self.managed_connection() as conn:
+                 conn.execute(text("SELECT 1"))
+             self.logger.debug("Database connection validated successfully.")
+         except OperationalError as e:
+             self.logger.error(f"Database connection failed: {e}")
+             raise ValueError(f"DB connection failed: {e}") from e
+
+     @contextmanager
+     def managed_connection(self) -> Generator[Any, None, None]:
+         """Provides a single database connection from the engine pool."""
+         if not self.engine:
+             raise RuntimeError("Engine not initialized. Cannot get a connection.")
+         conn = self.engine.connect()
+         try:
+             yield conn
+         finally:
+             conn.close()
+
+     def get_session(self) -> Session:
+         """Returns a new SQLAlchemy Session from the session factory."""
+         if not self.session_factory:
+             raise RuntimeError("Session factory not initialized. Cannot get a session.")
+         return self.session_factory()
+
+     def _build_model(self) -> None:
+         """Dynamically builds an ORM model if `self.table` is set."""
+         if not self.table or not self.engine:
+             return
+         try:
+             builder = SqlAlchemyModelBuilder(self.engine, self.table)
+             self.model = builder.build_model()
+             self.logger.debug(f"Successfully built ORM model for table: {self.table}")
+         except Exception as e:
+             self.logger.error(f"Failed to build ORM model for table '{self.table}': {e}")
+             raise ValueError(f"Model construction failed for table '{self.table}': {e}") from e
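
A small sketch of the context-manager usage the class docstring recommends. The DSN and table name are placeholders, and the engine-sharing comment simply restates the registry and reference-counting logic shown above.

```python
# Sketch: two configs with identical URL and pool settings share one Engine;
# the engine is disposed only when the last close() drops the ref count to zero.
# The DSN and table name below are placeholders.
from sqlalchemy import text
from sibi_dst.df_helper.backends.sqlalchemy import SqlAlchemyConnectionConfig

with SqlAlchemyConnectionConfig(
    connection_url="postgresql+psycopg2://user:pass@localhost/mydb",  # placeholder
    table="orders",   # triggers dynamic ORM model building into cfg.model
    pool_size=5,
    debug=True,
) as cfg:
    with cfg.managed_connection() as conn:
        conn.execute(text("SELECT 1"))   # raw connection from the shared pool
    session = cfg.get_session()          # ORM session bound to the shared engine
    try:
        pass  # ... query cfg.model here ...
    finally:
        session.close()
# cfg.close() runs automatically here and decrements the shared-engine ref count.
```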