sibi-dst 0.3.32.tar.gz → 0.3.34.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/PKG-INFO +1 -1
  2. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/pyproject.toml +1 -1
  3. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/df_helper/_df_helper.py +108 -5
  4. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/df_helper/_parquet_artifact.py +63 -0
  5. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/df_helper/_parquet_reader.py +36 -0
  6. sibi_dst-0.3.34/sibi_dst/df_helper/backends/django/_db_connection.py +88 -0
  7. sibi_dst-0.3.34/sibi_dst/df_helper/backends/django/_io_dask.py +450 -0
  8. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/df_helper/backends/django/_load_from_db.py +96 -1
  9. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/df_helper/backends/django/_sql_model_builder.py +132 -6
  10. sibi_dst-0.3.34/sibi_dst/df_helper/backends/http/_http_config.py +101 -0
  11. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/df_helper/backends/parquet/_filter_handler.py +28 -0
  12. sibi_dst-0.3.34/sibi_dst/df_helper/backends/parquet/_parquet_options.py +205 -0
  13. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/df_helper/backends/sqlalchemy/_db_connection.py +17 -0
  14. sibi_dst-0.3.34/sibi_dst/df_helper/backends/sqlalchemy/_load_from_db.py +141 -0
  15. sibi_dst-0.3.34/sibi_dst/df_helper/backends/sqlalchemy/_sql_model_builder.py +192 -0
  16. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/df_helper/core/_params_config.py +59 -0
  17. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/geopy_helper/geo_location_service.py +14 -0
  18. sibi_dst-0.3.34/sibi_dst/geopy_helper/utils.py +89 -0
  19. sibi_dst-0.3.34/sibi_dst/osmnx_helper/base_osm_map.py +419 -0
  20. sibi_dst-0.3.34/sibi_dst/osmnx_helper/utils.py +489 -0
  21. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/utils/clickhouse_writer.py +27 -0
  22. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/utils/data_utils.py +32 -1
  23. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/utils/data_wrapper.py +94 -6
  24. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/utils/date_utils.py +35 -0
  25. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/utils/log_utils.py +19 -2
  26. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/utils/parquet_saver.py +0 -106
  27. sibi_dst-0.3.32/sibi_dst/df_helper/backends/django/_db_connection.py +0 -48
  28. sibi_dst-0.3.32/sibi_dst/df_helper/backends/django/_io_dask.py +0 -242
  29. sibi_dst-0.3.32/sibi_dst/df_helper/backends/http/_http_config.py +0 -50
  30. sibi_dst-0.3.32/sibi_dst/df_helper/backends/parquet/_parquet_options.py +0 -101
  31. sibi_dst-0.3.32/sibi_dst/df_helper/backends/sqlalchemy/_load_from_db.py +0 -63
  32. sibi_dst-0.3.32/sibi_dst/df_helper/backends/sqlalchemy/_sql_model_builder.py +0 -131
  33. sibi_dst-0.3.32/sibi_dst/geopy_helper/utils.py +0 -55
  34. sibi_dst-0.3.32/sibi_dst/osmnx_helper/base_osm_map.py +0 -165
  35. sibi_dst-0.3.32/sibi_dst/osmnx_helper/utils.py +0 -267
  36. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/README.md +0 -0
  37. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/__init__.py +0 -0
  38. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/df_helper/__init__.py +0 -0
  39. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/df_helper/backends/__init__.py +0 -0
  40. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/df_helper/backends/django/__init__.py +0 -0
  41. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/df_helper/backends/http/__init__.py +0 -0
  42. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/df_helper/backends/parquet/__init__.py +0 -0
  43. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/df_helper/backends/sqlalchemy/__init__.py +0 -0
  44. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/df_helper/backends/sqlalchemy/_filter_handler.py +0 -0
  45. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/df_helper/backends/sqlalchemy/_io_dask.py +0 -0
  46. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/df_helper/core/__init__.py +0 -0
  47. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/df_helper/core/_defaults.py +0 -0
  48. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/df_helper/core/_filter_handler.py +0 -0
  49. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/df_helper/core/_query_config.py +0 -0
  50. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/df_helper/data_cleaner.py +0 -0
  51. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/geopy_helper/__init__.py +0 -0
  52. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/osmnx_helper/__init__.py +0 -0
  53. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/osmnx_helper/basemaps/__init__.py +0 -0
  54. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/osmnx_helper/basemaps/calendar_html.py +0 -0
  55. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/osmnx_helper/basemaps/router_plotter.py +0 -0
  56. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/tests/__init__.py +0 -0
  57. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/tests/test_data_wrapper_class.py +0 -0
  58. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/utils/__init__.py +0 -0
  59. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/utils/airflow_manager.py +0 -0
  60. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/utils/credentials.py +0 -0
  61. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/utils/df_utils.py +0 -0
  62. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/utils/file_utils.py +0 -0
  63. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/utils/filepath_generator.py +0 -0
  64. {sibi_dst-0.3.32 → sibi_dst-0.3.34}/sibi_dst/utils/storage_manager.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: sibi-dst
- Version: 0.3.32
+ Version: 0.3.34
  Summary: Data Science Toolkit
  Author: Luis Valverde
  Author-email: lvalverdeb@gmail.com
@@ -1,6 +1,6 @@
  [tool.poetry]
  name = "sibi-dst"
- version = "0.3.32"
+ version = "0.3.34"
  description = "Data Science Toolkit"
  authors = ["Luis Valverde <lvalverdeb@gmail.com>"]
  readme = "README.md"
@@ -91,7 +91,7 @@ class DfHelper:
  self.filesystem_options = kwargs.pop('filesystem_options', {})
  kwargs.setdefault("live", True)
  kwargs.setdefault("logger", self.logger)
- kwargs.setdefault("fs", fsspec.filesystem('file'))
+ self.fs = kwargs.setdefault("fs", fsspec.filesystem('file'))
  self.__post_init(**kwargs)

  def __str__(self):
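The changed line stores the effective filesystem on the helper: dict.setdefault returns the value already present in kwargs, or inserts and returns the local-file default, so self.fs always holds the filesystem that downstream methods such as save_to_parquet will reuse. A minimal sketch of that behaviour, independent of the package:

import fsspec

kwargs = {}                                             # caller passed no "fs"
fs = kwargs.setdefault("fs", fsspec.filesystem("file"))
assert kwargs["fs"] is fs                               # default inserted and returned

mem = fsspec.filesystem("memory")
kwargs = {"fs": mem}                                    # caller-supplied filesystem
fs = kwargs.setdefault("fs", fsspec.filesystem("file"))
assert fs is mem                                        # caller's filesystem wins; the default is ignored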
@@ -208,6 +208,18 @@ class DfHelper:
  return asyncio.run(self.__load_from_http(**options))

  def __load_from_sqlalchemy(self, **options):
+ """
+ Loads data from an SQLAlchemy database source into a dataframe. The method processes
+ the loaded data and applies post-processing to transform it into the desired structure.
+ If the operation fails, an empty pandas DataFrame is created as a fallback.
+
+ :param options: Additional keyword arguments to configure the data loading process.
+ These options can include configurations such as 'debug' and other parameters
+ required by the `SqlAlchemyLoadFromDb` class.
+ :type options: dict
+ :return: A dataframe containing the data loaded from the SQLAlchemy database.
+ :rtype: dask.dataframe.DataFrame
+ """
  try:
  options.setdefault("debug", self.debug)
  db_loader = SqlAlchemyLoadFromDb(
@@ -228,6 +240,17 @@ class DfHelper:
  return self.df

  def __load_from_db(self, **options) -> Union[pd.DataFrame, dd.DataFrame]:
+ """
+ Loads data from a Django database using a specific backend query mechanism. Processes the loaded data
+ and applies further post-processing before returning the dataframe. If the operation fails, an
+ empty dataframe with a single partition is returned instead.
+
+ :param options: Additional settings for the database loading process, which include optional configurations
+ like debug mode, among others.
+ :type options: dict
+ :return: A dataframe containing the loaded data either as a Pandas or Dask dataframe.
+ :rtype: Union[pd.DataFrame, dd.DataFrame]
+ """
  try:
  options.setdefault("debug", self.debug)
  db_loader = DjangoLoadFromDb(
@@ -248,7 +271,18 @@ class DfHelper:
  return self.df

  async def __load_from_http(self, **options) -> Union[pd.DataFrame, dd.DataFrame]:
- """Delegate asynchronous HTTP data loading to HttpDatabackend plugin."""
+ """
+ Loads data asynchronously from an HTTP source using the configured HTTP plugin.
+ If the HTTP plugin is not properly configured, this method logs a debug message and
+ returns an empty Dask DataFrame. If an exception occurs during data fetching, the error
+ is logged and an empty Dask DataFrame with one partition is returned.
+
+ :param options: Additional keyword arguments that are passed to the HTTP plugin for
+ fetching the data.
+ :returns: A DataFrame object that can either be a pandas or a Dask DataFrame. When the
+ fetching operation fails, it defaults to returning an empty Dask DataFrame
+ with a single partition.
+ """
  if not self.backend_http:
  self.logger.debug("HTTP plugin not configured properly.")
  return dd.from_pandas(pd.DataFrame(), npartitions=1)
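The new loader docstrings all describe the same failure contract: on error or missing configuration the helper hands back an empty, single-partition Dask DataFrame instead of raising. A tiny illustration of that fallback value, plain Dask/pandas rather than package code:

import dask.dataframe as dd
import pandas as pd

empty = dd.from_pandas(pd.DataFrame(), npartitions=1)   # the fallback used by the loaders
print(len(empty.compute()))                             # 0 rows, but the usual Dask API still works
print(empty.npartitions)                                # 1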
@@ -339,12 +373,45 @@ class DfHelper:
  self.logger.debug("Processing of loaded data completed.")

- def save_to_parquet(self, parquet_filename: Optional[str] = None):
- ps = ParquetSaver(self.df, self.parquet_storage_path, self.logger)
+ def save_to_parquet(self, parquet_filename: Optional[str] = None, **kwargs):
+ """
+ Save the dataframe result to a Parquet file using specified configurations.
+
+ This method leverages the ParquetSaver class to store the dataframe result
+ into a Parquet file. It also provides functionality for overriding the default
+ filesystem (`fs`) and storage path (`parquet_storage_path`). The method logs
+ details about the saving operation for debugging purposes.
+
+ :param parquet_filename: The name of the Parquet file to save the dataframe to.
+ If not provided, a default name will be used.
+ :param kwargs: Additional arguments to customize the saving process. These may
+ include:
+ - `fs`: Filesystem to be used for saving Parquet files. If not
+ provided, defaults to the instance's filesystem attribute.
+ - `parquet_storage_path`: The root path in the filesystem where
+ Parquet files should be saved. If not provided, defaults to
+ the instance's attribute for storage path.
+ :return: None
+ """
+ fs = kwargs.pop('fs', self.fs)
+ parquet_storage_path = kwargs.pop('parquet_storage_path', self.parquet_storage_path)
+ ps = ParquetSaver(df_result=self.df, parquet_storage_path=parquet_storage_path, logger=self.logger, fs=fs)
  ps.save_to_parquet(parquet_filename)
- self.logger.debug(f"Parquet saved to {parquet_filename} in parquet storage: {self.parquet_storage_path}.")
+ self.logger.debug(f"Parquet saved to {parquet_filename} in parquet storage: {parquet_storage_path}.")

  def save_to_clickhouse(self, **credentials):
+ """
+ Saves the current DataFrame to ClickHouse using the provided credentials. This
+ method first checks if the DataFrame is empty. If it is empty, the method logs
+ a debug message and does not proceed with saving. Otherwise, it initializes
+ a ClickHouseWriter instance and uses it to save the DataFrame to ClickHouse,
+ logging a debug message upon successful completion.
+
+ :param credentials: Credentials required to connect to ClickHouse as keyword
+ arguments.
+ :type credentials: dict
+ :return: None
+ """
  if self.df.map_partitions(len).compute().sum() == 0:
  self.logger.debug("Cannot write to clickhouse since Dataframe is empty")
  return
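With the new fs and parquet_storage_path overrides, a hedged usage sketch follows; helper stands for an already-configured DfHelper instance, and the S3 bucket, filename, and filesystem choice are illustrative assumptions only:

import fsspec

s3 = fsspec.filesystem("s3", anon=False)       # requires s3fs; any fsspec filesystem works
helper.save_to_parquet(
    "orders.parquet",                          # parquet_filename
    fs=s3,                                     # overrides self.fs for this call only
    parquet_storage_path="s3://my-bucket/warehouse",  # overrides the configured root path
)
# Calling helper.save_to_parquet("orders.parquet") with no overrides falls back to
# self.fs and self.parquet_storage_path, exactly as the docstring describes.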
@@ -353,6 +420,21 @@ class DfHelper:
  self.logger.debug("Save to ClickHouse completed.")

  def __load_from_parquet(self, **options) -> Union[pd.DataFrame, dd.DataFrame]:
+ """
+ Loads data from parquet files into a DataFrame, applies provided filters, and handles exceptions.
+
+ This method leverages a backend-specific implementation to load data from parquet files into a
+ DataFrame. If additional options are provided and the data is successfully loaded, filters are
+ applied to the DataFrame using a filter handler. Errors during this process are handled gracefully
+ by logging the issue and returning an empty Dask DataFrame.
+
+ :param options: A dictionary of filter options to be applied to the DataFrame.
+ :type options: dict
+
+ :return: A DataFrame containing the loaded and filtered data. If the operation fails, an empty
+ Dask DataFrame is returned.
+ :rtype: Union[pd.DataFrame, dd.DataFrame]
+ """
  try:
  self.df = self.backend_parquet.load_files()
  if options and self.df is not None:
@@ -368,6 +450,27 @@ class DfHelper:
  return dd.from_pandas(pd.DataFrame(), npartitions=1)

  def load_period(self, **kwargs):
+ """
+ Loads a period with specified parameters.
+
+ This method acts as a wrapper around the private ``__load_period`` method. It
+ accepts arbitrary keyword arguments that are passed directly to the private
+ method for execution. The purpose of allowing keyword arguments is to permit
+ flexible configuration or parameterization for loading a specific period, based
+ on the internal implementation of the private ``__load_period`` method.
+
+ Note:
+ The arguments and return values are entirely determined by the private
+ method's behavior. This method is intentionally designed to mask details
+ of the internal logic behind the abstraction.
+
+ :param kwargs: Arbitrary keyword arguments to parameterize the internal logic
+ of loading a period. The specific keys and values expected by the
+ ``__load_period`` method depend on its own internal implementation.
+ :return: The result of calling the private ``__load_period`` method with the
+ provided keyword arguments. The return type is dependent on the internal
+ implementation of ``__load_period``.
+ """
  return self.__load_period(**kwargs)

  def __load_period(self, **kwargs):
@@ -9,11 +9,74 @@ from sibi_dst.utils import DateUtils
  class ParquetArtifact(DfHelper):
+ """
+ Class designed to manage Parquet data storage and retrieval using a specified
+ DataWrapper class for data processing. It provides functionality for loading,
+ updating, rebuilding, and generating Parquet files within a configurable
+ storage filesystem. The class ensures that all essential configurations and
+ filesystems are properly set up before operations.
+
+ Detailed functionality includes support for dynamically managing and generating
+ Parquet files based on time periods, with customizable options for paths,
+ filenames, date fields, and more. It is an abstraction for efficiently handling
+ storage tasks related to distributed or local file systems.
+
+ :ivar config: Configuration dictionary containing all configurable parameters
+ for managing Parquet data storage, such as paths, filenames,
+ and date ranges.
+ :type config: dict
+ :ivar df: Cached Dask DataFrame used to store and manipulate data loaded
+ from the Parquet file.
+ :type df: Optional[dask.dataframe.DataFrame]
+ :ivar data_wrapper_class: Class responsible for abstracting data processing
+ operations required for Parquet file generation.
+ :type data_wrapper_class: type
+ :ivar date_field: Name of the field used to identify and process data by date.
+ :type date_field: Optional[str]
+ :ivar parquet_storage_path: Filesystem path to store Parquet files.
+ :type parquet_storage_path: Optional[str]
+ :ivar parquet_filename: Name of the Parquet file to be generated and managed.
+ :type parquet_filename: Optional[str]
+ :ivar parquet_start_date: Date string specifying the start date for data range
+ processing.
+ :type parquet_start_date: Optional[str]
+ :ivar parquet_end_date: Date string specifying the end date for data range
+ processing.
+ :type parquet_end_date: Optional[str]
+ :ivar filesystem_type: Type of the filesystem used for managing storage
+ operations (e.g., `file`, `s3`, etc.).
+ :type filesystem_type: str
+ :ivar filesystem_options: Additional options for configuring the filesystem.
+ :type filesystem_options: dict
+ :ivar fs: Filesystem object used for storage operations.
+ :type fs: fsspec.AbstractFileSystem
+ """
  DEFAULT_CONFIG = {
  'backend': 'parquet'
  }

  def __init__(self, data_wrapper_class, **kwargs):
+ """
+ Initializes an instance of the class with given configuration and validates
+ required parameters. Sets up the filesystem to handle storage, ensuring
+ necessary directories exist. The configuration supports a variety of options
+ to manage parquet storage requirements, including paths, filenames, and date
+ ranges.
+
+ :param data_wrapper_class: The class responsible for wrapping data to be managed
+ by this instance.
+ :type data_wrapper_class: type
+ :param kwargs: Arbitrary keyword arguments to override default configuration.
+ Includes settings for `date_field`, `parquet_storage_path`,
+ `parquet_filename`, `parquet_start_date`, `parquet_end_date`,
+ `filesystem_type`, `filesystem_options`, and `fs`.
+ :type kwargs: dict
+
+ :raises ValueError: If any of the required configuration options
+ (`date_field`, `parquet_storage_path`,
+ `parquet_filename`, `parquet_start_date`,
+ or `parquet_end_date`) are missing or not set properly.
+ """
  self.config = {
  **self.DEFAULT_CONFIG,
  **kwargs,
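Since the new docstring enumerates the required configuration keys, a construction sketch may help; the import path, the MyDataWrapper class, the load() call, and the concrete paths and dates are illustrative assumptions, not taken from the diff:

from sibi_dst.df_helper import ParquetArtifact   # import path assumed from the package layout

artifact = ParquetArtifact(
    data_wrapper_class=MyDataWrapper,            # hypothetical class that produces the data to persist
    date_field="created_at",
    parquet_storage_path="/data/warehouse/orders",
    parquet_filename="orders.parquet",
    parquet_start_date="2024-01-01",
    parquet_end_date="2024-12-31",
    filesystem_type="file",                      # e.g. "file" or "s3"
    filesystem_options={},                       # extra fsspec options for that filesystem
)
df = artifact.load()                             # load() assumed, mirroring the ParquetReader docstring below

Omitting any of the required keys (date_field, parquet_storage_path, parquet_filename, parquet_start_date, parquet_end_date) raises ValueError per the docstring.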
@@ -7,6 +7,42 @@ from sibi_dst.df_helper import DfHelper
  class ParquetReader(DfHelper):
+ """
+ This class is a specialized helper for reading and managing Parquet files.
+
+ The `ParquetReader` class is designed to facilitate working with Parquet
+ datasets stored across different filesystems. It initializes the required
+ resources, ensures the existence of the specified Parquet directory,
+ and provides an abstraction to load the data into a Dask DataFrame.
+
+ The class requires configuration for the storage path and dates defining
+ a range of interest. It also supports various filesystem types through
+ `fsspec`.
+
+ :ivar config: Holds the final configuration for this instance, combining
+ `DEFAULT_CONFIG` with user-provided configuration.
+ :type config: dict
+ :ivar df: Stores the loaded Dask DataFrame after the `load()` method is
+ invoked. Initially set to None.
+ :type df: Optional[dd.DataFrame]
+ :ivar parquet_storage_path: The path to the Parquet storage directory.
+ :type parquet_storage_path: str
+ :ivar parquet_start_date: Start date for Parquet data selection. Must
+ be set in the configuration.
+ :type parquet_start_date: str
+ :ivar parquet_end_date: End date for Parquet data selection. Must be
+ set in the configuration.
+ :type parquet_end_date: str
+ :ivar filesystem_type: The type of filesystem the Parquet files are
+ stored on (e.g., "file", "s3").
+ :type filesystem_type: str
+ :ivar filesystem_options: Any additional options required for the
+ specified filesystem type.
+ :type filesystem_options: dict
+ :ivar fs: Instance of `fsspec` filesystem used to interact with the
+ Parquet storage.
+ :type fs: fsspec.AbstractFileSystem
+ """
  DEFAULT_CONFIG = {
  'backend': 'parquet'
  }
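A matching usage sketch for ParquetReader, built only from the attributes documented above; the import path, storage path, and dates are illustrative assumptions:

from sibi_dst.df_helper import ParquetReader     # import path assumed

reader = ParquetReader(
    parquet_storage_path="s3://my-bucket/warehouse/orders",
    parquet_start_date="2024-01-01",
    parquet_end_date="2024-03-31",
    filesystem_type="s3",
    filesystem_options={"anon": False},          # forwarded to fsspec for the chosen backend
)
df = reader.load()                               # Dask DataFrame covering the requested date range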
@@ -0,0 +1,88 @@
+ from typing import Any
+
+ from pydantic import BaseModel, model_validator
+
+ from ._sql_model_builder import DjangoSqlModelBuilder
+
+
+ class DjangoConnectionConfig(BaseModel):
+ """
+ Represents a configuration for establishing a Django database connection.
+
+ This class is used for defining the configurations necessary to establish a Django
+ database connection. It supports dynamic model generation if the model is not
+ provided explicitly. It also validates the connection configuration to ensure it
+ is properly set up before being used.
+
+ :ivar live: Indicates whether the connection is live. Automatically set to False if
+ a table is provided without a pre-built model.
+ :type live: bool
+ :ivar connection_name: The name of the database connection to use. This is a mandatory
+ parameter and must be provided.
+ :type connection_name: str
+ :ivar table: The name of the database table to use. Required for dynamic model
+ generation when no model is provided.
+ :type table: str
+ :ivar model: The Django model that represents the database table. If not provided,
+ this can be generated dynamically by using the table name.
+ :type model: Any
+ """
+ live: bool = False
+ connection_name: str = None
+ table: str = None
+ model: Any = None
+
+ @model_validator(mode="after")
+ def check_model(self):
+ """
+ Validates and modifies the instance based on the provided attributes and conditions.
+ This method ensures that all required parameters are populated and consistent, and it
+ dynamically builds a model if necessary. The method also ensures the connection is
+ validated after the model preparation process.
+
+ :raises ValueError: If `connection_name` is not provided.
+ :raises ValueError: If `table` name is not specified when building the model dynamically.
+ :raises ValueError: If there are errors during the dynamic model-building process.
+ :raises ValueError: If `validate_connection` fails due to invalid configuration.
+ :return: The validated and potentially mutated instance.
+ """
+ # connection_name is mandatory
+ if self.connection_name is None:
+ raise ValueError("Connection name must be specified")
+
+ # If table is provided, enforce live=False
+ if self.table:
+ self.live = False
+
+ # If model is not provided, build it dynamically
+ if not self.model:
+ if not self.table:
+ raise ValueError("Table name must be specified to build the model")
+ try:
+ self.model = DjangoSqlModelBuilder(
+ connection_name=self.connection_name, table=self.table
+ ).build_model()
+ except Exception as e:
+ raise ValueError(f"Failed to build model: {e}")
+ else:
+ self.live = True
+ # Validate the connection after building the model
+ self.validate_connection()
+ return self
+
+ def validate_connection(self):
+ """
+ Ensures the database connection is valid by performing a simple
+ query. Raises a ValueError if the connection is broken or if any
+ other exception occurs during the query.
+
+ :raises ValueError: If the connection to the database cannot be
+ established or if the query fails.
+ """
+ try:
+ # Perform a simple query to test the connection
+ self.model.objects.using(self.connection_name).exists()
+ except Exception as e:
+ raise ValueError(
+ f"Failed to connect to the database '{self.connection_name}': {e}"
+ )
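The new config class in backends/django/_db_connection.py validates itself on construction; a hedged usage sketch follows, where the connection alias, table name, and import path are placeholders rather than values from the diff:

from sibi_dst.df_helper.backends.django import DjangoConnectionConfig  # export assumed

# Passing only a table name makes the validator build the model dynamically via
# DjangoSqlModelBuilder and forces live=False; passing a ready Django model instead sets live=True.
config = DjangoConnectionConfig(
    connection_name="default",   # must match an alias in Django's DATABASES setting
    table="app_orders",          # table used for dynamic model generation
)
config.validate_connection()     # re-runs the simple existence query against that alias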