cloe-nessy 0.2.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (78)
  1. cloe_nessy/__init__.py +0 -0
  2. cloe_nessy/clients/__init__.py +5 -0
  3. cloe_nessy/clients/api_client/__init__.py +3 -0
  4. cloe_nessy/clients/api_client/api_client.py +188 -0
  5. cloe_nessy/clients/api_client/api_response.py +72 -0
  6. cloe_nessy/clients/api_client/auth.py +178 -0
  7. cloe_nessy/clients/api_client/exceptions.py +22 -0
  8. cloe_nessy/file_utilities/__init__.py +3 -0
  9. cloe_nessy/file_utilities/exceptions.py +4 -0
  10. cloe_nessy/file_utilities/factory.py +42 -0
  11. cloe_nessy/file_utilities/get_file_paths.py +72 -0
  12. cloe_nessy/file_utilities/location_types.py +29 -0
  13. cloe_nessy/file_utilities/strategies/__init__.py +0 -0
  14. cloe_nessy/file_utilities/strategies/base_strategy.py +59 -0
  15. cloe_nessy/file_utilities/strategies/local_strategy.py +51 -0
  16. cloe_nessy/file_utilities/strategies/onelake_strategy.py +31 -0
  17. cloe_nessy/file_utilities/strategies/utils_strategy.py +72 -0
  18. cloe_nessy/integration/__init__.py +0 -0
  19. cloe_nessy/integration/reader/__init__.py +6 -0
  20. cloe_nessy/integration/reader/api_reader.py +141 -0
  21. cloe_nessy/integration/reader/catalog_reader.py +49 -0
  22. cloe_nessy/integration/reader/excel_reader.py +170 -0
  23. cloe_nessy/integration/reader/exceptions.py +10 -0
  24. cloe_nessy/integration/reader/file_reader.py +96 -0
  25. cloe_nessy/integration/reader/reader.py +34 -0
  26. cloe_nessy/integration/writer/__init__.py +3 -0
  27. cloe_nessy/integration/writer/catalog_writer.py +48 -0
  28. cloe_nessy/logging/__init__.py +3 -0
  29. cloe_nessy/logging/logger_mixin.py +162 -0
  30. cloe_nessy/models/__init__.py +13 -0
  31. cloe_nessy/models/column.py +65 -0
  32. cloe_nessy/models/constraint.py +9 -0
  33. cloe_nessy/models/foreign_key.py +34 -0
  34. cloe_nessy/models/mixins/__init__.py +0 -0
  35. cloe_nessy/models/mixins/read_instance_mixin.py +124 -0
  36. cloe_nessy/models/mixins/template_loader_mixin.py +18 -0
  37. cloe_nessy/models/schema.py +76 -0
  38. cloe_nessy/models/table.py +236 -0
  39. cloe_nessy/models/types.py +7 -0
  40. cloe_nessy/object_manager/__init__.py +3 -0
  41. cloe_nessy/object_manager/table_manager.py +58 -0
  42. cloe_nessy/pipeline/__init__.py +7 -0
  43. cloe_nessy/pipeline/actions/__init__.py +50 -0
  44. cloe_nessy/pipeline/actions/read_api.py +178 -0
  45. cloe_nessy/pipeline/actions/read_catalog_table.py +68 -0
  46. cloe_nessy/pipeline/actions/read_excel.py +177 -0
  47. cloe_nessy/pipeline/actions/read_files.py +105 -0
  48. cloe_nessy/pipeline/actions/read_metadata_yaml.py +66 -0
  49. cloe_nessy/pipeline/actions/transform_change_datatype.py +56 -0
  50. cloe_nessy/pipeline/actions/transform_concat_columns.py +88 -0
  51. cloe_nessy/pipeline/actions/transform_decode.py +102 -0
  52. cloe_nessy/pipeline/actions/transform_distinct.py +40 -0
  53. cloe_nessy/pipeline/actions/transform_filter.py +51 -0
  54. cloe_nessy/pipeline/actions/transform_generic_sql.py +66 -0
  55. cloe_nessy/pipeline/actions/transform_join.py +81 -0
  56. cloe_nessy/pipeline/actions/transform_json_normalize.py +106 -0
  57. cloe_nessy/pipeline/actions/transform_rename_columns.py +60 -0
  58. cloe_nessy/pipeline/actions/transform_replace_values.py +59 -0
  59. cloe_nessy/pipeline/actions/transform_select_columns.py +83 -0
  60. cloe_nessy/pipeline/actions/transform_union.py +71 -0
  61. cloe_nessy/pipeline/actions/write_catalog_table.py +73 -0
  62. cloe_nessy/pipeline/pipeline.py +201 -0
  63. cloe_nessy/pipeline/pipeline_action.py +62 -0
  64. cloe_nessy/pipeline/pipeline_config.py +92 -0
  65. cloe_nessy/pipeline/pipeline_context.py +56 -0
  66. cloe_nessy/pipeline/pipeline_parsing_service.py +156 -0
  67. cloe_nessy/pipeline/pipeline_step.py +50 -0
  68. cloe_nessy/py.typed +0 -0
  69. cloe_nessy/session/__init__.py +3 -0
  70. cloe_nessy/session/session_manager.py +188 -0
  71. cloe_nessy/settings/__init__.py +3 -0
  72. cloe_nessy/settings/settings.py +91 -0
  73. cloe_nessy/utils/__init__.py +0 -0
  74. cloe_nessy/utils/file_and_directory_handler.py +19 -0
  75. cloe_nessy-0.2.9.dist-info/METADATA +26 -0
  76. cloe_nessy-0.2.9.dist-info/RECORD +78 -0
  77. cloe_nessy-0.2.9.dist-info/WHEEL +5 -0
  78. cloe_nessy-0.2.9.dist-info/top_level.txt +1 -0
@@ -0,0 +1,50 @@
+ from dataclasses import dataclass, field
+ from typing import Any
+
+ from .pipeline_action import PipelineAction
+ from .pipeline_context import PipelineContext
+
+
+ @dataclass
+ class PipelineStep:
+     """A PipelineStep is a logical step within a Pipeline.
+
+     The step stores the PipelineContext and offers an interface to interact with
+     the step's DataFrame.
+
+     Attributes:
+         name: The name of the step.
+         action: The action to be executed.
+         is_successor: A boolean indicating if the step is a successor and takes
+             the previous step's context.
+         context: The context of the step.
+         options: Additional options for the step.
+         _predecessors: A list of names of the steps that are predecessors to this step.
+         _context_ref: Reference to the previous step's context.
+         _table_metadata_ref: Reference to the previous step's metadata.
+     """
+
+     name: str
+     action: PipelineAction
+     context: PipelineContext = field(default_factory=lambda: PipelineContext())
+     options: dict[str, Any] = field(default_factory=lambda: {})
+     result: PipelineContext = field(default_factory=lambda: PipelineContext())
+     _predecessors: set[str] = field(default_factory=lambda: set())
+     _context_ref: str | None = None
+     _table_metadata_ref: str | None = None
+
+     def __post_init__(self) -> None:
+         if not isinstance(self.action, PipelineAction):
+             raise ValueError("action must be a PipelineAction subclass.")
+         if self._context_ref:
+             self._predecessors.add(self._context_ref)
+         if self._table_metadata_ref:
+             self._predecessors.add(self._table_metadata_ref)
+         if self.options:
+             for val in self.options.values():
+                 if isinstance(val, PipelineStep):
+                     self._predecessors.add(val.name)
+
+     def run(self) -> None:
+         """Execute the action on the context."""
+         self.result = self.action.run(context=self.context, **self.options)
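The step/action wiring above is easiest to see in isolation. Below is a minimal, self-contained sketch of the same delegation pattern; the Context, Action, and UppercaseAction names are illustrative stand-ins, not classes shipped by cloe-nessy.

    from dataclasses import dataclass, field
    from typing import Any


    @dataclass
    class Context:                      # simplified stand-in for PipelineContext
        data: Any = None


    class Action:                       # simplified stand-in for PipelineAction
        def run(self, context: Context, **options: Any) -> Context:
            raise NotImplementedError


    class UppercaseAction(Action):
        def run(self, context: Context, **options: Any) -> Context:
            return Context(data=str(context.data).upper())


    @dataclass
    class Step:                         # mirrors how PipelineStep.run() delegates to its action
        name: str
        action: Action
        context: Context = field(default_factory=Context)
        options: dict[str, Any] = field(default_factory=dict)
        result: Context = field(default_factory=Context)

        def run(self) -> None:
            self.result = self.action.run(context=self.context, **self.options)


    step = Step(name="shout", action=UppercaseAction(), context=Context(data="hello"))
    step.run()
    print(step.result.data)             # HELLO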
cloe_nessy/py.typed ADDED
File without changes
@@ -0,0 +1,3 @@
+ from .session_manager import SessionManager
+
+ __all__ = ["SessionManager"]
@@ -0,0 +1,188 @@
+ import os
+ from enum import Enum
+ from typing import Any
+
+ from pyspark.sql import SparkSession
+
+
+ class SessionManager:
+     """SessionManager is a singleton class that manages the SparkSession instance."""
+
+     class Environment(Enum):
+         """Enumeration of execution environments for Spark utilities.
+
+         This Enum defines the different environments in which the Spark session
+         can operate, including:
+         - DATABRICKS_UI: Represents the Databricks user interface.
+         - FABRIC_UI: Represents the Fabric user interface.
+         - DATABRICKS_CONNECT: Represents the Databricks Connect environment.
+         - OTHER_REMOTE_SPARK: Represents other remote Spark environments, such as those used in tests.
+         """
+
+         DATABRICKS_UI = "databricks_ui"
+         FABRIC_UI = "fabric_ui"
+         DATABRICKS_CONNECT = "databricks_connect"
+         OTHER_REMOTE_SPARK = "other_remote_spark"
+
+     _spark: SparkSession | None = None
+     _utils = None
+     _env: Environment | None = None
+
+     @classmethod
+     def get_spark_session(cls, config: dict[str, str] | None = None, profile_name: str = "DEFAULT") -> SparkSession:
+         """Creates or retrieves an existing SparkSession.
+
+         This method initializes a SparkSession based on the provided
+         configuration and profile name. If a SparkSession already exists,
+         it returns that instance; otherwise, it creates a new one.
+
+         Args:
+             config: An optional Spark configuration
+                 provided as key-value pairs.
+             profile_name: The name of the Databricks profile to use.
+                 Defaults to "DEFAULT".
+
+         Returns:
+             An instance of SparkSession for data processing.
+         """
+         if cls._spark is not None:
+             return cls._spark
+
+         if cls._env is None:
+             cls._detect_env()
+
+         builder = cls.get_spark_builder()
+
+         if config:
+             for key, value in config.items():
+                 builder.config(key, value)  # type: ignore
+
+         cls._spark = builder.getOrCreate()
+
+         return cls._spark
+
+     @classmethod
+     def get_utils(cls) -> Any:  # return type should be Union[DBUtils, MsSparkUtils], but can't import locally.
+         """Get or create a DBUtils or MsSparkUtils instance, depending on the context.
+
+         In Databricks this will return DBUtils, while in Fabric it will return MsSparkUtils.
+
+         Returns:
+             utils: The DBUtils or MsSparkUtils instance.
+
+         Raises:
+             RuntimeError: If the instance cannot be created.
+         """
+         if cls._utils is not None:
+             return cls._utils
+
+         if cls._env is None:
+             cls._detect_env()
+
+         utils_function = {
+             cls.Environment.DATABRICKS_UI: cls._get_dbutils,
+             cls.Environment.DATABRICKS_CONNECT: cls._get_dbutils,
+             cls.Environment.OTHER_REMOTE_SPARK: cls._get_dbutils,
+             cls.Environment.FABRIC_UI: cls._get_mssparkutils,
+         }
+
+         try:
+             cls._utils = utils_function[cls._env](cls)  # type: ignore
+         except Exception as e:
+             raise RuntimeError(f"Cannot create utils instance. Error: {e}") from e
+
+         return cls._utils
+
+     def _get_dbutils(cls):
+         from pyspark.dbutils import DBUtils
+
+         cls.get_spark_session()
+         utils = DBUtils(cls._spark)
+         return utils
+
+     def _get_mssparkutils(cls):
+         from notebookutils import mssparkutils  # type: ignore
+
+         return mssparkutils
+
+     @classmethod
+     def _detect_env(cls) -> Environment | None:
+         """Detects the current execution environment for Spark.
+
+         This class method attempts to import the necessary modules to determine
+         whether the code is running in the Databricks UI, the Fabric UI, or via
+         Databricks Connect. It sets the class variable `_env` accordingly.
+
+         The detection process involves checking the type of `dbutils` to identify
+         the environment. If the environment has already been detected, the
+         cached value is returned.
+
+         Returns:
+             Environment: An enum value indicating the detected environment.
+
+         Raises:
+             RuntimeError: If the environment cannot be detected due to
+                 import errors or other exceptions.
+         """
+         if cls._env is not None:
+             return cls._env
+
+         try:
+             from databricks.sdk.dbutils import RemoteDbUtils  # type: ignore
+
+             if isinstance(dbutils, RemoteDbUtils):  # type: ignore [name-defined]
+                 cls._env = cls.Environment.DATABRICKS_CONNECT
+                 return cls._env
+         except (ImportError, NameError):
+             pass
+
+         try:
+             from notebookutils import mssparkutils  # type: ignore # noqa: F401
+
+             cls._env = cls.Environment.FABRIC_UI
+             return cls._env
+         except ImportError:
+             pass
+
+         try:
+             from dbruntime.dbutils import DBUtils  # type: ignore [import-not-found] # noqa: F401
+
+             cls._env = cls.Environment.DATABRICKS_UI
+             return cls._env
+         except ImportError:
+             pass
+
+         try:
+             from pyspark.sql.connect.session import (
+                 SparkSession as RemoteSparkSession,  # type: ignore [import-not-found] # noqa: F401
+             )
+
+             cls._env = cls.Environment.OTHER_REMOTE_SPARK
+             return cls._env
+         except ImportError:
+             pass
+
+         raise RuntimeError("Cannot detect environment.")
+
+     @classmethod
+     def get_spark_builder(cls):
+         """Get the SparkSession builder based on the current environment."""
+         builders = {
+             cls.Environment.DATABRICKS_UI: SparkSession.builder,
+             cls.Environment.FABRIC_UI: SparkSession.builder,
+             cls.Environment.DATABRICKS_CONNECT: cls._get_databricks_connect_builder(),
+             cls.Environment.OTHER_REMOTE_SPARK: cls._get_databricks_connect_builder(),
+         }
+
+         builder = builders.get(cls._env)
+         if builder is None:
+             raise ValueError(f"Unsupported environment: {cls._env}")
+
+         return builder
+
+     @staticmethod
+     def _get_databricks_connect_builder():
+         from databricks.connect import DatabricksSession
+
+         selected_profile_name = os.environ.get("NESSY_DATABRICKSPROFILE") or "DEFAULT"
+         return DatabricksSession.builder.profile(selected_profile_name)
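A minimal usage sketch for the class above, assuming cloe-nessy is installed and a Databricks (or other Spark) runtime is reachable; the config key shown is illustrative, not required by SessionManager.

    from cloe_nessy.session import SessionManager

    # First call detects the environment, applies the optional config, and caches the session.
    spark = SessionManager.get_spark_session(config={"spark.sql.shuffle.partitions": "8"})

    # Later calls return the cached instance (singleton behaviour).
    assert spark is SessionManager.get_spark_session()

    # DBUtils on Databricks / Databricks Connect, mssparkutils on Fabric,
    # depending on the detected environment.
    utils = SessionManager.get_utils()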
@@ -0,0 +1,3 @@
+ from .settings import LoggingSettings, NessySettings
+
+ __all__ = ["LoggingSettings", "NessySettings"]
@@ -0,0 +1,91 @@
+ import logging
+
+ from pydantic import AnyUrl, Field, model_validator
+ from pydantic_settings import BaseSettings, SettingsConfigDict
+
+
+ def get_log_level(log_level: int | str) -> int:
+     """Convert the log level to an integer.
+
+     Args:
+         log_level: The log level as a string or integer.
+
+     Returns:
+         The log level as an integer.
+     """
+     try:
+         log_level = int(log_level)
+     except ValueError:
+         if isinstance(log_level, str):
+             log_level = int(logging.getLevelName(log_level.upper()))
+         else:
+             log_level = 20
+     return log_level
+
+
+ class LoggingSettings(BaseSettings):
+     """This class defines the logging settings of the nessy Framework.
+
+     Attributes:
+         target_log_analytics: Whether to log to Azure Log Analytics.
+         target_unity_catalog_table: Whether to log to the Unity Catalog Table.
+         log_analytics_workspace_id: The workspace ID for Azure Log Analytics.
+         log_analytics_shared_key: The shared key for Azure Log Analytics.
+         uc_workspace_url: The workspace URL for the Unity Catalog Table.
+         uc_warehouse_id: The warehouse ID for the Unity Catalog Table.
+         uc_catalog_name: The catalog name for the Unity Catalog Table.
+         uc_schema_name: The schema name for the Unity Catalog Table.
+         log_level_console: The log level for the console logger.
+         log_level_tabular: The log level for the tabular logger.
+         log_format_console: The format of the console logger.
+     """
+
+     model_config = SettingsConfigDict(env_prefix="nessy_")
+
+     target_log_analytics: bool = Field(default=False)
+     target_unity_catalog_table: bool = Field(default=False)
+
+     log_analytics_workspace_id: str | None = Field(default=None)
+     log_analytics_shared_key: str | None = Field(default=None)
+     # log_type is not implemented on purpose, because separate loggers will
+     # require different schemas that can't be in the same table
+
+     uc_workspace_url: AnyUrl | None = Field(default=None)
+     uc_warehouse_id: str | None = Field(default=None)
+     uc_catalog_name: str | None = Field(default=None)
+     uc_schema_name: str | None = Field(default=None)
+     # table is not implemented on purpose, because separate loggers will require
+     # different schemas that can't be in the same table
+
+     log_level_console: int = Field(default=logging.INFO)
+     log_level_tabular: int = Field(default=logging.INFO)
+
+     log_format_console: str = "%(asctime)s - %(message)s"
+
+     @model_validator(mode="before")
+     def _convert_log_levels(cls, settings):
+         """Convert the log levels to integers."""
+         settings["log_level_console"] = get_log_level(settings.get("log_level_console", logging.INFO))
+         settings["log_level_tabular"] = get_log_level(settings.get("log_level_tabular", logging.INFO))
+         return settings
+
+     @model_validator(mode="after")
+     def _validate_log_analytics_settings(cls, settings):
+         if settings.target_log_analytics is True:
+             if not settings.log_analytics_workspace_id or not settings.log_analytics_shared_key:
+                 raise ValueError(
+                     "`NESSY_LOG_ANALYTICS_WORKSPACE_ID` and `NESSY_LOG_ANALYTICS_SHARED_KEY` environment variables must be set if `NESSY_LOG_TO_LOG_ANALYTICS_WORKSPACE` is set to true."
+                 )
+         return settings
+
+
+ class NessySettings(BaseSettings):
+     """This class defines the settings of the nessy Framework.
+
+     Attributes:
+         logging: The logging settings of the nessy Framework.
+     """
+
+     model_config = SettingsConfigDict(env_prefix="nessy_")
+
+     logging: LoggingSettings = Field(default_factory=LoggingSettings)
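A short sketch of driving these settings through NESSY_-prefixed environment variables, assuming cloe-nessy is installed; the values are illustrative only.

    import logging
    import os

    from cloe_nessy.settings import NessySettings

    # Any NESSY_-prefixed variable maps onto a LoggingSettings field (env_prefix="nessy_").
    os.environ["NESSY_LOG_LEVEL_CONSOLE"] = "DEBUG"

    settings = NessySettings()  # LoggingSettings is built via default_factory and reads the env
    assert settings.logging.log_level_console == logging.DEBUG  # "DEBUG" -> 10 via get_log_level()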
File without changes
@@ -0,0 +1,19 @@
+ from pathlib import Path
+
+
+ def process_path(path: str | Path | None) -> Path | None:
+     """Converts the input to a pathlib.Path object if it is a string, and returns the pathlib.Path object.
+
+     Args:
+         path: The file path, which can be a string or a pathlib.Path object.
+
+     Raises:
+         TypeError: If the input is neither a string nor a pathlib.Path object.
+     """
+     if not path:
+         path = None
+     elif isinstance(path, str):
+         path = Path(path)
+     elif not isinstance(path, Path):
+         raise TypeError("path must be a string or a pathlib.Path object")
+     return path
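A brief usage sketch for process_path, assuming the package is installed; the example paths are arbitrary.

    from pathlib import Path

    from cloe_nessy.utils.file_and_directory_handler import process_path

    assert process_path("data/config.yaml") == Path("data/config.yaml")  # str is converted to Path
    assert process_path(Path("/tmp/data")) == Path("/tmp/data")          # Path objects pass through
    assert process_path(None) is None                                    # falsy input returns None
    # process_path(42) raises TypeError: neither str nor Path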
@@ -0,0 +1,26 @@
+ Metadata-Version: 2.2
+ Name: cloe-nessy
+ Version: 0.2.9
+ Summary: Your friendly datalake monster.
+ Home-page: https://initions.com/
+ Author: initions
+ Author-email: ICSMC_EXT_PYPIORG@accenture.com
+ License: MIT
+ Requires-Python: <3.12,>=3.11
+ Requires-Dist: pydantic<3.0.0,>=2.7.2
+ Requires-Dist: pyyaml<7.0.0,>=6.0.1
+ Requires-Dist: types-pyyaml<7.0.0.0,>=6.0.12.20240311
+ Requires-Dist: jinja2<4.0.0,>=3.1.4
+ Requires-Dist: pydantic-settings<3.0.0,>=2.4.0
+ Requires-Dist: openpyxl<4.0.0,>=3.1.5
+ Requires-Dist: requests<3.0.0,>=2.32.3
+ Requires-Dist: types-requests<3.0.0.0,>=2.32.0.20240712
+ Requires-Dist: pandas-stubs<3.0.0.0,>=2.2.2.240807
+ Requires-Dist: azure-identity<2.0.0,>=1.19.0
+ Requires-Dist: httpx<1.0.0,>=0.27.2
+ Requires-Dist: databricks-sdk<1.0.0,>=0.36.0
+ Requires-Dist: networkx<4.0,>=3.3
+ Requires-Dist: matplotlib<4.0.0,>=3.9.2
+ Requires-Dist: types-networkx<4.0.0.0,>=3.2.1.20240820
+ Requires-Dist: fsspec<2025.0.0,>=2024.9.0
+ Requires-Dist: cloe-logging[databricks,log-analytics]<0.4,>=0.3.7
@@ -0,0 +1,78 @@
+ cloe_nessy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ cloe_nessy/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ cloe_nessy/clients/__init__.py,sha256=GhiRvemNZ4TMS5rrHdmZEF73zozmrhvom2R5Oj6j9FI,71
+ cloe_nessy/clients/api_client/__init__.py,sha256=hPp8ByXw7mYfroIfDjD2ya79f-ZHAuaNJ07ff1dCe9Y,62
+ cloe_nessy/clients/api_client/api_client.py,sha256=7_hhj9bogS9sR21f0nDCx-O8-k0kzGsumBLKPF3gOvQ,7074
+ cloe_nessy/clients/api_client/api_response.py,sha256=f5KQbHdl47JTS-6luY34GlGx_l2qUxK49_ihmn3P--w,2884
+ cloe_nessy/clients/api_client/auth.py,sha256=TNJQeSfBQ6O6jmqZvoeS-vyyG4PnhpyqbNIwfyElhpM,6737
+ cloe_nessy/clients/api_client/exceptions.py,sha256=VR9nYMHWzIRLlMZMrPpOsEX0X_P0jXJCTSBLTEhtN1E,403
+ cloe_nessy/file_utilities/__init__.py,sha256=nY8H48jYHvTy0VYSRHVhZaFMlzfch4-T7y3N73tgMpI,73
+ cloe_nessy/file_utilities/exceptions.py,sha256=RDeV2S6AQnFhFINRo84HDV_hk2RMrf5oNQ7GhHmAZy0,97
+ cloe_nessy/file_utilities/factory.py,sha256=JONYGI8MCkNwG2_ujvjN3iB7BIdl7SqXKgV05YY_i4E,1735
+ cloe_nessy/file_utilities/get_file_paths.py,sha256=RoIOaBcHCMPiVHVEDm5ijnhLxFABZNVZK-nXooh2c7A,2895
+ cloe_nessy/file_utilities/location_types.py,sha256=G0FjpEu4_inmWbu5tvs2FyZv2TIhmPgjWU_Rtvmd6i8,801
+ cloe_nessy/file_utilities/strategies/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ cloe_nessy/file_utilities/strategies/base_strategy.py,sha256=qOcJE7re9-LNlrwtpFoAQIzH_YXx0jHHNKeqKEZzKrs,2591
+ cloe_nessy/file_utilities/strategies/local_strategy.py,sha256=z_fKAfPcAeKE7SJ8-v8iFwHiWBxyFipfPs7VJhv1FSU,2073
+ cloe_nessy/file_utilities/strategies/onelake_strategy.py,sha256=dIAkHPbmybVVxZSlnCPx3CpprahtopV5lxjJXPUKhz8,1405
+ cloe_nessy/file_utilities/strategies/utils_strategy.py,sha256=luFPpjdc-B9-vcLoHun9gguG86o0ERzH9lplmzOpoQE,3050
+ cloe_nessy/integration/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ cloe_nessy/integration/reader/__init__.py,sha256=J5vlORqHLBpHEvzIwfIjzN5xEdOat-8jlmdLcGj8nsA,239
+ cloe_nessy/integration/reader/api_reader.py,sha256=j3Z5O1oH-Zc43TyA_aYtnDNYC9xFMxMqDsRQWtEZGD8,5636
+ cloe_nessy/integration/reader/catalog_reader.py,sha256=tGK-Y0jZQGOrF9eZUzSr7ils-L58uex6qH9PZ81ZLy8,1835
+ cloe_nessy/integration/reader/excel_reader.py,sha256=4kifpIakHpGmap0-P0SUgjJoQdY-eeiZBIDrQp87wK8,8012
+ cloe_nessy/integration/reader/exceptions.py,sha256=_A9jFpe_RIDZCGY76qzjic9bsshxns6yXPSl141dq1c,203
+ cloe_nessy/integration/reader/file_reader.py,sha256=CsKjn2W7-w6drbWtD7PNMniJ8kCgxm4qW4knhYA37tg,3850
+ cloe_nessy/integration/reader/reader.py,sha256=e2KVPePQme8SBQJEbL-3zpGasOgTiEvKFTslow2wGPw,1034
+ cloe_nessy/integration/writer/__init__.py,sha256=NIh0t1RYlG3J1Y5_CvnR36N9tISmcElD5Tq06ksmqoA,71
+ cloe_nessy/integration/writer/catalog_writer.py,sha256=49lDvYttUY79Ye_OMN2cji7lGJNNML4TTsjY7VvLVfc,2137
+ cloe_nessy/logging/__init__.py,sha256=ySVCVbdyR3Dno_tl2ZfiER_7EVaDoQMHVkNyfdMZumY,65
+ cloe_nessy/logging/logger_mixin.py,sha256=9iy7BF6drYme-f7Rrt_imbVBRgVqQ89xjcP1X5aMtfY,7467
+ cloe_nessy/models/__init__.py,sha256=_JPN_R5-QDfjYzvrvZDdeOezl0C-JTG-Rk4S1VE5vJM,242
+ cloe_nessy/models/column.py,sha256=53fBwRnino72XKACsHZpN9QfCBqqSXyKLHZlM0huumg,1988
+ cloe_nessy/models/constraint.py,sha256=hsFlhn4n928z81O3dl3v5bMetewPWzMjkJK3_4kASSM,178
+ cloe_nessy/models/foreign_key.py,sha256=DwRVHs9sShqqPV-NL7ow_3AmPPWX0Od26yZn_I565pU,1001
+ cloe_nessy/models/schema.py,sha256=8bc1fakLUWZzkVZ_Zn5iWMUvfDNxnuoToNE4kmqtBJo,2764
+ cloe_nessy/models/table.py,sha256=lshPBA3D6vA1samtC7WmlfZZWrMUrOLna89rs8lhGCI,10472
+ cloe_nessy/models/types.py,sha256=XRbuJGdTNa6aXyE3IAzs_J9gVjbfkzMDLfGl-k6jI_4,223
+ cloe_nessy/models/mixins/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ cloe_nessy/models/mixins/read_instance_mixin.py,sha256=j5Y4aNWOh1jlskEaxNooZFJgPyxRmik00gAVLJnAaRs,4507
+ cloe_nessy/models/mixins/template_loader_mixin.py,sha256=5MXhEGBFlq3dwZvINEyBowSlipNnVun2H_TmhI_fsS4,549
+ cloe_nessy/object_manager/__init__.py,sha256=tt_sBt8eC-jCP8FShqRPKJvGNVIPeb-htA7NoUivTjY,68
+ cloe_nessy/object_manager/table_manager.py,sha256=K6OGCNNDt1ceLA0MkwtyW6AR5tYIW3tfqF3ZcvHlcUw,2717
+ cloe_nessy/pipeline/__init__.py,sha256=sespmJ5JsgyiFyZiedTiL2kg--zGIX7cjTYsD5vemEg,325
+ cloe_nessy/pipeline/pipeline.py,sha256=oQ1PwYkOSGHOfgbmImy7IbB5Ma-NKHN_CMXq1FepTc4,9206
+ cloe_nessy/pipeline/pipeline_action.py,sha256=S7IVFdmG12fRBzHuE_DiWn7qlMtApz6IloVd2Fj31Sg,1944
+ cloe_nessy/pipeline/pipeline_config.py,sha256=BN3ZSbr6bC-X9edoh-n5vRfPHFMbgtAU7mQ3dBrcWO8,3131
+ cloe_nessy/pipeline/pipeline_context.py,sha256=csElDc6BsynDUtRXgQOSCH7ONc_b-ag0YEg0zlQTz58,1874
+ cloe_nessy/pipeline/pipeline_parsing_service.py,sha256=c_nAsgw81QYBM9AFiTxGgqRhNXABkDKplbeoCJPtbpE,6434
+ cloe_nessy/pipeline/pipeline_step.py,sha256=UlnmpS6gm_dZ7m9dD1mZvye7mvUF_DA7HjOZo0oGYDU,1977
+ cloe_nessy/pipeline/actions/__init__.py,sha256=shWYl1TDL2f58wHfBhPpiLldreNkvLGJjhnBaTYusFY,2066
+ cloe_nessy/pipeline/actions/read_api.py,sha256=wGyPZdeh3Cam_BQBilltWBWCIdD9I_kv4lunEhE39Tg,6625
+ cloe_nessy/pipeline/actions/read_catalog_table.py,sha256=aZy4sJLLE8ZQ_SPXGSDoHYaBJTz8s7xQDVn5eYrYHvE,2689
+ cloe_nessy/pipeline/actions/read_excel.py,sha256=EgHbK1wO6dkDo0KErYDhK_2sNIkIoa-6As9oo9dNFsE,7708
+ cloe_nessy/pipeline/actions/read_files.py,sha256=8twjprqKYEmVu5QITEGe4no45TfhgzZosTFVQ89vV6g,3861
+ cloe_nessy/pipeline/actions/read_metadata_yaml.py,sha256=aZtkstf9jBYYN2MGnazz63BG_hJ7mIgAfKiNqUpc26E,2235
+ cloe_nessy/pipeline/actions/transform_change_datatype.py,sha256=Nz3Ncr-Zd-wy8g9-aN5XcvpWAHLyWs70RpZ7KqKqIaU,1788
+ cloe_nessy/pipeline/actions/transform_concat_columns.py,sha256=V0TzeQFpBYur_T1Nv0nRpOU02nKQ2iypo2CCcV2rBtk,3083
+ cloe_nessy/pipeline/actions/transform_decode.py,sha256=DmT-29dIqbz_xTj4GSCfnbgYRCiUrWzKvGrRYy1frNw,4004
+ cloe_nessy/pipeline/actions/transform_distinct.py,sha256=sdCElXCM77AQ0m6Zzg_h7cyavBOxo7W9K1NrsvNLufA,1105
+ cloe_nessy/pipeline/actions/transform_filter.py,sha256=vOAxKtNWCABLb6G6Xz98NK7fEfgn6QJia31S7IvoUTg,1428
+ cloe_nessy/pipeline/actions/transform_generic_sql.py,sha256=cli59HCERFge7f0RB8yXw2oDtHSbMCWQMdeCeqhbdg8,2355
+ cloe_nessy/pipeline/actions/transform_join.py,sha256=qktyaN2kcCkmoH3RILTc-UGYsGACx1nXH6xLtuvYi7k,3080
+ cloe_nessy/pipeline/actions/transform_json_normalize.py,sha256=xN_cQgHSMSyPsyYXBdoe2i5pHnyH-kkH5do8qr3vybw,4157
+ cloe_nessy/pipeline/actions/transform_rename_columns.py,sha256=fFdg3353QCE3zBei6iYQW9huPBcQ906sJLioaOUWj3s,1924
+ cloe_nessy/pipeline/actions/transform_replace_values.py,sha256=-uOAbHkQZ2X23GB15W4-miAoHzyFH9hJyc6Y_5PA0w8,2017
+ cloe_nessy/pipeline/actions/transform_select_columns.py,sha256=Kez8puDK7cRfhleBEX-B-elKCvNPRU9ERSWs9afMGO8,3369
+ cloe_nessy/pipeline/actions/transform_union.py,sha256=TDER06IABzxvIez4bGLKCLaDA4eScpTzYRbfUzwv_RQ,2342
+ cloe_nessy/pipeline/actions/write_catalog_table.py,sha256=6yAHTX5kZviumgBW_NYVGAUin6U2nDzmic9of6wA8FY,2590
+ cloe_nessy/session/__init__.py,sha256=t7_YjUhJYW3km_FrucaUdbIl1boQtwkyhw_8yE10qzc,74
+ cloe_nessy/session/session_manager.py,sha256=B1TCfpZ8aieN37WWyY2b9qs2U7muyL1edzDCCPeOxHs,6407
+ cloe_nessy/settings/__init__.py,sha256=ZbkneO3WaKOxon7qHFHnou7EnBOSnBFyKMDZblIEvzM,101
+ cloe_nessy/settings/settings.py,sha256=I4n129lrujriW-d8q4as2Kb4_kI932ModfZ5Ow_UpVM,3653
+ cloe_nessy/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ cloe_nessy/utils/file_and_directory_handler.py,sha256=r2EVt9xG81p6ScaJCwETC5an6pMT6WseB0jMOR-JlpU,602
+ cloe_nessy-0.2.9.dist-info/METADATA,sha256=42HBrdWGyYtb3eOER40KHnW0wVOUK0yQHI8Xi5uEscE,1837
+ cloe_nessy-0.2.9.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+ cloe_nessy-0.2.9.dist-info/top_level.txt,sha256=Z7izn8HmQpg2wBUb-0jzaKlYKMU7Ypzuc9__9vPtW_I,11
+ cloe_nessy-0.2.9.dist-info/RECORD,,
@@ -0,0 +1,5 @@
+ Wheel-Version: 1.0
+ Generator: setuptools (75.8.0)
+ Root-Is-Purelib: true
+ Tag: py3-none-any
+
@@ -0,0 +1 @@
+ cloe_nessy