snowpark-checkpoints-configuration 0.1.3.tar.gz → 0.2.0.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (18)
  1. {snowpark_checkpoints_configuration-0.1.3 → snowpark_checkpoints_configuration-0.2.0}/PKG-INFO +2 -2
  2. {snowpark_checkpoints_configuration-0.1.3 → snowpark_checkpoints_configuration-0.2.0}/pyproject.toml +1 -1
  3. {snowpark_checkpoints_configuration-0.1.3 → snowpark_checkpoints_configuration-0.2.0}/src/snowflake/snowpark_checkpoints_configuration/__init__.py +10 -0
  4. {snowpark_checkpoints_configuration-0.1.3 → snowpark_checkpoints_configuration-0.2.0}/src/snowflake/snowpark_checkpoints_configuration/__version__.py +1 -1
  5. {snowpark_checkpoints_configuration-0.1.3 → snowpark_checkpoints_configuration-0.2.0}/src/snowflake/snowpark_checkpoints_configuration/checkpoint_metadata.py +22 -8
  6. {snowpark_checkpoints_configuration-0.1.3 → snowpark_checkpoints_configuration-0.2.0}/src/snowflake/snowpark_checkpoints_configuration/model/checkpoints.py +30 -4
  7. {snowpark_checkpoints_configuration-0.1.3 → snowpark_checkpoints_configuration-0.2.0}/test/unit/test_checkpoint_metadata.py +23 -9
  8. {snowpark_checkpoints_configuration-0.1.3 → snowpark_checkpoints_configuration-0.2.0}/test/unit/test_checkpoint_name_utils.py +17 -8
  9. {snowpark_checkpoints_configuration-0.1.3 → snowpark_checkpoints_configuration-0.2.0}/test/unit/test_checkpoints.py +37 -5
  10. snowpark_checkpoints_configuration-0.2.0/test/unit/test_logger.py +128 -0
  11. {snowpark_checkpoints_configuration-0.1.3 → snowpark_checkpoints_configuration-0.2.0}/.gitignore +0 -0
  12. {snowpark_checkpoints_configuration-0.1.3 → snowpark_checkpoints_configuration-0.2.0}/LICENSE +0 -0
  13. {snowpark_checkpoints_configuration-0.1.3 → snowpark_checkpoints_configuration-0.2.0}/README.md +0 -0
  14. {snowpark_checkpoints_configuration-0.1.3 → snowpark_checkpoints_configuration-0.2.0}/src/snowflake/snowpark_checkpoints_configuration/checkpoint_name_utils.py +0 -0
  15. {snowpark_checkpoints_configuration-0.1.3 → snowpark_checkpoints_configuration-0.2.0}/src/snowflake/snowpark_checkpoints_configuration/singleton.py +0 -0
  16. {snowpark_checkpoints_configuration-0.1.3 → snowpark_checkpoints_configuration-0.2.0}/test/.coveragerc +0 -0
  17. {snowpark_checkpoints_configuration-0.1.3 → snowpark_checkpoints_configuration-0.2.0}/test/unit/invalid_checkpoint/checkpoints.json +0 -0
  18. {snowpark_checkpoints_configuration-0.1.3 → snowpark_checkpoints_configuration-0.2.0}/test/unit/valid_checkpoint/checkpoints.json +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: snowpark-checkpoints-configuration
- Version: 0.1.3
+ Version: 0.2.0
  Summary: Migration tools for Snowpark
  Project-URL: Bug Tracker, https://github.com/snowflakedb/snowpark-checkpoints/issues
  Project-URL: Source code, https://github.com/snowflakedb/snowpark-checkpoints/
@@ -27,7 +27,7 @@ Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
  Classifier: Topic :: Software Development :: Libraries :: Python Modules
  Requires-Python: <3.12,>=3.9
  Requires-Dist: pydantic>=1.8.2
- Requires-Dist: snowflake-snowpark-python==1.26.0
+ Requires-Dist: snowflake-snowpark-python>=1.23.0
  Provides-Extra: development
  Requires-Dist: coverage>=7.6.7; extra == 'development'
  Requires-Dist: hatchling==1.25.0; extra == 'development'
pyproject.toml
@@ -25,7 +25,7 @@ classifiers = [

  dependencies = [
      "pydantic>=1.8.2",
-     "snowflake-snowpark-python==1.26.0",
+     "snowflake-snowpark-python>=1.23.0",
  ]

  authors = [
src/snowflake/snowpark_checkpoints_configuration/__init__.py
@@ -13,10 +13,20 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.

+ import logging
+
+
+ # Add a NullHandler to prevent logging messages from being output to
+ # sys.stderr if no logging configuration is provided.
+ logging.getLogger(__name__).addHandler(logging.NullHandler())
+
+ # ruff: noqa: E402
+
  from snowflake.snowpark_checkpoints_configuration.checkpoint_metadata import (
      CheckpointMetadata,
  )

+
  __all__ = [
      "CheckpointMetadata",
  ]
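
With only a NullHandler attached, the package stays silent unless the consuming application configures logging itself. A minimal sketch of how an application might opt in to the new log output (the format string is illustrative, not part of the package):

    import logging

    # basicConfig attaches a stderr handler to the root logger; package loggers
    # propagate to it, so raising their level surfaces the new INFO messages.
    logging.basicConfig(format="%(name)s - %(levelname)s - %(message)s")
    logging.getLogger("snowflake.snowpark_checkpoints_configuration").setLevel(logging.INFO)
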
src/snowflake/snowpark_checkpoints_configuration/__version__.py
@@ -13,4 +13,4 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.

- __version__ = "0.1.3"
+ __version__ = "0.2.0"
src/snowflake/snowpark_checkpoints_configuration/checkpoint_metadata.py
@@ -12,8 +12,12 @@
  # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  # See the License for the specific language governing permissions and
  # limitations under the License.
+
+ import logging
  import os

+ from typing import Optional
+
  from snowflake.snowpark_checkpoints_configuration.model.checkpoints import (
      Checkpoint,
      Checkpoints,
@@ -21,6 +25,9 @@ from snowflake.snowpark_checkpoints_configuration.model.checkpoints import (
  from snowflake.snowpark_checkpoints_configuration.singleton import Singleton


+ LOGGER = logging.getLogger(__name__)
+
+
  class CheckpointMetadata(metaclass=Singleton):

      """CheckpointMetadata class.
@@ -33,21 +40,28 @@ class CheckpointMetadata(metaclass=Singleton):

      """

-     def __init__(self, path: str = None):
-         directory = path if path is not None else os.getcwd()
+     def __init__(self, path: Optional[str] = None):
          self.checkpoint_model: Checkpoints = Checkpoints(type="", pipelines=[])
+         directory = path if path is not None else os.getcwd()
          checkpoints_file = os.path.join(directory, "checkpoints.json")
          if os.path.exists(checkpoints_file):
-             with open(checkpoints_file) as f:
-                 try:
+             LOGGER.info("Reading checkpoints file: '%s'", checkpoints_file)
+             try:
+                 with open(checkpoints_file) as f:
                      checkpoint_json = f.read()
                      self.checkpoint_model = Checkpoints.model_validate_json(
                          checkpoint_json
                      )
-                 except Exception as e:
-                     raise Exception(
-                         f"Error reading checkpoints file: {checkpoints_file} \n {e}"
-                     ) from None
+                 LOGGER.info(
+                     "Successfully read and validated checkpoints file: '%s'",
+                     checkpoints_file,
+                 )
+             except Exception as e:
+                 error_msg = f"An error occurred while reading the checkpoints file: '{checkpoints_file}'"
+                 LOGGER.exception(error_msg)
+                 raise Exception(f"{error_msg} \n {e}") from None
+         else:
+             LOGGER.warning("Checkpoints file not found: '%s'", checkpoints_file)

      def get_checkpoint(self, checkpoint_name: str) -> Checkpoint:
          """Get a checkpoint by its name.
src/snowflake/snowpark_checkpoints_configuration/model/checkpoints.py
@@ -13,6 +13,8 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.

+ import logging
+
  from typing import Optional

  from pydantic import BaseModel, ConfigDict, field_validator
@@ -21,6 +23,9 @@ from pydantic.alias_generators import to_camel
  from snowflake.snowpark_checkpoints_configuration import checkpoint_name_utils


+ LOGGER = logging.getLogger(__name__)
+
+
  class Checkpoint(BaseModel):

      """Checkpoint model.
@@ -42,15 +47,19 @@ class Checkpoint(BaseModel):
      @field_validator("name", mode="before")
      @classmethod
      def normalize(cls, name: str) -> str:
+         LOGGER.debug("Normalizing checkpoint name: '%s'", name)
          normalized_name = checkpoint_name_utils.normalize_checkpoint_name(name)
+         LOGGER.debug("Checkpoint name was normalized to: '%s'", normalized_name)
          is_valid_checkpoint_name = checkpoint_name_utils.is_valid_checkpoint_name(
              normalized_name
          )
          if not is_valid_checkpoint_name:
-             raise Exception(
-                 f"Invalid checkpoint name: {name} in checkpoints.json file. Checkpoint names must only contain "
-                 f"alphanumeric characters and underscores."
+             error_msg = (
+                 f"Invalid checkpoint name: {name} in checkpoints.json file. "
+                 f"Checkpoint names must only contain alphanumeric characters and underscores."
              )
+             LOGGER.error(error_msg)
+             raise Exception(error_msg)

          return normalized_name

@@ -93,8 +102,11 @@ class Checkpoints(BaseModel):
      _checkpoints = {}

      def _build_checkpoints_dict(self):
+         LOGGER.debug("Building checkpoints dictionary from pipelines.")
          for pipeline in self.pipelines:
+             LOGGER.debug("Processing pipeline: '%s'", pipeline.entry_point)
              for checkpoint in pipeline.checkpoints:
+                 LOGGER.debug("Adding checkpoint: %s", checkpoint)
                  self._checkpoints[checkpoint.name] = checkpoint

      def get_check_point(self, checkpoint_name: str) -> Checkpoint:
@@ -108,21 +120,35 @@
          with the name set to the checkpoint_id.

          """
+         LOGGER.info("Fetching checkpoint: '%s'", checkpoint_name)
          if not self._checkpoints:
+             LOGGER.debug("Checkpoints dictionary is empty, building it...")
              self._build_checkpoints_dict()

          checkpoint = self._checkpoints.get(checkpoint_name)
          if len(self._checkpoints) == 0:
+             LOGGER.info(
+                 "No checkpoints found, creating a new enabled checkpoint with name: '%s'",
+                 checkpoint_name,
+             )
              checkpoint = Checkpoint(name=checkpoint_name, enabled=True)
          elif checkpoint is None:
+             LOGGER.info(
+                 "Checkpoint not found, creating a new disabled checkpoint with name: '%s'",
+                 checkpoint_name,
+             )
              checkpoint = Checkpoint(name=checkpoint_name, enabled=False)
+
+         LOGGER.debug("Returning checkpoint: %s", checkpoint)
          return checkpoint

      def add_checkpoint(self, checkpoint: Checkpoint) -> None:
          """Add a checkpoint to the checkpoints' dictionary.

-         Args:.
+         Args:
              checkpoint (Checkpoint): The checkpoint object to add

          """
+         LOGGER.debug("Adding checkpoint: %s", checkpoint)
          self._checkpoints[checkpoint.name] = checkpoint
+         LOGGER.info("Checkpoint '%s' added successfully.", checkpoint.name)
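
Taken together, the validator and lookup behavior can be summarized with a short sketch, mirroring the unit tests further down (checkpoint names here are illustrative):

    from snowflake.snowpark_checkpoints_configuration.model.checkpoints import (
        Checkpoint,
        Checkpoints,
    )

    # The "name" validator normalizes the raw name (whitespace becomes underscores)
    # and raises if the normalized result fails is_valid_checkpoint_name.
    assert Checkpoint(name="my checkpoint", enabled=True).name == "my_checkpoint"

    # Lookups on an empty collection fall back to an enabled checkpoint;
    # on a non-empty collection, an unknown name yields a disabled one.
    checkpoints = Checkpoints(type="Collection", pipelines=[])
    assert checkpoints.get_check_point("unknown_checkpoint").enabled is True
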
test/unit/test_checkpoint_metadata.py
@@ -12,6 +12,8 @@
  # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  # See the License for the specific language governing permissions and
  # limitations under the License.
+
+ import logging
  import os.path
  import tempfile

@@ -28,12 +30,16 @@ from snowflake.snowpark_checkpoints_configuration.model.checkpoints import (
  from snowflake.snowpark_checkpoints_configuration.singleton import Singleton


- @pytest.fixture
+ @pytest.fixture(autouse=True)
  def singleton():
      Singleton._instances = {}


- def test_checkpoint_metadata_loading(singleton):
+ LOGGER_NAME = "snowflake.snowpark_checkpoints_configuration.checkpoint_metadata"
+
+
+ def test_checkpoint_metadata_loading(caplog: pytest.LogCaptureFixture):
+     caplog.set_level(level=logging.INFO, logger=LOGGER_NAME)
      path = os.path.dirname(os.path.abspath(__file__))
      path = os.path.join(path, "valid_checkpoint")
      metadata = CheckpointMetadata(path)
@@ -62,26 +68,34 @@ def test_checkpoint_metadata_loading(singleton):

      expected_checkpoints = Checkpoints(type="Collection", pipelines=[expected_pipeline])
      assert metadata.checkpoint_model == expected_checkpoints
+     assert "Successfully read and validated checkpoints file" in caplog.text


- def test_checkpoint_metadata_loading_no_file(singleton):
+ def test_checkpoint_metadata_loading_no_file(caplog: pytest.LogCaptureFixture):
+     caplog.set_level(level=logging.WARNING, logger=LOGGER_NAME)
      path = tempfile.gettempdir()
      metadata = CheckpointMetadata(path)
      expected_checkpoints = Checkpoints(type="", pipelines=[])
      assert metadata.checkpoint_model == expected_checkpoints
+     assert "Checkpoints file not found" in caplog.text


- def test_checkpoint_metadata_loading_invalid_file(singleton):
+ def test_checkpoint_metadata_loading_invalid_file(caplog: pytest.LogCaptureFixture):
+     caplog.set_level(level=logging.ERROR, logger=LOGGER_NAME)
      path = os.path.dirname(os.path.abspath(__file__))
      path = os.path.join(path, "invalid_checkpoint")
      checkpoint_file_name = os.path.join(path, "checkpoints.json")
-     expected_exception = f"Error reading checkpoints file: {checkpoint_file_name} \n"
+     expected_error_msg = f"An error occurred while reading the checkpoints file: '{checkpoint_file_name}'"
      with pytest.raises(Exception) as ex_info:
          CheckpointMetadata(path)
-     assert str(ex_info.value).startswith(expected_exception)
+     assert str(ex_info.value).startswith(expected_error_msg)
+     assert expected_error_msg in caplog.text
+     assert any(
+         record.exc_info is not None for record in caplog.records
+     ), "Log message does not contain the exception details, but it should"


- def test_checkpoint_metadata_get_checkpoint_exist(singleton):
+ def test_checkpoint_metadata_get_checkpoint_exist():
      path = os.path.dirname(os.path.abspath(__file__))
      path = os.path.join(path, "valid_checkpoint")
      metadata = CheckpointMetadata(path)
@@ -98,7 +112,7 @@ def test_checkpoint_metadata_get_checkpoint_exist(singleton):
      assert checkpoint == expected_checkpoint


- def test_checkpoint_metadata_get_checkpoint_not_exist(singleton):
+ def test_checkpoint_metadata_get_checkpoint_not_exist():
      path = os.path.dirname(os.path.abspath(__file__))
      path = os.path.join(path, "valid_checkpoint")
      metadata = CheckpointMetadata(path)
@@ -107,7 +121,7 @@ def test_checkpoint_metadata_get_checkpoint_not_exist(singleton):
      assert checkpoint == expected_checkpoint


- def test_checkpoint_metadata_get_checkpoint_no_file(singleton):
+ def test_checkpoint_metadata_get_checkpoint_no_file():
      path = tempfile.gettempdir()
      metadata = CheckpointMetadata(path)
      checkpoint = metadata.get_checkpoint("not-exist-checkpoint")
test/unit/test_checkpoint_name_utils.py
@@ -12,6 +12,9 @@
  # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  # See the License for the specific language governing permissions and
  # limitations under the License.
+
+ import logging
+
  import pytest

  from snowflake.snowpark_checkpoints_configuration.model.checkpoints import (
@@ -19,6 +22,9 @@ from snowflake.snowpark_checkpoints_configuration.model.checkpoints import (
  )


+ LOGGER_NAME = "snowflake.snowpark_checkpoints_configuration.model.checkpoints"
+
+
  @pytest.mark.parametrize(
      "input_value,expected_value",
      [("my checkpoint", "my_checkpoint"), ("my checkpoint ", "my__checkpoint_")],
@@ -73,10 +79,14 @@ def test_validate_checkpoint_name_valid_case(input_value):
      assert checkpoint.name == input_value


- @pytest.mark.parametrize(
-     "input_value", ["_", "5", "", "56_my_checkpoint", "my-check", "_+check"]
- )
- def test_checkpoint_invalid_name(input_value):
+ @pytest.mark.parametrize("input_value", ["_", "5", "", "56_my_checkpoint", "_+check"])
+ def test_checkpoint_invalid_name(input_value: str, caplog: pytest.LogCaptureFixture):
+     caplog.set_level(level=logging.ERROR, logger=LOGGER_NAME)
+     expected_error_msg = (
+         f"Invalid checkpoint name: {input_value} in checkpoints.json file. "
+         f"Checkpoint names must only contain alphanumeric characters and underscores."
+     )
+
      with pytest.raises(Exception) as ex_info:
          Checkpoint(
              name=input_value,
@@ -87,7 +97,6 @@ def test_checkpoint_invalid_name(input_value):
              location=1,
              enabled=True,
          )
-     assert (
-         f"Invalid checkpoint name: {Checkpoint.name}. Checkpoint names must only contain alphanumeric "
-         f"characters and underscores."
-     ) == str(ex_info.value)
+
+     assert str(ex_info.value).startswith(expected_error_msg)
+     assert expected_error_msg in caplog.text
test/unit/test_checkpoints.py
@@ -13,6 +13,9 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.

+ import logging
+ import pytest
+
  from snowflake.snowpark_checkpoints_configuration.model.checkpoints import (
      Checkpoint,
      Checkpoints,
@@ -20,8 +23,13 @@ from snowflake.snowpark_checkpoints_configuration.model.checkpoints import (
  )


- def test_add_checkpoint():
+ LOGGER_NAME = "snowflake.snowpark_checkpoints_configuration.model.checkpoints"
+
+
+ def test_add_checkpoint(caplog: pytest.LogCaptureFixture):
+     caplog.set_level(level=logging.DEBUG, logger=LOGGER_NAME)
      checkpoints = Checkpoints(type="Collection", pipelines=[])
+     normalized_checkpoint_name = "checkpoint_name"
      new_checkpoint = Checkpoint(
          name="checkpoint-name",
          df="df",
@@ -32,7 +40,10 @@ def test_add_checkpoint():
          enabled=False,
      )
      checkpoints.add_checkpoint(new_checkpoint)
-     assert checkpoints.get_check_point("checkpoint_name") == new_checkpoint
+     assert checkpoints.get_check_point(normalized_checkpoint_name) == new_checkpoint
+     assert (
+         f"Checkpoint '{normalized_checkpoint_name}' added successfully" in caplog.text
+     )


  def test_add_checkpoint_with_same_name():
@@ -87,9 +98,30 @@ def test_get_checkpoint_existing():
      assert checkpoints.get_check_point("checkpoint_name") == expected_checkpoint


- def test_get_checkpoint_non_existing():
+ def test_get_checkpoint_non_existing_empty_dict(caplog: pytest.LogCaptureFixture):
+     caplog.set_level(level=logging.DEBUG, logger=LOGGER_NAME)
      checkpoints = Checkpoints(type="Collection", pipelines=[])
+     checkpoint_name = "checkpoint-name-2"
+     checkpoint = checkpoints.get_check_point(checkpoint_name)
+     assert checkpoint == Checkpoint(name=checkpoint_name, enabled=True)
+     assert "creating a new enabled checkpoint" in caplog.text
+

-     assert checkpoints.get_check_point("checkpoint-name-2") == Checkpoint(
-         name="checkpoint-name-2", enabled=True
+ def test_get_checkpoint_no_existing_non_empty_dict(caplog: pytest.LogCaptureFixture):
+     caplog.set_level(level=logging.INFO, logger=LOGGER_NAME)
+     checkpoint = Checkpoint(
+         name="checkpoint_name_1",
+         df="df",
+         mode=1,
+         function="",
+         file="file",
+         location=1,
+         enabled=False,
      )
+     pipeline = Pipeline(entry_point="entry-point", checkpoints=[checkpoint])
+     checkpoints = Checkpoints(type="Collection", pipelines=[pipeline])
+     checkpoint_name = "checkpoint_name_2"
+     checkpoint = checkpoints.get_check_point(checkpoint_name)
+
+     assert checkpoint == Checkpoint(name=checkpoint_name, enabled=False)
+     assert "creating a new disabled checkpoint" in caplog.text
test/unit/test_logger.py (new file)
@@ -0,0 +1,128 @@
+ # Copyright 2025 Snowflake Inc.
+ # SPDX-License-Identifier: Apache-2.0
+
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+
+ # http://www.apache.org/licenses/LICENSE-2.0
+
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ import io
+ import logging
+
+ from collections.abc import Generator
+ from importlib import import_module
+ from typing import Any, Union
+
+ import pytest
+
+
+ TOP_LEVEL_LOGGER_NAME = "snowflake.snowpark_checkpoints_configuration"
+
+ LoggerDict = dict[str, Union[logging.Logger, logging.PlaceHolder]]
+ LoggerGenerator = Generator[logging.Logger, Any, None]
+
+
+ @pytest.fixture(name="registered_loggers", scope="module")
+ def fixture_registered_loggers() -> LoggerDict:
+     """Return a dictionary with all the registered loggers."""
+     # We need to import the snowflake.snowpark_checkpoints_configuration module
+     # to ensure the __init__.py file is executed.
+     import_module("snowflake.snowpark_checkpoints_configuration")
+
+     return logging.getLogger().manager.loggerDict
+
+
+ @pytest.fixture(name="top_level_logger", scope="module")
+ def fixture_top_level_logger(registered_loggers: LoggerDict) -> LoggerGenerator:
+     """Return the top-level logger of the snowpark-checkpoints-configuration package."""
+     logger = registered_loggers.get(TOP_LEVEL_LOGGER_NAME, None)
+     assert isinstance(logger, logging.Logger)
+
+     # Save the original state of the logger
+     original_handlers = logger.handlers
+     original_propagate = logger.propagate
+     original_level = logger.level
+     original_filters = logger.filters
+
+     yield logger
+
+     # Restore the original state of the logger
+     logger.handlers = original_handlers
+     logger.propagate = original_propagate
+     logger.setLevel(original_level)
+     logger.filters = original_filters
+
+
+ def test_loggers_exists(registered_loggers: LoggerDict):
+     """Validates that all the snowflake.snowpark_checkpoints_configuration loggers exist."""
+     logger_names = {
+         "snowflake.snowpark_checkpoints_configuration",
+         "snowflake.snowpark_checkpoints_configuration.checkpoint_metadata",
+         "snowflake.snowpark_checkpoints_configuration.model.checkpoints",
+     }
+
+     for logger_name in logger_names:
+         logger = registered_loggers.get(logger_name, None)
+         assert logger is not None
+         assert isinstance(logger, logging.Logger)
+
+
+ def test_top_level_logger_has_null_handler(top_level_logger: LoggerGenerator):
+     """Validates that the top-level logger has a single handler and that it is a NullHandler."""
+     assert isinstance(top_level_logger, logging.Logger)
+     assert len(top_level_logger.handlers) == 1
+     assert isinstance(top_level_logger.handlers[0], logging.NullHandler)
+
+
+ def test_top_level_logger_default_log_level(top_level_logger: LoggerGenerator):
+     """Validates that the default log level of the top-level logger is logging.WARNING."""
+     assert isinstance(top_level_logger, logging.Logger)
+     assert top_level_logger.getEffectiveLevel() == logging.WARNING
+
+
+ def test_child_logger_inheritance(
+     registered_loggers: LoggerDict, top_level_logger: LoggerGenerator
+ ):
+     """Validates the inheritance of the loggers."""
+     child_logger = registered_loggers.get(
+         f"{TOP_LEVEL_LOGGER_NAME}.checkpoint_metadata", None
+     )
+     assert isinstance(child_logger, logging.Logger)
+     assert child_logger.parent == top_level_logger
+
+
+ def test_log_propagation(
+     registered_loggers: LoggerDict, top_level_logger: LoggerGenerator
+ ):
+     """Validates that log messages are propagated from a child logger to the top-level logger."""
+     assert isinstance(top_level_logger, logging.Logger)
+
+     stream_handler = logging.StreamHandler(io.StringIO())
+     top_level_logger.addHandler(stream_handler)
+
+     child_logger = registered_loggers.get(
+         f"{TOP_LEVEL_LOGGER_NAME}.checkpoint_metadata", None
+     )
+     assert isinstance(child_logger, logging.Logger)
+     assert len(child_logger.handlers) == 0
+
+     child_logger.warning("Test message")
+     assert "Test message" in stream_handler.stream.getvalue()
+
+
+ def test_null_handler_supresses_output(
+     capsys: pytest.CaptureFixture[str], top_level_logger: LoggerGenerator
+ ):
+     """Validates that NullHandler suppresses output to stderr."""
+     assert isinstance(top_level_logger, logging.Logger)
+     top_level_logger.propagate = False
+     top_level_logger.error("This should not appear in stderr")
+     captured = capsys.readouterr()
+     assert captured.err == ""