stouputils-1.14.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (140)
  1. stouputils/__init__.py +40 -0
  2. stouputils/__main__.py +86 -0
  3. stouputils/_deprecated.py +37 -0
  4. stouputils/all_doctests.py +160 -0
  5. stouputils/applications/__init__.py +22 -0
  6. stouputils/applications/automatic_docs.py +634 -0
  7. stouputils/applications/upscaler/__init__.py +39 -0
  8. stouputils/applications/upscaler/config.py +128 -0
  9. stouputils/applications/upscaler/image.py +247 -0
  10. stouputils/applications/upscaler/video.py +287 -0
  11. stouputils/archive.py +344 -0
  12. stouputils/backup.py +488 -0
  13. stouputils/collections.py +244 -0
  14. stouputils/continuous_delivery/__init__.py +27 -0
  15. stouputils/continuous_delivery/cd_utils.py +243 -0
  16. stouputils/continuous_delivery/github.py +522 -0
  17. stouputils/continuous_delivery/pypi.py +130 -0
  18. stouputils/continuous_delivery/pyproject.py +147 -0
  19. stouputils/continuous_delivery/stubs.py +86 -0
  20. stouputils/ctx.py +408 -0
  21. stouputils/data_science/config/get.py +51 -0
  22. stouputils/data_science/config/set.py +125 -0
  23. stouputils/data_science/data_processing/image/__init__.py +66 -0
  24. stouputils/data_science/data_processing/image/auto_contrast.py +79 -0
  25. stouputils/data_science/data_processing/image/axis_flip.py +58 -0
  26. stouputils/data_science/data_processing/image/bias_field_correction.py +74 -0
  27. stouputils/data_science/data_processing/image/binary_threshold.py +73 -0
  28. stouputils/data_science/data_processing/image/blur.py +59 -0
  29. stouputils/data_science/data_processing/image/brightness.py +54 -0
  30. stouputils/data_science/data_processing/image/canny.py +110 -0
  31. stouputils/data_science/data_processing/image/clahe.py +92 -0
  32. stouputils/data_science/data_processing/image/common.py +30 -0
  33. stouputils/data_science/data_processing/image/contrast.py +53 -0
  34. stouputils/data_science/data_processing/image/curvature_flow_filter.py +74 -0
  35. stouputils/data_science/data_processing/image/denoise.py +378 -0
  36. stouputils/data_science/data_processing/image/histogram_equalization.py +123 -0
  37. stouputils/data_science/data_processing/image/invert.py +64 -0
  38. stouputils/data_science/data_processing/image/laplacian.py +60 -0
  39. stouputils/data_science/data_processing/image/median_blur.py +52 -0
  40. stouputils/data_science/data_processing/image/noise.py +59 -0
  41. stouputils/data_science/data_processing/image/normalize.py +65 -0
  42. stouputils/data_science/data_processing/image/random_erase.py +66 -0
  43. stouputils/data_science/data_processing/image/resize.py +69 -0
  44. stouputils/data_science/data_processing/image/rotation.py +80 -0
  45. stouputils/data_science/data_processing/image/salt_pepper.py +68 -0
  46. stouputils/data_science/data_processing/image/sharpening.py +55 -0
  47. stouputils/data_science/data_processing/image/shearing.py +64 -0
  48. stouputils/data_science/data_processing/image/threshold.py +64 -0
  49. stouputils/data_science/data_processing/image/translation.py +71 -0
  50. stouputils/data_science/data_processing/image/zoom.py +83 -0
  51. stouputils/data_science/data_processing/image_augmentation.py +118 -0
  52. stouputils/data_science/data_processing/image_preprocess.py +183 -0
  53. stouputils/data_science/data_processing/prosthesis_detection.py +359 -0
  54. stouputils/data_science/data_processing/technique.py +481 -0
  55. stouputils/data_science/dataset/__init__.py +45 -0
  56. stouputils/data_science/dataset/dataset.py +292 -0
  57. stouputils/data_science/dataset/dataset_loader.py +135 -0
  58. stouputils/data_science/dataset/grouping_strategy.py +296 -0
  59. stouputils/data_science/dataset/image_loader.py +100 -0
  60. stouputils/data_science/dataset/xy_tuple.py +696 -0
  61. stouputils/data_science/metric_dictionnary.py +106 -0
  62. stouputils/data_science/metric_utils.py +847 -0
  63. stouputils/data_science/mlflow_utils.py +206 -0
  64. stouputils/data_science/models/abstract_model.py +149 -0
  65. stouputils/data_science/models/all.py +85 -0
  66. stouputils/data_science/models/base_keras.py +765 -0
  67. stouputils/data_science/models/keras/all.py +38 -0
  68. stouputils/data_science/models/keras/convnext.py +62 -0
  69. stouputils/data_science/models/keras/densenet.py +50 -0
  70. stouputils/data_science/models/keras/efficientnet.py +60 -0
  71. stouputils/data_science/models/keras/mobilenet.py +56 -0
  72. stouputils/data_science/models/keras/resnet.py +52 -0
  73. stouputils/data_science/models/keras/squeezenet.py +233 -0
  74. stouputils/data_science/models/keras/vgg.py +42 -0
  75. stouputils/data_science/models/keras/xception.py +38 -0
  76. stouputils/data_science/models/keras_utils/callbacks/__init__.py +20 -0
  77. stouputils/data_science/models/keras_utils/callbacks/colored_progress_bar.py +219 -0
  78. stouputils/data_science/models/keras_utils/callbacks/learning_rate_finder.py +148 -0
  79. stouputils/data_science/models/keras_utils/callbacks/model_checkpoint_v2.py +31 -0
  80. stouputils/data_science/models/keras_utils/callbacks/progressive_unfreezing.py +249 -0
  81. stouputils/data_science/models/keras_utils/callbacks/warmup_scheduler.py +66 -0
  82. stouputils/data_science/models/keras_utils/losses/__init__.py +12 -0
  83. stouputils/data_science/models/keras_utils/losses/next_generation_loss.py +56 -0
  84. stouputils/data_science/models/keras_utils/visualizations.py +416 -0
  85. stouputils/data_science/models/model_interface.py +939 -0
  86. stouputils/data_science/models/sandbox.py +116 -0
  87. stouputils/data_science/range_tuple.py +234 -0
  88. stouputils/data_science/scripts/augment_dataset.py +77 -0
  89. stouputils/data_science/scripts/exhaustive_process.py +133 -0
  90. stouputils/data_science/scripts/preprocess_dataset.py +70 -0
  91. stouputils/data_science/scripts/routine.py +168 -0
  92. stouputils/data_science/utils.py +285 -0
  93. stouputils/decorators.py +605 -0
  94. stouputils/image.py +441 -0
  95. stouputils/installer/__init__.py +18 -0
  96. stouputils/installer/common.py +67 -0
  97. stouputils/installer/downloader.py +101 -0
  98. stouputils/installer/linux.py +144 -0
  99. stouputils/installer/main.py +223 -0
  100. stouputils/installer/windows.py +136 -0
  101. stouputils/io.py +486 -0
  102. stouputils/parallel.py +483 -0
  103. stouputils/print.py +482 -0
  104. stouputils/py.typed +1 -0
  105. stouputils/stouputils/__init__.pyi +15 -0
  106. stouputils/stouputils/_deprecated.pyi +12 -0
  107. stouputils/stouputils/all_doctests.pyi +46 -0
  108. stouputils/stouputils/applications/__init__.pyi +2 -0
  109. stouputils/stouputils/applications/automatic_docs.pyi +106 -0
  110. stouputils/stouputils/applications/upscaler/__init__.pyi +3 -0
  111. stouputils/stouputils/applications/upscaler/config.pyi +18 -0
  112. stouputils/stouputils/applications/upscaler/image.pyi +109 -0
  113. stouputils/stouputils/applications/upscaler/video.pyi +60 -0
  114. stouputils/stouputils/archive.pyi +67 -0
  115. stouputils/stouputils/backup.pyi +109 -0
  116. stouputils/stouputils/collections.pyi +86 -0
  117. stouputils/stouputils/continuous_delivery/__init__.pyi +5 -0
  118. stouputils/stouputils/continuous_delivery/cd_utils.pyi +129 -0
  119. stouputils/stouputils/continuous_delivery/github.pyi +162 -0
  120. stouputils/stouputils/continuous_delivery/pypi.pyi +53 -0
  121. stouputils/stouputils/continuous_delivery/pyproject.pyi +67 -0
  122. stouputils/stouputils/continuous_delivery/stubs.pyi +39 -0
  123. stouputils/stouputils/ctx.pyi +211 -0
  124. stouputils/stouputils/decorators.pyi +252 -0
  125. stouputils/stouputils/image.pyi +172 -0
  126. stouputils/stouputils/installer/__init__.pyi +5 -0
  127. stouputils/stouputils/installer/common.pyi +39 -0
  128. stouputils/stouputils/installer/downloader.pyi +24 -0
  129. stouputils/stouputils/installer/linux.pyi +39 -0
  130. stouputils/stouputils/installer/main.pyi +57 -0
  131. stouputils/stouputils/installer/windows.pyi +31 -0
  132. stouputils/stouputils/io.pyi +213 -0
  133. stouputils/stouputils/parallel.pyi +216 -0
  134. stouputils/stouputils/print.pyi +136 -0
  135. stouputils/stouputils/version_pkg.pyi +15 -0
  136. stouputils/version_pkg.py +189 -0
  137. stouputils-1.14.0.dist-info/METADATA +178 -0
  138. stouputils-1.14.0.dist-info/RECORD +140 -0
  139. stouputils-1.14.0.dist-info/WHEEL +4 -0
  140. stouputils-1.14.0.dist-info/entry_points.txt +3 -0
stouputils/continuous_delivery/pyproject.py ADDED
@@ -0,0 +1,147 @@
+ """ Utilities for reading, writing and managing pyproject.toml files.
+
+ This module provides functions to handle pyproject.toml files, including reading,
+ writing, version management and TOML formatting capabilities.
+
+ - read_pyproject: Read the pyproject.toml file.
+ - write_pyproject: Write to the pyproject.toml file.
+ - format_toml_lists: Format TOML lists with proper indentation.
+ - increment_version_from_input: Increment the patch version number.
+ - increment_version_from_pyproject: Increment version in pyproject.toml.
+ - get_version_from_pyproject: Get version from pyproject.toml.
+
+ .. image:: https://raw.githubusercontent.com/Stoupy51/stouputils/refs/heads/main/assets/continuous_delivery/pyproject_module.gif
+     :alt: stouputils pyproject examples
+ """
+
+ # Imports
+ from typing import Any
+
+ from ..io import super_open
+
+
+ def read_pyproject(pyproject_path: str) -> dict[str, Any]:
+     """ Read the pyproject.toml file.
+
+     Args:
+         pyproject_path: Path to the pyproject.toml file.
+     Returns:
+         dict[str, Any]: The content of the pyproject.toml file.
+     Example:
+         >>> content = read_pyproject("pyproject.toml")
+         >>> "." in content["project"]["version"]
+         True
+     """
+     from msgspec import toml
+     with open(pyproject_path) as file:
+         content = file.read()
+     return toml.decode(content)
+
+
+ def format_toml_lists(content: str) -> str:
+     """ Format TOML lists with indentation.
+
+     Args:
+         content (str): The content of the pyproject.toml file.
+     Returns:
+         str: The formatted content with properly indented lists.
+     Example:
+         >>> toml_content = '''[project]
+         ... dependencies = [ "tqdm>=4.0.0", "requests>=2.20.0", "pyyaml>=6.0.0", ]'''
+         >>> format_toml_lists(toml_content).replace("\\t", " ") == '''[project]
+         ... dependencies = [
+         ...  "tqdm>=4.0.0",
+         ...  "requests>=2.20.0",
+         ...  "pyyaml>=6.0.0",
+         ... ]'''
+         True
+     """
+     # Split the content into individual lines for processing
+     lines: list[str] = content.split("\n")
+     formatted_lines: list[str] = []
+
+     for line in lines:
+         # Check if line contains a list definition (has both [ ] and = characters)
+         if "[" in line and "]" in line and "=" in line:
+             # Only process simple lists that have one opening and closing bracket
+             if line.count("[") == 1 and line.count("]") == 1:
+                 # Split into key and values parts
+                 key, values = line.split("=", 1)
+                 values = values.strip()
+
+                 # Check if values portion is a list
+                 if values.startswith("[") and values.endswith("]"):
+                     # Parse list values, removing empty entries
+                     values = [v.strip() for v in values[1:-1].split(",") if v.strip()]
+
+                     # For lists with multiple items, format across multiple lines
+                     if len(values) > 1:
+                         formatted_lines.append(f"{key}= [")
+                         for value in values:
+                             formatted_lines.append(f"\t{value},")
+                         formatted_lines.append("]")
+                     # For single item lists, keep on one line
+                     else:
+                         formatted_lines.append(f"{key}= [{values[0]}]")
+                     continue
+
+         # Keep non-list lines unchanged
+         formatted_lines.append(line)
+
+     # Rejoin all lines with newlines
+     return "\n".join(formatted_lines)
+
+
+ def write_pyproject(path: str, content: dict[str, Any]) -> None:
+     """ Write to the pyproject.toml file with properly indented lists.
+
+     Args:
+         path: Path to the pyproject.toml file.
+         content: Content to write to the pyproject.toml file.
+     """
+     from msgspec.toml import _import_tomli_w  # pyright: ignore[reportPrivateUsage]
+     toml = _import_tomli_w()
+     string: str = "\n" + toml.dumps(content) + "\n"
+     string = format_toml_lists(string)  # Apply formatting
+
+     with super_open(path, "w") as file:
+         file.write(string)
+
+
+ def increment_version_from_input(version: str) -> str:
+     """ Increment the version.
+
+     Args:
+         version: The version to increment. (ex: "0.1.0")
+     Returns:
+         str: The incremented version. (ex: "0.1.1")
+     Example:
+         >>> increment_version_from_input("0.1.0")
+         '0.1.1'
+         >>> increment_version_from_input("1.2.9")
+         '1.2.10'
+     """
+     version_parts: list[str] = version.split(".")
+     version_parts[-1] = str(int(version_parts[-1]) + 1)
+     return ".".join(version_parts)
+
+ def increment_version_from_pyproject(path: str) -> None:
+     """ Increment the version in the pyproject.toml file.
+
+     Args:
+         path: Path to the pyproject.toml file.
+     """
+     pyproject_content: dict[str, Any] = read_pyproject(path)
+     pyproject_content["project"]["version"] = increment_version_from_input(pyproject_content["project"]["version"])
+     write_pyproject(path, pyproject_content)
+
+ def get_version_from_pyproject(path: str) -> str:
+     """ Get the version from the pyproject.toml file.
+
+     Args:
+         path: Path to the pyproject.toml file.
+     Returns:
+         str: The version. (ex: "0.1.0")
+     """
+     return read_pyproject(path)["project"]["version"]
+
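As a quick illustration of how these helpers fit together, here is a minimal version-bump sketch; the file path and the printed values are only examples, not part of the package:

    from stouputils.continuous_delivery.pyproject import (
        get_version_from_pyproject,
        increment_version_from_pyproject,
    )

    # Read the current version, bump the patch number in place, then read it back.
    # Assumes a pyproject.toml whose [project] table contains a "version" key.
    path = "pyproject.toml"
    print("before:", get_version_from_pyproject(path))  # e.g. "1.14.0"
    increment_version_from_pyproject(path)  # rewrites the file with formatted lists
    print("after:", get_version_from_pyproject(path))   # e.g. "1.14.1"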
stouputils/continuous_delivery/stubs.py ADDED
@@ -0,0 +1,86 @@
+ """ This module contains utilities for generating stub files using stubgen.
+
+ - generate_stubs: Generate stub files for a Python package using stubgen
+ - stubs_full_routine: Optionally clean the output directory, then generate stub files for a Python package
+
+ """
+
+ # Imports
+ import os
+ from collections.abc import Callable
+
+ from ..decorators import LogLevels, handle_error
+
+
+ def generate_stubs(
+     package_name: str,
+     extra_args: str = "--include-docstrings --include-private",
+ ) -> int:
+     """ Generate stub files for a Python package using stubgen.
+
+     Note: stubgen generates stubs in the 'out' directory by default in the current working directory.
+
+     Args:
+         package_name (str): Name of the package to generate stubs for.
+         extra_args (str): Extra arguments to pass to stubgen. Defaults to "--include-docstrings --include-private".
+     Returns:
+         int: 0 if stub generation succeeded, 1 otherwise.
+     """
+     try:
+         from mypy.stubgen import main as stubgen_main
+     except ImportError as e:
+         raise ImportError("mypy is required for the generate_stubs function. Please install it via 'pip install mypy'.") from e
+     try:
+         stubgen_main(["-p", package_name, *extra_args.split()])
+         return 0
+     except Exception:
+         return 1
+
+ def clean_stubs_directory(output_directory: str, package_name: str) -> None:
+     """ Clean the stubs directory by deleting all .pyi files.
+
+     Args:
+         output_directory (str): Directory to clean.
+         package_name (str): Package name subdirectory. Only cleans output_directory/package_name.
+     """
+     target_dir: str = os.path.join(output_directory, package_name)
+     if os.path.exists(target_dir):
+         for root, _, files in os.walk(target_dir):
+             for file in files:
+                 if file.endswith(".pyi"):
+                     os.remove(os.path.join(root, file))
+
+ @handle_error(message="Error while doing the stubs full routine", error_log=LogLevels.ERROR_TRACEBACK)
+ def stubs_full_routine(
+     package_name: str,
+     output_directory: str = "typings",
+     extra_args: str = "--include-docstrings --include-private",
+     clean_before: bool = False,
+     generate_stubs_function: Callable[[str, str], int] = generate_stubs,
+     clean_stubs_function: Callable[[str, str], None] = clean_stubs_directory,
+ ) -> None:
+     """ Generate stub files for a Python package using stubgen.
+
+     Note: stubgen generates stubs in the 'out' directory by default in the current working directory.
+
+     Args:
+         package_name (str): Name of the package to generate stubs for.
+         output_directory (str): Output directory for the generated stubs, also the directory
+             cleaned when clean_before is True. Defaults to "typings".
+         extra_args (str): Extra arguments to pass to stubgen. Defaults to "--include-docstrings --include-private".
+         clean_before (bool): Whether to clean the output directory before generating stubs. Defaults to False.
+         generate_stubs_function (Callable[[str, str], int]): Function to generate stubs.
+             Defaults to :func:`generate_stubs`.
+         clean_stubs_function (Callable[[str, str], None]): Function to clean the stubs directory.
+             Defaults to :func:`clean_stubs_directory`.
+     Raises:
+         Exception: If stub generation fails.
+     """
+     if clean_before:
+         clean_stubs_function(output_directory, package_name)
+     extra_args += f" -o {output_directory}"
+
+     if generate_stubs_function(package_name, extra_args) != 0:
+         raise Exception(f"Error while generating stubs for {package_name}")
+
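For context, a typical call to the routine above might look like this; the package name and options are illustrative, and the import path simply follows the file layout in this diff:

    from stouputils.continuous_delivery.stubs import stubs_full_routine

    # Remove previously generated .pyi files under typings/stouputils,
    # then run mypy's stubgen with docstrings and private members included.
    stubs_full_routine(
        package_name="stouputils",
        output_directory="typings",
        clean_before=True,
    )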
stouputils/ctx.py ADDED
@@ -0,0 +1,408 @@
+ """
+ This module provides context managers for various utilities such as logging to a file,
+ measuring execution time, silencing output, and setting multiprocessing start methods.
+
+ - LogToFile: Context manager to log to a file every print call (with LINE_UP handling)
+ - MeasureTime: Context manager to measure execution time of a code block
+ - Muffle: Context manager that temporarily silences output (alternative to stouputils.decorators.silent())
+ - DoNothing: Context manager that does nothing (no-op)
+ - SetMPStartMethod: Context manager to temporarily set multiprocessing start method
+
+ .. image:: https://raw.githubusercontent.com/Stoupy51/stouputils/refs/heads/main/assets/ctx_module.gif
+     :alt: stouputils ctx examples
+ """
+
+ # Imports
+ from __future__ import annotations
+
+ import os
+ import sys
+ import time
+ from collections.abc import Callable
+ from contextlib import AbstractAsyncContextManager, AbstractContextManager
+ from typing import IO, Any, TextIO, TypeVar
+
+ from .io import super_open
+ from .print import TeeMultiOutput, debug
+
+ # Type variable for context managers
+ T = TypeVar("T")
+
+ # Abstract base class for context managers supporting both sync and async usage
+ class AbstractBothContextManager[T](AbstractContextManager[T], AbstractAsyncContextManager[T]):
+     """ Abstract base class for context managers that support both synchronous and asynchronous usage. """
+     pass
+
+ # Context manager to log to a file
+ class LogToFile(AbstractBothContextManager["LogToFile"]):
+     """ Context manager to log to a file.
+
+     This context manager allows you to temporarily log output to a file while still printing normally.
+     The file will receive log messages without ANSI color codes.
+
+     Args:
+         path (str): Path to the log file
+         mode (str): Mode to open the file in (default: "w")
+         encoding (str): Encoding to use for the file (default: "utf-8")
+         tee_stdout (bool): Whether to redirect stdout to the file (default: True)
+         tee_stderr (bool): Whether to redirect stderr to the file (default: True)
+         ignore_lineup (bool): Whether to ignore lines containing LINE_UP escape sequence in files (default: True)
+         restore_on_exit (bool): Whether to restore original stdout/stderr on exit (default: False)
+             This ctx uses TeeMultiOutput which handles closed files gracefully, so restoring is not mandatory.
+
+     Examples:
+         .. code-block:: python
+
+             > import stouputils as stp
+             > with stp.LogToFile("output.log"):
+             >     stp.info("This will be logged to output.log and printed normally")
+             >     print("This will also be logged")
+
+             > with stp.LogToFile("output.log") as log_ctx:
+             >     stp.warning("This will be logged to output.log and printed normally")
+             >     log_ctx.change_file("new_file.log")
+             >     print("This will be logged to new_file.log")
+     """
+     def __init__(
+         self,
+         path: str,
+         mode: str = "w",
+         encoding: str = "utf-8",
+         tee_stdout: bool = True,
+         tee_stderr: bool = True,
+         ignore_lineup: bool = True,
+         restore_on_exit: bool = False
+     ) -> None:
+         self.path: str = path
+         """ Attribute remembering path to the log file """
+         self.mode: str = mode
+         """ Attribute remembering mode to open the file in """
+         self.encoding: str = encoding
+         """ Attribute remembering encoding to use for the file """
+         self.tee_stdout: bool = tee_stdout
+         """ Whether to redirect stdout to the file """
+         self.tee_stderr: bool = tee_stderr
+         """ Whether to redirect stderr to the file """
+         self.ignore_lineup: bool = ignore_lineup
+         """ Whether to ignore lines containing LINE_UP escape sequence in files """
+         self.restore_on_exit: bool = restore_on_exit
+         """ Whether to restore original stdout/stderr on exit.
+         This ctx uses TeeMultiOutput which handles closed files gracefully, so restoring is not mandatory. """
+         self.file: IO[Any]
+         """ Attribute remembering opened file """
+         self.original_stdout: TextIO
+         """ Original stdout before redirection """
+         self.original_stderr: TextIO
+         """ Original stderr before redirection """
+
+     def __enter__(self) -> LogToFile:
+         """ Enter context manager which opens the log file and redirects stdout/stderr """
+         # Open file
+         self.file = super_open(self.path, mode=self.mode, encoding=self.encoding)
+
+         # Redirect stdout and stderr if requested
+         if self.tee_stdout:
+             self.original_stdout = sys.stdout
+             sys.stdout = TeeMultiOutput(self.original_stdout, self.file, ignore_lineup=self.ignore_lineup)
+         if self.tee_stderr:
+             self.original_stderr = sys.stderr
+             sys.stderr = TeeMultiOutput(self.original_stderr, self.file, ignore_lineup=self.ignore_lineup)
+
+         # Return self
+         return self
+
+     def __exit__(self, exc_type: type[BaseException]|None, exc_val: BaseException|None, exc_tb: Any|None) -> None:
+         """ Exit context manager which closes the log file and restores stdout/stderr """
+         # Restore original stdout and stderr (if requested)
+         if self.restore_on_exit:
+             if self.tee_stdout:
+                 sys.stdout = self.original_stdout
+             if self.tee_stderr:
+                 sys.stderr = self.original_stderr
+
+         # Close file
+         self.file.close()
+
+     async def __aenter__(self) -> LogToFile:
+         """ Enter async context manager which opens the log file and redirects stdout/stderr """
+         return self.__enter__()
+
+     async def __aexit__(self, exc_type: type[BaseException]|None, exc_val: BaseException|None, exc_tb: Any|None) -> None:
+         """ Exit async context manager which closes the log file and restores stdout/stderr """
+         self.__exit__(exc_type, exc_val, exc_tb)
+
+     def change_file(self, new_path: str) -> None:
+         """ Change the log file to a new path.
+
+         Args:
+             new_path (str): New path to the log file
+         """
+         # Close current file, open new file and redirect outputs
+         self.file.close()
+         self.path = new_path
+         self.__enter__()
+
+     @staticmethod
+     def common(logs_folder: str, filepath: str, func: Callable[..., Any], *args: Any, **kwargs: Any) -> Any:
+         """ Common code used at the beginning of a program to launch the main function
+
+         Args:
+             logs_folder (str): Folder to store logs in
+             filepath (str): Path to the file containing the main function
+             func (Callable[..., Any]): Main function to launch
+             *args (tuple[Any, ...]): Arguments to pass to the main function
+             **kwargs (dict[str, Any]): Keyword arguments to pass to the main function
+         Returns:
+             Any: Return value of the main function
+
+         Examples:
+             >>> if __name__ == "__main__":
+             ...     LogToFile.common(f"{ROOT}/logs", __file__, main)
+         """
+         # Import datetime
+         from datetime import datetime
+
+         # Build log file path
+         file_basename: str = os.path.splitext(os.path.basename(filepath))[0]
+         date_time: str = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
+         date_str, time_str = date_time.split("_")
+         log_filepath: str = f"{logs_folder}/{file_basename}/{date_str}/{time_str}.log"
+
+         # Launch function with arguments if any
+         with LogToFile(log_filepath):
+             return func(*args, **kwargs)
+
+ # Context manager to measure execution time
+ class MeasureTime(AbstractBothContextManager["MeasureTime"]):
+     """ Context manager to measure execution time.
+
+     This context manager measures the execution time of the code block it wraps
+     and prints the result using a specified print function.
+
+     Args:
+         print_func (Callable): Function to use to print the execution time (e.g. debug, info, warning, error, etc.).
+         message (str): Message to display with the execution time. Defaults to "Execution time".
+         perf_counter (bool): Whether to use time.perf_counter_ns or time.time_ns. Defaults to True.
+
+     Examples:
+         .. code-block:: python
+
+             > import time
+             > import stouputils as stp
+             > with stp.MeasureTime(stp.info, message="My operation"):
+             ...     time.sleep(0.5)
+             > # [INFO HH:MM:SS] My operation: 500.123ms (500123456ns)
+
+             > with stp.MeasureTime():  # Uses debug by default
+             ...     time.sleep(0.1)
+             > # [DEBUG HH:MM:SS] Execution time: 100.456ms (100456789ns)
+     """
+     def __init__(
+         self,
+         print_func: Callable[..., None] = debug,
+         message: str = "Execution time",
+         perf_counter: bool = True
+     ) -> None:
+         self.print_func: Callable[..., None] = print_func
+         """ Function to use for printing the execution time """
+         self.message: str = message
+         """ Message to display with the execution time """
+         self.perf_counter: bool = perf_counter
+         """ Whether to use time.perf_counter_ns or time.time_ns """
+         self.ns: Callable[[], int] = time.perf_counter_ns if perf_counter else time.time_ns
+         """ Time function to use """
+         self.start_ns: int = 0
+         """ Start time in nanoseconds """
+
+     def __enter__(self) -> MeasureTime:
+         """ Enter context manager, record start time """
+         self.start_ns = self.ns()
+         return self
+
+     def __exit__(self, exc_type: type[BaseException]|None, exc_val: BaseException|None, exc_tb: Any|None) -> None:
+         """ Exit context manager, calculate duration and print """
+         # Measure the execution time (nanoseconds and seconds)
+         total_ns: int = self.ns() - self.start_ns
+         total_ms: float = total_ns / 1_000_000
+         total_s: float = total_ns / 1_000_000_000
+
+         # Print the execution time (milliseconds if less than 0.1s, seconds/minutes/hours/days otherwise)
+         if total_ms < 100:
+             self.print_func(f"{self.message}: {total_ms:.3f}ms ({total_ns}ns)")
+         elif total_s < 60:
+             self.print_func(f"{self.message}: {(total_s):.5f}s")
+         else:
+             minutes: int = int(total_s) // 60
+             seconds: int = int(total_s) % 60
+             if minutes < 60:
+                 self.print_func(f"{self.message}: {minutes}m {seconds}s")
+             else:
+                 hours: int = minutes // 60
+                 minutes = minutes % 60
+                 if hours < 24:
+                     self.print_func(f"{self.message}: {hours}h {minutes}m {seconds}s")
+                 else:
+                     days: int = hours // 24
+                     hours = hours % 24
+                     self.print_func(f"{self.message}: {days}d {hours}h {minutes}m {seconds}s")
+
+     async def __aenter__(self) -> MeasureTime:
+         """ Enter async context manager, record start time """
+         return self.__enter__()
+
+     async def __aexit__(self, exc_type: type[BaseException]|None, exc_val: BaseException|None, exc_tb: Any|None) -> None:
+         """ Exit async context manager, calculate duration and print """
+         self.__exit__(exc_type, exc_val, exc_tb)
+
+ # Context manager to temporarily silence output
+ class Muffle(AbstractBothContextManager["Muffle"]):
+     """ Context manager that temporarily silences output.
+     (No thread-safety guaranteed)
+
+     Alternative to stouputils.decorators.silent()
+
+     Examples:
+         >>> with Muffle():
+         ...     print("This will not be printed")
+     """
+     def __init__(self, mute_stderr: bool = False) -> None:
+         self.mute_stderr: bool = mute_stderr
+         """ Attribute remembering if stderr should be muted """
+         self.original_stdout: IO[Any]
+         """ Attribute remembering original stdout """
+         self.original_stderr: IO[Any]
+         """ Attribute remembering original stderr """
+
+     def __enter__(self) -> Muffle:
+         """ Enter context manager which redirects stdout and stderr to devnull """
+         # Redirect stdout to devnull
+         self.original_stdout = sys.stdout
+         sys.stdout = open(os.devnull, "w", encoding="utf-8")
+
+         # Redirect stderr to devnull if needed
+         if self.mute_stderr:
+             self.original_stderr = sys.stderr
+             sys.stderr = open(os.devnull, "w", encoding="utf-8")
+
+         # Return self
+         return self
+
+     def __exit__(self, exc_type: type[BaseException]|None, exc_val: BaseException|None, exc_tb: Any|None) -> None:
+         """ Exit context manager which restores original stdout and stderr """
+         # Restore original stdout
+         sys.stdout.close()
+         sys.stdout = self.original_stdout
+
+         # Restore original stderr if needed
+         if self.mute_stderr:
+             sys.stderr.close()
+             sys.stderr = self.original_stderr
+
+     async def __aenter__(self) -> Muffle:
+         """ Enter async context manager which redirects stdout and stderr to devnull """
+         return self.__enter__()
+
+     async def __aexit__(self, exc_type: type[BaseException]|None, exc_val: BaseException|None, exc_tb: Any|None) -> None:
+         """ Exit async context manager which restores original stdout and stderr """
+         self.__exit__(exc_type, exc_val, exc_tb)
+
+ # Context manager that does nothing
+ class DoNothing(AbstractBothContextManager["DoNothing"]):
+     """ Context manager that does nothing.
+
+     This is a no-op context manager that can be used as a placeholder
+     or for conditional context management.
+
+     Different from contextlib.nullcontext because it handles args and kwargs,
+     along with **async** context management.
+
+     Examples:
+         >>> with DoNothing():
+         ...     print("This will be printed normally")
+         This will be printed normally
+
+         >>> # Conditional context management
+         >>> some_condition = True
+         >>> ctx = DoNothing() if some_condition else Muffle()
+         >>> with ctx:
+         ...     print("May or may not be printed depending on condition")
+         May or may not be printed depending on condition
+     """
+     def __init__(self, *args: Any, **kwargs: Any) -> None:
+         """ No initialization needed, this is a no-op context manager """
+         pass
+
+     def __enter__(self) -> DoNothing:
+         """ Enter context manager (does nothing) """
+         return self
+
+     def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
+         """ Exit context manager (does nothing) """
+         pass
+
+     async def __aenter__(self) -> DoNothing:
+         """ Enter async context manager (does nothing) """
+         return self
+
+     async def __aexit__(self, *excinfo: Any) -> None:
+         """ Exit async context manager (does nothing) """
+         pass
+
+ NullContextManager = DoNothing
+ """ Alias for DoNothing context manager """
+
+ # Context manager to temporarily set multiprocessing start method
+ class SetMPStartMethod(AbstractBothContextManager["SetMPStartMethod"]):
+     """ Context manager to temporarily set the multiprocessing start method.
+
+     This context manager allows you to temporarily change the multiprocessing start method
+     and automatically restores the original method when exiting the context.
+
+     Args:
+         start_method (str | None): The start method to use: "spawn", "fork", or "forkserver"
+             (None leaves the current start method unchanged)
+
+     Examples:
+         .. code-block:: python
+
+             > import multiprocessing as mp
+             > import stouputils as stp
+             > # Temporarily use spawn method
+             > with stp.SetMPStartMethod("spawn"):
+             >     pass  # Your multiprocessing code here
+
+             > # Original method is automatically restored
+     """
+     def __init__(self, start_method: str | None) -> None:
+         self.start_method: str | None = start_method
+         """ The start method to use """
+         self.old_method: str | None = None
+         """ The original start method to restore """
+
+     def __enter__(self) -> SetMPStartMethod:
+         """ Enter context manager which sets the start method """
+         if self.start_method is None:
+             return self
+         import multiprocessing as mp
+
+         self.old_method = mp.get_start_method(allow_none=True)
+         if self.old_method != self.start_method:
+             mp.set_start_method(self.start_method, force=True)
+         return self
+
+     def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
+         """ Exit context manager which restores the original start method """
+         if self.start_method is None:
+             return
+         import multiprocessing as mp
+
+         if self.old_method != self.start_method:
+             mp.set_start_method(self.old_method, force=True)
+
+     async def __aenter__(self) -> SetMPStartMethod:
+         """ Enter async context manager which sets the start method """
+         return self.__enter__()
+
+     async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
+         """ Exit async context manager which restores the original start method """
+         self.__exit__(exc_type, exc_val, exc_tb)
+
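Combining the context managers above, a minimal usage sketch (importing from stouputils.ctx directly, which matches the file path in this diff; the log path and message are illustrative):

    import time

    from stouputils.ctx import LogToFile, MeasureTime, Muffle

    # Tee stdout/stderr to a log file, time the whole block,
    # and silence one deliberately noisy call inside it.
    with LogToFile("logs/example.log"), MeasureTime(message="Example step"):
        print("printed normally and written to logs/example.log")
        with Muffle(mute_stderr=True):
            print("this line is swallowed")
        time.sleep(0.05)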
stouputils/data_science/config/get.py ADDED
@@ -0,0 +1,51 @@
+ """ Load configuration from the set.py file and handle some special cases.
+
+ The proper way to get the configuration is to import this module, not the set.py file directly.
+ """
+
+ # pyright: reportUnknownMemberType=false
+ # pyright: reportUnknownVariableType=false
+ # pyright: reportMissingTypeStubs=false
+
+ # Imports
+ import os
+ from typing import Any
+
+ from .set import DataScienceConfig
+
+ # Special cases
+ # Hide the GPU when running on CPU
+ if DataScienceConfig.TENSORFLOW_DEVICE.lower().startswith("/cpu"):
+     os.environ["CUDA_VISIBLE_DEVICES"] = "-1"
+
+ # Otherwise, select which GPU to use
+ elif DataScienceConfig.TENSORFLOW_DEVICE.lower().startswith("/gpu"):
+     os.environ["CUDA_VISIBLE_DEVICES"] = DataScienceConfig.TENSORFLOW_DEVICE.split(":")[-1]
+
+ # Configure TensorFlow (if available)
+ try:
+     from tensorflow import config as tf_config
+
+     # Get the physical devices
+     physical_devices: list[Any] = tf_config.list_physical_devices("GPU")
+
+     # Configure TensorFlow GPU memory management to allocate memory dynamically
+     # This prevents TensorFlow from allocating all GPU memory upfront
+     # Instead, memory will grow as needed, allowing better resource sharing
+     for device in physical_devices:
+         tf_config.experimental.set_memory_growth(device, True)
+
+     # Disable eager execution of tf.functions
+     # This improves performance by letting TensorFlow build an optimized graph
+     # of operations instead of executing operations one by one (at the cost of harder debugging)
+     tf_config.run_functions_eagerly(False)
+ except ImportError:
+     pass
+
+ # Enable mixed precision training (if available)
+ try:
+     from keras import mixed_precision
+     mixed_precision.set_global_policy(DataScienceConfig.MIXED_PRECISION_POLICY)
+ except ImportError:
+     pass
+
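Because this module applies its configuration as an import side effect, downstream code only needs to import it before building any TensorFlow/Keras model; a minimal sketch (module path taken from the file list above):

    # Importing the module applies the device selection, GPU memory growth
    # and mixed-precision policy defined by DataScienceConfig in set.py.
    import stouputils.data_science.config.get  # noqa: F401  (imported for its side effects)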