pyadps 0.1.2__py3-none-any.whl → 0.1.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -34,7 +34,7 @@ if "add_attributes_DRW" not in st.session_state:
34
34
 
35
35
  if "file_prefix" not in st.session_state:
36
36
  raw_basename = os.path.basename(st.session_state.fname)
37
- st.session_state.filename = os.path.splitext(raw_basename)[0]
37
+ st.session_state.filename = os.path.splitext(raw_basename)[0]
38
38
  st.session_state.file_prefix = st.session_state.filename
39
39
 
40
40
 
@@ -45,7 +45,6 @@ if "filename" not in st.session_state:
45
45
  st.session_state.filename = "" # <-- Default file name if not passed
46
46
 
47
47
 
48
-
49
48
  ################ Functions #######################
50
49
  @st.cache_data()
51
50
  def file_access(uploaded_file):
@@ -63,6 +62,7 @@ def read_file(filepath):
63
62
  ds.fixensemble()
64
63
  st.session_state.ds = ds
65
64
 
65
+
66
66
  @st.cache_data
67
67
  def get_prefixed_filename(base_name):
68
68
  """Generates the file name with the optional prefix."""
@@ -74,57 +74,55 @@ def get_prefixed_filename(base_name):
74
74
@st.cache_data
def file_write(path, axis_option, add_attributes=True):
    """Convert the raw binary ADCP file to a NetCDF file.

    Args:
        path: Path of the uploaded binary ADCP file.
        axis_option: Time-axis option forwarded to ``wr.rawnc``.
        add_attributes: When True, the global attributes collected in
            ``st.session_state.attributes`` are written into the file.

    Side effects:
        Stores the output path in ``st.session_state.rawfilename``.
    """
    # mkdtemp() keeps the directory alive after return (the download
    # widgets read the file later) and, unlike
    # TemporaryDirectory(delete=False), works on Python < 3.12 where
    # the ``delete`` keyword does not exist.
    tempdirname = tempfile.mkdtemp()
    st.session_state.rawfilename = os.path.join(
        tempdirname, get_prefixed_filename("RAW_DAT.nc")
    )

    if add_attributes:
        wr.rawnc(
            path,
            st.session_state.rawfilename,
            axis_option=axis_option,
            attributes=st.session_state.attributes,
        )
    else:
        wr.rawnc(path, st.session_state.rawfilename, axis_option)
90
+
91
91
 
92
92
@st.cache_data
def file_write_flead(path, axis_option, add_attributes=True):
    """Write the fixed-leader (header) data to a NetCDF file.

    Args:
        path: Path of the uploaded binary ADCP file.
        axis_option: Time-axis option forwarded to ``wr.flead_nc``.
        add_attributes: When True, the global attributes collected in
            ``st.session_state.attributes`` are written into the file.

    Side effects:
        Stores the output path in ``st.session_state.fleadfilename``.
    """
    # mkdtemp() persists past return and is compatible with
    # Python < 3.12 (TemporaryDirectory has no ``delete`` kwarg there).
    tempvardirname = tempfile.mkdtemp()
    st.session_state.fleadfilename = os.path.join(
        tempvardirname, get_prefixed_filename("RAW_FIX.nc")
    )

    if add_attributes:
        wr.flead_nc(
            path,
            st.session_state.fleadfilename,
            axis_option=axis_option,
            attributes=st.session_state.attributes,
        )
    else:
        wr.flead_nc(path, st.session_state.fleadfilename, axis_option)
108
+
109
109
 
110
110
@st.cache_data
def file_write_vlead(path, axis_option, add_attributes=True):
    """Write the variable-leader data to a NetCDF file.

    Args:
        path: Path of the uploaded binary ADCP file.
        axis_option: Time-axis option forwarded to ``wr.vlead_nc``.
        add_attributes: When True, the global attributes collected in
            ``st.session_state.attributes`` are written into the file.

    Side effects:
        Stores the output path in ``st.session_state.vleadfilename``.
    """
    # mkdtemp() persists past return and is compatible with
    # Python < 3.12 (TemporaryDirectory has no ``delete`` kwarg there).
    tempvardirname = tempfile.mkdtemp()
    st.session_state.vleadfilename = os.path.join(
        tempvardirname, get_prefixed_filename("RAW_VAR.nc")
    )

    if add_attributes:
        wr.vlead_nc(
            path,
            st.session_state.vleadfilename,
            axis_option=axis_option,
            attributes=st.session_state.attributes,
        )
    else:
        wr.vlead_nc(path, st.session_state.vleadfilename, axis_option)
128
126
 
129
127
 
130
128
  if "axis_option" not in st.session_state:
@@ -92,11 +92,12 @@ def qc_submit():
92
92
  evt = st.session_state.evt_QCT
93
93
  ft = st.session_state.ft_QCT
94
94
  is3beam = st.session_state.is3beam_QCT
95
+ beam_ignore = st.session_state.beam_to_ignore
95
96
  mask = pg_check(ds, mask, pgt, threebeam=is3beam)
96
- mask = correlation_check(ds, mask, ct)
97
- mask = echo_check(ds, mask, et)
97
+ mask = correlation_check(ds, mask, ct,is3beam,beam_ignore=beam_ignore)
98
+ mask = echo_check(ds, mask, et,is3beam,beam_ignore=beam_ignore)
98
99
  mask = ev_check(ds, mask, evt)
99
- mask = false_target(ds, mask, ft, threebeam=True)
100
+ mask = false_target(ds, mask, ft, threebeam=is3beam, beam_ignore=beam_ignore)
100
101
  # Store the processed mask in a temporary mask
101
102
  st.session_state.qc_mask_temp = mask
102
103
 
@@ -337,6 +338,22 @@ with tab2:
337
338
  "Would you like to use a three-beam solution?", (True, False)
338
339
  )
339
340
 
341
+ if st.session_state.is3beam_QCT:
342
+ beam_label_to_value = {
343
+ "None": None,
344
+ "Beam 1": 0,
345
+ "Beam 2": 1,
346
+ "Beam 3": 2,
347
+ "Beam 4": 3
348
+ }
349
+
350
+ selected_beam = st.selectbox(
351
+ "Select Beam to Ignore",
352
+ options=list(beam_label_to_value.keys()),
353
+ index=0 # Default is "None"
354
+ )
355
+ st.session_state.beam_to_ignore = beam_label_to_value[selected_beam]
356
+
340
357
  st.session_state.pgt_QCT = st.number_input(
341
358
  "Select Percent Good Threshold",
342
359
  0,
@@ -1,6 +1,6 @@
1
1
  import configparser
2
2
  import tempfile
3
-
3
+ import os
4
4
  import numpy as np
5
5
  import pandas as pd
6
6
  import plotly.graph_objects as go
@@ -506,6 +506,8 @@ if generate_config_radio == "Yes":
506
506
  config["QCTest"]["error_velocity"] = str(st.session_state.evt_QCT)
507
507
  config["QCTest"]["false_target"] = str(st.session_state.ft_QCT)
508
508
  config["QCTest"]["three_beam"] = str(st.session_state.is3beam_QCT)
509
+ if st.session_state.is3beam_QCT:
510
+ config["QCTest"]["beam_ignore"] = str(st.session_state.beam_to_ignore)
509
511
  config["QCTest"]["percent_good"] = str(st.session_state.pgt_QCT)
510
512
 
511
513
  # Tab 4
@@ -0,0 +1,168 @@
1
+ import os
2
+ import tempfile
3
+ from pathlib import Path
4
+
5
+ import re
6
+ import io
7
+ import contextlib
8
+
9
+ import configparser
10
+ import streamlit as st
11
+ from utils.autoprocess import autoprocess
12
+ from utils.multifile import ADCPBinFileCombiner
13
+
14
+ # To make the page wider if the user presses the reload button.
15
+ st.set_page_config(layout="wide")
16
+
17
+
18
def ansi_to_html(text):
    """Convert ANSI console color codes to HTML span markup.

    Map the returned string to ``st.markdown(..., unsafe_allow_html=True)``
    to display colored console output in Streamlit.

    Args:
        text (str): Text that may contain ANSI SGR color sequences.

    Returns:
        str: Text with color codes replaced by HTML markup.
    """
    # Each color start also inserts a <br> so consecutive log messages
    # render on separate lines.  Plain str.replace suffices because the
    # codes are fixed literals (the original used re.sub on escaped
    # literals, and its trailing comments mislabeled the colors).
    ansi_to_markup = {
        "\x1b[31m": "<span style='color:red'><br>",  # red
        "\x1b[32m": "<span style='color:green'><br>",  # green
        "\x1b[33m": "<span style='color:orange'><br>",  # yellow -> orange
        "\x1b[0m": "</span>",  # reset
    }
    for code, markup in ansi_to_markup.items():
        text = text.replace(code, markup)
    return text
28
+
29
+
30
@st.cache_data
def file_access(uploaded_file):
    """Persist an uploaded file into a fresh temporary directory.

    Args:
        uploaded_file: Streamlit UploadedFile object (has ``.name`` and
            ``.getvalue()``).

    Returns:
        str: Filesystem path of the stored copy.
    """
    target = os.path.join(tempfile.mkdtemp(), uploaded_file.name)
    with open(target, "wb") as out:
        out.write(uploaded_file.getvalue())
    return target
47
+
48
+
49
def display_config_as_json(config_file):
    """Parse an uploaded ``config.ini`` file and render it as JSON in Streamlit."""
    parser = configparser.ConfigParser()
    parser.read_string(config_file.getvalue().decode("utf-8"))
    st.json({name: dict(parser[name]) for name in parser.sections()})
53
+
54
+
55
def main():
    """Render the Add-Ons page: auto-processing and binary-file combining."""
    st.title("🧰 Add-Ons")

    # --- Section 1: re-run processing from a saved config.ini -------------
    st.header("🔧 Auto Processing Tool", divider=True)
    st.write(
        "You can use a configuration file from `pyadps` to re-process ADCP data by simply adjusting threshold values within the file. "
        "This allows you to fine-tune the output without repeating the full processing workflow in the software."
    )
    st.write(
        "To begin, upload both a binary ADCP file and a `config.ini` file for processing."
    )

    # File Upload Section
    uploaded_binary_file = st.file_uploader(
        "Upload ADCP Binary File", type=["000", "bin"]
    )
    uploaded_config_file = st.file_uploader(
        "Upload Config File (config.ini)", type=["ini"]
    )

    if uploaded_binary_file and uploaded_config_file:
        st.success("Files uploaded successfully!")

        # Show the parsed configuration so the user can verify thresholds.
        display_config_as_json(uploaded_config_file)

        binary_path = file_access(uploaded_binary_file)
        with st.spinner("Processing files. Please wait..."):
            autoprocess(uploaded_config_file, binary_file_path=binary_path)
            st.success("Processing completed successfully!")
            st.write("Processed file written.")

    # --- Section 2: concatenate segmented binary files --------------------
    st.header("🔗 Binary File Combiner", divider=True)
    st.write(
        "ADCPs may produce multiple binary segments instead of a single continuous file. "
        "This tool scans each uploaded binary file for the `7f7f` header, removes any broken ensembles at the beginning or the end, and combines all valid segments into a single file. "
        "To ensure correct order during concatenation, please rename the files using sequential numbering. "
        "For example: `KKS_000.000`, `KKS_001.000`, `KKS_002.000`."
    )
    output_cat_filename = "merged_000.000"
    st.info(f"Current file name: **{output_cat_filename}**")
    rename_choice = st.radio(
        "Would you like to edit the output filename?",
        ["No", "Yes"],
        horizontal=True,
    )
    if rename_choice == "Yes":
        output_cat_filename = st.text_input(
            "Enter file name (e.g., GD10A000)",
            value=output_cat_filename,
        )

    display_log = st.radio(
        "Display log from console:",
        ["No", "Yes"],
        horizontal=True,
    )

    segment_uploads = st.file_uploader(
        "Upload multiple binary files", type=["bin", "000"], accept_multiple_files=True
    )

    if segment_uploads:
        st.info("Saving uploaded files to temporary disk files...")

        # Copy every upload onto disk so the combiner can read real paths.
        saved_paths = []
        for segment in segment_uploads:
            suffix = Path(segment.name).suffix
            with tempfile.NamedTemporaryFile(delete=False, suffix=suffix) as handle:
                handle.write(segment.read())
                saved_paths.append(Path(handle.name))

        st.divider()
        st.subheader("🛠 Processing and Combining...")

        combiner = ADCPBinFileCombiner()
        if display_log == "Yes":
            # Capture console output so it can be shown in the page.
            console_buf = io.StringIO()
            with contextlib.redirect_stdout(console_buf):
                combined_data = combiner.combine_files(saved_paths)
            st.markdown(ansi_to_html(console_buf.getvalue()), unsafe_allow_html=True)
        else:
            combined_data = combiner.combine_files(saved_paths)

        if combined_data:
            st.success("✅ Valid binary data has been combined successfully.")
            st.warning(
                "⚠️ Note: The time axis in the final file may be irregular due to missing ensembles during concatenation."
            )
            st.download_button(
                label="📥 Download Combined Binary File",
                data=bytes(combined_data),
                file_name=output_cat_filename,
                mime="application/octet-stream",
            )
        else:
            st.warning("⚠️ No valid data found to combine.")

        # Optional: Clean up temporary files
        for path in saved_paths:
            try:
                os.remove(path)
            except Exception as e:
                st.warning(f"Failed to delete temp file {path}: {e}")
    else:
        st.info("Please upload binary files to begin.")


if __name__ == "__main__":
    main()
pyadps/utils/__init__.py CHANGED
@@ -9,4 +9,6 @@ from pyadps.utils.signal_quality import *
9
9
  from pyadps.utils.velocity_test import *
10
10
  from pyadps.utils.writenc import *
11
11
  from pyadps.utils.autoprocess import *
12
+ from pyadps.utils.logging_utils import *
13
+ from pyadps.utils.multifile import *
12
14
  from pyadps.utils.script import *
@@ -192,15 +192,20 @@ def autoprocess(config_file, binary_file_path=None):
192
192
  evt = config.getint("QCTest", "error_velocity")
193
193
  et = config.getint("QCTest", "echo_intensity")
194
194
  ft = config.getint("QCTest", "false_target")
195
- is3Beam = config.getboolean("QCTest", "three_beam")
195
+ is3beam = config.getboolean("QCTest", "three_beam")
196
+ if is3beam != None:
197
+ is3beam = int(is3beam)
198
+ beam_ignore = config.get("QCTest","beam_ignore")
196
199
  pgt = config.getint("QCTest", "percent_good")
197
200
  orientation = config.get("QCTest", "orientation")
201
+ beam_ignore = config.getboolean("QCTest",)
198
202
 
199
- mask = pg_check(ds, mask, pgt, threebeam=is3Beam)
200
- mask = correlation_check(ds, mask, ct)
201
- mask = echo_check(ds, mask, et)
203
+ mask = pg_check(ds, mask, pgt, threebeam=is3beam)
204
+ mask = correlation_check(ds, mask, ct,is3beam,beam_ignore=beam_ignore)
205
+ mask = echo_check(ds, mask, et,is3beam,beam_ignore=beam_ignore)
202
206
  mask = ev_check(ds, mask, evt)
203
- mask = false_target(ds, mask, ft, threebeam=True)
207
+ mask = false_target(ds, mask, ft, threebeam=is3beam, beam_ignore=beam_ignore)
208
+
204
209
 
205
210
  print("QC Check Complete.")
206
211
 
@@ -0,0 +1,269 @@
1
+ """
2
+ Reusable Logging Utilities
3
+ A clean, configurable logging module that can be used across multiple projects.
4
+ """
5
+
6
+ import logging
7
+ import sys
8
+ from enum import Enum
9
+ from typing import Optional, Union
10
+ from pathlib import Path
11
+
12
+
13
class LogLevel(Enum):
    """Supported logging severities; values mirror the stdlib ``logging`` levels."""

    DEBUG = logging.DEBUG
    INFO = logging.INFO
    WARNING = logging.WARNING
    ERROR = logging.ERROR
    CRITICAL = logging.CRITICAL
21
+
22
+
23
class CustomFormatter(logging.Formatter):
    """Console formatter that colorizes records according to severity."""

    COLORS = {
        logging.DEBUG: "\x1b[36m",  # Cyan
        logging.INFO: "\x1b[32m",  # Green
        logging.WARNING: "\x1b[33m",  # Yellow
        logging.ERROR: "\x1b[31m",  # Red
        logging.CRITICAL: "\x1b[31;1m",  # Bold Red
    }
    RESET = "\x1b[0m"

    def __init__(self, include_timestamp: bool = True, include_module: bool = False):
        """Create the formatter.

        Args:
            include_timestamp: Prepend the record's timestamp to each line.
            include_module: Insert the logger (module) name into each line.
        """
        self.include_timestamp = include_timestamp
        self.include_module = include_module
        super().__init__()

    def format(self, record):
        """Render *record* using the configured layout, wrapped in ANSI color."""
        # Assemble the " - "-joined layout from the enabled components.
        parts = ["%(asctime)s"] if self.include_timestamp else []
        parts.append("%(levelname)s")
        if self.include_module:
            parts.append("%(name)s")
        parts.append("%(message)s")

        # Unknown levels fall back to no color; RESET is always appended.
        colored = self.COLORS.get(record.levelno, "") + " - ".join(parts) + self.RESET

        # Delegate the actual substitution to a plain stdlib Formatter.
        delegate = logging.Formatter(
            colored,
            datefmt="%Y-%m-%d %H:%M:%S" if self.include_timestamp else None,
        )
        return delegate.format(record)
73
+
74
+
75
class LoggerConfig:
    """Bundle of settings describing how a logger should be set up."""

    def __init__(
        self,
        level: LogLevel = LogLevel.INFO,
        include_timestamp: bool = True,
        include_module: bool = False,
        log_to_file: bool = False,
        log_file_path: Optional[Union[str, Path]] = None,
        file_log_level: Optional[LogLevel] = None,
        max_file_size: int = 10 * 1024 * 1024,  # 10MB
        backup_count: int = 5,
    ):
        """Capture logger settings.

        Args:
            level: Console logging level.
            include_timestamp: Include timestamp in console output.
            include_module: Include module name in output.
            log_to_file: Whether to also log to file.
            log_file_path: Path for log file (if log_to_file is True).
            file_log_level: File logging level (defaults to console level).
            max_file_size: Maximum size of log file before rotation.
            backup_count: Number of backup files to keep.
        """
        self.level = level
        self.include_timestamp = include_timestamp
        self.include_module = include_module
        self.log_to_file = log_to_file
        # Normalize to pathlib.Path so handlers can create parent dirs.
        self.log_file_path = Path(log_file_path) if log_file_path else None
        # The file handler inherits the console level unless overridden.
        self.file_log_level = file_log_level or level
        self.max_file_size = max_file_size
        self.backup_count = backup_count
110
+
111
+
112
class LoggerManager:
    """Creates, configures, and caches application loggers."""

    _loggers = {}  # Cache of loggers created by setup_logger

    @classmethod
    def setup_logger(
        cls, name: str = "app", config: Optional["LoggerConfig"] = None
    ) -> logging.Logger:
        """Set up and configure a logger with the given configuration.

        Args:
            name: Logger name.
            config: Logger configuration (uses defaults if None).

        Returns:
            Configured logger instance.
        """
        if config is None:
            config = LoggerConfig()

        # NOTE(review): id(config) can be reused after the config object is
        # garbage-collected, so this key is only reliable while configs are
        # kept alive — consider keying on the config's field values instead.
        cache_key = f"{name}_{id(config)}"
        if cache_key in cls._loggers:
            return cls._loggers[cache_key]

        logger = logging.getLogger(name)
        logger.setLevel(config.level.value)

        # Remove handlers from any previous setup to avoid duplicate output.
        logger.handlers.clear()

        # Console handler with colored output.
        console_handler = logging.StreamHandler(sys.stdout)
        console_handler.setLevel(config.level.value)
        console_handler.setFormatter(
            CustomFormatter(
                include_timestamp=config.include_timestamp,
                include_module=config.include_module,
            )
        )
        logger.addHandler(console_handler)

        # Optional rotating file handler.
        if config.log_to_file and config.log_file_path:
            cls._add_file_handler(logger, config)

        # Handlers are attached here directly; don't also bubble up to root.
        logger.propagate = False

        cls._loggers[cache_key] = logger
        return logger

    @classmethod
    def _add_file_handler(cls, logger: logging.Logger, config: "LoggerConfig"):
        """Attach the size-rotating file handler described by *config*."""
        from logging.handlers import RotatingFileHandler

        # Ensure log directory exists.
        config.log_file_path.parent.mkdir(parents=True, exist_ok=True)

        file_handler = RotatingFileHandler(
            config.log_file_path,
            maxBytes=config.max_file_size,
            backupCount=config.backup_count,
        )
        file_handler.setLevel(config.file_log_level.value)

        # File logs carry extra context (function name, line number).
        file_handler.setFormatter(
            logging.Formatter(
                "%(asctime)s - %(name)s - %(levelname)s - %(funcName)s:%(lineno)d - %(message)s",
                datefmt="%Y-%m-%d %H:%M:%S",
            )
        )
        logger.addHandler(file_handler)

    @classmethod
    def get_logger(cls, name: str = "app") -> logging.Logger:
        """Return *name*'s logger, configuring it with defaults if needed.

        Fixes the original ``logging.getLogger(name) or self.setup_logger(name)``:
        Logger objects are always truthy, so the fallback was dead code and
        unconfigured loggers were returned without any handlers attached.
        """
        logger = logging.getLogger(name)
        return logger if logger.handlers else cls.setup_logger(name)

    @classmethod
    def clear_cache(cls):
        """Clear logger cache (useful for testing)."""
        cls._loggers.clear()
200
+
201
+
202
+ # Convenience functions for quick setup
203
def get_console_logger(
    name: str = "app", level: LogLevel = LogLevel.INFO, include_timestamp: bool = True
) -> logging.Logger:
    """Convenience wrapper: configure a console-only logger."""
    return LoggerManager.setup_logger(
        name,
        LoggerConfig(
            level=level, include_timestamp=include_timestamp, include_module=False
        ),
    )
211
+
212
+
213
def get_file_logger(
    name: str = "app",
    log_file: Union[str, Path] = "app.log",
    level: LogLevel = LogLevel.INFO,
    file_level: Optional[LogLevel] = None,
) -> logging.Logger:
    """Convenience wrapper: logger that writes to both console and file."""
    cfg = LoggerConfig(
        level=level,
        log_to_file=True,
        log_file_path=log_file,
        # File output defaults to DEBUG (more verbose than the console).
        file_log_level=file_level or LogLevel.DEBUG,
    )
    return LoggerManager.setup_logger(name, cfg)
227
+
228
+
229
def get_detailed_logger(
    name: str = "app",
    log_file: Union[str, Path] = "app.log",
    console_level: LogLevel = LogLevel.INFO,
    file_level: LogLevel = LogLevel.DEBUG,
) -> logging.Logger:
    """Console + rotating-file logger with timestamps and module names."""
    return LoggerManager.setup_logger(
        name,
        LoggerConfig(
            level=console_level,
            include_timestamp=True,
            include_module=True,
            log_to_file=True,
            log_file_path=log_file,
            file_log_level=file_level,
        ),
    )
245
+
246
+
247
# Example usage
if __name__ == "__main__":
    # Demonstrate the three convenience constructors.

    # 1) Console-only logger at DEBUG level.
    console_demo = get_console_logger("simple", LogLevel.DEBUG)
    console_demo.debug("Debug message")
    console_demo.info("Info message")
    console_demo.warning("Warning message")
    console_demo.error("Error message")

    print("\n" + "=" * 50 + "\n")

    # 2) Console + file logger.
    file_demo = get_file_logger("file_test", "test.log", LogLevel.INFO)
    file_demo.info("This goes to both console and file")
    file_demo.debug("This only goes to file")

    print("\n" + "=" * 50 + "\n")

    # 3) Detailed logger (module names, rotating file).
    detailed_demo = get_detailed_logger("detailed", "detailed.log")
    detailed_demo.info("Detailed logging with module names")