samgis_core 2.0.2__tar.gz → 3.0.1__tar.gz

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: samgis_core
- Version: 2.0.2
+ Version: 3.0.1
  Summary: SamGIS CORE
  License: MIT
  Author: alessandro trinca tornidor
@@ -16,6 +16,8 @@ Requires-Dist: numpy (==1.25.2) ; python_version >= "3.10" and python_version <
  Requires-Dist: numpy (>=1.26,<2.0) ; python_version >= "3.11" and python_version < "3.12"
  Requires-Dist: onnxruntime (>=1.18.1,<2.0.0)
  Requires-Dist: pillow (>=10.4.0,<11.0.0)
+ Requires-Dist: python-dotenv (>=1.0.1,<2.0.0)
+ Requires-Dist: structlog (>=24.4.0,<25.0.0)
  Project-URL: Source, https://gitlab.com/aletrn/samgis_core
  Description-Content-Type: text/markdown
@@ -1,6 +1,6 @@
  [tool.poetry]
  name = "samgis_core"
- version = "2.0.2"
+ version = "3.0.1"
  description = "SamGIS CORE"
  authors = ["alessandro trinca tornidor <alessandro@trinca.tornidor.com>"]
  license = "MIT license"
@@ -8,26 +8,29 @@ readme = "README.md"

  [metadata]
  name = "samgis_core"
- version = "2.0.2"
+ version = "3.0.1"

  [tool.poetry.urls]
  Source = "https://gitlab.com/aletrn/samgis_core"

  [tool.poetry.dependencies]
+ bson = "^0.5.10"
+ loguru = "^0.7.2"
  numpy = [
      {version = "1.25.2", python = "~3.10"},
      {version = "^1.26", python = "~3.11"}
  ]
  pillow = "^10.4.0"
  python = ">=3.10, <3.12"
+ python-dotenv = "^1.0.1"
  onnxruntime = "^1.18.1"
- loguru = "^0.7.2"
- bson = "^0.5.10"
+ structlog = "^24.4.0"

  [tool.poetry.group.test]
  optional = true

  [tool.poetry.group.test.dependencies]
+ mpld3 = "^0.5.10"
  pytest = "^8.2.2"
  pytest-cov = "^5.0.0"
@@ -0,0 +1,18 @@
+ """Get machine learning predictions from geodata raster images"""
+ import os
+ from pathlib import Path
+
+ import structlog
+ from dotenv import load_dotenv
+
+ from samgis_core.utilities import session_logger
+
+
+ load_dotenv()
+ PROJECT_ROOT_FOLDER = Path(globals().get("__file__", "./_")).absolute().parent.parent
+ PROJECT_MODEL_FOLDER = Path(PROJECT_ROOT_FOLDER / "machine_learning_models")
+ MODEL_FOLDER = os.getenv("MODEL_FOLDER", PROJECT_MODEL_FOLDER)
+ LOG_JSON_FORMAT = bool(os.getenv("LOG_JSON_FORMAT", False))
+ log_level = os.getenv("LOG_LEVEL", "INFO")
+ session_logger.setup_logging(json_logs=LOG_JSON_FORMAT, log_level=log_level)
+ app_logger = structlog.stdlib.get_logger(__name__)
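
The new package initializer above loads configuration from environment variables (via python-dotenv) and replaces the previous loguru-based logger with a structlog one configured by session_logger.setup_logging. A minimal usage sketch, not part of the package, assuming the variables are set before the first import (the values shown are purely illustrative):

    # hypothetical consumer of samgis_core 3.x; the variable names mirror those
    # read in samgis_core/__init__.py, the values are illustrative only
    import os

    os.environ.setdefault("MODEL_FOLDER", "/opt/models")   # example .env entry
    os.environ.setdefault("LOG_LEVEL", "DEBUG")            # defaults to INFO

    from samgis_core import MODEL_FOLDER, app_logger

    app_logger.info(f"models expected under {MODEL_FOLDER}.")

Note that LOG_JSON_FORMAT is read with bool(os.getenv(...)), so any non-empty string, including "false", enables JSON logs.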
@@ -62,7 +62,7 @@ class SegmentAnythingONNX2:
              encoder_model_path, providers=providers
          )
          self.encoder_input_name = self.encoder_session.get_inputs()[0].name
-         app_logger.info("encoder_input_name:", self.encoder_input_name)
+         app_logger.info(f"encoder_input_name:{self.encoder_input_name}.")
          self.decoder_session = InferenceSession(
              decoder_model_path, providers=providers
          )
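
The one-line change above swaps a loguru-style call (message plus a positional value) for an f-string, which fits the new stdlib/structlog pipeline: structlog.stdlib.PositionalArgumentsFormatter would try to %-format the extra argument into a message that has no placeholder. A small hedged sketch of two styles that work with the new logger (values are illustrative, not from the package):

    import structlog

    from samgis_core.utilities import session_logger

    session_logger.setup_logging()  # console renderer, INFO level
    app_logger = structlog.stdlib.get_logger(__name__)
    encoder_input_name = "images"   # illustrative value

    # style used by the diff: interpolate the value into the message
    app_logger.info(f"encoder_input_name:{encoder_input_name}.")

    # structlog alternative (not what the diff does): bind it as structured data
    app_logger.info("encoder_input_name", value=encoder_input_name)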
@@ -0,0 +1,59 @@
+ import json
+ import logging
+ import os
+ from pathlib import Path
+
+
+ def stats_pathname(pathname: Path | str):
+     current_pathname = Path(pathname)
+     return current_pathname.is_dir()
+
+
+ def create_folder_if_not_exists(pathname: Path | str):
+     current_pathname = Path(pathname)
+     try:
+         print(f"Pathname exists? {current_pathname.exists()}, That's a folder? {current_pathname.is_dir()}...")
+         logging.info(f"Pathname exists? {current_pathname.exists()}, That's a folder? {current_pathname.is_dir()}...")
+         current_pathname.unlink(missing_ok=True)
+     except PermissionError as pe:
+         print(f"permission denied on removing pathname before folder creation:{pe}.")
+         logging.error(f"permission denied on removing pathname before folder creation:{pe}.")
+     except IsADirectoryError as errdir:
+         print(f"that's a directory:{errdir}.")
+         logging.error(f"that's a directory:{errdir}.")
+
+     print(f"Creating pathname: {current_pathname} ...")
+     logging.info(f"Creating pathname: {current_pathname} ...")
+     current_pathname.mkdir(mode=0o770, parents=True, exist_ok=True)
+
+     print(f"assertion: pathname exists and is a folder: {current_pathname} ...")
+     logging.info(f"assertion: pathname exists and is a folder: {current_pathname} ...")
+     assert current_pathname.is_dir()
+
+
+ def folders_creation(folders_map: dict | str = None, ignore_errors: bool = True):
+     enforce_validation_with_getenv = folders_map is None
+     if enforce_validation_with_getenv:
+         folders_map = os.getenv("FOLDERS_MAP")
+     try:
+         folders_dict = folders_map if isinstance(folders_map, dict) else json.loads(folders_map)
+         for folder_env_ref, folder_env_path in folders_dict.items():
+             logging.info(f"folder_env_ref:{folder_env_ref}, folder_env_path:{folder_env_path}.")
+             create_folder_if_not_exists(folder_env_path)
+             print("========")
+             if enforce_validation_with_getenv:
+                 assert os.getenv(folder_env_ref) == folder_env_path
+     except (json.JSONDecodeError, TypeError) as jde:
+         logging.error(f"jde:{jde}.")
+         msg = "double check your variables, e.g. for misspelling like 'FOLDER_MAP'"
+         msg += "instead than 'FOLDERS_MAP', or invalid json values."
+         logging.error(msg)
+         for k_env, v_env in dict(os.environ).items():
+             logging.info(f"{k_env}, v_env:{v_env}.")
+         if not ignore_errors:
+             raise TypeError(jde)
+
+
+ if __name__ == '__main__':
+     folders_creation()
+
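
The new utility module above (its file name is not shown in this diff view) creates every folder listed in a JSON map; when the map is read from the FOLDERS_MAP environment variable it also asserts that each key is exported as a variable pointing to the same path. A minimal sketch of driving it, assuming the module lives at samgis_core.utilities.create_folders_if_not_exists (the module path and the folder paths below are illustrative):

    import json
    import os

    # hypothetical map: each key is an environment variable name, each value
    # the folder it should point to
    folders = {"MODEL_FOLDER": "/tmp/samgis/models", "OUTPUT_FOLDER": "/tmp/samgis/output"}
    os.environ["FOLDERS_MAP"] = json.dumps(folders)
    for name, path in folders.items():
        os.environ[name] = path  # so the getenv cross-check assertion passes

    from samgis_core.utilities.create_folders_if_not_exists import folders_creation  # module path assumed

    folders_creation()         # reads FOLDERS_MAP from the environment
    folders_creation(folders)  # or pass the dict directly (skips the env cross-check)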
@@ -0,0 +1,129 @@
+ import logging
+ import sys
+
+ import structlog
+ from structlog.types import EventDict, Processor
+
+
+ # https://github.com/hynek/structlog/issues/35#issuecomment-591321744
+ def rename_event_key(_, __, event_dict: EventDict) -> EventDict:
+     """
+     Log entries keep the text message in the `event` field, but Datadog
+     uses the `message` field. This processor moves the value from one field to
+     the other.
+     See https://github.com/hynek/structlog/issues/35#issuecomment-591321744
+     """
+     event_dict["message"] = event_dict.pop("event")
+     return event_dict
+
+
+ def drop_color_message_key(_, __, event_dict: EventDict) -> EventDict:
+     """
+     Uvicorn logs the message a second time in the extra `color_message`, but we don't
+     need it. This processor drops the key from the event dict if it exists.
+     """
+     event_dict.pop("color_message", None)
+     return event_dict
+
+
+ def setup_logging(json_logs: bool = False, log_level: str = "INFO"):
+     timestamper = structlog.processors.TimeStamper(fmt="iso")
+
+     shared_processors: list[Processor] = [
+         structlog.contextvars.merge_contextvars,
+         structlog.stdlib.add_logger_name,
+         structlog.stdlib.add_log_level,
+         structlog.stdlib.PositionalArgumentsFormatter(),
+         structlog.stdlib.ExtraAdder(),
+         drop_color_message_key,
+         timestamper,
+         structlog.processors.StackInfoRenderer(),
+         # adapted from https://www.structlog.org/en/stable/standard-library.html
+         # If the "exc_info" key in the event dict is either true or a
+         # sys.exc_info() tuple, remove "exc_info" and render the exception
+         # with traceback into the "exception" key.
+         structlog.processors.format_exc_info,
+         # If some value is in bytes, decode it to a Unicode str.
+         structlog.processors.UnicodeDecoder(),
+         # Add callsite parameters.
+         structlog.processors.CallsiteParameterAdder(
+             {
+                 structlog.processors.CallsiteParameter.FUNC_NAME,
+                 structlog.processors.CallsiteParameter.LINENO,
+             }
+         ),
+         # Render the final event dict as JSON.
+     ]
+
+     if json_logs:
+         # We rename the `event` key to `message` only in JSON logs, as Datadog looks for the
+         # `message` key but the pretty ConsoleRenderer looks for `event`
+         shared_processors.append(rename_event_key)
+         # Format the exception only for JSON logs, as we want to pretty-print them when
+         # using the ConsoleRenderer
+         shared_processors.append(structlog.processors.format_exc_info)
+
+     structlog.configure(
+         processors=shared_processors
+         + [
+             # Prepare event dict for `ProcessorFormatter`.
+             structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
+         ],
+         logger_factory=structlog.stdlib.LoggerFactory(),
+         cache_logger_on_first_use=True,
+     )
+
+     log_renderer: structlog.types.Processor
+     if json_logs:
+         log_renderer = structlog.processors.JSONRenderer()
+     else:
+         log_renderer = structlog.dev.ConsoleRenderer()
+
+     formatter = structlog.stdlib.ProcessorFormatter(
+         # These run ONLY on `logging` entries that do NOT originate within
+         # structlog.
+         foreign_pre_chain=shared_processors,
+         # These run on ALL entries after the pre_chain is done.
+         processors=[
+             # Remove _record & _from_structlog.
+             structlog.stdlib.ProcessorFormatter.remove_processors_meta,
+             log_renderer,
+         ],
+     )
+
+     handler = logging.StreamHandler()
+     # Use OUR `ProcessorFormatter` to format all `logging` entries.
+     handler.setFormatter(formatter)
+     root_logger = logging.getLogger()
+     root_logger.addHandler(handler)
+     root_logger.setLevel(log_level.upper())
+
+     for _log in ["uvicorn", "uvicorn.error"]:
+         # Clear the log handlers for uvicorn loggers, and enable propagation
+         # so the messages are caught by our root logger and formatted correctly
+         # by structlog
+         logging.getLogger(_log).handlers.clear()
+         logging.getLogger(_log).propagate = True
+
+     # Since we re-create the access logs ourselves, to add all information
+     # in the structured log (see the `logging_middleware` in main.py), we clear
+     # the handlers and prevent the logs to propagate to a logger higher up in the
+     # hierarchy (effectively rendering them silent).
+     logging.getLogger("uvicorn.access").handlers.clear()
+     logging.getLogger("uvicorn.access").propagate = False
+
+     def handle_exception(exc_type, exc_value, exc_traceback):
+         """
+         Log any uncaught exception instead of letting it be printed by Python
+         (but leave KeyboardInterrupt untouched to allow users to Ctrl+C to stop)
+         See https://stackoverflow.com/a/16993115/3641865
+         """
+         if issubclass(exc_type, KeyboardInterrupt):
+             sys.__excepthook__(exc_type, exc_value, exc_traceback)
+             return
+
+         root_logger.error(
+             "Uncaught exception", exc_info=(exc_type, exc_value, exc_traceback)
+         )
+
+     sys.excepthook = handle_exception
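
session_logger.setup_logging above routes both structlog events and plain standard-library records through one ProcessorFormatter, so third-party logs (e.g. uvicorn) share the same renderer. A minimal sketch of calling it directly, mirroring the json_logs/log_level parameters defined above:

    import logging

    import structlog

    from samgis_core.utilities import session_logger

    # json_logs=True renders every record as JSON (e.g. for Datadog);
    # False keeps the human-friendly ConsoleRenderer
    session_logger.setup_logging(json_logs=True, log_level="DEBUG")

    log = structlog.stdlib.get_logger(__name__)
    log.info("structlog record", step="encode")
    logging.getLogger("some.third_party").warning("stdlib record, same formatter")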
@@ -6,6 +6,9 @@ from PIL.Image import Image
  from numpy import ndarray


+ class ListStr(list[str]): pass
+
+
  class DictStrInt(dict[str, int]): pass

@@ -1,9 +0,0 @@
- """Get machine learning predictions from geodata raster images"""
- from pathlib import Path
-
- from samgis_core.utilities.fastapi_logger import setup_logging
-
-
- app_logger = setup_logging(debug=True)
- PROJECT_ROOT_FOLDER = Path(globals().get("__file__", "./_")).absolute().parent.parent
- MODEL_FOLDER = Path(PROJECT_ROOT_FOLDER / "machine_learning_models")
@@ -1,26 +0,0 @@
- import loguru
-
-
- format_string = "{time} - {level} - {file} - {function} - ({extra[request_id]}) {message} "
-
-
- def setup_logging(debug: bool = False, formatter: str = format_string) -> loguru.logger:
-     """
-     Create a logging instance with log string formatter.
-
-     Args:
-         debug: logging debug argument
-         formatter: log string formatter
-
-     Returns:
-         Logger
-
-     """
-     import sys
-
-     logger = loguru.logger
-     logger.remove()
-     level_logger = "DEBUG" if debug else "INFO"
-     logger.add(sys.stdout, format=formatter, level=level_logger)
-     logger.info(f"type_logger:{type(logger)}, logger:{logger}.")
-     return logger
File without changes
File without changes