appkit-commons 0.7.1 (tar.gz)

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,114 @@
+ __pycache__/
+ __pypackages__/
+ .cache
+ .coverage
+ .coverage.*
+ .dmypy.json
+ .DS_Store
+ .eggs/
+ .env
+ .env.backup
+ .env.docker
+ .hypothesis/
+ .idea/
+ .installed.cfg
+ .ipynb_checkpoints
+ .mypy_cache/
+ .nox/
+ .pdm.toml
+ .pybuilder/
+ .pyre/
+ .pytest_cache/
+ .Python
+ .python_packages
+ .pytype/
+ .ropeproject
+ .scrapy
+ .spyderproject
+ .spyproject
+ .states
+ .tox/
+ .venv
+ .venv.mac
+ .web
+ .webassets-cache
+ *.bak
+ *.cover
+ *.db
+ *.egg
+ *.egg-info/
+ *.kv-env.*
+ *.log
+ *.manifest
+ *.mo
+ *.pot
+ *.py,cover
+ *.py[cod]
+ *.sage.py
+ *.so
+ *.spec
+ *.terraform.lock.hcl
+ *.tfplan
+ *.tfstate
+ *.tfstate.*.backup
+ *.tfstate.backup
+ *.tfvars
+ **/.terraform/*
+ *$py.class
+ /site
+ /vectorstore/
+ aila-storage/
+ assets/external/
+ build/
+ celerybeat-schedule
+ celerybeat.pid
+ configuration/config.abaz009.yaml
+ configuration/config.bubb001.yaml
+ configuration/config.stie104.yaml
+ configuration/config.voro047.yaml
+ connector examples/sharepoint.json
+ cover/
+ coverage.xml
+ cython_debug/
+ db.sqlite3
+ db.sqlite3-journal
+ develop-eggs/
+ dist/
+ dmypy.json
+ docs/_build/
+ Documents/
+ downloads/
+ eggs/
+ env.bak/
+ env/
+ ENV/
+ htmlcov/
+ instance/
+ ipython_config.py
+ knowledge/migrate.py
+ lib/
+ lib64/
+ local_settings.py
+ local.settings.json
+ MANIFEST
+ nosetests.xml
+ out
+ parts/
+ pip-delete-this-directory.txt
+ pip-log.txt
+ Pipfile
+ profile_default/
+ sdist/
+ share/python-wheels/
+ sketchpad/
+ sketchpad/
+ stores/
+ target/
+ tests/mcp_test.py
+ tmp.txt
+ uploaded_files/
+ uploads/
+ var/
+ venv.bak/
+ venv/
+ wheels/
@@ -0,0 +1,15 @@
+ Metadata-Version: 2.4
+ Name: appkit-commons
+ Version: 0.7.1
+ Summary: Add your description here
+ Author: Jens Rehpöhler
+ Requires-Python: >=3.13
+ Requires-Dist: colorlog>=6.9.0
+ Requires-Dist: cryptography>=46.0.2
+ Requires-Dist: pydantic-settings>=2.10.1
+ Requires-Dist: pyyaml==6.0.2
+ Requires-Dist: sqlalchemy-utils==0.42.0
+ Requires-Dist: sqlalchemy==2.0.41
+ Provides-Extra: azure
+ Requires-Dist: azure-identity==1.23.1; extra == 'azure'
+ Requires-Dist: azure-keyvault-secrets==4.10.0; extra == 'azure'
File without changes
@@ -0,0 +1,32 @@
+ [project]
+ name = "appkit-commons"
+ version = "0.7.1"
+ description = "Add your description here"
+ readme = "README.md"
+ authors = [{ name = "Jens Rehpöhler" }]
+ requires-python = ">=3.13"
+ dependencies = [
+     "colorlog>=6.9.0",
+     "cryptography>=46.0.2",
+     "pydantic-settings>=2.10.1",
+     "pyyaml==6.0.2",
+     "sqlalchemy-utils==0.42.0",
+     "sqlalchemy==2.0.41",
+ ]
+
+ [project.optional-dependencies]
+ azure = [
+     "azure-identity==1.23.1",
+     "azure-keyvault-secrets==4.10.0",
+ ]
+
+
+ [tool.setuptools.packages.find]
+ where = ["src"]
+
+ [tool.hatch.build.targets.wheel]
+ packages = ["src/appkit_commons"]
+
+ [build-system]
+ requires = ["hatchling"]
+ build-backend = "hatchling.build"
@@ -0,0 +1,11 @@
+ from pathlib import Path
+ from typing import Final
+
+ from dotenv import load_dotenv
+
+ # init first to prevent circular dependencies
+ BASE_PATH: Final[Path] = Path.cwd()
+ CONFIGURATION_PATH: Final[Path] = BASE_PATH / "configuration"
+
+ # initialize logging and .env before everything else
+ load_dotenv()
@@ -0,0 +1,54 @@
+ import importlib
+ from typing import Any
+
+ from appkit_commons.configuration.base import BaseConfig
+ from appkit_commons.configuration.secret_provider import (
+     SecretNotFoundError,
+     SecretProvider,
+     get_secret,
+ )
+ from appkit_commons.configuration.yaml import (
+     YamlConfigReader,
+     YamlConfigSettingsSource,
+ )
+ # Remove this direct import that causes the circular dependency
+ # from appkit_commons.configuration.logging import init_logging
+
+ __all__ = [
+     "ApplicationConfig",
+     "BaseConfig",
+     "Configuration",
+     "DatabaseConfig",
+     "Protocol",
+     "SecretNotFoundError",
+     "SecretProvider",
+     "ServerConfig",
+     "WorkerConfig",
+     "YamlConfigReader",
+     "YamlConfigSettingsSource",
+     "get_secret",
+     "init_logging",
+ ]
+
+ # Keep backward compatibility if someone used the wrong name
+ __ALL__ = __all__
+
+ _lazy_map: dict[str, str] = {
+     "Configuration": "appkit_commons.configuration.configuration",
+     "ApplicationConfig": "appkit_commons.configuration.configuration",
+     "DatabaseConfig": "appkit_commons.configuration.configuration",
+     "ServerConfig": "appkit_commons.configuration.configuration",
+     "WorkerConfig": "appkit_commons.configuration.configuration",
+     "Protocol": "appkit_commons.configuration.configuration",
+     "init_logging": "appkit_commons.configuration.logging",
+ }
+
+
+ def __getattr__(name: str) -> Any:
+     module_path = _lazy_map.get(name)
+     if module_path is None:
+         raise AttributeError(
+             f"module 'appkit_commons.configuration' has no attribute {name!r}"
+         )
+     module = importlib.import_module(module_path)
+     return getattr(module, name)
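The module above re-exports its heavier submodules lazily through a module-level `__getattr__` (PEP 562), so importing the package does not pull in `configuration.configuration` or `configuration.logging` until one of the mapped names is first touched. A minimal sketch of that behaviour, assuming appkit-commons is installed (whether the submodule already sits in `sys.modules` can vary if other imports ran first):

```python
# Lazy re-export sketch: accessing a mapped name triggers __getattr__, which
# imports the target submodule on demand.  Assumes appkit-commons is installed.
import sys

import appkit_commons.configuration as cfg

target = "appkit_commons.configuration.configuration"
print(target in sys.modules)  # typically False right after the package import

Configuration = cfg.Configuration  # resolved via __getattr__ -> importlib.import_module
print(target in sys.modules)  # True once the lazy attribute has been accessed
```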
@@ -0,0 +1,70 @@
+ import logging
+ import os
+ from collections.abc import Callable
+ from typing import Any
+
+ from pydantic import model_validator
+ from pydantic_settings import (
+     BaseSettings,
+     PydanticBaseSettingsSource,
+     SettingsConfigDict,
+ )
+
+ from appkit_commons.configuration.secret_provider import SECRET, get_secret
+ from appkit_commons.configuration.yaml import YamlConfigSettingsSource
+
+ logger = logging.getLogger(__name__)
+
+
+ def _starts_with_secret(s: str) -> bool:
+     return s.lower().startswith(SECRET)
+
+
+ def _replace_value_if_secret(
+     key: str, value: Any, secret_function: Callable[[str], str]
+ ) -> Any:
+     if not isinstance(value, str):
+         return value
+
+     value = str(value)
+     if not _starts_with_secret(value):
+         return value
+
+     # remove SECRET from value
+     # 1. secret:mysecret -> mysecret
+     secret_len = len(SECRET)
+     value = value[secret_len:]
+     key = value if len(value) > 0 else key
+     return secret_function(key)
+
+
+ class BaseConfig(BaseSettings):
+     model_config = SettingsConfigDict(extra="ignore", env_nested_delimiter="__")
+
+     @classmethod
+     def settings_customise_sources(
+         cls,
+         settings_cls: type[BaseSettings],
+         init_settings: PydanticBaseSettingsSource,
+         env_settings: PydanticBaseSettingsSource,
+         dotenv_settings: PydanticBaseSettingsSource,
+         file_secret_settings: PydanticBaseSettingsSource,
+     ) -> tuple[PydanticBaseSettingsSource, ...]:
+         profiles: list[str] = [
+             profile.strip() for profile in os.getenv("PROFILES", "").split(",")
+         ]
+
+         return (
+             init_settings,
+             env_settings,
+             dotenv_settings,
+             file_secret_settings,
+             YamlConfigSettingsSource(settings_cls, profiles=profiles),
+         )
+
+     @model_validator(mode="before")
+     @classmethod
+     def secret_update(cls, values: dict[str, Any]) -> dict[str, Any]:
+         return {
+             k: _replace_value_if_secret(k, v, get_secret) for k, v in values.items()
+         }
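The `secret:` convention resolved by `BaseConfig.secret_update` can be illustrated with a standalone sketch that mirrors `_replace_value_if_secret` but uses a stubbed lookup table instead of `get_secret`; the vault contents below are made up for the example:

```python
# Standalone illustration of the "secret:" convention handled above; the dict
# stands in for the real secret provider.
from collections.abc import Callable

SECRET = "secret:"

def resolve(key: str, value: object, lookup: Callable[[str], str]) -> object:
    if not isinstance(value, str) or not value.lower().startswith(SECRET):
        return value                       # non-strings and plain values pass through
    ref = value[len(SECRET):]              # "secret:db-password" -> "db-password"
    return lookup(ref if ref else key)     # empty reference falls back to the field name

fake_vault = {"db-password": "s3cr3t", "api_key": "abc123"}

print(resolve("password", "secret:db-password", fake_vault.__getitem__))  # -> s3cr3t
print(resolve("api_key", "secret:", fake_vault.__getitem__))              # -> abc123
print(resolve("port", 5432, fake_vault.__getitem__))                      # -> 5432
```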
@@ -0,0 +1,70 @@
+ from __future__ import annotations
+
+ from enum import StrEnum
+ from typing import Generic, TypeVar
+
+ from pydantic import Field
+
+ from appkit_commons.configuration.base import BaseConfig
+ from appkit_commons.database.configuration import DatabaseConfig
+
+
+ class ConfigurationError(ValueError):
+     pass
+
+
+ class Environment(StrEnum):
+     development = "dev"
+     production = ""
+     testing = "test"
+     staging = "stage"
+     docker = "container"
+     local = "local"
+     ci = "ci"
+
+
+ class WorkerConfig(StrEnum):
+     multiprocessing = "multiprocessing"
+     webconcurrency = "webconcurrency"
+
+
+ class Protocol(StrEnum):
+     http = "http"
+     https = "https"
+
+
+ class ServerConfig(BaseConfig):
+     host: str
+     port: int
+     docker_port: int
+     protocol: Protocol = Protocol.http
+     reload: bool = False
+     workers: int | WorkerConfig = WorkerConfig.webconcurrency
+
+
+ class ReflexConfig(BaseConfig):
+     deploy_url: str
+     frontend_port: int = 80
+     backend_port: int = 3030
+     workers: int = 3
+     default_timeout: int = 300  # seconds
+     backend_timeout: int = 180  # seconds
+     single_port: bool = False
+
+
+ class ApplicationConfig(BaseConfig):
+     version: str
+     name: str
+     logging: str
+     environment: Environment | None = Environment.local
+     database: DatabaseConfig | None = Field(..., alias="database")
+
+
+ T = TypeVar("T", bound=ApplicationConfig)
+
+
+ class Configuration(BaseConfig, Generic[T]):  # noqa: UP046
+     profile: str
+     server: ServerConfig | None = Field(default=None, alias="server")
+     reflex: ReflexConfig | None = Field(default=None, alias="reflex")
+     app: T
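A minimal sketch, assuming appkit-commons is installed, of how an application might subclass `ApplicationConfig` and instantiate the generic `Configuration` wrapper. `MyAppConfig` and its `greeting` field are hypothetical, and all values are passed explicitly here instead of coming from `config.yaml`, `.env`, or environment variables:

```python
# Hypothetical application config built on the classes above.
from appkit_commons.configuration.configuration import ApplicationConfig, Configuration


class MyAppConfig(ApplicationConfig):  # hypothetical, not part of the package
    greeting: str = "hello"


config = Configuration[MyAppConfig](
    profile="local",
    app=MyAppConfig(
        version="0.1.0",
        name="demo",
        logging="logging.yaml",
        database=None,  # the field is required but may be None
    ),
)
print(config.app.name, config.app.greeting)
```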
@@ -0,0 +1,25 @@
+ import logging.config
+ from pathlib import Path
+
+ import yaml
+
+ from appkit_commons import CONFIGURATION_PATH
+ from appkit_commons.configuration.configuration import Configuration
+
+ logger = logging.getLogger(__name__)
+
+
+ def init_logging(configuration: Configuration) -> None:
+     # check if profile based logging configuration exists
+     log_configuration = configuration.app.logging
+     if CONFIGURATION_PATH.joinpath(log_configuration).exists():
+         logger.info(
+             "Using logging configuration: \x1b[31;1m%s\x1b[0m", log_configuration
+         )
+         with Path.open(
+             CONFIGURATION_PATH / log_configuration, "rt", encoding="utf-8"
+         ) as f:
+             config = yaml.safe_load(f.read())
+             logging.config.dictConfig(config)
+     else:
+         logger.info("Using logging configuration: \x1b[31;1mlogging.conf\x1b[0m")
@@ -0,0 +1,95 @@
+ import os
+ from enum import StrEnum
+ from functools import lru_cache
+ from typing import Final
+
+ from dotenv import load_dotenv
+
+ load_dotenv()
+
+
+ SECRET_PROVIDER: Final[str] = os.getenv("SECRET_PROVIDER", "local").lower()
+ SECRET: Final[str] = "secret:"  # noqa: S105
+
+
+ class SecretNotFoundError(Exception):
+     pass
+
+
+ class SecretProvider(StrEnum):
+     AZURE = "azure"
+     LOCAL = "local"
+
+
+ @lru_cache(maxsize=1)
+ def _get_azure_client():
+     try:
+         from azure.identity import (  # type: ignore # noqa: PLC0415
+             DefaultAzureCredential,
+         )
+         from azure.keyvault.secrets import SecretClient  # type: ignore # noqa: PLC0415
+     except ImportError as exc:
+         raise ImportError(
+             "Optional Azure dependencies are required to use SecretProvider.AZURE. "
+             "Install 'azure-identity' and 'azure-keyvault-secrets'."
+         ) from exc
+
+     vault_url = os.environ.get("AZURE_KEY_VAULT_URL")
+     if not vault_url:
+         raise RuntimeError(
+             "Environment variable 'AZURE_KEY_VAULT_URL' must be set to use "
+             "SecretProvider.AZURE"
+         )
+     credential = DefaultAzureCredential()
+     return SecretClient(vault_url=vault_url, credential=credential)
+
+
+ def _get_secret_from_azure(key: str) -> str:
+     client = _get_azure_client()
+     secret = client.get_secret(key.lower())
+     if not secret.value:
+         raise SecretNotFoundError(f"Secret '{key}' not found in Azure Key Vault")
+     return secret.value
+
+
+ def _get_secret_from_env(key: str) -> str:
+     """
+     Get secret from environment variables.
+
+     This function supports multiple naming conventions:
+     - Direct key lookup: key -> env[key]
+     - Uppercase transformation: key -> env[key.upper()]
+     - Dash-to-underscore: avui-db-user -> AVUI_DB_USER
+     """
+     # Try direct lookup first
+     value = os.getenv(key)
+     if value:
+         return value
+
+     # Try uppercase
+     value = os.getenv(key.upper())
+     if value:
+         return value
+
+     # Try dash-to-underscore + uppercase transformation
+     transformed_key = key.replace("-", "_").upper()
+     value = os.getenv(transformed_key)
+     if value:
+         return value
+
+     # Try dash-to-underscore + lowercase transformation
+     value = os.getenv(transformed_key.lower())
+     if value:
+         return value
+
+     error_msg = (
+         f"Secret '{key}' not found in environment variables. "
+         f"Tried: {key}, {key.upper()}, {transformed_key}, {transformed_key.lower()}"
+     )
+     raise SecretNotFoundError(error_msg)
+
+
+ def get_secret(key: str) -> str:
+     if SECRET_PROVIDER == SecretProvider.AZURE:
+         return _get_secret_from_azure(key)
+     return _get_secret_from_env(key)
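A short usage sketch of the local provider's fallback rules, assuming appkit-commons is installed and `SECRET_PROVIDER` is unset or `local`; the environment variable is set only for the example, and the `avui-db-user` name is taken from the docstring above:

```python
# Demonstrates the dash-to-underscore + uppercase fallback of the local provider.
import os

from appkit_commons.configuration.secret_provider import get_secret

os.environ["AVUI_DB_USER"] = "svc-user"  # set only for this illustration

# "avui-db-user" is not defined as-is, so the transformation resolves it
# to AVUI_DB_USER.
print(get_secret("avui-db-user"))  # -> "svc-user"
```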
@@ -0,0 +1,160 @@
+ import logging
+ from functools import lru_cache
+ from pathlib import Path
+ from typing import Any
+
+ import yaml
+ from pydantic_settings import BaseSettings, InitSettingsSource
+
+ PROJECT_ROOT = Path.cwd()
+ DEFAULT_PATH: Path = PROJECT_ROOT / "configuration"
+ DEFAULT_CONFIG_YAML: Path = Path("config.yaml")
+
+ logger = logging.getLogger(__name__)
+
+
+ class YamlConfigReader:
+     def __init__(
+         self,
+         yaml_file_path: Path = DEFAULT_PATH,
+         yaml_file: Path = DEFAULT_CONFIG_YAML,
+         yaml_file_encoding: str = "utf-8",
+     ):
+         self.yaml_file_path = yaml_file_path
+         self.yaml_file = yaml_file
+         self.yaml_file_encoding = yaml_file_encoding
+         self.yaml_file_prefix = yaml_file.stem
+         self.yaml_file_suffix = yaml_file.suffix
+
+     @classmethod
+     def __merge(cls, master: dict, updates: dict) -> dict:
+         """
+         Deep merge two dictionaries
+         """
+         # Safety check for None values
+         if updates is None:
+             return master
+         if master is None:
+             return updates if updates is not None else {}
+
+         for key in updates:  # noqa
+             if (
+                 key in master
+                 and isinstance(master[key], dict)
+                 and isinstance(updates[key], dict)
+             ):
+                 cls.__merge(master[key], updates[key])
+             else:
+                 master[key] = updates[key]
+
+         return master
+
+     @classmethod
+     @lru_cache
+     def read_file(cls, file_path: Path, encoding: str = "utf-8") -> Any:
+         try:
+             with Path.open(file_path, "r", encoding=encoding) as file:
+                 result = yaml.safe_load(file)
+                 # Handle case where YAML file is empty or contains only comments
+                 return result if result is not None else {}
+         except yaml.YAMLError as ex:
+             raise ex
+         except FileNotFoundError:
+             logger.warning("Configuration file '%s' not found.", file_path)
+             return {}
+
+     def read_and_merge_files(self, profiles: list[str] | None) -> dict[str, Any]:
+         base_config: dict = self.read_file(
+             self.yaml_file_path / self.yaml_file, self.yaml_file_encoding
+         )
+
+         if profiles is None:
+             return base_config
+
+         # Load profiles
+         merged_config = base_config
+         for environment in profiles:
+             merge_config = (
+                 self.yaml_file_path
+                 / f"{self.yaml_file_prefix}.{environment}{self.yaml_file_suffix}"
+             )
+             updates: dict = self.read_file(merge_config, self.yaml_file_encoding)
+             merged_config = self.__merge(master=merged_config, updates=updates)
+
+         return merged_config
+
+
+ class YamlConfigSettingsSource(InitSettingsSource):
+     """This class is designed to load variables from a YAML file with inheritance.
+
+     The YAML file is loaded from the path specified in the `yaml_file_path` attribute.
+     By default, the configuration file should be named "config.yaml" and located in the
+     specified path. The default directory where `config.yaml` files are stored is
+     `configuration` in the project root.
+
+     To extend or override the configuration, you can set e.g. a `PROFILES` environment
+     variable. Additional YAML files should follow the naming convention
+     `config.{profile}.yaml` and be stored in the same directory as the default
+     configuration.
+
+     **Important Note:** The order in which profiles are specified in the `profiles` list
+     matters. Profile files will be loaded and merged in the order
+     they are listed. E.g. if `profiles` is set to `dev,prod`, the `dev` profile will be
+     loaded first after the default config, and the `prod` profile will be loaded second,
+     with the `prod` profile overriding any values from the `default` and `dev` profiles.
+
+     When setting the `yaml_file` attribute, the class will search for profiles by
+     splitting the file name and using the first part as the prefix. For instance, if
+     `yaml_file` is set to `my_config.yaml`, the class will look for profiles in files
+     named `my_config.{profile}.yaml`.
+
+     Usage:
+     ```python
+     class BaseConfig(BaseSettings):
+         model_config = SettingsConfigDict(extra="ignore", env_nested_delimiter="__")
+
+         @classmethod
+         def settings_customise_sources(
+             cls,
+             settings_cls: Type[BaseSettings],
+             init_settings: PydanticBaseSettingsSource,
+             env_settings: PydanticBaseSettingsSource,
+             dotenv_settings: PydanticBaseSettingsSource,
+             file_secret_settings: PydanticBaseSettingsSource,
+         ) -> Tuple[PydanticBaseSettingsSource, ...]:
+             profiles: List[str] = [
+                 profile.strip() for profile in os.getenv("PROFILES", "").split(",")
+             ]
+
+             return (
+                 init_settings,
+                 env_settings,
+                 dotenv_settings,
+                 file_secret_settings,
+                 YamlConfigSettingsSource(settings_cls, profiles=profiles),
+             )
+     ```
+     """
+
+     def __init__(
+         self,
+         settings_cls: type[BaseSettings],
+         profiles: list[str] | None = None,
+         yaml_file_path: Path = DEFAULT_PATH,
+         yaml_file: Path = DEFAULT_CONFIG_YAML,
+         yaml_file_encoding: str = "utf-8",
+     ):
+         reader = YamlConfigReader(
+             yaml_file_path=yaml_file_path,
+             yaml_file=yaml_file,
+             yaml_file_encoding=yaml_file_encoding,
+         )
+         self.yaml_data = reader.read_and_merge_files(profiles=profiles)
+         # Filter out YAML entries with no matching Pydantic field
+         valid_yaml_data = {
+             k: v for k, v in self.yaml_data.items() if k in settings_cls.__fields__
+         }
+         super().__init__(settings_cls, valid_yaml_data)
+
+     def __repr__(self) -> str:
+         return f"YamlConfigSettingsSource(yaml_data={self.yaml_data!r})"
@@ -0,0 +1,46 @@
+ from urllib.parse import quote
+
+ from pydantic import SecretStr, computed_field
+ from pydantic_settings import SettingsConfigDict
+
+ from appkit_commons.configuration.base import BaseConfig
+
+
+ class DatabaseConfig(BaseConfig):
+     model_config = SettingsConfigDict(env_prefix="app_database_", env_file=".env")
+
+     type: str = "postgresql"
+     username: str = "postgres"
+     password: SecretStr = SecretStr("postgres")
+     host: str = "localhost"
+     port: int = 5432
+     name: str = "postgres"
+     encryption_key: SecretStr = SecretStr("")
+     pool_size: int = 10
+     max_overflow: int = 30
+     echo: bool = False
+     testing: bool = False
+     # SSL mode: disable, allow, prefer, require, verify-ca, verify-full
+     ssl_mode: str = "disable"
+
+     @computed_field(repr=False)  # type: ignore
+     @property
+     def url(self) -> str:
+         if self.type == "sqlite":
+             return f"sqlite:///{self.name}"
+
+         if self.type == "postgresql":
+             # URL encode the password to handle special characters
+             encoded_password = quote(self.password.get_secret_value(), safe="")
+             base_url = (
+                 f"postgresql+psycopg://{self.username}:{encoded_password}"
+                 f"@{self.host}:{self.port}/{self.name}"
+             )
+
+             # Add SSL parameters if specified
+             if self.ssl_mode != "disable":
+                 base_url += f"?sslmode={self.ssl_mode}"
+
+             return base_url
+
+         raise ValueError(f"Unsupported database type: {self.type}")
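A minimal sketch, assuming appkit-commons is installed, of how `DatabaseConfig` assembles its connection URL; the credentials are placeholders:

```python
# URL construction for PostgreSQL (with URL-encoded password) and SQLite.
from pydantic import SecretStr

from appkit_commons.database.configuration import DatabaseConfig

cfg = DatabaseConfig(
    username="app",
    password=SecretStr("p@ss:word"),  # special characters get URL-encoded
    host="db.internal",
    port=5432,
    name="appdb",
    ssl_mode="require",
)
print(cfg.url)
# postgresql+psycopg://app:p%40ss%3Aword@db.internal:5432/appdb?sslmode=require

sqlite_cfg = DatabaseConfig(type="sqlite", name="local.db")
print(sqlite_cfg.url)  # sqlite:///local.db
```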
@@ -0,0 +1,63 @@
+ import datetime
+
+ from cryptography.fernet import Fernet
+ from sqlalchemy import (
+     DateTime,
+     Dialect,
+     String,
+     TypeDecorator,
+ )
+ from sqlalchemy.orm import (
+     DeclarativeBase,
+     Mapped,
+     mapped_column,
+ )
+ from sqlalchemy.sql import func
+
+ from appkit_commons.database.configuration import DatabaseConfig
+ from appkit_commons.registry import service_registry
+
+
+ def get_cipher_key() -> str:
+     """Get cipher key from database config, with lazy initialization."""
+     return service_registry().get(DatabaseConfig).encryption_key.get_secret_value()
+
+
+ class EncryptedString(TypeDecorator):
+     impl = String
+     cache_ok = True  # Added to allow caching of the custom type
+
+     def __init__(self, *args: any, **kwargs: any):
+         super().__init__(*args, **kwargs)
+         self.cipher_key = get_cipher_key()
+         self.cipher = Fernet(self.cipher_key)
+
+     def process_bind_param(self, value: any, dialect: Dialect) -> str | None:  # noqa: ARG002
+         if value is not None:
+             return self.cipher.encrypt(value.encode()).decode()
+         return value
+
+     def process_result_value(self, value: any, dialect: Dialect) -> str | None:  # noqa: ARG002
+         if value is not None:
+             return self.cipher.decrypt(value.encode()).decode()
+         return value
+
+
+ class Base(DeclarativeBase):
+     pass
+
+
+ class Entity:  # mixin class with default columns
+     id: Mapped[int] = mapped_column(primary_key=True, index=True, autoincrement=True)
+
+     created: Mapped[datetime.datetime] = mapped_column(
+         DateTime(timezone=True),
+         server_default=func.now(),
+         nullable=False,
+     )
+     updated: Mapped[datetime.datetime] = mapped_column(
+         DateTime(timezone=True),
+         server_default=func.now(),
+         onupdate=func.now(),
+         nullable=False,
+     )
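A hedged sketch of declaring a model on top of `Base`, the `Entity` mixin, and `EncryptedString`. The diff does not show file names, so the import path `appkit_commons.database.entities` is a guess; `Account` and `api_token` are hypothetical, and a `DatabaseConfig` with a valid Fernet key must be registered before the column type is constructed:

```python
# Hypothetical model using the declarative base and mixin above.
from cryptography.fernet import Fernet
from pydantic import SecretStr
from sqlalchemy.orm import Mapped, mapped_column

from appkit_commons.database.configuration import DatabaseConfig
from appkit_commons.database.entities import (  # assumed module path, not shown in the diff
    Base,
    EncryptedString,
    Entity,
)
from appkit_commons.registry import service_registry

# EncryptedString reads its Fernet key from the registered DatabaseConfig,
# so register one before the class body below is evaluated.
service_registry().register(
    DatabaseConfig(
        type="sqlite",
        name=":memory:",
        encryption_key=SecretStr(Fernet.generate_key().decode()),
    )
)


class Account(Entity, Base):  # hypothetical model, not part of the package
    __tablename__ = "accounts"

    api_token: Mapped[str] = mapped_column(EncryptedString(255))
```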
@@ -0,0 +1,78 @@
+ import contextlib
+ import logging
+ from collections.abc import AsyncGenerator, Iterator
+ from functools import lru_cache
+ from typing import Any
+
+ from sqlalchemy import Engine
+ from sqlalchemy.ext.asyncio import AsyncSession
+ from sqlalchemy.orm import Session
+
+ from appkit_commons.database.configuration import DatabaseConfig
+ from appkit_commons.database.sessionmanager import (
+     AsyncSessionManager,
+     SessionManager,
+ )
+ from appkit_commons.registry import service_registry
+
+ logger = logging.getLogger(__name__)
+
+
+ def _get_db_config() -> DatabaseConfig:
+     """Get database configuration from registry."""
+     db_config = service_registry().get(DatabaseConfig)
+     if db_config is None:
+         logger.error("DatabaseConfig not found in registry")
+         raise RuntimeError("DatabaseConfig not initialized in registry")
+     return db_config
+
+
+ def _get_engine_kwargs() -> dict[str, Any]:
+     """Get engine configuration kwargs."""
+     db_config = _get_db_config()
+
+     if db_config.type == "postgres":
+         return {
+             "pool_size": db_config.pool_size,
+             "max_overflow": db_config.max_overflow,
+             "echo": db_config.echo,
+         }
+
+     return {}
+
+
+ # if app_config.testing:
+ # _engine_kwargs["poolclass"] = NullPool  # type: ignore
+ # _engine_kwargs["echo"] = app_config.database.echo
+ # _engine_kwargs.pop("pool_size")
+ # _engine_kwargs.pop("max_overflow")
+
+
+ # Create a database engine
+ @lru_cache(maxsize=1)
+ def get_async_session_manager() -> AsyncSessionManager:
+     db_config = _get_db_config()
+     engine_kwargs = _get_engine_kwargs()
+     return AsyncSessionManager(db_config.url, **engine_kwargs)
+
+
+ @lru_cache(maxsize=1)
+ def get_session_manager() -> SessionManager:
+     db_config = _get_db_config()
+     engine_kwargs = _get_engine_kwargs()
+     return SessionManager(db_config.url, **engine_kwargs)
+
+
+ @contextlib.asynccontextmanager
+ async def get_asyncdb_session() -> AsyncGenerator[AsyncSession, None]:
+     async with get_async_session_manager().session() as session:
+         yield session
+
+
+ def get_db_session() -> Iterator[Session]:
+     with get_session_manager().session() as session:
+         yield session
+
+
+ def get_db_engine() -> Engine:
+     return get_session_manager().get_engine()  # type: ignore
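A hedged usage sketch of the session helpers above; the import path `appkit_commons.database.session` is an assumption (file names are not shown in the diff), and a `DatabaseConfig` has to be registered before first use:

```python
# Synchronous session helper against an in-memory SQLite database.
from sqlalchemy import text

from appkit_commons.database.configuration import DatabaseConfig
from appkit_commons.database.session import get_db_session  # assumed module path
from appkit_commons.registry import service_registry

# The helpers look DatabaseConfig up in the registry, so register one first.
service_registry().register(DatabaseConfig(type="sqlite", name=":memory:"))

# get_db_session is a generator: it yields one Session and commits when the
# block finishes without raising.
for session in get_db_session():
    print(session.execute(text("select 1")).scalar_one())
```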
@@ -0,0 +1,52 @@
+ import contextlib
+ from collections.abc import AsyncIterator, Iterator
+ from contextlib import contextmanager
+ from typing import Any
+
+ from sqlalchemy import create_engine
+ from sqlalchemy.ext.asyncio import (
+     AsyncSession,
+     async_sessionmaker,
+     create_async_engine,
+ )
+ from sqlalchemy.orm import Session, sessionmaker
+
+
+ class AsyncSessionManager:
+     def __init__(self, host: str, engine_kwargs: dict[str, Any] | None = None):
+         self._engine = create_async_engine(host, **(engine_kwargs or {}))
+         self._sessionmaker = async_sessionmaker(bind=self._engine)
+
+     async def close(self) -> None:
+         if self._engine:
+             await self._engine.dispose()
+
+     @contextlib.asynccontextmanager
+     async def session(self) -> AsyncIterator[AsyncSession]:
+         async with self._sessionmaker() as session:
+             try:
+                 yield session
+                 await session.commit()
+             except Exception:
+                 await session.rollback()
+                 raise
+
+
+ class SessionManager:
+     def __init__(self, host: str, engine_kwargs: dict[str, Any] | None = None):
+         self._engine = create_engine(host, **(engine_kwargs or {}))
+         self._sessionmaker = sessionmaker(bind=self._engine)
+
+     def close(self) -> None:
+         if self._engine:
+             self._engine.dispose()
+
+     @contextmanager
+     def session(self) -> Iterator[Session]:
+         with self._sessionmaker() as session:
+             try:
+                 yield session
+                 session.commit()
+             except Exception:
+                 session.rollback()
+                 raise
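A minimal sketch, assuming appkit-commons and SQLAlchemy are installed, of using `SessionManager` directly with an in-memory SQLite URL:

```python
# Direct use of the synchronous manager; the URL is illustrative.
from sqlalchemy import text

from appkit_commons.database.sessionmanager import SessionManager

manager = SessionManager("sqlite:///:memory:")
with manager.session() as session:  # commits on success, rolls back on error
    print(session.execute(text("select 1")).scalar_one())
manager.close()
```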
@@ -0,0 +1,188 @@
+ import logging
+ from functools import lru_cache
+ from typing import TYPE_CHECKING, Any, TypeVar, cast
+
+ if TYPE_CHECKING:
+     from appkit_commons.configuration.configuration import (
+         ApplicationConfig,
+         Configuration,
+     )
+
+ logger = logging.getLogger(__name__)
+
+ T = TypeVar("T")
+ ConfigT = TypeVar("ConfigT", bound="ApplicationConfig")
+
+
+ class ServiceRegistry:
+     """Registry for storing and retrieving initialized instances by their class type."""
+
+     def __init__(self) -> None:
+         self._instances: dict[type[Any], Any] = {}
+
+     def _register_config_recursively(  # noqa: PLR0912
+         self, obj: Any, visited: set[int] | None = None
+     ) -> None:
+         """Recursively register configuration objects and their attributes."""
+         if visited is None:
+             visited = set()
+
+         # Avoid infinite recursion by tracking visited objects
+         obj_id = id(obj)
+         if obj_id in visited:
+             return
+         visited.add(obj_id)
+
+         # Use __dict__ to get instance attributes directly
+         if hasattr(obj, "__dict__"):
+             for attr_name, attr_value in obj.__dict__.items():
+                 # Skip private attributes and None values
+                 if attr_name.startswith("_") or attr_value is None:
+                     continue
+
+                 try:
+                     # Check if this is a configuration object (not a basic type)
+                     if hasattr(attr_value, "__class__"):
+                         attr_class = attr_value.__class__
+
+                         # Skip built-in types, pydantic types, and already registered
+                         if (
+                             attr_class.__module__ != "builtins"
+                             and attr_class.__name__ not in ("SecretStr", "StrEnum")
+                             and not self.has(attr_class)
+                         ):
+                             self.register_as(attr_class, attr_value)
+                             logger.debug(
+                                 "Registered service configuration: %s from attribute %s",  # noqa: E501
+                                 attr_class.__name__,
+                                 attr_name,
+                             )
+
+                         # Recursively register nested configurations
+                         self._register_config_recursively(attr_value, visited)
+
+                 except Exception as e:
+                     logger.warning(
+                         "Failed to process attribute %s: %s", attr_name, str(e)
+                     )
+
+         # Also check class annotations to handle properties/descriptors
+         if hasattr(obj.__class__, "__annotations__"):
+             for attr_name in obj.__class__.__annotations__:
+                 if attr_name.startswith("_"):
+                     continue
+
+                 try:
+                     attr_value = getattr(obj, attr_name, None)
+                     if attr_value is not None and hasattr(attr_value, "__class__"):
+                         attr_class = attr_value.__class__
+
+                         if (
+                             attr_class.__module__ != "builtins"
+                             and attr_class.__name__ not in ("SecretStr", "StrEnum")
+                             and not self.has(attr_class)
+                         ):
+                             self.register_as(attr_class, attr_value)
+                             logger.debug(
+                                 "Registered service configuration: %s from annotated attribute %s",  # noqa: E501
+                                 attr_class.__name__,
+                                 attr_name,
+                             )
+
+                         # Recursively register nested configurations
+                         self._register_config_recursively(attr_value, visited)
+
+                 except Exception as e:
+                     logger.warning(
+                         "Failed to access annotated attribute %s: %s", attr_name, str(e)
+                     )
+
+     def configure(
+         self, app_config_class: type[ConfigT], env_file: str = ".env"
+     ) -> "Configuration[ConfigT]":
+         """Configure and register the application configuration."""
+         from appkit_commons.configuration.configuration import (  # noqa: PLC0415
+             Configuration,
+         )
+
+         logger.debug(
+             "Configuring application with config class: %s", app_config_class.__name__
+         )
+
+         # Create the configuration instance
+         configuration = Configuration[app_config_class](_env_file=env_file)
+
+         # Register the configuration instance
+         self.register_as(Configuration, configuration)
+         self._register_config_recursively(configuration)
+
+         logger.info("Application configuration initialized and registered")
+         logger.info("Total registered instances: %d", len(self._instances))
+         for registered_type in self.list_registered():
+             logger.debug("Registered: %s", registered_type.__name__)
+
+         return configuration
+
+     def register(self, instance: object) -> None:
+         """Register an initialized instance using its class type as the key."""
+         instance_type = type(instance)
+
+         if instance_type in self._instances:
+             logger.warning(
+                 "Overwriting existing instance of type: %s", instance_type.__name__
+             )
+
+         self._instances[instance_type] = instance
+         logger.debug("Registered instance of type %s", instance_type.__name__)
+
+     def register_as(self, instance_type: type[T], instance: T) -> None:
+         """Register an initialized instance with a specific type as the key."""
+         if instance_type in self._instances:
+             logger.warning(
+                 "Overwriting existing instance of type: %s", instance_type.__name__
+             )
+
+         self._instances[instance_type] = instance
+         logger.debug("Registered instance as type %s", instance_type.__name__)
+
+     def get(self, instance_type: type[T]) -> T:
+         """Retrieve an instance by its class type, returning None if not found."""
+         instance: type[T] | None = self._instances.get(instance_type)
+         if instance is None:
+             logger.error(
+                 "Instance of type %s not found in registry", instance_type.__name__
+             )
+             raise KeyError(
+                 f"Instance of type {instance_type.__name__} not found in registry"
+             )
+         return cast(T, instance)
+
+     def unregister(self, instance_type: type[T]) -> None:
+         """Remove an instance from the registry by its class type."""
+         if instance_type in self._instances:
+             del self._instances[instance_type]
+             logger.debug("Unregistered instance of type: %s", instance_type.__name__)
+         else:
+             logger.warning(
+                 "Attempted to unregister non-existent type: %s", instance_type.__name__
+             )
+
+     def list_registered(self) -> list[type[Any]]:
+         """Get a list of all registered class types."""
+         return list(self._instances.keys())
+
+     def has(self, instance_type: type[T]) -> bool:
+         """Check if an instance is registered for the given class type."""
+         return instance_type in self._instances
+
+     def clear(self) -> None:
+         """Clear all registered instances."""
+         count = len(self._instances)
+         self._instances.clear()
+         logger.debug("Cleared %d instances from registry", count)
+
+
+ @lru_cache(maxsize=1)
+ def service_registry() -> ServiceRegistry:
+     logger.debug("Creating the service registry instance")
+     return ServiceRegistry()
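A hedged sketch of the registry's register/get round trip; `HttpClient` is a hypothetical service class, not part of the package:

```python
# Register an instance keyed by its type and fetch it back elsewhere.
from appkit_commons.registry import service_registry


class HttpClient:  # hypothetical service, not part of the package
    def __init__(self, base_url: str) -> None:
        self.base_url = base_url


registry = service_registry()  # lru_cache(maxsize=1): the same instance everywhere
registry.register(HttpClient("https://api.example.invalid"))

client = registry.get(HttpClient)  # raises KeyError if nothing was registered
print(client.base_url)
```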
@@ -0,0 +1,112 @@
+ from __future__ import annotations
+
+ import hashlib
+ import hmac
+ import secrets
+
+ SALT_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
+ DEFAULT_PBKDF2_ITERATIONS = 1_000_000
+
+
+ def _gen_salt(length: int) -> str:
+     """Generate a random string of SALT_CHARS with specified ``length``."""
+     if length <= 0:
+         raise ValueError("Salt length must be at least 1.")
+
+     return "".join(secrets.choice(SALT_CHARS) for _ in range(length))
+
+
+ def _hash_internal(method: str, salt: str, password: str) -> tuple[str, str]:
+     method, *args = method.split(":")
+     salt_bytes = salt.encode()
+     password_bytes = password.encode()
+
+     if method == "scrypt":
+         if not args:
+             n = 2**15
+             r = 8
+             p = 1
+         else:
+             try:
+                 n, r, p = map(int, args)
+             except ValueError:
+                 raise ValueError("'scrypt' takes 3 arguments.") from None
+
+         maxmem = 132 * n * r * p  # ideally 128, but some extra seems needed
+         return (
+             hashlib.scrypt(
+                 password_bytes, salt=salt_bytes, n=n, r=r, p=p, maxmem=maxmem
+             ).hex(),
+             f"scrypt:{n}:{r}:{p}",
+         )
+     if method == "pbkdf2":
+         len_args = len(args)
+
+         if len_args == 0:
+             hash_name = "sha256"
+             iterations = DEFAULT_PBKDF2_ITERATIONS
+         elif len_args == 1:
+             hash_name = args[0]
+             iterations = DEFAULT_PBKDF2_ITERATIONS
+         elif len_args == 2:  # noqa: PLR2004
+             hash_name = args[0]
+             iterations = int(args[1])
+         else:
+             raise ValueError("'pbkdf2' takes 2 arguments.")
+
+         return (
+             hashlib.pbkdf2_hmac(
+                 hash_name, password_bytes, salt_bytes, iterations
+             ).hex(),
+             f"pbkdf2:{hash_name}:{iterations}",
+         )
+     raise ValueError(f"Invalid hash method '{method}'.")
+
+
+ def generate_password_hash(
+     password: str, method: str = "scrypt", salt_length: int = 16
+ ) -> str:
+     """Securely hash a password for storage. A password can be compared to a stored hash
+     using :func:`check_password_hash`.
+
+     The following methods are supported:
+
+     - ``scrypt``, the default. The parameters are ``n``, ``r``, and ``p``, the default
+       is ``scrypt:32768:8:1``. See :func:`hashlib.scrypt`.
+     - ``pbkdf2``, less secure. The parameters are ``hash_method`` and ``iterations``,
+       the default is ``pbkdf2:sha256:600000``. See :func:`hashlib.pbkdf2_hmac`.
+
+     Default parameters may be updated to reflect current guidelines, and methods may be
+     deprecated and removed if they are no longer considered secure. To migrate old
+     hashes, you may generate a new hash when checking an old hash, or you may contact
+     users with a link to reset their password.
+
+     :param password: The plaintext password.
+     :param method: The key derivation function and parameters.
+     :param salt_length: The number of characters to generate for the salt.
+
+     .. versionchanged:: 3.1
+         The default iterations for pbkdf2 was increased to 1,000,000.
+     """
+     salt = _gen_salt(salt_length)
+     h, actual_method = _hash_internal(method, salt, password)
+     return f"{actual_method}${salt}${h}"
+
+
+ def check_password_hash(pwhash: str, password: str) -> bool:
+     """Securely check that the given stored password hash, previously generated using
+     :func:`generate_password_hash`, matches the given password.
+
+     Methods may be deprecated and removed if they are no longer considered secure. To
+     migrate old hashes, you may generate a new hash when checking an old hash, or you
+     may contact users with a link to reset their password.
+
+     :param pwhash: The hashed password.
+     :param password: The plaintext password.
+     """
+     try:
+         method, salt, hashval = pwhash.split("$", 2)
+     except ValueError:
+         return False
+
+     return hmac.compare_digest(_hash_internal(method, salt, password)[0], hashval)
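A hedged usage sketch of the two helpers above; the import path `appkit_commons.security` is an assumption, since the diff does not show file names:

```python
# Hash a password, inspect the method prefix, then verify good and bad guesses.
from appkit_commons.security import (  # assumed module path
    check_password_hash,
    generate_password_hash,
)

pwhash = generate_password_hash("correct horse battery staple")
print(pwhash.split("$", 1)[0])  # e.g. "scrypt:32768:8:1"

print(check_password_hash(pwhash, "correct horse battery staple"))  # True
print(check_password_hash(pwhash, "wrong password"))                # False
```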