tks-essentials 0.1.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tks-essentials-0.1.1/LICENSE +19 -0
- tks-essentials-0.1.1/PKG-INFO +91 -0
- tks-essentials-0.1.1/README.md +70 -0
- tks-essentials-0.1.1/pyproject.toml +51 -0
- tks-essentials-0.1.1/setup.cfg +4 -0
- tks-essentials-0.1.1/setup.py +26 -0
- tks-essentials-0.1.1/tests/test_asset_formatter.py +60 -0
- tks-essentials-0.1.1/tests/test_security.py +35 -0
- tks-essentials-0.1.1/tests/test_utils.py +120 -0
- tks-essentials-0.1.1/tests/test_validators.py +32 -0
- tks-essentials-0.1.1/tks_essentials.egg-info/PKG-INFO +91 -0
- tks-essentials-0.1.1/tks_essentials.egg-info/SOURCES.txt +20 -0
- tks-essentials-0.1.1/tks_essentials.egg-info/dependency_links.txt +1 -0
- tks-essentials-0.1.1/tks_essentials.egg-info/requires.txt +11 -0
- tks-essentials-0.1.1/tks_essentials.egg-info/top_level.txt +1 -0
- tks-essentials-0.1.1/tksessentials/__init__.py +0 -0
- tks-essentials-0.1.1/tksessentials/asset_formatter.py +134 -0
- tks-essentials-0.1.1/tksessentials/constants.py +1 -0
- tks-essentials-0.1.1/tksessentials/global_logger.py +61 -0
- tks-essentials-0.1.1/tksessentials/security.py +104 -0
- tks-essentials-0.1.1/tksessentials/utils.py +160 -0
- tks-essentials-0.1.1/tksessentials/validators.py +9 -0
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
Copyright (c) 2018 The Python Packaging Authority
|
|
2
|
+
|
|
3
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
4
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
5
|
+
in the Software without restriction, including without limitation the rights
|
|
6
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
7
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
8
|
+
furnished to do so, subject to the following conditions:
|
|
9
|
+
|
|
10
|
+
The above copyright notice and this permission notice shall be included in all
|
|
11
|
+
copies or substantial portions of the Software.
|
|
12
|
+
|
|
13
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
14
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
15
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
16
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
17
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
18
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
19
|
+
SOFTWARE.
|
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
Metadata-Version: 2.1
|
|
2
|
+
Name: tks-essentials
|
|
3
|
+
Version: 0.1.1
|
|
4
|
+
Summary: Essentials for our TKS projects.
|
|
5
|
+
Author-email: Brayan <brayan@sparkandhale.com>
|
|
6
|
+
Project-URL: Homepage, https://github.com/The-Kara-System/tks-essentials
|
|
7
|
+
Keywords: finance,trading,models
|
|
8
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
9
|
+
Classifier: Programming Language :: Python
|
|
10
|
+
Requires-Python: >=3.9
|
|
11
|
+
Description-Content-Type: text/markdown
|
|
12
|
+
License-File: LICENSE
|
|
13
|
+
Requires-Dist: redis
|
|
14
|
+
Requires-Dist: tomli; python_version < "3.11"
|
|
15
|
+
Provides-Extra: dev
|
|
16
|
+
Requires-Dist: black; extra == "dev"
|
|
17
|
+
Requires-Dist: bumpver; extra == "dev"
|
|
18
|
+
Requires-Dist: isort; extra == "dev"
|
|
19
|
+
Requires-Dist: pip-tools; extra == "dev"
|
|
20
|
+
Requires-Dist: pytest; extra == "dev"
|
|
21
|
+
|
|
22
|
+
# tks-essentials
|
|
23
|
+
A library with essentials needed in every backend python app. e.g. logging, local db connection, filtering, formatting etc.
|
|
24
|
+
|
|
25
|
+
## Sponsors
|
|
26
|
+
Freya Alpha,
|
|
27
|
+
The Kára System,
|
|
28
|
+
Spark & Hale Robotic Industries
|
|
29
|
+
|
|
30
|
+
## General
|
|
31
|
+
Run and compiled for Python 3.9.13.
|
|
32
|
+
Expected to run for Python 3+
|
|
33
|
+
|
|
34
|
+
## Development
|
|
35
|
+
|
|
36
|
+
### Testing
|
|
37
|
+
run tests with `pytest -s -vv` to see all the details.
|
|
38
|
+
|
|
39
|
+
### Installation as Consuming Developer
|
|
40
|
+
|
|
41
|
+
Simply run: `pip install tks-essentials`
|
|
42
|
+
|
|
43
|
+
Import in modules without the dash (e.g.): `from tksessentials import global_logger`
|
|
44
|
+
|
|
45
|
+
### Setup as Contributor
|
|
46
|
+
Create the virtual environment:
|
|
47
|
+
```
|
|
48
|
+
py -m venv .venv
|
|
49
|
+
```
|
|
50
|
+
Start the Environment:
|
|
51
|
+
```
|
|
52
|
+
./.venv/Scripts/activate
|
|
53
|
+
```
|
|
54
|
+
(or allow VS Code to start it). Use `deactivate` to stop it.
|
|
55
|
+
|
|
56
|
+
All the required libraries must be listed in requirements.txt and installed by
|
|
57
|
+
```
|
|
58
|
+
python -m pip install -r .\requirements.txt
|
|
59
|
+
```
|
|
60
|
+
For Dev use
|
|
61
|
+
```
|
|
62
|
+
python -m pip install -r .\requirements-dev.txt
|
|
63
|
+
```
|
|
64
|
+
|
|
65
|
+
To cleanup the environment run:
|
|
66
|
+
```
|
|
67
|
+
pip3 freeze > to-uninstall.txt
|
|
68
|
+
```
|
|
69
|
+
and then
|
|
70
|
+
```
|
|
71
|
+
pip3 uninstall -y -r to-uninstall.txt
|
|
72
|
+
```
|
|
73
|
+
|
|
74
|
+
or
|
|
75
|
+
```
|
|
76
|
+
pip3 install pip-autoremove
|
|
77
|
+
```
|
|
78
|
+
|
|
79
|
+
### Build Library
|
|
80
|
+
Prerequisite: make sure that you give your operating-system user the right to modify files in the Python directory, i.e. the directory where Python is installed.
|
|
81
|
+
Use
|
|
82
|
+
```python setup.py bdist_wheel```
|
|
83
|
+
to create the dist, build and .eggs folder.
|
|
84
|
+
|
|
85
|
+
## Reference from a different project
|
|
86
|
+
In order to use your own version of the project - to maybe contribute to the library - simply clone the code from github into new directory. Then add the path of that new directory to the requirements.txt file of your project. Then change in tks-essentials whatever you recommend to improve. Don't forget the Open-Closed Principle: extend only (unless it requires a breaking change)
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
## Releasing a new version
|
|
90
|
+
|
|
91
|
+
This is entirely executed with Github Actions.
|
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
# tks-essentials
|
|
2
|
+
A library with essentials needed in every backend python app. e.g. logging, local db connection, filtering, formatting etc.
|
|
3
|
+
|
|
4
|
+
## Sponsors
|
|
5
|
+
Freya Alpha,
|
|
6
|
+
The Kára System,
|
|
7
|
+
Spark & Hale Robotic Industries
|
|
8
|
+
|
|
9
|
+
## General
|
|
10
|
+
Run and compiled for Python 3.9.13.
|
|
11
|
+
Expected to run for Python 3+
|
|
12
|
+
|
|
13
|
+
## Development
|
|
14
|
+
|
|
15
|
+
### Testing
|
|
16
|
+
run tests with `pytest -s -vv` to see all the details.
|
|
17
|
+
|
|
18
|
+
### Installation as Consuming Developer
|
|
19
|
+
|
|
20
|
+
Simply run: `pip install tks-essentials`
|
|
21
|
+
|
|
22
|
+
Import in modules without the dash (e.g.): `from tksessentials import global_logger`
|
|
23
|
+
|
|
24
|
+
### Setup as Contributor
|
|
25
|
+
Create the virtual environment:
|
|
26
|
+
```
|
|
27
|
+
py -m venv .venv
|
|
28
|
+
```
|
|
29
|
+
Start the Environment:
|
|
30
|
+
```
|
|
31
|
+
./.venv/Scripts/activate
|
|
32
|
+
```
|
|
33
|
+
(or allow VS Code to start it). Use `deactivate` to stop it.
|
|
34
|
+
|
|
35
|
+
All the required libraries must be listed in requirements.txt and installed by
|
|
36
|
+
```
|
|
37
|
+
python -m pip install -r .\requirements.txt
|
|
38
|
+
```
|
|
39
|
+
For Dev use
|
|
40
|
+
```
|
|
41
|
+
python -m pip install -r .\requirements-dev.txt
|
|
42
|
+
```
|
|
43
|
+
|
|
44
|
+
To cleanup the environment run:
|
|
45
|
+
```
|
|
46
|
+
pip3 freeze > to-uninstall.txt
|
|
47
|
+
```
|
|
48
|
+
and then
|
|
49
|
+
```
|
|
50
|
+
pip3 uninstall -y -r to-uninstall.txt
|
|
51
|
+
```
|
|
52
|
+
|
|
53
|
+
or
|
|
54
|
+
```
|
|
55
|
+
pip3 install pip-autoremove
|
|
56
|
+
```
|
|
57
|
+
|
|
58
|
+
### Build Library
|
|
59
|
+
Prerequisite: make sure that you give your operating-system user the right to modify files in the Python directory, i.e. the directory where Python is installed.
|
|
60
|
+
Use
|
|
61
|
+
```python setup.py bdist_wheel```
|
|
62
|
+
to create the dist, build and .eggs folder.
|
|
63
|
+
|
|
64
|
+
## Reference from a different project
|
|
65
|
+
In order to use your own version of the project - to maybe contribute to the library - simply clone the code from github into new directory. Then add the path of that new directory to the requirements.txt file of your project. Then change in tks-essentials whatever you recommend to improve. Don't forget the Open-Closed Principle: extend only (unless it requires a breaking change)
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
## Releasing a new version
|
|
69
|
+
|
|
70
|
+
This is entirely executed with Github Actions.
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
# pyproject.toml
|
|
2
|
+
|
|
3
|
+
[build-system]
|
|
4
|
+
requires = ["setuptools>=66.1.0", "wheel"]
|
|
5
|
+
build-backend = "setuptools.build_meta"
|
|
6
|
+
|
|
7
|
+
[project]
|
|
8
|
+
name = "tks-essentials"
|
|
9
|
+
version = "0.1.1"
|
|
10
|
+
description = "Essentials for our TKS projects."
|
|
11
|
+
readme = "README.md"
|
|
12
|
+
authors = [{ name = "Brayan", email = "brayan@sparkandhale.com" }]
|
|
13
|
+
#license = { file = "LICENSE" }
|
|
14
|
+
classifiers = [
|
|
15
|
+
"License :: OSI Approved :: MIT License",
|
|
16
|
+
"Programming Language :: Python"
|
|
17
|
+
]
|
|
18
|
+
keywords = ["finance", "trading", "models"]
|
|
19
|
+
dependencies = [
|
|
20
|
+
"redis",
|
|
21
|
+
'tomli; python_version < "3.11"',
|
|
22
|
+
]
|
|
23
|
+
requires-python = ">=3.9"
|
|
24
|
+
|
|
25
|
+
[project.optional-dependencies]
|
|
26
|
+
dev = ["black", "bumpver", "isort", "pip-tools", "pytest"]
|
|
27
|
+
|
|
28
|
+
[project.urls]
|
|
29
|
+
Homepage = "https://github.com/The-Kara-System/tks-essentials"
|
|
30
|
+
|
|
31
|
+
[tool.bumpver]
|
|
32
|
+
current_version = "0.1.1"
|
|
33
|
+
version_pattern = "MAJOR.MINOR.PATCH"
|
|
34
|
+
commit_message = "bump version {old_version} -> {new_version}"
|
|
35
|
+
commit = true
|
|
36
|
+
tag = true
|
|
37
|
+
push = true
|
|
38
|
+
|
|
39
|
+
[tool.bumpver.file_patterns]
|
|
40
|
+
"pyproject.toml" = [
|
|
41
|
+
'current_version = "{version}"',
|
|
42
|
+
'version = "{version}"',
|
|
43
|
+
]
|
|
44
|
+
#"setup.py" = [
|
|
45
|
+
# "version = '{version}'",
|
|
46
|
+
#]
|
|
47
|
+
#"README.md" = [
|
|
48
|
+
# "{version}",
|
|
49
|
+
# "{pep440_version}",
|
|
50
|
+
#]
|
|
51
|
+
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
"""Setuptools entry point for the tks-essentials distribution.

All package metadata (name, version, dependencies, ...) lives in
pyproject.toml; this shim only supplies package discovery for legacy
``python setup.py`` style builds.
"""
from setuptools import find_packages, setup

setup(
    # Distribution name; the importable package is ``tksessentials``.
    name="tks-essentials",
    packages=find_packages(
        include=["tksessentials"],
        exclude=["tests*"],
    ),
)
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
import pytest
|
|
2
|
+
from tksessentials.asset_formatter import AssetFormatter, BinanceFormatter, BybitFormatter
|
|
3
|
+
|
|
4
|
+
# Test unwrap_symbol
|
|
5
|
+
def test_unwrap_symbol():
    """Known wrapped symbols unwrap; unknown symbols pass through unchanged."""
    fmt = AssetFormatter()
    assert fmt.unwrap_symbol('WETH') == 'ETH'
    # 'BTC' has no wrapped counterpart in symbol_pairs, so it is returned as-is.
    assert fmt.unwrap_symbol('BTC') == 'BTC'
|
|
9
|
+
|
|
10
|
+
# Test unwrap_market
|
|
11
|
+
def test_unwrap_market():
    """Only the base leg of a market is unwrapped; the delimiter survives."""
    fmt = AssetFormatter()
    assert fmt.unwrap_market('WETH/USDT') == 'ETH/USDT'
    assert fmt.unwrap_market('BTC-USDT') == 'BTC-USDT'
|
|
15
|
+
|
|
16
|
+
@pytest.mark.parametrize(
    "raw_markets, expected",
    [
        (["BTC/USDT", "ETH-USDT", "SOL/USDT"], ["BTC/USDT", "ETH-USDT", "SOL/USDT"]),  # all valid
        (["BTCUSDT", "ETH*USDT", "SOLUSDT"], []),  # all invalid (missing '-' or '/')
        (["BTC/USDT", "ETH*USDT", "SOL-USDT"], ["BTC/USDT", "SOL-USDT"]),  # mix of valid and invalid
        (["BTC/USDTLONGG", "SHORT/ETH", "12CHARS-XY"], ["SHORT/ETH", "12CHARS-XY"]),  # length + required chars
        (["BTC-USDT", "ETH/USDT", "SOL"], ["BTC-USDT", "ETH/USDT"]),  # one entry lacks a delimiter
        ([], []),  # empty input yields empty output
    ],
)
def test_clean_markets(raw_markets, expected):
    """clean_markets drops malformed, over-long, or delimiter-less entries."""
    assert AssetFormatter().clean_markets(raw_markets) == expected
|
|
26
|
+
|
|
27
|
+
# Test get_base and get_quote
|
|
28
|
+
def test_get_base_and_quote():
    """Base and quote legs are extracted from a slash-delimited pair."""
    fmt = AssetFormatter()
    assert fmt.get_base('BTC/USDT') == 'BTC'
    assert fmt.get_quote('BTC/USDT') == 'USDT'
|
|
32
|
+
# Add more tests for different formats and edge cases
|
|
33
|
+
|
|
34
|
+
# Test format_pair_default
|
|
35
|
+
def test_format_pair_default():
    """A concatenated pair is normalised to slash notation."""
    assert AssetFormatter().format_pair_default('BTCUSDT') == 'BTC/USDT'
|
|
38
|
+
# Add tests for different formats and edge cases
|
|
39
|
+
|
|
40
|
+
# Test format_to_dash and format_to_slash
|
|
41
|
+
def test_format_to_dash_and_slash():
    """Delimiter conversion works in both directions."""
    fmt = AssetFormatter()
    assert fmt.format_to_dash('BTC/USDT') == 'BTC-USDT'
    assert fmt.format_to_slash('BTC-USDT') == 'BTC/USDT'
|
|
45
|
+
|
|
46
|
+
# Test format_set_to_dash and format_set_to_slash
|
|
47
|
+
def test_format_set_to_dash_and_slash():
    """Set-level helpers apply the pair formatter element-wise."""
    fmt = AssetFormatter()
    pairs = {'BTC/USDT', 'ETH/USDT'}
    assert fmt.format_set_to_dash(pairs) == {'BTC-USDT', 'ETH-USDT'}
    assert fmt.format_set_to_slash(pairs) == {'BTC/USDT', 'ETH/USDT'}
|
|
52
|
+
|
|
53
|
+
# Tests for BinanceFormatter and BybitFormatter
|
|
54
|
+
def test_binance_formatter():
    """Binance pairs are formatted with a dash."""
    assert BinanceFormatter().format_pair('BTC/USDT') == 'BTC-USDT'
|
|
57
|
+
|
|
58
|
+
def test_bybit_formatter():
    """Bybit pairs are formatted with an underscore."""
    assert BybitFormatter().format_pair('BTC/USDT') == 'BTC_USDT'
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import pytest
|
|
2
|
+
from tksessentials.security import Crypto, IPSecurity
|
|
3
|
+
from famodels.blocked_ip import BlockedIpReasonType
|
|
4
|
+
|
|
5
|
+
AES_SECRET = "ZufDdKmoYgBv272G0DQWqz8Ng9ewM+IMGIMzkRQUoVNujiCHfdD4EULwXtn5fvwL"
|
|
6
|
+
|
|
7
|
+
@pytest.mark.parametrize("plaintext", ["test", "123908234ÖÄ$ASDFdd", "äjklöèü"])
def test_encrypt_and_decrypt_text_message(monkeypatch, plaintext):
    """Arbitrary text (incl. non-ASCII) survives an encrypt/decrypt round trip."""
    monkeypatch.setenv("AES_SECRET", AES_SECRET)
    crypto = Crypto()

    round_tripped = crypto.decrypt_as_text(crypto.encrypt_as_text(plaintext))

    assert round_tripped == plaintext
|
|
16
|
+
|
|
17
|
+
@pytest.mark.parametrize("plaintext", ["firstname.name@domain.com"])
def test_encrypt_and_decrypt_email_address(monkeypatch, plaintext):
    """An e-mail address survives an encrypt/decrypt round trip."""
    monkeypatch.setenv("AES_SECRET", AES_SECRET)
    crypto = Crypto()

    round_tripped = crypto.decrypt_as_text(crypto.encrypt_as_text(plaintext))

    assert round_tripped == plaintext
|
|
26
|
+
|
|
27
|
+
# FAILING - NEEDS TO BE FIXED
|
|
28
|
+
# @pytest.mark.parametrize("ip_address, blocking_reason", [("155.255.452.55", BlockedIpReasonType.EXCESSIVE_FAILED_LOGIN_ATTEMPTS)])
|
|
29
|
+
# def test_block_ip_address(ip_address: str, blocking_reason: BlockedIpReasonType):
|
|
30
|
+
# ipSecurity = IPSecurity()
|
|
31
|
+
|
|
32
|
+
# assert ipSecurity.block_ip(ip_address, blocking_reason) is True
|
|
33
|
+
# assert ipSecurity.is_ip_blocked(ip_address) is True
|
|
34
|
+
# assert ipSecurity.unblock_ip(ip_address) is True
|
|
35
|
+
# assert ipSecurity.is_ip_blocked(ip_address) is False
|
|
@@ -0,0 +1,120 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import pathlib
|
|
3
|
+
import pytest
|
|
4
|
+
import yaml
|
|
5
|
+
from tksessentials import utils
|
|
6
|
+
|
|
7
|
+
@pytest.fixture
def mock_project_root(monkeypatch, tmp_path):
    """Redirect ``utils.PROJECT_ROOT`` to a temp dir for one test.

    Uses ``monkeypatch`` (which was injected but unused before) so the
    original value is restored after the test instead of leaking the
    override into every later test in the session.
    """
    monkeypatch.setattr(utils, "PROJECT_ROOT", tmp_path, raising=False)
    return tmp_path
|
|
12
|
+
|
|
13
|
+
def test_get_project_root(mock_project_root):
    """get_project_root returns the patched project root as a string."""
    assert utils.get_project_root() == str(mock_project_root)
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def test_get_project_root_path(mock_project_root):
    """get_project_root_path returns the patched project root as a path object."""
    assert utils.get_project_root_path() == mock_project_root
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def test_get_app_config(monkeypatch, tmp_path):
    """get_app_config reads config/app_config.yaml under the project root."""
    expected = {
        "application": "TestApp",
        "domain": "test.domain.com",
        "env": "DEV",
        "logging_level": "INFO",
    }

    # Write the mock configuration into a temporary config directory.
    config_dir = tmp_path / "config"
    config_dir.mkdir()
    with open(config_dir / "app_config.yaml", "w") as fh:
        yaml.dump(expected, fh)

    # Make utils resolve the project root to the temporary directory.
    monkeypatch.setattr(utils, "find_project_root", lambda *args, **kwargs: tmp_path)

    assert utils.get_app_config() == expected
|
|
46
|
+
|
|
47
|
+
def test_find_project_root():
    """find_project_root resolves without raising for this test file's path."""
    try:
        project_root = utils.find_project_root(pathlib.Path(__file__).resolve())
        print(project_root)
    except Exception as e:
        # pytest.fail instead of `assert False`: asserts are stripped when
        # Python runs with -O, which would silently pass this test.
        pytest.fail(f"An exception was thrown: {e}")
|
|
54
|
+
|
|
55
|
+
def test_get_application_name_success(monkeypatch):
    """The 'application' key from the config is returned verbatim."""
    monkeypatch.setattr(utils, "get_app_config", lambda: {"application": "TestApp"})
    assert utils.get_application_name() == "TestApp"
|
|
61
|
+
|
|
62
|
+
def test_get_application_name_failure(monkeypatch):
    """A config without an 'application' key raises ValueError."""
    # `dict` called with no arguments yields an empty config.
    monkeypatch.setattr(utils, "get_app_config", dict)
    with pytest.raises(ValueError):
        utils.get_application_name()
|
|
69
|
+
|
|
70
|
+
def test_get_domain_name_success(monkeypatch):
    """The 'domain' key from the config is returned verbatim."""
    monkeypatch.setattr(utils, "get_app_config", lambda: {"domain": "test.domain.com"})
    assert utils.get_domain_name() == "test.domain.com"
|
|
76
|
+
|
|
77
|
+
def test_get_domain_name_failure(monkeypatch):
    """A config without a 'domain' key raises ValueError."""
    # `dict` called with no arguments yields an empty config.
    monkeypatch.setattr(utils, "get_app_config", dict)
    with pytest.raises(ValueError):
        utils.get_domain_name()
|
|
84
|
+
|
|
85
|
+
def test_get_redis_cluster_service_name_with_env_in_dev(monkeypatch):
    """In DEV, configured cluster nodes are ignored in favour of the local marker."""
    monkeypatch.setenv("REDIS_CLUSTER_NODES", "redis-node1:1234")
    monkeypatch.setenv("ENV", "DEV")
    assert utils.get_redis_cluster_service_name() == ["UNDEFINED - EMPLOYING LOCAL CLUSTER"]
|
|
90
|
+
|
|
91
|
+
def test_get_redis_cluster_service_name_with_env_in_prod(monkeypatch):
    """In PROD, the host:port entry is split into its two components."""
    monkeypatch.setenv("REDIS_CLUSTER_NODES", "redis-node1:1234")
    monkeypatch.setenv("ENV", "PROD")
    assert utils.get_redis_cluster_service_name() == ["redis-node1", "1234"]
|
|
96
|
+
|
|
97
|
+
def test_get_redis_cluster_service_name_default(monkeypatch):
    """Without REDIS_CLUSTER_NODES the local-cluster marker is returned."""
    monkeypatch.delenv("REDIS_CLUSTER_NODES", raising=False)
    assert utils.get_redis_cluster_service_name() == ["UNDEFINED - EMPLOYING LOCAL CLUSTER"]
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
@pytest.mark.skipif(os.environ.get("GITHUB_ACTIONS") == "true", reason="Requires DEV environment with a local Redis cluster and should not run on GitHub Actions")
def test_get_redis_cluster_client_add_method():
    """Smoke test: set/get/delete a key through the RedisCluster client."""
    rc = utils.get_redis_cluster_client()

    test_key = "test_key"
    test_value = "test_value"
    rc.set(test_key, test_value)
    try:
        # Retrieve the value to verify the write landed.
        value = rc.get(test_key)
    finally:
        # Always delete the test key, even if the read raised, so a failing
        # run does not leave state behind in the shared dev cluster.
        rc.delete(test_key)

    assert value == test_value
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
from email_validator import EmailSyntaxError
|
|
2
|
+
import pytest
|
|
3
|
+
from tksessentials import validators
|
|
4
|
+
|
|
5
|
+
@pytest.mark.parametrize("email", ["john.doe@gmail.com"])
def test_email_address(email):
    """A syntactically valid address passes validation without raising."""
    try:
        validators.validate_email(email)
    except EmailSyntaxError:
        pytest.fail("validate_email() raised exception unexpectedly!")
|
|
11
|
+
|
|
12
|
+
@pytest.mark.parametrize(
    "email",
    [
        "name@com",
        "@domain.com",
        "name@domain",
        "firstname.name@DOMAIN-THAT-PROBABLY-WILL-NEVER-EXIST-TEST-0123.com",
    ],
)
def test_wrong_email_addresses(email):
    """Malformed or unresolvable addresses must be rejected."""
    with pytest.raises(Exception):
        validators.validate_email(email)
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
@pytest.mark.parametrize("ip_address", ["100.128.0.0"])
def test_ip_addresses(ip_address: str):
    """A valid dotted-quad address passes validation without raising."""
    try:
        validators.validate_ip_address(ip_address)
    except ValueError:
        pytest.fail("validate_ip_address() raised exception unexpectedly!")
|
|
24
|
+
|
|
25
|
+
@pytest.mark.parametrize(
    "ip_address",
    [
        None,
        "127 .0.0.1",
        "999.255.255.255",
        "100.128.0.0/222",
    ],
)
def test_wrong_ip_addresses(ip_address: str):
    """None, malformed, out-of-range or CIDR-suffixed inputs must be rejected."""
    with pytest.raises(Exception):
        validators.validate_ip_address(ip_address)
|
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
Metadata-Version: 2.1
|
|
2
|
+
Name: tks-essentials
|
|
3
|
+
Version: 0.1.1
|
|
4
|
+
Summary: Essentials for our TKS projects.
|
|
5
|
+
Author-email: Brayan <brayan@sparkandhale.com>
|
|
6
|
+
Project-URL: Homepage, https://github.com/The-Kara-System/tks-essentials
|
|
7
|
+
Keywords: finance,trading,models
|
|
8
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
9
|
+
Classifier: Programming Language :: Python
|
|
10
|
+
Requires-Python: >=3.9
|
|
11
|
+
Description-Content-Type: text/markdown
|
|
12
|
+
License-File: LICENSE
|
|
13
|
+
Requires-Dist: redis
|
|
14
|
+
Requires-Dist: tomli; python_version < "3.11"
|
|
15
|
+
Provides-Extra: dev
|
|
16
|
+
Requires-Dist: black; extra == "dev"
|
|
17
|
+
Requires-Dist: bumpver; extra == "dev"
|
|
18
|
+
Requires-Dist: isort; extra == "dev"
|
|
19
|
+
Requires-Dist: pip-tools; extra == "dev"
|
|
20
|
+
Requires-Dist: pytest; extra == "dev"
|
|
21
|
+
|
|
22
|
+
# tks-essentials
|
|
23
|
+
A library with essentials needed in every backend python app. e.g. logging, local db connection, filtering, formatting etc.
|
|
24
|
+
|
|
25
|
+
## Sponsors
|
|
26
|
+
Freya Alpha,
|
|
27
|
+
The Kára System,
|
|
28
|
+
Spark & Hale Robotic Industries
|
|
29
|
+
|
|
30
|
+
## General
|
|
31
|
+
Run and compiled for Python 3.9.13.
|
|
32
|
+
Expected to run for Python 3+
|
|
33
|
+
|
|
34
|
+
## Development
|
|
35
|
+
|
|
36
|
+
### Testing
|
|
37
|
+
run tests with `pytest -s -vv` to see all the details.
|
|
38
|
+
|
|
39
|
+
### Installation as Consuming Developer
|
|
40
|
+
|
|
41
|
+
Simply run: `pip install tks-essentials`
|
|
42
|
+
|
|
43
|
+
Import in modules without the dash (e.g.): `from tksessentials import global_logger`
|
|
44
|
+
|
|
45
|
+
### Setup as Contributor
|
|
46
|
+
Create the virtual environment:
|
|
47
|
+
```
|
|
48
|
+
py -m venv .venv
|
|
49
|
+
```
|
|
50
|
+
Start the Environment:
|
|
51
|
+
```
|
|
52
|
+
./.venv/Scripts/activate
|
|
53
|
+
```
|
|
54
|
+
(or allow VS Code to start it). Use `deactivate` to stop it.
|
|
55
|
+
|
|
56
|
+
All the required libraries must be listed in requirements.txt and installed by
|
|
57
|
+
```
|
|
58
|
+
python -m pip install -r .\requirements.txt
|
|
59
|
+
```
|
|
60
|
+
For Dev use
|
|
61
|
+
```
|
|
62
|
+
python -m pip install -r .\requirements-dev.txt
|
|
63
|
+
```
|
|
64
|
+
|
|
65
|
+
To cleanup the environment run:
|
|
66
|
+
```
|
|
67
|
+
pip3 freeze > to-uninstall.txt
|
|
68
|
+
```
|
|
69
|
+
and then
|
|
70
|
+
```
|
|
71
|
+
pip3 uninstall -y -r to-uninstall.txt
|
|
72
|
+
```
|
|
73
|
+
|
|
74
|
+
or
|
|
75
|
+
```
|
|
76
|
+
pip3 install pip-autoremove
|
|
77
|
+
```
|
|
78
|
+
|
|
79
|
+
### Build Library
|
|
80
|
+
Prerequisite: make sure that you give your operating-system user the right to modify files in the Python directory, i.e. the directory where Python is installed.
|
|
81
|
+
Use
|
|
82
|
+
```python setup.py bdist_wheel```
|
|
83
|
+
to create the dist, build and .eggs folder.
|
|
84
|
+
|
|
85
|
+
## Reference from a different project
|
|
86
|
+
In order to use your own version of the project - to maybe contribute to the library - simply clone the code from github into new directory. Then add the path of that new directory to the requirements.txt file of your project. Then change in tks-essentials whatever you recommend to improve. Don't forget the Open-Closed Principle: extend only (unless it requires a breaking change)
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
## Releasing a new version
|
|
90
|
+
|
|
91
|
+
This is entirely executed with Github Actions.
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
LICENSE
|
|
2
|
+
README.md
|
|
3
|
+
pyproject.toml
|
|
4
|
+
setup.py
|
|
5
|
+
tests/test_asset_formatter.py
|
|
6
|
+
tests/test_security.py
|
|
7
|
+
tests/test_utils.py
|
|
8
|
+
tests/test_validators.py
|
|
9
|
+
tks_essentials.egg-info/PKG-INFO
|
|
10
|
+
tks_essentials.egg-info/SOURCES.txt
|
|
11
|
+
tks_essentials.egg-info/dependency_links.txt
|
|
12
|
+
tks_essentials.egg-info/requires.txt
|
|
13
|
+
tks_essentials.egg-info/top_level.txt
|
|
14
|
+
tksessentials/__init__.py
|
|
15
|
+
tksessentials/asset_formatter.py
|
|
16
|
+
tksessentials/constants.py
|
|
17
|
+
tksessentials/global_logger.py
|
|
18
|
+
tksessentials/security.py
|
|
19
|
+
tksessentials/utils.py
|
|
20
|
+
tksessentials/validators.py
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
tksessentials
|
|
File without changes
|
|
@@ -0,0 +1,134 @@
|
|
|
1
|
+
import re
from typing import Optional, Set

from . import global_logger
|
|
4
|
+
|
|
5
|
+
class AssetFormatter:
|
|
6
|
+
def __init__(self, default_quote_asset: str = "USDT") -> None:
|
|
7
|
+
self.default_quote_asset = default_quote_asset
|
|
8
|
+
self.logger = global_logger.setup_custom_logger("app")
|
|
9
|
+
self.symbol_pairs = {"WETH": "ETH"}
|
|
10
|
+
|
|
11
|
+
def unwrap_symbol(self, wrapped_symbol):
|
|
12
|
+
# Convert the input to uppercase to make the function case-insensitive
|
|
13
|
+
wrapped_symbol_upper = wrapped_symbol.upper()
|
|
14
|
+
# Return the unwrapped symbol, or the original symbol if not found
|
|
15
|
+
return self.symbol_pairs.get(wrapped_symbol_upper, wrapped_symbol)
|
|
16
|
+
|
|
17
|
+
def get_wrapped_symbol(self, original_symbol) -> str:
|
|
18
|
+
"""Returns the wrapped symbol for an asset. Or None"""
|
|
19
|
+
for key, val in self.symbol_pairs.items():
|
|
20
|
+
if val == original_symbol:
|
|
21
|
+
return key
|
|
22
|
+
return None
|
|
23
|
+
|
|
24
|
+
def unwrap_market(self, market):
|
|
25
|
+
# Extract the base and quote assets from the market string
|
|
26
|
+
base_asset = self.get_base(market)
|
|
27
|
+
quote_asset = self.get_quote(market)
|
|
28
|
+
unwrapped_base = self.unwrap_symbol(base_asset)
|
|
29
|
+
delimiter = "/"
|
|
30
|
+
if "-" in market:
|
|
31
|
+
delimiter = "-"
|
|
32
|
+
|
|
33
|
+
# Combine the unwrapped base asset with the quote asset
|
|
34
|
+
return f"{unwrapped_base}{delimiter}{quote_asset}"
|
|
35
|
+
|
|
36
|
+
def clean_markets(self, markets: dict) -> dict:
|
|
37
|
+
"""
|
|
38
|
+
Removes all non-compliant markets, those exceeding 12 characters in length,
|
|
39
|
+
and those not containing a '-' or '/'.
|
|
40
|
+
"""
|
|
41
|
+
allowed_chars_pattern = re.compile(r"^[a-zA-Z0-9/-]{1,12}$")
|
|
42
|
+
must_have_chars = {"-", "/"}
|
|
43
|
+
|
|
44
|
+
cleaned_markets = []
|
|
45
|
+
|
|
46
|
+
for market in markets:
|
|
47
|
+
if allowed_chars_pattern.match(market) and any(
|
|
48
|
+
char in market for char in must_have_chars
|
|
49
|
+
):
|
|
50
|
+
cleaned_markets.append(market)
|
|
51
|
+
else:
|
|
52
|
+
self.logger.info(
|
|
53
|
+
f"Invalid market format, length, or missing required characters found and removed: {market}"
|
|
54
|
+
)
|
|
55
|
+
self.logger.debug(f"Cleaned markets: {cleaned_markets}")
|
|
56
|
+
return cleaned_markets
|
|
57
|
+
|
|
58
|
+
def get_base(self, pair_string: str) -> str:
|
|
59
|
+
"""Returns the base asset. From SOL/USDT that is SOL."""
|
|
60
|
+
match = re.match(r"([A-Za-z]+)[/-]", pair_string)
|
|
61
|
+
if match:
|
|
62
|
+
base_asset = match.group(1)
|
|
63
|
+
return base_asset
|
|
64
|
+
else:
|
|
65
|
+
raise ValueError("Invalid pair string format")
|
|
66
|
+
|
|
67
|
+
def get_quote(self, pair_string: str) -> str:
|
|
68
|
+
"""Returns the quote asset. From SOL/USDT that is USDT."""
|
|
69
|
+
match = re.match(r"[A-Za-z]+[/-]([A-Za-z]+)", pair_string)
|
|
70
|
+
if match:
|
|
71
|
+
quote_asset = match.group(1)
|
|
72
|
+
return quote_asset
|
|
73
|
+
else:
|
|
74
|
+
# Fall back to default quote asset if not found
|
|
75
|
+
return self.default_quote_asset
|
|
76
|
+
|
|
77
|
+
def format_pair(self, pair_string: str) -> str:
|
|
78
|
+
raise NotImplementedError("This method should be implemented by subclasses.")
|
|
79
|
+
|
|
80
|
+
def format_set_of_pairs(self, pairs: Set[str]) -> Set[str]:
|
|
81
|
+
return {self.format_pair(pair) for pair in pairs}
|
|
82
|
+
|
|
83
|
+
def format_pair_default(self, pair_string: str) -> str:
|
|
84
|
+
"""Will format the pair string to a slash-format. Mainly used for the ccxt LIB."""
|
|
85
|
+
# Regex pattern to match 'BTCUSDT', 'BTC/USDT', and 'BTC-USDT'
|
|
86
|
+
pattern = r"([A-Za-z]+)[/-]?(USDT)$"
|
|
87
|
+
match = re.match(pattern, pair_string)
|
|
88
|
+
|
|
89
|
+
if match:
|
|
90
|
+
base_asset, quote_asset = match.groups()
|
|
91
|
+
formatted_pair = f"{base_asset}/{quote_asset}"
|
|
92
|
+
return formatted_pair
|
|
93
|
+
else:
|
|
94
|
+
self.logger.error(f"Invalid pair format: {pair_string}")
|
|
95
|
+
raise ValueError(f"Invalid pair format: {pair_string}")
|
|
96
|
+
|
|
97
|
+
def format_to_dash(self, pair_string: str) -> str:
|
|
98
|
+
"""Takes any combination of a market pair and replaces the divider by a dash (-)."""
|
|
99
|
+
match = re.match(r"([A-Za-z0-9]+)[/-]?([A-Za-z0-9]+)", pair_string)
|
|
100
|
+
if match is None:
|
|
101
|
+
raise ValueError(f"Invalid market pair format: '{pair_string}'")
|
|
102
|
+
base_asset, _ = match.groups()
|
|
103
|
+
return f"{base_asset}-{self.default_quote_asset}"
|
|
104
|
+
|
|
105
|
+
def format_to_slash(self, pair_string: str) -> str:
    """Takes any combination of a market pair and replaces the divider by a front slash (/).

    Accepts 'BTC/USDT', 'BTC-USDT' and 'BTCUSDT' and always returns
    '<base>/<default_quote_asset>'.

    Raises:
        ValueError: when the pair string is not a recognizable market pair.
    """
    # BUG FIX: the old single greedy regex split divider-less pairs wrong,
    # e.g. 'BTCUSDT' -> base 'BTCUSD' -> 'BTCUSD/USDT'.
    divided = re.match(r"([A-Za-z0-9]+)[/-]([A-Za-z0-9]+)$", pair_string)
    if divided:
        base_asset = divided.group(1)
    else:
        plain = re.match(r"([A-Za-z0-9]+)$", pair_string)
        if plain is None:
            raise ValueError(f"Invalid market pair format: '{pair_string}'")
        base_asset = plain.group(1)
        # Strip a trailing default quote asset ('BTCUSDT' style input).
        if base_asset.endswith(self.default_quote_asset) and len(base_asset) > len(self.default_quote_asset):
            base_asset = base_asset[: -len(self.default_quote_asset)]
    return f"{base_asset}/{self.default_quote_asset}"
|
|
112
|
+
|
|
113
|
+
def format_set_to_dash(self, markets: Set[str]) -> Set[str]:
    """Takes a set of market pairs and formats each to dash notation."""
    return set(map(self.format_to_dash, markets))
|
|
116
|
+
|
|
117
|
+
def format_set_to_slash(self, markets: Set[str]) -> Set[str]:
    """Takes a set of market pairs and formats each to front slash notation."""
    return set(map(self.format_to_slash, markets))
|
|
120
|
+
|
|
121
|
+
|
|
122
|
+
class BinanceFormatter(AssetFormatter):
    """Formats trading pairs for Binance, in '<base>-<quote>' notation."""

    def format_pair(self, pair_string: str) -> str:
        """Return '<base>-<default_quote_asset>' for the given pair.

        Raises:
            ValueError: when the pair string is not recognizable.
        """
        match = re.match(r"([A-Za-z]+)[/-]?([A-Za-z]+)", pair_string)
        # BUG FIX: a non-matching input previously crashed with
        # AttributeError on match.groups(); raise ValueError like the
        # other formatting methods do.
        if match is None:
            raise ValueError(f"Invalid pair format: {pair_string}")
        base_asset, _ = match.groups()
        # NOTE(review): for divider-less input ('BTCUSDT') the greedy first
        # group captures part of the quote ('BTCUSD') — confirm callers only
        # pass divided pairs, or route through format_to_dash instead.
        return f"{base_asset}-{self.default_quote_asset}"
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
class BybitFormatter(AssetFormatter):
    """Formats trading pairs for Bybit, in '<base>_<quote>' notation."""

    def format_pair(self, pair_string: str) -> str:
        """Return '<base>_<default_quote_asset>' for the given pair.

        Raises:
            ValueError: when the pair string is not recognizable.
        """
        match = re.match(r"([A-Za-z]+)[/-]?([A-Za-z]+)", pair_string)
        # BUG FIX: a non-matching input previously crashed with
        # AttributeError on match.groups(); raise ValueError like the
        # other formatting methods do.
        if match is None:
            raise ValueError(f"Invalid pair format: {pair_string}")
        base_asset, _ = match.groups()
        # Assuming Bybit format is slightly different, for example
        return f"{base_asset}_{self.default_quote_asset}"
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# Canonical text encoding for all str <-> bytes conversions in the package
# (used e.g. by security.Crypto for encrypt/decrypt round-trips).
DEFAULT_ENCODING = "utf-8"
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
import inspect
|
|
2
|
+
import logging
|
|
3
|
+
import queue
|
|
4
|
+
import logging.handlers
|
|
5
|
+
from . import utils
|
|
6
|
+
|
|
7
|
+
# Registry of configured loggers keyed by name; setup_custom_logger() checks
# it first so repeated calls can return the same instance.
loggers = {}
|
|
8
|
+
|
|
9
|
+
def setup_custom_logger(name):
    """Create (or return an already-configured) application logger.

    Records are emitted through a QueueHandler/QueueListener pair so that
    logging never blocks the caller; the listener forwards to a stream
    handler (visible in the k8s pod logs) and a daily-rotating file under
    the project's log path.

    Args:
        name: logger name; also the cache key in the module-level ``loggers``.

    Returns:
        logging.Logger: the configured logger.
    """
    global loggers

    # Fast path: reuse a logger configured by a previous call.
    if loggers.get(name):
        return loggers.get(name)

    formatter = logging.Formatter(
        "%(asctime)s [%(module)s:%(lineno)d] %(levelname)s %(message)s"
    )

    logger = logging.getLogger(name)
    # If handlers are already attached (e.g. configured elsewhere), adding
    # more would duplicate every record — return as-is.
    if logger.hasHandlers():
        loggers[name] = logger  # cache for subsequent calls
        return logger
    logger.propagate = False
    level = logging.getLevelName(utils.get_logging_level())
    logger.setLevel(level)
    log_queue = queue.Queue()
    queue_handler = logging.handlers.QueueHandler(log_queue)
    # Set the non-blocking handler first.
    logger.addHandler(queue_handler)

    # Stream is important for looking at the k8s pod logs.
    stream_handler = logging.StreamHandler()
    stream_handler.setLevel(logging.DEBUG)
    stream_handler.setFormatter(formatter)

    # The local logging is fine, but there should be a Fluent Bit integration
    # which ships logs to the central LMM instance.
    timerotating_handler = logging.handlers.TimedRotatingFileHandler(
        utils.get_log_path().joinpath("app_rolling.log"), when="D", backupCount=30
    )
    timerotating_handler.setLevel(utils.get_logging_level())
    timerotating_handler.setFormatter(formatter)
    listener = logging.handlers.QueueListener(
        log_queue, stream_handler, timerotating_handler, respect_handler_level=True
    )

    # Only print the following when instantiated by the main.py file — and not
    # other files. This ensures the important project variables are printed
    # exactly once on startup.
    current_stack = inspect.stack()
    if any("main.py" in frame.filename for frame in current_stack):
        logger.info(f"Starting {utils.get_application_name()} in {utils.get_environment()}.")
        logger.info(f"Domain {utils.get_domain_name()}")
        logger.info(f"Root {utils.get_project_root()}")
        logger.info(f"Log Path {utils.get_log_path()}")
        logger.info(f"Logging Level {utils.get_logging_level()}")
        logger.info(f"Redis Cluster Service Name {utils.get_redis_cluster_service_name()}")

    listener.start()

    # BUG FIX: the cache was never populated, so the loggers.get(name) lookup
    # above could never hit and the cache was dead code.
    loggers[name] = logger

    return logger
|
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
import base64
|
|
2
|
+
import json
|
|
3
|
+
import os
|
|
4
|
+
from tksessentials.constants import DEFAULT_ENCODING
|
|
5
|
+
from builtins import bytes
|
|
6
|
+
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
|
|
7
|
+
from cryptography.hazmat.primitives import padding
|
|
8
|
+
from cryptography.hazmat.backends import default_backend
|
|
9
|
+
from famodels.blocked_ip import BlockedIp, BlockedIpReasonType
|
|
10
|
+
from redis_om.model.model import NotFoundError
|
|
11
|
+
from tksessentials import global_logger, utils
|
|
12
|
+
|
|
13
|
+
def get_secret_key() -> str:
    """Returns the general encryption key to encrypt data.

    Raises:
        ValueError: when the ENCRYPTION_KEY environment variable is unset or empty.
    """
    encryption_key = os.getenv("ENCRYPTION_KEY")
    if encryption_key:
        return encryption_key
    raise ValueError("ENCRYPTION_KEY environment variable is not set or empty.")
|
|
19
|
+
|
|
20
|
+
def get_JWT_secret() -> str:
    """Returns JWT secret.

    Raises:
        ValueError: when the JWT_SECRET environment variable is unset or empty.
    """
    token_secret = os.getenv("JWT_SECRET")
    if token_secret:
        return token_secret
    raise ValueError("JWT_SECRET environment variable is not set or empty.")
|
|
26
|
+
|
|
27
|
+
def get_AES_secret() -> bytes:
    """Returns the AES secret as bytes.

    Raises:
        ValueError: when the AES_SECRET environment variable is unset or empty.
    """
    # BUG FIX: the value was previously passed straight into bytes(), so an
    # unset variable crashed with TypeError (bytes(None, ...)) before the
    # intended ValueError check could ever run. Validate first, encode after.
    aes_secret = os.getenv("AES_SECRET")
    if not aes_secret:
        raise ValueError("AES_SECRET environment variable is not set or empty.")
    return bytes(aes_secret, DEFAULT_ENCODING)
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
class Crypto:
    """Symmetric AES encryption helper for string values.

    NOTE(review): this uses AES in ECB mode, which is not semantically
    secure (identical plaintext blocks yield identical ciphertext blocks).
    Kept for compatibility with already-encrypted data — consider migrating
    to an authenticated mode such as AES-GCM.
    """

    def __init__(self):
        self.backend = default_backend()
        # Derive a 32-byte key from the configured AES secret.
        self.key = base64.urlsafe_b64encode(get_AES_secret())[:32]
        # Kept only for backward compatibility with code touching these
        # attributes; encrypt()/decrypt() now build a fresh context per call.
        self.encryptor = Cipher(algorithms.AES(self.key), modes.ECB(), self.backend).encryptor()
        self.decryptor = Cipher(algorithms.AES(self.key), modes.ECB(), self.backend).decryptor()

    def encrypt(self, value: str) -> bytes:
        """Encrypt *value* and return URL-safe base64 ciphertext bytes.

        BUG FIX: the previous implementation reused self.encryptor and
        called finalize() on it, so every call after the first raised
        AlreadyFinalized. A fresh context is now created per call.
        """
        byte_value = bytes(value, DEFAULT_ENCODING)
        padder = padding.PKCS7(algorithms.AES(self.key).block_size).padder()
        padded_data = padder.update(byte_value) + padder.finalize()
        encryptor = Cipher(algorithms.AES(self.key), modes.ECB(), self.backend).encryptor()
        encrypted_text = encryptor.update(padded_data) + encryptor.finalize()
        return base64.urlsafe_b64encode(encrypted_text)

    def encrypt_as_text(self, value) -> str:
        """Encrypt *value* and return the ciphertext as a str."""
        return str(self.encrypt(value), encoding=DEFAULT_ENCODING)

    def decrypt(self, value: str) -> bytes:
        """Decrypt URL-safe base64 ciphertext and return plaintext bytes."""
        byte_value = base64.urlsafe_b64decode(bytes(value, DEFAULT_ENCODING))
        decryptor = Cipher(algorithms.AES(self.key), modes.ECB(), self.backend).decryptor()
        # BUG FIX: finalize() was previously never called on the decryptor,
        # and the shared context made repeated calls unsafe.
        decrypted_data = decryptor.update(byte_value) + decryptor.finalize()
        unpadder = padding.PKCS7(algorithms.AES(self.key).block_size).unpadder()
        return unpadder.update(decrypted_data) + unpadder.finalize()

    def decrypt_as_text(self, value) -> str:
        """Decrypt *value* and return the plaintext as a str."""
        return str(self.decrypt(value), encoding=DEFAULT_ENCODING)
|
|
61
|
+
|
|
62
|
+
class IPSecurity():
    """Blocked-IP bookkeeping backed by the Redis cluster (RedisJSON)."""

    def __init__(self):
        self.rc = utils.get_redis_cluster_client()
        # All records live under "<domain>:blocked-ip:<ip_address>".
        self.db_path = f"{utils.get_domain_name()}:blocked-ip"
        self.logger = global_logger.setup_custom_logger("app")

    def __get_blocked_ip_from_database(self, ip_address: str) -> str:
        """Fetch the raw JSON record for *ip_address*, or None when absent."""
        blockedIP_json: str = None
        try:
            blockedIP_json = self.rc.execute_command("JSON.GET", f"{self.db_path}:{ip_address}")
        except NotFoundError:
            self.logger.info(f"There is no blocked ip with address {ip_address} in the database.")
        return blockedIP_json

    def get_blocked_ip(self, ip_address: str) -> BlockedIp:
        """Will try to fetch a blocked ip record in the database by the provided ip address """
        blockedIp: BlockedIp = None
        blockedIP_json = self.__get_blocked_ip_from_database(ip_address)
        if blockedIP_json:
            blockedIp_dict = json.loads(blockedIP_json)
            blockedIp = BlockedIp(**blockedIp_dict)
        return blockedIp

    def is_ip_blocked(self, ip_address: str) -> bool:
        """Will try to determine if provided ip address is in the blocked ip list"""
        return bool(self.__get_blocked_ip_from_database(ip_address))

    def block_ip(self, ip_address: str, blocking_reason: BlockedIpReasonType) -> bool:
        """Persist a BlockedIp record for *ip_address* with the given reason."""
        blockedIp = BlockedIp(ip_address=ip_address, blocking_reason=blocking_reason)
        blockedIpJsonModel = blockedIp.model_dump_json()

        self.logger.debug(f"About to block ip address '{ip_address}' due to {blocking_reason}")
        # BUG FIX: writes previously went to "<db_path>:blocked-ip:<ip>"
        # (duplicating the 'blocked-ip' segment already in db_path) while all
        # reads used "<db_path>:<ip>", so blocked IPs were never found again.
        return self.rc.execute_command("JSON.SET", f"{self.db_path}:{ip_address}", ".", blockedIpJsonModel)

    def unblock_ip(self, ip_address) -> bool:
        """Remove the blocked-ip record for *ip_address*."""
        self.logger.debug(f"About to unblock ip address '{ip_address}'")
        # BUG FIX: same key duplication as in block_ip (see above).
        return self.rc.execute_command("JSON.DEL", f"{self.db_path}:{ip_address}", ".")
|
|
@@ -0,0 +1,160 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import pathlib
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
import yaml
|
|
5
|
+
from redis.cluster import RedisCluster, ClusterNode
|
|
6
|
+
|
|
7
|
+
# Cached project-root path; populated once at import time by
# initialize_project_root() below.
PROJECT_ROOT = None
|
|
8
|
+
|
|
9
|
+
# Determine the project root path when the module is loaded
|
|
10
|
+
def find_project_root(current_path: pathlib.Path, max_depth: int = 10) -> pathlib.Path:
    """
    Recursively search for a marker (like the 'config' or 'logs' directory) to find the project root.

    Resolution order:
    1. The PROJECT_ROOT environment variable, when set.
    2. Walking up from *current_path* (at most *max_depth* levels) until a
       directory containing 'config' or 'logs' is found.
    3. The process working directory, when it contains such a marker
       (backward-compatible fallback, see below).

    Raises:
        FileNotFoundError: when no marker directory can be located.
    """
    # Check if PROJECT_ROOT environment variable is set
    project_root_env = os.getenv('PROJECT_ROOT')
    if project_root_env:
        return pathlib.Path(project_root_env)

    # BUG FIX: the old loop tested current_path.cwd() — Path.cwd() is a
    # classmethod returning the process CWD — on every iteration, so
    # current_path and its parents were never actually inspected. Each
    # ancestor of current_path is now checked.
    candidate = current_path
    for _ in range(max_depth):
        if (candidate / "config").exists() or (candidate / "logs").exists():
            return candidate
        candidate = candidate.parent

    # Backward-compatible fallback: the previous implementation effectively
    # answered with the CWD whenever it carried a marker directory.
    cwd = pathlib.Path.cwd()
    if (cwd / "config").exists() or (cwd / "logs").exists():
        return cwd

    raise FileNotFoundError(f"Could not find the project root within the provided path {current_path} \
                            with the max depth of {max_depth}. \
                            The current path is {cwd}. \
                            Ensure the 'config' or 'logs' folder exists in {str(current_path)}. \
                            The PROJECT_ROOT environement variable is: {project_root_env}")
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
# Initialize PROJECT_ROOT when the module is loaded
def initialize_project_root():
    """Resolve the project root (starting from this file's location) and
    cache it in the module-global PROJECT_ROOT."""
    global PROJECT_ROOT
    PROJECT_ROOT = find_project_root(pathlib.Path(__file__).resolve())

# Module import side effect: resolve the project root exactly once.
initialize_project_root()
|
|
37
|
+
|
|
38
|
+
def get_project_root_path() -> Path:
    """Return the cached project root as a pathlib.Path."""
    return PROJECT_ROOT
|
|
43
|
+
|
|
44
|
+
def get_project_root() -> str:
    """Return the cached project root as a string."""
    return str(PROJECT_ROOT)
|
|
48
|
+
|
|
49
|
+
def get_log_path() -> Path:
    """Return the absolute path of the 'logs' directory under the project root."""
    return get_project_root_path().joinpath("logs")
|
|
52
|
+
|
|
53
|
+
def get_secrets_path() -> Path:
    """Return the absolute path of the 'secrets' directory under the project root."""
    return get_project_root_path().joinpath("secrets")
|
|
56
|
+
|
|
57
|
+
def get_app_config() -> dict:
    """Load and return config/app_config.yaml as a dict.

    Raises:
        FileNotFoundError: when the file is missing/unreadable or contains
            invalid YAML (kept as FileNotFoundError for backward
            compatibility with existing callers).
    """
    try:
        project_root = find_project_root(pathlib.Path(__file__).resolve())
        config_path = project_root.joinpath("config/app_config.yaml")
        with open(config_path, "r") as ymlfile:
            return yaml.safe_load(ymlfile)
    # BUG FIX: only yaml.YAMLError was caught before, so a missing or
    # unreadable file escaped as a raw OSError from open().
    except (OSError, yaml.YAMLError) as ex:
        raise FileNotFoundError(
            f"Failed to load the config/app_config.yaml file. Aborting the application. Error: {ex}"
        ) from ex
|
|
69
|
+
|
|
70
|
+
def get_application_name() -> str:
    """Return the 'application' entry of the app config.

    Raises:
        ValueError: when the entry is missing.
    """
    name = get_app_config().get("application")
    if name is not None:
        return name
    raise ValueError("Application name not found in app_config.")
|
|
75
|
+
|
|
76
|
+
def get_domain_name() -> str:
    """Return the 'domain' entry of the app config.

    Raises:
        ValueError: when the entry is missing.
    """
    domain = get_app_config().get("domain")
    if domain is not None:
        return domain
    raise ValueError("Domain name not found in app_config.")
|
|
81
|
+
|
|
82
|
+
def get_environment() -> str:
    """Will fetch the environment variable ENV. If not present it will fall back to DEV """
    return os.getenv("ENV", "DEV")
|
|
85
|
+
|
|
86
|
+
def get_service_url() -> str:
    """This own service url value. This global environment variable is usually used by consumers apps of this API."""
    return os.getenv("OPENAPI_SERVICE_URL", "http://localhost:8080")

def get_service_doc_url() -> str:
    """Return the OpenAPI docs url.

    BUG FIX: the f-string previously interpolated the function object
    (``{get_service_url}``) instead of calling it, producing
    '<function get_service_url at 0x...>/docs'.
    """
    return f"{get_service_url()}/docs"
|
|
93
|
+
|
|
94
|
+
def get_logging_level() -> str:
    """Return the logging level (upper-cased), preferring the app_config
    entry over the LOGGING_LEVEL env var; defaults to DEBUG."""
    fallback = os.getenv("LOGGING_LEVEL", "DEBUG")
    level = get_app_config().get("logging_level", fallback)
    return level.upper()
|
|
96
|
+
|
|
97
|
+
def get_redis_cluster_service_name():
    """Reads one service name and one port from the environemnt variable.
    For all environements BUT the DEV environment.
    For PROD/UAT the Kubernetes Service will route the requests to any of the leaders,
    summarized by redis-cluster-leader.

    Returns:
        list[str]: [host, port] parsed from REDIS_CLUSTER_NODES, or a
        placeholder entry in DEV.
    """
    # BUG FIX: the old condition also tested
    # `get_environment().upper() is None`, which can never be true —
    # str.upper() always returns a string (and `is` was the wrong
    # comparison anyway). Removed the dead branch.
    if get_environment().upper() == "DEV":
        nodes_env = "UNDEFINED - EMPLOYING LOCAL CLUSTER"
    else:
        nodes_env = os.getenv("REDIS_CLUSTER_NODES", "NODES_NOT_DEFINED")
    return nodes_env.split(":")
|
|
108
|
+
|
|
109
|
+
def get_redis_cluster_pw():
    """Return the Redis cluster password from REDIS_CLUSTER_PW (None when unset)."""
    return os.environ.get("REDIS_CLUSTER_PW")
|
|
111
|
+
|
|
112
|
+
def get_redis_cluster_client() -> RedisCluster:
    """Creates a redis client to access the redis cluster in the current environment.
    That could be PROD, UAT or DEV."""
    rc: RedisCluster = None
    if get_environment().upper() == "DEV":
        # Local development cluster: three nodes on localhost by default,
        # overridable via REDIS_SERVICE / REDIS_PORTS.
        REDIS_SERVICE = os.environ.get("REDIS_SERVICE", "127.0.0.1")
        REDIS_PORTS = os.environ.get("REDIS_PORTS", "7000,7001,7002").split(",")
        nodes = [ClusterNode(REDIS_SERVICE, int(port)) for port in REDIS_PORTS]
        # Remap the container-internal node addresses announced by the cluster
        # to the host-exposed localhost ports.
        address_remap_dict = {
            "172.30.0.11:6379": ("127.0.0.1", 7000),
            "172.30.0.12:6379": ("127.0.0.1", 7001),
            "172.30.0.13:6379": ("127.0.0.1", 7002),
        }

        def address_remap(address):
            # address is a (host, port) pair; unknown addresses pass through unchanged.
            host, port = address
            return address_remap_dict.get(f"{host}:{port}", address)

        # rc = RedisCluster(startup_nodes=nodes, decode_responses=True, skip_full_coverage_check=True)
        # NOTE(review): hard-coded credentials ('default'/'my-password') — fine
        # only for a throwaway local cluster; confirm this never reaches shared
        # environments.
        rc = RedisCluster(
            username='default',
            password='my-password',
            startup_nodes=nodes,
            decode_responses=True,
            skip_full_coverage_check=True,
            address_remap=address_remap,
        )
    else:
        # PROD/UAT (any non-DEV environment)
        host_name, port = get_redis_cluster_service_name()
        if not host_name:
            raise Exception("No Redis cluster nodes in app_config file.")
        # TODO add the error log as soon this common code is in the library.

        # Password must be provided via the REDIS_CLUSTER_PW env var.
        PW = get_redis_cluster_pw()
        if isinstance(PW, str):
            rc = RedisCluster(
                host=host_name,
                port=int(port),
                username='default',
                password=PW,
                decode_responses=True,
                require_full_coverage=False,
                read_from_replicas=True
            )
        else:
            raise ValueError("There is NO password for the Redis Cluster available with this deployment. Please see to it.")

    return rc
|