everysk-lib 1.10.2__cp312-cp312-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- everysk/__init__.py +30 -0
- everysk/_version.py +683 -0
- everysk/api/__init__.py +61 -0
- everysk/api/api_requestor.py +167 -0
- everysk/api/api_resources/__init__.py +23 -0
- everysk/api/api_resources/api_resource.py +371 -0
- everysk/api/api_resources/calculation.py +779 -0
- everysk/api/api_resources/custom_index.py +42 -0
- everysk/api/api_resources/datastore.py +81 -0
- everysk/api/api_resources/file.py +42 -0
- everysk/api/api_resources/market_data.py +223 -0
- everysk/api/api_resources/parser.py +66 -0
- everysk/api/api_resources/portfolio.py +43 -0
- everysk/api/api_resources/private_security.py +42 -0
- everysk/api/api_resources/report.py +65 -0
- everysk/api/api_resources/report_template.py +39 -0
- everysk/api/api_resources/tests.py +115 -0
- everysk/api/api_resources/worker_execution.py +64 -0
- everysk/api/api_resources/workflow.py +65 -0
- everysk/api/api_resources/workflow_execution.py +93 -0
- everysk/api/api_resources/workspace.py +42 -0
- everysk/api/http_client.py +63 -0
- everysk/api/tests.py +32 -0
- everysk/api/utils.py +262 -0
- everysk/config.py +451 -0
- everysk/core/_tests/serialize/test_json.py +336 -0
- everysk/core/_tests/serialize/test_orjson.py +295 -0
- everysk/core/_tests/serialize/test_pickle.py +48 -0
- everysk/core/cloud_function/main.py +78 -0
- everysk/core/cloud_function/tests.py +86 -0
- everysk/core/compress.py +245 -0
- everysk/core/datetime/__init__.py +12 -0
- everysk/core/datetime/calendar.py +144 -0
- everysk/core/datetime/date.py +424 -0
- everysk/core/datetime/date_expression.py +299 -0
- everysk/core/datetime/date_mixin.py +1475 -0
- everysk/core/datetime/date_settings.py +30 -0
- everysk/core/datetime/datetime.py +713 -0
- everysk/core/exceptions.py +435 -0
- everysk/core/fields.py +1176 -0
- everysk/core/firestore.py +555 -0
- everysk/core/fixtures/_settings.py +29 -0
- everysk/core/fixtures/other/_settings.py +18 -0
- everysk/core/fixtures/user_agents.json +88 -0
- everysk/core/http.py +691 -0
- everysk/core/lists.py +92 -0
- everysk/core/log.py +709 -0
- everysk/core/number.py +37 -0
- everysk/core/object.py +1469 -0
- everysk/core/redis.py +1021 -0
- everysk/core/retry.py +51 -0
- everysk/core/serialize.py +674 -0
- everysk/core/sftp.py +414 -0
- everysk/core/signing.py +53 -0
- everysk/core/slack.py +127 -0
- everysk/core/string.py +199 -0
- everysk/core/tests.py +240 -0
- everysk/core/threads.py +199 -0
- everysk/core/undefined.py +70 -0
- everysk/core/unittests.py +73 -0
- everysk/core/workers.py +241 -0
- everysk/sdk/__init__.py +23 -0
- everysk/sdk/base.py +98 -0
- everysk/sdk/brutils/cnpj.py +391 -0
- everysk/sdk/brutils/cnpj_pd.py +129 -0
- everysk/sdk/engines/__init__.py +26 -0
- everysk/sdk/engines/cache.py +185 -0
- everysk/sdk/engines/compliance.py +37 -0
- everysk/sdk/engines/cryptography.py +69 -0
- everysk/sdk/engines/expression.cp312-win_amd64.pyd +0 -0
- everysk/sdk/engines/expression.pyi +55 -0
- everysk/sdk/engines/helpers.cp312-win_amd64.pyd +0 -0
- everysk/sdk/engines/helpers.pyi +26 -0
- everysk/sdk/engines/lock.py +120 -0
- everysk/sdk/engines/market_data.py +244 -0
- everysk/sdk/engines/settings.py +19 -0
- everysk/sdk/entities/__init__.py +23 -0
- everysk/sdk/entities/base.py +784 -0
- everysk/sdk/entities/base_list.py +131 -0
- everysk/sdk/entities/custom_index/base.py +209 -0
- everysk/sdk/entities/custom_index/settings.py +29 -0
- everysk/sdk/entities/datastore/base.py +160 -0
- everysk/sdk/entities/datastore/settings.py +17 -0
- everysk/sdk/entities/fields.py +375 -0
- everysk/sdk/entities/file/base.py +215 -0
- everysk/sdk/entities/file/settings.py +63 -0
- everysk/sdk/entities/portfolio/base.py +248 -0
- everysk/sdk/entities/portfolio/securities.py +241 -0
- everysk/sdk/entities/portfolio/security.py +580 -0
- everysk/sdk/entities/portfolio/settings.py +97 -0
- everysk/sdk/entities/private_security/base.py +226 -0
- everysk/sdk/entities/private_security/settings.py +17 -0
- everysk/sdk/entities/query.py +603 -0
- everysk/sdk/entities/report/base.py +214 -0
- everysk/sdk/entities/report/settings.py +23 -0
- everysk/sdk/entities/script.py +310 -0
- everysk/sdk/entities/secrets/base.py +128 -0
- everysk/sdk/entities/secrets/script.py +119 -0
- everysk/sdk/entities/secrets/settings.py +17 -0
- everysk/sdk/entities/settings.py +48 -0
- everysk/sdk/entities/tags.py +174 -0
- everysk/sdk/entities/worker_execution/base.py +307 -0
- everysk/sdk/entities/worker_execution/settings.py +63 -0
- everysk/sdk/entities/workflow_execution/base.py +113 -0
- everysk/sdk/entities/workflow_execution/settings.py +32 -0
- everysk/sdk/entities/workspace/base.py +99 -0
- everysk/sdk/entities/workspace/settings.py +27 -0
- everysk/sdk/settings.py +67 -0
- everysk/sdk/tests.py +105 -0
- everysk/sdk/worker_base.py +47 -0
- everysk/server/__init__.py +9 -0
- everysk/server/applications.py +63 -0
- everysk/server/endpoints.py +516 -0
- everysk/server/example_api.py +69 -0
- everysk/server/middlewares.py +80 -0
- everysk/server/requests.py +62 -0
- everysk/server/responses.py +119 -0
- everysk/server/routing.py +64 -0
- everysk/server/settings.py +36 -0
- everysk/server/tests.py +36 -0
- everysk/settings.py +98 -0
- everysk/sql/__init__.py +9 -0
- everysk/sql/connection.py +232 -0
- everysk/sql/model.py +376 -0
- everysk/sql/query.py +417 -0
- everysk/sql/row_factory.py +63 -0
- everysk/sql/settings.py +49 -0
- everysk/sql/utils.py +129 -0
- everysk/tests.py +23 -0
- everysk/utils.py +81 -0
- everysk/version.py +15 -0
- everysk_lib-1.10.2.dist-info/.gitignore +5 -0
- everysk_lib-1.10.2.dist-info/METADATA +326 -0
- everysk_lib-1.10.2.dist-info/RECORD +137 -0
- everysk_lib-1.10.2.dist-info/WHEEL +5 -0
- everysk_lib-1.10.2.dist-info/licenses/LICENSE.txt +9 -0
- everysk_lib-1.10.2.dist-info/top_level.txt +2 -0
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
###############################################################################
|
|
2
|
+
#
|
|
3
|
+
# (C) Copyright 2023 EVERYSK TECHNOLOGIES
|
|
4
|
+
#
|
|
5
|
+
# This is an unpublished work containing confidential and proprietary
|
|
6
|
+
# information of EVERYSK TECHNOLOGIES. Disclosure, use, or reproduction
|
|
7
|
+
# without authorization of EVERYSK TECHNOLOGIES is prohibited.
|
|
8
|
+
#
|
|
9
|
+
###############################################################################
|
|
10
|
+
# Remember that this code will be running on Cloud Functions environment
|
|
11
|
+
# then not all modules can be used/imported.
|
|
12
|
+
from logging import getLogger, INFO, StreamHandler, Formatter
|
|
13
|
+
from os import getenv
|
|
14
|
+
from sys import stdout
|
|
15
|
+
from redis import Redis
|
|
16
|
+
|
|
17
|
+
## Create a Logger object
# Dedicated logger for this Cloud Function; messages are written straight to
# stdout so the hosting environment can capture them.
log = getLogger('firestore-cached-document-write')
log.setLevel(INFO)
log.propagate = False # Don't pass message to others loggers
# Emit INFO and above on stdout with a timestamped format.
handler = StreamHandler(stdout)
handler.setLevel(INFO)
handler.setFormatter(Formatter('%(asctime)s - %(levelname)s - %(message)s'))
log.addHandler(handler)
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class RedisClient:
    """Thin wrapper around a lazily-created, self-healing Redis connection."""

    ## Private attributes
    # Shared at class level; the first successful (re)connection rebinds it
    # as an instance attribute on the object that triggered it.
    _connection: Redis = None

    @property
    def connection(self) -> Redis:
        """
        We use this property to check if Redis is online
        then returning the working connection.
        """
        try:
            # A successful ping means the cached connection is still usable.
            self._connection.ping()
        except Exception: # pylint: disable=broad-exception-caught
            # Either no connection exists yet (AttributeError on None) or the
            # server stopped answering -> build a fresh client from the env.
            host = getenv('REDIS_HOST')
            port = int(getenv('REDIS_PORT'))
            self._connection = Redis(host=host, port=port)

        return self._connection

    def delete(self, key):
        """Remove *key* from Redis, returning the number of keys removed."""
        return self.connection.delete(key)
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def firestore_cached_document_write(event: dict, context: type) -> None: # pylint: disable=unused-argument
    """
    Triggered by a change to a Firestore document.

    Args:
        event (dict): Event payload -> {'oldValue': {}, 'updateMask': {}, 'value': {}}.
        context (google.cloud.functions.Context): Metadata for the event.
    """
    # On create/update the document arrives in 'value'; on delete 'value' is
    # empty and the removed document arrives in 'oldValue'.
    document = event.get('value', {}) or event.get('oldValue', {})

    # We just need to delete the key as the original process
    # will take care of creating it if necessary.
    field = document.get('fields', {}).get('redis_key', None)
    if field is not None:
        redis_key = field['stringValue']
        RedisClient().delete(redis_key)
        log.info('Redis host: %s - %s', getenv('REDIS_HOST'), redis_key)
|
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
###############################################################################
|
|
2
|
+
#
|
|
3
|
+
# (C) Copyright 2023 EVERYSK TECHNOLOGIES
|
|
4
|
+
#
|
|
5
|
+
# This is an unpublished work containing confidential and proprietary
|
|
6
|
+
# information of EVERYSK TECHNOLOGIES. Disclosure, use, or reproduction
|
|
7
|
+
# without authorization of EVERYSK TECHNOLOGIES is prohibited.
|
|
8
|
+
#
|
|
9
|
+
###############################################################################
|
|
10
|
+
import os
|
|
11
|
+
from everysk.core.object import BaseObject
|
|
12
|
+
from everysk.core.cloud_function import main
|
|
13
|
+
from everysk.core.unittests import TestCase, mock
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class CloudFunctionTestCase(TestCase):
    """
    Tests for everysk.core.cloud_function.main.

    NOTE(review): these tests talk to a live Redis server at 127.0.0.1:6379 —
    confirm one is reachable before running them.
    """

    @classmethod
    def setUpClass(cls) -> None:
        # Save the previous environment values so tearDownClass can restore them.
        cls.old_redis_host = os.environ.get('REDIS_HOST', None)
        cls.old_redis_port = os.environ.get('REDIS_PORT', None)
        os.environ['REDIS_HOST'] = '127.0.0.1'
        os.environ['REDIS_PORT'] = '6379'
        cls.client = main.RedisClient()
        cls.redis_key = 'cloud-function-unit-test-redis-key'

    def setUp(self) -> None:
        # Seed the key with value 1; the third positional argument is the
        # expiration — presumably 1 second (redis-py `ex`), TODO confirm.
        self.client.connection.set(self.redis_key, 1, 1)
        self.context = BaseObject(resource='/project/collection/document')
        # Minimal Firestore document payload carrying the one field the
        # cloud function reads ('fields' -> 'redis_key' -> 'stringValue').
        self.document = {
            'createTime': '2023-01-01T00:00:00+00:00',
            'fields': {
                'redis_key': {'stringValue': self.redis_key}
            },
            'name': '/project/collection/document',
            'updateTime': '2023-01-01T00:00:00+00:00'
        }

    @classmethod
    def tearDownClass(cls) -> None:
        # Restore (or remove) the environment variables changed in setUpClass.
        if cls.old_redis_host is not None:
            os.environ['REDIS_HOST'] = cls.old_redis_host
        else:
            del os.environ['REDIS_HOST']

        if cls.old_redis_port is not None:
            os.environ['REDIS_PORT'] = cls.old_redis_port
        else:
            del os.environ['REDIS_PORT']

    def test_redis_connection(self):
        # The property must hand back a live Redis client instance.
        client = main.RedisClient()
        self.assertIsInstance(client.connection, main.Redis)

    def test_redis_delete(self):
        # delete() removes the seeded key from Redis.
        self.assertEqual(self.client.connection.get(self.redis_key), b'1')
        self.client.delete(self.redis_key)
        self.assertIsNone(self.client.connection.get(self.redis_key))

    @mock.patch.object(main.log, 'info')
    def test_firestore_create(self, info: mock.MagicMock):
        # A create event carries the document in 'value' only.
        event = {'oldValue': {}, 'updateMask': {}, 'value': self.document}
        main.firestore_cached_document_write(event, self.context)
        info.assert_called_once_with('Redis host: %s - %s', os.environ['REDIS_HOST'], self.redis_key)
        self.assertIsNone(self.client.connection.get(self.redis_key))

    @mock.patch.object(main.log, 'info')
    def test_firestore_delete(self, info: mock.MagicMock):
        # A delete event carries the document in 'oldValue' only.
        event = {'oldValue': self.document, 'updateMask': {}, 'value': {}}
        main.firestore_cached_document_write(event, self.context)
        info.assert_called_once_with('Redis host: %s - %s', os.environ['REDIS_HOST'], self.redis_key)
        self.assertIsNone(self.client.connection.get(self.redis_key))

    @mock.patch.object(main.log, 'info')
    def test_firestore_update(self, info: mock.MagicMock):
        # An update event carries the document in both 'oldValue' and 'value'.
        event = {'oldValue': self.document, 'updateMask': {}, 'value': self.document}
        main.firestore_cached_document_write(event, self.context)
        info.assert_called_once_with('Redis host: %s - %s', os.environ['REDIS_HOST'], self.redis_key)
        self.assertIsNone(self.client.connection.get(self.redis_key))

    @mock.patch.object(main.log, 'info')
    def test_firestore_other(self, info: mock.MagicMock):
        # With no document in the event nothing must be logged or deleted.
        event = {'oldValue': {}, 'updateMask': {}, 'value': {}}
        main.firestore_cached_document_write(event, self.context)
        info.assert_not_called()
        self.assertEqual(self.client.connection.get(self.redis_key), b'1')
|
everysk/core/compress.py
ADDED
|
@@ -0,0 +1,245 @@
|
|
|
1
|
+
###############################################################################
|
|
2
|
+
#
|
|
3
|
+
# (C) Copyright 2023 EVERYSK TECHNOLOGIES
|
|
4
|
+
#
|
|
5
|
+
# This is an unpublished work containing confidential and proprietary
|
|
6
|
+
# information of EVERYSK TECHNOLOGIES. Disclosure, use, or reproduction
|
|
7
|
+
# without authorization of EVERYSK TECHNOLOGIES is prohibited.
|
|
8
|
+
#
|
|
9
|
+
###############################################################################
|
|
10
|
+
|
|
11
|
+
__all__ = ['compress', 'decompress']
|
|
12
|
+
|
|
13
|
+
import gzip
|
|
14
|
+
import zlib
|
|
15
|
+
import io
|
|
16
|
+
import os
|
|
17
|
+
import zipfile
|
|
18
|
+
import base64
|
|
19
|
+
import fnmatch
|
|
20
|
+
from typing import Any
|
|
21
|
+
|
|
22
|
+
from everysk.core.log import Logger
|
|
23
|
+
from everysk.core.serialize import dumps, loads
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
log = Logger('everysk-lib-compress')
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
###############################################################################
|
|
30
|
+
# Private Functions Implementation
|
|
31
|
+
###############################################################################
|
|
32
|
+
def compress_json(obj: Any) -> bytes:
    """
    Deprecated: use ``compress(obj, serialize='json')`` instead.

    This function first serializes the input object into a JSON string using the dumps function.
    Then it compresses the JSON string using zlib compression.
    The output is a bytes object which may contain non-printable characters due to compression.

    Args:
        obj (Any): The JSON-serializable object to compress.

    Returns:
        bytes: The zlib-compressed representation of the JSON-serialized object.

    Example:
        >>> data = {'key': 'value'}
        >>> compressed_data = compress_json(data)
        >>> print(compressed_data)
        >>> b'x\x9c\xcbH\xcd\xc9\xc9W(....)' # Example of the compressed data
    """
    # Fix: the original annotation/doc claimed `str`, but compress() returns
    # the bytes produced by zlib.compress.
    log.deprecated("compress_json is deprecated. Use compress(obj, serialize='json') instead.")
    return compress(obj, serialize='json')
|
|
52
|
+
|
|
53
|
+
def decompress_json(data: str, convert_str_to_date: bool = False) -> Any: # pylint: disable=unused-argument
    """
    Deprecated: use ``decompress(data, serialize='json')`` instead.

    Decompress data with zlib and transform to an obj with loads function.
    The input data should be a zlib-compressed string.
    The returned object may be any valid JSON-serializable Python object

    Args:
        data (str): the zlib-compressed string to decompress and deserialized data.
        convert_str_to_date (bool, optional): Kept for backward compatibility; currently unused. Default is False.

    Returns:
        Any: The Python object reconstructed from the decompressed and deserialized.

    Example:
        >>> from everysk.core.compress import decompress_json
        >>> compressed_data = b'x\x9c\xabV\xcaN\xadT\xb2RP*K\xcc)MU\xaa\x05\x00+\xaf\x05A'
        >>> decompressed_data = decompress_json(compressed_data)
        >>> print(decompressed_data)
        >>> {'key': 'value'} # Example of the decompressed data'
    """
    # Fix: the original docstring said the default was True; the signature
    # default is False and the argument is never forwarded.
    log.deprecated("decompress_json is deprecated. Use decompress(obj, serialize='json') instead.")
    return decompress(data, serialize='json')
|
|
75
|
+
|
|
76
|
+
def compress_pickle(obj: Any) -> bytes:
    """
    Deprecated: use ``compress(obj, serialize='pickle')`` instead.

    Convert obj to string with pickle dumps then uses zlib to compress it.
    The output is a bytes object which may contain non-printable characters due to compression.

    Args:
        obj (Any): The Python object to compress.

    Returns:
        bytes: The zlib-compressed representation of the pickled object.

    Example:
        >>> data = {'key': 'value'}
        >>> compressed_data = compress_pickle(data)
        >>> print(compressed_data)
        >>> b'x\x9c\xabV*I,.Q(...)' # Example compressed string output
    """
    # Fix: the original annotation/doc claimed `str`, but compress() returns
    # the bytes produced by zlib.compress.
    log.deprecated("compress_pickle is deprecated. Use compress(obj, serialize='pickle') instead.")
    return compress(obj, serialize='pickle')
|
|
95
|
+
|
|
96
|
+
def decompress_pickle(data: str) -> Any:
    """
    Deprecated: use ``decompress(data, serialize='pickle')`` instead.

    Inflate *data* with zlib and rebuild the original Python object with
    pickle loads. The input must be a zlib-compressed pickled payload.

    Args:
        data (str): The zlib-compressed string to decompress and deserialize.

    Returns:
        Any: The Python object reconstructed from the decompressed payload.

    Example:
        >>> compressed_data = b'x\x9c\xabV\xcaN\xadT\xb2RP*K\xcc)MU\xaa\x05\x00+\xaf\x05A'
        >>> decompress_pickle(compressed_data)
        {'key': 'value'}
    """
    log.deprecated("decompress_pickle is deprecated. Use decompress(obj, serialize='pickle') instead.")
    return decompress(data, serialize='pickle')
|
|
115
|
+
|
|
116
|
+
def zip_directory_to_str(path_list: str | list, path_name_list: str | list, ignore_files: list = None, ignore_roots: list = None) -> str:
|
|
117
|
+
"""
|
|
118
|
+
This function takes a directory path as input and creates a zip file in memory.
|
|
119
|
+
|
|
120
|
+
Args:
|
|
121
|
+
path_list (str | list): The path to the directory to zip, it can be a list of paths.
|
|
122
|
+
path_name_list (str | list): The name of the root folder inside the zip file, it can be a list of path names.
|
|
123
|
+
ignore_files (list, optional): A list of file names to ignore when zipping the directory.
|
|
124
|
+
ignore_roots (list, optional): A list of patterns to ignore when zipping the directory.
|
|
125
|
+
|
|
126
|
+
Returns:
|
|
127
|
+
str: The base64 encoded string representation of the zip file.
|
|
128
|
+
|
|
129
|
+
Raises:
|
|
130
|
+
ValueError: If the length of path_list and path_name_list is not the same.
|
|
131
|
+
|
|
132
|
+
Example:
|
|
133
|
+
>>> zip_directory_to_str('path/to/directory', 'root_folder_name')
|
|
134
|
+
>>> zip_directory_to_str('path/to/directory', 'root_folder_name', ['config.json'], ['**/log', '**/temp/**'])
|
|
135
|
+
|
|
136
|
+
Below is an example of using a list of paths and path names:
|
|
137
|
+
>>> zip_directory_to_str(['path/to/directory1', 'path/to/directory2'], ['root_folder_name1', 'root_folder_name2'], ['config.json'], ['**/log', '**/temp/**'])
|
|
138
|
+
"""
|
|
139
|
+
# Create a BytesIO object to store the zip file
|
|
140
|
+
memory_zip = io.BytesIO()
|
|
141
|
+
|
|
142
|
+
if isinstance(path_list, str):
|
|
143
|
+
path_list = [path_list]
|
|
144
|
+
|
|
145
|
+
if isinstance(path_name_list, str):
|
|
146
|
+
path_name_list = [path_name_list]
|
|
147
|
+
|
|
148
|
+
if len(path_list) != len(path_name_list):
|
|
149
|
+
raise ValueError("The length of path_list and path_name_list should be the same.")
|
|
150
|
+
|
|
151
|
+
# Create a ZipFile object with the memory buffer as its destination
|
|
152
|
+
with zipfile.ZipFile(memory_zip, 'w', zipfile.ZIP_DEFLATED) as zip_file:
|
|
153
|
+
for path, path_name in zip(path_list, path_name_list):
|
|
154
|
+
for root, _, files in os.walk(path):
|
|
155
|
+
# Check if the current root matches any of the ignore_roots patterns
|
|
156
|
+
if ignore_roots and any(fnmatch.fnmatch(root, pattern) for pattern in ignore_roots):
|
|
157
|
+
continue
|
|
158
|
+
for file in files:
|
|
159
|
+
# Ignore files if specified
|
|
160
|
+
if ignore_files and file in ignore_files:
|
|
161
|
+
continue
|
|
162
|
+
file_path = os.path.join(root, file)
|
|
163
|
+
# Compute the relative path and add the path_name as the root folder
|
|
164
|
+
relative_path = os.path.relpath(file_path, path)
|
|
165
|
+
zip_file.write(file_path, os.path.join(path_name, relative_path))
|
|
166
|
+
|
|
167
|
+
# We need to adjust the cursor of the BytesIO object to the start after writing
|
|
168
|
+
memory_zip.seek(0)
|
|
169
|
+
|
|
170
|
+
# Return the base64 encoded string representation of the zip file
|
|
171
|
+
return base64.b64encode(memory_zip.getvalue()).decode('utf-8')
|
|
172
|
+
|
|
173
|
+
###############################################################################
|
|
174
|
+
# Public Functions Implementation
|
|
175
|
+
###############################################################################
|
|
176
|
+
def compress(obj: Any, protocol: str = 'zlib', serialize: str | None = 'pickle', use_undefined: bool | None = True, add_class_path: bool | None = None) -> bytes:
|
|
177
|
+
"""
|
|
178
|
+
Compress an object using either zlib or gzip compression.
|
|
179
|
+
If serialize is set, the object is serialized using the specified serialization format before compression.
|
|
180
|
+
Supported options for serialization are JSON or Pickle.
|
|
181
|
+
|
|
182
|
+
Args:
|
|
183
|
+
obj (Any): The Python object to compress
|
|
184
|
+
protocol (str, optional): The serialization protocol to use. Default is 'zlib'. Options are 'zlib' and 'gzip'.
|
|
185
|
+
serialize (str, optional): The serialization format to use. Default is 'pickle'. Options are 'json', 'pickle' and None.
|
|
186
|
+
use_undefined (bool, optional): If True, undefined values are included in the serialization. Default is True.
|
|
187
|
+
add_class_path (bool, optional): If True, the class path is included in the serialization. Default is None.
|
|
188
|
+
|
|
189
|
+
Returns:
|
|
190
|
+
bytes: The compressed string representation of the serialized object.
|
|
191
|
+
|
|
192
|
+
Example:
|
|
193
|
+
>>> data = {'key': 'value'}
|
|
194
|
+
>>> compress(data)
|
|
195
|
+
>>> b'x\x9c\xabV*I,.Q(...)' # Example compressed string output
|
|
196
|
+
"""
|
|
197
|
+
if protocol == 'zlib':
|
|
198
|
+
compress_fn = zlib.compress
|
|
199
|
+
elif protocol == 'gzip':
|
|
200
|
+
compress_fn = gzip.compress
|
|
201
|
+
else:
|
|
202
|
+
raise ValueError(f"Unsupported compression protocol '{protocol}'. Use 'zlib' or 'gzip'.")
|
|
203
|
+
|
|
204
|
+
result = obj
|
|
205
|
+
if serialize:
|
|
206
|
+
result = dumps(obj, protocol=serialize, use_undefined=use_undefined, add_class_path=add_class_path)
|
|
207
|
+
|
|
208
|
+
if isinstance(result, str):
|
|
209
|
+
result = result.encode('utf-8')
|
|
210
|
+
|
|
211
|
+
return compress_fn(result)
|
|
212
|
+
|
|
213
|
+
def decompress(data: bytes, protocol: str = 'zlib', serialize: str | None = 'pickle', use_undefined: bool | None = True, instantiate_object: bool | None = True) -> Any:
|
|
214
|
+
"""
|
|
215
|
+
Decompress data using either zlib or gzip compression.
|
|
216
|
+
If serialize is set, the result from decompress is serialized using the specified serialization converting it to a python object.
|
|
217
|
+
Supported options for serialization are JSON or Pickle.
|
|
218
|
+
|
|
219
|
+
Args:
|
|
220
|
+
data (bytes): The compressed data to decompress.
|
|
221
|
+
protocol (str, optional): The serialization protocol to use. Default is 'zlib'. Options are 'zlib' and 'gzip'.
|
|
222
|
+
serialize (str, optional): The serialization format to use. Default is 'pickle'. Options are 'json', 'pickle' and None.
|
|
223
|
+
use_undefined (bool, optional): If True, undefined values are included in the serialization. Default is True.
|
|
224
|
+
instantiate_object (bool, optional): If True, the object is instantiated. Default is True.
|
|
225
|
+
|
|
226
|
+
Returns:
|
|
227
|
+
Any: If serialize is set returns a Python object otherwise a bytes object.
|
|
228
|
+
|
|
229
|
+
Example:
|
|
230
|
+
>>> compressed_data = b'x\x9c\xabV\xcaN\xadT\xb2RP*K\xcc)MU\xaa\x05\x00+\xaf\x05A' # Example of compressed data
|
|
231
|
+
>>> decompress(compressed_data)
|
|
232
|
+
>>> {'key': 'value'}
|
|
233
|
+
"""
|
|
234
|
+
if protocol == 'zlib':
|
|
235
|
+
decompress_fn = zlib.decompress
|
|
236
|
+
elif protocol == 'gzip':
|
|
237
|
+
decompress_fn = gzip.decompress
|
|
238
|
+
else:
|
|
239
|
+
raise ValueError(f"Unsupported decompression protocol '{protocol}'. Use 'zlib' or 'gzip'.")
|
|
240
|
+
|
|
241
|
+
result = decompress_fn(data)
|
|
242
|
+
if serialize:
|
|
243
|
+
result = loads(result, protocol=serialize, use_undefined=use_undefined, instantiate_object=instantiate_object)
|
|
244
|
+
|
|
245
|
+
return result
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
###############################################################################
|
|
2
|
+
#
|
|
3
|
+
# (C) Copyright 2023 EVERYSK TECHNOLOGIES
|
|
4
|
+
#
|
|
5
|
+
# This is an unpublished work containing confidential and proprietary
|
|
6
|
+
# information of EVERYSK TECHNOLOGIES. Disclosure, use, or reproduction
|
|
7
|
+
# without authorization of EVERYSK TECHNOLOGIES is prohibited.
|
|
8
|
+
#
|
|
9
|
+
###############################################################################
|
|
10
|
+
from everysk.core.datetime.datetime import DateTime, timezone, ZoneInfo
|
|
11
|
+
from everysk.core.datetime.date import Date
|
|
12
|
+
from everysk.core.datetime.date_mixin import timedelta
|
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
###############################################################################
|
|
2
|
+
#
|
|
3
|
+
# (C) Copyright 2023 EVERYSK TECHNOLOGIES
|
|
4
|
+
#
|
|
5
|
+
# This is an unpublished work containing confidential and proprietary
|
|
6
|
+
# information of EVERYSK TECHNOLOGIES. Disclosure, use, or reproduction
|
|
7
|
+
# without authorization of EVERYSK TECHNOLOGIES is prohibited.
|
|
8
|
+
#
|
|
9
|
+
###############################################################################
|
|
10
|
+
from functools import cache
|
|
11
|
+
|
|
12
|
+
import holidays
|
|
13
|
+
from holidays.countries import BR
|
|
14
|
+
|
|
15
|
+
from everysk.core.datetime import Date
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class BRHolidays(BR):
    """
    Subclass of BR representing holidays specific to Brazil and also provides further functionality
    """

    # Holidays present in the upstream BR calendar that must not be treated
    # as holidays by this library. Kept as data so adding/removing one is a
    # single-line change instead of a new try/except block.
    _EXCLUDED_HOLIDAYS = (
        'Início da Quaresma',
        'Dia do Servidor Público',
        'Véspera de Natal',
        'Véspera de Ano-Novo',
    )

    def _populate(self, year: int) -> None:
        """
        Populate holidays specific to Brazil for the given year.

        Args:
            year (int): The year for which holidays are to be populated.

        Example:
            >>> from everysk.core.datetime.calendar import BRHolidays
            >>> br_holidays = BRHolidays()
            >>> br_holidays._populate(2022)
            >>> print(br_holidays)
            {
                datetime.date(2022, 1, 1): 'Confraternização Universal',
                datetime.date(2022, 4, 15): 'Sexta-feira Santa',
                ...
            }
        """
        super()._populate(year)

        # We need to check individually for these holidays: pop_named raises
        # KeyError when the holiday does not exist for this year, which is fine.
        for name in self._EXCLUDED_HOLIDAYS:
            try:
                self.pop_named(name)
            except KeyError:
                pass
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
class ANBIMA(BRHolidays):
    """
    Subclass of BRHolidays representing the ANBIMA holiday calendar; it
    currently inherits the Brazilian calendar unchanged.
    """
    pass
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
class BVMF(BRHolidays):
    """
    Subclass of BRHolidays representing holidays specific to the BVMF calendar.
    """
    def _populate(self, year: int) -> None:
        """
        Populate the BVMF holidays for the given year, adding the São Paulo
        holidays that the exchange observed before 2022.

        Args:
            year (int): The year for the holidays to be populated

        Example:
            Display the holidays for the year of 2022

            >>> from everysk.core.datetime.calendar import BVMF
            >>> bvmf_holidays = BVMF()
            >>> bvmf_holidays._populate(2022)
            >>> print(bvmf_holidays)
            {
                datetime.date(2022, 1, 1): 'Confraternização Universal',
                datetime.date(2022, 4, 15): 'Sexta-feira Santa',
                ...
            }
        """
        super()._populate(year)

        # Up to 2021 these two São Paulo municipal holidays were also
        # observed by the exchange; from 2022 onwards they are not added.
        if year < 2022:
            self[Date(year, 1, 25)] = 'Aniversário de São Paulo'
            self[Date(year, 11, 20)] = 'Dia da Consciência Negra'
|
|
98
|
+
|
|
99
|
+
|
|
100
|
+
def get_holidays(calendar: str, years: list = range(2000, 2100)) -> dict:
    """
    Uses the holidays library (https://pypi.org/project/holidays/) to retrieve a list of holidays for a specific country

    It also uses a range of years, if more specification needed

    Args:
        calendar (str): Two digit country symbol.
        years (list, optional): List of int years. Ex: [2021, 2022]. Defaults to [2000, ..., 2099].

    Returns:
        dict: Mapping of Date -> holiday name.

    Example:
        Getting holidays for Brazil (BVMF calendar) for the years 2021 and 2022.

        >>> from everysk.core.datetime.calendar import get_holidays
        >>> brazil_holidays = get_holidays('BR', years=[2021, 2022])
        >>> print(brazil_holidays)
        {
            datetime.date(2021, 1, 1) : 'Confraternização Universal',
            datetime.date(2021, 4, 2) : 'Sexta-feira Santa',
            ...
        }
    """
    # years is converted to a tuple so the arguments are hashable and the
    # call can be memoized by the @cache decorator on _get_holidays.
    return _get_holidays(calendar=calendar, years=tuple(years))
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
@cache
def _get_holidays(calendar: str, years: tuple[int]) -> dict:
    """
    Cacheable version of get_holidays.

    NOTE(review): the result is memoized per (calendar, years) pair, so the
    returned dict is shared between callers — mutating it would affect every
    later call with the same arguments.
    """
    # Register the customized calendars on the holidays module so
    # country_holidays can resolve them by name.
    holidays.BVMF = BVMF
    holidays.ANBIMA = ANBIMA
    holidays.BRHolidays = BRHolidays

    # We need to remove some holidays for the BR calendar so we change it
    if calendar == 'BR':
        calendar = 'BRHolidays'

    # Every country has public holidays
    # Brazil has optional holidays as well
    categories = ['public']
    if calendar in {'BRHolidays', 'BVMF', 'ANBIMA'}:
        categories.append('optional')

    # Convert the datetime.date keys into everysk Date instances.
    return {Date(dt.year, dt.month, dt.day): name for dt, name in holidays.country_holidays(calendar, years=years, categories=categories).items()}
|