everysk-lib 1.10.2__cp312-cp312-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- everysk/__init__.py +30 -0
- everysk/_version.py +683 -0
- everysk/api/__init__.py +61 -0
- everysk/api/api_requestor.py +167 -0
- everysk/api/api_resources/__init__.py +23 -0
- everysk/api/api_resources/api_resource.py +371 -0
- everysk/api/api_resources/calculation.py +779 -0
- everysk/api/api_resources/custom_index.py +42 -0
- everysk/api/api_resources/datastore.py +81 -0
- everysk/api/api_resources/file.py +42 -0
- everysk/api/api_resources/market_data.py +223 -0
- everysk/api/api_resources/parser.py +66 -0
- everysk/api/api_resources/portfolio.py +43 -0
- everysk/api/api_resources/private_security.py +42 -0
- everysk/api/api_resources/report.py +65 -0
- everysk/api/api_resources/report_template.py +39 -0
- everysk/api/api_resources/tests.py +115 -0
- everysk/api/api_resources/worker_execution.py +64 -0
- everysk/api/api_resources/workflow.py +65 -0
- everysk/api/api_resources/workflow_execution.py +93 -0
- everysk/api/api_resources/workspace.py +42 -0
- everysk/api/http_client.py +63 -0
- everysk/api/tests.py +32 -0
- everysk/api/utils.py +262 -0
- everysk/config.py +451 -0
- everysk/core/_tests/serialize/test_json.py +336 -0
- everysk/core/_tests/serialize/test_orjson.py +295 -0
- everysk/core/_tests/serialize/test_pickle.py +48 -0
- everysk/core/cloud_function/main.py +78 -0
- everysk/core/cloud_function/tests.py +86 -0
- everysk/core/compress.py +245 -0
- everysk/core/datetime/__init__.py +12 -0
- everysk/core/datetime/calendar.py +144 -0
- everysk/core/datetime/date.py +424 -0
- everysk/core/datetime/date_expression.py +299 -0
- everysk/core/datetime/date_mixin.py +1475 -0
- everysk/core/datetime/date_settings.py +30 -0
- everysk/core/datetime/datetime.py +713 -0
- everysk/core/exceptions.py +435 -0
- everysk/core/fields.py +1176 -0
- everysk/core/firestore.py +555 -0
- everysk/core/fixtures/_settings.py +29 -0
- everysk/core/fixtures/other/_settings.py +18 -0
- everysk/core/fixtures/user_agents.json +88 -0
- everysk/core/http.py +691 -0
- everysk/core/lists.py +92 -0
- everysk/core/log.py +709 -0
- everysk/core/number.py +37 -0
- everysk/core/object.py +1469 -0
- everysk/core/redis.py +1021 -0
- everysk/core/retry.py +51 -0
- everysk/core/serialize.py +674 -0
- everysk/core/sftp.py +414 -0
- everysk/core/signing.py +53 -0
- everysk/core/slack.py +127 -0
- everysk/core/string.py +199 -0
- everysk/core/tests.py +240 -0
- everysk/core/threads.py +199 -0
- everysk/core/undefined.py +70 -0
- everysk/core/unittests.py +73 -0
- everysk/core/workers.py +241 -0
- everysk/sdk/__init__.py +23 -0
- everysk/sdk/base.py +98 -0
- everysk/sdk/brutils/cnpj.py +391 -0
- everysk/sdk/brutils/cnpj_pd.py +129 -0
- everysk/sdk/engines/__init__.py +26 -0
- everysk/sdk/engines/cache.py +185 -0
- everysk/sdk/engines/compliance.py +37 -0
- everysk/sdk/engines/cryptography.py +69 -0
- everysk/sdk/engines/expression.cp312-win_amd64.pyd +0 -0
- everysk/sdk/engines/expression.pyi +55 -0
- everysk/sdk/engines/helpers.cp312-win_amd64.pyd +0 -0
- everysk/sdk/engines/helpers.pyi +26 -0
- everysk/sdk/engines/lock.py +120 -0
- everysk/sdk/engines/market_data.py +244 -0
- everysk/sdk/engines/settings.py +19 -0
- everysk/sdk/entities/__init__.py +23 -0
- everysk/sdk/entities/base.py +784 -0
- everysk/sdk/entities/base_list.py +131 -0
- everysk/sdk/entities/custom_index/base.py +209 -0
- everysk/sdk/entities/custom_index/settings.py +29 -0
- everysk/sdk/entities/datastore/base.py +160 -0
- everysk/sdk/entities/datastore/settings.py +17 -0
- everysk/sdk/entities/fields.py +375 -0
- everysk/sdk/entities/file/base.py +215 -0
- everysk/sdk/entities/file/settings.py +63 -0
- everysk/sdk/entities/portfolio/base.py +248 -0
- everysk/sdk/entities/portfolio/securities.py +241 -0
- everysk/sdk/entities/portfolio/security.py +580 -0
- everysk/sdk/entities/portfolio/settings.py +97 -0
- everysk/sdk/entities/private_security/base.py +226 -0
- everysk/sdk/entities/private_security/settings.py +17 -0
- everysk/sdk/entities/query.py +603 -0
- everysk/sdk/entities/report/base.py +214 -0
- everysk/sdk/entities/report/settings.py +23 -0
- everysk/sdk/entities/script.py +310 -0
- everysk/sdk/entities/secrets/base.py +128 -0
- everysk/sdk/entities/secrets/script.py +119 -0
- everysk/sdk/entities/secrets/settings.py +17 -0
- everysk/sdk/entities/settings.py +48 -0
- everysk/sdk/entities/tags.py +174 -0
- everysk/sdk/entities/worker_execution/base.py +307 -0
- everysk/sdk/entities/worker_execution/settings.py +63 -0
- everysk/sdk/entities/workflow_execution/base.py +113 -0
- everysk/sdk/entities/workflow_execution/settings.py +32 -0
- everysk/sdk/entities/workspace/base.py +99 -0
- everysk/sdk/entities/workspace/settings.py +27 -0
- everysk/sdk/settings.py +67 -0
- everysk/sdk/tests.py +105 -0
- everysk/sdk/worker_base.py +47 -0
- everysk/server/__init__.py +9 -0
- everysk/server/applications.py +63 -0
- everysk/server/endpoints.py +516 -0
- everysk/server/example_api.py +69 -0
- everysk/server/middlewares.py +80 -0
- everysk/server/requests.py +62 -0
- everysk/server/responses.py +119 -0
- everysk/server/routing.py +64 -0
- everysk/server/settings.py +36 -0
- everysk/server/tests.py +36 -0
- everysk/settings.py +98 -0
- everysk/sql/__init__.py +9 -0
- everysk/sql/connection.py +232 -0
- everysk/sql/model.py +376 -0
- everysk/sql/query.py +417 -0
- everysk/sql/row_factory.py +63 -0
- everysk/sql/settings.py +49 -0
- everysk/sql/utils.py +129 -0
- everysk/tests.py +23 -0
- everysk/utils.py +81 -0
- everysk/version.py +15 -0
- everysk_lib-1.10.2.dist-info/.gitignore +5 -0
- everysk_lib-1.10.2.dist-info/METADATA +326 -0
- everysk_lib-1.10.2.dist-info/RECORD +137 -0
- everysk_lib-1.10.2.dist-info/WHEEL +5 -0
- everysk_lib-1.10.2.dist-info/licenses/LICENSE.txt +9 -0
- everysk_lib-1.10.2.dist-info/top_level.txt +2 -0
everysk/sql/query.py
ADDED
@@ -0,0 +1,417 @@
###############################################################################
#
# (C) Copyright 2025 EVERYSK TECHNOLOGIES
#
# This is an unpublished work containing confidential and proprietary
# information of EVERYSK TECHNOLOGIES. Disclosure, use, or reproduction
# without authorization of EVERYSK TECHNOLOGIES is prohibited.
#
###############################################################################
import hashlib
from functools import partial

from psycopg.types.json import Jsonb, set_json_dumps, set_json_loads

from everysk.core.object import BaseDict
from everysk.core.serialize import dumps, loads
from everysk.sql.utils import ConditionOperator

# https://www.psycopg.org/psycopg3/docs/basic/adapt.html#json-adaptation
set_json_dumps(
    partial(
        dumps,
        add_class_path=True,
        date_format='%Y-%m-%d',
        datetime_format='%Y-%m-%dT%H:%M:%S',
        indent=None,
        separators=(',', ':'),
        use_undefined=True,
    )
)
set_json_loads(partial(loads, use_undefined=True, instantiate_object=True))


## Constants
_SQL_FIELDS = {
    'bool': 'BOOLEAN',
    'bytes': 'BYTEA',
    'date': 'DATE',
    'Date': 'DATE',
    'datetime': 'TIMESTAMPTZ',
    'DateTime': 'TIMESTAMPTZ',
    'dict': 'JSONB',
    'float': 'FLOAT',
    'int': 'INTEGER',
    'list': 'JSONB',
    'set': 'JSONB',
    'str': 'TEXT',
    'tuple': 'JSONB',
}
_SQL_ORDER_BY = {'asc': 'ASC NULLS LAST', 'desc': 'DESC NULLS LAST'}

# SQL queries constants

# Create new schema
_SQL_CREATE_SCHEMA = 'CREATE SCHEMA IF NOT EXISTS "{schema}"{authorization}'

# Create new role
# FEAT: add others Role Attributes if needed, like CREATEDB, etc.
_SQL_CREATE_ROLE = "CREATE ROLE {role_name} WITH LOGIN PASSWORD '{role_password}'"

## Use {name} when you need to replace a value in the query using the format method.
## Use %(name)s when you need to replace a value in the query using the execute method.
_SQL_CREATE_TABLE = 'CREATE TABLE IF NOT EXISTS "{schema}"."{table}" ({fields})'

# https://www.postgresqltutorial.com/postgresql-delete/
_SQL_DELETE = 'DELETE FROM "{schema}"."{table}" WHERE "{primary_key}" = ANY(%(ids)s)'

# https://www.postgresqltutorial.com/postgresql-tutorial/postgresql-upsert/
# https://stackoverflow.com/a/30917361
_SQL_INSERT_OR_UPDATE = (
    'INSERT INTO "{schema}"."{table}" ({fields}) VALUES ({values}) ON CONFLICT ({primary_key}) DO UPDATE SET {update}'
)

# create index
_SQL_INDEX = (
    'CREATE INDEX IF NOT EXISTS "index_{table_name}_{index_name}_btree" ON "{schema}"."{table_name}" '
    "USING btree ({fields} NULLS LAST) WITH (deduplicate_items='true')"
)

# https://www.postgresqltutorial.com/postgresql-tutorial/postgresql-select/
_SQL_SELECT = 'SELECT {fields} FROM "{schema}"."{table}" {group_by} ORDER BY {order_by} {limit} {offset}'
_SQL_SELECT_WHERE = (
    'SELECT {fields} FROM "{schema}"."{table}" {conditions} {group_by} ORDER BY {order_by} {limit} {offset}'
)

# set field to not null
_SQL_SET_NOT_NULL = 'ALTER TABLE IF EXISTS "{schema}"."{table}" ALTER COLUMN "{field}" SET NOT NULL'


class Query:
    ## Public attributes
    primary_key: str = None
    schema: str = None
    table_name: str = None

    def __init__(self, table_name: str, primary_key: str, schema: str | None = None) -> None:
        # Default schema is public
        if not schema:
            schema = 'public'

        self.primary_key = primary_key
        self.schema = schema
        self.table_name = table_name

    ## Private methods
    def _sql_conditions(self, conditions: dict) -> tuple[str | None, dict]:
        """
        Generate the SQL conditions and parameters from the given conditions.
        Conditions are provided as a dictionary where the key is the field operator
        and the value is the value to compare against. Ex: {'age__gt': 30, 'name__eq': 'John'}

        Args:
            conditions (dict): A dictionary of field operators and their values.
        """
        result = []
        params = {}
        for field_operator, value in conditions.items():
            sql, param = ConditionOperator(field_operator=field_operator, value=value).get_sql()
            result.append(sql)
            if param is not None:
                params[field_operator] = param

        if result:
            sql = 'WHERE {conditions}'.format(conditions=' AND '.join(result))
        else:
            sql = None

        return sql, params

    def _sql_group_by(self, group_by: list[str] | None) -> str:
        """
        Constructs a SQL GROUP BY clause from the provided fields.

        Args:
            group_by (set | list[str] | None): A set or list of field names to group by, or None.

        Returns:
            str: A SQL GROUP BY clause string if fields are provided, otherwise an empty string.
        """
        if not group_by:
            return ''

        return 'GROUP BY {}'.format(', '.join(f'"{field}"' for field in group_by))

    def _sql_order_by(self, order_by: str | list[str]) -> str:
        """
        Generate the SQL order by clause from the given order by string.
        The order by string can be in the format 'field__operator' where operator
        can be 'asc' or 'desc'. If no operator is provided, 'asc' is used by default.

        Args:
            order_by (str): The order by string.

        Raises:
            ValueError: If the order by operator is invalid.
        """
        if not order_by:
            order_by = [f'{self.primary_key}__asc']

        if isinstance(order_by, str):
            order_by = [order.strip() for order in order_by.split(',')]

        result = []
        for order in order_by:
            if '__' in order:
                field, operator = order.split('__')

            else:
                field, operator = order, 'asc'

            try:
                operator = _SQL_ORDER_BY[operator]
            except KeyError as error:
                msg = f'Invalid order_by operator: {operator}.'
                raise ValueError(msg) from error
            result.append(f'"{field}" {operator}')

        return ', '.join(result)

    def _sql_limit(self, limit: int | str | None) -> str | None:
        """
        Generate the SQL limit clause from the given limit value.
        If limit is '*', no limit is applied. If limit is None or invalid, a default
        limit of 10 is applied.

        Args:
            limit (int | str): The limit value.

        Raises:
            TypeError: If the limit is not an integer or "*".
        """
        if limit == '*':
            return ''

        if limit is None:
            limit = 10

        if not isinstance(limit, int):
            msg = 'Limit must be an integer or "*".'
            raise TypeError(msg)

        if limit <= 0:
            limit = 10

        return f'LIMIT {limit}'

    def _sql_offset(self, offset: int | str | None) -> str:
        """
        Generates an SQL OFFSET clause using the provided offset value.

        Args:
            offset (int | str | None): The offset value to be used in the SQL query. Must be convertible to an integer.

        Returns:
            str: A string representing the SQL OFFSET clause.

        Raises:
            TypeError: If the offset is not an integer or cannot be converted to an integer.
        """
        if offset is None:
            return ''

        try:
            offset = int(offset)
        except (ValueError, TypeError) as error:
            msg = 'Offset must be an integer or a string representing it.'
            raise TypeError(msg) from error

        if offset < 0:
            msg = 'Offset must not be negative.'
            raise TypeError(msg)

        if offset == 0:
            return ''

        return f'OFFSET {offset}'

    ## Public methods
    def parse_create_schema(self, authorization: str | None = None) -> str:
        """
        Generates a SQL statement to create a schema using the current schema name with optional authorization.

        Args:
            authorization (str | None, optional): The authorization role to assign to the schema.
                If None, no authorization clause is added. Defaults to None.
                To avoid to do alter schema with another SQL.

        Returns:
            str: The formatted SQL statement for creating the schema.
        """
        return _SQL_CREATE_SCHEMA.format(
            schema=self.schema, authorization=f' AUTHORIZATION {authorization}' if authorization else ''
        )

    def parse_create_role(self, role_name: str, role_password: str) -> str:
        """
        Generates a SQL statement to create a role with the given name and password.

        Args:
            role_name (str): The name of the role to create.
            role_password (str): The password for the role.

        Returns:
            str: The formatted SQL statement for creating the role.
        """
        return _SQL_CREATE_ROLE.format(role_name=role_name, role_password=role_password)

    def parse_create_table(self, fields: dict[str, type]) -> str:
        """
        Generate the SQL create table query from the given fields.
        The fields are provided as a dictionary where the key is the field name
        and the value is the field type.

        Args:
            fields (dict[str, type]): A dictionary of field names and their types.
        """
        sql_fields = []
        for field, attr_type in fields.items():
            name = getattr(attr_type, '__name__', getattr(attr_type.__class__, '__name__', str(attr_type))).lower()
            sql_type = _SQL_FIELDS.get(name, 'TEXT')
            if field == self.primary_key:
                # Primary key should be unique and not null
                sql_type = f'{sql_type} PRIMARY KEY NOT NULL'

            sql_fields.append(f'"{field}" {sql_type}')

        fields = ', '.join(sql_fields)
        return _SQL_CREATE_TABLE.format(schema=self.schema, table=self.table_name, fields=fields)

    def parse_delete(self) -> str:
        """
        Generate the SQL delete query.
        Deletes rows based on the primary key.
        """
        return _SQL_DELETE.format(schema=self.schema, table=self.table_name, primary_key=self.primary_key)

    def parse_set_not_null(self, field: str) -> str:
        """
        Generates an SQL statement to set the specified field as NOT NULL.

        Args:
            field (str): The name of the field to modify.

        Returns:
            str: The formatted SQL statement to set the field as NOT NULL.
        """
        return _SQL_SET_NOT_NULL.format(schema=self.schema, table=self.table_name, field=field)

    def parse_index(self, fields: str, index_name: str | None = None) -> str:
        """
        Generates an SQL index creation statement for the specified fields and index name.

        Args:
            fields (str): A comma-separated string of field names to include in the index.
            index_name (str | None, optional): The name of the index. If None, a deterministic index name
                is generated based on the sorted field names.

        Returns:
            str: The formatted SQL statement for creating the index.
        """
        table_index_name: str = index_name

        if table_index_name is None:
            # Generate a eight-character deterministic index name based on the sorted field names
            fields_names: list = [x.strip().split(' ')[0].strip().lower() for x in fields.split(',')]
            fields_names.sort()
            table_index_name = hashlib.sha256('__'.join(fields_names).encode()).hexdigest()[:8]

        return _SQL_INDEX.format(
            schema=self.schema, table_name=self.table_name, fields=fields.strip(), index_name=table_index_name
        )

    def parse_insert_or_update(self, fields: set | list) -> str:
        """
        Generate the SQL insert or update query from the given fields.

        Args:
            fields (set | list): A set or list of field names to include in the query.
        """
        # Create the values string
        # We do not use " here because we are using the values as placeholders
        values = ', '.join(f'%({field})s' for field in fields)

        # Create the SQL query
        update = ', '.join(f'"{field}" = EXCLUDED."{field}"' for field in fields)
        fields = ', '.join([f'"{field}"' for field in fields])

        return _SQL_INSERT_OR_UPDATE.format(
            schema=self.schema,
            fields=fields,
            table=self.table_name,
            values=values,
            primary_key=self.primary_key,
            update=update,
        )

    def parse_insert_or_update_params(self, params: dict) -> dict:
        """
        Prepare the parameters for the insert or update query.
        This method ensures that lists, sets, and tuples are converted to JSONB
        and strings are converted to TEXT for proper handling by PostgreSQL.

        Args:
            params (dict): A dictionary of parameters to prepare.
        """
        # https://www.psycopg.org/psycopg3/docs/basic/adapt.html#json-adaptation
        for key, value in params.items():
            if isinstance(value, (set, tuple)):
                params[key] = Jsonb(list(value))
            elif isinstance(value, (dict, list)):
                params[key] = Jsonb(value)
            elif isinstance(value, BaseDict):
                params[key] = Jsonb(value.to_dict())

        return params

    def parse_select(
        self,
        fields: set | list,
        limit: int | str | None = None,
        offset: int | str | None = None,
        conditions: dict | None = None,
        group_by: list | None = None,
        order_by: str | list | None = None,
    ) -> tuple[str, dict]:
        """
        Constructs a SQL SELECT query string with optional WHERE, GROUP BY, ORDER BY, LIMIT, and OFFSET clauses.

        Args:
            fields (set | list): The fields/columns to select in the query.
            limit (int | str | None, optional): The maximum number of rows to return.
            offset (int | str | None, optional): The number of rows to skip before starting to return rows.
            conditions (dict | None, optional): Conditions for the WHERE clause.
            group_by (list | None, optional): Fields to group the results by.
            order_by (str | list | None, optional): Fields to order the results by.

        Returns:
            tuple[str, dict]: A tuple containing the SQL query string and a dictionary of parameters for the query.
        """
        # FEAT: treat specials like sum, count, avg, round, etc.
        params = {
            'schema': self.schema,
            'fields': ', '.join([f'"{field}"' for field in fields]),
            'table': self.table_name,
            'limit': self._sql_limit(limit),
            'offset': self._sql_offset(offset),
            'group_by': self._sql_group_by(group_by),
            'order_by': self._sql_order_by(order_by),
        }

        sql_params = {}
        if conditions:
            where, sql_params = self._sql_conditions(conditions)
            params['conditions'] = where
            return ' '.join(_SQL_SELECT_WHERE.format(**params).split()), sql_params

        return ' '.join(_SQL_SELECT.format(**params).split()), sql_params
everysk/sql/row_factory.py
ADDED
|
@@ -0,0 +1,63 @@
###############################################################################
#
# (C) Copyright 2025 EVERYSK TECHNOLOGIES
#
# This is an unpublished work containing confidential and proprietary
# information of EVERYSK TECHNOLOGIES. Disclosure, use, or reproduction
# without authorization of EVERYSK TECHNOLOGIES is prohibited.
#
###############################################################################
from collections.abc import Callable, Iterable
from typing import Any

from psycopg import Cursor
from psycopg.rows import _get_names, no_result


def cls_row(cls: type, loads: Callable | None = None) -> Callable:
    """
    Function to convert a row from a cursor to an instance of the specified class.

    Args:
        cls (type): The class to instantiate for each row.
        loads (callable | None, optional): Optional function to process each value. Defaults to None.
    """

    def inner(cursor: Cursor) -> Callable:
        names = _get_names(cursor)
        if names is None:
            return no_result

        def cls_row_(values: Iterable) -> Any:
            if loads is None:
                return cls(**dict(zip(names, values, strict=True)))

            return cls(**dict(zip(names, map(loads, values), strict=True)))

        return cls_row_

    return inner


def dict_row(loads: Callable | None = None) -> Callable:
    """
    Function to convert a row from a cursor to a dictionary.

    Args:
        loads (Callable | None): Optional function to process each value. Defaults to None.
    """

    def inner(cursor: Cursor) -> Callable:
        names = _get_names(cursor)
        if names is None:
            return no_result

        def dict_row_(values: Iterable) -> dict[str, Any]:
            if loads is None:
                return dict(zip(names, values, strict=True))

            return dict(zip(names, map(loads, values), strict=True))

        return dict_row_

    return inner
everysk/sql/settings.py
ADDED
|
@@ -0,0 +1,49 @@
###############################################################################
#
# (C) Copyright 2025 EVERYSK TECHNOLOGIES
#
# This is an unpublished work containing confidential and proprietary
# information of EVERYSK TECHNOLOGIES. Disclosure, use, or reproduction
# without authorization of EVERYSK TECHNOLOGIES is prohibited.
#
###############################################################################

# Activate this setting to log all SQL queries
POSTGRESQL_LOG_QUERIES: bool = False

# Connection settings
POSTGRESQL_CONNECTION_DATABASE: str = None
POSTGRESQL_CONNECTION_PASSWORD: str = None
POSTGRESQL_CONNECTION_PORT: int = 5432
POSTGRESQL_CONNECTION_HOST: str = None
POSTGRESQL_CONNECTION_USER: str = None
POSTGRESQL_POOL_MAX_SIZE: int = 10
POSTGRESQL_POOL_MIN_SIZE: int = 1

# https://www.psycopg.org/psycopg3/docs/api/pool.html
# Maximum time, in seconds, that a connection can stay unused in the pool before being closed, and the pool shrunk.
# This only happens to connections more than min_size, if max_size allowed the pool to grow.
POSTGRESQL_POOL_MAX_IDLE: int = 60 * 5  # 5 minutes

# The maximum lifetime of a connection in the pool, in seconds. Connections used for longer get closed and replaced by
# a new one. The amount is reduced by a random 10% to avoid mass eviction.
POSTGRESQL_POOL_MAX_LIFETIME: int = 60 * 30  # 30 minutes

# Maximum number of requests that can be queued to the pool, after which new requests will fail.
# Raising TooManyRequests, 0 means no queue limit.
POSTGRESQL_POOL_MAX_WAITING: int = 0

# If the connections are opened on init or later.
POSTGRESQL_POOL_OPEN: bool = True

# Maximum time, in seconds, the pool will try to create a connection. If a connection attempt fails, the pool will try
# to reconnect a few times, using an exponential backoff and some random factor to avoid mass attempts.
POSTGRESQL_POOL_RECONNECT_TIMEOUT: int = 60 * 2  # 2 minutes

# The default maximum time in seconds that a client can wait to receive a connection
# from the pool (using connection() or getconn()).
POSTGRESQL_POOL_TIMEOUT: int = 30  # 30 seconds

# e.g., 'require', 'verify-ca', 'verify-full'
# the default if not set is 'prefer' which means try an SSL connection first, and fallback to a non-SSL connection
POSTGRESQL_CONNECTION_SSLMODE: str = None
everysk/sql/utils.py
ADDED
|
@@ -0,0 +1,129 @@
###############################################################################
#
# (C) Copyright 2025 EVERYSK TECHNOLOGIES
#
# This is an unpublished work containing confidential and proprietary
# information of EVERYSK TECHNOLOGIES. Disclosure, use, or reproduction
# without authorization of EVERYSK TECHNOLOGIES is prohibited.
#
###############################################################################
from typing import Any

_SQL_OPERATORS = {
    'endswith': 'LIKE',
    'eq': '=',
    'gt': '>',
    'gte': '>=',
    'ilike': 'ILIKE',
    'in': 'IN',
    'inside': '?',
    'insidebinary': '?|',
    'isnotnull': 'IS NOT NULL',
    'isnull': 'IS NULL',
    'like': 'LIKE',
    'lt': '<',
    'lte': '<=',
    'ne': '!=',
    'nin': 'NOT IN',
    'startswith': 'LIKE',
}


class ConditionOperator:
    ## Public attributes
    field_operator: str = None
    field: str = None
    operator: str = None
    sql_operator: str = None
    value: Any = None

    ## Internal methods
    def __init__(self, *, field_operator: str, value: Any) -> None:
        self.field_operator = field_operator
        self.value = value

        if '__' in self.field_operator:
            self.field, self.operator = field_operator.split('__', 1)
        else:
            self.field, self.operator = field_operator, 'eq'

        # To adjust when used field__isnull = False
        if self.operator == 'isnull' and not value:
            self.operator = 'isnotnull'

        try:
            self.sql_operator = _SQL_OPERATORS[self.operator]
        except KeyError as error:
            msg = f'Invalid field({self.field}) operator: {self.operator}.'
            raise ValueError(msg) from error

    ## Operators methods
    def _operator_default(self) -> tuple[str, Any]:
        """Default operator method "field" <operator> %(field)s."""
        operation = f'{self.sql_operator} %({self.field_operator})s'
        sql = f'"{self.field}" {operation}'
        return sql, self.value

    def _operator_endswith(self) -> tuple[str, str]:
        """Operator method for endswith: "field" LIKE %value."""
        sql, value = self._operator_default()
        return sql, f'%{value}'

    def _operator_in(self) -> tuple[str, list]:
        """Operator method for in: "field" = ANY(%(field__in)s)."""
        # https://www.psycopg.org/psycopg3/docs/basic/from_pg2.html#you-cannot-use-in-s-with-a-tuple
        operation = f'ANY(%({self.field_operator})s)'
        sql = f'"{self.field}" = {operation}'
        if isinstance(self.value, str):
            return sql, self.value.split(',')

        return sql, self.value

    def _operator_isnotnull(self) -> tuple[str, None]:
        """Operator method for isnotnull: "field" IS NOT NULL."""
        sql = f'"{self.field}" IS NOT NULL'
        return sql, None

    def _operator_isnull(self) -> tuple[str, None]:
        """Operator method for isnull: "field" IS NULL."""
        sql = f'"{self.field}" IS NULL'
        return sql, None

    def _operator_like(self) -> tuple[str, str]:
        """Operator method for like: "field" LIKE %value%."""
        sql, value = self._operator_default()
        return sql, f'%{value}%'

    def _operator_ilike(self) -> tuple[str, str]:
        """Operator method for ilike: "field" ILIKE %value%."""
        return self._operator_like()

    def _operator_nin(self) -> tuple[str, list]:
        """Operator method for nin: "field" != ALL(%(field__nin)s)."""
        operation = f'ALL(%({self.field_operator})s)'
        sql = f'"{self.field}" != {operation}'
        if isinstance(self.value, str):
            return sql, self.value.split(',')

        return sql, self.value

    def _operator_startswith(self) -> tuple[str, str]:
        """Operator method for startswith: "field" LIKE value%."""
        sql, value = self._operator_default()
        return sql, f'{value}%'

    ## Public methods
    def get_sql(self) -> tuple[str, Any]:
        """
        Get the SQL representation of the field and operator.

        Example:
            field__operator = 'age__gt'
            value = 30
            returns: ('"age" > %(age__gt)s', 30)
        """
        method_name = f'_operator_{self.operator}'
        if hasattr(self, method_name):
            return getattr(self, method_name)()

        return self._operator_default()
everysk/tests.py
ADDED
|
@@ -0,0 +1,23 @@
###############################################################################
#
# (C) Copyright 2023 EVERYSK TECHNOLOGIES
#
# This is an unpublished work containing confidential and proprietary
# information of EVERYSK TECHNOLOGIES. Disclosure, use, or reproduction
# without authorization of EVERYSK TECHNOLOGIES is prohibited.
#
###############################################################################
# ruff: noqa: F403
try:
    # everysk/api/__init__.py imports requests
    from everysk.api.tests import *
except ModuleNotFoundError as error:
    # This will prevent running these tests if requests is not installed
    if not error.args[0].startswith("No module named 'requests'"):
        raise

from everysk.core.tests import *
from everysk.sdk.brutils.tests import *
from everysk.sdk.tests import *
from everysk.server.tests import *
from everysk.sql.tests import *