pancakes-orm 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pancakes/__init__.py +7 -0
- pancakes/cook/__init__.py +6 -0
- pancakes/cook/flavor.py +407 -0
- pancakes/cook/mold.py +560 -0
- pancakes/datatype/__init__.py +5 -0
- pancakes/datatype/sql_datatype.py +168 -0
- pancakes/tool/__init__.py +5 -0
- pancakes/tool/function.py +54 -0
- pancakes_orm-0.1.0.dist-info/METADATA +19 -0
- pancakes_orm-0.1.0.dist-info/RECORD +13 -0
- pancakes_orm-0.1.0.dist-info/WHEEL +5 -0
- pancakes_orm-0.1.0.dist-info/licenses/LICENSE +201 -0
- pancakes_orm-0.1.0.dist-info/top_level.txt +1 -0
pancakes/__init__.py
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
# Copyright 2026 AeroGenCreator
|
|
2
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
3
|
+
# you may not use this file except in compliance with the License.
|
|
4
|
+
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
|
|
5
|
+
from . import cook
|
|
6
|
+
from . import datatype
|
|
7
|
+
from . import tool
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
# Copyright 2026 AeroGenCreator
|
|
2
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
3
|
+
# you may not use this file except in compliance with the License.
|
|
4
|
+
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
|
|
5
|
+
from . import mold
|
|
6
|
+
from . import flavor
|
pancakes/cook/flavor.py
ADDED
|
@@ -0,0 +1,407 @@
|
|
|
1
|
+
# Copyright 2026 AeroGenCreator
|
|
2
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
3
|
+
# you may not use this file except in compliance with the License.
|
|
4
|
+
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
|
|
5
|
+
"""
|
|
6
|
+
Este codigo recopila la funcion de query avanzado.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
# Modulos Propios
|
|
10
|
+
from tool.function import db_connection
|
|
11
|
+
|
|
12
|
+
# Modulos Python
|
|
13
|
+
import logging
|
|
14
|
+
|
|
15
|
+
# Module-level logging configuration for the query builder.
# NOTE(review): calling basicConfig() at import time configures the root
# logger for the whole application; library code usually only calls
# getLogger(__name__) — confirm this side effect is intended.
logging.basicConfig(
    level=logging.INFO,  # Captures INFO and above (DEBUG records are dropped)
    format='%(asctime)s [%(levelname)s] '
    '%(name)s.%(funcName)s:%(lineno)d - %(message)s'
)
logger = logging.getLogger(__name__)
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def pancakes(
    db_path: str,
    select: str | list,
    from_: str,
    special_select: list = None,
    join: list = None,
    condition: list = None,
    group_by: list = None,
    order_by: list = None,
    limit: int = None
):
    """Build and execute an advanced SELECT query against a database.

    Not supported: HAVING, FOR, PARTITION BY, PIVOT, recursive CTEs,
    CROSS APPLY.

    PARAMETERS:

    -*- db_path -*- | str
        Path to the database file the query runs against.

    -*- select -*- | str | list
        Either the star selector '*', or a list of dicts naming columns
        of the main ("from_") table, each optionally carrying an
        aggregation function (which may be omitted)::

            select = '*'
            select = [
                {
                    'agg': 'count',   # optional aggregation
                    'column': 'name',
                    'alias': True     # force "[main].*" for the main table
                }
            ]

        ALLOWED_AGGREGATIONS = ('MIN', 'MAX', 'SUM', 'COUNT', 'AVG', "")

    -*- from_ -*- | str
        Main table that serves as the starting point of the query::

            from_ = 'curso'

    -*- special_select -*- | list
        List of dicts selecting an aggregation function, a table and a
        column name from a joined (relational) table. 'column': '*'
        selects every column of that table::

            special_select = [
                {
                    'agg': 'max',          # optional aggregation
                    'table': 'estudiante',
                    'column': 'age'        # or 'column': '*'
                }
            ]

        ALLOWED_AGGREGATIONS = ('MIN', 'MAX', 'SUM', 'COUNT', 'AVG', "")

    -*- join -*- | list
        List of dicts mapping table-relation sentences.
        ALLOWED_RELATIONS = ('INNER', 'LEFT', 'RIGHT')::

            join = [
                {
                    'join': 'inner',       # join type
                    'extra': 'estudiante', # extra (joined) table
                    'fkey': 'curso_id',    # foreign-key column on extra
                    'origin': 'curso',     # origin (parent) table
                    'id': 'curso_id',      # id column on origin
                }
            ]

        The join works in both directions; for convenience use the
        child table as the extra table and the parent as main.
        (PanCakesORM) names the index of any table "<table_name>_id".

    -*- condition -*- | list
        List of dicts adding WHERE predicates to the query::

            condition = [
                {
                    'table': ...,    # table for the condition
                    'column': ...,   # column for the condition
                    'operator': ..., # comparison operator
                    'value': ...,    # str, int, float, bool, list, tuple
                    'join': ...,     # logical conjunction (AND / OR)
                }
            ]

        The logical conjunction may be omitted when none is needed.

    -*- group_by -*- | list
        List of dicts mapping which table and column to group by::

            group_by = [{'table': 'country', 'column': 'name'}]

    -*- order_by -*- | list
        List of dicts mapping ordering::

            order_by = [
                {
                    'table': 'estudiante',  # optional
                    'column': 'name',
                    'order': 'DESC'         # optional ('ASC'/'DESC')
                }
            ]

        Omit 'table' when the ordering column belongs to the main
        table (T1, the "from_" argument).

    -*- limit -*- | int
        Maximum number of rows returned by the query::

            limit = 5

    RETURNS:
        (rows, column_names) on success, or None when a clause fails
        validation (the error is printed).

    RAISES:
        TypeError  -- "from_" is not a str, or "select" has a bad type.
        ValueError -- invalid aggregation in "select".
        Exception  -- invalid "special_select" contents.
    """

    ALLOWED_AGGREGATIONS = ('MIN', 'MAX', 'SUM', 'COUNT', 'AVG', "")
    ALLOWED_RELATIONS = ('INNER', 'LEFT', 'RIGHT')
    ALLOWED_OPERATORS = (
        "=", "<", "<=", ">", ">=", "<>",
        "IN", "NOT IN",
        "BETWEEN",
        "IS", "IS NOT",
        "LIKE", "NOT LIKE"
    )
    ALLOWED_JOINS = ('AND', 'OR', "")
    ALLOWED_ORDERS = ('ASC', 'DESC', '')

    # -*- MAIN SELECT -*-
    if not isinstance(from_, str):
        msg = (
            f'Table name "from_" {from_} must be a string. '
            f'Given {type(from_)}'
        )
        logger.error(msg)
        raise TypeError(msg)

    if not isinstance(select, (list, tuple, str)):
        msg = (
            f'"Select" argument invalid datatype. {select}. '
            f'Must be (str, list). Given {type(select)}'
        )
        logger.error(msg)
        raise TypeError(msg)

    s_alias_confirm = False
    s_line = [] if isinstance(select, (list, tuple)) else "*"
    if isinstance(s_line, list):
        # First pass: any entry with a truthy 'alias' forces "[main].*".
        for data in select:
            try:
                s_alias = data.get('alias')
            except AttributeError:
                continue
            if s_alias:
                s_alias_confirm = True
                break
        # Second pass: render each column, optionally aggregated.
        for data in select:
            try:
                s_agg = data.get('agg').upper()
            except AttributeError:
                # 'agg' missing (None.upper()) -> no aggregation.
                s_agg = ""
            s_col = data.get('column')
            if s_agg:
                if s_agg in ALLOWED_AGGREGATIONS:
                    s_line.append(f"{s_agg}([{from_}].[{s_col}])")
                else:
                    msg = (
                        f'Invalid aggregation function {s_agg}.'
                    )
                    logger.error(msg)
                    raise ValueError(msg)
            else:
                s_line.append(f"[{from_}].[{s_col}]")
    select_head = s_line

    # -*- SPECIAL SELECT -*-
    select_body = []
    if special_select:
        if not isinstance(special_select, list):
            raise Exception(f"""
                Argument "special_select" must be a list of dictionaries
                {special_select}.
            """)
        sp_line = []
        for data in special_select:
            try:
                sp_agg = data.get('agg').upper()
            except AttributeError:
                sp_agg = ""
            sp_tab = data.get('table')
            sp_col = data.get('column')
            if not isinstance(sp_col, (str)):
                raise Exception(f"""
                    Invalid selector for "special_select" argument:
                    {sp_col}.
                """)
            if sp_col != '*':
                if sp_agg:
                    if sp_agg in ALLOWED_AGGREGATIONS:
                        sp_line.append(f"{sp_agg}([{sp_tab}].[{sp_col}])")
                    else:
                        raise Exception(f"""
                            Invalid aggregation function {sp_agg}
                        """)
                else:
                    sp_line.append(f"[{sp_tab}].[{sp_col}]")
            else:
                sp_line.append(f"[{sp_tab}].*")
        select_body.extend(sp_line)

    # -*- SELECT CLAUSE -*-
    # A star anywhere in the special selects, or an 'alias' flag,
    # collapses the main-table selection to "[main].*".
    partial_star = any("*" in star for star in select_body)
    if partial_star or s_alias_confirm:
        select_head = [f"[{from_}].*"]
    if isinstance(select_head, list):
        sub_select = ", ".join(select_head + select_body)
    else:
        sub_select = select_head

    select_clause = (
        f"""SELECT
        {sub_select}
        FROM [{from_}]"""
    )

    # -*- RELATION (JOIN) CLAUSE -*-
    join_clause = ""
    if join:
        try:
            sentences = []
            for data in join:
                rel = data.get('join').upper()
                extra = data.get('extra')
                fkey = data.get('fkey')
                origin = data.get('origin')
                index = data.get('id')
                if rel not in ALLOWED_RELATIONS:
                    raise Exception(f"""
                        Invalid "join" Relation {rel}.
                        Valid ones: {ALLOWED_RELATIONS}
                    """)
                line = (f""" {rel} JOIN [{extra}]
                    ON [{extra}].[{fkey}] = [{origin}].[{index}]
                """)
                sentences.append(line)
            join_clause = " ".join(sentences)
        except Exception as e:
            print(e)
            return

    # -*- CONDITION (WHERE) CLAUSE -*-
    lines = []
    raw_data = []  # bound parameters, passed to cur.execute
    if condition and isinstance(condition, list):
        try:
            for data in condition:
                cache_data = []
                con_table = data.get('table')
                column = data.get('column')
                operator = data.get('operator').upper()
                value = data.get('value')
                try:
                    conj = data.get('join').upper()
                except AttributeError:
                    # 'join' omitted -> no trailing conjunction.
                    conj = ""
                if operator not in ALLOWED_OPERATORS:
                    raise Exception(
                        f"""Invalid Operator Try:
                        {ALLOWED_OPERATORS}"""
                    )
                elif conj not in ALLOWED_JOINS:
                    raise Exception(f"""
                        Values for 'join' not in allowed joins:
                        {ALLOWED_JOINS}. {conj}
                    """)
                # Values are always bound with '?' placeholders, never
                # interpolated into the SQL string.
                if isinstance(value, (int, str, bool, float)):
                    cache_data.append(value)
                    marks = "?"
                    raw_data.extend(cache_data)
                elif isinstance(value, (list, tuple)):
                    cache_data.extend(value)
                    marks = ", ".join(['?'] * len(cache_data))
                    raw_data.extend(cache_data)
                else:
                    raise Exception(
                        f""" No Integer or Interable was found in your
                        condition {data}: 'value' statement: {value}
                        """
                    )
                conj = f' {conj}' if conj else conj
                line = (f"""
                [{con_table}].[{column}] {operator} ({marks}){conj}"""
                )
                lines.append(line)
            lines = " ".join(lines)
        except Exception as e:
            print(e)
            return
    where_clause = f" WHERE {lines}" if lines else ""

    # -*- GROUP BY SENTENCE -*-
    group_clause = ""
    if group_by and isinstance(group_by, list):
        try:
            g_line = []
            for data in group_by:
                g_tab = data.get('table')
                g_col = data.get('column')
                g_line.append(f'[{g_tab}].[{g_col}]')
            group_clause = ", ".join(g_line)
        except Exception as e:
            print(e)
            return
    group_clause = f' GROUP BY {group_clause}' if group_clause else ""

    # -*- ORDER BY SENTENCE -*-
    order_clause = []
    if order_by and isinstance(order_by, list):
        for data in order_by:
            # BUG FIX: dict.get never raises AttributeError, so the old
            # try/except produced "[None].[col]" when 'table' was
            # omitted; default explicitly to the main table instead.
            o_table = data.get('table') or from_
            o_column = data.get('column')
            # BUG FIX: 'order' is documented as optional, but calling
            # .upper() on a missing key crashed with AttributeError;
            # an omitted order now maps to "" (allowed).
            o_order = (data.get('order') or '').upper()
            if o_order in ALLOWED_ORDERS:
                order_clause.append(f'[{o_table}].[{o_column}] {o_order}')
            else:
                print(f"""
                    Invalid "order" keyword: {o_order}, allowed ones:
                    {ALLOWED_ORDERS}.
                """)
                return
    if order_clause:
        # Hoisted out of the f-string: reusing the same quote character
        # inside an f-string is a SyntaxError before Python 3.12.
        order_columns = ", ".join(order_clause)
        order_clause = f" ORDER BY {order_columns}"
    else:
        order_clause = ""

    # -*- LIMIT SENTENCE -*-
    limit_clause = ""
    if limit and isinstance(limit, int):
        limit_clause = " LIMIT ?"
        raw_data.append(limit)
    else:
        limit_clause = ""

    # -*- FINAL QUERY -*-
    query = (
        f"""
        {select_clause}
        {join_clause}
        {where_clause}
        {group_clause}
        {order_clause}
        {limit_clause};
        """
    )
    query = query.strip().replace("    ", " ")

    # -*- EXECUTE QUERY -*-
    with db_connection(db_path=db_path) as (conn, cur):
        if raw_data:
            res = cur.execute(query, tuple(raw_data))
        else:
            res = cur.execute(query)
        return res.fetchall(), [d[0] for d in cur.description]