dapper-sqls 1.2.3__py3-none-any.whl → 1.2.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4,7 +4,7 @@ from itertools import groupby
  import os
  from .utils import (create_content_orm, TableInformation, ColumnInformation, InformationSchemaRoutines, create_field, create_content_async_orm,
  create_params_routine, get_parameters_with_defaults, create_queue_update, create_table_description, create_arg,
- SqlTable)
+ SqlTable, SqlStored)
  from ...models import TableBaseModel

  class TableBuilderData:
@@ -294,24 +294,26 @@ class ModelBuilder(object):
  """

  self.query_routines = f"""
- SELECT
- p.SPECIFIC_NAME,
- p.PARAMETER_NAME,
- p.DATA_TYPE,
- p.SPECIFIC_CATALOG,
- p.SPECIFIC_SCHEMA,
- p.ORDINAL_POSITION,
- sm.definition AS PROCEDURE_DEFINITION
- FROM
- INFORMATION_SCHEMA.PARAMETERS p
- JOIN
- INFORMATION_SCHEMA.ROUTINES r ON p.SPECIFIC_NAME = r.SPECIFIC_NAME
- JOIN
- sys.sql_modules sm ON OBJECT_NAME(sm.object_id) = r.SPECIFIC_NAME
- WHERE
- r.ROUTINE_TYPE = 'PROCEDURE'
- ORDER BY
- p.SPECIFIC_NAME, p.ORDINAL_POSITION;
+ SELECT
+ r.SPECIFIC_CATALOG,
+ r.SPECIFIC_SCHEMA,
+ r.SPECIFIC_NAME,
+ sm.definition AS PROCEDURE_DEFINITION,
+ (
+ SELECT
+ p.ORDINAL_POSITION,
+ p.PARAMETER_NAME,
+ p.DATA_TYPE
+ FROM INFORMATION_SCHEMA.PARAMETERS p
+ WHERE p.SPECIFIC_NAME = r.SPECIFIC_NAME
+ ORDER BY p.ORDINAL_POSITION
+ FOR JSON PATH
+ ) AS PARAMETERS
+ FROM INFORMATION_SCHEMA.ROUTINES r
+ JOIN sys.sql_modules sm
+ ON OBJECT_NAME(sm.object_id) = r.SPECIFIC_NAME
+ WHERE r.ROUTINE_TYPE = 'PROCEDURE'
+ ORDER BY r.SPECIFIC_NAME;
  """

  @property
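
With the FOR JSON PATH subquery, the routines query now yields one row per stored procedure rather than one row per parameter, and the parameter list arrives as a JSON string in a PARAMETERS column. Roughly, a returned row looks like this (values are illustrative, not taken from a real database):

    # Illustrative shape of a single row produced by the new query_routines
    row = {
        "SPECIFIC_CATALOG": "MyDb",                      # hypothetical database name
        "SPECIFIC_SCHEMA": "dbo",
        "SPECIFIC_NAME": "STP_GetUser",                  # hypothetical procedure name
        "PROCEDURE_DEFINITION": "CREATE PROCEDURE dbo.STP_GetUser @Id INT = NULL AS ...",
        "PARAMETERS": '[{"ORDINAL_POSITION":1,"PARAMETER_NAME":"@Id","DATA_TYPE":"int"}]',
    }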
@@ -358,19 +360,12 @@ class ModelBuilder(object):
  return False
  return grouped_list

- def get_routines_data(self):
+ def get_routines_data(self) -> list[SqlStored]:
  with self.dapper.query() as db:
- information_schema_routines = db.fetchall(self.query_routines)
- if not information_schema_routines:
+ rows = db.fetchall(self.query_routines)
+ if not rows.success:
  return []
- information_schema_routines = self.dapper.load(InformationSchemaRoutines, information_schema_routines)
-
- information_schema_routines.sort(key=lambda x: x.SPECIFIC_NAME)
- grouped_data = groupby(information_schema_routines, lambda x: x.SPECIFIC_NAME)
- grouped_list : list[list[InformationSchemaRoutines]] = [[obj for obj in group] for _, group in grouped_data]
- if not grouped_list:
- return []
- return grouped_list
+ return self.dapper.load(SqlStored, rows)

  def get_models_db(self, tables : dict[str, SqlTable]):
  models : list[TableBaseModel] = []
@@ -410,7 +405,7 @@ class ModelBuilder(object):
  if d.CHARACTER_MAXIMUM_LENGTH is not None and d.CHARACTER_MAXIMUM_LENGTH > 0 and d.available
  }

- table_alias = f'_alias_{table_name.lower()}_alias_'
+ table_alias = table_name.lower()

  content_model = f'''# coding: utf-8
  class {class_name}(TableBaseModel):
@@ -533,7 +528,7 @@ class {class_name}(TableBaseModel):
  if d.CHARACTER_MAXIMUM_LENGTH is not None and d.CHARACTER_MAXIMUM_LENGTH > 0
  }

- table_alias = f'_alias_{table_name.lower()}_alias_'
+ table_alias = table_name.lower()

  content_model = f'''# coding: utf-8

@@ -581,36 +576,36 @@ class {class_name}(TableBaseModel):
  for data in information_routines:

  if table_catalog != "all":
- if data[0].SPECIFIC_CATALOG not in table_catalog :
+ if data.SPECIFIC_CATALOG not in table_catalog :
  continue

  if table_schema != "all":
- if data[0].SPECIFIC_SCHEMA not in table_schema :
+ if data.SPECIFIC_SCHEMA not in table_schema :
  continue

- routine_oprions = dict_routines_options.get(data[0].SPECIFIC_NAME)
+ routine_oprions = dict_routines_options.get(data.SPECIFIC_NAME)
  if routine_oprions:
  if routine_oprions.ignore_routine:
  continue

- defaults_values = get_parameters_with_defaults(data[0].PROCEDURE_DEFINITION)
+ defaults_values = get_parameters_with_defaults(data.PROCEDURE_DEFINITION)
  params_routine = [create_params_routine(row, defaults_values) for row in data]
  params_routine_str = ", ".join(params_routine)

- stp_name = data[0].SPECIFIC_NAME.replace('STP_', '')
+ stp_name = data.SPECIFIC_NAME.replace('STP_', '')
  content_routine = f'''
  def {stp_name}(self, *, {params_routine_str}):
- return StpBuilder(self.dapper, '[{data[0].SPECIFIC_SCHEMA}].[{data[0].SPECIFIC_NAME}]',locals())'''
+ return StpBuilder(self.dapper, '[{data.SPECIFIC_SCHEMA}].[{data.SPECIFIC_NAME}]',locals())'''

  content_async_routine = f'''
  def {stp_name}(self, *, {params_routine_str}):
- return AsyncStpBuilder(self.async_dapper, '[{data[0].SPECIFIC_SCHEMA}].[{data[0].SPECIFIC_NAME}]', locals())'''
+ return AsyncStpBuilder(self.async_dapper, '[{data.SPECIFIC_SCHEMA}].[{data.SPECIFIC_NAME}]', locals())'''

- catalog = data[0].SPECIFIC_CATALOG
+ catalog = data.SPECIFIC_CATALOG
  if catalog not in builder_data:
  builder_data[catalog] = BuilderData(catalog)

- builder_data[catalog].routines.append(RoutineBuilderData(data[0].SPECIFIC_SCHEMA, data[0].SPECIFIC_NAME, content_routine, content_async_routine))
+ builder_data[catalog].routines.append(RoutineBuilderData(data.SPECIFIC_SCHEMA, data.SPECIFIC_NAME, content_routine, content_async_routine))

  for catalog, data in builder_data.items():
  import_init_db += f"from .{catalog} import {catalog}\n"
@@ -161,13 +161,27 @@ class SqlTable(TableInformation):
  COLUMNS : list[ColumnInformation] = []

  class InformationSchemaRoutines(BaseModel):
- SPECIFIC_CATALOG : str = Field("", description="")
- SPECIFIC_SCHEMA : str = Field("", description="")
- SPECIFIC_NAME : str = Field("", description="")
- ORDINAL_POSITION : int = Field(None, description="")
- PARAMETER_NAME : str = Field("", description="")
- DATA_TYPE : str = Field("", description="")
- PROCEDURE_DEFINITION : str = Field("", description="")
+ ORDINAL_POSITION: int | None = None
+ PARAMETER_NAME: str = ""
+ DATA_TYPE: str = ""
+
+ class SqlStored(BaseModel):
+ SPECIFIC_CATALOG: str
+ SPECIFIC_SCHEMA: str
+ SPECIFIC_NAME: str
+ PROCEDURE_DEFINITION: str
+ PARAMETERS: list[InformationSchemaRoutines] = []
+
+ @model_validator(mode="before")
+ @classmethod
+ def parse_json_fields(cls, data):
+ if isinstance(data, dict):
+ if isinstance(data.get("PARAMETERS"), str):
+ try:
+ data["PARAMETERS"] = json.loads(data["PARAMETERS"])
+ except Exception:
+ data["PARAMETERS"] = []
+ return data

  def create_database_description(tables: List[TableInformation]) -> str:
  lines = []
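
A minimal sketch of how the new SqlStored model consumes such a row: the mode="before" validator decodes the PARAMETERS JSON string before normal field validation, so a raw database row can be passed straight to pydantic (the values below are made up):

    # Sketch only: hypothetical row, same shape as the FOR JSON PATH query result.
    stored = SqlStored.model_validate({
        "SPECIFIC_CATALOG": "MyDb",
        "SPECIFIC_SCHEMA": "dbo",
        "SPECIFIC_NAME": "STP_GetUser",
        "PROCEDURE_DEFINITION": "CREATE PROCEDURE dbo.STP_GetUser ...",
        "PARAMETERS": '[{"ORDINAL_POSITION": 1, "PARAMETER_NAME": "@Id", "DATA_TYPE": "int"}]',
    })
    assert stored.PARAMETERS[0].DATA_TYPE == "int"   # parse_json_fields decoded the string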
@@ -2,11 +2,12 @@
  from typing import Union
  from datetime import datetime, date
  from ..models import TableBaseModel, QueryFieldBase, BaseJoinConditionField, SearchTable, JoinSearchTable
+ import json

  class QueryBuilder(object):

  @classmethod
- def build_where_clause(cls, model: TableBaseModel, table_alias: str = "", base_table_alias : str = "") -> str:
+ def build_where_clause(cls, model: TableBaseModel, table_alias: str = "") -> str:
  clause_parts = []

  for field_name, field_value in model:
@@ -18,9 +19,7 @@ class QueryBuilder(object):
  qualified_field = f"{table_alias}.{field_name}" if table_alias.strip() else field_name

  if isinstance(field_value, BaseJoinConditionField):
- sql = field_value.to_sql(base_table_alias, qualified_field)
- if sql:
- clause_parts.append(sql)
+ continue

  elif isinstance(field_value, QueryFieldBase):
  sql = field_value.to_sql(qualified_field)
@@ -34,16 +33,20 @@ class QueryBuilder(object):
  elif isinstance(field_value, bool):
  clause_parts.append(f"{qualified_field} = {'1' if field_value else '0'}")
  elif isinstance(field_value, datetime):
- clause_parts.append(f"{qualified_field} = '{field_value.strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]}'")
+ ms = field_value.microsecond // 1000
+ formatted = field_value.strftime(f"%Y-%m-%d %H:%M:%S.{ms:03d}")
+ clause_parts.append(f"{qualified_field} = '{formatted}'")
  elif isinstance(field_value, date):
  clause_parts.append(f"{qualified_field} = '{field_value.strftime('%Y-%m-%d')}'")
  elif isinstance(field_value, bytes):
  # Convert the bytes to a hexadecimal string for SQL (e.g. 0x4E6574)
  hex_value = field_value.hex()
  clause_parts.append(f"{qualified_field} = 0x{hex_value}")
+ elif isinstance(field_value, dict):
+ clause_parts.append(f"{qualified_field} = '{json.dumps(field_value, ensure_ascii=False)}'")
  else:
- clause_parts.append(f"{qualified_field} = {field_value}")
-
+ clause_parts.append(f"{qualified_field} = '{str(field_value)}'")
+
  return " AND ".join(clause_parts)

  @staticmethod
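
The old and new datetime branches produce the same millisecond-precision literal; the new code truncates microseconds arithmetically instead of slicing the formatted string. A quick self-contained check:

    from datetime import datetime

    value = datetime(2024, 5, 17, 13, 45, 30, 678901)

    old = value.strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]      # drop the last 3 of 6 microsecond digits
    ms = value.microsecond // 1000                          # 678
    new = value.strftime(f"%Y-%m-%d %H:%M:%S.{ms:03d}")

    assert old == new == "2024-05-17 13:45:30.678"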
@@ -54,15 +57,19 @@ class QueryBuilder(object):
  elif isinstance(value, bool):
  return '1' if value else '0'
  elif isinstance(value, datetime):
- return f"'{value.strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]}'"
+ ms = value.microsecond // 1000
+ formatted = value.strftime(f"%Y-%m-%d %H:%M:%S.{ms:03d}")
+ return f"'{formatted}'"
  elif isinstance(value, date):
  return f"'{value.strftime('%Y-%m-%d')}'"
  elif isinstance(value, bytes):
  return f"0x{value.hex()}"
+ elif isinstance(value, dict):
+ return f"'{json.dumps(value, ensure_ascii=False)}'"
  elif value is None:
  return "NULL"
  else:
- return str(value)
+ return f"'{str(value)}'"

  @staticmethod
  def build_select_fields(search_table: SearchTable, table_alias: str, rename_fields: bool = True) -> str:
@@ -91,10 +98,10 @@ class QueryBuilder(object):
  @classmethod
  def update(cls, model: TableBaseModel, where : Union[str , TableBaseModel]):
  model._reset_defaults()
- update_data = {k: int(v) if isinstance(v, bool) else v for k, v in model.model_dump(mode="json", exclude_none=True).items()}
+ update_data = model.model_dump(exclude_none=True)
  if not isinstance(where, str):
  where._reset_defaults()
- for key in where.model_dump(mode="json", exclude_none=True):
+ for key in where.model_dump(exclude_none=True):
  if key in update_data:
  update_data.pop(key, None)
  where = cls.build_where_clause(where)
@@ -111,7 +118,7 @@ class QueryBuilder(object):
  model._reset_defaults()
  insert_data = {
  k: int(v) if isinstance(v, bool) else v
- for k, v in model.model_dump(mode="json", exclude_none=True).items()
+ for k, v in model.model_dump(exclude_none=True).items()
  }
  columns = ", ".join(insert_data.keys())
  values = ", ".join(cls.format_sql_value(v) for v in insert_data.values())
@@ -157,6 +164,7 @@ class QueryBuilder(object):

  # JOINs
  join_clauses = []
+ join_where_clauses : list[str] = []
  for join_search in joins:
  join_model = join_search.model
  join_model._reset_defaults()
@@ -172,13 +180,19 @@ class QueryBuilder(object):
  f"ON {on_conditions}"
  )
  join_clauses.append(join_clause)
-
+
  # SELECT fields coming from the JOIN
  select_fields += ", " + cls.build_select_fields(join_search, join_table_alias)

+ join_where_clause = cls.build_where_clause(join_model, join_table_alias)
+ if join_where_clause.strip():
+ join_where_clauses.append(join_where_clause)
+
  # Main WHERE
  where_clause = cls.build_where_clause(main_model, main_table_alias)
  where_part = f"WHERE {where_clause}" if where_clause else ""
+ for join_where_clause in join_where_clauses:
+ where_part += f" AND {join_where_clause}"

  # Final SQL
  sql_query = (
@@ -1,24 +1,49 @@
  # coding: utf-8
  from typing import Type
- from pydantic import BaseModel
+ from datetime import datetime, date
+ from ..models import TableBaseModel
+ import json

  class StoredBuilder:
- @staticmethod
- def _build_where_clause(**kwargs):
+
+ @classmethod
+ def _build_where_clause(cls, **kwargs):
  conditions = []
  parameters = []
  for field, value in kwargs.items():
  if value is not None:
- conditions.append(f"{field} = ?")
- parameters.append(value)
+ conditions.append(f"{field} = ?")
+ parameters.append(cls.format_sql_value(value))
  return " AND ".join(conditions), tuple(parameters)
+
+ @staticmethod
+ def format_sql_value(value):
+ if isinstance(value, str):
+ value = value.replace("'", "''")
+ return f"{value}"
+ elif isinstance(value, bool):
+ return 1 if value else 0
+ elif isinstance(value, datetime):
+ ms = value.microsecond // 1000
+ formatted = value.strftime(f"%Y-%m-%d %H:%M:%S.{ms:03d}")
+ return f"{formatted}"
+ elif isinstance(value, date):
+ return f"{value.strftime('%Y-%m-%d')}"
+ elif isinstance(value, bytes):
+ return f"0x{value.hex()}"
+ elif isinstance(value, dict):
+ return json.dumps(value, ensure_ascii=False)
+ elif value is None:
+ return "NULL"
+ else:
+ return str(value)

  @classmethod
- def update(cls, model: Type[BaseModel], where: Type[BaseModel]):
+ def update(cls, model: Type[TableBaseModel], where: Type[TableBaseModel]):
  model._reset_defaults()
  where._reset_defaults()
- update_data = {k: int(v) if isinstance(v, bool) else v for k, v in model.model_dump(mode="json", exclude_none=True).items()}
- where_data = {k: int(v) if isinstance(v, bool) else v for k, v in where.model_dump(mode="json", exclude_none=True).items()}
+ update_data = model.model_dump(exclude_none=True)
+ where_data = where.model_dump(exclude_none=True)

  for key in where_data:
  if key in update_data:
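
Unlike QueryBuilder, StoredBuilder still emits parameterized statements: _build_where_clause keeps the "field = ?" placeholders, and only the values are normalized through the new format_sql_value before entering the parameters tuple. A rough sketch of the resulting behaviour (field names are illustrative):

    from datetime import datetime

    clause, params = StoredBuilder._build_where_clause(
        Name="O'Brien",
        Active=True,
        CreatedAt=datetime(2024, 5, 17, 13, 45, 30, 678901),
    )
    # clause -> "Name = ? AND Active = ? AND CreatedAt = ?"
    # params -> ("O''Brien", 1, "2024-05-17 13:45:30.678")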
@@ -32,9 +57,9 @@ class StoredBuilder:
  return sql_query, tuple(update_data.values()) + where_params

  @classmethod
- def insert(cls, model : Type[BaseModel], name_column_id = 'Id'):
+ def insert(cls, model : Type[TableBaseModel], name_column_id = 'Id'):
  model._reset_defaults()
- insert_data = {k: int(v) if isinstance(v, bool) else v for k, v in model.model_dump(mode="json", exclude_none=True).items()}
+ insert_data = {k: cls.format_sql_value(v) for k, v in model.model_dump(exclude_none=True).items()}
  columns = ", ".join(insert_data.keys())
  values = ", ".join(["?" for _ in insert_data.values()])
  sql_query = f"""
@@ -45,10 +70,10 @@ class StoredBuilder:
  return sql_query, tuple(insert_data.values())

  @classmethod
- def select(cls, model : Type[BaseModel], additional_sql : str = "" ,select_top : int= None):
+ def select(cls, model : Type[TableBaseModel], additional_sql : str = "" ,select_top : int= None):
  model._reset_defaults()
  top_clause = f"TOP ({select_top}) * " if select_top else "*"
- select_data = {k: int(v) if isinstance(v, bool) else v for k, v in model.model_dump(mode="json", exclude_none=True).items()}
+ select_data = model.model_dump(exclude_none=True)
  where_clause, parameters = cls._build_where_clause(**select_data)

  sql_query = f"SELECT {top_clause} FROM {model.TABLE_NAME}"
@@ -58,9 +83,9 @@ class StoredBuilder:
  return sql_query, parameters

  @classmethod
- def delete(cls, model : Type[BaseModel]):
+ def delete(cls, model : Type[TableBaseModel]):
  model._reset_defaults()
- delete_data = {k: int(v) if isinstance(v, bool) else v for k, v in model.model_dump(mode="json", exclude_none=True).items()}
+ delete_data = model.model_dump(exclude_none=True)
  where_clause, parameters = cls._build_where_clause(**delete_data)
  if not where_clause:
  raise ValueError("DELETE operation requires at least one condition.")
@@ -249,7 +249,7 @@ class Query(BaseExecutor):
  elif exec_type == ExecType.fetchall:
  return Result.Fetchall(query_sql, None, None, ex)
  elif exec_type == ExecType.send:
- return Result.Send(query_sql, False, str(ex))
+ return Result.Send(query_sql, False, ex)

  # return the result
  return result
@@ -235,9 +235,17 @@ class TableBaseModel(BaseModel, ABC):
  class SearchTable(BaseModel):
  model: TableBaseModel
  include: Optional[List[str]] = Field(default_factory=list)
-
+
+ def model_dump_log(self):
+ model = self.model.model_dump(exclude_none=True, mode="json")
+ return {'model': model, 'include': self.include}
+
  class JoinSearchTable(SearchTable):
  join_type: Literal["INNER", "LEFT", "RIGHT", "FULL"] = "LEFT"
+
+ def model_dump_log(self,):
+ model = self.model.model_dump(exclude_none=True, mode="json")
+ return {'table': self.model.__class__.__name__, 'model': model, 'include': self.include, 'type': self.join_type}

  class BaseUpdate(ABC):

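The new model_dump_log helpers only shape the payload that ends up in logs; a hypothetical example, assuming a User model deriving from TableBaseModel with a Name column:

    # Hypothetical: User(TableBaseModel) with a Name field; only set fields are dumped.
    search = JoinSearchTable(model=User(Name="Alice"), include=["Id", "Name"])
    search.model_dump_log()
    # -> {'table': 'User', 'model': {'Name': 'Alice'}, 'include': ['Id', 'Name'], 'type': 'LEFT'}
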
@@ -24,7 +24,9 @@ class QueryFieldBase(BaseModel, ABC):
  elif isinstance(val, bool):
  return '1' if val else '0'
  elif isinstance(val, datetime):
- return f"'{val.strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]}'"
+ ms = val.microsecond // 1000
+ formatted = val.strftime(f"%Y-%m-%d %H:%M:%S.{ms:03d}")
+ return f"'{formatted}'"
  elif isinstance(val, date):
  return f"'{val.strftime('%Y-%m-%d')}'"
  return str(val)
@@ -4,6 +4,7 @@ from .._types import SqlErrorType, SQL_ERROR_HTTP_CODES, T
  from .base import SensitiveFields
  import json
  from collections import defaultdict
+ import sqlparse

  def result_dict(cursor, result):
  return dict(
@@ -47,17 +48,28 @@ class Error(object):
  self.message = str(exception) if isinstance(exception, Exception) else ""
  self.type = classify_error(self.message)

+ def format_sql(query: str) -> str:
+ formatted_query = sqlparse.format(
+ query,
+ reindent=True,
+ keyword_case='upper',
+ identifier_case=None,
+ strip_comments=False,
+ use_space_around_operators=True
+ )
+ return formatted_query
+
  class BaseResult(object):
  def __init__(self, query : str | tuple):
  if isinstance(query, tuple):
  q_str, *params = query
  stored_procedure = {
- "query": q_str,
+ "query": format_sql(q_str),
  "params": [list(p) if isinstance(p, tuple) else p for p in params]
  }
  self._query = json.dumps(stored_procedure)
  else:
- self._query = query
+ self._query = format_sql(query)

  @property
  def query(self):
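
format_sql simply delegates to sqlparse's formatter, which the package now declares as a dependency. A small standalone example of an equivalent call (the query text is made up):

    import sqlparse

    sql = "select Id, Name from Users where Active = 1 order by Name"
    formatted = sqlparse.format(sql, reindent=True, keyword_case='upper',
                                strip_comments=False, use_space_around_operators=True)
    print(formatted)   # keywords uppercased, clauses re-indented onto separate lines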
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: dapper-sqls
- Version: 1.2.3
+ Version: 1.2.4
  Summary: UNKNOWN
  Home-page: UNKNOWN
  Author: Samuel Semedo
@@ -36,6 +36,7 @@ Requires-Dist: typing-extensions
  Requires-Dist: urllib3
  Requires-Dist: wheel-filename
  Requires-Dist: yarl
+ Requires-Dist: sqlparse

  UNKNOWN

@@ -7,26 +7,26 @@ dapper_sqls/async_dapper/__init__.py,sha256=lBXRyXMCaiwgcK5TCw5rg-niwFS4GcZVW5Q2
  dapper_sqls/async_dapper/async_dapper.py,sha256=n9oDmgvRvB3TkuW3e-rcvy35VZfYespB9qn9qtCVa7s,2454
  dapper_sqls/async_dapper/async_executors.py,sha256=8P-tvoq45fuPrzwbESbwlmTvb0OjBAuEhhNj30AxzlY,17213
  dapper_sqls/builders/__init__.py,sha256=o_dGrHF09NOj3KFLpkpfaRzJMgYcddpQy-QTOJJTLPA,153
- dapper_sqls/builders/query.py,sha256=Qf2V4MSSrZsHsMoSjczrAga5gcRf6Vl-vhJxtcLGVBA,8345
- dapper_sqls/builders/stored.py,sha256=EHNRTLFYDVvqqWkmQggzJIbXfTKZN4Se8rbME7wLPHU,3131
+ dapper_sqls/builders/query.py,sha256=HZvZk1bqL9cH29p8Csg5OAUsaJNWqYtg0CGJQ0VFynM,8935
+ dapper_sqls/builders/stored.py,sha256=eRoQVvV9Np3mDiZh0M-mCpHg8Fm6yZa8ektf9X4Ii4E,3716
  dapper_sqls/builders/stp.py,sha256=LELylyG5tefcIk6kpsUTTuCkakRKTu93OwWCHB6mxU4,4915
  dapper_sqls/builders/model/__init__.py,sha256=9cAgoo-zu82YhtsmePseZhfeX94UMwfYuP9j-3d597Q,41
- dapper_sqls/builders/model/model.py,sha256=OULDuudqkj5oh1NlYzDJZwbQVDXh735LXhyCWiqpvWM,34668
- dapper_sqls/builders/model/utils.py,sha256=w-EBGxwltFFIw4jGvuQAvyCAnoDysqaPhfk86JLvCTs,27583
+ dapper_sqls/builders/model/model.py,sha256=3Hq3wjpvCP6rzp-uSuHo2pbYpSjeQqXrEwvhb0UhJpM,34286
+ dapper_sqls/builders/model/utils.py,sha256=PAT8HeQea-dIcJukFFlC2AFvRMWDPlo8DUZ4KDpy--I,27872
  dapper_sqls/dapper/__init__.py,sha256=AlQJ-QYMqKeerSQBM8Dc9_VQwUKCN4bl4ThKHsTlYms,38
  dapper_sqls/dapper/dapper.py,sha256=3sFRtgw_M3zZI8dyDJacVFwzAuH7jbyQehfCiYMymZs,2584
- dapper_sqls/dapper/executors.py,sha256=e6JZ2urLAMI3AyuxAquaQ9gficrtIxRswJnQS7pPTaM,16717
+ dapper_sqls/dapper/executors.py,sha256=rcrkjF1lMXXdMaTFST1Hq3LNruXDxAVCMEe7JyvHDgU,16712
  dapper_sqls/http/__init__.py,sha256=-BkIvJqXqnzFIpsBlipLUj_Hvbx_qvFwDF_LekmXh7Y,91
  dapper_sqls/http/aiohttp.py,sha256=Z8ugmMI7YiowYwS3qArdmj_cq2neUo0cY3EY3n5CUos,6334
  dapper_sqls/http/decorators.py,sha256=zBWujQ8pbSYCl1CAvGJieXDu97fPN4hITRafTISajhU,3942
  dapper_sqls/http/models.py,sha256=dHWOZ_bo9EhRXFSIf1eHDe0zG6pY58_TWtif5Hr-xPg,1724
  dapper_sqls/http/request.py,sha256=dbiCv1gc3IbHolzZteF2dlVeVA8jdHnT_n-gqWqdjGQ,5690
  dapper_sqls/models/__init__.py,sha256=pRrgVBMx8_AHPBKr7Zv7a2Mr3gYJpVHl-lRXAKZ6cJE,496
- dapper_sqls/models/base.py,sha256=KmlbLa9_k0ioGDWVjISes4kIP8OgElAoCqXbr3VbSCE,8588
+ dapper_sqls/models/base.py,sha256=26UC5erfkIKINZkqYeunZDgczdBibah71dqy94SUMyY,8976
  dapper_sqls/models/connection.py,sha256=z5OkYeY5Qp-dXdy1bvZHAlov-Kq-lgMxC6RqNdTiiq4,1745
  dapper_sqls/models/http.py,sha256=rCmf4Mj1bA_oc0k0vDxi2v1Cqd4oXDDU83P_6kNwTuA,356
- dapper_sqls/models/query_field.py,sha256=QOQlB6hlVm7fGZqsAFgWGznLEDSR03uTI_--xDyUEao,7763
- dapper_sqls/models/result.py,sha256=EsbgP7sY8QO5i-ddPwWH9mmRYGPLpQnwe89H1zWImqg,14728
+ dapper_sqls/models/query_field.py,sha256=aJErQrRuXbvP21v_B6b3mwEeKm8Bgt7dBBS39Lnt3VE,7842
+ dapper_sqls/models/result.py,sha256=lxC-Gic3jC8FdxTAhY6ABUqHwjcfqBbjzXVI0TLTYgk,15064
  dapper_sqls/sqlite/__init__.py,sha256=EUREulLqSlZ43_3pEYlDJ3rF7i0cSwROnqrlZX9ircI,247
  dapper_sqls/sqlite/async_local_database.py,sha256=VqMdAD2wdzqFMIKEpnOgbajEwwSmxLqtSkqWSipbu6o,7754
  dapper_sqls/sqlite/decorators.py,sha256=hvehaUi6uV3BkLNOcbGhv3-fV0VJD0hLomXznSSMJEA,2420
@@ -34,7 +34,7 @@ dapper_sqls/sqlite/installer.py,sha256=pHRXFn8gFPUj_d42ErKcMUDFmzc8tK1DabuhgqUIu
  dapper_sqls/sqlite/local_database.py,sha256=kuEVRJvzUUQg3X1Abq9ZYslO9n5IMovaCj7jnB51qSs,5902
  dapper_sqls/sqlite/models.py,sha256=m1I0-iYDdv-4fVYMsAboRYHbMEcwI4GScZSdbs7_nUY,2232
  dapper_sqls/sqlite/utils.py,sha256=22n2ry7_7b7XGDjwv3sY8swADpXDAynR0-E8WQrvHzc,230
- dapper_sqls-1.2.3.dist-info/METADATA,sha256=8la5wW7ydUMVjseiK-XDcDaalnkz2tMYTFl0-g7HZms,964
- dapper_sqls-1.2.3.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
- dapper_sqls-1.2.3.dist-info/top_level.txt,sha256=Pe1YqCPngnYbSVdhJyDrdFWHFCOqBvFW8WK7kTaIax4,12
- dapper_sqls-1.2.3.dist-info/RECORD,,
+ dapper_sqls-1.2.4.dist-info/METADATA,sha256=Zt_7GVqTBlKNjswYV8J0tNNhFsgc3RSSKUA8TSAhEB4,989
+ dapper_sqls-1.2.4.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+ dapper_sqls-1.2.4.dist-info/top_level.txt,sha256=Pe1YqCPngnYbSVdhJyDrdFWHFCOqBvFW8WK7kTaIax4,12
+ dapper_sqls-1.2.4.dist-info/RECORD,,