dapper-sqls 0.9.5__py3-none-any.whl → 1.1.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dapper_sqls/__init__.py +1 -1
- dapper_sqls/async_dapper/async_dapper.py +1 -1
- dapper_sqls/async_dapper/async_executors.py +1 -1
- dapper_sqls/builders/model/model.py +4 -4
- dapper_sqls/builders/model/utils.py +3 -3
- dapper_sqls/builders/query.py +7 -7
- dapper_sqls/builders/stored.py +6 -10
- dapper_sqls/config.py +1 -1
- dapper_sqls/dapper/dapper.py +1 -1
- dapper_sqls/dapper/executors.py +1 -1
- dapper_sqls/decorators.py +5 -3
- dapper_sqls/models/connection.py +2 -2
- dapper_sqls/models/result.py +1 -1
- dapper_sqls/sqlite/__init__.py +4 -1
- dapper_sqls/sqlite/async_local_database.py +104 -0
- dapper_sqls/sqlite/installer.py +93 -0
- dapper_sqls/sqlite/local_database.py +32 -184
- dapper_sqls/sqlite/models.py +27 -1
- dapper_sqls/sqlite/utils.py +8 -0
- dapper_sqls/utils.py +1 -1
- dapper_sqls-1.1.3.dist-info/METADATA +10 -0
- dapper_sqls-1.1.3.dist-info/RECORD +33 -0
- {dapper_sqls-0.9.5.dist-info → dapper_sqls-1.1.3.dist-info}/WHEEL +1 -1
- dapper_sqls-0.9.5.dist-info/METADATA +0 -16
- dapper_sqls-0.9.5.dist-info/RECORD +0 -30
- {dapper_sqls-0.9.5.dist-info → dapper_sqls-1.1.3.dist-info}/top_level.txt +0 -0
dapper_sqls/__init__.py
CHANGED
dapper_sqls/builders/model/model.py
CHANGED
@@ -1,4 +1,4 @@
-#
+# coding: utf-8

 from itertools import groupby
 import os
@@ -152,7 +152,7 @@ class ModelBuilder(object):
         if table_options.ignore_table:
             continue

-        content_model = '''#
+        content_model = '''# coding: utf-8

 from dapper_sqls import TableBaseModel
 from datetime import datetime
@@ -268,7 +268,7 @@ class {class_name}(TableBaseModel):
         schema_data_tables[table.table_schema].append(table)

         schema_data_routine : dict[str, list[RoutineBuilderData]] = {}
-        content_file_routine = '''#
+        content_file_routine = '''# coding: utf-8
 from dapper_sqls import StpBuilder
 from dapper_sqls import Dapper
 from datetime import datetime
@@ -284,7 +284,7 @@ class stp(object):
         return self._dapper
 '''

-        content_file_async_rounine = '''#
+        content_file_async_rounine = '''# coding: utf-8
 from dapper_sqls import AsyncStpBuilder
 from dapper_sqls import AsyncDapper
 from datetime import datetime
dapper_sqls/builders/model/utils.py
CHANGED
@@ -1,4 +1,4 @@
-#
+# coding: utf-8
 import re
 from pydantic import Field, BaseModel

@@ -118,7 +118,7 @@ def create_params_routine(data : InformationSchemaRoutines, defaults_values : di

 def create_content_orm(class_name : str, fields_args_str : str):

-    return f'''#
+    return f'''# coding: utf-8

 from datetime import datetime
 from typing import overload, Union
@@ -268,7 +268,7 @@ class {class_name}ORM(object):

 def create_content_async_orm(class_name : str, fields_args_str : str):

-    return f'''#
+    return f'''# coding: utf-8

 from datetime import datetime
 from typing import overload, Union
dapper_sqls/builders/query.py
CHANGED
@@ -1,11 +1,11 @@
-#
+# coding: utf-8
 from typing import Type, Union
 from pydantic import BaseModel
 from datetime import datetime, date
 import json

 class Value:
-    def __init__(self, value : Union[str, int, bytes, float, datetime, date], prefix : str, suffix : str):
+    def __init__(self, value : Union[str, int, bytes, float, datetime, date, bool], prefix : str, suffix : str):
         self.value = value
         self.prefix = prefix
         self.suffix = suffix
@@ -37,9 +37,9 @@ class QueryBuilder(object):

     @classmethod
     def update(cls, model: Type[BaseModel], where : Union[str , Type[BaseModel]]):
-        update_data = model.model_dump(exclude_none=True)
+        update_data = {k: int(v) if isinstance(v, bool) else v for k, v in model.model_dump(exclude_none=True).items()}
         if not isinstance(where, str):
-            where_data = where.model_dump(exclude_none=True)
+            where_data = {k: int(v) if isinstance(v, bool) else v for k, v in where.model_dump(exclude_none=True).items()}
             where = cls._build_where_clause(**where_data)

         set_clause = ", ".join([f"{key} = '{value}'" if isinstance(value, str) else f"{key} = {value}" for key, value in update_data.items()])
@@ -47,7 +47,7 @@ class QueryBuilder(object):
         return sql_query

     def insert(model: Type[BaseModel], name_column_id = 'Id'):
-        insert_data = model.model_dump(exclude_none=True)
+        insert_data = {k: int(v) if isinstance(v, bool) else v for k, v in model.model_dump(exclude_none=True).items()}
         columns = ", ".join(insert_data.keys())
         values = ", ".join([f"'{value}'" if isinstance(value, str) else str(value) for value in insert_data.values()])
         sql_query = f"""
@@ -60,7 +60,7 @@ class QueryBuilder(object):
     @classmethod
     def select(cls, model: Type[BaseModel], additional_sql : str = "" ,select_top : int= None):
         top_clause = f"TOP ({select_top}) * " if select_top else "*"
-        select_data = model.model_dump(exclude_none=True)
+        select_data = {k: int(v) if isinstance(v, bool) else v for k, v in model.model_dump(exclude_none=True).items()}
         where_clause = cls._build_where_clause(**select_data)

         sql_query = f"SELECT {top_clause} FROM {model.TABLE_NAME}"
@@ -71,7 +71,7 @@ class QueryBuilder(object):

     @classmethod
     def delete(cls, model: Type[BaseModel]):
-        delete_data = model.model_dump(exclude_none=True)
+        delete_data = {k: int(v) if isinstance(v, bool) else v for k, v in model.model_dump(exclude_none=True).items()}
         where_clause = cls._build_where_clause(**delete_data)
         if not where_clause:
             raise ValueError("DELETE operation requires at least one condition.")
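Note on the query.py changes: bool was added to the Value union and every model_dump call now folds booleans to 0/1, so a boolean field no longer ends up as the literal True/False in the generated T-SQL. A minimal sketch of the effect, using a hypothetical Pydantic model (only the dict comprehension is taken from the diff; the model and field names are illustrative):

    from typing import Optional
    from pydantic import BaseModel

    class UserFilter(BaseModel):          # hypothetical table model, for illustration only
        Id: Optional[int] = None
        Active: Optional[bool] = None

    model = UserFilter(Active=True)
    # Normalization added in 1.1.3: booleans become 0/1 so the rendered SQL literal
    # is BIT-friendly instead of the string 'True'/'False'.
    data = {k: int(v) if isinstance(v, bool) else v
            for k, v in model.model_dump(exclude_none=True).items()}
    print(data)  # {'Active': 1}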
dapper_sqls/builders/stored.py
CHANGED
@@ -1,4 +1,4 @@
-#
+# coding: utf-8
 from typing import Type
 from pydantic import BaseModel

@@ -9,18 +9,14 @@ class StoredBuilder:
         parameters = []
         for field, value in kwargs.items():
             if value is not None:
-                if isinstance(value, str):
-                    conditions.append(f"{field} = ?")
-                    parameters.append(value)
-                else:
                 conditions.append(f"{field} = ?")
                 parameters.append(value)
         return " AND ".join(conditions), tuple(parameters)

     @classmethod
     def update(cls, model: Type[BaseModel], where: Type[BaseModel]):
-        update_data = model.model_dump(exclude_none=True)
-        where_data = where.model_dump(exclude_none=True)
+        update_data = {k: int(v) if isinstance(v, bool) else v for k, v in model.model_dump(exclude_none=True).items()}
+        where_data = {k: int(v) if isinstance(v, bool) else v for k, v in where.model_dump(exclude_none=True).items()}
         where_clause, where_params = cls._build_where_clause(**where_data)

         set_clause = ", ".join([f"{key} = ?" for key in update_data.keys()])
@@ -30,7 +26,7 @@ class StoredBuilder:

     @classmethod
     def insert(cls, model : Type[BaseModel], name_column_id = 'Id'):
-        insert_data = model.model_dump(exclude_none=True)
+        insert_data = {k: int(v) if isinstance(v, bool) else v for k, v in model.model_dump(exclude_none=True).items()}
         columns = ", ".join(insert_data.keys())
         values = ", ".join(["?" for _ in insert_data.values()])
         sql_query = f"""
@@ -43,7 +39,7 @@ class StoredBuilder:
     @classmethod
     def select(cls, model : Type[BaseModel], additional_sql : str = "" ,select_top : int= None):
         top_clause = f"TOP ({select_top}) * " if select_top else "*"
-        select_data = model.model_dump(exclude_none=True)
+        select_data = {k: int(v) if isinstance(v, bool) else v for k, v in model.model_dump(exclude_none=True).items()}
         where_clause, parameters = cls._build_where_clause(**select_data)

         sql_query = f"SELECT {top_clause} FROM {model.TABLE_NAME}"
@@ -54,7 +50,7 @@ class StoredBuilder:

     @classmethod
     def delete(cls, model : Type[BaseModel]):
-        delete_data = model.model_dump(exclude_none=True)
+        delete_data = {k: int(v) if isinstance(v, bool) else v for k, v in model.model_dump(exclude_none=True).items()}
         where_clause, parameters = cls._build_where_clause(**delete_data)
         if not where_clause:
             raise ValueError("DELETE operation requires at least one condition.")
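Note on the stored.py changes: StoredBuilder applies the same boolean-to-0/1 folding, and the redundant string branch in _build_where_clause is gone because string and non-string values are bound identically through "?" placeholders. A rough standalone sketch of what the helper returns after the change (the loop mirrors the diff; the field values are made up):

    def build_where_clause(**kwargs):
        # Same shape as StoredBuilder._build_where_clause after 1.1.3:
        # every non-None value becomes one "?" placeholder plus one bound parameter.
        conditions, parameters = [], []
        for field, value in kwargs.items():
            if value is not None:
                conditions.append(f"{field} = ?")
                parameters.append(value)
        return " AND ".join(conditions), tuple(parameters)

    print(build_where_clause(Name="sam", Active=1, Deleted=None))
    # ('Name = ? AND Active = ?', ('sam', 1))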
dapper_sqls/config.py
CHANGED
dapper_sqls/dapper/dapper.py
CHANGED
dapper_sqls/dapper/executors.py
CHANGED
dapper_sqls/decorators.py
CHANGED
@@ -1,10 +1,10 @@
-#
+# coding: utf-8
 from functools import wraps
 import asyncio
 from time import perf_counter
 from typing import Callable

-def func_validation(callable_msg_error: Callable = None, use_raise: bool = False, use_log: bool = True):
+def func_validation(callable_msg_error: Callable = None, use_raise: bool = False, use_log: bool = True, default_value = None):
     """
     Synchronous function decorator for validation, error handling, and logging execution time.
     """
@@ -25,6 +25,7 @@ def func_validation(callable_msg_error: Callable = None, use_raise: bool = False
                     raise
                 else:
                     print(f"Unhandled exception in '{func.__name__}': {error_message}")
+                    return default_value
             finally:
                 if use_log:
                     stop = perf_counter()
@@ -33,7 +34,7 @@ def func_validation(callable_msg_error: Callable = None, use_raise: bool = False
         return wrapper
     return decorator

-def async_func_validation(callable_msg_error: Callable = None, use_raise: bool = False, use_log: bool = True):
+def async_func_validation(callable_msg_error: Callable = None, use_raise: bool = False, use_log: bool = True, default_value = None):
     """
     Asynchronous function decorator for validation, error handling, and logging execution time.
     """
@@ -54,6 +55,7 @@ def async_func_validation(callable_msg_error: Callable = None, use_raise: bool =
                     raise
                 else:
                     print(f"Unhandled exception in async function '{func.__name__}': {error_message}")
+                    return default_value
             finally:
                 if use_log:
                     stop = perf_counter()
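Note on the decorators.py changes: both decorators gain a default_value argument; when use_raise is False and the wrapped call raises, the wrapper now returns default_value instead of falling through to an implicit None. A hedged usage sketch (the decorated function and its fallback value are illustrative):

    from dapper_sqls.decorators import func_validation

    @func_validation(use_raise=False, default_value=[])   # swallow the error, fall back to an empty list
    def load_rows():
        raise RuntimeError("connection lost")              # simulated failure

    rows = load_rows()
    print(rows)   # [] -- before 1.1.3 this would have been None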
dapper_sqls/models/connection.py
CHANGED
@@ -1,4 +1,4 @@
-#
+# coding: utf-8

 class ConnectionStringData(object):
     def __init__(self, server: str, database: str, username: str, password: str):
@@ -34,7 +34,7 @@ class ConnectionStringData(object):
     @username.setter
     def username(self, value: str):
         if not isinstance(value, str):
-            raise ValueError("O nome de usuário deve ser uma string.")
+            raise ValueError("O nome de usuário deve ser uma string.")
         self._username = value

     @property
dapper_sqls/models/result.py
CHANGED
dapper_sqls/sqlite/__init__.py
CHANGED
dapper_sqls/sqlite/async_local_database.py
ADDED
@@ -0,0 +1,104 @@
+# coding: utf-8
+from sqlalchemy.ext.asyncio import create_async_engine, AsyncEngine
+from sqlalchemy import text
+from .models import BaseTables, Path, System, EnvVar
+from .utils import get_value
+
+class BaseAsyncLocalDatabase:
+
+    def __init__(self, app_name: str, path : str, is_new_database : bool):
+        self._app_name = app_name
+        self.is_new_database = is_new_database
+        self._engine: AsyncEngine = create_async_engine(f'sqlite+aiosqlite:///{path}')
+
+    @property
+    def engine(self):
+        return self._engine
+
+    @property
+    def app_name(self):
+        return self._app_name
+
+    async def init_db(self):
+        async with self.engine.begin() as conn:
+            await conn.run_sync(BaseTables.meta_data.create_all)
+            await conn.execute(BaseTables.system.insert().values(App=self.app_name, Tema='light'))
+            await conn.commit()
+
+    async def select(self, table: str, where: str = None):
+        async with self.engine.connect() as conn:
+            query = f"SELECT * FROM {table} WHERE App = :app_name"
+            if where:
+                query += f" AND {where}"
+            result = await conn.execute(text(query), {'app_name': self.app_name})
+            return [row._mapping for row in result]
+
+    async def get_path(self, name):
+        name = get_value(name)
+        data = await self.select('path', f"Name = '{name}'")
+        return Path(data[0]).Path if data else None
+
+    async def update_path(self, name: str, path_name: str):
+        name, path_name = get_value(name), get_value(path_name)
+        exists_path = await self.get_path(name)
+        async with self.engine.begin() as conn:
+            if exists_path:
+                await conn.execute(
+                    BaseTables.path.update().where(
+                        (BaseTables.path.c.Name == name) & (BaseTables.path.c.App == self.app_name)
+                    ).values(Path=path_name)
+                )
+            else:
+                await conn.execute(BaseTables.path.insert().values(App=self.app_name, Name=name, Path=path_name))
+
+    async def delete_path(self, name: str):
+        name = get_value(name)
+        async with self.engine.begin() as conn:
+            await conn.execute(
+                BaseTables.path.delete().where(
+                    (BaseTables.path.c.Name == name) & (BaseTables.path.c.App == self.app_name)
+                )
+            )
+
+    async def get_var(self, name):
+        name = get_value(name)
+        data = await self.select('env_var', f"Name = '{name}'")
+        return EnvVar(data[0]).Value if data else None
+
+    async def update_var(self, name: str, value: str):
+        name, value = get_value(name), get_value(value)
+        exists_var = await self.get_var(name)
+        async with self.engine.begin() as conn:
+            if exists_var:
+                await conn.execute(
+                    BaseTables.env_var.update().where(
+                        (BaseTables.env_var.c.Name == name) & (BaseTables.env_var.c.App == self.app_name)
+                    ).values(Value=value)
+                )
+            else:
+                await conn.execute(BaseTables.env_var.insert().values(App=self.app_name, Name=name, Value=value))
+
+    async def delete_var(self, name: str):
+        name = get_value(name)
+        async with self.engine.begin() as conn:
+            await conn.execute(
+                BaseTables.env_var.delete().where(
+                    (BaseTables.env_var.c.Name == name) & (BaseTables.env_var.c.App == self.app_name)
+                )
+            )
+
+    async def get_theme(self):
+        data = await self.select('system')
+        if data:
+            return System(data[0]).Theme
+        async with self.engine.begin() as conn:
+            await conn.execute(BaseTables.system.insert().values(App=self.app_name, Tema='light'))
+        return 'light'
+
+    async def update_theme(self, theme: str):
+        theme = get_value(theme)
+        async with self.engine.begin() as conn:
+            await conn.execute(
+                BaseTables.system.update().where(BaseTables.system.c.App == self.app_name).values(Tema=theme)
+            )
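Note on the new async class: BaseAsyncLocalDatabase mirrors the synchronous per-app path, variable and theme storage on top of SQLAlchemy's asyncio engine. The 'sqlite+aiosqlite://' URL needs the aiosqlite driver, which is not pinned in the 1.1.3 METADATA shown further down, so it has to be installed separately. A hedged usage sketch (the database path is illustrative; in practice the path comes from DataBaseInstall.instance(), shown with the next file):

    import asyncio
    from dapper_sqls.sqlite.async_local_database import BaseAsyncLocalDatabase  # module path from the RECORD below

    async def main():
        db = BaseAsyncLocalDatabase("MyApp", "/tmp/MyApp/MyLocalDatabase.db", is_new_database=True)
        if db.is_new_database:
            await db.init_db()                      # creates the base tables and seeds the system row
        await db.update_var("api_url", "https://example.invalid")
        print(await db.get_var("api_url"))          # 'https://example.invalid'
        print(await db.get_theme())                 # 'light'

    asyncio.run(main())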
dapper_sqls/sqlite/installer.py
ADDED
@@ -0,0 +1,93 @@
+# coding: utf-8
+from sqlalchemy import create_engine, inspect, text, insert
+from sqlalchemy.engine import Engine
+from tempfile import gettempdir
+from os import path, makedirs
+from .models import BaseTables
+from .utils import is_valid_name, get_value
+from typing import TypeVar
+T = TypeVar('T')
+
+class DataBaseInstall(object):
+
+    def __init__(self, app_name : str, *, tables : BaseTables = None, path_local_database = gettempdir(), database_name="MyLocalDatabase", database_folder_name = "MyApp"):
+        app_name = get_value(app_name)
+        if not database_name.endswith('.db'):
+            database_name = f'{database_name}.db'
+        if not is_valid_name(app_name):
+            app_name = "my_app"
+        self._app_name = app_name
+        self._path_database = path.join(path_local_database,database_folder_name, database_name)
+        self.tables = tables if tables else BaseTables
+        self._engine : Engine = None
+        self.new_database = not path.isfile(self._path_database)
+        if not path.isfile(self._path_database):
+            if not path.exists(path.join(path_local_database,database_folder_name)):
+                makedirs(path.join(path_local_database,database_folder_name))
+
+            with self.engine.connect() as conn:
+                conn.execute(text("PRAGMA encoding = 'UTF-8'"))
+                conn.commit()
+
+            with self.engine.connect() as conn:
+                self.tables.meta_data.create_all(self.engine)
+                ins = insert(self.tables.system).values(App=app_name, Tema='light')
+                conn.execute(ins)
+                conn.commit()
+        else:
+            if not self.are_tables_existing(self.engine):
+                try:
+                    self.tables.meta_data.create_all(self.engine)
+                except:
+                    ...
+            else:
+                try:
+                    self.synchronize_columns(self.engine)
+                except:
+                    ...
+
+    def instance(self, obj : T) -> T:
+        return obj(self._app_name, self._path_database, self.new_database)
+
+    @property
+    def engine(self):
+        if not self._engine:
+            self._engine = create_engine(f'sqlite:///{self._path_database}')
+        return self._engine
+
+    def are_columns_existing(self, engine):
+        inspector = inspect(engine)
+        existing_tables = inspector.get_table_names()
+        required_tables = self.tables.meta_data.tables.keys()
+        for table_name in required_tables:
+            if table_name in existing_tables:
+                existing_columns = inspector.get_columns(table_name)
+                existing_column_names = [column['name'] for column in existing_columns]
+                required_columns = self.tables.meta_data.tables[table_name].c.keys()
+                if not all(column in existing_column_names for column in required_columns):
+                    return False
+            else:
+                return False
+        return True
+
+    def are_tables_existing(self, engine):
+        inspector = inspect(engine)
+        existing_tables = inspector.get_table_names()
+        required_tables = self.tables.meta_data.tables.keys()
+        return all(table in existing_tables for table in required_tables)
+
+    def synchronize_columns(self, engine):
+        inspector = inspect(engine)
+        existing_tables = inspector.get_table_names()
+        for table_name in self.tables.meta_data.tables.keys():
+            if table_name in existing_tables:
+                existing_columns = inspector.get_columns(table_name)
+                existing_column_names = [column['name'] for column in existing_columns]
+                required_columns = self.tables.meta_data.tables[table_name].c.keys()
+                required_column_defs = {col.name: col for col in self.tables.meta_data.tables[table_name].columns}
+                for column in required_columns:
+                    if column not in existing_column_names:
+                        column_type = required_column_defs[column].type
+                        add_column_sql = f'ALTER TABLE {table_name} ADD COLUMN {column} {column_type}'
+                        with engine.connect() as conn:
+                            conn.execute(text(add_column_sql))
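Note on the new installer: DataBaseInstall resolves the SQLite file path (a temp-dir folder by default), creates the schema on first run or adds missing columns on later runs, and hands the resolved path to whatever database class is passed to instance(). A hedged wiring sketch, assuming the two hunks above are installer.py and async_local_database.py as the file list suggests (app and folder names are illustrative):

    from dapper_sqls.sqlite.installer import DataBaseInstall
    from dapper_sqls.sqlite.async_local_database import BaseAsyncLocalDatabase

    # Builds <tempdir>/MyApp/MyLocalDatabase.db, creating the base tables on the first run
    # or running synchronize_columns() on subsequent runs.
    install = DataBaseInstall("MyApp", database_name="MyLocalDatabase", database_folder_name="MyApp")

    # instance() forwards (app_name, resolved_path, new_database) to the class it receives.
    db = install.instance(BaseAsyncLocalDatabase)
    print(install.new_database, db.app_name)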
dapper_sqls/sqlite/local_database.py
CHANGED
@@ -1,171 +1,23 @@
-#
+# coding: utf-8
+from sqlalchemy import create_engine, text, insert, delete, update
+from .models import BaseTables, Path, System, EnvVar
+from .utils import get_value

-
-from sqlalchemy.engine import Engine
-from tempfile import gettempdir
-from os import path, makedirs
-from .models import Path, System ,EnvVar
-import re
+class BaseLocalDatabase(object):

-
-
-
-def is_valid_name(name, max_length=255):
-    # Verifica se o nome não é vazio
-    if not name:
-        return False
-
-    # Verifica se o nome não excede o comprimento máximo permitido
-    if len(name) > max_length:
-        return False
-
-    # Verifica se o nome não contém caracteres inválidos
-    # Neste exemplo, permitimos apenas letras, números, espaços e underscore (_)
-    if not re.match(r'^[\w\s]+$', name):
-        return False
-
-    return True
-
-class LocalDatabase(object):
-
-    class Tables(object):
-
-        meta_data = MetaData()
-
-        system = Table(
-            'system', # Nome da tabela
-            meta_data,
-            Column('id', Integer, primary_key=True),
-            Column('App', String),
-            Column('Tema', String)
-        )
-
-        env_var = Table(
-            'env_var',
-            meta_data,
-            Column('id', Integer, primary_key=True),
-            Column('App', String),
-            Column('Name', String),
-            Column('Value', String)
-        )
-
-        path = Table(
-            'path',
-            meta_data,
-            Column('id', Integer, primary_key=True),
-            Column('App', String),
-            Column('Name', String),
-            Column('Path', String)
-        )
-
-
-    def __init__(self, app_name : str, *, path_local_database = gettempdir(), database_name="saftOnlineLocalDatabase", database_folder_name = "SaftOnline"):
-        app_name = self.get_value(app_name)
-        if not database_name.endswith('.db'):
-            database_name = f'{database_name}.db'
-        if not is_valid_name(app_name):
-            raise Exception("Nome de identificação do programa é inválido")
+    def __init__(self, app_name : str, path : str, is_new_database : bool):
         self._app_name = app_name
-        self.
-        self._engine
-
-        if not path.isfile(self._path_database):
-            if not path.exists(path.join(path_local_database,database_folder_name)):
-                makedirs(path.join(path_local_database,database_folder_name))
-
-            # Se o banco de dados não existe, crie-o e crie a tabela
-            conn = self.engine.connect()
-
-            self.Tables.meta_data.create_all(self.engine)
-
-            ins = insert(self.Tables.system).values(App=app_name, Tema='light')
-            conn.execute(ins)
-            conn.commit()
-            conn.close()
-
-        else:
-            if not self.are_tables_existing(self.engine):
-                try:
-                    self.Tables.meta_data.create_all(self.engine)
-                    ...
-                except:
-                    ...
-            else:
-                try:
-                    self.synchronize_columns(self.engine)
-                except:
-                    ...
-
-
+        self.is_new_database = is_new_database
+        self._engine = create_engine(f'sqlite:///{path}')
+
     @property
     def engine(self):
-        if not self._engine:
-            self._engine = create_engine(f'sqlite:///{self.path_database}')
         return self._engine

     @property
     def app_name(self):
         return self._app_name
-
-    @property
-    def path_database(self):
-        return self._path_database
-
-    def are_columns_existing(self, engine):
-        inspector = inspect(engine)
-        existing_tables = inspector.get_table_names()
-        required_tables = self.Tables.meta_data.tables.keys()
-
-        for table_name in required_tables:
-            if table_name in existing_tables:
-                # Get the columns in the existing table
-                existing_columns = inspector.get_columns(table_name)
-                existing_column_names = [column['name'] for column in existing_columns]
-
-                # Get the columns defined in the Table class
-                required_columns = self.Tables.meta_data.tables[table_name].c.keys()
-
-                if not all(column in existing_column_names for column in required_columns):
-                    return False
-            else:
-                return False
-        return True
-
-    def are_tables_existing(self, engine):
-        inspector = inspect(engine)
-        existing_tables = inspector.get_table_names()
-        required_tables = self.Tables.meta_data.tables.keys()
-
-        return all(table in existing_tables for table in required_tables)
-
-    def synchronize_columns(self, engine):
-        inspector = inspect(engine)
-        existing_tables = inspector.get_table_names()
-
-        for table_name in self.Tables.meta_data.tables.keys():
-            if table_name in existing_tables:
-                # Get the columns in the existing table
-                existing_columns = inspector.get_columns(table_name)
-                existing_column_names = [column['name'] for column in existing_columns]
-
-                # Get the columns defined in the Table class
-                required_columns = self.Tables.meta_data.tables[table_name].c.keys()
-                required_column_defs = {col.name: col for col in self.Tables.meta_data.tables[table_name].columns}
-
-                # Add columns in lack
-                for column in required_columns:
-                    if column not in existing_column_names:
-                        column_type = required_column_defs[column].type
-                        add_column_sql = f'ALTER TABLE {table_name} ADD COLUMN {column} {column_type}'
-                        with engine.connect() as conn:
-                            conn.execute(text(add_column_sql))
-
-    def get_value(self, value):
-        try :
-            return value.value
-        except:
-            return value
-
+
     def select(self, table : str, where : str = None):
         with self.engine.connect() as conn:
             if where:
@@ -175,87 +27,83 @@ class LocalDatabase(object):
             data = [dict(zip(tuple(query.keys()), i)) for i in query.cursor]
             return data

-
     def get_path(self, name):
-        name =
+        name = get_value(name)
         data = self.select('path', f"Name = '{name}'")
         for d in data:
            return Path(d).Path

     def update_path(self, name : str, path_name : str):
-        name =
-        path_name =
+        name = get_value(name)
+        path_name = get_value(path_name)
         existsPath = self.get_path(name)

         with self.engine.connect() as conn:
             if existsPath != None:
-                stmt = update(
-                    (
-                    (
+                stmt = update(BaseTables.path).where(
+                    (BaseTables.path.c.Name == name) &
+                    (BaseTables.path.c.App == self.app_name)
                 ).values(Path=path_name)
                 conn.execute(stmt)
             else:
-                ins = insert(
+                ins = insert(BaseTables.path).values(App=self.app_name, Name=name, Path=path_name)
                 conn.execute(ins)
             conn.commit()

     def delete_path(self, name : str):
-        name =
+        name = get_value(name)
         with self.engine.connect() as conn:
-            stmt = delete(
+            stmt = delete(BaseTables.path).where((BaseTables.path.c.Name == name) & (BaseTables.env_var.c.App == self.app_name))
             conn.execute(stmt)
             conn.commit()

     def get_var(self, name):
-        name =
+        name = get_value(name)
         data = self.select('env_var', f"name = '{name}'")
         for d in data:
             return EnvVar(d).Value

     def update_var(self, name : str, value : str):
-        name =
-        value =
+        name = get_value(name)
+        value = get_value(value)
         existsVar = self.get_var(name)
         with self.engine.connect() as conn:
             if existsVar != None:
-                stmt = update(
-                    (
-                    (
+                stmt = update(BaseTables.env_var).where(
+                    (BaseTables.env_var.c.Name == name) &
+                    (BaseTables.env_var.c.App == self.app_name)
                 ).values(Value=value)
                 conn.execute(stmt)
             else:
-                ins = insert(
+                ins = insert(BaseTables.env_var).values(App=self.app_name, Name=name, Value=value)
                 conn.execute(ins)
             conn.commit()

     def delete_var(self, name : str):
-        name =
+        name = get_value(name)
         with self.engine.connect() as conn:
-            stmt = delete(
+            stmt = delete(BaseTables.env_var).where((BaseTables.env_var.c.Name == name) & (BaseTables.env_var.c.App == self.app_name))
             conn.execute(stmt)
             conn.commit()

     def get_theme(self):
-        # Consulta o tema no banco de dados
         data = self.select('system')
         if data:
-            # Se houver dados, retorna o tema
             return System(data[0]).Theme
         else:
-            # Se não houver dados, insere um tema padrão e retorna 'light'
             with self.engine.connect() as conn:
-                ins = insert(
+                ins = insert(BaseTables.system).values(App=self.app_name, Tema='light')
                 conn.execute(ins)
                 conn.commit()
             return 'light'

     def update_theme(self, theme : str):
-        theme =
+        theme = get_value(theme)
         _theme = self.get_theme()
         if _theme:
             with self.engine.connect() as conn:
-                stmt = update(
-
+                stmt = update(BaseTables.system).where(
+                    BaseTables.system.c.App == self.app_name
                 ).values(Tema=theme)
                 conn.execute(stmt)
                 conn.commit()
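Note on the local_database.py rewrite: BaseLocalDatabase no longer owns path resolution, table metadata or the name/value helpers (those moved to installer.py, models.py and utils.py); its constructor now receives the already-resolved path plus an is_new_database flag. A hedged sketch of the synchronous wiring (module paths assumed from the file list; the stored values are illustrative):

    from dapper_sqls.sqlite.installer import DataBaseInstall
    from dapper_sqls.sqlite.local_database import BaseLocalDatabase

    install = DataBaseInstall("MyApp")                  # defaults: tempdir, MyLocalDatabase.db, folder MyApp
    db = install.instance(BaseLocalDatabase)            # -> BaseLocalDatabase(app_name, path, is_new_database)
    db.update_path("exports", r"C:\Users\me\exports")   # illustrative name/path pair
    print(db.get_path("exports"))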
dapper_sqls/sqlite/models.py
CHANGED
@@ -1,4 +1,28 @@
-#
+# coding: utf-8
+from sqlalchemy import MetaData, Table, Column, Integer, String
+
+class BaseTables:
+    meta_data = MetaData()
+    system = Table(
+        'system', meta_data,
+        Column('id', Integer, primary_key=True),
+        Column('App', String),
+        Column('Tema', String)
+    )
+    env_var = Table(
+        'env_var', meta_data,
+        Column('id', Integer, primary_key=True),
+        Column('App', String),
+        Column('Name', String),
+        Column('Value', String)
+    )
+    path = Table(
+        'path', meta_data,
+        Column('id', Integer, primary_key=True),
+        Column('App', String),
+        Column('Name', String),
+        Column('Path', String)
+    )

 class BaseTableModel(object):
     def __init__(self, dados : dict):
@@ -24,6 +48,8 @@ class System(BaseTableModel):
         self.Theme : str = dados['Tema']


+
+


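Note on the new models.py: the table definitions that used to live inside LocalDatabase.Tables are now the module-level BaseTables, which is also what DataBaseInstall accepts through its tables argument. A speculative sketch of extending it with an app-specific table (the notes table is purely illustrative and not part of the package):

    from sqlalchemy import Table, Column, Integer, String
    from dapper_sqls.sqlite.models import BaseTables
    from dapper_sqls.sqlite.installer import DataBaseInstall

    class AppTables(BaseTables):
        # Extra table registered on the shared MetaData, so create_all() and
        # synchronize_columns() in the installer pick it up automatically.
        notes = Table(
            'notes', BaseTables.meta_data,
            Column('id', Integer, primary_key=True),
            Column('App', String),
            Column('Text', String),
        )

    install = DataBaseInstall("MyApp", tables=AppTables)
    print(install.tables.meta_data.tables.keys())   # includes 'notes' alongside system, env_var, path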
dapper_sqls/utils.py
CHANGED
dapper_sqls-1.1.3.dist-info/METADATA
ADDED
@@ -0,0 +1,10 @@
+Metadata-Version: 2.1
+Name: dapper_sqls
+Version: 1.1.3
+Author: Samuel Semedo
+Requires-Dist: aioodbc==0.5.0
+Requires-Dist: annotated-types==0.7.0
+Requires-Dist: greenlet==3.1.1
+Requires-Dist: pyodbc==5.2.0
+Requires-Dist: SQLAlchemy==2.0.36
+Requires-Dist: typing_extensions==4.12.2
dapper_sqls-1.1.3.dist-info/RECORD
ADDED
@@ -0,0 +1,33 @@
+dapper_sqls/__init__.py,sha256=-m_igeGpFiWm-B1WEpyNg9nIC3WApaLEdlbRg6v61cg,174
+dapper_sqls/_types.py,sha256=GM_qDsjKGRSFC0NsU5hkqLKH-ydVKJ-1TgEkxQzi4Hw,181
+dapper_sqls/config.py,sha256=CN5x_E8PlV734Ra57ensLbdFCxe7ASXp7M1wdubcT5s,4625
+dapper_sqls/decorators.py,sha256=I2Uo3Uj1-K4XVTNTvcejaUfd_tuUmM6kOWXrMyX1Dts,2767
+dapper_sqls/utils.py,sha256=0vXX-mhbwSL3Kx7_ErHaxaASgJzrs-g9kwW38kEJZDw,3452
+dapper_sqls/async_dapper/__init__.py,sha256=lBXRyXMCaiwgcK5TCw5rg-niwFS4GcZVW5Q2enKcS-0,47
+dapper_sqls/async_dapper/async_dapper.py,sha256=n9oDmgvRvB3TkuW3e-rcvy35VZfYespB9qn9qtCVa7s,2454
+dapper_sqls/async_dapper/async_executors.py,sha256=uaI5xemBC3Cgs69utC6qqmaaMPdXegarkxyeWVkiTog,13156
+dapper_sqls/builders/__init__.py,sha256=o_dGrHF09NOj3KFLpkpfaRzJMgYcddpQy-QTOJJTLPA,153
+dapper_sqls/builders/query.py,sha256=9-wh2zALVNc4dbBcmkvZvKCxKxEoR4G8Sovcrp1go6Q,3895
+dapper_sqls/builders/stored.py,sha256=dAaKeDfW_9mKqkEgpKrTHwZmzRpsb_iYuJW9v2RXAkA,2778
+dapper_sqls/builders/stp.py,sha256=wYhhO-tlfH2izUyXhMn9sipy_8_IbKwcqkm0SvkdJfo,4739
+dapper_sqls/builders/model/__init__.py,sha256=9cAgoo-zu82YhtsmePseZhfeX94UMwfYuP9j-3d597Q,41
+dapper_sqls/builders/model/model.py,sha256=bawug0tc5PuRVgYU_3JdI7l9s4RyTQT3tDBF5TaGuQo,17026
+dapper_sqls/builders/model/utils.py,sha256=nhTODL6WTGNI5vtKZ_cRyGcBtv8G574a52-HUT1axao,17240
+dapper_sqls/dapper/__init__.py,sha256=AlQJ-QYMqKeerSQBM8Dc9_VQwUKCN4bl4ThKHsTlYms,38
+dapper_sqls/dapper/dapper.py,sha256=3sFRtgw_M3zZI8dyDJacVFwzAuH7jbyQehfCiYMymZs,2584
+dapper_sqls/dapper/executors.py,sha256=-Q9gFkNXyP1QSezta2AtywOIRx_8PsTyMOvl0q4SwH4,12760
+dapper_sqls/models/__init__.py,sha256=z1kR1XLmwxQTMctvsbVR8lcXNafQXxkEP0mdbKzB31s,175
+dapper_sqls/models/base.py,sha256=ddBgBPTVA2_cPNZ4QrYNXqcpsQ8m4Ipy7TWGoHxYF4c,766
+dapper_sqls/models/connection.py,sha256=xSSB12_Odrdplfn7_724slEAE02rDt2yrdNFsD3ENuU,1746
+dapper_sqls/models/http.py,sha256=rCmf4Mj1bA_oc0k0vDxi2v1Cqd4oXDDU83P_6kNwTuA,356
+dapper_sqls/models/result.py,sha256=7IDY3omTGHV8IVpHdej1FaxGXmK1v6h_0iH9zov4Z94,3241
+dapper_sqls/sqlite/__init__.py,sha256=A94N9nQArs7JIzGHn7gjFIBMVxFW9rnwk-y29fL7_6k,183
+dapper_sqls/sqlite/async_local_database.py,sha256=cGdGeUjvtWHktkbTCQGbgCNzRdPV6EtDRnwp5x1OtyI,4450
+dapper_sqls/sqlite/installer.py,sha256=XlXyvNEa59Qr1Kb4bw-7JXXKZBPOuHe7HSVr1KPDYg4,4560
+dapper_sqls/sqlite/local_database.py,sha256=pNccTu8s34aPVVRQd8NRdDsY4yAszmF51kh9bSUY7f8,4190
+dapper_sqls/sqlite/models.py,sha256=rZ2R4I3EhrgAxrQwj0vUZkLsjhjHcpCLCmPMBZM19hw,1403
+dapper_sqls/sqlite/utils.py,sha256=ITuUrp8chmHoVWlP1Ro_EGrWagswuYjspSwmSfWBSrw,226
+dapper_sqls-1.1.3.dist-info/METADATA,sha256=J8Fafn2-NCfnwxtTAitVOKOMfYxv79TMHZs0AeRAgEE,290
+dapper_sqls-1.1.3.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
+dapper_sqls-1.1.3.dist-info/top_level.txt,sha256=Pe1YqCPngnYbSVdhJyDrdFWHFCOqBvFW8WK7kTaIax4,12
+dapper_sqls-1.1.3.dist-info/RECORD,,
dapper_sqls-0.9.5.dist-info/METADATA
REMOVED
@@ -1,16 +0,0 @@
-Metadata-Version: 2.1
-Name: dapper-sqls
-Version: 0.9.5
-Summary: UNKNOWN
-Home-page: UNKNOWN
-Author: Samuel Semedo
-License: UNKNOWN
-Platform: UNKNOWN
-Requires-Dist: annotated-types ==0.7.0
-Requires-Dist: pydantic ==2.10.2
-Requires-Dist: pydantic-core ==2.27.1
-Requires-Dist: pyodbc ==5.2.0
-Requires-Dist: typing-extensions ==4.12.2
-
-UNKNOWN
-
dapper_sqls-0.9.5.dist-info/RECORD
REMOVED
@@ -1,30 +0,0 @@
-dapper_sqls/__init__.py,sha256=KzUmxAhkQiQIu7Oa1yRtvRhCbsxg29aNa-MszPL6PrY,207
-dapper_sqls/_types.py,sha256=GM_qDsjKGRSFC0NsU5hkqLKH-ydVKJ-1TgEkxQzi4Hw,181
-dapper_sqls/config.py,sha256=z370PREmTu8dpZV4Fm6ZyWrQBDJeb0xMr28I-ZCGRIo,4633
-dapper_sqls/decorators.py,sha256=5mrabdIv4yZpwmN1XDjAhDXIU8ruoxww6b9X7fmRtE8,2655
-dapper_sqls/utils.py,sha256=4XCK-5Ikf4bJ18EewhK2UCDR_xi3TMYpFScJJfYjulI,3460
-dapper_sqls/async_dapper/__init__.py,sha256=lBXRyXMCaiwgcK5TCw5rg-niwFS4GcZVW5Q2enKcS-0,47
-dapper_sqls/async_dapper/async_dapper.py,sha256=fcJbscwJHCS4_cZJKwfzTJPkEaVBnSc7p8MpPEqD3-0,2462
-dapper_sqls/async_dapper/async_executors.py,sha256=djBDfpz3Dg2sx7Ct29khmbeRtwrEgQb_v-HOTUFGPh4,13164
-dapper_sqls/builders/__init__.py,sha256=o_dGrHF09NOj3KFLpkpfaRzJMgYcddpQy-QTOJJTLPA,153
-dapper_sqls/builders/query.py,sha256=Z6ShajmCDjIcvVv34LuLd2_yXShDRERH84HatiacBtc,3586
-dapper_sqls/builders/stored.py,sha256=C5TvscEwfXRIeOt-ysry9rn8DS0GoH-eTdnhIGpprP8,2644
-dapper_sqls/builders/stp.py,sha256=wYhhO-tlfH2izUyXhMn9sipy_8_IbKwcqkm0SvkdJfo,4739
-dapper_sqls/builders/model/__init__.py,sha256=9cAgoo-zu82YhtsmePseZhfeX94UMwfYuP9j-3d597Q,41
-dapper_sqls/builders/model/model.py,sha256=2fCpEH_2DJt90NGQBM_tgOy0Xkj99AMmNFpUdCwqb2A,17058
-dapper_sqls/builders/model/utils.py,sha256=oVSRCUWggC6Arhy_jYzds20apk1TfyQKJnmdZdalYcA,17264
-dapper_sqls/dapper/__init__.py,sha256=AlQJ-QYMqKeerSQBM8Dc9_VQwUKCN4bl4ThKHsTlYms,38
-dapper_sqls/dapper/dapper.py,sha256=iw2UitEDcQcnw4-EuRhEe5ZpQxgLKkWHliQGlGv_qFI,2592
-dapper_sqls/dapper/executors.py,sha256=nkR8h051nKheWjWwfe8O9yIK4sg9Q9IBX-l2lYQzLlQ,12768
-dapper_sqls/models/__init__.py,sha256=z1kR1XLmwxQTMctvsbVR8lcXNafQXxkEP0mdbKzB31s,175
-dapper_sqls/models/base.py,sha256=ddBgBPTVA2_cPNZ4QrYNXqcpsQ8m4Ipy7TWGoHxYF4c,766
-dapper_sqls/models/connection.py,sha256=9z3GZW7vhk4w7m7jhey3hblTCh3wrKS90VffHkouu6Y,1752
-dapper_sqls/models/http.py,sha256=rCmf4Mj1bA_oc0k0vDxi2v1Cqd4oXDDU83P_6kNwTuA,356
-dapper_sqls/models/result.py,sha256=4yCJmQEy87gPOi6sUdrXD3xtSWAbXeAX_uR2Ou6EYpg,3249
-dapper_sqls/sqlite/__init__.py,sha256=VifBg1-pubsrYs5jm3ZsgtIqM5JzeNDS2tdInAKvjwM,49
-dapper_sqls/sqlite/local_database.py,sha256=PpIokP09sfIT8JbwRVi0OxegrubDwOVXvajO4lU1U4g,9873
-dapper_sqls/sqlite/models.py,sha256=QwV771SexBq5j92Ls9bgcuR1ucDx9ARazj9JkhMTSB0,712
-dapper_sqls-0.9.5.dist-info/METADATA,sha256=XvXm_sgs_txo7DpqiXjXYLgt2OMlvpbmeCOBzEg5ELs,356
-dapper_sqls-0.9.5.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92
-dapper_sqls-0.9.5.dist-info/top_level.txt,sha256=Pe1YqCPngnYbSVdhJyDrdFWHFCOqBvFW8WK7kTaIax4,12
-dapper_sqls-0.9.5.dist-info/RECORD,,
{dapper_sqls-0.9.5.dist-info → dapper_sqls-1.1.3.dist-info}/top_level.txt
File without changes