deepfos 1.1.60__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- deepfos/__init__.py +6 -0
- deepfos/_version.py +21 -0
- deepfos/algo/__init__.py +0 -0
- deepfos/algo/graph.py +171 -0
- deepfos/algo/segtree.py +31 -0
- deepfos/api/V1_1/__init__.py +0 -0
- deepfos/api/V1_1/business_model.py +119 -0
- deepfos/api/V1_1/dimension.py +599 -0
- deepfos/api/V1_1/models/__init__.py +0 -0
- deepfos/api/V1_1/models/business_model.py +1033 -0
- deepfos/api/V1_1/models/dimension.py +2768 -0
- deepfos/api/V1_2/__init__.py +0 -0
- deepfos/api/V1_2/dimension.py +285 -0
- deepfos/api/V1_2/models/__init__.py +0 -0
- deepfos/api/V1_2/models/dimension.py +2923 -0
- deepfos/api/__init__.py +0 -0
- deepfos/api/account.py +167 -0
- deepfos/api/accounting_engines.py +147 -0
- deepfos/api/app.py +626 -0
- deepfos/api/approval_process.py +198 -0
- deepfos/api/base.py +983 -0
- deepfos/api/business_model.py +160 -0
- deepfos/api/consolidation.py +129 -0
- deepfos/api/consolidation_process.py +106 -0
- deepfos/api/datatable.py +341 -0
- deepfos/api/deep_pipeline.py +61 -0
- deepfos/api/deepconnector.py +36 -0
- deepfos/api/deepfos_task.py +92 -0
- deepfos/api/deepmodel.py +188 -0
- deepfos/api/dimension.py +486 -0
- deepfos/api/financial_model.py +319 -0
- deepfos/api/journal_model.py +119 -0
- deepfos/api/journal_template.py +132 -0
- deepfos/api/memory_financial_model.py +98 -0
- deepfos/api/models/__init__.py +3 -0
- deepfos/api/models/account.py +483 -0
- deepfos/api/models/accounting_engines.py +756 -0
- deepfos/api/models/app.py +1338 -0
- deepfos/api/models/approval_process.py +1043 -0
- deepfos/api/models/base.py +234 -0
- deepfos/api/models/business_model.py +805 -0
- deepfos/api/models/consolidation.py +711 -0
- deepfos/api/models/consolidation_process.py +248 -0
- deepfos/api/models/datatable_mysql.py +427 -0
- deepfos/api/models/deep_pipeline.py +55 -0
- deepfos/api/models/deepconnector.py +28 -0
- deepfos/api/models/deepfos_task.py +386 -0
- deepfos/api/models/deepmodel.py +308 -0
- deepfos/api/models/dimension.py +1576 -0
- deepfos/api/models/financial_model.py +1796 -0
- deepfos/api/models/journal_model.py +341 -0
- deepfos/api/models/journal_template.py +854 -0
- deepfos/api/models/memory_financial_model.py +478 -0
- deepfos/api/models/platform.py +178 -0
- deepfos/api/models/python.py +221 -0
- deepfos/api/models/reconciliation_engine.py +411 -0
- deepfos/api/models/reconciliation_report.py +161 -0
- deepfos/api/models/role_strategy.py +884 -0
- deepfos/api/models/smartlist.py +237 -0
- deepfos/api/models/space.py +1137 -0
- deepfos/api/models/system.py +1065 -0
- deepfos/api/models/variable.py +463 -0
- deepfos/api/models/workflow.py +946 -0
- deepfos/api/platform.py +199 -0
- deepfos/api/python.py +90 -0
- deepfos/api/reconciliation_engine.py +181 -0
- deepfos/api/reconciliation_report.py +64 -0
- deepfos/api/role_strategy.py +234 -0
- deepfos/api/smartlist.py +69 -0
- deepfos/api/space.py +582 -0
- deepfos/api/system.py +372 -0
- deepfos/api/variable.py +154 -0
- deepfos/api/workflow.py +264 -0
- deepfos/boost/__init__.py +6 -0
- deepfos/boost/py_jstream.py +89 -0
- deepfos/boost/py_pandas.py +20 -0
- deepfos/cache.py +121 -0
- deepfos/config.py +6 -0
- deepfos/core/__init__.py +27 -0
- deepfos/core/cube/__init__.py +10 -0
- deepfos/core/cube/_base.py +462 -0
- deepfos/core/cube/constants.py +21 -0
- deepfos/core/cube/cube.py +408 -0
- deepfos/core/cube/formula.py +707 -0
- deepfos/core/cube/syscube.py +532 -0
- deepfos/core/cube/typing.py +7 -0
- deepfos/core/cube/utils.py +238 -0
- deepfos/core/dimension/__init__.py +11 -0
- deepfos/core/dimension/_base.py +506 -0
- deepfos/core/dimension/dimcreator.py +184 -0
- deepfos/core/dimension/dimension.py +472 -0
- deepfos/core/dimension/dimexpr.py +271 -0
- deepfos/core/dimension/dimmember.py +155 -0
- deepfos/core/dimension/eledimension.py +22 -0
- deepfos/core/dimension/filters.py +99 -0
- deepfos/core/dimension/sysdimension.py +168 -0
- deepfos/core/logictable/__init__.py +5 -0
- deepfos/core/logictable/_cache.py +141 -0
- deepfos/core/logictable/_operator.py +663 -0
- deepfos/core/logictable/nodemixin.py +673 -0
- deepfos/core/logictable/sqlcondition.py +609 -0
- deepfos/core/logictable/tablemodel.py +497 -0
- deepfos/db/__init__.py +36 -0
- deepfos/db/cipher.py +660 -0
- deepfos/db/clickhouse.py +191 -0
- deepfos/db/connector.py +195 -0
- deepfos/db/daclickhouse.py +171 -0
- deepfos/db/dameng.py +101 -0
- deepfos/db/damysql.py +189 -0
- deepfos/db/dbkits.py +358 -0
- deepfos/db/deepengine.py +99 -0
- deepfos/db/deepmodel.py +82 -0
- deepfos/db/deepmodel_kingbase.py +83 -0
- deepfos/db/edb.py +214 -0
- deepfos/db/gauss.py +83 -0
- deepfos/db/kingbase.py +83 -0
- deepfos/db/mysql.py +184 -0
- deepfos/db/oracle.py +131 -0
- deepfos/db/postgresql.py +192 -0
- deepfos/db/sqlserver.py +99 -0
- deepfos/db/utils.py +135 -0
- deepfos/element/__init__.py +89 -0
- deepfos/element/accounting.py +348 -0
- deepfos/element/apvlprocess.py +215 -0
- deepfos/element/base.py +398 -0
- deepfos/element/bizmodel.py +1269 -0
- deepfos/element/datatable.py +2467 -0
- deepfos/element/deep_pipeline.py +186 -0
- deepfos/element/deepconnector.py +59 -0
- deepfos/element/deepmodel.py +1806 -0
- deepfos/element/dimension.py +1254 -0
- deepfos/element/fact_table.py +427 -0
- deepfos/element/finmodel.py +1485 -0
- deepfos/element/journal.py +840 -0
- deepfos/element/journal_template.py +943 -0
- deepfos/element/pyscript.py +412 -0
- deepfos/element/reconciliation.py +553 -0
- deepfos/element/rolestrategy.py +243 -0
- deepfos/element/smartlist.py +457 -0
- deepfos/element/variable.py +756 -0
- deepfos/element/workflow.py +560 -0
- deepfos/exceptions/__init__.py +239 -0
- deepfos/exceptions/hook.py +86 -0
- deepfos/lazy.py +104 -0
- deepfos/lazy_import.py +84 -0
- deepfos/lib/__init__.py +0 -0
- deepfos/lib/_javaobj.py +366 -0
- deepfos/lib/asynchronous.py +879 -0
- deepfos/lib/concurrency.py +107 -0
- deepfos/lib/constant.py +39 -0
- deepfos/lib/decorator.py +310 -0
- deepfos/lib/deepchart.py +778 -0
- deepfos/lib/deepux.py +477 -0
- deepfos/lib/discovery.py +273 -0
- deepfos/lib/edb_lexer.py +789 -0
- deepfos/lib/eureka.py +156 -0
- deepfos/lib/filterparser.py +751 -0
- deepfos/lib/httpcli.py +106 -0
- deepfos/lib/jsonstreamer.py +80 -0
- deepfos/lib/msg.py +394 -0
- deepfos/lib/nacos.py +225 -0
- deepfos/lib/patch.py +92 -0
- deepfos/lib/redis.py +241 -0
- deepfos/lib/serutils.py +181 -0
- deepfos/lib/stopwatch.py +99 -0
- deepfos/lib/subtask.py +572 -0
- deepfos/lib/sysutils.py +703 -0
- deepfos/lib/utils.py +1003 -0
- deepfos/local.py +160 -0
- deepfos/options.py +670 -0
- deepfos/translation.py +237 -0
- deepfos-1.1.60.dist-info/METADATA +33 -0
- deepfos-1.1.60.dist-info/RECORD +175 -0
- deepfos-1.1.60.dist-info/WHEEL +5 -0
- deepfos-1.1.60.dist-info/top_level.txt +1 -0
deepfos/db/mysql.py
ADDED
|
@@ -0,0 +1,184 @@
|
|
|
1
|
+
"""数据库连接类"""
|
|
2
|
+
import asyncio
|
|
3
|
+
|
|
4
|
+
from cachetools import TTLCache
|
|
5
|
+
|
|
6
|
+
from deepfos.lib.decorator import cached_property
|
|
7
|
+
from typing import Union, List, Iterable, TYPE_CHECKING
|
|
8
|
+
|
|
9
|
+
import pandas as pd
|
|
10
|
+
|
|
11
|
+
from deepfos.api.datatable import MySQLAPI
|
|
12
|
+
from deepfos.lib.utils import cachedclass
|
|
13
|
+
from deepfos.lib.decorator import singleton
|
|
14
|
+
from deepfos.cache import Manager
|
|
15
|
+
from deepfos.options import OPTION
|
|
16
|
+
from .dbkits import BaseSqlParser, SyncMeta, DataframeSQLConvertor, T_DataInfo, escape_mysql_string
|
|
17
|
+
from .connector import MySQLAPIConnector, MySQLDirectAccess
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
# Public API of this module: sync and async MySQL clients.
__all__ = [
    'MySQLClient',
    'AsyncMySQLClient',
]
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
@singleton
class SqlParser(BaseSqlParser):
    """SQL parser bound to the MySQL datatable API."""

    api_cls = MySQLAPI

    @cached_property
    def datatable_cls(self):
        """Lazily resolve the MySQL datatable element class (import-cycle guard)."""
        from deepfos.element import datatable
        return datatable.AsyncDataTableMySQL
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class MySQLConvertor(DataframeSQLConvertor):
    """DataFrame-to-SQL convertor for MySQL.

    Inherits all statement building from :class:`DataframeSQLConvertor`;
    only the literal-escaping routine is MySQL-specific.
    """
    # MySQL-specific string escaping for literal values.
    escape_string = escape_mysql_string
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
# -----------------------------------------------------------------------------
|
|
41
|
+
# core
|
|
42
|
+
class _AbsAsyncMySQLClient:
    """Abstract asynchronous MySQL client.

    Combines a :class:`SqlParser` (resolves ``${table}`` placeholders to
    actual table names) with a connector that executes the SQL.
    Concrete subclasses must set :attr:`connector_cls`.
    """

    # Concrete subclasses assign the connector class (API-based or direct access).
    connector_cls = None
    # Converts DataFrames into INSERT statements; backticks quote identifiers.
    convertor = MySQLConvertor(quote_char='`')

    def __init__(self, version: Union[float, str] = None):
        self.parser = SqlParser()
        self.connector = self.connector_cls(version)

    async def exec_sqls(
        self,
        sqls: Union[str, Iterable[str]],
        table_info: T_DataInfo = None
    ):
        """Execute one or more SQL statements in a single transaction.

        Args:
            sqls: SQL to execute -- a single statement or a list of statements.
            table_info: Datatable element info corresponding to the table-name
                placeholders in the SQL.

        """

        if isinstance(sqls, str):
            sqls = [sqls]

        parsed_sql = await self.parser.parse(sqls, table_info)
        resp = await self.connector.trxn_execute(parsed_sql)
        return resp

    async def query_dfs(
        self,
        sqls: Union[str, Iterable[str]],
        table_info: T_DataInfo = None
    ) -> Union[pd.DataFrame, List[pd.DataFrame]]:
        """Run SQL queries and return the results as DataFrames.

        Args:
            sqls: SQL to run; table names may use ``${table_name}`` placeholders.
            table_info: Datatable element info corresponding to the placeholders.

        Notes:
            If the SQL contains no table-name placeholder, it is executed as-is.
            If it contains e.g. ``${table1}``, then ``table_info`` must have a
            ``table1`` key whose value is a dict containing
            ``elementName, elementType, folderId/path``, or a
            :class:`DataTableMySQL` instance, or an :class:`ElementModel`.

        Returns:
            A single :class:`DataFrame` for a single statement; otherwise a
            list of DataFrames in input order.

        """
        if isinstance(sqls, str):
            sql_list = await self.parser.parse([sqls], table_info)
            return await self.connector.query_dataframe(sql_list[0])
        else:
            sqls = await self.parser.parse(sqls, table_info)
            # Queries run concurrently; gather preserves input order.
            dfs = await asyncio.gather(
                *(self.connector.query_dataframe(sql) for sql in sqls)
            )
            return list(dfs)

    async def insert_df(
        self,
        dataframe: pd.DataFrame,
        element_name: str = None,
        table_name: str = None,
        updatecol: Iterable[str] = None,
        table_info: T_DataInfo = None,
        chunksize: int = None,
    ):
        """Insert a :class:`DataFrame` into a datatable.

        Args:
            dataframe: Data to insert.
            element_name: Datatable element name (resolved to a real table name).
            table_name: The table's **actual name** (takes precedence).
            updatecol: Columns to update (INSERT INTO ... ON DUPLICATE).
            table_info: Datatable element info used to resolve ``element_name``.
            chunksize: Maximum number of rows per INSERT statement.

        """
        if table_name is not None:
            tbl_name = table_name
        elif element_name is not None:
            # Resolve the element name through the placeholder machinery.
            tbl_name = (await self.parser.parse(["${%s}" % element_name], table_info))[0]
        else:
            raise ValueError("Either 'element_name' or 'table_name' must be presented.")

        sqls = list(self.convertor.iter_sql(
            dataframe, tbl_name, updatecol=updatecol, chunksize=chunksize
        ))
        return await self.connector.trxn_execute(sqls)
|
|
135
|
+
|
|
136
|
+
|
|
137
|
+
class _AbsMySQLClient(_AbsAsyncMySQLClient, metaclass=SyncMeta):
    """Synchronous facade: ``SyncMeta`` wraps the listed coroutine methods."""

    # Methods that SyncMeta converts into blocking equivalents.
    synchronize = (
        'exec_sqls',
        'query_dfs',
        'insert_df',
    )

    if TYPE_CHECKING:  # pragma: no cover
        # Signatures only: present the synchronous view to type checkers/IDEs.
        def exec_sqls(
            self,
            sqls: Union[str, Iterable[str]],
            table_info: T_DataInfo = None
        ):
            ...

        def query_dfs(
            self,
            sqls: Union[str, Iterable[str]],
            table_info: T_DataInfo = None
        ) -> Union[pd.DataFrame, List[pd.DataFrame]]:
            ...

        def insert_df(
            self,
            dataframe: pd.DataFrame,
            element_name: str = None,
            table_name: str = None,
            updatecol: Iterable[str] = None,
            table_info: T_DataInfo = None,
            chunksize: int = None,
        ):
            ...
|
|
169
|
+
|
|
170
|
+
|
|
171
|
+
# Choose the connector implementation once at import time: direct database
# access when enabled in options, otherwise go through the HTTP API.
CONN_CLS = (
    MySQLDirectAccess
    if OPTION.general.db_direct_access
    else MySQLAPIConnector
)
|
|
175
|
+
|
|
176
|
+
|
|
177
|
+
@cachedclass(Manager.create_cache(TTLCache, maxsize=5, ttl=3600))
class AsyncMySQLClient(_AbsAsyncMySQLClient):
    """Asynchronous MySQL client; instances cached (TTL 1h, at most 5)."""
    connector_cls = CONN_CLS
|
|
180
|
+
|
|
181
|
+
|
|
182
|
+
@cachedclass(Manager.create_cache(TTLCache, maxsize=5, ttl=3600))
class MySQLClient(_AbsMySQLClient):
    """Synchronous MySQL client; instances cached (TTL 1h, at most 5)."""
    connector_cls = CONN_CLS
|
deepfos/db/oracle.py
ADDED
|
@@ -0,0 +1,131 @@
|
|
|
1
|
+
"""Oracle客户端"""
|
|
2
|
+
import asyncio
|
|
3
|
+
from deepfos.lib.decorator import cached_property
|
|
4
|
+
from typing import Union, List, Iterable, TYPE_CHECKING
|
|
5
|
+
|
|
6
|
+
import pandas as pd
|
|
7
|
+
|
|
8
|
+
from deepfos.api.datatable import OracleAPI
|
|
9
|
+
from deepfos.cache import Manager, SpaceSeperatedTTLCache
|
|
10
|
+
from deepfos.lib.utils import cachedclass
|
|
11
|
+
from deepfos.lib.decorator import singleton
|
|
12
|
+
from .dbkits import BaseSqlParser, SyncMeta, T_DataInfo, DataframeSQLConvertor
|
|
13
|
+
from .connector import OracleAPIConnector
|
|
14
|
+
from .mysql import _AbsAsyncMySQLClient # noqa
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
# Public API of this module: sync/async Oracle clients and the convertor.
__all__ = [
    'OracleClient',
    'AsyncOracleClient',
    'OracleDFSQLConvertor'
]
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class OracleDFSQLConvertor(DataframeSQLConvertor):
    """DataFrame-to-SQL convertor using Oracle's multi-row ``INSERT ALL`` syntax."""

    def build_sql(
        self,
        columns: str,
        values_in_line: Iterable[str],
        tablename: str,
        updatecol: Iterable[str] = None,
        **opts
    ):
        """Build one ``INSERT ALL ... SELECT 1 FROM DUAL`` statement.

        Raises:
            NotImplementedError: ``updatecol`` upserts are unsupported on Oracle.
        """
        if updatecol is not None:
            raise NotImplementedError("`updatecol` is not yet implemented for OracleDB.")

        # The quoted, upper-cased table name is loop-invariant; hoist it.
        quoted_table = f'{self.quote_char}{tablename.upper()}{self.quote_char}'
        into_clauses = [
            f'INTO {quoted_table} ({columns}) VALUES {row}'
            for row in values_in_line
        ]
        return 'INSERT ALL {} SELECT 1 FROM DUAL'.format('\n'.join(into_clauses))

    def build_column_string(self, columns):
        """Quote and upper-case the column names (``columns`` is a pandas Index)."""
        quoted_cols = ('"%s"' % col.upper() for col in columns)
        return ','.join(quoted_cols)

    @staticmethod
    def format_datetime(maybe_datetime):
        """Render a datetime Series as Oracle ``TO_DATE(...)`` expressions."""
        formatted = maybe_datetime.dt.strftime("%Y-%m-%d %H:%M:%S")
        return "TO_DATE('" + formatted + "', 'YYYY-MM-DD HH24:MI:SS')"
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
@singleton
class SqlParser(BaseSqlParser):
    """SQL parser bound to the Oracle datatable API."""

    api_cls = OracleAPI

    @cached_property
    def datatable_cls(self):
        # Imported lazily to avoid a circular import with deepfos.element.
        from deepfos.element.datatable import AsyncDataTableOracle
        return AsyncDataTableOracle

    @staticmethod
    async def query_table_names(api: OracleAPI, query_table):
        """Resolve table elements to their actual Oracle table names.

        Args:
            api: Oracle datatable API instance.
            query_table: Collection of table elements (each has ``elementName``).

        Returns:
            Mapping of element name -> actual table name.

        Raises:
            ValueError: if some element could not be resolved.
        """
        async def query_single(tbl_ele):
            return tbl_ele.elementName, await api.dml.get_tablename(tbl_ele)

        # Resolve all names concurrently; gather preserves input order.
        tablenames = await asyncio.gather(*(
            query_single(table)
            for table in query_table
        ))

        # NOTE(review): defensive check -- asyncio.gather yields one result per
        # coroutine, so the lengths should always match unless query_table is a
        # one-shot iterator consumed above; confirm original intent.
        if len(tablenames) != len(query_table):
            missing = set(t.elementName for t in query_table).difference(
                set(t[0] for t in tablenames))

            raise ValueError(f"Cannot resolve actual table names for element: {missing}")
        return dict(tablenames)
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
# -----------------------------------------------------------------------------
|
|
81
|
+
# core
|
|
82
|
+
class _AsyncOracleClient(_AbsAsyncMySQLClient):
    """Asynchronous Oracle client (reuses the abstract MySQL client logic)."""

    # Oracle identifiers are double-quoted.
    convertor = OracleDFSQLConvertor(quote_char='"')

    def __init__(self, version: Union[float, str] = None):  # noqa
        # Override the base __init__: bind the Oracle parser and connector.
        self.parser = SqlParser()
        self.connector = OracleAPIConnector(version)
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
class _OracleClient(_AsyncOracleClient, metaclass=SyncMeta):
    """Synchronous Oracle client: ``SyncMeta`` wraps the listed coroutines."""

    # Methods that SyncMeta converts into blocking equivalents.
    synchronize = (
        'exec_sqls',
        'query_dfs',
        'insert_df',
    )

    if TYPE_CHECKING:  # pragma: no cover
        # Signatures only: present the synchronous view to type checkers/IDEs.
        def exec_sqls(
            self,
            sqls: Union[str, Iterable[str]],
            table_info: T_DataInfo = None
        ):
            ...

        def query_dfs(
            self,
            sqls: Union[str, Iterable[str]],
            table_info: T_DataInfo = None
        ) -> Union[pd.DataFrame, List[pd.DataFrame]]:
            ...

        def insert_df(
            self,
            dataframe: pd.DataFrame,
            element_name: str = None,
            table_name: str = None,
            updatecol: Iterable[str] = None,
            table_info: T_DataInfo = None,
            chunksize: int = None,
        ):
            ...
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
@cachedclass(Manager.create_cache(SpaceSeperatedTTLCache, maxsize=5, ttl=3600))
class AsyncOracleClient(_AsyncOracleClient):
    """Asynchronous Oracle client; instances cached (TTL 1h, at most 5)."""
    pass
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
@cachedclass(Manager.create_cache(SpaceSeperatedTTLCache, maxsize=5, ttl=3600))
class OracleClient(_OracleClient):
    """Synchronous Oracle client; instances cached (TTL 1h, at most 5)."""
    pass
|
deepfos/db/postgresql.py
ADDED
|
@@ -0,0 +1,192 @@
|
|
|
1
|
+
"""PostgreSQL客户端"""
|
|
2
|
+
from functools import cached_property
|
|
3
|
+
from typing import Union, List, Iterable, TYPE_CHECKING
|
|
4
|
+
|
|
5
|
+
import pandas as pd
|
|
6
|
+
|
|
7
|
+
from deepfos.api.datatable import PostgreSQLAPI
|
|
8
|
+
from deepfos.cache import Manager, SpaceSeperatedTTLCache
|
|
9
|
+
from deepfos.lib.utils import cachedclass
|
|
10
|
+
from deepfos.lib.decorator import singleton
|
|
11
|
+
from .dbkits import BaseSqlParser, SyncMeta, T_DataInfo, DataframeSQLConvertor, escape_pg_string
|
|
12
|
+
from .connector import PostgreSQLAPIConnector
|
|
13
|
+
from .mysql import _AbsAsyncMySQLClient # noqa
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
# Public API of this module: sync/async PostgreSQL clients and convertor.
__all__ = [
    'PostgreSQLClient',
    'AsyncPostgreSQLClient',
    '_AsyncPostgreSQLClient',
    'PostgreSQLConvertor',
]
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
@singleton
class SqlParser(BaseSqlParser):
    """SQL parser bound to the PostgreSQL datatable API."""

    api_cls = PostgreSQLAPI

    # NOTE(review): this module uses functools.cached_property while the
    # sibling db modules use deepfos.lib.decorator.cached_property -- confirm
    # the two are interchangeable here.
    @cached_property
    def datatable_cls(self):
        # Imported lazily to avoid a circular import with deepfos.element.
        from deepfos.element.datatable import AsyncDataTablePostgreSQL
        return AsyncDataTablePostgreSQL
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class PostgreSQLConvertor(DataframeSQLConvertor):
    """DataFrame-to-SQL convertor for PostgreSQL.

    Supports upserts via ``INSERT INTO ... ON CONFLICT ... DO UPDATE`` when
    ``updatecol`` and ``conflict_target`` are supplied.
    """
    # PostgreSQL-specific string escaping for literal values.
    escape_string = escape_pg_string

    def iter_sql(
        self,
        dataframe: pd.DataFrame,
        tablename: str,
        updatecol: Iterable = None,
        chunksize: int = None,
        conflict_target: Iterable[str] = None,
        **opts
    ) -> Iterable[str]:
        """Convert a :class:`DataFrame` into a generator of SQL statements.

        If ``updatecol`` is given, the ``INSERT INTO ... ON CONFLICT`` syntax
        is used.

        Args:
            dataframe: Data to insert.
            tablename: Database table name.
            updatecol: Columns to update on conflict.
            chunksize: Maximum DataFrame rows per statement.
            conflict_target: Unique-constraint columns used as the ON CONFLICT
                target.

        Returns:
            Generator of SQL statements.

        See Also:
            :func:`df_to_sql`

        """
        # Thread conflict_target through to build_sql via **opts.
        return super().iter_sql(dataframe, tablename, updatecol, chunksize, conflict_target=conflict_target, **opts)

    def build_sql(
        self,
        columns: str,
        values_in_line: Iterable[str],
        tablename: str,
        updatecol: Iterable[str] = None,
        conflict_target: Iterable[str] = None,
        **opts
    ):
        """Build a single INSERT (or upsert) statement.

        Args:
            columns: Pre-rendered, quoted column list.
            values_in_line: Rendered ``(v1, v2, ...)`` value tuples.
            tablename: Target table name.
            updatecol: Columns to update on conflict; plain INSERT when
                ``None`` or empty.
            conflict_target: Unique-constraint columns; required whenever an
                ON CONFLICT DO UPDATE clause is emitted.

        Raises:
            ValueError: ``updatecol`` given without ``conflict_target``.
        """
        values = ','.join(values_in_line)
        quoted_table = f'{self.quote_char}{tablename}{self.quote_char}'
        # Single source for the plain-INSERT form (was duplicated verbatim).
        plain_insert = f'INSERT INTO {quoted_table} ({columns}) VALUES {values}'
        if updatecol is None:
            return plain_insert

        update_str = ','.join([
            f"{self.quote_char}{x}{self.quote_char}="
            f"EXCLUDED.{self.quote_char}{x}{self.quote_char}"
            for x in updatecol
        ])
        # An empty updatecol degenerates to a plain INSERT as well.
        if not update_str:
            return plain_insert

        if conflict_target is None:
            raise ValueError('如需使用ON CONFLICT DO UPDATE语法,'
                             '需提供有唯一约束的列作为conflict_target列信息')

        conflict_target_clause = ",".join([
            f"{self.quote_char}{x}{self.quote_char}"
            for x in conflict_target
        ])

        if conflict_target_clause:
            conflict_target_clause = f"({conflict_target_clause})"

        return f'INSERT INTO {quoted_table} ({columns}) ' \
               f'VALUES {values} ' \
               f'ON CONFLICT {conflict_target_clause} ' \
               f'DO UPDATE SET {update_str}'
|
|
104
|
+
|
|
105
|
+
|
|
106
|
+
# -----------------------------------------------------------------------------
|
|
107
|
+
# core
|
|
108
|
+
class _AsyncPostgreSQLClient(_AbsAsyncMySQLClient):
    """Asynchronous PostgreSQL client (reuses the abstract MySQL client logic)."""

    # PostgreSQL identifiers are double-quoted.
    convertor = PostgreSQLConvertor(quote_char='"')

    def __init__(self, version: Union[float, str] = None):  # noqa
        # Override the base __init__: bind the PostgreSQL parser and connector.
        self.parser = SqlParser()
        self.connector = PostgreSQLAPIConnector(version)

    async def insert_df(
        self,
        dataframe: pd.DataFrame,
        element_name: str = None,
        table_name: str = None,
        updatecol: Iterable[str] = None,
        table_info: T_DataInfo = None,
        chunksize: int = None,
        conflict_target: Iterable[str] = None,
    ):
        """Insert a :class:`DataFrame` into a datatable.

        Args:
            dataframe: Data to insert.
            element_name: Datatable element name (resolved to a real table name).
            table_name: The table's **actual name** (takes precedence).
            updatecol: Columns to update (INSERT INTO ... ON CONFLICT).
            table_info: Datatable element info used to resolve ``element_name``.
            chunksize: Maximum number of rows per INSERT statement.
            conflict_target: Unique-constraint columns for the ON CONFLICT clause.

        """
        if table_name is not None:
            tbl_name = table_name
        elif element_name is not None:
            # Resolve the element name through the placeholder machinery.
            tbl_name = (await self.parser.parse(["${%s}" % element_name], table_info))[0]
        else:
            raise ValueError("Either 'element_name' or 'table_name' must be presented.")

        sqls = list(self.convertor.iter_sql(
            dataframe, tbl_name, updatecol=updatecol, chunksize=chunksize, conflict_target=conflict_target
        ))
        return await self.connector.trxn_execute(sqls)
|
|
148
|
+
|
|
149
|
+
|
|
150
|
+
class _PostgreSQLClient(_AsyncPostgreSQLClient, metaclass=SyncMeta):
    """Synchronous PostgreSQL client: ``SyncMeta`` wraps the listed coroutines."""

    # Methods that SyncMeta converts into blocking equivalents.
    synchronize = (
        'exec_sqls',
        'query_dfs',
        'insert_df',
    )

    if TYPE_CHECKING:  # pragma: no cover
        # Signatures only: present the synchronous view to type checkers/IDEs.
        def exec_sqls(
            self,
            sqls: Union[str, Iterable[str]],
            table_info: T_DataInfo = None
        ):
            ...

        def query_dfs(
            self,
            sqls: Union[str, Iterable[str]],
            table_info: T_DataInfo = None
        ) -> Union[pd.DataFrame, List[pd.DataFrame]]:
            ...

        def insert_df(
            self,
            dataframe: pd.DataFrame,
            element_name: str = None,
            table_name: str = None,
            updatecol: Iterable[str] = None,
            table_info: T_DataInfo = None,
            chunksize: int = None,
            conflict_target: Iterable[str] = None,
        ):
            ...
|
|
183
|
+
|
|
184
|
+
|
|
185
|
+
@cachedclass(Manager.create_cache(SpaceSeperatedTTLCache, maxsize=5, ttl=3600))
class AsyncPostgreSQLClient(_AsyncPostgreSQLClient):
    """Asynchronous PostgreSQL client; instances cached (TTL 1h, at most 5)."""
    pass
|
|
188
|
+
|
|
189
|
+
|
|
190
|
+
@cachedclass(Manager.create_cache(SpaceSeperatedTTLCache, maxsize=5, ttl=3600))
class PostgreSQLClient(_PostgreSQLClient):
    """Synchronous PostgreSQL client; instances cached (TTL 1h, at most 5)."""
    pass
|
deepfos/db/sqlserver.py
ADDED
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
"""SQLServer客户端"""
|
|
2
|
+
from deepfos.lib.decorator import cached_property
|
|
3
|
+
from typing import Union, List, Iterable, TYPE_CHECKING
|
|
4
|
+
|
|
5
|
+
import pandas as pd
|
|
6
|
+
|
|
7
|
+
from deepfos.api.datatable import SQLServerAPI
|
|
8
|
+
from deepfos.cache import Manager, SpaceSeperatedTTLCache
|
|
9
|
+
from deepfos.lib.utils import cachedclass
|
|
10
|
+
from deepfos.lib.decorator import singleton
|
|
11
|
+
from .dbkits import BaseSqlParser, SyncMeta, T_DataInfo
|
|
12
|
+
from .connector import SQLServerAPIConnector
|
|
13
|
+
from .mysql import _AbsAsyncMySQLClient, MySQLConvertor # noqa
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
# Public API of this module: sync/async SQLServer clients and the convertor.
__all__ = [
    'SQLServerClient',
    'AsyncSQLServerClient',
    'SQLServerDFSQLConvertor',
]
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
@singleton
class SqlParser(BaseSqlParser):
    """SQL parser bound to the SQLServer datatable API."""

    api_cls = SQLServerAPI

    @cached_property
    def datatable_cls(self):
        """Lazily resolve the SQLServer datatable element class (import-cycle guard)."""
        from deepfos.element import datatable
        return datatable.AsyncDataTableSQLServer
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
class SQLServerDFSQLConvertor(MySQLConvertor):
    """MySQL-style convertor for SQLServer, without upsert support."""

    def build_sql(
        self,
        columns: str,
        values_in_line: Iterable[str],
        tablename: str,
        updatecol: Iterable[str] = None,
        **opts
    ):
        """Build a plain INSERT; ``updatecol`` is rejected on SQLServer."""
        if updatecol is None:
            # Delegate to the MySQL builder with the upsert path disabled.
            return super().build_sql(columns, values_in_line, tablename, None, **opts)
        raise NotImplementedError("`updatecol` is not yet implemented for SQLServerDB.")
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
# -----------------------------------------------------------------------------
|
|
49
|
+
# core
|
|
50
|
+
class _AsyncSQLServerClient(_AbsAsyncMySQLClient):
    """Asynchronous SQLServer client (reuses the abstract MySQL client logic)."""

    # NOTE(review): empty quote_char -- identifiers are emitted unquoted; confirm
    # this is intentional for SQLServer (brackets/quotes are not applied).
    convertor = SQLServerDFSQLConvertor(quote_char="")

    def __init__(self, version: Union[float, str] = None):  # noqa
        # Override the base __init__: bind the SQLServer parser and connector.
        self.parser = SqlParser()
        self.connector = SQLServerAPIConnector(version)
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
class _SQLServerClient(_AsyncSQLServerClient, metaclass=SyncMeta):
    """Synchronous SQLServer client: ``SyncMeta`` wraps the listed coroutines."""

    # Methods that SyncMeta converts into blocking equivalents.
    synchronize = (
        'exec_sqls',
        'query_dfs',
        'insert_df',
    )

    if TYPE_CHECKING:  # pragma: no cover
        # Signatures only: present the synchronous view to type checkers/IDEs.
        def exec_sqls(
            self,
            sqls: Union[str, Iterable[str]],
            table_info: T_DataInfo = None
        ):
            ...

        def query_dfs(
            self,
            sqls: Union[str, Iterable[str]],
            table_info: T_DataInfo = None
        ) -> Union[pd.DataFrame, List[pd.DataFrame]]:
            ...

        def insert_df(
            self,
            dataframe: pd.DataFrame,
            element_name: str = None,
            table_name: str = None,
            updatecol: Iterable[str] = None,
            table_info: T_DataInfo = None,
            chunksize: int = None,
        ):
            ...
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
@cachedclass(Manager.create_cache(SpaceSeperatedTTLCache, maxsize=5, ttl=3600))
class AsyncSQLServerClient(_AsyncSQLServerClient):
    """Asynchronous SQLServer client; instances cached (TTL 1h, at most 5)."""
    pass
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
@cachedclass(Manager.create_cache(SpaceSeperatedTTLCache, maxsize=5, ttl=3600))
class SQLServerClient(_SQLServerClient):
    """Synchronous SQLServer client; instances cached (TTL 1h, at most 5)."""
    pass
|