mdbq 1.1.7__tar.gz → 1.1.9__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {mdbq-1.1.7 → mdbq-1.1.9}/PKG-INFO +1 -1
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/aggregation/aggregation.py +24 -18
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/mysql/mysql.py +12 -8
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq.egg-info/PKG-INFO +1 -1
- {mdbq-1.1.7 → mdbq-1.1.9}/setup.py +1 -1
- {mdbq-1.1.7 → mdbq-1.1.9}/README.txt +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/__init__.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/__version__.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/aggregation/__init__.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/aggregation/df_types.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/aggregation/mysql_types.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/aggregation/optimize_data.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/aggregation/query_data.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/bdup/__init__.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/bdup/bdup.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/clean/__init__.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/clean/data_clean.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/company/__init__.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/company/copysh.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/config/__init__.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/config/get_myconf.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/config/products.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/config/set_support.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/config/update_conf.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/dataframe/__init__.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/dataframe/converter.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/log/__init__.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/log/mylogger.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/mongo/__init__.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/mongo/mongo.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/mysql/__init__.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/mysql/data_types_即将删除.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/mysql/s_query.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/mysql/year_month_day.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/other/__init__.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/other/porxy.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/other/pov_city.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/other/ua_sj.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/pbix/__init__.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/pbix/pbix_refresh.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/pbix/refresh_all.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq/spider/__init__.py +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq.egg-info/SOURCES.txt +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq.egg-info/dependency_links.txt +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/mdbq.egg-info/top_level.txt +0 -0
- {mdbq-1.1.7 → mdbq-1.1.9}/setup.cfg +0 -0
{mdbq-1.1.7 → mdbq-1.1.9}/mdbq/aggregation/aggregation.py

@@ -917,8 +917,8 @@ def upload(path, db_name, collection_name):
                 dtypes = {k: dtypes[k] for k in intersection_keys}  # 使用交集的键创建新字典
                 df = df.astype(dtypes)

-
-                m.df_to_mysql(df=df, db_name=db_name, table_name=collection_name, filename=name, count=f'{i}/{count}')
+                d.df_to_mongo(df=df, db_name=db_name, collection_name=collection_name)
+                m.df_to_mysql(df=df, db_name=db_name, table_name=collection_name, drop_dup=False, filename=name, count=f'{i}/{count}')
                 # nas.df_to_mysql(df=df, db_name=db_name, table_name=collection_name)
             except Exception as e:
                 print(name, e)
@@ -935,7 +935,7 @@ def one_file_to_mysql(file, db_name, table_name, target_service, database):
     m.df_to_mysql(df=df, db_name=db_name, table_name=table_name, filename=filename)


-def file_dir():
+def file_dir(one_file=True):
     """
     按照文件记录对照表
     批量上传数据库
@@ -954,20 +954,26 @@ def file_dir():
         if platform.system() == 'Windows':
             sub_path = sub_path.replace('/', '\\')
         # print(os.path.join(path, sub_path), db_name, table_name)
-        # 从每个文件夹中取出一个文件
-
-
-
-
-
-
-
-
-
-
-
-
-
+        if one_file:  # 从每个文件夹中取出一个文件
+            real_path_list = []
+            for root, dirs, files in os.walk(os.path.join(path, sub_path), topdown=False):
+                for name in files:
+                    if name.endswith('.csv') and 'baidu' not in name and '~' not in name:
+                        real_path_list.append(os.path.join(root, name))
+                        break
+            for real_path in real_path_list:
+                one_file_to_mysql(
+                    file=real_path,
+                    db_name=db_name,
+                    table_name=table_name,
+                    target_service='home_lx',
+                    database='mysql'
+                )
+        else:  # 上传全部文件夹
+            upload(
+                path=os.path.join(path, sub_path),
+                db_name = db_name,
+                collection_name = table_name,
             )
         data.update({'入库进度': 1})  # 更新进度
         df = pd.DataFrame.from_dict(datas, orient='columns')
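
The new one_file branch of file_dir walks each sub-folder and, per the inline comment, takes one file from each folder before handing it to one_file_to_mysql. As a standalone illustration of that selection pattern (a minimal sketch; the directory path and the exact placement of the break are assumptions, not taken from this diff):

import os

def pick_one_csv_per_folder(base_dir):
    """Collect at most one qualifying .csv per folder under base_dir."""
    picked = []
    for root, dirs, files in os.walk(base_dir, topdown=False):
        for name in files:
            # Same filter as the diff: .csv only, skipping 'baidu' exports and '~' temp files.
            if name.endswith('.csv') and 'baidu' not in name and '~' not in name:
                picked.append(os.path.join(root, name))
                break  # stop at the first match, i.e. one file per folder
    return picked

# Hypothetical usage; arguments mirror the call shown in the hunk above:
# for real_path in pick_one_csv_per_folder('/path/to/exports'):
#     one_file_to_mysql(file=real_path, db_name='...', table_name='...',
#                       target_service='home_lx', database='mysql')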
@@ -995,4 +1001,4 @@ if __name__ == '__main__':
     # db_name = '生意经2',
     # collection_name = 'e3_零售明细统计',
     # )
-    file_dir()
+    file_dir(one_file=True)
{mdbq-1.1.7 → mdbq-1.1.9}/mdbq/mysql/mysql.py

@@ -37,7 +37,7 @@ class MysqlUpload:
         }
         self.filename = None

-    def df_to_mysql(self, df, table_name, db_name='远程数据源', drop_duplicates=False, filename=None, count=None):
+    def df_to_mysql(self, df, table_name, db_name='远程数据源', drop_dup=True, drop_duplicates=False, filename=None, count=None):
         """
         将 df 写入数据库
         db_name: 数据库名称
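
The 1.1.9 signature adds a drop_dup flag (default True) alongside the existing drop_duplicates parameter; per the hunk at line 144 below, drop_dup=True makes df_to_mysql look for an identical existing row before inserting, while drop_dup=False inserts unconditionally. A hedged call sketch follows; the MysqlUpload constructor arguments and the DataFrame contents are placeholders, not taken from this diff:

import pandas as pd
from mdbq.mysql import mysql

df = pd.DataFrame({'日期': ['2024-01-01'], '销量': [10]})   # placeholder data
m = mysql.MysqlUpload(username='user', password='pass',
                      host='127.0.0.1', port=3306)          # hypothetical constructor args
# drop_dup=True (the new default) skips rows that already exist in the target table;
# aggregation.upload() in 1.1.9 passes drop_dup=False to insert without that check.
m.df_to_mysql(df=df, db_name='远程数据源', table_name='demo_table',
              drop_dup=False, filename='demo.csv')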
@@ -144,15 +144,19 @@ class MysqlUpload:
                     condition = ' AND '.join(condition)  # 构建查询条件
                     # print(condition)

-
-
-
-
-
+                    if drop_dup:
+                        sql = f"SELECT {cols} FROM `{table_name}` WHERE {condition}"
+                        # sql = f"SELECT {cols} FROM `{table_name}` WHERE `创建时间` = '2014-09-19 14:32:33'"
+                        cursor.execute(sql)
+                        result = cursor.fetchall()  # 获取查询结果, 有结果返回 list 表示数据已存在(不重复插入),没有则返回空 tuple
+                        if not result:  # 数据不存在则插入
+                            sql = f"INSERT INTO `{table_name}` ({cols}) VALUES ({values});"
+                            cursor.execute(sql)
+                        # else:
+                        #     print(f'重复数据不插入: {condition[:50]}...')
+                    else:
                         sql = f"INSERT INTO `{table_name}` ({cols}) VALUES ({values});"
                         cursor.execute(sql)
-                    # else:
-                    #     print(f'重复数据不插入: {condition[:50]}...')
                 except Exception as e:
                     # print(data)
                     # print(values)
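
Stripped of the surrounding class machinery, the drop_dup branch boils down to a check-then-insert: SELECT with every column value in the WHERE condition, and INSERT only when nothing comes back. A minimal sketch of that pattern, assuming the pymysql driver and a hypothetical table (the package builds its SQL with f-strings; the sketch uses parameter placeholders instead):

import pymysql

def insert_if_absent(conn, table_name, row):
    """Insert the dict `row` into `table_name` unless an identical row already exists."""
    cols = ', '.join(f'`{k}`' for k in row)
    placeholders = ', '.join(['%s'] * len(row))
    condition = ' AND '.join(f'`{k}` = %s' for k in row)
    with conn.cursor() as cursor:
        # Non-empty result means the row already exists, so skip it (drop_dup=True behaviour).
        cursor.execute(f"SELECT {cols} FROM `{table_name}` WHERE {condition}", list(row.values()))
        if cursor.fetchall():
            return False
        cursor.execute(f"INSERT INTO `{table_name}` ({cols}) VALUES ({placeholders});", list(row.values()))
    conn.commit()
    return True

# Example usage; connection parameters and the table are placeholders:
# conn = pymysql.connect(host='127.0.0.1', user='user', password='pass', database='demo_db')
# insert_if_absent(conn, 'demo_table', {'日期': '2024-01-01', '销量': 10})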
{mdbq-1.1.7 → mdbq-1.1.9}/mdbq/mysql/data_types_即将删除.py RENAMED (file without changes)