mdbq 2.7.9__py3-none-any.whl → 2.8.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mdbq/aggregation/aggregation.py +71 -167
- mdbq/aggregation/query_data.py +41 -3
- mdbq/spider/aikucun.py +6 -3
- {mdbq-2.7.9.dist-info → mdbq-2.8.1.dist-info}/METADATA +1 -1
- {mdbq-2.7.9.dist-info → mdbq-2.8.1.dist-info}/RECORD +7 -7
- {mdbq-2.7.9.dist-info → mdbq-2.8.1.dist-info}/WHEEL +0 -0
- {mdbq-2.7.9.dist-info → mdbq-2.8.1.dist-info}/top_level.txt +0 -0
mdbq/aggregation/aggregation.py CHANGED

```diff
@@ -6,6 +6,7 @@ import pandas as pd
 import numpy as np
 import chardet
 import zipfile
+import socket
 from pandas.tseries.holiday import next_monday
 from pyzipper import PyZipFile
 import os
@@ -16,6 +17,7 @@ from mdbq.mysql import mysql
 from mdbq.aggregation import df_types
 from mdbq.config import get_myconf
 from mdbq.config import set_support
+from mdbq.config import myconfig
 from mdbq.dataframe import converter
 import datetime
 import time
@@ -25,14 +27,32 @@ import getpass
 
 warnings.filterwarnings('ignore')
 """
+
+此文件不再更新
+
+
 1. DatabaseUpdate: 程序用于对爬虫下载的原始数据进行清洗并入库;
    数据入库时会较检并更新本地 json 文件的 dtypes 信息;
    若 json 缺失 dtypes 信息, 会按 df 类型自动转换并更新本地 json, 可以手动修改添加本地 json 信息,手动修改优先;
 2. upload_dir: 函数将一个文件夹上传至数据库;
 """
 
+username, password, host, port, service_database = None, None, None, None, None,
+if socket.gethostname() in ['xigua_lx', 'xigua1', 'MacBookPro']:
+    conf = myconfig.main()
+    conf_data = conf['Windows']['xigua_lx']['mysql']['local']
+    username, password, host, port = conf_data['username'], conf_data['password'], conf_data['host'], conf_data['port']
+    service_database = {'xigua_lx': 'mysql'}
+elif socket.gethostname() in ['company', 'Mac2.local']:
+    conf = myconfig.main()
+    conf_data = conf['Windows']['company']['mysql']['local']
+    username, password, host, port = conf_data['username'], conf_data['password'], conf_data['host'], conf_data['port']
+    service_database = {'company': 'mysql'}
+if not username:
+    print(f'找不到主机:')
+
 
-class DatabaseUpdate:
+class DatabaseUpdateBak:
     """
     清洗文件,并入库,被 tg.py 调用
     """
```
```diff
@@ -858,10 +878,6 @@ class DatabaseUpdate:
         for service_name, database in service_database.items():
             # print(service_name, database)
             if database == 'mongodb':
-                username, password, host, port = get_myconf.select_config_values(
-                    target_service=service_name,
-                    database=database,
-                )
                 d = mongo.UploadMongo(
                     username=username,
                     password=password,
@@ -882,10 +898,6 @@ class DatabaseUpdate:
                 d.client.close()
 
             elif database == 'mysql':
-                username, password, host, port = get_myconf.select_config_values(
-                    target_service=service_name,
-                    database=database,
-                )
                 m = mysql.MysqlUpload(
                     username=username,
                     password=password,
@@ -1058,10 +1070,6 @@ class DatabaseUpdate:
 
         for service_database in service_databases:
             for service_name, database in service_database.items():
-                username, password, host, port = get_myconf.select_config_values(
-                    target_service=service_name,
-                    database=database,
-                )
                 m = mysql.MysqlUpload(
                     username=username,
                     password=password,
@@ -1095,10 +1103,6 @@ class DatabaseUpdate:
         df['日期'] = datetime.datetime.now().strftime('%Y-%m-%d')
         for service_database in service_databases:
             for service_name, database in service_database.items():
-                username, password, host, port = get_myconf.select_config_values(
-                    target_service=service_name,
-                    database=database,
-                )
                 m = mysql.MysqlUpload(
                     username=username,
                     password=password,
```
```diff
@@ -1117,46 +1121,13 @@ class DatabaseUpdate:
                 )
 
 
-def upload_dir(path, db_name, collection_name, dbs={'mysql': True, 'mongodb': Tr
+def upload_dir(path, db_name, collection_name, json_path=None):
     """ 上传一个文件夹到 mysql 或者 mongodb 数据库 """
     if not os.path.isdir(path):
         print(f'{os.path.splitext(os.path.basename(__file__))[0]}.upload_dir: 函数只接受文件夹路径,不是一个文件夹: {path}')
         return
 
-
-    username, password, host, port = get_myconf.select_config_values(
-        target_service=target_service,
-        database='mongodb',
-    )
-    d = mongo.UploadMongo(
-        username=username,
-        password=password,
-        host=host,
-        port=port,
-        drop_duplicates=False,
-    )
-
-    if dbs['mysql']:
-        username, password, host, port = get_myconf.select_config_values(
-            target_service=target_service,
-            database='mysql',
-        )
-        m = mysql.MysqlUpload(
-            username=username,
-            password=password,
-            host=host,
-            port=port,
-        )
-        # username, password, host, port = get_myconf.select_config_values(
-        #     target_service='nas',
-        #     database='mysql',
-        # )
-        # nas = mysql.MysqlUpload(
-        #     username=username,
-        #     password=password,
-        #     host=host,
-        #     port=port,
-        # )
+    m = mysql.MysqlUpload(username=username, password=password, host=host, port=port)
 
     # 从本地 json 文件从读取 df 的数据类型信息
     df_to_json = df_types.DataTypes()
```

(The old `upload_dir` signature is cut off in the extracted page; the fragment shown in the next hunk header is reproduced as-is.)
```diff
@@ -1197,31 +1168,23 @@ def upload_dir(path, db_name, collection_name, dbs={'mysql': True, 'mongodb': Tr
             dtypes = {k: dtypes[k] for k in intersection_keys}  # 使用交集的键创建新字典
             df = df.astype(dtypes)  # 再次更新 df 的数据类型
 
-
-
-
-
-
-
-
-                filename=name, count=f'{i}/{count}',
-                service_database={target_service: 'mysql'},  # 这个参数是用来设置更新哪台服务器的 types 信息到本地 json 文件
-            )
-            # nas.df_to_mysql(df=df, db_name=db_name, table_name=collection_name, drop_duplicates=True,)
+            m.df_to_mysql(df=df, db_name=db_name, table_name=collection_name,
+                          move_insert=False,  # 先删除,再插入
+                          df_sql = True,
+                          drop_duplicates=False,
+                          filename=name, count=f'{i}/{count}',
+                          )
+            # nas.df_to_mysql(df=df, db_name=db_name, table_name=collection_name, drop_duplicates=True,)
         except Exception as e:
             print(name, e)
         i += 1
-    if dbs['mongodb']:
-        if d.client:
-            d.client.close()  # 必须手动关闭数据库连接
 
 
-def one_file_to_mysql(file, db_name, table_name, target_service, database):
+def one_file_to_mysql(file, db_name, table_name):
     """ 上传单个文件到 mysql 数据库 file 参数是一个文件 """
     if not os.path.isfile(file):
         print(f'{os.path.splitext(os.path.basename(__file__))[0]}.one_file_to_mysql: 函数只接受文件, 此文件不存在: {file}')
         return
-    username, password, host, port = get_myconf.select_config_values(target_service=target_service, database=database)
     filename = os.path.basename(file)
     df = pd.read_csv(file, encoding='utf-8_sig', header=0, na_filter=False, float_precision='high')
     # df.replace(to_replace=[','], value='', regex=True, inplace=True)  # 替换掉特殊字符
```

(Several removed lines in this hunk, shown bare above, lost their content in the extracted page; they belonged to the old `m.df_to_mysql(...)` call whose surviving arguments follow them.)
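The surviving context at the top of this hunk shows why the upload loop intersects the json-backed dtypes with the frame's actual columns before casting: `astype` with a dict raises `KeyError` for any key that is not a column. A small self-contained illustration of that trick:

```python
import pandas as pd

df = pd.DataFrame({'日期': ['2024-04-01'], '花费': ['12.5']})

# dtypes restored from the local json may mention columns this file lacks.
dtypes = {'日期': 'object', '花费': 'float64', '展现量': 'int64'}

# df.astype(dtypes) would raise KeyError because of '展现量';
# intersecting the keys with the real columns first avoids that.
intersection_keys = set(dtypes) & set(df.columns)
dtypes = {k: dtypes[k] for k in intersection_keys}
df = df.astype(dtypes)
print(df.dtypes)  # 花费 is now float64; 展现量 is silently skipped
```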
```diff
@@ -1234,121 +1197,62 @@ def one_file_to_mysql(file, db_name, table_name, target_service, database):
         move_insert=False,
         df_sql=True,
         drop_duplicates=False,
-        service_database={target_service: database},
     )
 
 
-def file_dir(one_file=True, target_service='company'):
-    """
-    按照文件记录对照表上传数据
-    批量上传数据库
-    one_file: 值为 True 时每个文件夹取一个文件上传数据库,反之上传所有文件夹数据
-    """
-    filename = '文件目录对照表.csv'
-    if platform.system() == 'Windows':
-        path = 'C:\\同步空间\\BaiduSyncdisk\\原始文件2'
-    else:
-        path = '/Users/xigua/数据中心/原始文件2'
-
-    support_file = set_support.SetSupport(dirname='support').dirname
-    df = pd.read_csv(os.path.join(support_file, filename), encoding='utf-8_sig', header=0, na_filter=False)
-    datas = df.to_dict('records')  # 转字典
-    for data in datas:
-        # print(data)
-        if data['入库进度'] == 0:
-            sub_path, db_name, table_name = data['子文件夹'], data['数据库名'], data['数据表']
-            if platform.system() == 'Windows':
-                sub_path = sub_path.replace('/', '\\')
-            # print(os.path.join(path, sub_path), db_name, table_name)
-            if one_file:  # 从每个文件夹中取出一个文件上传
-                real_path_list = []
-                for root, dirs, files in os.walk(os.path.join(path, sub_path), topdown=False):
-                    for name in files:
-                        if name.endswith('.csv') and 'baidu' not in name and '~' not in name:
-                            real_path_list.append(os.path.join(root, name))
-                            break
-                for real_path in real_path_list:
-                    one_file_to_mysql(
-                        file=real_path,
-                        db_name=db_name,
-                        table_name=table_name,
-                        target_service=target_service,
-                        database='mysql'
-                    )
-            else:  # 上传全部文件夹
-                upload_dir(
-                    path=os.path.join(path, sub_path),
-                    db_name=db_name,
-                    collection_name=table_name,
-                    dbs={'mysql': True, 'mongodb': False},
-                    target_service=target_service,
-                )
-            data.update({'入库进度': 1})  # 更新进度为已上传
-    # 将进度信息写回文件
-    df = pd.DataFrame.from_dict(datas, orient='columns')
-    df.to_csv(os.path.join(support_file, filename), encoding='utf-8_sig', index=False, header=True)
-
-
 def test():
-    path = os.path.relpath(r'/Users/xigua
+    path = os.path.relpath(r'/Users/xigua/Downloads/直播间')
+    results = []
     for root, dirs, files in os.walk(path, topdown=False):
         for name in files:
             if name.endswith('.csv') and 'baidu' not in name and '~' not in name:
                 # print(name)
                 # df = pd.read_excel(os.path.join(root, name), header=0)
                 df = pd.read_csv(os.path.join(root, name), encoding='utf-8_sig', header=0, na_filter=False)
-                print(name)
+                # print(name)
                 if len(df) == 0:
-                    print(name)
-                    os.remove(os.path.join(root, name))
                     continue
-                df
-                #
-                #
-
-
-
-
-
-
-
-
-
-
-
-
-
-                # new_name = f'{os.path.splitext(name)[0]}.xlsx'
-                # df.to_excel(os.path.join(root, name),
-                #             index=False, header=True, engine='openpyxl', freeze_panes=(1, 0))
-                # break
-        # break
+                results.append(df)
+    # df = df[df['日期'] != '']
+    # df.to_csv(os.path.join(root, name), encoding='utf-8_sig', index=False, header=True)
+    df = pd.concat(results)
+
+    df.rename(columns={
+        '订单id': '计划ID',
+        '投资回报率': '投入产出比',
+        '直接投资回报率': '直接成交投入产出比',
+        '预售金额': '直接成交投入产出比',
+        '直接投资回报率': '总预售成交金额',
+        '预售订单量': '总预售成交笔数',
+        # '商品点击量': '展现量',
+        # '商品点击率': '观看率',
+    }, inplace=True)
+
+    df.to_csv(os.path.join('/Users/xigua/Downloads/', f'py_xg_tg_report_超级直播报表_人群_万里马官方旗舰店_.csv'), encoding='utf-8_sig', index=False, header=True)
 
 
 if __name__ == '__main__':
-    username
-
-
+    username = 'root'
+    password = ''
+    host = ''
+    port = ''
 
-    #
-
-
-
-
-    # target_service='home_lx',
-    # database='mysql'
-    # )
-    #
-    # 上传一个目录到指定数据库
-    db_name = '京东数据3'
-    table_name = '京东商智_店铺来源'
-    upload_dir(
-        path=os.path.relpath(r'/Users/xigua/数据中心/原始文件3/京东报表/店铺来源_三级来asdasdas源'),
-        db_name=db_name,
-        collection_name=table_name,
-        dbs={'mysql': True, 'mongodb': False},
-        target_service='company',
+    # 上传 1 个文件到数据库
+    one_file_to_mysql(
+        file=r'/Users/xi
+        db_name='推广数据2',
+        table_name='超级直播',
     )
 
+    # # 上传一个目录到指定数据库
+    # db_name = '推广数据2'
+    # table_name = '超级直播'
+    # upload_dir(
+    #     path=os.path.relpath(r'/Users/xigua/数据中心/原始文件3/天猫推广报表/超级直播报表_人群/2024-04'),
+    #     db_name=db_name,
+    #     collection_name=table_name,
+    # )
+
+
     # test()
+
```

(The `file=r'/Users/xi` line is truncated in the extracted page and is reproduced as-is; several removed lines in this hunk were blank or lost their content in extraction.)
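Net effect of this file's changes: `upload_dir` and `one_file_to_mysql` no longer accept `target_service`/`database`/`dbs` arguments and instead rely on the module-level credentials selected at import. A hedged usage sketch under that assumption; the paths are illustrative and both calls require a reachable MySQL plus a hostname known to the config block:

```python
# Usage sketch for the 2.8.1 API (not from the package's docs).
from mdbq.aggregation import aggregation

# Single file: service/database are no longer parameters.
aggregation.one_file_to_mysql(
    file='/path/to/report.csv',   # illustrative path
    db_name='推广数据2',
    table_name='超级直播',
)

# Whole directory: the dbs/target_service arguments are gone as well.
aggregation.upload_dir(
    path='/path/to/folder',       # illustrative path
    db_name='推广数据2',
    collection_name='超级直播',
)
```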
mdbq/aggregation/query_data.py CHANGED

```diff
@@ -669,6 +669,44 @@ class MysqlDatasQuery:
         df_tm_pxb.rename(columns={'报表类型': '营销场景', '消耗': '花费'}, inplace=True)
         df_tm_pxb['营销场景'] = '品销宝'
 
+        # 因为 2024.04.16及之前的营销场景报表不含超级直播,所以在此添加
+        if start_date < pd.to_datetime('2024-04-17'):
+            projection = {
+                '日期': 1,
+                '场景名字': 1,
+                '花费': 1,
+                '展现量': 1,
+                '观看次数': 1,
+                '总购物车数': 1,
+                '总成交笔数': 1,
+                '总成交金额': 1,
+                '店铺名称': 1,
+            }
+            df_tm_living = self.download.data_to_df(
+                db_name='推广数据2',
+                table_name='超级直播',
+                start_date=start_date,
+                end_date=pd.to_datetime('2024-04-16'),  # 只可以取此日期之前的数据
+                projection=projection,
+            )
+            if len(df_tm_living) > 0:
+                df_tm_living.rename(columns={'场景名字': '营销场景'}, inplace=True)
+                df_tm_living = df_tm_living.groupby(
+                    ['日期', '店铺名称', '营销场景', '花费'],
+                    as_index=False).agg(
+                    **{
+                        '展现量': ('展现量', np.max),
+                        '点击量': ('观看次数', np.max),
+                        '加购量': ('总购物车数', np.max),
+                        '成交笔数': ('总成交笔数', np.max),
+                        '成交金额': ('总成交金额', np.max)
+                    }
+                )
+            else:
+                df_tm_living = pd.DataFrame()
+        else:
+            df_tm_living = pd.DataFrame()
+
         projection = {
             '日期': 1,
             '产品线': 1,
```
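The added block builds `df_tm_living` with pandas named aggregation, unpacking a dict of `output_column: (source_column, reducer)` pairs into `agg()`. A minimal sketch of that pattern with made-up rows; note that recent pandas versions prefer the string `'max'` over the `np.max` the released code passes (both work, the latter just warns on newer versions):

```python
import pandas as pd

df = pd.DataFrame({
    '日期': ['2024-04-01', '2024-04-01', '2024-04-02'],
    '店铺名称': ['万里马官方旗舰店'] * 3,
    '营销场景': ['超级直播'] * 3,
    '花费': [100.0, 100.0, 80.0],
    '展现量': [1000, 1200, 900],
    '观看次数': [50, 60, 40],
})

# Named aggregation: each output column maps to (source column, reducer),
# so renaming and reducing happen in one step.
out = df.groupby(['日期', '店铺名称', '营销场景', '花费'], as_index=False).agg(
    **{
        '展现量': ('展现量', 'max'),
        '点击量': ('观看次数', 'max'),
    }
)
print(out)
```

The result then only has to be appended to the `_datas` concat list, which is exactly what the next hunk does.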
```diff
@@ -741,7 +779,7 @@ class MysqlDatasQuery:
         df_jd_qzyx = df_jd_qzyx[['日期', '店铺名称', '营销场景', '花费', '展现量', '点击量', '成交笔数', '成交金额']]
         df_jd_qzyx = df_jd_qzyx[df_jd_qzyx['花费'] > 0]
 
-        _datas = [item for item in [df_tm, df_tb, df_tm_pxb, df_jd, df_jd_qzyx] if len(item) > 0]  # 阻止空的 dataframe
+        _datas = [item for item in [df_tm, df_tb, df_tm_pxb, df_tm_living, df_jd, df_jd_qzyx] if len(item) > 0]  # 阻止空的 dataframe
         df = pd.concat(_datas, axis=0, ignore_index=True)
         return df
 
@@ -2277,7 +2315,7 @@ def main():
 
 if __name__ == '__main__':
     data_aggregation(
-        months=
+        months=15,
         is_juhe=True,  # 生成聚合表
-        # less_dict=['
+        # less_dict=['多店推广场景_按日聚合'],  # 单独聚合某一个数据库
     )
```

(The two removed lines in the last hunk are truncated in the extracted page and are reproduced as-is.)
mdbq/spider/aikucun.py CHANGED

```diff
@@ -96,7 +96,10 @@ def get_cookie_aikucun():
 
     # 将cookies保存为json格式
     cookies_list = _driver.get_cookies()
-
+    for cookie in cookies_list:
+        # 该字段有问题所以删除就可以
+        if 'expiry' in cookie:
+            del cookie['expiry']
     json_file = os.path.join(cookie_path, filename_aikucun)
     with open(json_file, 'w', encoding='utf-8') as f:
         json.dump(cookies_list, f, ensure_ascii=False, sort_keys=True, indent=4)
```
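Selenium's `get_cookies()` can include an `expiry` key in each cookie dict, and the new loop strips it before the JSON dump; the inline comment only says the field is problematic, which matches the common failure where `expiry` breaks a later `add_cookie()` round-trip. A hedged sketch of the reverse direction, reading the saved file back into a driver; `load_cookies` and its arguments are illustrative helpers, not part of the package:

```python
import json

def load_cookies(driver, json_file):
    """Re-inject cookies saved by get_cookie_aikucun(); 'expiry' was already stripped."""
    with open(json_file, 'r', encoding='utf-8') as f:
        cookies_list = json.load(f)
    for cookie in cookies_list:
        cookie.pop('expiry', None)  # defensive: drop it again if present
        driver.add_cookie(cookie)   # standard Selenium WebDriver API
```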
```diff
@@ -216,7 +219,7 @@ class AikuCun:
         today = datetime.date.today()
         for date_s in range(date_num):
             new_date = today - datetime.timedelta(days=date_s)  # 会用作文件名
-            print(f'正在下载爱库存文件 {date_s}/{date_num}: {new_date}')
+            print(f'正在下载爱库存文件 {date_s+1}/{date_num}: {new_date}')
             str_date = str(new_date)[2:]
             wait = WebDriverWait(_driver, timeout=15)  #
             elements = _driver.find_elements(
@@ -361,7 +364,7 @@ class AikuCunNew:
 
 if __name__ == '__main__':
     pass
-    get_cookie_aikucun()
+    # get_cookie_aikucun()
     akucun()
 
     # a = AikuCunNew(shop_name='aikucun')
```
{mdbq-2.7.9.dist-info → mdbq-2.8.1.dist-info}/RECORD CHANGED

```diff
@@ -1,11 +1,11 @@
 mdbq/__init__.py,sha256=Il5Q9ATdX8yXqVxtP_nYqUhExzxPC_qk_WXQ_4h0exg,16
 mdbq/__version__.py,sha256=y9Mp_8x0BCZSHsdLT_q5tX9wZwd5QgqrSIENLrb6vXA,62
 mdbq/aggregation/__init__.py,sha256=EeDqX2Aml6SPx8363J-v1lz0EcZtgwIBYyCJV6CcEDU,40
-mdbq/aggregation/aggregation.py,sha256=
+mdbq/aggregation/aggregation.py,sha256=FsKrOTCgwUgIuTKNf6rJSrV1g58al9qZ-yLTokWU_YY,72134
 mdbq/aggregation/df_types.py,sha256=U9i3q2eRPTDY8qAPTw7irzu-Tlg4CIySW9uYro81wdk,8125
 mdbq/aggregation/mysql_types.py,sha256=2nMEeGGJXdxC3kp0xz2DJ3q6-1rfxi3GkRgDnsKyuNI,10934
 mdbq/aggregation/optimize_data.py,sha256=79uwiM2WqNNFxGpE2wKz742PRq-ZGgFjdOV0vgptHdY,3513
-mdbq/aggregation/query_data.py,sha256=
+mdbq/aggregation/query_data.py,sha256=eWAVdrNgUZgZGS_QNIdLS5Cin3MUHqwcKIzX5VQ4E4E,100065
 mdbq/bdup/__init__.py,sha256=AkhsGk81SkG1c8FqDH5tRq-8MZmFobVbN60DTyukYTY,28
 mdbq/bdup/bdup.py,sha256=LAV0TgnQpc-LB-YuJthxb0U42_VkPidzQzAagan46lU,4234
 mdbq/clean/__init__.py,sha256=A1d6x3L27j4NtLgiFV5TANwEkLuaDfPHDQNrPBbNWtU,41
@@ -44,8 +44,8 @@ mdbq/pbix/refresh_all_old.py,sha256=_pq3WSQ728GPtEG5pfsZI2uTJhU8D6ra-htIk1JXYzw,
 mdbq/req_post/__init__.py,sha256=jso1oHcy6cJEfa7udS_9uO5X6kZLoPBF8l3wCYmr5dM,18
 mdbq/req_post/req_tb.py,sha256=PexWSCPJNM6Tv0ol4lAWIhlOwsAr_frnjtcdSHCFiek,36179
 mdbq/spider/__init__.py,sha256=RBMFXGy_jd1HXZhngB2T2XTvJqki8P_Fr-pBcwijnew,18
-mdbq/spider/aikucun.py,sha256=
-mdbq-2.
-mdbq-2.
-mdbq-2.
-mdbq-2.
+mdbq/spider/aikucun.py,sha256=jHrdGWBJQaSywx7V-U4YuM6vWkwC5SR5tTOOdB3YU_c,17306
+mdbq-2.8.1.dist-info/METADATA,sha256=1w7s0_GV9KpA2rz5V-ExJa8TXITolje1BkQWZp-n6ww,243
+mdbq-2.8.1.dist-info/WHEEL,sha256=eOLhNAGa2EW3wWl_TU484h7q1UNgy0JXjjoqKoxAAQc,92
+mdbq-2.8.1.dist-info/top_level.txt,sha256=2FQ-uLnCSB-OwFiWntzmwosW3X2Xqsg0ewh1axsaylA,5
+mdbq-2.8.1.dist-info/RECORD,,
```

(The removed 2.7.9 hashes and filenames are truncated in the extracted page and are reproduced as-is.)
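For reference, each RECORD row is `path,sha256=<digest>,<size>`, where the digest is the urlsafe base64 encoding of the file's SHA-256 with the trailing `=` padding removed (per the wheel spec, PEP 427). A sketch that rebuilds one line from a file on disk; the example path is illustrative:

```python
import base64
import hashlib

def record_entry(path):
    """Build a wheel RECORD line for one file: path,sha256=<digest>,<size>."""
    with open(path, 'rb') as f:
        data = f.read()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b'=').decode()
    return f'{path},sha256={digest},{len(data)}'

# e.g. record_entry('mdbq/spider/aikucun.py') should reproduce the row above
# when run against the unpacked 2.8.1 wheel.
```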
{mdbq-2.7.9.dist-info → mdbq-2.8.1.dist-info}/WHEEL: file without changes

{mdbq-2.7.9.dist-info → mdbq-2.8.1.dist-info}/top_level.txt: file without changes