mdbq 2.1.1__tar.gz → 2.1.3__tar.gz
This diff shows the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as published.
- {mdbq-2.1.1 → mdbq-2.1.3}/PKG-INFO +1 -1
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/aggregation/aggregation.py +1 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/aggregation/query_data.py +42 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/mysql/mysql.py +39 -2
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/other/sku_picture.py +148 -3
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq.egg-info/PKG-INFO +1 -1
- {mdbq-2.1.1 → mdbq-2.1.3}/setup.py +1 -1
- {mdbq-2.1.1 → mdbq-2.1.3}/README.txt +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/__init__.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/__version__.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/aggregation/__init__.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/aggregation/df_types.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/aggregation/mysql_types.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/aggregation/optimize_data.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/bdup/__init__.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/bdup/bdup.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/clean/__init__.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/clean/data_clean.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/company/__init__.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/company/copysh.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/company/home_sh.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/config/__init__.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/config/get_myconf.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/config/products.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/config/set_support.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/config/update_conf.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/dataframe/__init__.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/dataframe/converter.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/log/__init__.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/log/mylogger.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/mongo/__init__.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/mongo/mongo.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/mysql/__init__.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/mysql/s_query.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/mysql/year_month_day.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/other/__init__.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/other/porxy.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/other/pov_city.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/other/ua_sj.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/pbix/__init__.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/pbix/pbix_refresh.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/pbix/refresh_all.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq/spider/__init__.py +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq.egg-info/SOURCES.txt +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq.egg-info/dependency_links.txt +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/mdbq.egg-info/top_level.txt +0 -0
- {mdbq-2.1.1 → mdbq-2.1.3}/setup.cfg +0 -0
mdbq/aggregation/query_data.py
@@ -421,6 +421,27 @@ class MysqlDatasQuery:
             projection=projection,
         )
         return df
+    def spu_sales(self):
+        start_date, end_date = self.months_data(num=self.months)
+        projection = {
+            '日期': 1,
+            '商品id': 1,
+            '货号': 1,
+            '成交单量': 1,
+            '成交金额': 1,
+            '访客数': 1,
+            '成交客户数': 1,
+            '加购商品件数': 1,
+            '加购人数': 1,
+        }
+        df = self.download.data_to_df(
+            db_name='京东数据2',
+            table_name='spu_商品明细',
+            start_date=start_date,
+            end_date=end_date,
+            projection=projection,
+        )
+        return df
 
     @staticmethod
     def months_data(num=0, end_date=None):
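The new MysqlDatasQuery.spu_sales method is an SPU-level twin of the existing sku_sales query, reading from 京东数据2.spu_商品明细. A minimal sketch of how it is driven, assuming `sdq` is the MysqlDatasQuery instance that data_aggregation() constructs later in this file (its constructor arguments are not visible in this diff):

    # Sketch only: `sdq` is assumed to be a configured MysqlDatasQuery instance.
    sdq.months = 1                # spu_sales() derives its window via months_data(num=self.months)
    df = sdq.spu_sales()          # DataFrame read from 京东数据2.spu_商品明细
    print(df[['日期', '商品id', '成交金额']].head())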
@@ -1190,6 +1211,21 @@ class GroupBy:
                 }
             )
             return df
+        elif '京东_spu_商品明细' in table_name:
+            df = df[df['商品id'] != '合计']
+            df = df.groupby(['日期', '商品id', '货号', '访客数', '成交客户数', '加购商品件数', '加购人数'],
+                            as_index=False).agg(
+                **{
+                    '成交单量': ('成交单量', np.max),
+                    '成交金额': ('成交金额', np.max),
+                }
+            )
+            self.data_jdtg.update(
+                {
+                    table_name: df,
+                }
+            )
+            return df
         elif '京东_关键词报表' in table_name:
             df_lin = df[['计划id', '推广计划']]
             df_lin.drop_duplicates(subset=['计划id'], keep='last', inplace=True, ignore_index=True)
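The new branch deduplicates SPU rows with pandas named aggregation, passing np.max as the reducer. Recent pandas releases (2.1+) emit a FutureWarning when NumPy callables are handed to .agg and recommend the string alias instead; a self-contained equivalent of the same shape using 'max':

    import pandas as pd

    # Same named-aggregation pattern as the diff above, with string aliases
    # instead of np.max (avoids the pandas 2.1+ FutureWarning).
    df = pd.DataFrame({
        '日期': ['2024-01-01', '2024-01-01'],
        '商品id': ['100', '100'],
        '成交单量': [3, 5],
        '成交金额': [30.0, 50.0],
    })
    out = df.groupby(['日期', '商品id'], as_index=False).agg(
        **{
            '成交单量': ('成交单量', 'max'),
            '成交金额': ('成交金额', 'max'),
        }
    )
    print(out)   # one row per (日期, 商品id) with the max values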
@@ -1580,6 +1616,12 @@ def data_aggregation(service_databases=[{}], months=1):
             '唯一主键': ['日期', '商品id', '成交单量'],
             '数据主体': sdq.sku_sales(),
         },
+        {
+            '数据库名': '聚合数据',
+            '集合名': '京东_spu_商品明细',
+            '唯一主键': ['日期', '商品id', '成交单量'],
+            '数据主体': sdq.spu_sales(),
+        },
         {
             '数据库名': '聚合数据',
             '集合名': '天猫_人群报表',
mdbq/mysql/mysql.py
@@ -68,8 +68,8 @@ class MysqlUpload:
 
         return wrapper
 
-    @try_except
-    def df_to_mysql(self, df, table_name, db_name='远程数据源', icm_update=[], service_database={'home_lx': 'mysql'}, move_insert=False, df_sql=False, drop_duplicates=False, filename=None, count=None, json_path=None):
+    # @try_except
+    def df_to_mysql(self, df, table_name, db_name='远程数据源', icm_update=[], service_database={'home_lx': 'mysql'}, move_insert=False, df_sql=False, drop_duplicates=False, filename=None, count=None, json_path=None, reset_id=False):
         """
         Write the df to the database
         db_name: database name
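Two things change here: the @try_except decorator is commented out, so errors inside df_to_mysql now propagate (or are caught by the new inline try/except blocks below), and the signature gains a reset_id flag that triggers the auto-increment rebuild added further down. A hedged call sketch; credentials and the DataFrame are placeholders:

    import pandas as pd
    from mdbq.mysql import mysql

    # Sketch only: placeholder credentials and data.
    df = pd.DataFrame({'日期': ['2024-01-01'], '商品id': ['100']})
    m = mysql.MysqlUpload(username='user', password='pw', host='127.0.0.1', port=3306)
    m.df_to_mysql(
        df=df,
        db_name='属性设置2',
        table_name='商品spu素材下载记录',
        move_insert=True,                       # delete matching rows first, then insert
        reset_id=True,                          # new in 2.1.3: renumber the id column afterwards
        service_database={'company': 'mysql'},
    )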
@@ -167,6 +167,17 @@ class MysqlUpload:
                     index=False,
                     chunksize=1000
                 )
+                try:
+                    cursor.execute(f"SHOW COLUMNS FROM {table_name} LIKE 'id'")
+                    result = cursor.fetchone()
+                    if result:
+                        cursor.execute(f"ALTER TABLE {table_name} DROP COLUMN id;")  # drop the id column
+                        cursor.execute(
+                            f"ALTER TABLE {table_name} ADD column id INT AUTO_INCREMENT PRIMARY KEY FIRST;")
+                        cursor.execute(f"ALTER TABLE {table_name} AUTO_INCREMENT = 1")  # restart AUTO_INCREMENT at 1
+                except Exception as e:
+                    print(f'{e}')
+                    connection.rollback()
                 connection.close()
                 return
 
@@ -205,6 +216,19 @@ class MysqlUpload:
                     index=False,
                     chunksize=1000
                 )
+                # 6. Reset the auto-increment id column
+                if reset_id:
+                    try:
+                        cursor.execute(f"SHOW COLUMNS FROM {table_name} LIKE 'id'")
+                        result = cursor.fetchone()
+                        if result:
+                            cursor.execute(f"ALTER TABLE {table_name} DROP COLUMN id;")  # drop the id column
+                            cursor.execute(
+                                f"ALTER TABLE {table_name} ADD column id INT AUTO_INCREMENT PRIMARY KEY FIRST;")
+                            cursor.execute(f"ALTER TABLE {table_name} AUTO_INCREMENT = 1")  # restart AUTO_INCREMENT at 1
+                    except Exception as e:
+                        print(f'{e}')
+                        connection.rollback()
                 connection.close()
                 return
 
@@ -293,6 +317,19 @@ class MysqlUpload:
                     # print(values)
                     print(f'mysql -> df_to_mysql 报错: {e}, {self.filename}')
                     # breakpoint()
+
+            # 6. Reset the auto-increment id column
+            try:
+                cursor.execute(f"SHOW COLUMNS FROM {table_name} LIKE 'id'")
+                result = cursor.fetchone()
+                if result:
+                    cursor.execute(f"ALTER TABLE {table_name} DROP COLUMN id;")  # drop the id column
+                    cursor.execute(
+                        f"ALTER TABLE {table_name} ADD column id INT AUTO_INCREMENT PRIMARY KEY FIRST;")
+                    cursor.execute(f"ALTER TABLE {table_name} AUTO_INCREMENT = 1")  # restart AUTO_INCREMENT at 1
+            except Exception as e:
+                print(f'{e}')
+                connection.rollback()
         connection.commit()  # commit the transaction
         connection.close()
 
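All three insertion paths gain the same pattern: probe for an `id` column, drop it, and re-add it as an AUTO_INCREMENT primary key so rows are renumbered from 1. A standalone sketch of that pattern with pymysql (connection parameters are placeholders; the package manages its own connections):

    import pymysql

    # Placeholder credentials; the statements mirror the diff above.
    connection = pymysql.connect(host='127.0.0.1', port=3306, user='user',
                                 password='pw', database='test_db', charset='utf8mb4')
    table_name = 'demo_table'
    try:
        with connection.cursor() as cursor:
            cursor.execute(f"SHOW COLUMNS FROM {table_name} LIKE 'id'")
            if cursor.fetchone():  # rebuild only if an id column exists
                cursor.execute(f"ALTER TABLE {table_name} DROP COLUMN id;")
                cursor.execute(
                    f"ALTER TABLE {table_name} ADD COLUMN id INT AUTO_INCREMENT PRIMARY KEY FIRST;")
                cursor.execute(f"ALTER TABLE {table_name} AUTO_INCREMENT = 1")
        connection.commit()
    except Exception as e:
        print(e)
        connection.rollback()
    finally:
        connection.close()

Note that ALTER TABLE is DDL in MySQL and causes an implicit commit, so the rollback() in the error path is best-effort: it cannot undo a partially applied rebuild.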
mdbq/other/sku_picture.py
@@ -11,6 +11,7 @@ import time
 import warnings
 import pandas as pd
 from lxml import etree
+from rich.pretty import pretty_repr
 from selenium import webdriver
 from selenium.webdriver.support.wait import WebDriverWait
 from selenium.webdriver.common.by import By
@@ -501,7 +502,7 @@ class DownloadPicture():
         self.download = s_query.QueryDatas(username=username, password=password, host=host, port=port)
         self.df = pd.DataFrame()
         self.headers = {'User-Agent': ua_sj.get_ua()}
-        self.save_path = '
+        self.save_path = os.path.join(D_PATH, 'sku图片链接')
         self.filename = ''
         if not os.path.exists(self.save_path):
             os.mkdir(self.save_path)
@@ -582,6 +583,34 @@ class DownloadPicture():
             i += 1
             time.sleep(0.5)
 
+    def download_from_df(self, col_name='商品图片'):
+        if not os.path.exists(self.save_path):
+            os.mkdir(self.save_path)
+        dict_data = self.df.to_dict('records')
+        num = len(dict_data)
+        i = 1
+        for data in dict_data:
+            url = data[col_name]
+            # self.filename = f'{data['店铺名称']}_{data['商品id']}_{data['商家编码']}.jpg'
+            self.filename = f'{data['商品id']}_{data['商家编码']}.jpg'
+            if os.path.isfile(os.path.join(self.save_path, self.filename)):
+                self.finish_download.append(data['商品id'])
+                i += 1
+                continue
+            if 'https' not in url:
+                i += 1
+                continue
+
+            print(f'正在下载: {i}/{num}, {data['商品id']}')
+            self.headers.update({'User-Agent': ua_sj.get_ua()})
+            res = requests.get(url, headers=self.headers)  # download the image into memory
+            # save the image to the local folder
+            with open(os.path.join(self.save_path, self.filename), 'wb') as f:
+                f.write(res.content)
+            self.finish_download.append(data['商品id'])
+            i += 1
+            time.sleep(0.5)
+
     @staticmethod
     def months_data(num=0, end_date=None):
         """ Read the last num months of data; 0 means the current month """
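download_from_df walks self.df row by row, skips images already on disk and rows without an https URL, and appends each handled 商品id to self.finish_download so the caller can mark rows as downloaded. Note that the nested single quotes inside the f-strings (f'{data['商品id']}_...') require Python 3.12+ (PEP 701); on older interpreters they are a SyntaxError. A hedged usage sketch mirroring how download_spu() drives it below:

    # Sketch only: `df` must carry 商品id, 商家编码 and the URL column named by col_name;
    # DownloadPicture and D_PATH come from sku_picture.py itself.
    d = DownloadPicture(service_name='company')
    d.save_path = os.path.join(D_PATH, '商品id_商家编码_图片')
    d.df = df
    d.download_from_df(col_name='商品图片')
    print(d.finish_download)   # ids whose images are now on disk (or already were)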
@@ -783,7 +812,7 @@ def main3():
     p.insert_data()
 
 
-def 
+def download_sku(service_name='company', database='mysql', db_name='属性设置2', table_name='商品素材下载记录', col_name='sku图片链接'):
     """ Get product id info from the database """
     # instantiate a downloader class
     username, password, host, port = get_myconf.select_config_values(target_service=service_name, database=database)
|
|
885
914
|
) # 3. 回传数据库
|
886
915
|
|
887
916
|
|
917
|
+
def download_spu(service_name='company', database='mysql', db_name='属性设置2', table_name='商品spu素材下载记录', col_name='商品图片'):
|
918
|
+
"""
|
919
|
+
|
920
|
+
"""
|
921
|
+
|
922
|
+
# 1. 从商品素材导出中获取数据
|
923
|
+
username, password, host, port = get_myconf.select_config_values(target_service=service_name, database=database)
|
924
|
+
download = s_query.QueryDatas(username=username, password=password, host=host, port=port)
|
925
|
+
projection = {
|
926
|
+
'店铺名称': 1,
|
927
|
+
'商品id': 1,
|
928
|
+
'商品标题': 1,
|
929
|
+
'商品状态': 1,
|
930
|
+
'商品白底图': 1,
|
931
|
+
'方版场景图': 1,
|
932
|
+
'日期':1,
|
933
|
+
}
|
934
|
+
df = download.data_to_df(
|
935
|
+
db_name='属性设置2',
|
936
|
+
table_name='商品素材导出',
|
937
|
+
start_date='2019-01-01',
|
938
|
+
end_date='2099-12-31',
|
939
|
+
projection=projection,
|
940
|
+
)
|
941
|
+
df['商品id'] = df['商品id'].astype('int64')
|
942
|
+
df['日期'] = df['日期'].astype('datetime64[ns]')
|
943
|
+
df = df[(df['商品白底图'] != '0') | (df['方版场景图'] != '0')]
|
944
|
+
# 白底图优先
|
945
|
+
df['商品图片'] = df[['商品白底图', '方版场景图']].apply(
|
946
|
+
lambda x: x['商品白底图'] if x['商品白底图'] != '0' else x['方版场景图'], axis=1)
|
947
|
+
# # 方版场景图优先
|
948
|
+
# df['商品图片'] = df[['商品白底图', '方版场景图']].apply(
|
949
|
+
# lambda x: x['方版场景图'] if x['方版场景图'] != '0' else x['商品白底图'], axis=1)
|
950
|
+
df.sort_values(by=['商品id', '日期'], ascending=[False, True], ignore_index=True, inplace=True)
|
951
|
+
df.drop_duplicates(subset=['商品id'], keep='last', inplace=True, ignore_index=True)
|
952
|
+
# df = df[['商品id', '商品图片', '日期']]
|
953
|
+
df['商品图片'] = df['商品图片'].apply(lambda x: x if 'http' in x else None) # 检查是否是 http 链接
|
954
|
+
df.dropna(how='all', subset=['商品图片'], axis=0, inplace=True) # 删除指定列含有空值的行
|
955
|
+
df['商品链接'] = df['商品id'].apply(
|
956
|
+
lambda x: f'https://detail.tmall.com/item.htm?id={str(x)}' if x and '.com' not in str(x) else x)
|
957
|
+
df.sort_values(by='商品id', ascending=False, ignore_index=True, inplace=True) # ascending=False 降序排列
|
958
|
+
df['商品id'] = df['商品id'].astype(str)
|
959
|
+
# df = df.head(2)
|
960
|
+
|
961
|
+
# 2. 从商品id编码表 中获取数据
|
962
|
+
projection = {
|
963
|
+
'宝贝id': 1,
|
964
|
+
'商家编码': 1,
|
965
|
+
}
|
966
|
+
df_spbm = download.data_to_df(
|
967
|
+
db_name='聚合数据',
|
968
|
+
table_name='商品id编码表',
|
969
|
+
start_date='2019-01-01',
|
970
|
+
end_date='2099-12-31',
|
971
|
+
projection=projection,
|
972
|
+
)
|
973
|
+
df_spbm.drop_duplicates(subset=['宝贝id'], keep='last', inplace=True, ignore_index=True)
|
974
|
+
# 合并两个表
|
975
|
+
df = pd.merge(df, df_spbm, left_on=['商品id'], right_on=['宝贝id'], how='left')
|
976
|
+
df.pop('宝贝id')
|
977
|
+
df['获取与下载'] = '已获取'
|
978
|
+
df['时间'] = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
|
979
|
+
# df.to_csv(os.path.join(D_PATH, f'{col_name}.csv'), index=False, header=True, encoding='utf-8_sig')
|
980
|
+
if '方版场景图' in df.columns.tolist():
|
981
|
+
df['方版场景图'] = df['方版场景图'].astype(str)
|
982
|
+
|
983
|
+
# 3. 更新数据库
|
984
|
+
username, password, host, port = get_myconf.select_config_values(target_service=service_name, database=database)
|
985
|
+
m = mysql.MysqlUpload(username=username, password=password, host=host, port=port)
|
986
|
+
m.df_to_mysql(
|
987
|
+
df=df,
|
988
|
+
db_name=db_name,
|
989
|
+
table_name=table_name,
|
990
|
+
move_insert=True, # 先删除,再插入
|
991
|
+
df_sql=False,
|
992
|
+
drop_duplicates=False,
|
993
|
+
icm_update=[],
|
994
|
+
service_database={service_name: database},
|
995
|
+
) # 3. 回传数据库
|
996
|
+
|
997
|
+
# 4. 从数据库中提取未下载的数据
|
998
|
+
projection = {}
|
999
|
+
df_before = download.data_to_df(
|
1000
|
+
db_name=db_name,
|
1001
|
+
table_name=table_name,
|
1002
|
+
start_date='2019-01-01',
|
1003
|
+
end_date='2099-12-31',
|
1004
|
+
projection=projection,
|
1005
|
+
)
|
1006
|
+
df = df_before[df_before['获取与下载'] != '已下载']
|
1007
|
+
|
1008
|
+
if len(df) > 0:
|
1009
|
+
# 5. 实例化一个下载器类,并下载数据
|
1010
|
+
d = DownloadPicture(service_name=service_name)
|
1011
|
+
d.save_path = os.path.join(D_PATH, '商品id_商家编码_图片') # 下载图片到本地时的存储位置
|
1012
|
+
d.filename = f'{db_name}_{table_name}.xlsx'
|
1013
|
+
d.df = df
|
1014
|
+
d.download_from_df(col_name=col_name)
|
1015
|
+
df['获取与下载'] = df.apply(lambda x: '已下载' if x['商品id'] in d.finish_download else x['获取与下载'], axis=1)
|
1016
|
+
|
1017
|
+
# 6. 回传数据库
|
1018
|
+
username, password, host, port = get_myconf.select_config_values(target_service=service_name, database=database)
|
1019
|
+
m = mysql.MysqlUpload(username=username, password=password, host=host, port=port)
|
1020
|
+
m.df_to_mysql(
|
1021
|
+
df=df,
|
1022
|
+
db_name=db_name,
|
1023
|
+
table_name=table_name,
|
1024
|
+
move_insert=True, # 先删除,再插入
|
1025
|
+
df_sql=False,
|
1026
|
+
drop_duplicates=False,
|
1027
|
+
icm_update=[],
|
1028
|
+
service_database={service_name: database},
|
1029
|
+
reset_id=True,
|
1030
|
+
) # 3. 回传数据库
|
1031
|
+
|
888
1032
|
|
889
1033
|
if __name__ == '__main__':
|
890
1034
|
# main(service_name='home_lx', database='mysql')
|
891
1035
|
# main2(service_name='home_lx', database='mysql')
|
892
1036
|
# main3()
|
893
|
-
|
1037
|
+
# download_sku(service_name='company', database='mysql', db_name='属性设置2', table_name='商品素材下载记录')
|
1038
|
+
download_spu(service_name='company', database='mysql', db_name='属性设置2', table_name='商品spu素材下载记录', col_name='商品图片')
|