mdbq-3.3.12-py3-none-any.whl → mdbq-3.3.14-py3-none-any.whl

This diff compares publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
mdbq/aggregation/datashow.py ADDED
@@ -0,0 +1,433 @@
+ # -*- coding: UTF-8 -*-
+ import os
+ import socket
+ import platform
+ import datetime
+ import time
+ from mdbq.config import myconfig
+ from mdbq.mysql import mysql
+ from mdbq.mysql import s_query
+ import pandas as pd
+ import numpy as np
+ import plotly.express as px
+ import plotly.graph_objects as go
+ from plotly.subplots import make_subplots
+ import tkinter as tk
+
+ from sqlalchemy.sql.functions import count
+
+ m_engine = mysql.MysqlUpload(username='', password='', host='', port=0, charset='utf8mb4')
+ company_engine = mysql.MysqlUpload(username='', password='', host='', port=0, charset='utf8mb4')
+
+ if socket.gethostname() == 'company' or socket.gethostname() == 'Mac2.local':
+     conf = myconfig.main()
+     conf_data = conf['Windows']['xigua_lx']['mysql']['remoto']
+     username, password, host, port = conf_data['username'], conf_data['password'], conf_data['host'], conf_data['port']
+     m_engine = mysql.MysqlUpload(
+         username=username,
+         password=password,
+         host=host,
+         port=port,
+         charset='utf8mb4'
+     )
+     conf_data = conf['Windows']['company']['mysql']['local']
+     username, password, host, port = conf_data['username'], conf_data['password'], conf_data['host'], conf_data['port']
+     company_engine = mysql.MysqlUpload(
+         username=username,
+         password=password,
+         host=host,
+         port=port,
+         charset='utf8mb4'
+     )
+     targe_host = 'company'
+
+ else:
+     conf = myconfig.main()
+
+     conf_data = conf['Windows']['company']['mysql']['remoto']
+     username, password, host, port = conf_data['username'], conf_data['password'], conf_data['host'], conf_data['port']
+     company_engine = mysql.MysqlUpload(
+         username=username,
+         password=password,
+         host=host,
+         port=port,
+         charset='utf8mb4'
+     )
+
+     conf_data = conf['Windows']['xigua_lx']['mysql']['local']
+     username, password, host, port = conf_data['username'], conf_data['password'], conf_data['host'], conf_data['port']
+     m_engine = mysql.MysqlUpload(
+         username=username,
+         password=password,
+         host=host,
+         port=port,
+         charset='utf8mb4'
+     )
+     targe_host = 'xigua_lx'
+
+
+ # def getdata():
+ #     download = s_query.QueryDatas(username=username, password=password, host=host, port=port)
+ #     start_date, end_date = '2024-01-01', '2024-12-20'
+ #     projection = {
+ #         '日期': 1,
+ #         '三级来源': 1,
+ #         '访客数': 1,
+ #     }
+ #     __res = []
+ #     for year in range(2024, datetime.datetime.today().year + 1):
+ #         df = download.data_to_df(
+ #             db_name='聚合数据',
+ #             table_name=f'店铺流量来源构成',
+ #             start_date=start_date,
+ #             end_date=end_date,
+ #             projection=projection,
+ #         )
+ #         __res.append(df)
+ #     df = pd.concat(__res, ignore_index=True)
+ #     return df
+
+
+ class DataShow:
+     def __init__(self):
+         self.path = '/Users/xigua/Downloads'
+         root = tk.Tk()
+         self.screen_width = root.winfo_screenwidth()
+         self.screen_height = root.winfo_screenheight()
+         root.destroy()
+         self.today = datetime.date.today()
+         self.start_date = (self.today - datetime.timedelta(days=15)).strftime('%Y-%m-%d')
+         self.end_date = (self.today - datetime.timedelta(days=1)).strftime('%Y-%m-%d')
+
+     def getdata(self, db_name, table_name, pro_list, start_date=None, end_date=None):
+         download = s_query.QueryDatas(username=username, password=password, host=host, port=port)
+         if not start_date or not end_date:
+             start_date, end_date = '2000-01-01', '2099-12-31'  # pull from the database; must not default to self.start_date
+         projection = {}
+         [projection.update({k: 1}) for k in pro_list]
+         __res = []
+         for year in range(2024, datetime.datetime.today().year + 1):
+             df = download.data_to_df(
+                 db_name=db_name,
+                 table_name=table_name,
+                 start_date=start_date,
+                 end_date=end_date,
+                 projection=projection,
+             )
+             __res.append(df)
+         df = pd.concat(__res, ignore_index=True)
+         return df
+
+     def dpll(self, db_name='聚合数据', table_name='店铺流量来源构成', pro_list=None, filename='店铺流量来源'):
+         if not pro_list:
+             pro_list = ['日期', '三级来源', '访客数']
+         df = self.getdata(db_name=db_name, table_name=table_name, pro_list=pro_list, start_date='2024-11-01', end_date=self.end_date)
+         if len(df) == 0:
+             print(f'数据不能为空: {table_name}')
+             return
+         df = df[df['三级来源'] != '汇总']
+         df['日期'] = pd.to_datetime(df['日期'])
+         today = datetime.date.today()
+
+         def st_date(num=1):
+             return pd.to_datetime(today - datetime.timedelta(days=num))
+
+         df1 = df[df['日期'] >= st_date(1)]
+         df2 = df[df['日期'] >= st_date(7)]
+         df3 = df[df['日期'] >= st_date(30)]
+         df2 = df2.groupby(
+             ['三级来源'],
+             as_index=False).agg(
+             **{
+                 '访客数': ('访客数', np.sum),
+             }
+         )
+         df3 = df3.groupby(
+             ['三级来源'],
+             as_index=False).agg(
+             **{
+                 '访客数': ('访客数', np.sum),
+             }
+         )
+         # print(df)
+         labels1 = df1['三级来源'].tolist()
+         values1 = df1['访客数'].tolist()
+         labels2 = df2['三级来源'].tolist()
+         values2 = df2['访客数'].tolist()
+         labels3 = df3['三级来源'].tolist()
+         values3 = df3['访客数'].tolist()
+
+         def make_sub(data_list, num):
+             # lay the pies out on a 1-row grid with `num` columns
+             t_p = []
+             for i in range(num):
+                 t_p.extend([{"type": "pie"}])
+             fig = make_subplots(rows=1, cols=num, specs=[t_p])
+             pie_title = {1: 1, 2: 7, 3: 30}
+             i = 1
+             for item in data_list:
+                 # compute each slice's share of the total
+                 total = sum(item['值'])
+                 # keep only the slices whose share is above the threshold
+                 threshold_percentage = 0.1  # threshold, in percent
+                 filtered_indices = [i for i, value in enumerate(item['值']) if
+                                     (value / total) * 100 >= threshold_percentage]
+                 # labels and values of the slices that survive the filter
+                 filtered_labels = [item['键'][i] for i in filtered_indices]
+                 filtered_values = [item['值'][i] for i in filtered_indices]
+
+                 # add the pie trace
+                 fig.add_trace(
+                     go.Pie(labels=filtered_labels, values=filtered_values, name=f'pie {i}', textinfo='label+percent'),
+                     row=1, col=i)
+                 # fig.add_trace(go.Pie(labels=item['键'], values=item['值'], name=f'最近{pie_title[i]}天', textinfo='label+percent'), row=1, col=i)
+                 fig.add_annotation(
+                     text=f'最近{pie_title[i]}天',
+                     x=0.15 + 0.35 * (i - 1),
+                     y=0.95,
+                     xref='paper',  # relative to the full chart area
+                     yref='paper',
+                     showarrow=True,  # show an arrow
+                     align="left",  # text alignment
+                     font=dict(size=16),
+                 )
+                 i += 1
+             fig.update_layout(
+                 title_text='店铺流量来源',
+                 xaxis_title='X Axis',
+                 yaxis_title='Y Axis',
+                 # width=self.screen_width // 1.4,
+                 # height=self.screen_width // 2,
+                 margin=dict(
+                     l=100,  # left margin
+                     r=300,
+                     t=100,  # top margin
+                     b=400,
+                 ),
+             )
+             fig.update_layout(xaxis_showgrid=False, yaxis_showgrid=False, xaxis_visible=False, yaxis_visible=False)
+             return fig
+
+         data_list = [{'键': labels1, '值': values1}, {'键': labels2, '值': values2}, {'键': labels3, '值': values3}]
+         fig = make_sub(data_list=data_list, num=3)
+         fig.write_html(os.path.join(self.path, f'{filename}.html'))
+
+     def tg(self, db_name='聚合数据', table_name='多店推广场景_按日聚合', pro_list=None, filename='多店推广场景', days=None, start_date=None, end_date=None):
+         """
+         :param db_name:
+         :param table_name:
+         :param pro_list:
+         :param filename:
+         :param days:
+         :param start_date: if given, days is ignored; if neither is given, days defaults to 7
+         :param end_date:
+         :return:
+         """
+         if not pro_list:
+             pro_list = ['日期', '店铺名称', '营销场景', '花费', '成交金额']
+         df = self.getdata(db_name=db_name, table_name=table_name, pro_list=pro_list)
+         if len(df) == 0:
+             print(f'数据不能为空: {table_name}')
+             return
+         df['日期'] = pd.to_datetime(df['日期'])
+         today = datetime.date.today()
+
+         def st_date(num=1):
+             return pd.to_datetime(today - datetime.timedelta(days=num))
+
+         if start_date and end_date:
+             df = df[(df['日期'] >= pd.to_datetime(start_date)) & (df['日期'] <= pd.to_datetime(end_date))]
+         elif days:
+             df = df[df['日期'] >= st_date(num=days)]
+         else:
+             df = df[df['日期'] >= st_date(num=7)]
+
+         df = df.groupby(['日期', '店铺名称', '营销场景'], as_index=False).agg(**{'花费': ('花费', np.sum), '成交金额': ('成交金额', np.sum)})
+         df_other = df.groupby(['店铺名称'], as_index=False).agg(**{'花费': ('花费', np.sum)})
+         df_other = df_other.sort_values('花费', ascending=False)
+         data_list = []
+         for shopname in df_other['店铺名称'].tolist():
+             data_list.append(df[df['店铺名称'] == shopname])
+         # df1 = df[df['店铺名称'] == '万里马官方旗舰店']
+         # df2 = df[df['店铺名称'] == '万里马官方企业店']
+         # df3 = df[df['店铺名称'] == '京东箱包旗舰店']
+         # data_list = [df1, df2, df3]
+
+         def make_sub(data_list):
+             steps = len(data_list)
+             specs = []
+             t_p1 = []
+             for i in range(steps):
+                 t_p1.extend([{"type": "xy"}])  # line-chart cell type
+             t_p2 = []
+             for i in range(steps):
+                 t_p2.extend([{"type": "pie"}])  # pie-chart cell type
+             specs = [t_p1, t_p2]
+
+             # build one figure with two rows of subplots: line charts on top, pies below
+             fig = make_subplots(
+                 rows=2,
+                 cols=steps,
+                 specs=specs,  # note: specs is passed as a list of rows
+                 # subplot_titles=("First Line Chart", "Second Line Chart")
+             )
+             count = 1
+             for df in data_list:
+                 shop = df['店铺名称'].tolist()[0]
+                 # row 1: add the line charts, one trace per marketing scene
+                 scences = df['营销场景'].unique()
+                 for scence in scences:
+                     df_inside = df[df['营销场景'] == scence]
+                     # if len(df_inside) < 7:
+                     #     continue
+                     fig.add_trace(go.Scatter(x=df_inside['日期'].tolist(), y=df_inside['花费'].tolist(), mode='lines', name=f'{scence}_{shop}'), row=1, col=count)
+                 # row 2: add the pie chart of spend per scene
+                 df = df.groupby(['营销场景'], as_index=False).agg(**{'花费': ('花费', np.sum)})
+                 labels = df['营销场景'].tolist()
+                 values = df['花费'].tolist()
+                 fig.add_trace(go.Pie(labels=labels, values=values, name=shop, textinfo='label+percent'), row=2, col=count)
+                 fig.add_annotation(
+                     text=shop,
+                     x=0.01 + 0.395 * (count - 1),
+                     y=1.04,
+                     xref='paper',  # relative to the full chart area
+                     yref='paper',
+                     showarrow=False,  # no arrow for this label
+                     align="left",  # text alignment
+                     font=dict(size=16),
+                 )
+                 count += 1
+             return fig
+
+         fig = make_sub(data_list=data_list)
+         fig.update_layout(
+             title_text='多店推广花费_按日聚合',
+             xaxis_title='日期',
+             yaxis_title='花费',
+             # width=self.screen_width // 1.4,
+             # height=self.screen_width // 2,
+             margin=dict(
+                 l=100,  # left margin
+                 r=100,
+                 t=100,  # top margin
+                 b=150,
+             ),
+             # legend=dict(orientation="h")
+         )
+         count = 1
+         for item in data_list:
+             roi = round(item['成交金额'].sum() / item['花费'].sum(), 2)
+             fig.add_annotation(
+                 text=f"合计: {int(item['花费'].sum())}元 / roi: {roi}",
+                 x=0.15 + 0.425 * (count - 1),
+                 y=1.04,
+                 xref='paper',  # relative to the full chart area
+                 yref='paper',
+                 showarrow=False,  # no arrow for this label
+                 align="left",  # text alignment
+                 font=dict(size=16),
+             )
+             count += 1
+         fig.write_html(os.path.join(self.path, f'{filename}.html'))
+
+     def item_crowd(self, db_name='商品人群画像2', table_list=None, pro_list=None, filename='商品人群画像', item_id=None):
+         if not pro_list:
+             pro_list = ['日期', '店铺名称', '洞察类型', '行为类型', '商品id', '统计周期', '标签名称', '标签人群数量']
+         if not table_list:
+             table_list = ['消费能力等级', '用户年龄', '月均消费金额']
+         for table_name in table_list:
+             df = self.getdata(db_name=db_name, table_name=table_name, pro_list=pro_list)
+
+             df['日期'] = pd.to_datetime(df['日期'])
+             if item_id:
+                 df = df[df['商品id'] == int(item_id)]
+             df = df[(df['日期'] == df['日期'].max()) & ~df['标签名称'].str.contains('unknown', case=False)]
+             df = df[df['统计周期'] == '近30天']
+             item_ids = df['商品id'].unique()
+             data_list = []
+             for item in item_ids:
+                 data_list.append(df[df['商品id'] == item])
+
+             fig = make_subplots(rows=2, cols=3)
+             # draw one bar chart per subplot
+             for count, item in enumerate(data_list):
+                 labels = item['标签名称'].tolist()
+                 values = item['标签人群数量'].tolist()
+                 item_id = item['商品id'].tolist()[0]
+                 item['Percentage'] = item['标签人群数量'] / item['标签人群数量'].sum() * 100
+                 percentages = item['Percentage']
+                 bar = go.Bar(
+                     x=labels,
+                     y=values,
+                     name=item_id,
+                     text=percentages.map('{:.2f}%'.format),  # text shown on each bar (percentage share)
+                     # textposition = 'outside',  # place the text outside the bars
+                 )
+                 fig.add_trace(
+                     bar,
+                     row=count // 3 + 1,
+                     col=count % 3 + 1
+                 )
+                 if count < 3:
+                     x = 0.01 + 0.395 * (count)
+                     y = 1.04
+                 else:
+                     x = 0.01 + 0.395 * (count % 3)
+                     y = 1.04 - 0.59 * (count // 3)
+                 fig.add_annotation(
+                     text=item_id,
+                     x=x,
+                     y=y,
+                     xref='paper',  # relative to the full chart area
+                     yref='paper',
+                     showarrow=False,  # no arrow for this label
+                     align="left",  # text alignment
+                     font=dict(size=16),
+                 )
+                 if count == 5:
+                     break
+             fig.update_layout(
+                 title_text=db_name,
+                 xaxis_title='标签',
+                 yaxis_title='人群数量',
+                 # width=self.screen_width // 1.4,
+                 # height=self.screen_width // 2,
+                 margin=dict(
+                     l=100,  # left margin
+                     r=100,
+                     t=100,  # top margin
+                     b=150,
+                 ),
+                 # legend=dict(orientation="h")
+             )
+             # update the layout to tidy the display
+             for count, item in enumerate(data_list):
+                 fig.add_annotation(
+                     text=f'合计: ',
+                     x=0.15 + 0.425 * (count),
+                     y=1.04,
+                     xref='paper',  # relative to the full chart area
+                     yref='paper',
+                     showarrow=False,  # no arrow for this label
+                     align="left",  # text alignment
+                     font=dict(size=16),
+                 )
+                 count += 1
+             fig.write_html(os.path.join(self.path, f'{filename}.html'))
+
+
+ def main():
+     ds = DataShow()
+     # ds.dpll()
+     # ds.tg(
+     #     days=15,
+     #     # start_date='2024-12-01',
+     #     # end_date='2024-12-31',
+     # )
+     ds.item_crowd(
+         # item_id='839225234621',
+     )
+
+
+ if __name__ == '__main__':
+     main()
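
Note on the new module: datashow.py resolves its MySQL credentials from mdbq.config.myconfig at import time (choosing a remote or local profile by hostname) and each method writes a standalone Plotly chart as an HTML file under self.path. A minimal usage sketch, assuming the myconfig profiles resolve on the local machine and a display is available for the tkinter screen-size lookup; the output directory below is a hypothetical override, not a value from the package:

    # Sketch only: exercises the public methods of DataShow as defined above.
    from mdbq.aggregation import datashow

    ds = datashow.DataShow()
    ds.path = '/tmp/mdbq_charts'   # hypothetical output directory; the class defaults to /Users/xigua/Downloads
    ds.dpll()                      # traffic-source pies for the last 1/7/30 days -> 店铺流量来源.html
    ds.tg(days=15)                 # per-shop promotion spend, lines + pies -> 多店推广场景.html
    ds.item_crowd()                # crowd-profile bar charts -> 商品人群画像.html
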
mdbq/aggregation/query_data.py CHANGED
@@ -3806,4 +3806,9 @@ if __name__ == '__main__':
 # is_mysql=True,
 # )
 
- query3(months=2, less_dict=[])
+ # query3(months=2, less_dict=[])
+
+ sdq = MysqlDatasQuery()  # instantiate the data-processing class
+ sdq.months = 100  # data window; 1 means roughly the last 2 months
+ sdq.update_service = True  # enable while debugging; True writes the results to the MySQL server
+ sdq.dplyd(db_name='聚合数据', table_name='店铺流量来源构成')
mdbq/mysql/mysql.py CHANGED
@@ -128,7 +128,7 @@ class MysqlUpload:
 
 return wrapper
 
- def keep_connect(self, _db_name, _config, max_try: int=5):
+ def keep_connect(self, _db_name, _config, max_try: int=10):
 attempts = 1
 while attempts <= max_try:
 try:
@@ -137,7 +137,7 @@ class MysqlUpload:
 except Exception as e:
 print(f'连接失败,正在重试: {attempts}/{max_try} {e}')
 attempts += 1
- time.sleep(20)
+ time.sleep(30)
 print(f'{_db_name}: 连接失败,重试次数超限,当前设定次数: {max_try}')
 return None
 
@@ -215,7 +215,7 @@ class MysqlUpload:
 return
 
 # connection = pymysql.connect(**self.config) # 连接数据库
- connection = self.keep_connect(_db_name=db_name, _config=self.config, max_try=5)
+ connection = self.keep_connect(_db_name=db_name, _config=self.config, max_try=10)
 if not connection:
 return
 with connection.cursor() as cursor:
@@ -236,7 +236,7 @@ class MysqlUpload:
 
 self.config.update({'database': db_name}) # 添加更新 config 字段
 # connection = pymysql.connect(**self.config) # 重新连接数据库
- connection = self.keep_connect(_db_name=db_name, _config=self.config, max_try=5)
+ connection = self.keep_connect(_db_name=db_name, _config=self.config, max_try=10)
 if not connection:
 return
 with connection.cursor() as cursor:
@@ -386,7 +386,7 @@ class MysqlUpload:
 print(f'{table_name} 将数据按年/月保存(cut_data),但在转换日期时报错 -> {e}')
 
 # connection = pymysql.connect(**self.config) # 连接数据库
- connection = self.keep_connect(_db_name=db_name, _config=self.config, max_try=5)
+ connection = self.keep_connect(_db_name=db_name, _config=self.config, max_try=10)
 if not connection:
 return
 with connection.cursor() as cursor:
@@ -407,7 +407,7 @@ class MysqlUpload:
 
 self.config.update({'database': db_name}) # 添加更新 config 字段
 # connection = pymysql.connect(**self.config) # 重新连接数据库
- connection = self.keep_connect(_db_name=db_name, _config=self.config, max_try=5)
+ connection = self.keep_connect(_db_name=db_name, _config=self.config, max_try=10)
 if not connection:
 return
 with connection.cursor() as cursor:
@@ -597,7 +597,7 @@ class MysqlUpload:
 print(f'{table_name} 将数据按年/月保存(cut_data),但在转换日期时报错 -> {e}')
 
 # connection = pymysql.connect(**self.config) # 连接数据库
- connection = self.keep_connect(_db_name=db_name, _config=self.config, max_try=5)
+ connection = self.keep_connect(_db_name=db_name, _config=self.config, max_try=10)
 if not connection:
 return
 with connection.cursor() as cursor:
@@ -618,7 +618,7 @@ class MysqlUpload:
 
 self.config.update({'database': db_name}) # 添加更新 config 字段
 # connection = pymysql.connect(**self.config) # 重新连接数据库
- connection = self.keep_connect(_db_name=db_name, _config=self.config, max_try=5)
+ connection = self.keep_connect(_db_name=db_name, _config=self.config, max_try=10)
 if not connection:
 return
 with connection.cursor() as cursor:
@@ -964,7 +964,7 @@ class MysqlUpload:
 [dtypes.update({k: inside_v}) for inside_k, inside_v in set_typ.items() if k == inside_k]
 
 # connection = pymysql.connect(**self.config) # 连接数据库
- connection = self.keep_connect(_db_name=db_name, _config=self.config, max_try=5)
+ connection = self.keep_connect(_db_name=db_name, _config=self.config, max_try=10)
 if not connection:
 return
 with connection.cursor() as cursor:
@@ -985,7 +985,7 @@ class MysqlUpload:
 
 self.config.update({'database': db_name}) # 添加更新 config 字段
 # connection = pymysql.connect(**self.config) # 重新连接数据库
- connection = self.keep_connect(_db_name=db_name, _config=self.config, max_try=5)
+ connection = self.keep_connect(_db_name=db_name, _config=self.config, max_try=10)
 if not connection:
 return
 with connection.cursor() as cursor:
@@ -1264,7 +1264,7 @@ class MysqlUpload:
 print(f'未指定文件名: filename')
 return
 # connection = pymysql.connect(**self.config) # 连接数据库
- connection = self.keep_connect(_db_name=db_name, _config=self.config, max_try=5)
+ connection = self.keep_connect(_db_name=db_name, _config=self.config, max_try=10)
 if not connection:
 return
 # try:
@@ -1276,7 +1276,7 @@ class MysqlUpload:
 return
 self.config.update({'database': db_name})
 # connection = pymysql.connect(**self.config) # 重新连接数据库
- connection = self.keep_connect(_db_name=db_name, _config=self.config, max_try=5)
+ connection = self.keep_connect(_db_name=db_name, _config=self.config, max_try=10)
 if not connection:
 return
 with connection.cursor() as cursor:
@@ -1308,7 +1308,7 @@ class MysqlUpload:
 df = pd.DataFrame()
 
 # connection = pymysql.connect(**self.config) # 连接数据库
- connection = self.keep_connect(_db_name=db_name, _config=self.config, max_try=5)
+ connection = self.keep_connect(_db_name=db_name, _config=self.config, max_try=10)
 if not connection:
 return
 try:
@@ -1330,7 +1330,7 @@ class MysqlUpload:
 # 读取数据
 self.config.update({'database': db_name})
 # connection = pymysql.connect(**self.config) # 重新连接数据库
- connection = self.keep_connect(_db_name=db_name, _config=self.config, max_try=5)
+ connection = self.keep_connect(_db_name=db_name, _config=self.config, max_try=10)
 if not connection:
 return
 try:
@@ -1454,7 +1454,7 @@ class OptimizeDatas:
 
 return wrapper
 
- def keep_connect(self, _db_name, _config, max_try: int=5):
+ def keep_connect(self, _db_name, _config, max_try: int=10):
 attempts = 1
 while attempts <= max_try:
 try:
@@ -1463,7 +1463,7 @@ class OptimizeDatas:
 except Exception as e:
 print(f'连接失败,正在重试: {attempts}/{max_try} {e}')
 attempts += 1
- time.sleep(20)
+ time.sleep(30)
 print(f'{_db_name}: 连接失败,重试次数超限,当前设定次数: {max_try}')
 return None
 
@@ -1514,7 +1514,7 @@ class OptimizeDatas:
 # continue
 self.config.update({'database': self.db_name}) # 添加更新 config 字段
 # self.connection = pymysql.connect(**self.config)
- self.connection = self.keep_connect(_db_name=self.db_name, _config=self.config, max_try=5)
+ self.connection = self.keep_connect(_db_name=self.db_name, _config=self.config, max_try=10)
 if not self.connection:
 return
 with self.connection.cursor() as cursor:
@@ -1669,7 +1669,7 @@ class OptimizeDatas:
 def database_list(self):
 """ 获取所有数据库 """
 # connection = pymysql.connect(**self.config) # 连接数据库
- connection = self.keep_connect(_db_name=self.db_name, _config=self.config, max_try=5)
+ connection = self.keep_connect(_db_name=self.db_name, _config=self.config, max_try=10)
 if not connection:
 return
 with connection.cursor() as cursor:
@@ -1681,7 +1681,7 @@ class OptimizeDatas:
 def table_list(self, db_name):
 """ 获取指定数据库的所有数据表 """
 # connection = pymysql.connect(**self.config) # 连接数据库
- connection = self.keep_connect(_db_name=self.db_name, _config=self.config, max_try=5)
+ connection = self.keep_connect(_db_name=self.db_name, _config=self.config, max_try=10)
 if not connection:
 return
 try:
@@ -1700,7 +1700,7 @@ class OptimizeDatas:
 
 self.config.update({'database': db_name}) # 添加更新 config 字段
 # connection = pymysql.connect(**self.config) # 重新连接数据库
- connection = self.keep_connect(_db_name=db_name, _config=self.config, max_try=5)
+ connection = self.keep_connect(_db_name=db_name, _config=self.config, max_try=10)
 if not connection:
 return
 with connection.cursor() as cursor:
@@ -1715,7 +1715,7 @@ class OptimizeDatas:
 """
 self.config.update({'database': db_name}) # 添加更新 config 字段
 # connection = pymysql.connect(**self.config)
- connection = self.keep_connect(_db_name=db_name, _config=self.config, max_try=5)
+ connection = self.keep_connect(_db_name=db_name, _config=self.config, max_try=10)
 if not connection:
 return
 try:
@@ -1751,7 +1751,7 @@ class OptimizeDatas:
 for key, table_name in table_dict.items():
 self.config.update({'database': self.db_name}) # 添加更新 config 字段
 # self.connection = pymysql.connect(**self.config)
- self.connection = self.keep_connect(_db_name=self.db_name, _config=self.config, max_try=5)
+ self.connection = self.keep_connect(_db_name=self.db_name, _config=self.config, max_try=10)
 if not self.connection:
 return
 with self.connection.cursor() as cursor:
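
Note: every hunk above is the same tuning of the connection-retry helper shared by MysqlUpload and OptimizeDatas: keep_connect now retries up to 10 times instead of 5 and sleeps 30 s instead of 20 s between attempts, so a fully unreachable server can now block a call for roughly 300 s of sleep (plus per-attempt connection timeouts) rather than 100 s. A simplified, self-contained sketch of the pattern; the config dict and messages are illustrative, not the package's exact configuration:

    import time
    import pymysql

    def keep_connect(config: dict, max_try: int = 10, delay: int = 30):
        """Retry pymysql.connect up to max_try times, sleeping `delay` seconds after each failure."""
        for attempt in range(1, max_try + 1):
            try:
                return pymysql.connect(**config)
            except Exception as e:  # the package catches broad Exception here as well
                print(f'connect failed, retrying: {attempt}/{max_try} {e}')
                time.sleep(delay)
        print(f'connection failed, retry limit reached ({max_try})')
        return None
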
mdbq/spider/aikucun.py CHANGED
@@ -483,7 +483,7 @@ class AikuCunNew:
 
 if __name__ == '__main__':
 get_cookie_aikucun() # 登录并获取 cookies
- akucun(date_num=5, headless=True) # 下载数据
+ akucun(date_num=3, headless=True) # 下载数据
 
 # a = AikuCunNew(shop_name='aikucun')
 # a.akc()
mdbq-3.3.12.dist-info/METADATA → mdbq-3.3.14.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: mdbq
- Version: 3.3.12
+ Version: 3.3.14
 Home-page: https://pypi.org/project/mdbq
 Author: xigua,
 Author-email: 2587125111@qq.com
mdbq-3.3.12.dist-info/RECORD → mdbq-3.3.14.dist-info/RECORD
@@ -2,8 +2,9 @@ mdbq/__init__.py,sha256=Il5Q9ATdX8yXqVxtP_nYqUhExzxPC_qk_WXQ_4h0exg,16
 mdbq/__version__.py,sha256=y9Mp_8x0BCZSHsdLT_q5tX9wZwd5QgqrSIENLrb6vXA,62
 mdbq/aggregation/__init__.py,sha256=EeDqX2Aml6SPx8363J-v1lz0EcZtgwIBYyCJV6CcEDU,40
 mdbq/aggregation/aggregation.py,sha256=-yzApnlqSN2L0E1YMu5ml-W827qpKQvWPCOI7jj2kzY,80264
+ mdbq/aggregation/datashow.py,sha256=MuD5wHOKayKnneGcXIA4U6G3mJxFrAA6lVLBn9Vyt4M,17715
 mdbq/aggregation/optimize_data.py,sha256=RXIv7cACCgYyehAxMjUYi_S7rVyjIwXKWMaM3nduGtA,3068
- mdbq/aggregation/query_data.py,sha256=tXQzRkexIbVzwYwPO2Kp3HAhqfTPDRc-ZLeBKlGrvYw,173547
+ mdbq/aggregation/query_data.py,sha256=4Fd4dMGi6Cu-KgNTf1OBNYe8InjvpMA5JALxCwvsHyw,173841
 mdbq/bdup/__init__.py,sha256=AkhsGk81SkG1c8FqDH5tRq-8MZmFobVbN60DTyukYTY,28
 mdbq/bdup/bdup.py,sha256=LAV0TgnQpc-LB-YuJthxb0U42_VkPidzQzAagan46lU,4234
 mdbq/config/__init__.py,sha256=jso1oHcy6cJEfa7udS_9uO5X6kZLoPBF8l3wCYmr5dM,18
@@ -17,7 +18,7 @@ mdbq/log/mylogger.py,sha256=oaT7Bp-Hb9jZt52seP3ISUuxVcI19s4UiqTeouScBO0,3258
 mdbq/mongo/__init__.py,sha256=SILt7xMtQIQl_m-ik9WLtJSXIVf424iYgCfE_tnQFbw,13
 mdbq/mongo/mongo.py,sha256=M9DUeUCMPDngkwn9-ui0uTiFrvfNU1kLs22s5SmoNm0,31899
 mdbq/mysql/__init__.py,sha256=A_DPJyAoEvTSFojiI2e94zP0FKtCkkwKP1kYUCSyQzo,11
- mdbq/mysql/mysql.py,sha256=5c3hY-wHP24krhgbMHYlV-mbJNhnq5F8AeRUczWLjog,99358
+ mdbq/mysql/mysql.py,sha256=OndnoP1cBDM9h1bR_Uh2waT3yUjlgr05zHIlC7mmxhc,99378
 mdbq/mysql/recheck_mysql.py,sha256=ppBTfBLgkRWirMVZ31e_ZPULiGPJU7K3PP9G6QBZ3QI,8605
 mdbq/mysql/s_query.py,sha256=6L5Cp90zq13noZHjzSA5mqms_hD01c8GO1_NfbYDu6w,9252
 mdbq/mysql/year_month_day.py,sha256=VgewoE2pJxK7ErjfviL_SMTN77ki8GVbTUcao3vFUCE,1523
@@ -32,8 +33,8 @@ mdbq/pbix/pbix_refresh.py,sha256=JUjKW3bNEyoMVfVfo77UhguvS5AWkixvVhDbw4_MHco,239
 mdbq/pbix/refresh_all.py,sha256=OBT9EewSZ0aRS9vL_FflVn74d4l2G00wzHiikCC4TC0,5926
 mdbq/pbix/refresh_all_old.py,sha256=_pq3WSQ728GPtEG5pfsZI2uTJhU8D6ra-htIk1JXYzw,7192
 mdbq/spider/__init__.py,sha256=RBMFXGy_jd1HXZhngB2T2XTvJqki8P_Fr-pBcwijnew,18
- mdbq/spider/aikucun.py,sha256=zOacjrJ3MvToyuugA68xB-oN6RKj8K3GxMKudnln9EA,22207
- mdbq-3.3.12.dist-info/METADATA,sha256=3wdTVOw51cYbBG2GJ9m_eMaJtgs4-ZpFOxV6ePkajDQ,244
- mdbq-3.3.12.dist-info/WHEEL,sha256=eOLhNAGa2EW3wWl_TU484h7q1UNgy0JXjjoqKoxAAQc,92
- mdbq-3.3.12.dist-info/top_level.txt,sha256=2FQ-uLnCSB-OwFiWntzmwosW3X2Xqsg0ewh1axsaylA,5
- mdbq-3.3.12.dist-info/RECORD,,
+ mdbq/spider/aikucun.py,sha256=eAIITxnbbxsR_EoohJ78CRw2dEdfSHOltfpxBrh0cvc,22207
+ mdbq-3.3.14.dist-info/METADATA,sha256=hLNo4Sr6v2frEpCx7fX4B9h55Jt0Dj7nnd4tagKv2VI,244
+ mdbq-3.3.14.dist-info/WHEEL,sha256=eOLhNAGa2EW3wWl_TU484h7q1UNgy0JXjjoqKoxAAQc,92
+ mdbq-3.3.14.dist-info/top_level.txt,sha256=2FQ-uLnCSB-OwFiWntzmwosW3X2Xqsg0ewh1axsaylA,5
+ mdbq-3.3.14.dist-info/RECORD,,